path: root/chromium/third_party/webrtc
author    Jocelyn Turcotte <jocelyn.turcotte@digia.com>  2014-08-08 14:30:41 +0200
committer Jocelyn Turcotte <jocelyn.turcotte@digia.com>  2014-08-12 13:49:54 +0200
commit    ab0a50979b9eb4dfa3320eff7e187e41efedf7a9 (patch)
tree      498dfb8a97ff3361a9f7486863a52bb4e26bb898 /chromium/third_party/webrtc
parent    4ce69f7403811819800e7c5ae1318b2647e778d1 (diff)
Update Chromium to beta version 37.0.2062.68
Change-Id: I188e3b5aff1bec75566014291b654eb19f5bc8ca
Reviewed-by: Andras Becsi <andras.becsi@digia.com>
Diffstat (limited to 'chromium/third_party/webrtc')
-rw-r--r--chromium/third_party/webrtc/BUILD.gn174
-rw-r--r--chromium/third_party/webrtc/OWNERS6
-rw-r--r--chromium/third_party/webrtc/PRESUBMIT.py8
-rw-r--r--chromium/third_party/webrtc/base/BUILD.gn723
-rw-r--r--chromium/third_party/webrtc/base/OWNERS7
-rw-r--r--chromium/third_party/webrtc/base/asyncfile.cc21
-rw-r--r--chromium/third_party/webrtc/base/asyncfile.h40
-rw-r--r--chromium/third_party/webrtc/base/asynchttprequest.cc116
-rw-r--r--chromium/third_party/webrtc/base/asynchttprequest.h104
-rw-r--r--chromium/third_party/webrtc/base/asynchttprequest_unittest.cc233
-rw-r--r--chromium/third_party/webrtc/base/asyncinvoker-inl.h129
-rw-r--r--chromium/third_party/webrtc/base/asyncinvoker.cc91
-rw-r--r--chromium/third_party/webrtc/base/asyncinvoker.h134
-rw-r--r--chromium/third_party/webrtc/base/asyncpacketsocket.h140
-rw-r--r--chromium/third_party/webrtc/base/asyncresolverinterface.h47
-rw-r--r--chromium/third_party/webrtc/base/asyncsocket.cc44
-rw-r--r--chromium/third_party/webrtc/base/asyncsocket.h124
-rw-r--r--chromium/third_party/webrtc/base/asynctcpsocket.cc299
-rw-r--r--chromium/third_party/webrtc/base/asynctcpsocket.h100
-rw-r--r--chromium/third_party/webrtc/base/asynctcpsocket_unittest.cc53
-rw-r--r--chromium/third_party/webrtc/base/asyncudpsocket.cc122
-rw-r--r--chromium/third_party/webrtc/base/asyncudpsocket.h63
-rw-r--r--chromium/third_party/webrtc/base/asyncudpsocket_unittest.cc53
-rw-r--r--chromium/third_party/webrtc/base/atomicops.h149
-rw-r--r--chromium/third_party/webrtc/base/atomicops_unittest.cc79
-rw-r--r--chromium/third_party/webrtc/base/autodetectproxy.cc282
-rw-r--r--chromium/third_party/webrtc/base/autodetectproxy.h90
-rw-r--r--chromium/third_party/webrtc/base/autodetectproxy_unittest.cc131
-rw-r--r--chromium/third_party/webrtc/base/bandwidthsmoother.cc84
-rw-r--r--chromium/third_party/webrtc/base/bandwidthsmoother.h59
-rw-r--r--chromium/third_party/webrtc/base/bandwidthsmoother_unittest.cc116
-rw-r--r--chromium/third_party/webrtc/base/base.gyp756
-rw-r--r--chromium/third_party/webrtc/base/base64.cc260
-rw-r--r--chromium/third_party/webrtc/base/base64.h104
-rw-r--r--chromium/third_party/webrtc/base/base64_unittest.cc1001
-rw-r--r--chromium/third_party/webrtc/base/base_tests.gyp156
-rw-r--r--chromium/third_party/webrtc/base/basicdefs.h20
-rw-r--r--chromium/third_party/webrtc/base/basictypes.h134
-rw-r--r--chromium/third_party/webrtc/base/basictypes_unittest.cc75
-rw-r--r--chromium/third_party/webrtc/base/bind.h587
-rw-r--r--chromium/third_party/webrtc/base/bind.h.pump138
-rw-r--r--chromium/third_party/webrtc/base/bind_unittest.cc67
-rw-r--r--chromium/third_party/webrtc/base/buffer.h102
-rw-r--r--chromium/third_party/webrtc/base/buffer_unittest.cc143
-rw-r--r--chromium/third_party/webrtc/base/bytebuffer.cc234
-rw-r--r--chromium/third_party/webrtc/base/bytebuffer.h119
-rw-r--r--chromium/third_party/webrtc/base/bytebuffer_unittest.cc211
-rw-r--r--chromium/third_party/webrtc/base/byteorder.h168
-rw-r--r--chromium/third_party/webrtc/base/byteorder_unittest.cc83
-rw-r--r--chromium/third_party/webrtc/base/callback.h261
-rw-r--r--chromium/third_party/webrtc/base/callback.h.pump103
-rw-r--r--chromium/third_party/webrtc/base/callback_unittest.cc81
-rw-r--r--chromium/third_party/webrtc/base/checks.cc34
-rw-r--r--chromium/third_party/webrtc/base/checks.h30
-rw-r--r--chromium/third_party/webrtc/base/common.cc64
-rw-r--r--chromium/third_party/webrtc/base/common.h200
-rw-r--r--chromium/third_party/webrtc/base/compile_assert.h82
-rw-r--r--chromium/third_party/webrtc/base/constructormagic.h41
-rw-r--r--chromium/third_party/webrtc/base/cpumonitor.cc423
-rw-r--r--chromium/third_party/webrtc/base/cpumonitor.h123
-rw-r--r--chromium/third_party/webrtc/base/cpumonitor_unittest.cc388
-rw-r--r--chromium/third_party/webrtc/base/crc32.cc52
-rw-r--r--chromium/third_party/webrtc/base/crc32.h34
-rw-r--r--chromium/third_party/webrtc/base/crc32_unittest.cc35
-rw-r--r--chromium/third_party/webrtc/base/criticalsection.h179
-rw-r--r--chromium/third_party/webrtc/base/criticalsection_unittest.cc146
-rw-r--r--chromium/third_party/webrtc/base/cryptstring.h183
-rw-r--r--chromium/third_party/webrtc/base/dbus.cc396
-rw-r--r--chromium/third_party/webrtc/base/dbus.h168
-rw-r--r--chromium/third_party/webrtc/base/dbus_unittest.cc232
-rw-r--r--chromium/third_party/webrtc/base/diskcache.cc347
-rw-r--r--chromium/third_party/webrtc/base/diskcache.h125
-rw-r--r--chromium/third_party/webrtc/base/diskcache_win32.cc86
-rw-r--r--chromium/third_party/webrtc/base/diskcache_win32.h29
-rw-r--r--chromium/third_party/webrtc/base/dscp.h45
-rw-r--r--chromium/third_party/webrtc/base/event.cc135
-rw-r--r--chromium/third_party/webrtc/base/event.h51
-rw-r--r--chromium/third_party/webrtc/base/event_unittest.cc42
-rw-r--r--chromium/third_party/webrtc/base/fakecpumonitor.h32
-rw-r--r--chromium/third_party/webrtc/base/fakenetwork.h119
-rw-r--r--chromium/third_party/webrtc/base/fakesslidentity.h94
-rw-r--r--chromium/third_party/webrtc/base/faketaskrunner.h38
-rw-r--r--chromium/third_party/webrtc/base/filelock.cc62
-rw-r--r--chromium/third_party/webrtc/base/filelock.h53
-rw-r--r--chromium/third_party/webrtc/base/filelock_unittest.cc87
-rw-r--r--chromium/third_party/webrtc/base/fileutils.cc307
-rw-r--r--chromium/third_party/webrtc/base/fileutils.h459
-rw-r--r--chromium/third_party/webrtc/base/fileutils_mock.h253
-rw-r--r--chromium/third_party/webrtc/base/fileutils_unittest.cc131
-rw-r--r--chromium/third_party/webrtc/base/firewallsocketserver.cc239
-rw-r--r--chromium/third_party/webrtc/base/firewallsocketserver.h120
-rw-r--r--chromium/third_party/webrtc/base/flags.cc299
-rw-r--r--chromium/third_party/webrtc/base/flags.h270
-rw-r--r--chromium/third_party/webrtc/base/gunit.h88
-rw-r--r--chromium/third_party/webrtc/base/gunit_prod.h24
-rw-r--r--chromium/third_party/webrtc/base/helpers.cc296
-rw-r--r--chromium/third_party/webrtc/base/helpers.h56
-rw-r--r--chromium/third_party/webrtc/base/helpers_unittest.cc78
-rw-r--r--chromium/third_party/webrtc/base/httpbase.cc877
-rw-r--r--chromium/third_party/webrtc/base/httpbase.h181
-rw-r--r--chromium/third_party/webrtc/base/httpbase_unittest.cc520
-rw-r--r--chromium/third_party/webrtc/base/httpclient.cc829
-rw-r--r--chromium/third_party/webrtc/base/httpclient.h202
-rw-r--r--chromium/third_party/webrtc/base/httpcommon-inl.h131
-rw-r--r--chromium/third_party/webrtc/base/httpcommon.cc1045
-rw-r--r--chromium/third_party/webrtc/base/httpcommon.h446
-rw-r--r--chromium/third_party/webrtc/base/httpcommon_unittest.cc165
-rw-r--r--chromium/third_party/webrtc/base/httprequest.cc110
-rw-r--r--chromium/third_party/webrtc/base/httprequest.h115
-rw-r--r--chromium/third_party/webrtc/base/httpserver.cc288
-rw-r--r--chromium/third_party/webrtc/base/httpserver.h137
-rw-r--r--chromium/third_party/webrtc/base/httpserver_unittest.cc130
-rw-r--r--chromium/third_party/webrtc/base/ifaddrs-android.cc223
-rw-r--r--chromium/third_party/webrtc/base/ifaddrs-android.h39
-rw-r--r--chromium/third_party/webrtc/base/iosfilesystem.mm53
-rw-r--r--chromium/third_party/webrtc/base/ipaddress.cc449
-rw-r--r--chromium/third_party/webrtc/base/ipaddress.h141
-rw-r--r--chromium/third_party/webrtc/base/ipaddress_unittest.cc859
-rw-r--r--chromium/third_party/webrtc/base/json.cc296
-rw-r--r--chromium/third_party/webrtc/base/json.h89
-rw-r--r--chromium/third_party/webrtc/base/json_unittest.cc277
-rw-r--r--chromium/third_party/webrtc/base/latebindingsymboltable.cc156
-rw-r--r--chromium/third_party/webrtc/base/latebindingsymboltable.cc.def69
-rw-r--r--chromium/third_party/webrtc/base/latebindingsymboltable.h69
-rw-r--r--chromium/third_party/webrtc/base/latebindingsymboltable.h.def83
-rw-r--r--chromium/third_party/webrtc/base/latebindingsymboltable_unittest.cc55
-rw-r--r--chromium/third_party/webrtc/base/libdbusglibsymboltable.cc24
-rw-r--r--chromium/third_party/webrtc/base/libdbusglibsymboltable.h56
-rw-r--r--chromium/third_party/webrtc/base/linked_ptr.h125
-rw-r--r--chromium/third_party/webrtc/base/linux.cc348
-rw-r--r--chromium/third_party/webrtc/base/linux.h123
-rw-r--r--chromium/third_party/webrtc/base/linux_unittest.cc104
-rw-r--r--chromium/third_party/webrtc/base/linuxfdwalk.c81
-rw-r--r--chromium/third_party/webrtc/base/linuxfdwalk.h34
-rw-r--r--chromium/third_party/webrtc/base/linuxfdwalk_unittest.cc75
-rw-r--r--chromium/third_party/webrtc/base/linuxwindowpicker.cc818
-rw-r--r--chromium/third_party/webrtc/base/linuxwindowpicker.h51
-rw-r--r--chromium/third_party/webrtc/base/linuxwindowpicker_unittest.cc40
-rw-r--r--chromium/third_party/webrtc/base/logging.cc618
-rw-r--r--chromium/third_party/webrtc/base/logging.h387
-rw-r--r--chromium/third_party/webrtc/base/logging_unittest.cc138
-rw-r--r--chromium/third_party/webrtc/base/macasyncsocket.cc477
-rw-r--r--chromium/third_party/webrtc/base/macasyncsocket.h97
-rw-r--r--chromium/third_party/webrtc/base/maccocoasocketserver.h48
-rw-r--r--chromium/third_party/webrtc/base/maccocoasocketserver.mm140
-rw-r--r--chromium/third_party/webrtc/base/maccocoasocketserver_unittest.mm50
-rw-r--r--chromium/third_party/webrtc/base/maccocoathreadhelper.h27
-rw-r--r--chromium/third_party/webrtc/base/maccocoathreadhelper.mm44
-rw-r--r--chromium/third_party/webrtc/base/macconversion.cc159
-rw-r--r--chromium/third_party/webrtc/base/macconversion.h39
-rw-r--r--chromium/third_party/webrtc/base/macsocketserver.cc378
-rw-r--r--chromium/third_party/webrtc/base/macsocketserver.h136
-rw-r--r--chromium/third_party/webrtc/base/macsocketserver_unittest.cc237
-rw-r--r--chromium/third_party/webrtc/base/macutils.cc221
-rw-r--r--chromium/third_party/webrtc/base/macutils.h59
-rw-r--r--chromium/third_party/webrtc/base/macutils_unittest.cc43
-rw-r--r--chromium/third_party/webrtc/base/macwindowpicker.cc256
-rw-r--r--chromium/third_party/webrtc/base/macwindowpicker.h37
-rw-r--r--chromium/third_party/webrtc/base/macwindowpicker_unittest.cc45
-rw-r--r--chromium/third_party/webrtc/base/mathutils.h20
-rw-r--r--chromium/third_party/webrtc/base/md5.cc222
-rw-r--r--chromium/third_party/webrtc/base/md5.h45
-rw-r--r--chromium/third_party/webrtc/base/md5digest.h46
-rw-r--r--chromium/third_party/webrtc/base/md5digest_unittest.cc79
-rw-r--r--chromium/third_party/webrtc/base/messagedigest.cc180
-rw-r--r--chromium/third_party/webrtc/base/messagedigest.h109
-rw-r--r--chromium/third_party/webrtc/base/messagedigest_unittest.cc151
-rw-r--r--chromium/third_party/webrtc/base/messagehandler.cc20
-rw-r--r--chromium/third_party/webrtc/base/messagehandler.h68
-rw-r--r--chromium/third_party/webrtc/base/messagequeue.cc384
-rw-r--r--chromium/third_party/webrtc/base/messagequeue.h254
-rw-r--r--chromium/third_party/webrtc/base/messagequeue_unittest.cc140
-rw-r--r--chromium/third_party/webrtc/base/move.h213
-rw-r--r--chromium/third_party/webrtc/base/multipart.cc253
-rw-r--r--chromium/third_party/webrtc/base/multipart.h79
-rw-r--r--chromium/third_party/webrtc/base/multipart_unittest.cc125
-rw-r--r--chromium/third_party/webrtc/base/nat_unittest.cc345
-rw-r--r--chromium/third_party/webrtc/base/natserver.cc186
-rw-r--r--chromium/third_party/webrtc/base/natserver.h110
-rw-r--r--chromium/third_party/webrtc/base/natsocketfactory.cc487
-rw-r--r--chromium/third_party/webrtc/base/natsocketfactory.h166
-rw-r--r--chromium/third_party/webrtc/base/nattypes.cc55
-rw-r--r--chromium/third_party/webrtc/base/nattypes.h47
-rw-r--r--chromium/third_party/webrtc/base/nethelpers.cc150
-rw-r--r--chromium/third_party/webrtc/base/nethelpers.h65
-rw-r--r--chromium/third_party/webrtc/base/network.cc658
-rw-r--r--chromium/third_party/webrtc/base/network.h245
-rw-r--r--chromium/third_party/webrtc/base/network_unittest.cc617
-rw-r--r--chromium/third_party/webrtc/base/nssidentity.cc521
-rw-r--r--chromium/third_party/webrtc/base/nssidentity.h130
-rw-r--r--chromium/third_party/webrtc/base/nssstreamadapter.cc1020
-rw-r--r--chromium/third_party/webrtc/base/nssstreamadapter.h111
-rw-r--r--chromium/third_party/webrtc/base/nullsocketserver.h61
-rw-r--r--chromium/third_party/webrtc/base/nullsocketserver_unittest.cc47
-rw-r--r--chromium/third_party/webrtc/base/openssl.h20
-rw-r--r--chromium/third_party/webrtc/base/openssladapter.cc884
-rw-r--r--chromium/third_party/webrtc/base/openssladapter.h88
-rw-r--r--chromium/third_party/webrtc/base/openssldigest.cc122
-rw-r--r--chromium/third_party/webrtc/base/openssldigest.h50
-rw-r--r--chromium/third_party/webrtc/base/opensslidentity.cc366
-rw-r--r--chromium/third_party/webrtc/base/opensslidentity.h150
-rw-r--r--chromium/third_party/webrtc/base/opensslstreamadapter.cc857
-rw-r--r--chromium/third_party/webrtc/base/opensslstreamadapter.h198
-rw-r--r--chromium/third_party/webrtc/base/optionsfile.cc184
-rw-r--r--chromium/third_party/webrtc/base/optionsfile.h49
-rw-r--r--chromium/third_party/webrtc/base/optionsfile_unittest.cc168
-rw-r--r--chromium/third_party/webrtc/base/pathutils.cc251
-rw-r--r--chromium/third_party/webrtc/base/pathutils.h163
-rw-r--r--chromium/third_party/webrtc/base/pathutils_unittest.cc48
-rw-r--r--chromium/third_party/webrtc/base/physicalsocketserver.cc1659
-rw-r--r--chromium/third_party/webrtc/base/physicalsocketserver.h120
-rw-r--r--chromium/third_party/webrtc/base/physicalsocketserver_unittest.cc274
-rw-r--r--chromium/third_party/webrtc/base/posix.cc131
-rw-r--r--chromium/third_party/webrtc/base/posix.h25
-rw-r--r--chromium/third_party/webrtc/base/profiler.cc186
-rw-r--r--chromium/third_party/webrtc/base/profiler.h161
-rw-r--r--chromium/third_party/webrtc/base/profiler_unittest.cc113
-rw-r--r--chromium/third_party/webrtc/base/proxy_unittest.cc135
-rw-r--r--chromium/third_party/webrtc/base/proxydetect.cc1246
-rw-r--r--chromium/third_party/webrtc/base/proxydetect.h31
-rw-r--r--chromium/third_party/webrtc/base/proxydetect_unittest.cc164
-rw-r--r--chromium/third_party/webrtc/base/proxyinfo.cc20
-rw-r--r--chromium/third_party/webrtc/base/proxyinfo.h42
-rw-r--r--chromium/third_party/webrtc/base/proxyserver.cc144
-rw-r--r--chromium/third_party/webrtc/base/proxyserver.h96
-rw-r--r--chromium/third_party/webrtc/base/ratelimiter.cc29
-rw-r--r--chromium/third_party/webrtc/base/ratelimiter.h63
-rw-r--r--chromium/third_party/webrtc/base/ratelimiter_unittest.cc59
-rw-r--r--chromium/third_party/webrtc/base/ratetracker.cc63
-rw-r--r--chromium/third_party/webrtc/base/ratetracker.h42
-rw-r--r--chromium/third_party/webrtc/base/ratetracker_unittest.cc74
-rw-r--r--chromium/third_party/webrtc/base/refcount.h78
-rw-r--r--chromium/third_party/webrtc/base/referencecountedsingletonfactory.h157
-rw-r--r--chromium/third_party/webrtc/base/referencecountedsingletonfactory_unittest.cc132
-rw-r--r--chromium/third_party/webrtc/base/rollingaccumulator.h172
-rw-r--r--chromium/third_party/webrtc/base/rollingaccumulator_unittest.cc118
-rw-r--r--chromium/third_party/webrtc/base/safe_conversions.h79
-rw-r--r--chromium/third_party/webrtc/base/safe_conversions_impl.h188
-rw-r--r--chromium/third_party/webrtc/base/schanneladapter.cc702
-rw-r--r--chromium/third_party/webrtc/base/schanneladapter.h77
-rw-r--r--chromium/third_party/webrtc/base/scoped_autorelease_pool.h59
-rw-r--r--chromium/third_party/webrtc/base/scoped_autorelease_pool.mm25
-rw-r--r--chromium/third_party/webrtc/base/scoped_ptr.h595
-rw-r--r--chromium/third_party/webrtc/base/scoped_ref_ptr.h147
-rw-r--r--chromium/third_party/webrtc/base/scopedptrcollection.h60
-rw-r--r--chromium/third_party/webrtc/base/scopedptrcollection_unittest.cc73
-rw-r--r--chromium/third_party/webrtc/base/sec_buffer.h156
-rw-r--r--chromium/third_party/webrtc/base/sha1.cc286
-rw-r--r--chromium/third_party/webrtc/base/sha1.h32
-rw-r--r--chromium/third_party/webrtc/base/sha1digest.h47
-rw-r--r--chromium/third_party/webrtc/base/sha1digest_unittest.cc82
-rw-r--r--chromium/third_party/webrtc/base/sharedexclusivelock.cc44
-rw-r--r--chromium/third_party/webrtc/base/sharedexclusivelock.h76
-rw-r--r--chromium/third_party/webrtc/base/sharedexclusivelock_unittest.cc218
-rw-r--r--chromium/third_party/webrtc/base/signalthread.cc149
-rw-r--r--chromium/third_party/webrtc/base/signalthread.h156
-rw-r--r--chromium/third_party/webrtc/base/signalthread_unittest.cc198
-rw-r--r--chromium/third_party/webrtc/base/sigslot.h2850
-rw-r--r--chromium/third_party/webrtc/base/sigslot_unittest.cc250
-rw-r--r--chromium/third_party/webrtc/base/sigslotrepeater.h94
-rwxr-xr-xchromium/third_party/webrtc/base/sigslottester.h199
-rwxr-xr-xchromium/third_party/webrtc/base/sigslottester.h.pump85
-rwxr-xr-xchromium/third_party/webrtc/base/sigslottester_unittest.cc86
-rw-r--r--chromium/third_party/webrtc/base/socket.h188
-rw-r--r--chromium/third_party/webrtc/base/socket_unittest.cc1012
-rw-r--r--chromium/third_party/webrtc/base/socket_unittest.h88
-rw-r--r--chromium/third_party/webrtc/base/socketadapters.cc893
-rw-r--r--chromium/third_party/webrtc/base/socketadapters.h244
-rw-r--r--chromium/third_party/webrtc/base/socketaddress.cc383
-rw-r--r--chromium/third_party/webrtc/base/socketaddress.h214
-rw-r--r--chromium/third_party/webrtc/base/socketaddress_unittest.cc335
-rw-r--r--chromium/third_party/webrtc/base/socketaddresspair.cc41
-rw-r--r--chromium/third_party/webrtc/base/socketaddresspair.h41
-rw-r--r--chromium/third_party/webrtc/base/socketfactory.h38
-rw-r--r--chromium/third_party/webrtc/base/socketpool.cc280
-rw-r--r--chromium/third_party/webrtc/base/socketpool.h143
-rw-r--r--chromium/third_party/webrtc/base/socketserver.h44
-rw-r--r--chromium/third_party/webrtc/base/socketstream.cc121
-rw-r--r--chromium/third_party/webrtc/base/socketstream.h57
-rw-r--r--chromium/third_party/webrtc/base/ssladapter.cc97
-rw-r--r--chromium/third_party/webrtc/base/ssladapter.h61
-rw-r--r--chromium/third_party/webrtc/base/sslconfig.h33
-rw-r--r--chromium/third_party/webrtc/base/sslfingerprint.cc96
-rw-r--r--chromium/third_party/webrtc/base/sslfingerprint.h50
-rw-r--r--chromium/third_party/webrtc/base/sslidentity.cc154
-rw-r--r--chromium/third_party/webrtc/base/sslidentity.h172
-rw-r--r--chromium/third_party/webrtc/base/sslidentity_unittest.cc208
-rw-r--r--chromium/third_party/webrtc/base/sslroots.h4932
-rw-r--r--chromium/third_party/webrtc/base/sslsocketfactory.cc175
-rw-r--r--chromium/third_party/webrtc/base/sslsocketfactory.h81
-rw-r--r--chromium/third_party/webrtc/base/sslstreamadapter.cc77
-rw-r--r--chromium/third_party/webrtc/base/sslstreamadapter.h162
-rw-r--r--chromium/third_party/webrtc/base/sslstreamadapter_unittest.cc940
-rw-r--r--chromium/third_party/webrtc/base/sslstreamadapterhelper.cc130
-rw-r--r--chromium/third_party/webrtc/base/sslstreamadapterhelper.h118
-rw-r--r--chromium/third_party/webrtc/base/stream.cc1335
-rw-r--r--chromium/third_party/webrtc/base/stream.h820
-rw-r--r--chromium/third_party/webrtc/base/stream_unittest.cc492
-rw-r--r--chromium/third_party/webrtc/base/stringdigest.h17
-rw-r--r--chromium/third_party/webrtc/base/stringencode.cc657
-rw-r--r--chromium/third_party/webrtc/base/stringencode.h210
-rw-r--r--chromium/third_party/webrtc/base/stringencode_unittest.cc385
-rw-r--r--chromium/third_party/webrtc/base/stringutils.cc133
-rw-r--r--chromium/third_party/webrtc/base/stringutils.h318
-rw-r--r--chromium/third_party/webrtc/base/stringutils_unittest.cc109
-rw-r--r--chromium/third_party/webrtc/base/systeminfo.cc518
-rw-r--r--chromium/third_party/webrtc/base/systeminfo.h81
-rw-r--r--chromium/third_party/webrtc/base/systeminfo_unittest.cc194
-rw-r--r--chromium/third_party/webrtc/base/task.cc272
-rw-r--r--chromium/third_party/webrtc/base/task.h177
-rw-r--r--chromium/third_party/webrtc/base/task_unittest.cc545
-rw-r--r--chromium/third_party/webrtc/base/taskparent.cc95
-rw-r--r--chromium/third_party/webrtc/base/taskparent.h62
-rw-r--r--chromium/third_party/webrtc/base/taskrunner.cc224
-rw-r--r--chromium/third_party/webrtc/base/taskrunner.h100
-rw-r--r--chromium/third_party/webrtc/base/template_util.h112
-rw-r--r--chromium/third_party/webrtc/base/testbase64.h5
-rw-r--r--chromium/third_party/webrtc/base/testclient.cc148
-rw-r--r--chromium/third_party/webrtc/base/testclient.h93
-rw-r--r--chromium/third_party/webrtc/base/testclient_unittest.cc77
-rw-r--r--chromium/third_party/webrtc/base/testechoserver.h73
-rw-r--r--chromium/third_party/webrtc/base/testutils.h629
-rw-r--r--chromium/third_party/webrtc/base/thread.cc560
-rw-r--r--chromium/third_party/webrtc/base/thread.h294
-rw-r--r--chromium/third_party/webrtc/base/thread_checker.h91
-rw-r--r--chromium/third_party/webrtc/base/thread_checker_impl.cc44
-rw-r--r--chromium/third_party/webrtc/base/thread_checker_impl.h51
-rw-r--r--chromium/third_party/webrtc/base/thread_checker_unittest.cc205
-rw-r--r--chromium/third_party/webrtc/base/thread_unittest.cc444
-rw-r--r--chromium/third_party/webrtc/base/timeutils.cc203
-rw-r--r--chromium/third_party/webrtc/base/timeutils.h96
-rw-r--r--chromium/third_party/webrtc/base/timeutils_unittest.cc169
-rw-r--r--chromium/third_party/webrtc/base/timing.cc112
-rw-r--r--chromium/third_party/webrtc/base/timing.h59
-rw-r--r--chromium/third_party/webrtc/base/transformadapter.cc185
-rw-r--r--chromium/third_party/webrtc/base/transformadapter.h80
-rw-r--r--chromium/third_party/webrtc/base/unittest_main.cc101
-rw-r--r--chromium/third_party/webrtc/base/unixfilesystem.cc572
-rw-r--r--chromium/third_party/webrtc/base/unixfilesystem.h126
-rw-r--r--chromium/third_party/webrtc/base/urlencode.cc183
-rw-r--r--chromium/third_party/webrtc/base/urlencode.h46
-rw-r--r--chromium/third_party/webrtc/base/urlencode_unittest.cc83
-rw-r--r--chromium/third_party/webrtc/base/versionparsing.cc57
-rw-r--r--chromium/third_party/webrtc/base/versionparsing.h35
-rw-r--r--chromium/third_party/webrtc/base/versionparsing_unittest.cc74
-rw-r--r--chromium/third_party/webrtc/base/virtualsocket_unittest.cc1001
-rw-r--r--chromium/third_party/webrtc/base/virtualsocketserver.cc1101
-rw-r--r--chromium/third_party/webrtc/base/virtualsocketserver.h234
-rw-r--r--chromium/third_party/webrtc/base/win32.cc456
-rw-r--r--chromium/third_party/webrtc/base/win32.h129
-rw-r--r--chromium/third_party/webrtc/base/win32_unittest.cc62
-rw-r--r--chromium/third_party/webrtc/base/win32filesystem.cc460
-rw-r--r--chromium/third_party/webrtc/base/win32filesystem.h101
-rw-r--r--chromium/third_party/webrtc/base/win32regkey.cc1102
-rw-r--r--chromium/third_party/webrtc/base/win32regkey.h337
-rw-r--r--chromium/third_party/webrtc/base/win32regkey_unittest.cc590
-rw-r--r--chromium/third_party/webrtc/base/win32securityerrors.cc49
-rw-r--r--chromium/third_party/webrtc/base/win32socketinit.cc46
-rw-r--r--chromium/third_party/webrtc/base/win32socketinit.h20
-rw-r--r--chromium/third_party/webrtc/base/win32socketserver.cc850
-rw-r--r--chromium/third_party/webrtc/base/win32socketserver.h164
-rw-r--r--chromium/third_party/webrtc/base/win32socketserver_unittest.cc157
-rw-r--r--chromium/third_party/webrtc/base/win32toolhelp.h172
-rw-r--r--chromium/third_party/webrtc/base/win32toolhelp_unittest.cc278
-rw-r--r--chromium/third_party/webrtc/base/win32window.cc121
-rw-r--r--chromium/third_party/webrtc/base/win32window.h60
-rw-r--r--chromium/third_party/webrtc/base/win32window_unittest.cc66
-rw-r--r--chromium/third_party/webrtc/base/win32windowpicker.cc143
-rw-r--r--chromium/third_party/webrtc/base/win32windowpicker.h39
-rw-r--r--chromium/third_party/webrtc/base/win32windowpicker_unittest.cc99
-rw-r--r--chromium/third_party/webrtc/base/window.h124
-rw-r--r--chromium/third_party/webrtc/base/windowpicker.h84
-rw-r--r--chromium/third_party/webrtc/base/windowpicker_unittest.cc67
-rw-r--r--chromium/third_party/webrtc/base/windowpickerfactory.h59
-rw-r--r--chromium/third_party/webrtc/base/winfirewall.cc155
-rw-r--r--chromium/third_party/webrtc/base/winfirewall.h56
-rw-r--r--chromium/third_party/webrtc/base/winfirewall_unittest.cc40
-rw-r--r--chromium/third_party/webrtc/base/winping.cc359
-rw-r--r--chromium/third_party/webrtc/base/winping.h103
-rw-r--r--chromium/third_party/webrtc/base/worker.cc75
-rw-r--r--chromium/third_party/webrtc/base/worker.h72
-rw-r--r--chromium/third_party/webrtc/build/OWNERS11
-rw-r--r--chromium/third_party/webrtc/build/apk_tests.gyp73
-rw-r--r--chromium/third_party/webrtc/build/apk_tests_noop.gyp16
-rw-r--r--chromium/third_party/webrtc/build/common.gypi108
-rw-r--r--chromium/third_party/webrtc/build/download_vs_toolchain.py30
-rw-r--r--chromium/third_party/webrtc/build/generate_asm_header.gypi79
-rw-r--r--chromium/third_party/webrtc/build/generate_asm_header.py74
-rwxr-xr-xchromium/third_party/webrtc/build/gyp_webrtc99
-rw-r--r--chromium/third_party/webrtc/build/gyp_webrtc.py24
-rw-r--r--chromium/third_party/webrtc/build/isolate.gypi83
-rw-r--r--chromium/third_party/webrtc/build/merge_libs.gyp4
-rw-r--r--chromium/third_party/webrtc/build/merge_libs.py5
-rw-r--r--chromium/third_party/webrtc/build/protoc.gypi1
-rw-r--r--chromium/third_party/webrtc/build/webrtc.gni57
-rw-r--r--chromium/third_party/webrtc/call.h53
-rw-r--r--chromium/third_party/webrtc/common.gyp20
-rw-r--r--chromium/third_party/webrtc/common_audio/OWNERS7
-rw-r--r--chromium/third_party/webrtc/common_audio/audio_util.cc31
-rw-r--r--chromium/third_party/webrtc/common_audio/audio_util_unittest.cc56
-rw-r--r--chromium/third_party/webrtc/common_audio/common_audio.gyp11
-rw-r--r--chromium/third_party/webrtc/common_audio/common_audio_unittests.isolate12
-rw-r--r--chromium/third_party/webrtc/common_audio/fir_filter.cc119
-rw-r--r--chromium/third_party/webrtc/common_audio/fir_filter.h40
-rw-r--r--chromium/third_party/webrtc/common_audio/fir_filter_neon.cc72
-rw-r--r--chromium/third_party/webrtc/common_audio/fir_filter_neon.h37
-rw-r--r--chromium/third_party/webrtc/common_audio/fir_filter_sse.cc80
-rw-r--r--chromium/third_party/webrtc/common_audio/fir_filter_sse.h37
-rw-r--r--chromium/third_party/webrtc/common_audio/fir_filter_unittest.cc207
-rw-r--r--chromium/third_party/webrtc/common_audio/include/audio_util.h75
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/include/push_resampler.h15
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/push_resampler.cc40
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/push_resampler_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler.cc46
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler.h14
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler_unittest.cc102
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.cc30
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.h24
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h2
-rw-r--r--chromium/third_party/webrtc/common_audio/signal_processing/complex_fft.c31
-rw-r--r--chromium/third_party/webrtc/common_audio/signal_processing/include/signal_processing_library.h23
-rw-r--r--chromium/third_party/webrtc/common_audio/signal_processing/include/spl_inl_armv7.h2
-rw-r--r--chromium/third_party/webrtc/common_audio/signal_processing/randomization_functions.c36
-rw-r--r--chromium/third_party/webrtc/common_audio/signal_processing/real_fft_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/common_audio/signal_processing/signal_processing_unittest.cc16
-rw-r--r--chromium/third_party/webrtc/common_audio/signal_processing/spl_init.c4
-rw-r--r--chromium/third_party/webrtc/common_audio/signal_processing/splitting_filter.c60
-rw-r--r--chromium/third_party/webrtc/common_audio/vad/include/webrtc_vad.h6
-rw-r--r--chromium/third_party/webrtc/common_audio/vad/vad_core.c8
-rw-r--r--chromium/third_party/webrtc/common_audio/vad/vad_core.h14
-rw-r--r--chromium/third_party/webrtc/common_audio/vad/vad_sp.c2
-rw-r--r--chromium/third_party/webrtc/common_audio/vad/vad_sp.h2
-rw-r--r--chromium/third_party/webrtc/common_audio/vad/vad_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/common_audio/vad/webrtc_vad.c10
-rw-r--r--chromium/third_party/webrtc/common_types.h344
-rw-r--r--chromium/third_party/webrtc/common_video/OWNERS7
-rw-r--r--chromium/third_party/webrtc/common_video/common_video_unittests.gyp4
-rw-r--r--chromium/third_party/webrtc/common_video/common_video_unittests.isolate16
-rw-r--r--chromium/third_party/webrtc/common_video/i420_video_frame.cc15
-rw-r--r--chromium/third_party/webrtc/common_video/i420_video_frame_unittest.cc91
-rw-r--r--chromium/third_party/webrtc/common_video/interface/i420_video_frame.h21
-rw-r--r--chromium/third_party/webrtc/common_video/interface/texture_video_frame.h1
-rw-r--r--chromium/third_party/webrtc/common_video/interface/video_image.h3
-rw-r--r--chromium/third_party/webrtc/common_video/libyuv/libyuv_unittest.cc20
-rw-r--r--chromium/third_party/webrtc/common_video/libyuv/scaler_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/common_video/plane.cc7
-rw-r--r--chromium/third_party/webrtc/common_video/plane.h3
-rw-r--r--chromium/third_party/webrtc/common_video/texture_video_frame.cc33
-rw-r--r--chromium/third_party/webrtc/common_video/texture_video_frame_unittest.cc23
-rw-r--r--chromium/third_party/webrtc/config.cc53
-rw-r--r--chromium/third_party/webrtc/config.h55
-rw-r--r--chromium/third_party/webrtc/engine_configurations.h7
-rw-r--r--chromium/third_party/webrtc/examples/OWNERS2
-rw-r--r--chromium/third_party/webrtc/examples/android/OWNERS4
-rw-r--r--chromium/third_party/webrtc/examples/android/media_demo/jni/on_load.cc4
-rw-r--r--chromium/third_party/webrtc/examples/android/opensl_loopback/AndroidManifest.xml22
-rw-r--r--chromium/third_party/webrtc/examples/android/opensl_loopback/README23
-rw-r--r--chromium/third_party/webrtc/examples/android/opensl_loopback/build.xml92
-rw-r--r--chromium/third_party/webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.cc107
-rw-r--r--chromium/third_party/webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.h67
-rw-r--r--chromium/third_party/webrtc/examples/android/opensl_loopback/jni/opensl_runner.cc129
-rw-r--r--chromium/third_party/webrtc/examples/android/opensl_loopback/project.properties16
-rw-r--r--chromium/third_party/webrtc/examples/android/opensl_loopback/res/drawable/logo.pngbin0 -> 2574 bytes
-rw-r--r--chromium/third_party/webrtc/examples/android/opensl_loopback/res/layout/open_sl_demo.xml22
-rw-r--r--chromium/third_party/webrtc/examples/android/opensl_loopback/res/values/strings.xml7
-rw-r--r--chromium/third_party/webrtc/experiments.h27
-rw-r--r--chromium/third_party/webrtc/frame_callback.h2
-rw-r--r--chromium/third_party/webrtc/modules/OWNERS6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g711/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g722/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c42
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h15
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c30
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_mips.c102
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_mips.c365
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c43
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi24
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c327
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c133
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c29
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h117
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus.gypi22
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc249
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.c174
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc119
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/tools/OWNERS6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc124
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h90
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.gypi71
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.isolate40
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/OWNERS1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_g722.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h94
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.cc269
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.h87
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.cc41
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.h9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc142
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.h40
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc54
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc20
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi65
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc206
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h117
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest.cc514
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/call_statistics.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/interface/audio_coding_module.h101
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/Android.mk67
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.cc430
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.h87
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.cc436
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.h90
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.cc339
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.h79
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.cc150
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.h73
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.cc956
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.h336
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc42
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h42
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc171
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.cc358
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.h84
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.cc500
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.h86
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.cc510
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.h94
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.cc366
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.h76
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.cc349
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.h72
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc1263
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.h1224
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc267
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.h71
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.cc259
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.h71
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.cc903
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.h138
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac_macros.h77
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.cc1151
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.h399
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc153
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.cc319
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.h78
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc251
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.h67
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.cc134
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.h65
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.cc136
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.h65
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.cc108
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.h62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.cc63
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.h40
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.cc471
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.h86
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi153
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc3048
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h455
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/Android.mk73
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/OWNERS8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.c493
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.cc88
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.h77
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.cc71
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.h59
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier_unittest.cc75
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder.cc264
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc516
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h276
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc931
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittests.isolate (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittests.isolate)0
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc213
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.h134
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector_unittest.cc308
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc165
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h120
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc394
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/automode.c783
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/automode.h274
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc260
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.h137
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise_unittest.cc26
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/bgn_update.c247
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc60
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.h47
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter_unittest.cc162
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_stats.h95
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/bufstats_decision.c427
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/cng_internal.c155
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.c782
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.h128
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db_defines.h97
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc135
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h73
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise_unittest.cc31
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/correlator.c132
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc185
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h168
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc102
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h63
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc235
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h107
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc58
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc260
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h158
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc228
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/defines.h51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_logging.h34
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc425
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h164
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc297
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc110
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h76
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc121
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.c532
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.h807
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc353
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h136
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper_unittest.cc89
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c120
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h220
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.c232
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.cc226
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.h199
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer_unittest.cc307
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.cc192
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h56
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc142
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c367
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h73
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand.c1220
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc904
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h187
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc46
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/interface/audio_decoder.h152
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/interface/neteq.h276
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h230
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h454
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h336
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mcu.h300
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_address_init.c33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c45
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h69
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_reset.c131
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge.c570
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc366
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h110
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge_unittest.cc37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/min_distortion.c55
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c41
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h38
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_vector.h51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h64
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h63
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h34
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h38
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h35
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h99
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h58
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_payload_splitter.h39
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mute_signal.c33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi328
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_defines.h374
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_error_codes.h81
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc210
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc1947
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h406
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc498
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_statistics.h56
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc421
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi248
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc1437
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate44
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal.c279
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc190
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal.h68
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc40
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h88
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.c851
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc264
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h371
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc529
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc430
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.h90
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc777
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/peak_detection.c232
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.cc87
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.h72
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad_unittest.cc25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.c527
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.cc110
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.h87
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.c54
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.cc57
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.h50
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector_unittest.cc25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/recin.c531
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/recout.c1502
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.c134
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.cc96
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.h140
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.c240
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.h78
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/set_fs.c78
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/signal_mcu.c820
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/split_and_insert.c152
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.cc170
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.h109
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.cc107
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.h101
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer_unittest.cc164
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc216
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.h111
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch_unittest.cc52
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc111
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.h68
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc327
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_checksum.h60
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc57
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.h59
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_sink.h46
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.cc51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.h51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc132
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h32
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc115
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h100
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc628
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/output_audio_file.h50
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.cc155
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.h117
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_source.h37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_unittest.cc202
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_analyze.cc147
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc147
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h67
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.cc48
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h57
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/unmute_signal.c41
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq.c1769
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc778
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/OWNERS4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.cc81
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.h68
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder.cc254
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc491
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h271
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittest.cc931
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc214
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h134
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc308
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.cc165
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.h120
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc394
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.cc260
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.h137
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise_unittest.cc26
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.cc60
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.h47
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter_unittest.cc162
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.cc135
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.h73
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise_unittest.cc31
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.cc185
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.h168
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.cc102
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.h63
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc235
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.h106
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_unittest.cc58
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.cc260
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.h158
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database_unittest.cc228
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/defines.h51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.cc425
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.h164
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager_unittest.cc297
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.cc110
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.h76
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector_unittest.cc121
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.cc353
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.h136
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc89
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.cc226
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.h116
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer_unittest.cc307
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc192
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h56
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc142
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.cc867
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.h157
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/expand_unittest.cc33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h141
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/neteq.h267
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.cc360
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.h104
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/merge_unittest.cc37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h38
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_vector.h51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_buffer_level_filter.h37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h64
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_manager.h63
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h34
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_buffer.h38
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h35
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h99
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_packet_buffer.h58
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_payload_splitter.h37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.cc52
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.gypi220
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_external_decoder_unittest.cc209
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.cc1902
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.h360
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc229
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_stereo_unittest.cc418
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi198
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc1219
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.cc190
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.h68
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/normal_unittest.cc40
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/packet.h88
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.cc288
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.h144
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer_unittest.cc560
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.cc372
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.h83
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter_unittest.cc694
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.cc87
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.h72
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad_unittest.cc25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc101
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.h74
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.cc57
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.h50
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector_unittest.cc25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.cc95
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.h58
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.cc170
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.h109
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.cc107
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.h100
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc164
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.cc216
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.h111
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc31
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc111
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.h68
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler_unittest.cc327
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.cc57
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.h60
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.cc51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc423
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.cc48
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h57
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc726
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h50
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h34
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/Android.mk3
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/OWNERS7
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/audio_device_template.h16
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/opensles_input.cc10
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/opensles_input.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/opensles_output.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/opensles_output.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.cc16
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device.gypi71
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_buffer.cc7
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_generic.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc58
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_tests.isolate12
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.cc586
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.h202
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc43
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.h41
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/include/audio_device.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/include/audio_device_defines.h41
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/include/fake_audio_device.h8
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.cc70
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc69
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h15
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc58
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h13
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/main/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.cc38
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc54
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/OWNERS6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/Android.mk9
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.c102
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h31
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h19
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.c774
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.c304
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c15
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h21
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c1213
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.c58
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/include/echo_cancellation.h40
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc21
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c3
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c29
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h44
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.c16
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc/digital_agc.c11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_buffer.cc496
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_buffer.h117
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi50
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc646
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h132
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc74
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_tests.gypi28
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/common.h76
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/debug.proto19
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.cc70
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.h25
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc31
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h35
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc27
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc43
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp45
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp45
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.cc20
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h164
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h54
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.cc132
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.h12
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi51
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc35
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.h12
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/include/noise_suppression.h8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/noise_suppression.c4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.c78
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.h8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c279
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h37
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c273
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c1008
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/processing_component.cc27
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/processing_component.h8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/rms_level.cc61
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/rms_level.h57
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/splitting_filter.cc33
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/splitting_filter.h63
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/typing_detection.cc90
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/typing_detection.h93
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c106
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.h74
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc143
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c67
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h87
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/ring_buffer_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc27
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.h10
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc287
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h87
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc211
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h8
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc304
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h59
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc14
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc54
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi14
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.cc7
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.h33
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_types.h11
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h7
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_geometry.h4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/differ.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.h12
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.mm64
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc91
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h66
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.cc54
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.h16
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.cc10
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.h10
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm91
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc5
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc15
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc75
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc8
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h19
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.cc1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm451
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc27
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_win.cc360
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc16
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/cursor.cc2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/cursor_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/desktop.h4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/scoped_gdi_object.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/scoped_thread_desktop.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.cc92
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.h35
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc324
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h99
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc461
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h159
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.cc46
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h25
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h12
-rwxr-xr-xchromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.cc189
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm230
-rwxr-xr-xchromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc44
-rwxr-xr-xchromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc90
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/x11/x_error_trap.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.cc12
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h3
-rw-r--r--chromium/third_party/webrtc/modules/interface/module_common_types.h71
-rw-r--r--chromium/third_party/webrtc/modules/media_file/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/media_file/source/avi_file.cc47
-rw-r--r--chromium/third_party/webrtc/modules/media_file/source/avi_file.h5
-rw-r--r--chromium/third_party/webrtc/modules/media_file/source/media_file_utility.cc1
-rw-r--r--chromium/third_party/webrtc/modules/modules.gyp120
-rw-r--r--chromium/third_party/webrtc/modules/modules_tests.isolate22
-rw-r--r--chromium/third_party/webrtc/modules/modules_unittests.isolate142
-rw-r--r--chromium/third_party/webrtc/modules/pacing/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/pacing/include/paced_sender.h22
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender.cc39
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc46
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/OWNERS6
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc162
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h32
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h43
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc128
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.h21
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.h2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi60
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc85
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc36
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h5
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc251
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc58
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc119
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp_unittest.cc163
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc74
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h36
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc116
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc80
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/fec_receiver.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/receive_statistics.h39
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h50
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h8
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h42
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h12
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc17
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_test_helper.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc69
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc362
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h61
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc173
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc85
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc112
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc695
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.h726
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc592
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc98
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc107
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc38
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc39
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h10
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc92
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h13
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc51
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc46
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc37
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc32
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc528
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h38
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc438
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc368
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h58
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc89
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h13
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc165
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc28
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc163
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility_unittest.cc18
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h2
-rw-r--r--chromium/third_party/webrtc/modules/utility/interface/mock/mock_process_thread.h29
-rw-r--r--chromium/third_party/webrtc/modules/utility/interface/process_thread.h2
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/audio_frame_operations.cc1
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/audio_frame_operations_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_player_impl.cc117
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_player_unittests.cc106
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc165
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h32
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/frame_scaler.cc1
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc56
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h8
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/rtp_dump_impl.cc19
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/video_coder.cc5
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/video_coder.h2
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/video_frames_queue.cc80
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/video_frames_queue.h8
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/OWNERS8
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/android/device_info_android.cc77
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/android/device_info_android.h11
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/android/video_capture_android.cc94
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/device_info_impl.cc29
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ensure_initialized.cc63
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ensure_initialized.h19
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/include/mock/mock_video_capture.h50
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/include/video_capture.h19
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios.mm4
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios_objc.mm4
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h39
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm377
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.mm13
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.h40
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.mm287
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm7
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture.gypi28
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc67
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h15
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture_tests.isolate12
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/windows/sink_filter_ds.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/i420/main/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc17
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/test.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/tools/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8.gyp1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_factory.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/interface/video_coding.h6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/Android.mk3
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.cc66
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.h3
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.cc8
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/frame_buffer.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.cc35
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.h9
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.cc111
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.h23
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.cc121
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.h10
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc34
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/jitter_estimator.cc47
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.cc270
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.h136
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc32
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/packet.cc4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/packet.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/receiver.cc54
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/receiver.h5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/receiver_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/session_info.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/session_info.h1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc248
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h63
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/timing.cc56
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/timing.h8
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_coding.gypi2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.cc53
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.h24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_test.gypi1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver.cc156
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_sender.cc166
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_sender_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/interface/video_processing.h8
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/brighten.cc7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/brightness_detection.cc7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/color_enhancement.cc60
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/deflickering.cc12
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/denoising.cc3
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.cc14
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.cc31
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.cc19
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.h3
-rw-r--r--chromium/third_party/webrtc/modules/video_render/OWNERS7
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc3
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm4
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm4
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm6
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm13
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h10
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm52
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render.gypi25
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_frames.cc124
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_frames.h9
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_tests.isolate12
-rw-r--r--chromium/third_party/webrtc/overrides/webrtc/base/basictypes.h108
-rw-r--r--chromium/third_party/webrtc/overrides/webrtc/base/constructormagic.h20
-rw-r--r--chromium/third_party/webrtc/overrides/webrtc/base/logging.cc318
-rw-r--r--chromium/third_party/webrtc/overrides/webrtc/base/logging.h221
-rw-r--r--chromium/third_party/webrtc/overrides/webrtc/base/win32socketinit.cc28
-rw-r--r--chromium/third_party/webrtc/supplement.gypi6
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/aligned_malloc.h12
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/atomic32.h2
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/clock.h26
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/compile_assert.h11
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/constructor_magic.h50
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/critical_section_wrapper.h23
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/field_trial.h70
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/list_wrapper.h107
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/rtp_to_ntp.h50
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/rw_lock_wrapper.h30
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/scoped_ptr.h153
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/scoped_refptr.h2
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/scoped_vector.h149
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/stl_util.h265
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/template_util.h2
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/thread_annotations.h99
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/timestamp_extrapolator.h56
-rw-r--r--chromium/third_party/webrtc/system_wrappers/interface/utf_util_win.h57
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/Android.mk7
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/OWNERS6
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/aligned_malloc_unittest.cc26
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/android/cpu-features.c397
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/android/cpu-features.h56
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/clock.cc52
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/clock_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/cpu_features_android.c4
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/critical_section_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/field_trial_default.cc22
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/list_no_stl.cc241
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/list_no_stl.h78
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/list_stl.cc207
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/list_stl.h65
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/list_unittest.cc475
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/move.h11
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp.cc150
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp_unittest.cc146
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/scoped_vector_unittest.cc328
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/stl_util_unittest.cc250
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/system_wrappers.gyp48
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/system_wrappers_tests.gyp8
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/system_wrappers_unittests.isolate12
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/timestamp_extrapolator.cc230
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/trace_impl.cc7
-rw-r--r--chromium/third_party/webrtc/test/libtest/libtest.gyp26
-rw-r--r--chromium/third_party/webrtc/test/metrics.gyp54
-rw-r--r--chromium/third_party/webrtc/test/metrics_unittests.isolate38
-rw-r--r--chromium/third_party/webrtc/test/test.gyp49
-rw-r--r--chromium/third_party/webrtc/test/test_support_unittests.isolate14
-rw-r--r--chromium/third_party/webrtc/test/testsupport/fileutils.cc41
-rw-r--r--chromium/third_party/webrtc/test/testsupport/fileutils.h9
-rw-r--r--chromium/third_party/webrtc/test/testsupport/fileutils_unittest.cc13
-rw-r--r--chromium/third_party/webrtc/test/testsupport/metrics/video_metrics.cc4
-rw-r--r--chromium/third_party/webrtc/test/webrtc_test_common.gyp124
-rw-r--r--chromium/third_party/webrtc/tools/OWNERS9
-rwxr-xr-xchromium/third_party/webrtc/tools/barcode_tools/barcode_decoder.py1
-rw-r--r--chromium/third_party/webrtc/tools/frame_analyzer/video_quality_analysis.cc89
-rw-r--r--chromium/third_party/webrtc/tools/frame_analyzer/video_quality_analysis.h15
-rw-r--r--chromium/third_party/webrtc/tools/frame_editing/frame_editing_lib.cc2
-rw-r--r--chromium/third_party/webrtc/tools/frame_editing/frame_editing_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/tools/loopback_test/OWNERS1
-rw-r--r--chromium/third_party/webrtc/tools/loopback_test/README12
-rw-r--r--chromium/third_party/webrtc/tools/loopback_test/adapter.js211
-rw-r--r--chromium/third_party/webrtc/tools/loopback_test/loopback_test.html227
-rw-r--r--chromium/third_party/webrtc/tools/loopback_test/loopback_test.js240
-rwxr-xr-xchromium/third_party/webrtc/tools/loopback_test/record-test.sh60
-rwxr-xr-xchromium/third_party/webrtc/tools/loopback_test/run-server.sh15
-rw-r--r--chromium/third_party/webrtc/tools/loopback_test/stat_tracker.js94
-rw-r--r--chromium/third_party/webrtc/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc29
-rw-r--r--chromium/third_party/webrtc/tools/simple_command_line_parser.h2
-rw-r--r--chromium/third_party/webrtc/tools/tools.gyp6
-rw-r--r--chromium/third_party/webrtc/tools/tools_unittests.isolate16
-rw-r--r--chromium/third_party/webrtc/typedefs.h20
-rw-r--r--chromium/third_party/webrtc/video/OWNERS5
-rw-r--r--chromium/third_party/webrtc/video/bitrate_estimator_tests.cc79
-rw-r--r--chromium/third_party/webrtc/video/call.cc240
-rw-r--r--chromium/third_party/webrtc/video/call_perf_tests.cc479
-rw-r--r--chromium/third_party/webrtc/video/call_tests.cc934
-rw-r--r--chromium/third_party/webrtc/video/full_stack.cc143
-rw-r--r--chromium/third_party/webrtc/video/loopback.cc92
-rw-r--r--chromium/third_party/webrtc/video/rampup_tests.cc539
-rw-r--r--chromium/third_party/webrtc/video/receive_statistics_proxy.cc100
-rw-r--r--chromium/third_party/webrtc/video/receive_statistics_proxy.h89
-rw-r--r--chromium/third_party/webrtc/video/send_statistics_proxy.cc123
-rw-r--r--chromium/third_party/webrtc/video/send_statistics_proxy.h93
-rw-r--r--chromium/third_party/webrtc/video/send_statistics_proxy_unittest.cc244
-rw-r--r--chromium/third_party/webrtc/video/transport_adapter.cc16
-rw-r--r--chromium/third_party/webrtc/video/transport_adapter.h5
-rw-r--r--chromium/third_party/webrtc/video/video_receive_stream.cc109
-rw-r--r--chromium/third_party/webrtc/video/video_receive_stream.h19
-rw-r--r--chromium/third_party/webrtc/video/video_send_stream.cc352
-rw-r--r--chromium/third_party/webrtc/video/video_send_stream.h47
-rw-r--r--chromium/third_party/webrtc/video/video_send_stream_tests.cc742
-rw-r--r--chromium/third_party/webrtc/video/webrtc_video.gypi4
-rw-r--r--chromium/third_party/webrtc/video_engine/Android.mk2
-rw-r--r--chromium/third_party/webrtc/video_engine/OWNERS7
-rw-r--r--chromium/third_party/webrtc/video_engine/call_stats.cc1
-rw-r--r--chromium/third_party/webrtc/video_engine/call_stats.h2
-rw-r--r--chromium/third_party/webrtc/video_engine/encoder_state_feedback.h2
-rw-r--r--chromium/third_party/webrtc/video_engine/encoder_state_feedback_unittest.cc21
-rw-r--r--chromium/third_party/webrtc/video_engine/include/vie_base.h118
-rw-r--r--chromium/third_party/webrtc/video_engine/include/vie_encryption.h51
-rw-r--r--chromium/third_party/webrtc/video_engine/include/vie_errors.h6
-rw-r--r--chromium/third_party/webrtc/video_engine/include/vie_image_process.h7
-rw-r--r--chromium/third_party/webrtc/video_engine/include/vie_network.h14
-rw-r--r--chromium/third_party/webrtc/video_engine/include/vie_render.h11
-rw-r--r--chromium/third_party/webrtc/video_engine/include/vie_rtp_rtcp.h53
-rw-r--r--chromium/third_party/webrtc/video_engine/mock/mock_vie_frame_provider_base.h33
-rw-r--r--chromium/third_party/webrtc/video_engine/overuse_frame_detector.cc342
-rw-r--r--chromium/third_party/webrtc/video_engine/overuse_frame_detector.h77
-rw-r--r--chromium/third_party/webrtc/video_engine/overuse_frame_detector_unittest.cc402
-rw-r--r--chromium/third_party/webrtc/video_engine/stream_synchronization.cc37
-rw-r--r--chromium/third_party/webrtc/video_engine/stream_synchronization.h4
-rw-r--r--chromium/third_party/webrtc/video_engine/stream_synchronization_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/video_engine/test/auto_test/vie_auto_test.gypi6
-rw-r--r--chromium/third_party/webrtc/video_engine/test/auto_test/vie_auto_test.isolate14
-rw-r--r--chromium/third_party/webrtc/video_engine/test/libvietest/libvietest.gypi21
-rw-r--r--chromium/third_party/webrtc/video_engine/video_engine_core.gypi10
-rw-r--r--chromium/third_party/webrtc/video_engine/video_engine_core_unittests.isolate12
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_base_impl.cc196
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_base_impl.h6
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_capture_impl.cc157
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_capturer.cc201
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_capturer.h15
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_capturer_unittest.cc263
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_channel.cc750
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_channel.h31
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_channel_group.cc107
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_channel_group.h6
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_channel_manager.cc107
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_channel_manager.h19
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_codec_impl.cc338
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_codec_unittest.cc230
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_defines.h10
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_encoder.cc390
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_encoder.h9
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_encryption_impl.cc111
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_encryption_impl.h43
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_external_codec_impl.cc75
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_file_image.cc20
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_frame_provider_base.cc24
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_image_process_impl.cc74
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_impl.cc77
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_impl.h9
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_input_manager.cc88
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_manager_base.h19
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_network_impl.cc92
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_network_impl.h6
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_receiver.cc224
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_receiver.h31
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_remb.cc23
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_remb_unittest.cc17
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_render_impl.cc119
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_render_manager.cc93
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_render_manager.h6
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_renderer.cc2
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_rtp_rtcp_impl.cc582
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_rtp_rtcp_impl.h13
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_sender.cc103
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_sender.h9
-rw-r--r--chromium/third_party/webrtc/video_engine/vie_sync_module.cc53
-rw-r--r--chromium/third_party/webrtc/video_engine_tests.isolate14
-rw-r--r--chromium/third_party/webrtc/video_receive_stream.h78
-rw-r--r--chromium/third_party/webrtc/video_send_stream.h121
-rw-r--r--chromium/third_party/webrtc/voice_engine/Android.mk5
-rw-r--r--chromium/third_party/webrtc/voice_engine/OWNERS7
-rw-r--r--chromium/third_party/webrtc/voice_engine/channel.cc1505
-rw-r--r--chromium/third_party/webrtc/voice_engine/channel.h224
-rw-r--r--chromium/third_party/webrtc/voice_engine/channel_manager.h2
-rw-r--r--chromium/third_party/webrtc/voice_engine/dtmf_inband_queue.cc8
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/mock/fake_voe_external_media.h2
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_base.h24
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_call_report.h87
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_codec.h48
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_dtmf.h16
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_encryption.h63
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_errors.h8
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_external_media.h54
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_file.h43
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_hardware.h53
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_network.h11
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_rtp_rtcp.h119
-rw-r--r--chromium/third_party/webrtc/voice_engine/include/voe_volume_control.h18
-rw-r--r--chromium/third_party/webrtc/voice_engine/output_mixer.cc64
-rw-r--r--chromium/third_party/webrtc/voice_engine/output_mixer.h12
-rw-r--r--chromium/third_party/webrtc/voice_engine/output_mixer_internal.cc70
-rw-r--r--chromium/third_party/webrtc/voice_engine/output_mixer_internal.h33
-rw-r--r--chromium/third_party/webrtc/voice_engine/output_mixer_unittest.cc221
-rw-r--r--chromium/third_party/webrtc/voice_engine/transmit_mixer.cc314
-rw-r--r--chromium/third_party/webrtc/voice_engine/transmit_mixer.h41
-rw-r--r--chromium/third_party/webrtc/voice_engine/utility.cc236
-rw-r--r--chromium/third_party/webrtc/voice_engine/utility.h79
-rw-r--r--chromium/third_party/webrtc/voice_engine/utility_unittest.cc263
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_auto_test.isolate12
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_base_impl.cc336
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_base_impl.h47
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_call_report_impl.cc383
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_call_report_impl.h57
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_codec_impl.cc217
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_codec_impl.h20
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_codec_unittest.cc50
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_dtmf_impl.cc46
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_dtmf_impl.h5
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_encryption_impl.cc96
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_encryption_impl.h40
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_external_media_impl.cc197
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_external_media_impl.h17
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_file_impl.cc762
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_file_impl.h41
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_hardware_impl.cc237
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_hardware_impl.h19
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_neteq_stats_impl.cc2
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_neteq_stats_unittest.cc285
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_network_impl.cc12
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_network_impl.h4
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_rtp_rtcp_impl.cc368
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_rtp_rtcp_impl.h58
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_video_sync_impl.cc8
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_volume_control_impl.cc158
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_volume_control_impl.h8
-rw-r--r--chromium/third_party/webrtc/voice_engine/voice_engine.gyp32
-rw-r--r--chromium/third_party/webrtc/voice_engine/voice_engine_defines.h57
-rw-r--r--chromium/third_party/webrtc/voice_engine/voice_engine_impl.cc26
-rw-r--r--chromium/third_party/webrtc/voice_engine/voice_engine_impl.h18
-rw-r--r--chromium/third_party/webrtc/voice_engine/voice_engine_unittests.isolate12
-rw-r--r--chromium/third_party/webrtc/webrtc.gyp5
-rw-r--r--chromium/third_party/webrtc/webrtc_examples.gyp93
-rw-r--r--chromium/third_party/webrtc/webrtc_perf_tests.isolate18
-rw-r--r--chromium/third_party/webrtc/webrtc_tests.gypi45
1531 files changed, 145043 insertions, 80019 deletions
diff --git a/chromium/third_party/webrtc/BUILD.gn b/chromium/third_party/webrtc/BUILD.gn
new file mode 100644
index 00000000000..a86c8a7670d
--- /dev/null
+++ b/chromium/third_party/webrtc/BUILD.gn
@@ -0,0 +1,174 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//build/config/arm.gni")
+import("//build/config/crypto.gni")
+import("//build/config/linux/pkg_config.gni")
+import("build/webrtc.gni")
+
+# Contains the defines and includes in common.gypi that are duplicated both as
+# target_defaults and direct_dependent_settings.
+config("common_inherited_config") {
+ defines = []
+ if (build_with_mozilla) {
+ defines += [ "WEBRTC_MOZILLA_BUILD" ]
+ }
+ if (build_with_chromium) {
+ defines = [
+ "WEBRTC_CHROMIUM_BUILD",
+ "LOGGING_INSIDE_WEBRTC",
+ ]
+ include_dirs = [
+ # overrides must be included first as that is the mechanism for
+ # selecting the override headers in Chromium.
+ "overrides",
+ # Allow includes to be prefixed with webrtc/ in case it is not an
+ # immediate subdirectory of the top-level.
+ "..",
+ ]
+ }
+ if (is_posix) {
+ defines += [ "WEBRTC_POSIX" ]
+ }
+ if (is_ios) {
+ defines += [
+ "WEBRTC_MAC",
+ "WEBRTC_IOS",
+ ]
+ }
+ if (is_linux) {
+ defines += [ "WEBRTC_LINUX" ]
+ }
+ if (is_mac) {
+ defines += [ "WEBRTC_MAC" ]
+ }
+ if (is_win) {
+ defines += [ "WEBRTC_WIN" ]
+ }
+ if (is_android) {
+ defines += [
+ "WEBRTC_LINUX",
+ "WEBRTC_ANDROID",
+ ]
+ if (enable_android_opensl) {
+ defines += [ "WEBRTC_ANDROID_OPENSLES" ]
+ }
+ }
+}
+
+pkg_config("dbus-glib") {
+ packages = [ "dbus-glib-1" ]
+}
+
+config("common_config") {
+ if (restrict_webrtc_logging) {
+ defines = [ "WEBRTC_RESTRICT_LOGGING" ]
+ }
+
+ if (have_dbus_glib) {
+ defines += [ "HAVE_DBUS_GLIB" ]
+ # TODO(kjellander): Investigate this, it seems like include <dbus/dbus.h>
+ # is still not found even if the execution of
+ # build/config/linux/pkg-config.py dbus-glib-1 returns correct include
+ # dirs on Linux.
+ all_dependent_configs = [ "dbus-glib" ]
+ }
+
+ if (enable_video) {
+ defines += [ "WEBRTC_MODULE_UTILITY_VIDEO" ]
+ }
+
+ if (!build_with_chromium) {
+ if (is_posix) {
+ # -Wextra is currently disabled in Chromium's common.gypi. Enable
+ # for targets that can handle it. For Android/arm64 right now
+ # there will be an "enumeral and non-enumeral type in conditional
+ # expression" warning in android_tools/ndk_experimental's version
+ # of stlport.
+ # See: https://code.google.com/p/chromium/issues/detail?id=379699
+ if (cpu_arch != "arm64" || !is_android) {
+ cflags = [
+ "-Wextra",
+ # We need to repeat some flags from Chromium's common.gypi
+ # here that get overridden by -Wextra.
+ "-Wno-unused-parameter",
+ "-Wno-missing-field-initializers",
+ "-Wno-strict-overflow",
+ ]
+ cflags_cc = [
+ "-Wnon-virtual-dtor",
+ # This is enabled for clang; enable for gcc as well.
+ "-Woverloaded-virtual",
+ ]
+ }
+ }
+
+ if (is_clang) {
+ cflags += [ "-Wthread-safety" ]
+ }
+ }
+
+ if (cpu_arch == "arm") {
+ defines += [ "WEBRTC_ARCH_ARM" ]
+ if (arm_version == 7) {
+ defines += [ "WEBRTC_ARCH_ARM_V7" ]
+ if (arm_use_neon) {
+ defines += [ "WEBRTC_ARCH_ARM_NEON" ]
+ } else {
+ defines += [ "WEBRTC_DETECT_ARM_NEON" ]
+ }
+ }
+ }
+
+ if (cpu_arch == "mipsel") {
+ defines += [ "MIPS32_LE" ]
+ if (mips_fpu) {
+ defines += [ "MIPS_FPU_LE" ]
+ cflags += [ "-mhard-float" ]
+ } else {
+ cflags += [ "-msoft-float" ]
+ }
+ if (mips_arch_variant == "mips32r2") {
+ defines += [ "MIPS32_R2_LE" ]
+ cflags += [ "-mips32r2" ]
+ cflags_cc += [ "-mips32r2" ]
+ }
+ if (mips_dsp_rev == 1) {
+ defines += [ "MIPS_DSP_R1_LE" ]
+ cflags += [ "-mdsp" ]
+ cflags_cc += [ "-mdsp" ]
+ } else if (mips_dsp_rev == 2) {
+ defines += [
+ "MIPS_DSP_R1_LE",
+ "MIPS_DSP_R2_LE",
+ ]
+ cflags += [ "-mdspr2" ]
+ cflags_cc += [ "-mdspr2" ]
+ }
+ }
+
+ # TODO(kjellander): Handle warnings on Windows where WebRTC differ from the
+ # default warnings set in build/config/compiler/BUILD.gn.
+
+ if (is_android && is_clang) {
+ # The Android NDK doesn't provide optimized versions of these
+ # functions. Ensure they are disabled for all compilers.
+ cflags += [
+ "-fno-builtin-cos",
+ "-fno-builtin-sin",
+ "-fno-builtin-cosf",
+ "-fno-builtin-sinf",
+ ]
+ }
+}
+
+static_library("webrtc") {
+ deps = [
+ "base:webrtc_base",
+ ]
+}
diff --git a/chromium/third_party/webrtc/OWNERS b/chromium/third_party/webrtc/OWNERS
new file mode 100644
index 00000000000..bbffda7e492
--- /dev/null
+++ b/chromium/third_party/webrtc/OWNERS
@@ -0,0 +1,6 @@
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/PRESUBMIT.py b/chromium/third_party/webrtc/PRESUBMIT.py
index 4d4a3d50a7a..4132c168749 100644
--- a/chromium/third_party/webrtc/PRESUBMIT.py
+++ b/chromium/third_party/webrtc/PRESUBMIT.py
@@ -8,13 +8,13 @@
def _LicenseHeader(input_api):
"""Returns the license header regexp."""
- # Accept any year number from 2011 to the current year
+ # Accept any year number from 2003 to the current year
current_year = int(input_api.time.strftime('%Y'))
- allowed_years = (str(s) for s in reversed(xrange(2011, current_year + 1)))
+ allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'
license_header = (
- r'.*? Copyright \(c\) %(year)s The WebRTC project authors\. '
- r'All Rights Reserved\.\n'
+ r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. '
+ r'All [Rr]ights [Rr]eserved\.\n'
r'.*?\n'
r'.*? Use of this source code is governed by a BSD-style license\n'
r'.*? that can be found in the LICENSE file in the root of the source\n'
diff --git a/chromium/third_party/webrtc/base/BUILD.gn b/chromium/third_party/webrtc/base/BUILD.gn
new file mode 100644
index 00000000000..41180901485
--- /dev/null
+++ b/chromium/third_party/webrtc/base/BUILD.gn
@@ -0,0 +1,723 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//build/config/crypto.gni")
+import("../build/webrtc.gni")
+
+config("webrtc_base_config") {
+ include_dirs = [
+ "//third_party/jsoncpp/overrides/include",
+ "//third_party/jsoncpp/source/include",
+ ]
+
+ defines = [
+ "FEATURE_ENABLE_SSL",
+ "GTEST_RELATIVE_PATH",
+ ]
+
+ # TODO(henrike): issue 3307, make webrtc_base build without disabling
+ # these flags.
+ cflags_cc = [ "-Wno-non-virtual-dtor" ]
+}
+
+config("webrtc_base_chromium_config") {
+ defines = [
+ "NO_MAIN_THREAD_WRAPPING",
+ "SSL_USE_NSS",
+ ]
+}
+
+config("openssl_config") {
+ defines = [
+ "SSL_USE_OPENSSL",
+ "HAVE_OPENSSL_SSL_H",
+ ]
+}
+
+config("no_openssl_config") {
+ defines = [
+ "SSL_USE_NSS",
+ "HAVE_NSS_SSL_H",
+ "SSL_USE_NSS_RNG",
+ ]
+}
+
+config("android_config") {
+ defines = [ "HAVE_OPENSSL_SSL_H" ]
+}
+
+config("no_android_config") {
+ defines = [
+ "HAVE_NSS_SSL_H",
+ "SSL_USE_NSS_RNG",
+ ]
+}
+
+config("ios_config") {
+ ldflags = [
+ #"Foundation.framework", # Already included in //build/config:default_libs.
+ "Security.framework",
+ "SystemConfiguration.framework",
+ #"UIKit.framework", # Already included in //build/config:default_libs.
+ ]
+}
+
+config("mac_config") {
+ ldflags = [
+ "Cocoa.framework",
+ #"Foundation.framework", # Already included in //build/config:default_libs.
+ #"IOKit.framework", # Already included in //build/config:default_libs.
+ #"Security.framework", # Already included in //build/config:default_libs.
+ "SystemConfiguration.framework",
+ ]
+}
+
+config("mac_x86_config") {
+ libs = [
+ #"Carbon.framework", # Already included in //build/config:default_libs.
+ ]
+}
+
+config("linux_system_ssl_config") {
+ visibility = ":*" # Only targets in this file can depend on this.
+
+ # TODO(kjellander): Find out how to convert GYP include_dirs+ (i.e. insert
+ # first in the include path?).
+ include_dirs = [ "//net/third_party/nss/ssl" ]
+
+ configs = [ "//third_party/nss:system_nss_no_ssl_config" ]
+}
+
+# Provides the same functionality as the build/linux/system.gyp:ssl GYP target.
+# This cannot be in build/linux/BUILD.gn since targets in build/ are not allowed
+# to depend on targets outside of it. This could be replaced by the Chromium
+# //crypto:platform target, but as WebRTC currently doesn't sync src/crypto from
+# Chromium, it is not possible today.
+config("linux_system_ssl") {
+ if (use_openssl) {
+ deps = [ "//third_party/openssl" ]
+ } else {
+ deps = [ "//net/third_party/nss/ssl:libssl" ]
+
+ direct_dependent_configs = [
+ ":linux_system_ssl_config",
+ ]
+
+ if (is_clang) {
+ cflags = [
+ # There is a broken header guard in /usr/include/nss/secmod.h:
+ # https://bugzilla.mozilla.org/show_bug.cgi?id=884072
+ "-Wno-header-guard",
+ ]
+ }
+ }
+}
+
+static_library("webrtc_base") {
+ cflags = []
+ cflags_cc = []
+
+ configs -= [ "//build/config/compiler:chromium_code" ]
+ configs += [ "//build/config/compiler:no_chromium_code" ]
+ configs += [
+ "..:common_inherited_config",
+ "..:common_config",
+ ":webrtc_base_config",
+ ]
+
+ direct_dependent_configs = [
+ "..:common_inherited_config",
+ ":webrtc_base_config",
+ ]
+
+ defines = [
+ "LOGGING=1",
+ "USE_WEBRTC_DEV_BRANCH",
+ ]
+
+ sources = [
+ "asyncfile.cc",
+ "asyncfile.h",
+ "asynchttprequest.cc",
+ "asynchttprequest.h",
+ "asyncinvoker.cc",
+ "asyncinvoker.h",
+ "asyncinvoker-inl.h",
+ "asyncpacketsocket.h",
+ "asyncresolverinterface.h",
+ "asyncsocket.cc",
+ "asyncsocket.h",
+ "asynctcpsocket.cc",
+ "asynctcpsocket.h",
+ "asyncudpsocket.cc",
+ "asyncudpsocket.h",
+ "atomicops.h",
+ "autodetectproxy.cc",
+ "autodetectproxy.h",
+ "bandwidthsmoother.cc",
+ "bandwidthsmoother.h",
+ "base64.cc",
+ "base64.h",
+ "basicdefs.h",
+ "basictypes.h",
+ "bind.h",
+ "bind.h.pump",
+ "buffer.h",
+ "bytebuffer.cc",
+ "bytebuffer.h",
+ "byteorder.h",
+ "callback.h",
+ "callback.h.pump",
+ "checks.cc",
+ "checks.h",
+ "common.cc",
+ "common.h",
+ "constructormagic.h",
+ "cpumonitor.cc",
+ "cpumonitor.h",
+ "crc32.cc",
+ "crc32.h",
+ "criticalsection.h",
+ "cryptstring.h",
+ "dbus.cc",
+ "dbus.h",
+ "diskcache.cc",
+ "diskcache.h",
+ "event.cc",
+ "event.h",
+ "filelock.cc",
+ "filelock.h",
+ "fileutils.cc",
+ "fileutils.h",
+ "fileutils_mock.h",
+ "firewallsocketserver.cc",
+ "firewallsocketserver.h",
+ "flags.cc",
+ "flags.h",
+ "genericslot.h",
+ "genericslot.h.pump",
+ "gunit_prod.h",
+ "helpers.cc",
+ "helpers.h",
+ "httpbase.cc",
+ "httpbase.h",
+ "httpclient.cc",
+ "httpclient.h",
+ "httpcommon-inl.h",
+ "httpcommon.cc",
+ "httpcommon.h",
+ "httprequest.cc",
+ "httprequest.h",
+ "httpserver.cc",
+ "httpserver.h",
+ "ifaddrs-android.cc",
+ "ifaddrs-android.h",
+ "iosfilesystem.mm",
+ "ipaddress.cc",
+ "ipaddress.h",
+ "json.cc",
+ "json.h",
+ "latebindingsymboltable.cc",
+ "latebindingsymboltable.cc.def",
+ "latebindingsymboltable.h",
+ "latebindingsymboltable.h.def",
+ "libdbusglibsymboltable.cc",
+ "libdbusglibsymboltable.h",
+ "linux.cc",
+ "linux.h",
+ "linuxfdwalk.c",
+ "linuxfdwalk.h",
+ "linuxwindowpicker.cc",
+ "linuxwindowpicker.h",
+ "linked_ptr.h",
+ "logging.cc",
+ "logging.h",
+ "macasyncsocket.cc",
+ "macasyncsocket.h",
+ "maccocoasocketserver.h",
+ "maccocoasocketserver.mm",
+ "maccocoathreadhelper.h",
+ "maccocoathreadhelper.mm",
+ "macconversion.cc",
+ "macconversion.h",
+ "macsocketserver.cc",
+ "macsocketserver.h",
+ "macutils.cc",
+ "macutils.h",
+ "macwindowpicker.cc",
+ "macwindowpicker.h",
+ "mathutils.h",
+ "md5.cc",
+ "md5.h",
+ "md5digest.h",
+ "messagedigest.cc",
+ "messagedigest.h",
+ "messagehandler.cc",
+ "messagehandler.h",
+ "messagequeue.cc",
+ "messagequeue.h",
+ "multipart.cc",
+ "multipart.h",
+ "natserver.cc",
+ "natserver.h",
+ "natsocketfactory.cc",
+ "natsocketfactory.h",
+ "nattypes.cc",
+ "nattypes.h",
+ "nethelpers.cc",
+ "nethelpers.h",
+ "network.cc",
+ "network.h",
+ "nssidentity.cc",
+ "nssidentity.h",
+ "nssstreamadapter.cc",
+ "nssstreamadapter.h",
+ "nullsocketserver.h",
+ "openssl.h",
+ "openssladapter.cc",
+ "openssladapter.h",
+ "openssldigest.cc",
+ "openssldigest.h",
+ "opensslidentity.cc",
+ "opensslidentity.h",
+ "opensslstreamadapter.cc",
+ "opensslstreamadapter.h",
+ "optionsfile.cc",
+ "optionsfile.h",
+ "pathutils.cc",
+ "pathutils.h",
+ "physicalsocketserver.cc",
+ "physicalsocketserver.h",
+ "posix.cc",
+ "posix.h",
+ "profiler.cc",
+ "profiler.h",
+ "proxydetect.cc",
+ "proxydetect.h",
+ "proxyinfo.cc",
+ "proxyinfo.h",
+ "proxyserver.cc",
+ "proxyserver.h",
+ "ratelimiter.cc",
+ "ratelimiter.h",
+ "ratetracker.cc",
+ "ratetracker.h",
+ "refcount.h",
+ "referencecountedsingletonfactory.h",
+ "rollingaccumulator.h",
+ "schanneladapter.cc",
+ "schanneladapter.h",
+ "scoped_autorelease_pool.h",
+ "scoped_autorelease_pool.mm",
+ "scoped_ptr.h",
+ "scoped_ref_ptr.h",
+ "scopedptrcollection.h",
+ "sec_buffer.h",
+ "sha1.cc",
+ "sha1.h",
+ "sha1digest.h",
+ "sharedexclusivelock.cc",
+ "sharedexclusivelock.h",
+ "signalthread.cc",
+ "signalthread.h",
+ "sigslot.h",
+ "sigslotrepeater.h",
+ "socket.h",
+ "socketadapters.cc",
+ "socketadapters.h",
+ "socketaddress.cc",
+ "socketaddress.h",
+ "socketaddresspair.cc",
+ "socketaddresspair.h",
+ "socketfactory.h",
+ "socketpool.cc",
+ "socketpool.h",
+ "socketserver.h",
+ "socketstream.cc",
+ "socketstream.h",
+ "ssladapter.cc",
+ "ssladapter.h",
+ "sslconfig.h",
+ "sslfingerprint.cc",
+ "sslfingerprint.h",
+ "sslidentity.cc",
+ "sslidentity.h",
+ "sslroots.h",
+ "sslsocketfactory.cc",
+ "sslsocketfactory.h",
+ "sslstreamadapter.cc",
+ "sslstreamadapter.h",
+ "sslstreamadapterhelper.cc",
+ "sslstreamadapterhelper.h",
+ "stream.cc",
+ "stream.h",
+ "stringdigest.h",
+ "stringencode.cc",
+ "stringencode.h",
+ "stringutils.cc",
+ "stringutils.h",
+ "systeminfo.cc",
+ "systeminfo.h",
+ "task.cc",
+ "task.h",
+ "taskparent.cc",
+ "taskparent.h",
+ "taskrunner.cc",
+ "taskrunner.h",
+ "testclient.cc",
+ "testclient.h",
+ "thread.cc",
+ "thread.h",
+ "timeutils.cc",
+ "timeutils.h",
+ "timing.cc",
+ "timing.h",
+ "transformadapter.cc",
+ "transformadapter.h",
+ "unixfilesystem.cc",
+ "unixfilesystem.h",
+ "urlencode.cc",
+ "urlencode.h",
+ "versionparsing.cc",
+ "versionparsing.h",
+ "virtualsocketserver.cc",
+ "virtualsocketserver.h",
+ "win32.cc",
+ "win32.h",
+ "win32filesystem.cc",
+ "win32filesystem.h",
+ "win32regkey.cc",
+ "win32regkey.h",
+ "win32securityerrors.cc",
+ "win32socketinit.cc",
+ "win32socketinit.h",
+ "win32socketserver.cc",
+ "win32socketserver.h",
+ "win32window.cc",
+ "win32window.h",
+ "win32windowpicker.cc",
+ "win32windowpicker.h",
+ "window.h",
+ "windowpicker.h",
+ "windowpickerfactory.h",
+ "winfirewall.cc",
+ "winfirewall.h",
+ "winping.cc",
+ "winping.h",
+ "worker.cc",
+ "worker.h",
+ ]
+
+ if (build_with_chromium) {
+ sources += [
+ "../overrides/webrtc/base/basictypes.h",
+ "../overrides/webrtc/base/constructormagic.h",
+ "../overrides/webrtc/base/logging.cc",
+ "../overrides/webrtc/base/logging.h",
+ ]
+ if (is_win) {
+ sources += [ "../overrides/webrtc/base/win32socketinit.cc" ]
+ }
+ sources -= [
+ "asyncinvoker.cc",
+ "asyncinvoker.h",
+ "asyncinvoker-inl.h",
+ "asyncresolverinterface.h",
+ "atomicops.h",
+ "bandwidthsmoother.cc",
+ "bandwidthsmoother.h",
+ "basictypes.h",
+ "bind.h",
+ "bind.h.pump",
+ "buffer.h",
+ "callback.h",
+ "callback.h.pump",
+ "constructormagic.h",
+ "dbus.cc",
+ "dbus.h",
+ "filelock.cc",
+ "filelock.h",
+ "fileutils_mock.h",
+ "genericslot.h",
+ "genericslot.h.pump",
+ "httpserver.cc",
+ "httpserver.h",
+ "json.cc",
+ "json.h",
+ "latebindingsymboltable.cc",
+ "latebindingsymboltable.cc.def",
+ "latebindingsymboltable.h",
+ "latebindingsymboltable.h.def",
+ "libdbusglibsymboltable.cc",
+ "libdbusglibsymboltable.h",
+ "linuxfdwalk.c",
+ "linuxfdwalk.h",
+ "linuxwindowpicker.cc",
+ "linuxwindowpicker.h",
+ "logging.cc",
+ "logging.h",
+ #"macasyncsocket.cc",
+ #"macasyncsocket.h",
+ #"maccocoasocketserver.h",
+ #"maccocoasocketserver.mm",
+ #"macsocketserver.cc",
+ #"macsocketserver.h",
+ #"macwindowpicker.cc",
+ #"macwindowpicker.h",
+ "mathutils.h",
+ "multipart.cc",
+ "multipart.h",
+ "natserver.cc",
+ "natserver.h",
+ "natsocketfactory.cc",
+ "natsocketfactory.h",
+ "nattypes.cc",
+ "nattypes.h",
+ "openssl.h",
+ "optionsfile.cc",
+ "optionsfile.h",
+ "posix.cc",
+ "posix.h",
+ "profiler.cc",
+ "profiler.h",
+ "proxyserver.cc",
+ "proxyserver.h",
+ "refcount.h",
+ "referencecountedsingletonfactory.h",
+ "rollingaccumulator.h",
+ #"safe_conversions.h",
+ #"safe_conversions_impl.h",
+ "scopedptrcollection.h",
+ "scoped_ref_ptr.h",
+ "sec_buffer.h",
+ "sharedexclusivelock.cc",
+ "sharedexclusivelock.h",
+ "sslconfig.h",
+ "sslroots.h",
+ "stringdigest.h",
+ #"testbase64.h",
+ "testclient.cc",
+ "testclient.h",
+ #"testutils.h",
+ "transformadapter.cc",
+ "transformadapter.h",
+ "versionparsing.cc",
+ "versionparsing.h",
+ "virtualsocketserver.cc",
+ "virtualsocketserver.h",
+ #"win32regkey.cc",
+ #"win32regkey.h",
+ #"win32socketinit.cc",
+ #"win32socketinit.h",
+ #"win32socketserver.cc",
+ #"win32socketserver.h",
+ #"win32toolhelp.h",
+ "window.h",
+ "windowpickerfactory.h",
+ "windowpicker.h",
+ ]
+
+ include_dirs = [
+ "../overrides",
+ "../../openssl/openssl/include",
+ ]
+
+ direct_dependent_configs += [ ":webrtc_base_chromium_config" ]
+ } else {
+ if (is_win) {
+ sources += [
+ "diskcache_win32.cc",
+ "diskcache_win32.h",
+ ]
+ }
+
+ deps = [ "//third_party/jsoncpp" ]
+ }
+
+ # TODO(henrike): issue 3307, make webrtc_base build without disabling
+ # these flags.
+ cflags += [
+ "-Wno-extra",
+ "-Wno-all",
+ ]
+ cflags_cc += [ "-Wno-non-virtual-dtor" ]
+
+ if (use_openssl) {
+ direct_dependent_configs += [ ":openssl_config" ]
+
+ deps = [ "//third_party/openssl" ]
+ } else {
+ direct_dependent_configs += [ ":no_openssl_config" ]
+ }
+
+ if (is_android) {
+ direct_dependent_configs += [ ":android_config" ]
+
+ libs = [
+ "log",
+ "GLESv2"
+ ]
+ } else {
+ direct_dependent_configs += [ ":no_android_config" ]
+
+ sources -= [
+ "ifaddrs-android.cc",
+ "ifaddrs-android.h",
+ ]
+ }
+
+ if (is_ios) {
+ all_dependent_configs += [ ":ios_config" ]
+
+ deps = [ "//net/third_party/nss/ssl:libssl" ]
+ }
+
+ if (is_linux) {
+ libs = [
+ "crypto",
+ "dl",
+ "rt",
+ "Xext",
+ "X11",
+ "Xcomposite",
+ "Xrender",
+ ]
+ configs += [ "//third_party/nss:system_nss_no_ssl_config" ]
+ } else {
+ sources -= [
+ "dbus.cc",
+ "dbus.h",
+ "libdbusglibsymboltable.cc",
+ "libdbusglibsymboltable.h",
+ "linuxfdwalk.c",
+ "linuxfdwalk.h",
+ "linuxwindowpicker.cc",
+ "linuxwindowpicker.h",
+ ]
+ }
+
+ if (is_mac) {
+ all_dependent_configs = [ ":mac_config" ]
+
+ libs = [
+ "crypto", # $(SDKROOT)/usr/lib/libcrypto.dylib
+ "ssl", # $(SDKROOT)/usr/lib/libssl.dylib
+ ]
+ if (cpu_arch == "x86") {
+ all_dependent_configs += [ ":mac_x86_config" ]
+ }
+ } else {
+ sources -= [
+ "macasyncsocket.cc",
+ "macasyncsocket.h",
+ "maccocoasocketserver.h",
+ #"maccocoasocketserver.mm", # Seems to be excluded by default with GN.
+ "macconversion.cc",
+ "macconversion.h",
+ "macsocketserver.cc",
+ "macsocketserver.h",
+ "macutils.cc",
+ "macutils.h",
+ "macwindowpicker.cc",
+ "macwindowpicker.h",
+ ]
+ }
+
+ if (is_win) {
+ libs = [
+ "crypt32.lib",
+ "iphlpapi.lib",
+ "secur32.lib",
+ ]
+
+ cflags += [
+ # Suppress warnings about WIN32_LEAN_AND_MEAN.
+ "/wd4005",
+ "/wd4703",
+ ]
+
+ defines += [ "_CRT_NONSTDC_NO_DEPRECATE" ]
+ } else {
+ sources -= [
+ "schanneladapter.cc",
+ "schanneladapter.h",
+ "winping.cc",
+ "winping.h",
+ "winfirewall.cc",
+ "winfirewall.h",
+ # The files below were covered by a regex exclude in GYP.
+ "win32.cc",
+ "win32.h",
+ "win32filesystem.cc",
+ "win32filesystem.h",
+ "win32regkey.cc",
+ "win32regkey.h",
+ "win32securityerrors.cc",
+ "win32socketinit.cc",
+ "win32socketinit.h",
+ "win32socketserver.cc",
+ "win32socketserver.h",
+ "win32window.cc",
+ "win32window.h",
+ "win32windowpicker.cc",
+ "win32windowpicker.h",
+ ]
+ }
+
+ if (is_posix) {
+ if (is_debug) {
+ defines += [ "_DEBUG" ]
+ }
+ } else {
+ sources -= [
+ "latebindingsymboltable.cc",
+ "latebindingsymboltable.h",
+ "posix.cc",
+ "posix.h",
+ "unixfilesystem.cc",
+ "unixfilesystem.h",
+ ]
+ }
+
+ if (is_ios || (is_mac && cpu_arch != "x86")) {
+ defines += [ "CARBON_DEPRECATED=YES" ]
+ }
+
+ if (is_ios || !is_posix) {
+ sources -= [
+ "openssl.h",
+ "openssladapter.cc",
+ "openssladapter.h",
+ "openssldigest.cc",
+ "openssldigest.h",
+ "opensslidentity.cc",
+ "opensslidentity.h",
+ "opensslstreamadapter.cc",
+ "opensslstreamadapter.h",
+ ]
+ }
+
+ if (!is_linux && !is_android) {
+ sources -= [
+ "linux.cc",
+ "linux.h",
+ ]
+ }
+
+ if (is_mac || is_ios || is_win) {
+ deps += [
+ "//net/third_party/nss/ssl:libssl",
+ "//third_party/nss:nspr",
+ "//third_party/nss:nss",
+ ]
+ }
+
+ if (is_posix && !is_mac && !is_ios && !is_android) {
+ configs += [ ":linux_system_ssl" ]
+ }
+}
diff --git a/chromium/third_party/webrtc/base/OWNERS b/chromium/third_party/webrtc/base/OWNERS
new file mode 100644
index 00000000000..4091a93d740
--- /dev/null
+++ b/chromium/third_party/webrtc/base/OWNERS
@@ -0,0 +1,7 @@
+henrike@webrtc.org
+pwestin@webrtc.org
+perkj@webrtc.org
+henrika@webrtc.org
+henrikg@webrtc.org
+mflodman@webrtc.org
+niklas.enbom@webrtc.org
\ No newline at end of file
diff --git a/chromium/third_party/webrtc/base/asyncfile.cc b/chromium/third_party/webrtc/base/asyncfile.cc
new file mode 100644
index 00000000000..ea904c554ea
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncfile.cc
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/asyncfile.h"
+
+namespace rtc {
+
+AsyncFile::AsyncFile() {
+}
+
+AsyncFile::~AsyncFile() {
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/asyncfile.h b/chromium/third_party/webrtc/base/asyncfile.h
new file mode 100644
index 00000000000..dd467668802
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncfile.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ASYNCFILE_H__
+#define WEBRTC_BASE_ASYNCFILE_H__
+
+#include "webrtc/base/sigslot.h"
+
+namespace rtc {
+
+// Provides the ability to perform file I/O asynchronously.
+// TODO: Create a common base class with AsyncSocket.
+class AsyncFile {
+ public:
+ AsyncFile();
+ virtual ~AsyncFile();
+
+ // Determines whether the file will receive read events.
+ virtual bool readable() = 0;
+ virtual void set_readable(bool value) = 0;
+
+ // Determines whether the file will receive write events.
+ virtual bool writable() = 0;
+ virtual void set_writable(bool value) = 0;
+
+ sigslot::signal1<AsyncFile*> SignalReadEvent;
+ sigslot::signal1<AsyncFile*> SignalWriteEvent;
+ sigslot::signal2<AsyncFile*, int> SignalCloseEvent;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_ASYNCFILE_H__
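The AsyncFile interface above is pure-virtual and signal-based, so a caller interacts with it only through the sigslot signals and the readable/writable switches. A minimal consumer might look like the sketch below; FileWatcher and its handler names are illustrative and not part of this tree, but the signal signatures match the header above.

#include "webrtc/base/asyncfile.h"
#include "webrtc/base/sigslot.h"

// Watches an existing AsyncFile implementation for read and close events.
class FileWatcher : public sigslot::has_slots<> {
 public:
  explicit FileWatcher(rtc::AsyncFile* file) : file_(file) {
    file_->SignalReadEvent.connect(this, &FileWatcher::OnReadable);
    file_->SignalCloseEvent.connect(this, &FileWatcher::OnClosed);
    file_->set_readable(true);  // Opt in to read events.
  }

 private:
  void OnReadable(rtc::AsyncFile* file) {
    // Read from the underlying descriptor until it would block.
  }
  void OnClosed(rtc::AsyncFile* file, int error) {
    // EOF or error; stop watching.
  }

  rtc::AsyncFile* file_;  // Not owned.
};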
diff --git a/chromium/third_party/webrtc/base/asynchttprequest.cc b/chromium/third_party/webrtc/base/asynchttprequest.cc
new file mode 100644
index 00000000000..23042f17de4
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asynchttprequest.cc
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/asynchttprequest.h"
+
+namespace rtc {
+
+enum {
+ MSG_TIMEOUT = SignalThread::ST_MSG_FIRST_AVAILABLE,
+ MSG_LAUNCH_REQUEST
+};
+static const int kDefaultHTTPTimeout = 30 * 1000; // 30 sec
+
+///////////////////////////////////////////////////////////////////////////////
+// AsyncHttpRequest
+///////////////////////////////////////////////////////////////////////////////
+
+AsyncHttpRequest::AsyncHttpRequest(const std::string &user_agent)
+ : start_delay_(0),
+ firewall_(NULL),
+ port_(80),
+ secure_(false),
+ timeout_(kDefaultHTTPTimeout),
+ fail_redirect_(false),
+ factory_(Thread::Current()->socketserver(), user_agent),
+ pool_(&factory_),
+ client_(user_agent.c_str(), &pool_),
+ error_(HE_NONE) {
+ client_.SignalHttpClientComplete.connect(this,
+ &AsyncHttpRequest::OnComplete);
+}
+
+AsyncHttpRequest::~AsyncHttpRequest() {
+}
+
+void AsyncHttpRequest::OnWorkStart() {
+ if (start_delay_ <= 0) {
+ LaunchRequest();
+ } else {
+ Thread::Current()->PostDelayed(start_delay_, this, MSG_LAUNCH_REQUEST);
+ }
+}
+
+void AsyncHttpRequest::OnWorkStop() {
+ // worker is already quitting, no need to explicitly quit
+ LOG(LS_INFO) << "HttpRequest cancelled";
+}
+
+void AsyncHttpRequest::OnComplete(HttpClient* client, HttpErrorType error) {
+ Thread::Current()->Clear(this, MSG_TIMEOUT);
+
+ set_error(error);
+ if (!error) {
+ LOG(LS_INFO) << "HttpRequest completed successfully";
+
+ std::string value;
+ if (client_.response().hasHeader(HH_LOCATION, &value)) {
+ response_redirect_ = value.c_str();
+ }
+ } else {
+ LOG(LS_INFO) << "HttpRequest completed with error: " << error;
+ }
+
+ worker()->Quit();
+}
+
+void AsyncHttpRequest::OnMessage(Message* message) {
+ switch (message->message_id) {
+ case MSG_TIMEOUT:
+ LOG(LS_INFO) << "HttpRequest timed out";
+ client_.reset();
+ worker()->Quit();
+ break;
+ case MSG_LAUNCH_REQUEST:
+ LaunchRequest();
+ break;
+ default:
+ SignalThread::OnMessage(message);
+ break;
+ }
+}
+
+void AsyncHttpRequest::DoWork() {
+ // Do nothing while we wait for the request to finish. We only do this so
+ // that we can be a SignalThread; in the future this class should not be
+ // a SignalThread, since it does not need to spawn a new thread.
+ Thread::Current()->ProcessMessages(kForever);
+}
+
+void AsyncHttpRequest::LaunchRequest() {
+ factory_.SetProxy(proxy_);
+ if (secure_)
+ factory_.UseSSL(host_.c_str());
+
+ bool transparent_proxy = (port_ == 80) &&
+ ((proxy_.type == PROXY_HTTPS) || (proxy_.type == PROXY_UNKNOWN));
+ if (transparent_proxy) {
+ client_.set_proxy(proxy_);
+ }
+ client_.set_fail_redirect(fail_redirect_);
+ client_.set_server(SocketAddress(host_, port_));
+
+ LOG(LS_INFO) << "HttpRequest start: " << host_ + client_.request().path;
+
+ Thread::Current()->PostDelayed(timeout_, this, MSG_TIMEOUT);
+ client_.start();
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/asynchttprequest.h b/chromium/third_party/webrtc/base/asynchttprequest.h
new file mode 100644
index 00000000000..b8d13db8f0b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asynchttprequest.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ASYNCHTTPREQUEST_H_
+#define WEBRTC_BASE_ASYNCHTTPREQUEST_H_
+
+#include <string>
+#include "webrtc/base/event.h"
+#include "webrtc/base/httpclient.h"
+#include "webrtc/base/signalthread.h"
+#include "webrtc/base/socketpool.h"
+#include "webrtc/base/sslsocketfactory.h"
+
+namespace rtc {
+
+class FirewallManager;
+
+///////////////////////////////////////////////////////////////////////////////
+// AsyncHttpRequest
+// Performs an HTTP request on a background thread. Notifies on the foreground
+// thread once the request is done (successfully or unsuccessfully).
+///////////////////////////////////////////////////////////////////////////////
+
+class AsyncHttpRequest : public SignalThread {
+ public:
+ explicit AsyncHttpRequest(const std::string &user_agent);
+ ~AsyncHttpRequest();
+
+ // If start_delay is less than or equal to zero, this starts immediately.
+ // Start_delay defaults to zero.
+ int start_delay() const { return start_delay_; }
+ void set_start_delay(int delay) { start_delay_ = delay; }
+
+ const ProxyInfo& proxy() const { return proxy_; }
+ void set_proxy(const ProxyInfo& proxy) {
+ proxy_ = proxy;
+ }
+ void set_firewall(FirewallManager * firewall) {
+ firewall_ = firewall;
+ }
+
+ // The DNS name of the host to connect to.
+ const std::string& host() { return host_; }
+ void set_host(const std::string& host) { host_ = host; }
+
+ // The port to connect to on the target host.
+ int port() { return port_; }
+ void set_port(int port) { port_ = port; }
+
+ // Whether the request should use SSL.
+ bool secure() { return secure_; }
+ void set_secure(bool secure) { secure_ = secure; }
+
+ // Time to wait on the download, in ms.
+ int timeout() { return timeout_; }
+ void set_timeout(int timeout) { timeout_ = timeout; }
+
+ // Fail redirects to allow analysis of redirect urls, etc.
+ bool fail_redirect() const { return fail_redirect_; }
+ void set_fail_redirect(bool redirect) { fail_redirect_ = redirect; }
+
+ // Returns the redirect when redirection occurs
+ const std::string& response_redirect() { return response_redirect_; }
+
+ HttpRequestData& request() { return client_.request(); }
+ HttpResponseData& response() { return client_.response(); }
+ HttpErrorType error() { return error_; }
+
+ protected:
+ void set_error(HttpErrorType error) { error_ = error; }
+ virtual void OnWorkStart();
+ virtual void OnWorkStop();
+ void OnComplete(HttpClient* client, HttpErrorType error);
+ virtual void OnMessage(Message* message);
+ virtual void DoWork();
+
+ private:
+ void LaunchRequest();
+
+ int start_delay_;
+ ProxyInfo proxy_;
+ FirewallManager* firewall_;
+ std::string host_;
+ int port_;
+ bool secure_;
+ int timeout_;
+ bool fail_redirect_;
+ SslSocketFactory factory_;
+ ReuseSocketPool pool_;
+ HttpClient client_;
+ HttpErrorType error_;
+ std::string response_redirect_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_ASYNCHTTPREQUEST_H_
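The unit tests in the next file exercise AsyncHttpRequest end to end against a local HttpServer; condensed to its essentials, a caller drives the class roughly as in the hypothetical sketch below (ExampleFetcher and the agent/host/path values are illustrative, while the setters, SignalWorkDone, and Release() come from the headers shown here).

#include <string>

#include "webrtc/base/asynchttprequest.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/stream.h"

// Issues a GET on a background thread and inspects the result when the
// SignalThread reports completion.
class ExampleFetcher : public sigslot::has_slots<> {
 public:
  void Fetch(const std::string& host, int port, const std::string& path) {
    rtc::AsyncHttpRequest* req = new rtc::AsyncHttpRequest("example-agent");
    req->SignalWorkDone.connect(this, &ExampleFetcher::OnDone);
    req->set_host(host);
    req->set_port(port);
    req->request().verb = rtc::HV_GET;
    req->request().path = path;
    req->response().document.reset(new rtc::MemoryStream());
    req->Start();  // The request runs on the SignalThread's worker thread.
  }

 private:
  void OnDone(rtc::SignalThread* thread) {
    rtc::AsyncHttpRequest* req = static_cast<rtc::AsyncHttpRequest*>(thread);
    // Check req->error() and req->response().scode, read the document, then
    // release the self-deleting SignalThread.
    req->Release();
  }
};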
diff --git a/chromium/third_party/webrtc/base/asynchttprequest_unittest.cc b/chromium/third_party/webrtc/base/asynchttprequest_unittest.cc
new file mode 100644
index 00000000000..0bfd795b1d3
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asynchttprequest_unittest.cc
@@ -0,0 +1,233 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+#include "webrtc/base/asynchttprequest.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/httpserver.h"
+#include "webrtc/base/socketstream.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+static const SocketAddress kServerAddr("127.0.0.1", 0);
+static const SocketAddress kServerHostnameAddr("localhost", 0);
+static const char kServerGetPath[] = "/get";
+static const char kServerPostPath[] = "/post";
+static const char kServerResponse[] = "This is a test";
+
+class TestHttpServer : public HttpServer, public sigslot::has_slots<> {
+ public:
+ TestHttpServer(Thread* thread, const SocketAddress& addr) :
+ socket_(thread->socketserver()->CreateAsyncSocket(addr.family(),
+ SOCK_STREAM)) {
+ socket_->Bind(addr);
+ socket_->Listen(5);
+ socket_->SignalReadEvent.connect(this, &TestHttpServer::OnAccept);
+ }
+
+ SocketAddress address() const { return socket_->GetLocalAddress(); }
+ void Close() const { socket_->Close(); }
+
+ private:
+ void OnAccept(AsyncSocket* socket) {
+ AsyncSocket* new_socket = socket_->Accept(NULL);
+ if (new_socket) {
+ HandleConnection(new SocketStream(new_socket));
+ }
+ }
+ rtc::scoped_ptr<AsyncSocket> socket_;
+};
+
+class AsyncHttpRequestTest : public testing::Test,
+ public sigslot::has_slots<> {
+ public:
+ AsyncHttpRequestTest()
+ : started_(false),
+ done_(false),
+ server_(Thread::Current(), kServerAddr) {
+ server_.SignalHttpRequest.connect(this, &AsyncHttpRequestTest::OnRequest);
+ }
+
+ bool started() const { return started_; }
+ bool done() const { return done_; }
+
+ AsyncHttpRequest* CreateGetRequest(const std::string& host, int port,
+ const std::string& path) {
+ rtc::AsyncHttpRequest* request =
+ new rtc::AsyncHttpRequest("unittest");
+ request->SignalWorkDone.connect(this,
+ &AsyncHttpRequestTest::OnRequestDone);
+ request->request().verb = rtc::HV_GET;
+ request->set_host(host);
+ request->set_port(port);
+ request->request().path = path;
+ request->response().document.reset(new MemoryStream());
+ return request;
+ }
+ AsyncHttpRequest* CreatePostRequest(const std::string& host, int port,
+ const std::string& path,
+ const std::string content_type,
+ StreamInterface* content) {
+ rtc::AsyncHttpRequest* request =
+ new rtc::AsyncHttpRequest("unittest");
+ request->SignalWorkDone.connect(this,
+ &AsyncHttpRequestTest::OnRequestDone);
+ request->request().verb = rtc::HV_POST;
+ request->set_host(host);
+ request->set_port(port);
+ request->request().path = path;
+ request->request().setContent(content_type, content);
+ request->response().document.reset(new MemoryStream());
+ return request;
+ }
+
+ const TestHttpServer& server() const { return server_; }
+
+ protected:
+ void OnRequest(HttpServer* server, HttpServerTransaction* t) {
+ started_ = true;
+
+ if (t->request.path == kServerGetPath) {
+ t->response.set_success("text/plain", new MemoryStream(kServerResponse));
+ } else if (t->request.path == kServerPostPath) {
+ // reverse the data and reply
+ size_t size;
+ StreamInterface* in = t->request.document.get();
+ StreamInterface* out = new MemoryStream();
+ in->GetSize(&size);
+ for (size_t i = 0; i < size; ++i) {
+ char ch;
+ in->SetPosition(size - i - 1);
+ in->Read(&ch, 1, NULL, NULL);
+ out->Write(&ch, 1, NULL, NULL);
+ }
+ out->Rewind();
+ t->response.set_success("text/plain", out);
+ } else {
+ t->response.set_error(404);
+ }
+ server_.Respond(t);
+ }
+ void OnRequestDone(SignalThread* thread) {
+ done_ = true;
+ }
+
+ private:
+ bool started_;
+ bool done_;
+ TestHttpServer server_;
+};
+
+TEST_F(AsyncHttpRequestTest, TestGetSuccess) {
+ AsyncHttpRequest* req = CreateGetRequest(
+ kServerHostnameAddr.hostname(), server().address().port(),
+ kServerGetPath);
+ EXPECT_FALSE(started());
+ req->Start();
+ EXPECT_TRUE_WAIT(started(), 5000); // Should have started by now.
+ EXPECT_TRUE_WAIT(done(), 5000);
+ std::string response;
+ EXPECT_EQ(200U, req->response().scode);
+ ASSERT_TRUE(req->response().document);
+ req->response().document->Rewind();
+ req->response().document->ReadLine(&response);
+ EXPECT_EQ(kServerResponse, response);
+ req->Release();
+}
+
+TEST_F(AsyncHttpRequestTest, TestGetNotFound) {
+ AsyncHttpRequest* req = CreateGetRequest(
+ kServerHostnameAddr.hostname(), server().address().port(),
+ "/bad");
+ req->Start();
+ EXPECT_TRUE_WAIT(done(), 5000);
+ size_t size;
+ EXPECT_EQ(404U, req->response().scode);
+ ASSERT_TRUE(req->response().document);
+ req->response().document->GetSize(&size);
+ EXPECT_EQ(0U, size);
+ req->Release();
+}
+
+TEST_F(AsyncHttpRequestTest, TestGetToNonServer) {
+ AsyncHttpRequest* req = CreateGetRequest(
+ "127.0.0.1", server().address().port(),
+ kServerGetPath);
+ // Stop the server before we send the request.
+ server().Close();
+ req->Start();
+ EXPECT_TRUE_WAIT(done(), 10000);
+ size_t size;
+ EXPECT_EQ(500U, req->response().scode);
+ ASSERT_TRUE(req->response().document);
+ req->response().document->GetSize(&size);
+ EXPECT_EQ(0U, size);
+ req->Release();
+}
+
+TEST_F(AsyncHttpRequestTest, DISABLED_TestGetToInvalidHostname) {
+ AsyncHttpRequest* req = CreateGetRequest(
+ "invalid", server().address().port(),
+ kServerGetPath);
+ req->Start();
+ EXPECT_TRUE_WAIT(done(), 5000);
+ size_t size;
+ EXPECT_EQ(500U, req->response().scode);
+ ASSERT_TRUE(req->response().document);
+ req->response().document->GetSize(&size);
+ EXPECT_EQ(0U, size);
+ req->Release();
+}
+
+TEST_F(AsyncHttpRequestTest, TestPostSuccess) {
+ AsyncHttpRequest* req = CreatePostRequest(
+ kServerHostnameAddr.hostname(), server().address().port(),
+ kServerPostPath, "text/plain", new MemoryStream("abcd1234"));
+ req->Start();
+ EXPECT_TRUE_WAIT(done(), 5000);
+ std::string response;
+ EXPECT_EQ(200U, req->response().scode);
+ ASSERT_TRUE(req->response().document);
+ req->response().document->Rewind();
+ req->response().document->ReadLine(&response);
+ EXPECT_EQ("4321dcba", response);
+ req->Release();
+}
+
+// Ensure that we shut down properly even if work is outstanding.
+TEST_F(AsyncHttpRequestTest, TestCancel) {
+ AsyncHttpRequest* req = CreateGetRequest(
+ kServerHostnameAddr.hostname(), server().address().port(),
+ kServerGetPath);
+ req->Start();
+ req->Destroy(true);
+}
+
+TEST_F(AsyncHttpRequestTest, TestGetSuccessDelay) {
+ AsyncHttpRequest* req = CreateGetRequest(
+ kServerHostnameAddr.hostname(), server().address().port(),
+ kServerGetPath);
+ req->set_start_delay(10); // Delay 10ms.
+ req->Start();
+ Thread::SleepMs(5);
+ EXPECT_FALSE(started()); // Should not have started immediately.
+ EXPECT_TRUE_WAIT(started(), 5000); // Should have started by now.
+ EXPECT_TRUE_WAIT(done(), 5000);
+ std::string response;
+ EXPECT_EQ(200U, req->response().scode);
+ ASSERT_TRUE(req->response().document);
+ req->response().document->Rewind();
+ req->response().document->ReadLine(&response);
+ EXPECT_EQ(kServerResponse, response);
+ req->Release();
+}
+
+} // namespace rtc

diff --git a/chromium/third_party/webrtc/base/asyncinvoker-inl.h b/chromium/third_party/webrtc/base/asyncinvoker-inl.h
new file mode 100644
index 00000000000..733cb0e7973
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncinvoker-inl.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ASYNCINVOKER_INL_H_
+#define WEBRTC_BASE_ASYNCINVOKER_INL_H_
+
+#include "webrtc/base/bind.h"
+#include "webrtc/base/callback.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+class AsyncInvoker;
+
+// Helper class for AsyncInvoker. Runs a task and triggers a callback
+// on the calling thread if necessary. Instances are ref-counted so their
+// lifetime can be independent of AsyncInvoker.
+class AsyncClosure : public RefCountInterface {
+ public:
+ virtual ~AsyncClosure() {}
+ // Runs the asynchronous task, and triggers a callback to the calling
+ // thread if needed. Should be called from the target thread.
+ virtual void Execute() = 0;
+};
+
+// Simple closure that doesn't trigger a callback for the calling thread.
+template <class FunctorT>
+class FireAndForgetAsyncClosure : public AsyncClosure {
+ public:
+ explicit FireAndForgetAsyncClosure(const FunctorT& functor)
+ : functor_(functor) {}
+ virtual void Execute() {
+ functor_();
+ }
+ private:
+ FunctorT functor_;
+};
+
+// Base class for closures that may trigger a callback for the calling thread.
+// Listens for the "destroyed" signals from the calling thread and the invoker,
+// and cancels the callback to the calling thread if either is destroyed.
+class NotifyingAsyncClosureBase : public AsyncClosure,
+ public sigslot::has_slots<> {
+ public:
+ virtual ~NotifyingAsyncClosureBase() { disconnect_all(); }
+
+ protected:
+ NotifyingAsyncClosureBase(AsyncInvoker* invoker, Thread* calling_thread);
+ void TriggerCallback();
+ void SetCallback(const Callback0<void>& callback) {
+ CritScope cs(&crit_);
+ callback_ = callback;
+ }
+ bool CallbackCanceled() const { return calling_thread_ == NULL; }
+
+ private:
+ Callback0<void> callback_;
+ CriticalSection crit_;
+ AsyncInvoker* invoker_;
+ Thread* calling_thread_;
+
+ void CancelCallback();
+};
+
+// Closures that have a non-void return value and require a callback.
+template <class ReturnT, class FunctorT, class HostT>
+class NotifyingAsyncClosure : public NotifyingAsyncClosureBase {
+ public:
+ NotifyingAsyncClosure(AsyncInvoker* invoker,
+ Thread* calling_thread,
+ const FunctorT& functor,
+ void (HostT::*callback)(ReturnT),
+ HostT* callback_host)
+ : NotifyingAsyncClosureBase(invoker, calling_thread),
+ functor_(functor),
+ callback_(callback),
+ callback_host_(callback_host) {}
+ virtual void Execute() {
+ ReturnT result = functor_();
+ if (!CallbackCanceled()) {
+ SetCallback(Callback0<void>(Bind(callback_, callback_host_, result)));
+ TriggerCallback();
+ }
+ }
+
+ private:
+ FunctorT functor_;
+ void (HostT::*callback_)(ReturnT);
+ HostT* callback_host_;
+};
+
+// Closures that have a void return value and require a callback.
+template <class FunctorT, class HostT>
+class NotifyingAsyncClosure<void, FunctorT, HostT>
+ : public NotifyingAsyncClosureBase {
+ public:
+ NotifyingAsyncClosure(AsyncInvoker* invoker,
+ Thread* calling_thread,
+ const FunctorT& functor,
+ void (HostT::*callback)(),
+ HostT* callback_host)
+ : NotifyingAsyncClosureBase(invoker, calling_thread),
+ functor_(functor) {
+ SetCallback(Callback0<void>(Bind(callback, callback_host)));
+ }
+ virtual void Execute() {
+ functor_();
+ TriggerCallback();
+ }
+
+ private:
+ FunctorT functor_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_ASYNCINVOKER_INL_H_
diff --git a/chromium/third_party/webrtc/base/asyncinvoker.cc b/chromium/third_party/webrtc/base/asyncinvoker.cc
new file mode 100644
index 00000000000..ee423f11057
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncinvoker.cc
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/asyncinvoker.h"
+
+namespace rtc {
+
+AsyncInvoker::AsyncInvoker() : destroying_(false) {}
+
+AsyncInvoker::~AsyncInvoker() {
+ destroying_ = true;
+ SignalInvokerDestroyed();
+ // Messages for this need to be cleared *before* our destructor is complete.
+ MessageQueueManager::Clear(this);
+}
+
+void AsyncInvoker::OnMessage(Message* msg) {
+ // Get the AsyncClosure shared ptr from this message's data.
+ ScopedRefMessageData<AsyncClosure>* data =
+ static_cast<ScopedRefMessageData<AsyncClosure>*>(msg->pdata);
+ scoped_refptr<AsyncClosure> closure = data->data();
+ delete msg->pdata;
+ msg->pdata = NULL;
+
+ // Execute the closure and trigger the return message if needed.
+ closure->Execute();
+}
+
+void AsyncInvoker::Flush(Thread* thread, uint32 id /*= MQID_ANY*/) {
+ if (destroying_) return;
+
+ // Run this on |thread| to reduce the number of context switches.
+ if (Thread::Current() != thread) {
+ thread->Invoke<void>(Bind(&AsyncInvoker::Flush, this, thread, id));
+ return;
+ }
+
+ MessageList removed;
+ thread->Clear(this, id, &removed);
+ for (MessageList::iterator it = removed.begin(); it != removed.end(); ++it) {
+ // This message was pending on this thread, so run it now.
+ thread->Send(it->phandler,
+ it->message_id,
+ it->pdata);
+ }
+}
+
+void AsyncInvoker::DoInvoke(Thread* thread, AsyncClosure* closure,
+ uint32 id) {
+ if (destroying_) {
+ LOG(LS_WARNING) << "Tried to invoke while destroying the invoker.";
+ // Since this call transfers ownership of |closure|, we clean it up here.
+ delete closure;
+ return;
+ }
+ thread->Post(this, id, new ScopedRefMessageData<AsyncClosure>(closure));
+}
+
+NotifyingAsyncClosureBase::NotifyingAsyncClosureBase(AsyncInvoker* invoker,
+ Thread* calling_thread)
+ : invoker_(invoker), calling_thread_(calling_thread) {
+ calling_thread->SignalQueueDestroyed.connect(
+ this, &NotifyingAsyncClosureBase::CancelCallback);
+ invoker->SignalInvokerDestroyed.connect(
+ this, &NotifyingAsyncClosureBase::CancelCallback);
+}
+
+void NotifyingAsyncClosureBase::TriggerCallback() {
+ CritScope cs(&crit_);
+ if (!CallbackCanceled() && !callback_.empty()) {
+ invoker_->AsyncInvoke<void>(calling_thread_, callback_);
+ }
+}
+
+void NotifyingAsyncClosureBase::CancelCallback() {
+ // If the callback is triggering when this is called, block the
+ // destructor of the dying object here by waiting until the callback
+ // is done triggering.
+ CritScope cs(&crit_);
+ // calling_thread_ == NULL means do not trigger the callback.
+ calling_thread_ = NULL;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/asyncinvoker.h b/chromium/third_party/webrtc/base/asyncinvoker.h
new file mode 100644
index 00000000000..ee9a03c80a4
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncinvoker.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ASYNCINVOKER_H_
+#define WEBRTC_BASE_ASYNCINVOKER_H_
+
+#include "webrtc/base/asyncinvoker-inl.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/scopedptrcollection.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+// Invokes function objects (aka functors) asynchronously on a Thread, and
+// owns the lifetime of calls (ie, when this object is destroyed, calls in
+// flight are cancelled). AsyncInvoker can optionally execute a user-specified
+// function when the asynchronous call is complete, or operates in
+// fire-and-forget mode otherwise.
+//
+// AsyncInvoker does not own the thread it calls functors on.
+//
+// A note about async calls and object lifetimes: users should
+// be mindful of object lifetimes when calling functions asynchronously and
+// ensure objects used by the function _cannot_ be deleted between the
+// invocation and execution of the functor. AsyncInvoker is designed to
+// help: any calls in flight will be cancelled when the AsyncInvoker used to
+// make the call is destructed, and any calls executing will be allowed to
+// complete before AsyncInvoker destructs.
+//
+// The easiest way to ensure lifetimes are handled correctly is to create a
+// class that owns the Thread and AsyncInvoker objects, and then call its
+// methods asynchronously as needed.
+//
+// Example:
+// class MyClass {
+// public:
+// void FireAsyncTaskWithResult(Thread* thread, int x) {
+// // Specify a callback to get the result upon completion.
+// invoker_.AsyncInvoke<int>(
+// thread, Bind(&MyClass::AsyncTaskWithResult, this, x),
+// &MyClass::OnTaskComplete, this);
+// }
+// void FireAnotherAsyncTask(Thread* thread) {
+// // No callback specified means fire-and-forget.
+// invoker_.AsyncInvoke<void>(
+// thread, Bind(&MyClass::AnotherAsyncTask, this));
+// }
+//
+// private:
+// int AsyncTaskWithResult(int x) {
+// // Some long running process...
+// return x * x;
+// }
+// void AnotherAsyncTask() {
+// // Some other long running process...
+// }
+// void OnTaskComplete(int result) { result_ = result; }
+//
+// AsyncInvoker invoker_;
+// int result_;
+// };
+class AsyncInvoker : public MessageHandler {
+ public:
+ AsyncInvoker();
+ virtual ~AsyncInvoker();
+
+ // Call |functor| asynchronously on |thread|, with no callback upon
+ // completion. Returns immediately.
+ template <class ReturnT, class FunctorT>
+ void AsyncInvoke(Thread* thread,
+ const FunctorT& functor,
+ uint32 id = 0) {
+ AsyncClosure* closure =
+ new RefCountedObject<FireAndForgetAsyncClosure<FunctorT> >(functor);
+ DoInvoke(thread, closure, id);
+ }
+
+ // Call |functor| asynchronously on |thread|, calling |callback| when done.
+ template <class ReturnT, class FunctorT, class HostT>
+ void AsyncInvoke(Thread* thread,
+ const FunctorT& functor,
+ void (HostT::*callback)(ReturnT),
+ HostT* callback_host,
+ uint32 id = 0) {
+ AsyncClosure* closure =
+ new RefCountedObject<NotifyingAsyncClosure<ReturnT, FunctorT, HostT> >(
+ this, Thread::Current(), functor, callback, callback_host);
+ DoInvoke(thread, closure, id);
+ }
+
+ // Call |functor| asynchronously on |thread|, calling |callback| when done.
+ // Overloaded for void return.
+ template <class ReturnT, class FunctorT, class HostT>
+ void AsyncInvoke(Thread* thread,
+ const FunctorT& functor,
+ void (HostT::*callback)(),
+ HostT* callback_host,
+ uint32 id = 0) {
+ AsyncClosure* closure =
+ new RefCountedObject<NotifyingAsyncClosure<void, FunctorT, HostT> >(
+ this, Thread::Current(), functor, callback, callback_host);
+ DoInvoke(thread, closure, id);
+ }
+
+ // Synchronously execute on |thread| all outstanding calls we own
+ // that are pending on |thread|, and wait for calls to complete
+ // before returning. Optionally filter by message id.
+ // The destructor will not wait for outstanding calls, so if that
+ // behavior is desired, call Flush() before destroying this object.
+ void Flush(Thread* thread, uint32 id = MQID_ANY);
+
+ // Signaled when this object is destructed.
+ sigslot::signal0<> SignalInvokerDestroyed;
+
+ private:
+ virtual void OnMessage(Message* msg);
+ void DoInvoke(Thread* thread, AsyncClosure* closure, uint32 id);
+
+ bool destroying_;
+
+ DISALLOW_COPY_AND_ASSIGN(AsyncInvoker);
+};
+
+} // namespace rtc
+
+
+#endif // WEBRTC_BASE_ASYNCINVOKER_H_
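The following is a compilable variant of the MyClass example from the class comment above, shown as a sketch only; Worker, FireSquare, and Square are illustrative names, and the completion callback is delivered once the calling thread processes its messages.

#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/thread.h"

class Worker {
 public:
  explicit Worker(rtc::Thread* thread) : thread_(thread), result_(0) {}

  void FireSquare(int x) {
    // Run Square() on |thread_|; OnDone() runs back on the thread that
    // called FireSquare(), once that thread processes messages.
    invoker_.AsyncInvoke<int>(thread_, rtc::Bind(&Worker::Square, this, x),
                              &Worker::OnDone, this);
  }

  void WaitForPending() {
    // Synchronously drain calls still pending on |thread_|.
    invoker_.Flush(thread_);
  }

 private:
  int Square(int x) { return x * x; }
  void OnDone(int result) { result_ = result; }

  rtc::Thread* thread_;
  rtc::AsyncInvoker invoker_;
  int result_;
};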
diff --git a/chromium/third_party/webrtc/base/asyncpacketsocket.h b/chromium/third_party/webrtc/base/asyncpacketsocket.h
new file mode 100644
index 00000000000..dd91ea1f173
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncpacketsocket.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ASYNCPACKETSOCKET_H_
+#define WEBRTC_BASE_ASYNCPACKETSOCKET_H_
+
+#include "webrtc/base/dscp.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/socket.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+// This structure holds the info needed to update the packet send-time header
+// extension, including what is needed to update the authentication tag after
+// changing the value.
+struct PacketTimeUpdateParams {
+ PacketTimeUpdateParams()
+ : rtp_sendtime_extension_id(-1), srtp_auth_tag_len(-1),
+ srtp_packet_index(-1) {
+ }
+
+  int rtp_sendtime_extension_id;  // Extension header ID present in the packet.
+  std::vector<char> srtp_auth_key;  // Authentication key.
+  int srtp_auth_tag_len;  // Authentication tag length.
+  int64 srtp_packet_index;  // Required for RTP packet authentication.
+};
+
+// This structure holds meta information for a packet that is about to be sent
+// over the network.
+struct PacketOptions {
+ PacketOptions() : dscp(DSCP_NO_CHANGE) {}
+ explicit PacketOptions(DiffServCodePoint dscp) : dscp(dscp) {}
+
+ DiffServCodePoint dscp;
+ PacketTimeUpdateParams packet_time_params;
+};
+
+// This structure holds information about when a packet was actually
+// received by the socket.
+struct PacketTime {
+ PacketTime() : timestamp(-1), not_before(-1) {}
+ PacketTime(int64 timestamp, int64 not_before)
+ : timestamp(timestamp), not_before(not_before) {
+ }
+
+ int64 timestamp; // Receive time after socket delivers the data.
+  int64 not_before;  // Earliest possible time the data could have arrived,
+                     // indicating the potential error in the |timestamp|
+                     // value, in case the system is busy. For example, the
+                     // time of the last select() call.
+                     // If unknown, this value will be set to zero.
+};
+
+inline PacketTime CreatePacketTime(int64 not_before) {
+ return PacketTime(TimeMicros(), not_before);
+}
+
+// Provides the ability to receive packets asynchronously. Sends are not
+// buffered since it is acceptable to drop packets under high load.
+class AsyncPacketSocket : public sigslot::has_slots<> {
+ public:
+ enum State {
+ STATE_CLOSED,
+ STATE_BINDING,
+ STATE_BOUND,
+ STATE_CONNECTING,
+ STATE_CONNECTED
+ };
+
+ AsyncPacketSocket() { }
+ virtual ~AsyncPacketSocket() { }
+
+ // Returns current local address. Address may be set to NULL if the
+ // socket is not bound yet (GetState() returns STATE_BINDING).
+ virtual SocketAddress GetLocalAddress() const = 0;
+
+ // Returns remote address. Returns zeroes if this is not a client TCP socket.
+ virtual SocketAddress GetRemoteAddress() const = 0;
+
+ // Send a packet.
+ virtual int Send(const void *pv, size_t cb, const PacketOptions& options) = 0;
+ virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr,
+ const PacketOptions& options) = 0;
+
+ // Close the socket.
+ virtual int Close() = 0;
+
+ // Returns current state of the socket.
+ virtual State GetState() const = 0;
+
+ // Get/set options.
+ virtual int GetOption(Socket::Option opt, int* value) = 0;
+ virtual int SetOption(Socket::Option opt, int value) = 0;
+
+ // Get/Set current error.
+ // TODO: Remove SetError().
+ virtual int GetError() const = 0;
+ virtual void SetError(int error) = 0;
+
+ // Emitted each time a packet is read. Used only for UDP and
+ // connected TCP sockets.
+ sigslot::signal5<AsyncPacketSocket*, const char*, size_t,
+ const SocketAddress&,
+ const PacketTime&> SignalReadPacket;
+
+ // Emitted when the socket is currently able to send.
+ sigslot::signal1<AsyncPacketSocket*> SignalReadyToSend;
+
+  // Emitted after the address for the socket is allocated, i.e. binding
+  // is finished. The state of the socket changes from BINDING to BOUND
+ // (for UDP and server TCP sockets) or CONNECTING (for client TCP
+ // sockets).
+ sigslot::signal2<AsyncPacketSocket*, const SocketAddress&> SignalAddressReady;
+
+ // Emitted for client TCP sockets when state is changed from
+ // CONNECTING to CONNECTED.
+ sigslot::signal1<AsyncPacketSocket*> SignalConnect;
+
+ // Emitted for client TCP sockets when state is changed from
+ // CONNECTED to CLOSED.
+ sigslot::signal2<AsyncPacketSocket*, int> SignalClose;
+
+ // Used only for listening TCP sockets.
+ sigslot::signal2<AsyncPacketSocket*, AsyncPacketSocket*> SignalNewConnection;
+
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(AsyncPacketSocket);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_ASYNCPACKETSOCKET_H_
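Here is a sketch of how a consumer of this interface typically wires the signals declared above and sends with PacketOptions; PacketSink and its members are illustrative names, not part of the patch.

#include <string>

#include "webrtc/base/asyncpacketsocket.h"
#include "webrtc/base/sigslot.h"

class PacketSink : public sigslot::has_slots<> {
 public:
  explicit PacketSink(rtc::AsyncPacketSocket* socket) : socket_(socket) {
    socket_->SignalReadPacket.connect(this, &PacketSink::OnPacket);
    socket_->SignalReadyToSend.connect(this, &PacketSink::OnReadyToSend);
  }

  int SendString(const std::string& payload) {
    rtc::PacketOptions options;          // dscp defaults to DSCP_NO_CHANGE.
    options.dscp = rtc::DSCP_NO_CHANGE;  // Or pick another DiffServCodePoint.
    return socket_->Send(payload.data(), payload.size(), options);
  }

 private:
  void OnPacket(rtc::AsyncPacketSocket* socket, const char* data, size_t len,
                const rtc::SocketAddress& remote_addr,
                const rtc::PacketTime& packet_time) {
    // |packet_time.timestamp| is in microseconds (see CreatePacketTime above).
    last_packet_.assign(data, len);
  }
  void OnReadyToSend(rtc::AsyncPacketSocket* socket) {}

  rtc::AsyncPacketSocket* socket_;
  std::string last_packet_;
};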
diff --git a/chromium/third_party/webrtc/base/asyncresolverinterface.h b/chromium/third_party/webrtc/base/asyncresolverinterface.h
new file mode 100644
index 00000000000..4b401bd975c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncresolverinterface.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ASYNCRESOLVERINTERFACE_H_
+#define WEBRTC_BASE_ASYNCRESOLVERINTERFACE_H_
+
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/socketaddress.h"
+
+namespace rtc {
+
+// This interface defines the methods to resolve the address asynchronously.
+class AsyncResolverInterface {
+ public:
+ AsyncResolverInterface() {}
+ virtual ~AsyncResolverInterface() {}
+
+  // Starts the address resolution process.
+  virtual void Start(const SocketAddress& addr) = 0;
+  // Returns the topmost resolved address of |family|.
+  virtual bool GetResolvedAddress(int family, SocketAddress* addr) const = 0;
+  // Returns the error from the resolver.
+  virtual int GetError() const = 0;
+  // Deletes the resolver.
+  virtual void Destroy(bool wait) = 0;
+  // Returns the topmost resolved IPv4 address if the address was resolved
+  // successfully; otherwise returns the address set in SetAddress.
+ SocketAddress address() const {
+ SocketAddress addr;
+ GetResolvedAddress(AF_INET, &addr);
+ return addr;
+ }
+
+ // This signal is fired when address resolve process is completed.
+ sigslot::signal1<AsyncResolverInterface*> SignalDone;
+};
+
+} // namespace rtc
+
+#endif  // WEBRTC_BASE_ASYNCRESOLVERINTERFACE_H_
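A sketch of a caller of this interface: connect SignalDone, start resolution, and read the result in the handler. ResolveClient and OnResolved are illustrative names, not part of the patch.

#include "webrtc/base/asyncresolverinterface.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/socketaddress.h"

class ResolveClient : public sigslot::has_slots<> {
 public:
  void Resolve(rtc::AsyncResolverInterface* resolver,
               const rtc::SocketAddress& addr) {
    resolver->SignalDone.connect(this, &ResolveClient::OnResolved);
    resolver->Start(addr);
  }

 private:
  void OnResolved(rtc::AsyncResolverInterface* resolver) {
    rtc::SocketAddress result;
    if (resolver->GetError() == 0 &&
        resolver->GetResolvedAddress(AF_INET, &result)) {
      // Use |result| here.
    }
    resolver->Destroy(false);  // Dispose of the resolver, per Destroy() above.
  }
};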
diff --git a/chromium/third_party/webrtc/base/asyncsocket.cc b/chromium/third_party/webrtc/base/asyncsocket.cc
new file mode 100644
index 00000000000..d565c6e991a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncsocket.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/asyncsocket.h"
+
+namespace rtc {
+
+AsyncSocket::AsyncSocket() {
+}
+
+AsyncSocket::~AsyncSocket() {
+}
+
+AsyncSocketAdapter::AsyncSocketAdapter(AsyncSocket* socket) : socket_(NULL) {
+ Attach(socket);
+}
+
+AsyncSocketAdapter::~AsyncSocketAdapter() {
+ delete socket_;
+}
+
+void AsyncSocketAdapter::Attach(AsyncSocket* socket) {
+ ASSERT(!socket_);
+ socket_ = socket;
+ if (socket_) {
+ socket_->SignalConnectEvent.connect(this,
+ &AsyncSocketAdapter::OnConnectEvent);
+ socket_->SignalReadEvent.connect(this,
+ &AsyncSocketAdapter::OnReadEvent);
+ socket_->SignalWriteEvent.connect(this,
+ &AsyncSocketAdapter::OnWriteEvent);
+ socket_->SignalCloseEvent.connect(this,
+ &AsyncSocketAdapter::OnCloseEvent);
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/asyncsocket.h b/chromium/third_party/webrtc/base/asyncsocket.h
new file mode 100644
index 00000000000..a6f3158e90f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncsocket.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ASYNCSOCKET_H_
+#define WEBRTC_BASE_ASYNCSOCKET_H_
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/socket.h"
+
+namespace rtc {
+
+// TODO: Remove Socket and rename AsyncSocket to Socket.
+
+// Provides the ability to perform socket I/O asynchronously.
+class AsyncSocket : public Socket {
+ public:
+ AsyncSocket();
+ virtual ~AsyncSocket();
+
+ virtual AsyncSocket* Accept(SocketAddress* paddr) = 0;
+
+  // SignalReadEvent and SignalWriteEvent use multi_threaded_local to allow
+  // concurrent access from different threads.
+  // For example, SignalReadEvent::connect will be called in the AsyncUDPSocket
+  // ctor, but at the same time the SocketDispatcher may be signaling the read
+  // event.
+  // ready to read
+ sigslot::signal1<AsyncSocket*,
+ sigslot::multi_threaded_local> SignalReadEvent;
+ // ready to write
+ sigslot::signal1<AsyncSocket*,
+ sigslot::multi_threaded_local> SignalWriteEvent;
+ sigslot::signal1<AsyncSocket*> SignalConnectEvent; // connected
+ sigslot::signal2<AsyncSocket*, int> SignalCloseEvent; // closed
+};
+
+class AsyncSocketAdapter : public AsyncSocket, public sigslot::has_slots<> {
+ public:
+ // The adapted socket may explicitly be NULL, and later assigned using Attach.
+ // However, subclasses which support detached mode must override any methods
+ // that will be called during the detached period (usually GetState()), to
+ // avoid dereferencing a null pointer.
+ explicit AsyncSocketAdapter(AsyncSocket* socket);
+ virtual ~AsyncSocketAdapter();
+ void Attach(AsyncSocket* socket);
+ virtual SocketAddress GetLocalAddress() const {
+ return socket_->GetLocalAddress();
+ }
+ virtual SocketAddress GetRemoteAddress() const {
+ return socket_->GetRemoteAddress();
+ }
+ virtual int Bind(const SocketAddress& addr) {
+ return socket_->Bind(addr);
+ }
+ virtual int Connect(const SocketAddress& addr) {
+ return socket_->Connect(addr);
+ }
+ virtual int Send(const void* pv, size_t cb) {
+ return socket_->Send(pv, cb);
+ }
+ virtual int SendTo(const void* pv, size_t cb, const SocketAddress& addr) {
+ return socket_->SendTo(pv, cb, addr);
+ }
+ virtual int Recv(void* pv, size_t cb) {
+ return socket_->Recv(pv, cb);
+ }
+ virtual int RecvFrom(void* pv, size_t cb, SocketAddress* paddr) {
+ return socket_->RecvFrom(pv, cb, paddr);
+ }
+ virtual int Listen(int backlog) {
+ return socket_->Listen(backlog);
+ }
+ virtual AsyncSocket* Accept(SocketAddress* paddr) {
+ return socket_->Accept(paddr);
+ }
+ virtual int Close() {
+ return socket_->Close();
+ }
+ virtual int GetError() const {
+ return socket_->GetError();
+ }
+ virtual void SetError(int error) {
+ return socket_->SetError(error);
+ }
+ virtual ConnState GetState() const {
+ return socket_->GetState();
+ }
+ virtual int EstimateMTU(uint16* mtu) {
+ return socket_->EstimateMTU(mtu);
+ }
+ virtual int GetOption(Option opt, int* value) {
+ return socket_->GetOption(opt, value);
+ }
+ virtual int SetOption(Option opt, int value) {
+ return socket_->SetOption(opt, value);
+ }
+
+ protected:
+ virtual void OnConnectEvent(AsyncSocket* socket) {
+ SignalConnectEvent(this);
+ }
+ virtual void OnReadEvent(AsyncSocket* socket) {
+ SignalReadEvent(this);
+ }
+ virtual void OnWriteEvent(AsyncSocket* socket) {
+ SignalWriteEvent(this);
+ }
+ virtual void OnCloseEvent(AsyncSocket* socket, int err) {
+ SignalCloseEvent(this, err);
+ }
+
+ AsyncSocket* socket_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_ASYNCSOCKET_H_
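A sketch of the adapter pattern above: a pass-through AsyncSocketAdapter subclass that counts bytes written to the wrapped socket. CountingSocketAdapter is an illustrative name, not part of the patch.

#include "webrtc/base/asyncsocket.h"

class CountingSocketAdapter : public rtc::AsyncSocketAdapter {
 public:
  explicit CountingSocketAdapter(rtc::AsyncSocket* socket)
      : rtc::AsyncSocketAdapter(socket), bytes_sent_(0) {}

  virtual int Send(const void* pv, size_t cb) {
    // Delegate to the wrapped socket and record how much was accepted.
    int result = rtc::AsyncSocketAdapter::Send(pv, cb);
    if (result > 0)
      bytes_sent_ += result;
    return result;
  }

  size_t bytes_sent() const { return bytes_sent_; }

 private:
  size_t bytes_sent_;
};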
diff --git a/chromium/third_party/webrtc/base/asynctcpsocket.cc b/chromium/third_party/webrtc/base/asynctcpsocket.cc
new file mode 100644
index 00000000000..0f7abd5a6cf
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asynctcpsocket.cc
@@ -0,0 +1,299 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/asynctcpsocket.h"
+
+#include <string.h>
+
+#include "webrtc/base/byteorder.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+
+#if defined(WEBRTC_POSIX)
+#include <errno.h>
+#endif // WEBRTC_POSIX
+
+namespace rtc {
+
+static const size_t kMaxPacketSize = 64 * 1024;
+
+typedef uint16 PacketLength;
+static const size_t kPacketLenSize = sizeof(PacketLength);
+
+static const size_t kBufSize = kMaxPacketSize + kPacketLenSize;
+
+static const int kListenBacklog = 5;
+
+// Binds and connects |socket|
+AsyncSocket* AsyncTCPSocketBase::ConnectSocket(
+ rtc::AsyncSocket* socket,
+ const rtc::SocketAddress& bind_address,
+ const rtc::SocketAddress& remote_address) {
+ rtc::scoped_ptr<rtc::AsyncSocket> owned_socket(socket);
+ if (socket->Bind(bind_address) < 0) {
+ LOG(LS_ERROR) << "Bind() failed with error " << socket->GetError();
+ return NULL;
+ }
+ if (socket->Connect(remote_address) < 0) {
+ LOG(LS_ERROR) << "Connect() failed with error " << socket->GetError();
+ return NULL;
+ }
+ return owned_socket.release();
+}
+
+AsyncTCPSocketBase::AsyncTCPSocketBase(AsyncSocket* socket, bool listen,
+ size_t max_packet_size)
+ : socket_(socket),
+ listen_(listen),
+ insize_(max_packet_size),
+ inpos_(0),
+ outsize_(max_packet_size),
+ outpos_(0) {
+ inbuf_ = new char[insize_];
+ outbuf_ = new char[outsize_];
+
+ ASSERT(socket_.get() != NULL);
+ socket_->SignalConnectEvent.connect(
+ this, &AsyncTCPSocketBase::OnConnectEvent);
+ socket_->SignalReadEvent.connect(this, &AsyncTCPSocketBase::OnReadEvent);
+ socket_->SignalWriteEvent.connect(this, &AsyncTCPSocketBase::OnWriteEvent);
+ socket_->SignalCloseEvent.connect(this, &AsyncTCPSocketBase::OnCloseEvent);
+
+ if (listen_) {
+ if (socket_->Listen(kListenBacklog) < 0) {
+ LOG(LS_ERROR) << "Listen() failed with error " << socket_->GetError();
+ }
+ }
+}
+
+AsyncTCPSocketBase::~AsyncTCPSocketBase() {
+ delete [] inbuf_;
+ delete [] outbuf_;
+}
+
+SocketAddress AsyncTCPSocketBase::GetLocalAddress() const {
+ return socket_->GetLocalAddress();
+}
+
+SocketAddress AsyncTCPSocketBase::GetRemoteAddress() const {
+ return socket_->GetRemoteAddress();
+}
+
+int AsyncTCPSocketBase::Close() {
+ return socket_->Close();
+}
+
+AsyncTCPSocket::State AsyncTCPSocketBase::GetState() const {
+ switch (socket_->GetState()) {
+ case Socket::CS_CLOSED:
+ return STATE_CLOSED;
+ case Socket::CS_CONNECTING:
+ if (listen_) {
+ return STATE_BOUND;
+ } else {
+ return STATE_CONNECTING;
+ }
+ case Socket::CS_CONNECTED:
+ return STATE_CONNECTED;
+ default:
+ ASSERT(false);
+ return STATE_CLOSED;
+ }
+}
+
+int AsyncTCPSocketBase::GetOption(Socket::Option opt, int* value) {
+ return socket_->GetOption(opt, value);
+}
+
+int AsyncTCPSocketBase::SetOption(Socket::Option opt, int value) {
+ return socket_->SetOption(opt, value);
+}
+
+int AsyncTCPSocketBase::GetError() const {
+ return socket_->GetError();
+}
+
+void AsyncTCPSocketBase::SetError(int error) {
+ return socket_->SetError(error);
+}
+
+int AsyncTCPSocketBase::SendTo(const void *pv, size_t cb,
+ const SocketAddress& addr,
+ const rtc::PacketOptions& options) {
+ if (addr == GetRemoteAddress())
+ return Send(pv, cb, options);
+
+ ASSERT(false);
+ socket_->SetError(ENOTCONN);
+ return -1;
+}
+
+int AsyncTCPSocketBase::SendRaw(const void * pv, size_t cb) {
+ if (outpos_ + cb > outsize_) {
+ socket_->SetError(EMSGSIZE);
+ return -1;
+ }
+
+ memcpy(outbuf_ + outpos_, pv, cb);
+ outpos_ += cb;
+
+ return FlushOutBuffer();
+}
+
+int AsyncTCPSocketBase::FlushOutBuffer() {
+ int res = socket_->Send(outbuf_, outpos_);
+ if (res <= 0) {
+ return res;
+ }
+ if (static_cast<size_t>(res) <= outpos_) {
+ outpos_ -= res;
+ } else {
+ ASSERT(false);
+ return -1;
+ }
+ if (outpos_ > 0) {
+ memmove(outbuf_, outbuf_ + res, outpos_);
+ }
+ return res;
+}
+
+void AsyncTCPSocketBase::AppendToOutBuffer(const void* pv, size_t cb) {
+ ASSERT(outpos_ + cb < outsize_);
+ memcpy(outbuf_ + outpos_, pv, cb);
+ outpos_ += cb;
+}
+
+void AsyncTCPSocketBase::OnConnectEvent(AsyncSocket* socket) {
+ SignalConnect(this);
+}
+
+void AsyncTCPSocketBase::OnReadEvent(AsyncSocket* socket) {
+ ASSERT(socket_.get() == socket);
+
+ if (listen_) {
+ rtc::SocketAddress address;
+ rtc::AsyncSocket* new_socket = socket->Accept(&address);
+ if (!new_socket) {
+ // TODO: Do something better like forwarding the error
+ // to the user.
+ LOG(LS_ERROR) << "TCP accept failed with error " << socket_->GetError();
+ return;
+ }
+
+ HandleIncomingConnection(new_socket);
+
+ // Prime a read event in case data is waiting.
+ new_socket->SignalReadEvent(new_socket);
+ } else {
+ int len = socket_->Recv(inbuf_ + inpos_, insize_ - inpos_);
+ if (len < 0) {
+ // TODO: Do something better like forwarding the error to the user.
+ if (!socket_->IsBlocking()) {
+ LOG(LS_ERROR) << "Recv() returned error: " << socket_->GetError();
+ }
+ return;
+ }
+
+ inpos_ += len;
+
+ ProcessInput(inbuf_, &inpos_);
+
+ if (inpos_ >= insize_) {
+ LOG(LS_ERROR) << "input buffer overflow";
+ ASSERT(false);
+ inpos_ = 0;
+ }
+ }
+}
+
+void AsyncTCPSocketBase::OnWriteEvent(AsyncSocket* socket) {
+ ASSERT(socket_.get() == socket);
+
+ if (outpos_ > 0) {
+ FlushOutBuffer();
+ }
+
+ if (outpos_ == 0) {
+ SignalReadyToSend(this);
+ }
+}
+
+void AsyncTCPSocketBase::OnCloseEvent(AsyncSocket* socket, int error) {
+ SignalClose(this, error);
+}
+
+// AsyncTCPSocket
+// Binds and connects |socket| and creates AsyncTCPSocket for
+// it. Takes ownership of |socket|. Returns NULL if bind() or
+// connect() fail (|socket| is destroyed in that case).
+AsyncTCPSocket* AsyncTCPSocket::Create(
+ AsyncSocket* socket,
+ const SocketAddress& bind_address,
+ const SocketAddress& remote_address) {
+ return new AsyncTCPSocket(AsyncTCPSocketBase::ConnectSocket(
+ socket, bind_address, remote_address), false);
+}
+
+AsyncTCPSocket::AsyncTCPSocket(AsyncSocket* socket, bool listen)
+ : AsyncTCPSocketBase(socket, listen, kBufSize) {
+}
+
+int AsyncTCPSocket::Send(const void *pv, size_t cb,
+ const rtc::PacketOptions& options) {
+ if (cb > kBufSize) {
+ SetError(EMSGSIZE);
+ return -1;
+ }
+
+ // If we are blocking on send, then silently drop this packet
+ if (!IsOutBufferEmpty())
+ return static_cast<int>(cb);
+
+ PacketLength pkt_len = HostToNetwork16(static_cast<PacketLength>(cb));
+ AppendToOutBuffer(&pkt_len, kPacketLenSize);
+ AppendToOutBuffer(pv, cb);
+
+ int res = FlushOutBuffer();
+ if (res <= 0) {
+ // drop packet if we made no progress
+ ClearOutBuffer();
+ return res;
+ }
+
+  // We claim to have sent the whole thing, even if we only sent part of it.
+ return static_cast<int>(cb);
+}
+
+void AsyncTCPSocket::ProcessInput(char * data, size_t* len) {
+ SocketAddress remote_addr(GetRemoteAddress());
+
+ while (true) {
+ if (*len < kPacketLenSize)
+ return;
+
+ PacketLength pkt_len = rtc::GetBE16(data);
+ if (*len < kPacketLenSize + pkt_len)
+ return;
+
+ SignalReadPacket(this, data + kPacketLenSize, pkt_len, remote_addr,
+ CreatePacketTime(0));
+
+ *len -= kPacketLenSize + pkt_len;
+ if (*len > 0) {
+ memmove(data, data + kPacketLenSize + pkt_len, *len);
+ }
+ }
+}
+
+void AsyncTCPSocket::HandleIncomingConnection(AsyncSocket* socket) {
+ SignalNewConnection(this, new AsyncTCPSocket(socket, false));
+}
+
+} // namespace rtc
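The Send()/ProcessInput() pair above implements a simple 16-bit big-endian length-prefix framing. Below is a stand-alone sketch of that framing using the same byteorder.h helpers; FrameTcpPacket and UnframeTcpPacket are illustrative helpers, not part of the patch.

#include <string>

#include "webrtc/base/basictypes.h"
#include "webrtc/base/byteorder.h"

// Prepends a 16-bit big-endian length to |data|, as AsyncTCPSocket::Send does.
static std::string FrameTcpPacket(const void* data, size_t len) {
  uint16 pkt_len = rtc::HostToNetwork16(static_cast<uint16>(len));
  std::string framed(reinterpret_cast<const char*>(&pkt_len), sizeof(pkt_len));
  framed.append(reinterpret_cast<const char*>(data), len);
  return framed;
}

// Returns true and fills |packet| if |buffer| holds at least one whole frame;
// the consumed bytes are removed from |buffer|, mirroring ProcessInput().
static bool UnframeTcpPacket(std::string* buffer, std::string* packet) {
  if (buffer->size() < sizeof(uint16))
    return false;
  uint16 pkt_len = rtc::GetBE16(buffer->data());
  if (buffer->size() < sizeof(uint16) + pkt_len)
    return false;
  packet->assign(buffer->data() + sizeof(uint16), pkt_len);
  buffer->erase(0, sizeof(uint16) + pkt_len);
  return true;
}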
diff --git a/chromium/third_party/webrtc/base/asynctcpsocket.h b/chromium/third_party/webrtc/base/asynctcpsocket.h
new file mode 100644
index 00000000000..ddee2615ab5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asynctcpsocket.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ASYNCTCPSOCKET_H_
+#define WEBRTC_BASE_ASYNCTCPSOCKET_H_
+
+#include "webrtc/base/asyncpacketsocket.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/socketfactory.h"
+
+namespace rtc {
+
+// Simulates UDP semantics over TCP. Send and Recv packet sizes
+// are preserved, and packets are silently dropped on Send rather than
+// buffered in user space.
+class AsyncTCPSocketBase : public AsyncPacketSocket {
+ public:
+ AsyncTCPSocketBase(AsyncSocket* socket, bool listen, size_t max_packet_size);
+ virtual ~AsyncTCPSocketBase();
+
+ // Pure virtual methods to send and recv data.
+ virtual int Send(const void *pv, size_t cb,
+ const rtc::PacketOptions& options) = 0;
+ virtual void ProcessInput(char* data, size_t* len) = 0;
+ // Signals incoming connection.
+ virtual void HandleIncomingConnection(AsyncSocket* socket) = 0;
+
+ virtual SocketAddress GetLocalAddress() const;
+ virtual SocketAddress GetRemoteAddress() const;
+ virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr,
+ const rtc::PacketOptions& options);
+ virtual int Close();
+
+ virtual State GetState() const;
+ virtual int GetOption(Socket::Option opt, int* value);
+ virtual int SetOption(Socket::Option opt, int value);
+ virtual int GetError() const;
+ virtual void SetError(int error);
+
+ protected:
+ // Binds and connects |socket| and creates AsyncTCPSocket for
+ // it. Takes ownership of |socket|. Returns NULL if bind() or
+ // connect() fail (|socket| is destroyed in that case).
+ static AsyncSocket* ConnectSocket(AsyncSocket* socket,
+ const SocketAddress& bind_address,
+ const SocketAddress& remote_address);
+ virtual int SendRaw(const void* pv, size_t cb);
+ int FlushOutBuffer();
+ // Add data to |outbuf_|.
+ void AppendToOutBuffer(const void* pv, size_t cb);
+
+ // Helper methods for |outpos_|.
+ bool IsOutBufferEmpty() const { return outpos_ == 0; }
+ void ClearOutBuffer() { outpos_ = 0; }
+
+ private:
+ // Called by the underlying socket
+ void OnConnectEvent(AsyncSocket* socket);
+ void OnReadEvent(AsyncSocket* socket);
+ void OnWriteEvent(AsyncSocket* socket);
+ void OnCloseEvent(AsyncSocket* socket, int error);
+
+ scoped_ptr<AsyncSocket> socket_;
+ bool listen_;
+ char* inbuf_, * outbuf_;
+ size_t insize_, inpos_, outsize_, outpos_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(AsyncTCPSocketBase);
+};
+
+class AsyncTCPSocket : public AsyncTCPSocketBase {
+ public:
+ // Binds and connects |socket| and creates AsyncTCPSocket for
+ // it. Takes ownership of |socket|. Returns NULL if bind() or
+ // connect() fail (|socket| is destroyed in that case).
+ static AsyncTCPSocket* Create(AsyncSocket* socket,
+ const SocketAddress& bind_address,
+ const SocketAddress& remote_address);
+ AsyncTCPSocket(AsyncSocket* socket, bool listen);
+ virtual ~AsyncTCPSocket() {}
+
+ virtual int Send(const void* pv, size_t cb,
+ const rtc::PacketOptions& options);
+ virtual void ProcessInput(char* data, size_t* len);
+ virtual void HandleIncomingConnection(AsyncSocket* socket);
+
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(AsyncTCPSocket);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_ASYNCTCPSOCKET_H_
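A sketch of constructing a client AsyncTCPSocket from the current thread's socket server, mirroring the socket-creation pattern used elsewhere in this patch; the addresses are placeholders and error handling is minimal.

#include "webrtc/base/asyncsocket.h"
#include "webrtc/base/asynctcpsocket.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/thread.h"

rtc::AsyncTCPSocket* CreateClientTcpSocket(
    const rtc::SocketAddress& bind_address,
    const rtc::SocketAddress& remote_address) {
  rtc::AsyncSocket* raw_socket =
      rtc::Thread::Current()->socketserver()->CreateAsyncSocket(
          bind_address.family(), SOCK_STREAM);
  if (!raw_socket)
    return NULL;
  // Create() takes ownership of |raw_socket| and returns NULL if bind() or
  // connect() fails.
  return rtc::AsyncTCPSocket::Create(raw_socket, bind_address, remote_address);
}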
diff --git a/chromium/third_party/webrtc/base/asynctcpsocket_unittest.cc b/chromium/third_party/webrtc/base/asynctcpsocket_unittest.cc
new file mode 100644
index 00000000000..b9317586406
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asynctcpsocket_unittest.cc
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "webrtc/base/asynctcpsocket.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/virtualsocketserver.h"
+
+namespace rtc {
+
+class AsyncTCPSocketTest
+ : public testing::Test,
+ public sigslot::has_slots<> {
+ public:
+ AsyncTCPSocketTest()
+ : pss_(new rtc::PhysicalSocketServer),
+ vss_(new rtc::VirtualSocketServer(pss_.get())),
+ socket_(vss_->CreateAsyncSocket(SOCK_STREAM)),
+ tcp_socket_(new AsyncTCPSocket(socket_, true)),
+ ready_to_send_(false) {
+ tcp_socket_->SignalReadyToSend.connect(this,
+ &AsyncTCPSocketTest::OnReadyToSend);
+ }
+
+ void OnReadyToSend(rtc::AsyncPacketSocket* socket) {
+ ready_to_send_ = true;
+ }
+
+ protected:
+ scoped_ptr<PhysicalSocketServer> pss_;
+ scoped_ptr<VirtualSocketServer> vss_;
+ AsyncSocket* socket_;
+ scoped_ptr<AsyncTCPSocket> tcp_socket_;
+ bool ready_to_send_;
+};
+
+TEST_F(AsyncTCPSocketTest, OnWriteEvent) {
+ EXPECT_FALSE(ready_to_send_);
+ socket_->SignalWriteEvent(socket_);
+ EXPECT_TRUE(ready_to_send_);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/asyncudpsocket.cc b/chromium/third_party/webrtc/base/asyncudpsocket.cc
new file mode 100644
index 00000000000..3e2ecc4cda4
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncudpsocket.cc
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/asyncudpsocket.h"
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+static const int BUF_SIZE = 64 * 1024;
+
+AsyncUDPSocket* AsyncUDPSocket::Create(
+ AsyncSocket* socket,
+ const SocketAddress& bind_address) {
+ scoped_ptr<AsyncSocket> owned_socket(socket);
+ if (socket->Bind(bind_address) < 0) {
+ LOG(LS_ERROR) << "Bind() failed with error " << socket->GetError();
+ return NULL;
+ }
+ return new AsyncUDPSocket(owned_socket.release());
+}
+
+AsyncUDPSocket* AsyncUDPSocket::Create(SocketFactory* factory,
+ const SocketAddress& bind_address) {
+ AsyncSocket* socket =
+ factory->CreateAsyncSocket(bind_address.family(), SOCK_DGRAM);
+ if (!socket)
+ return NULL;
+ return Create(socket, bind_address);
+}
+
+AsyncUDPSocket::AsyncUDPSocket(AsyncSocket* socket)
+ : socket_(socket) {
+ ASSERT(socket_);
+ size_ = BUF_SIZE;
+ buf_ = new char[size_];
+
+ // The socket should start out readable but not writable.
+ socket_->SignalReadEvent.connect(this, &AsyncUDPSocket::OnReadEvent);
+ socket_->SignalWriteEvent.connect(this, &AsyncUDPSocket::OnWriteEvent);
+}
+
+AsyncUDPSocket::~AsyncUDPSocket() {
+ delete [] buf_;
+}
+
+SocketAddress AsyncUDPSocket::GetLocalAddress() const {
+ return socket_->GetLocalAddress();
+}
+
+SocketAddress AsyncUDPSocket::GetRemoteAddress() const {
+ return socket_->GetRemoteAddress();
+}
+
+int AsyncUDPSocket::Send(const void *pv, size_t cb,
+ const rtc::PacketOptions& options) {
+ return socket_->Send(pv, cb);
+}
+
+int AsyncUDPSocket::SendTo(const void *pv, size_t cb,
+ const SocketAddress& addr,
+ const rtc::PacketOptions& options) {
+ return socket_->SendTo(pv, cb, addr);
+}
+
+int AsyncUDPSocket::Close() {
+ return socket_->Close();
+}
+
+AsyncUDPSocket::State AsyncUDPSocket::GetState() const {
+ return STATE_BOUND;
+}
+
+int AsyncUDPSocket::GetOption(Socket::Option opt, int* value) {
+ return socket_->GetOption(opt, value);
+}
+
+int AsyncUDPSocket::SetOption(Socket::Option opt, int value) {
+ return socket_->SetOption(opt, value);
+}
+
+int AsyncUDPSocket::GetError() const {
+ return socket_->GetError();
+}
+
+void AsyncUDPSocket::SetError(int error) {
+ return socket_->SetError(error);
+}
+
+void AsyncUDPSocket::OnReadEvent(AsyncSocket* socket) {
+ ASSERT(socket_.get() == socket);
+
+ SocketAddress remote_addr;
+ int len = socket_->RecvFrom(buf_, size_, &remote_addr);
+ if (len < 0) {
+ // An error here typically means we got an ICMP error in response to our
+ // send datagram, indicating the remote address was unreachable.
+ // When doing ICE, this kind of thing will often happen.
+ // TODO: Do something better like forwarding the error to the user.
+ SocketAddress local_addr = socket_->GetLocalAddress();
+ LOG(LS_INFO) << "AsyncUDPSocket[" << local_addr.ToSensitiveString() << "] "
+ << "receive failed with error " << socket_->GetError();
+ return;
+ }
+
+ // TODO: Make sure that we got all of the packet.
+ // If we did not, then we should resize our buffer to be large enough.
+ SignalReadPacket(this, buf_, static_cast<size_t>(len), remote_addr,
+ CreatePacketTime(0));
+}
+
+void AsyncUDPSocket::OnWriteEvent(AsyncSocket* socket) {
+ SignalReadyToSend(this);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/asyncudpsocket.h b/chromium/third_party/webrtc/base/asyncudpsocket.h
new file mode 100644
index 00000000000..ac64dca681b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncudpsocket.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ASYNCUDPSOCKET_H_
+#define WEBRTC_BASE_ASYNCUDPSOCKET_H_
+
+#include "webrtc/base/asyncpacketsocket.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/socketfactory.h"
+
+namespace rtc {
+
+// Provides the ability to receive packets asynchronously. Sends are not
+// buffered since it is acceptable to drop packets under high load.
+class AsyncUDPSocket : public AsyncPacketSocket {
+ public:
+ // Binds |socket| and creates AsyncUDPSocket for it. Takes ownership
+ // of |socket|. Returns NULL if bind() fails (|socket| is destroyed
+ // in that case).
+ static AsyncUDPSocket* Create(AsyncSocket* socket,
+ const SocketAddress& bind_address);
+ // Creates a new socket for sending asynchronous UDP packets using an
+ // asynchronous socket from the given factory.
+ static AsyncUDPSocket* Create(SocketFactory* factory,
+ const SocketAddress& bind_address);
+ explicit AsyncUDPSocket(AsyncSocket* socket);
+ virtual ~AsyncUDPSocket();
+
+ virtual SocketAddress GetLocalAddress() const;
+ virtual SocketAddress GetRemoteAddress() const;
+ virtual int Send(const void *pv, size_t cb,
+ const rtc::PacketOptions& options);
+ virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr,
+ const rtc::PacketOptions& options);
+ virtual int Close();
+
+ virtual State GetState() const;
+ virtual int GetOption(Socket::Option opt, int* value);
+ virtual int SetOption(Socket::Option opt, int value);
+ virtual int GetError() const;
+ virtual void SetError(int error);
+
+ private:
+ // Called when the underlying socket is ready to be read from.
+ void OnReadEvent(AsyncSocket* socket);
+ // Called when the underlying socket is ready to send.
+ void OnWriteEvent(AsyncSocket* socket);
+
+ scoped_ptr<AsyncSocket> socket_;
+ char* buf_;
+ size_t size_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_ASYNCUDPSOCKET_H_
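A sketch of binding an AsyncUDPSocket to a local ephemeral port, using the current thread's socket server as the SocketFactory (assuming, as elsewhere in this tree, that the socket server implements SocketFactory); the address is illustrative.

#include "webrtc/base/asyncudpsocket.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/thread.h"

rtc::AsyncUDPSocket* CreateLocalUdpSocket() {
  rtc::SocketAddress bind_address("127.0.0.1", 0);  // Port 0 = any free port.
  // Create() returns NULL if socket creation or bind() fails.
  return rtc::AsyncUDPSocket::Create(rtc::Thread::Current()->socketserver(),
                                     bind_address);
}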
diff --git a/chromium/third_party/webrtc/base/asyncudpsocket_unittest.cc b/chromium/third_party/webrtc/base/asyncudpsocket_unittest.cc
new file mode 100644
index 00000000000..bd65940fcb8
--- /dev/null
+++ b/chromium/third_party/webrtc/base/asyncudpsocket_unittest.cc
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "webrtc/base/asyncudpsocket.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/virtualsocketserver.h"
+
+namespace rtc {
+
+class AsyncUdpSocketTest
+ : public testing::Test,
+ public sigslot::has_slots<> {
+ public:
+ AsyncUdpSocketTest()
+ : pss_(new rtc::PhysicalSocketServer),
+ vss_(new rtc::VirtualSocketServer(pss_.get())),
+ socket_(vss_->CreateAsyncSocket(SOCK_DGRAM)),
+ udp_socket_(new AsyncUDPSocket(socket_)),
+ ready_to_send_(false) {
+ udp_socket_->SignalReadyToSend.connect(this,
+ &AsyncUdpSocketTest::OnReadyToSend);
+ }
+
+ void OnReadyToSend(rtc::AsyncPacketSocket* socket) {
+ ready_to_send_ = true;
+ }
+
+ protected:
+ scoped_ptr<PhysicalSocketServer> pss_;
+ scoped_ptr<VirtualSocketServer> vss_;
+ AsyncSocket* socket_;
+ scoped_ptr<AsyncUDPSocket> udp_socket_;
+ bool ready_to_send_;
+};
+
+TEST_F(AsyncUdpSocketTest, OnWriteEvent) {
+ EXPECT_FALSE(ready_to_send_);
+ socket_->SignalWriteEvent(socket_);
+ EXPECT_TRUE(ready_to_send_);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/atomicops.h b/chromium/third_party/webrtc/base/atomicops.h
new file mode 100644
index 00000000000..6096e8c0844
--- /dev/null
+++ b/chromium/third_party/webrtc/base/atomicops.h
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ATOMICOPS_H_
+#define WEBRTC_BASE_ATOMICOPS_H_
+
+#include <string>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace rtc {
+
+// A single-producer, single-consumer, fixed-size queue.
+// All methods not ending in Unsafe can be safely called without locking,
+// provided that calls to consumer methods (Peek/Pop) or producer methods (Push)
+// only happen on a single thread per method type. If multiple threads need to
+// read simultaneously or write simultaneously, other synchronization is
+// necessary. Synchronization is also required if a call into any Unsafe method
+// could happen at the same time as a call to any other method.
+template <typename T>
+class FixedSizeLockFreeQueue {
+ private:
+// Atomic primitives and memory barrier
+#if defined(__arm__)
+ typedef uint32 Atomic32;
+
+ // Copied from google3/base/atomicops-internals-arm-v6plus.h
+ static inline void MemoryBarrier() {
+ asm volatile("dmb":::"memory");
+ }
+
+ // Adapted from google3/base/atomicops-internals-arm-v6plus.h
+ static inline void AtomicIncrement(volatile Atomic32* ptr) {
+ Atomic32 str_success, value;
+ asm volatile (
+ "1:\n"
+ "ldrex %1, [%2]\n"
+ "add %1, %1, #1\n"
+ "strex %0, %1, [%2]\n"
+ "teq %0, #0\n"
+ "bne 1b"
+ : "=&r"(str_success), "=&r"(value)
+ : "r" (ptr)
+ : "cc", "memory");
+ }
+#elif !defined(SKIP_ATOMIC_CHECK)
+#error "No atomic operations defined for the given architecture."
+#endif
+
+ public:
+ // Constructs an empty queue, with capacity 0.
+ FixedSizeLockFreeQueue() : pushed_count_(0),
+ popped_count_(0),
+ capacity_(0),
+ data_() {}
+ // Constructs an empty queue with the given capacity.
+ FixedSizeLockFreeQueue(size_t capacity) : pushed_count_(0),
+ popped_count_(0),
+ capacity_(capacity),
+ data_(new T[capacity]) {}
+
+ // Pushes a value onto the queue. Returns true if the value was successfully
+ // pushed (there was space in the queue). This method can be safely called at
+ // the same time as PeekFront/PopFront.
+ bool PushBack(T value) {
+ if (capacity_ == 0) {
+ LOG(LS_WARNING) << "Queue capacity is 0.";
+ return false;
+ }
+ if (IsFull()) {
+ return false;
+ }
+
+ data_[pushed_count_ % capacity_] = value;
+ // Make sure the data is written before the count is incremented, so other
+ // threads can't see the value exists before being able to read it.
+ MemoryBarrier();
+ AtomicIncrement(&pushed_count_);
+ return true;
+ }
+
+ // Retrieves the oldest value pushed onto the queue. Returns true if there was
+ // an item to peek (the queue was non-empty). This method can be safely called
+ // at the same time as PushBack.
+ bool PeekFront(T* value_out) {
+ if (capacity_ == 0) {
+ LOG(LS_WARNING) << "Queue capacity is 0.";
+ return false;
+ }
+ if (IsEmpty()) {
+ return false;
+ }
+
+ *value_out = data_[popped_count_ % capacity_];
+ return true;
+ }
+
+ // Retrieves the oldest value pushed onto the queue and removes it from the
+ // queue. Returns true if there was an item to pop (the queue was non-empty).
+ // This method can be safely called at the same time as PushBack.
+ bool PopFront(T* value_out) {
+ if (PeekFront(value_out)) {
+ AtomicIncrement(&popped_count_);
+ return true;
+ }
+ return false;
+ }
+
+ // Clears the current items in the queue and sets the new (fixed) size. This
+ // method cannot be called at the same time as any other method.
+ void ClearAndResizeUnsafe(int new_capacity) {
+ capacity_ = new_capacity;
+ data_.reset(new T[new_capacity]);
+ pushed_count_ = 0;
+ popped_count_ = 0;
+ }
+
+ // Returns true if there is no space left in the queue for new elements.
+  bool IsFull() const { return pushed_count_ == popped_count_ + capacity_; }
+  // Returns true if there are no elements in the queue.
+  bool IsEmpty() const { return pushed_count_ == popped_count_; }
+  // Returns the current number of elements in the queue. This is always in the
+  // range [0, capacity].
+ size_t Size() const { return pushed_count_ - popped_count_; }
+
+ // Returns the capacity of the queue (max size).
+ size_t capacity() const { return capacity_; }
+
+ private:
+ volatile Atomic32 pushed_count_;
+ volatile Atomic32 popped_count_;
+ size_t capacity_;
+ rtc::scoped_ptr<T[]> data_;
+ DISALLOW_COPY_AND_ASSIGN(FixedSizeLockFreeQueue);
+};
+
+}  // namespace rtc
+
+#endif // WEBRTC_BASE_ATOMICOPS_H_
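A sketch of single-producer/single-consumer use of FixedSizeLockFreeQueue. Note that on targets other than ARM this header #errors unless SKIP_ATOMIC_CHECK is defined (see the unit test below), so this sketch only compiles where the atomic primitives above are available.

#include "webrtc/base/atomicops.h"

void LockFreeQueueExample() {
  rtc::FixedSizeLockFreeQueue<int> queue(4);  // Fixed capacity of 4 elements.

  // Producer side: PushBack() returns false once the queue is full.
  for (int i = 0; i < 5; ++i) {
    queue.PushBack(i);
  }

  // Consumer side: drain whatever has been published so far.
  int value;
  while (queue.PopFront(&value)) {
    // |value| is 0, 1, 2, 3 in FIFO order; the fifth push was rejected.
  }
}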
diff --git a/chromium/third_party/webrtc/base/atomicops_unittest.cc b/chromium/third_party/webrtc/base/atomicops_unittest.cc
new file mode 100644
index 00000000000..5152c4de6c6
--- /dev/null
+++ b/chromium/third_party/webrtc/base/atomicops_unittest.cc
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__arm__)
+// For testing purposes, define faked versions of the atomic operations
+#include "webrtc/base/basictypes.h"
+namespace rtc {
+typedef uint32 Atomic32;
+static inline void MemoryBarrier() { }
+static inline void AtomicIncrement(volatile Atomic32* ptr) {
+ *ptr = *ptr + 1;
+}
+}
+#define SKIP_ATOMIC_CHECK
+#endif
+
+#include "webrtc/base/atomicops.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/logging.h"
+
+TEST(FixedSizeLockFreeQueueTest, TestDefaultConstruct) {
+ rtc::FixedSizeLockFreeQueue<int> queue;
+ EXPECT_EQ(0u, queue.capacity());
+ EXPECT_EQ(0u, queue.Size());
+ EXPECT_FALSE(queue.PushBack(1));
+ int val;
+ EXPECT_FALSE(queue.PopFront(&val));
+}
+
+TEST(FixedSizeLockFreeQueueTest, TestConstruct) {
+ rtc::FixedSizeLockFreeQueue<int> queue(5);
+ EXPECT_EQ(5u, queue.capacity());
+ EXPECT_EQ(0u, queue.Size());
+ int val;
+ EXPECT_FALSE(queue.PopFront(&val));
+}
+
+TEST(FixedSizeLockFreeQueueTest, TestPushPop) {
+ rtc::FixedSizeLockFreeQueue<int> queue(2);
+ EXPECT_EQ(2u, queue.capacity());
+ EXPECT_EQ(0u, queue.Size());
+ EXPECT_TRUE(queue.PushBack(1));
+ EXPECT_EQ(1u, queue.Size());
+ EXPECT_TRUE(queue.PushBack(2));
+ EXPECT_EQ(2u, queue.Size());
+ EXPECT_FALSE(queue.PushBack(3));
+ EXPECT_EQ(2u, queue.Size());
+ int val;
+ EXPECT_TRUE(queue.PopFront(&val));
+ EXPECT_EQ(1, val);
+ EXPECT_EQ(1u, queue.Size());
+ EXPECT_TRUE(queue.PopFront(&val));
+ EXPECT_EQ(2, val);
+ EXPECT_EQ(0u, queue.Size());
+ EXPECT_FALSE(queue.PopFront(&val));
+ EXPECT_EQ(0u, queue.Size());
+}
+
+TEST(FixedSizeLockFreeQueueTest, TestResize) {
+ rtc::FixedSizeLockFreeQueue<int> queue(2);
+ EXPECT_EQ(2u, queue.capacity());
+ EXPECT_EQ(0u, queue.Size());
+ EXPECT_TRUE(queue.PushBack(1));
+ EXPECT_EQ(1u, queue.Size());
+
+ queue.ClearAndResizeUnsafe(5);
+ EXPECT_EQ(5u, queue.capacity());
+ EXPECT_EQ(0u, queue.Size());
+ int val;
+ EXPECT_FALSE(queue.PopFront(&val));
+}
diff --git a/chromium/third_party/webrtc/base/autodetectproxy.cc b/chromium/third_party/webrtc/base/autodetectproxy.cc
new file mode 100644
index 00000000000..bc54b96383a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/autodetectproxy.cc
@@ -0,0 +1,282 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/autodetectproxy.h"
+#include "webrtc/base/httpcommon.h"
+#include "webrtc/base/httpcommon-inl.h"
+#include "webrtc/base/nethelpers.h"
+
+namespace rtc {
+
+static const ProxyType TEST_ORDER[] = {
+ PROXY_HTTPS, PROXY_SOCKS5, PROXY_UNKNOWN
+};
+
+static const int kSavedStringLimit = 128;
+
+static void SaveStringToStack(char *dst,
+ const std::string &src,
+ size_t dst_size) {
+ strncpy(dst, src.c_str(), dst_size - 1);
+ dst[dst_size - 1] = '\0';
+}
+
+AutoDetectProxy::AutoDetectProxy(const std::string& user_agent)
+ : agent_(user_agent), resolver_(NULL), socket_(NULL), next_(0) {
+}
+
+AutoDetectProxy::~AutoDetectProxy() {
+ if (resolver_) {
+ resolver_->Destroy(false);
+ }
+}
+
+void AutoDetectProxy::DoWork() {
+ // TODO: Try connecting to server_url without proxy first here?
+ if (!server_url_.empty()) {
+ LOG(LS_INFO) << "GetProxySettingsForUrl(" << server_url_ << ") - start";
+ GetProxyForUrl(agent_.c_str(), server_url_.c_str(), &proxy_);
+ LOG(LS_INFO) << "GetProxySettingsForUrl - stop";
+ }
+ Url<char> url(proxy_.address.HostAsURIString());
+ if (url.valid()) {
+ LOG(LS_WARNING) << "AutoDetectProxy removing http prefix on proxy host";
+ proxy_.address.SetIP(url.host());
+ }
+ LOG(LS_INFO) << "AutoDetectProxy found proxy at " << proxy_.address;
+ if (proxy_.type == PROXY_UNKNOWN) {
+ LOG(LS_INFO) << "AutoDetectProxy initiating proxy classification";
+ Next();
+ // Process I/O until Stop()
+ Thread::Current()->ProcessMessages(kForever);
+ // Clean up the autodetect socket, from the thread that created it
+ delete socket_;
+ }
+ // TODO: If we found a proxy, try to use it to verify that it
+ // works by sending a request to server_url. This could either be
+ // done here or by the HttpPortAllocator.
+}
+
+void AutoDetectProxy::OnMessage(Message *msg) {
+ if (MSG_UNRESOLVABLE == msg->message_id) {
+ // If we can't resolve the proxy, skip straight to failure.
+ Complete(PROXY_UNKNOWN);
+ } else if (MSG_TIMEOUT == msg->message_id) {
+ OnCloseEvent(socket_, ETIMEDOUT);
+ } else {
+ // This must be the ST_MSG_WORKER_DONE message that deletes the
+ // AutoDetectProxy object. We have observed crashes within this stack that
+ // seem to be highly reproducible for a small subset of users and thus are
+ // probably correlated with a specific proxy setting, so copy potentially
+ // relevant information onto the stack to make it available in Windows
+ // minidumps.
+
+ // Save the user agent and the number of auto-detection passes that we
+ // needed.
+ char agent[kSavedStringLimit];
+ SaveStringToStack(agent, agent_, sizeof agent);
+
+ int next = next_;
+
+ // Now the detected proxy config (minus the password field, which could be
+ // sensitive).
+ ProxyType type = proxy().type;
+
+ char address_hostname[kSavedStringLimit];
+ SaveStringToStack(address_hostname,
+ proxy().address.hostname(),
+ sizeof address_hostname);
+
+ IPAddress address_ip = proxy().address.ipaddr();
+
+ uint16 address_port = proxy().address.port();
+
+ char autoconfig_url[kSavedStringLimit];
+ SaveStringToStack(autoconfig_url,
+ proxy().autoconfig_url,
+ sizeof autoconfig_url);
+
+ bool autodetect = proxy().autodetect;
+
+ char bypass_list[kSavedStringLimit];
+ SaveStringToStack(bypass_list, proxy().bypass_list, sizeof bypass_list);
+
+ char username[kSavedStringLimit];
+ SaveStringToStack(username, proxy().username, sizeof username);
+
+ SignalThread::OnMessage(msg);
+
+ // Log the gathered data at a log level that will never actually be enabled
+ // so that the compiler is forced to retain the data on the stack.
+ LOG(LS_SENSITIVE) << agent << " " << next << " " << type << " "
+ << address_hostname << " " << address_ip << " "
+ << address_port << " " << autoconfig_url << " "
+ << autodetect << " " << bypass_list << " " << username;
+ }
+}
+
+void AutoDetectProxy::OnResolveResult(AsyncResolverInterface* resolver) {
+ if (resolver != resolver_) {
+ return;
+ }
+ int error = resolver_->GetError();
+ if (error == 0) {
+ LOG(LS_VERBOSE) << "Resolved " << proxy_.address << " to "
+ << resolver_->address();
+ proxy_.address = resolver_->address();
+ if (!DoConnect()) {
+ Thread::Current()->Post(this, MSG_TIMEOUT);
+ }
+ } else {
+ LOG(LS_INFO) << "Failed to resolve " << resolver_->address();
+ resolver_->Destroy(false);
+ resolver_ = NULL;
+ proxy_.address = SocketAddress();
+ Thread::Current()->Post(this, MSG_UNRESOLVABLE);
+ }
+}
+
+void AutoDetectProxy::Next() {
+ if (TEST_ORDER[next_] >= PROXY_UNKNOWN) {
+ Complete(PROXY_UNKNOWN);
+ return;
+ }
+
+ LOG(LS_VERBOSE) << "AutoDetectProxy connecting to "
+ << proxy_.address.ToSensitiveString();
+
+ if (socket_) {
+ Thread::Current()->Clear(this, MSG_TIMEOUT);
+ Thread::Current()->Clear(this, MSG_UNRESOLVABLE);
+ socket_->Close();
+ Thread::Current()->Dispose(socket_);
+ socket_ = NULL;
+ }
+ int timeout = 2000;
+ if (proxy_.address.IsUnresolvedIP()) {
+ // Launch an asyncresolver. This thread will spin waiting for it.
+ timeout += 2000;
+ if (!resolver_) {
+ resolver_ = new AsyncResolver();
+ }
+ resolver_->SignalDone.connect(this, &AutoDetectProxy::OnResolveResult);
+ resolver_->Start(proxy_.address);
+ } else {
+ if (!DoConnect()) {
+ Thread::Current()->Post(this, MSG_TIMEOUT);
+ return;
+ }
+ }
+ Thread::Current()->PostDelayed(timeout, this, MSG_TIMEOUT);
+}
+
+bool AutoDetectProxy::DoConnect() {
+ if (resolver_) {
+ resolver_->Destroy(false);
+ resolver_ = NULL;
+ }
+ socket_ =
+ Thread::Current()->socketserver()->CreateAsyncSocket(
+ proxy_.address.family(), SOCK_STREAM);
+ if (!socket_) {
+ LOG(LS_VERBOSE) << "Unable to create socket for " << proxy_.address;
+ return false;
+ }
+ socket_->SignalConnectEvent.connect(this, &AutoDetectProxy::OnConnectEvent);
+ socket_->SignalReadEvent.connect(this, &AutoDetectProxy::OnReadEvent);
+ socket_->SignalCloseEvent.connect(this, &AutoDetectProxy::OnCloseEvent);
+ socket_->Connect(proxy_.address);
+ return true;
+}
+
+void AutoDetectProxy::Complete(ProxyType type) {
+ Thread::Current()->Clear(this, MSG_TIMEOUT);
+ Thread::Current()->Clear(this, MSG_UNRESOLVABLE);
+ if (socket_) {
+ socket_->Close();
+ }
+
+ proxy_.type = type;
+ LoggingSeverity sev = (proxy_.type == PROXY_UNKNOWN) ? LS_ERROR : LS_INFO;
+ LOG_V(sev) << "AutoDetectProxy detected "
+ << proxy_.address.ToSensitiveString()
+ << " as type " << proxy_.type;
+
+ Thread::Current()->Quit();
+}
+
+void AutoDetectProxy::OnConnectEvent(AsyncSocket * socket) {
+ std::string probe;
+
+ switch (TEST_ORDER[next_]) {
+ case PROXY_HTTPS:
+ probe.assign("CONNECT www.google.com:443 HTTP/1.0\r\n"
+ "User-Agent: ");
+ probe.append(agent_);
+ probe.append("\r\n"
+ "Host: www.google.com\r\n"
+ "Content-Length: 0\r\n"
+ "Proxy-Connection: Keep-Alive\r\n"
+ "\r\n");
+ break;
+ case PROXY_SOCKS5:
+ probe.assign("\005\001\000", 3);
+ break;
+ default:
+ ASSERT(false);
+ return;
+ }
+
+ LOG(LS_VERBOSE) << "AutoDetectProxy probing type " << TEST_ORDER[next_]
+ << " sending " << probe.size() << " bytes";
+ socket_->Send(probe.data(), probe.size());
+}
+
+void AutoDetectProxy::OnReadEvent(AsyncSocket * socket) {
+ char data[257];
+ int len = socket_->Recv(data, 256);
+ if (len > 0) {
+ data[len] = 0;
+ LOG(LS_VERBOSE) << "AutoDetectProxy read " << len << " bytes";
+ }
+
+ switch (TEST_ORDER[next_]) {
+ case PROXY_HTTPS:
+ if ((len >= 2) && (data[0] == '\x05')) {
+ Complete(PROXY_SOCKS5);
+ return;
+ }
+ if ((len >= 5) && (strncmp(data, "HTTP/", 5) == 0)) {
+ Complete(PROXY_HTTPS);
+ return;
+ }
+ break;
+ case PROXY_SOCKS5:
+ if ((len >= 2) && (data[0] == '\x05')) {
+ Complete(PROXY_SOCKS5);
+ return;
+ }
+ break;
+ default:
+ ASSERT(false);
+ return;
+ }
+
+ ++next_;
+ Next();
+}
+
+void AutoDetectProxy::OnCloseEvent(AsyncSocket * socket, int error) {
+ LOG(LS_VERBOSE) << "AutoDetectProxy closed with error: " << error;
+ ++next_;
+ Next();
+}
+
+} // namespace rtc
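The classification in OnReadEvent() amounts to sniffing the first bytes of the proxy's reply: a SOCKS5 server answers the "\005\001\000" greeting with a 0x05 byte, while an HTTP proxy answers the CONNECT probe with an "HTTP/" status line. Below is a stand-alone sketch of that check, assuming rtc::ProxyType and its PROXY_* values come from proxyinfo.h as included by autodetectproxy.h; ClassifyProxyReply is an illustrative helper, not part of the patch.

#include <string.h>

#include "webrtc/base/proxyinfo.h"

static rtc::ProxyType ClassifyProxyReply(const char* data, int len) {
  if (len >= 2 && data[0] == '\x05')
    return rtc::PROXY_SOCKS5;               // SOCKS5 method-selection reply.
  if (len >= 5 && strncmp(data, "HTTP/", 5) == 0)
    return rtc::PROXY_HTTPS;                // HTTP proxy status line.
  return rtc::PROXY_UNKNOWN;                // Unrecognized reply.
}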
diff --git a/chromium/third_party/webrtc/base/autodetectproxy.h b/chromium/third_party/webrtc/base/autodetectproxy.h
new file mode 100644
index 00000000000..45e9c40bef8
--- /dev/null
+++ b/chromium/third_party/webrtc/base/autodetectproxy.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_AUTODETECTPROXY_H_
+#define WEBRTC_BASE_AUTODETECTPROXY_H_
+
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/cryptstring.h"
+#include "webrtc/base/proxydetect.h"
+#include "webrtc/base/proxyinfo.h"
+#include "webrtc/base/signalthread.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// AutoDetectProxy
+///////////////////////////////////////////////////////////////////////////////
+
+class AsyncResolverInterface;
+class AsyncSocket;
+
+class AutoDetectProxy : public SignalThread {
+ public:
+ explicit AutoDetectProxy(const std::string& user_agent);
+
+ const ProxyInfo& proxy() const { return proxy_; }
+
+ void set_server_url(const std::string& url) {
+ server_url_ = url;
+ }
+ void set_proxy(const SocketAddress& proxy) {
+ proxy_.type = PROXY_UNKNOWN;
+ proxy_.address = proxy;
+ }
+ void set_auth_info(bool use_auth, const std::string& username,
+ const CryptString& password) {
+ if (use_auth) {
+ proxy_.username = username;
+ proxy_.password = password;
+ }
+ }
+  // Default implementation that wraps GetProxySettingsForUrl. Override to
+  // provide a custom implementation.
+ virtual bool GetProxyForUrl(const char* agent, const char* url,
+ rtc::ProxyInfo* proxy) {
+ return GetProxySettingsForUrl(agent, url, proxy, true);
+ }
+ enum { MSG_TIMEOUT = SignalThread::ST_MSG_FIRST_AVAILABLE,
+ MSG_UNRESOLVABLE,
+ ADP_MSG_FIRST_AVAILABLE};
+
+ protected:
+ virtual ~AutoDetectProxy();
+
+ // SignalThread Interface
+ virtual void DoWork();
+ virtual void OnMessage(Message *msg);
+
+ void Next();
+ void Complete(ProxyType type);
+
+ void OnConnectEvent(AsyncSocket * socket);
+ void OnReadEvent(AsyncSocket * socket);
+ void OnCloseEvent(AsyncSocket * socket, int error);
+ void OnResolveResult(AsyncResolverInterface* resolver);
+ bool DoConnect();
+
+ private:
+ std::string agent_;
+ std::string server_url_;
+ ProxyInfo proxy_;
+ AsyncResolverInterface* resolver_;
+ AsyncSocket* socket_;
+ int next_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(AutoDetectProxy);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_AUTODETECTPROXY_H_
diff --git a/chromium/third_party/webrtc/base/autodetectproxy_unittest.cc b/chromium/third_party/webrtc/base/autodetectproxy_unittest.cc
new file mode 100644
index 00000000000..80f220f2f0a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/autodetectproxy_unittest.cc
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/autodetectproxy.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/httpcommon.h"
+#include "webrtc/base/httpcommon-inl.h"
+
+namespace rtc {
+
+static const char kUserAgent[] = "";
+static const char kPath[] = "/";
+static const char kHost[] = "relay.google.com";
+static const uint16 kPort = 443;
+static const bool kSecure = true;
+// At most, AutoDetectProxy should take ~6 seconds. Each connect step is
+// allotted 2 seconds, with the initial resolution + connect given an
+// extra 2 seconds. The slowest case is:
+// 1) Resolution + HTTPS takes the full 4 seconds and fails (but resolution
+//    succeeds).
+// 2) SOCKS5 takes the full 2 seconds.
+// Socket creation time seems unbounded, and has been observed to take >1 second
+// on a Linux machine under load. As such, we allow 10 seconds for the timeout,
+// though we could still end up with some flakiness.
+static const int kTimeoutMs = 10000;
+
+class AutoDetectProxyTest : public testing::Test, public sigslot::has_slots<> {
+ public:
+ AutoDetectProxyTest() : auto_detect_proxy_(NULL), done_(false) {}
+
+ protected:
+ bool Create(const std::string &user_agent,
+ const std::string &path,
+ const std::string &host,
+ uint16 port,
+ bool secure,
+ bool startnow) {
+ auto_detect_proxy_ = new AutoDetectProxy(user_agent);
+ EXPECT_TRUE(auto_detect_proxy_ != NULL);
+ if (!auto_detect_proxy_) {
+ return false;
+ }
+ Url<char> host_url(path, host, port);
+ host_url.set_secure(secure);
+ auto_detect_proxy_->set_server_url(host_url.url());
+ auto_detect_proxy_->SignalWorkDone.connect(
+ this,
+ &AutoDetectProxyTest::OnWorkDone);
+ if (startnow) {
+ auto_detect_proxy_->Start();
+ }
+ return true;
+ }
+
+ bool Run(int timeout_ms) {
+ EXPECT_TRUE_WAIT(done_, timeout_ms);
+ return done_;
+ }
+
+ void SetProxy(const SocketAddress& proxy) {
+ auto_detect_proxy_->set_proxy(proxy);
+ }
+
+ void Start() {
+ auto_detect_proxy_->Start();
+ }
+
+ void TestCopesWithProxy(const SocketAddress& proxy) {
+    // Tests that, at a minimum, autodetection doesn't crash for the given
+    // proxy address.
+ ASSERT_TRUE(Create(kUserAgent,
+ kPath,
+ kHost,
+ kPort,
+ kSecure,
+ false));
+ SetProxy(proxy);
+ Start();
+ ASSERT_TRUE(Run(kTimeoutMs));
+ }
+
+ private:
+ void OnWorkDone(rtc::SignalThread *thread) {
+ AutoDetectProxy *auto_detect_proxy =
+ static_cast<rtc::AutoDetectProxy *>(thread);
+ EXPECT_TRUE(auto_detect_proxy == auto_detect_proxy_);
+ auto_detect_proxy_ = NULL;
+ auto_detect_proxy->Release();
+ done_ = true;
+ }
+
+ AutoDetectProxy *auto_detect_proxy_;
+ bool done_;
+};
+
+TEST_F(AutoDetectProxyTest, TestDetectUnresolvedProxy) {
+ TestCopesWithProxy(rtc::SocketAddress("localhost", 9999));
+}
+
+TEST_F(AutoDetectProxyTest, TestDetectUnresolvableProxy) {
+ TestCopesWithProxy(rtc::SocketAddress("invalid", 9999));
+}
+
+TEST_F(AutoDetectProxyTest, TestDetectIPv6Proxy) {
+ TestCopesWithProxy(rtc::SocketAddress("::1", 9999));
+}
+
+TEST_F(AutoDetectProxyTest, TestDetectIPv4Proxy) {
+ TestCopesWithProxy(rtc::SocketAddress("127.0.0.1", 9999));
+}
+
+// Test that proxy detection completes successfully. (Does not actually verify
+// the correct detection result since we don't know what proxy to expect on an
+// arbitrary machine.)
+TEST_F(AutoDetectProxyTest, TestProxyDetection) {
+ ASSERT_TRUE(Create(kUserAgent,
+ kPath,
+ kHost,
+ kPort,
+ kSecure,
+ true));
+ ASSERT_TRUE(Run(kTimeoutMs));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/bandwidthsmoother.cc b/chromium/third_party/webrtc/base/bandwidthsmoother.cc
new file mode 100644
index 00000000000..0cbf3f3d1aa
--- /dev/null
+++ b/chromium/third_party/webrtc/base/bandwidthsmoother.cc
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/bandwidthsmoother.h"
+
+#include <limits.h>
+
+namespace rtc {
+
+BandwidthSmoother::BandwidthSmoother(int initial_bandwidth_guess,
+ uint32 time_between_increase,
+ double percent_increase,
+ size_t samples_count_to_average,
+ double min_sample_count_percent)
+ : time_between_increase_(time_between_increase),
+ percent_increase_(rtc::_max(1.0, percent_increase)),
+ time_at_last_change_(0),
+ bandwidth_estimation_(initial_bandwidth_guess),
+ accumulator_(samples_count_to_average),
+ min_sample_count_percent_(
+ rtc::_min(1.0,
+ rtc::_max(0.0, min_sample_count_percent))) {
+}
+
+// Samples a new bandwidth measurement.
+// Returns true if the bandwidth estimation changed.
+bool BandwidthSmoother::Sample(uint32 sample_time, int bandwidth) {
+ if (bandwidth < 0) {
+ return false;
+ }
+
+ accumulator_.AddSample(bandwidth);
+
+ if (accumulator_.count() < static_cast<size_t>(
+ accumulator_.max_count() * min_sample_count_percent_)) {
+ // We have not collected enough samples yet.
+ return false;
+ }
+
+ // Replace bandwidth with the mean of sampled bandwidths.
+ const int mean_bandwidth = static_cast<int>(accumulator_.ComputeMean());
+
+ if (mean_bandwidth < bandwidth_estimation_) {
+ time_at_last_change_ = sample_time;
+ bandwidth_estimation_ = mean_bandwidth;
+ return true;
+ }
+
+ const int old_bandwidth_estimation = bandwidth_estimation_;
+ const double increase_threshold_d = percent_increase_ * bandwidth_estimation_;
+ if (increase_threshold_d > INT_MAX) {
+ // If bandwidth goes any higher we would overflow.
+ return false;
+ }
+
+ const int increase_threshold = static_cast<int>(increase_threshold_d);
+ if (mean_bandwidth < increase_threshold) {
+ time_at_last_change_ = sample_time;
+    // The value of bandwidth_estimation_ remains the same if we don't exceed
+    // percent_increase_ * bandwidth_estimation_ for at least
+    // time_between_increase_.
+ } else if (sample_time >= time_at_last_change_ + time_between_increase_) {
+ time_at_last_change_ = sample_time;
+ if (increase_threshold == 0) {
+ // Bandwidth_estimation_ must be zero. Assume a jump from zero to a
+ // positive bandwidth means we have regained connectivity.
+ bandwidth_estimation_ = mean_bandwidth;
+ } else {
+ bandwidth_estimation_ = increase_threshold;
+ }
+ }
+ // Else don't make a change.
+
+ return old_bandwidth_estimation != bandwidth_estimation_;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/bandwidthsmoother.h b/chromium/third_party/webrtc/base/bandwidthsmoother.h
new file mode 100644
index 00000000000..cf5a25dfa86
--- /dev/null
+++ b/chromium/third_party/webrtc/base/bandwidthsmoother.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_BANDWIDTHSMOOTHER_H_
+#define WEBRTC_BASE_BANDWIDTHSMOOTHER_H_
+
+#include "webrtc/base/rollingaccumulator.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+// The purpose of BandwidthSmoother is to smooth out bandwidth
+// estimations so that 'trstate' messages can be triggered when we
+// are "sure" there is sufficient bandwidth. To avoid frequent fluctuations,
+// we take a slightly pessimistic view of our bandwidth. We only increase
+// our estimation when we have sampled bandwidth measurements of values
+// at least as large as the current estimation * percent_increase
+// for at least time_between_increase time. If a sampled bandwidth
+// is less than our current estimation we immediately decrease our estimation
+// to that sampled value.
+// We retain the initial bandwidth guess as our current bandwidth estimation
+// until we have received (min_sample_count_percent * samples_count_to_average)
+// samples. min_sample_count_percent must be in the range [0, 1].
+class BandwidthSmoother {
+ public:
+ BandwidthSmoother(int initial_bandwidth_guess,
+ uint32 time_between_increase,
+ double percent_increase,
+ size_t samples_count_to_average,
+ double min_sample_count_percent);
+
+ // Samples a new bandwidth measurement.
+ // bandwidth is expected to be non-negative.
+  // Returns true if the bandwidth estimation changed.
+ bool Sample(uint32 sample_time, int bandwidth);
+
+ int get_bandwidth_estimation() const {
+ return bandwidth_estimation_;
+ }
+
+ private:
+ uint32 time_between_increase_;
+ double percent_increase_;
+ uint32 time_at_last_change_;
+ int bandwidth_estimation_;
+ RollingAccumulator<int> accumulator_;
+ double min_sample_count_percent_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_BANDWIDTHSMOOTHER_H_
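For orientation, a minimal sketch of how the smoothing rules described in the header above play out. The sample times and bandwidth values are made up, and the function name is illustrative; the resulting estimates follow from Sample() in bandwidthsmoother.cc and match the behavior exercised by the unit tests below.

#include "webrtc/base/bandwidthsmoother.h"

void IllustrateSmoothing() {
  // Start at 1000, allow an increase at most every 10 time units, and only
  // when the averaged samples reach 110% of the current estimate.
  rtc::BandwidthSmoother smoother(1000,  // initial_bandwidth_guess
                                  10,    // time_between_increase
                                  1.1,   // percent_increase
                                  2,     // samples_count_to_average
                                  1.0);  // min_sample_count_percent

  // A sample below the estimate pulls it down as soon as enough samples
  // (here: both of the 2) have been collected.
  smoother.Sample(1, 900);    // returns false: only 1 of 2 samples so far
  smoother.Sample(2, 900);    // returns true: estimate drops to 900

  // Samples whose mean reaches 110% of the estimate (>= 990) only raise it
  // once time_between_increase (10) has passed since the last change, and
  // only up to that 110% threshold.
  smoother.Sample(3, 2000);   // returns false: too soon after the last change
  smoother.Sample(13, 2000);  // returns true: estimate becomes 990, not 2000
}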
diff --git a/chromium/third_party/webrtc/base/bandwidthsmoother_unittest.cc b/chromium/third_party/webrtc/base/bandwidthsmoother_unittest.cc
new file mode 100644
index 00000000000..132c0b13a79
--- /dev/null
+++ b/chromium/third_party/webrtc/base/bandwidthsmoother_unittest.cc
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <limits.h>
+
+#include "webrtc/base/bandwidthsmoother.h"
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+static const int kTimeBetweenIncrease = 10;
+static const double kPercentIncrease = 1.1;
+static const size_t kSamplesCountToAverage = 2;
+static const double kMinSampleCountPercent = 1.0;
+
+TEST(BandwidthSmootherTest, TestSampleIncrease) {
+ BandwidthSmoother mon(1000, // initial_bandwidth_guess
+ kTimeBetweenIncrease,
+ kPercentIncrease,
+ kSamplesCountToAverage,
+ kMinSampleCountPercent);
+
+ int bandwidth_sample = 1000;
+ EXPECT_EQ(bandwidth_sample, mon.get_bandwidth_estimation());
+ bandwidth_sample =
+ static_cast<int>(bandwidth_sample * kPercentIncrease);
+ EXPECT_FALSE(mon.Sample(9, bandwidth_sample));
+ EXPECT_TRUE(mon.Sample(10, bandwidth_sample));
+ EXPECT_EQ(bandwidth_sample, mon.get_bandwidth_estimation());
+ int next_expected_est =
+ static_cast<int>(bandwidth_sample * kPercentIncrease);
+ bandwidth_sample *= 2;
+ EXPECT_TRUE(mon.Sample(20, bandwidth_sample));
+ EXPECT_EQ(next_expected_est, mon.get_bandwidth_estimation());
+}
+
+TEST(BandwidthSmootherTest, TestSampleIncreaseFromZero) {
+ BandwidthSmoother mon(0, // initial_bandwidth_guess
+ kTimeBetweenIncrease,
+ kPercentIncrease,
+ kSamplesCountToAverage,
+ kMinSampleCountPercent);
+
+ const int kBandwidthSample = 1000;
+ EXPECT_EQ(0, mon.get_bandwidth_estimation());
+ EXPECT_FALSE(mon.Sample(9, kBandwidthSample));
+ EXPECT_TRUE(mon.Sample(10, kBandwidthSample));
+ EXPECT_EQ(kBandwidthSample, mon.get_bandwidth_estimation());
+}
+
+TEST(BandwidthSmootherTest, TestSampleDecrease) {
+ BandwidthSmoother mon(1000, // initial_bandwidth_guess
+ kTimeBetweenIncrease,
+ kPercentIncrease,
+ kSamplesCountToAverage,
+ kMinSampleCountPercent);
+
+ const int kBandwidthSample = 999;
+ EXPECT_EQ(1000, mon.get_bandwidth_estimation());
+ EXPECT_FALSE(mon.Sample(1, kBandwidthSample));
+ EXPECT_EQ(1000, mon.get_bandwidth_estimation());
+ EXPECT_TRUE(mon.Sample(2, kBandwidthSample));
+ EXPECT_EQ(kBandwidthSample, mon.get_bandwidth_estimation());
+}
+
+TEST(BandwidthSmootherTest, TestSampleTooFewSamples) {
+ BandwidthSmoother mon(1000, // initial_bandwidth_guess
+ kTimeBetweenIncrease,
+ kPercentIncrease,
+ 10, // 10 samples.
+                        0.5); // Minimum of 5 samples (0.5 * 10).
+
+ const int kBandwidthSample = 500;
+ EXPECT_EQ(1000, mon.get_bandwidth_estimation());
+ EXPECT_FALSE(mon.Sample(1, kBandwidthSample));
+ EXPECT_FALSE(mon.Sample(2, kBandwidthSample));
+ EXPECT_FALSE(mon.Sample(3, kBandwidthSample));
+ EXPECT_FALSE(mon.Sample(4, kBandwidthSample));
+ EXPECT_EQ(1000, mon.get_bandwidth_estimation());
+ EXPECT_TRUE(mon.Sample(5, kBandwidthSample));
+ EXPECT_EQ(kBandwidthSample, mon.get_bandwidth_estimation());
+}
+
+TEST(BandwidthSmootherTest, TestSampleRollover) {
+ const int kHugeBandwidth = 2000000000; // > INT_MAX/1.1
+ BandwidthSmoother mon(kHugeBandwidth,
+ kTimeBetweenIncrease,
+ kPercentIncrease,
+ kSamplesCountToAverage,
+ kMinSampleCountPercent);
+
+ EXPECT_FALSE(mon.Sample(10, INT_MAX));
+ EXPECT_FALSE(mon.Sample(11, INT_MAX));
+ EXPECT_EQ(kHugeBandwidth, mon.get_bandwidth_estimation());
+}
+
+TEST(BandwidthSmootherTest, TestSampleNegative) {
+ BandwidthSmoother mon(1000, // initial_bandwidth_guess
+ kTimeBetweenIncrease,
+ kPercentIncrease,
+ kSamplesCountToAverage,
+ kMinSampleCountPercent);
+
+ EXPECT_FALSE(mon.Sample(10, -1));
+ EXPECT_FALSE(mon.Sample(11, -1));
+ EXPECT_EQ(1000, mon.get_bandwidth_estimation());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/base.gyp b/chromium/third_party/webrtc/base/base.gyp
new file mode 100644
index 00000000000..330ea82a207
--- /dev/null
+++ b/chromium/third_party/webrtc/base/base.gyp
@@ -0,0 +1,756 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': [ '../build/common.gypi', ],
+ 'conditions': [
+ ['os_posix == 1 and OS != "mac" and OS != "ios"', {
+ 'conditions': [
+ ['sysroot!=""', {
+ 'variables': {
+ 'pkg-config': '../../../build/linux/pkg-config-wrapper "<(sysroot)" "<(target_arch)"',
+ },
+ }, {
+ 'variables': {
+ 'pkg-config': 'pkg-config'
+ },
+ }],
+ ],
+ }],
+ ],
+ 'targets': [
+ {
+ 'target_name': 'webrtc_base',
+ 'type': 'static_library',
+ 'defines': [
+ 'FEATURE_ENABLE_SSL',
+ 'GTEST_RELATIVE_PATH',
+ 'LOGGING=1',
+ 'USE_WEBRTC_DEV_BRANCH',
+ ],
+ 'sources': [
+ 'asyncfile.cc',
+ 'asyncfile.h',
+ 'asynchttprequest.cc',
+ 'asynchttprequest.h',
+ 'asyncinvoker.cc',
+ 'asyncinvoker.h',
+ 'asyncinvoker-inl.h',
+ 'asyncpacketsocket.h',
+ 'asyncresolverinterface.h',
+ 'asyncsocket.cc',
+ 'asyncsocket.h',
+ 'asynctcpsocket.cc',
+ 'asynctcpsocket.h',
+ 'asyncudpsocket.cc',
+ 'asyncudpsocket.h',
+ 'atomicops.h',
+ 'autodetectproxy.cc',
+ 'autodetectproxy.h',
+ 'bandwidthsmoother.cc',
+ 'bandwidthsmoother.h',
+ 'base64.cc',
+ 'base64.h',
+ 'basicdefs.h',
+ 'basictypes.h',
+ 'bind.h',
+ 'bind.h.pump',
+ 'buffer.h',
+ 'bytebuffer.cc',
+ 'bytebuffer.h',
+ 'byteorder.h',
+ 'callback.h',
+ 'callback.h.pump',
+ 'checks.cc',
+ 'checks.h',
+ 'common.cc',
+ 'common.h',
+ 'constructormagic.h',
+ 'cpumonitor.cc',
+ 'cpumonitor.h',
+ 'crc32.cc',
+ 'crc32.h',
+ 'criticalsection.h',
+ 'cryptstring.h',
+ 'dbus.cc',
+ 'dbus.h',
+ 'diskcache.cc',
+ 'diskcache.h',
+ 'diskcache_win32.cc',
+ 'diskcache_win32.h',
+ 'event.cc',
+ 'event.h',
+ 'filelock.cc',
+ 'filelock.h',
+ 'fileutils.cc',
+ 'fileutils.h',
+ 'fileutils_mock.h',
+ 'firewallsocketserver.cc',
+ 'firewallsocketserver.h',
+ 'flags.cc',
+ 'flags.h',
+ 'gunit_prod.h',
+ 'helpers.cc',
+ 'helpers.h',
+ 'httpbase.cc',
+ 'httpbase.h',
+ 'httpclient.cc',
+ 'httpclient.h',
+ 'httpcommon-inl.h',
+ 'httpcommon.cc',
+ 'httpcommon.h',
+ 'httprequest.cc',
+ 'httprequest.h',
+ 'httpserver.cc',
+ 'httpserver.h',
+ 'ifaddrs-android.cc',
+ 'ifaddrs-android.h',
+ 'iosfilesystem.mm',
+ 'ipaddress.cc',
+ 'ipaddress.h',
+ 'json.cc',
+ 'json.h',
+ 'latebindingsymboltable.cc',
+ 'latebindingsymboltable.cc.def',
+ 'latebindingsymboltable.h',
+ 'latebindingsymboltable.h.def',
+ 'libdbusglibsymboltable.cc',
+ 'libdbusglibsymboltable.h',
+ 'linux.cc',
+ 'linux.h',
+ 'linuxfdwalk.c',
+ 'linuxfdwalk.h',
+ 'linuxwindowpicker.cc',
+ 'linuxwindowpicker.h',
+ 'linked_ptr.h',
+ 'logging.cc',
+ 'logging.h',
+ 'macasyncsocket.cc',
+ 'macasyncsocket.h',
+ 'maccocoasocketserver.h',
+ 'maccocoasocketserver.mm',
+ 'maccocoathreadhelper.h',
+ 'maccocoathreadhelper.mm',
+ 'macconversion.cc',
+ 'macconversion.h',
+ 'macsocketserver.cc',
+ 'macsocketserver.h',
+ 'macutils.cc',
+ 'macutils.h',
+ 'macwindowpicker.cc',
+ 'macwindowpicker.h',
+ 'mathutils.h',
+ 'md5.cc',
+ 'md5.h',
+ 'md5digest.h',
+ 'messagedigest.cc',
+ 'messagedigest.h',
+ 'messagehandler.cc',
+ 'messagehandler.h',
+ 'messagequeue.cc',
+ 'messagequeue.h',
+ 'multipart.cc',
+ 'multipart.h',
+ 'natserver.cc',
+ 'natserver.h',
+ 'natsocketfactory.cc',
+ 'natsocketfactory.h',
+ 'nattypes.cc',
+ 'nattypes.h',
+ 'nethelpers.cc',
+ 'nethelpers.h',
+ 'network.cc',
+ 'network.h',
+ 'nssidentity.cc',
+ 'nssidentity.h',
+ 'nssstreamadapter.cc',
+ 'nssstreamadapter.h',
+ 'nullsocketserver.h',
+ 'openssl.h',
+ 'openssladapter.cc',
+ 'openssladapter.h',
+ 'openssldigest.cc',
+ 'openssldigest.h',
+ 'opensslidentity.cc',
+ 'opensslidentity.h',
+ 'opensslstreamadapter.cc',
+ 'opensslstreamadapter.h',
+ 'optionsfile.cc',
+ 'optionsfile.h',
+ 'pathutils.cc',
+ 'pathutils.h',
+ 'physicalsocketserver.cc',
+ 'physicalsocketserver.h',
+ 'posix.cc',
+ 'posix.h',
+ 'profiler.cc',
+ 'profiler.h',
+ 'proxydetect.cc',
+ 'proxydetect.h',
+ 'proxyinfo.cc',
+ 'proxyinfo.h',
+ 'proxyserver.cc',
+ 'proxyserver.h',
+ 'ratelimiter.cc',
+ 'ratelimiter.h',
+ 'ratetracker.cc',
+ 'ratetracker.h',
+ 'refcount.h',
+ 'referencecountedsingletonfactory.h',
+ 'rollingaccumulator.h',
+ 'safe_conversions.h',
+ 'safe_conversions_impl.h',
+ 'schanneladapter.cc',
+ 'schanneladapter.h',
+ 'scoped_autorelease_pool.h',
+ 'scoped_autorelease_pool.mm',
+ 'scoped_ptr.h',
+ 'scoped_ref_ptr.h',
+ 'scopedptrcollection.h',
+ 'sec_buffer.h',
+ 'sha1.cc',
+ 'sha1.h',
+ 'sha1digest.h',
+ 'sharedexclusivelock.cc',
+ 'sharedexclusivelock.h',
+ 'signalthread.cc',
+ 'signalthread.h',
+ 'sigslot.h',
+ 'sigslotrepeater.h',
+ 'socket.h',
+ 'socketadapters.cc',
+ 'socketadapters.h',
+ 'socketaddress.cc',
+ 'socketaddress.h',
+ 'socketaddresspair.cc',
+ 'socketaddresspair.h',
+ 'socketfactory.h',
+ 'socketpool.cc',
+ 'socketpool.h',
+ 'socketserver.h',
+ 'socketstream.cc',
+ 'socketstream.h',
+ 'ssladapter.cc',
+ 'ssladapter.h',
+ 'sslconfig.h',
+ 'sslfingerprint.cc',
+ 'sslfingerprint.h',
+ 'sslidentity.cc',
+ 'sslidentity.h',
+ 'sslroots.h',
+ 'sslsocketfactory.cc',
+ 'sslsocketfactory.h',
+ 'sslstreamadapter.cc',
+ 'sslstreamadapter.h',
+ 'sslstreamadapterhelper.cc',
+ 'sslstreamadapterhelper.h',
+ 'stream.cc',
+ 'stream.h',
+ 'stringdigest.h',
+ 'stringencode.cc',
+ 'stringencode.h',
+ 'stringutils.cc',
+ 'stringutils.h',
+ 'systeminfo.cc',
+ 'systeminfo.h',
+ 'task.cc',
+ 'task.h',
+ 'taskparent.cc',
+ 'taskparent.h',
+ 'taskrunner.cc',
+ 'taskrunner.h',
+ 'testclient.cc',
+ 'testclient.h',
+ 'thread.cc',
+ 'thread.h',
+ 'thread_checker.h',
+ 'thread_checker_impl.cc',
+ 'thread_checker_impl.h',
+ 'timeutils.cc',
+ 'timeutils.h',
+ 'timing.cc',
+ 'timing.h',
+ 'transformadapter.cc',
+ 'transformadapter.h',
+ 'unixfilesystem.cc',
+ 'unixfilesystem.h',
+ 'urlencode.cc',
+ 'urlencode.h',
+ 'versionparsing.cc',
+ 'versionparsing.h',
+ 'virtualsocketserver.cc',
+ 'virtualsocketserver.h',
+ 'win32.cc',
+ 'win32.h',
+ 'win32filesystem.cc',
+ 'win32filesystem.h',
+ 'win32regkey.cc',
+ 'win32regkey.h',
+ 'win32securityerrors.cc',
+ 'win32socketinit.cc',
+ 'win32socketinit.h',
+ 'win32socketserver.cc',
+ 'win32socketserver.h',
+ 'win32window.cc',
+ 'win32window.h',
+ 'win32windowpicker.cc',
+ 'win32windowpicker.h',
+ 'window.h',
+ 'windowpicker.h',
+ 'windowpickerfactory.h',
+ 'winfirewall.cc',
+ 'winfirewall.h',
+ 'winping.cc',
+ 'winping.h',
+ 'worker.cc',
+ 'worker.h',
+ '../overrides/webrtc/base/basictypes.h',
+ '../overrides/webrtc/base/constructormagic.h',
+ '../overrides/webrtc/base/logging.cc',
+ '../overrides/webrtc/base/logging.h',
+ '../overrides/webrtc/base/win32socketinit.cc',
+ ],
+ # TODO(henrike): issue 3307, make webrtc_base build without disabling
+ # these flags.
+ 'cflags!': [
+ '-Wextra',
+ '-Wall',
+ ],
+ 'cflags_cc!': [
+ '-Wnon-virtual-dtor',
+ ],
+ 'direct_dependent_settings': {
+ 'cflags_cc!': [
+ '-Wnon-virtual-dtor',
+ ],
+ 'defines': [
+ 'FEATURE_ENABLE_SSL',
+ 'GTEST_RELATIVE_PATH',
+ ],
+ },
+ 'include_dirs': [
+ '../../third_party/jsoncpp/overrides/include',
+ '../../third_party/jsoncpp/source/include',
+ ],
+ 'conditions': [
+ ['build_with_chromium==1', {
+ 'include_dirs': [
+ '../overrides',
+ '../../openssl/openssl/include',
+ ],
+ 'sources!': [
+ 'asyncinvoker.cc',
+ 'asyncinvoker.h',
+ 'asyncinvoker-inl.h',
+ 'asyncresolverinterface.h',
+ 'atomicops.h',
+ 'bandwidthsmoother.cc',
+ 'bandwidthsmoother.h',
+ 'basictypes.h',
+ 'bind.h',
+ 'bind.h.pump',
+ 'buffer.h',
+ 'callback.h',
+ 'callback.h.pump',
+ 'constructormagic.h',
+ 'dbus.cc',
+ 'dbus.h',
+ 'diskcache_win32.cc',
+ 'diskcache_win32.h',
+ 'filelock.cc',
+ 'filelock.h',
+ 'fileutils_mock.h',
+ 'genericslot.h',
+ 'genericslot.h.pump',
+ 'httpserver.cc',
+ 'httpserver.h',
+ 'json.cc',
+ 'json.h',
+ 'latebindingsymboltable.cc',
+ 'latebindingsymboltable.cc.def',
+ 'latebindingsymboltable.h',
+ 'latebindingsymboltable.h.def',
+ 'libdbusglibsymboltable.cc',
+ 'libdbusglibsymboltable.h',
+ 'linuxfdwalk.c',
+ 'linuxfdwalk.h',
+ 'linuxwindowpicker.cc',
+ 'linuxwindowpicker.h',
+ 'logging.cc',
+ 'logging.h',
+ 'macasyncsocket.cc',
+ 'macasyncsocket.h',
+ 'maccocoasocketserver.h',
+ 'maccocoasocketserver.mm',
+ 'macsocketserver.cc',
+ 'macsocketserver.h',
+ 'macwindowpicker.cc',
+ 'macwindowpicker.h',
+ 'mathutils.h',
+ 'multipart.cc',
+ 'multipart.h',
+ 'natserver.cc',
+ 'natserver.h',
+ 'natsocketfactory.cc',
+ 'natsocketfactory.h',
+ 'nattypes.cc',
+ 'nattypes.h',
+ 'openssl.h',
+ 'optionsfile.cc',
+ 'optionsfile.h',
+ 'posix.cc',
+ 'posix.h',
+ 'profiler.cc',
+ 'profiler.h',
+ 'proxyserver.cc',
+ 'proxyserver.h',
+ 'refcount.h',
+ 'referencecountedsingletonfactory.h',
+ 'rollingaccumulator.h',
+ 'safe_conversions.h',
+ 'safe_conversions_impl.h',
+ 'scopedptrcollection.h',
+ 'scoped_ref_ptr.h',
+ 'sec_buffer.h',
+ 'sharedexclusivelock.cc',
+ 'sharedexclusivelock.h',
+ 'sslconfig.h',
+ 'sslroots.h',
+ 'stringdigest.h',
+ 'testbase64.h',
+ 'testclient.cc',
+ 'testclient.h',
+ 'transformadapter.cc',
+ 'transformadapter.h',
+ 'versionparsing.cc',
+ 'versionparsing.h',
+ 'virtualsocketserver.cc',
+ 'virtualsocketserver.h',
+ 'win32regkey.cc',
+ 'win32regkey.h',
+ 'win32socketinit.cc',
+ 'win32socketinit.h',
+ 'win32socketserver.cc',
+ 'win32socketserver.h',
+ 'window.h',
+ 'windowpickerfactory.h',
+ 'windowpicker.h',
+ ],
+ 'defines': [
+ 'NO_MAIN_THREAD_WRAPPING',
+ 'SSL_USE_NSS',
+ ],
+ 'direct_dependent_settings': {
+ 'defines': [
+ 'NO_MAIN_THREAD_WRAPPING',
+ 'SSL_USE_NSS',
+ ],
+ },
+ }, {
+ 'conditions': [
+ ['build_json==1', {
+ 'dependencies': [
+ '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
+ ],
+ }, {
+ 'include_dirs': [
+ '<(json_root)',
+ ],
+ 'defines': [
+                # When defined, changes the include path for json.h to where it
+ # is expected to be when building json outside of the standalone
+ # build.
+ 'WEBRTC_EXTERNAL_JSON',
+ ],
+ }],
+ ],
+ 'sources!': [
+ '../overrides/webrtc/base/basictypes.h',
+ '../overrides/webrtc/base/constructormagic.h',
+ '../overrides/webrtc/base/win32socketinit.cc',
+ '../overrides/webrtc/base/logging.cc',
+ '../overrides/webrtc/base/logging.h',
+ ],
+ }],
+ ['use_openssl==1', {
+ 'defines': [
+ 'SSL_USE_OPENSSL',
+ 'HAVE_OPENSSL_SSL_H',
+ ],
+ 'direct_dependent_settings': {
+ 'defines': [
+ 'SSL_USE_OPENSSL',
+ 'HAVE_OPENSSL_SSL_H',
+ ],
+ },
+ 'conditions': [
+ ['build_ssl==1', {
+ 'dependencies': [
+ '<(DEPTH)/third_party/openssl/openssl.gyp:openssl',
+ ],
+ }, {
+ 'include_dirs': [
+ '<(ssl_root)',
+ ],
+ }],
+ ],
+ }, {
+ 'defines': [
+ 'SSL_USE_NSS',
+ 'HAVE_NSS_SSL_H',
+ 'SSL_USE_NSS_RNG',
+ ],
+ 'direct_dependent_settings': {
+ 'defines': [
+ 'SSL_USE_NSS',
+ 'HAVE_NSS_SSL_H',
+ 'SSL_USE_NSS_RNG',
+ ],
+ },
+ }],
+ ['OS == "android"', {
+ 'defines': [
+ 'HAVE_OPENSSL_SSL_H'
+ ],
+ 'direct_dependent_settings': {
+ 'defines': [
+ 'HAVE_OPENSSL_SSL_H'
+ ],
+ },
+ 'link_settings': {
+ 'libraries': [
+ '-llog',
+ '-lGLESv2',
+ ],
+ },
+ }, {
+ 'defines': [
+        'HAVE_NSS_SSL_H',
+ 'SSL_USE_NSS_RNG',
+ ],
+ 'direct_dependent_settings': {
+ 'defines': [
+          'HAVE_NSS_SSL_H',
+ 'SSL_USE_NSS_RNG',
+ ],
+ },
+ 'sources!': [
+ 'ifaddrs-android.cc',
+ 'ifaddrs-android.h',
+ ],
+ }],
+ ['OS=="ios"', {
+ 'all_dependent_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework Foundation',
+ '-framework Security',
+ '-framework SystemConfiguration',
+ '-framework UIKit',
+ ],
+ },
+ },
+ 'conditions': [
+ ['build_ssl==1', {
+ 'dependencies': [
+ '<(DEPTH)/net/third_party/nss/ssl.gyp:libssl',
+ ]
+ }, {
+ 'include_dirs': [
+ '<(ssl_root)',
+ ],
+ }],
+ ],
+ }],
+ ['OS=="linux"', {
+ 'link_settings': {
+ 'libraries': [
+ '-lcrypto',
+ '-ldl',
+ '-lrt',
+ '-lXext',
+ '-lX11',
+ '-lXcomposite',
+ '-lXrender',
+ '<!@(<(pkg-config) --libs-only-l nss | sed -e "s/-lssl3//")',
+ ],
+ },
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags nss)',
+ ],
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
+ ],
+ }, {
+ 'sources!': [
+ 'dbus.cc',
+ 'dbus.h',
+ 'libdbusglibsymboltable.cc',
+ 'libdbusglibsymboltable.h',
+ 'linuxfdwalk.c',
+ 'linuxwindowpicker.cc',
+ 'linuxwindowpicker.h',
+ ],
+ }],
+ ['OS=="mac"', {
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/usr/lib/libcrypto.dylib',
+ '$(SDKROOT)/usr/lib/libssl.dylib',
+ ],
+ },
+ 'all_dependent_settings': {
+ 'link_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework Cocoa',
+ '-framework Foundation',
+ '-framework IOKit',
+ '-framework Security',
+ '-framework SystemConfiguration',
+ ],
+ },
+ },
+ },
+ 'conditions': [
+ ['target_arch=="ia32"', {
+ 'all_dependent_settings': {
+ 'link_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework Carbon',
+ ],
+ },
+ },
+ },
+ }],
+ ],
+ }, {
+ 'sources!': [
+ 'macasyncsocket.cc',
+ 'macasyncsocket.h',
+ 'maccocoasocketserver.h',
+ 'maccocoasocketserver.mm',
+ 'macconversion.cc',
+ 'macconversion.h',
+ 'macsocketserver.cc',
+ 'macsocketserver.h',
+ 'macutils.cc',
+ 'macutils.h',
+ 'macwindowpicker.cc',
+ 'macwindowpicker.h',
+ ],
+ }],
+ ['OS=="win"', {
+ 'link_settings': {
+ 'libraries': [
+ '-lcrypt32.lib',
+ '-liphlpapi.lib',
+ '-lsecur32.lib',
+ ],
+ },
+ # Suppress warnings about WIN32_LEAN_AND_MEAN.
+ 'msvs_disabled_warnings': [4005, 4703],
+ 'defines': [
+ '_CRT_NONSTDC_NO_DEPRECATE',
+ ],
+ }, {
+ 'sources/': [
+ ['exclude', 'win32[a-z0-9]*\\.(h|cc)$'],
+ ],
+ 'sources!': [
+ 'schanneladapter.cc',
+ 'schanneladapter.h',
+ 'winping.cc',
+ 'winping.h',
+ 'winfirewall.cc',
+ 'winfirewall.h',
+ ],
+ }],
+ ['os_posix==0', {
+ 'sources!': [
+ 'latebindingsymboltable.cc',
+ 'latebindingsymboltable.h',
+ 'posix.cc',
+ 'posix.h',
+ 'unixfilesystem.cc',
+ 'unixfilesystem.h',
+ ],
+ }, {
+ 'configurations': {
+ 'Debug_Base': {
+ 'defines': [
+ # Chromium's build/common.gypi defines this for all posix
+ # _except_ for ios & mac. We want it there as well, e.g.
+ # because ASSERT and friends trigger off of it.
+ '_DEBUG',
+ ],
+ },
+ }
+ }],
+ ['OS=="ios" or (OS=="mac" and target_arch!="ia32")', {
+ 'defines': [
+ 'CARBON_DEPRECATED=YES',
+ ],
+ }],
+ ['OS!="ios" and OS!="mac"', {
+ 'sources!': [
+ 'scoped_autorelease_pool.mm',
+ ],
+ }],
+ ['OS=="ios" or os_posix==0', {
+ 'sources!': [
+ 'openssl.h',
+ 'openssladapter.cc',
+ 'openssladapter.h',
+ 'openssldigest.cc',
+ 'openssldigest.h',
+ 'opensslidentity.cc',
+ 'opensslidentity.h',
+ 'opensslstreamadapter.cc',
+ 'opensslstreamadapter.h',
+ ],
+ }],
+ ['OS!="linux" and OS!="android"', {
+ 'sources!': [
+ 'linux.cc',
+ 'linux.h',
+ ],
+ }],
+ ['OS == "mac" or OS == "ios" or OS == "win"', {
+ 'conditions': [
+ ['build_ssl==1', {
+ 'dependencies': [
+ '<(DEPTH)/net/third_party/nss/ssl.gyp:libssl',
+ '<(DEPTH)/third_party/nss/nss.gyp:nspr',
+ '<(DEPTH)/third_party/nss/nss.gyp:nss',
+ ],
+ }, {
+ 'include_dirs': [
+ '<(ssl_root)',
+ ],
+ }],
+ ],
+ }],
+ ['os_posix == 1 and OS != "mac" and OS != "ios" and OS != "android"', {
+ 'conditions': [
+ ['build_ssl==1', {
+ 'dependencies': [
+ '<(DEPTH)/build/linux/system.gyp:ssl',
+ ],
+ }, {
+ 'include_dirs': [
+ '<(ssl_root)',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ ],
+}
diff --git a/chromium/third_party/webrtc/base/base64.cc b/chromium/third_party/webrtc/base/base64.cc
new file mode 100644
index 00000000000..7ed373e8965
--- /dev/null
+++ b/chromium/third_party/webrtc/base/base64.cc
@@ -0,0 +1,260 @@
+
+//*********************************************************************
+//* Base64 - a simple base64 encoder and decoder.
+//*
+//* Copyright (c) 1999, Bob Withers - bwit@pobox.com
+//*
+//* This code may be freely used for any purpose, either personal
+//* or commercial, provided the authors copyright notice remains
+//* intact.
+//*
+//* Enhancements by Stanley Yamane:
+//* o reverse lookup table for the decode function
+//* o reserve string buffer space in advance
+//*
+//*********************************************************************
+
+#include "webrtc/base/base64.h"
+
+#include <string.h>
+
+#include "webrtc/base/common.h"
+
+using std::vector;
+
+namespace rtc {
+
+static const char kPad = '=';
+static const unsigned char pd = 0xFD; // Padding
+static const unsigned char sp = 0xFE; // Whitespace
+static const unsigned char il = 0xFF; // Illegal base64 character
+
+const char Base64::Base64Table[] =
+// 0000000000111111111122222222223333333333444444444455555555556666
+// 0123456789012345678901234567890123456789012345678901234567890123
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+
+// Decode Table gives the index of any valid base64 character in the
+// Base64 table
+// 65 == A, 97 == a, 48 == 0, 43 == +, 47 == /
+
+const unsigned char Base64::DecodeTable[] = {
+// 0 1 2 3 4 5 6 7 8 9
+ il,il,il,il,il,il,il,il,il,sp, // 0 - 9
+ sp,sp,sp,sp,il,il,il,il,il,il, // 10 - 19
+ il,il,il,il,il,il,il,il,il,il, // 20 - 29
+ il,il,sp,il,il,il,il,il,il,il, // 30 - 39
+ il,il,il,62,il,il,il,63,52,53, // 40 - 49
+ 54,55,56,57,58,59,60,61,il,il, // 50 - 59
+ il,pd,il,il,il, 0, 1, 2, 3, 4, // 60 - 69
+ 5, 6, 7, 8, 9,10,11,12,13,14, // 70 - 79
+ 15,16,17,18,19,20,21,22,23,24, // 80 - 89
+ 25,il,il,il,il,il,il,26,27,28, // 90 - 99
+ 29,30,31,32,33,34,35,36,37,38, // 100 - 109
+ 39,40,41,42,43,44,45,46,47,48, // 110 - 119
+ 49,50,51,il,il,il,il,il,il,il, // 120 - 129
+ il,il,il,il,il,il,il,il,il,il, // 130 - 139
+ il,il,il,il,il,il,il,il,il,il, // 140 - 149
+ il,il,il,il,il,il,il,il,il,il, // 150 - 159
+ il,il,il,il,il,il,il,il,il,il, // 160 - 169
+ il,il,il,il,il,il,il,il,il,il, // 170 - 179
+ il,il,il,il,il,il,il,il,il,il, // 180 - 189
+ il,il,il,il,il,il,il,il,il,il, // 190 - 199
+ il,il,il,il,il,il,il,il,il,il, // 200 - 209
+ il,il,il,il,il,il,il,il,il,il, // 210 - 219
+ il,il,il,il,il,il,il,il,il,il, // 220 - 229
+ il,il,il,il,il,il,il,il,il,il, // 230 - 239
+ il,il,il,il,il,il,il,il,il,il, // 240 - 249
+ il,il,il,il,il,il // 250 - 255
+};
+
+bool Base64::IsBase64Char(char ch) {
+ return (('A' <= ch) && (ch <= 'Z')) ||
+ (('a' <= ch) && (ch <= 'z')) ||
+ (('0' <= ch) && (ch <= '9')) ||
+ (ch == '+') || (ch == '/');
+}
+
+bool Base64::GetNextBase64Char(char ch, char* next_ch) {
+ if (next_ch == NULL) {
+ return false;
+ }
+ const char* p = strchr(Base64Table, ch);
+ if (!p)
+ return false;
+ ++p;
+ *next_ch = (*p) ? *p : Base64Table[0];
+ return true;
+}
+
+bool Base64::IsBase64Encoded(const std::string& str) {
+ for (size_t i = 0; i < str.size(); ++i) {
+ if (!IsBase64Char(str.at(i)))
+ return false;
+ }
+ return true;
+}
+
+void Base64::EncodeFromArray(const void* data, size_t len,
+ std::string* result) {
+ ASSERT(NULL != result);
+ result->clear();
+ result->resize(((len + 2) / 3) * 4);
+ const unsigned char* byte_data = static_cast<const unsigned char*>(data);
+
+ unsigned char c;
+ size_t i = 0;
+ size_t dest_ix = 0;
+ while (i < len) {
+ c = (byte_data[i] >> 2) & 0x3f;
+ (*result)[dest_ix++] = Base64Table[c];
+
+ c = (byte_data[i] << 4) & 0x3f;
+ if (++i < len) {
+ c |= (byte_data[i] >> 4) & 0x0f;
+ }
+ (*result)[dest_ix++] = Base64Table[c];
+
+ if (i < len) {
+ c = (byte_data[i] << 2) & 0x3f;
+ if (++i < len) {
+ c |= (byte_data[i] >> 6) & 0x03;
+ }
+ (*result)[dest_ix++] = Base64Table[c];
+ } else {
+ (*result)[dest_ix++] = kPad;
+ }
+
+ if (i < len) {
+ c = byte_data[i] & 0x3f;
+ (*result)[dest_ix++] = Base64Table[c];
+ ++i;
+ } else {
+ (*result)[dest_ix++] = kPad;
+ }
+ }
+}
+
+size_t Base64::GetNextQuantum(DecodeFlags parse_flags, bool illegal_pads,
+ const char* data, size_t len, size_t* dpos,
+ unsigned char qbuf[4], bool* padded)
+{
+ size_t byte_len = 0, pad_len = 0, pad_start = 0;
+ for (; (byte_len < 4) && (*dpos < len); ++*dpos) {
+ qbuf[byte_len] = DecodeTable[static_cast<unsigned char>(data[*dpos])];
+ if ((il == qbuf[byte_len]) || (illegal_pads && (pd == qbuf[byte_len]))) {
+ if (parse_flags != DO_PARSE_ANY)
+ break;
+ // Ignore illegal characters
+ } else if (sp == qbuf[byte_len]) {
+ if (parse_flags == DO_PARSE_STRICT)
+ break;
+ // Ignore spaces
+ } else if (pd == qbuf[byte_len]) {
+ if (byte_len < 2) {
+ if (parse_flags != DO_PARSE_ANY)
+ break;
+ // Ignore unexpected padding
+ } else if (byte_len + pad_len >= 4) {
+ if (parse_flags != DO_PARSE_ANY)
+ break;
+ // Ignore extra pads
+ } else {
+ if (1 == ++pad_len) {
+ pad_start = *dpos;
+ }
+ }
+ } else {
+ if (pad_len > 0) {
+ if (parse_flags != DO_PARSE_ANY)
+ break;
+ // Ignore pads which are followed by data
+ pad_len = 0;
+ }
+ ++byte_len;
+ }
+ }
+ for (size_t i = byte_len; i < 4; ++i) {
+ qbuf[i] = 0;
+ }
+ if (4 == byte_len + pad_len) {
+ *padded = true;
+ } else {
+ *padded = false;
+ if (pad_len) {
+ // Roll back illegal padding
+ *dpos = pad_start;
+ }
+ }
+ return byte_len;
+}
+
+bool Base64::DecodeFromArray(const char* data, size_t len, DecodeFlags flags,
+ std::string* result, size_t* data_used) {
+ return DecodeFromArrayTemplate<std::string>(
+ data, len, flags, result, data_used);
+}
+
+bool Base64::DecodeFromArray(const char* data, size_t len, DecodeFlags flags,
+ vector<char>* result, size_t* data_used) {
+ return DecodeFromArrayTemplate<vector<char> >(data, len, flags, result,
+ data_used);
+}
+
+template<typename T>
+bool Base64::DecodeFromArrayTemplate(const char* data, size_t len,
+ DecodeFlags flags, T* result,
+ size_t* data_used)
+{
+ ASSERT(NULL != result);
+ ASSERT(flags <= (DO_PARSE_MASK | DO_PAD_MASK | DO_TERM_MASK));
+
+ const DecodeFlags parse_flags = flags & DO_PARSE_MASK;
+ const DecodeFlags pad_flags = flags & DO_PAD_MASK;
+ const DecodeFlags term_flags = flags & DO_TERM_MASK;
+ ASSERT(0 != parse_flags);
+ ASSERT(0 != pad_flags);
+ ASSERT(0 != term_flags);
+
+ result->clear();
+ result->reserve(len);
+
+ size_t dpos = 0;
+ bool success = true, padded;
+ unsigned char c, qbuf[4];
+ while (dpos < len) {
+ size_t qlen = GetNextQuantum(parse_flags, (DO_PAD_NO == pad_flags),
+ data, len, &dpos, qbuf, &padded);
+ c = (qbuf[0] << 2) | ((qbuf[1] >> 4) & 0x3);
+ if (qlen >= 2) {
+ result->push_back(c);
+ c = ((qbuf[1] << 4) & 0xf0) | ((qbuf[2] >> 2) & 0xf);
+ if (qlen >= 3) {
+ result->push_back(c);
+ c = ((qbuf[2] << 6) & 0xc0) | qbuf[3];
+ if (qlen >= 4) {
+ result->push_back(c);
+ c = 0;
+ }
+ }
+ }
+ if (qlen < 4) {
+ if ((DO_TERM_ANY != term_flags) && (0 != c)) {
+ success = false; // unused bits
+ }
+ if ((DO_PAD_YES == pad_flags) && !padded) {
+ success = false; // expected padding
+ }
+ break;
+ }
+ }
+ if ((DO_TERM_BUFFER == term_flags) && (dpos != len)) {
+ success = false; // unused chars
+ }
+ if (data_used) {
+ *data_used = dpos;
+ }
+ return success;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/base64.h b/chromium/third_party/webrtc/base/base64.h
new file mode 100644
index 00000000000..d5a7dd84c9d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/base64.h
@@ -0,0 +1,104 @@
+
+//*********************************************************************
+//* C_Base64 - a simple base64 encoder and decoder.
+//*
+//* Copyright (c) 1999, Bob Withers - bwit@pobox.com
+//*
+//* This code may be freely used for any purpose, either personal
+//* or commercial, provided the authors copyright notice remains
+//* intact.
+//*********************************************************************
+
+#ifndef WEBRTC_BASE_BASE64_H__
+#define WEBRTC_BASE_BASE64_H__
+
+#include <string>
+#include <vector>
+
+namespace rtc {
+
+class Base64
+{
+public:
+ enum DecodeOption {
+ DO_PARSE_STRICT = 1, // Parse only base64 characters
+ DO_PARSE_WHITE = 2, // Parse only base64 and whitespace characters
+ DO_PARSE_ANY = 3, // Parse all characters
+ DO_PARSE_MASK = 3,
+
+ DO_PAD_YES = 4, // Padding is required
+ DO_PAD_ANY = 8, // Padding is optional
+ DO_PAD_NO = 12, // Padding is disallowed
+ DO_PAD_MASK = 12,
+
+    DO_TERM_BUFFER = 16, // Must terminate at end of buffer
+ DO_TERM_CHAR = 32, // May terminate at any character boundary
+ DO_TERM_ANY = 48, // May terminate at a sub-character bit offset
+ DO_TERM_MASK = 48,
+
+ // Strictest interpretation
+ DO_STRICT = DO_PARSE_STRICT | DO_PAD_YES | DO_TERM_BUFFER,
+
+ DO_LAX = DO_PARSE_ANY | DO_PAD_ANY | DO_TERM_CHAR,
+ };
+ typedef int DecodeFlags;
+
+ static bool IsBase64Char(char ch);
+
+  // Gets the char that follows |ch| in the Base64Table.
+  // If |ch| is the last one in the Base64Table, returns
+  // the first one from the table.
+  // Expects |ch| to be a base64 char.
+ // The result will be saved in |next_ch|.
+ // Returns true on success.
+ static bool GetNextBase64Char(char ch, char* next_ch);
+
+ // Determines whether the given string consists entirely of valid base64
+ // encoded characters.
+ static bool IsBase64Encoded(const std::string& str);
+
+ static void EncodeFromArray(const void* data, size_t len,
+ std::string* result);
+ static bool DecodeFromArray(const char* data, size_t len, DecodeFlags flags,
+ std::string* result, size_t* data_used);
+ static bool DecodeFromArray(const char* data, size_t len, DecodeFlags flags,
+ std::vector<char>* result, size_t* data_used);
+
+ // Convenience Methods
+ static inline std::string Encode(const std::string& data) {
+ std::string result;
+ EncodeFromArray(data.data(), data.size(), &result);
+ return result;
+ }
+ static inline std::string Decode(const std::string& data, DecodeFlags flags) {
+ std::string result;
+ DecodeFromArray(data.data(), data.size(), flags, &result, NULL);
+ return result;
+ }
+ static inline bool Decode(const std::string& data, DecodeFlags flags,
+ std::string* result, size_t* data_used)
+ {
+ return DecodeFromArray(data.data(), data.size(), flags, result, data_used);
+ }
+ static inline bool Decode(const std::string& data, DecodeFlags flags,
+ std::vector<char>* result, size_t* data_used)
+ {
+ return DecodeFromArray(data.data(), data.size(), flags, result, data_used);
+ }
+
+private:
+ static const char Base64Table[];
+ static const unsigned char DecodeTable[];
+
+ static size_t GetNextQuantum(DecodeFlags parse_flags, bool illegal_pads,
+ const char* data, size_t len, size_t* dpos,
+ unsigned char qbuf[4], bool* padded);
+ template<typename T>
+ static bool DecodeFromArrayTemplate(const char* data, size_t len,
+ DecodeFlags flags, T* result,
+ size_t* data_used);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_BASE64_H__
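For orientation, a short sketch of the convenience wrappers and decode flags declared above. The function name and input strings are arbitrary examples; the expected outputs follow from the flag definitions, GetNextQuantum() in base64.cc, and the test vectors in base64_unittest.cc below ("ab" encodes to "YWI=", "abc" to "YWJj").

#include <string>

#include "webrtc/base/base64.h"

void IllustrateBase64() {
  // Encoding maps every 3 input bytes to 4 output characters and pads a
  // shorter tail with '='.
  std::string three = rtc::Base64::Encode("abc");  // "YWJj"
  std::string two = rtc::Base64::Encode("ab");     // "YWI="

  // Strict decoding accepts only canonical input: base64 characters only,
  // padding present, and nothing left over in the buffer.
  std::string out;
  bool ok = rtc::Base64::Decode(two, rtc::Base64::DO_STRICT, &out, NULL);
  // ok == true, out == "ab"

  // Lax decoding skips whitespace and stray characters and does not require
  // padding.
  ok = rtc::Base64::Decode("YW I", rtc::Base64::DO_LAX, &out, NULL);
  // ok == true, out == "ab"
}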
diff --git a/chromium/third_party/webrtc/base/base64_unittest.cc b/chromium/third_party/webrtc/base/base64_unittest.cc
new file mode 100644
index 00000000000..c4d4072449c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/base64_unittest.cc
@@ -0,0 +1,1001 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/base64.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/stream.h"
+
+#include "webrtc/base/testbase64.h"
+
+using namespace std;
+using namespace rtc;
+
+static struct {
+ size_t plain_length;
+ const char* plaintext;
+ const char* cyphertext;
+} base64_tests[] = {
+
+ // Basic bit patterns;
+ // values obtained with "echo -n '...' | uuencode -m test"
+
+ { 1, "\000", "AA==" },
+ { 1, "\001", "AQ==" },
+ { 1, "\002", "Ag==" },
+ { 1, "\004", "BA==" },
+ { 1, "\010", "CA==" },
+ { 1, "\020", "EA==" },
+ { 1, "\040", "IA==" },
+ { 1, "\100", "QA==" },
+ { 1, "\200", "gA==" },
+
+ { 1, "\377", "/w==" },
+ { 1, "\376", "/g==" },
+ { 1, "\375", "/Q==" },
+ { 1, "\373", "+w==" },
+ { 1, "\367", "9w==" },
+ { 1, "\357", "7w==" },
+ { 1, "\337", "3w==" },
+ { 1, "\277", "vw==" },
+ { 1, "\177", "fw==" },
+ { 2, "\000\000", "AAA=" },
+ { 2, "\000\001", "AAE=" },
+ { 2, "\000\002", "AAI=" },
+ { 2, "\000\004", "AAQ=" },
+ { 2, "\000\010", "AAg=" },
+ { 2, "\000\020", "ABA=" },
+ { 2, "\000\040", "ACA=" },
+ { 2, "\000\100", "AEA=" },
+ { 2, "\000\200", "AIA=" },
+ { 2, "\001\000", "AQA=" },
+ { 2, "\002\000", "AgA=" },
+ { 2, "\004\000", "BAA=" },
+ { 2, "\010\000", "CAA=" },
+ { 2, "\020\000", "EAA=" },
+ { 2, "\040\000", "IAA=" },
+ { 2, "\100\000", "QAA=" },
+ { 2, "\200\000", "gAA=" },
+
+ { 2, "\377\377", "//8=" },
+ { 2, "\377\376", "//4=" },
+ { 2, "\377\375", "//0=" },
+ { 2, "\377\373", "//s=" },
+ { 2, "\377\367", "//c=" },
+ { 2, "\377\357", "/+8=" },
+ { 2, "\377\337", "/98=" },
+ { 2, "\377\277", "/78=" },
+ { 2, "\377\177", "/38=" },
+ { 2, "\376\377", "/v8=" },
+ { 2, "\375\377", "/f8=" },
+ { 2, "\373\377", "+/8=" },
+ { 2, "\367\377", "9/8=" },
+ { 2, "\357\377", "7/8=" },
+ { 2, "\337\377", "3/8=" },
+ { 2, "\277\377", "v/8=" },
+ { 2, "\177\377", "f/8=" },
+
+ { 3, "\000\000\000", "AAAA" },
+ { 3, "\000\000\001", "AAAB" },
+ { 3, "\000\000\002", "AAAC" },
+ { 3, "\000\000\004", "AAAE" },
+ { 3, "\000\000\010", "AAAI" },
+ { 3, "\000\000\020", "AAAQ" },
+ { 3, "\000\000\040", "AAAg" },
+ { 3, "\000\000\100", "AABA" },
+ { 3, "\000\000\200", "AACA" },
+ { 3, "\000\001\000", "AAEA" },
+ { 3, "\000\002\000", "AAIA" },
+ { 3, "\000\004\000", "AAQA" },
+ { 3, "\000\010\000", "AAgA" },
+ { 3, "\000\020\000", "ABAA" },
+ { 3, "\000\040\000", "ACAA" },
+ { 3, "\000\100\000", "AEAA" },
+ { 3, "\000\200\000", "AIAA" },
+ { 3, "\001\000\000", "AQAA" },
+ { 3, "\002\000\000", "AgAA" },
+ { 3, "\004\000\000", "BAAA" },
+ { 3, "\010\000\000", "CAAA" },
+ { 3, "\020\000\000", "EAAA" },
+ { 3, "\040\000\000", "IAAA" },
+ { 3, "\100\000\000", "QAAA" },
+ { 3, "\200\000\000", "gAAA" },
+
+ { 3, "\377\377\377", "////" },
+ { 3, "\377\377\376", "///+" },
+ { 3, "\377\377\375", "///9" },
+ { 3, "\377\377\373", "///7" },
+ { 3, "\377\377\367", "///3" },
+ { 3, "\377\377\357", "///v" },
+ { 3, "\377\377\337", "///f" },
+ { 3, "\377\377\277", "//+/" },
+ { 3, "\377\377\177", "//9/" },
+ { 3, "\377\376\377", "//7/" },
+ { 3, "\377\375\377", "//3/" },
+ { 3, "\377\373\377", "//v/" },
+ { 3, "\377\367\377", "//f/" },
+ { 3, "\377\357\377", "/+//" },
+ { 3, "\377\337\377", "/9//" },
+ { 3, "\377\277\377", "/7//" },
+ { 3, "\377\177\377", "/3//" },
+ { 3, "\376\377\377", "/v//" },
+ { 3, "\375\377\377", "/f//" },
+ { 3, "\373\377\377", "+///" },
+ { 3, "\367\377\377", "9///" },
+ { 3, "\357\377\377", "7///" },
+ { 3, "\337\377\377", "3///" },
+ { 3, "\277\377\377", "v///" },
+ { 3, "\177\377\377", "f///" },
+
+ // Random numbers: values obtained with
+ //
+ // #! /bin/bash
+ // dd bs=$1 count=1 if=/dev/random of=/tmp/bar.random
+ // od -N $1 -t o1 /tmp/bar.random
+ // uuencode -m test < /tmp/bar.random
+ //
+ // where $1 is the number of bytes (2, 3)
+
+ { 2, "\243\361", "o/E=" },
+ { 2, "\024\167", "FHc=" },
+ { 2, "\313\252", "y6o=" },
+ { 2, "\046\041", "JiE=" },
+ { 2, "\145\236", "ZZ4=" },
+ { 2, "\254\325", "rNU=" },
+ { 2, "\061\330", "Mdg=" },
+ { 2, "\245\032", "pRo=" },
+ { 2, "\006\000", "BgA=" },
+ { 2, "\375\131", "/Vk=" },
+ { 2, "\303\210", "w4g=" },
+ { 2, "\040\037", "IB8=" },
+ { 2, "\261\372", "sfo=" },
+ { 2, "\335\014", "3Qw=" },
+ { 2, "\233\217", "m48=" },
+ { 2, "\373\056", "+y4=" },
+ { 2, "\247\232", "p5o=" },
+ { 2, "\107\053", "Rys=" },
+ { 2, "\204\077", "hD8=" },
+ { 2, "\276\211", "vok=" },
+ { 2, "\313\110", "y0g=" },
+ { 2, "\363\376", "8/4=" },
+ { 2, "\251\234", "qZw=" },
+ { 2, "\103\262", "Q7I=" },
+ { 2, "\142\312", "Yso=" },
+ { 2, "\067\211", "N4k=" },
+ { 2, "\220\001", "kAE=" },
+ { 2, "\152\240", "aqA=" },
+ { 2, "\367\061", "9zE=" },
+ { 2, "\133\255", "W60=" },
+ { 2, "\176\035", "fh0=" },
+ { 2, "\032\231", "Gpk=" },
+
+ { 3, "\013\007\144", "Cwdk" },
+ { 3, "\030\112\106", "GEpG" },
+ { 3, "\047\325\046", "J9Um" },
+ { 3, "\310\160\022", "yHAS" },
+ { 3, "\131\100\237", "WUCf" },
+ { 3, "\064\342\134", "NOJc" },
+ { 3, "\010\177\004", "CH8E" },
+ { 3, "\345\147\205", "5WeF" },
+ { 3, "\300\343\360", "wOPw" },
+ { 3, "\061\240\201", "MaCB" },
+ { 3, "\225\333\044", "ldsk" },
+ { 3, "\215\137\352", "jV/q" },
+ { 3, "\371\147\160", "+Wdw" },
+ { 3, "\030\320\051", "GNAp" },
+ { 3, "\044\174\241", "JHyh" },
+ { 3, "\260\127\037", "sFcf" },
+ { 3, "\111\045\033", "SSUb" },
+ { 3, "\202\114\107", "gkxH" },
+ { 3, "\057\371\042", "L/ki" },
+ { 3, "\223\247\244", "k6ek" },
+ { 3, "\047\216\144", "J45k" },
+ { 3, "\203\070\327", "gzjX" },
+ { 3, "\247\140\072", "p2A6" },
+ { 3, "\124\115\116", "VE1O" },
+ { 3, "\157\162\050", "b3Io" },
+ { 3, "\357\223\004", "75ME" },
+ { 3, "\052\117\156", "Kk9u" },
+ { 3, "\347\154\000", "52wA" },
+ { 3, "\303\012\142", "wwpi" },
+ { 3, "\060\035\362", "MB3y" },
+ { 3, "\130\226\361", "WJbx" },
+ { 3, "\173\013\071", "ews5" },
+ { 3, "\336\004\027", "3gQX" },
+ { 3, "\357\366\234", "7/ac" },
+ { 3, "\353\304\111", "68RJ" },
+ { 3, "\024\264\131", "FLRZ" },
+ { 3, "\075\114\251", "PUyp" },
+ { 3, "\315\031\225", "zRmV" },
+ { 3, "\154\201\276", "bIG+" },
+ { 3, "\200\066\072", "gDY6" },
+ { 3, "\142\350\267", "Yui3" },
+ { 3, "\033\000\166", "GwB2" },
+ { 3, "\210\055\077", "iC0/" },
+ { 3, "\341\037\124", "4R9U" },
+ { 3, "\161\103\152", "cUNq" },
+ { 3, "\270\142\131", "uGJZ" },
+ { 3, "\337\076\074", "3z48" },
+ { 3, "\375\106\362", "/Uby" },
+ { 3, "\227\301\127", "l8FX" },
+ { 3, "\340\002\234", "4AKc" },
+ { 3, "\121\064\033", "UTQb" },
+ { 3, "\157\134\143", "b1xj" },
+ { 3, "\247\055\327", "py3X" },
+ { 3, "\340\142\005", "4GIF" },
+ { 3, "\060\260\143", "MLBj" },
+ { 3, "\075\203\170", "PYN4" },
+ { 3, "\143\160\016", "Y3AO" },
+ { 3, "\313\013\063", "ywsz" },
+ { 3, "\174\236\135", "fJ5d" },
+ { 3, "\103\047\026", "QycW" },
+ { 3, "\365\005\343", "9QXj" },
+ { 3, "\271\160\223", "uXCT" },
+ { 3, "\362\255\172", "8q16" },
+ { 3, "\113\012\015", "SwoN" },
+
+ // various lengths, generated by this python script:
+ //
+ // from string import lowercase as lc
+ // for i in range(27):
+ // print '{ %2d, "%s",%s "%s" },' % (i, lc[:i], ' ' * (26-i),
+ // lc[:i].encode('base64').strip())
+
+ { 0, "abcdefghijklmnopqrstuvwxyz", "" },
+ { 1, "abcdefghijklmnopqrstuvwxyz", "YQ==" },
+ { 2, "abcdefghijklmnopqrstuvwxyz", "YWI=" },
+ { 3, "abcdefghijklmnopqrstuvwxyz", "YWJj" },
+ { 4, "abcdefghijklmnopqrstuvwxyz", "YWJjZA==" },
+ { 5, "abcdefghijklmnopqrstuvwxyz", "YWJjZGU=" },
+ { 6, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVm" },
+ { 7, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZw==" },
+ { 8, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2g=" },
+ { 9, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hp" },
+ { 10, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpag==" },
+ { 11, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpams=" },
+ { 12, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamts" },
+ { 13, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbQ==" },
+ { 14, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW4=" },
+ { 15, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5v" },
+ { 16, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcA==" },
+ { 17, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHE=" },
+ { 18, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFy" },
+ { 19, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFycw==" },
+ { 20, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3Q=" },
+ { 21, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1" },
+ { 22, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dg==" },
+ { 23, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnc=" },
+ { 24, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4" },
+ { 25, "abcdefghijklmnopqrstuvwxy", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eQ==" },
+ { 26, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXo=" },
+};
+#if 0
+static struct {
+ const char* plaintext;
+ const char* cyphertext;
+} base64_strings[] = {
+
+ // The first few Google quotes
+ // Cyphertext created with "uuencode - GNU sharutils 4.2.1"
+ {
+ "Everyone! We're teetering on the brink of disaster."
+ " - Sergey Brin, 6/24/99, regarding the company's state "
+ "after the unleashing of Netscape/Google search",
+
+ "RXZlcnlvbmUhICBXZSdyZSB0ZWV0ZXJpbmcgb24gdGhlIGJyaW5rIG9mIGRp"
+ "c2FzdGVyLiAtIFNlcmdleSBCcmluLCA2LzI0Lzk5LCByZWdhcmRpbmcgdGhl"
+ "IGNvbXBhbnkncyBzdGF0ZSBhZnRlciB0aGUgdW5sZWFzaGluZyBvZiBOZXRz"
+ "Y2FwZS9Hb29nbGUgc2VhcmNo" },
+
+ {
+ "I'm not sure why we're still alive, but we seem to be."
+ " - Larry Page, 6/24/99, while hiding in the kitchenette "
+ "during the Netscape traffic overflow",
+
+ "SSdtIG5vdCBzdXJlIHdoeSB3ZSdyZSBzdGlsbCBhbGl2ZSwgYnV0IHdlIHNl"
+ "ZW0gdG8gYmUuIC0gTGFycnkgUGFnZSwgNi8yNC85OSwgd2hpbGUgaGlkaW5n"
+ "IGluIHRoZSBraXRjaGVuZXR0ZSBkdXJpbmcgdGhlIE5ldHNjYXBlIHRyYWZm"
+ "aWMgb3ZlcmZsb3c" },
+
+ {
+ "I think kids want porn."
+ " - Sergey Brin, 6/99, on why Google shouldn't prioritize a "
+ "filtered search for children and families",
+
+ "SSB0aGluayBraWRzIHdhbnQgcG9ybi4gLSBTZXJnZXkgQnJpbiwgNi85OSwg"
+ "b24gd2h5IEdvb2dsZSBzaG91bGRuJ3QgcHJpb3JpdGl6ZSBhIGZpbHRlcmVk"
+ "IHNlYXJjaCBmb3IgY2hpbGRyZW4gYW5kIGZhbWlsaWVz" },
+};
+#endif
+// Compare bytes 0..len-1 of x and y. If not equal, abort with verbose error
+// message showing position and numeric value that differed.
+// Handles embedded nulls just like any other byte.
+// Only added because string.compare() in gcc-3.3.3 seems to misbehave with
+// embedded nulls.
+// TODO: switch back to string.compare() if/when gcc is fixed
+#define EXPECT_EQ_ARRAY(len, x, y, msg) \
+ for (size_t j = 0; j < len; ++j) { \
+ if (x[j] != y[j]) { \
+ LOG(LS_ERROR) << "" # x << " != " # y \
+ << " byte " << j << " msg: " << msg; \
+ } \
+ }
+
+size_t Base64Escape(const unsigned char *src, size_t szsrc, char *dest,
+ size_t szdest) {
+ std::string escaped;
+ Base64::EncodeFromArray((const char *)src, szsrc, &escaped);
+ memcpy(dest, escaped.data(), min(escaped.size(), szdest));
+ return escaped.size();
+}
+
+size_t Base64Unescape(const char *src, size_t szsrc, char *dest,
+ size_t szdest) {
+ std::string unescaped;
+ EXPECT_TRUE(Base64::DecodeFromArray(src, szsrc, Base64::DO_LAX, &unescaped,
+ NULL));
+ memcpy(dest, unescaped.data(), min(unescaped.size(), szdest));
+ return unescaped.size();
+}
+
+size_t Base64Unescape(const char *src, size_t szsrc, string *s) {
+ EXPECT_TRUE(Base64::DecodeFromArray(src, szsrc, Base64::DO_LAX, s, NULL));
+ return s->size();
+}
+
+TEST(Base64, EncodeDecodeBattery) {
+ LOG(LS_VERBOSE) << "Testing base-64";
+
+ size_t i;
+
+ // Check the short strings; this tests the math (and boundaries)
+ for( i = 0; i < sizeof(base64_tests) / sizeof(base64_tests[0]); ++i ) {
+ char encode_buffer[100];
+ size_t encode_length;
+ char decode_buffer[100];
+ size_t decode_length;
+ size_t cypher_length;
+
+ LOG(LS_VERBOSE) << "B64: " << base64_tests[i].cyphertext;
+
+ const unsigned char* unsigned_plaintext =
+ reinterpret_cast<const unsigned char*>(base64_tests[i].plaintext);
+
+ cypher_length = strlen(base64_tests[i].cyphertext);
+
+ // The basic escape function:
+ memset(encode_buffer, 0, sizeof(encode_buffer));
+ encode_length = Base64Escape(unsigned_plaintext,
+ base64_tests[i].plain_length,
+ encode_buffer,
+ sizeof(encode_buffer));
+ // Is it of the expected length?
+ EXPECT_EQ(encode_length, cypher_length);
+
+ // Is it the expected encoded value?
+ EXPECT_STREQ(encode_buffer, base64_tests[i].cyphertext);
+
+ // If we encode it into a buffer of exactly the right length...
+ memset(encode_buffer, 0, sizeof(encode_buffer));
+ encode_length = Base64Escape(unsigned_plaintext,
+ base64_tests[i].plain_length,
+ encode_buffer,
+ cypher_length);
+ // Is it still of the expected length?
+ EXPECT_EQ(encode_length, cypher_length);
+
+ // And is the value still correct? (i.e., not losing the last byte)
+ EXPECT_STREQ(encode_buffer, base64_tests[i].cyphertext);
+
+ // If we decode it back:
+ memset(decode_buffer, 0, sizeof(decode_buffer));
+ decode_length = Base64Unescape(encode_buffer,
+ cypher_length,
+ decode_buffer,
+ sizeof(decode_buffer));
+
+ // Is it of the expected length?
+ EXPECT_EQ(decode_length, base64_tests[i].plain_length);
+
+ // Is it the expected decoded value?
+ EXPECT_EQ(0, memcmp(decode_buffer, base64_tests[i].plaintext, decode_length));
+
+ // Our decoder treats the padding '=' characters at the end as
+ // optional. If encode_buffer has any, run some additional
+ // tests that fiddle with them.
+ char* first_equals = strchr(encode_buffer, '=');
+ if (first_equals) {
+      // How many equals signs does the string end with?
+ int equals = (*(first_equals+1) == '=') ? 2 : 1;
+
+ // Try chopping off the equals sign(s) entirely. The decoder
+ // should still be okay with this.
+ string decoded2("this junk should also be ignored");
+ *first_equals = '\0';
+ EXPECT_NE(0U, Base64Unescape(encode_buffer, first_equals-encode_buffer,
+ &decoded2));
+ EXPECT_EQ(decoded2.size(), base64_tests[i].plain_length);
+ EXPECT_EQ_ARRAY(decoded2.size(), decoded2.data(), base64_tests[i].plaintext, i);
+
+ size_t len;
+
+ // try putting some extra stuff after the equals signs, or in between them
+ if (equals == 2) {
+ sprintfn(first_equals, 6, " = = ");
+ len = first_equals - encode_buffer + 5;
+ } else {
+ sprintfn(first_equals, 6, " = ");
+ len = first_equals - encode_buffer + 3;
+ }
+ decoded2.assign("this junk should be ignored");
+ EXPECT_NE(0U, Base64Unescape(encode_buffer, len, &decoded2));
+ EXPECT_EQ(decoded2.size(), base64_tests[i].plain_length);
+ EXPECT_EQ_ARRAY(decoded2.size(), decoded2, base64_tests[i].plaintext, i);
+ }
+ }
+}
+
+// Here's a weird case: a giant base64-encoded stream that broke our base64
+// decoding. Let's test it explicitly.
+const char SpecificTest[] =
+ "/9j/4AAQSkZJRgABAgEASABIAAD/4Q0HRXhpZgAATU0AKgAAAAgADAEOAAIAAAAgAAAAngEPAAI\n"
+ "AAAAFAAAAvgEQAAIAAAAJAAAAwwESAAMAAAABAAEAAAEaAAUAAAABAAAAzAEbAAUAAAABAAAA1A\n"
+ "EoAAMAAAABAAIAAAExAAIAAAAUAAAA3AEyAAIAAAAUAAAA8AE8AAIAAAAQAAABBAITAAMAAAABA\n"
+ "AIAAIdpAAQAAAABAAABFAAAAsQgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgAFNPTlkA\n"
+ "RFNDLVAyMDAAAAAASAAAAAEAAABIAAAAAUFkb2JlIFBob3Rvc2hvcCA3LjAAMjAwNzowMTozMCA\n"
+ "yMzoxMDowNABNYWMgT1MgWCAxMC40LjgAAByCmgAFAAAAAQAAAmqCnQAFAAAAAQAAAnKIIgADAA\n"
+ "AAAQACAACIJwADAAAAAQBkAACQAAAHAAAABDAyMjCQAwACAAAAFAAAAnqQBAACAAAAFAAAAo6RA\n"
+ "QAHAAAABAECAwCRAgAFAAAAAQAAAqKSBAAKAAAAAQAAAqqSBQAFAAAAAQAAArKSBwADAAAAAQAF\n"
+ "AACSCAADAAAAAQAAAACSCQADAAAAAQAPAACSCgAFAAAAAQAAArqgAAAHAAAABDAxMDCgAQADAAA\n"
+ "AAf//AACgAgAEAAAAAQAAAGSgAwAEAAAAAQAAAGSjAAAHAAAAAQMAAACjAQAHAAAAAQEAAACkAQ\n"
+ "ADAAAAAQAAAACkAgADAAAAAQAAAACkAwADAAAAAQAAAACkBgADAAAAAQAAAACkCAADAAAAAQAAA\n"
+ "ACkCQADAAAAAQAAAACkCgADAAAAAQAAAAAAAAAAAAAACgAAAZAAAAAcAAAACjIwMDc6MDE6MjAg\n"
+ "MjM6MDU6NTIAMjAwNzowMToyMCAyMzowNTo1MgAAAAAIAAAAAQAAAAAAAAAKAAAAMAAAABAAAAB\n"
+ "PAAAACgAAAAYBAwADAAAAAQAGAAABGgAFAAAAAQAAAxIBGwAFAAAAAQAAAxoBKAADAAAAAQACAA\n"
+ "ACAQAEAAAAAQAAAyICAgAEAAAAAQAACd0AAAAAAAAASAAAAAEAAABIAAAAAf/Y/+AAEEpGSUYAA\n"
+ "QIBAEgASAAA/+0ADEFkb2JlX0NNAAL/7gAOQWRvYmUAZIAAAAAB/9sAhAAMCAgICQgMCQkMEQsK\n"
+ "CxEVDwwMDxUYExMVExMYEQwMDAwMDBEMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAQ0LCw0\n"
+ "ODRAODhAUDg4OFBQODg4OFBEMDAwMDBERDAwMDAwMEQwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA\n"
+ "wMDAz/wAARCABkAGQDASIAAhEBAxEB/90ABAAH/8QBPwAAAQUBAQEBAQEAAAAAAAAAAwABAgQFB\n"
+ "gcICQoLAQABBQEBAQEBAQAAAAAAAAABAAIDBAUGBwgJCgsQAAEEAQMCBAIFBwYIBQMMMwEAAhED\n"
+ "BCESMQVBUWETInGBMgYUkaGxQiMkFVLBYjM0coLRQwclklPw4fFjczUWorKDJkSTVGRFwqN0Nhf\n"
+ "SVeJl8rOEw9N14/NGJ5SkhbSVxNTk9KW1xdXl9VZmdoaWprbG1ub2N0dXZ3eHl6e3x9fn9xEAAg\n"
+ "IBAgQEAwQFBgcHBgU1AQACEQMhMRIEQVFhcSITBTKBkRShsUIjwVLR8DMkYuFygpJDUxVjczTxJ\n"
+ "QYWorKDByY1wtJEk1SjF2RFVTZ0ZeLys4TD03Xj80aUpIW0lcTU5PSltcXV5fVWZnaGlqa2xtbm\n"
+ "9ic3R1dnd4eXp7fH/9oADAMBAAIRAxEAPwDy7bKNTUXNLz9EaJPDWMjxH4ozhtpYwaACT8ShaaW\n"
+ "bW0uEc9/JFfjj0Q4Hk/PRDxwX7y47W9z/AN9Cv4+O3ILK2DcRqT2CaSvEbcl1Jbz37KG1dBldLo\n"
+ "qaS4l9xGjG9v6yoDAdYIaIjUk+AREgo4y5sapirb8Yl0NHHdKvBNm4yA1o5Pc+SPEFvCWqB3HZF\n"
+ "Hj2SbWQ/afGFP0bHP8ATY0uc4w1o1JPkkimGiS2KvqlnmBkOZQTyydzgPMM9v8A0lp4v1Nx9gF1\n"
+ "tpdqJaGtH/S3I0i3lISXW/8AMqnd/O2bfg2eUkqVYf/Q8zuncO4Bj7lZ+n7f5Mj5KsJcY8NUZ4d\n"
+ "uEDVo1HkeU0rg3Om4H2rabCWUN7DQuK1n5FWKW4uCwG92gDRJBS6exhxmMboQI+Cv4WFTQ42Bs2\n"
+ "fvnkkqEmy2YxoMMbpVzaz6jt+RbpHZs8lzkHqrasKkYOKP0jgDfZ4N/wDM1tNrcWfSPmRyq9uNV\n"
+ "DnFg2s97i7UkjxKVrq0eVz3spZsja+ASDzwsh9jnOk/JFzb3XZD3v1c4yT8UACTCniKDUnKz5Nj\n"
+ "G33XV1DV73BrT8dF23SejV4zg9g33cOsPb+SxVvqv9ViwNy8vS0iWs/daf8A0Y5dpTi1sADGxCR\n"
+ "K1o0YBEmInlXWYbDBcDLdPJXa8f71Yrx2jnUoAqLnfZK5hJaW2vdwEk5a/wD/0fN6Ia/e76IiVf\n"
+ "xavUL7CPpnT4LNbYXAVjuQt/AqDmNYO/Kjnoy4hr5J8SwMhrRMaeSvbsxrfUazcOw4UX0Cisem2\n"
+ "SBoD4+Kz8nC6llbSLCRrubJA8kwUWbUDa29X1PMa7aQWjuDC0MXMdbDbhI7eazBiUfZ6GOYRe1s\n"
+ "WvGgJ8Vbw2+m4Bx9s6JpNHuuGo1FF53r/SHYua61gLse0lzXeBP5rkvqx0o5vVWz7WY49QkiQSP\n"
+ "oN/tLoevW/ogxv0HA7tJ0AnhT+pdDGYVl/wCdcTPkGn2NU0JWNWvlgAbHV6fEqdu2gR/r2WlWwt\n"
+ "AA5VXAEsLXTqJafArQY5rRr9LiPBJiZsZCI1pJjxCi0j4oncSICSkWwzwkjeaSch//0vO7sP7Lm\n"
+ "enO9ogtd5FbPT3Q5pCpZVc4ld3Lmn3O8j9EI2BYdunKjOobMQIyI+rusc2wx4d0eutwGnHh/uQc\n"
+ "Ha7ladj6mVANGvcqOgz0Go7HJ12/GEHcwvB/dPY6ImbbaMaASGuIBjkN7qofs9Ubg9g7OI9p/t/\n"
+ "RTSmhTHr0v6eSz6UgCPP2/wAVu9Ex2V49dVY2iACB4BZeVXQ/AJ3gzGnnOi2+kACpru8flUsNmt\n"
+ "zHRf6xfWCnoeAfTh2ZaQKazx/Ke7+QxcKz61fWA2uuObaC4zGhaPJrXBL64ZFmR124O09ENraPK\n"
+ "N3/AH5GqxIrZVUyp2K2vfdkENsDnxuex9m4Ox9n82xSgNd9D+p/XR1npgseR9ppOy4Dx/NfH/CL\n"
+ "oQJGunmvMv8AFq3KHVcq3HkYQbD2nuSf0I/rMavSg6TLjLigQhJ7Z58v9QkmlsTOqSCn/9PzL7R\n"
+ "d6Qq3n0wZ2zotXpT9xLfFYvkr/S7jXeB8E0jRkhKpC3q8LcJ/kmCrTnkuAPCq4do9Q/ytVbuAeY\n"
+ "Gg5lQybQK+82GBqEQUA1kOHPYf3LLsoyN36G5w8iUfHxepbXE2l0cApALgLHzBq9UxhTXU5hMC1\n"
+ "ktnSCup6S4Ctk+C5XqVGcaHPfuiuHkeTTuWz0+9zaKiH6CC0/yXBSQ2a/MxojV57634rq+v2PLY\n"
+ "be1r2nsYG13/AFKxbfCBMcr0brGAzrGEwCG31ncx0SfBzf7S4+zoHUWWsJq3hz9oLfcBH77R9H+\n"
+ "0pA13u/qPgDp/Q6ri39JlfpXkDx+h/msWn1L6wdO6bSbcrIbU2Q0xLnSe21kuVejJspbVS5+4bd\n"
+ "ocBAkD/orG+tP1ar67Wy7GtZTm1SCXfRsb+a18fRe38x6SG3/44H1Z3f0y2I+l6DoSXD/8xPrDs\n"
+ "3enVu3bdnqN3R+//USSVo//1PLohhce+gRWS0Nsby3lRgFkKxQyW7SgUh3em5Tbq2uB9wWw1wey\n"
+ "J1XGV2XYdm5k7e4WzidXY9oMwo5RZ4T6Hd1ixwfp96PWbAJBVTHzK7O6Ky5oJB1HZMqmUEFlkGy\n"
+ "xpa4zI1Hkq31dy7bMN9BAc3HeWAnnbyxEycmuup1jiAGglZ31PyrmZ9tQg1WtNj54EHR3/S2qTH\n"
+ "1Yc5GgD1FFtzPdWGkd2AyflogZmRmsz6PSrbXbdo+txOrP337f3fzVo15DK2uyrTtqpBOnBKx6b\n"
+ "7MjJsz7tHWOAYP3WD6LU6cqGjFCNl1MmvLcxv6YtDTLSAqP27LrdtYHXFnJZI+Tp3MWg68OpDPv\n"
+ "UMUM2lkQBoouKQ6swjE9Nml+1sz1PW+z6xt27zuj+skrX2ZvqR5z8kkuOfdPt43/1fMm/grFG6f\n"
+ "Lss9JA7JG7tnZs/SfJUrfS3foJ9TvHCopJsV8nWx/t24bJn8Fo/5TjWJXMJIS+i+G36TsZ/7Q9P\n"
+ "8ATfzfeOFofVSZv2/zvt+O3X/v65dJPjt/BiyfN1/wn0zre79nVej/ADG8ep4x2/6Srjd6TdviF\n"
+ "52ko8m6/Ht9X1KnftEo+POwxzK8mSTF46vrH6T1/OEl5Okkl//Z/+0uHFBob3Rvc2hvcCAzLjAA\n"
+ "OEJJTQQEAAAAAAArHAIAAAIAAhwCeAAfICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAA\n"
+ "4QklNBCUAAAAAABD7Caa9B0wqNp2P4sxXqayFOEJJTQPqAAAAAB2wPD94bWwgdmVyc2lvbj0iMS\n"
+ "4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPCFET0NUWVBFIHBsaXN0IFBVQkxJQyAiLS8vQXBwbGUgQ\n"
+ "29tcHV0ZXIvL0RURCBQTElTVCAxLjAvL0VOIiAiaHR0cDovL3d3dy5hcHBsZS5jb20vRFREcy9Q\n"
+ "cm9wZXJ0eUxpc3QtMS4wLmR0ZCI+CjxwbGlzdCB2ZXJzaW9uPSIxLjAiPgo8ZGljdD4KCTxrZXk\n"
+ "+Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1Ib3Jpem9udGFsUmVzPC9rZXk+Cgk8ZGljdD\n"
+ "4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY3JlYXRvcjwva2V5PgoJCTxzdHJpbmc+Y\n"
+ "29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW5nPgoJCTxrZXk+Y29tLmFwcGxlLnByaW50\n"
+ "LnRpY2tldC5pdGVtQXJyYXk8L2tleT4KCQk8YXJyYXk+CgkJCTxkaWN0PgoJCQkJPGtleT5jb20\n"
+ "uYXBwbGUucHJpbnQuUGFnZUZvcm1hdC5QTUhvcml6b250YWxSZXM8L2tleT4KCQkJCTxyZWFsPj\n"
+ "cyPC9yZWFsPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNsaWVudDwva2V5PgoJC\n"
+ "QkJPHN0cmluZz5jb20uYXBwbGUucHJpbnRpbmdtYW5hZ2VyPC9zdHJpbmc+CgkJCQk8a2V5PmNv\n"
+ "bS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5PgoJCQkJPGRhdGU+MjAwNy0wMS0zMFQ\n"
+ "yMjowODo0MVo8L2RhdGU+CgkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuc3RhdGVGbG\n"
+ "FnPC9rZXk+CgkJCQk8aW50ZWdlcj4wPC9pbnRlZ2VyPgoJCQk8L2RpY3Q+CgkJPC9hcnJheT4KC\n"
+ "TwvZGljdD4KCTxrZXk+Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1PcmllbnRhdGlvbjwv\n"
+ "a2V5PgoJPGRpY3Q+CgkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4\n"
+ "KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4KCQk8a2V5PmNvbS\n"
+ "5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+CgkJPGFycmF5PgoJCQk8ZGljdD4KC\n"
+ "QkJCTxrZXk+Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1PcmllbnRhdGlvbjwva2V5PgoJ\n"
+ "CQkJPGludGVnZXI+MTwvaW50ZWdlcj4KCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2tldC5\n"
+ "jbGllbnQ8L2tleT4KCQkJCTxzdHJpbmc+Y29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW\n"
+ "5nPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0Lm1vZERhdGU8L2tleT4KCQkJCTxkY\n"
+ "XRlPjIwMDctMDEtMzBUMjI6MDg6NDFaPC9kYXRlPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQu\n"
+ "dGlja2V0LnN0YXRlRmxhZzwva2V5PgoJCQkJPGludGVnZXI+MDwvaW50ZWdlcj4KCQkJPC9kaWN\n"
+ "0PgoJCTwvYXJyYXk+Cgk8L2RpY3Q+Cgk8a2V5PmNvbS5hcHBsZS5wcmludC5QYWdlRm9ybWF0Ll\n"
+ "BNU2NhbGluZzwva2V5PgoJPGRpY3Q+CgkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZ\n"
+ "WF0b3I8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4K\n"
+ "CQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+CgkJPGFycmF5Pgo\n"
+ "JCQk8ZGljdD4KCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1TY2FsaW5nPC\n"
+ "9rZXk+CgkJCQk8cmVhbD4xPC9yZWFsPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0L\n"
+ "mNsaWVudDwva2V5PgoJCQkJPHN0cmluZz5jb20uYXBwbGUucHJpbnRpbmdtYW5hZ2VyPC9zdHJp\n"
+ "bmc+CgkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5PgoJCQkJPGR\n"
+ "hdGU+MjAwNy0wMS0zMFQyMjowODo0MVo8L2RhdGU+CgkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC\n"
+ "50aWNrZXQuc3RhdGVGbGFnPC9rZXk+CgkJCQk8aW50ZWdlcj4wPC9pbnRlZ2VyPgoJCQk8L2RpY\n"
+ "3Q+CgkJPC9hcnJheT4KCTwvZGljdD4KCTxrZXk+Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQu\n"
+ "UE1WZXJ0aWNhbFJlczwva2V5PgoJPGRpY3Q+CgkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V\n"
+ "0LmNyZWF0b3I8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0cm\n"
+ "luZz4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+CgkJPGFyc\n"
+ "mF5PgoJCQk8ZGljdD4KCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1WZXJ0\n"
+ "aWNhbFJlczwva2V5PgoJCQkJPHJlYWw+NzI8L3JlYWw+CgkJCQk8a2V5PmNvbS5hcHBsZS5wcml\n"
+ "udC50aWNrZXQuY2xpZW50PC9rZXk+CgkJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbm\n"
+ "FnZXI8L3N0cmluZz4KCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2tldC5tb2REYXRlPC9rZ\n"
+ "Xk+CgkJCQk8ZGF0ZT4yMDA3LTAxLTMwVDIyOjA4OjQxWjwvZGF0ZT4KCQkJCTxrZXk+Y29tLmFw\n"
+ "cGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCTxpbnRlZ2VyPjA8L2ludGVnZXI\n"
+ "+CgkJCTwvZGljdD4KCQk8L2FycmF5PgoJPC9kaWN0PgoJPGtleT5jb20uYXBwbGUucHJpbnQuUG\n"
+ "FnZUZvcm1hdC5QTVZlcnRpY2FsU2NhbGluZzwva2V5PgoJPGRpY3Q+CgkJPGtleT5jb20uYXBwb\n"
+ "GUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGlu\n"
+ "Z21hbmFnZXI8L3N0cmluZz4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF\n"
+ "5PC9rZXk+CgkJPGFycmF5PgoJCQk8ZGljdD4KCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LlBhZ2\n"
+ "VGb3JtYXQuUE1WZXJ0aWNhbFNjYWxpbmc8L2tleT4KCQkJCTxyZWFsPjE8L3JlYWw+CgkJCQk8a\n"
+ "2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY2xpZW50PC9rZXk+CgkJCQk8c3RyaW5nPmNvbS5h\n"
+ "cHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4KCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LnR\n"
+ "pY2tldC5tb2REYXRlPC9rZXk+CgkJCQk8ZGF0ZT4yMDA3LTAxLTMwVDIyOjA4OjQxWjwvZGF0ZT\n"
+ "4KCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCTxpb\n"
+ "nRlZ2VyPjA8L2ludGVnZXI+CgkJCTwvZGljdD4KCQk8L2FycmF5PgoJPC9kaWN0PgoJPGtleT5j\n"
+ "b20uYXBwbGUucHJpbnQuc3ViVGlja2V0LnBhcGVyX2luZm9fdGlja2V0PC9rZXk+Cgk8ZGljdD4\n"
+ "KCQk8a2V5PmNvbS5hcHBsZS5wcmludC5QYWdlRm9ybWF0LlBNQWRqdXN0ZWRQYWdlUmVjdDwva2\n"
+ "V5PgoJCTxkaWN0PgoJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY3JlYXRvcjwva2V5P\n"
+ "goJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4KCQkJPGtleT5j\n"
+ "b20uYXBwbGUucHJpbnQudGlja2V0Lml0ZW1BcnJheTwva2V5PgoJCQk8YXJyYXk+CgkJCQk8ZGl\n"
+ "jdD4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC5QYWdlRm9ybWF0LlBNQWRqdXN0ZWRQYWdlUm\n"
+ "VjdDwva2V5PgoJCQkJCTxhcnJheT4KCQkJCQkJPHJlYWw+MC4wPC9yZWFsPgoJCQkJCQk8cmVhb\n"
+ "D4wLjA8L3JlYWw+CgkJCQkJCTxyZWFsPjczNDwvcmVhbD4KCQkJCQkJPHJlYWw+NTc2PC9yZWFs\n"
+ "PgoJCQkJCTwvYXJyYXk+CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNsaWVudDw\n"
+ "va2V5PgoJCQkJCTxzdHJpbmc+Y29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW5nPgoJCQ\n"
+ "kJCTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2tldC5tb2REYXRlPC9rZXk+CgkJCQkJPGRhdGU+M\n"
+ "jAwNy0wMS0zMFQyMjowODo0MVo8L2RhdGU+CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlj\n"
+ "a2V0LnN0YXRlRmxhZzwva2V5PgoJCQkJCTxpbnRlZ2VyPjA8L2ludGVnZXI+CgkJCQk8L2RpY3Q\n"
+ "+CgkJCTwvYXJyYXk+CgkJPC9kaWN0PgoJCTxrZXk+Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYX\n"
+ "QuUE1BZGp1c3RlZFBhcGVyUmVjdDwva2V5PgoJCTxkaWN0PgoJCQk8a2V5PmNvbS5hcHBsZS5wc\n"
+ "mludC50aWNrZXQuY3JlYXRvcjwva2V5PgoJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21h\n"
+ "bmFnZXI8L3N0cmluZz4KCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0Lml0ZW1BcnJheTw\n"
+ "va2V5PgoJCQk8YXJyYXk+CgkJCQk8ZGljdD4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC5QYW\n"
+ "dlRm9ybWF0LlBNQWRqdXN0ZWRQYXBlclJlY3Q8L2tleT4KCQkJCQk8YXJyYXk+CgkJCQkJCTxyZ\n"
+ "WFsPi0xODwvcmVhbD4KCQkJCQkJPHJlYWw+LTE4PC9yZWFsPgoJCQkJCQk8cmVhbD43NzQ8L3Jl\n"
+ "YWw+CgkJCQkJCTxyZWFsPjU5NDwvcmVhbD4KCQkJCQk8L2FycmF5PgoJCQkJCTxrZXk+Y29tLmF\n"
+ "wcGxlLnByaW50LnRpY2tldC5jbGllbnQ8L2tleT4KCQkJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcm\n"
+ "ludGluZ21hbmFnZXI8L3N0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQub\n"
+ "W9kRGF0ZTwva2V5PgoJCQkJCTxkYXRlPjIwMDctMDEtMzBUMjI6MDg6NDFaPC9kYXRlPgoJCQkJ\n"
+ "CTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCQk8aW50ZWd\n"
+ "lcj4wPC9pbnRlZ2VyPgoJCQkJPC9kaWN0PgoJCQk8L2FycmF5PgoJCTwvZGljdD4KCQk8a2V5Pm\n"
+ "NvbS5hcHBsZS5wcmludC5QYXBlckluZm8uUE1QYXBlck5hbWU8L2tleT4KCQk8ZGljdD4KCQkJP\n"
+ "GtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4KCQkJPHN0cmluZz5jb20u\n"
+ "YXBwbGUucHJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5nPgoJCQk8a2V5PmNvbS5hcHBsZS5wcml\n"
+ "udC50aWNrZXQuaXRlbUFycmF5PC9rZXk+CgkJCTxhcnJheT4KCQkJCTxkaWN0PgoJCQkJCTxrZX\n"
+ "k+Y29tLmFwcGxlLnByaW50LlBhcGVySW5mby5QTVBhcGVyTmFtZTwva2V5PgoJCQkJCTxzdHJpb\n"
+ "mc+bmEtbGV0dGVyPC9zdHJpbmc+CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNs\n"
+ "aWVudDwva2V5PgoJCQkJCTxzdHJpbmc+Y29tLmFwcGxlLnByaW50LnBtLlBvc3RTY3JpcHQ8L3N\n"
+ "0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5PgoJCQ\n"
+ "kJCTxkYXRlPjIwMDMtMDctMDFUMTc6NDk6MzZaPC9kYXRlPgoJCQkJCTxrZXk+Y29tLmFwcGxlL\n"
+ "nByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCQk8aW50ZWdlcj4xPC9pbnRlZ2VyPgoJ\n"
+ "CQkJPC9kaWN0PgoJCQk8L2FycmF5PgoJCTwvZGljdD4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC5\n"
+ "QYXBlckluZm8uUE1VbmFkanVzdGVkUGFnZVJlY3Q8L2tleT4KCQk8ZGljdD4KCQkJPGtleT5jb2\n"
+ "0uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4KCQkJPHN0cmluZz5jb20uYXBwbGUuc\n"
+ "HJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5nPgoJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNr\n"
+ "ZXQuaXRlbUFycmF5PC9rZXk+CgkJCTxhcnJheT4KCQkJCTxkaWN0PgoJCQkJCTxrZXk+Y29tLmF\n"
+ "wcGxlLnByaW50LlBhcGVySW5mby5QTVVuYWRqdXN0ZWRQYWdlUmVjdDwva2V5PgoJCQkJCTxhcn\n"
+ "JheT4KCQkJCQkJPHJlYWw+MC4wPC9yZWFsPgoJCQkJCQk8cmVhbD4wLjA8L3JlYWw+CgkJCQkJC\n"
+ "TxyZWFsPjczNDwvcmVhbD4KCQkJCQkJPHJlYWw+NTc2PC9yZWFsPgoJCQkJCTwvYXJyYXk+CgkJ\n"
+ "CQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNsaWVudDwva2V5PgoJCQkJCTxzdHJpbmc\n"
+ "+Y29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW5nPgoJCQkJCTxrZXk+Y29tLmFwcGxlLn\n"
+ "ByaW50LnRpY2tldC5tb2REYXRlPC9rZXk+CgkJCQkJPGRhdGU+MjAwNy0wMS0zMFQyMjowODo0M\n"
+ "Vo8L2RhdGU+CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LnN0YXRlRmxhZzwva2V5\n"
+ "PgoJCQkJCTxpbnRlZ2VyPjA8L2ludGVnZXI+CgkJCQk8L2RpY3Q+CgkJCTwvYXJyYXk+CgkJPC9\n"
+ "kaWN0PgoJCTxrZXk+Y29tLmFwcGxlLnByaW50LlBhcGVySW5mby5QTVVuYWRqdXN0ZWRQYXBlcl\n"
+ "JlY3Q8L2tleT4KCQk8ZGljdD4KCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0b\n"
+ "3I8L2tleT4KCQkJPHN0cmluZz5jb20uYXBwbGUucHJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5n\n"
+ "PgoJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+CgkJCTxhcnJ\n"
+ "heT4KCQkJCTxkaWN0PgoJCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LlBhcGVySW5mby5QTVVuYW\n"
+ "RqdXN0ZWRQYXBlclJlY3Q8L2tleT4KCQkJCQk8YXJyYXk+CgkJCQkJCTxyZWFsPi0xODwvcmVhb\n"
+ "D4KCQkJCQkJPHJlYWw+LTE4PC9yZWFsPgoJCQkJCQk8cmVhbD43NzQ8L3JlYWw+CgkJCQkJCTxy\n"
+ "ZWFsPjU5NDwvcmVhbD4KCQkJCQk8L2FycmF5PgoJCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LnR\n"
+ "pY2tldC5jbGllbnQ8L2tleT4KCQkJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZX\n"
+ "I8L3N0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5P\n"
+ "goJCQkJCTxkYXRlPjIwMDctMDEtMzBUMjI6MDg6NDFaPC9kYXRlPgoJCQkJCTxrZXk+Y29tLmFw\n"
+ "cGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCQk8aW50ZWdlcj4wPC9pbnRlZ2V\n"
+ "yPgoJCQkJPC9kaWN0PgoJCQk8L2FycmF5PgoJCTwvZGljdD4KCQk8a2V5PmNvbS5hcHBsZS5wcm\n"
+ "ludC5QYXBlckluZm8ucHBkLlBNUGFwZXJOYW1lPC9rZXk+CgkJPGRpY3Q+CgkJCTxrZXk+Y29tL\n"
+ "mFwcGxlLnByaW50LnRpY2tldC5jcmVhdG9yPC9rZXk+CgkJCTxzdHJpbmc+Y29tLmFwcGxlLnBy\n"
+ "aW50LnBtLlBvc3RTY3JpcHQ8L3N0cmluZz4KCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V\n"
+ "0Lml0ZW1BcnJheTwva2V5PgoJCQk8YXJyYXk+CgkJCQk8ZGljdD4KCQkJCQk8a2V5PmNvbS5hcH\n"
+ "BsZS5wcmludC5QYXBlckluZm8ucHBkLlBNUGFwZXJOYW1lPC9rZXk+CgkJCQkJPHN0cmluZz5VU\n"
+ "yBMZXR0ZXI8L3N0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY2xpZW50\n"
+ "PC9rZXk+CgkJCQkJPHN0cmluZz5jb20uYXBwbGUucHJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5\n"
+ "nPgoJCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2tldC5tb2REYXRlPC9rZXk+CgkJCQkJPG\n"
+ "RhdGU+MjAwMy0wNy0wMVQxNzo0OTozNlo8L2RhdGU+CgkJCQkJPGtleT5jb20uYXBwbGUucHJpb\n"
+ "nQudGlja2V0LnN0YXRlRmxhZzwva2V5PgoJCQkJCTxpbnRlZ2VyPjE8L2ludGVnZXI+CgkJCQk8\n"
+ "L2RpY3Q+CgkJCTwvYXJyYXk+CgkJPC9kaWN0PgoJCTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2t\n"
+ "ldC5BUElWZXJzaW9uPC9rZXk+CgkJPHN0cmluZz4wMC4yMDwvc3RyaW5nPgoJCTxrZXk+Y29tLm\n"
+ "FwcGxlLnByaW50LnRpY2tldC5wcml2YXRlTG9jazwva2V5PgoJCTxmYWxzZS8+CgkJPGtleT5jb\n"
+ "20uYXBwbGUucHJpbnQudGlja2V0LnR5cGU8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmlu\n"
+ "dC5QYXBlckluZm9UaWNrZXQ8L3N0cmluZz4KCTwvZGljdD4KCTxrZXk+Y29tLmFwcGxlLnByaW5\n"
+ "0LnRpY2tldC5BUElWZXJzaW9uPC9rZXk+Cgk8c3RyaW5nPjAwLjIwPC9zdHJpbmc+Cgk8a2V5Pm\n"
+ "NvbS5hcHBsZS5wcmludC50aWNrZXQucHJpdmF0ZUxvY2s8L2tleT4KCTxmYWxzZS8+Cgk8a2V5P\n"
+ "mNvbS5hcHBsZS5wcmludC50aWNrZXQudHlwZTwva2V5PgoJPHN0cmluZz5jb20uYXBwbGUucHJp\n"
+ "bnQuUGFnZUZvcm1hdFRpY2tldDwvc3RyaW5nPgo8L2RpY3Q+CjwvcGxpc3Q+CjhCSU0D6QAAAAA\n"
+ "AeAADAAAASABIAAAAAALeAkD/7v/uAwYCUgNnBSgD/AACAAAASABIAAAAAALYAigAAQAAAGQAAA\n"
+ "ABAAMDAwAAAAF//wABAAEAAAAAAAAAAAAAAABoCAAZAZAAAAAAACAAAAAAAAAAAAAAAAAAAAAAA\n"
+ "AAAAAAAAAAAADhCSU0D7QAAAAAAEABIAAAAAQABAEgAAAABAAE4QklNBCYAAAAAAA4AAAAAAAAA\n"
+ "AAAAP4AAADhCSU0EDQAAAAAABAAAAB44QklNBBkAAAAAAAQAAAAeOEJJTQPzAAAAAAAJAAAAAAA\n"
+ "AAAABADhCSU0ECgAAAAAAAQAAOEJJTScQAAAAAAAKAAEAAAAAAAAAAThCSU0D9QAAAAAASAAvZm\n"
+ "YAAQBsZmYABgAAAAAAAQAvZmYAAQChmZoABgAAAAAAAQAyAAAAAQBaAAAABgAAAAAAAQA1AAAAA\n"
+ "QAtAAAABgAAAAAAAThCSU0D+AAAAAAAcAAA/////////////////////////////wPoAAAAAP//\n"
+ "//////////////////////////8D6AAAAAD/////////////////////////////A+gAAAAA///\n"
+ "//////////////////////////wPoAAA4QklNBAgAAAAAABAAAAABAAACQAAAAkAAAAAAOEJJTQ\n"
+ "QeAAAAAAAEAAAAADhCSU0EGgAAAAADRQAAAAYAAAAAAAAAAAAAAGQAAABkAAAACABEAFMAQwAwA\n"
+ "DIAMwAyADUAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAGQAAABkAAAAAAAAAAAA\n"
+ "AAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAEAAAAAAABudWxsAAAAAgAAAAZib3VuZHN\n"
+ "PYmpjAAAAAQAAAAAAAFJjdDEAAAAEAAAAAFRvcCBsb25nAAAAAAAAAABMZWZ0bG9uZwAAAAAAAA\n"
+ "AAQnRvbWxvbmcAAABkAAAAAFJnaHRsb25nAAAAZAAAAAZzbGljZXNWbExzAAAAAU9iamMAAAABA\n"
+ "AAAAAAFc2xpY2UAAAASAAAAB3NsaWNlSURsb25nAAAAAAAAAAdncm91cElEbG9uZwAAAAAAAAAG\n"
+ "b3JpZ2luZW51bQAAAAxFU2xpY2VPcmlnaW4AAAANYXV0b0dlbmVyYXRlZAAAAABUeXBlZW51bQA\n"
+ "AAApFU2xpY2VUeXBlAAAAAEltZyAAAAAGYm91bmRzT2JqYwAAAAEAAAAAAABSY3QxAAAABAAAAA\n"
+ "BUb3AgbG9uZwAAAAAAAAAATGVmdGxvbmcAAAAAAAAAAEJ0b21sb25nAAAAZAAAAABSZ2h0bG9uZ\n"
+ "wAAAGQAAAADdXJsVEVYVAAAAAEAAAAAAABudWxsVEVYVAAAAAEAAAAAAABNc2dlVEVYVAAAAAEA\n"
+ "AAAAAAZhbHRUYWdURVhUAAAAAQAAAAAADmNlbGxUZXh0SXNIVE1MYm9vbAEAAAAIY2VsbFRleHR\n"
+ "URVhUAAAAAQAAAAAACWhvcnpBbGlnbmVudW0AAAAPRVNsaWNlSG9yekFsaWduAAAAB2RlZmF1bH\n"
+ "QAAAAJdmVydEFsaWduZW51bQAAAA9FU2xpY2VWZXJ0QWxpZ24AAAAHZGVmYXVsdAAAAAtiZ0Nvb\n"
+ "G9yVHlwZWVudW0AAAARRVNsaWNlQkdDb2xvclR5cGUAAAAATm9uZQAAAAl0b3BPdXRzZXRsb25n\n"
+ "AAAAAAAAAApsZWZ0T3V0c2V0bG9uZwAAAAAAAAAMYm90dG9tT3V0c2V0bG9uZwAAAAAAAAALcml\n"
+ "naHRPdXRzZXRsb25nAAAAAAA4QklNBBEAAAAAAAEBADhCSU0EFAAAAAAABAAAAAE4QklNBAwAAA\n"
+ "AACfkAAAABAAAAZAAAAGQAAAEsAAB1MAAACd0AGAAB/9j/4AAQSkZJRgABAgEASABIAAD/7QAMQ\n"
+ "WRvYmVfQ00AAv/uAA5BZG9iZQBkgAAAAAH/2wCEAAwICAgJCAwJCQwRCwoLERUPDAwPFRgTExUT\n"
+ "ExgRDAwMDAwMEQwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwBDQsLDQ4NEA4OEBQODg4UFA4\n"
+ "ODg4UEQwMDAwMEREMDAwMDAwRDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDP/AABEIAGQAZA\n"
+ "MBIgACEQEDEQH/3QAEAAf/xAE/AAABBQEBAQEBAQAAAAAAAAADAAECBAUGBwgJCgsBAAEFAQEBA\n"
+ "QEBAAAAAAAAAAEAAgMEBQYHCAkKCxAAAQQBAwIEAgUHBggFAwwzAQACEQMEIRIxBUFRYRMicYEy\n"
+ "BhSRobFCIyQVUsFiMzRygtFDByWSU/Dh8WNzNRaisoMmRJNUZEXCo3Q2F9JV4mXys4TD03Xj80Y\n"
+ "nlKSFtJXE1OT0pbXF1eX1VmZ2hpamtsbW5vY3R1dnd4eXp7fH1+f3EQACAgECBAQDBAUGBwcGBT\n"
+ "UBAAIRAyExEgRBUWFxIhMFMoGRFKGxQiPBUtHwMyRi4XKCkkNTFWNzNPElBhaisoMHJjXC0kSTV\n"
+ "KMXZEVVNnRl4vKzhMPTdePzRpSkhbSVxNTk9KW1xdXl9VZmdoaWprbG1ub2JzdHV2d3h5ent8f/\n"
+ "2gAMAwEAAhEDEQA/APLtso1NRc0vP0Rok8NYyPEfijOG2ljBoAJPxKFppZtbS4Rz38kV+OPRDge\n"
+ "T89EPHBfvLjtb3P8A30K/j47cgsrYNxGpPYJpK8RtyXUlvPfsobV0GV0uippLiX3EaMb2/rKgMB\n"
+ "1ghoiNST4BESCjjLmxqmKtvxiXQ0cd0q8E2bjIDWjk9z5I8QW8JaoHcdkUePZJtZD9p8YU/Rsc/\n"
+ "wBNjS5zjDWjUk+SSKYaJLYq+qWeYGQ5lBPLJ3OA8wz2/wDSWni/U3H2AXW2l2oloa0f9LcjSLeU\n"
+ "hJdb/wAyqd387Zt+DZ5SSpVh/9DzO6dw7gGPuVn6ft/kyPkqwlxjw1Rnh24QNWjUeR5TSuDc6bg\n"
+ "fatpsJZQ3sNC4rWfkVYpbi4LAb3aANEkFLp7GHGYxuhAj4K/hYVNDjYGzZ++eSSoSbLZjGgwxul\n"
+ "XNrPqO35FukdmzyXOQeqtqwqRg4o/SOAN9ng3/AMzW02txZ9I+ZHKr241UOcWDaz3uLtSSPEpWu\n"
+ "rR5XPeylmyNr4BIPPCyH2Oc6T8kXNvddkPe/VzjJPxQAJMKeIoNScrPk2MbfddXUNXvcGtPx0Xb\n"
+ "dJ6NXjOD2Dfdw6w9v5LFW+q/1WLA3Ly9LSJaz91p/wDRjl2lOLWwAMbEJErWjRgESYieVdZhsMF\n"
+ "wMt08ldrx/vVivHaOdSgCoud9krmElpba93ASTlr/AP/R83ohr97voiJV/Fq9QvsI+mdPgs1thc\n"
+ "BWO5C38CoOY1g78qOejLiGvknxLAyGtExp5K9uzGt9RrNw7DhRfQKKx6bZIGgPj4rPycLqWVtIs\n"
+ "JGu5skDyTBRZtQNrb1fU8xrtpBaO4MLQxcx1sNuEjt5rMGJR9noY5hF7Wxa8aAnxVvDb6bgHH2z\n"
+ "omk0e64ajUUXnev9Idi5rrWAux7SXNd4E/muS+rHSjm9VbPtZjj1CSJBI+g3+0uh69b+iDG/QcD\n"
+ "u0nQCeFP6l0MZhWX/AJ1xM+QafY1TQlY1a+WABsdXp8Sp27aBH+vZaVbC0ADlVcASwtdOolp8Ct\n"
+ "BjmtGv0uI8EmJmxkIjWkmPEKLSPiidxIgJKRbDPCSN5pJyH//S87uw/suZ6c72iC13kVs9PdDmk\n"
+ "KllVziV3cuafc7yP0QjYFh26cqM6hsxAjIj6u6xzbDHh3R663AaceH+5BwdruVp2PqZUA0a9yo6\n"
+ "DPQajscnXb8YQdzC8H909joiZttoxoBIa4gGOQ3uqh+z1RuD2Ds4j2n+39FNKaFMevS/p5LPpSA\n"
+ "I8/b/ABW70THZXj11VjaIAIHgFl5VdD8AneDMaec6Lb6QAKmu7x+VSw2a3MdF/rF9YKeh4B9OHZ\n"
+ "lpAprPH8p7v5DFwrPrV9YDa645toLjMaFo8mtcEvrhkWZHXbg7T0Q2to8o3f8AfkarEitlVTKnY\n"
+ "ra992QQ2wOfG57H2bg7H2fzbFKA130P6n9dHWemCx5H2mk7LgPH818f8IuhAka6ea8y/wAWrcod\n"
+ "VyrceRhBsPae5J/Qj+sxq9KDpMuMuKBCEntnny/1CSaWxM6pIKf/0/MvtF3pCrefTBnbOi1elP3\n"
+ "Et8Vi+Sv9LuNd4HwTSNGSEqkLerwtwn+SYKtOeS4A8Krh2j1D/K1Vu4B5gaDmVDJtAr7zYYGoRB\n"
+ "QDWQ4c9h/csuyjI3fobnDyJR8fF6ltcTaXRwCkAuAsfMGr1TGFNdTmEwLWS2dIK6npLgK2T4Lle\n"
+ "pUZxoc9+6K4eR5NO5bPT73NoqIfoILT/JcFJDZr8zGiNXnvrfiur6/Y8tht7WvaexgbXf8AUrFt\n"
+ "8IExyvRusYDOsYTAIbfWdzHRJ8HN/tLj7OgdRZawmreHP2gt9wEfvtH0f7SkDXe7+o+AOn9DquL\n"
+ "f0mV+leQPH6H+axafUvrB07ptJtyshtTZDTEudJ7bWS5V6MmyltVLn7ht2hwECQP+isb60/Vqvr\n"
+ "tbLsa1lObVIJd9Gxv5rXx9F7fzHpIbf/jgfVnd/TLYj6XoOhJcP/zE+sOzd6dW7dt2eo3dH7/9R\n"
+ "JJWj//U8uiGFx76BFZLQ2xvLeVGAWQrFDJbtKBSHd6blNura4H3BbDXB7InVcZXZdh2bmTt7hbO\n"
+ "J1dj2gzCjlFnhPod3WLHB+n3o9ZsAkFVMfMrs7orLmgkHUdkyqZQQWWQbLGlrjMjUeSrfV3Ltsw\n"
+ "30EBzcd5YCedvLETJya66nWOIAaCVnfU/KuZn21CDVa02PngQdHf9LapMfVhzkaAPUUW3M91YaR\n"
+ "3YDJ+WiBmZGazPo9Kttdt2j63E6s/fft/d/NWjXkMra7KtO2qkE6cErHpvsyMmzPu0dY4Bg/dYP\n"
+ "otTpyoaMUI2XUya8tzG/pi0NMtICo/bsut21gdcWclkj5OncxaDrw6kM+9QxQzaWRAGii4pDqzC\n"
+ "MT02aX7WzPU9b7PrG3bvO6P6yStfZm+pHnPySS4590+3jf/V8yb+CsUbp8uyz0kDskbu2dmz9J8\n"
+ "lSt9Ld+gn1O8cKikmxXydbH+3bhsmfwWj/lONYlcwkhL6L4bfpOxn/tD0/wBN/N944Wh9VJm/b/\n"
+ "O+347df+/rl0k+O38GLJ83X/CfTOt7v2dV6P8AMbx6njHb/pKuN3pN2+IXnaSjybr8e31fUqd+0\n"
+ "Sj487DHMryZJMXjq+sfpPX84SXk6SSX/9kAOEJJTQQhAAAAAABVAAAAAQEAAAAPAEEAZABvAGIA\n"
+ "ZQAgAFAAaABvAHQAbwBzAGgAbwBwAAAAEwBBAGQAbwBiAGUAIABQAGgAbwB0AG8AcwBoAG8AcAA\n"
+ "gADcALgAwAAAAAQA4QklNBAYAAAAAAAcABQAAAAEBAP/hFWdodHRwOi8vbnMuYWRvYmUuY29tL3\n"
+ "hhcC8xLjAvADw/eHBhY2tldCBiZWdpbj0n77u/JyBpZD0nVzVNME1wQ2VoaUh6cmVTek5UY3prY\n"
+ "zlkJz8+Cjw/YWRvYmUteGFwLWZpbHRlcnMgZXNjPSJDUiI/Pgo8eDp4YXBtZXRhIHhtbG5zOng9\n"
+ "J2Fkb2JlOm5zOm1ldGEvJyB4OnhhcHRrPSdYTVAgdG9vbGtpdCAyLjguMi0zMywgZnJhbWV3b3J\n"
+ "rIDEuNSc+CjxyZGY6UkRGIHhtbG5zOnJkZj0naHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi\n"
+ "1yZGYtc3ludGF4LW5zIycgeG1sbnM6aVg9J2h0dHA6Ly9ucy5hZG9iZS5jb20vaVgvMS4wLyc+C\n"
+ "gogPHJkZjpEZXNjcmlwdGlvbiBhYm91dD0ndXVpZDoyMmQwMmIwYS1iMjQ5LTExZGItOGFmOC05\n"
+ "MWQ1NDAzZjkyZjknCiAgeG1sbnM6cGRmPSdodHRwOi8vbnMuYWRvYmUuY29tL3BkZi8xLjMvJz4\n"
+ "KICA8IS0tIHBkZjpTdWJqZWN0IGlzIGFsaWFzZWQgLS0+CiA8L3JkZjpEZXNjcmlwdGlvbj4KCi\n"
+ "A8cmRmOkRlc2NyaXB0aW9uIGFib3V0PSd1dWlkOjIyZDAyYjBhLWIyNDktMTFkYi04YWY4LTkxZ\n"
+ "DU0MDNmOTJmOScKICB4bWxuczpwaG90b3Nob3A9J2h0dHA6Ly9ucy5hZG9iZS5jb20vcGhvdG9z\n"
+ "aG9wLzEuMC8nPgogIDwhLS0gcGhvdG9zaG9wOkNhcHRpb24gaXMgYWxpYXNlZCAtLT4KIDwvcmR\n"
+ "mOkRlc2NyaXB0aW9uPgoKIDxyZGY6RGVzY3JpcHRpb24gYWJvdXQ9J3V1aWQ6MjJkMDJiMGEtYj\n"
+ "I0OS0xMWRiLThhZjgtOTFkNTQwM2Y5MmY5JwogIHhtbG5zOnhhcD0naHR0cDovL25zLmFkb2JlL\n"
+ "mNvbS94YXAvMS4wLyc+CiAgPCEtLSB4YXA6RGVzY3JpcHRpb24gaXMgYWxpYXNlZCAtLT4KIDwv\n"
+ "cmRmOkRlc2NyaXB0aW9uPgoKIDxyZGY6RGVzY3JpcHRpb24gYWJvdXQ9J3V1aWQ6MjJkMDJiMGE\n"
+ "tYjI0OS0xMWRiLThhZjgtOTFkNTQwM2Y5MmY5JwogIHhtbG5zOnhhcE1NPSdodHRwOi8vbnMuYW\n"
+ "RvYmUuY29tL3hhcC8xLjAvbW0vJz4KICA8eGFwTU06RG9jdW1lbnRJRD5hZG9iZTpkb2NpZDpwa\n"
+ "G90b3Nob3A6MjJkMDJiMDYtYjI0OS0xMWRiLThhZjgtOTFkNTQwM2Y5MmY5PC94YXBNTTpEb2N1\n"
+ "bWVudElEPgogPC9yZGY6RGVzY3JpcHRpb24+CgogPHJkZjpEZXNjcmlwdGlvbiBhYm91dD0ndXV\n"
+ "pZDoyMmQwMmIwYS1iMjQ5LTExZGItOGFmOC05MWQ1NDAzZjkyZjknCiAgeG1sbnM6ZGM9J2h0dH\n"
+ "A6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvJz4KICA8ZGM6ZGVzY3JpcHRpb24+CiAgIDxyZ\n"
+ "GY6QWx0PgogICAgPHJkZjpsaSB4bWw6bGFuZz0neC1kZWZhdWx0Jz4gICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgPC9yZGY6bGk+CiAgIDwvcmRmOkFsdD4KICA8L2RjOmRlc2NyaXB0aW9\n"
+ "uPgogPC9yZGY6RGVzY3JpcHRpb24+Cgo8L3JkZjpSREY+CjwveDp4YXBtZXRhPgogICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA\n"
+ "ogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg\n"
+ "ICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI\n"
+ "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAg\n"
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA\n"
+ "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgIC\n"
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKPD94cGFja2V0IGVuZD0ndyc/P\n"
+ "v/uAA5BZG9iZQBkQAAAAAH/2wCEAAQDAwMDAwQDAwQGBAMEBgcFBAQFBwgGBgcGBggKCAkJCQkI\n"
+ "CgoMDAwMDAoMDAwMDAwMDAwMDAwMDAwMDAwMDAwBBAUFCAcIDwoKDxQODg4UFA4ODg4UEQwMDAw\n"
+ "MEREMDAwMDAwRDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDP/AABEIAGQAZAMBEQACEQEDEQ\n"
+ "H/3QAEAA3/xAGiAAAABwEBAQEBAAAAAAAAAAAEBQMCBgEABwgJCgsBAAICAwEBAQEBAAAAAAAAA\n"
+ "AEAAgMEBQYHCAkKCxAAAgEDAwIEAgYHAwQCBgJzAQIDEQQABSESMUFRBhNhInGBFDKRoQcVsUIj\n"
+ "wVLR4TMWYvAkcoLxJUM0U5KismNzwjVEJ5OjszYXVGR0w9LiCCaDCQoYGYSURUaktFbTVSga8uP\n"
+ "zxNTk9GV1hZWltcXV5fVmdoaWprbG1ub2N0dXZ3eHl6e3x9fn9zhIWGh4iJiouMjY6PgpOUlZaX\n"
+ "mJmam5ydnp+So6SlpqeoqaqrrK2ur6EQACAgECAwUFBAUGBAgDA20BAAIRAwQhEjFBBVETYSIGc\n"
+ "YGRMqGx8BTB0eEjQhVSYnLxMyQ0Q4IWklMlomOywgdz0jXiRIMXVJMICQoYGSY2RRonZHRVN/Kj\n"
+ "s8MoKdPj84SUpLTE1OT0ZXWFlaW1xdXl9UZWZnaGlqa2xtbm9kdXZ3eHl6e3x9fn9zhIWGh4iJi\n"
+ "ouMjY6Pg5SVlpeYmZqbnJ2en5KjpKWmp6ipqqusra6vr/2gAMAwEAAhEDEQA/APBnplwPAdR+GB\n"
+ "KY6dYtNG1w39yh4+xb+zIksgEfFaRSSoIx8f7RPRRkSWQimM+lRmwWVXFWYigHxUUVoMiJM+Fj0\n"
+ "tg0RBegLE0Wu+3c+GTBazFCGI7HtSp9slbFYYzyoBsegw2hY1Afl3wqqRqahk+0tDgKpgu4DAUU\n"
+ "+HY+GRS2ePiMKtUB3G+KGuONq//Q8OzpFbW5WnxMop4k9crG5ZnZNJkEOn21utVRYw7HxZtz+OR\n"
+ "vdsrZ2lRtci4aVxFEQA0neg/ZXxJpTITNNuOFss0vSotYNvZ2qGRkPKSTqiU8Sdqk5SZU5Ix8XJ\n"
+ "NNZ8k6bp8TtM73OputUtYq0Unux/hkRkJOzZLCAN2KR+VpbtSkCBaDnIzdlWu59u+XeJTjeASk8\n"
+ "+juZOESEAVqx8BvU/PJibScTrTy09560hkWOGFd2YgFnPQKD19zhOSkxw2l8Vm6XAiYb8gg+k5O\n"
+ "9mnhoon9H3cs5s7WF5pp29OGGMFndyaAKBuTiEEPQLD8h/NDmNdYlttNkYjlbFjcXCr3LLH8II8\n"
+ "C2WUGviZvon/OPWkm3RNSv72SYllMkKxQRV67CQMSKYQAxMkR/wBC56d61P0heel4cYuVOXWvTp\n"
+ "h4Qjjf/9Hw5qBYyISaqjBV+QpvkAzKcki4HomnIxck/wBhtlR2bhunvlDywddMUl4zW+kQ9FQ8X\n"
+ "nfuSewrtmPkycPvc/DhMhvyegXOrWWhmLQPKlsj6xIAiLCoZkY96nv7npmJvI2XOjQFMl0fyRqM\n"
+ "NoxvZvrGt33wlATwiMnVnY1LEdSfuyXF3KIDmUu88w2XlnTl8raAlb2ZFfVL0jdYRtQnxc7BfDC\n"
+ "OaJR7nm3me5tdOtjbMvp3ZRXkV6chVQRX79hmVjgZG+jgZ5jHGhzecXF5LPL6jEjstSSaDM51Ka\n"
+ "6MZ9S1C0sEBe8uZo4YCBXdjxGw60wEWyEqfUHkT8vLXRJFuLdTcaqfhlvWUErtukZ3ABPUjIXTE\n"
+ "m3rGmeV2Tk5UKz/AG/E/wAcgZKya20C3b02kjYtH8AqCygbkUH0nLYlgUb+gbWtPbpXt/n2ybB/\n"
+ "/9Lw4oaVxGd+PxH3qBkGaY3KyiSP01IkiUclH8sg+LKydm6INvZvKsFu+kWtvD8LRoFNRup6moO\n"
+ "aqd277HsGW+XPLmn6XM17FF6l7vW4fd2Zuu+RFls2tmUNrLJb7TSBertGQGqetDkxE0na0pvtHs\n"
+ "QkszWyiGAG5laYlnkeMVHJj8sA5rPk+SvMepTalqlxd3B5zTOXdj/MxqafLpm5xioh5nPK5kpRG\n"
+ "pkcKAST0A6k5NpfUP5K/ki1ssHmHzF+71KRQ8Nud/Qibb/kYw6/yjbrXISlSH07YaHbWyxx2kXE\n"
+ "KACB2zHJtLI7XSelBRvH2xCpvaaTDHXkOTVBPcUG2479RlsdmJVPRtvV+ylenQ0y62FP/9PxRpo\n"
+ "WG5FxKKxKFDA+GVS5NsebLdFsRePc3siVW4f4QR0QVAGYeSXR2unhtZ6s60K6jt+MMSFwtF2+xX\n"
+ "wr7eGUGLlRPQMsE2vxQm7itxKg3VCfT2+nb8cDYaCDtfOXmCCcROrQrUhkkCHYn6emRMqZxjbLd\n"
+ "F1+W/4xajHzjNCtQKMffETWUdngX5p+QZ9A8xS6hbo0ui37NNDPT7DOalHpsCD08Rmyw5ARTpdV\n"
+ "gIPEF35MeRn80ed4S5EdrpKm9kZ15K0iH92hB7Me/tmS60vt/QrCYyekiBdgSTXcjqV9q9MokFD\n"
+ "N7S3aFVVR8RoK9zldqndvAY6nffr/AGYQqLhjdpCoIAZW22HavU/LJBUP9WblX0xTw7fOmWsX/9\n"
+ "Tw7FdvMqWkQ3Z1qfED+mQIbI77PX/LFis9vBajZm2Y+x65rMh3t30Bsze400aVaIbSLk6r8CMRT\n"
+ "l/NmOcllnGDD9Y8uecNfEEiXrMgDGWAyGOOu5WlB+vMrHODTlxZCdjsyFdB006VpVtLasurQxBL\n"
+ "64WiLI4/aFT1ANOXemV5piR2b9NiljB4yyHy9CLOVI5GJhB+CvXY9R8xmINzs5HNZ+Z96BZpbxA\n"
+ "fVJo39UFefwopYgL4nMiMd2qZoIn/AJx00u3t/Lt7qpp9Yv5GLf5MUTERqfbvmzBeezjd9H+VlL\n"
+ "wSQzBqsvOGQD7L12rXsemPNxmXQSxxIPU2nFV4HYqR1xEUWj4ZAxBryr2G+J2VGDZlLrxUH6KZA\n"
+ "Fkqb15VFelfwy+2FP8A/9Xxlf6AdA182Yk9eFeLxSjoVfcfSMo4uIOfkweFOnpvlWYrLEwNFAA+\n"
+ "nMOYdrhFvQLeSO7coBXiK8iKiv07Zj8Ac4QtNrW1njUcKcT+yAR/xGmR4WcsStLpTuPU9IFaEsV\n"
+ "BP3k4m2AgBzSwyQNcIwNTE1aI3wnam9O2Ug7s5Ckk/NDndeVXa2H78MqqV6jmeBp9+ZWKXqDjZ4\n"
+ "+gvVvy30qCy0qzsLRBCnBI2VdgUTqPvOZ7y+Q7pz+bn5q6d+VflZxZlJ/NN4ypptk5qtB9qRwDX\n"
+ "gn/AAx2y2ItpfKFv+eH5qNeTajJ5ovVaVywSqvEtTUKqupAA6D2y0BNPtv/AJx//M5PzL8mJeXT\n"
+ "L+ndPf6rqarSpkAqsnEAAeoN6DpkJRYci9lROSgSUUH9o9K5Tw0ztfSHnXkOtK9q+PHwydq//9b\n"
+ "yxrVoZNBtNSA5zRMPXmH8j0CLXuBmHE+qneamHpEuqYeV7pzFVTRgQK5XMNmnlb1vyyY5QA1OwJ\n"
+ "+eUF2seTOLu5s7azVIVAkpVn/hhnIALG73Yz5jvb1dICqzpDNIqyFD8SxH7R28cxibZCiWOsdJs\n"
+ "PTM6XNstPhnkjIhcHuJBVfvOCiUSn0TfWrTTLjyw8guA/PifTO3xcxxA8a5ZAbimvJP0m3p/kFF\n"
+ "WxhmpWQJ9NW3zZPHz5vlb/nIDVbrWfzO1RJhxGnpDaRL/khA1T7ktmSOTAJhZaAUtLawsbayl8v\n"
+ "xWi3Gpay0cF3HPcFRJJHJMXVrcJ8UaAFG5LWjF8tAYW9H/wCcOo9bTzxrt/owkTyksZW5gkIKvI\n"
+ "7k26nvyReRJHyyBWT7dWQyOWlbnK2526e1O1MqIUFE84uPLkOdK9RXI0E2/wD/1/DA1bURZLY/W\n"
+ "ZDZqwb0eXw7dMgIi7bjllVXsz7yNcfWC0Vd3Ip92Y2UOz0cnsPlwyx8xQ/u24sMxCadoJp9LOXk\n"
+ "VX/uwRUE0BI8cokbLMyoKouHu2MaKGXw7fLDwgoGSkbHpaNZyLLHRSKcFFQQRvUdMlwUFOQyLzr\n"
+ "ztpCaba6fPau4ijv4OURY8AjVFKV7ZZiO+7Vnh6XvXkSWNbW2WTb92KDxIFMzwHlZc3zX+fuizW\n"
+ "f5p3ty8XGDU4YLmCQiisyII3+4rvl8UB5ffEghRGvOm7AbnvWvjk1fen/ONPldPKP5aWOpPCfr2\n"
+ "uE31y6q2wbaMEn+VAMDSdyzrzj+avlHyTp0l/r2rxWFuHWJuIeacu4qFCRgsajfBwsty89/6Gr/\n"
+ "ACa9an+JL/hSnrfoubhXwpXpjwhaL//Q8E1AqtcAZMs8l6i1nqMa1oSVP0VynKLDmaWdSfQXl69\n"
+ "jF1Jv8MhDb5rpB3AO7INRRLhhGp4R05FgaGvTMU8200xS70zVDMRp2pTIOvBmB3PgQP15kxIcnD\n"
+ "LH/EEz0rRvOJhldr9pQtCqyd6VrShGTqw5d4ARv9jHfOGl+ZJNMluLkyenaFbiRdqFYW5nrWuwO\n"
+ "MKB5MdSMRxnhlu9N8p6lLFpti63FUjCtFJTrDKvse2bEDZ4XJ9RZB+YPli2/Mjy5bxoUi1a0YS2\n"
+ "85UOwIXiy9jRu+TBppfOF1+V3m22vrdpNPM8cs/oo0VJlUqQPjValR3+IZNNvtLS9Yu9Mi0/TJr\n"
+ "kyp6QhWVVCIWRATsKBemwwFrDzT87fybs/wA1bW21PRb+DTvNlgGSRp6iC8i3KJJx+y6n7D0Pwm\n"
+ "hxBZXT55/6Fi/Nf0PW+qWXq+t6X1X67F6vD/ftK04V/wBl344U8b//0fBapxheVh9ocV+nviqY2\n"
+ "/qQJDew/bioWHiuQ8m0bbvaPKGtQ6jaxSo9JloCK75gZI0Xb4sgkHo8MouoAvP94BsRmGY7uWJU\n"
+ "gzbypOQpNOvIdK4Nw2WCE2tXulTkjEEbdafgclxMhFBas93dwyQzsWDghlJFONKHJCZtjOFBJfy\n"
+ "j1y9vPL9zpbIs0WkXL2sUjA8hDXlGCRXtt07ZuYvL5KJeo6bfajbkzWkcToR8dqshZ6in2fhNK/\n"
+ "PDTUlXmHVvMdr5o0v9H2kdrqGpfu7m0nkY87Uf7tkKAU4/s03ynLkEBbfihx7dGT6va67LbRMNR\n"
+ "aKOBuUTKgIBXoK1BOYR1M3aQ0mOt9yxUeZNdtJhFapLqMluSXkg5oxJrUMW5KevQ9MmNXXNqOiH\n"
+ "Rr/Hmv8A1r9I/oj95w+r+j9Yf1+NP5+nXtTD+dF8tkfkOlv/0vC3ph7f0/alcVTbS4A8QibuKb5\n"
+ "RI05EBYRFpdX3ly79a2qYCavH/EY7TCYyMD5PSdD8+wXUSn1ArDqOhBzFlipz4ZwWbaV5htbsgF\n"
+ "qg9crMXKErGyYwajFGzxyHlGSePbbwyqg5UZlCaxrFpaWU95LIqrEjMAT4Dp9OShGy1ZslBhv/A\n"
+ "Dj9rd/a+aL+xUK+m38L3d0HrxRo2HFtu5D8c27y8t30raarbWkU+u6g4gsNORn+EcUaSh2Pc0/4\n"
+ "lgtAjezzbT9SutY1i782al8Nxdyotqh6xWybIg+jc5q8s+I27bFDgFPQp9RE+nrag70+L6crrZu\n"
+ "4jajokdv6LW/Dii1Wo61PXKQN3KPK0L+h4/rnD/K5V78a5LhXxd3/0/DMXXtwxVNtL9Xkaf3f7N\n"
+ "etfbKMjdjtkZ9D6ufrlK0+HpX8coF9HJ26sXvfqXrf7i/U+uften/d/wCyrmQL6uOav0pvpP8Ai\n"
+ "b1F+rV59+vH6a5XLhcjH4nRmY/xpxHP0/UptWvT6Mx/RbmjxWK+aP8AFf1M/pCv1Kvxen9inavf\n"
+ "MrFwXtzcLUeLXq5Mv/I3nz1b0v8AjofuKVry9KrUpTanOlf9jmQ68va/zH9b/COn/o7/AI431mP\n"
+ "65SvLh+zWvbl9rMfNfC34K4kmj9T6lD6FKclp/DNYXZx5srsPrHor6nXvkgxTPS/U+rv6dPU5mt\n"
+ "fngFN5ulv+l/pL/Lp/scerHo//2Q==\n";
+
+static std::string gCommandLine;
+
+TEST(Base64, LargeSample) {
+ LOG(LS_VERBOSE) << "Testing specific base64 file";
+
+ char unescaped[64 * 1024];
+
+ // unescape that massive blob above
+ size_t size = Base64Unescape(SpecificTest,
+ sizeof(SpecificTest),
+ unescaped,
+ sizeof(unescaped));
+
+ EXPECT_EQ(size, sizeof(testbase64));
+ EXPECT_EQ(0, memcmp(testbase64, unescaped, sizeof(testbase64)));
+}
+
+bool DecodeTest(const char* encoded, size_t expect_unparsed,
+ const char* decoded, Base64::DecodeFlags flags)
+{
+ std::string result;
+ size_t consumed = 0, encoded_len = strlen(encoded);
+ bool success = Base64::DecodeFromArray(encoded, encoded_len, flags,
+ &result, &consumed);
+ size_t unparsed = encoded_len - consumed;
+ EXPECT_EQ(expect_unparsed, unparsed) << "\"" << encoded
+ << "\" -> \"" << decoded
+ << "\"";
+ EXPECT_STREQ(decoded, result.c_str());
+ return success;
+}
+
+#define Flags(x,y,z) \
+ Base64::DO_PARSE_##x | Base64::DO_PAD_##y | Base64::DO_TERM_##z
+
+TEST(Base64, DecodeParseOptions) {
+ // Trailing whitespace
+ EXPECT_TRUE (DecodeTest("YWJjZA== ", 1, "abcd", Flags(STRICT, YES, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA== ", 0, "abcd", Flags(WHITE, YES, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA== ", 0, "abcd", Flags(ANY, YES, CHAR)));
+
+ // Embedded whitespace
+ EXPECT_FALSE(DecodeTest("YWJjZA= =", 3, "abcd", Flags(STRICT, YES, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA= =", 0, "abcd", Flags(WHITE, YES, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA= =", 0, "abcd", Flags(ANY, YES, CHAR)));
+
+ // Embedded non-base64 characters
+ EXPECT_FALSE(DecodeTest("YWJjZA=*=", 3, "abcd", Flags(STRICT, YES, CHAR)));
+ EXPECT_FALSE(DecodeTest("YWJjZA=*=", 3, "abcd", Flags(WHITE, YES, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA=*=", 0, "abcd", Flags(ANY, YES, CHAR)));
+
+ // Unexpected padding characters
+ EXPECT_FALSE(DecodeTest("YW=JjZA==", 7, "a", Flags(STRICT, YES, CHAR)));
+ EXPECT_FALSE(DecodeTest("YW=JjZA==", 7, "a", Flags(WHITE, YES, CHAR)));
+ EXPECT_TRUE (DecodeTest("YW=JjZA==", 0, "abcd", Flags(ANY, YES, CHAR)));
+}
+
+TEST(Base64, DecodePadOptions) {
+ // Padding
+ EXPECT_TRUE (DecodeTest("YWJjZA==", 0, "abcd", Flags(STRICT, YES, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA==", 0, "abcd", Flags(STRICT, ANY, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA==", 2, "abcd", Flags(STRICT, NO, CHAR)));
+
+ // Incomplete padding
+ EXPECT_FALSE(DecodeTest("YWJjZA=", 1, "abcd", Flags(STRICT, YES, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA=", 1, "abcd", Flags(STRICT, ANY, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA=", 1, "abcd", Flags(STRICT, NO, CHAR)));
+
+ // No padding
+ EXPECT_FALSE(DecodeTest("YWJjZA", 0, "abcd", Flags(STRICT, YES, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA", 0, "abcd", Flags(STRICT, ANY, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJjZA", 0, "abcd", Flags(STRICT, NO, CHAR)));
+}
+
+TEST(Base64, DecodeTerminateOptions) {
+ // Complete quantum
+ EXPECT_TRUE (DecodeTest("YWJj", 0, "abc", Flags(STRICT, NO, BUFFER)));
+ EXPECT_TRUE (DecodeTest("YWJj", 0, "abc", Flags(STRICT, NO, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJj", 0, "abc", Flags(STRICT, NO, ANY)));
+
+ // Complete quantum with trailing data
+ EXPECT_FALSE(DecodeTest("YWJj*", 1, "abc", Flags(STRICT, NO, BUFFER)));
+ EXPECT_TRUE (DecodeTest("YWJj*", 1, "abc", Flags(STRICT, NO, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJj*", 1, "abc", Flags(STRICT, NO, ANY)));
+
+ // Incomplete quantum
+ EXPECT_FALSE(DecodeTest("YWJ", 0, "ab", Flags(STRICT, NO, BUFFER)));
+ EXPECT_FALSE(DecodeTest("YWJ", 0, "ab", Flags(STRICT, NO, CHAR)));
+ EXPECT_TRUE (DecodeTest("YWJ", 0, "ab", Flags(STRICT, NO, ANY)));
+}
+
+TEST(Base64, GetNextBase64Char) {
+ // The table looks like this:
+ // "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
+ char next_char;
+ EXPECT_TRUE(Base64::GetNextBase64Char('A', &next_char));
+ EXPECT_EQ('B', next_char);
+ EXPECT_TRUE(Base64::GetNextBase64Char('Z', &next_char));
+ EXPECT_EQ('a', next_char);
+ EXPECT_TRUE(Base64::GetNextBase64Char('/', &next_char));
+ EXPECT_EQ('A', next_char);
+ EXPECT_FALSE(Base64::GetNextBase64Char('&', &next_char));
+ EXPECT_FALSE(Base64::GetNextBase64Char('Z', NULL));
+}
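
The three DecodeTest suites above each combine one DO_PARSE_*, one DO_PAD_*, and one DO_TERM_* flag per call. Below is a minimal sketch of the same decode outside the test harness, using the Base64::DecodeFromArray call and flag names exercised by DecodeTest above; the include path, the rtc namespace qualification, and the function name are assumptions for illustration only.

    #include <string>

    #include "webrtc/base/base64.h"

    // Decode "YWJjZA==" strictly: no stray characters allowed, padding is
    // required, and the input must end exactly at the final pad character.
    bool DecodeAbcdStrict(std::string* out) {
      const char encoded[] = "YWJjZA==";
      size_t consumed = 0;
      bool ok = rtc::Base64::DecodeFromArray(
          encoded, sizeof(encoded) - 1,
          rtc::Base64::DO_PARSE_STRICT | rtc::Base64::DO_PAD_YES |
              rtc::Base64::DO_TERM_BUFFER,
          out, &consumed);
      // On success, *out == "abcd" and consumed == 8 (the whole input).
      return ok;
    }
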
diff --git a/chromium/third_party/webrtc/base/base_tests.gyp b/chromium/third_party/webrtc/base/base_tests.gyp
new file mode 100644
index 00000000000..c5cc7b809ab
--- /dev/null
+++ b/chromium/third_party/webrtc/base/base_tests.gyp
@@ -0,0 +1,156 @@
+# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [ '../build/common.gypi', ],
+ 'targets': [
+ {
+ 'target_name': 'webrtc_base_tests_utils',
+ 'type': 'static_library',
+ 'sources': [
+ 'unittest_main.cc',
+ # Also use this as a convenient dumping ground for misc files that are
+ # included by multiple targets below.
+ 'fakecpumonitor.h',
+ 'fakenetwork.h',
+ 'fakesslidentity.h',
+ 'faketaskrunner.h',
+ 'gunit.h',
+ 'testbase64.h',
+ 'testechoserver.h',
+ 'testutils.h',
+ 'win32toolhelp.h',
+ ],
+ 'dependencies': [
+ 'base.gyp:webrtc_base',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ },
+ {
+ 'target_name': 'webrtc_base_tests',
+ 'type': 'executable',
+ 'dependencies': [
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ 'base.gyp:webrtc_base',
+ 'webrtc_base_tests_utils',
+ ],
+ 'sources': [
+ 'asynchttprequest_unittest.cc',
+ 'atomicops_unittest.cc',
+ 'autodetectproxy_unittest.cc',
+ 'bandwidthsmoother_unittest.cc',
+ 'base64_unittest.cc',
+ 'basictypes_unittest.cc',
+ 'bind_unittest.cc',
+ 'buffer_unittest.cc',
+ 'bytebuffer_unittest.cc',
+ 'byteorder_unittest.cc',
+ 'callback_unittest.cc',
+ 'cpumonitor_unittest.cc',
+ 'crc32_unittest.cc',
+ 'criticalsection_unittest.cc',
+ 'event_unittest.cc',
+ 'filelock_unittest.cc',
+ 'fileutils_unittest.cc',
+ 'helpers_unittest.cc',
+ 'httpbase_unittest.cc',
+ 'httpcommon_unittest.cc',
+ 'httpserver_unittest.cc',
+ 'ipaddress_unittest.cc',
+ 'logging_unittest.cc',
+ 'md5digest_unittest.cc',
+ 'messagedigest_unittest.cc',
+ 'messagequeue_unittest.cc',
+ 'multipart_unittest.cc',
+ 'nat_unittest.cc',
+ 'network_unittest.cc',
+ 'nullsocketserver_unittest.cc',
+ 'optionsfile_unittest.cc',
+ 'pathutils_unittest.cc',
+ 'physicalsocketserver_unittest.cc',
+ 'profiler_unittest.cc',
+ 'proxy_unittest.cc',
+ 'proxydetect_unittest.cc',
+ 'ratelimiter_unittest.cc',
+ 'ratetracker_unittest.cc',
+ 'referencecountedsingletonfactory_unittest.cc',
+ 'rollingaccumulator_unittest.cc',
+ 'scopedptrcollection_unittest.cc',
+ 'sha1digest_unittest.cc',
+ 'sharedexclusivelock_unittest.cc',
+ 'signalthread_unittest.cc',
+ 'sigslot_unittest.cc',
+ 'sigslottester.h',
+ 'sigslottester.h.pump',
+ 'socket_unittest.cc',
+ 'socket_unittest.h',
+ 'socketaddress_unittest.cc',
+ 'stream_unittest.cc',
+ 'stringencode_unittest.cc',
+ 'stringutils_unittest.cc',
+ # TODO(ronghuawu): Reenable this test.
+ # 'systeminfo_unittest.cc',
+ 'task_unittest.cc',
+ 'testclient_unittest.cc',
+ 'thread_checker_unittest.cc',
+ 'thread_unittest.cc',
+ 'timeutils_unittest.cc',
+ 'urlencode_unittest.cc',
+ 'versionparsing_unittest.cc',
+ 'virtualsocket_unittest.cc',
+ # TODO(ronghuawu): Reenable this test.
+ # 'windowpicker_unittest.cc',
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'sources': [
+ 'latebindingsymboltable_unittest.cc',
+ # TODO(ronghuawu): Reenable this test.
+ # 'linux_unittest.cc',
+ 'linuxfdwalk_unittest.cc',
+ ],
+ }],
+ ['OS=="win"', {
+ 'sources': [
+ 'win32_unittest.cc',
+ 'win32regkey_unittest.cc',
+ 'win32socketserver_unittest.cc',
+ 'win32toolhelp_unittest.cc',
+ 'win32window_unittest.cc',
+ 'win32windowpicker_unittest.cc',
+ 'winfirewall_unittest.cc',
+ ],
+ 'sources!': [
+ # TODO(ronghuawu): Fix TestUdpReadyToSendIPv6 on windows bot
+ # then reenable these tests.
+ 'physicalsocketserver_unittest.cc',
+ 'socket_unittest.cc',
+ 'win32socketserver_unittest.cc',
+ 'win32windowpicker_unittest.cc',
+ ],
+ }],
+ ['OS=="mac"', {
+ 'sources': [
+ 'macsocketserver_unittest.cc',
+ 'macutils_unittest.cc',
+ ],
+ }],
+ ['os_posix==1', {
+ 'sources': [
+ 'sslidentity_unittest.cc',
+ 'sslstreamadapter_unittest.cc',
+ ],
+ }],
+ ['OS=="ios" or (OS=="mac" and target_arch!="ia32")', {
+ 'defines': [
+ 'CARBON_DEPRECATED=YES',
+ ],
+ }],
+ ], # conditions
+ },
+ ],
+}
diff --git a/chromium/third_party/webrtc/base/basicdefs.h b/chromium/third_party/webrtc/base/basicdefs.h
new file mode 100644
index 00000000000..1dee2ae6580
--- /dev/null
+++ b/chromium/third_party/webrtc/base/basicdefs.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_BASICDEFS_H_
+#define WEBRTC_BASE_BASICDEFS_H_
+
+#if HAVE_CONFIG_H
+#include "config.h" // NOLINT
+#endif
+
+#define ARRAY_SIZE(x) (static_cast<int>(sizeof(x) / sizeof(x[0])))
+
+#endif // WEBRTC_BASE_BASICDEFS_H_
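
ARRAY_SIZE above evaluates to the element count of a C array, as an int, at compile time. A short usage sketch follows; the table and function are hypothetical.

    #include "webrtc/base/basicdefs.h"

    // Hypothetical retry table; ARRAY_SIZE avoids hard-coding its length.
    static const int kRetryDelaysMs[] = { 100, 200, 400, 800 };

    int TotalRetryDelayMs() {
      int total = 0;
      for (int i = 0; i < ARRAY_SIZE(kRetryDelaysMs); ++i)
        total += kRetryDelaysMs[i];
      return total;  // 1500 for the table above.
    }
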
diff --git a/chromium/third_party/webrtc/base/basictypes.h b/chromium/third_party/webrtc/base/basictypes.h
new file mode 100644
index 00000000000..7649a43f855
--- /dev/null
+++ b/chromium/third_party/webrtc/base/basictypes.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_BASICTYPES_H_
+#define WEBRTC_BASE_BASICTYPES_H_
+
+#include <stddef.h> // for NULL, size_t
+
+#if !(defined(_MSC_VER) && (_MSC_VER < 1600))
+#include <stdint.h> // for uintptr_t
+#endif
+
+#ifdef HAVE_CONFIG_H
+#include "config.h" // NOLINT
+#endif
+
+#include "webrtc/base/constructormagic.h"
+
+#if !defined(INT_TYPES_DEFINED)
+#define INT_TYPES_DEFINED
+#ifdef COMPILER_MSVC
+typedef unsigned __int64 uint64;
+typedef __int64 int64;
+#ifndef INT64_C
+#define INT64_C(x) x ## I64
+#endif
+#ifndef UINT64_C
+#define UINT64_C(x) x ## UI64
+#endif
+#define INT64_F "I64"
+#else // COMPILER_MSVC
+// On Mac OS X, cssmconfig.h defines uint64 as uint64_t
+// TODO(fbarchard): Use long long for compatibility with chromium on BSD/OSX.
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+typedef uint64_t uint64;
+typedef int64_t int64;
+#ifndef INT64_C
+#define INT64_C(x) x ## LL
+#endif
+#ifndef UINT64_C
+#define UINT64_C(x) x ## ULL
+#endif
+#define INT64_F "l"
+#elif defined(__LP64__)
+typedef unsigned long uint64; // NOLINT
+typedef long int64; // NOLINT
+#ifndef INT64_C
+#define INT64_C(x) x ## L
+#endif
+#ifndef UINT64_C
+#define UINT64_C(x) x ## UL
+#endif
+#define INT64_F "l"
+#else // __LP64__
+typedef unsigned long long uint64; // NOLINT
+typedef long long int64; // NOLINT
+#ifndef INT64_C
+#define INT64_C(x) x ## LL
+#endif
+#ifndef UINT64_C
+#define UINT64_C(x) x ## ULL
+#endif
+#define INT64_F "ll"
+#endif // __LP64__
+#endif // COMPILER_MSVC
+typedef unsigned int uint32;
+typedef int int32;
+typedef unsigned short uint16; // NOLINT
+typedef short int16; // NOLINT
+typedef unsigned char uint8;
+typedef signed char int8;
+#endif // INT_TYPES_DEFINED
+
+// Detect whether the compiler targets x86 or x64.
+#if defined(__x86_64__) || defined(_M_X64) || \
+ defined(__i386__) || defined(_M_IX86)
+#define CPU_X86 1
+#endif
+// Detect whether the compiler targets ARM.
+#if defined(__arm__) || defined(_M_ARM)
+#define CPU_ARM 1
+#endif
+#if defined(CPU_X86) && defined(CPU_ARM)
+#error CPU_X86 and CPU_ARM both defined.
+#endif
+#if !defined(ARCH_CPU_BIG_ENDIAN) && !defined(ARCH_CPU_LITTLE_ENDIAN)
+// x86, ARM, or GCC-provided __BYTE_ORDER__ macros.
+#if CPU_X86 || CPU_ARM || \
+ (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
+#define ARCH_CPU_LITTLE_ENDIAN
+#elif defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+#define ARCH_CPU_BIG_ENDIAN
+#else
+#error ARCH_CPU_BIG_ENDIAN or ARCH_CPU_LITTLE_ENDIAN should be defined.
+#endif
+#endif
+#if defined(ARCH_CPU_BIG_ENDIAN) && defined(ARCH_CPU_LITTLE_ENDIAN)
+#error ARCH_CPU_BIG_ENDIAN and ARCH_CPU_LITTLE_ENDIAN both defined.
+#endif
+
+#if defined(WEBRTC_WIN)
+typedef int socklen_t;
+#endif
+
+// The following only works for C++
+#ifdef __cplusplus
+namespace rtc {
+ template<class T> inline T _min(T a, T b) { return (a > b) ? b : a; }
+ template<class T> inline T _max(T a, T b) { return (a < b) ? b : a; }
+
+ // For wait functions that take a number of milliseconds, kForever indicates
+ // unlimited time.
+ const int kForever = -1;
+}
+
+#define ALIGNP(p, t) \
+ (reinterpret_cast<uint8*>(((reinterpret_cast<uintptr_t>(p) + \
+ ((t) - 1)) & ~((t) - 1))))
+#define RTC_IS_ALIGNED(p, a) (!((uintptr_t)(p) & ((a) - 1)))
+
+// Use this to declare and define a static local variable (static T;) so that
+// it is intentionally leaked and its destructor is never called at exit.
+#define LIBJINGLE_DEFINE_STATIC_LOCAL(type, name, arguments) \
+ static type& name = *new type arguments
+
+#endif // __cplusplus
+#endif // WEBRTC_BASE_BASICTYPES_H_
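
LIBJINGLE_DEFINE_STATIC_LOCAL above heap-allocates the object once and never frees it, so its destructor cannot run during static teardown. A brief sketch of the intended pattern follows; the accessor name and the string contents are hypothetical.

    #include <string>

    #include "webrtc/base/basictypes.h"

    // The std::string is created on first use and deliberately leaked, so the
    // returned reference stays valid even in code that runs during exit.
    const std::string& DefaultUserAgent() {
      LIBJINGLE_DEFINE_STATIC_LOCAL(std::string, user_agent, ("webrtc/1.0"));
      return user_agent;
    }
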
diff --git a/chromium/third_party/webrtc/base/basictypes_unittest.cc b/chromium/third_party/webrtc/base/basictypes_unittest.cc
new file mode 100644
index 00000000000..20515ecf969
--- /dev/null
+++ b/chromium/third_party/webrtc/base/basictypes_unittest.cc
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/basictypes.h"
+
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+TEST(BasicTypesTest, Endian) {
+ uint16 v16 = 0x1234u;
+ uint8 first_byte = *reinterpret_cast<uint8*>(&v16);
+#if defined(ARCH_CPU_LITTLE_ENDIAN)
+ EXPECT_EQ(0x34u, first_byte);
+#elif defined(ARCH_CPU_BIG_ENDIAN)
+ EXPECT_EQ(0x12u, first_byte);
+#endif
+}
+
+TEST(BasicTypesTest, SizeOfTypes) {
+ int8 i8 = -1;
+ uint8 u8 = 1u;
+ int16 i16 = -1;
+ uint16 u16 = 1u;
+ int32 i32 = -1;
+ uint32 u32 = 1u;
+ int64 i64 = -1;
+ uint64 u64 = 1u;
+ EXPECT_EQ(1u, sizeof(i8));
+ EXPECT_EQ(1u, sizeof(u8));
+ EXPECT_EQ(2u, sizeof(i16));
+ EXPECT_EQ(2u, sizeof(u16));
+ EXPECT_EQ(4u, sizeof(i32));
+ EXPECT_EQ(4u, sizeof(u32));
+ EXPECT_EQ(8u, sizeof(i64));
+ EXPECT_EQ(8u, sizeof(u64));
+ EXPECT_GT(0, i8);
+ EXPECT_LT(0u, u8);
+ EXPECT_GT(0, i16);
+ EXPECT_LT(0u, u16);
+ EXPECT_GT(0, i32);
+ EXPECT_LT(0u, u32);
+ EXPECT_GT(0, i64);
+ EXPECT_LT(0u, u64);
+}
+
+TEST(BasicTypesTest, SizeOfConstants) {
+ EXPECT_EQ(8u, sizeof(INT64_C(0)));
+ EXPECT_EQ(8u, sizeof(UINT64_C(0)));
+ EXPECT_EQ(8u, sizeof(INT64_C(0x1234567887654321)));
+ EXPECT_EQ(8u, sizeof(UINT64_C(0x8765432112345678)));
+}
+
+// Test CPU_ macros
+#if !defined(CPU_ARM) && defined(__arm__)
+#error expected CPU_ARM to be defined.
+#endif
+#if !defined(CPU_X86) && (defined(WEBRTC_WIN) || defined(WEBRTC_MAC) && !defined(WEBRTC_IOS))
+#error expected CPU_X86 to be defined.
+#endif
+#if !defined(ARCH_CPU_LITTLE_ENDIAN) && \
+ (defined(WEBRTC_WIN) || defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) || defined(CPU_X86))
+#error expected ARCH_CPU_LITTLE_ENDIAN to be defined.
+#endif
+
+// TODO(fbarchard): Test all macros in basictypes.h
+
+} // namespace rtc
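
SizeOfConstants above checks the INT64_C/UINT64_C literal macros; the companion INT64_F macro defined next to them in basictypes.h is the printf length modifier for the 64-bit typedefs. A small illustrative sketch follows; the function is hypothetical.

    #include <stdio.h>

    #include "webrtc/base/basictypes.h"

    // INT64_F expands to "I64" on MSVC and to "l" or "ll" elsewhere, so the
    // format string below becomes "%I64d", "%ld", or "%lld" as appropriate.
    void PrintTimestampMs(int64 timestamp_ms) {
      printf("timestamp: %" INT64_F "d ms\n", timestamp_ms);
    }
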
diff --git a/chromium/third_party/webrtc/base/bind.h b/chromium/third_party/webrtc/base/bind.h
new file mode 100644
index 00000000000..2e3104edfd3
--- /dev/null
+++ b/chromium/third_party/webrtc/base/bind.h
@@ -0,0 +1,587 @@
+// This file was GENERATED by command:
+// pump.py bind.h.pump
+// DO NOT EDIT BY HAND!!!
+
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// To generate bind.h from bind.h.pump, execute:
+// /home/build/google3/third_party/gtest/scripts/pump.py bind.h.pump
+
+// Bind() is an overloaded function that converts method calls into function
+// objects (aka functors). It captures any arguments to the method by value
+// when Bind is called, producing a stateful, nullary function object. Care
+// should be taken about the lifetime of objects captured by Bind(); the
+// returned functor knows nothing about the lifetime of the method's object or
+// any arguments passed by pointer, and calling the functor with a destroyed
+// object will surely do bad things.
+//
+// Example usage:
+// struct Foo {
+// int Test1() { return 42; }
+// int Test2() const { return 52; }
+// int Test3(int x) { return x*x; }
+// float Test4(int x, float y) { return x + y; }
+// };
+//
+// int main() {
+// Foo foo;
+// cout << rtc::Bind(&Foo::Test1, &foo)() << endl;
+// cout << rtc::Bind(&Foo::Test2, &foo)() << endl;
+// cout << rtc::Bind(&Foo::Test3, &foo, 3)() << endl;
+// cout << rtc::Bind(&Foo::Test4, &foo, 7, 8.5f)() << endl;
+// }
+
+#ifndef WEBRTC_BASE_BIND_H_
+#define WEBRTC_BASE_BIND_H_
+
+#define NONAME
+
+namespace rtc {
+namespace detail {
+// This is needed because the template parameters in Bind can't be resolved
+// if they're used both as parameters of the function pointer type and as
+// parameters to Bind itself: the function pointer parameters are exact
+// matches to the function prototype, but the parameters to bind have
+// references stripped. This trick allows the compiler to dictate the Bind
+// parameter types rather than deduce them.
+template <class T> struct identity { typedef T type; };
+} // namespace detail
+
+template <class ObjectT, class MethodT, class R>
+class MethodFunctor0 {
+ public:
+ MethodFunctor0(MethodT method, ObjectT* object)
+ : method_(method), object_(object) {}
+ R operator()() const {
+ return (object_->*method_)(); }
+ private:
+ MethodT method_;
+ ObjectT* object_;
+};
+
+template <class FunctorT, class R>
+class Functor0 {
+ public:
+ explicit Functor0(const FunctorT& functor)
+ : functor_(functor) {}
+ R operator()() const {
+ return functor_(); }
+ private:
+ FunctorT functor_;
+};
+
+
+#define FP_T(x) R (ObjectT::*x)()
+
+template <class ObjectT, class R>
+MethodFunctor0<ObjectT, FP_T(NONAME), R>
+Bind(FP_T(method), ObjectT* object) {
+ return MethodFunctor0<ObjectT, FP_T(NONAME), R>(
+ method, object);
+}
+
+#undef FP_T
+#define FP_T(x) R (ObjectT::*x)() const
+
+template <class ObjectT, class R>
+MethodFunctor0<const ObjectT, FP_T(NONAME), R>
+Bind(FP_T(method), const ObjectT* object) {
+ return MethodFunctor0<const ObjectT, FP_T(NONAME), R>(
+ method, object);
+}
+
+#undef FP_T
+#define FP_T(x) R (*x)()
+
+template <class R>
+Functor0<FP_T(NONAME), R>
+Bind(FP_T(function)) {
+ return Functor0<FP_T(NONAME), R>(
+ function);
+}
+
+#undef FP_T
+
+template <class ObjectT, class MethodT, class R,
+ class P1>
+class MethodFunctor1 {
+ public:
+ MethodFunctor1(MethodT method, ObjectT* object,
+ P1 p1)
+ : method_(method), object_(object),
+ p1_(p1) {}
+ R operator()() const {
+ return (object_->*method_)(p1_); }
+ private:
+ MethodT method_;
+ ObjectT* object_;
+ P1 p1_;
+};
+
+template <class FunctorT, class R,
+ class P1>
+class Functor1 {
+ public:
+ Functor1(const FunctorT& functor, P1 p1)
+ : functor_(functor),
+ p1_(p1) {}
+ R operator()() const {
+ return functor_(p1_); }
+ private:
+ FunctorT functor_;
+ P1 p1_;
+};
+
+
+#define FP_T(x) R (ObjectT::*x)(P1)
+
+template <class ObjectT, class R,
+ class P1>
+MethodFunctor1<ObjectT, FP_T(NONAME), R, P1>
+Bind(FP_T(method), ObjectT* object,
+ typename detail::identity<P1>::type p1) {
+ return MethodFunctor1<ObjectT, FP_T(NONAME), R, P1>(
+ method, object, p1);
+}
+
+#undef FP_T
+#define FP_T(x) R (ObjectT::*x)(P1) const
+
+template <class ObjectT, class R,
+ class P1>
+MethodFunctor1<const ObjectT, FP_T(NONAME), R, P1>
+Bind(FP_T(method), const ObjectT* object,
+ typename detail::identity<P1>::type p1) {
+ return MethodFunctor1<const ObjectT, FP_T(NONAME), R, P1>(
+ method, object, p1);
+}
+
+#undef FP_T
+#define FP_T(x) R (*x)(P1)
+
+template <class R,
+ class P1>
+Functor1<FP_T(NONAME), R, P1>
+Bind(FP_T(function),
+ typename detail::identity<P1>::type p1) {
+ return Functor1<FP_T(NONAME), R, P1>(
+ function, p1);
+}
+
+#undef FP_T
+
+template <class ObjectT, class MethodT, class R,
+ class P1,
+ class P2>
+class MethodFunctor2 {
+ public:
+ MethodFunctor2(MethodT method, ObjectT* object,
+ P1 p1,
+ P2 p2)
+ : method_(method), object_(object),
+ p1_(p1),
+ p2_(p2) {}
+ R operator()() const {
+ return (object_->*method_)(p1_, p2_); }
+ private:
+ MethodT method_;
+ ObjectT* object_;
+ P1 p1_;
+ P2 p2_;
+};
+
+template <class FunctorT, class R,
+ class P1,
+ class P2>
+class Functor2 {
+ public:
+ Functor2(const FunctorT& functor, P1 p1, P2 p2)
+ : functor_(functor),
+ p1_(p1),
+ p2_(p2) {}
+ R operator()() const {
+ return functor_(p1_, p2_); }
+ private:
+ FunctorT functor_;
+ P1 p1_;
+ P2 p2_;
+};
+
+
+#define FP_T(x) R (ObjectT::*x)(P1, P2)
+
+template <class ObjectT, class R,
+ class P1,
+ class P2>
+MethodFunctor2<ObjectT, FP_T(NONAME), R, P1, P2>
+Bind(FP_T(method), ObjectT* object,
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2) {
+ return MethodFunctor2<ObjectT, FP_T(NONAME), R, P1, P2>(
+ method, object, p1, p2);
+}
+
+#undef FP_T
+#define FP_T(x) R (ObjectT::*x)(P1, P2) const
+
+template <class ObjectT, class R,
+ class P1,
+ class P2>
+MethodFunctor2<const ObjectT, FP_T(NONAME), R, P1, P2>
+Bind(FP_T(method), const ObjectT* object,
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2) {
+ return MethodFunctor2<const ObjectT, FP_T(NONAME), R, P1, P2>(
+ method, object, p1, p2);
+}
+
+#undef FP_T
+#define FP_T(x) R (*x)(P1, P2)
+
+template <class R,
+ class P1,
+ class P2>
+Functor2<FP_T(NONAME), R, P1, P2>
+Bind(FP_T(function),
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2) {
+ return Functor2<FP_T(NONAME), R, P1, P2>(
+ function, p1, p2);
+}
+
+#undef FP_T
+
+template <class ObjectT, class MethodT, class R,
+ class P1,
+ class P2,
+ class P3>
+class MethodFunctor3 {
+ public:
+ MethodFunctor3(MethodT method, ObjectT* object,
+ P1 p1,
+ P2 p2,
+ P3 p3)
+ : method_(method), object_(object),
+ p1_(p1),
+ p2_(p2),
+ p3_(p3) {}
+ R operator()() const {
+ return (object_->*method_)(p1_, p2_, p3_); }
+ private:
+ MethodT method_;
+ ObjectT* object_;
+ P1 p1_;
+ P2 p2_;
+ P3 p3_;
+};
+
+template <class FunctorT, class R,
+ class P1,
+ class P2,
+ class P3>
+class Functor3 {
+ public:
+ Functor3(const FunctorT& functor, P1 p1, P2 p2, P3 p3)
+ : functor_(functor),
+ p1_(p1),
+ p2_(p2),
+ p3_(p3) {}
+ R operator()() const {
+ return functor_(p1_, p2_, p3_); }
+ private:
+ FunctorT functor_;
+ P1 p1_;
+ P2 p2_;
+ P3 p3_;
+};
+
+
+#define FP_T(x) R (ObjectT::*x)(P1, P2, P3)
+
+template <class ObjectT, class R,
+ class P1,
+ class P2,
+ class P3>
+MethodFunctor3<ObjectT, FP_T(NONAME), R, P1, P2, P3>
+Bind(FP_T(method), ObjectT* object,
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2,
+ typename detail::identity<P3>::type p3) {
+ return MethodFunctor3<ObjectT, FP_T(NONAME), R, P1, P2, P3>(
+ method, object, p1, p2, p3);
+}
+
+#undef FP_T
+#define FP_T(x) R (ObjectT::*x)(P1, P2, P3) const
+
+template <class ObjectT, class R,
+ class P1,
+ class P2,
+ class P3>
+MethodFunctor3<const ObjectT, FP_T(NONAME), R, P1, P2, P3>
+Bind(FP_T(method), const ObjectT* object,
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2,
+ typename detail::identity<P3>::type p3) {
+ return MethodFunctor3<const ObjectT, FP_T(NONAME), R, P1, P2, P3>(
+ method, object, p1, p2, p3);
+}
+
+#undef FP_T
+#define FP_T(x) R (*x)(P1, P2, P3)
+
+template <class R,
+ class P1,
+ class P2,
+ class P3>
+Functor3<FP_T(NONAME), R, P1, P2, P3>
+Bind(FP_T(function),
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2,
+ typename detail::identity<P3>::type p3) {
+ return Functor3<FP_T(NONAME), R, P1, P2, P3>(
+ function, p1, p2, p3);
+}
+
+#undef FP_T
+
+template <class ObjectT, class MethodT, class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4>
+class MethodFunctor4 {
+ public:
+ MethodFunctor4(MethodT method, ObjectT* object,
+ P1 p1,
+ P2 p2,
+ P3 p3,
+ P4 p4)
+ : method_(method), object_(object),
+ p1_(p1),
+ p2_(p2),
+ p3_(p3),
+ p4_(p4) {}
+ R operator()() const {
+ return (object_->*method_)(p1_, p2_, p3_, p4_); }
+ private:
+ MethodT method_;
+ ObjectT* object_;
+ P1 p1_;
+ P2 p2_;
+ P3 p3_;
+ P4 p4_;
+};
+
+template <class FunctorT, class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4>
+class Functor4 {
+ public:
+ Functor4(const FunctorT& functor, P1 p1, P2 p2, P3 p3, P4 p4)
+ : functor_(functor),
+ p1_(p1),
+ p2_(p2),
+ p3_(p3),
+ p4_(p4) {}
+ R operator()() const {
+ return functor_(p1_, p2_, p3_, p4_); }
+ private:
+ FunctorT functor_;
+ P1 p1_;
+ P2 p2_;
+ P3 p3_;
+ P4 p4_;
+};
+
+
+#define FP_T(x) R (ObjectT::*x)(P1, P2, P3, P4)
+
+template <class ObjectT, class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4>
+MethodFunctor4<ObjectT, FP_T(NONAME), R, P1, P2, P3, P4>
+Bind(FP_T(method), ObjectT* object,
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2,
+ typename detail::identity<P3>::type p3,
+ typename detail::identity<P4>::type p4) {
+ return MethodFunctor4<ObjectT, FP_T(NONAME), R, P1, P2, P3, P4>(
+ method, object, p1, p2, p3, p4);
+}
+
+#undef FP_T
+#define FP_T(x) R (ObjectT::*x)(P1, P2, P3, P4) const
+
+template <class ObjectT, class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4>
+MethodFunctor4<const ObjectT, FP_T(NONAME), R, P1, P2, P3, P4>
+Bind(FP_T(method), const ObjectT* object,
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2,
+ typename detail::identity<P3>::type p3,
+ typename detail::identity<P4>::type p4) {
+ return MethodFunctor4<const ObjectT, FP_T(NONAME), R, P1, P2, P3, P4>(
+ method, object, p1, p2, p3, p4);
+}
+
+#undef FP_T
+#define FP_T(x) R (*x)(P1, P2, P3, P4)
+
+template <class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4>
+Functor4<FP_T(NONAME), R, P1, P2, P3, P4>
+Bind(FP_T(function),
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2,
+ typename detail::identity<P3>::type p3,
+ typename detail::identity<P4>::type p4) {
+ return Functor4<FP_T(NONAME), R, P1, P2, P3, P4>(
+ function, p1, p2, p3, p4);
+}
+
+#undef FP_T
+
+template <class ObjectT, class MethodT, class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4,
+ class P5>
+class MethodFunctor5 {
+ public:
+ MethodFunctor5(MethodT method, ObjectT* object,
+ P1 p1,
+ P2 p2,
+ P3 p3,
+ P4 p4,
+ P5 p5)
+ : method_(method), object_(object),
+ p1_(p1),
+ p2_(p2),
+ p3_(p3),
+ p4_(p4),
+ p5_(p5) {}
+ R operator()() const {
+ return (object_->*method_)(p1_, p2_, p3_, p4_, p5_); }
+ private:
+ MethodT method_;
+ ObjectT* object_;
+ P1 p1_;
+ P2 p2_;
+ P3 p3_;
+ P4 p4_;
+ P5 p5_;
+};
+
+template <class FunctorT, class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4,
+ class P5>
+class Functor5 {
+ public:
+ Functor5(const FunctorT& functor, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5)
+ : functor_(functor),
+ p1_(p1),
+ p2_(p2),
+ p3_(p3),
+ p4_(p4),
+ p5_(p5) {}
+ R operator()() const {
+ return functor_(p1_, p2_, p3_, p4_, p5_); }
+ private:
+ FunctorT functor_;
+ P1 p1_;
+ P2 p2_;
+ P3 p3_;
+ P4 p4_;
+ P5 p5_;
+};
+
+
+#define FP_T(x) R (ObjectT::*x)(P1, P2, P3, P4, P5)
+
+template <class ObjectT, class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4,
+ class P5>
+MethodFunctor5<ObjectT, FP_T(NONAME), R, P1, P2, P3, P4, P5>
+Bind(FP_T(method), ObjectT* object,
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2,
+ typename detail::identity<P3>::type p3,
+ typename detail::identity<P4>::type p4,
+ typename detail::identity<P5>::type p5) {
+ return MethodFunctor5<ObjectT, FP_T(NONAME), R, P1, P2, P3, P4, P5>(
+ method, object, p1, p2, p3, p4, p5);
+}
+
+#undef FP_T
+#define FP_T(x) R (ObjectT::*x)(P1, P2, P3, P4, P5) const
+
+template <class ObjectT, class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4,
+ class P5>
+MethodFunctor5<const ObjectT, FP_T(NONAME), R, P1, P2, P3, P4, P5>
+Bind(FP_T(method), const ObjectT* object,
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2,
+ typename detail::identity<P3>::type p3,
+ typename detail::identity<P4>::type p4,
+ typename detail::identity<P5>::type p5) {
+ return MethodFunctor5<const ObjectT, FP_T(NONAME), R, P1, P2, P3, P4, P5>(
+ method, object, p1, p2, p3, p4, p5);
+}
+
+#undef FP_T
+#define FP_T(x) R (*x)(P1, P2, P3, P4, P5)
+
+template <class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4,
+ class P5>
+Functor5<FP_T(NONAME), R, P1, P2, P3, P4, P5>
+Bind(FP_T(function),
+ typename detail::identity<P1>::type p1,
+ typename detail::identity<P2>::type p2,
+ typename detail::identity<P3>::type p3,
+ typename detail::identity<P4>::type p4,
+ typename detail::identity<P5>::type p5) {
+ return Functor5<FP_T(NONAME), R, P1, P2, P3, P4, P5>(
+ function, p1, p2, p3, p4, p5);
+}
+
+#undef FP_T
+
+} // namespace rtc
+
+#undef NONAME
+
+#endif // WEBRTC_BASE_BIND_H_
diff --git a/chromium/third_party/webrtc/base/bind.h.pump b/chromium/third_party/webrtc/base/bind.h.pump
new file mode 100644
index 00000000000..b5663c45df4
--- /dev/null
+++ b/chromium/third_party/webrtc/base/bind.h.pump
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// To generate bind.h from bind.h.pump, execute:
+// /home/build/google3/third_party/gtest/scripts/pump.py bind.h.pump
+
+// Bind() is an overloaded function that converts method calls into function
+// objects (aka functors). It captures any arguments to the method by value
+// when Bind is called, producing a stateful, nullary function object. Care
+// should be taken about the lifetime of objects captured by Bind(); the
+// returned functor knows nothing about the lifetime of the method's object or
+// any arguments passed by pointer, and calling the functor with a destroyed
+// object will surely do bad things.
+//
+// Example usage:
+// struct Foo {
+// int Test1() { return 42; }
+// int Test2() const { return 52; }
+// int Test3(int x) { return x*x; }
+// float Test4(int x, float y) { return x + y; }
+// };
+//
+// int main() {
+// Foo foo;
+// cout << rtc::Bind(&Foo::Test1, &foo)() << endl;
+// cout << rtc::Bind(&Foo::Test2, &foo)() << endl;
+// cout << rtc::Bind(&Foo::Test3, &foo, 3)() << endl;
+// cout << rtc::Bind(&Foo::Test4, &foo, 7, 8.5f)() << endl;
+// }
+
+#ifndef WEBRTC_BASE_BIND_H_
+#define WEBRTC_BASE_BIND_H_
+
+#define NONAME
+
+namespace rtc {
+namespace detail {
+// This is needed because the template parameters in Bind can't be resolved
+// if they're used both as parameters of the function pointer type and as
+// parameters to Bind itself: the function pointer parameters are exact
+// matches to the function prototype, but the parameters to Bind have
+// references stripped. This trick lets the function prototype dictate the
+// Bind parameter types rather than having them deduced from the arguments.
+template <class T> struct identity { typedef T type; };
+} // namespace detail
+
+$var n = 5
+$range i 0..n
+$for i [[
+$range j 1..i
+
+template <class ObjectT, class MethodT, class R$for j [[,
+ class P$j]]>
+class MethodFunctor$i {
+ public:
+ MethodFunctor$i(MethodT method, ObjectT* object$for j [[,
+ P$j p$j]])
+ : method_(method), object_(object)$for j [[,
+ p$(j)_(p$j)]] {}
+ R operator()() const {
+ return (object_->*method_)($for j , [[p$(j)_]]); }
+ private:
+ MethodT method_;
+ ObjectT* object_;$for j [[
+
+ P$j p$(j)_;]]
+
+};
+
+template <class FunctorT, class R$for j [[,
+ class P$j]]>
+class Functor$i {
+ public:
+ $if i == 0 [[explicit ]]
+Functor$i(const FunctorT& functor$for j [[, P$j p$j]])
+ : functor_(functor)$for j [[,
+ p$(j)_(p$j)]] {}
+ R operator()() const {
+ return functor_($for j , [[p$(j)_]]); }
+ private:
+ FunctorT functor_;$for j [[
+
+ P$j p$(j)_;]]
+
+};
+
+
+#define FP_T(x) R (ObjectT::*x)($for j , [[P$j]])
+
+template <class ObjectT, class R$for j [[,
+ class P$j]]>
+MethodFunctor$i<ObjectT, FP_T(NONAME), R$for j [[, P$j]]>
+Bind(FP_T(method), ObjectT* object$for j [[,
+ typename detail::identity<P$j>::type p$j]]) {
+ return MethodFunctor$i<ObjectT, FP_T(NONAME), R$for j [[, P$j]]>(
+ method, object$for j [[, p$j]]);
+}
+
+#undef FP_T
+#define FP_T(x) R (ObjectT::*x)($for j , [[P$j]]) const
+
+template <class ObjectT, class R$for j [[,
+ class P$j]]>
+MethodFunctor$i<const ObjectT, FP_T(NONAME), R$for j [[, P$j]]>
+Bind(FP_T(method), const ObjectT* object$for j [[,
+ typename detail::identity<P$j>::type p$j]]) {
+ return MethodFunctor$i<const ObjectT, FP_T(NONAME), R$for j [[, P$j]]>(
+ method, object$for j [[, p$j]]);
+}
+
+#undef FP_T
+#define FP_T(x) R (*x)($for j , [[P$j]])
+
+template <class R$for j [[,
+ class P$j]]>
+Functor$i<FP_T(NONAME), R$for j [[, P$j]]>
+Bind(FP_T(function)$for j [[,
+ typename detail::identity<P$j>::type p$j]]) {
+ return Functor$i<FP_T(NONAME), R$for j [[, P$j]]>(
+ function$for j [[, p$j]]);
+}
+
+#undef FP_T
+
+]]
+
+} // namespace rtc
+
+#undef NONAME
+
+#endif // WEBRTC_BASE_BIND_H_
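The identity<> trick above is easiest to see with a concrete call. The following is a minimal sketch, assuming only the rtc::Bind API generated above; the Scaler type and the values are made up for illustration. Because P1 is deduced solely from the method pointer, the int argument simply converts to double instead of causing a deduction conflict.

#include <iostream>

#include "webrtc/base/bind.h"

struct Scaler {
  double Scale(double factor) { return value * factor; }
  double value;
};

int main() {
  Scaler s = {10.0};
  int factor = 3;  // An int, not a double: identity<> makes this a plain
                   // conversion rather than a second, conflicting deduction.
  std::cout << rtc::Bind(&Scaler::Scale, &s, factor)() << std::endl;  // 30
  return 0;
}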
diff --git a/chromium/third_party/webrtc/base/bind_unittest.cc b/chromium/third_party/webrtc/base/bind_unittest.cc
new file mode 100644
index 00000000000..ed8dd5cf2d2
--- /dev/null
+++ b/chromium/third_party/webrtc/base/bind_unittest.cc
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/bind.h"
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+namespace {
+
+struct MethodBindTester {
+ void NullaryVoid() { ++call_count; }
+ int NullaryInt() { ++call_count; return 1; }
+ int NullaryConst() const { ++call_count; return 2; }
+ void UnaryVoid(int dummy) { ++call_count; }
+ template <class T> T Identity(T value) { ++call_count; return value; }
+ int UnaryByRef(int& value) const { ++call_count; return ++value; } // NOLINT
+ int Multiply(int a, int b) const { ++call_count; return a * b; }
+ mutable int call_count;
+};
+
+int Return42() { return 42; }
+int Negate(int a) { return -a; }
+int Multiply(int a, int b) { return a * b; }
+
+} // namespace
+
+TEST(BindTest, BindToMethod) {
+ MethodBindTester object = {0};
+ EXPECT_EQ(0, object.call_count);
+ Bind(&MethodBindTester::NullaryVoid, &object)();
+ EXPECT_EQ(1, object.call_count);
+ EXPECT_EQ(1, Bind(&MethodBindTester::NullaryInt, &object)());
+ EXPECT_EQ(2, object.call_count);
+ EXPECT_EQ(2, Bind(&MethodBindTester::NullaryConst,
+ static_cast<const MethodBindTester*>(&object))());
+ EXPECT_EQ(3, object.call_count);
+ Bind(&MethodBindTester::UnaryVoid, &object, 5)();
+ EXPECT_EQ(4, object.call_count);
+ EXPECT_EQ(100, Bind(&MethodBindTester::Identity<int>, &object, 100)());
+ EXPECT_EQ(5, object.call_count);
+ const std::string string_value("test string");
+ EXPECT_EQ(string_value, Bind(&MethodBindTester::Identity<std::string>,
+ &object, string_value)());
+ EXPECT_EQ(6, object.call_count);
+ int value = 11;
+ EXPECT_EQ(12, Bind(&MethodBindTester::UnaryByRef, &object, value)());
+ EXPECT_EQ(12, value);
+ EXPECT_EQ(7, object.call_count);
+ EXPECT_EQ(56, Bind(&MethodBindTester::Multiply, &object, 7, 8)());
+ EXPECT_EQ(8, object.call_count);
+}
+
+TEST(BindTest, BindToFunction) {
+ EXPECT_EQ(42, Bind(&Return42)());
+ EXPECT_EQ(3, Bind(&Negate, -3)());
+ EXPECT_EQ(56, Bind(&Multiply, 8, 7)());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/buffer.h b/chromium/third_party/webrtc/base/buffer.h
new file mode 100644
index 00000000000..dbe7b1aa752
--- /dev/null
+++ b/chromium/third_party/webrtc/base/buffer.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_BUFFER_H_
+#define WEBRTC_BASE_BUFFER_H_
+
+#include <string.h>
+
+#include "webrtc/base/scoped_ptr.h"
+
+namespace rtc {
+
+// Basic buffer class, can be grown and shrunk dynamically.
+// Unlike std::string/vector, does not initialize data when expanding capacity.
+class Buffer {
+ public:
+ Buffer() {
+ Construct(NULL, 0, 0);
+ }
+ Buffer(const void* data, size_t length) {
+ Construct(data, length, length);
+ }
+ Buffer(const void* data, size_t length, size_t capacity) {
+ Construct(data, length, capacity);
+ }
+ Buffer(const Buffer& buf) {
+ Construct(buf.data(), buf.length(), buf.length());
+ }
+
+ const char* data() const { return data_.get(); }
+ char* data() { return data_.get(); }
+ // TODO: should this be size(), like STL?
+ size_t length() const { return length_; }
+ size_t capacity() const { return capacity_; }
+
+ Buffer& operator=(const Buffer& buf) {
+ if (&buf != this) {
+ Construct(buf.data(), buf.length(), buf.length());
+ }
+ return *this;
+ }
+ bool operator==(const Buffer& buf) const {
+ return (length_ == buf.length() &&
+ memcmp(data_.get(), buf.data(), length_) == 0);
+ }
+ bool operator!=(const Buffer& buf) const {
+ return !operator==(buf);
+ }
+
+ void SetData(const void* data, size_t length) {
+ ASSERT(data != NULL || length == 0);
+ SetLength(length);
+ memcpy(data_.get(), data, length);
+ }
+ void AppendData(const void* data, size_t length) {
+ ASSERT(data != NULL || length == 0);
+ size_t old_length = length_;
+ SetLength(length_ + length);
+ memcpy(data_.get() + old_length, data, length);
+ }
+ void SetLength(size_t length) {
+ SetCapacity(length);
+ length_ = length;
+ }
+ void SetCapacity(size_t capacity) {
+ if (capacity > capacity_) {
+ rtc::scoped_ptr<char[]> data(new char[capacity]);
+ memcpy(data.get(), data_.get(), length_);
+ data_.swap(data);
+ capacity_ = capacity;
+ }
+ }
+
+ void TransferTo(Buffer* buf) {
+ ASSERT(buf != NULL);
+ buf->data_.reset(data_.release());
+ buf->length_ = length_;
+ buf->capacity_ = capacity_;
+ Construct(NULL, 0, 0);
+ }
+
+ protected:
+ void Construct(const void* data, size_t length, size_t capacity) {
+ data_.reset(new char[capacity_ = capacity]);
+ SetData(data, length);
+ }
+
+ scoped_ptr<char[]> data_;
+ size_t length_;
+ size_t capacity_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_BUFFER_H_
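A short usage sketch of the Buffer API above (the payload bytes are arbitrary; the unit tests that follow exercise the same calls in more detail):

#include <iostream>

#include "webrtc/base/buffer.h"

int main() {
  const char payload[] = {0x01, 0x02, 0x03};
  rtc::Buffer buf(payload, sizeof(payload));  // length == capacity == 3
  buf.AppendData(payload, sizeof(payload));   // grows: length == capacity == 6
  buf.SetLength(4);                           // shrinks length, keeps capacity

  rtc::Buffer other;
  buf.TransferTo(&other);  // hands the storage over; buf is reset to empty
  std::cout << other.length() << " " << buf.length() << std::endl;  // "4 0"
  return 0;
}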
diff --git a/chromium/third_party/webrtc/base/buffer_unittest.cc b/chromium/third_party/webrtc/base/buffer_unittest.cc
new file mode 100644
index 00000000000..71b3f89e3f0
--- /dev/null
+++ b/chromium/third_party/webrtc/base/buffer_unittest.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+static const char kTestData[] = {
+ 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF
+};
+
+TEST(BufferTest, TestConstructDefault) {
+ Buffer buf;
+ EXPECT_EQ(0U, buf.length());
+ EXPECT_EQ(0U, buf.capacity());
+ EXPECT_EQ(Buffer(), buf);
+}
+
+TEST(BufferTest, TestConstructEmptyWithCapacity) {
+ Buffer buf(NULL, 0, 256U);
+ EXPECT_EQ(0U, buf.length());
+ EXPECT_EQ(256U, buf.capacity());
+ EXPECT_EQ(Buffer(), buf);
+}
+
+TEST(BufferTest, TestConstructData) {
+ Buffer buf(kTestData, sizeof(kTestData));
+ EXPECT_EQ(sizeof(kTestData), buf.length());
+ EXPECT_EQ(sizeof(kTestData), buf.capacity());
+ EXPECT_EQ(0, memcmp(buf.data(), kTestData, sizeof(kTestData)));
+ EXPECT_EQ(Buffer(kTestData, sizeof(kTestData)), buf);
+}
+
+TEST(BufferTest, TestConstructDataWithCapacity) {
+ Buffer buf(kTestData, sizeof(kTestData), 256U);
+ EXPECT_EQ(sizeof(kTestData), buf.length());
+ EXPECT_EQ(256U, buf.capacity());
+ EXPECT_EQ(0, memcmp(buf.data(), kTestData, sizeof(kTestData)));
+ EXPECT_EQ(Buffer(kTestData, sizeof(kTestData)), buf);
+}
+
+TEST(BufferTest, TestConstructCopy) {
+ Buffer buf1(kTestData, sizeof(kTestData), 256), buf2(buf1);
+ EXPECT_EQ(sizeof(kTestData), buf2.length());
+ EXPECT_EQ(sizeof(kTestData), buf2.capacity()); // capacity isn't copied
+ EXPECT_EQ(0, memcmp(buf2.data(), kTestData, sizeof(kTestData)));
+ EXPECT_EQ(buf1, buf2);
+}
+
+TEST(BufferTest, TestAssign) {
+ Buffer buf1, buf2(kTestData, sizeof(kTestData), 256);
+ EXPECT_NE(buf1, buf2);
+ buf1 = buf2;
+ EXPECT_EQ(sizeof(kTestData), buf1.length());
+ EXPECT_EQ(sizeof(kTestData), buf1.capacity()); // capacity isn't copied
+ EXPECT_EQ(0, memcmp(buf1.data(), kTestData, sizeof(kTestData)));
+ EXPECT_EQ(buf1, buf2);
+}
+
+TEST(BufferTest, TestSetData) {
+ Buffer buf;
+ buf.SetData(kTestData, sizeof(kTestData));
+ EXPECT_EQ(sizeof(kTestData), buf.length());
+ EXPECT_EQ(sizeof(kTestData), buf.capacity());
+ EXPECT_EQ(0, memcmp(buf.data(), kTestData, sizeof(kTestData)));
+}
+
+TEST(BufferTest, TestAppendData) {
+ Buffer buf(kTestData, sizeof(kTestData));
+ buf.AppendData(kTestData, sizeof(kTestData));
+ EXPECT_EQ(2 * sizeof(kTestData), buf.length());
+ EXPECT_EQ(2 * sizeof(kTestData), buf.capacity());
+ EXPECT_EQ(0, memcmp(buf.data(), kTestData, sizeof(kTestData)));
+ EXPECT_EQ(0, memcmp(buf.data() + sizeof(kTestData),
+ kTestData, sizeof(kTestData)));
+}
+
+TEST(BufferTest, TestSetLengthSmaller) {
+ Buffer buf;
+ buf.SetData(kTestData, sizeof(kTestData));
+ buf.SetLength(sizeof(kTestData) / 2);
+ EXPECT_EQ(sizeof(kTestData) / 2, buf.length());
+ EXPECT_EQ(sizeof(kTestData), buf.capacity());
+ EXPECT_EQ(0, memcmp(buf.data(), kTestData, sizeof(kTestData) / 2));
+}
+
+TEST(BufferTest, TestSetLengthLarger) {
+ Buffer buf;
+ buf.SetData(kTestData, sizeof(kTestData));
+ buf.SetLength(sizeof(kTestData) * 2);
+ EXPECT_EQ(sizeof(kTestData) * 2, buf.length());
+ EXPECT_EQ(sizeof(kTestData) * 2, buf.capacity());
+ EXPECT_EQ(0, memcmp(buf.data(), kTestData, sizeof(kTestData)));
+}
+
+TEST(BufferTest, TestSetCapacitySmaller) {
+ Buffer buf;
+ buf.SetData(kTestData, sizeof(kTestData));
+ buf.SetCapacity(sizeof(kTestData) / 2); // should be ignored
+ EXPECT_EQ(sizeof(kTestData), buf.length());
+ EXPECT_EQ(sizeof(kTestData), buf.capacity());
+ EXPECT_EQ(0, memcmp(buf.data(), kTestData, sizeof(kTestData)));
+}
+
+TEST(BufferTest, TestSetCapacityLarger) {
+ Buffer buf(kTestData, sizeof(kTestData));
+ buf.SetCapacity(sizeof(kTestData) * 2);
+ EXPECT_EQ(sizeof(kTestData), buf.length());
+ EXPECT_EQ(sizeof(kTestData) * 2, buf.capacity());
+ EXPECT_EQ(0, memcmp(buf.data(), kTestData, sizeof(kTestData)));
+}
+
+TEST(BufferTest, TestSetCapacityThenSetLength) {
+ Buffer buf(kTestData, sizeof(kTestData));
+ buf.SetCapacity(sizeof(kTestData) * 4);
+ memcpy(buf.data() + sizeof(kTestData), kTestData, sizeof(kTestData));
+ buf.SetLength(sizeof(kTestData) * 2);
+ EXPECT_EQ(sizeof(kTestData) * 2, buf.length());
+ EXPECT_EQ(sizeof(kTestData) * 4, buf.capacity());
+ EXPECT_EQ(0, memcmp(buf.data(), kTestData, sizeof(kTestData)));
+ EXPECT_EQ(0, memcmp(buf.data() + sizeof(kTestData),
+ kTestData, sizeof(kTestData)));
+}
+
+TEST(BufferTest, TestTransfer) {
+ Buffer buf1(kTestData, sizeof(kTestData), 256U), buf2;
+ buf1.TransferTo(&buf2);
+ EXPECT_EQ(0U, buf1.length());
+ EXPECT_EQ(0U, buf1.capacity());
+ EXPECT_EQ(sizeof(kTestData), buf2.length());
+ EXPECT_EQ(256U, buf2.capacity()); // capacity does transfer
+ EXPECT_EQ(0, memcmp(buf2.data(), kTestData, sizeof(kTestData)));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/bytebuffer.cc b/chromium/third_party/webrtc/base/bytebuffer.cc
new file mode 100644
index 00000000000..6133759e59a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/bytebuffer.cc
@@ -0,0 +1,234 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/bytebuffer.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include <algorithm>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/byteorder.h"
+
+namespace rtc {
+
+static const int DEFAULT_SIZE = 4096;
+
+ByteBuffer::ByteBuffer() {
+ Construct(NULL, DEFAULT_SIZE, ORDER_NETWORK);
+}
+
+ByteBuffer::ByteBuffer(ByteOrder byte_order) {
+ Construct(NULL, DEFAULT_SIZE, byte_order);
+}
+
+ByteBuffer::ByteBuffer(const char* bytes, size_t len) {
+ Construct(bytes, len, ORDER_NETWORK);
+}
+
+ByteBuffer::ByteBuffer(const char* bytes, size_t len, ByteOrder byte_order) {
+ Construct(bytes, len, byte_order);
+}
+
+ByteBuffer::ByteBuffer(const char* bytes) {
+ Construct(bytes, strlen(bytes), ORDER_NETWORK);
+}
+
+void ByteBuffer::Construct(const char* bytes, size_t len,
+ ByteOrder byte_order) {
+ version_ = 0;
+ start_ = 0;
+ size_ = len;
+ byte_order_ = byte_order;
+ bytes_ = new char[size_];
+
+ if (bytes) {
+ end_ = len;
+ memcpy(bytes_, bytes, end_);
+ } else {
+ end_ = 0;
+ }
+}
+
+ByteBuffer::~ByteBuffer() {
+ delete[] bytes_;
+}
+
+bool ByteBuffer::ReadUInt8(uint8* val) {
+ if (!val) return false;
+
+ return ReadBytes(reinterpret_cast<char*>(val), 1);
+}
+
+bool ByteBuffer::ReadUInt16(uint16* val) {
+ if (!val) return false;
+
+ uint16 v;
+ if (!ReadBytes(reinterpret_cast<char*>(&v), 2)) {
+ return false;
+ } else {
+ *val = (byte_order_ == ORDER_NETWORK) ? NetworkToHost16(v) : v;
+ return true;
+ }
+}
+
+bool ByteBuffer::ReadUInt24(uint32* val) {
+ if (!val) return false;
+
+ uint32 v = 0;
+ char* read_into = reinterpret_cast<char*>(&v);
+ if (byte_order_ == ORDER_NETWORK || IsHostBigEndian()) {
+ ++read_into;
+ }
+
+ if (!ReadBytes(read_into, 3)) {
+ return false;
+ } else {
+ *val = (byte_order_ == ORDER_NETWORK) ? NetworkToHost32(v) : v;
+ return true;
+ }
+}
+
+bool ByteBuffer::ReadUInt32(uint32* val) {
+ if (!val) return false;
+
+ uint32 v;
+ if (!ReadBytes(reinterpret_cast<char*>(&v), 4)) {
+ return false;
+ } else {
+ *val = (byte_order_ == ORDER_NETWORK) ? NetworkToHost32(v) : v;
+ return true;
+ }
+}
+
+bool ByteBuffer::ReadUInt64(uint64* val) {
+ if (!val) return false;
+
+ uint64 v;
+ if (!ReadBytes(reinterpret_cast<char*>(&v), 8)) {
+ return false;
+ } else {
+ *val = (byte_order_ == ORDER_NETWORK) ? NetworkToHost64(v) : v;
+ return true;
+ }
+}
+
+bool ByteBuffer::ReadString(std::string* val, size_t len) {
+ if (!val) return false;
+
+ if (len > Length()) {
+ return false;
+ } else {
+ val->append(bytes_ + start_, len);
+ start_ += len;
+ return true;
+ }
+}
+
+bool ByteBuffer::ReadBytes(char* val, size_t len) {
+ if (len > Length()) {
+ return false;
+ } else {
+ memcpy(val, bytes_ + start_, len);
+ start_ += len;
+ return true;
+ }
+}
+
+void ByteBuffer::WriteUInt8(uint8 val) {
+ WriteBytes(reinterpret_cast<const char*>(&val), 1);
+}
+
+void ByteBuffer::WriteUInt16(uint16 val) {
+ uint16 v = (byte_order_ == ORDER_NETWORK) ? HostToNetwork16(val) : val;
+ WriteBytes(reinterpret_cast<const char*>(&v), 2);
+}
+
+void ByteBuffer::WriteUInt24(uint32 val) {
+ uint32 v = (byte_order_ == ORDER_NETWORK) ? HostToNetwork32(val) : val;
+ char* start = reinterpret_cast<char*>(&v);
+ if (byte_order_ == ORDER_NETWORK || IsHostBigEndian()) {
+ ++start;
+ }
+ WriteBytes(start, 3);
+}
+
+void ByteBuffer::WriteUInt32(uint32 val) {
+ uint32 v = (byte_order_ == ORDER_NETWORK) ? HostToNetwork32(val) : val;
+ WriteBytes(reinterpret_cast<const char*>(&v), 4);
+}
+
+void ByteBuffer::WriteUInt64(uint64 val) {
+ uint64 v = (byte_order_ == ORDER_NETWORK) ? HostToNetwork64(val) : val;
+ WriteBytes(reinterpret_cast<const char*>(&v), 8);
+}
+
+void ByteBuffer::WriteString(const std::string& val) {
+ WriteBytes(val.c_str(), val.size());
+}
+
+void ByteBuffer::WriteBytes(const char* val, size_t len) {
+ memcpy(ReserveWriteBuffer(len), val, len);
+}
+
+char* ByteBuffer::ReserveWriteBuffer(size_t len) {
+ if (Length() + len > Capacity())
+ Resize(Length() + len);
+
+ char* start = bytes_ + end_;
+ end_ += len;
+ return start;
+}
+
+void ByteBuffer::Resize(size_t size) {
+  size_t len = std::min(end_ - start_, size);
+ if (size <= size_) {
+ // Don't reallocate, just move data backwards
+ memmove(bytes_, bytes_ + start_, len);
+ } else {
+ // Reallocate a larger buffer.
+    size_ = std::max(size, 3 * size_ / 2);
+ char* new_bytes = new char[size_];
+ memcpy(new_bytes, bytes_ + start_, len);
+ delete [] bytes_;
+ bytes_ = new_bytes;
+ }
+ start_ = 0;
+ end_ = len;
+ ++version_;
+}
+
+bool ByteBuffer::Consume(size_t size) {
+ if (size > Length())
+ return false;
+ start_ += size;
+ return true;
+}
+
+ByteBuffer::ReadPosition ByteBuffer::GetReadPosition() const {
+ return ReadPosition(start_, version_);
+}
+
+bool ByteBuffer::SetReadPosition(const ReadPosition &position) {
+ if (position.version_ != version_) {
+ return false;
+ }
+ start_ = position.start_;
+ return true;
+}
+
+void ByteBuffer::Clear() {
+ memset(bytes_, 0, size_);
+ start_ = end_ = 0;
+ ++version_;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/bytebuffer.h b/chromium/third_party/webrtc/base/bytebuffer.h
new file mode 100644
index 00000000000..1934f418e5a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/bytebuffer.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_BYTEBUFFER_H_
+#define WEBRTC_BASE_BYTEBUFFER_H_
+
+#include <string>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
+
+namespace rtc {
+
+class ByteBuffer {
+ public:
+
+ enum ByteOrder {
+ ORDER_NETWORK = 0, // Default, use network byte order (big endian).
+ ORDER_HOST, // Use the native order of the host.
+ };
+
+  // |byte_order| defines the order of bytes in the buffer.
+ ByteBuffer();
+ explicit ByteBuffer(ByteOrder byte_order);
+ ByteBuffer(const char* bytes, size_t len);
+ ByteBuffer(const char* bytes, size_t len, ByteOrder byte_order);
+
+ // Initializes buffer from a zero-terminated string.
+ explicit ByteBuffer(const char* bytes);
+
+ ~ByteBuffer();
+
+ const char* Data() const { return bytes_ + start_; }
+ size_t Length() const { return end_ - start_; }
+ size_t Capacity() const { return size_ - start_; }
+ ByteOrder Order() const { return byte_order_; }
+
+  // Reads the next value from the buffer. Returns false if there isn't
+  // enough data left for the specified type.
+ bool ReadUInt8(uint8* val);
+ bool ReadUInt16(uint16* val);
+ bool ReadUInt24(uint32* val);
+ bool ReadUInt32(uint32* val);
+ bool ReadUInt64(uint64* val);
+ bool ReadBytes(char* val, size_t len);
+
+  // Appends the next |len| bytes from the buffer to |val|. Returns false
+  // if there are fewer than |len| bytes left.
+ bool ReadString(std::string* val, size_t len);
+
+  // Writes a value to the buffer. Resizes the buffer when
+  // necessary.
+ void WriteUInt8(uint8 val);
+ void WriteUInt16(uint16 val);
+ void WriteUInt24(uint32 val);
+ void WriteUInt32(uint32 val);
+ void WriteUInt64(uint64 val);
+ void WriteString(const std::string& val);
+ void WriteBytes(const char* val, size_t len);
+
+ // Reserves the given number of bytes and returns a char* that can be written
+ // into. Useful for functions that require a char* buffer and not a
+ // ByteBuffer.
+ char* ReserveWriteBuffer(size_t len);
+
+  // Resizes the buffer to the specified |size|. This invalidates any
+  // remembered read positions.
+ void Resize(size_t size);
+
+  // Moves the current read position |size| bytes forward. Returns false if
+  // there are fewer than |size| bytes left in the buffer. Consume doesn't
+  // permanently remove data, so remembered read positions are still valid
+  // after this call.
+ bool Consume(size_t size);
+
+ // Clears the contents of the buffer. After this, Length() will be 0.
+ void Clear();
+
+ // Used with GetReadPosition/SetReadPosition.
+ class ReadPosition {
+ friend class ByteBuffer;
+ ReadPosition(size_t start, int version)
+ : start_(start), version_(version) { }
+ size_t start_;
+ int version_;
+ };
+
+  // Remembers the current read position for a future SetReadPosition. Any
+  // calls to Resize or Clear in the interim will invalidate the position.
+ ReadPosition GetReadPosition() const;
+
+ // If the given position is still valid, restores that read position.
+ bool SetReadPosition(const ReadPosition &position);
+
+ private:
+ void Construct(const char* bytes, size_t size, ByteOrder byte_order);
+
+ char* bytes_;
+ size_t size_;
+ size_t start_;
+ size_t end_;
+ int version_;
+ ByteOrder byte_order_;
+
+ // There are sensible ways to define these, but they aren't needed in our code
+ // base.
+ DISALLOW_COPY_AND_ASSIGN(ByteBuffer);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_BYTEBUFFER_H_
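A minimal sketch of the read-position semantics described above, assuming only the ByteBuffer API in this header; reads and Consume() only advance the read cursor, so a remembered position stays valid until something resizes or clears the buffer:

#include <iostream>

#include "webrtc/base/bytebuffer.h"

int main() {
  rtc::ByteBuffer buf;  // defaults to network (big-endian) byte order
  buf.WriteUInt16(0x1234);
  buf.WriteString("abc");

  rtc::ByteBuffer::ReadPosition pos = buf.GetReadPosition();
  uint16 value = 0;
  buf.ReadUInt16(&value);    // advances the read cursor by two bytes
  buf.SetReadPosition(pos);  // rewinds; no resize happened, so still valid
  buf.ReadUInt16(&value);
  std::cout << std::hex << value << std::endl;  // prints 1234
  return 0;
}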
diff --git a/chromium/third_party/webrtc/base/bytebuffer_unittest.cc b/chromium/third_party/webrtc/base/bytebuffer_unittest.cc
new file mode 100644
index 00000000000..f4b0504efc8
--- /dev/null
+++ b/chromium/third_party/webrtc/base/bytebuffer_unittest.cc
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/bytebuffer.h"
+#include "webrtc/base/byteorder.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+TEST(ByteBufferTest, TestByteOrder) {
+ uint16 n16 = 1;
+ uint32 n32 = 1;
+ uint64 n64 = 1;
+
+ EXPECT_EQ(n16, NetworkToHost16(HostToNetwork16(n16)));
+ EXPECT_EQ(n32, NetworkToHost32(HostToNetwork32(n32)));
+ EXPECT_EQ(n64, NetworkToHost64(HostToNetwork64(n64)));
+
+ if (IsHostBigEndian()) {
+ // The host is the network (big) endian.
+ EXPECT_EQ(n16, HostToNetwork16(n16));
+ EXPECT_EQ(n32, HostToNetwork32(n32));
+ EXPECT_EQ(n64, HostToNetwork64(n64));
+
+ // GetBE converts big endian to little endian here.
+ EXPECT_EQ(n16 >> 8, GetBE16(&n16));
+ EXPECT_EQ(n32 >> 24, GetBE32(&n32));
+ EXPECT_EQ(n64 >> 56, GetBE64(&n64));
+ } else {
+ // The host is little endian.
+ EXPECT_NE(n16, HostToNetwork16(n16));
+ EXPECT_NE(n32, HostToNetwork32(n32));
+ EXPECT_NE(n64, HostToNetwork64(n64));
+
+ // GetBE converts little endian to big endian here.
+ EXPECT_EQ(GetBE16(&n16), HostToNetwork16(n16));
+ EXPECT_EQ(GetBE32(&n32), HostToNetwork32(n32));
+ EXPECT_EQ(GetBE64(&n64), HostToNetwork64(n64));
+
+ // GetBE converts little endian to big endian here.
+ EXPECT_EQ(n16 << 8, GetBE16(&n16));
+ EXPECT_EQ(n32 << 24, GetBE32(&n32));
+ EXPECT_EQ(n64 << 56, GetBE64(&n64));
+ }
+}
+
+TEST(ByteBufferTest, TestBufferLength) {
+ ByteBuffer buffer;
+ size_t size = 0;
+ EXPECT_EQ(size, buffer.Length());
+
+ buffer.WriteUInt8(1);
+ ++size;
+ EXPECT_EQ(size, buffer.Length());
+
+ buffer.WriteUInt16(1);
+ size += 2;
+ EXPECT_EQ(size, buffer.Length());
+
+ buffer.WriteUInt24(1);
+ size += 3;
+ EXPECT_EQ(size, buffer.Length());
+
+ buffer.WriteUInt32(1);
+ size += 4;
+ EXPECT_EQ(size, buffer.Length());
+
+ buffer.WriteUInt64(1);
+ size += 8;
+ EXPECT_EQ(size, buffer.Length());
+
+ EXPECT_TRUE(buffer.Consume(0));
+ EXPECT_EQ(size, buffer.Length());
+
+ EXPECT_TRUE(buffer.Consume(4));
+ size -= 4;
+ EXPECT_EQ(size, buffer.Length());
+}
+
+TEST(ByteBufferTest, TestGetSetReadPosition) {
+ ByteBuffer buffer("ABCDEF", 6);
+ EXPECT_EQ(6U, buffer.Length());
+ ByteBuffer::ReadPosition pos(buffer.GetReadPosition());
+ EXPECT_TRUE(buffer.SetReadPosition(pos));
+ EXPECT_EQ(6U, buffer.Length());
+ std::string read;
+ EXPECT_TRUE(buffer.ReadString(&read, 3));
+ EXPECT_EQ("ABC", read);
+ EXPECT_EQ(3U, buffer.Length());
+ EXPECT_TRUE(buffer.SetReadPosition(pos));
+ EXPECT_EQ(6U, buffer.Length());
+ read.clear();
+ EXPECT_TRUE(buffer.ReadString(&read, 3));
+ EXPECT_EQ("ABC", read);
+ EXPECT_EQ(3U, buffer.Length());
+ // For a resize by writing Capacity() number of bytes.
+ size_t capacity = buffer.Capacity();
+ buffer.ReserveWriteBuffer(buffer.Capacity());
+ EXPECT_EQ(capacity + 3U, buffer.Length());
+ EXPECT_FALSE(buffer.SetReadPosition(pos));
+ read.clear();
+ EXPECT_TRUE(buffer.ReadString(&read, 3));
+ EXPECT_EQ("DEF", read);
+}
+
+TEST(ByteBufferTest, TestReadWriteBuffer) {
+ ByteBuffer::ByteOrder orders[2] = { ByteBuffer::ORDER_HOST,
+ ByteBuffer::ORDER_NETWORK };
+ for (size_t i = 0; i < ARRAY_SIZE(orders); i++) {
+ ByteBuffer buffer(orders[i]);
+ EXPECT_EQ(orders[i], buffer.Order());
+ uint8 ru8;
+ EXPECT_FALSE(buffer.ReadUInt8(&ru8));
+
+ // Write and read uint8.
+ uint8 wu8 = 1;
+ buffer.WriteUInt8(wu8);
+ EXPECT_TRUE(buffer.ReadUInt8(&ru8));
+ EXPECT_EQ(wu8, ru8);
+ EXPECT_EQ(0U, buffer.Length());
+
+ // Write and read uint16.
+ uint16 wu16 = (1 << 8) + 1;
+ buffer.WriteUInt16(wu16);
+ uint16 ru16;
+ EXPECT_TRUE(buffer.ReadUInt16(&ru16));
+ EXPECT_EQ(wu16, ru16);
+ EXPECT_EQ(0U, buffer.Length());
+
+ // Write and read uint24.
+ uint32 wu24 = (3 << 16) + (2 << 8) + 1;
+ buffer.WriteUInt24(wu24);
+ uint32 ru24;
+ EXPECT_TRUE(buffer.ReadUInt24(&ru24));
+ EXPECT_EQ(wu24, ru24);
+ EXPECT_EQ(0U, buffer.Length());
+
+ // Write and read uint32.
+ uint32 wu32 = (4 << 24) + (3 << 16) + (2 << 8) + 1;
+ buffer.WriteUInt32(wu32);
+ uint32 ru32;
+ EXPECT_TRUE(buffer.ReadUInt32(&ru32));
+ EXPECT_EQ(wu32, ru32);
+ EXPECT_EQ(0U, buffer.Length());
+
+ // Write and read uint64.
+ uint32 another32 = (8 << 24) + (7 << 16) + (6 << 8) + 5;
+ uint64 wu64 = (static_cast<uint64>(another32) << 32) + wu32;
+ buffer.WriteUInt64(wu64);
+ uint64 ru64;
+ EXPECT_TRUE(buffer.ReadUInt64(&ru64));
+ EXPECT_EQ(wu64, ru64);
+ EXPECT_EQ(0U, buffer.Length());
+
+ // Write and read string.
+ std::string write_string("hello");
+ buffer.WriteString(write_string);
+ std::string read_string;
+ EXPECT_TRUE(buffer.ReadString(&read_string, write_string.size()));
+ EXPECT_EQ(write_string, read_string);
+ EXPECT_EQ(0U, buffer.Length());
+
+ // Write and read bytes
+ char write_bytes[] = "foo";
+ buffer.WriteBytes(write_bytes, 3);
+ char read_bytes[3];
+ EXPECT_TRUE(buffer.ReadBytes(read_bytes, 3));
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_EQ(write_bytes[i], read_bytes[i]);
+ }
+ EXPECT_EQ(0U, buffer.Length());
+
+ // Write and read reserved buffer space
+ char* write_dst = buffer.ReserveWriteBuffer(3);
+ memcpy(write_dst, write_bytes, 3);
+ memset(read_bytes, 0, 3);
+ EXPECT_TRUE(buffer.ReadBytes(read_bytes, 3));
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_EQ(write_bytes[i], read_bytes[i]);
+ }
+ EXPECT_EQ(0U, buffer.Length());
+
+ // Write and read in order.
+ buffer.WriteUInt8(wu8);
+ buffer.WriteUInt16(wu16);
+ buffer.WriteUInt24(wu24);
+ buffer.WriteUInt32(wu32);
+ buffer.WriteUInt64(wu64);
+ EXPECT_TRUE(buffer.ReadUInt8(&ru8));
+ EXPECT_EQ(wu8, ru8);
+ EXPECT_TRUE(buffer.ReadUInt16(&ru16));
+ EXPECT_EQ(wu16, ru16);
+ EXPECT_TRUE(buffer.ReadUInt24(&ru24));
+ EXPECT_EQ(wu24, ru24);
+ EXPECT_TRUE(buffer.ReadUInt32(&ru32));
+ EXPECT_EQ(wu32, ru32);
+ EXPECT_TRUE(buffer.ReadUInt64(&ru64));
+ EXPECT_EQ(wu64, ru64);
+ EXPECT_EQ(0U, buffer.Length());
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/byteorder.h b/chromium/third_party/webrtc/base/byteorder.h
new file mode 100644
index 00000000000..d907d9e4128
--- /dev/null
+++ b/chromium/third_party/webrtc/base/byteorder.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_BYTEORDER_H_
+#define WEBRTC_BASE_BYTEORDER_H_
+
+#if defined(WEBRTC_POSIX) && !defined(__native_client__)
+#include <arpa/inet.h>
+#endif
+
+#if defined(WEBRTC_WIN)
+#include <stdlib.h>
+#endif
+
+#include "webrtc/base/basictypes.h"
+
+namespace rtc {
+
+// Reading and writing of little- and big-endian numbers from memory.
+// TODO: Add optimized versions that read and write integers directly in
+// host-endian format when the platform supports it.
+
+inline void Set8(void* memory, size_t offset, uint8 v) {
+ static_cast<uint8*>(memory)[offset] = v;
+}
+
+inline uint8 Get8(const void* memory, size_t offset) {
+ return static_cast<const uint8*>(memory)[offset];
+}
+
+inline void SetBE16(void* memory, uint16 v) {
+ Set8(memory, 0, static_cast<uint8>(v >> 8));
+ Set8(memory, 1, static_cast<uint8>(v >> 0));
+}
+
+inline void SetBE32(void* memory, uint32 v) {
+ Set8(memory, 0, static_cast<uint8>(v >> 24));
+ Set8(memory, 1, static_cast<uint8>(v >> 16));
+ Set8(memory, 2, static_cast<uint8>(v >> 8));
+ Set8(memory, 3, static_cast<uint8>(v >> 0));
+}
+
+inline void SetBE64(void* memory, uint64 v) {
+ Set8(memory, 0, static_cast<uint8>(v >> 56));
+ Set8(memory, 1, static_cast<uint8>(v >> 48));
+ Set8(memory, 2, static_cast<uint8>(v >> 40));
+ Set8(memory, 3, static_cast<uint8>(v >> 32));
+ Set8(memory, 4, static_cast<uint8>(v >> 24));
+ Set8(memory, 5, static_cast<uint8>(v >> 16));
+ Set8(memory, 6, static_cast<uint8>(v >> 8));
+ Set8(memory, 7, static_cast<uint8>(v >> 0));
+}
+
+inline uint16 GetBE16(const void* memory) {
+ return static_cast<uint16>((Get8(memory, 0) << 8) |
+ (Get8(memory, 1) << 0));
+}
+
+inline uint32 GetBE32(const void* memory) {
+ return (static_cast<uint32>(Get8(memory, 0)) << 24) |
+ (static_cast<uint32>(Get8(memory, 1)) << 16) |
+ (static_cast<uint32>(Get8(memory, 2)) << 8) |
+ (static_cast<uint32>(Get8(memory, 3)) << 0);
+}
+
+inline uint64 GetBE64(const void* memory) {
+ return (static_cast<uint64>(Get8(memory, 0)) << 56) |
+ (static_cast<uint64>(Get8(memory, 1)) << 48) |
+ (static_cast<uint64>(Get8(memory, 2)) << 40) |
+ (static_cast<uint64>(Get8(memory, 3)) << 32) |
+ (static_cast<uint64>(Get8(memory, 4)) << 24) |
+ (static_cast<uint64>(Get8(memory, 5)) << 16) |
+ (static_cast<uint64>(Get8(memory, 6)) << 8) |
+ (static_cast<uint64>(Get8(memory, 7)) << 0);
+}
+
+inline void SetLE16(void* memory, uint16 v) {
+ Set8(memory, 0, static_cast<uint8>(v >> 0));
+ Set8(memory, 1, static_cast<uint8>(v >> 8));
+}
+
+inline void SetLE32(void* memory, uint32 v) {
+ Set8(memory, 0, static_cast<uint8>(v >> 0));
+ Set8(memory, 1, static_cast<uint8>(v >> 8));
+ Set8(memory, 2, static_cast<uint8>(v >> 16));
+ Set8(memory, 3, static_cast<uint8>(v >> 24));
+}
+
+inline void SetLE64(void* memory, uint64 v) {
+ Set8(memory, 0, static_cast<uint8>(v >> 0));
+ Set8(memory, 1, static_cast<uint8>(v >> 8));
+ Set8(memory, 2, static_cast<uint8>(v >> 16));
+ Set8(memory, 3, static_cast<uint8>(v >> 24));
+ Set8(memory, 4, static_cast<uint8>(v >> 32));
+ Set8(memory, 5, static_cast<uint8>(v >> 40));
+ Set8(memory, 6, static_cast<uint8>(v >> 48));
+ Set8(memory, 7, static_cast<uint8>(v >> 56));
+}
+
+inline uint16 GetLE16(const void* memory) {
+ return static_cast<uint16>((Get8(memory, 0) << 0) |
+ (Get8(memory, 1) << 8));
+}
+
+inline uint32 GetLE32(const void* memory) {
+ return (static_cast<uint32>(Get8(memory, 0)) << 0) |
+ (static_cast<uint32>(Get8(memory, 1)) << 8) |
+ (static_cast<uint32>(Get8(memory, 2)) << 16) |
+ (static_cast<uint32>(Get8(memory, 3)) << 24);
+}
+
+inline uint64 GetLE64(const void* memory) {
+ return (static_cast<uint64>(Get8(memory, 0)) << 0) |
+ (static_cast<uint64>(Get8(memory, 1)) << 8) |
+ (static_cast<uint64>(Get8(memory, 2)) << 16) |
+ (static_cast<uint64>(Get8(memory, 3)) << 24) |
+ (static_cast<uint64>(Get8(memory, 4)) << 32) |
+ (static_cast<uint64>(Get8(memory, 5)) << 40) |
+ (static_cast<uint64>(Get8(memory, 6)) << 48) |
+ (static_cast<uint64>(Get8(memory, 7)) << 56);
+}
+
+// Check if the current host is big endian.
+inline bool IsHostBigEndian() {
+ static const int number = 1;
+ return 0 == *reinterpret_cast<const char*>(&number);
+}
+
+inline uint16 HostToNetwork16(uint16 n) {
+ uint16 result;
+ SetBE16(&result, n);
+ return result;
+}
+
+inline uint32 HostToNetwork32(uint32 n) {
+ uint32 result;
+ SetBE32(&result, n);
+ return result;
+}
+
+inline uint64 HostToNetwork64(uint64 n) {
+ uint64 result;
+ SetBE64(&result, n);
+ return result;
+}
+
+inline uint16 NetworkToHost16(uint16 n) {
+ return GetBE16(&n);
+}
+
+inline uint32 NetworkToHost32(uint32 n) {
+ return GetBE32(&n);
+}
+
+inline uint64 NetworkToHost64(uint64 n) {
+ return GetBE64(&n);
+}
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_BYTEORDER_H_
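A small sketch of the helpers above (values chosen arbitrarily): SetBE32 lays an integer out in network order, GetBE32 reads it back into host order, and IsHostBigEndian reports the host's native layout.

#include <iostream>

#include "webrtc/base/byteorder.h"

int main() {
  char wire[4];
  rtc::SetBE32(wire, 0x01020304u);   // wire now holds 01 02 03 04
  uint32 host = rtc::GetBE32(wire);  // back to a host-order integer
  std::cout << std::hex << host << std::endl;        // 1020304
  std::cout << rtc::IsHostBigEndian() << std::endl;  // 0 on little-endian hosts
  return 0;
}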
diff --git a/chromium/third_party/webrtc/base/byteorder_unittest.cc b/chromium/third_party/webrtc/base/byteorder_unittest.cc
new file mode 100644
index 00000000000..f4e7df3b71a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/byteorder_unittest.cc
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/byteorder.h"
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+// Test memory set functions put values into memory in expected order.
+TEST(ByteOrderTest, TestSet) {
+ uint8 buf[8] = { 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u };
+ Set8(buf, 0, 0xfb);
+ Set8(buf, 1, 0x12);
+ EXPECT_EQ(0xfb, buf[0]);
+ EXPECT_EQ(0x12, buf[1]);
+ SetBE16(buf, 0x1234);
+ EXPECT_EQ(0x12, buf[0]);
+ EXPECT_EQ(0x34, buf[1]);
+ SetLE16(buf, 0x1234);
+ EXPECT_EQ(0x34, buf[0]);
+ EXPECT_EQ(0x12, buf[1]);
+ SetBE32(buf, 0x12345678);
+ EXPECT_EQ(0x12, buf[0]);
+ EXPECT_EQ(0x34, buf[1]);
+ EXPECT_EQ(0x56, buf[2]);
+ EXPECT_EQ(0x78, buf[3]);
+ SetLE32(buf, 0x12345678);
+ EXPECT_EQ(0x78, buf[0]);
+ EXPECT_EQ(0x56, buf[1]);
+ EXPECT_EQ(0x34, buf[2]);
+ EXPECT_EQ(0x12, buf[3]);
+ SetBE64(buf, UINT64_C(0x0123456789abcdef));
+ EXPECT_EQ(0x01, buf[0]);
+ EXPECT_EQ(0x23, buf[1]);
+ EXPECT_EQ(0x45, buf[2]);
+ EXPECT_EQ(0x67, buf[3]);
+ EXPECT_EQ(0x89, buf[4]);
+ EXPECT_EQ(0xab, buf[5]);
+ EXPECT_EQ(0xcd, buf[6]);
+ EXPECT_EQ(0xef, buf[7]);
+ SetLE64(buf, UINT64_C(0x0123456789abcdef));
+ EXPECT_EQ(0xef, buf[0]);
+ EXPECT_EQ(0xcd, buf[1]);
+ EXPECT_EQ(0xab, buf[2]);
+ EXPECT_EQ(0x89, buf[3]);
+ EXPECT_EQ(0x67, buf[4]);
+ EXPECT_EQ(0x45, buf[5]);
+ EXPECT_EQ(0x23, buf[6]);
+ EXPECT_EQ(0x01, buf[7]);
+}
+
+// Test memory get functions get values from memory in expected order.
+TEST(ByteOrderTest, TestGet) {
+ uint8 buf[8];
+ buf[0] = 0x01u;
+ buf[1] = 0x23u;
+ buf[2] = 0x45u;
+ buf[3] = 0x67u;
+ buf[4] = 0x89u;
+ buf[5] = 0xabu;
+ buf[6] = 0xcdu;
+ buf[7] = 0xefu;
+ EXPECT_EQ(0x01u, Get8(buf, 0));
+ EXPECT_EQ(0x23u, Get8(buf, 1));
+ EXPECT_EQ(0x0123u, GetBE16(buf));
+ EXPECT_EQ(0x2301u, GetLE16(buf));
+ EXPECT_EQ(0x01234567u, GetBE32(buf));
+ EXPECT_EQ(0x67452301u, GetLE32(buf));
+ EXPECT_EQ(UINT64_C(0x0123456789abcdef), GetBE64(buf));
+ EXPECT_EQ(UINT64_C(0xefcdab8967452301), GetLE64(buf));
+}
+
+} // namespace rtc
+
diff --git a/chromium/third_party/webrtc/base/callback.h b/chromium/third_party/webrtc/base/callback.h
new file mode 100644
index 00000000000..949510e9505
--- /dev/null
+++ b/chromium/third_party/webrtc/base/callback.h
@@ -0,0 +1,261 @@
+// This file was GENERATED by command:
+// pump.py callback.h.pump
+// DO NOT EDIT BY HAND!!!
+
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// To generate callback.h from callback.h.pump, execute:
+// /home/build/google3/third_party/gtest/scripts/pump.py callback.h.pump
+
+// Callbacks are callable object containers. They can hold a function pointer
+// or a function object and behave like a value type. Internally, data is
+// reference-counted, making copies and pass-by-value inexpensive.
+//
+// Callbacks are typed using template arguments. The format is:
+// CallbackN<ReturnType, ParamType1, ..., ParamTypeN>
+// where N is the number of arguments supplied to the callable object.
+// Callbacks are invoked using operator(), just like a function or a function
+// object. Default-constructed callbacks are "empty," and executing an empty
+// callback does nothing. A callback can be made empty by assigning it from
+// a default-constructed callback.
+//
+// Callbacks are similar in purpose to std::function (which isn't available on
+// all platforms we support) and a lightweight alternative to sigslots. Since
+// they effectively hide the type of the object they call, they're useful in
+// breaking dependencies between objects that need to interact with one another.
+// Notably, they can hold the results of Bind(), std::bind*, etc., without
+// needing to know the resulting object type of those calls.
+//
+// Sigslots, on the other hand, provide a fuller feature set, such as multiple
+// subscriptions to a signal, optional thread-safety, and lifetime tracking of
+// slots. When these features are needed, choose sigslots.
+//
+// Example:
+// int sqr(int x) { return x * x; }
+// struct AddK {
+// int k;
+// int operator()(int x) const { return x + k; }
+// } add_k = {5};
+//
+// Callback1<int, int> my_callback;
+// cout << my_callback.empty() << endl; // true
+//
+// my_callback = Callback1<int, int>(&sqr);
+// cout << my_callback.empty() << endl; // false
+// cout << my_callback(3) << endl; // 9
+//
+// my_callback = Callback1<int, int>(add_k);
+// cout << my_callback(10) << endl; // 15
+//
+// my_callback = Callback1<int, int>();
+// cout << my_callback.empty() << endl; // true
+
+#ifndef WEBRTC_BASE_CALLBACK_H_
+#define WEBRTC_BASE_CALLBACK_H_
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace rtc {
+
+template <class R>
+class Callback0 {
+ public:
+ // Default copy operations are appropriate for this class.
+ Callback0() {}
+ template <class T> Callback0(const T& functor)
+ : helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
+ R operator()() {
+ if (empty())
+ return R();
+ return helper_->Run();
+ }
+ bool empty() const { return !helper_; }
+
+ private:
+ struct Helper : RefCountInterface {
+ virtual ~Helper() {}
+ virtual R Run() = 0;
+ };
+ template <class T> struct HelperImpl : Helper {
+ explicit HelperImpl(const T& functor) : functor_(functor) {}
+ virtual R Run() {
+ return functor_();
+ }
+ T functor_;
+ };
+ scoped_refptr<Helper> helper_;
+};
+
+template <class R,
+ class P1>
+class Callback1 {
+ public:
+ // Default copy operations are appropriate for this class.
+ Callback1() {}
+ template <class T> Callback1(const T& functor)
+ : helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
+ R operator()(P1 p1) {
+ if (empty())
+ return R();
+ return helper_->Run(p1);
+ }
+ bool empty() const { return !helper_; }
+
+ private:
+ struct Helper : RefCountInterface {
+ virtual ~Helper() {}
+ virtual R Run(P1 p1) = 0;
+ };
+ template <class T> struct HelperImpl : Helper {
+ explicit HelperImpl(const T& functor) : functor_(functor) {}
+ virtual R Run(P1 p1) {
+ return functor_(p1);
+ }
+ T functor_;
+ };
+ scoped_refptr<Helper> helper_;
+};
+
+template <class R,
+ class P1,
+ class P2>
+class Callback2 {
+ public:
+ // Default copy operations are appropriate for this class.
+ Callback2() {}
+ template <class T> Callback2(const T& functor)
+ : helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
+ R operator()(P1 p1, P2 p2) {
+ if (empty())
+ return R();
+ return helper_->Run(p1, p2);
+ }
+ bool empty() const { return !helper_; }
+
+ private:
+ struct Helper : RefCountInterface {
+ virtual ~Helper() {}
+ virtual R Run(P1 p1, P2 p2) = 0;
+ };
+ template <class T> struct HelperImpl : Helper {
+ explicit HelperImpl(const T& functor) : functor_(functor) {}
+ virtual R Run(P1 p1, P2 p2) {
+ return functor_(p1, p2);
+ }
+ T functor_;
+ };
+ scoped_refptr<Helper> helper_;
+};
+
+template <class R,
+ class P1,
+ class P2,
+ class P3>
+class Callback3 {
+ public:
+ // Default copy operations are appropriate for this class.
+ Callback3() {}
+ template <class T> Callback3(const T& functor)
+ : helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
+ R operator()(P1 p1, P2 p2, P3 p3) {
+ if (empty())
+ return R();
+ return helper_->Run(p1, p2, p3);
+ }
+ bool empty() const { return !helper_; }
+
+ private:
+ struct Helper : RefCountInterface {
+ virtual ~Helper() {}
+ virtual R Run(P1 p1, P2 p2, P3 p3) = 0;
+ };
+ template <class T> struct HelperImpl : Helper {
+ explicit HelperImpl(const T& functor) : functor_(functor) {}
+ virtual R Run(P1 p1, P2 p2, P3 p3) {
+ return functor_(p1, p2, p3);
+ }
+ T functor_;
+ };
+ scoped_refptr<Helper> helper_;
+};
+
+template <class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4>
+class Callback4 {
+ public:
+ // Default copy operations are appropriate for this class.
+ Callback4() {}
+ template <class T> Callback4(const T& functor)
+ : helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
+ R operator()(P1 p1, P2 p2, P3 p3, P4 p4) {
+ if (empty())
+ return R();
+ return helper_->Run(p1, p2, p3, p4);
+ }
+ bool empty() const { return !helper_; }
+
+ private:
+ struct Helper : RefCountInterface {
+ virtual ~Helper() {}
+ virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4) = 0;
+ };
+ template <class T> struct HelperImpl : Helper {
+ explicit HelperImpl(const T& functor) : functor_(functor) {}
+ virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4) {
+ return functor_(p1, p2, p3, p4);
+ }
+ T functor_;
+ };
+ scoped_refptr<Helper> helper_;
+};
+
+template <class R,
+ class P1,
+ class P2,
+ class P3,
+ class P4,
+ class P5>
+class Callback5 {
+ public:
+ // Default copy operations are appropriate for this class.
+ Callback5() {}
+ template <class T> Callback5(const T& functor)
+ : helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
+ R operator()(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) {
+ if (empty())
+ return R();
+ return helper_->Run(p1, p2, p3, p4, p5);
+ }
+ bool empty() const { return !helper_; }
+
+ private:
+ struct Helper : RefCountInterface {
+ virtual ~Helper() {}
+ virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) = 0;
+ };
+ template <class T> struct HelperImpl : Helper {
+ explicit HelperImpl(const T& functor) : functor_(functor) {}
+ virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) {
+ return functor_(p1, p2, p3, p4, p5);
+ }
+ T functor_;
+ };
+ scoped_refptr<Helper> helper_;
+};
+} // namespace rtc
+
+#endif // WEBRTC_BASE_CALLBACK_H_
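A rough sketch of the dependency-breaking use described in the comments above. Emitter, Counter, and PrintValue are hypothetical types made up for illustration; only the rtc::Callback and rtc::Bind APIs shown in this patch are assumed.

#include <iostream>

#include "webrtc/base/bind.h"
#include "webrtc/base/callback.h"

// The emitter knows nothing about who consumes its values; it only holds
// callbacks, and the concrete targets are plugged in from outside.
class Emitter {
 public:
  void set_sink(const rtc::Callback1<void, int>& sink) { sink_ = sink; }
  void set_done(const rtc::Callback0<void>& done) { done_ = done; }
  void Emit(int value) {
    sink_(value);  // a no-op while the callback is empty
    done_();
  }
 private:
  rtc::Callback1<void, int> sink_;
  rtc::Callback0<void> done_;
};

void PrintValue(int value) { std::cout << value << std::endl; }

class Counter {
 public:
  Counter() : count_(0) {}
  void Increment() { ++count_; }
  int count() const { return count_; }
 private:
  int count_;
};

int main() {
  Emitter emitter;
  Counter counter;
  emitter.Emit(1);  // both callbacks empty: safely does nothing
  emitter.set_sink(rtc::Callback1<void, int>(&PrintValue));
  // Bind() yields a nullary functor, so Callback0 can hold it directly.
  emitter.set_done(rtc::Bind(&Counter::Increment, &counter));
  emitter.Emit(2);                            // prints 2, bumps the counter
  std::cout << counter.count() << std::endl;  // prints 1
  return 0;
}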
diff --git a/chromium/third_party/webrtc/base/callback.h.pump b/chromium/third_party/webrtc/base/callback.h.pump
new file mode 100644
index 00000000000..86957df526b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/callback.h.pump
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// To generate callback.h from callback.h.pump, execute:
+// /home/build/google3/third_party/gtest/scripts/pump.py callback.h.pump
+
+// Callbacks are callable object containers. They can hold a function pointer
+// or a function object and behave like a value type. Internally, data is
+// reference-counted, making copies and pass-by-value inexpensive.
+//
+// Callbacks are typed using template arguments. The format is:
+// CallbackN<ReturnType, ParamType1, ..., ParamTypeN>
+// where N is the number of arguments supplied to the callable object.
+// Callbacks are invoked using operator(), just like a function or a function
+// object. Default-constructed callbacks are "empty," and executing an empty
+// callback does nothing. A callback can be made empty by assigning it from
+// a default-constructed callback.
+//
+// Callbacks are similar in purpose to std::function (which isn't available on
+// all platforms we support) and a lightweight alternative to sigslots. Since
+// they effectively hide the type of the object they call, they're useful in
+// breaking dependencies between objects that need to interact with one another.
+// Notably, they can hold the results of Bind(), std::bind*, etc., without needing
+// to know the resulting object type of those calls.
+//
+// Sigslots, on the other hand, provide a fuller feature set, such as multiple
+// subscriptions to a signal, optional thread-safety, and lifetime tracking of
+// slots. When these features are needed, choose sigslots.
+//
+// Example:
+// int sqr(int x) { return x * x; }
+// struct AddK {
+// int k;
+// int operator()(int x) const { return x + k; }
+// } add_k = {5};
+//
+// Callback1<int, int> my_callback;
+// cout << my_callback.empty() << endl; // true
+//
+// my_callback = Callback1<int, int>(&sqr);
+// cout << my_callback.empty() << endl; // false
+// cout << my_callback(3) << endl; // 9
+//
+// my_callback = Callback1<int, int>(add_k);
+// cout << my_callback(10) << endl; // 15
+//
+// my_callback = Callback1<int, int>();
+// cout << my_callback.empty() << endl; // true
+
+#ifndef WEBRTC_BASE_CALLBACK_H_
+#define WEBRTC_BASE_CALLBACK_H_
+
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace rtc {
+
+$var n = 5
+$range i 0..n
+$for i [[
+$range j 1..i
+
+template <class R$for j [[,
+ class P$j]]>
+class Callback$i {
+ public:
+ // Default copy operations are appropriate for this class.
+ Callback$i() {}
+ template <class T> Callback$i(const T& functor)
+ : helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
+ R operator()($for j , [[P$j p$j]]) {
+ if (empty())
+ return R();
+ return helper_->Run($for j , [[p$j]]);
+ }
+ bool empty() const { return !helper_; }
+
+ private:
+ struct Helper : RefCountInterface {
+ virtual ~Helper() {}
+ virtual R Run($for j , [[P$j p$j]]) = 0;
+ };
+ template <class T> struct HelperImpl : Helper {
+ explicit HelperImpl(const T& functor) : functor_(functor) {}
+ virtual R Run($for j , [[P$j p$j]]) {
+ return functor_($for j , [[p$j]]);
+ }
+ T functor_;
+ };
+ scoped_refptr<Helper> helper_;
+};
+
+]]
+} // namespace rtc
+
+#endif // WEBRTC_BASE_CALLBACK_H_
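For reference, a minimal sketch of holding the result of Bind() in a Callback, in the spirit of the unit test added below. The Counter type is invented for illustration; only calls defined in callback.h and bind.h are used.

#include <iostream>

#include "webrtc/base/bind.h"
#include "webrtc/base/callback.h"

// Example-only type, not part of the patch.
struct Counter {
  int value;
  int Add(int amount) { return value += amount; }
};

int main() {
  Counter counter = {0};
  // Bind() captures the object pointer and all arguments; the resulting
  // functor is stored in a Callback0 without naming its concrete type.
  rtc::Callback0<int> add_five = rtc::Bind(&Counter::Add, &counter, 5);
  std::cout << add_five() << std::endl;  // 5
  std::cout << add_five() << std::endl;  // 10

  // Assigning a default-constructed callback clears it; invoking an empty
  // callback is a no-op that returns a default-constructed result.
  add_five = rtc::Callback0<int>();
  std::cout << add_five.empty() << std::endl;  // 1 (true)
  return 0;
}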
diff --git a/chromium/third_party/webrtc/base/callback_unittest.cc b/chromium/third_party/webrtc/base/callback_unittest.cc
new file mode 100644
index 00000000000..66c939140ee
--- /dev/null
+++ b/chromium/third_party/webrtc/base/callback_unittest.cc
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/bind.h"
+#include "webrtc/base/callback.h"
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+namespace {
+
+void f() {}
+int g() { return 42; }
+int h(int x) { return x * x; }
+void i(int& x) { x *= x; } // NOLINT: Testing refs
+
+struct BindTester {
+ int a() { return 24; }
+ int b(int x) const { return x * x; }
+};
+
+} // namespace
+
+TEST(CallbackTest, VoidReturn) {
+ Callback0<void> cb;
+ EXPECT_TRUE(cb.empty());
+ cb(); // Executing an empty callback should not crash.
+ cb = Callback0<void>(&f);
+ EXPECT_FALSE(cb.empty());
+ cb();
+}
+
+TEST(CallbackTest, IntReturn) {
+ Callback0<int> cb;
+ EXPECT_TRUE(cb.empty());
+ cb = Callback0<int>(&g);
+ EXPECT_FALSE(cb.empty());
+ EXPECT_EQ(42, cb());
+ EXPECT_EQ(42, cb());
+}
+
+TEST(CallbackTest, OneParam) {
+ Callback1<int, int> cb1(&h);
+ EXPECT_FALSE(cb1.empty());
+ EXPECT_EQ(9, cb1(-3));
+ EXPECT_EQ(100, cb1(10));
+
+ // Try clearing a callback.
+ cb1 = Callback1<int, int>();
+ EXPECT_TRUE(cb1.empty());
+
+ // Try a callback with a ref parameter.
+ Callback1<void, int&> cb2(&i);
+ int x = 3;
+ cb2(x);
+ EXPECT_EQ(9, x);
+ cb2(x);
+ EXPECT_EQ(81, x);
+}
+
+TEST(CallbackTest, WithBind) {
+ BindTester t;
+ Callback0<int> cb1 = Bind(&BindTester::a, &t);
+ EXPECT_EQ(24, cb1());
+ EXPECT_EQ(24, cb1());
+ cb1 = Bind(&BindTester::b, &t, 10);
+ EXPECT_EQ(100, cb1());
+ EXPECT_EQ(100, cb1());
+ cb1 = Bind(&BindTester::b, &t, 5);
+ EXPECT_EQ(25, cb1());
+ EXPECT_EQ(25, cb1());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/checks.cc b/chromium/third_party/webrtc/base/checks.cc
new file mode 100644
index 00000000000..67f5003dedf
--- /dev/null
+++ b/chromium/third_party/webrtc/base/checks.cc
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <stdarg.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+void Fatal(const char* file, int line, const char* format, ...) {
+ char msg[256];
+
+ va_list arguments;
+ va_start(arguments, format);
+ vsnprintf(msg, sizeof(msg), format, arguments);
+ va_end(arguments);
+
+ LOG(LS_ERROR) << "\n\n#\n# Fatal error in " << file
+ << ", line " << line << "\n#" << msg
+ << "\n#\n";
+ abort();
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/checks.h b/chromium/third_party/webrtc/base/checks.h
new file mode 100644
index 00000000000..5a2841ae9b5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/checks.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This module contains some basic debugging facilities.
+// Originally comes from shared/commandlineflags/checks.h
+
+#ifndef WEBRTC_BASE_CHECKS_H_
+#define WEBRTC_BASE_CHECKS_H_
+
+#include <string.h>
+
+namespace rtc {
+
+// Prints an error message to stderr and aborts execution.
+void Fatal(const char* file, int line, const char* format, ...);
+
+} // namespace rtc
+
+// The UNREACHABLE macro is very useful during development.
+#define UNREACHABLE() \
+ rtc::Fatal(__FILE__, __LINE__, "unreachable code")
+
+#endif // WEBRTC_BASE_CHECKS_H_
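A small usage sketch for the macro above; Mode and ModeName are invented for illustration.

#include <stdio.h>

#include "webrtc/base/checks.h"

// Example-only enum and function, not part of the patch.
enum Mode { kModeAudio, kModeVideo };

const char* ModeName(Mode mode) {
  switch (mode) {
    case kModeAudio:
      return "audio";
    case kModeVideo:
      return "video";
  }
  // Every enumerator is handled above, so reaching this point indicates a
  // programming error; UNREACHABLE() logs the file and line, then aborts.
  UNREACHABLE();
  return "";  // Not reached; satisfies compilers that require a return.
}

int main() {
  printf("%s\n", ModeName(kModeVideo));  // Prints "video".
  return 0;
}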
diff --git a/chromium/third_party/webrtc/base/common.cc b/chromium/third_party/webrtc/base/common.cc
new file mode 100644
index 00000000000..8ea475b0e20
--- /dev/null
+++ b/chromium/third_party/webrtc/base/common.cc
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <signal.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+
+#if WEBRTC_WIN
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#endif // WEBRTC_WIN
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <CoreServices/CoreServices.h>
+#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
+
+#include <algorithm>
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+
+//////////////////////////////////////////////////////////////////////
+// Assertions
+//////////////////////////////////////////////////////////////////////
+
+namespace rtc {
+
+void Break() {
+#if WEBRTC_WIN
+ ::DebugBreak();
+#else // !WEBRTC_WIN
+ // On POSIX systems, SIGTRAP signals debuggers to break without killing the
+ // process. If a debugger isn't attached, the uncaught SIGTRAP will crash the
+ // app.
+ raise(SIGTRAP);
+#endif
+ // If a debugger wasn't attached, we will have crashed by this point. If a
+ // debugger is attached, we'll continue from here.
+}
+
+static AssertLogger custom_assert_logger_ = NULL;
+
+void SetCustomAssertLogger(AssertLogger logger) {
+ custom_assert_logger_ = logger;
+}
+
+void LogAssert(const char* function, const char* file, int line,
+ const char* expression) {
+ if (custom_assert_logger_) {
+ custom_assert_logger_(function, file, line, expression);
+ } else {
+ LOG(LS_ERROR) << file << "(" << line << ")" << ": ASSERT FAILED: "
+ << expression << " @ " << function;
+ }
+}
+
+} // namespace rtc
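A brief sketch of routing assertion failures into a custom sink via the hook implemented above. MyAssertLogger is an invented name; the declarations used here live in webrtc/base/common.h, added below.

#include <stdio.h>

#include "webrtc/base/common.h"

// Example-only logger, not part of the patch.
void MyAssertLogger(const char* function, const char* file, int line,
                    const char* expression) {
  fprintf(stderr, "assert failed: %s at %s:%d (%s)\n",
          expression, file, line, function);
}

int main() {
  // Route LogAssert() output through MyAssertLogger instead of LOG(LS_ERROR).
  rtc::SetCustomAssertLogger(&MyAssertLogger);
  ASSERT(2 + 2 == 4);  // Holds, so nothing is logged.
  // Passing NULL restores the default LOG(LS_ERROR) behavior.
  rtc::SetCustomAssertLogger(NULL);
  return 0;
}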
diff --git a/chromium/third_party/webrtc/base/common.h b/chromium/third_party/webrtc/base/common.h
new file mode 100644
index 00000000000..7f4a756391a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/common.h
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_COMMON_H_ // NOLINT
+#define WEBRTC_BASE_COMMON_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
+
+#if defined(_MSC_VER)
+// warning C4355: 'this' : used in base member initializer list
+#pragma warning(disable:4355)
+#endif
+
+//////////////////////////////////////////////////////////////////////
+// General Utilities
+//////////////////////////////////////////////////////////////////////
+
+#ifndef RTC_UNUSED
+#define RTC_UNUSED(x) RtcUnused(static_cast<const void*>(&x))
+#define RTC_UNUSED2(x, y) RtcUnused(static_cast<const void*>(&x)); \
+ RtcUnused(static_cast<const void*>(&y))
+#define RTC_UNUSED3(x, y, z) RtcUnused(static_cast<const void*>(&x)); \
+ RtcUnused(static_cast<const void*>(&y)); \
+ RtcUnused(static_cast<const void*>(&z))
+#define RTC_UNUSED4(x, y, z, a) RtcUnused(static_cast<const void*>(&x)); \
+ RtcUnused(static_cast<const void*>(&y)); \
+ RtcUnused(static_cast<const void*>(&z)); \
+ RtcUnused(static_cast<const void*>(&a))
+#define RTC_UNUSED5(x, y, z, a, b) RtcUnused(static_cast<const void*>(&x)); \
+ RtcUnused(static_cast<const void*>(&y)); \
+ RtcUnused(static_cast<const void*>(&z)); \
+ RtcUnused(static_cast<const void*>(&a)); \
+ RtcUnused(static_cast<const void*>(&b))
+inline void RtcUnused(const void*) {}
+#endif // RTC_UNUSED
+
+#if !defined(WEBRTC_WIN)
+
+#ifndef strnicmp
+#define strnicmp(x, y, n) strncasecmp(x, y, n)
+#endif
+
+#ifndef stricmp
+#define stricmp(x, y) strcasecmp(x, y)
+#endif
+
+// TODO(fbarchard): Remove this. std::max should be used everywhere in the code.
+// NOMINMAX must be defined where we include <windows.h>.
+#define stdmax(x, y) std::max(x, y)
+#else
+#define stdmax(x, y) rtc::_max(x, y)
+#endif
+
+#define ARRAY_SIZE(x) (static_cast<int>(sizeof(x) / sizeof(x[0])))
+
+/////////////////////////////////////////////////////////////////////////////
+// Assertions
+/////////////////////////////////////////////////////////////////////////////
+
+#ifndef ENABLE_DEBUG
+#define ENABLE_DEBUG _DEBUG
+#endif // !defined(ENABLE_DEBUG)
+
+// Even for release builds, allow for the override of LogAssert. Though no
+// macro is provided, this can still be used for explicit runtime asserts
+// and allow applications to override the assert behavior.
+
+namespace rtc {
+
+
+// If a debugger is attached, triggers a debugger breakpoint. If a debugger is
+// not attached, forces program termination.
+void Break();
+
+// LogAssert writes information about an assertion to the log. It's called by
+// Assert (and from the ASSERT macro in debug mode) before any other action
+// is taken (e.g. breaking the debugger, abort()ing, etc.).
+void LogAssert(const char* function, const char* file, int line,
+ const char* expression);
+
+typedef void (*AssertLogger)(const char* function,
+ const char* file,
+ int line,
+ const char* expression);
+
+// Sets a custom assert logger to be used instead of the default LogAssert
+// behavior. To clear the custom assert logger, pass NULL for |logger| and the
+// default behavior will be restored. Only one custom assert logger can be set
+// at a time, so this should generally be set during application startup and
+// only by one component.
+void SetCustomAssertLogger(AssertLogger logger);
+
+} // namespace rtc
+
+#if ENABLE_DEBUG
+
+namespace rtc {
+
+inline bool Assert(bool result, const char* function, const char* file,
+ int line, const char* expression) {
+ if (!result) {
+ LogAssert(function, file, line, expression);
+ Break();
+ return false;
+ }
+ return true;
+}
+
+} // namespace rtc
+
+#if defined(_MSC_VER) && _MSC_VER < 1300
+#define __FUNCTION__ ""
+#endif
+
+#ifndef ASSERT
+#define ASSERT(x) \
+ (void)rtc::Assert((x), __FUNCTION__, __FILE__, __LINE__, #x)
+#endif
+
+#ifndef VERIFY
+#define VERIFY(x) rtc::Assert((x), __FUNCTION__, __FILE__, __LINE__, #x)
+#endif
+
+#else // !ENABLE_DEBUG
+
+namespace rtc {
+
+inline bool ImplicitCastToBool(bool result) { return result; }
+
+} // namespace rtc
+
+#ifndef ASSERT
+#define ASSERT(x) (void)0
+#endif
+
+#ifndef VERIFY
+#define VERIFY(x) rtc::ImplicitCastToBool(x)
+#endif
+
+#endif // !ENABLE_DEBUG
+
+#define COMPILE_TIME_ASSERT(expr) char CTA_UNIQUE_NAME[expr]
+#define CTA_UNIQUE_NAME CTA_MAKE_NAME(__LINE__)
+#define CTA_MAKE_NAME(line) CTA_MAKE_NAME2(line)
+#define CTA_MAKE_NAME2(line) constraint_ ## line
+
+// Forces compiler to inline, even against its better judgement. Use wisely.
+#if defined(__GNUC__)
+#define FORCE_INLINE __attribute__((always_inline))
+#elif defined(WEBRTC_WIN)
+#define FORCE_INLINE __forceinline
+#else
+#define FORCE_INLINE
+#endif
+
+// Borrowed from Chromium's base/compiler_specific.h.
+// Annotate a virtual method indicating it must be overriding a virtual
+// method in the parent class.
+// Use like:
+// virtual void foo() OVERRIDE;
+#if defined(WEBRTC_WIN)
+#define OVERRIDE override
+#elif defined(__clang__)
+// Clang defaults to C++03 and warns about using override. Squelch that.
+// Intentionally no push/pop here so all users of OVERRIDE ignore the warning
+// too. This is like passing -Wno-c++11-extensions, except that GCC won't die
+// (because it won't see this pragma).
+#pragma clang diagnostic ignored "-Wc++11-extensions"
+#define OVERRIDE override
+#elif defined(__GNUC__) && __cplusplus >= 201103 && \
+ (__GNUC__ * 10000 + __GNUC_MINOR__ * 100) >= 40700
+// GCC 4.7 supports explicit virtual overrides when C++11 support is enabled.
+#define OVERRIDE override
+#else
+#define OVERRIDE
+#endif
+
+// Annotate a function indicating the caller must examine the return value.
+// Use like:
+// int foo() WARN_UNUSED_RESULT;
+// To explicitly ignore a result, see |ignore_result()| in <base/basictypes.h>.
+// TODO(ajm): Hack to avoid multiple definitions until the base/ of webrtc and
+// libjingle are merged.
+#if !defined(WARN_UNUSED_RESULT)
+#if defined(__GNUC__)
+#define WARN_UNUSED_RESULT __attribute__((warn_unused_result))
+#else
+#define WARN_UNUSED_RESULT
+#endif
+#endif // WARN_UNUSED_RESULT
+
+#endif // WEBRTC_BASE_COMMON_H_ // NOLINT
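A short sketch of the ASSERT/VERIFY and OVERRIDE macros in use; Widget, Button and OpenLog are invented for illustration.

#include <stdio.h>

#include "webrtc/base/common.h"

// Example-only classes, not part of the patch.
class Widget {
 public:
  virtual ~Widget() {}
  virtual void Draw() {}
};

class Button : public Widget {
 public:
  // OVERRIDE expands to the C++11 keyword where supported and to nothing
  // elsewhere, so signature mismatches are caught when the compiler can.
  virtual void Draw() OVERRIDE { printf("button\n"); }
};

// Example-only helper; returns whether the file could be opened for append.
bool OpenLog(const char* path) {
  FILE* f = fopen(path, "a");
  if (f != NULL)
    fclose(f);
  return f != NULL;
}

int main() {
  Button button;
  button.Draw();

  // ASSERT is compiled out in release builds; VERIFY always evaluates its
  // argument and returns it as a bool, so it is safe to branch on.
  ASSERT(ARRAY_SIZE("abc") == 4);
  if (!VERIFY(OpenLog("example.log"))) {
    printf("could not open log\n");
  }
  return 0;
}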
diff --git a/chromium/third_party/webrtc/base/compile_assert.h b/chromium/third_party/webrtc/base/compile_assert.h
new file mode 100644
index 00000000000..6d4249c8fba
--- /dev/null
+++ b/chromium/third_party/webrtc/base/compile_assert.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// COMPILE_ASSERT macro, borrowed from google3/base/macros.h.
+#ifndef WEBRTC_BASE_COMPILE_ASSERT_H_
+#define WEBRTC_BASE_COMPILE_ASSERT_H_
+
+// The COMPILE_ASSERT macro can be used to verify that a compile time
+// expression is true. For example, you could use it to verify the
+// size of a static array:
+//
+// COMPILE_ASSERT(ARRAYSIZE(content_type_names) == CONTENT_NUM_TYPES,
+// content_type_names_incorrect_size);
+//
+// or to make sure a struct is smaller than a certain size:
+//
+// COMPILE_ASSERT(sizeof(foo) < 128, foo_too_large);
+//
+// The second argument to the macro is the name of the variable. If
+// the expression is false, most compilers will issue a warning/error
+// containing the name of the variable.
+
+// TODO(ajm): Hack to avoid multiple definitions until the base/ of webrtc and
+// libjingle are merged.
+#if !defined(COMPILE_ASSERT)
+template <bool>
+struct CompileAssert {
+};
+
+#define COMPILE_ASSERT(expr, msg) \
+ typedef CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1] // NOLINT
+#endif // COMPILE_ASSERT
+
+// Implementation details of COMPILE_ASSERT:
+//
+// - COMPILE_ASSERT works by defining an array type that has -1
+// elements (and thus is invalid) when the expression is false.
+//
+// - The simpler definition
+//
+// #define COMPILE_ASSERT(expr, msg) typedef char msg[(expr) ? 1 : -1]
+//
+// does not work, as gcc supports variable-length arrays whose sizes
+// are determined at run-time (this is gcc's extension and not part
+// of the C++ standard). As a result, gcc fails to reject the
+// following code with the simple definition:
+//
+// int foo;
+// COMPILE_ASSERT(foo, msg); // not supposed to compile as foo is
+// // not a compile-time constant.
+//
+//   - By using the type CompileAssert<(bool(expr))>, we ensure that
+// expr is a compile-time constant. (Template arguments must be
+// determined at compile-time.)
+//
+// - The outer parentheses in CompileAssert<(bool(expr))> are necessary
+// to work around a bug in gcc 3.4.4 and 4.0.1. If we had written
+//
+// CompileAssert<bool(expr)>
+//
+// instead, these compilers will refuse to compile
+//
+// COMPILE_ASSERT(5 > 0, some_message);
+//
+// (They seem to think the ">" in "5 > 0" marks the end of the
+// template argument list.)
+//
+// - The array size is (bool(expr) ? 1 : -1), instead of simply
+//
+// ((expr) ? 1 : -1).
+//
+// This is to avoid running into a bug in MS VC 7.1, which
+// causes ((0.0) ? 1 : -1) to incorrectly evaluate to 1.
+
+#endif // WEBRTC_BASE_COMPILE_ASSERT_H_
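A minimal example of the macro in use; RtpHeaderStub is an invented struct.

#include "webrtc/base/compile_assert.h"

// Example-only struct, not part of the patch.
struct RtpHeaderStub {
  unsigned char flags;
  unsigned char payload_type;
  unsigned short sequence_number;
};

// Fails to compile (array of size -1) if the struct ever grows past 4 bytes;
// the second argument doubles as the name of the otherwise-unused typedef.
COMPILE_ASSERT(sizeof(RtpHeaderStub) <= 4, rtp_header_stub_too_large);

int main() { return 0; }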
diff --git a/chromium/third_party/webrtc/base/constructormagic.h b/chromium/third_party/webrtc/base/constructormagic.h
new file mode 100644
index 00000000000..c20be2b32fc
--- /dev/null
+++ b/chromium/third_party/webrtc/base/constructormagic.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_CONSTRUCTORMAGIC_H_
+#define WEBRTC_BASE_CONSTRUCTORMAGIC_H_
+
+#define DISALLOW_ASSIGN(TypeName) \
+ void operator=(const TypeName&)
+
+// A macro to disallow the evil copy constructor and operator= functions
+// This should be used in the private: declarations for a class.
+// Undefine this, just in case. Some third-party includes have their own
+// version.
+#undef DISALLOW_COPY_AND_ASSIGN
+#define DISALLOW_COPY_AND_ASSIGN(TypeName) \
+ TypeName(const TypeName&); \
+ DISALLOW_ASSIGN(TypeName)
+
+// Alternative, less-accurate legacy name.
+#define DISALLOW_EVIL_CONSTRUCTORS(TypeName) \
+ DISALLOW_COPY_AND_ASSIGN(TypeName)
+
+// A macro to disallow all the implicit constructors, namely the
+// default constructor, copy constructor and operator= functions.
+//
+// This should be used in the private: declarations for a class
+// that wants to prevent anyone from instantiating it. This is
+// especially useful for classes containing only static methods.
+#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName) \
+ TypeName(); \
+ DISALLOW_EVIL_CONSTRUCTORS(TypeName)
+
+
+#endif // WEBRTC_BASE_CONSTRUCTORMAGIC_H_
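For illustration, how these macros are typically placed; FileLock is an invented class.

#include "webrtc/base/constructormagic.h"

// Example-only class, not part of the patch.
class FileLock {
 public:
  FileLock() : locked_(false) {}
  void Lock() { locked_ = true; }
  void Unlock() { locked_ = false; }

 private:
  bool locked_;

  // Declares (without defining) the copy constructor and operator=, so any
  // attempt to copy a FileLock fails to compile or link.
  DISALLOW_COPY_AND_ASSIGN(FileLock);
};

int main() {
  FileLock lock;
  lock.Lock();
  // FileLock copy = lock;  // Would not compile.
  lock.Unlock();
  return 0;
}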
diff --git a/chromium/third_party/webrtc/base/cpumonitor.cc b/chromium/third_party/webrtc/base/cpumonitor.cc
new file mode 100644
index 00000000000..c881b48c5a1
--- /dev/null
+++ b/chromium/third_party/webrtc/base/cpumonitor.cc
@@ -0,0 +1,423 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/cpumonitor.h"
+
+#include <string>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/systeminfo.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#include <winternl.h>
+#endif
+
+#if defined(WEBRTC_POSIX)
+#include <sys/time.h>
+#endif
+
+#if defined(WEBRTC_MAC)
+#include <mach/mach_host.h>
+#include <mach/mach_init.h>
+#include <mach/mach_port.h>
+#include <mach/host_info.h>
+#include <mach/task.h>
+#endif // defined(WEBRTC_MAC)
+
+#if defined(WEBRTC_LINUX)
+#include <sys/resource.h>
+#include <errno.h>
+#include <stdio.h>
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/pathutils.h"
+#endif // defined(WEBRTC_LINUX)
+
+#if defined(WEBRTC_MAC)
+static uint64 TimeValueTToInt64(const time_value_t &time_value) {
+ return rtc::kNumMicrosecsPerSec * time_value.seconds +
+ time_value.microseconds;
+}
+#endif // defined(WEBRTC_MAC)
+
+// How CpuSampler works
+// When threads switch, the time they spent is accumulated into system
+// counters. The time can be treated as user, kernel or idle.
+// User time is time spent in applications.
+// Kernel time is time spent in the OS, including the thread-switching code
+// itself; high kernel time typically indicates IO.
+// Idle time is accumulated by the idle process, which runs when nothing else
+// is ready to run.
+//
+// User time is broken down by process (application). One of the applications
+// is the current process. When you add up all application times, this is
+// system time. If only your application is running, system time should be the
+// same as process time.
+//
+// All cores contribute to these accumulators. A dual core processor is able
+// to process twice as many cycles as a single core. The actual code
+// efficiency may be worse, due to contention, but the number of available
+// cycles is exactly twice as many, and the cpu load will reflect the
+// efficiency. Hyperthreads behave the same way: the load will reflect 200%,
+// but the actual amount of work completed will be much less than on a true
+// dual core.
+//
+// Total available performance is the sum of all accumulators.
+// If you tracked this for 1 second, it would essentially give you the clock
+// rate - the number of cycles per second.
+// SpeedStep / Turbo Boost is not considered, so in fact more processing time
+// may be available.
+
+namespace rtc {
+
+// Note: Tests show that 600 ms is the minimum stable interval on Windows 7.
+static const int32 kDefaultInterval = 950; // Slightly under 1 second.
+
+CpuSampler::CpuSampler()
+ : min_load_interval_(kDefaultInterval)
+#if defined(WEBRTC_WIN)
+ , get_system_times_(NULL),
+ nt_query_system_information_(NULL),
+ force_fallback_(false)
+#endif
+ {
+}
+
+CpuSampler::~CpuSampler() {
+}
+
+// Set minimum interval in ms between computing new load values. Default 950.
+void CpuSampler::set_load_interval(int min_load_interval) {
+ min_load_interval_ = min_load_interval;
+}
+
+bool CpuSampler::Init() {
+ sysinfo_.reset(new SystemInfo);
+ cpus_ = sysinfo_->GetMaxCpus();
+ if (cpus_ == 0) {
+ return false;
+ }
+#if defined(WEBRTC_WIN)
+ // Note that GetSystemTimes is available in Windows XP SP1 or later.
+ // http://msdn.microsoft.com/en-us/library/ms724400.aspx
+ // NtQuerySystemInformation is used as a fallback.
+ if (!force_fallback_) {
+ get_system_times_ = GetProcAddress(GetModuleHandle(L"kernel32.dll"),
+ "GetSystemTimes");
+ }
+ nt_query_system_information_ = GetProcAddress(GetModuleHandle(L"ntdll.dll"),
+ "NtQuerySystemInformation");
+ if ((get_system_times_ == NULL) && (nt_query_system_information_ == NULL)) {
+ return false;
+ }
+#endif
+#if defined(WEBRTC_LINUX)
+ Pathname sname("/proc/stat");
+ sfile_.reset(Filesystem::OpenFile(sname, "rb"));
+ if (!sfile_) {
+ LOG_ERR(LS_ERROR) << "open proc/stat failed:";
+ return false;
+ }
+ if (!sfile_->DisableBuffering()) {
+ LOG_ERR(LS_ERROR) << "could not disable buffering for proc/stat";
+ return false;
+ }
+#endif // defined(WEBRTC_LINUX)
+ GetProcessLoad(); // Initialize values.
+ GetSystemLoad();
+ // Help next user call return valid data by recomputing load.
+ process_.prev_load_time_ = 0u;
+ system_.prev_load_time_ = 0u;
+ return true;
+}
+
+float CpuSampler::UpdateCpuLoad(uint64 current_total_times,
+ uint64 current_cpu_times,
+ uint64 *prev_total_times,
+ uint64 *prev_cpu_times) {
+ float result = 0.f;
+ if (current_total_times < *prev_total_times ||
+ current_cpu_times < *prev_cpu_times) {
+    LOG(LS_ERROR) << "Inconsistent time values passed; ignored.";
+ } else {
+ const uint64 cpu_diff = current_cpu_times - *prev_cpu_times;
+ const uint64 total_diff = current_total_times - *prev_total_times;
+ result = (total_diff == 0ULL ? 0.f :
+ static_cast<float>(1.0f * cpu_diff / total_diff));
+ if (result > static_cast<float>(cpus_)) {
+ result = static_cast<float>(cpus_);
+ }
+ *prev_total_times = current_total_times;
+ *prev_cpu_times = current_cpu_times;
+ }
+ return result;
+}
+
+float CpuSampler::GetSystemLoad() {
+ uint32 timenow = Time();
+ int elapsed = static_cast<int>(TimeDiff(timenow, system_.prev_load_time_));
+ if (min_load_interval_ != 0 && system_.prev_load_time_ != 0u &&
+ elapsed < min_load_interval_) {
+ return system_.prev_load_;
+ }
+#if defined(WEBRTC_WIN)
+ uint64 total_times, cpu_times;
+
+ typedef BOOL (_stdcall *GST_PROC)(LPFILETIME, LPFILETIME, LPFILETIME);
+ typedef NTSTATUS (WINAPI *QSI_PROC)(SYSTEM_INFORMATION_CLASS,
+ PVOID, ULONG, PULONG);
+
+ GST_PROC get_system_times = reinterpret_cast<GST_PROC>(get_system_times_);
+ QSI_PROC nt_query_system_information = reinterpret_cast<QSI_PROC>(
+ nt_query_system_information_);
+
+ if (get_system_times) {
+ FILETIME idle_time, kernel_time, user_time;
+ if (!get_system_times(&idle_time, &kernel_time, &user_time)) {
+ LOG(LS_ERROR) << "::GetSystemTimes() failed: " << ::GetLastError();
+ return 0.f;
+ }
+ // kernel_time includes Kernel idle time, so no need to
+ // include cpu_time as total_times
+ total_times = ToUInt64(kernel_time) + ToUInt64(user_time);
+ cpu_times = total_times - ToUInt64(idle_time);
+
+ } else {
+ if (nt_query_system_information) {
+ ULONG returned_length = 0;
+ scoped_ptr<SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION[]> processor_info(
+ new SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION[cpus_]);
+ nt_query_system_information(
+ ::SystemProcessorPerformanceInformation,
+ reinterpret_cast<void*>(processor_info.get()),
+ cpus_ * sizeof(SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION),
+ &returned_length);
+
+ if (returned_length !=
+ (cpus_ * sizeof(SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION))) {
+ LOG(LS_ERROR) << "NtQuerySystemInformation has unexpected size";
+ return 0.f;
+ }
+
+ uint64 current_idle = 0;
+ uint64 current_kernel = 0;
+ uint64 current_user = 0;
+ for (int ix = 0; ix < cpus_; ++ix) {
+ current_idle += processor_info[ix].IdleTime.QuadPart;
+        current_kernel += processor_info[ix].KernelTime.QuadPart;
+        current_user += processor_info[ix].UserTime.QuadPart;
+ }
+ total_times = current_kernel + current_user;
+ cpu_times = total_times - current_idle;
+ } else {
+ return 0.f;
+ }
+ }
+#endif // WEBRTC_WIN
+
+#if defined(WEBRTC_MAC)
+ mach_port_t mach_host = mach_host_self();
+ host_cpu_load_info_data_t cpu_info;
+ mach_msg_type_number_t info_count = HOST_CPU_LOAD_INFO_COUNT;
+ kern_return_t kr = host_statistics(mach_host, HOST_CPU_LOAD_INFO,
+ reinterpret_cast<host_info_t>(&cpu_info),
+ &info_count);
+ mach_port_deallocate(mach_task_self(), mach_host);
+ if (KERN_SUCCESS != kr) {
+ LOG(LS_ERROR) << "::host_statistics() failed";
+ return 0.f;
+ }
+
+ const uint64 cpu_times = cpu_info.cpu_ticks[CPU_STATE_NICE] +
+ cpu_info.cpu_ticks[CPU_STATE_SYSTEM] +
+ cpu_info.cpu_ticks[CPU_STATE_USER];
+ const uint64 total_times = cpu_times + cpu_info.cpu_ticks[CPU_STATE_IDLE];
+#endif // defined(WEBRTC_MAC)
+
+#if defined(WEBRTC_LINUX)
+ if (!sfile_) {
+ LOG(LS_ERROR) << "Invalid handle for proc/stat";
+ return 0.f;
+ }
+ std::string statbuf;
+ sfile_->SetPosition(0);
+ if (!sfile_->ReadLine(&statbuf)) {
+ LOG_ERR(LS_ERROR) << "Could not read proc/stat file";
+ return 0.f;
+ }
+
+ unsigned long long user;
+ unsigned long long nice;
+ unsigned long long system;
+ unsigned long long idle;
+  if (sscanf(statbuf.c_str(), "cpu %llu %llu %llu %llu",
+ &user, &nice,
+ &system, &idle) != 4) {
+ LOG_ERR(LS_ERROR) << "Could not parse cpu info";
+ return 0.f;
+ }
+ const uint64 cpu_times = nice + system + user;
+ const uint64 total_times = cpu_times + idle;
+#endif // defined(WEBRTC_LINUX)
+
+#if defined(__native_client__)
+ // TODO(ryanpetrie): Implement this via PPAPI when it's available.
+ const uint64 cpu_times = 0;
+ const uint64 total_times = 0;
+#endif // defined(__native_client__)
+
+ system_.prev_load_time_ = timenow;
+ system_.prev_load_ = UpdateCpuLoad(total_times,
+ cpu_times * cpus_,
+ &system_.prev_total_times_,
+ &system_.prev_cpu_times_);
+ return system_.prev_load_;
+}
+
+float CpuSampler::GetProcessLoad() {
+ uint32 timenow = Time();
+ int elapsed = static_cast<int>(TimeDiff(timenow, process_.prev_load_time_));
+ if (min_load_interval_ != 0 && process_.prev_load_time_ != 0u &&
+ elapsed < min_load_interval_) {
+ return process_.prev_load_;
+ }
+#if defined(WEBRTC_WIN)
+ FILETIME current_file_time;
+ ::GetSystemTimeAsFileTime(&current_file_time);
+
+ FILETIME create_time, exit_time, kernel_time, user_time;
+ if (!::GetProcessTimes(::GetCurrentProcess(),
+ &create_time, &exit_time, &kernel_time, &user_time)) {
+ LOG(LS_ERROR) << "::GetProcessTimes() failed: " << ::GetLastError();
+ return 0.f;
+ }
+
+ const uint64 total_times =
+ ToUInt64(current_file_time) - ToUInt64(create_time);
+ const uint64 cpu_times =
+ (ToUInt64(kernel_time) + ToUInt64(user_time));
+#endif // WEBRTC_WIN
+
+#if defined(WEBRTC_POSIX)
+ // Common to both OSX and Linux.
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+ const uint64 total_times = tv.tv_sec * kNumMicrosecsPerSec + tv.tv_usec;
+#endif
+
+#if defined(WEBRTC_MAC)
+ // Get live thread usage.
+ task_thread_times_info task_times_info;
+ mach_msg_type_number_t info_count = TASK_THREAD_TIMES_INFO_COUNT;
+
+ if (KERN_SUCCESS != task_info(mach_task_self(), TASK_THREAD_TIMES_INFO,
+ reinterpret_cast<task_info_t>(&task_times_info),
+ &info_count)) {
+ LOG(LS_ERROR) << "::task_info(TASK_THREAD_TIMES_INFO) failed";
+ return 0.f;
+ }
+
+ // Get terminated thread usage.
+ task_basic_info task_term_info;
+ info_count = TASK_BASIC_INFO_COUNT;
+ if (KERN_SUCCESS != task_info(mach_task_self(), TASK_BASIC_INFO,
+ reinterpret_cast<task_info_t>(&task_term_info),
+ &info_count)) {
+ LOG(LS_ERROR) << "::task_info(TASK_BASIC_INFO) failed";
+ return 0.f;
+ }
+
+ const uint64 cpu_times = (TimeValueTToInt64(task_times_info.user_time) +
+ TimeValueTToInt64(task_times_info.system_time) +
+ TimeValueTToInt64(task_term_info.user_time) +
+ TimeValueTToInt64(task_term_info.system_time));
+#endif // defined(WEBRTC_MAC)
+
+#if defined(WEBRTC_LINUX)
+ rusage usage;
+ if (getrusage(RUSAGE_SELF, &usage) < 0) {
+ LOG_ERR(LS_ERROR) << "getrusage failed";
+ return 0.f;
+ }
+
+ const uint64 cpu_times =
+ (usage.ru_utime.tv_sec + usage.ru_stime.tv_sec) * kNumMicrosecsPerSec +
+ usage.ru_utime.tv_usec + usage.ru_stime.tv_usec;
+#endif // defined(WEBRTC_LINUX)
+
+#if defined(__native_client__)
+ // TODO(ryanpetrie): Implement this via PPAPI when it's available.
+ const uint64 cpu_times = 0;
+#endif // defined(__native_client__)
+
+ process_.prev_load_time_ = timenow;
+ process_.prev_load_ = UpdateCpuLoad(total_times,
+ cpu_times,
+ &process_.prev_total_times_,
+ &process_.prev_cpu_times_);
+ return process_.prev_load_;
+}
+
+int CpuSampler::GetMaxCpus() const {
+ return cpus_;
+}
+
+int CpuSampler::GetCurrentCpus() {
+ return sysinfo_->GetCurCpus();
+}
+
+///////////////////////////////////////////////////////////////////
+// Implementation of class CpuMonitor.
+CpuMonitor::CpuMonitor(Thread* thread)
+ : monitor_thread_(thread) {
+}
+
+CpuMonitor::~CpuMonitor() {
+ Stop();
+}
+
+void CpuMonitor::set_thread(Thread* thread) {
+ ASSERT(monitor_thread_ == NULL || monitor_thread_ == thread);
+ monitor_thread_ = thread;
+}
+
+bool CpuMonitor::Start(int period_ms) {
+ if (!monitor_thread_ || !sampler_.Init()) return false;
+
+ monitor_thread_->SignalQueueDestroyed.connect(
+ this, &CpuMonitor::OnMessageQueueDestroyed);
+
+ period_ms_ = period_ms;
+ monitor_thread_->PostDelayed(period_ms_, this);
+
+ return true;
+}
+
+void CpuMonitor::Stop() {
+ if (monitor_thread_) {
+ monitor_thread_->Clear(this);
+ }
+}
+
+void CpuMonitor::OnMessage(Message* msg) {
+ int max_cpus = sampler_.GetMaxCpus();
+ int current_cpus = sampler_.GetCurrentCpus();
+ float process_load = sampler_.GetProcessLoad();
+ float system_load = sampler_.GetSystemLoad();
+ SignalUpdate(current_cpus, max_cpus, process_load, system_load);
+
+ if (monitor_thread_) {
+ monitor_thread_->PostDelayed(period_ms_, this);
+ }
+}
+
+} // namespace rtc
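To make the arithmetic in UpdateCpuLoad() concrete, a small standalone sketch with hypothetical numbers; SketchLoad is not part of the patch.

#include <stdio.h>

// Mirrors the core of CpuSampler::UpdateCpuLoad(): load is the share of the
// elapsed "total" time that was spent on the CPU, capped at the core count.
float SketchLoad(unsigned long long total_diff, unsigned long long cpu_diff,
                 int cpus) {
  if (total_diff == 0)
    return 0.f;
  float load = static_cast<float>(cpu_diff) / static_cast<float>(total_diff);
  return load > cpus ? static_cast<float>(cpus) : load;
}

int main() {
  // Hypothetical samples on a 2-core machine: 500 ms of CPU time was used
  // while 1000 ms of wall-clock time elapsed, giving a load of 0.5.
  printf("%.2f\n", SketchLoad(1000, 500, 2));   // 0.50
  // 3000 ms of CPU time in the same window is capped at the 2 available cores.
  printf("%.2f\n", SketchLoad(1000, 3000, 2));  // 2.00
  return 0;
}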
diff --git a/chromium/third_party/webrtc/base/cpumonitor.h b/chromium/third_party/webrtc/base/cpumonitor.h
new file mode 100644
index 00000000000..39b09b38091
--- /dev/null
+++ b/chromium/third_party/webrtc/base/cpumonitor.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_CPUMONITOR_H_
+#define WEBRTC_BASE_CPUMONITOR_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sigslot.h"
+#if defined(WEBRTC_LINUX)
+#include "webrtc/base/stream.h"
+#endif // defined(WEBRTC_LINUX)
+
+namespace rtc {
+class Thread;
+class SystemInfo;
+
+struct CpuStats {
+ CpuStats()
+ : prev_total_times_(0),
+ prev_cpu_times_(0),
+ prev_load_(0.f),
+ prev_load_time_(0u) {
+ }
+
+ uint64 prev_total_times_;
+ uint64 prev_cpu_times_;
+ float prev_load_; // Previous load value.
+ uint32 prev_load_time_; // Time previous load value was taken.
+};
+
+// CpuSampler samples the process and system load.
+class CpuSampler {
+ public:
+ CpuSampler();
+ ~CpuSampler();
+
+ // Initialize CpuSampler. Returns true if successful.
+ bool Init();
+
+ // Set minimum interval in ms between computing new load values.
+ // Default 950 ms. Set to 0 to disable interval.
+ void set_load_interval(int min_load_interval);
+
+  // Return CPU load of the current process as a float. The value ranges from
+  // 0 up to the number of cpus, where 1 corresponds to one fully loaded core.
+  float GetProcessLoad();
+
+  // Return CPU load of the whole system, on the same scale.
+  float GetSystemLoad();
+
+ // Return number of cpus. Includes hyperthreads.
+ int GetMaxCpus() const;
+
+ // Return current number of cpus available to this process.
+ int GetCurrentCpus();
+
+ // For testing. Allows forcing of fallback to using NTDLL functions.
+ void set_force_fallback(bool fallback) {
+#if defined(WEBRTC_WIN)
+ force_fallback_ = fallback;
+#endif
+ }
+
+ private:
+ float UpdateCpuLoad(uint64 current_total_times,
+ uint64 current_cpu_times,
+ uint64 *prev_total_times,
+ uint64 *prev_cpu_times);
+ CpuStats process_;
+ CpuStats system_;
+ int cpus_;
+ int min_load_interval_; // Minimum time between computing new load.
+ scoped_ptr<SystemInfo> sysinfo_;
+#if defined(WEBRTC_WIN)
+ void* get_system_times_;
+ void* nt_query_system_information_;
+ bool force_fallback_;
+#endif
+#if defined(WEBRTC_LINUX)
+ // File for reading /proc/stat
+ scoped_ptr<FileStream> sfile_;
+#endif // defined(WEBRTC_LINUX)
+};
+
+// CpuMonitor samples and signals the CPU load periodically.
+class CpuMonitor
+ : public rtc::MessageHandler, public sigslot::has_slots<> {
+ public:
+ explicit CpuMonitor(Thread* thread);
+ virtual ~CpuMonitor();
+ void set_thread(Thread* thread);
+
+ bool Start(int period_ms);
+ void Stop();
+ // Signal parameters are current cpus, max cpus, process load and system load.
+ sigslot::signal4<int, int, float, float> SignalUpdate;
+
+ protected:
+ // Override virtual method of parent MessageHandler.
+ virtual void OnMessage(rtc::Message* msg);
+  // Clear the monitor thread and stop sending it messages if the thread goes
+  // away before this monitor does.
+ void OnMessageQueueDestroyed() { monitor_thread_ = NULL; }
+
+ private:
+ Thread* monitor_thread_;
+ CpuSampler sampler_;
+ int period_ms_;
+
+ DISALLOW_COPY_AND_ASSIGN(CpuMonitor);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_CPUMONITOR_H_
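A compact sketch of polling CpuSampler directly, along the lines of the unit test added below; no new API is assumed.

#include <stdio.h>

#include "webrtc/base/cpumonitor.h"
#include "webrtc/base/thread.h"

int main() {
  rtc::CpuSampler sampler;
  if (!sampler.Init())
    return 1;
  // Allow a fresh load value to be computed on every query in this sketch.
  sampler.set_load_interval(0);

  // Prime the counters, wait a bit, then read the loads for that interval.
  sampler.GetProcessLoad();
  sampler.GetSystemLoad();
  rtc::Thread::SleepMs(500);
  printf("process load: %.2f of %d cpus\n",
         sampler.GetProcessLoad(), sampler.GetMaxCpus());
  printf("system load:  %.2f of %d cpus\n",
         sampler.GetSystemLoad(), sampler.GetMaxCpus());
  return 0;
}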
diff --git a/chromium/third_party/webrtc/base/cpumonitor_unittest.cc b/chromium/third_party/webrtc/base/cpumonitor_unittest.cc
new file mode 100644
index 00000000000..6d9af5aece7
--- /dev/null
+++ b/chromium/third_party/webrtc/base/cpumonitor_unittest.cc
@@ -0,0 +1,388 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <iomanip>
+#include <iostream>
+#include <vector>
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#endif
+
+#include "webrtc/base/cpumonitor.h"
+#include "webrtc/base/flags.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/base/timing.h"
+
+namespace rtc {
+
+static const int kMaxCpus = 1024;
+static const int kSettleTime = 100; // Time to wait between tests in ms.
+static const int kIdleTime = 500; // Amount of time to be idle in ms.
+static const int kBusyTime = 1000; // Amount of time to be busy in ms.
+static const int kLongInterval = 2000; // Interval longer than busy times.
+
+class BusyThread : public rtc::Thread {
+ public:
+ BusyThread(double load, double duration, double interval) :
+ load_(load), duration_(duration), interval_(interval) {
+ }
+ virtual ~BusyThread() {
+ Stop();
+ }
+ void Run() {
+ Timing time;
+ double busy_time = interval_ * load_ / 100.0;
+ for (;;) {
+ time.BusyWait(busy_time);
+ time.IdleWait(interval_ - busy_time);
+ if (duration_) {
+ duration_ -= interval_;
+ if (duration_ <= 0) {
+ break;
+ }
+ }
+ }
+ }
+ private:
+ double load_;
+ double duration_;
+ double interval_;
+};
+
+class CpuLoadListener : public sigslot::has_slots<> {
+ public:
+ CpuLoadListener()
+ : current_cpus_(0),
+ cpus_(0),
+ process_load_(.0f),
+ system_load_(.0f),
+ count_(0) {
+ }
+
+ void OnCpuLoad(int current_cpus, int cpus, float proc_load, float sys_load) {
+ current_cpus_ = current_cpus;
+ cpus_ = cpus;
+ process_load_ = proc_load;
+ system_load_ = sys_load;
+ ++count_;
+ }
+
+ int current_cpus() const { return current_cpus_; }
+ int cpus() const { return cpus_; }
+ float process_load() const { return process_load_; }
+ float system_load() const { return system_load_; }
+ int count() const { return count_; }
+
+ private:
+ int current_cpus_;
+ int cpus_;
+ float process_load_;
+ float system_load_;
+ int count_;
+};
+
+// Set affinity (which cpu to run on), but respecting FLAG_affinity:
+// -1 means no affinity - run on whatever cpu is available.
+// 0 .. N means run on specific cpu. The tool will create N threads and call
+// SetThreadAffinity on 0 to N - 1 as cpu. FLAG_affinity sets the first cpu
+// so the range becomes affinity to affinity + N - 1
+// Note that this function affects Windows scheduling, effectively giving
+// the thread with affinity for a specified CPU more priority on that CPU.
+bool SetThreadAffinity(BusyThread* t, int cpu, int affinity) {
+#if defined(WEBRTC_WIN)
+ if (affinity >= 0) {
+ return ::SetThreadAffinityMask(t->GetHandle(),
+ 1 << (cpu + affinity)) != FALSE;
+ }
+#endif
+ return true;
+}
+
+bool SetThreadPriority(BusyThread* t, int prio) {
+ if (!prio) {
+ return true;
+ }
+ bool ok = t->SetPriority(static_cast<rtc::ThreadPriority>(prio));
+ if (!ok) {
+ std::cout << "Error setting thread priority." << std::endl;
+ }
+ return ok;
+}
+
+int CpuLoad(double cpuload, double duration, int numthreads,
+ int priority, double interval, int affinity) {
+ int ret = 0;
+ std::vector<BusyThread*> threads;
+ for (int i = 0; i < numthreads; ++i) {
+ threads.push_back(new BusyThread(cpuload, duration, interval));
+ // NOTE(fbarchard): Priority must be done before Start.
+ if (!SetThreadPriority(threads[i], priority) ||
+ !threads[i]->Start() ||
+ !SetThreadAffinity(threads[i], i, affinity)) {
+ ret = 1;
+ break;
+ }
+ }
+ // Wait on each thread
+ if (ret == 0) {
+ for (int i = 0; i < numthreads; ++i) {
+ threads[i]->Stop();
+ }
+ }
+
+ for (int i = 0; i < numthreads; ++i) {
+ delete threads[i];
+ }
+ return ret;
+}
+
+// Make 2 CPUs busy
+static void CpuTwoBusyLoop(int busytime) {
+ CpuLoad(100.0, busytime / 1000.0, 2, 1, 0.050, -1);
+}
+
+// Make 1 CPU busy
+static void CpuBusyLoop(int busytime) {
+ CpuLoad(100.0, busytime / 1000.0, 1, 1, 0.050, -1);
+}
+
+// Make 1 CPU use half of its time.
+static void CpuHalfBusyLoop(int busytime) {
+ CpuLoad(50.0, busytime / 1000.0, 1, 1, 0.050, -1);
+}
+
+void TestCpuSampler(bool test_proc, bool test_sys, bool force_fallback) {
+ CpuSampler sampler;
+ sampler.set_force_fallback(force_fallback);
+ EXPECT_TRUE(sampler.Init());
+ sampler.set_load_interval(100);
+ int cpus = sampler.GetMaxCpus();
+
+ // Test1: CpuSampler under idle situation.
+ Thread::SleepMs(kSettleTime);
+ sampler.GetProcessLoad();
+ sampler.GetSystemLoad();
+
+ Thread::SleepMs(kIdleTime);
+
+ float proc_idle = 0.f, sys_idle = 0.f;
+ if (test_proc) {
+ proc_idle = sampler.GetProcessLoad();
+ }
+ if (test_sys) {
+ sys_idle = sampler.GetSystemLoad();
+ }
+ if (test_proc) {
+ LOG(LS_INFO) << "ProcessLoad Idle: "
+ << std::setiosflags(std::ios_base::fixed)
+ << std::setprecision(2) << std::setw(6) << proc_idle;
+ EXPECT_GE(proc_idle, 0.f);
+ EXPECT_LE(proc_idle, static_cast<float>(cpus));
+ }
+ if (test_sys) {
+ LOG(LS_INFO) << "SystemLoad Idle: "
+ << std::setiosflags(std::ios_base::fixed)
+ << std::setprecision(2) << std::setw(6) << sys_idle;
+ EXPECT_GE(sys_idle, 0.f);
+ EXPECT_LE(sys_idle, static_cast<float>(cpus));
+ }
+
+ // Test2: CpuSampler with main process at 50% busy.
+ Thread::SleepMs(kSettleTime);
+ sampler.GetProcessLoad();
+ sampler.GetSystemLoad();
+
+ CpuHalfBusyLoop(kBusyTime);
+
+ float proc_halfbusy = 0.f, sys_halfbusy = 0.f;
+ if (test_proc) {
+ proc_halfbusy = sampler.GetProcessLoad();
+ }
+ if (test_sys) {
+ sys_halfbusy = sampler.GetSystemLoad();
+ }
+ if (test_proc) {
+ LOG(LS_INFO) << "ProcessLoad Halfbusy: "
+ << std::setiosflags(std::ios_base::fixed)
+ << std::setprecision(2) << std::setw(6) << proc_halfbusy;
+ EXPECT_GE(proc_halfbusy, 0.f);
+ EXPECT_LE(proc_halfbusy, static_cast<float>(cpus));
+ }
+ if (test_sys) {
+ LOG(LS_INFO) << "SystemLoad Halfbusy: "
+ << std::setiosflags(std::ios_base::fixed)
+ << std::setprecision(2) << std::setw(6) << sys_halfbusy;
+ EXPECT_GE(sys_halfbusy, 0.f);
+ EXPECT_LE(sys_halfbusy, static_cast<float>(cpus));
+ }
+
+ // Test3: CpuSampler with main process busy.
+ Thread::SleepMs(kSettleTime);
+ sampler.GetProcessLoad();
+ sampler.GetSystemLoad();
+
+ CpuBusyLoop(kBusyTime);
+
+ float proc_busy = 0.f, sys_busy = 0.f;
+ if (test_proc) {
+ proc_busy = sampler.GetProcessLoad();
+ }
+ if (test_sys) {
+ sys_busy = sampler.GetSystemLoad();
+ }
+ if (test_proc) {
+ LOG(LS_INFO) << "ProcessLoad Busy: "
+ << std::setiosflags(std::ios_base::fixed)
+ << std::setprecision(2) << std::setw(6) << proc_busy;
+ EXPECT_GE(proc_busy, 0.f);
+ EXPECT_LE(proc_busy, static_cast<float>(cpus));
+ }
+ if (test_sys) {
+ LOG(LS_INFO) << "SystemLoad Busy: "
+ << std::setiosflags(std::ios_base::fixed)
+ << std::setprecision(2) << std::setw(6) << sys_busy;
+ EXPECT_GE(sys_busy, 0.f);
+ EXPECT_LE(sys_busy, static_cast<float>(cpus));
+ }
+
+ // Test4: CpuSampler with 2 cpus process busy.
+ if (cpus >= 2) {
+ Thread::SleepMs(kSettleTime);
+ sampler.GetProcessLoad();
+ sampler.GetSystemLoad();
+
+ CpuTwoBusyLoop(kBusyTime);
+
+ float proc_twobusy = 0.f, sys_twobusy = 0.f;
+ if (test_proc) {
+ proc_twobusy = sampler.GetProcessLoad();
+ }
+ if (test_sys) {
+ sys_twobusy = sampler.GetSystemLoad();
+ }
+ if (test_proc) {
+ LOG(LS_INFO) << "ProcessLoad 2 CPU Busy:"
+ << std::setiosflags(std::ios_base::fixed)
+ << std::setprecision(2) << std::setw(6) << proc_twobusy;
+ EXPECT_GE(proc_twobusy, 0.f);
+ EXPECT_LE(proc_twobusy, static_cast<float>(cpus));
+ }
+ if (test_sys) {
+ LOG(LS_INFO) << "SystemLoad 2 CPU Busy: "
+ << std::setiosflags(std::ios_base::fixed)
+ << std::setprecision(2) << std::setw(6) << sys_twobusy;
+ EXPECT_GE(sys_twobusy, 0.f);
+ EXPECT_LE(sys_twobusy, static_cast<float>(cpus));
+ }
+ }
+
+ // Test5: CpuSampler with idle process after being busy.
+ Thread::SleepMs(kSettleTime);
+ sampler.GetProcessLoad();
+ sampler.GetSystemLoad();
+
+ Thread::SleepMs(kIdleTime);
+
+ if (test_proc) {
+ proc_idle = sampler.GetProcessLoad();
+ }
+ if (test_sys) {
+ sys_idle = sampler.GetSystemLoad();
+ }
+ if (test_proc) {
+ LOG(LS_INFO) << "ProcessLoad Idle: "
+ << std::setiosflags(std::ios_base::fixed)
+ << std::setprecision(2) << std::setw(6) << proc_idle;
+ EXPECT_GE(proc_idle, 0.f);
+ EXPECT_LE(proc_idle, proc_busy);
+ }
+ if (test_sys) {
+ LOG(LS_INFO) << "SystemLoad Idle: "
+ << std::setiosflags(std::ios_base::fixed)
+ << std::setprecision(2) << std::setw(6) << sys_idle;
+ EXPECT_GE(sys_idle, 0.f);
+ EXPECT_LE(sys_idle, static_cast<float>(cpus));
+ }
+}
+
+TEST(CpuMonitorTest, TestCpus) {
+ CpuSampler sampler;
+ EXPECT_TRUE(sampler.Init());
+ int current_cpus = sampler.GetCurrentCpus();
+ int cpus = sampler.GetMaxCpus();
+ LOG(LS_INFO) << "Current Cpus: " << std::setw(9) << current_cpus;
+ LOG(LS_INFO) << "Maximum Cpus: " << std::setw(9) << cpus;
+ EXPECT_GT(cpus, 0);
+ EXPECT_LE(cpus, kMaxCpus);
+ EXPECT_GT(current_cpus, 0);
+ EXPECT_LE(current_cpus, cpus);
+}
+
+#if defined(WEBRTC_WIN)
+// Tests overall system CpuSampler using legacy OS fallback code if applicable.
+TEST(CpuMonitorTest, TestGetSystemLoadForceFallback) {
+ TestCpuSampler(false, true, true);
+}
+#endif
+
+// Tests both process and system functions in use at same time.
+TEST(CpuMonitorTest, TestGetBothLoad) {
+ TestCpuSampler(true, true, false);
+}
+
+// Tests that querying again within the interval produces the same value.
+TEST(CpuMonitorTest, TestInterval) {
+ CpuSampler sampler;
+ EXPECT_TRUE(sampler.Init());
+
+ // Test1: Set interval to large value so sampler will not update.
+ sampler.set_load_interval(kLongInterval);
+
+ sampler.GetProcessLoad();
+ sampler.GetSystemLoad();
+
+ float proc_orig = sampler.GetProcessLoad();
+ float sys_orig = sampler.GetSystemLoad();
+
+ Thread::SleepMs(kIdleTime);
+
+ float proc_halftime = sampler.GetProcessLoad();
+ float sys_halftime = sampler.GetSystemLoad();
+
+ EXPECT_EQ(proc_orig, proc_halftime);
+ EXPECT_EQ(sys_orig, sys_halftime);
+}
+
+TEST(CpuMonitorTest, TestCpuMonitor) {
+ CpuMonitor monitor(Thread::Current());
+ CpuLoadListener listener;
+ monitor.SignalUpdate.connect(&listener, &CpuLoadListener::OnCpuLoad);
+ EXPECT_TRUE(monitor.Start(10));
+  // Wait until the cpu load has been checked more than twice.
+ EXPECT_TRUE_WAIT(listener.count() > 2, 1000);
+ EXPECT_GT(listener.current_cpus(), 0);
+ EXPECT_GT(listener.cpus(), 0);
+ EXPECT_GE(listener.process_load(), .0f);
+ EXPECT_GE(listener.system_load(), .0f);
+
+ monitor.Stop();
+  // Wait 20 ms to make sure all signals are delivered.
+ Thread::Current()->ProcessMessages(20);
+ int old_count = listener.count();
+ Thread::Current()->ProcessMessages(20);
+  // Verify that no more signals are delivered.
+ EXPECT_EQ(old_count, listener.count());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/crc32.cc b/chromium/third_party/webrtc/base/crc32.cc
new file mode 100644
index 00000000000..d643a25a4be
--- /dev/null
+++ b/chromium/third_party/webrtc/base/crc32.cc
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/crc32.h"
+
+#include "webrtc/base/basicdefs.h"
+
+namespace rtc {
+
+// This implementation is based on the sample implementation in RFC 1952.
+
+// CRC32 polynomial, in reversed form.
+// See RFC 1952, or http://en.wikipedia.org/wiki/Cyclic_redundancy_check
+static const uint32 kCrc32Polynomial = 0xEDB88320;
+static uint32 kCrc32Table[256] = { 0 };
+
+static void EnsureCrc32TableInited() {
+ if (kCrc32Table[ARRAY_SIZE(kCrc32Table) - 1])
+ return; // already inited
+ for (uint32 i = 0; i < ARRAY_SIZE(kCrc32Table); ++i) {
+ uint32 c = i;
+ for (size_t j = 0; j < 8; ++j) {
+ if (c & 1) {
+ c = kCrc32Polynomial ^ (c >> 1);
+ } else {
+ c >>= 1;
+ }
+ }
+ kCrc32Table[i] = c;
+ }
+}
+
+uint32 UpdateCrc32(uint32 start, const void* buf, size_t len) {
+ EnsureCrc32TableInited();
+
+ uint32 c = start ^ 0xFFFFFFFF;
+ const uint8* u = static_cast<const uint8*>(buf);
+ for (size_t i = 0; i < len; ++i) {
+ c = kCrc32Table[(c ^ u[i]) & 0xFF] ^ (c >> 8);
+ }
+ return c ^ 0xFFFFFFFF;
+}
+
+} // namespace rtc
+
diff --git a/chromium/third_party/webrtc/base/crc32.h b/chromium/third_party/webrtc/base/crc32.h
new file mode 100644
index 00000000000..99b4cac894e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/crc32.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_CRC32_H_
+#define WEBRTC_BASE_CRC32_H_
+
+#include <string>
+
+#include "webrtc/base/basictypes.h"
+
+namespace rtc {
+
+// Updates a CRC32 checksum with |len| bytes from |buf|. |initial| holds the
+// checksum result from the previous update; for the first call, it should be 0.
+uint32 UpdateCrc32(uint32 initial, const void* buf, size_t len);
+
+// Computes a CRC32 checksum using |len| bytes from |buf|.
+inline uint32 ComputeCrc32(const void* buf, size_t len) {
+ return UpdateCrc32(0, buf, len);
+}
+inline uint32 ComputeCrc32(const std::string& str) {
+ return ComputeCrc32(str.c_str(), str.size());
+}
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_CRC32_H_
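A short sketch of the two entry points above; the expected value matches the "abc" vector in the unit test added below.

#include <stdio.h>
#include <string>

#include "webrtc/base/crc32.h"

int main() {
  // One-shot over a whole string.
  printf("%08X\n", rtc::ComputeCrc32("abc"));  // 352441C2

  // Incremental: feeding the same bytes in two chunks gives the same result.
  std::string data = "abc";
  uint32 crc = rtc::UpdateCrc32(0, data.data(), 1);
  crc = rtc::UpdateCrc32(crc, data.data() + 1, 2);
  printf("%08X\n", crc);  // 352441C2
  return 0;
}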
diff --git a/chromium/third_party/webrtc/base/crc32_unittest.cc b/chromium/third_party/webrtc/base/crc32_unittest.cc
new file mode 100644
index 00000000000..0bfdeeea0d2
--- /dev/null
+++ b/chromium/third_party/webrtc/base/crc32_unittest.cc
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/crc32.h"
+#include "webrtc/base/gunit.h"
+
+#include <string>
+
+namespace rtc {
+
+TEST(Crc32Test, TestBasic) {
+ EXPECT_EQ(0U, ComputeCrc32(""));
+ EXPECT_EQ(0x352441C2U, ComputeCrc32("abc"));
+ EXPECT_EQ(0x171A3F5FU,
+ ComputeCrc32("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"));
+}
+
+TEST(Crc32Test, TestMultipleUpdates) {
+ std::string input =
+ "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
+ uint32 c = 0;
+ for (size_t i = 0; i < input.size(); ++i) {
+ c = UpdateCrc32(c, &input[i], 1);
+ }
+ EXPECT_EQ(0x171A3F5FU, c);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/criticalsection.h b/chromium/third_party/webrtc/base/criticalsection.h
new file mode 100644
index 00000000000..a950a47f592
--- /dev/null
+++ b/chromium/third_party/webrtc/base/criticalsection.h
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_CRITICALSECTION_H__
+#define WEBRTC_BASE_CRITICALSECTION_H__
+
+#include "webrtc/base/constructormagic.h"
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#endif
+
+#if defined(WEBRTC_POSIX)
+#include <pthread.h>
+#endif
+
+#ifdef _DEBUG
+#define CS_TRACK_OWNER 1
+#endif // _DEBUG
+
+#if CS_TRACK_OWNER
+#define TRACK_OWNER(x) x
+#else // !CS_TRACK_OWNER
+#define TRACK_OWNER(x)
+#endif // !CS_TRACK_OWNER
+
+namespace rtc {
+
+#if defined(WEBRTC_WIN)
+class CriticalSection {
+ public:
+ CriticalSection() {
+ InitializeCriticalSection(&crit_);
+ // Windows docs say 0 is not a valid thread id
+ TRACK_OWNER(thread_ = 0);
+ }
+ ~CriticalSection() {
+ DeleteCriticalSection(&crit_);
+ }
+ void Enter() {
+ EnterCriticalSection(&crit_);
+ TRACK_OWNER(thread_ = GetCurrentThreadId());
+ }
+ bool TryEnter() {
+ if (TryEnterCriticalSection(&crit_) != FALSE) {
+ TRACK_OWNER(thread_ = GetCurrentThreadId());
+ return true;
+ }
+ return false;
+ }
+ void Leave() {
+ TRACK_OWNER(thread_ = 0);
+ LeaveCriticalSection(&crit_);
+ }
+
+#if CS_TRACK_OWNER
+ bool CurrentThreadIsOwner() const { return thread_ == GetCurrentThreadId(); }
+#endif // CS_TRACK_OWNER
+
+ private:
+ CRITICAL_SECTION crit_;
+ TRACK_OWNER(DWORD thread_); // The section's owning thread id
+};
+#endif // WEBRTC_WIN
+
+#if defined(WEBRTC_POSIX)
+class CriticalSection {
+ public:
+ CriticalSection() {
+ pthread_mutexattr_t mutex_attribute;
+ pthread_mutexattr_init(&mutex_attribute);
+ pthread_mutexattr_settype(&mutex_attribute, PTHREAD_MUTEX_RECURSIVE);
+ pthread_mutex_init(&mutex_, &mutex_attribute);
+ pthread_mutexattr_destroy(&mutex_attribute);
+ TRACK_OWNER(thread_ = 0);
+ }
+ ~CriticalSection() {
+ pthread_mutex_destroy(&mutex_);
+ }
+ void Enter() {
+ pthread_mutex_lock(&mutex_);
+ TRACK_OWNER(thread_ = pthread_self());
+ }
+ bool TryEnter() {
+ if (pthread_mutex_trylock(&mutex_) == 0) {
+ TRACK_OWNER(thread_ = pthread_self());
+ return true;
+ }
+ return false;
+ }
+ void Leave() {
+ TRACK_OWNER(thread_ = 0);
+ pthread_mutex_unlock(&mutex_);
+ }
+
+#if CS_TRACK_OWNER
+ bool CurrentThreadIsOwner() const { return pthread_equal(thread_, pthread_self()); }
+#endif // CS_TRACK_OWNER
+
+ private:
+ pthread_mutex_t mutex_;
+ TRACK_OWNER(pthread_t thread_);
+};
+#endif // WEBRTC_POSIX
+
+// CritScope, for serializing execution through a scope.
+class CritScope {
+ public:
+ explicit CritScope(CriticalSection *pcrit) {
+ pcrit_ = pcrit;
+ pcrit_->Enter();
+ }
+ ~CritScope() {
+ pcrit_->Leave();
+ }
+ private:
+ CriticalSection *pcrit_;
+ DISALLOW_COPY_AND_ASSIGN(CritScope);
+};
+
+// Tries to lock a critical section on construction via
+// CriticalSection::TryEnter, and unlocks on destruction if the
+// lock was taken. Never blocks.
+//
+// IMPORTANT: Unlike CritScope, the lock may not be owned by this thread in
+// subsequent code. Users *must* check locked() to determine if the
+// lock was taken. If you're not calling locked(), you're doing it wrong!
+class TryCritScope {
+ public:
+ explicit TryCritScope(CriticalSection *pcrit) {
+ pcrit_ = pcrit;
+ locked_ = pcrit_->TryEnter();
+ }
+ ~TryCritScope() {
+ if (locked_) {
+ pcrit_->Leave();
+ }
+ }
+ bool locked() const {
+ return locked_;
+ }
+ private:
+ CriticalSection *pcrit_;
+ bool locked_;
+ DISALLOW_COPY_AND_ASSIGN(TryCritScope);
+};
+
+// TODO: Move this to atomicops.h, which can't be done easily because of
+// complex compile rules.
+class AtomicOps {
+ public:
+#if defined(WEBRTC_WIN)
+ // Assumes sizeof(int) == sizeof(LONG), which it is on Win32 and Win64.
+ static int Increment(int* i) {
+ return ::InterlockedIncrement(reinterpret_cast<LONG*>(i));
+ }
+ static int Decrement(int* i) {
+ return ::InterlockedDecrement(reinterpret_cast<LONG*>(i));
+ }
+#else
+ static int Increment(int* i) {
+ return __sync_add_and_fetch(i, 1);
+ }
+ static int Decrement(int* i) {
+ return __sync_sub_and_fetch(i, 1);
+ }
+#endif
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_CRITICALSECTION_H__
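A minimal sketch of guarding shared state with CritScope and of AtomicOps, roughly what the unit test added below exercises; Counter is an invented class.

#include <stdio.h>

#include "webrtc/base/criticalsection.h"

// Example-only class, not part of the patch.
class Counter {
 public:
  Counter() : value_(0) {}

  void Add(int delta) {
    // Enter() is called here and Leave() runs on every exit path from the
    // scope, so the lock cannot be forgotten.
    rtc::CritScope cs(&crit_);
    value_ += delta;
  }

  int value() const {
    rtc::CritScope cs(&crit_);
    return value_;
  }

 private:
  mutable rtc::CriticalSection crit_;
  int value_;
};

int main() {
  Counter counter;
  counter.Add(2);
  counter.Add(3);
  printf("%d\n", counter.value());  // 5

  // Lock-free increment/decrement, e.g. for simple reference counts.
  int refs = 0;
  printf("%d\n", rtc::AtomicOps::Increment(&refs));  // 1
  printf("%d\n", rtc::AtomicOps::Decrement(&refs));  // 0
  return 0;
}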
diff --git a/chromium/third_party/webrtc/base/criticalsection_unittest.cc b/chromium/third_party/webrtc/base/criticalsection_unittest.cc
new file mode 100644
index 00000000000..e1b05cb0170
--- /dev/null
+++ b/chromium/third_party/webrtc/base/criticalsection_unittest.cc
@@ -0,0 +1,146 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <set>
+#include <vector>
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scopedptrcollection.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+namespace {
+
+const int kLongTime = 10000; // 10 seconds
+const int kNumThreads = 16;
+const int kOperationsToRun = 1000;
+
+template <class T>
+class AtomicOpRunner : public MessageHandler {
+ public:
+ explicit AtomicOpRunner(int initial_value)
+ : value_(initial_value),
+ threads_active_(0),
+ start_event_(true, false),
+ done_event_(true, false) {}
+
+ int value() const { return value_; }
+
+ bool Run() {
+ // Signal all threads to start.
+ start_event_.Set();
+
+ // Wait for all threads to finish.
+ return done_event_.Wait(kLongTime);
+ }
+
+ void SetExpectedThreadCount(int count) {
+ threads_active_ = count;
+ }
+
+ virtual void OnMessage(Message* msg) {
+ std::vector<int> values;
+ values.reserve(kOperationsToRun);
+
+ // Wait to start.
+ ASSERT_TRUE(start_event_.Wait(kLongTime));
+
+ // Generate a bunch of values by updating value_ atomically.
+ for (int i = 0; i < kOperationsToRun; ++i) {
+ values.push_back(T::AtomicOp(&value_));
+ }
+
+ { // Add them all to the set.
+ CritScope cs(&all_values_crit_);
+ for (size_t i = 0; i < values.size(); ++i) {
+ std::pair<std::set<int>::iterator, bool> result =
+ all_values_.insert(values[i]);
+ // Each value should only be taken by one thread, so if this value
+ // has already been added, something went wrong.
+ EXPECT_TRUE(result.second)
+ << "Thread=" << Thread::Current() << " value=" << values[i];
+ }
+ }
+
+ // Signal that we're done.
+ if (AtomicOps::Decrement(&threads_active_) == 0) {
+ done_event_.Set();
+ }
+ }
+
+ private:
+ int value_;
+ int threads_active_;
+ CriticalSection all_values_crit_;
+ std::set<int> all_values_;
+ Event start_event_;
+ Event done_event_;
+};
+
+struct IncrementOp {
+ static int AtomicOp(int* i) { return AtomicOps::Increment(i); }
+};
+
+struct DecrementOp {
+ static int AtomicOp(int* i) { return AtomicOps::Decrement(i); }
+};
+
+void StartThreads(ScopedPtrCollection<Thread>* threads,
+ MessageHandler* handler) {
+ for (int i = 0; i < kNumThreads; ++i) {
+ Thread* thread = new Thread();
+ thread->Start();
+ thread->Post(handler);
+ threads->PushBack(thread);
+ }
+}
+
+} // namespace
+
+TEST(AtomicOpsTest, Simple) {
+ int value = 0;
+ EXPECT_EQ(1, AtomicOps::Increment(&value));
+ EXPECT_EQ(1, value);
+ EXPECT_EQ(2, AtomicOps::Increment(&value));
+ EXPECT_EQ(2, value);
+ EXPECT_EQ(1, AtomicOps::Decrement(&value));
+ EXPECT_EQ(1, value);
+ EXPECT_EQ(0, AtomicOps::Decrement(&value));
+ EXPECT_EQ(0, value);
+}
+
+TEST(AtomicOpsTest, Increment) {
+ // Create and start lots of threads.
+ AtomicOpRunner<IncrementOp> runner(0);
+ ScopedPtrCollection<Thread> threads;
+ StartThreads(&threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+
+ // Release the hounds!
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(kOperationsToRun * kNumThreads, runner.value());
+}
+
+TEST(AtomicOpsTest, Decrement) {
+ // Create and start lots of threads.
+ AtomicOpRunner<DecrementOp> runner(kOperationsToRun * kNumThreads);
+ ScopedPtrCollection<Thread> threads;
+ StartThreads(&threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+
+ // Release the hounds!
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.value());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/cryptstring.h b/chromium/third_party/webrtc/base/cryptstring.h
new file mode 100644
index 00000000000..f43057d8f0f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/cryptstring.h
@@ -0,0 +1,183 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef _WEBRTC_BASE_CRYPTSTRING_H_
+#define _WEBRTC_BASE_CRYPTSTRING_H_
+
+#include <string.h>
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/linked_ptr.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace rtc {
+
+class CryptStringImpl {
+public:
+ virtual ~CryptStringImpl() {}
+ virtual size_t GetLength() const = 0;
+ virtual void CopyTo(char * dest, bool nullterminate) const = 0;
+ virtual std::string UrlEncode() const = 0;
+ virtual CryptStringImpl * Copy() const = 0;
+ virtual void CopyRawTo(std::vector<unsigned char> * dest) const = 0;
+};
+
+class EmptyCryptStringImpl : public CryptStringImpl {
+public:
+ virtual ~EmptyCryptStringImpl() {}
+ virtual size_t GetLength() const { return 0; }
+ virtual void CopyTo(char * dest, bool nullterminate) const {
+ if (nullterminate) {
+ *dest = '\0';
+ }
+ }
+ virtual std::string UrlEncode() const { return ""; }
+ virtual CryptStringImpl * Copy() const { return new EmptyCryptStringImpl(); }
+ virtual void CopyRawTo(std::vector<unsigned char> * dest) const {
+ dest->clear();
+ }
+};
+
+class CryptString {
+public:
+ CryptString() : impl_(new EmptyCryptStringImpl()) {}
+ size_t GetLength() const { return impl_->GetLength(); }
+ void CopyTo(char * dest, bool nullterminate) const { impl_->CopyTo(dest, nullterminate); }
+ CryptString(const CryptString & other) : impl_(other.impl_->Copy()) {}
+ explicit CryptString(const CryptStringImpl & impl) : impl_(impl.Copy()) {}
+ CryptString & operator=(const CryptString & other) {
+ if (this != &other) {
+ impl_.reset(other.impl_->Copy());
+ }
+ return *this;
+ }
+ void Clear() { impl_.reset(new EmptyCryptStringImpl()); }
+ std::string UrlEncode() const { return impl_->UrlEncode(); }
+ void CopyRawTo(std::vector<unsigned char> * dest) const {
+ return impl_->CopyRawTo(dest);
+ }
+
+private:
+ scoped_ptr<const CryptStringImpl> impl_;
+};
+
+
+// Used for constructing strings where a password is involved and we
+// need to ensure that we zero memory afterwards
+class FormatCryptString {
+public:
+ FormatCryptString() {
+ storage_ = new char[32];
+ capacity_ = 32;
+ length_ = 0;
+ storage_[0] = 0;
+ }
+
+ void Append(const std::string & text) {
+ Append(text.data(), text.length());
+ }
+
+ void Append(const char * data, size_t length) {
+ EnsureStorage(length_ + length + 1);
+ memcpy(storage_ + length_, data, length);
+ length_ += length;
+ storage_[length_] = '\0';
+ }
+
+ void Append(const CryptString * password) {
+ size_t len = password->GetLength();
+ EnsureStorage(length_ + len + 1);
+ password->CopyTo(storage_ + length_, true);
+ length_ += len;
+ }
+
+ size_t GetLength() {
+ return length_;
+ }
+
+ const char * GetData() {
+ return storage_;
+ }
+
+
+ // Ensures storage of at least n bytes
+ void EnsureStorage(size_t n) {
+ if (capacity_ >= n) {
+ return;
+ }
+
+ size_t old_capacity = capacity_;
+ char * old_storage = storage_;
+
+ for (;;) {
+ capacity_ *= 2;
+ if (capacity_ >= n)
+ break;
+ }
+
+ storage_ = new char[capacity_];
+
+ if (old_capacity) {
+ memcpy(storage_, old_storage, length_);
+
+ // zero memory in a way that an optimizer won't optimize it out
+ old_storage[0] = 0;
+ for (size_t i = 1; i < old_capacity; i++) {
+ old_storage[i] = old_storage[i - 1];
+ }
+ delete[] old_storage;
+ }
+ }
+
+ ~FormatCryptString() {
+ if (capacity_) {
+ storage_[0] = 0;
+ for (size_t i = 1; i < capacity_; i++) {
+ storage_[i] = storage_[i - 1];
+ }
+ }
+ delete[] storage_;
+ }
+private:
+ char * storage_;
+ size_t capacity_;
+ size_t length_;
+};
+
+class InsecureCryptStringImpl : public CryptStringImpl {
+ public:
+ std::string& password() { return password_; }
+ const std::string& password() const { return password_; }
+
+ virtual ~InsecureCryptStringImpl() {}
+ virtual size_t GetLength() const { return password_.size(); }
+ virtual void CopyTo(char * dest, bool nullterminate) const {
+ memcpy(dest, password_.data(), password_.size());
+ if (nullterminate) dest[password_.size()] = 0;
+ }
+ virtual std::string UrlEncode() const { return password_; }
+ virtual CryptStringImpl * Copy() const {
+ InsecureCryptStringImpl * copy = new InsecureCryptStringImpl;
+ copy->password() = password_;
+ return copy;
+ }
+ virtual void CopyRawTo(std::vector<unsigned char> * dest) const {
+ dest->resize(password_.size());
+ memcpy(&dest->front(), password_.data(), password_.size());
+ }
+ private:
+ std::string password_;
+};
+
+}
+
+#endif // _WEBRTC_BASE_CRYPTSTRING_H_
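
Illustration (not part of the patch): a sketch of how CryptString,
InsecureCryptStringImpl, and FormatCryptString fit together. Function names
are hypothetical.

    // usage_sketch.cc -- illustrative only.
    #include <string>

    #include "webrtc/base/cryptstring.h"

    rtc::CryptString MakePassword(const std::string& plaintext) {
      // InsecureCryptStringImpl keeps the secret in a plain std::string;
      // CryptString copies the implementation and owns that copy.
      rtc::InsecureCryptStringImpl impl;
      impl.password() = plaintext;
      return rtc::CryptString(impl);
    }

    void BuildBasicAuthLine(const std::string& user,
                            const rtc::CryptString& password) {
      // FormatCryptString zeroes its buffer on destruction, so the
      // concatenated secret does not linger in freed memory.
      rtc::FormatCryptString formatted;
      formatted.Append(user);
      formatted.Append(":", 1);
      formatted.Append(&password);
      // Use formatted.GetData() / formatted.GetLength() before it goes out
      // of scope; copying the result into a std::string would defeat the
      // zeroing.
    }
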
diff --git a/chromium/third_party/webrtc/base/dbus.cc b/chromium/third_party/webrtc/base/dbus.cc
new file mode 100644
index 00000000000..b8392f9a2c1
--- /dev/null
+++ b/chromium/third_party/webrtc/base/dbus.cc
@@ -0,0 +1,396 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef HAVE_DBUS_GLIB
+
+#include "webrtc/base/dbus.h"
+
+#include <glib.h>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+// Avoid static object construction/destruction on startup/shutdown.
+static pthread_once_t g_dbus_init_once = PTHREAD_ONCE_INIT;
+static LibDBusGlibSymbolTable *g_dbus_symbol = NULL;
+
+// Releases DBus-Glib symbols.
+static void ReleaseDBusGlibSymbol() {
+ if (g_dbus_symbol != NULL) {
+ delete g_dbus_symbol;
+ g_dbus_symbol = NULL;
+ }
+}
+
+// Loads DBus-Glib symbols.
+static void InitializeDBusGlibSymbol() {
+ // This is thread safe.
+ if (NULL == g_dbus_symbol) {
+ g_dbus_symbol = new LibDBusGlibSymbolTable();
+
+ // Loads dbus-glib
+ if (NULL == g_dbus_symbol || !g_dbus_symbol->Load()) {
+ LOG(LS_WARNING) << "Failed to load dbus-glib symbol table.";
+ ReleaseDBusGlibSymbol();
+ } else {
+ // Nothing we can do if atexit() failed. Just ignore its returned value.
+ atexit(ReleaseDBusGlibSymbol);
+ }
+ }
+}
+
+inline static LibDBusGlibSymbolTable *GetSymbols() {
+ return DBusMonitor::GetDBusGlibSymbolTable();
+}
+
+// Implementation of class DBusSigMessageData
+DBusSigMessageData::DBusSigMessageData(DBusMessage *message)
+ : TypedMessageData<DBusMessage *>(message) {
+ GetSymbols()->dbus_message_ref()(data());
+}
+
+DBusSigMessageData::~DBusSigMessageData() {
+ GetSymbols()->dbus_message_unref()(data());
+}
+
+// Implementation of class DBusSigFilter
+
+// Builds a DBus filter string from given DBus path, interface and member.
+std::string DBusSigFilter::BuildFilterString(const std::string &path,
+ const std::string &interface,
+ const std::string &member) {
+ std::string ret(DBUS_TYPE "='" DBUS_SIGNAL "'");
+ if (!path.empty()) {
+ ret += ("," DBUS_PATH "='");
+ ret += path;
+ ret += "'";
+ }
+ if (!interface.empty()) {
+ ret += ("," DBUS_INTERFACE "='");
+ ret += interface;
+ ret += "'";
+ }
+ if (!member.empty()) {
+ ret += ("," DBUS_MEMBER "='");
+ ret += member;
+ ret += "'";
+ }
+ return ret;
+}
+
+// Forwards the message to the given instance.
+DBusHandlerResult DBusSigFilter::DBusCallback(DBusConnection *dbus_conn,
+ DBusMessage *message,
+ void *instance) {
+ ASSERT(instance);
+ if (instance) {
+ return static_cast<DBusSigFilter *>(instance)->Callback(message);
+ }
+ return DBUS_HANDLER_RESULT_NOT_YET_HANDLED;
+}
+
+// Posts a message to caller thread.
+DBusHandlerResult DBusSigFilter::Callback(DBusMessage *message) {
+ if (caller_thread_) {
+ caller_thread_->Post(this, DSM_SIGNAL, new DBusSigMessageData(message));
+ }
+ // Don't "eat" the message here. Let it pop up.
+ return DBUS_HANDLER_RESULT_NOT_YET_HANDLED;
+}
+
+// From MessageHandler.
+void DBusSigFilter::OnMessage(Message *message) {
+ if (message != NULL && DSM_SIGNAL == message->message_id) {
+ DBusSigMessageData *msg =
+ static_cast<DBusSigMessageData *>(message->pdata);
+ if (msg) {
+ ProcessSignal(msg->data());
+ delete msg;
+ }
+ }
+}
+
+// Definition of private class DBusMonitoringThread.
+// It creates a worker-thread to listen for signals on DBus. The worker-thread
+// will run in a private GMainLoop until either Stop() has been invoked or it
+// hits an error.
+class DBusMonitor::DBusMonitoringThread : public rtc::Thread {
+ public:
+ explicit DBusMonitoringThread(DBusMonitor *monitor,
+ GMainContext *context,
+ GMainLoop *mainloop,
+ std::vector<DBusSigFilter *> *filter_list)
+ : monitor_(monitor),
+ context_(context),
+ mainloop_(mainloop),
+ connection_(NULL),
+ idle_source_(NULL),
+ filter_list_(filter_list) {
+ ASSERT(monitor_);
+ ASSERT(context_);
+ ASSERT(mainloop_);
+ ASSERT(filter_list_);
+ }
+
+ virtual ~DBusMonitoringThread() {
+ Stop();
+ }
+
+ // Override virtual method of Thread. Context: worker-thread.
+ virtual void Run() {
+ ASSERT(NULL == connection_);
+
+ // Setup DBus connection and start monitoring.
+ monitor_->OnMonitoringStatusChanged(DMS_INITIALIZING);
+ if (!Setup()) {
+ LOG(LS_ERROR) << "DBus monitoring setup failed.";
+ monitor_->OnMonitoringStatusChanged(DMS_FAILED);
+ CleanUp();
+ return;
+ }
+ monitor_->OnMonitoringStatusChanged(DMS_RUNNING);
+ g_main_loop_run(mainloop_);
+ monitor_->OnMonitoringStatusChanged(DMS_STOPPED);
+
+ // Done normally. Clean up DBus connection.
+ CleanUp();
+ return;
+ }
+
+ // Override virtual method of Thread. Context: caller-thread.
+ virtual void Stop() {
+ ASSERT(NULL == idle_source_);
+ // Add an idle source and let the gmainloop quit on idle.
+ idle_source_ = g_idle_source_new();
+ if (idle_source_) {
+ g_source_set_callback(idle_source_, &Idle, this, NULL);
+ g_source_attach(idle_source_, context_);
+ } else {
+ LOG(LS_ERROR) << "g_idle_source_new() failed.";
+ QuitGMainloop(); // Try to quit anyway.
+ }
+
+ Thread::Stop(); // Wait for the thread.
+ }
+
+ private:
+ // Registers all DBus filters.
+ void RegisterAllFilters() {
+ ASSERT(NULL != GetSymbols()->dbus_g_connection_get_connection()(
+ connection_));
+
+ for (std::vector<DBusSigFilter *>::iterator it = filter_list_->begin();
+ it != filter_list_->end(); ++it) {
+ DBusSigFilter *filter = (*it);
+ if (!filter) {
+ LOG(LS_ERROR) << "DBusSigFilter list corrupted.";
+ continue;
+ }
+
+ GetSymbols()->dbus_bus_add_match()(
+ GetSymbols()->dbus_g_connection_get_connection()(connection_),
+ filter->filter().c_str(), NULL);
+
+ if (!GetSymbols()->dbus_connection_add_filter()(
+ GetSymbols()->dbus_g_connection_get_connection()(connection_),
+ &DBusSigFilter::DBusCallback, filter, NULL)) {
+ LOG(LS_ERROR) << "dbus_connection_add_filter() failed."
+ << "Filter: " << filter->filter();
+ continue;
+ }
+ }
+ }
+
+ // Unregisters all DBus filters.
+ void UnRegisterAllFilters() {
+ ASSERT(NULL != GetSymbols()->dbus_g_connection_get_connection()(
+ connection_));
+
+ for (std::vector<DBusSigFilter *>::iterator it = filter_list_->begin();
+ it != filter_list_->end(); ++it) {
+ DBusSigFilter *filter = (*it);
+ if (!filter) {
+ LOG(LS_ERROR) << "DBusSigFilter list corrupted.";
+ continue;
+ }
+ GetSymbols()->dbus_connection_remove_filter()(
+ GetSymbols()->dbus_g_connection_get_connection()(connection_),
+ &DBusSigFilter::DBusCallback, filter);
+ }
+ }
+
+ // Sets up the monitoring thread.
+ bool Setup() {
+ g_main_context_push_thread_default(context_);
+
+ // Start connection to dbus.
+ // If dbus daemon is not running, returns false immediately.
+ connection_ = GetSymbols()->dbus_g_bus_get_private()(monitor_->type_,
+ context_, NULL);
+ if (NULL == connection_) {
+ LOG(LS_ERROR) << "dbus_g_bus_get_private() unable to get connection.";
+ return false;
+ }
+ if (NULL == GetSymbols()->dbus_g_connection_get_connection()(connection_)) {
+ LOG(LS_ERROR) << "dbus_g_connection_get_connection() returns NULL. "
+ << "DBus daemon is probably not running.";
+ return false;
+ }
+
+    // Don't exit the application if the DBus daemon dies.
+ GetSymbols()->dbus_connection_set_exit_on_disconnect()(
+ GetSymbols()->dbus_g_connection_get_connection()(connection_), FALSE);
+
+ // Connect all filters.
+ RegisterAllFilters();
+
+ return true;
+ }
+
+ // Cleans up the monitoring thread.
+ void CleanUp() {
+ if (idle_source_) {
+ // We did an attach() with the GSource, so we need to destroy() it.
+ g_source_destroy(idle_source_);
+ // We need to unref() the GSource to end the last reference we got.
+ g_source_unref(idle_source_);
+ idle_source_ = NULL;
+ }
+ if (connection_) {
+ if (GetSymbols()->dbus_g_connection_get_connection()(connection_)) {
+ UnRegisterAllFilters();
+ GetSymbols()->dbus_connection_close()(
+ GetSymbols()->dbus_g_connection_get_connection()(connection_));
+ }
+ GetSymbols()->dbus_g_connection_unref()(connection_);
+ connection_ = NULL;
+ }
+ g_main_loop_unref(mainloop_);
+ mainloop_ = NULL;
+ g_main_context_unref(context_);
+ context_ = NULL;
+ }
+
+ // Handles callback on Idle. We only add this source when ready to stop.
+ static gboolean Idle(gpointer data) {
+ static_cast<DBusMonitoringThread *>(data)->QuitGMainloop();
+ return TRUE;
+ }
+
+ // We only hit this when ready to quit.
+ void QuitGMainloop() {
+ g_main_loop_quit(mainloop_);
+ }
+
+ DBusMonitor *monitor_;
+
+ GMainContext *context_;
+ GMainLoop *mainloop_;
+ DBusGConnection *connection_;
+ GSource *idle_source_;
+
+ std::vector<DBusSigFilter *> *filter_list_;
+};
+
+// Implementation of class DBusMonitor
+
+// Returns the DBus-Glib symbol handle, initializing it on first use.
+LibDBusGlibSymbolTable *DBusMonitor::GetDBusGlibSymbolTable() {
+ // This is multi-thread safe.
+ pthread_once(&g_dbus_init_once, InitializeDBusGlibSymbol);
+
+ return g_dbus_symbol;
+};
+
+// Creates an instance of DBusMonitor
+DBusMonitor *DBusMonitor::Create(DBusBusType type) {
+ if (NULL == DBusMonitor::GetDBusGlibSymbolTable()) {
+ return NULL;
+ }
+ return new DBusMonitor(type);
+}
+
+DBusMonitor::DBusMonitor(DBusBusType type)
+ : type_(type),
+ status_(DMS_NOT_INITIALIZED),
+ monitoring_thread_(NULL) {
+ ASSERT(type_ == DBUS_BUS_SYSTEM || type_ == DBUS_BUS_SESSION);
+}
+
+DBusMonitor::~DBusMonitor() {
+ StopMonitoring();
+}
+
+bool DBusMonitor::AddFilter(DBusSigFilter *filter) {
+ if (monitoring_thread_) {
+ return false;
+ }
+ if (!filter) {
+ return false;
+ }
+ filter_list_.push_back(filter);
+ return true;
+}
+
+bool DBusMonitor::StartMonitoring() {
+ if (!monitoring_thread_) {
+ g_type_init();
+ g_thread_init(NULL);
+ GetSymbols()->dbus_g_thread_init()();
+
+ GMainContext *context = g_main_context_new();
+ if (NULL == context) {
+ LOG(LS_ERROR) << "g_main_context_new() failed.";
+ return false;
+ }
+
+ GMainLoop *mainloop = g_main_loop_new(context, FALSE);
+ if (NULL == mainloop) {
+ LOG(LS_ERROR) << "g_main_loop_new() failed.";
+ g_main_context_unref(context);
+ return false;
+ }
+
+ monitoring_thread_ = new DBusMonitoringThread(this, context, mainloop,
+ &filter_list_);
+ if (monitoring_thread_ == NULL) {
+ LOG(LS_ERROR) << "Failed to create DBus monitoring thread.";
+ g_main_context_unref(context);
+ g_main_loop_unref(mainloop);
+ return false;
+ }
+ monitoring_thread_->Start();
+ }
+ return true;
+}
+
+bool DBusMonitor::StopMonitoring() {
+ if (monitoring_thread_) {
+ monitoring_thread_->Stop();
+ monitoring_thread_ = NULL;
+ }
+ return true;
+}
+
+DBusMonitor::DBusMonitorStatus DBusMonitor::GetStatus() {
+ return status_;
+}
+
+void DBusMonitor::OnMonitoringStatusChanged(DBusMonitorStatus status) {
+ status_ = status;
+}
+
+#undef LATE
+
+} // namespace rtc
+
+#endif // HAVE_DBUS_GLIB
diff --git a/chromium/third_party/webrtc/base/dbus.h b/chromium/third_party/webrtc/base/dbus.h
new file mode 100644
index 00000000000..fb90638bc36
--- /dev/null
+++ b/chromium/third_party/webrtc/base/dbus.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_DBUS_H_
+#define WEBRTC_BASE_DBUS_H_
+
+#ifdef HAVE_DBUS_GLIB
+
+#include <dbus/dbus.h>
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/libdbusglibsymboltable.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+#define DBUS_TYPE "type"
+#define DBUS_SIGNAL "signal"
+#define DBUS_PATH "path"
+#define DBUS_INTERFACE "interface"
+#define DBUS_MEMBER "member"
+
+#ifdef CHROMEOS
+#define CROS_PM_PATH "/"
+#define CROS_PM_INTERFACE "org.chromium.PowerManager"
+#define CROS_SIG_POWERCHANGED "PowerStateChanged"
+#define CROS_VALUE_SLEEP "mem"
+#define CROS_VALUE_RESUME "on"
+#else
+#define UP_PATH "/org/freedesktop/UPower"
+#define UP_INTERFACE "org.freedesktop.UPower"
+#define UP_SIG_SLEEPING "Sleeping"
+#define UP_SIG_RESUMING "Resuming"
+#endif // CHROMEOS
+
+// Wraps a DBus message.
+class DBusSigMessageData : public TypedMessageData<DBusMessage *> {
+ public:
+ explicit DBusSigMessageData(DBusMessage *message);
+ ~DBusSigMessageData();
+};
+
+// DBusSigFilter is an abstract class that defines the interface of DBus
+// signal handling.
+// The subclasses implement ProcessSignal() for various purposes.
+// When a DBus signal arrives, a DSM_SIGNAL message is posted to the caller
+// thread, which then invokes ProcessSignal().
+class DBusSigFilter : protected MessageHandler {
+ public:
+ enum DBusSigMessage { DSM_SIGNAL };
+
+  // This filter string should usually come from BuildFilterString().
+ explicit DBusSigFilter(const std::string &filter)
+ : caller_thread_(Thread::Current()), filter_(filter) {
+ }
+
+ // Builds a DBus monitor filter string from given DBus path, interface, and
+ // member.
+ // See http://dbus.freedesktop.org/doc/api/html/group__DBusConnection.html
+ static std::string BuildFilterString(const std::string &path,
+ const std::string &interface,
+ const std::string &member);
+
+  // Handles the callback on DBus messages from the DBus system.
+ static DBusHandlerResult DBusCallback(DBusConnection *dbus_conn,
+ DBusMessage *message,
+ void *instance);
+
+  // Handles the callback on DBus messages for this DBusSigFilter instance.
+ DBusHandlerResult Callback(DBusMessage *message);
+
+ // From MessageHandler.
+ virtual void OnMessage(Message *message);
+
+ // Returns the DBus monitor filter string.
+ const std::string &filter() const { return filter_; }
+
+ private:
+ // On caller thread.
+ virtual void ProcessSignal(DBusMessage *message) = 0;
+
+ Thread *caller_thread_;
+ const std::string filter_;
+};
+
+// DBusMonitor is a class for DBus signal monitoring.
+//
+// The caller-thread calls AddFilter() first to add the signals that it wants to
+// monitor and then calls StartMonitoring() to start the monitoring.
+// This will create a worker-thread which listens on DBus connection and sends
+// DBus signals back through the callback.
+// The worker-thread keeps running until either StopMonitoring() is called from
+// the caller-thread or the worker-thread hits an error.
+//
+// Programming model:
+// 1. Caller-thread: Creates an object of DBusMonitor.
+// 2. Caller-thread: Calls DBusMonitor::AddFilter() one or several times.
+// 3. Caller-thread: StartMonitoring().
+// ...
+// 4. Worker-thread: DBus signal received. Posts a message to the caller-thread.
+// 5. Caller-thread: DBusFilterBase::ProcessSignal() is invoked.
+// ...
+// 6. Caller-thread: StopMonitoring().
+//
+// Assumption:
+// AddFilter(), StartMonitoring(), and StopMonitoring() methods are called by
+// a single thread. Hence, there is no need to make them thread safe.
+class DBusMonitor {
+ public:
+ // Status of DBus monitoring.
+ enum DBusMonitorStatus {
+ DMS_NOT_INITIALIZED, // Not initialized.
+ DMS_INITIALIZING, // Initializing the monitoring thread.
+ DMS_RUNNING, // Monitoring.
+ DMS_STOPPED, // Not monitoring. Stopped normally.
+ DMS_FAILED, // Not monitoring. Failed.
+ };
+
+ // Returns the DBus-Glib symbol table.
+ // We should only use this function to access DBus-Glib symbols.
+ static LibDBusGlibSymbolTable *GetDBusGlibSymbolTable();
+
+ // Creates an instance of DBusMonitor.
+ static DBusMonitor *Create(DBusBusType type);
+ ~DBusMonitor();
+
+ // Adds a filter to DBusMonitor.
+ bool AddFilter(DBusSigFilter *filter);
+
+ // Starts DBus message monitoring.
+ bool StartMonitoring();
+
+ // Stops DBus message monitoring.
+ bool StopMonitoring();
+
+ // Gets the status of DBus monitoring.
+ DBusMonitorStatus GetStatus();
+
+ private:
+ // Forward declaration. Defined in the .cc file.
+ class DBusMonitoringThread;
+
+ explicit DBusMonitor(DBusBusType type);
+
+ // Updates status_ when monitoring status has changed.
+ void OnMonitoringStatusChanged(DBusMonitorStatus status);
+
+ DBusBusType type_;
+ DBusMonitorStatus status_;
+ DBusMonitoringThread *monitoring_thread_;
+ std::vector<DBusSigFilter *> filter_list_;
+};
+
+} // namespace rtc
+
+#endif // HAVE_DBUS_GLIB
+
+#endif // WEBRTC_BASE_DBUS_H_
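
Illustration (not part of the patch): a sketch following the programming model
described above, assuming HAVE_DBUS_GLIB and a non-CHROMEOS build so the UP_*
macros are available. Class and function names are hypothetical.

    // sleep_watcher.cc -- illustrative only.
    #include "webrtc/base/dbus.h"
    #include "webrtc/base/logging.h"
    #include "webrtc/base/scoped_ptr.h"

    // A filter whose ProcessSignal() runs on the caller thread whenever
    // UPower emits the "Sleeping" signal (step 2 of the model above).
    class SleepSignalFilter : public rtc::DBusSigFilter {
     public:
      SleepSignalFilter()
          : rtc::DBusSigFilter(BuildFilterString(UP_PATH, UP_INTERFACE,
                                                 UP_SIG_SLEEPING)) {}

     private:
      virtual void ProcessSignal(DBusMessage* message) {
        LOG(LS_INFO) << "System is about to sleep.";
      }
    };

    void WatchForSleep() {
      SleepSignalFilter filter;
      rtc::scoped_ptr<rtc::DBusMonitor> monitor(
          rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
      if (monitor) {
        monitor->AddFilter(&filter);  // Must happen before StartMonitoring().
        monitor->StartMonitoring();
        // ... run the caller thread's message loop; ProcessSignal() fires
        // there for each matching signal ...
        monitor->StopMonitoring();
      }
    }
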
diff --git a/chromium/third_party/webrtc/base/dbus_unittest.cc b/chromium/third_party/webrtc/base/dbus_unittest.cc
new file mode 100644
index 00000000000..505ddbbc8d6
--- /dev/null
+++ b/chromium/third_party/webrtc/base/dbus_unittest.cc
@@ -0,0 +1,232 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef HAVE_DBUS_GLIB
+
+#include "webrtc/base/dbus.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+#define SIG_NAME "NameAcquired"
+
+static const uint32 kTimeoutMs = 5000U;
+
+class DBusSigFilterTest : public DBusSigFilter {
+ public:
+  // DBusSigFilterTest listens on the DBus service itself for the
+  // "NameAcquired" signal. This signal should be received when the
+  // application connects to the DBus service and gains ownership of a name.
+ // http://dbus.freedesktop.org/doc/dbus-specification.html
+ DBusSigFilterTest()
+ : DBusSigFilter(GetFilter()),
+ message_received_(false) {
+ }
+
+ bool MessageReceived() {
+ return message_received_;
+ }
+
+ private:
+ static std::string GetFilter() {
+ return rtc::DBusSigFilter::BuildFilterString("", "", SIG_NAME);
+ }
+
+ // Implement virtual method of DBusSigFilter. On caller thread.
+ virtual void ProcessSignal(DBusMessage *message) {
+ EXPECT_TRUE(message != NULL);
+ message_received_ = true;
+ }
+
+ bool message_received_;
+};
+
+TEST(DBusMonitorTest, StartStopStartStop) {
+ DBusSigFilterTest filter;
+ rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
+ if (monitor) {
+ EXPECT_TRUE(monitor->AddFilter(&filter));
+
+ EXPECT_TRUE(monitor->StopMonitoring());
+ EXPECT_EQ(monitor->GetStatus(), DBusMonitor::DMS_NOT_INITIALIZED);
+
+ EXPECT_TRUE(monitor->StartMonitoring());
+ EXPECT_EQ_WAIT(DBusMonitor::DMS_RUNNING, monitor->GetStatus(), kTimeoutMs);
+ EXPECT_TRUE(monitor->StopMonitoring());
+ EXPECT_EQ(monitor->GetStatus(), DBusMonitor::DMS_STOPPED);
+ EXPECT_TRUE(monitor->StopMonitoring());
+ EXPECT_EQ(monitor->GetStatus(), DBusMonitor::DMS_STOPPED);
+
+ EXPECT_TRUE(monitor->StartMonitoring());
+ EXPECT_EQ_WAIT(DBusMonitor::DMS_RUNNING, monitor->GetStatus(), kTimeoutMs);
+ EXPECT_TRUE(monitor->StartMonitoring());
+ EXPECT_EQ(monitor->GetStatus(), DBusMonitor::DMS_RUNNING);
+ EXPECT_TRUE(monitor->StopMonitoring());
+ EXPECT_EQ(monitor->GetStatus(), DBusMonitor::DMS_STOPPED);
+ } else {
+ LOG(LS_WARNING) << "DBus Monitor not started. Skipping test.";
+ }
+}
+
+// DBusMonitorTest listens on the DBus service itself for the "NameAcquired"
+// signal. This signal should be received when the application connects to the
+// DBus service and gains ownership of a name.
+// This test is to make sure that we capture the "NameAcquired" signal.
+TEST(DBusMonitorTest, ReceivedNameAcquiredSignal) {
+ DBusSigFilterTest filter;
+ rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
+ if (monitor) {
+ EXPECT_TRUE(monitor->AddFilter(&filter));
+
+ EXPECT_TRUE(monitor->StartMonitoring());
+ EXPECT_EQ_WAIT(DBusMonitor::DMS_RUNNING, monitor->GetStatus(), kTimeoutMs);
+ EXPECT_TRUE_WAIT(filter.MessageReceived(), kTimeoutMs);
+ EXPECT_TRUE(monitor->StopMonitoring());
+ EXPECT_EQ(monitor->GetStatus(), DBusMonitor::DMS_STOPPED);
+ } else {
+ LOG(LS_WARNING) << "DBus Monitor not started. Skipping test.";
+ }
+}
+
+TEST(DBusMonitorTest, ConcurrentMonitors) {
+ DBusSigFilterTest filter1;
+ rtc::scoped_ptr<rtc::DBusMonitor> monitor1;
+ monitor1.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
+ if (monitor1) {
+ EXPECT_TRUE(monitor1->AddFilter(&filter1));
+ DBusSigFilterTest filter2;
+ rtc::scoped_ptr<rtc::DBusMonitor> monitor2;
+ monitor2.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
+ EXPECT_TRUE(monitor2->AddFilter(&filter2));
+
+ EXPECT_TRUE(monitor1->StartMonitoring());
+ EXPECT_EQ_WAIT(DBusMonitor::DMS_RUNNING, monitor1->GetStatus(), kTimeoutMs);
+ EXPECT_TRUE(monitor2->StartMonitoring());
+ EXPECT_EQ_WAIT(DBusMonitor::DMS_RUNNING, monitor2->GetStatus(), kTimeoutMs);
+
+ EXPECT_TRUE_WAIT(filter2.MessageReceived(), kTimeoutMs);
+ EXPECT_TRUE(monitor2->StopMonitoring());
+ EXPECT_EQ(monitor2->GetStatus(), DBusMonitor::DMS_STOPPED);
+
+ EXPECT_TRUE_WAIT(filter1.MessageReceived(), kTimeoutMs);
+ EXPECT_TRUE(monitor1->StopMonitoring());
+ EXPECT_EQ(monitor1->GetStatus(), DBusMonitor::DMS_STOPPED);
+ } else {
+ LOG(LS_WARNING) << "DBus Monitor not started. Skipping test.";
+ }
+}
+
+TEST(DBusMonitorTest, ConcurrentFilters) {
+ DBusSigFilterTest filter1;
+ DBusSigFilterTest filter2;
+ rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
+ if (monitor) {
+ EXPECT_TRUE(monitor->AddFilter(&filter1));
+ EXPECT_TRUE(monitor->AddFilter(&filter2));
+
+ EXPECT_TRUE(monitor->StartMonitoring());
+ EXPECT_EQ_WAIT(DBusMonitor::DMS_RUNNING, monitor->GetStatus(), kTimeoutMs);
+
+ EXPECT_TRUE_WAIT(filter1.MessageReceived(), kTimeoutMs);
+ EXPECT_TRUE_WAIT(filter2.MessageReceived(), kTimeoutMs);
+
+ EXPECT_TRUE(monitor->StopMonitoring());
+ EXPECT_EQ(monitor->GetStatus(), DBusMonitor::DMS_STOPPED);
+ } else {
+ LOG(LS_WARNING) << "DBus Monitor not started. Skipping test.";
+ }
+}
+
+TEST(DBusMonitorTest, NoAddFilterIfRunning) {
+ DBusSigFilterTest filter1;
+ DBusSigFilterTest filter2;
+ rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
+ if (monitor) {
+ EXPECT_TRUE(monitor->AddFilter(&filter1));
+
+ EXPECT_TRUE(monitor->StartMonitoring());
+ EXPECT_EQ_WAIT(DBusMonitor::DMS_RUNNING, monitor->GetStatus(), kTimeoutMs);
+ EXPECT_FALSE(monitor->AddFilter(&filter2));
+
+ EXPECT_TRUE(monitor->StopMonitoring());
+ EXPECT_EQ(monitor->GetStatus(), DBusMonitor::DMS_STOPPED);
+ } else {
+ LOG(LS_WARNING) << "DBus Monitor not started. Skipping test.";
+ }
+}
+
+TEST(DBusMonitorTest, AddFilterAfterStop) {
+ DBusSigFilterTest filter1;
+ DBusSigFilterTest filter2;
+ rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
+ if (monitor) {
+ EXPECT_TRUE(monitor->AddFilter(&filter1));
+ EXPECT_TRUE(monitor->StartMonitoring());
+ EXPECT_EQ_WAIT(DBusMonitor::DMS_RUNNING, monitor->GetStatus(), kTimeoutMs);
+ EXPECT_TRUE_WAIT(filter1.MessageReceived(), kTimeoutMs);
+ EXPECT_TRUE(monitor->StopMonitoring());
+ EXPECT_EQ(monitor->GetStatus(), DBusMonitor::DMS_STOPPED);
+
+ EXPECT_TRUE(monitor->AddFilter(&filter2));
+ EXPECT_TRUE(monitor->StartMonitoring());
+ EXPECT_EQ_WAIT(DBusMonitor::DMS_RUNNING, monitor->GetStatus(), kTimeoutMs);
+ EXPECT_TRUE_WAIT(filter1.MessageReceived(), kTimeoutMs);
+ EXPECT_TRUE_WAIT(filter2.MessageReceived(), kTimeoutMs);
+ EXPECT_TRUE(monitor->StopMonitoring());
+ EXPECT_EQ(monitor->GetStatus(), DBusMonitor::DMS_STOPPED);
+ } else {
+ LOG(LS_WARNING) << "DBus Monitor not started. Skipping test.";
+ }
+}
+
+TEST(DBusMonitorTest, StopRightAfterStart) {
+ DBusSigFilterTest filter;
+ rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
+ if (monitor) {
+ EXPECT_TRUE(monitor->AddFilter(&filter));
+
+ EXPECT_TRUE(monitor->StartMonitoring());
+ EXPECT_TRUE(monitor->StopMonitoring());
+
+    // Stop the monitoring thread right after it has been started.
+    // If the monitoring thread got a chance to receive a DBus signal, it would
+    // post a message to the main thread and signal the main thread to wake up.
+    // That message is cleaned out automatically when the filter is destroyed.
+    // Here we also consume the wakeup signal (if there is one) so that the
+    // testing (main) thread is reset to a clean state.
+ rtc::Thread::Current()->ProcessMessages(1);
+ } else {
+ LOG(LS_WARNING) << "DBus Monitor not started.";
+ }
+}
+
+TEST(DBusSigFilter, BuildFilterString) {
+ EXPECT_EQ(DBusSigFilter::BuildFilterString("", "", ""),
+ (DBUS_TYPE "='" DBUS_SIGNAL "'"));
+ EXPECT_EQ(DBusSigFilter::BuildFilterString("p", "", ""),
+ (DBUS_TYPE "='" DBUS_SIGNAL "'," DBUS_PATH "='p'"));
+ EXPECT_EQ(DBusSigFilter::BuildFilterString("p","i", ""),
+ (DBUS_TYPE "='" DBUS_SIGNAL "'," DBUS_PATH "='p',"
+ DBUS_INTERFACE "='i'"));
+ EXPECT_EQ(DBusSigFilter::BuildFilterString("p","i","m"),
+ (DBUS_TYPE "='" DBUS_SIGNAL "'," DBUS_PATH "='p',"
+ DBUS_INTERFACE "='i'," DBUS_MEMBER "='m'"));
+}
+
+} // namespace rtc
+
+#endif // HAVE_DBUS_GLIB
diff --git a/chromium/third_party/webrtc/base/diskcache.cc b/chromium/third_party/webrtc/base/diskcache.cc
new file mode 100644
index 00000000000..f893ce73d3b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/diskcache.cc
@@ -0,0 +1,347 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <time.h>
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#endif
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/diskcache.h"
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+
+#ifdef _DEBUG
+#define TRANSPARENT_CACHE_NAMES 1
+#else // !_DEBUG
+#define TRANSPARENT_CACHE_NAMES 0
+#endif // !_DEBUG
+
+namespace rtc {
+
+class DiskCache;
+
+///////////////////////////////////////////////////////////////////////////////
+// DiskCacheAdapter
+///////////////////////////////////////////////////////////////////////////////
+
+class DiskCacheAdapter : public StreamAdapterInterface {
+public:
+ DiskCacheAdapter(const DiskCache* cache, const std::string& id, size_t index,
+ StreamInterface* stream)
+ : StreamAdapterInterface(stream), cache_(cache), id_(id), index_(index)
+ { }
+ virtual ~DiskCacheAdapter() {
+ Close();
+ cache_->ReleaseResource(id_, index_);
+ }
+
+private:
+ const DiskCache* cache_;
+ std::string id_;
+ size_t index_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// DiskCache
+///////////////////////////////////////////////////////////////////////////////
+
+DiskCache::DiskCache() : max_cache_(0), total_size_(0), total_accessors_(0) {
+}
+
+DiskCache::~DiskCache() {
+ ASSERT(0 == total_accessors_);
+}
+
+bool DiskCache::Initialize(const std::string& folder, size_t size) {
+ if (!folder_.empty() || !Filesystem::CreateFolder(folder))
+ return false;
+
+ folder_ = folder;
+ max_cache_ = size;
+ ASSERT(0 == total_size_);
+
+ if (!InitializeEntries())
+ return false;
+
+ return CheckLimit();
+}
+
+bool DiskCache::Purge() {
+ if (folder_.empty())
+ return false;
+
+ if (total_accessors_ > 0) {
+ LOG_F(LS_WARNING) << "Cache files open";
+ return false;
+ }
+
+ if (!PurgeFiles())
+ return false;
+
+ map_.clear();
+ return true;
+}
+
+bool DiskCache::LockResource(const std::string& id) {
+ Entry* entry = GetOrCreateEntry(id, true);
+ if (LS_LOCKED == entry->lock_state)
+ return false;
+ if ((LS_UNLOCKED == entry->lock_state) && (entry->accessors > 0))
+ return false;
+ if ((total_size_ > max_cache_) && !CheckLimit()) {
+ LOG_F(LS_WARNING) << "Cache overfull";
+ return false;
+ }
+ entry->lock_state = LS_LOCKED;
+ return true;
+}
+
+StreamInterface* DiskCache::WriteResource(const std::string& id, size_t index) {
+ Entry* entry = GetOrCreateEntry(id, false);
+ if (LS_LOCKED != entry->lock_state)
+ return NULL;
+
+ size_t previous_size = 0;
+ std::string filename(IdToFilename(id, index));
+ FileStream::GetSize(filename, &previous_size);
+ ASSERT(previous_size <= entry->size);
+ if (previous_size > entry->size) {
+ previous_size = entry->size;
+ }
+
+ scoped_ptr<FileStream> file(new FileStream);
+ if (!file->Open(filename, "wb", NULL)) {
+ LOG_F(LS_ERROR) << "Couldn't create cache file";
+ return NULL;
+ }
+
+ entry->streams = stdmax(entry->streams, index + 1);
+ entry->size -= previous_size;
+ total_size_ -= previous_size;
+
+ entry->accessors += 1;
+ total_accessors_ += 1;
+ return new DiskCacheAdapter(this, id, index, file.release());
+}
+
+bool DiskCache::UnlockResource(const std::string& id) {
+ Entry* entry = GetOrCreateEntry(id, false);
+ if (LS_LOCKED != entry->lock_state)
+ return false;
+
+ if (entry->accessors > 0) {
+ entry->lock_state = LS_UNLOCKING;
+ } else {
+ entry->lock_state = LS_UNLOCKED;
+ entry->last_modified = time(0);
+ CheckLimit();
+ }
+ return true;
+}
+
+StreamInterface* DiskCache::ReadResource(const std::string& id,
+ size_t index) const {
+ const Entry* entry = GetEntry(id);
+ if (LS_UNLOCKED != entry->lock_state)
+ return NULL;
+ if (index >= entry->streams)
+ return NULL;
+
+ scoped_ptr<FileStream> file(new FileStream);
+ if (!file->Open(IdToFilename(id, index), "rb", NULL))
+ return NULL;
+
+ entry->accessors += 1;
+ total_accessors_ += 1;
+ return new DiskCacheAdapter(this, id, index, file.release());
+}
+
+bool DiskCache::HasResource(const std::string& id) const {
+ const Entry* entry = GetEntry(id);
+ return (NULL != entry) && (entry->streams > 0);
+}
+
+bool DiskCache::HasResourceStream(const std::string& id, size_t index) const {
+ const Entry* entry = GetEntry(id);
+ if ((NULL == entry) || (index >= entry->streams))
+ return false;
+
+ std::string filename = IdToFilename(id, index);
+
+ return FileExists(filename);
+}
+
+bool DiskCache::DeleteResource(const std::string& id) {
+ Entry* entry = GetOrCreateEntry(id, false);
+ if (!entry)
+ return true;
+
+ if ((LS_UNLOCKED != entry->lock_state) || (entry->accessors > 0))
+ return false;
+
+ bool success = true;
+ for (size_t index = 0; index < entry->streams; ++index) {
+ std::string filename = IdToFilename(id, index);
+
+ if (!FileExists(filename))
+ continue;
+
+ if (!DeleteFile(filename)) {
+ LOG_F(LS_ERROR) << "Couldn't remove cache file: " << filename;
+ success = false;
+ }
+ }
+
+ total_size_ -= entry->size;
+ map_.erase(id);
+ return success;
+}
+
+bool DiskCache::CheckLimit() {
+#ifdef _DEBUG
+ // Temporary check to make sure everything is working correctly.
+ size_t cache_size = 0;
+ for (EntryMap::iterator it = map_.begin(); it != map_.end(); ++it) {
+ cache_size += it->second.size;
+ }
+ ASSERT(cache_size == total_size_);
+#endif // _DEBUG
+
+ // TODO: Replace this with a non-brain-dead algorithm for clearing out the
+ // oldest resources... something that isn't O(n^2)
+ while (total_size_ > max_cache_) {
+ EntryMap::iterator oldest = map_.end();
+ for (EntryMap::iterator it = map_.begin(); it != map_.end(); ++it) {
+ if ((LS_UNLOCKED != it->second.lock_state) || (it->second.accessors > 0))
+ continue;
+ oldest = it;
+ break;
+ }
+ if (oldest == map_.end()) {
+ LOG_F(LS_WARNING) << "All resources are locked!";
+ return false;
+ }
+ for (EntryMap::iterator it = oldest++; it != map_.end(); ++it) {
+ if (it->second.last_modified < oldest->second.last_modified) {
+ oldest = it;
+ }
+ }
+ if (!DeleteResource(oldest->first)) {
+ LOG_F(LS_ERROR) << "Couldn't delete from cache!";
+ return false;
+ }
+ }
+ return true;
+}
+
+std::string DiskCache::IdToFilename(const std::string& id, size_t index) const {
+#ifdef TRANSPARENT_CACHE_NAMES
+ // This escapes colons and other filesystem characters, so the user can't open
+ // special devices (like "COM1:"), or access other directories.
+ size_t buffer_size = id.length()*3 + 1;
+ char* buffer = new char[buffer_size];
+ encode(buffer, buffer_size, id.data(), id.length(),
+ unsafe_filename_characters(), '%');
+ // TODO: ASSERT(strlen(buffer) < FileSystem::MaxBasenameLength());
+#else // !TRANSPARENT_CACHE_NAMES
+ // We might want to just use a hash of the filename at some point, both for
+ // obfuscation, and to avoid both filename length and escaping issues.
+ ASSERT(false);
+#endif // !TRANSPARENT_CACHE_NAMES
+
+ char extension[32];
+ sprintfn(extension, ARRAY_SIZE(extension), ".%u", index);
+
+ Pathname pathname;
+ pathname.SetFolder(folder_);
+ pathname.SetBasename(buffer);
+ pathname.SetExtension(extension);
+
+#ifdef TRANSPARENT_CACHE_NAMES
+ delete [] buffer;
+#endif // TRANSPARENT_CACHE_NAMES
+
+ return pathname.pathname();
+}
+
+bool DiskCache::FilenameToId(const std::string& filename, std::string* id,
+ size_t* index) const {
+ Pathname pathname(filename);
+ unsigned tempdex;
+ if (1 != sscanf(pathname.extension().c_str(), ".%u", &tempdex))
+ return false;
+
+ *index = static_cast<size_t>(tempdex);
+
+ size_t buffer_size = pathname.basename().length() + 1;
+ char* buffer = new char[buffer_size];
+ decode(buffer, buffer_size, pathname.basename().data(),
+ pathname.basename().length(), '%');
+ id->assign(buffer);
+ delete [] buffer;
+ return true;
+}
+
+DiskCache::Entry* DiskCache::GetOrCreateEntry(const std::string& id,
+ bool create) {
+ EntryMap::iterator it = map_.find(id);
+ if (it != map_.end())
+ return &it->second;
+ if (!create)
+ return NULL;
+ Entry e;
+ e.lock_state = LS_UNLOCKED;
+ e.accessors = 0;
+ e.size = 0;
+ e.streams = 0;
+ e.last_modified = time(0);
+ it = map_.insert(EntryMap::value_type(id, e)).first;
+ return &it->second;
+}
+
+void DiskCache::ReleaseResource(const std::string& id, size_t index) const {
+ const Entry* entry = GetEntry(id);
+ if (!entry) {
+ LOG_F(LS_WARNING) << "Missing cache entry";
+ ASSERT(false);
+ return;
+ }
+
+ entry->accessors -= 1;
+ total_accessors_ -= 1;
+
+ if (LS_UNLOCKED != entry->lock_state) {
+ // This is safe, because locked resources only issue WriteResource, which
+ // is non-const. Think about a better way to handle it.
+ DiskCache* this2 = const_cast<DiskCache*>(this);
+ Entry* entry2 = this2->GetOrCreateEntry(id, false);
+
+ size_t new_size = 0;
+ std::string filename(IdToFilename(id, index));
+ FileStream::GetSize(filename, &new_size);
+ entry2->size += new_size;
+ this2->total_size_ += new_size;
+
+ if ((LS_UNLOCKING == entry->lock_state) && (0 == entry->accessors)) {
+ entry2->last_modified = time(0);
+ entry2->lock_state = LS_UNLOCKED;
+ this2->CheckLimit();
+ }
+ }
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/diskcache.h b/chromium/third_party/webrtc/base/diskcache.h
new file mode 100644
index 00000000000..4ac1be15d66
--- /dev/null
+++ b/chromium/third_party/webrtc/base/diskcache.h
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_DISKCACHE_H__
+#define WEBRTC_BASE_DISKCACHE_H__
+
+#include <map>
+#include <string>
+
+#if defined(WEBRTC_WIN)
+#undef UnlockResource
+#endif // WEBRTC_WIN
+
+namespace rtc {
+
+class StreamInterface;
+
+///////////////////////////////////////////////////////////////////////////////
+// DiskCache - An LRU cache of streams, stored on disk.
+//
+// Streams are identified by a unique resource id. Multiple streams can be
+// associated with each resource id, distinguished by an index. When old
+// resources are flushed from the cache, all streams associated with those
+// resources are removed together.
+// DiskCache is designed to persist across executions of the program. It is
+// safe for use from an arbitrary number of users on a single thread, but not
+// from multiple threads or other processes.
+///////////////////////////////////////////////////////////////////////////////
+
+class DiskCache {
+public:
+ DiskCache();
+ virtual ~DiskCache();
+
+ bool Initialize(const std::string& folder, size_t size);
+ bool Purge();
+
+ bool LockResource(const std::string& id);
+ StreamInterface* WriteResource(const std::string& id, size_t index);
+ bool UnlockResource(const std::string& id);
+
+ StreamInterface* ReadResource(const std::string& id, size_t index) const;
+
+ bool HasResource(const std::string& id) const;
+ bool HasResourceStream(const std::string& id, size_t index) const;
+ bool DeleteResource(const std::string& id);
+
+ protected:
+ virtual bool InitializeEntries() = 0;
+ virtual bool PurgeFiles() = 0;
+
+ virtual bool FileExists(const std::string& filename) const = 0;
+ virtual bool DeleteFile(const std::string& filename) const = 0;
+
+ enum LockState { LS_UNLOCKED, LS_LOCKED, LS_UNLOCKING };
+ struct Entry {
+ LockState lock_state;
+ mutable size_t accessors;
+ size_t size;
+ size_t streams;
+ time_t last_modified;
+ };
+ typedef std::map<std::string, Entry> EntryMap;
+ friend class DiskCacheAdapter;
+
+ bool CheckLimit();
+
+ std::string IdToFilename(const std::string& id, size_t index) const;
+ bool FilenameToId(const std::string& filename, std::string* id,
+ size_t* index) const;
+
+ const Entry* GetEntry(const std::string& id) const {
+ return const_cast<DiskCache*>(this)->GetOrCreateEntry(id, false);
+ }
+ Entry* GetOrCreateEntry(const std::string& id, bool create);
+
+ void ReleaseResource(const std::string& id, size_t index) const;
+
+ std::string folder_;
+ size_t max_cache_, total_size_;
+ EntryMap map_;
+ mutable size_t total_accessors_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// CacheLock - Automatically manages locking and unlocking, with optional
+// rollback semantics.
+///////////////////////////////////////////////////////////////////////////////
+
+class CacheLock {
+public:
+ CacheLock(DiskCache* cache, const std::string& id, bool rollback = false)
+ : cache_(cache), id_(id), rollback_(rollback)
+ {
+ locked_ = cache_->LockResource(id_);
+ }
+ ~CacheLock() {
+ if (locked_) {
+ cache_->UnlockResource(id_);
+ if (rollback_) {
+ cache_->DeleteResource(id_);
+ }
+ }
+ }
+ bool IsLocked() const { return locked_; }
+ void Commit() { rollback_ = false; }
+
+private:
+ DiskCache* cache_;
+ std::string id_;
+ bool rollback_, locked_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_DISKCACHE_H__
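
Illustration (not part of the patch): a sketch of the lock/write/unlock cycle
using CacheLock, assuming some concrete DiskCache subclass (for example the
DiskCacheWin32 added later in this change) and StreamInterface::WriteAll() from
webrtc/base/stream.h. Function names are hypothetical.

    // cache_usage_sketch.cc -- illustrative only.
    #include <string>

    #include "webrtc/base/diskcache.h"
    #include "webrtc/base/scoped_ptr.h"
    #include "webrtc/base/stream.h"

    bool StoreBlob(rtc::DiskCache* cache, const std::string& id,
                   const char* data, size_t length) {
      // rollback=true: the resource is deleted unless Commit() is reached.
      rtc::CacheLock lock(cache, id, true);
      if (!lock.IsLocked())
        return false;

      rtc::scoped_ptr<rtc::StreamInterface> stream(cache->WriteResource(id, 0));
      if (!stream.get())
        return false;

      size_t written = 0;
      if (stream->WriteAll(data, length, &written, NULL) != rtc::SR_SUCCESS)
        return false;

      stream.reset();  // Release the accessor before the lock is released.
      lock.Commit();   // Keep the resource instead of rolling it back.
      return true;
    }
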
diff --git a/chromium/third_party/webrtc/base/diskcache_win32.cc b/chromium/third_party/webrtc/base/diskcache_win32.cc
new file mode 100644
index 00000000000..22012ccc1de
--- /dev/null
+++ b/chromium/third_party/webrtc/base/diskcache_win32.cc
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/win32.h"
+#include <shellapi.h>
+#include <shlobj.h>
+#include <tchar.h>
+
+#include <time.h>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/diskcache.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+
+#include "webrtc/base/diskcache_win32.h"
+
+namespace rtc {
+
+bool DiskCacheWin32::InitializeEntries() {
+ // Note: We could store the cache information in a separate file, for faster
+ // initialization. Figuring it out empirically works, too.
+
+ std::wstring path16 = ToUtf16(folder_);
+ path16.append(1, '*');
+
+ WIN32_FIND_DATA find_data;
+ HANDLE find_handle = FindFirstFile(path16.c_str(), &find_data);
+ if (find_handle != INVALID_HANDLE_VALUE) {
+ do {
+ size_t index;
+ std::string id;
+ if (!FilenameToId(ToUtf8(find_data.cFileName), &id, &index))
+ continue;
+
+ Entry* entry = GetOrCreateEntry(id, true);
+ entry->size += find_data.nFileSizeLow;
+ total_size_ += find_data.nFileSizeLow;
+ entry->streams = _max(entry->streams, index + 1);
+ FileTimeToUnixTime(find_data.ftLastWriteTime, &entry->last_modified);
+
+ } while (FindNextFile(find_handle, &find_data));
+
+ FindClose(find_handle);
+ }
+
+ return true;
+}
+
+bool DiskCacheWin32::PurgeFiles() {
+ std::wstring path16 = ToUtf16(folder_);
+ path16.append(1, '*');
+ path16.append(1, '\0');
+
+ SHFILEOPSTRUCT file_op = { 0 };
+ file_op.wFunc = FO_DELETE;
+ file_op.pFrom = path16.c_str();
+ file_op.fFlags = FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_SILENT
+ | FOF_NORECURSION | FOF_FILESONLY;
+ if (0 != SHFileOperation(&file_op)) {
+ LOG_F(LS_ERROR) << "Couldn't delete cache files";
+ return false;
+ }
+
+ return true;
+}
+
+bool DiskCacheWin32::FileExists(const std::string& filename) const {
+ DWORD result = ::GetFileAttributes(ToUtf16(filename).c_str());
+ return (INVALID_FILE_ATTRIBUTES != result);
+}
+
+bool DiskCacheWin32::DeleteFile(const std::string& filename) const {
+ return ::DeleteFile(ToUtf16(filename).c_str()) != 0;
+}
+
+}
diff --git a/chromium/third_party/webrtc/base/diskcache_win32.h b/chromium/third_party/webrtc/base/diskcache_win32.h
new file mode 100644
index 00000000000..42cb9b02cb0
--- /dev/null
+++ b/chromium/third_party/webrtc/base/diskcache_win32.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_DISKCACHEWIN32_H__
+#define WEBRTC_BASE_DISKCACHEWIN32_H__
+
+#include "webrtc/base/diskcache.h"
+
+namespace rtc {
+
+class DiskCacheWin32 : public DiskCache {
+ protected:
+ virtual bool InitializeEntries();
+ virtual bool PurgeFiles();
+
+ virtual bool FileExists(const std::string& filename) const;
+ virtual bool DeleteFile(const std::string& filename) const;
+};
+
+}
+
+#endif // WEBRTC_BASE_DISKCACHEWIN32_H__
diff --git a/chromium/third_party/webrtc/base/dscp.h b/chromium/third_party/webrtc/base/dscp.h
new file mode 100644
index 00000000000..970ff93b9b9
--- /dev/null
+++ b/chromium/third_party/webrtc/base/dscp.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_DSCP_H_
+#define WEBRTC_BASE_DSCP_H_
+
+namespace rtc {
+// Differentiated Services Code Point.
+// See http://tools.ietf.org/html/rfc2474 for details.
+enum DiffServCodePoint {
+ DSCP_NO_CHANGE = -1,
+ DSCP_DEFAULT = 0, // Same as DSCP_CS0
+ DSCP_CS0 = 0, // The default
+ DSCP_CS1 = 8, // Bulk/background traffic
+ DSCP_AF11 = 10,
+ DSCP_AF12 = 12,
+ DSCP_AF13 = 14,
+ DSCP_CS2 = 16,
+ DSCP_AF21 = 18,
+ DSCP_AF22 = 20,
+ DSCP_AF23 = 22,
+ DSCP_CS3 = 24,
+ DSCP_AF31 = 26,
+ DSCP_AF32 = 28,
+ DSCP_AF33 = 30,
+ DSCP_CS4 = 32,
+ DSCP_AF41 = 34, // Video
+ DSCP_AF42 = 36, // Video
+ DSCP_AF43 = 38, // Video
+ DSCP_CS5 = 40, // Video
+ DSCP_EF = 46, // Voice
+ DSCP_CS6 = 48, // Voice
+ DSCP_CS7 = 56, // Control messages
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_DSCP_H_
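
Illustration (not part of the patch): one way a caller might map its media
types onto the code points above. The mapping is an assumption made for this
sketch, not a policy defined by the header.

    // dscp_choice_sketch.cc -- illustrative only.
    #include "webrtc/base/dscp.h"

    enum MediaKind { MEDIA_AUDIO, MEDIA_VIDEO, MEDIA_DATA };

    rtc::DiffServCodePoint DscpForMedia(MediaKind kind) {
      switch (kind) {
        case MEDIA_AUDIO:
          return rtc::DSCP_EF;    // Expedited forwarding (voice, per above).
        case MEDIA_VIDEO:
          return rtc::DSCP_AF41;  // Assured forwarding (video, per above).
        default:
          return rtc::DSCP_DEFAULT;
      }
    }
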
diff --git a/chromium/third_party/webrtc/base/event.cc b/chromium/third_party/webrtc/base/event.cc
new file mode 100644
index 00000000000..393142ea2c5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/event.cc
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/event.h"
+
+#if defined(WEBRTC_WIN)
+#include <windows.h>
+#elif defined(WEBRTC_POSIX)
+#include <pthread.h>
+#include <sys/time.h>
+#include <time.h>
+#else
+#error "Must define either WEBRTC_WIN or WEBRTC_POSIX."
+#endif
+
+namespace rtc {
+
+#if defined(WEBRTC_WIN)
+
+Event::Event(bool manual_reset, bool initially_signaled)
+ : is_manual_reset_(manual_reset),
+ is_initially_signaled_(initially_signaled) {
+ event_handle_ = ::CreateEvent(NULL, // Security attributes.
+ is_manual_reset_,
+ is_initially_signaled_,
+ NULL); // Name.
+ ASSERT(event_handle_ != NULL);
+}
+
+Event::~Event() {
+ CloseHandle(event_handle_);
+}
+
+void Event::Set() {
+ SetEvent(event_handle_);
+}
+
+void Event::Reset() {
+ ResetEvent(event_handle_);
+}
+
+bool Event::Wait(int cms) {
+ DWORD ms = (cms == kForever)? INFINITE : cms;
+ return (WaitForSingleObject(event_handle_, ms) == WAIT_OBJECT_0);
+}
+
+#elif defined(WEBRTC_POSIX)
+
+Event::Event(bool manual_reset, bool initially_signaled)
+ : is_manual_reset_(manual_reset),
+ event_status_(initially_signaled) {
+ VERIFY(pthread_mutex_init(&event_mutex_, NULL) == 0);
+ VERIFY(pthread_cond_init(&event_cond_, NULL) == 0);
+}
+
+Event::~Event() {
+ pthread_mutex_destroy(&event_mutex_);
+ pthread_cond_destroy(&event_cond_);
+}
+
+void Event::Set() {
+ pthread_mutex_lock(&event_mutex_);
+ event_status_ = true;
+ pthread_cond_broadcast(&event_cond_);
+ pthread_mutex_unlock(&event_mutex_);
+}
+
+void Event::Reset() {
+ pthread_mutex_lock(&event_mutex_);
+ event_status_ = false;
+ pthread_mutex_unlock(&event_mutex_);
+}
+
+bool Event::Wait(int cms) {
+ pthread_mutex_lock(&event_mutex_);
+ int error = 0;
+
+ if (cms != kForever) {
+ // Converting from seconds and microseconds (1e-6) plus
+ // milliseconds (1e-3) to seconds and nanoseconds (1e-9).
+
+ struct timespec ts;
+#if HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE
+ // Use relative time version, which tends to be more efficient for
+ // pthread implementations where provided (like on Android).
+ ts.tv_sec = cms / 1000;
+ ts.tv_nsec = (cms % 1000) * 1000000;
+#else
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+
+ ts.tv_sec = tv.tv_sec + (cms / 1000);
+ ts.tv_nsec = tv.tv_usec * 1000 + (cms % 1000) * 1000000;
+
+ // Handle overflow.
+ if (ts.tv_nsec >= 1000000000) {
+ ts.tv_sec++;
+ ts.tv_nsec -= 1000000000;
+ }
+#endif
+
+ while (!event_status_ && error == 0) {
+#if HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE
+ error = pthread_cond_timedwait_relative_np(
+ &event_cond_, &event_mutex_, &ts);
+#else
+ error = pthread_cond_timedwait(&event_cond_, &event_mutex_, &ts);
+#endif
+ }
+ } else {
+ while (!event_status_ && error == 0)
+ error = pthread_cond_wait(&event_cond_, &event_mutex_);
+ }
+
+ // NOTE(liulk): Exactly one thread will auto-reset this event. All
+ // the other threads will think it's unsignaled. This seems to be
+  // consistent with auto-reset events on Windows (WEBRTC_WIN).
+ if (error == 0 && !is_manual_reset_)
+ event_status_ = false;
+
+ pthread_mutex_unlock(&event_mutex_);
+
+ return (error == 0);
+}
+
+#endif
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/event.h b/chromium/third_party/webrtc/base/event.h
new file mode 100644
index 00000000000..f2691a2f894
--- /dev/null
+++ b/chromium/third_party/webrtc/base/event.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_EVENT_H__
+#define WEBRTC_BASE_EVENT_H__
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h" // NOLINT: consider this a system header.
+#elif defined(WEBRTC_POSIX)
+#include <pthread.h>
+#else
+#error "Must define either WEBRTC_WIN or WEBRTC_POSIX."
+#endif
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/common.h"
+
+namespace rtc {
+
+class Event {
+ public:
+ Event(bool manual_reset, bool initially_signaled);
+ ~Event();
+
+ void Set();
+ void Reset();
+ bool Wait(int cms);
+
+ private:
+ bool is_manual_reset_;
+
+#if defined(WEBRTC_WIN)
+ bool is_initially_signaled_;
+ HANDLE event_handle_;
+#elif defined(WEBRTC_POSIX)
+ bool event_status_;
+ pthread_mutex_t event_mutex_;
+ pthread_cond_t event_cond_;
+#endif
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_EVENT_H__
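A minimal usage sketch of the rtc::Event API declared above (a hypothetical worker/waiter pair; the 5000 ms timeout is illustrative). Wait() blocks for up to cms milliseconds and returns true only if the event was signaled; with manual_reset == false the event auto-resets after waking one waiter, as the unit test below also demonstrates:

#include "webrtc/base/event.h"

// Auto-reset event: not manually reset, not initially signaled.
rtc::Event done(false, false);

void Worker() {
  // ... do the work ...
  done.Set();  // Wakes one waiter, then the event auto-resets.
}

bool WaitForWorker() {
  // Block for up to 5000 ms; returns false on timeout.
  return done.Wait(5000);
}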
diff --git a/chromium/third_party/webrtc/base/event_unittest.cc b/chromium/third_party/webrtc/base/event_unittest.cc
new file mode 100644
index 00000000000..996ad2f95ab
--- /dev/null
+++ b/chromium/third_party/webrtc/base/event_unittest.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/event.h"
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+TEST(EventTest, InitiallySignaled) {
+ Event event(false, true);
+ ASSERT_TRUE(event.Wait(0));
+}
+
+TEST(EventTest, ManualReset) {
+ Event event(true, false);
+ ASSERT_FALSE(event.Wait(0));
+
+ event.Set();
+ ASSERT_TRUE(event.Wait(0));
+ ASSERT_TRUE(event.Wait(0));
+
+ event.Reset();
+ ASSERT_FALSE(event.Wait(0));
+}
+
+TEST(EventTest, AutoReset) {
+ Event event(false, false);
+ ASSERT_FALSE(event.Wait(0));
+
+ event.Set();
+ ASSERT_TRUE(event.Wait(0));
+ ASSERT_FALSE(event.Wait(0));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/fakecpumonitor.h b/chromium/third_party/webrtc/base/fakecpumonitor.h
new file mode 100644
index 00000000000..c6ea0f2933c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/fakecpumonitor.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_FAKECPUMONITOR_H_
+#define WEBRTC_BASE_FAKECPUMONITOR_H_
+
+#include "webrtc/base/cpumonitor.h"
+
+namespace rtc {
+
+class FakeCpuMonitor : public rtc::CpuMonitor {
+ public:
+ explicit FakeCpuMonitor(Thread* thread)
+ : CpuMonitor(thread) {
+ }
+ ~FakeCpuMonitor() {
+ }
+
+ virtual void OnMessage(rtc::Message* msg) {
+ }
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_FAKECPUMONITOR_H_
diff --git a/chromium/third_party/webrtc/base/fakenetwork.h b/chromium/third_party/webrtc/base/fakenetwork.h
new file mode 100644
index 00000000000..60773b4099d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/fakenetwork.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_FAKENETWORK_H_
+#define WEBRTC_BASE_FAKENETWORK_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/network.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/socketaddress.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+const int kFakeIPv4NetworkPrefixLength = 24;
+const int kFakeIPv6NetworkPrefixLength = 64;
+
+// Fake network manager that allows us to manually specify the IPs to use.
+class FakeNetworkManager : public NetworkManagerBase,
+ public MessageHandler {
+ public:
+ FakeNetworkManager()
+ : thread_(Thread::Current()),
+ next_index_(0),
+ started_(false),
+ sent_first_update_(false) {
+ }
+
+ typedef std::vector<SocketAddress> IfaceList;
+
+ void AddInterface(const SocketAddress& iface) {
+ // ensure a unique name for the interface
+ SocketAddress address("test" + rtc::ToString(next_index_++), 0);
+ address.SetResolvedIP(iface.ipaddr());
+ ifaces_.push_back(address);
+ DoUpdateNetworks();
+ }
+
+ void RemoveInterface(const SocketAddress& iface) {
+ for (IfaceList::iterator it = ifaces_.begin();
+ it != ifaces_.end(); ++it) {
+ if (it->EqualIPs(iface)) {
+ ifaces_.erase(it);
+ break;
+ }
+ }
+ DoUpdateNetworks();
+ }
+
+ virtual void StartUpdating() {
+ if (started_) {
+ if (sent_first_update_)
+ SignalNetworksChanged();
+ return;
+ }
+
+ started_ = true;
+ sent_first_update_ = false;
+ thread_->Post(this);
+ }
+
+ virtual void StopUpdating() {
+ started_ = false;
+ }
+
+ // MessageHandler interface.
+ virtual void OnMessage(Message* msg) {
+ DoUpdateNetworks();
+ }
+
+ private:
+ void DoUpdateNetworks() {
+ if (!started_)
+ return;
+ std::vector<Network*> networks;
+ for (IfaceList::iterator it = ifaces_.begin();
+ it != ifaces_.end(); ++it) {
+ int prefix_length = 0;
+ if (it->ipaddr().family() == AF_INET) {
+ prefix_length = kFakeIPv4NetworkPrefixLength;
+ } else if (it->ipaddr().family() == AF_INET6) {
+ prefix_length = kFakeIPv6NetworkPrefixLength;
+ }
+ IPAddress prefix = TruncateIP(it->ipaddr(), prefix_length);
+ scoped_ptr<Network> net(new Network(it->hostname(),
+ it->hostname(),
+ prefix,
+ prefix_length));
+ net->AddIP(it->ipaddr());
+ networks.push_back(net.release());
+ }
+ bool changed;
+ MergeNetworkList(networks, &changed);
+ if (changed || !sent_first_update_) {
+ SignalNetworksChanged();
+ sent_first_update_ = true;
+ }
+ }
+
+ Thread* thread_;
+ IfaceList ifaces_;
+ int next_index_;
+ bool started_;
+ bool sent_first_update_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_FAKENETWORK_H_
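A short sketch of how a test might drive the FakeNetworkManager declared above. The address literal is illustrative, and the ProcessMessages() call assumes the test runs on the rtc::Thread it owns (StartUpdating() posts the first update to the current thread):

#include "webrtc/base/fakenetwork.h"
#include "webrtc/base/thread.h"

void FakeNetworkExample() {
  rtc::FakeNetworkManager manager;
  // Pretend the host has a single IPv4 interface.
  manager.AddInterface(rtc::SocketAddress("192.168.1.2", 0));
  manager.StartUpdating();
  // Pump pending messages so DoUpdateNetworks() runs and
  // SignalNetworksChanged fires.
  rtc::Thread::Current()->ProcessMessages(0);
}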
diff --git a/chromium/third_party/webrtc/base/fakesslidentity.h b/chromium/third_party/webrtc/base/fakesslidentity.h
new file mode 100644
index 00000000000..717cb6c3be6
--- /dev/null
+++ b/chromium/third_party/webrtc/base/fakesslidentity.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_FAKESSLIDENTITY_H_
+#define WEBRTC_BASE_FAKESSLIDENTITY_H_
+
+#include <algorithm>
+#include <vector>
+
+#include "webrtc/base/messagedigest.h"
+#include "webrtc/base/sslidentity.h"
+
+namespace rtc {
+
+class FakeSSLCertificate : public rtc::SSLCertificate {
+ public:
+ // SHA-1 is the default digest algorithm because it is available in all build
+ // configurations used for unit testing.
+ explicit FakeSSLCertificate(const std::string& data)
+ : data_(data), digest_algorithm_(DIGEST_SHA_1) {}
+ explicit FakeSSLCertificate(const std::vector<std::string>& certs)
+ : data_(certs.front()), digest_algorithm_(DIGEST_SHA_1) {
+ std::vector<std::string>::const_iterator it;
+ // Skip certs[0].
+ for (it = certs.begin() + 1; it != certs.end(); ++it) {
+ certs_.push_back(FakeSSLCertificate(*it));
+ }
+ }
+ virtual FakeSSLCertificate* GetReference() const {
+ return new FakeSSLCertificate(*this);
+ }
+ virtual std::string ToPEMString() const {
+ return data_;
+ }
+ virtual void ToDER(Buffer* der_buffer) const {
+ std::string der_string;
+ VERIFY(SSLIdentity::PemToDer(kPemTypeCertificate, data_, &der_string));
+ der_buffer->SetData(der_string.c_str(), der_string.size());
+ }
+ void set_digest_algorithm(const std::string& algorithm) {
+ digest_algorithm_ = algorithm;
+ }
+ virtual bool GetSignatureDigestAlgorithm(std::string* algorithm) const {
+ *algorithm = digest_algorithm_;
+ return true;
+ }
+ virtual bool ComputeDigest(const std::string& algorithm,
+ unsigned char* digest,
+ size_t size,
+ size_t* length) const {
+ *length = rtc::ComputeDigest(algorithm, data_.c_str(), data_.size(),
+ digest, size);
+ return (*length != 0);
+ }
+ virtual bool GetChain(SSLCertChain** chain) const {
+ if (certs_.empty())
+ return false;
+ std::vector<SSLCertificate*> new_certs(certs_.size());
+ std::transform(certs_.begin(), certs_.end(), new_certs.begin(), DupCert);
+ *chain = new SSLCertChain(new_certs);
+ return true;
+ }
+
+ private:
+ static FakeSSLCertificate* DupCert(FakeSSLCertificate cert) {
+ return cert.GetReference();
+ }
+ std::string data_;
+ std::vector<FakeSSLCertificate> certs_;
+ std::string digest_algorithm_;
+};
+
+class FakeSSLIdentity : public rtc::SSLIdentity {
+ public:
+ explicit FakeSSLIdentity(const std::string& data) : cert_(data) {}
+ explicit FakeSSLIdentity(const FakeSSLCertificate& cert) : cert_(cert) {}
+ virtual FakeSSLIdentity* GetReference() const {
+ return new FakeSSLIdentity(*this);
+ }
+ virtual const FakeSSLCertificate& certificate() const { return cert_; }
+ private:
+ FakeSSLCertificate cert_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_FAKESSLIDENTITY_H_
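A hedged sketch of typical test usage of the fakes above. The certificate string is a placeholder rather than real PEM data, which is enough here because FakeSSLCertificate only hashes the raw string (DIGEST_SHA_1 comes from messagedigest.h, already included by this header):

#include "webrtc/base/fakesslidentity.h"

void FakeIdentityExample() {
  rtc::FakeSSLCertificate cert("FAKE CERTIFICATE DATA");
  rtc::FakeSSLIdentity identity(cert);

  // Digest of the fake data, e.g. for fingerprint comparisons in tests.
  unsigned char digest[64];
  size_t digest_len = 0;
  identity.certificate().ComputeDigest(rtc::DIGEST_SHA_1, digest,
                                       sizeof(digest), &digest_len);
}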
diff --git a/chromium/third_party/webrtc/base/faketaskrunner.h b/chromium/third_party/webrtc/base/faketaskrunner.h
new file mode 100644
index 00000000000..5408ab8b2c2
--- /dev/null
+++ b/chromium/third_party/webrtc/base/faketaskrunner.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// A fake TaskRunner for use in unit tests.
+
+#ifndef WEBRTC_BASE_FAKETASKRUNNER_H_
+#define WEBRTC_BASE_FAKETASKRUNNER_H_
+
+#include "webrtc/base/taskparent.h"
+#include "webrtc/base/taskrunner.h"
+
+namespace rtc {
+
+class FakeTaskRunner : public TaskRunner {
+ public:
+ FakeTaskRunner() : current_time_(0) {}
+ virtual ~FakeTaskRunner() {}
+
+ virtual void WakeTasks() { RunTasks(); }
+
+ virtual int64 CurrentTime() {
+ // Implement if needed.
+ return current_time_++;
+ }
+
+ int64 current_time_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_FAKETASKRUNNER_H_
diff --git a/chromium/third_party/webrtc/base/filelock.cc b/chromium/third_party/webrtc/base/filelock.cc
new file mode 100644
index 00000000000..fc921febcd1
--- /dev/null
+++ b/chromium/third_party/webrtc/base/filelock.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/filelock.h"
+
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+
+FileLock::FileLock(const std::string& path, FileStream* file)
+ : path_(path), file_(file) {
+}
+
+FileLock::~FileLock() {
+ MaybeUnlock();
+}
+
+void FileLock::Unlock() {
+ LOG_F(LS_INFO);
+ MaybeUnlock();
+}
+
+void FileLock::MaybeUnlock() {
+ if (file_) {
+ LOG(LS_INFO) << "Unlocking:" << path_;
+ file_->Close();
+ Filesystem::DeleteFile(path_);
+ file_.reset();
+ }
+}
+
+FileLock* FileLock::TryLock(const std::string& path) {
+ FileStream* stream = new FileStream();
+ bool ok = false;
+#if defined(WEBRTC_WIN)
+ // Open and lock in a single operation.
+ ok = stream->OpenShare(path, "a", _SH_DENYRW, NULL);
+#else // WEBRTC_LINUX && !WEBRTC_ANDROID and WEBRTC_MAC && !defined(WEBRTC_IOS)
+ ok = stream->Open(path, "a", NULL) && stream->TryLock();
+#endif
+ if (ok) {
+ return new FileLock(path, stream);
+ } else {
+ // Something failed, either we didn't succeed to open the
+ // file or we failed to lock it. Anyway remove the heap
+ // allocated object and then return NULL to indicate failure.
+ delete stream;
+ return NULL;
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/filelock.h b/chromium/third_party/webrtc/base/filelock.h
new file mode 100644
index 00000000000..46c58ea4aba
--- /dev/null
+++ b/chromium/third_party/webrtc/base/filelock.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_FILELOCK_H_
+#define WEBRTC_BASE_FILELOCK_H_
+
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace rtc {
+
+class FileStream;
+
+// Implements a very simple cross process lock based on a file.
+// When TryLock(...) is called we try to open/create the file in read/write
+// mode without any sharing (or lock it with flock(...) on Unix).
+// If the process crashes, the OS will make sure that the file descriptor
+// is released and another process can acquire the lock.
+// This doesn't work on ancient OSX/Linux versions if used on NFS
+// (NFS client before ~2.6 and Linux kernel < 2.6).
+class FileLock {
+ public:
+ virtual ~FileLock();
+
+ // Attempts to lock the file. The caller owns the returned
+ // lock object. Returns NULL if the file already was locked.
+ static FileLock* TryLock(const std::string& path);
+ void Unlock();
+
+ protected:
+ FileLock(const std::string& path, FileStream* file);
+
+ private:
+ void MaybeUnlock();
+
+ std::string path_;
+ scoped_ptr<FileStream> file_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(FileLock);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_FILELOCK_H_
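A minimal sketch of the intended use of FileLock; the lock-file path is illustrative and the scoped_ptr ownership mirrors the unit test below:

#include "webrtc/base/filelock.h"
#include "webrtc/base/scoped_ptr.h"

bool RunExclusively() {
  rtc::scoped_ptr<rtc::FileLock> lock(
      rtc::FileLock::TryLock("/tmp/myapp.lock"));
  if (lock.get() == NULL)
    return false;  // Another process already holds the lock.
  // ... critical section ...
  return true;  // The destructor unlocks and deletes the lock file.
}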
diff --git a/chromium/third_party/webrtc/base/filelock_unittest.cc b/chromium/third_party/webrtc/base/filelock_unittest.cc
new file mode 100644
index 00000000000..eecbf07da69
--- /dev/null
+++ b/chromium/third_party/webrtc/base/filelock_unittest.cc
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "webrtc/base/event.h"
+#include "webrtc/base/filelock.h"
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+const static std::string kLockFile = "TestLockFile";
+const static int kTimeoutMS = 5000;
+
+class FileLockTest : public testing::Test, public Runnable {
+ public:
+ FileLockTest() : done_(false, false), thread_lock_failed_(false) {
+ }
+
+ virtual void Run(Thread* t) {
+ scoped_ptr<FileLock> lock(FileLock::TryLock(temp_file_.pathname()));
+ // The lock is already owned by the main thread of
+ // this test, therefore the TryLock(...) call should fail.
+ thread_lock_failed_ = lock.get() == NULL;
+ done_.Set();
+ }
+
+ protected:
+ virtual void SetUp() {
+ thread_lock_failed_ = false;
+ Filesystem::GetAppTempFolder(&temp_dir_);
+ temp_file_ = Pathname(temp_dir_.pathname(), kLockFile);
+ }
+
+ void LockOnThread() {
+ locker_.Start(this);
+ done_.Wait(kTimeoutMS);
+ }
+
+ Event done_;
+ Thread locker_;
+ bool thread_lock_failed_;
+ Pathname temp_dir_;
+ Pathname temp_file_;
+};
+
+TEST_F(FileLockTest, TestLockFileDeleted) {
+ scoped_ptr<FileLock> lock(FileLock::TryLock(temp_file_.pathname()));
+ EXPECT_TRUE(lock.get() != NULL);
+ EXPECT_FALSE(Filesystem::IsAbsent(temp_file_.pathname()));
+ lock->Unlock();
+ EXPECT_TRUE(Filesystem::IsAbsent(temp_file_.pathname()));
+}
+
+TEST_F(FileLockTest, TestLock) {
+ scoped_ptr<FileLock> lock(FileLock::TryLock(temp_file_.pathname()));
+ EXPECT_TRUE(lock.get() != NULL);
+}
+
+TEST_F(FileLockTest, TestLockX2) {
+ scoped_ptr<FileLock> lock1(FileLock::TryLock(temp_file_.pathname()));
+ EXPECT_TRUE(lock1.get() != NULL);
+
+ scoped_ptr<FileLock> lock2(FileLock::TryLock(temp_file_.pathname()));
+ EXPECT_TRUE(lock2.get() == NULL);
+}
+
+TEST_F(FileLockTest, TestThreadedLock) {
+ scoped_ptr<FileLock> lock(FileLock::TryLock(temp_file_.pathname()));
+ EXPECT_TRUE(lock.get() != NULL);
+
+ LockOnThread();
+ EXPECT_TRUE(thread_lock_failed_);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/fileutils.cc b/chromium/third_party/webrtc/base/fileutils.cc
new file mode 100644
index 00000000000..60bd0f8f427
--- /dev/null
+++ b/chromium/third_party/webrtc/base/fileutils.cc
@@ -0,0 +1,307 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#if defined(WEBRTC_WIN)
+// TODO(grunell): Remove io.h includes when Chromium has started
+// to use AEC in each source. http://crbug.com/264611.
+#include <io.h>
+#include "webrtc/base/win32.h"
+#endif
+
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/stream.h"
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32filesystem.h"
+#else
+#include "webrtc/base/unixfilesystem.h"
+#endif
+
+#if !defined(WEBRTC_WIN)
+#define MAX_PATH 260
+#endif
+
+namespace rtc {
+
+//////////////////////////
+// Directory Iterator //
+//////////////////////////
+
+// A DirectoryIterator is created with a given directory. It originally points
+// to the first file in the directory, and can be advanced with Next(). This
+// allows you to get information about each file.
+
+ // Constructor
+DirectoryIterator::DirectoryIterator()
+#ifdef WEBRTC_WIN
+ : handle_(INVALID_HANDLE_VALUE) {
+#else
+ : dir_(NULL), dirent_(NULL) {
+#endif
+}
+
+ // Destructor
+DirectoryIterator::~DirectoryIterator() {
+#if defined(WEBRTC_WIN)
+ if (handle_ != INVALID_HANDLE_VALUE)
+ ::FindClose(handle_);
+#else
+ if (dir_)
+ closedir(dir_);
+#endif
+}
+
+ // Starts traversing a directory.
+ // dir is the directory to traverse
+ // returns true if the directory exists and is valid
+bool DirectoryIterator::Iterate(const Pathname &dir) {
+ directory_ = dir.pathname();
+#if defined(WEBRTC_WIN)
+ if (handle_ != INVALID_HANDLE_VALUE)
+ ::FindClose(handle_);
+ std::string d = dir.pathname() + '*';
+ handle_ = ::FindFirstFile(ToUtf16(d).c_str(), &data_);
+ if (handle_ == INVALID_HANDLE_VALUE)
+ return false;
+#else
+ if (dir_ != NULL)
+ closedir(dir_);
+ dir_ = ::opendir(directory_.c_str());
+ if (dir_ == NULL)
+ return false;
+ dirent_ = readdir(dir_);
+ if (dirent_ == NULL)
+ return false;
+
+ if (::stat(std::string(directory_ + Name()).c_str(), &stat_) != 0)
+ return false;
+#endif
+ return true;
+}
+
+ // Advances to the next file
+ // returns true if there were more files in the directory.
+bool DirectoryIterator::Next() {
+#if defined(WEBRTC_WIN)
+ return ::FindNextFile(handle_, &data_) == TRUE;
+#else
+ dirent_ = ::readdir(dir_);
+ if (dirent_ == NULL)
+ return false;
+
+ return ::stat(std::string(directory_ + Name()).c_str(), &stat_) == 0;
+#endif
+}
+
+ // returns true if the file currently pointed to is a directory
+bool DirectoryIterator::IsDirectory() const {
+#if defined(WEBRTC_WIN)
+ return (data_.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) != FALSE;
+#else
+ return S_ISDIR(stat_.st_mode);
+#endif
+}
+
+ // returns the name of the file currently pointed to
+std::string DirectoryIterator::Name() const {
+#if defined(WEBRTC_WIN)
+ return ToUtf8(data_.cFileName);
+#else
+ assert(dirent_ != NULL);
+ return dirent_->d_name;
+#endif
+}
+
+ // returns the size of the file currently pointed to
+size_t DirectoryIterator::FileSize() const {
+#if !defined(WEBRTC_WIN)
+ return stat_.st_size;
+#else
+ return data_.nFileSizeLow;
+#endif
+}
+
+ // returns the last modified time of this file
+time_t DirectoryIterator::FileModifyTime() const {
+#if defined(WEBRTC_WIN)
+ time_t val;
+ FileTimeToUnixTime(data_.ftLastWriteTime, &val);
+ return val;
+#else
+ return stat_.st_mtime;
+#endif
+}
+
+FilesystemInterface* Filesystem::default_filesystem_ = NULL;
+
+FilesystemInterface *Filesystem::EnsureDefaultFilesystem() {
+ if (!default_filesystem_) {
+#if defined(WEBRTC_WIN)
+ default_filesystem_ = new Win32Filesystem();
+#else
+ default_filesystem_ = new UnixFilesystem();
+#endif
+ }
+ return default_filesystem_;
+}
+
+bool FilesystemInterface::CopyFolder(const Pathname &old_path,
+ const Pathname &new_path) {
+ bool success = true;
+ VERIFY(IsFolder(old_path));
+ Pathname new_dir;
+ new_dir.SetFolder(new_path.pathname());
+ Pathname old_dir;
+ old_dir.SetFolder(old_path.pathname());
+ if (!CreateFolder(new_dir))
+ return false;
+ DirectoryIterator *di = IterateDirectory();
+ if (!di)
+ return false;
+ if (di->Iterate(old_dir.pathname())) {
+ do {
+ if (di->Name() == "." || di->Name() == "..")
+ continue;
+ Pathname source;
+ Pathname dest;
+ source.SetFolder(old_dir.pathname());
+ dest.SetFolder(new_path.pathname());
+ source.SetFilename(di->Name());
+ dest.SetFilename(di->Name());
+ if (!CopyFileOrFolder(source, dest))
+ success = false;
+ } while (di->Next());
+ }
+ delete di;
+ return success;
+}
+
+bool FilesystemInterface::DeleteFolderContents(const Pathname &folder) {
+ bool success = true;
+ VERIFY(IsFolder(folder));
+ DirectoryIterator *di = IterateDirectory();
+ if (!di)
+ return false;
+ if (di->Iterate(folder)) {
+ do {
+ if (di->Name() == "." || di->Name() == "..")
+ continue;
+ Pathname subdir;
+ subdir.SetFolder(folder.pathname());
+ if (di->IsDirectory()) {
+ subdir.AppendFolder(di->Name());
+ if (!DeleteFolderAndContents(subdir)) {
+ success = false;
+ }
+ } else {
+ subdir.SetFilename(di->Name());
+ if (!DeleteFile(subdir)) {
+ success = false;
+ }
+ }
+ } while (di->Next());
+ }
+ delete di;
+ return success;
+}
+
+bool FilesystemInterface::CleanAppTempFolder() {
+ Pathname path;
+ if (!GetAppTempFolder(&path))
+ return false;
+ if (IsAbsent(path))
+ return true;
+ if (!IsTemporaryPath(path)) {
+ ASSERT(false);
+ return false;
+ }
+ return DeleteFolderContents(path);
+}
+
+Pathname Filesystem::GetCurrentDirectory() {
+ return EnsureDefaultFilesystem()->GetCurrentDirectory();
+}
+
+bool CreateUniqueFile(Pathname& path, bool create_empty) {
+ LOG(LS_INFO) << "Path " << path.pathname() << std::endl;
+ // If no folder is supplied, use the temporary folder
+ if (path.folder().empty()) {
+ Pathname temporary_path;
+ if (!Filesystem::GetTemporaryFolder(temporary_path, true, NULL)) {
+ printf("Get temp failed\n");
+ return false;
+ }
+ path.SetFolder(temporary_path.pathname());
+ }
+
+ // If no filename is supplied, use a temporary name
+ if (path.filename().empty()) {
+ std::string folder(path.folder());
+ std::string filename = Filesystem::TempFilename(folder, "gt");
+ path.SetPathname(filename);
+ if (!create_empty) {
+ Filesystem::DeleteFile(path.pathname());
+ }
+ return true;
+ }
+
+ // Otherwise, create a unique name based on the given filename
+ // foo.txt -> foo-N.txt
+ const std::string basename = path.basename();
+ const size_t MAX_VERSION = 100;
+ size_t version = 0;
+ while (version < MAX_VERSION) {
+ std::string pathname = path.pathname();
+
+ if (!Filesystem::IsFile(pathname)) {
+ if (create_empty) {
+ FileStream* fs = Filesystem::OpenFile(pathname, "w");
+ delete fs;
+ }
+ return true;
+ }
+ version += 1;
+ char version_base[MAX_PATH];
+ sprintfn(version_base, ARRAY_SIZE(version_base), "%s-%u",
+ basename.c_str(), version);
+ path.SetBasename(version_base);
+ }
+ return true;
+}
+
+// Taken from Chromium's base/platform_file_*.cc.
+// TODO(grunell): Remove when Chromium has started to use AEC in each source.
+// http://crbug.com/264611.
+FILE* FdopenPlatformFileForWriting(PlatformFile file) {
+#if defined(WEBRTC_WIN)
+ if (file == kInvalidPlatformFileValue)
+ return NULL;
+ int fd = _open_osfhandle(reinterpret_cast<intptr_t>(file), 0);
+ if (fd < 0)
+ return NULL;
+ return _fdopen(fd, "w");
+#else
+ return fdopen(file, "w");
+#endif
+}
+
+bool ClosePlatformFile(PlatformFile file) {
+#if defined(WEBRTC_WIN)
+ return CloseHandle(file) != 0;
+#else
+  return close(file) == 0;
+#endif
+}
+
+} // namespace rtc
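As a usage sketch of the CreateUniqueFile() helper defined above (the path is illustrative): starting from report.txt it probes report-1.txt, report-2.txt, and so on until an unused name is found:

#include "webrtc/base/fileutils.h"
#include "webrtc/base/pathutils.h"

void UniqueFileExample() {
  rtc::Pathname path("/tmp/report.txt");
  // On success, path may have been rewritten (e.g. to /tmp/report-1.txt);
  // with create_empty == true an empty placeholder file now exists.
  if (rtc::CreateUniqueFile(path, true)) {
    // Write the real contents to path.pathname().
  }
}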
diff --git a/chromium/third_party/webrtc/base/fileutils.h b/chromium/third_party/webrtc/base/fileutils.h
new file mode 100644
index 00000000000..c0a3f88c649
--- /dev/null
+++ b/chromium/third_party/webrtc/base/fileutils.h
@@ -0,0 +1,459 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_FILEUTILS_H_
+#define WEBRTC_BASE_FILEUTILS_H_
+
+#include <string>
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#else
+#include <dirent.h>
+#include <stdio.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+#endif
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace rtc {
+
+class FileStream;
+class Pathname;
+
+//////////////////////////
+// Directory Iterator //
+//////////////////////////
+
+// A DirectoryIterator is created with a given directory. It originally points
+// to the first file in the directory, and can be advanced with Next(). This
+// allows you to get information about each file.
+
+class DirectoryIterator {
+ friend class Filesystem;
+ public:
+ // Constructor
+ DirectoryIterator();
+ // Destructor
+ virtual ~DirectoryIterator();
+
+ // Starts traversing a directory
+ // dir is the directory to traverse
+ // returns true if the directory exists and is valid
+ // The iterator will point to the first entry in the directory
+ virtual bool Iterate(const Pathname &path);
+
+ // Advances to the next file
+ // returns true if there were more files in the directory.
+ virtual bool Next();
+
+ // returns true if the file currently pointed to is a directory
+ virtual bool IsDirectory() const;
+
+ // returns the name of the file currently pointed to
+ virtual std::string Name() const;
+
+ // returns the size of the file currently pointed to
+ virtual size_t FileSize() const;
+
+ // returns the last modified time of the file currently pointed to
+ virtual time_t FileModifyTime() const;
+
+ // checks whether current file is a special directory file "." or ".."
+ bool IsDots() const {
+ std::string filename(Name());
+ return (filename.compare(".") == 0) || (filename.compare("..") == 0);
+ }
+
+ private:
+ std::string directory_;
+#if defined(WEBRTC_WIN)
+ WIN32_FIND_DATA data_;
+ HANDLE handle_;
+#else
+ DIR *dir_;
+ struct dirent *dirent_;
+ struct stat stat_;
+#endif
+};
+
+enum FileTimeType { FTT_CREATED, FTT_MODIFIED, FTT_ACCESSED };
+
+class FilesystemInterface {
+ public:
+ virtual ~FilesystemInterface() {}
+
+ // Returns a DirectoryIterator for a given pathname.
+ // TODO: Do fancy abstracted stuff
+ virtual DirectoryIterator *IterateDirectory() {
+ return new DirectoryIterator();
+ }
+
+ // Opens a file. Returns an open StreamInterface if function succeeds.
+ // Otherwise, returns NULL.
+ // TODO: Add an error param to indicate failure reason, similar to
+ // FileStream::Open
+ virtual FileStream *OpenFile(const Pathname &filename,
+ const std::string &mode) = 0;
+
+ // Atomically creates an empty file accessible only to the current user if one
+ // does not already exist at the given path, otherwise fails. This is the only
+ // secure way to create a file in a shared temp directory (e.g., C:\Temp on
+ // Windows or /tmp on Linux).
+ // Note that if it is essential that a file be successfully created then the
+ // app must generate random names and retry on failure, or else it will be
+ // vulnerable to a trivial DoS.
+ virtual bool CreatePrivateFile(const Pathname &filename) = 0;
+
+ // This will attempt to delete the path located at filename.
+ // It ASSERTS and returns false if the path points to a folder or a
+ // non-existent file.
+ virtual bool DeleteFile(const Pathname &filename) = 0;
+
+ // This will attempt to delete the empty folder located at 'folder'
+ // It ASSERTS and returns false if the path points to a file or a non-existent
+ // folder. It fails normally if the folder is not empty or can otherwise
+ // not be deleted.
+ virtual bool DeleteEmptyFolder(const Pathname &folder) = 0;
+
+ // This will call IterateDirectory, to get a directory iterator, and then
+ // call DeleteFolderAndContents and DeleteFile on every path contained in this
+ // folder. If the folder is empty, this returns true.
+ virtual bool DeleteFolderContents(const Pathname &folder);
+
+ // This deletes the contents of a folder, recursively, and then deletes
+ // the folder itself.
+ virtual bool DeleteFolderAndContents(const Pathname &folder) {
+ return DeleteFolderContents(folder) && DeleteEmptyFolder(folder);
+ }
+
+ // This will delete whatever is located at path, be it a file or a folder.
+ // If it is a folder, it will delete it recursively by calling
+ // DeleteFolderAndContents
+ bool DeleteFileOrFolder(const Pathname &path) {
+ if (IsFolder(path))
+ return DeleteFolderAndContents(path);
+ else
+ return DeleteFile(path);
+ }
+
+ // Creates a directory. This will call itself recursively to create /foo/bar
+ // even if /foo does not exist. Returns true if the function succeeds.
+ virtual bool CreateFolder(const Pathname &pathname) = 0;
+
+ // This moves a file from old_path to new_path, where "old_path" is a
+ // plain file. This ASSERTs and returns false if old_path points to a
+ // directory, and returns true if the function succeeds.
+ // If the new path is on a different volume than the old path, this function
+ // will attempt to copy and, if that succeeds, delete the old path.
+ virtual bool MoveFolder(const Pathname &old_path,
+ const Pathname &new_path) = 0;
+
+ // This moves a directory from old_path to new_path, where "old_path" is a
+ // directory. This ASSERTs and returns false if old_path points to a plain
+ // file, and returns true if the function succeeds.
+ // If the new path is on a different volume, this function will attempt to
+ // copy and if that succeeds, delete the old path.
+ virtual bool MoveFile(const Pathname &old_path, const Pathname &new_path) = 0;
+
+ // This attempts to move whatever is located at old_path to new_path,
+ // be it a file or folder.
+ bool MoveFileOrFolder(const Pathname &old_path, const Pathname &new_path) {
+ if (IsFile(old_path)) {
+ return MoveFile(old_path, new_path);
+ } else {
+ return MoveFolder(old_path, new_path);
+ }
+ }
+
+ // This copies a file from old_path to new_path. This method ASSERTs and
+ // returns false if old_path is a folder, and returns true if the copy
+ // succeeds.
+ virtual bool CopyFile(const Pathname &old_path, const Pathname &new_path) = 0;
+
+ // This copies a folder from old_path to new_path.
+ bool CopyFolder(const Pathname &old_path, const Pathname &new_path);
+
+ bool CopyFileOrFolder(const Pathname &old_path, const Pathname &new_path) {
+ if (IsFile(old_path))
+ return CopyFile(old_path, new_path);
+ else
+ return CopyFolder(old_path, new_path);
+ }
+
+ // Returns true if pathname refers to a directory
+ virtual bool IsFolder(const Pathname& pathname) = 0;
+
+ // Returns true if pathname refers to a file
+ virtual bool IsFile(const Pathname& pathname) = 0;
+
+  // Returns true if pathname refers to no filesystem object and every parent
+  // directory either exists or is also absent.
+ virtual bool IsAbsent(const Pathname& pathname) = 0;
+
+ // Returns true if pathname represents a temporary location on the system.
+ virtual bool IsTemporaryPath(const Pathname& pathname) = 0;
+
+ // A folder appropriate for storing temporary files (Contents are
+ // automatically deleted when the program exits)
+ virtual bool GetTemporaryFolder(Pathname &path, bool create,
+ const std::string *append) = 0;
+
+ virtual std::string TempFilename(const Pathname &dir,
+ const std::string &prefix) = 0;
+
+ // Determines the size of the file indicated by path.
+ virtual bool GetFileSize(const Pathname& path, size_t* size) = 0;
+
+ // Determines a timestamp associated with the file indicated by path.
+ virtual bool GetFileTime(const Pathname& path, FileTimeType which,
+ time_t* time) = 0;
+
+ // Returns the path to the running application.
+ // Note: This is not guaranteed to work on all platforms. Be aware of the
+ // limitations before using it, and robustly handle failure.
+ virtual bool GetAppPathname(Pathname* path) = 0;
+
+ // Get a folder that is unique to the current application, which is suitable
+ // for sharing data between executions of the app. If the per_user arg is
+ // true, the folder is also specific to the current user.
+ virtual bool GetAppDataFolder(Pathname* path, bool per_user) = 0;
+
+ // Get a temporary folder that is unique to the current user and application.
+ // TODO: Re-evaluate the goals of this function. We probably just need any
+ // directory that won't collide with another existing directory, and which
+ // will be cleaned up when the program exits.
+ virtual bool GetAppTempFolder(Pathname* path) = 0;
+
+ // Delete the contents of the folder returned by GetAppTempFolder
+ bool CleanAppTempFolder();
+
+ virtual bool GetDiskFreeSpace(const Pathname& path, int64 *freebytes) = 0;
+
+ // Returns the absolute path of the current directory.
+ virtual Pathname GetCurrentDirectory() = 0;
+
+ // Note: These might go into some shared config section later, but they're
+ // used by some methods in this interface, so we're leaving them here for now.
+ void SetOrganizationName(const std::string& organization) {
+ organization_name_ = organization;
+ }
+ void GetOrganizationName(std::string* organization) {
+ ASSERT(NULL != organization);
+ *organization = organization_name_;
+ }
+ void SetApplicationName(const std::string& application) {
+ application_name_ = application;
+ }
+ void GetApplicationName(std::string* application) {
+ ASSERT(NULL != application);
+ *application = application_name_;
+ }
+
+ protected:
+ std::string organization_name_;
+ std::string application_name_;
+};
+
+class Filesystem {
+ public:
+ static FilesystemInterface *default_filesystem() {
+ ASSERT(default_filesystem_ != NULL);
+ return default_filesystem_;
+ }
+
+ static void set_default_filesystem(FilesystemInterface *filesystem) {
+ default_filesystem_ = filesystem;
+ }
+
+ static FilesystemInterface *swap_default_filesystem(
+ FilesystemInterface *filesystem) {
+ FilesystemInterface *cur = default_filesystem_;
+ default_filesystem_ = filesystem;
+ return cur;
+ }
+
+ static DirectoryIterator *IterateDirectory() {
+ return EnsureDefaultFilesystem()->IterateDirectory();
+ }
+
+ static bool CreateFolder(const Pathname &pathname) {
+ return EnsureDefaultFilesystem()->CreateFolder(pathname);
+ }
+
+ static FileStream *OpenFile(const Pathname &filename,
+ const std::string &mode) {
+ return EnsureDefaultFilesystem()->OpenFile(filename, mode);
+ }
+
+ static bool CreatePrivateFile(const Pathname &filename) {
+ return EnsureDefaultFilesystem()->CreatePrivateFile(filename);
+ }
+
+ static bool DeleteFile(const Pathname &filename) {
+ return EnsureDefaultFilesystem()->DeleteFile(filename);
+ }
+
+ static bool DeleteEmptyFolder(const Pathname &folder) {
+ return EnsureDefaultFilesystem()->DeleteEmptyFolder(folder);
+ }
+
+ static bool DeleteFolderContents(const Pathname &folder) {
+ return EnsureDefaultFilesystem()->DeleteFolderContents(folder);
+ }
+
+ static bool DeleteFolderAndContents(const Pathname &folder) {
+ return EnsureDefaultFilesystem()->DeleteFolderAndContents(folder);
+ }
+
+ static bool MoveFolder(const Pathname &old_path, const Pathname &new_path) {
+ return EnsureDefaultFilesystem()->MoveFolder(old_path, new_path);
+ }
+
+ static bool MoveFile(const Pathname &old_path, const Pathname &new_path) {
+ return EnsureDefaultFilesystem()->MoveFile(old_path, new_path);
+ }
+
+ static bool CopyFolder(const Pathname &old_path, const Pathname &new_path) {
+ return EnsureDefaultFilesystem()->CopyFolder(old_path, new_path);
+ }
+
+ static bool CopyFile(const Pathname &old_path, const Pathname &new_path) {
+ return EnsureDefaultFilesystem()->CopyFile(old_path, new_path);
+ }
+
+ static bool IsFolder(const Pathname& pathname) {
+ return EnsureDefaultFilesystem()->IsFolder(pathname);
+ }
+
+ static bool IsFile(const Pathname &pathname) {
+ return EnsureDefaultFilesystem()->IsFile(pathname);
+ }
+
+ static bool IsAbsent(const Pathname &pathname) {
+ return EnsureDefaultFilesystem()->IsAbsent(pathname);
+ }
+
+ static bool IsTemporaryPath(const Pathname& pathname) {
+ return EnsureDefaultFilesystem()->IsTemporaryPath(pathname);
+ }
+
+ static bool GetTemporaryFolder(Pathname &path, bool create,
+ const std::string *append) {
+ return EnsureDefaultFilesystem()->GetTemporaryFolder(path, create, append);
+ }
+
+ static std::string TempFilename(const Pathname &dir,
+ const std::string &prefix) {
+ return EnsureDefaultFilesystem()->TempFilename(dir, prefix);
+ }
+
+ static bool GetFileSize(const Pathname& path, size_t* size) {
+ return EnsureDefaultFilesystem()->GetFileSize(path, size);
+ }
+
+ static bool GetFileTime(const Pathname& path, FileTimeType which,
+ time_t* time) {
+ return EnsureDefaultFilesystem()->GetFileTime(path, which, time);
+ }
+
+ static bool GetAppPathname(Pathname* path) {
+ return EnsureDefaultFilesystem()->GetAppPathname(path);
+ }
+
+ static bool GetAppDataFolder(Pathname* path, bool per_user) {
+ return EnsureDefaultFilesystem()->GetAppDataFolder(path, per_user);
+ }
+
+ static bool GetAppTempFolder(Pathname* path) {
+ return EnsureDefaultFilesystem()->GetAppTempFolder(path);
+ }
+
+ static bool CleanAppTempFolder() {
+ return EnsureDefaultFilesystem()->CleanAppTempFolder();
+ }
+
+ static bool GetDiskFreeSpace(const Pathname& path, int64 *freebytes) {
+ return EnsureDefaultFilesystem()->GetDiskFreeSpace(path, freebytes);
+ }
+
+ // Definition has to be in the .cc file due to returning forward-declared
+ // Pathname by value.
+ static Pathname GetCurrentDirectory();
+
+ static void SetOrganizationName(const std::string& organization) {
+ EnsureDefaultFilesystem()->SetOrganizationName(organization);
+ }
+
+ static void GetOrganizationName(std::string* organization) {
+ EnsureDefaultFilesystem()->GetOrganizationName(organization);
+ }
+
+ static void SetApplicationName(const std::string& application) {
+ EnsureDefaultFilesystem()->SetApplicationName(application);
+ }
+
+ static void GetApplicationName(std::string* application) {
+ EnsureDefaultFilesystem()->GetApplicationName(application);
+ }
+
+ private:
+ static FilesystemInterface* default_filesystem_;
+
+ static FilesystemInterface *EnsureDefaultFilesystem();
+ DISALLOW_IMPLICIT_CONSTRUCTORS(Filesystem);
+};
+
+class FilesystemScope {
+ public:
+ explicit FilesystemScope(FilesystemInterface *new_fs) {
+ old_fs_ = Filesystem::swap_default_filesystem(new_fs);
+ }
+ ~FilesystemScope() {
+ Filesystem::set_default_filesystem(old_fs_);
+ }
+ private:
+ FilesystemInterface* old_fs_;
+ DISALLOW_IMPLICIT_CONSTRUCTORS(FilesystemScope);
+};
+
+// Generates a unique filename based on the input path. If no path component
+// is specified, it uses the temporary directory. If a filename is provided,
+// up to 100 variations of the form basename-N.extension are tried. When
+// create_empty is true, an empty file of this name is created (which
+// decreases the chance of a temporary filename collision with another
+// process).
+bool CreateUniqueFile(Pathname& path, bool create_empty);
+
+// Taken from Chromium's base/platform_file.h.
+// Don't use ClosePlatformFile to close a file opened with FdopenPlatformFile.
+// Use fclose instead.
+// TODO(grunell): Remove when Chromium has started to use AEC in each source.
+// http://crbug.com/264611.
+#if defined(WEBRTC_WIN)
+typedef HANDLE PlatformFile;
+const PlatformFile kInvalidPlatformFileValue = INVALID_HANDLE_VALUE;
+#elif defined(WEBRTC_POSIX)
+typedef int PlatformFile;
+const PlatformFile kInvalidPlatformFileValue = -1;
+#else
+#error Unsupported platform
+#endif
+
+FILE* FdopenPlatformFileForWriting(PlatformFile file);
+bool ClosePlatformFile(PlatformFile file);
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_FILEUTILS_H_
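A short sketch of the iteration pattern described by the DirectoryIterator comment above, combined with the static Filesystem wrappers (all calls are declared in this header):

#include "webrtc/base/fileutils.h"
#include "webrtc/base/pathutils.h"
#include "webrtc/base/scoped_ptr.h"

void ListFolder(const rtc::Pathname& dir) {
  rtc::scoped_ptr<rtc::DirectoryIterator> it(
      rtc::Filesystem::IterateDirectory());
  if (!it->Iterate(dir))
    return;  // Directory does not exist or is not readable.
  do {
    if (it->IsDots())
      continue;  // Skip "." and "..".
    // it->Name(), it->FileSize() and it->IsDirectory() describe the entry.
  } while (it->Next());
}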
diff --git a/chromium/third_party/webrtc/base/fileutils_mock.h b/chromium/third_party/webrtc/base/fileutils_mock.h
new file mode 100644
index 00000000000..e9d20a75f4a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/fileutils_mock.h
@@ -0,0 +1,253 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_FILEUTILS_MOCK_H_
+#define WEBRTC_BASE_FILEUTILS_MOCK_H_
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+
+class FakeFileStream : public FileStream {
+ public:
+ explicit FakeFileStream(const std::string & contents) :
+ string_stream_(contents)
+ {}
+
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ return string_stream_.Read(buffer, buffer_len, read, error);
+ }
+
+ virtual void Close() {
+ return string_stream_.Close();
+ }
+ virtual bool GetSize(size_t* size) const {
+ return string_stream_.GetSize(size);
+ }
+
+ private:
+ StringStream string_stream_;
+};
+
+class FakeDirectoryIterator : public DirectoryIterator {
+ public:
+ typedef std::pair<std::string, std::string> File;
+
+ /*
+ * files should be sorted by directory
+ * put '/' at the end of file if you want it to be a directory
+ *
+ * Sample list:
+ * /var/dir/file1
+ * /var/dir/file2
+ * /var/dir/subdir1/
+ * /var/dir/subdir2/
+ * /var/dir2/file2
+ * /var/dir3/
+ *
+ * you can call Iterate for any path: /var, /var/dir, /var/dir2
+ * unrelated files will be ignored
+ */
+ explicit FakeDirectoryIterator(const std::vector<File>& all_files) :
+ all_files_(all_files) {}
+
+ virtual bool Iterate(const Pathname& path) {
+ path_iterator_ = all_files_.begin();
+ path_ = path.pathname();
+
+    // Make sure path ends with '/'.
+ if (path_.rfind(Pathname::DefaultFolderDelimiter()) != path_.size() - 1)
+ path_ += Pathname::DefaultFolderDelimiter();
+
+ return FakeDirectoryIterator::Search(std::string(""));
+ }
+
+ virtual bool Next() {
+ std::string current_name = Name();
+ path_iterator_++;
+ return FakeDirectoryIterator::Search(current_name);
+ }
+
+ bool Search(const std::string& current_name) {
+ for (; path_iterator_ != all_files_.end(); path_iterator_++) {
+ if (path_iterator_->first.find(path_) == 0
+ && Name().compare(current_name) != 0) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ virtual bool IsDirectory() const {
+ std::string sub_path = path_iterator_->first;
+
+ return std::string::npos !=
+ sub_path.find(Pathname::DefaultFolderDelimiter(), path_.size());
+ }
+
+ virtual std::string Name() const {
+ std::string sub_path = path_iterator_->first;
+
+ // path - top level path (ex. /var/lib)
+ // sub_path - subpath under top level path (ex. /var/lib/dir/dir/file )
+ // find shortest non-trivial common path. (ex. /var/lib/dir)
+ size_t start = path_.size();
+ size_t end = sub_path.find(Pathname::DefaultFolderDelimiter(), start);
+
+ if (end != std::string::npos) {
+ return sub_path.substr(start, end - start);
+ } else {
+ return sub_path.substr(start);
+ }
+ }
+
+ private:
+ const std::vector<File> all_files_;
+
+ std::string path_;
+ std::vector<File>::const_iterator path_iterator_;
+};
+
+class FakeFileSystem : public FilesystemInterface {
+ public:
+ typedef std::pair<std::string, std::string> File;
+
+ explicit FakeFileSystem(const std::vector<File>& all_files) :
+ all_files_(all_files) {}
+
+ virtual DirectoryIterator *IterateDirectory() {
+ return new FakeDirectoryIterator(all_files_);
+ }
+
+ virtual FileStream * OpenFile(
+ const Pathname &filename,
+ const std::string &mode) {
+ std::vector<File>::const_iterator i_files = all_files_.begin();
+ std::string path = filename.pathname();
+
+ for (; i_files != all_files_.end(); i_files++) {
+ if (i_files->first.compare(path) == 0) {
+ return new FakeFileStream(i_files->second);
+ }
+ }
+
+ return NULL;
+ }
+
+ bool CreatePrivateFile(const Pathname &filename) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool DeleteFile(const Pathname &filename) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool DeleteEmptyFolder(const Pathname &folder) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool DeleteFolderContents(const Pathname &folder) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool DeleteFolderAndContents(const Pathname &folder) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool CreateFolder(const Pathname &pathname) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool MoveFolder(const Pathname &old_path, const Pathname &new_path) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool MoveFile(const Pathname &old_path, const Pathname &new_path) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool CopyFile(const Pathname &old_path, const Pathname &new_path) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool IsFolder(const Pathname &pathname) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool IsFile(const Pathname &pathname) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool IsAbsent(const Pathname &pathname) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool IsTemporaryPath(const Pathname &pathname) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool GetTemporaryFolder(Pathname &path, bool create,
+ const std::string *append) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ std::string TempFilename(const Pathname &dir, const std::string &prefix) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return std::string();
+ }
+ bool GetFileSize(const Pathname &path, size_t *size) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool GetFileTime(const Pathname &path, FileTimeType which,
+ time_t* time) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool GetAppPathname(Pathname *path) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool GetAppDataFolder(Pathname *path, bool per_user) {
+ EXPECT_TRUE(per_user) << "Unsupported operation";
+#if defined(WEBRTC_WIN)
+ path->SetPathname("c:\\Users\\test_user", "");
+#else
+ path->SetPathname("/home/user/test_user", "");
+#endif
+ return true;
+ }
+ bool GetAppTempFolder(Pathname *path) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ bool GetDiskFreeSpace(const Pathname &path, int64 *freebytes) {
+ EXPECT_TRUE(false) << "Unsupported operation";
+ return false;
+ }
+ Pathname GetCurrentDirectory() {
+ return Pathname();
+ }
+
+ private:
+ const std::vector<File> all_files_;
+};
+} // namespace rtc
+
+#endif // WEBRTC_BASE_FILEUTILS_MOCK_H_
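A sketch of installing the mock for a test via FilesystemScope from fileutils.h; the paths and contents are illustrative:

#include <vector>

#include "webrtc/base/fileutils.h"
#include "webrtc/base/fileutils_mock.h"
#include "webrtc/base/pathutils.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stream.h"

void MockFilesystemExample() {
  std::vector<rtc::FakeFileSystem::File> files;
  files.push_back(rtc::FakeFileSystem::File("/var/dir/file1", "contents1"));
  files.push_back(rtc::FakeFileSystem::File("/var/dir/subdir1/", ""));

  rtc::FakeFileSystem fake(files);
  // The fake becomes the default filesystem for this scope; the previous
  // default is restored when the scope object is destroyed.
  rtc::FilesystemScope scope(&fake);

  rtc::scoped_ptr<rtc::FileStream> stream(
      rtc::Filesystem::OpenFile(rtc::Pathname("/var/dir/file1"), "r"));
  // stream now reads "contents1" from an in-memory FakeFileStream.
}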
diff --git a/chromium/third_party/webrtc/base/fileutils_unittest.cc b/chromium/third_party/webrtc/base/fileutils_unittest.cc
new file mode 100644
index 00000000000..9076bc7870c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/fileutils_unittest.cc
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+
+// Make sure we can get a temp folder for the later tests.
+TEST(FilesystemTest, GetTemporaryFolder) {
+ Pathname path;
+ EXPECT_TRUE(Filesystem::GetTemporaryFolder(path, true, NULL));
+}
+
+// Test creating a temp file, reading it back in, and deleting it.
+TEST(FilesystemTest, TestOpenFile) {
+ Pathname path;
+ EXPECT_TRUE(Filesystem::GetTemporaryFolder(path, true, NULL));
+ path.SetPathname(Filesystem::TempFilename(path, "ut"));
+
+ FileStream* fs;
+ char buf[256];
+ size_t bytes;
+
+ fs = Filesystem::OpenFile(path, "wb");
+ ASSERT_TRUE(fs != NULL);
+ EXPECT_EQ(SR_SUCCESS, fs->Write("test", 4, &bytes, NULL));
+ EXPECT_EQ(4U, bytes);
+ delete fs;
+
+ EXPECT_TRUE(Filesystem::IsFile(path));
+
+ fs = Filesystem::OpenFile(path, "rb");
+ ASSERT_TRUE(fs != NULL);
+ EXPECT_EQ(SR_SUCCESS, fs->Read(buf, sizeof(buf), &bytes, NULL));
+ EXPECT_EQ(4U, bytes);
+ delete fs;
+
+ EXPECT_TRUE(Filesystem::DeleteFile(path));
+ EXPECT_FALSE(Filesystem::IsFile(path));
+}
+
+// Test opening a non-existent file.
+TEST(FilesystemTest, TestOpenBadFile) {
+ Pathname path;
+ EXPECT_TRUE(Filesystem::GetTemporaryFolder(path, true, NULL));
+ path.SetFilename("not an actual file");
+
+ EXPECT_FALSE(Filesystem::IsFile(path));
+
+ FileStream* fs = Filesystem::OpenFile(path, "rb");
+ EXPECT_FALSE(fs != NULL);
+}
+
+// Test that CreatePrivateFile fails for existing files and succeeds for
+// non-existent ones.
+TEST(FilesystemTest, TestCreatePrivateFile) {
+ Pathname path;
+ EXPECT_TRUE(Filesystem::GetTemporaryFolder(path, true, NULL));
+ path.SetFilename("private_file_test");
+
+ // First call should succeed because the file doesn't exist yet.
+ EXPECT_TRUE(Filesystem::CreatePrivateFile(path));
+ // Next call should fail, because now it exists.
+ EXPECT_FALSE(Filesystem::CreatePrivateFile(path));
+
+ // Verify that we have permission to open the file for reading and writing.
+ scoped_ptr<FileStream> fs(Filesystem::OpenFile(path, "wb"));
+ EXPECT_TRUE(fs.get() != NULL);
+ // Have to close the file on Windows before it will let us delete it.
+ fs.reset();
+
+ // Verify that we have permission to delete the file.
+ EXPECT_TRUE(Filesystem::DeleteFile(path));
+}
+
+// Test checking for free disk space.
+TEST(FilesystemTest, TestGetDiskFreeSpace) {
+ // Note that we should avoid picking any file/folder which could be located
+  // on a remotely mounted drive/device.
+ Pathname path;
+ ASSERT_TRUE(Filesystem::GetAppDataFolder(&path, true));
+
+ int64 free1 = 0;
+ EXPECT_TRUE(Filesystem::IsFolder(path));
+ EXPECT_FALSE(Filesystem::IsFile(path));
+ EXPECT_TRUE(Filesystem::GetDiskFreeSpace(path, &free1));
+ EXPECT_GT(free1, 0);
+
+ int64 free2 = 0;
+ path.AppendFolder("this_folder_doesnt_exist");
+ EXPECT_FALSE(Filesystem::IsFolder(path));
+ EXPECT_TRUE(Filesystem::IsAbsent(path));
+ EXPECT_TRUE(Filesystem::GetDiskFreeSpace(path, &free2));
+ // These should be the same disk, and disk free space should not have changed
+ // by more than 1% between the two calls.
+ EXPECT_LT(static_cast<int64>(free1 * .9), free2);
+ EXPECT_LT(free2, static_cast<int64>(free1 * 1.1));
+
+ int64 free3 = 0;
+ path.clear();
+ EXPECT_TRUE(path.empty());
+ EXPECT_TRUE(Filesystem::GetDiskFreeSpace(path, &free3));
+ // Current working directory may not be where exe is.
+ // EXPECT_LT(static_cast<int64>(free1 * .9), free3);
+ // EXPECT_LT(free3, static_cast<int64>(free1 * 1.1));
+ EXPECT_GT(free3, 0);
+}
+
+// Tests that GetCurrentDirectory() returns something.
+TEST(FilesystemTest, TestGetCurrentDirectory) {
+ EXPECT_FALSE(Filesystem::GetCurrentDirectory().empty());
+}
+
+// Tests that GetAppPathname returns something.
+TEST(FilesystemTest, TestGetAppPathname) {
+ Pathname path;
+ EXPECT_TRUE(Filesystem::GetAppPathname(&path));
+ EXPECT_FALSE(path.empty());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/firewallsocketserver.cc b/chromium/third_party/webrtc/base/firewallsocketserver.cc
new file mode 100644
index 00000000000..31c18d9817a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/firewallsocketserver.cc
@@ -0,0 +1,239 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/firewallsocketserver.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+class FirewallSocket : public AsyncSocketAdapter {
+ public:
+ FirewallSocket(FirewallSocketServer* server, AsyncSocket* socket, int type)
+ : AsyncSocketAdapter(socket), server_(server), type_(type) {
+ }
+
+ virtual int Connect(const SocketAddress& addr) {
+ if (type_ == SOCK_STREAM) {
+ if (!server_->Check(FP_TCP, GetLocalAddress(), addr)) {
+ LOG(LS_VERBOSE) << "FirewallSocket outbound TCP connection from "
+ << GetLocalAddress().ToSensitiveString() << " to "
+ << addr.ToSensitiveString() << " denied";
+ // TODO: Handle this asynchronously.
+ SetError(EHOSTUNREACH);
+ return SOCKET_ERROR;
+ }
+ }
+ return AsyncSocketAdapter::Connect(addr);
+ }
+ virtual int Send(const void* pv, size_t cb) {
+ return SendTo(pv, cb, GetRemoteAddress());
+ }
+ virtual int SendTo(const void* pv, size_t cb, const SocketAddress& addr) {
+ if (type_ == SOCK_DGRAM) {
+ if (!server_->Check(FP_UDP, GetLocalAddress(), addr)) {
+ LOG(LS_VERBOSE) << "FirewallSocket outbound UDP packet from "
+ << GetLocalAddress().ToSensitiveString() << " to "
+ << addr.ToSensitiveString() << " dropped";
+ return static_cast<int>(cb);
+ }
+ }
+ return AsyncSocketAdapter::SendTo(pv, cb, addr);
+ }
+ virtual int Recv(void* pv, size_t cb) {
+ SocketAddress addr;
+ return RecvFrom(pv, cb, &addr);
+ }
+ virtual int RecvFrom(void* pv, size_t cb, SocketAddress* paddr) {
+ if (type_ == SOCK_DGRAM) {
+ while (true) {
+ int res = AsyncSocketAdapter::RecvFrom(pv, cb, paddr);
+ if (res <= 0)
+ return res;
+ if (server_->Check(FP_UDP, *paddr, GetLocalAddress()))
+ return res;
+ LOG(LS_VERBOSE) << "FirewallSocket inbound UDP packet from "
+ << paddr->ToSensitiveString() << " to "
+ << GetLocalAddress().ToSensitiveString() << " dropped";
+ }
+ }
+ return AsyncSocketAdapter::RecvFrom(pv, cb, paddr);
+ }
+
+ virtual int Listen(int backlog) {
+ if (!server_->tcp_listen_enabled()) {
+ LOG(LS_VERBOSE) << "FirewallSocket listen attempt denied";
+ return -1;
+ }
+
+ return AsyncSocketAdapter::Listen(backlog);
+ }
+ virtual AsyncSocket* Accept(SocketAddress* paddr) {
+ SocketAddress addr;
+ while (AsyncSocket* sock = AsyncSocketAdapter::Accept(&addr)) {
+ if (server_->Check(FP_TCP, addr, GetLocalAddress())) {
+ if (paddr)
+ *paddr = addr;
+ return sock;
+ }
+ sock->Close();
+ delete sock;
+ LOG(LS_VERBOSE) << "FirewallSocket inbound TCP connection from "
+ << addr.ToSensitiveString() << " to "
+ << GetLocalAddress().ToSensitiveString() << " denied";
+ }
+ return 0;
+ }
+
+ private:
+ FirewallSocketServer* server_;
+ int type_;
+};
+
+FirewallSocketServer::FirewallSocketServer(SocketServer* server,
+ FirewallManager* manager,
+ bool should_delete_server)
+ : server_(server), manager_(manager),
+ should_delete_server_(should_delete_server),
+ udp_sockets_enabled_(true), tcp_sockets_enabled_(true),
+ tcp_listen_enabled_(true) {
+ if (manager_)
+ manager_->AddServer(this);
+}
+
+FirewallSocketServer::~FirewallSocketServer() {
+ if (manager_)
+ manager_->RemoveServer(this);
+
+ if (server_ && should_delete_server_) {
+ delete server_;
+ server_ = NULL;
+ }
+}
+
+void FirewallSocketServer::AddRule(bool allow, FirewallProtocol p,
+ FirewallDirection d,
+ const SocketAddress& addr) {
+ SocketAddress src, dst;
+ if (d == FD_IN) {
+ dst = addr;
+ } else {
+ src = addr;
+ }
+ AddRule(allow, p, src, dst);
+}
+
+
+void FirewallSocketServer::AddRule(bool allow, FirewallProtocol p,
+ const SocketAddress& src,
+ const SocketAddress& dst) {
+ Rule r;
+ r.allow = allow;
+ r.p = p;
+ r.src = src;
+ r.dst = dst;
+ CritScope scope(&crit_);
+ rules_.push_back(r);
+}
+
+void FirewallSocketServer::ClearRules() {
+ CritScope scope(&crit_);
+ rules_.clear();
+}
+
+bool FirewallSocketServer::Check(FirewallProtocol p,
+ const SocketAddress& src,
+ const SocketAddress& dst) {
+ CritScope scope(&crit_);
+ for (size_t i = 0; i < rules_.size(); ++i) {
+ const Rule& r = rules_[i];
+ if ((r.p != p) && (r.p != FP_ANY))
+ continue;
+ if ((r.src.ipaddr() != src.ipaddr()) && !r.src.IsNil())
+ continue;
+ if ((r.src.port() != src.port()) && (r.src.port() != 0))
+ continue;
+ if ((r.dst.ipaddr() != dst.ipaddr()) && !r.dst.IsNil())
+ continue;
+ if ((r.dst.port() != dst.port()) && (r.dst.port() != 0))
+ continue;
+ return r.allow;
+ }
+ return true;
+}
+
+Socket* FirewallSocketServer::CreateSocket(int type) {
+ return CreateSocket(AF_INET, type);
+}
+
+Socket* FirewallSocketServer::CreateSocket(int family, int type) {
+ return WrapSocket(server_->CreateAsyncSocket(family, type), type);
+}
+
+AsyncSocket* FirewallSocketServer::CreateAsyncSocket(int type) {
+ return CreateAsyncSocket(AF_INET, type);
+}
+
+AsyncSocket* FirewallSocketServer::CreateAsyncSocket(int family, int type) {
+ return WrapSocket(server_->CreateAsyncSocket(family, type), type);
+}
+
+AsyncSocket* FirewallSocketServer::WrapSocket(AsyncSocket* sock, int type) {
+ if (!sock ||
+ (type == SOCK_STREAM && !tcp_sockets_enabled_) ||
+ (type == SOCK_DGRAM && !udp_sockets_enabled_)) {
+ LOG(LS_VERBOSE) << "FirewallSocketServer socket creation denied";
+ delete sock;
+ return NULL;
+ }
+ return new FirewallSocket(this, sock, type);
+}
+
+FirewallManager::FirewallManager() {
+}
+
+FirewallManager::~FirewallManager() {
+ assert(servers_.empty());
+}
+
+void FirewallManager::AddServer(FirewallSocketServer* server) {
+ CritScope scope(&crit_);
+ servers_.push_back(server);
+}
+
+void FirewallManager::RemoveServer(FirewallSocketServer* server) {
+ CritScope scope(&crit_);
+ servers_.erase(std::remove(servers_.begin(), servers_.end(), server),
+ servers_.end());
+}
+
+void FirewallManager::AddRule(bool allow, FirewallProtocol p,
+ FirewallDirection d, const SocketAddress& addr) {
+ CritScope scope(&crit_);
+ for (std::vector<FirewallSocketServer*>::const_iterator it =
+ servers_.begin(); it != servers_.end(); ++it) {
+ (*it)->AddRule(allow, p, d, addr);
+ }
+}
+
+void FirewallManager::ClearRules() {
+ CritScope scope(&crit_);
+ for (std::vector<FirewallSocketServer*>::const_iterator it =
+ servers_.begin(); it != servers_.end(); ++it) {
+ (*it)->ClearRules();
+ }
+}
+
+} // namespace rtc
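
A rough usage sketch of the socket gating implemented above (hypothetical code, not part of this change; it assumes rtc::PhysicalSocketServer as the wrapped server): when a socket type is disabled, WrapSocket() deletes the underlying socket and creation returns NULL, which callers must handle.

#include "webrtc/base/firewallsocketserver.h"
#include "webrtc/base/physicalsocketserver.h"

void SketchSocketGating() {
  rtc::PhysicalSocketServer physical;
  rtc::FirewallSocketServer firewall(&physical);

  firewall.set_udp_sockets_enabled(false);
  rtc::AsyncSocket* udp = firewall.CreateAsyncSocket(AF_INET, SOCK_DGRAM);
  // udp == NULL: creation was denied and the wrapped socket was deleted.

  rtc::AsyncSocket* tcp = firewall.CreateAsyncSocket(AF_INET, SOCK_STREAM);
  // tcp is a FirewallSocket wrapper; Connect()/Send()/Accept() consult the rules.
  delete tcp;
}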
diff --git a/chromium/third_party/webrtc/base/firewallsocketserver.h b/chromium/third_party/webrtc/base/firewallsocketserver.h
new file mode 100644
index 00000000000..500b7397d10
--- /dev/null
+++ b/chromium/third_party/webrtc/base/firewallsocketserver.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_FIREWALLSOCKETSERVER_H_
+#define WEBRTC_BASE_FIREWALLSOCKETSERVER_H_
+
+#include <vector>
+#include "webrtc/base/socketserver.h"
+#include "webrtc/base/criticalsection.h"
+
+namespace rtc {
+
+class FirewallManager;
+
+// This SocketServer shim simulates a rule-based firewall server.
+
+enum FirewallProtocol { FP_UDP, FP_TCP, FP_ANY };
+enum FirewallDirection { FD_IN, FD_OUT, FD_ANY };
+
+class FirewallSocketServer : public SocketServer {
+ public:
+ FirewallSocketServer(SocketServer * server,
+ FirewallManager * manager = NULL,
+ bool should_delete_server = false);
+ virtual ~FirewallSocketServer();
+
+ SocketServer* socketserver() const { return server_; }
+ void set_socketserver(SocketServer* server) {
+ if (server_ && should_delete_server_) {
+ delete server_;
+ server_ = NULL;
+ should_delete_server_ = false;
+ }
+ server_ = server;
+ }
+
+ // Settings to control whether CreateSocket or Socket::Listen succeed.
+ void set_udp_sockets_enabled(bool enabled) { udp_sockets_enabled_ = enabled; }
+ void set_tcp_sockets_enabled(bool enabled) { tcp_sockets_enabled_ = enabled; }
+ bool tcp_listen_enabled() const { return tcp_listen_enabled_; }
+ void set_tcp_listen_enabled(bool enabled) { tcp_listen_enabled_ = enabled; }
+
+ // Rules govern the behavior of Connect/Accept/Send/Recv attempts.
+ void AddRule(bool allow, FirewallProtocol p = FP_ANY,
+ FirewallDirection d = FD_ANY,
+ const SocketAddress& addr = SocketAddress());
+ void AddRule(bool allow, FirewallProtocol p,
+ const SocketAddress& src, const SocketAddress& dst);
+ void ClearRules();
+
+ bool Check(FirewallProtocol p,
+ const SocketAddress& src, const SocketAddress& dst);
+
+ virtual Socket* CreateSocket(int type);
+ virtual Socket* CreateSocket(int family, int type);
+
+ virtual AsyncSocket* CreateAsyncSocket(int type);
+ virtual AsyncSocket* CreateAsyncSocket(int family, int type);
+
+ virtual void SetMessageQueue(MessageQueue* queue) {
+ server_->SetMessageQueue(queue);
+ }
+ virtual bool Wait(int cms, bool process_io) {
+ return server_->Wait(cms, process_io);
+ }
+ virtual void WakeUp() {
+ return server_->WakeUp();
+ }
+
+ Socket * WrapSocket(Socket * sock, int type);
+ AsyncSocket * WrapSocket(AsyncSocket * sock, int type);
+
+ private:
+ SocketServer * server_;
+ FirewallManager * manager_;
+ CriticalSection crit_;
+ struct Rule {
+ bool allow;
+ FirewallProtocol p;
+ FirewallDirection d;
+ SocketAddress src;
+ SocketAddress dst;
+ };
+ std::vector<Rule> rules_;
+ bool should_delete_server_;
+ bool udp_sockets_enabled_;
+ bool tcp_sockets_enabled_;
+ bool tcp_listen_enabled_;
+};
+
+// FirewallManager manages a group of FirewallSocketServers together, and may
+// be used from multiple threads.
+
+class FirewallManager {
+ public:
+ FirewallManager();
+ ~FirewallManager();
+
+ void AddServer(FirewallSocketServer * server);
+ void RemoveServer(FirewallSocketServer * server);
+
+ void AddRule(bool allow, FirewallProtocol p = FP_ANY,
+ FirewallDirection d = FD_ANY,
+ const SocketAddress& addr = SocketAddress());
+ void ClearRules();
+
+ private:
+ CriticalSection crit_;
+ std::vector<FirewallSocketServer *> servers_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_FIREWALLSOCKETSERVER_H_
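
A small sketch of the rule API declared above (hypothetical code, not part of this change; it assumes rtc::PhysicalSocketServer as the wrapped server): rules are evaluated in the order they were added, the first matching rule wins, and traffic that matches no rule is allowed.

#include "webrtc/base/firewallsocketserver.h"
#include "webrtc/base/physicalsocketserver.h"
#include "webrtc/base/socketaddress.h"

void SketchFirewallRules() {
  rtc::PhysicalSocketServer physical;
  rtc::FirewallSocketServer firewall(&physical);

  // Deny UDP from any source to 192.168.1.1:3478; a nil address or zero port
  // in a rule acts as a wildcard when the rule is matched.
  firewall.AddRule(false, rtc::FP_UDP,
                   rtc::SocketAddress(),                      // any source
                   rtc::SocketAddress("192.168.1.1", 3478));  // this destination

  bool allowed = firewall.Check(rtc::FP_UDP,
                                rtc::SocketAddress("10.0.0.1", 4000),
                                rtc::SocketAddress("192.168.1.1", 3478));
  // allowed == false (the rule above matched). The same check for FP_TCP would
  // return true, because no rule matches and the default is to allow.
  (void)allowed;
}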
diff --git a/chromium/third_party/webrtc/base/flags.cc b/chromium/third_party/webrtc/base/flags.cc
new file mode 100644
index 00000000000..fe7a334a920
--- /dev/null
+++ b/chromium/third_party/webrtc/base/flags.cc
@@ -0,0 +1,299 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#include <shellapi.h>
+#endif
+
+#include "webrtc/base/flags.h"
+
+namespace rtc {
+// -----------------------------------------------------------------------------
+// Implementation of Flag
+
+Flag::Flag(const char* file, const char* name, const char* comment,
+ Type type, void* variable, FlagValue default__)
+ : file_(file),
+ name_(name),
+ comment_(comment),
+ type_(type),
+ variable_(reinterpret_cast<FlagValue*>(variable)),
+ default_(default__) {
+ FlagList::Register(this);
+}
+
+
+void Flag::SetToDefault() {
+ // Note that we cannot simply do '*variable_ = default_;' since
+ // flag variables are not really of type FlagValue and thus may
+  // be smaller! The FlagValue union is simply 'overlaid' on top
+  // of a flag variable for convenient access. Since union members
+  // are guaranteed to be aligned at the beginning, this works.
+ switch (type_) {
+ case Flag::BOOL:
+ variable_->b = default_.b;
+ return;
+ case Flag::INT:
+ variable_->i = default_.i;
+ return;
+ case Flag::FLOAT:
+ variable_->f = default_.f;
+ return;
+ case Flag::STRING:
+ variable_->s = default_.s;
+ return;
+ }
+ UNREACHABLE();
+}
+
+
+static const char* Type2String(Flag::Type type) {
+ switch (type) {
+ case Flag::BOOL: return "bool";
+ case Flag::INT: return "int";
+ case Flag::FLOAT: return "float";
+ case Flag::STRING: return "string";
+ }
+ UNREACHABLE();
+ return NULL;
+}
+
+
+static void PrintFlagValue(Flag::Type type, FlagValue* p) {
+ switch (type) {
+ case Flag::BOOL:
+ printf("%s", (p->b ? "true" : "false"));
+ return;
+ case Flag::INT:
+ printf("%d", p->i);
+ return;
+ case Flag::FLOAT:
+ printf("%f", p->f);
+ return;
+ case Flag::STRING:
+ printf("%s", p->s);
+ return;
+ }
+ UNREACHABLE();
+}
+
+
+void Flag::Print(bool print_current_value) {
+ printf(" --%s (%s) type: %s default: ", name_, comment_,
+ Type2String(type_));
+ PrintFlagValue(type_, &default_);
+ if (print_current_value) {
+ printf(" current value: ");
+ PrintFlagValue(type_, variable_);
+ }
+ printf("\n");
+}
+
+
+// -----------------------------------------------------------------------------
+// Implementation of FlagList
+
+Flag* FlagList::list_ = NULL;
+
+
+FlagList::FlagList() {
+ list_ = NULL;
+}
+
+void FlagList::Print(const char* file, bool print_current_value) {
+  // Since flags are generally registered file by file (i.e. per C++ file),
+  // the output comes out grouped without having to sort by file.
+ const char* current = NULL;
+ for (Flag* f = list_; f != NULL; f = f->next()) {
+ if (file == NULL || file == f->file()) {
+ if (current != f->file()) {
+ printf("Flags from %s:\n", f->file());
+ current = f->file();
+ }
+ f->Print(print_current_value);
+ }
+ }
+}
+
+
+Flag* FlagList::Lookup(const char* name) {
+ Flag* f = list_;
+ while (f != NULL && strcmp(name, f->name()) != 0)
+ f = f->next();
+ return f;
+}
+
+
+void FlagList::SplitArgument(const char* arg,
+ char* buffer, int buffer_size,
+ const char** name, const char** value,
+ bool* is_bool) {
+ *name = NULL;
+ *value = NULL;
+ *is_bool = false;
+
+ if (*arg == '-') {
+    // find the beginning of the flag name
+ arg++; // remove 1st '-'
+ if (*arg == '-')
+ arg++; // remove 2nd '-'
+ if (arg[0] == 'n' && arg[1] == 'o') {
+ arg += 2; // remove "no"
+ *is_bool = true;
+ }
+ *name = arg;
+
+ // find the end of the flag name
+ while (*arg != '\0' && *arg != '=')
+ arg++;
+
+ // get the value if any
+ if (*arg == '=') {
+ // make a copy so we can NUL-terminate flag name
+ int n = static_cast<int>(arg - *name);
+ if (n >= buffer_size)
+ Fatal(__FILE__, __LINE__, "CHECK(%s) failed", "n < buffer_size");
+ memcpy(buffer, *name, n * sizeof(char));
+ buffer[n] = '\0';
+ *name = buffer;
+ // get the value
+ *value = arg + 1;
+ }
+ }
+}
+
+
+int FlagList::SetFlagsFromCommandLine(int* argc, const char** argv,
+ bool remove_flags) {
+ // parse arguments
+ for (int i = 1; i < *argc; /* see below */) {
+ int j = i; // j > 0
+ const char* arg = argv[i++];
+
+ // split arg into flag components
+ char buffer[1024];
+ const char* name;
+ const char* value;
+ bool is_bool;
+ SplitArgument(arg, buffer, sizeof buffer, &name, &value, &is_bool);
+
+ if (name != NULL) {
+ // lookup the flag
+ Flag* flag = Lookup(name);
+ if (flag == NULL) {
+ fprintf(stderr, "Error: unrecognized flag %s\n", arg);
+ return j;
+ }
+
+ // if we still need a flag value, use the next argument if available
+ if (flag->type() != Flag::BOOL && value == NULL) {
+ if (i < *argc) {
+ value = argv[i++];
+ } else {
+ fprintf(stderr, "Error: missing value for flag %s of type %s\n",
+ arg, Type2String(flag->type()));
+ return j;
+ }
+ }
+
+ // set the flag
+ char empty[] = { '\0' };
+ char* endp = empty;
+ switch (flag->type()) {
+ case Flag::BOOL:
+ *flag->bool_variable() = !is_bool;
+ break;
+ case Flag::INT:
+ *flag->int_variable() = strtol(value, &endp, 10);
+ break;
+ case Flag::FLOAT:
+ *flag->float_variable() = strtod(value, &endp);
+ break;
+ case Flag::STRING:
+ *flag->string_variable() = value;
+ break;
+ }
+
+ // handle errors
+ if ((flag->type() == Flag::BOOL && value != NULL) ||
+ (flag->type() != Flag::BOOL && is_bool) ||
+ *endp != '\0') {
+ fprintf(stderr, "Error: illegal value for flag %s of type %s\n",
+ arg, Type2String(flag->type()));
+ return j;
+ }
+
+ // remove the flag & value from the command
+ if (remove_flags)
+ while (j < i)
+ argv[j++] = NULL;
+ }
+ }
+
+ // shrink the argument list
+ if (remove_flags) {
+ int j = 1;
+ for (int i = 1; i < *argc; i++) {
+ if (argv[i] != NULL)
+ argv[j++] = argv[i];
+ }
+ *argc = j;
+ }
+
+ // parsed all flags successfully
+ return 0;
+}
+
+void FlagList::Register(Flag* flag) {
+ assert(flag != NULL && strlen(flag->name()) > 0);
+ if (Lookup(flag->name()) != NULL)
+ Fatal(flag->file(), 0, "flag %s declared twice", flag->name());
+ flag->next_ = list_;
+ list_ = flag;
+}
+
+#if defined(WEBRTC_WIN)
+WindowsCommandLineArguments::WindowsCommandLineArguments() {
+ // start by getting the command line.
+ LPTSTR command_line = ::GetCommandLine();
+ // now, convert it to a list of wide char strings.
+ LPWSTR *wide_argv = ::CommandLineToArgvW(command_line, &argc_);
+ // now allocate an array big enough to hold that many string pointers.
+ argv_ = new char*[argc_];
+
+ // iterate over the returned wide strings;
+ for(int i = 0; i < argc_; ++i) {
+ std::string s = rtc::ToUtf8(wide_argv[i], wcslen(wide_argv[i]));
+ char *buffer = new char[s.length() + 1];
+ rtc::strcpyn(buffer, s.length() + 1, s.c_str());
+
+ // make sure the argv array has the right string at this point.
+ argv_[i] = buffer;
+ }
+ LocalFree(wide_argv);
+}
+
+WindowsCommandLineArguments::~WindowsCommandLineArguments() {
+ // need to free each string in the array, and then the array.
+ for(int i = 0; i < argc_; i++) {
+ delete[] argv_[i];
+ }
+
+ delete[] argv_;
+}
+#endif // WEBRTC_WIN
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/flags.h b/chromium/third_party/webrtc/base/flags.h
new file mode 100644
index 00000000000..5cff1cc365f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/flags.h
@@ -0,0 +1,270 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// Originally comes from shared/commandlineflags/flags.h
+
+// Flags are defined and declared using DEFINE_xxx and DECLARE_xxx macros,
+// where xxx is the flag type. Flags are referred to via FLAG_yyy,
+// where yyy is the flag name. For initialization and iteration of flags,
+// see the FlagList class. For full programmatic access to any
+// flag, see the Flag class.
+//
+// The implementation relies only on basic C++ functionality
+// and needs no special library or STL support.
+
+#ifndef WEBRTC_BASE_FLAGS_H__
+#define WEBRTC_BASE_FLAGS_H__
+
+#include <assert.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/common.h"
+
+namespace rtc {
+
+// Internal use only.
+union FlagValue {
+ // Note: Because in C++ non-bool values are silently converted into
+ // bool values ('bool b = "false";' results in b == true!), we pass
+  // an int argument to New_BOOL as this appears to be safer - sigh.
+ // In particular, it prevents the (not uncommon!) bug where a bool
+ // flag is defined via: DEFINE_bool(flag, "false", "some comment");.
+ static FlagValue New_BOOL(int b) {
+ FlagValue v;
+ v.b = (b != 0);
+ return v;
+ }
+
+ static FlagValue New_INT(int i) {
+ FlagValue v;
+ v.i = i;
+ return v;
+ }
+
+ static FlagValue New_FLOAT(float f) {
+ FlagValue v;
+ v.f = f;
+ return v;
+ }
+
+ static FlagValue New_STRING(const char* s) {
+ FlagValue v;
+ v.s = s;
+ return v;
+ }
+
+ bool b;
+ int i;
+ double f;
+ const char* s;
+};
+
+
+// Each flag can be accessed programmatically via a Flag object.
+class Flag {
+ public:
+ enum Type { BOOL, INT, FLOAT, STRING };
+
+ // Internal use only.
+ Flag(const char* file, const char* name, const char* comment,
+ Type type, void* variable, FlagValue default_);
+
+ // General flag information
+ const char* file() const { return file_; }
+ const char* name() const { return name_; }
+ const char* comment() const { return comment_; }
+
+ // Flag type
+ Type type() const { return type_; }
+
+ // Flag variables
+ bool* bool_variable() const {
+ assert(type_ == BOOL);
+ return &variable_->b;
+ }
+
+ int* int_variable() const {
+ assert(type_ == INT);
+ return &variable_->i;
+ }
+
+ double* float_variable() const {
+ assert(type_ == FLOAT);
+ return &variable_->f;
+ }
+
+ const char** string_variable() const {
+ assert(type_ == STRING);
+ return &variable_->s;
+ }
+
+ // Default values
+ bool bool_default() const {
+ assert(type_ == BOOL);
+ return default_.b;
+ }
+
+ int int_default() const {
+ assert(type_ == INT);
+ return default_.i;
+ }
+
+ double float_default() const {
+ assert(type_ == FLOAT);
+ return default_.f;
+ }
+
+ const char* string_default() const {
+ assert(type_ == STRING);
+ return default_.s;
+ }
+
+ // Resets a flag to its default value
+ void SetToDefault();
+
+ // Iteration support
+ Flag* next() const { return next_; }
+
+ // Prints flag information. The current flag value is only printed
+ // if print_current_value is set.
+ void Print(bool print_current_value);
+
+ private:
+ const char* file_;
+ const char* name_;
+ const char* comment_;
+
+ Type type_;
+ FlagValue* variable_;
+ FlagValue default_;
+
+ Flag* next_;
+
+ friend class FlagList; // accesses next_
+};
+
+
+// Internal use only.
+#define DEFINE_FLAG(type, c_type, name, default, comment) \
+ /* define and initialize the flag */ \
+ c_type FLAG_##name = (default); \
+ /* register the flag */ \
+ static rtc::Flag Flag_##name(__FILE__, #name, (comment), \
+ rtc::Flag::type, &FLAG_##name, \
+ rtc::FlagValue::New_##type(default))
+
+
+// Internal use only.
+#define DECLARE_FLAG(c_type, name) \
+ /* declare the external flag */ \
+ extern c_type FLAG_##name
+
+
+// Use the following macros to define a new flag:
+#define DEFINE_bool(name, default, comment) \
+ DEFINE_FLAG(BOOL, bool, name, default, comment)
+#define DEFINE_int(name, default, comment) \
+ DEFINE_FLAG(INT, int, name, default, comment)
+#define DEFINE_float(name, default, comment) \
+ DEFINE_FLAG(FLOAT, double, name, default, comment)
+#define DEFINE_string(name, default, comment) \
+ DEFINE_FLAG(STRING, const char*, name, default, comment)
+
+
+// Use the following macros to declare a flag defined elsewhere:
+#define DECLARE_bool(name) DECLARE_FLAG(bool, name)
+#define DECLARE_int(name) DECLARE_FLAG(int, name)
+#define DECLARE_float(name) DECLARE_FLAG(double, name)
+#define DECLARE_string(name) DECLARE_FLAG(const char*, name)
+
+
+// The global list of all flags.
+class FlagList {
+ public:
+ FlagList();
+
+ // The NULL-terminated list of all flags. Traverse with Flag::next().
+ static Flag* list() { return list_; }
+
+ // If file != NULL, prints information for all flags defined in file;
+ // otherwise prints information for all flags in all files. The current
+ // flag value is only printed if print_current_value is set.
+ static void Print(const char* file, bool print_current_value);
+
+ // Lookup a flag by name. Returns the matching flag or NULL.
+ static Flag* Lookup(const char* name);
+
+ // Helper function to parse flags: Takes an argument arg and splits it into
+ // a flag name and flag value (or NULL if they are missing). is_bool is set
+ // if the arg started with "-no" or "--no". The buffer may be used to NUL-
+  // terminate the name; it must be large enough to hold any possible name.
+ static void SplitArgument(const char* arg,
+ char* buffer, int buffer_size,
+ const char** name, const char** value,
+ bool* is_bool);
+
+ // Set the flag values by parsing the command line. If remove_flags
+ // is set, the flags and associated values are removed from (argc,
+ // argv). Returns 0 if no error occurred. Otherwise, returns the
+ // argv index > 0 for the argument where an error occurred. In that
+  // case, (argc, argv) will remain unchanged independent of the
+ // remove_flags value, and no assumptions about flag settings should
+ // be made.
+ //
+ // The following syntax for flags is accepted (both '-' and '--' are ok):
+ //
+ // --flag (bool flags only)
+ // --noflag (bool flags only)
+ // --flag=value (non-bool flags only, no spaces around '=')
+ // --flag value (non-bool flags only)
+ static int SetFlagsFromCommandLine(int* argc,
+ const char** argv,
+ bool remove_flags);
+ static inline int SetFlagsFromCommandLine(int* argc,
+ char** argv,
+ bool remove_flags) {
+ return SetFlagsFromCommandLine(argc, const_cast<const char**>(argv),
+ remove_flags);
+ }
+
+ // Registers a new flag. Called during program initialization. Not
+ // thread-safe.
+ static void Register(Flag* flag);
+
+ private:
+ static Flag* list_;
+};
+
+#if defined(WEBRTC_WIN)
+// A helper class to translate Windows command line arguments into UTF8,
+// which then allows us to just pass them to the flags system.
+// This encapsulates all the work of getting the command line and translating
+// it to an array of 8-bit strings; all you have to do is create one of these,
+// and then call argc() and argv().
+class WindowsCommandLineArguments {
+ public:
+ WindowsCommandLineArguments();
+ ~WindowsCommandLineArguments();
+
+ int argc() { return argc_; }
+ char **argv() { return argv_; }
+ private:
+ int argc_;
+ char **argv_;
+
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(WindowsCommandLineArguments);
+};
+#endif // WEBRTC_WIN
+
+} // namespace rtc
+
+#endif  // WEBRTC_BASE_FLAGS_H__
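
A minimal sketch of how the macros above are typically wired together (the flag names here are invented for illustration and are not part of this change): a flag is defined once at file scope, read through its FLAG_ variable, and populated from argv by FlagList::SetFlagsFromCommandLine().

#include <stdio.h>

#include "webrtc/base/flags.h"

// Creates FLAG_port / FLAG_verbose and registers them with FlagList.
DEFINE_int(port, 8080, "port to listen on");
DEFINE_bool(verbose, false, "enable verbose logging");

int main(int argc, char* argv[]) {
  // Accepts --port=9000, --port 9000, --verbose and --noverbose; returns 0 on
  // success or the index of the argument that failed to parse.
  if (rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true) != 0) {
    rtc::FlagList::Print(NULL, false);
    return 1;
  }
  printf("port=%d verbose=%d\n", FLAG_port, FLAG_verbose ? 1 : 0);
  return 0;
}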
diff --git a/chromium/third_party/webrtc/base/gunit.h b/chromium/third_party/webrtc/base/gunit.h
new file mode 100644
index 00000000000..6d9c06fef0a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/gunit.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_GUNIT_H_
+#define WEBRTC_BASE_GUNIT_H_
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+#if defined(WEBRTC_ANDROID) || defined(GTEST_RELATIVE_PATH)
+#include "gtest/gtest.h"
+#else
+#include "testing/base/public/gunit.h"
+#endif
+
+// Wait until "ex" is true, or "timeout" expires.
+#define WAIT(ex, timeout) \
+ for (uint32 start = rtc::Time(); \
+ !(ex) && rtc::Time() < start + timeout;) \
+ rtc::Thread::Current()->ProcessMessages(1);
+
+// This returns the result of the test in res, so that we don't re-evaluate
+// the expression in the XXXX_WAIT macros below, since that causes problems
+// when the expression is only true the first time you check it.
+#define WAIT_(ex, timeout, res) \
+ do { \
+ uint32 start = rtc::Time(); \
+ res = (ex); \
+ while (!res && rtc::Time() < start + timeout) { \
+ rtc::Thread::Current()->ProcessMessages(1); \
+ res = (ex); \
+ } \
+ } while (0);
+
+// The typical EXPECT_XXXX and ASSERT_XXXXs, but done until true or a timeout.
+#define EXPECT_TRUE_WAIT(ex, timeout) \
+ do { \
+ bool res; \
+ WAIT_(ex, timeout, res); \
+ if (!res) EXPECT_TRUE(ex); \
+ } while (0);
+
+#define EXPECT_EQ_WAIT(v1, v2, timeout) \
+ do { \
+ bool res; \
+ WAIT_(v1 == v2, timeout, res); \
+ if (!res) EXPECT_EQ(v1, v2); \
+ } while (0);
+
+#define ASSERT_TRUE_WAIT(ex, timeout) \
+ do { \
+ bool res; \
+ WAIT_(ex, timeout, res); \
+ if (!res) ASSERT_TRUE(ex); \
+ } while (0);
+
+#define ASSERT_EQ_WAIT(v1, v2, timeout) \
+ do { \
+ bool res; \
+ WAIT_(v1 == v2, timeout, res); \
+ if (!res) ASSERT_EQ(v1, v2); \
+ } while (0);
+
+// Version with a "soft" timeout and a margin. This logs if the timeout is
+// exceeded, but it only fails if the expression still isn't true after the
+// margin time passes.
+#define EXPECT_TRUE_WAIT_MARGIN(ex, timeout, margin) \
+ do { \
+ bool res; \
+ WAIT_(ex, timeout, res); \
+ if (res) { \
+ break; \
+ } \
+ LOG(LS_WARNING) << "Expression " << #ex << " still not true after " << \
+ timeout << "ms; waiting an additional " << margin << "ms"; \
+ WAIT_(ex, margin, res); \
+ if (!res) { \
+ EXPECT_TRUE(ex); \
+ } \
+ } while (0);
+
+#endif // WEBRTC_BASE_GUNIT_H_
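
As a rough illustration of the wait macros above (a hypothetical, self-contained test, not part of this change): the condition is re-evaluated while the current rtc::Thread pumps messages, and the plain EXPECT/ASSERT fires only if the timeout elapses with the condition still false.

#include "webrtc/base/gunit.h"
#include "webrtc/base/timeutils.h"

TEST(WaitMacrosExample, BecomesTrueBeforeTimeout) {
  const uint32 kTimeoutMs = 200;
  uint32 deadline = rtc::Time() + 50;
  // Re-polled while rtc::Thread::Current() processes messages; becomes true
  // roughly 50 ms in, well before the 200 ms timeout, so the test passes.
  EXPECT_TRUE_WAIT(rtc::Time() >= deadline, kTimeoutMs);
  // EXPECT_EQ_WAIT / ASSERT_TRUE_WAIT follow the same pattern, re-evaluating
  // the comparison on every iteration.
}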
diff --git a/chromium/third_party/webrtc/base/gunit_prod.h b/chromium/third_party/webrtc/base/gunit_prod.h
new file mode 100644
index 00000000000..dc39bbd0eb8
--- /dev/null
+++ b/chromium/third_party/webrtc/base/gunit_prod.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_GUNIT_PROD_H_
+#define WEBRTC_BASE_GUNIT_PROD_H_
+
+#if defined(WEBRTC_ANDROID)
+// Android doesn't use gtest at all, so anything that relies on gtest should
+// check this define first.
+#define NO_GTEST
+#elif defined (GTEST_RELATIVE_PATH)
+#include "gtest/gtest_prod.h"
+#else
+#include "testing/base/gunit_prod.h"
+#endif
+
+#endif // WEBRTC_BASE_GUNIT_PROD_H_
diff --git a/chromium/third_party/webrtc/base/helpers.cc b/chromium/third_party/webrtc/base/helpers.cc
new file mode 100644
index 00000000000..8b14cdfd6ba
--- /dev/null
+++ b/chromium/third_party/webrtc/base/helpers.cc
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/helpers.h"
+
+#include <limits>
+
+#if defined(FEATURE_ENABLE_SSL)
+#include "webrtc/base/sslconfig.h"
+#if defined(SSL_USE_OPENSSL)
+#include <openssl/rand.h>
+#elif defined(SSL_USE_NSS_RNG)
+#include "pk11func.h"
+#else
+#if defined(WEBRTC_WIN)
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <ntsecapi.h>
+#endif // WEBRTC_WIN
+#endif // else
+#endif  // FEATURE_ENABLE_SSL
+
+#include "webrtc/base/base64.h"
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/timeutils.h"
+
+// Protect against max macro inclusion.
+#undef max
+
+namespace rtc {
+
+// Base class for RNG implementations.
+class RandomGenerator {
+ public:
+ virtual ~RandomGenerator() {}
+ virtual bool Init(const void* seed, size_t len) = 0;
+ virtual bool Generate(void* buf, size_t len) = 0;
+};
+
+#if defined(SSL_USE_OPENSSL)
+// The OpenSSL RNG. Need to make sure it doesn't run out of entropy.
+class SecureRandomGenerator : public RandomGenerator {
+ public:
+ SecureRandomGenerator() : inited_(false) {
+ }
+ ~SecureRandomGenerator() {
+ }
+ virtual bool Init(const void* seed, size_t len) {
+ // By default, seed from the system state.
+ if (!inited_) {
+ if (RAND_poll() <= 0) {
+ return false;
+ }
+ inited_ = true;
+ }
+ // Allow app data to be mixed in, if provided.
+ if (seed) {
+ RAND_seed(seed, len);
+ }
+ return true;
+ }
+ virtual bool Generate(void* buf, size_t len) {
+ if (!inited_ && !Init(NULL, 0)) {
+ return false;
+ }
+ return (RAND_bytes(reinterpret_cast<unsigned char*>(buf), len) > 0);
+ }
+
+ private:
+ bool inited_;
+};
+
+#elif defined(SSL_USE_NSS_RNG)
+// The NSS RNG.
+class SecureRandomGenerator : public RandomGenerator {
+ public:
+ SecureRandomGenerator() {}
+ ~SecureRandomGenerator() {}
+ virtual bool Init(const void* seed, size_t len) {
+ return true;
+ }
+ virtual bool Generate(void* buf, size_t len) {
+ return (PK11_GenerateRandom(reinterpret_cast<unsigned char*>(buf),
+ static_cast<int>(len)) == SECSuccess);
+ }
+};
+
+#else
+#if defined(WEBRTC_WIN)
+class SecureRandomGenerator : public RandomGenerator {
+ public:
+ SecureRandomGenerator() : advapi32_(NULL), rtl_gen_random_(NULL) {}
+ ~SecureRandomGenerator() {
+ FreeLibrary(advapi32_);
+ }
+
+ virtual bool Init(const void* seed, size_t seed_len) {
+    // We don't do any additional seeding on Win32; we just use the CryptoAPI
+ // RNG (which is exposed as a hidden function off of ADVAPI32 so that we
+ // don't need to drag in all of CryptoAPI)
+ if (rtl_gen_random_) {
+ return true;
+ }
+
+ advapi32_ = LoadLibrary(L"advapi32.dll");
+ if (!advapi32_) {
+ return false;
+ }
+
+ rtl_gen_random_ = reinterpret_cast<RtlGenRandomProc>(
+ GetProcAddress(advapi32_, "SystemFunction036"));
+ if (!rtl_gen_random_) {
+ FreeLibrary(advapi32_);
+ return false;
+ }
+
+ return true;
+ }
+ virtual bool Generate(void* buf, size_t len) {
+ if (!rtl_gen_random_ && !Init(NULL, 0)) {
+ return false;
+ }
+ return (rtl_gen_random_(buf, static_cast<int>(len)) != FALSE);
+ }
+
+ private:
+ typedef BOOL (WINAPI *RtlGenRandomProc)(PVOID, ULONG);
+ HINSTANCE advapi32_;
+ RtlGenRandomProc rtl_gen_random_;
+};
+
+#elif !defined(FEATURE_ENABLE_SSL)
+
+// No SSL implementation -- use rand()
+class SecureRandomGenerator : public RandomGenerator {
+ public:
+ virtual bool Init(const void* seed, size_t len) {
+ if (len >= 4) {
+ srand(*reinterpret_cast<const int*>(seed));
+ } else {
+ srand(*reinterpret_cast<const char*>(seed));
+ }
+ return true;
+ }
+ virtual bool Generate(void* buf, size_t len) {
+ char* bytes = reinterpret_cast<char*>(buf);
+ for (size_t i = 0; i < len; ++i) {
+ bytes[i] = static_cast<char>(rand());
+ }
+ return true;
+ }
+};
+
+#else
+
+#error No SSL implementation has been selected!
+
+#endif // WEBRTC_WIN
+#endif
+
+// A test random generator, for predictable output.
+class TestRandomGenerator : public RandomGenerator {
+ public:
+ TestRandomGenerator() : seed_(7) {
+ }
+ ~TestRandomGenerator() {
+ }
+ virtual bool Init(const void* seed, size_t len) {
+ return true;
+ }
+ virtual bool Generate(void* buf, size_t len) {
+ for (size_t i = 0; i < len; ++i) {
+ static_cast<uint8*>(buf)[i] = static_cast<uint8>(GetRandom());
+ }
+ return true;
+ }
+
+ private:
+ int GetRandom() {
+ return ((seed_ = seed_ * 214013L + 2531011L) >> 16) & 0x7fff;
+ }
+ int seed_;
+};
+
+// TODO: Use Base64::Base64Table instead.
+static const char BASE64[64] = {
+ 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
+ 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
+ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
+ 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
+ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'
+};
+
+namespace {
+
+// This roundabout way of creating a global RNG is to safeguard against
+// indeterminate static initialization order.
+scoped_ptr<RandomGenerator>& GetGlobalRng() {
+ LIBJINGLE_DEFINE_STATIC_LOCAL(scoped_ptr<RandomGenerator>, global_rng,
+ (new SecureRandomGenerator()));
+ return global_rng;
+}
+
+RandomGenerator& Rng() {
+ return *GetGlobalRng();
+}
+
+} // namespace
+
+void SetRandomTestMode(bool test) {
+ if (!test) {
+ GetGlobalRng().reset(new SecureRandomGenerator());
+ } else {
+ GetGlobalRng().reset(new TestRandomGenerator());
+ }
+}
+
+bool InitRandom(int seed) {
+ return InitRandom(reinterpret_cast<const char*>(&seed), sizeof(seed));
+}
+
+bool InitRandom(const char* seed, size_t len) {
+ if (!Rng().Init(seed, len)) {
+ LOG(LS_ERROR) << "Failed to init random generator!";
+ return false;
+ }
+ return true;
+}
+
+std::string CreateRandomString(size_t len) {
+ std::string str;
+ CreateRandomString(len, &str);
+ return str;
+}
+
+bool CreateRandomString(size_t len,
+ const char* table, int table_size,
+ std::string* str) {
+ str->clear();
+ scoped_ptr<uint8[]> bytes(new uint8[len]);
+ if (!Rng().Generate(bytes.get(), len)) {
+ LOG(LS_ERROR) << "Failed to generate random string!";
+ return false;
+ }
+ str->reserve(len);
+ for (size_t i = 0; i < len; ++i) {
+ str->push_back(table[bytes[i] % table_size]);
+ }
+ return true;
+}
+
+bool CreateRandomString(size_t len, std::string* str) {
+ return CreateRandomString(len, BASE64, 64, str);
+}
+
+bool CreateRandomString(size_t len, const std::string& table,
+ std::string* str) {
+ return CreateRandomString(len, table.c_str(),
+ static_cast<int>(table.size()), str);
+}
+
+uint32 CreateRandomId() {
+ uint32 id;
+ if (!Rng().Generate(&id, sizeof(id))) {
+ LOG(LS_ERROR) << "Failed to generate random id!";
+ }
+ return id;
+}
+
+uint64 CreateRandomId64() {
+ return static_cast<uint64>(CreateRandomId()) << 32 | CreateRandomId();
+}
+
+uint32 CreateRandomNonZeroId() {
+ uint32 id;
+ do {
+ id = CreateRandomId();
+ } while (id == 0);
+ return id;
+}
+
+double CreateRandomDouble() {
+ return CreateRandomId() / (std::numeric_limits<uint32>::max() +
+ std::numeric_limits<double>::epsilon());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/helpers.h b/chromium/third_party/webrtc/base/helpers.h
new file mode 100644
index 00000000000..e46d12a3305
--- /dev/null
+++ b/chromium/third_party/webrtc/base/helpers.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_HELPERS_H_
+#define WEBRTC_BASE_HELPERS_H_
+
+#include <string>
+#include "webrtc/base/basictypes.h"
+
+namespace rtc {
+
+// For testing, we can return predictable data.
+void SetRandomTestMode(bool test);
+
+// Initializes the RNG, and seeds it with the specified entropy.
+bool InitRandom(int seed);
+bool InitRandom(const char* seed, size_t len);
+
+// Generates a (cryptographically) random string of the given length.
+// We generate base64 values so that they will be printable.
+// WARNING: could silently fail. Use the version below instead.
+std::string CreateRandomString(size_t length);
+
+// Generates a (cryptographically) random string of the given length.
+// We generate base64 values so that they will be printable.
+// Return false if the random number generator failed.
+bool CreateRandomString(size_t length, std::string* str);
+
+// Generates a (cryptographically) random string of the given length,
+// with characters from the given table. Return false if the random
+// number generator failed.
+bool CreateRandomString(size_t length, const std::string& table,
+ std::string* str);
+
+// Generates a random id.
+uint32 CreateRandomId();
+
+// Generates a 64 bit random id.
+uint64 CreateRandomId64();
+
+// Generates a random id > 0.
+uint32 CreateRandomNonZeroId();
+
+// Generates a random double between 0.0 (inclusive) and 1.0 (exclusive).
+double CreateRandomDouble();
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_HELPERS_H_
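
A short sketch of the intended usage (hypothetical helper; MakeSessionId is an invented name, not part of this change): prefer the overload that reports failure, since the string-returning variant clears the output and returns an empty string if the RNG fails.

#include <string>

#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"

// Generates a printable, base64-alphabet identifier of 16 characters,
// propagating RNG failure instead of silently returning an empty string.
bool MakeSessionId(std::string* session_id) {
  if (!rtc::CreateRandomString(16, session_id)) {
    LOG(LS_ERROR) << "RNG failure while creating session id";
    return false;
  }
  return true;
}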
diff --git a/chromium/third_party/webrtc/base/helpers_unittest.cc b/chromium/third_party/webrtc/base/helpers_unittest.cc
new file mode 100644
index 00000000000..7c20540c521
--- /dev/null
+++ b/chromium/third_party/webrtc/base/helpers_unittest.cc
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/ssladapter.h"
+
+namespace rtc {
+
+class RandomTest : public testing::Test {
+ public:
+ static void SetUpTestCase() {
+ rtc::InitializeSSL();
+ }
+
+ static void TearDownTestCase() {
+ rtc::CleanupSSL();
+ }
+};
+
+TEST_F(RandomTest, TestCreateRandomId) {
+ CreateRandomId();
+}
+
+TEST_F(RandomTest, TestCreateRandomDouble) {
+ for (int i = 0; i < 100; ++i) {
+ double r = CreateRandomDouble();
+ EXPECT_GE(r, 0.0);
+ EXPECT_LT(r, 1.0);
+ }
+}
+
+TEST_F(RandomTest, TestCreateNonZeroRandomId) {
+ EXPECT_NE(0U, CreateRandomNonZeroId());
+}
+
+TEST_F(RandomTest, TestCreateRandomString) {
+ std::string random = CreateRandomString(256);
+ EXPECT_EQ(256U, random.size());
+ std::string random2;
+ EXPECT_TRUE(CreateRandomString(256, &random2));
+ EXPECT_NE(random, random2);
+ EXPECT_EQ(256U, random2.size());
+}
+
+TEST_F(RandomTest, TestCreateRandomForTest) {
+ // Make sure we get the output we expect.
+ SetRandomTestMode(true);
+ EXPECT_EQ(2154761789U, CreateRandomId());
+ EXPECT_EQ("h0ISP4S5SJKH/9EY", CreateRandomString(16));
+
+ // Reset and make sure we get the same output.
+ SetRandomTestMode(true);
+ EXPECT_EQ(2154761789U, CreateRandomId());
+ EXPECT_EQ("h0ISP4S5SJKH/9EY", CreateRandomString(16));
+
+ // Test different character sets.
+ SetRandomTestMode(true);
+ std::string str;
+ EXPECT_TRUE(CreateRandomString(16, "a", &str));
+ EXPECT_EQ("aaaaaaaaaaaaaaaa", str);
+ EXPECT_TRUE(CreateRandomString(16, "abc", &str));
+ EXPECT_EQ("acbccaaaabbaacbb", str);
+
+ // Turn off test mode for other tests.
+ SetRandomTestMode(false);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/httpbase.cc b/chromium/third_party/webrtc/base/httpbase.cc
new file mode 100644
index 00000000000..5de2b79d775
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpbase.cc
@@ -0,0 +1,877 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#else // !WEBRTC_WIN
+#define SEC_E_CERT_EXPIRED (-2146893016)
+#endif // !WEBRTC_WIN
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/httpbase.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/socket.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+//////////////////////////////////////////////////////////////////////
+// Helpers
+//////////////////////////////////////////////////////////////////////
+
+bool MatchHeader(const char* str, size_t len, HttpHeader header) {
+ const char* const header_str = ToString(header);
+ const size_t header_len = strlen(header_str);
+ return (len == header_len) && (_strnicmp(str, header_str, header_len) == 0);
+}
+
+enum {
+ MSG_READ
+};
+
+//////////////////////////////////////////////////////////////////////
+// HttpParser
+//////////////////////////////////////////////////////////////////////
+
+HttpParser::HttpParser() {
+ reset();
+}
+
+HttpParser::~HttpParser() {
+}
+
+void
+HttpParser::reset() {
+ state_ = ST_LEADER;
+ chunked_ = false;
+ data_size_ = SIZE_UNKNOWN;
+}
+
+HttpParser::ProcessResult
+HttpParser::Process(const char* buffer, size_t len, size_t* processed,
+ HttpError* error) {
+ *processed = 0;
+ *error = HE_NONE;
+
+ if (state_ >= ST_COMPLETE) {
+ ASSERT(false);
+ return PR_COMPLETE;
+ }
+
+ while (true) {
+ if (state_ < ST_DATA) {
+ size_t pos = *processed;
+ while ((pos < len) && (buffer[pos] != '\n')) {
+ pos += 1;
+ }
+ if (pos >= len) {
+ break; // don't have a full header
+ }
+ const char* line = buffer + *processed;
+ size_t len = (pos - *processed);
+ *processed = pos + 1;
+ while ((len > 0) && isspace(static_cast<unsigned char>(line[len-1]))) {
+ len -= 1;
+ }
+ ProcessResult result = ProcessLine(line, len, error);
+ LOG(LS_VERBOSE) << "Processed line, result=" << result;
+
+ if (PR_CONTINUE != result) {
+ return result;
+ }
+ } else if (data_size_ == 0) {
+ if (chunked_) {
+ state_ = ST_CHUNKTERM;
+ } else {
+ return PR_COMPLETE;
+ }
+ } else {
+ size_t available = len - *processed;
+ if (available <= 0) {
+ break; // no more data
+ }
+ if ((data_size_ != SIZE_UNKNOWN) && (available > data_size_)) {
+ available = data_size_;
+ }
+ size_t read = 0;
+ ProcessResult result = ProcessData(buffer + *processed, available, read,
+ error);
+ LOG(LS_VERBOSE) << "Processed data, result: " << result << " read: "
+ << read << " err: " << error;
+
+ if (PR_CONTINUE != result) {
+ return result;
+ }
+ *processed += read;
+ if (data_size_ != SIZE_UNKNOWN) {
+ data_size_ -= read;
+ }
+ }
+ }
+
+ return PR_CONTINUE;
+}
+
+HttpParser::ProcessResult
+HttpParser::ProcessLine(const char* line, size_t len, HttpError* error) {
+ LOG_F(LS_VERBOSE) << " state: " << state_ << " line: "
+ << std::string(line, len) << " len: " << len << " err: "
+ << error;
+
+ switch (state_) {
+ case ST_LEADER:
+ state_ = ST_HEADERS;
+ return ProcessLeader(line, len, error);
+
+ case ST_HEADERS:
+ if (len > 0) {
+ const char* value = strchrn(line, len, ':');
+ if (!value) {
+ *error = HE_PROTOCOL;
+ return PR_COMPLETE;
+ }
+ size_t nlen = (value - line);
+ const char* eol = line + len;
+ do {
+ value += 1;
+ } while ((value < eol) && isspace(static_cast<unsigned char>(*value)));
+ size_t vlen = eol - value;
+ if (MatchHeader(line, nlen, HH_CONTENT_LENGTH)) {
+ // sscanf isn't safe with strings that aren't null-terminated, and there
+ // is no guarantee that |value| is.
+ // Create a local copy that is null-terminated.
+ std::string value_str(value, vlen);
+ unsigned int temp_size;
+ if (sscanf(value_str.c_str(), "%u", &temp_size) != 1) {
+ *error = HE_PROTOCOL;
+ return PR_COMPLETE;
+ }
+ data_size_ = static_cast<size_t>(temp_size);
+ } else if (MatchHeader(line, nlen, HH_TRANSFER_ENCODING)) {
+ if ((vlen == 7) && (_strnicmp(value, "chunked", 7) == 0)) {
+ chunked_ = true;
+ } else if ((vlen == 8) && (_strnicmp(value, "identity", 8) == 0)) {
+ chunked_ = false;
+ } else {
+ *error = HE_PROTOCOL;
+ return PR_COMPLETE;
+ }
+ }
+ return ProcessHeader(line, nlen, value, vlen, error);
+ } else {
+ state_ = chunked_ ? ST_CHUNKSIZE : ST_DATA;
+ return ProcessHeaderComplete(chunked_, data_size_, error);
+ }
+ break;
+
+ case ST_CHUNKSIZE:
+ if (len > 0) {
+ char* ptr = NULL;
+ data_size_ = strtoul(line, &ptr, 16);
+ if (ptr != line + len) {
+ *error = HE_PROTOCOL;
+ return PR_COMPLETE;
+ }
+ state_ = (data_size_ == 0) ? ST_TRAILERS : ST_DATA;
+ } else {
+ *error = HE_PROTOCOL;
+ return PR_COMPLETE;
+ }
+ break;
+
+ case ST_CHUNKTERM:
+ if (len > 0) {
+ *error = HE_PROTOCOL;
+ return PR_COMPLETE;
+ } else {
+ state_ = chunked_ ? ST_CHUNKSIZE : ST_DATA;
+ }
+ break;
+
+ case ST_TRAILERS:
+ if (len == 0) {
+ return PR_COMPLETE;
+ }
+ // *error = onHttpRecvTrailer();
+ break;
+
+ default:
+ ASSERT(false);
+ break;
+ }
+
+ return PR_CONTINUE;
+}
+
+bool
+HttpParser::is_valid_end_of_input() const {
+ return (state_ == ST_DATA) && (data_size_ == SIZE_UNKNOWN);
+}
+
+void
+HttpParser::complete(HttpError error) {
+ if (state_ < ST_COMPLETE) {
+ state_ = ST_COMPLETE;
+ OnComplete(error);
+ }
+}
+
+//////////////////////////////////////////////////////////////////////
+// HttpBase::DocumentStream
+//////////////////////////////////////////////////////////////////////
+
+class BlockingMemoryStream : public ExternalMemoryStream {
+public:
+ BlockingMemoryStream(char* buffer, size_t size)
+ : ExternalMemoryStream(buffer, size) { }
+
+ virtual StreamResult DoReserve(size_t size, int* error) {
+ return (buffer_length_ >= size) ? SR_SUCCESS : SR_BLOCK;
+ }
+};
+
+class HttpBase::DocumentStream : public StreamInterface {
+public:
+ DocumentStream(HttpBase* base) : base_(base), error_(HE_DEFAULT) { }
+
+ virtual StreamState GetState() const {
+ if (NULL == base_)
+ return SS_CLOSED;
+ if (HM_RECV == base_->mode_)
+ return SS_OPEN;
+ return SS_OPENING;
+ }
+
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ if (!base_) {
+ if (error) *error = error_;
+ return (HE_NONE == error_) ? SR_EOS : SR_ERROR;
+ }
+
+ if (HM_RECV != base_->mode_) {
+ return SR_BLOCK;
+ }
+
+ // DoReceiveLoop writes http document data to the StreamInterface* document
+ // member of HttpData. In this case, we want this data to be written
+ // directly to our buffer. To accomplish this, we wrap our buffer with a
+ // StreamInterface, and replace the existing document with our wrapper.
+ // When the method returns, we restore the old document. Ideally, we would
+ // pass our StreamInterface* to DoReceiveLoop, but due to the callbacks
+ // of HttpParser, we would still need to store the pointer temporarily.
+ scoped_ptr<StreamInterface>
+ stream(new BlockingMemoryStream(reinterpret_cast<char*>(buffer),
+ buffer_len));
+
+ // Replace the existing document with our wrapped buffer.
+ base_->data_->document.swap(stream);
+
+ // Pump the I/O loop. DoReceiveLoop is guaranteed not to attempt to
+ // complete the I/O process, which means that our wrapper is not in danger
+ // of being deleted. To ensure this, DoReceiveLoop returns true when it
+ // wants complete to be called. We make sure to uninstall our wrapper
+ // before calling complete().
+ HttpError http_error;
+ bool complete = base_->DoReceiveLoop(&http_error);
+
+ // Reinstall the original output document.
+ base_->data_->document.swap(stream);
+
+ // If we reach the end of the receive stream, we disconnect our stream
+ // adapter from the HttpBase, and further calls to read will either return
+ // EOS or ERROR, appropriately. Finally, we call complete().
+ StreamResult result = SR_BLOCK;
+ if (complete) {
+ HttpBase* base = Disconnect(http_error);
+ if (error) *error = error_;
+ result = (HE_NONE == error_) ? SR_EOS : SR_ERROR;
+ base->complete(http_error);
+ }
+
+ // Even if we are complete, if some data was read we must return SUCCESS.
+ // Future Reads will return EOS or ERROR based on the error_ variable.
+ size_t position;
+ stream->GetPosition(&position);
+ if (position > 0) {
+ if (read) *read = position;
+ result = SR_SUCCESS;
+ }
+ return result;
+ }
+
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ if (error) *error = -1;
+ return SR_ERROR;
+ }
+
+ virtual void Close() {
+ if (base_) {
+ HttpBase* base = Disconnect(HE_NONE);
+ if (HM_RECV == base->mode_ && base->http_stream_) {
+ // Read I/O could have been stalled on the user of this DocumentStream,
+ // so restart the I/O process now that we've removed ourselves.
+ base->http_stream_->PostEvent(SE_READ, 0);
+ }
+ }
+ }
+
+ virtual bool GetAvailable(size_t* size) const {
+ if (!base_ || HM_RECV != base_->mode_)
+ return false;
+ size_t data_size = base_->GetDataRemaining();
+ if (SIZE_UNKNOWN == data_size)
+ return false;
+ if (size)
+ *size = data_size;
+ return true;
+ }
+
+ HttpBase* Disconnect(HttpError error) {
+ ASSERT(NULL != base_);
+ ASSERT(NULL != base_->doc_stream_);
+ HttpBase* base = base_;
+ base_->doc_stream_ = NULL;
+ base_ = NULL;
+ error_ = error;
+ return base;
+ }
+
+private:
+ HttpBase* base_;
+ HttpError error_;
+};
+
+//////////////////////////////////////////////////////////////////////
+// HttpBase
+//////////////////////////////////////////////////////////////////////
+
+HttpBase::HttpBase() : mode_(HM_NONE), data_(NULL), notify_(NULL),
+ http_stream_(NULL), doc_stream_(NULL) {
+}
+
+HttpBase::~HttpBase() {
+ ASSERT(HM_NONE == mode_);
+}
+
+bool
+HttpBase::isConnected() const {
+ return (http_stream_ != NULL) && (http_stream_->GetState() == SS_OPEN);
+}
+
+bool
+HttpBase::attach(StreamInterface* stream) {
+ if ((mode_ != HM_NONE) || (http_stream_ != NULL) || (stream == NULL)) {
+ ASSERT(false);
+ return false;
+ }
+ http_stream_ = stream;
+ http_stream_->SignalEvent.connect(this, &HttpBase::OnHttpStreamEvent);
+ mode_ = (http_stream_->GetState() == SS_OPENING) ? HM_CONNECT : HM_NONE;
+ return true;
+}
+
+StreamInterface*
+HttpBase::detach() {
+ ASSERT(HM_NONE == mode_);
+ if (mode_ != HM_NONE) {
+ return NULL;
+ }
+ StreamInterface* stream = http_stream_;
+ http_stream_ = NULL;
+ if (stream) {
+ stream->SignalEvent.disconnect(this);
+ }
+ return stream;
+}
+
+void
+HttpBase::send(HttpData* data) {
+ ASSERT(HM_NONE == mode_);
+ if (mode_ != HM_NONE) {
+ return;
+ } else if (!isConnected()) {
+ OnHttpStreamEvent(http_stream_, SE_CLOSE, HE_DISCONNECTED);
+ return;
+ }
+
+ mode_ = HM_SEND;
+ data_ = data;
+ len_ = 0;
+ ignore_data_ = chunk_data_ = false;
+
+ if (data_->document) {
+ data_->document->SignalEvent.connect(this, &HttpBase::OnDocumentEvent);
+ }
+
+ std::string encoding;
+ if (data_->hasHeader(HH_TRANSFER_ENCODING, &encoding)
+ && (encoding == "chunked")) {
+ chunk_data_ = true;
+ }
+
+ len_ = data_->formatLeader(buffer_, sizeof(buffer_));
+ len_ += strcpyn(buffer_ + len_, sizeof(buffer_) - len_, "\r\n");
+
+ header_ = data_->begin();
+ if (header_ == data_->end()) {
+ // We must call this at least once, in the case where there are no headers.
+ queue_headers();
+ }
+
+ flush_data();
+}
+
+void
+HttpBase::recv(HttpData* data) {
+ ASSERT(HM_NONE == mode_);
+ if (mode_ != HM_NONE) {
+ return;
+ } else if (!isConnected()) {
+ OnHttpStreamEvent(http_stream_, SE_CLOSE, HE_DISCONNECTED);
+ return;
+ }
+
+ mode_ = HM_RECV;
+ data_ = data;
+ len_ = 0;
+ ignore_data_ = chunk_data_ = false;
+
+ reset();
+ if (doc_stream_) {
+ doc_stream_->SignalEvent(doc_stream_, SE_OPEN | SE_READ, 0);
+ } else {
+ read_and_process_data();
+ }
+}
+
+void
+HttpBase::abort(HttpError err) {
+ if (mode_ != HM_NONE) {
+ if (http_stream_ != NULL) {
+ http_stream_->Close();
+ }
+ do_complete(err);
+ }
+}
+
+StreamInterface* HttpBase::GetDocumentStream() {
+ if (doc_stream_)
+ return NULL;
+ doc_stream_ = new DocumentStream(this);
+ return doc_stream_;
+}
+
+HttpError HttpBase::HandleStreamClose(int error) {
+ if (http_stream_ != NULL) {
+ http_stream_->Close();
+ }
+ if (error == 0) {
+ if ((mode_ == HM_RECV) && is_valid_end_of_input()) {
+ return HE_NONE;
+ } else {
+ return HE_DISCONNECTED;
+ }
+ } else if (error == SOCKET_EACCES) {
+ return HE_AUTH;
+ } else if (error == SEC_E_CERT_EXPIRED) {
+ return HE_CERTIFICATE_EXPIRED;
+ }
+ LOG_F(LS_ERROR) << "(" << error << ")";
+ return (HM_CONNECT == mode_) ? HE_CONNECT_FAILED : HE_SOCKET_ERROR;
+}
+
+bool HttpBase::DoReceiveLoop(HttpError* error) {
+ ASSERT(HM_RECV == mode_);
+ ASSERT(NULL != error);
+
+  // Due to the latency between receiving read notifications from
+  // pseudotcpchannel, we rely on repeated calls to read in order to achieve
+ // ideal throughput. The number of reads is limited to prevent starving
+ // the caller.
+
+ size_t loop_count = 0;
+ const size_t kMaxReadCount = 20;
+ bool process_requires_more_data = false;
+ do {
+    // The most frequent use of this function is in response to new data available
+ // on http_stream_. Therefore, we optimize by attempting to read from the
+ // network first (as opposed to processing existing data first).
+
+ if (len_ < sizeof(buffer_)) {
+ // Attempt to buffer more data.
+ size_t read;
+ int read_error;
+ StreamResult read_result = http_stream_->Read(buffer_ + len_,
+ sizeof(buffer_) - len_,
+ &read, &read_error);
+ switch (read_result) {
+ case SR_SUCCESS:
+ ASSERT(len_ + read <= sizeof(buffer_));
+ len_ += read;
+ break;
+ case SR_BLOCK:
+ if (process_requires_more_data) {
+            // We can't make progress until more data is available.
+ return false;
+ }
+ // Attempt to process the data already in our buffer.
+ break;
+ case SR_EOS:
+ // Clean close, with no error. Fall through to HandleStreamClose.
+ read_error = 0;
+ case SR_ERROR:
+ *error = HandleStreamClose(read_error);
+ return true;
+ }
+ } else if (process_requires_more_data) {
+ // We have too much unprocessed data in our buffer. This should only
+ // occur when a single HTTP header is longer than the buffer size (32K).
+ // Anything longer than that is almost certainly an error.
+ *error = HE_OVERFLOW;
+ return true;
+ }
+
+ // Process data in our buffer. Process is not guaranteed to process all
+ // the buffered data. In particular, it will wait until a complete
+ // protocol element (such as http header, or chunk size) is available,
+ // before processing it in its entirety. Also, it is valid and sometimes
+ // necessary to call Process with an empty buffer, since the state machine
+ // may have interrupted state transitions to complete.
+ size_t processed;
+ ProcessResult process_result = Process(buffer_, len_, &processed,
+ error);
+ ASSERT(processed <= len_);
+ len_ -= processed;
+ memmove(buffer_, buffer_ + processed, len_);
+ switch (process_result) {
+ case PR_CONTINUE:
+ // We need more data to make progress.
+ process_requires_more_data = true;
+ break;
+ case PR_BLOCK:
+ // We're stalled on writing the processed data.
+ return false;
+ case PR_COMPLETE:
+ // *error already contains the correct code.
+ return true;
+ }
+ } while (++loop_count <= kMaxReadCount);
+
+ LOG_F(LS_WARNING) << "danger of starvation";
+ return false;
+}
+
+void
+HttpBase::read_and_process_data() {
+ HttpError error;
+ if (DoReceiveLoop(&error)) {
+ complete(error);
+ }
+}
+
+void
+HttpBase::flush_data() {
+ ASSERT(HM_SEND == mode_);
+
+ // When send_required is true, no more buffering can occur without a network
+ // write.
+ bool send_required = (len_ >= sizeof(buffer_));
+
+ while (true) {
+ ASSERT(len_ <= sizeof(buffer_));
+
+ // HTTP is inherently sensitive to round trip latency, since a frequent use
+ // case is for small requests and responses to be sent back and forth, and
+ // the lack of pipelining forces a single request to take a minimum of the
+ // round trip time. As a result, it is to our benefit to pack as much data
+ // into each packet as possible. Thus, we defer network writes until we've
+ // buffered as much data as possible.
+
+ if (!send_required && (header_ != data_->end())) {
+ // First, attempt to queue more header data.
+ send_required = queue_headers();
+ }
+
+ if (!send_required && data_->document) {
+ // Next, attempt to queue document data.
+
+ const size_t kChunkDigits = 8;
+ size_t offset, reserve;
+ if (chunk_data_) {
+ // Reserve characters at the start for X-byte hex value and \r\n
+ offset = len_ + kChunkDigits + 2;
+ // ... and 2 characters at the end for \r\n
+ reserve = offset + 2;
+ } else {
+ offset = len_;
+ reserve = offset;
+ }
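+      // Illustrative layout when chunking with len_ == 0 and an 8-byte
+      // document read (kChunkDigits == 8): bytes 0-7 hold "00000008",
+      // 8-9 hold "\r\n", 10-17 hold the payload, and 18-19 hold the trailing
+      // "\r\n", so len_ becomes 12 + 8 = 20.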
+
+ if (reserve >= sizeof(buffer_)) {
+ send_required = true;
+ } else {
+ size_t read;
+ int error;
+ StreamResult result = data_->document->Read(buffer_ + offset,
+ sizeof(buffer_) - reserve,
+ &read, &error);
+ if (result == SR_SUCCESS) {
+ ASSERT(reserve + read <= sizeof(buffer_));
+ if (chunk_data_) {
+ // Prepend the chunk length in hex.
+ // Note: sprintfn appends a null terminator, which is why we can't
+ // combine it with the line terminator.
+ sprintfn(buffer_ + len_, kChunkDigits + 1, "%.*x",
+ kChunkDigits, read);
+ // Add line terminator to the chunk length.
+ memcpy(buffer_ + len_ + kChunkDigits, "\r\n", 2);
+ // Add line terminator to the end of the chunk.
+ memcpy(buffer_ + offset + read, "\r\n", 2);
+ }
+ len_ = reserve + read;
+ } else if (result == SR_BLOCK) {
+ // Nothing to do but flush data to the network.
+ send_required = true;
+ } else if (result == SR_EOS) {
+ if (chunk_data_) {
+ // Append the empty chunk and empty trailers, then turn off
+ // chunking.
+ ASSERT(len_ + 5 <= sizeof(buffer_));
+ memcpy(buffer_ + len_, "0\r\n\r\n", 5);
+ len_ += 5;
+ chunk_data_ = false;
+ } else if (0 == len_) {
+ // No more data to read, and no more data to write.
+ do_complete();
+ return;
+ }
+ // Although we are done reading data, there is still data which needs
+ // to be flushed to the network.
+ send_required = true;
+ } else {
+ LOG_F(LS_ERROR) << "Read error: " << error;
+ do_complete(HE_STREAM);
+ return;
+ }
+ }
+ }
+
+ if (0 == len_) {
+ // No data currently available to send.
+ if (!data_->document) {
+ // If there is no source document, that means we're done.
+ do_complete();
+ }
+ return;
+ }
+
+ size_t written;
+ int error;
+ StreamResult result = http_stream_->Write(buffer_, len_, &written, &error);
+ if (result == SR_SUCCESS) {
+ ASSERT(written <= len_);
+ len_ -= written;
+ memmove(buffer_, buffer_ + written, len_);
+ send_required = false;
+ } else if (result == SR_BLOCK) {
+ if (send_required) {
+ // Nothing more we can do until network is writeable.
+ return;
+ }
+ } else {
+ ASSERT(result == SR_ERROR);
+ LOG_F(LS_ERROR) << "error";
+ OnHttpStreamEvent(http_stream_, SE_CLOSE, error);
+ return;
+ }
+ }
+
+ ASSERT(false);
+}
+
+bool
+HttpBase::queue_headers() {
+ ASSERT(HM_SEND == mode_);
+ while (header_ != data_->end()) {
+ size_t len = sprintfn(buffer_ + len_, sizeof(buffer_) - len_,
+ "%.*s: %.*s\r\n",
+ header_->first.size(), header_->first.data(),
+ header_->second.size(), header_->second.data());
+ if (len_ + len < sizeof(buffer_) - 3) {
+ len_ += len;
+ ++header_;
+ } else if (len_ == 0) {
+ LOG(WARNING) << "discarding header that is too long: " << header_->first;
+ ++header_;
+ } else {
+ // Not enough room for the next header, write to network first.
+ return true;
+ }
+ }
+ // End of headers
+ len_ += strcpyn(buffer_ + len_, sizeof(buffer_) - len_, "\r\n");
+ return false;
+}
+
+void
+HttpBase::do_complete(HttpError err) {
+ ASSERT(mode_ != HM_NONE);
+ HttpMode mode = mode_;
+ mode_ = HM_NONE;
+ if (data_ && data_->document) {
+ data_->document->SignalEvent.disconnect(this);
+ }
+ data_ = NULL;
+ if ((HM_RECV == mode) && doc_stream_) {
+ ASSERT(HE_NONE != err); // We should have Disconnected doc_stream_ already.
+ DocumentStream* ds = doc_stream_;
+ ds->Disconnect(err);
+ ds->SignalEvent(ds, SE_CLOSE, err);
+ }
+ if (notify_) {
+ notify_->onHttpComplete(mode, err);
+ }
+}
+
+//
+// Stream Signals
+//
+
+void
+HttpBase::OnHttpStreamEvent(StreamInterface* stream, int events, int error) {
+ ASSERT(stream == http_stream_);
+ if ((events & SE_OPEN) && (mode_ == HM_CONNECT)) {
+ do_complete();
+ return;
+ }
+
+ if ((events & SE_WRITE) && (mode_ == HM_SEND)) {
+ flush_data();
+ return;
+ }
+
+ if ((events & SE_READ) && (mode_ == HM_RECV)) {
+ if (doc_stream_) {
+ doc_stream_->SignalEvent(doc_stream_, SE_READ, 0);
+ } else {
+ read_and_process_data();
+ }
+ return;
+ }
+
+ if ((events & SE_CLOSE) == 0)
+ return;
+
+ HttpError http_error = HandleStreamClose(error);
+ if (mode_ == HM_RECV) {
+ complete(http_error);
+ } else if (mode_ != HM_NONE) {
+ do_complete(http_error);
+ } else if (notify_) {
+ notify_->onHttpClosed(http_error);
+ }
+}
+
+void
+HttpBase::OnDocumentEvent(StreamInterface* stream, int events, int error) {
+ ASSERT(stream == data_->document.get());
+ if ((events & SE_WRITE) && (mode_ == HM_RECV)) {
+ read_and_process_data();
+ return;
+ }
+
+ if ((events & SE_READ) && (mode_ == HM_SEND)) {
+ flush_data();
+ return;
+ }
+
+ if (events & SE_CLOSE) {
+ LOG_F(LS_ERROR) << "Read error: " << error;
+ do_complete(HE_STREAM);
+ return;
+ }
+}
+
+//
+// HttpParser Implementation
+//
+
+HttpParser::ProcessResult
+HttpBase::ProcessLeader(const char* line, size_t len, HttpError* error) {
+ *error = data_->parseLeader(line, len);
+ return (HE_NONE == *error) ? PR_CONTINUE : PR_COMPLETE;
+}
+
+HttpParser::ProcessResult
+HttpBase::ProcessHeader(const char* name, size_t nlen, const char* value,
+ size_t vlen, HttpError* error) {
+ std::string sname(name, nlen), svalue(value, vlen);
+ data_->addHeader(sname, svalue);
+ return PR_CONTINUE;
+}
+
+HttpParser::ProcessResult
+HttpBase::ProcessHeaderComplete(bool chunked, size_t& data_size,
+ HttpError* error) {
+ StreamInterface* old_docstream = doc_stream_;
+ if (notify_) {
+ *error = notify_->onHttpHeaderComplete(chunked, data_size);
+ // The request must not be aborted as a result of this callback.
+ ASSERT(NULL != data_);
+ }
+ if ((HE_NONE == *error) && data_->document) {
+ data_->document->SignalEvent.connect(this, &HttpBase::OnDocumentEvent);
+ }
+ if (HE_NONE != *error) {
+ return PR_COMPLETE;
+ }
+ if (old_docstream != doc_stream_) {
+ // Break out of Process loop, since our I/O model just changed.
+ return PR_BLOCK;
+ }
+ return PR_CONTINUE;
+}
+
+HttpParser::ProcessResult
+HttpBase::ProcessData(const char* data, size_t len, size_t& read,
+ HttpError* error) {
+ if (ignore_data_ || !data_->document) {
+ read = len;
+ return PR_CONTINUE;
+ }
+ int write_error = 0;
+ switch (data_->document->Write(data, len, &read, &write_error)) {
+ case SR_SUCCESS:
+ return PR_CONTINUE;
+ case SR_BLOCK:
+ return PR_BLOCK;
+ case SR_EOS:
+ LOG_F(LS_ERROR) << "Unexpected EOS";
+ *error = HE_STREAM;
+ return PR_COMPLETE;
+ case SR_ERROR:
+ default:
+ LOG_F(LS_ERROR) << "Write error: " << write_error;
+ *error = HE_STREAM;
+ return PR_COMPLETE;
+ }
+}
+
+void
+HttpBase::OnComplete(HttpError err) {
+ LOG_F(LS_VERBOSE);
+ do_complete(err);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/httpbase.h b/chromium/third_party/webrtc/base/httpbase.h
new file mode 100644
index 00000000000..424a61f9f7f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpbase.h
@@ -0,0 +1,181 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#ifndef WEBRTC_BASE_HTTPBASE_H__
+#define WEBRTC_BASE_HTTPBASE_H__
+
+#include "webrtc/base/httpcommon.h"
+
+namespace rtc {
+
+class StreamInterface;
+
+///////////////////////////////////////////////////////////////////////////////
+// HttpParser - Parses an HTTP stream provided via Process and end_of_input, and
+// generates events for:
+// Structural Elements: Leader, Headers, Document Data
+// Events: End of Headers, End of Document, Errors
+///////////////////////////////////////////////////////////////////////////////
+
+class HttpParser {
+public:
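+  // Result of a Process() call (see httpbase.cc):
+  //   PR_CONTINUE - more input is needed before further progress can be made.
+  //   PR_BLOCK    - processing is stalled (e.g. the data sink is full, or the
+  //                 I/O model changed); call Process again to resume.
+  //   PR_COMPLETE - parsing finished; the HttpError output holds the result.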
+ enum ProcessResult { PR_CONTINUE, PR_BLOCK, PR_COMPLETE };
+ HttpParser();
+ virtual ~HttpParser();
+
+ void reset();
+ ProcessResult Process(const char* buffer, size_t len, size_t* processed,
+ HttpError* error);
+ bool is_valid_end_of_input() const;
+ void complete(HttpError err);
+
+ size_t GetDataRemaining() const { return data_size_; }
+
+protected:
+ ProcessResult ProcessLine(const char* line, size_t len, HttpError* error);
+
+ // HttpParser Interface
+ virtual ProcessResult ProcessLeader(const char* line, size_t len,
+ HttpError* error) = 0;
+ virtual ProcessResult ProcessHeader(const char* name, size_t nlen,
+ const char* value, size_t vlen,
+ HttpError* error) = 0;
+ virtual ProcessResult ProcessHeaderComplete(bool chunked, size_t& data_size,
+ HttpError* error) = 0;
+ virtual ProcessResult ProcessData(const char* data, size_t len, size_t& read,
+ HttpError* error) = 0;
+ virtual void OnComplete(HttpError err) = 0;
+
+private:
+ enum State {
+ ST_LEADER, ST_HEADERS,
+ ST_CHUNKSIZE, ST_CHUNKTERM, ST_TRAILERS,
+ ST_DATA, ST_COMPLETE
+ } state_;
+ bool chunked_;
+ size_t data_size_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// IHttpNotify
+///////////////////////////////////////////////////////////////////////////////
+
+enum HttpMode { HM_NONE, HM_CONNECT, HM_RECV, HM_SEND };
+
+class IHttpNotify {
+public:
+ virtual ~IHttpNotify() {}
+ virtual HttpError onHttpHeaderComplete(bool chunked, size_t& data_size) = 0;
+ virtual void onHttpComplete(HttpMode mode, HttpError err) = 0;
+ virtual void onHttpClosed(HttpError err) = 0;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// HttpBase - Provides a state machine for implementing HTTP-based components.
+// Attach HttpBase to a StreamInterface which represents a bidirectional HTTP
+// stream, and then call send() or recv() to initiate sending or receiving one
+// side of an HTTP transaction. By default, HttpBase operates as an I/O pump,
+// moving data from the HTTP stream to the HttpData object and vice versa.
+// However, it can also operate in stream mode, in which case the user of the
+// stream interface drives I/O via calls to Read().
+///////////////////////////////////////////////////////////////////////////////
+
+class HttpBase
+: private HttpParser,
+ public sigslot::has_slots<>
+{
+public:
+ HttpBase();
+ virtual ~HttpBase();
+
+ void notify(IHttpNotify* notify) { notify_ = notify; }
+ bool attach(StreamInterface* stream);
+ StreamInterface* stream() { return http_stream_; }
+ StreamInterface* detach();
+ bool isConnected() const;
+
+ void send(HttpData* data);
+ void recv(HttpData* data);
+ void abort(HttpError err);
+
+ HttpMode mode() const { return mode_; }
+
+ void set_ignore_data(bool ignore) { ignore_data_ = ignore; }
+ bool ignore_data() const { return ignore_data_; }
+
+ // Obtaining this stream puts HttpBase into stream mode until the stream
+ // is closed. HttpBase can only expose one open stream interface at a time.
+ // Further calls will return NULL.
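+  //
+  // A minimal pull-mode sketch (the stream and data names are illustrative):
+  //   StreamInterface* doc = http.GetDocumentStream();
+  //   http.recv(&response_data);   // headers are still parsed internally
+  //   char buf[64]; size_t read;
+  //   while (doc->Read(buf, sizeof(buf), &read, NULL) == SR_SUCCESS) {
+  //     // consume body bytes as they are pulled from the http stream
+  //   }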
+ StreamInterface* GetDocumentStream();
+
+protected:
+ // Do cleanup when the http stream closes (error may be 0 for a clean
+ // shutdown), and return the error code to signal.
+ HttpError HandleStreamClose(int error);
+
+ // DoReceiveLoop acts as a data pump, pulling data from the http stream,
+ // pushing it through the HttpParser, and then populating the HttpData object
+ // based on the callbacks from the parser. One of the most interesting
+ // callbacks is ProcessData, which provides the actual http document body.
+ // This data is then written to the HttpData::document. As a result, data
+ // flows from the network to the document, with some incidental protocol
+ // parsing in between.
+ // Ideally, we would pass in the document* to DoReceiveLoop, to more easily
+ // support GetDocumentStream(). However, since the HttpParser is callback
+ // driven, we are forced to store the pointer somewhere until the callback
+ // is triggered.
+ // Returns true if the received document has finished, and
+ // HttpParser::complete should be called.
+ bool DoReceiveLoop(HttpError* err);
+
+ void read_and_process_data();
+ void flush_data();
+ bool queue_headers();
+ void do_complete(HttpError err = HE_NONE);
+
+ void OnHttpStreamEvent(StreamInterface* stream, int events, int error);
+ void OnDocumentEvent(StreamInterface* stream, int events, int error);
+
+ // HttpParser Interface
+ virtual ProcessResult ProcessLeader(const char* line, size_t len,
+ HttpError* error);
+ virtual ProcessResult ProcessHeader(const char* name, size_t nlen,
+ const char* value, size_t vlen,
+ HttpError* error);
+ virtual ProcessResult ProcessHeaderComplete(bool chunked, size_t& data_size,
+ HttpError* error);
+ virtual ProcessResult ProcessData(const char* data, size_t len, size_t& read,
+ HttpError* error);
+ virtual void OnComplete(HttpError err);
+
+private:
+ class DocumentStream;
+ friend class DocumentStream;
+
+ enum { kBufferSize = 32 * 1024 };
+
+ HttpMode mode_;
+ HttpData* data_;
+ IHttpNotify* notify_;
+ StreamInterface* http_stream_;
+ DocumentStream* doc_stream_;
+ char buffer_[kBufferSize];
+ size_t len_;
+
+ bool ignore_data_, chunk_data_;
+ HttpData::const_iterator header_;
+};
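+
+// A minimal push-mode (I/O pump) sketch, mirroring httpbase_unittest.cc; the
+// sink, stream, and data names are illustrative:
+//
+//   HttpBase http;
+//   http.notify(&sink);              // sink implements IHttpNotify
+//   http.attach(connected_stream);   // a connected, bidirectional stream
+//   http.recv(&response_data);       // pumps the body into
+//                                    // response_data.document until
+//                                    // onHttpComplete() fires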
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_HTTPBASE_H__
diff --git a/chromium/third_party/webrtc/base/httpbase_unittest.cc b/chromium/third_party/webrtc/base/httpbase_unittest.cc
new file mode 100644
index 00000000000..6dab0c9acd2
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpbase_unittest.cc
@@ -0,0 +1,520 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/httpbase.h"
+#include "webrtc/base/testutils.h"
+
+namespace rtc {
+
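+// kHttpResponse carries an 8-byte body ("Goodbye!") using chunked transfer
+// encoding: the chunk size in hex ("00000008"), CRLF, the payload, CRLF, and
+// a terminating zero-length chunk ("0\r\n\r\n").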
+const char* const kHttpResponse =
+ "HTTP/1.1 200\r\n"
+ "Connection: Keep-Alive\r\n"
+ "Content-Type: text/plain\r\n"
+ "Proxy-Authorization: 42\r\n"
+ "Transfer-Encoding: chunked\r\n"
+ "\r\n"
+ "00000008\r\n"
+ "Goodbye!\r\n"
+ "0\r\n\r\n";
+
+const char* const kHttpEmptyResponse =
+ "HTTP/1.1 200\r\n"
+ "Connection: Keep-Alive\r\n"
+ "Content-Length: 0\r\n"
+ "Proxy-Authorization: 42\r\n"
+ "\r\n";
+
+const char* const kHttpResponsePrefix =
+ "HTTP/1.1 200\r\n"
+ "Connection: Keep-Alive\r\n"
+ "Content-Type: text/plain\r\n"
+ "Proxy-Authorization: 42\r\n"
+ "Transfer-Encoding: chunked\r\n"
+ "\r\n"
+ "8\r\n"
+ "Goodbye!\r\n";
+
+class HttpBaseTest : public testing::Test, public IHttpNotify {
+public:
+ enum EventType { E_HEADER_COMPLETE, E_COMPLETE, E_CLOSED };
+ struct Event {
+ EventType event;
+ bool chunked;
+ size_t data_size;
+ HttpMode mode;
+ HttpError err;
+ };
+ HttpBaseTest() : mem(NULL), obtain_stream(false), http_stream(NULL) { }
+
+ virtual void SetUp() { }
+ virtual void TearDown() {
+ delete http_stream;
+ // Avoid an ASSERT, in case a test doesn't clean up properly
+ base.abort(HE_NONE);
+ }
+
+ virtual HttpError onHttpHeaderComplete(bool chunked, size_t& data_size) {
+ LOG_F(LS_VERBOSE) << "chunked: " << chunked << " size: " << data_size;
+ Event e = { E_HEADER_COMPLETE, chunked, data_size, HM_NONE, HE_NONE};
+ events.push_back(e);
+ if (obtain_stream) {
+ ObtainDocumentStream();
+ }
+ return HE_NONE;
+ }
+ virtual void onHttpComplete(HttpMode mode, HttpError err) {
+ LOG_F(LS_VERBOSE) << "mode: " << mode << " err: " << err;
+ Event e = { E_COMPLETE, false, 0, mode, err };
+ events.push_back(e);
+ }
+ virtual void onHttpClosed(HttpError err) {
+ LOG_F(LS_VERBOSE) << "err: " << err;
+ Event e = { E_CLOSED, false, 0, HM_NONE, err };
+ events.push_back(e);
+ }
+
+ void SetupSource(const char* response);
+
+ void VerifyHeaderComplete(size_t event_count, bool empty_doc);
+ void VerifyDocumentContents(const char* expected_data,
+ size_t expected_length = SIZE_UNKNOWN);
+
+ void ObtainDocumentStream();
+ void VerifyDocumentStreamIsOpening();
+ void VerifyDocumentStreamOpenEvent();
+ void ReadDocumentStreamData(const char* expected_data);
+ void VerifyDocumentStreamIsEOS();
+
+ void SetupDocument(const char* response);
+ void VerifySourceContents(const char* expected_data,
+ size_t expected_length = SIZE_UNKNOWN);
+
+ void VerifyTransferComplete(HttpMode mode, HttpError error);
+
+ HttpBase base;
+ MemoryStream* mem;
+ HttpResponseData data;
+
+ // The source of http data, and source events
+ testing::StreamSource src;
+ std::vector<Event> events;
+
+ // Document stream, and stream events
+ bool obtain_stream;
+ StreamInterface* http_stream;
+ testing::StreamSink sink;
+};
+
+void HttpBaseTest::SetupSource(const char* http_data) {
+ LOG_F(LS_VERBOSE) << "Enter";
+
+ src.SetState(SS_OPENING);
+ src.QueueString(http_data);
+
+ base.notify(this);
+ base.attach(&src);
+ EXPECT_TRUE(events.empty());
+
+ src.SetState(SS_OPEN);
+ ASSERT_EQ(1U, events.size());
+ EXPECT_EQ(E_COMPLETE, events[0].event);
+ EXPECT_EQ(HM_CONNECT, events[0].mode);
+ EXPECT_EQ(HE_NONE, events[0].err);
+ events.clear();
+
+ mem = new MemoryStream;
+ data.document.reset(mem);
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+void HttpBaseTest::VerifyHeaderComplete(size_t event_count, bool empty_doc) {
+ LOG_F(LS_VERBOSE) << "Enter";
+
+ ASSERT_EQ(event_count, events.size());
+ EXPECT_EQ(E_HEADER_COMPLETE, events[0].event);
+
+ std::string header;
+ EXPECT_EQ(HVER_1_1, data.version);
+ EXPECT_EQ(static_cast<uint32>(HC_OK), data.scode);
+ EXPECT_TRUE(data.hasHeader(HH_PROXY_AUTHORIZATION, &header));
+ EXPECT_EQ("42", header);
+ EXPECT_TRUE(data.hasHeader(HH_CONNECTION, &header));
+ EXPECT_EQ("Keep-Alive", header);
+
+ if (empty_doc) {
+ EXPECT_FALSE(events[0].chunked);
+ EXPECT_EQ(0U, events[0].data_size);
+
+ EXPECT_TRUE(data.hasHeader(HH_CONTENT_LENGTH, &header));
+ EXPECT_EQ("0", header);
+ } else {
+ EXPECT_TRUE(events[0].chunked);
+ EXPECT_EQ(SIZE_UNKNOWN, events[0].data_size);
+
+ EXPECT_TRUE(data.hasHeader(HH_CONTENT_TYPE, &header));
+ EXPECT_EQ("text/plain", header);
+ EXPECT_TRUE(data.hasHeader(HH_TRANSFER_ENCODING, &header));
+ EXPECT_EQ("chunked", header);
+ }
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+void HttpBaseTest::VerifyDocumentContents(const char* expected_data,
+ size_t expected_length) {
+ LOG_F(LS_VERBOSE) << "Enter";
+
+ if (SIZE_UNKNOWN == expected_length) {
+ expected_length = strlen(expected_data);
+ }
+ EXPECT_EQ(mem, data.document.get());
+
+ size_t length;
+ mem->GetSize(&length);
+ EXPECT_EQ(expected_length, length);
+ EXPECT_TRUE(0 == memcmp(expected_data, mem->GetBuffer(), length));
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+void HttpBaseTest::ObtainDocumentStream() {
+ LOG_F(LS_VERBOSE) << "Enter";
+ EXPECT_FALSE(http_stream);
+ http_stream = base.GetDocumentStream();
+ ASSERT_TRUE(NULL != http_stream);
+ sink.Monitor(http_stream);
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+void HttpBaseTest::VerifyDocumentStreamIsOpening() {
+ LOG_F(LS_VERBOSE) << "Enter";
+ ASSERT_TRUE(NULL != http_stream);
+ EXPECT_EQ(0, sink.Events(http_stream));
+ EXPECT_EQ(SS_OPENING, http_stream->GetState());
+
+ size_t read = 0;
+ char buffer[5] = { 0 };
+ EXPECT_EQ(SR_BLOCK, http_stream->Read(buffer, sizeof(buffer), &read, NULL));
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+void HttpBaseTest::VerifyDocumentStreamOpenEvent() {
+ LOG_F(LS_VERBOSE) << "Enter";
+
+ ASSERT_TRUE(NULL != http_stream);
+ EXPECT_EQ(SE_OPEN | SE_READ, sink.Events(http_stream));
+ EXPECT_EQ(SS_OPEN, http_stream->GetState());
+
+ // HTTP headers haven't arrived yet
+ EXPECT_EQ(0U, events.size());
+ EXPECT_EQ(static_cast<uint32>(HC_INTERNAL_SERVER_ERROR), data.scode);
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+void HttpBaseTest::ReadDocumentStreamData(const char* expected_data) {
+ LOG_F(LS_VERBOSE) << "Enter";
+
+ ASSERT_TRUE(NULL != http_stream);
+ EXPECT_EQ(SS_OPEN, http_stream->GetState());
+
+ // Pump the HTTP I/O using Read, and verify the results.
+ size_t verified_length = 0;
+ const size_t expected_length = strlen(expected_data);
+ while (verified_length < expected_length) {
+ size_t read = 0;
+ char buffer[5] = { 0 };
+ size_t amt_to_read = _min(expected_length - verified_length, sizeof(buffer));
+ EXPECT_EQ(SR_SUCCESS, http_stream->Read(buffer, amt_to_read, &read, NULL));
+ EXPECT_EQ(amt_to_read, read);
+ EXPECT_TRUE(0 == memcmp(expected_data + verified_length, buffer, read));
+ verified_length += read;
+ }
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+void HttpBaseTest::VerifyDocumentStreamIsEOS() {
+ LOG_F(LS_VERBOSE) << "Enter";
+
+ ASSERT_TRUE(NULL != http_stream);
+ size_t read = 0;
+ char buffer[5] = { 0 };
+ EXPECT_EQ(SR_EOS, http_stream->Read(buffer, sizeof(buffer), &read, NULL));
+ EXPECT_EQ(SS_CLOSED, http_stream->GetState());
+
+ // When EOS is caused by Read, we don't expect SE_CLOSE
+ EXPECT_EQ(0, sink.Events(http_stream));
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+void HttpBaseTest::SetupDocument(const char* document_data) {
+ LOG_F(LS_VERBOSE) << "Enter";
+ src.SetState(SS_OPEN);
+
+ base.notify(this);
+ base.attach(&src);
+ EXPECT_TRUE(events.empty());
+
+ if (document_data) {
+ // Note: we could just call data.set_success("text/plain", mem), but that
+ // won't allow us to use the chunked transfer encoding.
+ mem = new MemoryStream(document_data);
+ data.document.reset(mem);
+ data.setHeader(HH_CONTENT_TYPE, "text/plain");
+ data.setHeader(HH_TRANSFER_ENCODING, "chunked");
+ } else {
+ data.setHeader(HH_CONTENT_LENGTH, "0");
+ }
+ data.scode = HC_OK;
+ data.setHeader(HH_PROXY_AUTHORIZATION, "42");
+ data.setHeader(HH_CONNECTION, "Keep-Alive");
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+void HttpBaseTest::VerifySourceContents(const char* expected_data,
+ size_t expected_length) {
+ LOG_F(LS_VERBOSE) << "Enter";
+ if (SIZE_UNKNOWN == expected_length) {
+ expected_length = strlen(expected_data);
+ }
+ std::string contents = src.ReadData();
+ EXPECT_EQ(expected_length, contents.length());
+ EXPECT_TRUE(0 == memcmp(expected_data, contents.data(), expected_length));
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+void HttpBaseTest::VerifyTransferComplete(HttpMode mode, HttpError error) {
+ LOG_F(LS_VERBOSE) << "Enter";
+ // Verify that http operation has completed
+ ASSERT_TRUE(events.size() > 0);
+ size_t last_event = events.size() - 1;
+ EXPECT_EQ(E_COMPLETE, events[last_event].event);
+ EXPECT_EQ(mode, events[last_event].mode);
+ EXPECT_EQ(error, events[last_event].err);
+ LOG_F(LS_VERBOSE) << "Exit";
+}
+
+//
+// Tests
+//
+
+TEST_F(HttpBaseTest, SupportsSend) {
+ // Queue response document
+ SetupDocument("Goodbye!");
+
+ // Begin send
+ base.send(&data);
+
+ // Send completed successfully
+ VerifyTransferComplete(HM_SEND, HE_NONE);
+ VerifySourceContents(kHttpResponse);
+}
+
+TEST_F(HttpBaseTest, SupportsSendNoDocument) {
+ // Queue response document
+ SetupDocument(NULL);
+
+ // Begin send
+ base.send(&data);
+
+ // Send completed successfully
+ VerifyTransferComplete(HM_SEND, HE_NONE);
+ VerifySourceContents(kHttpEmptyResponse);
+}
+
+TEST_F(HttpBaseTest, SignalsCompleteOnInterruptedSend) {
+ // This test is attempting to expose a bug that occurs when a particular
+  // base object is used for receiving, and then used for sending. In
+ // particular, the HttpParser state is different after receiving. Simulate
+ // that here.
+ SetupSource(kHttpResponse);
+ base.recv(&data);
+ VerifyTransferComplete(HM_RECV, HE_NONE);
+
+ src.Clear();
+ data.clear(true);
+ events.clear();
+ base.detach();
+
+ // Queue response document
+ SetupDocument("Goodbye!");
+
+ // Prevent entire response from being sent
+ const size_t kInterruptedLength = strlen(kHttpResponse) - 1;
+ src.SetWriteBlock(kInterruptedLength);
+
+ // Begin send
+ base.send(&data);
+
+ // Document is mostly complete, but no completion signal yet.
+ EXPECT_TRUE(events.empty());
+ VerifySourceContents(kHttpResponse, kInterruptedLength);
+
+ src.SetState(SS_CLOSED);
+
+ // Send completed with disconnect error, and no additional data.
+ VerifyTransferComplete(HM_SEND, HE_DISCONNECTED);
+ EXPECT_TRUE(src.ReadData().empty());
+}
+
+TEST_F(HttpBaseTest, SupportsReceiveViaDocumentPush) {
+ // Queue response document
+ SetupSource(kHttpResponse);
+
+ // Begin receive
+ base.recv(&data);
+
+ // Document completed successfully
+ VerifyHeaderComplete(2, false);
+ VerifyTransferComplete(HM_RECV, HE_NONE);
+ VerifyDocumentContents("Goodbye!");
+}
+
+TEST_F(HttpBaseTest, SupportsReceiveViaStreamPull) {
+ // Switch to pull mode
+ ObtainDocumentStream();
+ VerifyDocumentStreamIsOpening();
+
+ // Queue response document
+ SetupSource(kHttpResponse);
+ VerifyDocumentStreamIsOpening();
+
+ // Begin receive
+ base.recv(&data);
+
+ // Pull document data
+ VerifyDocumentStreamOpenEvent();
+ ReadDocumentStreamData("Goodbye!");
+ VerifyDocumentStreamIsEOS();
+
+ // Document completed successfully
+ VerifyHeaderComplete(2, false);
+ VerifyTransferComplete(HM_RECV, HE_NONE);
+ VerifyDocumentContents("");
+}
+
+TEST_F(HttpBaseTest, DISABLED_AllowsCloseStreamBeforeDocumentIsComplete) {
+
+ // TODO: Remove extra logging once test failure is understood
+ int old_sev = rtc::LogMessage::GetLogToDebug();
+ rtc::LogMessage::LogToDebug(LS_VERBOSE);
+
+
+ // Switch to pull mode
+ ObtainDocumentStream();
+ VerifyDocumentStreamIsOpening();
+
+ // Queue response document
+ SetupSource(kHttpResponse);
+ VerifyDocumentStreamIsOpening();
+
+ // Begin receive
+ base.recv(&data);
+
+ // Pull some of the data
+ VerifyDocumentStreamOpenEvent();
+ ReadDocumentStreamData("Goodb");
+
+ // We've seen the header by now
+ VerifyHeaderComplete(1, false);
+
+ // Close the pull stream, this will transition back to push I/O.
+ http_stream->Close();
+ Thread::Current()->ProcessMessages(0);
+
+ // Remainder of document completed successfully
+ VerifyTransferComplete(HM_RECV, HE_NONE);
+ VerifyDocumentContents("ye!");
+
+ rtc::LogMessage::LogToDebug(old_sev);
+}
+
+TEST_F(HttpBaseTest, AllowsGetDocumentStreamInResponseToHttpHeader) {
+ // Queue response document
+ SetupSource(kHttpResponse);
+
+ // Switch to pull mode in response to header arrival
+ obtain_stream = true;
+
+ // Begin receive
+ base.recv(&data);
+
+  // We've already seen the header, but no data has arrived
+ VerifyHeaderComplete(1, false);
+ VerifyDocumentContents("");
+
+ // Pull the document data
+ ReadDocumentStreamData("Goodbye!");
+ VerifyDocumentStreamIsEOS();
+
+ // Document completed successfully
+ VerifyTransferComplete(HM_RECV, HE_NONE);
+ VerifyDocumentContents("");
+}
+
+TEST_F(HttpBaseTest, AllowsGetDocumentStreamWithEmptyDocumentBody) {
+ // Queue empty response document
+ SetupSource(kHttpEmptyResponse);
+
+ // Switch to pull mode in response to header arrival
+ obtain_stream = true;
+
+ // Begin receive
+ base.recv(&data);
+
+  // We've already seen the header, but no data has arrived
+ VerifyHeaderComplete(1, true);
+ VerifyDocumentContents("");
+
+ // The document is still open, until we attempt to read
+ ASSERT_TRUE(NULL != http_stream);
+ EXPECT_EQ(SS_OPEN, http_stream->GetState());
+
+ // Attempt to read data, and discover EOS
+ VerifyDocumentStreamIsEOS();
+
+ // Document completed successfully
+ VerifyTransferComplete(HM_RECV, HE_NONE);
+ VerifyDocumentContents("");
+}
+
+TEST_F(HttpBaseTest, SignalsDocumentStreamCloseOnUnexpectedClose) {
+ // Switch to pull mode
+ ObtainDocumentStream();
+ VerifyDocumentStreamIsOpening();
+
+ // Queue response document
+ SetupSource(kHttpResponsePrefix);
+ VerifyDocumentStreamIsOpening();
+
+ // Begin receive
+ base.recv(&data);
+
+ // Pull document data
+ VerifyDocumentStreamOpenEvent();
+ ReadDocumentStreamData("Goodbye!");
+
+ // Simulate unexpected close
+ src.SetState(SS_CLOSED);
+
+ // Observe error event on document stream
+ EXPECT_EQ(testing::SSE_ERROR, sink.Events(http_stream));
+
+ // Future reads give an error
+ int error = 0;
+ char buffer[5] = { 0 };
+ EXPECT_EQ(SR_ERROR, http_stream->Read(buffer, sizeof(buffer), NULL, &error));
+ EXPECT_EQ(HE_DISCONNECTED, error);
+
+ // Document completed with error
+ VerifyHeaderComplete(2, false);
+ VerifyTransferComplete(HM_RECV, HE_DISCONNECTED);
+ VerifyDocumentContents("");
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/httpclient.cc b/chromium/third_party/webrtc/base/httpclient.cc
new file mode 100644
index 00000000000..62567721050
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpclient.cc
@@ -0,0 +1,829 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <time.h>
+
+#include "webrtc/base/httpcommon-inl.h"
+
+#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/diskcache.h"
+#include "webrtc/base/httpclient.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/socketstream.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+//////////////////////////////////////////////////////////////////////
+// Helpers
+//////////////////////////////////////////////////////////////////////
+
+namespace {
+
+const size_t kCacheHeader = 0;
+const size_t kCacheBody = 1;
+
+// Convert decimal string to integer
+bool HttpStringToUInt(const std::string& str, size_t* val) {
+ ASSERT(NULL != val);
+ char* eos = NULL;
+ *val = strtoul(str.c_str(), &eos, 10);
+ return (*eos == '\0');
+}
+
+bool HttpShouldCache(const HttpTransaction& t) {
+ bool verb_allows_cache = (t.request.verb == HV_GET)
+ || (t.request.verb == HV_HEAD);
+ bool is_range_response = t.response.hasHeader(HH_CONTENT_RANGE, NULL);
+ bool has_expires = t.response.hasHeader(HH_EXPIRES, NULL);
+ bool request_allows_cache =
+ has_expires || (std::string::npos != t.request.path.find('?'));
+ bool response_allows_cache =
+ has_expires || HttpCodeIsCacheable(t.response.scode);
+
+ bool may_cache = verb_allows_cache
+ && request_allows_cache
+ && response_allows_cache
+ && !is_range_response;
+
+ std::string value;
+ if (t.response.hasHeader(HH_CACHE_CONTROL, &value)) {
+ HttpAttributeList directives;
+ HttpParseAttributes(value.data(), value.size(), directives);
+ // Response Directives Summary:
+ // public - always cacheable
+ // private - do not cache in a shared cache
+ // no-cache - may cache, but must revalidate whether fresh or stale
+ // no-store - sensitive information, do not cache or store in any way
+ // max-age - supplants Expires for staleness
+ // s-maxage - use as max-age for shared caches, ignore otherwise
+ // must-revalidate - may cache, but must revalidate after stale
+ // proxy-revalidate - shared cache must revalidate
+ if (HttpHasAttribute(directives, "no-store", NULL)) {
+ may_cache = false;
+ } else if (HttpHasAttribute(directives, "public", NULL)) {
+ may_cache = true;
+ }
+ }
+ return may_cache;
+}
+
+enum HttpCacheState {
+ HCS_FRESH, // In cache, may use
+ HCS_STALE, // In cache, must revalidate
+ HCS_NONE // Not in cache
+};
+
+HttpCacheState HttpGetCacheState(const HttpTransaction& t) {
+ // Temporaries
+ std::string s_temp;
+ time_t u_temp;
+
+ // Current time
+ size_t now = time(0);
+
+ HttpAttributeList cache_control;
+ if (t.response.hasHeader(HH_CACHE_CONTROL, &s_temp)) {
+ HttpParseAttributes(s_temp.data(), s_temp.size(), cache_control);
+ }
+
+ // Compute age of cache document
+ time_t date;
+ if (!t.response.hasHeader(HH_DATE, &s_temp)
+ || !HttpDateToSeconds(s_temp, &date))
+ return HCS_NONE;
+
+ // TODO: Timestamp when cache request sent and response received?
+ time_t request_time = date;
+ time_t response_time = date;
+
+ time_t apparent_age = 0;
+ if (response_time > date) {
+ apparent_age = response_time - date;
+ }
+
+ size_t corrected_received_age = apparent_age;
+ size_t i_temp;
+ if (t.response.hasHeader(HH_AGE, &s_temp)
+ && HttpStringToUInt(s_temp, (&i_temp))) {
+ u_temp = static_cast<time_t>(i_temp);
+ corrected_received_age = stdmax(apparent_age, u_temp);
+ }
+
+ size_t response_delay = response_time - request_time;
+ size_t corrected_initial_age = corrected_received_age + response_delay;
+ size_t resident_time = now - response_time;
+ size_t current_age = corrected_initial_age + resident_time;
+
+ // Compute lifetime of document
+ size_t lifetime;
+ if (HttpHasAttribute(cache_control, "max-age", &s_temp)) {
+ lifetime = atoi(s_temp.c_str());
+ } else if (t.response.hasHeader(HH_EXPIRES, &s_temp)
+ && HttpDateToSeconds(s_temp, &u_temp)) {
+ lifetime = u_temp - date;
+ } else if (t.response.hasHeader(HH_LAST_MODIFIED, &s_temp)
+ && HttpDateToSeconds(s_temp, &u_temp)) {
+ // TODO: Issue warning 113 if age > 24 hours
+ lifetime = static_cast<size_t>(now - u_temp) / 10;
+ } else {
+ return HCS_STALE;
+ }
+
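+  // Illustrative numbers: an Age header of 100s, no response delay, and 60s
+  // of residence in the cache give current_age = 100 + 0 + 60 = 160; with
+  // max-age=300 the entry is still fresh (160 < 300).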
+ return (lifetime > current_age) ? HCS_FRESH : HCS_STALE;
+}
+
+enum HttpValidatorStrength {
+ HVS_NONE,
+ HVS_WEAK,
+ HVS_STRONG
+};
+
+HttpValidatorStrength
+HttpRequestValidatorLevel(const HttpRequestData& request) {
+ if (HV_GET != request.verb)
+ return HVS_STRONG;
+ return request.hasHeader(HH_RANGE, NULL) ? HVS_STRONG : HVS_WEAK;
+}
+
+HttpValidatorStrength
+HttpResponseValidatorLevel(const HttpResponseData& response) {
+ std::string value;
+ if (response.hasHeader(HH_ETAG, &value)) {
+ bool is_weak = (strnicmp(value.c_str(), "W/", 2) == 0);
+ return is_weak ? HVS_WEAK : HVS_STRONG;
+ }
+ if (response.hasHeader(HH_LAST_MODIFIED, &value)) {
+ time_t last_modified, date;
+ if (HttpDateToSeconds(value, &last_modified)
+ && response.hasHeader(HH_DATE, &value)
+ && HttpDateToSeconds(value, &date)
+ && (last_modified + 60 < date)) {
+ return HVS_STRONG;
+ }
+ return HVS_WEAK;
+ }
+ return HVS_NONE;
+}
+
+std::string GetCacheID(const HttpRequestData& request) {
+ std::string id, url;
+ id.append(ToString(request.verb));
+ id.append("_");
+ request.getAbsoluteUri(&url);
+ id.append(url);
+ return id;
+}
+
+} // anonymous namespace
+
+//////////////////////////////////////////////////////////////////////
+// Public Helpers
+//////////////////////////////////////////////////////////////////////
+
+bool HttpWriteCacheHeaders(const HttpResponseData* response,
+ StreamInterface* output, size_t* size) {
+ size_t length = 0;
+ // Write all unknown and end-to-end headers to a cache file
+ for (HttpData::const_iterator it = response->begin();
+ it != response->end(); ++it) {
+ HttpHeader header;
+ if (FromString(header, it->first) && !HttpHeaderIsEndToEnd(header))
+ continue;
+ length += it->first.length() + 2 + it->second.length() + 2;
+ if (!output)
+ continue;
+ std::string formatted_header(it->first);
+ formatted_header.append(": ");
+ formatted_header.append(it->second);
+ formatted_header.append("\r\n");
+ StreamResult result = output->WriteAll(formatted_header.data(),
+ formatted_header.length(),
+ NULL, NULL);
+ if (SR_SUCCESS != result) {
+ return false;
+ }
+ }
+ if (output && (SR_SUCCESS != output->WriteAll("\r\n", 2, NULL, NULL))) {
+ return false;
+ }
+ length += 2;
+ if (size)
+ *size = length;
+ return true;
+}
+
+bool HttpReadCacheHeaders(StreamInterface* input, HttpResponseData* response,
+ HttpData::HeaderCombine combine) {
+ while (true) {
+ std::string formatted_header;
+ StreamResult result = input->ReadLine(&formatted_header);
+ if ((SR_EOS == result) || (1 == formatted_header.size())) {
+ break;
+ }
+ if (SR_SUCCESS != result) {
+ return false;
+ }
+ size_t end_of_name = formatted_header.find(':');
+ if (std::string::npos == end_of_name) {
+ LOG_F(LS_WARNING) << "Malformed cache header";
+ continue;
+ }
+ size_t start_of_value = end_of_name + 1;
+ size_t end_of_value = formatted_header.length();
+ while ((start_of_value < end_of_value)
+ && isspace(formatted_header[start_of_value]))
+ ++start_of_value;
+ while ((start_of_value < end_of_value)
+ && isspace(formatted_header[end_of_value-1]))
+ --end_of_value;
+ size_t value_length = end_of_value - start_of_value;
+
+ std::string name(formatted_header.substr(0, end_of_name));
+ std::string value(formatted_header.substr(start_of_value, value_length));
+ response->changeHeader(name, value, combine);
+ }
+ return true;
+}
+
+//////////////////////////////////////////////////////////////////////
+// HttpClient
+//////////////////////////////////////////////////////////////////////
+
+const size_t kDefaultRetries = 1;
+const size_t kMaxRedirects = 5;
+
+HttpClient::HttpClient(const std::string& agent, StreamPool* pool,
+ HttpTransaction* transaction)
+ : agent_(agent), pool_(pool),
+ transaction_(transaction), free_transaction_(false),
+ retries_(kDefaultRetries), attempt_(0), redirects_(0),
+ redirect_action_(REDIRECT_DEFAULT),
+ uri_form_(URI_DEFAULT), cache_(NULL), cache_state_(CS_READY),
+ resolver_(NULL) {
+ base_.notify(this);
+ if (NULL == transaction_) {
+ free_transaction_ = true;
+ transaction_ = new HttpTransaction;
+ }
+}
+
+HttpClient::~HttpClient() {
+ base_.notify(NULL);
+ base_.abort(HE_SHUTDOWN);
+ if (resolver_) {
+ resolver_->Destroy(false);
+ }
+ release();
+ if (free_transaction_)
+ delete transaction_;
+}
+
+void HttpClient::reset() {
+ server_.Clear();
+ request().clear(true);
+ response().clear(true);
+ context_.reset();
+ redirects_ = 0;
+ base_.abort(HE_OPERATION_CANCELLED);
+}
+
+void HttpClient::OnResolveResult(AsyncResolverInterface* resolver) {
+ if (resolver != resolver_) {
+ return;
+ }
+ int error = resolver_->GetError();
+ server_ = resolver_->address();
+ resolver_->Destroy(false);
+ resolver_ = NULL;
+ if (error != 0) {
+ LOG(LS_ERROR) << "Error " << error << " resolving name: "
+ << server_;
+ onHttpComplete(HM_CONNECT, HE_CONNECT_FAILED);
+ } else {
+ connect();
+ }
+}
+
+void HttpClient::StartDNSLookup() {
+ resolver_ = new AsyncResolver();
+ resolver_->SignalDone.connect(this, &HttpClient::OnResolveResult);
+ resolver_->Start(server_);
+}
+
+void HttpClient::set_server(const SocketAddress& address) {
+ server_ = address;
+ // Setting 'Host' here allows it to be overridden before starting the request,
+ // if necessary.
+ request().setHeader(HH_HOST, HttpAddress(server_, false), true);
+}
+
+StreamInterface* HttpClient::GetDocumentStream() {
+ return base_.GetDocumentStream();
+}
+
+void HttpClient::start() {
+ if (base_.mode() != HM_NONE) {
+ // call reset() to abort an in-progress request
+ ASSERT(false);
+ return;
+ }
+
+ ASSERT(!IsCacheActive());
+
+ if (request().hasHeader(HH_TRANSFER_ENCODING, NULL)) {
+    // The exact content size must be known on the client. Instead of using
+    // chunked encoding, wrap the data with an auto-caching file or memory
+    // stream.
+ ASSERT(false);
+ return;
+ }
+
+ attempt_ = 0;
+
+  // If no content has been specified, use a length of 0.
+ request().setHeader(HH_CONTENT_LENGTH, "0", false);
+
+ if (!agent_.empty()) {
+ request().setHeader(HH_USER_AGENT, agent_, false);
+ }
+
+ UriForm uri_form = uri_form_;
+ if (PROXY_HTTPS == proxy_.type) {
+ // Proxies require absolute form
+ uri_form = URI_ABSOLUTE;
+ request().version = HVER_1_0;
+ request().setHeader(HH_PROXY_CONNECTION, "Keep-Alive", false);
+ } else {
+ request().setHeader(HH_CONNECTION, "Keep-Alive", false);
+ }
+
+ if (URI_ABSOLUTE == uri_form) {
+ // Convert to absolute uri form
+ std::string url;
+ if (request().getAbsoluteUri(&url)) {
+ request().path = url;
+ } else {
+ LOG(LS_WARNING) << "Couldn't obtain absolute uri";
+ }
+ } else if (URI_RELATIVE == uri_form) {
+ // Convert to relative uri form
+ std::string host, path;
+ if (request().getRelativeUri(&host, &path)) {
+ request().setHeader(HH_HOST, host);
+ request().path = path;
+ } else {
+ LOG(LS_WARNING) << "Couldn't obtain relative uri";
+ }
+ }
+
+ if ((NULL != cache_) && CheckCache()) {
+ return;
+ }
+
+ connect();
+}
+
+void HttpClient::connect() {
+ int stream_err;
+ if (server_.IsUnresolvedIP()) {
+ StartDNSLookup();
+ return;
+ }
+ StreamInterface* stream = pool_->RequestConnectedStream(server_, &stream_err);
+ if (stream == NULL) {
+ ASSERT(0 != stream_err);
+ LOG(LS_ERROR) << "RequestConnectedStream error: " << stream_err;
+ onHttpComplete(HM_CONNECT, HE_CONNECT_FAILED);
+ } else {
+ base_.attach(stream);
+ if (stream->GetState() == SS_OPEN) {
+ base_.send(&transaction_->request);
+ }
+ }
+}
+
+void HttpClient::prepare_get(const std::string& url) {
+ reset();
+ Url<char> purl(url);
+ set_server(SocketAddress(purl.host(), purl.port()));
+ request().verb = HV_GET;
+ request().path = purl.full_path();
+}
+
+void HttpClient::prepare_post(const std::string& url,
+ const std::string& content_type,
+ StreamInterface* request_doc) {
+ reset();
+ Url<char> purl(url);
+ set_server(SocketAddress(purl.host(), purl.port()));
+ request().verb = HV_POST;
+ request().path = purl.full_path();
+ request().setContent(content_type, request_doc);
+}
+
+void HttpClient::release() {
+ if (StreamInterface* stream = base_.detach()) {
+ pool_->ReturnConnectedStream(stream);
+ }
+}
+
+bool HttpClient::ShouldRedirect(std::string* location) const {
+ // TODO: Unittest redirection.
+ if ((REDIRECT_NEVER == redirect_action_)
+ || !HttpCodeIsRedirection(response().scode)
+ || !response().hasHeader(HH_LOCATION, location)
+ || (redirects_ >= kMaxRedirects))
+ return false;
+ return (REDIRECT_ALWAYS == redirect_action_)
+ || (HC_SEE_OTHER == response().scode)
+ || (HV_HEAD == request().verb)
+ || (HV_GET == request().verb);
+}
+
+bool HttpClient::BeginCacheFile() {
+ ASSERT(NULL != cache_);
+ ASSERT(CS_READY == cache_state_);
+
+ std::string id = GetCacheID(request());
+ CacheLock lock(cache_, id, true);
+ if (!lock.IsLocked()) {
+ LOG_F(LS_WARNING) << "Couldn't lock cache";
+ return false;
+ }
+
+ if (HE_NONE != WriteCacheHeaders(id)) {
+ return false;
+ }
+
+ scoped_ptr<StreamInterface> stream(cache_->WriteResource(id, kCacheBody));
+ if (!stream) {
+ LOG_F(LS_ERROR) << "Couldn't open body cache";
+ return false;
+ }
+ lock.Commit();
+
+ // Let's secretly replace the response document with Folgers Crystals,
+ // er, StreamTap, so that we can mirror the data to our cache.
+ StreamInterface* output = response().document.release();
+ if (!output) {
+ output = new NullStream;
+ }
+ StreamTap* tap = new StreamTap(output, stream.release());
+ response().document.reset(tap);
+ return true;
+}
+
+HttpError HttpClient::WriteCacheHeaders(const std::string& id) {
+ scoped_ptr<StreamInterface> stream(cache_->WriteResource(id, kCacheHeader));
+ if (!stream) {
+ LOG_F(LS_ERROR) << "Couldn't open header cache";
+ return HE_CACHE;
+ }
+
+ if (!HttpWriteCacheHeaders(&transaction_->response, stream.get(), NULL)) {
+ LOG_F(LS_ERROR) << "Couldn't write header cache";
+ return HE_CACHE;
+ }
+
+ return HE_NONE;
+}
+
+void HttpClient::CompleteCacheFile() {
+ // Restore previous response document
+ StreamTap* tap = static_cast<StreamTap*>(response().document.release());
+ response().document.reset(tap->Detach());
+
+ int error;
+ StreamResult result = tap->GetTapResult(&error);
+
+ // Delete the tap and cache stream (which completes cache unlock)
+ delete tap;
+
+ if (SR_SUCCESS != result) {
+ LOG(LS_ERROR) << "Cache file error: " << error;
+ cache_->DeleteResource(GetCacheID(request()));
+ }
+}
+
+bool HttpClient::CheckCache() {
+ ASSERT(NULL != cache_);
+ ASSERT(CS_READY == cache_state_);
+
+ std::string id = GetCacheID(request());
+ if (!cache_->HasResource(id)) {
+ // No cache file available
+ return false;
+ }
+
+ HttpError error = ReadCacheHeaders(id, true);
+
+ if (HE_NONE == error) {
+ switch (HttpGetCacheState(*transaction_)) {
+ case HCS_FRESH:
+ // Cache content is good, read from cache
+ break;
+ case HCS_STALE:
+ // Cache content may be acceptable. Issue a validation request.
+ if (PrepareValidate()) {
+ return false;
+ }
+ // Couldn't validate, fall through.
+ case HCS_NONE:
+      // Cache content is not usable. Issue a regular request.
+ response().clear(false);
+ return false;
+ }
+ }
+
+ if (HE_NONE == error) {
+ error = ReadCacheBody(id);
+ cache_state_ = CS_READY;
+ }
+
+ if (HE_CACHE == error) {
+ LOG_F(LS_WARNING) << "Cache failure, continuing with normal request";
+ response().clear(false);
+ return false;
+ }
+
+ SignalHttpClientComplete(this, error);
+ return true;
+}
+
+HttpError HttpClient::ReadCacheHeaders(const std::string& id, bool override) {
+ scoped_ptr<StreamInterface> stream(cache_->ReadResource(id, kCacheHeader));
+ if (!stream) {
+ return HE_CACHE;
+ }
+
+ HttpData::HeaderCombine combine =
+ override ? HttpData::HC_REPLACE : HttpData::HC_AUTO;
+
+ if (!HttpReadCacheHeaders(stream.get(), &transaction_->response, combine)) {
+ LOG_F(LS_ERROR) << "Error reading cache headers";
+ return HE_CACHE;
+ }
+
+ response().scode = HC_OK;
+ return HE_NONE;
+}
+
+HttpError HttpClient::ReadCacheBody(const std::string& id) {
+ cache_state_ = CS_READING;
+
+ HttpError error = HE_NONE;
+
+ size_t data_size;
+ scoped_ptr<StreamInterface> stream(cache_->ReadResource(id, kCacheBody));
+ if (!stream || !stream->GetAvailable(&data_size)) {
+ LOG_F(LS_ERROR) << "Unavailable cache body";
+ error = HE_CACHE;
+ } else {
+ error = OnHeaderAvailable(false, false, data_size);
+ }
+
+ if ((HE_NONE == error)
+ && (HV_HEAD != request().verb)
+ && response().document) {
+ char buffer[1024 * 64];
+ StreamResult result = Flow(stream.get(), buffer, ARRAY_SIZE(buffer),
+ response().document.get());
+ if (SR_SUCCESS != result) {
+ error = HE_STREAM;
+ }
+ }
+
+ return error;
+}
+
+bool HttpClient::PrepareValidate() {
+ ASSERT(CS_READY == cache_state_);
+ // At this point, request() contains the pending request, and response()
+ // contains the cached response headers. Reformat the request to validate
+ // the cached content.
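+  // For example, a cached ETag becomes an If-None-Match header on the
+  // validation request, and an HC_NOT_MODIFIED response is then routed
+  // through CompleteValidate() (see onHttpHeaderComplete).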
+ HttpValidatorStrength vs_required = HttpRequestValidatorLevel(request());
+ HttpValidatorStrength vs_available = HttpResponseValidatorLevel(response());
+ if (vs_available < vs_required) {
+ return false;
+ }
+ std::string value;
+ if (response().hasHeader(HH_ETAG, &value)) {
+ request().addHeader(HH_IF_NONE_MATCH, value);
+ }
+ if (response().hasHeader(HH_LAST_MODIFIED, &value)) {
+ request().addHeader(HH_IF_MODIFIED_SINCE, value);
+ }
+ response().clear(false);
+ cache_state_ = CS_VALIDATING;
+ return true;
+}
+
+HttpError HttpClient::CompleteValidate() {
+ ASSERT(CS_VALIDATING == cache_state_);
+
+ std::string id = GetCacheID(request());
+
+ // Merge cached headers with new headers
+ HttpError error = ReadCacheHeaders(id, false);
+ if (HE_NONE != error) {
+ // Rewrite merged headers to cache
+ CacheLock lock(cache_, id);
+ error = WriteCacheHeaders(id);
+ }
+ if (HE_NONE != error) {
+ error = ReadCacheBody(id);
+ }
+ return error;
+}
+
+HttpError HttpClient::OnHeaderAvailable(bool ignore_data, bool chunked,
+ size_t data_size) {
+ // If we are ignoring the data, this is an intermediate header.
+ // TODO: don't signal intermediate headers. Instead, do all header-dependent
+ // processing now, and either set up the next request, or fail outright.
+ // TODO: by default, only write response documents with a success code.
+ SignalHeaderAvailable(this, !ignore_data, ignore_data ? 0 : data_size);
+ if (!ignore_data && !chunked && (data_size != SIZE_UNKNOWN)
+ && response().document) {
+ // Attempt to pre-allocate space for the downloaded data.
+ if (!response().document->ReserveSize(data_size)) {
+ return HE_OVERFLOW;
+ }
+ }
+ return HE_NONE;
+}
+
+//
+// HttpBase Implementation
+//
+
+HttpError HttpClient::onHttpHeaderComplete(bool chunked, size_t& data_size) {
+ if (CS_VALIDATING == cache_state_) {
+ if (HC_NOT_MODIFIED == response().scode) {
+ return CompleteValidate();
+ }
+ // Should we remove conditional headers from request?
+ cache_state_ = CS_READY;
+ cache_->DeleteResource(GetCacheID(request()));
+ // Continue processing response as normal
+ }
+
+ ASSERT(!IsCacheActive());
+ if ((request().verb == HV_HEAD) || !HttpCodeHasBody(response().scode)) {
+ // HEAD requests and certain response codes contain no body
+ data_size = 0;
+ }
+ if (ShouldRedirect(NULL)
+ || ((HC_PROXY_AUTHENTICATION_REQUIRED == response().scode)
+ && (PROXY_HTTPS == proxy_.type))) {
+ // We're going to issue another request, so ignore the incoming data.
+ base_.set_ignore_data(true);
+ }
+
+ HttpError error = OnHeaderAvailable(base_.ignore_data(), chunked, data_size);
+ if (HE_NONE != error) {
+ return error;
+ }
+
+ if ((NULL != cache_)
+ && !base_.ignore_data()
+ && HttpShouldCache(*transaction_)) {
+ if (BeginCacheFile()) {
+ cache_state_ = CS_WRITING;
+ }
+ }
+ return HE_NONE;
+}
+
+void HttpClient::onHttpComplete(HttpMode mode, HttpError err) {
+ if (((HE_DISCONNECTED == err) || (HE_CONNECT_FAILED == err)
+ || (HE_SOCKET_ERROR == err))
+ && (HC_INTERNAL_SERVER_ERROR == response().scode)
+ && (attempt_ < retries_)) {
+ // If the response code has not changed from the default, then we haven't
+ // received anything meaningful from the server, so we are eligible for a
+ // retry.
+ ++attempt_;
+ if (request().document && !request().document->Rewind()) {
+ // Unable to replay the request document.
+ err = HE_STREAM;
+ } else {
+ release();
+ connect();
+ return;
+ }
+ } else if (err != HE_NONE) {
+ // fall through
+ } else if (mode == HM_CONNECT) {
+ base_.send(&transaction_->request);
+ return;
+ } else if ((mode == HM_SEND) || HttpCodeIsInformational(response().scode)) {
+ // If you're interested in informational headers, catch
+ // SignalHeaderAvailable.
+ base_.recv(&transaction_->response);
+ return;
+ } else {
+ if (!HttpShouldKeepAlive(response())) {
+ LOG(LS_VERBOSE) << "HttpClient: closing socket";
+ base_.stream()->Close();
+ }
+ std::string location;
+ if (ShouldRedirect(&location)) {
+ Url<char> purl(location);
+ set_server(SocketAddress(purl.host(), purl.port()));
+ request().path = purl.full_path();
+ if (response().scode == HC_SEE_OTHER) {
+ request().verb = HV_GET;
+ request().clearHeader(HH_CONTENT_TYPE);
+ request().clearHeader(HH_CONTENT_LENGTH);
+ request().document.reset();
+ } else if (request().document && !request().document->Rewind()) {
+ // Unable to replay the request document.
+ ASSERT(REDIRECT_ALWAYS == redirect_action_);
+ err = HE_STREAM;
+ }
+ if (err == HE_NONE) {
+ ++redirects_;
+ context_.reset();
+ response().clear(false);
+ release();
+ start();
+ return;
+ }
+ } else if ((HC_PROXY_AUTHENTICATION_REQUIRED == response().scode)
+ && (PROXY_HTTPS == proxy_.type)) {
+ std::string authorization, auth_method;
+ HttpData::const_iterator begin = response().begin(HH_PROXY_AUTHENTICATE);
+ HttpData::const_iterator end = response().end(HH_PROXY_AUTHENTICATE);
+ for (HttpData::const_iterator it = begin; it != end; ++it) {
+ HttpAuthContext *context = context_.get();
+ HttpAuthResult res = HttpAuthenticate(
+ it->second.data(), it->second.size(),
+ proxy_.address,
+ ToString(request().verb), request().path,
+ proxy_.username, proxy_.password,
+ context, authorization, auth_method);
+ context_.reset(context);
+ if (res == HAR_RESPONSE) {
+ request().setHeader(HH_PROXY_AUTHORIZATION, authorization);
+ if (request().document && !request().document->Rewind()) {
+ err = HE_STREAM;
+ } else {
+ // Explicitly do not reset the HttpAuthContext
+ response().clear(false);
+ // TODO: Reuse socket when authenticating?
+ release();
+ start();
+ return;
+ }
+ } else if (res == HAR_IGNORE) {
+ LOG(INFO) << "Ignoring Proxy-Authenticate: " << auth_method;
+ continue;
+ } else {
+ break;
+ }
+ }
+ }
+ }
+ if (CS_WRITING == cache_state_) {
+ CompleteCacheFile();
+ cache_state_ = CS_READY;
+ } else if (CS_READING == cache_state_) {
+ cache_state_ = CS_READY;
+ }
+ release();
+ SignalHttpClientComplete(this, err);
+}
+
+void HttpClient::onHttpClosed(HttpError err) {
+ // This shouldn't occur, since we return the stream to the pool upon command
+ // completion.
+ ASSERT(false);
+}
+
+//////////////////////////////////////////////////////////////////////
+// HttpClientDefault
+//////////////////////////////////////////////////////////////////////
+
+HttpClientDefault::HttpClientDefault(SocketFactory* factory,
+ const std::string& agent,
+ HttpTransaction* transaction)
+ : ReuseSocketPool(factory ? factory : Thread::Current()->socketserver()),
+ HttpClient(agent, NULL, transaction) {
+ set_pool(this);
+}
+
+//////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/httpclient.h b/chromium/third_party/webrtc/base/httpclient.h
new file mode 100644
index 00000000000..b634b934538
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpclient.h
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_HTTPCLIENT_H__
+#define WEBRTC_BASE_HTTPCLIENT_H__
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/httpbase.h"
+#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/proxyinfo.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/socketaddress.h"
+#include "webrtc/base/socketpool.h"
+
+namespace rtc {
+
+//////////////////////////////////////////////////////////////////////
+// Client-specific http utilities
+//////////////////////////////////////////////////////////////////////
+
+// Write cache-relevant response headers to the output stream. If size is
+// non-null, it receives the length of the output in bytes. output may be NULL
+// if only the length is desired.
+bool HttpWriteCacheHeaders(const HttpResponseData* response,
+ StreamInterface* output, size_t* size);
+// Read cached headers from a stream, and then merge them into the response
+// object using the specified combine operation.
+bool HttpReadCacheHeaders(StreamInterface* input,
+ HttpResponseData* response,
+ HttpData::HeaderCombine combine);
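+//
+// A minimal round-trip sketch (the MemoryStream buffer and variable names are
+// illustrative); response and restored are HttpResponseData instances:
+//
+//   MemoryStream headers;
+//   HttpWriteCacheHeaders(&response, &headers, NULL);
+//   headers.Rewind();
+//   HttpReadCacheHeaders(&headers, &restored, HttpData::HC_REPLACE);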
+
+//////////////////////////////////////////////////////////////////////
+// HttpClient
+// Implements an HTTP 1.1 client.
+//////////////////////////////////////////////////////////////////////
+
+class DiskCache;
+class HttpClient;
+class IPNetPool;
+
+class SignalThread;
+// To opt in, define STRICT_HTTP_ERROR=1 in your makefile and use HttpError in
+// your code (HttpErrorType should only be used for code that is shared
+// with groups which have not yet migrated).
+#if STRICT_HTTP_ERROR
+typedef HttpError HttpErrorType;
+#else // !STRICT_HTTP_ERROR
+typedef int HttpErrorType;
+#endif // !STRICT_HTTP_ERROR
+
+class HttpClient : private IHttpNotify, public sigslot::has_slots<> {
+public:
+ // If HttpRequestData and HttpResponseData objects are provided, they must
+ // be freed by the caller. Otherwise, an internal object is allocated.
+ HttpClient(const std::string& agent, StreamPool* pool,
+ HttpTransaction* transaction = NULL);
+ virtual ~HttpClient();
+
+ void set_pool(StreamPool* pool) { pool_ = pool; }
+
+ void set_agent(const std::string& agent) { agent_ = agent; }
+ const std::string& agent() const { return agent_; }
+
+ void set_proxy(const ProxyInfo& proxy) { proxy_ = proxy; }
+ const ProxyInfo& proxy() const { return proxy_; }
+
+ // Request retries occur when the connection closes before the beginning of
+ // an http response is received. In these cases, the http server may have
+ // timed out the keepalive connection before it received our request. Note
+ // that if a request document cannot be rewound, no retry is made. The
+ // default is 1.
+ void set_request_retries(size_t retries) { retries_ = retries; }
+ size_t request_retries() const { return retries_; }
+
+ enum RedirectAction { REDIRECT_DEFAULT, REDIRECT_ALWAYS, REDIRECT_NEVER };
+ void set_redirect_action(RedirectAction action) { redirect_action_ = action; }
+ RedirectAction redirect_action() const { return redirect_action_; }
+ // Deprecated
+ void set_fail_redirect(bool fail_redirect) {
+ redirect_action_ = REDIRECT_NEVER;
+ }
+ bool fail_redirect() const { return (REDIRECT_NEVER == redirect_action_); }
+
+ enum UriForm { URI_DEFAULT, URI_ABSOLUTE, URI_RELATIVE };
+ void set_uri_form(UriForm form) { uri_form_ = form; }
+ UriForm uri_form() const { return uri_form_; }
+
+ void set_cache(DiskCache* cache) { ASSERT(!IsCacheActive()); cache_ = cache; }
+ bool cache_enabled() const { return (NULL != cache_); }
+
+ // reset clears the server, request, and response structures. It will also
+ // abort an active request.
+ void reset();
+
+ void set_server(const SocketAddress& address);
+ const SocketAddress& server() const { return server_; }
+
+ // Note: in order for HttpClient to retry a POST in response to
+ // an authentication challenge, a redirect response, or socket disconnection,
+ // the request document must support 'replaying' by calling Rewind() on it.
+ // In the case where just a subset of a stream should be used as the request
+ // document, the stream may be wrapped with the StreamSegment adapter.
+ HttpTransaction* transaction() { return transaction_; }
+ const HttpTransaction* transaction() const { return transaction_; }
+ HttpRequestData& request() { return transaction_->request; }
+ const HttpRequestData& request() const { return transaction_->request; }
+ HttpResponseData& response() { return transaction_->response; }
+ const HttpResponseData& response() const { return transaction_->response; }
+
+ // convenience methods
+ void prepare_get(const std::string& url);
+ void prepare_post(const std::string& url, const std::string& content_type,
+ StreamInterface* request_doc);
+
+ // Convert HttpClient to a pull-based I/O model.
+ StreamInterface* GetDocumentStream();
+
+ // After you finish setting up your request, call start.
+ void start();
+
+ // Signalled when the header has finished downloading, before the document
+ // content is processed. You may change the response document in response
+ // to this signal. The second parameter indicates whether this is an
+ // intermediate (false) or final (true) header. An intermediate header is
+ // one that generates another request, such as a redirect or authentication
+ // challenge. The third parameter indicates the length of the response
+ // document, or else SIZE_UNKNOWN. Note: Do NOT abort the request in response
+ // to this signal.
+ sigslot::signal3<HttpClient*,bool,size_t> SignalHeaderAvailable;
+ // Signalled when the current request finishes. On success, err is 0.
+ sigslot::signal2<HttpClient*,HttpErrorType> SignalHttpClientComplete;
+
+protected:
+ void connect();
+ void release();
+
+ bool ShouldRedirect(std::string* location) const;
+
+ bool BeginCacheFile();
+ HttpError WriteCacheHeaders(const std::string& id);
+ void CompleteCacheFile();
+
+ bool CheckCache();
+ HttpError ReadCacheHeaders(const std::string& id, bool override);
+ HttpError ReadCacheBody(const std::string& id);
+
+ bool PrepareValidate();
+ HttpError CompleteValidate();
+
+ HttpError OnHeaderAvailable(bool ignore_data, bool chunked, size_t data_size);
+
+ void StartDNSLookup();
+ void OnResolveResult(AsyncResolverInterface* resolver);
+
+ // IHttpNotify Interface
+ virtual HttpError onHttpHeaderComplete(bool chunked, size_t& data_size);
+ virtual void onHttpComplete(HttpMode mode, HttpError err);
+ virtual void onHttpClosed(HttpError err);
+
+private:
+ enum CacheState { CS_READY, CS_WRITING, CS_READING, CS_VALIDATING };
+ bool IsCacheActive() const { return (cache_state_ > CS_READY); }
+
+ std::string agent_;
+ StreamPool* pool_;
+ HttpBase base_;
+ SocketAddress server_;
+ ProxyInfo proxy_;
+ HttpTransaction* transaction_;
+ bool free_transaction_;
+ size_t retries_, attempt_, redirects_;
+ RedirectAction redirect_action_;
+ UriForm uri_form_;
+ scoped_ptr<HttpAuthContext> context_;
+ DiskCache* cache_;
+ CacheState cache_state_;
+ AsyncResolverInterface* resolver_;
+};
+
+//////////////////////////////////////////////////////////////////////
+// HttpClientDefault - Default implementation of HttpClient
+//////////////////////////////////////////////////////////////////////
+
+class HttpClientDefault : public ReuseSocketPool, public HttpClient {
+public:
+ HttpClientDefault(SocketFactory* factory, const std::string& agent,
+ HttpTransaction* transaction = NULL);
+};
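As a hedged illustration of the flow the comments above describe (prepare a request, connect the completion signal, then call start()), a caller might look roughly like the sketch below. The Fetcher class, agent string, and URL handling are assumptions for illustration only, and the client still needs a running socket server or message loop to drive the asynchronous I/O, as HttpRequest::Send() in httprequest.cc shows.

    #include "webrtc/base/httpclient.h"
    #include "webrtc/base/httpcommon-inl.h"
    #include "webrtc/base/socketaddress.h"

    class Fetcher : public sigslot::has_slots<> {
     public:
      explicit Fetcher(rtc::SocketFactory* factory)
          : client_(factory, "ExampleAgent/1.0") {  // hypothetical agent string
        client_.SignalHttpClientComplete.connect(this, &Fetcher::OnComplete);
      }
      void Fetch(const std::string& url) {
        client_.prepare_get(url);
        // Set the server explicitly, in case prepare_get() does not already
        // derive it from the URL.
        rtc::Url<char> parsed(url);
        client_.set_server(rtc::SocketAddress(parsed.host(), parsed.port()));
        client_.start();  // Results arrive via SignalHttpClientComplete.
      }
     private:
      void OnComplete(rtc::HttpClient* client, rtc::HttpErrorType err) {
        // err is 0 (HE_NONE) on success; inspect client->response() here.
      }
      rtc::HttpClientDefault client_;
    };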
+
+//////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_HTTPCLIENT_H__
diff --git a/chromium/third_party/webrtc/base/httpcommon-inl.h b/chromium/third_party/webrtc/base/httpcommon-inl.h
new file mode 100644
index 00000000000..2f525ce7923
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpcommon-inl.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_HTTPCOMMON_INL_H__
+#define WEBRTC_BASE_HTTPCOMMON_INL_H__
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/httpcommon.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// Url
+///////////////////////////////////////////////////////////////////////////////
+
+template<class CTYPE>
+void Url<CTYPE>::do_set_url(const CTYPE* val, size_t len) {
+ if (ascnicmp(val, "http://", 7) == 0) {
+ val += 7; len -= 7;
+ secure_ = false;
+ } else if (ascnicmp(val, "https://", 8) == 0) {
+ val += 8; len -= 8;
+ secure_ = true;
+ } else {
+ clear();
+ return;
+ }
+ const CTYPE* path = strchrn(val, len, static_cast<CTYPE>('/'));
+ if (!path) {
+ path = val + len;
+ }
+ size_t address_length = (path - val);
+ do_set_address(val, address_length);
+ do_set_full_path(path, len - address_length);
+}
+
+template<class CTYPE>
+void Url<CTYPE>::do_set_address(const CTYPE* val, size_t len) {
+ if (const CTYPE* at = strchrn(val, len, static_cast<CTYPE>('@'))) {
+ // Everything before the @ is a user:password combo, so skip it.
+ len -= at - val + 1;
+ val = at + 1;
+ }
+ if (const CTYPE* colon = strchrn(val, len, static_cast<CTYPE>(':'))) {
+ host_.assign(val, colon - val);
+ // Note: In every case, we're guaranteed that colon is followed by either a
+ // null or a non-numeric character.
+ port_ = static_cast<uint16>(::strtoul(colon + 1, NULL, 10));
+ // TODO: Consider checking for invalid data following port number.
+ } else {
+ host_.assign(val, len);
+ port_ = HttpDefaultPort(secure_);
+ }
+}
+
+template<class CTYPE>
+void Url<CTYPE>::do_set_full_path(const CTYPE* val, size_t len) {
+ const CTYPE* query = strchrn(val, len, static_cast<CTYPE>('?'));
+ if (!query) {
+ query = val + len;
+ }
+ size_t path_length = (query - val);
+ if (0 == path_length) {
+ // TODO: consider failing in this case.
+ path_.assign(1, static_cast<CTYPE>('/'));
+ } else {
+ ASSERT(val[0] == static_cast<CTYPE>('/'));
+ path_.assign(val, path_length);
+ }
+ query_.assign(query, len - path_length);
+}
+
+template<class CTYPE>
+void Url<CTYPE>::do_get_url(string* val) const {
+ CTYPE protocol[9];
+ asccpyn(protocol, ARRAY_SIZE(protocol), secure_ ? "https://" : "http://");
+ val->append(protocol);
+ do_get_address(val);
+ do_get_full_path(val);
+}
+
+template<class CTYPE>
+void Url<CTYPE>::do_get_address(string* val) const {
+ val->append(host_);
+ if (port_ != HttpDefaultPort(secure_)) {
+ CTYPE format[5], port[32];
+ asccpyn(format, ARRAY_SIZE(format), ":%hu");
+ sprintfn(port, ARRAY_SIZE(port), format, port_);
+ val->append(port);
+ }
+}
+
+template<class CTYPE>
+void Url<CTYPE>::do_get_full_path(string* val) const {
+ val->append(path_);
+ val->append(query_);
+}
+
+template<class CTYPE>
+bool Url<CTYPE>::get_attribute(const string& name, string* value) const {
+ if (query_.empty())
+ return false;
+
+ std::string::size_type pos = query_.find(name, 1);
+ if (std::string::npos == pos)
+ return false;
+
+ pos += name.length() + 1;
+ if ((pos > query_.length()) || (static_cast<CTYPE>('=') != query_[pos-1]))
+ return false;
+
+ std::string::size_type end = query_.find(static_cast<CTYPE>('&'), pos);
+ if (std::string::npos == end) {
+ end = query_.length();
+ }
+ value->assign(query_.substr(pos, end - pos));
+ return true;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_HTTPCOMMON_INL_H__
diff --git a/chromium/third_party/webrtc/base/httpcommon.cc b/chromium/third_party/webrtc/base/httpcommon.cc
new file mode 100644
index 00000000000..095cdafef9a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpcommon.cc
@@ -0,0 +1,1045 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <time.h>
+
+#if defined(WEBRTC_WIN)
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <winsock2.h>
+#include <ws2tcpip.h>
+#define SECURITY_WIN32
+#include <security.h>
+#endif
+
+#include "webrtc/base/httpcommon-inl.h"
+
+#include "webrtc/base/base64.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/cryptstring.h"
+#include "webrtc/base/httpcommon.h"
+#include "webrtc/base/socketaddress.h"
+#include "webrtc/base/stringdigest.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+
+namespace rtc {
+
+#if defined(WEBRTC_WIN)
+extern const ConstantLabel SECURITY_ERRORS[];
+#endif
+
+//////////////////////////////////////////////////////////////////////
+// Enum - TODO: expose globally later?
+//////////////////////////////////////////////////////////////////////
+
+bool find_string(size_t& index, const std::string& needle,
+ const char* const haystack[], size_t max_index) {
+ for (index=0; index<max_index; ++index) {
+ if (_stricmp(needle.c_str(), haystack[index]) == 0) {
+ return true;
+ }
+ }
+ return false;
+}
+
+template<class E>
+struct Enum {
+ static const char** Names;
+ static size_t Size;
+
+ static inline const char* Name(E val) { return Names[val]; }
+ static inline bool Parse(E& val, const std::string& name) {
+ size_t index;
+ if (!find_string(index, name, Names, Size))
+ return false;
+ val = static_cast<E>(index);
+ return true;
+ }
+
+ E val;
+
+ inline operator E&() { return val; }
+ inline Enum& operator=(E rhs) { val = rhs; return *this; }
+
+ inline const char* name() const { return Name(val); }
+ inline bool assign(const std::string& name) { return Parse(val, name); }
+ inline Enum& operator=(const std::string& rhs) { assign(rhs); return *this; }
+};
+
+#define ENUM(e,n) \
+ template<> const char** Enum<e>::Names = n; \
+ template<> size_t Enum<e>::Size = sizeof(n)/sizeof(n[0])
+
+//////////////////////////////////////////////////////////////////////
+// HttpCommon
+//////////////////////////////////////////////////////////////////////
+
+static const char* kHttpVersions[HVER_LAST+1] = {
+ "1.0", "1.1", "Unknown"
+};
+ENUM(HttpVersion, kHttpVersions);
+
+static const char* kHttpVerbs[HV_LAST+1] = {
+ "GET", "POST", "PUT", "DELETE", "CONNECT", "HEAD"
+};
+ENUM(HttpVerb, kHttpVerbs);
+
+static const char* kHttpHeaders[HH_LAST+1] = {
+ "Age",
+ "Cache-Control",
+ "Connection",
+ "Content-Disposition",
+ "Content-Length",
+ "Content-Range",
+ "Content-Type",
+ "Cookie",
+ "Date",
+ "ETag",
+ "Expires",
+ "Host",
+ "If-Modified-Since",
+ "If-None-Match",
+ "Keep-Alive",
+ "Last-Modified",
+ "Location",
+ "Proxy-Authenticate",
+ "Proxy-Authorization",
+ "Proxy-Connection",
+ "Range",
+ "Set-Cookie",
+ "TE",
+ "Trailers",
+ "Transfer-Encoding",
+ "Upgrade",
+ "User-Agent",
+ "WWW-Authenticate",
+};
+ENUM(HttpHeader, kHttpHeaders);
+
+const char* ToString(HttpVersion version) {
+ return Enum<HttpVersion>::Name(version);
+}
+
+bool FromString(HttpVersion& version, const std::string& str) {
+ return Enum<HttpVersion>::Parse(version, str);
+}
+
+const char* ToString(HttpVerb verb) {
+ return Enum<HttpVerb>::Name(verb);
+}
+
+bool FromString(HttpVerb& verb, const std::string& str) {
+ return Enum<HttpVerb>::Parse(verb, str);
+}
+
+const char* ToString(HttpHeader header) {
+ return Enum<HttpHeader>::Name(header);
+}
+
+bool FromString(HttpHeader& header, const std::string& str) {
+ return Enum<HttpHeader>::Parse(header, str);
+}
+
+bool HttpCodeHasBody(uint32 code) {
+ return !HttpCodeIsInformational(code)
+ && (code != HC_NO_CONTENT) && (code != HC_NOT_MODIFIED);
+}
+
+bool HttpCodeIsCacheable(uint32 code) {
+ switch (code) {
+ case HC_OK:
+ case HC_NON_AUTHORITATIVE:
+ case HC_PARTIAL_CONTENT:
+ case HC_MULTIPLE_CHOICES:
+ case HC_MOVED_PERMANENTLY:
+ case HC_GONE:
+ return true;
+ default:
+ return false;
+ }
+}
+
+bool HttpHeaderIsEndToEnd(HttpHeader header) {
+ switch (header) {
+ case HH_CONNECTION:
+ case HH_KEEP_ALIVE:
+ case HH_PROXY_AUTHENTICATE:
+ case HH_PROXY_AUTHORIZATION:
+ case HH_PROXY_CONNECTION: // Not part of the RFC... this is a non-standard header
+ case HH_TE:
+ case HH_TRAILERS:
+ case HH_TRANSFER_ENCODING:
+ case HH_UPGRADE:
+ return false;
+ default:
+ return true;
+ }
+}
+
+bool HttpHeaderIsCollapsible(HttpHeader header) {
+ switch (header) {
+ case HH_SET_COOKIE:
+ case HH_PROXY_AUTHENTICATE:
+ case HH_WWW_AUTHENTICATE:
+ return false;
+ default:
+ return true;
+ }
+}
+
+bool HttpShouldKeepAlive(const HttpData& data) {
+ std::string connection;
+ if ((data.hasHeader(HH_PROXY_CONNECTION, &connection)
+ || data.hasHeader(HH_CONNECTION, &connection))) {
+ return (_stricmp(connection.c_str(), "Keep-Alive") == 0);
+ }
+ return (data.version >= HVER_1_1);
+}
+
+namespace {
+
+inline bool IsEndOfAttributeName(size_t pos, size_t len, const char * data) {
+ if (pos >= len)
+ return true;
+ if (isspace(static_cast<unsigned char>(data[pos])))
+ return true;
+ // The reason for this complexity is that some attributes may contain trailing
+ // equal signs (like base64 tokens in Negotiate auth headers)
+ if ((pos+1 < len) && (data[pos] == '=') &&
+ !isspace(static_cast<unsigned char>(data[pos+1])) &&
+ (data[pos+1] != '=')) {
+ return true;
+ }
+ return false;
+}
+
+// TODO: unittest for EscapeAttribute and HttpComposeAttributes.
+
+std::string EscapeAttribute(const std::string& attribute) {
+ const size_t kMaxLength = attribute.length() * 2 + 1;
+ char* buffer = STACK_ARRAY(char, kMaxLength);
+ size_t len = escape(buffer, kMaxLength, attribute.data(), attribute.length(),
+ "\"", '\\');
+ return std::string(buffer, len);
+}
+
+} // anonymous namespace
+
+void HttpComposeAttributes(const HttpAttributeList& attributes, char separator,
+ std::string* composed) {
+ std::stringstream ss;
+ for (size_t i=0; i<attributes.size(); ++i) {
+ if (i > 0) {
+ ss << separator << " ";
+ }
+ ss << attributes[i].first;
+ if (!attributes[i].second.empty()) {
+ ss << "=\"" << EscapeAttribute(attributes[i].second) << "\"";
+ }
+ }
+ *composed = ss.str();
+}
+
+void HttpParseAttributes(const char * data, size_t len,
+ HttpAttributeList& attributes) {
+ size_t pos = 0;
+ while (true) {
+ // Skip leading whitespace
+ while ((pos < len) && isspace(static_cast<unsigned char>(data[pos]))) {
+ ++pos;
+ }
+
+ // End of attributes?
+ if (pos >= len)
+ return;
+
+ // Find end of attribute name
+ size_t start = pos;
+ while (!IsEndOfAttributeName(pos, len, data)) {
+ ++pos;
+ }
+
+ HttpAttribute attribute;
+ attribute.first.assign(data + start, data + pos);
+
+ // Attribute has value?
+ if ((pos < len) && (data[pos] == '=')) {
+ ++pos; // Skip '='
+ // Check if quoted value
+ if ((pos < len) && (data[pos] == '"')) {
+ while (++pos < len) {
+ if (data[pos] == '"') {
+ ++pos;
+ break;
+ }
+ if ((data[pos] == '\\') && (pos + 1 < len))
+ ++pos;
+ attribute.second.append(1, data[pos]);
+ }
+ } else {
+ while ((pos < len) &&
+ !isspace(static_cast<unsigned char>(data[pos])) &&
+ (data[pos] != ',')) {
+ attribute.second.append(1, data[pos++]);
+ }
+ }
+ }
+
+ attributes.push_back(attribute);
+ if ((pos < len) && (data[pos] == ',')) ++pos; // Skip ','
+ }
+}
+
+bool HttpHasAttribute(const HttpAttributeList& attributes,
+ const std::string& name,
+ std::string* value) {
+ for (HttpAttributeList::const_iterator it = attributes.begin();
+ it != attributes.end(); ++it) {
+ if (it->first == name) {
+ if (value) {
+ *value = it->second;
+ }
+ return true;
+ }
+ }
+ return false;
+}
+
+bool HttpHasNthAttribute(HttpAttributeList& attributes,
+ size_t index,
+ std::string* name,
+ std::string* value) {
+ if (index >= attributes.size())
+ return false;
+
+ if (name)
+ *name = attributes[index].first;
+ if (value)
+ *value = attributes[index].second;
+ return true;
+}
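A small illustrative snippet (not part of the patch) showing how these helpers decompose an authentication challenge, mirroring the way HttpAuthenticate() below uses them; the challenge text is a made-up example.

    const char kChallenge[] =
        "Digest realm=\"example\", nonce=\"abc123\", qop=\"auth\"";
    rtc::HttpAttributeList attrs;
    rtc::HttpParseAttributes(kChallenge, strlen(kChallenge), attrs);
    // attrs[0] is ("Digest", ""); the remaining entries are name/value pairs.
    std::string scheme, realm;
    rtc::HttpHasNthAttribute(attrs, 0, &scheme, NULL);  // scheme == "Digest"
    if (rtc::HttpHasAttribute(attrs, "realm", &realm)) {
      // realm == "example"
    }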
+
+bool HttpDateToSeconds(const std::string& date, time_t* seconds) {
+ const char* const kTimeZones[] = {
+ "UT", "GMT", "EST", "EDT", "CST", "CDT", "MST", "MDT", "PST", "PDT",
+ "A", "B", "C", "D", "E", "F", "G", "H", "I", "K", "L", "M",
+ "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y"
+ };
+ const int kTimeZoneOffsets[] = {
+ 0, 0, -5, -4, -6, -5, -7, -6, -8, -7,
+ -1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12,
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12
+ };
+
+ ASSERT(NULL != seconds);
+ struct tm tval;
+ memset(&tval, 0, sizeof(tval));
+ char month[4], zone[6];
+ memset(month, 0, sizeof(month));
+ memset(zone, 0, sizeof(zone));
+
+ if (7 != sscanf(date.c_str(), "%*3s, %d %3s %d %d:%d:%d %5c",
+ &tval.tm_mday, month, &tval.tm_year,
+ &tval.tm_hour, &tval.tm_min, &tval.tm_sec, zone)) {
+ return false;
+ }
+ switch (toupper(month[2])) {
+ case 'N': tval.tm_mon = (month[1] == 'A') ? 0 : 5; break;
+ case 'B': tval.tm_mon = 1; break;
+ case 'R': tval.tm_mon = (month[0] == 'M') ? 2 : 3; break;
+ case 'Y': tval.tm_mon = 4; break;
+ case 'L': tval.tm_mon = 6; break;
+ case 'G': tval.tm_mon = 7; break;
+ case 'P': tval.tm_mon = 8; break;
+ case 'T': tval.tm_mon = 9; break;
+ case 'V': tval.tm_mon = 10; break;
+ case 'C': tval.tm_mon = 11; break;
+ }
+ tval.tm_year -= 1900;
+ size_t gmt, non_gmt = mktime(&tval);
+ if ((zone[0] == '+') || (zone[0] == '-')) {
+ if (!isdigit(zone[1]) || !isdigit(zone[2])
+ || !isdigit(zone[3]) || !isdigit(zone[4])) {
+ return false;
+ }
+ int hours = (zone[1] - '0') * 10 + (zone[2] - '0');
+ int minutes = (zone[3] - '0') * 10 + (zone[4] - '0');
+ int offset = (hours * 60 + minutes) * 60;
+ gmt = non_gmt + ((zone[0] == '+') ? offset : -offset);
+ } else {
+ size_t zindex;
+ if (!find_string(zindex, zone, kTimeZones, ARRAY_SIZE(kTimeZones))) {
+ return false;
+ }
+ gmt = non_gmt + kTimeZoneOffsets[zindex] * 60 * 60;
+ }
+ // TODO: Android should support timezone, see b/2441195
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) || defined(WEBRTC_ANDROID) || defined(BSD)
+ tm *tm_for_timezone = localtime((time_t *)&gmt);
+ *seconds = gmt + tm_for_timezone->tm_gmtoff;
+#else
+ *seconds = gmt - timezone;
+#endif
+ return true;
+}
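For illustration only (not part of the patch), a typical call converts an RFC 1123 value such as a Date or Expires header:

    time_t when = 0;
    if (rtc::HttpDateToSeconds("Thu, 01 Jan 2004 00:00:00 GMT", &when)) {
      // On success, |when| should hold the corresponding unix timestamp
      // (1072915200 for 2004-01-01T00:00:00Z).
    }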
+
+std::string HttpAddress(const SocketAddress& address, bool secure) {
+ return (address.port() == HttpDefaultPort(secure))
+ ? address.hostname() : address.ToString();
+}
+
+//////////////////////////////////////////////////////////////////////
+// HttpData
+//////////////////////////////////////////////////////////////////////
+
+void
+HttpData::clear(bool release_document) {
+ // Clear headers first, since releasing a document may have far-reaching
+ // effects.
+ headers_.clear();
+ if (release_document) {
+ document.reset();
+ }
+}
+
+void
+HttpData::copy(const HttpData& src) {
+ headers_ = src.headers_;
+}
+
+void
+HttpData::changeHeader(const std::string& name, const std::string& value,
+ HeaderCombine combine) {
+ if (combine == HC_AUTO) {
+ HttpHeader header;
+ // Unrecognized headers are collapsible
+ combine = !FromString(header, name) || HttpHeaderIsCollapsible(header)
+ ? HC_YES : HC_NO;
+ } else if (combine == HC_REPLACE) {
+ headers_.erase(name);
+ combine = HC_NO;
+ }
+ // At this point, combine is one of (YES, NO, NEW)
+ if (combine != HC_NO) {
+ HeaderMap::iterator it = headers_.find(name);
+ if (it != headers_.end()) {
+ if (combine == HC_YES) {
+ it->second.append(",");
+ it->second.append(value);
+ }
+ return;
+ }
+ }
+ headers_.insert(HeaderMap::value_type(name, value));
+}
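To make the combine rules above concrete, here is a short sketch (illustrative, not part of the patch) of the behavior observed through the addHeader()/setHeader() wrappers declared in httpcommon.h:

    rtc::HttpResponseData response;
    response.setHeader(rtc::HH_CONTENT_TYPE, "text/plain");  // HC_REPLACE: one value
    response.addHeader(rtc::HH_CACHE_CONTROL, "no-cache");   // HC_AUTO
    response.addHeader(rtc::HH_CACHE_CONTROL, "no-store");   // collapsible: "no-cache,no-store"
    response.addHeader(rtc::HH_SET_COOKIE, "a=1");
    response.addHeader(rtc::HH_SET_COOKIE, "b=2");  // not collapsible: stored as a second entry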
+
+size_t HttpData::clearHeader(const std::string& name) {
+ return headers_.erase(name);
+}
+
+HttpData::iterator HttpData::clearHeader(iterator header) {
+ iterator deprecated = header++;
+ headers_.erase(deprecated);
+ return header;
+}
+
+bool
+HttpData::hasHeader(const std::string& name, std::string* value) const {
+ HeaderMap::const_iterator it = headers_.find(name);
+ if (it == headers_.end()) {
+ return false;
+ } else if (value) {
+ *value = it->second;
+ }
+ return true;
+}
+
+void HttpData::setContent(const std::string& content_type,
+ StreamInterface* document) {
+ setHeader(HH_CONTENT_TYPE, content_type);
+ setDocumentAndLength(document);
+}
+
+void HttpData::setDocumentAndLength(StreamInterface* document) {
+ // TODO: Consider calling Rewind() here?
+ ASSERT(!hasHeader(HH_CONTENT_LENGTH, NULL));
+ ASSERT(!hasHeader(HH_TRANSFER_ENCODING, NULL));
+ ASSERT(document != NULL);
+ this->document.reset(document);
+ size_t content_length = 0;
+ if (this->document->GetAvailable(&content_length)) {
+ char buffer[32];
+ sprintfn(buffer, sizeof(buffer), "%d", content_length);
+ setHeader(HH_CONTENT_LENGTH, buffer);
+ } else {
+ setHeader(HH_TRANSFER_ENCODING, "chunked");
+ }
+}
+
+//
+// HttpRequestData
+//
+
+void
+HttpRequestData::clear(bool release_document) {
+ verb = HV_GET;
+ path.clear();
+ HttpData::clear(release_document);
+}
+
+void
+HttpRequestData::copy(const HttpRequestData& src) {
+ verb = src.verb;
+ path = src.path;
+ HttpData::copy(src);
+}
+
+size_t
+HttpRequestData::formatLeader(char* buffer, size_t size) const {
+ ASSERT(path.find(' ') == std::string::npos);
+ return sprintfn(buffer, size, "%s %.*s HTTP/%s", ToString(verb), path.size(),
+ path.data(), ToString(version));
+}
+
+HttpError
+HttpRequestData::parseLeader(const char* line, size_t len) {
+ unsigned int vmajor, vminor;
+ int vend, dstart, dend;
+ // sscanf isn't safe with strings that aren't null-terminated, and there is
+ // no guarantee that |line| is. Create a local copy that is null-terminated.
+ std::string line_str(line, len);
+ line = line_str.c_str();
+ if ((sscanf(line, "%*s%n %n%*s%n HTTP/%u.%u",
+ &vend, &dstart, &dend, &vmajor, &vminor) != 2)
+ || (vmajor != 1)) {
+ return HE_PROTOCOL;
+ }
+ if (vminor == 0) {
+ version = HVER_1_0;
+ } else if (vminor == 1) {
+ version = HVER_1_1;
+ } else {
+ return HE_PROTOCOL;
+ }
+ std::string sverb(line, vend);
+ if (!FromString(verb, sverb.c_str())) {
+ return HE_PROTOCOL; // !?! HC_METHOD_NOT_SUPPORTED?
+ }
+ path.assign(line + dstart, line + dend);
+ return HE_NONE;
+}
+
+bool HttpRequestData::getAbsoluteUri(std::string* uri) const {
+ if (HV_CONNECT == verb)
+ return false;
+ Url<char> url(path);
+ if (url.valid()) {
+ uri->assign(path);
+ return true;
+ }
+ std::string host;
+ if (!hasHeader(HH_HOST, &host))
+ return false;
+ url.set_address(host);
+ url.set_full_path(path);
+ uri->assign(url.url());
+ return url.valid();
+}
+
+bool HttpRequestData::getRelativeUri(std::string* host,
+ std::string* path) const
+{
+ if (HV_CONNECT == verb)
+ return false;
+ Url<char> url(this->path);
+ if (url.valid()) {
+ host->assign(url.address());
+ path->assign(url.full_path());
+ return true;
+ }
+ if (!hasHeader(HH_HOST, host))
+ return false;
+ path->assign(this->path);
+ return true;
+}
+
+//
+// HttpResponseData
+//
+
+void
+HttpResponseData::clear(bool release_document) {
+ scode = HC_INTERNAL_SERVER_ERROR;
+ message.clear();
+ HttpData::clear(release_document);
+}
+
+void
+HttpResponseData::copy(const HttpResponseData& src) {
+ scode = src.scode;
+ message = src.message;
+ HttpData::copy(src);
+}
+
+void
+HttpResponseData::set_success(uint32 scode) {
+ this->scode = scode;
+ message.clear();
+ setHeader(HH_CONTENT_LENGTH, "0", false);
+}
+
+void
+HttpResponseData::set_success(const std::string& content_type,
+ StreamInterface* document,
+ uint32 scode) {
+ this->scode = scode;
+ message.erase(message.begin(), message.end());
+ setContent(content_type, document);
+}
+
+void
+HttpResponseData::set_redirect(const std::string& location, uint32 scode) {
+ this->scode = scode;
+ message.clear();
+ setHeader(HH_LOCATION, location);
+ setHeader(HH_CONTENT_LENGTH, "0", false);
+}
+
+void
+HttpResponseData::set_error(uint32 scode) {
+ this->scode = scode;
+ message.clear();
+ setHeader(HH_CONTENT_LENGTH, "0", false);
+}
+
+size_t
+HttpResponseData::formatLeader(char* buffer, size_t size) const {
+ size_t len = sprintfn(buffer, size, "HTTP/%s %lu", ToString(version), scode);
+ if (!message.empty()) {
+ len += sprintfn(buffer + len, size - len, " %.*s",
+ message.size(), message.data());
+ }
+ return len;
+}
+
+HttpError
+HttpResponseData::parseLeader(const char* line, size_t len) {
+ size_t pos = 0;
+ unsigned int vmajor, vminor, temp_scode;
+ int temp_pos;
+ // sscanf isn't safe with strings that aren't null-terminated, and there is
+ // no guarantee that |line| is. Create a local copy that is null-terminated.
+ std::string line_str(line, len);
+ line = line_str.c_str();
+ if (sscanf(line, "HTTP %u%n",
+ &temp_scode, &temp_pos) == 1) {
+ // This server's response has no version. :( NOTE: This happens for every
+ // response to requests made from Chrome plugins, regardless of the server's
+ // behaviour.
+ LOG(LS_VERBOSE) << "HTTP version missing from response";
+ version = HVER_UNKNOWN;
+ } else if ((sscanf(line, "HTTP/%u.%u %u%n",
+ &vmajor, &vminor, &temp_scode, &temp_pos) == 3)
+ && (vmajor == 1)) {
+ // This server's response does have a version.
+ if (vminor == 0) {
+ version = HVER_1_0;
+ } else if (vminor == 1) {
+ version = HVER_1_1;
+ } else {
+ return HE_PROTOCOL;
+ }
+ } else {
+ return HE_PROTOCOL;
+ }
+ scode = temp_scode;
+ pos = static_cast<size_t>(temp_pos);
+ while ((pos < len) && isspace(static_cast<unsigned char>(line[pos]))) ++pos;
+ message.assign(line + pos, len - pos);
+ return HE_NONE;
+}
+
+//////////////////////////////////////////////////////////////////////
+// Http Authentication
+//////////////////////////////////////////////////////////////////////
+
+#define TEST_DIGEST 0
+#if TEST_DIGEST
+/*
+const char * const DIGEST_CHALLENGE =
+ "Digest realm=\"testrealm@host.com\","
+ " qop=\"auth,auth-int\","
+ " nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\","
+ " opaque=\"5ccc069c403ebaf9f0171e9517f40e41\"";
+const char * const DIGEST_METHOD = "GET";
+const char * const DIGEST_URI =
+ "/dir/index.html";;
+const char * const DIGEST_CNONCE =
+ "0a4f113b";
+const char * const DIGEST_RESPONSE =
+ "6629fae49393a05397450978507c4ef1";
+//user_ = "Mufasa";
+//pass_ = "Circle Of Life";
+*/
+const char * const DIGEST_CHALLENGE =
+ "Digest realm=\"Squid proxy-caching web server\","
+ " nonce=\"Nny4QuC5PwiSDixJ\","
+ " qop=\"auth\","
+ " stale=false";
+const char * const DIGEST_URI =
+ "/";
+const char * const DIGEST_CNONCE =
+ "6501d58e9a21cee1e7b5fec894ded024";
+const char * const DIGEST_RESPONSE =
+ "edffcb0829e755838b073a4a42de06bc";
+#endif
+
+std::string quote(const std::string& str) {
+ std::string result;
+ result.push_back('"');
+ for (size_t i=0; i<str.size(); ++i) {
+ if ((str[i] == '"') || (str[i] == '\\'))
+ result.push_back('\\');
+ result.push_back(str[i]);
+ }
+ result.push_back('"');
+ return result;
+}
+
+#if defined(WEBRTC_WIN)
+struct NegotiateAuthContext : public HttpAuthContext {
+ CredHandle cred;
+ CtxtHandle ctx;
+ size_t steps;
+ bool specified_credentials;
+
+ NegotiateAuthContext(const std::string& auth, CredHandle c1, CtxtHandle c2)
+ : HttpAuthContext(auth), cred(c1), ctx(c2), steps(0),
+ specified_credentials(false)
+ { }
+
+ virtual ~NegotiateAuthContext() {
+ DeleteSecurityContext(&ctx);
+ FreeCredentialsHandle(&cred);
+ }
+};
+#endif // WEBRTC_WIN
+
+HttpAuthResult HttpAuthenticate(
+ const char * challenge, size_t len,
+ const SocketAddress& server,
+ const std::string& method, const std::string& uri,
+ const std::string& username, const CryptString& password,
+ HttpAuthContext *& context, std::string& response, std::string& auth_method)
+{
+#if TEST_DIGEST
+ challenge = DIGEST_CHALLENGE;
+ len = strlen(challenge);
+#endif
+
+ HttpAttributeList args;
+ HttpParseAttributes(challenge, len, args);
+ HttpHasNthAttribute(args, 0, &auth_method, NULL);
+
+ if (context && (context->auth_method != auth_method))
+ return HAR_IGNORE;
+
+ // BASIC
+ if (_stricmp(auth_method.c_str(), "basic") == 0) {
+ if (context)
+ return HAR_CREDENTIALS; // Bad credentials
+ if (username.empty())
+ return HAR_CREDENTIALS; // Missing credentials
+
+ context = new HttpAuthContext(auth_method);
+
+ // TODO: convert sensitive to a secure buffer that gets securely deleted
+ //std::string decoded = username + ":" + password;
+ size_t len = username.size() + password.GetLength() + 2;
+ char * sensitive = new char[len];
+ size_t pos = strcpyn(sensitive, len, username.data(), username.size());
+ pos += strcpyn(sensitive + pos, len - pos, ":");
+ password.CopyTo(sensitive + pos, true);
+
+ response = auth_method;
+ response.append(" ");
+ // TODO: create a sensitive-source version of Base64::encode
+ response.append(Base64::Encode(sensitive));
+ memset(sensitive, 0, len);
+ delete [] sensitive;
+ return HAR_RESPONSE;
+ }
+
+ // DIGEST
+ if (_stricmp(auth_method.c_str(), "digest") == 0) {
+ if (context)
+ return HAR_CREDENTIALS; // Bad credentials
+ if (username.empty())
+ return HAR_CREDENTIALS; // Missing credentials
+
+ context = new HttpAuthContext(auth_method);
+
+ std::string cnonce, ncount;
+#if TEST_DIGEST
+ method = DIGEST_METHOD;
+ uri = DIGEST_URI;
+ cnonce = DIGEST_CNONCE;
+#else
+ char buffer[256];
+ sprintf(buffer, "%d", static_cast<int>(time(0)));
+ cnonce = MD5(buffer);
+#endif
+ ncount = "00000001";
+
+ std::string realm, nonce, qop, opaque;
+ HttpHasAttribute(args, "realm", &realm);
+ HttpHasAttribute(args, "nonce", &nonce);
+ bool has_qop = HttpHasAttribute(args, "qop", &qop);
+ bool has_opaque = HttpHasAttribute(args, "opaque", &opaque);
+
+ // TODO: convert sensitive to be secure buffer
+ //std::string A1 = username + ":" + realm + ":" + password;
+ size_t len = username.size() + realm.size() + password.GetLength() + 3;
+ char * sensitive = new char[len]; // A1
+ size_t pos = strcpyn(sensitive, len, username.data(), username.size());
+ pos += strcpyn(sensitive + pos, len - pos, ":");
+ pos += strcpyn(sensitive + pos, len - pos, realm.c_str());
+ pos += strcpyn(sensitive + pos, len - pos, ":");
+ password.CopyTo(sensitive + pos, true);
+
+ std::string A2 = method + ":" + uri;
+ std::string middle;
+ if (has_qop) {
+ qop = "auth";
+ middle = nonce + ":" + ncount + ":" + cnonce + ":" + qop;
+ } else {
+ middle = nonce;
+ }
+ std::string HA1 = MD5(sensitive);
+ memset(sensitive, 0, len);
+ delete [] sensitive;
+ std::string HA2 = MD5(A2);
+ std::string dig_response = MD5(HA1 + ":" + middle + ":" + HA2);
+
+#if TEST_DIGEST
+ ASSERT(strcmp(dig_response.c_str(), DIGEST_RESPONSE) == 0);
+#endif
+
+ std::stringstream ss;
+ ss << auth_method;
+ ss << " username=" << quote(username);
+ ss << ", realm=" << quote(realm);
+ ss << ", nonce=" << quote(nonce);
+ ss << ", uri=" << quote(uri);
+ if (has_qop) {
+ ss << ", qop=" << qop;
+ ss << ", nc=" << ncount;
+ ss << ", cnonce=" << quote(cnonce);
+ }
+ ss << ", response=\"" << dig_response << "\"";
+ if (has_opaque) {
+ ss << ", opaque=" << quote(opaque);
+ }
+ response = ss.str();
+ return HAR_RESPONSE;
+ }
+
+#if defined(WEBRTC_WIN)
+#if 1
+ bool want_negotiate = (_stricmp(auth_method.c_str(), "negotiate") == 0);
+ bool want_ntlm = (_stricmp(auth_method.c_str(), "ntlm") == 0);
+ // SPNEGO & NTLM
+ if (want_negotiate || want_ntlm) {
+ const size_t MAX_MESSAGE = 12000, MAX_SPN = 256;
+ char out_buf[MAX_MESSAGE], spn[MAX_SPN];
+
+#if 0 // Requires funky windows versions
+ DWORD len = MAX_SPN;
+ if (DsMakeSpn("HTTP", server.HostAsURIString().c_str(), NULL,
+ server.port(),
+ 0, &len, spn) != ERROR_SUCCESS) {
+ LOG_F(WARNING) << "(Negotiate) - DsMakeSpn failed";
+ return HAR_IGNORE;
+ }
+#else
+ sprintfn(spn, MAX_SPN, "HTTP/%s", server.ToString().c_str());
+#endif
+
+ SecBuffer out_sec;
+ out_sec.pvBuffer = out_buf;
+ out_sec.cbBuffer = sizeof(out_buf);
+ out_sec.BufferType = SECBUFFER_TOKEN;
+
+ SecBufferDesc out_buf_desc;
+ out_buf_desc.ulVersion = 0;
+ out_buf_desc.cBuffers = 1;
+ out_buf_desc.pBuffers = &out_sec;
+
+ const ULONG NEG_FLAGS_DEFAULT =
+ //ISC_REQ_ALLOCATE_MEMORY
+ ISC_REQ_CONFIDENTIALITY
+ //| ISC_REQ_EXTENDED_ERROR
+ //| ISC_REQ_INTEGRITY
+ | ISC_REQ_REPLAY_DETECT
+ | ISC_REQ_SEQUENCE_DETECT
+ //| ISC_REQ_STREAM
+ //| ISC_REQ_USE_SUPPLIED_CREDS
+ ;
+
+ ::TimeStamp lifetime;
+ SECURITY_STATUS ret = S_OK;
+ ULONG ret_flags = 0, flags = NEG_FLAGS_DEFAULT;
+
+ bool specify_credentials = !username.empty();
+ size_t steps = 0;
+
+ //uint32 now = Time();
+
+ NegotiateAuthContext * neg = static_cast<NegotiateAuthContext *>(context);
+ if (neg) {
+ const size_t max_steps = 10;
+ if (++neg->steps >= max_steps) {
+ LOG(WARNING) << "AsyncHttpsProxySocket::Authenticate(Negotiate) too many retries";
+ return HAR_ERROR;
+ }
+ steps = neg->steps;
+
+ std::string challenge, decoded_challenge;
+ if (HttpHasNthAttribute(args, 1, &challenge, NULL)
+ && Base64::Decode(challenge, Base64::DO_STRICT,
+ &decoded_challenge, NULL)) {
+ SecBuffer in_sec;
+ in_sec.pvBuffer = const_cast<char *>(decoded_challenge.data());
+ in_sec.cbBuffer = static_cast<unsigned long>(decoded_challenge.size());
+ in_sec.BufferType = SECBUFFER_TOKEN;
+
+ SecBufferDesc in_buf_desc;
+ in_buf_desc.ulVersion = 0;
+ in_buf_desc.cBuffers = 1;
+ in_buf_desc.pBuffers = &in_sec;
+
+ ret = InitializeSecurityContextA(&neg->cred, &neg->ctx, spn, flags, 0, SECURITY_NATIVE_DREP, &in_buf_desc, 0, &neg->ctx, &out_buf_desc, &ret_flags, &lifetime);
+ //LOG(INFO) << "$$$ InitializeSecurityContext @ " << TimeSince(now);
+ if (FAILED(ret)) {
+ LOG(LS_ERROR) << "InitializeSecurityContext returned: "
+ << ErrorName(ret, SECURITY_ERRORS);
+ return HAR_ERROR;
+ }
+ } else if (neg->specified_credentials) {
+ // Try again with default credentials
+ specify_credentials = false;
+ delete context;
+ context = neg = 0;
+ } else {
+ return HAR_CREDENTIALS;
+ }
+ }
+
+ if (!neg) {
+ unsigned char userbuf[256], passbuf[256], domainbuf[16];
+ SEC_WINNT_AUTH_IDENTITY_A auth_id, * pauth_id = 0;
+ if (specify_credentials) {
+ memset(&auth_id, 0, sizeof(auth_id));
+ size_t len = password.GetLength()+1;
+ char * sensitive = new char[len];
+ password.CopyTo(sensitive, true);
+ std::string::size_type pos = username.find('\\');
+ if (pos == std::string::npos) {
+ auth_id.UserLength = static_cast<unsigned long>(
+ _min(sizeof(userbuf) - 1, username.size()));
+ memcpy(userbuf, username.c_str(), auth_id.UserLength);
+ userbuf[auth_id.UserLength] = 0;
+ auth_id.DomainLength = 0;
+ domainbuf[auth_id.DomainLength] = 0;
+ auth_id.PasswordLength = static_cast<unsigned long>(
+ _min(sizeof(passbuf) - 1, password.GetLength()));
+ memcpy(passbuf, sensitive, auth_id.PasswordLength);
+ passbuf[auth_id.PasswordLength] = 0;
+ } else {
+ auth_id.UserLength = static_cast<unsigned long>(
+ _min(sizeof(userbuf) - 1, username.size() - pos - 1));
+ memcpy(userbuf, username.c_str() + pos + 1, auth_id.UserLength);
+ userbuf[auth_id.UserLength] = 0;
+ auth_id.DomainLength = static_cast<unsigned long>(
+ _min(sizeof(domainbuf) - 1, pos));
+ memcpy(domainbuf, username.c_str(), auth_id.DomainLength);
+ domainbuf[auth_id.DomainLength] = 0;
+ auth_id.PasswordLength = static_cast<unsigned long>(
+ _min(sizeof(passbuf) - 1, password.GetLength()));
+ memcpy(passbuf, sensitive, auth_id.PasswordLength);
+ passbuf[auth_id.PasswordLength] = 0;
+ }
+ memset(sensitive, 0, len);
+ delete [] sensitive;
+ auth_id.User = userbuf;
+ auth_id.Domain = domainbuf;
+ auth_id.Password = passbuf;
+ auth_id.Flags = SEC_WINNT_AUTH_IDENTITY_ANSI;
+ pauth_id = &auth_id;
+ LOG(LS_VERBOSE) << "Negotiate protocol: Using specified credentials";
+ } else {
+ LOG(LS_VERBOSE) << "Negotiate protocol: Using default credentials";
+ }
+
+ CredHandle cred;
+ ret = AcquireCredentialsHandleA(0, want_negotiate ? NEGOSSP_NAME_A : NTLMSP_NAME_A, SECPKG_CRED_OUTBOUND, 0, pauth_id, 0, 0, &cred, &lifetime);
+ //LOG(INFO) << "$$$ AcquireCredentialsHandle @ " << TimeSince(now);
+ if (ret != SEC_E_OK) {
+ LOG(LS_ERROR) << "AcquireCredentialsHandle error: "
+ << ErrorName(ret, SECURITY_ERRORS);
+ return HAR_IGNORE;
+ }
+
+ //CSecBufferBundle<5, CSecBufferBase::FreeSSPI> sb_out;
+
+ CtxtHandle ctx;
+ ret = InitializeSecurityContextA(&cred, 0, spn, flags, 0, SECURITY_NATIVE_DREP, 0, 0, &ctx, &out_buf_desc, &ret_flags, &lifetime);
+ //LOG(INFO) << "$$$ InitializeSecurityContext @ " << TimeSince(now);
+ if (FAILED(ret)) {
+ LOG(LS_ERROR) << "InitializeSecurityContext returned: "
+ << ErrorName(ret, SECURITY_ERRORS);
+ FreeCredentialsHandle(&cred);
+ return HAR_IGNORE;
+ }
+
+ ASSERT(!context);
+ context = neg = new NegotiateAuthContext(auth_method, cred, ctx);
+ neg->specified_credentials = specify_credentials;
+ neg->steps = steps;
+ }
+
+ if ((ret == SEC_I_COMPLETE_NEEDED) || (ret == SEC_I_COMPLETE_AND_CONTINUE)) {
+ ret = CompleteAuthToken(&neg->ctx, &out_buf_desc);
+ //LOG(INFO) << "$$$ CompleteAuthToken @ " << TimeSince(now);
+ LOG(LS_VERBOSE) << "CompleteAuthToken returned: "
+ << ErrorName(ret, SECURITY_ERRORS);
+ if (FAILED(ret)) {
+ return HAR_ERROR;
+ }
+ }
+
+ //LOG(INFO) << "$$$ NEGOTIATE took " << TimeSince(now) << "ms";
+
+ std::string decoded(out_buf, out_buf + out_sec.cbBuffer);
+ response = auth_method;
+ response.append(" ");
+ response.append(Base64::Encode(decoded));
+ return HAR_RESPONSE;
+ }
+#endif
+#endif // WEBRTC_WIN
+
+ return HAR_IGNORE;
+}
+
+//////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/httpcommon.h b/chromium/third_party/webrtc/base/httpcommon.h
new file mode 100644
index 00000000000..c43a9e2761d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpcommon.h
@@ -0,0 +1,446 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_HTTPCOMMON_H__
+#define WEBRTC_BASE_HTTPCOMMON_H__
+
+#include <map>
+#include <string>
+#include <vector>
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+
+class CryptString;
+class SocketAddress;
+
+//////////////////////////////////////////////////////////////////////
+// Constants
+//////////////////////////////////////////////////////////////////////
+
+enum HttpCode {
+ HC_OK = 200,
+ HC_NON_AUTHORITATIVE = 203,
+ HC_NO_CONTENT = 204,
+ HC_PARTIAL_CONTENT = 206,
+
+ HC_MULTIPLE_CHOICES = 300,
+ HC_MOVED_PERMANENTLY = 301,
+ HC_FOUND = 302,
+ HC_SEE_OTHER = 303,
+ HC_NOT_MODIFIED = 304,
+ HC_MOVED_TEMPORARILY = 307,
+
+ HC_BAD_REQUEST = 400,
+ HC_UNAUTHORIZED = 401,
+ HC_FORBIDDEN = 403,
+ HC_NOT_FOUND = 404,
+ HC_PROXY_AUTHENTICATION_REQUIRED = 407,
+ HC_GONE = 410,
+
+ HC_INTERNAL_SERVER_ERROR = 500,
+ HC_NOT_IMPLEMENTED = 501,
+ HC_SERVICE_UNAVAILABLE = 503,
+};
+
+enum HttpVersion {
+ HVER_1_0, HVER_1_1, HVER_UNKNOWN,
+ HVER_LAST = HVER_UNKNOWN
+};
+
+enum HttpVerb {
+ HV_GET, HV_POST, HV_PUT, HV_DELETE, HV_CONNECT, HV_HEAD,
+ HV_LAST = HV_HEAD
+};
+
+enum HttpError {
+ HE_NONE,
+ HE_PROTOCOL, // Received non-valid HTTP data
+ HE_DISCONNECTED, // Connection closed unexpectedly
+ HE_OVERFLOW, // Received too much data for internal buffers
+ HE_CONNECT_FAILED, // The socket failed to connect.
+ HE_SOCKET_ERROR, // An error occurred on a connected socket
+ HE_SHUTDOWN, // Http object is being destroyed
+ HE_OPERATION_CANCELLED, // Connection aborted locally
+ HE_AUTH, // Proxy Authentication Required
+ HE_CERTIFICATE_EXPIRED, // During SSL negotiation
+ HE_STREAM, // Problem reading or writing to the document
+ HE_CACHE, // Problem reading from cache
+ HE_DEFAULT
+};
+
+enum HttpHeader {
+ HH_AGE,
+ HH_CACHE_CONTROL,
+ HH_CONNECTION,
+ HH_CONTENT_DISPOSITION,
+ HH_CONTENT_LENGTH,
+ HH_CONTENT_RANGE,
+ HH_CONTENT_TYPE,
+ HH_COOKIE,
+ HH_DATE,
+ HH_ETAG,
+ HH_EXPIRES,
+ HH_HOST,
+ HH_IF_MODIFIED_SINCE,
+ HH_IF_NONE_MATCH,
+ HH_KEEP_ALIVE,
+ HH_LAST_MODIFIED,
+ HH_LOCATION,
+ HH_PROXY_AUTHENTICATE,
+ HH_PROXY_AUTHORIZATION,
+ HH_PROXY_CONNECTION,
+ HH_RANGE,
+ HH_SET_COOKIE,
+ HH_TE,
+ HH_TRAILERS,
+ HH_TRANSFER_ENCODING,
+ HH_UPGRADE,
+ HH_USER_AGENT,
+ HH_WWW_AUTHENTICATE,
+ HH_LAST = HH_WWW_AUTHENTICATE
+};
+
+const uint16 HTTP_DEFAULT_PORT = 80;
+const uint16 HTTP_SECURE_PORT = 443;
+
+//////////////////////////////////////////////////////////////////////
+// Utility Functions
+//////////////////////////////////////////////////////////////////////
+
+inline HttpError mkerr(HttpError err, HttpError def_err = HE_DEFAULT) {
+ return (err != HE_NONE) ? err : def_err;
+}
+
+const char* ToString(HttpVersion version);
+bool FromString(HttpVersion& version, const std::string& str);
+
+const char* ToString(HttpVerb verb);
+bool FromString(HttpVerb& verb, const std::string& str);
+
+const char* ToString(HttpHeader header);
+bool FromString(HttpHeader& header, const std::string& str);
+
+inline bool HttpCodeIsInformational(uint32 code) { return ((code / 100) == 1); }
+inline bool HttpCodeIsSuccessful(uint32 code) { return ((code / 100) == 2); }
+inline bool HttpCodeIsRedirection(uint32 code) { return ((code / 100) == 3); }
+inline bool HttpCodeIsClientError(uint32 code) { return ((code / 100) == 4); }
+inline bool HttpCodeIsServerError(uint32 code) { return ((code / 100) == 5); }
+
+bool HttpCodeHasBody(uint32 code);
+bool HttpCodeIsCacheable(uint32 code);
+bool HttpHeaderIsEndToEnd(HttpHeader header);
+bool HttpHeaderIsCollapsible(HttpHeader header);
+
+struct HttpData;
+bool HttpShouldKeepAlive(const HttpData& data);
+
+typedef std::pair<std::string, std::string> HttpAttribute;
+typedef std::vector<HttpAttribute> HttpAttributeList;
+void HttpComposeAttributes(const HttpAttributeList& attributes, char separator,
+ std::string* composed);
+void HttpParseAttributes(const char * data, size_t len,
+ HttpAttributeList& attributes);
+bool HttpHasAttribute(const HttpAttributeList& attributes,
+ const std::string& name,
+ std::string* value);
+bool HttpHasNthAttribute(HttpAttributeList& attributes,
+ size_t index,
+ std::string* name,
+ std::string* value);
+
+// Convert RFC1123 date (DoW, DD Mon YYYY HH:MM:SS TZ) to unix timestamp
+bool HttpDateToSeconds(const std::string& date, time_t* seconds);
+
+inline uint16 HttpDefaultPort(bool secure) {
+ return secure ? HTTP_SECURE_PORT : HTTP_DEFAULT_PORT;
+}
+
+// Returns the http server notation for a given address
+std::string HttpAddress(const SocketAddress& address, bool secure);
+
+// function object for case-insensitive std::string compare
+struct iless {
+ bool operator()(const std::string& lhs, const std::string& rhs) const {
+ return (::_stricmp(lhs.c_str(), rhs.c_str()) < 0);
+ }
+};
+
+// put quotes around a string and escape any quotes or backslashes inside it
+std::string quote(const std::string& str);
+
+//////////////////////////////////////////////////////////////////////
+// Url
+//////////////////////////////////////////////////////////////////////
+
+template<class CTYPE>
+class Url {
+public:
+ typedef typename Traits<CTYPE>::string string;
+
+ // TODO: Implement Encode/Decode
+ static int Encode(const CTYPE* source, CTYPE* destination, size_t len);
+ static int Encode(const string& source, string& destination);
+ static int Decode(const CTYPE* source, CTYPE* destination, size_t len);
+ static int Decode(const string& source, string& destination);
+
+ Url(const string& url) { do_set_url(url.c_str(), url.size()); }
+ Url(const string& path, const string& host, uint16 port = HTTP_DEFAULT_PORT)
+ : host_(host), port_(port), secure_(HTTP_SECURE_PORT == port)
+ { set_full_path(path); }
+
+ bool valid() const { return !host_.empty(); }
+ void clear() {
+ host_.clear();
+ port_ = HTTP_DEFAULT_PORT;
+ secure_ = false;
+ path_.assign(1, static_cast<CTYPE>('/'));
+ query_.clear();
+ }
+
+ void set_url(const string& val) {
+ do_set_url(val.c_str(), val.size());
+ }
+ string url() const {
+ string val; do_get_url(&val); return val;
+ }
+
+ void set_address(const string& val) {
+ do_set_address(val.c_str(), val.size());
+ }
+ string address() const {
+ string val; do_get_address(&val); return val;
+ }
+
+ void set_full_path(const string& val) {
+ do_set_full_path(val.c_str(), val.size());
+ }
+ string full_path() const {
+ string val; do_get_full_path(&val); return val;
+ }
+
+ void set_host(const string& val) { host_ = val; }
+ const string& host() const { return host_; }
+
+ void set_port(uint16 val) { port_ = val; }
+ uint16 port() const { return port_; }
+
+ void set_secure(bool val) { secure_ = val; }
+ bool secure() const { return secure_; }
+
+ void set_path(const string& val) {
+ if (val.empty()) {
+ path_.assign(1, static_cast<CTYPE>('/'));
+ } else {
+ ASSERT(val[0] == static_cast<CTYPE>('/'));
+ path_ = val;
+ }
+ }
+ const string& path() const { return path_; }
+
+ void set_query(const string& val) {
+ ASSERT(val.empty() || (val[0] == static_cast<CTYPE>('?')));
+ query_ = val;
+ }
+ const string& query() const { return query_; }
+
+ bool get_attribute(const string& name, string* value) const;
+
+private:
+ void do_set_url(const CTYPE* val, size_t len);
+ void do_set_address(const CTYPE* val, size_t len);
+ void do_set_full_path(const CTYPE* val, size_t len);
+
+ void do_get_url(string* val) const;
+ void do_get_address(string* val) const;
+ void do_get_full_path(string* val) const;
+
+ string host_, path_, query_;
+ uint16 port_;
+ bool secure_;
+};
+
+//////////////////////////////////////////////////////////////////////
+// HttpData
+//////////////////////////////////////////////////////////////////////
+
+struct HttpData {
+ typedef std::multimap<std::string, std::string, iless> HeaderMap;
+ typedef HeaderMap::const_iterator const_iterator;
+ typedef HeaderMap::iterator iterator;
+
+ HttpVersion version;
+ scoped_ptr<StreamInterface> document;
+
+ HttpData() : version(HVER_1_1) { }
+
+ enum HeaderCombine { HC_YES, HC_NO, HC_AUTO, HC_REPLACE, HC_NEW };
+ void changeHeader(const std::string& name, const std::string& value,
+ HeaderCombine combine);
+ inline void addHeader(const std::string& name, const std::string& value,
+ bool append = true) {
+ changeHeader(name, value, append ? HC_AUTO : HC_NO);
+ }
+ inline void setHeader(const std::string& name, const std::string& value,
+ bool overwrite = true) {
+ changeHeader(name, value, overwrite ? HC_REPLACE : HC_NEW);
+ }
+ // Returns count of erased headers
+ size_t clearHeader(const std::string& name);
+ // Returns iterator to next header
+ iterator clearHeader(iterator header);
+
+ // Keep in mind that this may not do what you want when multiple headers
+ // share the same name.
+ bool hasHeader(const std::string& name, std::string* value) const;
+
+ inline const_iterator begin() const {
+ return headers_.begin();
+ }
+ inline const_iterator end() const {
+ return headers_.end();
+ }
+ inline iterator begin() {
+ return headers_.begin();
+ }
+ inline iterator end() {
+ return headers_.end();
+ }
+ inline const_iterator begin(const std::string& name) const {
+ return headers_.lower_bound(name);
+ }
+ inline const_iterator end(const std::string& name) const {
+ return headers_.upper_bound(name);
+ }
+ inline iterator begin(const std::string& name) {
+ return headers_.lower_bound(name);
+ }
+ inline iterator end(const std::string& name) {
+ return headers_.upper_bound(name);
+ }
+
+ // Convenience methods using HttpHeader
+ inline void changeHeader(HttpHeader header, const std::string& value,
+ HeaderCombine combine) {
+ changeHeader(ToString(header), value, combine);
+ }
+ inline void addHeader(HttpHeader header, const std::string& value,
+ bool append = true) {
+ addHeader(ToString(header), value, append);
+ }
+ inline void setHeader(HttpHeader header, const std::string& value,
+ bool overwrite = true) {
+ setHeader(ToString(header), value, overwrite);
+ }
+ inline void clearHeader(HttpHeader header) {
+ clearHeader(ToString(header));
+ }
+ inline bool hasHeader(HttpHeader header, std::string* value) const {
+ return hasHeader(ToString(header), value);
+ }
+ inline const_iterator begin(HttpHeader header) const {
+ return headers_.lower_bound(ToString(header));
+ }
+ inline const_iterator end(HttpHeader header) const {
+ return headers_.upper_bound(ToString(header));
+ }
+ inline iterator begin(HttpHeader header) {
+ return headers_.lower_bound(ToString(header));
+ }
+ inline iterator end(HttpHeader header) {
+ return headers_.upper_bound(ToString(header));
+ }
+
+ void setContent(const std::string& content_type, StreamInterface* document);
+ void setDocumentAndLength(StreamInterface* document);
+
+ virtual size_t formatLeader(char* buffer, size_t size) const = 0;
+ virtual HttpError parseLeader(const char* line, size_t len) = 0;
+
+protected:
+ virtual ~HttpData() { }
+ void clear(bool release_document);
+ void copy(const HttpData& src);
+
+private:
+ HeaderMap headers_;
+};
+
+struct HttpRequestData : public HttpData {
+ HttpVerb verb;
+ std::string path;
+
+ HttpRequestData() : verb(HV_GET) { }
+
+ void clear(bool release_document);
+ void copy(const HttpRequestData& src);
+
+ virtual size_t formatLeader(char* buffer, size_t size) const;
+ virtual HttpError parseLeader(const char* line, size_t len);
+
+ bool getAbsoluteUri(std::string* uri) const;
+ bool getRelativeUri(std::string* host, std::string* path) const;
+};
+
+struct HttpResponseData : public HttpData {
+ uint32 scode;
+ std::string message;
+
+ HttpResponseData() : scode(HC_INTERNAL_SERVER_ERROR) { }
+ void clear(bool release_document);
+ void copy(const HttpResponseData& src);
+
+ // Convenience methods
+ void set_success(uint32 scode = HC_OK);
+ void set_success(const std::string& content_type, StreamInterface* document,
+ uint32 scode = HC_OK);
+ void set_redirect(const std::string& location,
+ uint32 scode = HC_MOVED_TEMPORARILY);
+ void set_error(uint32 scode);
+
+ virtual size_t formatLeader(char* buffer, size_t size) const;
+ virtual HttpError parseLeader(const char* line, size_t len);
+};
+
+struct HttpTransaction {
+ HttpRequestData request;
+ HttpResponseData response;
+};
+
+//////////////////////////////////////////////////////////////////////
+// Http Authentication
+//////////////////////////////////////////////////////////////////////
+
+struct HttpAuthContext {
+ std::string auth_method;
+ HttpAuthContext(const std::string& auth) : auth_method(auth) { }
+ virtual ~HttpAuthContext() { }
+};
+
+enum HttpAuthResult { HAR_RESPONSE, HAR_IGNORE, HAR_CREDENTIALS, HAR_ERROR };
+
+// 'context' is used by this function to record information between calls.
+// Start by passing a null pointer, then pass the same pointer on each additional
+// call. When the authentication attempt is finished, delete the context.
+HttpAuthResult HttpAuthenticate(
+ const char * challenge, size_t len,
+ const SocketAddress& server,
+ const std::string& method, const std::string& uri,
+ const std::string& username, const CryptString& password,
+ HttpAuthContext *& context, std::string& response, std::string& auth_method);
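A hedged sketch of the calling convention described above; the variable names and request parameters are illustrative, not part of this patch.

    rtc::HttpAuthContext* context = NULL;
    std::string response, method_used;
    rtc::HttpAuthResult result = rtc::HttpAuthenticate(
        challenge.data(), challenge.size(),  // value of the WWW-Authenticate header
        server_address, "GET", "/index.html",
        username, password,                  // password is an rtc::CryptString
        context, response, method_used);
    if (result == rtc::HAR_RESPONSE) {
      // Resend the request with |response| as the Authorization header value,
      // passing the same |context| if the server challenges again.
    }
    delete context;  // Once the authentication attempt is finished.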
+
+//////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_HTTPCOMMON_H__
diff --git a/chromium/third_party/webrtc/base/httpcommon_unittest.cc b/chromium/third_party/webrtc/base/httpcommon_unittest.cc
new file mode 100644
index 00000000000..10e378987af
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpcommon_unittest.cc
@@ -0,0 +1,165 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/httpcommon-inl.h"
+#include "webrtc/base/httpcommon.h"
+
+namespace rtc {
+
+#define TEST_PROTOCOL "http://"
+#define TEST_HOST "www.google.com"
+#define TEST_PATH "/folder/file.html"
+#define TEST_QUERY "?query=x&attr=y"
+#define TEST_URL TEST_PROTOCOL TEST_HOST TEST_PATH TEST_QUERY
+
+TEST(Url, DecomposesUrls) {
+ Url<char> url(TEST_URL);
+ EXPECT_TRUE(url.valid());
+ EXPECT_FALSE(url.secure());
+ EXPECT_STREQ(TEST_HOST, url.host().c_str());
+ EXPECT_EQ(80, url.port());
+ EXPECT_STREQ(TEST_PATH, url.path().c_str());
+ EXPECT_STREQ(TEST_QUERY, url.query().c_str());
+ EXPECT_STREQ(TEST_HOST, url.address().c_str());
+ EXPECT_STREQ(TEST_PATH TEST_QUERY, url.full_path().c_str());
+ EXPECT_STREQ(TEST_URL, url.url().c_str());
+}
+
+TEST(Url, ComposesUrls) {
+ // Set in constructor
+ Url<char> url(TEST_PATH TEST_QUERY, TEST_HOST, 80);
+ EXPECT_TRUE(url.valid());
+ EXPECT_FALSE(url.secure());
+ EXPECT_STREQ(TEST_HOST, url.host().c_str());
+ EXPECT_EQ(80, url.port());
+ EXPECT_STREQ(TEST_PATH, url.path().c_str());
+ EXPECT_STREQ(TEST_QUERY, url.query().c_str());
+ EXPECT_STREQ(TEST_HOST, url.address().c_str());
+ EXPECT_STREQ(TEST_PATH TEST_QUERY, url.full_path().c_str());
+ EXPECT_STREQ(TEST_URL, url.url().c_str());
+
+ url.clear();
+ EXPECT_FALSE(url.valid());
+ EXPECT_FALSE(url.secure());
+ EXPECT_STREQ("", url.host().c_str());
+ EXPECT_EQ(80, url.port());
+ EXPECT_STREQ("/", url.path().c_str());
+ EXPECT_STREQ("", url.query().c_str());
+
+ // Set component-wise
+ url.set_host(TEST_HOST);
+ url.set_port(80);
+ url.set_path(TEST_PATH);
+ url.set_query(TEST_QUERY);
+ EXPECT_TRUE(url.valid());
+ EXPECT_FALSE(url.secure());
+ EXPECT_STREQ(TEST_HOST, url.host().c_str());
+ EXPECT_EQ(80, url.port());
+ EXPECT_STREQ(TEST_PATH, url.path().c_str());
+ EXPECT_STREQ(TEST_QUERY, url.query().c_str());
+ EXPECT_STREQ(TEST_HOST, url.address().c_str());
+ EXPECT_STREQ(TEST_PATH TEST_QUERY, url.full_path().c_str());
+ EXPECT_STREQ(TEST_URL, url.url().c_str());
+}
+
+TEST(Url, EnsuresNonEmptyPath) {
+ Url<char> url(TEST_PROTOCOL TEST_HOST);
+ EXPECT_TRUE(url.valid());
+ EXPECT_STREQ("/", url.path().c_str());
+
+ url.clear();
+ EXPECT_STREQ("/", url.path().c_str());
+ url.set_path("");
+ EXPECT_STREQ("/", url.path().c_str());
+
+ url.clear();
+ EXPECT_STREQ("/", url.path().c_str());
+ url.set_full_path("");
+ EXPECT_STREQ("/", url.path().c_str());
+}
+
+TEST(Url, GetQueryAttributes) {
+ Url<char> url(TEST_URL);
+ std::string value;
+ EXPECT_TRUE(url.get_attribute("query", &value));
+ EXPECT_STREQ("x", value.c_str());
+ value.clear();
+ EXPECT_TRUE(url.get_attribute("attr", &value));
+ EXPECT_STREQ("y", value.c_str());
+ value.clear();
+ EXPECT_FALSE(url.get_attribute("Query", &value));
+ EXPECT_TRUE(value.empty());
+}
+
+TEST(Url, SkipsUserAndPassword) {
+ Url<char> url("https://mail.google.com:pwd@badsite.com:12345/asdf");
+ EXPECT_TRUE(url.valid());
+ EXPECT_TRUE(url.secure());
+ EXPECT_STREQ("badsite.com", url.host().c_str());
+ EXPECT_EQ(12345, url.port());
+ EXPECT_STREQ("/asdf", url.path().c_str());
+ EXPECT_STREQ("badsite.com:12345", url.address().c_str());
+}
+
+TEST(Url, SkipsUser) {
+ Url<char> url("https://mail.google.com@badsite.com:12345/asdf");
+ EXPECT_TRUE(url.valid());
+ EXPECT_TRUE(url.secure());
+ EXPECT_STREQ("badsite.com", url.host().c_str());
+ EXPECT_EQ(12345, url.port());
+ EXPECT_STREQ("/asdf", url.path().c_str());
+ EXPECT_STREQ("badsite.com:12345", url.address().c_str());
+}
+
+TEST(HttpResponseData, parseLeaderHttp1_0) {
+ static const char kResponseString[] = "HTTP/1.0 200 OK";
+ HttpResponseData response;
+ EXPECT_EQ(HE_NONE, response.parseLeader(kResponseString,
+ sizeof(kResponseString) - 1));
+ EXPECT_EQ(HVER_1_0, response.version);
+ EXPECT_EQ(200U, response.scode);
+}
+
+TEST(HttpResponseData, parseLeaderHttp1_1) {
+ static const char kResponseString[] = "HTTP/1.1 200 OK";
+ HttpResponseData response;
+ EXPECT_EQ(HE_NONE, response.parseLeader(kResponseString,
+ sizeof(kResponseString) - 1));
+ EXPECT_EQ(HVER_1_1, response.version);
+ EXPECT_EQ(200U, response.scode);
+}
+
+TEST(HttpResponseData, parseLeaderHttpUnknown) {
+ static const char kResponseString[] = "HTTP 200 OK";
+ HttpResponseData response;
+ EXPECT_EQ(HE_NONE, response.parseLeader(kResponseString,
+ sizeof(kResponseString) - 1));
+ EXPECT_EQ(HVER_UNKNOWN, response.version);
+ EXPECT_EQ(200U, response.scode);
+}
+
+TEST(HttpResponseData, parseLeaderHttpFailure) {
+ static const char kResponseString[] = "HTTP/1.1 503 Service Unavailable";
+ HttpResponseData response;
+ EXPECT_EQ(HE_NONE, response.parseLeader(kResponseString,
+ sizeof(kResponseString) - 1));
+ EXPECT_EQ(HVER_1_1, response.version);
+ EXPECT_EQ(503U, response.scode);
+}
+
+TEST(HttpResponseData, parseLeaderHttpInvalid) {
+ static const char kResponseString[] = "Durrrrr, what's HTTP?";
+ HttpResponseData response;
+ EXPECT_EQ(HE_PROTOCOL, response.parseLeader(kResponseString,
+ sizeof(kResponseString) - 1));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/httprequest.cc b/chromium/third_party/webrtc/base/httprequest.cc
new file mode 100644
index 00000000000..9ce2377e8c7
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httprequest.cc
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/httprequest.h"
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/firewallsocketserver.h"
+#include "webrtc/base/httpclient.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/socketadapters.h"
+#include "webrtc/base/socketpool.h"
+#include "webrtc/base/ssladapter.h"
+
+using namespace rtc;
+
+///////////////////////////////////////////////////////////////////////////////
+// HttpMonitor
+///////////////////////////////////////////////////////////////////////////////
+
+HttpMonitor::HttpMonitor(SocketServer *ss) {
+ ASSERT(Thread::Current() != NULL);
+ ss_ = ss;
+ reset();
+}
+
+void HttpMonitor::Connect(HttpClient *http) {
+ http->SignalHttpClientComplete.connect(this,
+ &HttpMonitor::OnHttpClientComplete);
+}
+
+void HttpMonitor::OnHttpClientComplete(HttpClient * http, HttpErrorType error) {
+ complete_ = true;
+ error_ = error;
+ ss_->WakeUp();
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// HttpRequest
+///////////////////////////////////////////////////////////////////////////////
+
+const int kDefaultHTTPTimeout = 30 * 1000; // 30 sec
+
+HttpRequest::HttpRequest(const std::string &user_agent)
+ : firewall_(0), port_(80), secure_(false),
+ timeout_(kDefaultHTTPTimeout), fail_redirect_(false),
+ client_(user_agent.c_str(), NULL), error_(HE_NONE) {
+}
+
+void HttpRequest::Send() {
+ // TODO: Rewrite this to use the thread's native socket server, and a more
+ // natural flow?
+
+ PhysicalSocketServer physical;
+ SocketServer * ss = &physical;
+ if (firewall_) {
+ ss = new FirewallSocketServer(ss, firewall_);
+ }
+
+ SslSocketFactory factory(ss, client_.agent());
+ factory.SetProxy(proxy_);
+ if (secure_)
+ factory.UseSSL(host_.c_str());
+
+ //factory.SetLogging("HttpRequest");
+
+ ReuseSocketPool pool(&factory);
+ client_.set_pool(&pool);
+
+ bool transparent_proxy = (port_ == 80) && ((proxy_.type == PROXY_HTTPS) ||
+ (proxy_.type == PROXY_UNKNOWN));
+
+ if (transparent_proxy) {
+ client_.set_proxy(proxy_);
+ }
+ client_.set_fail_redirect(fail_redirect_);
+
+ SocketAddress server(host_, port_);
+ client_.set_server(server);
+
+ LOG(LS_INFO) << "HttpRequest start: " << host_ + client_.request().path;
+
+ HttpMonitor monitor(ss);
+ monitor.Connect(&client_);
+ client_.start();
+ ss->Wait(timeout_, true);
+ if (!monitor.done()) {
+ LOG(LS_INFO) << "HttpRequest request timed out";
+ client_.reset();
+ return;
+ }
+
+ set_error(monitor.error());
+ if (error_) {
+ LOG(LS_INFO) << "HttpRequest request error: " << error_;
+ return;
+ }
+
+ std::string value;
+ if (client_.response().hasHeader(HH_LOCATION, &value)) {
+ response_redirect_ = value.c_str();
+ }
+}
diff --git a/chromium/third_party/webrtc/base/httprequest.h b/chromium/third_party/webrtc/base/httprequest.h
new file mode 100644
index 00000000000..37983324c9d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httprequest.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef _HTTPREQUEST_H_
+#define _HTTPREQUEST_H_
+
+#include "webrtc/base/httpclient.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/proxyinfo.h"
+#include "webrtc/base/socketserver.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/sslsocketfactory.h" // Deprecated include
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// HttpRequest
+///////////////////////////////////////////////////////////////////////////////
+
+class FirewallManager;
+class MemoryStream;
+
+class HttpRequest {
+public:
+ HttpRequest(const std::string &user_agent);
+
+ void Send();
+
+ void set_proxy(const ProxyInfo& proxy) {
+ proxy_ = proxy;
+ }
+ void set_firewall(FirewallManager * firewall) {
+ firewall_ = firewall;
+ }
+
+ // The DNS name of the host to connect to.
+ const std::string& host() { return host_; }
+ void set_host(const std::string& host) { host_ = host; }
+
+ // The port to connect to on the target host.
+ int port() { return port_; }
+ void set_port(int port) { port_ = port; }
+
+ // Whether the request should use SSL.
+ bool secure() { return secure_; }
+ void set_secure(bool secure) { secure_ = secure; }
+
+ // Returns the redirect location (the Location header value) when a redirect occurs.
+ const std::string& response_redirect() { return response_redirect_; }
+
+ // Time to wait on the download, in ms. Default is 30000 (30 s); see kDefaultHTTPTimeout.
+ int timeout() { return timeout_; }
+ void set_timeout(int timeout) { timeout_ = timeout; }
+
+ // When true, redirects fail the request so the redirect URL can be inspected.
+ bool fail_redirect() const { return fail_redirect_; }
+ void set_fail_redirect(bool fail_redirect) { fail_redirect_ = fail_redirect; }
+
+ HttpRequestData& request() { return client_.request(); }
+ HttpResponseData& response() { return client_.response(); }
+ HttpErrorType error() { return error_; }
+
+protected:
+ void set_error(HttpErrorType error) { error_ = error; }
+
+private:
+ ProxyInfo proxy_;
+ FirewallManager * firewall_;
+ std::string host_;
+ int port_;
+ bool secure_;
+ int timeout_;
+ bool fail_redirect_;
+ HttpClient client_;
+ HttpErrorType error_;
+ std::string response_redirect_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// HttpMonitor
+///////////////////////////////////////////////////////////////////////////////
+
+class HttpMonitor : public sigslot::has_slots<> {
+public:
+ HttpMonitor(SocketServer *ss);
+
+ void reset() {
+ complete_ = false;
+ error_ = HE_DEFAULT;
+ }
+
+ bool done() const { return complete_; }
+ HttpErrorType error() const { return error_; }
+
+ void Connect(HttpClient* http);
+ void OnHttpClientComplete(HttpClient * http, HttpErrorType error);
+
+private:
+ bool complete_;
+ HttpErrorType error_;
+ SocketServer *ss_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // _HTTPREQUEST_H_
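
The class comments above describe a blocking fetch API. A minimal caller-side sketch follows; it is illustrative only and not part of the patch. The request/response field names (path, scode) come from the httpcommon types used elsewhere in this change, and the host and user agent are placeholders.

    #include "webrtc/base/httprequest.h"

    void FetchIndex() {
      rtc::HttpRequest req("ExampleAgent/1.0");   // Placeholder user agent.
      req.set_host("example.com");                // Placeholder host.
      req.set_port(80);
      req.set_secure(false);                      // true routes through SslSocketFactory.
      req.set_timeout(10 * 1000);                 // 10 s instead of the 30 s default.
      req.request().path = "/index.html";         // HttpRequestData member (httpcommon.h).
      req.Send();                                 // Blocks until completion or timeout.
      if (req.error() == rtc::HE_NONE) {
        LOG(LS_INFO) << "HTTP status: " << req.response().scode;
      }
    }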
diff --git a/chromium/third_party/webrtc/base/httpserver.cc b/chromium/third_party/webrtc/base/httpserver.cc
new file mode 100644
index 00000000000..0d225842260
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpserver.cc
@@ -0,0 +1,288 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "webrtc/base/httpcommon-inl.h"
+
+#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/httpserver.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/socketstream.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// HttpServer
+///////////////////////////////////////////////////////////////////////////////
+
+HttpServer::HttpServer() : next_connection_id_(1), closing_(false) {
+}
+
+HttpServer::~HttpServer() {
+ if (closing_) {
+ LOG(LS_WARNING) << "HttpServer::CloseAll has not completed";
+ }
+ for (ConnectionMap::iterator it = connections_.begin();
+ it != connections_.end();
+ ++it) {
+ StreamInterface* stream = it->second->EndProcess();
+ delete stream;
+ delete it->second;
+ }
+}
+
+int
+HttpServer::HandleConnection(StreamInterface* stream) {
+ int connection_id = next_connection_id_++;
+ ASSERT(connection_id != HTTP_INVALID_CONNECTION_ID);
+ Connection* connection = new Connection(connection_id, this);
+ connections_.insert(ConnectionMap::value_type(connection_id, connection));
+ connection->BeginProcess(stream);
+ return connection_id;
+}
+
+void
+HttpServer::Respond(HttpServerTransaction* transaction) {
+ int connection_id = transaction->connection_id();
+ if (Connection* connection = Find(connection_id)) {
+ connection->Respond(transaction);
+ } else {
+ delete transaction;
+ // We may be tempted to SignalHttpComplete, but that implies that a
+ // connection still exists.
+ }
+}
+
+void
+HttpServer::Close(int connection_id, bool force) {
+ if (Connection* connection = Find(connection_id)) {
+ connection->InitiateClose(force);
+ }
+}
+
+void
+HttpServer::CloseAll(bool force) {
+ if (connections_.empty()) {
+ SignalCloseAllComplete(this);
+ return;
+ }
+ closing_ = true;
+ std::list<Connection*> connections;
+ for (ConnectionMap::const_iterator it = connections_.begin();
+ it != connections_.end(); ++it) {
+ connections.push_back(it->second);
+ }
+ for (std::list<Connection*>::const_iterator it = connections.begin();
+ it != connections.end(); ++it) {
+ (*it)->InitiateClose(force);
+ }
+}
+
+HttpServer::Connection*
+HttpServer::Find(int connection_id) {
+ ConnectionMap::iterator it = connections_.find(connection_id);
+ if (it == connections_.end())
+ return NULL;
+ return it->second;
+}
+
+void
+HttpServer::Remove(int connection_id) {
+ ConnectionMap::iterator it = connections_.find(connection_id);
+ if (it == connections_.end()) {
+ ASSERT(false);
+ return;
+ }
+ Connection* connection = it->second;
+ connections_.erase(it);
+ SignalConnectionClosed(this, connection_id, connection->EndProcess());
+ delete connection;
+ if (closing_ && connections_.empty()) {
+ closing_ = false;
+ SignalCloseAllComplete(this);
+ }
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// HttpServer::Connection
+///////////////////////////////////////////////////////////////////////////////
+
+HttpServer::Connection::Connection(int connection_id, HttpServer* server)
+ : connection_id_(connection_id), server_(server),
+ current_(NULL), signalling_(false), close_(false) {
+}
+
+HttpServer::Connection::~Connection() {
+ // It's possible that an object hosted inside this transaction signalled
+ // an event which caused the connection to close.
+ Thread::Current()->Dispose(current_);
+}
+
+void
+HttpServer::Connection::BeginProcess(StreamInterface* stream) {
+ base_.notify(this);
+ base_.attach(stream);
+ current_ = new HttpServerTransaction(connection_id_);
+ if (base_.mode() != HM_CONNECT)
+ base_.recv(&current_->request);
+}
+
+StreamInterface*
+HttpServer::Connection::EndProcess() {
+ base_.notify(NULL);
+ base_.abort(HE_DISCONNECTED);
+ return base_.detach();
+}
+
+void
+HttpServer::Connection::Respond(HttpServerTransaction* transaction) {
+ ASSERT(current_ == NULL);
+ current_ = transaction;
+ if (current_->response.begin() == current_->response.end()) {
+ current_->response.set_error(HC_INTERNAL_SERVER_ERROR);
+ }
+ bool keep_alive = HttpShouldKeepAlive(current_->request);
+ current_->response.setHeader(HH_CONNECTION,
+ keep_alive ? "Keep-Alive" : "Close",
+ false);
+ close_ = !HttpShouldKeepAlive(current_->response);
+ base_.send(&current_->response);
+}
+
+void
+HttpServer::Connection::InitiateClose(bool force) {
+ bool request_in_progress = (HM_SEND == base_.mode()) || (NULL == current_);
+ if (!signalling_ && (force || !request_in_progress)) {
+ server_->Remove(connection_id_);
+ } else {
+ close_ = true;
+ }
+}
+
+//
+// IHttpNotify Implementation
+//
+
+HttpError
+HttpServer::Connection::onHttpHeaderComplete(bool chunked, size_t& data_size) {
+ if (data_size == SIZE_UNKNOWN) {
+ data_size = 0;
+ }
+ ASSERT(current_ != NULL);
+ bool custom_document = false;
+ server_->SignalHttpRequestHeader(server_, current_, &custom_document);
+ if (!custom_document) {
+ current_->request.document.reset(new MemoryStream);
+ }
+ return HE_NONE;
+}
+
+void
+HttpServer::Connection::onHttpComplete(HttpMode mode, HttpError err) {
+ if (mode == HM_SEND) {
+ ASSERT(current_ != NULL);
+ signalling_ = true;
+ server_->SignalHttpRequestComplete(server_, current_, err);
+ signalling_ = false;
+ if (close_) {
+ // Force a close
+ err = HE_DISCONNECTED;
+ }
+ }
+ if (err != HE_NONE) {
+ server_->Remove(connection_id_);
+ } else if (mode == HM_CONNECT) {
+ base_.recv(&current_->request);
+ } else if (mode == HM_RECV) {
+ ASSERT(current_ != NULL);
+ // TODO: do we need this?
+ //request_.document_->rewind();
+ HttpServerTransaction* transaction = current_;
+ current_ = NULL;
+ server_->SignalHttpRequest(server_, transaction);
+ } else if (mode == HM_SEND) {
+ Thread::Current()->Dispose(current_->response.document.release());
+ current_->request.clear(true);
+ current_->response.clear(true);
+ base_.recv(&current_->request);
+ } else {
+ ASSERT(false);
+ }
+}
+
+void
+HttpServer::Connection::onHttpClosed(HttpError err) {
+ RTC_UNUSED(err);
+ server_->Remove(connection_id_);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// HttpListenServer
+///////////////////////////////////////////////////////////////////////////////
+
+HttpListenServer::HttpListenServer() {
+ SignalConnectionClosed.connect(this, &HttpListenServer::OnConnectionClosed);
+}
+
+HttpListenServer::~HttpListenServer() {
+}
+
+int HttpListenServer::Listen(const SocketAddress& address) {
+ AsyncSocket* sock =
+ Thread::Current()->socketserver()->CreateAsyncSocket(address.family(),
+ SOCK_STREAM);
+ if (!sock) {
+ return SOCKET_ERROR;
+ }
+ listener_.reset(sock);
+ listener_->SignalReadEvent.connect(this, &HttpListenServer::OnReadEvent);
+ if ((listener_->Bind(address) != SOCKET_ERROR) &&
+ (listener_->Listen(5) != SOCKET_ERROR))
+ return 0;
+ return listener_->GetError();
+}
+
+bool HttpListenServer::GetAddress(SocketAddress* address) const {
+ if (!listener_) {
+ return false;
+ }
+ *address = listener_->GetLocalAddress();
+ return !address->IsNil();
+}
+
+void HttpListenServer::StopListening() {
+ if (listener_) {
+ listener_->Close();
+ }
+}
+
+void HttpListenServer::OnReadEvent(AsyncSocket* socket) {
+ ASSERT(socket == listener_.get());
+ ASSERT(listener_);
+ AsyncSocket* incoming = listener_->Accept(NULL);
+ if (incoming) {
+ StreamInterface* stream = new SocketStream(incoming);
+ //stream = new LoggingAdapter(stream, LS_VERBOSE, "HttpServer", false);
+ HandleConnection(stream);
+ }
+}
+
+void HttpListenServer::OnConnectionClosed(HttpServer* server,
+ int connection_id,
+ StreamInterface* stream) {
+ Thread::Current()->Dispose(stream);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/httpserver.h b/chromium/third_party/webrtc/base/httpserver.h
new file mode 100644
index 00000000000..77de615fef2
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpserver.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_HTTPSERVER_H__
+#define WEBRTC_BASE_HTTPSERVER_H__
+
+#include <map>
+#include "webrtc/base/httpbase.h"
+
+namespace rtc {
+
+class AsyncSocket;
+class HttpServer;
+class SocketAddress;
+
+//////////////////////////////////////////////////////////////////////
+// HttpServer
+//////////////////////////////////////////////////////////////////////
+
+const int HTTP_INVALID_CONNECTION_ID = 0;
+
+struct HttpServerTransaction : public HttpTransaction {
+public:
+ HttpServerTransaction(int id) : connection_id_(id) { }
+ int connection_id() const { return connection_id_; }
+
+private:
+ int connection_id_;
+};
+
+class HttpServer {
+public:
+ HttpServer();
+ virtual ~HttpServer();
+
+ int HandleConnection(StreamInterface* stream);
+ // Due to sigslot issues, we can't destroy some streams at an arbitrary time.
+ sigslot::signal3<HttpServer*, int, StreamInterface*> SignalConnectionClosed;
+
+ // This signal occurs when the HTTP request headers have been received, but
+ // before the request body is written to the request document. By default,
+ // the request document is a MemoryStream. By handling this signal, the
+ // document can be overridden, in which case the third signal argument should
+ // be set to true. In the case where the request body should be ignored,
+ // the document can be set to NULL. Note that the transaction object is still
+ // owned by the HttpServer at this point.
+ sigslot::signal3<HttpServer*, HttpServerTransaction*, bool*>
+ SignalHttpRequestHeader;
+
+ // An HTTP request has been made, and is available in the transaction object.
+ // Populate the transaction's response, and then return the object via the
+ // Respond method. Note that during this time, ownership of the transaction
+ // object is transferred, so it may be passed between threads, although
+ // Respond() must be called on the server's active thread.
+ sigslot::signal2<HttpServer*, HttpServerTransaction*> SignalHttpRequest;
+ void Respond(HttpServerTransaction* transaction);
+
+ // If you want to know when a request completes, listen to this event.
+ sigslot::signal3<HttpServer*, HttpServerTransaction*, int>
+ SignalHttpRequestComplete;
+
+ // Stop processing the connection indicated by connection_id.
+ // Unless force is true, the server will complete sending a response that is
+ // in progress.
+ void Close(int connection_id, bool force);
+ void CloseAll(bool force);
+
+ // After calling CloseAll, this event is signalled to indicate that all
+ // outstanding connections have closed.
+ sigslot::signal1<HttpServer*> SignalCloseAllComplete;
+
+private:
+ class Connection : private IHttpNotify {
+ public:
+ Connection(int connection_id, HttpServer* server);
+ virtual ~Connection();
+
+ void BeginProcess(StreamInterface* stream);
+ StreamInterface* EndProcess();
+
+ void Respond(HttpServerTransaction* transaction);
+ void InitiateClose(bool force);
+
+ // IHttpNotify Interface
+ virtual HttpError onHttpHeaderComplete(bool chunked, size_t& data_size);
+ virtual void onHttpComplete(HttpMode mode, HttpError err);
+ virtual void onHttpClosed(HttpError err);
+
+ int connection_id_;
+ HttpServer* server_;
+ HttpBase base_;
+ HttpServerTransaction* current_;
+ bool signalling_, close_;
+ };
+
+ Connection* Find(int connection_id);
+ void Remove(int connection_id);
+
+ friend class Connection;
+ typedef std::map<int,Connection*> ConnectionMap;
+
+ ConnectionMap connections_;
+ int next_connection_id_;
+ bool closing_;
+};
+
+//////////////////////////////////////////////////////////////////////
+
+class HttpListenServer : public HttpServer, public sigslot::has_slots<> {
+public:
+ HttpListenServer();
+ virtual ~HttpListenServer();
+
+ int Listen(const SocketAddress& address);
+ bool GetAddress(SocketAddress* address) const;
+ void StopListening();
+
+private:
+ void OnReadEvent(AsyncSocket* socket);
+ void OnConnectionClosed(HttpServer* server, int connection_id,
+ StreamInterface* stream);
+
+ scoped_ptr<AsyncSocket> listener_;
+};
+
+//////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_HTTPSERVER_H__
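
The signal documentation above describes the request lifecycle (header signal, request signal, Respond(), completion signal). Below is a hedged sketch of a minimal handler wired to HttpListenServer, modeled on the HttpServerMonitor helper in the unit test that follows; it is illustrative only, and the listen address is a placeholder.

    #include "webrtc/base/httpserver.h"
    #include "webrtc/base/socketaddress.h"

    class HelloHandler : public sigslot::has_slots<> {
     public:
      explicit HelloHandler(rtc::HttpListenServer* server) : server_(server) {
        server_->SignalHttpRequest.connect(this, &HelloHandler::OnRequest);
      }
      int Start() {
        // Returns 0 on success, otherwise a socket error code.
        return server_->Listen(rtc::SocketAddress("127.0.0.1", 8080));
      }

     private:
      void OnRequest(rtc::HttpServer* server, rtc::HttpServerTransaction* t) {
        // Ownership of |t| is ours until it is handed back via Respond().
        t->response.set_success();
        t->response.setHeader(rtc::HH_CONNECTION, "Close");
        server->Respond(t);
      }
      rtc::HttpListenServer* server_;
    };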
diff --git a/chromium/third_party/webrtc/base/httpserver_unittest.cc b/chromium/third_party/webrtc/base/httpserver_unittest.cc
new file mode 100644
index 00000000000..0c653cbb9e6
--- /dev/null
+++ b/chromium/third_party/webrtc/base/httpserver_unittest.cc
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/httpserver.h"
+#include "webrtc/base/testutils.h"
+
+using namespace testing;
+
+namespace rtc {
+
+namespace {
+ const char* const kRequest =
+ "GET /index.html HTTP/1.1\r\n"
+ "Host: localhost\r\n"
+ "\r\n";
+
+ struct HttpServerMonitor : public sigslot::has_slots<> {
+ HttpServerTransaction* transaction;
+ bool server_closed, connection_closed;
+
+ HttpServerMonitor(HttpServer* server)
+ : transaction(NULL), server_closed(false), connection_closed(false) {
+ server->SignalCloseAllComplete.connect(this,
+ &HttpServerMonitor::OnClosed);
+ server->SignalHttpRequest.connect(this, &HttpServerMonitor::OnRequest);
+ server->SignalHttpRequestComplete.connect(this,
+ &HttpServerMonitor::OnRequestComplete);
+ server->SignalConnectionClosed.connect(this,
+ &HttpServerMonitor::OnConnectionClosed);
+ }
+ void OnRequest(HttpServer*, HttpServerTransaction* t) {
+ ASSERT_FALSE(transaction);
+ transaction = t;
+ transaction->response.set_success();
+ transaction->response.setHeader(HH_CONNECTION, "Close");
+ }
+ void OnRequestComplete(HttpServer*, HttpServerTransaction* t, int) {
+ ASSERT_EQ(transaction, t);
+ transaction = NULL;
+ }
+ void OnClosed(HttpServer*) {
+ server_closed = true;
+ }
+ void OnConnectionClosed(HttpServer*, int, StreamInterface* stream) {
+ connection_closed = true;
+ delete stream;
+ }
+ };
+
+ void CreateClientConnection(HttpServer& server,
+ HttpServerMonitor& monitor,
+ bool send_request) {
+ StreamSource* client = new StreamSource;
+ client->SetState(SS_OPEN);
+ server.HandleConnection(client);
+ EXPECT_FALSE(monitor.server_closed);
+ EXPECT_FALSE(monitor.transaction);
+
+ if (send_request) {
+ // Simulate a request
+ client->QueueString(kRequest);
+ EXPECT_FALSE(monitor.server_closed);
+ }
+ }
+} // anonymous namespace
+
+TEST(HttpServer, DoesNotSignalCloseUnlessCloseAllIsCalled) {
+ HttpServer server;
+ HttpServerMonitor monitor(&server);
+ // Add an active client connection
+ CreateClientConnection(server, monitor, true);
+ // Simulate a response
+ ASSERT_TRUE(NULL != monitor.transaction);
+ server.Respond(monitor.transaction);
+ EXPECT_FALSE(monitor.transaction);
+ // Connection has closed, but no server close signal
+ EXPECT_FALSE(monitor.server_closed);
+ EXPECT_TRUE(monitor.connection_closed);
+}
+
+TEST(HttpServer, SignalsCloseWhenNoConnectionsAreActive) {
+ HttpServer server;
+ HttpServerMonitor monitor(&server);
+ // Add an idle client connection
+ CreateClientConnection(server, monitor, false);
+ // Perform graceful close
+ server.CloseAll(false);
+ // Connections have all closed
+ EXPECT_TRUE(monitor.server_closed);
+ EXPECT_TRUE(monitor.connection_closed);
+}
+
+TEST(HttpServer, SignalsCloseAfterGracefulCloseAll) {
+ HttpServer server;
+ HttpServerMonitor monitor(&server);
+ // Add an active client connection
+ CreateClientConnection(server, monitor, true);
+ // Initiate a graceful close
+ server.CloseAll(false);
+ EXPECT_FALSE(monitor.server_closed);
+ // Simulate a response
+ ASSERT_TRUE(NULL != monitor.transaction);
+ server.Respond(monitor.transaction);
+ EXPECT_FALSE(monitor.transaction);
+ // Connections have all closed
+ EXPECT_TRUE(monitor.server_closed);
+ EXPECT_TRUE(monitor.connection_closed);
+}
+
+TEST(HttpServer, SignalsCloseAfterForcedCloseAll) {
+ HttpServer server;
+ HttpServerMonitor monitor(&server);
+ // Add an active client connection
+ CreateClientConnection(server, monitor, true);
+ // Initiate a forceful close
+ server.CloseAll(true);
+ // Connections have all closed
+ EXPECT_TRUE(monitor.server_closed);
+ EXPECT_TRUE(monitor.connection_closed);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/ifaddrs-android.cc b/chromium/third_party/webrtc/base/ifaddrs-android.cc
new file mode 100644
index 00000000000..c8363d567a1
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ifaddrs-android.cc
@@ -0,0 +1,223 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_ANDROID)
+#include "webrtc/base/ifaddrs-android.h"
+#include <stdlib.h>
+#include <string.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <sys/utsname.h>
+#include <sys/ioctl.h>
+#include <netinet/in.h>
+#include <net/if.h>
+#include <unistd.h>
+#include <errno.h>
+#include <linux/netlink.h>
+#include <linux/rtnetlink.h>
+
+namespace {
+
+struct netlinkrequest {
+ nlmsghdr header;
+ ifaddrmsg msg;
+};
+
+const int kMaxReadSize = 4096;
+
+} // namespace
+
+namespace rtc {
+
+int set_ifname(struct ifaddrs* ifaddr, int interface) {
+ char buf[IFNAMSIZ] = {0};
+ char* name = if_indextoname(interface, buf);
+ if (name == NULL) {
+ return -1;
+ }
+ ifaddr->ifa_name = new char[strlen(name) + 1];
+ strncpy(ifaddr->ifa_name, name, strlen(name) + 1);
+ return 0;
+}
+
+int set_flags(struct ifaddrs* ifaddr) {
+ int fd = socket(AF_INET, SOCK_DGRAM, 0);
+ if (fd == -1) {
+ return -1;
+ }
+ ifreq ifr;
+ memset(&ifr, 0, sizeof(ifr));
+ strncpy(ifr.ifr_name, ifaddr->ifa_name, IFNAMSIZ - 1);
+ int rc = ioctl(fd, SIOCGIFFLAGS, &ifr);
+ close(fd);
+ if (rc == -1) {
+ return -1;
+ }
+ ifaddr->ifa_flags = ifr.ifr_flags;
+ return 0;
+}
+
+int set_addresses(struct ifaddrs* ifaddr, ifaddrmsg* msg, void* data,
+ size_t len) {
+ if (msg->ifa_family == AF_INET) {
+ sockaddr_in* sa = new sockaddr_in;
+ sa->sin_family = AF_INET;
+ memcpy(&sa->sin_addr, data, len);
+ ifaddr->ifa_addr = reinterpret_cast<sockaddr*>(sa);
+ } else if (msg->ifa_family == AF_INET6) {
+ sockaddr_in6* sa = new sockaddr_in6;
+ sa->sin6_family = AF_INET6;
+ sa->sin6_scope_id = msg->ifa_index;
+ memcpy(&sa->sin6_addr, data, len);
+ ifaddr->ifa_addr = reinterpret_cast<sockaddr*>(sa);
+ } else {
+ return -1;
+ }
+ return 0;
+}
+
+int make_prefixes(struct ifaddrs* ifaddr, int family, int prefixlen) {
+ char* prefix = NULL;
+ if (family == AF_INET) {
+ sockaddr_in* mask = new sockaddr_in;
+ mask->sin_family = AF_INET;
+ memset(&mask->sin_addr, 0, sizeof(in_addr));
+ ifaddr->ifa_netmask = reinterpret_cast<sockaddr*>(mask);
+ if (prefixlen > 32) {
+ prefixlen = 32;
+ }
+ prefix = reinterpret_cast<char*>(&mask->sin_addr);
+ } else if (family == AF_INET6) {
+ sockaddr_in6* mask = new sockaddr_in6;
+ mask->sin6_family = AF_INET6;
+ memset(&mask->sin6_addr, 0, sizeof(in6_addr));
+ ifaddr->ifa_netmask = reinterpret_cast<sockaddr*>(mask);
+ if (prefixlen > 128) {
+ prefixlen = 128;
+ }
+ prefix = reinterpret_cast<char*>(&mask->sin6_addr);
+ } else {
+ return -1;
+ }
+ for (int i = 0; i < (prefixlen / 8); i++) {
+ *prefix++ = 0xFF;
+ }
+ if (prefixlen % 8) {
+ // Only write the partial byte when the prefix is not byte aligned, so the
+ // write never lands past the end of the address.
+ char remainder = 0xff;
+ remainder <<= (8 - prefixlen % 8);
+ *prefix = remainder;
+ }
+ return 0;
+}
+
+int populate_ifaddrs(struct ifaddrs* ifaddr, ifaddrmsg* msg, void* bytes,
+ size_t len) {
+ if (set_ifname(ifaddr, msg->ifa_index) != 0) {
+ return -1;
+ }
+ if (set_flags(ifaddr) != 0) {
+ return -1;
+ }
+ if (set_addresses(ifaddr, msg, bytes, len) != 0) {
+ return -1;
+ }
+ if (make_prefixes(ifaddr, msg->ifa_family, msg->ifa_prefixlen) != 0) {
+ return -1;
+ }
+ return 0;
+}
+
+int getifaddrs(struct ifaddrs** result) {
+ int fd = socket(PF_NETLINK, SOCK_RAW, NETLINK_ROUTE);
+ if (fd < 0) {
+ return -1;
+ }
+
+ netlinkrequest ifaddr_request;
+ memset(&ifaddr_request, 0, sizeof(ifaddr_request));
+ ifaddr_request.header.nlmsg_flags = NLM_F_ROOT | NLM_F_REQUEST;
+ ifaddr_request.header.nlmsg_type = RTM_GETADDR;
+ ifaddr_request.header.nlmsg_len = NLMSG_LENGTH(sizeof(ifaddrmsg));
+
+ ssize_t count = send(fd, &ifaddr_request, ifaddr_request.header.nlmsg_len, 0);
+ if (static_cast<size_t>(count) != ifaddr_request.header.nlmsg_len) {
+ close(fd);
+ return -1;
+ }
+ struct ifaddrs* start = NULL;
+ struct ifaddrs* current = NULL;
+ char buf[kMaxReadSize];
+ ssize_t amount_read = recv(fd, &buf, kMaxReadSize, 0);
+ while (amount_read > 0) {
+ nlmsghdr* header = reinterpret_cast<nlmsghdr*>(&buf[0]);
+ size_t header_size = static_cast<size_t>(amount_read);
+ for ( ; NLMSG_OK(header, header_size);
+ header = NLMSG_NEXT(header, header_size)) {
+ switch (header->nlmsg_type) {
+ case NLMSG_DONE:
+ // Success. Return.
+ *result = start;
+ close(fd);
+ return 0;
+ case NLMSG_ERROR:
+ close(fd);
+ freeifaddrs(start);
+ return -1;
+ case RTM_NEWADDR: {
+ ifaddrmsg* address_msg =
+ reinterpret_cast<ifaddrmsg*>(NLMSG_DATA(header));
+ rtattr* rta = IFA_RTA(address_msg);
+ ssize_t payload_len = IFA_PAYLOAD(header);
+ while (RTA_OK(rta, payload_len)) {
+ if (rta->rta_type == IFA_ADDRESS) {
+ int family = address_msg->ifa_family;
+ if (family == AF_INET || family == AF_INET6) {
+ ifaddrs* newest = new ifaddrs;
+ memset(newest, 0, sizeof(ifaddrs));
+ if (current) {
+ current->ifa_next = newest;
+ } else {
+ start = newest;
+ }
+ if (populate_ifaddrs(newest, address_msg, RTA_DATA(rta),
+ RTA_PAYLOAD(rta)) != 0) {
+ freeifaddrs(start);
+ *result = NULL;
+ return -1;
+ }
+ current = newest;
+ }
+ }
+ rta = RTA_NEXT(rta, payload_len);
+ }
+ break;
+ }
+ }
+ }
+ amount_read = recv(fd, &buf, kMaxReadSize, 0);
+ }
+ close(fd);
+ freeifaddrs(start);
+ return -1;
+}
+
+void freeifaddrs(struct ifaddrs* addrs) {
+ struct ifaddrs* last = NULL;
+ struct ifaddrs* cursor = addrs;
+ while (cursor) {
+ delete[] cursor->ifa_name;
+ delete cursor->ifa_addr;
+ delete cursor->ifa_netmask;
+ last = cursor;
+ cursor = cursor->ifa_next;
+ delete last;
+ }
+}
+
+} // namespace rtc
+#endif // defined(WEBRTC_ANDROID)
diff --git a/chromium/third_party/webrtc/base/ifaddrs-android.h b/chromium/third_party/webrtc/base/ifaddrs-android.h
new file mode 100644
index 00000000000..10890af6525
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ifaddrs-android.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_IFADDRS_ANDROID_H_
+#define WEBRTC_BASE_IFADDRS_ANDROID_H_
+
+#include <stdio.h>
+#include <sys/socket.h>
+
+
+// Implementation of getifaddrs for Android.
+// Fills out a list of ifaddr structs (see below) which contain information
+// about every network interface available on the host.
+// See 'man getifaddrs' on Linux or OS X (nb: it is not a POSIX function).
+struct ifaddrs {
+ struct ifaddrs* ifa_next;
+ char* ifa_name;
+ unsigned int ifa_flags;
+ struct sockaddr* ifa_addr;
+ struct sockaddr* ifa_netmask;
+ // Real ifaddrs has broadcast, point to point and data members.
+ // We don't need them (yet?).
+};
+
+namespace rtc {
+
+int getifaddrs(struct ifaddrs** result);
+void freeifaddrs(struct ifaddrs* addrs);
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_IFADDRS_ANDROID_H_
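
A short caller-side sketch of this Android getifaddrs replacement; illustrative only. The walk relies solely on the fields declared in the struct above, and every list returned by getifaddrs must be released with freeifaddrs.

    #include <stdio.h>

    #include "webrtc/base/ifaddrs-android.h"

    void ListInterfaces() {
      struct ifaddrs* list = NULL;
      if (rtc::getifaddrs(&list) != 0) {
        return;  // The netlink query failed.
      }
      for (struct ifaddrs* cursor = list; cursor != NULL; cursor = cursor->ifa_next) {
        // ifa_addr is AF_INET or AF_INET6; ifa_netmask carries the matching prefix.
        printf("%s family=%d\n", cursor->ifa_name, cursor->ifa_addr->sa_family);
      }
      rtc::freeifaddrs(list);  // Frees the names, addresses, netmasks and nodes.
    }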
diff --git a/chromium/third_party/webrtc/base/iosfilesystem.mm b/chromium/third_party/webrtc/base/iosfilesystem.mm
new file mode 100644
index 00000000000..eb4bbecd584
--- /dev/null
+++ b/chromium/third_party/webrtc/base/iosfilesystem.mm
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file only exists because various iOS system APIs are only
+// available from Objective-C. See unixfilesystem.cc for the only use
+// (enforced by a lack of a header file).
+
+#import <Foundation/NSPathUtilities.h>
+#import <Foundation/NSProcessInfo.h>
+#include <string.h>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/pathutils.h"
+
+// Return a new[]'d |char*| copy of the UTF8 representation of |s|.
+// Caller owns the returned memory and must use delete[] on it.
+static char* copyString(NSString* s) {
+ const char* utf8 = [s UTF8String];
+ size_t len = strlen(utf8) + 1;
+ char* copy = new char[len];
+ // This uses a new[] + strcpy (instead of strdup) because the
+ // receiver expects to be able to delete[] the returned pointer
+ // (instead of free()ing it).
+ strcpy(copy, utf8);
+ return copy;
+}
+
+// Return a (leaked) copy of a directory name suitable for application data.
+char* IOSDataDirectory() {
+ NSArray* paths = NSSearchPathForDirectoriesInDomains(
+ NSApplicationSupportDirectory, NSUserDomainMask, YES);
+ ASSERT([paths count] == 1);
+ return copyString([paths objectAtIndex:0]);
+}
+
+// Return a (leaked) copy of a directory name suitable for use as a $TEMP.
+char* IOSTempDirectory() {
+ return copyString(NSTemporaryDirectory());
+}
+
+// Return the binary's path.
+void IOSAppName(rtc::Pathname* path) {
+ NSProcessInfo *pInfo = [NSProcessInfo processInfo];
+ NSString* argv0 = [[pInfo arguments] objectAtIndex:0];
+ path->SetPathname([argv0 UTF8String]);
+}
diff --git a/chromium/third_party/webrtc/base/ipaddress.cc b/chromium/third_party/webrtc/base/ipaddress.cc
new file mode 100644
index 00000000000..4441e16f6be
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ipaddress.cc
@@ -0,0 +1,449 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_POSIX)
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+#ifdef OPENBSD
+#include <netinet/in_systm.h>
+#endif
+#ifndef __native_client__
+#include <netinet/ip.h>
+#endif
+#include <arpa/inet.h>
+#include <netdb.h>
+#include <unistd.h>
+#endif
+
+#include <stdio.h>
+
+#include "webrtc/base/ipaddress.h"
+#include "webrtc/base/byteorder.h"
+#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/win32.h"
+
+namespace rtc {
+
+// Prefixes used for categorizing IPv6 addresses.
+static const in6_addr kV4MappedPrefix = {{{0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0xFF, 0xFF, 0}}};
+static const in6_addr k6To4Prefix = {{{0x20, 0x02, 0}}};
+static const in6_addr kTeredoPrefix = {{{0x20, 0x01, 0x00, 0x00}}};
+static const in6_addr kV4CompatibilityPrefix = {{{0}}};
+static const in6_addr k6BonePrefix = {{{0x3f, 0xfe, 0}}};
+
+bool IPAddress::strip_sensitive_ = false;
+
+static bool IsPrivateV4(uint32 ip);
+static in_addr ExtractMappedAddress(const in6_addr& addr);
+
+uint32 IPAddress::v4AddressAsHostOrderInteger() const {
+ if (family_ == AF_INET) {
+ return NetworkToHost32(u_.ip4.s_addr);
+ } else {
+ return 0;
+ }
+}
+
+size_t IPAddress::Size() const {
+ switch (family_) {
+ case AF_INET:
+ return sizeof(in_addr);
+ case AF_INET6:
+ return sizeof(in6_addr);
+ }
+ return 0;
+}
+
+
+bool IPAddress::operator==(const IPAddress &other) const {
+ if (family_ != other.family_) {
+ return false;
+ }
+ if (family_ == AF_INET) {
+ return memcmp(&u_.ip4, &other.u_.ip4, sizeof(u_.ip4)) == 0;
+ }
+ if (family_ == AF_INET6) {
+ return memcmp(&u_.ip6, &other.u_.ip6, sizeof(u_.ip6)) == 0;
+ }
+ return family_ == AF_UNSPEC;
+}
+
+bool IPAddress::operator!=(const IPAddress &other) const {
+ return !((*this) == other);
+}
+
+bool IPAddress::operator >(const IPAddress &other) const {
+ return (*this) != other && !((*this) < other);
+}
+
+bool IPAddress::operator <(const IPAddress &other) const {
+ // IPv4 is 'less than' IPv6
+ if (family_ != other.family_) {
+ if (family_ == AF_UNSPEC) {
+ return true;
+ }
+ if (family_ == AF_INET && other.family_ == AF_INET6) {
+ return true;
+ }
+ return false;
+ }
+ // Comparing addresses of the same family.
+ switch (family_) {
+ case AF_INET: {
+ return NetworkToHost32(u_.ip4.s_addr) <
+ NetworkToHost32(other.u_.ip4.s_addr);
+ }
+ case AF_INET6: {
+ return memcmp(&u_.ip6.s6_addr, &other.u_.ip6.s6_addr, 16) < 0;
+ }
+ }
+ // Catches AF_UNSPEC and invalid addresses.
+ return false;
+}
+
+std::ostream& operator<<(std::ostream& os, const IPAddress& ip) {
+ os << ip.ToString();
+ return os;
+}
+
+in6_addr IPAddress::ipv6_address() const {
+ return u_.ip6;
+}
+
+in_addr IPAddress::ipv4_address() const {
+ return u_.ip4;
+}
+
+std::string IPAddress::ToString() const {
+ if (family_ != AF_INET && family_ != AF_INET6) {
+ return std::string();
+ }
+ char buf[INET6_ADDRSTRLEN] = {0};
+ const void* src = &u_.ip4;
+ if (family_ == AF_INET6) {
+ src = &u_.ip6;
+ }
+ if (!rtc::inet_ntop(family_, src, buf, sizeof(buf))) {
+ return std::string();
+ }
+ return std::string(buf);
+}
+
+std::string IPAddress::ToSensitiveString() const {
+ if (!strip_sensitive_)
+ return ToString();
+
+ switch (family_) {
+ case AF_INET: {
+ std::string address = ToString();
+ size_t find_pos = address.rfind('.');
+ if (find_pos == std::string::npos)
+ return std::string();
+ address.resize(find_pos);
+ address += ".x";
+ return address;
+ }
+ case AF_INET6: {
+ // TODO(grunell): Return a string of format 1:2:3:x:x:x:x:x or such
+ // instead of zeroing out.
+ return TruncateIP(*this, 128 - 80).ToString();
+ }
+ }
+ return std::string();
+}
+
+IPAddress IPAddress::Normalized() const {
+ if (family_ != AF_INET6) {
+ return *this;
+ }
+ if (!IPIsV4Mapped(*this)) {
+ return *this;
+ }
+ in_addr addr = ExtractMappedAddress(u_.ip6);
+ return IPAddress(addr);
+}
+
+IPAddress IPAddress::AsIPv6Address() const {
+ if (family_ != AF_INET) {
+ return *this;
+ }
+ in6_addr v6addr = kV4MappedPrefix;
+ ::memcpy(&v6addr.s6_addr[12], &u_.ip4.s_addr, sizeof(u_.ip4.s_addr));
+ return IPAddress(v6addr);
+}
+
+void IPAddress::set_strip_sensitive(bool enable) {
+ strip_sensitive_ = enable;
+}
+
+
+bool IsPrivateV4(uint32 ip_in_host_order) {
+ return ((ip_in_host_order >> 24) == 127) ||
+ ((ip_in_host_order >> 24) == 10) ||
+ ((ip_in_host_order >> 20) == ((172 << 4) | 1)) ||
+ ((ip_in_host_order >> 16) == ((192 << 8) | 168)) ||
+ ((ip_in_host_order >> 16) == ((169 << 8) | 254));
+}
+
+in_addr ExtractMappedAddress(const in6_addr& in6) {
+ in_addr ipv4;
+ ::memcpy(&ipv4.s_addr, &in6.s6_addr[12], sizeof(ipv4.s_addr));
+ return ipv4;
+}
+
+bool IPFromAddrInfo(struct addrinfo* info, IPAddress* out) {
+ if (!info || !info->ai_addr) {
+ return false;
+ }
+ if (info->ai_addr->sa_family == AF_INET) {
+ sockaddr_in* addr = reinterpret_cast<sockaddr_in*>(info->ai_addr);
+ *out = IPAddress(addr->sin_addr);
+ return true;
+ } else if (info->ai_addr->sa_family == AF_INET6) {
+ sockaddr_in6* addr = reinterpret_cast<sockaddr_in6*>(info->ai_addr);
+ *out = IPAddress(addr->sin6_addr);
+ return true;
+ }
+ return false;
+}
+
+bool IPFromString(const std::string& str, IPAddress* out) {
+ if (!out) {
+ return false;
+ }
+ in_addr addr;
+ if (rtc::inet_pton(AF_INET, str.c_str(), &addr) == 0) {
+ in6_addr addr6;
+ if (rtc::inet_pton(AF_INET6, str.c_str(), &addr6) == 0) {
+ *out = IPAddress();
+ return false;
+ }
+ *out = IPAddress(addr6);
+ } else {
+ *out = IPAddress(addr);
+ }
+ return true;
+}
+
+bool IPIsAny(const IPAddress& ip) {
+ switch (ip.family()) {
+ case AF_INET:
+ return ip == IPAddress(INADDR_ANY);
+ case AF_INET6:
+ return ip == IPAddress(in6addr_any);
+ case AF_UNSPEC:
+ return false;
+ }
+ return false;
+}
+
+bool IPIsLoopback(const IPAddress& ip) {
+ switch (ip.family()) {
+ case AF_INET: {
+ return ip == IPAddress(INADDR_LOOPBACK);
+ }
+ case AF_INET6: {
+ return ip == IPAddress(in6addr_loopback);
+ }
+ }
+ return false;
+}
+
+bool IPIsPrivate(const IPAddress& ip) {
+ switch (ip.family()) {
+ case AF_INET: {
+ return IsPrivateV4(ip.v4AddressAsHostOrderInteger());
+ }
+ case AF_INET6: {
+ in6_addr v6 = ip.ipv6_address();
+ return (v6.s6_addr[0] == 0xFE && v6.s6_addr[1] == 0x80) ||
+ IPIsLoopback(ip);
+ }
+ }
+ return false;
+}
+
+bool IPIsUnspec(const IPAddress& ip) {
+ return ip.family() == AF_UNSPEC;
+}
+
+size_t HashIP(const IPAddress& ip) {
+ switch (ip.family()) {
+ case AF_INET: {
+ return ip.ipv4_address().s_addr;
+ }
+ case AF_INET6: {
+ in6_addr v6addr = ip.ipv6_address();
+ const uint32* v6_as_ints =
+ reinterpret_cast<const uint32*>(&v6addr.s6_addr);
+ return v6_as_ints[0] ^ v6_as_ints[1] ^ v6_as_ints[2] ^ v6_as_ints[3];
+ }
+ }
+ return 0;
+}
+
+IPAddress TruncateIP(const IPAddress& ip, int length) {
+ if (length < 0) {
+ return IPAddress();
+ }
+ if (ip.family() == AF_INET) {
+ if (length > 31) {
+ return ip;
+ }
+ if (length == 0) {
+ return IPAddress(INADDR_ANY);
+ }
+ int mask = (0xFFFFFFFF << (32 - length));
+ uint32 host_order_ip = NetworkToHost32(ip.ipv4_address().s_addr);
+ in_addr masked;
+ masked.s_addr = HostToNetwork32(host_order_ip & mask);
+ return IPAddress(masked);
+ } else if (ip.family() == AF_INET6) {
+ if (length > 127) {
+ return ip;
+ }
+ if (length == 0) {
+ return IPAddress(in6addr_any);
+ }
+ in6_addr v6addr = ip.ipv6_address();
+ int position = length / 32;
+ int inner_length = 32 - (length - (position * 32));
+ // Note: 64bit mask constant needed to allow possible 32-bit left shift.
+ uint32 inner_mask = 0xFFFFFFFFLL << inner_length;
+ uint32* v6_as_ints =
+ reinterpret_cast<uint32*>(&v6addr.s6_addr);
+ for (int i = 0; i < 4; ++i) {
+ if (i == position) {
+ uint32 host_order_inner = NetworkToHost32(v6_as_ints[i]);
+ v6_as_ints[i] = HostToNetwork32(host_order_inner & inner_mask);
+ } else if (i > position) {
+ v6_as_ints[i] = 0;
+ }
+ }
+ return IPAddress(v6addr);
+ }
+ return IPAddress();
+}
+
+int CountIPMaskBits(IPAddress mask) {
+ uint32 word_to_count = 0;
+ int bits = 0;
+ switch (mask.family()) {
+ case AF_INET: {
+ word_to_count = NetworkToHost32(mask.ipv4_address().s_addr);
+ break;
+ }
+ case AF_INET6: {
+ in6_addr v6addr = mask.ipv6_address();
+ const uint32* v6_as_ints =
+ reinterpret_cast<const uint32*>(&v6addr.s6_addr);
+ int i = 0;
+ for (; i < 4; ++i) {
+ if (v6_as_ints[i] != 0xFFFFFFFF) {
+ break;
+ }
+ }
+ if (i < 4) {
+ word_to_count = NetworkToHost32(v6_as_ints[i]);
+ }
+ bits = (i * 32);
+ break;
+ }
+ default: {
+ return 0;
+ }
+ }
+ if (word_to_count == 0) {
+ return bits;
+ }
+
+ // Public domain bit-twiddling hack from:
+ // http://graphics.stanford.edu/~seander/bithacks.html
+ // Counts the trailing 0s in the word.
+ unsigned int zeroes = 32;
+ word_to_count &= -static_cast<int32>(word_to_count);
+ if (word_to_count) zeroes--;
+ if (word_to_count & 0x0000FFFF) zeroes -= 16;
+ if (word_to_count & 0x00FF00FF) zeroes -= 8;
+ if (word_to_count & 0x0F0F0F0F) zeroes -= 4;
+ if (word_to_count & 0x33333333) zeroes -= 2;
+ if (word_to_count & 0x55555555) zeroes -= 1;
+
+ return bits + (32 - zeroes);
+}
+
+bool IPIsHelper(const IPAddress& ip, const in6_addr& tomatch, int length) {
+ // Helper method for checking IP prefix matches (but only on whole byte
+ // lengths). Length is in bits.
+ in6_addr addr = ip.ipv6_address();
+ return ::memcmp(&addr, &tomatch, (length >> 3)) == 0;
+}
+
+bool IPIs6Bone(const IPAddress& ip) {
+ return IPIsHelper(ip, k6BonePrefix, 16);
+}
+
+bool IPIs6To4(const IPAddress& ip) {
+ return IPIsHelper(ip, k6To4Prefix, 16);
+}
+
+bool IPIsSiteLocal(const IPAddress& ip) {
+ // Can't use the helper because the prefix is 10 bits.
+ in6_addr addr = ip.ipv6_address();
+ return addr.s6_addr[0] == 0xFE && (addr.s6_addr[1] & 0xC0) == 0xC0;
+}
+
+bool IPIsULA(const IPAddress& ip) {
+ // Can't use the helper because the prefix is 7 bits.
+ in6_addr addr = ip.ipv6_address();
+ return (addr.s6_addr[0] & 0xFE) == 0xFC;
+}
+
+bool IPIsTeredo(const IPAddress& ip) {
+ return IPIsHelper(ip, kTeredoPrefix, 32);
+}
+
+bool IPIsV4Compatibility(const IPAddress& ip) {
+ return IPIsHelper(ip, kV4CompatibilityPrefix, 96);
+}
+
+bool IPIsV4Mapped(const IPAddress& ip) {
+ return IPIsHelper(ip, kV4MappedPrefix, 96);
+}
+
+int IPAddressPrecedence(const IPAddress& ip) {
+ // Precedence values from RFC 3484-bis. Prefers native v4 over 6to4/Teredo.
+ if (ip.family() == AF_INET) {
+ return 30;
+ } else if (ip.family() == AF_INET6) {
+ if (IPIsLoopback(ip)) {
+ return 60;
+ } else if (IPIsULA(ip)) {
+ return 50;
+ } else if (IPIsV4Mapped(ip)) {
+ return 30;
+ } else if (IPIs6To4(ip)) {
+ return 20;
+ } else if (IPIsTeredo(ip)) {
+ return 10;
+ } else if (IPIsV4Compatibility(ip) || IPIsSiteLocal(ip) || IPIs6Bone(ip)) {
+ return 1;
+ } else {
+ // A 'normal' IPv6 address.
+ return 40;
+ }
+ }
+ return 0;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/ipaddress.h b/chromium/third_party/webrtc/base/ipaddress.h
new file mode 100644
index 00000000000..e7d649acbfa
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ipaddress.h
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_IPADDRESS_H_
+#define WEBRTC_BASE_IPADDRESS_H_
+
+#if defined(WEBRTC_POSIX)
+#include <netinet/in.h>
+#include <sys/socket.h>
+#include <arpa/inet.h>
+#include <netdb.h>
+#endif
+#if defined(WEBRTC_WIN)
+#include <winsock2.h>
+#include <ws2tcpip.h>
+#endif
+#include <string.h>
+#include <string>
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/byteorder.h"
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#endif
+
+namespace rtc {
+
+// Version-agnostic IP address class, wraps a union of in_addr and in6_addr.
+class IPAddress {
+ public:
+ IPAddress() : family_(AF_UNSPEC) {
+ ::memset(&u_, 0, sizeof(u_));
+ }
+
+ explicit IPAddress(const in_addr &ip4) : family_(AF_INET) {
+ memset(&u_, 0, sizeof(u_));
+ u_.ip4 = ip4;
+ }
+
+ explicit IPAddress(const in6_addr &ip6) : family_(AF_INET6) {
+ u_.ip6 = ip6;
+ }
+
+ explicit IPAddress(uint32 ip_in_host_byte_order) : family_(AF_INET) {
+ memset(&u_, 0, sizeof(u_));
+ u_.ip4.s_addr = HostToNetwork32(ip_in_host_byte_order);
+ }
+
+ IPAddress(const IPAddress &other) : family_(other.family_) {
+ ::memcpy(&u_, &other.u_, sizeof(u_));
+ }
+
+ ~IPAddress() {}
+
+ const IPAddress & operator=(const IPAddress &other) {
+ family_ = other.family_;
+ ::memcpy(&u_, &other.u_, sizeof(u_));
+ return *this;
+ }
+
+ bool operator==(const IPAddress &other) const;
+ bool operator!=(const IPAddress &other) const;
+ bool operator <(const IPAddress &other) const;
+ bool operator >(const IPAddress &other) const;
+ friend std::ostream& operator<<(std::ostream& os, const IPAddress& addr);
+
+ int family() const { return family_; }
+ in_addr ipv4_address() const;
+ in6_addr ipv6_address() const;
+
+ // Returns the number of bytes needed to store the raw address.
+ size_t Size() const;
+
+ // Wraps inet_ntop.
+ std::string ToString() const;
+
+ // Same as ToString but anonymizes it by hiding the last part.
+ std::string ToSensitiveString() const;
+
+ // Returns an unmapped address from a possibly-mapped address.
+ // Returns the same address if this isn't a mapped address.
+ IPAddress Normalized() const;
+
+ // Returns this address as an IPv6 address.
+ // Maps v4 addresses (as ::ffff:a.b.c.d), returns v6 addresses unchanged.
+ IPAddress AsIPv6Address() const;
+
+ // For socketaddress' benefit. Returns the IP in host byte order.
+ uint32 v4AddressAsHostOrderInteger() const;
+
+ static void set_strip_sensitive(bool enable);
+
+ private:
+ int family_;
+ union {
+ in_addr ip4;
+ in6_addr ip6;
+ } u_;
+
+ static bool strip_sensitive_;
+};
+
+bool IPFromAddrInfo(struct addrinfo* info, IPAddress* out);
+bool IPFromString(const std::string& str, IPAddress* out);
+bool IPIsAny(const IPAddress& ip);
+bool IPIsLoopback(const IPAddress& ip);
+bool IPIsPrivate(const IPAddress& ip);
+bool IPIsUnspec(const IPAddress& ip);
+size_t HashIP(const IPAddress& ip);
+
+// These are only really applicable for IPv6 addresses.
+bool IPIs6Bone(const IPAddress& ip);
+bool IPIs6To4(const IPAddress& ip);
+bool IPIsSiteLocal(const IPAddress& ip);
+bool IPIsTeredo(const IPAddress& ip);
+bool IPIsULA(const IPAddress& ip);
+bool IPIsV4Compatibility(const IPAddress& ip);
+bool IPIsV4Mapped(const IPAddress& ip);
+
+// Returns the precedence value for this IP as given in RFC3484.
+int IPAddressPrecedence(const IPAddress& ip);
+
+// Returns 'ip' truncated to be 'length' bits long.
+IPAddress TruncateIP(const IPAddress& ip, int length);
+
+// Returns the number of contiguously set bits, counting from the MSB in network
+// byte order, in this IPAddress. Bits after the first 0 encountered are not
+// counted.
+int CountIPMaskBits(IPAddress mask);
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_IPADDRESS_H_
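
A brief sketch of the parsing, truncation and masking helpers declared above; illustrative only, using arbitrary example addresses.

    #include <string>

    #include "webrtc/base/ipaddress.h"

    void InspectAddress() {
      rtc::IPAddress ip;
      if (!rtc::IPFromString("192.168.7.1", &ip)) {
        return;
      }
      bool is_private = rtc::IPIsPrivate(ip);           // true: RFC1918 space.
      rtc::IPAddress subnet = rtc::TruncateIP(ip, 24);  // 192.168.7.0
      rtc::IPAddress mask;
      rtc::IPFromString("255.255.255.0", &mask);
      int prefix_len = rtc::CountIPMaskBits(mask);      // 24
      rtc::IPAddress::set_strip_sensitive(true);
      std::string anonymized = ip.ToSensitiveString();  // "192.168.7.x"
      (void)is_private; (void)subnet; (void)prefix_len; (void)anonymized;
    }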
diff --git a/chromium/third_party/webrtc/base/ipaddress_unittest.cc b/chromium/third_party/webrtc/base/ipaddress_unittest.cc
new file mode 100644
index 00000000000..657595f68f4
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ipaddress_unittest.cc
@@ -0,0 +1,859 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/ipaddress.h"
+
+namespace rtc {
+
+static const unsigned int kIPv4AddrSize = 4;
+static const unsigned int kIPv6AddrSize = 16;
+static const unsigned int kIPv4RFC1918Addr = 0xC0A80701;
+static const unsigned int kIPv4PublicAddr = 0x01020304;
+static const in6_addr kIPv6LinkLocalAddr = {{{0xfe, 0x80, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0xbe, 0x30, 0x5b, 0xff,
+ 0xfe, 0xe5, 0x00, 0xc3}}};
+static const in6_addr kIPv6PublicAddr = {{{0x24, 0x01, 0xfa, 0x00,
+ 0x00, 0x04, 0x10, 0x00,
+ 0xbe, 0x30, 0x5b, 0xff,
+ 0xfe, 0xe5, 0x00, 0xc3}}};
+static const in6_addr kIPv4MappedAnyAddr = {{{0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x00}}};
+static const in6_addr kIPv4MappedRFC1918Addr = {{{0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0xff, 0xff,
+ 0xc0, 0xa8, 0x07, 0x01}}};
+static const in6_addr kIPv4MappedPublicAddr = {{{0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0xff, 0xff,
+ 0x01, 0x02, 0x03, 0x04}}};
+
+static const std::string kIPv4AnyAddrString = "0.0.0.0";
+static const std::string kIPv4LoopbackAddrString = "127.0.0.1";
+static const std::string kIPv4RFC1918AddrString = "192.168.7.1";
+static const std::string kIPv4PublicAddrString = "1.2.3.4";
+static const std::string kIPv4PublicAddrAnonymizedString = "1.2.3.x";
+static const std::string kIPv6AnyAddrString = "::";
+static const std::string kIPv6LoopbackAddrString = "::1";
+static const std::string kIPv6LinkLocalAddrString = "fe80::be30:5bff:fee5:c3";
+static const std::string kIPv6PublicAddrString =
+ "2401:fa00:4:1000:be30:5bff:fee5:c3";
+static const std::string kIPv6PublicAddrAnonymizedString = "2401:fa00:4::";
+static const std::string kIPv4MappedAnyAddrString = "::ffff:0:0";
+static const std::string kIPv4MappedRFC1918AddrString = "::ffff:c0a8:701";
+static const std::string kIPv4MappedLoopbackAddrString = "::ffff:7f00:1";
+static const std::string kIPv4MappedPublicAddrString = "::ffff:102:0304";
+static const std::string kIPv4MappedV4StyleAddrString = "::ffff:192.168.7.1";
+
+static const std::string kIPv4BrokenString1 = "192.168.7.";
+static const std::string kIPv4BrokenString2 = "192.168.7.1.1";
+static const std::string kIPv4BrokenString3 = "192.168.7.1:80";
+static const std::string kIPv4BrokenString4 = "192.168.7.ONE";
+static const std::string kIPv4BrokenString5 = "-192.168.7.1";
+static const std::string kIPv4BrokenString6 = "256.168.7.1";
+static const std::string kIPv6BrokenString1 = "2401:fa00:4:1000:be30";
+static const std::string kIPv6BrokenString2 =
+ "2401:fa00:4:1000:be30:5bff:fee5:c3:1";
+static const std::string kIPv6BrokenString3 =
+ "[2401:fa00:4:1000:be30:5bff:fee5:c3]:1";
+static const std::string kIPv6BrokenString4 =
+ "2401::4::be30";
+static const std::string kIPv6BrokenString5 =
+ "2401:::4:fee5:be30";
+static const std::string kIPv6BrokenString6 =
+ "2401f:fa00:4:1000:be30:5bff:fee5:c3";
+static const std::string kIPv6BrokenString7 =
+ "2401:ga00:4:1000:be30:5bff:fee5:c3";
+static const std::string kIPv6BrokenString8 =
+ "2401:fa000:4:1000:be30:5bff:fee5:c3";
+static const std::string kIPv6BrokenString9 =
+ "2401:fal0:4:1000:be30:5bff:fee5:c3";
+static const std::string kIPv6BrokenString10 =
+ "::ffff:192.168.7.";
+static const std::string kIPv6BrokenString11 =
+ "::ffff:192.168.7.1.1.1";
+static const std::string kIPv6BrokenString12 =
+ "::fffe:192.168.7.1";
+static const std::string kIPv6BrokenString13 =
+ "::ffff:192.168.7.ff";
+static const std::string kIPv6BrokenString14 =
+ "0x2401:fa00:4:1000:be30:5bff:fee5:c3";
+
+bool AreEqual(const IPAddress& addr,
+ const IPAddress& addr2) {
+ if ((IPIsAny(addr) != IPIsAny(addr2)) ||
+ (IPIsLoopback(addr) != IPIsLoopback(addr2)) ||
+ (IPIsPrivate(addr) != IPIsPrivate(addr2)) ||
+ (HashIP(addr) != HashIP(addr2)) ||
+ (addr.Size() != addr2.Size()) ||
+ (addr.family() != addr2.family()) ||
+ (addr.ToString() != addr2.ToString())) {
+ return false;
+ }
+ in_addr v4addr, v4addr2;
+ v4addr = addr.ipv4_address();
+ v4addr2 = addr2.ipv4_address();
+ if (0 != memcmp(&v4addr, &v4addr2, sizeof(v4addr))) {
+ return false;
+ }
+ in6_addr v6addr, v6addr2;
+ v6addr = addr.ipv6_address();
+ v6addr2 = addr2.ipv6_address();
+ if (0 != memcmp(&v6addr, &v6addr2, sizeof(v6addr))) {
+ return false;
+ }
+ return true;
+}
+
+bool BrokenIPStringFails(const std::string& broken) {
+ IPAddress addr(0); // Intentionally make it v4.
+ if (IPFromString(broken, &addr)) {
+ return false;
+ }
+ return addr.family() == AF_UNSPEC;
+}
+
+bool CheckMaskCount(const std::string& mask, int expected_length) {
+ IPAddress addr;
+ return IPFromString(mask, &addr) &&
+ (expected_length == CountIPMaskBits(addr));
+}
+
+bool TryInvalidMaskCount(const std::string& mask) {
+ // We don't care about the result at all, but we do want to know if
+ // CountIPMaskBits is going to crash or infinite loop or something.
+ IPAddress addr;
+ if (!IPFromString(mask, &addr)) {
+ return false;
+ }
+ CountIPMaskBits(addr);
+ return true;
+}
+
+bool CheckTruncateIP(const std::string& initial, int truncate_length,
+ const std::string& expected_result) {
+ IPAddress addr, expected;
+ IPFromString(initial, &addr);
+ IPFromString(expected_result, &expected);
+ IPAddress truncated = TruncateIP(addr, truncate_length);
+ return truncated == expected;
+}
+
+TEST(IPAddressTest, TestDefaultCtor) {
+ IPAddress addr;
+ EXPECT_FALSE(IPIsAny(addr));
+ EXPECT_FALSE(IPIsLoopback(addr));
+ EXPECT_FALSE(IPIsPrivate(addr));
+
+ EXPECT_EQ(0U, addr.Size());
+ EXPECT_EQ(AF_UNSPEC, addr.family());
+ EXPECT_EQ("", addr.ToString());
+}
+
+TEST(IPAddressTest, TestInAddrCtor) {
+ in_addr v4addr;
+
+ // Test V4 Any address.
+ v4addr.s_addr = INADDR_ANY;
+ IPAddress addr(v4addr);
+ EXPECT_TRUE(IPIsAny(addr));
+ EXPECT_FALSE(IPIsLoopback(addr));
+ EXPECT_FALSE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv4AddrSize, addr.Size());
+ EXPECT_EQ(kIPv4AnyAddrString, addr.ToString());
+
+ // Test a V4 loopback address.
+ v4addr.s_addr = htonl(INADDR_LOOPBACK);
+ addr = IPAddress(v4addr);
+ EXPECT_FALSE(IPIsAny(addr));
+ EXPECT_TRUE(IPIsLoopback(addr));
+ EXPECT_TRUE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv4AddrSize, addr.Size());
+ EXPECT_EQ(kIPv4LoopbackAddrString, addr.ToString());
+
+ // Test an RFC1918 address.
+ v4addr.s_addr = htonl(kIPv4RFC1918Addr);
+ addr = IPAddress(v4addr);
+ EXPECT_FALSE(IPIsAny(addr));
+ EXPECT_FALSE(IPIsLoopback(addr));
+ EXPECT_TRUE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv4AddrSize, addr.Size());
+ EXPECT_EQ(kIPv4RFC1918AddrString, addr.ToString());
+
+ // Test a 'normal' v4 address.
+ v4addr.s_addr = htonl(kIPv4PublicAddr);
+ addr = IPAddress(v4addr);
+ EXPECT_FALSE(IPIsAny(addr));
+ EXPECT_FALSE(IPIsLoopback(addr));
+ EXPECT_FALSE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv4AddrSize, addr.Size());
+ EXPECT_EQ(kIPv4PublicAddrString, addr.ToString());
+}
+
+TEST(IPAddressTest, TestInAddr6Ctor) {
+ // Test v6 empty.
+ IPAddress addr(in6addr_any);
+ EXPECT_TRUE(IPIsAny(addr));
+ EXPECT_FALSE(IPIsLoopback(addr));
+ EXPECT_FALSE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv6AddrSize, addr.Size());
+ EXPECT_EQ(kIPv6AnyAddrString, addr.ToString());
+
+ // Test v6 loopback.
+ addr = IPAddress(in6addr_loopback);
+ EXPECT_FALSE(IPIsAny(addr));
+ EXPECT_TRUE(IPIsLoopback(addr));
+ EXPECT_TRUE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv6AddrSize, addr.Size());
+ EXPECT_EQ(kIPv6LoopbackAddrString, addr.ToString());
+
+ // Test v6 link-local.
+ addr = IPAddress(kIPv6LinkLocalAddr);
+ EXPECT_FALSE(IPIsAny(addr));
+ EXPECT_FALSE(IPIsLoopback(addr));
+ EXPECT_TRUE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv6AddrSize, addr.Size());
+ EXPECT_EQ(kIPv6LinkLocalAddrString, addr.ToString());
+
+ // Test v6 global address.
+ addr = IPAddress(kIPv6PublicAddr);
+ EXPECT_FALSE(IPIsAny(addr));
+ EXPECT_FALSE(IPIsLoopback(addr));
+ EXPECT_FALSE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv6AddrSize, addr.Size());
+ EXPECT_EQ(kIPv6PublicAddrString, addr.ToString());
+}
+
+TEST(IPAddressTest, TestUint32Ctor) {
+ // Test V4 Any address.
+ IPAddress addr(0);
+ EXPECT_TRUE(IPIsAny(addr));
+ EXPECT_FALSE(IPIsLoopback(addr));
+ EXPECT_FALSE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv4AddrSize, addr.Size());
+ EXPECT_EQ(kIPv4AnyAddrString, addr.ToString());
+
+ // Test a V4 loopback address.
+ addr = IPAddress(INADDR_LOOPBACK);
+ EXPECT_FALSE(IPIsAny(addr));
+ EXPECT_TRUE(IPIsLoopback(addr));
+ EXPECT_TRUE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv4AddrSize, addr.Size());
+ EXPECT_EQ(kIPv4LoopbackAddrString, addr.ToString());
+
+ // Test an RFC1918 address.
+ addr = IPAddress(kIPv4RFC1918Addr);
+ EXPECT_FALSE(IPIsAny(addr));
+ EXPECT_FALSE(IPIsLoopback(addr));
+ EXPECT_TRUE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv4AddrSize, addr.Size());
+ EXPECT_EQ(kIPv4RFC1918AddrString, addr.ToString());
+
+ // Test a 'normal' v4 address.
+ addr = IPAddress(kIPv4PublicAddr);
+ EXPECT_FALSE(IPIsAny(addr));
+ EXPECT_FALSE(IPIsLoopback(addr));
+ EXPECT_FALSE(IPIsPrivate(addr));
+ EXPECT_EQ(kIPv4AddrSize, addr.Size());
+ EXPECT_EQ(kIPv4PublicAddrString, addr.ToString());
+}
+
+TEST(IPAddressTest, TestCopyCtor) {
+ in_addr v4addr;
+ v4addr.s_addr = htonl(kIPv4PublicAddr);
+ IPAddress addr(v4addr);
+ IPAddress addr2(addr);
+
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr = IPAddress(INADDR_ANY);
+ addr2 = IPAddress(addr);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr = IPAddress(INADDR_LOOPBACK);
+ addr2 = IPAddress(addr);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr = IPAddress(kIPv4PublicAddr);
+ addr2 = IPAddress(addr);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr = IPAddress(kIPv4RFC1918Addr);
+ addr2 = IPAddress(addr);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr = IPAddress(in6addr_any);
+ addr2 = IPAddress(addr);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr = IPAddress(in6addr_loopback);
+ addr2 = IPAddress(addr);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr = IPAddress(kIPv6LinkLocalAddr);
+ addr2 = IPAddress(addr);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr = IPAddress(kIPv6PublicAddr);
+ addr2 = IPAddress(addr);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+}
+
+TEST(IPAddressTest, TestEquality) {
+ // Check v4 equality
+ in_addr v4addr, v4addr2;
+ v4addr.s_addr = htonl(kIPv4PublicAddr);
+ v4addr2.s_addr = htonl(kIPv4PublicAddr + 1);
+ IPAddress addr(v4addr);
+ IPAddress addr2(v4addr2);
+ IPAddress addr3(v4addr);
+
+ EXPECT_TRUE(addr == addr);
+ EXPECT_TRUE(addr2 == addr2);
+ EXPECT_TRUE(addr3 == addr3);
+ EXPECT_TRUE(addr == addr3);
+ EXPECT_TRUE(addr3 == addr);
+ EXPECT_FALSE(addr2 == addr);
+ EXPECT_FALSE(addr2 == addr3);
+ EXPECT_FALSE(addr == addr2);
+ EXPECT_FALSE(addr3 == addr2);
+
+ // Check v6 equality
+ IPAddress addr4(kIPv6PublicAddr);
+ IPAddress addr5(kIPv6LinkLocalAddr);
+ IPAddress addr6(kIPv6PublicAddr);
+
+ EXPECT_TRUE(addr4 == addr4);
+ EXPECT_TRUE(addr5 == addr5);
+ EXPECT_TRUE(addr4 == addr6);
+ EXPECT_TRUE(addr6 == addr4);
+ EXPECT_FALSE(addr4 == addr5);
+ EXPECT_FALSE(addr5 == addr4);
+ EXPECT_FALSE(addr6 == addr5);
+ EXPECT_FALSE(addr5 == addr6);
+
+ // Check v4/v6 cross-equality
+ EXPECT_FALSE(addr == addr4);
+ EXPECT_FALSE(addr == addr5);
+ EXPECT_FALSE(addr == addr6);
+ EXPECT_FALSE(addr4 == addr);
+ EXPECT_FALSE(addr5 == addr);
+ EXPECT_FALSE(addr6 == addr);
+ EXPECT_FALSE(addr2 == addr4);
+ EXPECT_FALSE(addr2 == addr5);
+ EXPECT_FALSE(addr2 == addr6);
+ EXPECT_FALSE(addr4 == addr2);
+ EXPECT_FALSE(addr5 == addr2);
+ EXPECT_FALSE(addr6 == addr2);
+ EXPECT_FALSE(addr3 == addr4);
+ EXPECT_FALSE(addr3 == addr5);
+ EXPECT_FALSE(addr3 == addr6);
+ EXPECT_FALSE(addr4 == addr3);
+ EXPECT_FALSE(addr5 == addr3);
+ EXPECT_FALSE(addr6 == addr3);
+
+ // Special cases: loopback and any.
+ // They're special but they're still not equal.
+ IPAddress v4loopback(htonl(INADDR_LOOPBACK));
+ IPAddress v6loopback(in6addr_loopback);
+ EXPECT_FALSE(v4loopback == v6loopback);
+
+ IPAddress v4any(0);
+ IPAddress v6any(in6addr_any);
+ EXPECT_FALSE(v4any == v6any);
+}
+
+TEST(IPAddressTest, TestComparison) {
+ // Defined in 'ascending' order.
+ // v6 > v4, and intra-family sorting is purely numerical
+ IPAddress addr0; // AF_UNSPEC
+ IPAddress addr1(INADDR_ANY); // 0.0.0.0
+ IPAddress addr2(kIPv4PublicAddr); // 1.2.3.4
+ IPAddress addr3(INADDR_LOOPBACK); // 127.0.0.1
+  IPAddress addr4(kIPv4RFC1918Addr);  // 192.168.7.1
+ IPAddress addr5(in6addr_any); // ::
+ IPAddress addr6(in6addr_loopback); // ::1
+ IPAddress addr7(kIPv6PublicAddr); // 2401....
+ IPAddress addr8(kIPv6LinkLocalAddr); // fe80....
+
+ EXPECT_TRUE(addr0 < addr1);
+ EXPECT_TRUE(addr1 < addr2);
+ EXPECT_TRUE(addr2 < addr3);
+ EXPECT_TRUE(addr3 < addr4);
+ EXPECT_TRUE(addr4 < addr5);
+ EXPECT_TRUE(addr5 < addr6);
+ EXPECT_TRUE(addr6 < addr7);
+ EXPECT_TRUE(addr7 < addr8);
+
+ EXPECT_FALSE(addr0 > addr1);
+ EXPECT_FALSE(addr1 > addr2);
+ EXPECT_FALSE(addr2 > addr3);
+ EXPECT_FALSE(addr3 > addr4);
+ EXPECT_FALSE(addr4 > addr5);
+ EXPECT_FALSE(addr5 > addr6);
+ EXPECT_FALSE(addr6 > addr7);
+ EXPECT_FALSE(addr7 > addr8);
+
+ EXPECT_FALSE(addr0 > addr0);
+ EXPECT_FALSE(addr1 > addr1);
+ EXPECT_FALSE(addr2 > addr2);
+ EXPECT_FALSE(addr3 > addr3);
+ EXPECT_FALSE(addr4 > addr4);
+ EXPECT_FALSE(addr5 > addr5);
+ EXPECT_FALSE(addr6 > addr6);
+ EXPECT_FALSE(addr7 > addr7);
+ EXPECT_FALSE(addr8 > addr8);
+
+ EXPECT_FALSE(addr0 < addr0);
+ EXPECT_FALSE(addr1 < addr1);
+ EXPECT_FALSE(addr2 < addr2);
+ EXPECT_FALSE(addr3 < addr3);
+ EXPECT_FALSE(addr4 < addr4);
+ EXPECT_FALSE(addr5 < addr5);
+ EXPECT_FALSE(addr6 < addr6);
+ EXPECT_FALSE(addr7 < addr7);
+ EXPECT_FALSE(addr8 < addr8);
+}
+
+TEST(IPAddressTest, TestFromString) {
+ IPAddress addr;
+ IPAddress addr2;
+ addr2 = IPAddress(INADDR_ANY);
+
+ EXPECT_TRUE(IPFromString(kIPv4AnyAddrString, &addr));
+ EXPECT_EQ(addr.ToString(), kIPv4AnyAddrString);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr2 = IPAddress(INADDR_LOOPBACK);
+ EXPECT_TRUE(IPFromString(kIPv4LoopbackAddrString, &addr));
+ EXPECT_EQ(addr.ToString(), kIPv4LoopbackAddrString);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr2 = IPAddress(kIPv4RFC1918Addr);
+ EXPECT_TRUE(IPFromString(kIPv4RFC1918AddrString, &addr));
+ EXPECT_EQ(addr.ToString(), kIPv4RFC1918AddrString);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr2 = IPAddress(kIPv4PublicAddr);
+ EXPECT_TRUE(IPFromString(kIPv4PublicAddrString, &addr));
+ EXPECT_EQ(addr.ToString(), kIPv4PublicAddrString);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr2 = IPAddress(in6addr_any);
+ EXPECT_TRUE(IPFromString(kIPv6AnyAddrString, &addr));
+ EXPECT_EQ(addr.ToString(), kIPv6AnyAddrString);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr2 = IPAddress(in6addr_loopback);
+ EXPECT_TRUE(IPFromString(kIPv6LoopbackAddrString, &addr));
+ EXPECT_EQ(addr.ToString(), kIPv6LoopbackAddrString);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr2 = IPAddress(kIPv6LinkLocalAddr);
+ EXPECT_TRUE(IPFromString(kIPv6LinkLocalAddrString, &addr));
+ EXPECT_EQ(addr.ToString(), kIPv6LinkLocalAddrString);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr2 = IPAddress(kIPv6PublicAddr);
+ EXPECT_TRUE(IPFromString(kIPv6PublicAddrString, &addr));
+ EXPECT_EQ(addr.ToString(), kIPv6PublicAddrString);
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ addr2 = IPAddress(kIPv4MappedRFC1918Addr);
+ EXPECT_TRUE(IPFromString(kIPv4MappedV4StyleAddrString, &addr));
+ EXPECT_PRED2(AreEqual, addr, addr2);
+
+ // Broken cases, should set addr to AF_UNSPEC.
+ EXPECT_PRED1(BrokenIPStringFails, kIPv4BrokenString1);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv4BrokenString2);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv4BrokenString3);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv4BrokenString4);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv4BrokenString5);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv4BrokenString6);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString1);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString2);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString3);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString4);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString5);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString6);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString7);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString8);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString9);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString10);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString11);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString12);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString13);
+ EXPECT_PRED1(BrokenIPStringFails, kIPv6BrokenString14);
+}
+
+TEST(IPAddressTest, TestIPFromAddrInfo) {
+ struct sockaddr_in expected4;
+ struct sockaddr_in6 expected6;
+ struct addrinfo test_info;
+ struct addrinfo next_info;
+ memset(&next_info, 'A', sizeof(next_info));
+ test_info.ai_next = &next_info;
+ // Check that we can get an IPv4 address out.
+ test_info.ai_addr = reinterpret_cast<struct sockaddr*>(&expected4);
+ expected4.sin_addr.s_addr = HostToNetwork32(kIPv4PublicAddr);
+ expected4.sin_family = AF_INET;
+ IPAddress expected(kIPv4PublicAddr);
+ IPAddress addr;
+ EXPECT_TRUE(IPFromAddrInfo(&test_info, &addr));
+ EXPECT_EQ(expected, addr);
+ // Check that we can get an IPv6 address out.
+ expected6.sin6_addr = kIPv6PublicAddr;
+ expected6.sin6_family = AF_INET6;
+ expected = IPAddress(kIPv6PublicAddr);
+ test_info.ai_addr = reinterpret_cast<struct sockaddr*>(&expected6);
+ EXPECT_TRUE(IPFromAddrInfo(&test_info, &addr));
+ EXPECT_EQ(expected, addr);
+ // Check that unspec fails.
+ expected6.sin6_family = AF_UNSPEC;
+ EXPECT_FALSE(IPFromAddrInfo(&test_info, &addr));
+ // Check a zeroed out addrinfo doesn't crash us.
+ memset(&next_info, 0, sizeof(next_info));
+ EXPECT_FALSE(IPFromAddrInfo(&next_info, &addr));
+}
+
+TEST(IPAddressTest, TestIsPrivate) {
+ EXPECT_FALSE(IPIsPrivate(IPAddress(INADDR_ANY)));
+ EXPECT_FALSE(IPIsPrivate(IPAddress(kIPv4PublicAddr)));
+ EXPECT_FALSE(IPIsPrivate(IPAddress(in6addr_any)));
+ EXPECT_FALSE(IPIsPrivate(IPAddress(kIPv6PublicAddr)));
+ EXPECT_FALSE(IPIsPrivate(IPAddress(kIPv4MappedAnyAddr)));
+ EXPECT_FALSE(IPIsPrivate(IPAddress(kIPv4MappedPublicAddr)));
+
+ EXPECT_TRUE(IPIsPrivate(IPAddress(kIPv4RFC1918Addr)));
+ EXPECT_TRUE(IPIsPrivate(IPAddress(INADDR_LOOPBACK)));
+ EXPECT_TRUE(IPIsPrivate(IPAddress(in6addr_loopback)));
+ EXPECT_TRUE(IPIsPrivate(IPAddress(kIPv6LinkLocalAddr)));
+}
+
+TEST(IPAddressTest, TestIsLoopback) {
+ EXPECT_FALSE(IPIsLoopback(IPAddress(INADDR_ANY)));
+ EXPECT_FALSE(IPIsLoopback(IPAddress(kIPv4PublicAddr)));
+ EXPECT_FALSE(IPIsLoopback(IPAddress(in6addr_any)));
+ EXPECT_FALSE(IPIsLoopback(IPAddress(kIPv6PublicAddr)));
+ EXPECT_FALSE(IPIsLoopback(IPAddress(kIPv4MappedAnyAddr)));
+ EXPECT_FALSE(IPIsLoopback(IPAddress(kIPv4MappedPublicAddr)));
+
+ EXPECT_TRUE(IPIsLoopback(IPAddress(INADDR_LOOPBACK)));
+ EXPECT_TRUE(IPIsLoopback(IPAddress(in6addr_loopback)));
+}
+
+TEST(IPAddressTest, TestNormalized) {
+ // Check normalizing a ::ffff:a.b.c.d address.
+ IPAddress addr;
+ EXPECT_TRUE(IPFromString(kIPv4MappedV4StyleAddrString, &addr));
+ IPAddress addr2(kIPv4RFC1918Addr);
+ addr = addr.Normalized();
+ EXPECT_EQ(addr2, addr);
+
+ // Check normalizing a ::ffff:aabb:ccdd address.
+ addr = IPAddress(kIPv4MappedPublicAddr);
+ addr2 = IPAddress(kIPv4PublicAddr);
+ addr = addr.Normalized();
+ EXPECT_EQ(addr, addr2);
+
+  // Check that a non-mapped v6 address isn't altered.
+ addr = IPAddress(kIPv6PublicAddr);
+ addr2 = IPAddress(kIPv6PublicAddr);
+ addr = addr.Normalized();
+ EXPECT_EQ(addr, addr2);
+
+  // Check that addresses that merely resemble mapped addresses aren't altered.
+ EXPECT_TRUE(IPFromString("fe80::ffff:0102:0304", &addr));
+ addr2 = addr;
+ addr = addr.Normalized();
+ EXPECT_EQ(addr, addr2);
+ EXPECT_TRUE(IPFromString("::0102:0304", &addr));
+ addr2 = addr;
+ addr = addr.Normalized();
+ EXPECT_EQ(addr, addr2);
+ // This string should 'work' as an IP address but is not a mapped address,
+ // so it shouldn't change on normalization.
+ EXPECT_TRUE(IPFromString("::192.168.7.1", &addr));
+ addr2 = addr;
+ addr = addr.Normalized();
+ EXPECT_EQ(addr, addr2);
+
+ // Check that v4 addresses aren't altered.
+ addr = IPAddress(htonl(kIPv4PublicAddr));
+ addr2 = IPAddress(htonl(kIPv4PublicAddr));
+ addr = addr.Normalized();
+ EXPECT_EQ(addr, addr2);
+}
+
+TEST(IPAddressTest, TestAsIPv6Address) {
+ IPAddress addr(kIPv4PublicAddr);
+ IPAddress addr2(kIPv4MappedPublicAddr);
+ addr = addr.AsIPv6Address();
+ EXPECT_EQ(addr, addr2);
+
+ addr = IPAddress(kIPv4MappedPublicAddr);
+ addr2 = IPAddress(kIPv4MappedPublicAddr);
+ addr = addr.AsIPv6Address();
+ EXPECT_EQ(addr, addr2);
+
+ addr = IPAddress(kIPv6PublicAddr);
+ addr2 = IPAddress(kIPv6PublicAddr);
+ addr = addr.AsIPv6Address();
+ EXPECT_EQ(addr, addr2);
+}
+
+TEST(IPAddressTest, TestCountIPMaskBits) {
+ IPAddress mask;
+ // IPv4 on byte boundaries
+ EXPECT_PRED2(CheckMaskCount, "255.255.255.255", 32);
+ EXPECT_PRED2(CheckMaskCount, "255.255.255.0", 24);
+ EXPECT_PRED2(CheckMaskCount, "255.255.0.0", 16);
+ EXPECT_PRED2(CheckMaskCount, "255.0.0.0", 8);
+ EXPECT_PRED2(CheckMaskCount, "0.0.0.0", 0);
+
+ // IPv4 not on byte boundaries
+ EXPECT_PRED2(CheckMaskCount, "128.0.0.0", 1);
+ EXPECT_PRED2(CheckMaskCount, "224.0.0.0", 3);
+ EXPECT_PRED2(CheckMaskCount, "255.248.0.0", 13);
+ EXPECT_PRED2(CheckMaskCount, "255.255.224.0", 19);
+ EXPECT_PRED2(CheckMaskCount, "255.255.255.252", 30);
+
+ // V6 on byte boundaries
+ EXPECT_PRED2(CheckMaskCount, "::", 0);
+ EXPECT_PRED2(CheckMaskCount, "ff00::", 8);
+ EXPECT_PRED2(CheckMaskCount, "ffff::", 16);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ff00::", 24);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff::", 32);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ff00::", 40);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff::", 48);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ff00::", 56);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff::", 64);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ff00::", 72);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff::", 80);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ff00::", 88);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff::", 96);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:ff00:0000", 104);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:0000", 112);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00", 120);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", 128);
+
+ // V6 not on byte boundaries.
+ EXPECT_PRED2(CheckMaskCount, "8000::", 1);
+ EXPECT_PRED2(CheckMaskCount, "ff80::", 9);
+ EXPECT_PRED2(CheckMaskCount, "ffff:fe00::", 23);
+ EXPECT_PRED2(CheckMaskCount, "ffff:fffe::", 31);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:e000::", 35);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffe0::", 43);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:f800::", 53);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:fff8::", 61);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:fc00::", 70);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:fffc::", 78);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:8000::", 81);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ff80::", 89);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:fe00::", 103);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:fffe:0000", 111);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fc00", 118);
+ EXPECT_PRED2(CheckMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffc", 126);
+
+  // Non-contiguous masks. These are invalid, but test them anyway to make sure
+  // CountIPMaskBits doesn't crash or hang on them.
+ EXPECT_PRED1(TryInvalidMaskCount, "217.0.0.0");
+ EXPECT_PRED1(TryInvalidMaskCount, "255.185.0.0");
+ EXPECT_PRED1(TryInvalidMaskCount, "255.255.251.0");
+ EXPECT_PRED1(TryInvalidMaskCount, "255.255.251.255");
+ EXPECT_PRED1(TryInvalidMaskCount, "255.255.254.201");
+ EXPECT_PRED1(TryInvalidMaskCount, "::1");
+ EXPECT_PRED1(TryInvalidMaskCount, "fe80::1");
+ EXPECT_PRED1(TryInvalidMaskCount, "ff80::1");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff::1");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ff00:1::1");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff::ffff:1");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ff00:1::");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ffff::ff00");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ffff:ff00:1234::");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ffff:ffff:0012::ffff");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ffff:ffff:ff01::");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ffff:ffff:ffff:7f00::");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ffff:ffff:ffff:ff7a::");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:7f00:0000");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:ff70:0000");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:0211");
+ EXPECT_PRED1(TryInvalidMaskCount, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff7f");
+}
+
+TEST(IPAddressTest, TestTruncateIP) {
+ EXPECT_PRED3(CheckTruncateIP, "255.255.255.255", 24, "255.255.255.0");
+ EXPECT_PRED3(CheckTruncateIP, "255.255.255.255", 16, "255.255.0.0");
+ EXPECT_PRED3(CheckTruncateIP, "255.255.255.255", 8, "255.0.0.0");
+ EXPECT_PRED3(CheckTruncateIP, "202.67.7.255", 24, "202.67.7.0");
+ EXPECT_PRED3(CheckTruncateIP, "202.129.65.205", 16, "202.129.0.0");
+ EXPECT_PRED3(CheckTruncateIP, "55.25.2.77", 8, "55.0.0.0");
+ EXPECT_PRED3(CheckTruncateIP, "74.128.99.254", 1, "0.0.0.0");
+ EXPECT_PRED3(CheckTruncateIP, "106.55.99.254", 3, "96.0.0.0");
+ EXPECT_PRED3(CheckTruncateIP, "172.167.53.222", 13, "172.160.0.0");
+ EXPECT_PRED3(CheckTruncateIP, "255.255.224.0", 18, "255.255.192.0");
+ EXPECT_PRED3(CheckTruncateIP, "255.255.255.252", 28, "255.255.255.240");
+
+ EXPECT_PRED3(CheckTruncateIP, "fe80:1111:2222:3333:4444:5555:6666:7777", 1,
+ "8000::");
+ EXPECT_PRED3(CheckTruncateIP, "fff0:1111:2222:3333:4444:5555:6666:7777", 9,
+ "ff80::");
+ EXPECT_PRED3(CheckTruncateIP, "ffff:ff80:1111:2222:3333:4444:5555:6666", 23,
+ "ffff:fe00::");
+ EXPECT_PRED3(CheckTruncateIP, "ffff:ff80:1111:2222:3333:4444:5555:6666", 32,
+ "ffff:ff80::");
+ EXPECT_PRED3(CheckTruncateIP, "2400:f9af:e456:1111:2222:3333:4444:5555", 35,
+ "2400:f9af:e000::");
+ EXPECT_PRED3(CheckTruncateIP, "9999:1111:2233:4444:5555:6666:7777:8888", 53,
+ "9999:1111:2233:4000::");
+ EXPECT_PRED3(CheckTruncateIP, "9999:1111:2233:4567:5555:6666:7777:8888", 64,
+ "9999:1111:2233:4567::");
+ EXPECT_PRED3(CheckTruncateIP, "1111:2222:3333:4444:5555:6666:7777:8888", 68,
+ "1111:2222:3333:4444:5000::");
+ EXPECT_PRED3(CheckTruncateIP, "1111:2222:3333:4444:5555:6666:7777:8888", 92,
+ "1111:2222:3333:4444:5555:6660::");
+ EXPECT_PRED3(CheckTruncateIP, "1111:2222:3333:4444:5555:6666:7777:8888", 96,
+ "1111:2222:3333:4444:5555:6666::");
+ EXPECT_PRED3(CheckTruncateIP, "1111:2222:3333:4444:5555:6666:7777:8888", 105,
+ "1111:2222:3333:4444:5555:6666:7700::");
+ EXPECT_PRED3(CheckTruncateIP, "1111:2222:3333:4444:5555:6666:7777:8888", 124,
+ "1111:2222:3333:4444:5555:6666:7777:8880");
+
+ // Slightly degenerate cases
+ EXPECT_PRED3(CheckTruncateIP, "202.165.33.127", 32, "202.165.33.127");
+ EXPECT_PRED3(CheckTruncateIP, "235.105.77.12", 0, "0.0.0.0");
+ EXPECT_PRED3(CheckTruncateIP, "1111:2222:3333:4444:5555:6666:7777:8888", 128,
+ "1111:2222:3333:4444:5555:6666:7777:8888");
+ EXPECT_PRED3(CheckTruncateIP, "1111:2222:3333:4444:5555:6666:7777:8888", 0,
+ "::");
+}
+
+TEST(IPAddressTest, TestCategorizeIPv6) {
+ // Test determining if an IPAddress is 6Bone/6To4/Teredo/etc.
+ // IPv4 address, should be none of these (not even v4compat/v4mapped).
+ IPAddress v4_addr(kIPv4PublicAddr);
+ EXPECT_FALSE(IPIs6Bone(v4_addr));
+ EXPECT_FALSE(IPIs6To4(v4_addr));
+ EXPECT_FALSE(IPIsSiteLocal(v4_addr));
+ EXPECT_FALSE(IPIsTeredo(v4_addr));
+ EXPECT_FALSE(IPIsULA(v4_addr));
+ EXPECT_FALSE(IPIsV4Compatibility(v4_addr));
+ EXPECT_FALSE(IPIsV4Mapped(v4_addr));
+  // Link-local (fe80::/10) address; should be none of these.
+ IPAddress linklocal_addr(kIPv6LinkLocalAddr);
+ EXPECT_FALSE(IPIs6Bone(linklocal_addr));
+ EXPECT_FALSE(IPIs6To4(linklocal_addr));
+ EXPECT_FALSE(IPIsSiteLocal(linklocal_addr));
+ EXPECT_FALSE(IPIsTeredo(linklocal_addr));
+ EXPECT_FALSE(IPIsULA(linklocal_addr));
+ EXPECT_FALSE(IPIsV4Compatibility(linklocal_addr));
+ EXPECT_FALSE(IPIsV4Mapped(linklocal_addr));
+ // 'Normal' IPv6 address, should also be none of these.
+ IPAddress normal_addr(kIPv6PublicAddr);
+ EXPECT_FALSE(IPIs6Bone(normal_addr));
+ EXPECT_FALSE(IPIs6To4(normal_addr));
+ EXPECT_FALSE(IPIsSiteLocal(normal_addr));
+ EXPECT_FALSE(IPIsTeredo(normal_addr));
+ EXPECT_FALSE(IPIsULA(normal_addr));
+ EXPECT_FALSE(IPIsV4Compatibility(normal_addr));
+ EXPECT_FALSE(IPIsV4Mapped(normal_addr));
+ // IPv4 mapped address (::ffff:123.123.123.123)
+ IPAddress v4mapped_addr(kIPv4MappedPublicAddr);
+ EXPECT_TRUE(IPIsV4Mapped(v4mapped_addr));
+ EXPECT_FALSE(IPIsV4Compatibility(v4mapped_addr));
+ EXPECT_FALSE(IPIs6Bone(v4mapped_addr));
+ EXPECT_FALSE(IPIs6To4(v4mapped_addr));
+ EXPECT_FALSE(IPIsSiteLocal(v4mapped_addr));
+ EXPECT_FALSE(IPIsTeredo(v4mapped_addr));
+ EXPECT_FALSE(IPIsULA(v4mapped_addr));
+ // IPv4 compatibility address (::123.123.123.123)
+ IPAddress v4compat_addr;
+ IPFromString("::192.168.7.1", &v4compat_addr);
+ EXPECT_TRUE(IPIsV4Compatibility(v4compat_addr));
+ EXPECT_FALSE(IPIs6Bone(v4compat_addr));
+ EXPECT_FALSE(IPIs6To4(v4compat_addr));
+ EXPECT_FALSE(IPIsSiteLocal(v4compat_addr));
+ EXPECT_FALSE(IPIsTeredo(v4compat_addr));
+ EXPECT_FALSE(IPIsULA(v4compat_addr));
+ EXPECT_FALSE(IPIsV4Mapped(v4compat_addr));
+ // 6Bone address (3FFE::/16)
+ IPAddress sixbone_addr;
+ IPFromString("3FFE:123:456::789:123", &sixbone_addr);
+ EXPECT_TRUE(IPIs6Bone(sixbone_addr));
+ EXPECT_FALSE(IPIs6To4(sixbone_addr));
+ EXPECT_FALSE(IPIsSiteLocal(sixbone_addr));
+ EXPECT_FALSE(IPIsTeredo(sixbone_addr));
+ EXPECT_FALSE(IPIsULA(sixbone_addr));
+ EXPECT_FALSE(IPIsV4Mapped(sixbone_addr));
+ EXPECT_FALSE(IPIsV4Compatibility(sixbone_addr));
+  // Unique Local Address (FC00::/7)
+ IPAddress ula_addr;
+ IPFromString("FC00:123:456::789:123", &ula_addr);
+ EXPECT_TRUE(IPIsULA(ula_addr));
+ EXPECT_FALSE(IPIs6Bone(ula_addr));
+ EXPECT_FALSE(IPIs6To4(ula_addr));
+ EXPECT_FALSE(IPIsSiteLocal(ula_addr));
+ EXPECT_FALSE(IPIsTeredo(ula_addr));
+ EXPECT_FALSE(IPIsV4Mapped(ula_addr));
+ EXPECT_FALSE(IPIsV4Compatibility(ula_addr));
+ // 6To4 Address (2002::/16)
+ IPAddress sixtofour_addr;
+ IPFromString("2002:123:456::789:123", &sixtofour_addr);
+ EXPECT_TRUE(IPIs6To4(sixtofour_addr));
+ EXPECT_FALSE(IPIs6Bone(sixtofour_addr));
+ EXPECT_FALSE(IPIsSiteLocal(sixtofour_addr));
+ EXPECT_FALSE(IPIsTeredo(sixtofour_addr));
+ EXPECT_FALSE(IPIsULA(sixtofour_addr));
+ EXPECT_FALSE(IPIsV4Compatibility(sixtofour_addr));
+ EXPECT_FALSE(IPIsV4Mapped(sixtofour_addr));
+ // Site Local address (FEC0::/10)
+ IPAddress sitelocal_addr;
+ IPFromString("FEC0:123:456::789:123", &sitelocal_addr);
+ EXPECT_TRUE(IPIsSiteLocal(sitelocal_addr));
+ EXPECT_FALSE(IPIs6Bone(sitelocal_addr));
+ EXPECT_FALSE(IPIs6To4(sitelocal_addr));
+ EXPECT_FALSE(IPIsTeredo(sitelocal_addr));
+ EXPECT_FALSE(IPIsULA(sitelocal_addr));
+ EXPECT_FALSE(IPIsV4Compatibility(sitelocal_addr));
+ EXPECT_FALSE(IPIsV4Mapped(sitelocal_addr));
+ // Teredo Address (2001:0000::/32)
+ IPAddress teredo_addr;
+ IPFromString("2001:0000:123:456::789:123", &teredo_addr);
+ EXPECT_TRUE(IPIsTeredo(teredo_addr));
+ EXPECT_FALSE(IPIsSiteLocal(teredo_addr));
+ EXPECT_FALSE(IPIs6Bone(teredo_addr));
+ EXPECT_FALSE(IPIs6To4(teredo_addr));
+ EXPECT_FALSE(IPIsULA(teredo_addr));
+ EXPECT_FALSE(IPIsV4Compatibility(teredo_addr));
+ EXPECT_FALSE(IPIsV4Mapped(teredo_addr));
+}
+
+TEST(IPAddressTest, TestToSensitiveString) {
+ IPAddress addr_v4 = IPAddress(kIPv4PublicAddr);
+ EXPECT_EQ(kIPv4PublicAddrString, addr_v4.ToString());
+ EXPECT_EQ(kIPv4PublicAddrString, addr_v4.ToSensitiveString());
+ IPAddress::set_strip_sensitive(true);
+ EXPECT_EQ(kIPv4PublicAddrString, addr_v4.ToString());
+ EXPECT_EQ(kIPv4PublicAddrAnonymizedString, addr_v4.ToSensitiveString());
+ IPAddress::set_strip_sensitive(false);
+
+ IPAddress addr_v6 = IPAddress(kIPv6PublicAddr);
+ EXPECT_EQ(kIPv6PublicAddrString, addr_v6.ToString());
+ EXPECT_EQ(kIPv6PublicAddrString, addr_v6.ToSensitiveString());
+ IPAddress::set_strip_sensitive(true);
+ EXPECT_EQ(kIPv6PublicAddrString, addr_v6.ToString());
+ EXPECT_EQ(kIPv6PublicAddrAnonymizedString, addr_v6.ToSensitiveString());
+ IPAddress::set_strip_sensitive(false);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/json.cc b/chromium/third_party/webrtc/base/json.cc
new file mode 100644
index 00000000000..49a051c019c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/json.cc
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/json.h"
+
+#include <errno.h>
+#include <limits.h>
+#include <stdlib.h>
+
+#include <sstream>
+
+bool GetStringFromJson(const Json::Value& in, std::string* out) {
+ if (!in.isString()) {
+ std::ostringstream s;
+ if (in.isBool()) {
+ s << std::boolalpha << in.asBool();
+ } else if (in.isInt()) {
+ s << in.asInt();
+ } else if (in.isUInt()) {
+ s << in.asUInt();
+ } else if (in.isDouble()) {
+ s << in.asDouble();
+ } else {
+ return false;
+ }
+ *out = s.str();
+ } else {
+ *out = in.asString();
+ }
+ return true;
+}
+
+bool GetIntFromJson(const Json::Value& in, int* out) {
+ bool ret;
+ if (!in.isString()) {
+ ret = in.isConvertibleTo(Json::intValue);
+ if (ret) {
+ *out = in.asInt();
+ }
+ } else {
+ long val; // NOLINT
+ const char* c_str = in.asCString();
+ char* end_ptr;
+ errno = 0;
+ val = strtol(c_str, &end_ptr, 10); // NOLINT
+ ret = (end_ptr != c_str && *end_ptr == '\0' && !errno &&
+ val >= INT_MIN && val <= INT_MAX);
+ *out = val;
+ }
+ return ret;
+}
+
+bool GetUIntFromJson(const Json::Value& in, unsigned int* out) {
+ bool ret;
+ if (!in.isString()) {
+ ret = in.isConvertibleTo(Json::uintValue);
+ if (ret) {
+ *out = in.asUInt();
+ }
+ } else {
+ unsigned long val; // NOLINT
+ const char* c_str = in.asCString();
+ char* end_ptr;
+ errno = 0;
+ val = strtoul(c_str, &end_ptr, 10); // NOLINT
+ ret = (end_ptr != c_str && *end_ptr == '\0' && !errno &&
+ val <= UINT_MAX);
+ *out = val;
+ }
+ return ret;
+}
+
+bool GetBoolFromJson(const Json::Value& in, bool* out) {
+ bool ret;
+ if (!in.isString()) {
+ ret = in.isConvertibleTo(Json::booleanValue);
+ if (ret) {
+ *out = in.asBool();
+ }
+ } else {
+ if (in.asString() == "true") {
+ *out = true;
+ ret = true;
+ } else if (in.asString() == "false") {
+ *out = false;
+ ret = true;
+ } else {
+ ret = false;
+ }
+ }
+ return ret;
+}
+
+bool GetDoubleFromJson(const Json::Value& in, double* out) {
+ bool ret;
+ if (!in.isString()) {
+ ret = in.isConvertibleTo(Json::realValue);
+ if (ret) {
+ *out = in.asDouble();
+ }
+ } else {
+ double val;
+ const char* c_str = in.asCString();
+ char* end_ptr;
+ errno = 0;
+ val = strtod(c_str, &end_ptr);
+ ret = (end_ptr != c_str && *end_ptr == '\0' && !errno);
+ *out = val;
+ }
+ return ret;
+}
+
+namespace {
+template<typename T>
+bool JsonArrayToVector(const Json::Value& value,
+ bool (*getter)(const Json::Value& in, T* out),
+ std::vector<T> *vec) {
+ vec->clear();
+ if (!value.isArray()) {
+ return false;
+ }
+
+ for (Json::Value::ArrayIndex i = 0; i < value.size(); ++i) {
+ T val;
+ if (!getter(value[i], &val)) {
+ return false;
+ }
+ vec->push_back(val);
+ }
+
+ return true;
+}
+// Trivial getter helper
+bool GetValueFromJson(const Json::Value& in, Json::Value* out) {
+ *out = in;
+ return true;
+}
+} // unnamed namespace
+
+bool JsonArrayToValueVector(const Json::Value& in,
+ std::vector<Json::Value>* out) {
+ return JsonArrayToVector(in, GetValueFromJson, out);
+}
+
+bool JsonArrayToIntVector(const Json::Value& in,
+ std::vector<int>* out) {
+ return JsonArrayToVector(in, GetIntFromJson, out);
+}
+
+bool JsonArrayToUIntVector(const Json::Value& in,
+ std::vector<unsigned int>* out) {
+ return JsonArrayToVector(in, GetUIntFromJson, out);
+}
+
+bool JsonArrayToStringVector(const Json::Value& in,
+ std::vector<std::string>* out) {
+ return JsonArrayToVector(in, GetStringFromJson, out);
+}
+
+bool JsonArrayToBoolVector(const Json::Value& in,
+ std::vector<bool>* out) {
+ return JsonArrayToVector(in, GetBoolFromJson, out);
+}
+
+bool JsonArrayToDoubleVector(const Json::Value& in,
+ std::vector<double>* out) {
+ return JsonArrayToVector(in, GetDoubleFromJson, out);
+}
+
+namespace {
+template<typename T>
+Json::Value VectorToJsonArray(const std::vector<T>& vec) {
+ Json::Value result(Json::arrayValue);
+ for (size_t i = 0; i < vec.size(); ++i) {
+ result.append(Json::Value(vec[i]));
+ }
+ return result;
+}
+} // unnamed namespace
+
+Json::Value ValueVectorToJsonArray(const std::vector<Json::Value>& in) {
+ return VectorToJsonArray(in);
+}
+
+Json::Value IntVectorToJsonArray(const std::vector<int>& in) {
+ return VectorToJsonArray(in);
+}
+
+Json::Value UIntVectorToJsonArray(const std::vector<unsigned int>& in) {
+ return VectorToJsonArray(in);
+}
+
+Json::Value StringVectorToJsonArray(const std::vector<std::string>& in) {
+ return VectorToJsonArray(in);
+}
+
+Json::Value BoolVectorToJsonArray(const std::vector<bool>& in) {
+ return VectorToJsonArray(in);
+}
+
+Json::Value DoubleVectorToJsonArray(const std::vector<double>& in) {
+ return VectorToJsonArray(in);
+}
+
+bool GetValueFromJsonArray(const Json::Value& in, size_t n,
+ Json::Value* out) {
+ if (!in.isArray() || !in.isValidIndex(static_cast<int>(n))) {
+ return false;
+ }
+
+ *out = in[static_cast<Json::Value::ArrayIndex>(n)];
+ return true;
+}
+
+bool GetIntFromJsonArray(const Json::Value& in, size_t n,
+ int* out) {
+ Json::Value x;
+ return GetValueFromJsonArray(in, n, &x) && GetIntFromJson(x, out);
+}
+
+bool GetUIntFromJsonArray(const Json::Value& in, size_t n,
+ unsigned int* out) {
+ Json::Value x;
+ return GetValueFromJsonArray(in, n, &x) && GetUIntFromJson(x, out);
+}
+
+bool GetStringFromJsonArray(const Json::Value& in, size_t n,
+ std::string* out) {
+ Json::Value x;
+ return GetValueFromJsonArray(in, n, &x) && GetStringFromJson(x, out);
+}
+
+bool GetBoolFromJsonArray(const Json::Value& in, size_t n,
+ bool* out) {
+ Json::Value x;
+ return GetValueFromJsonArray(in, n, &x) && GetBoolFromJson(x, out);
+}
+
+bool GetDoubleFromJsonArray(const Json::Value& in, size_t n,
+ double* out) {
+ Json::Value x;
+ return GetValueFromJsonArray(in, n, &x) && GetDoubleFromJson(x, out);
+}
+
+bool GetValueFromJsonObject(const Json::Value& in, const std::string& k,
+ Json::Value* out) {
+ if (!in.isObject() || !in.isMember(k)) {
+ return false;
+ }
+
+ *out = in[k];
+ return true;
+}
+
+bool GetIntFromJsonObject(const Json::Value& in, const std::string& k,
+ int* out) {
+ Json::Value x;
+ return GetValueFromJsonObject(in, k, &x) && GetIntFromJson(x, out);
+}
+
+bool GetUIntFromJsonObject(const Json::Value& in, const std::string& k,
+ unsigned int* out) {
+ Json::Value x;
+ return GetValueFromJsonObject(in, k, &x) && GetUIntFromJson(x, out);
+}
+
+bool GetStringFromJsonObject(const Json::Value& in, const std::string& k,
+ std::string* out) {
+ Json::Value x;
+ return GetValueFromJsonObject(in, k, &x) && GetStringFromJson(x, out);
+}
+
+bool GetBoolFromJsonObject(const Json::Value& in, const std::string& k,
+ bool* out) {
+ Json::Value x;
+ return GetValueFromJsonObject(in, k, &x) && GetBoolFromJson(x, out);
+}
+
+bool GetDoubleFromJsonObject(const Json::Value& in, const std::string& k,
+ double* out) {
+ Json::Value x;
+ return GetValueFromJsonObject(in, k, &x) && GetDoubleFromJson(x, out);
+}
+
+std::string JsonValueToString(const Json::Value& json) {
+ Json::FastWriter w;
+ std::string value = w.write(json);
+ return value.substr(0, value.size() - 1); // trim trailing newline
+}
diff --git a/chromium/third_party/webrtc/base/json.h b/chromium/third_party/webrtc/base/json.h
new file mode 100644
index 00000000000..9d45ded97d0
--- /dev/null
+++ b/chromium/third_party/webrtc/base/json.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_JSON_H_
+#define WEBRTC_BASE_JSON_H_
+
+#include <string>
+#include <vector>
+
+#if !defined(WEBRTC_EXTERNAL_JSON)
+#include "json/json.h"
+#else
+#include "third_party/jsoncpp/json.h"
+#endif
+
+// TODO: Move to rtc namespace
+
+///////////////////////////////////////////////////////////////////////////////
+// JSON Helpers
+///////////////////////////////////////////////////////////////////////////////
+
+// Robust conversion operators, better than the ones in JsonCpp.
+bool GetIntFromJson(const Json::Value& in, int* out);
+bool GetUIntFromJson(const Json::Value& in, unsigned int* out);
+bool GetStringFromJson(const Json::Value& in, std::string* out);
+bool GetBoolFromJson(const Json::Value& in, bool* out);
+bool GetDoubleFromJson(const Json::Value& in, double* out);
+
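+// For example (the values below are purely illustrative), these accept both
+// native JSON types and their decimal string representations:
+//
+//   int i = 0;
+//   GetIntFromJson(Json::Value(42), &i);     // true, i == 42
+//   GetIntFromJson(Json::Value("42"), &i);   // true, i == 42 (string parsed)
+//   GetIntFromJson(Json::Value("foo"), &i);  // false
+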
+// Pull values out of a JSON array.
+bool GetValueFromJsonArray(const Json::Value& in, size_t n,
+ Json::Value* out);
+bool GetIntFromJsonArray(const Json::Value& in, size_t n,
+ int* out);
+bool GetUIntFromJsonArray(const Json::Value& in, size_t n,
+ unsigned int* out);
+bool GetStringFromJsonArray(const Json::Value& in, size_t n,
+ std::string* out);
+bool GetBoolFromJsonArray(const Json::Value& in, size_t n,
+ bool* out);
+bool GetDoubleFromJsonArray(const Json::Value& in, size_t n,
+ double* out);
+
+// Convert json arrays to std::vector
+bool JsonArrayToValueVector(const Json::Value& in,
+ std::vector<Json::Value>* out);
+bool JsonArrayToIntVector(const Json::Value& in,
+ std::vector<int>* out);
+bool JsonArrayToUIntVector(const Json::Value& in,
+ std::vector<unsigned int>* out);
+bool JsonArrayToStringVector(const Json::Value& in,
+ std::vector<std::string>* out);
+bool JsonArrayToBoolVector(const Json::Value& in,
+ std::vector<bool>* out);
+bool JsonArrayToDoubleVector(const Json::Value& in,
+ std::vector<double>* out);
+
+// Convert std::vector to json array
+Json::Value ValueVectorToJsonArray(const std::vector<Json::Value>& in);
+Json::Value IntVectorToJsonArray(const std::vector<int>& in);
+Json::Value UIntVectorToJsonArray(const std::vector<unsigned int>& in);
+Json::Value StringVectorToJsonArray(const std::vector<std::string>& in);
+Json::Value BoolVectorToJsonArray(const std::vector<bool>& in);
+Json::Value DoubleVectorToJsonArray(const std::vector<double>& in);
+
+// Pull values out of a JSON object.
+bool GetValueFromJsonObject(const Json::Value& in, const std::string& k,
+ Json::Value* out);
+bool GetIntFromJsonObject(const Json::Value& in, const std::string& k,
+ int* out);
+bool GetUIntFromJsonObject(const Json::Value& in, const std::string& k,
+ unsigned int* out);
+bool GetStringFromJsonObject(const Json::Value& in, const std::string& k,
+ std::string* out);
+bool GetBoolFromJsonObject(const Json::Value& in, const std::string& k,
+ bool* out);
+bool GetDoubleFromJsonObject(const Json::Value& in, const std::string& k,
+ double* out);
+
+// Writes out a Json value as a string.
+std::string JsonValueToString(const Json::Value& json);
+
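+// A usage sketch (the JSON text and identifiers below are hypothetical, not
+// part of this API):
+//
+//   Json::Value root;
+//   Json::Reader reader;
+//   if (reader.parse("{\"width\": 640, \"codecs\": [\"VP8\", \"H264\"]}",
+//                    root)) {
+//     int width = 0;
+//     GetIntFromJsonObject(root, "width", &width);       // width == 640
+//     std::vector<std::string> codecs;
+//     JsonArrayToStringVector(root["codecs"], &codecs);  // {"VP8", "H264"}
+//     std::string s = JsonValueToString(root);           // no trailing newline
+//   }
+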
+#endif // WEBRTC_BASE_JSON_H_
diff --git a/chromium/third_party/webrtc/base/json_unittest.cc b/chromium/third_party/webrtc/base/json_unittest.cc
new file mode 100644
index 00000000000..e7e58227b1f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/json_unittest.cc
@@ -0,0 +1,277 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/json.h"
+
+static Json::Value in_s("foo");
+static Json::Value in_sn("99");
+static Json::Value in_si("-99");
+static Json::Value in_sb("true");
+static Json::Value in_sd("1.2");
+static Json::Value in_n(12);
+static Json::Value in_i(-12);
+static Json::Value in_u(34U);
+static Json::Value in_b(true);
+static Json::Value in_d(1.2);
+static Json::Value big_sn("12345678901234567890");
+static Json::Value big_si("-12345678901234567890");
+static Json::Value big_u(0xFFFFFFFF);
+static Json::Value bad_a(Json::arrayValue);
+static Json::Value bad_o(Json::objectValue);
+
+TEST(JsonTest, GetString) {
+ std::string out;
+ EXPECT_TRUE(GetStringFromJson(in_s, &out));
+ EXPECT_EQ("foo", out);
+ EXPECT_TRUE(GetStringFromJson(in_sn, &out));
+ EXPECT_EQ("99", out);
+ EXPECT_TRUE(GetStringFromJson(in_si, &out));
+ EXPECT_EQ("-99", out);
+ EXPECT_TRUE(GetStringFromJson(in_i, &out));
+ EXPECT_EQ("-12", out);
+ EXPECT_TRUE(GetStringFromJson(in_n, &out));
+ EXPECT_EQ("12", out);
+ EXPECT_TRUE(GetStringFromJson(in_u, &out));
+ EXPECT_EQ("34", out);
+ EXPECT_TRUE(GetStringFromJson(in_b, &out));
+ EXPECT_EQ("true", out);
+ // Not supported here yet.
+ EXPECT_FALSE(GetStringFromJson(bad_a, &out));
+ EXPECT_FALSE(GetStringFromJson(bad_o, &out));
+}
+
+TEST(JsonTest, GetInt) {
+ int out;
+ EXPECT_TRUE(GetIntFromJson(in_sn, &out));
+ EXPECT_EQ(99, out);
+ EXPECT_TRUE(GetIntFromJson(in_si, &out));
+ EXPECT_EQ(-99, out);
+ EXPECT_TRUE(GetIntFromJson(in_n, &out));
+ EXPECT_EQ(12, out);
+ EXPECT_TRUE(GetIntFromJson(in_i, &out));
+ EXPECT_EQ(-12, out);
+ EXPECT_TRUE(GetIntFromJson(in_u, &out));
+ EXPECT_EQ(34, out);
+ EXPECT_TRUE(GetIntFromJson(in_b, &out));
+ EXPECT_EQ(1, out);
+ EXPECT_FALSE(GetIntFromJson(in_s, &out));
+ EXPECT_FALSE(GetIntFromJson(big_sn, &out));
+ EXPECT_FALSE(GetIntFromJson(big_si, &out));
+ EXPECT_FALSE(GetIntFromJson(big_u, &out));
+ EXPECT_FALSE(GetIntFromJson(bad_a, &out));
+ EXPECT_FALSE(GetIntFromJson(bad_o, &out));
+}
+
+TEST(JsonTest, GetUInt) {
+ unsigned int out;
+ EXPECT_TRUE(GetUIntFromJson(in_sn, &out));
+ EXPECT_EQ(99U, out);
+ EXPECT_TRUE(GetUIntFromJson(in_n, &out));
+ EXPECT_EQ(12U, out);
+ EXPECT_TRUE(GetUIntFromJson(in_u, &out));
+ EXPECT_EQ(34U, out);
+ EXPECT_TRUE(GetUIntFromJson(in_b, &out));
+ EXPECT_EQ(1U, out);
+ EXPECT_TRUE(GetUIntFromJson(big_u, &out));
+ EXPECT_EQ(0xFFFFFFFFU, out);
+ EXPECT_FALSE(GetUIntFromJson(in_s, &out));
+ // TODO: Fail reading negative strings.
+ // EXPECT_FALSE(GetUIntFromJson(in_si, &out));
+ EXPECT_FALSE(GetUIntFromJson(in_i, &out));
+ EXPECT_FALSE(GetUIntFromJson(big_sn, &out));
+ EXPECT_FALSE(GetUIntFromJson(big_si, &out));
+ EXPECT_FALSE(GetUIntFromJson(bad_a, &out));
+ EXPECT_FALSE(GetUIntFromJson(bad_o, &out));
+}
+
+TEST(JsonTest, GetBool) {
+ bool out;
+ EXPECT_TRUE(GetBoolFromJson(in_sb, &out));
+ EXPECT_EQ(true, out);
+ EXPECT_TRUE(GetBoolFromJson(in_n, &out));
+ EXPECT_EQ(true, out);
+ EXPECT_TRUE(GetBoolFromJson(in_i, &out));
+ EXPECT_EQ(true, out);
+ EXPECT_TRUE(GetBoolFromJson(in_u, &out));
+ EXPECT_EQ(true, out);
+ EXPECT_TRUE(GetBoolFromJson(in_b, &out));
+ EXPECT_EQ(true, out);
+ EXPECT_TRUE(GetBoolFromJson(big_u, &out));
+ EXPECT_EQ(true, out);
+ EXPECT_FALSE(GetBoolFromJson(in_s, &out));
+ EXPECT_FALSE(GetBoolFromJson(in_sn, &out));
+ EXPECT_FALSE(GetBoolFromJson(in_si, &out));
+ EXPECT_FALSE(GetBoolFromJson(big_sn, &out));
+ EXPECT_FALSE(GetBoolFromJson(big_si, &out));
+ EXPECT_FALSE(GetBoolFromJson(bad_a, &out));
+ EXPECT_FALSE(GetBoolFromJson(bad_o, &out));
+}
+
+TEST(JsonTest, GetDouble) {
+ double out;
+ EXPECT_TRUE(GetDoubleFromJson(in_sn, &out));
+ EXPECT_EQ(99, out);
+ EXPECT_TRUE(GetDoubleFromJson(in_si, &out));
+ EXPECT_EQ(-99, out);
+ EXPECT_TRUE(GetDoubleFromJson(in_sd, &out));
+ EXPECT_EQ(1.2, out);
+ EXPECT_TRUE(GetDoubleFromJson(in_n, &out));
+ EXPECT_EQ(12, out);
+ EXPECT_TRUE(GetDoubleFromJson(in_i, &out));
+ EXPECT_EQ(-12, out);
+ EXPECT_TRUE(GetDoubleFromJson(in_u, &out));
+ EXPECT_EQ(34, out);
+ EXPECT_TRUE(GetDoubleFromJson(in_b, &out));
+ EXPECT_EQ(1, out);
+ EXPECT_TRUE(GetDoubleFromJson(in_d, &out));
+ EXPECT_EQ(1.2, out);
+ EXPECT_FALSE(GetDoubleFromJson(in_s, &out));
+}
+
+TEST(JsonTest, GetFromArray) {
+ Json::Value a, out;
+ a.append(in_s);
+ a.append(in_i);
+ a.append(in_u);
+ a.append(in_b);
+ EXPECT_TRUE(GetValueFromJsonArray(a, 0, &out));
+ EXPECT_TRUE(GetValueFromJsonArray(a, 3, &out));
+ EXPECT_FALSE(GetValueFromJsonArray(a, 99, &out));
+ EXPECT_FALSE(GetValueFromJsonArray(a, 0xFFFFFFFF, &out));
+}
+
+TEST(JsonTest, GetFromObject) {
+ Json::Value o, out;
+ o["string"] = in_s;
+ o["int"] = in_i;
+ o["uint"] = in_u;
+ o["bool"] = in_b;
+ EXPECT_TRUE(GetValueFromJsonObject(o, "int", &out));
+ EXPECT_TRUE(GetValueFromJsonObject(o, "bool", &out));
+ EXPECT_FALSE(GetValueFromJsonObject(o, "foo", &out));
+ EXPECT_FALSE(GetValueFromJsonObject(o, "", &out));
+}
+
+namespace {
+template <typename T>
+std::vector<T> VecOf3(const T& a, const T& b, const T& c) {
+ std::vector<T> in;
+ in.push_back(a);
+ in.push_back(b);
+ in.push_back(c);
+ return in;
+}
+template <typename T>
+Json::Value JsonVecOf3(const T& a, const T& b, const T& c) {
+ Json::Value in(Json::arrayValue);
+ in.append(a);
+ in.append(b);
+ in.append(c);
+ return in;
+}
+} // unnamed namespace
+
+TEST(JsonTest, ValueVectorToFromArray) {
+ std::vector<Json::Value> in = VecOf3<Json::Value>("a", "b", "c");
+ Json::Value out = ValueVectorToJsonArray(in);
+ EXPECT_EQ(in.size(), out.size());
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); ++i) {
+ EXPECT_EQ(in[i].asString(), out[i].asString());
+ }
+ Json::Value inj = JsonVecOf3<Json::Value>("a", "b", "c");
+ EXPECT_EQ(inj, out);
+ std::vector<Json::Value> outj;
+ EXPECT_TRUE(JsonArrayToValueVector(inj, &outj));
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); i++) {
+ EXPECT_EQ(in[i], outj[i]);
+ }
+}
+
+TEST(JsonTest, IntVectorToFromArray) {
+ std::vector<int> in = VecOf3<int>(1, 2, 3);
+ Json::Value out = IntVectorToJsonArray(in);
+ EXPECT_EQ(in.size(), out.size());
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); ++i) {
+ EXPECT_EQ(in[i], out[i].asInt());
+ }
+ Json::Value inj = JsonVecOf3<int>(1, 2, 3);
+ EXPECT_EQ(inj, out);
+ std::vector<int> outj;
+ EXPECT_TRUE(JsonArrayToIntVector(inj, &outj));
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); i++) {
+ EXPECT_EQ(in[i], outj[i]);
+ }
+}
+
+TEST(JsonTest, UIntVectorToFromArray) {
+ std::vector<unsigned int> in = VecOf3<unsigned int>(1, 2, 3);
+ Json::Value out = UIntVectorToJsonArray(in);
+ EXPECT_EQ(in.size(), out.size());
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); ++i) {
+ EXPECT_EQ(in[i], out[i].asUInt());
+ }
+ Json::Value inj = JsonVecOf3<unsigned int>(1, 2, 3);
+ EXPECT_EQ(inj, out);
+ std::vector<unsigned int> outj;
+ EXPECT_TRUE(JsonArrayToUIntVector(inj, &outj));
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); i++) {
+ EXPECT_EQ(in[i], outj[i]);
+ }
+}
+
+TEST(JsonTest, StringVectorToFromArray) {
+ std::vector<std::string> in = VecOf3<std::string>("a", "b", "c");
+ Json::Value out = StringVectorToJsonArray(in);
+ EXPECT_EQ(in.size(), out.size());
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); ++i) {
+ EXPECT_EQ(in[i], out[i].asString());
+ }
+ Json::Value inj = JsonVecOf3<std::string>("a", "b", "c");
+ EXPECT_EQ(inj, out);
+ std::vector<std::string> outj;
+ EXPECT_TRUE(JsonArrayToStringVector(inj, &outj));
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); i++) {
+ EXPECT_EQ(in[i], outj[i]);
+ }
+}
+
+TEST(JsonTest, BoolVectorToFromArray) {
+ std::vector<bool> in = VecOf3<bool>(false, true, false);
+ Json::Value out = BoolVectorToJsonArray(in);
+ EXPECT_EQ(in.size(), out.size());
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); ++i) {
+ EXPECT_EQ(in[i], out[i].asBool());
+ }
+ Json::Value inj = JsonVecOf3<bool>(false, true, false);
+ EXPECT_EQ(inj, out);
+ std::vector<bool> outj;
+ EXPECT_TRUE(JsonArrayToBoolVector(inj, &outj));
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); i++) {
+ EXPECT_EQ(in[i], outj[i]);
+ }
+}
+
+TEST(JsonTest, DoubleVectorToFromArray) {
+ std::vector<double> in = VecOf3<double>(1.0, 2.0, 3.0);
+ Json::Value out = DoubleVectorToJsonArray(in);
+ EXPECT_EQ(in.size(), out.size());
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); ++i) {
+ EXPECT_EQ(in[i], out[i].asDouble());
+ }
+ Json::Value inj = JsonVecOf3<double>(1.0, 2.0, 3.0);
+ EXPECT_EQ(inj, out);
+ std::vector<double> outj;
+ EXPECT_TRUE(JsonArrayToDoubleVector(inj, &outj));
+ for (Json::Value::ArrayIndex i = 0; i < in.size(); i++) {
+ EXPECT_EQ(in[i], outj[i]);
+ }
+}
diff --git a/chromium/third_party/webrtc/base/latebindingsymboltable.cc b/chromium/third_party/webrtc/base/latebindingsymboltable.cc
new file mode 100644
index 00000000000..1896bd0f9b9
--- /dev/null
+++ b/chromium/third_party/webrtc/base/latebindingsymboltable.cc
@@ -0,0 +1,156 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/latebindingsymboltable.h"
+
+#if defined(WEBRTC_POSIX)
+#include <dlfcn.h>
+#endif
+
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+#if defined(WEBRTC_POSIX)
+static const DllHandle kInvalidDllHandle = NULL;
+#else
+#error Not implemented
+#endif
+
+static const char *GetDllError() {
+#if defined(WEBRTC_POSIX)
+ const char *err = dlerror();
+ if (err) {
+ return err;
+ } else {
+ return "No error";
+ }
+#else
+#error Not implemented
+#endif
+}
+
+static bool LoadSymbol(DllHandle handle,
+ const char *symbol_name,
+ void **symbol) {
+#if defined(WEBRTC_POSIX)
+ *symbol = dlsym(handle, symbol_name);
+ const char *err = dlerror();
+ if (err) {
+ LOG(LS_ERROR) << "Error loading symbol " << symbol_name << ": " << err;
+ return false;
+ } else if (!*symbol) {
+ // ELF allows for symbols to be NULL, but that should never happen for our
+ // usage.
+ LOG(LS_ERROR) << "Symbol " << symbol_name << " is NULL";
+ return false;
+ }
+ return true;
+#else
+#error Not implemented
+#endif
+}
+
+LateBindingSymbolTable::LateBindingSymbolTable(const TableInfo *info,
+ void **table)
+ : info_(info),
+ table_(table),
+ handle_(kInvalidDllHandle),
+ undefined_symbols_(false) {
+ ClearSymbols();
+}
+
+LateBindingSymbolTable::~LateBindingSymbolTable() {
+ Unload();
+}
+
+bool LateBindingSymbolTable::IsLoaded() const {
+ return handle_ != kInvalidDllHandle;
+}
+
+bool LateBindingSymbolTable::Load() {
+ ASSERT(info_->dll_name != NULL);
+ return LoadFromPath(info_->dll_name);
+}
+
+bool LateBindingSymbolTable::LoadFromPath(const char *dll_path) {
+ if (IsLoaded()) {
+ return true;
+ }
+ if (undefined_symbols_) {
+ // We do not attempt to load again because repeated attempts are not
+ // likely to succeed and DLL loading is costly.
+ LOG(LS_ERROR) << "We know there are undefined symbols";
+ return false;
+ }
+
+#if defined(WEBRTC_POSIX)
+ handle_ = dlopen(dll_path,
+ // RTLD_NOW front-loads symbol resolution so that errors are
+ // caught early instead of causing a process abort later.
+                   // RTLD_LOCAL prevents other modules from automatically
+                   // seeing symbol definitions in the newly-loaded tree. This
+                   // is necessary so that same-named symbols in different ABI
+                   // versions of the same library do not collide.
+ RTLD_NOW|RTLD_LOCAL
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+                   // RTLD_DEEPBIND makes symbol dependencies in the
+                   // newly-loaded tree prefer to resolve to definitions within
+                   // that tree (the default on OS X). This is necessary so
+                   // that same-named symbols in different ABI versions of the
+                   // same library do not collide.
+ |RTLD_DEEPBIND
+#endif
+ ); // NOLINT
+#else
+#error Not implemented
+#endif
+
+ if (handle_ == kInvalidDllHandle) {
+ LOG(LS_WARNING) << "Can't load " << dll_path << ": "
+ << GetDllError();
+ return false;
+ }
+#if defined(WEBRTC_POSIX)
+ // Clear any old errors.
+ dlerror();
+#endif
+ for (int i = 0; i < info_->num_symbols; ++i) {
+ if (!LoadSymbol(handle_, info_->symbol_names[i], &table_[i])) {
+ undefined_symbols_ = true;
+ Unload();
+ return false;
+ }
+ }
+ return true;
+}
+
+void LateBindingSymbolTable::Unload() {
+ if (!IsLoaded()) {
+ return;
+ }
+
+#if defined(WEBRTC_POSIX)
+ if (dlclose(handle_) != 0) {
+ LOG(LS_ERROR) << GetDllError();
+ }
+#else
+#error Not implemented
+#endif
+
+ handle_ = kInvalidDllHandle;
+ ClearSymbols();
+}
+
+void LateBindingSymbolTable::ClearSymbols() {
+ memset(table_, 0, sizeof(void *) * info_->num_symbols);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/latebindingsymboltable.cc.def b/chromium/third_party/webrtc/base/latebindingsymboltable.cc.def
new file mode 100644
index 00000000000..6ddb2ae629f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/latebindingsymboltable.cc.def
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file is a supermacro
+// (see http://wanderinghorse.net/computing/papers/supermacros_cpp.html) to
+// expand a definition of a late-binding symbol table class.
+//
+// Arguments:
+// LATE_BINDING_SYMBOL_TABLE_CLASS_NAME: Name of the class to generate.
+// LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST: List of symbols to load from the DLL,
+// as an X-Macro list (see http://www.drdobbs.com/blogs/cpp/228700289).
+// LATE_BINDING_SYMBOL_TABLE_DLL_NAME: String literal for the DLL file name to
+// load.
+//
+// From a .cc file, include the header file containing your call to the .h.def
+// supermacro, and then call this supermacro (optionally from inside the
+// namespace for the class to generate, if any). Example:
+//
+// #include "myclassname.h"
+//
+// namespace foo {
+//
+// #define LATE_BINDING_SYMBOL_TABLE_CLASS_NAME MY_CLASS_NAME
+// #define LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST MY_SYMBOLS_LIST
+// #define LATE_BINDING_SYMBOL_TABLE_DLL_NAME "libdll.so.n"
+// #include "webrtc/base/latebindingsymboltable.cc.def"
+//
+// }
+
+#ifndef LATE_BINDING_SYMBOL_TABLE_CLASS_NAME
+#error You must define LATE_BINDING_SYMBOL_TABLE_CLASS_NAME
+#endif
+
+#ifndef LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST
+#error You must define LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST
+#endif
+
+#ifndef LATE_BINDING_SYMBOL_TABLE_DLL_NAME
+#error You must define LATE_BINDING_SYMBOL_TABLE_DLL_NAME
+#endif
+
+#define X(sym) #sym,
+const char* const LATE_BINDING_SYMBOL_TABLE_CLASS_NAME::kSymbolNames[] = {
+ LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST
+};
+#undef X
+
+const ::rtc::LateBindingSymbolTable::TableInfo
+ LATE_BINDING_SYMBOL_TABLE_CLASS_NAME::kTableInfo = {
+ LATE_BINDING_SYMBOL_TABLE_DLL_NAME,
+ SYMBOL_TABLE_SIZE,
+ LATE_BINDING_SYMBOL_TABLE_CLASS_NAME::kSymbolNames
+};
+
+LATE_BINDING_SYMBOL_TABLE_CLASS_NAME::LATE_BINDING_SYMBOL_TABLE_CLASS_NAME()
+ : ::rtc::LateBindingSymbolTable(&kTableInfo, table_) {}
+
+LATE_BINDING_SYMBOL_TABLE_CLASS_NAME::~LATE_BINDING_SYMBOL_TABLE_CLASS_NAME() {}
+
+#undef LATE_BINDING_SYMBOL_TABLE_CLASS_NAME
+#undef LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST
+#undef LATE_BINDING_SYMBOL_TABLE_DLL_NAME
diff --git a/chromium/third_party/webrtc/base/latebindingsymboltable.h b/chromium/third_party/webrtc/base/latebindingsymboltable.h
new file mode 100644
index 00000000000..c1f535cd2b0
--- /dev/null
+++ b/chromium/third_party/webrtc/base/latebindingsymboltable.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_LATEBINDINGSYMBOLTABLE_H_
+#define WEBRTC_BASE_LATEBINDINGSYMBOLTABLE_H_
+
+#include <string.h>
+
+#include "webrtc/base/common.h"
+
+namespace rtc {
+
+#if defined(WEBRTC_POSIX)
+typedef void *DllHandle;
+#else
+#error Not implemented for this platform
+#endif
+
+// This is the base class for "symbol table" classes to simplify the dynamic
+// loading of symbols from DLLs. Currently the implementation only supports
+// Linux and OS X, and pure C symbols (or extern "C" symbols that wrap C++
+// functions). Sub-classes for specific DLLs are generated via the "supermacro"
+// files latebindingsymboltable.h.def and latebindingsymboltable.cc.def. See
+// talk/sound/pulseaudiosymboltable.(h|cc) for an example.
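+//
+// A minimal direct-use sketch (the library name and symbol list here are
+// hypothetical; real users normally go through the supermacro files mentioned
+// above):
+//
+//   static const char* const kMySymbolNames[] = {"sin", "cos"};
+//   static const rtc::LateBindingSymbolTable::TableInfo kMyInfo = {
+//       "libm.so.6", 2, kMySymbolNames};
+//   void* my_table[2];
+//   rtc::LateBindingSymbolTable my_symbols(&kMyInfo, my_table);
+//   if (my_symbols.Load()) {
+//     typedef double (*UnaryMathFn)(double);
+//     double y = reinterpret_cast<UnaryMathFn>(my_table[0])(0.5);  // sin(0.5)
+//   }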
+class LateBindingSymbolTable {
+ public:
+ struct TableInfo {
+ const char *dll_name;
+ int num_symbols;
+ // Array of size num_symbols.
+ const char *const *symbol_names;
+ };
+
+ LateBindingSymbolTable(const TableInfo *info, void **table);
+ ~LateBindingSymbolTable();
+
+ bool IsLoaded() const;
+ // Loads the DLL and the symbol table. Returns true iff the DLL and symbol
+ // table loaded successfully.
+ bool Load();
+  // Like Load(), but allows the DLL path to be overridden, for cases where the
+  // path is only known at runtime.
+ bool LoadFromPath(const char *dll_path);
+ void Unload();
+
+ // Gets the raw OS handle to the DLL. Be careful what you do with it.
+ DllHandle GetDllHandle() const { return handle_; }
+
+ private:
+ void ClearSymbols();
+
+ const TableInfo *info_;
+ void **table_;
+ DllHandle handle_;
+ bool undefined_symbols_;
+
+ DISALLOW_COPY_AND_ASSIGN(LateBindingSymbolTable);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_LATEBINDINGSYMBOLTABLE_H_
diff --git a/chromium/third_party/webrtc/base/latebindingsymboltable.h.def b/chromium/third_party/webrtc/base/latebindingsymboltable.h.def
new file mode 100644
index 00000000000..39b515fbd1d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/latebindingsymboltable.h.def
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file is a supermacro
+// (see http://wanderinghorse.net/computing/papers/supermacros_cpp.html) to
+// expand a declaration of a late-binding symbol table class.
+//
+// Arguments:
+// LATE_BINDING_SYMBOL_TABLE_CLASS_NAME: Name of the class to generate.
+// LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST: List of symbols to load from the DLL,
+// as an X-Macro list (see http://www.drdobbs.com/blogs/cpp/228700289).
+//
+// From a .h file, include the header(s) for the DLL to late-bind and the
+// latebindingsymboltable.h header, and then call this supermacro (optionally
+// from inside the namespace for the class to generate, if any). Example:
+//
+// #include <headerfordll.h>
+//
+// #include "webrtc/base/latebindingsymboltable.h"
+//
+// namespace foo {
+//
+// #define MY_CLASS_NAME DesiredClassName
+// #define MY_SYMBOLS_LIST X(acos) X(sin) X(tan)
+//
+// #define LATE_BINDING_SYMBOL_TABLE_CLASS_NAME MY_CLASS_NAME
+// #define LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST MY_SYMBOLS_LIST
+// #include "webrtc/base/latebindingsymboltable.h.def"
+//
+// }
+
+#ifndef WEBRTC_BASE_LATEBINDINGSYMBOLTABLE_H_
+#error You must first include latebindingsymboltable.h
+#endif
+
+#ifndef LATE_BINDING_SYMBOL_TABLE_CLASS_NAME
+#error You must define LATE_BINDING_SYMBOL_TABLE_CLASS_NAME
+#endif
+
+#ifndef LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST
+#error You must define LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST
+#endif
+
+class LATE_BINDING_SYMBOL_TABLE_CLASS_NAME :
+ public ::rtc::LateBindingSymbolTable {
+ public:
+ LATE_BINDING_SYMBOL_TABLE_CLASS_NAME();
+ ~LATE_BINDING_SYMBOL_TABLE_CLASS_NAME();
+
+#define X(sym) \
+ typeof(&::sym) sym() const { \
+ ASSERT(::rtc::LateBindingSymbolTable::IsLoaded()); \
+ return reinterpret_cast<typeof(&::sym)>(table_[SYMBOL_TABLE_INDEX_##sym]); \
+ }
+LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST
+#undef X
+
+ private:
+ enum {
+#define X(sym) \
+ SYMBOL_TABLE_INDEX_##sym,
+LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST
+#undef X
+ SYMBOL_TABLE_SIZE
+ };
+
+ static const ::rtc::LateBindingSymbolTable::TableInfo kTableInfo;
+ static const char *const kSymbolNames[];
+
+ void *table_[SYMBOL_TABLE_SIZE];
+
+ DISALLOW_COPY_AND_ASSIGN(LATE_BINDING_SYMBOL_TABLE_CLASS_NAME);
+};
+
+#undef LATE_BINDING_SYMBOL_TABLE_CLASS_NAME
+#undef LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST
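
To make the X-Macro expansion concrete: for a one-symbol list such as #define LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST X(sin), the two X() blocks above roughly generate the following class members (simplified by hand, not the literal preprocessor output):

    // Typed accessor from the first X() expansion:
    typeof(&::sin) sin() const {
      ASSERT(::rtc::LateBindingSymbolTable::IsLoaded());
      return reinterpret_cast<typeof(&::sin)>(table_[SYMBOL_TABLE_INDEX_sin]);
    }

    // Index from the second X() expansion:
    enum {
      SYMBOL_TABLE_INDEX_sin,  // == 0
      SYMBOL_TABLE_SIZE        // == 1, sizes the table_ array
    };
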
diff --git a/chromium/third_party/webrtc/base/latebindingsymboltable_unittest.cc b/chromium/third_party/webrtc/base/latebindingsymboltable_unittest.cc
new file mode 100644
index 00000000000..30ebd17cba2
--- /dev/null
+++ b/chromium/third_party/webrtc/base/latebindingsymboltable_unittest.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+#include <math.h>
+#endif
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/latebindingsymboltable.h"
+
+namespace rtc {
+
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+
+#define LIBM_SYMBOLS_CLASS_NAME LibmTestSymbolTable
+#define LIBM_SYMBOLS_LIST \
+ X(acos) \
+ X(sin) \
+ X(tan)
+
+#define LATE_BINDING_SYMBOL_TABLE_CLASS_NAME LIBM_SYMBOLS_CLASS_NAME
+#define LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST LIBM_SYMBOLS_LIST
+#include "webrtc/base/latebindingsymboltable.h.def"
+
+#define LATE_BINDING_SYMBOL_TABLE_CLASS_NAME LIBM_SYMBOLS_CLASS_NAME
+#define LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST LIBM_SYMBOLS_LIST
+#define LATE_BINDING_SYMBOL_TABLE_DLL_NAME "libm.so.6"
+#include "webrtc/base/latebindingsymboltable.cc.def"
+
+TEST(LateBindingSymbolTable, libm) {
+ LibmTestSymbolTable table;
+ EXPECT_FALSE(table.IsLoaded());
+ ASSERT_TRUE(table.Load());
+ EXPECT_TRUE(table.IsLoaded());
+ EXPECT_EQ(table.acos()(0.5), acos(0.5));
+ EXPECT_EQ(table.sin()(0.5), sin(0.5));
+ EXPECT_EQ(table.tan()(0.5), tan(0.5));
+  // It would be nice to check that the addresses are the same, but the nature
+  // of dynamic linking and relocation means they actually end up different.
+ table.Unload();
+ EXPECT_FALSE(table.IsLoaded());
+}
+
+#else
+#error Not implemented
+#endif
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/libdbusglibsymboltable.cc b/chromium/third_party/webrtc/base/libdbusglibsymboltable.cc
new file mode 100644
index 00000000000..ad51064bc58
--- /dev/null
+++ b/chromium/third_party/webrtc/base/libdbusglibsymboltable.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef HAVE_DBUS_GLIB
+
+#include "webrtc/base/libdbusglibsymboltable.h"
+
+namespace rtc {
+
+#define LATE_BINDING_SYMBOL_TABLE_CLASS_NAME LIBDBUS_GLIB_CLASS_NAME
+#define LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST LIBDBUS_GLIB_SYMBOLS_LIST
+#define LATE_BINDING_SYMBOL_TABLE_DLL_NAME "libdbus-glib-1.so.2"
+#include "webrtc/base/latebindingsymboltable.cc.def"
+
+} // namespace rtc
+
+#endif // HAVE_DBUS_GLIB
diff --git a/chromium/third_party/webrtc/base/libdbusglibsymboltable.h b/chromium/third_party/webrtc/base/libdbusglibsymboltable.h
new file mode 100644
index 00000000000..b87b4c17442
--- /dev/null
+++ b/chromium/third_party/webrtc/base/libdbusglibsymboltable.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_LIBDBUSGLIBSYMBOLTABLE_H_
+#define WEBRTC_BASE_LIBDBUSGLIBSYMBOLTABLE_H_
+
+#ifdef HAVE_DBUS_GLIB
+
+#include <dbus/dbus-glib.h>
+#include <dbus/dbus-glib-lowlevel.h>
+
+#include "webrtc/base/latebindingsymboltable.h"
+
+namespace rtc {
+
+#define LIBDBUS_GLIB_CLASS_NAME LibDBusGlibSymbolTable
+// The libdbus-glib symbols we need, as an X-Macro list.
+// This list must contain precisely every libdbus-glib function that is used in
+// dbus.cc.
+#define LIBDBUS_GLIB_SYMBOLS_LIST \
+ X(dbus_bus_add_match) \
+ X(dbus_connection_add_filter) \
+ X(dbus_connection_close) \
+ X(dbus_connection_remove_filter) \
+ X(dbus_connection_set_exit_on_disconnect) \
+ X(dbus_g_bus_get) \
+ X(dbus_g_bus_get_private) \
+ X(dbus_g_connection_get_connection) \
+ X(dbus_g_connection_unref) \
+ X(dbus_g_thread_init) \
+ X(dbus_message_get_interface) \
+ X(dbus_message_get_member) \
+ X(dbus_message_get_path) \
+ X(dbus_message_get_type) \
+ X(dbus_message_iter_get_arg_type) \
+ X(dbus_message_iter_get_basic) \
+ X(dbus_message_iter_init) \
+ X(dbus_message_ref) \
+ X(dbus_message_unref)
+
+#define LATE_BINDING_SYMBOL_TABLE_CLASS_NAME LIBDBUS_GLIB_CLASS_NAME
+#define LATE_BINDING_SYMBOL_TABLE_SYMBOLS_LIST LIBDBUS_GLIB_SYMBOLS_LIST
+#include "webrtc/base/latebindingsymboltable.h.def"
+
+} // namespace rtc
+
+#endif // HAVE_DBUS_GLIB
+
+#endif // WEBRTC_BASE_LIBDBUSGLIBSYMBOLTABLE_H_
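
A rough usage sketch for the generated LibDBusGlibSymbolTable, guarded the same way as the header. dbus_g_thread_init() is just one illustrative pick from the symbol list above, and in real code the table would normally be kept alive for as long as its symbols are in use:

    #ifdef HAVE_DBUS_GLIB
    #include "webrtc/base/libdbusglibsymboltable.h"

    void InitDBusGlibThreading() {
      rtc::LibDBusGlibSymbolTable dbus_glib;
      if (!dbus_glib.Load()) {
        return;  // libdbus-glib-1.so.2 is not available on this system.
      }
      // Each accessor returns a typed function pointer into the loaded DLL.
      dbus_glib.dbus_g_thread_init()();
    }
    #endif  // HAVE_DBUS_GLIB
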
diff --git a/chromium/third_party/webrtc/base/linked_ptr.h b/chromium/third_party/webrtc/base/linked_ptr.h
new file mode 100644
index 00000000000..65e5a00ecfa
--- /dev/null
+++ b/chromium/third_party/webrtc/base/linked_ptr.h
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * linked_ptr - simple reference linked pointer
+ * (like reference counting, just using a linked list of the references
+ * instead of their count.)
+ *
+ * The implementation stores three pointers for every linked_ptr, but
+ * does not allocate anything on the free store.
+ */
+
+#ifndef WEBRTC_BASE_LINKED_PTR_H__
+#define WEBRTC_BASE_LINKED_PTR_H__
+
+namespace rtc {
+
+/* For ANSI-challenged compilers, you may want to #define
+ * NO_MEMBER_TEMPLATES, explicit or mutable */
+#define NO_MEMBER_TEMPLATES
+
+template <class X> class linked_ptr
+{
+public:
+
+#ifndef NO_MEMBER_TEMPLATES
+# define TEMPLATE_FUNCTION template <class Y>
+ TEMPLATE_FUNCTION friend class linked_ptr<Y>;
+#else
+# define TEMPLATE_FUNCTION
+ typedef X Y;
+#endif
+
+ typedef X element_type;
+
+ explicit linked_ptr(X* p = 0) throw()
+ : itsPtr(p) {itsPrev = itsNext = this;}
+ ~linked_ptr()
+ {release();}
+ linked_ptr(const linked_ptr& r) throw()
+ {acquire(r);}
+ linked_ptr& operator=(const linked_ptr& r)
+ {
+ if (this != &r) {
+ release();
+ acquire(r);
+ }
+ return *this;
+ }
+
+#ifndef NO_MEMBER_TEMPLATES
+ template <class Y> friend class linked_ptr<Y>;
+ template <class Y> linked_ptr(const linked_ptr<Y>& r) throw()
+ {acquire(r);}
+ template <class Y> linked_ptr& operator=(const linked_ptr<Y>& r)
+ {
+ if (this != &r) {
+ release();
+ acquire(r);
+ }
+ return *this;
+ }
+#endif // NO_MEMBER_TEMPLATES
+
+ X& operator*() const throw() {return *itsPtr;}
+ X* operator->() const throw() {return itsPtr;}
+ X* get() const throw() {return itsPtr;}
+ bool unique() const throw() {return itsPrev ? itsPrev==this : true;}
+
+private:
+ X* itsPtr;
+ mutable const linked_ptr* itsPrev;
+ mutable const linked_ptr* itsNext;
+
+ void acquire(const linked_ptr& r) throw()
+ { // insert this to the list
+ itsPtr = r.itsPtr;
+ itsNext = r.itsNext;
+ itsNext->itsPrev = this;
+ itsPrev = &r;
+#ifndef mutable
+ r.itsNext = this;
+#else // for ANSI-challenged compilers
+ (const_cast<linked_ptr<X>*>(&r))->itsNext = this;
+#endif
+ }
+
+#ifndef NO_MEMBER_TEMPLATES
+ template <class Y> void acquire(const linked_ptr<Y>& r) throw()
+ { // insert this to the list
+ itsPtr = r.itsPtr;
+ itsNext = r.itsNext;
+ itsNext->itsPrev = this;
+ itsPrev = &r;
+#ifndef mutable
+ r.itsNext = this;
+#else // for ANSI-challenged compilers
+ (const_cast<linked_ptr<X>*>(&r))->itsNext = this;
+#endif
+ }
+#endif // NO_MEMBER_TEMPLATES
+
+ void release()
+ { // erase this from the list, delete if unique
+ if (unique()) delete itsPtr;
+ else {
+ itsPrev->itsNext = itsNext;
+ itsNext->itsPrev = itsPrev;
+ itsPrev = itsNext = 0;
+ }
+ itsPtr = 0;
+ }
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_LINKED_PTR_H__
+
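A short illustration of the ownership model: every copy of a linked_ptr joins a circular list of owners, and the pointee is deleted when the last owner is released (sketch only):

    #include "webrtc/base/linked_ptr.h"

    struct Foo {
      int value;
    };

    void LinkedPtrExample() {
      rtc::linked_ptr<Foo> a(new Foo());
      rtc::linked_ptr<Foo> b(a);   // b joins a's ownership ring
      rtc::linked_ptr<Foo> c = b;  // so does c; no heap-allocated count
      a->value = 42;               // all three refer to the same Foo
      // When a, b, and c go out of scope, only the last release() sees
      // unique() == true and deletes the Foo.
    }
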
diff --git a/chromium/third_party/webrtc/base/linux.cc b/chromium/third_party/webrtc/base/linux.cc
new file mode 100644
index 00000000000..b958543532e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/linux.cc
@@ -0,0 +1,348 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_LINUX)
+#include "webrtc/base/linux.h"
+
+#include <ctype.h>
+
+#include <errno.h>
+#include <sys/utsname.h>
+#include <sys/wait.h>
+
+#include <cstdio>
+#include <set>
+
+#include "webrtc/base/stringencode.h"
+
+namespace rtc {
+
+static const char kCpuInfoFile[] = "/proc/cpuinfo";
+static const char kCpuMaxFreqFile[] =
+ "/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq";
+
+ProcCpuInfo::ProcCpuInfo() {
+}
+
+ProcCpuInfo::~ProcCpuInfo() {
+}
+
+bool ProcCpuInfo::LoadFromSystem() {
+ ConfigParser procfs;
+ if (!procfs.Open(kCpuInfoFile)) {
+ return false;
+ }
+ return procfs.Parse(&sections_);
+}
+
+bool ProcCpuInfo::GetSectionCount(size_t* count) {
+ if (sections_.empty()) {
+ return false;
+ }
+ if (count) {
+ *count = sections_.size();
+ }
+ return true;
+}
+
+bool ProcCpuInfo::GetNumCpus(int* num) {
+ if (sections_.empty()) {
+ return false;
+ }
+ int total_cpus = 0;
+#if defined(__arm__)
+ // Count the number of blocks that have a "processor" key defined. On ARM,
+ // there may be extra blocks of information that aren't per-processor.
+ size_t section_count = sections_.size();
+ for (size_t i = 0; i < section_count; ++i) {
+ int processor_id;
+ if (GetSectionIntValue(i, "processor", &processor_id)) {
+ ++total_cpus;
+ }
+ }
+ // Single core ARM systems don't include "processor" keys at all, so return
+ // that we have a single core if we didn't find any explicitly above.
+ if (total_cpus == 0) {
+ total_cpus = 1;
+ }
+#else
+ // On X86, there is exactly one info section per processor.
+ total_cpus = static_cast<int>(sections_.size());
+#endif
+ if (num) {
+ *num = total_cpus;
+ }
+ return true;
+}
+
+bool ProcCpuInfo::GetNumPhysicalCpus(int* num) {
+ if (sections_.empty()) {
+ return false;
+ }
+ // TODO: /proc/cpuinfo only reports cores that are currently
+ // _online_, so this may underreport the number of physical cores.
+#if defined(__arm__)
+ // ARM (currently) has no hyperthreading, so just return the same value
+ // as GetNumCpus.
+ return GetNumCpus(num);
+#else
+ int total_cores = 0;
+ std::set<int> physical_ids;
+ size_t section_count = sections_.size();
+ for (size_t i = 0; i < section_count; ++i) {
+ int physical_id;
+ int cores;
+ // Count the cores for the physical id only if we have not counted the id.
+ if (GetSectionIntValue(i, "physical id", &physical_id) &&
+ GetSectionIntValue(i, "cpu cores", &cores) &&
+ physical_ids.find(physical_id) == physical_ids.end()) {
+ physical_ids.insert(physical_id);
+ total_cores += cores;
+ }
+ }
+
+ if (num) {
+ *num = total_cores;
+ }
+ return true;
+#endif
+}
+
+bool ProcCpuInfo::GetCpuFamily(int* id) {
+ int cpu_family = 0;
+
+#if defined(__arm__)
+ // On some ARM platforms, there is no 'cpu family' in '/proc/cpuinfo'. But
+ // there is 'CPU Architecture' which can be used as 'cpu family'.
+ // See http://en.wikipedia.org/wiki/ARM_architecture for a good list of
+ // ARM cpu families, architectures, and their mappings.
+  // There may be multiple sections that aren't per-processor. We need to scan
+  // through each section until we find the first 'CPU architecture'.
+ size_t section_count = sections_.size();
+ for (size_t i = 0; i < section_count; ++i) {
+ if (GetSectionIntValue(i, "CPU architecture", &cpu_family)) {
+      // We return the first one (if there are multiple entries).
+      break;
+    }
+ }
+#else
+ GetSectionIntValue(0, "cpu family", &cpu_family);
+#endif
+ if (id) {
+ *id = cpu_family;
+ }
+ return true;
+}
+
+bool ProcCpuInfo::GetSectionStringValue(size_t section_num,
+ const std::string& key,
+ std::string* result) {
+ if (section_num >= sections_.size()) {
+ return false;
+ }
+ ConfigParser::SimpleMap::iterator iter = sections_[section_num].find(key);
+ if (iter == sections_[section_num].end()) {
+ return false;
+ }
+ *result = iter->second;
+ return true;
+}
+
+bool ProcCpuInfo::GetSectionIntValue(size_t section_num,
+ const std::string& key,
+ int* result) {
+ if (section_num >= sections_.size()) {
+ return false;
+ }
+ ConfigParser::SimpleMap::iterator iter = sections_[section_num].find(key);
+ if (iter == sections_[section_num].end()) {
+ return false;
+ }
+ return FromString(iter->second, result);
+}
+
+ConfigParser::ConfigParser() {}
+
+ConfigParser::~ConfigParser() {}
+
+bool ConfigParser::Open(const std::string& filename) {
+ FileStream* fs = new FileStream();
+ if (!fs->Open(filename, "r", NULL)) {
+ return false;
+ }
+ instream_.reset(fs);
+ return true;
+}
+
+void ConfigParser::Attach(StreamInterface* stream) {
+ instream_.reset(stream);
+}
+
+bool ConfigParser::Parse(MapVector* key_val_pairs) {
+ // Parses the file and places the found key-value pairs into key_val_pairs.
+ SimpleMap section;
+ while (ParseSection(&section)) {
+ key_val_pairs->push_back(section);
+ section.clear();
+ }
+ return (!key_val_pairs->empty());
+}
+
+bool ConfigParser::ParseSection(SimpleMap* key_val_pair) {
+ // Parses the next section in the filestream and places the found key-value
+ // pairs into key_val_pair.
+ std::string key, value;
+ while (ParseLine(&key, &value)) {
+ (*key_val_pair)[key] = value;
+ }
+ return (!key_val_pair->empty());
+}
+
+bool ConfigParser::ParseLine(std::string* key, std::string* value) {
+ // Parses the next line in the filestream and places the found key-value
+  // pair into key and value.
+ std::string line;
+ if ((instream_->ReadLine(&line)) == SR_EOS) {
+ return false;
+ }
+ std::vector<std::string> tokens;
+ if (2 != split(line, ':', &tokens)) {
+ return false;
+ }
+  // Removes whitespace at the end of the key name.
+ size_t pos = tokens[0].length() - 1;
+ while ((pos > 0) && isspace(tokens[0][pos])) {
+ pos--;
+ }
+ tokens[0].erase(pos + 1);
+ // Removes whitespace at the start of value
+ pos = 0;
+ while (pos < tokens[1].length() && isspace(tokens[1][pos])) {
+ pos++;
+ }
+ tokens[1].erase(0, pos);
+ *key = tokens[0];
+ *value = tokens[1];
+ return true;
+}
+
+#if !defined(WEBRTC_CHROMIUM_BUILDs)
+static bool ExpectLineFromStream(FileStream* stream,
+ std::string* out) {
+ StreamResult res = stream->ReadLine(out);
+ if (res != SR_SUCCESS) {
+ if (res != SR_EOS) {
+ LOG(LS_ERROR) << "Error when reading from stream";
+ } else {
+ LOG(LS_ERROR) << "Incorrect number of lines in stream";
+ }
+ return false;
+ }
+ return true;
+}
+
+static void ExpectEofFromStream(FileStream* stream) {
+ std::string unused;
+ StreamResult res = stream->ReadLine(&unused);
+ if (res == SR_SUCCESS) {
+ LOG(LS_WARNING) << "Ignoring unexpected extra lines from stream";
+ } else if (res != SR_EOS) {
+ LOG(LS_WARNING) << "Error when checking for extra lines from stream";
+ }
+}
+
+// For caching the lsb_release output (reading it invokes a sub-process and
+// hence is somewhat expensive).
+static std::string lsb_release_string;
+static CriticalSection lsb_release_string_critsec;
+
+std::string ReadLinuxLsbRelease() {
+ CritScope cs(&lsb_release_string_critsec);
+ if (!lsb_release_string.empty()) {
+ // Have cached result from previous call.
+ return lsb_release_string;
+ }
+ // No cached result. Run lsb_release and parse output.
+ POpenStream lsb_release_output;
+ if (!lsb_release_output.Open("lsb_release -idrcs", "r", NULL)) {
+ LOG_ERR(LS_ERROR) << "Can't run lsb_release";
+ return lsb_release_string; // empty
+ }
+ // Read in the command's output and build the string.
+ std::ostringstream sstr;
+ std::string line;
+ int wait_status;
+
+ if (!ExpectLineFromStream(&lsb_release_output, &line)) {
+ return lsb_release_string; // empty
+ }
+ sstr << "DISTRIB_ID=" << line;
+
+ if (!ExpectLineFromStream(&lsb_release_output, &line)) {
+ return lsb_release_string; // empty
+ }
+ sstr << " DISTRIB_DESCRIPTION=\"" << line << '"';
+
+ if (!ExpectLineFromStream(&lsb_release_output, &line)) {
+ return lsb_release_string; // empty
+ }
+ sstr << " DISTRIB_RELEASE=" << line;
+
+ if (!ExpectLineFromStream(&lsb_release_output, &line)) {
+ return lsb_release_string; // empty
+ }
+ sstr << " DISTRIB_CODENAME=" << line;
+
+ // Should not be anything left.
+ ExpectEofFromStream(&lsb_release_output);
+
+ lsb_release_output.Close();
+ wait_status = lsb_release_output.GetWaitStatus();
+ if (wait_status == -1 ||
+ !WIFEXITED(wait_status) ||
+ WEXITSTATUS(wait_status) != 0) {
+ LOG(LS_WARNING) << "Unexpected exit status from lsb_release";
+ }
+
+ lsb_release_string = sstr.str();
+
+ return lsb_release_string;
+}
+#endif
+
+std::string ReadLinuxUname() {
+ struct utsname buf;
+ if (uname(&buf) < 0) {
+ LOG_ERR(LS_ERROR) << "Can't call uname()";
+ return std::string();
+ }
+ std::ostringstream sstr;
+ sstr << buf.sysname << " "
+ << buf.release << " "
+ << buf.version << " "
+ << buf.machine;
+ return sstr.str();
+}
+
+int ReadCpuMaxFreq() {
+ FileStream fs;
+ std::string str;
+ int freq = -1;
+ if (!fs.Open(kCpuMaxFreqFile, "r", NULL) ||
+ SR_SUCCESS != fs.ReadLine(&str) ||
+ !FromString(str, &freq)) {
+ return -1;
+ }
+ return freq;
+}
+
+} // namespace rtc
+
+#endif // defined(WEBRTC_LINUX)
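
The physical-core counting in GetNumPhysicalCpus() above hinges on deduplicating the "physical id" field, since a hyperthreaded package repeats the same id and "cpu cores" value in every sibling section. A toy version of the same logic, with hypothetical values:

    #include <set>

    // Hypothetical /proc/cpuinfo: 4 logical CPUs from one hyperthreaded
    // dual-core package, so every section reports physical id 0, cpu cores 2.
    static int CountPhysicalCoresToy() {
      const int kSections[][2] = {{0, 2}, {0, 2}, {0, 2}, {0, 2}};
      std::set<int> physical_ids;
      int total_cores = 0;
      for (int i = 0; i < 4; ++i) {
        if (physical_ids.insert(kSections[i][0]).second) {
          total_cores += kSections[i][1];  // count each physical id once
        }
      }
      return total_cores;  // 2, whereas GetNumCpus() would report 4
    }
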
diff --git a/chromium/third_party/webrtc/base/linux.h b/chromium/third_party/webrtc/base/linux.h
new file mode 100644
index 00000000000..8f601878f62
--- /dev/null
+++ b/chromium/third_party/webrtc/base/linux.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_LINUX_H_
+#define WEBRTC_BASE_LINUX_H_
+
+#if defined(WEBRTC_LINUX)
+#include <string>
+#include <map>
+#include <vector>
+
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+
+//////////////////////////////////////////////////////////////////////////////
+// ConfigParser parses a FileStream of an ".ini"-type format into a map.
+//////////////////////////////////////////////////////////////////////////////
+
+// Sample Usage:
+// ConfigParser parser;
+// ConfigParser::MapVector key_val_pairs;
+// if (parser.Open(inifile) && parser.Parse(&key_val_pairs)) {
+//   for (int section_num = 0; section_num < key_val_pairs.size(); ++section_num) {
+// std::string val1 = key_val_pairs[section_num][key1];
+// std::string val2 = key_val_pairs[section_num][key2];
+// // Do something with valn;
+// }
+// }
+
+class ConfigParser {
+ public:
+ typedef std::map<std::string, std::string> SimpleMap;
+ typedef std::vector<SimpleMap> MapVector;
+
+ ConfigParser();
+ virtual ~ConfigParser();
+
+ virtual bool Open(const std::string& filename);
+ virtual void Attach(StreamInterface* stream);
+ virtual bool Parse(MapVector* key_val_pairs);
+ virtual bool ParseSection(SimpleMap* key_val_pair);
+ virtual bool ParseLine(std::string* key, std::string* value);
+
+ private:
+ scoped_ptr<StreamInterface> instream_;
+};
+
+//////////////////////////////////////////////////////////////////////////////
+// ProcCpuInfo reads CPU info from the /proc subsystem on any *NIX platform.
+//////////////////////////////////////////////////////////////////////////////
+
+// Sample Usage:
+// ProcCpuInfo proc_info;
+// int no_of_cpu;
+// if (proc_info.LoadFromSystem()) {
+// std::string out_str;
+// proc_info.GetNumCpus(&no_of_cpu);
+//   proc_info.GetSectionStringValue(0, "vendor_id", &out_str);
+//   // Do something with no_of_cpu and out_str.
+// }
+
+class ProcCpuInfo {
+ public:
+ ProcCpuInfo();
+ virtual ~ProcCpuInfo();
+
+ // Reads the proc subsystem's cpu info into memory. If this fails, this
+ // returns false; if it succeeds, it returns true.
+ virtual bool LoadFromSystem();
+
+  // Obtains the number of logical CPU threads and places the value in num.
+ virtual bool GetNumCpus(int* num);
+
+  // Obtains the number of physical CPU cores and places the value in num.
+ virtual bool GetNumPhysicalCpus(int* num);
+
+ // Obtains the CPU family id.
+ virtual bool GetCpuFamily(int* id);
+
+ // Obtains the number of sections in /proc/cpuinfo, which may be greater
+  // than the number of CPUs (e.g. on ARM).
+ virtual bool GetSectionCount(size_t* count);
+
+ // Looks for the CPU proc item with the given name for the given section
+ // number and places the string value in result.
+ virtual bool GetSectionStringValue(size_t section_num, const std::string& key,
+ std::string* result);
+
+ // Looks for the CPU proc item with the given name for the given section
+ // number and places the int value in result.
+ virtual bool GetSectionIntValue(size_t section_num, const std::string& key,
+ int* result);
+
+ private:
+ ConfigParser::MapVector sections_;
+};
+
+#if !defined(WEBRTC_CHROMIUM_BUILDs)
+// Builds a string containing the info from lsb_release on a single line.
+std::string ReadLinuxLsbRelease();
+#endif
+
+// Returns the output of "uname".
+std::string ReadLinuxUname();
+
+// Returns the content (int) of
+// /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq
+// Returns -1 on error.
+int ReadCpuMaxFreq();
+
+} // namespace rtc
+
+#endif // defined(WEBRTC_LINUX)
+#endif // WEBRTC_BASE_LINUX_H_
diff --git a/chromium/third_party/webrtc/base/linux_unittest.cc b/chromium/third_party/webrtc/base/linux_unittest.cc
new file mode 100644
index 00000000000..c65ef071c59
--- /dev/null
+++ b/chromium/third_party/webrtc/base/linux_unittest.cc
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include "webrtc/base/linux.h"
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+// When running on ARM, these tests are fairly specific to the output of the
+// Tegra 2 ARM processor, and so may fail on other ARM-based systems.
+TEST(ProcCpuInfo, GetProcInfo) {
+ ProcCpuInfo proc_info;
+ EXPECT_TRUE(proc_info.LoadFromSystem());
+
+ int out_cpus = 0;
+ EXPECT_TRUE(proc_info.GetNumCpus(&out_cpus));
+ LOG(LS_INFO) << "GetNumCpus: " << out_cpus;
+ EXPECT_GT(out_cpus, 0);
+
+ int out_cpus_phys = 0;
+ EXPECT_TRUE(proc_info.GetNumPhysicalCpus(&out_cpus_phys));
+ LOG(LS_INFO) << "GetNumPhysicalCpus: " << out_cpus_phys;
+ EXPECT_GT(out_cpus_phys, 0);
+ EXPECT_LE(out_cpus_phys, out_cpus);
+
+ int out_family = 0;
+ EXPECT_TRUE(proc_info.GetCpuFamily(&out_family));
+ LOG(LS_INFO) << "cpu family: " << out_family;
+ EXPECT_GE(out_family, 4);
+
+#if defined(__arm__)
+ std::string out_processor;
+ EXPECT_TRUE(proc_info.GetSectionStringValue(0, "Processor", &out_processor));
+ LOG(LS_INFO) << "Processor: " << out_processor;
+ EXPECT_NE(std::string::npos, out_processor.find("ARM"));
+
+ // Most other info, such as model, stepping, vendor, etc.
+ // is missing on ARM systems.
+#else
+ int out_model = 0;
+ EXPECT_TRUE(proc_info.GetSectionIntValue(0, "model", &out_model));
+ LOG(LS_INFO) << "model: " << out_model;
+
+ int out_stepping = 0;
+ EXPECT_TRUE(proc_info.GetSectionIntValue(0, "stepping", &out_stepping));
+ LOG(LS_INFO) << "stepping: " << out_stepping;
+
+ int out_processor = 0;
+ EXPECT_TRUE(proc_info.GetSectionIntValue(0, "processor", &out_processor));
+ LOG(LS_INFO) << "processor: " << out_processor;
+ EXPECT_EQ(0, out_processor);
+
+ std::string out_str;
+ EXPECT_TRUE(proc_info.GetSectionStringValue(0, "vendor_id", &out_str));
+ LOG(LS_INFO) << "vendor_id: " << out_str;
+ EXPECT_FALSE(out_str.empty());
+#endif
+}
+
+TEST(ConfigParser, ParseConfig) {
+ ConfigParser parser;
+ MemoryStream *test_stream = new MemoryStream(
+ "Key1: Value1\n"
+ "Key2\t: Value2\n"
+ "Key3:Value3\n"
+ "\n"
+ "Key1:Value1\n");
+ ConfigParser::MapVector key_val_pairs;
+ parser.Attach(test_stream);
+ EXPECT_EQ(true, parser.Parse(&key_val_pairs));
+ EXPECT_EQ(2U, key_val_pairs.size());
+ EXPECT_EQ("Value1", key_val_pairs[0]["Key1"]);
+ EXPECT_EQ("Value2", key_val_pairs[0]["Key2"]);
+ EXPECT_EQ("Value3", key_val_pairs[0]["Key3"]);
+ EXPECT_EQ("Value1", key_val_pairs[1]["Key1"]);
+ key_val_pairs.clear();
+ EXPECT_EQ(true, parser.Open("/proc/cpuinfo"));
+ EXPECT_EQ(true, parser.Parse(&key_val_pairs));
+}
+
+#if !defined(WEBRTC_CHROMIUM_BUILDs)
+TEST(ReadLinuxLsbRelease, ReturnsSomething) {
+ std::string str = ReadLinuxLsbRelease();
+  // ChromeOS doesn't have lsb_release.
+ // EXPECT_FALSE(str.empty());
+}
+#endif
+
+TEST(ReadLinuxUname, ReturnsSomething) {
+ std::string str = ReadLinuxUname();
+ EXPECT_FALSE(str.empty());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/linuxfdwalk.c b/chromium/third_party/webrtc/base/linuxfdwalk.c
new file mode 100644
index 00000000000..ae60cc524b4
--- /dev/null
+++ b/chromium/third_party/webrtc/base/linuxfdwalk.c
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <sys/types.h>
+#include <dirent.h>
+#include <errno.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "webrtc/base/linuxfdwalk.h"
+
+// Parses a file descriptor number in base 10, requiring the strict format used
+// in /proc/*/fd. Returns the value, or -1 if not a valid string.
+static int parse_fd(const char *s) {
+ if (!*s) {
+ // Empty string is invalid.
+ return -1;
+ }
+ int val = 0;
+ do {
+ if (*s < '0' || *s > '9') {
+ // Non-numeric characters anywhere are invalid.
+ return -1;
+ }
+ int digit = *s++ - '0';
+ val = val * 10 + digit;
+ } while (*s);
+ return val;
+}
+
+int fdwalk(void (*func)(void *, int), void *opaque) {
+ DIR *dir = opendir("/proc/self/fd");
+ if (!dir) {
+ return -1;
+ }
+ int opendirfd = dirfd(dir);
+ int parse_errors = 0;
+ struct dirent *ent;
+ // Have to clear errno to distinguish readdir() completion from failure.
+ while (errno = 0, (ent = readdir(dir)) != NULL) {
+ if (strcmp(ent->d_name, ".") == 0 ||
+ strcmp(ent->d_name, "..") == 0) {
+ continue;
+ }
+ // We avoid atoi or strtol because those are part of libc and they involve
+ // locale stuff, which is probably not safe from a post-fork context in a
+ // multi-threaded app.
+ int fd = parse_fd(ent->d_name);
+ if (fd < 0) {
+ parse_errors = 1;
+ continue;
+ }
+ if (fd != opendirfd) {
+ (*func)(opaque, fd);
+ }
+ }
+ int saved_errno = errno;
+ if (closedir(dir) < 0) {
+ if (!saved_errno) {
+ // Return the closedir error.
+ return -1;
+ }
+ // Else ignore it because we have a more relevant error to return.
+ }
+ if (saved_errno) {
+ errno = saved_errno;
+ return -1;
+ } else if (parse_errors) {
+ errno = EBADF;
+ return -1;
+ } else {
+ return 0;
+ }
+}
diff --git a/chromium/third_party/webrtc/base/linuxfdwalk.h b/chromium/third_party/webrtc/base/linuxfdwalk.h
new file mode 100644
index 00000000000..fe5a6977dc7
--- /dev/null
+++ b/chromium/third_party/webrtc/base/linuxfdwalk.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_LINUXFDWALK_H_
+#define WEBRTC_BASE_LINUXFDWALK_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Linux port of SunOS's fdwalk(3) call. It loops over all open file descriptors
+// and calls func on each one. Additionally, it is safe to use from the child
+// of a fork that hasn't exec'ed yet, so you can use it to close all open file
+// descriptors prior to exec'ing a daemon.
+// The return value is 0 if successful, or else -1 and errno is set. The
+// possible errors include any error that can be returned by opendir(),
+// readdir(), or closedir(), plus EBADF if there are problems parsing the
+// contents of /proc/self/fd.
+// The file descriptors that are enumerated will not include the file descriptor
+// used for the enumeration itself.
+int fdwalk(void (*func)(void *, int), void *opaque);
+
+#ifdef __cplusplus
+} // extern "C"
+#endif
+
+#endif // WEBRTC_BASE_LINUXFDWALK_H_
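
A sketch of the daemon use case mentioned above: a visitor that closes every inherited descriptor above stderr, to be run in the child of fork() before exec(). The function name and threshold are illustrative:

    #include <unistd.h>

    #include "webrtc/base/linuxfdwalk.h"

    // Closes every descriptor above stderr. fdwalk() already skips the
    // descriptor it uses internally for the /proc/self/fd enumeration.
    static void CloseHighFd(void *unused, int fd) {
      (void)unused;
      if (fd > STDERR_FILENO) {
        close(fd);
      }
    }

    // In the child of fork(), before exec*():
    //   fdwalk(&CloseHighFd, NULL);
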
diff --git a/chromium/third_party/webrtc/base/linuxfdwalk_unittest.cc b/chromium/third_party/webrtc/base/linuxfdwalk_unittest.cc
new file mode 100644
index 00000000000..bba48e8878f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/linuxfdwalk_unittest.cc
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <set>
+#include <sstream>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/linuxfdwalk.h"
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
+
+static const int kArbitraryLargeFdNumber = 424;
+
+static void FdCheckVisitor(void *data, int fd) {
+ std::set<int> *fds = static_cast<std::set<int> *>(data);
+ EXPECT_EQ(1U, fds->erase(fd));
+}
+
+static void FdEnumVisitor(void *data, int fd) {
+ std::set<int> *fds = static_cast<std::set<int> *>(data);
+ EXPECT_TRUE(fds->insert(fd).second);
+}
+
+// Checks that the set of open fds is exactly the given list.
+static void CheckOpenFdList(std::set<int> fds) {
+ EXPECT_EQ(0, fdwalk(&FdCheckVisitor, &fds));
+ EXPECT_EQ(0U, fds.size());
+}
+
+static void GetOpenFdList(std::set<int> *fds) {
+ fds->clear();
+ EXPECT_EQ(0, fdwalk(&FdEnumVisitor, fds));
+}
+
+TEST(LinuxFdWalk, TestFdWalk) {
+ std::set<int> fds;
+ GetOpenFdList(&fds);
+ std::ostringstream str;
+ // I have observed that the open set when starting a test is [0, 6]. Leaked
+ // fds would change that, but so can (e.g.) running under a debugger, so we
+ // can't really do an EXPECT. :(
+ str << "File descriptors open in test executable:";
+ for (std::set<int>::const_iterator i = fds.begin(); i != fds.end(); ++i) {
+ str << " " << *i;
+ }
+ LOG(LS_INFO) << str.str();
+ // Open some files.
+ int fd1 = open("/dev/null", O_RDONLY);
+ EXPECT_LE(0, fd1);
+ int fd2 = open("/dev/null", O_WRONLY);
+ EXPECT_LE(0, fd2);
+ int fd3 = open("/dev/null", O_RDWR);
+ EXPECT_LE(0, fd3);
+ int fd4 = dup2(fd3, kArbitraryLargeFdNumber);
+ EXPECT_LE(0, fd4);
+ EXPECT_TRUE(fds.insert(fd1).second);
+ EXPECT_TRUE(fds.insert(fd2).second);
+ EXPECT_TRUE(fds.insert(fd3).second);
+ EXPECT_TRUE(fds.insert(fd4).second);
+ CheckOpenFdList(fds);
+ EXPECT_EQ(0, close(fd1));
+ EXPECT_EQ(0, close(fd2));
+ EXPECT_EQ(0, close(fd3));
+ EXPECT_EQ(0, close(fd4));
+}
diff --git a/chromium/third_party/webrtc/base/linuxwindowpicker.cc b/chromium/third_party/webrtc/base/linuxwindowpicker.cc
new file mode 100644
index 00000000000..56d565e5547
--- /dev/null
+++ b/chromium/third_party/webrtc/base/linuxwindowpicker.cc
@@ -0,0 +1,818 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/linuxwindowpicker.h"
+
+#include <math.h>
+#include <string.h>
+
+#include <algorithm>
+#include <string>
+
+#include <X11/Xatom.h>
+#include <X11/extensions/Xcomposite.h>
+#include <X11/extensions/Xrender.h>
+#include <X11/Xutil.h>
+
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+// Convenience wrapper for XGetWindowProperty results.
+template <class PropertyType>
+class XWindowProperty {
+ public:
+ XWindowProperty(Display* display, Window window, Atom property)
+ : data_(NULL) {
+ const int kBitsPerByte = 8;
+ Atom actual_type;
+ int actual_format;
+ unsigned long bytes_after; // NOLINT: type required by XGetWindowProperty
+ int status = XGetWindowProperty(display, window, property, 0L, ~0L, False,
+ AnyPropertyType, &actual_type,
+ &actual_format, &size_,
+ &bytes_after, &data_);
+ succeeded_ = (status == Success);
+ if (!succeeded_) {
+ data_ = NULL; // Ensure nothing is freed.
+ } else if (sizeof(PropertyType) * kBitsPerByte != actual_format) {
+ LOG(LS_WARNING) << "Returned type size differs from "
+ "requested type size.";
+ succeeded_ = false;
+ // We still need to call XFree in this case, so leave data_ alone.
+ }
+ if (!succeeded_) {
+ size_ = 0;
+ }
+ }
+
+ ~XWindowProperty() {
+ if (data_) {
+ XFree(data_);
+ }
+ }
+
+ bool succeeded() const { return succeeded_; }
+ size_t size() const { return size_; }
+ const PropertyType* data() const {
+ return reinterpret_cast<PropertyType*>(data_);
+ }
+ PropertyType* data() {
+ return reinterpret_cast<PropertyType*>(data_);
+ }
+
+ private:
+ bool succeeded_;
+ unsigned long size_; // NOLINT: type required by XGetWindowProperty
+ unsigned char* data_;
+
+ DISALLOW_COPY_AND_ASSIGN(XWindowProperty);
+};
+
+// Stupid X11. It seems none of the synchronous return codes from X11 calls
+// are meaningful unless an asynchronous error handler is configured. This
+// RAII class registers and unregisters an X11 error handler.
+class XErrorSuppressor {
+ public:
+ explicit XErrorSuppressor(Display* display)
+ : display_(display), original_error_handler_(NULL) {
+ SuppressX11Errors();
+ }
+ ~XErrorSuppressor() {
+ UnsuppressX11Errors();
+ }
+
+ private:
+ static int ErrorHandler(Display* display, XErrorEvent* e) {
+ char buf[256];
+ XGetErrorText(display, e->error_code, buf, sizeof buf);
+ LOG(LS_WARNING) << "Received X11 error \"" << buf << "\" for request code "
+ << static_cast<unsigned int>(e->request_code);
+ return 0;
+ }
+
+ void SuppressX11Errors() {
+ XFlush(display_);
+ XSync(display_, False);
+ original_error_handler_ = XSetErrorHandler(&ErrorHandler);
+ }
+
+ void UnsuppressX11Errors() {
+ XFlush(display_);
+ XSync(display_, False);
+ XErrorHandler handler = XSetErrorHandler(original_error_handler_);
+ if (handler != &ErrorHandler) {
+ LOG(LS_WARNING) << "Unbalanced XSetErrorHandler() calls detected. "
+ << "Final error handler may not be what you expect!";
+ }
+ original_error_handler_ = NULL;
+ }
+
+ Display* display_;
+ XErrorHandler original_error_handler_;
+
+ DISALLOW_COPY_AND_ASSIGN(XErrorSuppressor);
+};
+
+// Hides all X11 specifics inside its own class. This is to avoid
+// conflicts between talk and X11 header declarations.
+class XWindowEnumerator {
+ public:
+ XWindowEnumerator()
+ : display_(NULL),
+ has_composite_extension_(false),
+ has_render_extension_(false) {
+ }
+
+ ~XWindowEnumerator() {
+ if (display_ != NULL) {
+ XCloseDisplay(display_);
+ }
+ }
+
+ bool Init() {
+ if (display_ != NULL) {
+ // Already initialized.
+ return true;
+ }
+ display_ = XOpenDisplay(NULL);
+ if (display_ == NULL) {
+ LOG(LS_ERROR) << "Failed to open display.";
+ return false;
+ }
+
+ XErrorSuppressor error_suppressor(display_);
+
+ wm_state_ = XInternAtom(display_, "WM_STATE", True);
+ net_wm_icon_ = XInternAtom(display_, "_NET_WM_ICON", False);
+
+ int event_base, error_base, major_version, minor_version;
+ if (XCompositeQueryExtension(display_, &event_base, &error_base) &&
+ XCompositeQueryVersion(display_, &major_version, &minor_version) &&
+ // XCompositeNameWindowPixmap() requires version 0.2
+ (major_version > 0 || minor_version >= 2)) {
+ has_composite_extension_ = true;
+ } else {
+ LOG(LS_INFO) << "Xcomposite extension not available or too old.";
+ }
+
+ if (XRenderQueryExtension(display_, &event_base, &error_base) &&
+ XRenderQueryVersion(display_, &major_version, &minor_version) &&
+ // XRenderSetPictureTransform() requires version 0.6
+ (major_version > 0 || minor_version >= 6)) {
+ has_render_extension_ = true;
+ } else {
+ LOG(LS_INFO) << "Xrender extension not available or too old.";
+ }
+ return true;
+ }
+
+ bool EnumerateWindows(WindowDescriptionList* descriptions) {
+ if (!Init()) {
+ return false;
+ }
+ XErrorSuppressor error_suppressor(display_);
+ int num_screens = XScreenCount(display_);
+ bool result = false;
+ for (int i = 0; i < num_screens; ++i) {
+ if (EnumerateScreenWindows(descriptions, i)) {
+        // We know we succeeded on at least one screen.
+ result = true;
+ }
+ }
+ return result;
+ }
+
+ bool EnumerateDesktops(DesktopDescriptionList* descriptions) {
+ if (!Init()) {
+ return false;
+ }
+ XErrorSuppressor error_suppressor(display_);
+ Window default_root_window = XDefaultRootWindow(display_);
+ int num_screens = XScreenCount(display_);
+ for (int i = 0; i < num_screens; ++i) {
+ Window root_window = XRootWindow(display_, i);
+ DesktopId id(DesktopId(root_window, i));
+ // TODO: Figure out an appropriate desktop title.
+ DesktopDescription desc(id, "");
+ desc.set_primary(root_window == default_root_window);
+ descriptions->push_back(desc);
+ }
+ return num_screens > 0;
+ }
+
+ bool IsVisible(const WindowId& id) {
+ if (!Init()) {
+ return false;
+ }
+ XErrorSuppressor error_suppressor(display_);
+ XWindowAttributes attr;
+ if (!XGetWindowAttributes(display_, id.id(), &attr)) {
+ LOG(LS_ERROR) << "XGetWindowAttributes() failed";
+ return false;
+ }
+ return attr.map_state == IsViewable;
+ }
+
+ bool MoveToFront(const WindowId& id) {
+ if (!Init()) {
+ return false;
+ }
+ XErrorSuppressor error_suppressor(display_);
+ unsigned int num_children;
+ Window* children;
+ Window parent;
+ Window root;
+
+ // Find root window to pass event to.
+ int status = XQueryTree(display_, id.id(), &root, &parent, &children,
+ &num_children);
+ if (status == 0) {
+ LOG(LS_WARNING) << "Failed to query for child windows.";
+ return false;
+ }
+ if (children != NULL) {
+ XFree(children);
+ }
+
+ // Move the window to front.
+ XRaiseWindow(display_, id.id());
+
+ // Some window managers (e.g., metacity in GNOME) consider it illegal to
+ // raise a window without also giving it input focus with
+ // _NET_ACTIVE_WINDOW, so XRaiseWindow() on its own isn't enough.
+ Atom atom = XInternAtom(display_, "_NET_ACTIVE_WINDOW", True);
+ if (atom != None) {
+ XEvent xev;
+ long event_mask;
+
+ xev.xclient.type = ClientMessage;
+ xev.xclient.serial = 0;
+ xev.xclient.send_event = True;
+ xev.xclient.window = id.id();
+ xev.xclient.message_type = atom;
+
+ // The format member is set to 8, 16, or 32 and specifies whether the
+ // data should be viewed as a list of bytes, shorts, or longs.
+ xev.xclient.format = 32;
+
+ xev.xclient.data.l[0] = 0;
+ xev.xclient.data.l[1] = 0;
+ xev.xclient.data.l[2] = 0;
+ xev.xclient.data.l[3] = 0;
+ xev.xclient.data.l[4] = 0;
+
+ event_mask = SubstructureRedirectMask | SubstructureNotifyMask;
+
+ XSendEvent(display_, root, False, event_mask, &xev);
+ }
+ XFlush(display_);
+ return true;
+ }
+
+ uint8* GetWindowIcon(const WindowId& id, int* width, int* height) {
+ if (!Init()) {
+ return NULL;
+ }
+ XErrorSuppressor error_suppressor(display_);
+ Atom ret_type;
+ int format;
+ unsigned long length, bytes_after, size;
+ unsigned char* data = NULL;
+
+ // Find out the size of the icon data.
+ if (XGetWindowProperty(
+ display_, id.id(), net_wm_icon_, 0, 0, False, XA_CARDINAL,
+ &ret_type, &format, &length, &size, &data) == Success &&
+ data) {
+ XFree(data);
+ } else {
+ LOG(LS_ERROR) << "Failed to get size of the icon.";
+ return NULL;
+ }
+ // Get the icon data, the format is one uint32 each for width and height,
+ // followed by the actual pixel data.
+ if (size >= 2 &&
+ XGetWindowProperty(
+ display_, id.id(), net_wm_icon_, 0, size, False, XA_CARDINAL,
+ &ret_type, &format, &length, &bytes_after, &data) == Success &&
+ data) {
+ uint32* data_ptr = reinterpret_cast<uint32*>(data);
+ int w, h;
+ w = data_ptr[0];
+ h = data_ptr[1];
+ if (size < static_cast<unsigned long>(w * h + 2)) {
+ XFree(data);
+ LOG(LS_ERROR) << "Not a vaild icon.";
+ return NULL;
+ }
+ uint8* rgba =
+ ArgbToRgba(&data_ptr[2], 0, 0, w, h, w, h, true);
+ XFree(data);
+ *width = w;
+ *height = h;
+ return rgba;
+ } else {
+ LOG(LS_ERROR) << "Failed to get window icon data.";
+ return NULL;
+ }
+ }
+
+ uint8* GetWindowThumbnail(const WindowId& id, int width, int height) {
+ if (!Init()) {
+ return NULL;
+ }
+
+ if (!has_composite_extension_) {
+ // Without the Xcomposite extension we would only get a good thumbnail if
+ // the whole window is visible on screen and not covered by any
+      // other window. This is not something we want, so instead we just
+      // bail out.
+ LOG(LS_INFO) << "No Xcomposite extension detected.";
+ return NULL;
+ }
+ XErrorSuppressor error_suppressor(display_);
+
+ Window root;
+ int x;
+ int y;
+ unsigned int src_width;
+ unsigned int src_height;
+ unsigned int border_width;
+ unsigned int depth;
+
+ // In addition to needing X11 server-side support for Xcomposite, it
+ // actually needs to be turned on for this window in order to get a good
+ // thumbnail. If the user has modern hardware/drivers but isn't using a
+ // compositing window manager, that won't be the case. Here we
+ // automatically turn it on for shareable windows so that we can get
+ // thumbnails. We used to avoid it because the transition is visually ugly,
+    // but recent window managers don't always redirect windows, which led to
+    // no thumbnails at all and a worse experience.
+
+    // Redirect drawing to an offscreen buffer (i.e., turn on compositing).
+ // X11 remembers what has requested this and will turn it off for us when
+ // we exit.
+ XCompositeRedirectWindow(display_, id.id(), CompositeRedirectAutomatic);
+ Pixmap src_pixmap = XCompositeNameWindowPixmap(display_, id.id());
+ if (!src_pixmap) {
+ // Even if the backing pixmap doesn't exist, this still should have
+ // succeeded and returned a valid handle (it just wouldn't be a handle to
+ // anything). So this is a real error path.
+ LOG(LS_ERROR) << "XCompositeNameWindowPixmap() failed";
+ return NULL;
+ }
+ if (!XGetGeometry(display_, src_pixmap, &root, &x, &y,
+ &src_width, &src_height, &border_width,
+ &depth)) {
+ // If the window does not actually have a backing pixmap, this is the path
+ // that will "fail", so it's a warning rather than an error.
+ LOG(LS_WARNING) << "XGetGeometry() failed (probably composite is not in "
+ << "use)";
+ XFreePixmap(display_, src_pixmap);
+ return NULL;
+ }
+
+ // If we get to here, then composite is in use for this window and it has a
+ // valid backing pixmap.
+
+ XWindowAttributes attr;
+ if (!XGetWindowAttributes(display_, id.id(), &attr)) {
+ LOG(LS_ERROR) << "XGetWindowAttributes() failed";
+ XFreePixmap(display_, src_pixmap);
+ return NULL;
+ }
+
+ uint8* data = GetDrawableThumbnail(src_pixmap,
+ attr.visual,
+ src_width,
+ src_height,
+ width,
+ height);
+ XFreePixmap(display_, src_pixmap);
+ return data;
+ }
+
+ int GetNumDesktops() {
+ if (!Init()) {
+ return -1;
+ }
+
+ return XScreenCount(display_);
+ }
+
+ uint8* GetDesktopThumbnail(const DesktopId& id, int width, int height) {
+ if (!Init()) {
+ return NULL;
+ }
+ XErrorSuppressor error_suppressor(display_);
+
+ Window root_window = id.id();
+ XWindowAttributes attr;
+ if (!XGetWindowAttributes(display_, root_window, &attr)) {
+ LOG(LS_ERROR) << "XGetWindowAttributes() failed";
+ return NULL;
+ }
+
+ return GetDrawableThumbnail(root_window,
+ attr.visual,
+ attr.width,
+ attr.height,
+ width,
+ height);
+ }
+
+ bool GetDesktopDimensions(const DesktopId& id, int* width, int* height) {
+ if (!Init()) {
+ return false;
+ }
+ XErrorSuppressor error_suppressor(display_);
+ XWindowAttributes attr;
+ if (!XGetWindowAttributes(display_, id.id(), &attr)) {
+ LOG(LS_ERROR) << "XGetWindowAttributes() failed";
+ return false;
+ }
+ *width = attr.width;
+ *height = attr.height;
+ return true;
+ }
+
+ private:
+ uint8* GetDrawableThumbnail(Drawable src_drawable,
+ Visual* visual,
+ int src_width,
+ int src_height,
+ int dst_width,
+ int dst_height) {
+ if (!has_render_extension_) {
+ // Without the Xrender extension we would have to read the full window and
+ // scale it down in our process. Xrender is over a decade old so we aren't
+ // going to expend effort to support that situation. We still need to
+ // check though because probably some virtual VNC displays are in this
+ // category.
+ LOG(LS_INFO) << "No Xrender extension detected.";
+ return NULL;
+ }
+
+ XRenderPictFormat* format = XRenderFindVisualFormat(display_,
+ visual);
+ if (!format) {
+ LOG(LS_ERROR) << "XRenderFindVisualFormat() failed";
+ return NULL;
+ }
+
+ // Create a picture to reference the window pixmap.
+ XRenderPictureAttributes pa;
+ pa.subwindow_mode = IncludeInferiors; // Don't clip child widgets
+ Picture src = XRenderCreatePicture(display_,
+ src_drawable,
+ format,
+ CPSubwindowMode,
+ &pa);
+ if (!src) {
+ LOG(LS_ERROR) << "XRenderCreatePicture() failed";
+ return NULL;
+ }
+
+ // Create a picture to reference the destination pixmap.
+ Pixmap dst_pixmap = XCreatePixmap(display_,
+ src_drawable,
+ dst_width,
+ dst_height,
+ format->depth);
+ if (!dst_pixmap) {
+ LOG(LS_ERROR) << "XCreatePixmap() failed";
+ XRenderFreePicture(display_, src);
+ return NULL;
+ }
+
+ Picture dst = XRenderCreatePicture(display_, dst_pixmap, format, 0, NULL);
+ if (!dst) {
+ LOG(LS_ERROR) << "XRenderCreatePicture() failed";
+ XFreePixmap(display_, dst_pixmap);
+ XRenderFreePicture(display_, src);
+ return NULL;
+ }
+
+ // Clear the background.
+ XRenderColor transparent = {0};
+ XRenderFillRectangle(display_,
+ PictOpSrc,
+ dst,
+ &transparent,
+ 0,
+ 0,
+ dst_width,
+ dst_height);
+
+ // Calculate how much we need to scale the image.
+ double scale_x = static_cast<double>(dst_width) /
+ static_cast<double>(src_width);
+ double scale_y = static_cast<double>(dst_height) /
+ static_cast<double>(src_height);
+ double scale = rtc::_min(scale_y, scale_x);
+
+ int scaled_width = round(src_width * scale);
+ int scaled_height = round(src_height * scale);
+
+ // Render the thumbnail centered on both axis.
+ int centered_x = (dst_width - scaled_width) / 2;
+ int centered_y = (dst_height - scaled_height) / 2;
+
+ // Scaling matrix
+ XTransform xform = { {
+ { XDoubleToFixed(1), XDoubleToFixed(0), XDoubleToFixed(0) },
+ { XDoubleToFixed(0), XDoubleToFixed(1), XDoubleToFixed(0) },
+ { XDoubleToFixed(0), XDoubleToFixed(0), XDoubleToFixed(scale) }
+ } };
+ XRenderSetPictureTransform(display_, src, &xform);
+
+ // Apply filter to smooth out the image.
+ XRenderSetPictureFilter(display_, src, FilterBest, NULL, 0);
+
+ // Render the image to the destination picture.
+ XRenderComposite(display_,
+ PictOpSrc,
+ src,
+ None,
+ dst,
+ 0,
+ 0,
+ 0,
+ 0,
+ centered_x,
+ centered_y,
+ scaled_width,
+ scaled_height);
+
+ // Get the pixel data from the X server. TODO: XGetImage
+ // might be slow here, compare with ShmGetImage.
+ XImage* image = XGetImage(display_,
+ dst_pixmap,
+ 0,
+ 0,
+ dst_width,
+ dst_height,
+ AllPlanes, ZPixmap);
+ uint8* data = ArgbToRgba(reinterpret_cast<uint32*>(image->data),
+ centered_x,
+ centered_y,
+ scaled_width,
+ scaled_height,
+ dst_width,
+ dst_height,
+ false);
+ XDestroyImage(image);
+ XRenderFreePicture(display_, dst);
+ XFreePixmap(display_, dst_pixmap);
+ XRenderFreePicture(display_, src);
+ return data;
+ }
+
+ uint8* ArgbToRgba(uint32* argb_data, int x, int y, int w, int h,
+ int stride_x, int stride_y, bool has_alpha) {
+ uint8* p;
+ int len = stride_x * stride_y * 4;
+ uint8* data = new uint8[len];
+ memset(data, 0, len);
+ p = data + 4 * (y * stride_x + x);
+ for (int i = 0; i < h; ++i) {
+ for (int j = 0; j < w; ++j) {
+ uint32 argb;
+ uint32 rgba;
+ argb = argb_data[stride_x * (y + i) + x + j];
+ rgba = (argb << 8) | (argb >> 24);
+ *p = rgba >> 24;
+ ++p;
+ *p = (rgba >> 16) & 0xff;
+ ++p;
+ *p = (rgba >> 8) & 0xff;
+ ++p;
+ *p = has_alpha ? rgba & 0xFF : 0xFF;
+ ++p;
+ }
+ p += (stride_x - w) * 4;
+ }
+ return data;
+ }
+
+ bool EnumerateScreenWindows(WindowDescriptionList* descriptions, int screen) {
+ Window parent;
+ Window *children;
+ int status;
+ unsigned int num_children;
+ Window root_window = XRootWindow(display_, screen);
+ status = XQueryTree(display_, root_window, &root_window, &parent, &children,
+ &num_children);
+ if (status == 0) {
+ LOG(LS_ERROR) << "Failed to query for child windows.";
+ return false;
+ }
+ for (unsigned int i = 0; i < num_children; ++i) {
+ // Iterate in reverse order to display windows from front to back.
+#ifdef CHROMEOS
+ // TODO(jhorwich): Short-term fix for crbug.com/120229: Don't need to
+ // filter, just return all windows and let the picker scan through them.
+ Window app_window = children[num_children - 1 - i];
+#else
+ Window app_window = GetApplicationWindow(children[num_children - 1 - i]);
+#endif
+ if (app_window &&
+ !LinuxWindowPicker::IsDesktopElement(display_, app_window)) {
+ std::string title;
+ if (GetWindowTitle(app_window, &title)) {
+ WindowId id(app_window);
+ WindowDescription desc(id, title);
+ descriptions->push_back(desc);
+ }
+ }
+ }
+ if (children != NULL) {
+ XFree(children);
+ }
+ return true;
+ }
+
+ bool GetWindowTitle(Window window, std::string* title) {
+ int status;
+ bool result = false;
+ XTextProperty window_name;
+ window_name.value = NULL;
+ if (window) {
+ status = XGetWMName(display_, window, &window_name);
+ if (status && window_name.value && window_name.nitems) {
+ int cnt;
+ char **list = NULL;
+ status = Xutf8TextPropertyToTextList(display_, &window_name, &list,
+ &cnt);
+ if (status >= Success && cnt && *list) {
+ if (cnt > 1) {
+ LOG(LS_INFO) << "Window has " << cnt
+ << " text properties, only using the first one.";
+ }
+ *title = *list;
+ result = true;
+ }
+ if (list != NULL) {
+ XFreeStringList(list);
+ }
+ }
+ if (window_name.value != NULL) {
+ XFree(window_name.value);
+ }
+ }
+ return result;
+ }
+
+ Window GetApplicationWindow(Window window) {
+ Window root, parent;
+ Window app_window = 0;
+ Window *children;
+ unsigned int num_children;
+ Atom type = None;
+ int format;
+ unsigned long nitems, after;
+ unsigned char *data;
+
+ int ret = XGetWindowProperty(display_, window,
+ wm_state_, 0L, 2,
+ False, wm_state_, &type, &format,
+ &nitems, &after, &data);
+ if (ret != Success) {
+ LOG(LS_ERROR) << "XGetWindowProperty failed with return code " << ret
+ << " for window " << window << ".";
+ return 0;
+ }
+ if (type != None) {
+ int64 state = static_cast<int64>(*data);
+ XFree(data);
+ return state == NormalState ? window : 0;
+ }
+ XFree(data);
+ if (!XQueryTree(display_, window, &root, &parent, &children,
+ &num_children)) {
+ LOG(LS_ERROR) << "Failed to query for child windows although window"
+ << "does not have a valid WM_STATE.";
+ return 0;
+ }
+ for (unsigned int i = 0; i < num_children; ++i) {
+ app_window = GetApplicationWindow(children[i]);
+ if (app_window) {
+ break;
+ }
+ }
+ if (children != NULL) {
+ XFree(children);
+ }
+ return app_window;
+ }
+
+ Atom wm_state_;
+ Atom net_wm_icon_;
+ Display* display_;
+ bool has_composite_extension_;
+ bool has_render_extension_;
+};
+
+LinuxWindowPicker::LinuxWindowPicker() : enumerator_(new XWindowEnumerator()) {
+}
+
+LinuxWindowPicker::~LinuxWindowPicker() {
+}
+
+bool LinuxWindowPicker::IsDesktopElement(_XDisplay* display, Window window) {
+ if (window == 0) {
+ LOG(LS_WARNING) << "Zero is never a valid window.";
+ return false;
+ }
+
+ // First look for _NET_WM_WINDOW_TYPE. The standard
+ // (http://standards.freedesktop.org/wm-spec/latest/ar01s05.html#id2760306)
+ // says this hint *should* be present on all windows, and we use the existence
+ // of _NET_WM_WINDOW_TYPE_NORMAL in the property to indicate a window is not
+ // a desktop element (that is, only "normal" windows should be shareable).
+ Atom window_type_atom = XInternAtom(display, "_NET_WM_WINDOW_TYPE", True);
+ XWindowProperty<uint32_t> window_type(display, window, window_type_atom);
+ if (window_type.succeeded() && window_type.size() > 0) {
+ Atom normal_window_type_atom = XInternAtom(
+ display, "_NET_WM_WINDOW_TYPE_NORMAL", True);
+ uint32_t* end = window_type.data() + window_type.size();
+ bool is_normal = (end != std::find(
+ window_type.data(), end, normal_window_type_atom));
+ return !is_normal;
+ }
+
+  // Fall back on using the WM_CLASS hint.
+ XClassHint class_hint;
+ Status s = XGetClassHint(display, window, &class_hint);
+ bool result = false;
+ if (s == 0) {
+ // No hints, assume this is a normal application window.
+ return result;
+ }
+ static const std::string gnome_panel("gnome-panel");
+ static const std::string desktop_window("desktop_window");
+
+ if (gnome_panel.compare(class_hint.res_name) == 0 ||
+ desktop_window.compare(class_hint.res_name) == 0) {
+ result = true;
+ }
+ XFree(class_hint.res_name);
+ XFree(class_hint.res_class);
+ return result;
+}
+
+bool LinuxWindowPicker::Init() {
+ return enumerator_->Init();
+}
+
+bool LinuxWindowPicker::GetWindowList(WindowDescriptionList* descriptions) {
+ return enumerator_->EnumerateWindows(descriptions);
+}
+
+bool LinuxWindowPicker::GetDesktopList(DesktopDescriptionList* descriptions) {
+ return enumerator_->EnumerateDesktops(descriptions);
+}
+
+bool LinuxWindowPicker::IsVisible(const WindowId& id) {
+ return enumerator_->IsVisible(id);
+}
+
+bool LinuxWindowPicker::MoveToFront(const WindowId& id) {
+ return enumerator_->MoveToFront(id);
+}
+
+
+uint8* LinuxWindowPicker::GetWindowIcon(const WindowId& id, int* width,
+ int* height) {
+ return enumerator_->GetWindowIcon(id, width, height);
+}
+
+uint8* LinuxWindowPicker::GetWindowThumbnail(const WindowId& id, int width,
+ int height) {
+ return enumerator_->GetWindowThumbnail(id, width, height);
+}
+
+int LinuxWindowPicker::GetNumDesktops() {
+ return enumerator_->GetNumDesktops();
+}
+
+uint8* LinuxWindowPicker::GetDesktopThumbnail(const DesktopId& id,
+ int width,
+ int height) {
+ return enumerator_->GetDesktopThumbnail(id, width, height);
+}
+
+bool LinuxWindowPicker::GetDesktopDimensions(const DesktopId& id, int* width,
+ int* height) {
+ return enumerator_->GetDesktopDimensions(id, width, height);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/linuxwindowpicker.h b/chromium/third_party/webrtc/base/linuxwindowpicker.h
new file mode 100644
index 00000000000..f87b15081ca
--- /dev/null
+++ b/chromium/third_party/webrtc/base/linuxwindowpicker.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_LINUXWINDOWPICKER_H_
+#define WEBRTC_BASE_LINUXWINDOWPICKER_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/windowpicker.h"
+
+// Avoid include <X11/Xlib.h>.
+struct _XDisplay;
+typedef unsigned long Window;
+
+namespace rtc {
+
+class XWindowEnumerator;
+
+class LinuxWindowPicker : public WindowPicker {
+ public:
+ LinuxWindowPicker();
+ ~LinuxWindowPicker();
+
+ static bool IsDesktopElement(_XDisplay* display, Window window);
+
+ virtual bool Init();
+ virtual bool IsVisible(const WindowId& id);
+ virtual bool MoveToFront(const WindowId& id);
+ virtual bool GetWindowList(WindowDescriptionList* descriptions);
+ virtual bool GetDesktopList(DesktopDescriptionList* descriptions);
+ virtual bool GetDesktopDimensions(const DesktopId& id, int* width,
+ int* height);
+ uint8* GetWindowIcon(const WindowId& id, int* width, int* height);
+ uint8* GetWindowThumbnail(const WindowId& id, int width, int height);
+ int GetNumDesktops();
+ uint8* GetDesktopThumbnail(const DesktopId& id, int width, int height);
+
+ private:
+ scoped_ptr<XWindowEnumerator> enumerator_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_LINUXWINDOWPICKER_H_
diff --git a/chromium/third_party/webrtc/base/linuxwindowpicker_unittest.cc b/chromium/third_party/webrtc/base/linuxwindowpicker_unittest.cc
new file mode 100644
index 00000000000..c2276ccd636
--- /dev/null
+++ b/chromium/third_party/webrtc/base/linuxwindowpicker_unittest.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/linuxwindowpicker.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/testutils.h"
+#include "webrtc/base/windowpicker.h"
+
+#if !defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID)
+#error Only for Linux
+#endif
+
+namespace rtc {
+
+TEST(LinuxWindowPickerTest, TestGetWindowList) {
+ MAYBE_SKIP_SCREENCAST_TEST();
+ LinuxWindowPicker window_picker;
+ WindowDescriptionList descriptions;
+ window_picker.Init();
+ window_picker.GetWindowList(&descriptions);
+}
+
+TEST(LinuxWindowPickerTest, TestGetDesktopList) {
+ MAYBE_SKIP_SCREENCAST_TEST();
+ LinuxWindowPicker window_picker;
+ DesktopDescriptionList descriptions;
+ EXPECT_TRUE(window_picker.Init());
+ EXPECT_TRUE(window_picker.GetDesktopList(&descriptions));
+ EXPECT_TRUE(descriptions.size() > 0);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/logging.cc b/chromium/third_party/webrtc/base/logging.cc
new file mode 100644
index 00000000000..a417ed6c29a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/logging.cc
@@ -0,0 +1,618 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_WIN)
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#define snprintf _snprintf
+#undef ERROR // wingdi.h
+#endif
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <CoreServices/CoreServices.h>
+#elif defined(WEBRTC_ANDROID)
+#include <android/log.h>
+static const char kLibjingle[] = "libjingle";
+// Android has a 1024-character limit on each log line. We use 60 chars as an
+// approximation for the header/tag portion.
+// See android/system/core/liblog/logd_write.c
+static const int kMaxLogLineSize = 1024 - 60;
+#endif // WEBRTC_MAC && !defined(WEBRTC_IOS) || WEBRTC_ANDROID
+
+#include <time.h>
+
+#include <ostream>
+#include <iomanip>
+#include <limits.h>
+#include <vector>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+/////////////////////////////////////////////////////////////////////////////
+// Constant Labels
+/////////////////////////////////////////////////////////////////////////////
+
+const char * FindLabel(int value, const ConstantLabel entries[]) {
+ for (int i = 0; entries[i].label; ++i) {
+ if (value == entries[i].value) {
+ return entries[i].label;
+ }
+ }
+ return 0;
+}
+
+std::string ErrorName(int err, const ConstantLabel * err_table) {
+ if (err == 0)
+ return "No error";
+
+ if (err_table != 0) {
+ if (const char * value = FindLabel(err, err_table))
+ return value;
+ }
+
+ char buffer[16];
+ snprintf(buffer, sizeof(buffer), "0x%08x", err);
+ return buffer;
+}
+
+/////////////////////////////////////////////////////////////////////////////
+// LogMessage
+/////////////////////////////////////////////////////////////////////////////
+
+const int LogMessage::NO_LOGGING = LS_ERROR + 1;
+
+#if _DEBUG
+static const int LOG_DEFAULT = LS_INFO;
+#else // !_DEBUG
+static const int LOG_DEFAULT = LogMessage::NO_LOGGING;
+#endif // !_DEBUG
+
+// Global lock for log subsystem, only needed to serialize access to streams_.
+CriticalSection LogMessage::crit_;
+
+// By default, release builds don't log, debug builds at info level
+int LogMessage::min_sev_ = LOG_DEFAULT;
+int LogMessage::dbg_sev_ = LOG_DEFAULT;
+
+// Don't bother printing context for the ubiquitous INFO log messages
+int LogMessage::ctx_sev_ = LS_WARNING;
+
+// The list of logging streams currently configured.
+// Note: we explicitly do not clean this up, because of the uncertain ordering
+// of destructors at program exit. Let the person who sets the stream trigger
+// cleanup by setting to NULL, or let it leak (safe at program exit).
+LogMessage::StreamList LogMessage::streams_;
+
+// Boolean options default to false (0)
+bool LogMessage::thread_, LogMessage::timestamp_;
+
+// If we're in diagnostic mode, we'll be explicitly set that way; default=false.
+bool LogMessage::is_diagnostic_mode_ = false;
+
+LogMessage::LogMessage(const char* file, int line, LoggingSeverity sev,
+ LogErrorContext err_ctx, int err, const char* module)
+ : severity_(sev),
+ warn_slow_logs_delay_(WARN_SLOW_LOGS_DELAY) {
+ if (timestamp_) {
+ uint32 time = TimeSince(LogStartTime());
+ // Also ensure WallClockStartTime is initialized, so that it matches
+ // LogStartTime.
+ WallClockStartTime();
+ print_stream_ << "[" << std::setfill('0') << std::setw(3) << (time / 1000)
+ << ":" << std::setw(3) << (time % 1000) << std::setfill(' ')
+ << "] ";
+ }
+
+ if (thread_) {
+#if defined(WEBRTC_WIN)
+ DWORD id = GetCurrentThreadId();
+ print_stream_ << "[" << std::hex << id << std::dec << "] ";
+#endif // WEBRTC_WIN
+ }
+
+ if (severity_ >= ctx_sev_) {
+ print_stream_ << Describe(sev) << "(" << DescribeFile(file)
+ << ":" << line << "): ";
+ }
+
+ if (err_ctx != ERRCTX_NONE) {
+ std::ostringstream tmp;
+ tmp << "[0x" << std::setfill('0') << std::hex << std::setw(8) << err << "]";
+ switch (err_ctx) {
+ case ERRCTX_ERRNO:
+ tmp << " " << strerror(err);
+ break;
+#if defined(WEBRTC_WIN)
+ case ERRCTX_HRESULT: {
+ char msgbuf[256];
+ DWORD flags = FORMAT_MESSAGE_FROM_SYSTEM;
+ HMODULE hmod = GetModuleHandleA(module);
+ if (hmod)
+ flags |= FORMAT_MESSAGE_FROM_HMODULE;
+ if (DWORD len = FormatMessageA(
+ flags, hmod, err,
+ MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
+ msgbuf, sizeof(msgbuf) / sizeof(msgbuf[0]), NULL)) {
+ while ((len > 0) &&
+ isspace(static_cast<unsigned char>(msgbuf[len-1]))) {
+ msgbuf[--len] = 0;
+ }
+ tmp << " " << msgbuf;
+ }
+ break;
+ }
+#endif // WEBRTC_WIN
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ case ERRCTX_OSSTATUS: {
+ tmp << " " << nonnull(GetMacOSStatusErrorString(err), "Unknown error");
+ if (const char* desc = GetMacOSStatusCommentString(err)) {
+ tmp << ": " << desc;
+ }
+ break;
+ }
+#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
+ default:
+ break;
+ }
+ extra_ = tmp.str();
+ }
+}
+
+LogMessage::~LogMessage() {
+ if (!extra_.empty())
+ print_stream_ << " : " << extra_;
+ print_stream_ << std::endl;
+
+ const std::string& str = print_stream_.str();
+ if (severity_ >= dbg_sev_) {
+ OutputToDebug(str, severity_);
+ }
+
+ uint32 before = Time();
+ // Must lock streams_ before accessing
+ CritScope cs(&crit_);
+ for (StreamList::iterator it = streams_.begin(); it != streams_.end(); ++it) {
+ if (severity_ >= it->second) {
+ OutputToStream(it->first, str);
+ }
+ }
+ uint32 delay = TimeSince(before);
+ if (delay >= warn_slow_logs_delay_) {
+ LogMessage slow_log_warning =
+ rtc::LogMessage(__FILE__, __LINE__, LS_WARNING);
+ // If our warning is slow, we don't want to warn about it, because
+ // that would lead to infinite recursion. So, give a really big
+ // number for the delay threshold.
+ slow_log_warning.warn_slow_logs_delay_ = UINT_MAX;
+ slow_log_warning.stream() << "Slow log: took " << delay << "ms to write "
+ << str.size() << " bytes.";
+ }
+}
+
+uint32 LogMessage::LogStartTime() {
+ static const uint32 g_start = Time();
+ return g_start;
+}
+
+uint32 LogMessage::WallClockStartTime() {
+ static const uint32 g_start_wallclock = time(NULL);
+ return g_start_wallclock;
+}
+
+void LogMessage::LogContext(int min_sev) {
+ ctx_sev_ = min_sev;
+}
+
+void LogMessage::LogThreads(bool on) {
+ thread_ = on;
+}
+
+void LogMessage::LogTimestamps(bool on) {
+ timestamp_ = on;
+}
+
+void LogMessage::LogToDebug(int min_sev) {
+ dbg_sev_ = min_sev;
+ UpdateMinLogSeverity();
+}
+
+void LogMessage::LogToStream(StreamInterface* stream, int min_sev) {
+ CritScope cs(&crit_);
+ // Discard and delete all previously installed streams
+ for (StreamList::iterator it = streams_.begin(); it != streams_.end(); ++it) {
+ delete it->first;
+ }
+ streams_.clear();
+ // Install the new stream, if specified
+ if (stream) {
+ AddLogToStream(stream, min_sev);
+ }
+}
+
+int LogMessage::GetLogToStream(StreamInterface* stream) {
+ CritScope cs(&crit_);
+ int sev = NO_LOGGING;
+ for (StreamList::iterator it = streams_.begin(); it != streams_.end(); ++it) {
+ if (!stream || stream == it->first) {
+ sev = _min(sev, it->second);
+ }
+ }
+ return sev;
+}
+
+void LogMessage::AddLogToStream(StreamInterface* stream, int min_sev) {
+ CritScope cs(&crit_);
+ streams_.push_back(std::make_pair(stream, min_sev));
+ UpdateMinLogSeverity();
+}
+
+void LogMessage::RemoveLogToStream(StreamInterface* stream) {
+ CritScope cs(&crit_);
+ for (StreamList::iterator it = streams_.begin(); it != streams_.end(); ++it) {
+ if (stream == it->first) {
+ streams_.erase(it);
+ break;
+ }
+ }
+ UpdateMinLogSeverity();
+}
+
+void LogMessage::ConfigureLogging(const char* params, const char* filename) {
+ int current_level = LS_VERBOSE;
+ int debug_level = GetLogToDebug();
+ int file_level = GetLogToStream();
+
+ std::vector<std::string> tokens;
+ tokenize(params, ' ', &tokens);
+
+ for (size_t i = 0; i < tokens.size(); ++i) {
+ if (tokens[i].empty())
+ continue;
+
+ // Logging features
+ if (tokens[i] == "tstamp") {
+ LogTimestamps();
+ } else if (tokens[i] == "thread") {
+ LogThreads();
+
+ // Logging levels
+ } else if (tokens[i] == "sensitive") {
+ current_level = LS_SENSITIVE;
+ } else if (tokens[i] == "verbose") {
+ current_level = LS_VERBOSE;
+ } else if (tokens[i] == "info") {
+ current_level = LS_INFO;
+ } else if (tokens[i] == "warning") {
+ current_level = LS_WARNING;
+ } else if (tokens[i] == "error") {
+ current_level = LS_ERROR;
+ } else if (tokens[i] == "none") {
+ current_level = NO_LOGGING;
+
+ // Logging targets
+ } else if (tokens[i] == "file") {
+ file_level = current_level;
+ } else if (tokens[i] == "debug") {
+ debug_level = current_level;
+ }
+ }
+
+#if defined(WEBRTC_WIN)
+ if ((NO_LOGGING != debug_level) && !::IsDebuggerPresent()) {
+ // First, attempt to attach to our parent's console... so if you invoke
+ // from the command line, we'll see the output there. Otherwise, create
+ // our own console window.
+ // Note: These methods fail if a console already exists, which is fine.
+ bool success = false;
+ typedef BOOL (WINAPI* PFN_AttachConsole)(DWORD);
+ if (HINSTANCE kernel32 = ::LoadLibrary(L"kernel32.dll")) {
+ // AttachConsole is defined on WinXP+.
+ if (PFN_AttachConsole attach_console = reinterpret_cast<PFN_AttachConsole>
+ (::GetProcAddress(kernel32, "AttachConsole"))) {
+ success = (FALSE != attach_console(ATTACH_PARENT_PROCESS));
+ }
+ ::FreeLibrary(kernel32);
+ }
+ if (!success) {
+ ::AllocConsole();
+ }
+ }
+#endif // WEBRTC_WIN
+
+ LogToDebug(debug_level);
+
+#if !defined(__native_client__) // No logging to file in NaCl.
+ scoped_ptr<FileStream> stream;
+ if (NO_LOGGING != file_level) {
+ stream.reset(new FileStream);
+ if (!stream->Open(filename, "wb", NULL) || !stream->DisableBuffering()) {
+ stream.reset();
+ }
+ }
+
+ LogToStream(stream.release(), file_level);
+#endif
+}
+
+int LogMessage::ParseLogSeverity(const std::string& value) {
+ int level = NO_LOGGING;
+ if (value == "LS_SENSITIVE") {
+ level = LS_SENSITIVE;
+ } else if (value == "LS_VERBOSE") {
+ level = LS_VERBOSE;
+ } else if (value == "LS_INFO") {
+ level = LS_INFO;
+ } else if (value == "LS_WARNING") {
+ level = LS_WARNING;
+ } else if (value == "LS_ERROR") {
+ level = LS_ERROR;
+ } else if (isdigit(value[0])) {
+ level = atoi(value.c_str()); // NOLINT
+ }
+ return level;
+}
+
+void LogMessage::UpdateMinLogSeverity() {
+ int min_sev = dbg_sev_;
+ for (StreamList::iterator it = streams_.begin(); it != streams_.end(); ++it) {
+ min_sev = _min(dbg_sev_, it->second);
+ }
+ min_sev_ = min_sev;
+}
+
+const char* LogMessage::Describe(LoggingSeverity sev) {
+ switch (sev) {
+ case LS_SENSITIVE: return "Sensitive";
+ case LS_VERBOSE: return "Verbose";
+ case LS_INFO: return "Info";
+ case LS_WARNING: return "Warning";
+ case LS_ERROR: return "Error";
+ default: return "<unknown>";
+ }
+}
+
+const char* LogMessage::DescribeFile(const char* file) {
+ const char* end1 = ::strrchr(file, '/');
+ const char* end2 = ::strrchr(file, '\\');
+ if (!end1 && !end2)
+ return file;
+ else
+ return (end1 > end2) ? end1 + 1 : end2 + 1;
+}
+
+void LogMessage::OutputToDebug(const std::string& str,
+ LoggingSeverity severity) {
+ bool log_to_stderr = true;
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) && (!defined(DEBUG) || defined(NDEBUG))
+ // On the Mac, all stderr output goes to the Console log and causes clutter.
+ // So in opt builds, don't log to stderr unless the user specifically sets
+ // a preference to do so.
+ CFStringRef key = CFStringCreateWithCString(kCFAllocatorDefault,
+ "logToStdErr",
+ kCFStringEncodingUTF8);
+ CFStringRef domain = CFBundleGetIdentifier(CFBundleGetMainBundle());
+ if (key != NULL && domain != NULL) {
+ Boolean exists_and_is_valid;
+ Boolean should_log =
+ CFPreferencesGetAppBooleanValue(key, domain, &exists_and_is_valid);
+ // If the key doesn't exist or is invalid or is false, we will not log to
+ // stderr.
+ log_to_stderr = exists_and_is_valid && should_log;
+ }
+ if (key != NULL) {
+ CFRelease(key);
+ }
+#endif
+#if defined(WEBRTC_WIN)
+ // Always log to the debugger.
+ // Perhaps stderr should be controlled by a preference, as on Mac?
+ OutputDebugStringA(str.c_str());
+ if (log_to_stderr) {
+ // This handles dynamically allocated consoles, too.
+ if (HANDLE error_handle = ::GetStdHandle(STD_ERROR_HANDLE)) {
+ log_to_stderr = false;
+ DWORD written = 0;
+ ::WriteFile(error_handle, str.data(), static_cast<DWORD>(str.size()),
+ &written, 0);
+ }
+ }
+#endif // WEBRTC_WIN
+#if defined(WEBRTC_ANDROID)
+ // Android's logging facility uses severity to log messages but we
+ // need to map libjingle's severity levels to Android ones first.
+ // Also write to stderr, which may be available to executables started
+ // from the shell.
+ int prio;
+ switch (severity) {
+ case LS_SENSITIVE:
+ __android_log_write(ANDROID_LOG_INFO, kLibjingle, "SENSITIVE");
+ if (log_to_stderr) {
+ fprintf(stderr, "SENSITIVE");
+ fflush(stderr);
+ }
+ return;
+ case LS_VERBOSE:
+ prio = ANDROID_LOG_VERBOSE;
+ break;
+ case LS_INFO:
+ prio = ANDROID_LOG_INFO;
+ break;
+ case LS_WARNING:
+ prio = ANDROID_LOG_WARN;
+ break;
+ case LS_ERROR:
+ prio = ANDROID_LOG_ERROR;
+ break;
+ default:
+ prio = ANDROID_LOG_UNKNOWN;
+ }
+
+ int size = str.size();
+ int line = 0;
+ int idx = 0;
+ const int max_lines = size / kMaxLogLineSize + 1;
+ if (max_lines == 1) {
+ __android_log_print(prio, kLibjingle, "%.*s", size, str.c_str());
+ } else {
+ while (size > 0) {
+ const int len = std::min(size, kMaxLogLineSize);
+ // Use the size of the string in the format (str may have \0 in the
+ // middle).
+ __android_log_print(prio, kLibjingle, "[%d/%d] %.*s",
+ line + 1, max_lines,
+ len, str.c_str() + idx);
+ idx += len;
+ size -= len;
+ ++line;
+ }
+ }
+#endif // WEBRTC_ANDROID
+ if (log_to_stderr) {
+ fprintf(stderr, "%s", str.c_str());
+ fflush(stderr);
+ }
+}
+
+void LogMessage::OutputToStream(StreamInterface* stream,
+ const std::string& str) {
+ // If write isn't fully successful, what are we going to do, log it? :)
+ stream->WriteAll(str.data(), str.size(), NULL, NULL);
+}
+
+//////////////////////////////////////////////////////////////////////
+// Logging Helpers
+//////////////////////////////////////////////////////////////////////
+
+void LogMultiline(LoggingSeverity level, const char* label, bool input,
+ const void* data, size_t len, bool hex_mode,
+ LogMultilineState* state) {
+ if (!LOG_CHECK_LEVEL_V(level))
+ return;
+
+ const char * direction = (input ? " << " : " >> ");
+
+ // NULL data means to flush our count of unprintable characters.
+ if (!data) {
+ if (state && state->unprintable_count_[input]) {
+ LOG_V(level) << label << direction << "## "
+ << state->unprintable_count_[input]
+ << " consecutive unprintable ##";
+ state->unprintable_count_[input] = 0;
+ }
+ return;
+ }
+
+ // The ctype classification functions want unsigned chars.
+ const unsigned char* udata = static_cast<const unsigned char*>(data);
+
+ if (hex_mode) {
+ const size_t LINE_SIZE = 24;
+ char hex_line[LINE_SIZE * 9 / 4 + 2], asc_line[LINE_SIZE + 1];
+ while (len > 0) {
+ memset(asc_line, ' ', sizeof(asc_line));
+ memset(hex_line, ' ', sizeof(hex_line));
+ size_t line_len = _min(len, LINE_SIZE);
+ for (size_t i = 0; i < line_len; ++i) {
+ unsigned char ch = udata[i];
+ asc_line[i] = isprint(ch) ? ch : '.';
+ hex_line[i*2 + i/4] = hex_encode(ch >> 4);
+ hex_line[i*2 + i/4 + 1] = hex_encode(ch & 0xf);
+ }
+ asc_line[sizeof(asc_line)-1] = 0;
+ hex_line[sizeof(hex_line)-1] = 0;
+ LOG_V(level) << label << direction
+ << asc_line << " " << hex_line << " ";
+ udata += line_len;
+ len -= line_len;
+ }
+ return;
+ }
+
+ size_t consecutive_unprintable = state ? state->unprintable_count_[input] : 0;
+
+ const unsigned char* end = udata + len;
+ while (udata < end) {
+ const unsigned char* line = udata;
+ const unsigned char* end_of_line = strchrn<unsigned char>(udata,
+ end - udata,
+ '\n');
+ if (!end_of_line) {
+ udata = end_of_line = end;
+ } else {
+ udata = end_of_line + 1;
+ }
+
+ bool is_printable = true;
+
+ // If we are in unprintable mode, we need to see a line of at least
+ // kMinPrintableLine characters before we'll switch back.
+ const ptrdiff_t kMinPrintableLine = 4;
+ if (consecutive_unprintable && ((end_of_line - line) < kMinPrintableLine)) {
+ is_printable = false;
+ } else {
+ // Determine if the line contains only whitespace and printable
+ // characters.
+ bool is_entirely_whitespace = true;
+ for (const unsigned char* pos = line; pos < end_of_line; ++pos) {
+ if (isspace(*pos))
+ continue;
+ is_entirely_whitespace = false;
+ if (!isprint(*pos)) {
+ is_printable = false;
+ break;
+ }
+ }
+ // Treat an empty line following unprintable data as unprintable.
+ if (consecutive_unprintable && is_entirely_whitespace) {
+ is_printable = false;
+ }
+ }
+ if (!is_printable) {
+ consecutive_unprintable += (udata - line);
+ continue;
+ }
+ // Print out the current line, but prefix with a count of prior unprintable
+ // characters.
+ if (consecutive_unprintable) {
+ LOG_V(level) << label << direction << "## " << consecutive_unprintable
+ << " consecutive unprintable ##";
+ consecutive_unprintable = 0;
+ }
+ // Strip off trailing whitespace.
+ while ((end_of_line > line) && isspace(*(end_of_line-1))) {
+ --end_of_line;
+ }
+ // Filter out any private data
+ std::string substr(reinterpret_cast<const char*>(line), end_of_line - line);
+ std::string::size_type pos_private = substr.find("Email");
+ if (pos_private == std::string::npos) {
+ pos_private = substr.find("Passwd");
+ }
+ if (pos_private == std::string::npos) {
+ LOG_V(level) << label << direction << substr;
+ } else {
+ LOG_V(level) << label << direction << "## omitted for privacy ##";
+ }
+ }
+
+ if (state) {
+ state->unprintable_count_[input] = consecutive_unprintable;
+ }
+}
+
+//////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/logging.h b/chromium/third_party/webrtc/base/logging.h
new file mode 100644
index 00000000000..91d61b3e9a9
--- /dev/null
+++ b/chromium/third_party/webrtc/base/logging.h
@@ -0,0 +1,387 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// LOG(...) provides an ostream target that can be used to send formatted
+// output to a variety of logging targets, such as debugger console, stderr,
+// file, or any StreamInterface.
+// The severity level passed as the first argument to the LOGging
+// functions is used as a filter, to limit the verbosity of the logging.
+// Static members of LogMessage documented below are used to control the
+// verbosity and target of the output.
+// There are several variations on the LOG macro which facilitate logging
+// of common error conditions, detailed below.
+
+// LOG(sev) logs the given stream at severity "sev", which must be a
+// compile-time constant of the LoggingSeverity type, without the namespace
+// prefix.
+// LOG_V(sev) Like LOG(), but sev is a run-time variable of the LoggingSeverity
+// type (basically, it just doesn't prepend the namespace).
+// LOG_F(sev) Like LOG(), but includes the name of the current function.
+// LOG_T(sev) Like LOG(), but includes the this pointer.
+// LOG_T_F(sev) Like LOG_F(), but includes the this pointer.
+// LOG_GLE(M)(sev [, mod]) attempts to add a string description of the
+// HRESULT returned by GetLastError. The "M" variant allows searching of a
+// DLL's string table for the error description.
+// LOG_ERRNO(sev) attempts to add a string description of an errno-derived
+// error. errno and associated facilities exist on both Windows and POSIX,
+// but on Windows they only apply to the C/C++ runtime.
+// LOG_ERR(sev) is an alias for the platform's normal error system, i.e. _GLE on
+// Windows and _ERRNO on POSIX.
+// (The above three also all have _EX versions that let you specify the error
+// code, rather than using the last one.)
+// LOG_E(sev, ctx, err, ...) logs a detailed error interpreted using the
+// specified context.
+// LOG_CHECK_LEVEL(sev) (and LOG_CHECK_LEVEL_V(sev)) can be used as a test
+// before performing expensive or sensitive operations whose sole purpose is
+// to output logging data at the desired level.
+// Lastly, PLOG(sev, err) is an alias for LOG_ERR_EX.
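+//
+// For example (an illustrative sketch; |addr| and |err| are placeholders):
+//   LOG(LS_INFO) << "Connected to " << addr;
+//   LOG_F(LS_WARNING) << "retrying";            // prefixed with function name
+//   LOG_ERR(LS_ERROR) << "connect() failed";    // appends errno/GetLastError
+//   LOG_E(LS_ERROR, EN, err) << "read failed";  // explicit errno-style code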
+
+#ifndef WEBRTC_BASE_LOGGING_H_
+#define WEBRTC_BASE_LOGGING_H_
+
+#ifdef HAVE_CONFIG_H
+#include "config.h" // NOLINT
+#endif
+
+#include <list>
+#include <sstream>
+#include <string>
+#include <utility>
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/criticalsection.h"
+
+namespace rtc {
+
+class StreamInterface;
+
+///////////////////////////////////////////////////////////////////////////////
+// ConstantLabel can be used to easily generate string names from constant
+// values. This can be useful for logging descriptive names of error messages.
+// Usage:
+// const ConstantLabel LIBRARY_ERRORS[] = {
+// KLABEL(SOME_ERROR),
+// KLABEL(SOME_OTHER_ERROR),
+// ...
+// LASTLABEL
+// }
+//
+// int err = LibraryFunc();
+// LOG(LS_ERROR) << "LibraryFunc returned: "
+// << ErrorName(err, LIBRARY_ERRORS);
+
+struct ConstantLabel { int value; const char * label; };
+#define KLABEL(x) { x, #x }
+#define TLABEL(x, y) { x, y }
+#define LASTLABEL { 0, 0 }
+
+const char * FindLabel(int value, const ConstantLabel entries[]);
+std::string ErrorName(int err, const ConstantLabel* err_table);
+
+//////////////////////////////////////////////////////////////////////
+
+// Note that the non-standard LoggingSeverity aliases exist because they are
+// still in broad use. The meanings of the levels are:
+// LS_SENSITIVE: Information which should only be logged with the consent
+// of the user, due to privacy concerns.
+// LS_VERBOSE: This level is for data which we do not want to appear in the
+// normal debug log, but should appear in diagnostic logs.
+// LS_INFO: Chatty level used in debugging for all sorts of things, the default
+// in debug builds.
+// LS_WARNING: Something that may warrant investigation.
+// LS_ERROR: Something that should not have occurred.
+enum LoggingSeverity { LS_SENSITIVE, LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR,
+ INFO = LS_INFO,
+ WARNING = LS_WARNING,
+ LERROR = LS_ERROR };
+
+// LogErrorContext assists in interpreting the meaning of an error value.
+enum LogErrorContext {
+ ERRCTX_NONE,
+ ERRCTX_ERRNO, // System-local errno
+ ERRCTX_HRESULT, // Windows HRESULT
+ ERRCTX_OSSTATUS, // MacOS OSStatus
+
+ // Abbreviations for LOG_E macro
+ ERRCTX_EN = ERRCTX_ERRNO, // LOG_E(sev, EN, x)
+ ERRCTX_HR = ERRCTX_HRESULT, // LOG_E(sev, HR, x)
+ ERRCTX_OS = ERRCTX_OSSTATUS, // LOG_E(sev, OS, x)
+};
+
+class LogMessage {
+ public:
+ static const int NO_LOGGING;
+ static const uint32 WARN_SLOW_LOGS_DELAY = 50; // ms
+
+ LogMessage(const char* file, int line, LoggingSeverity sev,
+ LogErrorContext err_ctx = ERRCTX_NONE, int err = 0,
+ const char* module = NULL);
+ ~LogMessage();
+
+ static inline bool Loggable(LoggingSeverity sev) { return (sev >= min_sev_); }
+ std::ostream& stream() { return print_stream_; }
+
+ // Returns the time at which this function was called for the first time.
+ // The time will be used as the logging start time.
+ // If this is not called externally, the LogMessage ctor also calls it, in
+ // which case the logging start time will be the time at which the first
+ // LogMessage instance is created.
+ static uint32 LogStartTime();
+
+ // Returns the wall clock equivalent of |LogStartTime|, in seconds from the
+ // epoch.
+ static uint32 WallClockStartTime();
+
+ // These are attributes which apply to all logging channels
+ // LogContext: Display the file and line number of the message
+ static void LogContext(int min_sev);
+ // LogThreads: Display the thread identifier of the current thread
+ static void LogThreads(bool on = true);
+ // LogTimestamps: Display the elapsed time of the program
+ static void LogTimestamps(bool on = true);
+
+ // These are the available logging channels
+ // Debug: Debug console on Windows, otherwise stderr
+ static void LogToDebug(int min_sev);
+ static int GetLogToDebug() { return dbg_sev_; }
+
+ // Stream: Any non-blocking stream interface. LogMessage takes ownership of
+ // the stream. Multiple streams may be specified by using AddLogToStream.
+ // LogToStream is retained for backwards compatibility; when invoked, it
+ // will discard any previously set streams and install the specified stream.
+ // GetLogToStream gets the severity for the specified stream, or if none
+ // is specified, the minimum stream severity.
+ // RemoveLogToStream removes the specified stream, without destroying it.
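+ //
+ // For example (a sketch; StringStream is from webrtc/base/stream.h):
+ //   std::string log_text;
+ //   StringStream stream(log_text);
+ //   LogMessage::AddLogToStream(&stream, LS_INFO);
+ //   ...
+ //   LogMessage::RemoveLogToStream(&stream);  // |stream| is not deleted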
+ static void LogToStream(StreamInterface* stream, int min_sev);
+ static int GetLogToStream(StreamInterface* stream = NULL);
+ static void AddLogToStream(StreamInterface* stream, int min_sev);
+ static void RemoveLogToStream(StreamInterface* stream);
+
+ // Testing against MinLogSeverity allows code to avoid potentially expensive
+ // logging operations by pre-checking the logging level.
+ static int GetMinLogSeverity() { return min_sev_; }
+
+ static void SetDiagnosticMode(bool f) { is_diagnostic_mode_ = f; }
+ static bool IsDiagnosticMode() { return is_diagnostic_mode_; }
+
+ // Parses the provided parameter stream to configure the options above.
+ // Useful for configuring logging from the command line. If file logging
+ // is enabled, it is output to the specified filename.
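+ //
+ // For example (a sketch of one possible parameter string):
+ //   LogMessage::ConfigureLogging("tstamp info debug verbose file", "app.log");
+ // enables timestamps, sends LS_INFO and above to the debug target, and writes
+ // LS_VERBOSE and above to "app.log"; severity tokens apply to the target
+ // tokens that follow them.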
+ static void ConfigureLogging(const char* params, const char* filename);
+
+ // Convert the string to a LS_ value; also accept numeric values.
+ static int ParseLogSeverity(const std::string& value);
+
+ private:
+ typedef std::list<std::pair<StreamInterface*, int> > StreamList;
+
+ // Updates min_sev_ appropriately when debug sinks change.
+ static void UpdateMinLogSeverity();
+
+ // These assist in formatting some parts of the debug output.
+ static const char* Describe(LoggingSeverity sev);
+ static const char* DescribeFile(const char* file);
+
+ // These write out the actual log messages.
+ static void OutputToDebug(const std::string& msg, LoggingSeverity severity);
+ static void OutputToStream(StreamInterface* stream, const std::string& msg);
+
+ // The ostream that buffers the formatted message before output
+ std::ostringstream print_stream_;
+
+ // The severity level of this message
+ LoggingSeverity severity_;
+
+ // String data generated in the constructor, that should be appended to
+ // the message before output.
+ std::string extra_;
+
+ // If the time it takes to write to a stream exceeds this, log one
+ // additional warning about it.
+ uint32 warn_slow_logs_delay_;
+
+ // Global lock for the logging subsystem
+ static CriticalSection crit_;
+
+ // dbg_sev_ is the threshold for the debug output target.
+ // min_sev_ is the minimum (most verbose) of those levels, and is used
+ // as a short-circuit in the logging macros to identify messages that won't
+ // be logged.
+ // ctx_sev_ is the minimum level at which file context is displayed
+ static int min_sev_, dbg_sev_, ctx_sev_;
+
+ // The output streams and their associated severities
+ static StreamList streams_;
+
+ // Flags for formatting options
+ static bool thread_, timestamp_;
+
+ // are we in diagnostic mode (as defined by the app)?
+ static bool is_diagnostic_mode_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(LogMessage);
+};
+
+//////////////////////////////////////////////////////////////////////
+// Logging Helpers
+//////////////////////////////////////////////////////////////////////
+
+class LogMultilineState {
+ public:
+ size_t unprintable_count_[2];
+ LogMultilineState() {
+ unprintable_count_[0] = unprintable_count_[1] = 0;
+ }
+};
+
+// When possible, pass the optional |state| variable to track various data
+// across multiple calls to LogMultiline. Otherwise, pass NULL.
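+// For example (a sketch; |buf| and |len| are placeholders):
+//   LogMultilineState state;
+//   LogMultiline(LS_VERBOSE, "tcp", true, buf, len, true, &state);  // hex dump
+//   LogMultiline(LS_VERBOSE, "tcp", true, NULL, 0, true, &state);   // flush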
+void LogMultiline(LoggingSeverity level, const char* label, bool input,
+ const void* data, size_t len, bool hex_mode,
+ LogMultilineState* state);
+
+//////////////////////////////////////////////////////////////////////
+// Macros which automatically disable logging when LOGGING == 0
+//////////////////////////////////////////////////////////////////////
+
+// If LOGGING is not explicitly defined, default to enabled in debug mode
+#if !defined(LOGGING)
+#if defined(_DEBUG) && !defined(NDEBUG)
+#define LOGGING 1
+#else
+#define LOGGING 0
+#endif
+#endif // !defined(LOGGING)
+
+#ifndef LOG
+#if LOGGING
+
+// The following non-obvious technique for implementation of a
+// conditional log stream was stolen from google3/base/logging.h.
+
+// This class is used to explicitly ignore values in the conditional
+// logging macros. This avoids compiler warnings like "value computed
+// is not used" and "statement has no effect".
+
+class LogMessageVoidify {
+ public:
+ LogMessageVoidify() { }
+ // This has to be an operator with a precedence lower than << but
+ // higher than ?:
+ void operator&(std::ostream&) { }
+};
+
+#define LOG_SEVERITY_PRECONDITION(sev) \
+ !(rtc::LogMessage::Loggable(sev)) \
+ ? (void) 0 \
+ : rtc::LogMessageVoidify() &
+
+#define LOG(sev) \
+ LOG_SEVERITY_PRECONDITION(rtc::sev) \
+ rtc::LogMessage(__FILE__, __LINE__, rtc::sev).stream()
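+
+// For example, LOG(LS_INFO) << "hello" expands (roughly) to
+//   !(rtc::LogMessage::Loggable(rtc::LS_INFO))
+//       ? (void) 0
+//       : rtc::LogMessageVoidify() &
+//             rtc::LogMessage(__FILE__, __LINE__, rtc::LS_INFO).stream()
+//                 << "hello";
+// so neither the LogMessage temporary nor the streamed arguments are
+// evaluated when the severity is filtered out.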
+
+// The _V version is for when a variable is passed in. It doesn't do the
+// namespace concatenation.
+#define LOG_V(sev) \
+ LOG_SEVERITY_PRECONDITION(sev) \
+ rtc::LogMessage(__FILE__, __LINE__, sev).stream()
+
+// The _F version prefixes the message with the current function name.
+#if (defined(__GNUC__) && defined(_DEBUG)) || defined(WANT_PRETTY_LOG_F)
+#define LOG_F(sev) LOG(sev) << __PRETTY_FUNCTION__ << ": "
+#define LOG_T_F(sev) LOG(sev) << this << ": " << __PRETTY_FUNCTION__ << ": "
+#else
+#define LOG_F(sev) LOG(sev) << __FUNCTION__ << ": "
+#define LOG_T_F(sev) LOG(sev) << this << ": " << __FUNCTION__ << ": "
+#endif
+
+#define LOG_CHECK_LEVEL(sev) \
+ rtc::LogCheckLevel(rtc::sev)
+#define LOG_CHECK_LEVEL_V(sev) \
+ rtc::LogCheckLevel(sev)
+inline bool LogCheckLevel(LoggingSeverity sev) {
+ return (LogMessage::GetMinLogSeverity() <= sev);
+}
+
+#define LOG_E(sev, ctx, err, ...) \
+ LOG_SEVERITY_PRECONDITION(rtc::sev) \
+ rtc::LogMessage(__FILE__, __LINE__, rtc::sev, \
+ rtc::ERRCTX_ ## ctx, err , ##__VA_ARGS__) \
+ .stream()
+
+#define LOG_T(sev) LOG(sev) << this << ": "
+
+#else // !LOGGING
+
+// Hopefully, the compiler will optimize away some of this code.
+// Note: syntax of "1 ? (void)0 : LogMessage" was causing errors in g++,
+// converted to "while (false)"
+#define LOG(sev) \
+ while (false) rtc::LogMessage(NULL, 0, rtc::sev).stream()
+#define LOG_V(sev) \
+ while (false) rtc::LogMessage(NULL, 0, sev).stream()
+#define LOG_F(sev) LOG(sev) << __FUNCTION__ << ": "
+#define LOG_CHECK_LEVEL(sev) \
+ false
+#define LOG_CHECK_LEVEL_V(sev) \
+ false
+
+#define LOG_E(sev, ctx, err, ...) \
+ while (false) rtc::LogMessage(__FILE__, __LINE__, rtc::sev, \
+ rtc::ERRCTX_ ## ctx, err , ##__VA_ARGS__) \
+ .stream()
+
+#define LOG_T(sev) LOG(sev) << this << ": "
+#define LOG_T_F(sev) LOG(sev) << this << ": " << __FUNCTION__ << ": "
+#endif // !LOGGING
+
+#define LOG_ERRNO_EX(sev, err) \
+ LOG_E(sev, ERRNO, err)
+#define LOG_ERRNO(sev) \
+ LOG_ERRNO_EX(sev, errno)
+
+#if defined(WEBRTC_WIN)
+#define LOG_GLE_EX(sev, err) \
+ LOG_E(sev, HRESULT, err)
+#define LOG_GLE(sev) \
+ LOG_GLE_EX(sev, GetLastError())
+#define LOG_GLEM(sev, mod) \
+ LOG_E(sev, HRESULT, GetLastError(), mod)
+#define LOG_ERR_EX(sev, err) \
+ LOG_GLE_EX(sev, err)
+#define LOG_ERR(sev) \
+ LOG_GLE(sev)
+#define LAST_SYSTEM_ERROR \
+ (::GetLastError())
+#elif defined(__native_client__)
+#define LOG_ERR_EX(sev, err) \
+ LOG(sev)
+#define LOG_ERR(sev) \
+ LOG(sev)
+#define LAST_SYSTEM_ERROR \
+ (0)
+#elif defined(WEBRTC_POSIX)
+#define LOG_ERR_EX(sev, err) \
+ LOG_ERRNO_EX(sev, err)
+#define LOG_ERR(sev) \
+ LOG_ERRNO(sev)
+#define LAST_SYSTEM_ERROR \
+ (errno)
+#endif // WEBRTC_WIN
+
+#define PLOG(sev, err) \
+ LOG_ERR_EX(sev, err)
+
+// TODO(?): Add an "assert" wrapper that logs in the same manner.
+
+#endif // LOG
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_LOGGING_H_
diff --git a/chromium/third_party/webrtc/base/logging_unittest.cc b/chromium/third_party/webrtc/base/logging_unittest.cc
new file mode 100644
index 00000000000..59630d74637
--- /dev/null
+++ b/chromium/third_party/webrtc/base/logging_unittest.cc
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+// Test basic logging operation. We should get the INFO log but not the VERBOSE.
+// We should restore the correct global state at the end.
+TEST(LogTest, SingleStream) {
+ int sev = LogMessage::GetLogToStream(NULL);
+
+ std::string str;
+ StringStream stream(str);
+ LogMessage::AddLogToStream(&stream, LS_INFO);
+ EXPECT_EQ(LS_INFO, LogMessage::GetLogToStream(&stream));
+
+ LOG(LS_INFO) << "INFO";
+ LOG(LS_VERBOSE) << "VERBOSE";
+ EXPECT_NE(std::string::npos, str.find("INFO"));
+ EXPECT_EQ(std::string::npos, str.find("VERBOSE"));
+
+ LogMessage::RemoveLogToStream(&stream);
+ EXPECT_EQ(LogMessage::NO_LOGGING, LogMessage::GetLogToStream(&stream));
+
+ EXPECT_EQ(sev, LogMessage::GetLogToStream(NULL));
+}
+
+// Test using multiple log streams. The INFO stream should get the INFO message,
+// the VERBOSE stream should get the INFO and the VERBOSE.
+// We should restore the correct global state at the end.
+TEST(LogTest, MultipleStreams) {
+ int sev = LogMessage::GetLogToStream(NULL);
+
+ std::string str1, str2;
+ StringStream stream1(str1), stream2(str2);
+ LogMessage::AddLogToStream(&stream1, LS_INFO);
+ LogMessage::AddLogToStream(&stream2, LS_VERBOSE);
+ EXPECT_EQ(LS_INFO, LogMessage::GetLogToStream(&stream1));
+ EXPECT_EQ(LS_VERBOSE, LogMessage::GetLogToStream(&stream2));
+
+ LOG(LS_INFO) << "INFO";
+ LOG(LS_VERBOSE) << "VERBOSE";
+
+ EXPECT_NE(std::string::npos, str1.find("INFO"));
+ EXPECT_EQ(std::string::npos, str1.find("VERBOSE"));
+ EXPECT_NE(std::string::npos, str2.find("INFO"));
+ EXPECT_NE(std::string::npos, str2.find("VERBOSE"));
+
+ LogMessage::RemoveLogToStream(&stream2);
+ LogMessage::RemoveLogToStream(&stream1);
+ EXPECT_EQ(LogMessage::NO_LOGGING, LogMessage::GetLogToStream(&stream2));
+ EXPECT_EQ(LogMessage::NO_LOGGING, LogMessage::GetLogToStream(&stream1));
+
+ EXPECT_EQ(sev, LogMessage::GetLogToStream(NULL));
+}
+
+// Ensure we don't crash when adding/removing streams while threads are going.
+// We should restore the correct global state at the end.
+class LogThread : public Thread {
+ public:
+ virtual ~LogThread() {
+ Stop();
+ }
+
+ private:
+ void Run() {
+ // LS_SENSITIVE to avoid cluttering up any real logging going on
+ LOG(LS_SENSITIVE) << "LOG";
+ }
+};
+
+TEST(LogTest, MultipleThreads) {
+ int sev = LogMessage::GetLogToStream(NULL);
+
+ LogThread thread1, thread2, thread3;
+ thread1.Start();
+ thread2.Start();
+ thread3.Start();
+
+ NullStream stream1, stream2, stream3;
+ for (int i = 0; i < 1000; ++i) {
+ LogMessage::AddLogToStream(&stream1, LS_INFO);
+ LogMessage::AddLogToStream(&stream2, LS_VERBOSE);
+ LogMessage::AddLogToStream(&stream3, LS_SENSITIVE);
+ LogMessage::RemoveLogToStream(&stream1);
+ LogMessage::RemoveLogToStream(&stream2);
+ LogMessage::RemoveLogToStream(&stream3);
+ }
+
+ EXPECT_EQ(sev, LogMessage::GetLogToStream(NULL));
+}
+
+
+TEST(LogTest, WallClockStartTime) {
+ uint32 time = LogMessage::WallClockStartTime();
+ // Expect the time to be in a sensible range, e.g. > 2012-01-01.
+ EXPECT_GT(time, 1325376000u);
+}
+
+// Test the time required to write 1000 80-character logs to an unbuffered file.
+TEST(LogTest, Perf) {
+ Pathname path;
+ EXPECT_TRUE(Filesystem::GetTemporaryFolder(path, true, NULL));
+ path.SetPathname(Filesystem::TempFilename(path, "ut"));
+
+ FileStream stream;
+ EXPECT_TRUE(stream.Open(path.pathname(), "wb", NULL));
+ stream.DisableBuffering();
+ LogMessage::AddLogToStream(&stream, LS_SENSITIVE);
+
+ uint32 start = Time(), finish;
+ std::string message(80, 'X');
+ for (int i = 0; i < 1000; ++i) {
+ LOG(LS_SENSITIVE) << message;
+ }
+ finish = Time();
+
+ LogMessage::RemoveLogToStream(&stream);
+ stream.Close();
+ Filesystem::DeleteFile(path);
+
+ LOG(LS_INFO) << "Average log time: " << TimeDiff(finish, start) << " us";
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/macasyncsocket.cc b/chromium/third_party/webrtc/base/macasyncsocket.cc
new file mode 100644
index 00000000000..ee982ffff1a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macasyncsocket.cc
@@ -0,0 +1,477 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+//
+// MacAsyncSocket is a kind of AsyncSocket. It does not support the SOCK_DGRAM
+// type (yet). It works asynchronously, which means that users of this socket
+// should connect to the various events declared in asyncsocket.h to receive
+// notifications about this socket. It uses CFSockets for signals, but prefers
+// the basic bsd socket operations rather than their CFSocket wrappers when
+// possible.
+
+#include <CoreFoundation/CoreFoundation.h>
+#include <fcntl.h>
+
+#include "webrtc/base/macasyncsocket.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/macsocketserver.h"
+
+namespace rtc {
+
+static const int kCallbackFlags = kCFSocketReadCallBack |
+ kCFSocketConnectCallBack |
+ kCFSocketWriteCallBack;
+
+MacAsyncSocket::MacAsyncSocket(MacBaseSocketServer* ss, int family)
+ : ss_(ss),
+ socket_(NULL),
+ native_socket_(INVALID_SOCKET),
+ source_(NULL),
+ current_callbacks_(0),
+ disabled_(false),
+ error_(0),
+ state_(CS_CLOSED),
+ resolver_(NULL) {
+ Initialize(family);
+}
+
+MacAsyncSocket::~MacAsyncSocket() {
+ Close();
+}
+
+// Returns the address to which the socket is bound. If the socket is not
+// bound, then the any-address is returned.
+SocketAddress MacAsyncSocket::GetLocalAddress() const {
+ SocketAddress address;
+
+ // The CFSocket doesn't pick up on implicit binds from the connect call.
+ // Explicitly calling bind before connect causes errors, so just query
+ // the underlying bsd socket.
+ sockaddr_storage addr;
+ socklen_t addrlen = sizeof(addr);
+ int result = ::getsockname(native_socket_,
+ reinterpret_cast<sockaddr*>(&addr), &addrlen);
+ if (result >= 0) {
+ SocketAddressFromSockAddrStorage(addr, &address);
+ }
+ return address;
+}
+
+// Returns the address to which the socket is connected. If the socket is not
+// connected, then the any-address is returned.
+SocketAddress MacAsyncSocket::GetRemoteAddress() const {
+ SocketAddress address;
+
+ // Use native_socket for consistency with GetLocalAddress.
+ sockaddr_storage addr;
+ socklen_t addrlen = sizeof(addr);
+ int result = ::getpeername(native_socket_,
+ reinterpret_cast<sockaddr*>(&addr), &addrlen);
+ if (result >= 0) {
+ SocketAddressFromSockAddrStorage(addr, &address);
+ }
+ return address;
+}
+
+// Bind the socket to a local address.
+int MacAsyncSocket::Bind(const SocketAddress& address) {
+ sockaddr_storage saddr = {0};
+ size_t len = address.ToSockAddrStorage(&saddr);
+ int err = ::bind(native_socket_, reinterpret_cast<sockaddr*>(&saddr), len);
+ if (err == SOCKET_ERROR) error_ = errno;
+ return err;
+}
+
+void MacAsyncSocket::OnResolveResult(SignalThread* thread) {
+ if (thread != resolver_) {
+ return;
+ }
+ int error = resolver_->GetError();
+ if (error == 0) {
+ error = DoConnect(resolver_->address());
+ } else {
+ Close();
+ }
+ if (error) {
+ error_ = error;
+ SignalCloseEvent(this, error_);
+ }
+}
+
+// Connect to a remote address.
+int MacAsyncSocket::Connect(const SocketAddress& addr) {
+ // TODO(djw): Consolidate all the connect->resolve->doconnect implementations.
+ if (state_ != CS_CLOSED) {
+ SetError(EALREADY);
+ return SOCKET_ERROR;
+ }
+ if (addr.IsUnresolved()) {
+ LOG(LS_VERBOSE) << "Resolving addr in MacAsyncSocket::Connect";
+ resolver_ = new AsyncResolver();
+ resolver_->SignalWorkDone.connect(this,
+ &MacAsyncSocket::OnResolveResult);
+ resolver_->Start(addr);
+ state_ = CS_CONNECTING;
+ return 0;
+ }
+ return DoConnect(addr);
+}
+
+int MacAsyncSocket::DoConnect(const SocketAddress& addr) {
+ if (!valid()) {
+ Initialize(addr.family());
+ if (!valid())
+ return SOCKET_ERROR;
+ }
+
+ sockaddr_storage saddr;
+ size_t len = addr.ToSockAddrStorage(&saddr);
+ int result = ::connect(native_socket_, reinterpret_cast<sockaddr*>(&saddr),
+ len);
+
+ if (result != SOCKET_ERROR) {
+ state_ = CS_CONNECTED;
+ } else {
+ error_ = errno;
+ if (error_ == EINPROGRESS) {
+ state_ = CS_CONNECTING;
+ result = 0;
+ }
+ }
+ return result;
+}
+
+// Send to the remote end we're connected to.
+int MacAsyncSocket::Send(const void* buffer, size_t length) {
+ if (!valid()) {
+ return SOCKET_ERROR;
+ }
+
+ int sent = ::send(native_socket_, buffer, length, 0);
+
+ if (sent == SOCKET_ERROR) {
+ error_ = errno;
+
+ if (IsBlocking()) {
+ // Reenable the writable callback (once), since we are flow controlled.
+ CFSocketEnableCallBacks(socket_, kCallbackFlags);
+ current_callbacks_ = kCallbackFlags;
+ }
+ }
+ return sent;
+}
+
+// Send to the given address. We may or may not be connected to anyone.
+int MacAsyncSocket::SendTo(const void* buffer, size_t length,
+ const SocketAddress& address) {
+ if (!valid()) {
+ return SOCKET_ERROR;
+ }
+
+ sockaddr_storage saddr;
+ size_t len = address.ToSockAddrStorage(&saddr);
+ int sent = ::sendto(native_socket_, buffer, length, 0,
+ reinterpret_cast<sockaddr*>(&saddr), len);
+
+ if (sent == SOCKET_ERROR) {
+ error_ = errno;
+ }
+
+ return sent;
+}
+
+// Read data received from the remote end we're connected to.
+int MacAsyncSocket::Recv(void* buffer, size_t length) {
+ int received = ::recv(native_socket_, reinterpret_cast<char*>(buffer),
+ length, 0);
+ if (received == SOCKET_ERROR) error_ = errno;
+
+ // Recv should only be called when there is data to read
+ ASSERT((received != 0) || (length == 0));
+ return received;
+}
+
+// Read data received from any remote party
+int MacAsyncSocket::RecvFrom(void* buffer, size_t length,
+ SocketAddress* out_addr) {
+ sockaddr_storage saddr;
+ socklen_t addr_len = sizeof(saddr);
+ int received = ::recvfrom(native_socket_, reinterpret_cast<char*>(buffer),
+ length, 0, reinterpret_cast<sockaddr*>(&saddr),
+ &addr_len);
+ if (received >= 0 && out_addr != NULL) {
+ SocketAddressFromSockAddrStorage(saddr, out_addr);
+ } else if (received == SOCKET_ERROR) {
+ error_ = errno;
+ }
+ return received;
+}
+
+int MacAsyncSocket::Listen(int backlog) {
+ if (!valid()) {
+ return SOCKET_ERROR;
+ }
+
+ int res = ::listen(native_socket_, backlog);
+ if (res != SOCKET_ERROR)
+ state_ = CS_CONNECTING;
+ else
+ error_ = errno;
+
+ return res;
+}
+
+MacAsyncSocket* MacAsyncSocket::Accept(SocketAddress* out_addr) {
+ sockaddr_storage saddr;
+ socklen_t addr_len = sizeof(saddr);
+
+ int socket_fd = ::accept(native_socket_, reinterpret_cast<sockaddr*>(&saddr),
+ &addr_len);
+ if (socket_fd == INVALID_SOCKET) {
+ error_ = errno;
+ return NULL;
+ }
+
+ MacAsyncSocket* s = new MacAsyncSocket(ss_, saddr.ss_family, socket_fd);
+ if (s && s->valid()) {
+ s->state_ = CS_CONNECTED;
+ if (out_addr)
+ SocketAddressFromSockAddrStorage(saddr, out_addr);
+ } else {
+ delete s;
+ s = NULL;
+ }
+ return s;
+}
+
+int MacAsyncSocket::Close() {
+ if (source_ != NULL) {
+ CFRunLoopSourceInvalidate(source_);
+ CFRelease(source_);
+ if (ss_) ss_->UnregisterSocket(this);
+ source_ = NULL;
+ }
+
+ if (socket_ != NULL) {
+ CFSocketInvalidate(socket_);
+ CFRelease(socket_);
+ socket_ = NULL;
+ }
+
+ if (resolver_) {
+ resolver_->Destroy(false);
+ resolver_ = NULL;
+ }
+
+ native_socket_ = INVALID_SOCKET; // invalidates the socket
+ error_ = 0;
+ state_ = CS_CLOSED;
+ return 0;
+}
+
+int MacAsyncSocket::EstimateMTU(uint16* mtu) {
+ ASSERT(false && "NYI");
+ return -1;
+}
+
+int MacAsyncSocket::GetError() const {
+ return error_;
+}
+
+void MacAsyncSocket::SetError(int error) {
+ error_ = error;
+}
+
+Socket::ConnState MacAsyncSocket::GetState() const {
+ return state_;
+}
+
+int MacAsyncSocket::GetOption(Option opt, int* value) {
+ ASSERT(false && "NYI");
+ return -1;
+}
+
+int MacAsyncSocket::SetOption(Option opt, int value) {
+ ASSERT(false && "NYI");
+ return -1;
+}
+
+void MacAsyncSocket::EnableCallbacks() {
+ if (valid()) {
+ disabled_ = false;
+ CFSocketEnableCallBacks(socket_, current_callbacks_);
+ }
+}
+
+void MacAsyncSocket::DisableCallbacks() {
+ if (valid()) {
+ disabled_ = true;
+ CFSocketDisableCallBacks(socket_, kCallbackFlags);
+ }
+}
+
+MacAsyncSocket::MacAsyncSocket(MacBaseSocketServer* ss, int family,
+ int native_socket)
+ : ss_(ss),
+ socket_(NULL),
+ native_socket_(native_socket),
+ source_(NULL),
+ current_callbacks_(0),
+ disabled_(false),
+ error_(0),
+ state_(CS_CLOSED),
+ resolver_(NULL) {
+ Initialize(family);
+}
+
+// Create a new socket, wrapping the native socket if provided or creating one
+// otherwise. In case of any failure, consume the native socket. We assume the
+// wrapped socket is in the closed state. If this is not the case you must
+// update the state_ field for this socket yourself.
+void MacAsyncSocket::Initialize(int family) {
+ CFSocketContext ctx = { 0 };
+ ctx.info = this;
+
+ // First create the CFSocket
+ CFSocketRef cf_socket = NULL;
+ bool res = false;
+ if (native_socket_ == INVALID_SOCKET) {
+ cf_socket = CFSocketCreate(kCFAllocatorDefault,
+ family, SOCK_STREAM, IPPROTO_TCP,
+ kCallbackFlags, MacAsyncSocketCallBack, &ctx);
+ } else {
+ cf_socket = CFSocketCreateWithNative(kCFAllocatorDefault,
+ native_socket_, kCallbackFlags,
+ MacAsyncSocketCallBack, &ctx);
+ }
+
+ if (cf_socket) {
+ res = true;
+ socket_ = cf_socket;
+ native_socket_ = CFSocketGetNative(cf_socket);
+ current_callbacks_ = kCallbackFlags;
+ }
+
+ if (res) {
+ // Make the underlying socket asynchronous
+ res = (-1 != ::fcntl(native_socket_, F_SETFL,
+ ::fcntl(native_socket_, F_GETFL, 0) | O_NONBLOCK));
+ }
+
+ if (res) {
+ // Add this socket to the run loop, at priority 1 so that it will be
+ // queued behind any pending signals.
+ source_ = CFSocketCreateRunLoopSource(kCFAllocatorDefault, socket_, 1);
+ res = (source_ != NULL);
+ if (!res) errno = EINVAL;
+ }
+
+ if (res) {
+ if (ss_) ss_->RegisterSocket(this);
+ CFRunLoopAddSource(CFRunLoopGetCurrent(), source_, kCFRunLoopCommonModes);
+ }
+
+ if (!res) {
+ int error = errno;
+ Close(); // Clears error_.
+ error_ = error;
+ }
+}
+
+// Call CFRelease on the result when done using it
+CFDataRef MacAsyncSocket::CopyCFAddress(const SocketAddress& address) {
+ sockaddr_storage saddr;
+ size_t len = address.ToSockAddrStorage(&saddr);
+
+ const UInt8* bytes = reinterpret_cast<UInt8*>(&saddr);
+
+ CFDataRef cf_address = CFDataCreate(kCFAllocatorDefault,
+ bytes, len);
+
+ ASSERT(cf_address != NULL);
+ return cf_address;
+}
+
+void MacAsyncSocket::MacAsyncSocketCallBack(CFSocketRef s,
+ CFSocketCallBackType callbackType,
+ CFDataRef address,
+ const void* data,
+ void* info) {
+ MacAsyncSocket* this_socket =
+ reinterpret_cast<MacAsyncSocket*>(info);
+ ASSERT(this_socket != NULL && this_socket->socket_ == s);
+
+ // Don't signal any socket messages if the socketserver is not listening on
+ // them. When we are reenabled they will be requeued and will fire again.
+ if (this_socket->disabled_)
+ return;
+
+ switch (callbackType) {
+ case kCFSocketReadCallBack:
+ // This callback is invoked in one of 4 situations:
+ // 1. A new connection is waiting to be accepted.
+ // 2. The remote end closed the connection (a recv will return 0).
+ // 3. Data is available to read.
+ // 4. The connection closed unhappily (recv will return -1).
+ if (this_socket->state_ == CS_CONNECTING) {
+ // Case 1.
+ this_socket->SignalReadEvent(this_socket);
+ } else {
+ char ch, amt;
+ amt = ::recv(this_socket->native_socket_, &ch, 1, MSG_PEEK);
+ if (amt == 0) {
+ // Case 2.
+ this_socket->state_ = CS_CLOSED;
+
+ // Disable additional callbacks or we will signal close twice.
+ CFSocketDisableCallBacks(this_socket->socket_, kCFSocketReadCallBack);
+ this_socket->current_callbacks_ &= ~kCFSocketReadCallBack;
+ this_socket->SignalCloseEvent(this_socket, 0);
+ } else if (amt > 0) {
+ // Case 3.
+ this_socket->SignalReadEvent(this_socket);
+ } else {
+ // Case 4.
+ int error = errno;
+ if (error == EAGAIN) {
+ // Observed in practice. Let's hope it's a spurious or out of date
+ // signal, since we just eat it.
+ } else {
+ this_socket->error_ = error;
+ this_socket->SignalCloseEvent(this_socket, error);
+ }
+ }
+ }
+ break;
+
+ case kCFSocketConnectCallBack:
+ if (data != NULL) {
+ // An error occurred in the background while connecting
+ this_socket->error_ = errno;
+ this_socket->state_ = CS_CLOSED;
+ this_socket->SignalCloseEvent(this_socket, this_socket->error_);
+ } else {
+ this_socket->state_ = CS_CONNECTED;
+ this_socket->SignalConnectEvent(this_socket);
+ }
+ break;
+
+ case kCFSocketWriteCallBack:
+ // Update our callback tracking. Write doesn't reenable, so it's off now.
+ this_socket->current_callbacks_ &= ~kCFSocketWriteCallBack;
+ this_socket->SignalWriteEvent(this_socket);
+ break;
+
+ default:
+ ASSERT(false && "Invalid callback type for socket");
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/macasyncsocket.h b/chromium/third_party/webrtc/base/macasyncsocket.h
new file mode 100644
index 00000000000..bf838654633
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macasyncsocket.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+// MacAsyncSocket is a kind of AsyncSocket. It only creates sockets of the
+// TCP type and does not (yet) support the SOCK_DGRAM type. It works
+// asynchronously, which means that users of this socket should connect to
+// the various events declared in asyncsocket.h to receive notifications about
+// this socket.
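+//
+// For example (a sketch; |handler| is assumed to be a sigslot::has_slots<>
+// derived object with matching slot signatures):
+//   socket->SignalConnectEvent.connect(handler, &Handler::OnConnectEvent);
+//   socket->SignalReadEvent.connect(handler, &Handler::OnReadEvent);
+//   socket->SignalCloseEvent.connect(handler, &Handler::OnCloseEvent);
+//   socket->Connect(remote_addr);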
+
+#ifndef WEBRTC_BASE_MACASYNCSOCKET_H__
+#define WEBRTC_BASE_MACASYNCSOCKET_H__
+
+#include <CoreFoundation/CoreFoundation.h>
+
+#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/nethelpers.h"
+
+namespace rtc {
+
+class MacBaseSocketServer;
+
+class MacAsyncSocket : public AsyncSocket, public sigslot::has_slots<> {
+ public:
+ MacAsyncSocket(MacBaseSocketServer* ss, int family);
+ virtual ~MacAsyncSocket();
+
+ bool valid() const { return source_ != NULL; }
+
+ // Socket interface
+ virtual SocketAddress GetLocalAddress() const;
+ virtual SocketAddress GetRemoteAddress() const;
+ virtual int Bind(const SocketAddress& addr);
+ virtual int Connect(const SocketAddress& addr);
+ virtual int Send(const void* buffer, size_t length);
+ virtual int SendTo(const void* buffer, size_t length,
+ const SocketAddress& addr);
+ virtual int Recv(void* buffer, size_t length);
+ virtual int RecvFrom(void* buffer, size_t length, SocketAddress* out_addr);
+ virtual int Listen(int backlog);
+ virtual MacAsyncSocket* Accept(SocketAddress* out_addr);
+ virtual int Close();
+ virtual int GetError() const;
+ virtual void SetError(int error);
+ virtual ConnState GetState() const;
+ virtual int EstimateMTU(uint16* mtu);
+ virtual int GetOption(Option opt, int* value);
+ virtual int SetOption(Option opt, int value);
+
+ // For the MacBaseSocketServer to disable callbacks when process_io is false.
+ void EnableCallbacks();
+ void DisableCallbacks();
+
+ protected:
+ void OnResolveResult(SignalThread* thread);
+ int DoConnect(const SocketAddress& addr);
+
+ private:
+  // Creates an async socket from an existing BSD socket.
+ MacAsyncSocket(MacBaseSocketServer* ss, int family, int native_socket);
+
+  // Attaches the socket to the CFRunLoop and sets the wrapped BSD socket
+  // to async mode.
+ void Initialize(int family);
+
+ // Translate the SocketAddress into a CFDataRef to pass to CF socket
+ // functions. Caller must call CFRelease on the result when done.
+ static CFDataRef CopyCFAddress(const SocketAddress& address);
+
+ // Callback for the underlying CFSocketRef.
+ static void MacAsyncSocketCallBack(CFSocketRef s,
+ CFSocketCallBackType callbackType,
+ CFDataRef address,
+ const void* data,
+ void* info);
+
+ MacBaseSocketServer* ss_;
+ CFSocketRef socket_;
+ int native_socket_;
+ CFRunLoopSourceRef source_;
+ int current_callbacks_;
+ bool disabled_;
+ int error_;
+ ConnState state_;
+ AsyncResolver* resolver_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(MacAsyncSocket);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MACASYNCSOCKET_H__
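
The header above only declares the socket; because MacAsyncSocket reports progress through the sigslot signals inherited from AsyncSocket, a caller connects handlers and then lets the owning socket server pump the run loop. The following is a minimal sketch, not part of the patch, assuming only interfaces visible in this diff (MacCFSocketServer, AsyncSocket's signals, SocketAddress); the address and payload are placeholders.

#include <sys/socket.h>

#include "webrtc/base/asyncsocket.h"
#include "webrtc/base/macsocketserver.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/socketaddress.h"

class EchoClient : public sigslot::has_slots<> {
 public:
  explicit EchoClient(rtc::AsyncSocket* socket) : socket_(socket) {
    socket_->SignalConnectEvent.connect(this, &EchoClient::OnConnect);
    socket_->SignalReadEvent.connect(this, &EchoClient::OnRead);
    socket_->SignalCloseEvent.connect(this, &EchoClient::OnClose);
  }

 private:
  void OnConnect(rtc::AsyncSocket* s) { s->Send("ping", 4); }
  void OnRead(rtc::AsyncSocket* s) {
    char buf[256];
    s->Recv(buf, sizeof(buf));  // Drain what the peer sent.
  }
  void OnClose(rtc::AsyncSocket* s, int err) { /* EOF or error. */ }

  rtc::AsyncSocket* socket_;
};

int main() {
  rtc::MacCFSocketServer ss;  // Drives the current thread's CFRunLoop.
  rtc::AsyncSocket* socket = ss.CreateAsyncSocket(AF_INET, SOCK_STREAM);
  if (!socket)
    return 1;
  EchoClient client(socket);
  socket->Connect(rtc::SocketAddress("127.0.0.1", 5000));  // Placeholder.
  ss.Wait(5000, true);  // Pump socket callbacks for up to five seconds.
  delete socket;
  return 0;
}

Since the CFSocket callbacks arrive on the thread that owns the run loop, no locking is needed in the handlers as long as the socket is only used from that thread.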
diff --git a/chromium/third_party/webrtc/base/maccocoasocketserver.h b/chromium/third_party/webrtc/base/maccocoasocketserver.h
new file mode 100644
index 00000000000..d5deac153bb
--- /dev/null
+++ b/chromium/third_party/webrtc/base/maccocoasocketserver.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// A libjingle-compatible SocketServer for OS X/iOS/Cocoa.
+
+#ifndef WEBRTC_BASE_MACCOCOASOCKETSERVER_H_
+#define WEBRTC_BASE_MACCOCOASOCKETSERVER_H_
+
+#include "webrtc/base/macsocketserver.h"
+
+#ifdef __OBJC__
+@class NSTimer, MacCocoaSocketServerHelperRtc;
+#else
+class NSTimer;
+class MacCocoaSocketServerHelperRtc;
+#endif
+
+namespace rtc {
+
+// A socketserver implementation that wraps the main cocoa
+// application loop accessed through [NSApp run].
+class MacCocoaSocketServer : public MacBaseSocketServer {
+ public:
+ explicit MacCocoaSocketServer();
+ virtual ~MacCocoaSocketServer();
+
+ virtual bool Wait(int cms, bool process_io);
+ virtual void WakeUp();
+
+ private:
+ MacCocoaSocketServerHelperRtc* helper_;
+ NSTimer* timer_; // Weak.
+ // The count of how many times we're inside the NSApplication main loop.
+ int run_count_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(MacCocoaSocketServer);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MACCOCOASOCKETSERVER_H_
diff --git a/chromium/third_party/webrtc/base/maccocoasocketserver.mm b/chromium/third_party/webrtc/base/maccocoasocketserver.mm
new file mode 100644
index 00000000000..123ffdc5255
--- /dev/null
+++ b/chromium/third_party/webrtc/base/maccocoasocketserver.mm
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#import "webrtc/base/maccocoasocketserver.h"
+
+#import <Foundation/Foundation.h>
+#import <AppKit/AppKit.h>
+#include <assert.h>
+
+#include "webrtc/base/scoped_autorelease_pool.h"
+
+// MacCocoaSocketServerHelperRtc serves as a delegate to NSMachPort or a
+// target for a timeout.
+@interface MacCocoaSocketServerHelperRtc : NSObject {
+ // This is a weak reference. This works fine since the
+ // rtc::MacCocoaSocketServer owns this object.
+ rtc::MacCocoaSocketServer* socketServer_; // Weak.
+}
+@end
+
+@implementation MacCocoaSocketServerHelperRtc
+- (id)initWithSocketServer:(rtc::MacCocoaSocketServer*)ss {
+ self = [super init];
+ if (self) {
+ socketServer_ = ss;
+ }
+ return self;
+}
+
+- (void)timerFired:(NSTimer*)timer {
+ socketServer_->WakeUp();
+}
+
+- (void)breakMainloop {
+ [NSApp stop:self];
+ // NSApp stop only exits after finishing processing of the
+ // current event. Since we're potentially in a timer callback
+ // and not an NSEvent handler, we need to trigger a dummy one
+ // and turn the loop over. We may be able to skip this if we're
+ // on the ss' thread and not inside the app loop already.
+ NSEvent* event = [NSEvent otherEventWithType:NSApplicationDefined
+ location:NSMakePoint(0,0)
+ modifierFlags:0
+ timestamp:0
+ windowNumber:0
+ context:nil
+ subtype:0
+ data1:0
+ data2:0];
+ [NSApp postEvent:event atStart:NO];
+}
+@end
+
+namespace rtc {
+
+MacCocoaSocketServer::MacCocoaSocketServer() {
+ helper_ = [[MacCocoaSocketServerHelperRtc alloc] initWithSocketServer:this];
+ timer_ = nil;
+ run_count_ = 0;
+
+ // Initialize the shared NSApplication
+ [NSApplication sharedApplication];
+}
+
+MacCocoaSocketServer::~MacCocoaSocketServer() {
+ [timer_ invalidate];
+ [timer_ release];
+ [helper_ release];
+}
+
+// ::Wait is reentrant, for example when blocking on another thread while
+// responding to I/O. Calls to [NSApp] MUST be made from the main thread
+// only!
+bool MacCocoaSocketServer::Wait(int cms, bool process_io) {
+ rtc::ScopedAutoreleasePool pool;
+ if (!process_io && cms == 0) {
+ // No op.
+ return true;
+ }
+ if ([NSApp isRunning]) {
+ // Only allow reentrant waiting if we're in a blocking send.
+ ASSERT(!process_io && cms == kForever);
+ }
+
+ if (!process_io) {
+ // No way to listen to common modes and not get socket events, unless
+ // we disable each one's callbacks.
+ EnableSocketCallbacks(false);
+ }
+
+ if (kForever != cms) {
+ // Install a timer that fires wakeup after cms has elapsed.
+ timer_ =
+ [NSTimer scheduledTimerWithTimeInterval:cms / 1000.0
+ target:helper_
+ selector:@selector(timerFired:)
+ userInfo:nil
+ repeats:NO];
+ [timer_ retain];
+ }
+
+ // Run until WakeUp is called, which will call stop and exit this loop.
+ run_count_++;
+ [NSApp run];
+ run_count_--;
+
+ if (!process_io) {
+ // Reenable them. Hopefully this won't cause spurious callbacks or
+ // missing ones while they were disabled.
+ EnableSocketCallbacks(true);
+ }
+
+ return true;
+}
+
+// Can be called from any thread. Post a message back to the main thread to
+// break out of the NSApp loop.
+void MacCocoaSocketServer::WakeUp() {
+ if (timer_ != nil) {
+ [timer_ invalidate];
+ [timer_ release];
+ timer_ = nil;
+ }
+
+ // [NSApp isRunning] returns unexpected results when called from another
+ // thread. Maintain our own count of how many times to break the main loop.
+ if (run_count_ > 0) {
+ [helper_ performSelectorOnMainThread:@selector(breakMainloop)
+ withObject:nil
+ waitUntilDone:false];
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/maccocoasocketserver_unittest.mm b/chromium/third_party/webrtc/base/maccocoasocketserver_unittest.mm
new file mode 100644
index 00000000000..932b4a14f59
--- /dev/null
+++ b/chromium/third_party/webrtc/base/maccocoasocketserver_unittest.mm
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/maccocoasocketserver.h"
+
+namespace rtc {
+
+class WakeThread : public Thread {
+ public:
+ WakeThread(SocketServer* ss) : ss_(ss) {
+ }
+ virtual ~WakeThread() {
+ Stop();
+ }
+ void Run() {
+ ss_->WakeUp();
+ }
+ private:
+ SocketServer* ss_;
+};
+
+// Test that MacCocoaSocketServer::Wait works as expected.
+TEST(MacCocoaSocketServer, TestWait) {
+ MacCocoaSocketServer server;
+ uint32 start = Time();
+ server.Wait(1000, true);
+ EXPECT_GE(TimeSince(start), 1000);
+}
+
+// Test that MacCocoaSocketServer::Wakeup works as expected.
+TEST(MacCocoaSocketServer, TestWakeup) {
+  MacCocoaSocketServer server;
+ WakeThread thread(&server);
+ uint32 start = Time();
+ thread.Start();
+ server.Wait(10000, true);
+ EXPECT_LT(TimeSince(start), 10000);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/maccocoathreadhelper.h b/chromium/third_party/webrtc/base/maccocoathreadhelper.h
new file mode 100644
index 00000000000..255d081cece
--- /dev/null
+++ b/chromium/third_party/webrtc/base/maccocoathreadhelper.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Helper function for using Cocoa with Posix threads. This header should be
+// included from C/C++ files that want to use some Cocoa functionality without
+// using the .mm extension (mostly for files that are compiled on multiple
+// platforms).
+
+#ifndef WEBRTC_BASE_MACCOCOATHREADHELPER_H__
+#define WEBRTC_BASE_MACCOCOATHREADHELPER_H__
+
+namespace rtc {
+
+// Cocoa must be "put into multithreading mode" before Cocoa functionality can
+// be used on POSIX threads. This function does that.
+void InitCocoaMultiThreading();
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MACCOCOATHREADHELPER_H__
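
In practice this helper is called once, early, before any worker thread that might touch Cocoa is started. A hedged sketch, assuming rtc::Thread from webrtc/base/thread.h (which this patch also references); it is illustrative only, not part of the patch.

#include "webrtc/base/maccocoathreadhelper.h"
#include "webrtc/base/thread.h"

int main() {
  // Switch Cocoa into multithreaded mode before the first POSIX thread that
  // might call into Cocoa is created; doing it afterwards is too late.
  rtc::InitCocoaMultiThreading();

  rtc::Thread worker;
  worker.Start();  // The worker may now safely use Cocoa classes.
  // ... hand work to the thread ...
  worker.Stop();
  return 0;
}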
diff --git a/chromium/third_party/webrtc/base/maccocoathreadhelper.mm b/chromium/third_party/webrtc/base/maccocoathreadhelper.mm
new file mode 100644
index 00000000000..7bf9e9206bd
--- /dev/null
+++ b/chromium/third_party/webrtc/base/maccocoathreadhelper.mm
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+// Helper function for using Cocoa with Posix threading.
+
+#import <assert.h>
+#import <Foundation/Foundation.h>
+
+#import "webrtc/base/maccocoathreadhelper.h"
+
+namespace rtc {
+
+// Cocoa must be "put into multithreading mode" before Cocoa functionality can
+// be used on POSIX threads. The way to do that is to spawn one thread that may
+// immediately exit.
+void InitCocoaMultiThreading() {
+ if ([NSThread isMultiThreaded] == NO) {
+ // The sole purpose of this autorelease pool is to avoid a console
+ // message on Leopard that tells us we're autoreleasing the thread
+ // with no autorelease pool in place; we can't set up an autorelease
+ // pool before this, because this is executed from an initializer,
+ // which is run before main. This means we leak an autorelease pool,
+ // and one thread, and if other objects are set up in initializers after
+ // this they'll be silently added to this pool and never released.
+
+ // Doing NSAutoreleasePool* hack = [[NSAutoreleasePool alloc] init];
+ // causes unused variable error.
+ NSAutoreleasePool* hack;
+ hack = [[NSAutoreleasePool alloc] init];
+ [NSThread detachNewThreadSelector:@selector(class)
+ toTarget:[NSObject class]
+ withObject:nil];
+ }
+
+ assert([NSThread isMultiThreaded]);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/macconversion.cc b/chromium/third_party/webrtc/base/macconversion.cc
new file mode 100644
index 00000000000..75d11a803cb
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macconversion.cc
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+
+#include <CoreFoundation/CoreFoundation.h>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/macconversion.h"
+
+bool p_convertHostCFStringRefToCPPString(
+ const CFStringRef cfstr, std::string& cppstr) {
+ bool result = false;
+
+ // First this must be non-null,
+ if (NULL != cfstr) {
+ // it must actually *be* a CFString, and not something just masquerading
+ // as one,
+ if (CFGetTypeID(cfstr) == CFStringGetTypeID()) {
+ // and we must be able to get the characters out of it.
+ // (The cfstr owns this buffer; it came from somewhere else,
+ // so someone else gets to take care of getting rid of the cfstr,
+ // and then this buffer will go away automatically.)
+ unsigned length = CFStringGetLength(cfstr);
+ char* buf = new char[1 + length];
+ if (CFStringGetCString(cfstr, buf, 1 + length, kCFStringEncodingASCII)) {
+ if (strlen(buf) == length) {
+ cppstr.assign(buf);
+ result = true;
+ }
+ }
+ delete [] buf;
+ }
+ }
+
+ return result;
+}
+
+bool p_convertCFNumberToInt(CFNumberRef cfn, int* i) {
+ bool converted = false;
+
+ // It must not be null.
+ if (NULL != cfn) {
+ // It must actually *be* a CFNumber and not something just masquerading
+ // as one.
+ if (CFGetTypeID(cfn) == CFNumberGetTypeID()) {
+ CFNumberType ntype = CFNumberGetType(cfn);
+ switch (ntype) {
+ case kCFNumberSInt8Type:
+ SInt8 sint8;
+ converted = CFNumberGetValue(cfn, ntype, static_cast<void*>(&sint8));
+ if (converted) *i = static_cast<int>(sint8);
+ break;
+ case kCFNumberSInt16Type:
+ SInt16 sint16;
+ converted = CFNumberGetValue(cfn, ntype, static_cast<void*>(&sint16));
+ if (converted) *i = static_cast<int>(sint16);
+ break;
+ case kCFNumberSInt32Type:
+ SInt32 sint32;
+ converted = CFNumberGetValue(cfn, ntype, static_cast<void*>(&sint32));
+ if (converted) *i = static_cast<int>(sint32);
+ break;
+ case kCFNumberSInt64Type:
+ SInt64 sint64;
+ converted = CFNumberGetValue(cfn, ntype, static_cast<void*>(&sint64));
+ if (converted) *i = static_cast<int>(sint64);
+ break;
+ case kCFNumberFloat32Type:
+ Float32 float32;
+ converted = CFNumberGetValue(cfn, ntype,
+ static_cast<void*>(&float32));
+ if (converted) *i = static_cast<int>(float32);
+ break;
+ case kCFNumberFloat64Type:
+ Float64 float64;
+ converted = CFNumberGetValue(cfn, ntype,
+ static_cast<void*>(&float64));
+ if (converted) *i = static_cast<int>(float64);
+ break;
+ case kCFNumberCharType:
+ char charvalue;
+ converted = CFNumberGetValue(cfn, ntype,
+ static_cast<void*>(&charvalue));
+ if (converted) *i = static_cast<int>(charvalue);
+ break;
+ case kCFNumberShortType:
+ short shortvalue;
+ converted = CFNumberGetValue(cfn, ntype,
+ static_cast<void*>(&shortvalue));
+ if (converted) *i = static_cast<int>(shortvalue);
+ break;
+ case kCFNumberIntType:
+ int intvalue;
+ converted = CFNumberGetValue(cfn, ntype,
+ static_cast<void*>(&intvalue));
+ if (converted) *i = static_cast<int>(intvalue);
+ break;
+ case kCFNumberLongType:
+ long longvalue;
+ converted = CFNumberGetValue(cfn, ntype,
+ static_cast<void*>(&longvalue));
+ if (converted) *i = static_cast<int>(longvalue);
+ break;
+ case kCFNumberLongLongType:
+ long long llvalue;
+ converted = CFNumberGetValue(cfn, ntype,
+ static_cast<void*>(&llvalue));
+ if (converted) *i = static_cast<int>(llvalue);
+ break;
+ case kCFNumberFloatType:
+ float floatvalue;
+ converted = CFNumberGetValue(cfn, ntype,
+ static_cast<void*>(&floatvalue));
+ if (converted) *i = static_cast<int>(floatvalue);
+ break;
+ case kCFNumberDoubleType:
+ double doublevalue;
+ converted = CFNumberGetValue(cfn, ntype,
+ static_cast<void*>(&doublevalue));
+ if (converted) *i = static_cast<int>(doublevalue);
+ break;
+ case kCFNumberCFIndexType:
+ CFIndex cfindex;
+ converted = CFNumberGetValue(cfn, ntype,
+ static_cast<void*>(&cfindex));
+ if (converted) *i = static_cast<int>(cfindex);
+ break;
+ default:
+ LOG(LS_ERROR) << "got unknown type.";
+ break;
+ }
+ }
+ }
+
+ return converted;
+}
+
+bool p_isCFNumberTrue(CFNumberRef cfn) {
+ // We assume it's false until proven otherwise.
+ bool result = false;
+ int asInt;
+ bool converted = p_convertCFNumberToInt(cfn, &asInt);
+
+ if (converted && (0 != asInt)) {
+ result = true;
+ }
+
+ return result;
+}
+
+#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
diff --git a/chromium/third_party/webrtc/base/macconversion.h b/chromium/third_party/webrtc/base/macconversion.h
new file mode 100644
index 00000000000..a96ed229854
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macconversion.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_MACCONVERSION_H_
+#define WEBRTC_BASE_MACCONVERSION_H_
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+
+#include <CoreFoundation/CoreFoundation.h>
+
+#include <string>
+
+// Given a CFStringRef, attempts to convert it to a C++ string.
+// Returns true if it succeeds, false otherwise.
+// We can safely assume, given our context, that the string is
+// going to be in ASCII, because it will either be an IP address
+// or a domain name, which is guaranteed to be ASCII-representable.
+bool p_convertHostCFStringRefToCPPString(const CFStringRef cfstr,
+ std::string& cppstr);
+
+// Converts the CFNumber to an integer, putting the integer in the location
+// given, and returning true if the conversion succeeds.
+// If given a NULL or a non-CFNumber, returns false.
+// This is pretty aggressive about trying to convert to int.
+bool p_convertCFNumberToInt(CFNumberRef cfn, int* i);
+
+// Given a CFNumberRef, determines whether it represents a true value.
+bool p_isCFNumberTrue(CFNumberRef cfn);
+
+#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
+
+#endif // WEBRTC_BASE_MACCONVERSION_H_
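
These converters are typically applied to values pulled out of a CF property-list dictionary, and they tolerate NULL or wrongly-typed inputs by returning false. An illustrative sketch follows; the ReadEntry helper and the dictionary keys are made up for the example and are not part of this patch.

#include <CoreFoundation/CoreFoundation.h>

#include <string>

#include "webrtc/base/macconversion.h"

// Pulls an int and a string out of a CF property-list dictionary, e.g. one
// returned by a CoreGraphics window query. Key names are placeholders.
bool ReadEntry(CFDictionaryRef dict, int* channel, std::string* name) {
  CFNumberRef num = reinterpret_cast<CFNumberRef>(
      CFDictionaryGetValue(dict, CFSTR("Channel")));
  CFStringRef str = reinterpret_cast<CFStringRef>(
      CFDictionaryGetValue(dict, CFSTR("ProductName")));
  // Both converters return false on NULL or wrongly-typed values, so missing
  // keys need no extra checks here.
  return p_convertCFNumberToInt(num, channel) &&
         p_convertHostCFStringRefToCPPString(str, *name);
}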
diff --git a/chromium/third_party/webrtc/base/macsocketserver.cc b/chromium/third_party/webrtc/base/macsocketserver.cc
new file mode 100644
index 00000000000..c7ab6e44df8
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macsocketserver.cc
@@ -0,0 +1,378 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "webrtc/base/macsocketserver.h"
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/macasyncsocket.h"
+#include "webrtc/base/macutils.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// MacBaseSocketServer
+///////////////////////////////////////////////////////////////////////////////
+
+MacBaseSocketServer::MacBaseSocketServer() {
+}
+
+MacBaseSocketServer::~MacBaseSocketServer() {
+}
+
+AsyncSocket* MacBaseSocketServer::CreateAsyncSocket(int type) {
+ return CreateAsyncSocket(AF_INET, type);
+}
+
+AsyncSocket* MacBaseSocketServer::CreateAsyncSocket(int family, int type) {
+ if (SOCK_STREAM != type)
+ return NULL;
+
+ MacAsyncSocket* socket = new MacAsyncSocket(this, family);
+ if (!socket->valid()) {
+ delete socket;
+ return NULL;
+ }
+ return socket;
+}
+
+void MacBaseSocketServer::RegisterSocket(MacAsyncSocket* s) {
+ sockets_.insert(s);
+}
+
+void MacBaseSocketServer::UnregisterSocket(MacAsyncSocket* s) {
+ VERIFY(1 == sockets_.erase(s)); // found 1
+}
+
+bool MacBaseSocketServer::SetPosixSignalHandler(int signum,
+ void (*handler)(int)) {
+ Dispatcher* dispatcher = signal_dispatcher();
+ if (!PhysicalSocketServer::SetPosixSignalHandler(signum, handler)) {
+ return false;
+ }
+
+ // Only register the FD once, when the first custom handler is installed.
+ if (!dispatcher && (dispatcher = signal_dispatcher())) {
+ CFFileDescriptorContext ctx = { 0 };
+ ctx.info = this;
+
+ CFFileDescriptorRef desc = CFFileDescriptorCreate(
+ kCFAllocatorDefault,
+ dispatcher->GetDescriptor(),
+ false,
+ &MacBaseSocketServer::FileDescriptorCallback,
+ &ctx);
+ if (!desc) {
+ return false;
+ }
+
+ CFFileDescriptorEnableCallBacks(desc, kCFFileDescriptorReadCallBack);
+ CFRunLoopSourceRef ref =
+ CFFileDescriptorCreateRunLoopSource(kCFAllocatorDefault, desc, 0);
+
+ if (!ref) {
+ CFRelease(desc);
+ return false;
+ }
+
+ CFRunLoopAddSource(CFRunLoopGetCurrent(), ref, kCFRunLoopCommonModes);
+ CFRelease(desc);
+ CFRelease(ref);
+ }
+
+ return true;
+}
+
+// Used to disable socket events from waking our message queue when
+// process_io is false. Does not disable signal event handling though.
+void MacBaseSocketServer::EnableSocketCallbacks(bool enable) {
+ for (std::set<MacAsyncSocket*>::iterator it = sockets().begin();
+ it != sockets().end(); ++it) {
+ if (enable) {
+ (*it)->EnableCallbacks();
+ } else {
+ (*it)->DisableCallbacks();
+ }
+ }
+}
+
+void MacBaseSocketServer::FileDescriptorCallback(CFFileDescriptorRef fd,
+ CFOptionFlags flags,
+ void* context) {
+ MacBaseSocketServer* this_ss =
+ reinterpret_cast<MacBaseSocketServer*>(context);
+ ASSERT(this_ss);
+ Dispatcher* signal_dispatcher = this_ss->signal_dispatcher();
+ ASSERT(signal_dispatcher);
+
+ signal_dispatcher->OnPreEvent(DE_READ);
+ signal_dispatcher->OnEvent(DE_READ, 0);
+ CFFileDescriptorEnableCallBacks(fd, kCFFileDescriptorReadCallBack);
+}
+
+
+///////////////////////////////////////////////////////////////////////////////
+// MacCFSocketServer
+///////////////////////////////////////////////////////////////////////////////
+
+void WakeUpCallback(void* info) {
+ MacCFSocketServer* server = static_cast<MacCFSocketServer*>(info);
+ ASSERT(NULL != server);
+ server->OnWakeUpCallback();
+}
+
+MacCFSocketServer::MacCFSocketServer()
+ : run_loop_(CFRunLoopGetCurrent()),
+ wake_up_(NULL) {
+ CFRunLoopSourceContext ctx;
+ memset(&ctx, 0, sizeof(ctx));
+ ctx.info = this;
+ ctx.perform = &WakeUpCallback;
+ wake_up_ = CFRunLoopSourceCreate(NULL, 0, &ctx);
+ ASSERT(NULL != wake_up_);
+ if (wake_up_) {
+ CFRunLoopAddSource(run_loop_, wake_up_, kCFRunLoopCommonModes);
+ }
+}
+
+MacCFSocketServer::~MacCFSocketServer() {
+ if (wake_up_) {
+ CFRunLoopSourceInvalidate(wake_up_);
+ CFRelease(wake_up_);
+ }
+}
+
+bool MacCFSocketServer::Wait(int cms, bool process_io) {
+ ASSERT(CFRunLoopGetCurrent() == run_loop_);
+
+ if (!process_io && cms == 0) {
+ // No op.
+ return true;
+ }
+
+ if (!process_io) {
+ // No way to listen to common modes and not get socket events, unless
+ // we disable each one's callbacks.
+ EnableSocketCallbacks(false);
+ }
+
+ SInt32 result;
+ if (kForever == cms) {
+ do {
+ // Would prefer to run in a custom mode that only listens to wake_up,
+      // but we have QTKit sending work to the main thread, which is
+      // effectively blocked here, causing deadlock. Thus listen to the
+      // common modes.
+ // TODO: If QTKit becomes thread safe, do the above.
+ result = CFRunLoopRunInMode(kCFRunLoopDefaultMode, 10000000, false);
+ } while (result != kCFRunLoopRunFinished && result != kCFRunLoopRunStopped);
+ } else {
+ // TODO: In the case of 0ms wait, this will only process one event, so we
+ // may want to loop until it returns TimedOut.
+ CFTimeInterval seconds = cms / 1000.0;
+ result = CFRunLoopRunInMode(kCFRunLoopDefaultMode, seconds, false);
+ }
+
+ if (!process_io) {
+ // Reenable them. Hopefully this won't cause spurious callbacks or
+ // missing ones while they were disabled.
+ EnableSocketCallbacks(true);
+ }
+
+ if (kCFRunLoopRunFinished == result) {
+ return false;
+ }
+ return true;
+}
+
+void MacCFSocketServer::WakeUp() {
+ if (wake_up_) {
+ CFRunLoopSourceSignal(wake_up_);
+ CFRunLoopWakeUp(run_loop_);
+ }
+}
+
+void MacCFSocketServer::OnWakeUpCallback() {
+ ASSERT(run_loop_ == CFRunLoopGetCurrent());
+ CFRunLoopStop(run_loop_);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// MacCarbonSocketServer
+///////////////////////////////////////////////////////////////////////////////
+#ifndef CARBON_DEPRECATED
+
+const UInt32 kEventClassSocketServer = 'MCSS';
+const UInt32 kEventWakeUp = 'WAKE';
+const EventTypeSpec kEventWakeUpSpec[] = {
+ { kEventClassSocketServer, kEventWakeUp }
+};
+
+std::string DecodeEvent(EventRef event) {
+ std::string str;
+ DecodeFourChar(::GetEventClass(event), &str);
+ str.push_back(':');
+ DecodeFourChar(::GetEventKind(event), &str);
+ return str;
+}
+
+MacCarbonSocketServer::MacCarbonSocketServer()
+ : event_queue_(GetCurrentEventQueue()), wake_up_(NULL) {
+ VERIFY(noErr == CreateEvent(NULL, kEventClassSocketServer, kEventWakeUp, 0,
+ kEventAttributeUserEvent, &wake_up_));
+}
+
+MacCarbonSocketServer::~MacCarbonSocketServer() {
+ if (wake_up_) {
+ ReleaseEvent(wake_up_);
+ }
+}
+
+bool MacCarbonSocketServer::Wait(int cms, bool process_io) {
+ ASSERT(GetCurrentEventQueue() == event_queue_);
+
+ // Listen to all events if we're processing I/O.
+ // Only listen for our wakeup event if we're not.
+ UInt32 num_types = 0;
+ const EventTypeSpec* events = NULL;
+ if (!process_io) {
+ num_types = GetEventTypeCount(kEventWakeUpSpec);
+ events = kEventWakeUpSpec;
+ }
+
+ EventTargetRef target = GetEventDispatcherTarget();
+ EventTimeout timeout =
+ (kForever == cms) ? kEventDurationForever : cms / 1000.0;
+ EventTimeout end_time = GetCurrentEventTime() + timeout;
+
+ bool done = false;
+ while (!done) {
+ EventRef event;
+ OSStatus result = ReceiveNextEvent(num_types, events, timeout, true,
+ &event);
+ if (noErr == result) {
+ if (wake_up_ != event) {
+ LOG_F(LS_VERBOSE) << "Dispatching event: " << DecodeEvent(event);
+ result = SendEventToEventTarget(event, target);
+ if ((noErr != result) && (eventNotHandledErr != result)) {
+ LOG_E(LS_ERROR, OS, result) << "SendEventToEventTarget";
+ }
+ } else {
+ done = true;
+ }
+ ReleaseEvent(event);
+ } else if (eventLoopTimedOutErr == result) {
+ ASSERT(cms != kForever);
+ done = true;
+ } else if (eventLoopQuitErr == result) {
+ // Ignore this... we get spurious quits for a variety of reasons.
+ LOG_E(LS_VERBOSE, OS, result) << "ReceiveNextEvent";
+ } else {
+ // Some strange error occurred. Log it.
+ LOG_E(LS_WARNING, OS, result) << "ReceiveNextEvent";
+ return false;
+ }
+ if (kForever != cms) {
+ timeout = end_time - GetCurrentEventTime();
+ }
+ }
+ return true;
+}
+
+void MacCarbonSocketServer::WakeUp() {
+ if (!IsEventInQueue(event_queue_, wake_up_)) {
+ RetainEvent(wake_up_);
+ OSStatus result = PostEventToQueue(event_queue_, wake_up_,
+ kEventPriorityStandard);
+ if (noErr != result) {
+ LOG_E(LS_ERROR, OS, result) << "PostEventToQueue";
+ }
+ }
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// MacCarbonAppSocketServer
+///////////////////////////////////////////////////////////////////////////////
+
+MacCarbonAppSocketServer::MacCarbonAppSocketServer()
+ : event_queue_(GetCurrentEventQueue()) {
+ // Install event handler
+ VERIFY(noErr == InstallApplicationEventHandler(
+ NewEventHandlerUPP(WakeUpEventHandler), 1, kEventWakeUpSpec, this,
+ &event_handler_));
+
+ // Install a timer and set it idle to begin with.
+ VERIFY(noErr == InstallEventLoopTimer(GetMainEventLoop(),
+ kEventDurationForever,
+ kEventDurationForever,
+ NewEventLoopTimerUPP(TimerHandler),
+ this,
+ &timer_));
+}
+
+MacCarbonAppSocketServer::~MacCarbonAppSocketServer() {
+ RemoveEventLoopTimer(timer_);
+ RemoveEventHandler(event_handler_);
+}
+
+OSStatus MacCarbonAppSocketServer::WakeUpEventHandler(
+ EventHandlerCallRef next, EventRef event, void *data) {
+ QuitApplicationEventLoop();
+ return noErr;
+}
+
+void MacCarbonAppSocketServer::TimerHandler(
+ EventLoopTimerRef timer, void *data) {
+ QuitApplicationEventLoop();
+}
+
+bool MacCarbonAppSocketServer::Wait(int cms, bool process_io) {
+ if (!process_io && cms == 0) {
+ // No op.
+ return true;
+ }
+ if (kForever != cms) {
+ // Start a timer.
+ OSStatus error =
+ SetEventLoopTimerNextFireTime(timer_, cms / 1000.0);
+ if (error != noErr) {
+ LOG(LS_ERROR) << "Failed setting next fire time.";
+ }
+ }
+ if (!process_io) {
+ // No way to listen to common modes and not get socket events, unless
+ // we disable each one's callbacks.
+ EnableSocketCallbacks(false);
+ }
+ RunApplicationEventLoop();
+ if (!process_io) {
+ // Reenable them. Hopefully this won't cause spurious callbacks or
+ // missing ones while they were disabled.
+ EnableSocketCallbacks(true);
+ }
+ return true;
+}
+
+void MacCarbonAppSocketServer::WakeUp() {
+ // TODO: No-op if there's already a WakeUp in flight.
+ EventRef wake_up;
+ VERIFY(noErr == CreateEvent(NULL, kEventClassSocketServer, kEventWakeUp, 0,
+ kEventAttributeUserEvent, &wake_up));
+ OSStatus result = PostEventToQueue(event_queue_, wake_up,
+ kEventPriorityStandard);
+ if (noErr != result) {
+ LOG_E(LS_ERROR, OS, result) << "PostEventToQueue";
+ }
+ ReleaseEvent(wake_up);
+}
+
+#endif
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/macsocketserver.h b/chromium/third_party/webrtc/base/macsocketserver.h
new file mode 100644
index 00000000000..8eebac6c632
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macsocketserver.h
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_BASE_MACSOCKETSERVER_H__
+#define WEBRTC_BASE_MACSOCKETSERVER_H__
+
+#include <set>
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) // Invalid on IOS
+#include <Carbon/Carbon.h>
+#endif
+#include "webrtc/base/physicalsocketserver.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// MacBaseSocketServer
+///////////////////////////////////////////////////////////////////////////////
+class MacAsyncSocket;
+
+class MacBaseSocketServer : public PhysicalSocketServer {
+ public:
+ MacBaseSocketServer();
+ virtual ~MacBaseSocketServer();
+
+ // SocketServer Interface
+ virtual Socket* CreateSocket(int type) { return NULL; }
+ virtual Socket* CreateSocket(int family, int type) { return NULL; }
+
+ virtual AsyncSocket* CreateAsyncSocket(int type);
+ virtual AsyncSocket* CreateAsyncSocket(int family, int type);
+
+ virtual bool Wait(int cms, bool process_io) = 0;
+ virtual void WakeUp() = 0;
+
+ void RegisterSocket(MacAsyncSocket* socket);
+ void UnregisterSocket(MacAsyncSocket* socket);
+
+ // PhysicalSocketServer Overrides
+ virtual bool SetPosixSignalHandler(int signum, void (*handler)(int));
+
+ protected:
+ void EnableSocketCallbacks(bool enable);
+ const std::set<MacAsyncSocket*>& sockets() {
+ return sockets_;
+ }
+
+ private:
+ static void FileDescriptorCallback(CFFileDescriptorRef ref,
+ CFOptionFlags flags,
+ void* context);
+
+ std::set<MacAsyncSocket*> sockets_;
+};
+
+// Core Foundation implementation of the socket server. While idle it
+// will run the current CF run loop. When the socket server has work
+// to do, the run loop will be paused. Does not support Carbon or Cocoa
+// UI interaction.
+class MacCFSocketServer : public MacBaseSocketServer {
+ public:
+ MacCFSocketServer();
+ virtual ~MacCFSocketServer();
+
+ // SocketServer Interface
+ virtual bool Wait(int cms, bool process_io);
+ virtual void WakeUp();
+ void OnWakeUpCallback();
+
+ private:
+ CFRunLoopRef run_loop_;
+ CFRunLoopSourceRef wake_up_;
+};
+
+#ifndef CARBON_DEPRECATED
+
+///////////////////////////////////////////////////////////////////////////////
+// MacCarbonSocketServer
+///////////////////////////////////////////////////////////////////////////////
+
+// Interacts with the Carbon event queue. While idle it will block,
+// waiting for events. When the socket server has work to do, it will
+// post a 'wake up' event to the queue, causing the thread to exit the
+// event loop until the next call to Wait. Other events are dispatched
+// to their target. Supports Carbon and Cocoa UI interaction.
+class MacCarbonSocketServer : public MacBaseSocketServer {
+ public:
+ MacCarbonSocketServer();
+ virtual ~MacCarbonSocketServer();
+
+ // SocketServer Interface
+ virtual bool Wait(int cms, bool process_io);
+ virtual void WakeUp();
+
+ private:
+ EventQueueRef event_queue_;
+ EventRef wake_up_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// MacCarbonAppSocketServer
+///////////////////////////////////////////////////////////////////////////////
+
+// Runs the Carbon application event loop on the current thread while
+// idle. When the socket server has work to do, it will post an event
+// to the queue, causing the thread to exit the event loop until the
+// next call to Wait. Other events are automatically dispatched to
+// their target.
+class MacCarbonAppSocketServer : public MacBaseSocketServer {
+ public:
+ MacCarbonAppSocketServer();
+ virtual ~MacCarbonAppSocketServer();
+
+ // SocketServer Interface
+ virtual bool Wait(int cms, bool process_io);
+ virtual void WakeUp();
+
+ private:
+ static OSStatus WakeUpEventHandler(EventHandlerCallRef next, EventRef event,
+ void *data);
+ static void TimerHandler(EventLoopTimerRef timer, void *data);
+
+ EventQueueRef event_queue_;
+ EventHandlerRef event_handler_;
+ EventLoopTimerRef timer_;
+};
+
+#endif
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MACSOCKETSERVER_H__
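
All three servers plug into the generic SocketServer slot of an rtc::Thread; the unit tests later in this patch do exactly that with SocketServerScope. A minimal sketch along the same lines, assuming SocketServerScope and Thread::ProcessMessages from webrtc/base/thread.h; it is a usage illustration, not code from the patch.

#include "webrtc/base/macsocketserver.h"
#include "webrtc/base/thread.h"

int main() {
  // Install the CF-based server on the calling thread so that waiting for
  // thread messages also services CFSocket callbacks via the CFRunLoop.
  rtc::MacCFSocketServer server;
  rtc::SocketServerScope scope(&server);

  rtc::Thread::Current()->ProcessMessages(100);  // Pump for roughly 100 ms.
  return 0;
}

MacCocoaSocketServer follows the same pattern, but must live on the Cocoa main thread because its Wait() drives [NSApp run] internally.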
diff --git a/chromium/third_party/webrtc/base/macsocketserver_unittest.cc b/chromium/third_party/webrtc/base/macsocketserver_unittest.cc
new file mode 100644
index 00000000000..e98be918ca5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macsocketserver_unittest.cc
@@ -0,0 +1,237 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/socket_unittest.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/macsocketserver.h"
+
+namespace rtc {
+
+class WakeThread : public Thread {
+ public:
+ WakeThread(SocketServer* ss) : ss_(ss) {
+ }
+ virtual ~WakeThread() {
+ Stop();
+ }
+ void Run() {
+ ss_->WakeUp();
+ }
+ private:
+ SocketServer* ss_;
+};
+
+#ifndef CARBON_DEPRECATED
+
+// Test that MacCFSocketServer::Wait works as expected.
+TEST(MacCFSocketServerTest, TestWait) {
+ MacCFSocketServer server;
+ uint32 start = Time();
+ server.Wait(1000, true);
+ EXPECT_GE(TimeSince(start), 1000);
+}
+
+// Test that MacCFSocketServer::Wakeup works as expected.
+TEST(MacCFSocketServerTest, TestWakeup) {
+ MacCFSocketServer server;
+ WakeThread thread(&server);
+ uint32 start = Time();
+ thread.Start();
+ server.Wait(10000, true);
+ EXPECT_LT(TimeSince(start), 10000);
+}
+
+// Test that MacCarbonSocketServer::Wait works as expected.
+TEST(MacCarbonSocketServerTest, TestWait) {
+ MacCarbonSocketServer server;
+ uint32 start = Time();
+ server.Wait(1000, true);
+ EXPECT_GE(TimeSince(start), 1000);
+}
+
+// Test that MacCarbonSocketServer::Wakeup works as expected.
+TEST(MacCarbonSocketServerTest, TestWakeup) {
+ MacCarbonSocketServer server;
+ WakeThread thread(&server);
+ uint32 start = Time();
+ thread.Start();
+ server.Wait(10000, true);
+ EXPECT_LT(TimeSince(start), 10000);
+}
+
+// Test that MacCarbonAppSocketServer::Wait works as expected.
+TEST(MacCarbonAppSocketServerTest, TestWait) {
+ MacCarbonAppSocketServer server;
+ uint32 start = Time();
+ server.Wait(1000, true);
+ EXPECT_GE(TimeSince(start), 1000);
+}
+
+// Test that MacCarbonAppSocketServer::Wakeup works as expected.
+TEST(MacCarbonAppSocketServerTest, TestWakeup) {
+ MacCarbonAppSocketServer server;
+ WakeThread thread(&server);
+ uint32 start = Time();
+ thread.Start();
+ server.Wait(10000, true);
+ EXPECT_LT(TimeSince(start), 10000);
+}
+
+#endif
+
+// Test that MacAsyncSocket passes all the generic Socket tests.
+class MacAsyncSocketTest : public SocketTest {
+ protected:
+ MacAsyncSocketTest()
+ : server_(CreateSocketServer()),
+ scope_(server_.get()) {}
+ // Override for other implementations of MacBaseSocketServer.
+ virtual MacBaseSocketServer* CreateSocketServer() {
+ return new MacCFSocketServer();
+ };
+ rtc::scoped_ptr<MacBaseSocketServer> server_;
+ SocketServerScope scope_;
+};
+
+TEST_F(MacAsyncSocketTest, TestConnectIPv4) {
+ SocketTest::TestConnectIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, TestConnectIPv6) {
+ SocketTest::TestConnectIPv6();
+}
+
+TEST_F(MacAsyncSocketTest, TestConnectWithDnsLookupIPv4) {
+ SocketTest::TestConnectWithDnsLookupIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, TestConnectWithDnsLookupIPv6) {
+ SocketTest::TestConnectWithDnsLookupIPv6();
+}
+
+// BUG=https://code.google.com/p/webrtc/issues/detail?id=2272
+TEST_F(MacAsyncSocketTest, DISABLED_TestConnectFailIPv4) {
+ SocketTest::TestConnectFailIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, TestConnectFailIPv6) {
+ SocketTest::TestConnectFailIPv6();
+}
+
+// Re-enable once we have Mac async DNS.
+TEST_F(MacAsyncSocketTest, DISABLED_TestConnectWithDnsLookupFailIPv4) {
+ SocketTest::TestConnectWithDnsLookupFailIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, DISABLED_TestConnectWithDnsLookupFailIPv6) {
+ SocketTest::TestConnectWithDnsLookupFailIPv6();
+}
+
+TEST_F(MacAsyncSocketTest, TestConnectWithClosedSocketIPv4) {
+ SocketTest::TestConnectWithClosedSocketIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, TestConnectWithClosedSocketIPv6) {
+ SocketTest::TestConnectWithClosedSocketIPv6();
+}
+
+// Flaky at the moment (10% failure rate). Seems the client doesn't get
+// signalled in a timely manner...
+TEST_F(MacAsyncSocketTest, DISABLED_TestServerCloseDuringConnectIPv4) {
+ SocketTest::TestServerCloseDuringConnectIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, DISABLED_TestServerCloseDuringConnectIPv6) {
+ SocketTest::TestServerCloseDuringConnectIPv6();
+}
+// Flaky at the moment (0.5% failure rate). Seems the client doesn't get
+// signalled in a timely manner...
+TEST_F(MacAsyncSocketTest, TestClientCloseDuringConnectIPv4) {
+ SocketTest::TestClientCloseDuringConnectIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, TestClientCloseDuringConnectIPv6) {
+ SocketTest::TestClientCloseDuringConnectIPv6();
+}
+
+TEST_F(MacAsyncSocketTest, TestServerCloseIPv4) {
+ SocketTest::TestServerCloseIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, TestServerCloseIPv6) {
+ SocketTest::TestServerCloseIPv6();
+}
+
+TEST_F(MacAsyncSocketTest, TestCloseInClosedCallbackIPv4) {
+ SocketTest::TestCloseInClosedCallbackIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, TestCloseInClosedCallbackIPv6) {
+ SocketTest::TestCloseInClosedCallbackIPv6();
+}
+
+TEST_F(MacAsyncSocketTest, TestSocketServerWaitIPv4) {
+ SocketTest::TestSocketServerWaitIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, TestSocketServerWaitIPv6) {
+ SocketTest::TestSocketServerWaitIPv6();
+}
+
+TEST_F(MacAsyncSocketTest, TestTcpIPv4) {
+ SocketTest::TestTcpIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, TestTcpIPv6) {
+ SocketTest::TestTcpIPv6();
+}
+
+TEST_F(MacAsyncSocketTest, TestSingleFlowControlCallbackIPv4) {
+ SocketTest::TestSingleFlowControlCallbackIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, TestSingleFlowControlCallbackIPv6) {
+ SocketTest::TestSingleFlowControlCallbackIPv6();
+}
+
+TEST_F(MacAsyncSocketTest, DISABLED_TestUdpIPv4) {
+ SocketTest::TestUdpIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, DISABLED_TestUdpIPv6) {
+ SocketTest::TestUdpIPv6();
+}
+
+TEST_F(MacAsyncSocketTest, DISABLED_TestGetSetOptionsIPv4) {
+ SocketTest::TestGetSetOptionsIPv4();
+}
+
+TEST_F(MacAsyncSocketTest, DISABLED_TestGetSetOptionsIPv6) {
+ SocketTest::TestGetSetOptionsIPv6();
+}
+
+#ifndef CARBON_DEPRECATED
+class MacCarbonAppAsyncSocketTest : public MacAsyncSocketTest {
+ virtual MacBaseSocketServer* CreateSocketServer() {
+ return new MacCarbonAppSocketServer();
+ };
+};
+
+TEST_F(MacCarbonAppAsyncSocketTest, TestSocketServerWaitIPv4) {
+ SocketTest::TestSocketServerWaitIPv4();
+}
+
+TEST_F(MacCarbonAppAsyncSocketTest, TestSocketServerWaitIPv6) {
+ SocketTest::TestSocketServerWaitIPv6();
+}
+#endif
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/macutils.cc b/chromium/third_party/webrtc/base/macutils.cc
new file mode 100644
index 00000000000..6e436d4a805
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macutils.cc
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <sstream>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/macutils.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/stringutils.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+
+bool ToUtf8(const CFStringRef str16, std::string* str8) {
+ if ((NULL == str16) || (NULL == str8)) {
+ return false;
+ }
+ size_t maxlen = CFStringGetMaximumSizeForEncoding(CFStringGetLength(str16),
+ kCFStringEncodingUTF8) + 1;
+ scoped_ptr<char[]> buffer(new char[maxlen]);
+ if (!buffer || !CFStringGetCString(str16, buffer.get(), maxlen,
+ kCFStringEncodingUTF8)) {
+ return false;
+ }
+ str8->assign(buffer.get());
+ return true;
+}
+
+bool ToUtf16(const std::string& str8, CFStringRef* str16) {
+ if (NULL == str16) {
+ return false;
+ }
+ *str16 = CFStringCreateWithBytes(kCFAllocatorDefault,
+ reinterpret_cast<const UInt8*>(str8.data()),
+ str8.length(), kCFStringEncodingUTF8,
+ false);
+ return NULL != *str16;
+}
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+void DecodeFourChar(UInt32 fc, std::string* out) {
+ std::stringstream ss;
+ ss << '\'';
+ bool printable = true;
+ for (int i = 3; i >= 0; --i) {
+ char ch = (fc >> (8 * i)) & 0xFF;
+ if (isprint(static_cast<unsigned char>(ch))) {
+ ss << ch;
+ } else {
+ printable = false;
+ break;
+ }
+ }
+ if (printable) {
+ ss << '\'';
+ } else {
+ ss.str("");
+ ss << "0x" << std::hex << fc;
+ }
+ out->append(ss.str());
+}
+
+static bool GetGestalt(OSType ostype, int* value) {
+ ASSERT(NULL != value);
+ SInt32 native_value;
+ OSStatus result = Gestalt(ostype, &native_value);
+ if (noErr == result) {
+ *value = native_value;
+ return true;
+ }
+ std::string str;
+ DecodeFourChar(ostype, &str);
+ LOG_E(LS_ERROR, OS, result) << "Gestalt(" << str << ")";
+ return false;
+}
+
+bool GetOSVersion(int* major, int* minor, int* bugfix) {
+ ASSERT(major && minor && bugfix);
+ if (!GetGestalt(gestaltSystemVersion, major)) {
+ return false;
+ }
+ if (*major < 0x1040) {
+ *bugfix = *major & 0xF;
+ *minor = (*major >> 4) & 0xF;
+ *major = (*major >> 8);
+ return true;
+ }
+ return GetGestalt(gestaltSystemVersionMajor, major) &&
+ GetGestalt(gestaltSystemVersionMinor, minor) &&
+ GetGestalt(gestaltSystemVersionBugFix, bugfix);
+}
+
+MacOSVersionName GetOSVersionName() {
+ int major = 0, minor = 0, bugfix = 0;
+ if (!GetOSVersion(&major, &minor, &bugfix)) {
+ return kMacOSUnknown;
+ }
+ if (major > 10) {
+ return kMacOSNewer;
+ }
+ if ((major < 10) || (minor < 3)) {
+ return kMacOSOlder;
+ }
+ switch (minor) {
+ case 3:
+ return kMacOSPanther;
+ case 4:
+ return kMacOSTiger;
+ case 5:
+ return kMacOSLeopard;
+ case 6:
+ return kMacOSSnowLeopard;
+ case 7:
+ return kMacOSLion;
+ case 8:
+ return kMacOSMountainLion;
+ case 9:
+ return kMacOSMavericks;
+ }
+ return kMacOSNewer;
+}
+
+bool GetQuickTimeVersion(std::string* out) {
+ int ver;
+ if (!GetGestalt(gestaltQuickTimeVersion, &ver)) {
+ return false;
+ }
+
+ std::stringstream ss;
+ ss << std::hex << ver;
+ *out = ss.str();
+ return true;
+}
+
+bool RunAppleScript(const std::string& script) {
+ // TODO(thaloun): Add a .mm file that contains something like this:
+ // NSString source from script
+ // NSAppleScript* appleScript = [[NSAppleScript alloc] initWithSource:&source]
+ // if (appleScript != nil) {
+ // [appleScript executeAndReturnError:nil]
+ // [appleScript release]
+#ifndef CARBON_DEPRECATED
+ ComponentInstance component = NULL;
+ AEDesc script_desc;
+ AEDesc result_data;
+ OSStatus err;
+ OSAID script_id, result_id;
+
+ AECreateDesc(typeNull, NULL, 0, &script_desc);
+ AECreateDesc(typeNull, NULL, 0, &result_data);
+ script_id = kOSANullScript;
+ result_id = kOSANullScript;
+
+ component = OpenDefaultComponent(kOSAComponentType, typeAppleScript);
+ if (component == NULL) {
+ LOG(LS_ERROR) << "Failed opening Apple Script component";
+ return false;
+ }
+ err = AECreateDesc(typeUTF8Text, script.data(), script.size(), &script_desc);
+ if (err != noErr) {
+ CloseComponent(component);
+ LOG(LS_ERROR) << "Failed creating Apple Script description";
+ return false;
+ }
+
+ err = OSACompile(component, &script_desc, kOSAModeCanInteract, &script_id);
+ if (err != noErr) {
+ AEDisposeDesc(&script_desc);
+ if (script_id != kOSANullScript) {
+ OSADispose(component, script_id);
+ }
+ CloseComponent(component);
+ LOG(LS_ERROR) << "Error compiling Apple Script";
+ return false;
+ }
+
+ err = OSAExecute(component, script_id, kOSANullScript, kOSAModeCanInteract,
+ &result_id);
+
+ if (err == errOSAScriptError) {
+ LOG(LS_ERROR) << "Error when executing Apple Script: " << script;
+ AECreateDesc(typeNull, NULL, 0, &result_data);
+ OSAScriptError(component, kOSAErrorMessage, typeChar, &result_data);
+ int len = AEGetDescDataSize(&result_data);
+ char* data = (char*) malloc(len);
+ if (data != NULL) {
+ err = AEGetDescData(&result_data, data, len);
+ LOG(LS_ERROR) << "Script error: " << data;
+ }
+ AEDisposeDesc(&script_desc);
+ AEDisposeDesc(&result_data);
+ return false;
+ }
+ AEDisposeDesc(&script_desc);
+ if (script_id != kOSANullScript) {
+ OSADispose(component, script_id);
+ }
+ if (result_id != kOSANullScript) {
+ OSADispose(component, result_id);
+ }
+ CloseComponent(component);
+ return true;
+#else
+ // TODO(thaloun): Support applescripts with the NSAppleScript API.
+ return false;
+#endif // CARBON_DEPRECATED
+}
+#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/macutils.h b/chromium/third_party/webrtc/base/macutils.h
new file mode 100644
index 00000000000..35c3d1870b3
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macutils.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_MACUTILS_H__
+#define WEBRTC_BASE_MACUTILS_H__
+
+#include <CoreFoundation/CoreFoundation.h>
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <Carbon/Carbon.h>
+#endif
+#include <string>
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+
+// Note that some of these functions work for both iOS and Mac OS X. The ones
+// that are specific to Mac are #ifdef'ed as such.
+
+bool ToUtf8(const CFStringRef str16, std::string* str8);
+bool ToUtf16(const std::string& str8, CFStringRef* str16);
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+void DecodeFourChar(UInt32 fc, std::string* out);
+
+enum MacOSVersionName {
+ kMacOSUnknown, // ???
+ kMacOSOlder, // 10.2-
+ kMacOSPanther, // 10.3
+ kMacOSTiger, // 10.4
+ kMacOSLeopard, // 10.5
+ kMacOSSnowLeopard, // 10.6
+ kMacOSLion, // 10.7
+ kMacOSMountainLion, // 10.8
+ kMacOSMavericks, // 10.9
+ kMacOSNewer, // 10.10+
+};
+
+bool GetOSVersion(int* major, int* minor, int* bugfix);
+MacOSVersionName GetOSVersionName();
+bool GetQuickTimeVersion(std::string* version);
+
+// Runs the given AppleScript. Only supports scripts that do not
+// require user interaction.
+bool RunAppleScript(const std::string& script);
+#endif
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MACUTILS_H__
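
A short sketch of how the version and AppleScript helpers might be combined on an OS X (non-iOS) build; the script body is a trivial placeholder and the version threshold is only an example, not something this patch prescribes.

#include <string>

#include "webrtc/base/logging.h"
#include "webrtc/base/macutils.h"

void LogPlatformInfo() {
  int major = 0, minor = 0, bugfix = 0;
  if (rtc::GetOSVersion(&major, &minor, &bugfix)) {
    LOG(LS_INFO) << "Running on OS X " << major << "." << minor << "."
                 << bugfix;
  }
  if (rtc::GetOSVersionName() >= rtc::kMacOSLeopard) {
    // Scripts must not require user interaction; this one is a no-op.
    if (!rtc::RunAppleScript("set x to 1")) {
      LOG(LS_WARNING) << "AppleScript execution failed";
    }
  }
}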
diff --git a/chromium/third_party/webrtc/base/macutils_unittest.cc b/chromium/third_party/webrtc/base/macutils_unittest.cc
new file mode 100644
index 00000000000..7150bf35505
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macutils_unittest.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/macutils.h"
+
+TEST(MacUtilsTest, GetOsVersionName) {
+ rtc::MacOSVersionName ver = rtc::GetOSVersionName();
+ LOG(LS_INFO) << "GetOsVersionName " << ver;
+ EXPECT_NE(rtc::kMacOSUnknown, ver);
+}
+
+TEST(MacUtilsTest, GetQuickTimeVersion) {
+ std::string version;
+ EXPECT_TRUE(rtc::GetQuickTimeVersion(&version));
+ LOG(LS_INFO) << "GetQuickTimeVersion " << version;
+}
+
+TEST(MacUtilsTest, RunAppleScriptCompileError) {
+ std::string script("set value to to 5");
+ EXPECT_FALSE(rtc::RunAppleScript(script));
+}
+
+TEST(MacUtilsTest, RunAppleScriptRuntimeError) {
+ std::string script("set value to 5 / 0");
+ EXPECT_FALSE(rtc::RunAppleScript(script));
+}
+
+#ifdef CARBON_DEPRECATED
+TEST(MacUtilsTest, DISABLED_RunAppleScriptSuccess) {
+#else
+TEST(MacUtilsTest, RunAppleScriptSuccess) {
+#endif
+ std::string script("set value to 5");
+ EXPECT_TRUE(rtc::RunAppleScript(script));
+}
diff --git a/chromium/third_party/webrtc/base/macwindowpicker.cc b/chromium/third_party/webrtc/base/macwindowpicker.cc
new file mode 100644
index 00000000000..bb97d20f1ac
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macwindowpicker.cc
@@ -0,0 +1,256 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/base/macwindowpicker.h"
+
+#include <ApplicationServices/ApplicationServices.h>
+#include <CoreFoundation/CoreFoundation.h>
+#include <dlfcn.h>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/macutils.h"
+
+namespace rtc {
+
+static const char* kCoreGraphicsName =
+ "/System/Library/Frameworks/ApplicationServices.framework/Frameworks/"
+ "CoreGraphics.framework/CoreGraphics";
+
+static const char* kWindowListCopyWindowInfo = "CGWindowListCopyWindowInfo";
+static const char* kWindowListCreateDescriptionFromArray =
+ "CGWindowListCreateDescriptionFromArray";
+
+// Function pointer for holding the CGWindowListCopyWindowInfo function.
+typedef CFArrayRef(*CGWindowListCopyWindowInfoProc)(CGWindowListOption,
+ CGWindowID);
+
+// Function pointer for holding the CGWindowListCreateDescriptionFromArray
+// function.
+typedef CFArrayRef(*CGWindowListCreateDescriptionFromArrayProc)(CFArrayRef);
+
+MacWindowPicker::MacWindowPicker() : lib_handle_(NULL), get_window_list_(NULL),
+ get_window_list_desc_(NULL) {
+}
+
+MacWindowPicker::~MacWindowPicker() {
+ if (lib_handle_ != NULL) {
+ dlclose(lib_handle_);
+ }
+}
+
+bool MacWindowPicker::Init() {
+  // TODO: If this class grows to use more dynamically loaded functions
+ // from the CoreGraphics framework, consider using
+ // webrtc/base/latebindingsymboltable.h.
+ lib_handle_ = dlopen(kCoreGraphicsName, RTLD_NOW);
+ if (lib_handle_ == NULL) {
+ LOG(LS_ERROR) << "Could not load CoreGraphics";
+ return false;
+ }
+
+ get_window_list_ = dlsym(lib_handle_, kWindowListCopyWindowInfo);
+ get_window_list_desc_ =
+ dlsym(lib_handle_, kWindowListCreateDescriptionFromArray);
+ if (get_window_list_ == NULL || get_window_list_desc_ == NULL) {
+    // The CGWindowListCopyWindowInfo and
+    // CGWindowListCreateDescriptionFromArray functions were introduced
+    // in Leopard (10.5), so this is a normal failure on Tiger.
+ LOG(LS_INFO) << "Failed to load Core Graphics symbols";
+ dlclose(lib_handle_);
+ lib_handle_ = NULL;
+ return false;
+ }
+
+ return true;
+}
+
+bool MacWindowPicker::IsVisible(const WindowId& id) {
+  // Init if we're not already initialized.
+ if (get_window_list_desc_ == NULL && !Init()) {
+ return false;
+ }
+ CGWindowID ids[1];
+ ids[0] = id.id();
+ CFArrayRef window_id_array =
+ CFArrayCreate(NULL, reinterpret_cast<const void **>(&ids), 1, NULL);
+
+ CFArrayRef window_array =
+ reinterpret_cast<CGWindowListCreateDescriptionFromArrayProc>(
+ get_window_list_desc_)(window_id_array);
+ if (window_array == NULL || 0 == CFArrayGetCount(window_array)) {
+ // Could not find the window. It might have been closed.
+ LOG(LS_INFO) << "Window not found";
+ CFRelease(window_id_array);
+ return false;
+ }
+
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, 0));
+ CFBooleanRef is_visible = reinterpret_cast<CFBooleanRef>(
+ CFDictionaryGetValue(window, kCGWindowIsOnscreen));
+
+  // Check that the window is visible. If not, we might crash.
+ bool visible = false;
+ if (is_visible != NULL) {
+ visible = CFBooleanGetValue(is_visible);
+ }
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+ return visible;
+}
+
+bool MacWindowPicker::MoveToFront(const WindowId& id) {
+ // Init if we're not already initialized.
+ if (get_window_list_desc_ == NULL && !Init()) {
+ return false;
+ }
+ CGWindowID ids[1];
+ ids[0] = id.id();
+ CFArrayRef window_id_array =
+ CFArrayCreate(NULL, reinterpret_cast<const void **>(&ids), 1, NULL);
+
+ CFArrayRef window_array =
+ reinterpret_cast<CGWindowListCreateDescriptionFromArrayProc>(
+ get_window_list_desc_)(window_id_array);
+ if (window_array == NULL || 0 == CFArrayGetCount(window_array)) {
+ // Could not find the window. It might have been closed.
+ LOG(LS_INFO) << "Window not found";
+ CFRelease(window_id_array);
+ return false;
+ }
+
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, 0));
+ CFStringRef window_name_ref = reinterpret_cast<CFStringRef>(
+ CFDictionaryGetValue(window, kCGWindowName));
+ CFNumberRef application_pid = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowOwnerPID));
+
+ int pid_val;
+ CFNumberGetValue(application_pid, kCFNumberIntType, &pid_val);
+ std::string window_name;
+ ToUtf8(window_name_ref, &window_name);
+
+  // Build an AppleScript that sets the selected window to front
+ // within the application. Then set the application to front.
+ bool result = true;
+ std::stringstream ss;
+ ss << "tell application \"System Events\"\n"
+ << "set proc to the first item of (every process whose unix id is "
+ << pid_val
+ << ")\n"
+ << "tell proc to perform action \"AXRaise\" of window \""
+ << window_name
+ << "\"\n"
+ << "set the frontmost of proc to true\n"
+ << "end tell";
+ if (!RunAppleScript(ss.str())) {
+ // This might happen with, for example, X applications, where the X
+ // server spawns off processes with their own PIDs but the X server
+ // is still registered as the owner of the application windows. As a
+ // workaround, we put the X server process to front, meaning that
+ // all X applications will show up. The drawback of this
+ // workaround is that the application we really wanted to set
+ // to front might be behind another X application.
+ ProcessSerialNumber psn;
+ pid_t pid = pid_val;
+ int res = GetProcessForPID(pid, &psn);
+ if (res != 0) {
+ LOG(LS_ERROR) << "Failed getting process for pid";
+ result = false;
+ }
+ res = SetFrontProcess(&psn);
+ if (res != 0) {
+ LOG(LS_ERROR) << "Failed setting process to front";
+ result = false;
+ }
+ }
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+ return result;
+}
+
+bool MacWindowPicker::GetDesktopList(DesktopDescriptionList* descriptions) {
+ const uint32_t kMaxDisplays = 128;
+ CGDirectDisplayID active_displays[kMaxDisplays];
+ uint32_t display_count = 0;
+
+ CGError err = CGGetActiveDisplayList(kMaxDisplays,
+ active_displays,
+ &display_count);
+ if (err != kCGErrorSuccess) {
+ LOG_E(LS_ERROR, OS, err) << "Failed to enumerate the active displays.";
+ return false;
+ }
+ for (uint32_t i = 0; i < display_count; ++i) {
+ DesktopId id(active_displays[i], static_cast<int>(i));
+ // TODO: Figure out an appropriate desktop title.
+ DesktopDescription desc(id, "");
+ desc.set_primary(CGDisplayIsMain(id.id()));
+ descriptions->push_back(desc);
+ }
+ return display_count > 0;
+}
+
+bool MacWindowPicker::GetDesktopDimensions(const DesktopId& id,
+ int* width,
+ int* height) {
+ *width = CGDisplayPixelsWide(id.id());
+ *height = CGDisplayPixelsHigh(id.id());
+ return true;
+}
+
+bool MacWindowPicker::GetWindowList(WindowDescriptionList* descriptions) {
+ // Init if we're not already initialized.
+ if (get_window_list_ == NULL && !Init()) {
+ return false;
+ }
+
+ // Only get onscreen, non-desktop windows.
+ CFArrayRef window_array =
+ reinterpret_cast<CGWindowListCopyWindowInfoProc>(get_window_list_)(
+ kCGWindowListOptionOnScreenOnly | kCGWindowListExcludeDesktopElements,
+ kCGNullWindowID);
+ if (window_array == NULL) {
+ return false;
+ }
+
+ // Check windows to make sure they have an id, title, and use window layer 0.
+ CFIndex i;
+ CFIndex count = CFArrayGetCount(window_array);
+ for (i = 0; i < count; ++i) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, i));
+ CFStringRef window_title = reinterpret_cast<CFStringRef>(
+ CFDictionaryGetValue(window, kCGWindowName));
+ CFNumberRef window_id = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowNumber));
+ CFNumberRef window_layer = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowLayer));
+ if (window_title != NULL && window_id != NULL && window_layer != NULL) {
+ std::string title_str;
+ int id_val, layer_val;
+ ToUtf8(window_title, &title_str);
+ CFNumberGetValue(window_id, kCFNumberIntType, &id_val);
+ CFNumberGetValue(window_layer, kCFNumberIntType, &layer_val);
+
+ // Keep only layer 0 windows that have a non-empty title.
+ if (layer_val == 0 && title_str.length() > 0) {
+ WindowId id(static_cast<CGWindowID>(id_val));
+ WindowDescription desc(id, title_str);
+ descriptions->push_back(desc);
+ }
+ }
+ }
+
+ CFRelease(window_array);
+ return true;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/macwindowpicker.h b/chromium/third_party/webrtc/base/macwindowpicker.h
new file mode 100644
index 00000000000..9a44747d2ba
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macwindowpicker.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_BASE_MACWINDOWPICKER_H_
+#define WEBRTC_BASE_MACWINDOWPICKER_H_
+
+#include "webrtc/base/windowpicker.h"
+
+namespace rtc {
+
+class MacWindowPicker : public WindowPicker {
+ public:
+ MacWindowPicker();
+ ~MacWindowPicker();
+ virtual bool Init();
+ virtual bool IsVisible(const WindowId& id);
+ virtual bool MoveToFront(const WindowId& id);
+ virtual bool GetWindowList(WindowDescriptionList* descriptions);
+ virtual bool GetDesktopList(DesktopDescriptionList* descriptions);
+ virtual bool GetDesktopDimensions(const DesktopId& id, int* width,
+ int* height);
+
+ private:
+ void* lib_handle_;
+ void* get_window_list_;
+ void* get_window_list_desc_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MACWINDOWPICKER_H_
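
For orientation, here is a minimal sketch of driving the picker API declared above, assuming it is compiled inside the WebRTC tree on OS X; the title()/id() accessors and the vector-like WindowDescriptionList come from webrtc/base/windowpicker.h (included by this header) and are assumed here rather than shown in this patch.

#include <cstdio>

#include "webrtc/base/macwindowpicker.h"

int main() {
  rtc::MacWindowPicker picker;
  if (!picker.Init()) {
    // Expected on pre-10.5 systems, where the CoreGraphics symbols are absent.
    printf("Core Graphics symbols unavailable\n");
    return 1;
  }
  rtc::WindowDescriptionList windows;
  if (picker.GetWindowList(&windows) && !windows.empty()) {
    // title() and id() are assumed accessors matching the
    // WindowDescription(id, title) constructor used in the .cc above.
    printf("%d windows, raising \"%s\"\n",
           static_cast<int>(windows.size()), windows[0].title().c_str());
    picker.MoveToFront(windows[0].id());
  }
  return 0;
}
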
diff --git a/chromium/third_party/webrtc/base/macwindowpicker_unittest.cc b/chromium/third_party/webrtc/base/macwindowpicker_unittest.cc
new file mode 100644
index 00000000000..7140f023139
--- /dev/null
+++ b/chromium/third_party/webrtc/base/macwindowpicker_unittest.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/macutils.h"
+#include "webrtc/base/macwindowpicker.h"
+#include "webrtc/base/windowpicker.h"
+
+#if !defined(WEBRTC_MAC) || defined(WEBRTC_IOS)
+#error Only for WEBRTC_MAC && !WEBRTC_IOS
+#endif
+
+namespace rtc {
+
+bool IsLeopardOrLater() {
+ return GetOSVersionName() >= kMacOSLeopard;
+}
+
+// Test that this works on new versions and fails acceptably on old versions.
+TEST(MacWindowPickerTest, TestGetWindowList) {
+ MacWindowPicker picker, picker2;
+ WindowDescriptionList descriptions;
+ if (IsLeopardOrLater()) {
+ EXPECT_TRUE(picker.Init());
+ EXPECT_TRUE(picker.GetWindowList(&descriptions));
+ EXPECT_TRUE(picker2.GetWindowList(&descriptions)); // Init is optional
+ } else {
+ EXPECT_FALSE(picker.Init());
+ EXPECT_FALSE(picker.GetWindowList(&descriptions));
+ EXPECT_FALSE(picker2.GetWindowList(&descriptions));
+ }
+}
+
+// TODO: Add verification of the actual parsing, i.e., add
+// functionality to inject a fake get_window_array function which
+// provides a pre-constructed list of windows.
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/mathutils.h b/chromium/third_party/webrtc/base/mathutils.h
new file mode 100644
index 00000000000..e2b21261ddb
--- /dev/null
+++ b/chromium/third_party/webrtc/base/mathutils.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2005 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_MATHUTILS_H_
+#define WEBRTC_BASE_MATHUTILS_H_
+
+#include <math.h>
+
+#ifndef M_PI
+#define M_PI 3.14159265359f
+#endif
+
+#endif // WEBRTC_BASE_MATHUTILS_H_
diff --git a/chromium/third_party/webrtc/base/md5.cc b/chromium/third_party/webrtc/base/md5.cc
new file mode 100644
index 00000000000..54128907ad5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/md5.cc
@@ -0,0 +1,222 @@
+/*
+ * This code implements the MD5 message-digest algorithm.
+ * The algorithm is due to Ron Rivest. This code was
+ * written by Colin Plumb in 1993, no copyright is claimed.
+ * This code is in the public domain; do with it what you wish.
+ *
+ * Equivalent code is available from RSA Data Security, Inc.
+ * This code has been tested against that, and is equivalent,
+ * except that you don't need to include two pages of legalese
+ * with every copy.
+ *
+ * To compute the message digest of a chunk of bytes, declare an
+ * MD5Context structure, pass it to MD5Init, call MD5Update as
+ * needed on buffers full of bytes, and then call MD5Final, which
+ * will fill a supplied 16-byte array with the digest.
+ */
+
+// Changes from original C code:
+// Ported to C++, type casting, Google code style.
+
+#include "webrtc/base/md5.h"
+
+// TODO: Avoid memcpy - hash directly from memory.
+#include <string.h> // for memcpy().
+
+#include "webrtc/base/byteorder.h" // for ARCH_CPU_LITTLE_ENDIAN.
+
+namespace rtc {
+
+#ifdef ARCH_CPU_LITTLE_ENDIAN
+#define ByteReverse(buf, len) // Nothing.
+#else // ARCH_CPU_BIG_ENDIAN
+static void ByteReverse(uint32* buf, int len) {
+ for (int i = 0; i < len; ++i) {
+ buf[i] = rtc::GetLE32(&buf[i]);
+ }
+}
+#endif
+
+// Start MD5 accumulation. Set bit count to 0 and buffer to mysterious
+// initialization constants.
+void MD5Init(MD5Context* ctx) {
+ ctx->buf[0] = 0x67452301;
+ ctx->buf[1] = 0xefcdab89;
+ ctx->buf[2] = 0x98badcfe;
+ ctx->buf[3] = 0x10325476;
+ ctx->bits[0] = 0;
+ ctx->bits[1] = 0;
+}
+
+// Update context to reflect the concatenation of another buffer full of bytes.
+void MD5Update(MD5Context* ctx, const uint8* buf, size_t len) {
+ // Update bitcount.
+ uint32 t = ctx->bits[0];
+ if ((ctx->bits[0] = t + (static_cast<uint32>(len) << 3)) < t) {
+ ctx->bits[1]++; // Carry from low to high.
+ }
+ ctx->bits[1] += static_cast<uint32>(len >> 29);
+ t = (t >> 3) & 0x3f; // Bytes already in ctx->in.
+
+ // Handle any leading odd-sized chunks.
+ if (t) {
+ uint8* p = reinterpret_cast<uint8*>(ctx->in) + t;
+
+ t = 64-t;
+ if (len < t) {
+ memcpy(p, buf, len);
+ return;
+ }
+ memcpy(p, buf, t);
+ ByteReverse(ctx->in, 16);
+ MD5Transform(ctx->buf, ctx->in);
+ buf += t;
+ len -= t;
+ }
+
+ // Process data in 64-byte chunks.
+ while (len >= 64) {
+ memcpy(ctx->in, buf, 64);
+ ByteReverse(ctx->in, 16);
+ MD5Transform(ctx->buf, ctx->in);
+ buf += 64;
+ len -= 64;
+ }
+
+ // Handle any remaining bytes of data.
+ memcpy(ctx->in, buf, len);
+}
+
+// Final wrapup - pad to 64-byte boundary with the bit pattern.
+// 1 0* (64-bit count of bits processed, MSB-first)
+void MD5Final(MD5Context* ctx, uint8 digest[16]) {
+ // Compute number of bytes mod 64.
+ uint32 count = (ctx->bits[0] >> 3) & 0x3F;
+
+ // Set the first char of padding to 0x80. This is safe since there is
+ // always at least one byte free.
+ uint8* p = reinterpret_cast<uint8*>(ctx->in) + count;
+ *p++ = 0x80;
+
+ // Bytes of padding needed to make 64 bytes.
+ count = 64 - 1 - count;
+
+ // Pad out to 56 mod 64.
+ if (count < 8) {
+ // Two lots of padding: Pad the first block to 64 bytes.
+ memset(p, 0, count);
+ ByteReverse(ctx->in, 16);
+ MD5Transform(ctx->buf, ctx->in);
+
+ // Now fill the next block with 56 bytes.
+ memset(ctx->in, 0, 56);
+ } else {
+ // Pad block to 56 bytes.
+ memset(p, 0, count - 8);
+ }
+ ByteReverse(ctx->in, 14);
+
+ // Append length in bits and transform.
+ ctx->in[14] = ctx->bits[0];
+ ctx->in[15] = ctx->bits[1];
+
+ MD5Transform(ctx->buf, ctx->in);
+ ByteReverse(ctx->buf, 4);
+ memcpy(digest, ctx->buf, 16);
+ memset(ctx, 0, sizeof(*ctx)); // In case it's sensitive.
+}
+
+// The four core functions - F1 is optimized somewhat.
+// #define F1(x, y, z) (x & y | ~x & z)
+#define F1(x, y, z) (z ^ (x & (y ^ z)))
+#define F2(x, y, z) F1(z, x, y)
+#define F3(x, y, z) (x ^ y ^ z)
+#define F4(x, y, z) (y ^ (x | ~z))
+
+// This is the central step in the MD5 algorithm.
+#define MD5STEP(f, w, x, y, z, data, s) \
+ (w += f(x, y, z) + data, w = w << s | w >> (32 - s), w += x)
+
+// The core of the MD5 algorithm, this alters an existing MD5 hash to
+// reflect the addition of 16 longwords of new data. MD5Update blocks
+// the data and converts bytes into longwords for this routine.
+void MD5Transform(uint32 buf[4], const uint32 in[16]) {
+ uint32 a = buf[0];
+ uint32 b = buf[1];
+ uint32 c = buf[2];
+ uint32 d = buf[3];
+
+ MD5STEP(F1, a, b, c, d, in[ 0] + 0xd76aa478, 7);
+ MD5STEP(F1, d, a, b, c, in[ 1] + 0xe8c7b756, 12);
+ MD5STEP(F1, c, d, a, b, in[ 2] + 0x242070db, 17);
+ MD5STEP(F1, b, c, d, a, in[ 3] + 0xc1bdceee, 22);
+ MD5STEP(F1, a, b, c, d, in[ 4] + 0xf57c0faf, 7);
+ MD5STEP(F1, d, a, b, c, in[ 5] + 0x4787c62a, 12);
+ MD5STEP(F1, c, d, a, b, in[ 6] + 0xa8304613, 17);
+ MD5STEP(F1, b, c, d, a, in[ 7] + 0xfd469501, 22);
+ MD5STEP(F1, a, b, c, d, in[ 8] + 0x698098d8, 7);
+ MD5STEP(F1, d, a, b, c, in[ 9] + 0x8b44f7af, 12);
+ MD5STEP(F1, c, d, a, b, in[10] + 0xffff5bb1, 17);
+ MD5STEP(F1, b, c, d, a, in[11] + 0x895cd7be, 22);
+ MD5STEP(F1, a, b, c, d, in[12] + 0x6b901122, 7);
+ MD5STEP(F1, d, a, b, c, in[13] + 0xfd987193, 12);
+ MD5STEP(F1, c, d, a, b, in[14] + 0xa679438e, 17);
+ MD5STEP(F1, b, c, d, a, in[15] + 0x49b40821, 22);
+
+ MD5STEP(F2, a, b, c, d, in[ 1] + 0xf61e2562, 5);
+ MD5STEP(F2, d, a, b, c, in[ 6] + 0xc040b340, 9);
+ MD5STEP(F2, c, d, a, b, in[11] + 0x265e5a51, 14);
+ MD5STEP(F2, b, c, d, a, in[ 0] + 0xe9b6c7aa, 20);
+ MD5STEP(F2, a, b, c, d, in[ 5] + 0xd62f105d, 5);
+ MD5STEP(F2, d, a, b, c, in[10] + 0x02441453, 9);
+ MD5STEP(F2, c, d, a, b, in[15] + 0xd8a1e681, 14);
+ MD5STEP(F2, b, c, d, a, in[ 4] + 0xe7d3fbc8, 20);
+ MD5STEP(F2, a, b, c, d, in[ 9] + 0x21e1cde6, 5);
+ MD5STEP(F2, d, a, b, c, in[14] + 0xc33707d6, 9);
+ MD5STEP(F2, c, d, a, b, in[ 3] + 0xf4d50d87, 14);
+ MD5STEP(F2, b, c, d, a, in[ 8] + 0x455a14ed, 20);
+ MD5STEP(F2, a, b, c, d, in[13] + 0xa9e3e905, 5);
+ MD5STEP(F2, d, a, b, c, in[ 2] + 0xfcefa3f8, 9);
+ MD5STEP(F2, c, d, a, b, in[ 7] + 0x676f02d9, 14);
+ MD5STEP(F2, b, c, d, a, in[12] + 0x8d2a4c8a, 20);
+
+ MD5STEP(F3, a, b, c, d, in[ 5] + 0xfffa3942, 4);
+ MD5STEP(F3, d, a, b, c, in[ 8] + 0x8771f681, 11);
+ MD5STEP(F3, c, d, a, b, in[11] + 0x6d9d6122, 16);
+ MD5STEP(F3, b, c, d, a, in[14] + 0xfde5380c, 23);
+ MD5STEP(F3, a, b, c, d, in[ 1] + 0xa4beea44, 4);
+ MD5STEP(F3, d, a, b, c, in[ 4] + 0x4bdecfa9, 11);
+ MD5STEP(F3, c, d, a, b, in[ 7] + 0xf6bb4b60, 16);
+ MD5STEP(F3, b, c, d, a, in[10] + 0xbebfbc70, 23);
+ MD5STEP(F3, a, b, c, d, in[13] + 0x289b7ec6, 4);
+ MD5STEP(F3, d, a, b, c, in[ 0] + 0xeaa127fa, 11);
+ MD5STEP(F3, c, d, a, b, in[ 3] + 0xd4ef3085, 16);
+ MD5STEP(F3, b, c, d, a, in[ 6] + 0x04881d05, 23);
+ MD5STEP(F3, a, b, c, d, in[ 9] + 0xd9d4d039, 4);
+ MD5STEP(F3, d, a, b, c, in[12] + 0xe6db99e5, 11);
+ MD5STEP(F3, c, d, a, b, in[15] + 0x1fa27cf8, 16);
+ MD5STEP(F3, b, c, d, a, in[ 2] + 0xc4ac5665, 23);
+
+ MD5STEP(F4, a, b, c, d, in[ 0] + 0xf4292244, 6);
+ MD5STEP(F4, d, a, b, c, in[ 7] + 0x432aff97, 10);
+ MD5STEP(F4, c, d, a, b, in[14] + 0xab9423a7, 15);
+ MD5STEP(F4, b, c, d, a, in[ 5] + 0xfc93a039, 21);
+ MD5STEP(F4, a, b, c, d, in[12] + 0x655b59c3, 6);
+ MD5STEP(F4, d, a, b, c, in[ 3] + 0x8f0ccc92, 10);
+ MD5STEP(F4, c, d, a, b, in[10] + 0xffeff47d, 15);
+ MD5STEP(F4, b, c, d, a, in[ 1] + 0x85845dd1, 21);
+ MD5STEP(F4, a, b, c, d, in[ 8] + 0x6fa87e4f, 6);
+ MD5STEP(F4, d, a, b, c, in[15] + 0xfe2ce6e0, 10);
+ MD5STEP(F4, c, d, a, b, in[ 6] + 0xa3014314, 15);
+ MD5STEP(F4, b, c, d, a, in[13] + 0x4e0811a1, 21);
+ MD5STEP(F4, a, b, c, d, in[ 4] + 0xf7537e82, 6);
+ MD5STEP(F4, d, a, b, c, in[11] + 0xbd3af235, 10);
+ MD5STEP(F4, c, d, a, b, in[ 2] + 0x2ad7d2bb, 15);
+ MD5STEP(F4, b, c, d, a, in[ 9] + 0xeb86d391, 21);
+ buf[0] += a;
+ buf[1] += b;
+ buf[2] += c;
+ buf[3] += d;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/md5.h b/chromium/third_party/webrtc/base/md5.h
new file mode 100644
index 00000000000..418e214c76f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/md5.h
@@ -0,0 +1,45 @@
+/*
+ * This is the header file for the MD5 message-digest algorithm.
+ * The algorithm is due to Ron Rivest. This code was
+ * written by Colin Plumb in 1993, no copyright is claimed.
+ * This code is in the public domain; do with it what you wish.
+ *
+ * Equivalent code is available from RSA Data Security, Inc.
+ * This code has been tested against that, and is equivalent,
+ * except that you don't need to include two pages of legalese
+ * with every copy.
+ * To compute the message digest of a chunk of bytes, declare an
+ * MD5Context structure, pass it to MD5Init, call MD5Update as
+ * needed on buffers full of bytes, and then call MD5Final, which
+ * will fill a supplied 16-byte array with the digest.
+ *
+ */
+
+// Changes(fbarchard): Ported to C++ and Google style guide.
+// Made context first parameter in MD5Final for consistency with Sha1.
+// Changes(hellner): added rtc namespace
+
+#ifndef WEBRTC_BASE_MD5_H_
+#define WEBRTC_BASE_MD5_H_
+
+#include "webrtc/base/basictypes.h"
+
+namespace rtc {
+
+// Canonical name for an MD5 context structure, used in many crypto libs.
+typedef struct MD5Context MD5_CTX;
+
+struct MD5Context {
+ uint32 buf[4];
+ uint32 bits[2];
+ uint32 in[16];
+};
+
+void MD5Init(MD5Context* context);
+void MD5Update(MD5Context* context, const uint8* data, size_t len);
+void MD5Final(MD5Context* context, uint8 digest[16]);
+void MD5Transform(uint32 buf[4], const uint32 in[16]);
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MD5_H_
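
As the header comment describes, the low-level flow is MD5Init, then MD5Update as needed, then MD5Final. A minimal sketch, assuming the global uint8 typedef from webrtc/base/basictypes.h and the RFC 1321 "abc" test vector:

#include <stdio.h>
#include <string.h>

#include "webrtc/base/md5.h"

int main() {
  const char* msg = "abc";
  rtc::MD5Context ctx;
  uint8 digest[16];  // uint8 comes from webrtc/base/basictypes.h.

  rtc::MD5Init(&ctx);
  rtc::MD5Update(&ctx, reinterpret_cast<const uint8*>(msg), strlen(msg));
  rtc::MD5Final(&ctx, digest);

  for (int i = 0; i < 16; ++i)
    printf("%02x", digest[i]);  // Expected: 900150983cd24fb0d6963f7d28e17f72.
  printf("\n");
  return 0;
}
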
diff --git a/chromium/third_party/webrtc/base/md5digest.h b/chromium/third_party/webrtc/base/md5digest.h
new file mode 100644
index 00000000000..5e8580222ce
--- /dev/null
+++ b/chromium/third_party/webrtc/base/md5digest.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_MD5DIGEST_H_
+#define WEBRTC_BASE_MD5DIGEST_H_
+
+#include "webrtc/base/md5.h"
+#include "webrtc/base/messagedigest.h"
+
+namespace rtc {
+
+// A simple wrapper for our MD5 implementation.
+class Md5Digest : public MessageDigest {
+ public:
+ enum { kSize = 16 };
+ Md5Digest() {
+ MD5Init(&ctx_);
+ }
+ virtual size_t Size() const {
+ return kSize;
+ }
+ virtual void Update(const void* buf, size_t len) {
+ MD5Update(&ctx_, static_cast<const uint8*>(buf), len);
+ }
+ virtual size_t Finish(void* buf, size_t len) {
+ if (len < kSize) {
+ return 0;
+ }
+ MD5Final(&ctx_, static_cast<uint8*>(buf));
+ MD5Init(&ctx_); // Reset for next use.
+ return kSize;
+ }
+ private:
+ MD5_CTX ctx_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MD5DIGEST_H_
diff --git a/chromium/third_party/webrtc/base/md5digest_unittest.cc b/chromium/third_party/webrtc/base/md5digest_unittest.cc
new file mode 100644
index 00000000000..67c62db6283
--- /dev/null
+++ b/chromium/third_party/webrtc/base/md5digest_unittest.cc
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/md5digest.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/stringencode.h"
+
+namespace rtc {
+
+std::string Md5(const std::string& input) {
+ Md5Digest md5;
+ return ComputeDigest(&md5, input);
+}
+
+TEST(Md5DigestTest, TestSize) {
+ Md5Digest md5;
+ EXPECT_EQ(16, static_cast<int>(Md5Digest::kSize));
+ EXPECT_EQ(16U, md5.Size());
+}
+
+TEST(Md5DigestTest, TestBasic) {
+ // These are the standard MD5 test vectors from RFC 1321.
+ EXPECT_EQ("d41d8cd98f00b204e9800998ecf8427e", Md5(""));
+ EXPECT_EQ("0cc175b9c0f1b6a831c399e269772661", Md5("a"));
+ EXPECT_EQ("900150983cd24fb0d6963f7d28e17f72", Md5("abc"));
+ EXPECT_EQ("f96b697d7cb7938d525a2f31aaf161d0", Md5("message digest"));
+ EXPECT_EQ("c3fcd3d76192e4007dfb496cca67e13b",
+ Md5("abcdefghijklmnopqrstuvwxyz"));
+}
+
+TEST(Md5DigestTest, TestMultipleUpdates) {
+ Md5Digest md5;
+ std::string input = "abcdefghijklmnopqrstuvwxyz";
+ char output[Md5Digest::kSize];
+ for (size_t i = 0; i < input.size(); ++i) {
+ md5.Update(&input[i], 1);
+ }
+ EXPECT_EQ(md5.Size(), md5.Finish(output, sizeof(output)));
+ EXPECT_EQ("c3fcd3d76192e4007dfb496cca67e13b",
+ hex_encode(output, sizeof(output)));
+}
+
+TEST(Md5DigestTest, TestReuse) {
+ Md5Digest md5;
+ std::string input = "message digest";
+ EXPECT_EQ("f96b697d7cb7938d525a2f31aaf161d0", ComputeDigest(&md5, input));
+ input = "abcdefghijklmnopqrstuvwxyz";
+ EXPECT_EQ("c3fcd3d76192e4007dfb496cca67e13b", ComputeDigest(&md5, input));
+}
+
+TEST(Md5DigestTest, TestBufferTooSmall) {
+ Md5Digest md5;
+ std::string input = "abcdefghijklmnopqrstuvwxyz";
+ char output[Md5Digest::kSize - 1];
+ md5.Update(input.c_str(), input.size());
+ EXPECT_EQ(0U, md5.Finish(output, sizeof(output)));
+}
+
+TEST(Md5DigestTest, TestBufferConst) {
+ Md5Digest md5;
+ const int kLongSize = 1000000;
+ std::string input(kLongSize, '\0');
+ for (int i = 0; i < kLongSize; ++i) {
+ input[i] = static_cast<char>(i);
+ }
+ md5.Update(input.c_str(), input.size());
+ for (int i = 0; i < kLongSize; ++i) {
+ EXPECT_EQ(static_cast<char>(i), input[i]);
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/messagedigest.cc b/chromium/third_party/webrtc/base/messagedigest.cc
new file mode 100644
index 00000000000..dc3e1006ad2
--- /dev/null
+++ b/chromium/third_party/webrtc/base/messagedigest.cc
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/messagedigest.h"
+
+#include <string.h>
+
+#include "webrtc/base/sslconfig.h"
+#if SSL_USE_OPENSSL
+#include "webrtc/base/openssldigest.h"
+#else
+#include "webrtc/base/md5digest.h"
+#include "webrtc/base/sha1digest.h"
+#endif
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/stringencode.h"
+
+namespace rtc {
+
+// From RFC 4572.
+const char DIGEST_MD5[] = "md5";
+const char DIGEST_SHA_1[] = "sha-1";
+const char DIGEST_SHA_224[] = "sha-224";
+const char DIGEST_SHA_256[] = "sha-256";
+const char DIGEST_SHA_384[] = "sha-384";
+const char DIGEST_SHA_512[] = "sha-512";
+
+static const size_t kBlockSize = 64; // valid for SHA-256 and down
+
+MessageDigest* MessageDigestFactory::Create(const std::string& alg) {
+#if SSL_USE_OPENSSL
+ MessageDigest* digest = new OpenSSLDigest(alg);
+ if (digest->Size() == 0) { // invalid algorithm
+ delete digest;
+ digest = NULL;
+ }
+ return digest;
+#else
+ MessageDigest* digest = NULL;
+ if (alg == DIGEST_MD5) {
+ digest = new Md5Digest();
+ } else if (alg == DIGEST_SHA_1) {
+ digest = new Sha1Digest();
+ }
+ return digest;
+#endif
+}
+
+bool IsFips180DigestAlgorithm(const std::string& alg) {
+ // These are the FIPS 180 algorithms. According to RFC 4572 Section 5,
+ // "Self-signed certificates (for which legacy certificates are not a
+ // consideration) MUST use one of the FIPS 180 algorithms (SHA-1,
+ // SHA-224, SHA-256, SHA-384, or SHA-512) as their signature algorithm,
+ // and thus also MUST use it to calculate certificate fingerprints."
+ return alg == DIGEST_SHA_1 ||
+ alg == DIGEST_SHA_224 ||
+ alg == DIGEST_SHA_256 ||
+ alg == DIGEST_SHA_384 ||
+ alg == DIGEST_SHA_512;
+}
+
+size_t ComputeDigest(MessageDigest* digest, const void* input, size_t in_len,
+ void* output, size_t out_len) {
+ digest->Update(input, in_len);
+ return digest->Finish(output, out_len);
+}
+
+size_t ComputeDigest(const std::string& alg, const void* input, size_t in_len,
+ void* output, size_t out_len) {
+ scoped_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
+ return (digest) ?
+ ComputeDigest(digest.get(), input, in_len, output, out_len) :
+ 0;
+}
+
+std::string ComputeDigest(MessageDigest* digest, const std::string& input) {
+ scoped_ptr<char[]> output(new char[digest->Size()]);
+ ComputeDigest(digest, input.data(), input.size(),
+ output.get(), digest->Size());
+ return hex_encode(output.get(), digest->Size());
+}
+
+bool ComputeDigest(const std::string& alg, const std::string& input,
+ std::string* output) {
+ scoped_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
+ if (!digest) {
+ return false;
+ }
+ *output = ComputeDigest(digest.get(), input);
+ return true;
+}
+
+std::string ComputeDigest(const std::string& alg, const std::string& input) {
+ std::string output;
+ ComputeDigest(alg, input, &output);
+ return output;
+}
+
+// Compute a RFC 2104 HMAC: H(K XOR opad, H(K XOR ipad, text))
+size_t ComputeHmac(MessageDigest* digest,
+ const void* key, size_t key_len,
+ const void* input, size_t in_len,
+ void* output, size_t out_len) {
+ // We only handle algorithms with a 64-byte blocksize.
+ // TODO: Add BlockSize() method to MessageDigest.
+ size_t block_len = kBlockSize;
+ if (digest->Size() > 32) {
+ return 0;
+ }
+ // Copy the key to a block-sized buffer to simplify padding.
+ // If the key is longer than a block, hash it and use the result instead.
+ scoped_ptr<uint8[]> new_key(new uint8[block_len]);
+ if (key_len > block_len) {
+ ComputeDigest(digest, key, key_len, new_key.get(), block_len);
+ memset(new_key.get() + digest->Size(), 0, block_len - digest->Size());
+ } else {
+ memcpy(new_key.get(), key, key_len);
+ memset(new_key.get() + key_len, 0, block_len - key_len);
+ }
+ // Set up the inner and outer paddings by XOR-ing the key with the ipad/opad constants.
+ scoped_ptr<uint8[]> o_pad(new uint8[block_len]), i_pad(new uint8[block_len]);
+ for (size_t i = 0; i < block_len; ++i) {
+ o_pad[i] = 0x5c ^ new_key[i];
+ i_pad[i] = 0x36 ^ new_key[i];
+ }
+ // Inner hash; hash the inner padding, and then the input buffer.
+ scoped_ptr<uint8[]> inner(new uint8[digest->Size()]);
+ digest->Update(i_pad.get(), block_len);
+ digest->Update(input, in_len);
+ digest->Finish(inner.get(), digest->Size());
+ // Outer hash; hash the outer padding, and then the result of the inner hash.
+ digest->Update(o_pad.get(), block_len);
+ digest->Update(inner.get(), digest->Size());
+ return digest->Finish(output, out_len);
+}
+
+size_t ComputeHmac(const std::string& alg, const void* key, size_t key_len,
+ const void* input, size_t in_len,
+ void* output, size_t out_len) {
+ scoped_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
+ if (!digest) {
+ return 0;
+ }
+ return ComputeHmac(digest.get(), key, key_len,
+ input, in_len, output, out_len);
+}
+
+std::string ComputeHmac(MessageDigest* digest, const std::string& key,
+ const std::string& input) {
+ scoped_ptr<char[]> output(new char[digest->Size()]);
+ ComputeHmac(digest, key.data(), key.size(),
+ input.data(), input.size(), output.get(), digest->Size());
+ return hex_encode(output.get(), digest->Size());
+}
+
+bool ComputeHmac(const std::string& alg, const std::string& key,
+ const std::string& input, std::string* output) {
+ scoped_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
+ if (!digest) {
+ return false;
+ }
+ *output = ComputeHmac(digest.get(), key, input);
+ return true;
+}
+
+std::string ComputeHmac(const std::string& alg, const std::string& key,
+ const std::string& input) {
+ std::string output;
+ ComputeHmac(alg, key, input, &output);
+ return output;
+}
+
+} // namespace rtc
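
A small sketch of the string-based helpers defined above; the expected values are the RFC 1321 / RFC 2202 vectors also exercised by messagedigest_unittest.cc later in this patch, and the example assumes it links against this base library:

#include <cassert>
#include <string>

#include "webrtc/base/messagedigest.h"

int main() {
  // Hex-encoded digest of a whole string in one call.
  std::string md5 = rtc::ComputeDigest(rtc::DIGEST_MD5, "abc");
  assert(md5 == "900150983cd24fb0d6963f7d28e17f72");  // RFC 1321 vector.

  // HMAC-MD5 keyed with 16 x 0x0b, RFC 2202 test case 1.
  std::string mac =
      rtc::ComputeHmac(rtc::DIGEST_MD5, std::string(16, '\x0b'), "Hi There");
  assert(mac == "9294727a3638bb1c13f48ef8158bfc9d");

  // Unknown algorithm names yield an empty string.
  assert(rtc::ComputeDigest("sha-9000", "abc").empty());
  return 0;
}
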
diff --git a/chromium/third_party/webrtc/base/messagedigest.h b/chromium/third_party/webrtc/base/messagedigest.h
new file mode 100644
index 00000000000..5cfcb477230
--- /dev/null
+++ b/chromium/third_party/webrtc/base/messagedigest.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_MESSAGEDIGEST_H_
+#define WEBRTC_BASE_MESSAGEDIGEST_H_
+
+#include <string>
+
+namespace rtc {
+
+// Definitions for the digest algorithms.
+extern const char DIGEST_MD5[];
+extern const char DIGEST_SHA_1[];
+extern const char DIGEST_SHA_224[];
+extern const char DIGEST_SHA_256[];
+extern const char DIGEST_SHA_384[];
+extern const char DIGEST_SHA_512[];
+
+// A general class for computing hashes.
+class MessageDigest {
+ public:
+ enum { kMaxSize = 64 }; // Maximum known size (SHA-512)
+ virtual ~MessageDigest() {}
+ // Returns the digest output size (e.g. 16 bytes for MD5).
+ virtual size_t Size() const = 0;
+ // Updates the digest with |len| bytes from |buf|.
+ virtual void Update(const void* buf, size_t len) = 0;
+ // Outputs the digest value to |buf| with length |len|.
+ // Returns the number of bytes written, i.e., Size().
+ virtual size_t Finish(void* buf, size_t len) = 0;
+};
+
+// A factory class for creating digest objects.
+class MessageDigestFactory {
+ public:
+ static MessageDigest* Create(const std::string& alg);
+};
+
+// A whitelist of approved digest algorithms from RFC 4572 (FIPS 180).
+bool IsFips180DigestAlgorithm(const std::string& alg);
+
+// Functions to create hashes.
+
+// Computes the hash of |in_len| bytes of |input|, using the |digest| hash
+// implementation, and outputs the hash to the buffer |output|, which is
+// |out_len| bytes long. Returns the number of bytes written to |output| if
+// successful, or 0 if |out_len| was too small.
+size_t ComputeDigest(MessageDigest* digest, const void* input, size_t in_len,
+ void* output, size_t out_len);
+// Like the previous function, but creates a digest implementation based on
+// the desired digest name |alg|, e.g. DIGEST_SHA_1. Returns 0 if there is no
+// digest with the given name.
+size_t ComputeDigest(const std::string& alg, const void* input, size_t in_len,
+ void* output, size_t out_len);
+// Computes the hash of |input| using the |digest| hash implementation, and
+// returns it as a hex-encoded string.
+std::string ComputeDigest(MessageDigest* digest, const std::string& input);
+// Like the previous function, but creates a digest implementation based on
+// the desired digest name |alg|, e.g. DIGEST_SHA_1. Returns empty string if
+// there is no digest with the given name.
+std::string ComputeDigest(const std::string& alg, const std::string& input);
+// Like the previous function, but returns an explicit result code.
+bool ComputeDigest(const std::string& alg, const std::string& input,
+ std::string* output);
+
+// Shorthand way to compute a hex-encoded hash using MD5.
+inline std::string MD5(const std::string& input) {
+ return ComputeDigest(DIGEST_MD5, input);
+}
+
+// Functions to compute RFC 2104 HMACs.
+
+// Computes the HMAC of |in_len| bytes of |input|, using the |digest| hash
+// implementation and |key_len| bytes of |key| to key the HMAC, and outputs
+// the HMAC to the buffer |output|, which is |out_len| bytes long. Returns the
+// number of bytes written to |output| if successful, or 0 if |out_len| was too
+// small.
+size_t ComputeHmac(MessageDigest* digest, const void* key, size_t key_len,
+ const void* input, size_t in_len,
+ void* output, size_t out_len);
+// Like the previous function, but creates a digest implementation based on
+// the desired digest name |alg|, e.g. DIGEST_SHA_1. Returns 0 if there is no
+// digest with the given name.
+size_t ComputeHmac(const std::string& alg, const void* key, size_t key_len,
+ const void* input, size_t in_len,
+ void* output, size_t out_len);
+// Computes the HMAC of |input| using the |digest| hash implementation and |key|
+// to key the HMAC, and returns it as a hex-encoded string.
+std::string ComputeHmac(MessageDigest* digest, const std::string& key,
+ const std::string& input);
+// Like the previous function, but creates a digest implementation based on
+// the desired digest name |alg|, e.g. DIGEST_SHA_1. Returns empty string if
+// there is no digest with the given name.
+std::string ComputeHmac(const std::string& alg, const std::string& key,
+ const std::string& input);
+// Like the previous function, but returns an explicit result code.
+bool ComputeHmac(const std::string& alg, const std::string& key,
+ const std::string& input, std::string* output);
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MESSAGEDIGEST_H_
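
The factory and the streaming Update/Finish interface declared above can also be used directly. A minimal sketch, assuming hex_encode() from webrtc/base/stringencode.h (the same helper the ComputeDigest() implementations use):

#include <cstdio>
#include <string>

#include "webrtc/base/messagedigest.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stringencode.h"

int main() {
  rtc::scoped_ptr<rtc::MessageDigest> digest(
      rtc::MessageDigestFactory::Create(rtc::DIGEST_SHA_1));
  if (!digest)  // Create() returns NULL for unknown algorithm names.
    return 1;

  // Feed the input incrementally; equivalent to one ComputeDigest() call.
  const std::string part1 = "ab", part2 = "c";
  digest->Update(part1.data(), part1.size());
  digest->Update(part2.data(), part2.size());

  char out[rtc::MessageDigest::kMaxSize];
  size_t len = digest->Finish(out, sizeof(out));
  // Expected: a9993e364706816aba3e25717850c26c9cd0d89d (RFC 3174 vector).
  printf("sha-1(abc) = %s\n", rtc::hex_encode(out, len).c_str());
  return 0;
}
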
diff --git a/chromium/third_party/webrtc/base/messagedigest_unittest.cc b/chromium/third_party/webrtc/base/messagedigest_unittest.cc
new file mode 100644
index 00000000000..86cf688cec4
--- /dev/null
+++ b/chromium/third_party/webrtc/base/messagedigest_unittest.cc
@@ -0,0 +1,151 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/messagedigest.h"
+#include "webrtc/base/stringencode.h"
+
+namespace rtc {
+
+// Test vectors from RFC 1321.
+TEST(MessageDigestTest, TestMd5Digest) {
+ // Test the string versions of the APIs.
+ EXPECT_EQ("d41d8cd98f00b204e9800998ecf8427e",
+ ComputeDigest(DIGEST_MD5, ""));
+ EXPECT_EQ("900150983cd24fb0d6963f7d28e17f72",
+ ComputeDigest(DIGEST_MD5, "abc"));
+ EXPECT_EQ("c3fcd3d76192e4007dfb496cca67e13b",
+ ComputeDigest(DIGEST_MD5, "abcdefghijklmnopqrstuvwxyz"));
+
+ // Test the raw buffer versions of the APIs; also check output buffer size.
+ char output[16];
+ EXPECT_EQ(sizeof(output),
+ ComputeDigest(DIGEST_MD5, "abc", 3, output, sizeof(output)));
+ EXPECT_EQ("900150983cd24fb0d6963f7d28e17f72",
+ hex_encode(output, sizeof(output)));
+ EXPECT_EQ(0U,
+ ComputeDigest(DIGEST_MD5, "abc", 3, output, sizeof(output) - 1));
+}
+
+// Test vectors from RFC 3174.
+TEST(MessageDigestTest, TestSha1Digest) {
+ // Test the string versions of the APIs.
+ EXPECT_EQ("da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ ComputeDigest(DIGEST_SHA_1, ""));
+ EXPECT_EQ("a9993e364706816aba3e25717850c26c9cd0d89d",
+ ComputeDigest(DIGEST_SHA_1, "abc"));
+ EXPECT_EQ("84983e441c3bd26ebaae4aa1f95129e5e54670f1",
+ ComputeDigest(DIGEST_SHA_1,
+ "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"));
+
+ // Test the raw buffer versions of the APIs; also check output buffer size.
+ char output[20];
+ EXPECT_EQ(sizeof(output),
+ ComputeDigest(DIGEST_SHA_1, "abc", 3, output, sizeof(output)));
+ EXPECT_EQ("a9993e364706816aba3e25717850c26c9cd0d89d",
+ hex_encode(output, sizeof(output)));
+ EXPECT_EQ(0U,
+ ComputeDigest(DIGEST_SHA_1, "abc", 3, output, sizeof(output) - 1));
+}
+
+// Test that we fail properly if a bad digest algorithm is specified.
+TEST(MessageDigestTest, TestBadDigest) {
+ std::string output;
+ EXPECT_FALSE(ComputeDigest("sha-9000", "abc", &output));
+ EXPECT_EQ("", ComputeDigest("sha-9000", "abc"));
+}
+
+// Test vectors from RFC 2202.
+TEST(MessageDigestTest, TestMd5Hmac) {
+ // Test the string versions of the APIs.
+ EXPECT_EQ("9294727a3638bb1c13f48ef8158bfc9d",
+ ComputeHmac(DIGEST_MD5, std::string(16, '\x0b'), "Hi There"));
+ EXPECT_EQ("750c783e6ab0b503eaa86e310a5db738",
+ ComputeHmac(DIGEST_MD5, "Jefe", "what do ya want for nothing?"));
+ EXPECT_EQ("56be34521d144c88dbb8c733f0e8b3f6",
+ ComputeHmac(DIGEST_MD5, std::string(16, '\xaa'),
+ std::string(50, '\xdd')));
+ EXPECT_EQ("697eaf0aca3a3aea3a75164746ffaa79",
+ ComputeHmac(DIGEST_MD5,
+ "\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19",
+ std::string(50, '\xcd')));
+ EXPECT_EQ("56461ef2342edc00f9bab995690efd4c",
+ ComputeHmac(DIGEST_MD5, std::string(16, '\x0c'),
+ "Test With Truncation"));
+ EXPECT_EQ("6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd",
+ ComputeHmac(DIGEST_MD5, std::string(80, '\xaa'),
+ "Test Using Larger Than Block-Size Key - Hash Key First"));
+ EXPECT_EQ("6f630fad67cda0ee1fb1f562db3aa53e",
+ ComputeHmac(DIGEST_MD5, std::string(80, '\xaa'),
+ "Test Using Larger Than Block-Size Key and Larger "
+ "Than One Block-Size Data"));
+
+ // Test the raw buffer versions of the APIs; also check output buffer size.
+ std::string key(16, '\x0b');
+ std::string input("Hi There");
+ char output[16];
+ EXPECT_EQ(sizeof(output),
+ ComputeHmac(DIGEST_MD5, key.c_str(), key.size(),
+ input.c_str(), input.size(), output, sizeof(output)));
+ EXPECT_EQ("9294727a3638bb1c13f48ef8158bfc9d",
+ hex_encode(output, sizeof(output)));
+ EXPECT_EQ(0U,
+ ComputeHmac(DIGEST_MD5, key.c_str(), key.size(),
+ input.c_str(), input.size(), output, sizeof(output) - 1));
+}
+
+// Test vectors from RFC 2202.
+TEST(MessageDigestTest, TestSha1Hmac) {
+ // Test the string versions of the APIs.
+ EXPECT_EQ("b617318655057264e28bc0b6fb378c8ef146be00",
+ ComputeHmac(DIGEST_SHA_1, std::string(20, '\x0b'), "Hi There"));
+ EXPECT_EQ("effcdf6ae5eb2fa2d27416d5f184df9c259a7c79",
+ ComputeHmac(DIGEST_SHA_1, "Jefe", "what do ya want for nothing?"));
+ EXPECT_EQ("125d7342b9ac11cd91a39af48aa17b4f63f175d3",
+ ComputeHmac(DIGEST_SHA_1, std::string(20, '\xaa'),
+ std::string(50, '\xdd')));
+ EXPECT_EQ("4c9007f4026250c6bc8414f9bf50c86c2d7235da",
+ ComputeHmac(DIGEST_SHA_1,
+ "\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19",
+ std::string(50, '\xcd')));
+ EXPECT_EQ("4c1a03424b55e07fe7f27be1d58bb9324a9a5a04",
+ ComputeHmac(DIGEST_SHA_1, std::string(20, '\x0c'),
+ "Test With Truncation"));
+ EXPECT_EQ("aa4ae5e15272d00e95705637ce8a3b55ed402112",
+ ComputeHmac(DIGEST_SHA_1, std::string(80, '\xaa'),
+ "Test Using Larger Than Block-Size Key - Hash Key First"));
+ EXPECT_EQ("e8e99d0f45237d786d6bbaa7965c7808bbff1a91",
+ ComputeHmac(DIGEST_SHA_1, std::string(80, '\xaa'),
+ "Test Using Larger Than Block-Size Key and Larger "
+ "Than One Block-Size Data"));
+
+ // Test the raw buffer versions of the APIs; also check output buffer size.
+ std::string key(20, '\x0b');
+ std::string input("Hi There");
+ char output[20];
+ EXPECT_EQ(sizeof(output),
+ ComputeHmac(DIGEST_SHA_1, key.c_str(), key.size(),
+ input.c_str(), input.size(), output, sizeof(output)));
+ EXPECT_EQ("b617318655057264e28bc0b6fb378c8ef146be00",
+ hex_encode(output, sizeof(output)));
+ EXPECT_EQ(0U,
+ ComputeHmac(DIGEST_SHA_1, key.c_str(), key.size(),
+ input.c_str(), input.size(), output, sizeof(output) - 1));
+}
+
+TEST(MessageDigestTest, TestBadHmac) {
+ std::string output;
+ EXPECT_FALSE(ComputeHmac("sha-9000", "key", "abc", &output));
+ EXPECT_EQ("", ComputeHmac("sha-9000", "key", "abc"));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/messagehandler.cc b/chromium/third_party/webrtc/base/messagehandler.cc
new file mode 100644
index 00000000000..be5bb7f8f9d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/messagehandler.cc
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/messagequeue.h"
+
+namespace rtc {
+
+MessageHandler::~MessageHandler() {
+ MessageQueueManager::Clear(this);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/messagehandler.h b/chromium/third_party/webrtc/base/messagehandler.h
new file mode 100644
index 00000000000..123c8509736
--- /dev/null
+++ b/chromium/third_party/webrtc/base/messagehandler.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_MESSAGEHANDLER_H_
+#define WEBRTC_BASE_MESSAGEHANDLER_H_
+
+#include "webrtc/base/constructormagic.h"
+
+namespace rtc {
+
+struct Message;
+
+// Messages get dispatched to a MessageHandler
+
+class MessageHandler {
+ public:
+ virtual ~MessageHandler();
+ virtual void OnMessage(Message* msg) = 0;
+
+ protected:
+ MessageHandler() {}
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MessageHandler);
+};
+
+// Helper class to facilitate executing a functor on a thread.
+template <class ReturnT, class FunctorT>
+class FunctorMessageHandler : public MessageHandler {
+ public:
+ explicit FunctorMessageHandler(const FunctorT& functor)
+ : functor_(functor) {}
+ virtual void OnMessage(Message* msg) {
+ result_ = functor_();
+ }
+ const ReturnT& result() const { return result_; }
+
+ private:
+ FunctorT functor_;
+ ReturnT result_;
+};
+
+// Specialization for ReturnT of void.
+template <class FunctorT>
+class FunctorMessageHandler<void, FunctorT> : public MessageHandler {
+ public:
+ explicit FunctorMessageHandler(const FunctorT& functor)
+ : functor_(functor) {}
+ virtual void OnMessage(Message* msg) {
+ functor_();
+ }
+ void result() const {}
+
+ private:
+ FunctorT functor_;
+};
+
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MESSAGEHANDLER_H_
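
A minimal sketch of FunctorMessageHandler used standalone; in production code the functor usually comes from a Thread invocation, so the hand-written functor and direct OnMessage() call here are illustrative only:

#include <cstdio>

#include "webrtc/base/messagehandler.h"

namespace {

// A simple functor standing in for whatever work would normally be
// marshalled onto another thread's message queue.
struct ComputeAnswer {
  int operator()() const { return 42; }
};

}  // namespace

int main() {
  ComputeAnswer functor;
  rtc::FunctorMessageHandler<int, ComputeAnswer> handler(functor);
  // OnMessage() runs the functor and stores its return value; the Message*
  // argument is unused by this handler.
  handler.OnMessage(NULL);
  printf("result = %d\n", handler.result());
  return 0;
}
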
diff --git a/chromium/third_party/webrtc/base/messagequeue.cc b/chromium/third_party/webrtc/base/messagequeue.cc
new file mode 100644
index 00000000000..1b312ff7afb
--- /dev/null
+++ b/chromium/third_party/webrtc/base/messagequeue.cc
@@ -0,0 +1,384 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_POSIX)
+#include <sys/time.h>
+#endif
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/messagequeue.h"
+#if defined(__native_client__)
+#include "webrtc/base/nullsocketserver.h"
+typedef rtc::NullSocketServer DefaultSocketServer;
+#else
+#include "webrtc/base/physicalsocketserver.h"
+typedef rtc::PhysicalSocketServer DefaultSocketServer;
+#endif
+
+namespace rtc {
+
+const uint32 kMaxMsgLatency = 150; // 150 ms
+
+//------------------------------------------------------------------
+// MessageQueueManager
+
+MessageQueueManager* MessageQueueManager::instance_ = NULL;
+
+MessageQueueManager* MessageQueueManager::Instance() {
+ // Note: This is not thread safe, but it is first called before threads are
+ // spawned.
+ if (!instance_)
+ instance_ = new MessageQueueManager;
+ return instance_;
+}
+
+bool MessageQueueManager::IsInitialized() {
+ return instance_ != NULL;
+}
+
+MessageQueueManager::MessageQueueManager() {
+}
+
+MessageQueueManager::~MessageQueueManager() {
+}
+
+void MessageQueueManager::Add(MessageQueue *message_queue) {
+ return Instance()->AddInternal(message_queue);
+}
+void MessageQueueManager::AddInternal(MessageQueue *message_queue) {
+ // MessageQueueManager methods should be non-reentrant, so we
+ // ASSERT that this is the case. If any of these ASSERTs fire, please
+ // contact bpm or jbeda.
+ ASSERT(!crit_.CurrentThreadIsOwner());
+ CritScope cs(&crit_);
+ message_queues_.push_back(message_queue);
+}
+
+void MessageQueueManager::Remove(MessageQueue *message_queue) {
+ // If there isn't a message queue manager instance, then there isn't a queue
+ // to remove.
+ if (!instance_) return;
+ return Instance()->RemoveInternal(message_queue);
+}
+void MessageQueueManager::RemoveInternal(MessageQueue *message_queue) {
+ ASSERT(!crit_.CurrentThreadIsOwner()); // See note above.
+ // If this is the last MessageQueue, destroy the manager as well so that
+ // we don't leak this object at program shutdown. As mentioned above, this is
+ // not thread-safe, but this should only happen at program termination (when
+ // the ThreadManager is destroyed, and threads are no longer active).
+ bool destroy = false;
+ {
+ CritScope cs(&crit_);
+ std::vector<MessageQueue *>::iterator iter;
+ iter = std::find(message_queues_.begin(), message_queues_.end(),
+ message_queue);
+ if (iter != message_queues_.end()) {
+ message_queues_.erase(iter);
+ }
+ destroy = message_queues_.empty();
+ }
+ if (destroy) {
+ instance_ = NULL;
+ delete this;
+ }
+}
+
+void MessageQueueManager::Clear(MessageHandler *handler) {
+ // If there isn't a message queue manager instance, then there aren't any
+ // queues to remove this handler from.
+ if (!instance_) return;
+ return Instance()->ClearInternal(handler);
+}
+void MessageQueueManager::ClearInternal(MessageHandler *handler) {
+ ASSERT(!crit_.CurrentThreadIsOwner()); // See note above.
+ CritScope cs(&crit_);
+ std::vector<MessageQueue *>::iterator iter;
+ for (iter = message_queues_.begin(); iter != message_queues_.end(); iter++)
+ (*iter)->Clear(handler);
+}
+
+//------------------------------------------------------------------
+// MessageQueue
+
+MessageQueue::MessageQueue(SocketServer* ss)
+ : ss_(ss), fStop_(false), fPeekKeep_(false),
+ dmsgq_next_num_(0) {
+ if (!ss_) {
+ // Currently, MessageQueue holds a socket server, and is the base class for
+ // Thread. It seems like it makes more sense for Thread to hold the socket
+ // server, and provide it to the MessageQueue, since the Thread controls
+ // the I/O model, and MQ is agnostic to those details. Anyway, this causes
+ // messagequeue_unittest to depend on network libraries... yuck.
+ default_ss_.reset(new DefaultSocketServer());
+ ss_ = default_ss_.get();
+ }
+ ss_->SetMessageQueue(this);
+ MessageQueueManager::Add(this);
+}
+
+MessageQueue::~MessageQueue() {
+ // The signal is done from here to ensure
+ // that it always gets called when the queue
+ // is going away.
+ SignalQueueDestroyed();
+ MessageQueueManager::Remove(this);
+ Clear(NULL);
+ if (ss_) {
+ ss_->SetMessageQueue(NULL);
+ }
+}
+
+void MessageQueue::set_socketserver(SocketServer* ss) {
+ ss_ = ss ? ss : default_ss_.get();
+ ss_->SetMessageQueue(this);
+}
+
+void MessageQueue::Quit() {
+ fStop_ = true;
+ ss_->WakeUp();
+}
+
+bool MessageQueue::IsQuitting() {
+ return fStop_;
+}
+
+void MessageQueue::Restart() {
+ fStop_ = false;
+}
+
+bool MessageQueue::Peek(Message *pmsg, int cmsWait) {
+ if (fPeekKeep_) {
+ *pmsg = msgPeek_;
+ return true;
+ }
+ if (!Get(pmsg, cmsWait))
+ return false;
+ msgPeek_ = *pmsg;
+ fPeekKeep_ = true;
+ return true;
+}
+
+bool MessageQueue::Get(Message *pmsg, int cmsWait, bool process_io) {
+ // Return and clear peek if present
+ // Always return the peek if it exists so there is Peek/Get symmetry
+
+ if (fPeekKeep_) {
+ *pmsg = msgPeek_;
+ fPeekKeep_ = false;
+ return true;
+ }
+
+ // Get w/wait + timer scan / dispatch + socket / event multiplexer dispatch
+
+ int cmsTotal = cmsWait;
+ int cmsElapsed = 0;
+ uint32 msStart = Time();
+ uint32 msCurrent = msStart;
+ while (true) {
+ // Check for sent messages
+ ReceiveSends();
+
+ // Check for posted events
+ int cmsDelayNext = kForever;
+ bool first_pass = true;
+ while (true) {
+ // All queue operations need to be locked, but nothing else in this loop
+ // (specifically handling disposed messages) can happen inside the crit.
+ // Otherwise, disposed MessageHandlers will cause deadlocks.
+ {
+ CritScope cs(&crit_);
+ // On the first pass, check for delayed messages that have been
+ // triggered and calculate the next trigger time.
+ if (first_pass) {
+ first_pass = false;
+ while (!dmsgq_.empty()) {
+ if (TimeIsLater(msCurrent, dmsgq_.top().msTrigger_)) {
+ cmsDelayNext = TimeDiff(dmsgq_.top().msTrigger_, msCurrent);
+ break;
+ }
+ msgq_.push_back(dmsgq_.top().msg_);
+ dmsgq_.pop();
+ }
+ }
+ // Pull a message off the message queue, if available.
+ if (msgq_.empty()) {
+ break;
+ } else {
+ *pmsg = msgq_.front();
+ msgq_.pop_front();
+ }
+ } // crit_ is released here.
+
+ // Log a warning for time-sensitive messages that we're late to deliver.
+ if (pmsg->ts_sensitive) {
+ int32 delay = TimeDiff(msCurrent, pmsg->ts_sensitive);
+ if (delay > 0) {
+ LOG_F(LS_WARNING) << "id: " << pmsg->message_id << " delay: "
+ << (delay + kMaxMsgLatency) << "ms";
+ }
+ }
+ // If this was a dispose message, delete it and skip it.
+ if (MQID_DISPOSE == pmsg->message_id) {
+ ASSERT(NULL == pmsg->phandler);
+ delete pmsg->pdata;
+ *pmsg = Message();
+ continue;
+ }
+ return true;
+ }
+
+ if (fStop_)
+ break;
+
+ // Which is shorter, the delay wait or the requested wait?
+
+ int cmsNext;
+ if (cmsWait == kForever) {
+ cmsNext = cmsDelayNext;
+ } else {
+ cmsNext = _max(0, cmsTotal - cmsElapsed);
+ if ((cmsDelayNext != kForever) && (cmsDelayNext < cmsNext))
+ cmsNext = cmsDelayNext;
+ }
+
+ // Wait and multiplex in the meantime
+ if (!ss_->Wait(cmsNext, process_io))
+ return false;
+
+ // If the specified timeout expired, return
+
+ msCurrent = Time();
+ cmsElapsed = TimeDiff(msCurrent, msStart);
+ if (cmsWait != kForever) {
+ if (cmsElapsed >= cmsWait)
+ return false;
+ }
+ }
+ return false;
+}
+
+void MessageQueue::ReceiveSends() {
+}
+
+void MessageQueue::Post(MessageHandler *phandler, uint32 id,
+ MessageData *pdata, bool time_sensitive) {
+ if (fStop_)
+ return;
+
+ // Keep thread safe
+ // Add the message to the end of the queue
+ // Signal for the multiplexer to return
+
+ CritScope cs(&crit_);
+ Message msg;
+ msg.phandler = phandler;
+ msg.message_id = id;
+ msg.pdata = pdata;
+ if (time_sensitive) {
+ msg.ts_sensitive = Time() + kMaxMsgLatency;
+ }
+ msgq_.push_back(msg);
+ ss_->WakeUp();
+}
+
+void MessageQueue::DoDelayPost(int cmsDelay, uint32 tstamp,
+ MessageHandler *phandler, uint32 id, MessageData* pdata) {
+ if (fStop_)
+ return;
+
+ // Keep thread safe
+ // Add to the priority queue. Gets sorted soonest first.
+ // Signal for the multiplexer to return.
+
+ CritScope cs(&crit_);
+ Message msg;
+ msg.phandler = phandler;
+ msg.message_id = id;
+ msg.pdata = pdata;
+ DelayedMessage dmsg(cmsDelay, tstamp, dmsgq_next_num_, msg);
+ dmsgq_.push(dmsg);
+ // If this message queue processes 1 message every millisecond for 50 days,
+ // we will wrap this number. Even then, only messages with identical times
+ // will be misordered, and then only briefly. This is probably ok.
+ VERIFY(0 != ++dmsgq_next_num_);
+ ss_->WakeUp();
+}
+
+int MessageQueue::GetDelay() {
+ CritScope cs(&crit_);
+
+ if (!msgq_.empty())
+ return 0;
+
+ if (!dmsgq_.empty()) {
+ int delay = TimeUntil(dmsgq_.top().msTrigger_);
+ if (delay < 0)
+ delay = 0;
+ return delay;
+ }
+
+ return kForever;
+}
+
+void MessageQueue::Clear(MessageHandler *phandler, uint32 id,
+ MessageList* removed) {
+ CritScope cs(&crit_);
+
+ // Remove messages with phandler
+
+ if (fPeekKeep_ && msgPeek_.Match(phandler, id)) {
+ if (removed) {
+ removed->push_back(msgPeek_);
+ } else {
+ delete msgPeek_.pdata;
+ }
+ fPeekKeep_ = false;
+ }
+
+ // Remove from ordered message queue
+
+ for (MessageList::iterator it = msgq_.begin(); it != msgq_.end();) {
+ if (it->Match(phandler, id)) {
+ if (removed) {
+ removed->push_back(*it);
+ } else {
+ delete it->pdata;
+ }
+ it = msgq_.erase(it);
+ } else {
+ ++it;
+ }
+ }
+
+ // Remove from priority queue. Not directly iterable, so use this approach
+
+ PriorityQueue::container_type::iterator new_end = dmsgq_.container().begin();
+ for (PriorityQueue::container_type::iterator it = new_end;
+ it != dmsgq_.container().end(); ++it) {
+ if (it->msg_.Match(phandler, id)) {
+ if (removed) {
+ removed->push_back(it->msg_);
+ } else {
+ delete it->msg_.pdata;
+ }
+ } else {
+ *new_end++ = *it;
+ }
+ }
+ dmsgq_.container().erase(new_end, dmsgq_.container().end());
+ dmsgq_.reheap();
+}
+
+void MessageQueue::Dispatch(Message *pmsg) {
+ pmsg->phandler->OnMessage(pmsg);
+}
+
+} // namespace rtc
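
A minimal sketch of the Post/Get/Dispatch cycle on a bare MessageQueue, assuming it is built and linked inside the WebRTC tree; in practice a Thread (which derives from MessageQueue, per the constructor comment above) runs this loop for you:

#include <cstdio>

#include "webrtc/base/messagehandler.h"
#include "webrtc/base/messagequeue.h"

namespace {

class PrintHandler : public rtc::MessageHandler {
 public:
  virtual void OnMessage(rtc::Message* msg) {
    printf("got message id %u\n", static_cast<unsigned>(msg->message_id));
  }
};

}  // namespace

int main() {
  rtc::MessageQueue queue;  // Falls back to the default socket server.
  PrintHandler handler;

  queue.Post(&handler, 1);
  queue.PostDelayed(10, &handler, 2);  // Delivered roughly 10 ms later.

  rtc::Message msg;
  // Pump until both messages have been dispatched; Get() blocks on the
  // socket server until a message is ready or the wait expires.
  for (int delivered = 0; delivered < 2 && queue.Get(&msg, 100); ) {
    queue.Dispatch(&msg);
    ++delivered;
  }
  return 0;
}
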
diff --git a/chromium/third_party/webrtc/base/messagequeue.h b/chromium/third_party/webrtc/base/messagequeue.h
new file mode 100644
index 00000000000..41c1e24b00b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/messagequeue.h
@@ -0,0 +1,254 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_MESSAGEQUEUE_H_
+#define WEBRTC_BASE_MESSAGEQUEUE_H_
+
+#include <string.h>
+
+#include <algorithm>
+#include <list>
+#include <queue>
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/socketserver.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+struct Message;
+class MessageQueue;
+
+// MessageQueueManager does cleanup of message queues
+
+class MessageQueueManager {
+ public:
+ static void Add(MessageQueue *message_queue);
+ static void Remove(MessageQueue *message_queue);
+ static void Clear(MessageHandler *handler);
+
+ // For testing purposes, we expose whether or not the MessageQueueManager
+ // instance has been initialized. It has no other use relative to the rest of
+ // the functions of this class, which auto-initialize the underlying
+ // MessageQueueManager instance when necessary.
+ static bool IsInitialized();
+
+ private:
+ static MessageQueueManager* Instance();
+
+ MessageQueueManager();
+ ~MessageQueueManager();
+
+ void AddInternal(MessageQueue *message_queue);
+ void RemoveInternal(MessageQueue *message_queue);
+ void ClearInternal(MessageHandler *handler);
+
+ static MessageQueueManager* instance_;
+ // This list contains all live MessageQueues.
+ std::vector<MessageQueue *> message_queues_;
+ CriticalSection crit_;
+};
+
+// Derive from this for specialized data
+// App manages lifetime, except when messages are purged
+
+class MessageData {
+ public:
+ MessageData() {}
+ virtual ~MessageData() {}
+};
+
+template <class T>
+class TypedMessageData : public MessageData {
+ public:
+ explicit TypedMessageData(const T& data) : data_(data) { }
+ const T& data() const { return data_; }
+ T& data() { return data_; }
+ private:
+ T data_;
+};
+
+// Like TypedMessageData, but for pointers that require a delete.
+template <class T>
+class ScopedMessageData : public MessageData {
+ public:
+ explicit ScopedMessageData(T* data) : data_(data) { }
+ const scoped_ptr<T>& data() const { return data_; }
+ scoped_ptr<T>& data() { return data_; }
+ private:
+ scoped_ptr<T> data_;
+};
+
+// Like ScopedMessageData, but for reference counted pointers.
+template <class T>
+class ScopedRefMessageData : public MessageData {
+ public:
+ explicit ScopedRefMessageData(T* data) : data_(data) { }
+ const scoped_refptr<T>& data() const { return data_; }
+ scoped_refptr<T>& data() { return data_; }
+ private:
+ scoped_refptr<T> data_;
+};
+
+template<class T>
+inline MessageData* WrapMessageData(const T& data) {
+ return new TypedMessageData<T>(data);
+}
+
+template<class T>
+inline const T& UseMessageData(MessageData* data) {
+ return static_cast< TypedMessageData<T>* >(data)->data();
+}
+
+template<class T>
+class DisposeData : public MessageData {
+ public:
+ explicit DisposeData(T* data) : data_(data) { }
+ virtual ~DisposeData() { delete data_; }
+ private:
+ T* data_;
+};
+
+const uint32 MQID_ANY = static_cast<uint32>(-1);
+const uint32 MQID_DISPOSE = static_cast<uint32>(-2);
+
+// No destructor
+
+struct Message {
+ Message() {
+ memset(this, 0, sizeof(*this));
+ }
+ inline bool Match(MessageHandler* handler, uint32 id) const {
+ return (handler == NULL || handler == phandler)
+ && (id == MQID_ANY || id == message_id);
+ }
+ MessageHandler *phandler;
+ uint32 message_id;
+ MessageData *pdata;
+ uint32 ts_sensitive;
+};
+
+typedef std::list<Message> MessageList;
+
+// DelayedMessage goes into a priority queue, sorted by trigger time. Messages
+// with the same trigger time are processed in num_ (FIFO) order.
+
+class DelayedMessage {
+ public:
+ DelayedMessage(int delay, uint32 trigger, uint32 num, const Message& msg)
+ : cmsDelay_(delay), msTrigger_(trigger), num_(num), msg_(msg) { }
+
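+  // The comparison is inverted on purpose: std::priority_queue is a max-heap,
+  // so the earliest trigger time (and, for equal times, the smallest num_)
+  // must compare as the "largest" element to surface at the top.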
+ bool operator< (const DelayedMessage& dmsg) const {
+ return (dmsg.msTrigger_ < msTrigger_)
+ || ((dmsg.msTrigger_ == msTrigger_) && (dmsg.num_ < num_));
+ }
+
+ int cmsDelay_; // for debugging
+ uint32 msTrigger_;
+ uint32 num_;
+ Message msg_;
+};
+
+class MessageQueue {
+ public:
+ explicit MessageQueue(SocketServer* ss = NULL);
+ virtual ~MessageQueue();
+
+ SocketServer* socketserver() { return ss_; }
+ void set_socketserver(SocketServer* ss);
+
+  // Note: The behavior of MessageQueue has changed. When a MessageQueue is
+  // stopped, further Posts and Sends will fail. However, any pending Sends
+  // and *ready* Posts (as opposed to unexpired delayed Posts) will be
+  // delivered before Get (or Peek) returns false. By guaranteeing delivery of
+  // those messages, we eliminate the race condition that arises when a
+  // MessageHandler and a MessageQueue may be destroyed independently of each
+  // other.
+ virtual void Quit();
+ virtual bool IsQuitting();
+ virtual void Restart();
+
+  // Get() will process I/O until:
+  //  1) A message is available (returns true)
+  //  2) cmsWait milliseconds have elapsed (returns false)
+  //  3) Quit() is called (returns false)
+ virtual bool Get(Message *pmsg, int cmsWait = kForever,
+ bool process_io = true);
+ virtual bool Peek(Message *pmsg, int cmsWait = 0);
+ virtual void Post(MessageHandler *phandler, uint32 id = 0,
+ MessageData *pdata = NULL, bool time_sensitive = false);
+ virtual void PostDelayed(int cmsDelay, MessageHandler *phandler,
+ uint32 id = 0, MessageData *pdata = NULL) {
+ return DoDelayPost(cmsDelay, TimeAfter(cmsDelay), phandler, id, pdata);
+ }
+ virtual void PostAt(uint32 tstamp, MessageHandler *phandler,
+ uint32 id = 0, MessageData *pdata = NULL) {
+ return DoDelayPost(TimeUntil(tstamp), tstamp, phandler, id, pdata);
+ }
+ virtual void Clear(MessageHandler *phandler, uint32 id = MQID_ANY,
+ MessageList* removed = NULL);
+ virtual void Dispatch(Message *pmsg);
+ virtual void ReceiveSends();
+
+ // Amount of time until the next message can be retrieved
+ virtual int GetDelay();
+
+ bool empty() const { return size() == 0u; }
+ size_t size() const {
+ CritScope cs(&crit_); // msgq_.size() is not thread safe.
+ return msgq_.size() + dmsgq_.size() + (fPeekKeep_ ? 1u : 0u);
+ }
+
+ // Internally posts a message which causes the doomed object to be deleted
+ template<class T> void Dispose(T* doomed) {
+ if (doomed) {
+ Post(NULL, MQID_DISPOSE, new DisposeData<T>(doomed));
+ }
+ }
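+  // Usage sketch: queue->Dispose(doomed); the object is deleted when the
+  // internal MQID_DISPOSE message is pumped (e.g. during a later Get() call).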
+
+ // When this signal is sent out, any references to this queue should
+ // no longer be used.
+ sigslot::signal0<> SignalQueueDestroyed;
+
+ protected:
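+  // A priority_queue whose underlying container is exposed, so that Clear()
+  // can erase arbitrary entries and then restore the heap invariant with
+  // reheap().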
+ class PriorityQueue : public std::priority_queue<DelayedMessage> {
+ public:
+ container_type& container() { return c; }
+ void reheap() { make_heap(c.begin(), c.end(), comp); }
+ };
+
+ void DoDelayPost(int cmsDelay, uint32 tstamp, MessageHandler *phandler,
+ uint32 id, MessageData* pdata);
+
+ // The SocketServer is not owned by MessageQueue.
+ SocketServer* ss_;
+ // If a server isn't supplied in the constructor, use this one.
+ scoped_ptr<SocketServer> default_ss_;
+ bool fStop_;
+ bool fPeekKeep_;
+ Message msgPeek_;
+ MessageList msgq_;
+ PriorityQueue dmsgq_;
+ uint32 dmsgq_next_num_;
+ mutable CriticalSection crit_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MessageQueue);
+};
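+
+// Illustrative usage sketch (MyHandler and kTimeoutId are placeholder names;
+// in practice the Thread subclass runs this pump for you):
+//
+//   MessageQueue q;
+//   MyHandler handler;                          // a MessageHandler subclass
+//   q.Post(&handler, kTimeoutId);               // ready immediately
+//   q.PostDelayed(1000, &handler, kTimeoutId);  // ready in ~1 second
+//   Message msg;
+//   while (q.Get(&msg)) {
+//     q.Dispatch(&msg);                         // calls handler.OnMessage()
+//   }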
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MESSAGEQUEUE_H_
diff --git a/chromium/third_party/webrtc/base/messagequeue_unittest.cc b/chromium/third_party/webrtc/base/messagequeue_unittest.cc
new file mode 100644
index 00000000000..78024e0b2d6
--- /dev/null
+++ b/chromium/third_party/webrtc/base/messagequeue_unittest.cc
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/messagequeue.h"
+
+#include "webrtc/base/bind.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/base/nullsocketserver.h"
+
+using namespace rtc;
+
+class MessageQueueTest: public testing::Test, public MessageQueue {
+ public:
+ bool IsLocked_Worker() {
+ if (!crit_.TryEnter()) {
+ return true;
+ }
+ crit_.Leave();
+ return false;
+ }
+ bool IsLocked() {
+ // We have to do this on a worker thread, or else the TryEnter will
+ // succeed, since our critical sections are reentrant.
+ Thread worker;
+ worker.Start();
+ return worker.Invoke<bool>(
+ rtc::Bind(&MessageQueueTest::IsLocked_Worker, this));
+ }
+};
+
+struct DeletedLockChecker {
+ DeletedLockChecker(MessageQueueTest* test, bool* was_locked, bool* deleted)
+ : test(test), was_locked(was_locked), deleted(deleted) { }
+ ~DeletedLockChecker() {
+ *deleted = true;
+ *was_locked = test->IsLocked();
+ }
+ MessageQueueTest* test;
+ bool* was_locked;
+ bool* deleted;
+};
+
+static void DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder(
+ MessageQueue* q) {
+ EXPECT_TRUE(q != NULL);
+ TimeStamp now = Time();
+ q->PostAt(now, NULL, 3);
+ q->PostAt(now - 2, NULL, 0);
+ q->PostAt(now - 1, NULL, 1);
+ q->PostAt(now, NULL, 4);
+ q->PostAt(now - 1, NULL, 2);
+
+ Message msg;
+ for (size_t i=0; i<5; ++i) {
+ memset(&msg, 0, sizeof(msg));
+ EXPECT_TRUE(q->Get(&msg, 0));
+ EXPECT_EQ(i, msg.message_id);
+ }
+
+ EXPECT_FALSE(q->Get(&msg, 0)); // No more messages
+}
+
+TEST_F(MessageQueueTest,
+ DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder) {
+ MessageQueue q;
+ DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder(&q);
+ NullSocketServer nullss;
+ MessageQueue q_nullss(&nullss);
+ DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder(&q_nullss);
+}
+
+TEST_F(MessageQueueTest, DisposeNotLocked) {
+ bool was_locked = true;
+ bool deleted = false;
+ DeletedLockChecker* d = new DeletedLockChecker(this, &was_locked, &deleted);
+ Dispose(d);
+ Message msg;
+ EXPECT_FALSE(Get(&msg, 0));
+ EXPECT_TRUE(deleted);
+ EXPECT_FALSE(was_locked);
+}
+
+class DeletedMessageHandler : public MessageHandler {
+ public:
+ explicit DeletedMessageHandler(bool* deleted) : deleted_(deleted) { }
+ ~DeletedMessageHandler() {
+ *deleted_ = true;
+ }
+ void OnMessage(Message* msg) { }
+ private:
+ bool* deleted_;
+};
+
+TEST_F(MessageQueueTest, DisposeHandlerWithPostedMessagePending) {
+ bool deleted = false;
+ DeletedMessageHandler *handler = new DeletedMessageHandler(&deleted);
+ // First, post a dispose.
+ Dispose(handler);
+ // Now, post a message, which should *not* be returned by Get().
+ Post(handler, 1);
+ Message msg;
+ EXPECT_FALSE(Get(&msg, 0));
+ EXPECT_TRUE(deleted);
+}
+
+struct UnwrapMainThreadScope {
+ UnwrapMainThreadScope() : rewrap_(Thread::Current() != NULL) {
+ if (rewrap_) ThreadManager::Instance()->UnwrapCurrentThread();
+ }
+ ~UnwrapMainThreadScope() {
+ if (rewrap_) ThreadManager::Instance()->WrapCurrentThread();
+ }
+ private:
+ bool rewrap_;
+};
+
+TEST(MessageQueueManager, Clear) {
+ UnwrapMainThreadScope s;
+ if (MessageQueueManager::IsInitialized()) {
+ LOG(LS_INFO) << "Unable to run MessageQueueManager::Clear test, since the "
+ << "MessageQueueManager was already initialized by some "
+ << "other test in this run.";
+ return;
+ }
+ bool deleted = false;
+ DeletedMessageHandler* handler = new DeletedMessageHandler(&deleted);
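+  // ~MessageHandler() clears the handler via MessageQueueManager::Clear();
+  // that path must not lazily create the manager instance (checked below).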
+ delete handler;
+ EXPECT_TRUE(deleted);
+ EXPECT_FALSE(MessageQueueManager::IsInitialized());
+}
diff --git a/chromium/third_party/webrtc/base/move.h b/chromium/third_party/webrtc/base/move.h
new file mode 100644
index 00000000000..6d59cc583fa
--- /dev/null
+++ b/chromium/third_party/webrtc/base/move.h
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef THIRD_PARTY_WEBRTC_FILES_WEBRTC_BASE_MOVE_H_
+#define THIRD_PARTY_WEBRTC_FILES_WEBRTC_BASE_MOVE_H_
+
+// Macro with the boilerplate that makes a type move-only in C++03.
+//
+// USAGE
+//
+// This macro should be used instead of DISALLOW_COPY_AND_ASSIGN to create
+// a "move-only" type. Unlike DISALLOW_COPY_AND_ASSIGN, this macro should be
+// the first line in a class declaration.
+//
+// A class using this macro must call .Pass() (or somehow be an r-value already)
+// before it can be:
+//
+// * Passed as a function argument
+// * Used as the right-hand side of an assignment
+// * Returned from a function
+//
+// Each class will still need to define their own "move constructor" and "move
+// operator=" to make this useful. Here's an example of the macro, the move
+// constructor, and the move operator= from the scoped_ptr class:
+//
+// template <typename T>
+// class scoped_ptr {
+// TALK_MOVE_ONLY_TYPE_FOR_CPP_03(scoped_ptr, RValue)
+// public:
+// scoped_ptr(RValue& other) : ptr_(other.release()) { }
+// scoped_ptr& operator=(RValue& other) {
+// swap(other);
+// return *this;
+// }
+// };
+//
+// Note that the constructor must NOT be marked explicit.
+//
+// For consistency, the second parameter to the macro should always be RValue
+// unless you have a strong reason to do otherwise. It is only exposed as a
+// macro parameter so that the move constructor and move operator= don't look
+// like they're using a phantom type.
+//
+//
+// HOW THIS WORKS
+//
+// For a thorough explanation of this technique, see:
+//
+// http://en.wikibooks.org/wiki/More_C%2B%2B_Idioms/Move_Constructor
+//
+// The summary is that we take advantage of 2 properties:
+//
+// 1) non-const references will not bind to r-values.
+// 2) C++ can apply one user-defined conversion when initializing a
+// variable.
+//
+// The first lets us disable the copy constructor and assignment operator
+// by declaring private versions of them with a non-const reference parameter.
+//
+// For l-values, direct initialization still fails like in
+// DISALLOW_COPY_AND_ASSIGN because the copy constructor and assignment
+// operators are private.
+//
+// For r-values, the situation is different. The copy constructor and
+// assignment operator are not viable due to (1), so we are trying to call
+// a non-existent constructor and a non-existent operator= rather than private
+// ones. Since we have not committed an error quite yet, we can provide an
+// alternate conversion sequence and a constructor. We add
+//
+// * a private struct named "RValue"
+// * a user-defined conversion "operator RValue()"
+// * a "move constructor" and "move operator=" that take the RValue& as
+// their sole parameter.
+//
+// Only r-values will trigger this sequence and execute our "move constructor"
+// or "move operator=". L-values will match the private copy constructor and
+// operator= first, giving a "private in this context" error. This combination
+// gives us a move-only type.
+//
+// For signaling a destructive transfer of data from an l-value, we provide a
+// method named Pass() which creates an r-value for the current instance
+// triggering the move constructor or move operator=.
+//
+// Another way to get an r-value is to use the result of an expression, such
+// as a function call.
+//
+// Here's an example with comments explaining what gets triggered where:
+//
+// class Foo {
+// TALK_MOVE_ONLY_TYPE_FOR_CPP_03(Foo, RValue);
+//
+// public:
+// ... API ...
+// Foo(RValue other); // Move constructor.
+// Foo& operator=(RValue rhs); // Move operator=
+// };
+//
+// Foo MakeFoo(); // Function that returns a Foo.
+//
+// Foo f;
+// Foo f_copy(f); // ERROR: Foo(Foo&) is private in this context.
+// Foo f_assign;
+// f_assign = f; // ERROR: operator=(Foo&) is private in this context.
+//
+//
+// Foo f(MakeFoo()); // R-value so alternate conversion executed.
+// Foo f_copy(f.Pass()); // R-value so alternate conversion executed.
+// f = f_copy.Pass(); // R-value so alternate conversion executed.
+//
+//
+// IMPLEMENTATION SUBTLETIES WITH RValue
+//
+// The RValue struct is just a container for a pointer back to the original
+// object. It should only ever be created as a temporary, and no external
+// class should ever declare it or use it in a parameter.
+//
+// It is tempting to want to use the RValue type in function parameters, but
+// excluding the limited usage here for the move constructor and move
+// operator=, doing so would mean that the function could take both r-values
+// and l-values equally, which is unexpected. See COMPARED TO Boost.Move for
+// more details.
+//
+// An alternate, and incorrect, implementation of the RValue class used by
+// Boost.Move makes RValue a fieldless child of the move-only type. RValue&
+// is then used in place of RValue in the various operators. The RValue& is
+// "created" by doing *reinterpret_cast<RValue*>(this). This has the appeal
+// of never creating a temporary RValue struct even with optimizations
+// disabled. Also, by virtue of inheritance you can treat the RValue
+// reference as if it were the move-only type itself. Unfortunately,
+// using the result of this reinterpret_cast<> is actually undefined behavior
+// due to C++98 5.2.10.7. In certain compilers (e.g., NaCl) the optimizer
+// will generate non-working code.
+//
+// In optimized builds, both implementations generate the same assembly so we
+// choose the one that adheres to the standard.
+//
+//
+// COMPARED TO C++11
+//
+// In C++11, you would implement this functionality using an r-value reference
+// and our .Pass() method would be replaced with a call to std::move().
+//
+// This emulation also has a deficiency where it uses up the single
+// user-defined conversion allowed by C++ during initialization. This can
+// cause problems in some API edge cases. For instance, in scoped_ptr, it is
+// impossible to make a function "void Foo(scoped_ptr<Parent> p)" accept a
+// value of type scoped_ptr<Child> even if you add a constructor to
+// scoped_ptr<> that would make it look like it should work. C++11 does not
+// have this deficiency.
+//
+//
+// COMPARED TO Boost.Move
+//
+// Our implementation is similar to Boost.Move, but we keep the RValue struct
+// private to the move-only type, and we don't use the reinterpret_cast<> hack.
+//
+// In Boost.Move, RValue is the boost::rv<> template. This type can be used
+// when writing APIs like:
+//
+// void MyFunc(boost::rv<Foo>& f)
+//
+// that can take advantage of rv<> to avoid extra copies of a type. However,
+// you would still be able to call this version of MyFunc with an l-value:
+//
+// Foo f;
+// MyFunc(f); // Uh oh, we probably just destroyed |f| w/o calling Pass().
+//
+// unless someone is very careful to also declare a parallel override like:
+//
+// void MyFunc(const Foo& f)
+//
+// that would catch the l-values first. This was declared unsafe in C++11 and
+// a C++11 compiler will explicitly fail MyFunc(f). Unfortunately, we cannot
+// ensure this in C++03.
+//
+// Since we have no need for writing such APIs yet, our implementation keeps
+// RValue private and uses a .Pass() method to do the conversion instead of
+// trying to write a version of "std::move()." Writing an API like std::move()
+// would require the RValue struct to be public.
+//
+//
+// CAVEATS
+//
+// If you include a move-only type as a field inside a class that does not
+// explicitly declare a copy constructor, the containing class's implicit
+// copy constructor will change from Containing(const Containing&) to
+// Containing(Containing&). This can cause some unexpected errors.
+//
+// http://llvm.org/bugs/show_bug.cgi?id=11528
+//
+// The workaround is to explicitly declare your copy constructor.
+//
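+// A minimal sketch of the workaround (Container and member_ are placeholder
+// names):
+//
+//   class Container {
+//    public:
+//     Container(const Container& other);  // Declared explicitly so the
+//                                         // signature stays (const&).
+//    private:
+//     SomeMoveOnlyType member_;
+//   };
+//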
+#define TALK_MOVE_ONLY_TYPE_FOR_CPP_03(type, rvalue_type) \
+ private: \
+ struct rvalue_type { \
+ explicit rvalue_type(type* object) : object(object) {} \
+ type* object; \
+ }; \
+ type(type&); \
+ void operator=(type&); \
+ public: \
+ operator rvalue_type() { return rvalue_type(this); } \
+ type Pass() { return type(rvalue_type(this)); } \
+ private:
+
+#endif // THIRD_PARTY_WEBRTC_FILES_WEBRTC_BASE_MOVE_H_
diff --git a/chromium/third_party/webrtc/base/multipart.cc b/chromium/third_party/webrtc/base/multipart.cc
new file mode 100644
index 00000000000..0d73880e46f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/multipart.cc
@@ -0,0 +1,253 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/httpcommon.h"
+#include "webrtc/base/multipart.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// MultipartStream
+///////////////////////////////////////////////////////////////////////////////
+
+MultipartStream::MultipartStream(const std::string& type,
+ const std::string& boundary)
+ : type_(type),
+ boundary_(boundary),
+ adding_(true),
+ current_(0),
+ position_(0) {
+ // The content type should be multipart/*.
+ ASSERT(0 == strncmp(type_.c_str(), "multipart/", 10));
+}
+
+MultipartStream::~MultipartStream() {
+ Close();
+}
+
+void MultipartStream::GetContentType(std::string* content_type) {
+ ASSERT(NULL != content_type);
+ content_type->assign(type_);
+ content_type->append("; boundary=");
+ content_type->append(boundary_);
+}
+
+bool MultipartStream::AddPart(StreamInterface* data_stream,
+ const std::string& content_disposition,
+ const std::string& content_type) {
+ if (!AddPart("", content_disposition, content_type))
+ return false;
+ parts_.push_back(data_stream);
+ data_stream->SignalEvent.connect(this, &MultipartStream::OnEvent);
+ return true;
+}
+
+bool MultipartStream::AddPart(const std::string& data,
+ const std::string& content_disposition,
+ const std::string& content_type) {
+ ASSERT(adding_);
+ if (!adding_)
+ return false;
+ std::stringstream ss;
+ if (!parts_.empty()) {
+ ss << "\r\n";
+ }
+ ss << "--" << boundary_ << "\r\n";
+ if (!content_disposition.empty()) {
+ ss << ToString(HH_CONTENT_DISPOSITION) << ": "
+ << content_disposition << "\r\n";
+ }
+ if (!content_type.empty()) {
+ ss << ToString(HH_CONTENT_TYPE) << ": "
+ << content_type << "\r\n";
+ }
+ ss << "\r\n" << data;
+ parts_.push_back(new MemoryStream(ss.str().data(), ss.str().size()));
+ return true;
+}
+
+void MultipartStream::EndParts() {
+ ASSERT(adding_);
+ if (!adding_)
+ return;
+
+ std::stringstream ss;
+ if (!parts_.empty()) {
+ ss << "\r\n";
+ }
+ ss << "--" << boundary_ << "--" << "\r\n";
+ parts_.push_back(new MemoryStream(ss.str().data(), ss.str().size()));
+
+ ASSERT(0 == current_);
+ ASSERT(0 == position_);
+ adding_ = false;
+ SignalEvent(this, SE_OPEN | SE_READ, 0);
+}
+
+size_t MultipartStream::GetPartSize(const std::string& data,
+ const std::string& content_disposition,
+ const std::string& content_type) const {
+ size_t size = 0;
+ if (!parts_.empty()) {
+ size += 2; // for "\r\n";
+ }
+ size += boundary_.size() + 4; // for "--boundary_\r\n";
+ if (!content_disposition.empty()) {
+ // for ToString(HH_CONTENT_DISPOSITION): content_disposition\r\n
+ size += std::string(ToString(HH_CONTENT_DISPOSITION)).size() + 2 +
+ content_disposition.size() + 2;
+ }
+ if (!content_type.empty()) {
+ // for ToString(HH_CONTENT_TYPE): content_type\r\n
+ size += std::string(ToString(HH_CONTENT_TYPE)).size() + 2 +
+ content_type.size() + 2;
+ }
+ size += 2 + data.size(); // for \r\ndata
+ return size;
+}
+
+size_t MultipartStream::GetEndPartSize() const {
+ size_t size = 0;
+ if (!parts_.empty()) {
+ size += 2; // for "\r\n";
+ }
+ size += boundary_.size() + 6; // for "--boundary_--\r\n";
+ return size;
+}
+
+//
+// StreamInterface
+//
+
+StreamState MultipartStream::GetState() const {
+ if (adding_) {
+ return SS_OPENING;
+ }
+ return (current_ < parts_.size()) ? SS_OPEN : SS_CLOSED;
+}
+
+StreamResult MultipartStream::Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ if (adding_) {
+ return SR_BLOCK;
+ }
+ size_t local_read;
+ if (!read) read = &local_read;
+ while (current_ < parts_.size()) {
+ StreamResult result = parts_[current_]->Read(buffer, buffer_len, read,
+ error);
+ if (SR_EOS != result) {
+ if (SR_SUCCESS == result) {
+ position_ += *read;
+ }
+ return result;
+ }
+ ++current_;
+ }
+ return SR_EOS;
+}
+
+StreamResult MultipartStream::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ if (error) {
+ *error = -1;
+ }
+ return SR_ERROR;
+}
+
+void MultipartStream::Close() {
+ for (size_t i = 0; i < parts_.size(); ++i) {
+ delete parts_[i];
+ }
+ parts_.clear();
+ adding_ = false;
+ current_ = 0;
+ position_ = 0;
+}
+
+bool MultipartStream::SetPosition(size_t position) {
+ if (adding_) {
+ return false;
+ }
+ size_t part_size, part_offset = 0;
+ for (size_t i = 0; i < parts_.size(); ++i) {
+ if (!parts_[i]->GetSize(&part_size)) {
+ return false;
+ }
+ if (part_offset + part_size > position) {
+ for (size_t j = i+1; j < _min(parts_.size(), current_+1); ++j) {
+ if (!parts_[j]->Rewind()) {
+ return false;
+ }
+ }
+ if (!parts_[i]->SetPosition(position - part_offset)) {
+ return false;
+ }
+ current_ = i;
+ position_ = position;
+ return true;
+ }
+ part_offset += part_size;
+ }
+ return false;
+}
+
+bool MultipartStream::GetPosition(size_t* position) const {
+ if (position) {
+ *position = position_;
+ }
+ return true;
+}
+
+bool MultipartStream::GetSize(size_t* size) const {
+ size_t part_size, total_size = 0;
+ for (size_t i = 0; i < parts_.size(); ++i) {
+ if (!parts_[i]->GetSize(&part_size)) {
+ return false;
+ }
+ total_size += part_size;
+ }
+ if (size) {
+ *size = total_size;
+ }
+ return true;
+}
+
+bool MultipartStream::GetAvailable(size_t* size) const {
+ if (adding_) {
+ return false;
+ }
+ size_t part_size, total_size = 0;
+ for (size_t i = current_; i < parts_.size(); ++i) {
+ if (!parts_[i]->GetAvailable(&part_size)) {
+ return false;
+ }
+ total_size += part_size;
+ }
+ if (size) {
+ *size = total_size;
+ }
+ return true;
+}
+
+//
+// StreamInterface Slots
+//
+
+void MultipartStream::OnEvent(StreamInterface* stream, int events, int error) {
+ if (adding_ || (current_ >= parts_.size()) || (parts_[current_] != stream)) {
+ return;
+ }
+ SignalEvent(this, events, error);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/multipart.h b/chromium/third_party/webrtc/base/multipart.h
new file mode 100644
index 00000000000..a41f596ffd8
--- /dev/null
+++ b/chromium/third_party/webrtc/base/multipart.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_MULTIPART_H__
+#define WEBRTC_BASE_MULTIPART_H__
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// MultipartStream - Implements an RFC2046 multipart stream by concatenating
+// the supplied parts together, and adding the correct boundaries.
+///////////////////////////////////////////////////////////////////////////////
+
+class MultipartStream : public StreamInterface, public sigslot::has_slots<> {
+ public:
+ MultipartStream(const std::string& type, const std::string& boundary);
+ virtual ~MultipartStream();
+
+ void GetContentType(std::string* content_type);
+
+ // Note: If content_disposition and/or content_type are the empty string,
+ // they will be omitted.
+ bool AddPart(StreamInterface* data_stream,
+ const std::string& content_disposition,
+ const std::string& content_type);
+ bool AddPart(const std::string& data,
+ const std::string& content_disposition,
+ const std::string& content_type);
+ void EndParts();
+
+ // Calculates the size of a part before actually adding the part.
+ size_t GetPartSize(const std::string& data,
+ const std::string& content_disposition,
+ const std::string& content_type) const;
+ size_t GetEndPartSize() const;
+
+ // StreamInterface
+ virtual StreamState GetState() const;
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+ virtual void Close();
+ virtual bool SetPosition(size_t position);
+ virtual bool GetPosition(size_t* position) const;
+ virtual bool GetSize(size_t* size) const;
+ virtual bool GetAvailable(size_t* size) const;
+
+ private:
+ typedef std::vector<StreamInterface*> PartList;
+
+ // StreamInterface Slots
+ void OnEvent(StreamInterface* stream, int events, int error);
+
+ std::string type_, boundary_;
+ PartList parts_;
+ bool adding_;
+ size_t current_; // The index into parts_ of the current read position.
+ size_t position_; // The current read position in bytes.
+
+ DISALLOW_COPY_AND_ASSIGN(MultipartStream);
+};
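+
+// Illustrative usage sketch (the boundary and field names are placeholders):
+//
+//   MultipartStream form("multipart/form-data", "boundary123");
+//   form.AddPart("hello", "form-data; name=\"text\"", "text/plain");
+//   form.EndParts();                    // stream becomes readable (SS_OPEN)
+//   std::string content_type;
+//   form.GetContentType(&content_type); // "multipart/form-data; boundary=..."
+//   // The concatenated body can now be read via StreamInterface::Read().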
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_MULTIPART_H__
diff --git a/chromium/third_party/webrtc/base/multipart_unittest.cc b/chromium/third_party/webrtc/base/multipart_unittest.cc
new file mode 100644
index 00000000000..38e1114935a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/multipart_unittest.cc
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/multipart.h"
+
+namespace rtc {
+
+static const std::string kTestMultipartBoundary = "123456789987654321";
+static const std::string kTestContentType =
+ "multipart/form-data; boundary=123456789987654321";
+static const char kTestData[] = "This is a test.";
+static const char kTestStreamContent[] = "This is a test stream.";
+
+TEST(MultipartTest, TestBasicOperations) {
+ MultipartStream multipart("multipart/form-data", kTestMultipartBoundary);
+ std::string content_type;
+ multipart.GetContentType(&content_type);
+ EXPECT_EQ(kTestContentType, content_type);
+
+ EXPECT_EQ(rtc::SS_OPENING, multipart.GetState());
+
+ // The multipart stream contains only --boundary--\r\n
+ size_t end_part_size = multipart.GetEndPartSize();
+ multipart.EndParts();
+ EXPECT_EQ(rtc::SS_OPEN, multipart.GetState());
+ size_t size;
+ EXPECT_TRUE(multipart.GetSize(&size));
+ EXPECT_EQ(end_part_size, size);
+
+ // Write is not supported.
+ EXPECT_EQ(rtc::SR_ERROR,
+ multipart.Write(kTestData, sizeof(kTestData), NULL, NULL));
+
+ multipart.Close();
+ EXPECT_EQ(rtc::SS_CLOSED, multipart.GetState());
+ EXPECT_TRUE(multipart.GetSize(&size));
+ EXPECT_EQ(0U, size);
+}
+
+TEST(MultipartTest, TestAddAndRead) {
+ MultipartStream multipart("multipart/form-data", kTestMultipartBoundary);
+
+ size_t part_size =
+ multipart.GetPartSize(kTestData, "form-data; name=\"text\"", "text");
+ EXPECT_TRUE(multipart.AddPart(kTestData, "form-data; name=\"text\"", "text"));
+ size_t size;
+ EXPECT_TRUE(multipart.GetSize(&size));
+ EXPECT_EQ(part_size, size);
+
+ rtc::scoped_ptr<rtc::MemoryStream> stream(
+ new rtc::MemoryStream(kTestStreamContent));
+ size_t stream_size = 0;
+ EXPECT_TRUE(stream->GetSize(&stream_size));
+ part_size +=
+ multipart.GetPartSize("", "form-data; name=\"stream\"", "stream");
+ part_size += stream_size;
+
+ EXPECT_TRUE(multipart.AddPart(
+ new rtc::MemoryStream(kTestStreamContent),
+ "form-data; name=\"stream\"",
+ "stream"));
+ EXPECT_TRUE(multipart.GetSize(&size));
+ EXPECT_EQ(part_size, size);
+
+ // In adding state, block read.
+ char buffer[1024];
+ EXPECT_EQ(rtc::SR_BLOCK,
+ multipart.Read(buffer, sizeof(buffer), NULL, NULL));
+ // Write is not supported.
+ EXPECT_EQ(rtc::SR_ERROR,
+ multipart.Write(buffer, sizeof(buffer), NULL, NULL));
+
+ part_size += multipart.GetEndPartSize();
+ multipart.EndParts();
+ EXPECT_TRUE(multipart.GetSize(&size));
+ EXPECT_EQ(part_size, size);
+
+ // Read the multipart stream into StringStream
+ std::string str;
+ rtc::StringStream str_stream(str);
+ EXPECT_EQ(rtc::SR_SUCCESS,
+ Flow(&multipart, buffer, sizeof(buffer), &str_stream));
+ EXPECT_EQ(size, str.length());
+
+ // Search three boundaries and two parts in the order.
+ size_t pos = 0;
+ pos = str.find(kTestMultipartBoundary);
+ EXPECT_NE(std::string::npos, pos);
+ pos += kTestMultipartBoundary.length();
+
+ pos = str.find(kTestData, pos);
+ EXPECT_NE(std::string::npos, pos);
+ pos += sizeof(kTestData);
+
+ pos = str.find(kTestMultipartBoundary, pos);
+ EXPECT_NE(std::string::npos, pos);
+ pos += kTestMultipartBoundary.length();
+
+ pos = str.find(kTestStreamContent, pos);
+ EXPECT_NE(std::string::npos, pos);
+ pos += sizeof(kTestStreamContent);
+
+ pos = str.find(kTestMultipartBoundary, pos);
+ EXPECT_NE(std::string::npos, pos);
+ pos += kTestMultipartBoundary.length();
+
+ pos = str.find(kTestMultipartBoundary, pos);
+ EXPECT_EQ(std::string::npos, pos);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/nat_unittest.cc b/chromium/third_party/webrtc/base/nat_unittest.cc
new file mode 100644
index 00000000000..8b9d8a1509f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nat_unittest.cc
@@ -0,0 +1,345 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/natserver.h"
+#include "webrtc/base/natsocketfactory.h"
+#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/network.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/testclient.h"
+#include "webrtc/base/virtualsocketserver.h"
+
+using namespace rtc;
+
+bool CheckReceive(
+ TestClient* client, bool should_receive, const char* buf, size_t size) {
+ return (should_receive) ?
+ client->CheckNextPacket(buf, size, 0) :
+ client->CheckNoPacket();
+}
+
+TestClient* CreateTestClient(
+ SocketFactory* factory, const SocketAddress& local_addr) {
+ AsyncUDPSocket* socket = AsyncUDPSocket::Create(factory, local_addr);
+ return new TestClient(socket);
+}
+
+// Tests that when sending from internal_addr to external_addrs through the
+// NAT type specified by nat_type, all external addrs receive the sent packet
+// and, if exp_same is true, all use the same mapped-address on the NAT.
+void TestSend(
+ SocketServer* internal, const SocketAddress& internal_addr,
+ SocketServer* external, const SocketAddress external_addrs[4],
+ NATType nat_type, bool exp_same) {
+ Thread th_int(internal);
+ Thread th_ext(external);
+
+ SocketAddress server_addr = internal_addr;
+ server_addr.SetPort(0); // Auto-select a port
+ NATServer* nat = new NATServer(
+ nat_type, internal, server_addr, external, external_addrs[0]);
+ NATSocketFactory* natsf = new NATSocketFactory(internal,
+ nat->internal_address());
+
+ TestClient* in = CreateTestClient(natsf, internal_addr);
+ TestClient* out[4];
+ for (int i = 0; i < 4; i++)
+ out[i] = CreateTestClient(external, external_addrs[i]);
+
+ th_int.Start();
+ th_ext.Start();
+
+ const char* buf = "filter_test";
+ size_t len = strlen(buf);
+
+ in->SendTo(buf, len, out[0]->address());
+ SocketAddress trans_addr;
+ EXPECT_TRUE(out[0]->CheckNextPacket(buf, len, &trans_addr));
+
+ for (int i = 1; i < 4; i++) {
+ in->SendTo(buf, len, out[i]->address());
+ SocketAddress trans_addr2;
+ EXPECT_TRUE(out[i]->CheckNextPacket(buf, len, &trans_addr2));
+ bool are_same = (trans_addr == trans_addr2);
+ ASSERT_EQ(are_same, exp_same) << "same translated address";
+ ASSERT_NE(AF_UNSPEC, trans_addr.family());
+ ASSERT_NE(AF_UNSPEC, trans_addr2.family());
+ }
+
+ th_int.Stop();
+ th_ext.Stop();
+
+ delete nat;
+ delete natsf;
+ delete in;
+ for (int i = 0; i < 4; i++)
+ delete out[i];
+}
+
+// Tests that when sending from external_addrs to internal_addr, the packet
+// is delivered according to the specified filter_ip and filter_port rules.
+void TestRecv(
+ SocketServer* internal, const SocketAddress& internal_addr,
+ SocketServer* external, const SocketAddress external_addrs[4],
+ NATType nat_type, bool filter_ip, bool filter_port) {
+ Thread th_int(internal);
+ Thread th_ext(external);
+
+ SocketAddress server_addr = internal_addr;
+ server_addr.SetPort(0); // Auto-select a port
+ NATServer* nat = new NATServer(
+ nat_type, internal, server_addr, external, external_addrs[0]);
+ NATSocketFactory* natsf = new NATSocketFactory(internal,
+ nat->internal_address());
+
+ TestClient* in = CreateTestClient(natsf, internal_addr);
+ TestClient* out[4];
+ for (int i = 0; i < 4; i++)
+ out[i] = CreateTestClient(external, external_addrs[i]);
+
+ th_int.Start();
+ th_ext.Start();
+
+ const char* buf = "filter_test";
+ size_t len = strlen(buf);
+
+ in->SendTo(buf, len, out[0]->address());
+ SocketAddress trans_addr;
+ EXPECT_TRUE(out[0]->CheckNextPacket(buf, len, &trans_addr));
+
+ out[1]->SendTo(buf, len, trans_addr);
+ EXPECT_TRUE(CheckReceive(in, !filter_ip, buf, len));
+
+ out[2]->SendTo(buf, len, trans_addr);
+ EXPECT_TRUE(CheckReceive(in, !filter_port, buf, len));
+
+ out[3]->SendTo(buf, len, trans_addr);
+ EXPECT_TRUE(CheckReceive(in, !filter_ip && !filter_port, buf, len));
+
+ th_int.Stop();
+ th_ext.Stop();
+
+ delete nat;
+ delete natsf;
+ delete in;
+ for (int i = 0; i < 4; i++)
+ delete out[i];
+}
+
+// Tests that NATServer allocates bindings properly.
+void TestBindings(
+ SocketServer* internal, const SocketAddress& internal_addr,
+ SocketServer* external, const SocketAddress external_addrs[4]) {
+ TestSend(internal, internal_addr, external, external_addrs,
+ NAT_OPEN_CONE, true);
+ TestSend(internal, internal_addr, external, external_addrs,
+ NAT_ADDR_RESTRICTED, true);
+ TestSend(internal, internal_addr, external, external_addrs,
+ NAT_PORT_RESTRICTED, true);
+ TestSend(internal, internal_addr, external, external_addrs,
+ NAT_SYMMETRIC, false);
+}
+
+// Tests that NATServer filters packets properly.
+void TestFilters(
+ SocketServer* internal, const SocketAddress& internal_addr,
+ SocketServer* external, const SocketAddress external_addrs[4]) {
+ TestRecv(internal, internal_addr, external, external_addrs,
+ NAT_OPEN_CONE, false, false);
+ TestRecv(internal, internal_addr, external, external_addrs,
+ NAT_ADDR_RESTRICTED, true, false);
+ TestRecv(internal, internal_addr, external, external_addrs,
+ NAT_PORT_RESTRICTED, true, true);
+ TestRecv(internal, internal_addr, external, external_addrs,
+ NAT_SYMMETRIC, true, true);
+}
+
+bool TestConnectivity(const SocketAddress& src, const IPAddress& dst) {
+ // The physical NAT tests require connectivity to the selected ip from the
+ // internal address used for the NAT. Things like firewalls can break that, so
+ // check to see if it's worth even trying with this ip.
+ scoped_ptr<PhysicalSocketServer> pss(new PhysicalSocketServer());
+ scoped_ptr<AsyncSocket> client(pss->CreateAsyncSocket(src.family(),
+ SOCK_DGRAM));
+ scoped_ptr<AsyncSocket> server(pss->CreateAsyncSocket(src.family(),
+ SOCK_DGRAM));
+ if (client->Bind(SocketAddress(src.ipaddr(), 0)) != 0 ||
+ server->Bind(SocketAddress(dst, 0)) != 0) {
+ return false;
+ }
+ const char* buf = "hello other socket";
+ size_t len = strlen(buf);
+ int sent = client->SendTo(buf, len, server->GetLocalAddress());
+ SocketAddress addr;
+ const size_t kRecvBufSize = 64;
+ char recvbuf[kRecvBufSize];
+ Thread::Current()->SleepMs(100);
+ int received = server->RecvFrom(recvbuf, kRecvBufSize, &addr);
+ return received == sent && ::memcmp(buf, recvbuf, len) == 0;
+}
+
+void TestPhysicalInternal(const SocketAddress& int_addr) {
+ BasicNetworkManager network_manager;
+ network_manager.set_ipv6_enabled(true);
+ network_manager.StartUpdating();
+ // Process pending messages so the network list is updated.
+ Thread::Current()->ProcessMessages(0);
+
+ std::vector<Network*> networks;
+ network_manager.GetNetworks(&networks);
+ if (networks.empty()) {
+ LOG(LS_WARNING) << "Not enough network adapters for test.";
+ return;
+ }
+
+ SocketAddress ext_addr1(int_addr);
+ SocketAddress ext_addr2;
+ // Find an available IP with matching family. The test breaks if int_addr
+ // can't talk to ip, so check for connectivity as well.
+ for (std::vector<Network*>::iterator it = networks.begin();
+ it != networks.end(); ++it) {
+ const IPAddress& ip = (*it)->ip();
+ if (ip.family() == int_addr.family() && TestConnectivity(int_addr, ip)) {
+ ext_addr2.SetIP(ip);
+ break;
+ }
+ }
+ if (ext_addr2.IsNil()) {
+ LOG(LS_WARNING) << "No available IP of same family as " << int_addr;
+ return;
+ }
+
+ LOG(LS_INFO) << "selected ip " << ext_addr2.ipaddr();
+
+ SocketAddress ext_addrs[4] = {
+ SocketAddress(ext_addr1),
+ SocketAddress(ext_addr2),
+ SocketAddress(ext_addr1),
+ SocketAddress(ext_addr2)
+ };
+
+ scoped_ptr<PhysicalSocketServer> int_pss(new PhysicalSocketServer());
+ scoped_ptr<PhysicalSocketServer> ext_pss(new PhysicalSocketServer());
+
+ TestBindings(int_pss.get(), int_addr, ext_pss.get(), ext_addrs);
+ TestFilters(int_pss.get(), int_addr, ext_pss.get(), ext_addrs);
+}
+
+TEST(NatTest, TestPhysicalIPv4) {
+ TestPhysicalInternal(SocketAddress("127.0.0.1", 0));
+}
+
+TEST(NatTest, TestPhysicalIPv6) {
+ if (HasIPv6Enabled()) {
+ TestPhysicalInternal(SocketAddress("::1", 0));
+ } else {
+ LOG(LS_WARNING) << "No IPv6, skipping";
+ }
+}
+
+class TestVirtualSocketServer : public VirtualSocketServer {
+ public:
+ explicit TestVirtualSocketServer(SocketServer* ss)
+ : VirtualSocketServer(ss),
+ ss_(ss) {}
+ // Expose this publicly
+ IPAddress GetNextIP(int af) { return VirtualSocketServer::GetNextIP(af); }
+
+ private:
+ scoped_ptr<SocketServer> ss_;
+};
+
+void TestVirtualInternal(int family) {
+ scoped_ptr<TestVirtualSocketServer> int_vss(new TestVirtualSocketServer(
+ new PhysicalSocketServer()));
+ scoped_ptr<TestVirtualSocketServer> ext_vss(new TestVirtualSocketServer(
+ new PhysicalSocketServer()));
+
+ SocketAddress int_addr;
+ SocketAddress ext_addrs[4];
+ int_addr.SetIP(int_vss->GetNextIP(family));
+ ext_addrs[0].SetIP(ext_vss->GetNextIP(int_addr.family()));
+ ext_addrs[1].SetIP(ext_vss->GetNextIP(int_addr.family()));
+ ext_addrs[2].SetIP(ext_addrs[0].ipaddr());
+ ext_addrs[3].SetIP(ext_addrs[1].ipaddr());
+
+ TestBindings(int_vss.get(), int_addr, ext_vss.get(), ext_addrs);
+ TestFilters(int_vss.get(), int_addr, ext_vss.get(), ext_addrs);
+}
+
+TEST(NatTest, TestVirtualIPv4) {
+ TestVirtualInternal(AF_INET);
+}
+
+TEST(NatTest, TestVirtualIPv6) {
+ if (HasIPv6Enabled()) {
+ TestVirtualInternal(AF_INET6);
+ } else {
+ LOG(LS_WARNING) << "No IPv6, skipping";
+ }
+}
+
+// TODO: Finish this test
+class NatTcpTest : public testing::Test, public sigslot::has_slots<> {
+ public:
+ NatTcpTest() : connected_(false) {}
+ virtual void SetUp() {
+ int_vss_ = new TestVirtualSocketServer(new PhysicalSocketServer());
+ ext_vss_ = new TestVirtualSocketServer(new PhysicalSocketServer());
+ nat_ = new NATServer(NAT_OPEN_CONE, int_vss_, SocketAddress(),
+ ext_vss_, SocketAddress());
+ natsf_ = new NATSocketFactory(int_vss_, nat_->internal_address());
+ }
+ void OnConnectEvent(AsyncSocket* socket) {
+ connected_ = true;
+ }
+ void OnAcceptEvent(AsyncSocket* socket) {
+ accepted_ = server_->Accept(NULL);
+ }
+ void OnCloseEvent(AsyncSocket* socket, int error) {
+ }
+ void ConnectEvents() {
+ server_->SignalReadEvent.connect(this, &NatTcpTest::OnAcceptEvent);
+ client_->SignalConnectEvent.connect(this, &NatTcpTest::OnConnectEvent);
+ }
+ TestVirtualSocketServer* int_vss_;
+ TestVirtualSocketServer* ext_vss_;
+ NATServer* nat_;
+ NATSocketFactory* natsf_;
+ AsyncSocket* client_;
+ AsyncSocket* server_;
+ AsyncSocket* accepted_;
+ bool connected_;
+};
+
+TEST_F(NatTcpTest, DISABLED_TestConnectOut) {
+ server_ = ext_vss_->CreateAsyncSocket(SOCK_STREAM);
+ server_->Bind(SocketAddress());
+ server_->Listen(5);
+
+ client_ = int_vss_->CreateAsyncSocket(SOCK_STREAM);
+  EXPECT_LE(0, client_->Bind(SocketAddress()));
+  EXPECT_LE(0, client_->Connect(server_->GetLocalAddress()));
+
+
+ ConnectEvents();
+
+ EXPECT_TRUE_WAIT(connected_, 1000);
+ EXPECT_EQ(client_->GetRemoteAddress(), server_->GetLocalAddress());
+ EXPECT_EQ(client_->GetRemoteAddress(), accepted_->GetLocalAddress());
+ EXPECT_EQ(client_->GetLocalAddress(), accepted_->GetRemoteAddress());
+
+ client_->Close();
+}
+//#endif
diff --git a/chromium/third_party/webrtc/base/natserver.cc b/chromium/third_party/webrtc/base/natserver.cc
new file mode 100644
index 00000000000..0ce04d70b3b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/natserver.cc
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/natsocketfactory.h"
+#include "webrtc/base/natserver.h"
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+RouteCmp::RouteCmp(NAT* nat) : symmetric(nat->IsSymmetric()) {
+}
+
+size_t RouteCmp::operator()(const SocketAddressPair& r) const {
+ size_t h = r.source().Hash();
+ if (symmetric)
+ h ^= r.destination().Hash();
+ return h;
+}
+
+bool RouteCmp::operator()(
+ const SocketAddressPair& r1, const SocketAddressPair& r2) const {
+ if (r1.source() < r2.source())
+ return true;
+ if (r2.source() < r1.source())
+ return false;
+ if (symmetric && (r1.destination() < r2.destination()))
+ return true;
+ if (symmetric && (r2.destination() < r1.destination()))
+ return false;
+ return false;
+}
+
+AddrCmp::AddrCmp(NAT* nat)
+ : use_ip(nat->FiltersIP()), use_port(nat->FiltersPort()) {
+}
+
+size_t AddrCmp::operator()(const SocketAddress& a) const {
+ size_t h = 0;
+ if (use_ip)
+ h ^= HashIP(a.ipaddr());
+ if (use_port)
+ h ^= a.port() | (a.port() << 16);
+ return h;
+}
+
+bool AddrCmp::operator()(
+ const SocketAddress& a1, const SocketAddress& a2) const {
+ if (use_ip && (a1.ipaddr() < a2.ipaddr()))
+ return true;
+ if (use_ip && (a2.ipaddr() < a1.ipaddr()))
+ return false;
+ if (use_port && (a1.port() < a2.port()))
+ return true;
+ if (use_port && (a2.port() < a1.port()))
+ return false;
+ return false;
+}
+
+NATServer::NATServer(
+ NATType type, SocketFactory* internal, const SocketAddress& internal_addr,
+ SocketFactory* external, const SocketAddress& external_ip)
+ : external_(external), external_ip_(external_ip.ipaddr(), 0) {
+ nat_ = NAT::Create(type);
+
+ server_socket_ = AsyncUDPSocket::Create(internal, internal_addr);
+ server_socket_->SignalReadPacket.connect(this, &NATServer::OnInternalPacket);
+
+ int_map_ = new InternalMap(RouteCmp(nat_));
+ ext_map_ = new ExternalMap();
+}
+
+NATServer::~NATServer() {
+ for (InternalMap::iterator iter = int_map_->begin();
+ iter != int_map_->end();
+ iter++)
+ delete iter->second;
+
+ delete nat_;
+ delete server_socket_;
+ delete int_map_;
+ delete ext_map_;
+}
+
+void NATServer::OnInternalPacket(
+ AsyncPacketSocket* socket, const char* buf, size_t size,
+ const SocketAddress& addr, const PacketTime& packet_time) {
+
+ // Read the intended destination from the wire.
+ SocketAddress dest_addr;
+ size_t length = UnpackAddressFromNAT(buf, size, &dest_addr);
+
+ // Find the translation for these addresses (allocating one if necessary).
+ SocketAddressPair route(addr, dest_addr);
+ InternalMap::iterator iter = int_map_->find(route);
+ if (iter == int_map_->end()) {
+ Translate(route);
+ iter = int_map_->find(route);
+ }
+ ASSERT(iter != int_map_->end());
+
+ // Allow the destination to send packets back to the source.
+ iter->second->WhitelistInsert(dest_addr);
+
+ // Send the packet to its intended destination.
+ rtc::PacketOptions options;
+ iter->second->socket->SendTo(buf + length, size - length, dest_addr, options);
+}
+
+void NATServer::OnExternalPacket(
+ AsyncPacketSocket* socket, const char* buf, size_t size,
+ const SocketAddress& remote_addr, const PacketTime& packet_time) {
+
+ SocketAddress local_addr = socket->GetLocalAddress();
+
+  // Find the translation for this address.
+ ExternalMap::iterator iter = ext_map_->find(local_addr);
+ ASSERT(iter != ext_map_->end());
+
+ // Allow the NAT to reject this packet.
+ if (ShouldFilterOut(iter->second, remote_addr)) {
+ LOG(LS_INFO) << "Packet from " << remote_addr.ToSensitiveString()
+ << " was filtered out by the NAT.";
+ return;
+ }
+
+ // Forward this packet to the internal address.
+ // First prepend the address in a quasi-STUN format.
+ scoped_ptr<char[]> real_buf(new char[size + kNATEncodedIPv6AddressSize]);
+ size_t addrlength = PackAddressForNAT(real_buf.get(),
+ size + kNATEncodedIPv6AddressSize,
+ remote_addr);
+ // Copy the data part after the address.
+ rtc::PacketOptions options;
+ memcpy(real_buf.get() + addrlength, buf, size);
+ server_socket_->SendTo(real_buf.get(), size + addrlength,
+ iter->second->route.source(), options);
+}
+
+void NATServer::Translate(const SocketAddressPair& route) {
+ AsyncUDPSocket* socket = AsyncUDPSocket::Create(external_, external_ip_);
+
+ if (!socket) {
+ LOG(LS_ERROR) << "Couldn't find a free port!";
+ return;
+ }
+
+ TransEntry* entry = new TransEntry(route, socket, nat_);
+ (*int_map_)[route] = entry;
+ (*ext_map_)[socket->GetLocalAddress()] = entry;
+ socket->SignalReadPacket.connect(this, &NATServer::OnExternalPacket);
+}
+
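+// Note: TransEntry::WhitelistContains() below returns true when |ext_addr| is
+// *not* in the whitelist, so a packet is filtered out exactly when its source
+// does not match (per the NAT's filtering rules) any address the internal
+// host has sent to.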
+bool NATServer::ShouldFilterOut(TransEntry* entry,
+ const SocketAddress& ext_addr) {
+ return entry->WhitelistContains(ext_addr);
+}
+
+NATServer::TransEntry::TransEntry(
+ const SocketAddressPair& r, AsyncUDPSocket* s, NAT* nat)
+ : route(r), socket(s) {
+ whitelist = new AddressSet(AddrCmp(nat));
+}
+
+NATServer::TransEntry::~TransEntry() {
+ delete whitelist;
+ delete socket;
+}
+
+void NATServer::TransEntry::WhitelistInsert(const SocketAddress& addr) {
+ CritScope cs(&crit_);
+ whitelist->insert(addr);
+}
+
+bool NATServer::TransEntry::WhitelistContains(const SocketAddress& ext_addr) {
+ CritScope cs(&crit_);
+ return whitelist->find(ext_addr) == whitelist->end();
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/natserver.h b/chromium/third_party/webrtc/base/natserver.h
new file mode 100644
index 00000000000..1db77dacfad
--- /dev/null
+++ b/chromium/third_party/webrtc/base/natserver.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_NATSERVER_H_
+#define WEBRTC_BASE_NATSERVER_H_
+
+#include <map>
+#include <set>
+
+#include "webrtc/base/asyncudpsocket.h"
+#include "webrtc/base/socketaddresspair.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/socketfactory.h"
+#include "webrtc/base/nattypes.h"
+
+namespace rtc {
+
+// Changes how routes (socket address pairs) are compared based on the type of
+// NAT. The NAT server maintains a hashtable of the routes that it knows
+// about, so these comparators affect which routes are treated as the same.
+struct RouteCmp {
+ explicit RouteCmp(NAT* nat);
+ size_t operator()(const SocketAddressPair& r) const;
+ bool operator()(
+ const SocketAddressPair& r1, const SocketAddressPair& r2) const;
+
+ bool symmetric;
+};
+
+// Changes how addresses are compared based on the filtering rules of the NAT.
+struct AddrCmp {
+ explicit AddrCmp(NAT* nat);
+ size_t operator()(const SocketAddress& r) const;
+ bool operator()(const SocketAddress& r1, const SocketAddress& r2) const;
+
+ bool use_ip;
+ bool use_port;
+};
+
+// Implements the NAT device. It listens for packets on the internal network,
+// translates them, and sends them out over the external network.
+
+const int NAT_SERVER_PORT = 4237;
+
+class NATServer : public sigslot::has_slots<> {
+ public:
+ NATServer(
+ NATType type, SocketFactory* internal, const SocketAddress& internal_addr,
+ SocketFactory* external, const SocketAddress& external_ip);
+ ~NATServer();
+
+ SocketAddress internal_address() const {
+ return server_socket_->GetLocalAddress();
+ }
+
+ // Packets received on one of the networks.
+ void OnInternalPacket(AsyncPacketSocket* socket, const char* buf,
+ size_t size, const SocketAddress& addr,
+ const PacketTime& packet_time);
+ void OnExternalPacket(AsyncPacketSocket* socket, const char* buf,
+ size_t size, const SocketAddress& remote_addr,
+ const PacketTime& packet_time);
+
+ private:
+ typedef std::set<SocketAddress, AddrCmp> AddressSet;
+
+ /* Records a translation and the associated external socket. */
+ struct TransEntry {
+ TransEntry(const SocketAddressPair& r, AsyncUDPSocket* s, NAT* nat);
+ ~TransEntry();
+
+ void WhitelistInsert(const SocketAddress& addr);
+ bool WhitelistContains(const SocketAddress& ext_addr);
+
+ SocketAddressPair route;
+ AsyncUDPSocket* socket;
+ AddressSet* whitelist;
+ CriticalSection crit_;
+ };
+
+ typedef std::map<SocketAddressPair, TransEntry*, RouteCmp> InternalMap;
+ typedef std::map<SocketAddress, TransEntry*> ExternalMap;
+
+ /* Creates a new entry that translates the given route. */
+ void Translate(const SocketAddressPair& route);
+
+ /* Determines whether the NAT would filter out a packet from this address. */
+ bool ShouldFilterOut(TransEntry* entry, const SocketAddress& ext_addr);
+
+ NAT* nat_;
+ SocketFactory* internal_;
+ SocketFactory* external_;
+ SocketAddress external_ip_;
+ AsyncUDPSocket* server_socket_;
+ AsyncSocket* tcp_server_socket_;
+ InternalMap* int_map_;
+ ExternalMap* ext_map_;
+ DISALLOW_EVIL_CONSTRUCTORS(NATServer);
+};
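+
+// Illustrative setup sketch (the socket servers and addresses are
+// placeholders; see nat_unittest.cc for complete examples):
+//
+//   NATServer nat(NAT_SYMMETRIC, int_ss, int_addr, ext_ss, ext_ip);
+//   NATSocketFactory factory(int_ss, nat.internal_address());
+//   // Sockets created through |factory| have their traffic translated by
+//   // |nat| before it reaches the external network.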
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_NATSERVER_H_
diff --git a/chromium/third_party/webrtc/base/natsocketfactory.cc b/chromium/third_party/webrtc/base/natsocketfactory.cc
new file mode 100644
index 00000000000..b5ae67b2510
--- /dev/null
+++ b/chromium/third_party/webrtc/base/natsocketfactory.cc
@@ -0,0 +1,487 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/natsocketfactory.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/natserver.h"
+#include "webrtc/base/virtualsocketserver.h"
+
+namespace rtc {
+
+// Packs the given socketaddress into the buffer in buf, in the quasi-STUN
+// format that the natserver uses.
+// Returns 0 if an invalid address is passed.
+size_t PackAddressForNAT(char* buf, size_t buf_size,
+ const SocketAddress& remote_addr) {
+ const IPAddress& ip = remote_addr.ipaddr();
+ int family = ip.family();
+ buf[0] = 0;
+ buf[1] = family;
+ // Writes the port.
+ *(reinterpret_cast<uint16*>(&buf[2])) = HostToNetwork16(remote_addr.port());
+ if (family == AF_INET) {
+ ASSERT(buf_size >= kNATEncodedIPv4AddressSize);
+ in_addr v4addr = ip.ipv4_address();
+ memcpy(&buf[4], &v4addr, kNATEncodedIPv4AddressSize - 4);
+ return kNATEncodedIPv4AddressSize;
+ } else if (family == AF_INET6) {
+ ASSERT(buf_size >= kNATEncodedIPv6AddressSize);
+ in6_addr v6addr = ip.ipv6_address();
+ memcpy(&buf[4], &v6addr, kNATEncodedIPv6AddressSize - 4);
+ return kNATEncodedIPv6AddressSize;
+ }
+ return 0U;
+}
+
+// Decodes the remote address from a packet that has been encoded with the nat's
+// quasi-STUN format. Returns the length of the address (i.e., the offset into
+// data where the original packet starts).
+size_t UnpackAddressFromNAT(const char* buf, size_t buf_size,
+ SocketAddress* remote_addr) {
+ ASSERT(buf_size >= 8);
+ ASSERT(buf[0] == 0);
+ int family = buf[1];
+ uint16 port = NetworkToHost16(*(reinterpret_cast<const uint16*>(&buf[2])));
+ if (family == AF_INET) {
+ const in_addr* v4addr = reinterpret_cast<const in_addr*>(&buf[4]);
+ *remote_addr = SocketAddress(IPAddress(*v4addr), port);
+ return kNATEncodedIPv4AddressSize;
+ } else if (family == AF_INET6) {
+ ASSERT(buf_size >= 20);
+ const in6_addr* v6addr = reinterpret_cast<const in6_addr*>(&buf[4]);
+ *remote_addr = SocketAddress(IPAddress(*v6addr), port);
+ return kNATEncodedIPv6AddressSize;
+ }
+ return 0U;
+}
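+
+// For reference, the encoded header produced/consumed above is laid out as:
+//   buf[0]    : 0 (reserved)
+//   buf[1]    : address family (AF_INET or AF_INET6)
+//   buf[2..3] : port, in network byte order
+//   buf[4..]  : 4-byte IPv4 or 16-byte IPv6 address
+// followed immediately by the original packet payload.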
+
+
+// NATSocket wraps a real socket and, when a NAT server address is configured,
+// prepends and strips the quasi-STUN address header so that traffic is routed
+// through the NATServer.
+class NATSocket : public AsyncSocket, public sigslot::has_slots<> {
+ public:
+ explicit NATSocket(NATInternalSocketFactory* sf, int family, int type)
+ : sf_(sf), family_(family), type_(type), connected_(false),
+ socket_(NULL), buf_(NULL), size_(0) {
+ }
+
+ virtual ~NATSocket() {
+ delete socket_;
+ delete[] buf_;
+ }
+
+ virtual SocketAddress GetLocalAddress() const {
+ return (socket_) ? socket_->GetLocalAddress() : SocketAddress();
+ }
+
+ virtual SocketAddress GetRemoteAddress() const {
+ return remote_addr_; // will be NIL if not connected
+ }
+
+ virtual int Bind(const SocketAddress& addr) {
+ if (socket_) { // already bound, bubble up error
+ return -1;
+ }
+
+ int result;
+ socket_ = sf_->CreateInternalSocket(family_, type_, addr, &server_addr_);
+ result = (socket_) ? socket_->Bind(addr) : -1;
+ if (result >= 0) {
+ socket_->SignalConnectEvent.connect(this, &NATSocket::OnConnectEvent);
+ socket_->SignalReadEvent.connect(this, &NATSocket::OnReadEvent);
+ socket_->SignalWriteEvent.connect(this, &NATSocket::OnWriteEvent);
+ socket_->SignalCloseEvent.connect(this, &NATSocket::OnCloseEvent);
+ } else {
+ server_addr_.Clear();
+ delete socket_;
+ socket_ = NULL;
+ }
+
+ return result;
+ }
+
+ virtual int Connect(const SocketAddress& addr) {
+ if (!socket_) { // socket must be bound, for now
+ return -1;
+ }
+
+ int result = 0;
+ if (type_ == SOCK_STREAM) {
+ result = socket_->Connect(server_addr_.IsNil() ? addr : server_addr_);
+ } else {
+ connected_ = true;
+ }
+
+ if (result >= 0) {
+ remote_addr_ = addr;
+ }
+
+ return result;
+ }
+
+ virtual int Send(const void* data, size_t size) {
+ ASSERT(connected_);
+ return SendTo(data, size, remote_addr_);
+ }
+
+ virtual int SendTo(const void* data, size_t size, const SocketAddress& addr) {
+ ASSERT(!connected_ || addr == remote_addr_);
+ if (server_addr_.IsNil() || type_ == SOCK_STREAM) {
+ return socket_->SendTo(data, size, addr);
+ }
+ // This array will be too large for IPv4 packets, but only by 12 bytes.
+ scoped_ptr<char[]> buf(new char[size + kNATEncodedIPv6AddressSize]);
+ size_t addrlength = PackAddressForNAT(buf.get(),
+ size + kNATEncodedIPv6AddressSize,
+ addr);
+ size_t encoded_size = size + addrlength;
+ memcpy(buf.get() + addrlength, data, size);
+ int result = socket_->SendTo(buf.get(), encoded_size, server_addr_);
+ if (result >= 0) {
+ ASSERT(result == static_cast<int>(encoded_size));
+ result = result - static_cast<int>(addrlength);
+ }
+ return result;
+ }
+
+ virtual int Recv(void* data, size_t size) {
+ SocketAddress addr;
+ return RecvFrom(data, size, &addr);
+ }
+
+ virtual int RecvFrom(void* data, size_t size, SocketAddress *out_addr) {
+ if (server_addr_.IsNil() || type_ == SOCK_STREAM) {
+ return socket_->RecvFrom(data, size, out_addr);
+ }
+ // Make sure we have enough room to read the requested amount plus the
+ // largest possible header address.
+ SocketAddress remote_addr;
+ Grow(size + kNATEncodedIPv6AddressSize);
+
+ // Read the packet from the socket.
+ int result = socket_->RecvFrom(buf_, size_, &remote_addr);
+ if (result >= 0) {
+ ASSERT(remote_addr == server_addr_);
+
+ // TODO: we need better framing so we know how many bytes we can
+ // return before we need to read the next address. For UDP, this will be
+ // fine as long as the reader always reads everything in the packet.
+ ASSERT((size_t)result < size_);
+
+ // Decode the wire packet into the actual results.
+ SocketAddress real_remote_addr;
+ size_t addrlength =
+ UnpackAddressFromNAT(buf_, result, &real_remote_addr);
+ memcpy(data, buf_ + addrlength, result - addrlength);
+
+ // Make sure this packet should be delivered before returning it.
+ if (!connected_ || (real_remote_addr == remote_addr_)) {
+ if (out_addr)
+ *out_addr = real_remote_addr;
+ result = result - static_cast<int>(addrlength);
+ } else {
+ LOG(LS_ERROR) << "Dropping packet from unknown remote address: "
+ << real_remote_addr.ToString();
+ result = 0; // Tell the caller we didn't read anything
+ }
+ }
+
+ return result;
+ }
+
+ virtual int Close() {
+ int result = 0;
+ if (socket_) {
+ result = socket_->Close();
+ if (result >= 0) {
+ connected_ = false;
+ remote_addr_ = SocketAddress();
+ delete socket_;
+ socket_ = NULL;
+ }
+ }
+ return result;
+ }
+
+ virtual int Listen(int backlog) {
+ return socket_->Listen(backlog);
+ }
+ virtual AsyncSocket* Accept(SocketAddress *paddr) {
+ return socket_->Accept(paddr);
+ }
+ virtual int GetError() const {
+ return socket_->GetError();
+ }
+ virtual void SetError(int error) {
+ socket_->SetError(error);
+ }
+ virtual ConnState GetState() const {
+ return connected_ ? CS_CONNECTED : CS_CLOSED;
+ }
+ virtual int EstimateMTU(uint16* mtu) {
+ return socket_->EstimateMTU(mtu);
+ }
+ virtual int GetOption(Option opt, int* value) {
+ return socket_->GetOption(opt, value);
+ }
+ virtual int SetOption(Option opt, int value) {
+ return socket_->SetOption(opt, value);
+ }
+
+ void OnConnectEvent(AsyncSocket* socket) {
+ // If we're NATed, we need to send a request with the real addr to use.
+ ASSERT(socket == socket_);
+ if (server_addr_.IsNil()) {
+ connected_ = true;
+ SignalConnectEvent(this);
+ } else {
+ SendConnectRequest();
+ }
+ }
+ void OnReadEvent(AsyncSocket* socket) {
+ // If we're NATed, we need to process the connect reply.
+ ASSERT(socket == socket_);
+ if (type_ == SOCK_STREAM && !server_addr_.IsNil() && !connected_) {
+ HandleConnectReply();
+ } else {
+ SignalReadEvent(this);
+ }
+ }
+ void OnWriteEvent(AsyncSocket* socket) {
+ ASSERT(socket == socket_);
+ SignalWriteEvent(this);
+ }
+ void OnCloseEvent(AsyncSocket* socket, int error) {
+ ASSERT(socket == socket_);
+ SignalCloseEvent(this, error);
+ }
+
+ private:
+ // Makes sure the buffer is at least the given size.
+ void Grow(size_t new_size) {
+ if (size_ < new_size) {
+ delete[] buf_;
+ size_ = new_size;
+ buf_ = new char[size_];
+ }
+ }
+
+ // Sends the destination address to the server to tell it to connect.
+ void SendConnectRequest() {
+ char buf[256];
+ size_t length = PackAddressForNAT(buf, ARRAY_SIZE(buf), remote_addr_);
+ socket_->Send(buf, length);
+ }
+
+ // Handles the byte sent back from the server and fires the appropriate event.
+ void HandleConnectReply() {
+ char code;
+ socket_->Recv(&code, sizeof(code));
+ if (code == 0) {
+ SignalConnectEvent(this);
+ } else {
+ Close();
+ SignalCloseEvent(this, code);
+ }
+ }
+
+ NATInternalSocketFactory* sf_;
+ int family_;
+ int type_;
+ bool connected_;
+ SocketAddress remote_addr_;
+ SocketAddress server_addr_; // address of the NAT server
+ AsyncSocket* socket_;
+ char* buf_;
+ size_t size_;
+};
+
+// NATSocketFactory
+NATSocketFactory::NATSocketFactory(SocketFactory* factory,
+ const SocketAddress& nat_addr)
+ : factory_(factory), nat_addr_(nat_addr) {
+}
+
+Socket* NATSocketFactory::CreateSocket(int type) {
+ return CreateSocket(AF_INET, type);
+}
+
+Socket* NATSocketFactory::CreateSocket(int family, int type) {
+ return new NATSocket(this, family, type);
+}
+
+AsyncSocket* NATSocketFactory::CreateAsyncSocket(int type) {
+ return CreateAsyncSocket(AF_INET, type);
+}
+
+AsyncSocket* NATSocketFactory::CreateAsyncSocket(int family, int type) {
+ return new NATSocket(this, family, type);
+}
+
+AsyncSocket* NATSocketFactory::CreateInternalSocket(int family, int type,
+ const SocketAddress& local_addr, SocketAddress* nat_addr) {
+ *nat_addr = nat_addr_;
+ return factory_->CreateAsyncSocket(family, type);
+}
+
+// NATSocketServer
+NATSocketServer::NATSocketServer(SocketServer* server)
+ : server_(server), msg_queue_(NULL) {
+}
+
+NATSocketServer::Translator* NATSocketServer::GetTranslator(
+ const SocketAddress& ext_ip) {
+ return nats_.Get(ext_ip);
+}
+
+NATSocketServer::Translator* NATSocketServer::AddTranslator(
+ const SocketAddress& ext_ip, const SocketAddress& int_ip, NATType type) {
+ // Fail if a translator already exists with this external address.
+ if (nats_.Get(ext_ip))
+ return NULL;
+
+ return nats_.Add(ext_ip, new Translator(this, type, int_ip, server_, ext_ip));
+}
+
+void NATSocketServer::RemoveTranslator(
+ const SocketAddress& ext_ip) {
+ nats_.Remove(ext_ip);
+}
+
+Socket* NATSocketServer::CreateSocket(int type) {
+ return CreateSocket(AF_INET, type);
+}
+
+Socket* NATSocketServer::CreateSocket(int family, int type) {
+ return new NATSocket(this, family, type);
+}
+
+AsyncSocket* NATSocketServer::CreateAsyncSocket(int type) {
+ return CreateAsyncSocket(AF_INET, type);
+}
+
+AsyncSocket* NATSocketServer::CreateAsyncSocket(int family, int type) {
+ return new NATSocket(this, family, type);
+}
+
+AsyncSocket* NATSocketServer::CreateInternalSocket(int family, int type,
+ const SocketAddress& local_addr, SocketAddress* nat_addr) {
+ AsyncSocket* socket = NULL;
+ Translator* nat = nats_.FindClient(local_addr);
+ if (nat) {
+ socket = nat->internal_factory()->CreateAsyncSocket(family, type);
+ *nat_addr = (type == SOCK_STREAM) ?
+ nat->internal_tcp_address() : nat->internal_address();
+ } else {
+ socket = server_->CreateAsyncSocket(family, type);
+ }
+ return socket;
+}
+
+// NATSocketServer::Translator
+NATSocketServer::Translator::Translator(
+ NATSocketServer* server, NATType type, const SocketAddress& int_ip,
+ SocketFactory* ext_factory, const SocketAddress& ext_ip)
+ : server_(server) {
+ // Create a new private network, and a NATServer running on the private
+ // network that bridges to the external network. Also tell the private
+ // network to use the same message queue as us.
+ VirtualSocketServer* internal_server = new VirtualSocketServer(server_);
+ internal_server->SetMessageQueue(server_->queue());
+ internal_factory_.reset(internal_server);
+ nat_server_.reset(new NATServer(type, internal_server, int_ip,
+ ext_factory, ext_ip));
+}
+
+
+NATSocketServer::Translator* NATSocketServer::Translator::GetTranslator(
+ const SocketAddress& ext_ip) {
+ return nats_.Get(ext_ip);
+}
+
+NATSocketServer::Translator* NATSocketServer::Translator::AddTranslator(
+ const SocketAddress& ext_ip, const SocketAddress& int_ip, NATType type) {
+ // Fail if a translator already exists with this external address.
+ if (nats_.Get(ext_ip))
+ return NULL;
+
+ AddClient(ext_ip);
+ return nats_.Add(ext_ip,
+ new Translator(server_, type, int_ip, server_, ext_ip));
+}
+void NATSocketServer::Translator::RemoveTranslator(
+ const SocketAddress& ext_ip) {
+ nats_.Remove(ext_ip);
+ RemoveClient(ext_ip);
+}
+
+bool NATSocketServer::Translator::AddClient(
+ const SocketAddress& int_ip) {
+ // Fail if a client already exists with this internal address.
+ if (clients_.find(int_ip) != clients_.end())
+ return false;
+
+ clients_.insert(int_ip);
+ return true;
+}
+
+void NATSocketServer::Translator::RemoveClient(
+ const SocketAddress& int_ip) {
+ std::set<SocketAddress>::iterator it = clients_.find(int_ip);
+ if (it != clients_.end()) {
+ clients_.erase(it);
+ }
+}
+
+NATSocketServer::Translator* NATSocketServer::Translator::FindClient(
+ const SocketAddress& int_ip) {
+ // See if we have the requested IP, or any of our children do.
+ return (clients_.find(int_ip) != clients_.end()) ?
+ this : nats_.FindClient(int_ip);
+}
+
+// NATSocketServer::TranslatorMap
+NATSocketServer::TranslatorMap::~TranslatorMap() {
+ for (TranslatorMap::iterator it = begin(); it != end(); ++it) {
+ delete it->second;
+ }
+}
+
+NATSocketServer::Translator* NATSocketServer::TranslatorMap::Get(
+ const SocketAddress& ext_ip) {
+ TranslatorMap::iterator it = find(ext_ip);
+ return (it != end()) ? it->second : NULL;
+}
+
+NATSocketServer::Translator* NATSocketServer::TranslatorMap::Add(
+ const SocketAddress& ext_ip, Translator* nat) {
+ (*this)[ext_ip] = nat;
+ return nat;
+}
+
+void NATSocketServer::TranslatorMap::Remove(
+ const SocketAddress& ext_ip) {
+ TranslatorMap::iterator it = find(ext_ip);
+ if (it != end()) {
+ delete it->second;
+ erase(it);
+ }
+}
+
+NATSocketServer::Translator* NATSocketServer::TranslatorMap::FindClient(
+ const SocketAddress& int_ip) {
+ Translator* nat = NULL;
+ for (TranslatorMap::iterator it = begin(); it != end() && !nat; ++it) {
+ nat = it->second->FindClient(int_ip);
+ }
+ return nat;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/natsocketfactory.h b/chromium/third_party/webrtc/base/natsocketfactory.h
new file mode 100644
index 00000000000..6a8e20fe147
--- /dev/null
+++ b/chromium/third_party/webrtc/base/natsocketfactory.h
@@ -0,0 +1,166 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_NATSOCKETFACTORY_H_
+#define WEBRTC_BASE_NATSOCKETFACTORY_H_
+
+#include <string>
+#include <map>
+#include <set>
+
+#include "webrtc/base/natserver.h"
+#include "webrtc/base/socketaddress.h"
+#include "webrtc/base/socketserver.h"
+
+namespace rtc {
+
+const size_t kNATEncodedIPv4AddressSize = 8U;
+const size_t kNATEncodedIPv6AddressSize = 20U;
+
+// Used by the NAT socket implementation.
+class NATInternalSocketFactory {
+ public:
+ virtual ~NATInternalSocketFactory() {}
+ virtual AsyncSocket* CreateInternalSocket(int family, int type,
+ const SocketAddress& local_addr, SocketAddress* nat_addr) = 0;
+};
+
+// Creates sockets that will send all traffic through a NAT, using an existing
+// NATServer instance running at nat_addr. The actual data is sent using sockets
+// from a socket factory, given to the constructor.
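+//
+// A minimal usage sketch (the addresses and port are hypothetical, and the
+// NATServer at nat_addr is assumed to be running already):
+//   SocketFactory* internal = ...;  // factory that carries the actual traffic
+//   NATSocketFactory factory(internal, SocketAddress("88.88.88.88", 4237));
+//   AsyncSocket* sock = factory.CreateAsyncSocket(AF_INET, SOCK_DGRAM);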
+class NATSocketFactory : public SocketFactory, public NATInternalSocketFactory {
+ public:
+ NATSocketFactory(SocketFactory* factory, const SocketAddress& nat_addr);
+
+ // SocketFactory implementation
+ virtual Socket* CreateSocket(int type);
+ virtual Socket* CreateSocket(int family, int type);
+ virtual AsyncSocket* CreateAsyncSocket(int type);
+ virtual AsyncSocket* CreateAsyncSocket(int family, int type);
+
+ // NATInternalSocketFactory implementation
+ virtual AsyncSocket* CreateInternalSocket(int family, int type,
+ const SocketAddress& local_addr, SocketAddress* nat_addr);
+
+ private:
+ SocketFactory* factory_;
+ SocketAddress nat_addr_;
+ DISALLOW_EVIL_CONSTRUCTORS(NATSocketFactory);
+};
+
+// Creates sockets that will send traffic through a NAT depending on what
+// address they bind to. This can be used to simulate a client on a NAT sending
+// to a client that is not behind a NAT.
+// Note that the internal addresses of clients must be unique. This is because
+// there is only one socketserver per thread, and the Bind() address is used to
+// figure out which NAT (if any) the socket should talk to.
+//
+// Example with 3 NATs (2 cascaded), and 3 clients.
+// ss->AddTranslator("1.2.3.4", "192.168.0.1", NAT_ADDR_RESTRICTED);
+// ss->AddTranslator("99.99.99.99", "10.0.0.1", NAT_SYMMETRIC)->
+// AddTranslator("10.0.0.2", "192.168.1.1", NAT_OPEN_CONE);
+// ss->GetTranslator("1.2.3.4")->AddClient("192.168.0.2");
+// ss->GetTranslator("99.99.99.99")->AddClient("10.0.0.3");
+// ss->GetTranslator("99.99.99.99")->GetTranslator("10.0.0.2")->
+// AddClient("192.168.1.2");
+class NATSocketServer : public SocketServer, public NATInternalSocketFactory {
+ public:
+ class Translator;
+ // holds a list of NATs
+ class TranslatorMap : private std::map<SocketAddress, Translator*> {
+ public:
+ ~TranslatorMap();
+ Translator* Get(const SocketAddress& ext_ip);
+ Translator* Add(const SocketAddress& ext_ip, Translator*);
+ void Remove(const SocketAddress& ext_ip);
+ Translator* FindClient(const SocketAddress& int_ip);
+ };
+
+ // a specific NAT
+ class Translator {
+ public:
+ Translator(NATSocketServer* server, NATType type,
+ const SocketAddress& int_addr, SocketFactory* ext_factory,
+ const SocketAddress& ext_addr);
+
+ SocketFactory* internal_factory() { return internal_factory_.get(); }
+ SocketAddress internal_address() const {
+ return nat_server_->internal_address();
+ }
+ SocketAddress internal_tcp_address() const {
+ return SocketAddress(); // nat_server_->internal_tcp_address();
+ }
+
+ Translator* GetTranslator(const SocketAddress& ext_ip);
+ Translator* AddTranslator(const SocketAddress& ext_ip,
+ const SocketAddress& int_ip, NATType type);
+ void RemoveTranslator(const SocketAddress& ext_ip);
+
+ bool AddClient(const SocketAddress& int_ip);
+ void RemoveClient(const SocketAddress& int_ip);
+
+ // Looks for the specified client in this or a child NAT.
+ Translator* FindClient(const SocketAddress& int_ip);
+
+ private:
+ NATSocketServer* server_;
+ scoped_ptr<SocketFactory> internal_factory_;
+ scoped_ptr<NATServer> nat_server_;
+ TranslatorMap nats_;
+ std::set<SocketAddress> clients_;
+ };
+
+ explicit NATSocketServer(SocketServer* ss);
+
+ SocketServer* socketserver() { return server_; }
+ MessageQueue* queue() { return msg_queue_; }
+
+ Translator* GetTranslator(const SocketAddress& ext_ip);
+ Translator* AddTranslator(const SocketAddress& ext_ip,
+ const SocketAddress& int_ip, NATType type);
+ void RemoveTranslator(const SocketAddress& ext_ip);
+
+ // SocketServer implementation
+ virtual Socket* CreateSocket(int type);
+ virtual Socket* CreateSocket(int family, int type);
+
+ virtual AsyncSocket* CreateAsyncSocket(int type);
+ virtual AsyncSocket* CreateAsyncSocket(int family, int type);
+
+ virtual void SetMessageQueue(MessageQueue* queue) {
+ msg_queue_ = queue;
+ server_->SetMessageQueue(queue);
+ }
+ virtual bool Wait(int cms, bool process_io) {
+ return server_->Wait(cms, process_io);
+ }
+ virtual void WakeUp() {
+ server_->WakeUp();
+ }
+
+ // NATInternalSocketFactory implementation
+ virtual AsyncSocket* CreateInternalSocket(int family, int type,
+ const SocketAddress& local_addr, SocketAddress* nat_addr);
+
+ private:
+ SocketServer* server_;
+ MessageQueue* msg_queue_;
+ TranslatorMap nats_;
+ DISALLOW_EVIL_CONSTRUCTORS(NATSocketServer);
+};
+
+// Free-standing NAT helper functions.
+size_t PackAddressForNAT(char* buf, size_t buf_size,
+ const SocketAddress& remote_addr);
+size_t UnpackAddressFromNAT(const char* buf, size_t buf_size,
+ SocketAddress* remote_addr);
+} // namespace rtc
+
+#endif // WEBRTC_BASE_NATSOCKETFACTORY_H_
diff --git a/chromium/third_party/webrtc/base/nattypes.cc b/chromium/third_party/webrtc/base/nattypes.cc
new file mode 100644
index 00000000000..fedb78dc543
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nattypes.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "webrtc/base/nattypes.h"
+
+namespace rtc {
+
+class SymmetricNAT : public NAT {
+public:
+ bool IsSymmetric() { return true; }
+ bool FiltersIP() { return true; }
+ bool FiltersPort() { return true; }
+};
+
+class OpenConeNAT : public NAT {
+public:
+ bool IsSymmetric() { return false; }
+ bool FiltersIP() { return false; }
+ bool FiltersPort() { return false; }
+};
+
+class AddressRestrictedNAT : public NAT {
+public:
+ bool IsSymmetric() { return false; }
+ bool FiltersIP() { return true; }
+ bool FiltersPort() { return false; }
+};
+
+class PortRestrictedNAT : public NAT {
+public:
+ bool IsSymmetric() { return false; }
+ bool FiltersIP() { return true; }
+ bool FiltersPort() { return true; }
+};
+
+NAT* NAT::Create(NATType type) {
+ switch (type) {
+ case NAT_OPEN_CONE: return new OpenConeNAT();
+ case NAT_ADDR_RESTRICTED: return new AddressRestrictedNAT();
+ case NAT_PORT_RESTRICTED: return new PortRestrictedNAT();
+ case NAT_SYMMETRIC: return new SymmetricNAT();
+ default: assert(0); return 0;
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/nattypes.h b/chromium/third_party/webrtc/base/nattypes.h
new file mode 100644
index 00000000000..27e4b2f4575
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nattypes.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_NATTYPE_H__
+#define WEBRTC_BASE_NATTYPE_H__
+
+namespace rtc {
+
+/* Identifies each type of NAT that can be simulated. */
+enum NATType {
+ NAT_OPEN_CONE,
+ NAT_ADDR_RESTRICTED,
+ NAT_PORT_RESTRICTED,
+ NAT_SYMMETRIC
+};
+
+// Implements the rules for each specific type of NAT.
+class NAT {
+public:
+ virtual ~NAT() { }
+
+ // Determines whether this NAT uses both source and destination address when
+ // checking whether a mapping already exists.
+ virtual bool IsSymmetric() = 0;
+
+ // Determines whether this NAT drops packets received from a different IP
+ // than the one last sent to.
+ virtual bool FiltersIP() = 0;
+
+ // Determines whether this NAT drops packets received from a different port
+ // than the one last sent to.
+ virtual bool FiltersPort() = 0;
+
+ // Returns an implementation of the given type of NAT.
+ static NAT* Create(NATType type);
+};
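+
+// A minimal sketch of using the factory (the caller is assumed to own the
+// returned object):
+//   NAT* nat = NAT::Create(NAT_PORT_RESTRICTED);
+//   bool drops = nat->FiltersIP() && nat->FiltersPort();  // both true here
+//   delete nat;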
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_NATTYPE_H__
diff --git a/chromium/third_party/webrtc/base/nethelpers.cc b/chromium/third_party/webrtc/base/nethelpers.cc
new file mode 100644
index 00000000000..5d4802dfde2
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nethelpers.cc
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/nethelpers.h"
+
+#if defined(WEBRTC_WIN)
+#include <ws2spi.h>
+#include <ws2tcpip.h>
+#include "webrtc/base/win32.h"
+#endif
+
+#include "webrtc/base/byteorder.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/signalthread.h"
+
+namespace rtc {
+
+int ResolveHostname(const std::string& hostname, int family,
+ std::vector<IPAddress>* addresses) {
+#ifdef __native_client__
+ ASSERT(false);
+ LOG(LS_WARNING) << "ResolveHostname() is not implemented for NaCl";
+ return -1;
+#else // __native_client__
+ if (!addresses) {
+ return -1;
+ }
+ addresses->clear();
+ struct addrinfo* result = NULL;
+ struct addrinfo hints = {0};
+ // TODO(djw): For now this is IPv4 only so existing users remain unaffected.
+ hints.ai_family = AF_INET;
+ hints.ai_flags = AI_ADDRCONFIG;
+ int ret = getaddrinfo(hostname.c_str(), NULL, &hints, &result);
+ if (ret != 0) {
+ return ret;
+ }
+ struct addrinfo* cursor = result;
+ for (; cursor; cursor = cursor->ai_next) {
+ if (family == AF_UNSPEC || cursor->ai_family == family) {
+ IPAddress ip;
+ if (IPFromAddrInfo(cursor, &ip)) {
+ addresses->push_back(ip);
+ }
+ }
+ }
+ freeaddrinfo(result);
+ return 0;
+#endif // !__native_client__
+}
+
+// AsyncResolver
+AsyncResolver::AsyncResolver() : error_(-1) {
+}
+
+void AsyncResolver::Start(const SocketAddress& addr) {
+ addr_ = addr;
+ // SignalThread::Start() will kick off the resolve process.
+ SignalThread::Start();
+}
+
+bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const {
+ if (error_ != 0 || addresses_.empty())
+ return false;
+
+ *addr = addr_;
+ for (size_t i = 0; i < addresses_.size(); ++i) {
+ if (family == addresses_[i].family()) {
+ addr->SetResolvedIP(addresses_[i]);
+ return true;
+ }
+ }
+ return false;
+}
+
+void AsyncResolver::DoWork() {
+ error_ = ResolveHostname(addr_.hostname().c_str(), addr_.family(),
+ &addresses_);
+}
+
+void AsyncResolver::OnWorkDone() {
+ SignalDone(this);
+}
+
+const char* inet_ntop(int af, const void *src, char* dst, socklen_t size) {
+#if defined(WEBRTC_WIN)
+ return win32_inet_ntop(af, src, dst, size);
+#else
+ return ::inet_ntop(af, src, dst, size);
+#endif
+}
+
+int inet_pton(int af, const char* src, void *dst) {
+#if defined(WEBRTC_WIN)
+ return win32_inet_pton(af, src, dst);
+#else
+ return ::inet_pton(af, src, dst);
+#endif
+}
+
+bool HasIPv6Enabled() {
+#if !defined(WEBRTC_WIN)
+ // We only need to check this for Windows XP (so far).
+ return true;
+#else
+ if (IsWindowsVistaOrLater()) {
+ return true;
+ }
+ if (!IsWindowsXpOrLater()) {
+ return false;
+ }
+ DWORD protbuff_size = 4096;
+ scoped_ptr<char[]> protocols;
+ LPWSAPROTOCOL_INFOW protocol_infos = NULL;
+ int requested_protocols[2] = {AF_INET6, 0};
+
+ int err = 0;
+ int ret = 0;
+ // Check for protocols in a do-while loop until we provide a buffer large
+ // enough. (WSCEnumProtocols sets protbuff_size to its desired value).
+ // It is extremely unlikely that this will loop more than once.
+ do {
+ protocols.reset(new char[protbuff_size]);
+ protocol_infos = reinterpret_cast<LPWSAPROTOCOL_INFOW>(protocols.get());
+ ret = WSCEnumProtocols(requested_protocols, protocol_infos,
+ &protbuff_size, &err);
+ } while (ret == SOCKET_ERROR && err == WSAENOBUFS);
+
+ if (ret == SOCKET_ERROR) {
+ return false;
+ }
+
+ // Even if ret is positive, check specifically for IPv6.
+ // Non-IPv6 enabled WinXP will still return a RAW protocol.
+ for (int i = 0; i < ret; ++i) {
+ if (protocol_infos[i].iAddressFamily == AF_INET6) {
+ return true;
+ }
+ }
+ return false;
+#endif
+}
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/nethelpers.h b/chromium/third_party/webrtc/base/nethelpers.h
new file mode 100644
index 00000000000..d39400c2f85
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nethelpers.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_NETHELPERS_H_
+#define WEBRTC_BASE_NETHELPERS_H_
+
+#if defined(WEBRTC_POSIX)
+#include <netdb.h>
+#include <stddef.h>
+#elif WEBRTC_WIN
+#include <winsock2.h> // NOLINT
+#endif
+
+#include <list>
+
+#include "webrtc/base/asyncresolverinterface.h"
+#include "webrtc/base/signalthread.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/socketaddress.h"
+
+namespace rtc {
+
+class AsyncResolverTest;
+
+// AsyncResolver will perform async DNS resolution, signaling the result on
+// the SignalDone from AsyncResolverInterface when the operation completes.
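+//
+// Typical use (a sketch; the owner object and its handler are hypothetical,
+// with the handler taking the AsyncResolverInterface* passed by SignalDone):
+//   AsyncResolver* resolver = new AsyncResolver();
+//   resolver->SignalDone.connect(owner, &Owner::OnResolveResult);
+//   resolver->Start(SocketAddress("stun.example.org", 3478));
+//   // In the handler: GetResolvedAddress(AF_INET, &addr), then release the
+//   // resolver with Destroy(false) rather than deleting it directly.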
+class AsyncResolver : public SignalThread, public AsyncResolverInterface {
+ public:
+ AsyncResolver();
+ virtual ~AsyncResolver() {}
+
+ virtual void Start(const SocketAddress& addr);
+ virtual bool GetResolvedAddress(int family, SocketAddress* addr) const;
+ virtual int GetError() const { return error_; }
+ virtual void Destroy(bool wait) { SignalThread::Destroy(wait); }
+
+ const std::vector<IPAddress>& addresses() const { return addresses_; }
+ void set_error(int error) { error_ = error; }
+
+ protected:
+ virtual void DoWork();
+ virtual void OnWorkDone();
+
+ private:
+ SocketAddress addr_;
+ std::vector<IPAddress> addresses_;
+ int error_;
+};
+
+// rtc namespaced wrappers for inet_ntop and inet_pton so we can avoid
+// the windows-native versions of these.
+const char* inet_ntop(int af, const void *src, char* dst, socklen_t size);
+int inet_pton(int af, const char* src, void *dst);
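+// For instance (a sketch; standard inet_pton/inet_ntop return semantics are
+// assumed):
+//   in_addr addr;
+//   if (rtc::inet_pton(AF_INET, "127.0.0.1", &addr) == 1) {
+//     char buf[INET_ADDRSTRLEN];
+//     rtc::inet_ntop(AF_INET, &addr, buf, sizeof(buf));
+//   }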
+
+bool HasIPv6Enabled();
+} // namespace rtc
+
+#endif // WEBRTC_BASE_NETHELPERS_H_
diff --git a/chromium/third_party/webrtc/base/network.cc b/chromium/third_party/webrtc/base/network.cc
new file mode 100644
index 00000000000..d94c69eae7a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/network.cc
@@ -0,0 +1,658 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "webrtc/base/network.h"
+
+#if defined(WEBRTC_POSIX)
+// linux/if.h can't be included at the same time as the posix net/if.h, and
+// it's transitively required by linux/route.h, so include that version on
+// linux instead of the standard posix one.
+#if defined(WEBRTC_LINUX)
+#include <linux/if.h>
+#include <linux/route.h>
+#elif !defined(__native_client__)
+#include <net/if.h>
+#endif
+#include <sys/socket.h>
+#include <sys/utsname.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+#include <errno.h>
+
+#if defined(WEBRTC_ANDROID)
+#include "webrtc/base/ifaddrs-android.h"
+#elif !defined(__native_client__)
+#include <ifaddrs.h>
+#endif
+
+#endif // WEBRTC_POSIX
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#include <Iphlpapi.h>
+#endif
+
+#include <stdio.h>
+
+#include <algorithm>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/socket.h" // includes something that makes windows happy
+#include "webrtc/base/stream.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+namespace {
+
+const uint32 kUpdateNetworksMessage = 1;
+const uint32 kSignalNetworksMessage = 2;
+
+// Fetch list of networks every two seconds.
+const int kNetworksUpdateIntervalMs = 2000;
+
+const int kHighestNetworkPreference = 127;
+
+bool CompareNetworks(const Network* a, const Network* b) {
+ if (a->prefix_length() == b->prefix_length()) {
+ if (a->name() == b->name()) {
+ return a->prefix() < b->prefix();
+ }
+ }
+ return a->name() < b->name();
+}
+
+bool SortNetworks(const Network* a, const Network* b) {
+ // Network types will be preferred above everything else while sorting
+ // Networks.
+
+ // Networks are sorted first by type.
+ if (a->type() != b->type()) {
+ return a->type() < b->type();
+ }
+
+ // After type, networks are sorted by IP address precedence values
+ // from RFC 3484-bis
+ if (IPAddressPrecedence(a->ip()) != IPAddressPrecedence(b->ip())) {
+ return IPAddressPrecedence(a->ip()) > IPAddressPrecedence(b->ip());
+ }
+
+ // TODO(mallinath) - Add VPN and Link speed conditions while sorting.
+
+ // Networks are sorted last by key.
+ return a->key() > b->key();
+}
+
+std::string AdapterTypeToString(AdapterType type) {
+ switch (type) {
+ case ADAPTER_TYPE_UNKNOWN:
+ return "Unknown";
+ case ADAPTER_TYPE_ETHERNET:
+ return "Ethernet";
+ case ADAPTER_TYPE_WIFI:
+ return "Wifi";
+ case ADAPTER_TYPE_CELLULAR:
+ return "Cellular";
+ case ADAPTER_TYPE_VPN:
+ return "VPN";
+ default:
+ ASSERT(false);
+ return std::string();
+ }
+}
+
+} // namespace
+
+std::string MakeNetworkKey(const std::string& name, const IPAddress& prefix,
+ int prefix_length) {
+ std::ostringstream ost;
+ ost << name << "%" << prefix.ToString() << "/" << prefix_length;
+ return ost.str();
+}
+
+NetworkManager::NetworkManager() {
+}
+
+NetworkManager::~NetworkManager() {
+}
+
+NetworkManagerBase::NetworkManagerBase() : ipv6_enabled_(true) {
+}
+
+NetworkManagerBase::~NetworkManagerBase() {
+ for (NetworkMap::iterator i = networks_map_.begin();
+ i != networks_map_.end(); ++i) {
+ delete i->second;
+ }
+}
+
+void NetworkManagerBase::GetNetworks(NetworkList* result) const {
+ *result = networks_;
+}
+
+void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks,
+ bool* changed) {
+ // Sort the list so that we can detect when it changes.
+ typedef std::pair<Network*, std::vector<IPAddress> > address_list;
+ std::map<std::string, address_list> address_map;
+ NetworkList list(new_networks);
+ NetworkList merged_list;
+ std::sort(list.begin(), list.end(), CompareNetworks);
+
+ *changed = false;
+
+ if (networks_.size() != list.size())
+ *changed = true;
+
+ // First, build a map from network keys to their IP addresses.
+ for (uint32 i = 0; i < list.size(); ++i) {
+ bool might_add_to_merged_list = false;
+ std::string key = MakeNetworkKey(list[i]->name(),
+ list[i]->prefix(),
+ list[i]->prefix_length());
+ if (address_map.find(key) == address_map.end()) {
+ address_map[key] = address_list(list[i], std::vector<IPAddress>());
+ might_add_to_merged_list = true;
+ }
+ const std::vector<IPAddress>& addresses = list[i]->GetIPs();
+ address_list& current_list = address_map[key];
+ for (std::vector<IPAddress>::const_iterator it = addresses.begin();
+ it != addresses.end();
+ ++it) {
+ current_list.second.push_back(*it);
+ }
+ if (!might_add_to_merged_list) {
+ delete list[i];
+ }
+ }
+
+ // Next, look for existing network objects to re-use.
+ for (std::map<std::string, address_list >::iterator it = address_map.begin();
+ it != address_map.end();
+ ++it) {
+ const std::string& key = it->first;
+ Network* net = it->second.first;
+ NetworkMap::iterator existing = networks_map_.find(key);
+ if (existing == networks_map_.end()) {
+ // This network is new. Place it in the network map.
+ merged_list.push_back(net);
+ networks_map_[key] = net;
+ *changed = true;
+ } else {
+ // This network exists in the map already. Reset its IP addresses.
+ *changed = existing->second->SetIPs(it->second.second, *changed);
+ merged_list.push_back(existing->second);
+ if (existing->second != net) {
+ delete net;
+ }
+ }
+ }
+ networks_ = merged_list;
+
+ // If the network list changes, we re-sort it.
+ if (changed) {
+ std::sort(networks_.begin(), networks_.end(), SortNetworks);
+ // Now that the network interfaces are sorted, set a preference value for
+ // each interface we plan to use. The preference order may have changed from
+ // the previous sort due to the addition of a higher-preference interface, so
+ // simply assign preference values starting at 127 in decreasing order.
+ int pref = kHighestNetworkPreference;
+ for (NetworkList::const_iterator iter = networks_.begin();
+ iter != networks_.end(); ++iter) {
+ (*iter)->set_preference(pref);
+ if (pref > 0) {
+ --pref;
+ } else {
+ LOG(LS_ERROR) << "Too many network interfaces to handle!";
+ break;
+ }
+ }
+ }
+}
+
+BasicNetworkManager::BasicNetworkManager()
+ : thread_(NULL), sent_first_update_(false), start_count_(0),
+ ignore_non_default_routes_(false) {
+}
+
+BasicNetworkManager::~BasicNetworkManager() {
+}
+
+#if defined(__native_client__)
+
+bool BasicNetworkManager::CreateNetworks(bool include_ignored,
+ NetworkList* networks) const {
+ ASSERT(false);
+ LOG(LS_WARNING) << "BasicNetworkManager doesn't work on NaCl yet";
+ return false;
+}
+
+#elif defined(WEBRTC_POSIX)
+void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces,
+ bool include_ignored,
+ NetworkList* networks) const {
+ NetworkMap current_networks;
+ for (struct ifaddrs* cursor = interfaces;
+ cursor != NULL; cursor = cursor->ifa_next) {
+ IPAddress prefix;
+ IPAddress mask;
+ IPAddress ip;
+ int scope_id = 0;
+
+ // Some interfaces may not have an address assigned.
+ if (!cursor->ifa_addr || !cursor->ifa_netmask)
+ continue;
+
+ switch (cursor->ifa_addr->sa_family) {
+ case AF_INET: {
+ ip = IPAddress(
+ reinterpret_cast<sockaddr_in*>(cursor->ifa_addr)->sin_addr);
+ mask = IPAddress(
+ reinterpret_cast<sockaddr_in*>(cursor->ifa_netmask)->sin_addr);
+ break;
+ }
+ case AF_INET6: {
+ if (ipv6_enabled()) {
+ ip = IPAddress(
+ reinterpret_cast<sockaddr_in6*>(cursor->ifa_addr)->sin6_addr);
+ mask = IPAddress(
+ reinterpret_cast<sockaddr_in6*>(cursor->ifa_netmask)->sin6_addr);
+ scope_id =
+ reinterpret_cast<sockaddr_in6*>(cursor->ifa_addr)->sin6_scope_id;
+ break;
+ } else {
+ continue;
+ }
+ }
+ default: {
+ continue;
+ }
+ }
+
+ int prefix_length = CountIPMaskBits(mask);
+ prefix = TruncateIP(ip, prefix_length);
+ std::string key = MakeNetworkKey(std::string(cursor->ifa_name),
+ prefix, prefix_length);
+ NetworkMap::iterator existing_network = current_networks.find(key);
+ if (existing_network == current_networks.end()) {
+ scoped_ptr<Network> network(new Network(cursor->ifa_name,
+ cursor->ifa_name,
+ prefix,
+ prefix_length));
+ network->set_scope_id(scope_id);
+ network->AddIP(ip);
+ bool ignored = ((cursor->ifa_flags & IFF_LOOPBACK) ||
+ IsIgnoredNetwork(*network));
+ network->set_ignored(ignored);
+ if (include_ignored || !network->ignored()) {
+ networks->push_back(network.release());
+ }
+ } else {
+ (*existing_network).second->AddIP(ip);
+ }
+ }
+}
+
+bool BasicNetworkManager::CreateNetworks(bool include_ignored,
+ NetworkList* networks) const {
+ struct ifaddrs* interfaces;
+ int error = getifaddrs(&interfaces);
+ if (error != 0) {
+ LOG_ERR(LERROR) << "getifaddrs failed to gather interface data: " << error;
+ return false;
+ }
+
+ ConvertIfAddrs(interfaces, include_ignored, networks);
+
+ freeifaddrs(interfaces);
+ return true;
+}
+
+#elif defined(WEBRTC_WIN)
+
+unsigned int GetPrefix(PIP_ADAPTER_PREFIX prefixlist,
+ const IPAddress& ip, IPAddress* prefix) {
+ IPAddress current_prefix;
+ IPAddress best_prefix;
+ unsigned int best_length = 0;
+ while (prefixlist) {
+ // Look for the longest matching prefix in the prefixlist.
+ if (prefixlist->Address.lpSockaddr == NULL ||
+ prefixlist->Address.lpSockaddr->sa_family != ip.family()) {
+ prefixlist = prefixlist->Next;
+ continue;
+ }
+ switch (prefixlist->Address.lpSockaddr->sa_family) {
+ case AF_INET: {
+ sockaddr_in* v4_addr =
+ reinterpret_cast<sockaddr_in*>(prefixlist->Address.lpSockaddr);
+ current_prefix = IPAddress(v4_addr->sin_addr);
+ break;
+ }
+ case AF_INET6: {
+ sockaddr_in6* v6_addr =
+ reinterpret_cast<sockaddr_in6*>(prefixlist->Address.lpSockaddr);
+ current_prefix = IPAddress(v6_addr->sin6_addr);
+ break;
+ }
+ default: {
+ prefixlist = prefixlist->Next;
+ continue;
+ }
+ }
+ if (TruncateIP(ip, prefixlist->PrefixLength) == current_prefix &&
+ prefixlist->PrefixLength > best_length) {
+ best_prefix = current_prefix;
+ best_length = prefixlist->PrefixLength;
+ }
+ prefixlist = prefixlist->Next;
+ }
+ *prefix = best_prefix;
+ return best_length;
+}
+
+bool BasicNetworkManager::CreateNetworks(bool include_ignored,
+ NetworkList* networks) const {
+ NetworkMap current_networks;
+ // MSDN recommends a 15KB buffer for the first try at GetAdaptersAddresses.
+ size_t buffer_size = 16384;
+ scoped_ptr<char[]> adapter_info(new char[buffer_size]);
+ PIP_ADAPTER_ADDRESSES adapter_addrs =
+ reinterpret_cast<PIP_ADAPTER_ADDRESSES>(adapter_info.get());
+ int adapter_flags = (GAA_FLAG_SKIP_DNS_SERVER | GAA_FLAG_SKIP_ANYCAST |
+ GAA_FLAG_SKIP_MULTICAST | GAA_FLAG_INCLUDE_PREFIX);
+ int ret = 0;
+ do {
+ adapter_info.reset(new char[buffer_size]);
+ adapter_addrs = reinterpret_cast<PIP_ADAPTER_ADDRESSES>(adapter_info.get());
+ ret = GetAdaptersAddresses(AF_UNSPEC, adapter_flags,
+ 0, adapter_addrs,
+ reinterpret_cast<PULONG>(&buffer_size));
+ } while (ret == ERROR_BUFFER_OVERFLOW);
+ if (ret != ERROR_SUCCESS) {
+ return false;
+ }
+ int count = 0;
+ while (adapter_addrs) {
+ if (adapter_addrs->OperStatus == IfOperStatusUp) {
+ PIP_ADAPTER_UNICAST_ADDRESS address = adapter_addrs->FirstUnicastAddress;
+ PIP_ADAPTER_PREFIX prefixlist = adapter_addrs->FirstPrefix;
+ std::string name;
+ std::string description;
+#ifdef _DEBUG
+ name = ToUtf8(adapter_addrs->FriendlyName,
+ wcslen(adapter_addrs->FriendlyName));
+#endif
+ description = ToUtf8(adapter_addrs->Description,
+ wcslen(adapter_addrs->Description));
+ for (; address; address = address->Next) {
+#ifndef _DEBUG
+ name = rtc::ToString(count);
+#endif
+
+ IPAddress ip;
+ int scope_id = 0;
+ scoped_ptr<Network> network;
+ switch (address->Address.lpSockaddr->sa_family) {
+ case AF_INET: {
+ sockaddr_in* v4_addr =
+ reinterpret_cast<sockaddr_in*>(address->Address.lpSockaddr);
+ ip = IPAddress(v4_addr->sin_addr);
+ break;
+ }
+ case AF_INET6: {
+ if (ipv6_enabled()) {
+ sockaddr_in6* v6_addr =
+ reinterpret_cast<sockaddr_in6*>(address->Address.lpSockaddr);
+ scope_id = v6_addr->sin6_scope_id;
+ ip = IPAddress(v6_addr->sin6_addr);
+ break;
+ } else {
+ continue;
+ }
+ }
+ default: {
+ continue;
+ }
+ }
+
+ IPAddress prefix;
+ int prefix_length = GetPrefix(prefixlist, ip, &prefix);
+ std::string key = MakeNetworkKey(name, prefix, prefix_length);
+ NetworkMap::iterator existing_network = current_networks.find(key);
+ if (existing_network == current_networks.end()) {
+ scoped_ptr<Network> network(new Network(name,
+ description,
+ prefix,
+ prefix_length));
+ network->set_scope_id(scope_id);
+ network->AddIP(ip);
+ bool ignore = ((adapter_addrs->IfType == IF_TYPE_SOFTWARE_LOOPBACK) ||
+ IsIgnoredNetwork(*network));
+ network->set_ignored(ignore);
+ if (include_ignored || !network->ignored()) {
+ networks->push_back(network.release());
+ }
+ } else {
+ (*existing_network).second->AddIP(ip);
+ }
+ }
+ // Count is per-adapter - all 'Networks' created from the same
+ // adapter need to have the same name.
+ ++count;
+ }
+ adapter_addrs = adapter_addrs->Next;
+ }
+ return true;
+}
+#endif // WEBRTC_WIN
+
+#if defined(WEBRTC_LINUX)
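+// /proc/net/route has whitespace-separated columns of the form
+//   Iface Destination Gateway Flags RefCnt Use Metric Mask ...
+// with the address fields in hexadecimal. A hypothetical default-route line:
+//   eth0  00000000  0101A8C0  0003  0  0  0  00000000 ...
+// IsDefaultRoute() below looks for a line whose interface matches, whose mask
+// is 0, and whose flags have RTF_UP set but not RTF_HOST.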
+bool IsDefaultRoute(const std::string& network_name) {
+ FileStream fs;
+ if (!fs.Open("/proc/net/route", "r", NULL)) {
+ LOG(LS_WARNING) << "Couldn't read /proc/net/route, skipping default "
+ << "route check (assuming everything is a default route).";
+ return true;
+ } else {
+ std::string line;
+ while (fs.ReadLine(&line) == SR_SUCCESS) {
+ char iface_name[256];
+ unsigned int iface_ip, iface_gw, iface_mask, iface_flags;
+ if (sscanf(line.c_str(),
+ "%255s %8X %8X %4X %*d %*u %*d %8X",
+ iface_name, &iface_ip, &iface_gw,
+ &iface_flags, &iface_mask) == 5 &&
+ network_name == iface_name &&
+ iface_mask == 0 &&
+ (iface_flags & (RTF_UP | RTF_HOST)) == RTF_UP) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+#endif
+
+bool BasicNetworkManager::IsIgnoredNetwork(const Network& network) const {
+ // Ignore networks on the explicit ignore list.
+ for (size_t i = 0; i < network_ignore_list_.size(); ++i) {
+ if (network.name() == network_ignore_list_[i]) {
+ return true;
+ }
+ }
+#if defined(WEBRTC_POSIX)
+ // Filter out VMware interfaces, typically named vmnet1 and vmnet8
+ if (strncmp(network.name().c_str(), "vmnet", 5) == 0 ||
+ strncmp(network.name().c_str(), "vnic", 4) == 0) {
+ return true;
+ }
+#if defined(WEBRTC_LINUX)
+ // Make sure this is a default route, if we're ignoring non-defaults.
+ if (ignore_non_default_routes_ && !IsDefaultRoute(network.name())) {
+ return true;
+ }
+#endif
+#elif defined(WEBRTC_WIN)
+ // Ignore any HOST side vmware adapters with a description like:
+ // VMware Virtual Ethernet Adapter for VMnet1
+ // but don't ignore any GUEST side adapters with a description like:
+ // VMware Accelerated AMD PCNet Adapter #2
+ if (strstr(network.description().c_str(), "VMnet") != NULL) {
+ return true;
+ }
+#endif
+
+ // Ignore any networks with a 0.x.y.z IP
+ if (network.prefix().family() == AF_INET) {
+ return (network.prefix().v4AddressAsHostOrderInteger() < 0x01000000);
+ }
+ return false;
+}
+
+void BasicNetworkManager::StartUpdating() {
+ thread_ = Thread::Current();
+ if (start_count_) {
+ // If the network interfaces have already been discovered and the signal
+ // has been sent, trigger the signal immediately so that new clients can
+ // start allocating ports.
+ if (sent_first_update_)
+ thread_->Post(this, kSignalNetworksMessage);
+ } else {
+ thread_->Post(this, kUpdateNetworksMessage);
+ }
+ ++start_count_;
+}
+
+void BasicNetworkManager::StopUpdating() {
+ ASSERT(Thread::Current() == thread_);
+ if (!start_count_)
+ return;
+
+ --start_count_;
+ if (!start_count_) {
+ thread_->Clear(this);
+ sent_first_update_ = false;
+ }
+}
+
+void BasicNetworkManager::OnMessage(Message* msg) {
+ switch (msg->message_id) {
+ case kUpdateNetworksMessage: {
+ DoUpdateNetworks();
+ break;
+ }
+ case kSignalNetworksMessage: {
+ SignalNetworksChanged();
+ break;
+ }
+ default:
+ ASSERT(false);
+ }
+}
+
+void BasicNetworkManager::DoUpdateNetworks() {
+ if (!start_count_)
+ return;
+
+ ASSERT(Thread::Current() == thread_);
+
+ NetworkList list;
+ if (!CreateNetworks(false, &list)) {
+ SignalError();
+ } else {
+ bool changed;
+ MergeNetworkList(list, &changed);
+ if (changed || !sent_first_update_) {
+ SignalNetworksChanged();
+ sent_first_update_ = true;
+ }
+ }
+
+ thread_->PostDelayed(kNetworksUpdateIntervalMs, this, kUpdateNetworksMessage);
+}
+
+void BasicNetworkManager::DumpNetworks(bool include_ignored) {
+ NetworkList list;
+ CreateNetworks(include_ignored, &list);
+ LOG(LS_INFO) << "NetworkManager detected " << list.size() << " networks:";
+ for (size_t i = 0; i < list.size(); ++i) {
+ const Network* network = list[i];
+ if (!network->ignored() || include_ignored) {
+ LOG(LS_INFO) << network->ToString() << ": "
+ << network->description()
+ << ((network->ignored()) ? ", Ignored" : "");
+ }
+ }
+ // Release the network list created previously.
+ // Do this in a separate for loop for better readability.
+ for (size_t i = 0; i < list.size(); ++i) {
+ delete list[i];
+ }
+}
+
+Network::Network(const std::string& name, const std::string& desc,
+ const IPAddress& prefix, int prefix_length)
+ : name_(name), description_(desc), prefix_(prefix),
+ prefix_length_(prefix_length),
+ key_(MakeNetworkKey(name, prefix, prefix_length)), scope_id_(0),
+ ignored_(false), type_(ADAPTER_TYPE_UNKNOWN), preference_(0) {
+}
+
+Network::Network(const std::string& name, const std::string& desc,
+ const IPAddress& prefix, int prefix_length, AdapterType type)
+ : name_(name), description_(desc), prefix_(prefix),
+ prefix_length_(prefix_length),
+ key_(MakeNetworkKey(name, prefix, prefix_length)), scope_id_(0),
+ ignored_(false), type_(type), preference_(0) {
+}
+
+std::string Network::ToString() const {
+ std::stringstream ss;
+ // Print out the first space-terminated token of the network desc, plus
+ // the IP address.
+ ss << "Net[" << description_.substr(0, description_.find(' '))
+ << ":" << prefix_.ToSensitiveString() << "/" << prefix_length_
+ << ":" << AdapterTypeToString(type_) << "]";
+ return ss.str();
+}
+
+// Sets the addresses of this network. Returns true if the address set changed.
+// Change detection is short circuited if the changed argument is true.
+bool Network::SetIPs(const std::vector<IPAddress>& ips, bool changed) {
+ changed = changed || ips.size() != ips_.size();
+ // Detect changes with a nested loop; n-squared but we expect on the order
+ // of 2-3 addresses per network.
+ for (std::vector<IPAddress>::const_iterator it = ips.begin();
+ !changed && it != ips.end();
+ ++it) {
+ bool found = false;
+ for (std::vector<IPAddress>::iterator inner_it = ips_.begin();
+ !found && inner_it != ips_.end();
+ ++inner_it) {
+ if (*it == *inner_it) {
+ found = true;
+ }
+ }
+ changed = !found;
+ }
+ ips_ = ips;
+ return changed;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/network.h b/chromium/third_party/webrtc/base/network.h
new file mode 100644
index 00000000000..855b1b74a5f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/network.h
@@ -0,0 +1,245 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_NETWORK_H_
+#define WEBRTC_BASE_NETWORK_H_
+
+#include <deque>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/ipaddress.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/sigslot.h"
+
+#if defined(WEBRTC_POSIX)
+struct ifaddrs;
+#endif // defined(WEBRTC_POSIX)
+
+namespace rtc {
+
+class Network;
+class Thread;
+
+enum AdapterType {
+ // This enum resembles the one in Chromium net::ConnectionType.
+ ADAPTER_TYPE_UNKNOWN = 0,
+ ADAPTER_TYPE_ETHERNET = 1,
+ ADAPTER_TYPE_WIFI = 2,
+ ADAPTER_TYPE_CELLULAR = 3,
+ ADAPTER_TYPE_VPN = 4
+};
+
+// Makes a string key for this network. Used in the network manager's maps.
+// Network objects are keyed on interface name, network prefix and the
+// length of that prefix.
+std::string MakeNetworkKey(const std::string& name, const IPAddress& prefix,
+ int prefix_length);
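+// For example, MakeNetworkKey("eth0", IPAddress(0xC0A80100U), 24) produces a
+// key of the form "eth0%192.168.1.0/24" (see the implementation in network.cc).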
+
+// Generic network manager interface. It provides a list of local networks.
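+//
+// Typical use (a sketch; the observer object and handler are hypothetical, and
+// a concrete manager such as BasicNetworkManager below is assumed):
+//   manager->SignalNetworksChanged.connect(observer,
+//                                          &MyObserver::OnNetworksChanged);
+//   manager->StartUpdating();
+//   // Later, e.g. inside OnNetworksChanged():
+//   NetworkManager::NetworkList list;
+//   manager->GetNetworks(&list);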
+class NetworkManager {
+ public:
+ typedef std::vector<Network*> NetworkList;
+
+ NetworkManager();
+ virtual ~NetworkManager();
+
+ // Called when network list is updated.
+ sigslot::signal0<> SignalNetworksChanged;
+
+ // Indicates a failure when getting list of network interfaces.
+ sigslot::signal0<> SignalError;
+
+ // Starts/stops monitoring of the network interface list.
+ // SignalNetworksChanged or SignalError is emitted immediately after
+ // StartUpdating() is called. After that, SignalNetworksChanged is emitted
+ // whenever the list of networks changes.
+ virtual void StartUpdating() = 0;
+ virtual void StopUpdating() = 0;
+
+ // Returns the current list of networks available on this machine.
+ // UpdateNetworks() must be called before this method is called.
+ // It makes sure that repeated calls return the same object for a
+ // given network, so that quality is tracked appropriately. Does not
+ // include ignored networks.
+ virtual void GetNetworks(NetworkList* networks) const = 0;
+
+ // Dumps a list of networks available to LS_INFO.
+ virtual void DumpNetworks(bool include_ignored) {}
+};
+
+// Base class for NetworkManager implementations.
+class NetworkManagerBase : public NetworkManager {
+ public:
+ NetworkManagerBase();
+ virtual ~NetworkManagerBase();
+
+ virtual void GetNetworks(std::vector<Network*>* networks) const;
+ bool ipv6_enabled() const { return ipv6_enabled_; }
+ void set_ipv6_enabled(bool enabled) { ipv6_enabled_ = enabled; }
+
+ protected:
+ typedef std::map<std::string, Network*> NetworkMap;
+ // Updates |networks_| with the networks listed in |list|. If |networks_map_|
+ // already has a Network object for a network listed in |list| then it is
+ // reused. Takes ownership of the Network objects in |list|. |changed| will be
+ // set to true if there is any change in the network list.
+ void MergeNetworkList(const NetworkList& list, bool* changed);
+
+ private:
+ friend class NetworkTest;
+ void DoUpdateNetworks();
+
+ NetworkList networks_;
+ NetworkMap networks_map_;
+ bool ipv6_enabled_;
+};
+
+// Basic implementation of the NetworkManager interface that gets list
+// of networks using OS APIs.
+class BasicNetworkManager : public NetworkManagerBase,
+ public MessageHandler {
+ public:
+ BasicNetworkManager();
+ virtual ~BasicNetworkManager();
+
+ virtual void StartUpdating();
+ virtual void StopUpdating();
+
+ // Logs the available networks.
+ virtual void DumpNetworks(bool include_ignored);
+
+ // MessageHandler interface.
+ virtual void OnMessage(Message* msg);
+ bool started() { return start_count_ > 0; }
+
+ // Sets the network ignore list, which is empty by default. Any network on
+ // the ignore list will be filtered from network enumeration results.
+ void set_network_ignore_list(const std::vector<std::string>& list) {
+ network_ignore_list_ = list;
+ }
+#if defined(WEBRTC_LINUX)
+ // Sets the flag for ignoring non-default routes.
+ void set_ignore_non_default_routes(bool value) {
+ ignore_non_default_routes_ = value;
+ }
+#endif
+
+ protected:
+#if defined(WEBRTC_POSIX)
+ // Separated from CreateNetworks for tests.
+ void ConvertIfAddrs(ifaddrs* interfaces,
+ bool include_ignored,
+ NetworkList* networks) const;
+#endif // defined(WEBRTC_POSIX)
+
+ // Creates a network object for each network available on the machine.
+ bool CreateNetworks(bool include_ignored, NetworkList* networks) const;
+
+ // Determines if a network should be ignored.
+ bool IsIgnoredNetwork(const Network& network) const;
+
+ private:
+ friend class NetworkTest;
+
+ void DoUpdateNetworks();
+
+ Thread* thread_;
+ bool sent_first_update_;
+ int start_count_;
+ std::vector<std::string> network_ignore_list_;
+ bool ignore_non_default_routes_;
+};
+
+// Represents a Unix-type network interface, with a name and a list of
+// addresses.
+class Network {
+ public:
+ Network(const std::string& name, const std::string& description,
+ const IPAddress& prefix, int prefix_length);
+
+ Network(const std::string& name, const std::string& description,
+ const IPAddress& prefix, int prefix_length, AdapterType type);
+
+ // Returns the name of the interface this network is associated with.
+ const std::string& name() const { return name_; }
+
+ // Returns the OS-assigned name for this network. This is useful for
+ // debugging but should not be sent over the wire (for privacy reasons).
+ const std::string& description() const { return description_; }
+
+ // Returns the prefix for this network.
+ const IPAddress& prefix() const { return prefix_; }
+ // Returns the length, in bits, of this network's prefix.
+ int prefix_length() const { return prefix_length_; }
+
+ // |key_| has a unique value per network interface. It is used when sorting
+ // network interfaces, and is derived from the interface name and its prefix.
+ std::string key() const { return key_; }
+
+ // Returns the Network's current idea of the 'best' IP it has.
+ // 'Best' currently means the first one added.
+ // TODO: We should be preferring temporary addresses.
+ // Returns an unset IP if this network has no active addresses.
+ IPAddress ip() const {
+ if (ips_.size() == 0) {
+ return IPAddress();
+ }
+ return ips_.at(0);
+ }
+ // Adds an active IP address to this network. Does not check for duplicates.
+ void AddIP(const IPAddress& ip) { ips_.push_back(ip); }
+
+ // Sets the network's IP address list. Returns true if new IP addresses were
+ // detected. Passing true to already_changed skips this check.
+ bool SetIPs(const std::vector<IPAddress>& ips, bool already_changed);
+ // Get the list of IP Addresses associated with this network.
+ const std::vector<IPAddress>& GetIPs() { return ips_; }
+ // Clear the network's list of addresses.
+ void ClearIPs() { ips_.clear(); }
+
+ // Returns the scope-id of the network's address.
+ // Should only be relevant for link-local IPv6 addresses.
+ int scope_id() const { return scope_id_; }
+ void set_scope_id(int id) { scope_id_ = id; }
+
+ // Indicates whether this network should be ignored, perhaps because
+ // the IP is 0, or the interface is one we know is invalid.
+ bool ignored() const { return ignored_; }
+ void set_ignored(bool ignored) { ignored_ = ignored; }
+
+ AdapterType type() const { return type_; }
+ int preference() const { return preference_; }
+ void set_preference(int preference) { preference_ = preference; }
+
+ // Debugging description of this network
+ std::string ToString() const;
+
+ private:
+ std::string name_;
+ std::string description_;
+ IPAddress prefix_;
+ int prefix_length_;
+ std::string key_;
+ std::vector<IPAddress> ips_;
+ int scope_id_;
+ bool ignored_;
+ AdapterType type_;
+ int preference_;
+
+ friend class NetworkManager;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_NETWORK_H_
diff --git a/chromium/third_party/webrtc/base/network_unittest.cc b/chromium/third_party/webrtc/base/network_unittest.cc
new file mode 100644
index 00000000000..431f8b4eafd
--- /dev/null
+++ b/chromium/third_party/webrtc/base/network_unittest.cc
@@ -0,0 +1,617 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/network.h"
+
+#include <vector>
+#if defined(WEBRTC_POSIX)
+#include <sys/types.h>
+#if !defined(WEBRTC_ANDROID)
+#include <ifaddrs.h>
+#else
+#include "webrtc/base/ifaddrs-android.h"
+#endif
+#endif
+#include "webrtc/base/gunit.h"
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/logging.h" // For LOG_GLE
+#endif
+
+namespace rtc {
+
+class NetworkTest : public testing::Test, public sigslot::has_slots<> {
+ public:
+ NetworkTest() : callback_called_(false) {}
+
+ void OnNetworksChanged() {
+ callback_called_ = true;
+ }
+
+ void MergeNetworkList(BasicNetworkManager& network_manager,
+ const NetworkManager::NetworkList& list,
+ bool* changed) {
+ network_manager.MergeNetworkList(list, changed);
+ }
+
+ bool IsIgnoredNetwork(BasicNetworkManager& network_manager,
+ const Network& network) {
+ return network_manager.IsIgnoredNetwork(network);
+ }
+
+ NetworkManager::NetworkList GetNetworks(
+ const BasicNetworkManager& network_manager, bool include_ignored) {
+ NetworkManager::NetworkList list;
+ network_manager.CreateNetworks(include_ignored, &list);
+ return list;
+ }
+
+#if defined(WEBRTC_POSIX)
+ // Separated from CreateNetworks for tests.
+ static void CallConvertIfAddrs(const BasicNetworkManager& network_manager,
+ struct ifaddrs* interfaces,
+ bool include_ignored,
+ NetworkManager::NetworkList* networks) {
+ network_manager.ConvertIfAddrs(interfaces, include_ignored, networks);
+ }
+#endif // defined(WEBRTC_POSIX)
+
+ protected:
+ bool callback_called_;
+};
+
+// Test that the Network ctor works properly.
+TEST_F(NetworkTest, TestNetworkConstruct) {
+ Network ipv4_network1("test_eth0", "Test Network Adapter 1",
+ IPAddress(0x12345600U), 24);
+ EXPECT_EQ("test_eth0", ipv4_network1.name());
+ EXPECT_EQ("Test Network Adapter 1", ipv4_network1.description());
+ EXPECT_EQ(IPAddress(0x12345600U), ipv4_network1.prefix());
+ EXPECT_EQ(24, ipv4_network1.prefix_length());
+ EXPECT_FALSE(ipv4_network1.ignored());
+}
+
+// Tests that our ignore function works properly.
+TEST_F(NetworkTest, TestNetworkIgnore) {
+ Network ipv4_network1("test_eth0", "Test Network Adapter 1",
+ IPAddress(0x12345600U), 24);
+ Network ipv4_network2("test_eth1", "Test Network Adapter 2",
+ IPAddress(0x00010000U), 16);
+ BasicNetworkManager network_manager;
+ EXPECT_FALSE(IsIgnoredNetwork(network_manager, ipv4_network1));
+ EXPECT_TRUE(IsIgnoredNetwork(network_manager, ipv4_network2));
+}
+
+TEST_F(NetworkTest, TestIgnoreList) {
+ Network ignore_me("ignore_me", "Ignore me please!",
+ IPAddress(0x12345600U), 24);
+ Network include_me("include_me", "Include me please!",
+ IPAddress(0x12345600U), 24);
+ BasicNetworkManager network_manager;
+ EXPECT_FALSE(IsIgnoredNetwork(network_manager, ignore_me));
+ EXPECT_FALSE(IsIgnoredNetwork(network_manager, include_me));
+ std::vector<std::string> ignore_list;
+ ignore_list.push_back("ignore_me");
+ network_manager.set_network_ignore_list(ignore_list);
+ EXPECT_TRUE(IsIgnoredNetwork(network_manager, ignore_me));
+ EXPECT_FALSE(IsIgnoredNetwork(network_manager, include_me));
+}
+
+// Test is failing on Windows opt: b/11288214
+TEST_F(NetworkTest, DISABLED_TestCreateNetworks) {
+ BasicNetworkManager manager;
+ NetworkManager::NetworkList result = GetNetworks(manager, true);
+ // We should be able to bind to any addresses we find.
+ NetworkManager::NetworkList::iterator it;
+ for (it = result.begin();
+ it != result.end();
+ ++it) {
+ sockaddr_storage storage;
+ memset(&storage, 0, sizeof(storage));
+ IPAddress ip = (*it)->ip();
+ SocketAddress bindaddress(ip, 0);
+ bindaddress.SetScopeID((*it)->scope_id());
+ // TODO(thaloun): Use rtc::AsyncSocket once it supports IPv6.
+ int fd = static_cast<int>(socket(ip.family(), SOCK_STREAM, IPPROTO_TCP));
+ if (fd > 0) {
+ size_t ipsize = bindaddress.ToSockAddrStorage(&storage);
+ EXPECT_GE(ipsize, 0U);
+ int success = ::bind(fd,
+ reinterpret_cast<sockaddr*>(&storage),
+ static_cast<int>(ipsize));
+#if defined(WEBRTC_WIN)
+ if (success) LOG_GLE(LS_ERROR) << "Socket bind failed.";
+#endif
+ EXPECT_EQ(0, success);
+#if defined(WEBRTC_WIN)
+ closesocket(fd);
+#else
+ close(fd);
+#endif
+ }
+ delete (*it);
+ }
+}
+
+// Test that UpdateNetworks succeeds.
+TEST_F(NetworkTest, TestUpdateNetworks) {
+ BasicNetworkManager manager;
+ manager.SignalNetworksChanged.connect(
+ static_cast<NetworkTest*>(this), &NetworkTest::OnNetworksChanged);
+ manager.StartUpdating();
+ Thread::Current()->ProcessMessages(0);
+ EXPECT_TRUE(callback_called_);
+ callback_called_ = false;
+ // The callback should be triggered immediately when StartUpdating is
+ // called again, because a network update signal has already been sent.
+ manager.StartUpdating();
+ EXPECT_TRUE(manager.started());
+ Thread::Current()->ProcessMessages(0);
+ EXPECT_TRUE(callback_called_);
+ manager.StopUpdating();
+ EXPECT_TRUE(manager.started());
+ manager.StopUpdating();
+ EXPECT_FALSE(manager.started());
+ manager.StopUpdating();
+ EXPECT_FALSE(manager.started());
+ callback_called_ = false;
+ // Callback should be triggered immediately after StartUpdating is called
+ // when start_count_ is reset to 0.
+ manager.StartUpdating();
+ Thread::Current()->ProcessMessages(0);
+ EXPECT_TRUE(callback_called_);
+}
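+
+// Illustrative sketch of the nesting exercised above (observer and OnChanged
+// are hypothetical names): StartUpdating()/StopUpdating() are reference
+// counted, so the manager only stops once the last caller has stopped.
+//
+//   BasicNetworkManager manager;
+//   manager.SignalNetworksChanged.connect(observer, &Observer::OnChanged);
+//   manager.StartUpdating();         // started() becomes true
+//   NetworkManager::NetworkList networks;
+//   manager.GetNetworks(&networks);  // pointers remain owned by the manager
+//   manager.StopUpdating();          // started() becomes false again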
+
+// Verify that MergeNetworkList() merges network lists properly.
+TEST_F(NetworkTest, TestBasicMergeNetworkList) {
+ Network ipv4_network1("test_eth0", "Test Network Adapter 1",
+ IPAddress(0x12345600U), 24);
+ Network ipv4_network2("test_eth1", "Test Network Adapter 2",
+ IPAddress(0x00010000U), 16);
+ ipv4_network1.AddIP(IPAddress(0x12345678));
+ ipv4_network2.AddIP(IPAddress(0x00010004));
+ BasicNetworkManager manager;
+
+ // Add ipv4_network1 to the list of networks.
+ NetworkManager::NetworkList list;
+ list.push_back(new Network(ipv4_network1));
+ bool changed;
+ MergeNetworkList(manager, list, &changed);
+ EXPECT_TRUE(changed);
+ list.clear();
+
+ manager.GetNetworks(&list);
+ EXPECT_EQ(1U, list.size());
+ EXPECT_EQ(ipv4_network1.ToString(), list[0]->ToString());
+ Network* net1 = list[0];
+ list.clear();
+
+ // Replace ipv4_network1 with ipv4_network2.
+ list.push_back(new Network(ipv4_network2));
+ MergeNetworkList(manager, list, &changed);
+ EXPECT_TRUE(changed);
+ list.clear();
+
+ manager.GetNetworks(&list);
+ EXPECT_EQ(1U, list.size());
+ EXPECT_EQ(ipv4_network2.ToString(), list[0]->ToString());
+ Network* net2 = list[0];
+ list.clear();
+
+ // Add ipv4_network1 back, so the list contains both networks again.
+ list.push_back(new Network(ipv4_network1));
+ list.push_back(new Network(ipv4_network2));
+ MergeNetworkList(manager, list, &changed);
+ EXPECT_TRUE(changed);
+ list.clear();
+
+ // Verify that we get previous instances of Network objects.
+ manager.GetNetworks(&list);
+ EXPECT_EQ(2U, list.size());
+ EXPECT_TRUE((net1 == list[0] && net2 == list[1]) ||
+ (net1 == list[1] && net2 == list[0]));
+ list.clear();
+
+ // Call MergeNetworkList() again and verify that we don't get update
+ // notification.
+ list.push_back(new Network(ipv4_network2));
+ list.push_back(new Network(ipv4_network1));
+ MergeNetworkList(manager, list, &changed);
+ EXPECT_FALSE(changed);
+ list.clear();
+
+ // Verify that we get previous instances of Network objects.
+ manager.GetNetworks(&list);
+ EXPECT_EQ(2U, list.size());
+ EXPECT_TRUE((net1 == list[0] && net2 == list[1]) ||
+ (net1 == list[1] && net2 == list[0]));
+ list.clear();
+}
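+
+// Ownership note (as exercised above): Network objects pushed into the list
+// passed to MergeNetworkList() are handed over to the manager, which keeps
+// the first instance it sees and presumably frees later duplicates; callers
+// just clear() the vector afterwards. Pointers obtained via GetNetworks()
+// stay owned by the manager, which is why the test can compare them for
+// identity across merges.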
+
+// Sets up some test IPv6 networks and appends them to list.
+// Four networks are added - public and link local, for two interfaces.
+void SetupNetworks(NetworkManager::NetworkList* list) {
+ IPAddress ip;
+ IPAddress prefix;
+ EXPECT_TRUE(IPFromString("fe80::1234:5678:abcd:ef12", &ip));
+ EXPECT_TRUE(IPFromString("fe80::", &prefix));
+ // First, fake link-locals.
+ Network ipv6_eth0_linklocalnetwork("test_eth0", "Test NetworkAdapter 1",
+ prefix, 64);
+ ipv6_eth0_linklocalnetwork.AddIP(ip);
+ EXPECT_TRUE(IPFromString("fe80::5678:abcd:ef12:3456", &ip));
+ Network ipv6_eth1_linklocalnetwork("test_eth1", "Test NetworkAdapter 2",
+ prefix, 64);
+ ipv6_eth1_linklocalnetwork.AddIP(ip);
+ // Public networks:
+ EXPECT_TRUE(IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c3", &ip));
+ prefix = TruncateIP(ip, 64);
+ Network ipv6_eth0_publicnetwork1_ip1("test_eth0", "Test NetworkAdapter 1",
+ prefix, 64);
+ ipv6_eth0_publicnetwork1_ip1.AddIP(ip);
+ EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:abcd:efab:cdef", &ip));
+ prefix = TruncateIP(ip, 64);
+ Network ipv6_eth1_publicnetwork1_ip1("test_eth1", "Test NetworkAdapter 1",
+ prefix, 64);
+ ipv6_eth1_publicnetwork1_ip1.AddIP(ip);
+ list->push_back(new Network(ipv6_eth0_linklocalnetwork));
+ list->push_back(new Network(ipv6_eth1_linklocalnetwork));
+ list->push_back(new Network(ipv6_eth0_publicnetwork1_ip1));
+ list->push_back(new Network(ipv6_eth1_publicnetwork1_ip1));
+}
+
+// Test that the basic network merging case works.
+TEST_F(NetworkTest, TestIPv6MergeNetworkList) {
+ BasicNetworkManager manager;
+ manager.SignalNetworksChanged.connect(
+ static_cast<NetworkTest*>(this), &NetworkTest::OnNetworksChanged);
+ NetworkManager::NetworkList original_list;
+ SetupNetworks(&original_list);
+ bool changed = false;
+ MergeNetworkList(manager, original_list, &changed);
+ EXPECT_TRUE(changed);
+ NetworkManager::NetworkList list;
+ manager.GetNetworks(&list);
+ EXPECT_EQ(original_list.size(), list.size());
+ // Verify that the original members are in the merged list.
+ for (NetworkManager::NetworkList::iterator it = original_list.begin();
+ it != original_list.end(); ++it) {
+ EXPECT_NE(list.end(), std::find(list.begin(), list.end(), *it));
+ }
+}
+
+// Tests that when two network lists describing the same set of networks are
+// merged, the changed callback is not called and the original objects remain
+// in the result list.
+TEST_F(NetworkTest, TestNoChangeMerge) {
+ BasicNetworkManager manager;
+ manager.SignalNetworksChanged.connect(
+ static_cast<NetworkTest*>(this), &NetworkTest::OnNetworksChanged);
+ NetworkManager::NetworkList original_list;
+ SetupNetworks(&original_list);
+ bool changed = false;
+ MergeNetworkList(manager, original_list, &changed);
+ EXPECT_TRUE(changed);
+ // Second list that describes the same networks but with new objects.
+ NetworkManager::NetworkList second_list;
+ SetupNetworks(&second_list);
+ changed = false;
+ MergeNetworkList(manager, second_list, &changed);
+ EXPECT_FALSE(changed);
+ NetworkManager::NetworkList resulting_list;
+ manager.GetNetworks(&resulting_list);
+ EXPECT_EQ(original_list.size(), resulting_list.size());
+ // Verify that the original members are in the merged list.
+ for (NetworkManager::NetworkList::iterator it = original_list.begin();
+ it != original_list.end(); ++it) {
+ EXPECT_NE(resulting_list.end(),
+ std::find(resulting_list.begin(), resulting_list.end(), *it));
+ }
+ // Double-check that the new networks aren't in the list.
+ for (NetworkManager::NetworkList::iterator it = second_list.begin();
+ it != second_list.end(); ++it) {
+ EXPECT_EQ(resulting_list.end(),
+ std::find(resulting_list.begin(), resulting_list.end(), *it));
+ }
+}
+
+// Test that we can merge a network that is the same as another network but with
+// a different IP. The original network should remain in the list, but have its
+// IP changed.
+TEST_F(NetworkTest, MergeWithChangedIP) {
+ BasicNetworkManager manager;
+ manager.SignalNetworksChanged.connect(
+ static_cast<NetworkTest*>(this), &NetworkTest::OnNetworksChanged);
+ NetworkManager::NetworkList original_list;
+ SetupNetworks(&original_list);
+ // Make a network that we're going to change.
+ IPAddress ip;
+ EXPECT_TRUE(IPFromString("2401:fa01:4:1000:be30:faa:fee:faa", &ip));
+ IPAddress prefix = TruncateIP(ip, 64);
+ Network* network_to_change = new Network("test_eth0",
+ "Test Network Adapter 1",
+ prefix, 64);
+ Network* changed_network = new Network(*network_to_change);
+ network_to_change->AddIP(ip);
+ IPAddress changed_ip;
+ EXPECT_TRUE(IPFromString("2401:fa01:4:1000:be30:f00:f00:f00", &changed_ip));
+ changed_network->AddIP(changed_ip);
+ original_list.push_back(network_to_change);
+ bool changed = false;
+ MergeNetworkList(manager, original_list, &changed);
+ NetworkManager::NetworkList second_list;
+ SetupNetworks(&second_list);
+ second_list.push_back(changed_network);
+ changed = false;
+ MergeNetworkList(manager, second_list, &changed);
+ EXPECT_TRUE(changed);
+ NetworkManager::NetworkList list;
+ manager.GetNetworks(&list);
+ EXPECT_EQ(original_list.size(), list.size());
+ // Make sure the original network is still in the merged list.
+ EXPECT_NE(list.end(),
+ std::find(list.begin(), list.end(), network_to_change));
+ EXPECT_EQ(changed_ip, network_to_change->GetIPs().at(0));
+}
+
+// Testing a similar case to above, but checking that a network can be updated
+// with additional IPs (not just a replacement).
+TEST_F(NetworkTest, TestMultipleIPMergeNetworkList) {
+ BasicNetworkManager manager;
+ manager.SignalNetworksChanged.connect(
+ static_cast<NetworkTest*>(this), &NetworkTest::OnNetworksChanged);
+ NetworkManager::NetworkList original_list;
+ SetupNetworks(&original_list);
+ bool changed = false;
+ MergeNetworkList(manager, original_list, &changed);
+ EXPECT_TRUE(changed);
+ IPAddress ip;
+ IPAddress check_ip;
+ IPAddress prefix;
+ // Add a second IP to the public network on eth0 (2401:fa00:4:1000/64).
+ EXPECT_TRUE(IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c6", &ip));
+ prefix = TruncateIP(ip, 64);
+ Network ipv6_eth0_publicnetwork1_ip2("test_eth0", "Test NetworkAdapter 1",
+ prefix, 64);
+ // This is the IP that already existed in the public network on eth0.
+ EXPECT_TRUE(IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c3", &check_ip));
+ ipv6_eth0_publicnetwork1_ip2.AddIP(ip);
+ original_list.push_back(new Network(ipv6_eth0_publicnetwork1_ip2));
+ changed = false;
+ MergeNetworkList(manager, original_list, &changed);
+ EXPECT_TRUE(changed);
+ // There should still be four networks.
+ NetworkManager::NetworkList list;
+ manager.GetNetworks(&list);
+ EXPECT_EQ(4U, list.size());
+ // Check the gathered IPs.
+ int matchcount = 0;
+ for (NetworkManager::NetworkList::iterator it = list.begin();
+ it != list.end(); ++it) {
+ if ((*it)->ToString() == original_list[2]->ToString()) {
+ ++matchcount;
+ EXPECT_EQ(1, matchcount);
+ // This should be the same network object as before.
+ EXPECT_EQ((*it), original_list[2]);
+ // But with two addresses now.
+ EXPECT_EQ(2U, (*it)->GetIPs().size());
+ EXPECT_NE((*it)->GetIPs().end(),
+ std::find((*it)->GetIPs().begin(),
+ (*it)->GetIPs().end(),
+ check_ip));
+ EXPECT_NE((*it)->GetIPs().end(),
+ std::find((*it)->GetIPs().begin(),
+ (*it)->GetIPs().end(),
+ ip));
+ } else {
+ // Check the IP didn't get added anywhere it wasn't supposed to.
+ EXPECT_EQ((*it)->GetIPs().end(),
+ std::find((*it)->GetIPs().begin(),
+ (*it)->GetIPs().end(),
+ ip));
+ }
+ }
+}
+
+// Test that merge correctly distinguishes multiple networks on an interface.
+TEST_F(NetworkTest, TestMultiplePublicNetworksOnOneInterfaceMerge) {
+ BasicNetworkManager manager;
+ manager.SignalNetworksChanged.connect(
+ static_cast<NetworkTest*>(this), &NetworkTest::OnNetworksChanged);
+ NetworkManager::NetworkList original_list;
+ SetupNetworks(&original_list);
+ bool changed = false;
+ MergeNetworkList(manager, original_list, &changed);
+ EXPECT_TRUE(changed);
+ IPAddress ip;
+ IPAddress prefix;
+ // A second network for eth0.
+ EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:5bff:fee5:c3", &ip));
+ prefix = TruncateIP(ip, 64);
+ Network ipv6_eth0_publicnetwork2_ip1("test_eth0", "Test NetworkAdapter 1",
+ prefix, 64);
+ ipv6_eth0_publicnetwork2_ip1.AddIP(ip);
+ original_list.push_back(new Network(ipv6_eth0_publicnetwork2_ip1));
+ changed = false;
+ MergeNetworkList(manager, original_list, &changed);
+ EXPECT_TRUE(changed);
+ // There should be five networks now.
+ NetworkManager::NetworkList list;
+ manager.GetNetworks(&list);
+ EXPECT_EQ(5U, list.size());
+ // Check the resulting addresses.
+ for (NetworkManager::NetworkList::iterator it = list.begin();
+ it != list.end(); ++it) {
+ if ((*it)->prefix() == ipv6_eth0_publicnetwork2_ip1.prefix() &&
+ (*it)->name() == ipv6_eth0_publicnetwork2_ip1.name()) {
+ // Check the new network has 1 IP and that it's the correct one.
+ EXPECT_EQ(1U, (*it)->GetIPs().size());
+ EXPECT_EQ(ip, (*it)->GetIPs().at(0));
+ } else {
+ // Check the IP didn't get added anywhere it wasn't supposed to.
+ EXPECT_EQ((*it)->GetIPs().end(),
+ std::find((*it)->GetIPs().begin(),
+ (*it)->GetIPs().end(),
+ ip));
+ }
+ }
+}
+
+// Test that DumpNetworks works.
+TEST_F(NetworkTest, TestDumpNetworks) {
+ BasicNetworkManager manager;
+ manager.DumpNetworks(true);
+}
+
+// Test that we can toggle IPv6 on and off.
+TEST_F(NetworkTest, TestIPv6Toggle) {
+ BasicNetworkManager manager;
+ bool ipv6_found = false;
+ NetworkManager::NetworkList list;
+#if !defined(WEBRTC_WIN)
+ // There should be at least one IPv6 network (fe80::/64 should be in there).
+ // TODO(thaloun): Disabling this test on windows for the moment as the test
+ // machines don't seem to have IPv6 installed on them at all.
+ manager.set_ipv6_enabled(true);
+ list = GetNetworks(manager, true);
+ for (NetworkManager::NetworkList::iterator it = list.begin();
+ it != list.end(); ++it) {
+ if ((*it)->prefix().family() == AF_INET6) {
+ ipv6_found = true;
+ break;
+ }
+ }
+ EXPECT_TRUE(ipv6_found);
+ for (NetworkManager::NetworkList::iterator it = list.begin();
+ it != list.end(); ++it) {
+ delete (*it);
+ }
+#endif
+ ipv6_found = false;
+ manager.set_ipv6_enabled(false);
+ list = GetNetworks(manager, true);
+ for (NetworkManager::NetworkList::iterator it = list.begin();
+ it != list.end(); ++it) {
+ if ((*it)->prefix().family() == AF_INET6) {
+ ipv6_found = true;
+ break;
+ }
+ }
+ EXPECT_FALSE(ipv6_found);
+ for (NetworkManager::NetworkList::iterator it = list.begin();
+ it != list.end(); ++it) {
+ delete (*it);
+ }
+}
+
+TEST_F(NetworkTest, TestNetworkListSorting) {
+ BasicNetworkManager manager;
+ Network ipv4_network1("test_eth0", "Test Network Adapter 1",
+ IPAddress(0x12345600U), 24);
+ ipv4_network1.AddIP(IPAddress(0x12345600U));
+
+ IPAddress ip;
+ IPAddress prefix;
+ EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:abcd:efab:cdef", &ip));
+ prefix = TruncateIP(ip, 64);
+ Network ipv6_eth1_publicnetwork1_ip1("test_eth1", "Test NetworkAdapter 2",
+ prefix, 64);
+ ipv6_eth1_publicnetwork1_ip1.AddIP(ip);
+
+ NetworkManager::NetworkList list;
+ list.push_back(new Network(ipv4_network1));
+ list.push_back(new Network(ipv6_eth1_publicnetwork1_ip1));
+ Network* net1 = list[0];
+ Network* net2 = list[1];
+
+ bool changed = false;
+ MergeNetworkList(manager, list, &changed);
+ ASSERT_TRUE(changed);
+ // After sorting IPv6 network should be higher order than IPv4 networks.
+ EXPECT_TRUE(net1->preference() < net2->preference());
+}
+
+TEST_F(NetworkTest, TestNetworkAdapterTypes) {
+ Network wifi("wlan0", "Wireless Adapter", IPAddress(0x12345600U), 24,
+ ADAPTER_TYPE_WIFI);
+ EXPECT_EQ(ADAPTER_TYPE_WIFI, wifi.type());
+ Network ethernet("eth0", "Ethernet", IPAddress(0x12345600U), 24,
+ ADAPTER_TYPE_ETHERNET);
+ EXPECT_EQ(ADAPTER_TYPE_ETHERNET, ethernet.type());
+ Network cellular("test_cell", "Cellular Adapter", IPAddress(0x12345600U), 24,
+ ADAPTER_TYPE_CELLULAR);
+ EXPECT_EQ(ADAPTER_TYPE_CELLULAR, cellular.type());
+ Network vpn("bridge_test", "VPN Adapter", IPAddress(0x12345600U), 24,
+ ADAPTER_TYPE_VPN);
+ EXPECT_EQ(ADAPTER_TYPE_VPN, vpn.type());
+ Network unknown("test", "Test Adapter", IPAddress(0x12345600U), 24,
+ ADAPTER_TYPE_UNKNOWN);
+ EXPECT_EQ(ADAPTER_TYPE_UNKNOWN, unknown.type());
+}
+
+#if defined(WEBRTC_POSIX)
+// Verify that we correctly handle interfaces with no address.
+TEST_F(NetworkTest, TestConvertIfAddrsNoAddress) {
+ ifaddrs list;
+ memset(&list, 0, sizeof(list));
+ list.ifa_name = const_cast<char*>("test_iface");
+
+ NetworkManager::NetworkList result;
+ BasicNetworkManager manager;
+ CallConvertIfAddrs(manager, &list, true, &result);
+ EXPECT_TRUE(result.empty());
+}
+#endif // defined(WEBRTC_POSIX)
+
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+// If you want to test non-default routes, you can do the following on a Linux
+// machine:
+// 1) Load the dummy network driver:
+//    sudo modprobe dummy
+//    sudo ifconfig dummy0 127.0.0.1
+// 2) Run this test and confirm the output says it found a dummy route (and
+//    passes).
+// 3) When done:
+//    sudo rmmod dummy
+TEST_F(NetworkTest, TestIgnoreNonDefaultRoutes) {
+ BasicNetworkManager manager;
+ NetworkManager::NetworkList list;
+ list = GetNetworks(manager, false);
+ bool found_dummy = false;
+ LOG(LS_INFO) << "Looking for dummy network: ";
+ for (NetworkManager::NetworkList::iterator it = list.begin();
+ it != list.end(); ++it) {
+ LOG(LS_INFO) << " Network name: " << (*it)->name();
+ found_dummy |= (*it)->name().find("dummy0") != std::string::npos;
+ }
+ for (NetworkManager::NetworkList::iterator it = list.begin();
+ it != list.end(); ++it) {
+ delete (*it);
+ }
+ if (!found_dummy) {
+ LOG(LS_INFO) << "No dummy found, quitting.";
+ return;
+ }
+ LOG(LS_INFO) << "Found dummy, running again while ignoring non-default "
+ << "routes.";
+ manager.set_ignore_non_default_routes(true);
+ list = GetNetworks(manager, false);
+ for (NetworkManager::NetworkList::iterator it = list.begin();
+ it != list.end(); ++it) {
+ LOG(LS_INFO) << " Network name: " << (*it)->name();
+ EXPECT_TRUE((*it)->name().find("dummy0") == std::string::npos);
+ }
+ for (NetworkManager::NetworkList::iterator it = list.begin();
+ it != list.end(); ++it) {
+ delete (*it);
+ }
+}
+#endif
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/nssidentity.cc b/chromium/third_party/webrtc/base/nssidentity.cc
new file mode 100644
index 00000000000..77635a2fcaf
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nssidentity.cc
@@ -0,0 +1,521 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <string>
+#include <vector>
+
+#if HAVE_CONFIG_H
+#include "config.h"
+#endif // HAVE_CONFIG_H
+
+#if HAVE_NSS_SSL_H
+
+#include "webrtc/base/nssidentity.h"
+
+#include "cert.h"
+#include "cryptohi.h"
+#include "keyhi.h"
+#include "nss.h"
+#include "pk11pub.h"
+#include "sechash.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/nssstreamadapter.h"
+#include "webrtc/base/safe_conversions.h"
+
+namespace rtc {
+
+// Certificate validity lifetime in seconds.
+static const int CERTIFICATE_LIFETIME = 60*60*24*30; // 30 days, arbitrarily
+// Certificate validity window in seconds.
+// This is to compensate for slightly incorrect system clocks.
+static const int CERTIFICATE_WINDOW = -60*60*24;
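+// For illustration, with the values above an identity generated "now" is
+// valid from roughly (now - 1 day) until (now + 30 days):
+//   not_before = now + CERTIFICATE_WINDOW   = now - 60*60*24 seconds
+//   not_after  = now + CERTIFICATE_LIFETIME = now + 60*60*24*30 seconds
+// (see NSSIdentity::Generate/GenerateInternal below).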
+
+NSSKeyPair::~NSSKeyPair() {
+ if (privkey_)
+ SECKEY_DestroyPrivateKey(privkey_);
+ if (pubkey_)
+ SECKEY_DestroyPublicKey(pubkey_);
+}
+
+NSSKeyPair *NSSKeyPair::Generate() {
+ SECKEYPrivateKey *privkey = NULL;
+ SECKEYPublicKey *pubkey = NULL;
+ PK11RSAGenParams rsaparams;
+ rsaparams.keySizeInBits = 1024;
+ rsaparams.pe = 0x010001; // 65537 -- a common RSA public exponent.
+
+ privkey = PK11_GenerateKeyPair(NSSContext::GetSlot(),
+ CKM_RSA_PKCS_KEY_PAIR_GEN,
+ &rsaparams, &pubkey, PR_FALSE /*permanent*/,
+ PR_FALSE /*sensitive*/, NULL);
+ if (!privkey) {
+ LOG(LS_ERROR) << "Couldn't generate key pair";
+ return NULL;
+ }
+
+ return new NSSKeyPair(privkey, pubkey);
+}
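+
+// Illustrative usage sketch: the caller owns the returned pair, and the
+// underlying NSS keys are released in ~NSSKeyPair(), e.g.
+//
+//   scoped_ptr<NSSKeyPair> pair(NSSKeyPair::Generate());
+//   if (pair.get()) {
+//     SECKEYPrivateKey* key = pair->privkey();  // borrowed, do not destroy
+//   }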
+
+// Just make a copy.
+NSSKeyPair *NSSKeyPair::GetReference() {
+ SECKEYPrivateKey *privkey = SECKEY_CopyPrivateKey(privkey_);
+ if (!privkey)
+ return NULL;
+
+ SECKEYPublicKey *pubkey = SECKEY_CopyPublicKey(pubkey_);
+ if (!pubkey) {
+ SECKEY_DestroyPrivateKey(privkey);
+ return NULL;
+ }
+
+ return new NSSKeyPair(privkey, pubkey);
+}
+
+NSSCertificate::NSSCertificate(CERTCertificate* cert)
+ : certificate_(CERT_DupCertificate(cert)) {
+ ASSERT(certificate_ != NULL);
+}
+
+static void DeleteCert(SSLCertificate* cert) {
+ delete cert;
+}
+
+NSSCertificate::NSSCertificate(CERTCertList* cert_list) {
+ // Copy the first cert into certificate_.
+ CERTCertListNode* node = CERT_LIST_HEAD(cert_list);
+ certificate_ = CERT_DupCertificate(node->cert);
+
+ // Put any remaining certificates into the chain.
+ node = CERT_LIST_NEXT(node);
+ std::vector<SSLCertificate*> certs;
+ for (; !CERT_LIST_END(node, cert_list); node = CERT_LIST_NEXT(node)) {
+ certs.push_back(new NSSCertificate(node->cert));
+ }
+
+ if (!certs.empty())
+ chain_.reset(new SSLCertChain(certs));
+
+ // The SSLCertChain constructor copies its input, so now we have to delete
+ // the originals.
+ std::for_each(certs.begin(), certs.end(), DeleteCert);
+}
+
+NSSCertificate::NSSCertificate(CERTCertificate* cert, SSLCertChain* chain)
+ : certificate_(CERT_DupCertificate(cert)) {
+ ASSERT(certificate_ != NULL);
+ if (chain)
+ chain_.reset(chain->Copy());
+}
+
+
+NSSCertificate *NSSCertificate::FromPEMString(const std::string &pem_string) {
+ std::string der;
+ if (!SSLIdentity::PemToDer(kPemTypeCertificate, pem_string, &der))
+ return NULL;
+
+ SECItem der_cert;
+ der_cert.data = reinterpret_cast<unsigned char *>(const_cast<char *>(
+ der.data()));
+ der_cert.len = checked_cast<unsigned int>(der.size());
+ CERTCertificate *cert = CERT_NewTempCertificate(CERT_GetDefaultCertDB(),
+ &der_cert, NULL, PR_FALSE, PR_TRUE);
+
+ if (!cert)
+ return NULL;
+
+ NSSCertificate* ret = new NSSCertificate(cert);
+ CERT_DestroyCertificate(cert);
+ return ret;
+}
+
+NSSCertificate *NSSCertificate::GetReference() const {
+ return new NSSCertificate(certificate_, chain_.get());
+}
+
+std::string NSSCertificate::ToPEMString() const {
+ return SSLIdentity::DerToPem(kPemTypeCertificate,
+ certificate_->derCert.data,
+ certificate_->derCert.len);
+}
+
+void NSSCertificate::ToDER(Buffer* der_buffer) const {
+ der_buffer->SetData(certificate_->derCert.data, certificate_->derCert.len);
+}
+
+static bool Certifies(CERTCertificate* parent, CERTCertificate* child) {
+ // TODO(bemasc): Identify stricter validation checks to use here. In the
+ // context of some future identity standard, it might make sense to check
+ // the certificates' roles, expiration dates, self-signatures (if
+ // self-signed), certificate transparency logging, or many other attributes.
+ // NOTE: Future changes to this validation may reject some previously allowed
+ // certificate chains. Users should be advised not to deploy chained
+ // certificates except in controlled environments until the validity
+ // requirements are finalized.
+
+ // Check that the parent's name is the same as the child's claimed issuer.
+ SECComparison name_status =
+ CERT_CompareName(&child->issuer, &parent->subject);
+ if (name_status != SECEqual)
+ return false;
+
+ // Extract the parent's public key, or fail if the key could not be read
+ // (e.g. certificate is corrupted).
+ SECKEYPublicKey* parent_key = CERT_ExtractPublicKey(parent);
+ if (!parent_key)
+ return false;
+
+ // Check that the parent's privkey was actually used to generate the child's
+ // signature.
+ SECStatus verified = CERT_VerifySignedDataWithPublicKey(
+ &child->signatureWrap, parent_key, NULL);
+ SECKEY_DestroyPublicKey(parent_key);
+ return verified == SECSuccess;
+}
+
+bool NSSCertificate::IsValidChain(const CERTCertList* cert_list) {
+ CERTCertListNode* child = CERT_LIST_HEAD(cert_list);
+ for (CERTCertListNode* parent = CERT_LIST_NEXT(child);
+ !CERT_LIST_END(parent, cert_list);
+ child = parent, parent = CERT_LIST_NEXT(parent)) {
+ if (!Certifies(parent->cert, child->cert))
+ return false;
+ }
+ return true;
+}
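+
+// For illustration, IsValidChain() expects the NSS list ordering where the
+// leaf certificate comes first and each later certificate is checked as the
+// issuer of the one before it:
+//
+//   [0] end-entity (leaf)   <- must be signed by [1]
+//   [1] intermediate        <- must be signed by [2]
+//   [2] intermediate or root
+//
+// Expiration, trust anchors and self-signatures are deliberately not checked
+// here; see Certifies() above.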
+
+bool NSSCertificate::GetDigestLength(const std::string& algorithm,
+ size_t* length) {
+ const SECHashObject *ho;
+
+ if (!GetDigestObject(algorithm, &ho))
+ return false;
+
+ *length = ho->length;
+
+ return true;
+}
+
+bool NSSCertificate::GetSignatureDigestAlgorithm(std::string* algorithm) const {
+ // The function sec_DecodeSigAlg in NSS provides this mapping functionality.
+ // Unfortunately it is private, so the functionality must be duplicated here.
+ // See https://bugzilla.mozilla.org/show_bug.cgi?id=925165 .
+ SECOidTag sig_alg = SECOID_GetAlgorithmTag(&certificate_->signature);
+ switch (sig_alg) {
+ case SEC_OID_PKCS1_MD5_WITH_RSA_ENCRYPTION:
+ *algorithm = DIGEST_MD5;
+ break;
+ case SEC_OID_PKCS1_SHA1_WITH_RSA_ENCRYPTION:
+ case SEC_OID_ISO_SHA_WITH_RSA_SIGNATURE:
+ case SEC_OID_ISO_SHA1_WITH_RSA_SIGNATURE:
+ case SEC_OID_ANSIX9_DSA_SIGNATURE_WITH_SHA1_DIGEST:
+ case SEC_OID_BOGUS_DSA_SIGNATURE_WITH_SHA1_DIGEST:
+ case SEC_OID_ANSIX962_ECDSA_SHA1_SIGNATURE:
+ case SEC_OID_MISSI_DSS:
+ case SEC_OID_MISSI_KEA_DSS:
+ case SEC_OID_MISSI_KEA_DSS_OLD:
+ case SEC_OID_MISSI_DSS_OLD:
+ *algorithm = DIGEST_SHA_1;
+ break;
+ case SEC_OID_ANSIX962_ECDSA_SHA224_SIGNATURE:
+ case SEC_OID_PKCS1_SHA224_WITH_RSA_ENCRYPTION:
+ case SEC_OID_NIST_DSA_SIGNATURE_WITH_SHA224_DIGEST:
+ *algorithm = DIGEST_SHA_224;
+ break;
+ case SEC_OID_ANSIX962_ECDSA_SHA256_SIGNATURE:
+ case SEC_OID_PKCS1_SHA256_WITH_RSA_ENCRYPTION:
+ case SEC_OID_NIST_DSA_SIGNATURE_WITH_SHA256_DIGEST:
+ *algorithm = DIGEST_SHA_256;
+ break;
+ case SEC_OID_ANSIX962_ECDSA_SHA384_SIGNATURE:
+ case SEC_OID_PKCS1_SHA384_WITH_RSA_ENCRYPTION:
+ *algorithm = DIGEST_SHA_384;
+ break;
+ case SEC_OID_ANSIX962_ECDSA_SHA512_SIGNATURE:
+ case SEC_OID_PKCS1_SHA512_WITH_RSA_ENCRYPTION:
+ *algorithm = DIGEST_SHA_512;
+ break;
+ default:
+ // Unknown algorithm. There are several unhandled options that are less
+ // common and more complex.
+ algorithm->clear();
+ return false;
+ }
+ return true;
+}
+
+bool NSSCertificate::ComputeDigest(const std::string& algorithm,
+ unsigned char* digest,
+ size_t size,
+ size_t* length) const {
+ const SECHashObject *ho;
+
+ if (!GetDigestObject(algorithm, &ho))
+ return false;
+
+ if (size < ho->length) // Sanity check for fit
+ return false;
+
+ SECStatus rv = HASH_HashBuf(ho->type, digest,
+ certificate_->derCert.data,
+ certificate_->derCert.len);
+ if (rv != SECSuccess)
+ return false;
+
+ *length = ho->length;
+
+ return true;
+}
+
+bool NSSCertificate::GetChain(SSLCertChain** chain) const {
+ if (!chain_)
+ return false;
+
+ *chain = chain_->Copy();
+ return true;
+}
+
+bool NSSCertificate::Equals(const NSSCertificate *tocompare) const {
+ if (!certificate_->derCert.len)
+ return false;
+ if (!tocompare->certificate_->derCert.len)
+ return false;
+
+ if (certificate_->derCert.len != tocompare->certificate_->derCert.len)
+ return false;
+
+ return memcmp(certificate_->derCert.data,
+ tocompare->certificate_->derCert.data,
+ certificate_->derCert.len) == 0;
+}
+
+
+bool NSSCertificate::GetDigestObject(const std::string &algorithm,
+ const SECHashObject **hop) {
+ const SECHashObject *ho;
+ HASH_HashType hash_type;
+
+ if (algorithm == DIGEST_SHA_1) {
+ hash_type = HASH_AlgSHA1;
+ // HASH_AlgSHA224 is not supported in the chromium linux build system.
+#if 0
+ } else if (algorithm == DIGEST_SHA_224) {
+ hash_type = HASH_AlgSHA224;
+#endif
+ } else if (algorithm == DIGEST_SHA_256) {
+ hash_type = HASH_AlgSHA256;
+ } else if (algorithm == DIGEST_SHA_384) {
+ hash_type = HASH_AlgSHA384;
+ } else if (algorithm == DIGEST_SHA_512) {
+ hash_type = HASH_AlgSHA512;
+ } else {
+ return false;
+ }
+
+ ho = HASH_GetHashObject(hash_type);
+
+ ASSERT(ho->length >= 20); // Can't happen
+ *hop = ho;
+
+ return true;
+}
+
+
+NSSIdentity* NSSIdentity::GenerateInternal(const SSLIdentityParams& params) {
+ std::string subject_name_string = "CN=" + params.common_name;
+ CERTName *subject_name = CERT_AsciiToName(
+ const_cast<char *>(subject_name_string.c_str()));
+ NSSIdentity *identity = NULL;
+ CERTSubjectPublicKeyInfo *spki = NULL;
+ CERTCertificateRequest *certreq = NULL;
+ CERTValidity *validity = NULL;
+ CERTCertificate *certificate = NULL;
+ NSSKeyPair *keypair = NSSKeyPair::Generate();
+ SECItem inner_der;
+ SECStatus rv;
+ PLArenaPool* arena;
+ SECItem signed_cert;
+ PRTime now = PR_Now();
+ PRTime not_before =
+ now + static_cast<PRTime>(params.not_before) * PR_USEC_PER_SEC;
+ PRTime not_after =
+ now + static_cast<PRTime>(params.not_after) * PR_USEC_PER_SEC;
+
+ inner_der.len = 0;
+ inner_der.data = NULL;
+
+ if (!keypair) {
+ LOG(LS_ERROR) << "Couldn't generate key pair";
+ goto fail;
+ }
+
+ if (!subject_name) {
+ LOG(LS_ERROR) << "Couldn't convert subject name " << subject_name_string;
+ goto fail;
+ }
+
+ spki = SECKEY_CreateSubjectPublicKeyInfo(keypair->pubkey());
+ if (!spki) {
+ LOG(LS_ERROR) << "Couldn't create SPKI";
+ goto fail;
+ }
+
+ certreq = CERT_CreateCertificateRequest(subject_name, spki, NULL);
+ if (!certreq) {
+ LOG(LS_ERROR) << "Couldn't create certificate signing request";
+ goto fail;
+ }
+
+ validity = CERT_CreateValidity(not_before, not_after);
+ if (!validity) {
+ LOG(LS_ERROR) << "Couldn't create validity";
+ goto fail;
+ }
+
+ unsigned long serial;
+ // Note: this serial number could in principle collide, but it's unlikely.
+ rv = PK11_GenerateRandom(reinterpret_cast<unsigned char *>(&serial),
+ sizeof(serial));
+ if (rv != SECSuccess) {
+ LOG(LS_ERROR) << "Couldn't generate random serial";
+ goto fail;
+ }
+
+ certificate = CERT_CreateCertificate(serial, subject_name, validity, certreq);
+ if (!certificate) {
+ LOG(LS_ERROR) << "Couldn't create certificate";
+ goto fail;
+ }
+
+ arena = certificate->arena;
+
+ rv = SECOID_SetAlgorithmID(arena, &certificate->signature,
+ SEC_OID_PKCS1_SHA1_WITH_RSA_ENCRYPTION, NULL);
+ if (rv != SECSuccess)
+ goto fail;
+
+ // Set version to X509v3.
+ *(certificate->version.data) = 2;
+ certificate->version.len = 1;
+
+ if (!SEC_ASN1EncodeItem(arena, &inner_der, certificate,
+ SEC_ASN1_GET(CERT_CertificateTemplate)))
+ goto fail;
+
+ rv = SEC_DerSignData(arena, &signed_cert, inner_der.data, inner_der.len,
+ keypair->privkey(),
+ SEC_OID_PKCS1_SHA1_WITH_RSA_ENCRYPTION);
+ if (rv != SECSuccess) {
+ LOG(LS_ERROR) << "Couldn't sign certificate";
+ goto fail;
+ }
+ certificate->derCert = signed_cert;
+
+ identity = new NSSIdentity(keypair, new NSSCertificate(certificate));
+
+ goto done;
+
+ fail:
+ delete keypair;
+
+ done:
+ if (certificate) CERT_DestroyCertificate(certificate);
+ if (subject_name) CERT_DestroyName(subject_name);
+ if (spki) SECKEY_DestroySubjectPublicKeyInfo(spki);
+ if (certreq) CERT_DestroyCertificateRequest(certreq);
+ if (validity) CERT_DestroyValidity(validity);
+ return identity;
+}
+
+NSSIdentity* NSSIdentity::Generate(const std::string &common_name) {
+ SSLIdentityParams params;
+ params.common_name = common_name;
+ params.not_before = CERTIFICATE_WINDOW;
+ params.not_after = CERTIFICATE_LIFETIME;
+ return GenerateInternal(params);
+}
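+
+// Illustrative sketch (the variable names are hypothetical): a caller that
+// needs a certificate fingerprint for signaling could combine Generate()
+// with the digest helpers above, e.g.
+//
+//   scoped_ptr<NSSIdentity> identity(NSSIdentity::Generate("test"));
+//   unsigned char digest[64];
+//   size_t digest_len;
+//   identity->certificate().ComputeDigest(DIGEST_SHA_256, digest,
+//                                         sizeof(digest), &digest_len);
+//   // digest/digest_len could then be exchanged, e.g. as an SDP fingerprint.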
+
+NSSIdentity* NSSIdentity::GenerateForTest(const SSLIdentityParams& params) {
+ return GenerateInternal(params);
+}
+
+SSLIdentity* NSSIdentity::FromPEMStrings(const std::string& private_key,
+ const std::string& certificate) {
+ std::string private_key_der;
+ if (!SSLIdentity::PemToDer(
+ kPemTypeRsaPrivateKey, private_key, &private_key_der))
+ return NULL;
+
+ SECItem private_key_item;
+ private_key_item.data = reinterpret_cast<unsigned char *>(
+ const_cast<char *>(private_key_der.c_str()));
+ private_key_item.len = checked_cast<unsigned int>(private_key_der.size());
+
+ const unsigned int key_usage = KU_KEY_ENCIPHERMENT | KU_DATA_ENCIPHERMENT |
+ KU_DIGITAL_SIGNATURE;
+
+ SECKEYPrivateKey* privkey = NULL;
+ SECStatus rv =
+ PK11_ImportDERPrivateKeyInfoAndReturnKey(NSSContext::GetSlot(),
+ &private_key_item,
+ NULL, NULL, PR_FALSE, PR_FALSE,
+ key_usage, &privkey, NULL);
+ if (rv != SECSuccess) {
+ LOG(LS_ERROR) << "Couldn't import private key";
+ return NULL;
+ }
+
+ SECKEYPublicKey *pubkey = SECKEY_ConvertToPublicKey(privkey);
+ if (!pubkey) {
+ SECKEY_DestroyPrivateKey(privkey);
+ LOG(LS_ERROR) << "Couldn't convert private key to public key";
+ return NULL;
+ }
+
+ // Assign to a scoped_ptr so we don't leak on error.
+ scoped_ptr<NSSKeyPair> keypair(new NSSKeyPair(privkey, pubkey));
+
+ scoped_ptr<NSSCertificate> cert(NSSCertificate::FromPEMString(certificate));
+ if (!cert) {
+ LOG(LS_ERROR) << "Couldn't parse certificate";
+ return NULL;
+ }
+
+ // TODO(ekr@rtfm.com): Check the public key against the certificate.
+
+ return new NSSIdentity(keypair.release(), cert.release());
+}
+
+NSSIdentity *NSSIdentity::GetReference() const {
+ NSSKeyPair *keypair = keypair_->GetReference();
+ if (!keypair)
+ return NULL;
+
+ NSSCertificate *certificate = certificate_->GetReference();
+ if (!certificate) {
+ delete keypair;
+ return NULL;
+ }
+
+ return new NSSIdentity(keypair, certificate);
+}
+
+
+NSSCertificate &NSSIdentity::certificate() const {
+ return *certificate_;
+}
+
+
+} // namespace rtc
+
+#endif // HAVE_NSS_SSL_H
+
diff --git a/chromium/third_party/webrtc/base/nssidentity.h b/chromium/third_party/webrtc/base/nssidentity.h
new file mode 100644
index 00000000000..2c56c002bf4
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nssidentity.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_NSSIDENTITY_H_
+#define WEBRTC_BASE_NSSIDENTITY_H_
+
+#include <string>
+
+#include "cert.h"
+#include "nspr.h"
+#include "hasht.h"
+#include "keythi.h"
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sslidentity.h"
+
+namespace rtc {
+
+class NSSKeyPair {
+ public:
+ NSSKeyPair(SECKEYPrivateKey* privkey, SECKEYPublicKey* pubkey) :
+ privkey_(privkey), pubkey_(pubkey) {}
+ ~NSSKeyPair();
+
+ // Generate a 1024-bit RSA key pair.
+ static NSSKeyPair* Generate();
+ NSSKeyPair* GetReference();
+
+ SECKEYPrivateKey* privkey() const { return privkey_; }
+ SECKEYPublicKey* pubkey() const { return pubkey_; }
+
+ private:
+ SECKEYPrivateKey* privkey_;
+ SECKEYPublicKey* pubkey_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(NSSKeyPair);
+};
+
+
+class NSSCertificate : public SSLCertificate {
+ public:
+ static NSSCertificate* FromPEMString(const std::string& pem_string);
+ // The caller retains ownership of the argument to all the constructors,
+ // and the constructor makes a copy.
+ explicit NSSCertificate(CERTCertificate* cert);
+ explicit NSSCertificate(CERTCertList* cert_list);
+ virtual ~NSSCertificate() {
+ if (certificate_)
+ CERT_DestroyCertificate(certificate_);
+ }
+
+ virtual NSSCertificate* GetReference() const;
+
+ virtual std::string ToPEMString() const;
+
+ virtual void ToDER(Buffer* der_buffer) const;
+
+ virtual bool GetSignatureDigestAlgorithm(std::string* algorithm) const;
+
+ virtual bool ComputeDigest(const std::string& algorithm,
+ unsigned char* digest,
+ size_t size,
+ size_t* length) const;
+
+ virtual bool GetChain(SSLCertChain** chain) const;
+
+ CERTCertificate* certificate() { return certificate_; }
+
+ // Performs minimal checks to determine if the list is a valid chain. This
+ // only checks that each certificate certifies the preceding certificate,
+ // and ignores many other certificate features such as expiration dates.
+ static bool IsValidChain(const CERTCertList* cert_list);
+
+ // Helper function to get the length of a digest
+ static bool GetDigestLength(const std::string& algorithm, size_t* length);
+
+ // Comparison. Only the certificate itself is considered, not the chain.
+ bool Equals(const NSSCertificate* tocompare) const;
+
+ private:
+ NSSCertificate(CERTCertificate* cert, SSLCertChain* chain);
+ static bool GetDigestObject(const std::string& algorithm,
+ const SECHashObject** hash_object);
+
+ CERTCertificate* certificate_;
+ scoped_ptr<SSLCertChain> chain_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(NSSCertificate);
+};
+
+// Represents a SSL key pair and certificate for NSS.
+class NSSIdentity : public SSLIdentity {
+ public:
+ static NSSIdentity* Generate(const std::string& common_name);
+ static NSSIdentity* GenerateForTest(const SSLIdentityParams& params);
+ static SSLIdentity* FromPEMStrings(const std::string& private_key,
+ const std::string& certificate);
+ virtual ~NSSIdentity() {
+ LOG(LS_INFO) << "Destroying NSS identity";
+ }
+
+ virtual NSSIdentity* GetReference() const;
+ virtual NSSCertificate& certificate() const;
+
+ NSSKeyPair* keypair() const { return keypair_.get(); }
+
+ private:
+ NSSIdentity(NSSKeyPair* keypair, NSSCertificate* cert) :
+ keypair_(keypair), certificate_(cert) {}
+
+ static NSSIdentity* GenerateInternal(const SSLIdentityParams& params);
+
+ rtc::scoped_ptr<NSSKeyPair> keypair_;
+ rtc::scoped_ptr<NSSCertificate> certificate_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(NSSIdentity);
+};
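+
+// Illustrative usage sketch (anything beyond the declarations above is an
+// assumption): an identity is typically generated once and then handed to an
+// SSL/DTLS stream adapter, which takes it by pointer.
+//
+//   rtc::scoped_ptr<NSSIdentity> identity(NSSIdentity::Generate("peer"));
+//   // identity->certificate() and identity->keypair() remain owned by it.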
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_NSSIDENTITY_H_
diff --git a/chromium/third_party/webrtc/base/nssstreamadapter.cc b/chromium/third_party/webrtc/base/nssstreamadapter.cc
new file mode 100644
index 00000000000..1d06c1c4f5f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nssstreamadapter.cc
@@ -0,0 +1,1020 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#if HAVE_CONFIG_H
+#include "config.h"
+#endif // HAVE_CONFIG_H
+
+#if HAVE_NSS_SSL_H
+
+#include "webrtc/base/nssstreamadapter.h"
+
+#include "keyhi.h"
+#include "nspr.h"
+#include "nss.h"
+#include "pk11pub.h"
+#include "secerr.h"
+
+#ifdef NSS_SSL_RELATIVE_PATH
+#include "ssl.h"
+#include "sslerr.h"
+#include "sslproto.h"
+#else
+#include "net/third_party/nss/ssl/ssl.h"
+#include "net/third_party/nss/ssl/sslerr.h"
+#include "net/third_party/nss/ssl/sslproto.h"
+#endif
+
+#include "webrtc/base/nssidentity.h"
+#include "webrtc/base/safe_conversions.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+PRDescIdentity NSSStreamAdapter::nspr_layer_identity = PR_INVALID_IO_LAYER;
+
+#define UNIMPLEMENTED \
+ PR_SetError(PR_NOT_IMPLEMENTED_ERROR, 0); \
+ LOG(LS_ERROR) \
+ << "Call to unimplemented function " << __FUNCTION__; \
+ ASSERT(false)
+
+#ifdef SRTP_AES128_CM_HMAC_SHA1_80
+#define HAVE_DTLS_SRTP
+#endif
+
+#ifdef HAVE_DTLS_SRTP
+// SRTP cipher suite table
+struct SrtpCipherMapEntry {
+ const char* external_name;
+ PRUint16 cipher_id;
+};
+
+// This isn't elegant, but it's better than an external reference
+static const SrtpCipherMapEntry kSrtpCipherMap[] = {
+ {"AES_CM_128_HMAC_SHA1_80", SRTP_AES128_CM_HMAC_SHA1_80 },
+ {"AES_CM_128_HMAC_SHA1_32", SRTP_AES128_CM_HMAC_SHA1_32 },
+ {NULL, 0}
+};
+#endif
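+
+// For illustration, the table above is meant to be scanned linearly up to the
+// NULL sentinel, mapping the external cipher names used by the DTLS-SRTP API
+// (see SetDtlsSrtpCiphers() further down) onto NSS cipher ids, roughly:
+//
+//   for (const SrtpCipherMapEntry* e = kSrtpCipherMap; e->external_name; ++e) {
+//     if (name == e->external_name) { /* use e->cipher_id */ }
+//   }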
+
+
+// Implementation of NSPR methods
+static PRStatus StreamClose(PRFileDesc *socket) {
+ ASSERT(!socket->lower);
+ socket->dtor(socket);
+ return PR_SUCCESS;
+}
+
+static PRInt32 StreamRead(PRFileDesc *socket, void *buf, PRInt32 length) {
+ StreamInterface *stream = reinterpret_cast<StreamInterface *>(socket->secret);
+ size_t read;
+ int error;
+ StreamResult result = stream->Read(buf, length, &read, &error);
+ if (result == SR_SUCCESS) {
+ return checked_cast<PRInt32>(read);
+ }
+
+ if (result == SR_EOS) {
+ return 0;
+ }
+
+ if (result == SR_BLOCK) {
+ PR_SetError(PR_WOULD_BLOCK_ERROR, 0);
+ return -1;
+ }
+
+ PR_SetError(PR_UNKNOWN_ERROR, error);
+ return -1;
+}
+
+static PRInt32 StreamWrite(PRFileDesc *socket, const void *buf,
+ PRInt32 length) {
+ StreamInterface *stream = reinterpret_cast<StreamInterface *>(socket->secret);
+ size_t written;
+ int error;
+ StreamResult result = stream->Write(buf, length, &written, &error);
+ if (result == SR_SUCCESS) {
+ return checked_cast<PRInt32>(written);
+ }
+
+ if (result == SR_BLOCK) {
+ LOG(LS_INFO) <<
+ "NSSStreamAdapter: write to underlying transport would block";
+ PR_SetError(PR_WOULD_BLOCK_ERROR, 0);
+ return -1;
+ }
+
+ LOG(LS_ERROR) << "Write error";
+ PR_SetError(PR_UNKNOWN_ERROR, error);
+ return -1;
+}
+
+static PRInt32 StreamAvailable(PRFileDesc *socket) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+PRInt64 StreamAvailable64(PRFileDesc *socket) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static PRStatus StreamSync(PRFileDesc *socket) {
+ UNIMPLEMENTED;
+ return PR_FAILURE;
+}
+
+static PROffset32 StreamSeek(PRFileDesc *socket, PROffset32 offset,
+ PRSeekWhence how) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static PROffset64 StreamSeek64(PRFileDesc *socket, PROffset64 offset,
+ PRSeekWhence how) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static PRStatus StreamFileInfo(PRFileDesc *socket, PRFileInfo *info) {
+ UNIMPLEMENTED;
+ return PR_FAILURE;
+}
+
+static PRStatus StreamFileInfo64(PRFileDesc *socket, PRFileInfo64 *info) {
+ UNIMPLEMENTED;
+ return PR_FAILURE;
+}
+
+static PRInt32 StreamWritev(PRFileDesc *socket, const PRIOVec *iov,
+ PRInt32 iov_size, PRIntervalTime timeout) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static PRStatus StreamConnect(PRFileDesc *socket, const PRNetAddr *addr,
+ PRIntervalTime timeout) {
+ UNIMPLEMENTED;
+ return PR_FAILURE;
+}
+
+static PRFileDesc *StreamAccept(PRFileDesc *sd, PRNetAddr *addr,
+ PRIntervalTime timeout) {
+ UNIMPLEMENTED;
+ return NULL;
+}
+
+static PRStatus StreamBind(PRFileDesc *socket, const PRNetAddr *addr) {
+ UNIMPLEMENTED;
+ return PR_FAILURE;
+}
+
+static PRStatus StreamListen(PRFileDesc *socket, PRIntn depth) {
+ UNIMPLEMENTED;
+ return PR_FAILURE;
+}
+
+static PRStatus StreamShutdown(PRFileDesc *socket, PRIntn how) {
+ UNIMPLEMENTED;
+ return PR_FAILURE;
+}
+
+// Note: this is always nonblocking and ignores the timeout.
+// TODO(ekr@rtfm.com): In future verify that the socket is
+// actually in non-blocking mode.
+// This function does not support peek.
+static PRInt32 StreamRecv(PRFileDesc *socket, void *buf, PRInt32 amount,
+ PRIntn flags, PRIntervalTime to) {
+ ASSERT(flags == 0);
+
+ if (flags != 0) {
+ PR_SetError(PR_NOT_IMPLEMENTED_ERROR, 0);
+ return -1;
+ }
+
+ return StreamRead(socket, buf, amount);
+}
+
+// Note: this is always nonblocking and assumes a zero timeout.
+// This function does not support peek.
+static PRInt32 StreamSend(PRFileDesc *socket, const void *buf,
+ PRInt32 amount, PRIntn flags,
+ PRIntervalTime to) {
+ ASSERT(flags == 0);
+
+ return StreamWrite(socket, buf, amount);
+}
+
+static PRInt32 StreamRecvfrom(PRFileDesc *socket, void *buf,
+ PRInt32 amount, PRIntn flags,
+ PRNetAddr *addr, PRIntervalTime to) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static PRInt32 StreamSendto(PRFileDesc *socket, const void *buf,
+ PRInt32 amount, PRIntn flags,
+ const PRNetAddr *addr, PRIntervalTime to) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static PRInt16 StreamPoll(PRFileDesc *socket, PRInt16 in_flags,
+ PRInt16 *out_flags) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static PRInt32 StreamAcceptRead(PRFileDesc *sd, PRFileDesc **nd,
+ PRNetAddr **raddr,
+ void *buf, PRInt32 amount, PRIntervalTime t) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static PRInt32 StreamTransmitFile(PRFileDesc *sd, PRFileDesc *socket,
+ const void *headers, PRInt32 hlen,
+ PRTransmitFileFlags flags, PRIntervalTime t) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static PRStatus StreamGetPeerName(PRFileDesc *socket, PRNetAddr *addr) {
+ // TODO(ekr@rtfm.com): Modify to return unique names for each channel
+ // somehow, as opposed to always the same static address. The current
+ // implementation messes up the session cache, which is why it's off
+ // elsewhere
+ addr->inet.family = PR_AF_INET;
+ addr->inet.port = 0;
+ addr->inet.ip = 0;
+
+ return PR_SUCCESS;
+}
+
+static PRStatus StreamGetSockName(PRFileDesc *socket, PRNetAddr *addr) {
+ UNIMPLEMENTED;
+ return PR_FAILURE;
+}
+
+static PRStatus StreamGetSockOption(PRFileDesc *socket,
+ PRSocketOptionData *opt) {
+ switch (opt->option) {
+ case PR_SockOpt_Nonblocking:
+ opt->value.non_blocking = PR_TRUE;
+ return PR_SUCCESS;
+ default:
+ UNIMPLEMENTED;
+ break;
+ }
+
+ return PR_FAILURE;
+}
+
+// Imitate setting socket options. These are mostly noops.
+static PRStatus StreamSetSockOption(PRFileDesc *socket,
+ const PRSocketOptionData *opt) {
+ switch (opt->option) {
+ case PR_SockOpt_Nonblocking:
+ return PR_SUCCESS;
+ case PR_SockOpt_NoDelay:
+ return PR_SUCCESS;
+ default:
+ UNIMPLEMENTED;
+ break;
+ }
+
+ return PR_FAILURE;
+}
+
+static PRInt32 StreamSendfile(PRFileDesc *out, PRSendFileData *in,
+ PRTransmitFileFlags flags, PRIntervalTime to) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static PRStatus StreamConnectContinue(PRFileDesc *socket, PRInt16 flags) {
+ UNIMPLEMENTED;
+ return PR_FAILURE;
+}
+
+static PRIntn StreamReserved(PRFileDesc *socket) {
+ UNIMPLEMENTED;
+ return -1;
+}
+
+static const struct PRIOMethods nss_methods = {
+ PR_DESC_LAYERED,
+ StreamClose,
+ StreamRead,
+ StreamWrite,
+ StreamAvailable,
+ StreamAvailable64,
+ StreamSync,
+ StreamSeek,
+ StreamSeek64,
+ StreamFileInfo,
+ StreamFileInfo64,
+ StreamWritev,
+ StreamConnect,
+ StreamAccept,
+ StreamBind,
+ StreamListen,
+ StreamShutdown,
+ StreamRecv,
+ StreamSend,
+ StreamRecvfrom,
+ StreamSendto,
+ StreamPoll,
+ StreamAcceptRead,
+ StreamTransmitFile,
+ StreamGetSockName,
+ StreamGetPeerName,
+ StreamReserved,
+ StreamReserved,
+ StreamGetSockOption,
+ StreamSetSockOption,
+ StreamSendfile,
+ StreamConnectContinue,
+ StreamReserved,
+ StreamReserved,
+ StreamReserved,
+ StreamReserved
+};
+
+NSSStreamAdapter::NSSStreamAdapter(StreamInterface *stream)
+ : SSLStreamAdapterHelper(stream),
+ ssl_fd_(NULL),
+ cert_ok_(false) {
+}
+
+bool NSSStreamAdapter::Init() {
+ if (nspr_layer_identity == PR_INVALID_IO_LAYER) {
+ nspr_layer_identity = PR_GetUniqueIdentity("nssstreamadapter");
+ }
+ PRFileDesc *pr_fd = PR_CreateIOLayerStub(nspr_layer_identity, &nss_methods);
+ if (!pr_fd)
+ return false;
+ pr_fd->secret = reinterpret_cast<PRFilePrivate *>(stream());
+
+ PRFileDesc *ssl_fd;
+ if (ssl_mode_ == SSL_MODE_DTLS) {
+ ssl_fd = DTLS_ImportFD(NULL, pr_fd);
+ } else {
+ ssl_fd = SSL_ImportFD(NULL, pr_fd);
+ }
+ ASSERT(ssl_fd != NULL); // This should never happen
+ if (!ssl_fd) {
+ PR_Close(pr_fd);
+ return false;
+ }
+
+ SECStatus rv;
+ // Turn on security.
+ rv = SSL_OptionSet(ssl_fd, SSL_SECURITY, PR_TRUE);
+ if (rv != SECSuccess) {
+ LOG(LS_ERROR) << "Error enabling security on SSL Socket";
+ return false;
+ }
+
+ // Disable SSLv2.
+ rv = SSL_OptionSet(ssl_fd, SSL_ENABLE_SSL2, PR_FALSE);
+ if (rv != SECSuccess) {
+ LOG(LS_ERROR) << "Error disabling SSL2";
+ return false;
+ }
+
+ // Disable caching.
+ // TODO(ekr@rtfm.com): restore this when I have the caching
+ // identity set.
+ rv = SSL_OptionSet(ssl_fd, SSL_NO_CACHE, PR_TRUE);
+ if (rv != SECSuccess) {
+ LOG(LS_ERROR) << "Error disabling cache";
+ return false;
+ }
+
+ // Disable session tickets.
+ rv = SSL_OptionSet(ssl_fd, SSL_ENABLE_SESSION_TICKETS, PR_FALSE);
+ if (rv != SECSuccess) {
+ LOG(LS_ERROR) << "Error disabling session tickets";
+ return false;
+ }
+
+ // Disable renegotiation.
+ rv = SSL_OptionSet(ssl_fd, SSL_ENABLE_RENEGOTIATION,
+ SSL_RENEGOTIATE_NEVER);
+ if (rv != SECSuccess) {
+ LOG(LS_ERROR) << "Error disabling renegotiation";
+ return false;
+ }
+
+ // Disable false start.
+ rv = SSL_OptionSet(ssl_fd, SSL_ENABLE_FALSE_START, PR_FALSE);
+ if (rv != SECSuccess) {
+ LOG(LS_ERROR) << "Error disabling false start";
+ return false;
+ }
+
+ ssl_fd_ = ssl_fd;
+
+ return true;
+}
+
+NSSStreamAdapter::~NSSStreamAdapter() {
+ if (ssl_fd_)
+ PR_Close(ssl_fd_);
+}
+
+
+int NSSStreamAdapter::BeginSSL() {
+ SECStatus rv;
+
+ if (!Init()) {
+ Error("Init", -1, false);
+ return -1;
+ }
+
+ ASSERT(state_ == SSL_CONNECTING);
+ // The underlying stream has been opened. If we are in peer-to-peer mode
+ // then a peer certificate must have been specified by now.
+ ASSERT(!ssl_server_name_.empty() ||
+ peer_certificate_.get() != NULL ||
+ !peer_certificate_digest_algorithm_.empty());
+ LOG(LS_INFO) << "BeginSSL: "
+ << (!ssl_server_name_.empty() ? ssl_server_name_ :
+ "with peer");
+
+ if (role_ == SSL_CLIENT) {
+ LOG(LS_INFO) << "BeginSSL: as client";
+
+ rv = SSL_GetClientAuthDataHook(ssl_fd_, GetClientAuthDataHook,
+ this);
+ if (rv != SECSuccess) {
+ Error("BeginSSL", -1, false);
+ return -1;
+ }
+ } else {
+ LOG(LS_INFO) << "BeginSSL: as server";
+ NSSIdentity *identity;
+
+ if (identity_.get()) {
+ identity = static_cast<NSSIdentity *>(identity_.get());
+ } else {
+ LOG(LS_ERROR) << "Can't be an SSL server without an identity";
+ Error("BeginSSL", -1, false);
+ return -1;
+ }
+ rv = SSL_ConfigSecureServer(ssl_fd_, identity->certificate().certificate(),
+ identity->keypair()->privkey(),
+ kt_rsa);
+ if (rv != SECSuccess) {
+ Error("BeginSSL", -1, false);
+ return -1;
+ }
+
+ // Insist on a certificate from the client
+ rv = SSL_OptionSet(ssl_fd_, SSL_REQUEST_CERTIFICATE, PR_TRUE);
+ if (rv != SECSuccess) {
+ Error("BeginSSL", -1, false);
+ return -1;
+ }
+
+ rv = SSL_OptionSet(ssl_fd_, SSL_REQUIRE_CERTIFICATE, PR_TRUE);
+ if (rv != SECSuccess) {
+ Error("BeginSSL", -1, false);
+ return -1;
+ }
+ }
+
+ // Set the version range.
+ SSLVersionRange vrange;
+ vrange.min = (ssl_mode_ == SSL_MODE_DTLS) ?
+ SSL_LIBRARY_VERSION_TLS_1_1 :
+ SSL_LIBRARY_VERSION_TLS_1_0;
+ vrange.max = SSL_LIBRARY_VERSION_TLS_1_1;
+
+ rv = SSL_VersionRangeSet(ssl_fd_, &vrange);
+ if (rv != SECSuccess) {
+ Error("BeginSSL", -1, false);
+ return -1;
+ }
+
+ // SRTP
+#ifdef HAVE_DTLS_SRTP
+ if (!srtp_ciphers_.empty()) {
+ rv = SSL_SetSRTPCiphers(
+ ssl_fd_, &srtp_ciphers_[0],
+ checked_cast<unsigned int>(srtp_ciphers_.size()));
+ if (rv != SECSuccess) {
+ Error("BeginSSL", -1, false);
+ return -1;
+ }
+ }
+#endif
+
+ // Certificate validation
+ rv = SSL_AuthCertificateHook(ssl_fd_, AuthCertificateHook, this);
+ if (rv != SECSuccess) {
+ Error("BeginSSL", -1, false);
+ return -1;
+ }
+
+ // Now start the handshake
+ rv = SSL_ResetHandshake(ssl_fd_, role_ == SSL_SERVER ? PR_TRUE : PR_FALSE);
+ if (rv != SECSuccess) {
+ Error("BeginSSL", -1, false);
+ return -1;
+ }
+
+ return ContinueSSL();
+}
+
+int NSSStreamAdapter::ContinueSSL() {
+ LOG(LS_INFO) << "ContinueSSL";
+ ASSERT(state_ == SSL_CONNECTING);
+
+ // Clear the DTLS timer
+ Thread::Current()->Clear(this, MSG_DTLS_TIMEOUT);
+
+ SECStatus rv = SSL_ForceHandshake(ssl_fd_);
+
+ if (rv == SECSuccess) {
+ LOG(LS_INFO) << "Handshake complete";
+
+ ASSERT(cert_ok_);
+ if (!cert_ok_) {
+ Error("ContinueSSL", -1, true);
+ return -1;
+ }
+
+ state_ = SSL_CONNECTED;
+ StreamAdapterInterface::OnEvent(stream(), SE_OPEN|SE_READ|SE_WRITE, 0);
+ return 0;
+ }
+
+ PRInt32 err = PR_GetError();
+ switch (err) {
+ case SSL_ERROR_RX_MALFORMED_HANDSHAKE:
+ if (ssl_mode_ != SSL_MODE_DTLS) {
+ Error("ContinueSSL", -1, true);
+ return -1;
+ } else {
+ LOG(LS_INFO) << "Malformed DTLS message. Ignoring.";
+ // Fall through
+ }
+ case PR_WOULD_BLOCK_ERROR:
+ LOG(LS_INFO) << "Would have blocked";
+ if (ssl_mode_ == SSL_MODE_DTLS) {
+ PRIntervalTime timeout;
+
+ SECStatus rv = DTLS_GetHandshakeTimeout(ssl_fd_, &timeout);
+ if (rv == SECSuccess) {
+ LOG(LS_INFO) << "Timeout is " << timeout << " ms";
+ Thread::Current()->PostDelayed(PR_IntervalToMilliseconds(timeout),
+ this, MSG_DTLS_TIMEOUT, 0);
+ }
+ }
+
+ return 0;
+ default:
+ LOG(LS_INFO) << "Error " << err;
+ break;
+ }
+
+ Error("ContinueSSL", -1, true);
+ return -1;
+}
+
+void NSSStreamAdapter::Cleanup() {
+ if (state_ != SSL_ERROR) {
+ state_ = SSL_CLOSED;
+ }
+
+ if (ssl_fd_) {
+ PR_Close(ssl_fd_);
+ ssl_fd_ = NULL;
+ }
+
+ identity_.reset();
+ peer_certificate_.reset();
+
+ Thread::Current()->Clear(this, MSG_DTLS_TIMEOUT);
+}
+
+StreamResult NSSStreamAdapter::Read(void* data, size_t data_len,
+ size_t* read, int* error) {
+ // SSL_CONNECTED sanity check.
+ switch (state_) {
+ case SSL_NONE:
+ case SSL_WAIT:
+ case SSL_CONNECTING:
+ return SR_BLOCK;
+
+ case SSL_CONNECTED:
+ break;
+
+ case SSL_CLOSED:
+ return SR_EOS;
+
+ case SSL_ERROR:
+ default:
+ if (error)
+ *error = ssl_error_code_;
+ return SR_ERROR;
+ }
+
+ PRInt32 rv = PR_Read(ssl_fd_, data, checked_cast<PRInt32>(data_len));
+
+ if (rv == 0) {
+ return SR_EOS;
+ }
+
+ // Error
+ if (rv < 0) {
+ PRInt32 err = PR_GetError();
+
+ switch (err) {
+ case PR_WOULD_BLOCK_ERROR:
+ return SR_BLOCK;
+ default:
+ Error("Read", -1, false);
+ *error = err; // libjingle semantics are that this is impl-specific
+ return SR_ERROR;
+ }
+ }
+
+ // Success
+ *read = rv;
+
+ return SR_SUCCESS;
+}
+
+StreamResult NSSStreamAdapter::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ // SSL_CONNECTED sanity check.
+ switch (state_) {
+ case SSL_NONE:
+ case SSL_WAIT:
+ case SSL_CONNECTING:
+ return SR_BLOCK;
+
+ case SSL_CONNECTED:
+ break;
+
+ case SSL_ERROR:
+ case SSL_CLOSED:
+ default:
+ if (error)
+ *error = ssl_error_code_;
+ return SR_ERROR;
+ }
+
+ PRInt32 rv = PR_Write(ssl_fd_, data, checked_cast<PRInt32>(data_len));
+
+ // Error
+ if (rv < 0) {
+ PRInt32 err = PR_GetError();
+
+ switch (err) {
+ case PR_WOULD_BLOCK_ERROR:
+ return SR_BLOCK;
+ default:
+ Error("Write", -1, false);
+ *error = err; // libjingle semantics are that this is impl-specific
+ return SR_ERROR;
+ }
+ }
+
+ // Success
+ *written = rv;
+
+ return SR_SUCCESS;
+}
+
+void NSSStreamAdapter::OnEvent(StreamInterface* stream, int events,
+ int err) {
+ int events_to_signal = 0;
+ int signal_error = 0;
+ ASSERT(stream == this->stream());
+ if ((events & SE_OPEN)) {
+ LOG(LS_INFO) << "NSSStreamAdapter::OnEvent SE_OPEN";
+ if (state_ != SSL_WAIT) {
+ ASSERT(state_ == SSL_NONE);
+ events_to_signal |= SE_OPEN;
+ } else {
+ state_ = SSL_CONNECTING;
+ if (int err = BeginSSL()) {
+ Error("BeginSSL", err, true);
+ return;
+ }
+ }
+ }
+ if ((events & (SE_READ|SE_WRITE))) {
+ LOG(LS_INFO) << "NSSStreamAdapter::OnEvent"
+ << ((events & SE_READ) ? " SE_READ" : "")
+ << ((events & SE_WRITE) ? " SE_WRITE" : "");
+ if (state_ == SSL_NONE) {
+ events_to_signal |= events & (SE_READ|SE_WRITE);
+ } else if (state_ == SSL_CONNECTING) {
+ if (int err = ContinueSSL()) {
+ Error("ContinueSSL", err, true);
+ return;
+ }
+ } else if (state_ == SSL_CONNECTED) {
+ if (events & SE_WRITE) {
+ LOG(LS_INFO) << " -- onStreamWriteable";
+ events_to_signal |= SE_WRITE;
+ }
+ if (events & SE_READ) {
+ LOG(LS_INFO) << " -- onStreamReadable";
+ events_to_signal |= SE_READ;
+ }
+ }
+ }
+ if ((events & SE_CLOSE)) {
+ LOG(LS_INFO) << "NSSStreamAdapter::OnEvent(SE_CLOSE, " << err << ")";
+ Cleanup();
+ events_to_signal |= SE_CLOSE;
+ // SE_CLOSE is the only event that uses the final parameter to OnEvent().
+ ASSERT(signal_error == 0);
+ signal_error = err;
+ }
+ if (events_to_signal)
+ StreamAdapterInterface::OnEvent(stream, events_to_signal, signal_error);
+}
+
+void NSSStreamAdapter::OnMessage(Message* msg) {
+ // Process our own messages and then pass others to the superclass
+ if (MSG_DTLS_TIMEOUT == msg->message_id) {
+ LOG(LS_INFO) << "DTLS timeout expired";
+ ContinueSSL();
+ } else {
+ StreamInterface::OnMessage(msg);
+ }
+}
+
+// Certificate verification callback. Called to check the certificate
+// presented by the peer.
+SECStatus NSSStreamAdapter::AuthCertificateHook(void *arg,
+ PRFileDesc *fd,
+ PRBool checksig,
+ PRBool isServer) {
+ LOG(LS_INFO) << "NSSStreamAdapter::AuthCertificateHook";
+ // SSL_PeerCertificate returns a pointer that is owned by the caller, and
+ // the NSSCertificate constructor copies its argument, so |raw_peer_cert|
+ // must be destroyed in this function.
+ CERTCertificate* raw_peer_cert = SSL_PeerCertificate(fd);
+ NSSCertificate peer_cert(raw_peer_cert);
+ CERT_DestroyCertificate(raw_peer_cert);
+
+ NSSStreamAdapter *stream = reinterpret_cast<NSSStreamAdapter *>(arg);
+ stream->cert_ok_ = false;
+
+ // Read the peer's certificate chain.
+ CERTCertList* cert_list = SSL_PeerCertificateChain(fd);
+ ASSERT(cert_list != NULL);
+
+ // If the peer provided multiple certificates, check that they form a valid
+ // chain as defined by RFC 5246 Section 7.4.2: "Each following certificate
+ // MUST directly certify the one preceding it.". This check does NOT
+ // verify other requirements, such as whether the chain reaches a trusted
+ // root, self-signed certificates have valid signatures, certificates are not
+ // expired, etc.
+ // Even if the chain is valid, the leaf certificate must still match a
+ // provided certificate or digest.
+ if (!NSSCertificate::IsValidChain(cert_list)) {
+ CERT_DestroyCertList(cert_list);
+ PORT_SetError(SEC_ERROR_BAD_SIGNATURE);
+ return SECFailure;
+ }
+
+ if (stream->peer_certificate_.get()) {
+ LOG(LS_INFO) << "Checking against specified certificate";
+
+ // The peer certificate was specified
+ if (reinterpret_cast<NSSCertificate *>(stream->peer_certificate_.get())->
+ Equals(&peer_cert)) {
+ LOG(LS_INFO) << "Accepted peer certificate";
+ stream->cert_ok_ = true;
+ }
+ } else if (!stream->peer_certificate_digest_algorithm_.empty()) {
+ LOG(LS_INFO) << "Checking against specified digest";
+ // The peer certificate digest was specified
+ unsigned char digest[64]; // Maximum size
+ size_t digest_length;
+
+ if (!peer_cert.ComputeDigest(
+ stream->peer_certificate_digest_algorithm_,
+ digest, sizeof(digest), &digest_length)) {
+ LOG(LS_ERROR) << "Digest computation failed";
+ } else {
+ Buffer computed_digest(digest, digest_length);
+ if (computed_digest == stream->peer_certificate_digest_value_) {
+ LOG(LS_INFO) << "Accepted peer certificate";
+ stream->cert_ok_ = true;
+ }
+ }
+ } else {
+ // Other modes that we haven't implemented yet.
+ // TODO(ekr@rtfm.com): Implement real certificate validation
+ UNIMPLEMENTED;
+ }
+
+ if (!stream->cert_ok_ && stream->ignore_bad_cert()) {
+ LOG(LS_WARNING) << "Ignoring cert error while verifying cert chain";
+ stream->cert_ok_ = true;
+ }
+
+ if (stream->cert_ok_)
+ stream->peer_certificate_.reset(new NSSCertificate(cert_list));
+
+ CERT_DestroyCertList(cert_list);
+
+ if (stream->cert_ok_)
+ return SECSuccess;
+
+ PORT_SetError(SEC_ERROR_UNTRUSTED_CERT);
+ return SECFailure;
+}
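
The hook above only accepts the peer when the leaf certificate matches either a pre-set certificate or a pre-set digest. A hedged caller-side sketch, assuming the SSLStreamAdapter::Create(), SetMode() and SetPeerCertificateDigest() declarations from sslstreamadapter.h (not part of this hunk); the digest bytes would normally arrive via signaling, e.g. an SDP a=fingerprint line:

    #include "webrtc/base/messagedigest.h"
    #include "webrtc/base/sslstreamadapter.h"

    // Wraps |transport| in a DTLS stream that pins the peer to a SHA-256
    // fingerprint, which is what AuthCertificateHook() ends up enforcing.
    rtc::SSLStreamAdapter* WrapWithPinnedPeer(rtc::StreamInterface* transport,
                                              const unsigned char* digest,
                                              size_t digest_len) {
      rtc::SSLStreamAdapter* adapter = rtc::SSLStreamAdapter::Create(transport);
      if (!adapter)
        return NULL;
      adapter->SetMode(rtc::SSL_MODE_DTLS);
      if (!adapter->SetPeerCertificateDigest(rtc::DIGEST_SHA_256,
                                             digest, digest_len)) {
        delete adapter;
        return NULL;
      }
      return adapter;
    }
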
+
+
+SECStatus NSSStreamAdapter::GetClientAuthDataHook(void *arg, PRFileDesc *fd,
+ CERTDistNames *caNames,
+ CERTCertificate **pRetCert,
+ SECKEYPrivateKey **pRetKey) {
+ LOG(LS_INFO) << "Client cert requested";
+ NSSStreamAdapter *stream = reinterpret_cast<NSSStreamAdapter *>(arg);
+
+ if (!stream->identity_.get()) {
+ LOG(LS_ERROR) << "No identity available";
+ return SECFailure;
+ }
+
+ NSSIdentity *identity = static_cast<NSSIdentity *>(stream->identity_.get());
+ // Destroyed internally by NSS
+ *pRetCert = CERT_DupCertificate(identity->certificate().certificate());
+ *pRetKey = SECKEY_CopyPrivateKey(identity->keypair()->privkey());
+
+ return SECSuccess;
+}
+
+// RFC 5705 Key Exporter
+bool NSSStreamAdapter::ExportKeyingMaterial(const std::string& label,
+ const uint8* context,
+ size_t context_len,
+ bool use_context,
+ uint8* result,
+ size_t result_len) {
+ SECStatus rv = SSL_ExportKeyingMaterial(
+ ssl_fd_,
+ label.c_str(),
+ checked_cast<unsigned int>(label.size()),
+ use_context,
+ context,
+ checked_cast<unsigned int>(context_len),
+ result,
+ checked_cast<unsigned int>(result_len));
+
+ return rv == SECSuccess;
+}
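
ExportKeyingMaterial() is the RFC 5705 exporter that DTLS-SRTP (RFC 5764) builds on. A sketch of deriving the SRTP master keys and salts after the handshake; the "EXTRACTOR-dtls_srtp" label and the key/salt layout come from RFC 5764, and the 16/14-byte sizes assume the AES_CM_128_HMAC_SHA1_80 profile:

    #include <vector>
    #include "webrtc/base/sslstreamadapter.h"

    static const char kDtlsSrtpExporterLabel[] = "EXTRACTOR-dtls_srtp";
    static const size_t kSrtpKeyLen = 16;   // AES_CM_128 master key.
    static const size_t kSrtpSaltLen = 14;  // Master salt.

    bool DeriveSrtpKeys(rtc::SSLStreamAdapter* adapter,
                        std::vector<unsigned char>* client_key_and_salt,
                        std::vector<unsigned char>* server_key_and_salt) {
      unsigned char material[2 * (kSrtpKeyLen + kSrtpSaltLen)];  // 60 bytes.
      if (!adapter->ExportKeyingMaterial(kDtlsSrtpExporterLabel,
                                         NULL, 0, false /* use_context */,
                                         material, sizeof(material))) {
        return false;
      }
      // Exported layout: client_key | server_key | client_salt | server_salt.
      const unsigned char* client_key = material;
      const unsigned char* server_key = material + kSrtpKeyLen;
      const unsigned char* client_salt = material + 2 * kSrtpKeyLen;
      const unsigned char* server_salt = client_salt + kSrtpSaltLen;
      client_key_and_salt->assign(client_key, client_key + kSrtpKeyLen);
      client_key_and_salt->insert(client_key_and_salt->end(),
                                  client_salt, client_salt + kSrtpSaltLen);
      server_key_and_salt->assign(server_key, server_key + kSrtpKeyLen);
      server_key_and_salt->insert(server_key_and_salt->end(),
                                  server_salt, server_salt + kSrtpSaltLen);
      return true;
    }
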
+
+bool NSSStreamAdapter::SetDtlsSrtpCiphers(
+ const std::vector<std::string>& ciphers) {
+#ifdef HAVE_DTLS_SRTP
+ std::vector<PRUint16> internal_ciphers;
+ if (state_ != SSL_NONE)
+ return false;
+
+ for (std::vector<std::string>::const_iterator cipher = ciphers.begin();
+ cipher != ciphers.end(); ++cipher) {
+ bool found = false;
+ for (const SrtpCipherMapEntry *entry = kSrtpCipherMap; entry->cipher_id;
+ ++entry) {
+ if (*cipher == entry->external_name) {
+ found = true;
+ internal_ciphers.push_back(entry->cipher_id);
+ break;
+ }
+ }
+
+ if (!found) {
+ LOG(LS_ERROR) << "Could not find cipher: " << *cipher;
+ return false;
+ }
+ }
+
+ if (internal_ciphers.empty())
+ return false;
+
+ srtp_ciphers_ = internal_ciphers;
+
+ return true;
+#else
+ return false;
+#endif
+}
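
SetDtlsSrtpCiphers() must run before the handshake (state_ == SSL_NONE), and GetDtlsSrtpCipher() below only answers once the connection is up. A sketch of that ordering; the profile string is an assumption and would have to match an external_name entry in kSrtpCipherMap ("AES_CM_128_HMAC_SHA1_80" is the usual one):

    #include <string>
    #include <vector>
    #include "webrtc/base/sslstreamadapter.h"

    // Offer a single SRTP protection profile before StartSSL*() is called.
    bool OfferDtlsSrtp(rtc::SSLStreamAdapter* adapter) {
      std::vector<std::string> ciphers;
      ciphers.push_back("AES_CM_128_HMAC_SHA1_80");  // Assumed map entry.
      return adapter->SetDtlsSrtpCiphers(ciphers);
    }

    // After the handshake, report which profile the peer agreed to.
    bool NegotiatedSrtpCipher(rtc::SSLStreamAdapter* adapter,
                              std::string* cipher) {
      return adapter->GetDtlsSrtpCipher(cipher);
    }
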
+
+bool NSSStreamAdapter::GetDtlsSrtpCipher(std::string* cipher) {
+#ifdef HAVE_DTLS_SRTP
+ ASSERT(state_ == SSL_CONNECTED);
+ if (state_ != SSL_CONNECTED)
+ return false;
+
+ PRUint16 selected_cipher;
+
+ SECStatus rv = SSL_GetSRTPCipher(ssl_fd_, &selected_cipher);
+ if (rv == SECFailure)
+ return false;
+
+ for (const SrtpCipherMapEntry *entry = kSrtpCipherMap;
+ entry->cipher_id; ++entry) {
+ if (selected_cipher == entry->cipher_id) {
+ *cipher = entry->external_name;
+ return true;
+ }
+ }
+
+ ASSERT(false); // This should never happen
+#endif
+ return false;
+}
+
+
+bool NSSContext::initialized;
+NSSContext *NSSContext::global_nss_context;
+
+// Static initialization and shutdown
+NSSContext *NSSContext::Instance() {
+ if (!global_nss_context) {
+ NSSContext *new_ctx = new NSSContext();
+
+ if (!(new_ctx->slot_ = PK11_GetInternalSlot())) {
+ delete new_ctx;
+ goto fail;
+ }
+
+ global_nss_context = new_ctx;
+ }
+
+ fail:
+ return global_nss_context;
+}
+
+
+
+bool NSSContext::InitializeSSL(VerificationCallback callback) {
+ ASSERT(!callback);
+
+ if (!initialized) {
+ SECStatus rv;
+
+ rv = NSS_NoDB_Init(NULL);
+ if (rv != SECSuccess) {
+ LOG(LS_ERROR) << "Couldn't initialize NSS error=" <<
+ PORT_GetError();
+ return false;
+ }
+
+ NSS_SetDomesticPolicy();
+
+ initialized = true;
+ }
+
+ return true;
+}
+
+bool NSSContext::InitializeSSLThread() {
+ // Not needed
+ return true;
+}
+
+bool NSSContext::CleanupSSL() {
+ // Not needed
+ return true;
+}
+
+bool NSSStreamAdapter::HaveDtls() {
+ return true;
+}
+
+bool NSSStreamAdapter::HaveDtlsSrtp() {
+#ifdef HAVE_DTLS_SRTP
+ return true;
+#else
+ return false;
+#endif
+}
+
+bool NSSStreamAdapter::HaveExporter() {
+ return true;
+}
+
+} // namespace rtc
+
+#endif // HAVE_NSS_SSL_H
diff --git a/chromium/third_party/webrtc/base/nssstreamadapter.h b/chromium/third_party/webrtc/base/nssstreamadapter.h
new file mode 100644
index 00000000000..210a47933d4
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nssstreamadapter.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_NSSSTREAMADAPTER_H_
+#define WEBRTC_BASE_NSSSTREAMADAPTER_H_
+
+#include <string>
+#include <vector>
+
+#include "nspr.h"
+#include "nss.h"
+#include "secmodt.h"
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/nssidentity.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/sslstreamadapterhelper.h"
+
+namespace rtc {
+
+// Singleton
+class NSSContext {
+ public:
+ NSSContext() {}
+ ~NSSContext() {}
+
+ static PK11SlotInfo *GetSlot() {
+ return Instance() ? Instance()->slot_: NULL;
+ }
+
+ static NSSContext *Instance();
+ static bool InitializeSSL(VerificationCallback callback);
+ static bool InitializeSSLThread();
+ static bool CleanupSSL();
+
+ private:
+ PK11SlotInfo *slot_; // The PKCS-11 slot
+ static bool initialized; // Was this initialized?
+ static NSSContext *global_nss_context; // The global context
+};
+
+
+class NSSStreamAdapter : public SSLStreamAdapterHelper {
+ public:
+ explicit NSSStreamAdapter(StreamInterface* stream);
+ virtual ~NSSStreamAdapter();
+ bool Init();
+
+ virtual StreamResult Read(void* data, size_t data_len,
+ size_t* read, int* error);
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+ void OnMessage(Message *msg);
+
+ // Key Extractor interface
+ virtual bool ExportKeyingMaterial(const std::string& label,
+ const uint8* context,
+ size_t context_len,
+ bool use_context,
+ uint8* result,
+ size_t result_len);
+
+ // DTLS-SRTP interface
+ virtual bool SetDtlsSrtpCiphers(const std::vector<std::string>& ciphers);
+ virtual bool GetDtlsSrtpCipher(std::string* cipher);
+
+ // Capabilities interfaces
+ static bool HaveDtls();
+ static bool HaveDtlsSrtp();
+ static bool HaveExporter();
+
+ protected:
+ // Override SSLStreamAdapter
+ virtual void OnEvent(StreamInterface* stream, int events, int err);
+
+ // Override SSLStreamAdapterHelper
+ virtual int BeginSSL();
+ virtual void Cleanup();
+ virtual bool GetDigestLength(const std::string& algorithm, size_t* length) {
+ return NSSCertificate::GetDigestLength(algorithm, length);
+ }
+
+ private:
+ int ContinueSSL();
+ static SECStatus AuthCertificateHook(void *arg, PRFileDesc *fd,
+ PRBool checksig, PRBool isServer);
+ static SECStatus GetClientAuthDataHook(void *arg, PRFileDesc *fd,
+ CERTDistNames *caNames,
+ CERTCertificate **pRetCert,
+ SECKEYPrivateKey **pRetKey);
+
+ PRFileDesc *ssl_fd_; // NSS's SSL file descriptor
+ static bool initialized; // Was InitializeSSL() called?
+ bool cert_ok_; // Did we get and check a cert
+ std::vector<PRUint16> srtp_ciphers_; // SRTP cipher list
+
+ static PRDescIdentity nspr_layer_identity; // The NSPR layer identity
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_NSSSTREAMADAPTER_H_
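
The three static capability probes at the bottom of the class are what callers use to decide whether DTLS-SRTP can even be offered on this build. A small sketch:

    #include "webrtc/base/nssstreamadapter.h"

    // True only when the build has DTLS, the HAVE_DTLS_SRTP extension and the
    // RFC 5705 exporter, i.e. everything DTLS-SRTP needs.
    bool CanOfferDtlsSrtp() {
      return rtc::NSSStreamAdapter::HaveDtls() &&
             rtc::NSSStreamAdapter::HaveDtlsSrtp() &&
             rtc::NSSStreamAdapter::HaveExporter();
    }
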
diff --git a/chromium/third_party/webrtc/base/nullsocketserver.h b/chromium/third_party/webrtc/base/nullsocketserver.h
new file mode 100644
index 00000000000..5378e43158c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nullsocketserver.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_NULLSOCKETSERVER_H_
+#define WEBRTC_BASE_NULLSOCKETSERVER_H_
+
+#include "webrtc/base/event.h"
+#include "webrtc/base/physicalsocketserver.h"
+
+namespace rtc {
+
+// NullSocketServer
+
+class NullSocketServer : public rtc::SocketServer {
+ public:
+ NullSocketServer() : event_(false, false) {}
+
+ virtual bool Wait(int cms, bool process_io) {
+ event_.Wait(cms);
+ return true;
+ }
+
+ virtual void WakeUp() {
+ event_.Set();
+ }
+
+ virtual rtc::Socket* CreateSocket(int type) {
+ ASSERT(false);
+ return NULL;
+ }
+
+ virtual rtc::Socket* CreateSocket(int family, int type) {
+ ASSERT(false);
+ return NULL;
+ }
+
+ virtual rtc::AsyncSocket* CreateAsyncSocket(int type) {
+ ASSERT(false);
+ return NULL;
+ }
+
+ virtual rtc::AsyncSocket* CreateAsyncSocket(int family, int type) {
+ ASSERT(false);
+ return NULL;
+ }
+
+
+ private:
+ rtc::Event event_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_NULLSOCKETSERVER_H_
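
NullSocketServer is for threads that never touch sockets: Wait() just parks on an Event until another thread calls WakeUp(), and every Create*Socket() call asserts. A usage sketch:

    #include "webrtc/base/nullsocketserver.h"

    // Blocks the calling thread until some other thread calls ss->WakeUp().
    // The process_io flag is ignored by this implementation.
    void SleepUntilWoken(rtc::NullSocketServer* ss) {
      ss->Wait(rtc::kForever, false);
    }
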
diff --git a/chromium/third_party/webrtc/base/nullsocketserver_unittest.cc b/chromium/third_party/webrtc/base/nullsocketserver_unittest.cc
new file mode 100644
index 00000000000..fe21f6ad00c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nullsocketserver_unittest.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/nullsocketserver.h"
+
+namespace rtc {
+
+static const uint32 kTimeout = 5000U;
+
+class NullSocketServerTest
+ : public testing::Test,
+ public MessageHandler {
+ public:
+ NullSocketServerTest() {}
+ protected:
+ virtual void OnMessage(Message* message) {
+ ss_.WakeUp();
+ }
+ NullSocketServer ss_;
+};
+
+TEST_F(NullSocketServerTest, WaitAndSet) {
+ Thread thread;
+ EXPECT_TRUE(thread.Start());
+ thread.Post(this, 0);
+ // The process_io flag will be ignored.
+ const bool process_io = true;
+ EXPECT_TRUE_WAIT(ss_.Wait(rtc::kForever, process_io), kTimeout);
+}
+
+TEST_F(NullSocketServerTest, TestWait) {
+ uint32 start = Time();
+ ss_.Wait(200, true);
+ // The actual wait time is dependent on the resolution of the timer used by
+ // the Event class. Allow for the event to signal ~20ms early.
+ EXPECT_GE(TimeSince(start), 180);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/openssl.h b/chromium/third_party/webrtc/base/openssl.h
new file mode 100644
index 00000000000..2071619d5d5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/openssl.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_OPENSSL_H_
+#define WEBRTC_BASE_OPENSSL_H_
+
+#include <openssl/ssl.h>
+
+#if (OPENSSL_VERSION_NUMBER < 0x10000000L)
+#error OpenSSL is older than 1.0.0, which is the minimum supported version.
+#endif
+
+#endif // WEBRTC_BASE_OPENSSL_H_
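
For reference (not part of the patch): OPENSSL_VERSION_NUMBER packs the version as 0xMNNFFPPS (major, minor, fix, patch, status), so the 1.0.0 release is 0x1000000fL and anything below 0x10000000L, e.g. 0.9.8, trips the #error above. The same scheme is used later in this patch to gate 1.0.1-only features:

    #include "webrtc/base/openssl.h"

    #if OPENSSL_VERSION_NUMBER >= 0x10001000L
    // OpenSSL 1.0.1 or newer: TLS 1.1/1.2 and the DTLS-SRTP entry points exist.
    #endif
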
diff --git a/chromium/third_party/webrtc/base/openssladapter.cc b/chromium/third_party/webrtc/base/openssladapter.cc
new file mode 100644
index 00000000000..3618aadaae1
--- /dev/null
+++ b/chromium/third_party/webrtc/base/openssladapter.cc
@@ -0,0 +1,884 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if HAVE_OPENSSL_SSL_H
+
+#include "webrtc/base/openssladapter.h"
+
+#if defined(WEBRTC_POSIX)
+#include <unistd.h>
+#endif
+
+// Must be included before the OpenSSL headers.
+#include "webrtc/base/win32.h" // NOLINT
+
+#include <openssl/bio.h>
+#include <openssl/crypto.h>
+#include <openssl/err.h>
+#include <openssl/opensslv.h>
+#include <openssl/rand.h>
+#include <openssl/x509v3.h>
+
+#if HAVE_CONFIG_H
+#include "config.h"
+#endif // HAVE_CONFIG_H
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/openssl.h"
+#include "webrtc/base/sslroots.h"
+#include "webrtc/base/stringutils.h"
+
+// TODO: Use a nicer abstraction for mutex.
+
+#if defined(WEBRTC_WIN)
+ #define MUTEX_TYPE HANDLE
+ #define MUTEX_SETUP(x) (x) = CreateMutex(NULL, FALSE, NULL)
+ #define MUTEX_CLEANUP(x) CloseHandle(x)
+ #define MUTEX_LOCK(x) WaitForSingleObject((x), INFINITE)
+ #define MUTEX_UNLOCK(x) ReleaseMutex(x)
+ #define THREAD_ID GetCurrentThreadId()
+#elif defined(WEBRTC_POSIX)
+ #define MUTEX_TYPE pthread_mutex_t
+ #define MUTEX_SETUP(x) pthread_mutex_init(&(x), NULL)
+ #define MUTEX_CLEANUP(x) pthread_mutex_destroy(&(x))
+ #define MUTEX_LOCK(x) pthread_mutex_lock(&(x))
+ #define MUTEX_UNLOCK(x) pthread_mutex_unlock(&(x))
+ #define THREAD_ID pthread_self()
+#else
+ #error You must define mutex operations appropriate for your platform!
+#endif
+
+struct CRYPTO_dynlock_value {
+ MUTEX_TYPE mutex;
+};
+
+//////////////////////////////////////////////////////////////////////
+// SocketBIO
+//////////////////////////////////////////////////////////////////////
+
+static int socket_write(BIO* h, const char* buf, int num);
+static int socket_read(BIO* h, char* buf, int size);
+static int socket_puts(BIO* h, const char* str);
+static long socket_ctrl(BIO* h, int cmd, long arg1, void* arg2);
+static int socket_new(BIO* h);
+static int socket_free(BIO* data);
+
+static BIO_METHOD methods_socket = {
+ BIO_TYPE_BIO,
+ "socket",
+ socket_write,
+ socket_read,
+ socket_puts,
+ 0,
+ socket_ctrl,
+ socket_new,
+ socket_free,
+ NULL,
+};
+
+static BIO_METHOD* BIO_s_socket2() { return(&methods_socket); }
+
+static BIO* BIO_new_socket(rtc::AsyncSocket* socket) {
+ BIO* ret = BIO_new(BIO_s_socket2());
+ if (ret == NULL) {
+ return NULL;
+ }
+ ret->ptr = socket;
+ return ret;
+}
+
+static int socket_new(BIO* b) {
+ b->shutdown = 0;
+ b->init = 1;
+ b->num = 0; // 1 means socket closed
+ b->ptr = 0;
+ return 1;
+}
+
+static int socket_free(BIO* b) {
+ if (b == NULL)
+ return 0;
+ return 1;
+}
+
+static int socket_read(BIO* b, char* out, int outl) {
+ if (!out)
+ return -1;
+ rtc::AsyncSocket* socket = static_cast<rtc::AsyncSocket*>(b->ptr);
+ BIO_clear_retry_flags(b);
+ int result = socket->Recv(out, outl);
+ if (result > 0) {
+ return result;
+ } else if (result == 0) {
+ b->num = 1;
+ } else if (socket->IsBlocking()) {
+ BIO_set_retry_read(b);
+ }
+ return -1;
+}
+
+static int socket_write(BIO* b, const char* in, int inl) {
+ if (!in)
+ return -1;
+ rtc::AsyncSocket* socket = static_cast<rtc::AsyncSocket*>(b->ptr);
+ BIO_clear_retry_flags(b);
+ int result = socket->Send(in, inl);
+ if (result > 0) {
+ return result;
+ } else if (socket->IsBlocking()) {
+ BIO_set_retry_write(b);
+ }
+ return -1;
+}
+
+static int socket_puts(BIO* b, const char* str) {
+ return socket_write(b, str, strlen(str));
+}
+
+static long socket_ctrl(BIO* b, int cmd, long num, void* ptr) {
+ RTC_UNUSED(num);
+ RTC_UNUSED(ptr);
+
+ switch (cmd) {
+ case BIO_CTRL_RESET:
+ return 0;
+ case BIO_CTRL_EOF:
+ return b->num;
+ case BIO_CTRL_WPENDING:
+ case BIO_CTRL_PENDING:
+ return 0;
+ case BIO_CTRL_FLUSH:
+ return 1;
+ default:
+ return 0;
+ }
+}
+
+/////////////////////////////////////////////////////////////////////////////
+// OpenSSLAdapter
+/////////////////////////////////////////////////////////////////////////////
+
+namespace rtc {
+
+// This array will store all of the mutexes available to OpenSSL.
+static MUTEX_TYPE* mutex_buf = NULL;
+
+static void locking_function(int mode, int n, const char * file, int line) {
+ if (mode & CRYPTO_LOCK) {
+ MUTEX_LOCK(mutex_buf[n]);
+ } else {
+ MUTEX_UNLOCK(mutex_buf[n]);
+ }
+}
+
+static unsigned long id_function() { // NOLINT
+ // Use old-style C cast because THREAD_ID's type varies with the platform,
+ // in some cases requiring static_cast, and in others requiring
+ // reinterpret_cast.
+ return (unsigned long)THREAD_ID; // NOLINT
+}
+
+static CRYPTO_dynlock_value* dyn_create_function(const char* file, int line) {
+ CRYPTO_dynlock_value* value = new CRYPTO_dynlock_value;
+ if (!value)
+ return NULL;
+ MUTEX_SETUP(value->mutex);
+ return value;
+}
+
+static void dyn_lock_function(int mode, CRYPTO_dynlock_value* l,
+ const char* file, int line) {
+ if (mode & CRYPTO_LOCK) {
+ MUTEX_LOCK(l->mutex);
+ } else {
+ MUTEX_UNLOCK(l->mutex);
+ }
+}
+
+static void dyn_destroy_function(CRYPTO_dynlock_value* l,
+ const char* file, int line) {
+ MUTEX_CLEANUP(l->mutex);
+ delete l;
+}
+
+VerificationCallback OpenSSLAdapter::custom_verify_callback_ = NULL;
+
+bool OpenSSLAdapter::InitializeSSL(VerificationCallback callback) {
+ if (!InitializeSSLThread() || !SSL_library_init())
+ return false;
+#if !defined(ADDRESS_SANITIZER) || !defined(WEBRTC_MAC) || defined(WEBRTC_IOS)
+ // Loading the error strings crashes mac_asan. Omit this debugging aid there.
+ SSL_load_error_strings();
+#endif
+ ERR_load_BIO_strings();
+ OpenSSL_add_all_algorithms();
+ RAND_poll();
+ custom_verify_callback_ = callback;
+ return true;
+}
+
+bool OpenSSLAdapter::InitializeSSLThread() {
+ mutex_buf = new MUTEX_TYPE[CRYPTO_num_locks()];
+ if (!mutex_buf)
+ return false;
+ for (int i = 0; i < CRYPTO_num_locks(); ++i)
+ MUTEX_SETUP(mutex_buf[i]);
+
+ // id_function() must return an unsigned long; pthread_t may be a pointer
+ // type, hence the old-style cast used above.
+ CRYPTO_set_id_callback(id_function);
+ CRYPTO_set_locking_callback(locking_function);
+ CRYPTO_set_dynlock_create_callback(dyn_create_function);
+ CRYPTO_set_dynlock_lock_callback(dyn_lock_function);
+ CRYPTO_set_dynlock_destroy_callback(dyn_destroy_function);
+ return true;
+}
+
+bool OpenSSLAdapter::CleanupSSL() {
+ if (!mutex_buf)
+ return false;
+ CRYPTO_set_id_callback(NULL);
+ CRYPTO_set_locking_callback(NULL);
+ CRYPTO_set_dynlock_create_callback(NULL);
+ CRYPTO_set_dynlock_lock_callback(NULL);
+ CRYPTO_set_dynlock_destroy_callback(NULL);
+ for (int i = 0; i < CRYPTO_num_locks(); ++i)
+ MUTEX_CLEANUP(mutex_buf[i]);
+ delete [] mutex_buf;
+ mutex_buf = NULL;
+ return true;
+}
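
InitializeSSLThread()/CleanupSSL() install and remove the locking callbacks that OpenSSL needs in order to be thread-safe, so they have to bracket all SSL use in the process. A sketch of the application-level lifecycle, assuming the rtc::InitializeSSL()/rtc::CleanupSSL() wrappers declared in ssladapter.h (not shown in this hunk), which dispatch to the adapter methods above:

    #include "webrtc/base/ssladapter.h"

    int main() {
      // Registers id/locking callbacks, loads error strings, seeds the RNG.
      if (!rtc::InitializeSSL())
        return 1;
      // ... create sockets, SSL adapters/streams, run the application ...
      rtc::CleanupSSL();  // Unregisters the callbacks and frees the mutexes.
      return 0;
    }
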
+
+OpenSSLAdapter::OpenSSLAdapter(AsyncSocket* socket)
+ : SSLAdapter(socket),
+ state_(SSL_NONE),
+ ssl_read_needs_write_(false),
+ ssl_write_needs_read_(false),
+ restartable_(false),
+ ssl_(NULL), ssl_ctx_(NULL),
+ custom_verification_succeeded_(false) {
+}
+
+OpenSSLAdapter::~OpenSSLAdapter() {
+ Cleanup();
+}
+
+int
+OpenSSLAdapter::StartSSL(const char* hostname, bool restartable) {
+ if (state_ != SSL_NONE)
+ return -1;
+
+ ssl_host_name_ = hostname;
+ restartable_ = restartable;
+
+ if (socket_->GetState() != Socket::CS_CONNECTED) {
+ state_ = SSL_WAIT;
+ return 0;
+ }
+
+ state_ = SSL_CONNECTING;
+ if (int err = BeginSSL()) {
+ Error("BeginSSL", err, false);
+ return err;
+ }
+
+ return 0;
+}
+
+int
+OpenSSLAdapter::BeginSSL() {
+ LOG(LS_INFO) << "BeginSSL: " << ssl_host_name_;
+ ASSERT(state_ == SSL_CONNECTING);
+
+ int err = 0;
+ BIO* bio = NULL;
+
+ // First set up the context
+ if (!ssl_ctx_)
+ ssl_ctx_ = SetupSSLContext();
+
+ if (!ssl_ctx_) {
+ err = -1;
+ goto ssl_error;
+ }
+
+ bio = BIO_new_socket(static_cast<AsyncSocketAdapter*>(socket_));
+ if (!bio) {
+ err = -1;
+ goto ssl_error;
+ }
+
+ ssl_ = SSL_new(ssl_ctx_);
+ if (!ssl_) {
+ err = -1;
+ goto ssl_error;
+ }
+
+ SSL_set_app_data(ssl_, this);
+
+ SSL_set_bio(ssl_, bio, bio);
+ SSL_set_mode(ssl_, SSL_MODE_ENABLE_PARTIAL_WRITE |
+ SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER);
+
+ // the SSL object owns the bio now
+ bio = NULL;
+
+ // Do the connect
+ err = ContinueSSL();
+ if (err != 0)
+ goto ssl_error;
+
+ return err;
+
+ssl_error:
+ Cleanup();
+ if (bio)
+ BIO_free(bio);
+
+ return err;
+}
+
+int
+OpenSSLAdapter::ContinueSSL() {
+ ASSERT(state_ == SSL_CONNECTING);
+
+ int code = SSL_connect(ssl_);
+ switch (SSL_get_error(ssl_, code)) {
+ case SSL_ERROR_NONE:
+ if (!SSLPostConnectionCheck(ssl_, ssl_host_name_.c_str())) {
+ LOG(LS_ERROR) << "TLS post connection check failed";
+ // make sure we close the socket
+ Cleanup();
+ // The connect failed so return -1 to shut down the socket
+ return -1;
+ }
+
+ state_ = SSL_CONNECTED;
+ AsyncSocketAdapter::OnConnectEvent(this);
+#if 0 // TODO: worry about this
+ // Don't let ourselves go away during the callbacks
+ PRefPtr<OpenSSLAdapter> lock(this);
+ LOG(LS_INFO) << " -- onStreamReadable";
+ AsyncSocketAdapter::OnReadEvent(this);
+ LOG(LS_INFO) << " -- onStreamWriteable";
+ AsyncSocketAdapter::OnWriteEvent(this);
+#endif
+ break;
+
+ case SSL_ERROR_WANT_READ:
+ case SSL_ERROR_WANT_WRITE:
+ break;
+
+ case SSL_ERROR_ZERO_RETURN:
+ default:
+ LOG(LS_WARNING) << "ContinueSSL -- error " << code;
+ return (code != 0) ? code : -1;
+ }
+
+ return 0;
+}
+
+void
+OpenSSLAdapter::Error(const char* context, int err, bool signal) {
+ LOG(LS_WARNING) << "OpenSSLAdapter::Error("
+ << context << ", " << err << ")";
+ state_ = SSL_ERROR;
+ SetError(err);
+ if (signal)
+ AsyncSocketAdapter::OnCloseEvent(this, err);
+}
+
+void
+OpenSSLAdapter::Cleanup() {
+ LOG(LS_INFO) << "Cleanup";
+
+ state_ = SSL_NONE;
+ ssl_read_needs_write_ = false;
+ ssl_write_needs_read_ = false;
+ custom_verification_succeeded_ = false;
+
+ if (ssl_) {
+ SSL_free(ssl_);
+ ssl_ = NULL;
+ }
+
+ if (ssl_ctx_) {
+ SSL_CTX_free(ssl_ctx_);
+ ssl_ctx_ = NULL;
+ }
+}
+
+//
+// AsyncSocket Implementation
+//
+
+int
+OpenSSLAdapter::Send(const void* pv, size_t cb) {
+ //LOG(LS_INFO) << "OpenSSLAdapter::Send(" << cb << ")";
+
+ switch (state_) {
+ case SSL_NONE:
+ return AsyncSocketAdapter::Send(pv, cb);
+
+ case SSL_WAIT:
+ case SSL_CONNECTING:
+ SetError(EWOULDBLOCK);
+ return SOCKET_ERROR;
+
+ case SSL_CONNECTED:
+ break;
+
+ case SSL_ERROR:
+ default:
+ return SOCKET_ERROR;
+ }
+
+ // OpenSSL will return an error if we try to write zero bytes
+ if (cb == 0)
+ return 0;
+
+ ssl_write_needs_read_ = false;
+
+ int code = SSL_write(ssl_, pv, cb);
+ switch (SSL_get_error(ssl_, code)) {
+ case SSL_ERROR_NONE:
+ //LOG(LS_INFO) << " -- success";
+ return code;
+ case SSL_ERROR_WANT_READ:
+ //LOG(LS_INFO) << " -- error want read";
+ ssl_write_needs_read_ = true;
+ SetError(EWOULDBLOCK);
+ break;
+ case SSL_ERROR_WANT_WRITE:
+ //LOG(LS_INFO) << " -- error want write";
+ SetError(EWOULDBLOCK);
+ break;
+ case SSL_ERROR_ZERO_RETURN:
+ //LOG(LS_INFO) << " -- remote side closed";
+ SetError(EWOULDBLOCK);
+ // do we need to signal closure?
+ break;
+ default:
+ //LOG(LS_INFO) << " -- error " << code;
+ Error("SSL_write", (code ? code : -1), false);
+ break;
+ }
+
+ return SOCKET_ERROR;
+}
+
+int
+OpenSSLAdapter::Recv(void* pv, size_t cb) {
+ //LOG(LS_INFO) << "OpenSSLAdapter::Recv(" << cb << ")";
+ switch (state_) {
+
+ case SSL_NONE:
+ return AsyncSocketAdapter::Recv(pv, cb);
+
+ case SSL_WAIT:
+ case SSL_CONNECTING:
+ SetError(EWOULDBLOCK);
+ return SOCKET_ERROR;
+
+ case SSL_CONNECTED:
+ break;
+
+ case SSL_ERROR:
+ default:
+ return SOCKET_ERROR;
+ }
+
+ // Don't trust OpenSSL with zero byte reads
+ if (cb == 0)
+ return 0;
+
+ ssl_read_needs_write_ = false;
+
+ int code = SSL_read(ssl_, pv, cb);
+ switch (SSL_get_error(ssl_, code)) {
+ case SSL_ERROR_NONE:
+ //LOG(LS_INFO) << " -- success";
+ return code;
+ case SSL_ERROR_WANT_READ:
+ //LOG(LS_INFO) << " -- error want read";
+ SetError(EWOULDBLOCK);
+ break;
+ case SSL_ERROR_WANT_WRITE:
+ //LOG(LS_INFO) << " -- error want write";
+ ssl_read_needs_write_ = true;
+ SetError(EWOULDBLOCK);
+ break;
+ case SSL_ERROR_ZERO_RETURN:
+ //LOG(LS_INFO) << " -- remote side closed";
+ SetError(EWOULDBLOCK);
+ // do we need to signal closure?
+ break;
+ default:
+ //LOG(LS_INFO) << " -- error " << code;
+ Error("SSL_read", (code ? code : -1), false);
+ break;
+ }
+
+ return SOCKET_ERROR;
+}
+
+int
+OpenSSLAdapter::Close() {
+ Cleanup();
+ state_ = restartable_ ? SSL_WAIT : SSL_NONE;
+ return AsyncSocketAdapter::Close();
+}
+
+Socket::ConnState
+OpenSSLAdapter::GetState() const {
+ //if (signal_close_)
+ // return CS_CONNECTED;
+ ConnState state = socket_->GetState();
+ if ((state == CS_CONNECTED)
+ && ((state_ == SSL_WAIT) || (state_ == SSL_CONNECTING)))
+ state = CS_CONNECTING;
+ return state;
+}
+
+void
+OpenSSLAdapter::OnConnectEvent(AsyncSocket* socket) {
+ LOG(LS_INFO) << "OpenSSLAdapter::OnConnectEvent";
+ if (state_ != SSL_WAIT) {
+ ASSERT(state_ == SSL_NONE);
+ AsyncSocketAdapter::OnConnectEvent(socket);
+ return;
+ }
+
+ state_ = SSL_CONNECTING;
+ if (int err = BeginSSL()) {
+ AsyncSocketAdapter::OnCloseEvent(socket, err);
+ }
+}
+
+void
+OpenSSLAdapter::OnReadEvent(AsyncSocket* socket) {
+ //LOG(LS_INFO) << "OpenSSLAdapter::OnReadEvent";
+
+ if (state_ == SSL_NONE) {
+ AsyncSocketAdapter::OnReadEvent(socket);
+ return;
+ }
+
+ if (state_ == SSL_CONNECTING) {
+ if (int err = ContinueSSL()) {
+ Error("ContinueSSL", err);
+ }
+ return;
+ }
+
+ if (state_ != SSL_CONNECTED)
+ return;
+
+ // Don't let ourselves go away during the callbacks
+ //PRefPtr<OpenSSLAdapter> lock(this); // TODO: fix this
+ if (ssl_write_needs_read_) {
+ //LOG(LS_INFO) << " -- onStreamWriteable";
+ AsyncSocketAdapter::OnWriteEvent(socket);
+ }
+
+ //LOG(LS_INFO) << " -- onStreamReadable";
+ AsyncSocketAdapter::OnReadEvent(socket);
+}
+
+void
+OpenSSLAdapter::OnWriteEvent(AsyncSocket* socket) {
+ //LOG(LS_INFO) << "OpenSSLAdapter::OnWriteEvent";
+
+ if (state_ == SSL_NONE) {
+ AsyncSocketAdapter::OnWriteEvent(socket);
+ return;
+ }
+
+ if (state_ == SSL_CONNECTING) {
+ if (int err = ContinueSSL()) {
+ Error("ContinueSSL", err);
+ }
+ return;
+ }
+
+ if (state_ != SSL_CONNECTED)
+ return;
+
+ // Don't let ourselves go away during the callbacks
+ //PRefPtr<OpenSSLAdapter> lock(this); // TODO: fix this
+
+ if (ssl_read_needs_write_) {
+ //LOG(LS_INFO) << " -- onStreamReadable";
+ AsyncSocketAdapter::OnReadEvent(socket);
+ }
+
+ //LOG(LS_INFO) << " -- onStreamWriteable";
+ AsyncSocketAdapter::OnWriteEvent(socket);
+}
+
+void
+OpenSSLAdapter::OnCloseEvent(AsyncSocket* socket, int err) {
+ LOG(LS_INFO) << "OpenSSLAdapter::OnCloseEvent(" << err << ")";
+ AsyncSocketAdapter::OnCloseEvent(socket, err);
+}
+
+// This code is taken from the "Network Security with OpenSSL"
+// sample in chapter 5
+
+bool OpenSSLAdapter::VerifyServerName(SSL* ssl, const char* host,
+ bool ignore_bad_cert) {
+ if (!host)
+ return false;
+
+ // Checking the return from SSL_get_peer_certificate here is not strictly
+ // necessary. With our setup, it is not possible for it to return
+ // NULL. However, it is good form to check the return.
+ X509* certificate = SSL_get_peer_certificate(ssl);
+ if (!certificate)
+ return false;
+
+ // Logging certificates is extremely verbose, so it is disabled by default.
+#ifdef LOG_CERTIFICATES
+ {
+ LOG(LS_INFO) << "Certificate from server:";
+ BIO* mem = BIO_new(BIO_s_mem());
+ X509_print_ex(mem, certificate, XN_FLAG_SEP_CPLUS_SPC, X509_FLAG_NO_HEADER);
+ BIO_write(mem, "\0", 1);
+ char* buffer;
+ BIO_get_mem_data(mem, &buffer);
+ LOG(LS_INFO) << buffer;
+ BIO_free(mem);
+
+ char* cipher_description =
+ SSL_CIPHER_description(SSL_get_current_cipher(ssl), NULL, 128);
+ LOG(LS_INFO) << "Cipher: " << cipher_description;
+ OPENSSL_free(cipher_description);
+ }
+#endif
+
+ bool ok = false;
+ int extension_count = X509_get_ext_count(certificate);
+ for (int i = 0; i < extension_count; ++i) {
+ X509_EXTENSION* extension = X509_get_ext(certificate, i);
+ int extension_nid = OBJ_obj2nid(X509_EXTENSION_get_object(extension));
+
+ if (extension_nid == NID_subject_alt_name) {
+ const X509V3_EXT_METHOD* meth = X509V3_EXT_get(extension);
+ if (!meth)
+ break;
+
+ void* ext_str = NULL;
+
+ // We assign this to a local variable, instead of passing the address
+ // directly to ASN1_item_d2i.
+ // See http://readlist.com/lists/openssl.org/openssl-users/0/4761.html.
+ unsigned char* ext_value_data = extension->value->data;
+
+ const unsigned char **ext_value_data_ptr =
+ (const_cast<const unsigned char **>(&ext_value_data));
+
+ if (meth->it) {
+ ext_str = ASN1_item_d2i(NULL, ext_value_data_ptr,
+ extension->value->length,
+ ASN1_ITEM_ptr(meth->it));
+ } else {
+ ext_str = meth->d2i(NULL, ext_value_data_ptr, extension->value->length);
+ }
+
+ STACK_OF(CONF_VALUE)* value = meth->i2v(meth, ext_str, NULL);
+ for (int j = 0; j < sk_CONF_VALUE_num(value); ++j) {
+ CONF_VALUE* nval = sk_CONF_VALUE_value(value, j);
+ // The value for nval can contain wildcards
+ if (!strcmp(nval->name, "DNS") && string_match(host, nval->value)) {
+ ok = true;
+ break;
+ }
+ }
+ sk_CONF_VALUE_pop_free(value, X509V3_conf_free);
+ value = NULL;
+
+ if (meth->it) {
+ ASN1_item_free(reinterpret_cast<ASN1_VALUE*>(ext_str),
+ ASN1_ITEM_ptr(meth->it));
+ } else {
+ meth->ext_free(ext_str);
+ }
+ ext_str = NULL;
+ }
+ if (ok)
+ break;
+ }
+
+ char data[256];
+ X509_name_st* subject;
+ if (!ok
+ && ((subject = X509_get_subject_name(certificate)) != NULL)
+ && (X509_NAME_get_text_by_NID(subject, NID_commonName,
+ data, sizeof(data)) > 0)) {
+ data[sizeof(data)-1] = 0;
+ if (_stricmp(data, host) == 0)
+ ok = true;
+ }
+
+ X509_free(certificate);
+
+ // This should only ever be turned on for debugging and development.
+ if (!ok && ignore_bad_cert) {
+ LOG(LS_WARNING) << "TLS certificate check FAILED. "
+ << "Allowing connection anyway.";
+ ok = true;
+ }
+
+ return ok;
+}
+
+bool OpenSSLAdapter::SSLPostConnectionCheck(SSL* ssl, const char* host) {
+ bool ok = VerifyServerName(ssl, host, ignore_bad_cert());
+
+ if (ok) {
+ ok = (SSL_get_verify_result(ssl) == X509_V_OK ||
+ custom_verification_succeeded_);
+ }
+
+ if (!ok && ignore_bad_cert()) {
+ LOG(LS_INFO) << "Other TLS post connection checks failed.";
+ ok = true;
+ }
+
+ return ok;
+}
+
+#if _DEBUG
+
+// We only use this for tracing and so it is only needed in debug mode
+
+void
+OpenSSLAdapter::SSLInfoCallback(const SSL* s, int where, int ret) {
+ const char* str = "undefined";
+ int w = where & ~SSL_ST_MASK;
+ if (w & SSL_ST_CONNECT) {
+ str = "SSL_connect";
+ } else if (w & SSL_ST_ACCEPT) {
+ str = "SSL_accept";
+ }
+ if (where & SSL_CB_LOOP) {
+ LOG(LS_INFO) << str << ":" << SSL_state_string_long(s);
+ } else if (where & SSL_CB_ALERT) {
+ str = (where & SSL_CB_READ) ? "read" : "write";
+ LOG(LS_INFO) << "SSL3 alert " << str
+ << ":" << SSL_alert_type_string_long(ret)
+ << ":" << SSL_alert_desc_string_long(ret);
+ } else if (where & SSL_CB_EXIT) {
+ if (ret == 0) {
+ LOG(LS_INFO) << str << ":failed in " << SSL_state_string_long(s);
+ } else if (ret < 0) {
+ LOG(LS_INFO) << str << ":error in " << SSL_state_string_long(s);
+ }
+ }
+}
+
+#endif // _DEBUG
+
+int
+OpenSSLAdapter::SSLVerifyCallback(int ok, X509_STORE_CTX* store) {
+#if _DEBUG
+ if (!ok) {
+ char data[256];
+ X509* cert = X509_STORE_CTX_get_current_cert(store);
+ int depth = X509_STORE_CTX_get_error_depth(store);
+ int err = X509_STORE_CTX_get_error(store);
+
+ LOG(LS_INFO) << "Error with certificate at depth: " << depth;
+ X509_NAME_oneline(X509_get_issuer_name(cert), data, sizeof(data));
+ LOG(LS_INFO) << " issuer = " << data;
+ X509_NAME_oneline(X509_get_subject_name(cert), data, sizeof(data));
+ LOG(LS_INFO) << " subject = " << data;
+ LOG(LS_INFO) << " err = " << err
+ << ":" << X509_verify_cert_error_string(err);
+ }
+#endif
+
+ // Get our stream pointer from the store
+ SSL* ssl = reinterpret_cast<SSL*>(
+ X509_STORE_CTX_get_ex_data(store,
+ SSL_get_ex_data_X509_STORE_CTX_idx()));
+
+ OpenSSLAdapter* stream =
+ reinterpret_cast<OpenSSLAdapter*>(SSL_get_app_data(ssl));
+
+ if (!ok && custom_verify_callback_) {
+ void* cert =
+ reinterpret_cast<void*>(X509_STORE_CTX_get_current_cert(store));
+ if (custom_verify_callback_(cert)) {
+ stream->custom_verification_succeeded_ = true;
+ LOG(LS_INFO) << "validated certificate using custom callback";
+ ok = true;
+ }
+ }
+
+ // Should only be used for debugging and development.
+ if (!ok && stream->ignore_bad_cert()) {
+ LOG(LS_WARNING) << "Ignoring cert error while verifying cert chain";
+ ok = 1;
+ }
+
+ return ok;
+}
+
+bool OpenSSLAdapter::ConfigureTrustedRootCertificates(SSL_CTX* ctx) {
+ // Add the root certificates that we care about to the SSL context.
+ int count_of_added_certs = 0;
+ for (int i = 0; i < ARRAY_SIZE(kSSLCertCertificateList); i++) {
+ const unsigned char* cert_buffer = kSSLCertCertificateList[i];
+ size_t cert_buffer_len = kSSLCertCertificateSizeList[i];
+ X509* cert = d2i_X509(NULL, &cert_buffer, cert_buffer_len);
+ if (cert) {
+ int return_value = X509_STORE_add_cert(SSL_CTX_get_cert_store(ctx), cert);
+ if (return_value == 0) {
+ LOG(LS_WARNING) << "Unable to add certificate.";
+ } else {
+ count_of_added_certs++;
+ }
+ X509_free(cert);
+ }
+ }
+ return count_of_added_certs > 0;
+}
+
+SSL_CTX*
+OpenSSLAdapter::SetupSSLContext() {
+ SSL_CTX* ctx = SSL_CTX_new(TLSv1_client_method());
+ if (ctx == NULL) {
+ unsigned long error = ERR_get_error(); // NOLINT: type used by OpenSSL.
+ LOG(LS_WARNING) << "SSL_CTX creation failed: "
+ << '"' << ERR_reason_error_string(error) << "\" "
+ << "(error=" << error << ')';
+ return NULL;
+ }
+ if (!ConfigureTrustedRootCertificates(ctx)) {
+ SSL_CTX_free(ctx);
+ return NULL;
+ }
+
+#ifdef _DEBUG
+ SSL_CTX_set_info_callback(ctx, SSLInfoCallback);
+#endif
+
+ SSL_CTX_set_verify(ctx, SSL_VERIFY_PEER, SSLVerifyCallback);
+ SSL_CTX_set_verify_depth(ctx, 4);
+ SSL_CTX_set_cipher_list(ctx, "ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH");
+
+ return ctx;
+}
+
+} // namespace rtc
+
+#endif // HAVE_OPENSSL_SSL_H
diff --git a/chromium/third_party/webrtc/base/openssladapter.h b/chromium/third_party/webrtc/base/openssladapter.h
new file mode 100644
index 00000000000..d244a7f5c81
--- /dev/null
+++ b/chromium/third_party/webrtc/base/openssladapter.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_OPENSSLADAPTER_H__
+#define WEBRTC_BASE_OPENSSLADAPTER_H__
+
+#include <string>
+#include "webrtc/base/ssladapter.h"
+
+typedef struct ssl_st SSL;
+typedef struct ssl_ctx_st SSL_CTX;
+typedef struct x509_store_ctx_st X509_STORE_CTX;
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+
+class OpenSSLAdapter : public SSLAdapter {
+public:
+ static bool InitializeSSL(VerificationCallback callback);
+ static bool InitializeSSLThread();
+ static bool CleanupSSL();
+
+ OpenSSLAdapter(AsyncSocket* socket);
+ virtual ~OpenSSLAdapter();
+
+ virtual int StartSSL(const char* hostname, bool restartable);
+ virtual int Send(const void* pv, size_t cb);
+ virtual int Recv(void* pv, size_t cb);
+ virtual int Close();
+
+ // Note that the socket returns ST_CONNECTING while SSL is being negotiated.
+ virtual ConnState GetState() const;
+
+protected:
+ virtual void OnConnectEvent(AsyncSocket* socket);
+ virtual void OnReadEvent(AsyncSocket* socket);
+ virtual void OnWriteEvent(AsyncSocket* socket);
+ virtual void OnCloseEvent(AsyncSocket* socket, int err);
+
+private:
+ enum SSLState {
+ SSL_NONE, SSL_WAIT, SSL_CONNECTING, SSL_CONNECTED, SSL_ERROR
+ };
+
+ int BeginSSL();
+ int ContinueSSL();
+ void Error(const char* context, int err, bool signal = true);
+ void Cleanup();
+
+ static bool VerifyServerName(SSL* ssl, const char* host,
+ bool ignore_bad_cert);
+ bool SSLPostConnectionCheck(SSL* ssl, const char* host);
+#if _DEBUG
+ static void SSLInfoCallback(const SSL* s, int where, int ret);
+#endif // _DEBUG
+ static int SSLVerifyCallback(int ok, X509_STORE_CTX* store);
+ static VerificationCallback custom_verify_callback_;
+ friend class OpenSSLStreamAdapter; // for custom_verify_callback_;
+
+ static bool ConfigureTrustedRootCertificates(SSL_CTX* ctx);
+ static SSL_CTX* SetupSSLContext();
+
+ SSLState state_;
+ bool ssl_read_needs_write_;
+ bool ssl_write_needs_read_;
+ // If true, socket will retain SSL configuration after Close.
+ bool restartable_;
+
+ SSL* ssl_;
+ SSL_CTX* ssl_ctx_;
+ std::string ssl_host_name_;
+
+ bool custom_verification_succeeded_;
+};
+
+/////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_OPENSSLADAPTER_H__
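
A hedged usage sketch for the adapter declared above: wrap an existing AsyncSocket, start the handshake, and treat EWOULDBLOCK from Send()/Recv() as "handshake still in progress" (that is what the SSL_WAIT/SSL_CONNECTING states in the .cc map to). This assumes, as with other AsyncSocketAdapter subclasses, that the adapter takes ownership of the wrapped socket:

    #include "webrtc/base/openssladapter.h"

    // Wraps |socket| for a TLS client connection to |hostname|. StartSSL()
    // returns 0 on success; with restartable=false a later Close() drops the
    // adapter back to SSL_NONE instead of SSL_WAIT.
    rtc::OpenSSLAdapter* MakeTlsClient(rtc::AsyncSocket* socket,
                                       const char* hostname) {
      rtc::OpenSSLAdapter* ssl = new rtc::OpenSSLAdapter(socket);
      if (ssl->StartSSL(hostname, false) != 0) {
        delete ssl;  // Also releases the wrapped socket (assumed ownership).
        return NULL;
      }
      return ssl;  // Send()/Recv() report EWOULDBLOCK until SSL_CONNECTED.
    }
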
diff --git a/chromium/third_party/webrtc/base/openssldigest.cc b/chromium/third_party/webrtc/base/openssldigest.cc
new file mode 100644
index 00000000000..0d22f4329c5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/openssldigest.cc
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if HAVE_OPENSSL_SSL_H
+
+#include "webrtc/base/openssldigest.h"
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/openssl.h"
+
+namespace rtc {
+
+OpenSSLDigest::OpenSSLDigest(const std::string& algorithm) {
+ EVP_MD_CTX_init(&ctx_);
+ if (GetDigestEVP(algorithm, &md_)) {
+ EVP_DigestInit_ex(&ctx_, md_, NULL);
+ } else {
+ md_ = NULL;
+ }
+}
+
+OpenSSLDigest::~OpenSSLDigest() {
+ EVP_MD_CTX_cleanup(&ctx_);
+}
+
+size_t OpenSSLDigest::Size() const {
+ if (!md_) {
+ return 0;
+ }
+ return EVP_MD_size(md_);
+}
+
+void OpenSSLDigest::Update(const void* buf, size_t len) {
+ if (!md_) {
+ return;
+ }
+ EVP_DigestUpdate(&ctx_, buf, len);
+}
+
+size_t OpenSSLDigest::Finish(void* buf, size_t len) {
+ if (!md_ || len < Size()) {
+ return 0;
+ }
+ unsigned int md_len;
+ EVP_DigestFinal_ex(&ctx_, static_cast<unsigned char*>(buf), &md_len);
+ EVP_DigestInit_ex(&ctx_, md_, NULL); // prepare for future Update()s
+ ASSERT(md_len == Size());
+ return md_len;
+}
+
+bool OpenSSLDigest::GetDigestEVP(const std::string& algorithm,
+ const EVP_MD** mdp) {
+ const EVP_MD* md;
+ if (algorithm == DIGEST_MD5) {
+ md = EVP_md5();
+ } else if (algorithm == DIGEST_SHA_1) {
+ md = EVP_sha1();
+ } else if (algorithm == DIGEST_SHA_224) {
+ md = EVP_sha224();
+ } else if (algorithm == DIGEST_SHA_256) {
+ md = EVP_sha256();
+ } else if (algorithm == DIGEST_SHA_384) {
+ md = EVP_sha384();
+ } else if (algorithm == DIGEST_SHA_512) {
+ md = EVP_sha512();
+ } else {
+ return false;
+ }
+
+ // Can't happen
+ ASSERT(EVP_MD_size(md) >= 16);
+ *mdp = md;
+ return true;
+}
+
+bool OpenSSLDigest::GetDigestName(const EVP_MD* md,
+ std::string* algorithm) {
+ ASSERT(md != NULL);
+ ASSERT(algorithm != NULL);
+
+ int md_type = EVP_MD_type(md);
+ if (md_type == NID_md5) {
+ *algorithm = DIGEST_MD5;
+ } else if (md_type == NID_sha1) {
+ *algorithm = DIGEST_SHA_1;
+ } else if (md_type == NID_sha224) {
+ *algorithm = DIGEST_SHA_224;
+ } else if (md_type == NID_sha256) {
+ *algorithm = DIGEST_SHA_256;
+ } else if (md_type == NID_sha384) {
+ *algorithm = DIGEST_SHA_384;
+ } else if (md_type == NID_sha512) {
+ *algorithm = DIGEST_SHA_512;
+ } else {
+ algorithm->clear();
+ return false;
+ }
+
+ return true;
+}
+
+bool OpenSSLDigest::GetDigestSize(const std::string& algorithm,
+ size_t* length) {
+ const EVP_MD *md;
+ if (!GetDigestEVP(algorithm, &md))
+ return false;
+
+ *length = EVP_MD_size(md);
+ return true;
+}
+
+} // namespace rtc
+
+#endif // HAVE_OPENSSL_SSL_H
+
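
A usage sketch for the class above; DIGEST_SHA_256 is one of the algorithm-name constants from messagedigest.h that GetDigestEVP() recognizes:

    #include "webrtc/base/messagedigest.h"
    #include "webrtc/base/openssldigest.h"

    // Hashes |len| bytes of |data| with SHA-256. Returns the digest length
    // written to |out| (32 for SHA-256), or 0 if |out| is too small.
    size_t Sha256Of(const void* data, size_t len,
                    unsigned char* out, size_t out_len) {
      rtc::OpenSSLDigest digest(rtc::DIGEST_SHA_256);
      if (out_len < digest.Size())
        return 0;
      digest.Update(data, len);
      return digest.Finish(out, out_len);  // Also re-inits for further use.
    }
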
diff --git a/chromium/third_party/webrtc/base/openssldigest.h b/chromium/third_party/webrtc/base/openssldigest.h
new file mode 100644
index 00000000000..c4b0d8aed54
--- /dev/null
+++ b/chromium/third_party/webrtc/base/openssldigest.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_OPENSSLDIGEST_H_
+#define WEBRTC_BASE_OPENSSLDIGEST_H_
+
+#include <openssl/evp.h>
+
+#include "webrtc/base/messagedigest.h"
+
+namespace rtc {
+
+// An implementation of the digest class that uses OpenSSL.
+class OpenSSLDigest : public MessageDigest {
+ public:
+ // Creates an OpenSSLDigest with |algorithm| as the hash algorithm.
+ explicit OpenSSLDigest(const std::string& algorithm);
+ ~OpenSSLDigest();
+ // Returns the digest output size (e.g. 16 bytes for MD5).
+ virtual size_t Size() const;
+ // Updates the digest with |len| bytes from |buf|.
+ virtual void Update(const void* buf, size_t len);
+ // Outputs the digest value to |buf| with length |len|.
+ virtual size_t Finish(void* buf, size_t len);
+
+ // Helper function to look up a digest's EVP by name.
+ static bool GetDigestEVP(const std::string &algorithm,
+ const EVP_MD** md);
+ // Helper function to look up a digest's name by EVP.
+ static bool GetDigestName(const EVP_MD* md,
+ std::string* algorithm);
+ // Helper function to get the length of a digest.
+ static bool GetDigestSize(const std::string &algorithm,
+ size_t* len);
+
+ private:
+ EVP_MD_CTX ctx_;
+ const EVP_MD* md_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_OPENSSLDIGEST_H_
diff --git a/chromium/third_party/webrtc/base/opensslidentity.cc b/chromium/third_party/webrtc/base/opensslidentity.cc
new file mode 100644
index 00000000000..915680ce28e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/opensslidentity.cc
@@ -0,0 +1,366 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if HAVE_OPENSSL_SSL_H
+
+#include "webrtc/base/opensslidentity.h"
+
+// Must be included before the OpenSSL headers.
+#include "webrtc/base/win32.h" // NOLINT
+
+#include <openssl/bio.h>
+#include <openssl/err.h>
+#include <openssl/pem.h>
+#include <openssl/bn.h>
+#include <openssl/rsa.h>
+#include <openssl/crypto.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/openssl.h"
+#include "webrtc/base/openssldigest.h"
+
+namespace rtc {
+
+// We could have exposed a myriad of parameters for the crypto stuff,
+// but keeping it simple seems best.
+
+// Strength of generated keys. These are RSA keys.
+static const int KEY_LENGTH = 1024;
+
+// Random bits for certificate serial number
+static const int SERIAL_RAND_BITS = 64;
+
+// Certificate validity lifetime
+static const int CERTIFICATE_LIFETIME = 60*60*24*30; // 30 days, arbitrarily
+// Certificate validity window.
+// This is to compensate for slightly incorrect system clocks.
+static const int CERTIFICATE_WINDOW = -60*60*24;
+
+// Generate a key pair. Caller is responsible for freeing the returned object.
+static EVP_PKEY* MakeKey() {
+ LOG(LS_INFO) << "Making key pair";
+ EVP_PKEY* pkey = EVP_PKEY_new();
+ // RSA_generate_key is deprecated. Use _ex version.
+ BIGNUM* exponent = BN_new();
+ RSA* rsa = RSA_new();
+ if (!pkey || !exponent || !rsa ||
+ !BN_set_word(exponent, 0x10001) || // 65537 RSA exponent
+ !RSA_generate_key_ex(rsa, KEY_LENGTH, exponent, NULL) ||
+ !EVP_PKEY_assign_RSA(pkey, rsa)) {
+ EVP_PKEY_free(pkey);
+ BN_free(exponent);
+ RSA_free(rsa);
+ return NULL;
+ }
+ // Ownership of the RSA struct was assigned to |pkey|; don't free it here.
+ BN_free(exponent);
+ LOG(LS_INFO) << "Returning key pair";
+ return pkey;
+}
+
+// Generate a self-signed certificate, with the public key from the
+// given key pair. Caller is responsible for freeing the returned object.
+static X509* MakeCertificate(EVP_PKEY* pkey, const SSLIdentityParams& params) {
+ LOG(LS_INFO) << "Making certificate for " << params.common_name;
+ X509* x509 = NULL;
+ BIGNUM* serial_number = NULL;
+ X509_NAME* name = NULL;
+
+ if ((x509=X509_new()) == NULL)
+ goto error;
+
+ if (!X509_set_pubkey(x509, pkey))
+ goto error;
+
+ // serial number
+ // temporary reference to serial number inside x509 struct
+ ASN1_INTEGER* asn1_serial_number;
+ if ((serial_number = BN_new()) == NULL ||
+ !BN_pseudo_rand(serial_number, SERIAL_RAND_BITS, 0, 0) ||
+ (asn1_serial_number = X509_get_serialNumber(x509)) == NULL ||
+ !BN_to_ASN1_INTEGER(serial_number, asn1_serial_number))
+ goto error;
+
+ if (!X509_set_version(x509, 0L)) // version 1
+ goto error;
+
+ // There are a lot of possible components for the name entries. In
+ // our P2P SSL mode however, the certificates are pre-exchanged
+ // (through the secure XMPP channel), and so the certificate
+ // identification is arbitrary. It can't be empty, so we set some
+ // arbitrary common_name. Note that this certificate goes out in
+ // clear during SSL negotiation, so there may be a privacy issue in
+ // putting anything recognizable here.
+ if ((name = X509_NAME_new()) == NULL ||
+ !X509_NAME_add_entry_by_NID(
+ name, NID_commonName, MBSTRING_UTF8,
+ (unsigned char*)params.common_name.c_str(), -1, -1, 0) ||
+ !X509_set_subject_name(x509, name) ||
+ !X509_set_issuer_name(x509, name))
+ goto error;
+
+ if (!X509_gmtime_adj(X509_get_notBefore(x509), params.not_before) ||
+ !X509_gmtime_adj(X509_get_notAfter(x509), params.not_after))
+ goto error;
+
+ if (!X509_sign(x509, pkey, EVP_sha1()))
+ goto error;
+
+ BN_free(serial_number);
+ X509_NAME_free(name);
+ LOG(LS_INFO) << "Returning certificate";
+ return x509;
+
+ error:
+ BN_free(serial_number);
+ X509_NAME_free(name);
+ X509_free(x509);
+ return NULL;
+}
+
+// This dumps the SSL error stack to the log.
+static void LogSSLErrors(const std::string& prefix) {
+ char error_buf[200];
+ unsigned long err;
+
+ while ((err = ERR_get_error()) != 0) {
+ ERR_error_string_n(err, error_buf, sizeof(error_buf));
+ LOG(LS_ERROR) << prefix << ": " << error_buf << "\n";
+ }
+}
+
+OpenSSLKeyPair* OpenSSLKeyPair::Generate() {
+ EVP_PKEY* pkey = MakeKey();
+ if (!pkey) {
+ LogSSLErrors("Generating key pair");
+ return NULL;
+ }
+ return new OpenSSLKeyPair(pkey);
+}
+
+OpenSSLKeyPair::~OpenSSLKeyPair() {
+ EVP_PKEY_free(pkey_);
+}
+
+void OpenSSLKeyPair::AddReference() {
+ CRYPTO_add(&pkey_->references, 1, CRYPTO_LOCK_EVP_PKEY);
+}
+
+#ifdef _DEBUG
+// Print a certificate to the log, for debugging.
+static void PrintCert(X509* x509) {
+ BIO* temp_memory_bio = BIO_new(BIO_s_mem());
+ if (!temp_memory_bio) {
+ LOG_F(LS_ERROR) << "Failed to allocate temporary memory bio";
+ return;
+ }
+ X509_print_ex(temp_memory_bio, x509, XN_FLAG_SEP_CPLUS_SPC, 0);
+ BIO_write(temp_memory_bio, "\0", 1);
+ char* buffer;
+ BIO_get_mem_data(temp_memory_bio, &buffer);
+ LOG(LS_VERBOSE) << buffer;
+ BIO_free(temp_memory_bio);
+}
+#endif
+
+OpenSSLCertificate* OpenSSLCertificate::Generate(
+ OpenSSLKeyPair* key_pair, const SSLIdentityParams& params) {
+ SSLIdentityParams actual_params(params);
+ if (actual_params.common_name.empty()) {
+ // Use a random string, arbitrarily 8 chars long.
+ actual_params.common_name = CreateRandomString(8);
+ }
+ X509* x509 = MakeCertificate(key_pair->pkey(), actual_params);
+ if (!x509) {
+ LogSSLErrors("Generating certificate");
+ return NULL;
+ }
+#ifdef _DEBUG
+ PrintCert(x509);
+#endif
+ OpenSSLCertificate* ret = new OpenSSLCertificate(x509);
+ X509_free(x509);
+ return ret;
+}
+
+OpenSSLCertificate* OpenSSLCertificate::FromPEMString(
+ const std::string& pem_string) {
+ BIO* bio = BIO_new_mem_buf(const_cast<char*>(pem_string.c_str()), -1);
+ if (!bio)
+ return NULL;
+ BIO_set_mem_eof_return(bio, 0);
+ X509 *x509 = PEM_read_bio_X509(bio, NULL, NULL,
+ const_cast<char*>("\0"));
+ BIO_free(bio); // Frees the BIO, but not the pointed-to string.
+
+ if (!x509)
+ return NULL;
+
+ OpenSSLCertificate* ret = new OpenSSLCertificate(x509);
+ X509_free(x509);
+ return ret;
+}
+
+// NOTE: This implementation only functions correctly after InitializeSSL
+// and before CleanupSSL.
+bool OpenSSLCertificate::GetSignatureDigestAlgorithm(
+ std::string* algorithm) const {
+ return OpenSSLDigest::GetDigestName(
+ EVP_get_digestbyobj(x509_->sig_alg->algorithm), algorithm);
+}
+
+bool OpenSSLCertificate::ComputeDigest(const std::string& algorithm,
+ unsigned char* digest,
+ size_t size,
+ size_t* length) const {
+ return ComputeDigest(x509_, algorithm, digest, size, length);
+}
+
+bool OpenSSLCertificate::ComputeDigest(const X509* x509,
+ const std::string& algorithm,
+ unsigned char* digest,
+ size_t size,
+ size_t* length) {
+ const EVP_MD *md;
+ unsigned int n;
+
+ if (!OpenSSLDigest::GetDigestEVP(algorithm, &md))
+ return false;
+
+ if (size < static_cast<size_t>(EVP_MD_size(md)))
+ return false;
+
+ X509_digest(x509, md, digest, &n);
+
+ *length = n;
+
+ return true;
+}
+
+OpenSSLCertificate::~OpenSSLCertificate() {
+ X509_free(x509_);
+}
+
+std::string OpenSSLCertificate::ToPEMString() const {
+ BIO* bio = BIO_new(BIO_s_mem());
+ if (!bio) {
+ UNREACHABLE();
+ return std::string();
+ }
+ if (!PEM_write_bio_X509(bio, x509_)) {
+ BIO_free(bio);
+ UNREACHABLE();
+ return std::string();
+ }
+ BIO_write(bio, "\0", 1);
+ char* buffer;
+ BIO_get_mem_data(bio, &buffer);
+ std::string ret(buffer);
+ BIO_free(bio);
+ return ret;
+}
+
+void OpenSSLCertificate::ToDER(Buffer* der_buffer) const {
+ // In case of failure, make sure to leave the buffer empty.
+ der_buffer->SetData(NULL, 0);
+
+ // Calculates the DER representation of the certificate, from scratch.
+ BIO* bio = BIO_new(BIO_s_mem());
+ if (!bio) {
+ UNREACHABLE();
+ return;
+ }
+ if (!i2d_X509_bio(bio, x509_)) {
+ BIO_free(bio);
+ UNREACHABLE();
+ return;
+ }
+ char* data;
+ size_t length = BIO_get_mem_data(bio, &data);
+ der_buffer->SetData(data, length);
+ BIO_free(bio);
+}
+
+void OpenSSLCertificate::AddReference() const {
+ ASSERT(x509_ != NULL);
+ CRYPTO_add(&x509_->references, 1, CRYPTO_LOCK_X509);
+}
+
+OpenSSLIdentity* OpenSSLIdentity::GenerateInternal(
+ const SSLIdentityParams& params) {
+ OpenSSLKeyPair *key_pair = OpenSSLKeyPair::Generate();
+ if (key_pair) {
+ OpenSSLCertificate *certificate = OpenSSLCertificate::Generate(
+ key_pair, params);
+ if (certificate)
+ return new OpenSSLIdentity(key_pair, certificate);
+ delete key_pair;
+ }
+ LOG(LS_INFO) << "Identity generation failed";
+ return NULL;
+}
+
+OpenSSLIdentity* OpenSSLIdentity::Generate(const std::string& common_name) {
+ SSLIdentityParams params;
+ params.common_name = common_name;
+ params.not_before = CERTIFICATE_WINDOW;
+ params.not_after = CERTIFICATE_LIFETIME;
+ return GenerateInternal(params);
+}
+
+OpenSSLIdentity* OpenSSLIdentity::GenerateForTest(
+ const SSLIdentityParams& params) {
+ return GenerateInternal(params);
+}
+
+SSLIdentity* OpenSSLIdentity::FromPEMStrings(
+ const std::string& private_key,
+ const std::string& certificate) {
+ scoped_ptr<OpenSSLCertificate> cert(
+ OpenSSLCertificate::FromPEMString(certificate));
+ if (!cert) {
+ LOG(LS_ERROR) << "Failed to create OpenSSLCertificate from PEM string.";
+ return NULL;
+ }
+
+ BIO* bio = BIO_new_mem_buf(const_cast<char*>(private_key.c_str()), -1);
+ if (!bio) {
+ LOG(LS_ERROR) << "Failed to create a new BIO buffer.";
+ return NULL;
+ }
+ BIO_set_mem_eof_return(bio, 0);
+ EVP_PKEY *pkey = PEM_read_bio_PrivateKey(bio, NULL, NULL,
+ const_cast<char*>("\0"));
+ BIO_free(bio); // Frees the BIO, but not the pointed-to string.
+
+ if (!pkey) {
+ LOG(LS_ERROR) << "Failed to create the private key from PEM string.";
+ return NULL;
+ }
+
+ return new OpenSSLIdentity(new OpenSSLKeyPair(pkey),
+ cert.release());
+}
+
+bool OpenSSLIdentity::ConfigureIdentity(SSL_CTX* ctx) {
+ // 1 is the documented success return code.
+ if (SSL_CTX_use_certificate(ctx, certificate_->x509()) != 1 ||
+ SSL_CTX_use_PrivateKey(ctx, key_pair_->pkey()) != 1) {
+ LogSSLErrors("Configuring key and certificate");
+ return false;
+ }
+ return true;
+}
+
+} // namespace rtc
+
+#endif // HAVE_OPENSSL_SSL_H
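
A usage sketch for the identity code above: generate a throwaway self-signed identity (valid from CERTIFICATE_WINDOW in the past until CERTIFICATE_LIFETIME from now) and install it into an SSL_CTX, which is what a DTLS endpoint does before handshaking. The common name passed here is arbitrary, as the comment in MakeCertificate() explains:

    #include <openssl/ssl.h>
    #include "webrtc/base/opensslidentity.h"
    #include "webrtc/base/scoped_ptr.h"

    // Returns true once the fresh certificate and private key are attached.
    bool AttachFreshIdentity(SSL_CTX* ctx) {
      rtc::scoped_ptr<rtc::OpenSSLIdentity> identity(
          rtc::OpenSSLIdentity::Generate("WebRTC"));  // Arbitrary common name.
      if (!identity)
        return false;
      return identity->ConfigureIdentity(ctx);
    }
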
diff --git a/chromium/third_party/webrtc/base/opensslidentity.h b/chromium/third_party/webrtc/base/opensslidentity.h
new file mode 100644
index 00000000000..e52cd10a9e6
--- /dev/null
+++ b/chromium/third_party/webrtc/base/opensslidentity.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_OPENSSLIDENTITY_H_
+#define WEBRTC_BASE_OPENSSLIDENTITY_H_
+
+#include <openssl/evp.h>
+#include <openssl/x509.h>
+
+#include <string>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sslidentity.h"
+
+typedef struct ssl_ctx_st SSL_CTX;
+
+namespace rtc {
+
+// OpenSSLKeyPair encapsulates an OpenSSL EVP_PKEY* keypair object,
+// which is reference counted inside the OpenSSL library.
+class OpenSSLKeyPair {
+ public:
+ explicit OpenSSLKeyPair(EVP_PKEY* pkey) : pkey_(pkey) {
+ ASSERT(pkey_ != NULL);
+ }
+
+ static OpenSSLKeyPair* Generate();
+
+ virtual ~OpenSSLKeyPair();
+
+ virtual OpenSSLKeyPair* GetReference() {
+ AddReference();
+ return new OpenSSLKeyPair(pkey_);
+ }
+
+ EVP_PKEY* pkey() const { return pkey_; }
+
+ private:
+ void AddReference();
+
+ EVP_PKEY* pkey_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(OpenSSLKeyPair);
+};
+
+// OpenSSLCertificate encapsulates an OpenSSL X509* certificate object,
+// which is also reference counted inside the OpenSSL library.
+class OpenSSLCertificate : public SSLCertificate {
+ public:
+ // Caller retains ownership of the X509 object.
+ explicit OpenSSLCertificate(X509* x509) : x509_(x509) {
+ AddReference();
+ }
+
+ static OpenSSLCertificate* Generate(OpenSSLKeyPair* key_pair,
+ const SSLIdentityParams& params);
+ static OpenSSLCertificate* FromPEMString(const std::string& pem_string);
+
+ virtual ~OpenSSLCertificate();
+
+ virtual OpenSSLCertificate* GetReference() const {
+ return new OpenSSLCertificate(x509_);
+ }
+
+ X509* x509() const { return x509_; }
+
+ virtual std::string ToPEMString() const;
+
+ virtual void ToDER(Buffer* der_buffer) const;
+
+  // Computes the digest of the certificate using the given algorithm.
+ virtual bool ComputeDigest(const std::string& algorithm,
+ unsigned char* digest,
+ size_t size,
+ size_t* length) const;
+
+  // Computes the digest of a certificate supplied as a raw X509*.
+ static bool ComputeDigest(const X509* x509,
+ const std::string& algorithm,
+ unsigned char* digest,
+ size_t size,
+ size_t* length);
+
+ virtual bool GetSignatureDigestAlgorithm(std::string* algorithm) const;
+
+ virtual bool GetChain(SSLCertChain** chain) const {
+ // Chains are not yet supported when using OpenSSL.
+ // OpenSSLStreamAdapter::SSLVerifyCallback currently requires the remote
+ // certificate to be self-signed.
+ return false;
+ }
+
+ private:
+ void AddReference() const;
+
+ X509* x509_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(OpenSSLCertificate);
+};
+
+// Holds a keypair and certificate together, and a method to generate
+// them consistently.
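+//
+// A minimal usage sketch (not taken from this file; `ctx` is an SSL_CTX*
+// owned by the caller and "peer" is an illustrative common name):
+//
+//   scoped_ptr<OpenSSLIdentity> identity(OpenSSLIdentity::Generate("peer"));
+//   if (identity.get() && identity->ConfigureIdentity(ctx)) {
+//     // ctx now uses the generated key and certificate.
+//   }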
+class OpenSSLIdentity : public SSLIdentity {
+ public:
+ static OpenSSLIdentity* Generate(const std::string& common_name);
+ static OpenSSLIdentity* GenerateForTest(const SSLIdentityParams& params);
+ static SSLIdentity* FromPEMStrings(const std::string& private_key,
+ const std::string& certificate);
+ virtual ~OpenSSLIdentity() { }
+
+ virtual const OpenSSLCertificate& certificate() const {
+ return *certificate_;
+ }
+
+ virtual OpenSSLIdentity* GetReference() const {
+ return new OpenSSLIdentity(key_pair_->GetReference(),
+ certificate_->GetReference());
+ }
+
+ // Configure an SSL context object to use our key and certificate.
+ bool ConfigureIdentity(SSL_CTX* ctx);
+
+ private:
+ OpenSSLIdentity(OpenSSLKeyPair* key_pair,
+ OpenSSLCertificate* certificate)
+ : key_pair_(key_pair), certificate_(certificate) {
+ ASSERT(key_pair != NULL);
+ ASSERT(certificate != NULL);
+ }
+
+ static OpenSSLIdentity* GenerateInternal(const SSLIdentityParams& params);
+
+ scoped_ptr<OpenSSLKeyPair> key_pair_;
+ scoped_ptr<OpenSSLCertificate> certificate_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(OpenSSLIdentity);
+};
+
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_OPENSSLIDENTITY_H_
diff --git a/chromium/third_party/webrtc/base/opensslstreamadapter.cc b/chromium/third_party/webrtc/base/opensslstreamadapter.cc
new file mode 100644
index 00000000000..5eaeb1b5f63
--- /dev/null
+++ b/chromium/third_party/webrtc/base/opensslstreamadapter.cc
@@ -0,0 +1,857 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if HAVE_CONFIG_H
+#include "config.h"
+#endif // HAVE_CONFIG_H
+
+#if HAVE_OPENSSL_SSL_H
+
+#include "webrtc/base/opensslstreamadapter.h"
+
+#include <openssl/bio.h>
+#include <openssl/crypto.h>
+#include <openssl/err.h>
+#include <openssl/rand.h>
+#include <openssl/x509v3.h>
+
+#include <vector>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/openssl.h"
+#include "webrtc/base/openssladapter.h"
+#include "webrtc/base/openssldigest.h"
+#include "webrtc/base/opensslidentity.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+#if (OPENSSL_VERSION_NUMBER >= 0x10001000L)
+#define HAVE_DTLS_SRTP
+#endif
+
+#ifdef HAVE_DTLS_SRTP
+// SRTP cipher suite table
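+// Maps the SRTP cipher names used at the WebRTC/SDP level (external_name)
+// to the profile names that OpenSSL expects in
+// SSL_CTX_set_tlsext_use_srtp() (internal_name).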
+struct SrtpCipherMapEntry {
+ const char* external_name;
+ const char* internal_name;
+};
+
+// This isn't elegant, but it's better than an external reference
+static SrtpCipherMapEntry SrtpCipherMap[] = {
+ {"AES_CM_128_HMAC_SHA1_80", "SRTP_AES128_CM_SHA1_80"},
+ {"AES_CM_128_HMAC_SHA1_32", "SRTP_AES128_CM_SHA1_32"},
+ {NULL, NULL}
+};
+#endif
+
+//////////////////////////////////////////////////////////////////////
+// StreamBIO
+//////////////////////////////////////////////////////////////////////
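+// A custom BIO_METHOD that adapts a rtc::StreamInterface to OpenSSL's BIO
+// abstraction, so the SSL object can run on top of any stream
+// implementation rather than a plain socket. Reads and writes are
+// forwarded to the wrapped stream, with SR_BLOCK mapped to the BIO retry
+// flags.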
+
+static int stream_write(BIO* h, const char* buf, int num);
+static int stream_read(BIO* h, char* buf, int size);
+static int stream_puts(BIO* h, const char* str);
+static long stream_ctrl(BIO* h, int cmd, long arg1, void* arg2);
+static int stream_new(BIO* h);
+static int stream_free(BIO* data);
+
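+// Field order follows the OpenSSL 1.0.x BIO_METHOD layout: type, name,
+// bwrite, bread, bputs, bgets (unused), ctrl, create, destroy and
+// callback_ctrl.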
+static BIO_METHOD methods_stream = {
+ BIO_TYPE_BIO,
+ "stream",
+ stream_write,
+ stream_read,
+ stream_puts,
+ 0,
+ stream_ctrl,
+ stream_new,
+ stream_free,
+ NULL,
+};
+
+static BIO_METHOD* BIO_s_stream() { return(&methods_stream); }
+
+static BIO* BIO_new_stream(StreamInterface* stream) {
+ BIO* ret = BIO_new(BIO_s_stream());
+ if (ret == NULL)
+ return NULL;
+ ret->ptr = stream;
+ return ret;
+}
+
+// bio methods return 1 (or at least non-zero) on success and 0 on failure.
+
+static int stream_new(BIO* b) {
+ b->shutdown = 0;
+ b->init = 1;
+ b->num = 0; // 1 means end-of-stream
+ b->ptr = 0;
+ return 1;
+}
+
+static int stream_free(BIO* b) {
+ if (b == NULL)
+ return 0;
+ return 1;
+}
+
+static int stream_read(BIO* b, char* out, int outl) {
+ if (!out)
+ return -1;
+ StreamInterface* stream = static_cast<StreamInterface*>(b->ptr);
+ BIO_clear_retry_flags(b);
+ size_t read;
+ int error;
+ StreamResult result = stream->Read(out, outl, &read, &error);
+ if (result == SR_SUCCESS) {
+ return read;
+ } else if (result == SR_EOS) {
+ b->num = 1;
+ } else if (result == SR_BLOCK) {
+ BIO_set_retry_read(b);
+ }
+ return -1;
+}
+
+static int stream_write(BIO* b, const char* in, int inl) {
+ if (!in)
+ return -1;
+ StreamInterface* stream = static_cast<StreamInterface*>(b->ptr);
+ BIO_clear_retry_flags(b);
+ size_t written;
+ int error;
+ StreamResult result = stream->Write(in, inl, &written, &error);
+ if (result == SR_SUCCESS) {
+ return written;
+ } else if (result == SR_BLOCK) {
+ BIO_set_retry_write(b);
+ }
+ return -1;
+}
+
+static int stream_puts(BIO* b, const char* str) {
+ return stream_write(b, str, strlen(str));
+}
+
+static long stream_ctrl(BIO* b, int cmd, long num, void* ptr) {
+ RTC_UNUSED(num);
+ RTC_UNUSED(ptr);
+
+ switch (cmd) {
+ case BIO_CTRL_RESET:
+ return 0;
+ case BIO_CTRL_EOF:
+ return b->num;
+ case BIO_CTRL_WPENDING:
+ case BIO_CTRL_PENDING:
+ return 0;
+ case BIO_CTRL_FLUSH:
+ return 1;
+ default:
+ return 0;
+ }
+}
+
+/////////////////////////////////////////////////////////////////////////////
+// OpenSSLStreamAdapter
+/////////////////////////////////////////////////////////////////////////////
+
+OpenSSLStreamAdapter::OpenSSLStreamAdapter(StreamInterface* stream)
+ : SSLStreamAdapter(stream),
+ state_(SSL_NONE),
+ role_(SSL_CLIENT),
+ ssl_read_needs_write_(false), ssl_write_needs_read_(false),
+ ssl_(NULL), ssl_ctx_(NULL),
+ custom_verification_succeeded_(false),
+ ssl_mode_(SSL_MODE_TLS) {
+}
+
+OpenSSLStreamAdapter::~OpenSSLStreamAdapter() {
+ Cleanup();
+}
+
+void OpenSSLStreamAdapter::SetIdentity(SSLIdentity* identity) {
+ ASSERT(!identity_);
+ identity_.reset(static_cast<OpenSSLIdentity*>(identity));
+}
+
+void OpenSSLStreamAdapter::SetServerRole(SSLRole role) {
+ role_ = role;
+}
+
+bool OpenSSLStreamAdapter::GetPeerCertificate(SSLCertificate** cert) const {
+ if (!peer_certificate_)
+ return false;
+
+ *cert = peer_certificate_->GetReference();
+ return true;
+}
+
+bool OpenSSLStreamAdapter::SetPeerCertificateDigest(
+    const std::string& digest_alg,
+    const unsigned char* digest_val,
+    size_t digest_len) {
+ ASSERT(!peer_certificate_);
+ ASSERT(peer_certificate_digest_algorithm_.size() == 0);
+ ASSERT(ssl_server_name_.empty());
+ size_t expected_len;
+
+ if (!OpenSSLDigest::GetDigestSize(digest_alg, &expected_len)) {
+ LOG(LS_WARNING) << "Unknown digest algorithm: " << digest_alg;
+ return false;
+ }
+ if (expected_len != digest_len)
+ return false;
+
+ peer_certificate_digest_value_.SetData(digest_val, digest_len);
+ peer_certificate_digest_algorithm_ = digest_alg;
+
+ return true;
+}
+
+// Key Extractor interface
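+// Uses the TLS keying material exporter (RFC 5705); for DTLS-SRTP this is
+// how the SRTP master key material is derived from the handshake.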
+bool OpenSSLStreamAdapter::ExportKeyingMaterial(const std::string& label,
+ const uint8* context,
+ size_t context_len,
+ bool use_context,
+ uint8* result,
+ size_t result_len) {
+#ifdef HAVE_DTLS_SRTP
+ int i;
+
+ i = SSL_export_keying_material(ssl_, result, result_len,
+ label.c_str(), label.length(),
+ const_cast<uint8 *>(context),
+ context_len, use_context);
+
+ if (i != 1)
+ return false;
+
+ return true;
+#else
+ return false;
+#endif
+}
+
+bool OpenSSLStreamAdapter::SetDtlsSrtpCiphers(
+ const std::vector<std::string>& ciphers) {
+#ifdef HAVE_DTLS_SRTP
+ std::string internal_ciphers;
+
+ if (state_ != SSL_NONE)
+ return false;
+
+ for (std::vector<std::string>::const_iterator cipher = ciphers.begin();
+ cipher != ciphers.end(); ++cipher) {
+ bool found = false;
+ for (SrtpCipherMapEntry *entry = SrtpCipherMap; entry->internal_name;
+ ++entry) {
+ if (*cipher == entry->external_name) {
+ found = true;
+ if (!internal_ciphers.empty())
+ internal_ciphers += ":";
+ internal_ciphers += entry->internal_name;
+ break;
+ }
+ }
+
+ if (!found) {
+ LOG(LS_ERROR) << "Could not find cipher: " << *cipher;
+ return false;
+ }
+ }
+
+ if (internal_ciphers.empty())
+ return false;
+
+ srtp_ciphers_ = internal_ciphers;
+ return true;
+#else
+ return false;
+#endif
+}
+
+bool OpenSSLStreamAdapter::GetDtlsSrtpCipher(std::string* cipher) {
+#ifdef HAVE_DTLS_SRTP
+ ASSERT(state_ == SSL_CONNECTED);
+ if (state_ != SSL_CONNECTED)
+ return false;
+
+ SRTP_PROTECTION_PROFILE *srtp_profile =
+ SSL_get_selected_srtp_profile(ssl_);
+
+ if (!srtp_profile)
+ return false;
+
+ for (SrtpCipherMapEntry *entry = SrtpCipherMap;
+ entry->internal_name; ++entry) {
+ if (!strcmp(entry->internal_name, srtp_profile->name)) {
+ *cipher = entry->external_name;
+ return true;
+ }
+ }
+
+ ASSERT(false); // This should never happen
+
+ return false;
+#else
+ return false;
+#endif
+}
+
+int OpenSSLStreamAdapter::StartSSLWithServer(const char* server_name) {
+ ASSERT(server_name != NULL && server_name[0] != '\0');
+ ssl_server_name_ = server_name;
+ return StartSSL();
+}
+
+int OpenSSLStreamAdapter::StartSSLWithPeer() {
+ ASSERT(ssl_server_name_.empty());
+  // It is permissible to specify the peer certificate digest later.
+ return StartSSL();
+}
+
+void OpenSSLStreamAdapter::SetMode(SSLMode mode) {
+ ASSERT(state_ == SSL_NONE);
+ ssl_mode_ = mode;
+}
+
+//
+// StreamInterface Implementation
+//
+
+StreamResult OpenSSLStreamAdapter::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Write(" << data_len << ")";
+
+ switch (state_) {
+ case SSL_NONE:
+ // pass-through in clear text
+ return StreamAdapterInterface::Write(data, data_len, written, error);
+
+ case SSL_WAIT:
+ case SSL_CONNECTING:
+ return SR_BLOCK;
+
+ case SSL_CONNECTED:
+ break;
+
+ case SSL_ERROR:
+ case SSL_CLOSED:
+ default:
+ if (error)
+ *error = ssl_error_code_;
+ return SR_ERROR;
+ }
+
+ // OpenSSL will return an error if we try to write zero bytes
+ if (data_len == 0) {
+ if (written)
+ *written = 0;
+ return SR_SUCCESS;
+ }
+
+ ssl_write_needs_read_ = false;
+
+ int code = SSL_write(ssl_, data, data_len);
+ int ssl_error = SSL_get_error(ssl_, code);
+ switch (ssl_error) {
+ case SSL_ERROR_NONE:
+ LOG(LS_VERBOSE) << " -- success";
+ ASSERT(0 < code && static_cast<unsigned>(code) <= data_len);
+ if (written)
+ *written = code;
+ return SR_SUCCESS;
+ case SSL_ERROR_WANT_READ:
+ LOG(LS_VERBOSE) << " -- error want read";
+ ssl_write_needs_read_ = true;
+ return SR_BLOCK;
+ case SSL_ERROR_WANT_WRITE:
+ LOG(LS_VERBOSE) << " -- error want write";
+ return SR_BLOCK;
+
+ case SSL_ERROR_ZERO_RETURN:
+ default:
+ Error("SSL_write", (ssl_error ? ssl_error : -1), false);
+ if (error)
+ *error = ssl_error_code_;
+ return SR_ERROR;
+ }
+ // not reached
+}
+
+StreamResult OpenSSLStreamAdapter::Read(void* data, size_t data_len,
+ size_t* read, int* error) {
+ LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Read(" << data_len << ")";
+ switch (state_) {
+ case SSL_NONE:
+ // pass-through in clear text
+ return StreamAdapterInterface::Read(data, data_len, read, error);
+
+ case SSL_WAIT:
+ case SSL_CONNECTING:
+ return SR_BLOCK;
+
+ case SSL_CONNECTED:
+ break;
+
+ case SSL_CLOSED:
+ return SR_EOS;
+
+ case SSL_ERROR:
+ default:
+ if (error)
+ *error = ssl_error_code_;
+ return SR_ERROR;
+ }
+
+ // Don't trust OpenSSL with zero byte reads
+ if (data_len == 0) {
+ if (read)
+ *read = 0;
+ return SR_SUCCESS;
+ }
+
+ ssl_read_needs_write_ = false;
+
+ int code = SSL_read(ssl_, data, data_len);
+ int ssl_error = SSL_get_error(ssl_, code);
+ switch (ssl_error) {
+ case SSL_ERROR_NONE:
+ LOG(LS_VERBOSE) << " -- success";
+ ASSERT(0 < code && static_cast<unsigned>(code) <= data_len);
+ if (read)
+ *read = code;
+
+ if (ssl_mode_ == SSL_MODE_DTLS) {
+ // Enforce atomic reads -- this is a short read
+ unsigned int pending = SSL_pending(ssl_);
+
+ if (pending) {
+ LOG(LS_INFO) << " -- short DTLS read. flushing";
+ FlushInput(pending);
+ if (error)
+ *error = SSE_MSG_TRUNC;
+ return SR_ERROR;
+ }
+ }
+ return SR_SUCCESS;
+ case SSL_ERROR_WANT_READ:
+ LOG(LS_VERBOSE) << " -- error want read";
+ return SR_BLOCK;
+ case SSL_ERROR_WANT_WRITE:
+ LOG(LS_VERBOSE) << " -- error want write";
+ ssl_read_needs_write_ = true;
+ return SR_BLOCK;
+ case SSL_ERROR_ZERO_RETURN:
+ LOG(LS_VERBOSE) << " -- remote side closed";
+ return SR_EOS;
+ default:
+ LOG(LS_VERBOSE) << " -- error " << code;
+ Error("SSL_read", (ssl_error ? ssl_error : -1), false);
+ if (error)
+ *error = ssl_error_code_;
+ return SR_ERROR;
+ }
+ // not reached
+}
+
+void OpenSSLStreamAdapter::FlushInput(unsigned int left) {
+ unsigned char buf[2048];
+
+ while (left) {
+ // This should always succeed
+ int toread = (sizeof(buf) < left) ? sizeof(buf) : left;
+ int code = SSL_read(ssl_, buf, toread);
+
+ int ssl_error = SSL_get_error(ssl_, code);
+ ASSERT(ssl_error == SSL_ERROR_NONE);
+
+ if (ssl_error != SSL_ERROR_NONE) {
+ LOG(LS_VERBOSE) << " -- error " << code;
+ Error("SSL_read", (ssl_error ? ssl_error : -1), false);
+ return;
+ }
+
+ LOG(LS_VERBOSE) << " -- flushed " << code << " bytes";
+ left -= code;
+ }
+}
+
+void OpenSSLStreamAdapter::Close() {
+ Cleanup();
+ ASSERT(state_ == SSL_CLOSED || state_ == SSL_ERROR);
+ StreamAdapterInterface::Close();
+}
+
+StreamState OpenSSLStreamAdapter::GetState() const {
+ switch (state_) {
+ case SSL_WAIT:
+ case SSL_CONNECTING:
+ return SS_OPENING;
+ case SSL_CONNECTED:
+ return SS_OPEN;
+ default:
+ return SS_CLOSED;
+  }
+ // not reached
+}
+
+void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, int events,
+ int err) {
+ int events_to_signal = 0;
+ int signal_error = 0;
+ ASSERT(stream == this->stream());
+ if ((events & SE_OPEN)) {
+ LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent SE_OPEN";
+ if (state_ != SSL_WAIT) {
+ ASSERT(state_ == SSL_NONE);
+ events_to_signal |= SE_OPEN;
+ } else {
+ state_ = SSL_CONNECTING;
+ if (int err = BeginSSL()) {
+ Error("BeginSSL", err, true);
+ return;
+ }
+ }
+ }
+ if ((events & (SE_READ|SE_WRITE))) {
+ LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent"
+ << ((events & SE_READ) ? " SE_READ" : "")
+ << ((events & SE_WRITE) ? " SE_WRITE" : "");
+ if (state_ == SSL_NONE) {
+ events_to_signal |= events & (SE_READ|SE_WRITE);
+ } else if (state_ == SSL_CONNECTING) {
+ if (int err = ContinueSSL()) {
+ Error("ContinueSSL", err, true);
+ return;
+ }
+ } else if (state_ == SSL_CONNECTED) {
+ if (((events & SE_READ) && ssl_write_needs_read_) ||
+ (events & SE_WRITE)) {
+ LOG(LS_VERBOSE) << " -- onStreamWriteable";
+ events_to_signal |= SE_WRITE;
+ }
+ if (((events & SE_WRITE) && ssl_read_needs_write_) ||
+ (events & SE_READ)) {
+ LOG(LS_VERBOSE) << " -- onStreamReadable";
+ events_to_signal |= SE_READ;
+ }
+ }
+ }
+ if ((events & SE_CLOSE)) {
+ LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent(SE_CLOSE, " << err << ")";
+ Cleanup();
+ events_to_signal |= SE_CLOSE;
+ // SE_CLOSE is the only event that uses the final parameter to OnEvent().
+ ASSERT(signal_error == 0);
+ signal_error = err;
+ }
+ if (events_to_signal)
+ StreamAdapterInterface::OnEvent(stream, events_to_signal, signal_error);
+}
+
+int OpenSSLStreamAdapter::StartSSL() {
+ ASSERT(state_ == SSL_NONE);
+
+ if (StreamAdapterInterface::GetState() != SS_OPEN) {
+ state_ = SSL_WAIT;
+ return 0;
+ }
+
+ state_ = SSL_CONNECTING;
+ if (int err = BeginSSL()) {
+ Error("BeginSSL", err, false);
+ return err;
+ }
+
+ return 0;
+}
+
+int OpenSSLStreamAdapter::BeginSSL() {
+ ASSERT(state_ == SSL_CONNECTING);
+  // The underlying stream has opened. If we are in peer-to-peer mode,
+  // then a peer certificate digest must have been specified by now.
+ ASSERT(!ssl_server_name_.empty() ||
+ !peer_certificate_digest_algorithm_.empty());
+ LOG(LS_INFO) << "BeginSSL: "
+ << (!ssl_server_name_.empty() ? ssl_server_name_ :
+ "with peer");
+
+ BIO* bio = NULL;
+
+ // First set up the context
+ ASSERT(ssl_ctx_ == NULL);
+ ssl_ctx_ = SetupSSLContext();
+ if (!ssl_ctx_)
+ return -1;
+
+ bio = BIO_new_stream(static_cast<StreamInterface*>(stream()));
+ if (!bio)
+ return -1;
+
+ ssl_ = SSL_new(ssl_ctx_);
+ if (!ssl_) {
+ BIO_free(bio);
+ return -1;
+ }
+
+ SSL_set_app_data(ssl_, this);
+
+ SSL_set_bio(ssl_, bio, bio); // the SSL object owns the bio now.
+
+ SSL_set_mode(ssl_, SSL_MODE_ENABLE_PARTIAL_WRITE |
+ SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER);
+
+ // Do the connect
+ return ContinueSSL();
+}
+
+int OpenSSLStreamAdapter::ContinueSSL() {
+ LOG(LS_VERBOSE) << "ContinueSSL";
+ ASSERT(state_ == SSL_CONNECTING);
+
+ // Clear the DTLS timer
+ Thread::Current()->Clear(this, MSG_TIMEOUT);
+
+ int code = (role_ == SSL_CLIENT) ? SSL_connect(ssl_) : SSL_accept(ssl_);
+ int ssl_error;
+ switch (ssl_error = SSL_get_error(ssl_, code)) {
+ case SSL_ERROR_NONE:
+ LOG(LS_VERBOSE) << " -- success";
+
+ if (!SSLPostConnectionCheck(ssl_, ssl_server_name_.c_str(), NULL,
+ peer_certificate_digest_algorithm_)) {
+ LOG(LS_ERROR) << "TLS post connection check failed";
+ return -1;
+ }
+
+ state_ = SSL_CONNECTED;
+ StreamAdapterInterface::OnEvent(stream(), SE_OPEN|SE_READ|SE_WRITE, 0);
+ break;
+
+ case SSL_ERROR_WANT_READ: {
+ LOG(LS_VERBOSE) << " -- error want read";
+ struct timeval timeout;
+ if (DTLSv1_get_timeout(ssl_, &timeout)) {
+ int delay = timeout.tv_sec * 1000 + timeout.tv_usec/1000;
+
+ Thread::Current()->PostDelayed(delay, this, MSG_TIMEOUT, 0);
+ }
+ }
+ break;
+
+ case SSL_ERROR_WANT_WRITE:
+ LOG(LS_VERBOSE) << " -- error want write";
+ break;
+
+ case SSL_ERROR_ZERO_RETURN:
+ default:
+ LOG(LS_VERBOSE) << " -- error " << code;
+ return (ssl_error != 0) ? ssl_error : -1;
+ }
+
+ return 0;
+}
+
+void OpenSSLStreamAdapter::Error(const char* context, int err, bool signal) {
+ LOG(LS_WARNING) << "OpenSSLStreamAdapter::Error("
+ << context << ", " << err << ")";
+ state_ = SSL_ERROR;
+ ssl_error_code_ = err;
+ Cleanup();
+ if (signal)
+ StreamAdapterInterface::OnEvent(stream(), SE_CLOSE, err);
+}
+
+void OpenSSLStreamAdapter::Cleanup() {
+ LOG(LS_INFO) << "Cleanup";
+
+ if (state_ != SSL_ERROR) {
+ state_ = SSL_CLOSED;
+ ssl_error_code_ = 0;
+ }
+
+ if (ssl_) {
+ SSL_free(ssl_);
+ ssl_ = NULL;
+ }
+ if (ssl_ctx_) {
+ SSL_CTX_free(ssl_ctx_);
+ ssl_ctx_ = NULL;
+ }
+ identity_.reset();
+ peer_certificate_.reset();
+
+ // Clear the DTLS timer
+ Thread::Current()->Clear(this, MSG_TIMEOUT);
+}
+
+
+void OpenSSLStreamAdapter::OnMessage(Message* msg) {
+ // Process our own messages and then pass others to the superclass
+ if (MSG_TIMEOUT == msg->message_id) {
+ LOG(LS_INFO) << "DTLS timeout expired";
+ DTLSv1_handle_timeout(ssl_);
+ ContinueSSL();
+ } else {
+ StreamInterface::OnMessage(msg);
+ }
+}
+
+SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() {
+ SSL_CTX *ctx = NULL;
+
+ if (role_ == SSL_CLIENT) {
+ ctx = SSL_CTX_new(ssl_mode_ == SSL_MODE_DTLS ?
+ DTLSv1_client_method() : TLSv1_client_method());
+ } else {
+ ctx = SSL_CTX_new(ssl_mode_ == SSL_MODE_DTLS ?
+ DTLSv1_server_method() : TLSv1_server_method());
+ }
+ if (ctx == NULL)
+ return NULL;
+
+ if (identity_ && !identity_->ConfigureIdentity(ctx)) {
+ SSL_CTX_free(ctx);
+ return NULL;
+ }
+
+#ifdef _DEBUG
+ SSL_CTX_set_info_callback(ctx, OpenSSLAdapter::SSLInfoCallback);
+#endif
+
+  SSL_CTX_set_verify(ctx, SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT,
+ SSLVerifyCallback);
+ SSL_CTX_set_verify_depth(ctx, 4);
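+  // Exclude anonymous (ADH), export/low-strength and MD5-based suites,
+  // and order the remaining ciphers by strength.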
+ SSL_CTX_set_cipher_list(ctx, "ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH");
+
+#ifdef HAVE_DTLS_SRTP
+ if (!srtp_ciphers_.empty()) {
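+    // Note the inverted convention: SSL_CTX_set_tlsext_use_srtp()
+    // returns 0 on success and 1 on error.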
+ if (SSL_CTX_set_tlsext_use_srtp(ctx, srtp_ciphers_.c_str())) {
+ SSL_CTX_free(ctx);
+ return NULL;
+ }
+ }
+#endif
+
+ return ctx;
+}
+
+int OpenSSLStreamAdapter::SSLVerifyCallback(int ok, X509_STORE_CTX* store) {
+ // Get our SSL structure from the store
+ SSL* ssl = reinterpret_cast<SSL*>(X509_STORE_CTX_get_ex_data(
+ store,
+ SSL_get_ex_data_X509_STORE_CTX_idx()));
+ OpenSSLStreamAdapter* stream =
+ reinterpret_cast<OpenSSLStreamAdapter*>(SSL_get_app_data(ssl));
+
+ if (stream->peer_certificate_digest_algorithm_.empty()) {
+ return 0;
+ }
+ X509* cert = X509_STORE_CTX_get_current_cert(store);
+ int depth = X509_STORE_CTX_get_error_depth(store);
+
+  // For now, we ignore the parent certificates and verify the leaf against
+ // the digest.
+ //
+ // TODO(jiayl): Verify the chain is a proper chain and report the chain to
+ // |stream->peer_certificate_|, like what NSS does.
+ if (depth > 0) {
+ LOG(LS_INFO) << "Ignored chained certificate at depth " << depth;
+ return 1;
+ }
+
+ unsigned char digest[EVP_MAX_MD_SIZE];
+ size_t digest_length;
+ if (!OpenSSLCertificate::ComputeDigest(
+ cert,
+ stream->peer_certificate_digest_algorithm_,
+ digest, sizeof(digest),
+ &digest_length)) {
+ LOG(LS_WARNING) << "Failed to compute peer cert digest.";
+ return 0;
+ }
+
+ Buffer computed_digest(digest, digest_length);
+ if (computed_digest != stream->peer_certificate_digest_value_) {
+ LOG(LS_WARNING) << "Rejected peer certificate due to mismatched digest.";
+ return 0;
+ }
+ // Ignore any verification error if the digest matches, since there is no
+ // value in checking the validity of a self-signed cert issued by untrusted
+ // sources.
+ LOG(LS_INFO) << "Accepted peer certificate.";
+
+ // Record the peer's certificate.
+ stream->peer_certificate_.reset(new OpenSSLCertificate(cert));
+ return 1;
+}
+
+// This code is taken from the "Network Security with OpenSSL"
+// sample in chapter 5
+bool OpenSSLStreamAdapter::SSLPostConnectionCheck(
+    SSL* ssl,
+    const char* server_name,
+    const X509* peer_cert,
+    const std::string& peer_digest) {
+ ASSERT(server_name != NULL);
+ bool ok;
+ if (server_name[0] != '\0') { // traditional mode
+ ok = OpenSSLAdapter::VerifyServerName(ssl, server_name, ignore_bad_cert());
+
+ if (ok) {
+ ok = (SSL_get_verify_result(ssl) == X509_V_OK ||
+ custom_verification_succeeded_);
+ }
+ } else { // peer-to-peer mode
+ ASSERT((peer_cert != NULL) || (!peer_digest.empty()));
+ // no server name validation
+ ok = true;
+ }
+
+ if (!ok && ignore_bad_cert()) {
+ LOG(LS_ERROR) << "SSL_get_verify_result(ssl) = "
+ << SSL_get_verify_result(ssl);
+ LOG(LS_INFO) << "Other TLS post connection checks failed.";
+ ok = true;
+ }
+
+ return ok;
+}
+
+bool OpenSSLStreamAdapter::HaveDtls() {
+ return true;
+}
+
+bool OpenSSLStreamAdapter::HaveDtlsSrtp() {
+#ifdef HAVE_DTLS_SRTP
+ return true;
+#else
+ return false;
+#endif
+}
+
+bool OpenSSLStreamAdapter::HaveExporter() {
+#ifdef HAVE_DTLS_SRTP
+ return true;
+#else
+ return false;
+#endif
+}
+
+} // namespace rtc
+
+#endif // HAVE_OPENSSL_SSL_H
diff --git a/chromium/third_party/webrtc/base/opensslstreamadapter.h b/chromium/third_party/webrtc/base/opensslstreamadapter.h
new file mode 100644
index 00000000000..9506217b4c3
--- /dev/null
+++ b/chromium/third_party/webrtc/base/opensslstreamadapter.h
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_OPENSSLSTREAMADAPTER_H__
+#define WEBRTC_BASE_OPENSSLSTREAMADAPTER_H__
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/opensslidentity.h"
+
+typedef struct ssl_st SSL;
+typedef struct ssl_ctx_st SSL_CTX;
+typedef struct x509_store_ctx_st X509_STORE_CTX;
+
+namespace rtc {
+
+// This class was written with OpenSSLAdapter (a socket adapter) as a
+// starting point. It has similar structure and functionality, with
+// the peer-to-peer mode added.
+//
+// Static methods to initialize and deinit the SSL library are in
+// OpenSSLAdapter. This class also uses
+// OpenSSLAdapter::custom_verify_callback_ (a static field). These
+// should probably be moved out to a neutral class.
+//
+// In a few cases I have factored out some OpenSSLAdapter code into
+// static methods so it can be reused from this class. Eventually that
+// code should probably be moved to a common support
+// class. Unfortunately there remain a few duplicated sections of
+// code. I have not done more restructuring because I did not want to
+// affect existing code that uses OpenSSLAdapter.
+//
+// This class does not support the SSL connection restart feature
+// present in OpenSSLAdapter. I am not entirely sure how the feature
+// is useful and I am not convinced that it works properly.
+//
+// This implementation is careful to disallow data exchange after an
+// SSL error, and it has an explicit SSL_CLOSED state. It should not
+// be possible to send any data in clear after one of the StartSSL
+// methods has been called.
+
+// Look in sslstreamadapter.h for documentation of the methods.
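+//
+// A minimal peer-to-peer usage sketch (not taken from this file;
+// `transport`, `identity` and the digest values are illustrative
+// placeholders owned by the caller):
+//
+//   OpenSSLStreamAdapter* ssl = new OpenSSLStreamAdapter(transport);
+//   ssl->SetIdentity(identity);  // Takes ownership of the identity.
+//   ssl->SetMode(SSL_MODE_DTLS);
+//   ssl->SetServerRole(SSL_CLIENT);
+//   ssl->SetPeerCertificateDigest(digest_alg, digest_val, digest_len);
+//   ssl->StartSSLWithPeer();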
+
+class OpenSSLIdentity;
+
+///////////////////////////////////////////////////////////////////////////////
+
+class OpenSSLStreamAdapter : public SSLStreamAdapter {
+ public:
+ explicit OpenSSLStreamAdapter(StreamInterface* stream);
+ virtual ~OpenSSLStreamAdapter();
+
+ virtual void SetIdentity(SSLIdentity* identity);
+
+ // Default argument is for compatibility
+ virtual void SetServerRole(SSLRole role = SSL_SERVER);
+ virtual bool SetPeerCertificateDigest(const std::string& digest_alg,
+ const unsigned char* digest_val,
+ size_t digest_len);
+
+ virtual bool GetPeerCertificate(SSLCertificate** cert) const;
+
+ virtual int StartSSLWithServer(const char* server_name);
+ virtual int StartSSLWithPeer();
+ virtual void SetMode(SSLMode mode);
+
+ virtual StreamResult Read(void* data, size_t data_len,
+ size_t* read, int* error);
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+ virtual void Close();
+ virtual StreamState GetState() const;
+
+ // Key Extractor interface
+ virtual bool ExportKeyingMaterial(const std::string& label,
+ const uint8* context,
+ size_t context_len,
+ bool use_context,
+ uint8* result,
+ size_t result_len);
+
+
+ // DTLS-SRTP interface
+ virtual bool SetDtlsSrtpCiphers(const std::vector<std::string>& ciphers);
+ virtual bool GetDtlsSrtpCipher(std::string* cipher);
+
+ // Capabilities interfaces
+ static bool HaveDtls();
+ static bool HaveDtlsSrtp();
+ static bool HaveExporter();
+
+ protected:
+ virtual void OnEvent(StreamInterface* stream, int events, int err);
+
+ private:
+ enum SSLState {
+ // Before calling one of the StartSSL methods, data flows
+ // in clear text.
+ SSL_NONE,
+ SSL_WAIT, // waiting for the stream to open to start SSL negotiation
+ SSL_CONNECTING, // SSL negotiation in progress
+ SSL_CONNECTED, // SSL stream successfully established
+ SSL_ERROR, // some SSL error occurred, stream is closed
+ SSL_CLOSED // Clean close
+ };
+
+ enum { MSG_TIMEOUT = MSG_MAX+1};
+
+ // The following three methods return 0 on success and a negative
+ // error code on failure. The error code may be from OpenSSL or -1
+ // on some other error cases, so it can't really be interpreted
+ // unfortunately.
+
+ // Go from state SSL_NONE to either SSL_CONNECTING or SSL_WAIT,
+ // depending on whether the underlying stream is already open or
+ // not.
+ int StartSSL();
+ // Prepare SSL library, state is SSL_CONNECTING.
+ int BeginSSL();
+ // Perform SSL negotiation steps.
+ int ContinueSSL();
+
+  // Error handler helper. signal is given as true for errors in
+  // asynchronous contexts (when an error code could not be returned
+  // through some other method), and in that case an SE_CLOSE event is
+ // raised on the stream with the specified error.
+ // A 0 error means a graceful close, otherwise there is not really enough
+ // context to interpret the error code.
+ void Error(const char* context, int err, bool signal);
+ void Cleanup();
+
+ // Override MessageHandler
+ virtual void OnMessage(Message* msg);
+
+ // Flush the input buffers by reading left bytes (for DTLS)
+ void FlushInput(unsigned int left);
+
+ // SSL library configuration
+ SSL_CTX* SetupSSLContext();
+ // SSL verification check
+ bool SSLPostConnectionCheck(SSL* ssl, const char* server_name,
+ const X509* peer_cert,
+ const std::string& peer_digest);
+ // SSL certification verification error handler, called back from
+ // the openssl library. Returns an int interpreted as a boolean in
+ // the C style: zero means verification failure, non-zero means
+ // passed.
+ static int SSLVerifyCallback(int ok, X509_STORE_CTX* store);
+
+ SSLState state_;
+ SSLRole role_;
+ int ssl_error_code_; // valid when state_ == SSL_ERROR or SSL_CLOSED
+ // Whether the SSL negotiation is blocked on needing to read or
+ // write to the wrapped stream.
+ bool ssl_read_needs_write_;
+ bool ssl_write_needs_read_;
+
+ SSL* ssl_;
+ SSL_CTX* ssl_ctx_;
+
+ // Our key and certificate, mostly useful in peer-to-peer mode.
+ scoped_ptr<OpenSSLIdentity> identity_;
+  // In traditional mode, the server name that the server's certificate
+ // must specify. Empty in peer-to-peer mode.
+ std::string ssl_server_name_;
+ // The certificate that the peer must present or did present. Initially
+ // null in traditional mode, until the connection is established.
+ scoped_ptr<OpenSSLCertificate> peer_certificate_;
+ // In peer-to-peer mode, the digest of the certificate that
+ // the peer must present.
+ Buffer peer_certificate_digest_value_;
+ std::string peer_certificate_digest_algorithm_;
+
+ // OpenSSLAdapter::custom_verify_callback_ result
+ bool custom_verification_succeeded_;
+
+  // The colon-separated list of DTLS-SRTP ciphers to offer, using the
+  // internal (OpenSSL) names.
+ std::string srtp_ciphers_;
+
+  // Whether to run in DTLS mode or TLS mode.
+ SSLMode ssl_mode_;
+};
+
+/////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_OPENSSLSTREAMADAPTER_H__
diff --git a/chromium/third_party/webrtc/base/optionsfile.cc b/chromium/third_party/webrtc/base/optionsfile.cc
new file mode 100644
index 00000000000..d84c948e351
--- /dev/null
+++ b/chromium/third_party/webrtc/base/optionsfile.cc
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/optionsfile.h"
+
+#include <ctype.h>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/stringencode.h"
+
+namespace rtc {
+
+OptionsFile::OptionsFile(const std::string &path) : path_(path) {
+}
+
+bool OptionsFile::Load() {
+ options_.clear();
+ // Open file.
+ FileStream stream;
+ int err;
+ if (!stream.Open(path_, "r", &err)) {
+ LOG_F(LS_WARNING) << "Could not open file, err=" << err;
+ // We do not consider this an error because we expect there to be no file
+ // until the user saves a setting.
+ return true;
+ }
+ // Read in all its data.
+ std::string line;
+ StreamResult res;
+ for (;;) {
+ res = stream.ReadLine(&line);
+ if (res != SR_SUCCESS) {
+ break;
+ }
+ size_t equals_pos = line.find('=');
+ if (equals_pos == std::string::npos) {
+ // We do not consider this an error. Instead we ignore the line and
+ // keep going.
+ LOG_F(LS_WARNING) << "Ignoring malformed line in " << path_;
+ continue;
+ }
+ std::string key(line, 0, equals_pos);
+ std::string value(line, equals_pos + 1, line.length() - (equals_pos + 1));
+ options_[key] = value;
+ }
+ if (res != SR_EOS) {
+ LOG_F(LS_ERROR) << "Error when reading from file";
+ return false;
+ } else {
+ return true;
+ }
+}
+
+bool OptionsFile::Save() {
+ // Open file.
+ FileStream stream;
+ int err;
+ if (!stream.Open(path_, "w", &err)) {
+ LOG_F(LS_ERROR) << "Could not open file, err=" << err;
+ return false;
+ }
+ // Write out all the data.
+ StreamResult res = SR_SUCCESS;
+ size_t written;
+ int error;
+ for (OptionsMap::const_iterator i = options_.begin(); i != options_.end();
+ ++i) {
+ res = stream.WriteAll(i->first.c_str(), i->first.length(), &written,
+ &error);
+ if (res != SR_SUCCESS) {
+ break;
+ }
+ res = stream.WriteAll("=", 1, &written, &error);
+ if (res != SR_SUCCESS) {
+ break;
+ }
+ res = stream.WriteAll(i->second.c_str(), i->second.length(), &written,
+ &error);
+ if (res != SR_SUCCESS) {
+ break;
+ }
+ res = stream.WriteAll("\n", 1, &written, &error);
+ if (res != SR_SUCCESS) {
+ break;
+ }
+ }
+ if (res != SR_SUCCESS) {
+ LOG_F(LS_ERROR) << "Unable to write to file";
+ return false;
+ } else {
+ return true;
+ }
+}
+
+bool OptionsFile::IsLegalName(const std::string &name) {
+ for (size_t pos = 0; pos < name.length(); ++pos) {
+ if (name[pos] == '\n' || name[pos] == '\\' || name[pos] == '=') {
+ // Illegal character.
+ LOG(LS_WARNING) << "Ignoring operation for illegal option " << name;
+ return false;
+ }
+ }
+ return true;
+}
+
+bool OptionsFile::IsLegalValue(const std::string &value) {
+ for (size_t pos = 0; pos < value.length(); ++pos) {
+ if (value[pos] == '\n' || value[pos] == '\\') {
+ // Illegal character.
+ LOG(LS_WARNING) << "Ignoring operation for illegal value " << value;
+ return false;
+ }
+ }
+ return true;
+}
+
+bool OptionsFile::GetStringValue(const std::string& option,
+ std::string *out_val) const {
+ LOG(LS_VERBOSE) << "OptionsFile::GetStringValue "
+ << option;
+ if (!IsLegalName(option)) {
+ return false;
+ }
+ OptionsMap::const_iterator i = options_.find(option);
+ if (i == options_.end()) {
+ return false;
+ }
+ *out_val = i->second;
+ return true;
+}
+
+bool OptionsFile::GetIntValue(const std::string& option,
+ int *out_val) const {
+ LOG(LS_VERBOSE) << "OptionsFile::GetIntValue "
+ << option;
+ if (!IsLegalName(option)) {
+ return false;
+ }
+ OptionsMap::const_iterator i = options_.find(option);
+ if (i == options_.end()) {
+ return false;
+ }
+ return FromString(i->second, out_val);
+}
+
+bool OptionsFile::SetStringValue(const std::string& option,
+ const std::string& value) {
+ LOG(LS_VERBOSE) << "OptionsFile::SetStringValue "
+ << option << ":" << value;
+ if (!IsLegalName(option) || !IsLegalValue(value)) {
+ return false;
+ }
+ options_[option] = value;
+ return true;
+}
+
+bool OptionsFile::SetIntValue(const std::string& option,
+ int value) {
+ LOG(LS_VERBOSE) << "OptionsFile::SetIntValue "
+ << option << ":" << value;
+ if (!IsLegalName(option)) {
+ return false;
+ }
+ return ToString(value, &options_[option]);
+}
+
+bool OptionsFile::RemoveValue(const std::string& option) {
+ LOG(LS_VERBOSE) << "OptionsFile::RemoveValue " << option;
+ if (!IsLegalName(option)) {
+ return false;
+ }
+ options_.erase(option);
+ return true;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/optionsfile.h b/chromium/third_party/webrtc/base/optionsfile.h
new file mode 100644
index 00000000000..c740ce4fa9b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/optionsfile.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_OPTIONSFILE_H_
+#define WEBRTC_BASE_OPTIONSFILE_H_
+
+#include <map>
+#include <string>
+
+namespace rtc {
+
+// Implements storage of simple options in a text file on disk. This is
+// cross-platform, but it is intended mostly for Linux where there is no
+// first-class options storage system.
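+//
+// The on-disk format is one "name=value" entry per line. Names may not
+// contain '=', '\n' or '\\'; values may not contain '\n' or '\\'.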
+class OptionsFile {
+ public:
+ OptionsFile(const std::string &path);
+
+ // Loads the file from disk, overwriting the in-memory values.
+ bool Load();
+ // Saves the contents in memory, overwriting the on-disk values.
+ bool Save();
+
+ bool GetStringValue(const std::string& option, std::string* out_val) const;
+ bool GetIntValue(const std::string& option, int* out_val) const;
+ bool SetStringValue(const std::string& option, const std::string& val);
+ bool SetIntValue(const std::string& option, int val);
+ bool RemoveValue(const std::string& option);
+
+ private:
+ typedef std::map<std::string, std::string> OptionsMap;
+
+ static bool IsLegalName(const std::string &name);
+ static bool IsLegalValue(const std::string &value);
+
+ std::string path_;
+ OptionsMap options_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_OPTIONSFILE_H_
diff --git a/chromium/third_party/webrtc/base/optionsfile_unittest.cc b/chromium/third_party/webrtc/base/optionsfile_unittest.cc
new file mode 100644
index 00000000000..adddb95215f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/optionsfile_unittest.cc
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/optionsfile.h"
+#include "webrtc/base/pathutils.h"
+
+namespace rtc {
+
+static const std::string kTestOptionA = "test-option-a";
+static const std::string kTestOptionB = "test-option-b";
+static const std::string kTestString1 = "a string";
+static const std::string kTestString2 = "different string";
+static const std::string kOptionWithEquals = "foo=bar";
+static const std::string kOptionWithNewline = "foo\nbar";
+static const std::string kValueWithEquals = "baz=quux";
+static const std::string kValueWithNewline = "baz\nquux";
+static const std::string kEmptyString = "";
+static const char kOptionWithUtf8[] = {'O', 'p', 't', '\302', '\256', 'i', 'o',
+ 'n', '\342', '\204', '\242', '\0'}; // Opt(R)io(TM).
+static const char kValueWithUtf8[] = {'V', 'a', 'l', '\302', '\256', 'v', 'e',
+ '\342', '\204', '\242', '\0'}; // Val(R)ue(TM).
+static int kTestInt1 = 12345;
+static int kTestInt2 = 67890;
+static int kNegInt = -634;
+static int kZero = 0;
+
+class OptionsFileTest : public testing::Test {
+ public:
+ OptionsFileTest() {
+ Pathname dir;
+ ASSERT(Filesystem::GetTemporaryFolder(dir, true, NULL));
+ test_file_ = Filesystem::TempFilename(dir, ".testfile");
+ OpenStore();
+ }
+
+ protected:
+ void OpenStore() {
+ store_.reset(new OptionsFile(test_file_));
+ }
+
+ rtc::scoped_ptr<OptionsFile> store_;
+
+ private:
+ std::string test_file_;
+};
+
+TEST_F(OptionsFileTest, GetSetString) {
+ // Clear contents of the file on disk.
+ EXPECT_TRUE(store_->Save());
+ std::string out1, out2;
+ EXPECT_FALSE(store_->GetStringValue(kTestOptionA, &out1));
+ EXPECT_FALSE(store_->GetStringValue(kTestOptionB, &out2));
+ EXPECT_TRUE(store_->SetStringValue(kTestOptionA, kTestString1));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_TRUE(store_->SetStringValue(kTestOptionB, kTestString2));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_TRUE(store_->GetStringValue(kTestOptionA, &out1));
+ EXPECT_TRUE(store_->GetStringValue(kTestOptionB, &out2));
+ EXPECT_EQ(kTestString1, out1);
+ EXPECT_EQ(kTestString2, out2);
+ EXPECT_TRUE(store_->RemoveValue(kTestOptionA));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_TRUE(store_->RemoveValue(kTestOptionB));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_FALSE(store_->GetStringValue(kTestOptionA, &out1));
+ EXPECT_FALSE(store_->GetStringValue(kTestOptionB, &out2));
+}
+
+TEST_F(OptionsFileTest, GetSetInt) {
+ // Clear contents of the file on disk.
+ EXPECT_TRUE(store_->Save());
+ int out1, out2;
+ EXPECT_FALSE(store_->GetIntValue(kTestOptionA, &out1));
+ EXPECT_FALSE(store_->GetIntValue(kTestOptionB, &out2));
+ EXPECT_TRUE(store_->SetIntValue(kTestOptionA, kTestInt1));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_TRUE(store_->SetIntValue(kTestOptionB, kTestInt2));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_TRUE(store_->GetIntValue(kTestOptionA, &out1));
+ EXPECT_TRUE(store_->GetIntValue(kTestOptionB, &out2));
+ EXPECT_EQ(kTestInt1, out1);
+ EXPECT_EQ(kTestInt2, out2);
+ EXPECT_TRUE(store_->RemoveValue(kTestOptionA));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_TRUE(store_->RemoveValue(kTestOptionB));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_FALSE(store_->GetIntValue(kTestOptionA, &out1));
+ EXPECT_FALSE(store_->GetIntValue(kTestOptionB, &out2));
+ EXPECT_TRUE(store_->SetIntValue(kTestOptionA, kNegInt));
+ EXPECT_TRUE(store_->GetIntValue(kTestOptionA, &out1));
+ EXPECT_EQ(kNegInt, out1);
+ EXPECT_TRUE(store_->SetIntValue(kTestOptionA, kZero));
+ EXPECT_TRUE(store_->GetIntValue(kTestOptionA, &out1));
+ EXPECT_EQ(kZero, out1);
+}
+
+TEST_F(OptionsFileTest, Persist) {
+ // Clear contents of the file on disk.
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->SetStringValue(kTestOptionA, kTestString1));
+ EXPECT_TRUE(store_->SetIntValue(kTestOptionB, kNegInt));
+ EXPECT_TRUE(store_->Save());
+
+ // Load the saved contents from above.
+ OpenStore();
+ EXPECT_TRUE(store_->Load());
+ std::string out1;
+ int out2;
+ EXPECT_TRUE(store_->GetStringValue(kTestOptionA, &out1));
+ EXPECT_TRUE(store_->GetIntValue(kTestOptionB, &out2));
+ EXPECT_EQ(kTestString1, out1);
+ EXPECT_EQ(kNegInt, out2);
+}
+
+TEST_F(OptionsFileTest, SpecialCharacters) {
+ // Clear contents of the file on disk.
+ EXPECT_TRUE(store_->Save());
+ std::string out;
+ EXPECT_FALSE(store_->SetStringValue(kOptionWithEquals, kTestString1));
+ EXPECT_FALSE(store_->GetStringValue(kOptionWithEquals, &out));
+ EXPECT_FALSE(store_->SetStringValue(kOptionWithNewline, kTestString1));
+ EXPECT_FALSE(store_->GetStringValue(kOptionWithNewline, &out));
+ EXPECT_TRUE(store_->SetStringValue(kOptionWithUtf8, kValueWithUtf8));
+ EXPECT_TRUE(store_->SetStringValue(kTestOptionA, kTestString1));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_TRUE(store_->GetStringValue(kTestOptionA, &out));
+ EXPECT_EQ(kTestString1, out);
+ EXPECT_TRUE(store_->GetStringValue(kOptionWithUtf8, &out));
+ EXPECT_EQ(kValueWithUtf8, out);
+ EXPECT_FALSE(store_->SetStringValue(kTestOptionA, kValueWithNewline));
+ EXPECT_TRUE(store_->GetStringValue(kTestOptionA, &out));
+ EXPECT_EQ(kTestString1, out);
+ EXPECT_TRUE(store_->SetStringValue(kTestOptionA, kValueWithEquals));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_TRUE(store_->GetStringValue(kTestOptionA, &out));
+ EXPECT_EQ(kValueWithEquals, out);
+ EXPECT_TRUE(store_->SetStringValue(kEmptyString, kTestString2));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_TRUE(store_->GetStringValue(kEmptyString, &out));
+ EXPECT_EQ(kTestString2, out);
+ EXPECT_TRUE(store_->SetStringValue(kTestOptionB, kEmptyString));
+ EXPECT_TRUE(store_->Save());
+ EXPECT_TRUE(store_->Load());
+ EXPECT_TRUE(store_->GetStringValue(kTestOptionB, &out));
+ EXPECT_EQ(kEmptyString, out);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/pathutils.cc b/chromium/third_party/webrtc/base/pathutils.cc
new file mode 100644
index 00000000000..7671bfc29f5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/pathutils.cc
@@ -0,0 +1,251 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#include <shellapi.h>
+#include <shlobj.h>
+#include <tchar.h>
+#endif // WEBRTC_WIN
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/urlencode.h"
+
+namespace rtc {
+
+static const char EMPTY_STR[] = "";
+
+// EXT_DELIM separates a file basename from extension
+const char EXT_DELIM = '.';
+
+// FOLDER_DELIMS separate folder segments and the filename
+const char* const FOLDER_DELIMS = "/\\";
+
+// DEFAULT_FOLDER_DELIM is the preferred delimiter for this platform
+#if defined(WEBRTC_WIN)
+const char DEFAULT_FOLDER_DELIM = '\\';
+#else // !WEBRTC_WIN
+const char DEFAULT_FOLDER_DELIM = '/';
+#endif // !WEBRTC_WIN
+
+///////////////////////////////////////////////////////////////////////////////
+// Pathname - parsing of pathnames into components, and vice versa
+///////////////////////////////////////////////////////////////////////////////
+
+bool Pathname::IsFolderDelimiter(char ch) {
+ return (NULL != ::strchr(FOLDER_DELIMS, ch));
+}
+
+char Pathname::DefaultFolderDelimiter() {
+ return DEFAULT_FOLDER_DELIM;
+}
+
+Pathname::Pathname()
+ : folder_delimiter_(DEFAULT_FOLDER_DELIM) {
+}
+
+Pathname::Pathname(const std::string& pathname)
+ : folder_delimiter_(DEFAULT_FOLDER_DELIM) {
+ SetPathname(pathname);
+}
+
+Pathname::Pathname(const std::string& folder, const std::string& filename)
+ : folder_delimiter_(DEFAULT_FOLDER_DELIM) {
+ SetPathname(folder, filename);
+}
+
+void Pathname::SetFolderDelimiter(char delimiter) {
+ ASSERT(IsFolderDelimiter(delimiter));
+ folder_delimiter_ = delimiter;
+}
+
+void Pathname::Normalize() {
+ for (size_t i=0; i<folder_.length(); ++i) {
+ if (IsFolderDelimiter(folder_[i])) {
+ folder_[i] = folder_delimiter_;
+ }
+ }
+}
+
+void Pathname::clear() {
+ folder_.clear();
+ basename_.clear();
+ extension_.clear();
+}
+
+bool Pathname::empty() const {
+ return folder_.empty() && basename_.empty() && extension_.empty();
+}
+
+std::string Pathname::pathname() const {
+ std::string pathname(folder_);
+ pathname.append(basename_);
+ pathname.append(extension_);
+ if (pathname.empty()) {
+ // Instead of the empty pathname, return the current working directory.
+ pathname.push_back('.');
+ pathname.push_back(folder_delimiter_);
+ }
+ return pathname;
+}
+
+std::string Pathname::url() const {
+ std::string s = "file:///";
+ for (size_t i=0; i<folder_.length(); ++i) {
+ if (IsFolderDelimiter(folder_[i]))
+ s += '/';
+ else
+ s += folder_[i];
+ }
+ s += basename_;
+ s += extension_;
+ return UrlEncodeStringForOnlyUnsafeChars(s);
+}
+
+void Pathname::SetPathname(const std::string& pathname) {
+ std::string::size_type pos = pathname.find_last_of(FOLDER_DELIMS);
+ if (pos != std::string::npos) {
+ SetFolder(pathname.substr(0, pos + 1));
+ SetFilename(pathname.substr(pos + 1));
+ } else {
+ SetFolder(EMPTY_STR);
+ SetFilename(pathname);
+ }
+}
+
+void Pathname::SetPathname(const std::string& folder,
+ const std::string& filename) {
+ SetFolder(folder);
+ SetFilename(filename);
+}
+
+void Pathname::AppendPathname(const std::string& pathname) {
+ std::string full_pathname(folder_);
+ full_pathname.append(pathname);
+ SetPathname(full_pathname);
+}
+
+std::string Pathname::folder() const {
+ return folder_;
+}
+
+std::string Pathname::folder_name() const {
+ std::string::size_type pos = std::string::npos;
+ if (folder_.size() >= 2) {
+ pos = folder_.find_last_of(FOLDER_DELIMS, folder_.length() - 2);
+ }
+ if (pos != std::string::npos) {
+ return folder_.substr(pos + 1);
+ } else {
+ return folder_;
+ }
+}
+
+std::string Pathname::parent_folder() const {
+ std::string::size_type pos = std::string::npos;
+ if (folder_.size() >= 2) {
+ pos = folder_.find_last_of(FOLDER_DELIMS, folder_.length() - 2);
+ }
+ if (pos != std::string::npos) {
+ return folder_.substr(0, pos + 1);
+ } else {
+ return EMPTY_STR;
+ }
+}
+
+void Pathname::SetFolder(const std::string& folder) {
+ folder_.assign(folder);
+ // Ensure folder ends in a path delimiter
+ if (!folder_.empty() && !IsFolderDelimiter(folder_[folder_.length()-1])) {
+ folder_.push_back(folder_delimiter_);
+ }
+}
+
+void Pathname::AppendFolder(const std::string& folder) {
+ folder_.append(folder);
+ // Ensure folder ends in a path delimiter
+ if (!folder_.empty() && !IsFolderDelimiter(folder_[folder_.length()-1])) {
+ folder_.push_back(folder_delimiter_);
+ }
+}
+
+std::string Pathname::basename() const {
+ return basename_;
+}
+
+bool Pathname::SetBasename(const std::string& basename) {
+  if (basename.find_first_of(FOLDER_DELIMS) != std::string::npos) {
+ return false;
+ }
+ basename_.assign(basename);
+ return true;
+}
+
+std::string Pathname::extension() const {
+ return extension_;
+}
+
+bool Pathname::SetExtension(const std::string& extension) {
+ if (extension.find_first_of(FOLDER_DELIMS) != std::string::npos ||
+ extension.find_first_of(EXT_DELIM, 1) != std::string::npos) {
+ return false;
+ }
+ extension_.assign(extension);
+ // Ensure extension begins with the extension delimiter
+ if (!extension_.empty() && (extension_[0] != EXT_DELIM)) {
+ extension_.insert(extension_.begin(), EXT_DELIM);
+ }
+ return true;
+}
+
+std::string Pathname::filename() const {
+ std::string filename(basename_);
+ filename.append(extension_);
+ return filename;
+}
+
+bool Pathname::SetFilename(const std::string& filename) {
+ std::string::size_type pos = filename.rfind(EXT_DELIM);
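+  // A missing '.' or a leading '.' (e.g. ".profile") means the whole
+  // filename is the basename and the extension is empty.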
+ if ((pos == std::string::npos) || (pos == 0)) {
+ return SetExtension(EMPTY_STR) && SetBasename(filename);
+ } else {
+ return SetExtension(filename.substr(pos)) && SetBasename(filename.substr(0, pos));
+ }
+}
+
+#if defined(WEBRTC_WIN)
+bool Pathname::GetDrive(char *drive, uint32 bytes) const {
+ return GetDrive(drive, bytes, folder_);
+}
+
+// static
+bool Pathname::GetDrive(char *drive, uint32 bytes,
+ const std::string& pathname) {
+  // Need at least 4 bytes to hold a drive prefix such as "c:\".
+ if (bytes < 4 || pathname.size() < 3) {
+ return false;
+ }
+
+ memcpy(drive, pathname.c_str(), 3);
+ drive[3] = 0;
+ // sanity checking
+ return (isalpha(drive[0]) &&
+ drive[1] == ':' &&
+ drive[2] == '\\');
+}
+#endif
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/pathutils.h b/chromium/third_party/webrtc/base/pathutils.h
new file mode 100644
index 00000000000..8f07e1dbc07
--- /dev/null
+++ b/chromium/third_party/webrtc/base/pathutils.h
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_PATHUTILS_H__
+#define WEBRTC_BASE_PATHUTILS_H__
+
+#include <string>
+// Temporary, until deprecated helpers are removed.
+#include "webrtc/base/fileutils.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// Pathname - parsing of pathnames into components, and vice versa.
+//
+// To establish consistent terminology, a filename never contains a folder
+// component. A folder never contains a filename. A pathname may include
+// a folder and/or filename component. Here are some examples:
+//
+// pathname() /home/john/example.txt
+// folder() /home/john/
+// filename() example.txt
+// parent_folder() /home/
+// folder_name() john/
+// basename() example
+// extension() .txt
+//
+// Basename may begin, end, and/or include periods, but no folder delimiters.
+// If extension exists, it consists of a period followed by zero or more
+// non-period/non-delimiter characters, and basename is non-empty.
+///////////////////////////////////////////////////////////////////////////////
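+// A minimal sketch of the parsing described above (the values follow the
+// table in the comment):
+//
+//   rtc::Pathname p("/home/john/example.txt");
+//   p.folder();     // "/home/john/"
+//   p.filename();   // "example.txt"
+//   p.basename();   // "example"
+//   p.extension();  // ".txt"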
+
+class Pathname {
+public:
+ // Folder delimiters are slash and backslash
+ static bool IsFolderDelimiter(char ch);
+ static char DefaultFolderDelimiter();
+
+ Pathname();
+ Pathname(const std::string& pathname);
+ Pathname(const std::string& folder, const std::string& filename);
+
+  // Gets or sets the folder delimiter used by this Pathname.
+ char folder_delimiter() const { return folder_delimiter_; }
+ void SetFolderDelimiter(char delimiter);
+
+ // Normalize changes all folder delimiters to folder_delimiter()
+ void Normalize();
+
+ // Reset to the empty pathname
+ void clear();
+
+ // Returns true if the pathname is empty. Note: this->pathname().empty()
+ // is always false.
+ bool empty() const;
+
+ std::string url() const;
+
+ // Returns the folder and filename components. If the pathname is empty,
+  // returns a string representing the current directory (as a relative
+  // path, i.e., "." followed by the folder delimiter).
+ std::string pathname() const;
+ void SetPathname(const std::string& pathname);
+ void SetPathname(const std::string& folder, const std::string& filename);
+
+ // Append pathname to the current folder (if any). Any existing filename
+ // will be discarded.
+ void AppendPathname(const std::string& pathname);
+
+ std::string folder() const;
+ std::string folder_name() const;
+ std::string parent_folder() const;
+ // SetFolder and AppendFolder will append a folder delimiter, if needed.
+ void SetFolder(const std::string& folder);
+ void AppendFolder(const std::string& folder);
+
+ std::string basename() const;
+ bool SetBasename(const std::string& basename);
+
+ std::string extension() const;
+ // SetExtension will prefix a period, if needed.
+ bool SetExtension(const std::string& extension);
+
+ std::string filename() const;
+ bool SetFilename(const std::string& filename);
+
+#if defined(WEBRTC_WIN)
+ bool GetDrive(char *drive, uint32 bytes) const;
+ static bool GetDrive(char *drive, uint32 bytes,const std::string& pathname);
+#endif
+
+private:
+ std::string folder_, basename_, extension_;
+ char folder_delimiter_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// Global Helpers (deprecated)
+///////////////////////////////////////////////////////////////////////////////
+
+inline void SetOrganizationName(const std::string& organization) {
+ Filesystem::SetOrganizationName(organization);
+}
+inline void SetApplicationName(const std::string& application) {
+ Filesystem::SetApplicationName(application);
+}
+inline void GetOrganizationName(std::string* organization) {
+ Filesystem::GetOrganizationName(organization);
+}
+inline void GetApplicationName(std::string* application) {
+ Filesystem::GetApplicationName(application);
+}
+inline bool CreateFolder(const Pathname& path) {
+ return Filesystem::CreateFolder(path);
+}
+inline bool FinishPath(Pathname& path, bool create, const std::string& append) {
+ if (!append.empty())
+ path.AppendFolder(append);
+ return !create || CreateFolder(path);
+}
+// Note: this method uses the convention of <temp>/<appname> for the temporary
+// folder. Filesystem uses <temp>/<exename>. We will be migrating exclusively
+// to <temp>/<orgname>/<appname> eventually. Since these are temp folders,
+// it's probably ok to orphan them during the transition.
+inline bool GetTemporaryFolder(Pathname& path, bool create,
+ const std::string& append) {
+ std::string application_name;
+ Filesystem::GetApplicationName(&application_name);
+ ASSERT(!application_name.empty());
+ return Filesystem::GetTemporaryFolder(path, create, &application_name)
+ && FinishPath(path, create, append);
+}
+inline bool GetAppDataFolder(Pathname& path, bool create,
+ const std::string& append) {
+ ASSERT(!create); // TODO: Support create flag on Filesystem::GetAppDataFolder.
+ return Filesystem::GetAppDataFolder(&path, true)
+ && FinishPath(path, create, append);
+}
+inline bool CleanupTemporaryFolder() {
+ Pathname path;
+ if (!GetTemporaryFolder(path, false, ""))
+ return false;
+ if (Filesystem::IsAbsent(path))
+ return true;
+ if (!Filesystem::IsTemporaryPath(path)) {
+ ASSERT(false);
+ return false;
+ }
+ return Filesystem::DeleteFolderContents(path);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_PATHUTILS_H__
diff --git a/chromium/third_party/webrtc/base/pathutils_unittest.cc b/chromium/third_party/webrtc/base/pathutils_unittest.cc
new file mode 100644
index 00000000000..a04effa68cd
--- /dev/null
+++ b/chromium/third_party/webrtc/base/pathutils_unittest.cc
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/gunit.h"
+
+TEST(Pathname, ReturnsDotForEmptyPathname) {
+ const std::string kCWD =
+ std::string(".") + rtc::Pathname::DefaultFolderDelimiter();
+
+ rtc::Pathname path("/", "");
+ EXPECT_FALSE(path.empty());
+ EXPECT_FALSE(path.folder().empty());
+ EXPECT_TRUE (path.filename().empty());
+ EXPECT_FALSE(path.pathname().empty());
+ EXPECT_EQ(std::string("/"), path.pathname());
+
+ path.SetPathname("", "foo");
+ EXPECT_FALSE(path.empty());
+ EXPECT_TRUE (path.folder().empty());
+ EXPECT_FALSE(path.filename().empty());
+ EXPECT_FALSE(path.pathname().empty());
+ EXPECT_EQ(std::string("foo"), path.pathname());
+
+ path.SetPathname("", "");
+ EXPECT_TRUE (path.empty());
+ EXPECT_TRUE (path.folder().empty());
+ EXPECT_TRUE (path.filename().empty());
+ EXPECT_FALSE(path.pathname().empty());
+ EXPECT_EQ(kCWD, path.pathname());
+
+ path.SetPathname(kCWD, "");
+ EXPECT_FALSE(path.empty());
+ EXPECT_FALSE(path.folder().empty());
+ EXPECT_TRUE (path.filename().empty());
+ EXPECT_FALSE(path.pathname().empty());
+ EXPECT_EQ(kCWD, path.pathname());
+
+ rtc::Pathname path2("c:/foo bar.txt");
+ EXPECT_EQ(path2.url(), std::string("file:///c:/foo%20bar.txt"));
+}
diff --git a/chromium/third_party/webrtc/base/physicalsocketserver.cc b/chromium/third_party/webrtc/base/physicalsocketserver.cc
new file mode 100644
index 00000000000..cff5e4dcb5f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/physicalsocketserver.cc
@@ -0,0 +1,1659 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(_MSC_VER) && _MSC_VER < 1300
+#pragma warning(disable:4786)
+#endif
+
+#include <assert.h>
+
+#if defined(WEBRTC_POSIX)
+#include <string.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/time.h>
+#include <sys/select.h>
+#include <unistd.h>
+#include <signal.h>
+#endif
+
+#if defined(WEBRTC_WIN)
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <winsock2.h>
+#include <ws2tcpip.h>
+#undef SetPort
+#endif
+
+#include <algorithm>
+#include <map>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/byteorder.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/base/winping.h"
+#include "webrtc/base/win32socketinit.h"
+
+// stm: this will tell us if we are on OSX
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#if defined(WEBRTC_POSIX)
+#include <netinet/tcp.h> // for TCP_NODELAY
+#define IP_MTU 14 // Until this is integrated from linux/in.h to netinet/in.h
+typedef void* SockOptArg;
+#endif // WEBRTC_POSIX
+
+#if defined(WEBRTC_WIN)
+typedef char* SockOptArg;
+#endif
+
+namespace rtc {
+
+#if defined(WEBRTC_WIN)
+// Standard MTUs, from RFC 1191
+const uint16 PACKET_MAXIMUMS[] = {
+ 65535, // Theoretical maximum, Hyperchannel
+ 32000, // Nothing
+ 17914, // 16Mb IBM Token Ring
+ 8166, // IEEE 802.4
+ //4464, // IEEE 802.5 (4Mb max)
+ 4352, // FDDI
+ //2048, // Wideband Network
+ 2002, // IEEE 802.5 (4Mb recommended)
+ //1536, // Experimental Ethernet Networks
+ //1500, // Ethernet, Point-to-Point (default)
+ 1492, // IEEE 802.3
+ 1006, // SLIP, ARPANET
+ //576, // X.25 Networks
+ //544, // DEC IP Portal
+ //512, // NETBIOS
+ 508, // IEEE 802/Source-Rt Bridge, ARCNET
+ 296, // Point-to-Point (low delay)
+ 68, // Official minimum
+ 0, // End of list marker
+};
+
+static const int IP_HEADER_SIZE = 20u;
+static const int IPV6_HEADER_SIZE = 40u;
+static const int ICMP_HEADER_SIZE = 8u;
+static const int ICMP_PING_TIMEOUT_MILLIS = 10000u;
+#endif
+
+class PhysicalSocket : public AsyncSocket, public sigslot::has_slots<> {
+ public:
+ PhysicalSocket(PhysicalSocketServer* ss, SOCKET s = INVALID_SOCKET)
+ : ss_(ss), s_(s), enabled_events_(0), error_(0),
+ state_((s == INVALID_SOCKET) ? CS_CLOSED : CS_CONNECTED),
+ resolver_(NULL) {
+#if defined(WEBRTC_WIN)
+ // EnsureWinsockInit() ensures that winsock is initialized. The default
+ // version of this function doesn't do anything because winsock is
+ // initialized by the constructor of a static object. If necessary, libjingle
+ // users can link it with a different version of this function by replacing
+ // win32socketinit.cc. See win32socketinit.cc for more details.
+ EnsureWinsockInit();
+#endif
+ if (s_ != INVALID_SOCKET) {
+ enabled_events_ = DE_READ | DE_WRITE;
+
+ int type = SOCK_STREAM;
+ socklen_t len = sizeof(type);
+ VERIFY(0 == getsockopt(s_, SOL_SOCKET, SO_TYPE, (SockOptArg)&type, &len));
+ udp_ = (SOCK_DGRAM == type);
+ }
+ }
+
+ virtual ~PhysicalSocket() {
+ Close();
+ }
+
+ // Creates the underlying OS socket (same as the "socket" function).
+ virtual bool Create(int family, int type) {
+ Close();
+ s_ = ::socket(family, type, 0);
+ udp_ = (SOCK_DGRAM == type);
+ UpdateLastError();
+ if (udp_)
+ enabled_events_ = DE_READ | DE_WRITE;
+ return s_ != INVALID_SOCKET;
+ }
+
+ SocketAddress GetLocalAddress() const {
+ sockaddr_storage addr_storage = {0};
+ socklen_t addrlen = sizeof(addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ int result = ::getsockname(s_, addr, &addrlen);
+ SocketAddress address;
+ if (result >= 0) {
+ SocketAddressFromSockAddrStorage(addr_storage, &address);
+ } else {
+ LOG(LS_WARNING) << "GetLocalAddress: unable to get local addr, socket="
+ << s_;
+ }
+ return address;
+ }
+
+ SocketAddress GetRemoteAddress() const {
+ sockaddr_storage addr_storage = {0};
+ socklen_t addrlen = sizeof(addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ int result = ::getpeername(s_, addr, &addrlen);
+ SocketAddress address;
+ if (result >= 0) {
+ SocketAddressFromSockAddrStorage(addr_storage, &address);
+ } else {
+ LOG(LS_WARNING) << "GetRemoteAddress: unable to get remote addr, socket="
+ << s_;
+ }
+ return address;
+ }
+
+ int Bind(const SocketAddress& bind_addr) {
+ sockaddr_storage addr_storage;
+ size_t len = bind_addr.ToSockAddrStorage(&addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ int err = ::bind(s_, addr, static_cast<int>(len));
+ UpdateLastError();
+#ifdef _DEBUG
+ if (0 == err) {
+ dbg_addr_ = "Bound @ ";
+ dbg_addr_.append(GetLocalAddress().ToString());
+ }
+#endif // _DEBUG
+ return err;
+ }
+
+ int Connect(const SocketAddress& addr) {
+ // TODO: Implicit creation is required to reconnect...
+ // ...but should we make it more explicit?
+ if (state_ != CS_CLOSED) {
+ SetError(EALREADY);
+ return SOCKET_ERROR;
+ }
+ if (addr.IsUnresolved()) {
+ LOG(LS_VERBOSE) << "Resolving addr in PhysicalSocket::Connect";
+ resolver_ = new AsyncResolver();
+ resolver_->SignalDone.connect(this, &PhysicalSocket::OnResolveResult);
+ resolver_->Start(addr);
+ state_ = CS_CONNECTING;
+ return 0;
+ }
+
+ return DoConnect(addr);
+ }
+
+ int DoConnect(const SocketAddress& connect_addr) {
+ if ((s_ == INVALID_SOCKET) &&
+ !Create(connect_addr.family(), SOCK_STREAM)) {
+ return SOCKET_ERROR;
+ }
+ sockaddr_storage addr_storage;
+ size_t len = connect_addr.ToSockAddrStorage(&addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ int err = ::connect(s_, addr, static_cast<int>(len));
+ UpdateLastError();
+ if (err == 0) {
+ state_ = CS_CONNECTED;
+ } else if (IsBlockingError(GetError())) {
+ state_ = CS_CONNECTING;
+ enabled_events_ |= DE_CONNECT;
+ } else {
+ return SOCKET_ERROR;
+ }
+
+ enabled_events_ |= DE_READ | DE_WRITE;
+ return 0;
+ }
+
+ int GetError() const {
+ CritScope cs(&crit_);
+ return error_;
+ }
+
+ void SetError(int error) {
+ CritScope cs(&crit_);
+ error_ = error;
+ }
+
+ ConnState GetState() const {
+ return state_;
+ }
+
+ int GetOption(Option opt, int* value) {
+ int slevel;
+ int sopt;
+ if (TranslateOption(opt, &slevel, &sopt) == -1)
+ return -1;
+ socklen_t optlen = sizeof(*value);
+ int ret = ::getsockopt(s_, slevel, sopt, (SockOptArg)value, &optlen);
+ if (ret != -1 && opt == OPT_DONTFRAGMENT) {
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ *value = (*value != IP_PMTUDISC_DONT) ? 1 : 0;
+#endif
+ }
+ return ret;
+ }
+
+ int SetOption(Option opt, int value) {
+ int slevel;
+ int sopt;
+ if (TranslateOption(opt, &slevel, &sopt) == -1)
+ return -1;
+ if (opt == OPT_DONTFRAGMENT) {
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ value = (value) ? IP_PMTUDISC_DO : IP_PMTUDISC_DONT;
+#endif
+ }
+ return ::setsockopt(s_, slevel, sopt, (SockOptArg)&value, sizeof(value));
+ }
+
+ int Send(const void *pv, size_t cb) {
+ int sent = ::send(s_, reinterpret_cast<const char *>(pv), (int)cb,
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ // Suppress SIGPIPE. Without this, attempting to send on a socket whose
+ // other end is closed will result in a SIGPIPE signal being raised to
+ // our process, which by default will terminate the process, which we
+ // don't want. By specifying this flag, we'll just get the error EPIPE
+ // instead and can handle the error gracefully.
+ MSG_NOSIGNAL
+#else
+ 0
+#endif
+ );
+ UpdateLastError();
+ MaybeRemapSendError();
+ // We have seen minidumps where this may be false.
+ ASSERT(sent <= static_cast<int>(cb));
+ if ((sent < 0) && IsBlockingError(GetError())) {
+ enabled_events_ |= DE_WRITE;
+ }
+ return sent;
+ }
+
+ int SendTo(const void* buffer, size_t length, const SocketAddress& addr) {
+ sockaddr_storage saddr;
+ size_t len = addr.ToSockAddrStorage(&saddr);
+ int sent = ::sendto(
+ s_, static_cast<const char *>(buffer), static_cast<int>(length),
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ // Suppress SIGPIPE. See above for explanation.
+ MSG_NOSIGNAL,
+#else
+ 0,
+#endif
+ reinterpret_cast<sockaddr*>(&saddr), static_cast<int>(len));
+ UpdateLastError();
+ MaybeRemapSendError();
+ // We have seen minidumps where this may be false.
+ ASSERT(sent <= static_cast<int>(length));
+ if ((sent < 0) && IsBlockingError(GetError())) {
+ enabled_events_ |= DE_WRITE;
+ }
+ return sent;
+ }
+
+ int Recv(void* buffer, size_t length) {
+ int received = ::recv(s_, static_cast<char*>(buffer),
+ static_cast<int>(length), 0);
+ if ((received == 0) && (length != 0)) {
+ // Note: on graceful shutdown, recv can return 0. In this case, we
+ // pretend it is blocking, and then signal close, so that simplifying
+ // assumptions can be made about Recv.
+ LOG(LS_WARNING) << "EOF from socket; deferring close event";
+ // Must turn this back on so that the select() loop will notice the close
+ // event.
+ enabled_events_ |= DE_READ;
+ SetError(EWOULDBLOCK);
+ return SOCKET_ERROR;
+ }
+ UpdateLastError();
+ int error = GetError();
+ bool success = (received >= 0) || IsBlockingError(error);
+ if (udp_ || success) {
+ enabled_events_ |= DE_READ;
+ }
+ if (!success) {
+ LOG_F(LS_VERBOSE) << "Error = " << error;
+ }
+ return received;
+ }
+
+ int RecvFrom(void* buffer, size_t length, SocketAddress *out_addr) {
+ sockaddr_storage addr_storage;
+ socklen_t addr_len = sizeof(addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ int received = ::recvfrom(s_, static_cast<char*>(buffer),
+ static_cast<int>(length), 0, addr, &addr_len);
+ UpdateLastError();
+ if ((received >= 0) && (out_addr != NULL))
+ SocketAddressFromSockAddrStorage(addr_storage, out_addr);
+ int error = GetError();
+ bool success = (received >= 0) || IsBlockingError(error);
+ if (udp_ || success) {
+ enabled_events_ |= DE_READ;
+ }
+ if (!success) {
+ LOG_F(LS_VERBOSE) << "Error = " << error;
+ }
+ return received;
+ }
+
+ int Listen(int backlog) {
+ int err = ::listen(s_, backlog);
+ UpdateLastError();
+ if (err == 0) {
+ state_ = CS_CONNECTING;
+ enabled_events_ |= DE_ACCEPT;
+#ifdef _DEBUG
+ dbg_addr_ = "Listening @ ";
+ dbg_addr_.append(GetLocalAddress().ToString());
+#endif // _DEBUG
+ }
+ return err;
+ }
+
+ AsyncSocket* Accept(SocketAddress *out_addr) {
+ sockaddr_storage addr_storage;
+ socklen_t addr_len = sizeof(addr_storage);
+ sockaddr* addr = reinterpret_cast<sockaddr*>(&addr_storage);
+ SOCKET s = ::accept(s_, addr, &addr_len);
+ UpdateLastError();
+ if (s == INVALID_SOCKET)
+ return NULL;
+ enabled_events_ |= DE_ACCEPT;
+ if (out_addr != NULL)
+ SocketAddressFromSockAddrStorage(addr_storage, out_addr);
+ return ss_->WrapSocket(s);
+ }
+
+ int Close() {
+ if (s_ == INVALID_SOCKET)
+ return 0;
+ int err = ::closesocket(s_);
+ UpdateLastError();
+ s_ = INVALID_SOCKET;
+ state_ = CS_CLOSED;
+ enabled_events_ = 0;
+ if (resolver_) {
+ resolver_->Destroy(false);
+ resolver_ = NULL;
+ }
+ return err;
+ }
+
+ int EstimateMTU(uint16* mtu) {
+ SocketAddress addr = GetRemoteAddress();
+ if (addr.IsAny()) {
+ SetError(ENOTCONN);
+ return -1;
+ }
+
+#if defined(WEBRTC_WIN)
+ // Gets the interface MTU (TTL=1) for the interface used to reach |addr|.
+ WinPing ping;
+ if (!ping.IsValid()) {
+ SetError(EINVAL); // can't think of a better error ID
+ return -1;
+ }
+ int header_size = ICMP_HEADER_SIZE;
+ if (addr.family() == AF_INET6) {
+ header_size += IPV6_HEADER_SIZE;
+ } else if (addr.family() == AF_INET) {
+ header_size += IP_HEADER_SIZE;
+ }
+
+ for (int level = 0; PACKET_MAXIMUMS[level + 1] > 0; ++level) {
+ int32 size = PACKET_MAXIMUMS[level] - header_size;
+ WinPing::PingResult result = ping.Ping(addr.ipaddr(), size,
+ ICMP_PING_TIMEOUT_MILLIS,
+ 1, false);
+ if (result == WinPing::PING_FAIL) {
+ SetError(EINVAL); // can't think of a better error ID
+ return -1;
+ } else if (result != WinPing::PING_TOO_LARGE) {
+ *mtu = PACKET_MAXIMUMS[level];
+ return 0;
+ }
+ }
+
+ ASSERT(false);
+ return -1;
+#elif defined(WEBRTC_MAC)
+ // No simple way to do this on Mac OS X.
+ // SIOCGIFMTU would work if we knew which interface would be used, but
+ // figuring that out is pretty complicated. For now we'll return an error
+ // and let the caller pick a default MTU.
+ SetError(EINVAL);
+ return -1;
+#elif defined(WEBRTC_LINUX)
+ // Gets the path MTU.
+ int value;
+ socklen_t vlen = sizeof(value);
+ int err = getsockopt(s_, IPPROTO_IP, IP_MTU, &value, &vlen);
+ if (err < 0) {
+ UpdateLastError();
+ return err;
+ }
+
+ ASSERT((0 <= value) && (value <= 65536));
+ *mtu = value;
+ return 0;
+#elif defined(__native_client__)
+ // Most socket operations, including this, will fail in NaCl's sandbox.
+ error_ = EACCES;
+ return -1;
+#endif
+ }
+
+ SocketServer* socketserver() { return ss_; }
+
+ protected:
+ void OnResolveResult(AsyncResolverInterface* resolver) {
+ if (resolver != resolver_) {
+ return;
+ }
+
+ int error = resolver_->GetError();
+ if (error == 0) {
+ error = DoConnect(resolver_->address());
+ } else {
+ Close();
+ }
+
+ if (error) {
+ SetError(error);
+ SignalCloseEvent(this, error);
+ }
+ }
+
+ void UpdateLastError() {
+ SetError(LAST_SYSTEM_ERROR);
+ }
+
+ void MaybeRemapSendError() {
+#if defined(WEBRTC_MAC)
+ // https://developer.apple.com/library/mac/documentation/Darwin/
+ // Reference/ManPages/man2/sendto.2.html
+ // ENOBUFS - The output queue for a network interface is full.
+ // This generally indicates that the interface has stopped sending,
+ // but may be caused by transient congestion.
+ if (GetError() == ENOBUFS) {
+ SetError(EWOULDBLOCK);
+ }
+#endif
+ }
+
+ static int TranslateOption(Option opt, int* slevel, int* sopt) {
+ switch (opt) {
+ case OPT_DONTFRAGMENT:
+#if defined(WEBRTC_WIN)
+ *slevel = IPPROTO_IP;
+ *sopt = IP_DONTFRAGMENT;
+ break;
+#elif defined(WEBRTC_MAC) || defined(BSD) || defined(__native_client__)
+ LOG(LS_WARNING) << "Socket::OPT_DONTFRAGMENT not supported.";
+ return -1;
+#elif defined(WEBRTC_POSIX)
+ *slevel = IPPROTO_IP;
+ *sopt = IP_MTU_DISCOVER;
+ break;
+#endif
+ case OPT_RCVBUF:
+ *slevel = SOL_SOCKET;
+ *sopt = SO_RCVBUF;
+ break;
+ case OPT_SNDBUF:
+ *slevel = SOL_SOCKET;
+ *sopt = SO_SNDBUF;
+ break;
+ case OPT_NODELAY:
+ *slevel = IPPROTO_TCP;
+ *sopt = TCP_NODELAY;
+ break;
+ case OPT_DSCP:
+ LOG(LS_WARNING) << "Socket::OPT_DSCP not supported.";
+ return -1;
+ case OPT_RTP_SENDTIME_EXTN_ID:
+ return -1; // No logging is necessary as this is not an OS socket option.
+ default:
+ ASSERT(false);
+ return -1;
+ }
+ return 0;
+ }
+
+ PhysicalSocketServer* ss_;
+ SOCKET s_;
+ uint8 enabled_events_;
+ bool udp_;
+ int error_;
+ // Protects |error_| that is accessed from different threads.
+ mutable CriticalSection crit_;
+ ConnState state_;
+ AsyncResolver* resolver_;
+
+#ifdef _DEBUG
+ std::string dbg_addr_;
+#endif // _DEBUG
+};
+
+#if defined(WEBRTC_POSIX)
+class EventDispatcher : public Dispatcher {
+ public:
+ EventDispatcher(PhysicalSocketServer* ss) : ss_(ss), fSignaled_(false) {
+ if (pipe(afd_) < 0)
+ LOG(LERROR) << "pipe failed";
+ ss_->Add(this);
+ }
+
+ virtual ~EventDispatcher() {
+ ss_->Remove(this);
+ close(afd_[0]);
+ close(afd_[1]);
+ }
+
+ virtual void Signal() {
+ CritScope cs(&crit_);
+ if (!fSignaled_) {
+ const uint8 b[1] = { 0 };
+ if (VERIFY(1 == write(afd_[1], b, sizeof(b)))) {
+ fSignaled_ = true;
+ }
+ }
+ }
+
+ virtual uint32 GetRequestedEvents() {
+ return DE_READ;
+ }
+
+ virtual void OnPreEvent(uint32 ff) {
+ // It is not possible to perfectly emulate an auto-resetting event with
+ // pipes. This simulates it by resetting before the event is handled.
+
+ CritScope cs(&crit_);
+ if (fSignaled_) {
+ uint8 b[4]; // Allow for reading more than 1 byte, but expect 1.
+ VERIFY(1 == read(afd_[0], b, sizeof(b)));
+ fSignaled_ = false;
+ }
+ }
+
+ virtual void OnEvent(uint32 ff, int err) {
+ ASSERT(false);
+ }
+
+ virtual int GetDescriptor() {
+ return afd_[0];
+ }
+
+ virtual bool IsDescriptorClosed() {
+ return false;
+ }
+
+ private:
+ PhysicalSocketServer *ss_;
+ int afd_[2];
+ bool fSignaled_;
+ CriticalSection crit_;
+};
+
+// These two classes use the self-pipe trick to deliver POSIX signals to our
+// select loop. This is the only safe, reliable, cross-platform way to do
+// non-trivial things with a POSIX signal in an event-driven program (until
+// proper pselect() implementations become ubiquitous).
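+//
+// In outline, as an editorial sketch of the general pattern (the names here
+// are made up; the real implementation is the two classes below): the
+// async-signal-safe handler only records the signal and writes one byte to a
+// non-blocking pipe, and the select() loop watches the pipe's read end and
+// does the real work at user level.
+//
+//   static int pipe_fds[2];            // created with pipe() at startup
+//   static void Handler(int signum) {  // installed via sigaction()
+//     RecordSignal(signum);            // touch only lock-free flags
+//     uint8 b = 0;
+//     write(pipe_fds[1], &b, 1);       // wakes up select()
+//   }
+//   // In the event loop: FD_SET(pipe_fds[0], &readfds); when select()
+//   // returns, drain the pipe and dispatch the recorded signals.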
+
+class PosixSignalHandler {
+ public:
+ // POSIX only specifies 32 signals, but in principle the system might have
+ // more and the programmer might choose to use them, so we size our array
+ // for 128.
+ static const int kNumPosixSignals = 128;
+
+ // There is just a single global instance. (Signal handlers do not get any
+ // sort of user-defined void * parameter, so they can't access anything that
+ // isn't global.)
+ static PosixSignalHandler* Instance() {
+ LIBJINGLE_DEFINE_STATIC_LOCAL(PosixSignalHandler, instance, ());
+ return &instance;
+ }
+
+ // Returns true if the given signal number is set.
+ bool IsSignalSet(int signum) const {
+ ASSERT(signum < ARRAY_SIZE(received_signal_));
+ if (signum < ARRAY_SIZE(received_signal_)) {
+ return received_signal_[signum];
+ } else {
+ return false;
+ }
+ }
+
+ // Clears the given signal number.
+ void ClearSignal(int signum) {
+ ASSERT(signum < ARRAY_SIZE(received_signal_));
+ if (signum < ARRAY_SIZE(received_signal_)) {
+ received_signal_[signum] = false;
+ }
+ }
+
+ // Returns the file descriptor to monitor for signal events.
+ int GetDescriptor() const {
+ return afd_[0];
+ }
+
+ // This is called directly from our real signal handler, so it must be
+ // signal-handler-safe. That means it cannot assume anything about the
+ // user-level state of the process, since the handler could be executed at any
+ // time on any thread.
+ void OnPosixSignalReceived(int signum) {
+ if (signum >= ARRAY_SIZE(received_signal_)) {
+ // We don't have space in our array for this.
+ return;
+ }
+ // Set a flag saying we've seen this signal.
+ received_signal_[signum] = true;
+ // Notify application code that we got a signal.
+ const uint8 b[1] = { 0 };
+ if (-1 == write(afd_[1], b, sizeof(b))) {
+ // Nothing we can do here. If there's an error somehow then there's
+ // nothing we can safely do from a signal handler.
+ // No, we can't even safely log it.
+ // But, we still have to check the return value here. Otherwise,
+ // GCC 4.4.1 complains ignoring return value. Even (void) doesn't help.
+ return;
+ }
+ }
+
+ private:
+ PosixSignalHandler() {
+ if (pipe(afd_) < 0) {
+ LOG_ERR(LS_ERROR) << "pipe failed";
+ return;
+ }
+ if (fcntl(afd_[0], F_SETFL, O_NONBLOCK) < 0) {
+ LOG_ERR(LS_WARNING) << "fcntl #1 failed";
+ }
+ if (fcntl(afd_[1], F_SETFL, O_NONBLOCK) < 0) {
+ LOG_ERR(LS_WARNING) << "fcntl #2 failed";
+ }
+ memset(const_cast<void *>(static_cast<volatile void *>(received_signal_)),
+ 0,
+ sizeof(received_signal_));
+ }
+
+ ~PosixSignalHandler() {
+ int fd1 = afd_[0];
+ int fd2 = afd_[1];
+ // We clobber the stored file descriptor numbers here or else in principle
+ // a signal that happens to be delivered during application termination
+ // could erroneously write a zero byte to an unrelated file handle in
+ // OnPosixSignalReceived() if some other file happens to be opened later
+ // during shutdown and happens to be given the same file descriptor number
+ // as our pipe had. Unfortunately even with this precaution there is still a
+ // race where that could occur if said signal happens to be handled
+ // concurrently with this code and happens to have already read the value of
+ // afd_[1] from memory before we clobber it, but that's unlikely.
+ afd_[0] = -1;
+ afd_[1] = -1;
+ close(fd1);
+ close(fd2);
+ }
+
+ int afd_[2];
+ // These are boolean flags that will be set in our signal handler and read
+ // and cleared from Wait(). There is a race involved in this, but it is
+ // benign. The signal handler sets the flag before signaling the pipe, so
+ // we'll never end up blocking in select() while a flag is still true.
+ // However, if two of the same signal arrive close together, the handler may
+ // set the flag for the second one while it is still true, so that second
+ // signal is effectively missed. But the first occurrence will still be
+ // handled, so this isn't a problem.
+ // Volatile is not necessary here for correctness, but this data _is_ volatile
+ // so I've marked it as such.
+ volatile uint8 received_signal_[kNumPosixSignals];
+};
+
+class PosixSignalDispatcher : public Dispatcher {
+ public:
+ PosixSignalDispatcher(PhysicalSocketServer *owner) : owner_(owner) {
+ owner_->Add(this);
+ }
+
+ virtual ~PosixSignalDispatcher() {
+ owner_->Remove(this);
+ }
+
+ virtual uint32 GetRequestedEvents() {
+ return DE_READ;
+ }
+
+ virtual void OnPreEvent(uint32 ff) {
+ // Events might get grouped if signals come very fast, so we read out up to
+ // 16 bytes to make sure we keep the pipe empty.
+ uint8 b[16];
+ ssize_t ret = read(GetDescriptor(), b, sizeof(b));
+ if (ret < 0) {
+ LOG_ERR(LS_WARNING) << "Error in read()";
+ } else if (ret == 0) {
+ LOG(LS_WARNING) << "Should have read at least one byte";
+ }
+ }
+
+ virtual void OnEvent(uint32 ff, int err) {
+ for (int signum = 0; signum < PosixSignalHandler::kNumPosixSignals;
+ ++signum) {
+ if (PosixSignalHandler::Instance()->IsSignalSet(signum)) {
+ PosixSignalHandler::Instance()->ClearSignal(signum);
+ HandlerMap::iterator i = handlers_.find(signum);
+ if (i == handlers_.end()) {
+ // This can happen if a signal is delivered to our process at around
+ // the same time as we unset our handler for it. It is not an error
+ // condition, but it's unusual enough to be worth logging.
+ LOG(LS_INFO) << "Received signal with no handler: " << signum;
+ } else {
+ // Otherwise, execute our handler.
+ (*i->second)(signum);
+ }
+ }
+ }
+ }
+
+ virtual int GetDescriptor() {
+ return PosixSignalHandler::Instance()->GetDescriptor();
+ }
+
+ virtual bool IsDescriptorClosed() {
+ return false;
+ }
+
+ void SetHandler(int signum, void (*handler)(int)) {
+ handlers_[signum] = handler;
+ }
+
+ void ClearHandler(int signum) {
+ handlers_.erase(signum);
+ }
+
+ bool HasHandlers() {
+ return !handlers_.empty();
+ }
+
+ private:
+ typedef std::map<int, void (*)(int)> HandlerMap;
+
+ HandlerMap handlers_;
+ // Our owner.
+ PhysicalSocketServer *owner_;
+};
+
+class SocketDispatcher : public Dispatcher, public PhysicalSocket {
+ public:
+ explicit SocketDispatcher(PhysicalSocketServer *ss) : PhysicalSocket(ss) {
+ }
+ SocketDispatcher(SOCKET s, PhysicalSocketServer *ss) : PhysicalSocket(ss, s) {
+ }
+
+ virtual ~SocketDispatcher() {
+ Close();
+ }
+
+ bool Initialize() {
+ ss_->Add(this);
+ fcntl(s_, F_SETFL, fcntl(s_, F_GETFL, 0) | O_NONBLOCK);
+ return true;
+ }
+
+ virtual bool Create(int type) {
+ return Create(AF_INET, type);
+ }
+
+ virtual bool Create(int family, int type) {
+ // Change the socket to be non-blocking.
+ if (!PhysicalSocket::Create(family, type))
+ return false;
+
+ return Initialize();
+ }
+
+ virtual int GetDescriptor() {
+ return s_;
+ }
+
+ virtual bool IsDescriptorClosed() {
+ // We don't have a reliable way of distinguishing end-of-stream
+ // from readability. So test on each readable call. Is this
+ // inefficient? Probably.
+ char ch;
+ ssize_t res = ::recv(s_, &ch, 1, MSG_PEEK);
+ if (res > 0) {
+ // Data available, so not closed.
+ return false;
+ } else if (res == 0) {
+ // EOF, so closed.
+ return true;
+ } else { // error
+ switch (errno) {
+ // Returned if we've already closed s_.
+ case EBADF:
+ // Returned during ungraceful peer shutdown.
+ case ECONNRESET:
+ return true;
+ default:
+ // Assume that all other errors are just blocking errors, meaning the
+ // connection is still good but we just can't read from it right now.
+ // This should only happen when connecting (and at most once), because
+ // in all other cases this function is only called if the file
+ // descriptor is already known to be in the readable state. However,
+ // it's not necessary a problem if we spuriously interpret a
+ // "connection lost"-type error as a blocking error, because typically
+ // the next recv() will get EOF, so we'll still eventually notice that
+ // the socket is closed.
+ LOG_ERR(LS_WARNING) << "Assuming benign blocking error";
+ return false;
+ }
+ }
+ }
+
+ virtual uint32 GetRequestedEvents() {
+ return enabled_events_;
+ }
+
+ virtual void OnPreEvent(uint32 ff) {
+ if ((ff & DE_CONNECT) != 0)
+ state_ = CS_CONNECTED;
+ if ((ff & DE_CLOSE) != 0)
+ state_ = CS_CLOSED;
+ }
+
+ virtual void OnEvent(uint32 ff, int err) {
+ // Make sure we deliver connect/accept first. Otherwise, consumers may see
+ // something like a READ followed by a CONNECT, which would be odd.
+ if ((ff & DE_CONNECT) != 0) {
+ enabled_events_ &= ~DE_CONNECT;
+ SignalConnectEvent(this);
+ }
+ if ((ff & DE_ACCEPT) != 0) {
+ enabled_events_ &= ~DE_ACCEPT;
+ SignalReadEvent(this);
+ }
+ if ((ff & DE_READ) != 0) {
+ enabled_events_ &= ~DE_READ;
+ SignalReadEvent(this);
+ }
+ if ((ff & DE_WRITE) != 0) {
+ enabled_events_ &= ~DE_WRITE;
+ SignalWriteEvent(this);
+ }
+ if ((ff & DE_CLOSE) != 0) {
+ // The socket is now dead to us, so stop checking it.
+ enabled_events_ = 0;
+ SignalCloseEvent(this, err);
+ }
+ }
+
+ virtual int Close() {
+ if (s_ == INVALID_SOCKET)
+ return 0;
+
+ ss_->Remove(this);
+ return PhysicalSocket::Close();
+ }
+};
+
+class FileDispatcher: public Dispatcher, public AsyncFile {
+ public:
+ FileDispatcher(int fd, PhysicalSocketServer *ss) : ss_(ss), fd_(fd), flags_(0) {
+ set_readable(true);
+
+ ss_->Add(this);
+
+ fcntl(fd_, F_SETFL, fcntl(fd_, F_GETFL, 0) | O_NONBLOCK);
+ }
+
+ virtual ~FileDispatcher() {
+ ss_->Remove(this);
+ }
+
+ SocketServer* socketserver() { return ss_; }
+
+ virtual int GetDescriptor() {
+ return fd_;
+ }
+
+ virtual bool IsDescriptorClosed() {
+ return false;
+ }
+
+ virtual uint32 GetRequestedEvents() {
+ return flags_;
+ }
+
+ virtual void OnPreEvent(uint32 ff) {
+ }
+
+ virtual void OnEvent(uint32 ff, int err) {
+ if ((ff & DE_READ) != 0)
+ SignalReadEvent(this);
+ if ((ff & DE_WRITE) != 0)
+ SignalWriteEvent(this);
+ if ((ff & DE_CLOSE) != 0)
+ SignalCloseEvent(this, err);
+ }
+
+ virtual bool readable() {
+ return (flags_ & DE_READ) != 0;
+ }
+
+ virtual void set_readable(bool value) {
+ flags_ = value ? (flags_ | DE_READ) : (flags_ & ~DE_READ);
+ }
+
+ virtual bool writable() {
+ return (flags_ & DE_WRITE) != 0;
+ }
+
+ virtual void set_writable(bool value) {
+ flags_ = value ? (flags_ | DE_WRITE) : (flags_ & ~DE_WRITE);
+ }
+
+ private:
+ PhysicalSocketServer* ss_;
+ int fd_;
+ int flags_;
+};
+
+AsyncFile* PhysicalSocketServer::CreateFile(int fd) {
+ return new FileDispatcher(fd, this);
+}
+
+#endif // WEBRTC_POSIX
+
+#if defined(WEBRTC_WIN)
+static uint32 FlagsToEvents(uint32 events) {
+ uint32 ffFD = FD_CLOSE;
+ if (events & DE_READ)
+ ffFD |= FD_READ;
+ if (events & DE_WRITE)
+ ffFD |= FD_WRITE;
+ if (events & DE_CONNECT)
+ ffFD |= FD_CONNECT;
+ if (events & DE_ACCEPT)
+ ffFD |= FD_ACCEPT;
+ return ffFD;
+}
+
+class EventDispatcher : public Dispatcher {
+ public:
+ EventDispatcher(PhysicalSocketServer *ss) : ss_(ss) {
+ hev_ = WSACreateEvent();
+ if (hev_) {
+ ss_->Add(this);
+ }
+ }
+
+ ~EventDispatcher() {
+ if (hev_ != NULL) {
+ ss_->Remove(this);
+ WSACloseEvent(hev_);
+ hev_ = NULL;
+ }
+ }
+
+ virtual void Signal() {
+ if (hev_ != NULL)
+ WSASetEvent(hev_);
+ }
+
+ virtual uint32 GetRequestedEvents() {
+ return 0;
+ }
+
+ virtual void OnPreEvent(uint32 ff) {
+ WSAResetEvent(hev_);
+ }
+
+ virtual void OnEvent(uint32 ff, int err) {
+ }
+
+ virtual WSAEVENT GetWSAEvent() {
+ return hev_;
+ }
+
+ virtual SOCKET GetSocket() {
+ return INVALID_SOCKET;
+ }
+
+ virtual bool CheckSignalClose() { return false; }
+
+private:
+ PhysicalSocketServer* ss_;
+ WSAEVENT hev_;
+};
+
+class SocketDispatcher : public Dispatcher, public PhysicalSocket {
+ public:
+ static int next_id_;
+ int id_;
+ bool signal_close_;
+ int signal_err_;
+
+ SocketDispatcher(PhysicalSocketServer* ss)
+ : PhysicalSocket(ss),
+ id_(0),
+ signal_close_(false) {
+ }
+
+ SocketDispatcher(SOCKET s, PhysicalSocketServer* ss)
+ : PhysicalSocket(ss, s),
+ id_(0),
+ signal_close_(false) {
+ }
+
+ virtual ~SocketDispatcher() {
+ Close();
+ }
+
+ bool Initialize() {
+ ASSERT(s_ != INVALID_SOCKET);
+ // The socket must be non-blocking.
+ u_long argp = 1;
+ ioctlsocket(s_, FIONBIO, &argp);
+ ss_->Add(this);
+ return true;
+ }
+
+ virtual bool Create(int type) {
+ return Create(AF_INET, type);
+ }
+
+ virtual bool Create(int family, int type) {
+ // Create socket
+ if (!PhysicalSocket::Create(family, type))
+ return false;
+
+ if (!Initialize())
+ return false;
+
+ do { id_ = ++next_id_; } while (id_ == 0);
+ return true;
+ }
+
+ virtual int Close() {
+ if (s_ == INVALID_SOCKET)
+ return 0;
+
+ id_ = 0;
+ signal_close_ = false;
+ ss_->Remove(this);
+ return PhysicalSocket::Close();
+ }
+
+ virtual uint32 GetRequestedEvents() {
+ return enabled_events_;
+ }
+
+ virtual void OnPreEvent(uint32 ff) {
+ if ((ff & DE_CONNECT) != 0)
+ state_ = CS_CONNECTED;
+ // We set CS_CLOSED from CheckSignalClose.
+ }
+
+ virtual void OnEvent(uint32 ff, int err) {
+ int cache_id = id_;
+ // Make sure we deliver connect/accept first. Otherwise, consumers may see
+ // something like a READ followed by a CONNECT, which would be odd.
+ if (((ff & DE_CONNECT) != 0) && (id_ == cache_id)) {
+ if (ff != DE_CONNECT)
+ LOG(LS_VERBOSE) << "Signalled with DE_CONNECT: " << ff;
+ enabled_events_ &= ~DE_CONNECT;
+#ifdef _DEBUG
+ dbg_addr_ = "Connected @ ";
+ dbg_addr_.append(GetRemoteAddress().ToString());
+#endif // _DEBUG
+ SignalConnectEvent(this);
+ }
+ if (((ff & DE_ACCEPT) != 0) && (id_ == cache_id)) {
+ enabled_events_ &= ~DE_ACCEPT;
+ SignalReadEvent(this);
+ }
+ if ((ff & DE_READ) != 0) {
+ enabled_events_ &= ~DE_READ;
+ SignalReadEvent(this);
+ }
+ if (((ff & DE_WRITE) != 0) && (id_ == cache_id)) {
+ enabled_events_ &= ~DE_WRITE;
+ SignalWriteEvent(this);
+ }
+ if (((ff & DE_CLOSE) != 0) && (id_ == cache_id)) {
+ signal_close_ = true;
+ signal_err_ = err;
+ }
+ }
+
+ virtual WSAEVENT GetWSAEvent() {
+ return WSA_INVALID_EVENT;
+ }
+
+ virtual SOCKET GetSocket() {
+ return s_;
+ }
+
+ virtual bool CheckSignalClose() {
+ if (!signal_close_)
+ return false;
+
+ char ch;
+ if (recv(s_, &ch, 1, MSG_PEEK) > 0)
+ return false;
+
+ state_ = CS_CLOSED;
+ signal_close_ = false;
+ SignalCloseEvent(this, signal_err_);
+ return true;
+ }
+};
+
+int SocketDispatcher::next_id_ = 0;
+
+#endif // WEBRTC_WIN
+
+// Sets a boolean flag to false when signaled.
+class Signaler : public EventDispatcher {
+ public:
+ Signaler(PhysicalSocketServer* ss, bool* pf)
+ : EventDispatcher(ss), pf_(pf) {
+ }
+ virtual ~Signaler() { }
+
+ void OnEvent(uint32 ff, int err) {
+ if (pf_)
+ *pf_ = false;
+ }
+
+ private:
+ bool *pf_;
+};
+
+PhysicalSocketServer::PhysicalSocketServer()
+ : fWait_(false) {
+ signal_wakeup_ = new Signaler(this, &fWait_);
+#if defined(WEBRTC_WIN)
+ socket_ev_ = WSACreateEvent();
+#endif
+}
+
+PhysicalSocketServer::~PhysicalSocketServer() {
+#if defined(WEBRTC_WIN)
+ WSACloseEvent(socket_ev_);
+#endif
+#if defined(WEBRTC_POSIX)
+ signal_dispatcher_.reset();
+#endif
+ delete signal_wakeup_;
+ ASSERT(dispatchers_.empty());
+}
+
+void PhysicalSocketServer::WakeUp() {
+ signal_wakeup_->Signal();
+}
+
+Socket* PhysicalSocketServer::CreateSocket(int type) {
+ return CreateSocket(AF_INET, type);
+}
+
+Socket* PhysicalSocketServer::CreateSocket(int family, int type) {
+ PhysicalSocket* socket = new PhysicalSocket(this);
+ if (socket->Create(family, type)) {
+ return socket;
+ } else {
+ delete socket;
+ return 0;
+ }
+}
+
+AsyncSocket* PhysicalSocketServer::CreateAsyncSocket(int type) {
+ return CreateAsyncSocket(AF_INET, type);
+}
+
+AsyncSocket* PhysicalSocketServer::CreateAsyncSocket(int family, int type) {
+ SocketDispatcher* dispatcher = new SocketDispatcher(this);
+ if (dispatcher->Create(family, type)) {
+ return dispatcher;
+ } else {
+ delete dispatcher;
+ return 0;
+ }
+}
+
+AsyncSocket* PhysicalSocketServer::WrapSocket(SOCKET s) {
+ SocketDispatcher* dispatcher = new SocketDispatcher(s, this);
+ if (dispatcher->Initialize()) {
+ return dispatcher;
+ } else {
+ delete dispatcher;
+ return 0;
+ }
+}
+
+void PhysicalSocketServer::Add(Dispatcher *pdispatcher) {
+ CritScope cs(&crit_);
+ // Prevent duplicates; adding the same dispatcher twice can cause dead
+ // dispatchers to stick around.
+ DispatcherList::iterator pos = std::find(dispatchers_.begin(),
+ dispatchers_.end(),
+ pdispatcher);
+ if (pos != dispatchers_.end())
+ return;
+ dispatchers_.push_back(pdispatcher);
+}
+
+void PhysicalSocketServer::Remove(Dispatcher *pdispatcher) {
+ CritScope cs(&crit_);
+ DispatcherList::iterator pos = std::find(dispatchers_.begin(),
+ dispatchers_.end(),
+ pdispatcher);
+ // We silently ignore duplicate calls to Add, so we should silently ignore
+ // the (expected) symmetric calls to Remove. Note that this may still hide
+ // a real issue, so we at least log a warning about it.
+ if (pos == dispatchers_.end()) {
+ LOG(LS_WARNING) << "PhysicalSocketServer asked to remove a unknown "
+ << "dispatcher, potentially from a duplicate call to Add.";
+ return;
+ }
+ size_t index = pos - dispatchers_.begin();
+ dispatchers_.erase(pos);
+ for (IteratorList::iterator it = iterators_.begin(); it != iterators_.end();
+ ++it) {
+ if (index < **it) {
+ --**it;
+ }
+ }
+}
+
+#if defined(WEBRTC_POSIX)
+bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) {
+ // Calculate timing information
+
+ struct timeval *ptvWait = NULL;
+ struct timeval tvWait;
+ struct timeval tvStop;
+ if (cmsWait != kForever) {
+ // Calculate wait timeval
+ tvWait.tv_sec = cmsWait / 1000;
+ tvWait.tv_usec = (cmsWait % 1000) * 1000;
+ ptvWait = &tvWait;
+
+ // Calculate when to return in a timeval
+ gettimeofday(&tvStop, NULL);
+ tvStop.tv_sec += tvWait.tv_sec;
+ tvStop.tv_usec += tvWait.tv_usec;
+ if (tvStop.tv_usec >= 1000000) {
+ tvStop.tv_usec -= 1000000;
+ tvStop.tv_sec += 1;
+ }
+ }
+
+ // Zero all fd_sets. Don't need to do this inside the loop since
+ // select() zeros the descriptors not signaled
+
+ fd_set fdsRead;
+ FD_ZERO(&fdsRead);
+ fd_set fdsWrite;
+ FD_ZERO(&fdsWrite);
+
+ fWait_ = true;
+
+ while (fWait_) {
+ int fdmax = -1;
+ {
+ CritScope cr(&crit_);
+ for (size_t i = 0; i < dispatchers_.size(); ++i) {
+ // Query dispatchers for read and write wait state
+ Dispatcher *pdispatcher = dispatchers_[i];
+ ASSERT(pdispatcher);
+ if (!process_io && (pdispatcher != signal_wakeup_))
+ continue;
+ int fd = pdispatcher->GetDescriptor();
+ if (fd > fdmax)
+ fdmax = fd;
+
+ uint32 ff = pdispatcher->GetRequestedEvents();
+ if (ff & (DE_READ | DE_ACCEPT))
+ FD_SET(fd, &fdsRead);
+ if (ff & (DE_WRITE | DE_CONNECT))
+ FD_SET(fd, &fdsWrite);
+ }
+ }
+
+ // Wait then call handlers as appropriate
+ // < 0 means error
+ // 0 means timeout
+ // > 0 means count of descriptors ready
+ int n = select(fdmax + 1, &fdsRead, &fdsWrite, NULL, ptvWait);
+
+ // If error, return error.
+ if (n < 0) {
+ if (errno != EINTR) {
+ LOG_E(LS_ERROR, EN, errno) << "select";
+ return false;
+ }
+ // Else ignore the error and keep going. If this EINTR was for one of the
+ // signals managed by this PhysicalSocketServer, the
+ // PosixSignalDeliveryDispatcher will be in the signaled state in the next
+ // iteration.
+ } else if (n == 0) {
+ // If timeout, return success
+ return true;
+ } else {
+ // We have signaled descriptors
+ CritScope cr(&crit_);
+ for (size_t i = 0; i < dispatchers_.size(); ++i) {
+ Dispatcher *pdispatcher = dispatchers_[i];
+ int fd = pdispatcher->GetDescriptor();
+ uint32 ff = 0;
+ int errcode = 0;
+
+ // Reap any error code, which can be signaled through reads or writes.
+ // TODO: Should we set errcode if getsockopt fails?
+ if (FD_ISSET(fd, &fdsRead) || FD_ISSET(fd, &fdsWrite)) {
+ socklen_t len = sizeof(errcode);
+ ::getsockopt(fd, SOL_SOCKET, SO_ERROR, &errcode, &len);
+ }
+
+ // Check readable descriptors. If we're waiting on an accept, signal
+ // that. Otherwise we're waiting for data, check to see if we're
+ // readable or really closed.
+ // TODO: Only peek at TCP descriptors.
+ if (FD_ISSET(fd, &fdsRead)) {
+ FD_CLR(fd, &fdsRead);
+ if (pdispatcher->GetRequestedEvents() & DE_ACCEPT) {
+ ff |= DE_ACCEPT;
+ } else if (errcode || pdispatcher->IsDescriptorClosed()) {
+ ff |= DE_CLOSE;
+ } else {
+ ff |= DE_READ;
+ }
+ }
+
+ // Check writable descriptors. If we're waiting on a connect, detect
+ // success versus failure by the reaped error code.
+ if (FD_ISSET(fd, &fdsWrite)) {
+ FD_CLR(fd, &fdsWrite);
+ if (pdispatcher->GetRequestedEvents() & DE_CONNECT) {
+ if (!errcode) {
+ ff |= DE_CONNECT;
+ } else {
+ ff |= DE_CLOSE;
+ }
+ } else {
+ ff |= DE_WRITE;
+ }
+ }
+
+ // Tell the descriptor about the event.
+ if (ff != 0) {
+ pdispatcher->OnPreEvent(ff);
+ pdispatcher->OnEvent(ff, errcode);
+ }
+ }
+ }
+
+ // Recalculate the time remaining to wait. Doing it here means it isn't
+ // calculated twice the first time through the loop.
+ if (ptvWait) {
+ ptvWait->tv_sec = 0;
+ ptvWait->tv_usec = 0;
+ struct timeval tvT;
+ gettimeofday(&tvT, NULL);
+ if ((tvStop.tv_sec > tvT.tv_sec)
+ || ((tvStop.tv_sec == tvT.tv_sec)
+ && (tvStop.tv_usec > tvT.tv_usec))) {
+ ptvWait->tv_sec = tvStop.tv_sec - tvT.tv_sec;
+ ptvWait->tv_usec = tvStop.tv_usec - tvT.tv_usec;
+ if (ptvWait->tv_usec < 0) {
+ ASSERT(ptvWait->tv_sec > 0);
+ ptvWait->tv_usec += 1000000;
+ ptvWait->tv_sec -= 1;
+ }
+ }
+ }
+ }
+
+ return true;
+}
+
+static void GlobalSignalHandler(int signum) {
+ PosixSignalHandler::Instance()->OnPosixSignalReceived(signum);
+}
+
+bool PhysicalSocketServer::SetPosixSignalHandler(int signum,
+ void (*handler)(int)) {
+ // If handler is SIG_IGN or SIG_DFL then clear our user-level handler,
+ // otherwise set one.
+ if (handler == SIG_IGN || handler == SIG_DFL) {
+ if (!InstallSignal(signum, handler)) {
+ return false;
+ }
+ if (signal_dispatcher_) {
+ signal_dispatcher_->ClearHandler(signum);
+ if (!signal_dispatcher_->HasHandlers()) {
+ signal_dispatcher_.reset();
+ }
+ }
+ } else {
+ if (!signal_dispatcher_) {
+ signal_dispatcher_.reset(new PosixSignalDispatcher(this));
+ }
+ signal_dispatcher_->SetHandler(signum, handler);
+ if (!InstallSignal(signum, &GlobalSignalHandler)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+Dispatcher* PhysicalSocketServer::signal_dispatcher() {
+ return signal_dispatcher_.get();
+}
+
+bool PhysicalSocketServer::InstallSignal(int signum, void (*handler)(int)) {
+ struct sigaction act;
+ // It doesn't really matter what we set this mask to.
+ if (sigemptyset(&act.sa_mask) != 0) {
+ LOG_ERR(LS_ERROR) << "Couldn't set mask";
+ return false;
+ }
+ act.sa_handler = handler;
+#if !defined(__native_client__)
+ // Use SA_RESTART so that our syscalls don't get EINTR, since we don't need it
+ // and it's a nuisance. Though some syscalls still return EINTR and there's no
+ // real standard for which ones. :(
+ act.sa_flags = SA_RESTART;
+#else
+ act.sa_flags = 0;
+#endif
+ if (sigaction(signum, &act, NULL) != 0) {
+ LOG_ERR(LS_ERROR) << "Couldn't set sigaction";
+ return false;
+ }
+ return true;
+}
+#endif // WEBRTC_POSIX
+
+#if defined(WEBRTC_WIN)
+bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) {
+ int cmsTotal = cmsWait;
+ int cmsElapsed = 0;
+ uint32 msStart = Time();
+
+ fWait_ = true;
+ while (fWait_) {
+ std::vector<WSAEVENT> events;
+ std::vector<Dispatcher *> event_owners;
+
+ events.push_back(socket_ev_);
+
+ {
+ CritScope cr(&crit_);
+ size_t i = 0;
+ iterators_.push_back(&i);
+ // Don't track dispatchers_.size(), because we want to pick up any new
+ // dispatchers that were added while processing the loop.
+ while (i < dispatchers_.size()) {
+ Dispatcher* disp = dispatchers_[i++];
+ if (!process_io && (disp != signal_wakeup_))
+ continue;
+ SOCKET s = disp->GetSocket();
+ if (disp->CheckSignalClose()) {
+ // We just signalled close, don't poll this socket
+ } else if (s != INVALID_SOCKET) {
+ WSAEventSelect(s,
+ events[0],
+ FlagsToEvents(disp->GetRequestedEvents()));
+ } else {
+ events.push_back(disp->GetWSAEvent());
+ event_owners.push_back(disp);
+ }
+ }
+ ASSERT(iterators_.back() == &i);
+ iterators_.pop_back();
+ }
+
+ // Which is shorter, the delay wait or the asked wait?
+
+ int cmsNext;
+ if (cmsWait == kForever) {
+ cmsNext = cmsWait;
+ } else {
+ cmsNext = _max(0, cmsTotal - cmsElapsed);
+ }
+
+ // Wait for one of the events to signal
+ DWORD dw = WSAWaitForMultipleEvents(static_cast<DWORD>(events.size()),
+ &events[0],
+ false,
+ cmsNext,
+ false);
+
+ if (dw == WSA_WAIT_FAILED) {
+ // Failed?
+ // TODO: need a better strategy than this!
+ WSAGetLastError();
+ ASSERT(false);
+ return false;
+ } else if (dw == WSA_WAIT_TIMEOUT) {
+ // Timeout?
+ return true;
+ } else {
+ // Figure out which one it is and call it
+ CritScope cr(&crit_);
+ int index = dw - WSA_WAIT_EVENT_0;
+ if (index > 0) {
+ --index; // The first event is the socket event
+ event_owners[index]->OnPreEvent(0);
+ event_owners[index]->OnEvent(0, 0);
+ } else if (process_io) {
+ size_t i = 0, end = dispatchers_.size();
+ iterators_.push_back(&i);
+ iterators_.push_back(&end); // Don't iterate over new dispatchers.
+ while (i < end) {
+ Dispatcher* disp = dispatchers_[i++];
+ SOCKET s = disp->GetSocket();
+ if (s == INVALID_SOCKET)
+ continue;
+
+ WSANETWORKEVENTS wsaEvents;
+ int err = WSAEnumNetworkEvents(s, events[0], &wsaEvents);
+ if (err == 0) {
+
+#if LOGGING
+ {
+ if ((wsaEvents.lNetworkEvents & FD_READ) &&
+ wsaEvents.iErrorCode[FD_READ_BIT] != 0) {
+ LOG(WARNING) << "PhysicalSocketServer got FD_READ_BIT error "
+ << wsaEvents.iErrorCode[FD_READ_BIT];
+ }
+ if ((wsaEvents.lNetworkEvents & FD_WRITE) &&
+ wsaEvents.iErrorCode[FD_WRITE_BIT] != 0) {
+ LOG(WARNING) << "PhysicalSocketServer got FD_WRITE_BIT error "
+ << wsaEvents.iErrorCode[FD_WRITE_BIT];
+ }
+ if ((wsaEvents.lNetworkEvents & FD_CONNECT) &&
+ wsaEvents.iErrorCode[FD_CONNECT_BIT] != 0) {
+ LOG(WARNING) << "PhysicalSocketServer got FD_CONNECT_BIT error "
+ << wsaEvents.iErrorCode[FD_CONNECT_BIT];
+ }
+ if ((wsaEvents.lNetworkEvents & FD_ACCEPT) &&
+ wsaEvents.iErrorCode[FD_ACCEPT_BIT] != 0) {
+ LOG(WARNING) << "PhysicalSocketServer got FD_ACCEPT_BIT error "
+ << wsaEvents.iErrorCode[FD_ACCEPT_BIT];
+ }
+ if ((wsaEvents.lNetworkEvents & FD_CLOSE) &&
+ wsaEvents.iErrorCode[FD_CLOSE_BIT] != 0) {
+ LOG(WARNING) << "PhysicalSocketServer got FD_CLOSE_BIT error "
+ << wsaEvents.iErrorCode[FD_CLOSE_BIT];
+ }
+ }
+#endif
+ uint32 ff = 0;
+ int errcode = 0;
+ if (wsaEvents.lNetworkEvents & FD_READ)
+ ff |= DE_READ;
+ if (wsaEvents.lNetworkEvents & FD_WRITE)
+ ff |= DE_WRITE;
+ if (wsaEvents.lNetworkEvents & FD_CONNECT) {
+ if (wsaEvents.iErrorCode[FD_CONNECT_BIT] == 0) {
+ ff |= DE_CONNECT;
+ } else {
+ ff |= DE_CLOSE;
+ errcode = wsaEvents.iErrorCode[FD_CONNECT_BIT];
+ }
+ }
+ if (wsaEvents.lNetworkEvents & FD_ACCEPT)
+ ff |= DE_ACCEPT;
+ if (wsaEvents.lNetworkEvents & FD_CLOSE) {
+ ff |= DE_CLOSE;
+ errcode = wsaEvents.iErrorCode[FD_CLOSE_BIT];
+ }
+ if (ff != 0) {
+ disp->OnPreEvent(ff);
+ disp->OnEvent(ff, errcode);
+ }
+ }
+ }
+ ASSERT(iterators_.back() == &end);
+ iterators_.pop_back();
+ ASSERT(iterators_.back() == &i);
+ iterators_.pop_back();
+ }
+
+ // Reset the network event until new activity occurs
+ WSAResetEvent(socket_ev_);
+ }
+
+ // Break?
+ if (!fWait_)
+ break;
+ cmsElapsed = TimeSince(msStart);
+ if ((cmsWait != kForever) && (cmsElapsed >= cmsWait)) {
+ break;
+ }
+ }
+
+ // Done
+ return true;
+}
+#endif // WEBRTC_WIN
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/physicalsocketserver.h b/chromium/third_party/webrtc/base/physicalsocketserver.h
new file mode 100644
index 00000000000..8a289de7e52
--- /dev/null
+++ b/chromium/third_party/webrtc/base/physicalsocketserver.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_PHYSICALSOCKETSERVER_H__
+#define WEBRTC_BASE_PHYSICALSOCKETSERVER_H__
+
+#include <vector>
+
+#include "webrtc/base/asyncfile.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/socketserver.h"
+#include "webrtc/base/criticalsection.h"
+
+#if defined(WEBRTC_POSIX)
+typedef int SOCKET;
+#endif // WEBRTC_POSIX
+
+namespace rtc {
+
+// Event constants for the Dispatcher class.
+enum DispatcherEvent {
+ DE_READ = 0x0001,
+ DE_WRITE = 0x0002,
+ DE_CONNECT = 0x0004,
+ DE_CLOSE = 0x0008,
+ DE_ACCEPT = 0x0010,
+};
+
+class Signaler;
+#if defined(WEBRTC_POSIX)
+class PosixSignalDispatcher;
+#endif
+
+class Dispatcher {
+ public:
+ virtual ~Dispatcher() {}
+ virtual uint32 GetRequestedEvents() = 0;
+ virtual void OnPreEvent(uint32 ff) = 0;
+ virtual void OnEvent(uint32 ff, int err) = 0;
+#if defined(WEBRTC_WIN)
+ virtual WSAEVENT GetWSAEvent() = 0;
+ virtual SOCKET GetSocket() = 0;
+ virtual bool CheckSignalClose() = 0;
+#elif defined(WEBRTC_POSIX)
+ virtual int GetDescriptor() = 0;
+ virtual bool IsDescriptorClosed() = 0;
+#endif
+};
+
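+// Editorial sketch of a minimal POSIX-side Dispatcher, to show how the
+// interface above is typically filled in (the class name and member are made
+// up; the real implementations live in physicalsocketserver.cc):
+//
+//   class FdWatcher : public rtc::Dispatcher {
+//    public:
+//     explicit FdWatcher(int fd) : fd_(fd) {}
+//     virtual uint32 GetRequestedEvents() { return DE_READ; }
+//     virtual void OnPreEvent(uint32 ff) {}
+//     virtual void OnEvent(uint32 ff, int err) { /* fd_ is readable */ }
+//     virtual int GetDescriptor() { return fd_; }
+//     virtual bool IsDescriptorClosed() { return false; }
+//    private:
+//     int fd_;
+//   };
+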
+// A socket server that provides the real sockets of the underlying OS.
+class PhysicalSocketServer : public SocketServer {
+ public:
+ PhysicalSocketServer();
+ virtual ~PhysicalSocketServer();
+
+ // SocketFactory:
+ virtual Socket* CreateSocket(int type);
+ virtual Socket* CreateSocket(int family, int type);
+
+ virtual AsyncSocket* CreateAsyncSocket(int type);
+ virtual AsyncSocket* CreateAsyncSocket(int family, int type);
+
+ // Internal Factory for Accept
+ AsyncSocket* WrapSocket(SOCKET s);
+
+ // SocketServer:
+ virtual bool Wait(int cms, bool process_io);
+ virtual void WakeUp();
+
+ void Add(Dispatcher* dispatcher);
+ void Remove(Dispatcher* dispatcher);
+
+#if defined(WEBRTC_POSIX)
+ AsyncFile* CreateFile(int fd);
+
+ // Sets the function to be executed in response to the specified POSIX signal.
+ // The function is executed from inside Wait() using the "self-pipe trick"--
+ // regardless of which thread receives the signal--and hence can safely
+ // manipulate user-level data structures.
+ // "handler" may be SIG_IGN, SIG_DFL, or a user-specified function, just like
+ // with signal(2).
+ // Only one PhysicalSocketServer should have user-level signal handlers.
+ // Dispatching signals on multiple PhysicalSocketServers is not reliable.
+ // The signal mask is not modified. It is the caller's responsibility to
+ // maintain it as desired.
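+ //
+ // Illustrative usage (an editorial sketch mirroring the unit tests; the
+ // signal, timeout, and handler body are arbitrary example choices):
+ //
+ //   static void OnTerm(int signum) { /* runs at user level from inside
+ //                                       Wait(), not in signal context */ }
+ //   ...
+ //   ss.SetPosixSignalHandler(SIGTERM, &OnTerm);
+ //   ss.Wait(1500, true);  // OnTerm is dispatched from here if SIGTERM fired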
+ virtual bool SetPosixSignalHandler(int signum, void (*handler)(int));
+
+ protected:
+ Dispatcher* signal_dispatcher();
+#endif
+
+ private:
+ typedef std::vector<Dispatcher*> DispatcherList;
+ typedef std::vector<size_t*> IteratorList;
+
+#if defined(WEBRTC_POSIX)
+ static bool InstallSignal(int signum, void (*handler)(int));
+
+ scoped_ptr<PosixSignalDispatcher> signal_dispatcher_;
+#endif
+ DispatcherList dispatchers_;
+ IteratorList iterators_;
+ Signaler* signal_wakeup_;
+ CriticalSection crit_;
+ bool fWait_;
+#if defined(WEBRTC_WIN)
+ WSAEVENT socket_ev_;
+#endif
+};
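+
+// Illustrative usage sketch (editorial, not part of the original header; the
+// address, port, and timeout are arbitrary example values):
+//
+//   rtc::PhysicalSocketServer ss;
+//   rtc::AsyncSocket* sock = ss.CreateAsyncSocket(AF_INET, SOCK_STREAM);
+//   sock->Connect(rtc::SocketAddress("127.0.0.1", 5000));
+//   // Connect/read/write/close are reported through the socket's sigslot
+//   // signals, which fire from inside Wait().
+//   while (ss.Wait(100, true)) { /* ... */ }
+//   delete sock;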
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_PHYSICALSOCKETSERVER_H__
diff --git a/chromium/third_party/webrtc/base/physicalsocketserver_unittest.cc b/chromium/third_party/webrtc/base/physicalsocketserver_unittest.cc
new file mode 100644
index 00000000000..f29c5fc120b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/physicalsocketserver_unittest.cc
@@ -0,0 +1,274 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <signal.h>
+#include <stdarg.h>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/socket_unittest.h"
+#include "webrtc/base/testutils.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+class PhysicalSocketTest : public SocketTest {
+};
+
+TEST_F(PhysicalSocketTest, TestConnectIPv4) {
+ SocketTest::TestConnectIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectIPv6) {
+ SocketTest::TestConnectIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectWithDnsLookupIPv4) {
+ SocketTest::TestConnectWithDnsLookupIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectWithDnsLookupIPv6) {
+ SocketTest::TestConnectWithDnsLookupIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectFailIPv4) {
+ SocketTest::TestConnectFailIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectFailIPv6) {
+ SocketTest::TestConnectFailIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectWithDnsLookupFailIPv4) {
+ SocketTest::TestConnectWithDnsLookupFailIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectWithDnsLookupFailIPv6) {
+ SocketTest::TestConnectWithDnsLookupFailIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectWithClosedSocketIPv4) {
+ SocketTest::TestConnectWithClosedSocketIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectWithClosedSocketIPv6) {
+ SocketTest::TestConnectWithClosedSocketIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectWhileNotClosedIPv4) {
+ SocketTest::TestConnectWhileNotClosedIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestConnectWhileNotClosedIPv6) {
+ SocketTest::TestConnectWhileNotClosedIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestServerCloseDuringConnectIPv4) {
+ SocketTest::TestServerCloseDuringConnectIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestServerCloseDuringConnectIPv6) {
+ SocketTest::TestServerCloseDuringConnectIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestClientCloseDuringConnectIPv4) {
+ SocketTest::TestClientCloseDuringConnectIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestClientCloseDuringConnectIPv6) {
+ SocketTest::TestClientCloseDuringConnectIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestServerCloseIPv4) {
+ SocketTest::TestServerCloseIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestServerCloseIPv6) {
+ SocketTest::TestServerCloseIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestCloseInClosedCallbackIPv4) {
+ SocketTest::TestCloseInClosedCallbackIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestCloseInClosedCallbackIPv6) {
+ SocketTest::TestCloseInClosedCallbackIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestSocketServerWaitIPv4) {
+ SocketTest::TestSocketServerWaitIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestSocketServerWaitIPv6) {
+ SocketTest::TestSocketServerWaitIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestTcpIPv4) {
+ SocketTest::TestTcpIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestTcpIPv6) {
+ SocketTest::TestTcpIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestUdpIPv4) {
+ SocketTest::TestUdpIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestUdpIPv6) {
+ SocketTest::TestUdpIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestUdpReadyToSendIPv4) {
+ SocketTest::TestUdpReadyToSendIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestUdpReadyToSendIPv6) {
+ SocketTest::TestUdpReadyToSendIPv6();
+}
+
+TEST_F(PhysicalSocketTest, TestGetSetOptionsIPv4) {
+ SocketTest::TestGetSetOptionsIPv4();
+}
+
+TEST_F(PhysicalSocketTest, TestGetSetOptionsIPv6) {
+ SocketTest::TestGetSetOptionsIPv6();
+}
+
+#if defined(WEBRTC_POSIX)
+
+class PosixSignalDeliveryTest : public testing::Test {
+ public:
+ static void RecordSignal(int signum) {
+ signals_received_.push_back(signum);
+ signaled_thread_ = Thread::Current();
+ }
+
+ protected:
+ void SetUp() {
+ ss_.reset(new PhysicalSocketServer());
+ }
+
+ void TearDown() {
+ ss_.reset(NULL);
+ signals_received_.clear();
+ signaled_thread_ = NULL;
+ }
+
+ bool ExpectSignal(int signum) {
+ if (signals_received_.empty()) {
+ LOG(LS_ERROR) << "ExpectSignal(): No signal received";
+ return false;
+ }
+ if (signals_received_[0] != signum) {
+ LOG(LS_ERROR) << "ExpectSignal(): Received signal " <<
+ signals_received_[0] << ", expected " << signum;
+ return false;
+ }
+ signals_received_.erase(signals_received_.begin());
+ return true;
+ }
+
+ bool ExpectNone() {
+ bool ret = signals_received_.empty();
+ if (!ret) {
+ LOG(LS_ERROR) << "ExpectNone(): Received signal " << signals_received_[0]
+ << ", expected none";
+ }
+ return ret;
+ }
+
+ static std::vector<int> signals_received_;
+ static Thread *signaled_thread_;
+
+ scoped_ptr<PhysicalSocketServer> ss_;
+};
+
+std::vector<int> PosixSignalDeliveryTest::signals_received_;
+Thread *PosixSignalDeliveryTest::signaled_thread_ = NULL;
+
+// Test receiving a synchronous signal while not in Wait() and then entering
+// Wait() afterwards.
+TEST_F(PosixSignalDeliveryTest, RaiseThenWait) {
+ ASSERT_TRUE(ss_->SetPosixSignalHandler(SIGTERM, &RecordSignal));
+ raise(SIGTERM);
+ EXPECT_TRUE(ss_->Wait(0, true));
+ EXPECT_TRUE(ExpectSignal(SIGTERM));
+ EXPECT_TRUE(ExpectNone());
+}
+
+// Test that we can handle getting tons of repeated signals and that we see all
+// the different ones.
+TEST_F(PosixSignalDeliveryTest, InsanelyManySignals) {
+ ss_->SetPosixSignalHandler(SIGTERM, &RecordSignal);
+ ss_->SetPosixSignalHandler(SIGINT, &RecordSignal);
+ for (int i = 0; i < 10000; ++i) {
+ raise(SIGTERM);
+ }
+ raise(SIGINT);
+ EXPECT_TRUE(ss_->Wait(0, true));
+ // Order will be lowest signal numbers first.
+ EXPECT_TRUE(ExpectSignal(SIGINT));
+ EXPECT_TRUE(ExpectSignal(SIGTERM));
+ EXPECT_TRUE(ExpectNone());
+}
+
+// Test that a signal during a Wait() call is detected.
+TEST_F(PosixSignalDeliveryTest, SignalDuringWait) {
+ ss_->SetPosixSignalHandler(SIGALRM, &RecordSignal);
+ alarm(1);
+ EXPECT_TRUE(ss_->Wait(1500, true));
+ EXPECT_TRUE(ExpectSignal(SIGALRM));
+ EXPECT_TRUE(ExpectNone());
+}
+
+class RaiseSigTermRunnable : public Runnable {
+ void Run(Thread *thread) {
+ thread->socketserver()->Wait(1000, false);
+
+ // Allow SIGTERM. This will be the only thread with it not masked so it will
+ // be delivered to us.
+ sigset_t mask;
+ sigemptyset(&mask);
+ pthread_sigmask(SIG_SETMASK, &mask, NULL);
+
+ // Raise it.
+ raise(SIGTERM);
+ }
+};
+
+// Test that it works no matter what thread the kernel chooses to give the
+// signal to (since it's not guaranteed to be the one that Wait() runs on).
+TEST_F(PosixSignalDeliveryTest, SignalOnDifferentThread) {
+ ss_->SetPosixSignalHandler(SIGTERM, &RecordSignal);
+ // Mask out SIGTERM so that it can't be delivered to this thread.
+ sigset_t mask;
+ sigemptyset(&mask);
+ sigaddset(&mask, SIGTERM);
+ EXPECT_EQ(0, pthread_sigmask(SIG_SETMASK, &mask, NULL));
+ // Start a new thread that raises it. It will have to be delivered to that
+ // thread. Our implementation should safely handle it and dispatch
+ // RecordSignal() on this thread.
+ scoped_ptr<Thread> thread(new Thread());
+ scoped_ptr<RaiseSigTermRunnable> runnable(new RaiseSigTermRunnable());
+ thread->Start(runnable.get());
+ EXPECT_TRUE(ss_->Wait(1500, true));
+ EXPECT_TRUE(ExpectSignal(SIGTERM));
+ EXPECT_EQ(Thread::Current(), signaled_thread_);
+ EXPECT_TRUE(ExpectNone());
+}
+
+#endif
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/posix.cc b/chromium/third_party/webrtc/base/posix.cc
new file mode 100644
index 00000000000..0eb24ee64a5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/posix.cc
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/posix.h"
+
+#include <sys/wait.h>
+#include <errno.h>
+#include <unistd.h>
+
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+#include "webrtc/base/linuxfdwalk.h"
+#endif
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+static void closefds(void *close_errors, int fd) {
+ if (fd <= 2) {
+ // We leave stdin/out/err open to the browser's terminal, if any.
+ return;
+ }
+ if (close(fd) < 0) {
+ *static_cast<bool *>(close_errors) = true;
+ }
+}
+#endif
+
+enum {
+ EXIT_FLAG_CHDIR_ERRORS = 1 << 0,
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ EXIT_FLAG_FDWALK_ERRORS = 1 << 1,
+ EXIT_FLAG_CLOSE_ERRORS = 1 << 2,
+#endif
+ EXIT_FLAG_SECOND_FORK_FAILED = 1 << 3,
+};
+
+bool RunAsDaemon(const char *file, const char *const argv[]) {
+ // Fork intermediate child to daemonize.
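+ // The double fork detaches the daemon: this parent reaps the intermediate
+ // child immediately, and the grandchild is re-parented to init(8), so it
+ // can never become a zombie of the caller.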
+ pid_t pid = fork();
+ if (pid < 0) {
+ LOG_ERR(LS_ERROR) << "fork()";
+ return false;
+ } else if (!pid) {
+ // Child.
+
+ // We try to close all fds and change directory to /, but if that fails we
+ // keep going because it's not critical.
+ int exit_code = 0;
+ if (chdir("/") < 0) {
+ exit_code |= EXIT_FLAG_CHDIR_ERRORS;
+ }
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ bool close_errors = false;
+ if (fdwalk(&closefds, &close_errors) < 0) {
+ exit_code |= EXIT_FLAG_FDWALK_ERRORS;
+ }
+ if (close_errors) {
+ exit_code |= EXIT_FLAG_CLOSE_ERRORS;
+ }
+#endif
+
+ // Fork again to become a daemon.
+ pid = fork();
+ // It is important that everything here use _exit() and not exit(), because
+ // exit() would call the destructors of all global variables in the whole
+ // process, which is both unnecessary and unsafe.
+ if (pid < 0) {
+ exit_code |= EXIT_FLAG_SECOND_FORK_FAILED;
+ _exit(exit_code); // if second fork failed
+ } else if (!pid) {
+ // Child.
+ // Successfully daemonized. Run command.
+ // POSIX requires the args to be typed as non-const for historical
+ // reasons, but it mandates that the actual implementation be const, so
+ // the cast is safe.
+ execvp(file, const_cast<char *const *>(argv));
+ _exit(255); // if execvp failed
+ }
+
+ // Parent.
+ // Successfully spawned process, but report any problems to the parent where
+ // we can log them.
+ _exit(exit_code);
+ }
+
+ // Parent. Reap intermediate child.
+ int status;
+ pid_t child = waitpid(pid, &status, 0);
+ if (child < 0) {
+ LOG_ERR(LS_ERROR) << "Error in waitpid()";
+ return false;
+ }
+ if (child != pid) {
+ // Should never happen (see man page).
+ LOG(LS_ERROR) << "waitpid() chose wrong child???";
+ return false;
+ }
+ if (!WIFEXITED(status)) {
+ LOG(LS_ERROR) << "Intermediate child killed uncleanly"; // Probably crashed
+ return false;
+ }
+
+ int exit_code = WEXITSTATUS(status);
+ if (exit_code & EXIT_FLAG_CHDIR_ERRORS) {
+ LOG(LS_WARNING) << "Child reported probles calling chdir()";
+ }
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ if (exit_code & EXIT_FLAG_FDWALK_ERRORS) {
+ LOG(LS_WARNING) << "Child reported problems calling fdwalk()";
+ }
+ if (exit_code & EXIT_FLAG_CLOSE_ERRORS) {
+ LOG(LS_WARNING) << "Child reported problems calling close()";
+ }
+#endif
+ if (exit_code & EXIT_FLAG_SECOND_FORK_FAILED) {
+ LOG(LS_ERROR) << "Failed to daemonize";
+ // This means the command was not launched, so failure.
+ return false;
+ }
+ return true;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/posix.h b/chromium/third_party/webrtc/base/posix.h
new file mode 100644
index 00000000000..8d1c2b11e45
--- /dev/null
+++ b/chromium/third_party/webrtc/base/posix.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_POSIX_H_
+#define WEBRTC_BASE_POSIX_H_
+
+namespace rtc {
+
+// Runs the given executable name as a daemon, so that it executes concurrently
+// with this process. Upon completion, the daemon process will automatically be
+// reaped by init(8), so an error exit status or a failure to start the
+// executable are not reported. Returns true if the daemon process was forked
+// successfully, else false.
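+//
+// A minimal usage sketch (hypothetical program path; argv must be
+// NULL-terminated, as execvp(3) requires):
+//   const char* const argv[] = { "/usr/bin/some-daemon", "--flag", NULL };
+//   RunAsDaemon(argv[0], argv);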
+bool RunAsDaemon(const char *file, const char *const argv[]);
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_POSIX_H_
diff --git a/chromium/third_party/webrtc/base/profiler.cc b/chromium/third_party/webrtc/base/profiler.cc
new file mode 100644
index 00000000000..f5734485383
--- /dev/null
+++ b/chromium/third_party/webrtc/base/profiler.cc
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/profiler.h"
+
+#include <math.h>
+
+#include "webrtc/base/timeutils.h"
+
+namespace {
+
+// When written to an ostream, FormattedTime chooses an appropriate scale and
+// suffix for a time value given in seconds.
+class FormattedTime {
+ public:
+ explicit FormattedTime(double t) : time_(t) {}
+ double time() const { return time_; }
+ private:
+ double time_;
+};
+
+std::ostream& operator<<(std::ostream& stream, const FormattedTime& time) {
+ if (time.time() < 1.0) {
+ stream << (time.time() * 1000.0) << "ms";
+ } else {
+ stream << time.time() << 's';
+ }
+ return stream;
+}
+
+} // namespace
+
+namespace rtc {
+
+ProfilerEvent::ProfilerEvent()
+ : total_time_(0.0),
+ mean_(0.0),
+ sum_of_squared_differences_(0.0),
+ start_count_(0),
+ event_count_(0) {
+}
+
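+// Start()/Stop() calls may nest; the elapsed time is measured from the
+// outermost Start() to the matching outermost Stop().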
+void ProfilerEvent::Start() {
+ if (start_count_ == 0) {
+ current_start_time_ = TimeNanos();
+ }
+ ++start_count_;
+}
+
+void ProfilerEvent::Stop(uint64 stop_time) {
+ --start_count_;
+ ASSERT(start_count_ >= 0);
+ if (start_count_ == 0) {
+ double elapsed = static_cast<double>(stop_time - current_start_time_) /
+ kNumNanosecsPerSec;
+ total_time_ += elapsed;
+ if (event_count_ == 0) {
+ minimum_ = maximum_ = elapsed;
+ } else {
+ minimum_ = _min(minimum_, elapsed);
+ maximum_ = _max(maximum_, elapsed);
+ }
+ // Online variance and mean algorithm: http://en.wikipedia.org/wiki/
+ // Algorithms_for_calculating_variance#Online_algorithm
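+ // After each sample, mean_ holds the running mean and
+ // sum_of_squared_differences_ holds sum((x - mean)^2), so the sample
+ // variance is sum_of_squared_differences_ / (event_count_ - 1).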
+ ++event_count_;
+ double delta = elapsed - mean_;
+ mean_ = mean_ + delta / event_count_;
+ sum_of_squared_differences_ += delta * (elapsed - mean_);
+ }
+}
+
+void ProfilerEvent::Stop() {
+ Stop(TimeNanos());
+}
+
+double ProfilerEvent::standard_deviation() const {
+ if (event_count_ <= 1) return 0.0;
+ return sqrt(sum_of_squared_differences_ / (event_count_ - 1.0));
+}
+
+Profiler* Profiler::Instance() {
+ LIBJINGLE_DEFINE_STATIC_LOCAL(Profiler, instance, ());
+ return &instance;
+}
+
+void Profiler::StartEvent(const std::string& event_name) {
+ lock_.LockShared();
+ EventMap::iterator it = events_.find(event_name);
+ bool needs_insert = (it == events_.end());
+ lock_.UnlockShared();
+
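+ // The common case (event already exists) takes only the shared lock; the
+ // exclusive lock is needed just once per event name, to insert it.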
+ if (needs_insert) {
+ // Need an exclusive lock to modify the map.
+ ExclusiveScope scope(&lock_);
+ it = events_.insert(
+ EventMap::value_type(event_name, ProfilerEvent())).first;
+ }
+
+ it->second.Start();
+}
+
+void Profiler::StopEvent(const std::string& event_name) {
+ // Get the time ASAP, then wait for the lock.
+ uint64 stop_time = TimeNanos();
+ SharedScope scope(&lock_);
+ EventMap::iterator it = events_.find(event_name);
+ if (it != events_.end()) {
+ it->second.Stop(stop_time);
+ }
+}
+
+void Profiler::ReportToLog(const char* file, int line,
+ LoggingSeverity severity_to_use,
+ const std::string& event_prefix) {
+ if (!LogMessage::Loggable(severity_to_use)) {
+ return;
+ }
+
+ SharedScope scope(&lock_);
+
+ { // Output first line.
+ LogMessage msg(file, line, severity_to_use);
+ msg.stream() << "=== Profile report ";
+ if (!event_prefix.empty()) {
+ msg.stream() << "(prefix: '" << event_prefix << "') ";
+ }
+ msg.stream() << "===";
+ }
+ for (EventMap::const_iterator it = events_.begin();
+ it != events_.end(); ++it) {
+ if (event_prefix.empty() || it->first.find(event_prefix) == 0) {
+ LogMessage(file, line, severity_to_use).stream()
+ << it->first << " " << it->second;
+ }
+ }
+ LogMessage(file, line, severity_to_use).stream()
+ << "=== End profile report ===";
+}
+
+void Profiler::ReportAllToLog(const char* file, int line,
+ LoggingSeverity severity_to_use) {
+ ReportToLog(file, line, severity_to_use, "");
+}
+
+const ProfilerEvent* Profiler::GetEvent(const std::string& event_name) const {
+ SharedScope scope(&lock_);
+ EventMap::const_iterator it =
+ events_.find(event_name);
+ return (it == events_.end()) ? NULL : &it->second;
+}
+
+bool Profiler::Clear() {
+ ExclusiveScope scope(&lock_);
+ bool result = true;
+ // Clear all events that aren't started.
+ EventMap::iterator it = events_.begin();
+ while (it != events_.end()) {
+ if (it->second.is_started()) {
+ ++it; // Can't clear started events.
+ result = false;
+ } else {
+ events_.erase(it++);
+ }
+ }
+ return result;
+}
+
+std::ostream& operator<<(std::ostream& stream,
+ const ProfilerEvent& profiler_event) {
+ stream << "count=" << profiler_event.event_count()
+ << " total=" << FormattedTime(profiler_event.total_time())
+ << " mean=" << FormattedTime(profiler_event.mean())
+ << " min=" << FormattedTime(profiler_event.minimum())
+ << " max=" << FormattedTime(profiler_event.maximum())
+ << " sd=" << profiler_event.standard_deviation();
+ return stream;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/profiler.h b/chromium/third_party/webrtc/base/profiler.h
new file mode 100644
index 00000000000..13b99f7c91f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/profiler.h
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2013 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// A simple wall-clock profiler for instrumented code.
+// Example:
+// void MyLongFunction() {
+// PROFILE_F(); // Time the execution of this function.
+// // Do something
+// { // Time just what is in this scope.
+// PROFILE("My event");
+// // Do something else
+// }
+// }
+// Another example:
+// void StartAsyncProcess() {
+// PROFILE_START("My async event");
+// DoSomethingAsyncAndThenCall(&Callback);
+// }
+// void Callback() {
+// PROFILE_STOP("My async event");
+// // Handle callback.
+// }
+
+#ifndef WEBRTC_BASE_PROFILER_H_
+#define WEBRTC_BASE_PROFILER_H_
+
+#include <map>
+#include <string>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/sharedexclusivelock.h"
+
+// Profiling could be switched via a build flag, but for now, it's always on.
+#ifndef ENABLE_PROFILING
+#define ENABLE_PROFILING
+#endif
+
+#ifdef ENABLE_PROFILING
+
+#define UV_HELPER2(x) _uv_ ## x
+#define UV_HELPER(x) UV_HELPER2(x)
+#define UNIQUE_VAR UV_HELPER(__LINE__)
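+// UNIQUE_VAR expands to a name containing the current line number, so several
+// PROFILE() statements in the same scope get distinct ProfilerScope variables.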
+
+// Profiles the current scope.
+#define PROFILE(msg) rtc::ProfilerScope UNIQUE_VAR(msg)
+// When placed at the start of a function, profiles the current function.
+#define PROFILE_F() PROFILE(__FUNCTION__)
+// Reports current timings to the log at severity |sev|.
+#define PROFILE_DUMP_ALL(sev) \
+ rtc::Profiler::Instance()->ReportAllToLog(__FILE__, __LINE__, sev)
+// Reports current timings for all events whose names are prefixed by |prefix|
+// to the log at severity |sev|. Using a unique event name as |prefix| will
+// report only that event.
+#define PROFILE_DUMP(sev, prefix) \
+ rtc::Profiler::Instance()->ReportToLog(__FILE__, __LINE__, sev, prefix)
+// Starts and stops a profile event. Useful when an event is not easily
+// captured within a scope (eg, an async call with a callback when done).
+#define PROFILE_START(msg) rtc::Profiler::Instance()->StartEvent(msg)
+#define PROFILE_STOP(msg) rtc::Profiler::Instance()->StopEvent(msg)
+// TODO(ryanpetrie): Consider adding PROFILE_DUMP_EVERY(sev, iterations)
+
+#undef UV_HELPER2
+#undef UV_HELPER
+#undef UNIQUE_VAR
+
+#else // ENABLE_PROFILING
+
+#define PROFILE(msg) (void)0
+#define PROFILE_F() (void)0
+#define PROFILE_DUMP_ALL(sev) (void)0
+#define PROFILE_DUMP(sev, prefix) (void)0
+#define PROFILE_START(msg) (void)0
+#define PROFILE_STOP(msg) (void)0
+
+#endif // ENABLE_PROFILING
+
+namespace rtc {
+
+// Tracks information for one profiler event.
+class ProfilerEvent {
+ public:
+ ProfilerEvent();
+ void Start();
+ void Stop();
+ void Stop(uint64 stop_time);
+ double standard_deviation() const;
+ double total_time() const { return total_time_; }
+ double mean() const { return mean_; }
+ double minimum() const { return minimum_; }
+ double maximum() const { return maximum_; }
+ int event_count() const { return event_count_; }
+ bool is_started() const { return start_count_ > 0; }
+
+ private:
+ uint64 current_start_time_;
+ double total_time_;
+ double mean_;
+ double sum_of_squared_differences_;
+ double minimum_;
+ double maximum_;
+ int start_count_;
+ int event_count_;
+};
+
+// Singleton that owns ProfilerEvents and reports results. Prefer to use
+// macros, defined above, rather than directly calling Profiler methods.
+class Profiler {
+ public:
+ void StartEvent(const std::string& event_name);
+ void StopEvent(const std::string& event_name);
+ void ReportToLog(const char* file, int line, LoggingSeverity severity_to_use,
+ const std::string& event_prefix);
+ void ReportAllToLog(const char* file, int line,
+ LoggingSeverity severity_to_use);
+ const ProfilerEvent* GetEvent(const std::string& event_name) const;
+ // Clears all _stopped_ events. Returns true if _all_ events were cleared.
+ bool Clear();
+
+ static Profiler* Instance();
+ private:
+ Profiler() {}
+
+ typedef std::map<std::string, ProfilerEvent> EventMap;
+ EventMap events_;
+ mutable SharedExclusiveLock lock_;
+
+ DISALLOW_COPY_AND_ASSIGN(Profiler);
+};
+
+// Starts an event on construction and stops it on destruction.
+// Used by PROFILE macro.
+class ProfilerScope {
+ public:
+ explicit ProfilerScope(const std::string& event_name)
+ : event_name_(event_name) {
+ Profiler::Instance()->StartEvent(event_name_);
+ }
+ ~ProfilerScope() {
+ Profiler::Instance()->StopEvent(event_name_);
+ }
+ private:
+ std::string event_name_;
+
+ DISALLOW_COPY_AND_ASSIGN(ProfilerScope);
+};
+
+std::ostream& operator<<(std::ostream& stream,
+ const ProfilerEvent& profiler_event);
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_PROFILER_H_
diff --git a/chromium/third_party/webrtc/base/profiler_unittest.cc b/chromium/third_party/webrtc/base/profiler_unittest.cc
new file mode 100644
index 00000000000..8f4421e9e91
--- /dev/null
+++ b/chromium/third_party/webrtc/base/profiler_unittest.cc
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/profiler.h"
+#include "webrtc/base/thread.h"
+
+namespace {
+
+const int kWaitMs = 250;
+const double kWaitSec = 0.250;
+const double kTolerance = 0.1;
+
+const char* TestFunc() {
+ PROFILE_F();
+ rtc::Thread::SleepMs(kWaitMs);
+ return __FUNCTION__;
+}
+
+} // namespace
+
+namespace rtc {
+
+TEST(ProfilerTest, TestFunction) {
+ ASSERT_TRUE(Profiler::Instance()->Clear());
+
+ // Profile a long-running function.
+ const char* function_name = TestFunc();
+ const ProfilerEvent* event = Profiler::Instance()->GetEvent(function_name);
+ ASSERT_TRUE(event != NULL);
+ EXPECT_FALSE(event->is_started());
+ EXPECT_EQ(1, event->event_count());
+ EXPECT_NEAR(kWaitSec, event->mean(), kTolerance * 3);
+
+ // Run it a second time.
+ TestFunc();
+ EXPECT_FALSE(event->is_started());
+ EXPECT_EQ(2, event->event_count());
+ EXPECT_NEAR(kWaitSec, event->mean(), kTolerance);
+ EXPECT_NEAR(kWaitSec * 2, event->total_time(), kTolerance * 2);
+ EXPECT_DOUBLE_EQ(event->mean(), event->total_time() / event->event_count());
+}
+
+TEST(ProfilerTest, TestScopedEvents) {
+ const std::string kEvent1Name = "Event 1";
+ const std::string kEvent2Name = "Event 2";
+ const int kEvent2WaitMs = 150;
+ const double kEvent2WaitSec = 0.150;
+ const ProfilerEvent* event1;
+ const ProfilerEvent* event2;
+ ASSERT_TRUE(Profiler::Instance()->Clear());
+ { // Profile a scope.
+ PROFILE(kEvent1Name);
+ event1 = Profiler::Instance()->GetEvent(kEvent1Name);
+ ASSERT_TRUE(event1 != NULL);
+ EXPECT_TRUE(event1->is_started());
+ EXPECT_EQ(0, event1->event_count());
+ rtc::Thread::SleepMs(kWaitMs);
+ EXPECT_TRUE(event1->is_started());
+ }
+ // Check the result.
+ EXPECT_FALSE(event1->is_started());
+ EXPECT_EQ(1, event1->event_count());
+ EXPECT_NEAR(kWaitSec, event1->mean(), kTolerance);
+ { // Profile a second event.
+ PROFILE(kEvent2Name);
+ event2 = Profiler::Instance()->GetEvent(kEvent2Name);
+ ASSERT_TRUE(event2 != NULL);
+ EXPECT_FALSE(event1->is_started());
+ EXPECT_TRUE(event2->is_started());
+ rtc::Thread::SleepMs(kEvent2WaitMs);
+ }
+ // Check the result.
+ EXPECT_FALSE(event2->is_started());
+ EXPECT_EQ(1, event2->event_count());
+
+ // The difference here can be as much as 0.33, so we need high tolerance.
+ EXPECT_NEAR(kEvent2WaitSec, event2->mean(), kTolerance * 4);
+ // Make sure event1 is unchanged.
+ EXPECT_FALSE(event1->is_started());
+ EXPECT_EQ(1, event1->event_count());
+ { // Run another event 1.
+ PROFILE(kEvent1Name);
+ EXPECT_TRUE(event1->is_started());
+ rtc::Thread::SleepMs(kWaitMs);
+ }
+ // Check the result.
+ EXPECT_FALSE(event1->is_started());
+ EXPECT_EQ(2, event1->event_count());
+ EXPECT_NEAR(kWaitSec, event1->mean(), kTolerance);
+ EXPECT_NEAR(kWaitSec * 2, event1->total_time(), kTolerance * 2);
+ EXPECT_DOUBLE_EQ(event1->mean(),
+ event1->total_time() / event1->event_count());
+}
+
+TEST(ProfilerTest, Clear) {
+ ASSERT_TRUE(Profiler::Instance()->Clear());
+ PROFILE_START("event");
+ EXPECT_FALSE(Profiler::Instance()->Clear());
+ EXPECT_TRUE(Profiler::Instance()->GetEvent("event") != NULL);
+ PROFILE_STOP("event");
+ EXPECT_TRUE(Profiler::Instance()->Clear());
+ EXPECT_EQ(NULL, Profiler::Instance()->GetEvent("event"));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/proxy_unittest.cc b/chromium/third_party/webrtc/base/proxy_unittest.cc
new file mode 100644
index 00000000000..d8a523fe17e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/proxy_unittest.cc
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+#include "webrtc/base/autodetectproxy.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/httpserver.h"
+#include "webrtc/base/proxyserver.h"
+#include "webrtc/base/socketadapters.h"
+#include "webrtc/base/testclient.h"
+#include "webrtc/base/testechoserver.h"
+#include "webrtc/base/virtualsocketserver.h"
+
+using rtc::Socket;
+using rtc::Thread;
+using rtc::SocketAddress;
+
+static const SocketAddress kSocksProxyIntAddr("1.2.3.4", 1080);
+static const SocketAddress kSocksProxyExtAddr("1.2.3.5", 0);
+static const SocketAddress kHttpsProxyIntAddr("1.2.3.4", 443);
+static const SocketAddress kHttpsProxyExtAddr("1.2.3.5", 0);
+static const SocketAddress kBogusProxyIntAddr("1.2.3.4", 999);
+
+// Used to run a proxy detect on the current thread. Otherwise we would need
+// to make both threads share the same VirtualSocketServer.
+class AutoDetectProxyRunner : public rtc::AutoDetectProxy {
+ public:
+ explicit AutoDetectProxyRunner(const std::string& agent)
+ : AutoDetectProxy(agent) {}
+ void Run() {
+ DoWork();
+ Thread::Current()->Restart(); // needed to reset the messagequeue
+ }
+};
+
+// Sets up a virtual socket server and HTTPS/SOCKS5 proxy servers.
+class ProxyTest : public testing::Test {
+ public:
+ ProxyTest() : ss_(new rtc::VirtualSocketServer(NULL)) {
+ Thread::Current()->set_socketserver(ss_.get());
+ socks_.reset(new rtc::SocksProxyServer(
+ ss_.get(), kSocksProxyIntAddr, ss_.get(), kSocksProxyExtAddr));
+ https_.reset(new rtc::HttpListenServer());
+ https_->Listen(kHttpsProxyIntAddr);
+ }
+ ~ProxyTest() {
+ Thread::Current()->set_socketserver(NULL);
+ }
+
+ rtc::SocketServer* ss() { return ss_.get(); }
+
+ rtc::ProxyType DetectProxyType(const SocketAddress& address) {
+ rtc::ProxyType type;
+ AutoDetectProxyRunner* detect = new AutoDetectProxyRunner("unittest/1.0");
+ detect->set_proxy(address);
+ detect->Run(); // blocks until done
+ type = detect->proxy().type;
+ detect->Destroy(false);
+ return type;
+ }
+
+ private:
+ rtc::scoped_ptr<rtc::SocketServer> ss_;
+ rtc::scoped_ptr<rtc::SocksProxyServer> socks_;
+ // TODO: Make this a real HTTPS proxy server.
+ rtc::scoped_ptr<rtc::HttpListenServer> https_;
+};
+
+// Tests whether we can use a SOCKS5 proxy to connect to a server.
+TEST_F(ProxyTest, TestSocks5Connect) {
+ rtc::AsyncSocket* socket =
+ ss()->CreateAsyncSocket(kSocksProxyIntAddr.family(), SOCK_STREAM);
+ rtc::AsyncSocksProxySocket* proxy_socket =
+ new rtc::AsyncSocksProxySocket(socket, kSocksProxyIntAddr,
+ "", rtc::CryptString());
+ // TODO: IPv6-ize these tests when proxy supports IPv6.
+
+ rtc::TestEchoServer server(Thread::Current(),
+ SocketAddress(INADDR_ANY, 0));
+
+ rtc::AsyncTCPSocket* packet_socket = rtc::AsyncTCPSocket::Create(
+ proxy_socket, SocketAddress(INADDR_ANY, 0), server.address());
+ EXPECT_TRUE(packet_socket != NULL);
+ rtc::TestClient client(packet_socket);
+
+ EXPECT_EQ(Socket::CS_CONNECTING, proxy_socket->GetState());
+ EXPECT_TRUE(client.CheckConnected());
+ EXPECT_EQ(Socket::CS_CONNECTED, proxy_socket->GetState());
+ EXPECT_EQ(server.address(), client.remote_address());
+ client.Send("foo", 3);
+ EXPECT_TRUE(client.CheckNextPacket("foo", 3, NULL));
+ EXPECT_TRUE(client.CheckNoPacket());
+}
+
+/*
+// Tests whether we can use a HTTPS proxy to connect to a server.
+TEST_F(ProxyTest, TestHttpsConnect) {
+ AsyncSocket* socket = ss()->CreateAsyncSocket(SOCK_STREAM);
+ AsyncHttpsProxySocket* proxy_socket = new AsyncHttpsProxySocket(
+ socket, "unittest/1.0", kHttpsProxyIntAddress, "", CryptString());
+ TestClient client(new AsyncTCPSocket(proxy_socket));
+ TestEchoServer server(Thread::Current(), SocketAddress());
+
+ EXPECT_TRUE(client.Connect(server.address()));
+ EXPECT_TRUE(client.CheckConnected());
+ EXPECT_EQ(server.address(), client.remote_address());
+ client.Send("foo", 3);
+ EXPECT_TRUE(client.CheckNextPacket("foo", 3, NULL));
+ EXPECT_TRUE(client.CheckNoPacket());
+}
+*/
+
+// Tests whether we can autodetect a SOCKS5 proxy.
+TEST_F(ProxyTest, TestAutoDetectSocks5) {
+ EXPECT_EQ(rtc::PROXY_SOCKS5, DetectProxyType(kSocksProxyIntAddr));
+}
+
+/*
+// Tests whether we can autodetect a HTTPS proxy.
+TEST_F(ProxyTest, TestAutoDetectHttps) {
+ EXPECT_EQ(rtc::PROXY_HTTPS, DetectProxyType(kHttpsProxyIntAddr));
+}
+*/
+
+// Tests whether we fail properly for no proxy.
+TEST_F(ProxyTest, TestAutoDetectBogus) {
+ EXPECT_EQ(rtc::PROXY_UNKNOWN, DetectProxyType(kBogusProxyIntAddr));
+}
diff --git a/chromium/third_party/webrtc/base/proxydetect.cc b/chromium/third_party/webrtc/base/proxydetect.cc
new file mode 100644
index 00000000000..7265f4fd965
--- /dev/null
+++ b/chromium/third_party/webrtc/base/proxydetect.cc
@@ -0,0 +1,1246 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/proxydetect.h"
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#include <shlobj.h>
+#endif // WEBRTC_WIN
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <SystemConfiguration/SystemConfiguration.h>
+#include <CoreFoundation/CoreFoundation.h>
+#include <CoreServices/CoreServices.h>
+#include <Security/Security.h>
+#include "macconversion.h"
+#endif
+
+#include <map>
+
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/httpcommon.h"
+#include "webrtc/base/httpcommon-inl.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/stringutils.h"
+
+#if defined(WEBRTC_WIN)
+#define _TRY_WINHTTP 1
+#define _TRY_JSPROXY 0
+#define _TRY_WM_FINDPROXY 0
+#define _TRY_IE_LAN_SETTINGS 1
+#endif // WEBRTC_WIN
+
+// For all platforms try Firefox.
+#define _TRY_FIREFOX 1
+
+// Use profiles.ini to find the correct profile for this user.
+// If not set, we'll just look for the default one.
+#define USE_FIREFOX_PROFILES_INI 1
+
+static const size_t kMaxLineLength = 1024;
+static const char kFirefoxPattern[] = "Firefox";
+static const char kInternetExplorerPattern[] = "MSIE";
+
+struct StringMap {
+ public:
+ void Add(const char * name, const char * value) { map_[name] = value; }
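+ // Get() returns a reference, so when the key is missing the default value
+ // is copied into the mutable def_ member and a reference to it is returned.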
+ const std::string& Get(const char * name, const char * def = "") const {
+ std::map<std::string, std::string>::const_iterator it =
+ map_.find(name);
+ if (it != map_.end())
+ return it->second;
+ def_ = def;
+ return def_;
+ }
+ bool IsSet(const char * name) const {
+ return (map_.find(name) != map_.end());
+ }
+ private:
+ std::map<std::string, std::string> map_;
+ mutable std::string def_;
+};
+
+enum UserAgent {
+ UA_FIREFOX,
+ UA_INTERNETEXPLORER,
+ UA_OTHER,
+ UA_UNKNOWN
+};
+
+#if _TRY_WINHTTP
+//#include <winhttp.h>
+// Note: From winhttp.h
+
+const char WINHTTP[] = "winhttp";
+
+typedef LPVOID HINTERNET;
+
+typedef struct {
+ DWORD dwAccessType; // see WINHTTP_ACCESS_* types below
+ LPWSTR lpszProxy; // proxy server list
+ LPWSTR lpszProxyBypass; // proxy bypass list
+} WINHTTP_PROXY_INFO, * LPWINHTTP_PROXY_INFO;
+
+typedef struct {
+ DWORD dwFlags;
+ DWORD dwAutoDetectFlags;
+ LPCWSTR lpszAutoConfigUrl;
+ LPVOID lpvReserved;
+ DWORD dwReserved;
+ BOOL fAutoLogonIfChallenged;
+} WINHTTP_AUTOPROXY_OPTIONS;
+
+typedef struct {
+ BOOL fAutoDetect;
+ LPWSTR lpszAutoConfigUrl;
+ LPWSTR lpszProxy;
+ LPWSTR lpszProxyBypass;
+} WINHTTP_CURRENT_USER_IE_PROXY_CONFIG;
+
+extern "C" {
+ typedef HINTERNET (WINAPI * pfnWinHttpOpen)
+ (
+ IN LPCWSTR pwszUserAgent,
+ IN DWORD dwAccessType,
+ IN LPCWSTR pwszProxyName OPTIONAL,
+ IN LPCWSTR pwszProxyBypass OPTIONAL,
+ IN DWORD dwFlags
+ );
+ typedef BOOL (STDAPICALLTYPE * pfnWinHttpCloseHandle)
+ (
+ IN HINTERNET hInternet
+ );
+ typedef BOOL (STDAPICALLTYPE * pfnWinHttpGetProxyForUrl)
+ (
+ IN HINTERNET hSession,
+ IN LPCWSTR lpcwszUrl,
+ IN WINHTTP_AUTOPROXY_OPTIONS * pAutoProxyOptions,
+ OUT WINHTTP_PROXY_INFO * pProxyInfo
+ );
+ typedef BOOL (STDAPICALLTYPE * pfnWinHttpGetIEProxyConfig)
+ (
+ IN OUT WINHTTP_CURRENT_USER_IE_PROXY_CONFIG * pProxyConfig
+ );
+
+} // extern "C"
+
+#define WINHTTP_AUTOPROXY_AUTO_DETECT 0x00000001
+#define WINHTTP_AUTOPROXY_CONFIG_URL 0x00000002
+#define WINHTTP_AUTOPROXY_RUN_INPROCESS 0x00010000
+#define WINHTTP_AUTOPROXY_RUN_OUTPROCESS_ONLY 0x00020000
+#define WINHTTP_AUTO_DETECT_TYPE_DHCP 0x00000001
+#define WINHTTP_AUTO_DETECT_TYPE_DNS_A 0x00000002
+#define WINHTTP_ACCESS_TYPE_DEFAULT_PROXY 0
+#define WINHTTP_ACCESS_TYPE_NO_PROXY 1
+#define WINHTTP_ACCESS_TYPE_NAMED_PROXY 3
+#define WINHTTP_NO_PROXY_NAME NULL
+#define WINHTTP_NO_PROXY_BYPASS NULL
+
+#endif // _TRY_WINHTTP
+
+#if _TRY_JSPROXY
+extern "C" {
+ typedef BOOL (STDAPICALLTYPE * pfnInternetGetProxyInfo)
+ (
+ LPCSTR lpszUrl,
+ DWORD dwUrlLength,
+ LPSTR lpszUrlHostName,
+ DWORD dwUrlHostNameLength,
+ LPSTR * lplpszProxyHostName,
+ LPDWORD lpdwProxyHostNameLength
+ );
+} // extern "C"
+#endif // _TRY_JSPROXY
+
+#if _TRY_WM_FINDPROXY
+#include <comutil.h>
+#include <wmnetsourcecreator.h>
+#include <wmsinternaladminnetsource.h>
+#endif // _TRY_WM_FINDPROXY
+
+#if _TRY_IE_LAN_SETTINGS
+#include <wininet.h>
+#include <string>
+#endif // _TRY_IE_LAN_SETTINGS
+
+namespace rtc {
+
+//////////////////////////////////////////////////////////////////////
+// Utility Functions
+//////////////////////////////////////////////////////////////////////
+
+#if defined(WEBRTC_WIN)
+#ifdef _UNICODE
+
+typedef std::wstring tstring;
+std::string Utf8String(const tstring& str) { return ToUtf8(str); }
+
+#else // !_UNICODE
+
+typedef std::string tstring;
+std::string Utf8String(const tstring& str) { return str; }
+
+#endif // !_UNICODE
+#endif // WEBRTC_WIN
+
+bool ProxyItemMatch(const Url<char>& url, char * item, size_t len) {
+ // hostname:443
+ if (char * port = ::strchr(item, ':')) {
+ *port++ = '\0';
+ if (url.port() != atol(port)) {
+ return false;
+ }
+ }
+
+ // A.B.C.D or A.B.C.D/24
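+ // For example, a bypass entry of "10.1.0.0/16" matches any URL whose host
+ // resolves to an IPv4 address sharing its top 16 bits with 10.1.0.0.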
+ int a, b, c, d, m;
+ int match = sscanf(item, "%d.%d.%d.%d/%d", &a, &b, &c, &d, &m);
+ if (match >= 4) {
+ uint32 ip = ((a & 0xFF) << 24) | ((b & 0xFF) << 16) | ((c & 0xFF) << 8) |
+ (d & 0xFF);
+ if ((match < 5) || (m > 32))
+ m = 32;
+ else if (m < 0)
+ m = 0;
+ uint32 mask = (m == 0) ? 0 : (~0UL) << (32 - m);
+ SocketAddress addr(url.host(), 0);
+ // TODO: Support IPv6 proxyitems. This code block is IPv4 only anyway.
+ return !addr.IsUnresolved() &&
+ ((addr.ipaddr().v4AddressAsHostOrderInteger() & mask) == (ip & mask));
+ }
+
+ // .foo.com
+ if (*item == '.') {
+ size_t hostlen = url.host().length();
+ return (hostlen > len)
+ && (stricmp(url.host().c_str() + (hostlen - len), item) == 0);
+ }
+
+ // localhost or www.*.com
+ if (!string_match(url.host().c_str(), item))
+ return false;
+
+ return true;
+}
+
+bool ProxyListMatch(const Url<char>& url, const std::string& proxy_list,
+ char sep) {
+ const size_t BUFSIZE = 256;
+ char buffer[BUFSIZE];
+ const char* list = proxy_list.c_str();
+ while (*list) {
+ // Remove leading space
+ if (isspace(*list)) {
+ ++list;
+ continue;
+ }
+ // Break on separator
+ size_t len;
+ const char * start = list;
+ if (const char * end = ::strchr(list, sep)) {
+ len = (end - list);
+ list += len + 1;
+ } else {
+ len = strlen(list);
+ list += len;
+ }
+ // Remove trailing space
+ while ((len > 0) && isspace(start[len-1]))
+ --len;
+ // Check for oversized entry
+ if (len >= BUFSIZE)
+ continue;
+ memcpy(buffer, start, len);
+ buffer[len] = 0;
+ if (!ProxyItemMatch(url, buffer, len))
+ continue;
+ return true;
+ }
+ return false;
+}
+
+bool Better(ProxyType lhs, const ProxyType rhs) {
+ // PROXY_NONE, PROXY_HTTPS, PROXY_SOCKS5, PROXY_UNKNOWN
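+ // Ranks the proxy types by preference: SOCKS5 (3) beats HTTPS (2), which
+ // beats UNKNOWN (1), which beats NONE (0).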
+ const int PROXY_VALUE[5] = { 0, 2, 3, 1 };
+ return (PROXY_VALUE[lhs] > PROXY_VALUE[rhs]);
+}
+
+bool ParseProxy(const std::string& saddress, ProxyInfo* proxy) {
+ const size_t kMaxAddressLength = 1024;
+ // Allow semicolon, space, or tab as an address separator
+ const char* const kAddressSeparator = " ;\t";
+
+ ProxyType ptype;
+ std::string host;
+ uint16 port;
+
+ const char* address = saddress.c_str();
+ while (*address) {
+ size_t len;
+ const char * start = address;
+ if (const char * sep = strchr(address, kAddressSeparator)) {
+ len = (sep - address);
+ address += len + 1;
+ while (*address != '\0' && ::strchr(kAddressSeparator, *address)) {
+ address += 1;
+ }
+ } else {
+ len = strlen(address);
+ address += len;
+ }
+
+ if (len > kMaxAddressLength - 1) {
+ LOG(LS_WARNING) << "Proxy address too long [" << start << "]";
+ continue;
+ }
+
+ char buffer[kMaxAddressLength];
+ memcpy(buffer, start, len);
+ buffer[len] = 0;
+
+ char * colon = ::strchr(buffer, ':');
+ if (!colon) {
+ LOG(LS_WARNING) << "Proxy address without port [" << buffer << "]";
+ continue;
+ }
+
+ *colon = 0;
+ char * endptr;
+ port = static_cast<uint16>(strtol(colon + 1, &endptr, 0));
+ if (*endptr != 0) {
+ LOG(LS_WARNING) << "Proxy address with invalid port [" << buffer << "]";
+ continue;
+ }
+
+ if (char * equals = ::strchr(buffer, '=')) {
+ *equals = 0;
+ host = equals + 1;
+ if (_stricmp(buffer, "socks") == 0) {
+ ptype = PROXY_SOCKS5;
+ } else if (_stricmp(buffer, "https") == 0) {
+ ptype = PROXY_HTTPS;
+ } else {
+ LOG(LS_WARNING) << "Proxy address with unknown protocol ["
+ << buffer << "]";
+ ptype = PROXY_UNKNOWN;
+ }
+ } else {
+ host = buffer;
+ ptype = PROXY_UNKNOWN;
+ }
+
+ if (Better(ptype, proxy->type)) {
+ proxy->type = ptype;
+ proxy->address.SetIP(host);
+ proxy->address.SetPort(port);
+ }
+ }
+
+ return proxy->type != PROXY_NONE;
+}
+
+UserAgent GetAgent(const char* agent) {
+ if (agent) {
+ std::string agent_str(agent);
+ if (agent_str.find(kFirefoxPattern) != std::string::npos) {
+ return UA_FIREFOX;
+ } else if (agent_str.find(kInternetExplorerPattern) != std::string::npos) {
+ return UA_INTERNETEXPLORER;
+ } else if (agent_str.empty()) {
+ return UA_UNKNOWN;
+ }
+ }
+ return UA_OTHER;
+}
+
+bool EndsWith(const std::string& a, const std::string& b) {
+ if (b.size() > a.size()) {
+ return false;
+ }
+ int result = a.compare(a.size() - b.size(), b.size(), b);
+ return result == 0;
+}
+
+bool GetFirefoxProfilePath(Pathname* path) {
+#if defined(WEBRTC_WIN)
+ wchar_t w_path[MAX_PATH];
+ if (SHGetFolderPath(0, CSIDL_APPDATA, 0, SHGFP_TYPE_CURRENT, w_path) !=
+ S_OK) {
+ LOG(LS_ERROR) << "SHGetFolderPath failed";
+ return false;
+ }
+ path->SetFolder(ToUtf8(w_path, wcslen(w_path)));
+ path->AppendFolder("Mozilla");
+ path->AppendFolder("Firefox");
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ FSRef fr;
+ if (0 != FSFindFolder(kUserDomain, kApplicationSupportFolderType,
+ kCreateFolder, &fr)) {
+ LOG(LS_ERROR) << "FSFindFolder failed";
+ return false;
+ }
+ char buffer[NAME_MAX + 1];
+ if (0 != FSRefMakePath(&fr, reinterpret_cast<uint8*>(buffer),
+ ARRAY_SIZE(buffer))) {
+ LOG(LS_ERROR) << "FSRefMakePath failed";
+ return false;
+ }
+ path->SetFolder(std::string(buffer));
+ path->AppendFolder("Firefox");
+#else
+ char* user_home = getenv("HOME");
+ if (user_home == NULL) {
+ return false;
+ }
+ path->SetFolder(std::string(user_home));
+ path->AppendFolder(".mozilla");
+ path->AppendFolder("firefox");
+#endif // WEBRTC_WIN
+ return true;
+}
+
+bool GetDefaultFirefoxProfile(Pathname* profile_path) {
+ ASSERT(NULL != profile_path);
+ Pathname path;
+ if (!GetFirefoxProfilePath(&path)) {
+ return false;
+ }
+
+#if USE_FIREFOX_PROFILES_INI
+ // [Profile0]
+ // Name=default
+ // IsRelative=1
+ // Path=Profiles/2de53ejb.default
+ // Default=1
+
+ // Note: we are looking for the first entry with "Default=1", or the last
+ // entry in the file
+ path.SetFilename("profiles.ini");
+ scoped_ptr<FileStream> fs(Filesystem::OpenFile(path, "r"));
+ if (!fs) {
+ return false;
+ }
+ Pathname candidate;
+ bool relative = true;
+ std::string line;
+ while (fs->ReadLine(&line) == SR_SUCCESS) {
+ if (line.length() == 0) {
+ continue;
+ }
+ if (line.at(0) == '[') {
+ relative = true;
+ candidate.clear();
+ } else if (line.find("IsRelative=") == 0 &&
+ line.length() >= 12) {
+ // TODO: The initial Linux public launch revealed a fairly
+ // high number of machines where IsRelative= did not have anything after
+ // it. Perhaps that is legal profiles.ini syntax?
+ relative = (line.at(11) != '0');
+ } else if (line.find("Path=") == 0 &&
+ line.length() >= 6) {
+ if (relative) {
+ candidate = path;
+ } else {
+ candidate.clear();
+ }
+ candidate.AppendFolder(line.substr(5));
+ } else if (line.find("Default=") == 0 &&
+ line.length() >= 9) {
+ if ((line.at(8) != '0') && !candidate.empty()) {
+ break;
+ }
+ }
+ }
+ fs->Close();
+ if (candidate.empty()) {
+ return false;
+ }
+ profile_path->SetPathname(candidate.pathname());
+
+#else // !USE_FIREFOX_PROFILES_INI
+ path.AppendFolder("Profiles");
+ DirectoryIterator* it = Filesystem::IterateDirectory();
+ it->Iterate(path);
+ std::string extension(".default");
+ while (!EndsWith(it->Name(), extension)) {
+ if (!it->Next()) {
+ return false;
+ }
+ }
+
+ profile_path->SetPathname(path);
+ profile->AppendFolder("Profiles");
+ profile->AppendFolder(it->Name());
+ delete it;
+
+#endif // !USE_FIREFOX_PROFILES_INI
+
+ return true;
+}
+
+bool ReadFirefoxPrefs(const Pathname& filename,
+ const char * prefix,
+ StringMap* settings) {
+ scoped_ptr<FileStream> fs(Filesystem::OpenFile(filename, "r"));
+ if (!fs) {
+ LOG(LS_ERROR) << "Failed to open file: " << filename.pathname();
+ return false;
+ }
+
+ std::string line;
+ while (fs->ReadLine(&line) == SR_SUCCESS) {
+ size_t prefix_len = strlen(prefix);
+
+ // Skip blank lines and too long lines.
+ if ((line.length() == 0) || (line.length() > kMaxLineLength)
+ || (line.at(0) == '#') || line.compare(0, 2, "/*") == 0
+ || line.compare(0, 2, " *") == 0) {
+ continue;
+ }
+
+ char buffer[kMaxLineLength];
+ strcpyn(buffer, sizeof(buffer), line.c_str());
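+ // The %n conversions below record the offsets of the pref name and its
+ // value within the line, e.g. for: user_pref("network.proxy.type", 1);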
+ int nstart = 0, nend = 0, vstart = 0, vend = 0;
+ sscanf(buffer, "user_pref(\"%n%*[^\"]%n\", %n%*[^)]%n);",
+ &nstart, &nend, &vstart, &vend);
+ if (vend > 0) {
+ char* name = buffer + nstart;
+ name[nend - nstart] = 0;
+ if ((vend - vstart >= 2) && (buffer[vstart] == '"')) {
+ vstart += 1;
+ vend -= 1;
+ }
+ char* value = buffer + vstart;
+ value[vend - vstart] = 0;
+ if ((strncmp(name, prefix, prefix_len) == 0) && *value) {
+ settings->Add(name + prefix_len, value);
+ }
+ } else {
+ LOG_F(LS_WARNING) << "Unparsed pref [" << buffer << "]";
+ }
+ }
+ fs->Close();
+ return true;
+}
+
+bool GetFirefoxProxySettings(const char* url, ProxyInfo* proxy) {
+ Url<char> purl(url);
+ Pathname path;
+ bool success = false;
+ if (GetDefaultFirefoxProfile(&path)) {
+ StringMap settings;
+ path.SetFilename("prefs.js");
+ if (ReadFirefoxPrefs(path, "network.proxy.", &settings)) {
+ success = true;
+ proxy->bypass_list =
+ settings.Get("no_proxies_on", "localhost, 127.0.0.1");
+ if (settings.Get("type") == "1") {
+ // User has manually specified a proxy, try to figure out what
+ // type it is.
+ if (ProxyListMatch(purl, proxy->bypass_list.c_str(), ',')) {
+ // Our URL is in the list of URLs that should bypass the proxy.
+ } else if (settings.Get("share_proxy_settings") == "true") {
+ proxy->type = PROXY_UNKNOWN;
+ proxy->address.SetIP(settings.Get("http"));
+ proxy->address.SetPort(atoi(settings.Get("http_port").c_str()));
+ } else if (settings.IsSet("socks")) {
+ proxy->type = PROXY_SOCKS5;
+ proxy->address.SetIP(settings.Get("socks"));
+ proxy->address.SetPort(atoi(settings.Get("socks_port").c_str()));
+ } else if (settings.IsSet("ssl")) {
+ proxy->type = PROXY_HTTPS;
+ proxy->address.SetIP(settings.Get("ssl"));
+ proxy->address.SetPort(atoi(settings.Get("ssl_port").c_str()));
+ } else if (settings.IsSet("http")) {
+ proxy->type = PROXY_HTTPS;
+ proxy->address.SetIP(settings.Get("http"));
+ proxy->address.SetPort(atoi(settings.Get("http_port").c_str()));
+ }
+ } else if (settings.Get("type") == "2") {
+ // Browser is configured to get proxy settings from a given url.
+ proxy->autoconfig_url = settings.Get("autoconfig_url").c_str();
+ } else if (settings.Get("type") == "4") {
+ // Browser is configured to auto detect proxy config.
+ proxy->autodetect = true;
+ } else {
+ // No proxy set.
+ }
+ }
+ }
+ return success;
+}
+
+#if defined(WEBRTC_WIN) // Windows specific implementation for reading Internet
+ // Explorer proxy settings.
+
+void LogGetProxyFault() {
+ LOG_GLEM(LERROR, WINHTTP) << "WinHttpGetProxyForUrl faulted!!";
+}
+
+BOOL MyWinHttpGetProxyForUrl(pfnWinHttpGetProxyForUrl pWHGPFU,
+ HINTERNET hWinHttp, LPCWSTR url,
+ WINHTTP_AUTOPROXY_OPTIONS *options,
+ WINHTTP_PROXY_INFO *info) {
+ // WinHttpGetProxyForUrl() can call plugins which can crash.
+ // In the case of McAfee scriptproxy.dll, it does crash in
+ // older versions. Try to catch crashes here and treat as an
+ // error.
+ BOOL success = FALSE;
+
+#if (_HAS_EXCEPTIONS == 0)
+ __try {
+ success = pWHGPFU(hWinHttp, url, options, info);
+ } __except(EXCEPTION_EXECUTE_HANDLER) {
+ // This is a separate function to avoid
+ // Visual C++ error 2712 when compiling with C++ EH
+ LogGetProxyFault();
+ }
+#else
+ success = pWHGPFU(hWinHttp, url, options, info);
+#endif // (_HAS_EXCEPTIONS == 0)
+
+ return success;
+}
+
+bool IsDefaultBrowserFirefox() {
+ HKEY key;
+ LONG result = RegOpenKeyEx(HKEY_CLASSES_ROOT, L"http\\shell\\open\\command",
+ 0, KEY_READ, &key);
+ if (ERROR_SUCCESS != result)
+ return false;
+
+ DWORD size, type;
+ bool success = false;
+ result = RegQueryValueEx(key, L"", 0, &type, NULL, &size);
+ if (result == ERROR_SUCCESS && type == REG_SZ) {
+ wchar_t* value = new wchar_t[size+1];
+ BYTE* buffer = reinterpret_cast<BYTE*>(value);
+ result = RegQueryValueEx(key, L"", 0, &type, buffer, &size);
+ if (result == ERROR_SUCCESS) {
+ // Size returned by RegQueryValueEx is in bytes, convert to number of
+ // wchar_t's.
+ size /= sizeof(value[0]);
+ value[size] = L'\0';
+ for (size_t i = 0; i < size; ++i) {
+ value[i] = tolowercase(value[i]);
+ }
+ success = (NULL != wcsstr(value, L"firefox.exe"));
+ }
+ delete[] value;
+ }
+
+ RegCloseKey(key);
+ return success;
+}
+
+bool GetWinHttpProxySettings(const char* url, ProxyInfo* proxy) {
+ HMODULE winhttp_handle = LoadLibrary(L"winhttp.dll");
+ if (winhttp_handle == NULL) {
+ LOG(LS_ERROR) << "Failed to load winhttp.dll.";
+ return false;
+ }
+ WINHTTP_CURRENT_USER_IE_PROXY_CONFIG iecfg;
+ memset(&iecfg, 0, sizeof(iecfg));
+ Url<char> purl(url);
+ pfnWinHttpGetIEProxyConfig pWHGIEPC =
+ reinterpret_cast<pfnWinHttpGetIEProxyConfig>(
+ GetProcAddress(winhttp_handle,
+ "WinHttpGetIEProxyConfigForCurrentUser"));
+ bool success = false;
+ if (pWHGIEPC && pWHGIEPC(&iecfg)) {
+ // We read the proxy config successfully.
+ success = true;
+ if (iecfg.fAutoDetect) {
+ proxy->autodetect = true;
+ }
+ if (iecfg.lpszAutoConfigUrl) {
+ proxy->autoconfig_url = ToUtf8(iecfg.lpszAutoConfigUrl);
+ GlobalFree(iecfg.lpszAutoConfigUrl);
+ }
+ if (iecfg.lpszProxyBypass) {
+ proxy->bypass_list = ToUtf8(iecfg.lpszProxyBypass);
+ GlobalFree(iecfg.lpszProxyBypass);
+ }
+ if (iecfg.lpszProxy) {
+ if (!ProxyListMatch(purl, proxy->bypass_list, ';')) {
+ ParseProxy(ToUtf8(iecfg.lpszProxy), proxy);
+ }
+ GlobalFree(iecfg.lpszProxy);
+ }
+ }
+ FreeLibrary(winhttp_handle);
+ return success;
+}
+
+// Uses the WinHTTP API to auto detect proxy for the given url. Firefox and IE
+// have slightly different option dialogs for proxy settings. In Firefox,
+// either a location of a proxy configuration file can be specified or auto
+// detection can be selected. In IE these two options can be independently
+// selected. For the case where both options are selected (only IE) we try to
+// fetch the config file first, and if that fails we'll perform an auto
+// detection.
+//
+// Returns true if we successfully performed an auto detection not depending on
+// whether we found a proxy or not. Returns false on error.
+bool WinHttpAutoDetectProxyForUrl(const char* agent, const char* url,
+ ProxyInfo* proxy) {
+ Url<char> purl(url);
+ bool success = true;
+ HMODULE winhttp_handle = LoadLibrary(L"winhttp.dll");
+ if (winhttp_handle == NULL) {
+ LOG(LS_ERROR) << "Failed to load winhttp.dll.";
+ return false;
+ }
+ pfnWinHttpOpen pWHO =
+ reinterpret_cast<pfnWinHttpOpen>(GetProcAddress(winhttp_handle,
+ "WinHttpOpen"));
+ pfnWinHttpCloseHandle pWHCH =
+ reinterpret_cast<pfnWinHttpCloseHandle>(
+ GetProcAddress(winhttp_handle, "WinHttpCloseHandle"));
+ pfnWinHttpGetProxyForUrl pWHGPFU =
+ reinterpret_cast<pfnWinHttpGetProxyForUrl>(
+ GetProcAddress(winhttp_handle, "WinHttpGetProxyForUrl"));
+ if (pWHO && pWHCH && pWHGPFU) {
+ if (HINTERNET hWinHttp = pWHO(ToUtf16(agent).c_str(),
+ WINHTTP_ACCESS_TYPE_NO_PROXY,
+ WINHTTP_NO_PROXY_NAME,
+ WINHTTP_NO_PROXY_BYPASS,
+ 0)) {
+ BOOL result = FALSE;
+ WINHTTP_PROXY_INFO info;
+ memset(&info, 0, sizeof(info));
+ if (proxy->autodetect) {
+ // Use DHCP and DNS to try to find any proxy to use.
+ WINHTTP_AUTOPROXY_OPTIONS options;
+ memset(&options, 0, sizeof(options));
+ options.fAutoLogonIfChallenged = TRUE;
+
+ options.dwFlags |= WINHTTP_AUTOPROXY_AUTO_DETECT;
+ options.dwAutoDetectFlags |= WINHTTP_AUTO_DETECT_TYPE_DHCP
+ | WINHTTP_AUTO_DETECT_TYPE_DNS_A;
+ result = MyWinHttpGetProxyForUrl(
+ pWHGPFU, hWinHttp, ToUtf16(url).c_str(), &options, &info);
+ }
+ if (!result && !proxy->autoconfig_url.empty()) {
+ // We have the location of a proxy config file. Download it and
+ // execute it to find proxy settings for our url.
+ WINHTTP_AUTOPROXY_OPTIONS options;
+ memset(&options, 0, sizeof(options));
+ memset(&info, 0, sizeof(info));
+ options.fAutoLogonIfChallenged = TRUE;
+
+ std::wstring autoconfig_url16(ToUtf16(proxy->autoconfig_url));
+ options.dwFlags |= WINHTTP_AUTOPROXY_CONFIG_URL;
+ options.lpszAutoConfigUrl = autoconfig_url16.c_str();
+
+ result = MyWinHttpGetProxyForUrl(
+ pWHGPFU, hWinHttp, ToUtf16(url).c_str(), &options, &info);
+ }
+ if (result) {
+ // Either the given auto config url was valid or auto
+ // detection found a proxy on this network.
+ if (info.lpszProxy) {
+ // TODO: Does this bypass list differ from the list
+ // retrieved from GetWinHttpProxySettings earlier?
+ if (info.lpszProxyBypass) {
+ proxy->bypass_list = ToUtf8(info.lpszProxyBypass);
+ GlobalFree(info.lpszProxyBypass);
+ } else {
+ proxy->bypass_list.clear();
+ }
+ if (!ProxyListMatch(purl, proxy->bypass_list, ';')) {
+ // Found proxy for this URL. If parsing the address turns
+ // out ok then we are successful.
+ success = ParseProxy(ToUtf8(info.lpszProxy), proxy);
+ }
+ GlobalFree(info.lpszProxy);
+ }
+ } else {
+ // We could not find any proxy for this url.
+ LOG(LS_INFO) << "No proxy detected for " << url;
+ }
+ pWHCH(hWinHttp);
+ }
+ } else {
+ LOG(LS_ERROR) << "Failed loading WinHTTP functions.";
+ success = false;
+ }
+ FreeLibrary(winhttp_handle);
+ return success;
+}
+
+#if 0 // Below functions currently not used.
+
+bool GetJsProxySettings(const char* url, ProxyInfo* proxy) {
+ Url<char> purl(url);
+ bool success = false;
+
+ if (HMODULE hModJS = LoadLibrary(_T("jsproxy.dll"))) {
+ pfnInternetGetProxyInfo pIGPI =
+ reinterpret_cast<pfnInternetGetProxyInfo>(
+ GetProcAddress(hModJS, "InternetGetProxyInfo"));
+ if (pIGPI) {
+ char proxy[256], host[256];
+ memset(proxy, 0, sizeof(proxy));
+ char * ptr = proxy;
+ DWORD proxylen = sizeof(proxy);
+ std::string surl = Utf8String(url);
+ DWORD hostlen = _snprintf(host, sizeof(host), "http%s://%S",
+ purl.secure() ? "s" : "", purl.server());
+ if (pIGPI(surl.data(), surl.size(), host, hostlen, &ptr, &proxylen)) {
+ LOG(INFO) << "Proxy: " << proxy;
+ } else {
+ LOG_GLE(INFO) << "InternetGetProxyInfo";
+ }
+ }
+ FreeLibrary(hModJS);
+ }
+ return success;
+}
+
+bool GetWmProxySettings(const char* url, ProxyInfo* proxy) {
+ Url<char> purl(url);
+ bool success = false;
+
+ INSNetSourceCreator * nsc = 0;
+ HRESULT hr = CoCreateInstance(CLSID_ClientNetManager, 0, CLSCTX_ALL,
+ IID_INSNetSourceCreator, (LPVOID *) &nsc);
+ if (SUCCEEDED(hr)) {
+ if (SUCCEEDED(hr = nsc->Initialize())) {
+ VARIANT dispatch;
+ VariantInit(&dispatch);
+ if (SUCCEEDED(hr = nsc->GetNetSourceAdminInterface(L"http", &dispatch))) {
+ IWMSInternalAdminNetSource * ians = 0;
+ if (SUCCEEDED(hr = dispatch.pdispVal->QueryInterface(
+ IID_IWMSInternalAdminNetSource, (LPVOID *) &ians))) {
+ _bstr_t host(purl.server());
+ BSTR proxy = 0;
+ BOOL bProxyEnabled = FALSE;
+ DWORD port, context = 0;
+ if (SUCCEEDED(hr = ians->FindProxyForURL(
+ L"http", host, &bProxyEnabled, &proxy, &port, &context))) {
+ success = true;
+ if (bProxyEnabled) {
+ _bstr_t sproxy = proxy;
+ proxy->ptype = PT_HTTPS;
+ proxy->host = sproxy;
+ proxy->port = port;
+ }
+ }
+ SysFreeString(proxy);
+ if (FAILED(hr = ians->ShutdownProxyContext(context))) {
+ LOG(LS_INFO) << "IWMSInternalAdminNetSource::ShutdownProxyContext"
+ << "failed: " << hr;
+ }
+ ians->Release();
+ }
+ }
+ VariantClear(&dispatch);
+ if (FAILED(hr = nsc->Shutdown())) {
+ LOG(LS_INFO) << "INSNetSourceCreator::Shutdown failed: " << hr;
+ }
+ }
+ nsc->Release();
+ }
+ return success;
+}
+
+bool GetIePerConnectionProxySettings(const char* url, ProxyInfo* proxy) {
+ Url<char> purl(url);
+ bool success = false;
+
+ INTERNET_PER_CONN_OPTION_LIST list;
+ INTERNET_PER_CONN_OPTION options[3];
+ memset(&list, 0, sizeof(list));
+ memset(&options, 0, sizeof(options));
+
+ list.dwSize = sizeof(list);
+ list.dwOptionCount = 3;
+ list.pOptions = options;
+ options[0].dwOption = INTERNET_PER_CONN_FLAGS;
+ options[1].dwOption = INTERNET_PER_CONN_PROXY_SERVER;
+ options[2].dwOption = INTERNET_PER_CONN_PROXY_BYPASS;
+ DWORD dwSize = sizeof(list);
+
+ if (!InternetQueryOption(0, INTERNET_OPTION_PER_CONNECTION_OPTION, &list,
+ &dwSize)) {
+ LOG(LS_INFO) << "InternetQueryOption failed: " << GetLastError();
+ } else if ((options[0].Value.dwValue & PROXY_TYPE_PROXY) != 0) {
+ success = true;
+ if (!ProxyListMatch(purl, nonnull(options[2].Value.pszValue), _T(';'))) {
+ ParseProxy(nonnull(options[1].Value.pszValue), proxy);
+ }
+ } else if ((options[0].Value.dwValue & PROXY_TYPE_DIRECT) != 0) {
+ success = true;
+ } else {
+ LOG(LS_INFO) << "unknown internet access type: "
+ << options[0].Value.dwValue;
+ }
+ if (options[1].Value.pszValue) {
+ GlobalFree(options[1].Value.pszValue);
+ }
+ if (options[2].Value.pszValue) {
+ GlobalFree(options[2].Value.pszValue);
+ }
+ return success;
+}
+
+#endif // 0
+
+// Uses the InternetQueryOption function to retrieve proxy settings
+// from the registry. This will only give us the 'static' settings,
+// i.e., not any information about auto config etc.
+bool GetIeLanProxySettings(const char* url, ProxyInfo* proxy) {
+ Url<char> purl(url);
+ bool success = false;
+
+ wchar_t buffer[1024];
+ memset(buffer, 0, sizeof(buffer));
+ INTERNET_PROXY_INFO * info = reinterpret_cast<INTERNET_PROXY_INFO *>(buffer);
+ DWORD dwSize = sizeof(buffer);
+
+ if (!InternetQueryOption(0, INTERNET_OPTION_PROXY, info, &dwSize)) {
+ LOG(LS_INFO) << "InternetQueryOption failed: " << GetLastError();
+ } else if (info->dwAccessType == INTERNET_OPEN_TYPE_DIRECT) {
+ success = true;
+ } else if (info->dwAccessType == INTERNET_OPEN_TYPE_PROXY) {
+ success = true;
+ if (!ProxyListMatch(purl, nonnull(reinterpret_cast<const char*>(
+ info->lpszProxyBypass)), ' ')) {
+ ParseProxy(nonnull(reinterpret_cast<const char*>(info->lpszProxy)),
+ proxy);
+ }
+ } else {
+ LOG(LS_INFO) << "unknown internet access type: " << info->dwAccessType;
+ }
+ return success;
+}
+
+bool GetIeProxySettings(const char* agent, const char* url, ProxyInfo* proxy) {
+ bool success = GetWinHttpProxySettings(url, proxy);
+ if (!success) {
+ // TODO: Should always call this if no proxy were detected by
+ // GetWinHttpProxySettings?
+ // WinHttp failed. Try using the InternetOptionQuery method instead.
+ return GetIeLanProxySettings(url, proxy);
+ }
+ return true;
+}
+
+#endif // WEBRTC_WIN
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)  // Mac OS X specific implementation for reading
+ // system wide proxy settings.
+
+bool p_getProxyInfoForTypeFromDictWithKeys(ProxyInfo* proxy,
+ ProxyType type,
+ const CFDictionaryRef proxyDict,
+ const CFStringRef enabledKey,
+ const CFStringRef hostKey,
+ const CFStringRef portKey) {
+ // whether or not we set up the proxy info.
+ bool result = false;
+
+ // we use this as a scratch variable for determining if operations
+ // succeeded.
+ bool converted = false;
+
+ // the data we need to construct the SocketAddress for the proxy.
+ std::string hostname;
+ int port;
+
+ if ((proxyDict != NULL) &&
+ (CFGetTypeID(proxyDict) == CFDictionaryGetTypeID())) {
+ // CoreFoundation stuff that we'll have to get from
+ // the dictionaries and interpret or convert into more usable formats.
+ CFNumberRef enabledCFNum;
+ CFNumberRef portCFNum;
+ CFStringRef hostCFStr;
+
+ enabledCFNum = (CFNumberRef)CFDictionaryGetValue(proxyDict, enabledKey);
+
+ if (p_isCFNumberTrue(enabledCFNum)) {
+ // let's see if we can get the address and port.
+ hostCFStr = (CFStringRef)CFDictionaryGetValue(proxyDict, hostKey);
+ converted = p_convertHostCFStringRefToCPPString(hostCFStr, hostname);
+ if (converted) {
+ portCFNum = (CFNumberRef)CFDictionaryGetValue(proxyDict, portKey);
+ converted = p_convertCFNumberToInt(portCFNum, &port);
+ if (converted) {
+ // we have something enabled, with a hostname and a port.
+ // That's sufficient to set up the proxy info.
+ proxy->type = type;
+ proxy->address.SetIP(hostname);
+ proxy->address.SetPort(port);
+ result = true;
+ }
+ }
+ }
+ }
+
+ return result;
+}
+
+// Looks for proxy information in the given dictionary,
+// return true if it found sufficient information to define one,
+// false otherwise. This is guaranteed to not change the values in proxy
+// unless a full-fledged proxy description was discovered in the dictionary.
+// However, at the present time this does not support username or password.
+// Checks first for a SOCKS proxy, then for HTTPS, then HTTP.
+bool GetMacProxySettingsFromDictionary(ProxyInfo* proxy,
+ const CFDictionaryRef proxyDict) {
+ // the function result.
+ bool gotProxy = false;
+
+
+ // first we see if there's a SOCKS proxy in place.
+ gotProxy = p_getProxyInfoForTypeFromDictWithKeys(proxy,
+ PROXY_SOCKS5,
+ proxyDict,
+ kSCPropNetProxiesSOCKSEnable,
+ kSCPropNetProxiesSOCKSProxy,
+ kSCPropNetProxiesSOCKSPort);
+
+ if (!gotProxy) {
+ // okay, no SOCKS proxy, let's look for https.
+ gotProxy = p_getProxyInfoForTypeFromDictWithKeys(proxy,
+ PROXY_HTTPS,
+ proxyDict,
+ kSCPropNetProxiesHTTPSEnable,
+ kSCPropNetProxiesHTTPSProxy,
+ kSCPropNetProxiesHTTPSPort);
+ if (!gotProxy) {
+ // Finally, try HTTP proxy. Note that flute doesn't
+ // differentiate between HTTPS and HTTP, hence we are using the
+ // same flute type here, ie. PROXY_HTTPS.
+ gotProxy = p_getProxyInfoForTypeFromDictWithKeys(
+ proxy, PROXY_HTTPS, proxyDict, kSCPropNetProxiesHTTPEnable,
+ kSCPropNetProxiesHTTPProxy, kSCPropNetProxiesHTTPPort);
+ }
+ }
+ return gotProxy;
+}
+
+// TODO(hughv) Update keychain functions. They work on 10.8, but are deprecated.
+#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+bool p_putPasswordInProxyInfo(ProxyInfo* proxy) {
+ bool result = true; // by default we assume we're good.
+ // for all we know there isn't any password. We'll set to false
+ // if we find a problem.
+
+ // Ask the keychain for an internet password search for the given protocol.
+ OSStatus oss = 0;
+ SecKeychainAttributeList attrList;
+ attrList.count = 3;
+ SecKeychainAttribute attributes[3];
+ attrList.attr = attributes;
+
+ attributes[0].tag = kSecProtocolItemAttr;
+ attributes[0].length = sizeof(SecProtocolType);
+ SecProtocolType protocol;
+ switch (proxy->type) {
+ case PROXY_HTTPS :
+ protocol = kSecProtocolTypeHTTPS;
+ break;
+ case PROXY_SOCKS5 :
+ protocol = kSecProtocolTypeSOCKS;
+ break;
+ default :
+ LOG(LS_ERROR) << "asked for proxy password for unknown proxy type.";
+ result = false;
+ break;
+ }
+ attributes[0].data = &protocol;
+
+ UInt32 port = proxy->address.port();
+ attributes[1].tag = kSecPortItemAttr;
+ attributes[1].length = sizeof(UInt32);
+ attributes[1].data = &port;
+
+ std::string ip = proxy->address.ipaddr().ToString();
+ attributes[2].tag = kSecServerItemAttr;
+ attributes[2].length = ip.length();
+ attributes[2].data = const_cast<char*>(ip.c_str());
+
+ if (result) {
+ LOG(LS_INFO) << "trying to get proxy username/password";
+ SecKeychainSearchRef sref;
+ oss = SecKeychainSearchCreateFromAttributes(NULL,
+ kSecInternetPasswordItemClass,
+ &attrList, &sref);
+ if (0 == oss) {
+ LOG(LS_INFO) << "SecKeychainSearchCreateFromAttributes was good";
+ // Get the first item, if there is one.
+ SecKeychainItemRef iref;
+ oss = SecKeychainSearchCopyNext(sref, &iref);
+ if (0 == oss) {
+ LOG(LS_INFO) << "...looks like we have the username/password data";
+ // If there is, get the username and the password.
+
+ SecKeychainAttributeInfo attribsToGet;
+ attribsToGet.count = 1;
+ UInt32 tag = kSecAccountItemAttr;
+ UInt32 format = CSSM_DB_ATTRIBUTE_FORMAT_STRING;
+ void *data;
+ UInt32 length;
+ SecKeychainAttributeList *localList;
+
+ attribsToGet.tag = &tag;
+ attribsToGet.format = &format;
+ OSStatus copyres = SecKeychainItemCopyAttributesAndData(iref,
+ &attribsToGet,
+ NULL,
+ &localList,
+ &length,
+ &data);
+ if (0 == copyres) {
+ LOG(LS_INFO) << "...and we can pull it out.";
+ // We know from experimentation (sadly not from the docs)
+ // that the username is in the local attribute list
+ // and the password is in the data,
+ // both without null termination but with their lengths available.
+ // Grab the password from the data.
+ std::string password;
+ password.append(static_cast<const char*>(data), length);
+
+ // Ideally we would turn the password into a CryptString here,
+ // but at the time of writing that isn't possible, so we skip it
+ // for now and will come back to it later.
+
+ // now put the username in the proxy.
+ if (1 <= localList->attr->length) {
+ proxy->username.append(
+ static_cast<const char*>(localList->attr->data),
+ localList->attr->length);
+ LOG(LS_INFO) << "username is " << proxy->username;
+ } else {
+ LOG(LS_ERROR) << "got keychain entry with no username";
+ result = false;
+ }
+ } else {
+ LOG(LS_ERROR) << "couldn't copy info from keychain.";
+ result = false;
+ }
+ SecKeychainItemFreeAttributesAndData(localList, data);
+ } else if (errSecItemNotFound == oss) {
+ LOG(LS_INFO) << "...username/password info not found";
+ } else {
+ // Neither success (0) nor errSecItemNotFound.
+ LOG(LS_ERROR) << "Couldn't get keychain information, error code "
+ << oss;
+ result = false;
+ }
+ } else if (errSecItemNotFound == oss) { // noop
+ } else {
+ // Neither success (0) nor errSecItemNotFound.
+ LOG(LS_ERROR) << "Couldn't get keychain information, error code "
+ << oss;
+ result = false;
+ }
+ }
+
+ return result;
+}
+
+bool GetMacProxySettings(ProxyInfo* proxy) {
+ // based on the Apple Technical Q&A QA1234
+ // http://developer.apple.com/qa/qa2001/qa1234.html
+ CFDictionaryRef proxyDict = SCDynamicStoreCopyProxies(NULL);
+ bool result = false;
+
+ if (proxyDict != NULL) {
+ // sending it off to another function makes it easier to unit test
+ // since we can make our own dictionary to hand to that function.
+ result = GetMacProxySettingsFromDictionary(proxy, proxyDict);
+
+ if (result) {
+ result = p_putPasswordInProxyInfo(proxy);
+ }
+
+ // We obtained the dictionary from a function with 'Copy' in its
+ // name, so we have to release it, per the Core Foundation
+ // ownership rules.
+ CFRelease(proxyDict);
+ } else {
+ LOG(LS_ERROR) << "SCDynamicStoreCopyProxies failed";
+ }
+
+ return result;
+}
+#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
+
+bool AutoDetectProxySettings(const char* agent, const char* url,
+ ProxyInfo* proxy) {
+#if defined(WEBRTC_WIN)
+ return WinHttpAutoDetectProxyForUrl(agent, url, proxy);
+#else
+ LOG(LS_WARNING) << "Proxy auto-detection not implemented for this platform";
+ return false;
+#endif
+}
+
+bool GetSystemDefaultProxySettings(const char* agent, const char* url,
+ ProxyInfo* proxy) {
+#if defined(WEBRTC_WIN)
+ return GetIeProxySettings(agent, url, proxy);
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ return GetMacProxySettings(proxy);
+#else
+ // TODO: Get system settings if the browser is not Firefox.
+ return GetFirefoxProxySettings(url, proxy);
+#endif
+}
+
+bool GetProxySettingsForUrl(const char* agent, const char* url,
+ ProxyInfo* proxy, bool long_operation) {
+ UserAgent a = GetAgent(agent);
+ bool result;
+ switch (a) {
+ case UA_FIREFOX: {
+ result = GetFirefoxProxySettings(url, proxy);
+ break;
+ }
+#if defined(WEBRTC_WIN)
+ case UA_INTERNETEXPLORER:
+ result = GetIeProxySettings(agent, url, proxy);
+ break;
+ case UA_UNKNOWN:
+ // Agent not defined, check default browser.
+ if (IsDefaultBrowserFirefox()) {
+ result = GetFirefoxProxySettings(url, proxy);
+ } else {
+ result = GetIeProxySettings(agent, url, proxy);
+ }
+ break;
+#endif // WEBRTC_WIN
+ default:
+ result = GetSystemDefaultProxySettings(agent, url, proxy);
+ break;
+ }
+
+ // TODO: Consider using the 'long_operation' parameter to
+ // decide whether to do the auto-detection.
+ if (result && (proxy->autodetect ||
+ !proxy->autoconfig_url.empty())) {
+ // Use WinHTTP to auto-detect the proxy for us.
+ result = AutoDetectProxySettings(agent, url, proxy);
+ if (!result) {
+ // Either auto-detection is not supported or we simply didn't
+ // find any proxy; reset the type.
+ proxy->type = rtc::PROXY_NONE;
+ }
+ }
+ return result;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/proxydetect.h b/chromium/third_party/webrtc/base/proxydetect.h
new file mode 100644
index 00000000000..f9bf5f87344
--- /dev/null
+++ b/chromium/third_party/webrtc/base/proxydetect.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef _PROXYDETECT_H_
+#define _PROXYDETECT_H_
+
+#include "webrtc/base/proxyinfo.h"
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+namespace rtc {
+// Auto-detect the proxy server. Returns true if a proxy is configured,
+// although the hostname may be empty if no proxy is required for
+// the given URL.
+
+bool GetProxySettingsForUrl(const char* agent, const char* url,
+ rtc::ProxyInfo* proxy,
+ bool long_operation = false);
+
+} // namespace rtc
+
+#endif // _PROXYDETECT_H_
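For reference, a minimal caller-side sketch of the API declared above (not part of the patch; the agent string and the logging are illustrative, and an agent the detector doesn't recognize falls back to the system default settings, as proxydetect.cc shows):

#include "webrtc/base/logging.h"
#include "webrtc/base/proxydetect.h"

void LogProxyForUrl(const char* url) {
  rtc::ProxyInfo proxy;
  // "MyAgent" is a placeholder; unrecognized agents use the system defaults.
  if (rtc::GetProxySettingsForUrl("MyAgent", url, &proxy, false)) {
    LOG(LS_INFO) << "Proxy type: " << rtc::ProxyToString(proxy.type)
                 << ", address: " << proxy.address.ToString();
  } else {
    LOG(LS_INFO) << "No proxy settings found for " << url;
  }
}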
diff --git a/chromium/third_party/webrtc/base/proxydetect_unittest.cc b/chromium/third_party/webrtc/base/proxydetect_unittest.cc
new file mode 100644
index 00000000000..ca0b428fcec
--- /dev/null
+++ b/chromium/third_party/webrtc/base/proxydetect_unittest.cc
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "webrtc/base/fileutils_mock.h"
+#include "webrtc/base/proxydetect.h"
+
+namespace rtc {
+
+static const std::string kFirefoxProfilesIni =
+ "[Profile0]\n"
+ "Name=default\n"
+ "IsRelative=1\n"
+ "Path=Profiles/2de53ejb.default\n"
+ "Default=1\n";
+
+static const std::string kFirefoxHeader =
+ "# Mozilla User Preferences\n"
+ "\n"
+ "/* Some Comments\n"
+ "*\n"
+ "*/\n"
+ "\n";
+
+static const std::string kFirefoxCorruptHeader =
+ "iuahueqe32164";
+
+static const std::string kProxyAddress = "proxy.net.com";
+
+// Mocks out the platform-specific path to the Firefox prefs file.
+class FirefoxPrefsFileSystem : public FakeFileSystem {
+ public:
+ explicit FirefoxPrefsFileSystem(const std::vector<File>& all_files) :
+ FakeFileSystem(all_files) {
+ }
+ virtual FileStream* OpenFile(const Pathname& filename,
+ const std::string& mode) {
+ // TODO: We could have a platform-dependent check of paths here.
+ std::string name = filename.basename();
+ name.append(filename.extension());
+ EXPECT_TRUE(name.compare("prefs.js") == 0 ||
+ name.compare("profiles.ini") == 0);
+ FileStream* stream = FakeFileSystem::OpenFile(name, mode);
+ return stream;
+ }
+};
+
+class ProxyDetectTest : public testing::Test {
+};
+
+bool GetProxyInfo(const std::string prefs, ProxyInfo* info) {
+ std::vector<rtc::FakeFileSystem::File> files;
+ files.push_back(rtc::FakeFileSystem::File("profiles.ini",
+ kFirefoxProfilesIni));
+ files.push_back(rtc::FakeFileSystem::File("prefs.js", prefs));
+ rtc::FilesystemScope fs(new rtc::FirefoxPrefsFileSystem(files));
+ return GetProxySettingsForUrl("Firefox", "www.google.com", info, false);
+}
+
+// Verifies that an empty Firefox prefs file results in no proxy detected.
+TEST_F(ProxyDetectTest, DISABLED_TestFirefoxEmptyPrefs) {
+ ProxyInfo proxy_info;
+ EXPECT_TRUE(GetProxyInfo(kFirefoxHeader, &proxy_info));
+ EXPECT_EQ(PROXY_NONE, proxy_info.type);
+}
+
+// Verifies that corrupted prefs file results in no proxy detected.
+TEST_F(ProxyDetectTest, DISABLED_TestFirefoxCorruptedPrefs) {
+ ProxyInfo proxy_info;
+ EXPECT_TRUE(GetProxyInfo(kFirefoxCorruptHeader, &proxy_info));
+ EXPECT_EQ(PROXY_NONE, proxy_info.type);
+}
+
+// Verifies that a SOCKS5 proxy is detected if configured. SOCKS uses a
+// handshake protocol to inform the proxy software about the
+// connection that the client is trying to make, and may be used for
+// any form of TCP or UDP socket connection.
+TEST_F(ProxyDetectTest, DISABLED_TestFirefoxProxySocks) {
+ ProxyInfo proxy_info;
+ SocketAddress proxy_address("proxy.socks.com", 6666);
+ std::string prefs(kFirefoxHeader);
+ prefs.append("user_pref(\"network.proxy.socks\", \"proxy.socks.com\");\n");
+ prefs.append("user_pref(\"network.proxy.socks_port\", 6666);\n");
+ prefs.append("user_pref(\"network.proxy.type\", 1);\n");
+
+ EXPECT_TRUE(GetProxyInfo(prefs, &proxy_info));
+
+ EXPECT_EQ(PROXY_SOCKS5, proxy_info.type);
+ EXPECT_EQ(proxy_address, proxy_info.address);
+}
+
+// Verifies that an SSL proxy is detected if configured. An SSL proxy is an
+// extension of an HTTP proxy that supports secure connections.
+TEST_F(ProxyDetectTest, DISABLED_TestFirefoxProxySsl) {
+ ProxyInfo proxy_info;
+ SocketAddress proxy_address("proxy.ssl.com", 7777);
+ std::string prefs(kFirefoxHeader);
+
+ prefs.append("user_pref(\"network.proxy.ssl\", \"proxy.ssl.com\");\n");
+ prefs.append("user_pref(\"network.proxy.ssl_port\", 7777);\n");
+ prefs.append("user_pref(\"network.proxy.type\", 1);\n");
+
+ EXPECT_TRUE(GetProxyInfo(prefs, &proxy_info));
+
+ EXPECT_EQ(PROXY_HTTPS, proxy_info.type);
+ EXPECT_EQ(proxy_address, proxy_info.address);
+}
+
+// Verifies that an HTTP proxy is detected if configured.
+TEST_F(ProxyDetectTest, DISABLED_TestFirefoxProxyHttp) {
+ ProxyInfo proxy_info;
+ SocketAddress proxy_address("proxy.http.com", 8888);
+ std::string prefs(kFirefoxHeader);
+
+ prefs.append("user_pref(\"network.proxy.http\", \"proxy.http.com\");\n");
+ prefs.append("user_pref(\"network.proxy.http_port\", 8888);\n");
+ prefs.append("user_pref(\"network.proxy.type\", 1);\n");
+
+ EXPECT_TRUE(GetProxyInfo(prefs, &proxy_info));
+
+ EXPECT_EQ(PROXY_HTTPS, proxy_info.type);
+ EXPECT_EQ(proxy_address, proxy_info.address);
+}
+
+// Verifies that the automatic proxy detection setting is reported.
+TEST_F(ProxyDetectTest, DISABLED_TestFirefoxProxyAuto) {
+ ProxyInfo proxy_info;
+ std::string prefs(kFirefoxHeader);
+
+ prefs.append("user_pref(\"network.proxy.type\", 4);\n");
+
+ EXPECT_TRUE(GetProxyInfo(prefs, &proxy_info));
+
+ EXPECT_EQ(PROXY_NONE, proxy_info.type);
+ EXPECT_TRUE(proxy_info.autodetect);
+ EXPECT_TRUE(proxy_info.autoconfig_url.empty());
+}
+
+// Verifies detection of automatic proxy configuration using a static URL
+// to the config file.
+TEST_F(ProxyDetectTest, DISABLED_TestFirefoxProxyAutoUrl) {
+ ProxyInfo proxy_info;
+ std::string prefs(kFirefoxHeader);
+
+ prefs.append(
+ "user_pref(\"network.proxy.autoconfig_url\", \"http://a/b.pac\");\n");
+ prefs.append("user_pref(\"network.proxy.type\", 2);\n");
+
+ EXPECT_TRUE(GetProxyInfo(prefs, &proxy_info));
+
+ EXPECT_FALSE(proxy_info.autodetect);
+ EXPECT_EQ(PROXY_NONE, proxy_info.type);
+ EXPECT_EQ(0, proxy_info.autoconfig_url.compare("http://a/b.pac"));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/proxyinfo.cc b/chromium/third_party/webrtc/base/proxyinfo.cc
new file mode 100644
index 00000000000..70c3b558471
--- /dev/null
+++ b/chromium/third_party/webrtc/base/proxyinfo.cc
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/proxyinfo.h"
+
+namespace rtc {
+
+const char * ProxyToString(ProxyType proxy) {
+ const char * const PROXY_NAMES[] = { "none", "https", "socks5", "unknown" };
+ return PROXY_NAMES[proxy];
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/proxyinfo.h b/chromium/third_party/webrtc/base/proxyinfo.h
new file mode 100644
index 00000000000..9947f455252
--- /dev/null
+++ b/chromium/third_party/webrtc/base/proxyinfo.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_PROXYINFO_H__
+#define WEBRTC_BASE_PROXYINFO_H__
+
+#include <string>
+#include "webrtc/base/socketaddress.h"
+#include "webrtc/base/cryptstring.h"
+
+namespace rtc {
+
+enum ProxyType {
+ PROXY_NONE,
+ PROXY_HTTPS,
+ PROXY_SOCKS5,
+ PROXY_UNKNOWN
+};
+const char * ProxyToString(ProxyType proxy);
+
+struct ProxyInfo {
+ ProxyType type;
+ SocketAddress address;
+ std::string autoconfig_url;
+ bool autodetect;
+ std::string bypass_list;
+ std::string username;
+ CryptString password;
+
+ ProxyInfo() : type(PROXY_NONE), autodetect(false) { }
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_PROXYINFO_H__
diff --git a/chromium/third_party/webrtc/base/proxyserver.cc b/chromium/third_party/webrtc/base/proxyserver.cc
new file mode 100644
index 00000000000..548cfbf5b3d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/proxyserver.cc
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/proxyserver.h"
+
+#include <algorithm>
+#include "webrtc/base/socketfactory.h"
+
+namespace rtc {
+
+// ProxyServer
+ProxyServer::ProxyServer(
+ SocketFactory* int_factory, const SocketAddress& int_addr,
+ SocketFactory* ext_factory, const SocketAddress& ext_ip)
+ : ext_factory_(ext_factory), ext_ip_(ext_ip.ipaddr(), 0), // strip off port
+ server_socket_(int_factory->CreateAsyncSocket(int_addr.family(),
+ SOCK_STREAM)) {
+ ASSERT(server_socket_.get() != NULL);
+ ASSERT(int_addr.family() == AF_INET || int_addr.family() == AF_INET6);
+ server_socket_->Bind(int_addr);
+ server_socket_->Listen(5);
+ server_socket_->SignalReadEvent.connect(this, &ProxyServer::OnAcceptEvent);
+}
+
+ProxyServer::~ProxyServer() {
+ for (BindingList::iterator it = bindings_.begin();
+ it != bindings_.end(); ++it) {
+ delete (*it);
+ }
+}
+
+void ProxyServer::OnAcceptEvent(AsyncSocket* socket) {
+ ASSERT(socket != NULL && socket == server_socket_.get());
+ AsyncSocket* int_socket = socket->Accept(NULL);
+ AsyncProxyServerSocket* wrapped_socket = WrapSocket(int_socket);
+ AsyncSocket* ext_socket = ext_factory_->CreateAsyncSocket(ext_ip_.family(),
+ SOCK_STREAM);
+ if (ext_socket) {
+ ext_socket->Bind(ext_ip_);
+ bindings_.push_back(new ProxyBinding(wrapped_socket, ext_socket));
+ } else {
+ LOG(LS_ERROR) << "Unable to create external socket on proxy accept event";
+ }
+}
+
+void ProxyServer::OnBindingDestroyed(ProxyBinding* binding) {
+ BindingList::iterator it =
+ std::find(bindings_.begin(), bindings_.end(), binding);
+ delete (*it);
+ bindings_.erase(it);
+}
+
+// ProxyBinding
+ProxyBinding::ProxyBinding(AsyncProxyServerSocket* int_socket,
+ AsyncSocket* ext_socket)
+ : int_socket_(int_socket), ext_socket_(ext_socket), connected_(false),
+ out_buffer_(kBufferSize), in_buffer_(kBufferSize) {
+ int_socket_->SignalConnectRequest.connect(this,
+ &ProxyBinding::OnConnectRequest);
+ int_socket_->SignalReadEvent.connect(this, &ProxyBinding::OnInternalRead);
+ int_socket_->SignalWriteEvent.connect(this, &ProxyBinding::OnInternalWrite);
+ int_socket_->SignalCloseEvent.connect(this, &ProxyBinding::OnInternalClose);
+ ext_socket_->SignalConnectEvent.connect(this,
+ &ProxyBinding::OnExternalConnect);
+ ext_socket_->SignalReadEvent.connect(this, &ProxyBinding::OnExternalRead);
+ ext_socket_->SignalWriteEvent.connect(this, &ProxyBinding::OnExternalWrite);
+ ext_socket_->SignalCloseEvent.connect(this, &ProxyBinding::OnExternalClose);
+}
+
+void ProxyBinding::OnConnectRequest(AsyncProxyServerSocket* socket,
+ const SocketAddress& addr) {
+ ASSERT(!connected_ && ext_socket_.get() != NULL);
+ ext_socket_->Connect(addr);
+ // TODO: handle errors here
+}
+
+void ProxyBinding::OnInternalRead(AsyncSocket* socket) {
+ Read(int_socket_.get(), &out_buffer_);
+ Write(ext_socket_.get(), &out_buffer_);
+}
+
+void ProxyBinding::OnInternalWrite(AsyncSocket* socket) {
+ Write(int_socket_.get(), &in_buffer_);
+}
+
+void ProxyBinding::OnInternalClose(AsyncSocket* socket, int err) {
+ Destroy();
+}
+
+void ProxyBinding::OnExternalConnect(AsyncSocket* socket) {
+ ASSERT(socket != NULL);
+ connected_ = true;
+ int_socket_->SendConnectResult(0, socket->GetRemoteAddress());
+}
+
+void ProxyBinding::OnExternalRead(AsyncSocket* socket) {
+ Read(ext_socket_.get(), &in_buffer_);
+ Write(int_socket_.get(), &in_buffer_);
+}
+
+void ProxyBinding::OnExternalWrite(AsyncSocket* socket) {
+ Write(ext_socket_.get(), &out_buffer_);
+}
+
+void ProxyBinding::OnExternalClose(AsyncSocket* socket, int err) {
+ if (!connected_) {
+ int_socket_->SendConnectResult(err, SocketAddress());
+ }
+ Destroy();
+}
+
+void ProxyBinding::Read(AsyncSocket* socket, FifoBuffer* buffer) {
+ // Only read if the buffer is empty.
+ ASSERT(socket != NULL);
+ size_t size;
+ int read;
+ if (buffer->GetBuffered(&size) && size == 0) {
+ void* p = buffer->GetWriteBuffer(&size);
+ read = socket->Recv(p, size);
+ buffer->ConsumeWriteBuffer(_max(read, 0));
+ }
+}
+
+void ProxyBinding::Write(AsyncSocket* socket, FifoBuffer* buffer) {
+ ASSERT(socket != NULL);
+ size_t size;
+ int written;
+ const void* p = buffer->GetReadData(&size);
+ written = socket->Send(p, size);
+ buffer->ConsumeReadData(_max(written, 0));
+}
+
+void ProxyBinding::Destroy() {
+ SignalDestroyed(this);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/proxyserver.h b/chromium/third_party/webrtc/base/proxyserver.h
new file mode 100644
index 00000000000..80e15d969c6
--- /dev/null
+++ b/chromium/third_party/webrtc/base/proxyserver.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_PROXYSERVER_H_
+#define WEBRTC_BASE_PROXYSERVER_H_
+
+#include <list>
+#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/socketadapters.h"
+#include "webrtc/base/socketaddress.h"
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+
+class SocketFactory;
+
+// ProxyServer is a base class that allows for easy construction of proxy
+// servers. With its helper class ProxyBinding, it contains all the necessary
+// logic for receiving and bridging connections. The specific client-server
+// proxy protocol is implemented by an instance of the AsyncProxyServerSocket
+// class; children of ProxyServer implement WrapSocket appropriately to return
+// the correct protocol handler.
+
+class ProxyBinding : public sigslot::has_slots<> {
+ public:
+ ProxyBinding(AsyncProxyServerSocket* in_socket, AsyncSocket* out_socket);
+ sigslot::signal1<ProxyBinding*> SignalDestroyed;
+
+ private:
+ void OnConnectRequest(AsyncProxyServerSocket* socket,
+ const SocketAddress& addr);
+ void OnInternalRead(AsyncSocket* socket);
+ void OnInternalWrite(AsyncSocket* socket);
+ void OnInternalClose(AsyncSocket* socket, int err);
+ void OnExternalConnect(AsyncSocket* socket);
+ void OnExternalRead(AsyncSocket* socket);
+ void OnExternalWrite(AsyncSocket* socket);
+ void OnExternalClose(AsyncSocket* socket, int err);
+
+ static void Read(AsyncSocket* socket, FifoBuffer* buffer);
+ static void Write(AsyncSocket* socket, FifoBuffer* buffer);
+ void Destroy();
+
+ static const int kBufferSize = 4096;
+ scoped_ptr<AsyncProxyServerSocket> int_socket_;
+ scoped_ptr<AsyncSocket> ext_socket_;
+ bool connected_;
+ FifoBuffer out_buffer_;
+ FifoBuffer in_buffer_;
+ DISALLOW_EVIL_CONSTRUCTORS(ProxyBinding);
+};
+
+class ProxyServer : public sigslot::has_slots<> {
+ public:
+ ProxyServer(SocketFactory* int_factory, const SocketAddress& int_addr,
+ SocketFactory* ext_factory, const SocketAddress& ext_ip);
+ virtual ~ProxyServer();
+
+ protected:
+ void OnAcceptEvent(AsyncSocket* socket);
+ virtual AsyncProxyServerSocket* WrapSocket(AsyncSocket* socket) = 0;
+ void OnBindingDestroyed(ProxyBinding* binding);
+
+ private:
+ typedef std::list<ProxyBinding*> BindingList;
+ SocketFactory* ext_factory_;
+ SocketAddress ext_ip_;
+ scoped_ptr<AsyncSocket> server_socket_;
+ BindingList bindings_;
+ DISALLOW_EVIL_CONSTRUCTORS(ProxyServer);
+};
+
+// SocksProxyServer is a simple extension of ProxyServer to implement SOCKS.
+class SocksProxyServer : public ProxyServer {
+ public:
+ SocksProxyServer(SocketFactory* int_factory, const SocketAddress& int_addr,
+ SocketFactory* ext_factory, const SocketAddress& ext_ip)
+ : ProxyServer(int_factory, int_addr, ext_factory, ext_ip) {
+ }
+ protected:
+ AsyncProxyServerSocket* WrapSocket(AsyncSocket* socket) {
+ return new AsyncSocksProxyServerSocket(socket);
+ }
+ DISALLOW_EVIL_CONSTRUCTORS(SocksProxyServer);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_PROXYSERVER_H_
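A sketch of how SocksProxyServer might be wired up (not part of the patch; it assumes rtc::Thread from webrtc/base/thread.h and that the thread's SocketServer doubles as the SocketFactory for both sides):

#include "webrtc/base/proxyserver.h"
#include "webrtc/base/thread.h"

void RunLocalSocksProxy() {
  rtc::Thread* thread = rtc::Thread::Current();
  rtc::SocketAddress int_addr("127.0.0.1", 1080);  // Clients connect here.
  rtc::SocketAddress ext_ip("0.0.0.0", 0);         // Port is stripped anyway.
  rtc::SocksProxyServer server(thread->socketserver(), int_addr,
                               thread->socketserver(), ext_ip);
  thread->Run();  // Pump socket events until the thread is stopped.
}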
diff --git a/chromium/third_party/webrtc/base/ratelimiter.cc b/chromium/third_party/webrtc/base/ratelimiter.cc
new file mode 100644
index 00000000000..c4a251d1404
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ratelimiter.cc
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/ratelimiter.h"
+
+namespace rtc {
+
+bool RateLimiter::CanUse(size_t desired, double time) {
+ return ((time > period_end_ && desired <= max_per_period_) ||
+ (used_in_period_ + desired) <= max_per_period_);
+}
+
+void RateLimiter::Use(size_t used, double time) {
+ if (time > period_end_) {
+ period_start_ = time;
+ period_end_ = time + period_length_;
+ used_in_period_ = 0;
+ }
+ used_in_period_ += used;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/ratelimiter.h b/chromium/third_party/webrtc/base/ratelimiter.h
new file mode 100644
index 00000000000..cf5d6b05b52
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ratelimiter.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_RATELIMITER_H_
+#define WEBRTC_BASE_RATELIMITER_H_
+
+#include <stdlib.h>
+#include "webrtc/base/basictypes.h"
+
+namespace rtc {
+
+// Limits the rate of use to a certain maximum quantity per period of
+// time. Use, for example, for simple bandwidth throttling.
+//
+// It's implemented like a diet plan: You have so many calories per
+// day. If you hit the limit, you can't eat any more until the next
+// day.
+class RateLimiter {
+ public:
+ // For example, 100kb per second.
+ RateLimiter(size_t max, double period)
+ : max_per_period_(max),
+ period_length_(period),
+ used_in_period_(0),
+ period_start_(0.0),
+ period_end_(period) {
+ }
+ virtual ~RateLimiter() {}
+
+ // Returns true if the desired quantity is available in the
+ // current period (desired <= max - used). Once the given time passes the
+ // end of the period, used is set to zero and more use is available.
+ bool CanUse(size_t desired, double time);
+ // Increment the quantity used this period. If past the end of a
+ // period, a new period is started.
+ void Use(size_t used, double time);
+
+ size_t used_in_period() const {
+ return used_in_period_;
+ }
+
+ size_t max_per_period() const {
+ return max_per_period_;
+ }
+
+ private:
+ size_t max_per_period_;
+ double period_length_;
+ size_t used_in_period_;
+ double period_start_;
+ double period_end_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_RATELIMITER_H_
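A minimal usage sketch for RateLimiter (not part of the patch; the helper below is hypothetical), throttling sends to 100 KB per one-second period, with time supplied in seconds:

#include "webrtc/base/ratelimiter.h"

// rtc::RateLimiter limiter(100 * 1024, 1.0);  // 100 KB per 1-second period.
bool MaybeSend(rtc::RateLimiter* limiter, size_t bytes, double now_seconds) {
  if (!limiter->CanUse(bytes, now_seconds)) {
    return false;  // Over budget for the current period; try again later.
  }
  limiter->Use(bytes, now_seconds);
  // ... actually send the bytes here ...
  return true;
}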
diff --git a/chromium/third_party/webrtc/base/ratelimiter_unittest.cc b/chromium/third_party/webrtc/base/ratelimiter_unittest.cc
new file mode 100644
index 00000000000..b54a751b77e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ratelimiter_unittest.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/ratelimiter.h"
+
+namespace rtc {
+
+TEST(RateLimiterTest, TestCanUse) {
+ // Diet: Can eat 2,000 calories per day.
+ RateLimiter limiter = RateLimiter(2000, 1.0);
+
+ double monday = 1.0;
+ double tuesday = 2.0;
+ double thursday = 4.0;
+
+ EXPECT_TRUE(limiter.CanUse(0, monday));
+ EXPECT_TRUE(limiter.CanUse(1000, monday));
+ EXPECT_TRUE(limiter.CanUse(1999, monday));
+ EXPECT_TRUE(limiter.CanUse(2000, monday));
+ EXPECT_FALSE(limiter.CanUse(2001, monday));
+
+ limiter.Use(1000, monday);
+
+ EXPECT_TRUE(limiter.CanUse(0, monday));
+ EXPECT_TRUE(limiter.CanUse(999, monday));
+ EXPECT_TRUE(limiter.CanUse(1000, monday));
+ EXPECT_FALSE(limiter.CanUse(1001, monday));
+
+ limiter.Use(1000, monday);
+
+ EXPECT_TRUE(limiter.CanUse(0, monday));
+ EXPECT_FALSE(limiter.CanUse(1, monday));
+
+ EXPECT_TRUE(limiter.CanUse(0, tuesday));
+ EXPECT_TRUE(limiter.CanUse(1, tuesday));
+ EXPECT_TRUE(limiter.CanUse(1999, tuesday));
+ EXPECT_TRUE(limiter.CanUse(2000, tuesday));
+ EXPECT_FALSE(limiter.CanUse(2001, tuesday));
+
+ limiter.Use(1000, tuesday);
+
+ EXPECT_TRUE(limiter.CanUse(1000, tuesday));
+ EXPECT_FALSE(limiter.CanUse(1001, tuesday));
+
+ limiter.Use(1000, thursday);
+
+ EXPECT_TRUE(limiter.CanUse(1000, tuesday));
+ EXPECT_FALSE(limiter.CanUse(1001, tuesday));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/ratetracker.cc b/chromium/third_party/webrtc/base/ratetracker.cc
new file mode 100644
index 00000000000..31ecd9bbb50
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ratetracker.cc
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/ratetracker.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+RateTracker::RateTracker()
+ : total_units_(0), units_second_(0),
+ last_units_second_time_(static_cast<uint32>(-1)),
+ last_units_second_calc_(0) {
+}
+
+size_t RateTracker::total_units() const {
+ return total_units_;
+}
+
+size_t RateTracker::units_second() {
+ // Snapshot units / second calculator. Determine how many seconds have
+ // elapsed since our last reference point. If over 1 second, establish
+ // a new reference point that is an integer number of seconds since the
+ // last one, and compute the units over that interval.
+ uint32 current_time = Time();
+ if (last_units_second_time_ != static_cast<uint32>(-1)) {
+ int delta = rtc::TimeDiff(current_time, last_units_second_time_);
+ if (delta >= 1000) {
+ int fraction_time = delta % 1000;
+ int seconds = delta / 1000;
+ int fraction_units =
+ static_cast<int>(total_units_ - last_units_second_calc_) *
+ fraction_time / delta;
+ // Compute "units received during the interval" / "seconds in interval"
+ units_second_ =
+ (total_units_ - last_units_second_calc_ - fraction_units) / seconds;
+ last_units_second_time_ = current_time - fraction_time;
+ last_units_second_calc_ = total_units_ - fraction_units;
+ }
+ }
+ if (last_units_second_time_ == static_cast<uint32>(-1)) {
+ last_units_second_time_ = current_time;
+ last_units_second_calc_ = total_units_;
+ }
+
+ return units_second_;
+}
+
+void RateTracker::Update(size_t units) {
+ total_units_ += units;
+}
+
+uint32 RateTracker::Time() const {
+ return rtc::Time();
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/ratetracker.h b/chromium/third_party/webrtc/base/ratetracker.h
new file mode 100644
index 00000000000..575bff75a46
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ratetracker.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_RATETRACKER_H_
+#define WEBRTC_BASE_RATETRACKER_H_
+
+#include <stdlib.h>
+#include "webrtc/base/basictypes.h"
+
+namespace rtc {
+
+// Computes instantaneous units per second.
+class RateTracker {
+ public:
+ RateTracker();
+ virtual ~RateTracker() {}
+
+ size_t total_units() const;
+ size_t units_second();
+ void Update(size_t units);
+
+ protected:
+ // Overridable for tests.
+ virtual uint32 Time() const;
+
+ private:
+ size_t total_units_;
+ size_t units_second_;
+ uint32 last_units_second_time_;
+ size_t last_units_second_calc_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_RATETRACKER_H_
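A small sketch of the intended RateTracker usage (not part of the patch; the wrapper class is illustrative):

#include "webrtc/base/ratetracker.h"

class ByteRateTracker {
 public:
  void OnPacketReceived(size_t bytes) { tracker_.Update(bytes); }
  size_t BytesPerSecond() { return tracker_.units_second(); }

 private:
  rtc::RateTracker tracker_;  // Uses rtc::Time() via the virtual Time() hook.
};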
diff --git a/chromium/third_party/webrtc/base/ratetracker_unittest.cc b/chromium/third_party/webrtc/base/ratetracker_unittest.cc
new file mode 100644
index 00000000000..e9fee2b9f32
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ratetracker_unittest.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/ratetracker.h"
+
+namespace rtc {
+
+class RateTrackerForTest : public RateTracker {
+ public:
+ RateTrackerForTest() : time_(0) {}
+ virtual uint32 Time() const { return time_; }
+ void AdvanceTime(uint32 delta) { time_ += delta; }
+
+ private:
+ uint32 time_;
+};
+
+TEST(RateTrackerTest, TestBasics) {
+ RateTrackerForTest tracker;
+ EXPECT_EQ(0U, tracker.total_units());
+ EXPECT_EQ(0U, tracker.units_second());
+
+ // Add a sample.
+ tracker.Update(1234);
+ // Advance the clock by 100 ms.
+ tracker.AdvanceTime(100);
+ // total_units should advance, but units_second should stay 0.
+ EXPECT_EQ(1234U, tracker.total_units());
+ EXPECT_EQ(0U, tracker.units_second());
+
+ // Repeat.
+ tracker.Update(1234);
+ tracker.AdvanceTime(100);
+ EXPECT_EQ(1234U * 2, tracker.total_units());
+ EXPECT_EQ(0U, tracker.units_second());
+
+ // Advance the clock by 800 ms, so we've elapsed a full second.
+ // units_second should now be filled in properly.
+ tracker.AdvanceTime(800);
+ EXPECT_EQ(1234U * 2, tracker.total_units());
+ EXPECT_EQ(1234U * 2, tracker.units_second());
+
+ // Poll the tracker again immediately. The reported rate should stay the same.
+ EXPECT_EQ(1234U * 2, tracker.total_units());
+ EXPECT_EQ(1234U * 2, tracker.units_second());
+
+ // Do nothing and advance by a second. We should drop down to zero.
+ tracker.AdvanceTime(1000);
+ EXPECT_EQ(1234U * 2, tracker.total_units());
+ EXPECT_EQ(0U, tracker.units_second());
+
+ // Send a bunch of data at a constant rate for 5.5 "seconds".
+ // We should report the rate properly.
+ for (int i = 0; i < 5500; i += 100) {
+ tracker.Update(9876U);
+ tracker.AdvanceTime(100);
+ }
+ EXPECT_EQ(9876U * 10, tracker.units_second());
+
+ // Advance the clock by 500 ms. Since we sent nothing over this half-second,
+ // the reported rate should be reduced by half.
+ tracker.AdvanceTime(500);
+ EXPECT_EQ(9876U * 5, tracker.units_second());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/refcount.h b/chromium/third_party/webrtc/base/refcount.h
new file mode 100644
index 00000000000..7bb6da36a6c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/refcount.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TALK_APP_BASE_REFCOUNT_H_
+#define TALK_APP_BASE_REFCOUNT_H_
+
+#include <string.h>
+
+#include "webrtc/base/criticalsection.h"
+
+namespace rtc {
+
+// Reference count interface.
+class RefCountInterface {
+ public:
+ virtual int AddRef() = 0;
+ virtual int Release() = 0;
+ protected:
+ virtual ~RefCountInterface() {}
+};
+
+template <class T>
+class RefCountedObject : public T {
+ public:
+ RefCountedObject() : ref_count_(0) {
+ }
+
+ template<typename P>
+ explicit RefCountedObject(P p) : T(p), ref_count_(0) {
+ }
+
+ template<typename P1, typename P2>
+ RefCountedObject(P1 p1, P2 p2) : T(p1, p2), ref_count_(0) {
+ }
+
+ template<typename P1, typename P2, typename P3>
+ RefCountedObject(P1 p1, P2 p2, P3 p3) : T(p1, p2, p3), ref_count_(0) {
+ }
+
+ template<typename P1, typename P2, typename P3, typename P4>
+ RefCountedObject(P1 p1, P2 p2, P3 p3, P4 p4)
+ : T(p1, p2, p3, p4), ref_count_(0) {
+ }
+
+ template<typename P1, typename P2, typename P3, typename P4, typename P5>
+ RefCountedObject(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5)
+ : T(p1, p2, p3, p4, p5), ref_count_(0) {
+ }
+
+ virtual int AddRef() {
+ return rtc::AtomicOps::Increment(&ref_count_);
+ }
+
+ virtual int Release() {
+ int count = rtc::AtomicOps::Decrement(&ref_count_);
+ if (!count) {
+ delete this;
+ }
+ return count;
+ }
+
+ protected:
+ virtual ~RefCountedObject() {
+ }
+
+ int ref_count_;
+};
+
+} // namespace rtc
+
+#endif // TALK_APP_BASE_REFCOUNT_H_
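A sketch of wrapping an interface in RefCountedObject (not part of the patch; Foo and FooImpl are hypothetical, and rtc::scoped_refptr is assumed to be available from webrtc/base/scoped_ref_ptr.h):

#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"

class Foo : public rtc::RefCountInterface {
 public:
  virtual void DoWork() = 0;

 protected:
  virtual ~Foo() {}
};

class FooImpl : public Foo {
 public:
  virtual void DoWork() { /* ... */ }
};

void UseFoo() {
  // RefCountedObject supplies AddRef()/Release() on top of FooImpl.
  rtc::scoped_refptr<Foo> foo(new rtc::RefCountedObject<FooImpl>());
  foo->DoWork();  // Deleted automatically when the last reference is released.
}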
diff --git a/chromium/third_party/webrtc/base/referencecountedsingletonfactory.h b/chromium/third_party/webrtc/base/referencecountedsingletonfactory.h
new file mode 100644
index 00000000000..7138c8c5e15
--- /dev/null
+++ b/chromium/third_party/webrtc/base/referencecountedsingletonfactory.h
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_REFERENCECOUNTEDSINGLETONFACTORY_H_
+#define WEBRTC_BASE_REFERENCECOUNTEDSINGLETONFACTORY_H_
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace rtc {
+
+template <typename Interface> class rcsf_ptr;
+
+// A ReferenceCountedSingletonFactory is an object which owns another object,
+// and doles out the owned object to consumers in a reference-counted manner.
+// Thus, the factory owns at most one object of the desired kind, and
+// hands consumers a special pointer to it, through which they can access it.
+// When the consumers delete the pointer, the reference count goes down,
+// and if the reference count hits zero, the factory can throw the object
+// away. If a consumer requests the pointer and the factory has none,
+// it can create one on the fly and pass it back.
+template <typename Interface>
+class ReferenceCountedSingletonFactory {
+ friend class rcsf_ptr<Interface>;
+ public:
+ ReferenceCountedSingletonFactory() : ref_count_(0) {}
+
+ virtual ~ReferenceCountedSingletonFactory() {
+ ASSERT(ref_count_ == 0);
+ }
+
+ protected:
+ // Must be implemented in a sub-class. The sub-class may choose whether or not
+ // to cache the instance across lifetimes by either reset()'ing or not
+ // reset()'ing the scoped_ptr in CleanupInstance().
+ virtual bool SetupInstance() = 0;
+ virtual void CleanupInstance() = 0;
+
+ scoped_ptr<Interface> instance_;
+
+ private:
+ Interface* GetInstance() {
+ rtc::CritScope cs(&crit_);
+ if (ref_count_ == 0) {
+ if (!SetupInstance()) {
+ LOG(LS_VERBOSE) << "Failed to setup instance";
+ return NULL;
+ }
+ ASSERT(instance_.get() != NULL);
+ }
+ ++ref_count_;
+
+ LOG(LS_VERBOSE) << "Number of references: " << ref_count_;
+ return instance_.get();
+ }
+
+ void ReleaseInstance() {
+ rtc::CritScope cs(&crit_);
+ ASSERT(ref_count_ > 0);
+ ASSERT(instance_.get() != NULL);
+ --ref_count_;
+ LOG(LS_VERBOSE) << "Number of references: " << ref_count_;
+ if (ref_count_ == 0) {
+ CleanupInstance();
+ }
+ }
+
+ CriticalSection crit_;
+ int ref_count_;
+
+ DISALLOW_COPY_AND_ASSIGN(ReferenceCountedSingletonFactory);
+};
+
+template <typename Interface>
+class rcsf_ptr {
+ public:
+ // Create a pointer that uses the factory to get the instance.
+ // This is lazy - it won't generate the instance until it is requested.
+ explicit rcsf_ptr(ReferenceCountedSingletonFactory<Interface>* factory)
+ : instance_(NULL),
+ factory_(factory) {
+ }
+
+ ~rcsf_ptr() {
+ release();
+ }
+
+ Interface& operator*() {
+ EnsureAcquired();
+ return *instance_;
+ }
+
+ Interface* operator->() {
+ EnsureAcquired();
+ return instance_;
+ }
+
+ // Gets the pointer, creating the singleton if necessary. May return NULL if
+ // creation failed.
+ Interface* get() {
+ Acquire();
+ return instance_;
+ }
+
+ // Set instance to NULL and tell the factory we aren't using the instance
+ // anymore.
+ void release() {
+ if (instance_) {
+ instance_ = NULL;
+ factory_->ReleaseInstance();
+ }
+ }
+
+ // Lets us know whether instance is valid or not right now.
+ // Even though attempts to use the instance will automatically create it, it
+ // is advisable to check this because creation can fail.
+ bool valid() const {
+ return instance_ != NULL;
+ }
+
+ // Returns the factory that this pointer is using.
+ ReferenceCountedSingletonFactory<Interface>* factory() const {
+ return factory_;
+ }
+
+ private:
+ void EnsureAcquired() {
+ Acquire();
+ ASSERT(instance_ != NULL);
+ }
+
+ void Acquire() {
+ // Since we're getting a singleton back, acquire is a noop if instance is
+ // already populated.
+ if (!instance_) {
+ instance_ = factory_->GetInstance();
+ }
+ }
+
+ Interface* instance_;
+ ReferenceCountedSingletonFactory<Interface>* factory_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(rcsf_ptr);
+};
+
+}  // namespace rtc
+
+#endif // WEBRTC_BASE_REFERENCECOUNTEDSINGLETONFACTORY_H_
diff --git a/chromium/third_party/webrtc/base/referencecountedsingletonfactory_unittest.cc b/chromium/third_party/webrtc/base/referencecountedsingletonfactory_unittest.cc
new file mode 100644
index 00000000000..75d97a639a0
--- /dev/null
+++ b/chromium/third_party/webrtc/base/referencecountedsingletonfactory_unittest.cc
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/referencecountedsingletonfactory.h"
+
+namespace rtc {
+
+class MyExistenceWatcher {
+ public:
+ MyExistenceWatcher() { create_called_ = true; }
+ ~MyExistenceWatcher() { delete_called_ = true; }
+
+ static bool create_called_;
+ static bool delete_called_;
+};
+
+bool MyExistenceWatcher::create_called_ = false;
+bool MyExistenceWatcher::delete_called_ = false;
+
+class TestReferenceCountedSingletonFactory :
+ public ReferenceCountedSingletonFactory<MyExistenceWatcher> {
+ protected:
+ virtual bool SetupInstance() {
+ instance_.reset(new MyExistenceWatcher());
+ return true;
+ }
+
+ virtual void CleanupInstance() {
+ instance_.reset();
+ }
+};
+
+static void DoCreateAndGoOutOfScope(
+ ReferenceCountedSingletonFactory<MyExistenceWatcher> *factory) {
+ rcsf_ptr<MyExistenceWatcher> ptr(factory);
+ ptr.get();
+ // and now ptr should go out of scope.
+}
+
+TEST(ReferenceCountedSingletonFactory, ZeroReferenceCountCausesDeletion) {
+ TestReferenceCountedSingletonFactory factory;
+ MyExistenceWatcher::delete_called_ = false;
+ DoCreateAndGoOutOfScope(&factory);
+ EXPECT_TRUE(MyExistenceWatcher::delete_called_);
+}
+
+TEST(ReferenceCountedSingletonFactory, NonZeroReferenceCountDoesNotDelete) {
+ TestReferenceCountedSingletonFactory factory;
+ rcsf_ptr<MyExistenceWatcher> ptr(&factory);
+ ptr.get();
+ MyExistenceWatcher::delete_called_ = false;
+ DoCreateAndGoOutOfScope(&factory);
+ EXPECT_FALSE(MyExistenceWatcher::delete_called_);
+}
+
+TEST(ReferenceCountedSingletonFactory, ReturnedPointersReferToSameThing) {
+ TestReferenceCountedSingletonFactory factory;
+ rcsf_ptr<MyExistenceWatcher> one(&factory), two(&factory);
+
+ EXPECT_EQ(one.get(), two.get());
+}
+
+TEST(ReferenceCountedSingletonFactory, Release) {
+ TestReferenceCountedSingletonFactory factory;
+
+ rcsf_ptr<MyExistenceWatcher> one(&factory);
+ one.get();
+
+ MyExistenceWatcher::delete_called_ = false;
+ one.release();
+ EXPECT_TRUE(MyExistenceWatcher::delete_called_);
+}
+
+TEST(ReferenceCountedSingletonFactory, GetWithoutRelease) {
+ TestReferenceCountedSingletonFactory factory;
+ rcsf_ptr<MyExistenceWatcher> one(&factory);
+ one.get();
+
+ MyExistenceWatcher::create_called_ = false;
+ one.get();
+ EXPECT_FALSE(MyExistenceWatcher::create_called_);
+}
+
+TEST(ReferenceCountedSingletonFactory, GetAfterRelease) {
+ TestReferenceCountedSingletonFactory factory;
+ rcsf_ptr<MyExistenceWatcher> one(&factory);
+
+ MyExistenceWatcher::create_called_ = false;
+ one.release();
+ one.get();
+ EXPECT_TRUE(MyExistenceWatcher::create_called_);
+}
+
+TEST(ReferenceCountedSingletonFactory, MultipleReleases) {
+ TestReferenceCountedSingletonFactory factory;
+ rcsf_ptr<MyExistenceWatcher> one(&factory), two(&factory);
+
+ MyExistenceWatcher::create_called_ = false;
+ MyExistenceWatcher::delete_called_ = false;
+ one.release();
+ EXPECT_FALSE(MyExistenceWatcher::delete_called_);
+ one.release();
+ EXPECT_FALSE(MyExistenceWatcher::delete_called_);
+ one.release();
+ EXPECT_FALSE(MyExistenceWatcher::delete_called_);
+ one.get();
+ EXPECT_TRUE(MyExistenceWatcher::create_called_);
+}
+
+TEST(ReferenceCountedSingletonFactory, Existentialism) {
+ TestReferenceCountedSingletonFactory factory;
+
+ rcsf_ptr<MyExistenceWatcher> one(&factory);
+
+ MyExistenceWatcher::create_called_ = false;
+ MyExistenceWatcher::delete_called_ = false;
+
+ one.get();
+ EXPECT_TRUE(MyExistenceWatcher::create_called_);
+ one.release();
+ EXPECT_TRUE(MyExistenceWatcher::delete_called_);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/rollingaccumulator.h b/chromium/third_party/webrtc/base/rollingaccumulator.h
new file mode 100644
index 00000000000..0dce0c3a09b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/rollingaccumulator.h
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ROLLINGACCUMULATOR_H_
+#define WEBRTC_BASE_ROLLINGACCUMULATOR_H_
+
+#include <vector>
+
+#include "webrtc/base/common.h"
+
+namespace rtc {
+
+// RollingAccumulator stores and reports statistics
+// over N most recent samples.
+//
+// T is assumed to be an int, long, double or float.
+template<typename T>
+class RollingAccumulator {
+ public:
+ explicit RollingAccumulator(size_t max_count)
+ : samples_(max_count) {
+ Reset();
+ }
+ ~RollingAccumulator() {
+ }
+
+ size_t max_count() const {
+ return samples_.size();
+ }
+
+ size_t count() const {
+ return count_;
+ }
+
+ void Reset() {
+ count_ = 0U;
+ next_index_ = 0U;
+ sum_ = 0.0;
+ sum_2_ = 0.0;
+ max_ = T();
+ max_stale_ = false;
+ min_ = T();
+ min_stale_ = false;
+ }
+
+ void AddSample(T sample) {
+ if (count_ == max_count()) {
+ // Remove oldest sample.
+ T sample_to_remove = samples_[next_index_];
+ sum_ -= sample_to_remove;
+ sum_2_ -= sample_to_remove * sample_to_remove;
+ if (sample_to_remove >= max_) {
+ max_stale_ = true;
+ }
+ if (sample_to_remove <= min_) {
+ min_stale_ = true;
+ }
+ } else {
+ // Increase count of samples.
+ ++count_;
+ }
+ // Add new sample.
+ samples_[next_index_] = sample;
+ sum_ += sample;
+ sum_2_ += sample * sample;
+ if (count_ == 1 || sample >= max_) {
+ max_ = sample;
+ max_stale_ = false;
+ }
+ if (count_ == 1 || sample <= min_) {
+ min_ = sample;
+ min_stale_ = false;
+ }
+ // Update next_index_.
+ next_index_ = (next_index_ + 1) % max_count();
+ }
+
+ T ComputeSum() const {
+ return static_cast<T>(sum_);
+ }
+
+ double ComputeMean() const {
+ if (count_ == 0) {
+ return 0.0;
+ }
+ return sum_ / count_;
+ }
+
+ T ComputeMax() const {
+ if (max_stale_) {
+ ASSERT(count_ > 0 &&
+ "It shouldn't be possible for max_stale_ && count_ == 0");
+ max_ = samples_[next_index_];
+ for (size_t i = 1u; i < count_; i++) {
+ max_ = _max(max_, samples_[(next_index_ + i) % max_count()]);
+ }
+ max_stale_ = false;
+ }
+ return max_;
+ }
+
+ T ComputeMin() const {
+ if (min_stale_) {
+ ASSERT(count_ > 0 &&
+ "It shouldn't be possible for min_stale_ && count_ == 0");
+ min_ = samples_[next_index_];
+ for (size_t i = 1u; i < count_; i++) {
+ min_ = _min(min_, samples_[(next_index_ + i) % max_count()]);
+ }
+ min_stale_ = false;
+ }
+ return min_;
+ }
+
+ // O(n) time complexity.
+ // Weights the nth most recent sample with weight (learning_rate)^n.
+ // learning_rate must be in (0.0, 1.0); otherwise the non-weighted mean
+ // is returned.
+ double ComputeWeightedMean(double learning_rate) const {
+ if (count_ < 1 || learning_rate <= 0.0 || learning_rate >= 1.0) {
+ return ComputeMean();
+ }
+ double weighted_mean = 0.0;
+ double current_weight = 1.0;
+ double weight_sum = 0.0;
+ const size_t max_size = max_count();
+ for (size_t i = 0; i < count_; ++i) {
+ current_weight *= learning_rate;
+ weight_sum += current_weight;
+ // Add max_size to prevent underflow.
+ size_t index = (next_index_ + max_size - i - 1) % max_size;
+ weighted_mean += current_weight * samples_[index];
+ }
+ return weighted_mean / weight_sum;
+ }
+
+ // Compute estimated variance. Estimation is more accurate
+ // as the number of samples grows.
+ double ComputeVariance() const {
+ if (count_ == 0) {
+ return 0.0;
+ }
+ // Var = E[x^2] - (E[x])^2
+ double count_inv = 1.0 / count_;
+ double mean_2 = sum_2_ * count_inv;
+ double mean = sum_ * count_inv;
+ return mean_2 - (mean * mean);
+ }
+
+ private:
+ size_t count_;
+ size_t next_index_;
+ double sum_; // Sum(x) - double to avoid overflow
+ double sum_2_; // Sum(x*x) - double to avoid overflow
+ mutable T max_;
+ mutable bool max_stale_;
+ mutable T min_;
+ mutable bool min_stale_;
+ std::vector<T> samples_;
+
+ DISALLOW_COPY_AND_ASSIGN(RollingAccumulator);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_ROLLINGACCUMULATOR_H_
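To make ComputeWeightedMean concrete, this is the arithmetic behind the SomeSamples expectation in the unit test below, for samples 0, 1, 2, 3 and learning_rate 0.5 (the newest sample gets weight 0.5, the next 0.25, and so on):

  weighted_sum = 0.5*3 + 0.25*2 + 0.125*1 + 0.0625*0 = 2.125
  weight_sum   = 0.5 + 0.25 + 0.125 + 0.0625          = 0.9375
  ComputeWeightedMean(0.5) = 2.125 / 0.9375 ≈ 2.2667

which matches the EXPECT_NEAR(2.26666, ...) check in the test.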
diff --git a/chromium/third_party/webrtc/base/rollingaccumulator_unittest.cc b/chromium/third_party/webrtc/base/rollingaccumulator_unittest.cc
new file mode 100644
index 00000000000..7e3d8cdf0e9
--- /dev/null
+++ b/chromium/third_party/webrtc/base/rollingaccumulator_unittest.cc
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/rollingaccumulator.h"
+
+namespace rtc {
+
+namespace {
+
+const double kLearningRate = 0.5;
+
+} // namespace
+
+TEST(RollingAccumulatorTest, ZeroSamples) {
+ RollingAccumulator<int> accum(10);
+
+ EXPECT_EQ(0U, accum.count());
+ EXPECT_DOUBLE_EQ(0.0, accum.ComputeMean());
+ EXPECT_DOUBLE_EQ(0.0, accum.ComputeVariance());
+ EXPECT_EQ(0, accum.ComputeMin());
+ EXPECT_EQ(0, accum.ComputeMax());
+}
+
+TEST(RollingAccumulatorTest, SomeSamples) {
+ RollingAccumulator<int> accum(10);
+ for (int i = 0; i < 4; ++i) {
+ accum.AddSample(i);
+ }
+
+ EXPECT_EQ(4U, accum.count());
+ EXPECT_EQ(6, accum.ComputeSum());
+ EXPECT_DOUBLE_EQ(1.5, accum.ComputeMean());
+ EXPECT_NEAR(2.26666, accum.ComputeWeightedMean(kLearningRate), 0.01);
+ EXPECT_DOUBLE_EQ(1.25, accum.ComputeVariance());
+ EXPECT_EQ(0, accum.ComputeMin());
+ EXPECT_EQ(3, accum.ComputeMax());
+}
+
+TEST(RollingAccumulatorTest, RollingSamples) {
+ RollingAccumulator<int> accum(10);
+ for (int i = 0; i < 12; ++i) {
+ accum.AddSample(i);
+ }
+
+ EXPECT_EQ(10U, accum.count());
+ EXPECT_EQ(65, accum.ComputeSum());
+ EXPECT_DOUBLE_EQ(6.5, accum.ComputeMean());
+ EXPECT_NEAR(10.0, accum.ComputeWeightedMean(kLearningRate), 0.01);
+ EXPECT_NEAR(9.0, accum.ComputeVariance(), 1.0);
+ EXPECT_EQ(2, accum.ComputeMin());
+ EXPECT_EQ(11, accum.ComputeMax());
+}
+
+TEST(RollingAccumulatorTest, ResetSamples) {
+ RollingAccumulator<int> accum(10);
+
+ for (int i = 0; i < 10; ++i) {
+ accum.AddSample(100);
+ }
+ EXPECT_EQ(10U, accum.count());
+ EXPECT_DOUBLE_EQ(100.0, accum.ComputeMean());
+ EXPECT_EQ(100, accum.ComputeMin());
+ EXPECT_EQ(100, accum.ComputeMax());
+
+ accum.Reset();
+ EXPECT_EQ(0U, accum.count());
+
+ for (int i = 0; i < 5; ++i) {
+ accum.AddSample(i);
+ }
+
+ EXPECT_EQ(5U, accum.count());
+ EXPECT_EQ(10, accum.ComputeSum());
+ EXPECT_DOUBLE_EQ(2.0, accum.ComputeMean());
+ EXPECT_EQ(0, accum.ComputeMin());
+ EXPECT_EQ(4, accum.ComputeMax());
+}
+
+TEST(RollingAccumulatorTest, RollingSamplesDouble) {
+ RollingAccumulator<double> accum(10);
+ for (int i = 0; i < 23; ++i) {
+ accum.AddSample(5 * i);
+ }
+
+ EXPECT_EQ(10u, accum.count());
+ EXPECT_DOUBLE_EQ(875.0, accum.ComputeSum());
+ EXPECT_DOUBLE_EQ(87.5, accum.ComputeMean());
+ EXPECT_NEAR(105.049, accum.ComputeWeightedMean(kLearningRate), 0.1);
+ EXPECT_NEAR(229.166667, accum.ComputeVariance(), 25);
+ EXPECT_DOUBLE_EQ(65.0, accum.ComputeMin());
+ EXPECT_DOUBLE_EQ(110.0, accum.ComputeMax());
+}
+
+TEST(RollingAccumulatorTest, ComputeWeightedMeanCornerCases) {
+ RollingAccumulator<int> accum(10);
+ EXPECT_DOUBLE_EQ(0.0, accum.ComputeWeightedMean(kLearningRate));
+ EXPECT_DOUBLE_EQ(0.0, accum.ComputeWeightedMean(0.0));
+ EXPECT_DOUBLE_EQ(0.0, accum.ComputeWeightedMean(1.1));
+
+ for (int i = 0; i < 8; ++i) {
+ accum.AddSample(i);
+ }
+
+ EXPECT_DOUBLE_EQ(3.5, accum.ComputeMean());
+ EXPECT_DOUBLE_EQ(3.5, accum.ComputeWeightedMean(0));
+ EXPECT_DOUBLE_EQ(3.5, accum.ComputeWeightedMean(1.1));
+ EXPECT_NEAR(6.0, accum.ComputeWeightedMean(kLearningRate), 0.1);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/safe_conversions.h b/chromium/third_party/webrtc/base/safe_conversions.h
new file mode 100644
index 00000000000..f6cb24e4126
--- /dev/null
+++ b/chromium/third_party/webrtc/base/safe_conversions.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/numerics/safe_conversions.h.
+
+#ifndef WEBRTC_BASE_SAFE_CONVERSIONS_H_
+#define WEBRTC_BASE_SAFE_CONVERSIONS_H_
+
+#include <limits>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/safe_conversions_impl.h"
+
+namespace rtc {
+
+inline void Check(bool condition) {
+ if (!condition) {
+ LOG(LS_ERROR) << "CHECK failed.";
+ Break();
+ // The program should have crashed at this point.
+ }
+}
+
+// Convenience function that returns true if the supplied value is in range
+// for the destination type.
+template <typename Dst, typename Src>
+inline bool IsValueInRangeForNumericType(Src value) {
+ return internal::RangeCheck<Dst>(value) == internal::TYPE_VALID;
+}
+
+// checked_cast<> is analogous to static_cast<> for numeric types,
+// except that it CHECKs that the specified numeric conversion will not
+// overflow or underflow. NaN source will always trigger a CHECK.
+template <typename Dst, typename Src>
+inline Dst checked_cast(Src value) {
+ Check(IsValueInRangeForNumericType<Dst>(value));
+ return static_cast<Dst>(value);
+}
+
+// saturated_cast<> is analogous to static_cast<> for numeric types, except
+// that the specified numeric conversion will saturate rather than overflow or
+// underflow. NaN assignment to an integral will trigger a CHECK condition.
+template <typename Dst, typename Src>
+inline Dst saturated_cast(Src value) {
+ // Optimization for floating point values, which already saturate.
+ if (std::numeric_limits<Dst>::is_iec559)
+ return static_cast<Dst>(value);
+
+ switch (internal::RangeCheck<Dst>(value)) {
+ case internal::TYPE_VALID:
+ return static_cast<Dst>(value);
+
+ case internal::TYPE_UNDERFLOW:
+ return std::numeric_limits<Dst>::min();
+
+ case internal::TYPE_OVERFLOW:
+ return std::numeric_limits<Dst>::max();
+
+ // Should fail only on attempting to assign NaN to a saturated integer.
+ case internal::TYPE_INVALID:
+ Check(false);
+ return std::numeric_limits<Dst>::max();
+ }
+
+ Check(false); // NOTREACHED();
+ return static_cast<Dst>(value);
+}
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SAFE_CONVERSIONS_H_
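
An illustrative sketch (not part of the patch) of how the two casts above behave, assuming the include path of the patched tree:

```cpp
#include <stdint.h>

#include "webrtc/base/safe_conversions.h"

void SafeConversionExamples() {
  int64_t big = 5000000000LL;                        // does not fit in int32_t
  int32_t a = rtc::saturated_cast<int32_t>(big);     // clamps to INT32_MAX
  uint8_t b = rtc::saturated_cast<uint8_t>(-3);      // underflow: clamps to 0
  int32_t c = rtc::checked_cast<int32_t>(12345u);    // in range, CHECK passes
  (void)a; (void)b; (void)c;
}
```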
diff --git a/chromium/third_party/webrtc/base/safe_conversions_impl.h b/chromium/third_party/webrtc/base/safe_conversions_impl.h
new file mode 100644
index 00000000000..2950f970cec
--- /dev/null
+++ b/chromium/third_party/webrtc/base/safe_conversions_impl.h
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/numerics/safe_conversions_impl.h.
+
+#ifndef WEBRTC_BASE_SAFE_CONVERSIONS_IMPL_H_
+#define WEBRTC_BASE_SAFE_CONVERSIONS_IMPL_H_
+
+#include <limits>
+
+namespace rtc {
+namespace internal {
+
+enum DstSign {
+ DST_UNSIGNED,
+ DST_SIGNED
+};
+
+enum SrcSign {
+ SRC_UNSIGNED,
+ SRC_SIGNED
+};
+
+enum DstRange {
+ OVERLAPS_RANGE,
+ CONTAINS_RANGE
+};
+
+// Helper templates to statically determine if our destination type can contain
+// all values represented by the source type.
+
+template <typename Dst, typename Src,
+ DstSign IsDstSigned = std::numeric_limits<Dst>::is_signed ?
+ DST_SIGNED : DST_UNSIGNED,
+ SrcSign IsSrcSigned = std::numeric_limits<Src>::is_signed ?
+ SRC_SIGNED : SRC_UNSIGNED>
+struct StaticRangeCheck {};
+
+template <typename Dst, typename Src>
+struct StaticRangeCheck<Dst, Src, DST_SIGNED, SRC_SIGNED> {
+ typedef std::numeric_limits<Dst> DstLimits;
+ typedef std::numeric_limits<Src> SrcLimits;
+ // Compare based on max_exponent, which we must compute for integrals.
+ static const size_t kDstMaxExponent = DstLimits::is_iec559 ?
+ DstLimits::max_exponent :
+ (sizeof(Dst) * 8 - 1);
+ static const size_t kSrcMaxExponent = SrcLimits::is_iec559 ?
+ SrcLimits::max_exponent :
+ (sizeof(Src) * 8 - 1);
+ static const DstRange value = kDstMaxExponent >= kSrcMaxExponent ?
+ CONTAINS_RANGE : OVERLAPS_RANGE;
+};
+
+template <typename Dst, typename Src>
+struct StaticRangeCheck<Dst, Src, DST_UNSIGNED, SRC_UNSIGNED> {
+ static const DstRange value = sizeof(Dst) >= sizeof(Src) ?
+ CONTAINS_RANGE : OVERLAPS_RANGE;
+};
+
+template <typename Dst, typename Src>
+struct StaticRangeCheck<Dst, Src, DST_SIGNED, SRC_UNSIGNED> {
+ typedef std::numeric_limits<Dst> DstLimits;
+ typedef std::numeric_limits<Src> SrcLimits;
+ // Compare based on max_exponent, which we must compute for integrals.
+ static const size_t kDstMaxExponent = DstLimits::is_iec559 ?
+ DstLimits::max_exponent :
+ (sizeof(Dst) * 8 - 1);
+ static const size_t kSrcMaxExponent = sizeof(Src) * 8;
+ static const DstRange value = kDstMaxExponent >= kSrcMaxExponent ?
+ CONTAINS_RANGE : OVERLAPS_RANGE;
+};
+
+template <typename Dst, typename Src>
+struct StaticRangeCheck<Dst, Src, DST_UNSIGNED, SRC_SIGNED> {
+ static const DstRange value = OVERLAPS_RANGE;
+};
+
+
+enum RangeCheckResult {
+ TYPE_VALID = 0, // Value can be represented by the destination type.
+ TYPE_UNDERFLOW = 1, // Value would underflow.
+ TYPE_OVERFLOW = 2, // Value would overflow.
+ TYPE_INVALID = 3 // Source value is invalid (i.e. NaN).
+};
+
+// This macro creates a RangeCheckResult from an upper and lower bound
+// check by taking advantage of the fact that only NaN can be out of range in
+// both directions at once.
+#define BASE_NUMERIC_RANGE_CHECK_RESULT(is_in_upper_bound, is_in_lower_bound) \
+ RangeCheckResult(((is_in_upper_bound) ? 0 : TYPE_OVERFLOW) | \
+ ((is_in_lower_bound) ? 0 : TYPE_UNDERFLOW))
+
+template <typename Dst,
+ typename Src,
+ DstSign IsDstSigned = std::numeric_limits<Dst>::is_signed ?
+ DST_SIGNED : DST_UNSIGNED,
+ SrcSign IsSrcSigned = std::numeric_limits<Src>::is_signed ?
+ SRC_SIGNED : SRC_UNSIGNED,
+ DstRange IsSrcRangeContained = StaticRangeCheck<Dst, Src>::value>
+struct RangeCheckImpl {};
+
+// The following templates are for ranges that must be verified at runtime. We
+// split it into checks based on signedness to avoid confusing casts and
+// compiler warnings on signed and unsigned comparisons.
+
+// Dst range always contains the result: nothing to check.
+template <typename Dst, typename Src, DstSign IsDstSigned, SrcSign IsSrcSigned>
+struct RangeCheckImpl<Dst, Src, IsDstSigned, IsSrcSigned, CONTAINS_RANGE> {
+ static RangeCheckResult Check(Src value) {
+ return TYPE_VALID;
+ }
+};
+
+// Signed to signed narrowing.
+template <typename Dst, typename Src>
+struct RangeCheckImpl<Dst, Src, DST_SIGNED, SRC_SIGNED, OVERLAPS_RANGE> {
+ static RangeCheckResult Check(Src value) {
+ typedef std::numeric_limits<Dst> DstLimits;
+ return DstLimits::is_iec559 ?
+ BASE_NUMERIC_RANGE_CHECK_RESULT(
+ value <= static_cast<Src>(DstLimits::max()),
+ value >= static_cast<Src>(DstLimits::max() * -1)) :
+ BASE_NUMERIC_RANGE_CHECK_RESULT(
+ value <= static_cast<Src>(DstLimits::max()),
+ value >= static_cast<Src>(DstLimits::min()));
+ }
+};
+
+// Unsigned to unsigned narrowing.
+template <typename Dst, typename Src>
+struct RangeCheckImpl<Dst, Src, DST_UNSIGNED, SRC_UNSIGNED, OVERLAPS_RANGE> {
+ static RangeCheckResult Check(Src value) {
+ typedef std::numeric_limits<Dst> DstLimits;
+ return BASE_NUMERIC_RANGE_CHECK_RESULT(
+ value <= static_cast<Src>(DstLimits::max()), true);
+ }
+};
+
+// Unsigned to signed.
+template <typename Dst, typename Src>
+struct RangeCheckImpl<Dst, Src, DST_SIGNED, SRC_UNSIGNED, OVERLAPS_RANGE> {
+ static RangeCheckResult Check(Src value) {
+ typedef std::numeric_limits<Dst> DstLimits;
+ return sizeof(Dst) > sizeof(Src) ? TYPE_VALID :
+ BASE_NUMERIC_RANGE_CHECK_RESULT(
+ value <= static_cast<Src>(DstLimits::max()), true);
+ }
+};
+
+// Signed to unsigned.
+template <typename Dst, typename Src>
+struct RangeCheckImpl<Dst, Src, DST_UNSIGNED, SRC_SIGNED, OVERLAPS_RANGE> {
+ static RangeCheckResult Check(Src value) {
+ typedef std::numeric_limits<Dst> DstLimits;
+ typedef std::numeric_limits<Src> SrcLimits;
+ // Compare based on max_exponent, which we must compute for integrals.
+ static const size_t kDstMaxExponent = sizeof(Dst) * 8;
+ static const size_t kSrcMaxExponent = SrcLimits::is_iec559 ?
+ SrcLimits::max_exponent :
+ (sizeof(Src) * 8 - 1);
+ return (kDstMaxExponent >= kSrcMaxExponent) ?
+ BASE_NUMERIC_RANGE_CHECK_RESULT(true, value >= static_cast<Src>(0)) :
+ BASE_NUMERIC_RANGE_CHECK_RESULT(
+ value <= static_cast<Src>(DstLimits::max()),
+ value >= static_cast<Src>(0));
+ }
+};
+
+template <typename Dst, typename Src>
+inline RangeCheckResult RangeCheck(Src value) {
+ COMPILE_ASSERT(std::numeric_limits<Src>::is_specialized,
+ argument_must_be_numeric);
+ COMPILE_ASSERT(std::numeric_limits<Dst>::is_specialized,
+ result_must_be_numeric);
+ return RangeCheckImpl<Dst, Src>::Check(value);
+}
+
+} // namespace internal
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SAFE_CONVERSIONS_IMPL_H_
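
Concretely, the dispatch above resolves as follows: converting a negative int to uint8_t selects the signed-to-unsigned OVERLAPS_RANGE specialization and fails the lower bound, yielding TYPE_UNDERFLOW; converting 300u to int8_t selects the unsigned-to-signed specialization and fails the upper bound, yielding TYPE_OVERFLOW; and a floating-point NaN fails both bounds at once, which is exactly how the macro composes TYPE_OVERFLOW | TYPE_UNDERFLOW into TYPE_INVALID (3).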
diff --git a/chromium/third_party/webrtc/base/schanneladapter.cc b/chromium/third_party/webrtc/base/schanneladapter.cc
new file mode 100644
index 00000000000..50c0638fd41
--- /dev/null
+++ b/chromium/third_party/webrtc/base/schanneladapter.cc
@@ -0,0 +1,702 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/win32.h"
+#define SECURITY_WIN32
+#include <security.h>
+#include <schannel.h>
+
+#include <iomanip>
+#include <vector>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/schanneladapter.h"
+#include "webrtc/base/sec_buffer.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+/////////////////////////////////////////////////////////////////////////////
+// SChannelAdapter
+/////////////////////////////////////////////////////////////////////////////
+
+extern const ConstantLabel SECURITY_ERRORS[];
+
+const ConstantLabel SCHANNEL_BUFFER_TYPES[] = {
+ KLABEL(SECBUFFER_EMPTY), // 0
+ KLABEL(SECBUFFER_DATA), // 1
+ KLABEL(SECBUFFER_TOKEN), // 2
+ KLABEL(SECBUFFER_PKG_PARAMS), // 3
+ KLABEL(SECBUFFER_MISSING), // 4
+ KLABEL(SECBUFFER_EXTRA), // 5
+ KLABEL(SECBUFFER_STREAM_TRAILER), // 6
+ KLABEL(SECBUFFER_STREAM_HEADER), // 7
+ KLABEL(SECBUFFER_MECHLIST), // 11
+ KLABEL(SECBUFFER_MECHLIST_SIGNATURE), // 12
+ KLABEL(SECBUFFER_TARGET), // 13
+ KLABEL(SECBUFFER_CHANNEL_BINDINGS), // 14
+ LASTLABEL
+};
+
+void DescribeBuffer(LoggingSeverity severity, const char* prefix,
+ const SecBuffer& sb) {
+ LOG_V(severity)
+ << prefix
+ << "(" << sb.cbBuffer
+ << ", " << FindLabel(sb.BufferType & ~SECBUFFER_ATTRMASK,
+ SCHANNEL_BUFFER_TYPES)
+ << ", " << sb.pvBuffer << ")";
+}
+
+void DescribeBuffers(LoggingSeverity severity, const char* prefix,
+ const SecBufferDesc* sbd) {
+ if (!LOG_CHECK_LEVEL_V(severity))
+ return;
+ LOG_V(severity) << prefix << "(";
+ for (size_t i=0; i<sbd->cBuffers; ++i) {
+ DescribeBuffer(severity, " ", sbd->pBuffers[i]);
+ }
+ LOG_V(severity) << ")";
+}
+
+const ULONG SSL_FLAGS_DEFAULT = ISC_REQ_ALLOCATE_MEMORY
+ | ISC_REQ_CONFIDENTIALITY
+ | ISC_REQ_EXTENDED_ERROR
+ | ISC_REQ_INTEGRITY
+ | ISC_REQ_REPLAY_DETECT
+ | ISC_REQ_SEQUENCE_DETECT
+ | ISC_REQ_STREAM;
+ //| ISC_REQ_USE_SUPPLIED_CREDS;
+
+typedef std::vector<char> SChannelBuffer;
+
+struct SChannelAdapter::SSLImpl {
+ CredHandle cred;
+ CtxtHandle ctx;
+ bool cred_init, ctx_init;
+ SChannelBuffer inbuf, outbuf, readable;
+ SecPkgContext_StreamSizes sizes;
+
+ SSLImpl() : cred_init(false), ctx_init(false) { }
+};
+
+SChannelAdapter::SChannelAdapter(AsyncSocket* socket)
+ : SSLAdapter(socket), state_(SSL_NONE),
+ restartable_(false), signal_close_(false), message_pending_(false),
+ impl_(new SSLImpl) {
+}
+
+SChannelAdapter::~SChannelAdapter() {
+ Cleanup();
+}
+
+int
+SChannelAdapter::StartSSL(const char* hostname, bool restartable) {
+ if (state_ != SSL_NONE)
+ return ERROR_ALREADY_INITIALIZED;
+
+ ssl_host_name_ = hostname;
+ restartable_ = restartable;
+
+ if (socket_->GetState() != Socket::CS_CONNECTED) {
+ state_ = SSL_WAIT;
+ return 0;
+ }
+
+ state_ = SSL_CONNECTING;
+ if (int err = BeginSSL()) {
+ Error("BeginSSL", err, false);
+ return err;
+ }
+
+ return 0;
+}
+
+int
+SChannelAdapter::BeginSSL() {
+ LOG(LS_VERBOSE) << "BeginSSL: " << ssl_host_name_;
+ ASSERT(state_ == SSL_CONNECTING);
+
+ SECURITY_STATUS ret;
+
+ SCHANNEL_CRED sc_cred = { 0 };
+ sc_cred.dwVersion = SCHANNEL_CRED_VERSION;
+ //sc_cred.dwMinimumCipherStrength = 128; // Note: use system default
+ sc_cred.dwFlags = SCH_CRED_NO_DEFAULT_CREDS | SCH_CRED_AUTO_CRED_VALIDATION;
+
+ ret = AcquireCredentialsHandle(NULL, UNISP_NAME, SECPKG_CRED_OUTBOUND, NULL,
+ &sc_cred, NULL, NULL, &impl_->cred, NULL);
+ if (ret != SEC_E_OK) {
+ LOG(LS_ERROR) << "AcquireCredentialsHandle error: "
+ << ErrorName(ret, SECURITY_ERRORS);
+ return ret;
+ }
+ impl_->cred_init = true;
+
+ if (LOG_CHECK_LEVEL(LS_VERBOSE)) {
+ SecPkgCred_CipherStrengths cipher_strengths = { 0 };
+ ret = QueryCredentialsAttributes(&impl_->cred,
+ SECPKG_ATTR_CIPHER_STRENGTHS,
+ &cipher_strengths);
+ if (SUCCEEDED(ret)) {
+ LOG(LS_VERBOSE) << "SChannel cipher strength: "
+ << cipher_strengths.dwMinimumCipherStrength << " - "
+ << cipher_strengths.dwMaximumCipherStrength;
+ }
+
+ SecPkgCred_SupportedAlgs supported_algs = { 0 };
+ ret = QueryCredentialsAttributes(&impl_->cred,
+ SECPKG_ATTR_SUPPORTED_ALGS,
+ &supported_algs);
+ if (SUCCEEDED(ret)) {
+ LOG(LS_VERBOSE) << "SChannel supported algorithms:";
+ for (DWORD i=0; i<supported_algs.cSupportedAlgs; ++i) {
+ ALG_ID alg_id = supported_algs.palgSupportedAlgs[i];
+ PCCRYPT_OID_INFO oinfo = CryptFindOIDInfo(CRYPT_OID_INFO_ALGID_KEY,
+ &alg_id, 0);
+ LPCWSTR alg_name = (NULL != oinfo) ? oinfo->pwszName : L"Unknown";
+ LOG(LS_VERBOSE) << " " << ToUtf8(alg_name) << " (" << alg_id << ")";
+ }
+ CSecBufferBase::FreeSSPI(supported_algs.palgSupportedAlgs);
+ }
+ }
+
+ ULONG flags = SSL_FLAGS_DEFAULT, ret_flags = 0;
+ if (ignore_bad_cert())
+ flags |= ISC_REQ_MANUAL_CRED_VALIDATION;
+
+ CSecBufferBundle<2, CSecBufferBase::FreeSSPI> sb_out;
+ ret = InitializeSecurityContextA(&impl_->cred, NULL,
+ const_cast<char*>(ssl_host_name_.c_str()),
+ flags, 0, 0, NULL, 0,
+ &impl_->ctx, sb_out.desc(),
+ &ret_flags, NULL);
+ if (SUCCEEDED(ret))
+ impl_->ctx_init = true;
+ return ProcessContext(ret, NULL, sb_out.desc());
+}
+
+int
+SChannelAdapter::ContinueSSL() {
+ LOG(LS_VERBOSE) << "ContinueSSL";
+ ASSERT(state_ == SSL_CONNECTING);
+
+ SECURITY_STATUS ret;
+
+ CSecBufferBundle<2> sb_in;
+ sb_in[0].BufferType = SECBUFFER_TOKEN;
+ sb_in[0].cbBuffer = static_cast<unsigned long>(impl_->inbuf.size());
+ sb_in[0].pvBuffer = &impl_->inbuf[0];
+ //DescribeBuffers(LS_VERBOSE, "Input Buffer ", sb_in.desc());
+
+ ULONG flags = SSL_FLAGS_DEFAULT, ret_flags = 0;
+ if (ignore_bad_cert())
+ flags |= ISC_REQ_MANUAL_CRED_VALIDATION;
+
+ CSecBufferBundle<2, CSecBufferBase::FreeSSPI> sb_out;
+ ret = InitializeSecurityContextA(&impl_->cred, &impl_->ctx,
+ const_cast<char*>(ssl_host_name_.c_str()),
+ flags, 0, 0, sb_in.desc(), 0,
+ NULL, sb_out.desc(),
+ &ret_flags, NULL);
+ return ProcessContext(ret, sb_in.desc(), sb_out.desc());
+}
+
+int
+SChannelAdapter::ProcessContext(long int status, _SecBufferDesc* sbd_in,
+ _SecBufferDesc* sbd_out) {
+ if (status != SEC_E_OK && status != SEC_I_CONTINUE_NEEDED &&
+ status != SEC_E_INCOMPLETE_MESSAGE) {
+ LOG(LS_ERROR)
+ << "InitializeSecurityContext error: "
+ << ErrorName(status, SECURITY_ERRORS);
+ }
+ //if (sbd_in)
+ // DescribeBuffers(LS_VERBOSE, "Input Buffer ", sbd_in);
+ //if (sbd_out)
+ // DescribeBuffers(LS_VERBOSE, "Output Buffer ", sbd_out);
+
+ if (status == SEC_E_INCOMPLETE_MESSAGE) {
+ // Wait for more input from server.
+ return Flush();
+ }
+
+ if (FAILED(status)) {
+ // We can't continue. Common errors:
+ // SEC_E_CERT_EXPIRED - Typically, this means the computer clock is wrong.
+ return status;
+ }
+
+ // Note: we check both input and output buffers for SECBUFFER_EXTRA.
+ // Experience shows it appearing in the input, but the documentation claims
+ // it should appear in the output.
+ size_t extra = 0;
+ if (sbd_in) {
+ for (size_t i=0; i<sbd_in->cBuffers; ++i) {
+ SecBuffer& buffer = sbd_in->pBuffers[i];
+ if (buffer.BufferType == SECBUFFER_EXTRA) {
+ extra += buffer.cbBuffer;
+ }
+ }
+ }
+ if (sbd_out) {
+ for (size_t i=0; i<sbd_out->cBuffers; ++i) {
+ SecBuffer& buffer = sbd_out->pBuffers[i];
+ if (buffer.BufferType == SECBUFFER_EXTRA) {
+ extra += buffer.cbBuffer;
+ } else if (buffer.BufferType == SECBUFFER_TOKEN) {
+ impl_->outbuf.insert(impl_->outbuf.end(),
+ reinterpret_cast<char*>(buffer.pvBuffer),
+ reinterpret_cast<char*>(buffer.pvBuffer) + buffer.cbBuffer);
+ }
+ }
+ }
+
+ if (extra) {
+ ASSERT(extra <= impl_->inbuf.size());
+ size_t consumed = impl_->inbuf.size() - extra;
+ memmove(&impl_->inbuf[0], &impl_->inbuf[consumed], extra);
+ impl_->inbuf.resize(extra);
+ } else {
+ impl_->inbuf.clear();
+ }
+
+ if (SEC_I_CONTINUE_NEEDED == status) {
+ // Send data to server and wait for response.
+ // Note: ContinueSSL will result in a Flush, anyway.
+ return impl_->inbuf.empty() ? Flush() : ContinueSSL();
+ }
+
+ if (SEC_E_OK == status) {
+ LOG(LS_VERBOSE) << "QueryContextAttributes";
+ status = QueryContextAttributes(&impl_->ctx, SECPKG_ATTR_STREAM_SIZES,
+ &impl_->sizes);
+ if (FAILED(status)) {
+ LOG(LS_ERROR) << "QueryContextAttributes error: "
+ << ErrorName(status, SECURITY_ERRORS);
+ return status;
+ }
+
+ state_ = SSL_CONNECTED;
+
+ if (int err = DecryptData()) {
+ return err;
+ } else if (int err = Flush()) {
+ return err;
+ } else {
+ // If we decrypted any data, queue up a notification here
+ PostEvent();
+ // Signal our connectedness
+ AsyncSocketAdapter::OnConnectEvent(this);
+ }
+ return 0;
+ }
+
+ if (SEC_I_INCOMPLETE_CREDENTIALS == status) {
+ // We don't support client authentication in schannel.
+ return status;
+ }
+
+ // We don't expect any other codes
+ ASSERT(false);
+ return status;
+}
+
+int
+SChannelAdapter::DecryptData() {
+ SChannelBuffer& inbuf = impl_->inbuf;
+ SChannelBuffer& readable = impl_->readable;
+
+ while (!inbuf.empty()) {
+ CSecBufferBundle<4> in_buf;
+ in_buf[0].BufferType = SECBUFFER_DATA;
+ in_buf[0].cbBuffer = static_cast<unsigned long>(inbuf.size());
+ in_buf[0].pvBuffer = &inbuf[0];
+
+ //DescribeBuffers(LS_VERBOSE, "Decrypt In ", in_buf.desc());
+ SECURITY_STATUS status = DecryptMessage(&impl_->ctx, in_buf.desc(), 0, 0);
+ //DescribeBuffers(LS_VERBOSE, "Decrypt Out ", in_buf.desc());
+
+ // Note: We are explicitly treating SEC_E_OK, SEC_I_CONTEXT_EXPIRED, and
+ // any other successful results as continue.
+ if (SUCCEEDED(status)) {
+ size_t data_len = 0, extra_len = 0;
+ for (size_t i=0; i<in_buf.desc()->cBuffers; ++i) {
+ if (in_buf[i].BufferType == SECBUFFER_DATA) {
+ data_len += in_buf[i].cbBuffer;
+ readable.insert(readable.end(),
+ reinterpret_cast<char*>(in_buf[i].pvBuffer),
+ reinterpret_cast<char*>(in_buf[i].pvBuffer) + in_buf[i].cbBuffer);
+ } else if (in_buf[i].BufferType == SECBUFFER_EXTRA) {
+ extra_len += in_buf[i].cbBuffer;
+ }
+ }
+ // There is a bug on Win2K where SEC_I_CONTEXT_EXPIRED is misclassified.
+ if ((data_len == 0) && (inbuf[0] == 0x15)) {
+ status = SEC_I_CONTEXT_EXPIRED;
+ }
+ if (extra_len) {
+ size_t consumed = inbuf.size() - extra_len;
+ memmove(&inbuf[0], &inbuf[consumed], extra_len);
+ inbuf.resize(extra_len);
+ } else {
+ inbuf.clear();
+ }
+ // TODO: Handle SEC_I_CONTEXT_EXPIRED to do clean shutdown
+ if (status != SEC_E_OK) {
+ LOG(LS_INFO) << "DecryptMessage returned continuation code: "
+ << ErrorName(status, SECURITY_ERRORS);
+ }
+ continue;
+ }
+
+ if (status == SEC_E_INCOMPLETE_MESSAGE) {
+ break;
+ } else {
+ return status;
+ }
+ }
+
+ return 0;
+}
+
+void
+SChannelAdapter::Cleanup() {
+ if (impl_->ctx_init)
+ DeleteSecurityContext(&impl_->ctx);
+ if (impl_->cred_init)
+ FreeCredentialsHandle(&impl_->cred);
+ delete impl_;
+}
+
+void
+SChannelAdapter::PostEvent() {
+ // Check if there's anything notable to signal
+ if (impl_->readable.empty() && !signal_close_)
+ return;
+
+ // Only one post in the queue at a time
+ if (message_pending_)
+ return;
+
+ if (Thread* thread = Thread::Current()) {
+ message_pending_ = true;
+ thread->Post(this);
+ } else {
+ LOG(LS_ERROR) << "No thread context available for SChannelAdapter";
+ ASSERT(false);
+ }
+}
+
+void
+SChannelAdapter::Error(const char* context, int err, bool signal) {
+ LOG(LS_WARNING) << "SChannelAdapter::Error("
+ << context << ", "
+ << ErrorName(err, SECURITY_ERRORS) << ")";
+ state_ = SSL_ERROR;
+ SetError(err);
+ if (signal)
+ AsyncSocketAdapter::OnCloseEvent(this, err);
+}
+
+int
+SChannelAdapter::Read() {
+ char buffer[4096];
+ SChannelBuffer& inbuf = impl_->inbuf;
+ while (true) {
+ int ret = AsyncSocketAdapter::Recv(buffer, sizeof(buffer));
+ if (ret > 0) {
+ inbuf.insert(inbuf.end(), buffer, buffer + ret);
+ } else if (GetError() == EWOULDBLOCK) {
+ return 0; // Blocking
+ } else {
+ return GetError();
+ }
+ }
+}
+
+int
+SChannelAdapter::Flush() {
+ int result = 0;
+ size_t pos = 0;
+ SChannelBuffer& outbuf = impl_->outbuf;
+ while (pos < outbuf.size()) {
+ int sent = AsyncSocketAdapter::Send(&outbuf[pos], outbuf.size() - pos);
+ if (sent > 0) {
+ pos += sent;
+ } else if (GetError() == EWOULDBLOCK) {
+ break; // Blocking
+ } else {
+ result = GetError();
+ break;
+ }
+ }
+ if (int remainder = static_cast<int>(outbuf.size() - pos)) {
+ memmove(&outbuf[0], &outbuf[pos], remainder);
+ outbuf.resize(remainder);
+ } else {
+ outbuf.clear();
+ }
+ return result;
+}
+
+//
+// AsyncSocket Implementation
+//
+
+int
+SChannelAdapter::Send(const void* pv, size_t cb) {
+ switch (state_) {
+ case SSL_NONE:
+ return AsyncSocketAdapter::Send(pv, cb);
+
+ case SSL_WAIT:
+ case SSL_CONNECTING:
+ SetError(EWOULDBLOCK);
+ return SOCKET_ERROR;
+
+ case SSL_CONNECTED:
+ break;
+
+ case SSL_ERROR:
+ default:
+ return SOCKET_ERROR;
+ }
+
+ size_t written = 0;
+ SChannelBuffer& outbuf = impl_->outbuf;
+ while (written < cb) {
+ const size_t encrypt_len = std::min<size_t>(cb - written,
+ impl_->sizes.cbMaximumMessage);
+
+ CSecBufferBundle<4> out_buf;
+ out_buf[0].BufferType = SECBUFFER_STREAM_HEADER;
+ out_buf[0].cbBuffer = impl_->sizes.cbHeader;
+ out_buf[1].BufferType = SECBUFFER_DATA;
+ out_buf[1].cbBuffer = static_cast<unsigned long>(encrypt_len);
+ out_buf[2].BufferType = SECBUFFER_STREAM_TRAILER;
+ out_buf[2].cbBuffer = impl_->sizes.cbTrailer;
+
+ size_t packet_len = out_buf[0].cbBuffer
+ + out_buf[1].cbBuffer
+ + out_buf[2].cbBuffer;
+
+ SChannelBuffer message;
+ message.resize(packet_len);
+ out_buf[0].pvBuffer = &message[0];
+ out_buf[1].pvBuffer = &message[out_buf[0].cbBuffer];
+ out_buf[2].pvBuffer = &message[out_buf[0].cbBuffer + out_buf[1].cbBuffer];
+
+ memcpy(out_buf[1].pvBuffer,
+ static_cast<const char*>(pv) + written,
+ encrypt_len);
+
+ //DescribeBuffers(LS_VERBOSE, "Encrypt In ", out_buf.desc());
+ SECURITY_STATUS res = EncryptMessage(&impl_->ctx, 0, out_buf.desc(), 0);
+ //DescribeBuffers(LS_VERBOSE, "Encrypt Out ", out_buf.desc());
+
+ if (FAILED(res)) {
+ Error("EncryptMessage", res, false);
+ return SOCKET_ERROR;
+ }
+
+ // We assume that the header and data segments do not change length,
+ // or else encrypting the concatenated packet in-place is wrong.
+ ASSERT(out_buf[0].cbBuffer == impl_->sizes.cbHeader);
+ ASSERT(out_buf[1].cbBuffer == static_cast<unsigned long>(encrypt_len));
+
+ // However, the length of the trailer may change due to padding.
+ ASSERT(out_buf[2].cbBuffer <= impl_->sizes.cbTrailer);
+
+ packet_len = out_buf[0].cbBuffer
+ + out_buf[1].cbBuffer
+ + out_buf[2].cbBuffer;
+
+ written += encrypt_len;
+ outbuf.insert(outbuf.end(), &message[0], &message[packet_len-1]+1);
+ }
+
+ if (int err = Flush()) {
+ state_ = SSL_ERROR;
+ SetError(err);
+ return SOCKET_ERROR;
+ }
+
+ return static_cast<int>(written);
+}
+
+int
+SChannelAdapter::Recv(void* pv, size_t cb) {
+ switch (state_) {
+ case SSL_NONE:
+ return AsyncSocketAdapter::Recv(pv, cb);
+
+ case SSL_WAIT:
+ case SSL_CONNECTING:
+ SetError(EWOULDBLOCK);
+ return SOCKET_ERROR;
+
+ case SSL_CONNECTED:
+ break;
+
+ case SSL_ERROR:
+ default:
+ return SOCKET_ERROR;
+ }
+
+ SChannelBuffer& readable = impl_->readable;
+ if (readable.empty()) {
+ SetError(EWOULDBLOCK);
+ return SOCKET_ERROR;
+ }
+ size_t read = _min(cb, readable.size());
+ memcpy(pv, &readable[0], read);
+ if (size_t remaining = readable.size() - read) {
+ memmove(&readable[0], &readable[read], remaining);
+ readable.resize(remaining);
+ } else {
+ readable.clear();
+ }
+
+ PostEvent();
+ return static_cast<int>(read);
+}
+
+int
+SChannelAdapter::Close() {
+ if (!impl_->readable.empty()) {
+ LOG(WARNING) << "SChannelAdapter::Close with readable data";
+ // Note: this isn't strictly an error, but we're using it temporarily to
+ // track bugs.
+ //ASSERT(false);
+ }
+ if (state_ == SSL_CONNECTED) {
+ DWORD token = SCHANNEL_SHUTDOWN;
+ CSecBufferBundle<1> sb_in;
+ sb_in[0].BufferType = SECBUFFER_TOKEN;
+ sb_in[0].cbBuffer = sizeof(token);
+ sb_in[0].pvBuffer = &token;
+ ApplyControlToken(&impl_->ctx, sb_in.desc());
+ // TODO: In theory, to do a nice shutdown, we need to begin shutdown
+ // negotiation with more calls to InitializeSecurityContext. Since the
+ // socket api doesn't support nice shutdown at this point, we don't bother.
+ }
+ Cleanup();
+ impl_ = new SSLImpl;
+ state_ = restartable_ ? SSL_WAIT : SSL_NONE;
+ signal_close_ = false;
+ message_pending_ = false;
+ return AsyncSocketAdapter::Close();
+}
+
+Socket::ConnState
+SChannelAdapter::GetState() const {
+ if (signal_close_)
+ return CS_CONNECTED;
+ ConnState state = socket_->GetState();
+ if ((state == CS_CONNECTED)
+ && ((state_ == SSL_WAIT) || (state_ == SSL_CONNECTING)))
+ state = CS_CONNECTING;
+ return state;
+}
+
+void
+SChannelAdapter::OnConnectEvent(AsyncSocket* socket) {
+ LOG(LS_VERBOSE) << "SChannelAdapter::OnConnectEvent";
+ if (state_ != SSL_WAIT) {
+ ASSERT(state_ == SSL_NONE);
+ AsyncSocketAdapter::OnConnectEvent(socket);
+ return;
+ }
+
+ state_ = SSL_CONNECTING;
+ if (int err = BeginSSL()) {
+ Error("BeginSSL", err);
+ }
+}
+
+void
+SChannelAdapter::OnReadEvent(AsyncSocket* socket) {
+ if (state_ == SSL_NONE) {
+ AsyncSocketAdapter::OnReadEvent(socket);
+ return;
+ }
+
+ if (int err = Read()) {
+ Error("Read", err);
+ return;
+ }
+
+ if (impl_->inbuf.empty())
+ return;
+
+ if (state_ == SSL_CONNECTED) {
+ if (int err = DecryptData()) {
+ Error("DecryptData", err);
+ } else if (!impl_->readable.empty()) {
+ AsyncSocketAdapter::OnReadEvent(this);
+ }
+ } else if (state_ == SSL_CONNECTING) {
+ if (int err = ContinueSSL()) {
+ Error("ContinueSSL", err);
+ }
+ }
+}
+
+void
+SChannelAdapter::OnWriteEvent(AsyncSocket* socket) {
+ if (state_ == SSL_NONE) {
+ AsyncSocketAdapter::OnWriteEvent(socket);
+ return;
+ }
+
+ if (int err = Flush()) {
+ Error("Flush", err);
+ return;
+ }
+
+ // See if we have more data to write
+ if (!impl_->outbuf.empty())
+ return;
+
+ // Buffer is empty, submit notification
+ if (state_ == SSL_CONNECTED) {
+ AsyncSocketAdapter::OnWriteEvent(socket);
+ }
+}
+
+void
+SChannelAdapter::OnCloseEvent(AsyncSocket* socket, int err) {
+ if ((state_ == SSL_NONE) || impl_->readable.empty()) {
+ AsyncSocketAdapter::OnCloseEvent(socket, err);
+ return;
+ }
+
+ // If readable is non-empty, then we have a pending Message
+ // that will allow us to signal close (eventually).
+ signal_close_ = true;
+}
+
+void
+SChannelAdapter::OnMessage(Message* pmsg) {
+ if (!message_pending_)
+ return; // This occurs when socket is closed
+
+ message_pending_ = false;
+ if (!impl_->readable.empty()) {
+ AsyncSocketAdapter::OnReadEvent(this);
+ } else if (signal_close_) {
+ signal_close_ = false;
+ AsyncSocketAdapter::OnCloseEvent(this, 0); // TODO: cache this error?
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/schanneladapter.h b/chromium/third_party/webrtc/base/schanneladapter.h
new file mode 100644
index 00000000000..d174b593f8b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/schanneladapter.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SCHANNELADAPTER_H__
+#define WEBRTC_BASE_SCHANNELADAPTER_H__
+
+#include <string>
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/messagequeue.h"
+struct _SecBufferDesc;
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+
+class SChannelAdapter : public SSLAdapter, public MessageHandler {
+public:
+ SChannelAdapter(AsyncSocket* socket);
+ virtual ~SChannelAdapter();
+
+ virtual int StartSSL(const char* hostname, bool restartable);
+ virtual int Send(const void* pv, size_t cb);
+ virtual int Recv(void* pv, size_t cb);
+ virtual int Close();
+
+ // Note that the socket returns CS_CONNECTING while SSL is being negotiated.
+ virtual ConnState GetState() const;
+
+protected:
+ enum SSLState {
+ SSL_NONE, SSL_WAIT, SSL_CONNECTING, SSL_CONNECTED, SSL_ERROR
+ };
+ struct SSLImpl;
+
+ virtual void OnConnectEvent(AsyncSocket* socket);
+ virtual void OnReadEvent(AsyncSocket* socket);
+ virtual void OnWriteEvent(AsyncSocket* socket);
+ virtual void OnCloseEvent(AsyncSocket* socket, int err);
+ virtual void OnMessage(Message* pmsg);
+
+ int BeginSSL();
+ int ContinueSSL();
+ int ProcessContext(long int status, _SecBufferDesc* sbd_in,
+ _SecBufferDesc* sbd_out);
+ int DecryptData();
+
+ int Read();
+ int Flush();
+ void Error(const char* context, int err, bool signal = true);
+ void Cleanup();
+
+ void PostEvent();
+
+private:
+ SSLState state_;
+ std::string ssl_host_name_;
+ // If true, socket will retain SSL configuration after Close.
+ bool restartable_;
+ // If true, we are delaying signalling close until all data is read.
+ bool signal_close_;
+ // If true, we are waiting to be woken up to signal readability or closure.
+ bool message_pending_;
+ SSLImpl* impl_;
+};
+
+/////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SCHANNELADAPTER_H__
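
A hypothetical usage sketch (not part of the patch; Windows-only, and the caller is assumed to have created the raw AsyncSocket elsewhere):

```cpp
#include "webrtc/base/schanneladapter.h"
#include "webrtc/base/socketaddress.h"

// Sketch: wrap an unconnected AsyncSocket and negotiate TLS once it connects.
rtc::AsyncSocket* WrapWithTls(rtc::AsyncSocket* raw,
                              const rtc::SocketAddress& server,
                              const char* host) {
  rtc::SChannelAdapter* ssl = new rtc::SChannelAdapter(raw);
  ssl->StartSSL(host, false);  // socket not connected yet, so state_ is SSL_WAIT
  ssl->Connect(server);        // OnConnectEvent() later triggers BeginSSL()
  return ssl;                  // read and write through the adapter from now on
}
```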
diff --git a/chromium/third_party/webrtc/base/scoped_autorelease_pool.h b/chromium/third_party/webrtc/base/scoped_autorelease_pool.h
new file mode 100644
index 00000000000..d9cc3cb3625
--- /dev/null
+++ b/chromium/third_party/webrtc/base/scoped_autorelease_pool.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Automatically initialize and free an autorelease pool. Never allocate
+// an instance of this class using "new" - that will result in a compile-time
+// error. Only use it as a stack object.
+//
+// Note: NSAutoreleasePool docs say that you should not normally need to
+// declare an NSAutoreleasePool as a member of an object - but there's nothing
+// that indicates it will be a problem, as long as the stack lifetime of the
+// pool exactly matches the stack lifetime of the object.
+
+#ifndef WEBRTC_BASE_SCOPED_AUTORELEASE_POOL_H__
+#define WEBRTC_BASE_SCOPED_AUTORELEASE_POOL_H__
+
+#if defined(WEBRTC_MAC)
+
+#include "webrtc/base/common.h"
+
+// This header may be included from Obj-C files or C++ files.
+#ifdef __OBJC__
+@class NSAutoreleasePool;
+#else
+class NSAutoreleasePool;
+#endif
+
+namespace rtc {
+
+class ScopedAutoreleasePool {
+ public:
+ ScopedAutoreleasePool();
+ ~ScopedAutoreleasePool();
+
+ private:
+ // Declaring private overrides of new and delete here enforces the "only use
+ // as a stack object" discipline.
+ //
+ // Note: new is declared as "throw()" to get around a gcc warning about new
+ // returning NULL, but this method will never get called and therefore will
+ // never actually throw any exception.
+ void* operator new(size_t size) throw() { return NULL; }
+ void operator delete (void* ptr) {}
+
+ NSAutoreleasePool* pool_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(ScopedAutoreleasePool);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_MAC
+#endif // WEBRTC_BASE_SCOPED_AUTORELEASE_POOL_H__
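
A minimal usage sketch following the stack-object rule above (Mac-only; illustrative):

```cpp
#include "webrtc/base/scoped_autorelease_pool.h"

#if defined(WEBRTC_MAC)
void PumpCocoaWork() {
  rtc::ScopedAutoreleasePool pool;  // stack object, as the comment requires
  // ... code that autoreleases Objective-C objects runs here ...
}  // |pool| is drained here; heap-allocating the pool would not even compile
#endif
```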
diff --git a/chromium/third_party/webrtc/base/scoped_autorelease_pool.mm b/chromium/third_party/webrtc/base/scoped_autorelease_pool.mm
new file mode 100644
index 00000000000..4176aad0e9c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/scoped_autorelease_pool.mm
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "webrtc/base/scoped_autorelease_pool.h"
+
+namespace rtc {
+
+ScopedAutoreleasePool::ScopedAutoreleasePool() {
+ pool_ = [[NSAutoreleasePool alloc] init];
+}
+
+ScopedAutoreleasePool::~ScopedAutoreleasePool() {
+ [pool_ drain];
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/scoped_ptr.h b/chromium/third_party/webrtc/base/scoped_ptr.h
new file mode 100644
index 00000000000..0c0a637a2e7
--- /dev/null
+++ b/chromium/third_party/webrtc/base/scoped_ptr.h
@@ -0,0 +1,595 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Scopers help you manage ownership of a pointer, helping you easily manage
+// a pointer within a scope, and automatically destroying the pointer at the
+// end of a scope. There are two main classes you will use, which correspond
+// to the operators new/delete and new[]/delete[].
+//
+// Example usage (scoped_ptr<T>):
+// {
+// scoped_ptr<Foo> foo(new Foo("wee"));
+// } // foo goes out of scope, releasing the pointer with it.
+//
+// {
+// scoped_ptr<Foo> foo; // No pointer managed.
+// foo.reset(new Foo("wee")); // Now a pointer is managed.
+// foo.reset(new Foo("wee2")); // Foo("wee") was destroyed.
+// foo.reset(new Foo("wee3")); // Foo("wee2") was destroyed.
+// foo->Method(); // Foo::Method() called.
+// foo.get()->Method(); // Foo::Method() called.
+// SomeFunc(foo.release()); // SomeFunc takes ownership, foo no longer
+// // manages a pointer.
+// foo.reset(new Foo("wee4")); // foo manages a pointer again.
+// foo.reset(); // Foo("wee4") destroyed, foo no longer
+// // manages a pointer.
+// } // foo wasn't managing a pointer, so nothing was destroyed.
+//
+// Example usage (scoped_ptr<T[]>):
+// {
+// scoped_ptr<Foo[]> foo(new Foo[100]);
+// foo.get()->Method(); // Foo::Method on the 0th element.
+// foo[10].Method(); // Foo::Method on the 10th element.
+// }
+//
+// These scopers also implement part of the functionality of C++11 unique_ptr
+// in that they are "movable but not copyable." You can use the scopers in
+// the parameter and return types of functions to signify ownership transfer
+// in to and out of a function. When calling a function that has a scoper
+// as the argument type, it must be called with the result of an analogous
+// scoper's Pass() function or another function that generates a temporary;
+// passing by copy will NOT work. Here is an example using scoped_ptr:
+//
+// void TakesOwnership(scoped_ptr<Foo> arg) {
+// // Do something with arg
+// }
+// scoped_ptr<Foo> CreateFoo() {
+// // No need for calling Pass() because we are constructing a temporary
+// // for the return value.
+// return scoped_ptr<Foo>(new Foo("new"));
+// }
+// scoped_ptr<Foo> PassThru(scoped_ptr<Foo> arg) {
+// return arg.Pass();
+// }
+//
+// {
+// scoped_ptr<Foo> ptr(new Foo("yay")); // ptr manages Foo("yay").
+// TakesOwnership(ptr.Pass()); // ptr no longer owns Foo("yay").
+// scoped_ptr<Foo> ptr2 = CreateFoo(); // ptr2 owns the return Foo.
+// scoped_ptr<Foo> ptr3 = // ptr3 now owns what was in ptr2.
+// PassThru(ptr2.Pass()); // ptr2 is correspondingly NULL.
+// }
+//
+// Notice that if you do not call Pass() when returning from PassThru(), or
+// when invoking TakesOwnership(), the code will not compile because scopers
+// are not copyable; they only implement move semantics which require calling
+// the Pass() function to signify a destructive transfer of state. CreateFoo()
+// is different though because we are constructing a temporary on the return
+// line and thus can avoid needing to call Pass().
+//
+// Pass() properly handles upcast in initialization, i.e. you can use a
+// scoped_ptr<Child> to initialize a scoped_ptr<Parent>:
+//
+// scoped_ptr<Foo> foo(new Foo());
+// scoped_ptr<FooParent> parent(foo.Pass());
+//
+// PassAs<>() should be used to upcast return value in return statement:
+//
+// scoped_ptr<Foo> CreateFoo() {
+// scoped_ptr<FooChild> result(new FooChild());
+// return result.PassAs<Foo>();
+// }
+//
+// Note that PassAs<>() is implemented only for scoped_ptr<T>, but not for
+// scoped_ptr<T[]>. This is because casting array pointers may not be safe.
+
+#ifndef WEBRTC_BASE_SCOPED_PTR_H__
+#define WEBRTC_BASE_SCOPED_PTR_H__
+
+#include <stddef.h> // for ptrdiff_t
+#include <stdlib.h> // for free() decl
+
+#include <algorithm> // For std::swap().
+
+#include "webrtc/base/common.h" // for ASSERT
+#include "webrtc/base/compile_assert.h" // for COMPILE_ASSERT
+#include "webrtc/base/move.h" // for TALK_MOVE_ONLY_TYPE_FOR_CPP_03
+#include "webrtc/base/template_util.h" // for is_convertible, is_array
+
+#ifdef WEBRTC_WIN
+namespace std { using ::ptrdiff_t; };
+#endif // WEBRTC_WIN
+
+namespace rtc {
+
+// Function object which deletes its parameter, which must be a pointer.
+// If C is an array type, invokes 'delete[]' on the parameter; otherwise,
+// invokes 'delete'. The default deleter for scoped_ptr<T>.
+template <class T>
+struct DefaultDeleter {
+ DefaultDeleter() {}
+ template <typename U> DefaultDeleter(const DefaultDeleter<U>& other) {
+ // IMPLEMENTATION NOTE: C++11 20.7.1.1.2p2 only provides this constructor
+ // if U* is implicitly convertible to T* and U is not an array type.
+ //
+ // Correct implementation should use SFINAE to disable this
+ // constructor. However, since there are no other 1-argument constructors,
+ // using a COMPILE_ASSERT() based on is_convertible<> and requiring
+ // complete types is simpler and will cause compile failures for equivalent
+ // misuses.
+ //
+ // Note, the is_convertible<U*, T*> check also ensures that U is not an
+ // array. T is guaranteed to be a non-array, so any U* where U is an array
+ // cannot convert to T*.
+ enum { T_must_be_complete = sizeof(T) };
+ enum { U_must_be_complete = sizeof(U) };
+ COMPILE_ASSERT((rtc::is_convertible<U*, T*>::value),
+ U_ptr_must_implicitly_convert_to_T_ptr);
+ }
+ inline void operator()(T* ptr) const {
+ enum { type_must_be_complete = sizeof(T) };
+ delete ptr;
+ }
+};
+
+// Specialization of DefaultDeleter for array types.
+template <class T>
+struct DefaultDeleter<T[]> {
+ inline void operator()(T* ptr) const {
+ enum { type_must_be_complete = sizeof(T) };
+ delete[] ptr;
+ }
+
+ private:
+ // Disable this operator for any U != T because it is undefined to execute
+ // an array delete when the static type of the array mismatches the dynamic
+ // type.
+ //
+ // References:
+ // C++98 [expr.delete]p3
+ // http://cplusplus.github.com/LWG/lwg-defects.html#938
+ template <typename U> void operator()(U* array) const;
+};
+
+template <class T, int n>
+struct DefaultDeleter<T[n]> {
+ // Never allow someone to declare something like scoped_ptr<int[10]>.
+ COMPILE_ASSERT(sizeof(T) == -1, do_not_use_array_with_size_as_type);
+};
+
+// Function object which invokes 'free' on its parameter, which must be
+// a pointer. Can be used to store malloc-allocated pointers in scoped_ptr:
+//
+// scoped_ptr<int, rtc::FreeDeleter> foo_ptr(
+// static_cast<int*>(malloc(sizeof(int))));
+struct FreeDeleter {
+ inline void operator()(void* ptr) const {
+ free(ptr);
+ }
+};
+
+namespace internal {
+
+// Minimal implementation of the core logic of scoped_ptr, suitable for
+// reuse in both scoped_ptr and its specializations.
+template <class T, class D>
+class scoped_ptr_impl {
+ public:
+ explicit scoped_ptr_impl(T* p) : data_(p) { }
+
+ // Initializer for deleters that have data parameters.
+ scoped_ptr_impl(T* p, const D& d) : data_(p, d) {}
+
+ // Templated constructor that destructively takes the value from another
+ // scoped_ptr_impl.
+ template <typename U, typename V>
+ scoped_ptr_impl(scoped_ptr_impl<U, V>* other)
+ : data_(other->release(), other->get_deleter()) {
+ // We do not support move-only deleters. We could modify our move
+ // emulation to have rtc::subtle::move() and
+ // rtc::subtle::forward()
+ // functions that are imperfect emulations of their C++11 equivalents,
+ // but until there's a requirement, just assume deleters are copyable.
+ }
+
+ template <typename U, typename V>
+ void TakeState(scoped_ptr_impl<U, V>* other) {
+ // See comment in templated constructor above regarding lack of support
+ // for move-only deleters.
+ reset(other->release());
+ get_deleter() = other->get_deleter();
+ }
+
+ ~scoped_ptr_impl() {
+ if (data_.ptr != NULL) {
+ // Not using get_deleter() saves one function call in non-optimized
+ // builds.
+ static_cast<D&>(data_)(data_.ptr);
+ }
+ }
+
+ void reset(T* p) {
+ // This is a self-reset, which is no longer allowed: http://crbug.com/162971
+ if (p != NULL && p == data_.ptr)
+ abort();
+
+ // Note that running data_.ptr = p can lead to undefined behavior if
+ // get_deleter()(get()) deletes this. In order to prevent this, reset()
+ // should update the stored pointer before deleting its old value.
+ //
+ // However, changing reset() to use that behavior may cause current code to
+ // break in unexpected ways. If the destruction of the owned object
+ // dereferences the scoped_ptr when it is destroyed by a call to reset(),
+ // then it will incorrectly dispatch calls to |p| rather than the original
+ // value of |data_.ptr|.
+ //
+ // During the transition period, set the stored pointer to NULL while
+ // deleting the object. Eventually, this safety check will be removed to
+ // prevent the scenario initially described from occurring and
+ // http://crbug.com/176091 can be closed.
+ T* old = data_.ptr;
+ data_.ptr = NULL;
+ if (old != NULL)
+ static_cast<D&>(data_)(old);
+ data_.ptr = p;
+ }
+
+ T* get() const { return data_.ptr; }
+
+ D& get_deleter() { return data_; }
+ const D& get_deleter() const { return data_; }
+
+ void swap(scoped_ptr_impl& p2) {
+ // Standard swap idiom: 'using std::swap' ensures that std::swap is
+ // present in the overload set, but we call swap unqualified so that
+ // any more-specific overloads can be used, if available.
+ using std::swap;
+ swap(static_cast<D&>(data_), static_cast<D&>(p2.data_));
+ swap(data_.ptr, p2.data_.ptr);
+ }
+
+ T* release() {
+ T* old_ptr = data_.ptr;
+ data_.ptr = NULL;
+ return old_ptr;
+ }
+
+ T** accept() {
+ reset(NULL);
+ return &(data_.ptr);
+ }
+
+ T** use() {
+ return &(data_.ptr);
+ }
+
+ private:
+ // Needed to allow type-converting constructor.
+ template <typename U, typename V> friend class scoped_ptr_impl;
+
+ // Use the empty base class optimization to allow us to have a D
+ // member, while avoiding any space overhead for it when D is an
+ // empty class. See e.g. http://www.cantrip.org/emptyopt.html for a good
+ // discussion of this technique.
+ struct Data : public D {
+ explicit Data(T* ptr_in) : ptr(ptr_in) {}
+ Data(T* ptr_in, const D& other) : D(other), ptr(ptr_in) {}
+ T* ptr;
+ };
+
+ Data data_;
+
+ DISALLOW_COPY_AND_ASSIGN(scoped_ptr_impl);
+};
+
+} // namespace internal
+
+// A scoped_ptr<T> is like a T*, except that the destructor of scoped_ptr<T>
+// automatically deletes the pointer it holds (if any).
+// That is, scoped_ptr<T> owns the T object that it points to.
+// Like a T*, a scoped_ptr<T> may hold either NULL or a pointer to a T object.
+// Also like T*, scoped_ptr<T> is thread-compatible, and once you
+// dereference it, you get the thread safety guarantees of T.
+//
+// The size of scoped_ptr is small. On most compilers, when using the
+// DefaultDeleter, sizeof(scoped_ptr<T>) == sizeof(T*). Custom deleters will
+// increase the size proportional to whatever state they need to have. See
+// comments inside scoped_ptr_impl<> for details.
+//
+// Current implementation targets having a strict subset of C++11's
+// unique_ptr<> features. Known deficiencies include not supporting move-only
+// deleters, function pointers as deleters, and deleters with reference
+// types.
+template <class T, class D = rtc::DefaultDeleter<T> >
+class scoped_ptr {
+ TALK_MOVE_ONLY_TYPE_FOR_CPP_03(scoped_ptr, RValue)
+
+ public:
+ // The element and deleter types.
+ typedef T element_type;
+ typedef D deleter_type;
+
+ // Constructor. Defaults to initializing with NULL.
+ scoped_ptr() : impl_(NULL) { }
+
+ // Constructor. Takes ownership of p.
+ explicit scoped_ptr(element_type* p) : impl_(p) { }
+
+ // Constructor. Allows initialization of a stateful deleter.
+ scoped_ptr(element_type* p, const D& d) : impl_(p, d) { }
+
+ // Constructor. Allows construction from a scoped_ptr rvalue for a
+ // convertible type and deleter.
+ //
+ // IMPLEMENTATION NOTE: C++11 unique_ptr<> keeps this constructor distinct
+ // from the normal move constructor. By C++11 20.7.1.2.1.21, this constructor
+ // has different post-conditions if D is a reference type. Since this
+ // implementation does not support deleters with reference type,
+ // we do not need a separate move constructor allowing us to avoid one
+ // use of SFINAE. You only need to care about this if you modify the
+ // implementation of scoped_ptr.
+ template <typename U, typename V>
+ scoped_ptr(scoped_ptr<U, V> other) : impl_(&other.impl_) {
+ COMPILE_ASSERT(!rtc::is_array<U>::value, U_cannot_be_an_array);
+ }
+
+ // Constructor. Move constructor for C++03 move emulation of this type.
+ scoped_ptr(RValue rvalue) : impl_(&rvalue.object->impl_) { }
+
+ // operator=. Allows assignment from a scoped_ptr rvalue for a convertible
+ // type and deleter.
+ //
+ // IMPLEMENTATION NOTE: C++11 unique_ptr<> keeps this operator= distinct from
+ // the normal move assignment operator. By C++11 20.7.1.2.3.4, this templated
+ // form has different requirements for move-only Deleters. Since this
+ // implementation does not support move-only Deleters, we do not need a
+ // separate move assignment operator allowing us to avoid one use of SFINAE.
+ // You only need to care about this if you modify the implementation of
+ // scoped_ptr.
+ template <typename U, typename V>
+ scoped_ptr& operator=(scoped_ptr<U, V> rhs) {
+ COMPILE_ASSERT(!rtc::is_array<U>::value, U_cannot_be_an_array);
+ impl_.TakeState(&rhs.impl_);
+ return *this;
+ }
+
+ // Reset. Deletes the currently owned object, if any.
+ // Then takes ownership of a new object, if given.
+ void reset(element_type* p = NULL) { impl_.reset(p); }
+
+ // Accessors to get the owned object.
+ // operator* and operator-> will assert() if there is no current object.
+ element_type& operator*() const {
+ ASSERT(impl_.get() != NULL);
+ return *impl_.get();
+ }
+ element_type* operator->() const {
+ ASSERT(impl_.get() != NULL);
+ return impl_.get();
+ }
+ element_type* get() const { return impl_.get(); }
+
+ // Access to the deleter.
+ deleter_type& get_deleter() { return impl_.get_deleter(); }
+ const deleter_type& get_deleter() const { return impl_.get_deleter(); }
+
+ // Allow scoped_ptr<element_type> to be used in boolean expressions, but not
+ // implicitly convertible to a real bool (which is dangerous).
+ //
+ // Note that this trick is only safe when the == and != operators
+ // are declared explicitly, as otherwise "scoped_ptr1 ==
+ // scoped_ptr2" will compile but do the wrong thing (i.e., convert
+ // to Testable and then do the comparison).
+ private:
+ typedef rtc::internal::scoped_ptr_impl<element_type, deleter_type>
+ scoped_ptr::*Testable;
+
+ public:
+ operator Testable() const { return impl_.get() ? &scoped_ptr::impl_ : NULL; }
+
+ // Comparison operators.
+ // These return whether two scoped_ptr refer to the same object, not just to
+ // two different but equal objects.
+ bool operator==(const element_type* p) const { return impl_.get() == p; }
+ bool operator!=(const element_type* p) const { return impl_.get() != p; }
+
+ // Swap two scoped pointers.
+ void swap(scoped_ptr& p2) {
+ impl_.swap(p2.impl_);
+ }
+
+ // Release a pointer.
+ // The return value is the current pointer held by this object.
+ // If this object holds a NULL pointer, the return value is NULL.
+ // After this operation, this object will hold a NULL pointer,
+ // and will not own the object any more.
+ element_type* release() WARN_UNUSED_RESULT {
+ return impl_.release();
+ }
+
+ // Delete the currently held pointer and return a pointer
+ // to allow overwriting of the current pointer address.
+ element_type** accept() WARN_UNUSED_RESULT {
+ return impl_.accept();
+ }
+
+ // Return a pointer to the current pointer address.
+ element_type** use() WARN_UNUSED_RESULT {
+ return impl_.use();
+ }
+
+ // C++98 doesn't support function templates with default parameters, which
+ // makes it hard to write a PassAs() that understands converting the deleter
+ // while preserving simple calling semantics.
+ //
+ // Until there is a use case for PassAs() with custom deleters, just ignore
+ // the custom deleter.
+ template <typename PassAsType>
+ scoped_ptr<PassAsType> PassAs() {
+ return scoped_ptr<PassAsType>(Pass());
+ }
+
+ private:
+ // Needed to reach into |impl_| in the constructor.
+ template <typename U, typename V> friend class scoped_ptr;
+ rtc::internal::scoped_ptr_impl<element_type, deleter_type> impl_;
+
+ // Forbidden for API compatibility with std::unique_ptr.
+ explicit scoped_ptr(int disallow_construction_from_null);
+
+ // Forbid comparison of scoped_ptr types. If U != T, it totally
+ // doesn't make sense, and if U == T, it still doesn't make sense
+ // because you should never have the same object owned by two different
+ // scoped_ptrs.
+ template <class U> bool operator==(scoped_ptr<U> const& p2) const;
+ template <class U> bool operator!=(scoped_ptr<U> const& p2) const;
+};
+
+template <class T, class D>
+class scoped_ptr<T[], D> {
+ TALK_MOVE_ONLY_TYPE_FOR_CPP_03(scoped_ptr, RValue)
+
+ public:
+ // The element and deleter types.
+ typedef T element_type;
+ typedef D deleter_type;
+
+ // Constructor. Defaults to initializing with NULL.
+ scoped_ptr() : impl_(NULL) { }
+
+ // Constructor. Stores the given array. Note that the argument's type
+ // must exactly match T*. In particular:
+ // - it cannot be a pointer to a type derived from T, because it is
+ // inherently unsafe in the general case to access an array through a
+ // pointer whose dynamic type does not match its static type (e.g., if
+ // T and the derived types had different sizes access would be
+ // incorrectly calculated). Deletion is also always undefined
+ // (C++98 [expr.delete]p3). If you're doing this, fix your code.
+ // - it cannot be NULL, because NULL is an integral expression, not a
+ // pointer to T. Use the no-argument version instead of explicitly
+ // passing NULL.
+ // - it cannot be const-qualified differently from T per unique_ptr spec
+ // (http://cplusplus.github.com/LWG/lwg-active.html#2118). Users wanting
+ // to work around this may use implicit_cast<const T*>().
+ // However, because of the first bullet in this comment, users MUST
+ // NOT use implicit_cast<Base*>() to upcast the static type of the array.
+ explicit scoped_ptr(element_type* array) : impl_(array) { }
+
+ // Constructor. Move constructor for C++03 move emulation of this type.
+ scoped_ptr(RValue rvalue) : impl_(&rvalue.object->impl_) { }
+
+ // operator=. Move operator= for C++03 move emulation of this type.
+ scoped_ptr& operator=(RValue rhs) {
+ impl_.TakeState(&rhs.object->impl_);
+ return *this;
+ }
+
+ // Reset. Deletes the currently owned array, if any.
+ // Then takes ownership of a new object, if given.
+ void reset(element_type* array = NULL) { impl_.reset(array); }
+
+ // Accessors to get the owned array.
+ element_type& operator[](size_t i) const {
+ ASSERT(impl_.get() != NULL);
+ return impl_.get()[i];
+ }
+ element_type* get() const { return impl_.get(); }
+
+ // Access to the deleter.
+ deleter_type& get_deleter() { return impl_.get_deleter(); }
+ const deleter_type& get_deleter() const { return impl_.get_deleter(); }
+
+ // Allow scoped_ptr<element_type> to be used in boolean expressions, but not
+ // implicitly convertible to a real bool (which is dangerous).
+ private:
+ typedef rtc::internal::scoped_ptr_impl<element_type, deleter_type>
+ scoped_ptr::*Testable;
+
+ public:
+ operator Testable() const { return impl_.get() ? &scoped_ptr::impl_ : NULL; }
+
+ // Comparison operators.
+ // These return whether two scoped_ptr refer to the same object, not just to
+ // two different but equal objects.
+ bool operator==(element_type* array) const { return impl_.get() == array; }
+ bool operator!=(element_type* array) const { return impl_.get() != array; }
+
+ // Swap two scoped pointers.
+ void swap(scoped_ptr& p2) {
+ impl_.swap(p2.impl_);
+ }
+
+ // Release a pointer.
+ // The return value is the current pointer held by this object.
+ // If this object holds a NULL pointer, the return value is NULL.
+ // After this operation, this object will hold a NULL pointer,
+ // and will not own the object any more.
+ element_type* release() WARN_UNUSED_RESULT {
+ return impl_.release();
+ }
+
+ // Delete the currently held pointer and return a pointer
+ // to allow overwriting of the current pointer address.
+ element_type** accept() WARN_UNUSED_RESULT {
+ return impl_.accept();
+ }
+
+ // Return a pointer to the current pointer address.
+ element_type** use() WARN_UNUSED_RESULT {
+ return impl_.use();
+ }
+
+ private:
+ // Force element_type to be a complete type.
+ enum { type_must_be_complete = sizeof(element_type) };
+
+ // Actually hold the data.
+ rtc::internal::scoped_ptr_impl<element_type, deleter_type> impl_;
+
+ // Disable initialization from any type other than element_type*, by
+ // providing a constructor that matches such an initialization, but is
+ // private and has no definition. This is disabled because it is not safe to
+ // call delete[] on an array whose static type does not match its dynamic
+ // type.
+ template <typename U> explicit scoped_ptr(U* array);
+ explicit scoped_ptr(int disallow_construction_from_null);
+
+ // Disable reset() from any type other than element_type*, for the same
+ // reasons as the constructor above.
+ template <typename U> void reset(U* array);
+ void reset(int disallow_reset_from_null);
+
+ // Forbid comparison of scoped_ptr types. If U != T, it totally
+ // doesn't make sense, and if U == T, it still doesn't make sense
+ // because you should never have the same object owned by two different
+ // scoped_ptrs.
+ template <class U> bool operator==(scoped_ptr<U> const& p2) const;
+ template <class U> bool operator!=(scoped_ptr<U> const& p2) const;
+};
+
+} // namespace rtc
+
+// Free functions
+template <class T, class D>
+void swap(rtc::scoped_ptr<T, D>& p1, rtc::scoped_ptr<T, D>& p2) {
+ p1.swap(p2);
+}
+
+template <class T, class D>
+bool operator==(T* p1, const rtc::scoped_ptr<T, D>& p2) {
+ return p1 == p2.get();
+}
+
+template <class T, class D>
+bool operator!=(T* p1, const rtc::scoped_ptr<T, D>& p2) {
+ return p1 != p2.get();
+}
+
+#endif // #ifndef WEBRTC_BASE_SCOPED_PTR_H__
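
A short sketch of the non-standard accept() extension above; Widget and CreateWidget() are hypothetical stand-ins for a C-style out-parameter factory:

```cpp
#include "webrtc/base/scoped_ptr.h"

struct Widget { void Run() {} };

// Hypothetical factory that writes a heap-allocated object into |*out|.
int CreateWidget(Widget** out) { *out = new Widget(); return 0; }

void AcceptExample() {
  rtc::scoped_ptr<Widget> widget;
  if (CreateWidget(widget.accept()) == 0) {  // accept() deletes any old value
    widget->Run();                           // widget now owns the new Widget
  }
}
```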
diff --git a/chromium/third_party/webrtc/base/scoped_ref_ptr.h b/chromium/third_party/webrtc/base/scoped_ref_ptr.h
new file mode 100644
index 00000000000..a71c20ae324
--- /dev/null
+++ b/chromium/third_party/webrtc/base/scoped_ref_ptr.h
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Originally these classes are from Chromium.
+// http://src.chromium.org/viewvc/chrome/trunk/src/base/memory/ref_counted.h?view=markup
+
+//
+// A smart pointer class for reference counted objects. Use this class instead
+// of calling AddRef and Release manually on a reference counted object to
+// avoid common memory leaks caused by forgetting to Release an object
+// reference. Sample usage:
+//
+// class MyFoo : public RefCounted<MyFoo> {
+// ...
+// };
+//
+// void some_function() {
+// scoped_refptr<MyFoo> foo = new MyFoo();
+// foo->Method(param);
+// // |foo| is released when this function returns
+// }
+//
+// void some_other_function() {
+// scoped_refptr<MyFoo> foo = new MyFoo();
+// ...
+// foo = NULL; // explicitly releases |foo|
+// ...
+// if (foo)
+// foo->Method(param);
+// }
+//
+// The above examples show how scoped_refptr<T> acts like a pointer to T.
+// Given two scoped_refptr<T> classes, it is also possible to exchange
+// references between the two objects, like so:
+//
+// {
+// scoped_refptr<MyFoo> a = new MyFoo();
+// scoped_refptr<MyFoo> b;
+//
+// b.swap(a);
+// // now, |b| references the MyFoo object, and |a| references NULL.
+// }
+//
+// To make both |a| and |b| in the above example reference the same MyFoo
+// object, simply use the assignment operator:
+//
+// {
+// scoped_refptr<MyFoo> a = new MyFoo();
+// scoped_refptr<MyFoo> b;
+//
+// b = a;
+// // now, |a| and |b| each own a reference to the same MyFoo object.
+// }
+//
+
+#ifndef WEBRTC_BASE_SCOPED_REF_PTR_H_
+#define WEBRTC_BASE_SCOPED_REF_PTR_H_
+
+#include <stddef.h>
+
+namespace rtc {
+
+template <class T>
+class scoped_refptr {
+ public:
+ scoped_refptr() : ptr_(NULL) {
+ }
+
+ scoped_refptr(T* p) : ptr_(p) {
+ if (ptr_)
+ ptr_->AddRef();
+ }
+
+ scoped_refptr(const scoped_refptr<T>& r) : ptr_(r.ptr_) {
+ if (ptr_)
+ ptr_->AddRef();
+ }
+
+ template <typename U>
+ scoped_refptr(const scoped_refptr<U>& r) : ptr_(r.get()) {
+ if (ptr_)
+ ptr_->AddRef();
+ }
+
+ ~scoped_refptr() {
+ if (ptr_)
+ ptr_->Release();
+ }
+
+ T* get() const { return ptr_; }
+ operator T*() const { return ptr_; }
+ T* operator->() const { return ptr_; }
+
+ // Release a pointer.
+ // The return value is the current pointer held by this object.
+ // If this object holds a NULL pointer, the return value is NULL.
+ // After this operation, this object will hold a NULL pointer,
+ // and will not own the object any more.
+ T* release() {
+ T* retVal = ptr_;
+ ptr_ = NULL;
+ return retVal;
+ }
+
+ scoped_refptr<T>& operator=(T* p) {
+    // AddRef first so that self-assignment works.
+    if (p)
+      p->AddRef();
+    if (ptr_)
+      ptr_->Release();
+ ptr_ = p;
+ return *this;
+ }
+
+ scoped_refptr<T>& operator=(const scoped_refptr<T>& r) {
+ return *this = r.ptr_;
+ }
+
+ template <typename U>
+ scoped_refptr<T>& operator=(const scoped_refptr<U>& r) {
+ return *this = r.get();
+ }
+
+ void swap(T** pp) {
+ T* p = ptr_;
+ ptr_ = *pp;
+ *pp = p;
+ }
+
+ void swap(scoped_refptr<T>& r) {
+ swap(&r.ptr_);
+ }
+
+ protected:
+ T* ptr_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SCOPED_REF_PTR_H_
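
For illustration, a minimal sketch of the AddRef()/Release() contract that
scoped_refptr expects from T. MyData is a hypothetical, hand-rolled
reference-counted type (not thread-safe); real code would normally reuse an
existing ref-count helper instead.

    #include "webrtc/base/scoped_ref_ptr.h"

    class MyData {  // Hypothetical type satisfying the AddRef/Release contract.
     public:
      MyData() : ref_count_(0) {}
      void AddRef() { ++ref_count_; }
      void Release() {
        if (--ref_count_ == 0)
          delete this;
      }

     private:
      ~MyData() {}  // Deleted only via Release().
      int ref_count_;
    };

    void ScopedRefPtrSketch() {
      rtc::scoped_refptr<MyData> a(new MyData());  // ref count: 1
      rtc::scoped_refptr<MyData> b = a;            // ref count: 2
      a = NULL;                                    // ref count: 1
    }  // |b| goes out of scope here and the MyData object is deleted.
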
diff --git a/chromium/third_party/webrtc/base/scopedptrcollection.h b/chromium/third_party/webrtc/base/scopedptrcollection.h
new file mode 100644
index 00000000000..47dff6503bc
--- /dev/null
+++ b/chromium/third_party/webrtc/base/scopedptrcollection.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Stores a collection of pointers that are deleted when the container is
+// destructed.
+
+#ifndef WEBRTC_BASE_SCOPEDPTRCOLLECTION_H_
+#define WEBRTC_BASE_SCOPEDPTRCOLLECTION_H_
+
+#include <algorithm>
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
+
+namespace rtc {
+
+template<class T>
+class ScopedPtrCollection {
+ public:
+ typedef std::vector<T*> VectorT;
+
+ ScopedPtrCollection() { }
+ ~ScopedPtrCollection() {
+ for (typename VectorT::iterator it = collection_.begin();
+ it != collection_.end(); ++it) {
+ delete *it;
+ }
+ }
+
+ const VectorT& collection() const { return collection_; }
+ void Reserve(size_t size) {
+ collection_.reserve(size);
+ }
+ void PushBack(T* t) {
+ collection_.push_back(t);
+ }
+
+ // Remove |t| from the collection without deleting it.
+ void Remove(T* t) {
+ collection_.erase(std::remove(collection_.begin(), collection_.end(), t),
+ collection_.end());
+ }
+
+ private:
+ VectorT collection_;
+
+ DISALLOW_COPY_AND_ASSIGN(ScopedPtrCollection);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SCOPEDPTRCOLLECTION_H_
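
For illustration, a short sketch of ScopedPtrCollection usage; Widget is a
hypothetical element type.

    #include "webrtc/base/scopedptrcollection.h"

    struct Widget { int id; };  // Hypothetical element type.

    void CollectionSketch() {
      rtc::ScopedPtrCollection<Widget> widgets;
      widgets.Reserve(2);
      widgets.PushBack(new Widget());
      Widget* keep = new Widget();
      widgets.PushBack(keep);
      widgets.Remove(keep);  // Removed but not deleted; the caller owns it again.
      delete keep;
    }  // The remaining Widget is deleted together with |widgets|.
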
diff --git a/chromium/third_party/webrtc/base/scopedptrcollection_unittest.cc b/chromium/third_party/webrtc/base/scopedptrcollection_unittest.cc
new file mode 100644
index 00000000000..30b8ed9ed06
--- /dev/null
+++ b/chromium/third_party/webrtc/base/scopedptrcollection_unittest.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/scopedptrcollection.h"
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+namespace {
+
+class InstanceCounter {
+ public:
+ explicit InstanceCounter(int* num_instances)
+ : num_instances_(num_instances) {
+ ++(*num_instances_);
+ }
+ ~InstanceCounter() {
+ --(*num_instances_);
+ }
+
+ private:
+ int* num_instances_;
+
+ DISALLOW_COPY_AND_ASSIGN(InstanceCounter);
+};
+
+} // namespace
+
+class ScopedPtrCollectionTest : public testing::Test {
+ protected:
+ ScopedPtrCollectionTest()
+ : num_instances_(0),
+ collection_(new ScopedPtrCollection<InstanceCounter>()) {
+ }
+
+ int num_instances_;
+ scoped_ptr<ScopedPtrCollection<InstanceCounter> > collection_;
+};
+
+TEST_F(ScopedPtrCollectionTest, PushBack) {
+ EXPECT_EQ(0u, collection_->collection().size());
+ EXPECT_EQ(0, num_instances_);
+ const int kNum = 100;
+ for (int i = 0; i < kNum; ++i) {
+ collection_->PushBack(new InstanceCounter(&num_instances_));
+ }
+ EXPECT_EQ(static_cast<size_t>(kNum), collection_->collection().size());
+ EXPECT_EQ(kNum, num_instances_);
+ collection_.reset();
+ EXPECT_EQ(0, num_instances_);
+}
+
+TEST_F(ScopedPtrCollectionTest, Remove) {
+ InstanceCounter* ic = new InstanceCounter(&num_instances_);
+ collection_->PushBack(ic);
+ EXPECT_EQ(1u, collection_->collection().size());
+ collection_->Remove(ic);
+ EXPECT_EQ(1, num_instances_);
+ collection_.reset();
+ EXPECT_EQ(1, num_instances_);
+ delete ic;
+ EXPECT_EQ(0, num_instances_);
+}
+
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/sec_buffer.h b/chromium/third_party/webrtc/base/sec_buffer.h
new file mode 100644
index 00000000000..d4cda00d46f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sec_buffer.h
@@ -0,0 +1,156 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// @file Contains utility classes that make it easier to use SecBuffers
+
+#ifndef WEBRTC_BASE_SEC_BUFFER_H__
+#define WEBRTC_BASE_SEC_BUFFER_H__
+
+namespace rtc {
+
+// A base class for CSecBuffer<T>. Contains
+// all implementation that does not require
+// template arguments.
+class CSecBufferBase : public SecBuffer {
+ public:
+ CSecBufferBase() {
+ Clear();
+ }
+
+  // Uses the SSPI to free a pointer; must be
+  // used for buffers returned from SSPI APIs.
+ static void FreeSSPI(void *ptr) {
+ if ( ptr ) {
+ SECURITY_STATUS status;
+ status = ::FreeContextBuffer(ptr);
+ ASSERT(SEC_E_OK == status); // "Freeing context buffer"
+ }
+ }
+
+ // Deletes a buffer with operator delete
+ static void FreeDelete(void *ptr) {
+ delete [] reinterpret_cast<char*>(ptr);
+ }
+
+  // A no-op free, for buffers that wrap memory
+  // owned elsewhere.
+ static void FreeNone(void *ptr) {
+ }
+
+ protected:
+ // Clears the buffer to EMPTY & NULL
+ void Clear() {
+ this->BufferType = SECBUFFER_EMPTY;
+ this->cbBuffer = 0;
+ this->pvBuffer = NULL;
+ }
+};
+
+// Wrapper class for SecBuffer to take care
+// of initialization and destruction.
+template <void (*pfnFreeBuffer)(void *ptr)>
+class CSecBuffer: public CSecBufferBase {
+ public:
+ // Initializes buffer to empty & NULL
+ CSecBuffer() {
+ }
+
+ // Frees any allocated memory
+ ~CSecBuffer() {
+ Release();
+ }
+
+ // Frees the buffer appropriately, and re-nulls
+ void Release() {
+ pfnFreeBuffer(this->pvBuffer);
+ Clear();
+ }
+
+ private:
+ // A placeholder function for compile-time asserts on the class
+ void CompileAsserts() {
+ // never invoked...
+ assert(false); // _T("Notreached")
+
+ // This class must not extend the size of SecBuffer, since
+ // we use arrays of CSecBuffer in CSecBufferBundle below
+    cassert(sizeof(CSecBuffer<pfnFreeBuffer>) == sizeof(SecBuffer));
+ }
+};
+
+// Contains all generic implementation for the
+// SecBufferBundle class
+class SecBufferBundleBase {
+ public:
+};
+
+// A template class that bundles a SecBufferDesc with
+// one or more SecBuffers for convenience. Can take
+// care of deallocating buffers appropriately, as indicated
+// by pfnFreeBuffer function.
+// By default does no deallocation.
+template <int num_buffers,
+ void (*pfnFreeBuffer)(void *ptr) = CSecBufferBase::FreeNone>
+class CSecBufferBundle : public SecBufferBundleBase {
+ public:
+ // Constructs a security buffer bundle with num_buffers
+ // buffers, all of which are empty and nulled.
+ CSecBufferBundle() {
+ desc_.ulVersion = SECBUFFER_VERSION;
+ desc_.cBuffers = num_buffers;
+ desc_.pBuffers = buffers_;
+ }
+
+ // Frees all currently used buffers.
+ ~CSecBufferBundle() {
+ Release();
+ }
+
+ // Accessor for the descriptor
+ PSecBufferDesc desc() {
+ return &desc_;
+ }
+
+ // Accessor for the descriptor
+ const PSecBufferDesc desc() const {
+ return &desc_;
+ }
+
+ // returns the i-th security buffer
+ SecBuffer &operator[] (size_t num) {
+ ASSERT(num < num_buffers); // "Buffer index out of bounds"
+ return buffers_[num];
+ }
+
+ // returns the i-th security buffer
+ const SecBuffer &operator[] (size_t num) const {
+ ASSERT(num < num_buffers); // "Buffer index out of bounds"
+ return buffers_[num];
+ }
+
+ // Frees all non-NULL security buffers,
+ // using the deallocation function
+ void Release() {
+ for ( size_t i = 0; i < num_buffers; ++i ) {
+ buffers_[i].Release();
+ }
+ }
+
+ private:
+ // Our descriptor
+ SecBufferDesc desc_;
+ // Our bundled buffers, each takes care of its own
+ // initialization and destruction
+ CSecBuffer<pfnFreeBuffer> buffers_[num_buffers];
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SEC_BUFFER_H__
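
For illustration, a Windows-only sketch of preparing a bundle of SecBuffers
before handing it to an SSPI call such as InitializeSecurityContext(). The
token pointer and size are hypothetical inputs, and the SSPI headers are
assumed to be included as shown (sec_buffer.h itself expects SecBuffer and
ASSERT to be available already).

    #define SECURITY_WIN32
    #include <windows.h>
    #include <security.h>  // SecBuffer, SecBufferDesc, SECBUFFER_* constants.

    #include "webrtc/base/common.h"  // For ASSERT(), used by sec_buffer.h.
    #include "webrtc/base/sec_buffer.h"

    void SecBufferSketch(void* token_data, unsigned long token_size) {
      // Two buffers with the default FreeNone policy; output buffers allocated
      // by SSPI itself would instead use rtc::CSecBufferBase::FreeSSPI.
      rtc::CSecBufferBundle<2> in_bundle;
      in_bundle[0].BufferType = SECBUFFER_TOKEN;
      in_bundle[0].cbBuffer = token_size;
      in_bundle[0].pvBuffer = token_data;
      // in_bundle[1] stays SECBUFFER_EMPTY.

      PSecBufferDesc desc = in_bundle.desc();  // Hand |desc| to the SSPI call.
      (void)desc;
    }  // ~CSecBufferBundle releases the buffers (FreeNone: no deallocation).
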
diff --git a/chromium/third_party/webrtc/base/sha1.cc b/chromium/third_party/webrtc/base/sha1.cc
new file mode 100644
index 00000000000..afc5569fd7d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sha1.cc
@@ -0,0 +1,286 @@
+/*
+ * SHA-1 in C
+ * By Steve Reid <sreid@sea-to-sky.net>
+ * 100% Public Domain
+ *
+ * -----------------
+ * Modified 7/98
+ * By James H. Brown <jbrown@burgoyne.com>
+ * Still 100% Public Domain
+ *
+ * Corrected a problem which generated improper hash values on 16 bit machines
+ * Routine SHA1Update changed from
+ * void SHA1Update(SHA1_CTX* context, unsigned char* data, unsigned int
+ * len)
+ * to
+ * void SHA1Update(SHA1_CTX* context, unsigned char* data, unsigned
+ * long len)
+ *
+ * The 'len' parameter was declared an int which works fine on 32 bit machines.
+ * However, on 16 bit machines an int is too small for the shifts being done
+ * against it. This caused the hash function to generate incorrect values if
+ * len was greater than 8191 (8K - 1) due to the 'len << 3' on line 3 of
+ * SHA1Update().
+ *
+ * Since the file IO in main() reads 16K at a time, any file 8K or larger would
+ * be guaranteed to generate the wrong hash (e.g. Test Vector #3, a million
+ * "a"s).
+ *
+ * I also changed the declaration of variables i & j in SHA1Update to
+ * unsigned long from unsigned int for the same reason.
+ *
+ * These changes should make no difference to any 32 bit implementations
+ * since an int and a long are the same size in those environments.
+ *
+ * --
+ * I also corrected a few compiler warnings generated by Borland C.
+ * 1. Added #include <process.h> for exit() prototype
+ * 2. Removed unused variable 'j' in SHA1Final
+ * 3. Changed exit(0) to return(0) at end of main.
+ *
+ * ALL changes I made can be located by searching for comments containing 'JHB'
+ * -----------------
+ * Modified 8/98
+ * By Steve Reid <sreid@sea-to-sky.net>
+ * Still 100% public domain
+ *
+ * 1- Removed #include <process.h> and used return() instead of exit()
+ * 2- Fixed overwriting of finalcount in SHA1Final() (discovered by Chris Hall)
+ * 3- Changed email address from steve@edmweb.com to sreid@sea-to-sky.net
+ *
+ * -----------------
+ * Modified 4/01
+ * By Saul Kravitz <Saul.Kravitz@celera.com>
+ * Still 100% PD
+ * Modified to run on Compaq Alpha hardware.
+ *
+ * -----------------
+ * Modified 07/2002
+ * By Ralph Giles <giles@ghostscript.com>
+ * Still 100% public domain
+ * modified for use with stdint types, autoconf
+ * code cleanup, removed attribution comments
+ * switched SHA1Final() argument order for consistency
+ * use SHA1_ prefix for public api
+ * move public api to sha1.h
+ *
+ * -----------------
+ * Modified 02/2012
+ * By Justin Uberti <juberti@google.com>
+ * Remove underscore from SHA1 prefix to avoid conflict with OpenSSL
+ * Remove test code
+ * Untabify
+ *
+ * -----------------
+ * Modified 03/2012
+ * By Ronghua Wu <ronghuawu@google.com>
+ * Change the typedef of uint32(8)_t to uint32(8). We need this because in the
+ * chromium android build, the stdio.h will include stdint.h which already
+ * defined uint32(8)_t.
+ *
+ * -----------------
+ * Modified 04/2012
+ * By Frank Barchard <fbarchard@google.com>
+ * Ported to C++, Google style, change len to size_t, enable SHA1HANDSOFF
+ *
+ * Test Vectors (from FIPS PUB 180-1)
+ * "abc"
+ * A9993E36 4706816A BA3E2571 7850C26C 9CD0D89D
+ * "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"
+ * 84983E44 1C3BD26E BAAE4AA1 F95129E5 E54670F1
+ * A million repetitions of "a"
+ * 34AA973C D4C4DAA4 F61EEB2B DBAD2731 6534016F
+ */
+
+// Enabling SHA1HANDSOFF preserves the caller's data buffer.
+// With SHA1HANDSOFF disabled, the caller's buffer is modified (endian-swapped).
+#define SHA1HANDSOFF
+
+#include "webrtc/base/sha1.h"
+
+#include <stdio.h>
+#include <string.h>
+
+namespace rtc {
+
+void SHA1Transform(uint32 state[5], const uint8 buffer[64]);
+
+#define rol(value, bits) (((value) << (bits)) | ((value) >> (32 - (bits))))
+
+// blk0() and blk() perform the initial expand.
+// I got the idea of expanding during the round function from SSLeay
+// FIXME: can we do this in an endian-proof way?
+#ifdef ARCH_CPU_BIG_ENDIAN
+#define blk0(i) block->l[i]
+#else
+#define blk0(i) (block->l[i] = (rol(block->l[i], 24) & 0xFF00FF00) | \
+ (rol(block->l[i], 8) & 0x00FF00FF))
+#endif
+#define blk(i) (block->l[i & 15] = rol(block->l[(i + 13) & 15] ^ \
+ block->l[(i + 8) & 15] ^ block->l[(i + 2) & 15] ^ block->l[i & 15], 1))
+
+// (R0+R1), R2, R3, R4 are the different operations used in SHA1.
+#define R0(v, w, x, y, z, i) \
+ z += ((w & (x ^ y)) ^ y) + blk0(i) + 0x5A827999 + rol(v, 5); \
+ w = rol(w, 30);
+#define R1(v, w, x, y, z, i) \
+ z += ((w & (x ^ y)) ^ y) + blk(i) + 0x5A827999 + rol(v, 5); \
+ w = rol(w, 30);
+#define R2(v, w, x, y, z, i) \
+ z += (w ^ x ^ y) + blk(i) + 0x6ED9EBA1 + rol(v, 5);\
+ w = rol(w, 30);
+#define R3(v, w, x, y, z, i) \
+ z += (((w | x) & y) | (w & x)) + blk(i) + 0x8F1BBCDC + rol(v, 5); \
+ w = rol(w, 30);
+#define R4(v, w, x, y, z, i) \
+ z += (w ^ x ^ y) + blk(i) + 0xCA62C1D6 + rol(v, 5); \
+ w = rol(w, 30);
+
+#ifdef VERBOSE // SAK
+void SHAPrintContext(SHA1_CTX *context, char *msg) {
+ printf("%s (%d,%d) %x %x %x %x %x\n",
+ msg,
+ context->count[0], context->count[1],
+ context->state[0],
+ context->state[1],
+ context->state[2],
+ context->state[3],
+ context->state[4]);
+}
+#endif /* VERBOSE */
+
+// Hash a single 512-bit block. This is the core of the algorithm.
+void SHA1Transform(uint32 state[5], const uint8 buffer[64]) {
+ union CHAR64LONG16 {
+ uint8 c[64];
+ uint32 l[16];
+ };
+#ifdef SHA1HANDSOFF
+ static uint8 workspace[64];
+ memcpy(workspace, buffer, 64);
+ CHAR64LONG16* block = reinterpret_cast<CHAR64LONG16*>(workspace);
+#else
+ // Note(fbarchard): This option does modify the user's data buffer.
+ CHAR64LONG16* block = const_cast<CHAR64LONG16*>(
+ reinterpret_cast<const CHAR64LONG16*>(buffer));
+#endif
+
+ // Copy context->state[] to working vars.
+ uint32 a = state[0];
+ uint32 b = state[1];
+ uint32 c = state[2];
+ uint32 d = state[3];
+ uint32 e = state[4];
+
+ // 4 rounds of 20 operations each. Loop unrolled.
+ // Note(fbarchard): The following has lint warnings for multiple ; on
+ // a line and no space after , but is left as-is to be similar to the
+ // original code.
+ R0(a,b,c,d,e,0); R0(e,a,b,c,d,1); R0(d,e,a,b,c,2); R0(c,d,e,a,b,3);
+ R0(b,c,d,e,a,4); R0(a,b,c,d,e,5); R0(e,a,b,c,d,6); R0(d,e,a,b,c,7);
+ R0(c,d,e,a,b,8); R0(b,c,d,e,a,9); R0(a,b,c,d,e,10); R0(e,a,b,c,d,11);
+ R0(d,e,a,b,c,12); R0(c,d,e,a,b,13); R0(b,c,d,e,a,14); R0(a,b,c,d,e,15);
+ R1(e,a,b,c,d,16); R1(d,e,a,b,c,17); R1(c,d,e,a,b,18); R1(b,c,d,e,a,19);
+ R2(a,b,c,d,e,20); R2(e,a,b,c,d,21); R2(d,e,a,b,c,22); R2(c,d,e,a,b,23);
+ R2(b,c,d,e,a,24); R2(a,b,c,d,e,25); R2(e,a,b,c,d,26); R2(d,e,a,b,c,27);
+ R2(c,d,e,a,b,28); R2(b,c,d,e,a,29); R2(a,b,c,d,e,30); R2(e,a,b,c,d,31);
+ R2(d,e,a,b,c,32); R2(c,d,e,a,b,33); R2(b,c,d,e,a,34); R2(a,b,c,d,e,35);
+ R2(e,a,b,c,d,36); R2(d,e,a,b,c,37); R2(c,d,e,a,b,38); R2(b,c,d,e,a,39);
+ R3(a,b,c,d,e,40); R3(e,a,b,c,d,41); R3(d,e,a,b,c,42); R3(c,d,e,a,b,43);
+ R3(b,c,d,e,a,44); R3(a,b,c,d,e,45); R3(e,a,b,c,d,46); R3(d,e,a,b,c,47);
+ R3(c,d,e,a,b,48); R3(b,c,d,e,a,49); R3(a,b,c,d,e,50); R3(e,a,b,c,d,51);
+ R3(d,e,a,b,c,52); R3(c,d,e,a,b,53); R3(b,c,d,e,a,54); R3(a,b,c,d,e,55);
+ R3(e,a,b,c,d,56); R3(d,e,a,b,c,57); R3(c,d,e,a,b,58); R3(b,c,d,e,a,59);
+ R4(a,b,c,d,e,60); R4(e,a,b,c,d,61); R4(d,e,a,b,c,62); R4(c,d,e,a,b,63);
+ R4(b,c,d,e,a,64); R4(a,b,c,d,e,65); R4(e,a,b,c,d,66); R4(d,e,a,b,c,67);
+ R4(c,d,e,a,b,68); R4(b,c,d,e,a,69); R4(a,b,c,d,e,70); R4(e,a,b,c,d,71);
+ R4(d,e,a,b,c,72); R4(c,d,e,a,b,73); R4(b,c,d,e,a,74); R4(a,b,c,d,e,75);
+ R4(e,a,b,c,d,76); R4(d,e,a,b,c,77); R4(c,d,e,a,b,78); R4(b,c,d,e,a,79);
+
+ // Add the working vars back into context.state[].
+ state[0] += a;
+ state[1] += b;
+ state[2] += c;
+ state[3] += d;
+ state[4] += e;
+}
+
+// SHA1Init - Initialize new context.
+void SHA1Init(SHA1_CTX* context) {
+ // SHA1 initialization constants.
+ context->state[0] = 0x67452301;
+ context->state[1] = 0xEFCDAB89;
+ context->state[2] = 0x98BADCFE;
+ context->state[3] = 0x10325476;
+ context->state[4] = 0xC3D2E1F0;
+ context->count[0] = context->count[1] = 0;
+}
+
+// Run your data through this.
+void SHA1Update(SHA1_CTX* context, const uint8* data, size_t input_len) {
+ size_t i = 0;
+
+#ifdef VERBOSE
+ SHAPrintContext(context, "before");
+#endif
+
+ // Compute number of bytes mod 64.
+ size_t index = (context->count[0] >> 3) & 63;
+
+ // Update number of bits.
+ // TODO: Use uint64 instead of 2 uint32 for count.
+ // count[0] has low 29 bits for byte count + 3 pad 0's making 32 bits for
+ // bit count.
+ // Add bit count to low uint32
+ context->count[0] += static_cast<uint32>(input_len << 3);
+ if (context->count[0] < static_cast<uint32>(input_len << 3)) {
+    ++context->count[1];  // if overflow (carry), add one to high word
+ }
+ context->count[1] += static_cast<uint32>(input_len >> 29);
+ if ((index + input_len) > 63) {
+ i = 64 - index;
+ memcpy(&context->buffer[index], data, i);
+ SHA1Transform(context->state, context->buffer);
+ for (; i + 63 < input_len; i += 64) {
+ SHA1Transform(context->state, data + i);
+ }
+ index = 0;
+ }
+ memcpy(&context->buffer[index], &data[i], input_len - i);
+
+#ifdef VERBOSE
+ SHAPrintContext(context, "after ");
+#endif
+}
+
+// Add padding and return the message digest.
+void SHA1Final(SHA1_CTX* context, uint8 digest[SHA1_DIGEST_SIZE]) {
+ uint8 finalcount[8];
+ for (int i = 0; i < 8; ++i) {
+ // Endian independent
+ finalcount[i] = static_cast<uint8>(
+ (context->count[(i >= 4 ? 0 : 1)] >> ((3 - (i & 3)) * 8) ) & 255);
+ }
+ SHA1Update(context, reinterpret_cast<const uint8*>("\200"), 1);
+ while ((context->count[0] & 504) != 448) {
+ SHA1Update(context, reinterpret_cast<const uint8*>("\0"), 1);
+ }
+ SHA1Update(context, finalcount, 8); // Should cause a SHA1Transform().
+ for (int i = 0; i < SHA1_DIGEST_SIZE; ++i) {
+ digest[i] = static_cast<uint8>(
+ (context->state[i >> 2] >> ((3 - (i & 3)) * 8) ) & 255);
+ }
+
+ // Wipe variables.
+ memset(context->buffer, 0, 64);
+ memset(context->state, 0, 20);
+ memset(context->count, 0, 8);
+ memset(finalcount, 0, 8); // SWR
+
+#ifdef SHA1HANDSOFF // Make SHA1Transform overwrite its own static vars.
+ SHA1Transform(context->state, context->buffer);
+#endif
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/sha1.h b/chromium/third_party/webrtc/base/sha1.h
new file mode 100644
index 00000000000..4862a00498a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sha1.h
@@ -0,0 +1,32 @@
+/*
+ * SHA-1 in C
+ * By Steve Reid <sreid@sea-to-sky.net>
+ * 100% Public Domain
+ *
+*/
+
+// Ported to C++, Google style, under namespace rtc and uses basictypes.h
+
+#ifndef WEBRTC_BASE_SHA1_H_
+#define WEBRTC_BASE_SHA1_H_
+
+#include "webrtc/base/basictypes.h"
+
+namespace rtc {
+
+struct SHA1_CTX {
+ uint32 state[5];
+ // TODO: Change bit count to uint64.
+ uint32 count[2]; // Bit count of input.
+ uint8 buffer[64];
+};
+
+#define SHA1_DIGEST_SIZE 20
+
+void SHA1Init(SHA1_CTX* context);
+void SHA1Update(SHA1_CTX* context, const uint8* data, size_t len);
+void SHA1Final(SHA1_CTX* context, uint8 digest[SHA1_DIGEST_SIZE]);
+
+}  // namespace rtc
+
+#endif  // WEBRTC_BASE_SHA1_H_
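
For illustration, a minimal sketch of the raw SHA-1 API declared above,
hashing the "abc" test vector listed in the sha1.cc header comment; the
function name is hypothetical.

    #include <stdio.h>
    #include <string.h>

    #include "webrtc/base/sha1.h"

    void Sha1Sketch() {
      const char kInput[] = "abc";
      rtc::SHA1_CTX ctx;
      uint8 digest[SHA1_DIGEST_SIZE];

      rtc::SHA1Init(&ctx);
      rtc::SHA1Update(&ctx, reinterpret_cast<const uint8*>(kInput),
                      strlen(kInput));
      rtc::SHA1Final(&ctx, digest);

      // Expected: a9993e364706816aba3e25717850c26c9cd0d89d (FIPS PUB 180-1).
      for (int i = 0; i < SHA1_DIGEST_SIZE; ++i)
        printf("%02x", digest[i]);
      printf("\n");
    }
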
diff --git a/chromium/third_party/webrtc/base/sha1digest.h b/chromium/third_party/webrtc/base/sha1digest.h
new file mode 100644
index 00000000000..fb4c53e6e4d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sha1digest.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SHA1DIGEST_H_
+#define WEBRTC_BASE_SHA1DIGEST_H_
+
+#include "webrtc/base/messagedigest.h"
+#include "webrtc/base/sha1.h"
+
+namespace rtc {
+
+// A simple wrapper for our SHA-1 implementation.
+class Sha1Digest : public MessageDigest {
+ public:
+ enum { kSize = SHA1_DIGEST_SIZE };
+ Sha1Digest() {
+ SHA1Init(&ctx_);
+ }
+ virtual size_t Size() const {
+ return kSize;
+ }
+ virtual void Update(const void* buf, size_t len) {
+ SHA1Update(&ctx_, static_cast<const uint8*>(buf), len);
+ }
+ virtual size_t Finish(void* buf, size_t len) {
+ if (len < kSize) {
+ return 0;
+ }
+ SHA1Final(&ctx_, static_cast<uint8*>(buf));
+ SHA1Init(&ctx_); // Reset for next use.
+ return kSize;
+ }
+
+ private:
+ SHA1_CTX ctx_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SHA1DIGEST_H_
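
For illustration, a small sketch of incremental hashing with Sha1Digest;
hex_encode comes from webrtc/base/stringencode.h, as in the unit test below,
and HashTwoParts is a hypothetical helper.

    #include <string>

    #include "webrtc/base/sha1digest.h"
    #include "webrtc/base/stringencode.h"

    std::string HashTwoParts(const std::string& part1, const std::string& part2) {
      rtc::Sha1Digest sha1;
      char digest[rtc::Sha1Digest::kSize];
      sha1.Update(part1.data(), part1.size());
      sha1.Update(part2.data(), part2.size());
      sha1.Finish(digest, sizeof(digest));  // Also resets the context for reuse.
      return rtc::hex_encode(digest, sizeof(digest));
    }
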
diff --git a/chromium/third_party/webrtc/base/sha1digest_unittest.cc b/chromium/third_party/webrtc/base/sha1digest_unittest.cc
new file mode 100644
index 00000000000..d3c20438728
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sha1digest_unittest.cc
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/sha1digest.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/stringencode.h"
+
+namespace rtc {
+
+std::string Sha1(const std::string& input) {
+ Sha1Digest sha1;
+ return ComputeDigest(&sha1, input);
+}
+
+TEST(Sha1DigestTest, TestSize) {
+ Sha1Digest sha1;
+ EXPECT_EQ(20, static_cast<int>(Sha1Digest::kSize));
+ EXPECT_EQ(20U, sha1.Size());
+}
+
+TEST(Sha1DigestTest, TestBasic) {
+ // Test vectors from sha1.c.
+ EXPECT_EQ("da39a3ee5e6b4b0d3255bfef95601890afd80709", Sha1(""));
+ EXPECT_EQ("a9993e364706816aba3e25717850c26c9cd0d89d", Sha1("abc"));
+ EXPECT_EQ("84983e441c3bd26ebaae4aa1f95129e5e54670f1",
+ Sha1("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"));
+ std::string a_million_as(1000000, 'a');
+ EXPECT_EQ("34aa973cd4c4daa4f61eeb2bdbad27316534016f", Sha1(a_million_as));
+}
+
+TEST(Sha1DigestTest, TestMultipleUpdates) {
+ Sha1Digest sha1;
+ std::string input =
+ "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
+ char output[Sha1Digest::kSize];
+ for (size_t i = 0; i < input.size(); ++i) {
+ sha1.Update(&input[i], 1);
+ }
+ EXPECT_EQ(sha1.Size(), sha1.Finish(output, sizeof(output)));
+ EXPECT_EQ("84983e441c3bd26ebaae4aa1f95129e5e54670f1",
+ hex_encode(output, sizeof(output)));
+}
+
+TEST(Sha1DigestTest, TestReuse) {
+ Sha1Digest sha1;
+ std::string input = "abc";
+ EXPECT_EQ("a9993e364706816aba3e25717850c26c9cd0d89d",
+ ComputeDigest(&sha1, input));
+ input = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
+ EXPECT_EQ("84983e441c3bd26ebaae4aa1f95129e5e54670f1",
+ ComputeDigest(&sha1, input));
+}
+
+TEST(Sha1DigestTest, TestBufferTooSmall) {
+ Sha1Digest sha1;
+ std::string input = "abcdefghijklmnopqrstuvwxyz";
+ char output[Sha1Digest::kSize - 1];
+ sha1.Update(input.c_str(), input.size());
+ EXPECT_EQ(0U, sha1.Finish(output, sizeof(output)));
+}
+
+TEST(Sha1DigestTest, TestBufferConst) {
+ Sha1Digest sha1;
+ const int kLongSize = 1000000;
+ std::string input(kLongSize, '\0');
+ for (int i = 0; i < kLongSize; ++i) {
+ input[i] = static_cast<char>(i);
+ }
+ sha1.Update(input.c_str(), input.size());
+ for (int i = 0; i < kLongSize; ++i) {
+ EXPECT_EQ(static_cast<char>(i), input[i]);
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/sharedexclusivelock.cc b/chromium/third_party/webrtc/base/sharedexclusivelock.cc
new file mode 100644
index 00000000000..9facf60eaca
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sharedexclusivelock.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/sharedexclusivelock.h"
+
+namespace rtc {
+
+SharedExclusiveLock::SharedExclusiveLock()
+ : shared_count_is_zero_(true, true),
+ shared_count_(0) {
+}
+
+void SharedExclusiveLock::LockExclusive() {
+ cs_exclusive_.Enter();
+ shared_count_is_zero_.Wait(rtc::kForever);
+}
+
+void SharedExclusiveLock::UnlockExclusive() {
+ cs_exclusive_.Leave();
+}
+
+void SharedExclusiveLock::LockShared() {
+ CritScope exclusive_scope(&cs_exclusive_);
+ CritScope shared_scope(&cs_shared_);
+ if (++shared_count_ == 1) {
+ shared_count_is_zero_.Reset();
+ }
+}
+
+void SharedExclusiveLock::UnlockShared() {
+ CritScope shared_scope(&cs_shared_);
+ if (--shared_count_ == 0) {
+ shared_count_is_zero_.Set();
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/sharedexclusivelock.h b/chromium/third_party/webrtc/base/sharedexclusivelock.h
new file mode 100644
index 00000000000..f64d7cf5068
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sharedexclusivelock.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SHAREDEXCLUSIVELOCK_H_
+#define WEBRTC_BASE_SHAREDEXCLUSIVELOCK_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
+
+namespace rtc {
+
+// This class provides a shared-exclusive lock. It can be used for cases like
+// the multiple-readers/single-writer model.
+class SharedExclusiveLock {
+ public:
+ SharedExclusiveLock();
+
+ // Locking/unlocking methods. It is encouraged to use SharedScope or
+ // ExclusiveScope for protection.
+ void LockExclusive();
+ void UnlockExclusive();
+ void LockShared();
+ void UnlockShared();
+
+ private:
+ rtc::CriticalSection cs_exclusive_;
+ rtc::CriticalSection cs_shared_;
+ rtc::Event shared_count_is_zero_;
+ int shared_count_;
+
+ DISALLOW_COPY_AND_ASSIGN(SharedExclusiveLock);
+};
+
+class SharedScope {
+ public:
+ explicit SharedScope(SharedExclusiveLock* lock) : lock_(lock) {
+ lock_->LockShared();
+ }
+
+ ~SharedScope() {
+ lock_->UnlockShared();
+ }
+
+ private:
+ SharedExclusiveLock* lock_;
+
+ DISALLOW_COPY_AND_ASSIGN(SharedScope);
+};
+
+class ExclusiveScope {
+ public:
+ explicit ExclusiveScope(SharedExclusiveLock* lock) : lock_(lock) {
+ lock_->LockExclusive();
+ }
+
+ ~ExclusiveScope() {
+ lock_->UnlockExclusive();
+ }
+
+ private:
+ SharedExclusiveLock* lock_;
+
+ DISALLOW_COPY_AND_ASSIGN(ExclusiveScope);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SHAREDEXCLUSIVELOCK_H_
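
For illustration, a minimal sketch of guarding a value with SharedScope and
ExclusiveScope; the lock and the guarded integer are hypothetical globals.

    #include "webrtc/base/sharedexclusivelock.h"

    rtc::SharedExclusiveLock g_lock;  // Hypothetical lock protecting |g_value|.
    int g_value = 0;

    int ReadValue() {
      rtc::SharedScope ss(&g_lock);     // Many readers may hold this at once.
      return g_value;
    }

    void WriteValue(int value) {
      rtc::ExclusiveScope es(&g_lock);  // Waits until all readers have left.
      g_value = value;
    }
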
diff --git a/chromium/third_party/webrtc/base/sharedexclusivelock_unittest.cc b/chromium/third_party/webrtc/base/sharedexclusivelock_unittest.cc
new file mode 100644
index 00000000000..42334af7557
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sharedexclusivelock_unittest.cc
@@ -0,0 +1,218 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sharedexclusivelock.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+static const uint32 kMsgRead = 0;
+static const uint32 kMsgWrite = 0;
+static const int kNoWaitThresholdInMs = 10;
+static const int kWaitThresholdInMs = 80;
+static const int kProcessTimeInMs = 100;
+static const int kProcessTimeoutInMs = 5000;
+
+class SharedExclusiveTask : public MessageHandler {
+ public:
+ SharedExclusiveTask(SharedExclusiveLock* shared_exclusive_lock,
+ int* value,
+ bool* done)
+ : shared_exclusive_lock_(shared_exclusive_lock),
+ waiting_time_in_ms_(0),
+ value_(value),
+ done_(done) {
+ worker_thread_.reset(new Thread());
+ worker_thread_->Start();
+ }
+
+ int waiting_time_in_ms() const { return waiting_time_in_ms_; }
+
+ protected:
+ scoped_ptr<Thread> worker_thread_;
+ SharedExclusiveLock* shared_exclusive_lock_;
+ int waiting_time_in_ms_;
+ int* value_;
+ bool* done_;
+};
+
+class ReadTask : public SharedExclusiveTask {
+ public:
+ ReadTask(SharedExclusiveLock* shared_exclusive_lock, int* value, bool* done)
+ : SharedExclusiveTask(shared_exclusive_lock, value, done) {
+ }
+
+ void PostRead(int* value) {
+ worker_thread_->Post(this, kMsgRead, new TypedMessageData<int*>(value));
+ }
+
+ private:
+ virtual void OnMessage(Message* message) {
+ ASSERT(rtc::Thread::Current() == worker_thread_.get());
+ ASSERT(message != NULL);
+ ASSERT(message->message_id == kMsgRead);
+
+ TypedMessageData<int*>* message_data =
+ static_cast<TypedMessageData<int*>*>(message->pdata);
+
+ uint32 start_time = Time();
+ {
+ SharedScope ss(shared_exclusive_lock_);
+ waiting_time_in_ms_ = TimeDiff(Time(), start_time);
+
+ Thread::SleepMs(kProcessTimeInMs);
+ *message_data->data() = *value_;
+ *done_ = true;
+ }
+ delete message->pdata;
+ message->pdata = NULL;
+ }
+};
+
+class WriteTask : public SharedExclusiveTask {
+ public:
+ WriteTask(SharedExclusiveLock* shared_exclusive_lock, int* value, bool* done)
+ : SharedExclusiveTask(shared_exclusive_lock, value, done) {
+ }
+
+ void PostWrite(int value) {
+ worker_thread_->Post(this, kMsgWrite, new TypedMessageData<int>(value));
+ }
+
+ private:
+ virtual void OnMessage(Message* message) {
+ ASSERT(rtc::Thread::Current() == worker_thread_.get());
+ ASSERT(message != NULL);
+ ASSERT(message->message_id == kMsgWrite);
+
+ TypedMessageData<int>* message_data =
+ static_cast<TypedMessageData<int>*>(message->pdata);
+
+ uint32 start_time = Time();
+ {
+ ExclusiveScope es(shared_exclusive_lock_);
+ waiting_time_in_ms_ = TimeDiff(Time(), start_time);
+
+ Thread::SleepMs(kProcessTimeInMs);
+ *value_ = message_data->data();
+ *done_ = true;
+ }
+ delete message->pdata;
+ message->pdata = NULL;
+ }
+};
+
+// Unit test for SharedExclusiveLock.
+class SharedExclusiveLockTest
+ : public testing::Test {
+ public:
+ SharedExclusiveLockTest() : value_(0) {
+ }
+
+ virtual void SetUp() {
+ shared_exclusive_lock_.reset(new SharedExclusiveLock());
+ }
+
+ protected:
+ scoped_ptr<SharedExclusiveLock> shared_exclusive_lock_;
+ int value_;
+};
+
+// Flaky: https://code.google.com/p/webrtc/issues/detail?id=3318
+TEST_F(SharedExclusiveLockTest, DISABLED_TestSharedShared) {
+ int value0, value1;
+ bool done0, done1;
+ ReadTask reader0(shared_exclusive_lock_.get(), &value_, &done0);
+ ReadTask reader1(shared_exclusive_lock_.get(), &value_, &done1);
+
+ // Test shared locks can be shared without waiting.
+ {
+ SharedScope ss(shared_exclusive_lock_.get());
+ value_ = 1;
+ done0 = false;
+ done1 = false;
+ reader0.PostRead(&value0);
+ reader1.PostRead(&value1);
+ Thread::SleepMs(kProcessTimeInMs);
+ }
+
+ EXPECT_TRUE_WAIT(done0, kProcessTimeoutInMs);
+ EXPECT_EQ(1, value0);
+ EXPECT_LE(reader0.waiting_time_in_ms(), kNoWaitThresholdInMs);
+ EXPECT_TRUE_WAIT(done1, kProcessTimeoutInMs);
+ EXPECT_EQ(1, value1);
+ EXPECT_LE(reader1.waiting_time_in_ms(), kNoWaitThresholdInMs);
+}
+
+TEST_F(SharedExclusiveLockTest, TestSharedExclusive) {
+ bool done;
+ WriteTask writer(shared_exclusive_lock_.get(), &value_, &done);
+
+ // Test exclusive lock needs to wait for shared lock.
+ {
+ SharedScope ss(shared_exclusive_lock_.get());
+ value_ = 1;
+ done = false;
+ writer.PostWrite(2);
+ Thread::SleepMs(kProcessTimeInMs);
+ EXPECT_EQ(1, value_);
+ }
+
+ EXPECT_TRUE_WAIT(done, kProcessTimeoutInMs);
+ EXPECT_EQ(2, value_);
+ EXPECT_GE(writer.waiting_time_in_ms(), kWaitThresholdInMs);
+}
+
+TEST_F(SharedExclusiveLockTest, TestExclusiveShared) {
+ int value;
+ bool done;
+ ReadTask reader(shared_exclusive_lock_.get(), &value_, &done);
+
+ // Test shared lock needs to wait for exclusive lock.
+ {
+ ExclusiveScope es(shared_exclusive_lock_.get());
+ value_ = 1;
+ done = false;
+ reader.PostRead(&value);
+ Thread::SleepMs(kProcessTimeInMs);
+ value_ = 2;
+ }
+
+ EXPECT_TRUE_WAIT(done, kProcessTimeoutInMs);
+ EXPECT_EQ(2, value);
+ EXPECT_GE(reader.waiting_time_in_ms(), kWaitThresholdInMs);
+}
+
+TEST_F(SharedExclusiveLockTest, TestExclusiveExclusive) {
+ bool done;
+ WriteTask writer(shared_exclusive_lock_.get(), &value_, &done);
+
+ // Test exclusive lock needs to wait for exclusive lock.
+ {
+ ExclusiveScope es(shared_exclusive_lock_.get());
+ value_ = 1;
+ done = false;
+ writer.PostWrite(2);
+ Thread::SleepMs(kProcessTimeInMs);
+ EXPECT_EQ(1, value_);
+ }
+
+ EXPECT_TRUE_WAIT(done, kProcessTimeoutInMs);
+ EXPECT_EQ(2, value_);
+ EXPECT_GE(writer.waiting_time_in_ms(), kWaitThresholdInMs);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/signalthread.cc b/chromium/third_party/webrtc/base/signalthread.cc
new file mode 100644
index 00000000000..f95cb5fbc7a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/signalthread.cc
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/signalthread.h"
+
+#include "webrtc/base/common.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// SignalThread
+///////////////////////////////////////////////////////////////////////////////
+
+SignalThread::SignalThread()
+ : main_(Thread::Current()),
+ worker_(this),
+ state_(kInit),
+ refcount_(1) {
+ main_->SignalQueueDestroyed.connect(this,
+ &SignalThread::OnMainThreadDestroyed);
+ worker_.SetName("SignalThread", this);
+}
+
+SignalThread::~SignalThread() {
+ ASSERT(refcount_ == 0);
+}
+
+bool SignalThread::SetName(const std::string& name, const void* obj) {
+ EnterExit ee(this);
+ ASSERT(main_->IsCurrent());
+ ASSERT(kInit == state_);
+ return worker_.SetName(name, obj);
+}
+
+bool SignalThread::SetPriority(ThreadPriority priority) {
+ EnterExit ee(this);
+ ASSERT(main_->IsCurrent());
+ ASSERT(kInit == state_);
+ return worker_.SetPriority(priority);
+}
+
+void SignalThread::Start() {
+ EnterExit ee(this);
+ ASSERT(main_->IsCurrent());
+ if (kInit == state_ || kComplete == state_) {
+ state_ = kRunning;
+ OnWorkStart();
+ worker_.Start();
+ } else {
+ ASSERT(false);
+ }
+}
+
+void SignalThread::Destroy(bool wait) {
+ EnterExit ee(this);
+ ASSERT(main_->IsCurrent());
+ if ((kInit == state_) || (kComplete == state_)) {
+ refcount_--;
+ } else if (kRunning == state_ || kReleasing == state_) {
+ state_ = kStopping;
+    // OnWorkStop() must follow Quit(), so that when the thread wakes up due
+    // to OnWorkStop(), ContinueWork() will return false.
+ worker_.Quit();
+ OnWorkStop();
+ if (wait) {
+ // Release the thread's lock so that it can return from ::Run.
+ cs_.Leave();
+ worker_.Stop();
+ cs_.Enter();
+ refcount_--;
+ }
+ } else {
+ ASSERT(false);
+ }
+}
+
+void SignalThread::Release() {
+ EnterExit ee(this);
+ ASSERT(main_->IsCurrent());
+ if (kComplete == state_) {
+ refcount_--;
+ } else if (kRunning == state_) {
+ state_ = kReleasing;
+ } else {
+ // if (kInit == state_) use Destroy()
+ ASSERT(false);
+ }
+}
+
+bool SignalThread::ContinueWork() {
+ EnterExit ee(this);
+ ASSERT(worker_.IsCurrent());
+ return worker_.ProcessMessages(0);
+}
+
+void SignalThread::OnMessage(Message *msg) {
+ EnterExit ee(this);
+ if (ST_MSG_WORKER_DONE == msg->message_id) {
+ ASSERT(main_->IsCurrent());
+ OnWorkDone();
+ bool do_delete = false;
+ if (kRunning == state_) {
+ state_ = kComplete;
+ } else {
+ do_delete = true;
+ }
+ if (kStopping != state_) {
+ // Before signaling that the work is done, make sure that the worker
+ // thread actually is done. We got here because DoWork() finished and
+ // Run() posted the ST_MSG_WORKER_DONE message. This means the worker
+ // thread is about to go away anyway, but sometimes it doesn't actually
+ // finish before SignalWorkDone is processed, and for a reusable
+ // SignalThread this makes an assert in thread.cc fire.
+ //
+ // Calling Stop() on the worker ensures that the OS thread that underlies
+ // the worker will finish, and will be set to NULL, enabling us to call
+ // Start() again.
+ worker_.Stop();
+ SignalWorkDone(this);
+ }
+ if (do_delete) {
+ refcount_--;
+ }
+ }
+}
+
+void SignalThread::Run() {
+ DoWork();
+ {
+ EnterExit ee(this);
+ if (main_) {
+ main_->Post(this, ST_MSG_WORKER_DONE);
+ }
+ }
+}
+
+void SignalThread::OnMainThreadDestroyed() {
+ EnterExit ee(this);
+ main_ = NULL;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/signalthread.h b/chromium/third_party/webrtc/base/signalthread.h
new file mode 100644
index 00000000000..a97bda1af67
--- /dev/null
+++ b/chromium/third_party/webrtc/base/signalthread.h
@@ -0,0 +1,156 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SIGNALTHREAD_H_
+#define WEBRTC_BASE_SIGNALTHREAD_H_
+
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// SignalThread - Base class for worker threads. The main thread should call
+// Start() to begin work, and then follow one of these models:
+// Normal: Wait for SignalWorkDone, and then call Release to destroy.
+//  Cancellation: Call Destroy(true) to abort the worker thread.
+//  Fire-and-forget: Call Release(), which allows the thread to run to
+//  completion, and then self-destruct without further notification.
+// Periodic tasks: Wait for SignalWorkDone, then eventually call Start()
+// again to repeat the task. When the instance isn't needed anymore,
+// call Release. DoWork, OnWorkStart and OnWorkStop are called again,
+// on a new thread.
+// The subclass should override DoWork() to perform the background task. By
+// periodically calling ContinueWork(), it can check for cancellation.
+// OnWorkStart and OnWorkDone can be overridden to do pre- or post-work
+// tasks in the context of the main thread.
+///////////////////////////////////////////////////////////////////////////////
+
+class SignalThread
+ : public sigslot::has_slots<>,
+ protected MessageHandler {
+ public:
+ SignalThread();
+
+ // Context: Main Thread. Call before Start to change the worker's name.
+ bool SetName(const std::string& name, const void* obj);
+
+ // Context: Main Thread. Call before Start to change the worker's priority.
+ bool SetPriority(ThreadPriority priority);
+
+ // Context: Main Thread. Call to begin the worker thread.
+ void Start();
+
+ // Context: Main Thread. If the worker thread is not running, deletes the
+ // object immediately. Otherwise, asks the worker thread to abort processing,
+ // and schedules the object to be deleted once the worker exits.
+ // SignalWorkDone will not be signalled. If wait is true, does not return
+ // until the thread is deleted.
+ void Destroy(bool wait);
+
+ // Context: Main Thread. If the worker thread is complete, deletes the
+ // object immediately. Otherwise, schedules the object to be deleted once
+ // the worker thread completes. SignalWorkDone will be signalled.
+ void Release();
+
+ // Context: Main Thread. Signalled when work is complete.
+ sigslot::signal1<SignalThread *> SignalWorkDone;
+
+ enum { ST_MSG_WORKER_DONE, ST_MSG_FIRST_AVAILABLE };
+
+ protected:
+ virtual ~SignalThread();
+
+ Thread* worker() { return &worker_; }
+
+ // Context: Main Thread. Subclass should override to do pre-work setup.
+ virtual void OnWorkStart() { }
+
+ // Context: Worker Thread. Subclass should override to do work.
+ virtual void DoWork() = 0;
+
+ // Context: Worker Thread. Subclass should call periodically to
+ // dispatch messages and determine if the thread should terminate.
+ bool ContinueWork();
+
+ // Context: Worker Thread. Subclass should override when extra work is
+ // needed to abort the worker thread.
+ virtual void OnWorkStop() { }
+
+ // Context: Main Thread. Subclass should override to do post-work cleanup.
+ virtual void OnWorkDone() { }
+
+ // Context: Any Thread. If subclass overrides, be sure to call the base
+ // implementation. Do not use (message_id < ST_MSG_FIRST_AVAILABLE)
+ virtual void OnMessage(Message *msg);
+
+ private:
+ enum State {
+ kInit, // Initialized, but not started
+ kRunning, // Started and doing work
+ kReleasing, // Same as running, but to be deleted when work is done
+ kComplete, // Work is done
+ kStopping, // Work is being interrupted
+ };
+
+ class Worker : public Thread {
+ public:
+ explicit Worker(SignalThread* parent) : parent_(parent) {}
+ virtual ~Worker() { Stop(); }
+ virtual void Run() { parent_->Run(); }
+
+ private:
+ SignalThread* parent_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(Worker);
+ };
+
+ class EnterExit {
+ public:
+ explicit EnterExit(SignalThread* t) : t_(t) {
+ t_->cs_.Enter();
+ // If refcount_ is zero then the object has already been deleted and we
+ // will be double-deleting it in ~EnterExit()! (shouldn't happen)
+ ASSERT(t_->refcount_ != 0);
+ ++t_->refcount_;
+ }
+ ~EnterExit() {
+ bool d = (0 == --t_->refcount_);
+ t_->cs_.Leave();
+ if (d)
+ delete t_;
+ }
+
+ private:
+ SignalThread* t_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(EnterExit);
+ };
+
+ void Run();
+ void OnMainThreadDestroyed();
+
+ Thread* main_;
+ Worker worker_;
+ CriticalSection cs_;
+ State state_;
+ int refcount_;
+
+ DISALLOW_COPY_AND_ASSIGN(SignalThread);
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SIGNALTHREAD_H_
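
For illustration, a sketch of the "Normal" model described in the header
comment above; UrlFetchThread, Controller, and their members are hypothetical.

    #include "webrtc/base/signalthread.h"

    class UrlFetchThread : public rtc::SignalThread {  // Hypothetical worker.
     protected:
      virtual void DoWork() {
        // Runs on the worker thread. Long-running work should periodically
        // call ContinueWork() and stop when it returns false (cancellation).
        while (ContinueWork()) {
          // ... do a bounded chunk of work, then break when finished ...
          break;
        }
      }
    };

    class Controller : public sigslot::has_slots<> {  // Hypothetical owner.
     public:
      Controller() : fetcher_(NULL) {}

      void StartFetch() {
        fetcher_ = new UrlFetchThread();
        fetcher_->SignalWorkDone.connect(this, &Controller::OnDone);
        fetcher_->Start();
      }

     private:
      void OnDone(rtc::SignalThread* thread) {
        // Back on the main thread; let the finished worker delete itself.
        thread->Release();
        fetcher_ = NULL;
      }

      UrlFetchThread* fetcher_;
    };
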
diff --git a/chromium/third_party/webrtc/base/signalthread_unittest.cc b/chromium/third_party/webrtc/base/signalthread_unittest.cc
new file mode 100644
index 00000000000..e0ea54eb332
--- /dev/null
+++ b/chromium/third_party/webrtc/base/signalthread_unittest.cc
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/signalthread.h"
+#include "webrtc/base/thread.h"
+
+using namespace rtc;
+
+class SignalThreadTest : public testing::Test, public sigslot::has_slots<> {
+ public:
+ class SlowSignalThread : public SignalThread {
+ public:
+ SlowSignalThread(SignalThreadTest* harness) : harness_(harness) {
+ }
+
+ virtual ~SlowSignalThread() {
+ EXPECT_EQ(harness_->main_thread_, Thread::Current());
+ ++harness_->thread_deleted_;
+ }
+
+ const SignalThreadTest* harness() { return harness_; }
+
+ protected:
+ virtual void OnWorkStart() {
+ ASSERT_TRUE(harness_ != NULL);
+ ++harness_->thread_started_;
+ EXPECT_EQ(harness_->main_thread_, Thread::Current());
+ EXPECT_FALSE(worker()->RunningForTest()); // not started yet
+ }
+
+ virtual void OnWorkStop() {
+ ++harness_->thread_stopped_;
+ EXPECT_EQ(harness_->main_thread_, Thread::Current());
+ EXPECT_TRUE(worker()->RunningForTest()); // not stopped yet
+ }
+
+ virtual void OnWorkDone() {
+ ++harness_->thread_done_;
+ EXPECT_EQ(harness_->main_thread_, Thread::Current());
+ EXPECT_TRUE(worker()->RunningForTest()); // not stopped yet
+ }
+
+ virtual void DoWork() {
+ EXPECT_NE(harness_->main_thread_, Thread::Current());
+ EXPECT_EQ(worker(), Thread::Current());
+ Thread::Current()->socketserver()->Wait(250, false);
+ }
+
+ private:
+ SignalThreadTest* harness_;
+ DISALLOW_EVIL_CONSTRUCTORS(SlowSignalThread);
+ };
+
+ void OnWorkComplete(rtc::SignalThread* thread) {
+ SlowSignalThread* t = static_cast<SlowSignalThread*>(thread);
+ EXPECT_EQ(t->harness(), this);
+ EXPECT_EQ(main_thread_, Thread::Current());
+
+ ++thread_completed_;
+ if (!called_release_) {
+ thread->Release();
+ }
+ }
+
+ virtual void SetUp() {
+ main_thread_ = Thread::Current();
+ thread_ = new SlowSignalThread(this);
+ thread_->SignalWorkDone.connect(this, &SignalThreadTest::OnWorkComplete);
+ called_release_ = false;
+ thread_started_ = 0;
+ thread_done_ = 0;
+ thread_completed_ = 0;
+ thread_stopped_ = 0;
+ thread_deleted_ = 0;
+ }
+
+ virtual void TearDown() {
+ }
+
+ Thread* main_thread_;
+ SlowSignalThread* thread_;
+ bool called_release_;
+
+ int thread_started_;
+ int thread_done_;
+ int thread_completed_;
+ int thread_stopped_;
+ int thread_deleted_;
+};
+
+class OwnerThread : public Thread, public sigslot::has_slots<> {
+ public:
+ explicit OwnerThread(SignalThreadTest* harness)
+ : harness_(harness),
+ has_run_(false) {
+ }
+
+ virtual ~OwnerThread() {
+ Stop();
+ }
+
+ virtual void Run() {
+ SignalThreadTest::SlowSignalThread* signal_thread =
+ new SignalThreadTest::SlowSignalThread(harness_);
+ signal_thread->SignalWorkDone.connect(this, &OwnerThread::OnWorkDone);
+ signal_thread->Start();
+ Thread::Current()->socketserver()->Wait(100, false);
+ signal_thread->Release();
+ // Delete |signal_thread|.
+ signal_thread->Destroy(true);
+ has_run_ = true;
+ }
+
+ bool has_run() { return has_run_; }
+ void OnWorkDone(SignalThread* signal_thread) {
+ FAIL() << " This shouldn't get called.";
+ }
+
+ private:
+ SignalThreadTest* harness_;
+ bool has_run_;
+ DISALLOW_EVIL_CONSTRUCTORS(OwnerThread);
+};
+
+// Test for when the main thread goes away while the
+// signal thread is still working. This may happen
+// when shutting down the process.
+TEST_F(SignalThreadTest, OwnerThreadGoesAway) {
+ {
+ scoped_ptr<OwnerThread> owner(new OwnerThread(this));
+ main_thread_ = owner.get();
+ owner->Start();
+ while (!owner->has_run()) {
+ Thread::Current()->socketserver()->Wait(10, false);
+ }
+ }
+ // At this point the main thread has gone away.
+ // Give the SignalThread a little time to do its callback,
+ // which will crash if the signal thread doesn't handle
+ // this situation well.
+ Thread::Current()->socketserver()->Wait(500, false);
+}
+
+#define EXPECT_STATE(started, done, completed, stopped, deleted) \
+ EXPECT_EQ(started, thread_started_); \
+ EXPECT_EQ(done, thread_done_); \
+ EXPECT_EQ(completed, thread_completed_); \
+ EXPECT_EQ(stopped, thread_stopped_); \
+ EXPECT_EQ(deleted, thread_deleted_);
+
+TEST_F(SignalThreadTest, ThreadFinishes) {
+ thread_->Start();
+ EXPECT_STATE(1, 0, 0, 0, 0);
+ Thread::SleepMs(500);
+ EXPECT_STATE(1, 0, 0, 0, 0);
+ Thread::Current()->ProcessMessages(0);
+ EXPECT_STATE(1, 1, 1, 0, 1);
+}
+
+TEST_F(SignalThreadTest, ReleasedThreadFinishes) {
+ thread_->Start();
+ EXPECT_STATE(1, 0, 0, 0, 0);
+ thread_->Release();
+ called_release_ = true;
+ EXPECT_STATE(1, 0, 0, 0, 0);
+ Thread::SleepMs(500);
+ EXPECT_STATE(1, 0, 0, 0, 0);
+ Thread::Current()->ProcessMessages(0);
+ EXPECT_STATE(1, 1, 1, 0, 1);
+}
+
+TEST_F(SignalThreadTest, DestroyedThreadCleansUp) {
+ thread_->Start();
+ EXPECT_STATE(1, 0, 0, 0, 0);
+ thread_->Destroy(true);
+ EXPECT_STATE(1, 0, 0, 1, 1);
+ Thread::Current()->ProcessMessages(0);
+ EXPECT_STATE(1, 0, 0, 1, 1);
+}
+
+TEST_F(SignalThreadTest, DeferredDestroyedThreadCleansUp) {
+ thread_->Start();
+ EXPECT_STATE(1, 0, 0, 0, 0);
+ thread_->Destroy(false);
+ EXPECT_STATE(1, 0, 0, 1, 0);
+ Thread::SleepMs(500);
+ EXPECT_STATE(1, 0, 0, 1, 0);
+ Thread::Current()->ProcessMessages(0);
+ EXPECT_STATE(1, 1, 0, 1, 1);
+}
diff --git a/chromium/third_party/webrtc/base/sigslot.h b/chromium/third_party/webrtc/base/sigslot.h
new file mode 100644
index 00000000000..990d2efb76c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sigslot.h
@@ -0,0 +1,2850 @@
+// sigslot.h: Signal/Slot classes
+//
+// Written by Sarah Thompson (sarah@telergy.com) 2002.
+//
+// License: Public domain. You are free to use this code however you like, with the proviso that
+// the author takes on no responsibility or liability for any use.
+//
+// QUICK DOCUMENTATION
+//
+// (see also the full documentation at http://sigslot.sourceforge.net/)
+//
+// #define switches
+// SIGSLOT_PURE_ISO - Define this to force ISO C++ compliance. This also disables
+// all of the thread safety support on platforms where it is
+// available.
+//
+// SIGSLOT_USE_POSIX_THREADS - Force use of Posix threads when using a C++ compiler other than
+// gcc on a platform that supports Posix threads. (When using gcc,
+// this is the default - use SIGSLOT_PURE_ISO to disable this if
+// necessary)
+//
+// SIGSLOT_DEFAULT_MT_POLICY - Where thread support is enabled, this defaults to multi_threaded_global.
+// Otherwise, the default is single_threaded. #define this yourself to
+// override the default. In pure ISO mode, anything other than
+// single_threaded will cause a compiler error.
+//
+// PLATFORM NOTES
+//
+// Win32 - On Win32, the WEBRTC_WIN symbol must be #defined. Most mainstream
+// compilers do this by default, but you may need to define it
+// yourself if your build environment is less standard. This causes
+// the Win32 thread support to be compiled in and used automatically.
+//
+// Unix/Linux/BSD, etc. - If you're using gcc, it is assumed that you have Posix threads
+// available, so they are used automatically. You can override this
+// (as under Windows) with the SIGSLOT_PURE_ISO switch. If you're using
+// something other than gcc but still want to use Posix threads, you
+// need to #define SIGSLOT_USE_POSIX_THREADS.
+//
+// ISO C++ - If none of the supported platforms are detected, or if
+// SIGSLOT_PURE_ISO is defined, all multithreading support is turned off,
+// along with any code that might cause a pure ISO C++ environment to
+// complain. Before you ask, gcc -ansi -pedantic won't compile this
+// library, but gcc -ansi is fine. Pedantic mode seems to throw a lot of
+// errors that aren't really there. If you feel like investigating this,
+// please contact the author.
+//
+//
+// THREADING MODES
+//
+// single_threaded - Your program is assumed to be single threaded from the point of view
+// of signal/slot usage (i.e. all objects using signals and slots are
+// created and destroyed from a single thread). Behaviour if objects are
+// destroyed concurrently is undefined (i.e. you'll get the occasional
+// segmentation fault/memory exception).
+//
+// multi_threaded_global - Your program is assumed to be multi threaded. Objects using signals and
+// slots can be safely created and destroyed from any thread, even when
+// connections exist. In multi_threaded_global mode, this is achieved by a
+// single global mutex (actually a critical section on Windows because they
+// are faster). This option uses less OS resources, but results in more
+// opportunities for contention, possibly resulting in more context switches
+// than are strictly necessary.
+//
+// multi_threaded_local - Behaviour in this mode is essentially the same as multi_threaded_global,
+// except that each signal, and each object that inherits has_slots, all
+// have their own mutex/critical section. In practice, this means that
+// mutex collisions (and hence context switches) only happen if they are
+// absolutely essential. However, on some platforms, creating a lot of
+// mutexes can slow down the whole OS, so use this option with care.
+//
+// USING THE LIBRARY
+//
+// See the full documentation at http://sigslot.sourceforge.net/
+//
+//
+// Libjingle specific:
+// This file has been modified such that has_slots and signalx do not have to be
+// using the same threading requirements. E.g. it is possible to connect a
+// has_slots<single_threaded> and signal0<multi_threaded_local> or
+// has_slots<multi_threaded_local> and signal0<single_threaded>.
+// If has_slots is single threaded the user must ensure that it is not trying
+// to connect or disconnect to signalx concurrently, or a data race may occur.
+// If signalx is single threaded the user must ensure that disconnect, connect
+// or signal is not happening concurrently, or a data race may occur.
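
For illustration, a short sketch of basic signal/slot usage with this header;
Sender and Receiver are hypothetical types. A second template argument can
select a threading policy explicitly, e.g.
sigslot::signal1<int, sigslot::multi_threaded_local>.

    #include <stdio.h>

    #include "webrtc/base/sigslot.h"

    class Sender {  // Hypothetical signal source.
     public:
      sigslot::signal1<int> SignalValue;  // Uses SIGSLOT_DEFAULT_MT_POLICY.
    };

    class Receiver : public sigslot::has_slots<> {  // Hypothetical listener.
     public:
      void OnValue(int value) { printf("got %d\n", value); }
    };

    void SigslotSketch() {
      Sender sender;
      Receiver receiver;
      sender.SignalValue.connect(&receiver, &Receiver::OnValue);
      sender.SignalValue(42);  // Calls Receiver::OnValue on every connection.
    }  // Connections are cleaned up automatically when either side is destroyed.
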
+
+#ifndef WEBRTC_BASE_SIGSLOT_H__
+#define WEBRTC_BASE_SIGSLOT_H__
+
+#include <list>
+#include <set>
+#include <stdlib.h>
+
+// On our copy of sigslot.h, we set single threading as default.
+#define SIGSLOT_DEFAULT_MT_POLICY single_threaded
+
+#if defined(SIGSLOT_PURE_ISO) || (!defined(WEBRTC_WIN) && !defined(__GNUG__) && !defined(SIGSLOT_USE_POSIX_THREADS))
+# define _SIGSLOT_SINGLE_THREADED
+#elif defined(WEBRTC_WIN)
+# define _SIGSLOT_HAS_WIN32_THREADS
+# if !defined(WIN32_LEAN_AND_MEAN)
+# define WIN32_LEAN_AND_MEAN
+# endif
+# include "webrtc/base/win32.h"
+#elif defined(__GNUG__) || defined(SIGSLOT_USE_POSIX_THREADS)
+# define _SIGSLOT_HAS_POSIX_THREADS
+# include <pthread.h>
+#else
+# define _SIGSLOT_SINGLE_THREADED
+#endif
+
+#ifndef SIGSLOT_DEFAULT_MT_POLICY
+# ifdef _SIGSLOT_SINGLE_THREADED
+# define SIGSLOT_DEFAULT_MT_POLICY single_threaded
+# else
+# define SIGSLOT_DEFAULT_MT_POLICY multi_threaded_local
+# endif
+#endif
+
+// TODO: change this namespace to rtc?
+namespace sigslot {
+
+ class single_threaded
+ {
+ public:
+ single_threaded()
+ {
+ ;
+ }
+
+ virtual ~single_threaded()
+ {
+ ;
+ }
+
+ virtual void lock()
+ {
+ ;
+ }
+
+ virtual void unlock()
+ {
+ ;
+ }
+ };
+
+#ifdef _SIGSLOT_HAS_WIN32_THREADS
+ // The multi threading policies only get compiled in if they are enabled.
+ class multi_threaded_global
+ {
+ public:
+ multi_threaded_global()
+ {
+ static bool isinitialised = false;
+
+ if(!isinitialised)
+ {
+ InitializeCriticalSection(get_critsec());
+ isinitialised = true;
+ }
+ }
+
+ multi_threaded_global(const multi_threaded_global&)
+ {
+ ;
+ }
+
+ virtual ~multi_threaded_global()
+ {
+ ;
+ }
+
+ virtual void lock()
+ {
+ EnterCriticalSection(get_critsec());
+ }
+
+ virtual void unlock()
+ {
+ LeaveCriticalSection(get_critsec());
+ }
+
+ private:
+ CRITICAL_SECTION* get_critsec()
+ {
+ static CRITICAL_SECTION g_critsec;
+ return &g_critsec;
+ }
+ };
+
+ class multi_threaded_local
+ {
+ public:
+ multi_threaded_local()
+ {
+ InitializeCriticalSection(&m_critsec);
+ }
+
+ multi_threaded_local(const multi_threaded_local&)
+ {
+ InitializeCriticalSection(&m_critsec);
+ }
+
+ virtual ~multi_threaded_local()
+ {
+ DeleteCriticalSection(&m_critsec);
+ }
+
+ virtual void lock()
+ {
+ EnterCriticalSection(&m_critsec);
+ }
+
+ virtual void unlock()
+ {
+ LeaveCriticalSection(&m_critsec);
+ }
+
+ private:
+ CRITICAL_SECTION m_critsec;
+ };
+#endif // _SIGSLOT_HAS_WIN32_THREADS
+
+#ifdef _SIGSLOT_HAS_POSIX_THREADS
+ // The multi threading policies only get compiled in if they are enabled.
+ class multi_threaded_global
+ {
+ public:
+		multi_threaded_global()
+		{
+			// The shared mutex is statically initialised in get_mutex() below,
+			// so there is nothing to initialise here; calling
+			// pthread_mutex_init() again from every constructor would be
+			// unsafe once other instances are already using the mutex.
+		}
+
+ multi_threaded_global(const multi_threaded_global&)
+ {
+ ;
+ }
+
+ virtual ~multi_threaded_global()
+ {
+ ;
+ }
+
+ virtual void lock()
+ {
+ pthread_mutex_lock(get_mutex());
+ }
+
+ virtual void unlock()
+ {
+ pthread_mutex_unlock(get_mutex());
+ }
+
+ private:
+		pthread_mutex_t* get_mutex()
+		{
+			static pthread_mutex_t g_mutex = PTHREAD_MUTEX_INITIALIZER;
+			return &g_mutex;
+		}
+ };
+
+ class multi_threaded_local
+ {
+ public:
+ multi_threaded_local()
+ {
+ pthread_mutex_init(&m_mutex, NULL);
+ }
+
+ multi_threaded_local(const multi_threaded_local&)
+ {
+ pthread_mutex_init(&m_mutex, NULL);
+ }
+
+ virtual ~multi_threaded_local()
+ {
+ pthread_mutex_destroy(&m_mutex);
+ }
+
+ virtual void lock()
+ {
+ pthread_mutex_lock(&m_mutex);
+ }
+
+ virtual void unlock()
+ {
+ pthread_mutex_unlock(&m_mutex);
+ }
+
+ private:
+ pthread_mutex_t m_mutex;
+ };
+#endif // _SIGSLOT_HAS_POSIX_THREADS
+
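+	// RAII helper: acquires the given policy object's lock on construction and
+	// releases it on destruction, so the lock is held for exactly one scope.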
+ template<class mt_policy>
+ class lock_block
+ {
+ public:
+ mt_policy *m_mutex;
+
+ lock_block(mt_policy *mtx)
+ : m_mutex(mtx)
+ {
+ m_mutex->lock();
+ }
+
+ ~lock_block()
+ {
+ m_mutex->unlock();
+ }
+ };
+
+ class has_slots_interface;
+
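+	// Abstract connection interfaces, one per argument count. Each concrete
+	// _connectionN further below stores a destination object and a
+	// member-function pointer and forwards emit() to it.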
+ template<class mt_policy>
+ class _connection_base0
+ {
+ public:
+ virtual ~_connection_base0() {}
+ virtual has_slots_interface* getdest() const = 0;
+ virtual void emit() = 0;
+ virtual _connection_base0* clone() = 0;
+ virtual _connection_base0* duplicate(has_slots_interface* pnewdest) = 0;
+ };
+
+ template<class arg1_type, class mt_policy>
+ class _connection_base1
+ {
+ public:
+ virtual ~_connection_base1() {}
+ virtual has_slots_interface* getdest() const = 0;
+ virtual void emit(arg1_type) = 0;
+ virtual _connection_base1<arg1_type, mt_policy>* clone() = 0;
+ virtual _connection_base1<arg1_type, mt_policy>* duplicate(has_slots_interface* pnewdest) = 0;
+ };
+
+ template<class arg1_type, class arg2_type, class mt_policy>
+ class _connection_base2
+ {
+ public:
+ virtual ~_connection_base2() {}
+ virtual has_slots_interface* getdest() const = 0;
+ virtual void emit(arg1_type, arg2_type) = 0;
+ virtual _connection_base2<arg1_type, arg2_type, mt_policy>* clone() = 0;
+ virtual _connection_base2<arg1_type, arg2_type, mt_policy>* duplicate(has_slots_interface* pnewdest) = 0;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class mt_policy>
+ class _connection_base3
+ {
+ public:
+ virtual ~_connection_base3() {}
+ virtual has_slots_interface* getdest() const = 0;
+ virtual void emit(arg1_type, arg2_type, arg3_type) = 0;
+ virtual _connection_base3<arg1_type, arg2_type, arg3_type, mt_policy>* clone() = 0;
+ virtual _connection_base3<arg1_type, arg2_type, arg3_type, mt_policy>* duplicate(has_slots_interface* pnewdest) = 0;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type, class mt_policy>
+ class _connection_base4
+ {
+ public:
+ virtual ~_connection_base4() {}
+ virtual has_slots_interface* getdest() const = 0;
+ virtual void emit(arg1_type, arg2_type, arg3_type, arg4_type) = 0;
+ virtual _connection_base4<arg1_type, arg2_type, arg3_type, arg4_type, mt_policy>* clone() = 0;
+ virtual _connection_base4<arg1_type, arg2_type, arg3_type, arg4_type, mt_policy>* duplicate(has_slots_interface* pnewdest) = 0;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class mt_policy>
+ class _connection_base5
+ {
+ public:
+ virtual ~_connection_base5() {}
+ virtual has_slots_interface* getdest() const = 0;
+ virtual void emit(arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type) = 0;
+ virtual _connection_base5<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, mt_policy>* clone() = 0;
+ virtual _connection_base5<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, mt_policy>* duplicate(has_slots_interface* pnewdest) = 0;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class arg6_type, class mt_policy>
+ class _connection_base6
+ {
+ public:
+ virtual ~_connection_base6() {}
+ virtual has_slots_interface* getdest() const = 0;
+ virtual void emit(arg1_type, arg2_type, arg3_type, arg4_type, arg5_type,
+ arg6_type) = 0;
+ virtual _connection_base6<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, mt_policy>* clone() = 0;
+ virtual _connection_base6<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, mt_policy>* duplicate(has_slots_interface* pnewdest) = 0;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class arg6_type, class arg7_type, class mt_policy>
+ class _connection_base7
+ {
+ public:
+ virtual ~_connection_base7() {}
+ virtual has_slots_interface* getdest() const = 0;
+ virtual void emit(arg1_type, arg2_type, arg3_type, arg4_type, arg5_type,
+ arg6_type, arg7_type) = 0;
+ virtual _connection_base7<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy>* clone() = 0;
+ virtual _connection_base7<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy>* duplicate(has_slots_interface* pnewdest) = 0;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class arg6_type, class arg7_type, class arg8_type, class mt_policy>
+ class _connection_base8
+ {
+ public:
+ virtual ~_connection_base8() {}
+ virtual has_slots_interface* getdest() const = 0;
+ virtual void emit(arg1_type, arg2_type, arg3_type, arg4_type, arg5_type,
+ arg6_type, arg7_type, arg8_type) = 0;
+ virtual _connection_base8<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>* clone() = 0;
+ virtual _connection_base8<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>* duplicate(has_slots_interface* pnewdest) = 0;
+ };
+
+ class _signal_base_interface
+ {
+ public:
+ virtual void slot_disconnect(has_slots_interface* pslot) = 0;
+ virtual void slot_duplicate(const has_slots_interface* poldslot, has_slots_interface* pnewslot) = 0;
+ };
+
+ template<class mt_policy>
+ class _signal_base : public _signal_base_interface, public mt_policy
+ {
+ };
+
+ class has_slots_interface
+ {
+ public:
+ has_slots_interface()
+ {
+ ;
+ }
+
+ virtual void signal_connect(_signal_base_interface* sender) = 0;
+
+ virtual void signal_disconnect(_signal_base_interface* sender) = 0;
+
+ virtual ~has_slots_interface()
+ {
+ }
+
+ virtual void disconnect_all() = 0;
+ };
+
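+	// Base class for objects that receive signals. It records every signal
+	// connected to the object so that all connections can be torn down when
+	// the object is destroyed or disconnect_all() is called.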
+ template<class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class has_slots : public has_slots_interface, public mt_policy
+ {
+ private:
+ typedef std::set<_signal_base_interface*> sender_set;
+ typedef sender_set::const_iterator const_iterator;
+
+ public:
+ has_slots()
+ {
+ ;
+ }
+
+ has_slots(const has_slots& hs)
+ {
+ lock_block<mt_policy> lock(this);
+ const_iterator it = hs.m_senders.begin();
+ const_iterator itEnd = hs.m_senders.end();
+
+ while(it != itEnd)
+ {
+ (*it)->slot_duplicate(&hs, this);
+ m_senders.insert(*it);
+ ++it;
+ }
+ }
+
+ void signal_connect(_signal_base_interface* sender)
+ {
+ lock_block<mt_policy> lock(this);
+ m_senders.insert(sender);
+ }
+
+ void signal_disconnect(_signal_base_interface* sender)
+ {
+ lock_block<mt_policy> lock(this);
+ m_senders.erase(sender);
+ }
+
+ virtual ~has_slots()
+ {
+ disconnect_all();
+ }
+
+ void disconnect_all()
+ {
+ lock_block<mt_policy> lock(this);
+ const_iterator it = m_senders.begin();
+ const_iterator itEnd = m_senders.end();
+
+ while(it != itEnd)
+ {
+ (*it)->slot_disconnect(this);
+ ++it;
+ }
+
+ m_senders.erase(m_senders.begin(), m_senders.end());
+ }
+
+ private:
+ sender_set m_senders;
+ };
+
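+	// Signal base classes, one per argument count. Each owns its list of
+	// connections and implements copying, disconnection and the slot
+	// bookkeeping shared by the public signalN classes further below.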
+ template<class mt_policy>
+ class _signal_base0 : public _signal_base<mt_policy>
+ {
+ public:
+ typedef std::list<_connection_base0<mt_policy> *> connections_list;
+
+ _signal_base0()
+ {
+ ;
+ }
+
+ _signal_base0(const _signal_base0& s)
+ : _signal_base<mt_policy>(s)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = s.m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = s.m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_connect(this);
+ m_connected_slots.push_back((*it)->clone());
+
+ ++it;
+ }
+ }
+
+ ~_signal_base0()
+ {
+ disconnect_all();
+ }
+
+ bool is_empty()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ return it == itEnd;
+ }
+
+ void disconnect_all()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_disconnect(this);
+ delete *it;
+
+ ++it;
+ }
+
+ m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
+ }
+
+#ifdef _DEBUG
+ bool connected(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+ if ((*it)->getdest() == pclass)
+ return true;
+ it = itNext;
+ }
+ return false;
+ }
+#endif
+
+ void disconnect(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == pclass)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ pclass->signal_disconnect(this);
+ return;
+ }
+
+ ++it;
+ }
+ }
+
+ void slot_disconnect(has_slots_interface* pslot)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ typename connections_list::iterator itNext = it;
+ ++itNext;
+
+ if((*it)->getdest() == pslot)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ }
+
+ it = itNext;
+ }
+ }
+
+ void slot_duplicate(const has_slots_interface* oldtarget, has_slots_interface* newtarget)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == oldtarget)
+ {
+ m_connected_slots.push_back((*it)->duplicate(newtarget));
+ }
+
+ ++it;
+ }
+ }
+
+ protected:
+ connections_list m_connected_slots;
+ };
+
+ template<class arg1_type, class mt_policy>
+ class _signal_base1 : public _signal_base<mt_policy>
+ {
+ public:
+ typedef std::list<_connection_base1<arg1_type, mt_policy> *> connections_list;
+
+ _signal_base1()
+ {
+ ;
+ }
+
+ _signal_base1(const _signal_base1<arg1_type, mt_policy>& s)
+ : _signal_base<mt_policy>(s)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = s.m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = s.m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_connect(this);
+ m_connected_slots.push_back((*it)->clone());
+
+ ++it;
+ }
+ }
+
+ void slot_duplicate(const has_slots_interface* oldtarget, has_slots_interface* newtarget)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == oldtarget)
+ {
+ m_connected_slots.push_back((*it)->duplicate(newtarget));
+ }
+
+ ++it;
+ }
+ }
+
+ ~_signal_base1()
+ {
+ disconnect_all();
+ }
+
+ bool is_empty()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ return it == itEnd;
+ }
+
+ void disconnect_all()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_disconnect(this);
+ delete *it;
+
+ ++it;
+ }
+
+ m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
+ }
+
+#ifdef _DEBUG
+ bool connected(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+ if ((*it)->getdest() == pclass)
+ return true;
+ it = itNext;
+ }
+ return false;
+ }
+#endif
+
+ void disconnect(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == pclass)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ pclass->signal_disconnect(this);
+ return;
+ }
+
+ ++it;
+ }
+ }
+
+ void slot_disconnect(has_slots_interface* pslot)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ typename connections_list::iterator itNext = it;
+ ++itNext;
+
+ if((*it)->getdest() == pslot)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ }
+
+ it = itNext;
+ }
+ }
+
+
+ protected:
+ connections_list m_connected_slots;
+ };
+
+ template<class arg1_type, class arg2_type, class mt_policy>
+ class _signal_base2 : public _signal_base<mt_policy>
+ {
+ public:
+ typedef std::list<_connection_base2<arg1_type, arg2_type, mt_policy> *>
+ connections_list;
+
+ _signal_base2()
+ {
+ ;
+ }
+
+ _signal_base2(const _signal_base2<arg1_type, arg2_type, mt_policy>& s)
+ : _signal_base<mt_policy>(s)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = s.m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = s.m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_connect(this);
+ m_connected_slots.push_back((*it)->clone());
+
+ ++it;
+ }
+ }
+
+ void slot_duplicate(const has_slots_interface* oldtarget, has_slots_interface* newtarget)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == oldtarget)
+ {
+ m_connected_slots.push_back((*it)->duplicate(newtarget));
+ }
+
+ ++it;
+ }
+ }
+
+ ~_signal_base2()
+ {
+ disconnect_all();
+ }
+
+ bool is_empty()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ return it == itEnd;
+ }
+
+ void disconnect_all()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_disconnect(this);
+ delete *it;
+
+ ++it;
+ }
+
+ m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
+ }
+
+#ifdef _DEBUG
+ bool connected(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+ if ((*it)->getdest() == pclass)
+ return true;
+ it = itNext;
+ }
+ return false;
+ }
+#endif
+
+ void disconnect(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == pclass)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ pclass->signal_disconnect(this);
+ return;
+ }
+
+ ++it;
+ }
+ }
+
+ void slot_disconnect(has_slots_interface* pslot)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ typename connections_list::iterator itNext = it;
+ ++itNext;
+
+ if((*it)->getdest() == pslot)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ }
+
+ it = itNext;
+ }
+ }
+
+ protected:
+ connections_list m_connected_slots;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class mt_policy>
+ class _signal_base3 : public _signal_base<mt_policy>
+ {
+ public:
+ typedef std::list<_connection_base3<arg1_type, arg2_type, arg3_type, mt_policy> *>
+ connections_list;
+
+ _signal_base3()
+ {
+ ;
+ }
+
+ _signal_base3(const _signal_base3<arg1_type, arg2_type, arg3_type, mt_policy>& s)
+ : _signal_base<mt_policy>(s)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = s.m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = s.m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_connect(this);
+ m_connected_slots.push_back((*it)->clone());
+
+ ++it;
+ }
+ }
+
+ void slot_duplicate(const has_slots_interface* oldtarget, has_slots_interface* newtarget)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == oldtarget)
+ {
+ m_connected_slots.push_back((*it)->duplicate(newtarget));
+ }
+
+ ++it;
+ }
+ }
+
+ ~_signal_base3()
+ {
+ disconnect_all();
+ }
+
+ bool is_empty()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ return it == itEnd;
+ }
+
+ void disconnect_all()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_disconnect(this);
+ delete *it;
+
+ ++it;
+ }
+
+ m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
+ }
+
+#ifdef _DEBUG
+ bool connected(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+ if ((*it)->getdest() == pclass)
+ return true;
+ it = itNext;
+ }
+ return false;
+ }
+#endif
+
+ void disconnect(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == pclass)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ pclass->signal_disconnect(this);
+ return;
+ }
+
+ ++it;
+ }
+ }
+
+ void slot_disconnect(has_slots_interface* pslot)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ typename connections_list::iterator itNext = it;
+ ++itNext;
+
+ if((*it)->getdest() == pslot)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ }
+
+ it = itNext;
+ }
+ }
+
+ protected:
+ connections_list m_connected_slots;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type, class mt_policy>
+ class _signal_base4 : public _signal_base<mt_policy>
+ {
+ public:
+ typedef std::list<_connection_base4<arg1_type, arg2_type, arg3_type,
+ arg4_type, mt_policy> *> connections_list;
+
+ _signal_base4()
+ {
+ ;
+ }
+
+ _signal_base4(const _signal_base4<arg1_type, arg2_type, arg3_type, arg4_type, mt_policy>& s)
+ : _signal_base<mt_policy>(s)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = s.m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = s.m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_connect(this);
+ m_connected_slots.push_back((*it)->clone());
+
+ ++it;
+ }
+ }
+
+ void slot_duplicate(const has_slots_interface* oldtarget, has_slots_interface* newtarget)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == oldtarget)
+ {
+ m_connected_slots.push_back((*it)->duplicate(newtarget));
+ }
+
+ ++it;
+ }
+ }
+
+ ~_signal_base4()
+ {
+ disconnect_all();
+ }
+
+ bool is_empty()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ return it == itEnd;
+ }
+
+ void disconnect_all()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_disconnect(this);
+ delete *it;
+
+ ++it;
+ }
+
+ m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
+ }
+
+#ifdef _DEBUG
+ bool connected(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+ if ((*it)->getdest() == pclass)
+ return true;
+ it = itNext;
+ }
+ return false;
+ }
+#endif
+
+ void disconnect(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == pclass)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ pclass->signal_disconnect(this);
+ return;
+ }
+
+ ++it;
+ }
+ }
+
+ void slot_disconnect(has_slots_interface* pslot)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ typename connections_list::iterator itNext = it;
+ ++itNext;
+
+ if((*it)->getdest() == pslot)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ }
+
+ it = itNext;
+ }
+ }
+
+ protected:
+ connections_list m_connected_slots;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class mt_policy>
+ class _signal_base5 : public _signal_base<mt_policy>
+ {
+ public:
+ typedef std::list<_connection_base5<arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, mt_policy> *> connections_list;
+
+ _signal_base5()
+ {
+ ;
+ }
+
+ _signal_base5(const _signal_base5<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, mt_policy>& s)
+ : _signal_base<mt_policy>(s)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = s.m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = s.m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_connect(this);
+ m_connected_slots.push_back((*it)->clone());
+
+ ++it;
+ }
+ }
+
+ void slot_duplicate(const has_slots_interface* oldtarget, has_slots_interface* newtarget)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == oldtarget)
+ {
+ m_connected_slots.push_back((*it)->duplicate(newtarget));
+ }
+
+ ++it;
+ }
+ }
+
+ ~_signal_base5()
+ {
+ disconnect_all();
+ }
+
+ bool is_empty()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ return it == itEnd;
+ }
+
+ void disconnect_all()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_disconnect(this);
+ delete *it;
+
+ ++it;
+ }
+
+ m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
+ }
+
+#ifdef _DEBUG
+ bool connected(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+ if ((*it)->getdest() == pclass)
+ return true;
+ it = itNext;
+ }
+ return false;
+ }
+#endif
+
+ void disconnect(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == pclass)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ pclass->signal_disconnect(this);
+ return;
+ }
+
+ ++it;
+ }
+ }
+
+ void slot_disconnect(has_slots_interface* pslot)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ typename connections_list::iterator itNext = it;
+ ++itNext;
+
+ if((*it)->getdest() == pslot)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ }
+
+ it = itNext;
+ }
+ }
+
+ protected:
+ connections_list m_connected_slots;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class arg6_type, class mt_policy>
+ class _signal_base6 : public _signal_base<mt_policy>
+ {
+ public:
+ typedef std::list<_connection_base6<arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, arg6_type, mt_policy> *> connections_list;
+
+ _signal_base6()
+ {
+ ;
+ }
+
+ _signal_base6(const _signal_base6<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, mt_policy>& s)
+ : _signal_base<mt_policy>(s)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = s.m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = s.m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_connect(this);
+ m_connected_slots.push_back((*it)->clone());
+
+ ++it;
+ }
+ }
+
+ void slot_duplicate(const has_slots_interface* oldtarget, has_slots_interface* newtarget)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == oldtarget)
+ {
+ m_connected_slots.push_back((*it)->duplicate(newtarget));
+ }
+
+ ++it;
+ }
+ }
+
+ ~_signal_base6()
+ {
+ disconnect_all();
+ }
+
+ bool is_empty()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ return it == itEnd;
+ }
+
+ void disconnect_all()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_disconnect(this);
+ delete *it;
+
+ ++it;
+ }
+
+ m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
+ }
+
+#ifdef _DEBUG
+ bool connected(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+ if ((*it)->getdest() == pclass)
+ return true;
+ it = itNext;
+ }
+ return false;
+ }
+#endif
+
+ void disconnect(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == pclass)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ pclass->signal_disconnect(this);
+ return;
+ }
+
+ ++it;
+ }
+ }
+
+ void slot_disconnect(has_slots_interface* pslot)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ typename connections_list::iterator itNext = it;
+ ++itNext;
+
+ if((*it)->getdest() == pslot)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ }
+
+ it = itNext;
+ }
+ }
+
+ protected:
+ connections_list m_connected_slots;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class arg6_type, class arg7_type, class mt_policy>
+ class _signal_base7 : public _signal_base<mt_policy>
+ {
+ public:
+ typedef std::list<_connection_base7<arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, arg6_type, arg7_type, mt_policy> *> connections_list;
+
+ _signal_base7()
+ {
+ ;
+ }
+
+ _signal_base7(const _signal_base7<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy>& s)
+ : _signal_base<mt_policy>(s)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = s.m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = s.m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_connect(this);
+ m_connected_slots.push_back((*it)->clone());
+
+ ++it;
+ }
+ }
+
+ void slot_duplicate(const has_slots_interface* oldtarget, has_slots_interface* newtarget)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == oldtarget)
+ {
+ m_connected_slots.push_back((*it)->duplicate(newtarget));
+ }
+
+ ++it;
+ }
+ }
+
+ ~_signal_base7()
+ {
+ disconnect_all();
+ }
+
+ bool is_empty()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ return it == itEnd;
+ }
+
+ void disconnect_all()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_disconnect(this);
+ delete *it;
+
+ ++it;
+ }
+
+ m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
+ }
+
+#ifdef _DEBUG
+ bool connected(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+ if ((*it)->getdest() == pclass)
+ return true;
+ it = itNext;
+ }
+ return false;
+ }
+#endif
+
+ void disconnect(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == pclass)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ pclass->signal_disconnect(this);
+ return;
+ }
+
+ ++it;
+ }
+ }
+
+ void slot_disconnect(has_slots_interface* pslot)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ typename connections_list::iterator itNext = it;
+ ++itNext;
+
+ if((*it)->getdest() == pslot)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ }
+
+ it = itNext;
+ }
+ }
+
+ protected:
+ connections_list m_connected_slots;
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class arg6_type, class arg7_type, class arg8_type, class mt_policy>
+ class _signal_base8 : public _signal_base<mt_policy>
+ {
+ public:
+ typedef std::list<_connection_base8<arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, arg6_type, arg7_type, arg8_type, mt_policy> *>
+ connections_list;
+
+ _signal_base8()
+ {
+ ;
+ }
+
+ _signal_base8(const _signal_base8<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>& s)
+ : _signal_base<mt_policy>(s)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = s.m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = s.m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_connect(this);
+ m_connected_slots.push_back((*it)->clone());
+
+ ++it;
+ }
+ }
+
+ void slot_duplicate(const has_slots_interface* oldtarget, has_slots_interface* newtarget)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == oldtarget)
+ {
+ m_connected_slots.push_back((*it)->duplicate(newtarget));
+ }
+
+ ++it;
+ }
+ }
+
+ ~_signal_base8()
+ {
+ disconnect_all();
+ }
+
+ bool is_empty()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ return it == itEnd;
+ }
+
+ void disconnect_all()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ (*it)->getdest()->signal_disconnect(this);
+ delete *it;
+
+ ++it;
+ }
+
+ m_connected_slots.erase(m_connected_slots.begin(), m_connected_slots.end());
+ }
+
+#ifdef _DEBUG
+ bool connected(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+ if ((*it)->getdest() == pclass)
+ return true;
+ it = itNext;
+ }
+ return false;
+ }
+#endif
+
+ void disconnect(has_slots_interface* pclass)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ if((*it)->getdest() == pclass)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ pclass->signal_disconnect(this);
+ return;
+ }
+
+ ++it;
+ }
+ }
+
+ void slot_disconnect(has_slots_interface* pslot)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::iterator it = m_connected_slots.begin();
+ typename connections_list::iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ typename connections_list::iterator itNext = it;
+ ++itNext;
+
+ if((*it)->getdest() == pslot)
+ {
+ delete *it;
+ m_connected_slots.erase(it);
+ }
+
+ it = itNext;
+ }
+ }
+
+ protected:
+ connections_list m_connected_slots;
+ };
+
+
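+	// Concrete connection objects: bind a destination object of dest_type to a
+	// member function with the matching argument list and invoke it on emit().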
+ template<class dest_type, class mt_policy>
+ class _connection0 : public _connection_base0<mt_policy>
+ {
+ public:
+ _connection0()
+ {
+ m_pobject = NULL;
+ m_pmemfun = NULL;
+ }
+
+ _connection0(dest_type* pobject, void (dest_type::*pmemfun)())
+ {
+ m_pobject = pobject;
+ m_pmemfun = pmemfun;
+ }
+
+ virtual ~_connection0()
+ {
+ }
+
+ virtual _connection_base0<mt_policy>* clone()
+ {
+ return new _connection0<dest_type, mt_policy>(*this);
+ }
+
+ virtual _connection_base0<mt_policy>* duplicate(has_slots_interface* pnewdest)
+ {
+ return new _connection0<dest_type, mt_policy>((dest_type *)pnewdest, m_pmemfun);
+ }
+
+ virtual void emit()
+ {
+ (m_pobject->*m_pmemfun)();
+ }
+
+ virtual has_slots_interface* getdest() const
+ {
+ return m_pobject;
+ }
+
+ private:
+ dest_type* m_pobject;
+ void (dest_type::* m_pmemfun)();
+ };
+
+ template<class dest_type, class arg1_type, class mt_policy>
+ class _connection1 : public _connection_base1<arg1_type, mt_policy>
+ {
+ public:
+ _connection1()
+ {
+ m_pobject = NULL;
+ m_pmemfun = NULL;
+ }
+
+ _connection1(dest_type* pobject, void (dest_type::*pmemfun)(arg1_type))
+ {
+ m_pobject = pobject;
+ m_pmemfun = pmemfun;
+ }
+
+ virtual ~_connection1()
+ {
+ }
+
+ virtual _connection_base1<arg1_type, mt_policy>* clone()
+ {
+ return new _connection1<dest_type, arg1_type, mt_policy>(*this);
+ }
+
+ virtual _connection_base1<arg1_type, mt_policy>* duplicate(has_slots_interface* pnewdest)
+ {
+ return new _connection1<dest_type, arg1_type, mt_policy>((dest_type *)pnewdest, m_pmemfun);
+ }
+
+ virtual void emit(arg1_type a1)
+ {
+ (m_pobject->*m_pmemfun)(a1);
+ }
+
+ virtual has_slots_interface* getdest() const
+ {
+ return m_pobject;
+ }
+
+ private:
+ dest_type* m_pobject;
+ void (dest_type::* m_pmemfun)(arg1_type);
+ };
+
+ template<class dest_type, class arg1_type, class arg2_type, class mt_policy>
+ class _connection2 : public _connection_base2<arg1_type, arg2_type, mt_policy>
+ {
+ public:
+ _connection2()
+ {
+ m_pobject = NULL;
+ m_pmemfun = NULL;
+ }
+
+ _connection2(dest_type* pobject, void (dest_type::*pmemfun)(arg1_type,
+ arg2_type))
+ {
+ m_pobject = pobject;
+ m_pmemfun = pmemfun;
+ }
+
+ virtual ~_connection2()
+ {
+ }
+
+ virtual _connection_base2<arg1_type, arg2_type, mt_policy>* clone()
+ {
+ return new _connection2<dest_type, arg1_type, arg2_type, mt_policy>(*this);
+ }
+
+ virtual _connection_base2<arg1_type, arg2_type, mt_policy>* duplicate(has_slots_interface* pnewdest)
+ {
+ return new _connection2<dest_type, arg1_type, arg2_type, mt_policy>((dest_type *)pnewdest, m_pmemfun);
+ }
+
+ virtual void emit(arg1_type a1, arg2_type a2)
+ {
+ (m_pobject->*m_pmemfun)(a1, a2);
+ }
+
+ virtual has_slots_interface* getdest() const
+ {
+ return m_pobject;
+ }
+
+ private:
+ dest_type* m_pobject;
+ void (dest_type::* m_pmemfun)(arg1_type, arg2_type);
+ };
+
+ template<class dest_type, class arg1_type, class arg2_type, class arg3_type, class mt_policy>
+ class _connection3 : public _connection_base3<arg1_type, arg2_type, arg3_type, mt_policy>
+ {
+ public:
+ _connection3()
+ {
+ m_pobject = NULL;
+ m_pmemfun = NULL;
+ }
+
+ _connection3(dest_type* pobject, void (dest_type::*pmemfun)(arg1_type,
+ arg2_type, arg3_type))
+ {
+ m_pobject = pobject;
+ m_pmemfun = pmemfun;
+ }
+
+ virtual ~_connection3()
+ {
+ }
+
+ virtual _connection_base3<arg1_type, arg2_type, arg3_type, mt_policy>* clone()
+ {
+ return new _connection3<dest_type, arg1_type, arg2_type, arg3_type, mt_policy>(*this);
+ }
+
+ virtual _connection_base3<arg1_type, arg2_type, arg3_type, mt_policy>* duplicate(has_slots_interface* pnewdest)
+ {
+ return new _connection3<dest_type, arg1_type, arg2_type, arg3_type, mt_policy>((dest_type *)pnewdest, m_pmemfun);
+ }
+
+ virtual void emit(arg1_type a1, arg2_type a2, arg3_type a3)
+ {
+ (m_pobject->*m_pmemfun)(a1, a2, a3);
+ }
+
+ virtual has_slots_interface* getdest() const
+ {
+ return m_pobject;
+ }
+
+ private:
+ dest_type* m_pobject;
+ void (dest_type::* m_pmemfun)(arg1_type, arg2_type, arg3_type);
+ };
+
+ template<class dest_type, class arg1_type, class arg2_type, class arg3_type,
+ class arg4_type, class mt_policy>
+ class _connection4 : public _connection_base4<arg1_type, arg2_type,
+ arg3_type, arg4_type, mt_policy>
+ {
+ public:
+ _connection4()
+ {
+ m_pobject = NULL;
+ m_pmemfun = NULL;
+ }
+
+ _connection4(dest_type* pobject, void (dest_type::*pmemfun)(arg1_type,
+ arg2_type, arg3_type, arg4_type))
+ {
+ m_pobject = pobject;
+ m_pmemfun = pmemfun;
+ }
+
+ virtual ~_connection4()
+ {
+ }
+
+ virtual _connection_base4<arg1_type, arg2_type, arg3_type, arg4_type, mt_policy>* clone()
+ {
+ return new _connection4<dest_type, arg1_type, arg2_type, arg3_type, arg4_type, mt_policy>(*this);
+ }
+
+ virtual _connection_base4<arg1_type, arg2_type, arg3_type, arg4_type, mt_policy>* duplicate(has_slots_interface* pnewdest)
+ {
+ return new _connection4<dest_type, arg1_type, arg2_type, arg3_type, arg4_type, mt_policy>((dest_type *)pnewdest, m_pmemfun);
+ }
+
+ virtual void emit(arg1_type a1, arg2_type a2, arg3_type a3,
+ arg4_type a4)
+ {
+ (m_pobject->*m_pmemfun)(a1, a2, a3, a4);
+ }
+
+ virtual has_slots_interface* getdest() const
+ {
+ return m_pobject;
+ }
+
+ private:
+ dest_type* m_pobject;
+ void (dest_type::* m_pmemfun)(arg1_type, arg2_type, arg3_type,
+ arg4_type);
+ };
+
+ template<class dest_type, class arg1_type, class arg2_type, class arg3_type,
+ class arg4_type, class arg5_type, class mt_policy>
+ class _connection5 : public _connection_base5<arg1_type, arg2_type,
+ arg3_type, arg4_type, arg5_type, mt_policy>
+ {
+ public:
+ _connection5()
+ {
+ m_pobject = NULL;
+ m_pmemfun = NULL;
+ }
+
+ _connection5(dest_type* pobject, void (dest_type::*pmemfun)(arg1_type,
+ arg2_type, arg3_type, arg4_type, arg5_type))
+ {
+ m_pobject = pobject;
+ m_pmemfun = pmemfun;
+ }
+
+ virtual ~_connection5()
+ {
+ }
+
+ virtual _connection_base5<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, mt_policy>* clone()
+ {
+ return new _connection5<dest_type, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, mt_policy>(*this);
+ }
+
+ virtual _connection_base5<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, mt_policy>* duplicate(has_slots_interface* pnewdest)
+ {
+ return new _connection5<dest_type, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, mt_policy>((dest_type *)pnewdest, m_pmemfun);
+ }
+
+ virtual void emit(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5)
+ {
+ (m_pobject->*m_pmemfun)(a1, a2, a3, a4, a5);
+ }
+
+ virtual has_slots_interface* getdest() const
+ {
+ return m_pobject;
+ }
+
+ private:
+ dest_type* m_pobject;
+ void (dest_type::* m_pmemfun)(arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type);
+ };
+
+ template<class dest_type, class arg1_type, class arg2_type, class arg3_type,
+ class arg4_type, class arg5_type, class arg6_type, class mt_policy>
+ class _connection6 : public _connection_base6<arg1_type, arg2_type,
+ arg3_type, arg4_type, arg5_type, arg6_type, mt_policy>
+ {
+ public:
+ _connection6()
+ {
+ m_pobject = NULL;
+ m_pmemfun = NULL;
+ }
+
+ _connection6(dest_type* pobject, void (dest_type::*pmemfun)(arg1_type,
+ arg2_type, arg3_type, arg4_type, arg5_type, arg6_type))
+ {
+ m_pobject = pobject;
+ m_pmemfun = pmemfun;
+ }
+
+ virtual ~_connection6()
+ {
+ }
+
+ virtual _connection_base6<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, mt_policy>* clone()
+ {
+ return new _connection6<dest_type, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, mt_policy>(*this);
+ }
+
+ virtual _connection_base6<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, mt_policy>* duplicate(has_slots_interface* pnewdest)
+ {
+ return new _connection6<dest_type, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, mt_policy>((dest_type *)pnewdest, m_pmemfun);
+ }
+
+ virtual void emit(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5, arg6_type a6)
+ {
+ (m_pobject->*m_pmemfun)(a1, a2, a3, a4, a5, a6);
+ }
+
+ virtual has_slots_interface* getdest() const
+ {
+ return m_pobject;
+ }
+
+ private:
+ dest_type* m_pobject;
+ void (dest_type::* m_pmemfun)(arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type);
+ };
+
+ template<class dest_type, class arg1_type, class arg2_type, class arg3_type,
+ class arg4_type, class arg5_type, class arg6_type, class arg7_type, class mt_policy>
+ class _connection7 : public _connection_base7<arg1_type, arg2_type,
+ arg3_type, arg4_type, arg5_type, arg6_type, arg7_type, mt_policy>
+ {
+ public:
+ _connection7()
+ {
+ m_pobject = NULL;
+ m_pmemfun = NULL;
+ }
+
+ _connection7(dest_type* pobject, void (dest_type::*pmemfun)(arg1_type,
+ arg2_type, arg3_type, arg4_type, arg5_type, arg6_type, arg7_type))
+ {
+ m_pobject = pobject;
+ m_pmemfun = pmemfun;
+ }
+
+ virtual ~_connection7()
+ {
+ }
+
+ virtual _connection_base7<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy>* clone()
+ {
+ return new _connection7<dest_type, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy>(*this);
+ }
+
+ virtual _connection_base7<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy>* duplicate(has_slots_interface* pnewdest)
+ {
+ return new _connection7<dest_type, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy>((dest_type *)pnewdest, m_pmemfun);
+ }
+
+ virtual void emit(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5, arg6_type a6, arg7_type a7)
+ {
+ (m_pobject->*m_pmemfun)(a1, a2, a3, a4, a5, a6, a7);
+ }
+
+ virtual has_slots_interface* getdest() const
+ {
+ return m_pobject;
+ }
+
+ private:
+ dest_type* m_pobject;
+ void (dest_type::* m_pmemfun)(arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type);
+ };
+
+ template<class dest_type, class arg1_type, class arg2_type, class arg3_type,
+ class arg4_type, class arg5_type, class arg6_type, class arg7_type,
+ class arg8_type, class mt_policy>
+ class _connection8 : public _connection_base8<arg1_type, arg2_type,
+ arg3_type, arg4_type, arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>
+ {
+ public:
+ _connection8()
+ {
+ m_pobject = NULL;
+ m_pmemfun = NULL;
+ }
+
+ _connection8(dest_type* pobject, void (dest_type::*pmemfun)(arg1_type,
+ arg2_type, arg3_type, arg4_type, arg5_type, arg6_type,
+ arg7_type, arg8_type))
+ {
+ m_pobject = pobject;
+ m_pmemfun = pmemfun;
+ }
+
+ virtual ~_connection8()
+ {
+ }
+
+ virtual _connection_base8<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>* clone()
+ {
+ return new _connection8<dest_type, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>(*this);
+ }
+
+ virtual _connection_base8<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>* duplicate(has_slots_interface* pnewdest)
+ {
+ return new _connection8<dest_type, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>((dest_type *)pnewdest, m_pmemfun);
+ }
+
+ virtual void emit(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5, arg6_type a6, arg7_type a7, arg8_type a8)
+ {
+ (m_pobject->*m_pmemfun)(a1, a2, a3, a4, a5, a6, a7, a8);
+ }
+
+ virtual has_slots_interface* getdest() const
+ {
+ return m_pobject;
+ }
+
+ private:
+ dest_type* m_pobject;
+ void (dest_type::* m_pmemfun)(arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type);
+ };
+
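+	// Public signal classes, one per argument count. connect() registers a
+	// member function of a has_slots-derived object; emit() and operator()
+	// call every currently connected slot with the supplied arguments.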
+ template<class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class signal0 : public _signal_base0<mt_policy>
+ {
+ public:
+ typedef _signal_base0<mt_policy> base;
+ typedef typename base::connections_list connections_list;
+ using base::m_connected_slots;
+
+ signal0()
+ {
+ ;
+ }
+
+ signal0(const signal0<mt_policy>& s)
+ : _signal_base0<mt_policy>(s)
+ {
+ ;
+ }
+
+ template<class desttype>
+ void connect(desttype* pclass, void (desttype::*pmemfun)())
+ {
+ lock_block<mt_policy> lock(this);
+ _connection0<desttype, mt_policy>* conn =
+ new _connection0<desttype, mt_policy>(pclass, pmemfun);
+ m_connected_slots.push_back(conn);
+ pclass->signal_connect(this);
+ }
+
+ void emit()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit();
+
+ it = itNext;
+ }
+ }
+
+ void operator()()
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit();
+
+ it = itNext;
+ }
+ }
+ };
+
+ template<class arg1_type, class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class signal1 : public _signal_base1<arg1_type, mt_policy>
+ {
+ public:
+ typedef _signal_base1<arg1_type, mt_policy> base;
+ typedef typename base::connections_list connections_list;
+ using base::m_connected_slots;
+
+ signal1()
+ {
+ ;
+ }
+
+ signal1(const signal1<arg1_type, mt_policy>& s)
+ : _signal_base1<arg1_type, mt_policy>(s)
+ {
+ ;
+ }
+
+ template<class desttype>
+ void connect(desttype* pclass, void (desttype::*pmemfun)(arg1_type))
+ {
+ lock_block<mt_policy> lock(this);
+ _connection1<desttype, arg1_type, mt_policy>* conn =
+ new _connection1<desttype, arg1_type, mt_policy>(pclass, pmemfun);
+ m_connected_slots.push_back(conn);
+ pclass->signal_connect(this);
+ }
+
+ void emit(arg1_type a1)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1);
+
+ it = itNext;
+ }
+ }
+
+ void operator()(arg1_type a1)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1);
+
+ it = itNext;
+ }
+ }
+ };
+
+ template<class arg1_type, class arg2_type, class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class signal2 : public _signal_base2<arg1_type, arg2_type, mt_policy>
+ {
+ public:
+ typedef _signal_base2<arg1_type, arg2_type, mt_policy> base;
+ typedef typename base::connections_list connections_list;
+ using base::m_connected_slots;
+
+ signal2()
+ {
+ ;
+ }
+
+ signal2(const signal2<arg1_type, arg2_type, mt_policy>& s)
+ : _signal_base2<arg1_type, arg2_type, mt_policy>(s)
+ {
+ ;
+ }
+
+ template<class desttype>
+ void connect(desttype* pclass, void (desttype::*pmemfun)(arg1_type,
+ arg2_type))
+ {
+ lock_block<mt_policy> lock(this);
+ _connection2<desttype, arg1_type, arg2_type, mt_policy>* conn = new
+ _connection2<desttype, arg1_type, arg2_type, mt_policy>(pclass, pmemfun);
+ m_connected_slots.push_back(conn);
+ pclass->signal_connect(this);
+ }
+
+ void emit(arg1_type a1, arg2_type a2)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2);
+
+ it = itNext;
+ }
+ }
+
+ void operator()(arg1_type a1, arg2_type a2)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2);
+
+ it = itNext;
+ }
+ }
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class signal3 : public _signal_base3<arg1_type, arg2_type, arg3_type, mt_policy>
+ {
+ public:
+ typedef _signal_base3<arg1_type, arg2_type, arg3_type, mt_policy> base;
+ typedef typename base::connections_list connections_list;
+ using base::m_connected_slots;
+
+ signal3()
+ {
+ ;
+ }
+
+ signal3(const signal3<arg1_type, arg2_type, arg3_type, mt_policy>& s)
+ : _signal_base3<arg1_type, arg2_type, arg3_type, mt_policy>(s)
+ {
+ ;
+ }
+
+ template<class desttype>
+ void connect(desttype* pclass, void (desttype::*pmemfun)(arg1_type,
+ arg2_type, arg3_type))
+ {
+ lock_block<mt_policy> lock(this);
+ _connection3<desttype, arg1_type, arg2_type, arg3_type, mt_policy>* conn =
+ new _connection3<desttype, arg1_type, arg2_type, arg3_type, mt_policy>(pclass,
+ pmemfun);
+ m_connected_slots.push_back(conn);
+ pclass->signal_connect(this);
+ }
+
+ void emit(arg1_type a1, arg2_type a2, arg3_type a3)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3);
+
+ it = itNext;
+ }
+ }
+
+ void operator()(arg1_type a1, arg2_type a2, arg3_type a3)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3);
+
+ it = itNext;
+ }
+ }
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type, class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class signal4 : public _signal_base4<arg1_type, arg2_type, arg3_type,
+ arg4_type, mt_policy>
+ {
+ public:
+ typedef _signal_base4<arg1_type, arg2_type, arg3_type, arg4_type, mt_policy> base;
+ typedef typename base::connections_list connections_list;
+ using base::m_connected_slots;
+
+ signal4()
+ {
+ ;
+ }
+
+ signal4(const signal4<arg1_type, arg2_type, arg3_type, arg4_type, mt_policy>& s)
+ : _signal_base4<arg1_type, arg2_type, arg3_type, arg4_type, mt_policy>(s)
+ {
+ ;
+ }
+
+ template<class desttype>
+ void connect(desttype* pclass, void (desttype::*pmemfun)(arg1_type,
+ arg2_type, arg3_type, arg4_type))
+ {
+ lock_block<mt_policy> lock(this);
+ _connection4<desttype, arg1_type, arg2_type, arg3_type, arg4_type, mt_policy>*
+ conn = new _connection4<desttype, arg1_type, arg2_type, arg3_type,
+ arg4_type, mt_policy>(pclass, pmemfun);
+ m_connected_slots.push_back(conn);
+ pclass->signal_connect(this);
+ }
+
+ void emit(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3, a4);
+
+ it = itNext;
+ }
+ }
+
+ void operator()(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3, a4);
+
+ it = itNext;
+ }
+ }
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class signal5 : public _signal_base5<arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, mt_policy>
+ {
+ public:
+ typedef _signal_base5<arg1_type, arg2_type, arg3_type, arg4_type, arg5_type, mt_policy> base;
+ typedef typename base::connections_list connections_list;
+ using base::m_connected_slots;
+
+ signal5()
+ {
+ ;
+ }
+
+ signal5(const signal5<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, mt_policy>& s)
+ : _signal_base5<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, mt_policy>(s)
+ {
+ ;
+ }
+
+ template<class desttype>
+ void connect(desttype* pclass, void (desttype::*pmemfun)(arg1_type,
+ arg2_type, arg3_type, arg4_type, arg5_type))
+ {
+ lock_block<mt_policy> lock(this);
+ _connection5<desttype, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, mt_policy>* conn = new _connection5<desttype, arg1_type, arg2_type,
+ arg3_type, arg4_type, arg5_type, mt_policy>(pclass, pmemfun);
+ m_connected_slots.push_back(conn);
+ pclass->signal_connect(this);
+ }
+
+ void emit(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3, a4, a5);
+
+ it = itNext;
+ }
+ }
+
+ void operator()(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3, a4, a5);
+
+ it = itNext;
+ }
+ }
+ };
+
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class arg6_type, class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class signal6 : public _signal_base6<arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, arg6_type, mt_policy>
+ {
+ public:
+ typedef _signal_base6<arg1_type, arg2_type, arg3_type, arg4_type, arg5_type, arg6_type, mt_policy> base;
+ typedef typename base::connections_list connections_list;
+ using base::m_connected_slots;
+
+ signal6()
+ {
+ ;
+ }
+
+ signal6(const signal6<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, mt_policy>& s)
+ : _signal_base6<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, mt_policy>(s)
+ {
+ ;
+ }
+
+ template<class desttype>
+ void connect(desttype* pclass, void (desttype::*pmemfun)(arg1_type,
+ arg2_type, arg3_type, arg4_type, arg5_type, arg6_type))
+ {
+ lock_block<mt_policy> lock(this);
+ _connection6<desttype, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, mt_policy>* conn =
+ new _connection6<desttype, arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, arg6_type, mt_policy>(pclass, pmemfun);
+ m_connected_slots.push_back(conn);
+ pclass->signal_connect(this);
+ }
+
+ void emit(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5, arg6_type a6)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3, a4, a5, a6);
+
+ it = itNext;
+ }
+ }
+
+ void operator()(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5, arg6_type a6)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3, a4, a5, a6);
+
+ it = itNext;
+ }
+ }
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class arg6_type, class arg7_type, class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class signal7 : public _signal_base7<arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, arg6_type, arg7_type, mt_policy>
+ {
+ public:
+ typedef _signal_base7<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy> base;
+ typedef typename base::connections_list connections_list;
+ using base::m_connected_slots;
+
+ signal7()
+ {
+ ;
+ }
+
+ signal7(const signal7<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy>& s)
+ : _signal_base7<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy>(s)
+ {
+ ;
+ }
+
+ template<class desttype>
+ void connect(desttype* pclass, void (desttype::*pmemfun)(arg1_type,
+ arg2_type, arg3_type, arg4_type, arg5_type, arg6_type,
+ arg7_type))
+ {
+ lock_block<mt_policy> lock(this);
+ _connection7<desttype, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, mt_policy>* conn =
+ new _connection7<desttype, arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, arg6_type, arg7_type, mt_policy>(pclass, pmemfun);
+ m_connected_slots.push_back(conn);
+ pclass->signal_connect(this);
+ }
+
+ void emit(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5, arg6_type a6, arg7_type a7)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3, a4, a5, a6, a7);
+
+ it = itNext;
+ }
+ }
+
+ void operator()(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5, arg6_type a6, arg7_type a7)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3, a4, a5, a6, a7);
+
+ it = itNext;
+ }
+ }
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type, class arg4_type,
+ class arg5_type, class arg6_type, class arg7_type, class arg8_type, class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class signal8 : public _signal_base8<arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>
+ {
+ public:
+ typedef _signal_base8<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy> base;
+ typedef typename base::connections_list connections_list;
+ using base::m_connected_slots;
+
+ signal8()
+ {
+ ;
+ }
+
+ signal8(const signal8<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>& s)
+ : _signal_base8<arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>(s)
+ {
+ ;
+ }
+
+ template<class desttype>
+ void connect(desttype* pclass, void (desttype::*pmemfun)(arg1_type,
+ arg2_type, arg3_type, arg4_type, arg5_type, arg6_type,
+ arg7_type, arg8_type))
+ {
+ lock_block<mt_policy> lock(this);
+ _connection8<desttype, arg1_type, arg2_type, arg3_type, arg4_type,
+ arg5_type, arg6_type, arg7_type, arg8_type, mt_policy>* conn =
+ new _connection8<desttype, arg1_type, arg2_type, arg3_type,
+ arg4_type, arg5_type, arg6_type, arg7_type,
+ arg8_type, mt_policy>(pclass, pmemfun);
+ m_connected_slots.push_back(conn);
+ pclass->signal_connect(this);
+ }
+
+ void emit(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5, arg6_type a6, arg7_type a7, arg8_type a8)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3, a4, a5, a6, a7, a8);
+
+ it = itNext;
+ }
+ }
+
+ void operator()(arg1_type a1, arg2_type a2, arg3_type a3, arg4_type a4,
+ arg5_type a5, arg6_type a6, arg7_type a7, arg8_type a8)
+ {
+ lock_block<mt_policy> lock(this);
+ typename connections_list::const_iterator itNext, it = m_connected_slots.begin();
+ typename connections_list::const_iterator itEnd = m_connected_slots.end();
+
+ while(it != itEnd)
+ {
+ itNext = it;
+ ++itNext;
+
+ (*it)->emit(a1, a2, a3, a4, a5, a6, a7, a8);
+
+ it = itNext;
+ }
+ }
+ };
+
+} // namespace sigslot
+
+#endif // WEBRTC_BASE_SIGSLOT_H__
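
For reference, a minimal usage sketch of the signal/slot templates defined in the header above; it is not part of the patch, and the Receiver/OnValue names are illustrative assumptions. A receiver derives from sigslot::has_slots<> so connections are torn down automatically when either side is destroyed, connect() wires a member function to a signal, and the signal is raised through emit() or operator().

// Illustrative sketch only; assumes the sigslot.h added above.
#include <iostream>
#include <string>
#include "webrtc/base/sigslot.h"

class Receiver : public sigslot::has_slots<> {
 public:
  // The slot signature must match the signal's argument list.
  void OnValue(int id, const std::string& text) {
    std::cout << id << ": " << text << std::endl;
  }
};

void Example() {
  sigslot::signal2<int, const std::string&> SignalValue;
  Receiver receiver;
  // connect() stores a _connection2 and registers the signal with the
  // receiver's has_slots bookkeeping, so destroying either side
  // disconnects cleanly (see the DestructionOrder tests below).
  SignalValue.connect(&receiver, &Receiver::OnValue);
  SignalValue.emit(1, "hello");  // explicit emit
  SignalValue(2, "world");       // operator() forwards to the same slots
}
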
diff --git a/chromium/third_party/webrtc/base/sigslot_unittest.cc b/chromium/third_party/webrtc/base/sigslot_unittest.cc
new file mode 100644
index 00000000000..4d3041d1326
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sigslot_unittest.cc
@@ -0,0 +1,250 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/sigslot.h"
+
+#include "webrtc/base/gunit.h"
+
+// This function, when passed a has_slots or signalx, will break the build if
+// its threading requirement is not single-threaded.
+static bool TemplateIsST(const sigslot::single_threaded* p) {
+ return true;
+}
+// This function, when passed a has_slots or signalx, will break the build if
+// its threading requirement is not multi-threaded.
+static bool TemplateIsMT(const sigslot::multi_threaded_local* p) {
+ return true;
+}
+
+class SigslotDefault : public testing::Test, public sigslot::has_slots<> {
+ protected:
+ sigslot::signal0<> signal_;
+};
+
+template<class slot_policy = sigslot::single_threaded,
+ class signal_policy = sigslot::single_threaded>
+class SigslotReceiver : public sigslot::has_slots<slot_policy> {
+ public:
+ SigslotReceiver() : signal_(NULL), signal_count_(0) {
+ }
+ ~SigslotReceiver() {
+ }
+
+ void Connect(sigslot::signal0<signal_policy>* signal) {
+ if (!signal) return;
+ Disconnect();
+ signal_ = signal;
+ signal->connect(this,
+ &SigslotReceiver<slot_policy, signal_policy>::OnSignal);
+ }
+ void Disconnect() {
+ if (!signal_) return;
+ signal_->disconnect(this);
+ signal_ = NULL;
+ }
+ void OnSignal() {
+ ++signal_count_;
+ }
+ int signal_count() { return signal_count_; }
+
+ private:
+ sigslot::signal0<signal_policy>* signal_;
+ int signal_count_;
+};
+
+template<class slot_policy = sigslot::single_threaded,
+ class mt_signal_policy = sigslot::multi_threaded_local>
+class SigslotSlotTest : public testing::Test {
+ protected:
+ SigslotSlotTest() {
+ mt_signal_policy mt_policy;
+ TemplateIsMT(&mt_policy);
+ }
+
+ virtual void SetUp() {
+ Connect();
+ }
+ virtual void TearDown() {
+ Disconnect();
+ }
+
+ void Disconnect() {
+ st_receiver_.Disconnect();
+ mt_receiver_.Disconnect();
+ }
+
+ void Connect() {
+ st_receiver_.Connect(&SignalSTLoopback);
+ mt_receiver_.Connect(&SignalMTLoopback);
+ }
+
+ int st_loop_back_count() { return st_receiver_.signal_count(); }
+ int mt_loop_back_count() { return mt_receiver_.signal_count(); }
+
+ sigslot::signal0<> SignalSTLoopback;
+ SigslotReceiver<slot_policy, sigslot::single_threaded> st_receiver_;
+ sigslot::signal0<mt_signal_policy> SignalMTLoopback;
+ SigslotReceiver<slot_policy, mt_signal_policy> mt_receiver_;
+};
+
+typedef SigslotSlotTest<> SigslotSTSlotTest;
+typedef SigslotSlotTest<sigslot::multi_threaded_local,
+ sigslot::multi_threaded_local> SigslotMTSlotTest;
+
+class multi_threaded_local_fake : public sigslot::multi_threaded_local {
+ public:
+ multi_threaded_local_fake() : lock_count_(0), unlock_count_(0) {
+ }
+
+ virtual void lock() {
+ ++lock_count_;
+ }
+ virtual void unlock() {
+ ++unlock_count_;
+ }
+
+ int lock_count() { return lock_count_; }
+
+ bool InCriticalSection() { return lock_count_ != unlock_count_; }
+
+ protected:
+ int lock_count_;
+ int unlock_count_;
+};
+
+typedef SigslotSlotTest<multi_threaded_local_fake,
+ multi_threaded_local_fake> SigslotMTLockBase;
+
+class SigslotMTLockTest : public SigslotMTLockBase {
+ protected:
+ SigslotMTLockTest() {}
+
+ virtual void SetUp() {
+ EXPECT_EQ(0, SlotLockCount());
+ SigslotMTLockBase::SetUp();
+    // Connects to two signals (ST and MT). However, SlotLockCount() only
+    // returns the count for the MT signal (there are two separate
+    // SigslotReceivers, each keeping track of its own count).
+ EXPECT_EQ(1, SlotLockCount());
+ }
+ virtual void TearDown() {
+ const int previous_lock_count = SlotLockCount();
+ SigslotMTLockBase::TearDown();
+    // Disconnects from two signals. This is analogous to SetUp().
+ EXPECT_EQ(previous_lock_count + 1, SlotLockCount());
+ }
+
+ int SlotLockCount() { return mt_receiver_.lock_count(); }
+ void Signal() { SignalMTLoopback(); }
+ int SignalLockCount() { return SignalMTLoopback.lock_count(); }
+ int signal_count() { return mt_loop_back_count(); }
+ bool InCriticalSection() { return SignalMTLoopback.InCriticalSection(); }
+};
+
+// This test will always succeed. However, if the default template instantiation
+// changes from single-threaded to multi-threaded, it will break the build here.
+TEST_F(SigslotDefault, DefaultIsST) {
+ EXPECT_TRUE(TemplateIsST(this));
+ EXPECT_TRUE(TemplateIsST(&signal_));
+}
+
+// ST slot, ST signal
+TEST_F(SigslotSTSlotTest, STLoopbackTest) {
+ SignalSTLoopback();
+ EXPECT_EQ(1, st_loop_back_count());
+ EXPECT_EQ(0, mt_loop_back_count());
+}
+
+// ST slot, MT signal
+TEST_F(SigslotSTSlotTest, MTLoopbackTest) {
+ SignalMTLoopback();
+ EXPECT_EQ(1, mt_loop_back_count());
+ EXPECT_EQ(0, st_loop_back_count());
+}
+
+// ST slot, both ST and MT (separate) signal
+TEST_F(SigslotSTSlotTest, AllLoopbackTest) {
+ SignalSTLoopback();
+ SignalMTLoopback();
+ EXPECT_EQ(1, mt_loop_back_count());
+ EXPECT_EQ(1, st_loop_back_count());
+}
+
+TEST_F(SigslotSTSlotTest, Reconnect) {
+ SignalSTLoopback();
+ SignalMTLoopback();
+ EXPECT_EQ(1, mt_loop_back_count());
+ EXPECT_EQ(1, st_loop_back_count());
+ Disconnect();
+ SignalSTLoopback();
+ SignalMTLoopback();
+ EXPECT_EQ(1, mt_loop_back_count());
+ EXPECT_EQ(1, st_loop_back_count());
+ Connect();
+ SignalSTLoopback();
+ SignalMTLoopback();
+ EXPECT_EQ(2, mt_loop_back_count());
+ EXPECT_EQ(2, st_loop_back_count());
+}
+
+// MT slot, ST signal
+TEST_F(SigslotMTSlotTest, STLoopbackTest) {
+ SignalSTLoopback();
+ EXPECT_EQ(1, st_loop_back_count());
+ EXPECT_EQ(0, mt_loop_back_count());
+}
+
+// MT slot, MT signal
+TEST_F(SigslotMTSlotTest, MTLoopbackTest) {
+ SignalMTLoopback();
+ EXPECT_EQ(1, mt_loop_back_count());
+ EXPECT_EQ(0, st_loop_back_count());
+}
+
+// MT slot, both ST and MT (separate) signal
+TEST_F(SigslotMTSlotTest, AllLoopbackTest) {
+ SignalMTLoopback();
+ SignalSTLoopback();
+ EXPECT_EQ(1, st_loop_back_count());
+ EXPECT_EQ(1, mt_loop_back_count());
+}
+
+// Test that locks are acquired and released correctly.
+TEST_F(SigslotMTLockTest, LockSanity) {
+ const int lock_count = SignalLockCount();
+ Signal();
+ EXPECT_FALSE(InCriticalSection());
+ EXPECT_EQ(lock_count + 1, SignalLockCount());
+ EXPECT_EQ(1, signal_count());
+}
+
+// Destroy signal and slot in different orders.
+TEST(DestructionOrder, SignalFirst) {
+ sigslot::signal0<>* signal = new sigslot::signal0<>;
+ SigslotReceiver<>* receiver = new SigslotReceiver<>();
+ receiver->Connect(signal);
+ (*signal)();
+ EXPECT_EQ(1, receiver->signal_count());
+ delete signal;
+ delete receiver;
+}
+
+TEST(DestructionOrder, SlotFirst) {
+ sigslot::signal0<>* signal = new sigslot::signal0<>;
+ SigslotReceiver<>* receiver = new SigslotReceiver<>();
+ receiver->Connect(signal);
+ (*signal)();
+ EXPECT_EQ(1, receiver->signal_count());
+
+ delete receiver;
+ (*signal)();
+ delete signal;
+}
diff --git a/chromium/third_party/webrtc/base/sigslotrepeater.h b/chromium/third_party/webrtc/base/sigslotrepeater.h
new file mode 100644
index 00000000000..d1c891e026e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sigslotrepeater.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SIGSLOTREPEATER_H__
+#define WEBRTC_BASE_SIGSLOTREPEATER_H__
+
+// Repeaters are both signals and slots: they act as intermediate
+// pass-throughs between signals and slots that don't know about each other
+// (for modularity or encapsulation). This eliminates the need to declare a
+// signal handler whose sole purpose is to fire another signal. The repeater
+// connects to the originating signal using the 'repeat' method; when the
+// repeated signal fires, the repeater fires as well.
+
+#include "webrtc/base/sigslot.h"
+
+namespace sigslot {
+
+ template<class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class repeater0 : public signal0<mt_policy>,
+ public has_slots<mt_policy>
+ {
+ public:
+ typedef signal0<mt_policy> base_type;
+ typedef repeater0<mt_policy> this_type;
+
+ repeater0() { }
+ repeater0(const this_type& s) : base_type(s) { }
+
+ void reemit() { signal0<mt_policy>::emit(); }
+ void repeat(base_type &s) { s.connect(this, &this_type::reemit); }
+ void stop(base_type &s) { s.disconnect(this); }
+ };
+
+ template<class arg1_type, class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class repeater1 : public signal1<arg1_type, mt_policy>,
+ public has_slots<mt_policy>
+ {
+ public:
+ typedef signal1<arg1_type, mt_policy> base_type;
+ typedef repeater1<arg1_type, mt_policy> this_type;
+
+ repeater1() { }
+ repeater1(const this_type& s) : base_type(s) { }
+
+ void reemit(arg1_type a1) { signal1<arg1_type, mt_policy>::emit(a1); }
+ void repeat(base_type& s) { s.connect(this, &this_type::reemit); }
+ void stop(base_type &s) { s.disconnect(this); }
+ };
+
+ template<class arg1_type, class arg2_type, class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class repeater2 : public signal2<arg1_type, arg2_type, mt_policy>,
+ public has_slots<mt_policy>
+ {
+ public:
+ typedef signal2<arg1_type, arg2_type, mt_policy> base_type;
+ typedef repeater2<arg1_type, arg2_type, mt_policy> this_type;
+
+ repeater2() { }
+ repeater2(const this_type& s) : base_type(s) { }
+
+ void reemit(arg1_type a1, arg2_type a2) { signal2<arg1_type, arg2_type, mt_policy>::emit(a1,a2); }
+ void repeat(base_type& s) { s.connect(this, &this_type::reemit); }
+ void stop(base_type &s) { s.disconnect(this); }
+ };
+
+ template<class arg1_type, class arg2_type, class arg3_type,
+ class mt_policy = SIGSLOT_DEFAULT_MT_POLICY>
+ class repeater3 : public signal3<arg1_type, arg2_type, arg3_type, mt_policy>,
+ public has_slots<mt_policy>
+ {
+ public:
+ typedef signal3<arg1_type, arg2_type, arg3_type, mt_policy> base_type;
+ typedef repeater3<arg1_type, arg2_type, arg3_type, mt_policy> this_type;
+
+ repeater3() { }
+ repeater3(const this_type& s) : base_type(s) { }
+
+ void reemit(arg1_type a1, arg2_type a2, arg3_type a3) {
+ signal3<arg1_type, arg2_type, arg3_type, mt_policy>::emit(a1,a2,a3);
+ }
+ void repeat(base_type& s) { s.connect(this, &this_type::reemit); }
+ void stop(base_type &s) { s.disconnect(this); }
+ };
+
+} // namespace sigslot
+
+#endif // WEBRTC_BASE_SIGSLOTREPEATER_H__
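
For reference, a sketch of how a repeater might bridge a signal across a module boundary using the repeat() method described above; ModuleA and Wrapper are made-up names, and the wiring is an illustrative assumption rather than code from this patch.

// Illustrative sketch only; ModuleA and Wrapper are hypothetical.
#include "webrtc/base/sigslotrepeater.h"

struct ModuleA {
  sigslot::signal1<int> SignalProgress;  // inner signal, not exposed directly
};

struct Wrapper {
  sigslot::repeater1<int> SignalProgress;  // what outside code connects to

  explicit Wrapper(ModuleA* a) {
    // Forward every emission of the inner signal through the repeater,
    // with no hand-written pass-through slot.
    SignalProgress.repeat(a->SignalProgress);
  }
};

Callers then connect to Wrapper::SignalProgress exactly as they would to any signal1<int>, and stop() detaches the repeater again.
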
diff --git a/chromium/third_party/webrtc/base/sigslottester.h b/chromium/third_party/webrtc/base/sigslottester.h
new file mode 100755
index 00000000000..ae781a97e30
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sigslottester.h
@@ -0,0 +1,199 @@
+// This file was GENERATED by command:
+// pump.py sigslottester.h.pump
+// DO NOT EDIT BY HAND!!!
+
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SIGSLOTTESTER_H_
+#define WEBRTC_BASE_SIGSLOTTESTER_H_
+
+// To generate sigslottester.h from sigslottester.h.pump, execute:
+// /home/build/google3/third_party/gtest/scripts/pump.py sigslottester.h.pump
+
+
+// SigslotTester(s) are utility classes to check if signals owned by an
+// object are being invoked at the right time and with the right arguments.
+// They are meant to be used in tests. Tests must provide "capture" pointers
+// (i.e. address of variables) where the arguments from the signal callback
+// can be stored.
+//
+// Example:
+// /* Some signal */
+// sigslot::signal1<const std::string&> foo;
+//
+// /* We want to monitor foo in some test. Note how the signal argument is
+//    const std::string&, but the capture type is std::string. The capture
+//    type must be a type that can be assigned to. */
+// std::string capture;
+// SigslotTester1<const std::string&, std::string> slot(&foo, &capture);
+// foo.emit("hello");
+// EXPECT_EQ(1, slot.callback_count());
+// EXPECT_EQ("hello", capture);
+// /* See unit-tests for more examples */
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/sigslot.h"
+
+namespace rtc {
+
+// For all the templates below:
+// - A1-A5 is the type of the argument i in the callback. Signals may and often
+// do use const-references here for efficiency.
+// - C1-C5 is the type of the variable to capture argument i. These should be
+// non-const value types suitable for use as lvalues.
+
+template <class A1, class C1>
+class SigslotTester1 : public sigslot::has_slots<> {
+ public:
+ SigslotTester1(sigslot::signal1<A1>* signal,
+ C1* capture1)
+ : callback_count_(0),
+ capture1_(capture1) {
+ signal->connect(this, &SigslotTester1::OnSignalCallback);
+ }
+
+ int callback_count() const { return callback_count_; }
+
+ private:
+ void OnSignalCallback(A1 arg1) {
+ callback_count_++;
+ *capture1_ = arg1;
+ }
+
+ int callback_count_;
+ C1* capture1_;
+
+ DISALLOW_COPY_AND_ASSIGN(SigslotTester1);
+};
+
+template <class A1, class A2, class C1, class C2>
+class SigslotTester2 : public sigslot::has_slots<> {
+ public:
+ SigslotTester2(sigslot::signal2<A1, A2>* signal,
+ C1* capture1, C2* capture2)
+ : callback_count_(0),
+ capture1_(capture1), capture2_(capture2) {
+ signal->connect(this, &SigslotTester2::OnSignalCallback);
+ }
+
+ int callback_count() const { return callback_count_; }
+
+ private:
+ void OnSignalCallback(A1 arg1, A2 arg2) {
+ callback_count_++;
+ *capture1_ = arg1;
+ *capture2_ = arg2;
+ }
+
+ int callback_count_;
+ C1* capture1_;
+ C2* capture2_;
+
+ DISALLOW_COPY_AND_ASSIGN(SigslotTester2);
+};
+
+template <class A1, class A2, class A3, class C1, class C2, class C3>
+class SigslotTester3 : public sigslot::has_slots<> {
+ public:
+ SigslotTester3(sigslot::signal3<A1, A2, A3>* signal,
+ C1* capture1, C2* capture2, C3* capture3)
+ : callback_count_(0),
+ capture1_(capture1), capture2_(capture2), capture3_(capture3) {
+ signal->connect(this, &SigslotTester3::OnSignalCallback);
+ }
+
+ int callback_count() const { return callback_count_; }
+
+ private:
+ void OnSignalCallback(A1 arg1, A2 arg2, A3 arg3) {
+ callback_count_++;
+ *capture1_ = arg1;
+ *capture2_ = arg2;
+ *capture3_ = arg3;
+ }
+
+ int callback_count_;
+ C1* capture1_;
+ C2* capture2_;
+ C3* capture3_;
+
+ DISALLOW_COPY_AND_ASSIGN(SigslotTester3);
+};
+
+template <class A1, class A2, class A3, class A4, class C1, class C2, class C3,
+ class C4>
+class SigslotTester4 : public sigslot::has_slots<> {
+ public:
+ SigslotTester4(sigslot::signal4<A1, A2, A3, A4>* signal,
+ C1* capture1, C2* capture2, C3* capture3, C4* capture4)
+ : callback_count_(0),
+ capture1_(capture1), capture2_(capture2), capture3_(capture3),
+ capture4_(capture4) {
+ signal->connect(this, &SigslotTester4::OnSignalCallback);
+ }
+
+ int callback_count() const { return callback_count_; }
+
+ private:
+ void OnSignalCallback(A1 arg1, A2 arg2, A3 arg3, A4 arg4) {
+ callback_count_++;
+ *capture1_ = arg1;
+ *capture2_ = arg2;
+ *capture3_ = arg3;
+ *capture4_ = arg4;
+ }
+
+ int callback_count_;
+ C1* capture1_;
+ C2* capture2_;
+ C3* capture3_;
+ C4* capture4_;
+
+ DISALLOW_COPY_AND_ASSIGN(SigslotTester4);
+};
+
+template <class A1, class A2, class A3, class A4, class A5, class C1, class C2,
+ class C3, class C4, class C5>
+class SigslotTester5 : public sigslot::has_slots<> {
+ public:
+ SigslotTester5(sigslot::signal5<A1, A2, A3, A4, A5>* signal,
+ C1* capture1, C2* capture2, C3* capture3, C4* capture4,
+ C5* capture5)
+ : callback_count_(0),
+ capture1_(capture1), capture2_(capture2), capture3_(capture3),
+ capture4_(capture4), capture5_(capture5) {
+ signal->connect(this, &SigslotTester5::OnSignalCallback);
+ }
+
+ int callback_count() const { return callback_count_; }
+
+ private:
+ void OnSignalCallback(A1 arg1, A2 arg2, A3 arg3, A4 arg4, A5 arg5) {
+ callback_count_++;
+ *capture1_ = arg1;
+ *capture2_ = arg2;
+ *capture3_ = arg3;
+ *capture4_ = arg4;
+ *capture5_ = arg5;
+ }
+
+ int callback_count_;
+ C1* capture1_;
+ C2* capture2_;
+ C3* capture3_;
+ C4* capture4_;
+ C5* capture5_;
+
+ DISALLOW_COPY_AND_ASSIGN(SigslotTester5);
+};
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SIGSLOTTESTER_H_
diff --git a/chromium/third_party/webrtc/base/sigslottester.h.pump b/chromium/third_party/webrtc/base/sigslottester.h.pump
new file mode 100755
index 00000000000..2fd9386a164
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sigslottester.h.pump
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SIGSLOTTESTER_H_
+#define WEBRTC_BASE_SIGSLOTTESTER_H_
+
+// To generate sigslottester.h from sigslottester.h.pump, execute:
+// /home/build/google3/third_party/gtest/scripts/pump.py sigslottester.h.pump
+
+
+// SigslotTester(s) are utility classes to check if signals owned by an
+// object are being invoked at the right time and with the right arguments.
+// They are meant to be used in tests. Tests must provide "capture" pointers
+// (i.e. address of variables) where the arguments from the signal callback
+// can be stored.
+//
+// Example:
+// /* Some signal */
+// sigslot::signal1<const std::string&> foo;
+//
+// /* We want to monitor foo in some test. Note how the signal argument is
+//    const std::string&, but the capture type is std::string. The capture
+//    type must be a type that can be assigned to. */
+// std::string capture;
+// SigslotTester1<const std::string&, std::string> slot(&foo, &capture);
+// foo.emit("hello");
+// EXPECT_EQ(1, slot.callback_count());
+// EXPECT_EQ("hello", capture);
+// /* See unit-tests for more examples */
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/sigslot.h"
+
+namespace rtc {
+
+// For all the templates below:
+// - A1-A5 is the type of the argument i in the callback. Signals may and often
+// do use const-references here for efficiency.
+// - C1-C5 is the type of the variable to capture argument i. These should be
+// non-const value types suitable for use as lvalues.
+
+$var n = 5
+$range i 1..n
+$for i [[
+$range j 1..i
+
+template <$for j , [[class A$j]], $for j , [[class C$j]]>
+class SigslotTester$i : public sigslot::has_slots<> {
+ public:
+ SigslotTester$i(sigslot::signal$i<$for j , [[A$j]]>* signal,
+ $for j , [[C$j* capture$j]])
+ : callback_count_(0),
+ $for j , [[capture$j[[]]_(capture$j)]] {
+ signal->connect(this, &SigslotTester$i::OnSignalCallback);
+ }
+
+ int callback_count() const { return callback_count_; }
+
+ private:
+ void OnSignalCallback($for j , [[A$j arg$j]]) {
+ callback_count_++;$for j [[
+
+ *capture$j[[]]_ = arg$j;]]
+
+ }
+
+ int callback_count_;$for j [[
+
+ C$j* capture$j[[]]_;]]
+
+
+ DISALLOW_COPY_AND_ASSIGN(SigslotTester$i);
+};
+
+]]
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SIGSLOTTESTER_H_
diff --git a/chromium/third_party/webrtc/base/sigslottester_unittest.cc b/chromium/third_party/webrtc/base/sigslottester_unittest.cc
new file mode 100755
index 00000000000..778e352f43d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sigslottester_unittest.cc
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2014 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/sigslottester.h"
+
+#include <string>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/sigslot.h"
+
+namespace rtc {
+
+TEST(SigslotTester, TestSignal1Arg) {
+ sigslot::signal1<int> source1;
+ int capture1;
+ SigslotTester1<int, int> slot1(&source1, &capture1);
+ EXPECT_EQ(0, slot1.callback_count());
+
+ source1.emit(10);
+ EXPECT_EQ(1, slot1.callback_count());
+ EXPECT_EQ(10, capture1);
+
+ source1.emit(20);
+ EXPECT_EQ(2, slot1.callback_count());
+ EXPECT_EQ(20, capture1);
+}
+
+TEST(SigslotTester, TestSignal2Args) {
+ sigslot::signal2<int, char> source2;
+ int capture1;
+ char capture2;
+ SigslotTester2<int, char, int, char> slot2(&source2, &capture1, &capture2);
+ EXPECT_EQ(0, slot2.callback_count());
+
+ source2.emit(10, 'x');
+ EXPECT_EQ(1, slot2.callback_count());
+ EXPECT_EQ(10, capture1);
+ EXPECT_EQ('x', capture2);
+
+ source2.emit(20, 'y');
+ EXPECT_EQ(2, slot2.callback_count());
+ EXPECT_EQ(20, capture1);
+ EXPECT_EQ('y', capture2);
+}
+
+// Since this works for 1 and 2 args, we assume it will work for up to 5 args.
+
+TEST(SigslotTester, TestSignalWithConstReferenceArgs) {
+ sigslot::signal1<const std::string&> source1;
+ std::string capture1;
+ SigslotTester1<const std::string&, std::string> slot1(&source1, &capture1);
+ EXPECT_EQ(0, slot1.callback_count());
+ source1.emit("hello");
+ EXPECT_EQ(1, slot1.callback_count());
+ EXPECT_EQ("hello", capture1);
+}
+
+TEST(SigslotTester, TestSignalWithPointerToConstArgs) {
+ sigslot::signal1<const std::string*> source1;
+ const std::string* capture1;
+ SigslotTester1<const std::string*, const std::string*> slot1(&source1,
+ &capture1);
+ EXPECT_EQ(0, slot1.callback_count());
+ source1.emit(NULL);
+ EXPECT_EQ(1, slot1.callback_count());
+ EXPECT_EQ(NULL, capture1);
+}
+
+TEST(SigslotTester, TestSignalWithConstPointerArgs) {
+ sigslot::signal1<std::string* const> source1;
+ std::string* capture1;
+ SigslotTester1<std::string* const, std::string*> slot1(&source1, &capture1);
+ EXPECT_EQ(0, slot1.callback_count());
+ source1.emit(NULL);
+ EXPECT_EQ(1, slot1.callback_count());
+ EXPECT_EQ(NULL, capture1);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/socket.h b/chromium/third_party/webrtc/base/socket.h
new file mode 100644
index 00000000000..725bd45d104
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socket.h
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SOCKET_H__
+#define WEBRTC_BASE_SOCKET_H__
+
+#include <errno.h>
+
+#if defined(WEBRTC_POSIX)
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <arpa/inet.h>
+#include <netinet/in.h>
+#define SOCKET_EACCES EACCES
+#endif
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#endif
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/socketaddress.h"
+
+// Rather than converting errors into a private namespace,
+// reuse the POSIX socket API errors. Note that this depends on
+// Win32 compatibility.
+
+#if defined(WEBRTC_WIN)
+#undef EWOULDBLOCK // Remove errno.h's definition for each macro below.
+#define EWOULDBLOCK WSAEWOULDBLOCK
+#undef EINPROGRESS
+#define EINPROGRESS WSAEINPROGRESS
+#undef EALREADY
+#define EALREADY WSAEALREADY
+#undef ENOTSOCK
+#define ENOTSOCK WSAENOTSOCK
+#undef EDESTADDRREQ
+#define EDESTADDRREQ WSAEDESTADDRREQ
+#undef EMSGSIZE
+#define EMSGSIZE WSAEMSGSIZE
+#undef EPROTOTYPE
+#define EPROTOTYPE WSAEPROTOTYPE
+#undef ENOPROTOOPT
+#define ENOPROTOOPT WSAENOPROTOOPT
+#undef EPROTONOSUPPORT
+#define EPROTONOSUPPORT WSAEPROTONOSUPPORT
+#undef ESOCKTNOSUPPORT
+#define ESOCKTNOSUPPORT WSAESOCKTNOSUPPORT
+#undef EOPNOTSUPP
+#define EOPNOTSUPP WSAEOPNOTSUPP
+#undef EPFNOSUPPORT
+#define EPFNOSUPPORT WSAEPFNOSUPPORT
+#undef EAFNOSUPPORT
+#define EAFNOSUPPORT WSAEAFNOSUPPORT
+#undef EADDRINUSE
+#define EADDRINUSE WSAEADDRINUSE
+#undef EADDRNOTAVAIL
+#define EADDRNOTAVAIL WSAEADDRNOTAVAIL
+#undef ENETDOWN
+#define ENETDOWN WSAENETDOWN
+#undef ENETUNREACH
+#define ENETUNREACH WSAENETUNREACH
+#undef ENETRESET
+#define ENETRESET WSAENETRESET
+#undef ECONNABORTED
+#define ECONNABORTED WSAECONNABORTED
+#undef ECONNRESET
+#define ECONNRESET WSAECONNRESET
+#undef ENOBUFS
+#define ENOBUFS WSAENOBUFS
+#undef EISCONN
+#define EISCONN WSAEISCONN
+#undef ENOTCONN
+#define ENOTCONN WSAENOTCONN
+#undef ESHUTDOWN
+#define ESHUTDOWN WSAESHUTDOWN
+#undef ETOOMANYREFS
+#define ETOOMANYREFS WSAETOOMANYREFS
+#undef ETIMEDOUT
+#define ETIMEDOUT WSAETIMEDOUT
+#undef ECONNREFUSED
+#define ECONNREFUSED WSAECONNREFUSED
+#undef ELOOP
+#define ELOOP WSAELOOP
+#undef ENAMETOOLONG
+#define ENAMETOOLONG WSAENAMETOOLONG
+#undef EHOSTDOWN
+#define EHOSTDOWN WSAEHOSTDOWN
+#undef EHOSTUNREACH
+#define EHOSTUNREACH WSAEHOSTUNREACH
+#undef ENOTEMPTY
+#define ENOTEMPTY WSAENOTEMPTY
+#undef EPROCLIM
+#define EPROCLIM WSAEPROCLIM
+#undef EUSERS
+#define EUSERS WSAEUSERS
+#undef EDQUOT
+#define EDQUOT WSAEDQUOT
+#undef ESTALE
+#define ESTALE WSAESTALE
+#undef EREMOTE
+#define EREMOTE WSAEREMOTE
+#undef EACCES
+#define SOCKET_EACCES WSAEACCES
+#endif // WEBRTC_WIN
+
+#if defined(WEBRTC_POSIX)
+#define INVALID_SOCKET (-1)
+#define SOCKET_ERROR (-1)
+#define closesocket(s) close(s)
+#endif // WEBRTC_POSIX
+
+namespace rtc {
+
+inline bool IsBlockingError(int e) {
+ return (e == EWOULDBLOCK) || (e == EAGAIN) || (e == EINPROGRESS);
+}
+
+// General interface for the socket implementations of various networks. The
+// methods match those of normal UNIX sockets very closely.
+class Socket {
+ public:
+ virtual ~Socket() {}
+
+ // Returns the address to which the socket is bound. If the socket is not
+ // bound, then the any-address is returned.
+ virtual SocketAddress GetLocalAddress() const = 0;
+
+ // Returns the address to which the socket is connected. If the socket is
+ // not connected, then the any-address is returned.
+ virtual SocketAddress GetRemoteAddress() const = 0;
+
+ virtual int Bind(const SocketAddress& addr) = 0;
+ virtual int Connect(const SocketAddress& addr) = 0;
+ virtual int Send(const void *pv, size_t cb) = 0;
+ virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr) = 0;
+ virtual int Recv(void *pv, size_t cb) = 0;
+ virtual int RecvFrom(void *pv, size_t cb, SocketAddress *paddr) = 0;
+ virtual int Listen(int backlog) = 0;
+ virtual Socket *Accept(SocketAddress *paddr) = 0;
+ virtual int Close() = 0;
+ virtual int GetError() const = 0;
+ virtual void SetError(int error) = 0;
+ inline bool IsBlocking() const { return IsBlockingError(GetError()); }
+
+ enum ConnState {
+ CS_CLOSED,
+ CS_CONNECTING,
+ CS_CONNECTED
+ };
+ virtual ConnState GetState() const = 0;
+
+ // Fills in the given uint16 with the current estimate of the MTU along the
+ // path to the address to which this socket is connected. NOTE: This method
+ // can block for up to 10 seconds on Windows.
+ virtual int EstimateMTU(uint16* mtu) = 0;
+
+ enum Option {
+ OPT_DONTFRAGMENT,
+ OPT_RCVBUF, // receive buffer size
+ OPT_SNDBUF, // send buffer size
+ OPT_NODELAY, // whether Nagle algorithm is enabled
+ OPT_IPV6_V6ONLY, // Whether the socket is IPv6 only.
+ OPT_DSCP, // DSCP code
+    OPT_RTP_SENDTIME_EXTN_ID, // A non-traditional socket option parameter,
+                              // specific to libjingle; used when the SendTime
+                              // option is needed at the socket level.
+ };
+ virtual int GetOption(Option opt, int* value) = 0;
+ virtual int SetOption(Option opt, int value) = 0;
+
+ protected:
+ Socket() {}
+
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(Socket);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SOCKET_H__
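
For reference, a sketch of how calling code might use the interface above together with IsBlockingError(); it assumes a concrete rtc::Socket obtained from elsewhere in webrtc/base (this header only declares the abstract interface), and TrySend is a hypothetical helper.

// Illustrative sketch only; the Socket pointer comes from some concrete
// implementation created elsewhere.
#include "webrtc/base/socket.h"

// Returns true if the socket accepted the data (possibly partially) or the
// write would merely block; returns false on a real error.
bool TrySend(rtc::Socket* sock, const char* data, size_t len) {
  int sent = sock->Send(data, len);
  if (sent >= 0)
    return true;
  // The EWOULDBLOCK/EAGAIN/EINPROGRESS check is identical on POSIX and
  // Windows thanks to the errno-to-WSA mapping above.
  return rtc::IsBlockingError(sock->GetError());
}
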
diff --git a/chromium/third_party/webrtc/base/socket_unittest.cc b/chromium/third_party/webrtc/base/socket_unittest.cc
new file mode 100644
index 00000000000..6104eda4e4e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socket_unittest.cc
@@ -0,0 +1,1012 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/socket_unittest.h"
+
+#include "webrtc/base/asyncudpsocket.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/socketserver.h"
+#include "webrtc/base/testclient.h"
+#include "webrtc/base/testutils.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+#define MAYBE_SKIP_IPV6 \
+ if (!HasIPv6Enabled()) { \
+ LOG(LS_INFO) << "No IPv6... skipping"; \
+ return; \
+ }
+
+
+void SocketTest::TestConnectIPv4() {
+ ConnectInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestConnectIPv6() {
+ MAYBE_SKIP_IPV6;
+ ConnectInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestConnectWithDnsLookupIPv4() {
+ ConnectWithDnsLookupInternal(kIPv4Loopback, "localhost");
+}
+
+void SocketTest::TestConnectWithDnsLookupIPv6() {
+ // TODO: Enable this when DNS resolution supports IPv6.
+ LOG(LS_INFO) << "Skipping IPv6 DNS test";
+ // ConnectWithDnsLookupInternal(kIPv6Loopback, "localhost6");
+}
+
+void SocketTest::TestConnectFailIPv4() {
+ ConnectFailInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestConnectFailIPv6() {
+ MAYBE_SKIP_IPV6;
+ ConnectFailInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestConnectWithDnsLookupFailIPv4() {
+ ConnectWithDnsLookupFailInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestConnectWithDnsLookupFailIPv6() {
+ MAYBE_SKIP_IPV6;
+ ConnectWithDnsLookupFailInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestConnectWithClosedSocketIPv4() {
+ ConnectWithClosedSocketInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestConnectWithClosedSocketIPv6() {
+ MAYBE_SKIP_IPV6;
+ ConnectWithClosedSocketInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestConnectWhileNotClosedIPv4() {
+ ConnectWhileNotClosedInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestConnectWhileNotClosedIPv6() {
+ MAYBE_SKIP_IPV6;
+ ConnectWhileNotClosedInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestServerCloseDuringConnectIPv4() {
+ ServerCloseDuringConnectInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestServerCloseDuringConnectIPv6() {
+ MAYBE_SKIP_IPV6;
+ ServerCloseDuringConnectInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestClientCloseDuringConnectIPv4() {
+ ClientCloseDuringConnectInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestClientCloseDuringConnectIPv6() {
+ MAYBE_SKIP_IPV6;
+ ClientCloseDuringConnectInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestServerCloseIPv4() {
+ ServerCloseInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestServerCloseIPv6() {
+ MAYBE_SKIP_IPV6;
+ ServerCloseInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestCloseInClosedCallbackIPv4() {
+ CloseInClosedCallbackInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestCloseInClosedCallbackIPv6() {
+ MAYBE_SKIP_IPV6;
+ CloseInClosedCallbackInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestSocketServerWaitIPv4() {
+ SocketServerWaitInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestSocketServerWaitIPv6() {
+ MAYBE_SKIP_IPV6;
+ SocketServerWaitInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestTcpIPv4() {
+ TcpInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestTcpIPv6() {
+ MAYBE_SKIP_IPV6;
+ TcpInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestSingleFlowControlCallbackIPv4() {
+ SingleFlowControlCallbackInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestSingleFlowControlCallbackIPv6() {
+ MAYBE_SKIP_IPV6;
+ SingleFlowControlCallbackInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestUdpIPv4() {
+ UdpInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestUdpIPv6() {
+ MAYBE_SKIP_IPV6;
+ UdpInternal(kIPv6Loopback);
+}
+
+void SocketTest::TestUdpReadyToSendIPv4() {
+#if !defined(WEBRTC_MAC)
+ // TODO(ronghuawu): Enable this test on mac/ios.
+ UdpReadyToSend(kIPv4Loopback);
+#endif
+}
+
+void SocketTest::TestUdpReadyToSendIPv6() {
+#if defined(WEBRTC_WIN)
+  // TODO(ronghuawu): Enable this test (currently flaky) on Mac and Linux.
+ MAYBE_SKIP_IPV6;
+ UdpReadyToSend(kIPv6Loopback);
+#endif
+}
+
+void SocketTest::TestGetSetOptionsIPv4() {
+ GetSetOptionsInternal(kIPv4Loopback);
+}
+
+void SocketTest::TestGetSetOptionsIPv6() {
+ MAYBE_SKIP_IPV6;
+ GetSetOptionsInternal(kIPv6Loopback);
+}
+
+// For unbound sockets, GetLocalAddress / GetRemoteAddress return AF_UNSPEC
+// values on Windows, but an empty address of the same family on Linux/MacOS X.
+bool IsUnspecOrEmptyIP(const IPAddress& address) {
+#if !defined(WEBRTC_WIN)
+ return IPIsAny(address);
+#else
+ return address.family() == AF_UNSPEC;
+#endif
+}
+
+void SocketTest::ConnectInternal(const IPAddress& loopback) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+
+ // Create client.
+ scoped_ptr<AsyncSocket> client(ss_->CreateAsyncSocket(loopback.family(),
+ SOCK_STREAM));
+ sink.Monitor(client.get());
+ EXPECT_EQ(AsyncSocket::CS_CLOSED, client->GetState());
+ EXPECT_PRED1(IsUnspecOrEmptyIP, client->GetLocalAddress().ipaddr());
+
+ // Create server and listen.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+ EXPECT_EQ(AsyncSocket::CS_CONNECTING, server->GetState());
+
+ // Ensure no pending server connections, since we haven't done anything yet.
+ EXPECT_FALSE(sink.Check(server.get(), testing::SSE_READ));
+ EXPECT_TRUE(NULL == server->Accept(&accept_addr));
+ EXPECT_TRUE(accept_addr.IsNil());
+
+ // Attempt connect to listening socket.
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+ EXPECT_FALSE(client->GetLocalAddress().IsNil());
+ EXPECT_NE(server->GetLocalAddress(), client->GetLocalAddress());
+
+ // Client is connecting, outcome not yet determined.
+ EXPECT_EQ(AsyncSocket::CS_CONNECTING, client->GetState());
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+
+ // Server has pending connection, accept it.
+ EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ ASSERT_TRUE(accepted);
+ EXPECT_FALSE(accept_addr.IsNil());
+ EXPECT_EQ(accepted->GetRemoteAddress(), accept_addr);
+
+ // Connected from server perspective, check the addresses are correct.
+ EXPECT_EQ(AsyncSocket::CS_CONNECTED, accepted->GetState());
+ EXPECT_EQ(server->GetLocalAddress(), accepted->GetLocalAddress());
+ EXPECT_EQ(client->GetLocalAddress(), accepted->GetRemoteAddress());
+
+ // Connected from client perspective, check the addresses are correct.
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CONNECTED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+ EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress());
+ EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress());
+}
+
+void SocketTest::ConnectWithDnsLookupInternal(const IPAddress& loopback,
+ const std::string& host) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+
+ // Create client.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(client.get());
+
+ // Create server and listen.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+
+ // Attempt connect to listening socket.
+ SocketAddress dns_addr(server->GetLocalAddress());
+ dns_addr.SetIP(host);
+ EXPECT_EQ(0, client->Connect(dns_addr));
+ // TODO: Bind when doing DNS lookup.
+ //EXPECT_NE(kEmptyAddr, client->GetLocalAddress()); // Implicit Bind
+
+ // Client is connecting, outcome not yet determined.
+ EXPECT_EQ(AsyncSocket::CS_CONNECTING, client->GetState());
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+
+ // Server has pending connection, accept it.
+ EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ ASSERT_TRUE(accepted);
+ EXPECT_FALSE(accept_addr.IsNil());
+ EXPECT_EQ(accepted->GetRemoteAddress(), accept_addr);
+
+ // Connected from server perspective, check the addresses are correct.
+ EXPECT_EQ(AsyncSocket::CS_CONNECTED, accepted->GetState());
+ EXPECT_EQ(server->GetLocalAddress(), accepted->GetLocalAddress());
+ EXPECT_EQ(client->GetLocalAddress(), accepted->GetRemoteAddress());
+
+ // Connected from client perspective, check the addresses are correct.
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CONNECTED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+ EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress());
+ EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress());
+}
+
+void SocketTest::ConnectFailInternal(const IPAddress& loopback) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+
+ // Create client.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(client.get());
+
+ // Create server, but don't listen yet.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+
+ // Attempt connect to a non-existent socket.
+ // We don't connect to the server socket created above, since on
+ // MacOS it takes about 75 seconds to get back an error!
+ SocketAddress bogus_addr(loopback, 65535);
+ EXPECT_EQ(0, client->Connect(bogus_addr));
+
+ // Wait for connection to fail (ECONNREFUSED).
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CLOSED, client->GetState(), kTimeout);
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_ERROR));
+ EXPECT_TRUE(client->GetRemoteAddress().IsNil());
+
+ // Should be no pending server connections.
+ EXPECT_FALSE(sink.Check(server.get(), testing::SSE_READ));
+ EXPECT_TRUE(NULL == server->Accept(&accept_addr));
+ EXPECT_EQ(IPAddress(), accept_addr.ipaddr());
+}
+
+void SocketTest::ConnectWithDnsLookupFailInternal(const IPAddress& loopback) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+
+ // Create client.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(client.get());
+
+ // Create server, but don't listen yet.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+
+ // Attempt connect to a non-existent host.
+ // We don't connect to the server socket created above, since on
+ // MacOS it takes about 75 seconds to get back an error!
+ SocketAddress bogus_dns_addr("not-a-real-hostname", 65535);
+ EXPECT_EQ(0, client->Connect(bogus_dns_addr));
+
+ // Wait for connection to fail (EHOSTNOTFOUND).
+ bool dns_lookup_finished = false;
+ WAIT_(client->GetState() == AsyncSocket::CS_CLOSED, kTimeout,
+ dns_lookup_finished);
+ if (!dns_lookup_finished) {
+ LOG(LS_WARNING) << "Skipping test; DNS resolution took longer than 5 "
+ << "seconds.";
+ return;
+ }
+
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CLOSED, client->GetState(), kTimeout);
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_ERROR));
+ EXPECT_TRUE(client->GetRemoteAddress().IsNil());
+ // Should be no pending server connections.
+ EXPECT_FALSE(sink.Check(server.get(), testing::SSE_READ));
+ EXPECT_TRUE(NULL == server->Accept(&accept_addr));
+ EXPECT_TRUE(accept_addr.IsNil());
+}
+
+void SocketTest::ConnectWithClosedSocketInternal(const IPAddress& loopback) {
+ // Create server and listen.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+
+  // Create a client and put it into the CS_CLOSED state.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ EXPECT_EQ(0, client->Close());
+ EXPECT_EQ(AsyncSocket::CS_CLOSED, client->GetState());
+
+  // Connect() should reinitialize the socket and put it into CS_CONNECTING.
+ EXPECT_EQ(0, client->Connect(SocketAddress(server->GetLocalAddress())));
+ EXPECT_EQ(AsyncSocket::CS_CONNECTING, client->GetState());
+}
+
+void SocketTest::ConnectWhileNotClosedInternal(const IPAddress& loopback) {
+ // Create server and listen.
+ testing::StreamSink sink;
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+ // Create client, connect.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ EXPECT_EQ(0, client->Connect(SocketAddress(server->GetLocalAddress())));
+ EXPECT_EQ(AsyncSocket::CS_CONNECTING, client->GetState());
+ // Try to connect again. Should fail, but not interfere with original attempt.
+ EXPECT_EQ(SOCKET_ERROR,
+ client->Connect(SocketAddress(server->GetLocalAddress())));
+
+ // Accept the original connection.
+ SocketAddress accept_addr;
+ EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ ASSERT_TRUE(accepted);
+ EXPECT_FALSE(accept_addr.IsNil());
+
+ // Check the states and addresses.
+ EXPECT_EQ(AsyncSocket::CS_CONNECTED, accepted->GetState());
+ EXPECT_EQ(server->GetLocalAddress(), accepted->GetLocalAddress());
+ EXPECT_EQ(client->GetLocalAddress(), accepted->GetRemoteAddress());
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CONNECTED, client->GetState(), kTimeout);
+ EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress());
+ EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress());
+
+ // Try to connect again, to an unresolved hostname.
+ // Shouldn't break anything.
+ EXPECT_EQ(SOCKET_ERROR,
+ client->Connect(SocketAddress("localhost",
+ server->GetLocalAddress().port())));
+ EXPECT_EQ(AsyncSocket::CS_CONNECTED, accepted->GetState());
+ EXPECT_EQ(AsyncSocket::CS_CONNECTED, client->GetState());
+ EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress());
+ EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress());
+}
+
+void SocketTest::ServerCloseDuringConnectInternal(const IPAddress& loopback) {
+ testing::StreamSink sink;
+
+ // Create client.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(client.get());
+
+ // Create server and listen.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+
+ // Attempt connect to listening socket.
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+
+ // Close down the server while the socket is in the accept queue.
+ EXPECT_TRUE_WAIT(sink.Check(server.get(), testing::SSE_READ), kTimeout);
+ server->Close();
+
+ // This should fail the connection for the client. Clean up.
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CLOSED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_ERROR));
+ client->Close();
+}
+
+void SocketTest::ClientCloseDuringConnectInternal(const IPAddress& loopback) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+
+ // Create client.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(client.get());
+
+ // Create server and listen.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+
+ // Attempt connect to listening socket.
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+
+ // Close down the client while the socket is in the accept queue.
+ EXPECT_TRUE_WAIT(sink.Check(server.get(), testing::SSE_READ), kTimeout);
+ client->Close();
+
+ // The connection should still be able to be accepted.
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ ASSERT_TRUE(accepted);
+ sink.Monitor(accepted.get());
+ EXPECT_EQ(AsyncSocket::CS_CONNECTED, accepted->GetState());
+
+  // The accepted socket should then close (possibly with an error, depending
+  // on timing).
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CLOSED, accepted->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(accepted.get(), testing::SSE_CLOSE) ||
+ sink.Check(accepted.get(), testing::SSE_ERROR));
+
+ // The client should not get a close event.
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+}
+
+void SocketTest::ServerCloseInternal(const IPAddress& loopback) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+
+ // Create client.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(client.get());
+
+ // Create server and listen.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+
+ // Attempt connection.
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+
+ // Accept connection.
+ EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ ASSERT_TRUE(accepted);
+ sink.Monitor(accepted.get());
+
+ // Both sides are now connected.
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CONNECTED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress());
+ EXPECT_EQ(accepted->GetRemoteAddress(), client->GetLocalAddress());
+
+ // Send data to the client, and then close the connection.
+ EXPECT_EQ(1, accepted->Send("a", 1));
+ accepted->Close();
+ EXPECT_EQ(AsyncSocket::CS_CLOSED, accepted->GetState());
+
+ // Expect that the client is notified, and has not yet closed.
+ EXPECT_TRUE_WAIT(sink.Check(client.get(), testing::SSE_READ), kTimeout);
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+ EXPECT_EQ(AsyncSocket::CS_CONNECTED, client->GetState());
+
+ // Ensure the data can be read.
+ char buffer[10];
+ EXPECT_EQ(1, client->Recv(buffer, sizeof(buffer)));
+ EXPECT_EQ('a', buffer[0]);
+
+ // Now we should close, but the remote address will remain.
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CLOSED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_CLOSE));
+ EXPECT_FALSE(client->GetRemoteAddress().IsAnyIP());
+
+ // The closer should not get a close signal.
+ EXPECT_FALSE(sink.Check(accepted.get(), testing::SSE_CLOSE));
+ EXPECT_TRUE(accepted->GetRemoteAddress().IsNil());
+
+ // And the closed side should get only a single close signal.
+ Thread::Current()->ProcessMessages(0);
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+
+ // Close down the client and ensure all is good.
+ client->Close();
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+ EXPECT_TRUE(client->GetRemoteAddress().IsNil());
+}
+
+class SocketCloser : public sigslot::has_slots<> {
+ public:
+ void OnClose(AsyncSocket* socket, int error) {
+ socket->Close(); // Deleting here would blow up the vector of handlers
+ // for the socket's signal.
+ }
+};
+
+void SocketTest::CloseInClosedCallbackInternal(const IPAddress& loopback) {
+ testing::StreamSink sink;
+ SocketCloser closer;
+ SocketAddress accept_addr;
+
+ // Create client.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(client.get());
+ client->SignalCloseEvent.connect(&closer, &SocketCloser::OnClose);
+
+ // Create server and listen.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+
+ // Attempt connection.
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+
+ // Accept connection.
+ EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ ASSERT_TRUE(accepted);
+ sink.Monitor(accepted.get());
+
+ // Both sides are now connected.
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CONNECTED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress());
+ EXPECT_EQ(accepted->GetRemoteAddress(), client->GetLocalAddress());
+
+ // Close the connection from the accepted (server) side.
+ accepted->Close();
+ EXPECT_EQ(AsyncSocket::CS_CLOSED, accepted->GetState());
+
+ // Expect that the client is notified, and has not yet closed.
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+ EXPECT_EQ(AsyncSocket::CS_CONNECTED, client->GetState());
+
+ // Now the client should be closed, and the close callback should have run.
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CLOSED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_CLOSE));
+ EXPECT_TRUE(Socket::CS_CLOSED == client->GetState());
+}
+
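+// Blocks for 500 ms when it receives a message. Used below to verify that a
+// blocking cross-thread Send() does not process i/o for the sending thread.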
+class Sleeper : public MessageHandler {
+ public:
+ Sleeper() {}
+ void OnMessage(Message* msg) {
+ Thread::Current()->SleepMs(500);
+ }
+};
+
+void SocketTest::SocketServerWaitInternal(const IPAddress& loopback) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+
+ // Create & connect server and client sockets.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(client.get());
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+ EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
+
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ ASSERT_TRUE(accepted);
+ sink.Monitor(accepted.get());
+ EXPECT_EQ(AsyncSocket::CS_CONNECTED, accepted->GetState());
+ EXPECT_EQ(server->GetLocalAddress(), accepted->GetLocalAddress());
+ EXPECT_EQ(client->GetLocalAddress(), accepted->GetRemoteAddress());
+
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CONNECTED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+ EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress());
+ EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress());
+
+ // Do an i/o operation, triggering an eventual callback.
+ EXPECT_FALSE(sink.Check(accepted.get(), testing::SSE_READ));
+ char buf[1024] = {0};
+
+ EXPECT_EQ(1024, client->Send(buf, 1024));
+ EXPECT_FALSE(sink.Check(accepted.get(), testing::SSE_READ));
+
+ // Shouldn't signal when blocked in a thread Send, where process_io is false.
+ scoped_ptr<Thread> thread(new Thread());
+ thread->Start();
+ Sleeper sleeper;
+ TypedMessageData<AsyncSocket*> data(client.get());
+ thread->Send(&sleeper, 0, &data);
+ EXPECT_FALSE(sink.Check(accepted.get(), testing::SSE_READ));
+
+ // But should signal when process_io is true.
+ EXPECT_TRUE_WAIT((sink.Check(accepted.get(), testing::SSE_READ)), kTimeout);
+ EXPECT_LT(0, accepted->Recv(buf, 1024));
+}
+
+void SocketTest::TcpInternal(const IPAddress& loopback) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+
+ // Create test data.
+ const size_t kDataSize = 1024 * 1024;
+ scoped_ptr<char[]> send_buffer(new char[kDataSize]);
+ scoped_ptr<char[]> recv_buffer(new char[kDataSize]);
+ size_t send_pos = 0, recv_pos = 0;
+ for (size_t i = 0; i < kDataSize; ++i) {
+ send_buffer[i] = static_cast<char>(i % 256);
+ recv_buffer[i] = 0;
+ }
+
+ // Create client.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(client.get());
+
+ // Create server and listen.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+
+ // Attempt connection.
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+
+ // Accept connection.
+ EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ ASSERT_TRUE(accepted);
+ sink.Monitor(accepted.get());
+
+ // Both sides are now connected.
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CONNECTED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress());
+ EXPECT_EQ(accepted->GetRemoteAddress(), client->GetLocalAddress());
+
+ // Send and receive a bunch of data.
+ bool send_waiting_for_writability = false;
+ bool send_expect_success = true;
+ bool recv_waiting_for_readability = true;
+ bool recv_expect_success = false;
+ int data_in_flight = 0;
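+ // Alternate between sending until the socket would block and reading back
+ // everything in flight, resuming each side on its SSE_WRITE / SSE_READ event.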
+ while (recv_pos < kDataSize) {
+ // Send as much as we can if we've been cleared to send.
+ while (!send_waiting_for_writability && send_pos < kDataSize) {
+ int tosend = static_cast<int>(kDataSize - send_pos);
+ int sent = accepted->Send(send_buffer.get() + send_pos, tosend);
+ if (send_expect_success) {
+ // The first Send() after connecting or getting writability should
+ // succeed and send some data.
+ EXPECT_GT(sent, 0);
+ send_expect_success = false;
+ }
+ if (sent >= 0) {
+ EXPECT_LE(sent, tosend);
+ send_pos += sent;
+ data_in_flight += sent;
+ } else {
+ ASSERT_TRUE(accepted->IsBlocking());
+ send_waiting_for_writability = true;
+ }
+ }
+
+ // Read all the sent data.
+ while (data_in_flight > 0) {
+ if (recv_waiting_for_readability) {
+ // Wait until data is available.
+ EXPECT_TRUE_WAIT(sink.Check(client.get(), testing::SSE_READ), kTimeout);
+ recv_waiting_for_readability = false;
+ recv_expect_success = true;
+ }
+
+ // Receive as much as we can get in a single recv call.
+ int rcvd = client->Recv(recv_buffer.get() + recv_pos,
+ kDataSize - recv_pos);
+
+ if (recv_expect_success) {
+ // The first Recv() after getting readability should succeed and receive
+ // some data.
+ // TODO: The following line is disabled due to flaky pulse
+ // builds. Re-enable if/when possible.
+ // EXPECT_GT(rcvd, 0);
+ recv_expect_success = false;
+ }
+ if (rcvd >= 0) {
+ EXPECT_LE(rcvd, data_in_flight);
+ recv_pos += rcvd;
+ data_in_flight -= rcvd;
+ } else {
+ ASSERT_TRUE(client->IsBlocking());
+ recv_waiting_for_readability = true;
+ }
+ }
+
+ // Once everything we've sent has been received, expect to be able to send again.
+ if (send_waiting_for_writability) {
+ EXPECT_TRUE_WAIT(sink.Check(accepted.get(), testing::SSE_WRITE),
+ kTimeout);
+ send_waiting_for_writability = false;
+ send_expect_success = true;
+ }
+ }
+
+ // Verify that the received data matches the sent data.
+ EXPECT_EQ(kDataSize, send_pos);
+ EXPECT_EQ(kDataSize, recv_pos);
+ EXPECT_EQ(0, memcmp(recv_buffer.get(), send_buffer.get(), kDataSize));
+
+ // Close down.
+ accepted->Close();
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CLOSED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_CLOSE));
+ client->Close();
+}
+
+void SocketTest::SingleFlowControlCallbackInternal(const IPAddress& loopback) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+
+ // Create client.
+ scoped_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(client.get());
+
+ // Create server and listen.
+ scoped_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+ EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
+ EXPECT_EQ(0, server->Listen(5));
+
+ // Attempt connection.
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+
+ // Accept connection.
+ EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ ASSERT_TRUE(accepted);
+ sink.Monitor(accepted.get());
+
+ // Both sides are now connected.
+ EXPECT_EQ_WAIT(AsyncSocket::CS_CONNECTED, client->GetState(), kTimeout);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_OPEN));
+ EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress());
+ EXPECT_EQ(accepted->GetRemoteAddress(), client->GetLocalAddress());
+
+ // Expect a writable callback from the connect.
+ EXPECT_TRUE_WAIT(sink.Check(accepted.get(), testing::SSE_WRITE), kTimeout);
+
+ // Fill the socket buffer.
+ char buf[1024 * 16] = {0};
+ int sends = 0;
+ while (++sends && accepted->Send(&buf, ARRAY_SIZE(buf)) != -1) {}
+ EXPECT_TRUE(accepted->IsBlocking());
+
+ // Wait until data is available.
+ EXPECT_TRUE_WAIT(sink.Check(client.get(), testing::SSE_READ), kTimeout);
+
+ // Pull data.
+ for (int i = 0; i < sends; ++i) {
+ client->Recv(buf, ARRAY_SIZE(buf));
+ }
+
+ // Expect at least one additional writable callback.
+ EXPECT_TRUE_WAIT(sink.Check(accepted.get(), testing::SSE_WRITE), kTimeout);
+
+ // Adding data in response to the writable callback shouldn't cause infinite
+ // callbacks.
+ int extras = 0;
+ for (int i = 0; i < 100; ++i) {
+ accepted->Send(&buf, ARRAY_SIZE(buf));
+ rtc::Thread::Current()->ProcessMessages(1);
+ if (sink.Check(accepted.get(), testing::SSE_WRITE)) {
+ extras++;
+ }
+ }
+ EXPECT_LT(extras, 2);
+
+ // Close down.
+ accepted->Close();
+ client->Close();
+}
+
+void SocketTest::UdpInternal(const IPAddress& loopback) {
+ SocketAddress empty = EmptySocketAddressWithFamily(loopback.family());
+ // Test basic bind and connect behavior.
+ AsyncSocket* socket =
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_DGRAM);
+ EXPECT_EQ(AsyncSocket::CS_CLOSED, socket->GetState());
+ EXPECT_EQ(0, socket->Bind(SocketAddress(loopback, 0)));
+ SocketAddress addr1 = socket->GetLocalAddress();
+ EXPECT_EQ(0, socket->Connect(addr1));
+ EXPECT_EQ(AsyncSocket::CS_CONNECTED, socket->GetState());
+ socket->Close();
+ EXPECT_EQ(AsyncSocket::CS_CLOSED, socket->GetState());
+ delete socket;
+
+ // Test send/receive behavior.
+ scoped_ptr<TestClient> client1(
+ new TestClient(AsyncUDPSocket::Create(ss_, addr1)));
+ scoped_ptr<TestClient> client2(
+ new TestClient(AsyncUDPSocket::Create(ss_, empty)));
+
+ SocketAddress addr2;
+ EXPECT_EQ(3, client2->SendTo("foo", 3, addr1));
+ EXPECT_TRUE(client1->CheckNextPacket("foo", 3, &addr2));
+
+ SocketAddress addr3;
+ EXPECT_EQ(6, client1->SendTo("bizbaz", 6, addr2));
+ EXPECT_TRUE(client2->CheckNextPacket("bizbaz", 6, &addr3));
+ EXPECT_EQ(addr3, addr1);
+ // TODO: figure out what the intent is here
+ for (int i = 0; i < 10; ++i) {
+ client2.reset(new TestClient(AsyncUDPSocket::Create(ss_, empty)));
+
+ SocketAddress addr4;
+ EXPECT_EQ(3, client2->SendTo("foo", 3, addr1));
+ EXPECT_TRUE(client1->CheckNextPacket("foo", 3, &addr4));
+ EXPECT_EQ(addr4.ipaddr(), addr2.ipaddr());
+
+ SocketAddress addr5;
+ EXPECT_EQ(6, client1->SendTo("bizbaz", 6, addr4));
+ EXPECT_TRUE(client2->CheckNextPacket("bizbaz", 6, &addr5));
+ EXPECT_EQ(addr5, addr1);
+
+ addr2 = addr4;
+ }
+}
+
+void SocketTest::UdpReadyToSend(const IPAddress& loopback) {
+ SocketAddress empty = EmptySocketAddressWithFamily(loopback.family());
+ // RFC 5737 - The blocks 192.0.2.0/24 (TEST-NET-1) ... are provided for use in
+ // documentation.
+ // RFC 3849 - 2001:DB8::/32 as a documentation-only prefix.
+ std::string dest = (loopback.family() == AF_INET6) ?
+ "2001:db8::1" : "192.0.2.0";
+ SocketAddress test_addr(dest, 2345);
+
+ // Test send
+ scoped_ptr<TestClient> client(
+ new TestClient(AsyncUDPSocket::Create(ss_, empty)));
+ int test_packet_size = 1200;
+ rtc::scoped_ptr<char[]> test_packet(new char[test_packet_size]);
+ // Init the test packet just to avoid a memcheck warning.
+ memset(test_packet.get(), 0, test_packet_size);
+ // Set the send buffer size to the same size as the test packet to have a
+ // better chance to get EWOULDBLOCK.
+ int send_buffer_size = test_packet_size;
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ send_buffer_size /= 2;
+#endif
+ client->SetOption(rtc::Socket::OPT_SNDBUF, send_buffer_size);
+
+ int error = 0;
+ uint32 start_ms = Time();
+ int sent_packet_num = 0;
+ int expected_error = EWOULDBLOCK;
+ while (start_ms + kTimeout > Time()) {
+ int ret = client->SendTo(test_packet.get(), test_packet_size, test_addr);
+ ++sent_packet_num;
+ if (ret != test_packet_size) {
+ error = client->GetError();
+ if (error == expected_error) {
+ LOG(LS_INFO) << "Got expected error code after sending "
+ << sent_packet_num << " packets.";
+ break;
+ }
+ }
+ }
+ EXPECT_EQ(expected_error, error);
+ EXPECT_FALSE(client->ready_to_send());
+ EXPECT_TRUE_WAIT(client->ready_to_send(), kTimeout);
+ LOG(LS_INFO) << "Got SignalReadyToSend";
+}
+
+void SocketTest::GetSetOptionsInternal(const IPAddress& loopback) {
+ rtc::scoped_ptr<AsyncSocket> socket(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_DGRAM));
+ socket->Bind(SocketAddress(loopback, 0));
+
+ // Check SNDBUF/RCVBUF.
+ const int desired_size = 12345;
+#if defined(WEBRTC_LINUX)
+ // Yes, really. It's in the kernel source.
+ const int expected_size = desired_size * 2;
+#else // !WEBRTC_LINUX
+ const int expected_size = desired_size;
+#endif // !WEBRTC_LINUX
+ int recv_size = 0;
+ int send_size = 0;
+ // Get the initial sizes.
+ ASSERT_NE(-1, socket->GetOption(Socket::OPT_RCVBUF, &recv_size));
+ ASSERT_NE(-1, socket->GetOption(Socket::OPT_SNDBUF, &send_size));
+ // Set our desired sizes.
+ ASSERT_NE(-1, socket->SetOption(Socket::OPT_RCVBUF, desired_size));
+ ASSERT_NE(-1, socket->SetOption(Socket::OPT_SNDBUF, desired_size));
+ // Get the sizes again.
+ ASSERT_NE(-1, socket->GetOption(Socket::OPT_RCVBUF, &recv_size));
+ ASSERT_NE(-1, socket->GetOption(Socket::OPT_SNDBUF, &send_size));
+ // Make sure they are right.
+ ASSERT_EQ(expected_size, recv_size);
+ ASSERT_EQ(expected_size, send_size);
+
+ // Check that we can't set NODELAY on a UDP socket.
+ int current_nd, desired_nd = 1;
+ ASSERT_EQ(-1, socket->GetOption(Socket::OPT_NODELAY, &current_nd));
+ ASSERT_EQ(-1, socket->SetOption(Socket::OPT_NODELAY, desired_nd));
+
+ // Skip the MTU estimation test for IPv6 for now.
+ if (loopback.family() != AF_INET6) {
+ // Try estimating MTU.
+ rtc::scoped_ptr<AsyncSocket>
+ mtu_socket(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_DGRAM));
+ mtu_socket->Bind(SocketAddress(loopback, 0));
+ uint16 mtu;
+ // should fail until we connect
+ ASSERT_EQ(-1, mtu_socket->EstimateMTU(&mtu));
+ mtu_socket->Connect(SocketAddress(loopback, 0));
+#if defined(WEBRTC_WIN)
+ // now it should succeed
+ ASSERT_NE(-1, mtu_socket->EstimateMTU(&mtu));
+ ASSERT_GE(mtu, 1492); // should be at least the 1492 "plateau" on localhost
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ // except on WEBRTC_MAC && !WEBRTC_IOS, where it's not yet implemented
+ ASSERT_EQ(-1, mtu_socket->EstimateMTU(&mtu));
+#else
+ // and the behavior seems unpredictable on Linux,
+ // failing on the build machine
+ // but succeeding on my Ubiquity instance.
+#endif
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/socket_unittest.h b/chromium/third_party/webrtc/base/socket_unittest.h
new file mode 100644
index 00000000000..d368afb3f53
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socket_unittest.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SOCKET_UNITTEST_H_
+#define WEBRTC_BASE_SOCKET_UNITTEST_H_
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+// Generic socket tests, to be used when testing individual socketservers.
+// Derive your specific test class from SocketTest, install your
+// socketserver, and call the SocketTest test methods.
+class SocketTest : public testing::Test {
+ protected:
+ SocketTest() : ss_(NULL), kIPv4Loopback(INADDR_LOOPBACK),
+ kIPv6Loopback(in6addr_loopback) {}
+ virtual void SetUp() { ss_ = Thread::Current()->socketserver(); }
+ void TestConnectIPv4();
+ void TestConnectIPv6();
+ void TestConnectWithDnsLookupIPv4();
+ void TestConnectWithDnsLookupIPv6();
+ void TestConnectFailIPv4();
+ void TestConnectFailIPv6();
+ void TestConnectWithDnsLookupFailIPv4();
+ void TestConnectWithDnsLookupFailIPv6();
+ void TestConnectWithClosedSocketIPv4();
+ void TestConnectWithClosedSocketIPv6();
+ void TestConnectWhileNotClosedIPv4();
+ void TestConnectWhileNotClosedIPv6();
+ void TestServerCloseDuringConnectIPv4();
+ void TestServerCloseDuringConnectIPv6();
+ void TestClientCloseDuringConnectIPv4();
+ void TestClientCloseDuringConnectIPv6();
+ void TestServerCloseIPv4();
+ void TestServerCloseIPv6();
+ void TestCloseInClosedCallbackIPv4();
+ void TestCloseInClosedCallbackIPv6();
+ void TestSocketServerWaitIPv4();
+ void TestSocketServerWaitIPv6();
+ void TestTcpIPv4();
+ void TestTcpIPv6();
+ void TestSingleFlowControlCallbackIPv4();
+ void TestSingleFlowControlCallbackIPv6();
+ void TestUdpIPv4();
+ void TestUdpIPv6();
+ void TestUdpReadyToSendIPv4();
+ void TestUdpReadyToSendIPv6();
+ void TestGetSetOptionsIPv4();
+ void TestGetSetOptionsIPv6();
+
+ private:
+ void ConnectInternal(const IPAddress& loopback);
+ void ConnectWithDnsLookupInternal(const IPAddress& loopback,
+ const std::string& host);
+ void ConnectFailInternal(const IPAddress& loopback);
+
+ void ConnectWithDnsLookupFailInternal(const IPAddress& loopback);
+ void ConnectWithClosedSocketInternal(const IPAddress& loopback);
+ void ConnectWhileNotClosedInternal(const IPAddress& loopback);
+ void ServerCloseDuringConnectInternal(const IPAddress& loopback);
+ void ClientCloseDuringConnectInternal(const IPAddress& loopback);
+ void ServerCloseInternal(const IPAddress& loopback);
+ void CloseInClosedCallbackInternal(const IPAddress& loopback);
+ void SocketServerWaitInternal(const IPAddress& loopback);
+ void TcpInternal(const IPAddress& loopback);
+ void SingleFlowControlCallbackInternal(const IPAddress& loopback);
+ void UdpInternal(const IPAddress& loopback);
+ void UdpReadyToSend(const IPAddress& loopback);
+ void GetSetOptionsInternal(const IPAddress& loopback);
+
+ static const int kTimeout = 5000; // ms
+ SocketServer* ss_;
+ const IPAddress kIPv4Loopback;
+ const IPAddress kIPv6Loopback;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SOCKET_UNITTEST_H_
diff --git a/chromium/third_party/webrtc/base/socketadapters.cc b/chromium/third_party/webrtc/base/socketadapters.cc
new file mode 100644
index 00000000000..1cdd1bcbebd
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketadapters.cc
@@ -0,0 +1,893 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(_MSC_VER) && _MSC_VER < 1300
+#pragma warning(disable:4786)
+#endif
+
+#include <time.h>
+#include <errno.h>
+
+#if defined(WEBRTC_WIN)
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <winsock2.h>
+#include <ws2tcpip.h>
+#define SECURITY_WIN32
+#include <security.h>
+#endif
+
+#include "webrtc/base/bytebuffer.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/httpcommon.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/socketadapters.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/sec_buffer.h"
+#endif // WEBRTC_WIN
+
+namespace rtc {
+
+BufferedReadAdapter::BufferedReadAdapter(AsyncSocket* socket, size_t size)
+ : AsyncSocketAdapter(socket), buffer_size_(size),
+ data_len_(0), buffering_(false) {
+ buffer_ = new char[buffer_size_];
+}
+
+BufferedReadAdapter::~BufferedReadAdapter() {
+ delete [] buffer_;
+}
+
+int BufferedReadAdapter::Send(const void *pv, size_t cb) {
+ if (buffering_) {
+ // TODO: Spoof the error better; signal writability.
+ socket_->SetError(EWOULDBLOCK);
+ return -1;
+ }
+ return AsyncSocketAdapter::Send(pv, cb);
+}
+
+int BufferedReadAdapter::Recv(void *pv, size_t cb) {
+ if (buffering_) {
+ socket_->SetError(EWOULDBLOCK);
+ return -1;
+ }
+
+ size_t read = 0;
+
+ if (data_len_) {
+ read = _min(cb, data_len_);
+ memcpy(pv, buffer_, read);
+ data_len_ -= read;
+ if (data_len_ > 0) {
+ memmove(buffer_, buffer_ + read, data_len_);
+ }
+ pv = static_cast<char *>(pv) + read;
+ cb -= read;
+ }
+
+ // FIX: If cb == 0, we won't generate another read event
+
+ int res = AsyncSocketAdapter::Recv(pv, cb);
+ if (res < 0)
+ return res;
+
+ return res + static_cast<int>(read);
+}
+
+void BufferedReadAdapter::BufferInput(bool on) {
+ buffering_ = on;
+}
+
+void BufferedReadAdapter::OnReadEvent(AsyncSocket * socket) {
+ ASSERT(socket == socket_);
+
+ if (!buffering_) {
+ AsyncSocketAdapter::OnReadEvent(socket);
+ return;
+ }
+
+ if (data_len_ >= buffer_size_) {
+ LOG(INFO) << "Input buffer overflow";
+ ASSERT(false);
+ data_len_ = 0;
+ }
+
+ int len = socket_->Recv(buffer_ + data_len_, buffer_size_ - data_len_);
+ if (len < 0) {
+ // TODO: Do something better like forwarding the error to the user.
+ LOG_ERR(INFO) << "Recv";
+ return;
+ }
+
+ data_len_ += len;
+
+ ProcessInput(buffer_, &data_len_);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+// This is an SSL v2 CLIENT_HELLO message.
+// TODO: Should this have a session id? The response doesn't have a
+// certificate, so the hello should have a session id.
+static const uint8 kSslClientHello[] = {
+ 0x80, 0x46, // msg len
+ 0x01, // CLIENT_HELLO
+ 0x03, 0x01, // SSL 3.1
+ 0x00, 0x2d, // ciphersuite len
+ 0x00, 0x00, // session id len
+ 0x00, 0x10, // challenge len
+ 0x01, 0x00, 0x80, 0x03, 0x00, 0x80, 0x07, 0x00, 0xc0, // ciphersuites
+ 0x06, 0x00, 0x40, 0x02, 0x00, 0x80, 0x04, 0x00, 0x80, //
+ 0x00, 0x00, 0x04, 0x00, 0xfe, 0xff, 0x00, 0x00, 0x0a, //
+ 0x00, 0xfe, 0xfe, 0x00, 0x00, 0x09, 0x00, 0x00, 0x64, //
+ 0x00, 0x00, 0x62, 0x00, 0x00, 0x03, 0x00, 0x00, 0x06, //
+ 0x1f, 0x17, 0x0c, 0xa6, 0x2f, 0x00, 0x78, 0xfc, // challenge
+ 0x46, 0x55, 0x2e, 0xb1, 0x83, 0x39, 0xf1, 0xea //
+};
+
+// This is a TLSv1 SERVER_HELLO message.
+static const uint8 kSslServerHello[] = {
+ 0x16, // handshake message
+ 0x03, 0x01, // SSL 3.1
+ 0x00, 0x4a, // message len
+ 0x02, // SERVER_HELLO
+ 0x00, 0x00, 0x46, // handshake len
+ 0x03, 0x01, // SSL 3.1
+ 0x42, 0x85, 0x45, 0xa7, 0x27, 0xa9, 0x5d, 0xa0, // server random
+ 0xb3, 0xc5, 0xe7, 0x53, 0xda, 0x48, 0x2b, 0x3f, //
+ 0xc6, 0x5a, 0xca, 0x89, 0xc1, 0x58, 0x52, 0xa1, //
+ 0x78, 0x3c, 0x5b, 0x17, 0x46, 0x00, 0x85, 0x3f, //
+ 0x20, // session id len
+ 0x0e, 0xd3, 0x06, 0x72, 0x5b, 0x5b, 0x1b, 0x5f, // session id
+ 0x15, 0xac, 0x13, 0xf9, 0x88, 0x53, 0x9d, 0x9b, //
+ 0xe8, 0x3d, 0x7b, 0x0c, 0x30, 0x32, 0x6e, 0x38, //
+ 0x4d, 0xa2, 0x75, 0x57, 0x41, 0x6c, 0x34, 0x5c, //
+ 0x00, 0x04, // RSA/RC4-128/MD5
+ 0x00 // null compression
+};
+
+AsyncSSLSocket::AsyncSSLSocket(AsyncSocket* socket)
+ : BufferedReadAdapter(socket, 1024) {
+}
+
+int AsyncSSLSocket::Connect(const SocketAddress& addr) {
+ // Begin buffering before we connect, so that there isn't a race condition
+ // between potential senders and receiving the OnConnectEvent signal
+ BufferInput(true);
+ return BufferedReadAdapter::Connect(addr);
+}
+
+void AsyncSSLSocket::OnConnectEvent(AsyncSocket * socket) {
+ ASSERT(socket == socket_);
+ // TODO: we could buffer output too...
+ VERIFY(sizeof(kSslClientHello) ==
+ DirectSend(kSslClientHello, sizeof(kSslClientHello)));
+}
+
+void AsyncSSLSocket::ProcessInput(char* data, size_t* len) {
+ if (*len < sizeof(kSslServerHello))
+ return;
+
+ if (memcmp(kSslServerHello, data, sizeof(kSslServerHello)) != 0) {
+ Close();
+ SignalCloseEvent(this, 0); // TODO: error code?
+ return;
+ }
+
+ *len -= sizeof(kSslServerHello);
+ if (*len > 0) {
+ memmove(data, data + sizeof(kSslServerHello), *len);
+ }
+
+ bool remainder = (*len > 0);
+ BufferInput(false);
+ SignalConnectEvent(this);
+
+ // FIX: if SignalConnect causes the socket to be destroyed, we are in trouble
+ if (remainder)
+ SignalReadEvent(this);
+}
+
+AsyncSSLServerSocket::AsyncSSLServerSocket(AsyncSocket* socket)
+ : BufferedReadAdapter(socket, 1024) {
+ BufferInput(true);
+}
+
+void AsyncSSLServerSocket::ProcessInput(char* data, size_t* len) {
+ // We only accept client hello messages.
+ if (*len < sizeof(kSslClientHello)) {
+ return;
+ }
+
+ if (memcmp(kSslClientHello, data, sizeof(kSslClientHello)) != 0) {
+ Close();
+ SignalCloseEvent(this, 0);
+ return;
+ }
+
+ *len -= sizeof(kSslClientHello);
+
+ // Clients should not send more data until the handshake is completed.
+ ASSERT(*len == 0);
+
+ // Send a server hello back to the client.
+ DirectSend(kSslServerHello, sizeof(kSslServerHello));
+
+ // Handshake completed for us, redirect input to our parent.
+ BufferInput(false);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+AsyncHttpsProxySocket::AsyncHttpsProxySocket(AsyncSocket* socket,
+ const std::string& user_agent,
+ const SocketAddress& proxy,
+ const std::string& username,
+ const CryptString& password)
+ : BufferedReadAdapter(socket, 1024), proxy_(proxy), agent_(user_agent),
+ user_(username), pass_(password), force_connect_(false), state_(PS_ERROR),
+ context_(0) {
+}
+
+AsyncHttpsProxySocket::~AsyncHttpsProxySocket() {
+ delete context_;
+}
+
+int AsyncHttpsProxySocket::Connect(const SocketAddress& addr) {
+ int ret;
+ LOG(LS_VERBOSE) << "AsyncHttpsProxySocket::Connect("
+ << proxy_.ToSensitiveString() << ")";
+ dest_ = addr;
+ state_ = PS_INIT;
+ if (ShouldIssueConnect()) {
+ BufferInput(true);
+ }
+ ret = BufferedReadAdapter::Connect(proxy_);
+ // TODO: Set state_ appropriately if Connect fails.
+ return ret;
+}
+
+SocketAddress AsyncHttpsProxySocket::GetRemoteAddress() const {
+ return dest_;
+}
+
+int AsyncHttpsProxySocket::Close() {
+ headers_.clear();
+ state_ = PS_ERROR;
+ dest_.Clear();
+ delete context_;
+ context_ = NULL;
+ return BufferedReadAdapter::Close();
+}
+
+Socket::ConnState AsyncHttpsProxySocket::GetState() const {
+ if (state_ < PS_TUNNEL) {
+ return CS_CONNECTING;
+ } else if (state_ == PS_TUNNEL) {
+ return CS_CONNECTED;
+ } else {
+ return CS_CLOSED;
+ }
+}
+
+void AsyncHttpsProxySocket::OnConnectEvent(AsyncSocket * socket) {
+ LOG(LS_VERBOSE) << "AsyncHttpsProxySocket::OnConnectEvent";
+ if (!ShouldIssueConnect()) {
+ state_ = PS_TUNNEL;
+ BufferedReadAdapter::OnConnectEvent(socket);
+ return;
+ }
+ SendRequest();
+}
+
+void AsyncHttpsProxySocket::OnCloseEvent(AsyncSocket * socket, int err) {
+ LOG(LS_VERBOSE) << "AsyncHttpsProxySocket::OnCloseEvent(" << err << ")";
+ if ((state_ == PS_WAIT_CLOSE) && (err == 0)) {
+ state_ = PS_ERROR;
+ Connect(dest_);
+ } else {
+ BufferedReadAdapter::OnCloseEvent(socket, err);
+ }
+}
+
+void AsyncHttpsProxySocket::ProcessInput(char* data, size_t* len) {
+ size_t start = 0;
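+ // Parse the proxy response one line at a time until the tunnel is
+ // established; PS_SKIP_BODY consumes any response body announced via
+ // Content-Length.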
+ for (size_t pos = start; state_ < PS_TUNNEL && pos < *len;) {
+ if (state_ == PS_SKIP_BODY) {
+ size_t consume = _min(*len - pos, content_length_);
+ pos += consume;
+ start = pos;
+ content_length_ -= consume;
+ if (content_length_ == 0) {
+ EndResponse();
+ }
+ continue;
+ }
+
+ if (data[pos++] != '\n')
+ continue;
+
+ size_t len = pos - start - 1;
+ if ((len > 0) && (data[start + len - 1] == '\r'))
+ --len;
+
+ data[start + len] = 0;
+ ProcessLine(data + start, len);
+ start = pos;
+ }
+
+ *len -= start;
+ if (*len > 0) {
+ memmove(data, data + start, *len);
+ }
+
+ if (state_ != PS_TUNNEL)
+ return;
+
+ bool remainder = (*len > 0);
+ BufferInput(false);
+ SignalConnectEvent(this);
+
+ // FIX: if SignalConnect causes the socket to be destroyed, we are in trouble
+ if (remainder)
+ SignalReadEvent(this); // TODO: signal this??
+}
+
+bool AsyncHttpsProxySocket::ShouldIssueConnect() const {
+ // TODO: Think about whether a more sophisticated test
+ // than dest port == 80 is needed.
+ return force_connect_ || (dest_.port() != 80);
+}
+
+void AsyncHttpsProxySocket::SendRequest() {
+ std::stringstream ss;
+ ss << "CONNECT " << dest_.ToString() << " HTTP/1.0\r\n";
+ ss << "User-Agent: " << agent_ << "\r\n";
+ ss << "Host: " << dest_.HostAsURIString() << "\r\n";
+ ss << "Content-Length: 0\r\n";
+ ss << "Proxy-Connection: Keep-Alive\r\n";
+ ss << headers_;
+ ss << "\r\n";
+ std::string str = ss.str();
+ DirectSend(str.c_str(), str.size());
+ state_ = PS_LEADER;
+ expect_close_ = true;
+ content_length_ = 0;
+ headers_.clear();
+
+ LOG(LS_VERBOSE) << "AsyncHttpsProxySocket >> " << str;
+}
+
+void AsyncHttpsProxySocket::ProcessLine(char * data, size_t len) {
+ LOG(LS_VERBOSE) << "AsyncHttpsProxySocket << " << data;
+
+ if (len == 0) {
+ if (state_ == PS_TUNNEL_HEADERS) {
+ state_ = PS_TUNNEL;
+ } else if (state_ == PS_ERROR_HEADERS) {
+ Error(defer_error_);
+ return;
+ } else if (state_ == PS_SKIP_HEADERS) {
+ if (content_length_) {
+ state_ = PS_SKIP_BODY;
+ } else {
+ EndResponse();
+ return;
+ }
+ } else {
+ static bool report = false;
+ if (!unknown_mechanisms_.empty() && !report) {
+ report = true;
+ std::string msg(
+ "Unable to connect to the Google Talk service due to an incompatibility "
+ "with your proxy.\r\nPlease help us resolve this issue by submitting the "
+ "following information to us using our technical issue submission form "
+ "at:\r\n\r\n"
+ "http://www.google.com/support/talk/bin/request.py\r\n\r\n"
+ "We apologize for the inconvenience.\r\n\r\n"
+ "Information to submit to Google: "
+ );
+ //std::string msg("Please report the following information to foo@bar.com:\r\nUnknown methods: ");
+ msg.append(unknown_mechanisms_);
+#if defined(WEBRTC_WIN)
+ MessageBoxA(0, msg.c_str(), "Oops!", MB_OK);
+#endif
+#if defined(WEBRTC_POSIX)
+ // TODO: Raise a signal so the UI can be separated.
+ LOG(LS_ERROR) << "Oops!\n\n" << msg;
+#endif
+ }
+ // Unexpected end of headers
+ Error(0);
+ return;
+ }
+ } else if (state_ == PS_LEADER) {
+ unsigned int code;
+ if (sscanf(data, "HTTP/%*u.%*u %u", &code) != 1) {
+ Error(0);
+ return;
+ }
+ switch (code) {
+ case 200:
+ // connection good!
+ state_ = PS_TUNNEL_HEADERS;
+ return;
+#if defined(HTTP_STATUS_PROXY_AUTH_REQ) && (HTTP_STATUS_PROXY_AUTH_REQ != 407)
+#error Wrong code for HTTP_STATUS_PROXY_AUTH_REQ
+#endif
+ case 407: // HTTP_STATUS_PROXY_AUTH_REQ
+ state_ = PS_AUTHENTICATE;
+ return;
+ default:
+ defer_error_ = 0;
+ state_ = PS_ERROR_HEADERS;
+ return;
+ }
+ } else if ((state_ == PS_AUTHENTICATE)
+ && (_strnicmp(data, "Proxy-Authenticate:", 19) == 0)) {
+ std::string response, auth_method;
+ switch (HttpAuthenticate(data + 19, len - 19,
+ proxy_, "CONNECT", "/",
+ user_, pass_, context_, response, auth_method)) {
+ case HAR_IGNORE:
+ LOG(LS_VERBOSE) << "Ignoring Proxy-Authenticate: " << auth_method;
+ if (!unknown_mechanisms_.empty())
+ unknown_mechanisms_.append(", ");
+ unknown_mechanisms_.append(auth_method);
+ break;
+ case HAR_RESPONSE:
+ headers_ = "Proxy-Authorization: ";
+ headers_.append(response);
+ headers_.append("\r\n");
+ state_ = PS_SKIP_HEADERS;
+ unknown_mechanisms_.clear();
+ break;
+ case HAR_CREDENTIALS:
+ defer_error_ = SOCKET_EACCES;
+ state_ = PS_ERROR_HEADERS;
+ unknown_mechanisms_.clear();
+ break;
+ case HAR_ERROR:
+ defer_error_ = 0;
+ state_ = PS_ERROR_HEADERS;
+ unknown_mechanisms_.clear();
+ break;
+ }
+ } else if (_strnicmp(data, "Content-Length:", 15) == 0) {
+ content_length_ = strtoul(data + 15, 0, 0);
+ } else if (_strnicmp(data, "Proxy-Connection: Keep-Alive", 28) == 0) {
+ expect_close_ = false;
+ /*
+ } else if (_strnicmp(data, "Connection: close", 17) == 0) {
+ expect_close_ = true;
+ */
+ }
+}
+
+void AsyncHttpsProxySocket::EndResponse() {
+ if (!expect_close_) {
+ SendRequest();
+ return;
+ }
+
+ // No point in waiting for the server to close... let's close now
+ // TODO: Refactor out PS_WAIT_CLOSE
+ state_ = PS_WAIT_CLOSE;
+ BufferedReadAdapter::Close();
+ OnCloseEvent(this, 0);
+}
+
+void AsyncHttpsProxySocket::Error(int error) {
+ BufferInput(false);
+ Close();
+ SetError(error);
+ SignalCloseEvent(this, error);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+AsyncSocksProxySocket::AsyncSocksProxySocket(AsyncSocket* socket,
+ const SocketAddress& proxy,
+ const std::string& username,
+ const CryptString& password)
+ : BufferedReadAdapter(socket, 1024), state_(SS_ERROR), proxy_(proxy),
+ user_(username), pass_(password) {
+}
+
+int AsyncSocksProxySocket::Connect(const SocketAddress& addr) {
+ int ret;
+ dest_ = addr;
+ state_ = SS_INIT;
+ BufferInput(true);
+ ret = BufferedReadAdapter::Connect(proxy_);
+ // TODO: Set state_ appropriately if Connect fails.
+ return ret;
+}
+
+SocketAddress AsyncSocksProxySocket::GetRemoteAddress() const {
+ return dest_;
+}
+
+int AsyncSocksProxySocket::Close() {
+ state_ = SS_ERROR;
+ dest_.Clear();
+ return BufferedReadAdapter::Close();
+}
+
+Socket::ConnState AsyncSocksProxySocket::GetState() const {
+ if (state_ < SS_TUNNEL) {
+ return CS_CONNECTING;
+ } else if (state_ == SS_TUNNEL) {
+ return CS_CONNECTED;
+ } else {
+ return CS_CLOSED;
+ }
+}
+
+void AsyncSocksProxySocket::OnConnectEvent(AsyncSocket* socket) {
+ SendHello();
+}
+
+void AsyncSocksProxySocket::ProcessInput(char* data, size_t* len) {
+ ASSERT(state_ < SS_TUNNEL);
+
+ ByteBuffer response(data, *len);
+
+ if (state_ == SS_HELLO) {
+ uint8 ver, method;
+ if (!response.ReadUInt8(&ver) ||
+ !response.ReadUInt8(&method))
+ return;
+
+ if (ver != 5) {
+ Error(0);
+ return;
+ }
+
+ if (method == 0) {
+ SendConnect();
+ } else if (method == 2) {
+ SendAuth();
+ } else {
+ Error(0);
+ return;
+ }
+ } else if (state_ == SS_AUTH) {
+ uint8 ver, status;
+ if (!response.ReadUInt8(&ver) ||
+ !response.ReadUInt8(&status))
+ return;
+
+ if ((ver != 1) || (status != 0)) {
+ Error(SOCKET_EACCES);
+ return;
+ }
+
+ SendConnect();
+ } else if (state_ == SS_CONNECT) {
+ uint8 ver, rep, rsv, atyp;
+ if (!response.ReadUInt8(&ver) ||
+ !response.ReadUInt8(&rep) ||
+ !response.ReadUInt8(&rsv) ||
+ !response.ReadUInt8(&atyp))
+ return;
+
+ if ((ver != 5) || (rep != 0)) {
+ Error(0);
+ return;
+ }
+
+ uint16 port;
+ if (atyp == 1) {
+ uint32 addr;
+ if (!response.ReadUInt32(&addr) ||
+ !response.ReadUInt16(&port))
+ return;
+ LOG(LS_VERBOSE) << "Bound on " << addr << ":" << port;
+ } else if (atyp == 3) {
+ uint8 len;
+ std::string addr;
+ if (!response.ReadUInt8(&len) ||
+ !response.ReadString(&addr, len) ||
+ !response.ReadUInt16(&port))
+ return;
+ LOG(LS_VERBOSE) << "Bound on " << addr << ":" << port;
+ } else if (atyp == 4) {
+ std::string addr;
+ if (!response.ReadString(&addr, 16) ||
+ !response.ReadUInt16(&port))
+ return;
+ LOG(LS_VERBOSE) << "Bound on <IPV6>:" << port;
+ } else {
+ Error(0);
+ return;
+ }
+
+ state_ = SS_TUNNEL;
+ }
+
+ // Consume parsed data
+ *len = response.Length();
+ memcpy(data, response.Data(), *len);
+
+ if (state_ != SS_TUNNEL)
+ return;
+
+ bool remainder = (*len > 0);
+ BufferInput(false);
+ SignalConnectEvent(this);
+
+ // FIX: if SignalConnect causes the socket to be destroyed, we are in trouble
+ if (remainder)
+ SignalReadEvent(this); // TODO: signal this??
+}
+
+void AsyncSocksProxySocket::SendHello() {
+ ByteBuffer request;
+ request.WriteUInt8(5); // Socks Version
+ if (user_.empty()) {
+ request.WriteUInt8(1); // Authentication Mechanisms
+ request.WriteUInt8(0); // No authentication
+ } else {
+ request.WriteUInt8(2); // Authentication Mechanisms
+ request.WriteUInt8(0); // No authentication
+ request.WriteUInt8(2); // Username/Password
+ }
+ DirectSend(request.Data(), request.Length());
+ state_ = SS_HELLO;
+}
+
+void AsyncSocksProxySocket::SendAuth() {
+ ByteBuffer request;
+ request.WriteUInt8(1); // Negotiation Version
+ request.WriteUInt8(static_cast<uint8>(user_.size()));
+ request.WriteString(user_); // Username
+ request.WriteUInt8(static_cast<uint8>(pass_.GetLength()));
+ size_t len = pass_.GetLength() + 1;
+ char * sensitive = new char[len];
+ pass_.CopyTo(sensitive, true);
+ request.WriteString(sensitive); // Password
+ memset(sensitive, 0, len);
+ delete [] sensitive;
+ DirectSend(request.Data(), request.Length());
+ state_ = SS_AUTH;
+}
+
+void AsyncSocksProxySocket::SendConnect() {
+ ByteBuffer request;
+ request.WriteUInt8(5); // Socks Version
+ request.WriteUInt8(1); // CONNECT
+ request.WriteUInt8(0); // Reserved
+ if (dest_.IsUnresolved()) {
+ std::string hostname = dest_.hostname();
+ request.WriteUInt8(3); // DOMAINNAME
+ request.WriteUInt8(static_cast<uint8>(hostname.size()));
+ request.WriteString(hostname); // Destination Hostname
+ } else {
+ request.WriteUInt8(1); // IPV4
+ request.WriteUInt32(dest_.ip()); // Destination IP
+ }
+ request.WriteUInt16(dest_.port()); // Destination Port
+ DirectSend(request.Data(), request.Length());
+ state_ = SS_CONNECT;
+}
+
+void AsyncSocksProxySocket::Error(int error) {
+ state_ = SS_ERROR;
+ BufferInput(false);
+ Close();
+ SetError(SOCKET_EACCES);
+ SignalCloseEvent(this, error);
+}
+
+AsyncSocksProxyServerSocket::AsyncSocksProxyServerSocket(AsyncSocket* socket)
+ : AsyncProxyServerSocket(socket, kBufferSize), state_(SS_HELLO) {
+ BufferInput(true);
+}
+
+void AsyncSocksProxyServerSocket::ProcessInput(char* data, size_t* len) {
+ // TODO: See if the whole message has arrived
+ ASSERT(state_ < SS_CONNECT_PENDING);
+
+ ByteBuffer response(data, *len);
+ if (state_ == SS_HELLO) {
+ HandleHello(&response);
+ } else if (state_ == SS_AUTH) {
+ HandleAuth(&response);
+ } else if (state_ == SS_CONNECT) {
+ HandleConnect(&response);
+ }
+
+ // Consume parsed data
+ *len = response.Length();
+ memcpy(data, response.Data(), *len);
+}
+
+void AsyncSocksProxyServerSocket::DirectSend(const ByteBuffer& buf) {
+ BufferedReadAdapter::DirectSend(buf.Data(), buf.Length());
+}
+
+void AsyncSocksProxyServerSocket::HandleHello(ByteBuffer* request) {
+ uint8 ver, num_methods;
+ if (!request->ReadUInt8(&ver) ||
+ !request->ReadUInt8(&num_methods)) {
+ Error(0);
+ return;
+ }
+
+ if (ver != 5) {
+ Error(0);
+ return;
+ }
+
+ // Handle either no-auth (0) or user/pass auth (2)
+ uint8 method = 0xFF;
+ if (num_methods > 0 && !request->ReadUInt8(&method)) {
+ Error(0);
+ return;
+ }
+
+ // TODO: Ask the server which method to use.
+ SendHelloReply(method);
+ if (method == 0) {
+ state_ = SS_CONNECT;
+ } else if (method == 2) {
+ state_ = SS_AUTH;
+ } else {
+ state_ = SS_ERROR;
+ }
+}
+
+void AsyncSocksProxyServerSocket::SendHelloReply(int method) {
+ ByteBuffer response;
+ response.WriteUInt8(5); // Socks Version
+ response.WriteUInt8(method); // Auth method
+ DirectSend(response);
+}
+
+void AsyncSocksProxyServerSocket::HandleAuth(ByteBuffer* request) {
+ uint8 ver, user_len, pass_len;
+ std::string user, pass;
+ if (!request->ReadUInt8(&ver) ||
+ !request->ReadUInt8(&user_len) ||
+ !request->ReadString(&user, user_len) ||
+ !request->ReadUInt8(&pass_len) ||
+ !request->ReadString(&pass, pass_len)) {
+ Error(0);
+ return;
+ }
+
+ // TODO: Allow for checking of credentials.
+ SendAuthReply(0);
+ state_ = SS_CONNECT;
+}
+
+void AsyncSocksProxyServerSocket::SendAuthReply(int result) {
+ ByteBuffer response;
+ response.WriteUInt8(1); // Negotiation Version
+ response.WriteUInt8(result);
+ DirectSend(response);
+}
+
+void AsyncSocksProxyServerSocket::HandleConnect(ByteBuffer* request) {
+ uint8 ver, command, reserved, addr_type;
+ uint32 ip;
+ uint16 port;
+ if (!request->ReadUInt8(&ver) ||
+ !request->ReadUInt8(&command) ||
+ !request->ReadUInt8(&reserved) ||
+ !request->ReadUInt8(&addr_type) ||
+ !request->ReadUInt32(&ip) ||
+ !request->ReadUInt16(&port)) {
+ Error(0);
+ return;
+ }
+
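+ // Only SOCKS5 CONNECT requests to an IPv4 literal are supported.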
+ if (ver != 5 || command != 1 ||
+ reserved != 0 || addr_type != 1) {
+ Error(0);
+ return;
+ }
+
+ SignalConnectRequest(this, SocketAddress(ip, port));
+ state_ = SS_CONNECT_PENDING;
+}
+
+void AsyncSocksProxyServerSocket::SendConnectResult(int result,
+ const SocketAddress& addr) {
+ if (state_ != SS_CONNECT_PENDING)
+ return;
+
+ ByteBuffer response;
+ response.WriteUInt8(5); // Socks version
+ response.WriteUInt8((result != 0)); // 0x01 is generic error
+ response.WriteUInt8(0); // reserved
+ response.WriteUInt8(1); // IPv4 address
+ response.WriteUInt32(addr.ip());
+ response.WriteUInt16(addr.port());
+ DirectSend(response);
+ BufferInput(false);
+ state_ = SS_TUNNEL;
+}
+
+void AsyncSocksProxyServerSocket::Error(int error) {
+ state_ = SS_ERROR;
+ BufferInput(false);
+ Close();
+ SetError(SOCKET_EACCES);
+ SignalCloseEvent(this, error);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+LoggingSocketAdapter::LoggingSocketAdapter(AsyncSocket* socket,
+ LoggingSeverity level,
+ const char * label, bool hex_mode)
+ : AsyncSocketAdapter(socket), level_(level), hex_mode_(hex_mode) {
+ label_.append("[");
+ label_.append(label);
+ label_.append("]");
+}
+
+int LoggingSocketAdapter::Send(const void *pv, size_t cb) {
+ int res = AsyncSocketAdapter::Send(pv, cb);
+ if (res > 0)
+ LogMultiline(level_, label_.c_str(), false, pv, res, hex_mode_, &lms_);
+ return res;
+}
+
+int LoggingSocketAdapter::SendTo(const void *pv, size_t cb,
+ const SocketAddress& addr) {
+ int res = AsyncSocketAdapter::SendTo(pv, cb, addr);
+ if (res > 0)
+ LogMultiline(level_, label_.c_str(), false, pv, res, hex_mode_, &lms_);
+ return res;
+}
+
+int LoggingSocketAdapter::Recv(void *pv, size_t cb) {
+ int res = AsyncSocketAdapter::Recv(pv, cb);
+ if (res > 0)
+ LogMultiline(level_, label_.c_str(), true, pv, res, hex_mode_, &lms_);
+ return res;
+}
+
+int LoggingSocketAdapter::RecvFrom(void *pv, size_t cb, SocketAddress *paddr) {
+ int res = AsyncSocketAdapter::RecvFrom(pv, cb, paddr);
+ if (res > 0)
+ LogMultiline(level_, label_.c_str(), true, pv, res, hex_mode_, &lms_);
+ return res;
+}
+
+int LoggingSocketAdapter::Close() {
+ LogMultiline(level_, label_.c_str(), false, NULL, 0, hex_mode_, &lms_);
+ LogMultiline(level_, label_.c_str(), true, NULL, 0, hex_mode_, &lms_);
+ LOG_V(level_) << label_ << " Closed locally";
+ return socket_->Close();
+}
+
+void LoggingSocketAdapter::OnConnectEvent(AsyncSocket * socket) {
+ LOG_V(level_) << label_ << " Connected";
+ AsyncSocketAdapter::OnConnectEvent(socket);
+}
+
+void LoggingSocketAdapter::OnCloseEvent(AsyncSocket * socket, int err) {
+ LogMultiline(level_, label_.c_str(), false, NULL, 0, hex_mode_, &lms_);
+ LogMultiline(level_, label_.c_str(), true, NULL, 0, hex_mode_, &lms_);
+ LOG_V(level_) << label_ << " Closed with error: " << err;
+ AsyncSocketAdapter::OnCloseEvent(socket, err);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/socketadapters.h b/chromium/third_party/webrtc/base/socketadapters.h
new file mode 100644
index 00000000000..3292df28924
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketadapters.h
@@ -0,0 +1,244 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SOCKETADAPTERS_H_
+#define WEBRTC_BASE_SOCKETADAPTERS_H_
+
+#include <map>
+#include <string>
+
+#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/cryptstring.h"
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+struct HttpAuthContext;
+class ByteBuffer;
+
+///////////////////////////////////////////////////////////////////////////////
+
+// Implements a socket adapter that can buffer and process data internally,
+// as in the case of connecting to a proxy, where you must speak the proxy
+// protocol before commencing normal socket behavior.
+class BufferedReadAdapter : public AsyncSocketAdapter {
+ public:
+ BufferedReadAdapter(AsyncSocket* socket, size_t buffer_size);
+ virtual ~BufferedReadAdapter();
+
+ virtual int Send(const void* pv, size_t cb);
+ virtual int Recv(void* pv, size_t cb);
+
+ protected:
+ int DirectSend(const void* pv, size_t cb) {
+ return AsyncSocketAdapter::Send(pv, cb);
+ }
+
+ void BufferInput(bool on = true);
+ virtual void ProcessInput(char* data, size_t* len) = 0;
+
+ virtual void OnReadEvent(AsyncSocket * socket);
+
+ private:
+ char * buffer_;
+ size_t buffer_size_, data_len_;
+ bool buffering_;
+ DISALLOW_EVIL_CONSTRUCTORS(BufferedReadAdapter);
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+// Interface for implementing proxy server sockets.
+class AsyncProxyServerSocket : public BufferedReadAdapter {
+ public:
+ AsyncProxyServerSocket(AsyncSocket* socket, size_t buffer_size)
+ : BufferedReadAdapter(socket, buffer_size) {}
+ sigslot::signal2<AsyncProxyServerSocket*,
+ const SocketAddress&> SignalConnectRequest;
+ virtual void SendConnectResult(int err, const SocketAddress& addr) = 0;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+// Implements a socket adapter that performs the client side of a
+// fake SSL handshake. Used for "ssltcp" P2P functionality.
+class AsyncSSLSocket : public BufferedReadAdapter {
+ public:
+ explicit AsyncSSLSocket(AsyncSocket* socket);
+
+ virtual int Connect(const SocketAddress& addr);
+
+ protected:
+ virtual void OnConnectEvent(AsyncSocket* socket);
+ virtual void ProcessInput(char* data, size_t* len);
+ DISALLOW_EVIL_CONSTRUCTORS(AsyncSSLSocket);
+};
+
+// Implements a socket adapter that performs the server side of a
+// fake SSL handshake. Used when implementing a relay server that does "ssltcp".
+class AsyncSSLServerSocket : public BufferedReadAdapter {
+ public:
+ explicit AsyncSSLServerSocket(AsyncSocket* socket);
+
+ protected:
+ virtual void ProcessInput(char* data, size_t* len);
+ DISALLOW_EVIL_CONSTRUCTORS(AsyncSSLServerSocket);
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+// Implements a socket adapter that speaks the HTTP/S proxy protocol.
+class AsyncHttpsProxySocket : public BufferedReadAdapter {
+ public:
+ AsyncHttpsProxySocket(AsyncSocket* socket, const std::string& user_agent,
+ const SocketAddress& proxy,
+ const std::string& username, const CryptString& password);
+ virtual ~AsyncHttpsProxySocket();
+
+ // If connect is forced, the adapter will always issue an HTTP CONNECT to the
+ // target address. Otherwise, it will issue a CONNECT only if the destination
+ // port is not 80.
+ void SetForceConnect(bool force) { force_connect_ = force; }
+
+ virtual int Connect(const SocketAddress& addr);
+ virtual SocketAddress GetRemoteAddress() const;
+ virtual int Close();
+ virtual ConnState GetState() const;
+
+ protected:
+ virtual void OnConnectEvent(AsyncSocket* socket);
+ virtual void OnCloseEvent(AsyncSocket* socket, int err);
+ virtual void ProcessInput(char* data, size_t* len);
+
+ bool ShouldIssueConnect() const;
+ void SendRequest();
+ void ProcessLine(char* data, size_t len);
+ void EndResponse();
+ void Error(int error);
+
+ private:
+ SocketAddress proxy_, dest_;
+ std::string agent_, user_, headers_;
+ CryptString pass_;
+ bool force_connect_;
+ size_t content_length_;
+ int defer_error_;
+ bool expect_close_;
+ enum ProxyState {
+ PS_INIT, PS_LEADER, PS_AUTHENTICATE, PS_SKIP_HEADERS, PS_ERROR_HEADERS,
+ PS_TUNNEL_HEADERS, PS_SKIP_BODY, PS_TUNNEL, PS_WAIT_CLOSE, PS_ERROR
+ } state_;
+ HttpAuthContext * context_;
+ std::string unknown_mechanisms_;
+ DISALLOW_EVIL_CONSTRUCTORS(AsyncHttpsProxySocket);
+};
+
+/* TODO: Implement this.
+class AsyncHttpsProxyServerSocket : public AsyncProxyServerSocket {
+ public:
+ explicit AsyncHttpsProxyServerSocket(AsyncSocket* socket);
+
+ private:
+ virtual void ProcessInput(char * data, size_t& len);
+ void Error(int error);
+ DISALLOW_EVIL_CONSTRUCTORS(AsyncHttpsProxyServerSocket);
+};
+*/
+
+///////////////////////////////////////////////////////////////////////////////
+
+// Implements a socket adapter that speaks the SOCKS proxy protocol.
+class AsyncSocksProxySocket : public BufferedReadAdapter {
+ public:
+ AsyncSocksProxySocket(AsyncSocket* socket, const SocketAddress& proxy,
+ const std::string& username, const CryptString& password);
+
+ virtual int Connect(const SocketAddress& addr);
+ virtual SocketAddress GetRemoteAddress() const;
+ virtual int Close();
+ virtual ConnState GetState() const;
+
+ protected:
+ virtual void OnConnectEvent(AsyncSocket* socket);
+ virtual void ProcessInput(char* data, size_t* len);
+
+ void SendHello();
+ void SendConnect();
+ void SendAuth();
+ void Error(int error);
+
+ private:
+ enum State {
+ SS_INIT, SS_HELLO, SS_AUTH, SS_CONNECT, SS_TUNNEL, SS_ERROR
+ };
+ State state_;
+ SocketAddress proxy_, dest_;
+ std::string user_;
+ CryptString pass_;
+ DISALLOW_EVIL_CONSTRUCTORS(AsyncSocksProxySocket);
+};
+
+// Implements a proxy server socket for the SOCKS protocol.
+class AsyncSocksProxyServerSocket : public AsyncProxyServerSocket {
+ public:
+ explicit AsyncSocksProxyServerSocket(AsyncSocket* socket);
+
+ private:
+ virtual void ProcessInput(char* data, size_t* len);
+ void DirectSend(const ByteBuffer& buf);
+
+ void HandleHello(ByteBuffer* request);
+ void SendHelloReply(int method);
+ void HandleAuth(ByteBuffer* request);
+ void SendAuthReply(int result);
+ void HandleConnect(ByteBuffer* request);
+ virtual void SendConnectResult(int result, const SocketAddress& addr);
+
+ void Error(int error);
+
+ static const int kBufferSize = 1024;
+ enum State {
+ SS_HELLO, SS_AUTH, SS_CONNECT, SS_CONNECT_PENDING, SS_TUNNEL, SS_ERROR
+ };
+ State state_;
+ DISALLOW_EVIL_CONSTRUCTORS(AsyncSocksProxyServerSocket);
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+// Implements a socket adapter that logs everything that it sends and receives.
+class LoggingSocketAdapter : public AsyncSocketAdapter {
+ public:
+ LoggingSocketAdapter(AsyncSocket* socket, LoggingSeverity level,
+ const char * label, bool hex_mode = false);
+
+ virtual int Send(const void *pv, size_t cb);
+ virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr);
+ virtual int Recv(void *pv, size_t cb);
+ virtual int RecvFrom(void *pv, size_t cb, SocketAddress *paddr);
+ virtual int Close();
+
+ protected:
+ virtual void OnConnectEvent(AsyncSocket * socket);
+ virtual void OnCloseEvent(AsyncSocket * socket, int err);
+
+ private:
+ LoggingSeverity level_;
+ std::string label_;
+ bool hex_mode_;
+ LogMultilineState lms_;
+ DISALLOW_EVIL_CONSTRUCTORS(LoggingSocketAdapter);
+};
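+
+// Usage sketch (illustrative only; the socket setup below is an assumption):
+//   AsyncSocket* raw = Thread::Current()->socketserver()->CreateAsyncSocket(
+//       AF_INET, SOCK_STREAM);
+//   LoggingSocketAdapter logged(raw, LS_VERBOSE, "example", /*hex_mode=*/true);
+//   logged.Connect(SocketAddress("127.0.0.1", 8000));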
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SOCKETADAPTERS_H_
diff --git a/chromium/third_party/webrtc/base/socketaddress.cc b/chromium/third_party/webrtc/base/socketaddress.cc
new file mode 100644
index 00000000000..47ddd0400ea
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketaddress.cc
@@ -0,0 +1,383 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/socketaddress.h"
+
+#if defined(WEBRTC_POSIX)
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+#if defined(OPENBSD)
+#include <netinet/in_systm.h>
+#endif
+#if !defined(__native_client__)
+#include <netinet/ip.h>
+#endif
+#include <arpa/inet.h>
+#include <netdb.h>
+#include <unistd.h>
+#endif
+
+#include <sstream>
+
+#include "webrtc/base/byteorder.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/nethelpers.h"
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#endif
+
+namespace rtc {
+
+SocketAddress::SocketAddress() {
+ Clear();
+}
+
+SocketAddress::SocketAddress(const std::string& hostname, int port) {
+ SetIP(hostname);
+ SetPort(port);
+}
+
+SocketAddress::SocketAddress(uint32 ip_as_host_order_integer, int port) {
+ SetIP(IPAddress(ip_as_host_order_integer));
+ SetPort(port);
+}
+
+SocketAddress::SocketAddress(const IPAddress& ip, int port) {
+ SetIP(ip);
+ SetPort(port);
+}
+
+SocketAddress::SocketAddress(const SocketAddress& addr) {
+ this->operator=(addr);
+}
+
+void SocketAddress::Clear() {
+ hostname_.clear();
+ literal_ = false;
+ ip_ = IPAddress();
+ port_ = 0;
+ scope_id_ = 0;
+}
+
+bool SocketAddress::IsNil() const {
+ return hostname_.empty() && IPIsUnspec(ip_) && 0 == port_;
+}
+
+bool SocketAddress::IsComplete() const {
+ return (!IPIsAny(ip_)) && (0 != port_);
+}
+
+SocketAddress& SocketAddress::operator=(const SocketAddress& addr) {
+ hostname_ = addr.hostname_;
+ ip_ = addr.ip_;
+ port_ = addr.port_;
+ literal_ = addr.literal_;
+ scope_id_ = addr.scope_id_;
+ return *this;
+}
+
+void SocketAddress::SetIP(uint32 ip_as_host_order_integer) {
+ hostname_.clear();
+ literal_ = false;
+ ip_ = IPAddress(ip_as_host_order_integer);
+ scope_id_ = 0;
+}
+
+void SocketAddress::SetIP(const IPAddress& ip) {
+ hostname_.clear();
+ literal_ = false;
+ ip_ = ip;
+ scope_id_ = 0;
+}
+
+void SocketAddress::SetIP(const std::string& hostname) {
+ hostname_ = hostname;
+ literal_ = IPFromString(hostname, &ip_);
+ if (!literal_) {
+ ip_ = IPAddress();
+ }
+ scope_id_ = 0;
+}
+
+void SocketAddress::SetResolvedIP(uint32 ip_as_host_order_integer) {
+ ip_ = IPAddress(ip_as_host_order_integer);
+ scope_id_ = 0;
+}
+
+void SocketAddress::SetResolvedIP(const IPAddress& ip) {
+ ip_ = ip;
+ scope_id_ = 0;
+}
+
+void SocketAddress::SetPort(int port) {
+ ASSERT((0 <= port) && (port < 65536));
+ port_ = port;
+}
+
+uint32 SocketAddress::ip() const {
+ return ip_.v4AddressAsHostOrderInteger();
+}
+
+const IPAddress& SocketAddress::ipaddr() const {
+ return ip_;
+}
+
+uint16 SocketAddress::port() const {
+ return port_;
+}
+
+std::string SocketAddress::HostAsURIString() const {
+ // If the hostname was a literal IP string, it may need to have square
+ // brackets added (for SocketAddress::ToString()).
+ if (!literal_ && !hostname_.empty())
+ return hostname_;
+ if (ip_.family() == AF_INET6) {
+ return "[" + ip_.ToString() + "]";
+ } else {
+ return ip_.ToString();
+ }
+}
+
+std::string SocketAddress::HostAsSensitiveURIString() const {
+ // If the hostname was a literal IP string, it may need to have square
+ // brackets added (for SocketAddress::ToString()).
+ if (!literal_ && !hostname_.empty())
+ return hostname_;
+ if (ip_.family() == AF_INET6) {
+ return "[" + ip_.ToSensitiveString() + "]";
+ } else {
+ return ip_.ToSensitiveString();
+ }
+}
+
+std::string SocketAddress::PortAsString() const {
+ std::ostringstream ost;
+ ost << port_;
+ return ost.str();
+}
+
+std::string SocketAddress::ToString() const {
+ std::ostringstream ost;
+ ost << *this;
+ return ost.str();
+}
+
+std::string SocketAddress::ToSensitiveString() const {
+ std::ostringstream ost;
+ ost << HostAsSensitiveURIString() << ":" << port();
+ return ost.str();
+}
+
+bool SocketAddress::FromString(const std::string& str) {
+ if (str.at(0) == '[') {
+ std::string::size_type closebracket = str.rfind(']');
+ if (closebracket != std::string::npos) {
+ std::string::size_type colon = str.find(':', closebracket);
+ if (colon != std::string::npos && colon > closebracket) {
+ SetPort(strtoul(str.substr(colon + 1).c_str(), NULL, 10));
+ SetIP(str.substr(1, closebracket - 1));
+ } else {
+ return false;
+ }
+ }
+ } else {
+ std::string::size_type pos = str.find(':');
+ if (std::string::npos == pos)
+ return false;
+ SetPort(strtoul(str.substr(pos + 1).c_str(), NULL, 10));
+ SetIP(str.substr(0, pos));
+ }
+ return true;
+}
+
+std::ostream& operator<<(std::ostream& os, const SocketAddress& addr) {
+ os << addr.HostAsURIString() << ":" << addr.port();
+ return os;
+}
+
+bool SocketAddress::IsAnyIP() const {
+ return IPIsAny(ip_);
+}
+
+bool SocketAddress::IsLoopbackIP() const {
+ return IPIsLoopback(ip_) || (IPIsAny(ip_) &&
+ 0 == strcmp(hostname_.c_str(), "localhost"));
+}
+
+bool SocketAddress::IsPrivateIP() const {
+ return IPIsPrivate(ip_);
+}
+
+bool SocketAddress::IsUnresolvedIP() const {
+ return IPIsUnspec(ip_) && !literal_ && !hostname_.empty();
+}
+
+bool SocketAddress::operator==(const SocketAddress& addr) const {
+ return EqualIPs(addr) && EqualPorts(addr);
+}
+
+bool SocketAddress::operator<(const SocketAddress& addr) const {
+ if (ip_ < addr.ip_)
+ return true;
+ else if (addr.ip_ < ip_)
+ return false;
+
+  // We only check hostnames if both IPs are zero. This matches EqualIPs().
+ if (addr.IsAnyIP()) {
+ if (hostname_ < addr.hostname_)
+ return true;
+ else if (addr.hostname_ < hostname_)
+ return false;
+ }
+
+ return port_ < addr.port_;
+}
+
+bool SocketAddress::EqualIPs(const SocketAddress& addr) const {
+ return (ip_ == addr.ip_) &&
+ ((!IPIsAny(ip_)) || (hostname_ == addr.hostname_));
+}
+
+bool SocketAddress::EqualPorts(const SocketAddress& addr) const {
+ return (port_ == addr.port_);
+}
+
+size_t SocketAddress::Hash() const {
+ size_t h = 0;
+ h ^= HashIP(ip_);
+ h ^= port_ | (port_ << 16);
+ return h;
+}
+
+void SocketAddress::ToSockAddr(sockaddr_in* saddr) const {
+ memset(saddr, 0, sizeof(*saddr));
+ if (ip_.family() != AF_INET) {
+ saddr->sin_family = AF_UNSPEC;
+ return;
+ }
+ saddr->sin_family = AF_INET;
+ saddr->sin_port = HostToNetwork16(port_);
+ if (IPIsAny(ip_)) {
+ saddr->sin_addr.s_addr = INADDR_ANY;
+ } else {
+ saddr->sin_addr = ip_.ipv4_address();
+ }
+}
+
+bool SocketAddress::FromSockAddr(const sockaddr_in& saddr) {
+ if (saddr.sin_family != AF_INET)
+ return false;
+ SetIP(NetworkToHost32(saddr.sin_addr.s_addr));
+ SetPort(NetworkToHost16(saddr.sin_port));
+ literal_ = false;
+ return true;
+}
+
+static size_t ToSockAddrStorageHelper(sockaddr_storage* addr,
+ IPAddress ip, int port, int scope_id) {
+ memset(addr, 0, sizeof(sockaddr_storage));
+ addr->ss_family = ip.family();
+ if (addr->ss_family == AF_INET6) {
+ sockaddr_in6* saddr = reinterpret_cast<sockaddr_in6*>(addr);
+ saddr->sin6_addr = ip.ipv6_address();
+ saddr->sin6_port = HostToNetwork16(port);
+ saddr->sin6_scope_id = scope_id;
+ return sizeof(sockaddr_in6);
+ } else if (addr->ss_family == AF_INET) {
+ sockaddr_in* saddr = reinterpret_cast<sockaddr_in*>(addr);
+ saddr->sin_addr = ip.ipv4_address();
+ saddr->sin_port = HostToNetwork16(port);
+ return sizeof(sockaddr_in);
+ }
+ return 0;
+}
+
+size_t SocketAddress::ToDualStackSockAddrStorage(sockaddr_storage *addr) const {
+ return ToSockAddrStorageHelper(addr, ip_.AsIPv6Address(), port_, scope_id_);
+}
+
+size_t SocketAddress::ToSockAddrStorage(sockaddr_storage* addr) const {
+ return ToSockAddrStorageHelper(addr, ip_, port_, scope_id_);
+}
+
+std::string SocketAddress::IPToString(uint32 ip_as_host_order_integer) {
+ return IPAddress(ip_as_host_order_integer).ToString();
+}
+
+std::string SocketAddress::IPToSensitiveString(uint32 ip_as_host_order_integer) {
+ return IPAddress(ip_as_host_order_integer).ToSensitiveString();
+}
+
+bool SocketAddress::StringToIP(const std::string& hostname, uint32* ip) {
+ in_addr addr;
+ if (rtc::inet_pton(AF_INET, hostname.c_str(), &addr) == 0)
+ return false;
+ *ip = NetworkToHost32(addr.s_addr);
+ return true;
+}
+
+bool SocketAddress::StringToIP(const std::string& hostname, IPAddress* ip) {
+ in_addr addr4;
+ if (rtc::inet_pton(AF_INET, hostname.c_str(), &addr4) > 0) {
+ if (ip) {
+ *ip = IPAddress(addr4);
+ }
+ return true;
+ }
+
+ in6_addr addr6;
+ if (rtc::inet_pton(AF_INET6, hostname.c_str(), &addr6) > 0) {
+ if (ip) {
+ *ip = IPAddress(addr6);
+ }
+ return true;
+ }
+ return false;
+}
+
+uint32 SocketAddress::StringToIP(const std::string& hostname) {
+ uint32 ip = 0;
+ StringToIP(hostname, &ip);
+ return ip;
+}
+
+bool SocketAddressFromSockAddrStorage(const sockaddr_storage& addr,
+ SocketAddress* out) {
+ if (!out) {
+ return false;
+ }
+ if (addr.ss_family == AF_INET) {
+ const sockaddr_in* saddr = reinterpret_cast<const sockaddr_in*>(&addr);
+ *out = SocketAddress(IPAddress(saddr->sin_addr),
+ NetworkToHost16(saddr->sin_port));
+ return true;
+ } else if (addr.ss_family == AF_INET6) {
+ const sockaddr_in6* saddr = reinterpret_cast<const sockaddr_in6*>(&addr);
+ *out = SocketAddress(IPAddress(saddr->sin6_addr),
+ NetworkToHost16(saddr->sin6_port));
+ out->SetScopeID(saddr->sin6_scope_id);
+ return true;
+ }
+ return false;
+}
+
+SocketAddress EmptySocketAddressWithFamily(int family) {
+ if (family == AF_INET) {
+ return SocketAddress(IPAddress(INADDR_ANY), 0);
+ } else if (family == AF_INET6) {
+ return SocketAddress(IPAddress(in6addr_any), 0);
+ }
+ return SocketAddress();
+}
+
+} // namespace rtc
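
Usage sketch (illustrative only, not part of the WebRTC sources above; the addresses and ports are made-up example values). It exercises both FromString() branches implemented above, "[v6-literal]:port" and "host:port":

#include <iostream>
#include "webrtc/base/socketaddress.h"

void ParseExamples() {
  rtc::SocketAddress addr;
  if (addr.FromString("[2001:db8::1]:3478")) {
    // IPv6 literals are re-bracketed by ToString().
    std::cout << addr.ToString() << std::endl;  // "[2001:db8::1]:3478"
  }
  if (addr.FromString("stun.example.org:3478")) {
    // A non-literal host stays unresolved until SetResolvedIP() is called.
    std::cout << addr.IsUnresolvedIP() << std::endl;  // 1
  }
}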
diff --git a/chromium/third_party/webrtc/base/socketaddress.h b/chromium/third_party/webrtc/base/socketaddress.h
new file mode 100644
index 00000000000..f8256fc6292
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketaddress.h
@@ -0,0 +1,214 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SOCKETADDRESS_H_
+#define WEBRTC_BASE_SOCKETADDRESS_H_
+
+#include <string>
+#include <vector>
+#include <iosfwd>
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/ipaddress.h"
+
+#undef SetPort
+
+struct sockaddr_in;
+struct sockaddr_storage;
+
+namespace rtc {
+
+// Records an IP address and port.
+class SocketAddress {
+ public:
+ // Creates a nil address.
+ SocketAddress();
+
+ // Creates the address with the given host and port. Host may be a
+ // literal IP string or a hostname to be resolved later.
+ SocketAddress(const std::string& hostname, int port);
+
+ // Creates the address with the given IP and port.
+ // IP is given as an integer in host byte order. V4 only, to be deprecated.
+ SocketAddress(uint32 ip_as_host_order_integer, int port);
+
+ // Creates the address with the given IP and port.
+ SocketAddress(const IPAddress& ip, int port);
+
+ // Creates a copy of the given address.
+ SocketAddress(const SocketAddress& addr);
+
+ // Resets to the nil address.
+ void Clear();
+
+  // Determines if this is a nil address (empty hostname, any IP, zero port).
+ bool IsNil() const;
+
+ // Returns true if ip and port are set.
+ bool IsComplete() const;
+
+ // Replaces our address with the given one.
+ SocketAddress& operator=(const SocketAddress& addr);
+
+  // Changes the IP of this address to the given one, and clears the hostname.
+  // IP is given as an integer in host byte order. V4 only, to be deprecated.
+ void SetIP(uint32 ip_as_host_order_integer);
+
+ // Changes the IP of this address to the given one, and clears the hostname.
+ void SetIP(const IPAddress& ip);
+
+ // Changes the hostname of this address to the given one.
+ // Does not resolve the address; use Resolve to do so.
+ void SetIP(const std::string& hostname);
+
+ // Sets the IP address while retaining the hostname. Useful for bypassing
+ // DNS for a pre-resolved IP.
+ // IP is given as an integer in host byte order. V4 only, to be deprecated.
+ void SetResolvedIP(uint32 ip_as_host_order_integer);
+
+ // Sets the IP address while retaining the hostname. Useful for bypassing
+ // DNS for a pre-resolved IP.
+ void SetResolvedIP(const IPAddress& ip);
+
+ // Changes the port of this address to the given one.
+ void SetPort(int port);
+
+ // Returns the hostname.
+ const std::string& hostname() const { return hostname_; }
+
+ // Returns the IP address as a host byte order integer.
+ // Returns 0 for non-v4 addresses.
+ uint32 ip() const;
+
+ const IPAddress& ipaddr() const;
+
+  int family() const { return ip_.family(); }
+
+ // Returns the port part of this address.
+ uint16 port() const;
+
+ // Returns the scope ID associated with this address. Scope IDs are a
+ // necessary addition to IPv6 link-local addresses, with different network
+ // interfaces having different scope-ids for their link-local addresses.
+  // IPv4 addresses do not have scope_ids, and sockaddr_in structures do not have
+ // a field for them.
+  int scope_id() const { return scope_id_; }
+ void SetScopeID(int id) { scope_id_ = id; }
+
+ // Returns the 'host' portion of the address (hostname or IP) in a form
+ // suitable for use in a URI. If both IP and hostname are present, hostname
+ // is preferred. IPv6 addresses are enclosed in square brackets ('[' and ']').
+ std::string HostAsURIString() const;
+
+ // Same as HostAsURIString but anonymizes IP addresses by hiding the last
+ // part.
+ std::string HostAsSensitiveURIString() const;
+
+ // Returns the port as a string.
+ std::string PortAsString() const;
+
+ // Returns hostname:port or [hostname]:port.
+ std::string ToString() const;
+
+ // Same as ToString but anonymizes it by hiding the last part.
+ std::string ToSensitiveString() const;
+
+ // Parses hostname:port and [hostname]:port.
+ bool FromString(const std::string& str);
+
+ friend std::ostream& operator<<(std::ostream& os, const SocketAddress& addr);
+
+ // Determines whether this represents a missing / any IP address.
+ // That is, 0.0.0.0 or ::.
+ // Hostname and/or port may be set.
+ bool IsAnyIP() const;
+ inline bool IsAny() const { return IsAnyIP(); } // deprecated
+
+ // Determines whether the IP address refers to a loopback address.
+ // For v4 addresses this means the address is in the range 127.0.0.0/8.
+ // For v6 addresses this means the address is ::1.
+ bool IsLoopbackIP() const;
+
+ // Determines whether the IP address is in one of the private ranges:
+ // For v4: 127.0.0.0/8 10.0.0.0/8 192.168.0.0/16 172.16.0.0/12.
+ // For v6: FE80::/16 and ::1.
+ bool IsPrivateIP() const;
+
+ // Determines whether the hostname has been resolved to an IP.
+ bool IsUnresolvedIP() const;
+ inline bool IsUnresolved() const { return IsUnresolvedIP(); } // deprecated
+
+ // Determines whether this address is identical to the given one.
+ bool operator ==(const SocketAddress& addr) const;
+ inline bool operator !=(const SocketAddress& addr) const {
+ return !this->operator ==(addr);
+ }
+
+ // Compares based on IP and then port.
+ bool operator <(const SocketAddress& addr) const;
+
+ // Determines whether this address has the same IP as the one given.
+ bool EqualIPs(const SocketAddress& addr) const;
+
+ // Determines whether this address has the same port as the one given.
+ bool EqualPorts(const SocketAddress& addr) const;
+
+ // Hashes this address into a small number.
+ size_t Hash() const;
+
+ // Write this address to a sockaddr_in.
+ // If IPv6, will zero out the sockaddr_in and sets family to AF_UNSPEC.
+ void ToSockAddr(sockaddr_in* saddr) const;
+
+ // Read this address from a sockaddr_in.
+ bool FromSockAddr(const sockaddr_in& saddr);
+
+ // Read and write the address to/from a sockaddr_storage.
+ // Dual stack version always sets family to AF_INET6, and maps v4 addresses.
+ // The other version doesn't map, and outputs an AF_INET address for
+ // v4 or mapped addresses, and AF_INET6 addresses for others.
+ // Returns the size of the sockaddr_in or sockaddr_in6 structure that is
+ // written to the sockaddr_storage, or zero on failure.
+ size_t ToDualStackSockAddrStorage(sockaddr_storage* saddr) const;
+ size_t ToSockAddrStorage(sockaddr_storage* saddr) const;
+
+ // Converts the IP address given in 'compact form' into dotted form.
+ // IP is given as an integer in host byte order. V4 only, to be deprecated.
+ // TODO: Deprecate this.
+ static std::string IPToString(uint32 ip_as_host_order_integer);
+
+ // Same as IPToString but anonymizes it by hiding the last part.
+ // TODO: Deprecate this.
+ static std::string IPToSensitiveString(uint32 ip_as_host_order_integer);
+
+ // Converts the IP address given in dotted form into compact form.
+ // Only dotted names (A.B.C.D) are converted.
+ // Output integer is returned in host byte order.
+  // TODO: Deprecate, replace with agnostic versions.
+ static bool StringToIP(const std::string& str, uint32* ip);
+ static uint32 StringToIP(const std::string& str);
+
+ // Converts the IP address given in printable form into an IPAddress.
+ static bool StringToIP(const std::string& str, IPAddress* ip);
+
+ private:
+ std::string hostname_;
+ IPAddress ip_;
+ uint16 port_;
+ int scope_id_;
+ bool literal_; // Indicates that 'hostname_' contains a literal IP string.
+};
+
+bool SocketAddressFromSockAddrStorage(const sockaddr_storage& saddr,
+ SocketAddress* out);
+SocketAddress EmptySocketAddressWithFamily(int family);
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SOCKETADDRESS_H_
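
A sketch of the SetResolvedIP() workflow documented above (illustrative only; the hostname, IP, and port are arbitrary example values, and the resolver step is elided):

#include "webrtc/base/socketaddress.h"

void ResolveSketch() {
  rtc::SocketAddress addr("media.example.com", 443);
  if (addr.IsUnresolvedIP()) {
    // A resolver (e.g. the nethelpers code) would supply the real IP;
    // 1.2.3.4 stands in here.
    addr.SetResolvedIP(rtc::IPAddress(0x01020304u));
  }
  // The hostname is retained, so logging still shows "media.example.com:443".
  std::string printable = addr.ToString();
  (void)printable;
}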
diff --git a/chromium/third_party/webrtc/base/socketaddress_unittest.cc b/chromium/third_party/webrtc/base/socketaddress_unittest.cc
new file mode 100644
index 00000000000..6166183feba
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketaddress_unittest.cc
@@ -0,0 +1,335 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_POSIX)
+#include <netinet/in.h> // for sockaddr_in
+#endif
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/socketaddress.h"
+#include "webrtc/base/ipaddress.h"
+
+namespace rtc {
+
+const in6_addr kTestV6Addr = { { {0x20, 0x01, 0x0d, 0xb8,
+ 0x10, 0x20, 0x30, 0x40,
+ 0x50, 0x60, 0x70, 0x80,
+ 0x90, 0xA0, 0xB0, 0xC0} } };
+const in6_addr kMappedV4Addr = { { {0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0xFF, 0xFF,
+ 0x01, 0x02, 0x03, 0x04} } };
+const std::string kTestV6AddrString = "2001:db8:1020:3040:5060:7080:90a0:b0c0";
+const std::string kTestV6AddrAnonymizedString = "2001:db8:1020::";
+const std::string kTestV6AddrFullString =
+ "[2001:db8:1020:3040:5060:7080:90a0:b0c0]:5678";
+const std::string kTestV6AddrFullAnonymizedString = "[2001:db8:1020::]:5678";
+
+TEST(SocketAddressTest, TestDefaultCtor) {
+ SocketAddress addr;
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(), addr.ipaddr());
+ EXPECT_EQ(0, addr.port());
+ EXPECT_EQ("", addr.hostname());
+}
+
+TEST(SocketAddressTest, TestIPPortCtor) {
+ SocketAddress addr(IPAddress(0x01020304), 5678);
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0x01020304U), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("", addr.hostname());
+ EXPECT_EQ("1.2.3.4:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestIPv4StringPortCtor) {
+ SocketAddress addr("1.2.3.4", 5678);
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0x01020304U), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("1.2.3.4", addr.hostname());
+ EXPECT_EQ("1.2.3.4:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestIPv6StringPortCtor) {
+ SocketAddress addr2(kTestV6AddrString, 1234);
+ IPAddress tocheck(kTestV6Addr);
+
+ EXPECT_FALSE(addr2.IsUnresolvedIP());
+ EXPECT_EQ(tocheck, addr2.ipaddr());
+ EXPECT_EQ(1234, addr2.port());
+ EXPECT_EQ(kTestV6AddrString, addr2.hostname());
+ EXPECT_EQ("[" + kTestV6AddrString + "]:1234", addr2.ToString());
+}
+
+TEST(SocketAddressTest, TestSpecialStringPortCtor) {
+ // inet_addr doesn't handle this address properly.
+ SocketAddress addr("255.255.255.255", 5678);
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0xFFFFFFFFU), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("255.255.255.255", addr.hostname());
+ EXPECT_EQ("255.255.255.255:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestHostnamePortCtor) {
+ SocketAddress addr("a.b.com", 5678);
+ EXPECT_TRUE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("a.b.com", addr.hostname());
+ EXPECT_EQ("a.b.com:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestCopyCtor) {
+ SocketAddress from("1.2.3.4", 5678);
+ SocketAddress addr(from);
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0x01020304U), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("1.2.3.4", addr.hostname());
+ EXPECT_EQ("1.2.3.4:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestAssign) {
+ SocketAddress from("1.2.3.4", 5678);
+ SocketAddress addr(IPAddress(0x88888888), 9999);
+ addr = from;
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0x01020304U), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("1.2.3.4", addr.hostname());
+ EXPECT_EQ("1.2.3.4:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestSetIPPort) {
+ SocketAddress addr(IPAddress(0x88888888), 9999);
+ addr.SetIP(IPAddress(0x01020304));
+ addr.SetPort(5678);
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0x01020304U), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("", addr.hostname());
+ EXPECT_EQ("1.2.3.4:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestSetIPFromString) {
+ SocketAddress addr(IPAddress(0x88888888), 9999);
+ addr.SetIP("1.2.3.4");
+ addr.SetPort(5678);
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0x01020304U), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("1.2.3.4", addr.hostname());
+ EXPECT_EQ("1.2.3.4:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestSetIPFromHostname) {
+ SocketAddress addr(IPAddress(0x88888888), 9999);
+ addr.SetIP("a.b.com");
+ addr.SetPort(5678);
+ EXPECT_TRUE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("a.b.com", addr.hostname());
+ EXPECT_EQ("a.b.com:5678", addr.ToString());
+ addr.SetResolvedIP(IPAddress(0x01020304));
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0x01020304U), addr.ipaddr());
+ EXPECT_EQ("a.b.com", addr.hostname());
+ EXPECT_EQ("a.b.com:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestFromIPv4String) {
+ SocketAddress addr;
+ EXPECT_TRUE(addr.FromString("1.2.3.4:5678"));
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0x01020304U), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("1.2.3.4", addr.hostname());
+ EXPECT_EQ("1.2.3.4:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestFromIPv6String) {
+ SocketAddress addr;
+ EXPECT_TRUE(addr.FromString(kTestV6AddrFullString));
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ(kTestV6AddrString, addr.hostname());
+ EXPECT_EQ(kTestV6AddrFullString, addr.ToString());
+}
+
+TEST(SocketAddressTest, TestFromHostname) {
+ SocketAddress addr;
+ EXPECT_TRUE(addr.FromString("a.b.com:5678"));
+ EXPECT_TRUE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("a.b.com", addr.hostname());
+ EXPECT_EQ("a.b.com:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestToFromSockAddr) {
+ SocketAddress from("1.2.3.4", 5678), addr;
+ sockaddr_in addr_in;
+ from.ToSockAddr(&addr_in);
+ EXPECT_TRUE(addr.FromSockAddr(addr_in));
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0x01020304U), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("", addr.hostname());
+ EXPECT_EQ("1.2.3.4:5678", addr.ToString());
+}
+
+TEST(SocketAddressTest, TestToFromSockAddrStorage) {
+ SocketAddress from("1.2.3.4", 5678), addr;
+ sockaddr_storage addr_storage;
+ from.ToSockAddrStorage(&addr_storage);
+ EXPECT_TRUE(SocketAddressFromSockAddrStorage(addr_storage, &addr));
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(0x01020304U), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("", addr.hostname());
+ EXPECT_EQ("1.2.3.4:5678", addr.ToString());
+
+ addr.Clear();
+ from.ToDualStackSockAddrStorage(&addr_storage);
+ EXPECT_TRUE(SocketAddressFromSockAddrStorage(addr_storage, &addr));
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(kMappedV4Addr), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("", addr.hostname());
+ EXPECT_EQ("[::ffff:1.2.3.4]:5678", addr.ToString());
+
+ addr.Clear();
+ memset(&addr_storage, 0, sizeof(sockaddr_storage));
+ from = SocketAddress(kTestV6AddrString, 5678);
+ from.SetScopeID(6);
+ from.ToSockAddrStorage(&addr_storage);
+ EXPECT_TRUE(SocketAddressFromSockAddrStorage(addr_storage, &addr));
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(kTestV6Addr), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("", addr.hostname());
+ EXPECT_EQ(kTestV6AddrFullString, addr.ToString());
+ EXPECT_EQ(6, addr.scope_id());
+
+ addr.Clear();
+ from.ToDualStackSockAddrStorage(&addr_storage);
+ EXPECT_TRUE(SocketAddressFromSockAddrStorage(addr_storage, &addr));
+ EXPECT_FALSE(addr.IsUnresolvedIP());
+ EXPECT_EQ(IPAddress(kTestV6Addr), addr.ipaddr());
+ EXPECT_EQ(5678, addr.port());
+ EXPECT_EQ("", addr.hostname());
+ EXPECT_EQ(kTestV6AddrFullString, addr.ToString());
+ EXPECT_EQ(6, addr.scope_id());
+
+ addr = from;
+ addr_storage.ss_family = AF_UNSPEC;
+ EXPECT_FALSE(SocketAddressFromSockAddrStorage(addr_storage, &addr));
+ EXPECT_EQ(from, addr);
+
+ EXPECT_FALSE(SocketAddressFromSockAddrStorage(addr_storage, NULL));
+}
+
+bool AreEqual(const SocketAddress& addr1,
+ const SocketAddress& addr2) {
+ return addr1 == addr2 && addr2 == addr1 &&
+ !(addr1 != addr2) && !(addr2 != addr1);
+}
+
+bool AreUnequal(const SocketAddress& addr1,
+ const SocketAddress& addr2) {
+ return !(addr1 == addr2) && !(addr2 == addr1) &&
+ addr1 != addr2 && addr2 != addr1;
+}
+
+TEST(SocketAddressTest, TestEqualityOperators) {
+ SocketAddress addr1("1.2.3.4", 5678);
+ SocketAddress addr2("1.2.3.4", 5678);
+ EXPECT_PRED2(AreEqual, addr1, addr2);
+
+ addr2 = SocketAddress("0.0.0.1", 5678);
+ EXPECT_PRED2(AreUnequal, addr1, addr2);
+
+ addr2 = SocketAddress("1.2.3.4", 1234);
+ EXPECT_PRED2(AreUnequal, addr1, addr2);
+
+ addr2 = SocketAddress(kTestV6AddrString, 5678);
+ EXPECT_PRED2(AreUnequal, addr1, addr2);
+
+ addr1 = SocketAddress(kTestV6AddrString, 5678);
+ EXPECT_PRED2(AreEqual, addr1, addr2);
+
+ addr2 = SocketAddress(kTestV6AddrString, 1234);
+ EXPECT_PRED2(AreUnequal, addr1, addr2);
+
+ addr2 = SocketAddress("fe80::1", 5678);
+ EXPECT_PRED2(AreUnequal, addr1, addr2);
+}
+
+bool IsLessThan(const SocketAddress& addr1,
+ const SocketAddress& addr2) {
+ return addr1 < addr2 &&
+ !(addr2 < addr1) &&
+ !(addr1 == addr2);
+}
+
+TEST(SocketAddressTest, TestComparisonOperator) {
+ SocketAddress addr1("1.2.3.4", 5678);
+ SocketAddress addr2("1.2.3.4", 5678);
+
+ EXPECT_FALSE(addr1 < addr2);
+ EXPECT_FALSE(addr2 < addr1);
+
+ addr2 = SocketAddress("1.2.3.4", 5679);
+ EXPECT_PRED2(IsLessThan, addr1, addr2);
+
+ addr2 = SocketAddress("2.2.3.4", 49152);
+ EXPECT_PRED2(IsLessThan, addr1, addr2);
+
+ addr2 = SocketAddress(kTestV6AddrString, 5678);
+ EXPECT_PRED2(IsLessThan, addr1, addr2);
+
+ addr1 = SocketAddress("fe80::1", 5678);
+ EXPECT_PRED2(IsLessThan, addr2, addr1);
+
+ addr2 = SocketAddress("fe80::1", 5679);
+ EXPECT_PRED2(IsLessThan, addr1, addr2);
+
+ addr2 = SocketAddress("fe80::1", 5678);
+ EXPECT_FALSE(addr1 < addr2);
+ EXPECT_FALSE(addr2 < addr1);
+}
+
+TEST(SocketAddressTest, TestToSensitiveString) {
+ SocketAddress addr_v4("1.2.3.4", 5678);
+ EXPECT_EQ("1.2.3.4", addr_v4.HostAsURIString());
+ EXPECT_EQ("1.2.3.4:5678", addr_v4.ToString());
+ EXPECT_EQ("1.2.3.4", addr_v4.HostAsSensitiveURIString());
+ EXPECT_EQ("1.2.3.4:5678", addr_v4.ToSensitiveString());
+ IPAddress::set_strip_sensitive(true);
+ EXPECT_EQ("1.2.3.x", addr_v4.HostAsSensitiveURIString());
+ EXPECT_EQ("1.2.3.x:5678", addr_v4.ToSensitiveString());
+ IPAddress::set_strip_sensitive(false);
+
+ SocketAddress addr_v6(kTestV6AddrString, 5678);
+ EXPECT_EQ("[" + kTestV6AddrString + "]", addr_v6.HostAsURIString());
+ EXPECT_EQ(kTestV6AddrFullString, addr_v6.ToString());
+ EXPECT_EQ("[" + kTestV6AddrString + "]", addr_v6.HostAsSensitiveURIString());
+ EXPECT_EQ(kTestV6AddrFullString, addr_v6.ToSensitiveString());
+ IPAddress::set_strip_sensitive(true);
+ EXPECT_EQ("[" + kTestV6AddrAnonymizedString + "]",
+ addr_v6.HostAsSensitiveURIString());
+ EXPECT_EQ(kTestV6AddrFullAnonymizedString, addr_v6.ToSensitiveString());
+ IPAddress::set_strip_sensitive(false);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/socketaddresspair.cc b/chromium/third_party/webrtc/base/socketaddresspair.cc
new file mode 100644
index 00000000000..dfa8b25a048
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketaddresspair.cc
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/socketaddresspair.h"
+
+namespace rtc {
+
+SocketAddressPair::SocketAddressPair(
+ const SocketAddress& src, const SocketAddress& dest)
+ : src_(src), dest_(dest) {
+}
+
+
+bool SocketAddressPair::operator ==(const SocketAddressPair& p) const {
+ return (src_ == p.src_) && (dest_ == p.dest_);
+}
+
+bool SocketAddressPair::operator <(const SocketAddressPair& p) const {
+ if (src_ < p.src_)
+ return true;
+ if (p.src_ < src_)
+ return false;
+ if (dest_ < p.dest_)
+ return true;
+ if (p.dest_ < dest_)
+ return false;
+ return false;
+}
+
+size_t SocketAddressPair::Hash() const {
+ return src_.Hash() ^ dest_.Hash();
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/socketaddresspair.h b/chromium/third_party/webrtc/base/socketaddresspair.h
new file mode 100644
index 00000000000..73a627f104e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketaddresspair.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SOCKETADDRESSPAIR_H__
+#define WEBRTC_BASE_SOCKETADDRESSPAIR_H__
+
+#include "webrtc/base/socketaddress.h"
+
+namespace rtc {
+
+// Records a pair (source,destination) of socket addresses. The two addresses
+// identify a connection between two machines. (For UDP, this "connection" is
+// not maintained explicitly in a socket.)
+class SocketAddressPair {
+public:
+ SocketAddressPair() {}
+  SocketAddressPair(const SocketAddress& src, const SocketAddress& dest);
+
+ const SocketAddress& source() const { return src_; }
+ const SocketAddress& destination() const { return dest_; }
+
+ bool operator ==(const SocketAddressPair& r) const;
+ bool operator <(const SocketAddressPair& r) const;
+
+ size_t Hash() const;
+
+private:
+ SocketAddress src_;
+ SocketAddress dest_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SOCKETADDRESSPAIR_H__
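
Since operator< above gives SocketAddressPair a strict weak ordering, it can serve directly as an ordered map key; a small sketch (example addresses only):

#include <map>
#include "webrtc/base/socketaddresspair.h"

void FlowTableSketch() {
  std::map<rtc::SocketAddressPair, int> packets_per_flow;
  rtc::SocketAddressPair flow(rtc::SocketAddress("10.0.0.1", 5000),
                              rtc::SocketAddress("10.0.0.2", 6000));
  // Distinct (source, destination) pairs map to distinct entries.
  ++packets_per_flow[flow];
}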
diff --git a/chromium/third_party/webrtc/base/socketfactory.h b/chromium/third_party/webrtc/base/socketfactory.h
new file mode 100644
index 00000000000..fe0f32bdbb0
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketfactory.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SOCKETFACTORY_H__
+#define WEBRTC_BASE_SOCKETFACTORY_H__
+
+#include "webrtc/base/socket.h"
+#include "webrtc/base/asyncsocket.h"
+
+namespace rtc {
+
+class SocketFactory {
+public:
+ virtual ~SocketFactory() {}
+
+ // Returns a new socket for blocking communication. The type can be
+  // SOCK_DGRAM or SOCK_STREAM.
+ // TODO: C++ inheritance rules mean that all users must have both
+ // CreateSocket(int) and CreateSocket(int,int). Will remove CreateSocket(int)
+  // (and CreateAsyncSocket(int)) when all callers are changed.
+ virtual Socket* CreateSocket(int type) = 0;
+ virtual Socket* CreateSocket(int family, int type) = 0;
+ // Returns a new socket for nonblocking communication. The type can be
+  // SOCK_DGRAM or SOCK_STREAM.
+ virtual AsyncSocket* CreateAsyncSocket(int type) = 0;
+ virtual AsyncSocket* CreateAsyncSocket(int family, int type) = 0;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SOCKETFACTORY_H__
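
A sketch of coding against the factory interface rather than a concrete implementation (illustrative; the factory pointer is assumed to come from elsewhere, such as a thread's socket server):

#include "webrtc/base/socketfactory.h"

// Prefers the two-argument overload, per the TODO above.
rtc::AsyncSocket* CreateTcpSocket(rtc::SocketFactory* factory, int family) {
  // family is AF_INET or AF_INET6; SOCK_STREAM selects a stream socket.
  return factory->CreateAsyncSocket(family, SOCK_STREAM);
}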
diff --git a/chromium/third_party/webrtc/base/socketpool.cc b/chromium/third_party/webrtc/base/socketpool.cc
new file mode 100644
index 00000000000..8e61cc31344
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketpool.cc
@@ -0,0 +1,280 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <iomanip>
+
+#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/socketfactory.h"
+#include "webrtc/base/socketpool.h"
+#include "webrtc/base/socketstream.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamCache - Caches a set of open streams, defers creation to a separate
+// StreamPool.
+///////////////////////////////////////////////////////////////////////////////
+
+StreamCache::StreamCache(StreamPool* pool) : pool_(pool) {
+}
+
+StreamCache::~StreamCache() {
+ for (ConnectedList::iterator it = active_.begin(); it != active_.end();
+ ++it) {
+ delete it->second;
+ }
+ for (ConnectedList::iterator it = cached_.begin(); it != cached_.end();
+ ++it) {
+ delete it->second;
+ }
+}
+
+StreamInterface* StreamCache::RequestConnectedStream(
+ const SocketAddress& remote, int* err) {
+ LOG_F(LS_VERBOSE) << "(" << remote << ")";
+ for (ConnectedList::iterator it = cached_.begin(); it != cached_.end();
+ ++it) {
+ if (remote == it->first) {
+ it->second->SignalEvent.disconnect(this);
+ // Move from cached_ to active_
+ active_.push_front(*it);
+ cached_.erase(it);
+ if (err)
+ *err = 0;
+ LOG_F(LS_VERBOSE) << "Providing cached stream";
+ return active_.front().second;
+ }
+ }
+ if (StreamInterface* stream = pool_->RequestConnectedStream(remote, err)) {
+ // We track active streams so that we can remember their address
+ active_.push_front(ConnectedStream(remote, stream));
+ LOG_F(LS_VERBOSE) << "Providing new stream";
+ return active_.front().second;
+ }
+ return NULL;
+}
+
+void StreamCache::ReturnConnectedStream(StreamInterface* stream) {
+ for (ConnectedList::iterator it = active_.begin(); it != active_.end();
+ ++it) {
+ if (stream == it->second) {
+ LOG_F(LS_VERBOSE) << "(" << it->first << ")";
+ if (stream->GetState() == SS_CLOSED) {
+ // Return closed streams
+ LOG_F(LS_VERBOSE) << "Returning closed stream";
+ pool_->ReturnConnectedStream(it->second);
+ } else {
+ // Monitor open streams
+ stream->SignalEvent.connect(this, &StreamCache::OnStreamEvent);
+ LOG_F(LS_VERBOSE) << "Caching stream";
+ cached_.push_front(*it);
+ }
+ active_.erase(it);
+ return;
+ }
+ }
+ ASSERT(false);
+}
+
+void StreamCache::OnStreamEvent(StreamInterface* stream, int events, int err) {
+ if ((events & SE_CLOSE) == 0) {
+ LOG_F(LS_WARNING) << "(" << events << ", " << err
+ << ") received non-close event";
+ return;
+ }
+ for (ConnectedList::iterator it = cached_.begin(); it != cached_.end();
+ ++it) {
+ if (stream == it->second) {
+ LOG_F(LS_VERBOSE) << "(" << it->first << ")";
+ // We don't cache closed streams, so return it.
+ it->second->SignalEvent.disconnect(this);
+ LOG_F(LS_VERBOSE) << "Returning closed stream";
+ pool_->ReturnConnectedStream(it->second);
+ cached_.erase(it);
+ return;
+ }
+ }
+ ASSERT(false);
+}
+
+//////////////////////////////////////////////////////////////////////
+// NewSocketPool
+//////////////////////////////////////////////////////////////////////
+
+NewSocketPool::NewSocketPool(SocketFactory* factory) : factory_(factory) {
+}
+
+NewSocketPool::~NewSocketPool() {
+}
+
+StreamInterface*
+NewSocketPool::RequestConnectedStream(const SocketAddress& remote, int* err) {
+ AsyncSocket* socket =
+ factory_->CreateAsyncSocket(remote.family(), SOCK_STREAM);
+ if (!socket) {
+ if (err)
+ *err = -1;
+ return NULL;
+ }
+ if ((socket->Connect(remote) != 0) && !socket->IsBlocking()) {
+ if (err)
+ *err = socket->GetError();
+ delete socket;
+ return NULL;
+ }
+ if (err)
+ *err = 0;
+ return new SocketStream(socket);
+}
+
+void
+NewSocketPool::ReturnConnectedStream(StreamInterface* stream) {
+ Thread::Current()->Dispose(stream);
+}
+
+//////////////////////////////////////////////////////////////////////
+// ReuseSocketPool
+//////////////////////////////////////////////////////////////////////
+
+ReuseSocketPool::ReuseSocketPool(SocketFactory* factory)
+: factory_(factory), stream_(NULL), checked_out_(false) {
+}
+
+ReuseSocketPool::~ReuseSocketPool() {
+ ASSERT(!checked_out_);
+ delete stream_;
+}
+
+StreamInterface*
+ReuseSocketPool::RequestConnectedStream(const SocketAddress& remote, int* err) {
+ // Only one socket can be used from this "pool" at a time
+ ASSERT(!checked_out_);
+ if (!stream_) {
+ LOG_F(LS_VERBOSE) << "Creating new socket";
+ int family = remote.family();
+ // TODO: Deal with this when we/I clean up DNS resolution.
+ if (remote.IsUnresolvedIP()) {
+ family = AF_INET;
+ }
+ AsyncSocket* socket =
+ factory_->CreateAsyncSocket(family, SOCK_STREAM);
+ if (!socket) {
+ if (err)
+ *err = -1;
+ return NULL;
+ }
+ stream_ = new SocketStream(socket);
+ }
+ if ((stream_->GetState() == SS_OPEN) && (remote == remote_)) {
+ LOG_F(LS_VERBOSE) << "Reusing connection to: " << remote_;
+ } else {
+ remote_ = remote;
+ stream_->Close();
+ if ((stream_->GetSocket()->Connect(remote_) != 0)
+ && !stream_->GetSocket()->IsBlocking()) {
+ if (err)
+ *err = stream_->GetSocket()->GetError();
+ return NULL;
+ } else {
+ LOG_F(LS_VERBOSE) << "Opening connection to: " << remote_;
+ }
+ }
+ stream_->SignalEvent.disconnect(this);
+ checked_out_ = true;
+ if (err)
+ *err = 0;
+ return stream_;
+}
+
+void
+ReuseSocketPool::ReturnConnectedStream(StreamInterface* stream) {
+ ASSERT(stream == stream_);
+ ASSERT(checked_out_);
+ checked_out_ = false;
+ // Until the socket is reused, monitor it to determine if it closes.
+ stream_->SignalEvent.connect(this, &ReuseSocketPool::OnStreamEvent);
+}
+
+void
+ReuseSocketPool::OnStreamEvent(StreamInterface* stream, int events, int err) {
+ ASSERT(stream == stream_);
+ ASSERT(!checked_out_);
+
+ // If the stream was written to and then immediately returned to us then
+ // we may get a writable notification for it, which we should ignore.
+ if (events == SE_WRITE) {
+ LOG_F(LS_VERBOSE) << "Pooled Socket unexpectedly writable: ignoring";
+ return;
+ }
+
+ // If the peer sent data, we can't process it, so drop the connection.
+ // If the socket has closed, clean it up.
+ // In either case, we'll reconnect it the next time it is used.
+ ASSERT(0 != (events & (SE_READ|SE_CLOSE)));
+ if (0 != (events & SE_CLOSE)) {
+ LOG_F(LS_VERBOSE) << "Connection closed with error: " << err;
+ } else {
+ LOG_F(LS_VERBOSE) << "Pooled Socket unexpectedly readable: closing";
+ }
+ stream_->Close();
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// LoggingPoolAdapter - Adapts a StreamPool to supply streams with attached
+// LoggingAdapters.
+///////////////////////////////////////////////////////////////////////////////
+
+LoggingPoolAdapter::LoggingPoolAdapter(
+ StreamPool* pool, LoggingSeverity level, const std::string& label,
+ bool binary_mode)
+ : pool_(pool), level_(level), label_(label), binary_mode_(binary_mode) {
+}
+
+LoggingPoolAdapter::~LoggingPoolAdapter() {
+ for (StreamList::iterator it = recycle_bin_.begin();
+ it != recycle_bin_.end(); ++it) {
+ delete *it;
+ }
+}
+
+StreamInterface* LoggingPoolAdapter::RequestConnectedStream(
+ const SocketAddress& remote, int* err) {
+ if (StreamInterface* stream = pool_->RequestConnectedStream(remote, err)) {
+ ASSERT(SS_CLOSED != stream->GetState());
+ std::stringstream ss;
+ ss << label_ << "(0x" << std::setfill('0') << std::hex << std::setw(8)
+ << stream << ")";
+ LOG_V(level_) << ss.str()
+ << ((SS_OPEN == stream->GetState()) ? " Connected"
+ : " Connecting")
+ << " to " << remote;
+ if (recycle_bin_.empty()) {
+ return new LoggingAdapter(stream, level_, ss.str(), binary_mode_);
+ }
+ LoggingAdapter* logging = recycle_bin_.front();
+ recycle_bin_.pop_front();
+ logging->set_label(ss.str());
+ logging->Attach(stream);
+ return logging;
+ }
+ return NULL;
+}
+
+void LoggingPoolAdapter::ReturnConnectedStream(StreamInterface* stream) {
+ LoggingAdapter* logging = static_cast<LoggingAdapter*>(stream);
+ pool_->ReturnConnectedStream(logging->Detach());
+ recycle_bin_.push_back(logging);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/socketpool.h b/chromium/third_party/webrtc/base/socketpool.h
new file mode 100644
index 00000000000..7bcaa062d06
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketpool.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SOCKETPOOL_H_
+#define WEBRTC_BASE_SOCKETPOOL_H_
+
+#include <deque>
+#include <list>
+#include "webrtc/base/logging.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/socketaddress.h"
+
+namespace rtc {
+
+class AsyncSocket;
+class LoggingAdapter;
+class SocketFactory;
+class SocketStream;
+class StreamInterface;
+
+//////////////////////////////////////////////////////////////////////
+// StreamPool
+//////////////////////////////////////////////////////////////////////
+
+class StreamPool {
+public:
+ virtual ~StreamPool() { }
+
+ virtual StreamInterface* RequestConnectedStream(const SocketAddress& remote,
+ int* err) = 0;
+ virtual void ReturnConnectedStream(StreamInterface* stream) = 0;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamCache - Caches a set of open streams, defers creation/destruction to
+// the supplied StreamPool.
+///////////////////////////////////////////////////////////////////////////////
+
+class StreamCache : public StreamPool, public sigslot::has_slots<> {
+public:
+ StreamCache(StreamPool* pool);
+ virtual ~StreamCache();
+
+ // StreamPool Interface
+ virtual StreamInterface* RequestConnectedStream(const SocketAddress& remote,
+ int* err);
+ virtual void ReturnConnectedStream(StreamInterface* stream);
+
+private:
+ typedef std::pair<SocketAddress, StreamInterface*> ConnectedStream;
+ typedef std::list<ConnectedStream> ConnectedList;
+
+ void OnStreamEvent(StreamInterface* stream, int events, int err);
+
+ // We delegate stream creation and deletion to this pool.
+ StreamPool* pool_;
+ // Streams that are in use (returned from RequestConnectedStream).
+ ConnectedList active_;
+ // Streams which were returned to us, but are still open.
+ ConnectedList cached_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// NewSocketPool
+// Creates a new stream on every request
+///////////////////////////////////////////////////////////////////////////////
+
+class NewSocketPool : public StreamPool {
+public:
+ NewSocketPool(SocketFactory* factory);
+ virtual ~NewSocketPool();
+
+ // StreamPool Interface
+ virtual StreamInterface* RequestConnectedStream(const SocketAddress& remote,
+ int* err);
+ virtual void ReturnConnectedStream(StreamInterface* stream);
+
+private:
+ SocketFactory* factory_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// ReuseSocketPool
+// Maintains a single socket at a time, and will reuse it without closing if
+// the destination address is the same.
+///////////////////////////////////////////////////////////////////////////////
+
+class ReuseSocketPool : public StreamPool, public sigslot::has_slots<> {
+public:
+ ReuseSocketPool(SocketFactory* factory);
+ virtual ~ReuseSocketPool();
+
+ // StreamPool Interface
+ virtual StreamInterface* RequestConnectedStream(const SocketAddress& remote,
+ int* err);
+ virtual void ReturnConnectedStream(StreamInterface* stream);
+
+private:
+ void OnStreamEvent(StreamInterface* stream, int events, int err);
+
+ SocketFactory* factory_;
+ SocketStream* stream_;
+ SocketAddress remote_;
+ bool checked_out_; // Whether the stream is currently checked out
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// LoggingPoolAdapter - Adapts a StreamPool to supply streams with attached
+// LoggingAdapters.
+///////////////////////////////////////////////////////////////////////////////
+
+class LoggingPoolAdapter : public StreamPool {
+public:
+ LoggingPoolAdapter(StreamPool* pool, LoggingSeverity level,
+ const std::string& label, bool binary_mode);
+ virtual ~LoggingPoolAdapter();
+
+ // StreamPool Interface
+ virtual StreamInterface* RequestConnectedStream(const SocketAddress& remote,
+ int* err);
+ virtual void ReturnConnectedStream(StreamInterface* stream);
+
+private:
+ StreamPool* pool_;
+ LoggingSeverity level_;
+ std::string label_;
+ bool binary_mode_;
+ typedef std::deque<LoggingAdapter*> StreamList;
+ StreamList recycle_bin_;
+};
+
+//////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SOCKETPOOL_H_
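
A sketch of how the classes above are meant to compose: NewSocketPool opens a fresh connection on every cache miss, StreamCache parks streams that come back still open, and LoggingPoolAdapter wraps whatever it hands out. The class name and label are illustrative, and the factory is assumed to outlive this object:

#include "webrtc/base/socketpool.h"

class LoggedConnectionPool {
 public:
  explicit LoggedConnectionPool(rtc::SocketFactory* factory)
      : new_pool_(factory),
        cache_(&new_pool_),
        logging_(&cache_, rtc::LS_VERBOSE, "pool", false) {}

  rtc::StreamInterface* Request(const rtc::SocketAddress& remote, int* err) {
    return logging_.RequestConnectedStream(remote, err);
  }
  void Return(rtc::StreamInterface* stream) {
    logging_.ReturnConnectedStream(stream);
  }

 private:
  rtc::NewSocketPool new_pool_;      // innermost: opens a fresh connection
  rtc::StreamCache cache_;           // reuses returned-but-still-open streams
  rtc::LoggingPoolAdapter logging_;  // outermost: logs stream traffic
};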
diff --git a/chromium/third_party/webrtc/base/socketserver.h b/chromium/third_party/webrtc/base/socketserver.h
new file mode 100644
index 00000000000..467105a6a8d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketserver.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SOCKETSERVER_H_
+#define WEBRTC_BASE_SOCKETSERVER_H_
+
+#include "webrtc/base/socketfactory.h"
+
+namespace rtc {
+
+class MessageQueue;
+
+// Provides the ability to wait for activity on a set of sockets. The Thread
+// class provides a nice wrapper on a socket server.
+//
+// The server is also a socket factory. The sockets it creates will be
+// notified of asynchronous I/O from this server's Wait method.
+class SocketServer : public SocketFactory {
+ public:
+ // When the socket server is installed into a Thread, this function is
+ // called to allow the socket server to use the thread's message queue for
+ // any messaging that it might need to perform.
+ virtual void SetMessageQueue(MessageQueue* queue) {}
+
+ // Sleeps until:
+ // 1) cms milliseconds have elapsed (unless cms == kForever)
+ // 2) WakeUp() is called
+ // While sleeping, I/O is performed if process_io is true.
+ virtual bool Wait(int cms, bool process_io) = 0;
+
+ // Causes the current wait (if one is in progress) to wake up.
+ virtual void WakeUp() = 0;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SOCKETSERVER_H_
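
A sketch of the Wait()/WakeUp() contract described above (the 100 ms budget and the loop shape are illustrative; in practice the Thread class wraps this):

#include "webrtc/base/socketserver.h"

void PumpIo(rtc::SocketServer* ss) {
  // Each iteration sleeps up to 100 ms, servicing I/O for sockets created by
  // this server; a WakeUp() from another thread ends the wait early, and a
  // false return stops the loop.
  while (ss->Wait(100, true)) {
    // Dispatch whatever work the socket callbacks queued up.
  }
}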
diff --git a/chromium/third_party/webrtc/base/socketstream.cc b/chromium/third_party/webrtc/base/socketstream.cc
new file mode 100644
index 00000000000..b0acf94c583
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketstream.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/socketstream.h"
+
+namespace rtc {
+
+SocketStream::SocketStream(AsyncSocket* socket) : socket_(NULL) {
+ Attach(socket);
+}
+
+SocketStream::~SocketStream() {
+ delete socket_;
+}
+
+void SocketStream::Attach(AsyncSocket* socket) {
+ if (socket_)
+ delete socket_;
+ socket_ = socket;
+ if (socket_) {
+ socket_->SignalConnectEvent.connect(this, &SocketStream::OnConnectEvent);
+ socket_->SignalReadEvent.connect(this, &SocketStream::OnReadEvent);
+ socket_->SignalWriteEvent.connect(this, &SocketStream::OnWriteEvent);
+ socket_->SignalCloseEvent.connect(this, &SocketStream::OnCloseEvent);
+ }
+}
+
+AsyncSocket* SocketStream::Detach() {
+ AsyncSocket* socket = socket_;
+ if (socket_) {
+ socket_->SignalConnectEvent.disconnect(this);
+ socket_->SignalReadEvent.disconnect(this);
+ socket_->SignalWriteEvent.disconnect(this);
+ socket_->SignalCloseEvent.disconnect(this);
+ socket_ = NULL;
+ }
+ return socket;
+}
+
+StreamState SocketStream::GetState() const {
+ ASSERT(socket_ != NULL);
+ switch (socket_->GetState()) {
+ case Socket::CS_CONNECTED:
+ return SS_OPEN;
+ case Socket::CS_CONNECTING:
+ return SS_OPENING;
+ case Socket::CS_CLOSED:
+ default:
+ return SS_CLOSED;
+ }
+}
+
+StreamResult SocketStream::Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ ASSERT(socket_ != NULL);
+ int result = socket_->Recv(buffer, buffer_len);
+ if (result < 0) {
+ if (socket_->IsBlocking())
+ return SR_BLOCK;
+ if (error)
+ *error = socket_->GetError();
+ return SR_ERROR;
+ }
+ if ((result > 0) || (buffer_len == 0)) {
+ if (read)
+ *read = result;
+ return SR_SUCCESS;
+ }
+ return SR_EOS;
+}
+
+StreamResult SocketStream::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ ASSERT(socket_ != NULL);
+ int result = socket_->Send(data, data_len);
+ if (result < 0) {
+ if (socket_->IsBlocking())
+ return SR_BLOCK;
+ if (error)
+ *error = socket_->GetError();
+ return SR_ERROR;
+ }
+ if (written)
+ *written = result;
+ return SR_SUCCESS;
+}
+
+void SocketStream::Close() {
+ ASSERT(socket_ != NULL);
+ socket_->Close();
+}
+
+void SocketStream::OnConnectEvent(AsyncSocket* socket) {
+ ASSERT(socket == socket_);
+ SignalEvent(this, SE_OPEN | SE_READ | SE_WRITE, 0);
+}
+
+void SocketStream::OnReadEvent(AsyncSocket* socket) {
+ ASSERT(socket == socket_);
+ SignalEvent(this, SE_READ, 0);
+}
+
+void SocketStream::OnWriteEvent(AsyncSocket* socket) {
+ ASSERT(socket == socket_);
+ SignalEvent(this, SE_WRITE, 0);
+}
+
+void SocketStream::OnCloseEvent(AsyncSocket* socket, int err) {
+ ASSERT(socket == socket_);
+ SignalEvent(this, SE_CLOSE, err);
+}
+
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/socketstream.h b/chromium/third_party/webrtc/base/socketstream.h
new file mode 100644
index 00000000000..ce9939b0ff6
--- /dev/null
+++ b/chromium/third_party/webrtc/base/socketstream.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2005 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SOCKETSTREAM_H_
+#define WEBRTC_BASE_SOCKETSTREAM_H_
+
+#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+
+class SocketStream : public StreamInterface, public sigslot::has_slots<> {
+ public:
+ explicit SocketStream(AsyncSocket* socket);
+ virtual ~SocketStream();
+
+ void Attach(AsyncSocket* socket);
+ AsyncSocket* Detach();
+
+ AsyncSocket* GetSocket() { return socket_; }
+
+ virtual StreamState GetState() const;
+
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+
+ virtual void Close();
+
+ private:
+ void OnConnectEvent(AsyncSocket* socket);
+ void OnReadEvent(AsyncSocket* socket);
+ void OnWriteEvent(AsyncSocket* socket);
+ void OnCloseEvent(AsyncSocket* socket, int err);
+
+ AsyncSocket* socket_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SocketStream);
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SOCKETSTREAM_H_
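
A sketch of wrapping an AsyncSocket (which SocketStream takes ownership of) and reacting to stream events; the class name and buffer size are arbitrary:

#include "webrtc/base/socketstream.h"

class StreamReader : public sigslot::has_slots<> {
 public:
  explicit StreamReader(rtc::AsyncSocket* socket) : stream_(socket) {
    stream_.SignalEvent.connect(this, &StreamReader::OnEvent);
  }

 private:
  void OnEvent(rtc::StreamInterface* stream, int events, int err) {
    if (events & rtc::SE_READ) {
      char buf[1024];
      size_t read = 0;
      // SR_BLOCK just means "wait for the next SE_READ and try again".
      stream->Read(buf, sizeof(buf), &read, NULL);
    }
    if (events & rtc::SE_CLOSE) {
      // err carries the socket error that closed the stream (0 on clean close).
    }
  }

  rtc::SocketStream stream_;
};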
diff --git a/chromium/third_party/webrtc/base/ssladapter.cc b/chromium/third_party/webrtc/base/ssladapter.cc
new file mode 100644
index 00000000000..d83a2779e8e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ssladapter.cc
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if HAVE_CONFIG_H
+#include "config.h"
+#endif // HAVE_CONFIG_H
+
+#include "webrtc/base/ssladapter.h"
+
+#include "webrtc/base/sslconfig.h"
+
+#if SSL_USE_SCHANNEL
+
+#include "schanneladapter.h"
+
+#elif SSL_USE_OPENSSL // && !SSL_USE_SCHANNEL
+
+#include "openssladapter.h"
+
+#elif SSL_USE_NSS // && !SSL_USE_SCHANNEL && !SSL_USE_OPENSSL
+
+#include "nssstreamadapter.h"
+
+#endif // SSL_USE_OPENSSL && !SSL_USE_SCHANNEL && !SSL_USE_NSS
+
+///////////////////////////////////////////////////////////////////////////////
+
+namespace rtc {
+
+SSLAdapter*
+SSLAdapter::Create(AsyncSocket* socket) {
+#if SSL_USE_SCHANNEL
+ return new SChannelAdapter(socket);
+#elif SSL_USE_OPENSSL // && !SSL_USE_SCHANNEL
+ return new OpenSSLAdapter(socket);
+#else // !SSL_USE_OPENSSL && !SSL_USE_SCHANNEL
+ delete socket;
+ return NULL;
+#endif // !SSL_USE_OPENSSL && !SSL_USE_SCHANNEL
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+#if SSL_USE_OPENSSL
+
+bool InitializeSSL(VerificationCallback callback) {
+ return OpenSSLAdapter::InitializeSSL(callback);
+}
+
+bool InitializeSSLThread() {
+ return OpenSSLAdapter::InitializeSSLThread();
+}
+
+bool CleanupSSL() {
+ return OpenSSLAdapter::CleanupSSL();
+}
+
+#elif SSL_USE_NSS // !SSL_USE_OPENSSL
+
+bool InitializeSSL(VerificationCallback callback) {
+ return NSSContext::InitializeSSL(callback);
+}
+
+bool InitializeSSLThread() {
+ return NSSContext::InitializeSSLThread();
+}
+
+bool CleanupSSL() {
+ return NSSContext::CleanupSSL();
+}
+
+#else // !SSL_USE_OPENSSL && !SSL_USE_NSS
+
+bool InitializeSSL(VerificationCallback callback) {
+ return true;
+}
+
+bool InitializeSSLThread() {
+ return true;
+}
+
+bool CleanupSSL() {
+ return true;
+}
+
+#endif // !SSL_USE_OPENSSL && !SSL_USE_NSS
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/ssladapter.h b/chromium/third_party/webrtc/base/ssladapter.h
new file mode 100644
index 00000000000..87b993ffbc5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/ssladapter.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SSLADAPTER_H_
+#define WEBRTC_BASE_SSLADAPTER_H_
+
+#include "webrtc/base/asyncsocket.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+
+class SSLAdapter : public AsyncSocketAdapter {
+ public:
+ explicit SSLAdapter(AsyncSocket* socket)
+ : AsyncSocketAdapter(socket), ignore_bad_cert_(false) { }
+
+ bool ignore_bad_cert() const { return ignore_bad_cert_; }
+ void set_ignore_bad_cert(bool ignore) { ignore_bad_cert_ = ignore; }
+
+ // StartSSL returns 0 if successful.
+ // If StartSSL is called while the socket is closed or connecting, the SSL
+ // negotiation will begin as soon as the socket connects.
+ virtual int StartSSL(const char* hostname, bool restartable) = 0;
+
+ // Create the default SSL adapter for this platform. On failure, returns NULL
+ // and deletes |socket|. Otherwise, the returned SSLAdapter takes ownership
+ // of |socket|.
+ static SSLAdapter* Create(AsyncSocket* socket);
+
+ private:
+ // If true, the server certificate need not match the configured hostname.
+ bool ignore_bad_cert_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+typedef bool (*VerificationCallback)(void* cert);
+
+// Call this on the main thread, before using SSL.
+// Call CleanupSSL when finished with SSL.
+bool InitializeSSL(VerificationCallback callback = NULL);
+
+// Call to initialize additional threads.
+bool InitializeSSLThread();
+
+// Call to cleanup additional threads, and also the main thread.
+bool CleanupSSL();
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SSLADAPTER_H_
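
A sketch of the intended call order: InitializeSSL() once at startup, then wrap a socket and start the handshake. The hostname handling is illustrative; per the comment above, Create() deletes the socket on failure:

#include <string>
#include "webrtc/base/ssladapter.h"

rtc::AsyncSocket* WrapWithSsl(rtc::AsyncSocket* raw_socket,
                              const std::string& hostname) {
  // rtc::InitializeSSL() is assumed to have been called once at startup.
  rtc::SSLAdapter* ssl = rtc::SSLAdapter::Create(raw_socket);
  if (!ssl)
    return NULL;  // Create() has already deleted raw_socket.
  // Negotiation starts now, or as soon as the underlying socket connects.
  if (ssl->StartSSL(hostname.c_str(), false) != 0) {
    delete ssl;
    return NULL;
  }
  return ssl;
}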
diff --git a/chromium/third_party/webrtc/base/sslconfig.h b/chromium/third_party/webrtc/base/sslconfig.h
new file mode 100644
index 00000000000..d824ab0627c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslconfig.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SSLCONFIG_H_
+#define WEBRTC_BASE_SSLCONFIG_H_
+
+// If no preference has been indicated, default to SChannel on Windows and
+// OpenSSL everywhere else, if it is available.
+#if !defined(SSL_USE_SCHANNEL) && !defined(SSL_USE_OPENSSL) && \
+ !defined(SSL_USE_NSS)
+#if defined(WEBRTC_WIN)
+
+#define SSL_USE_SCHANNEL 1
+
+#else // defined(WEBRTC_WIN)
+
+#if defined(HAVE_OPENSSL_SSL_H)
+#define SSL_USE_OPENSSL 1
+#elif defined(HAVE_NSS_SSL_H)
+#define SSL_USE_NSS 1
+#endif
+
+#endif // !defined(WEBRTC_WIN)
+#endif
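+
+// Non-normative example: a build can force a particular backend by defining
+// one of the macros above on the compiler command line (e.g. passing
+// -DSSL_USE_OPENSSL=1), in which case the defaulting logic above is skipped
+// by the outer #if guard.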
+
+#endif // WEBRTC_BASE_SSLCONFIG_H_
diff --git a/chromium/third_party/webrtc/base/sslfingerprint.cc b/chromium/third_party/webrtc/base/sslfingerprint.cc
new file mode 100644
index 00000000000..1419243c84c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslfingerprint.cc
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/sslfingerprint.h"
+
+#include <ctype.h>
+#include <string>
+
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/messagedigest.h"
+#include "webrtc/base/stringencode.h"
+
+namespace rtc {
+
+SSLFingerprint* SSLFingerprint::Create(
+ const std::string& algorithm, const rtc::SSLIdentity* identity) {
+ if (!identity) {
+ return NULL;
+ }
+
+ return Create(algorithm, &(identity->certificate()));
+}
+
+SSLFingerprint* SSLFingerprint::Create(
+ const std::string& algorithm, const rtc::SSLCertificate* cert) {
+ uint8 digest_val[64];
+ size_t digest_len;
+ bool ret = cert->ComputeDigest(
+ algorithm, digest_val, sizeof(digest_val), &digest_len);
+ if (!ret) {
+ return NULL;
+ }
+
+ return new SSLFingerprint(algorithm, digest_val, digest_len);
+}
+
+SSLFingerprint* SSLFingerprint::CreateFromRfc4572(
+ const std::string& algorithm, const std::string& fingerprint) {
+ if (algorithm.empty() || !rtc::IsFips180DigestAlgorithm(algorithm))
+ return NULL;
+
+ if (fingerprint.empty())
+ return NULL;
+
+ size_t value_len;
+ char value[rtc::MessageDigest::kMaxSize];
+ value_len = rtc::hex_decode_with_delimiter(value, sizeof(value),
+ fingerprint.c_str(),
+ fingerprint.length(),
+ ':');
+ if (!value_len)
+ return NULL;
+
+ return new SSLFingerprint(algorithm,
+ reinterpret_cast<uint8*>(value),
+ value_len);
+}
+
+SSLFingerprint::SSLFingerprint(
+ const std::string& algorithm, const uint8* digest_in, size_t digest_len)
+ : algorithm(algorithm) {
+ digest.SetData(digest_in, digest_len);
+}
+
+SSLFingerprint::SSLFingerprint(const SSLFingerprint& from)
+ : algorithm(from.algorithm), digest(from.digest) {}
+
+bool SSLFingerprint::operator==(const SSLFingerprint& other) const {
+ return algorithm == other.algorithm &&
+ digest == other.digest;
+}
+
+std::string SSLFingerprint::GetRfc4572Fingerprint() const {
+ std::string fingerprint =
+ rtc::hex_encode_with_delimiter(
+ digest.data(), digest.length(), ':');
+ std::transform(fingerprint.begin(), fingerprint.end(),
+ fingerprint.begin(), ::toupper);
+ return fingerprint;
+}
+
+std::string SSLFingerprint::ToString() {
+ std::string fp_str = algorithm;
+ fp_str.append(" ");
+ fp_str.append(GetRfc4572Fingerprint());
+ return fp_str;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/sslfingerprint.h b/chromium/third_party/webrtc/base/sslfingerprint.h
new file mode 100644
index 00000000000..a63b3dd875c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslfingerprint.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SSLFINGERPRINT_H_
+#define WEBRTC_BASE_SSLFINGERPRINT_H_
+
+#include <string>
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/sslidentity.h"
+
+namespace rtc {
+
+class SSLCertificate;
+
+struct SSLFingerprint {
+ static SSLFingerprint* Create(const std::string& algorithm,
+ const rtc::SSLIdentity* identity);
+
+ static SSLFingerprint* Create(const std::string& algorithm,
+ const rtc::SSLCertificate* cert);
+
+ static SSLFingerprint* CreateFromRfc4572(const std::string& algorithm,
+ const std::string& fingerprint);
+
+ SSLFingerprint(const std::string& algorithm, const uint8* digest_in,
+ size_t digest_len);
+
+ SSLFingerprint(const SSLFingerprint& from);
+
+ bool operator==(const SSLFingerprint& other) const;
+
+ std::string GetRfc4572Fingerprint() const;
+
+ std::string ToString();
+
+ std::string algorithm;
+ rtc::Buffer digest;
+};
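+
+// A minimal sketch (non-normative), assuming an SSLIdentity* |identity|
+// obtained from rtc::SSLIdentity::Generate():
+//
+//   rtc::scoped_ptr<rtc::SSLFingerprint> fp(
+//       rtc::SSLFingerprint::Create(rtc::DIGEST_SHA_256, identity));
+//   // fp->ToString() returns e.g. "sha-256 AB:CD:...:12": the algorithm name
+//   // followed by the colon-delimited, upper-case hex digest, as used in an
+//   // SDP a=fingerprint attribute (RFC 4572).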
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SSLFINGERPRINT_H_
diff --git a/chromium/third_party/webrtc/base/sslidentity.cc b/chromium/third_party/webrtc/base/sslidentity.cc
new file mode 100644
index 00000000000..00085740d04
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslidentity.cc
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Handling of certificates and keypairs for SSLStreamAdapter's peer mode.
+#if HAVE_CONFIG_H
+#include "config.h"
+#endif // HAVE_CONFIG_H
+
+#include "webrtc/base/sslidentity.h"
+
+#include <string>
+
+#include "webrtc/base/base64.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/sslconfig.h"
+
+#if SSL_USE_SCHANNEL
+
+#elif SSL_USE_OPENSSL // !SSL_USE_SCHANNEL
+
+#include "webrtc/base/opensslidentity.h"
+
+#elif SSL_USE_NSS // !SSL_USE_SCHANNEL && !SSL_USE_OPENSSL
+
+#include "webrtc/base/nssidentity.h"
+
+#endif // SSL_USE_SCHANNEL
+
+namespace rtc {
+
+const char kPemTypeCertificate[] = "CERTIFICATE";
+const char kPemTypeRsaPrivateKey[] = "RSA PRIVATE KEY";
+
+bool SSLIdentity::PemToDer(const std::string& pem_type,
+ const std::string& pem_string,
+ std::string* der) {
+  // Find the inner body between the BEGIN and END markers; only that part
+  // is Base64-decoded.
+ size_t header = pem_string.find("-----BEGIN " + pem_type + "-----");
+ if (header == std::string::npos)
+ return false;
+
+ size_t body = pem_string.find("\n", header);
+ if (body == std::string::npos)
+ return false;
+
+ size_t trailer = pem_string.find("-----END " + pem_type + "-----");
+ if (trailer == std::string::npos)
+ return false;
+
+ std::string inner = pem_string.substr(body + 1, trailer - (body + 1));
+
+ *der = Base64::Decode(inner, Base64::DO_PARSE_WHITE |
+ Base64::DO_PAD_ANY |
+ Base64::DO_TERM_BUFFER);
+ return true;
+}
+
+std::string SSLIdentity::DerToPem(const std::string& pem_type,
+ const unsigned char* data,
+ size_t length) {
+ std::stringstream result;
+
+ result << "-----BEGIN " << pem_type << "-----\n";
+
+ std::string b64_encoded;
+ Base64::EncodeFromArray(data, length, &b64_encoded);
+
+  // Divide the Base64-encoded data into 64-character chunks, as per
+  // Section 4.3.2.4 of RFC 1421.
+ static const size_t kChunkSize = 64;
+ size_t chunks = (b64_encoded.size() + (kChunkSize - 1)) / kChunkSize;
+ for (size_t i = 0, chunk_offset = 0; i < chunks;
+ ++i, chunk_offset += kChunkSize) {
+ result << b64_encoded.substr(chunk_offset, kChunkSize);
+ result << "\n";
+ }
+
+ result << "-----END " << pem_type << "-----\n";
+
+ return result.str();
+}
+
+#if SSL_USE_SCHANNEL
+
+SSLCertificate* SSLCertificate::FromPEMString(const std::string& pem_string) {
+ return NULL;
+}
+
+SSLIdentity* SSLIdentity::Generate(const std::string& common_name) {
+ return NULL;
+}
+
+SSLIdentity* SSLIdentity::GenerateForTest(const SSLIdentityParams& params) {
+ return NULL;
+}
+
+SSLIdentity* SSLIdentity::FromPEMStrings(const std::string& private_key,
+ const std::string& certificate) {
+ return NULL;
+}
+
+#elif SSL_USE_OPENSSL // !SSL_USE_SCHANNEL
+
+SSLCertificate* SSLCertificate::FromPEMString(const std::string& pem_string) {
+ return OpenSSLCertificate::FromPEMString(pem_string);
+}
+
+SSLIdentity* SSLIdentity::Generate(const std::string& common_name) {
+ return OpenSSLIdentity::Generate(common_name);
+}
+
+SSLIdentity* SSLIdentity::GenerateForTest(const SSLIdentityParams& params) {
+ return OpenSSLIdentity::GenerateForTest(params);
+}
+
+SSLIdentity* SSLIdentity::FromPEMStrings(const std::string& private_key,
+ const std::string& certificate) {
+ return OpenSSLIdentity::FromPEMStrings(private_key, certificate);
+}
+
+#elif SSL_USE_NSS // !SSL_USE_OPENSSL && !SSL_USE_SCHANNEL
+
+SSLCertificate* SSLCertificate::FromPEMString(const std::string& pem_string) {
+ return NSSCertificate::FromPEMString(pem_string);
+}
+
+SSLIdentity* SSLIdentity::Generate(const std::string& common_name) {
+ return NSSIdentity::Generate(common_name);
+}
+
+SSLIdentity* SSLIdentity::GenerateForTest(const SSLIdentityParams& params) {
+ return NSSIdentity::GenerateForTest(params);
+}
+
+SSLIdentity* SSLIdentity::FromPEMStrings(const std::string& private_key,
+ const std::string& certificate) {
+ return NSSIdentity::FromPEMStrings(private_key, certificate);
+}
+
+#else // !SSL_USE_OPENSSL && !SSL_USE_SCHANNEL && !SSL_USE_NSS
+
+#error "No SSL implementation"
+
+#endif // SSL_USE_SCHANNEL
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/sslidentity.h b/chromium/third_party/webrtc/base/sslidentity.h
new file mode 100644
index 00000000000..a0f32fd3b1a
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslidentity.h
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Handling of certificates and keypairs for SSLStreamAdapter's peer mode.
+
+#ifndef WEBRTC_BASE_SSLIDENTITY_H_
+#define WEBRTC_BASE_SSLIDENTITY_H_
+
+#include <algorithm>
+#include <string>
+#include <vector>
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/messagedigest.h"
+
+namespace rtc {
+
+// Forward declaration due to circular dependency with SSLCertificate.
+class SSLCertChain;
+
+// Abstract interface overridden by SSL library specific
+// implementations.
+
+// A somewhat opaque type used to encapsulate a certificate.
+// Wraps the SSL library's notion of a certificate, with reference counting.
+// The SSLCertificate object is pretty much immutable once created.
+// (The OpenSSL implementation only does reference counting and
+// possibly caching of intermediate results.)
+class SSLCertificate {
+ public:
+  // Parses and builds a certificate from a PEM encoded string.
+  // Returns NULL on failure.
+  // Caller is responsible for freeing the returned object.
+ static SSLCertificate* FromPEMString(const std::string& pem_string);
+ virtual ~SSLCertificate() {}
+
+ // Returns a new SSLCertificate object instance wrapping the same
+ // underlying certificate, including its chain if present.
+ // Caller is responsible for freeing the returned object.
+ virtual SSLCertificate* GetReference() const = 0;
+
+ // Provides the cert chain, or returns false. The caller owns the chain.
+ // The chain includes a copy of each certificate, excluding the leaf.
+ virtual bool GetChain(SSLCertChain** chain) const = 0;
+
+ // Returns a PEM encoded string representation of the certificate.
+ virtual std::string ToPEMString() const = 0;
+
+ // Provides a DER encoded binary representation of the certificate.
+ virtual void ToDER(Buffer* der_buffer) const = 0;
+
+ // Gets the name of the digest algorithm that was used to compute this
+ // certificate's signature.
+ virtual bool GetSignatureDigestAlgorithm(std::string* algorithm) const = 0;
+
+  // Computes the digest of the certificate, using the given algorithm.
+ virtual bool ComputeDigest(const std::string& algorithm,
+ unsigned char* digest,
+ size_t size,
+ size_t* length) const = 0;
+};
+
+// SSLCertChain is a simple wrapper for a vector of SSLCertificates. It serves
+// primarily to ensure proper memory management (especially deletion) of the
+// SSLCertificate pointers.
+class SSLCertChain {
+ public:
+ // These constructors copy the provided SSLCertificate(s), so the caller
+ // retains ownership.
+ explicit SSLCertChain(const std::vector<SSLCertificate*>& certs) {
+ ASSERT(!certs.empty());
+ certs_.resize(certs.size());
+ std::transform(certs.begin(), certs.end(), certs_.begin(), DupCert);
+ }
+ explicit SSLCertChain(const SSLCertificate* cert) {
+ certs_.push_back(cert->GetReference());
+ }
+
+ ~SSLCertChain() {
+ std::for_each(certs_.begin(), certs_.end(), DeleteCert);
+ }
+
+ // Vector access methods.
+ size_t GetSize() const { return certs_.size(); }
+
+ // Returns a temporary reference, only valid until the chain is destroyed.
+ const SSLCertificate& Get(size_t pos) const { return *(certs_[pos]); }
+
+ // Returns a new SSLCertChain object instance wrapping the same underlying
+ // certificate chain. Caller is responsible for freeing the returned object.
+ SSLCertChain* Copy() const {
+ return new SSLCertChain(certs_);
+ }
+
+ private:
+ // Helper function for duplicating a vector of certificates.
+ static SSLCertificate* DupCert(const SSLCertificate* cert) {
+ return cert->GetReference();
+ }
+
+ // Helper function for deleting a vector of certificates.
+ static void DeleteCert(SSLCertificate* cert) { delete cert; }
+
+ std::vector<SSLCertificate*> certs_;
+
+ DISALLOW_COPY_AND_ASSIGN(SSLCertChain);
+};
+
+// Parameters for generating an identity for testing. If common_name is
+// non-empty, it will be used for the certificate's subject and issuer name,
+// otherwise a random string will be used. |not_before| and |not_after| are
+// offsets from the current time, in seconds.
+struct SSLIdentityParams {
+ std::string common_name;
+ int not_before; // in seconds.
+ int not_after; // in seconds.
+};
+
+// Our identity in an SSL negotiation: a keypair and certificate (both
+// with the same public key).
+// This too is pretty much immutable once created.
+class SSLIdentity {
+ public:
+ // Generates an identity (keypair and self-signed certificate). If
+ // common_name is non-empty, it will be used for the certificate's
+ // subject and issuer name, otherwise a random string will be used.
+ // Returns NULL on failure.
+ // Caller is responsible for freeing the returned object.
+ static SSLIdentity* Generate(const std::string& common_name);
+
+ // Generates an identity with the specified validity period.
+ static SSLIdentity* GenerateForTest(const SSLIdentityParams& params);
+
+ // Construct an identity from a private key and a certificate.
+ static SSLIdentity* FromPEMStrings(const std::string& private_key,
+ const std::string& certificate);
+
+ virtual ~SSLIdentity() {}
+
+ // Returns a new SSLIdentity object instance wrapping the same
+ // identity information.
+ // Caller is responsible for freeing the returned object.
+ virtual SSLIdentity* GetReference() const = 0;
+
+ // Returns a temporary reference to the certificate.
+ virtual const SSLCertificate& certificate() const = 0;
+
+  // Helpers for converting between PEM and DER formats.
+ static bool PemToDer(const std::string& pem_type,
+ const std::string& pem_string,
+ std::string* der);
+ static std::string DerToPem(const std::string& pem_type,
+ const unsigned char* data,
+ size_t length);
+};
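+
+// A minimal sketch (non-normative) of generating an identity and exporting its
+// certificate in both PEM and DER form:
+//
+//   rtc::scoped_ptr<rtc::SSLIdentity> id(rtc::SSLIdentity::Generate("test"));
+//   if (id) {
+//     std::string pem = id->certificate().ToPEMString();
+//     std::string der;
+//     rtc::SSLIdentity::PemToDer(rtc::kPemTypeCertificate, pem, &der);
+//   }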
+
+extern const char kPemTypeCertificate[];
+extern const char kPemTypeRsaPrivateKey[];
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SSLIDENTITY_H_
diff --git a/chromium/third_party/webrtc/base/sslidentity_unittest.cc b/chromium/third_party/webrtc/base/sslidentity_unittest.cc
new file mode 100644
index 00000000000..1486bebb5b8
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslidentity_unittest.cc
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslidentity.h"
+
+using rtc::SSLIdentity;
+
+const char kTestCertificate[] = "-----BEGIN CERTIFICATE-----\n"
+ "MIIB6TCCAVICAQYwDQYJKoZIhvcNAQEEBQAwWzELMAkGA1UEBhMCQVUxEzARBgNV\n"
+ "BAgTClF1ZWVuc2xhbmQxGjAYBgNVBAoTEUNyeXB0U29mdCBQdHkgTHRkMRswGQYD\n"
+ "VQQDExJUZXN0IENBICgxMDI0IGJpdCkwHhcNMDAxMDE2MjIzMTAzWhcNMDMwMTE0\n"
+ "MjIzMTAzWjBjMQswCQYDVQQGEwJBVTETMBEGA1UECBMKUXVlZW5zbGFuZDEaMBgG\n"
+ "A1UEChMRQ3J5cHRTb2Z0IFB0eSBMdGQxIzAhBgNVBAMTGlNlcnZlciB0ZXN0IGNl\n"
+ "cnQgKDUxMiBiaXQpMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJ+zw4Qnlf8SMVIP\n"
+ "Fe9GEcStgOY2Ww/dgNdhjeD8ckUJNP5VZkVDTGiXav6ooKXfX3j/7tdkuD8Ey2//\n"
+ "Kv7+ue0CAwEAATANBgkqhkiG9w0BAQQFAAOBgQCT0grFQeZaqYb5EYfk20XixZV4\n"
+ "GmyAbXMftG1Eo7qGiMhYzRwGNWxEYojf5PZkYZXvSqZ/ZXHXa4g59jK/rJNnaVGM\n"
+ "k+xIX8mxQvlV0n5O9PIha5BX5teZnkHKgL8aKKLKW1BK7YTngsfSzzaeame5iKfz\n"
+ "itAE+OjGF+PFKbwX8Q==\n"
+ "-----END CERTIFICATE-----\n";
+
+const unsigned char kTestCertSha1[] = {0xA6, 0xC8, 0x59, 0xEA,
+ 0xC3, 0x7E, 0x6D, 0x33,
+ 0xCF, 0xE2, 0x69, 0x9D,
+ 0x74, 0xE6, 0xF6, 0x8A,
+ 0x9E, 0x47, 0xA7, 0xCA};
+
+class SSLIdentityTest : public testing::Test {
+ public:
+ SSLIdentityTest() :
+ identity1_(), identity2_() {
+ }
+
+ ~SSLIdentityTest() {
+ }
+
+ static void SetUpTestCase() {
+ rtc::InitializeSSL();
+ }
+
+ static void TearDownTestCase() {
+ rtc::CleanupSSL();
+ }
+
+ virtual void SetUp() {
+ identity1_.reset(SSLIdentity::Generate("test1"));
+ identity2_.reset(SSLIdentity::Generate("test2"));
+
+ ASSERT_TRUE(identity1_);
+ ASSERT_TRUE(identity2_);
+
+ test_cert_.reset(
+ rtc::SSLCertificate::FromPEMString(kTestCertificate));
+ ASSERT_TRUE(test_cert_);
+ }
+
+ void TestGetSignatureDigestAlgorithm() {
+ std::string digest_algorithm;
+ // Both NSSIdentity::Generate and OpenSSLIdentity::Generate are
+ // hard-coded to generate RSA-SHA1 certificates.
+ ASSERT_TRUE(identity1_->certificate().GetSignatureDigestAlgorithm(
+ &digest_algorithm));
+ ASSERT_EQ(rtc::DIGEST_SHA_1, digest_algorithm);
+ ASSERT_TRUE(identity2_->certificate().GetSignatureDigestAlgorithm(
+ &digest_algorithm));
+ ASSERT_EQ(rtc::DIGEST_SHA_1, digest_algorithm);
+
+ // The test certificate has an MD5-based signature.
+ ASSERT_TRUE(test_cert_->GetSignatureDigestAlgorithm(&digest_algorithm));
+ ASSERT_EQ(rtc::DIGEST_MD5, digest_algorithm);
+ }
+
+ void TestDigest(const std::string &algorithm, size_t expected_len,
+ const unsigned char *expected_digest = NULL) {
+ unsigned char digest1[64];
+ unsigned char digest1b[64];
+ unsigned char digest2[64];
+ size_t digest1_len;
+ size_t digest1b_len;
+ size_t digest2_len;
+ bool rv;
+
+ rv = identity1_->certificate().ComputeDigest(algorithm,
+ digest1, sizeof(digest1),
+ &digest1_len);
+ EXPECT_TRUE(rv);
+ EXPECT_EQ(expected_len, digest1_len);
+
+ rv = identity1_->certificate().ComputeDigest(algorithm,
+ digest1b, sizeof(digest1b),
+ &digest1b_len);
+ EXPECT_TRUE(rv);
+ EXPECT_EQ(expected_len, digest1b_len);
+ EXPECT_EQ(0, memcmp(digest1, digest1b, expected_len));
+
+
+ rv = identity2_->certificate().ComputeDigest(algorithm,
+ digest2, sizeof(digest2),
+ &digest2_len);
+ EXPECT_TRUE(rv);
+ EXPECT_EQ(expected_len, digest2_len);
+ EXPECT_NE(0, memcmp(digest1, digest2, expected_len));
+
+ // If we have an expected hash for the test cert, check it.
+ if (expected_digest) {
+ unsigned char digest3[64];
+ size_t digest3_len;
+
+ rv = test_cert_->ComputeDigest(algorithm, digest3, sizeof(digest3),
+ &digest3_len);
+ EXPECT_TRUE(rv);
+ EXPECT_EQ(expected_len, digest3_len);
+ EXPECT_EQ(0, memcmp(digest3, expected_digest, expected_len));
+ }
+ }
+
+ private:
+ rtc::scoped_ptr<SSLIdentity> identity1_;
+ rtc::scoped_ptr<SSLIdentity> identity2_;
+ rtc::scoped_ptr<rtc::SSLCertificate> test_cert_;
+};
+
+TEST_F(SSLIdentityTest, DigestSHA1) {
+ TestDigest(rtc::DIGEST_SHA_1, 20, kTestCertSha1);
+}
+
+// HASH_AlgSHA224 is not supported in the Chromium Linux build.
+#if SSL_USE_NSS
+TEST_F(SSLIdentityTest, DISABLED_DigestSHA224) {
+#else
+TEST_F(SSLIdentityTest, DigestSHA224) {
+#endif
+ TestDigest(rtc::DIGEST_SHA_224, 28);
+}
+
+TEST_F(SSLIdentityTest, DigestSHA256) {
+ TestDigest(rtc::DIGEST_SHA_256, 32);
+}
+
+TEST_F(SSLIdentityTest, DigestSHA384) {
+ TestDigest(rtc::DIGEST_SHA_384, 48);
+}
+
+TEST_F(SSLIdentityTest, DigestSHA512) {
+ TestDigest(rtc::DIGEST_SHA_512, 64);
+}
+
+TEST_F(SSLIdentityTest, FromPEMStrings) {
+ static const char kRSA_PRIVATE_KEY_PEM[] =
+ "-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
+ "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
+ "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
+ "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
+ "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
+ "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
+ "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
+ "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
+ "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
+ "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
+ "UCXiYxSsu20QNVw=\n"
+ "-----END RSA PRIVATE KEY-----\n";
+
+ static const char kCERT_PEM[] =
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
+ "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
+ "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
+ "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
+ "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
+ "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
+ "-----END CERTIFICATE-----\n";
+
+ rtc::scoped_ptr<SSLIdentity> identity(
+ SSLIdentity::FromPEMStrings(kRSA_PRIVATE_KEY_PEM, kCERT_PEM));
+ EXPECT_TRUE(identity);
+ EXPECT_EQ(kCERT_PEM, identity->certificate().ToPEMString());
+}
+
+TEST_F(SSLIdentityTest, PemDerConversion) {
+ std::string der;
+ EXPECT_TRUE(SSLIdentity::PemToDer("CERTIFICATE", kTestCertificate, &der));
+
+ EXPECT_EQ(kTestCertificate, SSLIdentity::DerToPem(
+ "CERTIFICATE",
+ reinterpret_cast<const unsigned char*>(der.data()), der.length()));
+}
+
+TEST_F(SSLIdentityTest, GetSignatureDigestAlgorithm) {
+ TestGetSignatureDigestAlgorithm();
+}
diff --git a/chromium/third_party/webrtc/base/sslroots.h b/chromium/third_party/webrtc/base/sslroots.h
new file mode 100644
index 00000000000..31d601c1694
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslroots.h
@@ -0,0 +1,4932 @@
+// This file contains, in C form, the root certificates that are needed to
+// connect to Google.
+
+// It was generated with the following command line:
+// > python //depot/googleclient/talk/tools/generate_sslroots.py
+// //depot/google3/security/cacerts/for_connecting_to_google/roots.pem
+
+/* subject:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root */
+/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root */
+
+namespace rtc {
+
+const unsigned char AddTrust_External_Root_certificate[1082]={
+0x30,0x82,0x04,0x36,0x30,0x82,0x03,0x1E,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,
+0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,
+0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,0x13,0x1D,0x41,
+0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,
+0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x22,0x30,0x20,
+0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,
+0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,
+0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35,0x33,0x30,0x31,0x30,0x34,0x38,0x33,0x38,
+0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33,0x30,0x31,0x30,0x34,0x38,0x33,0x38,0x5A,
+0x30,0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,
+0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,
+0x73,0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,0x13,0x1D,
+0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,
+0x6C,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x22,0x30,
+0x20,0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,
+0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,
+0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,
+0x01,0x00,0xB7,0xF7,0x1A,0x33,0xE6,0xF2,0x00,0x04,0x2D,0x39,0xE0,0x4E,0x5B,0xED,
+0x1F,0xBC,0x6C,0x0F,0xCD,0xB5,0xFA,0x23,0xB6,0xCE,0xDE,0x9B,0x11,0x33,0x97,0xA4,
+0x29,0x4C,0x7D,0x93,0x9F,0xBD,0x4A,0xBC,0x93,0xED,0x03,0x1A,0xE3,0x8F,0xCF,0xE5,
+0x6D,0x50,0x5A,0xD6,0x97,0x29,0x94,0x5A,0x80,0xB0,0x49,0x7A,0xDB,0x2E,0x95,0xFD,
+0xB8,0xCA,0xBF,0x37,0x38,0x2D,0x1E,0x3E,0x91,0x41,0xAD,0x70,0x56,0xC7,0xF0,0x4F,
+0x3F,0xE8,0x32,0x9E,0x74,0xCA,0xC8,0x90,0x54,0xE9,0xC6,0x5F,0x0F,0x78,0x9D,0x9A,
+0x40,0x3C,0x0E,0xAC,0x61,0xAA,0x5E,0x14,0x8F,0x9E,0x87,0xA1,0x6A,0x50,0xDC,0xD7,
+0x9A,0x4E,0xAF,0x05,0xB3,0xA6,0x71,0x94,0x9C,0x71,0xB3,0x50,0x60,0x0A,0xC7,0x13,
+0x9D,0x38,0x07,0x86,0x02,0xA8,0xE9,0xA8,0x69,0x26,0x18,0x90,0xAB,0x4C,0xB0,0x4F,
+0x23,0xAB,0x3A,0x4F,0x84,0xD8,0xDF,0xCE,0x9F,0xE1,0x69,0x6F,0xBB,0xD7,0x42,0xD7,
+0x6B,0x44,0xE4,0xC7,0xAD,0xEE,0x6D,0x41,0x5F,0x72,0x5A,0x71,0x08,0x37,0xB3,0x79,
+0x65,0xA4,0x59,0xA0,0x94,0x37,0xF7,0x00,0x2F,0x0D,0xC2,0x92,0x72,0xDA,0xD0,0x38,
+0x72,0xDB,0x14,0xA8,0x45,0xC4,0x5D,0x2A,0x7D,0xB7,0xB4,0xD6,0xC4,0xEE,0xAC,0xCD,
+0x13,0x44,0xB7,0xC9,0x2B,0xDD,0x43,0x00,0x25,0xFA,0x61,0xB9,0x69,0x6A,0x58,0x23,
+0x11,0xB7,0xA7,0x33,0x8F,0x56,0x75,0x59,0xF5,0xCD,0x29,0xD7,0x46,0xB7,0x0A,0x2B,
+0x65,0xB6,0xD3,0x42,0x6F,0x15,0xB2,0xB8,0x7B,0xFB,0xEF,0xE9,0x5D,0x53,0xD5,0x34,
+0x5A,0x27,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xDC,0x30,0x81,0xD9,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xAD,0xBD,0x98,0x7A,0x34,0xB4,0x26,0xF7,
+0xFA,0xC4,0x26,0x54,0xEF,0x03,0xBD,0xE0,0x24,0xCB,0x54,0x1A,0x30,0x0B,0x06,0x03,
+0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,
+0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x99,0x06,0x03,0x55,
+0x1D,0x23,0x04,0x81,0x91,0x30,0x81,0x8E,0x80,0x14,0xAD,0xBD,0x98,0x7A,0x34,0xB4,
+0x26,0xF7,0xFA,0xC4,0x26,0x54,0xEF,0x03,0xBD,0xE0,0x24,0xCB,0x54,0x1A,0xA1,0x73,
+0xA4,0x71,0x30,0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,
+0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,
+0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x1D,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,
+0x6E,0x61,0x6C,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,
+0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,
+0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,
+0x6F,0x6F,0x74,0x82,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xB0,0x9B,0xE0,0x85,0x25,0xC2,
+0xD6,0x23,0xE2,0x0F,0x96,0x06,0x92,0x9D,0x41,0x98,0x9C,0xD9,0x84,0x79,0x81,0xD9,
+0x1E,0x5B,0x14,0x07,0x23,0x36,0x65,0x8F,0xB0,0xD8,0x77,0xBB,0xAC,0x41,0x6C,0x47,
+0x60,0x83,0x51,0xB0,0xF9,0x32,0x3D,0xE7,0xFC,0xF6,0x26,0x13,0xC7,0x80,0x16,0xA5,
+0xBF,0x5A,0xFC,0x87,0xCF,0x78,0x79,0x89,0x21,0x9A,0xE2,0x4C,0x07,0x0A,0x86,0x35,
+0xBC,0xF2,0xDE,0x51,0xC4,0xD2,0x96,0xB7,0xDC,0x7E,0x4E,0xEE,0x70,0xFD,0x1C,0x39,
+0xEB,0x0C,0x02,0x51,0x14,0x2D,0x8E,0xBD,0x16,0xE0,0xC1,0xDF,0x46,0x75,0xE7,0x24,
+0xAD,0xEC,0xF4,0x42,0xB4,0x85,0x93,0x70,0x10,0x67,0xBA,0x9D,0x06,0x35,0x4A,0x18,
+0xD3,0x2B,0x7A,0xCC,0x51,0x42,0xA1,0x7A,0x63,0xD1,0xE6,0xBB,0xA1,0xC5,0x2B,0xC2,
+0x36,0xBE,0x13,0x0D,0xE6,0xBD,0x63,0x7E,0x79,0x7B,0xA7,0x09,0x0D,0x40,0xAB,0x6A,
+0xDD,0x8F,0x8A,0xC3,0xF6,0xF6,0x8C,0x1A,0x42,0x05,0x51,0xD4,0x45,0xF5,0x9F,0xA7,
+0x62,0x21,0x68,0x15,0x20,0x43,0x3C,0x99,0xE7,0x7C,0xBD,0x24,0xD8,0xA9,0x91,0x17,
+0x73,0x88,0x3F,0x56,0x1B,0x31,0x38,0x18,0xB4,0x71,0x0F,0x9A,0xCD,0xC8,0x0E,0x9E,
+0x8E,0x2E,0x1B,0xE1,0x8C,0x98,0x83,0xCB,0x1F,0x31,0xF1,0x44,0x4C,0xC6,0x04,0x73,
+0x49,0x76,0x60,0x0F,0xC7,0xF8,0xBD,0x17,0x80,0x6B,0x2E,0xE9,0xCC,0x4C,0x0E,0x5A,
+0x9A,0x79,0x0F,0x20,0x0A,0x2E,0xD5,0x9E,0x63,0x26,0x1E,0x55,0x92,0x94,0xD8,0x82,
+0x17,0x5A,0x7B,0xD0,0xBC,0xC7,0x8F,0x4E,0x86,0x04,
+};
+
+
+/* subject:/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Class 1 CA Root */
+/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Class 1 CA Root */
+
+
+const unsigned char AddTrust_Low_Value_Services_Root_certificate[1052]={
+0x30,0x82,0x04,0x18,0x30,0x82,0x03,0x00,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,
+0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,
+0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41,
+0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,
+0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x41,0x64,
+0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x31,0x20,0x43,
+0x41,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35,0x33,0x30,
+0x31,0x30,0x33,0x38,0x33,0x31,0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33,0x30,0x31,
+0x30,0x33,0x38,0x33,0x31,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x06,0x13,0x02,0x53,0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,
+0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x14,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,
+0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,
+0x55,0x04,0x03,0x13,0x18,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6C,
+0x61,0x73,0x73,0x20,0x31,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,0x30,0x82,0x01,
+0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,
+0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0x96,0x96,
+0xD4,0x21,0x49,0x60,0xE2,0x6B,0xE8,0x41,0x07,0x0C,0xDE,0xC4,0xE0,0xDC,0x13,0x23,
+0xCD,0xC1,0x35,0xC7,0xFB,0xD6,0x4E,0x11,0x0A,0x67,0x5E,0xF5,0x06,0x5B,0x6B,0xA5,
+0x08,0x3B,0x5B,0x29,0x16,0x3A,0xE7,0x87,0xB2,0x34,0x06,0xC5,0xBC,0x05,0xA5,0x03,
+0x7C,0x82,0xCB,0x29,0x10,0xAE,0xE1,0x88,0x81,0xBD,0xD6,0x9E,0xD3,0xFE,0x2D,0x56,
+0xC1,0x15,0xCE,0xE3,0x26,0x9D,0x15,0x2E,0x10,0xFB,0x06,0x8F,0x30,0x04,0xDE,0xA7,
+0xB4,0x63,0xB4,0xFF,0xB1,0x9C,0xAE,0x3C,0xAF,0x77,0xB6,0x56,0xC5,0xB5,0xAB,0xA2,
+0xE9,0x69,0x3A,0x3D,0x0E,0x33,0x79,0x32,0x3F,0x70,0x82,0x92,0x99,0x61,0x6D,0x8D,
+0x30,0x08,0x8F,0x71,0x3F,0xA6,0x48,0x57,0x19,0xF8,0x25,0xDC,0x4B,0x66,0x5C,0xA5,
+0x74,0x8F,0x98,0xAE,0xC8,0xF9,0xC0,0x06,0x22,0xE7,0xAC,0x73,0xDF,0xA5,0x2E,0xFB,
+0x52,0xDC,0xB1,0x15,0x65,0x20,0xFA,0x35,0x66,0x69,0xDE,0xDF,0x2C,0xF1,0x6E,0xBC,
+0x30,0xDB,0x2C,0x24,0x12,0xDB,0xEB,0x35,0x35,0x68,0x90,0xCB,0x00,0xB0,0x97,0x21,
+0x3D,0x74,0x21,0x23,0x65,0x34,0x2B,0xBB,0x78,0x59,0xA3,0xD6,0xE1,0x76,0x39,0x9A,
+0xA4,0x49,0x8E,0x8C,0x74,0xAF,0x6E,0xA4,0x9A,0xA3,0xD9,0x9B,0xD2,0x38,0x5C,0x9B,
+0xA2,0x18,0xCC,0x75,0x23,0x84,0xBE,0xEB,0xE2,0x4D,0x33,0x71,0x8E,0x1A,0xF0,0xC2,
+0xF8,0xC7,0x1D,0xA2,0xAD,0x03,0x97,0x2C,0xF8,0xCF,0x25,0xC6,0xF6,0xB8,0x24,0x31,
+0xB1,0x63,0x5D,0x92,0x7F,0x63,0xF0,0x25,0xC9,0x53,0x2E,0x1F,0xBF,0x4D,0x02,0x03,
+0x01,0x00,0x01,0xA3,0x81,0xD2,0x30,0x81,0xCF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,
+0x04,0x16,0x04,0x14,0x95,0xB1,0xB4,0xF0,0x94,0xB6,0xBD,0xC7,0xDA,0xD1,0x11,0x09,
+0x21,0xBE,0xC1,0xAF,0x49,0xFD,0x10,0x7B,0x30,0x0B,0x06,0x03,0x55,0x1D,0x0F,0x04,
+0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x8F,0x06,0x03,0x55,0x1D,0x23,0x04,0x81,
+0x87,0x30,0x81,0x84,0x80,0x14,0x95,0xB1,0xB4,0xF0,0x94,0xB6,0xBD,0xC7,0xDA,0xD1,
+0x11,0x09,0x21,0xBE,0xC1,0xAF,0x49,0xFD,0x10,0x7B,0xA1,0x69,0xA4,0x67,0x30,0x65,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,0x30,
+0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,
+0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41,0x64,
+0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,
+0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x41,0x64,0x64,
+0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x31,0x20,0x43,0x41,
+0x20,0x52,0x6F,0x6F,0x74,0x82,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x2C,0x6D,0x64,0x1B,
+0x1F,0xCD,0x0D,0xDD,0xB9,0x01,0xFA,0x96,0x63,0x34,0x32,0x48,0x47,0x99,0xAE,0x97,
+0xED,0xFD,0x72,0x16,0xA6,0x73,0x47,0x5A,0xF4,0xEB,0xDD,0xE9,0xF5,0xD6,0xFB,0x45,
+0xCC,0x29,0x89,0x44,0x5D,0xBF,0x46,0x39,0x3D,0xE8,0xEE,0xBC,0x4D,0x54,0x86,0x1E,
+0x1D,0x6C,0xE3,0x17,0x27,0x43,0xE1,0x89,0x56,0x2B,0xA9,0x6F,0x72,0x4E,0x49,0x33,
+0xE3,0x72,0x7C,0x2A,0x23,0x9A,0xBC,0x3E,0xFF,0x28,0x2A,0xED,0xA3,0xFF,0x1C,0x23,
+0xBA,0x43,0x57,0x09,0x67,0x4D,0x4B,0x62,0x06,0x2D,0xF8,0xFF,0x6C,0x9D,0x60,0x1E,
+0xD8,0x1C,0x4B,0x7D,0xB5,0x31,0x2F,0xD9,0xD0,0x7C,0x5D,0xF8,0xDE,0x6B,0x83,0x18,
+0x78,0x37,0x57,0x2F,0xE8,0x33,0x07,0x67,0xDF,0x1E,0xC7,0x6B,0x2A,0x95,0x76,0xAE,
+0x8F,0x57,0xA3,0xF0,0xF4,0x52,0xB4,0xA9,0x53,0x08,0xCF,0xE0,0x4F,0xD3,0x7A,0x53,
+0x8B,0xFD,0xBB,0x1C,0x56,0x36,0xF2,0xFE,0xB2,0xB6,0xE5,0x76,0xBB,0xD5,0x22,0x65,
+0xA7,0x3F,0xFE,0xD1,0x66,0xAD,0x0B,0xBC,0x6B,0x99,0x86,0xEF,0x3F,0x7D,0xF3,0x18,
+0x32,0xCA,0x7B,0xC6,0xE3,0xAB,0x64,0x46,0x95,0xF8,0x26,0x69,0xD9,0x55,0x83,0x7B,
+0x2C,0x96,0x07,0xFF,0x59,0x2C,0x44,0xA3,0xC6,0xE5,0xE9,0xA9,0xDC,0xA1,0x63,0x80,
+0x5A,0x21,0x5E,0x21,0xCF,0x53,0x54,0xF0,0xBA,0x6F,0x89,0xDB,0xA8,0xAA,0x95,0xCF,
+0x8B,0xE3,0x71,0xCC,0x1E,0x1B,0x20,0x44,0x08,0xC0,0x7A,0xB6,0x40,0xFD,0xC4,0xE4,
+0x35,0xE1,0x1D,0x16,0x1C,0xD0,0xBC,0x2B,0x8E,0xD6,0x71,0xD9,
+};
+
+
+/* subject:/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Public CA Root */
+/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Public CA Root */
+
+
+const unsigned char AddTrust_Public_Services_Root_certificate[1049]={
+0x30,0x82,0x04,0x15,0x30,0x82,0x02,0xFD,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x64,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,
+0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,
+0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41,
+0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,
+0x6F,0x72,0x6B,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x41,0x64,
+0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x43,0x41,
+0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35,0x33,0x30,0x31,
+0x30,0x34,0x31,0x35,0x30,0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33,0x30,0x31,0x30,
+0x34,0x31,0x35,0x30,0x5A,0x30,0x64,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
+0x13,0x02,0x53,0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,
+0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x14,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,
+0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,
+0x04,0x03,0x13,0x17,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x75,0x62,
+0x6C,0x69,0x63,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,
+0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xE9,0x1A,0x30,0x8F,
+0x83,0x88,0x14,0xC1,0x20,0xD8,0x3C,0x9B,0x8F,0x1B,0x7E,0x03,0x74,0xBB,0xDA,0x69,
+0xD3,0x46,0xA5,0xF8,0x8E,0xC2,0x0C,0x11,0x90,0x51,0xA5,0x2F,0x66,0x54,0x40,0x55,
+0xEA,0xDB,0x1F,0x4A,0x56,0xEE,0x9F,0x23,0x6E,0xF4,0x39,0xCB,0xA1,0xB9,0x6F,0xF2,
+0x7E,0xF9,0x5D,0x87,0x26,0x61,0x9E,0x1C,0xF8,0xE2,0xEC,0xA6,0x81,0xF8,0x21,0xC5,
+0x24,0xCC,0x11,0x0C,0x3F,0xDB,0x26,0x72,0x7A,0xC7,0x01,0x97,0x07,0x17,0xF9,0xD7,
+0x18,0x2C,0x30,0x7D,0x0E,0x7A,0x1E,0x62,0x1E,0xC6,0x4B,0xC0,0xFD,0x7D,0x62,0x77,
+0xD3,0x44,0x1E,0x27,0xF6,0x3F,0x4B,0x44,0xB3,0xB7,0x38,0xD9,0x39,0x1F,0x60,0xD5,
+0x51,0x92,0x73,0x03,0xB4,0x00,0x69,0xE3,0xF3,0x14,0x4E,0xEE,0xD1,0xDC,0x09,0xCF,
+0x77,0x34,0x46,0x50,0xB0,0xF8,0x11,0xF2,0xFE,0x38,0x79,0xF7,0x07,0x39,0xFE,0x51,
+0x92,0x97,0x0B,0x5B,0x08,0x5F,0x34,0x86,0x01,0xAD,0x88,0x97,0xEB,0x66,0xCD,0x5E,
+0xD1,0xFF,0xDC,0x7D,0xF2,0x84,0xDA,0xBA,0x77,0xAD,0xDC,0x80,0x08,0xC7,0xA7,0x87,
+0xD6,0x55,0x9F,0x97,0x6A,0xE8,0xC8,0x11,0x64,0xBA,0xE7,0x19,0x29,0x3F,0x11,0xB3,
+0x78,0x90,0x84,0x20,0x52,0x5B,0x11,0xEF,0x78,0xD0,0x83,0xF6,0xD5,0x48,0x90,0xD0,
+0x30,0x1C,0xCF,0x80,0xF9,0x60,0xFE,0x79,0xE4,0x88,0xF2,0xDD,0x00,0xEB,0x94,0x45,
+0xEB,0x65,0x94,0x69,0x40,0xBA,0xC0,0xD5,0xB4,0xB8,0xBA,0x7D,0x04,0x11,0xA8,0xEB,
+0x31,0x05,0x96,0x94,0x4E,0x58,0x21,0x8E,0x9F,0xD0,0x60,0xFD,0x02,0x03,0x01,0x00,
+0x01,0xA3,0x81,0xD1,0x30,0x81,0xCE,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,
+0x04,0x14,0x81,0x3E,0x37,0xD8,0x92,0xB0,0x1F,0x77,0x9F,0x5C,0xB4,0xAB,0x73,0xAA,
+0xE7,0xF6,0x34,0x60,0x2F,0xFA,0x30,0x0B,0x06,0x03,0x55,0x1D,0x0F,0x04,0x04,0x03,
+0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
+0x03,0x01,0x01,0xFF,0x30,0x81,0x8E,0x06,0x03,0x55,0x1D,0x23,0x04,0x81,0x86,0x30,
+0x81,0x83,0x80,0x14,0x81,0x3E,0x37,0xD8,0x92,0xB0,0x1F,0x77,0x9F,0x5C,0xB4,0xAB,
+0x73,0xAA,0xE7,0xF6,0x34,0x60,0x2F,0xFA,0xA1,0x68,0xA4,0x66,0x30,0x64,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,0x30,0x12,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x41,
+0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41,0x64,0x64,0x54,
+0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,
+0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x41,0x64,0x64,0x54,0x72,
+0x75,0x73,0x74,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x43,0x41,0x20,0x52,0x6F,
+0x6F,0x74,0x82,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x03,0xF7,0x15,0x4A,0xF8,0x24,0xDA,
+0x23,0x56,0x16,0x93,0x76,0xDD,0x36,0x28,0xB9,0xAE,0x1B,0xB8,0xC3,0xF1,0x64,0xBA,
+0x20,0x18,0x78,0x95,0x29,0x27,0x57,0x05,0xBC,0x7C,0x2A,0xF4,0xB9,0x51,0x55,0xDA,
+0x87,0x02,0xDE,0x0F,0x16,0x17,0x31,0xF8,0xAA,0x79,0x2E,0x09,0x13,0xBB,0xAF,0xB2,
+0x20,0x19,0x12,0xE5,0x93,0xF9,0x4B,0xF9,0x83,0xE8,0x44,0xD5,0xB2,0x41,0x25,0xBF,
+0x88,0x75,0x6F,0xFF,0x10,0xFC,0x4A,0x54,0xD0,0x5F,0xF0,0xFA,0xEF,0x36,0x73,0x7D,
+0x1B,0x36,0x45,0xC6,0x21,0x6D,0xB4,0x15,0xB8,0x4E,0xCF,0x9C,0x5C,0xA5,0x3D,0x5A,
+0x00,0x8E,0x06,0xE3,0x3C,0x6B,0x32,0x7B,0xF2,0x9F,0xF0,0xB6,0xFD,0xDF,0xF0,0x28,
+0x18,0x48,0xF0,0xC6,0xBC,0xD0,0xBF,0x34,0x80,0x96,0xC2,0x4A,0xB1,0x6D,0x8E,0xC7,
+0x90,0x45,0xDE,0x2F,0x67,0xAC,0x45,0x04,0xA3,0x7A,0xDC,0x55,0x92,0xC9,0x47,0x66,
+0xD8,0x1A,0x8C,0xC7,0xED,0x9C,0x4E,0x9A,0xE0,0x12,0xBB,0xB5,0x6A,0x4C,0x84,0xE1,
+0xE1,0x22,0x0D,0x87,0x00,0x64,0xFE,0x8C,0x7D,0x62,0x39,0x65,0xA6,0xEF,0x42,0xB6,
+0x80,0x25,0x12,0x61,0x01,0xA8,0x24,0x13,0x70,0x00,0x11,0x26,0x5F,0xFA,0x35,0x50,
+0xC5,0x48,0xCC,0x06,0x47,0xE8,0x27,0xD8,0x70,0x8D,0x5F,0x64,0xE6,0xA1,0x44,0x26,
+0x5E,0x22,0xEC,0x92,0xCD,0xFF,0x42,0x9A,0x44,0x21,0x6D,0x5C,0xC5,0xE3,0x22,0x1D,
+0x5F,0x47,0x12,0xE7,0xCE,0x5F,0x5D,0xFA,0xD8,0xAA,0xB1,0x33,0x2D,0xD9,0x76,0xF2,
+0x4E,0x3A,0x33,0x0C,0x2B,0xB3,0x2D,0x90,0x06,
+};
+
+
+/* subject:/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Qualified CA Root */
+/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Qualified CA Root */
+
+
+const unsigned char AddTrust_Qualified_Certificates_Root_certificate[1058]={
+0x30,0x82,0x04,0x1E,0x30,0x82,0x03,0x06,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x67,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,
+0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,
+0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41,
+0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,
+0x6F,0x72,0x6B,0x31,0x23,0x30,0x21,0x06,0x03,0x55,0x04,0x03,0x13,0x1A,0x41,0x64,
+0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x51,0x75,0x61,0x6C,0x69,0x66,0x69,0x65,0x64,
+0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35,
+0x33,0x30,0x31,0x30,0x34,0x34,0x35,0x30,0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33,
+0x30,0x31,0x30,0x34,0x34,0x35,0x30,0x5A,0x30,0x67,0x31,0x0B,0x30,0x09,0x06,0x03,
+0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,
+0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x1D,0x30,
+0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,
+0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x23,0x30,0x21,
+0x06,0x03,0x55,0x04,0x03,0x13,0x1A,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,
+0x51,0x75,0x61,0x6C,0x69,0x66,0x69,0x65,0x64,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,
+0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,
+0x01,0x00,0xE4,0x1E,0x9A,0xFE,0xDC,0x09,0x5A,0x87,0xA4,0x9F,0x47,0xBE,0x11,0x5F,
+0xAF,0x84,0x34,0xDB,0x62,0x3C,0x79,0x78,0xB7,0xE9,0x30,0xB5,0xEC,0x0C,0x1C,0x2A,
+0xC4,0x16,0xFF,0xE0,0xEC,0x71,0xEB,0x8A,0xF5,0x11,0x6E,0xED,0x4F,0x0D,0x91,0xD2,
+0x12,0x18,0x2D,0x49,0x15,0x01,0xC2,0xA4,0x22,0x13,0xC7,0x11,0x64,0xFF,0x22,0x12,
+0x9A,0xB9,0x8E,0x5C,0x2F,0x08,0xCF,0x71,0x6A,0xB3,0x67,0x01,0x59,0xF1,0x5D,0x46,
+0xF3,0xB0,0x78,0xA5,0xF6,0x0E,0x42,0x7A,0xE3,0x7F,0x1B,0xCC,0xD0,0xF0,0xB7,0x28,
+0xFD,0x2A,0xEA,0x9E,0xB3,0xB0,0xB9,0x04,0xAA,0xFD,0xF6,0xC7,0xB4,0xB1,0xB8,0x2A,
+0xA0,0xFB,0x58,0xF1,0x19,0xA0,0x6F,0x70,0x25,0x7E,0x3E,0x69,0x4A,0x7F,0x0F,0x22,
+0xD8,0xEF,0xAD,0x08,0x11,0x9A,0x29,0x99,0xE1,0xAA,0x44,0x45,0x9A,0x12,0x5E,0x3E,
+0x9D,0x6D,0x52,0xFC,0xE7,0xA0,0x3D,0x68,0x2F,0xF0,0x4B,0x70,0x7C,0x13,0x38,0xAD,
+0xBC,0x15,0x25,0xF1,0xD6,0xCE,0xAB,0xA2,0xC0,0x31,0xD6,0x2F,0x9F,0xE0,0xFF,0x14,
+0x59,0xFC,0x84,0x93,0xD9,0x87,0x7C,0x4C,0x54,0x13,0xEB,0x9F,0xD1,0x2D,0x11,0xF8,
+0x18,0x3A,0x3A,0xDE,0x25,0xD9,0xF7,0xD3,0x40,0xED,0xA4,0x06,0x12,0xC4,0x3B,0xE1,
+0x91,0xC1,0x56,0x35,0xF0,0x14,0xDC,0x65,0x36,0x09,0x6E,0xAB,0xA4,0x07,0xC7,0x35,
+0xD1,0xC2,0x03,0x33,0x36,0x5B,0x75,0x26,0x6D,0x42,0xF1,0x12,0x6B,0x43,0x6F,0x4B,
+0x71,0x94,0xFA,0x34,0x1D,0xED,0x13,0x6E,0xCA,0x80,0x7F,0x98,0x2F,0x6C,0xB9,0x65,
+0xD8,0xE9,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xD4,0x30,0x81,0xD1,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x39,0x95,0x8B,0x62,0x8B,0x5C,0xC9,0xD4,
+0x80,0xBA,0x58,0x0F,0x97,0x3F,0x15,0x08,0x43,0xCC,0x98,0xA7,0x30,0x0B,0x06,0x03,
+0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,
+0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x91,0x06,0x03,0x55,
+0x1D,0x23,0x04,0x81,0x89,0x30,0x81,0x86,0x80,0x14,0x39,0x95,0x8B,0x62,0x8B,0x5C,
+0xC9,0xD4,0x80,0xBA,0x58,0x0F,0x97,0x3F,0x15,0x08,0x43,0xCC,0x98,0xA7,0xA1,0x6B,
+0xA4,0x69,0x30,0x67,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,
+0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,
+0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x14,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,
+0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x23,0x30,0x21,0x06,0x03,0x55,0x04,0x03,0x13,
+0x1A,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x51,0x75,0x61,0x6C,0x69,0x66,
+0x69,0x65,0x64,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,0x82,0x01,0x01,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,
+0x01,0x00,0x19,0xAB,0x75,0xEA,0xF8,0x8B,0x65,0x61,0x95,0x13,0xBA,0x69,0x04,0xEF,
+0x86,0xCA,0x13,0xA0,0xC7,0xAA,0x4F,0x64,0x1B,0x3F,0x18,0xF6,0xA8,0x2D,0x2C,0x55,
+0x8F,0x05,0xB7,0x30,0xEA,0x42,0x6A,0x1D,0xC0,0x25,0x51,0x2D,0xA7,0xBF,0x0C,0xB3,
+0xED,0xEF,0x08,0x7F,0x6C,0x3C,0x46,0x1A,0xEA,0x18,0x43,0xDF,0x76,0xCC,0xF9,0x66,
+0x86,0x9C,0x2C,0x68,0xF5,0xE9,0x17,0xF8,0x31,0xB3,0x18,0xC4,0xD6,0x48,0x7D,0x23,
+0x4C,0x68,0xC1,0x7E,0xBB,0x01,0x14,0x6F,0xC5,0xD9,0x6E,0xDE,0xBB,0x04,0x42,0x6A,
+0xF8,0xF6,0x5C,0x7D,0xE5,0xDA,0xFA,0x87,0xEB,0x0D,0x35,0x52,0x67,0xD0,0x9E,0x97,
+0x76,0x05,0x93,0x3F,0x95,0xC7,0x01,0xE6,0x69,0x55,0x38,0x7F,0x10,0x61,0x99,0xC9,
+0xE3,0x5F,0xA6,0xCA,0x3E,0x82,0x63,0x48,0xAA,0xE2,0x08,0x48,0x3E,0xAA,0xF2,0xB2,
+0x85,0x62,0xA6,0xB4,0xA7,0xD9,0xBD,0x37,0x9C,0x68,0xB5,0x2D,0x56,0x7D,0xB0,0xB7,
+0x3F,0xA0,0xB1,0x07,0xD6,0xE9,0x4F,0xDC,0xDE,0x45,0x71,0x30,0x32,0x7F,0x1B,0x2E,
+0x09,0xF9,0xBF,0x52,0xA1,0xEE,0xC2,0x80,0x3E,0x06,0x5C,0x2E,0x55,0x40,0xC1,0x1B,
+0xF5,0x70,0x45,0xB0,0xDC,0x5D,0xFA,0xF6,0x72,0x5A,0x77,0xD2,0x63,0xCD,0xCF,0x58,
+0x89,0x00,0x42,0x63,0x3F,0x79,0x39,0xD0,0x44,0xB0,0x82,0x6E,0x41,0x19,0xE8,0xDD,
+0xE0,0xC1,0x88,0x5A,0xD1,0x1E,0x71,0x93,0x1F,0x24,0x30,0x74,0xE5,0x1E,0xA8,0xDE,
+0x3C,0x27,0x37,0x7F,0x83,0xAE,0x9E,0x77,0xCF,0xF0,0x30,0xB1,0xFF,0x4B,0x99,0xE8,
+0xC6,0xA1,
+};
+
+
+/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Commercial */
+/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Commercial */
+
+
+const unsigned char AffirmTrust_Commercial_certificate[848]={
+0x30,0x82,0x03,0x4C,0x30,0x82,0x02,0x34,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x77,
+0x77,0x06,0x27,0x26,0xA9,0xB1,0x7C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,
+0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1F,0x30,0x1D,0x06,
+0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
+0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69,0x61,0x6C,0x30,0x1E,0x17,0x0D,
+0x31,0x30,0x30,0x31,0x32,0x39,0x31,0x34,0x30,0x36,0x30,0x36,0x5A,0x17,0x0D,0x33,
+0x30,0x31,0x32,0x33,0x31,0x31,0x34,0x30,0x36,0x30,0x36,0x5A,0x30,0x44,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,
+0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
+0x74,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,
+0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69,
+0x61,0x6C,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
+0x01,0x01,0x00,0xF6,0x1B,0x4F,0x67,0x07,0x2B,0xA1,0x15,0xF5,0x06,0x22,0xCB,0x1F,
+0x01,0xB2,0xE3,0x73,0x45,0x06,0x44,0x49,0x2C,0xBB,0x49,0x25,0x14,0xD6,0xCE,0xC3,
+0xB7,0xAB,0x2C,0x4F,0xC6,0x41,0x32,0x94,0x57,0xFA,0x12,0xA7,0x5B,0x0E,0xE2,0x8F,
+0x1F,0x1E,0x86,0x19,0xA7,0xAA,0xB5,0x2D,0xB9,0x5F,0x0D,0x8A,0xC2,0xAF,0x85,0x35,
+0x79,0x32,0x2D,0xBB,0x1C,0x62,0x37,0xF2,0xB1,0x5B,0x4A,0x3D,0xCA,0xCD,0x71,0x5F,
+0xE9,0x42,0xBE,0x94,0xE8,0xC8,0xDE,0xF9,0x22,0x48,0x64,0xC6,0xE5,0xAB,0xC6,0x2B,
+0x6D,0xAD,0x05,0xF0,0xFA,0xD5,0x0B,0xCF,0x9A,0xE5,0xF0,0x50,0xA4,0x8B,0x3B,0x47,
+0xA5,0x23,0x5B,0x7A,0x7A,0xF8,0x33,0x3F,0xB8,0xEF,0x99,0x97,0xE3,0x20,0xC1,0xD6,
+0x28,0x89,0xCF,0x94,0xFB,0xB9,0x45,0xED,0xE3,0x40,0x17,0x11,0xD4,0x74,0xF0,0x0B,
+0x31,0xE2,0x2B,0x26,0x6A,0x9B,0x4C,0x57,0xAE,0xAC,0x20,0x3E,0xBA,0x45,0x7A,0x05,
+0xF3,0xBD,0x9B,0x69,0x15,0xAE,0x7D,0x4E,0x20,0x63,0xC4,0x35,0x76,0x3A,0x07,0x02,
+0xC9,0x37,0xFD,0xC7,0x47,0xEE,0xE8,0xF1,0x76,0x1D,0x73,0x15,0xF2,0x97,0xA4,0xB5,
+0xC8,0x7A,0x79,0xD9,0x42,0xAA,0x2B,0x7F,0x5C,0xFE,0xCE,0x26,0x4F,0xA3,0x66,0x81,
+0x35,0xAF,0x44,0xBA,0x54,0x1E,0x1C,0x30,0x32,0x65,0x9D,0xE6,0x3C,0x93,0x5E,0x50,
+0x4E,0x7A,0xE3,0x3A,0xD4,0x6E,0xCC,0x1A,0xFB,0xF9,0xD2,0x37,0xAE,0x24,0x2A,0xAB,
+0x57,0x03,0x22,0x28,0x0D,0x49,0x75,0x7F,0xB7,0x28,0xDA,0x75,0xBF,0x8E,0xE3,0xDC,
+0x0E,0x79,0x31,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,
+0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9D,0x93,0xC6,0x53,0x8B,0x5E,0xCA,0xAF,0x3F,
+0x9F,0x1E,0x0F,0xE5,0x99,0x95,0xBC,0x24,0xF6,0x94,0x8F,0x30,0x0F,0x06,0x03,0x55,
+0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,
+0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,
+0x58,0xAC,0xF4,0x04,0x0E,0xCD,0xC0,0x0D,0xFF,0x0A,0xFD,0xD4,0xBA,0x16,0x5F,0x29,
+0xBD,0x7B,0x68,0x99,0x58,0x49,0xD2,0xB4,0x1D,0x37,0x4D,0x7F,0x27,0x7D,0x46,0x06,
+0x5D,0x43,0xC6,0x86,0x2E,0x3E,0x73,0xB2,0x26,0x7D,0x4F,0x93,0xA9,0xB6,0xC4,0x2A,
+0x9A,0xAB,0x21,0x97,0x14,0xB1,0xDE,0x8C,0xD3,0xAB,0x89,0x15,0xD8,0x6B,0x24,0xD4,
+0xF1,0x16,0xAE,0xD8,0xA4,0x5C,0xD4,0x7F,0x51,0x8E,0xED,0x18,0x01,0xB1,0x93,0x63,
+0xBD,0xBC,0xF8,0x61,0x80,0x9A,0x9E,0xB1,0xCE,0x42,0x70,0xE2,0xA9,0x7D,0x06,0x25,
+0x7D,0x27,0xA1,0xFE,0x6F,0xEC,0xB3,0x1E,0x24,0xDA,0xE3,0x4B,0x55,0x1A,0x00,0x3B,
+0x35,0xB4,0x3B,0xD9,0xD7,0x5D,0x30,0xFD,0x81,0x13,0x89,0xF2,0xC2,0x06,0x2B,0xED,
+0x67,0xC4,0x8E,0xC9,0x43,0xB2,0x5C,0x6B,0x15,0x89,0x02,0xBC,0x62,0xFC,0x4E,0xF2,
+0xB5,0x33,0xAA,0xB2,0x6F,0xD3,0x0A,0xA2,0x50,0xE3,0xF6,0x3B,0xE8,0x2E,0x44,0xC2,
+0xDB,0x66,0x38,0xA9,0x33,0x56,0x48,0xF1,0x6D,0x1B,0x33,0x8D,0x0D,0x8C,0x3F,0x60,
+0x37,0x9D,0xD3,0xCA,0x6D,0x7E,0x34,0x7E,0x0D,0x9F,0x72,0x76,0x8B,0x1B,0x9F,0x72,
+0xFD,0x52,0x35,0x41,0x45,0x02,0x96,0x2F,0x1C,0xB2,0x9A,0x73,0x49,0x21,0xB1,0x49,
+0x47,0x45,0x47,0xB4,0xEF,0x6A,0x34,0x11,0xC9,0x4D,0x9A,0xCC,0x59,0xB7,0xD6,0x02,
+0x9E,0x5A,0x4E,0x65,0xB5,0x94,0xAE,0x1B,0xDF,0x29,0xB0,0x16,0xF1,0xBF,0x00,0x9E,
+0x07,0x3A,0x17,0x64,0xB5,0x04,0xB5,0x23,0x21,0x99,0x0A,0x95,0x3B,0x97,0x7C,0xEF,
+};
+
+
+/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Networking */
+/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Networking */
+
+
+const unsigned char AffirmTrust_Networking_certificate[848]={
+0x30,0x82,0x03,0x4C,0x30,0x82,0x02,0x34,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x7C,
+0x4F,0x04,0x39,0x1C,0xD4,0x99,0x2D,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,
+0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1F,0x30,0x1D,0x06,
+0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
+0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x69,0x6E,0x67,0x30,0x1E,0x17,0x0D,
+0x31,0x30,0x30,0x31,0x32,0x39,0x31,0x34,0x30,0x38,0x32,0x34,0x5A,0x17,0x0D,0x33,
+0x30,0x31,0x32,0x33,0x31,0x31,0x34,0x30,0x38,0x32,0x34,0x5A,0x30,0x44,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,
+0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
+0x74,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,
+0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x69,
+0x6E,0x67,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
+0x01,0x01,0x00,0xB4,0x84,0xCC,0x33,0x17,0x2E,0x6B,0x94,0x6C,0x6B,0x61,0x52,0xA0,
+0xEB,0xA3,0xCF,0x79,0x94,0x4C,0xE5,0x94,0x80,0x99,0xCB,0x55,0x64,0x44,0x65,0x8F,
+0x67,0x64,0xE2,0x06,0xE3,0x5C,0x37,0x49,0xF6,0x2F,0x9B,0x84,0x84,0x1E,0x2D,0xF2,
+0x60,0x9D,0x30,0x4E,0xCC,0x84,0x85,0xE2,0x2C,0xCF,0x1E,0x9E,0xFE,0x36,0xAB,0x33,
+0x77,0x35,0x44,0xD8,0x35,0x96,0x1A,0x3D,0x36,0xE8,0x7A,0x0E,0xD8,0xD5,0x47,0xA1,
+0x6A,0x69,0x8B,0xD9,0xFC,0xBB,0x3A,0xAE,0x79,0x5A,0xD5,0xF4,0xD6,0x71,0xBB,0x9A,
+0x90,0x23,0x6B,0x9A,0xB7,0x88,0x74,0x87,0x0C,0x1E,0x5F,0xB9,0x9E,0x2D,0xFA,0xAB,
+0x53,0x2B,0xDC,0xBB,0x76,0x3E,0x93,0x4C,0x08,0x08,0x8C,0x1E,0xA2,0x23,0x1C,0xD4,
+0x6A,0xAD,0x22,0xBA,0x99,0x01,0x2E,0x6D,0x65,0xCB,0xBE,0x24,0x66,0x55,0x24,0x4B,
+0x40,0x44,0xB1,0x1B,0xD7,0xE1,0xC2,0x85,0xC0,0xDE,0x10,0x3F,0x3D,0xED,0xB8,0xFC,
+0xF1,0xF1,0x23,0x53,0xDC,0xBF,0x65,0x97,0x6F,0xD9,0xF9,0x40,0x71,0x8D,0x7D,0xBD,
+0x95,0xD4,0xCE,0xBE,0xA0,0x5E,0x27,0x23,0xDE,0xFD,0xA6,0xD0,0x26,0x0E,0x00,0x29,
+0xEB,0x3C,0x46,0xF0,0x3D,0x60,0xBF,0x3F,0x50,0xD2,0xDC,0x26,0x41,0x51,0x9E,0x14,
+0x37,0x42,0x04,0xA3,0x70,0x57,0xA8,0x1B,0x87,0xED,0x2D,0xFA,0x7B,0xEE,0x8C,0x0A,
+0xE3,0xA9,0x66,0x89,0x19,0xCB,0x41,0xF9,0xDD,0x44,0x36,0x61,0xCF,0xE2,0x77,0x46,
+0xC8,0x7D,0xF6,0xF4,0x92,0x81,0x36,0xFD,0xDB,0x34,0xF1,0x72,0x7E,0xF3,0x0C,0x16,
+0xBD,0xB4,0x15,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,
+0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x07,0x1F,0xD2,0xE7,0x9C,0xDA,0xC2,0x6E,0xA2,
+0x40,0xB4,0xB0,0x7A,0x50,0x10,0x50,0x74,0xC4,0xC8,0xBD,0x30,0x0F,0x06,0x03,0x55,
+0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,
+0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,
+0x89,0x57,0xB2,0x16,0x7A,0xA8,0xC2,0xFD,0xD6,0xD9,0x9B,0x9B,0x34,0xC2,0x9C,0xB4,
+0x32,0x14,0x4D,0xA7,0xA4,0xDF,0xEC,0xBE,0xA7,0xBE,0xF8,0x43,0xDB,0x91,0x37,0xCE,
+0xB4,0x32,0x2E,0x50,0x55,0x1A,0x35,0x4E,0x76,0x43,0x71,0x20,0xEF,0x93,0x77,0x4E,
+0x15,0x70,0x2E,0x87,0xC3,0xC1,0x1D,0x6D,0xDC,0xCB,0xB5,0x27,0xD4,0x2C,0x56,0xD1,
+0x52,0x53,0x3A,0x44,0xD2,0x73,0xC8,0xC4,0x1B,0x05,0x65,0x5A,0x62,0x92,0x9C,0xEE,
+0x41,0x8D,0x31,0xDB,0xE7,0x34,0xEA,0x59,0x21,0xD5,0x01,0x7A,0xD7,0x64,0xB8,0x64,
+0x39,0xCD,0xC9,0xED,0xAF,0xED,0x4B,0x03,0x48,0xA7,0xA0,0x99,0x01,0x80,0xDC,0x65,
+0xA3,0x36,0xAE,0x65,0x59,0x48,0x4F,0x82,0x4B,0xC8,0x65,0xF1,0x57,0x1D,0xE5,0x59,
+0x2E,0x0A,0x3F,0x6C,0xD8,0xD1,0xF5,0xE5,0x09,0xB4,0x6C,0x54,0x00,0x0A,0xE0,0x15,
+0x4D,0x87,0x75,0x6D,0xB7,0x58,0x96,0x5A,0xDD,0x6D,0xD2,0x00,0xA0,0xF4,0x9B,0x48,
+0xBE,0xC3,0x37,0xA4,0xBA,0x36,0xE0,0x7C,0x87,0x85,0x97,0x1A,0x15,0xA2,0xDE,0x2E,
+0xA2,0x5B,0xBD,0xAF,0x18,0xF9,0x90,0x50,0xCD,0x70,0x59,0xF8,0x27,0x67,0x47,0xCB,
+0xC7,0xA0,0x07,0x3A,0x7D,0xD1,0x2C,0x5D,0x6C,0x19,0x3A,0x66,0xB5,0x7D,0xFD,0x91,
+0x6F,0x82,0xB1,0xBE,0x08,0x93,0xDB,0x14,0x47,0xF1,0xA2,0x37,0xC7,0x45,0x9E,0x3C,
+0xC7,0x77,0xAF,0x64,0xA8,0x93,0xDF,0xF6,0x69,0x83,0x82,0x60,0xF2,0x49,0x42,0x34,
+0xED,0x5A,0x00,0x54,0x85,0x1C,0x16,0x36,0x92,0x0C,0x5C,0xFA,0xA6,0xAD,0xBF,0xDB,
+};
+
+
+/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Premium */
+/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Premium */
+
+
+const unsigned char AffirmTrust_Premium_certificate[1354]={
+0x30,0x82,0x05,0x46,0x30,0x82,0x03,0x2E,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x6D,
+0x8C,0x14,0x46,0xB1,0xA6,0x0A,0xEE,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x41,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,
+0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1C,0x30,0x1A,0x06,
+0x03,0x55,0x04,0x03,0x0C,0x13,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,
+0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,
+0x31,0x32,0x39,0x31,0x34,0x31,0x30,0x33,0x36,0x5A,0x17,0x0D,0x34,0x30,0x31,0x32,
+0x33,0x31,0x31,0x34,0x31,0x30,0x33,0x36,0x5A,0x30,0x41,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,
+0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1C,
+0x30,0x1A,0x06,0x03,0x55,0x04,0x03,0x0C,0x13,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,
+0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x30,0x82,0x02,0x22,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
+0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xC4,0x12,0xDF,
+0xA9,0x5F,0xFE,0x41,0xDD,0xDD,0xF5,0x9F,0x8A,0xE3,0xF6,0xAC,0xE1,0x3C,0x78,0x9A,
+0xBC,0xD8,0xF0,0x7F,0x7A,0xA0,0x33,0x2A,0xDC,0x8D,0x20,0x5B,0xAE,0x2D,0x6F,0xE7,
+0x93,0xD9,0x36,0x70,0x6A,0x68,0xCF,0x8E,0x51,0xA3,0x85,0x5B,0x67,0x04,0xA0,0x10,
+0x24,0x6F,0x5D,0x28,0x82,0xC1,0x97,0x57,0xD8,0x48,0x29,0x13,0xB6,0xE1,0xBE,0x91,
+0x4D,0xDF,0x85,0x0C,0x53,0x18,0x9A,0x1E,0x24,0xA2,0x4F,0x8F,0xF0,0xA2,0x85,0x0B,
+0xCB,0xF4,0x29,0x7F,0xD2,0xA4,0x58,0xEE,0x26,0x4D,0xC9,0xAA,0xA8,0x7B,0x9A,0xD9,
+0xFA,0x38,0xDE,0x44,0x57,0x15,0xE5,0xF8,0x8C,0xC8,0xD9,0x48,0xE2,0x0D,0x16,0x27,
+0x1D,0x1E,0xC8,0x83,0x85,0x25,0xB7,0xBA,0xAA,0x55,0x41,0xCC,0x03,0x22,0x4B,0x2D,
+0x91,0x8D,0x8B,0xE6,0x89,0xAF,0x66,0xC7,0xE9,0xFF,0x2B,0xE9,0x3C,0xAC,0xDA,0xD2,
+0xB3,0xC3,0xE1,0x68,0x9C,0x89,0xF8,0x7A,0x00,0x56,0xDE,0xF4,0x55,0x95,0x6C,0xFB,
+0xBA,0x64,0xDD,0x62,0x8B,0xDF,0x0B,0x77,0x32,0xEB,0x62,0xCC,0x26,0x9A,0x9B,0xBB,
+0xAA,0x62,0x83,0x4C,0xB4,0x06,0x7A,0x30,0xC8,0x29,0xBF,0xED,0x06,0x4D,0x97,0xB9,
+0x1C,0xC4,0x31,0x2B,0xD5,0x5F,0xBC,0x53,0x12,0x17,0x9C,0x99,0x57,0x29,0x66,0x77,
+0x61,0x21,0x31,0x07,0x2E,0x25,0x49,0x9D,0x18,0xF2,0xEE,0xF3,0x2B,0x71,0x8C,0xB5,
+0xBA,0x39,0x07,0x49,0x77,0xFC,0xEF,0x2E,0x92,0x90,0x05,0x8D,0x2D,0x2F,0x77,0x7B,
+0xEF,0x43,0xBF,0x35,0xBB,0x9A,0xD8,0xF9,0x73,0xA7,0x2C,0xF2,0xD0,0x57,0xEE,0x28,
+0x4E,0x26,0x5F,0x8F,0x90,0x68,0x09,0x2F,0xB8,0xF8,0xDC,0x06,0xE9,0x2E,0x9A,0x3E,
+0x51,0xA7,0xD1,0x22,0xC4,0x0A,0xA7,0x38,0x48,0x6C,0xB3,0xF9,0xFF,0x7D,0xAB,0x86,
+0x57,0xE3,0xBA,0xD6,0x85,0x78,0x77,0xBA,0x43,0xEA,0x48,0x7F,0xF6,0xD8,0xBE,0x23,
+0x6D,0x1E,0xBF,0xD1,0x36,0x6C,0x58,0x5C,0xF1,0xEE,0xA4,0x19,0x54,0x1A,0xF5,0x03,
+0xD2,0x76,0xE6,0xE1,0x8C,0xBD,0x3C,0xB3,0xD3,0x48,0x4B,0xE2,0xC8,0xF8,0x7F,0x92,
+0xA8,0x76,0x46,0x9C,0x42,0x65,0x3E,0xA4,0x1E,0xC1,0x07,0x03,0x5A,0x46,0x2D,0xB8,
+0x97,0xF3,0xB7,0xD5,0xB2,0x55,0x21,0xEF,0xBA,0xDC,0x4C,0x00,0x97,0xFB,0x14,0x95,
+0x27,0x33,0xBF,0xE8,0x43,0x47,0x46,0xD2,0x08,0x99,0x16,0x60,0x3B,0x9A,0x7E,0xD2,
+0xE6,0xED,0x38,0xEA,0xEC,0x01,0x1E,0x3C,0x48,0x56,0x49,0x09,0xC7,0x4C,0x37,0x00,
+0x9E,0x88,0x0E,0xC0,0x73,0xE1,0x6F,0x66,0xE9,0x72,0x47,0x30,0x3E,0x10,0xE5,0x0B,
+0x03,0xC9,0x9A,0x42,0x00,0x6C,0xC5,0x94,0x7E,0x61,0xC4,0x8A,0xDF,0x7F,0x82,0x1A,
+0x0B,0x59,0xC4,0x59,0x32,0x77,0xB3,0xBC,0x60,0x69,0x56,0x39,0xFD,0xB4,0x06,0x7B,
+0x2C,0xD6,0x64,0x36,0xD9,0xBD,0x48,0xED,0x84,0x1F,0x7E,0xA5,0x22,0x8F,0x2A,0xB8,
+0x42,0xF4,0x82,0xB7,0xD4,0x53,0x90,0x78,0x4E,0x2D,0x1A,0xFD,0x81,0x6F,0x44,0xD7,
+0x3B,0x01,0x74,0x96,0x42,0xE0,0x00,0xE2,0x2E,0x6B,0xEA,0xC5,0xEE,0x72,0xAC,0xBB,
+0xBF,0xFE,0xEA,0xAA,0xA8,0xF8,0xDC,0xF6,0xB2,0x79,0x8A,0xB6,0x67,0x02,0x03,0x01,
+0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
+0x14,0x9D,0xC0,0x67,0xA6,0x0C,0x22,0xD9,0x26,0xF5,0x45,0xAB,0xA6,0x65,0x52,0x11,
+0x27,0xD8,0x45,0xAC,0x63,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0xB3,0x57,0x4D,0x10,0x62,0x4E,
+0x3A,0xE4,0xAC,0xEA,0xB8,0x1C,0xAF,0x32,0x23,0xC8,0xB3,0x49,0x5A,0x51,0x9C,0x76,
+0x28,0x8D,0x79,0xAA,0x57,0x46,0x17,0xD5,0xF5,0x52,0xF6,0xB7,0x44,0xE8,0x08,0x44,
+0xBF,0x18,0x84,0xD2,0x0B,0x80,0xCD,0xC5,0x12,0xFD,0x00,0x55,0x05,0x61,0x87,0x41,
+0xDC,0xB5,0x24,0x9E,0x3C,0xC4,0xD8,0xC8,0xFB,0x70,0x9E,0x2F,0x78,0x96,0x83,0x20,
+0x36,0xDE,0x7C,0x0F,0x69,0x13,0x88,0xA5,0x75,0x36,0x98,0x08,0xA6,0xC6,0xDF,0xAC,
+0xCE,0xE3,0x58,0xD6,0xB7,0x3E,0xDE,0xBA,0xF3,0xEB,0x34,0x40,0xD8,0xA2,0x81,0xF5,
+0x78,0x3F,0x2F,0xD5,0xA5,0xFC,0xD9,0xA2,0xD4,0x5E,0x04,0x0E,0x17,0xAD,0xFE,0x41,
+0xF0,0xE5,0xB2,0x72,0xFA,0x44,0x82,0x33,0x42,0xE8,0x2D,0x58,0xF7,0x56,0x8C,0x62,
+0x3F,0xBA,0x42,0xB0,0x9C,0x0C,0x5C,0x7E,0x2E,0x65,0x26,0x5C,0x53,0x4F,0x00,0xB2,
+0x78,0x7E,0xA1,0x0D,0x99,0x2D,0x8D,0xB8,0x1D,0x8E,0xA2,0xC4,0xB0,0xFD,0x60,0xD0,
+0x30,0xA4,0x8E,0xC8,0x04,0x62,0xA9,0xC4,0xED,0x35,0xDE,0x7A,0x97,0xED,0x0E,0x38,
+0x5E,0x92,0x2F,0x93,0x70,0xA5,0xA9,0x9C,0x6F,0xA7,0x7D,0x13,0x1D,0x7E,0xC6,0x08,
+0x48,0xB1,0x5E,0x67,0xEB,0x51,0x08,0x25,0xE9,0xE6,0x25,0x6B,0x52,0x29,0x91,0x9C,
+0xD2,0x39,0x73,0x08,0x57,0xDE,0x99,0x06,0xB4,0x5B,0x9D,0x10,0x06,0xE1,0xC2,0x00,
+0xA8,0xB8,0x1C,0x4A,0x02,0x0A,0x14,0xD0,0xC1,0x41,0xCA,0xFB,0x8C,0x35,0x21,0x7D,
+0x82,0x38,0xF2,0xA9,0x54,0x91,0x19,0x35,0x93,0x94,0x6D,0x6A,0x3A,0xC5,0xB2,0xD0,
+0xBB,0x89,0x86,0x93,0xE8,0x9B,0xC9,0x0F,0x3A,0xA7,0x7A,0xB8,0xA1,0xF0,0x78,0x46,
+0xFA,0xFC,0x37,0x2F,0xE5,0x8A,0x84,0xF3,0xDF,0xFE,0x04,0xD9,0xA1,0x68,0xA0,0x2F,
+0x24,0xE2,0x09,0x95,0x06,0xD5,0x95,0xCA,0xE1,0x24,0x96,0xEB,0x7C,0xF6,0x93,0x05,
+0xBB,0xED,0x73,0xE9,0x2D,0xD1,0x75,0x39,0xD7,0xE7,0x24,0xDB,0xD8,0x4E,0x5F,0x43,
+0x8F,0x9E,0xD0,0x14,0x39,0xBF,0x55,0x70,0x48,0x99,0x57,0x31,0xB4,0x9C,0xEE,0x4A,
+0x98,0x03,0x96,0x30,0x1F,0x60,0x06,0xEE,0x1B,0x23,0xFE,0x81,0x60,0x23,0x1A,0x47,
+0x62,0x85,0xA5,0xCC,0x19,0x34,0x80,0x6F,0xB3,0xAC,0x1A,0xE3,0x9F,0xF0,0x7B,0x48,
+0xAD,0xD5,0x01,0xD9,0x67,0xB6,0xA9,0x72,0x93,0xEA,0x2D,0x66,0xB5,0xB2,0xB8,0xE4,
+0x3D,0x3C,0xB2,0xEF,0x4C,0x8C,0xEA,0xEB,0x07,0xBF,0xAB,0x35,0x9A,0x55,0x86,0xBC,
+0x18,0xA6,0xB5,0xA8,0x5E,0xB4,0x83,0x6C,0x6B,0x69,0x40,0xD3,0x9F,0xDC,0xF1,0xC3,
+0x69,0x6B,0xB9,0xE1,0x6D,0x09,0xF4,0xF1,0xAA,0x50,0x76,0x0A,0x7A,0x7D,0x7A,0x17,
+0xA1,0x55,0x96,0x42,0x99,0x31,0x09,0xDD,0x60,0x11,0x8D,0x05,0x30,0x7E,0xE6,0x8E,
+0x46,0xD1,0x9D,0x14,0xDA,0xC7,0x17,0xE4,0x05,0x96,0x8C,0xC4,0x24,0xB5,0x1B,0xCF,
+0x14,0x07,0xB2,0x40,0xF8,0xA3,0x9E,0x41,0x86,0xBC,0x04,0xD0,0x6B,0x96,0xC8,0x2A,
+0x80,0x34,0xFD,0xBF,0xEF,0x06,0xA3,0xDD,0x58,0xC5,0x85,0x3D,0x3E,0x8F,0xFE,0x9E,
+0x29,0xE0,0xB6,0xB8,0x09,0x68,0x19,0x1C,0x18,0x43,
+};
+
+
+/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Premium ECC */
+/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Premium ECC */
+
+
+const unsigned char AffirmTrust_Premium_ECC_certificate[514]={
+0x30,0x82,0x01,0xFE,0x30,0x82,0x01,0x85,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x74,
+0x97,0x25,0x8A,0xC7,0x3F,0x7A,0x54,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,
+0x04,0x03,0x03,0x30,0x45,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
+0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,
+0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,
+0x03,0x0C,0x17,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x50,
+0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x45,0x43,0x43,0x30,0x1E,0x17,0x0D,0x31,0x30,
+0x30,0x31,0x32,0x39,0x31,0x34,0x32,0x30,0x32,0x34,0x5A,0x17,0x0D,0x34,0x30,0x31,
+0x32,0x33,0x31,0x31,0x34,0x32,0x30,0x32,0x34,0x5A,0x30,0x45,0x31,0x0B,0x30,0x09,
+0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,
+0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,
+0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x0C,0x17,0x41,0x66,0x66,0x69,0x72,0x6D,
+0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x45,0x43,
+0x43,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,
+0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x0D,0x30,0x5E,0x1B,0x15,0x9D,0x03,
+0xD0,0xA1,0x79,0x35,0xB7,0x3A,0x3C,0x92,0x7A,0xCA,0x15,0x1C,0xCD,0x62,0xF3,0x9C,
+0x26,0x5C,0x07,0x3D,0xE5,0x54,0xFA,0xA3,0xD6,0xCC,0x12,0xEA,0xF4,0x14,0x5F,0xE8,
+0x8E,0x19,0xAB,0x2F,0x2E,0x48,0xE6,0xAC,0x18,0x43,0x78,0xAC,0xD0,0x37,0xC3,0xBD,
+0xB2,0xCD,0x2C,0xE6,0x47,0xE2,0x1A,0xE6,0x63,0xB8,0x3D,0x2E,0x2F,0x78,0xC4,0x4F,
+0xDB,0xF4,0x0F,0xA4,0x68,0x4C,0x55,0x72,0x6B,0x95,0x1D,0x4E,0x18,0x42,0x95,0x78,
+0xCC,0x37,0x3C,0x91,0xE2,0x9B,0x65,0x2B,0x29,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9A,0xAF,0x29,0x7A,0xC0,0x11,0x35,0x35,
+0x26,0x51,0x30,0x00,0xC3,0x6A,0xFE,0x40,0xD5,0xAE,0xD6,0x3C,0x30,0x0F,0x06,0x03,
+0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,
+0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0A,0x06,
+0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,
+0x17,0x09,0xF3,0x87,0x88,0x50,0x5A,0xAF,0xC8,0xC0,0x42,0xBF,0x47,0x5F,0xF5,0x6C,
+0x6A,0x86,0xE0,0xC4,0x27,0x74,0xE4,0x38,0x53,0xD7,0x05,0x7F,0x1B,0x34,0xE3,0xC6,
+0x2F,0xB3,0xCA,0x09,0x3C,0x37,0x9D,0xD7,0xE7,0xB8,0x46,0xF1,0xFD,0xA1,0xE2,0x71,
+0x02,0x30,0x42,0x59,0x87,0x43,0xD4,0x51,0xDF,0xBA,0xD3,0x09,0x32,0x5A,0xCE,0x88,
+0x7E,0x57,0x3D,0x9C,0x5F,0x42,0x6B,0xF5,0x07,0x2D,0xB5,0xF0,0x82,0x93,0xF9,0x59,
+0x6F,0xAE,0x64,0xFA,0x58,0xE5,0x8B,0x1E,0xE3,0x63,0xBE,0xB5,0x81,0xCD,0x6F,0x02,
+0x8C,0x79,
+};
+
+
+/* subject:/C=US/O=America Online Inc./CN=America Online Root Certification Authority 1 */
+/* issuer :/C=US/O=America Online Inc./CN=America Online Root Certification Authority 1 */
+
+
+const unsigned char America_Online_Root_Certification_Authority_1_certificate[936]={
+0x30,0x82,0x03,0xA4,0x30,0x82,0x02,0x8C,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1C,
+0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x41,0x6D,0x65,0x72,0x69,0x63,0x61,
+0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x49,0x6E,0x63,0x2E,0x31,0x36,0x30,0x34,
+0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x41,0x6D,0x65,0x72,0x69,0x63,0x61,0x20,0x4F,
+0x6E,0x6C,0x69,0x6E,0x65,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x20,0x31,0x30,0x1E,0x17,0x0D,0x30,0x32,0x30,0x35,0x32,0x38,0x30,0x36,
+0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x31,0x31,0x39,0x32,0x30,0x34,
+0x33,0x30,0x30,0x5A,0x30,0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x55,0x53,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x41,0x6D,
+0x65,0x72,0x69,0x63,0x61,0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x36,0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x41,0x6D,0x65,0x72,
+0x69,0x63,0x61,0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x52,0x6F,0x6F,0x74,0x20,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,
+0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x31,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,
+0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xA8,0x2F,0xE8,0xA4,0x69,0x06,
+0x03,0x47,0xC3,0xE9,0x2A,0x98,0xFF,0x19,0xA2,0x70,0x9A,0xC6,0x50,0xB2,0x7E,0xA5,
+0xDF,0x68,0x4D,0x1B,0x7C,0x0F,0xB6,0x97,0x68,0x7D,0x2D,0xA6,0x8B,0x97,0xE9,0x64,
+0x86,0xC9,0xA3,0xEF,0xA0,0x86,0xBF,0x60,0x65,0x9C,0x4B,0x54,0x88,0xC2,0x48,0xC5,
+0x4A,0x39,0xBF,0x14,0xE3,0x59,0x55,0xE5,0x19,0xB4,0x74,0xC8,0xB4,0x05,0x39,0x5C,
+0x16,0xA5,0xE2,0x95,0x05,0xE0,0x12,0xAE,0x59,0x8B,0xA2,0x33,0x68,0x58,0x1C,0xA6,
+0xD4,0x15,0xB7,0xD8,0x9F,0xD7,0xDC,0x71,0xAB,0x7E,0x9A,0xBF,0x9B,0x8E,0x33,0x0F,
+0x22,0xFD,0x1F,0x2E,0xE7,0x07,0x36,0xEF,0x62,0x39,0xC5,0xDD,0xCB,0xBA,0x25,0x14,
+0x23,0xDE,0x0C,0xC6,0x3D,0x3C,0xCE,0x82,0x08,0xE6,0x66,0x3E,0xDA,0x51,0x3B,0x16,
+0x3A,0xA3,0x05,0x7F,0xA0,0xDC,0x87,0xD5,0x9C,0xFC,0x72,0xA9,0xA0,0x7D,0x78,0xE4,
+0xB7,0x31,0x55,0x1E,0x65,0xBB,0xD4,0x61,0xB0,0x21,0x60,0xED,0x10,0x32,0x72,0xC5,
+0x92,0x25,0x1E,0xF8,0x90,0x4A,0x18,0x78,0x47,0xDF,0x7E,0x30,0x37,0x3E,0x50,0x1B,
+0xDB,0x1C,0xD3,0x6B,0x9A,0x86,0x53,0x07,0xB0,0xEF,0xAC,0x06,0x78,0xF8,0x84,0x99,
+0xFE,0x21,0x8D,0x4C,0x80,0xB6,0x0C,0x82,0xF6,0x66,0x70,0x79,0x1A,0xD3,0x4F,0xA3,
+0xCF,0xF1,0xCF,0x46,0xB0,0x4B,0x0F,0x3E,0xDD,0x88,0x62,0xB8,0x8C,0xA9,0x09,0x28,
+0x3B,0x7A,0xC7,0x97,0xE1,0x1E,0xE5,0xF4,0x9F,0xC0,0xC0,0xAE,0x24,0xA0,0xC8,0xA1,
+0xD9,0x0F,0xD6,0x7B,0x26,0x82,0x69,0x32,0x3D,0xA7,0x02,0x03,0x01,0x00,0x01,0xA3,
+0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
+0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x00,
+0xAD,0xD9,0xA3,0xF6,0x79,0xF6,0x6E,0x74,0xA9,0x7F,0x33,0x3D,0x81,0x17,0xD7,0x4C,
+0xCF,0x33,0xDE,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,
+0x00,0xAD,0xD9,0xA3,0xF6,0x79,0xF6,0x6E,0x74,0xA9,0x7F,0x33,0x3D,0x81,0x17,0xD7,
+0x4C,0xCF,0x33,0xDE,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
+0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x7C,0x8A,0xD1,0x1F,0x18,0x37,0x82,0xE0,
+0xB8,0xB0,0xA3,0xED,0x56,0x95,0xC8,0x62,0x61,0x9C,0x05,0xA2,0xCD,0xC2,0x62,0x26,
+0x61,0xCD,0x10,0x16,0xD7,0xCC,0xB4,0x65,0x34,0xD0,0x11,0x8A,0xAD,0xA8,0xA9,0x05,
+0x66,0xEF,0x74,0xF3,0x6D,0x5F,0x9D,0x99,0xAF,0xF6,0x8B,0xFB,0xEB,0x52,0xB2,0x05,
+0x98,0xA2,0x6F,0x2A,0xC5,0x54,0xBD,0x25,0xBD,0x5F,0xAE,0xC8,0x86,0xEA,0x46,0x2C,
+0xC1,0xB3,0xBD,0xC1,0xE9,0x49,0x70,0x18,0x16,0x97,0x08,0x13,0x8C,0x20,0xE0,0x1B,
+0x2E,0x3A,0x47,0xCB,0x1E,0xE4,0x00,0x30,0x95,0x5B,0xF4,0x45,0xA3,0xC0,0x1A,0xB0,
+0x01,0x4E,0xAB,0xBD,0xC0,0x23,0x6E,0x63,0x3F,0x80,0x4A,0xC5,0x07,0xED,0xDC,0xE2,
+0x6F,0xC7,0xC1,0x62,0xF1,0xE3,0x72,0xD6,0x04,0xC8,0x74,0x67,0x0B,0xFA,0x88,0xAB,
+0xA1,0x01,0xC8,0x6F,0xF0,0x14,0xAF,0xD2,0x99,0xCD,0x51,0x93,0x7E,0xED,0x2E,0x38,
+0xC7,0xBD,0xCE,0x46,0x50,0x3D,0x72,0xE3,0x79,0x25,0x9D,0x9B,0x88,0x2B,0x10,0x20,
+0xDD,0xA5,0xB8,0x32,0x9F,0x8D,0xE0,0x29,0xDF,0x21,0x74,0x86,0x82,0xDB,0x2F,0x82,
+0x30,0xC6,0xC7,0x35,0x86,0xB3,0xF9,0x96,0x5F,0x46,0xDB,0x0C,0x45,0xFD,0xF3,0x50,
+0xC3,0x6F,0xC6,0xC3,0x48,0xAD,0x46,0xA6,0xE1,0x27,0x47,0x0A,0x1D,0x0E,0x9B,0xB6,
+0xC2,0x77,0x7F,0x63,0xF2,0xE0,0x7D,0x1A,0xBE,0xFC,0xE0,0xDF,0xD7,0xC7,0xA7,0x6C,
+0xB0,0xF9,0xAE,0xBA,0x3C,0xFD,0x74,0xB4,0x11,0xE8,0x58,0x0D,0x80,0xBC,0xD3,0xA8,
+0x80,0x3A,0x99,0xED,0x75,0xCC,0x46,0x7B,
+};
+
+
+/* subject:/C=US/O=America Online Inc./CN=America Online Root Certification Authority 2 */
+/* issuer :/C=US/O=America Online Inc./CN=America Online Root Certification Authority 2 */
+
+
+const unsigned char America_Online_Root_Certification_Authority_2_certificate[1448]={
+0x30,0x82,0x05,0xA4,0x30,0x82,0x03,0x8C,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1C,
+0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x41,0x6D,0x65,0x72,0x69,0x63,0x61,
+0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x49,0x6E,0x63,0x2E,0x31,0x36,0x30,0x34,
+0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x41,0x6D,0x65,0x72,0x69,0x63,0x61,0x20,0x4F,
+0x6E,0x6C,0x69,0x6E,0x65,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x20,0x32,0x30,0x1E,0x17,0x0D,0x30,0x32,0x30,0x35,0x32,0x38,0x30,0x36,
+0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x30,0x39,0x32,0x39,0x31,0x34,0x30,
+0x38,0x30,0x30,0x5A,0x30,0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x55,0x53,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x41,0x6D,
+0x65,0x72,0x69,0x63,0x61,0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x36,0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x41,0x6D,0x65,0x72,
+0x69,0x63,0x61,0x20,0x4F,0x6E,0x6C,0x69,0x6E,0x65,0x20,0x52,0x6F,0x6F,0x74,0x20,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,
+0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x32,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,
+0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xCC,0x41,0x45,0x1D,0xE9,0x3D,
+0x4D,0x10,0xF6,0x8C,0xB1,0x41,0xC9,0xE0,0x5E,0xCB,0x0D,0xB7,0xBF,0x47,0x73,0xD3,
+0xF0,0x55,0x4D,0xDD,0xC6,0x0C,0xFA,0xB1,0x66,0x05,0x6A,0xCD,0x78,0xB4,0xDC,0x02,
+0xDB,0x4E,0x81,0xF3,0xD7,0xA7,0x7C,0x71,0xBC,0x75,0x63,0xA0,0x5D,0xE3,0x07,0x0C,
+0x48,0xEC,0x25,0xC4,0x03,0x20,0xF4,0xFF,0x0E,0x3B,0x12,0xFF,0x9B,0x8D,0xE1,0xC6,
+0xD5,0x1B,0xB4,0x6D,0x22,0xE3,0xB1,0xDB,0x7F,0x21,0x64,0xAF,0x86,0xBC,0x57,0x22,
+0x2A,0xD6,0x47,0x81,0x57,0x44,0x82,0x56,0x53,0xBD,0x86,0x14,0x01,0x0B,0xFC,0x7F,
+0x74,0xA4,0x5A,0xAE,0xF1,0xBA,0x11,0xB5,0x9B,0x58,0x5A,0x80,0xB4,0x37,0x78,0x09,
+0x33,0x7C,0x32,0x47,0x03,0x5C,0xC4,0xA5,0x83,0x48,0xF4,0x57,0x56,0x6E,0x81,0x36,
+0x27,0x18,0x4F,0xEC,0x9B,0x28,0xC2,0xD4,0xB4,0xD7,0x7C,0x0C,0x3E,0x0C,0x2B,0xDF,
+0xCA,0x04,0xD7,0xC6,0x8E,0xEA,0x58,0x4E,0xA8,0xA4,0xA5,0x18,0x1C,0x6C,0x45,0x98,
+0xA3,0x41,0xD1,0x2D,0xD2,0xC7,0x6D,0x8D,0x19,0xF1,0xAD,0x79,0xB7,0x81,0x3F,0xBD,
+0x06,0x82,0x27,0x2D,0x10,0x58,0x05,0xB5,0x78,0x05,0xB9,0x2F,0xDB,0x0C,0x6B,0x90,
+0x90,0x7E,0x14,0x59,0x38,0xBB,0x94,0x24,0x13,0xE5,0xD1,0x9D,0x14,0xDF,0xD3,0x82,
+0x4D,0x46,0xF0,0x80,0x39,0x52,0x32,0x0F,0xE3,0x84,0xB2,0x7A,0x43,0xF2,0x5E,0xDE,
+0x5F,0x3F,0x1D,0xDD,0xE3,0xB2,0x1B,0xA0,0xA1,0x2A,0x23,0x03,0x6E,0x2E,0x01,0x15,
+0x87,0x5C,0xA6,0x75,0x75,0xC7,0x97,0x61,0xBE,0xDE,0x86,0xDC,0xD4,0x48,0xDB,0xBD,
+0x2A,0xBF,0x4A,0x55,0xDA,0xE8,0x7D,0x50,0xFB,0xB4,0x80,0x17,0xB8,0x94,0xBF,0x01,
+0x3D,0xEA,0xDA,0xBA,0x7C,0xE0,0x58,0x67,0x17,0xB9,0x58,0xE0,0x88,0x86,0x46,0x67,
+0x6C,0x9D,0x10,0x47,0x58,0x32,0xD0,0x35,0x7C,0x79,0x2A,0x90,0xA2,0x5A,0x10,0x11,
+0x23,0x35,0xAD,0x2F,0xCC,0xE4,0x4A,0x5B,0xA7,0xC8,0x27,0xF2,0x83,0xDE,0x5E,0xBB,
+0x5E,0x77,0xE7,0xE8,0xA5,0x6E,0x63,0xC2,0x0D,0x5D,0x61,0xD0,0x8C,0xD2,0x6C,0x5A,
+0x21,0x0E,0xCA,0x28,0xA3,0xCE,0x2A,0xE9,0x95,0xC7,0x48,0xCF,0x96,0x6F,0x1D,0x92,
+0x25,0xC8,0xC6,0xC6,0xC1,0xC1,0x0C,0x05,0xAC,0x26,0xC4,0xD2,0x75,0xD2,0xE1,0x2A,
+0x67,0xC0,0x3D,0x5B,0xA5,0x9A,0xEB,0xCF,0x7B,0x1A,0xA8,0x9D,0x14,0x45,0xE5,0x0F,
+0xA0,0x9A,0x65,0xDE,0x2F,0x28,0xBD,0xCE,0x6F,0x94,0x66,0x83,0x48,0x29,0xD8,0xEA,
+0x65,0x8C,0xAF,0x93,0xD9,0x64,0x9F,0x55,0x57,0x26,0xBF,0x6F,0xCB,0x37,0x31,0x99,
+0xA3,0x60,0xBB,0x1C,0xAD,0x89,0x34,0x32,0x62,0xB8,0x43,0x21,0x06,0x72,0x0C,0xA1,
+0x5C,0x6D,0x46,0xC5,0xFA,0x29,0xCF,0x30,0xDE,0x89,0xDC,0x71,0x5B,0xDD,0xB6,0x37,
+0x3E,0xDF,0x50,0xF5,0xB8,0x07,0x25,0x26,0xE5,0xBC,0xB5,0xFE,0x3C,0x02,0xB3,0xB7,
+0xF8,0xBE,0x43,0xC1,0x87,0x11,0x94,0x9E,0x23,0x6C,0x17,0x8A,0xB8,0x8A,0x27,0x0C,
+0x54,0x47,0xF0,0xA9,0xB3,0xC0,0x80,0x8C,0xA0,0x27,0xEB,0x1D,0x19,0xE3,0x07,0x8E,
+0x77,0x70,0xCA,0x2B,0xF4,0x7D,0x76,0xE0,0x78,0x67,0x02,0x03,0x01,0x00,0x01,0xA3,
+0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
+0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x4D,
+0x45,0xC1,0x68,0x38,0xBB,0x73,0xA9,0x69,0xA1,0x20,0xE7,0xED,0xF5,0x22,0xA1,0x23,
+0x14,0xD7,0x9E,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,
+0x4D,0x45,0xC1,0x68,0x38,0xBB,0x73,0xA9,0x69,0xA1,0x20,0xE7,0xED,0xF5,0x22,0xA1,
+0x23,0x14,0xD7,0x9E,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
+0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x05,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x67,0x6B,0x06,0xB9,0x5F,0x45,0x3B,0x2A,
+0x4B,0x33,0xB3,0xE6,0x1B,0x6B,0x59,0x4E,0x22,0xCC,0xB9,0xB7,0xA4,0x25,0xC9,0xA7,
+0xC4,0xF0,0x54,0x96,0x0B,0x64,0xF3,0xB1,0x58,0x4F,0x5E,0x51,0xFC,0xB2,0x97,0x7B,
+0x27,0x65,0xC2,0xE5,0xCA,0xE7,0x0D,0x0C,0x25,0x7B,0x62,0xE3,0xFA,0x9F,0xB4,0x87,
+0xB7,0x45,0x46,0xAF,0x83,0xA5,0x97,0x48,0x8C,0xA5,0xBD,0xF1,0x16,0x2B,0x9B,0x76,
+0x2C,0x7A,0x35,0x60,0x6C,0x11,0x80,0x97,0xCC,0xA9,0x92,0x52,0xE6,0x2B,0xE6,0x69,
+0xED,0xA9,0xF8,0x36,0x2D,0x2C,0x77,0xBF,0x61,0x48,0xD1,0x63,0x0B,0xB9,0x5B,0x52,
+0xED,0x18,0xB0,0x43,0x42,0x22,0xA6,0xB1,0x77,0xAE,0xDE,0x69,0xC5,0xCD,0xC7,0x1C,
+0xA1,0xB1,0xA5,0x1C,0x10,0xFB,0x18,0xBE,0x1A,0x70,0xDD,0xC1,0x92,0x4B,0xBE,0x29,
+0x5A,0x9D,0x3F,0x35,0xBE,0xE5,0x7D,0x51,0xF8,0x55,0xE0,0x25,0x75,0x23,0x87,0x1E,
+0x5C,0xDC,0xBA,0x9D,0xB0,0xAC,0xB3,0x69,0xDB,0x17,0x83,0xC9,0xF7,0xDE,0x0C,0xBC,
+0x08,0xDC,0x91,0x9E,0xA8,0xD0,0xD7,0x15,0x37,0x73,0xA5,0x35,0xB8,0xFC,0x7E,0xC5,
+0x44,0x40,0x06,0xC3,0xEB,0xF8,0x22,0x80,0x5C,0x47,0xCE,0x02,0xE3,0x11,0x9F,0x44,
+0xFF,0xFD,0x9A,0x32,0xCC,0x7D,0x64,0x51,0x0E,0xEB,0x57,0x26,0x76,0x3A,0xE3,0x1E,
+0x22,0x3C,0xC2,0xA6,0x36,0xDD,0x19,0xEF,0xA7,0xFC,0x12,0xF3,0x26,0xC0,0x59,0x31,
+0x85,0x4C,0x9C,0xD8,0xCF,0xDF,0xA4,0xCC,0xCC,0x29,0x93,0xFF,0x94,0x6D,0x76,0x5C,
+0x13,0x08,0x97,0xF2,0xED,0xA5,0x0B,0x4D,0xDD,0xE8,0xC9,0x68,0x0E,0x66,0xD3,0x00,
+0x0E,0x33,0x12,0x5B,0xBC,0x95,0xE5,0x32,0x90,0xA8,0xB3,0xC6,0x6C,0x83,0xAD,0x77,
+0xEE,0x8B,0x7E,0x7E,0xB1,0xA9,0xAB,0xD3,0xE1,0xF1,0xB6,0xC0,0xB1,0xEA,0x88,0xC0,
+0xE7,0xD3,0x90,0xE9,0x28,0x92,0x94,0x7B,0x68,0x7B,0x97,0x2A,0x0A,0x67,0x2D,0x85,
+0x02,0x38,0x10,0xE4,0x03,0x61,0xD4,0xDA,0x25,0x36,0xC7,0x08,0x58,0x2D,0xA1,0xA7,
+0x51,0xAF,0x30,0x0A,0x49,0xF5,0xA6,0x69,0x87,0x07,0x2D,0x44,0x46,0x76,0x8E,0x2A,
+0xE5,0x9A,0x3B,0xD7,0x18,0xA2,0xFC,0x9C,0x38,0x10,0xCC,0xC6,0x3B,0xD2,0xB5,0x17,
+0x3A,0x6F,0xFD,0xAE,0x25,0xBD,0xF5,0x72,0x59,0x64,0xB1,0x74,0x2A,0x38,0x5F,0x18,
+0x4C,0xDF,0xCF,0x71,0x04,0x5A,0x36,0xD4,0xBF,0x2F,0x99,0x9C,0xE8,0xD9,0xBA,0xB1,
+0x95,0xE6,0x02,0x4B,0x21,0xA1,0x5B,0xD5,0xC1,0x4F,0x8F,0xAE,0x69,0x6D,0x53,0xDB,
+0x01,0x93,0xB5,0x5C,0x1E,0x18,0xDD,0x64,0x5A,0xCA,0x18,0x28,0x3E,0x63,0x04,0x11,
+0xFD,0x1C,0x8D,0x00,0x0F,0xB8,0x37,0xDF,0x67,0x8A,0x9D,0x66,0xA9,0x02,0x6A,0x91,
+0xFF,0x13,0xCA,0x2F,0x5D,0x83,0xBC,0x87,0x93,0x6C,0xDC,0x24,0x51,0x16,0x04,0x25,
+0x66,0xFA,0xB3,0xD9,0xC2,0xBA,0x29,0xBE,0x9A,0x48,0x38,0x82,0x99,0xF4,0xBF,0x3B,
+0x4A,0x31,0x19,0xF9,0xBF,0x8E,0x21,0x33,0x14,0xCA,0x4F,0x54,0x5F,0xFB,0xCE,0xFB,
+0x8F,0x71,0x7F,0xFD,0x5E,0x19,0xA0,0x0F,0x4B,0x91,0xB8,0xC4,0x54,0xBC,0x06,0xB0,
+0x45,0x8F,0x26,0x91,0xA2,0x8E,0xFE,0xA9,
+};
+
+
+/* subject:/C=IE/O=Baltimore/OU=CyberTrust/CN=Baltimore CyberTrust Root */
+/* issuer :/C=IE/O=Baltimore/OU=CyberTrust/CN=Baltimore CyberTrust Root */
+
+
+const unsigned char Baltimore_CyberTrust_Root_certificate[891]={
+0x30,0x82,0x03,0x77,0x30,0x82,0x02,0x5F,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x02,
+0x00,0x00,0xB9,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x30,0x5A,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,
+0x45,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x0A,0x13,0x09,0x42,0x61,0x6C,0x74,
+0x69,0x6D,0x6F,0x72,0x65,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0B,0x13,0x0A,
+0x43,0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x31,0x22,0x30,0x20,0x06,0x03,
+0x55,0x04,0x03,0x13,0x19,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,0x65,0x20,0x43,
+0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,
+0x17,0x0D,0x30,0x30,0x30,0x35,0x31,0x32,0x31,0x38,0x34,0x36,0x30,0x30,0x5A,0x17,
+0x0D,0x32,0x35,0x30,0x35,0x31,0x32,0x32,0x33,0x35,0x39,0x30,0x30,0x5A,0x30,0x5A,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x45,0x31,0x12,0x30,
+0x10,0x06,0x03,0x55,0x04,0x0A,0x13,0x09,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,
+0x65,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0B,0x13,0x0A,0x43,0x79,0x62,0x65,
+0x72,0x54,0x72,0x75,0x73,0x74,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x03,0x13,
+0x19,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,0x65,0x20,0x43,0x79,0x62,0x65,0x72,
+0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,
+0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xA3,0x04,0xBB,0x22,0xAB,
+0x98,0x3D,0x57,0xE8,0x26,0x72,0x9A,0xB5,0x79,0xD4,0x29,0xE2,0xE1,0xE8,0x95,0x80,
+0xB1,0xB0,0xE3,0x5B,0x8E,0x2B,0x29,0x9A,0x64,0xDF,0xA1,0x5D,0xED,0xB0,0x09,0x05,
+0x6D,0xDB,0x28,0x2E,0xCE,0x62,0xA2,0x62,0xFE,0xB4,0x88,0xDA,0x12,0xEB,0x38,0xEB,
+0x21,0x9D,0xC0,0x41,0x2B,0x01,0x52,0x7B,0x88,0x77,0xD3,0x1C,0x8F,0xC7,0xBA,0xB9,
+0x88,0xB5,0x6A,0x09,0xE7,0x73,0xE8,0x11,0x40,0xA7,0xD1,0xCC,0xCA,0x62,0x8D,0x2D,
+0xE5,0x8F,0x0B,0xA6,0x50,0xD2,0xA8,0x50,0xC3,0x28,0xEA,0xF5,0xAB,0x25,0x87,0x8A,
+0x9A,0x96,0x1C,0xA9,0x67,0xB8,0x3F,0x0C,0xD5,0xF7,0xF9,0x52,0x13,0x2F,0xC2,0x1B,
+0xD5,0x70,0x70,0xF0,0x8F,0xC0,0x12,0xCA,0x06,0xCB,0x9A,0xE1,0xD9,0xCA,0x33,0x7A,
+0x77,0xD6,0xF8,0xEC,0xB9,0xF1,0x68,0x44,0x42,0x48,0x13,0xD2,0xC0,0xC2,0xA4,0xAE,
+0x5E,0x60,0xFE,0xB6,0xA6,0x05,0xFC,0xB4,0xDD,0x07,0x59,0x02,0xD4,0x59,0x18,0x98,
+0x63,0xF5,0xA5,0x63,0xE0,0x90,0x0C,0x7D,0x5D,0xB2,0x06,0x7A,0xF3,0x85,0xEA,0xEB,
+0xD4,0x03,0xAE,0x5E,0x84,0x3E,0x5F,0xFF,0x15,0xED,0x69,0xBC,0xF9,0x39,0x36,0x72,
+0x75,0xCF,0x77,0x52,0x4D,0xF3,0xC9,0x90,0x2C,0xB9,0x3D,0xE5,0xC9,0x23,0x53,0x3F,
+0x1F,0x24,0x98,0x21,0x5C,0x07,0x99,0x29,0xBD,0xC6,0x3A,0xEC,0xE7,0x6E,0x86,0x3A,
+0x6B,0x97,0x74,0x63,0x33,0xBD,0x68,0x18,0x31,0xF0,0x78,0x8D,0x76,0xBF,0xFC,0x9E,
+0x8E,0x5D,0x2A,0x86,0xA7,0x4D,0x90,0xDC,0x27,0x1A,0x39,0x02,0x03,0x01,0x00,0x01,
+0xA3,0x45,0x30,0x43,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xE5,
+0x9D,0x59,0x30,0x82,0x47,0x58,0xCC,0xAC,0xFA,0x08,0x54,0x36,0x86,0x7B,0x3A,0xB5,
+0x04,0x4D,0xF0,0x30,0x12,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x08,0x30,
+0x06,0x01,0x01,0xFF,0x02,0x01,0x03,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,
+0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x85,0x0C,0x5D,0x8E,0xE4,
+0x6F,0x51,0x68,0x42,0x05,0xA0,0xDD,0xBB,0x4F,0x27,0x25,0x84,0x03,0xBD,0xF7,0x64,
+0xFD,0x2D,0xD7,0x30,0xE3,0xA4,0x10,0x17,0xEB,0xDA,0x29,0x29,0xB6,0x79,0x3F,0x76,
+0xF6,0x19,0x13,0x23,0xB8,0x10,0x0A,0xF9,0x58,0xA4,0xD4,0x61,0x70,0xBD,0x04,0x61,
+0x6A,0x12,0x8A,0x17,0xD5,0x0A,0xBD,0xC5,0xBC,0x30,0x7C,0xD6,0xE9,0x0C,0x25,0x8D,
+0x86,0x40,0x4F,0xEC,0xCC,0xA3,0x7E,0x38,0xC6,0x37,0x11,0x4F,0xED,0xDD,0x68,0x31,
+0x8E,0x4C,0xD2,0xB3,0x01,0x74,0xEE,0xBE,0x75,0x5E,0x07,0x48,0x1A,0x7F,0x70,0xFF,
+0x16,0x5C,0x84,0xC0,0x79,0x85,0xB8,0x05,0xFD,0x7F,0xBE,0x65,0x11,0xA3,0x0F,0xC0,
+0x02,0xB4,0xF8,0x52,0x37,0x39,0x04,0xD5,0xA9,0x31,0x7A,0x18,0xBF,0xA0,0x2A,0xF4,
+0x12,0x99,0xF7,0xA3,0x45,0x82,0xE3,0x3C,0x5E,0xF5,0x9D,0x9E,0xB5,0xC8,0x9E,0x7C,
+0x2E,0xC8,0xA4,0x9E,0x4E,0x08,0x14,0x4B,0x6D,0xFD,0x70,0x6D,0x6B,0x1A,0x63,0xBD,
+0x64,0xE6,0x1F,0xB7,0xCE,0xF0,0xF2,0x9F,0x2E,0xBB,0x1B,0xB7,0xF2,0x50,0x88,0x73,
+0x92,0xC2,0xE2,0xE3,0x16,0x8D,0x9A,0x32,0x02,0xAB,0x8E,0x18,0xDD,0xE9,0x10,0x11,
+0xEE,0x7E,0x35,0xAB,0x90,0xAF,0x3E,0x30,0x94,0x7A,0xD0,0x33,0x3D,0xA7,0x65,0x0F,
+0xF5,0xFC,0x8E,0x9E,0x62,0xCF,0x47,0x44,0x2C,0x01,0x5D,0xBB,0x1D,0xB5,0x32,0xD2,
+0x47,0xD2,0x38,0x2E,0xD0,0xFE,0x81,0xDC,0x32,0x6A,0x1E,0xB5,0xEE,0x3C,0xD5,0xFC,
+0xE7,0x81,0x1D,0x19,0xC3,0x24,0x42,0xEA,0x63,0x39,0xA9,
+};
+
+
+/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=AAA Certificate Services */
+/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=AAA Certificate Services */
+
+
+const unsigned char Comodo_AAA_Services_root_certificate[1078]={
+0x30,0x82,0x04,0x32,0x30,0x82,0x03,0x1A,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x7B,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
+0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
+0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
+0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
+0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,
+0x04,0x03,0x0C,0x18,0x41,0x41,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,0x1E,0x17,0x0D,
+0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,
+0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x7B,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,
+0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,
+0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,
+0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,
+0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,0x41,0x20,0x4C,
+0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x0C,
+0x18,0x41,0x41,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,
+0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,
+0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xBE,0x40,0x9D,0xF4,0x6E,0xE1,
+0xEA,0x76,0x87,0x1C,0x4D,0x45,0x44,0x8E,0xBE,0x46,0xC8,0x83,0x06,0x9D,0xC1,0x2A,
+0xFE,0x18,0x1F,0x8E,0xE4,0x02,0xFA,0xF3,0xAB,0x5D,0x50,0x8A,0x16,0x31,0x0B,0x9A,
+0x06,0xD0,0xC5,0x70,0x22,0xCD,0x49,0x2D,0x54,0x63,0xCC,0xB6,0x6E,0x68,0x46,0x0B,
+0x53,0xEA,0xCB,0x4C,0x24,0xC0,0xBC,0x72,0x4E,0xEA,0xF1,0x15,0xAE,0xF4,0x54,0x9A,
+0x12,0x0A,0xC3,0x7A,0xB2,0x33,0x60,0xE2,0xDA,0x89,0x55,0xF3,0x22,0x58,0xF3,0xDE,
+0xDC,0xCF,0xEF,0x83,0x86,0xA2,0x8C,0x94,0x4F,0x9F,0x68,0xF2,0x98,0x90,0x46,0x84,
+0x27,0xC7,0x76,0xBF,0xE3,0xCC,0x35,0x2C,0x8B,0x5E,0x07,0x64,0x65,0x82,0xC0,0x48,
+0xB0,0xA8,0x91,0xF9,0x61,0x9F,0x76,0x20,0x50,0xA8,0x91,0xC7,0x66,0xB5,0xEB,0x78,
+0x62,0x03,0x56,0xF0,0x8A,0x1A,0x13,0xEA,0x31,0xA3,0x1E,0xA0,0x99,0xFD,0x38,0xF6,
+0xF6,0x27,0x32,0x58,0x6F,0x07,0xF5,0x6B,0xB8,0xFB,0x14,0x2B,0xAF,0xB7,0xAA,0xCC,
+0xD6,0x63,0x5F,0x73,0x8C,0xDA,0x05,0x99,0xA8,0x38,0xA8,0xCB,0x17,0x78,0x36,0x51,
+0xAC,0xE9,0x9E,0xF4,0x78,0x3A,0x8D,0xCF,0x0F,0xD9,0x42,0xE2,0x98,0x0C,0xAB,0x2F,
+0x9F,0x0E,0x01,0xDE,0xEF,0x9F,0x99,0x49,0xF1,0x2D,0xDF,0xAC,0x74,0x4D,0x1B,0x98,
+0xB5,0x47,0xC5,0xE5,0x29,0xD1,0xF9,0x90,0x18,0xC7,0x62,0x9C,0xBE,0x83,0xC7,0x26,
+0x7B,0x3E,0x8A,0x25,0xC7,0xC0,0xDD,0x9D,0xE6,0x35,0x68,0x10,0x20,0x9D,0x8F,0xD8,
+0xDE,0xD2,0xC3,0x84,0x9C,0x0D,0x5E,0xE8,0x2F,0xC9,0x02,0x03,0x01,0x00,0x01,0xA3,
+0x81,0xC0,0x30,0x81,0xBD,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,
+0xA0,0x11,0x0A,0x23,0x3E,0x96,0xF1,0x07,0xEC,0xE2,0xAF,0x29,0xEF,0x82,0xA5,0x7F,
+0xD0,0x30,0xA4,0xB4,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
+0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,
+0x30,0x03,0x01,0x01,0xFF,0x30,0x7B,0x06,0x03,0x55,0x1D,0x1F,0x04,0x74,0x30,0x72,
+0x30,0x38,0xA0,0x36,0xA0,0x34,0x86,0x32,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,
+0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,
+0x41,0x41,0x41,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,
+0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,0x36,0xA0,0x34,0xA0,0x32,
+0x86,0x30,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,
+0x6F,0x64,0x6F,0x2E,0x6E,0x65,0x74,0x2F,0x41,0x41,0x41,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,
+0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,
+0x00,0x03,0x82,0x01,0x01,0x00,0x08,0x56,0xFC,0x02,0xF0,0x9B,0xE8,0xFF,0xA4,0xFA,
+0xD6,0x7B,0xC6,0x44,0x80,0xCE,0x4F,0xC4,0xC5,0xF6,0x00,0x58,0xCC,0xA6,0xB6,0xBC,
+0x14,0x49,0x68,0x04,0x76,0xE8,0xE6,0xEE,0x5D,0xEC,0x02,0x0F,0x60,0xD6,0x8D,0x50,
+0x18,0x4F,0x26,0x4E,0x01,0xE3,0xE6,0xB0,0xA5,0xEE,0xBF,0xBC,0x74,0x54,0x41,0xBF,
+0xFD,0xFC,0x12,0xB8,0xC7,0x4F,0x5A,0xF4,0x89,0x60,0x05,0x7F,0x60,0xB7,0x05,0x4A,
+0xF3,0xF6,0xF1,0xC2,0xBF,0xC4,0xB9,0x74,0x86,0xB6,0x2D,0x7D,0x6B,0xCC,0xD2,0xF3,
+0x46,0xDD,0x2F,0xC6,0xE0,0x6A,0xC3,0xC3,0x34,0x03,0x2C,0x7D,0x96,0xDD,0x5A,0xC2,
+0x0E,0xA7,0x0A,0x99,0xC1,0x05,0x8B,0xAB,0x0C,0x2F,0xF3,0x5C,0x3A,0xCF,0x6C,0x37,
+0x55,0x09,0x87,0xDE,0x53,0x40,0x6C,0x58,0xEF,0xFC,0xB6,0xAB,0x65,0x6E,0x04,0xF6,
+0x1B,0xDC,0x3C,0xE0,0x5A,0x15,0xC6,0x9E,0xD9,0xF1,0x59,0x48,0x30,0x21,0x65,0x03,
+0x6C,0xEC,0xE9,0x21,0x73,0xEC,0x9B,0x03,0xA1,0xE0,0x37,0xAD,0xA0,0x15,0x18,0x8F,
+0xFA,0xBA,0x02,0xCE,0xA7,0x2C,0xA9,0x10,0x13,0x2C,0xD4,0xE5,0x08,0x26,0xAB,0x22,
+0x97,0x60,0xF8,0x90,0x5E,0x74,0xD4,0xA2,0x9A,0x53,0xBD,0xF2,0xA9,0x68,0xE0,0xA2,
+0x6E,0xC2,0xD7,0x6C,0xB1,0xA3,0x0F,0x9E,0xBF,0xEB,0x68,0xE7,0x56,0xF2,0xAE,0xF2,
+0xE3,0x2B,0x38,0x3A,0x09,0x81,0xB5,0x6B,0x85,0xD7,0xBE,0x2D,0xED,0x3F,0x1A,0xB7,
+0xB2,0x63,0xE2,0xF5,0x62,0x2C,0x82,0xD4,0x6A,0x00,0x41,0x50,0xF1,0x39,0x83,0x9F,
+0x95,0xE9,0x36,0x96,0x98,0x6E,
+};
+
+
+/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO Certification Authority */
+/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO Certification Authority */
+
+
+const unsigned char COMODO_Certification_Authority_certificate[1057]={
+0x30,0x82,0x04,0x1D,0x30,0x82,0x03,0x05,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x4E,
+0x81,0x2D,0x8A,0x82,0x65,0xE0,0x0B,0x02,0xEE,0x3E,0x35,0x02,0x46,0xE5,0x3D,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
+0x81,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
+0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
+0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
+0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,
+0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x27,0x30,0x25,0x06,0x03,0x55,
+0x04,0x03,0x13,0x1E,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x30,0x31,0x30,0x30,0x30,0x30,
+0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,
+0x39,0x5A,0x30,0x81,0x81,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
+0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,
+0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,
+0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,
+0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,
+0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x27,0x30,
+0x25,0x06,0x03,0x55,0x04,0x03,0x13,0x1E,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,
+0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,
+0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xD0,0x40,0x8B,0x8B,0x72,0xE3,0x91,0x1B,0xF7,
+0x51,0xC1,0x1B,0x54,0x04,0x98,0xD3,0xA9,0xBF,0xC1,0xE6,0x8A,0x5D,0x3B,0x87,0xFB,
+0xBB,0x88,0xCE,0x0D,0xE3,0x2F,0x3F,0x06,0x96,0xF0,0xA2,0x29,0x50,0x99,0xAE,0xDB,
+0x3B,0xA1,0x57,0xB0,0x74,0x51,0x71,0xCD,0xED,0x42,0x91,0x4D,0x41,0xFE,0xA9,0xC8,
+0xD8,0x6A,0x86,0x77,0x44,0xBB,0x59,0x66,0x97,0x50,0x5E,0xB4,0xD4,0x2C,0x70,0x44,
+0xCF,0xDA,0x37,0x95,0x42,0x69,0x3C,0x30,0xC4,0x71,0xB3,0x52,0xF0,0x21,0x4D,0xA1,
+0xD8,0xBA,0x39,0x7C,0x1C,0x9E,0xA3,0x24,0x9D,0xF2,0x83,0x16,0x98,0xAA,0x16,0x7C,
+0x43,0x9B,0x15,0x5B,0xB7,0xAE,0x34,0x91,0xFE,0xD4,0x62,0x26,0x18,0x46,0x9A,0x3F,
+0xEB,0xC1,0xF9,0xF1,0x90,0x57,0xEB,0xAC,0x7A,0x0D,0x8B,0xDB,0x72,0x30,0x6A,0x66,
+0xD5,0xE0,0x46,0xA3,0x70,0xDC,0x68,0xD9,0xFF,0x04,0x48,0x89,0x77,0xDE,0xB5,0xE9,
+0xFB,0x67,0x6D,0x41,0xE9,0xBC,0x39,0xBD,0x32,0xD9,0x62,0x02,0xF1,0xB1,0xA8,0x3D,
+0x6E,0x37,0x9C,0xE2,0x2F,0xE2,0xD3,0xA2,0x26,0x8B,0xC6,0xB8,0x55,0x43,0x88,0xE1,
+0x23,0x3E,0xA5,0xD2,0x24,0x39,0x6A,0x47,0xAB,0x00,0xD4,0xA1,0xB3,0xA9,0x25,0xFE,
+0x0D,0x3F,0xA7,0x1D,0xBA,0xD3,0x51,0xC1,0x0B,0xA4,0xDA,0xAC,0x38,0xEF,0x55,0x50,
+0x24,0x05,0x65,0x46,0x93,0x34,0x4F,0x2D,0x8D,0xAD,0xC6,0xD4,0x21,0x19,0xD2,0x8E,
+0xCA,0x05,0x61,0x71,0x07,0x73,0x47,0xE5,0x8A,0x19,0x12,0xBD,0x04,0x4D,0xCE,0x4E,
+0x9C,0xA5,0x48,0xAC,0xBB,0x26,0xF7,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0x8E,0x30,
+0x81,0x8B,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x0B,0x58,0xE5,
+0x8B,0xC6,0x4C,0x15,0x37,0xA4,0x40,0xA9,0x30,0xA9,0x21,0xBE,0x47,0x36,0x5A,0x56,
+0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,
+0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,
+0x01,0xFF,0x30,0x49,0x06,0x03,0x55,0x1D,0x1F,0x04,0x42,0x30,0x40,0x30,0x3E,0xA0,
+0x3C,0xA0,0x3A,0x86,0x38,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,
+0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x43,0x4F,0x4D,
+0x4F,0x44,0x4F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,
+0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x2E,0x63,0x72,0x6C,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,
+0x00,0x3E,0x98,0x9E,0x9B,0xF6,0x1B,0xE9,0xD7,0x39,0xB7,0x78,0xAE,0x1D,0x72,0x18,
+0x49,0xD3,0x87,0xE4,0x43,0x82,0xEB,0x3F,0xC9,0xAA,0xF5,0xA8,0xB5,0xEF,0x55,0x7C,
+0x21,0x52,0x65,0xF9,0xD5,0x0D,0xE1,0x6C,0xF4,0x3E,0x8C,0x93,0x73,0x91,0x2E,0x02,
+0xC4,0x4E,0x07,0x71,0x6F,0xC0,0x8F,0x38,0x61,0x08,0xA8,0x1E,0x81,0x0A,0xC0,0x2F,
+0x20,0x2F,0x41,0x8B,0x91,0xDC,0x48,0x45,0xBC,0xF1,0xC6,0xDE,0xBA,0x76,0x6B,0x33,
+0xC8,0x00,0x2D,0x31,0x46,0x4C,0xED,0xE7,0x9D,0xCF,0x88,0x94,0xFF,0x33,0xC0,0x56,
+0xE8,0x24,0x86,0x26,0xB8,0xD8,0x38,0x38,0xDF,0x2A,0x6B,0xDD,0x12,0xCC,0xC7,0x3F,
+0x47,0x17,0x4C,0xA2,0xC2,0x06,0x96,0x09,0xD6,0xDB,0xFE,0x3F,0x3C,0x46,0x41,0xDF,
+0x58,0xE2,0x56,0x0F,0x3C,0x3B,0xC1,0x1C,0x93,0x35,0xD9,0x38,0x52,0xAC,0xEE,0xC8,
+0xEC,0x2E,0x30,0x4E,0x94,0x35,0xB4,0x24,0x1F,0x4B,0x78,0x69,0xDA,0xF2,0x02,0x38,
+0xCC,0x95,0x52,0x93,0xF0,0x70,0x25,0x59,0x9C,0x20,0x67,0xC4,0xEE,0xF9,0x8B,0x57,
+0x61,0xF4,0x92,0x76,0x7D,0x3F,0x84,0x8D,0x55,0xB7,0xE8,0xE5,0xAC,0xD5,0xF1,0xF5,
+0x19,0x56,0xA6,0x5A,0xFB,0x90,0x1C,0xAF,0x93,0xEB,0xE5,0x1C,0xD4,0x67,0x97,0x5D,
+0x04,0x0E,0xBE,0x0B,0x83,0xA6,0x17,0x83,0xB9,0x30,0x12,0xA0,0xC5,0x33,0x15,0x05,
+0xB9,0x0D,0xFB,0xC7,0x05,0x76,0xE3,0xD8,0x4A,0x8D,0xFC,0x34,0x17,0xA3,0xC6,0x21,
+0x28,0xBE,0x30,0x45,0x31,0x1E,0xC7,0x78,0xBE,0x58,0x61,0x38,0xAC,0x3B,0xE2,0x01,
+0x65,
+};
+
+
+/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO ECC Certification Authority */
+/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO ECC Certification Authority */
+
+
+const unsigned char COMODO_ECC_Certification_Authority_certificate[653]={
+0x30,0x82,0x02,0x89,0x30,0x82,0x02,0x0F,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x1F,
+0x47,0xAF,0xAA,0x62,0x00,0x70,0x50,0x54,0x4C,0x01,0x9E,0x9B,0x63,0x99,0x2A,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x85,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,
+0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,
+0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,
+0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,
+0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,
+0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x38,0x30,0x33,0x30,0x36,0x30,0x30,0x30,
+0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,0x33,0x35,0x39,
+0x35,0x39,0x5A,0x30,0x81,0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,
+0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,
+0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,
+0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,
+0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,
+0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,
+0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
+0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x76,0x30,0x10,0x06,
+0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,
+0x62,0x00,0x04,0x03,0x47,0x7B,0x2F,0x75,0xC9,0x82,0x15,0x85,0xFB,0x75,0xE4,0x91,
+0x16,0xD4,0xAB,0x62,0x99,0xF5,0x3E,0x52,0x0B,0x06,0xCE,0x41,0x00,0x7F,0x97,0xE1,
+0x0A,0x24,0x3C,0x1D,0x01,0x04,0xEE,0x3D,0xD2,0x8D,0x09,0x97,0x0C,0xE0,0x75,0xE4,
+0xFA,0xFB,0x77,0x8A,0x2A,0xF5,0x03,0x60,0x4B,0x36,0x8B,0x16,0x23,0x16,0xAD,0x09,
+0x71,0xF4,0x4A,0xF4,0x28,0x50,0xB4,0xFE,0x88,0x1C,0x6E,0x3F,0x6C,0x2F,0x2F,0x09,
+0x59,0x5B,0xA5,0x5B,0x0B,0x33,0x99,0xE2,0xC3,0x3D,0x89,0xF9,0x6A,0x2C,0xEF,0xB2,
+0xD3,0x06,0xE9,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,
+0x04,0x14,0x75,0x71,0xA7,0x19,0x48,0x19,0xBC,0x9D,0x9D,0xEA,0x41,0x47,0xDF,0x94,
+0xC4,0x48,0x77,0x99,0xD3,0x79,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,
+0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,
+0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31,0x00,0xEF,0x03,0x5B,0x7A,0xAC,
+0xB7,0x78,0x0A,0x72,0xB7,0x88,0xDF,0xFF,0xB5,0x46,0x14,0x09,0x0A,0xFA,0xA0,0xE6,
+0x7D,0x08,0xC6,0x1A,0x87,0xBD,0x18,0xA8,0x73,0xBD,0x26,0xCA,0x60,0x0C,0x9D,0xCE,
+0x99,0x9F,0xCF,0x5C,0x0F,0x30,0xE1,0xBE,0x14,0x31,0xEA,0x02,0x30,0x14,0xF4,0x93,
+0x3C,0x49,0xA7,0x33,0x7A,0x90,0x46,0x47,0xB3,0x63,0x7D,0x13,0x9B,0x4E,0xB7,0x6F,
+0x18,0x37,0x80,0x53,0xFE,0xDD,0x20,0xE0,0x35,0x9A,0x36,0xD1,0xC7,0x01,0xB9,0xE6,
+0xDC,0xDD,0xF3,0xFF,0x1D,0x2C,0x3A,0x16,0x57,0xD9,0x92,0x39,0xD6,
+};
+
+
+/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Secure Certificate Services */
+/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Secure Certificate Services */
+
+
+const unsigned char Comodo_Secure_Services_root_certificate[1091]={
+0x30,0x82,0x04,0x3F,0x30,0x82,0x03,0x27,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x7E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
+0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
+0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
+0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
+0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
+0x04,0x03,0x0C,0x1B,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,
+0x1E,0x17,0x0D,0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,
+0x17,0x0D,0x32,0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,
+0x7E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
+0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
+0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
+0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
+0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
+0x04,0x03,0x0C,0x1B,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,
+0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,
+0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,
+0xC0,0x71,0x33,0x82,0x8A,0xD0,0x70,0xEB,0x73,0x87,0x82,0x40,0xD5,0x1D,0xE4,0xCB,
+0xC9,0x0E,0x42,0x90,0xF9,0xDE,0x34,0xB9,0xA1,0xBA,0x11,0xF4,0x25,0x85,0xF3,0xCC,
+0x72,0x6D,0xF2,0x7B,0x97,0x6B,0xB3,0x07,0xF1,0x77,0x24,0x91,0x5F,0x25,0x8F,0xF6,
+0x74,0x3D,0xE4,0x80,0xC2,0xF8,0x3C,0x0D,0xF3,0xBF,0x40,0xEA,0xF7,0xC8,0x52,0xD1,
+0x72,0x6F,0xEF,0xC8,0xAB,0x41,0xB8,0x6E,0x2E,0x17,0x2A,0x95,0x69,0x0C,0xCD,0xD2,
+0x1E,0x94,0x7B,0x2D,0x94,0x1D,0xAA,0x75,0xD7,0xB3,0x98,0xCB,0xAC,0xBC,0x64,0x53,
+0x40,0xBC,0x8F,0xAC,0xAC,0x36,0xCB,0x5C,0xAD,0xBB,0xDD,0xE0,0x94,0x17,0xEC,0xD1,
+0x5C,0xD0,0xBF,0xEF,0xA5,0x95,0xC9,0x90,0xC5,0xB0,0xAC,0xFB,0x1B,0x43,0xDF,0x7A,
+0x08,0x5D,0xB7,0xB8,0xF2,0x40,0x1B,0x2B,0x27,0x9E,0x50,0xCE,0x5E,0x65,0x82,0x88,
+0x8C,0x5E,0xD3,0x4E,0x0C,0x7A,0xEA,0x08,0x91,0xB6,0x36,0xAA,0x2B,0x42,0xFB,0xEA,
+0xC2,0xA3,0x39,0xE5,0xDB,0x26,0x38,0xAD,0x8B,0x0A,0xEE,0x19,0x63,0xC7,0x1C,0x24,
+0xDF,0x03,0x78,0xDA,0xE6,0xEA,0xC1,0x47,0x1A,0x0B,0x0B,0x46,0x09,0xDD,0x02,0xFC,
+0xDE,0xCB,0x87,0x5F,0xD7,0x30,0x63,0x68,0xA1,0xAE,0xDC,0x32,0xA1,0xBA,0xBE,0xFE,
+0x44,0xAB,0x68,0xB6,0xA5,0x17,0x15,0xFD,0xBD,0xD5,0xA7,0xA7,0x9A,0xE4,0x44,0x33,
+0xE9,0x88,0x8E,0xFC,0xED,0x51,0xEB,0x93,0x71,0x4E,0xAD,0x01,0xE7,0x44,0x8E,0xAB,
+0x2D,0xCB,0xA8,0xFE,0x01,0x49,0x48,0xF0,0xC0,0xDD,0xC7,0x68,0xD8,0x92,0xFE,0x3D,
+0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xC7,0x30,0x81,0xC4,0x30,0x1D,0x06,0x03,0x55,
+0x1D,0x0E,0x04,0x16,0x04,0x14,0x3C,0xD8,0x93,0x88,0xC2,0xC0,0x82,0x09,0xCC,0x01,
+0x99,0x06,0x93,0x20,0xE9,0x9E,0x70,0x09,0x63,0x4F,0x30,0x0E,0x06,0x03,0x55,0x1D,
+0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,
+0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x81,0x06,0x03,
+0x55,0x1D,0x1F,0x04,0x7A,0x30,0x78,0x30,0x3B,0xA0,0x39,0xA0,0x37,0x86,0x35,0x68,
+0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,
+0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x53,0x65,0x63,0x75,0x72,0x65,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,
+0x2E,0x63,0x72,0x6C,0x30,0x39,0xA0,0x37,0xA0,0x35,0x86,0x33,0x68,0x74,0x74,0x70,
+0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x2E,0x6E,0x65,
+0x74,0x2F,0x53,0x65,0x63,0x75,0x72,0x65,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,
+0x01,0x01,0x00,0x87,0x01,0x6D,0x23,0x1D,0x7E,0x5B,0x17,0x7D,0xC1,0x61,0x32,0xCF,
+0x8F,0xE7,0xF3,0x8A,0x94,0x59,0x66,0xE0,0x9E,0x28,0xA8,0x5E,0xD3,0xB7,0xF4,0x34,
+0xE6,0xAA,0x39,0xB2,0x97,0x16,0xC5,0x82,0x6F,0x32,0xA4,0xE9,0x8C,0xE7,0xAF,0xFD,
+0xEF,0xC2,0xE8,0xB9,0x4B,0xAA,0xA3,0xF4,0xE6,0xDA,0x8D,0x65,0x21,0xFB,0xBA,0x80,
+0xEB,0x26,0x28,0x85,0x1A,0xFE,0x39,0x8C,0xDE,0x5B,0x04,0x04,0xB4,0x54,0xF9,0xA3,
+0x67,0x9E,0x41,0xFA,0x09,0x52,0xCC,0x05,0x48,0xA8,0xC9,0x3F,0x21,0x04,0x1E,0xCE,
+0x48,0x6B,0xFC,0x85,0xE8,0xC2,0x7B,0xAF,0x7F,0xB7,0xCC,0xF8,0x5F,0x3A,0xFD,0x35,
+0xC6,0x0D,0xEF,0x97,0xDC,0x4C,0xAB,0x11,0xE1,0x6B,0xCB,0x31,0xD1,0x6C,0xFB,0x48,
+0x80,0xAB,0xDC,0x9C,0x37,0xB8,0x21,0x14,0x4B,0x0D,0x71,0x3D,0xEC,0x83,0x33,0x6E,
+0xD1,0x6E,0x32,0x16,0xEC,0x98,0xC7,0x16,0x8B,0x59,0xA6,0x34,0xAB,0x05,0x57,0x2D,
+0x93,0xF7,0xAA,0x13,0xCB,0xD2,0x13,0xE2,0xB7,0x2E,0x3B,0xCD,0x6B,0x50,0x17,0x09,
+0x68,0x3E,0xB5,0x26,0x57,0xEE,0xB6,0xE0,0xB6,0xDD,0xB9,0x29,0x80,0x79,0x7D,0x8F,
+0xA3,0xF0,0xA4,0x28,0xA4,0x15,0xC4,0x85,0xF4,0x27,0xD4,0x6B,0xBF,0xE5,0x5C,0xE4,
+0x65,0x02,0x76,0x54,0xB4,0xE3,0x37,0x66,0x24,0xD3,0x19,0x61,0xC8,0x52,0x10,0xE5,
+0x8B,0x37,0x9A,0xB9,0xA9,0xF9,0x1D,0xBF,0xEA,0x99,0x92,0x61,0x96,0xFF,0x01,0xCD,
+0xA1,0x5F,0x0D,0xBC,0x71,0xBC,0x0E,0xAC,0x0B,0x1D,0x47,0x45,0x1D,0xC1,0xEC,0x7C,
+0xEC,0xFD,0x29,
+};
+
+
+/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Trusted Certificate Services */
+/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Trusted Certificate Services */
+
+
+const unsigned char Comodo_Trusted_Services_root_certificate[1095]={
+0x30,0x82,0x04,0x43,0x30,0x82,0x03,0x2B,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x7F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,
+0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,
+0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,
+0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,
+0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x25,0x30,0x23,0x06,0x03,0x55,
+0x04,0x03,0x0C,0x1C,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,
+0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,
+0x5A,0x17,0x0D,0x32,0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,
+0x30,0x7F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,
+0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,
+0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,
+0x06,0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,
+0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,
+0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x25,0x30,0x23,0x06,0x03,
+0x55,0x04,0x03,0x0C,0x1C,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,
+0x73,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,
+0x01,0x00,0xDF,0x71,0x6F,0x36,0x58,0x53,0x5A,0xF2,0x36,0x54,0x57,0x80,0xC4,0x74,
+0x08,0x20,0xED,0x18,0x7F,0x2A,0x1D,0xE6,0x35,0x9A,0x1E,0x25,0xAC,0x9C,0xE5,0x96,
+0x7E,0x72,0x52,0xA0,0x15,0x42,0xDB,0x59,0xDD,0x64,0x7A,0x1A,0xD0,0xB8,0x7B,0xDD,
+0x39,0x15,0xBC,0x55,0x48,0xC4,0xED,0x3A,0x00,0xEA,0x31,0x11,0xBA,0xF2,0x71,0x74,
+0x1A,0x67,0xB8,0xCF,0x33,0xCC,0xA8,0x31,0xAF,0xA3,0xE3,0xD7,0x7F,0xBF,0x33,0x2D,
+0x4C,0x6A,0x3C,0xEC,0x8B,0xC3,0x92,0xD2,0x53,0x77,0x24,0x74,0x9C,0x07,0x6E,0x70,
+0xFC,0xBD,0x0B,0x5B,0x76,0xBA,0x5F,0xF2,0xFF,0xD7,0x37,0x4B,0x4A,0x60,0x78,0xF7,
+0xF0,0xFA,0xCA,0x70,0xB4,0xEA,0x59,0xAA,0xA3,0xCE,0x48,0x2F,0xA9,0xC3,0xB2,0x0B,
+0x7E,0x17,0x72,0x16,0x0C,0xA6,0x07,0x0C,0x1B,0x38,0xCF,0xC9,0x62,0xB7,0x3F,0xA0,
+0x93,0xA5,0x87,0x41,0xF2,0xB7,0x70,0x40,0x77,0xD8,0xBE,0x14,0x7C,0xE3,0xA8,0xC0,
+0x7A,0x8E,0xE9,0x63,0x6A,0xD1,0x0F,0x9A,0xC6,0xD2,0xF4,0x8B,0x3A,0x14,0x04,0x56,
+0xD4,0xED,0xB8,0xCC,0x6E,0xF5,0xFB,0xE2,0x2C,0x58,0xBD,0x7F,0x4F,0x6B,0x2B,0xF7,
+0x60,0x24,0x58,0x24,0xCE,0x26,0xEF,0x34,0x91,0x3A,0xD5,0xE3,0x81,0xD0,0xB2,0xF0,
+0x04,0x02,0xD7,0x5B,0xB7,0x3E,0x92,0xAC,0x6B,0x12,0x8A,0xF9,0xE4,0x05,0xB0,0x3B,
+0x91,0x49,0x5C,0xB2,0xEB,0x53,0xEA,0xF8,0x9F,0x47,0x86,0xEE,0xBF,0x95,0xC0,0xC0,
+0x06,0x9F,0xD2,0x5B,0x5E,0x11,0x1B,0xF4,0xC7,0x04,0x35,0x29,0xD2,0x55,0x5C,0xE4,
+0xED,0xEB,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xC9,0x30,0x81,0xC6,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC5,0x7B,0x58,0xBD,0xED,0xDA,0x25,0x69,
+0xD2,0xF7,0x59,0x16,0xA8,0xB3,0x32,0xC0,0x7B,0x27,0x5B,0xF4,0x30,0x0E,0x06,0x03,
+0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,
+0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x83,
+0x06,0x03,0x55,0x1D,0x1F,0x04,0x7C,0x30,0x7A,0x30,0x3C,0xA0,0x3A,0xA0,0x38,0x86,
+0x36,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,
+0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x54,0x72,0x75,0x73,0x74,0x65,0x64,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,
+0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,0x3A,0xA0,0x38,0xA0,0x36,0x86,0x34,0x68,
+0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,
+0x2E,0x6E,0x65,0x74,0x2F,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,
+0x63,0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xC8,0x93,0x81,0x3B,0x89,0xB4,0xAF,0xB8,0x84,
+0x12,0x4C,0x8D,0xD2,0xF0,0xDB,0x70,0xBA,0x57,0x86,0x15,0x34,0x10,0xB9,0x2F,0x7F,
+0x1E,0xB0,0xA8,0x89,0x60,0xA1,0x8A,0xC2,0x77,0x0C,0x50,0x4A,0x9B,0x00,0x8B,0xD8,
+0x8B,0xF4,0x41,0xE2,0xD0,0x83,0x8A,0x4A,0x1C,0x14,0x06,0xB0,0xA3,0x68,0x05,0x70,
+0x31,0x30,0xA7,0x53,0x9B,0x0E,0xE9,0x4A,0xA0,0x58,0x69,0x67,0x0E,0xAE,0x9D,0xF6,
+0xA5,0x2C,0x41,0xBF,0x3C,0x06,0x6B,0xE4,0x59,0xCC,0x6D,0x10,0xF1,0x96,0x6F,0x1F,
+0xDF,0xF4,0x04,0x02,0xA4,0x9F,0x45,0x3E,0xC8,0xD8,0xFA,0x36,0x46,0x44,0x50,0x3F,
+0x82,0x97,0x91,0x1F,0x28,0xDB,0x18,0x11,0x8C,0x2A,0xE4,0x65,0x83,0x57,0x12,0x12,
+0x8C,0x17,0x3F,0x94,0x36,0xFE,0x5D,0xB0,0xC0,0x04,0x77,0x13,0xB8,0xF4,0x15,0xD5,
+0x3F,0x38,0xCC,0x94,0x3A,0x55,0xD0,0xAC,0x98,0xF5,0xBA,0x00,0x5F,0xE0,0x86,0x19,
+0x81,0x78,0x2F,0x28,0xC0,0x7E,0xD3,0xCC,0x42,0x0A,0xF5,0xAE,0x50,0xA0,0xD1,0x3E,
+0xC6,0xA1,0x71,0xEC,0x3F,0xA0,0x20,0x8C,0x66,0x3A,0x89,0xB4,0x8E,0xD4,0xD8,0xB1,
+0x4D,0x25,0x47,0xEE,0x2F,0x88,0xC8,0xB5,0xE1,0x05,0x45,0xC0,0xBE,0x14,0x71,0xDE,
+0x7A,0xFD,0x8E,0x7B,0x7D,0x4D,0x08,0x96,0xA5,0x12,0x73,0xF0,0x2D,0xCA,0x37,0x27,
+0x74,0x12,0x27,0x4C,0xCB,0xB6,0x97,0xE9,0xD9,0xAE,0x08,0x6D,0x5A,0x39,0x40,0xDD,
+0x05,0x47,0x75,0x6A,0x5A,0x21,0xB3,0xA3,0x18,0xCF,0x4E,0xF7,0x2E,0x57,0xB7,0x98,
+0x70,0x5E,0xC8,0xC4,0x78,0xB0,0x62,
+};
+
+
+/* subject:/O=Cybertrust, Inc/CN=Cybertrust Global Root */
+/* issuer :/O=Cybertrust, Inc/CN=Cybertrust Global Root */
+
+
+const unsigned char Cybertrust_Global_Root_certificate[933]={
+0x30,0x82,0x03,0xA1,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
+0x00,0x00,0x00,0x00,0x01,0x0F,0x85,0xAA,0x2D,0x48,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x3B,0x31,0x18,0x30,0x16,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0F,0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,
+0x2C,0x20,0x49,0x6E,0x63,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,
+0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,
+0x6C,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x31,0x35,
+0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x31,0x31,0x32,0x31,0x35,0x30,
+0x38,0x30,0x30,0x30,0x30,0x5A,0x30,0x3B,0x31,0x18,0x30,0x16,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x0F,0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,
+0x6E,0x63,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x43,0x79,0x62,
+0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,
+0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,
+0x82,0x01,0x01,0x00,0xF8,0xC8,0xBC,0xBD,0x14,0x50,0x66,0x13,0xFF,0xF0,0xD3,0x79,
+0xEC,0x23,0xF2,0xB7,0x1A,0xC7,0x8E,0x85,0xF1,0x12,0x73,0xA6,0x19,0xAA,0x10,0xDB,
+0x9C,0xA2,0x65,0x74,0x5A,0x77,0x3E,0x51,0x7D,0x56,0xF6,0xDC,0x23,0xB6,0xD4,0xED,
+0x5F,0x58,0xB1,0x37,0x4D,0xD5,0x49,0x0E,0x6E,0xF5,0x6A,0x87,0xD6,0xD2,0x8C,0xD2,
+0x27,0xC6,0xE2,0xFF,0x36,0x9F,0x98,0x65,0xA0,0x13,0x4E,0xC6,0x2A,0x64,0x9B,0xD5,
+0x90,0x12,0xCF,0x14,0x06,0xF4,0x3B,0xE3,0xD4,0x28,0xBE,0xE8,0x0E,0xF8,0xAB,0x4E,
+0x48,0x94,0x6D,0x8E,0x95,0x31,0x10,0x5C,0xED,0xA2,0x2D,0xBD,0xD5,0x3A,0x6D,0xB2,
+0x1C,0xBB,0x60,0xC0,0x46,0x4B,0x01,0xF5,0x49,0xAE,0x7E,0x46,0x8A,0xD0,0x74,0x8D,
+0xA1,0x0C,0x02,0xCE,0xEE,0xFC,0xE7,0x8F,0xB8,0x6B,0x66,0xF3,0x7F,0x44,0x00,0xBF,
+0x66,0x25,0x14,0x2B,0xDD,0x10,0x30,0x1D,0x07,0x96,0x3F,0x4D,0xF6,0x6B,0xB8,0x8F,
+0xB7,0x7B,0x0C,0xA5,0x38,0xEB,0xDE,0x47,0xDB,0xD5,0x5D,0x39,0xFC,0x88,0xA7,0xF3,
+0xD7,0x2A,0x74,0xF1,0xE8,0x5A,0xA2,0x3B,0x9F,0x50,0xBA,0xA6,0x8C,0x45,0x35,0xC2,
+0x50,0x65,0x95,0xDC,0x63,0x82,0xEF,0xDD,0xBF,0x77,0x4D,0x9C,0x62,0xC9,0x63,0x73,
+0x16,0xD0,0x29,0x0F,0x49,0xA9,0x48,0xF0,0xB3,0xAA,0xB7,0x6C,0xC5,0xA7,0x30,0x39,
+0x40,0x5D,0xAE,0xC4,0xE2,0x5D,0x26,0x53,0xF0,0xCE,0x1C,0x23,0x08,0x61,0xA8,0x94,
+0x19,0xBA,0x04,0x62,0x40,0xEC,0x1F,0x38,0x70,0x77,0x12,0x06,0x71,0xA7,0x30,0x18,
+0x5D,0x25,0x27,0xA5,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xA5,0x30,0x81,0xA2,0x30,
+0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,
+0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
+0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB6,0x08,0x7B,0x0D,0x7A,
+0xCC,0xAC,0x20,0x4C,0x86,0x56,0x32,0x5E,0xCF,0xAB,0x6E,0x85,0x2D,0x70,0x57,0x30,
+0x3F,0x06,0x03,0x55,0x1D,0x1F,0x04,0x38,0x30,0x36,0x30,0x34,0xA0,0x32,0xA0,0x30,
+0x86,0x2E,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x32,0x2E,0x70,0x75,
+0x62,0x6C,0x69,0x63,0x2D,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x63,
+0x72,0x6C,0x2F,0x63,0x74,0x2F,0x63,0x74,0x72,0x6F,0x6F,0x74,0x2E,0x63,0x72,0x6C,
+0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0xB6,0x08,0x7B,
+0x0D,0x7A,0xCC,0xAC,0x20,0x4C,0x86,0x56,0x32,0x5E,0xCF,0xAB,0x6E,0x85,0x2D,0x70,
+0x57,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
+0x03,0x82,0x01,0x01,0x00,0x56,0xEF,0x0A,0x23,0xA0,0x54,0x4E,0x95,0x97,0xC9,0xF8,
+0x89,0xDA,0x45,0xC1,0xD4,0xA3,0x00,0x25,0xF4,0x1F,0x13,0xAB,0xB7,0xA3,0x85,0x58,
+0x69,0xC2,0x30,0xAD,0xD8,0x15,0x8A,0x2D,0xE3,0xC9,0xCD,0x81,0x5A,0xF8,0x73,0x23,
+0x5A,0xA7,0x7C,0x05,0xF3,0xFD,0x22,0x3B,0x0E,0xD1,0x06,0xC4,0xDB,0x36,0x4C,0x73,
+0x04,0x8E,0xE5,0xB0,0x22,0xE4,0xC5,0xF3,0x2E,0xA5,0xD9,0x23,0xE3,0xB8,0x4E,0x4A,
+0x20,0xA7,0x6E,0x02,0x24,0x9F,0x22,0x60,0x67,0x7B,0x8B,0x1D,0x72,0x09,0xC5,0x31,
+0x5C,0xE9,0x79,0x9F,0x80,0x47,0x3D,0xAD,0xA1,0x0B,0x07,0x14,0x3D,0x47,0xFF,0x03,
+0x69,0x1A,0x0C,0x0B,0x44,0xE7,0x63,0x25,0xA7,0x7F,0xB2,0xC9,0xB8,0x76,0x84,0xED,
+0x23,0xF6,0x7D,0x07,0xAB,0x45,0x7E,0xD3,0xDF,0xB3,0xBF,0xE9,0x8A,0xB6,0xCD,0xA8,
+0xA2,0x67,0x2B,0x52,0xD5,0xB7,0x65,0xF0,0x39,0x4C,0x63,0xA0,0x91,0x79,0x93,0x52,
+0x0F,0x54,0xDD,0x83,0xBB,0x9F,0xD1,0x8F,0xA7,0x53,0x73,0xC3,0xCB,0xFF,0x30,0xEC,
+0x7C,0x04,0xB8,0xD8,0x44,0x1F,0x93,0x5F,0x71,0x09,0x22,0xB7,0x6E,0x3E,0xEA,0x1C,
+0x03,0x4E,0x9D,0x1A,0x20,0x61,0xFB,0x81,0x37,0xEC,0x5E,0xFC,0x0A,0x45,0xAB,0xD7,
+0xE7,0x17,0x55,0xD0,0xA0,0xEA,0x60,0x9B,0xA6,0xF6,0xE3,0x8C,0x5B,0x29,0xC2,0x06,
+0x60,0x14,0x9D,0x2D,0x97,0x4C,0xA9,0x93,0x15,0x9D,0x61,0xC4,0x01,0x5F,0x48,0xD6,
+0x58,0xBD,0x56,0x31,0x12,0x4E,0x11,0xC8,0x21,0xE0,0xB3,0x11,0x91,0x65,0xDB,0xB4,
+0xA6,0x88,0x38,0xCE,0x55,
+};
+
+
+/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root CA */
+/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root CA */
+
+
+const unsigned char DigiCert_Assured_ID_Root_CA_certificate[955]={
+0x30,0x82,0x03,0xB7,0x30,0x82,0x02,0x9F,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0C,
+0xE7,0xE0,0xE5,0x17,0xD8,0x46,0xFE,0x8F,0xE5,0x60,0xFC,0x1B,0xF0,0x30,0x39,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x65,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,
+0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,
+0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,
+0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,
+0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65,
+0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F,
+0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x31,0x30,0x30,
+0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x31,0x31,0x31,0x31,0x30,0x30,0x30,
+0x30,0x30,0x30,0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
+0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,
+0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,
+0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,
+0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,
+0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
+0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAD,0x0E,0x15,
+0xCE,0xE4,0x43,0x80,0x5C,0xB1,0x87,0xF3,0xB7,0x60,0xF9,0x71,0x12,0xA5,0xAE,0xDC,
+0x26,0x94,0x88,0xAA,0xF4,0xCE,0xF5,0x20,0x39,0x28,0x58,0x60,0x0C,0xF8,0x80,0xDA,
+0xA9,0x15,0x95,0x32,0x61,0x3C,0xB5,0xB1,0x28,0x84,0x8A,0x8A,0xDC,0x9F,0x0A,0x0C,
+0x83,0x17,0x7A,0x8F,0x90,0xAC,0x8A,0xE7,0x79,0x53,0x5C,0x31,0x84,0x2A,0xF6,0x0F,
+0x98,0x32,0x36,0x76,0xCC,0xDE,0xDD,0x3C,0xA8,0xA2,0xEF,0x6A,0xFB,0x21,0xF2,0x52,
+0x61,0xDF,0x9F,0x20,0xD7,0x1F,0xE2,0xB1,0xD9,0xFE,0x18,0x64,0xD2,0x12,0x5B,0x5F,
+0xF9,0x58,0x18,0x35,0xBC,0x47,0xCD,0xA1,0x36,0xF9,0x6B,0x7F,0xD4,0xB0,0x38,0x3E,
+0xC1,0x1B,0xC3,0x8C,0x33,0xD9,0xD8,0x2F,0x18,0xFE,0x28,0x0F,0xB3,0xA7,0x83,0xD6,
+0xC3,0x6E,0x44,0xC0,0x61,0x35,0x96,0x16,0xFE,0x59,0x9C,0x8B,0x76,0x6D,0xD7,0xF1,
+0xA2,0x4B,0x0D,0x2B,0xFF,0x0B,0x72,0xDA,0x9E,0x60,0xD0,0x8E,0x90,0x35,0xC6,0x78,
+0x55,0x87,0x20,0xA1,0xCF,0xE5,0x6D,0x0A,0xC8,0x49,0x7C,0x31,0x98,0x33,0x6C,0x22,
+0xE9,0x87,0xD0,0x32,0x5A,0xA2,0xBA,0x13,0x82,0x11,0xED,0x39,0x17,0x9D,0x99,0x3A,
+0x72,0xA1,0xE6,0xFA,0xA4,0xD9,0xD5,0x17,0x31,0x75,0xAE,0x85,0x7D,0x22,0xAE,0x3F,
+0x01,0x46,0x86,0xF6,0x28,0x79,0xC8,0xB1,0xDA,0xE4,0x57,0x17,0xC4,0x7E,0x1C,0x0E,
+0xB0,0xB4,0x92,0xA6,0x56,0xB3,0xBD,0xB2,0x97,0xED,0xAA,0xA7,0xF0,0xB7,0xC5,0xA8,
+0x3F,0x95,0x16,0xD0,0xFF,0xA1,0x96,0xEB,0x08,0x5F,0x18,0x77,0x4F,0x02,0x03,0x01,
+0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,
+0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,
+0x04,0x14,0x45,0xEB,0xA2,0xAF,0xF4,0x92,0xCB,0x82,0x31,0x2D,0x51,0x8B,0xA7,0xA7,
+0x21,0x9D,0xF3,0x6D,0xC8,0x0F,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,
+0x16,0x80,0x14,0x45,0xEB,0xA2,0xAF,0xF4,0x92,0xCB,0x82,0x31,0x2D,0x51,0x8B,0xA7,
+0xA7,0x21,0x9D,0xF3,0x6D,0xC8,0x0F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xA2,0x0E,0xBC,0xDF,0xE2,
+0xED,0xF0,0xE3,0x72,0x73,0x7A,0x64,0x94,0xBF,0xF7,0x72,0x66,0xD8,0x32,0xE4,0x42,
+0x75,0x62,0xAE,0x87,0xEB,0xF2,0xD5,0xD9,0xDE,0x56,0xB3,0x9F,0xCC,0xCE,0x14,0x28,
+0xB9,0x0D,0x97,0x60,0x5C,0x12,0x4C,0x58,0xE4,0xD3,0x3D,0x83,0x49,0x45,0x58,0x97,
+0x35,0x69,0x1A,0xA8,0x47,0xEA,0x56,0xC6,0x79,0xAB,0x12,0xD8,0x67,0x81,0x84,0xDF,
+0x7F,0x09,0x3C,0x94,0xE6,0xB8,0x26,0x2C,0x20,0xBD,0x3D,0xB3,0x28,0x89,0xF7,0x5F,
+0xFF,0x22,0xE2,0x97,0x84,0x1F,0xE9,0x65,0xEF,0x87,0xE0,0xDF,0xC1,0x67,0x49,0xB3,
+0x5D,0xEB,0xB2,0x09,0x2A,0xEB,0x26,0xED,0x78,0xBE,0x7D,0x3F,0x2B,0xF3,0xB7,0x26,
+0x35,0x6D,0x5F,0x89,0x01,0xB6,0x49,0x5B,0x9F,0x01,0x05,0x9B,0xAB,0x3D,0x25,0xC1,
+0xCC,0xB6,0x7F,0xC2,0xF1,0x6F,0x86,0xC6,0xFA,0x64,0x68,0xEB,0x81,0x2D,0x94,0xEB,
+0x42,0xB7,0xFA,0x8C,0x1E,0xDD,0x62,0xF1,0xBE,0x50,0x67,0xB7,0x6C,0xBD,0xF3,0xF1,
+0x1F,0x6B,0x0C,0x36,0x07,0x16,0x7F,0x37,0x7C,0xA9,0x5B,0x6D,0x7A,0xF1,0x12,0x46,
+0x60,0x83,0xD7,0x27,0x04,0xBE,0x4B,0xCE,0x97,0xBE,0xC3,0x67,0x2A,0x68,0x11,0xDF,
+0x80,0xE7,0x0C,0x33,0x66,0xBF,0x13,0x0D,0x14,0x6E,0xF3,0x7F,0x1F,0x63,0x10,0x1E,
+0xFA,0x8D,0x1B,0x25,0x6D,0x6C,0x8F,0xA5,0xB7,0x61,0x01,0xB1,0xD2,0xA3,0x26,0xA1,
+0x10,0x71,0x9D,0xAD,0xE2,0xC3,0xF9,0xC3,0x99,0x51,0xB7,0x2B,0x07,0x08,0xCE,0x2E,
+0xE6,0x50,0xB2,0xA7,0xFA,0x0A,0x45,0x2F,0xA2,0xF0,0xF2,
+};
+
+
+/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root CA */
+/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root CA */
+
+
+const unsigned char DigiCert_Global_Root_CA_certificate[947]={
+0x30,0x82,0x03,0xAF,0x30,0x82,0x02,0x97,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x08,
+0x3B,0xE0,0x56,0x90,0x42,0x46,0xB1,0xA1,0x75,0x6A,0xC9,0x59,0x91,0xC7,0x4A,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x61,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,
+0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,
+0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,
+0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,
+0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65,
+0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,
+0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30,
+0x30,0x5A,0x17,0x0D,0x33,0x31,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30,
+0x5A,0x30,0x61,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,
+0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,
+0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,
+0x6F,0x6D,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,
+0x69,0x43,0x65,0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,
+0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
+0x02,0x82,0x01,0x01,0x00,0xE2,0x3B,0xE1,0x11,0x72,0xDE,0xA8,0xA4,0xD3,0xA3,0x57,
+0xAA,0x50,0xA2,0x8F,0x0B,0x77,0x90,0xC9,0xA2,0xA5,0xEE,0x12,0xCE,0x96,0x5B,0x01,
+0x09,0x20,0xCC,0x01,0x93,0xA7,0x4E,0x30,0xB7,0x53,0xF7,0x43,0xC4,0x69,0x00,0x57,
+0x9D,0xE2,0x8D,0x22,0xDD,0x87,0x06,0x40,0x00,0x81,0x09,0xCE,0xCE,0x1B,0x83,0xBF,
+0xDF,0xCD,0x3B,0x71,0x46,0xE2,0xD6,0x66,0xC7,0x05,0xB3,0x76,0x27,0x16,0x8F,0x7B,
+0x9E,0x1E,0x95,0x7D,0xEE,0xB7,0x48,0xA3,0x08,0xDA,0xD6,0xAF,0x7A,0x0C,0x39,0x06,
+0x65,0x7F,0x4A,0x5D,0x1F,0xBC,0x17,0xF8,0xAB,0xBE,0xEE,0x28,0xD7,0x74,0x7F,0x7A,
+0x78,0x99,0x59,0x85,0x68,0x6E,0x5C,0x23,0x32,0x4B,0xBF,0x4E,0xC0,0xE8,0x5A,0x6D,
+0xE3,0x70,0xBF,0x77,0x10,0xBF,0xFC,0x01,0xF6,0x85,0xD9,0xA8,0x44,0x10,0x58,0x32,
+0xA9,0x75,0x18,0xD5,0xD1,0xA2,0xBE,0x47,0xE2,0x27,0x6A,0xF4,0x9A,0x33,0xF8,0x49,
+0x08,0x60,0x8B,0xD4,0x5F,0xB4,0x3A,0x84,0xBF,0xA1,0xAA,0x4A,0x4C,0x7D,0x3E,0xCF,
+0x4F,0x5F,0x6C,0x76,0x5E,0xA0,0x4B,0x37,0x91,0x9E,0xDC,0x22,0xE6,0x6D,0xCE,0x14,
+0x1A,0x8E,0x6A,0xCB,0xFE,0xCD,0xB3,0x14,0x64,0x17,0xC7,0x5B,0x29,0x9E,0x32,0xBF,
+0xF2,0xEE,0xFA,0xD3,0x0B,0x42,0xD4,0xAB,0xB7,0x41,0x32,0xDA,0x0C,0xD4,0xEF,0xF8,
+0x81,0xD5,0xBB,0x8D,0x58,0x3F,0xB5,0x1B,0xE8,0x49,0x28,0xA2,0x70,0xDA,0x31,0x04,
+0xDD,0xF7,0xB2,0x16,0xF2,0x4C,0x0A,0x4E,0x07,0xA8,0xED,0x4A,0x3D,0x5E,0xB5,0x7F,
+0xA3,0x90,0xC3,0xAF,0x27,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0E,
+0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0F,
+0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,
+0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x03,0xDE,0x50,0x35,0x56,0xD1,
+0x4C,0xBB,0x66,0xF0,0xA3,0xE2,0x1B,0x1B,0xC3,0x97,0xB2,0x3D,0xD1,0x55,0x30,0x1F,
+0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x03,0xDE,0x50,0x35,0x56,
+0xD1,0x4C,0xBB,0x66,0xF0,0xA3,0xE2,0x1B,0x1B,0xC3,0x97,0xB2,0x3D,0xD1,0x55,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,
+0x01,0x01,0x00,0xCB,0x9C,0x37,0xAA,0x48,0x13,0x12,0x0A,0xFA,0xDD,0x44,0x9C,0x4F,
+0x52,0xB0,0xF4,0xDF,0xAE,0x04,0xF5,0x79,0x79,0x08,0xA3,0x24,0x18,0xFC,0x4B,0x2B,
+0x84,0xC0,0x2D,0xB9,0xD5,0xC7,0xFE,0xF4,0xC1,0x1F,0x58,0xCB,0xB8,0x6D,0x9C,0x7A,
+0x74,0xE7,0x98,0x29,0xAB,0x11,0xB5,0xE3,0x70,0xA0,0xA1,0xCD,0x4C,0x88,0x99,0x93,
+0x8C,0x91,0x70,0xE2,0xAB,0x0F,0x1C,0xBE,0x93,0xA9,0xFF,0x63,0xD5,0xE4,0x07,0x60,
+0xD3,0xA3,0xBF,0x9D,0x5B,0x09,0xF1,0xD5,0x8E,0xE3,0x53,0xF4,0x8E,0x63,0xFA,0x3F,
+0xA7,0xDB,0xB4,0x66,0xDF,0x62,0x66,0xD6,0xD1,0x6E,0x41,0x8D,0xF2,0x2D,0xB5,0xEA,
+0x77,0x4A,0x9F,0x9D,0x58,0xE2,0x2B,0x59,0xC0,0x40,0x23,0xED,0x2D,0x28,0x82,0x45,
+0x3E,0x79,0x54,0x92,0x26,0x98,0xE0,0x80,0x48,0xA8,0x37,0xEF,0xF0,0xD6,0x79,0x60,
+0x16,0xDE,0xAC,0xE8,0x0E,0xCD,0x6E,0xAC,0x44,0x17,0x38,0x2F,0x49,0xDA,0xE1,0x45,
+0x3E,0x2A,0xB9,0x36,0x53,0xCF,0x3A,0x50,0x06,0xF7,0x2E,0xE8,0xC4,0x57,0x49,0x6C,
+0x61,0x21,0x18,0xD5,0x04,0xAD,0x78,0x3C,0x2C,0x3A,0x80,0x6B,0xA7,0xEB,0xAF,0x15,
+0x14,0xE9,0xD8,0x89,0xC1,0xB9,0x38,0x6C,0xE2,0x91,0x6C,0x8A,0xFF,0x64,0xB9,0x77,
+0x25,0x57,0x30,0xC0,0x1B,0x24,0xA3,0xE1,0xDC,0xE9,0xDF,0x47,0x7C,0xB5,0xB4,0x24,
+0x08,0x05,0x30,0xEC,0x2D,0xBD,0x0B,0xBF,0x45,0xBF,0x50,0xB9,0xA9,0xF3,0xEB,0x98,
+0x01,0x12,0xAD,0xC8,0x88,0xC6,0x98,0x34,0x5F,0x8D,0x0A,0x3C,0xC6,0xE9,0xD5,0x95,
+0x95,0x6D,0xDE,
+};
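+
+/*
+ * Illustrative sketch (not part of the generated certificate table): each
+ * array in this file holds one complete DER-encoded X.509 certificate, so a
+ * plausible way to consume an entry is to decode it with OpenSSL's
+ * d2i_X509(). The helper below is hypothetical and assumes OpenSSL headers
+ * are available; it is shown only to clarify how these byte arrays are meant
+ * to be used, not as the code that actually ships with this table.
+ */
+#if 0  /* example only */
+#include <openssl/x509.h>
+
+static X509* DecodeRootCertificate(const unsigned char* der, size_t der_len) {
+  /* d2i_X509 advances the pointer it is handed, so pass a local copy. */
+  const unsigned char* p = der;
+  return d2i_X509(NULL, &p, (long)der_len);  /* Returns NULL on parse failure. */
+}
+
+/* Usage (hypothetical):
+ *   X509* cert = DecodeRootCertificate(
+ *       DigiCert_Global_Root_CA_certificate,
+ *       sizeof(DigiCert_Global_Root_CA_certificate));
+ */
+#endif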
+
+
+/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert High Assurance EV Root CA */
+/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert High Assurance EV Root CA */
+
+
+const unsigned char DigiCert_High_Assurance_EV_Root_CA_certificate[969]={
+0x30,0x82,0x03,0xC5,0x30,0x82,0x02,0xAD,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x02,
+0xAC,0x5C,0x26,0x6A,0x0B,0x40,0x9B,0x8F,0x0B,0x79,0xF2,0xAE,0x46,0x25,0x77,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x6C,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,
+0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,
+0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,
+0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,
+0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x44,0x69,0x67,0x69,0x43,0x65,
+0x72,0x74,0x20,0x48,0x69,0x67,0x68,0x20,0x41,0x73,0x73,0x75,0x72,0x61,0x6E,0x63,
+0x65,0x20,0x45,0x56,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,
+0x30,0x36,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,
+0x31,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x6C,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,
+0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,
+0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x2B,0x30,
+0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,
+0x20,0x48,0x69,0x67,0x68,0x20,0x41,0x73,0x73,0x75,0x72,0x61,0x6E,0x63,0x65,0x20,
+0x45,0x56,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,
+0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xC6,0xCC,0xE5,0x73,0xE6,
+0xFB,0xD4,0xBB,0xE5,0x2D,0x2D,0x32,0xA6,0xDF,0xE5,0x81,0x3F,0xC9,0xCD,0x25,0x49,
+0xB6,0x71,0x2A,0xC3,0xD5,0x94,0x34,0x67,0xA2,0x0A,0x1C,0xB0,0x5F,0x69,0xA6,0x40,
+0xB1,0xC4,0xB7,0xB2,0x8F,0xD0,0x98,0xA4,0xA9,0x41,0x59,0x3A,0xD3,0xDC,0x94,0xD6,
+0x3C,0xDB,0x74,0x38,0xA4,0x4A,0xCC,0x4D,0x25,0x82,0xF7,0x4A,0xA5,0x53,0x12,0x38,
+0xEE,0xF3,0x49,0x6D,0x71,0x91,0x7E,0x63,0xB6,0xAB,0xA6,0x5F,0xC3,0xA4,0x84,0xF8,
+0x4F,0x62,0x51,0xBE,0xF8,0xC5,0xEC,0xDB,0x38,0x92,0xE3,0x06,0xE5,0x08,0x91,0x0C,
+0xC4,0x28,0x41,0x55,0xFB,0xCB,0x5A,0x89,0x15,0x7E,0x71,0xE8,0x35,0xBF,0x4D,0x72,
+0x09,0x3D,0xBE,0x3A,0x38,0x50,0x5B,0x77,0x31,0x1B,0x8D,0xB3,0xC7,0x24,0x45,0x9A,
+0xA7,0xAC,0x6D,0x00,0x14,0x5A,0x04,0xB7,0xBA,0x13,0xEB,0x51,0x0A,0x98,0x41,0x41,
+0x22,0x4E,0x65,0x61,0x87,0x81,0x41,0x50,0xA6,0x79,0x5C,0x89,0xDE,0x19,0x4A,0x57,
+0xD5,0x2E,0xE6,0x5D,0x1C,0x53,0x2C,0x7E,0x98,0xCD,0x1A,0x06,0x16,0xA4,0x68,0x73,
+0xD0,0x34,0x04,0x13,0x5C,0xA1,0x71,0xD3,0x5A,0x7C,0x55,0xDB,0x5E,0x64,0xE1,0x37,
+0x87,0x30,0x56,0x04,0xE5,0x11,0xB4,0x29,0x80,0x12,0xF1,0x79,0x39,0x88,0xA2,0x02,
+0x11,0x7C,0x27,0x66,0xB7,0x88,0xB7,0x78,0xF2,0xCA,0x0A,0xA8,0x38,0xAB,0x0A,0x64,
+0xC2,0xBF,0x66,0x5D,0x95,0x84,0xC1,0xA1,0x25,0x1E,0x87,0x5D,0x1A,0x50,0x0B,0x20,
+0x12,0xCC,0x41,0xBB,0x6E,0x0B,0x51,0x38,0xB8,0x4B,0xCB,0x02,0x03,0x01,0x00,0x01,
+0xA3,0x63,0x30,0x61,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
+0x03,0x02,0x01,0x86,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,
+0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,
+0xB1,0x3E,0xC3,0x69,0x03,0xF8,0xBF,0x47,0x01,0xD4,0x98,0x26,0x1A,0x08,0x02,0xEF,
+0x63,0x64,0x2B,0xC3,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,
+0x14,0xB1,0x3E,0xC3,0x69,0x03,0xF8,0xBF,0x47,0x01,0xD4,0x98,0x26,0x1A,0x08,0x02,
+0xEF,0x63,0x64,0x2B,0xC3,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x1C,0x1A,0x06,0x97,0xDC,0xD7,0x9C,
+0x9F,0x3C,0x88,0x66,0x06,0x08,0x57,0x21,0xDB,0x21,0x47,0xF8,0x2A,0x67,0xAA,0xBF,
+0x18,0x32,0x76,0x40,0x10,0x57,0xC1,0x8A,0xF3,0x7A,0xD9,0x11,0x65,0x8E,0x35,0xFA,
+0x9E,0xFC,0x45,0xB5,0x9E,0xD9,0x4C,0x31,0x4B,0xB8,0x91,0xE8,0x43,0x2C,0x8E,0xB3,
+0x78,0xCE,0xDB,0xE3,0x53,0x79,0x71,0xD6,0xE5,0x21,0x94,0x01,0xDA,0x55,0x87,0x9A,
+0x24,0x64,0xF6,0x8A,0x66,0xCC,0xDE,0x9C,0x37,0xCD,0xA8,0x34,0xB1,0x69,0x9B,0x23,
+0xC8,0x9E,0x78,0x22,0x2B,0x70,0x43,0xE3,0x55,0x47,0x31,0x61,0x19,0xEF,0x58,0xC5,
+0x85,0x2F,0x4E,0x30,0xF6,0xA0,0x31,0x16,0x23,0xC8,0xE7,0xE2,0x65,0x16,0x33,0xCB,
+0xBF,0x1A,0x1B,0xA0,0x3D,0xF8,0xCA,0x5E,0x8B,0x31,0x8B,0x60,0x08,0x89,0x2D,0x0C,
+0x06,0x5C,0x52,0xB7,0xC4,0xF9,0x0A,0x98,0xD1,0x15,0x5F,0x9F,0x12,0xBE,0x7C,0x36,
+0x63,0x38,0xBD,0x44,0xA4,0x7F,0xE4,0x26,0x2B,0x0A,0xC4,0x97,0x69,0x0D,0xE9,0x8C,
+0xE2,0xC0,0x10,0x57,0xB8,0xC8,0x76,0x12,0x91,0x55,0xF2,0x48,0x69,0xD8,0xBC,0x2A,
+0x02,0x5B,0x0F,0x44,0xD4,0x20,0x31,0xDB,0xF4,0xBA,0x70,0x26,0x5D,0x90,0x60,0x9E,
+0xBC,0x4B,0x17,0x09,0x2F,0xB4,0xCB,0x1E,0x43,0x68,0xC9,0x07,0x27,0xC1,0xD2,0x5C,
+0xF7,0xEA,0x21,0xB9,0x68,0x12,0x9C,0x3C,0x9C,0xBF,0x9E,0xFC,0x80,0x5C,0x9B,0x63,
+0xCD,0xEC,0x47,0xAA,0x25,0x27,0x67,0xA0,0x37,0xF3,0x00,0x82,0x7D,0x54,0xD7,0xA9,
+0xF8,0xE9,0x2E,0x13,0xA3,0x77,0xE8,0x1F,0x4A,
+};
+
+
+/* subject:/O=Entrust.net/OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Certification Authority (2048) */
+/* issuer :/O=Entrust.net/OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Certification Authority (2048) */
+
+
+const unsigned char Entrust_net_Premium_2048_Secure_Server_CA_certificate[1120]={
+0x30,0x82,0x04,0x5C,0x30,0x82,0x03,0x44,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x38,
+0x63,0xB9,0x66,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x30,0x81,0xB4,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,
+0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x40,0x30,0x3E,0x06,
+0x03,0x55,0x04,0x0B,0x14,0x37,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,
+0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x5F,0x32,0x30,0x34,0x38,0x20,0x69,
+0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28,
+0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30,
+0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39,
+0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,
+0x69,0x74,0x65,0x64,0x31,0x33,0x30,0x31,0x06,0x03,0x55,0x04,0x03,0x13,0x2A,0x45,
+0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x20,0x28,0x32,0x30,0x34,0x38,0x29,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,
+0x32,0x32,0x34,0x31,0x37,0x35,0x30,0x35,0x31,0x5A,0x17,0x0D,0x31,0x39,0x31,0x32,
+0x32,0x34,0x31,0x38,0x32,0x30,0x35,0x31,0x5A,0x30,0x81,0xB4,0x31,0x14,0x30,0x12,
+0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,
+0x65,0x74,0x31,0x40,0x30,0x3E,0x06,0x03,0x55,0x04,0x0B,0x14,0x37,0x77,0x77,0x77,
+0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,
+0x5F,0x32,0x30,0x34,0x38,0x20,0x69,0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,0x79,
+0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,0x69,
+0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,0x28,
+0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,
+0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x33,0x30,0x31,0x06,
+0x03,0x55,0x04,0x03,0x13,0x2A,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,
+0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,
+0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x28,0x32,0x30,0x34,0x38,0x29,
+0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,
+0x00,0xAD,0x4D,0x4B,0xA9,0x12,0x86,0xB2,0xEA,0xA3,0x20,0x07,0x15,0x16,0x64,0x2A,
+0x2B,0x4B,0xD1,0xBF,0x0B,0x4A,0x4D,0x8E,0xED,0x80,0x76,0xA5,0x67,0xB7,0x78,0x40,
+0xC0,0x73,0x42,0xC8,0x68,0xC0,0xDB,0x53,0x2B,0xDD,0x5E,0xB8,0x76,0x98,0x35,0x93,
+0x8B,0x1A,0x9D,0x7C,0x13,0x3A,0x0E,0x1F,0x5B,0xB7,0x1E,0xCF,0xE5,0x24,0x14,0x1E,
+0xB1,0x81,0xA9,0x8D,0x7D,0xB8,0xCC,0x6B,0x4B,0x03,0xF1,0x02,0x0C,0xDC,0xAB,0xA5,
+0x40,0x24,0x00,0x7F,0x74,0x94,0xA1,0x9D,0x08,0x29,0xB3,0x88,0x0B,0xF5,0x87,0x77,
+0x9D,0x55,0xCD,0xE4,0xC3,0x7E,0xD7,0x6A,0x64,0xAB,0x85,0x14,0x86,0x95,0x5B,0x97,
+0x32,0x50,0x6F,0x3D,0xC8,0xBA,0x66,0x0C,0xE3,0xFC,0xBD,0xB8,0x49,0xC1,0x76,0x89,
+0x49,0x19,0xFD,0xC0,0xA8,0xBD,0x89,0xA3,0x67,0x2F,0xC6,0x9F,0xBC,0x71,0x19,0x60,
+0xB8,0x2D,0xE9,0x2C,0xC9,0x90,0x76,0x66,0x7B,0x94,0xE2,0xAF,0x78,0xD6,0x65,0x53,
+0x5D,0x3C,0xD6,0x9C,0xB2,0xCF,0x29,0x03,0xF9,0x2F,0xA4,0x50,0xB2,0xD4,0x48,0xCE,
+0x05,0x32,0x55,0x8A,0xFD,0xB2,0x64,0x4C,0x0E,0xE4,0x98,0x07,0x75,0xDB,0x7F,0xDF,
+0xB9,0x08,0x55,0x60,0x85,0x30,0x29,0xF9,0x7B,0x48,0xA4,0x69,0x86,0xE3,0x35,0x3F,
+0x1E,0x86,0x5D,0x7A,0x7A,0x15,0xBD,0xEF,0x00,0x8E,0x15,0x22,0x54,0x17,0x00,0x90,
+0x26,0x93,0xBC,0x0E,0x49,0x68,0x91,0xBF,0xF8,0x47,0xD3,0x9D,0x95,0x42,0xC1,0x0E,
+0x4D,0xDF,0x6F,0x26,0xCF,0xC3,0x18,0x21,0x62,0x66,0x43,0x70,0xD6,0xD5,0xC0,0x07,
+0xE1,0x02,0x03,0x01,0x00,0x01,0xA3,0x74,0x30,0x72,0x30,0x11,0x06,0x09,0x60,0x86,
+0x48,0x01,0x86,0xF8,0x42,0x01,0x01,0x04,0x04,0x03,0x02,0x00,0x07,0x30,0x1F,0x06,
+0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x55,0xE4,0x81,0xD1,0x11,0x80,
+0xBE,0xD8,0x89,0xB9,0x08,0xA3,0x31,0xF9,0xA1,0x24,0x09,0x16,0xB9,0x70,0x30,0x1D,
+0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x55,0xE4,0x81,0xD1,0x11,0x80,0xBE,
+0xD8,0x89,0xB9,0x08,0xA3,0x31,0xF9,0xA1,0x24,0x09,0x16,0xB9,0x70,0x30,0x1D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,0x41,0x00,0x04,0x10,0x30,0x0E,0x1B,0x08,
+0x56,0x35,0x2E,0x30,0x3A,0x34,0x2E,0x30,0x03,0x02,0x04,0x90,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,
+0x59,0x47,0xAC,0x21,0x84,0x8A,0x17,0xC9,0x9C,0x89,0x53,0x1E,0xBA,0x80,0x85,0x1A,
+0xC6,0x3C,0x4E,0x3E,0xB1,0x9C,0xB6,0x7C,0xC6,0x92,0x5D,0x18,0x64,0x02,0xE3,0xD3,
+0x06,0x08,0x11,0x61,0x7C,0x63,0xE3,0x2B,0x9D,0x31,0x03,0x70,0x76,0xD2,0xA3,0x28,
+0xA0,0xF4,0xBB,0x9A,0x63,0x73,0xED,0x6D,0xE5,0x2A,0xDB,0xED,0x14,0xA9,0x2B,0xC6,
+0x36,0x11,0xD0,0x2B,0xEB,0x07,0x8B,0xA5,0xDA,0x9E,0x5C,0x19,0x9D,0x56,0x12,0xF5,
+0x54,0x29,0xC8,0x05,0xED,0xB2,0x12,0x2A,0x8D,0xF4,0x03,0x1B,0xFF,0xE7,0x92,0x10,
+0x87,0xB0,0x3A,0xB5,0xC3,0x9D,0x05,0x37,0x12,0xA3,0xC7,0xF4,0x15,0xB9,0xD5,0xA4,
+0x39,0x16,0x9B,0x53,0x3A,0x23,0x91,0xF1,0xA8,0x82,0xA2,0x6A,0x88,0x68,0xC1,0x79,
+0x02,0x22,0xBC,0xAA,0xA6,0xD6,0xAE,0xDF,0xB0,0x14,0x5F,0xB8,0x87,0xD0,0xDD,0x7C,
+0x7F,0x7B,0xFF,0xAF,0x1C,0xCF,0xE6,0xDB,0x07,0xAD,0x5E,0xDB,0x85,0x9D,0xD0,0x2B,
+0x0D,0x33,0xDB,0x04,0xD1,0xE6,0x49,0x40,0x13,0x2B,0x76,0xFB,0x3E,0xE9,0x9C,0x89,
+0x0F,0x15,0xCE,0x18,0xB0,0x85,0x78,0x21,0x4F,0x6B,0x4F,0x0E,0xFA,0x36,0x67,0xCD,
+0x07,0xF2,0xFF,0x08,0xD0,0xE2,0xDE,0xD9,0xBF,0x2A,0xAF,0xB8,0x87,0x86,0x21,0x3C,
+0x04,0xCA,0xB7,0x94,0x68,0x7F,0xCF,0x3C,0xE9,0x98,0xD7,0x38,0xFF,0xEC,0xC0,0xD9,
+0x50,0xF0,0x2E,0x4B,0x58,0xAE,0x46,0x6F,0xD0,0x2E,0xC3,0x60,0xDA,0x72,0x55,0x72,
+0xBD,0x4C,0x45,0x9E,0x61,0xBA,0xBF,0x84,0x81,0x92,0x03,0xD1,0xD2,0x69,0x7C,0xC5,
+};
+
+
+/* subject:/C=US/O=Entrust.net/OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Secure Server Certification Authority */
+/* issuer :/C=US/O=Entrust.net/OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Secure Server Certification Authority */
+
+
+const unsigned char Entrust_net_Secure_Server_CA_certificate[1244]={
+0x30,0x82,0x04,0xD8,0x30,0x82,0x04,0x41,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x37,
+0x4A,0xD2,0x43,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x30,0x81,0xC3,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
+0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x45,0x6E,0x74,
+0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x3B,0x30,0x39,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x32,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,
+0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,
+0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,
+0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,
+0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,
+0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x3A,0x30,0x38,
+0x06,0x03,0x55,0x04,0x03,0x13,0x31,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,
+0x65,0x74,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x53,0x65,0x72,0x76,0x65,0x72,
+0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,
+0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x35,
+0x32,0x35,0x31,0x36,0x30,0x39,0x34,0x30,0x5A,0x17,0x0D,0x31,0x39,0x30,0x35,0x32,
+0x35,0x31,0x36,0x33,0x39,0x34,0x30,0x5A,0x30,0x81,0xC3,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x0B,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x3B,
+0x30,0x39,0x06,0x03,0x55,0x04,0x0B,0x13,0x32,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,
+0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x6E,0x63,
+0x6F,0x72,0x70,0x2E,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,
+0x6D,0x69,0x74,0x73,0x20,0x6C,0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x1C,0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,
+0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,
+0x65,0x64,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x03,0x13,0x31,0x45,0x6E,0x74,
+0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,
+0x53,0x65,0x72,0x76,0x65,0x72,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x81,
+0x9D,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,
+0x03,0x81,0x8B,0x00,0x30,0x81,0x87,0x02,0x81,0x81,0x00,0xCD,0x28,0x83,0x34,0x54,
+0x1B,0x89,0xF3,0x0F,0xAF,0x37,0x91,0x31,0xFF,0xAF,0x31,0x60,0xC9,0xA8,0xE8,0xB2,
+0x10,0x68,0xED,0x9F,0xE7,0x93,0x36,0xF1,0x0A,0x64,0xBB,0x47,0xF5,0x04,0x17,0x3F,
+0x23,0x47,0x4D,0xC5,0x27,0x19,0x81,0x26,0x0C,0x54,0x72,0x0D,0x88,0x2D,0xD9,0x1F,
+0x9A,0x12,0x9F,0xBC,0xB3,0x71,0xD3,0x80,0x19,0x3F,0x47,0x66,0x7B,0x8C,0x35,0x28,
+0xD2,0xB9,0x0A,0xDF,0x24,0xDA,0x9C,0xD6,0x50,0x79,0x81,0x7A,0x5A,0xD3,0x37,0xF7,
+0xC2,0x4A,0xD8,0x29,0x92,0x26,0x64,0xD1,0xE4,0x98,0x6C,0x3A,0x00,0x8A,0xF5,0x34,
+0x9B,0x65,0xF8,0xED,0xE3,0x10,0xFF,0xFD,0xB8,0x49,0x58,0xDC,0xA0,0xDE,0x82,0x39,
+0x6B,0x81,0xB1,0x16,0x19,0x61,0xB9,0x54,0xB6,0xE6,0x43,0x02,0x01,0x03,0xA3,0x82,
+0x01,0xD7,0x30,0x82,0x01,0xD3,0x30,0x11,0x06,0x09,0x60,0x86,0x48,0x01,0x86,0xF8,
+0x42,0x01,0x01,0x04,0x04,0x03,0x02,0x00,0x07,0x30,0x82,0x01,0x19,0x06,0x03,0x55,
+0x1D,0x1F,0x04,0x82,0x01,0x10,0x30,0x82,0x01,0x0C,0x30,0x81,0xDE,0xA0,0x81,0xDB,
+0xA0,0x81,0xD8,0xA4,0x81,0xD5,0x30,0x81,0xD2,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
+0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x0B,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x3B,0x30,0x39,
+0x06,0x03,0x55,0x04,0x0B,0x13,0x32,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,
+0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x6E,0x63,0x6F,0x72,
+0x70,0x2E,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,0x6D,0x69,
+0x74,0x73,0x20,0x6C,0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x1C,0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,0x6E,0x74,
+0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,
+0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x03,0x13,0x31,0x45,0x6E,0x74,0x72,0x75,
+0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x53,0x65,
+0x72,0x76,0x65,0x72,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,
+0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x31,0x0D,0x30,0x0B,
+0x06,0x03,0x55,0x04,0x03,0x13,0x04,0x43,0x52,0x4C,0x31,0x30,0x29,0xA0,0x27,0xA0,
+0x25,0x86,0x23,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x65,0x6E,
+0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x52,0x4C,0x2F,0x6E,0x65,
+0x74,0x31,0x2E,0x63,0x72,0x6C,0x30,0x2B,0x06,0x03,0x55,0x1D,0x10,0x04,0x24,0x30,
+0x22,0x80,0x0F,0x31,0x39,0x39,0x39,0x30,0x35,0x32,0x35,0x31,0x36,0x30,0x39,0x34,
+0x30,0x5A,0x81,0x0F,0x32,0x30,0x31,0x39,0x30,0x35,0x32,0x35,0x31,0x36,0x30,0x39,
+0x34,0x30,0x5A,0x30,0x0B,0x06,0x03,0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0x06,
+0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0xF0,0x17,0x62,
+0x13,0x55,0x3D,0xB3,0xFF,0x0A,0x00,0x6B,0xFB,0x50,0x84,0x97,0xF3,0xED,0x62,0xD0,
+0x1A,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xF0,0x17,0x62,0x13,
+0x55,0x3D,0xB3,0xFF,0x0A,0x00,0x6B,0xFB,0x50,0x84,0x97,0xF3,0xED,0x62,0xD0,0x1A,
+0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x19,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,0x41,0x00,0x04,0x0C,0x30,0x0A,0x1B,
+0x04,0x56,0x34,0x2E,0x30,0x03,0x02,0x04,0x90,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x81,0x81,0x00,0x90,0xDC,0x30,0x02,
+0xFA,0x64,0x74,0xC2,0xA7,0x0A,0xA5,0x7C,0x21,0x8D,0x34,0x17,0xA8,0xFB,0x47,0x0E,
+0xFF,0x25,0x7C,0x8D,0x13,0x0A,0xFB,0xE4,0x98,0xB5,0xEF,0x8C,0xF8,0xC5,0x10,0x0D,
+0xF7,0x92,0xBE,0xF1,0xC3,0xD5,0xD5,0x95,0x6A,0x04,0xBB,0x2C,0xCE,0x26,0x36,0x65,
+0xC8,0x31,0xC6,0xE7,0xEE,0x3F,0xE3,0x57,0x75,0x84,0x7A,0x11,0xEF,0x46,0x4F,0x18,
+0xF4,0xD3,0x98,0xBB,0xA8,0x87,0x32,0xBA,0x72,0xF6,0x3C,0xE2,0x3D,0x9F,0xD7,0x1D,
+0xD9,0xC3,0x60,0x43,0x8C,0x58,0x0E,0x22,0x96,0x2F,0x62,0xA3,0x2C,0x1F,0xBA,0xAD,
+0x05,0xEF,0xAB,0x32,0x78,0x87,0xA0,0x54,0x73,0x19,0xB5,0x5C,0x05,0xF9,0x52,0x3E,
+0x6D,0x2D,0x45,0x0B,0xF7,0x0A,0x93,0xEA,0xED,0x06,0xF9,0xB2,
+};
+
+
+/* subject:/C=US/O=Entrust, Inc./OU=www.entrust.net/CPS is incorporated by reference/OU=(c) 2006 Entrust, Inc./CN=Entrust Root Certification Authority */
+/* issuer :/C=US/O=Entrust, Inc./OU=www.entrust.net/CPS is incorporated by reference/OU=(c) 2006 Entrust, Inc./CN=Entrust Root Certification Authority */
+
+
+const unsigned char Entrust_Root_Certification_Authority_certificate[1173]={
+0x30,0x82,0x04,0x91,0x30,0x82,0x03,0x79,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x45,
+0x6B,0x50,0x54,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x30,0x81,0xB0,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
+0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x45,0x6E,0x74,
+0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x30,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,
+0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x73,0x20,0x69,0x6E,0x63,0x6F,
+0x72,0x70,0x6F,0x72,0x61,0x74,0x65,0x64,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x65,
+0x72,0x65,0x6E,0x63,0x65,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,
+0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x03,0x13,
+0x24,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,
+0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,
+0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x32,0x37,0x32,
+0x30,0x32,0x33,0x34,0x32,0x5A,0x17,0x0D,0x32,0x36,0x31,0x31,0x32,0x37,0x32,0x30,
+0x35,0x33,0x34,0x32,0x5A,0x30,0x81,0xB0,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,
+0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,0x30,
+0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,
+0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x20,0x69,0x73,0x20,0x69,
+0x6E,0x63,0x6F,0x72,0x70,0x6F,0x72,0x61,0x74,0x65,0x64,0x20,0x62,0x79,0x20,0x72,
+0x65,0x66,0x65,0x72,0x65,0x6E,0x63,0x65,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x16,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x45,0x6E,0x74,0x72,
+0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,
+0x04,0x03,0x13,0x24,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,
+0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,
+0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,
+0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xB6,0x95,0xB6,0x43,0x42,0xFA,0xC6,
+0x6D,0x2A,0x6F,0x48,0xDF,0x94,0x4C,0x39,0x57,0x05,0xEE,0xC3,0x79,0x11,0x41,0x68,
+0x36,0xED,0xEC,0xFE,0x9A,0x01,0x8F,0xA1,0x38,0x28,0xFC,0xF7,0x10,0x46,0x66,0x2E,
+0x4D,0x1E,0x1A,0xB1,0x1A,0x4E,0xC6,0xD1,0xC0,0x95,0x88,0xB0,0xC9,0xFF,0x31,0x8B,
+0x33,0x03,0xDB,0xB7,0x83,0x7B,0x3E,0x20,0x84,0x5E,0xED,0xB2,0x56,0x28,0xA7,0xF8,
+0xE0,0xB9,0x40,0x71,0x37,0xC5,0xCB,0x47,0x0E,0x97,0x2A,0x68,0xC0,0x22,0x95,0x62,
+0x15,0xDB,0x47,0xD9,0xF5,0xD0,0x2B,0xFF,0x82,0x4B,0xC9,0xAD,0x3E,0xDE,0x4C,0xDB,
+0x90,0x80,0x50,0x3F,0x09,0x8A,0x84,0x00,0xEC,0x30,0x0A,0x3D,0x18,0xCD,0xFB,0xFD,
+0x2A,0x59,0x9A,0x23,0x95,0x17,0x2C,0x45,0x9E,0x1F,0x6E,0x43,0x79,0x6D,0x0C,0x5C,
+0x98,0xFE,0x48,0xA7,0xC5,0x23,0x47,0x5C,0x5E,0xFD,0x6E,0xE7,0x1E,0xB4,0xF6,0x68,
+0x45,0xD1,0x86,0x83,0x5B,0xA2,0x8A,0x8D,0xB1,0xE3,0x29,0x80,0xFE,0x25,0x71,0x88,
+0xAD,0xBE,0xBC,0x8F,0xAC,0x52,0x96,0x4B,0xAA,0x51,0x8D,0xE4,0x13,0x31,0x19,0xE8,
+0x4E,0x4D,0x9F,0xDB,0xAC,0xB3,0x6A,0xD5,0xBC,0x39,0x54,0x71,0xCA,0x7A,0x7A,0x7F,
+0x90,0xDD,0x7D,0x1D,0x80,0xD9,0x81,0xBB,0x59,0x26,0xC2,0x11,0xFE,0xE6,0x93,0xE2,
+0xF7,0x80,0xE4,0x65,0xFB,0x34,0x37,0x0E,0x29,0x80,0x70,0x4D,0xAF,0x38,0x86,0x2E,
+0x9E,0x7F,0x57,0xAF,0x9E,0x17,0xAE,0xEB,0x1C,0xCB,0x28,0x21,0x5F,0xB6,0x1C,0xD8,
+0xE7,0xA2,0x04,0x22,0xF9,0xD3,0xDA,0xD8,0xCB,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,
+0xB0,0x30,0x81,0xAD,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
+0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,
+0x30,0x03,0x01,0x01,0xFF,0x30,0x2B,0x06,0x03,0x55,0x1D,0x10,0x04,0x24,0x30,0x22,
+0x80,0x0F,0x32,0x30,0x30,0x36,0x31,0x31,0x32,0x37,0x32,0x30,0x32,0x33,0x34,0x32,
+0x5A,0x81,0x0F,0x32,0x30,0x32,0x36,0x31,0x31,0x32,0x37,0x32,0x30,0x35,0x33,0x34,
+0x32,0x5A,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x68,
+0x90,0xE4,0x67,0xA4,0xA6,0x53,0x80,0xC7,0x86,0x66,0xA4,0xF1,0xF7,0x4B,0x43,0xFB,
+0x84,0xBD,0x6D,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x68,0x90,
+0xE4,0x67,0xA4,0xA6,0x53,0x80,0xC7,0x86,0x66,0xA4,0xF1,0xF7,0x4B,0x43,0xFB,0x84,
+0xBD,0x6D,0x30,0x1D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,0x41,0x00,0x04,
+0x10,0x30,0x0E,0x1B,0x08,0x56,0x37,0x2E,0x31,0x3A,0x34,0x2E,0x30,0x03,0x02,0x04,
+0x90,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
+0x03,0x82,0x01,0x01,0x00,0x93,0xD4,0x30,0xB0,0xD7,0x03,0x20,0x2A,0xD0,0xF9,0x63,
+0xE8,0x91,0x0C,0x05,0x20,0xA9,0x5F,0x19,0xCA,0x7B,0x72,0x4E,0xD4,0xB1,0xDB,0xD0,
+0x96,0xFB,0x54,0x5A,0x19,0x2C,0x0C,0x08,0xF7,0xB2,0xBC,0x85,0xA8,0x9D,0x7F,0x6D,
+0x3B,0x52,0xB3,0x2A,0xDB,0xE7,0xD4,0x84,0x8C,0x63,0xF6,0x0F,0xCB,0x26,0x01,0x91,
+0x50,0x6C,0xF4,0x5F,0x14,0xE2,0x93,0x74,0xC0,0x13,0x9E,0x30,0x3A,0x50,0xE3,0xB4,
+0x60,0xC5,0x1C,0xF0,0x22,0x44,0x8D,0x71,0x47,0xAC,0xC8,0x1A,0xC9,0xE9,0x9B,0x9A,
+0x00,0x60,0x13,0xFF,0x70,0x7E,0x5F,0x11,0x4D,0x49,0x1B,0xB3,0x15,0x52,0x7B,0xC9,
+0x54,0xDA,0xBF,0x9D,0x95,0xAF,0x6B,0x9A,0xD8,0x9E,0xE9,0xF1,0xE4,0x43,0x8D,0xE2,
+0x11,0x44,0x3A,0xBF,0xAF,0xBD,0x83,0x42,0x73,0x52,0x8B,0xAA,0xBB,0xA7,0x29,0xCF,
+0xF5,0x64,0x1C,0x0A,0x4D,0xD1,0xBC,0xAA,0xAC,0x9F,0x2A,0xD0,0xFF,0x7F,0x7F,0xDA,
+0x7D,0xEA,0xB1,0xED,0x30,0x25,0xC1,0x84,0xDA,0x34,0xD2,0x5B,0x78,0x83,0x56,0xEC,
+0x9C,0x36,0xC3,0x26,0xE2,0x11,0xF6,0x67,0x49,0x1D,0x92,0xAB,0x8C,0xFB,0xEB,0xFF,
+0x7A,0xEE,0x85,0x4A,0xA7,0x50,0x80,0xF0,0xA7,0x5C,0x4A,0x94,0x2E,0x5F,0x05,0x99,
+0x3C,0x52,0x41,0xE0,0xCD,0xB4,0x63,0xCF,0x01,0x43,0xBA,0x9C,0x83,0xDC,0x8F,0x60,
+0x3B,0xF3,0x5A,0xB4,0xB4,0x7B,0xAE,0xDA,0x0B,0x90,0x38,0x75,0xEF,0x81,0x1D,0x66,
+0xD2,0xF7,0x57,0x70,0x36,0xB3,0xBF,0xFC,0x28,0xAF,0x71,0x25,0x85,0x5B,0x13,0xFE,
+0x1E,0x7F,0x5A,0xB4,0x3C,
+};
+
+
+/* subject:/C=US/O=Equifax/OU=Equifax Secure Certificate Authority */
+/* issuer :/C=US/O=Equifax/OU=Equifax Secure Certificate Authority */
+
+
+const unsigned char Equifax_Secure_CA_certificate[804]={
+0x30,0x82,0x03,0x20,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x35,
+0xDE,0xF4,0xCF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69,
+0x66,0x61,0x78,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71,
+0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x38,0x32,0x32,0x31,0x36,0x34,0x31,
+0x35,0x31,0x5A,0x17,0x0D,0x31,0x38,0x30,0x38,0x32,0x32,0x31,0x36,0x34,0x31,0x35,
+0x31,0x5A,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69,
+0x66,0x61,0x78,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71,
+0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xC1,
+0x5D,0xB1,0x58,0x67,0x08,0x62,0xEE,0xA0,0x9A,0x2D,0x1F,0x08,0x6D,0x91,0x14,0x68,
+0x98,0x0A,0x1E,0xFE,0xDA,0x04,0x6F,0x13,0x84,0x62,0x21,0xC3,0xD1,0x7C,0xCE,0x9F,
+0x05,0xE0,0xB8,0x01,0xF0,0x4E,0x34,0xEC,0xE2,0x8A,0x95,0x04,0x64,0xAC,0xF1,0x6B,
+0x53,0x5F,0x05,0xB3,0xCB,0x67,0x80,0xBF,0x42,0x02,0x8E,0xFE,0xDD,0x01,0x09,0xEC,
+0xE1,0x00,0x14,0x4F,0xFC,0xFB,0xF0,0x0C,0xDD,0x43,0xBA,0x5B,0x2B,0xE1,0x1F,0x80,
+0x70,0x99,0x15,0x57,0x93,0x16,0xF1,0x0F,0x97,0x6A,0xB7,0xC2,0x68,0x23,0x1C,0xCC,
+0x4D,0x59,0x30,0xAC,0x51,0x1E,0x3B,0xAF,0x2B,0xD6,0xEE,0x63,0x45,0x7B,0xC5,0xD9,
+0x5F,0x50,0xD2,0xE3,0x50,0x0F,0x3A,0x88,0xE7,0xBF,0x14,0xFD,0xE0,0xC7,0xB9,0x02,
+0x03,0x01,0x00,0x01,0xA3,0x82,0x01,0x09,0x30,0x82,0x01,0x05,0x30,0x70,0x06,0x03,
+0x55,0x1D,0x1F,0x04,0x69,0x30,0x67,0x30,0x65,0xA0,0x63,0xA0,0x61,0xA4,0x5F,0x30,
+0x5D,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x10,
+0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69,0x66,0x61,0x78,
+0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71,0x75,0x69,0x66,
+0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69,0x66,
+0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x31,
+0x0D,0x30,0x0B,0x06,0x03,0x55,0x04,0x03,0x13,0x04,0x43,0x52,0x4C,0x31,0x30,0x1A,
+0x06,0x03,0x55,0x1D,0x10,0x04,0x13,0x30,0x11,0x81,0x0F,0x32,0x30,0x31,0x38,0x30,
+0x38,0x32,0x32,0x31,0x36,0x34,0x31,0x35,0x31,0x5A,0x30,0x0B,0x06,0x03,0x55,0x1D,
+0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,
+0x30,0x16,0x80,0x14,0x48,0xE6,0x68,0xF9,0x2B,0xD2,0xB2,0x95,0xD7,0x47,0xD8,0x23,
+0x20,0x10,0x4F,0x33,0x98,0x90,0x9F,0xD4,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,
+0x16,0x04,0x14,0x48,0xE6,0x68,0xF9,0x2B,0xD2,0xB2,0x95,0xD7,0x47,0xD8,0x23,0x20,
+0x10,0x4F,0x33,0x98,0x90,0x9F,0xD4,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,
+0x30,0x03,0x01,0x01,0xFF,0x30,0x1A,0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,
+0x41,0x00,0x04,0x0D,0x30,0x0B,0x1B,0x05,0x56,0x33,0x2E,0x30,0x63,0x03,0x02,0x06,
+0xC0,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
+0x03,0x81,0x81,0x00,0x58,0xCE,0x29,0xEA,0xFC,0xF7,0xDE,0xB5,0xCE,0x02,0xB9,0x17,
+0xB5,0x85,0xD1,0xB9,0xE3,0xE0,0x95,0xCC,0x25,0x31,0x0D,0x00,0xA6,0x92,0x6E,0x7F,
+0xB6,0x92,0x63,0x9E,0x50,0x95,0xD1,0x9A,0x6F,0xE4,0x11,0xDE,0x63,0x85,0x6E,0x98,
+0xEE,0xA8,0xFF,0x5A,0xC8,0xD3,0x55,0xB2,0x66,0x71,0x57,0xDE,0xC0,0x21,0xEB,0x3D,
+0x2A,0xA7,0x23,0x49,0x01,0x04,0x86,0x42,0x7B,0xFC,0xEE,0x7F,0xA2,0x16,0x52,0xB5,
+0x67,0x67,0xD3,0x40,0xDB,0x3B,0x26,0x58,0xB2,0x28,0x77,0x3D,0xAE,0x14,0x77,0x61,
+0xD6,0xFA,0x2A,0x66,0x27,0xA0,0x0D,0xFA,0xA7,0x73,0x5C,0xEA,0x70,0xF1,0x94,0x21,
+0x65,0x44,0x5F,0xFA,0xFC,0xEF,0x29,0x68,0xA9,0xA2,0x87,0x79,0xEF,0x79,0xEF,0x4F,
+0xAC,0x07,0x77,0x38,
+};
+
+
+/* subject:/C=US/O=Equifax Secure Inc./CN=Equifax Secure eBusiness CA-1 */
+/* issuer :/C=US/O=Equifax Secure Inc./CN=Equifax Secure eBusiness CA-1 */
+
+
+const unsigned char Equifax_Secure_eBusiness_CA_1_certificate[646]={
+0x30,0x82,0x02,0x82,0x30,0x82,0x01,0xEB,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x04,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,
+0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1C,
+0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x45,0x71,0x75,0x69,0x66,0x61,0x78,
+0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x49,0x6E,0x63,0x2E,0x31,0x26,0x30,0x24,
+0x06,0x03,0x55,0x04,0x03,0x13,0x1D,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,
+0x65,0x63,0x75,0x72,0x65,0x20,0x65,0x42,0x75,0x73,0x69,0x6E,0x65,0x73,0x73,0x20,
+0x43,0x41,0x2D,0x31,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,0x32,0x31,0x30,0x34,
+0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x30,0x30,0x36,0x32,0x31,0x30,0x34,0x30,
+0x30,0x30,0x30,0x5A,0x30,0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x55,0x53,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x45,0x71,
+0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x03,0x13,0x1D,0x45,0x71,0x75,0x69,
+0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x65,0x42,0x75,0x73,0x69,
+0x6E,0x65,0x73,0x73,0x20,0x43,0x41,0x2D,0x31,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,
+0x81,0x89,0x02,0x81,0x81,0x00,0xCE,0x2F,0x19,0xBC,0x17,0xB7,0x77,0xDE,0x93,0xA9,
+0x5F,0x5A,0x0D,0x17,0x4F,0x34,0x1A,0x0C,0x98,0xF4,0x22,0xD9,0x59,0xD4,0xC4,0x68,
+0x46,0xF0,0xB4,0x35,0xC5,0x85,0x03,0x20,0xC6,0xAF,0x45,0xA5,0x21,0x51,0x45,0x41,
+0xEB,0x16,0x58,0x36,0x32,0x6F,0xE2,0x50,0x62,0x64,0xF9,0xFD,0x51,0x9C,0xAA,0x24,
+0xD9,0xF4,0x9D,0x83,0x2A,0x87,0x0A,0x21,0xD3,0x12,0x38,0x34,0x6C,0x8D,0x00,0x6E,
+0x5A,0xA0,0xD9,0x42,0xEE,0x1A,0x21,0x95,0xF9,0x52,0x4C,0x55,0x5A,0xC5,0x0F,0x38,
+0x4F,0x46,0xFA,0x6D,0xF8,0x2E,0x35,0xD6,0x1D,0x7C,0xEB,0xE2,0xF0,0xB0,0x75,0x80,
+0xC8,0xA9,0x13,0xAC,0xBE,0x88,0xEF,0x3A,0x6E,0xAB,0x5F,0x2A,0x38,0x62,0x02,0xB0,
+0x12,0x7B,0xFE,0x8F,0xA6,0x03,0x02,0x03,0x01,0x00,0x01,0xA3,0x66,0x30,0x64,0x30,
+0x11,0x06,0x09,0x60,0x86,0x48,0x01,0x86,0xF8,0x42,0x01,0x01,0x04,0x04,0x03,0x02,
+0x00,0x07,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,
+0x01,0x01,0xFF,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,
+0x4A,0x78,0x32,0x52,0x11,0xDB,0x59,0x16,0x36,0x5E,0xDF,0xC1,0x14,0x36,0x40,0x6A,
+0x47,0x7C,0x4C,0xA1,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x4A,
+0x78,0x32,0x52,0x11,0xDB,0x59,0x16,0x36,0x5E,0xDF,0xC1,0x14,0x36,0x40,0x6A,0x47,
+0x7C,0x4C,0xA1,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,
+0x05,0x00,0x03,0x81,0x81,0x00,0x75,0x5B,0xA8,0x9B,0x03,0x11,0xE6,0xE9,0x56,0x4C,
+0xCD,0xF9,0xA9,0x4C,0xC0,0x0D,0x9A,0xF3,0xCC,0x65,0x69,0xE6,0x25,0x76,0xCC,0x59,
+0xB7,0xD6,0x54,0xC3,0x1D,0xCD,0x99,0xAC,0x19,0xDD,0xB4,0x85,0xD5,0xE0,0x3D,0xFC,
+0x62,0x20,0xA7,0x84,0x4B,0x58,0x65,0xF1,0xE2,0xF9,0x95,0x21,0x3F,0xF5,0xD4,0x7E,
+0x58,0x1E,0x47,0x87,0x54,0x3E,0x58,0xA1,0xB5,0xB5,0xF8,0x2A,0xEF,0x71,0xE7,0xBC,
+0xC3,0xF6,0xB1,0x49,0x46,0xE2,0xD7,0xA0,0x6B,0xE5,0x56,0x7A,0x9A,0x27,0x98,0x7C,
+0x46,0x62,0x14,0xE7,0xC9,0xFC,0x6E,0x03,0x12,0x79,0x80,0x38,0x1D,0x48,0x82,0x8D,
+0xFC,0x17,0xFE,0x2A,0x96,0x2B,0xB5,0x62,0xA6,0xA6,0x3D,0xBD,0x7F,0x92,0x59,0xCD,
+0x5A,0x2A,0x82,0xB2,0x37,0x79,
+};
+
+
+/* subject:/C=US/O=Equifax Secure/OU=Equifax Secure eBusiness CA-2 */
+/* issuer :/C=US/O=Equifax Secure/OU=Equifax Secure eBusiness CA-2 */
+
+
+const unsigned char Equifax_Secure_eBusiness_CA_2_certificate[804]={
+0x30,0x82,0x03,0x20,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x37,
+0x70,0xCF,0xB5,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x45,0x71,0x75,0x69,
+0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x31,0x26,0x30,0x24,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x1D,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,
+0x75,0x72,0x65,0x20,0x65,0x42,0x75,0x73,0x69,0x6E,0x65,0x73,0x73,0x20,0x43,0x41,
+0x2D,0x32,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,0x32,0x33,0x31,0x32,0x31,0x34,
+0x34,0x35,0x5A,0x17,0x0D,0x31,0x39,0x30,0x36,0x32,0x33,0x31,0x32,0x31,0x34,0x34,
+0x35,0x5A,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x45,0x71,0x75,0x69,
+0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x31,0x26,0x30,0x24,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x1D,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,
+0x75,0x72,0x65,0x20,0x65,0x42,0x75,0x73,0x69,0x6E,0x65,0x73,0x73,0x20,0x43,0x41,
+0x2D,0x32,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xE4,
+0x39,0x39,0x93,0x1E,0x52,0x06,0x1B,0x28,0x36,0xF8,0xB2,0xA3,0x29,0xC5,0xED,0x8E,
+0xB2,0x11,0xBD,0xFE,0xEB,0xE7,0xB4,0x74,0xC2,0x8F,0xFF,0x05,0xE7,0xD9,0x9D,0x06,
+0xBF,0x12,0xC8,0x3F,0x0E,0xF2,0xD6,0xD1,0x24,0xB2,0x11,0xDE,0xD1,0x73,0x09,0x8A,
+0xD4,0xB1,0x2C,0x98,0x09,0x0D,0x1E,0x50,0x46,0xB2,0x83,0xA6,0x45,0x8D,0x62,0x68,
+0xBB,0x85,0x1B,0x20,0x70,0x32,0xAA,0x40,0xCD,0xA6,0x96,0x5F,0xC4,0x71,0x37,0x3F,
+0x04,0xF3,0xB7,0x41,0x24,0x39,0x07,0x1A,0x1E,0x2E,0x61,0x58,0xA0,0x12,0x0B,0xE5,
+0xA5,0xDF,0xC5,0xAB,0xEA,0x37,0x71,0xCC,0x1C,0xC8,0x37,0x3A,0xB9,0x97,0x52,0xA7,
+0xAC,0xC5,0x6A,0x24,0x94,0x4E,0x9C,0x7B,0xCF,0xC0,0x6A,0xD6,0xDF,0x21,0xBD,0x02,
+0x03,0x01,0x00,0x01,0xA3,0x82,0x01,0x09,0x30,0x82,0x01,0x05,0x30,0x70,0x06,0x03,
+0x55,0x1D,0x1F,0x04,0x69,0x30,0x67,0x30,0x65,0xA0,0x63,0xA0,0x61,0xA4,0x5F,0x30,
+0x5D,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
+0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x45,0x71,0x75,0x69,0x66,0x61,0x78,
+0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x1D,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,
+0x20,0x65,0x42,0x75,0x73,0x69,0x6E,0x65,0x73,0x73,0x20,0x43,0x41,0x2D,0x32,0x31,
+0x0D,0x30,0x0B,0x06,0x03,0x55,0x04,0x03,0x13,0x04,0x43,0x52,0x4C,0x31,0x30,0x1A,
+0x06,0x03,0x55,0x1D,0x10,0x04,0x13,0x30,0x11,0x81,0x0F,0x32,0x30,0x31,0x39,0x30,
+0x36,0x32,0x33,0x31,0x32,0x31,0x34,0x34,0x35,0x5A,0x30,0x0B,0x06,0x03,0x55,0x1D,
+0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,
+0x30,0x16,0x80,0x14,0x50,0x9E,0x0B,0xEA,0xAF,0x5E,0xB9,0x20,0x48,0xA6,0x50,0x6A,
+0xCB,0xFD,0xD8,0x20,0x7A,0xA7,0x82,0x76,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,
+0x16,0x04,0x14,0x50,0x9E,0x0B,0xEA,0xAF,0x5E,0xB9,0x20,0x48,0xA6,0x50,0x6A,0xCB,
+0xFD,0xD8,0x20,0x7A,0xA7,0x82,0x76,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,
+0x30,0x03,0x01,0x01,0xFF,0x30,0x1A,0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,
+0x41,0x00,0x04,0x0D,0x30,0x0B,0x1B,0x05,0x56,0x33,0x2E,0x30,0x63,0x03,0x02,0x06,
+0xC0,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
+0x03,0x81,0x81,0x00,0x0C,0x86,0x82,0xAD,0xE8,0x4E,0x1A,0xF5,0x8E,0x89,0x27,0xE2,
+0x35,0x58,0x3D,0x29,0xB4,0x07,0x8F,0x36,0x50,0x95,0xBF,0x6E,0xC1,0x9E,0xEB,0xC4,
+0x90,0xB2,0x85,0xA8,0xBB,0xB7,0x42,0xE0,0x0F,0x07,0x39,0xDF,0xFB,0x9E,0x90,0xB2,
+0xD1,0xC1,0x3E,0x53,0x9F,0x03,0x44,0xB0,0x7E,0x4B,0xF4,0x6F,0xE4,0x7C,0x1F,0xE7,
+0xE2,0xB1,0xE4,0xB8,0x9A,0xEF,0xC3,0xBD,0xCE,0xDE,0x0B,0x32,0x34,0xD9,0xDE,0x28,
+0xED,0x33,0x6B,0xC4,0xD4,0xD7,0x3D,0x12,0x58,0xAB,0x7D,0x09,0x2D,0xCB,0x70,0xF5,
+0x13,0x8A,0x94,0xA1,0x27,0xA4,0xD6,0x70,0xC5,0x6D,0x94,0xB5,0xC9,0x7D,0x9D,0xA0,
+0xD2,0xC6,0x08,0x49,0xD9,0x66,0x9B,0xA6,0xD3,0xF4,0x0B,0xDC,0xC5,0x26,0x57,0xE1,
+0x91,0x30,0xEA,0xCD,
+};
+
+
+/* subject:/C=US/O=Equifax Secure Inc./CN=Equifax Secure Global eBusiness CA-1 */
+/* issuer :/C=US/O=Equifax Secure Inc./CN=Equifax Secure Global eBusiness CA-1 */
+
+
+const unsigned char Equifax_Secure_Global_eBusiness_CA_certificate[660]={
+0x30,0x82,0x02,0x90,0x30,0x82,0x01,0xF9,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,
+0x5A,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1C,
+0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x45,0x71,0x75,0x69,0x66,0x61,0x78,
+0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x49,0x6E,0x63,0x2E,0x31,0x2D,0x30,0x2B,
+0x06,0x03,0x55,0x04,0x03,0x13,0x24,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,
+0x65,0x63,0x75,0x72,0x65,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x65,0x42,0x75,
+0x73,0x69,0x6E,0x65,0x73,0x73,0x20,0x43,0x41,0x2D,0x31,0x30,0x1E,0x17,0x0D,0x39,
+0x39,0x30,0x36,0x32,0x31,0x30,0x34,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x30,
+0x30,0x36,0x32,0x31,0x30,0x34,0x30,0x30,0x30,0x30,0x5A,0x30,0x5A,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1C,0x30,0x1A,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x13,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,
+0x75,0x72,0x65,0x20,0x49,0x6E,0x63,0x2E,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,
+0x03,0x13,0x24,0x45,0x71,0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,
+0x65,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x65,0x42,0x75,0x73,0x69,0x6E,0x65,
+0x73,0x73,0x20,0x43,0x41,0x2D,0x31,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,
+0x02,0x81,0x81,0x00,0xBA,0xE7,0x17,0x90,0x02,0x65,0xB1,0x34,0x55,0x3C,0x49,0xC2,
+0x51,0xD5,0xDF,0xA7,0xD1,0x37,0x8F,0xD1,0xE7,0x81,0x73,0x41,0x52,0x60,0x9B,0x9D,
+0xA1,0x17,0x26,0x78,0xAD,0xC7,0xB1,0xE8,0x26,0x94,0x32,0xB5,0xDE,0x33,0x8D,0x3A,
+0x2F,0xDB,0xF2,0x9A,0x7A,0x5A,0x73,0x98,0xA3,0x5C,0xE9,0xFB,0x8A,0x73,0x1B,0x5C,
+0xE7,0xC3,0xBF,0x80,0x6C,0xCD,0xA9,0xF4,0xD6,0x2B,0xC0,0xF7,0xF9,0x99,0xAA,0x63,
+0xA2,0xB1,0x47,0x02,0x0F,0xD4,0xE4,0x51,0x3A,0x12,0x3C,0x6C,0x8A,0x5A,0x54,0x84,
+0x70,0xDB,0xC1,0xC5,0x90,0xCF,0x72,0x45,0xCB,0xA8,0x59,0xC0,0xCD,0x33,0x9D,0x3F,
+0xA3,0x96,0xEB,0x85,0x33,0x21,0x1C,0x3E,0x1E,0x3E,0x60,0x6E,0x76,0x9C,0x67,0x85,
+0xC5,0xC8,0xC3,0x61,0x02,0x03,0x01,0x00,0x01,0xA3,0x66,0x30,0x64,0x30,0x11,0x06,
+0x09,0x60,0x86,0x48,0x01,0x86,0xF8,0x42,0x01,0x01,0x04,0x04,0x03,0x02,0x00,0x07,
+0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,
+0xFF,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0xBE,0xA8,
+0xA0,0x74,0x72,0x50,0x6B,0x44,0xB7,0xC9,0x23,0xD8,0xFB,0xA8,0xFF,0xB3,0x57,0x6B,
+0x68,0x6C,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xBE,0xA8,0xA0,
+0x74,0x72,0x50,0x6B,0x44,0xB7,0xC9,0x23,0xD8,0xFB,0xA8,0xFF,0xB3,0x57,0x6B,0x68,
+0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,
+0x03,0x81,0x81,0x00,0x30,0xE2,0x01,0x51,0xAA,0xC7,0xEA,0x5F,0xDA,0xB9,0xD0,0x65,
+0x0F,0x30,0xD6,0x3E,0xDA,0x0D,0x14,0x49,0x6E,0x91,0x93,0x27,0x14,0x31,0xEF,0xC4,
+0xF7,0x2D,0x45,0xF8,0xEC,0xC7,0xBF,0xA2,0x41,0x0D,0x23,0xB4,0x92,0xF9,0x19,0x00,
+0x67,0xBD,0x01,0xAF,0xCD,0xE0,0x71,0xFC,0x5A,0xCF,0x64,0xC4,0xE0,0x96,0x98,0xD0,
+0xA3,0x40,0xE2,0x01,0x8A,0xEF,0x27,0x07,0xF1,0x65,0x01,0x8A,0x44,0x2D,0x06,0x65,
+0x75,0x52,0xC0,0x86,0x10,0x20,0x21,0x5F,0x6C,0x6B,0x0F,0x6C,0xAE,0x09,0x1C,0xAF,
+0xF2,0xA2,0x18,0x34,0xC4,0x75,0xA4,0x73,0x1C,0xF1,0x8D,0xDC,0xEF,0xAD,0xF9,0xB3,
+0x76,0xB4,0x92,0xBF,0xDC,0x95,0x10,0x1E,0xBE,0xCB,0xC8,0x3B,0x5A,0x84,0x60,0x19,
+0x56,0x94,0xA9,0x55,
+};
+
+
+/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */
+/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */
+
+
+const unsigned char GeoTrust_Global_CA_certificate[856]={
+0x30,0x82,0x03,0x54,0x30,0x82,0x02,0x3C,0xA0,0x03,0x02,0x01,0x02,0x02,0x03,0x02,
+0x34,0x56,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,
+0x00,0x30,0x42,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,
+0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,
+0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,
+0x03,0x13,0x12,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,
+0x61,0x6C,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x32,0x30,0x35,0x32,0x31,0x30,
+0x34,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x32,0x30,0x35,0x32,0x31,0x30,0x34,
+0x30,0x30,0x30,0x30,0x5A,0x30,0x42,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
+0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,
+0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1B,0x30,0x19,
+0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
+0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,
+0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDA,0xCC,0x18,0x63,0x30,0xFD,
+0xF4,0x17,0x23,0x1A,0x56,0x7E,0x5B,0xDF,0x3C,0x6C,0x38,0xE4,0x71,0xB7,0x78,0x91,
+0xD4,0xBC,0xA1,0xD8,0x4C,0xF8,0xA8,0x43,0xB6,0x03,0xE9,0x4D,0x21,0x07,0x08,0x88,
+0xDA,0x58,0x2F,0x66,0x39,0x29,0xBD,0x05,0x78,0x8B,0x9D,0x38,0xE8,0x05,0xB7,0x6A,
+0x7E,0x71,0xA4,0xE6,0xC4,0x60,0xA6,0xB0,0xEF,0x80,0xE4,0x89,0x28,0x0F,0x9E,0x25,
+0xD6,0xED,0x83,0xF3,0xAD,0xA6,0x91,0xC7,0x98,0xC9,0x42,0x18,0x35,0x14,0x9D,0xAD,
+0x98,0x46,0x92,0x2E,0x4F,0xCA,0xF1,0x87,0x43,0xC1,0x16,0x95,0x57,0x2D,0x50,0xEF,
+0x89,0x2D,0x80,0x7A,0x57,0xAD,0xF2,0xEE,0x5F,0x6B,0xD2,0x00,0x8D,0xB9,0x14,0xF8,
+0x14,0x15,0x35,0xD9,0xC0,0x46,0xA3,0x7B,0x72,0xC8,0x91,0xBF,0xC9,0x55,0x2B,0xCD,
+0xD0,0x97,0x3E,0x9C,0x26,0x64,0xCC,0xDF,0xCE,0x83,0x19,0x71,0xCA,0x4E,0xE6,0xD4,
+0xD5,0x7B,0xA9,0x19,0xCD,0x55,0xDE,0xC8,0xEC,0xD2,0x5E,0x38,0x53,0xE5,0x5C,0x4F,
+0x8C,0x2D,0xFE,0x50,0x23,0x36,0xFC,0x66,0xE6,0xCB,0x8E,0xA4,0x39,0x19,0x00,0xB7,
+0x95,0x02,0x39,0x91,0x0B,0x0E,0xFE,0x38,0x2E,0xD1,0x1D,0x05,0x9A,0xF6,0x4D,0x3E,
+0x6F,0x0F,0x07,0x1D,0xAF,0x2C,0x1E,0x8F,0x60,0x39,0xE2,0xFA,0x36,0x53,0x13,0x39,
+0xD4,0x5E,0x26,0x2B,0xDB,0x3D,0xA8,0x14,0xBD,0x32,0xEB,0x18,0x03,0x28,0x52,0x04,
+0x71,0xE5,0xAB,0x33,0x3D,0xE1,0x38,0xBB,0x07,0x36,0x84,0x62,0x9C,0x79,0xEA,0x16,
+0x30,0xF4,0x5F,0xC0,0x2B,0xE8,0x71,0x6B,0xE4,0xF9,0x02,0x03,0x01,0x00,0x01,0xA3,
+0x53,0x30,0x51,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
+0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC0,
+0x7A,0x98,0x68,0x8D,0x89,0xFB,0xAB,0x05,0x64,0x0C,0x11,0x7D,0xAA,0x7D,0x65,0xB8,
+0xCA,0xCC,0x4E,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,
+0xC0,0x7A,0x98,0x68,0x8D,0x89,0xFB,0xAB,0x05,0x64,0x0C,0x11,0x7D,0xAA,0x7D,0x65,
+0xB8,0xCA,0xCC,0x4E,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x35,0xE3,0x29,0x6A,0xE5,0x2F,0x5D,0x54,
+0x8E,0x29,0x50,0x94,0x9F,0x99,0x1A,0x14,0xE4,0x8F,0x78,0x2A,0x62,0x94,0xA2,0x27,
+0x67,0x9E,0xD0,0xCF,0x1A,0x5E,0x47,0xE9,0xC1,0xB2,0xA4,0xCF,0xDD,0x41,0x1A,0x05,
+0x4E,0x9B,0x4B,0xEE,0x4A,0x6F,0x55,0x52,0xB3,0x24,0xA1,0x37,0x0A,0xEB,0x64,0x76,
+0x2A,0x2E,0x2C,0xF3,0xFD,0x3B,0x75,0x90,0xBF,0xFA,0x71,0xD8,0xC7,0x3D,0x37,0xD2,
+0xB5,0x05,0x95,0x62,0xB9,0xA6,0xDE,0x89,0x3D,0x36,0x7B,0x38,0x77,0x48,0x97,0xAC,
+0xA6,0x20,0x8F,0x2E,0xA6,0xC9,0x0C,0xC2,0xB2,0x99,0x45,0x00,0xC7,0xCE,0x11,0x51,
+0x22,0x22,0xE0,0xA5,0xEA,0xB6,0x15,0x48,0x09,0x64,0xEA,0x5E,0x4F,0x74,0xF7,0x05,
+0x3E,0xC7,0x8A,0x52,0x0C,0xDB,0x15,0xB4,0xBD,0x6D,0x9B,0xE5,0xC6,0xB1,0x54,0x68,
+0xA9,0xE3,0x69,0x90,0xB6,0x9A,0xA5,0x0F,0xB8,0xB9,0x3F,0x20,0x7D,0xAE,0x4A,0xB5,
+0xB8,0x9C,0xE4,0x1D,0xB6,0xAB,0xE6,0x94,0xA5,0xC1,0xC7,0x83,0xAD,0xDB,0xF5,0x27,
+0x87,0x0E,0x04,0x6C,0xD5,0xFF,0xDD,0xA0,0x5D,0xED,0x87,0x52,0xB7,0x2B,0x15,0x02,
+0xAE,0x39,0xA6,0x6A,0x74,0xE9,0xDA,0xC4,0xE7,0xBC,0x4D,0x34,0x1E,0xA9,0x5C,0x4D,
+0x33,0x5F,0x92,0x09,0x2F,0x88,0x66,0x5D,0x77,0x97,0xC7,0x1D,0x76,0x13,0xA9,0xD5,
+0xE5,0xF1,0x16,0x09,0x11,0x35,0xD5,0xAC,0xDB,0x24,0x71,0x70,0x2C,0x98,0x56,0x0B,
+0xD9,0x17,0xB4,0xD1,0xE3,0x51,0x2B,0x5E,0x75,0xE8,0xD5,0xD0,0xDC,0x4F,0x34,0xED,
+0xC2,0x05,0x66,0x80,0xA1,0xCB,0xE6,0x33,
+};
+
+
+/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA 2 */
+/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA 2 */
+
+
+const unsigned char GeoTrust_Global_CA_2_certificate[874]={
+0x30,0x82,0x03,0x66,0x30,0x82,0x02,0x4E,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,
+0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
+0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x03,0x13,
+0x14,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,
+0x20,0x43,0x41,0x20,0x32,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x33,0x30,0x34,0x30,
+0x35,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x31,0x39,0x30,0x33,0x30,0x34,0x30,0x35,
+0x30,0x30,0x30,0x30,0x5A,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
+0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,
+0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1D,0x30,0x1B,
+0x06,0x03,0x55,0x04,0x03,0x13,0x14,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
+0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x41,0x20,0x32,0x30,0x82,0x01,0x22,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,
+0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xEF,0x3C,0x4D,0x40,
+0x3D,0x10,0xDF,0x3B,0x53,0x00,0xE1,0x67,0xFE,0x94,0x60,0x15,0x3E,0x85,0x88,0xF1,
+0x89,0x0D,0x90,0xC8,0x28,0x23,0x99,0x05,0xE8,0x2B,0x20,0x9D,0xC6,0xF3,0x60,0x46,
+0xD8,0xC1,0xB2,0xD5,0x8C,0x31,0xD9,0xDC,0x20,0x79,0x24,0x81,0xBF,0x35,0x32,0xFC,
+0x63,0x69,0xDB,0xB1,0x2A,0x6B,0xEE,0x21,0x58,0xF2,0x08,0xE9,0x78,0xCB,0x6F,0xCB,
+0xFC,0x16,0x52,0xC8,0x91,0xC4,0xFF,0x3D,0x73,0xDE,0xB1,0x3E,0xA7,0xC2,0x7D,0x66,
+0xC1,0xF5,0x7E,0x52,0x24,0x1A,0xE2,0xD5,0x67,0x91,0xD0,0x82,0x10,0xD7,0x78,0x4B,
+0x4F,0x2B,0x42,0x39,0xBD,0x64,0x2D,0x40,0xA0,0xB0,0x10,0xD3,0x38,0x48,0x46,0x88,
+0xA1,0x0C,0xBB,0x3A,0x33,0x2A,0x62,0x98,0xFB,0x00,0x9D,0x13,0x59,0x7F,0x6F,0x3B,
+0x72,0xAA,0xEE,0xA6,0x0F,0x86,0xF9,0x05,0x61,0xEA,0x67,0x7F,0x0C,0x37,0x96,0x8B,
+0xE6,0x69,0x16,0x47,0x11,0xC2,0x27,0x59,0x03,0xB3,0xA6,0x60,0xC2,0x21,0x40,0x56,
+0xFA,0xA0,0xC7,0x7D,0x3A,0x13,0xE3,0xEC,0x57,0xC7,0xB3,0xD6,0xAE,0x9D,0x89,0x80,
+0xF7,0x01,0xE7,0x2C,0xF6,0x96,0x2B,0x13,0x0D,0x79,0x2C,0xD9,0xC0,0xE4,0x86,0x7B,
+0x4B,0x8C,0x0C,0x72,0x82,0x8A,0xFB,0x17,0xCD,0x00,0x6C,0x3A,0x13,0x3C,0xB0,0x84,
+0x87,0x4B,0x16,0x7A,0x29,0xB2,0x4F,0xDB,0x1D,0xD4,0x0B,0xF3,0x66,0x37,0xBD,0xD8,
+0xF6,0x57,0xBB,0x5E,0x24,0x7A,0xB8,0x3C,0x8B,0xB9,0xFA,0x92,0x1A,0x1A,0x84,0x9E,
+0xD8,0x74,0x8F,0xAA,0x1B,0x7F,0x5E,0xF4,0xFE,0x45,0x22,0x21,0x02,0x03,0x01,0x00,
+0x01,0xA3,0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
+0x14,0x71,0x38,0x36,0xF2,0x02,0x31,0x53,0x47,0x2B,0x6E,0xBA,0x65,0x46,0xA9,0x10,
+0x15,0x58,0x20,0x05,0x09,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,
+0x80,0x14,0x71,0x38,0x36,0xF2,0x02,0x31,0x53,0x47,0x2B,0x6E,0xBA,0x65,0x46,0xA9,
+0x10,0x15,0x58,0x20,0x05,0x09,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x03,0xF7,0xB5,0x2B,0xAB,0x5D,
+0x10,0xFC,0x7B,0xB2,0xB2,0x5E,0xAC,0x9B,0x0E,0x7E,0x53,0x78,0x59,0x3E,0x42,0x04,
+0xFE,0x75,0xA3,0xAD,0xAC,0x81,0x4E,0xD7,0x02,0x8B,0x5E,0xC4,0x2D,0xC8,0x52,0x76,
+0xC7,0x2C,0x1F,0xFC,0x81,0x32,0x98,0xD1,0x4B,0xC6,0x92,0x93,0x33,0x35,0x31,0x2F,
+0xFC,0xD8,0x1D,0x44,0xDD,0xE0,0x81,0x7F,0x9D,0xE9,0x8B,0xE1,0x64,0x91,0x62,0x0B,
+0x39,0x08,0x8C,0xAC,0x74,0x9D,0x59,0xD9,0x7A,0x59,0x52,0x97,0x11,0xB9,0x16,0x7B,
+0x6F,0x45,0xD3,0x96,0xD9,0x31,0x7D,0x02,0x36,0x0F,0x9C,0x3B,0x6E,0xCF,0x2C,0x0D,
+0x03,0x46,0x45,0xEB,0xA0,0xF4,0x7F,0x48,0x44,0xC6,0x08,0x40,0xCC,0xDE,0x1B,0x70,
+0xB5,0x29,0xAD,0xBA,0x8B,0x3B,0x34,0x65,0x75,0x1B,0x71,0x21,0x1D,0x2C,0x14,0x0A,
+0xB0,0x96,0x95,0xB8,0xD6,0xEA,0xF2,0x65,0xFB,0x29,0xBA,0x4F,0xEA,0x91,0x93,0x74,
+0x69,0xB6,0xF2,0xFF,0xE1,0x1A,0xD0,0x0C,0xD1,0x76,0x85,0xCB,0x8A,0x25,0xBD,0x97,
+0x5E,0x2C,0x6F,0x15,0x99,0x26,0xE7,0xB6,0x29,0xFF,0x22,0xEC,0xC9,0x02,0xC7,0x56,
+0x00,0xCD,0x49,0xB9,0xB3,0x6C,0x7B,0x53,0x04,0x1A,0xE2,0xA8,0xC9,0xAA,0x12,0x05,
+0x23,0xC2,0xCE,0xE7,0xBB,0x04,0x02,0xCC,0xC0,0x47,0xA2,0xE4,0xC4,0x29,0x2F,0x5B,
+0x45,0x57,0x89,0x51,0xEE,0x3C,0xEB,0x52,0x08,0xFF,0x07,0x35,0x1E,0x9F,0x35,0x6A,
+0x47,0x4A,0x56,0x98,0xD1,0x5A,0x85,0x1F,0x8C,0xF5,0x22,0xBF,0xAB,0xCE,0x83,0xF3,
+0xE2,0x22,0x29,0xAE,0x7D,0x83,0x40,0xA8,0xBA,0x6C,
+};
+
+
+/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Primary Certification Authority */
+/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Primary Certification Authority */
+
+
+const unsigned char GeoTrust_Primary_Certification_Authority_certificate[896]={
+0x30,0x82,0x03,0x7C,0x30,0x82,0x02,0x64,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x18,
+0xAC,0xB5,0x6A,0xFD,0x69,0xB6,0x15,0x3A,0x63,0x6C,0xAF,0xDA,0xFA,0xC4,0xA1,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x58,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,
+0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,
+0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28,
+0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,
+0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,
+0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,
+0x32,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,
+0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x58,0x31,0x0B,0x30,0x09,0x06,0x03,
+0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,
+0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,
+0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28,0x47,0x65,0x6F,0x54,0x72,0x75,
+0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
+0x01,0x01,0x00,0xBE,0xB8,0x15,0x7B,0xFF,0xD4,0x7C,0x7D,0x67,0xAD,0x83,0x64,0x7B,
+0xC8,0x42,0x53,0x2D,0xDF,0xF6,0x84,0x08,0x20,0x61,0xD6,0x01,0x59,0x6A,0x9C,0x44,
+0x11,0xAF,0xEF,0x76,0xFD,0x95,0x7E,0xCE,0x61,0x30,0xBB,0x7A,0x83,0x5F,0x02,0xBD,
+0x01,0x66,0xCA,0xEE,0x15,0x8D,0x6F,0xA1,0x30,0x9C,0xBD,0xA1,0x85,0x9E,0x94,0x3A,
+0xF3,0x56,0x88,0x00,0x31,0xCF,0xD8,0xEE,0x6A,0x96,0x02,0xD9,0xED,0x03,0x8C,0xFB,
+0x75,0x6D,0xE7,0xEA,0xB8,0x55,0x16,0x05,0x16,0x9A,0xF4,0xE0,0x5E,0xB1,0x88,0xC0,
+0x64,0x85,0x5C,0x15,0x4D,0x88,0xC7,0xB7,0xBA,0xE0,0x75,0xE9,0xAD,0x05,0x3D,0x9D,
+0xC7,0x89,0x48,0xE0,0xBB,0x28,0xC8,0x03,0xE1,0x30,0x93,0x64,0x5E,0x52,0xC0,0x59,
+0x70,0x22,0x35,0x57,0x88,0x8A,0xF1,0x95,0x0A,0x83,0xD7,0xBC,0x31,0x73,0x01,0x34,
+0xED,0xEF,0x46,0x71,0xE0,0x6B,0x02,0xA8,0x35,0x72,0x6B,0x97,0x9B,0x66,0xE0,0xCB,
+0x1C,0x79,0x5F,0xD8,0x1A,0x04,0x68,0x1E,0x47,0x02,0xE6,0x9D,0x60,0xE2,0x36,0x97,
+0x01,0xDF,0xCE,0x35,0x92,0xDF,0xBE,0x67,0xC7,0x6D,0x77,0x59,0x3B,0x8F,0x9D,0xD6,
+0x90,0x15,0x94,0xBC,0x42,0x34,0x10,0xC1,0x39,0xF9,0xB1,0x27,0x3E,0x7E,0xD6,0x8A,
+0x75,0xC5,0xB2,0xAF,0x96,0xD3,0xA2,0xDE,0x9B,0xE4,0x98,0xBE,0x7D,0xE1,0xE9,0x81,
+0xAD,0xB6,0x6F,0xFC,0xD7,0x0E,0xDA,0xE0,0x34,0xB0,0x0D,0x1A,0x77,0xE7,0xE3,0x08,
+0x98,0xEF,0x58,0xFA,0x9C,0x84,0xB7,0x36,0xAF,0xC2,0xDF,0xAC,0xD2,0xF4,0x10,0x06,
+0x70,0x71,0x35,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,
+0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,
+0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x2C,0xD5,0x50,0x41,0x97,0x15,0x8B,0xF0,
+0x8F,0x36,0x61,0x5B,0x4A,0xFB,0x6B,0xD9,0x99,0xC9,0x33,0x92,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,
+0x5A,0x70,0x7F,0x2C,0xDD,0xB7,0x34,0x4F,0xF5,0x86,0x51,0xA9,0x26,0xBE,0x4B,0xB8,
+0xAA,0xF1,0x71,0x0D,0xDC,0x61,0xC7,0xA0,0xEA,0x34,0x1E,0x7A,0x77,0x0F,0x04,0x35,
+0xE8,0x27,0x8F,0x6C,0x90,0xBF,0x91,0x16,0x24,0x46,0x3E,0x4A,0x4E,0xCE,0x2B,0x16,
+0xD5,0x0B,0x52,0x1D,0xFC,0x1F,0x67,0xA2,0x02,0x45,0x31,0x4F,0xCE,0xF3,0xFA,0x03,
+0xA7,0x79,0x9D,0x53,0x6A,0xD9,0xDA,0x63,0x3A,0xF8,0x80,0xD7,0xD3,0x99,0xE1,0xA5,
+0xE1,0xBE,0xD4,0x55,0x71,0x98,0x35,0x3A,0xBE,0x93,0xEA,0xAE,0xAD,0x42,0xB2,0x90,
+0x6F,0xE0,0xFC,0x21,0x4D,0x35,0x63,0x33,0x89,0x49,0xD6,0x9B,0x4E,0xCA,0xC7,0xE7,
+0x4E,0x09,0x00,0xF7,0xDA,0xC7,0xEF,0x99,0x62,0x99,0x77,0xB6,0x95,0x22,0x5E,0x8A,
+0xA0,0xAB,0xF4,0xB8,0x78,0x98,0xCA,0x38,0x19,0x99,0xC9,0x72,0x9E,0x78,0xCD,0x4B,
+0xAC,0xAF,0x19,0xA0,0x73,0x12,0x2D,0xFC,0xC2,0x41,0xBA,0x81,0x91,0xDA,0x16,0x5A,
+0x31,0xB7,0xF9,0xB4,0x71,0x80,0x12,0x48,0x99,0x72,0x73,0x5A,0x59,0x53,0xC1,0x63,
+0x52,0x33,0xED,0xA7,0xC9,0xD2,0x39,0x02,0x70,0xFA,0xE0,0xB1,0x42,0x66,0x29,0xAA,
+0x9B,0x51,0xED,0x30,0x54,0x22,0x14,0x5F,0xD9,0xAB,0x1D,0xC1,0xE4,0x94,0xF0,0xF8,
+0xF5,0x2B,0xF7,0xEA,0xCA,0x78,0x46,0xD6,0xB8,0x91,0xFD,0xA6,0x0D,0x2B,0x1A,0x14,
+0x01,0x3E,0x80,0xF0,0x42,0xA0,0x95,0x07,0x5E,0x6D,0xCD,0xCC,0x4B,0xA4,0x45,0x8D,
+0xAB,0x12,0xE8,0xB3,0xDE,0x5A,0xE5,0xA0,0x7C,0xE8,0x0F,0x22,0x1D,0x5A,0xE9,0x59,
+};
+
+
+/* subject:/C=US/O=GeoTrust Inc./OU=(c) 2007 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G2 */
+/* issuer :/C=US/O=GeoTrust Inc./OU=(c) 2007 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G2 */
+
+
+const unsigned char GeoTrust_Primary_Certification_Authority___G2_certificate[690]={
+0x30,0x82,0x02,0xAE,0x30,0x82,0x02,0x35,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x3C,
+0xB2,0xF4,0x48,0x0A,0x00,0xE2,0xFE,0xEB,0x24,0x3B,0x5E,0x60,0x3E,0xC3,0x6B,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x98,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,
+0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,
+0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
+0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,
+0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,
+0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
+0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,
+0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
+0x79,0x20,0x2D,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,0x30,0x35,
+0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,
+0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x98,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
+0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,
+0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,0x29,0x20,0x32,0x30,0x30,
+0x37,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x20,
+0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,
+0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,0x30,0x34,0x06,0x03,0x55,
+0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,
+0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,
+0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,
+0x32,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,
+0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x15,0xB1,0xE8,0xFD,0x03,0x15,0x43,
+0xE5,0xAC,0xEB,0x87,0x37,0x11,0x62,0xEF,0xD2,0x83,0x36,0x52,0x7D,0x45,0x57,0x0B,
+0x4A,0x8D,0x7B,0x54,0x3B,0x3A,0x6E,0x5F,0x15,0x02,0xC0,0x50,0xA6,0xCF,0x25,0x2F,
+0x7D,0xCA,0x48,0xB8,0xC7,0x50,0x63,0x1C,0x2A,0x21,0x08,0x7C,0x9A,0x36,0xD8,0x0B,
+0xFE,0xD1,0x26,0xC5,0x58,0x31,0x30,0x28,0x25,0xF3,0x5D,0x5D,0xA3,0xB8,0xB6,0xA5,
+0xB4,0x92,0xED,0x6C,0x2C,0x9F,0xEB,0xDD,0x43,0x89,0xA2,0x3C,0x4B,0x48,0x91,0x1D,
+0x50,0xEC,0x26,0xDF,0xD6,0x60,0x2E,0xBD,0x21,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,
+0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,
+0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,
+0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x15,0x5F,0x35,0x57,0x51,0x55,0xFB,
+0x25,0xB2,0xAD,0x03,0x69,0xFC,0x01,0xA3,0xFA,0xBE,0x11,0x55,0xD5,0x30,0x0A,0x06,
+0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,
+0x64,0x96,0x59,0xA6,0xE8,0x09,0xDE,0x8B,0xBA,0xFA,0x5A,0x88,0x88,0xF0,0x1F,0x91,
+0xD3,0x46,0xA8,0xF2,0x4A,0x4C,0x02,0x63,0xFB,0x6C,0x5F,0x38,0xDB,0x2E,0x41,0x93,
+0xA9,0x0E,0xE6,0x9D,0xDC,0x31,0x1C,0xB2,0xA0,0xA7,0x18,0x1C,0x79,0xE1,0xC7,0x36,
+0x02,0x30,0x3A,0x56,0xAF,0x9A,0x74,0x6C,0xF6,0xFB,0x83,0xE0,0x33,0xD3,0x08,0x5F,
+0xA1,0x9C,0xC2,0x5B,0x9F,0x46,0xD6,0xB6,0xCB,0x91,0x06,0x63,0xA2,0x06,0xE7,0x33,
+0xAC,0x3E,0xA8,0x81,0x12,0xD0,0xCB,0xBA,0xD0,0x92,0x0B,0xB6,0x9E,0x96,0xAA,0x04,
+0x0F,0x8A,
+};
+
+
+/* subject:/C=US/O=GeoTrust Inc./OU=(c) 2008 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G3 */
+/* issuer :/C=US/O=GeoTrust Inc./OU=(c) 2008 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G3 */
+
+
+const unsigned char GeoTrust_Primary_Certification_Authority___G3_certificate[1026]={
+0x30,0x82,0x03,0xFE,0x30,0x82,0x02,0xE6,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x15,
+0xAC,0x6E,0x94,0x19,0xB2,0x79,0x4B,0x41,0xF6,0x27,0xA9,0xC3,0x18,0x0F,0x1F,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81,
+0x98,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,
+0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
+0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,
+0x30,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,
+0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,
+0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,
+0x79,0x31,0x36,0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,
+0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,
+0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x30,0x38,0x30,
+0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x32,
+0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x98,0x31,0x0B,0x30,0x09,
+0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,
+0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,0x29,0x20,
+0x32,0x30,0x30,0x38,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,
+0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,0x30,0x34,
+0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,
+0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
+0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
+0x02,0x82,0x01,0x01,0x00,0xDC,0xE2,0x5E,0x62,0x58,0x1D,0x33,0x57,0x39,0x32,0x33,
+0xFA,0xEB,0xCB,0x87,0x8C,0xA7,0xD4,0x4A,0xDD,0x06,0x88,0xEA,0x64,0x8E,0x31,0x98,
+0xA5,0x38,0x90,0x1E,0x98,0xCF,0x2E,0x63,0x2B,0xF0,0x46,0xBC,0x44,0xB2,0x89,0xA1,
+0xC0,0x28,0x0C,0x49,0x70,0x21,0x95,0x9F,0x64,0xC0,0xA6,0x93,0x12,0x02,0x65,0x26,
+0x86,0xC6,0xA5,0x89,0xF0,0xFA,0xD7,0x84,0xA0,0x70,0xAF,0x4F,0x1A,0x97,0x3F,0x06,
+0x44,0xD5,0xC9,0xEB,0x72,0x10,0x7D,0xE4,0x31,0x28,0xFB,0x1C,0x61,0xE6,0x28,0x07,
+0x44,0x73,0x92,0x22,0x69,0xA7,0x03,0x88,0x6C,0x9D,0x63,0xC8,0x52,0xDA,0x98,0x27,
+0xE7,0x08,0x4C,0x70,0x3E,0xB4,0xC9,0x12,0xC1,0xC5,0x67,0x83,0x5D,0x33,0xF3,0x03,
+0x11,0xEC,0x6A,0xD0,0x53,0xE2,0xD1,0xBA,0x36,0x60,0x94,0x80,0xBB,0x61,0x63,0x6C,
+0x5B,0x17,0x7E,0xDF,0x40,0x94,0x1E,0xAB,0x0D,0xC2,0x21,0x28,0x70,0x88,0xFF,0xD6,
+0x26,0x6C,0x6C,0x60,0x04,0x25,0x4E,0x55,0x7E,0x7D,0xEF,0xBF,0x94,0x48,0xDE,0xB7,
+0x1D,0xDD,0x70,0x8D,0x05,0x5F,0x88,0xA5,0x9B,0xF2,0xC2,0xEE,0xEA,0xD1,0x40,0x41,
+0x6D,0x62,0x38,0x1D,0x56,0x06,0xC5,0x03,0x47,0x51,0x20,0x19,0xFC,0x7B,0x10,0x0B,
+0x0E,0x62,0xAE,0x76,0x55,0xBF,0x5F,0x77,0xBE,0x3E,0x49,0x01,0x53,0x3D,0x98,0x25,
+0x03,0x76,0x24,0x5A,0x1D,0xB4,0xDB,0x89,0xEA,0x79,0xE5,0xB6,0xB3,0x3B,0x3F,0xBA,
+0x4C,0x28,0x41,0x7F,0x06,0xAC,0x6A,0x8E,0xC1,0xD0,0xF6,0x05,0x1D,0x7D,0xE6,0x42,
+0x86,0xE3,0xA5,0xD5,0x47,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,
+0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,
+0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,
+0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC4,0x79,0xCA,0x8E,0xA1,0x4E,
+0x03,0x1D,0x1C,0xDC,0x6B,0xDB,0x31,0x5B,0x94,0x3E,0x3F,0x30,0x7F,0x2D,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,
+0x01,0x00,0x2D,0xC5,0x13,0xCF,0x56,0x80,0x7B,0x7A,0x78,0xBD,0x9F,0xAE,0x2C,0x99,
+0xE7,0xEF,0xDA,0xDF,0x94,0x5E,0x09,0x69,0xA7,0xE7,0x6E,0x68,0x8C,0xBD,0x72,0xBE,
+0x47,0xA9,0x0E,0x97,0x12,0xB8,0x4A,0xF1,0x64,0xD3,0x39,0xDF,0x25,0x34,0xD4,0xC1,
+0xCD,0x4E,0x81,0xF0,0x0F,0x04,0xC4,0x24,0xB3,0x34,0x96,0xC6,0xA6,0xAA,0x30,0xDF,
+0x68,0x61,0x73,0xD7,0xF9,0x8E,0x85,0x89,0xEF,0x0E,0x5E,0x95,0x28,0x4A,0x2A,0x27,
+0x8F,0x10,0x8E,0x2E,0x7C,0x86,0xC4,0x02,0x9E,0xDA,0x0C,0x77,0x65,0x0E,0x44,0x0D,
+0x92,0xFD,0xFD,0xB3,0x16,0x36,0xFA,0x11,0x0D,0x1D,0x8C,0x0E,0x07,0x89,0x6A,0x29,
+0x56,0xF7,0x72,0xF4,0xDD,0x15,0x9C,0x77,0x35,0x66,0x57,0xAB,0x13,0x53,0xD8,0x8E,
+0xC1,0x40,0xC5,0xD7,0x13,0x16,0x5A,0x72,0xC7,0xB7,0x69,0x01,0xC4,0x7A,0xB1,0x83,
+0x01,0x68,0x7D,0x8D,0x41,0xA1,0x94,0x18,0xC1,0x25,0x5C,0xFC,0xF0,0xFE,0x83,0x02,
+0x87,0x7C,0x0D,0x0D,0xCF,0x2E,0x08,0x5C,0x4A,0x40,0x0D,0x3E,0xEC,0x81,0x61,0xE6,
+0x24,0xDB,0xCA,0xE0,0x0E,0x2D,0x07,0xB2,0x3E,0x56,0xDC,0x8D,0xF5,0x41,0x85,0x07,
+0x48,0x9B,0x0C,0x0B,0xCB,0x49,0x3F,0x7D,0xEC,0xB7,0xFD,0xCB,0x8D,0x67,0x89,0x1A,
+0xAB,0xED,0xBB,0x1E,0xA3,0x00,0x08,0x08,0x17,0x2A,0x82,0x5C,0x31,0x5D,0x46,0x8A,
+0x2D,0x0F,0x86,0x9B,0x74,0xD9,0x45,0xFB,0xD4,0x40,0xB1,0x7A,0xAA,0x68,0x2D,0x86,
+0xB2,0x99,0x22,0xE1,0xC1,0x2B,0xC7,0x9C,0xF8,0xF3,0x5F,0xA8,0x82,0x12,0xEB,0x19,
+0x11,0x2D,
+};
+
+
+/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA */
+/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA */
+
+
+const unsigned char GeoTrust_Universal_CA_certificate[1388]={
+0x30,0x82,0x05,0x68,0x30,0x82,0x03,0x50,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x45,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,
+0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
+0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x03,0x13,
+0x15,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,
+0x73,0x61,0x6C,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x33,0x30,0x34,
+0x30,0x35,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x30,0x34,0x30,
+0x35,0x30,0x30,0x30,0x30,0x5A,0x30,0x45,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,
+0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1E,0x30,
+0x1C,0x06,0x03,0x55,0x04,0x03,0x13,0x15,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,
+0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x30,0x82,0x02,
+0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,
+0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xA6,0x15,
+0x55,0xA0,0xA3,0xC6,0xE0,0x1F,0x8C,0x9D,0x21,0x50,0xD7,0xC1,0xBE,0x2B,0x5B,0xB5,
+0xA4,0x9E,0xA1,0xD9,0x72,0x58,0xBD,0x00,0x1B,0x4C,0xBF,0x61,0xC9,0x14,0x1D,0x45,
+0x82,0xAB,0xC6,0x1D,0x80,0xD6,0x3D,0xEB,0x10,0x9C,0x3A,0xAF,0x6D,0x24,0xF8,0xBC,
+0x71,0x01,0x9E,0x06,0xF5,0x7C,0x5F,0x1E,0xC1,0x0E,0x55,0xCA,0x83,0x9A,0x59,0x30,
+0xAE,0x19,0xCB,0x30,0x48,0x95,0xED,0x22,0x37,0x8D,0xF4,0x4A,0x9A,0x72,0x66,0x3E,
+0xAD,0x95,0xC0,0xE0,0x16,0x00,0xE0,0x10,0x1F,0x2B,0x31,0x0E,0xD7,0x94,0x54,0xD3,
+0x42,0x33,0xA0,0x34,0x1D,0x1E,0x45,0x76,0xDD,0x4F,0xCA,0x18,0x37,0xEC,0x85,0x15,
+0x7A,0x19,0x08,0xFC,0xD5,0xC7,0x9C,0xF0,0xF2,0xA9,0x2E,0x10,0xA9,0x92,0xE6,0x3D,
+0x58,0x3D,0xA9,0x16,0x68,0x3C,0x2F,0x75,0x21,0x18,0x7F,0x28,0x77,0xA5,0xE1,0x61,
+0x17,0xB7,0xA6,0xE9,0xF8,0x1E,0x99,0xDB,0x73,0x6E,0xF4,0x0A,0xA2,0x21,0x6C,0xEE,
+0xDA,0xAA,0x85,0x92,0x66,0xAF,0xF6,0x7A,0x6B,0x82,0xDA,0xBA,0x22,0x08,0x35,0x0F,
+0xCF,0x42,0xF1,0x35,0xFA,0x6A,0xEE,0x7E,0x2B,0x25,0xCC,0x3A,0x11,0xE4,0x6D,0xAF,
+0x73,0xB2,0x76,0x1D,0xAD,0xD0,0xB2,0x78,0x67,0x1A,0xA4,0x39,0x1C,0x51,0x0B,0x67,
+0x56,0x83,0xFD,0x38,0x5D,0x0D,0xCE,0xDD,0xF0,0xBB,0x2B,0x96,0x1F,0xDE,0x7B,0x32,
+0x52,0xFD,0x1D,0xBB,0xB5,0x06,0xA1,0xB2,0x21,0x5E,0xA5,0xD6,0x95,0x68,0x7F,0xF0,
+0x99,0x9E,0xDC,0x45,0x08,0x3E,0xE7,0xD2,0x09,0x0D,0x35,0x94,0xDD,0x80,0x4E,0x53,
+0x97,0xD7,0xB5,0x09,0x44,0x20,0x64,0x16,0x17,0x03,0x02,0x4C,0x53,0x0D,0x68,0xDE,
+0xD5,0xAA,0x72,0x4D,0x93,0x6D,0x82,0x0E,0xDB,0x9C,0xBD,0xCF,0xB4,0xF3,0x5C,0x5D,
+0x54,0x7A,0x69,0x09,0x96,0xD6,0xDB,0x11,0xC1,0x8D,0x75,0xA8,0xB4,0xCF,0x39,0xC8,
+0xCE,0x3C,0xBC,0x24,0x7C,0xE6,0x62,0xCA,0xE1,0xBD,0x7D,0xA7,0xBD,0x57,0x65,0x0B,
+0xE4,0xFE,0x25,0xED,0xB6,0x69,0x10,0xDC,0x28,0x1A,0x46,0xBD,0x01,0x1D,0xD0,0x97,
+0xB5,0xE1,0x98,0x3B,0xC0,0x37,0x64,0xD6,0x3D,0x94,0xEE,0x0B,0xE1,0xF5,0x28,0xAE,
+0x0B,0x56,0xBF,0x71,0x8B,0x23,0x29,0x41,0x8E,0x86,0xC5,0x4B,0x52,0x7B,0xD8,0x71,
+0xAB,0x1F,0x8A,0x15,0xA6,0x3B,0x83,0x5A,0xD7,0x58,0x01,0x51,0xC6,0x4C,0x41,0xD9,
+0x7F,0xD8,0x41,0x67,0x72,0xA2,0x28,0xDF,0x60,0x83,0xA9,0x9E,0xC8,0x7B,0xFC,0x53,
+0x73,0x72,0x59,0xF5,0x93,0x7A,0x17,0x76,0x0E,0xCE,0xF7,0xE5,0x5C,0xD9,0x0B,0x55,
+0x34,0xA2,0xAA,0x5B,0xB5,0x6A,0x54,0xE7,0x13,0xCA,0x57,0xEC,0x97,0x6D,0xF4,0x5E,
+0x06,0x2F,0x45,0x8B,0x58,0xD4,0x23,0x16,0x92,0xE4,0x16,0x6E,0x28,0x63,0x59,0x30,
+0xDF,0x50,0x01,0x9C,0x63,0x89,0x1A,0x9F,0xDB,0x17,0x94,0x82,0x70,0x37,0xC3,0x24,
+0x9E,0x9A,0x47,0xD6,0x5A,0xCA,0x4E,0xA8,0x69,0x89,0x72,0x1F,0x91,0x6C,0xDB,0x7E,
+0x9E,0x1B,0xAD,0xC7,0x1F,0x73,0xDD,0x2C,0x4F,0x19,0x65,0xFD,0x7F,0x93,0x40,0x10,
+0x2E,0xD2,0xF0,0xED,0x3C,0x9E,0x2E,0x28,0x3E,0x69,0x26,0x33,0xC5,0x7B,0x02,0x03,
+0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,
+0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,
+0x16,0x04,0x14,0xDA,0xBB,0x2E,0xAA,0xB0,0x0C,0xB8,0x88,0x26,0x51,0x74,0x5C,0x6D,
+0x03,0xD3,0xC0,0xD8,0x8F,0x7A,0xD6,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,
+0x30,0x16,0x80,0x14,0xDA,0xBB,0x2E,0xAA,0xB0,0x0C,0xB8,0x88,0x26,0x51,0x74,0x5C,
+0x6D,0x03,0xD3,0xC0,0xD8,0x8F,0x7A,0xD6,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,
+0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x31,0x78,0xE6,0xC7,
+0xB5,0xDF,0xB8,0x94,0x40,0xC9,0x71,0xC4,0xA8,0x35,0xEC,0x46,0x1D,0xC2,0x85,0xF3,
+0x28,0x58,0x86,0xB0,0x0B,0xFC,0x8E,0xB2,0x39,0x8F,0x44,0x55,0xAB,0x64,0x84,0x5C,
+0x69,0xA9,0xD0,0x9A,0x38,0x3C,0xFA,0xE5,0x1F,0x35,0xE5,0x44,0xE3,0x80,0x79,0x94,
+0x68,0xA4,0xBB,0xC4,0x9F,0x3D,0xE1,0x34,0xCD,0x30,0x46,0x8B,0x54,0x2B,0x95,0xA5,
+0xEF,0xF7,0x3F,0x99,0x84,0xFD,0x35,0xE6,0xCF,0x31,0xC6,0xDC,0x6A,0xBF,0xA7,0xD7,
+0x23,0x08,0xE1,0x98,0x5E,0xC3,0x5A,0x08,0x76,0xA9,0xA6,0xAF,0x77,0x2F,0xB7,0x60,
+0xBD,0x44,0x46,0x6A,0xEF,0x97,0xFF,0x73,0x95,0xC1,0x8E,0xE8,0x93,0xFB,0xFD,0x31,
+0xB7,0xEC,0x57,0x11,0x11,0x45,0x9B,0x30,0xF1,0x1A,0x88,0x39,0xC1,0x4F,0x3C,0xA7,
+0x00,0xD5,0xC7,0xFC,0xAB,0x6D,0x80,0x22,0x70,0xA5,0x0C,0xE0,0x5D,0x04,0x29,0x02,
+0xFB,0xCB,0xA0,0x91,0xD1,0x7C,0xD6,0xC3,0x7E,0x50,0xD5,0x9D,0x58,0xBE,0x41,0x38,
+0xEB,0xB9,0x75,0x3C,0x15,0xD9,0x9B,0xC9,0x4A,0x83,0x59,0xC0,0xDA,0x53,0xFD,0x33,
+0xBB,0x36,0x18,0x9B,0x85,0x0F,0x15,0xDD,0xEE,0x2D,0xAC,0x76,0x93,0xB9,0xD9,0x01,
+0x8D,0x48,0x10,0xA8,0xFB,0xF5,0x38,0x86,0xF1,0xDB,0x0A,0xC6,0xBD,0x84,0xA3,0x23,
+0x41,0xDE,0xD6,0x77,0x6F,0x85,0xD4,0x85,0x1C,0x50,0xE0,0xAE,0x51,0x8A,0xBA,0x8D,
+0x3E,0x76,0xE2,0xB9,0xCA,0x27,0xF2,0x5F,0x9F,0xEF,0x6E,0x59,0x0D,0x06,0xD8,0x2B,
+0x17,0xA4,0xD2,0x7C,0x6B,0xBB,0x5F,0x14,0x1A,0x48,0x8F,0x1A,0x4C,0xE7,0xB3,0x47,
+0x1C,0x8E,0x4C,0x45,0x2B,0x20,0xEE,0x48,0xDF,0xE7,0xDD,0x09,0x8E,0x18,0xA8,0xDA,
+0x40,0x8D,0x92,0x26,0x11,0x53,0x61,0x73,0x5D,0xEB,0xBD,0xE7,0xC4,0x4D,0x29,0x37,
+0x61,0xEB,0xAC,0x39,0x2D,0x67,0x2E,0x16,0xD6,0xF5,0x00,0x83,0x85,0xA1,0xCC,0x7F,
+0x76,0xC4,0x7D,0xE4,0xB7,0x4B,0x66,0xEF,0x03,0x45,0x60,0x69,0xB6,0x0C,0x52,0x96,
+0x92,0x84,0x5E,0xA6,0xA3,0xB5,0xA4,0x3E,0x2B,0xD9,0xCC,0xD8,0x1B,0x47,0xAA,0xF2,
+0x44,0xDA,0x4F,0xF9,0x03,0xE8,0xF0,0x14,0xCB,0x3F,0xF3,0x83,0xDE,0xD0,0xC1,0x54,
+0xE3,0xB7,0xE8,0x0A,0x37,0x4D,0x8B,0x20,0x59,0x03,0x30,0x19,0xA1,0x2C,0xC8,0xBD,
+0x11,0x1F,0xDF,0xAE,0xC9,0x4A,0xC5,0xF3,0x27,0x66,0x66,0x86,0xAC,0x68,0x91,0xFF,
+0xD9,0xE6,0x53,0x1C,0x0F,0x8B,0x5C,0x69,0x65,0x0A,0x26,0xC8,0x1E,0x34,0xC3,0x5D,
+0x51,0x7B,0xD7,0xA9,0x9C,0x06,0xA1,0x36,0xDD,0xD5,0x89,0x94,0xBC,0xD9,0xE4,0x2D,
+0x0C,0x5E,0x09,0x6C,0x08,0x97,0x7C,0xA3,0x3D,0x7C,0x93,0xFF,0x3F,0xA1,0x14,0xA7,
+0xCF,0xB5,0x5D,0xEB,0xDB,0xDB,0x1C,0xC4,0x76,0xDF,0x88,0xB9,0xBD,0x45,0x05,0x95,
+0x1B,0xAE,0xFC,0x46,0x6A,0x4C,0xAF,0x48,0xE3,0xCE,0xAE,0x0F,0xD2,0x7E,0xEB,0xE6,
+0x6C,0x9C,0x4F,0x81,0x6A,0x7A,0x64,0xAC,0xBB,0x3E,0xD5,0xE7,0xCB,0x76,0x2E,0xC5,
+0xA7,0x48,0xC1,0x5C,0x90,0x0F,0xCB,0xC8,0x3F,0xFA,0xE6,0x32,0xE1,0x8D,0x1B,0x6F,
+0xA4,0xE6,0x8E,0xD8,0xF9,0x29,0x48,0x8A,0xCE,0x73,0xFE,0x2C,
+};
+
+
+/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA 2 */
+/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA 2 */
+
+
+const unsigned char GeoTrust_Universal_CA_2_certificate[1392]={
+0x30,0x82,0x05,0x6C,0x30,0x82,0x03,0x54,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x47,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,
+0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,
+0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,
+0x17,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,
+0x73,0x61,0x6C,0x20,0x43,0x41,0x20,0x32,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x33,
+0x30,0x34,0x30,0x35,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x30,
+0x34,0x30,0x35,0x30,0x30,0x30,0x30,0x5A,0x30,0x47,0x31,0x0B,0x30,0x09,0x06,0x03,
+0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,
+0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,
+0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x47,0x65,0x6F,0x54,0x72,0x75,
+0x73,0x74,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x20,
+0x32,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,
+0x01,0x00,0xB3,0x54,0x52,0xC1,0xC9,0x3E,0xF2,0xD9,0xDC,0xB1,0x53,0x1A,0x59,0x29,
+0xE7,0xB1,0xC3,0x45,0x28,0xE5,0xD7,0xD1,0xED,0xC5,0xC5,0x4B,0xA1,0xAA,0x74,0x7B,
+0x57,0xAF,0x4A,0x26,0xFC,0xD8,0xF5,0x5E,0xA7,0x6E,0x19,0xDB,0x74,0x0C,0x4F,0x35,
+0x5B,0x32,0x0B,0x01,0xE3,0xDB,0xEB,0x7A,0x77,0x35,0xEA,0xAA,0x5A,0xE0,0xD6,0xE8,
+0xA1,0x57,0x94,0xF0,0x90,0xA3,0x74,0x56,0x94,0x44,0x30,0x03,0x1E,0x5C,0x4E,0x2B,
+0x85,0x26,0x74,0x82,0x7A,0x0C,0x76,0xA0,0x6F,0x4D,0xCE,0x41,0x2D,0xA0,0x15,0x06,
+0x14,0x5F,0xB7,0x42,0xCD,0x7B,0x8F,0x58,0x61,0x34,0xDC,0x2A,0x08,0xF9,0x2E,0xC3,
+0x01,0xA6,0x22,0x44,0x1C,0x4C,0x07,0x82,0xE6,0x5B,0xCE,0xD0,0x4A,0x7C,0x04,0xD3,
+0x19,0x73,0x27,0xF0,0xAA,0x98,0x7F,0x2E,0xAF,0x4E,0xEB,0x87,0x1E,0x24,0x77,0x6A,
+0x5D,0xB6,0xE8,0x5B,0x45,0xBA,0xDC,0xC3,0xA1,0x05,0x6F,0x56,0x8E,0x8F,0x10,0x26,
+0xA5,0x49,0xC3,0x2E,0xD7,0x41,0x87,0x22,0xE0,0x4F,0x86,0xCA,0x60,0xB5,0xEA,0xA1,
+0x63,0xC0,0x01,0x97,0x10,0x79,0xBD,0x00,0x3C,0x12,0x6D,0x2B,0x15,0xB1,0xAC,0x4B,
+0xB1,0xEE,0x18,0xB9,0x4E,0x96,0xDC,0xDC,0x76,0xFF,0x3B,0xBE,0xCF,0x5F,0x03,0xC0,
+0xFC,0x3B,0xE8,0xBE,0x46,0x1B,0xFF,0xDA,0x40,0xC2,0x52,0xF7,0xFE,0xE3,0x3A,0xF7,
+0x6A,0x77,0x35,0xD0,0xDA,0x8D,0xEB,0x5E,0x18,0x6A,0x31,0xC7,0x1E,0xBA,0x3C,0x1B,
+0x28,0xD6,0x6B,0x54,0xC6,0xAA,0x5B,0xD7,0xA2,0x2C,0x1B,0x19,0xCC,0xA2,0x02,0xF6,
+0x9B,0x59,0xBD,0x37,0x6B,0x86,0xB5,0x6D,0x82,0xBA,0xD8,0xEA,0xC9,0x56,0xBC,0xA9,
+0x36,0x58,0xFD,0x3E,0x19,0xF3,0xED,0x0C,0x26,0xA9,0x93,0x38,0xF8,0x4F,0xC1,0x5D,
+0x22,0x06,0xD0,0x97,0xEA,0xE1,0xAD,0xC6,0x55,0xE0,0x81,0x2B,0x28,0x83,0x3A,0xFA,
+0xF4,0x7B,0x21,0x51,0x00,0xBE,0x52,0x38,0xCE,0xCD,0x66,0x79,0xA8,0xF4,0x81,0x56,
+0xE2,0xD0,0x83,0x09,0x47,0x51,0x5B,0x50,0x6A,0xCF,0xDB,0x48,0x1A,0x5D,0x3E,0xF7,
+0xCB,0xF6,0x65,0xF7,0x6C,0xF1,0x95,0xF8,0x02,0x3B,0x32,0x56,0x82,0x39,0x7A,0x5B,
+0xBD,0x2F,0x89,0x1B,0xBF,0xA1,0xB4,0xE8,0xFF,0x7F,0x8D,0x8C,0xDF,0x03,0xF1,0x60,
+0x4E,0x58,0x11,0x4C,0xEB,0xA3,0x3F,0x10,0x2B,0x83,0x9A,0x01,0x73,0xD9,0x94,0x6D,
+0x84,0x00,0x27,0x66,0xAC,0xF0,0x70,0x40,0x09,0x42,0x92,0xAD,0x4F,0x93,0x0D,0x61,
+0x09,0x51,0x24,0xD8,0x92,0xD5,0x0B,0x94,0x61,0xB2,0x87,0xB2,0xED,0xFF,0x9A,0x35,
+0xFF,0x85,0x54,0xCA,0xED,0x44,0x43,0xAC,0x1B,0x3C,0x16,0x6B,0x48,0x4A,0x0A,0x1C,
+0x40,0x88,0x1F,0x92,0xC2,0x0B,0x00,0x05,0xFF,0xF2,0xC8,0x02,0x4A,0xA4,0xAA,0xA9,
+0xCC,0x99,0x96,0x9C,0x2F,0x58,0xE0,0x7D,0xE1,0xBE,0xBB,0x07,0xDC,0x5F,0x04,0x72,
+0x5C,0x31,0x34,0xC3,0xEC,0x5F,0x2D,0xE0,0x3D,0x64,0x90,0x22,0xE6,0xD1,0xEC,0xB8,
+0x2E,0xDD,0x59,0xAE,0xD9,0xA1,0x37,0xBF,0x54,0x35,0xDC,0x73,0x32,0x4F,0x8C,0x04,
+0x1E,0x33,0xB2,0xC9,0x46,0xF1,0xD8,0x5C,0xC8,0x55,0x50,0xC9,0x68,0xBD,0xA8,0xBA,
+0x36,0x09,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,
+0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,
+0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x76,0xF3,0x55,0xE1,0xFA,0xA4,0x36,0xFB,0xF0,
+0x9F,0x5C,0x62,0x71,0xED,0x3C,0xF4,0x47,0x38,0x10,0x2B,0x30,0x1F,0x06,0x03,0x55,
+0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x76,0xF3,0x55,0xE1,0xFA,0xA4,0x36,0xFB,
+0xF0,0x9F,0x5C,0x62,0x71,0xED,0x3C,0xF4,0x47,0x38,0x10,0x2B,0x30,0x0E,0x06,0x03,
+0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x02,0x01,0x00,
+0x66,0xC1,0xC6,0x23,0xF3,0xD9,0xE0,0x2E,0x6E,0x5F,0xE8,0xCF,0xAE,0xB0,0xB0,0x25,
+0x4D,0x2B,0xF8,0x3B,0x58,0x9B,0x40,0x24,0x37,0x5A,0xCB,0xAB,0x16,0x49,0xFF,0xB3,
+0x75,0x79,0x33,0xA1,0x2F,0x6D,0x70,0x17,0x34,0x91,0xFE,0x67,0x7E,0x8F,0xEC,0x9B,
+0xE5,0x5E,0x82,0xA9,0x55,0x1F,0x2F,0xDC,0xD4,0x51,0x07,0x12,0xFE,0xAC,0x16,0x3E,
+0x2C,0x35,0xC6,0x63,0xFC,0xDC,0x10,0xEB,0x0D,0xA3,0xAA,0xD0,0x7C,0xCC,0xD1,0xD0,
+0x2F,0x51,0x2E,0xC4,0x14,0x5A,0xDE,0xE8,0x19,0xE1,0x3E,0xC6,0xCC,0xA4,0x29,0xE7,
+0x2E,0x84,0xAA,0x06,0x30,0x78,0x76,0x54,0x73,0x28,0x98,0x59,0x38,0xE0,0x00,0x0D,
+0x62,0xD3,0x42,0x7D,0x21,0x9F,0xAE,0x3D,0x3A,0x8C,0xD5,0xFA,0x77,0x0D,0x18,0x2B,
+0x16,0x0E,0x5F,0x36,0xE1,0xFC,0x2A,0xB5,0x30,0x24,0xCF,0xE0,0x63,0x0C,0x7B,0x58,
+0x1A,0xFE,0x99,0xBA,0x42,0x12,0xB1,0x91,0xF4,0x7C,0x68,0xE2,0xC8,0xE8,0xAF,0x2C,
+0xEA,0xC9,0x7E,0xAE,0xBB,0x2A,0x3D,0x0D,0x15,0xDC,0x34,0x95,0xB6,0x18,0x74,0xA8,
+0x6A,0x0F,0xC7,0xB4,0xF4,0x13,0xC4,0xE4,0x5B,0xED,0x0A,0xD2,0xA4,0x97,0x4C,0x2A,
+0xED,0x2F,0x6C,0x12,0x89,0x3D,0xF1,0x27,0x70,0xAA,0x6A,0x03,0x52,0x21,0x9F,0x40,
+0xA8,0x67,0x50,0xF2,0xF3,0x5A,0x1F,0xDF,0xDF,0x23,0xF6,0xDC,0x78,0x4E,0xE6,0x98,
+0x4F,0x55,0x3A,0x53,0xE3,0xEF,0xF2,0xF4,0x9F,0xC7,0x7C,0xD8,0x58,0xAF,0x29,0x22,
+0x97,0xB8,0xE0,0xBD,0x91,0x2E,0xB0,0x76,0xEC,0x57,0x11,0xCF,0xEF,0x29,0x44,0xF3,
+0xE9,0x85,0x7A,0x60,0x63,0xE4,0x5D,0x33,0x89,0x17,0xD9,0x31,0xAA,0xDA,0xD6,0xF3,
+0x18,0x35,0x72,0xCF,0x87,0x2B,0x2F,0x63,0x23,0x84,0x5D,0x84,0x8C,0x3F,0x57,0xA0,
+0x88,0xFC,0x99,0x91,0x28,0x26,0x69,0x99,0xD4,0x8F,0x97,0x44,0xBE,0x8E,0xD5,0x48,
+0xB1,0xA4,0x28,0x29,0xF1,0x15,0xB4,0xE1,0xE5,0x9E,0xDD,0xF8,0x8F,0xA6,0x6F,0x26,
+0xD7,0x09,0x3C,0x3A,0x1C,0x11,0x0E,0xA6,0x6C,0x37,0xF7,0xAD,0x44,0x87,0x2C,0x28,
+0xC7,0xD8,0x74,0x82,0xB3,0xD0,0x6F,0x4A,0x57,0xBB,0x35,0x29,0x27,0xA0,0x8B,0xE8,
+0x21,0xA7,0x87,0x64,0x36,0x5D,0xCC,0xD8,0x16,0xAC,0xC7,0xB2,0x27,0x40,0x92,0x55,
+0x38,0x28,0x8D,0x51,0x6E,0xDD,0x14,0x67,0x53,0x6C,0x71,0x5C,0x26,0x84,0x4D,0x75,
+0x5A,0xB6,0x7E,0x60,0x56,0xA9,0x4D,0xAD,0xFB,0x9B,0x1E,0x97,0xF3,0x0D,0xD9,0xD2,
+0x97,0x54,0x77,0xDA,0x3D,0x12,0xB7,0xE0,0x1E,0xEF,0x08,0x06,0xAC,0xF9,0x85,0x87,
+0xE9,0xA2,0xDC,0xAF,0x7E,0x18,0x12,0x83,0xFD,0x56,0x17,0x41,0x2E,0xD5,0x29,0x82,
+0x7D,0x99,0xF4,0x31,0xF6,0x71,0xA9,0xCF,0x2C,0x01,0x27,0xA5,0x05,0xB9,0xAA,0xB2,
+0x48,0x4E,0x2A,0xEF,0x9F,0x93,0x52,0x51,0x95,0x3C,0x52,0x73,0x8E,0x56,0x4C,0x17,
+0x40,0xC0,0x09,0x28,0xE4,0x8B,0x6A,0x48,0x53,0xDB,0xEC,0xCD,0x55,0x55,0xF1,0xC6,
+0xF8,0xE9,0xA2,0x2C,0x4C,0xA6,0xD1,0x26,0x5F,0x7E,0xAF,0x5A,0x4C,0xDA,0x1F,0xA6,
+0xF2,0x1C,0x2C,0x7E,0xAE,0x02,0x16,0xD2,0x56,0xD0,0x2F,0x57,0x53,0x47,0xE8,0x92,
+};
+
+
+/* subject:/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA */
+/* issuer :/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA */
+
+
+const unsigned char GlobalSign_Root_CA_certificate[889]={
+0x30,0x82,0x03,0x75,0x30,0x82,0x02,0x5D,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
+0x00,0x00,0x00,0x00,0x01,0x15,0x4B,0x5A,0xC3,0x94,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x57,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x42,0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76,
+0x2D,0x73,0x61,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F,
+0x6F,0x74,0x20,0x43,0x41,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12,
+0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,
+0x43,0x41,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x39,0x30,0x31,0x31,0x32,0x30,0x30,
+0x30,0x30,0x5A,0x17,0x0D,0x32,0x38,0x30,0x31,0x32,0x38,0x31,0x32,0x30,0x30,0x30,
+0x30,0x5A,0x30,0x57,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x42,
+0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62,
+0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76,0x2D,0x73,0x61,0x31,0x10,0x30,0x0E,
+0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x31,0x1B,
+0x30,0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,
+0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,
+0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDA,0x0E,0xE6,0x99,
+0x8D,0xCE,0xA3,0xE3,0x4F,0x8A,0x7E,0xFB,0xF1,0x8B,0x83,0x25,0x6B,0xEA,0x48,0x1F,
+0xF1,0x2A,0xB0,0xB9,0x95,0x11,0x04,0xBD,0xF0,0x63,0xD1,0xE2,0x67,0x66,0xCF,0x1C,
+0xDD,0xCF,0x1B,0x48,0x2B,0xEE,0x8D,0x89,0x8E,0x9A,0xAF,0x29,0x80,0x65,0xAB,0xE9,
+0xC7,0x2D,0x12,0xCB,0xAB,0x1C,0x4C,0x70,0x07,0xA1,0x3D,0x0A,0x30,0xCD,0x15,0x8D,
+0x4F,0xF8,0xDD,0xD4,0x8C,0x50,0x15,0x1C,0xEF,0x50,0xEE,0xC4,0x2E,0xF7,0xFC,0xE9,
+0x52,0xF2,0x91,0x7D,0xE0,0x6D,0xD5,0x35,0x30,0x8E,0x5E,0x43,0x73,0xF2,0x41,0xE9,
+0xD5,0x6A,0xE3,0xB2,0x89,0x3A,0x56,0x39,0x38,0x6F,0x06,0x3C,0x88,0x69,0x5B,0x2A,
+0x4D,0xC5,0xA7,0x54,0xB8,0x6C,0x89,0xCC,0x9B,0xF9,0x3C,0xCA,0xE5,0xFD,0x89,0xF5,
+0x12,0x3C,0x92,0x78,0x96,0xD6,0xDC,0x74,0x6E,0x93,0x44,0x61,0xD1,0x8D,0xC7,0x46,
+0xB2,0x75,0x0E,0x86,0xE8,0x19,0x8A,0xD5,0x6D,0x6C,0xD5,0x78,0x16,0x95,0xA2,0xE9,
+0xC8,0x0A,0x38,0xEB,0xF2,0x24,0x13,0x4F,0x73,0x54,0x93,0x13,0x85,0x3A,0x1B,0xBC,
+0x1E,0x34,0xB5,0x8B,0x05,0x8C,0xB9,0x77,0x8B,0xB1,0xDB,0x1F,0x20,0x91,0xAB,0x09,
+0x53,0x6E,0x90,0xCE,0x7B,0x37,0x74,0xB9,0x70,0x47,0x91,0x22,0x51,0x63,0x16,0x79,
+0xAE,0xB1,0xAE,0x41,0x26,0x08,0xC8,0x19,0x2B,0xD1,0x46,0xAA,0x48,0xD6,0x64,0x2A,
+0xD7,0x83,0x34,0xFF,0x2C,0x2A,0xC1,0x6C,0x19,0x43,0x4A,0x07,0x85,0xE7,0xD3,0x7C,
+0xF6,0x21,0x68,0xEF,0xEA,0xF2,0x52,0x9F,0x7F,0x93,0x90,0xCF,0x02,0x03,0x01,0x00,
+0x01,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,
+0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
+0x14,0x60,0x7B,0x66,0x1A,0x45,0x0D,0x97,0xCA,0x89,0x50,0x2F,0x7D,0x04,0xCD,0x34,
+0xA8,0xFF,0xFC,0xFD,0x4B,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xD6,0x73,0xE7,0x7C,0x4F,0x76,0xD0,
+0x8D,0xBF,0xEC,0xBA,0xA2,0xBE,0x34,0xC5,0x28,0x32,0xB5,0x7C,0xFC,0x6C,0x9C,0x2C,
+0x2B,0xBD,0x09,0x9E,0x53,0xBF,0x6B,0x5E,0xAA,0x11,0x48,0xB6,0xE5,0x08,0xA3,0xB3,
+0xCA,0x3D,0x61,0x4D,0xD3,0x46,0x09,0xB3,0x3E,0xC3,0xA0,0xE3,0x63,0x55,0x1B,0xF2,
+0xBA,0xEF,0xAD,0x39,0xE1,0x43,0xB9,0x38,0xA3,0xE6,0x2F,0x8A,0x26,0x3B,0xEF,0xA0,
+0x50,0x56,0xF9,0xC6,0x0A,0xFD,0x38,0xCD,0xC4,0x0B,0x70,0x51,0x94,0x97,0x98,0x04,
+0xDF,0xC3,0x5F,0x94,0xD5,0x15,0xC9,0x14,0x41,0x9C,0xC4,0x5D,0x75,0x64,0x15,0x0D,
+0xFF,0x55,0x30,0xEC,0x86,0x8F,0xFF,0x0D,0xEF,0x2C,0xB9,0x63,0x46,0xF6,0xAA,0xFC,
+0xDF,0xBC,0x69,0xFD,0x2E,0x12,0x48,0x64,0x9A,0xE0,0x95,0xF0,0xA6,0xEF,0x29,0x8F,
+0x01,0xB1,0x15,0xB5,0x0C,0x1D,0xA5,0xFE,0x69,0x2C,0x69,0x24,0x78,0x1E,0xB3,0xA7,
+0x1C,0x71,0x62,0xEE,0xCA,0xC8,0x97,0xAC,0x17,0x5D,0x8A,0xC2,0xF8,0x47,0x86,0x6E,
+0x2A,0xC4,0x56,0x31,0x95,0xD0,0x67,0x89,0x85,0x2B,0xF9,0x6C,0xA6,0x5D,0x46,0x9D,
+0x0C,0xAA,0x82,0xE4,0x99,0x51,0xDD,0x70,0xB7,0xDB,0x56,0x3D,0x61,0xE4,0x6A,0xE1,
+0x5C,0xD6,0xF6,0xFE,0x3D,0xDE,0x41,0xCC,0x07,0xAE,0x63,0x52,0xBF,0x53,0x53,0xF4,
+0x2B,0xE9,0xC7,0xFD,0xB6,0xF7,0x82,0x5F,0x85,0xD2,0x41,0x18,0xDB,0x81,0xB3,0x04,
+0x1C,0xC5,0x1F,0xA4,0x80,0x6F,0x15,0x20,0xC9,0xDE,0x0C,0x88,0x0A,0x1D,0xD6,0x66,
+0x55,0xE2,0xFC,0x48,0xC9,0x29,0x26,0x69,0xE0,
+};
+
+
+/* subject:/OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign */
+/* issuer :/OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign */
+
+
+const unsigned char GlobalSign_Root_CA___R2_certificate[958]={
+0x30,0x82,0x03,0xBA,0x30,0x82,0x02,0xA2,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
+0x00,0x00,0x00,0x00,0x01,0x0F,0x86,0x26,0xE6,0x0D,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
+0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x32,0x31,0x13,0x30,
+0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,
+0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,
+0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x31,
+0x35,0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x31,0x31,0x32,0x31,0x35,
+0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,
+0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x32,0x31,0x13,0x30,0x11,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
+0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,
+0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,
+0x0A,0x02,0x82,0x01,0x01,0x00,0xA6,0xCF,0x24,0x0E,0xBE,0x2E,0x6F,0x28,0x99,0x45,
+0x42,0xC4,0xAB,0x3E,0x21,0x54,0x9B,0x0B,0xD3,0x7F,0x84,0x70,0xFA,0x12,0xB3,0xCB,
+0xBF,0x87,0x5F,0xC6,0x7F,0x86,0xD3,0xB2,0x30,0x5C,0xD6,0xFD,0xAD,0xF1,0x7B,0xDC,
+0xE5,0xF8,0x60,0x96,0x09,0x92,0x10,0xF5,0xD0,0x53,0xDE,0xFB,0x7B,0x7E,0x73,0x88,
+0xAC,0x52,0x88,0x7B,0x4A,0xA6,0xCA,0x49,0xA6,0x5E,0xA8,0xA7,0x8C,0x5A,0x11,0xBC,
+0x7A,0x82,0xEB,0xBE,0x8C,0xE9,0xB3,0xAC,0x96,0x25,0x07,0x97,0x4A,0x99,0x2A,0x07,
+0x2F,0xB4,0x1E,0x77,0xBF,0x8A,0x0F,0xB5,0x02,0x7C,0x1B,0x96,0xB8,0xC5,0xB9,0x3A,
+0x2C,0xBC,0xD6,0x12,0xB9,0xEB,0x59,0x7D,0xE2,0xD0,0x06,0x86,0x5F,0x5E,0x49,0x6A,
+0xB5,0x39,0x5E,0x88,0x34,0xEC,0xBC,0x78,0x0C,0x08,0x98,0x84,0x6C,0xA8,0xCD,0x4B,
+0xB4,0xA0,0x7D,0x0C,0x79,0x4D,0xF0,0xB8,0x2D,0xCB,0x21,0xCA,0xD5,0x6C,0x5B,0x7D,
+0xE1,0xA0,0x29,0x84,0xA1,0xF9,0xD3,0x94,0x49,0xCB,0x24,0x62,0x91,0x20,0xBC,0xDD,
+0x0B,0xD5,0xD9,0xCC,0xF9,0xEA,0x27,0x0A,0x2B,0x73,0x91,0xC6,0x9D,0x1B,0xAC,0xC8,
+0xCB,0xE8,0xE0,0xA0,0xF4,0x2F,0x90,0x8B,0x4D,0xFB,0xB0,0x36,0x1B,0xF6,0x19,0x7A,
+0x85,0xE0,0x6D,0xF2,0x61,0x13,0x88,0x5C,0x9F,0xE0,0x93,0x0A,0x51,0x97,0x8A,0x5A,
+0xCE,0xAF,0xAB,0xD5,0xF7,0xAA,0x09,0xAA,0x60,0xBD,0xDC,0xD9,0x5F,0xDF,0x72,0xA9,
+0x60,0x13,0x5E,0x00,0x01,0xC9,0x4A,0xFA,0x3F,0xA4,0xEA,0x07,0x03,0x21,0x02,0x8E,
+0x82,0xCA,0x03,0xC2,0x9B,0x8F,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0x9C,0x30,0x81,
+0x99,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,
+0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,
+0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9B,0xE2,0x07,
+0x57,0x67,0x1C,0x1E,0xC0,0x6A,0x06,0xDE,0x59,0xB4,0x9A,0x2D,0xDF,0xDC,0x19,0x86,
+0x2E,0x30,0x36,0x06,0x03,0x55,0x1D,0x1F,0x04,0x2F,0x30,0x2D,0x30,0x2B,0xA0,0x29,
+0xA0,0x27,0x86,0x25,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x67,
+0x6C,0x6F,0x62,0x61,0x6C,0x73,0x69,0x67,0x6E,0x2E,0x6E,0x65,0x74,0x2F,0x72,0x6F,
+0x6F,0x74,0x2D,0x72,0x32,0x2E,0x63,0x72,0x6C,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,
+0x04,0x18,0x30,0x16,0x80,0x14,0x9B,0xE2,0x07,0x57,0x67,0x1C,0x1E,0xC0,0x6A,0x06,
+0xDE,0x59,0xB4,0x9A,0x2D,0xDF,0xDC,0x19,0x86,0x2E,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x99,0x81,
+0x53,0x87,0x1C,0x68,0x97,0x86,0x91,0xEC,0xE0,0x4A,0xB8,0x44,0x0B,0xAB,0x81,0xAC,
+0x27,0x4F,0xD6,0xC1,0xB8,0x1C,0x43,0x78,0xB3,0x0C,0x9A,0xFC,0xEA,0x2C,0x3C,0x6E,
+0x61,0x1B,0x4D,0x4B,0x29,0xF5,0x9F,0x05,0x1D,0x26,0xC1,0xB8,0xE9,0x83,0x00,0x62,
+0x45,0xB6,0xA9,0x08,0x93,0xB9,0xA9,0x33,0x4B,0x18,0x9A,0xC2,0xF8,0x87,0x88,0x4E,
+0xDB,0xDD,0x71,0x34,0x1A,0xC1,0x54,0xDA,0x46,0x3F,0xE0,0xD3,0x2A,0xAB,0x6D,0x54,
+0x22,0xF5,0x3A,0x62,0xCD,0x20,0x6F,0xBA,0x29,0x89,0xD7,0xDD,0x91,0xEE,0xD3,0x5C,
+0xA2,0x3E,0xA1,0x5B,0x41,0xF5,0xDF,0xE5,0x64,0x43,0x2D,0xE9,0xD5,0x39,0xAB,0xD2,
+0xA2,0xDF,0xB7,0x8B,0xD0,0xC0,0x80,0x19,0x1C,0x45,0xC0,0x2D,0x8C,0xE8,0xF8,0x2D,
+0xA4,0x74,0x56,0x49,0xC5,0x05,0xB5,0x4F,0x15,0xDE,0x6E,0x44,0x78,0x39,0x87,0xA8,
+0x7E,0xBB,0xF3,0x79,0x18,0x91,0xBB,0xF4,0x6F,0x9D,0xC1,0xF0,0x8C,0x35,0x8C,0x5D,
+0x01,0xFB,0xC3,0x6D,0xB9,0xEF,0x44,0x6D,0x79,0x46,0x31,0x7E,0x0A,0xFE,0xA9,0x82,
+0xC1,0xFF,0xEF,0xAB,0x6E,0x20,0xC4,0x50,0xC9,0x5F,0x9D,0x4D,0x9B,0x17,0x8C,0x0C,
+0xE5,0x01,0xC9,0xA0,0x41,0x6A,0x73,0x53,0xFA,0xA5,0x50,0xB4,0x6E,0x25,0x0F,0xFB,
+0x4C,0x18,0xF4,0xFD,0x52,0xD9,0x8E,0x69,0xB1,0xE8,0x11,0x0F,0xDE,0x88,0xD8,0xFB,
+0x1D,0x49,0xF7,0xAA,0xDE,0x95,0xCF,0x20,0x78,0xC2,0x60,0x12,0xDB,0x25,0x40,0x8C,
+0x6A,0xFC,0x7E,0x42,0x38,0x40,0x64,0x12,0xF7,0x9E,0x81,0xE1,0x93,0x2E,
+};
+
+
+/* subject:/OU=GlobalSign Root CA - R3/O=GlobalSign/CN=GlobalSign */
+/* issuer :/OU=GlobalSign Root CA - R3/O=GlobalSign/CN=GlobalSign */
+
+
+const unsigned char GlobalSign_Root_CA___R3_certificate[867]={
+0x30,0x82,0x03,0x5F,0x30,0x82,0x02,0x47,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04,
+0x00,0x00,0x00,0x00,0x01,0x21,0x58,0x53,0x08,0xA2,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
+0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x33,0x31,0x13,0x30,
+0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,
+0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,
+0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x33,0x31,
+0x38,0x31,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x31,0x38,
+0x31,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,
+0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x33,0x31,0x13,0x30,0x11,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,
+0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,
+0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,
+0x0A,0x02,0x82,0x01,0x01,0x00,0xCC,0x25,0x76,0x90,0x79,0x06,0x78,0x22,0x16,0xF5,
+0xC0,0x83,0xB6,0x84,0xCA,0x28,0x9E,0xFD,0x05,0x76,0x11,0xC5,0xAD,0x88,0x72,0xFC,
+0x46,0x02,0x43,0xC7,0xB2,0x8A,0x9D,0x04,0x5F,0x24,0xCB,0x2E,0x4B,0xE1,0x60,0x82,
+0x46,0xE1,0x52,0xAB,0x0C,0x81,0x47,0x70,0x6C,0xDD,0x64,0xD1,0xEB,0xF5,0x2C,0xA3,
+0x0F,0x82,0x3D,0x0C,0x2B,0xAE,0x97,0xD7,0xB6,0x14,0x86,0x10,0x79,0xBB,0x3B,0x13,
+0x80,0x77,0x8C,0x08,0xE1,0x49,0xD2,0x6A,0x62,0x2F,0x1F,0x5E,0xFA,0x96,0x68,0xDF,
+0x89,0x27,0x95,0x38,0x9F,0x06,0xD7,0x3E,0xC9,0xCB,0x26,0x59,0x0D,0x73,0xDE,0xB0,
+0xC8,0xE9,0x26,0x0E,0x83,0x15,0xC6,0xEF,0x5B,0x8B,0xD2,0x04,0x60,0xCA,0x49,0xA6,
+0x28,0xF6,0x69,0x3B,0xF6,0xCB,0xC8,0x28,0x91,0xE5,0x9D,0x8A,0x61,0x57,0x37,0xAC,
+0x74,0x14,0xDC,0x74,0xE0,0x3A,0xEE,0x72,0x2F,0x2E,0x9C,0xFB,0xD0,0xBB,0xBF,0xF5,
+0x3D,0x00,0xE1,0x06,0x33,0xE8,0x82,0x2B,0xAE,0x53,0xA6,0x3A,0x16,0x73,0x8C,0xDD,
+0x41,0x0E,0x20,0x3A,0xC0,0xB4,0xA7,0xA1,0xE9,0xB2,0x4F,0x90,0x2E,0x32,0x60,0xE9,
+0x57,0xCB,0xB9,0x04,0x92,0x68,0x68,0xE5,0x38,0x26,0x60,0x75,0xB2,0x9F,0x77,0xFF,
+0x91,0x14,0xEF,0xAE,0x20,0x49,0xFC,0xAD,0x40,0x15,0x48,0xD1,0x02,0x31,0x61,0x19,
+0x5E,0xB8,0x97,0xEF,0xAD,0x77,0xB7,0x64,0x9A,0x7A,0xBF,0x5F,0xC1,0x13,0xEF,0x9B,
+0x62,0xFB,0x0D,0x6C,0xE0,0x54,0x69,0x16,0xA9,0x03,0xDA,0x6E,0xE9,0x83,0x93,0x71,
+0x76,0xC6,0x69,0x85,0x82,0x17,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,
+0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,
+0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
+0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x8F,0xF0,0x4B,0x7F,0xA8,
+0x2E,0x45,0x24,0xAE,0x4D,0x50,0xFA,0x63,0x9A,0x8B,0xDE,0xE2,0xDD,0x1B,0xBC,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,
+0x01,0x01,0x00,0x4B,0x40,0xDB,0xC0,0x50,0xAA,0xFE,0xC8,0x0C,0xEF,0xF7,0x96,0x54,
+0x45,0x49,0xBB,0x96,0x00,0x09,0x41,0xAC,0xB3,0x13,0x86,0x86,0x28,0x07,0x33,0xCA,
+0x6B,0xE6,0x74,0xB9,0xBA,0x00,0x2D,0xAE,0xA4,0x0A,0xD3,0xF5,0xF1,0xF1,0x0F,0x8A,
+0xBF,0x73,0x67,0x4A,0x83,0xC7,0x44,0x7B,0x78,0xE0,0xAF,0x6E,0x6C,0x6F,0x03,0x29,
+0x8E,0x33,0x39,0x45,0xC3,0x8E,0xE4,0xB9,0x57,0x6C,0xAA,0xFC,0x12,0x96,0xEC,0x53,
+0xC6,0x2D,0xE4,0x24,0x6C,0xB9,0x94,0x63,0xFB,0xDC,0x53,0x68,0x67,0x56,0x3E,0x83,
+0xB8,0xCF,0x35,0x21,0xC3,0xC9,0x68,0xFE,0xCE,0xDA,0xC2,0x53,0xAA,0xCC,0x90,0x8A,
+0xE9,0xF0,0x5D,0x46,0x8C,0x95,0xDD,0x7A,0x58,0x28,0x1A,0x2F,0x1D,0xDE,0xCD,0x00,
+0x37,0x41,0x8F,0xED,0x44,0x6D,0xD7,0x53,0x28,0x97,0x7E,0xF3,0x67,0x04,0x1E,0x15,
+0xD7,0x8A,0x96,0xB4,0xD3,0xDE,0x4C,0x27,0xA4,0x4C,0x1B,0x73,0x73,0x76,0xF4,0x17,
+0x99,0xC2,0x1F,0x7A,0x0E,0xE3,0x2D,0x08,0xAD,0x0A,0x1C,0x2C,0xFF,0x3C,0xAB,0x55,
+0x0E,0x0F,0x91,0x7E,0x36,0xEB,0xC3,0x57,0x49,0xBE,0xE1,0x2E,0x2D,0x7C,0x60,0x8B,
+0xC3,0x41,0x51,0x13,0x23,0x9D,0xCE,0xF7,0x32,0x6B,0x94,0x01,0xA8,0x99,0xE7,0x2C,
+0x33,0x1F,0x3A,0x3B,0x25,0xD2,0x86,0x40,0xCE,0x3B,0x2C,0x86,0x78,0xC9,0x61,0x2F,
+0x14,0xBA,0xEE,0xDB,0x55,0x6F,0xDF,0x84,0xEE,0x05,0x09,0x4D,0xBD,0x28,0xD8,0x72,
+0xCE,0xD3,0x62,0x50,0x65,0x1E,0xEB,0x92,0x97,0x83,0x31,0xD9,0xB3,0xB5,0xCA,0x47,
+0x58,0x3F,0x5F,
+};
+
+
+/* subject:/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority */
+/* issuer :/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority */
+
+
+const unsigned char Go_Daddy_Class_2_CA_certificate[1028]={
+0x30,0x82,0x04,0x00,0x30,0x82,0x02,0xE8,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21,
+0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x54,0x68,0x65,0x20,0x47,0x6F,0x20,
+0x44,0x61,0x64,0x64,0x79,0x20,0x47,0x72,0x6F,0x75,0x70,0x2C,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x0B,0x13,0x28,0x47,0x6F,0x20,0x44,
+0x61,0x64,0x64,0x79,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,
+0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x36,0x32,0x39,0x31,0x37,
+0x30,0x36,0x32,0x30,0x5A,0x17,0x0D,0x33,0x34,0x30,0x36,0x32,0x39,0x31,0x37,0x30,
+0x36,0x32,0x30,0x5A,0x30,0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x55,0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x54,0x68,
+0x65,0x20,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x47,0x72,0x6F,0x75,0x70,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x0B,0x13,
+0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,
+0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,
+0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x20,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0D,
+0x00,0x30,0x82,0x01,0x08,0x02,0x82,0x01,0x01,0x00,0xDE,0x9D,0xD7,0xEA,0x57,0x18,
+0x49,0xA1,0x5B,0xEB,0xD7,0x5F,0x48,0x86,0xEA,0xBE,0xDD,0xFF,0xE4,0xEF,0x67,0x1C,
+0xF4,0x65,0x68,0xB3,0x57,0x71,0xA0,0x5E,0x77,0xBB,0xED,0x9B,0x49,0xE9,0x70,0x80,
+0x3D,0x56,0x18,0x63,0x08,0x6F,0xDA,0xF2,0xCC,0xD0,0x3F,0x7F,0x02,0x54,0x22,0x54,
+0x10,0xD8,0xB2,0x81,0xD4,0xC0,0x75,0x3D,0x4B,0x7F,0xC7,0x77,0xC3,0x3E,0x78,0xAB,
+0x1A,0x03,0xB5,0x20,0x6B,0x2F,0x6A,0x2B,0xB1,0xC5,0x88,0x7E,0xC4,0xBB,0x1E,0xB0,
+0xC1,0xD8,0x45,0x27,0x6F,0xAA,0x37,0x58,0xF7,0x87,0x26,0xD7,0xD8,0x2D,0xF6,0xA9,
+0x17,0xB7,0x1F,0x72,0x36,0x4E,0xA6,0x17,0x3F,0x65,0x98,0x92,0xDB,0x2A,0x6E,0x5D,
+0xA2,0xFE,0x88,0xE0,0x0B,0xDE,0x7F,0xE5,0x8D,0x15,0xE1,0xEB,0xCB,0x3A,0xD5,0xE2,
+0x12,0xA2,0x13,0x2D,0xD8,0x8E,0xAF,0x5F,0x12,0x3D,0xA0,0x08,0x05,0x08,0xB6,0x5C,
+0xA5,0x65,0x38,0x04,0x45,0x99,0x1E,0xA3,0x60,0x60,0x74,0xC5,0x41,0xA5,0x72,0x62,
+0x1B,0x62,0xC5,0x1F,0x6F,0x5F,0x1A,0x42,0xBE,0x02,0x51,0x65,0xA8,0xAE,0x23,0x18,
+0x6A,0xFC,0x78,0x03,0xA9,0x4D,0x7F,0x80,0xC3,0xFA,0xAB,0x5A,0xFC,0xA1,0x40,0xA4,
+0xCA,0x19,0x16,0xFE,0xB2,0xC8,0xEF,0x5E,0x73,0x0D,0xEE,0x77,0xBD,0x9A,0xF6,0x79,
+0x98,0xBC,0xB1,0x07,0x67,0xA2,0x15,0x0D,0xDD,0xA0,0x58,0xC6,0x44,0x7B,0x0A,0x3E,
+0x62,0x28,0x5F,0xBA,0x41,0x07,0x53,0x58,0xCF,0x11,0x7E,0x38,0x74,0xC5,0xF8,0xFF,
+0xB5,0x69,0x90,0x8F,0x84,0x74,0xEA,0x97,0x1B,0xAF,0x02,0x01,0x03,0xA3,0x81,0xC0,
+0x30,0x81,0xBD,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xD2,0xC4,
+0xB0,0xD2,0x91,0xD4,0x4C,0x11,0x71,0xB3,0x61,0xCB,0x3D,0xA1,0xFE,0xDD,0xA8,0x6A,
+0xD4,0xE3,0x30,0x81,0x8D,0x06,0x03,0x55,0x1D,0x23,0x04,0x81,0x85,0x30,0x81,0x82,
+0x80,0x14,0xD2,0xC4,0xB0,0xD2,0x91,0xD4,0x4C,0x11,0x71,0xB3,0x61,0xCB,0x3D,0xA1,
+0xFE,0xDD,0xA8,0x6A,0xD4,0xE3,0xA1,0x67,0xA4,0x65,0x30,0x63,0x31,0x0B,0x30,0x09,
+0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,
+0x04,0x0A,0x13,0x18,0x54,0x68,0x65,0x20,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,
+0x20,0x47,0x72,0x6F,0x75,0x70,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,
+0x06,0x03,0x55,0x04,0x0B,0x13,0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,
+0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x82,
+0x01,0x00,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,
+0x82,0x01,0x01,0x00,0x32,0x4B,0xF3,0xB2,0xCA,0x3E,0x91,0xFC,0x12,0xC6,0xA1,0x07,
+0x8C,0x8E,0x77,0xA0,0x33,0x06,0x14,0x5C,0x90,0x1E,0x18,0xF7,0x08,0xA6,0x3D,0x0A,
+0x19,0xF9,0x87,0x80,0x11,0x6E,0x69,0xE4,0x96,0x17,0x30,0xFF,0x34,0x91,0x63,0x72,
+0x38,0xEE,0xCC,0x1C,0x01,0xA3,0x1D,0x94,0x28,0xA4,0x31,0xF6,0x7A,0xC4,0x54,0xD7,
+0xF6,0xE5,0x31,0x58,0x03,0xA2,0xCC,0xCE,0x62,0xDB,0x94,0x45,0x73,0xB5,0xBF,0x45,
+0xC9,0x24,0xB5,0xD5,0x82,0x02,0xAD,0x23,0x79,0x69,0x8D,0xB8,0xB6,0x4D,0xCE,0xCF,
+0x4C,0xCA,0x33,0x23,0xE8,0x1C,0x88,0xAA,0x9D,0x8B,0x41,0x6E,0x16,0xC9,0x20,0xE5,
+0x89,0x9E,0xCD,0x3B,0xDA,0x70,0xF7,0x7E,0x99,0x26,0x20,0x14,0x54,0x25,0xAB,0x6E,
+0x73,0x85,0xE6,0x9B,0x21,0x9D,0x0A,0x6C,0x82,0x0E,0xA8,0xF8,0xC2,0x0C,0xFA,0x10,
+0x1E,0x6C,0x96,0xEF,0x87,0x0D,0xC4,0x0F,0x61,0x8B,0xAD,0xEE,0x83,0x2B,0x95,0xF8,
+0x8E,0x92,0x84,0x72,0x39,0xEB,0x20,0xEA,0x83,0xED,0x83,0xCD,0x97,0x6E,0x08,0xBC,
+0xEB,0x4E,0x26,0xB6,0x73,0x2B,0xE4,0xD3,0xF6,0x4C,0xFE,0x26,0x71,0xE2,0x61,0x11,
+0x74,0x4A,0xFF,0x57,0x1A,0x87,0x0F,0x75,0x48,0x2E,0xCF,0x51,0x69,0x17,0xA0,0x02,
+0x12,0x61,0x95,0xD5,0xD1,0x40,0xB2,0x10,0x4C,0xEE,0xC4,0xAC,0x10,0x43,0xA6,0xA5,
+0x9E,0x0A,0xD5,0x95,0x62,0x9A,0x0D,0xCF,0x88,0x82,0xC5,0x32,0x0C,0xE4,0x2B,0x9F,
+0x45,0xE6,0x0D,0x9F,0x28,0x9C,0xB1,0xB9,0x2A,0x5A,0x57,0xAD,0x37,0x0F,0xAF,0x1D,
+0x7F,0xDB,0xBD,0x9F,
+};
+
+
+/* subject:/C=US/ST=Arizona/L=Scottsdale/O=GoDaddy.com, Inc./CN=Go Daddy Root Certificate Authority - G2 */
+/* issuer :/C=US/ST=Arizona/L=Scottsdale/O=GoDaddy.com, Inc./CN=Go Daddy Root Certificate Authority - G2 */
+
+
+const unsigned char Go_Daddy_Root_Certificate_Authority___G2_certificate[969]={
+0x30,0x82,0x03,0xC5,0x30,0x82,0x02,0xAD,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,
+0x81,0x83,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,
+0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,0x6F,0x6E,
+0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,0x6F,0x74,
+0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x11,0x47,0x6F,0x44,0x61,0x64,0x64,0x79,0x2E,0x63,0x6F,0x6D,0x2C,0x20,0x49,0x6E,
+0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28,0x47,0x6F,0x20,
+0x44,0x61,0x64,0x64,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,
+0x20,0x2D,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x39,0x30,0x31,0x30,
+0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x32,0x33,0x31,0x32,0x33,
+0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x83,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x06,0x13,0x02,0x55,0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,
+0x41,0x72,0x69,0x7A,0x6F,0x6E,0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,
+0x13,0x0A,0x53,0x63,0x6F,0x74,0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x1A,0x30,0x18,
+0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x47,0x6F,0x44,0x61,0x64,0x64,0x79,0x2E,0x63,
+0x6F,0x6D,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,
+0x03,0x13,0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x52,0x6F,0x6F,0x74,
+0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,
+0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xBF,0x71,0x62,0x08,
+0xF1,0xFA,0x59,0x34,0xF7,0x1B,0xC9,0x18,0xA3,0xF7,0x80,0x49,0x58,0xE9,0x22,0x83,
+0x13,0xA6,0xC5,0x20,0x43,0x01,0x3B,0x84,0xF1,0xE6,0x85,0x49,0x9F,0x27,0xEA,0xF6,
+0x84,0x1B,0x4E,0xA0,0xB4,0xDB,0x70,0x98,0xC7,0x32,0x01,0xB1,0x05,0x3E,0x07,0x4E,
+0xEE,0xF4,0xFA,0x4F,0x2F,0x59,0x30,0x22,0xE7,0xAB,0x19,0x56,0x6B,0xE2,0x80,0x07,
+0xFC,0xF3,0x16,0x75,0x80,0x39,0x51,0x7B,0xE5,0xF9,0x35,0xB6,0x74,0x4E,0xA9,0x8D,
+0x82,0x13,0xE4,0xB6,0x3F,0xA9,0x03,0x83,0xFA,0xA2,0xBE,0x8A,0x15,0x6A,0x7F,0xDE,
+0x0B,0xC3,0xB6,0x19,0x14,0x05,0xCA,0xEA,0xC3,0xA8,0x04,0x94,0x3B,0x46,0x7C,0x32,
+0x0D,0xF3,0x00,0x66,0x22,0xC8,0x8D,0x69,0x6D,0x36,0x8C,0x11,0x18,0xB7,0xD3,0xB2,
+0x1C,0x60,0xB4,0x38,0xFA,0x02,0x8C,0xCE,0xD3,0xDD,0x46,0x07,0xDE,0x0A,0x3E,0xEB,
+0x5D,0x7C,0xC8,0x7C,0xFB,0xB0,0x2B,0x53,0xA4,0x92,0x62,0x69,0x51,0x25,0x05,0x61,
+0x1A,0x44,0x81,0x8C,0x2C,0xA9,0x43,0x96,0x23,0xDF,0xAC,0x3A,0x81,0x9A,0x0E,0x29,
+0xC5,0x1C,0xA9,0xE9,0x5D,0x1E,0xB6,0x9E,0x9E,0x30,0x0A,0x39,0xCE,0xF1,0x88,0x80,
+0xFB,0x4B,0x5D,0xCC,0x32,0xEC,0x85,0x62,0x43,0x25,0x34,0x02,0x56,0x27,0x01,0x91,
+0xB4,0x3B,0x70,0x2A,0x3F,0x6E,0xB1,0xE8,0x9C,0x88,0x01,0x7D,0x9F,0xD4,0xF9,0xDB,
+0x53,0x6D,0x60,0x9D,0xBF,0x2C,0xE7,0x58,0xAB,0xB8,0x5F,0x46,0xFC,0xCE,0xC4,0x1B,
+0x03,0x3C,0x09,0xEB,0x49,0x31,0x5C,0x69,0x46,0xB3,0xE0,0x47,0x02,0x03,0x01,0x00,
+0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
+0x14,0x3A,0x9A,0x85,0x07,0x10,0x67,0x28,0xB6,0xEF,0xF6,0xBD,0x05,0x41,0x6E,0x20,
+0xC1,0x94,0xDA,0x0F,0xDE,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,
+0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x99,0xDB,0x5D,0x79,0xD5,0xF9,0x97,
+0x59,0x67,0x03,0x61,0xF1,0x7E,0x3B,0x06,0x31,0x75,0x2D,0xA1,0x20,0x8E,0x4F,0x65,
+0x87,0xB4,0xF7,0xA6,0x9C,0xBC,0xD8,0xE9,0x2F,0xD0,0xDB,0x5A,0xEE,0xCF,0x74,0x8C,
+0x73,0xB4,0x38,0x42,0xDA,0x05,0x7B,0xF8,0x02,0x75,0xB8,0xFD,0xA5,0xB1,0xD7,0xAE,
+0xF6,0xD7,0xDE,0x13,0xCB,0x53,0x10,0x7E,0x8A,0x46,0xD1,0x97,0xFA,0xB7,0x2E,0x2B,
+0x11,0xAB,0x90,0xB0,0x27,0x80,0xF9,0xE8,0x9F,0x5A,0xE9,0x37,0x9F,0xAB,0xE4,0xDF,
+0x6C,0xB3,0x85,0x17,0x9D,0x3D,0xD9,0x24,0x4F,0x79,0x91,0x35,0xD6,0x5F,0x04,0xEB,
+0x80,0x83,0xAB,0x9A,0x02,0x2D,0xB5,0x10,0xF4,0xD8,0x90,0xC7,0x04,0x73,0x40,0xED,
+0x72,0x25,0xA0,0xA9,0x9F,0xEC,0x9E,0xAB,0x68,0x12,0x99,0x57,0xC6,0x8F,0x12,0x3A,
+0x09,0xA4,0xBD,0x44,0xFD,0x06,0x15,0x37,0xC1,0x9B,0xE4,0x32,0xA3,0xED,0x38,0xE8,
+0xD8,0x64,0xF3,0x2C,0x7E,0x14,0xFC,0x02,0xEA,0x9F,0xCD,0xFF,0x07,0x68,0x17,0xDB,
+0x22,0x90,0x38,0x2D,0x7A,0x8D,0xD1,0x54,0xF1,0x69,0xE3,0x5F,0x33,0xCA,0x7A,0x3D,
+0x7B,0x0A,0xE3,0xCA,0x7F,0x5F,0x39,0xE5,0xE2,0x75,0xBA,0xC5,0x76,0x18,0x33,0xCE,
+0x2C,0xF0,0x2F,0x4C,0xAD,0xF7,0xB1,0xE7,0xCE,0x4F,0xA8,0xC4,0x9B,0x4A,0x54,0x06,
+0xC5,0x7F,0x7D,0xD5,0x08,0x0F,0xE2,0x1C,0xFE,0x7E,0x17,0xB8,0xAC,0x5E,0xF6,0xD4,
+0x16,0xB2,0x43,0x09,0x0C,0x4D,0xF6,0xA7,0x6B,0xB4,0x99,0x84,0x65,0xCA,0x7A,0x88,
+0xE2,0xE2,0x44,0xBE,0x5C,0xF7,0xEA,0x1C,0xF5,
+};
+
+
+/* subject:/C=US/O=GTE Corporation/OU=GTE CyberTrust Solutions, Inc./CN=GTE CyberTrust Global Root */
+/* issuer :/C=US/O=GTE Corporation/OU=GTE CyberTrust Solutions, Inc./CN=GTE CyberTrust Global Root */
+
+
+const unsigned char GTE_CyberTrust_Global_Root_certificate[606]={
+0x30,0x82,0x02,0x5A,0x30,0x82,0x01,0xC3,0x02,0x02,0x01,0xA5,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,0x75,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x18,0x30,0x16,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x0F,0x47,0x54,0x45,0x20,0x43,0x6F,0x72,0x70,0x6F,0x72,0x61,
+0x74,0x69,0x6F,0x6E,0x31,0x27,0x30,0x25,0x06,0x03,0x55,0x04,0x0B,0x13,0x1E,0x47,
+0x54,0x45,0x20,0x43,0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x53,0x6F,
+0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x23,0x30,
+0x21,0x06,0x03,0x55,0x04,0x03,0x13,0x1A,0x47,0x54,0x45,0x20,0x43,0x79,0x62,0x65,
+0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,
+0x6F,0x74,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x38,0x31,0x33,0x30,0x30,0x32,0x39,
+0x30,0x30,0x5A,0x17,0x0D,0x31,0x38,0x30,0x38,0x31,0x33,0x32,0x33,0x35,0x39,0x30,
+0x30,0x5A,0x30,0x75,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x18,0x30,0x16,0x06,0x03,0x55,0x04,0x0A,0x13,0x0F,0x47,0x54,0x45,0x20,
+0x43,0x6F,0x72,0x70,0x6F,0x72,0x61,0x74,0x69,0x6F,0x6E,0x31,0x27,0x30,0x25,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x1E,0x47,0x54,0x45,0x20,0x43,0x79,0x62,0x65,0x72,0x54,
+0x72,0x75,0x73,0x74,0x20,0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x2C,0x20,
+0x49,0x6E,0x63,0x2E,0x31,0x23,0x30,0x21,0x06,0x03,0x55,0x04,0x03,0x13,0x1A,0x47,
+0x54,0x45,0x20,0x43,0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,
+0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,
+0x81,0x89,0x02,0x81,0x81,0x00,0x95,0x0F,0xA0,0xB6,0xF0,0x50,0x9C,0xE8,0x7A,0xC7,
+0x88,0xCD,0xDD,0x17,0x0E,0x2E,0xB0,0x94,0xD0,0x1B,0x3D,0x0E,0xF6,0x94,0xC0,0x8A,
+0x94,0xC7,0x06,0xC8,0x90,0x97,0xC8,0xB8,0x64,0x1A,0x7A,0x7E,0x6C,0x3C,0x53,0xE1,
+0x37,0x28,0x73,0x60,0x7F,0xB2,0x97,0x53,0x07,0x9F,0x53,0xF9,0x6D,0x58,0x94,0xD2,
+0xAF,0x8D,0x6D,0x88,0x67,0x80,0xE6,0xED,0xB2,0x95,0xCF,0x72,0x31,0xCA,0xA5,0x1C,
+0x72,0xBA,0x5C,0x02,0xE7,0x64,0x42,0xE7,0xF9,0xA9,0x2C,0xD6,0x3A,0x0D,0xAC,0x8D,
+0x42,0xAA,0x24,0x01,0x39,0xE6,0x9C,0x3F,0x01,0x85,0x57,0x0D,0x58,0x87,0x45,0xF8,
+0xD3,0x85,0xAA,0x93,0x69,0x26,0x85,0x70,0x48,0x80,0x3F,0x12,0x15,0xC7,0x79,0xB4,
+0x1F,0x05,0x2F,0x3B,0x62,0x99,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,
+0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x03,0x81,0x81,0x00,0x6D,0xEB,
+0x1B,0x09,0xE9,0x5E,0xD9,0x51,0xDB,0x67,0x22,0x61,0xA4,0x2A,0x3C,0x48,0x77,0xE3,
+0xA0,0x7C,0xA6,0xDE,0x73,0xA2,0x14,0x03,0x85,0x3D,0xFB,0xAB,0x0E,0x30,0xC5,0x83,
+0x16,0x33,0x81,0x13,0x08,0x9E,0x7B,0x34,0x4E,0xDF,0x40,0xC8,0x74,0xD7,0xB9,0x7D,
+0xDC,0xF4,0x76,0x55,0x7D,0x9B,0x63,0x54,0x18,0xE9,0xF0,0xEA,0xF3,0x5C,0xB1,0xD9,
+0x8B,0x42,0x1E,0xB9,0xC0,0x95,0x4E,0xBA,0xFA,0xD5,0xE2,0x7C,0xF5,0x68,0x61,0xBF,
+0x8E,0xEC,0x05,0x97,0x5F,0x5B,0xB0,0xD7,0xA3,0x85,0x34,0xC4,0x24,0xA7,0x0D,0x0F,
+0x95,0x93,0xEF,0xCB,0x94,0xD8,0x9E,0x1F,0x9D,0x5C,0x85,0x6D,0xC7,0xAA,0xAE,0x4F,
+0x1F,0x22,0xB5,0xCD,0x95,0xAD,0xBA,0xA7,0xCC,0xF9,0xAB,0x0B,0x7A,0x7F,
+};
+
+
+/* subject:/C=US/O=Network Solutions L.L.C./CN=Network Solutions Certificate Authority */
+/* issuer :/C=US/O=Network Solutions L.L.C./CN=Network Solutions Certificate Authority */
+
+
+const unsigned char Network_Solutions_Certificate_Authority_certificate[1002]={
+0x30,0x82,0x03,0xE6,0x30,0x82,0x02,0xCE,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x57,
+0xCB,0x33,0x6F,0xC2,0x5C,0x16,0xE6,0x47,0x16,0x17,0xE3,0x90,0x31,0x68,0xE0,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x62,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21,0x30,
+0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x20,
+0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x20,0x4C,0x2E,0x4C,0x2E,0x43,0x2E,
+0x31,0x30,0x30,0x2E,0x06,0x03,0x55,0x04,0x03,0x13,0x27,0x4E,0x65,0x74,0x77,0x6F,
+0x72,0x6B,0x20,0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x20,0x43,0x65,0x72,
+0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x30,0x31,0x30,0x30,0x30,0x30,
+0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,
+0x39,0x5A,0x30,0x62,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x4E,0x65,0x74,0x77,
+0x6F,0x72,0x6B,0x20,0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x20,0x4C,0x2E,
+0x4C,0x2E,0x43,0x2E,0x31,0x30,0x30,0x2E,0x06,0x03,0x55,0x04,0x03,0x13,0x27,0x4E,
+0x65,0x74,0x77,0x6F,0x72,0x6B,0x20,0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,
+0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,
+0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xE4,0xBC,0x7E,0x92,0x30,0x6D,0xC6,0xD8,0x8E,
+0x2B,0x0B,0xBC,0x46,0xCE,0xE0,0x27,0x96,0xDE,0xDE,0xF9,0xFA,0x12,0xD3,0x3C,0x33,
+0x73,0xB3,0x04,0x2F,0xBC,0x71,0x8C,0xE5,0x9F,0xB6,0x22,0x60,0x3E,0x5F,0x5D,0xCE,
+0x09,0xFF,0x82,0x0C,0x1B,0x9A,0x51,0x50,0x1A,0x26,0x89,0xDD,0xD5,0x61,0x5D,0x19,
+0xDC,0x12,0x0F,0x2D,0x0A,0xA2,0x43,0x5D,0x17,0xD0,0x34,0x92,0x20,0xEA,0x73,0xCF,
+0x38,0x2C,0x06,0x26,0x09,0x7A,0x72,0xF7,0xFA,0x50,0x32,0xF8,0xC2,0x93,0xD3,0x69,
+0xA2,0x23,0xCE,0x41,0xB1,0xCC,0xE4,0xD5,0x1F,0x36,0xD1,0x8A,0x3A,0xF8,0x8C,0x63,
+0xE2,0x14,0x59,0x69,0xED,0x0D,0xD3,0x7F,0x6B,0xE8,0xB8,0x03,0xE5,0x4F,0x6A,0xE5,
+0x98,0x63,0x69,0x48,0x05,0xBE,0x2E,0xFF,0x33,0xB6,0xE9,0x97,0x59,0x69,0xF8,0x67,
+0x19,0xAE,0x93,0x61,0x96,0x44,0x15,0xD3,0x72,0xB0,0x3F,0xBC,0x6A,0x7D,0xEC,0x48,
+0x7F,0x8D,0xC3,0xAB,0xAA,0x71,0x2B,0x53,0x69,0x41,0x53,0x34,0xB5,0xB0,0xB9,0xC5,
+0x06,0x0A,0xC4,0xB0,0x45,0xF5,0x41,0x5D,0x6E,0x89,0x45,0x7B,0x3D,0x3B,0x26,0x8C,
+0x74,0xC2,0xE5,0xD2,0xD1,0x7D,0xB2,0x11,0xD4,0xFB,0x58,0x32,0x22,0x9A,0x80,0xC9,
+0xDC,0xFD,0x0C,0xE9,0x7F,0x5E,0x03,0x97,0xCE,0x3B,0x00,0x14,0x87,0x27,0x70,0x38,
+0xA9,0x8E,0x6E,0xB3,0x27,0x76,0x98,0x51,0xE0,0x05,0xE3,0x21,0xAB,0x1A,0xD5,0x85,
+0x22,0x3C,0x29,0xB5,0x9A,0x16,0xC5,0x80,0xA8,0xF4,0xBB,0x6B,0x30,0x8F,0x2F,0x46,
+0x02,0xA2,0xB1,0x0C,0x22,0xE0,0xD3,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0x97,0x30,
+0x81,0x94,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x21,0x30,0xC9,
+0xFB,0x00,0xD7,0x4E,0x98,0xDA,0x87,0xAA,0x2A,0xD0,0xA7,0x2E,0xB1,0x40,0x31,0xA7,
+0x4C,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,
+0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,
+0x01,0xFF,0x30,0x52,0x06,0x03,0x55,0x1D,0x1F,0x04,0x4B,0x30,0x49,0x30,0x47,0xA0,
+0x45,0xA0,0x43,0x86,0x41,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,
+0x6E,0x65,0x74,0x73,0x6F,0x6C,0x73,0x73,0x6C,0x2E,0x63,0x6F,0x6D,0x2F,0x4E,0x65,
+0x74,0x77,0x6F,0x72,0x6B,0x53,0x6F,0x6C,0x75,0x74,0x69,0x6F,0x6E,0x73,0x43,0x65,
+0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x2E,0x63,0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xBB,0xAE,0x4B,0xE7,0xB7,0x57,
+0xEB,0x7F,0xAA,0x2D,0xB7,0x73,0x47,0x85,0x6A,0xC1,0xE4,0xA5,0x1D,0xE4,0xE7,0x3C,
+0xE9,0xF4,0x59,0x65,0x77,0xB5,0x7A,0x5B,0x5A,0x8D,0x25,0x36,0xE0,0x7A,0x97,0x2E,
+0x38,0xC0,0x57,0x60,0x83,0x98,0x06,0x83,0x9F,0xB9,0x76,0x7A,0x6E,0x50,0xE0,0xBA,
+0x88,0x2C,0xFC,0x45,0xCC,0x18,0xB0,0x99,0x95,0x51,0x0E,0xEC,0x1D,0xB8,0x88,0xFF,
+0x87,0x50,0x1C,0x82,0xC2,0xE3,0xE0,0x32,0x80,0xBF,0xA0,0x0B,0x47,0xC8,0xC3,0x31,
+0xEF,0x99,0x67,0x32,0x80,0x4F,0x17,0x21,0x79,0x0C,0x69,0x5C,0xDE,0x5E,0x34,0xAE,
+0x02,0xB5,0x26,0xEA,0x50,0xDF,0x7F,0x18,0x65,0x2C,0xC9,0xF2,0x63,0xE1,0xA9,0x07,
+0xFE,0x7C,0x71,0x1F,0x6B,0x33,0x24,0x6A,0x1E,0x05,0xF7,0x05,0x68,0xC0,0x6A,0x12,
+0xCB,0x2E,0x5E,0x61,0xCB,0xAE,0x28,0xD3,0x7E,0xC2,0xB4,0x66,0x91,0x26,0x5F,0x3C,
+0x2E,0x24,0x5F,0xCB,0x58,0x0F,0xEB,0x28,0xEC,0xAF,0x11,0x96,0xF3,0xDC,0x7B,0x6F,
+0xC0,0xA7,0x88,0xF2,0x53,0x77,0xB3,0x60,0x5E,0xAE,0xAE,0x28,0xDA,0x35,0x2C,0x6F,
+0x34,0x45,0xD3,0x26,0xE1,0xDE,0xEC,0x5B,0x4F,0x27,0x6B,0x16,0x7C,0xBD,0x44,0x04,
+0x18,0x82,0xB3,0x89,0x79,0x17,0x10,0x71,0x3D,0x7A,0xA2,0x16,0x4E,0xF5,0x01,0xCD,
+0xA4,0x6C,0x65,0x68,0xA1,0x49,0x76,0x5C,0x43,0xC9,0xD8,0xBC,0x36,0x67,0x6C,0xA5,
+0x94,0xB5,0xD4,0xCC,0xB9,0xBD,0x6A,0x35,0x56,0x21,0xDE,0xD8,0xC3,0xEB,0xFB,0xCB,
+0xA4,0x60,0x4C,0xB0,0x55,0xA0,0xA0,0x7B,0x57,0xB2,
+};
+
+
+/* subject:/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 3 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
+/* issuer :/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 3 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
+
+
+const unsigned char RSA_Root_Certificate_1_certificate[747]={
+0x30,0x82,0x02,0xE7,0x30,0x82,0x02,0x50,0x02,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,
+0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xBB,0x31,0x24,0x30,
+0x22,0x06,0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,
+0x20,0x56,0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,
+0x6F,0x72,0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,
+0x6C,0x69,0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,
+0x06,0x03,0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,
+0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,
+0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,
+0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,
+0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,
+0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,
+0x32,0x36,0x30,0x30,0x32,0x32,0x33,0x33,0x5A,0x17,0x0D,0x31,0x39,0x30,0x36,0x32,
+0x36,0x30,0x30,0x32,0x32,0x33,0x33,0x5A,0x30,0x81,0xBB,0x31,0x24,0x30,0x22,0x06,
+0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x56,
+0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
+0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,0x6C,0x69,
+0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x43,0x6C,
+0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,0x61,0x6C,
+0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
+0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,0x74,0x70,
+0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,0x74,0x2E,
+0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,0x63,0x65,
+0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,
+0x81,0x81,0x00,0xE3,0x98,0x51,0x96,0x1C,0xE8,0xD5,0xB1,0x06,0x81,0x6A,0x57,0xC3,
+0x72,0x75,0x93,0xAB,0xCF,0x9E,0xA6,0xFC,0xF3,0x16,0x52,0xD6,0x2D,0x4D,0x9F,0x35,
+0x44,0xA8,0x2E,0x04,0x4D,0x07,0x49,0x8A,0x38,0x29,0xF5,0x77,0x37,0xE7,0xB7,0xAB,
+0x5D,0xDF,0x36,0x71,0x14,0x99,0x8F,0xDC,0xC2,0x92,0xF1,0xE7,0x60,0x92,0x97,0xEC,
+0xD8,0x48,0xDC,0xBF,0xC1,0x02,0x20,0xC6,0x24,0xA4,0x28,0x4C,0x30,0x5A,0x76,0x6D,
+0xB1,0x5C,0xF3,0xDD,0xDE,0x9E,0x10,0x71,0xA1,0x88,0xC7,0x5B,0x9B,0x41,0x6D,0xCA,
+0xB0,0xB8,0x8E,0x15,0xEE,0xAD,0x33,0x2B,0xCF,0x47,0x04,0x5C,0x75,0x71,0x0A,0x98,
+0x24,0x98,0x29,0xA7,0x49,0x59,0xA5,0xDD,0xF8,0xB7,0x43,0x62,0x61,0xF3,0xD3,0xE2,
+0xD0,0x55,0x3F,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x81,0x81,0x00,0x56,0xBB,0x02,0x58,0x84,
+0x67,0x08,0x2C,0xDF,0x1F,0xDB,0x7B,0x49,0x33,0xF5,0xD3,0x67,0x9D,0xF4,0xB4,0x0A,
+0x10,0xB3,0xC9,0xC5,0x2C,0xE2,0x92,0x6A,0x71,0x78,0x27,0xF2,0x70,0x83,0x42,0xD3,
+0x3E,0xCF,0xA9,0x54,0xF4,0xF1,0xD8,0x92,0x16,0x8C,0xD1,0x04,0xCB,0x4B,0xAB,0xC9,
+0x9F,0x45,0xAE,0x3C,0x8A,0xA9,0xB0,0x71,0x33,0x5D,0xC8,0xC5,0x57,0xDF,0xAF,0xA8,
+0x35,0xB3,0x7F,0x89,0x87,0xE9,0xE8,0x25,0x92,0xB8,0x7F,0x85,0x7A,0xAE,0xD6,0xBC,
+0x1E,0x37,0x58,0x2A,0x67,0xC9,0x91,0xCF,0x2A,0x81,0x3E,0xED,0xC6,0x39,0xDF,0xC0,
+0x3E,0x19,0x9C,0x19,0xCC,0x13,0x4D,0x82,0x41,0xB5,0x8C,0xDE,0xE0,0x3D,0x60,0x08,
+0x20,0x0F,0x45,0x7E,0x6B,0xA2,0x7F,0xA3,0x8C,0x15,0xEE,
+};
+
+
+/* subject:/C=US/O=Starfield Technologies, Inc./OU=Starfield Class 2 Certification Authority */
+/* issuer :/C=US/O=Starfield Technologies, Inc./OU=Starfield Class 2 Certification Authority */
+
+
+const unsigned char Starfield_Class_2_CA_certificate[1043]={
+0x30,0x82,0x04,0x0F,0x30,0x82,0x02,0xF7,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,
+0x68,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x25,
+0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,
+0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,
+0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x0B,0x13,0x29,
+0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,
+0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,
+0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,
+0x36,0x32,0x39,0x31,0x37,0x33,0x39,0x31,0x36,0x5A,0x17,0x0D,0x33,0x34,0x30,0x36,
+0x32,0x39,0x31,0x37,0x33,0x39,0x31,0x36,0x5A,0x30,0x68,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,
+0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,
+0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x0B,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,
+0x65,0x6C,0x64,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x30,0x82,0x01,0x20,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0D,0x00,0x30,0x82,0x01,0x08,0x02,
+0x82,0x01,0x01,0x00,0xB7,0x32,0xC8,0xFE,0xE9,0x71,0xA6,0x04,0x85,0xAD,0x0C,0x11,
+0x64,0xDF,0xCE,0x4D,0xEF,0xC8,0x03,0x18,0x87,0x3F,0xA1,0xAB,0xFB,0x3C,0xA6,0x9F,
+0xF0,0xC3,0xA1,0xDA,0xD4,0xD8,0x6E,0x2B,0x53,0x90,0xFB,0x24,0xA4,0x3E,0x84,0xF0,
+0x9E,0xE8,0x5F,0xEC,0xE5,0x27,0x44,0xF5,0x28,0xA6,0x3F,0x7B,0xDE,0xE0,0x2A,0xF0,
+0xC8,0xAF,0x53,0x2F,0x9E,0xCA,0x05,0x01,0x93,0x1E,0x8F,0x66,0x1C,0x39,0xA7,0x4D,
+0xFA,0x5A,0xB6,0x73,0x04,0x25,0x66,0xEB,0x77,0x7F,0xE7,0x59,0xC6,0x4A,0x99,0x25,
+0x14,0x54,0xEB,0x26,0xC7,0xF3,0x7F,0x19,0xD5,0x30,0x70,0x8F,0xAF,0xB0,0x46,0x2A,
+0xFF,0xAD,0xEB,0x29,0xED,0xD7,0x9F,0xAA,0x04,0x87,0xA3,0xD4,0xF9,0x89,0xA5,0x34,
+0x5F,0xDB,0x43,0x91,0x82,0x36,0xD9,0x66,0x3C,0xB1,0xB8,0xB9,0x82,0xFD,0x9C,0x3A,
+0x3E,0x10,0xC8,0x3B,0xEF,0x06,0x65,0x66,0x7A,0x9B,0x19,0x18,0x3D,0xFF,0x71,0x51,
+0x3C,0x30,0x2E,0x5F,0xBE,0x3D,0x77,0x73,0xB2,0x5D,0x06,0x6C,0xC3,0x23,0x56,0x9A,
+0x2B,0x85,0x26,0x92,0x1C,0xA7,0x02,0xB3,0xE4,0x3F,0x0D,0xAF,0x08,0x79,0x82,0xB8,
+0x36,0x3D,0xEA,0x9C,0xD3,0x35,0xB3,0xBC,0x69,0xCA,0xF5,0xCC,0x9D,0xE8,0xFD,0x64,
+0x8D,0x17,0x80,0x33,0x6E,0x5E,0x4A,0x5D,0x99,0xC9,0x1E,0x87,0xB4,0x9D,0x1A,0xC0,
+0xD5,0x6E,0x13,0x35,0x23,0x5E,0xDF,0x9B,0x5F,0x3D,0xEF,0xD6,0xF7,0x76,0xC2,0xEA,
+0x3E,0xBB,0x78,0x0D,0x1C,0x42,0x67,0x6B,0x04,0xD8,0xF8,0xD6,0xDA,0x6F,0x8B,0xF2,
+0x44,0xA0,0x01,0xAB,0x02,0x01,0x03,0xA3,0x81,0xC5,0x30,0x81,0xC2,0x30,0x1D,0x06,
+0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xBF,0x5F,0xB7,0xD1,0xCE,0xDD,0x1F,0x86,
+0xF4,0x5B,0x55,0xAC,0xDC,0xD7,0x10,0xC2,0x0E,0xA9,0x88,0xE7,0x30,0x81,0x92,0x06,
+0x03,0x55,0x1D,0x23,0x04,0x81,0x8A,0x30,0x81,0x87,0x80,0x14,0xBF,0x5F,0xB7,0xD1,
+0xCE,0xDD,0x1F,0x86,0xF4,0x5B,0x55,0xAC,0xDC,0xD7,0x10,0xC2,0x0E,0xA9,0x88,0xE7,
+0xA1,0x6C,0xA4,0x6A,0x30,0x68,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x55,0x53,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74,
+0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F,
+0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,0x30,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x43,
+0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x82,0x01,
+0x00,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,
+0x01,0x01,0x00,0x05,0x9D,0x3F,0x88,0x9D,0xD1,0xC9,0x1A,0x55,0xA1,0xAC,0x69,0xF3,
+0xF3,0x59,0xDA,0x9B,0x01,0x87,0x1A,0x4F,0x57,0xA9,0xA1,0x79,0x09,0x2A,0xDB,0xF7,
+0x2F,0xB2,0x1E,0xCC,0xC7,0x5E,0x6A,0xD8,0x83,0x87,0xA1,0x97,0xEF,0x49,0x35,0x3E,
+0x77,0x06,0x41,0x58,0x62,0xBF,0x8E,0x58,0xB8,0x0A,0x67,0x3F,0xEC,0xB3,0xDD,0x21,
+0x66,0x1F,0xC9,0x54,0xFA,0x72,0xCC,0x3D,0x4C,0x40,0xD8,0x81,0xAF,0x77,0x9E,0x83,
+0x7A,0xBB,0xA2,0xC7,0xF5,0x34,0x17,0x8E,0xD9,0x11,0x40,0xF4,0xFC,0x2C,0x2A,0x4D,
+0x15,0x7F,0xA7,0x62,0x5D,0x2E,0x25,0xD3,0x00,0x0B,0x20,0x1A,0x1D,0x68,0xF9,0x17,
+0xB8,0xF4,0xBD,0x8B,0xED,0x28,0x59,0xDD,0x4D,0x16,0x8B,0x17,0x83,0xC8,0xB2,0x65,
+0xC7,0x2D,0x7A,0xA5,0xAA,0xBC,0x53,0x86,0x6D,0xDD,0x57,0xA4,0xCA,0xF8,0x20,0x41,
+0x0B,0x68,0xF0,0xF4,0xFB,0x74,0xBE,0x56,0x5D,0x7A,0x79,0xF5,0xF9,0x1D,0x85,0xE3,
+0x2D,0x95,0xBE,0xF5,0x71,0x90,0x43,0xCC,0x8D,0x1F,0x9A,0x00,0x0A,0x87,0x29,0xE9,
+0x55,0x22,0x58,0x00,0x23,0xEA,0xE3,0x12,0x43,0x29,0x5B,0x47,0x08,0xDD,0x8C,0x41,
+0x6A,0x65,0x06,0xA8,0xE5,0x21,0xAA,0x41,0xB4,0x95,0x21,0x95,0xB9,0x7D,0xD1,0x34,
+0xAB,0x13,0xD6,0xAD,0xBC,0xDC,0xE2,0x3D,0x39,0xCD,0xBD,0x3E,0x75,0x70,0xA1,0x18,
+0x59,0x03,0xC9,0x22,0xB4,0x8F,0x9C,0xD5,0x5E,0x2A,0xD7,0xA5,0xB6,0xD4,0x0A,0x6D,
+0xF8,0xB7,0x40,0x11,0x46,0x9A,0x1F,0x79,0x0E,0x62,0xBF,0x0F,0x97,0xEC,0xE0,0x2F,
+0x1F,0x17,0x94,
+};
+
+
+/* subject:/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Root Certificate Authority - G2 */
+/* issuer :/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Root Certificate Authority - G2 */
+
+
+const unsigned char Starfield_Root_Certificate_Authority___G2_certificate[993]={
+0x30,0x82,0x03,0xDD,0x30,0x82,0x02,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,
+0x81,0x8F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,
+0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,0x6F,0x6E,
+0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,0x6F,0x74,
+0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,
+0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,
+0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,
+0x64,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,
+0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x39,0x30,0x31,0x30,0x30,0x30,0x30,0x30,
+0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,
+0x5A,0x30,0x81,0x8F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,
+0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,
+0x6F,0x6E,0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,
+0x6F,0x74,0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,
+0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,
+0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,
+0x65,0x6C,0x64,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,
+0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,
+0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,
+0x82,0x01,0x01,0x00,0xBD,0xED,0xC1,0x03,0xFC,0xF6,0x8F,0xFC,0x02,0xB1,0x6F,0x5B,
+0x9F,0x48,0xD9,0x9D,0x79,0xE2,0xA2,0xB7,0x03,0x61,0x56,0x18,0xC3,0x47,0xB6,0xD7,
+0xCA,0x3D,0x35,0x2E,0x89,0x43,0xF7,0xA1,0x69,0x9B,0xDE,0x8A,0x1A,0xFD,0x13,0x20,
+0x9C,0xB4,0x49,0x77,0x32,0x29,0x56,0xFD,0xB9,0xEC,0x8C,0xDD,0x22,0xFA,0x72,0xDC,
+0x27,0x61,0x97,0xEE,0xF6,0x5A,0x84,0xEC,0x6E,0x19,0xB9,0x89,0x2C,0xDC,0x84,0x5B,
+0xD5,0x74,0xFB,0x6B,0x5F,0xC5,0x89,0xA5,0x10,0x52,0x89,0x46,0x55,0xF4,0xB8,0x75,
+0x1C,0xE6,0x7F,0xE4,0x54,0xAE,0x4B,0xF8,0x55,0x72,0x57,0x02,0x19,0xF8,0x17,0x71,
+0x59,0xEB,0x1E,0x28,0x07,0x74,0xC5,0x9D,0x48,0xBE,0x6C,0xB4,0xF4,0xA4,0xB0,0xF3,
+0x64,0x37,0x79,0x92,0xC0,0xEC,0x46,0x5E,0x7F,0xE1,0x6D,0x53,0x4C,0x62,0xAF,0xCD,
+0x1F,0x0B,0x63,0xBB,0x3A,0x9D,0xFB,0xFC,0x79,0x00,0x98,0x61,0x74,0xCF,0x26,0x82,
+0x40,0x63,0xF3,0xB2,0x72,0x6A,0x19,0x0D,0x99,0xCA,0xD4,0x0E,0x75,0xCC,0x37,0xFB,
+0x8B,0x89,0xC1,0x59,0xF1,0x62,0x7F,0x5F,0xB3,0x5F,0x65,0x30,0xF8,0xA7,0xB7,0x4D,
+0x76,0x5A,0x1E,0x76,0x5E,0x34,0xC0,0xE8,0x96,0x56,0x99,0x8A,0xB3,0xF0,0x7F,0xA4,
+0xCD,0xBD,0xDC,0x32,0x31,0x7C,0x91,0xCF,0xE0,0x5F,0x11,0xF8,0x6B,0xAA,0x49,0x5C,
+0xD1,0x99,0x94,0xD1,0xA2,0xE3,0x63,0x5B,0x09,0x76,0xB5,0x56,0x62,0xE1,0x4B,0x74,
+0x1D,0x96,0xD4,0x26,0xD4,0x08,0x04,0x59,0xD0,0x98,0x0E,0x0E,0xE6,0xDE,0xFC,0xC3,
+0xEC,0x1F,0x90,0xF1,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,
+0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,
+0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,
+0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7C,0x0C,0x32,0x1F,0xA7,0xD9,0x30,
+0x7F,0xC4,0x7D,0x68,0xA3,0x62,0xA8,0xA1,0xCE,0xAB,0x07,0x5B,0x27,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,
+0x00,0x11,0x59,0xFA,0x25,0x4F,0x03,0x6F,0x94,0x99,0x3B,0x9A,0x1F,0x82,0x85,0x39,
+0xD4,0x76,0x05,0x94,0x5E,0xE1,0x28,0x93,0x6D,0x62,0x5D,0x09,0xC2,0xA0,0xA8,0xD4,
+0xB0,0x75,0x38,0xF1,0x34,0x6A,0x9D,0xE4,0x9F,0x8A,0x86,0x26,0x51,0xE6,0x2C,0xD1,
+0xC6,0x2D,0x6E,0x95,0x20,0x4A,0x92,0x01,0xEC,0xB8,0x8A,0x67,0x7B,0x31,0xE2,0x67,
+0x2E,0x8C,0x95,0x03,0x26,0x2E,0x43,0x9D,0x4A,0x31,0xF6,0x0E,0xB5,0x0C,0xBB,0xB7,
+0xE2,0x37,0x7F,0x22,0xBA,0x00,0xA3,0x0E,0x7B,0x52,0xFB,0x6B,0xBB,0x3B,0xC4,0xD3,
+0x79,0x51,0x4E,0xCD,0x90,0xF4,0x67,0x07,0x19,0xC8,0x3C,0x46,0x7A,0x0D,0x01,0x7D,
+0xC5,0x58,0xE7,0x6D,0xE6,0x85,0x30,0x17,0x9A,0x24,0xC4,0x10,0xE0,0x04,0xF7,0xE0,
+0xF2,0x7F,0xD4,0xAA,0x0A,0xFF,0x42,0x1D,0x37,0xED,0x94,0xE5,0x64,0x59,0x12,0x20,
+0x77,0x38,0xD3,0x32,0x3E,0x38,0x81,0x75,0x96,0x73,0xFA,0x68,0x8F,0xB1,0xCB,0xCE,
+0x1F,0xC5,0xEC,0xFA,0x9C,0x7E,0xCF,0x7E,0xB1,0xF1,0x07,0x2D,0xB6,0xFC,0xBF,0xCA,
+0xA4,0xBF,0xD0,0x97,0x05,0x4A,0xBC,0xEA,0x18,0x28,0x02,0x90,0xBD,0x54,0x78,0x09,
+0x21,0x71,0xD3,0xD1,0x7D,0x1D,0xD9,0x16,0xB0,0xA9,0x61,0x3D,0xD0,0x0A,0x00,0x22,
+0xFC,0xC7,0x7B,0xCB,0x09,0x64,0x45,0x0B,0x3B,0x40,0x81,0xF7,0x7D,0x7C,0x32,0xF5,
+0x98,0xCA,0x58,0x8E,0x7D,0x2A,0xEE,0x90,0x59,0x73,0x64,0xF9,0x36,0x74,0x5E,0x25,
+0xA1,0xF5,0x66,0x05,0x2E,0x7F,0x39,0x15,0xA9,0x2A,0xFB,0x50,0x8B,0x8E,0x85,0x69,
+0xF4,
+};
+
+
+/* subject:/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Services Root Certificate Authority - G2 */
+/* issuer :/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Services Root Certificate Authority - G2 */
+
+
+const unsigned char Starfield_Services_Root_Certificate_Authority___G2_certificate[1011]={
+0x30,0x82,0x03,0xEF,0x30,0x82,0x02,0xD7,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,
+0x81,0x98,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,
+0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,0x6F,0x6E,
+0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,0x6F,0x74,
+0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,
+0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x3B,0x30,
+0x39,0x06,0x03,0x55,0x04,0x03,0x13,0x32,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,
+0x64,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x52,0x6F,0x6F,0x74,0x20,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,
+0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,
+0x30,0x39,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,
+0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x98,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x10,0x30,0x0E,0x06,0x03,
+0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,0x6F,0x6E,0x61,0x31,0x13,0x30,0x11,
+0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,0x6F,0x74,0x74,0x73,0x64,0x61,0x6C,
+0x65,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,
+0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,
+0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x3B,0x30,0x39,0x06,0x03,0x55,0x04,
+0x03,0x13,0x32,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x53,0x65,0x72,
+0x76,0x69,0x63,0x65,0x73,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,
+0x20,0x2D,0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,
+0x0A,0x02,0x82,0x01,0x01,0x00,0xD5,0x0C,0x3A,0xC4,0x2A,0xF9,0x4E,0xE2,0xF5,0xBE,
+0x19,0x97,0x5F,0x8E,0x88,0x53,0xB1,0x1F,0x3F,0xCB,0xCF,0x9F,0x20,0x13,0x6D,0x29,
+0x3A,0xC8,0x0F,0x7D,0x3C,0xF7,0x6B,0x76,0x38,0x63,0xD9,0x36,0x60,0xA8,0x9B,0x5E,
+0x5C,0x00,0x80,0xB2,0x2F,0x59,0x7F,0xF6,0x87,0xF9,0x25,0x43,0x86,0xE7,0x69,0x1B,
+0x52,0x9A,0x90,0xE1,0x71,0xE3,0xD8,0x2D,0x0D,0x4E,0x6F,0xF6,0xC8,0x49,0xD9,0xB6,
+0xF3,0x1A,0x56,0xAE,0x2B,0xB6,0x74,0x14,0xEB,0xCF,0xFB,0x26,0xE3,0x1A,0xBA,0x1D,
+0x96,0x2E,0x6A,0x3B,0x58,0x94,0x89,0x47,0x56,0xFF,0x25,0xA0,0x93,0x70,0x53,0x83,
+0xDA,0x84,0x74,0x14,0xC3,0x67,0x9E,0x04,0x68,0x3A,0xDF,0x8E,0x40,0x5A,0x1D,0x4A,
+0x4E,0xCF,0x43,0x91,0x3B,0xE7,0x56,0xD6,0x00,0x70,0xCB,0x52,0xEE,0x7B,0x7D,0xAE,
+0x3A,0xE7,0xBC,0x31,0xF9,0x45,0xF6,0xC2,0x60,0xCF,0x13,0x59,0x02,0x2B,0x80,0xCC,
+0x34,0x47,0xDF,0xB9,0xDE,0x90,0x65,0x6D,0x02,0xCF,0x2C,0x91,0xA6,0xA6,0xE7,0xDE,
+0x85,0x18,0x49,0x7C,0x66,0x4E,0xA3,0x3A,0x6D,0xA9,0xB5,0xEE,0x34,0x2E,0xBA,0x0D,
+0x03,0xB8,0x33,0xDF,0x47,0xEB,0xB1,0x6B,0x8D,0x25,0xD9,0x9B,0xCE,0x81,0xD1,0x45,
+0x46,0x32,0x96,0x70,0x87,0xDE,0x02,0x0E,0x49,0x43,0x85,0xB6,0x6C,0x73,0xBB,0x64,
+0xEA,0x61,0x41,0xAC,0xC9,0xD4,0x54,0xDF,0x87,0x2F,0xC7,0x22,0xB2,0x26,0xCC,0x9F,
+0x59,0x54,0x68,0x9F,0xFC,0xBE,0x2A,0x2F,0xC4,0x55,0x1C,0x75,0x40,0x60,0x17,0x85,
+0x02,0x55,0x39,0x8B,0x7F,0x05,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,
+0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,
+0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,
+0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9C,0x5F,0x00,0xDF,0xAA,
+0x01,0xD7,0x30,0x2B,0x38,0x88,0xA2,0xB8,0x6D,0x4A,0x9C,0xF2,0x11,0x91,0x83,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,
+0x01,0x01,0x00,0x4B,0x36,0xA6,0x84,0x77,0x69,0xDD,0x3B,0x19,0x9F,0x67,0x23,0x08,
+0x6F,0x0E,0x61,0xC9,0xFD,0x84,0xDC,0x5F,0xD8,0x36,0x81,0xCD,0xD8,0x1B,0x41,0x2D,
+0x9F,0x60,0xDD,0xC7,0x1A,0x68,0xD9,0xD1,0x6E,0x86,0xE1,0x88,0x23,0xCF,0x13,0xDE,
+0x43,0xCF,0xE2,0x34,0xB3,0x04,0x9D,0x1F,0x29,0xD5,0xBF,0xF8,0x5E,0xC8,0xD5,0xC1,
+0xBD,0xEE,0x92,0x6F,0x32,0x74,0xF2,0x91,0x82,0x2F,0xBD,0x82,0x42,0x7A,0xAD,0x2A,
+0xB7,0x20,0x7D,0x4D,0xBC,0x7A,0x55,0x12,0xC2,0x15,0xEA,0xBD,0xF7,0x6A,0x95,0x2E,
+0x6C,0x74,0x9F,0xCF,0x1C,0xB4,0xF2,0xC5,0x01,0xA3,0x85,0xD0,0x72,0x3E,0xAD,0x73,
+0xAB,0x0B,0x9B,0x75,0x0C,0x6D,0x45,0xB7,0x8E,0x94,0xAC,0x96,0x37,0xB5,0xA0,0xD0,
+0x8F,0x15,0x47,0x0E,0xE3,0xE8,0x83,0xDD,0x8F,0xFD,0xEF,0x41,0x01,0x77,0xCC,0x27,
+0xA9,0x62,0x85,0x33,0xF2,0x37,0x08,0xEF,0x71,0xCF,0x77,0x06,0xDE,0xC8,0x19,0x1D,
+0x88,0x40,0xCF,0x7D,0x46,0x1D,0xFF,0x1E,0xC7,0xE1,0xCE,0xFF,0x23,0xDB,0xC6,0xFA,
+0x8D,0x55,0x4E,0xA9,0x02,0xE7,0x47,0x11,0x46,0x3E,0xF4,0xFD,0xBD,0x7B,0x29,0x26,
+0xBB,0xA9,0x61,0x62,0x37,0x28,0xB6,0x2D,0x2A,0xF6,0x10,0x86,0x64,0xC9,0x70,0xA7,
+0xD2,0xAD,0xB7,0x29,0x70,0x79,0xEA,0x3C,0xDA,0x63,0x25,0x9F,0xFD,0x68,0xB7,0x30,
+0xEC,0x70,0xFB,0x75,0x8A,0xB7,0x6D,0x60,0x67,0xB2,0x1E,0xC8,0xB9,0xE9,0xD8,0xA8,
+0x6F,0x02,0x8B,0x67,0x0D,0x4D,0x26,0x57,0x71,0xDA,0x20,0xFC,0xC1,0x4A,0x50,0x8D,
+0xB1,0x28,0xBA,
+};
+
+
+/* subject:/C=IL/O=StartCom Ltd./OU=Secure Digital Certificate Signing/CN=StartCom Certification Authority */
+/* issuer :/C=IL/O=StartCom Ltd./OU=Secure Digital Certificate Signing/CN=StartCom Certification Authority */
+
+
+const unsigned char StartCom_Certification_Authority_certificate[1931]={
+0x30,0x82,0x07,0x87,0x30,0x82,0x05,0x6F,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x2D,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,
+0x7D,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x4C,0x31,0x16,
+0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,
+0x6D,0x20,0x4C,0x74,0x64,0x2E,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x0B,0x13,
+0x22,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x44,0x69,0x67,0x69,0x74,0x61,0x6C,0x20,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x69,0x67,0x6E,
+0x69,0x6E,0x67,0x31,0x29,0x30,0x27,0x06,0x03,0x55,0x04,0x03,0x13,0x20,0x53,0x74,
+0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,
+0x17,0x0D,0x30,0x36,0x30,0x39,0x31,0x37,0x31,0x39,0x34,0x36,0x33,0x37,0x5A,0x17,
+0x0D,0x33,0x36,0x30,0x39,0x31,0x37,0x31,0x39,0x34,0x36,0x33,0x36,0x5A,0x30,0x7D,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x4C,0x31,0x16,0x30,
+0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,0x6D,
+0x20,0x4C,0x74,0x64,0x2E,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x0B,0x13,0x22,
+0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x44,0x69,0x67,0x69,0x74,0x61,0x6C,0x20,0x43,
+0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x69,0x67,0x6E,0x69,
+0x6E,0x67,0x31,0x29,0x30,0x27,0x06,0x03,0x55,0x04,0x03,0x13,0x20,0x53,0x74,0x61,
+0x72,0x74,0x43,0x6F,0x6D,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,
+0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x02,
+0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,
+0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xC1,0x88,
+0xDB,0x09,0xBC,0x6C,0x46,0x7C,0x78,0x9F,0x95,0x7B,0xB5,0x33,0x90,0xF2,0x72,0x62,
+0xD6,0xC1,0x36,0x20,0x22,0x24,0x5E,0xCE,0xE9,0x77,0xF2,0x43,0x0A,0xA2,0x06,0x64,
+0xA4,0xCC,0x8E,0x36,0xF8,0x38,0xE6,0x23,0xF0,0x6E,0x6D,0xB1,0x3C,0xDD,0x72,0xA3,
+0x85,0x1C,0xA1,0xD3,0x3D,0xB4,0x33,0x2B,0xD3,0x2F,0xAF,0xFE,0xEA,0xB0,0x41,0x59,
+0x67,0xB6,0xC4,0x06,0x7D,0x0A,0x9E,0x74,0x85,0xD6,0x79,0x4C,0x80,0x37,0x7A,0xDF,
+0x39,0x05,0x52,0x59,0xF7,0xF4,0x1B,0x46,0x43,0xA4,0xD2,0x85,0x85,0xD2,0xC3,0x71,
+0xF3,0x75,0x62,0x34,0xBA,0x2C,0x8A,0x7F,0x1E,0x8F,0xEE,0xED,0x34,0xD0,0x11,0xC7,
+0x96,0xCD,0x52,0x3D,0xBA,0x33,0xD6,0xDD,0x4D,0xDE,0x0B,0x3B,0x4A,0x4B,0x9F,0xC2,
+0x26,0x2F,0xFA,0xB5,0x16,0x1C,0x72,0x35,0x77,0xCA,0x3C,0x5D,0xE6,0xCA,0xE1,0x26,
+0x8B,0x1A,0x36,0x76,0x5C,0x01,0xDB,0x74,0x14,0x25,0xFE,0xED,0xB5,0xA0,0x88,0x0F,
+0xDD,0x78,0xCA,0x2D,0x1F,0x07,0x97,0x30,0x01,0x2D,0x72,0x79,0xFA,0x46,0xD6,0x13,
+0x2A,0xA8,0xB9,0xA6,0xAB,0x83,0x49,0x1D,0xE5,0xF2,0xEF,0xDD,0xE4,0x01,0x8E,0x18,
+0x0A,0x8F,0x63,0x53,0x16,0x85,0x62,0xA9,0x0E,0x19,0x3A,0xCC,0xB5,0x66,0xA6,0xC2,
+0x6B,0x74,0x07,0xE4,0x2B,0xE1,0x76,0x3E,0xB4,0x6D,0xD8,0xF6,0x44,0xE1,0x73,0x62,
+0x1F,0x3B,0xC4,0xBE,0xA0,0x53,0x56,0x25,0x6C,0x51,0x09,0xF7,0xAA,0xAB,0xCA,0xBF,
+0x76,0xFD,0x6D,0x9B,0xF3,0x9D,0xDB,0xBF,0x3D,0x66,0xBC,0x0C,0x56,0xAA,0xAF,0x98,
+0x48,0x95,0x3A,0x4B,0xDF,0xA7,0x58,0x50,0xD9,0x38,0x75,0xA9,0x5B,0xEA,0x43,0x0C,
+0x02,0xFF,0x99,0xEB,0xE8,0x6C,0x4D,0x70,0x5B,0x29,0x65,0x9C,0xDD,0xAA,0x5D,0xCC,
+0xAF,0x01,0x31,0xEC,0x0C,0xEB,0xD2,0x8D,0xE8,0xEA,0x9C,0x7B,0xE6,0x6E,0xF7,0x27,
+0x66,0x0C,0x1A,0x48,0xD7,0x6E,0x42,0xE3,0x3F,0xDE,0x21,0x3E,0x7B,0xE1,0x0D,0x70,
+0xFB,0x63,0xAA,0xA8,0x6C,0x1A,0x54,0xB4,0x5C,0x25,0x7A,0xC9,0xA2,0xC9,0x8B,0x16,
+0xA6,0xBB,0x2C,0x7E,0x17,0x5E,0x05,0x4D,0x58,0x6E,0x12,0x1D,0x01,0xEE,0x12,0x10,
+0x0D,0xC6,0x32,0x7F,0x18,0xFF,0xFC,0xF4,0xFA,0xCD,0x6E,0x91,0xE8,0x36,0x49,0xBE,
+0x1A,0x48,0x69,0x8B,0xC2,0x96,0x4D,0x1A,0x12,0xB2,0x69,0x17,0xC1,0x0A,0x90,0xD6,
+0xFA,0x79,0x22,0x48,0xBF,0xBA,0x7B,0x69,0xF8,0x70,0xC7,0xFA,0x7A,0x37,0xD8,0xD8,
+0x0D,0xD2,0x76,0x4F,0x57,0xFF,0x90,0xB7,0xE3,0x91,0xD2,0xDD,0xEF,0xC2,0x60,0xB7,
+0x67,0x3A,0xDD,0xFE,0xAA,0x9C,0xF0,0xD4,0x8B,0x7F,0x72,0x22,0xCE,0xC6,0x9F,0x97,
+0xB6,0xF8,0xAF,0x8A,0xA0,0x10,0xA8,0xD9,0xFB,0x18,0xC6,0xB6,0xB5,0x5C,0x52,0x3C,
+0x89,0xB6,0x19,0x2A,0x73,0x01,0x0A,0x0F,0x03,0xB3,0x12,0x60,0xF2,0x7A,0x2F,0x81,
+0xDB,0xA3,0x6E,0xFF,0x26,0x30,0x97,0xF5,0x8B,0xDD,0x89,0x57,0xB6,0xAD,0x3D,0xB3,
+0xAF,0x2B,0xC5,0xB7,0x76,0x02,0xF0,0xA5,0xD6,0x2B,0x9A,0x86,0x14,0x2A,0x72,0xF6,
+0xE3,0x33,0x8C,0x5D,0x09,0x4B,0x13,0xDF,0xBB,0x8C,0x74,0x13,0x52,0x4B,0x02,0x03,
+0x01,0x00,0x01,0xA3,0x82,0x02,0x10,0x30,0x82,0x02,0x0C,0x30,0x0F,0x06,0x03,0x55,
+0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,
+0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,
+0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x4E,0x0B,0xEF,0x1A,0xA4,0x40,0x5B,0xA5,0x17,
+0x69,0x87,0x30,0xCA,0x34,0x68,0x43,0xD0,0x41,0xAE,0xF2,0x30,0x1F,0x06,0x03,0x55,
+0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x4E,0x0B,0xEF,0x1A,0xA4,0x40,0x5B,0xA5,
+0x17,0x69,0x87,0x30,0xCA,0x34,0x68,0x43,0xD0,0x41,0xAE,0xF2,0x30,0x82,0x01,0x5A,
+0x06,0x03,0x55,0x1D,0x20,0x04,0x82,0x01,0x51,0x30,0x82,0x01,0x4D,0x30,0x82,0x01,
+0x49,0x06,0x0B,0x2B,0x06,0x01,0x04,0x01,0x81,0xB5,0x37,0x01,0x01,0x01,0x30,0x82,
+0x01,0x38,0x30,0x2E,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x02,0x01,0x16,0x22,
+0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x73,0x74,0x61,0x72,0x74,
+0x73,0x73,0x6C,0x2E,0x63,0x6F,0x6D,0x2F,0x70,0x6F,0x6C,0x69,0x63,0x79,0x2E,0x70,
+0x64,0x66,0x30,0x34,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x02,0x01,0x16,0x28,
+0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x73,0x74,0x61,0x72,0x74,
+0x73,0x73,0x6C,0x2E,0x63,0x6F,0x6D,0x2F,0x69,0x6E,0x74,0x65,0x72,0x6D,0x65,0x64,
+0x69,0x61,0x74,0x65,0x2E,0x70,0x64,0x66,0x30,0x81,0xCF,0x06,0x08,0x2B,0x06,0x01,
+0x05,0x05,0x07,0x02,0x02,0x30,0x81,0xC2,0x30,0x27,0x16,0x20,0x53,0x74,0x61,0x72,
+0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69,0x61,0x6C,0x20,0x28,0x53,0x74,
+0x61,0x72,0x74,0x43,0x6F,0x6D,0x29,0x20,0x4C,0x74,0x64,0x2E,0x30,0x03,0x02,0x01,
+0x01,0x1A,0x81,0x96,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x20,0x4C,0x69,0x61,0x62,
+0x69,0x6C,0x69,0x74,0x79,0x2C,0x20,0x72,0x65,0x61,0x64,0x20,0x74,0x68,0x65,0x20,
+0x73,0x65,0x63,0x74,0x69,0x6F,0x6E,0x20,0x2A,0x4C,0x65,0x67,0x61,0x6C,0x20,0x4C,
+0x69,0x6D,0x69,0x74,0x61,0x74,0x69,0x6F,0x6E,0x73,0x2A,0x20,0x6F,0x66,0x20,0x74,
+0x68,0x65,0x20,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x61,0x76,0x61,0x69,0x6C,
+0x61,0x62,0x6C,0x65,0x20,0x61,0x74,0x20,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,
+0x77,0x77,0x2E,0x73,0x74,0x61,0x72,0x74,0x73,0x73,0x6C,0x2E,0x63,0x6F,0x6D,0x2F,
+0x70,0x6F,0x6C,0x69,0x63,0x79,0x2E,0x70,0x64,0x66,0x30,0x11,0x06,0x09,0x60,0x86,
+0x48,0x01,0x86,0xF8,0x42,0x01,0x01,0x04,0x04,0x03,0x02,0x00,0x07,0x30,0x38,0x06,
+0x09,0x60,0x86,0x48,0x01,0x86,0xF8,0x42,0x01,0x0D,0x04,0x2B,0x16,0x29,0x53,0x74,
+0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x46,0x72,0x65,0x65,0x20,0x53,0x53,0x4C,0x20,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,
+0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x8E,0x8F,0xE7,0xDC,0x94,
+0x79,0x7C,0xF1,0x85,0x7F,0x9F,0x49,0x6F,0x6B,0xCA,0x5D,0xFB,0x8C,0xFE,0x04,0xC5,
+0xC1,0x62,0xD1,0x7D,0x42,0x8A,0xBC,0x53,0xB7,0x94,0x03,0x66,0x30,0x3F,0xB1,0xE7,
+0x0A,0xA7,0x50,0x20,0x55,0x25,0x7F,0x76,0x7A,0x14,0x0D,0xEB,0x04,0x0E,0x40,0xE6,
+0x3E,0xD8,0x88,0xAB,0x07,0x27,0x83,0xA9,0x75,0xA6,0x37,0x73,0xC7,0xFD,0x4B,0xD2,
+0x4D,0xAD,0x17,0x40,0xC8,0x46,0xBE,0x3B,0x7F,0x51,0xFC,0xC3,0xB6,0x05,0x31,0xDC,
+0xCD,0x85,0x22,0x4E,0x71,0xB7,0xF2,0x71,0x5E,0xB0,0x1A,0xC6,0xBA,0x93,0x8B,0x78,
+0x92,0x4A,0x85,0xF8,0x78,0x0F,0x83,0xFE,0x2F,0xAD,0x2C,0xF7,0xE4,0xA4,0xBB,0x2D,
+0xD0,0xE7,0x0D,0x3A,0xB8,0x3E,0xCE,0xF6,0x78,0xF6,0xAE,0x47,0x24,0xCA,0xA3,0x35,
+0x36,0xCE,0xC7,0xC6,0x87,0x98,0xDA,0xEC,0xFB,0xE9,0xB2,0xCE,0x27,0x9B,0x88,0xC3,
+0x04,0xA1,0xF6,0x0B,0x59,0x68,0xAF,0xC9,0xDB,0x10,0x0F,0x4D,0xF6,0x64,0x63,0x5C,
+0xA5,0x12,0x6F,0x92,0xB2,0x93,0x94,0xC7,0x88,0x17,0x0E,0x93,0xB6,0x7E,0x62,0x8B,
+0x90,0x7F,0xAB,0x4E,0x9F,0xFC,0xE3,0x75,0x14,0x4F,0x2A,0x32,0xDF,0x5B,0x0D,0xE0,
+0xF5,0x7B,0x93,0x0D,0xAB,0xA1,0xCF,0x87,0xE1,0xA5,0x04,0x45,0xE8,0x3C,0x12,0xA5,
+0x09,0xC5,0xB0,0xD1,0xB7,0x53,0xF3,0x60,0x14,0xBA,0x85,0x69,0x6A,0x21,0x7C,0x1F,
+0x75,0x61,0x17,0x20,0x17,0x7B,0x6C,0x3B,0x41,0x29,0x5C,0xE1,0xAC,0x5A,0xD1,0xCD,
+0x8C,0x9B,0xEB,0x60,0x1D,0x19,0xEC,0xF7,0xE5,0xB0,0xDA,0xF9,0x79,0x18,0xA5,0x45,
+0x3F,0x49,0x43,0x57,0xD2,0xDD,0x24,0xD5,0x2C,0xA3,0xFD,0x91,0x8D,0x27,0xB5,0xE5,
+0xEB,0x14,0x06,0x9A,0x4C,0x7B,0x21,0xBB,0x3A,0xAD,0x30,0x06,0x18,0xC0,0xD8,0xC1,
+0x6B,0x2C,0x7F,0x59,0x5C,0x5D,0x91,0xB1,0x70,0x22,0x57,0xEB,0x8A,0x6B,0x48,0x4A,
+0xD5,0x0F,0x29,0xEC,0xC6,0x40,0xC0,0x2F,0x88,0x4C,0x68,0x01,0x17,0x77,0xF4,0x24,
+0x19,0x4F,0xBD,0xFA,0xE1,0xB2,0x20,0x21,0x4B,0xDD,0x1A,0xD8,0x29,0x7D,0xAA,0xB8,
+0xDE,0x54,0xEC,0x21,0x55,0x80,0x6C,0x1E,0xF5,0x30,0xC8,0xA3,0x10,0xE5,0xB2,0xE6,
+0x2A,0x14,0x31,0xC3,0x85,0x2D,0x8C,0x98,0xB1,0x86,0x5A,0x4F,0x89,0x59,0x2D,0xB9,
+0xC7,0xF7,0x1C,0xC8,0x8A,0x7F,0xC0,0x9D,0x05,0x4A,0xE6,0x42,0x4F,0x62,0xA3,0x6D,
+0x29,0xA4,0x1F,0x85,0xAB,0xDB,0xE5,0x81,0xC8,0xAD,0x2A,0x3D,0x4C,0x5D,0x5B,0x84,
+0x26,0x71,0xC4,0x85,0x5E,0x71,0x24,0xCA,0xA5,0x1B,0x6C,0xD8,0x61,0xD3,0x1A,0xE0,
+0x54,0xDB,0xCE,0xBA,0xA9,0x32,0xB5,0x22,0xF6,0x73,0x41,0x09,0x5D,0xB8,0x17,0x5D,
+0x0E,0x0F,0x99,0x90,0xD6,0x47,0xDA,0x6F,0x0A,0x3A,0x62,0x28,0x14,0x67,0x82,0xD9,
+0xF1,0xD0,0x80,0x59,0x9B,0xCB,0x31,0xD8,0x9B,0x0F,0x8C,0x77,0x4E,0xB5,0x68,0x8A,
+0xF2,0x6C,0xF6,0x24,0x0E,0x2D,0x6C,0x70,0xC5,0x73,0xD1,0xDE,0x14,0xD0,0x71,0x8F,
+0xB6,0xD3,0x7B,0x02,0xF6,0xE3,0xB8,0xD4,0x09,0x6E,0x6B,0x9E,0x75,0x84,0x39,0xE6,
+0x7F,0x25,0xA5,0xF2,0x48,0x00,0xC0,0xA4,0x01,0xDA,0x3F,
+};
+
+
+/* subject:/C=IL/O=StartCom Ltd./CN=StartCom Certification Authority G2 */
+/* issuer :/C=IL/O=StartCom Ltd./CN=StartCom Certification Authority G2 */
+
+
+const unsigned char StartCom_Certification_Authority_G2_certificate[1383]={
+0x30,0x82,0x05,0x63,0x30,0x82,0x03,0x4B,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x3B,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,
+0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x4C,0x31,0x16,
+0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,
+0x6D,0x20,0x4C,0x74,0x64,0x2E,0x31,0x2C,0x30,0x2A,0x06,0x03,0x55,0x04,0x03,0x13,
+0x23,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x43,0x65,0x72,0x74,0x69,0x66,
+0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
+0x79,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,0x31,0x30,0x31,0x30,0x31,
+0x30,0x30,0x30,0x31,0x5A,0x17,0x0D,0x33,0x39,0x31,0x32,0x33,0x31,0x32,0x33,0x35,
+0x39,0x30,0x31,0x5A,0x30,0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x49,0x4C,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x53,0x74,
+0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x4C,0x74,0x64,0x2E,0x31,0x2C,0x30,0x2A,0x06,
+0x03,0x55,0x04,0x03,0x13,0x23,0x53,0x74,0x61,0x72,0x74,0x43,0x6F,0x6D,0x20,0x43,
+0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x47,0x32,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,
+0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xB6,0x89,0x36,0x5B,0x07,0xB7,
+0x20,0x36,0xBD,0x82,0xBB,0xE1,0x16,0x20,0x03,0x95,0x7A,0xAF,0x0E,0xA3,0x55,0xC9,
+0x25,0x99,0x4A,0xC5,0xD0,0x56,0x41,0x87,0x90,0x4D,0x21,0x60,0xA4,0x14,0x87,0x3B,
+0xCD,0xFD,0xB2,0x3E,0xB4,0x67,0x03,0x6A,0xED,0xE1,0x0F,0x4B,0xC0,0x91,0x85,0x70,
+0x45,0xE0,0x42,0x9E,0xDE,0x29,0x23,0xD4,0x01,0x0D,0xA0,0x10,0x79,0xB8,0xDB,0x03,
+0xBD,0xF3,0xA9,0x2F,0xD1,0xC6,0xE0,0x0F,0xCB,0x9E,0x8A,0x14,0x0A,0xB8,0xBD,0xF6,
+0x56,0x62,0xF1,0xC5,0x72,0xB6,0x32,0x25,0xD9,0xB2,0xF3,0xBD,0x65,0xC5,0x0D,0x2C,
+0x6E,0xD5,0x92,0x6F,0x18,0x8B,0x00,0x41,0x14,0x82,0x6F,0x40,0x20,0x26,0x7A,0x28,
+0x0F,0xF5,0x1E,0x7F,0x27,0xF7,0x94,0xB1,0x37,0x3D,0xB7,0xC7,0x91,0xF7,0xE2,0x01,
+0xEC,0xFD,0x94,0x89,0xE1,0xCC,0x6E,0xD3,0x36,0xD6,0x0A,0x19,0x79,0xAE,0xD7,0x34,
+0x82,0x65,0xFF,0x7C,0x42,0xBB,0xB6,0xDD,0x0B,0xA6,0x34,0xAF,0x4B,0x60,0xFE,0x7F,
+0x43,0x49,0x06,0x8B,0x8C,0x43,0xB8,0x56,0xF2,0xD9,0x7F,0x21,0x43,0x17,0xEA,0xA7,
+0x48,0x95,0x01,0x75,0x75,0xEA,0x2B,0xA5,0x43,0x95,0xEA,0x15,0x84,0x9D,0x08,0x8D,
+0x26,0x6E,0x55,0x9B,0xAB,0xDC,0xD2,0x39,0xD2,0x31,0x1D,0x60,0xE2,0xAC,0xCC,0x56,
+0x45,0x24,0xF5,0x1C,0x54,0xAB,0xEE,0x86,0xDD,0x96,0x32,0x85,0xF8,0x4C,0x4F,0xE8,
+0x95,0x76,0xB6,0x05,0xDD,0x36,0x23,0x67,0xBC,0xFF,0x15,0xE2,0xCA,0x3B,0xE6,0xA6,
+0xEC,0x3B,0xEC,0x26,0x11,0x34,0x48,0x8D,0xF6,0x80,0x2B,0x1A,0x23,0x02,0xEB,0x8A,
+0x1C,0x3A,0x76,0x2A,0x7B,0x56,0x16,0x1C,0x72,0x2A,0xB3,0xAA,0xE3,0x60,0xA5,0x00,
+0x9F,0x04,0x9B,0xE2,0x6F,0x1E,0x14,0x58,0x5B,0xA5,0x6C,0x8B,0x58,0x3C,0xC3,0xBA,
+0x4E,0x3A,0x5C,0xF7,0xE1,0x96,0x2B,0x3E,0xEF,0x07,0xBC,0xA4,0xE5,0x5D,0xCC,0x4D,
+0x9F,0x0D,0xE1,0xDC,0xAA,0xBB,0xE1,0x6E,0x1A,0xEC,0x8F,0xE1,0xB6,0x4C,0x4D,0x79,
+0x72,0x5D,0x17,0x35,0x0B,0x1D,0xD7,0xC1,0x47,0xDA,0x96,0x24,0xE0,0xD0,0x72,0xA8,
+0x5A,0x5F,0x66,0x2D,0x10,0xDC,0x2F,0x2A,0x13,0xAE,0x26,0xFE,0x0A,0x1C,0x19,0xCC,
+0xD0,0x3E,0x0B,0x9C,0xC8,0x09,0x2E,0xF9,0x5B,0x96,0x7A,0x47,0x9C,0xE9,0x7A,0xF3,
+0x05,0x50,0x74,0x95,0x73,0x9E,0x30,0x09,0xF3,0x97,0x82,0x5E,0xE6,0x8F,0x39,0x08,
+0x1E,0x59,0xE5,0x35,0x14,0x42,0x13,0xFF,0x00,0x9C,0xF7,0xBE,0xAA,0x50,0xCF,0xE2,
+0x51,0x48,0xD7,0xB8,0x6F,0xAF,0xF8,0x4E,0x7E,0x33,0x98,0x92,0x14,0x62,0x3A,0x75,
+0x63,0xCF,0x7B,0xFA,0xDE,0x82,0x3B,0xA9,0xBB,0x39,0xE2,0xC4,0xBD,0x2C,0x00,0x0E,
+0xC8,0x17,0xAC,0x13,0xEF,0x4D,0x25,0x8E,0xD8,0xB3,0x90,0x2F,0xA9,0xDA,0x29,0x7D,
+0x1D,0xAF,0x74,0x3A,0xB2,0x27,0xC0,0xC1,0x1E,0x3E,0x75,0xA3,0x16,0xA9,0xAF,0x7A,
+0x22,0x5D,0x9F,0x13,0x1A,0xCF,0xA7,0xA0,0xEB,0xE3,0x86,0x0A,0xD3,0xFD,0xE6,0x96,
+0x95,0xD7,0x23,0xC8,0x37,0xDD,0xC4,0x7C,0xAA,0x36,0xAC,0x98,0x1A,0x12,0xB1,0xE0,
+0x4E,0xE8,0xB1,0x3B,0xF5,0xD6,0x6F,0xF1,0x30,0xD7,0x02,0x03,0x01,0x00,0x01,0xA3,
+0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
+0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
+0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x4B,
+0xC5,0xB4,0x40,0x6B,0xAD,0x1C,0xB3,0xA5,0x1C,0x65,0x6E,0x46,0x36,0x89,0x87,0x05,
+0x0C,0x0E,0xB6,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,
+0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x73,0x57,0x3F,0x2C,0xD5,0x95,0x32,0x7E,0x37,
+0xDB,0x96,0x92,0xEB,0x19,0x5E,0x7E,0x53,0xE7,0x41,0xEC,0x11,0xB6,0x47,0xEF,0xB5,
+0xDE,0xED,0x74,0x5C,0xC5,0xF1,0x8E,0x49,0xE0,0xFC,0x6E,0x99,0x13,0xCD,0x9F,0x8A,
+0xDA,0xCD,0x3A,0x0A,0xD8,0x3A,0x5A,0x09,0x3F,0x5F,0x34,0xD0,0x2F,0x03,0xD2,0x66,
+0x1D,0x1A,0xBD,0x9C,0x90,0x37,0xC8,0x0C,0x8E,0x07,0x5A,0x94,0x45,0x46,0x2A,0xE6,
+0xBE,0x7A,0xDA,0xA1,0xA9,0xA4,0x69,0x12,0x92,0xB0,0x7D,0x36,0xD4,0x44,0x87,0xD7,
+0x51,0xF1,0x29,0x63,0xD6,0x75,0xCD,0x16,0xE4,0x27,0x89,0x1D,0xF8,0xC2,0x32,0x48,
+0xFD,0xDB,0x99,0xD0,0x8F,0x5F,0x54,0x74,0xCC,0xAC,0x67,0x34,0x11,0x62,0xD9,0x0C,
+0x0A,0x37,0x87,0xD1,0xA3,0x17,0x48,0x8E,0xD2,0x17,0x1D,0xF6,0xD7,0xFD,0xDB,0x65,
+0xEB,0xFD,0xA8,0xD4,0xF5,0xD6,0x4F,0xA4,0x5B,0x75,0xE8,0xC5,0xD2,0x60,0xB2,0xDB,
+0x09,0x7E,0x25,0x8B,0x7B,0xBA,0x52,0x92,0x9E,0x3E,0xE8,0xC5,0x77,0xA1,0x3C,0xE0,
+0x4A,0x73,0x6B,0x61,0xCF,0x86,0xDC,0x43,0xFF,0xFF,0x21,0xFE,0x23,0x5D,0x24,0x4A,
+0xF5,0xD3,0x6D,0x0F,0x62,0x04,0x05,0x57,0x82,0xDA,0x6E,0xA4,0x33,0x25,0x79,0x4B,
+0x2E,0x54,0x19,0x8B,0xCC,0x2C,0x3D,0x30,0xE9,0xD1,0x06,0xFF,0xE8,0x32,0x46,0xBE,
+0xB5,0x33,0x76,0x77,0xA8,0x01,0x5D,0x96,0xC1,0xC1,0xD5,0xBE,0xAE,0x25,0xC0,0xC9,
+0x1E,0x0A,0x09,0x20,0x88,0xA1,0x0E,0xC9,0xF3,0x6F,0x4D,0x82,0x54,0x00,0x20,0xA7,
+0xD2,0x8F,0xE4,0x39,0x54,0x17,0x2E,0x8D,0x1E,0xB8,0x1B,0xBB,0x1B,0xBD,0x9A,0x4E,
+0x3B,0x10,0x34,0xDC,0x9C,0x88,0x53,0xEF,0xA2,0x31,0x5B,0x58,0x4F,0x91,0x62,0xC8,
+0xC2,0x9A,0x9A,0xCD,0x15,0x5D,0x38,0xA9,0xD6,0xBE,0xF8,0x13,0xB5,0x9F,0x12,0x69,
+0xF2,0x50,0x62,0xAC,0xFB,0x17,0x37,0xF4,0xEE,0xB8,0x75,0x67,0x60,0x10,0xFB,0x83,
+0x50,0xF9,0x44,0xB5,0x75,0x9C,0x40,0x17,0xB2,0xFE,0xFD,0x79,0x5D,0x6E,0x58,0x58,
+0x5F,0x30,0xFC,0x00,0xAE,0xAF,0x33,0xC1,0x0E,0x4E,0x6C,0xBA,0xA7,0xA6,0xA1,0x7F,
+0x32,0xDB,0x38,0xE0,0xB1,0x72,0x17,0x0A,0x2B,0x91,0xEC,0x6A,0x63,0x26,0xED,0x89,
+0xD4,0x78,0xCC,0x74,0x1E,0x05,0xF8,0x6B,0xFE,0x8C,0x6A,0x76,0x39,0x29,0xAE,0x65,
+0x23,0x12,0x95,0x08,0x22,0x1C,0x97,0xCE,0x5B,0x06,0xEE,0x0C,0xE2,0xBB,0xBC,0x1F,
+0x44,0x93,0xF6,0xD8,0x38,0x45,0x05,0x21,0xED,0xE4,0xAD,0xAB,0x12,0xB6,0x03,0xA4,
+0x42,0x2E,0x2D,0xC4,0x09,0x3A,0x03,0x67,0x69,0x84,0x9A,0xE1,0x59,0x90,0x8A,0x28,
+0x85,0xD5,0x5D,0x74,0xB1,0xD1,0x0E,0x20,0x58,0x9B,0x13,0xA5,0xB0,0x63,0xA6,0xED,
+0x7B,0x47,0xFD,0x45,0x55,0x30,0xA4,0xEE,0x9A,0xD4,0xE6,0xE2,0x87,0xEF,0x98,0xC9,
+0x32,0x82,0x11,0x29,0x22,0xBC,0x00,0x0A,0x31,0x5E,0x2D,0x0F,0xC0,0x8E,0xE9,0x6B,
+0xB2,0x8F,0x2E,0x06,0xD8,0xD1,0x91,0xC7,0xC6,0x12,0xF4,0x4C,0xFD,0x30,0x17,0xC3,
+0xC1,0xDA,0x38,0x5B,0xE3,0xA9,0xEA,0xE6,0xA1,0xBA,0x79,0xEF,0x73,0xD8,0xB6,0x53,
+0x57,0x2D,0xF6,0xD0,0xE1,0xD7,0x48,
+};
+
+
+/* subject:/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Class 2 CA/CN=TC TrustCenter Class 2 CA II */
+/* issuer :/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Class 2 CA/CN=TC TrustCenter Class 2 CA II */
+
+
+const unsigned char TC_TrustCenter_Class_2_CA_II_certificate[1198]={
+0x30,0x82,0x04,0xAA,0x30,0x82,0x03,0x92,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x2E,
+0x6A,0x00,0x01,0x00,0x02,0x1F,0xD7,0x52,0x21,0x2C,0x11,0x5C,0x3B,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x76,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,
+0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x22,0x30,0x20,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x19,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,
+0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x41,0x31,0x25,0x30,
+0x23,0x06,0x03,0x55,0x04,0x03,0x13,0x1C,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,
+0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,
+0x41,0x20,0x49,0x49,0x30,0x1E,0x17,0x0D,0x30,0x36,0x30,0x31,0x31,0x32,0x31,0x34,
+0x33,0x38,0x34,0x33,0x5A,0x17,0x0D,0x32,0x35,0x31,0x32,0x33,0x31,0x32,0x32,0x35,
+0x39,0x35,0x39,0x5A,0x30,0x76,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,
+0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,
+0x48,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x0B,0x13,0x19,0x54,0x43,0x20,0x54,
+0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,
+0x20,0x32,0x20,0x43,0x41,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x03,0x13,0x1C,
+0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,
+0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x41,0x20,0x49,0x49,0x30,0x82,0x01,0x22,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
+0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAB,0x80,0x87,
+0x9B,0x8E,0xF0,0xC3,0x7C,0x87,0xD7,0xE8,0x24,0x82,0x11,0xB3,0x3C,0xDD,0x43,0x62,
+0xEE,0xF8,0xC3,0x45,0xDA,0xE8,0xE1,0xA0,0x5F,0xD1,0x2A,0xB2,0xEA,0x93,0x68,0xDF,
+0xB4,0xC8,0xD6,0x43,0xE9,0xC4,0x75,0x59,0x7F,0xFC,0xE1,0x1D,0xF8,0x31,0x70,0x23,
+0x1B,0x88,0x9E,0x27,0xB9,0x7B,0xFD,0x3A,0xD2,0xC9,0xA9,0xE9,0x14,0x2F,0x90,0xBE,
+0x03,0x52,0xC1,0x49,0xCD,0xF6,0xFD,0xE4,0x08,0x66,0x0B,0x57,0x8A,0xA2,0x42,0xA0,
+0xB8,0xD5,0x7F,0x69,0x5C,0x90,0x32,0xB2,0x97,0x0D,0xCA,0x4A,0xDC,0x46,0x3E,0x02,
+0x55,0x89,0x53,0xE3,0x1A,0x5A,0xCB,0x36,0xC6,0x07,0x56,0xF7,0x8C,0xCF,0x11,0xF4,
+0x4C,0xBB,0x30,0x70,0x04,0x95,0xA5,0xF6,0x39,0x8C,0xFD,0x73,0x81,0x08,0x7D,0x89,
+0x5E,0x32,0x1E,0x22,0xA9,0x22,0x45,0x4B,0xB0,0x66,0x2E,0x30,0xCC,0x9F,0x65,0xFD,
+0xFC,0xCB,0x81,0xA9,0xF1,0xE0,0x3B,0xAF,0xA3,0x86,0xD1,0x89,0xEA,0xC4,0x45,0x79,
+0x50,0x5D,0xAE,0xE9,0x21,0x74,0x92,0x4D,0x8B,0x59,0x82,0x8F,0x94,0xE3,0xE9,0x4A,
+0xF1,0xE7,0x49,0xB0,0x14,0xE3,0xF5,0x62,0xCB,0xD5,0x72,0xBD,0x1F,0xB9,0xD2,0x9F,
+0xA0,0xCD,0xA8,0xFA,0x01,0xC8,0xD9,0x0D,0xDF,0xDA,0xFC,0x47,0x9D,0xB3,0xC8,0x54,
+0xDF,0x49,0x4A,0xF1,0x21,0xA9,0xFE,0x18,0x4E,0xEE,0x48,0xD4,0x19,0xBB,0xEF,0x7D,
+0xE4,0xE2,0x9D,0xCB,0x5B,0xB6,0x6E,0xFF,0xE3,0xCD,0x5A,0xE7,0x74,0x82,0x05,0xBA,
+0x80,0x25,0x38,0xCB,0xE4,0x69,0x9E,0xAF,0x41,0xAA,0x1A,0x84,0xF5,0x02,0x03,0x01,
+0x00,0x01,0xA3,0x82,0x01,0x34,0x30,0x82,0x01,0x30,0x30,0x0F,0x06,0x03,0x55,0x1D,
+0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,
+0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,
+0x1D,0x0E,0x04,0x16,0x04,0x14,0xE3,0xAB,0x54,0x4C,0x80,0xA1,0xDB,0x56,0x43,0xB7,
+0x91,0x4A,0xCB,0xF3,0x82,0x7A,0x13,0x5C,0x08,0xAB,0x30,0x81,0xED,0x06,0x03,0x55,
+0x1D,0x1F,0x04,0x81,0xE5,0x30,0x81,0xE2,0x30,0x81,0xDF,0xA0,0x81,0xDC,0xA0,0x81,
+0xD9,0x86,0x35,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x74,0x72,
+0x75,0x73,0x74,0x63,0x65,0x6E,0x74,0x65,0x72,0x2E,0x64,0x65,0x2F,0x63,0x72,0x6C,
+0x2F,0x76,0x32,0x2F,0x74,0x63,0x5F,0x63,0x6C,0x61,0x73,0x73,0x5F,0x32,0x5F,0x63,
+0x61,0x5F,0x49,0x49,0x2E,0x63,0x72,0x6C,0x86,0x81,0x9F,0x6C,0x64,0x61,0x70,0x3A,
+0x2F,0x2F,0x77,0x77,0x77,0x2E,0x74,0x72,0x75,0x73,0x74,0x63,0x65,0x6E,0x74,0x65,
+0x72,0x2E,0x64,0x65,0x2F,0x43,0x4E,0x3D,0x54,0x43,0x25,0x32,0x30,0x54,0x72,0x75,
+0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x25,0x32,0x30,0x43,0x6C,0x61,0x73,0x73,
+0x25,0x32,0x30,0x32,0x25,0x32,0x30,0x43,0x41,0x25,0x32,0x30,0x49,0x49,0x2C,0x4F,
+0x3D,0x54,0x43,0x25,0x32,0x30,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,
+0x72,0x25,0x32,0x30,0x47,0x6D,0x62,0x48,0x2C,0x4F,0x55,0x3D,0x72,0x6F,0x6F,0x74,
+0x63,0x65,0x72,0x74,0x73,0x2C,0x44,0x43,0x3D,0x74,0x72,0x75,0x73,0x74,0x63,0x65,
+0x6E,0x74,0x65,0x72,0x2C,0x44,0x43,0x3D,0x64,0x65,0x3F,0x63,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x65,0x52,0x65,0x76,0x6F,0x63,0x61,0x74,0x69,0x6F,0x6E,
+0x4C,0x69,0x73,0x74,0x3F,0x62,0x61,0x73,0x65,0x3F,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x8C,0xD7,
+0xDF,0x7E,0xEE,0x1B,0x80,0x10,0xB3,0x83,0xF5,0xDB,0x11,0xEA,0x6B,0x4B,0xA8,0x92,
+0x18,0xD9,0xF7,0x07,0x39,0xF5,0x2C,0xBE,0x06,0x75,0x7A,0x68,0x53,0x15,0x1C,0xEA,
+0x4A,0xED,0x5E,0xFC,0x23,0xB2,0x13,0xA0,0xD3,0x09,0xFF,0xF6,0xF6,0x2E,0x6B,0x41,
+0x71,0x79,0xCD,0xE2,0x6D,0xFD,0xAE,0x59,0x6B,0x85,0x1D,0xB8,0x4E,0x22,0x9A,0xED,
+0x66,0x39,0x6E,0x4B,0x94,0xE6,0x55,0xFC,0x0B,0x1B,0x8B,0x77,0xC1,0x53,0x13,0x66,
+0x89,0xD9,0x28,0xD6,0x8B,0xF3,0x45,0x4A,0x63,0xB7,0xFD,0x7B,0x0B,0x61,0x5D,0xB8,
+0x6D,0xBE,0xC3,0xDC,0x5B,0x79,0xD2,0xED,0x86,0xE5,0xA2,0x4D,0xBE,0x5E,0x74,0x7C,
+0x6A,0xED,0x16,0x38,0x1F,0x7F,0x58,0x81,0x5A,0x1A,0xEB,0x32,0x88,0x2D,0xB2,0xF3,
+0x39,0x77,0x80,0xAF,0x5E,0xB6,0x61,0x75,0x29,0xDB,0x23,0x4D,0x88,0xCA,0x50,0x28,
+0xCB,0x85,0xD2,0xD3,0x10,0xA2,0x59,0x6E,0xD3,0x93,0x54,0x00,0x7A,0xA2,0x46,0x95,
+0x86,0x05,0x9C,0xA9,0x19,0x98,0xE5,0x31,0x72,0x0C,0x00,0xE2,0x67,0xD9,0x40,0xE0,
+0x24,0x33,0x7B,0x6F,0x2C,0xB9,0x5C,0xAB,0x65,0x9D,0x2C,0xAC,0x76,0xEA,0x35,0x99,
+0xF5,0x97,0xB9,0x0F,0x24,0xEC,0xC7,0x76,0x21,0x28,0x65,0xAE,0x57,0xE8,0x07,0x88,
+0x75,0x4A,0x56,0xA0,0xD2,0x05,0x3A,0xA4,0xE6,0x8D,0x92,0x88,0x2C,0xF3,0xF2,0xE1,
+0xC1,0xC6,0x61,0xDB,0x41,0xC5,0xC7,0x9B,0xF7,0x0E,0x1A,0x51,0x45,0xC2,0x61,0x6B,
+0xDC,0x64,0x27,0x17,0x8C,0x5A,0xB7,0xDA,0x74,0x28,0xCD,0x97,0xE4,0xBD,
+};
+
+
+/* subject:/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Class 3 CA/CN=TC TrustCenter Class 3 CA II */
+/* issuer :/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Class 3 CA/CN=TC TrustCenter Class 3 CA II */
+
+
+const unsigned char TC_TrustCenter_Class_3_CA_II_certificate[1198]={
+0x30,0x82,0x04,0xAA,0x30,0x82,0x03,0x92,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x4A,
+0x47,0x00,0x01,0x00,0x02,0xE5,0xA0,0x5D,0xD6,0x3F,0x00,0x51,0xBF,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x76,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,
+0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x22,0x30,0x20,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x19,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,
+0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x43,0x41,0x31,0x25,0x30,
+0x23,0x06,0x03,0x55,0x04,0x03,0x13,0x1C,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,
+0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x43,
+0x41,0x20,0x49,0x49,0x30,0x1E,0x17,0x0D,0x30,0x36,0x30,0x31,0x31,0x32,0x31,0x34,
+0x34,0x31,0x35,0x37,0x5A,0x17,0x0D,0x32,0x35,0x31,0x32,0x33,0x31,0x32,0x32,0x35,
+0x39,0x35,0x39,0x5A,0x30,0x76,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,
+0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,
+0x48,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x0B,0x13,0x19,0x54,0x43,0x20,0x54,
+0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,
+0x20,0x33,0x20,0x43,0x41,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x03,0x13,0x1C,
+0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,
+0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x43,0x41,0x20,0x49,0x49,0x30,0x82,0x01,0x22,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
+0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xB4,0xE0,0xBB,
+0x51,0xBB,0x39,0x5C,0x8B,0x04,0xC5,0x4C,0x79,0x1C,0x23,0x86,0x31,0x10,0x63,0x43,
+0x55,0x27,0x3F,0xC6,0x45,0xC7,0xA4,0x3D,0xEC,0x09,0x0D,0x1A,0x1E,0x20,0xC2,0x56,
+0x1E,0xDE,0x1B,0x37,0x07,0x30,0x22,0x2F,0x6F,0xF1,0x06,0xF1,0xAB,0xAD,0xD6,0xC8,
+0xAB,0x61,0xA3,0x2F,0x43,0xC4,0xB0,0xB2,0x2D,0xFC,0xC3,0x96,0x69,0x7B,0x7E,0x8A,
+0xE4,0xCC,0xC0,0x39,0x12,0x90,0x42,0x60,0xC9,0xCC,0x35,0x68,0xEE,0xDA,0x5F,0x90,
+0x56,0x5F,0xCD,0x1C,0x4D,0x5B,0x58,0x49,0xEB,0x0E,0x01,0x4F,0x64,0xFA,0x2C,0x3C,
+0x89,0x58,0xD8,0x2F,0x2E,0xE2,0xB0,0x68,0xE9,0x22,0x3B,0x75,0x89,0xD6,0x44,0x1A,
+0x65,0xF2,0x1B,0x97,0x26,0x1D,0x28,0x6D,0xAC,0xE8,0xBD,0x59,0x1D,0x2B,0x24,0xF6,
+0xD6,0x84,0x03,0x66,0x88,0x24,0x00,0x78,0x60,0xF1,0xF8,0xAB,0xFE,0x02,0xB2,0x6B,
+0xFB,0x22,0xFB,0x35,0xE6,0x16,0xD1,0xAD,0xF6,0x2E,0x12,0xE4,0xFA,0x35,0x6A,0xE5,
+0x19,0xB9,0x5D,0xDB,0x3B,0x1E,0x1A,0xFB,0xD3,0xFF,0x15,0x14,0x08,0xD8,0x09,0x6A,
+0xBA,0x45,0x9D,0x14,0x79,0x60,0x7D,0xAF,0x40,0x8A,0x07,0x73,0xB3,0x93,0x96,0xD3,
+0x74,0x34,0x8D,0x3A,0x37,0x29,0xDE,0x5C,0xEC,0xF5,0xEE,0x2E,0x31,0xC2,0x20,0xDC,
+0xBE,0xF1,0x4F,0x7F,0x23,0x52,0xD9,0x5B,0xE2,0x64,0xD9,0x9C,0xAA,0x07,0x08,0xB5,
+0x45,0xBD,0xD1,0xD0,0x31,0xC1,0xAB,0x54,0x9F,0xA9,0xD2,0xC3,0x62,0x60,0x03,0xF1,
+0xBB,0x39,0x4A,0x92,0x4A,0x3D,0x0A,0xB9,0x9D,0xC5,0xA0,0xFE,0x37,0x02,0x03,0x01,
+0x00,0x01,0xA3,0x82,0x01,0x34,0x30,0x82,0x01,0x30,0x30,0x0F,0x06,0x03,0x55,0x1D,
+0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,
+0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,
+0x1D,0x0E,0x04,0x16,0x04,0x14,0xD4,0xA2,0xFC,0x9F,0xB3,0xC3,0xD8,0x03,0xD3,0x57,
+0x5C,0x07,0xA4,0xD0,0x24,0xA7,0xC0,0xF2,0x00,0xD4,0x30,0x81,0xED,0x06,0x03,0x55,
+0x1D,0x1F,0x04,0x81,0xE5,0x30,0x81,0xE2,0x30,0x81,0xDF,0xA0,0x81,0xDC,0xA0,0x81,
+0xD9,0x86,0x35,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x74,0x72,
+0x75,0x73,0x74,0x63,0x65,0x6E,0x74,0x65,0x72,0x2E,0x64,0x65,0x2F,0x63,0x72,0x6C,
+0x2F,0x76,0x32,0x2F,0x74,0x63,0x5F,0x63,0x6C,0x61,0x73,0x73,0x5F,0x33,0x5F,0x63,
+0x61,0x5F,0x49,0x49,0x2E,0x63,0x72,0x6C,0x86,0x81,0x9F,0x6C,0x64,0x61,0x70,0x3A,
+0x2F,0x2F,0x77,0x77,0x77,0x2E,0x74,0x72,0x75,0x73,0x74,0x63,0x65,0x6E,0x74,0x65,
+0x72,0x2E,0x64,0x65,0x2F,0x43,0x4E,0x3D,0x54,0x43,0x25,0x32,0x30,0x54,0x72,0x75,
+0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x25,0x32,0x30,0x43,0x6C,0x61,0x73,0x73,
+0x25,0x32,0x30,0x33,0x25,0x32,0x30,0x43,0x41,0x25,0x32,0x30,0x49,0x49,0x2C,0x4F,
+0x3D,0x54,0x43,0x25,0x32,0x30,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,
+0x72,0x25,0x32,0x30,0x47,0x6D,0x62,0x48,0x2C,0x4F,0x55,0x3D,0x72,0x6F,0x6F,0x74,
+0x63,0x65,0x72,0x74,0x73,0x2C,0x44,0x43,0x3D,0x74,0x72,0x75,0x73,0x74,0x63,0x65,
+0x6E,0x74,0x65,0x72,0x2C,0x44,0x43,0x3D,0x64,0x65,0x3F,0x63,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x65,0x52,0x65,0x76,0x6F,0x63,0x61,0x74,0x69,0x6F,0x6E,
+0x4C,0x69,0x73,0x74,0x3F,0x62,0x61,0x73,0x65,0x3F,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x36,0x60,
+0xE4,0x70,0xF7,0x06,0x20,0x43,0xD9,0x23,0x1A,0x42,0xF2,0xF8,0xA3,0xB2,0xB9,0x4D,
+0x8A,0xB4,0xF3,0xC2,0x9A,0x55,0x31,0x7C,0xC4,0x3B,0x67,0x9A,0xB4,0xDF,0x4D,0x0E,
+0x8A,0x93,0x4A,0x17,0x8B,0x1B,0x8D,0xCA,0x89,0xE1,0xCF,0x3A,0x1E,0xAC,0x1D,0xF1,
+0x9C,0x32,0xB4,0x8E,0x59,0x76,0xA2,0x41,0x85,0x25,0x37,0xA0,0x13,0xD0,0xF5,0x7C,
+0x4E,0xD5,0xEA,0x96,0xE2,0x6E,0x72,0xC1,0xBB,0x2A,0xFE,0x6C,0x6E,0xF8,0x91,0x98,
+0x46,0xFC,0xC9,0x1B,0x57,0x5B,0xEA,0xC8,0x1A,0x3B,0x3F,0xB0,0x51,0x98,0x3C,0x07,
+0xDA,0x2C,0x59,0x01,0xDA,0x8B,0x44,0xE8,0xE1,0x74,0xFD,0xA7,0x68,0xDD,0x54,0xBA,
+0x83,0x46,0xEC,0xC8,0x46,0xB5,0xF8,0xAF,0x97,0xC0,0x3B,0x09,0x1C,0x8F,0xCE,0x72,
+0x96,0x3D,0x33,0x56,0x70,0xBC,0x96,0xCB,0xD8,0xD5,0x7D,0x20,0x9A,0x83,0x9F,0x1A,
+0xDC,0x39,0xF1,0xC5,0x72,0xA3,0x11,0x03,0xFD,0x3B,0x42,0x52,0x29,0xDB,0xE8,0x01,
+0xF7,0x9B,0x5E,0x8C,0xD6,0x8D,0x86,0x4E,0x19,0xFA,0xBC,0x1C,0xBE,0xC5,0x21,0xA5,
+0x87,0x9E,0x78,0x2E,0x36,0xDB,0x09,0x71,0xA3,0x72,0x34,0xF8,0x6C,0xE3,0x06,0x09,
+0xF2,0x5E,0x56,0xA5,0xD3,0xDD,0x98,0xFA,0xD4,0xE6,0x06,0xF4,0xF0,0xB6,0x20,0x63,
+0x4B,0xEA,0x29,0xBD,0xAA,0x82,0x66,0x1E,0xFB,0x81,0xAA,0xA7,0x37,0xAD,0x13,0x18,
+0xE6,0x92,0xC3,0x81,0xC1,0x33,0xBB,0x88,0x1E,0xA1,0xE7,0xE2,0xB4,0xBD,0x31,0x6C,
+0x0E,0x51,0x3D,0x6F,0xFB,0x96,0x56,0x80,0xE2,0x36,0x17,0xD1,0xDC,0xE4,
+};
+
+
+/* subject:/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA I */
+/* issuer :/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA I */
+
+
+const unsigned char TC_TrustCenter_Universal_CA_I_certificate[993]={
+0x30,0x82,0x03,0xDD,0x30,0x82,0x02,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x1D,
+0xA2,0x00,0x01,0x00,0x02,0xEC,0xB7,0x60,0x80,0x78,0x8D,0xB6,0x06,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x79,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,
+0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x1B,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,
+0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,
+0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x03,0x13,0x1D,0x54,0x43,0x20,0x54,0x72,0x75,
+0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,
+0x61,0x6C,0x20,0x43,0x41,0x20,0x49,0x30,0x1E,0x17,0x0D,0x30,0x36,0x30,0x33,0x32,
+0x32,0x31,0x35,0x35,0x34,0x32,0x38,0x5A,0x17,0x0D,0x32,0x35,0x31,0x32,0x33,0x31,
+0x32,0x32,0x35,0x39,0x35,0x39,0x5A,0x30,0x79,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
+0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,
+0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,
+0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x54,
+0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,
+0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,0x26,0x30,0x24,0x06,0x03,
+0x55,0x04,0x03,0x13,0x1D,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,
+0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,
+0x20,0x49,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
+0x01,0x01,0x00,0xA4,0x77,0x23,0x96,0x44,0xAF,0x90,0xF4,0x31,0xA7,0x10,0xF4,0x26,
+0x87,0x9C,0xF3,0x38,0xD9,0x0F,0x5E,0xDE,0xCF,0x41,0xE8,0x31,0xAD,0xC6,0x74,0x91,
+0x24,0x96,0x78,0x1E,0x09,0xA0,0x9B,0x9A,0x95,0x4A,0x4A,0xF5,0x62,0x7C,0x02,0xA8,
+0xCA,0xAC,0xFB,0x5A,0x04,0x76,0x39,0xDE,0x5F,0xF1,0xF9,0xB3,0xBF,0xF3,0x03,0x58,
+0x55,0xD2,0xAA,0xB7,0xE3,0x04,0x22,0xD1,0xF8,0x94,0xDA,0x22,0x08,0x00,0x8D,0xD3,
+0x7C,0x26,0x5D,0xCC,0x77,0x79,0xE7,0x2C,0x78,0x39,0xA8,0x26,0x73,0x0E,0xA2,0x5D,
+0x25,0x69,0x85,0x4F,0x55,0x0E,0x9A,0xEF,0xC6,0xB9,0x44,0xE1,0x57,0x3D,0xDF,0x1F,
+0x54,0x22,0xE5,0x6F,0x65,0xAA,0x33,0x84,0x3A,0xF3,0xCE,0x7A,0xBE,0x55,0x97,0xAE,
+0x8D,0x12,0x0F,0x14,0x33,0xE2,0x50,0x70,0xC3,0x49,0x87,0x13,0xBC,0x51,0xDE,0xD7,
+0x98,0x12,0x5A,0xEF,0x3A,0x83,0x33,0x92,0x06,0x75,0x8B,0x92,0x7C,0x12,0x68,0x7B,
+0x70,0x6A,0x0F,0xB5,0x9B,0xB6,0x77,0x5B,0x48,0x59,0x9D,0xE4,0xEF,0x5A,0xAD,0xF3,
+0xC1,0x9E,0xD4,0xD7,0x45,0x4E,0xCA,0x56,0x34,0x21,0xBC,0x3E,0x17,0x5B,0x6F,0x77,
+0x0C,0x48,0x01,0x43,0x29,0xB0,0xDD,0x3F,0x96,0x6E,0xE6,0x95,0xAA,0x0C,0xC0,0x20,
+0xB6,0xFD,0x3E,0x36,0x27,0x9C,0xE3,0x5C,0xCF,0x4E,0x81,0xDC,0x19,0xBB,0x91,0x90,
+0x7D,0xEC,0xE6,0x97,0x04,0x1E,0x93,0xCC,0x22,0x49,0xD7,0x97,0x86,0xB6,0x13,0x0A,
+0x3C,0x43,0x23,0x77,0x7E,0xF0,0xDC,0xE6,0xCD,0x24,0x1F,0x3B,0x83,0x9B,0x34,0x3A,
+0x83,0x34,0xE3,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x1F,0x06,0x03,
+0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x92,0xA4,0x75,0x2C,0xA4,0x9E,0xBE,
+0x81,0x44,0xEB,0x79,0xFC,0x8A,0xC5,0x95,0xA5,0xEB,0x10,0x75,0x73,0x30,0x0F,0x06,
+0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,
+0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D,
+0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x92,0xA4,0x75,0x2C,0xA4,0x9E,0xBE,
+0x81,0x44,0xEB,0x79,0xFC,0x8A,0xC5,0x95,0xA5,0xEB,0x10,0x75,0x73,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,
+0x00,0x28,0xD2,0xE0,0x86,0xD5,0xE6,0xF8,0x7B,0xF0,0x97,0xDC,0x22,0x6B,0x3B,0x95,
+0x14,0x56,0x0F,0x11,0x30,0xA5,0x9A,0x4F,0x3A,0xB0,0x3A,0xE0,0x06,0xCB,0x65,0xF5,
+0xED,0xC6,0x97,0x27,0xFE,0x25,0xF2,0x57,0xE6,0x5E,0x95,0x8C,0x3E,0x64,0x60,0x15,
+0x5A,0x7F,0x2F,0x0D,0x01,0xC5,0xB1,0x60,0xFD,0x45,0x35,0xCF,0xF0,0xB2,0xBF,0x06,
+0xD9,0xEF,0x5A,0xBE,0xB3,0x62,0x21,0xB4,0xD7,0xAB,0x35,0x7C,0x53,0x3E,0xA6,0x27,
+0xF1,0xA1,0x2D,0xDA,0x1A,0x23,0x9D,0xCC,0xDD,0xEC,0x3C,0x2D,0x9E,0x27,0x34,0x5D,
+0x0F,0xC2,0x36,0x79,0xBC,0xC9,0x4A,0x62,0x2D,0xED,0x6B,0xD9,0x7D,0x41,0x43,0x7C,
+0xB6,0xAA,0xCA,0xED,0x61,0xB1,0x37,0x82,0x15,0x09,0x1A,0x8A,0x16,0x30,0xD8,0xEC,
+0xC9,0xD6,0x47,0x72,0x78,0x4B,0x10,0x46,0x14,0x8E,0x5F,0x0E,0xAF,0xEC,0xC7,0x2F,
+0xAB,0x10,0xD7,0xB6,0xF1,0x6E,0xEC,0x86,0xB2,0xC2,0xE8,0x0D,0x92,0x73,0xDC,0xA2,
+0xF4,0x0F,0x3A,0xBF,0x61,0x23,0x10,0x89,0x9C,0x48,0x40,0x6E,0x70,0x00,0xB3,0xD3,
+0xBA,0x37,0x44,0x58,0x11,0x7A,0x02,0x6A,0x88,0xF0,0x37,0x34,0xF0,0x19,0xE9,0xAC,
+0xD4,0x65,0x73,0xF6,0x69,0x8C,0x64,0x94,0x3A,0x79,0x85,0x29,0xB0,0x16,0x2B,0x0C,
+0x82,0x3F,0x06,0x9C,0xC7,0xFD,0x10,0x2B,0x9E,0x0F,0x2C,0xB6,0x9E,0xE3,0x15,0xBF,
+0xD9,0x36,0x1C,0xBA,0x25,0x1A,0x52,0x3D,0x1A,0xEC,0x22,0x0C,0x1C,0xE0,0xA4,0xA2,
+0x3D,0xF0,0xE8,0x39,0xCF,0x81,0xC0,0x7B,0xED,0x5D,0x1F,0x6F,0xC5,0xD0,0x0B,0xD7,
+0x98,
+};
+
+
+/* subject:/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA III */
+/* issuer :/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA III */
+
+
+const unsigned char TC_TrustCenter_Universal_CA_III_certificate[997]={
+0x30,0x82,0x03,0xE1,0x30,0x82,0x02,0xC9,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x63,
+0x25,0x00,0x01,0x00,0x02,0x14,0x8D,0x33,0x15,0x02,0xE4,0x6C,0xF4,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x7B,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,
+0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x1B,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,
+0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,
+0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x03,0x13,0x1F,0x54,0x43,0x20,0x54,0x72,0x75,
+0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,
+0x61,0x6C,0x20,0x43,0x41,0x20,0x49,0x49,0x49,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,
+0x39,0x30,0x39,0x30,0x38,0x31,0x35,0x32,0x37,0x5A,0x17,0x0D,0x32,0x39,0x31,0x32,
+0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x7B,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,
+0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,
+0x1B,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,
+0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,0x28,0x30,0x26,
+0x06,0x03,0x55,0x04,0x03,0x13,0x1F,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,
+0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,
+0x43,0x41,0x20,0x49,0x49,0x49,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,
+0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xC2,0xDA,0x9C,0x62,0xB0,0xB9,0x71,0x12,0xB0,
+0x0B,0xC8,0x1A,0x57,0xB2,0xAE,0x83,0x14,0x99,0xB3,0x34,0x4B,0x9B,0x90,0xA2,0xC5,
+0xE7,0xE7,0x2F,0x02,0xA0,0x4D,0x2D,0xA4,0xFA,0x85,0xDA,0x9B,0x25,0x85,0x2D,0x40,
+0x28,0x20,0x6D,0xEA,0xE0,0xBD,0xB1,0x48,0x83,0x22,0x29,0x44,0x9F,0x4E,0x83,0xEE,
+0x35,0x51,0x13,0x73,0x74,0xD5,0xBC,0xF2,0x30,0x66,0x94,0x53,0xC0,0x40,0x36,0x2F,
+0x0C,0x84,0x65,0xCE,0x0F,0x6E,0xC2,0x58,0x93,0xE8,0x2C,0x0B,0x3A,0xE9,0xC1,0x8E,
+0xFB,0xF2,0x6B,0xCA,0x3C,0xE2,0x9C,0x4E,0x8E,0xE4,0xF9,0x7D,0xD3,0x27,0x9F,0x1B,
+0xD5,0x67,0x78,0x87,0x2D,0x7F,0x0B,0x47,0xB3,0xC7,0xE8,0xC9,0x48,0x7C,0xAF,0x2F,
+0xCC,0x0A,0xD9,0x41,0xEF,0x9F,0xFE,0x9A,0xE1,0xB2,0xAE,0xF9,0x53,0xB5,0xE5,0xE9,
+0x46,0x9F,0x60,0xE3,0xDF,0x8D,0xD3,0x7F,0xFB,0x96,0x7E,0xB3,0xB5,0x72,0xF8,0x4B,
+0xAD,0x08,0x79,0xCD,0x69,0x89,0x40,0x27,0xF5,0x2A,0xC1,0xAD,0x43,0xEC,0xA4,0x53,
+0xC8,0x61,0xB6,0xF7,0xD2,0x79,0x2A,0x67,0x18,0x76,0x48,0x6D,0x5B,0x25,0x01,0xD1,
+0x26,0xC5,0xB7,0x57,0x69,0x23,0x15,0x5B,0x61,0x8A,0xAD,0xF0,0x1B,0x2D,0xD9,0xAF,
+0x5C,0xF1,0x26,0x90,0x69,0xA9,0xD5,0x0C,0x40,0xF5,0x33,0x80,0x43,0x8F,0x9C,0xA3,
+0x76,0x2A,0x45,0xB4,0xAF,0xBF,0x7F,0x3E,0x87,0x3F,0x76,0xC5,0xCD,0x2A,0xDE,0x20,
+0xC5,0x16,0x58,0xCB,0xF9,0x1B,0xF5,0x0F,0xCB,0x0D,0x11,0x52,0x64,0xB8,0xD2,0x76,
+0x62,0x77,0x83,0xF1,0x58,0x9F,0xFF,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,
+0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x56,0xE7,0xE1,
+0x5B,0x25,0x43,0x80,0xE0,0xF6,0x8C,0xE1,0x71,0xBC,0x8E,0xE5,0x80,0x2F,0xC4,0x48,
+0xE2,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,
+0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,
+0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x56,0xE7,0xE1,
+0x5B,0x25,0x43,0x80,0xE0,0xF6,0x8C,0xE1,0x71,0xBC,0x8E,0xE5,0x80,0x2F,0xC4,0x48,
+0xE2,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,
+0x03,0x82,0x01,0x01,0x00,0x83,0xC7,0xAF,0xEA,0x7F,0x4D,0x0A,0x3C,0x39,0xB1,0x68,
+0xBE,0x7B,0x6D,0x89,0x2E,0xE9,0xB3,0x09,0xE7,0x18,0x57,0x8D,0x85,0x9A,0x17,0xF3,
+0x76,0x42,0x50,0x13,0x0F,0xC7,0x90,0x6F,0x33,0xAD,0xC5,0x49,0x60,0x2B,0x6C,0x49,
+0x58,0x19,0xD4,0xE2,0xBE,0xB7,0xBF,0xAB,0x49,0xBC,0x94,0xC8,0xAB,0xBE,0x28,0x6C,
+0x16,0x68,0xE0,0xC8,0x97,0x46,0x20,0xA0,0x68,0x67,0x60,0x88,0x39,0x20,0x51,0xD8,
+0x68,0x01,0x11,0xCE,0xA7,0xF6,0x11,0x07,0xF6,0xEC,0xEC,0xAC,0x1A,0x1F,0xB2,0x66,
+0x6E,0x56,0x67,0x60,0x7A,0x74,0x5E,0xC0,0x6D,0x97,0x36,0xAE,0xB5,0x0D,0x5D,0x66,
+0x73,0xC0,0x25,0x32,0x45,0xD8,0x4A,0x06,0x07,0x8F,0xC4,0xB7,0x07,0xB1,0x4D,0x06,
+0x0D,0xE1,0xA5,0xEB,0xF4,0x75,0xCA,0xBA,0x9C,0xD0,0xBD,0xB3,0xD3,0x32,0x24,0x4C,
+0xEE,0x7E,0xE2,0x76,0x04,0x4B,0x49,0x53,0xD8,0xF2,0xE9,0x54,0x33,0xFC,0xE5,0x71,
+0x1F,0x3D,0x14,0x5C,0x96,0x4B,0xF1,0x3A,0xF2,0x00,0xBB,0x6C,0xB4,0xFA,0x96,0x55,
+0x08,0x88,0x09,0xC1,0xCC,0x91,0x19,0x29,0xB0,0x20,0x2D,0xFF,0xCB,0x38,0xA4,0x40,
+0xE1,0x17,0xBE,0x79,0x61,0x80,0xFF,0x07,0x03,0x86,0x4C,0x4E,0x7B,0x06,0x9F,0x11,
+0x86,0x8D,0x89,0xEE,0x27,0xC4,0xDB,0xE2,0xBC,0x19,0x8E,0x0B,0xC3,0xC3,0x13,0xC7,
+0x2D,0x03,0x63,0x3B,0xD3,0xE8,0xE4,0xA2,0x2A,0xC2,0x82,0x08,0x94,0x16,0x54,0xF0,
+0xEF,0x1F,0x27,0x90,0x25,0xB8,0x0D,0x0E,0x28,0x1B,0x47,0x77,0x47,0xBD,0x1C,0xA8,
+0x25,0xF1,0x94,0xB4,0x66,
+};
+
+
+/* subject:/C=ZA/ST=Western Cape/L=Cape Town/O=Thawte Consulting cc/OU=Certification Services Division/CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com */
+/* issuer :/C=ZA/ST=Western Cape/L=Cape Town/O=Thawte Consulting cc/OU=Certification Services Division/CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com */
+
+
+const unsigned char Thawte_Premium_Server_CA_certificate[811]={
+0x30,0x82,0x03,0x27,0x30,0x82,0x02,0x90,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,
+0x81,0xCE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x5A,0x41,0x31,
+0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x08,0x13,0x0C,0x57,0x65,0x73,0x74,0x65,0x72,
+0x6E,0x20,0x43,0x61,0x70,0x65,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x07,0x13,
+0x09,0x43,0x61,0x70,0x65,0x20,0x54,0x6F,0x77,0x6E,0x31,0x1D,0x30,0x1B,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x14,0x54,0x68,0x61,0x77,0x74,0x65,0x20,0x43,0x6F,0x6E,0x73,
+0x75,0x6C,0x74,0x69,0x6E,0x67,0x20,0x63,0x63,0x31,0x28,0x30,0x26,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
+0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,
+0x69,0x6F,0x6E,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x54,0x68,
+0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x53,0x65,0x72,
+0x76,0x65,0x72,0x20,0x43,0x41,0x31,0x28,0x30,0x26,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x09,0x01,0x16,0x19,0x70,0x72,0x65,0x6D,0x69,0x75,0x6D,0x2D,0x73,
+0x65,0x72,0x76,0x65,0x72,0x40,0x74,0x68,0x61,0x77,0x74,0x65,0x2E,0x63,0x6F,0x6D,
+0x30,0x1E,0x17,0x0D,0x39,0x36,0x30,0x38,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,
+0x5A,0x17,0x0D,0x32,0x30,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,
+0x30,0x81,0xCE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x5A,0x41,
+0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x08,0x13,0x0C,0x57,0x65,0x73,0x74,0x65,
+0x72,0x6E,0x20,0x43,0x61,0x70,0x65,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x07,
+0x13,0x09,0x43,0x61,0x70,0x65,0x20,0x54,0x6F,0x77,0x6E,0x31,0x1D,0x30,0x1B,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x14,0x54,0x68,0x61,0x77,0x74,0x65,0x20,0x43,0x6F,0x6E,
+0x73,0x75,0x6C,0x74,0x69,0x6E,0x67,0x20,0x63,0x63,0x31,0x28,0x30,0x26,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,
+0x6F,0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,
+0x73,0x69,0x6F,0x6E,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x54,
+0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x53,0x65,
+0x72,0x76,0x65,0x72,0x20,0x43,0x41,0x31,0x28,0x30,0x26,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x09,0x01,0x16,0x19,0x70,0x72,0x65,0x6D,0x69,0x75,0x6D,0x2D,
+0x73,0x65,0x72,0x76,0x65,0x72,0x40,0x74,0x68,0x61,0x77,0x74,0x65,0x2E,0x63,0x6F,
+0x6D,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xD2,0x36,
+0x36,0x6A,0x8B,0xD7,0xC2,0x5B,0x9E,0xDA,0x81,0x41,0x62,0x8F,0x38,0xEE,0x49,0x04,
+0x55,0xD6,0xD0,0xEF,0x1C,0x1B,0x95,0x16,0x47,0xEF,0x18,0x48,0x35,0x3A,0x52,0xF4,
+0x2B,0x6A,0x06,0x8F,0x3B,0x2F,0xEA,0x56,0xE3,0xAF,0x86,0x8D,0x9E,0x17,0xF7,0x9E,
+0xB4,0x65,0x75,0x02,0x4D,0xEF,0xCB,0x09,0xA2,0x21,0x51,0xD8,0x9B,0xD0,0x67,0xD0,
+0xBA,0x0D,0x92,0x06,0x14,0x73,0xD4,0x93,0xCB,0x97,0x2A,0x00,0x9C,0x5C,0x4E,0x0C,
+0xBC,0xFA,0x15,0x52,0xFC,0xF2,0x44,0x6E,0xDA,0x11,0x4A,0x6E,0x08,0x9F,0x2F,0x2D,
+0xE3,0xF9,0xAA,0x3A,0x86,0x73,0xB6,0x46,0x53,0x58,0xC8,0x89,0x05,0xBD,0x83,0x11,
+0xB8,0x73,0x3F,0xAA,0x07,0x8D,0xF4,0x42,0x4D,0xE7,0x40,0x9D,0x1C,0x37,0x02,0x03,
+0x01,0x00,0x01,0xA3,0x13,0x30,0x11,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,
+0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x03,0x81,0x81,0x00,0x26,0x48,0x2C,0x16,0xC2,
+0x58,0xFA,0xE8,0x16,0x74,0x0C,0xAA,0xAA,0x5F,0x54,0x3F,0xF2,0xD7,0xC9,0x78,0x60,
+0x5E,0x5E,0x6E,0x37,0x63,0x22,0x77,0x36,0x7E,0xB2,0x17,0xC4,0x34,0xB9,0xF5,0x08,
+0x85,0xFC,0xC9,0x01,0x38,0xFF,0x4D,0xBE,0xF2,0x16,0x42,0x43,0xE7,0xBB,0x5A,0x46,
+0xFB,0xC1,0xC6,0x11,0x1F,0xF1,0x4A,0xB0,0x28,0x46,0xC9,0xC3,0xC4,0x42,0x7D,0xBC,
+0xFA,0xAB,0x59,0x6E,0xD5,0xB7,0x51,0x88,0x11,0xE3,0xA4,0x85,0x19,0x6B,0x82,0x4C,
+0xA4,0x0C,0x12,0xAD,0xE9,0xA4,0xAE,0x3F,0xF1,0xC3,0x49,0x65,0x9A,0x8C,0xC5,0xC8,
+0x3E,0x25,0xB7,0x94,0x99,0xBB,0x92,0x32,0x71,0x07,0xF0,0x86,0x5E,0xED,0x50,0x27,
+0xA6,0x0D,0xA6,0x23,0xF9,0xBB,0xCB,0xA6,0x07,0x14,0x42,
+};
+
+
+/* subject:/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2006 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA */
+/* issuer :/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2006 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA */
+
+
+const unsigned char thawte_Primary_Root_CA_certificate[1060]={
+0x30,0x82,0x04,0x20,0x30,0x82,0x03,0x08,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x34,
+0x4E,0xD5,0x57,0x20,0xD5,0xED,0xEC,0x49,0xF4,0x2F,0xCE,0x37,0xDB,0x2B,0x6D,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
+0xA9,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,
+0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,
+0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,
+0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,
+0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,
+0x30,0x36,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,
+0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,
+0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,
+0x04,0x03,0x13,0x16,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,
+0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,
+0x31,0x31,0x31,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,
+0x37,0x31,0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xA9,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,
+0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,0x38,0x30,0x36,0x06,
+0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x74,
+0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,
+0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,
+0x20,0x6F,0x6E,0x6C,0x79,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,
+0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x52,
+0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,
+0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAC,0xA0,0xF0,0xFB,0x80,0x59,0xD4,0x9C,0xC7,
+0xA4,0xCF,0x9D,0xA1,0x59,0x73,0x09,0x10,0x45,0x0C,0x0D,0x2C,0x6E,0x68,0xF1,0x6C,
+0x5B,0x48,0x68,0x49,0x59,0x37,0xFC,0x0B,0x33,0x19,0xC2,0x77,0x7F,0xCC,0x10,0x2D,
+0x95,0x34,0x1C,0xE6,0xEB,0x4D,0x09,0xA7,0x1C,0xD2,0xB8,0xC9,0x97,0x36,0x02,0xB7,
+0x89,0xD4,0x24,0x5F,0x06,0xC0,0xCC,0x44,0x94,0x94,0x8D,0x02,0x62,0x6F,0xEB,0x5A,
+0xDD,0x11,0x8D,0x28,0x9A,0x5C,0x84,0x90,0x10,0x7A,0x0D,0xBD,0x74,0x66,0x2F,0x6A,
+0x38,0xA0,0xE2,0xD5,0x54,0x44,0xEB,0x1D,0x07,0x9F,0x07,0xBA,0x6F,0xEE,0xE9,0xFD,
+0x4E,0x0B,0x29,0xF5,0x3E,0x84,0xA0,0x01,0xF1,0x9C,0xAB,0xF8,0x1C,0x7E,0x89,0xA4,
+0xE8,0xA1,0xD8,0x71,0x65,0x0D,0xA3,0x51,0x7B,0xEE,0xBC,0xD2,0x22,0x60,0x0D,0xB9,
+0x5B,0x9D,0xDF,0xBA,0xFC,0x51,0x5B,0x0B,0xAF,0x98,0xB2,0xE9,0x2E,0xE9,0x04,0xE8,
+0x62,0x87,0xDE,0x2B,0xC8,0xD7,0x4E,0xC1,0x4C,0x64,0x1E,0xDD,0xCF,0x87,0x58,0xBA,
+0x4A,0x4F,0xCA,0x68,0x07,0x1D,0x1C,0x9D,0x4A,0xC6,0xD5,0x2F,0x91,0xCC,0x7C,0x71,
+0x72,0x1C,0xC5,0xC0,0x67,0xEB,0x32,0xFD,0xC9,0x92,0x5C,0x94,0xDA,0x85,0xC0,0x9B,
+0xBF,0x53,0x7D,0x2B,0x09,0xF4,0x8C,0x9D,0x91,0x1F,0x97,0x6A,0x52,0xCB,0xDE,0x09,
+0x36,0xA4,0x77,0xD8,0x7B,0x87,0x50,0x44,0xD5,0x3E,0x6E,0x29,0x69,0xFB,0x39,0x49,
+0x26,0x1E,0x09,0xA5,0x80,0x7B,0x40,0x2D,0xEB,0xE8,0x27,0x85,0xC9,0xFE,0x61,0xFD,
+0x7E,0xE6,0x7C,0x97,0x1D,0xD5,0x9D,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,
+0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,
+0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,
+0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7B,0x5B,0x45,0xCF,
+0xAF,0xCE,0xCB,0x7A,0xFD,0x31,0x92,0x1A,0x6A,0xB6,0xF3,0x46,0xEB,0x57,0x48,0x50,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,
+0x82,0x01,0x01,0x00,0x79,0x11,0xC0,0x4B,0xB3,0x91,0xB6,0xFC,0xF0,0xE9,0x67,0xD4,
+0x0D,0x6E,0x45,0xBE,0x55,0xE8,0x93,0xD2,0xCE,0x03,0x3F,0xED,0xDA,0x25,0xB0,0x1D,
+0x57,0xCB,0x1E,0x3A,0x76,0xA0,0x4C,0xEC,0x50,0x76,0xE8,0x64,0x72,0x0C,0xA4,0xA9,
+0xF1,0xB8,0x8B,0xD6,0xD6,0x87,0x84,0xBB,0x32,0xE5,0x41,0x11,0xC0,0x77,0xD9,0xB3,
+0x60,0x9D,0xEB,0x1B,0xD5,0xD1,0x6E,0x44,0x44,0xA9,0xA6,0x01,0xEC,0x55,0x62,0x1D,
+0x77,0xB8,0x5C,0x8E,0x48,0x49,0x7C,0x9C,0x3B,0x57,0x11,0xAC,0xAD,0x73,0x37,0x8E,
+0x2F,0x78,0x5C,0x90,0x68,0x47,0xD9,0x60,0x60,0xE6,0xFC,0x07,0x3D,0x22,0x20,0x17,
+0xC4,0xF7,0x16,0xE9,0xC4,0xD8,0x72,0xF9,0xC8,0x73,0x7C,0xDF,0x16,0x2F,0x15,0xA9,
+0x3E,0xFD,0x6A,0x27,0xB6,0xA1,0xEB,0x5A,0xBA,0x98,0x1F,0xD5,0xE3,0x4D,0x64,0x0A,
+0x9D,0x13,0xC8,0x61,0xBA,0xF5,0x39,0x1C,0x87,0xBA,0xB8,0xBD,0x7B,0x22,0x7F,0xF6,
+0xFE,0xAC,0x40,0x79,0xE5,0xAC,0x10,0x6F,0x3D,0x8F,0x1B,0x79,0x76,0x8B,0xC4,0x37,
+0xB3,0x21,0x18,0x84,0xE5,0x36,0x00,0xEB,0x63,0x20,0x99,0xB9,0xE9,0xFE,0x33,0x04,
+0xBB,0x41,0xC8,0xC1,0x02,0xF9,0x44,0x63,0x20,0x9E,0x81,0xCE,0x42,0xD3,0xD6,0x3F,
+0x2C,0x76,0xD3,0x63,0x9C,0x59,0xDD,0x8F,0xA6,0xE1,0x0E,0xA0,0x2E,0x41,0xF7,0x2E,
+0x95,0x47,0xCF,0xBC,0xFD,0x33,0xF3,0xF6,0x0B,0x61,0x7E,0x7E,0x91,0x2B,0x81,0x47,
+0xC2,0x27,0x30,0xEE,0xA7,0x10,0x5D,0x37,0x8F,0x5C,0x39,0x2B,0xE4,0x04,0xF0,0x7B,
+0x8D,0x56,0x8C,0x68,
+};
+
+
+/* subject:/C=US/O=thawte, Inc./OU=(c) 2007 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G2 */
+/* issuer :/C=US/O=thawte, Inc./OU=(c) 2007 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G2 */
+
+
+const unsigned char thawte_Primary_Root_CA___G2_certificate[652]={
+0x30,0x82,0x02,0x88,0x30,0x82,0x02,0x0D,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x35,
+0xFC,0x26,0x5C,0xD9,0x84,0x4F,0xC9,0x3D,0x26,0x3D,0x57,0x9B,0xAE,0xD7,0x56,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x84,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,
+0x63,0x2E,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,
+0x20,0x32,0x30,0x30,0x37,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,
+0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,
+0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,
+0x69,0x6D,0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,
+0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,0x30,0x35,0x30,0x30,0x30,0x30,
+0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,0x33,0x35,0x39,0x35,
+0x39,0x5A,0x30,0x81,0x84,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,
+0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,
+0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x38,0x30,0x36,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x74,0x68,0x61,
+0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,
+0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,
+0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x74,0x68,
+0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,
+0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x32,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,
+0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,
+0x04,0xA2,0xD5,0x9C,0x82,0x7B,0x95,0x9D,0xF1,0x52,0x78,0x87,0xFE,0x8A,0x16,0xBF,
+0x05,0xE6,0xDF,0xA3,0x02,0x4F,0x0D,0x07,0xC6,0x00,0x51,0xBA,0x0C,0x02,0x52,0x2D,
+0x22,0xA4,0x42,0x39,0xC4,0xFE,0x8F,0xEA,0xC9,0xC1,0xBE,0xD4,0x4D,0xFF,0x9F,0x7A,
+0x9E,0xE2,0xB1,0x7C,0x9A,0xAD,0xA7,0x86,0x09,0x73,0x87,0xD1,0xE7,0x9A,0xE3,0x7A,
+0xA5,0xAA,0x6E,0xFB,0xBA,0xB3,0x70,0xC0,0x67,0x88,0xA2,0x35,0xD4,0xA3,0x9A,0xB1,
+0xFD,0xAD,0xC2,0xEF,0x31,0xFA,0xA8,0xB9,0xF3,0xFB,0x08,0xC6,0x91,0xD1,0xFB,0x29,
+0x95,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,
+0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,
+0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,
+0x14,0x9A,0xD8,0x00,0x30,0x00,0xE7,0x6B,0x7F,0x85,0x18,0xEE,0x8B,0xB6,0xCE,0x8A,
+0x0C,0xF8,0x11,0xE1,0xBB,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,
+0x03,0x03,0x69,0x00,0x30,0x66,0x02,0x31,0x00,0xDD,0xF8,0xE0,0x57,0x47,0x5B,0xA7,
+0xE6,0x0A,0xC3,0xBD,0xF5,0x80,0x8A,0x97,0x35,0x0D,0x1B,0x89,0x3C,0x54,0x86,0x77,
+0x28,0xCA,0xA1,0xF4,0x79,0xDE,0xB5,0xE6,0x38,0xB0,0xF0,0x65,0x70,0x8C,0x7F,0x02,
+0x54,0xC2,0xBF,0xFF,0xD8,0xA1,0x3E,0xD9,0xCF,0x02,0x31,0x00,0xC4,0x8D,0x94,0xFC,
+0xDC,0x53,0xD2,0xDC,0x9D,0x78,0x16,0x1F,0x15,0x33,0x23,0x53,0x52,0xE3,0x5A,0x31,
+0x5D,0x9D,0xCA,0xAE,0xBD,0x13,0x29,0x44,0x0D,0x27,0x5B,0xA8,0xE7,0x68,0x9C,0x12,
+0xF7,0x58,0x3F,0x2E,0x72,0x02,0x57,0xA3,0x8F,0xA1,0x14,0x2E,
+};
+
+
+/* subject:/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2008 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G3 */
+/* issuer :/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2008 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G3 */
+
+
+const unsigned char thawte_Primary_Root_CA___G3_certificate[1070]={
+0x30,0x82,0x04,0x2A,0x30,0x82,0x03,0x12,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x60,
+0x01,0x97,0xB7,0x46,0xA7,0xEA,0xB4,0xB4,0x9A,0xD6,0x4B,0x2F,0xF7,0x90,0xFB,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81,
+0xAE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,
+0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,
+0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,
+0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,
+0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,
+0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,
+0x30,0x38,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,
+0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,
+0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,0x55,
+0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,
+0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x33,0x30,
+0x1E,0x17,0x0D,0x30,0x38,0x30,0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,
+0x17,0x0D,0x33,0x37,0x31,0x32,0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,
+0x81,0xAE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,
+0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,
+0x1F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,
+0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,
+0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,
+0x30,0x30,0x38,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,
+0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,
+0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,
+0x55,0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,
+0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x33,
+0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,
+0x00,0xB2,0xBF,0x27,0x2C,0xFB,0xDB,0xD8,0x5B,0xDD,0x78,0x7B,0x1B,0x9E,0x77,0x66,
+0x81,0xCB,0x3E,0xBC,0x7C,0xAE,0xF3,0xA6,0x27,0x9A,0x34,0xA3,0x68,0x31,0x71,0x38,
+0x33,0x62,0xE4,0xF3,0x71,0x66,0x79,0xB1,0xA9,0x65,0xA3,0xA5,0x8B,0xD5,0x8F,0x60,
+0x2D,0x3F,0x42,0xCC,0xAA,0x6B,0x32,0xC0,0x23,0xCB,0x2C,0x41,0xDD,0xE4,0xDF,0xFC,
+0x61,0x9C,0xE2,0x73,0xB2,0x22,0x95,0x11,0x43,0x18,0x5F,0xC4,0xB6,0x1F,0x57,0x6C,
+0x0A,0x05,0x58,0x22,0xC8,0x36,0x4C,0x3A,0x7C,0xA5,0xD1,0xCF,0x86,0xAF,0x88,0xA7,
+0x44,0x02,0x13,0x74,0x71,0x73,0x0A,0x42,0x59,0x02,0xF8,0x1B,0x14,0x6B,0x42,0xDF,
+0x6F,0x5F,0xBA,0x6B,0x82,0xA2,0x9D,0x5B,0xE7,0x4A,0xBD,0x1E,0x01,0x72,0xDB,0x4B,
+0x74,0xE8,0x3B,0x7F,0x7F,0x7D,0x1F,0x04,0xB4,0x26,0x9B,0xE0,0xB4,0x5A,0xAC,0x47,
+0x3D,0x55,0xB8,0xD7,0xB0,0x26,0x52,0x28,0x01,0x31,0x40,0x66,0xD8,0xD9,0x24,0xBD,
+0xF6,0x2A,0xD8,0xEC,0x21,0x49,0x5C,0x9B,0xF6,0x7A,0xE9,0x7F,0x55,0x35,0x7E,0x96,
+0x6B,0x8D,0x93,0x93,0x27,0xCB,0x92,0xBB,0xEA,0xAC,0x40,0xC0,0x9F,0xC2,0xF8,0x80,
+0xCF,0x5D,0xF4,0x5A,0xDC,0xCE,0x74,0x86,0xA6,0x3E,0x6C,0x0B,0x53,0xCA,0xBD,0x92,
+0xCE,0x19,0x06,0x72,0xE6,0x0C,0x5C,0x38,0x69,0xC7,0x04,0xD6,0xBC,0x6C,0xCE,0x5B,
+0xF6,0xF7,0x68,0x9C,0xDC,0x25,0x15,0x48,0x88,0xA1,0xE9,0xA9,0xF8,0x98,0x9C,0xE0,
+0xF3,0xD5,0x31,0x28,0x61,0x11,0x6C,0x67,0x96,0x8D,0x39,0x99,0xCB,0xC2,0x45,0x24,
+0x39,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,
+0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,
+0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,
+0x1D,0x0E,0x04,0x16,0x04,0x14,0xAD,0x6C,0xAA,0x94,0x60,0x9C,0xED,0xE4,0xFF,0xFA,
+0x3E,0x0A,0x74,0x2B,0x63,0x03,0xF7,0xB6,0x59,0xBF,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x1A,0x40,
+0xD8,0x95,0x65,0xAC,0x09,0x92,0x89,0xC6,0x39,0xF4,0x10,0xE5,0xA9,0x0E,0x66,0x53,
+0x5D,0x78,0xDE,0xFA,0x24,0x91,0xBB,0xE7,0x44,0x51,0xDF,0xC6,0x16,0x34,0x0A,0xEF,
+0x6A,0x44,0x51,0xEA,0x2B,0x07,0x8A,0x03,0x7A,0xC3,0xEB,0x3F,0x0A,0x2C,0x52,0x16,
+0xA0,0x2B,0x43,0xB9,0x25,0x90,0x3F,0x70,0xA9,0x33,0x25,0x6D,0x45,0x1A,0x28,0x3B,
+0x27,0xCF,0xAA,0xC3,0x29,0x42,0x1B,0xDF,0x3B,0x4C,0xC0,0x33,0x34,0x5B,0x41,0x88,
+0xBF,0x6B,0x2B,0x65,0xAF,0x28,0xEF,0xB2,0xF5,0xC3,0xAA,0x66,0xCE,0x7B,0x56,0xEE,
+0xB7,0xC8,0xCB,0x67,0xC1,0xC9,0x9C,0x1A,0x18,0xB8,0xC4,0xC3,0x49,0x03,0xF1,0x60,
+0x0E,0x50,0xCD,0x46,0xC5,0xF3,0x77,0x79,0xF7,0xB6,0x15,0xE0,0x38,0xDB,0xC7,0x2F,
+0x28,0xA0,0x0C,0x3F,0x77,0x26,0x74,0xD9,0x25,0x12,0xDA,0x31,0xDA,0x1A,0x1E,0xDC,
+0x29,0x41,0x91,0x22,0x3C,0x69,0xA7,0xBB,0x02,0xF2,0xB6,0x5C,0x27,0x03,0x89,0xF4,
+0x06,0xEA,0x9B,0xE4,0x72,0x82,0xE3,0xA1,0x09,0xC1,0xE9,0x00,0x19,0xD3,0x3E,0xD4,
+0x70,0x6B,0xBA,0x71,0xA6,0xAA,0x58,0xAE,0xF4,0xBB,0xE9,0x6C,0xB6,0xEF,0x87,0xCC,
+0x9B,0xBB,0xFF,0x39,0xE6,0x56,0x61,0xD3,0x0A,0xA7,0xC4,0x5C,0x4C,0x60,0x7B,0x05,
+0x77,0x26,0x7A,0xBF,0xD8,0x07,0x52,0x2C,0x62,0xF7,0x70,0x63,0xD9,0x39,0xBC,0x6F,
+0x1C,0xC2,0x79,0xDC,0x76,0x29,0xAF,0xCE,0xC5,0x2C,0x64,0x04,0x5E,0x88,0x36,0x6E,
+0x31,0xD4,0x40,0x1A,0x62,0x34,0x36,0x3F,0x35,0x01,0xAE,0xAC,0x63,0xA0,
+};
+
+
+/* subject:/C=ZA/ST=Western Cape/L=Cape Town/O=Thawte Consulting cc/OU=Certification Services Division/CN=Thawte Server CA/emailAddress=server-certs@thawte.com */
+/* issuer :/C=ZA/ST=Western Cape/L=Cape Town/O=Thawte Consulting cc/OU=Certification Services Division/CN=Thawte Server CA/emailAddress=server-certs@thawte.com */
+
+
+const unsigned char Thawte_Server_CA_certificate[791]={
+0x30,0x82,0x03,0x13,0x30,0x82,0x02,0x7C,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,
+0x81,0xC4,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x5A,0x41,0x31,
+0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x08,0x13,0x0C,0x57,0x65,0x73,0x74,0x65,0x72,
+0x6E,0x20,0x43,0x61,0x70,0x65,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x07,0x13,
+0x09,0x43,0x61,0x70,0x65,0x20,0x54,0x6F,0x77,0x6E,0x31,0x1D,0x30,0x1B,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x14,0x54,0x68,0x61,0x77,0x74,0x65,0x20,0x43,0x6F,0x6E,0x73,
+0x75,0x6C,0x74,0x69,0x6E,0x67,0x20,0x63,0x63,0x31,0x28,0x30,0x26,0x06,0x03,0x55,
+0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
+0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,
+0x69,0x6F,0x6E,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x03,0x13,0x10,0x54,0x68,
+0x61,0x77,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x65,0x72,0x20,0x43,0x41,0x31,0x26,
+0x30,0x24,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x09,0x01,0x16,0x17,0x73,
+0x65,0x72,0x76,0x65,0x72,0x2D,0x63,0x65,0x72,0x74,0x73,0x40,0x74,0x68,0x61,0x77,
+0x74,0x65,0x2E,0x63,0x6F,0x6D,0x30,0x1E,0x17,0x0D,0x39,0x36,0x30,0x38,0x30,0x31,
+0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x30,0x31,0x32,0x33,0x31,0x32,
+0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xC4,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,
+0x04,0x06,0x13,0x02,0x5A,0x41,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x08,0x13,
+0x0C,0x57,0x65,0x73,0x74,0x65,0x72,0x6E,0x20,0x43,0x61,0x70,0x65,0x31,0x12,0x30,
+0x10,0x06,0x03,0x55,0x04,0x07,0x13,0x09,0x43,0x61,0x70,0x65,0x20,0x54,0x6F,0x77,
+0x6E,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0A,0x13,0x14,0x54,0x68,0x61,0x77,
+0x74,0x65,0x20,0x43,0x6F,0x6E,0x73,0x75,0x6C,0x74,0x69,0x6E,0x67,0x20,0x63,0x63,
+0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,
+0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,0x19,0x30,0x17,0x06,0x03,
+0x55,0x04,0x03,0x13,0x10,0x54,0x68,0x61,0x77,0x74,0x65,0x20,0x53,0x65,0x72,0x76,
+0x65,0x72,0x20,0x43,0x41,0x31,0x26,0x30,0x24,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,
+0x0D,0x01,0x09,0x01,0x16,0x17,0x73,0x65,0x72,0x76,0x65,0x72,0x2D,0x63,0x65,0x72,
+0x74,0x73,0x40,0x74,0x68,0x61,0x77,0x74,0x65,0x2E,0x63,0x6F,0x6D,0x30,0x81,0x9F,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,
+0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xD3,0xA4,0x50,0x6E,0xC8,0xFF,
+0x56,0x6B,0xE6,0xCF,0x5D,0xB6,0xEA,0x0C,0x68,0x75,0x47,0xA2,0xAA,0xC2,0xDA,0x84,
+0x25,0xFC,0xA8,0xF4,0x47,0x51,0xDA,0x85,0xB5,0x20,0x74,0x94,0x86,0x1E,0x0F,0x75,
+0xC9,0xE9,0x08,0x61,0xF5,0x06,0x6D,0x30,0x6E,0x15,0x19,0x02,0xE9,0x52,0xC0,0x62,
+0xDB,0x4D,0x99,0x9E,0xE2,0x6A,0x0C,0x44,0x38,0xCD,0xFE,0xBE,0xE3,0x64,0x09,0x70,
+0xC5,0xFE,0xB1,0x6B,0x29,0xB6,0x2F,0x49,0xC8,0x3B,0xD4,0x27,0x04,0x25,0x10,0x97,
+0x2F,0xE7,0x90,0x6D,0xC0,0x28,0x42,0x99,0xD7,0x4C,0x43,0xDE,0xC3,0xF5,0x21,0x6D,
+0x54,0x9F,0x5D,0xC3,0x58,0xE1,0xC0,0xE4,0xD9,0x5B,0xB0,0xB8,0xDC,0xB4,0x7B,0xDF,
+0x36,0x3A,0xC2,0xB5,0x66,0x22,0x12,0xD6,0x87,0x0D,0x02,0x03,0x01,0x00,0x01,0xA3,
+0x13,0x30,0x11,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
+0x03,0x01,0x01,0xFF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x04,0x05,0x00,0x03,0x81,0x81,0x00,0x07,0xFA,0x4C,0x69,0x5C,0xFB,0x95,0xCC,0x46,
+0xEE,0x85,0x83,0x4D,0x21,0x30,0x8E,0xCA,0xD9,0xA8,0x6F,0x49,0x1A,0xE6,0xDA,0x51,
+0xE3,0x60,0x70,0x6C,0x84,0x61,0x11,0xA1,0x1A,0xC8,0x48,0x3E,0x59,0x43,0x7D,0x4F,
+0x95,0x3D,0xA1,0x8B,0xB7,0x0B,0x62,0x98,0x7A,0x75,0x8A,0xDD,0x88,0x4E,0x4E,0x9E,
+0x40,0xDB,0xA8,0xCC,0x32,0x74,0xB9,0x6F,0x0D,0xC6,0xE3,0xB3,0x44,0x0B,0xD9,0x8A,
+0x6F,0x9A,0x29,0x9B,0x99,0x18,0x28,0x3B,0xD1,0xE3,0x40,0x28,0x9A,0x5A,0x3C,0xD5,
+0xB5,0xE7,0x20,0x1B,0x8B,0xCA,0xA4,0xAB,0x8D,0xE9,0x51,0xD9,0xE2,0x4C,0x2C,0x59,
+0xA9,0xDA,0xB9,0xB2,0x75,0x1B,0xF6,0x42,0xF2,0xEF,0xC7,0xF2,0x18,0xF9,0x89,0xBC,
+0xA3,0xFF,0x8A,0x23,0x2E,0x70,0x47,
+};
+
+
+/* subject:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC */
+/* issuer :/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC */
+
+
+const unsigned char UTN_DATACorp_SGC_Root_CA_certificate[1122]={
+0x30,0x82,0x04,0x5E,0x30,0x82,0x03,0x46,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x44,
+0xBE,0x0C,0x8B,0x50,0x00,0x21,0xB4,0x11,0xD3,0x2A,0x68,0x06,0xA9,0xAD,0x69,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
+0x93,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x08,0x13,0x02,0x55,0x54,0x31,0x17,0x30,0x15,0x06,
+0x03,0x55,0x04,0x07,0x13,0x0E,0x53,0x61,0x6C,0x74,0x20,0x4C,0x61,0x6B,0x65,0x20,
+0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,
+0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,
+0x77,0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0B,0x13,0x18,0x68,
+0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,
+0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x03,
+0x13,0x12,0x55,0x54,0x4E,0x20,0x2D,0x20,0x44,0x41,0x54,0x41,0x43,0x6F,0x72,0x70,
+0x20,0x53,0x47,0x43,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,0x32,0x34,0x31,0x38,
+0x35,0x37,0x32,0x31,0x5A,0x17,0x0D,0x31,0x39,0x30,0x36,0x32,0x34,0x31,0x39,0x30,
+0x36,0x33,0x30,0x5A,0x30,0x81,0x93,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,
+0x13,0x02,0x55,0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x08,0x13,0x02,0x55,
+0x54,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x07,0x13,0x0E,0x53,0x61,0x6C,0x74,
+0x20,0x4C,0x61,0x6B,0x65,0x20,0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x15,0x54,0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,
+0x53,0x54,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x18,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,
+0x75,0x73,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x1B,0x30,
+0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x55,0x54,0x4E,0x20,0x2D,0x20,0x44,0x41,
+0x54,0x41,0x43,0x6F,0x72,0x70,0x20,0x53,0x47,0x43,0x30,0x82,0x01,0x22,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,
+0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDF,0xEE,0x58,0x10,0xA2,
+0x2B,0x6E,0x55,0xC4,0x8E,0xBF,0x2E,0x46,0x09,0xE7,0xE0,0x08,0x0F,0x2E,0x2B,0x7A,
+0x13,0x94,0x1B,0xBD,0xF6,0xB6,0x80,0x8E,0x65,0x05,0x93,0x00,0x1E,0xBC,0xAF,0xE2,
+0x0F,0x8E,0x19,0x0D,0x12,0x47,0xEC,0xAC,0xAD,0xA3,0xFA,0x2E,0x70,0xF8,0xDE,0x6E,
+0xFB,0x56,0x42,0x15,0x9E,0x2E,0x5C,0xEF,0x23,0xDE,0x21,0xB9,0x05,0x76,0x27,0x19,
+0x0F,0x4F,0xD6,0xC3,0x9C,0xB4,0xBE,0x94,0x19,0x63,0xF2,0xA6,0x11,0x0A,0xEB,0x53,
+0x48,0x9C,0xBE,0xF2,0x29,0x3B,0x16,0xE8,0x1A,0xA0,0x4C,0xA6,0xC9,0xF4,0x18,0x59,
+0x68,0xC0,0x70,0xF2,0x53,0x00,0xC0,0x5E,0x50,0x82,0xA5,0x56,0x6F,0x36,0xF9,0x4A,
+0xE0,0x44,0x86,0xA0,0x4D,0x4E,0xD6,0x47,0x6E,0x49,0x4A,0xCB,0x67,0xD7,0xA6,0xC4,
+0x05,0xB9,0x8E,0x1E,0xF4,0xFC,0xFF,0xCD,0xE7,0x36,0xE0,0x9C,0x05,0x6C,0xB2,0x33,
+0x22,0x15,0xD0,0xB4,0xE0,0xCC,0x17,0xC0,0xB2,0xC0,0xF4,0xFE,0x32,0x3F,0x29,0x2A,
+0x95,0x7B,0xD8,0xF2,0xA7,0x4E,0x0F,0x54,0x7C,0xA1,0x0D,0x80,0xB3,0x09,0x03,0xC1,
+0xFF,0x5C,0xDD,0x5E,0x9A,0x3E,0xBC,0xAE,0xBC,0x47,0x8A,0x6A,0xAE,0x71,0xCA,0x1F,
+0xB1,0x2A,0xB8,0x5F,0x42,0x05,0x0B,0xEC,0x46,0x30,0xD1,0x72,0x0B,0xCA,0xE9,0x56,
+0x6D,0xF5,0xEF,0xDF,0x78,0xBE,0x61,0xBA,0xB2,0xA5,0xAE,0x04,0x4C,0xBC,0xA8,0xAC,
+0x69,0x15,0x97,0xBD,0xEF,0xEB,0xB4,0x8C,0xBF,0x35,0xF8,0xD4,0xC3,0xD1,0x28,0x0E,
+0x5C,0x3A,0x9F,0x70,0x18,0x33,0x20,0x77,0xC4,0xA2,0xAF,0x02,0x03,0x01,0x00,0x01,
+0xA3,0x81,0xAB,0x30,0x81,0xA8,0x30,0x0B,0x06,0x03,0x55,0x1D,0x0F,0x04,0x04,0x03,
+0x02,0x01,0xC6,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
+0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x53,
+0x32,0xD1,0xB3,0xCF,0x7F,0xFA,0xE0,0xF1,0xA0,0x5D,0x85,0x4E,0x92,0xD2,0x9E,0x45,
+0x1D,0xB4,0x4F,0x30,0x3D,0x06,0x03,0x55,0x1D,0x1F,0x04,0x36,0x30,0x34,0x30,0x32,
+0xA0,0x30,0xA0,0x2E,0x86,0x2C,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,
+0x2E,0x75,0x73,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x55,
+0x54,0x4E,0x2D,0x44,0x41,0x54,0x41,0x43,0x6F,0x72,0x70,0x53,0x47,0x43,0x2E,0x63,
+0x72,0x6C,0x30,0x2A,0x06,0x03,0x55,0x1D,0x25,0x04,0x23,0x30,0x21,0x06,0x08,0x2B,
+0x06,0x01,0x05,0x05,0x07,0x03,0x01,0x06,0x0A,0x2B,0x06,0x01,0x04,0x01,0x82,0x37,
+0x0A,0x03,0x03,0x06,0x09,0x60,0x86,0x48,0x01,0x86,0xF8,0x42,0x04,0x01,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,
+0x01,0x00,0x27,0x35,0x97,0x00,0x8A,0x8B,0x28,0xBD,0xC6,0x33,0x30,0x1E,0x29,0xFC,
+0xE2,0xF7,0xD5,0x98,0xD4,0x40,0xBB,0x60,0xCA,0xBF,0xAB,0x17,0x2C,0x09,0x36,0x7F,
+0x50,0xFA,0x41,0xDC,0xAE,0x96,0x3A,0x0A,0x23,0x3E,0x89,0x59,0xC9,0xA3,0x07,0xED,
+0x1B,0x37,0xAD,0xFC,0x7C,0xBE,0x51,0x49,0x5A,0xDE,0x3A,0x0A,0x54,0x08,0x16,0x45,
+0xC2,0x99,0xB1,0x87,0xCD,0x8C,0x68,0xE0,0x69,0x03,0xE9,0xC4,0x4E,0x98,0xB2,0x3B,
+0x8C,0x16,0xB3,0x0E,0xA0,0x0C,0x98,0x50,0x9B,0x93,0xA9,0x70,0x09,0xC8,0x2C,0xA3,
+0x8F,0xDF,0x02,0xE4,0xE0,0x71,0x3A,0xF1,0xB4,0x23,0x72,0xA0,0xAA,0x01,0xDF,0xDF,
+0x98,0x3E,0x14,0x50,0xA0,0x31,0x26,0xBD,0x28,0xE9,0x5A,0x30,0x26,0x75,0xF9,0x7B,
+0x60,0x1C,0x8D,0xF3,0xCD,0x50,0x26,0x6D,0x04,0x27,0x9A,0xDF,0xD5,0x0D,0x45,0x47,
+0x29,0x6B,0x2C,0xE6,0x76,0xD9,0xA9,0x29,0x7D,0x32,0xDD,0xC9,0x36,0x3C,0xBD,0xAE,
+0x35,0xF1,0x11,0x9E,0x1D,0xBB,0x90,0x3F,0x12,0x47,0x4E,0x8E,0xD7,0x7E,0x0F,0x62,
+0x73,0x1D,0x52,0x26,0x38,0x1C,0x18,0x49,0xFD,0x30,0x74,0x9A,0xC4,0xE5,0x22,0x2F,
+0xD8,0xC0,0x8D,0xED,0x91,0x7A,0x4C,0x00,0x8F,0x72,0x7F,0x5D,0xDA,0xDD,0x1B,0x8B,
+0x45,0x6B,0xE7,0xDD,0x69,0x97,0xA8,0xC5,0x56,0x4C,0x0F,0x0C,0xF6,0x9F,0x7A,0x91,
+0x37,0xF6,0x97,0x82,0xE0,0xDD,0x71,0x69,0xFF,0x76,0x3F,0x60,0x4D,0x3C,0xCF,0xF7,
+0x99,0xF9,0xC6,0x57,0xF4,0xC9,0x55,0x39,0x78,0xBA,0x2C,0x79,0xC9,0xA6,0x88,0x2B,
+0xF4,0x08,
+};
+
+
+/* subject:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware */
+/* issuer :/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware */
+
+
+const unsigned char UTN_USERFirst_Hardware_Root_CA_certificate[1144]={
+0x30,0x82,0x04,0x74,0x30,0x82,0x03,0x5C,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x44,
+0xBE,0x0C,0x8B,0x50,0x00,0x24,0xB4,0x11,0xD3,0x36,0x2A,0xFE,0x65,0x0A,0xFD,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
+0x97,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x08,0x13,0x02,0x55,0x54,0x31,0x17,0x30,0x15,0x06,
+0x03,0x55,0x04,0x07,0x13,0x0E,0x53,0x61,0x6C,0x74,0x20,0x4C,0x61,0x6B,0x65,0x20,
+0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,
+0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,
+0x77,0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0B,0x13,0x18,0x68,
+0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,
+0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,
+0x13,0x16,0x55,0x54,0x4E,0x2D,0x55,0x53,0x45,0x52,0x46,0x69,0x72,0x73,0x74,0x2D,
+0x48,0x61,0x72,0x64,0x77,0x61,0x72,0x65,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x37,
+0x30,0x39,0x31,0x38,0x31,0x30,0x34,0x32,0x5A,0x17,0x0D,0x31,0x39,0x30,0x37,0x30,
+0x39,0x31,0x38,0x31,0x39,0x32,0x32,0x5A,0x30,0x81,0x97,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,
+0x08,0x13,0x02,0x55,0x54,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x07,0x13,0x0E,
+0x53,0x61,0x6C,0x74,0x20,0x4C,0x61,0x6B,0x65,0x20,0x43,0x69,0x74,0x79,0x31,0x1E,
+0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,0x68,0x65,0x20,0x55,0x53,0x45,
+0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x21,
+0x30,0x1F,0x06,0x03,0x55,0x04,0x0B,0x13,0x18,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,
+0x77,0x77,0x77,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,
+0x6D,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x55,0x54,0x4E,0x2D,
+0x55,0x53,0x45,0x52,0x46,0x69,0x72,0x73,0x74,0x2D,0x48,0x61,0x72,0x64,0x77,0x61,
+0x72,0x65,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,
+0x01,0x01,0x00,0xB1,0xF7,0xC3,0x38,0x3F,0xB4,0xA8,0x7F,0xCF,0x39,0x82,0x51,0x67,
+0xD0,0x6D,0x9F,0xD2,0xFF,0x58,0xF3,0xE7,0x9F,0x2B,0xEC,0x0D,0x89,0x54,0x99,0xB9,
+0x38,0x99,0x16,0xF7,0xE0,0x21,0x79,0x48,0xC2,0xBB,0x61,0x74,0x12,0x96,0x1D,0x3C,
+0x6A,0x72,0xD5,0x3C,0x10,0x67,0x3A,0x39,0xED,0x2B,0x13,0xCD,0x66,0xEB,0x95,0x09,
+0x33,0xA4,0x6C,0x97,0xB1,0xE8,0xC6,0xEC,0xC1,0x75,0x79,0x9C,0x46,0x5E,0x8D,0xAB,
+0xD0,0x6A,0xFD,0xB9,0x2A,0x55,0x17,0x10,0x54,0xB3,0x19,0xF0,0x9A,0xF6,0xF1,0xB1,
+0x5D,0xB6,0xA7,0x6D,0xFB,0xE0,0x71,0x17,0x6B,0xA2,0x88,0xFB,0x00,0xDF,0xFE,0x1A,
+0x31,0x77,0x0C,0x9A,0x01,0x7A,0xB1,0x32,0xE3,0x2B,0x01,0x07,0x38,0x6E,0xC3,0xA5,
+0x5E,0x23,0xBC,0x45,0x9B,0x7B,0x50,0xC1,0xC9,0x30,0x8F,0xDB,0xE5,0x2B,0x7A,0xD3,
+0x5B,0xFB,0x33,0x40,0x1E,0xA0,0xD5,0x98,0x17,0xBC,0x8B,0x87,0xC3,0x89,0xD3,0x5D,
+0xA0,0x8E,0xB2,0xAA,0xAA,0xF6,0x8E,0x69,0x88,0x06,0xC5,0xFA,0x89,0x21,0xF3,0x08,
+0x9D,0x69,0x2E,0x09,0x33,0x9B,0x29,0x0D,0x46,0x0F,0x8C,0xCC,0x49,0x34,0xB0,0x69,
+0x51,0xBD,0xF9,0x06,0xCD,0x68,0xAD,0x66,0x4C,0xBC,0x3E,0xAC,0x61,0xBD,0x0A,0x88,
+0x0E,0xC8,0xDF,0x3D,0xEE,0x7C,0x04,0x4C,0x9D,0x0A,0x5E,0x6B,0x91,0xD6,0xEE,0xC7,
+0xED,0x28,0x8D,0xAB,0x4D,0x87,0x89,0x73,0xD0,0x6E,0xA4,0xD0,0x1E,0x16,0x8B,0x14,
+0xE1,0x76,0x44,0x03,0x7F,0x63,0xAC,0xE4,0xCD,0x49,0x9C,0xC5,0x92,0xF4,0xAB,0x32,
+0xA1,0x48,0x5B,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xB9,0x30,0x81,0xB6,0x30,0x0B,
+0x06,0x03,0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0xC6,0x30,0x0F,0x06,0x03,0x55,
+0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,
+0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xA1,0x72,0x5F,0x26,0x1B,0x28,0x98,0x43,0x95,
+0x5D,0x07,0x37,0xD5,0x85,0x96,0x9D,0x4B,0xD2,0xC3,0x45,0x30,0x44,0x06,0x03,0x55,
+0x1D,0x1F,0x04,0x3D,0x30,0x3B,0x30,0x39,0xA0,0x37,0xA0,0x35,0x86,0x33,0x68,0x74,
+0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,0x75,
+0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x55,0x54,0x4E,0x2D,0x55,0x53,0x45,0x52,0x46,
+0x69,0x72,0x73,0x74,0x2D,0x48,0x61,0x72,0x64,0x77,0x61,0x72,0x65,0x2E,0x63,0x72,
+0x6C,0x30,0x31,0x06,0x03,0x55,0x1D,0x25,0x04,0x2A,0x30,0x28,0x06,0x08,0x2B,0x06,
+0x01,0x05,0x05,0x07,0x03,0x01,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x03,0x05,
+0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x03,0x06,0x06,0x08,0x2B,0x06,0x01,0x05,
+0x05,0x07,0x03,0x07,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x47,0x19,0x0F,0xDE,0x74,0xC6,0x99,0x97,
+0xAF,0xFC,0xAD,0x28,0x5E,0x75,0x8E,0xEB,0x2D,0x67,0xEE,0x4E,0x7B,0x2B,0xD7,0x0C,
+0xFF,0xF6,0xDE,0xCB,0x55,0xA2,0x0A,0xE1,0x4C,0x54,0x65,0x93,0x60,0x6B,0x9F,0x12,
+0x9C,0xAD,0x5E,0x83,0x2C,0xEB,0x5A,0xAE,0xC0,0xE4,0x2D,0xF4,0x00,0x63,0x1D,0xB8,
+0xC0,0x6C,0xF2,0xCF,0x49,0xBB,0x4D,0x93,0x6F,0x06,0xA6,0x0A,0x22,0xB2,0x49,0x62,
+0x08,0x4E,0xFF,0xC8,0xC8,0x14,0xB2,0x88,0x16,0x5D,0xE7,0x01,0xE4,0x12,0x95,0xE5,
+0x45,0x34,0xB3,0x8B,0x69,0xBD,0xCF,0xB4,0x85,0x8F,0x75,0x51,0x9E,0x7D,0x3A,0x38,
+0x3A,0x14,0x48,0x12,0xC6,0xFB,0xA7,0x3B,0x1A,0x8D,0x0D,0x82,0x40,0x07,0xE8,0x04,
+0x08,0x90,0xA1,0x89,0xCB,0x19,0x50,0xDF,0xCA,0x1C,0x01,0xBC,0x1D,0x04,0x19,0x7B,
+0x10,0x76,0x97,0x3B,0xEE,0x90,0x90,0xCA,0xC4,0x0E,0x1F,0x16,0x6E,0x75,0xEF,0x33,
+0xF8,0xD3,0x6F,0x5B,0x1E,0x96,0xE3,0xE0,0x74,0x77,0x74,0x7B,0x8A,0xA2,0x6E,0x2D,
+0xDD,0x76,0xD6,0x39,0x30,0x82,0xF0,0xAB,0x9C,0x52,0xF2,0x2A,0xC7,0xAF,0x49,0x5E,
+0x7E,0xC7,0x68,0xE5,0x82,0x81,0xC8,0x6A,0x27,0xF9,0x27,0x88,0x2A,0xD5,0x58,0x50,
+0x95,0x1F,0xF0,0x3B,0x1C,0x57,0xBB,0x7D,0x14,0x39,0x62,0x2B,0x9A,0xC9,0x94,0x92,
+0x2A,0xA3,0x22,0x0C,0xFF,0x89,0x26,0x7D,0x5F,0x23,0x2B,0x47,0xD7,0x15,0x1D,0xA9,
+0x6A,0x9E,0x51,0x0D,0x2A,0x51,0x9E,0x81,0xF9,0xD4,0x3B,0x5E,0x70,0x12,0x7F,0x10,
+0x32,0x9C,0x1E,0xBB,0x9D,0xF8,0x66,0xA8,
+};
+
+
+/* subject:/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 1 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
+/* issuer :/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 1 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
+
+
+const unsigned char ValiCert_Class_1_VA_certificate[747]={
+0x30,0x82,0x02,0xE7,0x30,0x82,0x02,0x50,0x02,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,
+0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xBB,0x31,0x24,0x30,
+0x22,0x06,0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,
+0x20,0x56,0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,
+0x6F,0x72,0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,
+0x6C,0x69,0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,
+0x06,0x03,0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,
+0x43,0x6C,0x61,0x73,0x73,0x20,0x31,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,
+0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,
+0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,
+0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,
+0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,
+0x32,0x35,0x32,0x32,0x32,0x33,0x34,0x38,0x5A,0x17,0x0D,0x31,0x39,0x30,0x36,0x32,
+0x35,0x32,0x32,0x32,0x33,0x34,0x38,0x5A,0x30,0x81,0xBB,0x31,0x24,0x30,0x22,0x06,
+0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x56,
+0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
+0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,0x6C,0x69,
+0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x43,0x6C,
+0x61,0x73,0x73,0x20,0x31,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,0x61,0x6C,
+0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
+0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,0x74,0x70,
+0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,0x74,0x2E,
+0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,0x63,0x65,
+0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,
+0x81,0x81,0x00,0xD8,0x59,0x82,0x7A,0x89,0xB8,0x96,0xBA,0xA6,0x2F,0x68,0x6F,0x58,
+0x2E,0xA7,0x54,0x1C,0x06,0x6E,0xF4,0xEA,0x8D,0x48,0xBC,0x31,0x94,0x17,0xF0,0xF3,
+0x4E,0xBC,0xB2,0xB8,0x35,0x92,0x76,0xB0,0xD0,0xA5,0xA5,0x01,0xD7,0x00,0x03,0x12,
+0x22,0x19,0x08,0xF8,0xFF,0x11,0x23,0x9B,0xCE,0x07,0xF5,0xBF,0x69,0x1A,0x26,0xFE,
+0x4E,0xE9,0xD1,0x7F,0x9D,0x2C,0x40,0x1D,0x59,0x68,0x6E,0xA6,0xF8,0x58,0xB0,0x9D,
+0x1A,0x8F,0xD3,0x3F,0xF1,0xDC,0x19,0x06,0x81,0xA8,0x0E,0xE0,0x3A,0xDD,0xC8,0x53,
+0x45,0x09,0x06,0xE6,0x0F,0x70,0xC3,0xFA,0x40,0xA6,0x0E,0xE2,0x56,0x05,0x0F,0x18,
+0x4D,0xFC,0x20,0x82,0xD1,0x73,0x55,0x74,0x8D,0x76,0x72,0xA0,0x1D,0x9D,0x1D,0xC0,
+0xDD,0x3F,0x71,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x81,0x81,0x00,0x50,0x68,0x3D,0x49,0xF4,
+0x2C,0x1C,0x06,0x94,0xDF,0x95,0x60,0x7F,0x96,0x7B,0x17,0xFE,0x4F,0x71,0xAD,0x64,
+0xC8,0xDD,0x77,0xD2,0xEF,0x59,0x55,0xE8,0x3F,0xE8,0x8E,0x05,0x2A,0x21,0xF2,0x07,
+0xD2,0xB5,0xA7,0x52,0xFE,0x9C,0xB1,0xB6,0xE2,0x5B,0x77,0x17,0x40,0xEA,0x72,0xD6,
+0x23,0xCB,0x28,0x81,0x32,0xC3,0x00,0x79,0x18,0xEC,0x59,0x17,0x89,0xC9,0xC6,0x6A,
+0x1E,0x71,0xC9,0xFD,0xB7,0x74,0xA5,0x25,0x45,0x69,0xC5,0x48,0xAB,0x19,0xE1,0x45,
+0x8A,0x25,0x6B,0x19,0xEE,0xE5,0xBB,0x12,0xF5,0x7F,0xF7,0xA6,0x8D,0x51,0xC3,0xF0,
+0x9D,0x74,0xB7,0xA9,0x3E,0xA0,0xA5,0xFF,0xB6,0x49,0x03,0x13,0xDA,0x22,0xCC,0xED,
+0x71,0x82,0x2B,0x99,0xCF,0x3A,0xB7,0xF5,0x2D,0x72,0xC8,
+};
+
+
+/* subject:/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 2 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
+/* issuer :/L=ValiCert Validation Network/O=ValiCert, Inc./OU=ValiCert Class 2 Policy Validation Authority/CN=http://www.valicert.com//emailAddress=info@valicert.com */
+
+
+const unsigned char ValiCert_Class_2_VA_certificate[747]={
+0x30,0x82,0x02,0xE7,0x30,0x82,0x02,0x50,0x02,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,
+0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xBB,0x31,0x24,0x30,
+0x22,0x06,0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,
+0x20,0x56,0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,
+0x6F,0x72,0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,
+0x6C,0x69,0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,
+0x06,0x03,0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,
+0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,
+0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,
+0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,
+0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,
+0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,
+0x32,0x36,0x30,0x30,0x31,0x39,0x35,0x34,0x5A,0x17,0x0D,0x31,0x39,0x30,0x36,0x32,
+0x36,0x30,0x30,0x31,0x39,0x35,0x34,0x5A,0x30,0x81,0xBB,0x31,0x24,0x30,0x22,0x06,
+0x03,0x55,0x04,0x07,0x13,0x1B,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x56,
+0x61,0x6C,0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
+0x6B,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x61,0x6C,0x69,
+0x43,0x65,0x72,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x35,0x30,0x33,0x06,0x03,
+0x55,0x04,0x0B,0x13,0x2C,0x56,0x61,0x6C,0x69,0x43,0x65,0x72,0x74,0x20,0x43,0x6C,
+0x61,0x73,0x73,0x20,0x32,0x20,0x50,0x6F,0x6C,0x69,0x63,0x79,0x20,0x56,0x61,0x6C,
+0x69,0x64,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,
+0x79,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x68,0x74,0x74,0x70,
+0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x76,0x61,0x6C,0x69,0x63,0x65,0x72,0x74,0x2E,
+0x63,0x6F,0x6D,0x2F,0x31,0x20,0x30,0x1E,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,
+0x01,0x09,0x01,0x16,0x11,0x69,0x6E,0x66,0x6F,0x40,0x76,0x61,0x6C,0x69,0x63,0x65,
+0x72,0x74,0x2E,0x63,0x6F,0x6D,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,
+0x81,0x81,0x00,0xCE,0x3A,0x71,0xCA,0xE5,0xAB,0xC8,0x59,0x92,0x55,0xD7,0xAB,0xD8,
+0x74,0x0E,0xF9,0xEE,0xD9,0xF6,0x55,0x47,0x59,0x65,0x47,0x0E,0x05,0x55,0xDC,0xEB,
+0x98,0x36,0x3C,0x5C,0x53,0x5D,0xD3,0x30,0xCF,0x38,0xEC,0xBD,0x41,0x89,0xED,0x25,
+0x42,0x09,0x24,0x6B,0x0A,0x5E,0xB3,0x7C,0xDD,0x52,0x2D,0x4C,0xE6,0xD4,0xD6,0x7D,
+0x5A,0x59,0xA9,0x65,0xD4,0x49,0x13,0x2D,0x24,0x4D,0x1C,0x50,0x6F,0xB5,0xC1,0x85,
+0x54,0x3B,0xFE,0x71,0xE4,0xD3,0x5C,0x42,0xF9,0x80,0xE0,0x91,0x1A,0x0A,0x5B,0x39,
+0x36,0x67,0xF3,0x3F,0x55,0x7C,0x1B,0x3F,0xB4,0x5F,0x64,0x73,0x34,0xE3,0xB4,0x12,
+0xBF,0x87,0x64,0xF8,0xDA,0x12,0xFF,0x37,0x27,0xC1,0xB3,0x43,0xBB,0xEF,0x7B,0x6E,
+0x2E,0x69,0xF7,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x81,0x81,0x00,0x3B,0x7F,0x50,0x6F,0x6F,
+0x50,0x94,0x99,0x49,0x62,0x38,0x38,0x1F,0x4B,0xF8,0xA5,0xC8,0x3E,0xA7,0x82,0x81,
+0xF6,0x2B,0xC7,0xE8,0xC5,0xCE,0xE8,0x3A,0x10,0x82,0xCB,0x18,0x00,0x8E,0x4D,0xBD,
+0xA8,0x58,0x7F,0xA1,0x79,0x00,0xB5,0xBB,0xE9,0x8D,0xAF,0x41,0xD9,0x0F,0x34,0xEE,
+0x21,0x81,0x19,0xA0,0x32,0x49,0x28,0xF4,0xC4,0x8E,0x56,0xD5,0x52,0x33,0xFD,0x50,
+0xD5,0x7E,0x99,0x6C,0x03,0xE4,0xC9,0x4C,0xFC,0xCB,0x6C,0xAB,0x66,0xB3,0x4A,0x21,
+0x8C,0xE5,0xB5,0x0C,0x32,0x3E,0x10,0xB2,0xCC,0x6C,0xA1,0xDC,0x9A,0x98,0x4C,0x02,
+0x5B,0xF3,0xCE,0xB9,0x9E,0xA5,0x72,0x0E,0x4A,0xB7,0x3F,0x3C,0xE6,0x16,0x68,0xF8,
+0xBE,0xED,0x74,0x4C,0xBC,0x5B,0xD5,0x62,0x1F,0x43,0xDD,
+};
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=Class 3 Public Primary Certification Authority */
+/* issuer :/C=US/O=VeriSign, Inc./OU=Class 3 Public Primary Certification Authority */
+
+
+const unsigned char Verisign_Class_3_Public_Primary_Certification_Authority_certificate[576]={
+0x30,0x82,0x02,0x3C,0x30,0x82,0x01,0xA5,0x02,0x10,0x3C,0x91,0x31,0xCB,0x1F,0xF6,
+0xD0,0x1B,0x0E,0x9A,0xB8,0xD0,0x44,0xBF,0x12,0xBE,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x5F,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x37,0x30,0x35,0x06,0x03,0x55,0x04,0x0B,0x13,0x2E,0x43,0x6C,0x61,0x73,
+0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,
+0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,
+0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x39,0x36,
+0x30,0x31,0x32,0x39,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x38,0x30,
+0x38,0x30,0x32,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x5F,0x31,0x0B,0x30,0x09,
+0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,
+0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,
+0x63,0x2E,0x31,0x37,0x30,0x35,0x06,0x03,0x55,0x04,0x0B,0x13,0x2E,0x43,0x6C,0x61,
+0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,
+0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
+0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x81,0x9F,0x30,0x0D,
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8D,
+0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xC9,0x5C,0x59,0x9E,0xF2,0x1B,0x8A,0x01,
+0x14,0xB4,0x10,0xDF,0x04,0x40,0xDB,0xE3,0x57,0xAF,0x6A,0x45,0x40,0x8F,0x84,0x0C,
+0x0B,0xD1,0x33,0xD9,0xD9,0x11,0xCF,0xEE,0x02,0x58,0x1F,0x25,0xF7,0x2A,0xA8,0x44,
+0x05,0xAA,0xEC,0x03,0x1F,0x78,0x7F,0x9E,0x93,0xB9,0x9A,0x00,0xAA,0x23,0x7D,0xD6,
+0xAC,0x85,0xA2,0x63,0x45,0xC7,0x72,0x27,0xCC,0xF4,0x4C,0xC6,0x75,0x71,0xD2,0x39,
+0xEF,0x4F,0x42,0xF0,0x75,0xDF,0x0A,0x90,0xC6,0x8E,0x20,0x6F,0x98,0x0F,0xF8,0xAC,
+0x23,0x5F,0x70,0x29,0x36,0xA4,0xC9,0x86,0xE7,0xB1,0x9A,0x20,0xCB,0x53,0xA5,0x85,
+0xE7,0x3D,0xBE,0x7D,0x9A,0xFE,0x24,0x45,0x33,0xDC,0x76,0x15,0xED,0x0F,0xA2,0x71,
+0x64,0x4C,0x65,0x2E,0x81,0x68,0x45,0xA7,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,
+0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x81,0x81,0x00,
+0x10,0x72,0x52,0xA9,0x05,0x14,0x19,0x32,0x08,0x41,0xF0,0xC5,0x6B,0x0A,0xCC,0x7E,
+0x0F,0x21,0x19,0xCD,0xE4,0x67,0xDC,0x5F,0xA9,0x1B,0xE6,0xCA,0xE8,0x73,0x9D,0x22,
+0xD8,0x98,0x6E,0x73,0x03,0x61,0x91,0xC5,0x7C,0xB0,0x45,0x40,0x6E,0x44,0x9D,0x8D,
+0xB0,0xB1,0x96,0x74,0x61,0x2D,0x0D,0xA9,0x45,0xD2,0xA4,0x92,0x2A,0xD6,0x9A,0x75,
+0x97,0x6E,0x3F,0x53,0xFD,0x45,0x99,0x60,0x1D,0xA8,0x2B,0x4C,0xF9,0x5E,0xA7,0x09,
+0xD8,0x75,0x30,0xD7,0xD2,0x65,0x60,0x3D,0x67,0xD6,0x48,0x55,0x75,0x69,0x3F,0x91,
+0xF5,0x48,0x0B,0x47,0x69,0x22,0x69,0x82,0x96,0xBE,0xC9,0xC8,0x38,0x86,0x4A,0x7A,
+0x2C,0x73,0x19,0x48,0x69,0x4E,0x6B,0x7C,0x65,0xBF,0x0F,0xFC,0x70,0xCE,0x88,0x90,
+};
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=Class 3 Public Primary Certification Authority - G2/OU=(c) 1998 VeriSign, Inc. - For authorized use only/OU=VeriSign Trust Network */
+/* issuer :/C=US/O=VeriSign, Inc./OU=Class 3 Public Primary Certification Authority - G2/OU=(c) 1998 VeriSign, Inc. - For authorized use only/OU=VeriSign Trust Network */
+
+
+const unsigned char Verisign_Class_3_Public_Primary_Certification_Authority___G2_certificate[774]={
+0x30,0x82,0x03,0x02,0x30,0x82,0x02,0x6B,0x02,0x10,0x7D,0xD9,0xFE,0x07,0xCF,0xA8,
+0x1E,0xB7,0x10,0x79,0x67,0xFB,0xA7,0x89,0x34,0xC6,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xC1,0x31,0x0B,0x30,0x09,
+0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,
+0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,
+0x63,0x2E,0x31,0x3C,0x30,0x3A,0x06,0x03,0x55,0x04,0x0B,0x13,0x33,0x43,0x6C,0x61,
+0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,
+0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,
+0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x32,
+0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x31,
+0x39,0x39,0x38,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,
+0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x1F,0x30,0x1D,
+0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,
+0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x30,0x1E,0x17,
+0x0D,0x39,0x38,0x30,0x35,0x31,0x38,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,
+0x32,0x38,0x30,0x38,0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xC1,
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,
+0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x3C,0x30,0x3A,0x06,0x03,0x55,0x04,0x0B,0x13,
+0x33,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,
+0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
+0x2D,0x20,0x47,0x32,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,
+0x63,0x29,0x20,0x31,0x39,0x39,0x38,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,
+0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53,
+0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
+0x6B,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,
+0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xCC,0x5E,
+0xD1,0x11,0x5D,0x5C,0x69,0xD0,0xAB,0xD3,0xB9,0x6A,0x4C,0x99,0x1F,0x59,0x98,0x30,
+0x8E,0x16,0x85,0x20,0x46,0x6D,0x47,0x3F,0xD4,0x85,0x20,0x84,0xE1,0x6D,0xB3,0xF8,
+0xA4,0xED,0x0C,0xF1,0x17,0x0F,0x3B,0xF9,0xA7,0xF9,0x25,0xD7,0xC1,0xCF,0x84,0x63,
+0xF2,0x7C,0x63,0xCF,0xA2,0x47,0xF2,0xC6,0x5B,0x33,0x8E,0x64,0x40,0x04,0x68,0xC1,
+0x80,0xB9,0x64,0x1C,0x45,0x77,0xC7,0xD8,0x6E,0xF5,0x95,0x29,0x3C,0x50,0xE8,0x34,
+0xD7,0x78,0x1F,0xA8,0xBA,0x6D,0x43,0x91,0x95,0x8F,0x45,0x57,0x5E,0x7E,0xC5,0xFB,
+0xCA,0xA4,0x04,0xEB,0xEA,0x97,0x37,0x54,0x30,0x6F,0xBB,0x01,0x47,0x32,0x33,0xCD,
+0xDC,0x57,0x9B,0x64,0x69,0x61,0xF8,0x9B,0x1D,0x1C,0x89,0x4F,0x5C,0x67,0x02,0x03,
+0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x03,0x81,0x81,0x00,0x51,0x4D,0xCD,0xBE,0x5C,0xCB,0x98,0x19,0x9C,0x15,
+0xB2,0x01,0x39,0x78,0x2E,0x4D,0x0F,0x67,0x70,0x70,0x99,0xC6,0x10,0x5A,0x94,0xA4,
+0x53,0x4D,0x54,0x6D,0x2B,0xAF,0x0D,0x5D,0x40,0x8B,0x64,0xD3,0xD7,0xEE,0xDE,0x56,
+0x61,0x92,0x5F,0xA6,0xC4,0x1D,0x10,0x61,0x36,0xD3,0x2C,0x27,0x3C,0xE8,0x29,0x09,
+0xB9,0x11,0x64,0x74,0xCC,0xB5,0x73,0x9F,0x1C,0x48,0xA9,0xBC,0x61,0x01,0xEE,0xE2,
+0x17,0xA6,0x0C,0xE3,0x40,0x08,0x3B,0x0E,0xE7,0xEB,0x44,0x73,0x2A,0x9A,0xF1,0x69,
+0x92,0xEF,0x71,0x14,0xC3,0x39,0xAC,0x71,0xA7,0x91,0x09,0x6F,0xE4,0x71,0x06,0xB3,
+0xBA,0x59,0x57,0x26,0x79,0x00,0xF6,0xF8,0x0D,0xA2,0x33,0x30,0x28,0xD4,0xAA,0x58,
+0xA0,0x9D,0x9D,0x69,0x91,0xFD,
+};
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G3 */
+/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G3 */
+
+
+const unsigned char Verisign_Class_3_Public_Primary_Certification_Authority___G3_certificate[1054]={
+0x30,0x82,0x04,0x1A,0x30,0x82,0x03,0x02,0x02,0x11,0x00,0x9B,0x7E,0x06,0x49,0xA3,
+0x3E,0x62,0xB9,0xD5,0xEE,0x90,0x48,0x71,0x29,0xEF,0x57,0x30,0x0D,0x06,0x09,0x2A,
+0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xCA,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
+0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,
+0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,
+0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,
+0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,
+0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,
+0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,
+0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,0x30,0x30,
+0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,0x36,
+0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,0x03,
+0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,
+0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,
+0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53,
+0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
+0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,0x20,
+0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
+0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30,
+0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
+0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,
+0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
+0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
+0x02,0x82,0x01,0x01,0x00,0xCB,0xBA,0x9C,0x52,0xFC,0x78,0x1F,0x1A,0x1E,0x6F,0x1B,
+0x37,0x73,0xBD,0xF8,0xC9,0x6B,0x94,0x12,0x30,0x4F,0xF0,0x36,0x47,0xF5,0xD0,0x91,
+0x0A,0xF5,0x17,0xC8,0xA5,0x61,0xC1,0x16,0x40,0x4D,0xFB,0x8A,0x61,0x90,0xE5,0x76,
+0x20,0xC1,0x11,0x06,0x7D,0xAB,0x2C,0x6E,0xA6,0xF5,0x11,0x41,0x8E,0xFA,0x2D,0xAD,
+0x2A,0x61,0x59,0xA4,0x67,0x26,0x4C,0xD0,0xE8,0xBC,0x52,0x5B,0x70,0x20,0x04,0x58,
+0xD1,0x7A,0xC9,0xA4,0x69,0xBC,0x83,0x17,0x64,0xAD,0x05,0x8B,0xBC,0xD0,0x58,0xCE,
+0x8D,0x8C,0xF5,0xEB,0xF0,0x42,0x49,0x0B,0x9D,0x97,0x27,0x67,0x32,0x6E,0xE1,0xAE,
+0x93,0x15,0x1C,0x70,0xBC,0x20,0x4D,0x2F,0x18,0xDE,0x92,0x88,0xE8,0x6C,0x85,0x57,
+0x11,0x1A,0xE9,0x7E,0xE3,0x26,0x11,0x54,0xA2,0x45,0x96,0x55,0x83,0xCA,0x30,0x89,
+0xE8,0xDC,0xD8,0xA3,0xED,0x2A,0x80,0x3F,0x7F,0x79,0x65,0x57,0x3E,0x15,0x20,0x66,
+0x08,0x2F,0x95,0x93,0xBF,0xAA,0x47,0x2F,0xA8,0x46,0x97,0xF0,0x12,0xE2,0xFE,0xC2,
+0x0A,0x2B,0x51,0xE6,0x76,0xE6,0xB7,0x46,0xB7,0xE2,0x0D,0xA6,0xCC,0xA8,0xC3,0x4C,
+0x59,0x55,0x89,0xE6,0xE8,0x53,0x5C,0x1C,0xEA,0x9D,0xF0,0x62,0x16,0x0B,0xA7,0xC9,
+0x5F,0x0C,0xF0,0xDE,0xC2,0x76,0xCE,0xAF,0xF7,0x6A,0xF2,0xFA,0x41,0xA6,0xA2,0x33,
+0x14,0xC9,0xE5,0x7A,0x63,0xD3,0x9E,0x62,0x37,0xD5,0x85,0x65,0x9E,0x0E,0xE6,0x53,
+0x24,0x74,0x1B,0x5E,0x1D,0x12,0x53,0x5B,0xC7,0x2C,0xE7,0x83,0x49,0x3B,0x15,0xAE,
+0x8A,0x68,0xB9,0x57,0x97,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x11,0x14,
+0x96,0xC1,0xAB,0x92,0x08,0xF7,0x3F,0x2F,0xC9,0xB2,0xFE,0xE4,0x5A,0x9F,0x64,0xDE,
+0xDB,0x21,0x4F,0x86,0x99,0x34,0x76,0x36,0x57,0xDD,0xD0,0x15,0x2F,0xC5,0xAD,0x7F,
+0x15,0x1F,0x37,0x62,0x73,0x3E,0xD4,0xE7,0x5F,0xCE,0x17,0x03,0xDB,0x35,0xFA,0x2B,
+0xDB,0xAE,0x60,0x09,0x5F,0x1E,0x5F,0x8F,0x6E,0xBB,0x0B,0x3D,0xEA,0x5A,0x13,0x1E,
+0x0C,0x60,0x6F,0xB5,0xC0,0xB5,0x23,0x22,0x2E,0x07,0x0B,0xCB,0xA9,0x74,0xCB,0x47,
+0xBB,0x1D,0xC1,0xD7,0xA5,0x6B,0xCC,0x2F,0xD2,0x42,0xFD,0x49,0xDD,0xA7,0x89,0xCF,
+0x53,0xBA,0xDA,0x00,0x5A,0x28,0xBF,0x82,0xDF,0xF8,0xBA,0x13,0x1D,0x50,0x86,0x82,
+0xFD,0x8E,0x30,0x8F,0x29,0x46,0xB0,0x1E,0x3D,0x35,0xDA,0x38,0x62,0x16,0x18,0x4A,
+0xAD,0xE6,0xB6,0x51,0x6C,0xDE,0xAF,0x62,0xEB,0x01,0xD0,0x1E,0x24,0xFE,0x7A,0x8F,
+0x12,0x1A,0x12,0x68,0xB8,0xFB,0x66,0x99,0x14,0x14,0x45,0x5C,0xAE,0xE7,0xAE,0x69,
+0x17,0x81,0x2B,0x5A,0x37,0xC9,0x5E,0x2A,0xF4,0xC6,0xE2,0xA1,0x5C,0x54,0x9B,0xA6,
+0x54,0x00,0xCF,0xF0,0xF1,0xC1,0xC7,0x98,0x30,0x1A,0x3B,0x36,0x16,0xDB,0xA3,0x6E,
+0xEA,0xFD,0xAD,0xB2,0xC2,0xDA,0xEF,0x02,0x47,0x13,0x8A,0xC0,0xF1,0xB3,0x31,0xAD,
+0x4F,0x1C,0xE1,0x4F,0x9C,0xAF,0x0F,0x0C,0x9D,0xF7,0x78,0x0D,0xD8,0xF4,0x35,0x56,
+0x80,0xDA,0xB7,0x6D,0x17,0x8F,0x9D,0x1E,0x81,0x64,0xE1,0xFE,0xC5,0x45,0xBA,0xAD,
+0x6B,0xB9,0x0A,0x7A,0x4E,0x4F,0x4B,0x84,0xEE,0x4B,0xF1,0x7D,0xDD,0x11,
+};
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2007 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G4 */
+/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2007 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G4 */
+
+
+const unsigned char VeriSign_Class_3_Public_Primary_Certification_Authority___G4_certificate[904]={
+0x30,0x82,0x03,0x84,0x30,0x82,0x03,0x0A,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x2F,
+0x80,0xFE,0x23,0x8C,0x0E,0x22,0x0F,0x48,0x67,0x12,0x28,0x91,0x87,0xAC,0xB3,0x30,
+0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0xCA,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,
+0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,
+0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,
+0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,
+0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,
+0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,
+0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,
+0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x34,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,
+0x30,0x35,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,
+0x38,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,
+0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,
+0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,
+0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,
+0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,
+0x20,0x32,0x30,0x30,0x37,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,
+0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,
+0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,
+0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,
+0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,
+0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,
+0x20,0x2D,0x20,0x47,0x34,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,
+0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0xA7,0x56,0x7A,
+0x7C,0x52,0xDA,0x64,0x9B,0x0E,0x2D,0x5C,0xD8,0x5E,0xAC,0x92,0x3D,0xFE,0x01,0xE6,
+0x19,0x4A,0x3D,0x14,0x03,0x4B,0xFA,0x60,0x27,0x20,0xD9,0x83,0x89,0x69,0xFA,0x54,
+0xC6,0x9A,0x18,0x5E,0x55,0x2A,0x64,0xDE,0x06,0xF6,0x8D,0x4A,0x3B,0xAD,0x10,0x3C,
+0x65,0x3D,0x90,0x88,0x04,0x89,0xE0,0x30,0x61,0xB3,0xAE,0x5D,0x01,0xA7,0x7B,0xDE,
+0x7C,0xB2,0xBE,0xCA,0x65,0x61,0x00,0x86,0xAE,0xDA,0x8F,0x7B,0xD0,0x89,0xAD,0x4D,
+0x1D,0x59,0x9A,0x41,0xB1,0xBC,0x47,0x80,0xDC,0x9E,0x62,0xC3,0xF9,0xA3,0x81,0xB2,
+0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,
+0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,
+0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x01,0x0C,
+0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,0x59,0x30,0x57,0x30,0x55,0x16,0x09,
+0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,0x30,0x21,0x30,0x1F,0x30,0x07,0x06,
+0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,0xE5,0xD3,0x1A,0x86,0xAC,0x8D,0x8E,
+0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,0x7B,0x19,0x2E,0x30,0x25,0x16,0x23,
+0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,0x67,0x6F,0x2E,0x76,0x65,0x72,0x69,
+0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,0x76,0x73,0x6C,0x6F,0x67,0x6F,0x2E,
+0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB3,0x16,
+0x91,0xFD,0xEE,0xA6,0x6E,0xE4,0xB5,0x2E,0x49,0x8F,0x87,0x78,0x81,0x80,0xEC,0xE5,
+0xB1,0xB5,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,
+0x00,0x30,0x65,0x02,0x30,0x66,0x21,0x0C,0x18,0x26,0x60,0x5A,0x38,0x7B,0x56,0x42,
+0xE0,0xA7,0xFC,0x36,0x84,0x51,0x91,0x20,0x2C,0x76,0x4D,0x43,0x3D,0xC4,0x1D,0x84,
+0x23,0xD0,0xAC,0xD6,0x7C,0x35,0x06,0xCE,0xCD,0x69,0xBD,0x90,0x0D,0xDB,0x6C,0x48,
+0x42,0x1D,0x0E,0xAA,0x42,0x02,0x31,0x00,0x9C,0x3D,0x48,0x39,0x23,0x39,0x58,0x1A,
+0x15,0x12,0x59,0x6A,0x9E,0xEF,0xD5,0x59,0xB2,0x1D,0x52,0x2C,0x99,0x71,0xCD,0xC7,
+0x29,0xDF,0x1B,0x2A,0x61,0x7B,0x71,0xD1,0xDE,0xF3,0xC0,0xE5,0x0D,0x3A,0x4A,0xAA,
+0x2D,0xA7,0xD8,0x86,0x2A,0xDD,0x2E,0x10,
+};
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2006 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G5 */
+/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2006 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G5 */
+
+
+const unsigned char VeriSign_Class_3_Public_Primary_Certification_Authority___G5_certificate[1239]={
+0x30,0x82,0x04,0xD3,0x30,0x82,0x03,0xBB,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x18,
+0xDA,0xD1,0x9E,0x26,0x7D,0xE8,0xBB,0x4A,0x21,0x58,0xCD,0xCC,0x6B,0x3B,0x4A,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
+0xCA,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
+0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,
+0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,
+0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,
+0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,
+0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,
+0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,
+0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x35,0x30,0x1E,0x17,0x0D,0x30,
+0x36,0x31,0x31,0x30,0x38,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,
+0x30,0x37,0x31,0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,
+0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,
+0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,
+0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,
+0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,
+0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,
+0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,
+0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,
+0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,
+0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x35,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,
+0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAF,0x24,0x08,0x08,0x29,0x7A,0x35,
+0x9E,0x60,0x0C,0xAA,0xE7,0x4B,0x3B,0x4E,0xDC,0x7C,0xBC,0x3C,0x45,0x1C,0xBB,0x2B,
+0xE0,0xFE,0x29,0x02,0xF9,0x57,0x08,0xA3,0x64,0x85,0x15,0x27,0xF5,0xF1,0xAD,0xC8,
+0x31,0x89,0x5D,0x22,0xE8,0x2A,0xAA,0xA6,0x42,0xB3,0x8F,0xF8,0xB9,0x55,0xB7,0xB1,
+0xB7,0x4B,0xB3,0xFE,0x8F,0x7E,0x07,0x57,0xEC,0xEF,0x43,0xDB,0x66,0x62,0x15,0x61,
+0xCF,0x60,0x0D,0xA4,0xD8,0xDE,0xF8,0xE0,0xC3,0x62,0x08,0x3D,0x54,0x13,0xEB,0x49,
+0xCA,0x59,0x54,0x85,0x26,0xE5,0x2B,0x8F,0x1B,0x9F,0xEB,0xF5,0xA1,0x91,0xC2,0x33,
+0x49,0xD8,0x43,0x63,0x6A,0x52,0x4B,0xD2,0x8F,0xE8,0x70,0x51,0x4D,0xD1,0x89,0x69,
+0x7B,0xC7,0x70,0xF6,0xB3,0xDC,0x12,0x74,0xDB,0x7B,0x5D,0x4B,0x56,0xD3,0x96,0xBF,
+0x15,0x77,0xA1,0xB0,0xF4,0xA2,0x25,0xF2,0xAF,0x1C,0x92,0x67,0x18,0xE5,0xF4,0x06,
+0x04,0xEF,0x90,0xB9,0xE4,0x00,0xE4,0xDD,0x3A,0xB5,0x19,0xFF,0x02,0xBA,0xF4,0x3C,
+0xEE,0xE0,0x8B,0xEB,0x37,0x8B,0xEC,0xF4,0xD7,0xAC,0xF2,0xF6,0xF0,0x3D,0xAF,0xDD,
+0x75,0x91,0x33,0x19,0x1D,0x1C,0x40,0xCB,0x74,0x24,0x19,0x21,0x93,0xD9,0x14,0xFE,
+0xAC,0x2A,0x52,0xC7,0x8F,0xD5,0x04,0x49,0xE4,0x8D,0x63,0x47,0x88,0x3C,0x69,0x83,
+0xCB,0xFE,0x47,0xBD,0x2B,0x7E,0x4F,0xC5,0x95,0xAE,0x0E,0x9D,0xD4,0xD1,0x43,0xC0,
+0x67,0x73,0xE3,0x14,0x08,0x7E,0xE5,0x3F,0x9F,0x73,0xB8,0x33,0x0A,0xCF,0x5D,0x3F,
+0x34,0x87,0x96,0x8A,0xEE,0x53,0xE8,0x25,0x15,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,
+0xB2,0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,
+0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,
+0x04,0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x01,
+0x0C,0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,0x59,0x30,0x57,0x30,0x55,0x16,
+0x09,0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,0x30,0x21,0x30,0x1F,0x30,0x07,
+0x06,0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,0xE5,0xD3,0x1A,0x86,0xAC,0x8D,
+0x8E,0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,0x7B,0x19,0x2E,0x30,0x25,0x16,
+0x23,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,0x67,0x6F,0x2E,0x76,0x65,0x72,
+0x69,0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,0x76,0x73,0x6C,0x6F,0x67,0x6F,
+0x2E,0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7F,
+0xD3,0x65,0xA7,0xC2,0xDD,0xEC,0xBB,0xF0,0x30,0x09,0xF3,0x43,0x39,0xFA,0x02,0xAF,
+0x33,0x31,0x33,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,
+0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x93,0x24,0x4A,0x30,0x5F,0x62,0xCF,0xD8,0x1A,
+0x98,0x2F,0x3D,0xEA,0xDC,0x99,0x2D,0xBD,0x77,0xF6,0xA5,0x79,0x22,0x38,0xEC,0xC4,
+0xA7,0xA0,0x78,0x12,0xAD,0x62,0x0E,0x45,0x70,0x64,0xC5,0xE7,0x97,0x66,0x2D,0x98,
+0x09,0x7E,0x5F,0xAF,0xD6,0xCC,0x28,0x65,0xF2,0x01,0xAA,0x08,0x1A,0x47,0xDE,0xF9,
+0xF9,0x7C,0x92,0x5A,0x08,0x69,0x20,0x0D,0xD9,0x3E,0x6D,0x6E,0x3C,0x0D,0x6E,0xD8,
+0xE6,0x06,0x91,0x40,0x18,0xB9,0xF8,0xC1,0xED,0xDF,0xDB,0x41,0xAA,0xE0,0x96,0x20,
+0xC9,0xCD,0x64,0x15,0x38,0x81,0xC9,0x94,0xEE,0xA2,0x84,0x29,0x0B,0x13,0x6F,0x8E,
+0xDB,0x0C,0xDD,0x25,0x02,0xDB,0xA4,0x8B,0x19,0x44,0xD2,0x41,0x7A,0x05,0x69,0x4A,
+0x58,0x4F,0x60,0xCA,0x7E,0x82,0x6A,0x0B,0x02,0xAA,0x25,0x17,0x39,0xB5,0xDB,0x7F,
+0xE7,0x84,0x65,0x2A,0x95,0x8A,0xBD,0x86,0xDE,0x5E,0x81,0x16,0x83,0x2D,0x10,0xCC,
+0xDE,0xFD,0xA8,0x82,0x2A,0x6D,0x28,0x1F,0x0D,0x0B,0xC4,0xE5,0xE7,0x1A,0x26,0x19,
+0xE1,0xF4,0x11,0x6F,0x10,0xB5,0x95,0xFC,0xE7,0x42,0x05,0x32,0xDB,0xCE,0x9D,0x51,
+0x5E,0x28,0xB6,0x9E,0x85,0xD3,0x5B,0xEF,0xA5,0x7D,0x45,0x40,0x72,0x8E,0xB7,0x0E,
+0x6B,0x0E,0x06,0xFB,0x33,0x35,0x48,0x71,0xB8,0x9D,0x27,0x8B,0xC4,0x65,0x5F,0x0D,
+0x86,0x76,0x9C,0x44,0x7A,0xF6,0x95,0x5C,0xF6,0x5D,0x32,0x08,0x33,0xA4,0x54,0xB6,
+0x18,0x3F,0x68,0x5C,0xF2,0x42,0x4A,0x85,0x38,0x54,0x83,0x5F,0xD1,0xE8,0x2C,0xF2,
+0xAC,0x11,0xD6,0xA8,0xED,0x63,0x6A,
+};
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 4 Public Primary Certification Authority - G3 */
+/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 4 Public Primary Certification Authority - G3 */
+
+
+const unsigned char Verisign_Class_4_Public_Primary_Certification_Authority___G3_certificate[1054]={
+0x30,0x82,0x04,0x1A,0x30,0x82,0x03,0x02,0x02,0x11,0x00,0xEC,0xA0,0xA7,0x8B,0x6E,
+0x75,0x6A,0x01,0xCF,0xC4,0x7C,0xCC,0x2F,0x94,0x5E,0xD7,0x30,0x0D,0x06,0x09,0x2A,
+0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xCA,0x31,0x0B,0x30,
+0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,
+0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
+0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,
+0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,
+0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,
+0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
+0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,
+0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,
+0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,
+0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x34,0x20,0x50,0x75,0x62,0x6C,
+0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,
+0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,
+0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,0x30,0x30,
+0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,0x36,
+0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,0x03,
+0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A,
+0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,
+0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53,
+0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,
+0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,0x20,
+0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,
+0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30,
+0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,
+0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x34,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,
+0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,
+0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,
+0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,
+0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,
+0x02,0x82,0x01,0x01,0x00,0xAD,0xCB,0xA5,0x11,0x69,0xC6,0x59,0xAB,0xF1,0x8F,0xB5,
+0x19,0x0F,0x56,0xCE,0xCC,0xB5,0x1F,0x20,0xE4,0x9E,0x26,0x25,0x4B,0xE0,0x73,0x65,
+0x89,0x59,0xDE,0xD0,0x83,0xE4,0xF5,0x0F,0xB5,0xBB,0xAD,0xF1,0x7C,0xE8,0x21,0xFC,
+0xE4,0xE8,0x0C,0xEE,0x7C,0x45,0x22,0x19,0x76,0x92,0xB4,0x13,0xB7,0x20,0x5B,0x09,
+0xFA,0x61,0xAE,0xA8,0xF2,0xA5,0x8D,0x85,0xC2,0x2A,0xD6,0xDE,0x66,0x36,0xD2,0x9B,
+0x02,0xF4,0xA8,0x92,0x60,0x7C,0x9C,0x69,0xB4,0x8F,0x24,0x1E,0xD0,0x86,0x52,0xF6,
+0x32,0x9C,0x41,0x58,0x1E,0x22,0xBD,0xCD,0x45,0x62,0x95,0x08,0x6E,0xD0,0x66,0xDD,
+0x53,0xA2,0xCC,0xF0,0x10,0xDC,0x54,0x73,0x8B,0x04,0xA1,0x46,0x33,0x33,0x5C,0x17,
+0x40,0xB9,0x9E,0x4D,0xD3,0xF3,0xBE,0x55,0x83,0xE8,0xB1,0x89,0x8E,0x5A,0x7C,0x9A,
+0x96,0x22,0x90,0x3B,0x88,0x25,0xF2,0xD2,0x53,0x88,0x02,0x0C,0x0B,0x78,0xF2,0xE6,
+0x37,0x17,0x4B,0x30,0x46,0x07,0xE4,0x80,0x6D,0xA6,0xD8,0x96,0x2E,0xE8,0x2C,0xF8,
+0x11,0xB3,0x38,0x0D,0x66,0xA6,0x9B,0xEA,0xC9,0x23,0x5B,0xDB,0x8E,0xE2,0xF3,0x13,
+0x8E,0x1A,0x59,0x2D,0xAA,0x02,0xF0,0xEC,0xA4,0x87,0x66,0xDC,0xC1,0x3F,0xF5,0xD8,
+0xB9,0xF4,0xEC,0x82,0xC6,0xD2,0x3D,0x95,0x1D,0xE5,0xC0,0x4F,0x84,0xC9,0xD9,0xA3,
+0x44,0x28,0x06,0x6A,0xD7,0x45,0xAC,0xF0,0x6B,0x6A,0xEF,0x4E,0x5F,0xF8,0x11,0x82,
+0x1E,0x38,0x63,0x34,0x66,0x50,0xD4,0x3E,0x93,0x73,0xFA,0x30,0xC3,0x66,0xAD,0xFF,
+0x93,0x2D,0x97,0xEF,0x03,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x8F,0xFA,
+0x25,0x6B,0x4F,0x5B,0xE4,0xA4,0x4E,0x27,0x55,0xAB,0x22,0x15,0x59,0x3C,0xCA,0xB5,
+0x0A,0xD4,0x4A,0xDB,0xAB,0xDD,0xA1,0x5F,0x53,0xC5,0xA0,0x57,0x39,0xC2,0xCE,0x47,
+0x2B,0xBE,0x3A,0xC8,0x56,0xBF,0xC2,0xD9,0x27,0x10,0x3A,0xB1,0x05,0x3C,0xC0,0x77,
+0x31,0xBB,0x3A,0xD3,0x05,0x7B,0x6D,0x9A,0x1C,0x30,0x8C,0x80,0xCB,0x93,0x93,0x2A,
+0x83,0xAB,0x05,0x51,0x82,0x02,0x00,0x11,0x67,0x6B,0xF3,0x88,0x61,0x47,0x5F,0x03,
+0x93,0xD5,0x5B,0x0D,0xE0,0xF1,0xD4,0xA1,0x32,0x35,0x85,0xB2,0x3A,0xDB,0xB0,0x82,
+0xAB,0xD1,0xCB,0x0A,0xBC,0x4F,0x8C,0x5B,0xC5,0x4B,0x00,0x3B,0x1F,0x2A,0x82,0xA6,
+0x7E,0x36,0x85,0xDC,0x7E,0x3C,0x67,0x00,0xB5,0xE4,0x3B,0x52,0xE0,0xA8,0xEB,0x5D,
+0x15,0xF9,0xC6,0x6D,0xF0,0xAD,0x1D,0x0E,0x85,0xB7,0xA9,0x9A,0x73,0x14,0x5A,0x5B,
+0x8F,0x41,0x28,0xC0,0xD5,0xE8,0x2D,0x4D,0xA4,0x5E,0xCD,0xAA,0xD9,0xED,0xCE,0xDC,
+0xD8,0xD5,0x3C,0x42,0x1D,0x17,0xC1,0x12,0x5D,0x45,0x38,0xC3,0x38,0xF3,0xFC,0x85,
+0x2E,0x83,0x46,0x48,0xB2,0xD7,0x20,0x5F,0x92,0x36,0x8F,0xE7,0x79,0x0F,0x98,0x5E,
+0x99,0xE8,0xF0,0xD0,0xA4,0xBB,0xF5,0x53,0xBD,0x2A,0xCE,0x59,0xB0,0xAF,0x6E,0x7F,
+0x6C,0xBB,0xD2,0x1E,0x00,0xB0,0x21,0xED,0xF8,0x41,0x62,0x82,0xB9,0xD8,0xB2,0xC4,
+0xBB,0x46,0x50,0xF3,0x31,0xC5,0x8F,0x01,0xA8,0x74,0xEB,0xF5,0x78,0x27,0xDA,0xE7,
+0xF7,0x66,0x43,0xF3,0x9E,0x83,0x3E,0x20,0xAA,0xC3,0x35,0x60,0x91,0xCE,
+};
+
+
+/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2008 VeriSign, Inc. - For authorized use only/CN=VeriSign Universal Root Certification Authority */
+/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2008 VeriSign, Inc. - For authorized use only/CN=VeriSign Universal Root Certification Authority */
+
+
+const unsigned char VeriSign_Universal_Root_Certification_Authority_certificate[1213]={
+0x30,0x82,0x04,0xB9,0x30,0x82,0x03,0xA1,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x40,
+0x1A,0xC4,0x64,0x21,0xB3,0x13,0x21,0x03,0x0E,0xBB,0xE4,0x12,0x1A,0xC5,0x1D,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81,
+0xBD,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
+0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,
+0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,
+0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,
+0x6F,0x6E,0x6C,0x79,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x03,0x13,0x2F,0x56,
+0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,
+0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,
+0x17,0x0D,0x30,0x38,0x30,0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,
+0x0D,0x33,0x37,0x31,0x32,0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,
+0xBD,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,
+0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,
+0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,
+0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,
+0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,
+0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x56,0x65,0x72,0x69,
+0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,
+0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,
+0x6F,0x6E,0x6C,0x79,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x03,0x13,0x2F,0x56,
+0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,
+0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,
+0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,
+0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,
+0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xC7,
+0x61,0x37,0x5E,0xB1,0x01,0x34,0xDB,0x62,0xD7,0x15,0x9B,0xFF,0x58,0x5A,0x8C,0x23,
+0x23,0xD6,0x60,0x8E,0x91,0xD7,0x90,0x98,0x83,0x7A,0xE6,0x58,0x19,0x38,0x8C,0xC5,
+0xF6,0xE5,0x64,0x85,0xB4,0xA2,0x71,0xFB,0xED,0xBD,0xB9,0xDA,0xCD,0x4D,0x00,0xB4,
+0xC8,0x2D,0x73,0xA5,0xC7,0x69,0x71,0x95,0x1F,0x39,0x3C,0xB2,0x44,0x07,0x9C,0xE8,
+0x0E,0xFA,0x4D,0x4A,0xC4,0x21,0xDF,0x29,0x61,0x8F,0x32,0x22,0x61,0x82,0xC5,0x87,
+0x1F,0x6E,0x8C,0x7C,0x5F,0x16,0x20,0x51,0x44,0xD1,0x70,0x4F,0x57,0xEA,0xE3,0x1C,
+0xE3,0xCC,0x79,0xEE,0x58,0xD8,0x0E,0xC2,0xB3,0x45,0x93,0xC0,0x2C,0xE7,0x9A,0x17,
+0x2B,0x7B,0x00,0x37,0x7A,0x41,0x33,0x78,0xE1,0x33,0xE2,0xF3,0x10,0x1A,0x7F,0x87,
+0x2C,0xBE,0xF6,0xF5,0xF7,0x42,0xE2,0xE5,0xBF,0x87,0x62,0x89,0x5F,0x00,0x4B,0xDF,
+0xC5,0xDD,0xE4,0x75,0x44,0x32,0x41,0x3A,0x1E,0x71,0x6E,0x69,0xCB,0x0B,0x75,0x46,
+0x08,0xD1,0xCA,0xD2,0x2B,0x95,0xD0,0xCF,0xFB,0xB9,0x40,0x6B,0x64,0x8C,0x57,0x4D,
+0xFC,0x13,0x11,0x79,0x84,0xED,0x5E,0x54,0xF6,0x34,0x9F,0x08,0x01,0xF3,0x10,0x25,
+0x06,0x17,0x4A,0xDA,0xF1,0x1D,0x7A,0x66,0x6B,0x98,0x60,0x66,0xA4,0xD9,0xEF,0xD2,
+0x2E,0x82,0xF1,0xF0,0xEF,0x09,0xEA,0x44,0xC9,0x15,0x6A,0xE2,0x03,0x6E,0x33,0xD3,
+0xAC,0x9F,0x55,0x00,0xC7,0xF6,0x08,0x6A,0x94,0xB9,0x5F,0xDC,0xE0,0x33,0xF1,0x84,
+0x60,0xF9,0x5B,0x27,0x11,0xB4,0xFC,0x16,0xF2,0xBB,0x56,0x6A,0x80,0x25,0x8D,0x02,
+0x03,0x01,0x00,0x01,0xA3,0x81,0xB2,0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,
+0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,
+0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,
+0x06,0x01,0x05,0x05,0x07,0x01,0x0C,0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,
+0x59,0x30,0x57,0x30,0x55,0x16,0x09,0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,
+0x30,0x21,0x30,0x1F,0x30,0x07,0x06,0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,
+0xE5,0xD3,0x1A,0x86,0xAC,0x8D,0x8E,0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,
+0x7B,0x19,0x2E,0x30,0x25,0x16,0x23,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,
+0x67,0x6F,0x2E,0x76,0x65,0x72,0x69,0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,
+0x76,0x73,0x6C,0x6F,0x67,0x6F,0x2E,0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,
+0x0E,0x04,0x16,0x04,0x14,0xB6,0x77,0xFA,0x69,0x48,0x47,0x9F,0x53,0x12,0xD5,0xC2,
+0xEA,0x07,0x32,0x76,0x07,0xD1,0x97,0x07,0x19,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,
+0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x4A,0xF8,0xF8,
+0xB0,0x03,0xE6,0x2C,0x67,0x7B,0xE4,0x94,0x77,0x63,0xCC,0x6E,0x4C,0xF9,0x7D,0x0E,
+0x0D,0xDC,0xC8,0xB9,0x35,0xB9,0x70,0x4F,0x63,0xFA,0x24,0xFA,0x6C,0x83,0x8C,0x47,
+0x9D,0x3B,0x63,0xF3,0x9A,0xF9,0x76,0x32,0x95,0x91,0xB1,0x77,0xBC,0xAC,0x9A,0xBE,
+0xB1,0xE4,0x31,0x21,0xC6,0x81,0x95,0x56,0x5A,0x0E,0xB1,0xC2,0xD4,0xB1,0xA6,0x59,
+0xAC,0xF1,0x63,0xCB,0xB8,0x4C,0x1D,0x59,0x90,0x4A,0xEF,0x90,0x16,0x28,0x1F,0x5A,
+0xAE,0x10,0xFB,0x81,0x50,0x38,0x0C,0x6C,0xCC,0xF1,0x3D,0xC3,0xF5,0x63,0xE3,0xB3,
+0xE3,0x21,0xC9,0x24,0x39,0xE9,0xFD,0x15,0x66,0x46,0xF4,0x1B,0x11,0xD0,0x4D,0x73,
+0xA3,0x7D,0x46,0xF9,0x3D,0xED,0xA8,0x5F,0x62,0xD4,0xF1,0x3F,0xF8,0xE0,0x74,0x57,
+0x2B,0x18,0x9D,0x81,0xB4,0xC4,0x28,0xDA,0x94,0x97,0xA5,0x70,0xEB,0xAC,0x1D,0xBE,
+0x07,0x11,0xF0,0xD5,0xDB,0xDD,0xE5,0x8C,0xF0,0xD5,0x32,0xB0,0x83,0xE6,0x57,0xE2,
+0x8F,0xBF,0xBE,0xA1,0xAA,0xBF,0x3D,0x1D,0xB5,0xD4,0x38,0xEA,0xD7,0xB0,0x5C,0x3A,
+0x4F,0x6A,0x3F,0x8F,0xC0,0x66,0x6C,0x63,0xAA,0xE9,0xD9,0xA4,0x16,0xF4,0x81,0xD1,
+0x95,0x14,0x0E,0x7D,0xCD,0x95,0x34,0xD9,0xD2,0x8F,0x70,0x73,0x81,0x7B,0x9C,0x7E,
+0xBD,0x98,0x61,0xD8,0x45,0x87,0x98,0x90,0xC5,0xEB,0x86,0x30,0xC6,0x35,0xBF,0xF0,
+0xFF,0xC3,0x55,0x88,0x83,0x4B,0xEF,0x05,0x92,0x06,0x71,0xF2,0xB8,0x98,0x93,0xB7,
+0xEC,0xCD,0x82,0x61,0xF1,0x38,0xE6,0x4F,0x97,0x98,0x2A,0x5A,0x8D,
+};
+
+
+/* subject:/C=US/OU=www.xrampsecurity.com/O=XRamp Security Services Inc/CN=XRamp Global Certification Authority */
+/* issuer :/C=US/OU=www.xrampsecurity.com/O=XRamp Security Services Inc/CN=XRamp Global Certification Authority */
+
+
+const unsigned char XRamp_Global_CA_Root_certificate[1076]={
+0x30,0x82,0x04,0x30,0x30,0x82,0x03,0x18,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x50,
+0x94,0x6C,0xEC,0x18,0xEA,0xD5,0x9C,0x4D,0xD5,0x97,0xEF,0x75,0x8F,0xA0,0xAD,0x30,
+0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,
+0x82,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x1E,
+0x30,0x1C,0x06,0x03,0x55,0x04,0x0B,0x13,0x15,0x77,0x77,0x77,0x2E,0x78,0x72,0x61,
+0x6D,0x70,0x73,0x65,0x63,0x75,0x72,0x69,0x74,0x79,0x2E,0x63,0x6F,0x6D,0x31,0x24,
+0x30,0x22,0x06,0x03,0x55,0x04,0x0A,0x13,0x1B,0x58,0x52,0x61,0x6D,0x70,0x20,0x53,
+0x65,0x63,0x75,0x72,0x69,0x74,0x79,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,
+0x20,0x49,0x6E,0x63,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x03,0x13,0x24,0x58,
+0x52,0x61,0x6D,0x70,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x65,0x72,0x74,
+0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,
+0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x31,0x31,0x30,0x31,0x31,0x37,0x31,
+0x34,0x30,0x34,0x5A,0x17,0x0D,0x33,0x35,0x30,0x31,0x30,0x31,0x30,0x35,0x33,0x37,
+0x31,0x39,0x5A,0x30,0x81,0x82,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,
+0x02,0x55,0x53,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0B,0x13,0x15,0x77,0x77,
+0x77,0x2E,0x78,0x72,0x61,0x6D,0x70,0x73,0x65,0x63,0x75,0x72,0x69,0x74,0x79,0x2E,
+0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0A,0x13,0x1B,0x58,0x52,
+0x61,0x6D,0x70,0x20,0x53,0x65,0x63,0x75,0x72,0x69,0x74,0x79,0x20,0x53,0x65,0x72,
+0x76,0x69,0x63,0x65,0x73,0x20,0x49,0x6E,0x63,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,
+0x04,0x03,0x13,0x24,0x58,0x52,0x61,0x6D,0x70,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,
+0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,
+0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,
+0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,
+0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0x98,0x24,0x1E,0xBD,0x15,0xB4,0xBA,
+0xDF,0xC7,0x8C,0xA5,0x27,0xB6,0x38,0x0B,0x69,0xF3,0xB6,0x4E,0xA8,0x2C,0x2E,0x21,
+0x1D,0x5C,0x44,0xDF,0x21,0x5D,0x7E,0x23,0x74,0xFE,0x5E,0x7E,0xB4,0x4A,0xB7,0xA6,
+0xAD,0x1F,0xAE,0xE0,0x06,0x16,0xE2,0x9B,0x5B,0xD9,0x67,0x74,0x6B,0x5D,0x80,0x8F,
+0x29,0x9D,0x86,0x1B,0xD9,0x9C,0x0D,0x98,0x6D,0x76,0x10,0x28,0x58,0xE4,0x65,0xB0,
+0x7F,0x4A,0x98,0x79,0x9F,0xE0,0xC3,0x31,0x7E,0x80,0x2B,0xB5,0x8C,0xC0,0x40,0x3B,
+0x11,0x86,0xD0,0xCB,0xA2,0x86,0x36,0x60,0xA4,0xD5,0x30,0x82,0x6D,0xD9,0x6E,0xD0,
+0x0F,0x12,0x04,0x33,0x97,0x5F,0x4F,0x61,0x5A,0xF0,0xE4,0xF9,0x91,0xAB,0xE7,0x1D,
+0x3B,0xBC,0xE8,0xCF,0xF4,0x6B,0x2D,0x34,0x7C,0xE2,0x48,0x61,0x1C,0x8E,0xF3,0x61,
+0x44,0xCC,0x6F,0xA0,0x4A,0xA9,0x94,0xB0,0x4D,0xDA,0xE7,0xA9,0x34,0x7A,0x72,0x38,
+0xA8,0x41,0xCC,0x3C,0x94,0x11,0x7D,0xEB,0xC8,0xA6,0x8C,0xB7,0x86,0xCB,0xCA,0x33,
+0x3B,0xD9,0x3D,0x37,0x8B,0xFB,0x7A,0x3E,0x86,0x2C,0xE7,0x73,0xD7,0x0A,0x57,0xAC,
+0x64,0x9B,0x19,0xEB,0xF4,0x0F,0x04,0x08,0x8A,0xAC,0x03,0x17,0x19,0x64,0xF4,0x5A,
+0x25,0x22,0x8D,0x34,0x2C,0xB2,0xF6,0x68,0x1D,0x12,0x6D,0xD3,0x8A,0x1E,0x14,0xDA,
+0xC4,0x8F,0xA6,0xE2,0x23,0x85,0xD5,0x7A,0x0D,0xBD,0x6A,0xE0,0xE9,0xEC,0xEC,0x17,
+0xBB,0x42,0x1B,0x67,0xAA,0x25,0xED,0x45,0x83,0x21,0xFC,0xC1,0xC9,0x7C,0xD5,0x62,
+0x3E,0xFA,0xF2,0xC5,0x2D,0xD3,0xFD,0xD4,0x65,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,
+0x9F,0x30,0x81,0x9C,0x30,0x13,0x06,0x09,0x2B,0x06,0x01,0x04,0x01,0x82,0x37,0x14,
+0x02,0x04,0x06,0x1E,0x04,0x00,0x43,0x00,0x41,0x30,0x0B,0x06,0x03,0x55,0x1D,0x0F,
+0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,
+0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,
+0x04,0x14,0xC6,0x4F,0xA2,0x3D,0x06,0x63,0x84,0x09,0x9C,0xCE,0x62,0xE4,0x04,0xAC,
+0x8D,0x5C,0xB5,0xE9,0xB6,0x1B,0x30,0x36,0x06,0x03,0x55,0x1D,0x1F,0x04,0x2F,0x30,
+0x2D,0x30,0x2B,0xA0,0x29,0xA0,0x27,0x86,0x25,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,
+0x63,0x72,0x6C,0x2E,0x78,0x72,0x61,0x6D,0x70,0x73,0x65,0x63,0x75,0x72,0x69,0x74,
+0x79,0x2E,0x63,0x6F,0x6D,0x2F,0x58,0x47,0x43,0x41,0x2E,0x63,0x72,0x6C,0x30,0x10,
+0x06,0x09,0x2B,0x06,0x01,0x04,0x01,0x82,0x37,0x15,0x01,0x04,0x03,0x02,0x01,0x01,
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,
+0x82,0x01,0x01,0x00,0x91,0x15,0x39,0x03,0x01,0x1B,0x67,0xFB,0x4A,0x1C,0xF9,0x0A,
+0x60,0x5B,0xA1,0xDA,0x4D,0x97,0x62,0xF9,0x24,0x53,0x27,0xD7,0x82,0x64,0x4E,0x90,
+0x2E,0xC3,0x49,0x1B,0x2B,0x9A,0xDC,0xFC,0xA8,0x78,0x67,0x35,0xF1,0x1D,0xF0,0x11,
+0xBD,0xB7,0x48,0xE3,0x10,0xF6,0x0D,0xDF,0x3F,0xD2,0xC9,0xB6,0xAA,0x55,0xA4,0x48,
+0xBA,0x02,0xDB,0xDE,0x59,0x2E,0x15,0x5B,0x3B,0x9D,0x16,0x7D,0x47,0xD7,0x37,0xEA,
+0x5F,0x4D,0x76,0x12,0x36,0xBB,0x1F,0xD7,0xA1,0x81,0x04,0x46,0x20,0xA3,0x2C,0x6D,
+0xA9,0x9E,0x01,0x7E,0x3F,0x29,0xCE,0x00,0x93,0xDF,0xFD,0xC9,0x92,0x73,0x89,0x89,
+0x64,0x9E,0xE7,0x2B,0xE4,0x1C,0x91,0x2C,0xD2,0xB9,0xCE,0x7D,0xCE,0x6F,0x31,0x99,
+0xD3,0xE6,0xBE,0xD2,0x1E,0x90,0xF0,0x09,0x14,0x79,0x5C,0x23,0xAB,0x4D,0xD2,0xDA,
+0x21,0x1F,0x4D,0x99,0x79,0x9D,0xE1,0xCF,0x27,0x9F,0x10,0x9B,0x1C,0x88,0x0D,0xB0,
+0x8A,0x64,0x41,0x31,0xB8,0x0E,0x6C,0x90,0x24,0xA4,0x9B,0x5C,0x71,0x8F,0xBA,0xBB,
+0x7E,0x1C,0x1B,0xDB,0x6A,0x80,0x0F,0x21,0xBC,0xE9,0xDB,0xA6,0xB7,0x40,0xF4,0xB2,
+0x8B,0xA9,0xB1,0xE4,0xEF,0x9A,0x1A,0xD0,0x3D,0x69,0x99,0xEE,0xA8,0x28,0xA3,0xE1,
+0x3C,0xB3,0xF0,0xB2,0x11,0x9C,0xCF,0x7C,0x40,0xE6,0xDD,0xE7,0x43,0x7D,0xA2,0xD8,
+0x3A,0xB5,0xA9,0x8D,0xF2,0x34,0x99,0xC4,0xD4,0x10,0xE1,0x06,0xFD,0x09,0x84,0x10,
+0x3B,0xEE,0xC4,0x4C,0xF4,0xEC,0x27,0x7C,0x42,0xC2,0x74,0x7C,0x82,0x8A,0x09,0xC9,
+0xB4,0x03,0x25,0xBC,
+};
+
+
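+// The root certificates defined above, with their byte lengths listed in the
+// parallel kSSLCertCertificateSizeList array below.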
+const unsigned char* kSSLCertCertificateList[] = {
+ AddTrust_External_Root_certificate,
+ AddTrust_Low_Value_Services_Root_certificate,
+ AddTrust_Public_Services_Root_certificate,
+ AddTrust_Qualified_Certificates_Root_certificate,
+ AffirmTrust_Commercial_certificate,
+ AffirmTrust_Networking_certificate,
+ AffirmTrust_Premium_certificate,
+ AffirmTrust_Premium_ECC_certificate,
+ America_Online_Root_Certification_Authority_1_certificate,
+ America_Online_Root_Certification_Authority_2_certificate,
+ Baltimore_CyberTrust_Root_certificate,
+ Comodo_AAA_Services_root_certificate,
+ COMODO_Certification_Authority_certificate,
+ COMODO_ECC_Certification_Authority_certificate,
+ Comodo_Secure_Services_root_certificate,
+ Comodo_Trusted_Services_root_certificate,
+ Cybertrust_Global_Root_certificate,
+ DigiCert_Assured_ID_Root_CA_certificate,
+ DigiCert_Global_Root_CA_certificate,
+ DigiCert_High_Assurance_EV_Root_CA_certificate,
+ Entrust_net_Premium_2048_Secure_Server_CA_certificate,
+ Entrust_net_Secure_Server_CA_certificate,
+ Entrust_Root_Certification_Authority_certificate,
+ Equifax_Secure_CA_certificate,
+ Equifax_Secure_eBusiness_CA_1_certificate,
+ Equifax_Secure_eBusiness_CA_2_certificate,
+ Equifax_Secure_Global_eBusiness_CA_certificate,
+ GeoTrust_Global_CA_certificate,
+ GeoTrust_Global_CA_2_certificate,
+ GeoTrust_Primary_Certification_Authority_certificate,
+ GeoTrust_Primary_Certification_Authority___G2_certificate,
+ GeoTrust_Primary_Certification_Authority___G3_certificate,
+ GeoTrust_Universal_CA_certificate,
+ GeoTrust_Universal_CA_2_certificate,
+ GlobalSign_Root_CA_certificate,
+ GlobalSign_Root_CA___R2_certificate,
+ GlobalSign_Root_CA___R3_certificate,
+ Go_Daddy_Class_2_CA_certificate,
+ Go_Daddy_Root_Certificate_Authority___G2_certificate,
+ GTE_CyberTrust_Global_Root_certificate,
+ Network_Solutions_Certificate_Authority_certificate,
+ RSA_Root_Certificate_1_certificate,
+ Starfield_Class_2_CA_certificate,
+ Starfield_Root_Certificate_Authority___G2_certificate,
+ Starfield_Services_Root_Certificate_Authority___G2_certificate,
+ StartCom_Certification_Authority_certificate,
+ StartCom_Certification_Authority_G2_certificate,
+ TC_TrustCenter_Class_2_CA_II_certificate,
+ TC_TrustCenter_Class_3_CA_II_certificate,
+ TC_TrustCenter_Universal_CA_I_certificate,
+ TC_TrustCenter_Universal_CA_III_certificate,
+ Thawte_Premium_Server_CA_certificate,
+ thawte_Primary_Root_CA_certificate,
+ thawte_Primary_Root_CA___G2_certificate,
+ thawte_Primary_Root_CA___G3_certificate,
+ Thawte_Server_CA_certificate,
+ UTN_DATACorp_SGC_Root_CA_certificate,
+ UTN_USERFirst_Hardware_Root_CA_certificate,
+ ValiCert_Class_1_VA_certificate,
+ ValiCert_Class_2_VA_certificate,
+ Verisign_Class_3_Public_Primary_Certification_Authority_certificate,
+ Verisign_Class_3_Public_Primary_Certification_Authority___G2_certificate,
+ Verisign_Class_3_Public_Primary_Certification_Authority___G3_certificate,
+ VeriSign_Class_3_Public_Primary_Certification_Authority___G4_certificate,
+ VeriSign_Class_3_Public_Primary_Certification_Authority___G5_certificate,
+ Verisign_Class_4_Public_Primary_Certification_Authority___G3_certificate,
+ VeriSign_Universal_Root_Certification_Authority_certificate,
+ XRamp_Global_CA_Root_certificate,
+};
+
+const size_t kSSLCertCertificateSizeList[] = {
+ 1082,
+ 1052,
+ 1049,
+ 1058,
+ 848,
+ 848,
+ 1354,
+ 514,
+ 936,
+ 1448,
+ 891,
+ 1078,
+ 1057,
+ 653,
+ 1091,
+ 1095,
+ 933,
+ 955,
+ 947,
+ 969,
+ 1120,
+ 1244,
+ 1173,
+ 804,
+ 646,
+ 804,
+ 660,
+ 856,
+ 874,
+ 896,
+ 690,
+ 1026,
+ 1388,
+ 1392,
+ 889,
+ 958,
+ 867,
+ 1028,
+ 969,
+ 606,
+ 1002,
+ 747,
+ 1043,
+ 993,
+ 1011,
+ 1931,
+ 1383,
+ 1198,
+ 1198,
+ 993,
+ 997,
+ 811,
+ 1060,
+ 652,
+ 1070,
+ 791,
+ 1122,
+ 1144,
+ 747,
+ 747,
+ 576,
+ 774,
+ 1054,
+ 904,
+ 1239,
+ 1054,
+ 1213,
+ 1076,
+};
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/sslsocketfactory.cc b/chromium/third_party/webrtc/base/sslsocketfactory.cc
new file mode 100644
index 00000000000..0e37ab84eb0
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslsocketfactory.cc
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/autodetectproxy.h"
+#include "webrtc/base/httpcommon.h"
+#include "webrtc/base/httpcommon-inl.h"
+#include "webrtc/base/socketadapters.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslsocketfactory.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// ProxySocketAdapter
+// TODO: Consider combining AutoDetectProxy and ProxySocketAdapter. I think
+// the socket adapter is the more appropriate idiom for automatic proxy
+// detection. We may or may not want to combine proxydetect.* as well.
+///////////////////////////////////////////////////////////////////////////////
+
+class ProxySocketAdapter : public AsyncSocketAdapter {
+ public:
+ ProxySocketAdapter(SslSocketFactory* factory, int family, int type)
+ : AsyncSocketAdapter(NULL), factory_(factory), family_(family),
+ type_(type), detect_(NULL) {
+ }
+ virtual ~ProxySocketAdapter() {
+ Close();
+ }
+
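+  // Begins asynchronous proxy detection for |addr| and returns SOCKET_ERROR;
+  // GetError() reports EWOULDBLOCK while detection is pending, and the
+  // connect completes via SignalConnectEvent from OnProxyDetectionComplete().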
+ virtual int Connect(const SocketAddress& addr) {
+ ASSERT(NULL == detect_);
+ ASSERT(NULL == socket_);
+ remote_ = addr;
+ if (remote_.IsAnyIP() && remote_.hostname().empty()) {
+ LOG_F(LS_ERROR) << "Empty address";
+ return SOCKET_ERROR;
+ }
+ Url<char> url("/", remote_.HostAsURIString(), remote_.port());
+ detect_ = new AutoDetectProxy(factory_->agent_);
+ detect_->set_server_url(url.url());
+ detect_->SignalWorkDone.connect(this,
+ &ProxySocketAdapter::OnProxyDetectionComplete);
+ detect_->Start();
+ return SOCKET_ERROR;
+ }
+ virtual int GetError() const {
+ if (socket_) {
+ return socket_->GetError();
+ }
+ return detect_ ? EWOULDBLOCK : EADDRNOTAVAIL;
+ }
+ virtual int Close() {
+ if (socket_) {
+ return socket_->Close();
+ }
+ if (detect_) {
+ detect_->Destroy(false);
+ detect_ = NULL;
+ }
+ return 0;
+ }
+ virtual ConnState GetState() const {
+ if (socket_) {
+ return socket_->GetState();
+ }
+ return detect_ ? CS_CONNECTING : CS_CLOSED;
+ }
+
+ private:
+ // AutoDetectProxy Slots
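+  // Attaches a proxy socket built from the detected settings, then replays
+  // the deferred connect, surfacing the result through the socket signals.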
+ void OnProxyDetectionComplete(SignalThread* thread) {
+ ASSERT(detect_ == thread);
+ Attach(factory_->CreateProxySocket(detect_->proxy(), family_, type_));
+ detect_->Release();
+ detect_ = NULL;
+ if (0 == AsyncSocketAdapter::Connect(remote_)) {
+ SignalConnectEvent(this);
+ } else if (!IsBlockingError(socket_->GetError())) {
+ SignalCloseEvent(this, socket_->GetError());
+ }
+ }
+
+ SslSocketFactory* factory_;
+ int family_;
+ int type_;
+ SocketAddress remote_;
+ AutoDetectProxy* detect_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// SslSocketFactory
+///////////////////////////////////////////////////////////////////////////////
+
+Socket* SslSocketFactory::CreateSocket(int type) {
+ return CreateSocket(AF_INET, type);
+}
+
+Socket* SslSocketFactory::CreateSocket(int family, int type) {
+ return factory_->CreateSocket(family, type);
+}
+
+AsyncSocket* SslSocketFactory::CreateAsyncSocket(int type) {
+ return CreateAsyncSocket(AF_INET, type);
+}
+
+AsyncSocket* SslSocketFactory::CreateAsyncSocket(int family, int type) {
+ if (autodetect_proxy_) {
+ return new ProxySocketAdapter(this, family, type);
+ } else {
+ return CreateProxySocket(proxy_, family, type);
+ }
+}
+
+
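+// Wraps the raw socket in adapters as configured: binary logging closest to
+// the wire, then a SOCKS5 or HTTPS proxy adapter when |proxy| is set, then an
+// SSL adapter when UseSSL() supplied a hostname, with text logging outermost.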
+AsyncSocket* SslSocketFactory::CreateProxySocket(const ProxyInfo& proxy,
+ int family,
+ int type) {
+ AsyncSocket* socket = factory_->CreateAsyncSocket(family, type);
+ if (!socket)
+ return NULL;
+
+ // Binary logging happens at the lowest level
+ if (!logging_label_.empty() && binary_mode_) {
+ socket = new LoggingSocketAdapter(socket, logging_level_,
+ logging_label_.c_str(), binary_mode_);
+ }
+
+ if (proxy.type) {
+ AsyncSocket* proxy_socket = 0;
+    if (proxy.type == PROXY_SOCKS5) {
+ proxy_socket = new AsyncSocksProxySocket(socket, proxy.address,
+ proxy.username, proxy.password);
+ } else {
+ // Note: we are trying unknown proxies as HTTPS currently
+ AsyncHttpsProxySocket* http_proxy =
+ new AsyncHttpsProxySocket(socket, agent_, proxy.address,
+ proxy.username, proxy.password);
+ http_proxy->SetForceConnect(force_connect_ || !hostname_.empty());
+ proxy_socket = http_proxy;
+ }
+ if (!proxy_socket) {
+ delete socket;
+ return NULL;
+ }
+ socket = proxy_socket; // for our purposes the proxy is now the socket
+ }
+
+ if (!hostname_.empty()) {
+ if (SSLAdapter* ssl_adapter = SSLAdapter::Create(socket)) {
+ ssl_adapter->set_ignore_bad_cert(ignore_bad_cert_);
+ ssl_adapter->StartSSL(hostname_.c_str(), true);
+ socket = ssl_adapter;
+ } else {
+ LOG_F(LS_ERROR) << "SSL unavailable";
+ }
+ }
+
+ // Regular logging occurs at the highest level
+ if (!logging_label_.empty() && !binary_mode_) {
+ socket = new LoggingSocketAdapter(socket, logging_level_,
+ logging_label_.c_str(), binary_mode_);
+ }
+ return socket;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/sslsocketfactory.h b/chromium/third_party/webrtc/base/sslsocketfactory.h
new file mode 100644
index 00000000000..edb23dbb805
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslsocketfactory.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SSLSOCKETFACTORY_H__
+#define WEBRTC_BASE_SSLSOCKETFACTORY_H__
+
+#include "webrtc/base/proxyinfo.h"
+#include "webrtc/base/socketserver.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// SslSocketFactory
+///////////////////////////////////////////////////////////////////////////////
+
+class SslSocketFactory : public SocketFactory {
+ public:
+ SslSocketFactory(SocketFactory* factory, const std::string& user_agent)
+ : factory_(factory), agent_(user_agent), autodetect_proxy_(true),
+ force_connect_(false), logging_level_(LS_VERBOSE), binary_mode_(false),
+ ignore_bad_cert_(false) {
+ }
+
+ void SetAutoDetectProxy() {
+ autodetect_proxy_ = true;
+ }
+ void SetForceConnect(bool force) {
+ force_connect_ = force;
+ }
+ void SetProxy(const ProxyInfo& proxy) {
+ autodetect_proxy_ = false;
+ proxy_ = proxy;
+ }
+ bool autodetect_proxy() const { return autodetect_proxy_; }
+ const ProxyInfo& proxy() const { return proxy_; }
+
+ void UseSSL(const char* hostname) { hostname_ = hostname; }
+ void DisableSSL() { hostname_.clear(); }
+ void SetIgnoreBadCert(bool ignore) { ignore_bad_cert_ = ignore; }
+ bool ignore_bad_cert() const { return ignore_bad_cert_; }
+
+ void SetLogging(LoggingSeverity level, const std::string& label,
+ bool binary_mode = false) {
+ logging_level_ = level;
+ logging_label_ = label;
+ binary_mode_ = binary_mode;
+ }
+
+ // SocketFactory Interface
+ virtual Socket* CreateSocket(int type);
+ virtual Socket* CreateSocket(int family, int type);
+
+ virtual AsyncSocket* CreateAsyncSocket(int type);
+ virtual AsyncSocket* CreateAsyncSocket(int family, int type);
+
+ private:
+ friend class ProxySocketAdapter;
+ AsyncSocket* CreateProxySocket(const ProxyInfo& proxy, int family, int type);
+
+ SocketFactory* factory_;
+ std::string agent_;
+ bool autodetect_proxy_, force_connect_;
+ ProxyInfo proxy_;
+ std::string hostname_, logging_label_;
+ LoggingSeverity logging_level_;
+ bool binary_mode_;
+ bool ignore_bad_cert_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SSLSOCKETFACTORY_H__
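
A minimal usage sketch of this factory, not part of the change; rtc::PhysicalSocketServer, the PROXY_HTTPS value from proxyinfo.h, and the proxy/host/port values are assumptions for illustration only. The returned socket is owned by the caller.

rtc::PhysicalSocketServer pss;                        // any SocketFactory works here
rtc::SslSocketFactory factory(&pss, "ExampleAgent/1.0");

rtc::ProxyInfo proxy;
proxy.type = rtc::PROXY_HTTPS;
proxy.address = rtc::SocketAddress("proxy.example.com", 8080);
factory.SetProxy(proxy);              // or leave the default autodetection enabled
factory.UseSSL("login.example.com");  // wrap the connection in TLS to this host
factory.SetLogging(rtc::LS_VERBOSE, "ssl");

rtc::AsyncSocket* socket = factory.CreateAsyncSocket(AF_INET, SOCK_STREAM);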
diff --git a/chromium/third_party/webrtc/base/sslstreamadapter.cc b/chromium/third_party/webrtc/base/sslstreamadapter.cc
new file mode 100644
index 00000000000..44df2eedd95
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslstreamadapter.cc
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if HAVE_CONFIG_H
+#include "config.h"
+#endif // HAVE_CONFIG_H
+
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/sslconfig.h"
+
+#if SSL_USE_SCHANNEL
+
+// SChannel support for DTLS and peer-to-peer mode is not
+// implemented.
+#elif SSL_USE_OPENSSL // && !SSL_USE_SCHANNEL
+
+#include "webrtc/base/opensslstreamadapter.h"
+
+#elif SSL_USE_NSS // && !SSL_USE_SCHANNEL && !SSL_USE_OPENSSL
+
+#include "webrtc/base/nssstreamadapter.h"
+
+#endif // !SSL_USE_OPENSSL && !SSL_USE_SCHANNEL && !SSL_USE_NSS
+
+///////////////////////////////////////////////////////////////////////////////
+
+namespace rtc {
+
+SSLStreamAdapter* SSLStreamAdapter::Create(StreamInterface* stream) {
+#if SSL_USE_SCHANNEL
+ return NULL;
+#elif SSL_USE_OPENSSL // !SSL_USE_SCHANNEL
+ return new OpenSSLStreamAdapter(stream);
+#elif SSL_USE_NSS // !SSL_USE_SCHANNEL && !SSL_USE_OPENSSL
+ return new NSSStreamAdapter(stream);
+#else // !SSL_USE_SCHANNEL && !SSL_USE_OPENSSL && !SSL_USE_NSS
+ return NULL;
+#endif
+}
+
+// Note: this matches the logic above with SCHANNEL dominating
+#if SSL_USE_SCHANNEL
+bool SSLStreamAdapter::HaveDtls() { return false; }
+bool SSLStreamAdapter::HaveDtlsSrtp() { return false; }
+bool SSLStreamAdapter::HaveExporter() { return false; }
+#elif SSL_USE_OPENSSL
+bool SSLStreamAdapter::HaveDtls() {
+ return OpenSSLStreamAdapter::HaveDtls();
+}
+bool SSLStreamAdapter::HaveDtlsSrtp() {
+ return OpenSSLStreamAdapter::HaveDtlsSrtp();
+}
+bool SSLStreamAdapter::HaveExporter() {
+ return OpenSSLStreamAdapter::HaveExporter();
+}
+#elif SSL_USE_NSS
+bool SSLStreamAdapter::HaveDtls() {
+ return NSSStreamAdapter::HaveDtls();
+}
+bool SSLStreamAdapter::HaveDtlsSrtp() {
+ return NSSStreamAdapter::HaveDtlsSrtp();
+}
+bool SSLStreamAdapter::HaveExporter() {
+ return NSSStreamAdapter::HaveExporter();
+}
+#endif // !SSL_USE_SCHANNEL && !SSL_USE_OPENSSL && !SSL_USE_NSS
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
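
Callers can branch on these capability probes at runtime instead of repeating the preprocessor checks; a small illustrative sketch, not part of the change:

if (rtc::SSLStreamAdapter::HaveDtlsSrtp()) {
  // Negotiate DTLS-SRTP key exchange.
} else if (rtc::SSLStreamAdapter::HaveDtls()) {
  // Plain DTLS is available, but SRTP keys must come from elsewhere.
} else {
  // SChannel or no SSL backend: fall back or fail.
}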
diff --git a/chromium/third_party/webrtc/base/sslstreamadapter.h b/chromium/third_party/webrtc/base/sslstreamadapter.h
new file mode 100644
index 00000000000..ffe6b2f7b57
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslstreamadapter.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SSLSTREAMADAPTER_H_
+#define WEBRTC_BASE_SSLSTREAMADAPTER_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/stream.h"
+#include "webrtc/base/sslidentity.h"
+
+namespace rtc {
+
+// SSLStreamAdapter : A StreamInterfaceAdapter that does SSL/TLS.
+// After SSL has been started, the stream will only open on successful
+// SSL verification of certificates, and all subsequent communication
+// is encrypted.
+//
+// This class was written with SSLAdapter as a starting point. It
+// offers a similar interface, with two differences: there is no
+// support for a restartable SSL connection, and this class has a
+// peer-to-peer mode.
+//
+// The SSL library requires initialization and cleanup. Static methods
+// for doing this are in SSLAdapter. They should possibly be moved out
+// to a neutral class.
+
+
+enum SSLRole { SSL_CLIENT, SSL_SERVER };
+enum SSLMode { SSL_MODE_TLS, SSL_MODE_DTLS };
+
+// Errors for Read -- in the high range so no conflict with OpenSSL.
+enum { SSE_MSG_TRUNC = 0xff0001 };
+
+class SSLStreamAdapter : public StreamAdapterInterface {
+ public:
+ // Instantiate an SSLStreamAdapter wrapping the given stream,
+ // (using the selected implementation for the platform).
+ // Caller is responsible for freeing the returned object.
+ static SSLStreamAdapter* Create(StreamInterface* stream);
+
+ explicit SSLStreamAdapter(StreamInterface* stream)
+ : StreamAdapterInterface(stream), ignore_bad_cert_(false) { }
+
+ void set_ignore_bad_cert(bool ignore) { ignore_bad_cert_ = ignore; }
+ bool ignore_bad_cert() const { return ignore_bad_cert_; }
+
+ // Specify our SSL identity: key and certificate. Mostly this is
+ // only used in the peer-to-peer mode (unless we actually want to
+ // provide a client certificate to a server).
+ // SSLStream takes ownership of the SSLIdentity object and will
+ // free it when appropriate. Should be called no more than once on a
+ // given SSLStream instance.
+ virtual void SetIdentity(SSLIdentity* identity) = 0;
+
+ // Call this to indicate that we are to play the server's role in
+ // the peer-to-peer mode.
+  // The default argument is for backward compatibility.
+  // TODO(ekr@rtfm.com): rename this to SetRole to reflect its new function.
+ virtual void SetServerRole(SSLRole role = SSL_SERVER) = 0;
+
+ // Do DTLS or TLS
+ virtual void SetMode(SSLMode mode) = 0;
+
+ // The mode of operation is selected by calling either
+ // StartSSLWithServer or StartSSLWithPeer.
+ // Use of the stream prior to calling either of these functions will
+ // pass data in clear text.
+ // Calling one of these functions causes SSL negotiation to begin as
+ // soon as possible: right away if the underlying wrapped stream is
+ // already opened, or else as soon as it opens.
+ //
+ // These functions return a negative error code on failure.
+ // Returning 0 means success so far, but negotiation is probably not
+ // complete and will continue asynchronously. In that case, the
+ // exposed stream will open after successful negotiation and
+ // verification, or an SE_CLOSE event will be raised if negotiation
+ // fails.
+
+ // StartSSLWithServer starts SSL negotiation with a server in
+ // traditional mode. server_name specifies the expected server name
+ // which the server's certificate needs to specify.
+ virtual int StartSSLWithServer(const char* server_name) = 0;
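
As a hedged illustration of the traditional mode described above (the wrapped stream and host name are placeholders):

// |tcp_stream| is an existing, already-created StreamInterface* (placeholder).
rtc::SSLStreamAdapter* ssl = rtc::SSLStreamAdapter::Create(tcp_stream);
int err = ssl->StartSSLWithServer("login.example.com");
if (err < 0) {
  // Immediate failure. Otherwise negotiation continues asynchronously: the
  // stream opens on success, or an SE_CLOSE event is raised on failure.
}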
+
+ // StartSSLWithPeer starts negotiation in the special peer-to-peer
+ // mode.
+ // Generally, SetIdentity() and possibly SetServerRole() should have
+ // been called before this.
+ // SetPeerCertificate() or SetPeerCertificateDigest() must also be called.
+ // It may be called after StartSSLWithPeer() but must be called before the
+ // underlying stream opens.
+ virtual int StartSSLWithPeer() = 0;
+
+ // Specify the digest of the certificate that our peer is expected to use in
+ // peer-to-peer mode. Only this certificate will be accepted during
+ // SSL verification. The certificate is assumed to have been
+ // obtained through some other secure channel (such as the XMPP
+ // channel). Unlike SetPeerCertificate(), this must specify the
+ // terminal certificate, not just a CA.
+ // SSLStream makes a copy of the digest value.
+ virtual bool SetPeerCertificateDigest(const std::string& digest_alg,
+ const unsigned char* digest_val,
+ size_t digest_len) = 0;
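
And the peer-to-peer call sequence, sketched under the same caveats; the identity name and the |remote_digest|/|remote_digest_len| values are placeholders, and error handling is omitted:

rtc::SSLStreamAdapter* ssl = rtc::SSLStreamAdapter::Create(transport_stream);
ssl->SetIdentity(rtc::SSLIdentity::Generate("me"));  // adapter takes ownership
ssl->SetMode(rtc::SSL_MODE_DTLS);
ssl->SetServerRole();                                // only on the passive side
ssl->StartSSLWithPeer();
// The peer's certificate digest must be supplied before the wrapped stream
// opens, e.g. received out of band via signaling:
ssl->SetPeerCertificateDigest(rtc::DIGEST_SHA_1, remote_digest,
                              remote_digest_len);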
+
+ // Retrieves the peer's X.509 certificate, if a connection has been
+  // established. It returns the certificate transmitted over SSL, including
+  // the entire chain. The returned certificate is owned by the caller.
+ virtual bool GetPeerCertificate(SSLCertificate** cert) const = 0;
+
+ // Key Exporter interface from RFC 5705
+ // Arguments are:
+ // label -- the exporter label.
+ // part of the RFC defining each exporter
+ // usage (IN)
+ // context/context_len -- a context to bind to for this connection;
+ // optional, can be NULL, 0 (IN)
+ // use_context -- whether to use the context value
+ // (needed to distinguish no context from
+ // zero-length ones).
+ // result -- where to put the computed value
+ // result_len -- the length of the computed value
+ virtual bool ExportKeyingMaterial(const std::string& label,
+ const uint8* context,
+ size_t context_len,
+ bool use_context,
+ uint8* result,
+ size_t result_len) {
+ return false; // Default is unsupported
+ }
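
An illustrative call, loosely mirroring the exporter unit test later in this change; the label, context, and output size are placeholders:

uint8 exported[20];
bool ok = ssl->ExportKeyingMaterial(
    "EXTRACTOR-example_label",
    reinterpret_cast<const uint8*>("ctx"), 3, true /* use_context */,
    exported, sizeof(exported));
// |ok| is false on backends without exporter support (see HaveExporter()).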
+
+
+ // DTLS-SRTP interface
+ virtual bool SetDtlsSrtpCiphers(const std::vector<std::string>& ciphers) {
+ return false;
+ }
+
+ virtual bool GetDtlsSrtpCipher(std::string* cipher) {
+ return false;
+ }
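
A sketch of the DTLS-SRTP hooks around the handshake; the cipher name matches the constant exercised in the unit tests, and |ssl| is assumed configured as in the sketches above:

std::vector<std::string> ciphers;
ciphers.push_back("AES_CM_128_HMAC_SHA1_80");
ssl->SetDtlsSrtpCiphers(ciphers);   // before StartSSLWithPeer()
// ... after the handshake completes:
std::string selected;
if (ssl->GetDtlsSrtpCipher(&selected)) {
  // Derive SRTP keys for |selected| via ExportKeyingMaterial().
}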
+
+ // Capabilities testing
+ static bool HaveDtls();
+ static bool HaveDtlsSrtp();
+ static bool HaveExporter();
+
+ // If true, the server certificate need not match the configured
+ // server_name, and in fact missing certificate authority and other
+ // verification errors are ignored.
+ bool ignore_bad_cert_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SSLSTREAMADAPTER_H_
diff --git a/chromium/third_party/webrtc/base/sslstreamadapter_unittest.cc b/chromium/third_party/webrtc/base/sslstreamadapter_unittest.cc
new file mode 100644
index 00000000000..af78bfff5ab
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslstreamadapter_unittest.cc
@@ -0,0 +1,940 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <algorithm>
+#include <set>
+#include <string>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslconfig.h"
+#include "webrtc/base/sslidentity.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/stream.h"
+
+static const int kBlockSize = 4096;
+static const char kAES_CM_HMAC_SHA1_80[] = "AES_CM_128_HMAC_SHA1_80";
+static const char kAES_CM_HMAC_SHA1_32[] = "AES_CM_128_HMAC_SHA1_32";
+static const char kExporterLabel[] = "label";
+static const unsigned char kExporterContext[] = "context";
+static int kExporterContextLen = sizeof(kExporterContext);
+
+static const char kRSA_PRIVATE_KEY_PEM[] =
+ "-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
+ "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
+ "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
+ "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
+ "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
+ "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
+ "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
+ "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
+ "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
+ "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
+ "UCXiYxSsu20QNVw=\n"
+ "-----END RSA PRIVATE KEY-----\n";
+
+static const char kCERT_PEM[] =
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
+ "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
+ "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
+ "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
+ "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
+ "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
+ "-----END CERTIFICATE-----\n";
+
+#define MAYBE_SKIP_TEST(feature) \
+ if (!(rtc::SSLStreamAdapter::feature())) { \
+ LOG(LS_INFO) << "Feature disabled... skipping"; \
+ return; \
+ }
+
+class SSLStreamAdapterTestBase;
+
+class SSLDummyStream : public rtc::StreamInterface,
+ public sigslot::has_slots<> {
+ public:
+ explicit SSLDummyStream(SSLStreamAdapterTestBase *test,
+ const std::string &side,
+ rtc::FifoBuffer *in,
+ rtc::FifoBuffer *out) :
+ test_(test),
+ side_(side),
+ in_(in),
+ out_(out),
+ first_packet_(true) {
+ in_->SignalEvent.connect(this, &SSLDummyStream::OnEventIn);
+ out_->SignalEvent.connect(this, &SSLDummyStream::OnEventOut);
+ }
+
+ virtual rtc::StreamState GetState() const { return rtc::SS_OPEN; }
+
+ virtual rtc::StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ rtc::StreamResult r;
+
+ r = in_->Read(buffer, buffer_len, read, error);
+ if (r == rtc::SR_BLOCK)
+ return rtc::SR_BLOCK;
+ if (r == rtc::SR_EOS)
+ return rtc::SR_EOS;
+
+ if (r != rtc::SR_SUCCESS) {
+ ADD_FAILURE();
+ return rtc::SR_ERROR;
+ }
+
+ return rtc::SR_SUCCESS;
+ }
+
+ // Catch readability events on in and pass them up.
+ virtual void OnEventIn(rtc::StreamInterface *stream, int sig,
+ int err) {
+ int mask = (rtc::SE_READ | rtc::SE_CLOSE);
+
+ if (sig & mask) {
+ LOG(LS_INFO) << "SSLDummyStream::OnEvent side=" << side_ << " sig="
+ << sig << " forwarding upward";
+ PostEvent(sig & mask, 0);
+ }
+ }
+
+ // Catch writeability events on out and pass them up.
+ virtual void OnEventOut(rtc::StreamInterface *stream, int sig,
+ int err) {
+ if (sig & rtc::SE_WRITE) {
+ LOG(LS_INFO) << "SSLDummyStream::OnEvent side=" << side_ << " sig="
+ << sig << " forwarding upward";
+
+ PostEvent(sig & rtc::SE_WRITE, 0);
+ }
+ }
+
+ // Write to the outgoing FifoBuffer
+ rtc::StreamResult WriteData(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ return out_->Write(data, data_len, written, error);
+ }
+
+ // Defined later
+ virtual rtc::StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+
+ virtual void Close() {
+ LOG(LS_INFO) << "Closing outbound stream";
+ out_->Close();
+ }
+
+ private:
+ SSLStreamAdapterTestBase *test_;
+ const std::string side_;
+ rtc::FifoBuffer *in_;
+ rtc::FifoBuffer *out_;
+ bool first_packet_;
+};
+
+static const int kFifoBufferSize = 4096;
+
+class SSLStreamAdapterTestBase : public testing::Test,
+ public sigslot::has_slots<> {
+ public:
+ SSLStreamAdapterTestBase(const std::string& client_cert_pem,
+ const std::string& client_private_key_pem,
+ bool dtls) :
+ client_buffer_(kFifoBufferSize), server_buffer_(kFifoBufferSize),
+ client_stream_(
+ new SSLDummyStream(this, "c2s", &client_buffer_, &server_buffer_)),
+ server_stream_(
+ new SSLDummyStream(this, "s2c", &server_buffer_, &client_buffer_)),
+ client_ssl_(rtc::SSLStreamAdapter::Create(client_stream_)),
+ server_ssl_(rtc::SSLStreamAdapter::Create(server_stream_)),
+ client_identity_(NULL), server_identity_(NULL),
+ delay_(0), mtu_(1460), loss_(0), lose_first_packet_(false),
+ damage_(false), dtls_(dtls),
+ handshake_wait_(5000), identities_set_(false) {
+ // Set use of the test RNG to get predictable loss patterns.
+ rtc::SetRandomTestMode(true);
+
+ // Set up the slots
+ client_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent);
+ server_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent);
+
+ if (!client_cert_pem.empty() && !client_private_key_pem.empty()) {
+ client_identity_ = rtc::SSLIdentity::FromPEMStrings(
+ client_private_key_pem, client_cert_pem);
+ } else {
+ client_identity_ = rtc::SSLIdentity::Generate("client");
+ }
+ server_identity_ = rtc::SSLIdentity::Generate("server");
+
+ client_ssl_->SetIdentity(client_identity_);
+ server_ssl_->SetIdentity(server_identity_);
+ }
+
+ ~SSLStreamAdapterTestBase() {
+ // Put it back for the next test.
+ rtc::SetRandomTestMode(false);
+ }
+
+ static void SetUpTestCase() {
+ rtc::InitializeSSL();
+ }
+
+ static void TearDownTestCase() {
+ rtc::CleanupSSL();
+ }
+
+ // Recreate the client/server identities with the specified validity period.
+ // |not_before| and |not_after| are offsets from the current time in number
+ // of seconds.
+ void ResetIdentitiesWithValidity(int not_before, int not_after) {
+ client_stream_ =
+ new SSLDummyStream(this, "c2s", &client_buffer_, &server_buffer_);
+ server_stream_ =
+ new SSLDummyStream(this, "s2c", &server_buffer_, &client_buffer_);
+
+ client_ssl_.reset(rtc::SSLStreamAdapter::Create(client_stream_));
+ server_ssl_.reset(rtc::SSLStreamAdapter::Create(server_stream_));
+
+ client_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent);
+ server_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent);
+
+ rtc::SSLIdentityParams client_params;
+ client_params.common_name = "client";
+ client_params.not_before = not_before;
+ client_params.not_after = not_after;
+ client_identity_ = rtc::SSLIdentity::GenerateForTest(client_params);
+
+ rtc::SSLIdentityParams server_params;
+ server_params.common_name = "server";
+ server_params.not_before = not_before;
+ server_params.not_after = not_after;
+ server_identity_ = rtc::SSLIdentity::GenerateForTest(server_params);
+
+ client_ssl_->SetIdentity(client_identity_);
+ server_ssl_->SetIdentity(server_identity_);
+ }
+
+ virtual void OnEvent(rtc::StreamInterface *stream, int sig, int err) {
+ LOG(LS_INFO) << "SSLStreamAdapterTestBase::OnEvent sig=" << sig;
+
+ if (sig & rtc::SE_READ) {
+ ReadData(stream);
+ }
+
+ if ((stream == client_ssl_.get()) && (sig & rtc::SE_WRITE)) {
+ WriteData();
+ }
+ }
+
+ void SetPeerIdentitiesByDigest(bool correct) {
+ unsigned char digest[20];
+ size_t digest_len;
+ bool rv;
+
+ LOG(LS_INFO) << "Setting peer identities by digest";
+
+ rv = server_identity_->certificate().ComputeDigest(rtc::DIGEST_SHA_1,
+ digest, 20,
+ &digest_len);
+ ASSERT_TRUE(rv);
+ if (!correct) {
+ LOG(LS_INFO) << "Setting bogus digest for server cert";
+ digest[0]++;
+ }
+ rv = client_ssl_->SetPeerCertificateDigest(rtc::DIGEST_SHA_1, digest,
+ digest_len);
+ ASSERT_TRUE(rv);
+
+
+ rv = client_identity_->certificate().ComputeDigest(rtc::DIGEST_SHA_1,
+ digest, 20, &digest_len);
+ ASSERT_TRUE(rv);
+ if (!correct) {
+ LOG(LS_INFO) << "Setting bogus digest for client cert";
+ digest[0]++;
+ }
+ rv = server_ssl_->SetPeerCertificateDigest(rtc::DIGEST_SHA_1, digest,
+ digest_len);
+ ASSERT_TRUE(rv);
+
+ identities_set_ = true;
+ }
+
+ void TestHandshake(bool expect_success = true) {
+ server_ssl_->SetMode(dtls_ ? rtc::SSL_MODE_DTLS :
+ rtc::SSL_MODE_TLS);
+ client_ssl_->SetMode(dtls_ ? rtc::SSL_MODE_DTLS :
+ rtc::SSL_MODE_TLS);
+
+ if (!dtls_) {
+ // Make sure we simulate a reliable network for TLS.
+ // This is just a check to make sure that people don't write wrong
+ // tests.
+ ASSERT((mtu_ == 1460) && (loss_ == 0) && (lose_first_packet_ == 0));
+ }
+
+ if (!identities_set_)
+ SetPeerIdentitiesByDigest(true);
+
+ // Start the handshake
+ int rv;
+
+ server_ssl_->SetServerRole();
+ rv = server_ssl_->StartSSLWithPeer();
+ ASSERT_EQ(0, rv);
+
+ rv = client_ssl_->StartSSLWithPeer();
+ ASSERT_EQ(0, rv);
+
+ // Now run the handshake
+ if (expect_success) {
+ EXPECT_TRUE_WAIT((client_ssl_->GetState() == rtc::SS_OPEN)
+ && (server_ssl_->GetState() == rtc::SS_OPEN),
+ handshake_wait_);
+ } else {
+ EXPECT_TRUE_WAIT(client_ssl_->GetState() == rtc::SS_CLOSED,
+ handshake_wait_);
+ }
+ }
+
+ rtc::StreamResult DataWritten(SSLDummyStream *from, const void *data,
+ size_t data_len, size_t *written,
+ int *error) {
+ // Randomly drop loss_ percent of packets
+ if (rtc::CreateRandomId() % 100 < static_cast<uint32>(loss_)) {
+ LOG(LS_INFO) << "Randomly dropping packet, size=" << data_len;
+ *written = data_len;
+ return rtc::SR_SUCCESS;
+ }
+ if (dtls_ && (data_len > mtu_)) {
+ LOG(LS_INFO) << "Dropping packet > mtu, size=" << data_len;
+ *written = data_len;
+ return rtc::SR_SUCCESS;
+ }
+
+ // Optionally damage application data (type 23). Note that we don't damage
+ // handshake packets and we damage the last byte to keep the header
+ // intact but break the MAC.
+ if (damage_ && (*static_cast<const unsigned char *>(data) == 23)) {
+ std::vector<char> buf(data_len);
+
+ LOG(LS_INFO) << "Damaging packet";
+
+ memcpy(&buf[0], data, data_len);
+ buf[data_len - 1]++;
+
+ return from->WriteData(&buf[0], data_len, written, error);
+ }
+
+ return from->WriteData(data, data_len, written, error);
+ }
+
+ void SetDelay(int delay) {
+ delay_ = delay;
+ }
+ int GetDelay() { return delay_; }
+
+ void SetLoseFirstPacket(bool lose) {
+ lose_first_packet_ = lose;
+ }
+ bool GetLoseFirstPacket() { return lose_first_packet_; }
+
+ void SetLoss(int percent) {
+ loss_ = percent;
+ }
+
+ void SetDamage() {
+ damage_ = true;
+ }
+
+ void SetMtu(size_t mtu) {
+ mtu_ = mtu;
+ }
+
+ void SetHandshakeWait(int wait) {
+ handshake_wait_ = wait;
+ }
+
+ void SetDtlsSrtpCiphers(const std::vector<std::string> &ciphers,
+ bool client) {
+ if (client)
+ client_ssl_->SetDtlsSrtpCiphers(ciphers);
+ else
+ server_ssl_->SetDtlsSrtpCiphers(ciphers);
+ }
+
+ bool GetDtlsSrtpCipher(bool client, std::string *retval) {
+ if (client)
+ return client_ssl_->GetDtlsSrtpCipher(retval);
+ else
+ return server_ssl_->GetDtlsSrtpCipher(retval);
+ }
+
+ bool GetPeerCertificate(bool client, rtc::SSLCertificate** cert) {
+ if (client)
+ return client_ssl_->GetPeerCertificate(cert);
+ else
+ return server_ssl_->GetPeerCertificate(cert);
+ }
+
+ bool ExportKeyingMaterial(const char *label,
+ const unsigned char *context,
+ size_t context_len,
+ bool use_context,
+ bool client,
+ unsigned char *result,
+ size_t result_len) {
+ if (client)
+ return client_ssl_->ExportKeyingMaterial(label,
+ context, context_len,
+ use_context,
+ result, result_len);
+ else
+ return server_ssl_->ExportKeyingMaterial(label,
+ context, context_len,
+ use_context,
+ result, result_len);
+ }
+
+ // To be implemented by subclasses.
+ virtual void WriteData() = 0;
+ virtual void ReadData(rtc::StreamInterface *stream) = 0;
+ virtual void TestTransfer(int size) = 0;
+
+ protected:
+ rtc::FifoBuffer client_buffer_;
+ rtc::FifoBuffer server_buffer_;
+ SSLDummyStream *client_stream_; // freed by client_ssl_ destructor
+ SSLDummyStream *server_stream_; // freed by server_ssl_ destructor
+ rtc::scoped_ptr<rtc::SSLStreamAdapter> client_ssl_;
+ rtc::scoped_ptr<rtc::SSLStreamAdapter> server_ssl_;
+ rtc::SSLIdentity *client_identity_; // freed by client_ssl_ destructor
+ rtc::SSLIdentity *server_identity_; // freed by server_ssl_ destructor
+ int delay_;
+ size_t mtu_;
+ int loss_;
+ bool lose_first_packet_;
+ bool damage_;
+ bool dtls_;
+ int handshake_wait_;
+ bool identities_set_;
+};
+
+class SSLStreamAdapterTestTLS : public SSLStreamAdapterTestBase {
+ public:
+ SSLStreamAdapterTestTLS() :
+ SSLStreamAdapterTestBase("", "", false) {
+ };
+
+ // Test data transfer for TLS
+ virtual void TestTransfer(int size) {
+ LOG(LS_INFO) << "Starting transfer test with " << size << " bytes";
+ // Create some dummy data to send.
+ size_t received;
+
+ send_stream_.ReserveSize(size);
+ for (int i = 0; i < size; ++i) {
+ char ch = static_cast<char>(i);
+ send_stream_.Write(&ch, 1, NULL, NULL);
+ }
+ send_stream_.Rewind();
+
+ // Prepare the receive stream.
+ recv_stream_.ReserveSize(size);
+
+ // Start sending
+ WriteData();
+
+ // Wait for the client to close
+ EXPECT_TRUE_WAIT(server_ssl_->GetState() == rtc::SS_CLOSED, 10000);
+
+ // Now check the data
+ recv_stream_.GetSize(&received);
+
+ EXPECT_EQ(static_cast<size_t>(size), received);
+ EXPECT_EQ(0, memcmp(send_stream_.GetBuffer(),
+ recv_stream_.GetBuffer(), size));
+ }
+
+ void WriteData() {
+ size_t position, tosend, size;
+ rtc::StreamResult rv;
+ size_t sent;
+ char block[kBlockSize];
+
+ send_stream_.GetSize(&size);
+ if (!size)
+ return;
+
+ for (;;) {
+ send_stream_.GetPosition(&position);
+ if (send_stream_.Read(block, sizeof(block), &tosend, NULL) !=
+ rtc::SR_EOS) {
+ rv = client_ssl_->Write(block, tosend, &sent, 0);
+
+ if (rv == rtc::SR_SUCCESS) {
+ send_stream_.SetPosition(position + sent);
+ LOG(LS_VERBOSE) << "Sent: " << position + sent;
+ } else if (rv == rtc::SR_BLOCK) {
+ LOG(LS_VERBOSE) << "Blocked...";
+ send_stream_.SetPosition(position);
+ break;
+ } else {
+ ADD_FAILURE();
+ break;
+ }
+ } else {
+ // Now close
+ LOG(LS_INFO) << "Wrote " << position << " bytes. Closing";
+ client_ssl_->Close();
+ break;
+ }
+ }
+ };
+
+ virtual void ReadData(rtc::StreamInterface *stream) {
+ char buffer[1600];
+ size_t bread;
+ int err2;
+ rtc::StreamResult r;
+
+ for (;;) {
+ r = stream->Read(buffer, sizeof(buffer), &bread, &err2);
+
+ if (r == rtc::SR_ERROR || r == rtc::SR_EOS) {
+ // Unfortunately, errors are the way that the stream adapter
+ // signals close in OpenSSL
+ stream->Close();
+ return;
+ }
+
+ if (r == rtc::SR_BLOCK)
+ break;
+
+ ASSERT_EQ(rtc::SR_SUCCESS, r);
+ LOG(LS_INFO) << "Read " << bread;
+
+ recv_stream_.Write(buffer, bread, NULL, NULL);
+ }
+ }
+
+ private:
+ rtc::MemoryStream send_stream_;
+ rtc::MemoryStream recv_stream_;
+};
+
+class SSLStreamAdapterTestDTLS : public SSLStreamAdapterTestBase {
+ public:
+ SSLStreamAdapterTestDTLS() :
+ SSLStreamAdapterTestBase("", "", true),
+ packet_size_(1000), count_(0), sent_(0) {
+ }
+
+ SSLStreamAdapterTestDTLS(const std::string& cert_pem,
+ const std::string& private_key_pem) :
+ SSLStreamAdapterTestBase(cert_pem, private_key_pem, true),
+ packet_size_(1000), count_(0), sent_(0) {
+ }
+
+ virtual void WriteData() {
+ unsigned char *packet = new unsigned char[1600];
+
+ do {
+ memset(packet, sent_ & 0xff, packet_size_);
+ *(reinterpret_cast<uint32_t *>(packet)) = sent_;
+
+ size_t sent;
+ int rv = client_ssl_->Write(packet, packet_size_, &sent, 0);
+ if (rv == rtc::SR_SUCCESS) {
+ LOG(LS_VERBOSE) << "Sent: " << sent_;
+ sent_++;
+ } else if (rv == rtc::SR_BLOCK) {
+ LOG(LS_VERBOSE) << "Blocked...";
+ break;
+ } else {
+ ADD_FAILURE();
+ break;
+ }
+ } while (sent_ < count_);
+
+ delete [] packet;
+ }
+
+ virtual void ReadData(rtc::StreamInterface *stream) {
+ unsigned char buffer[2000];
+ size_t bread;
+ int err2;
+ rtc::StreamResult r;
+
+ for (;;) {
+ r = stream->Read(buffer, 2000, &bread, &err2);
+
+ if (r == rtc::SR_ERROR) {
+ // Unfortunately, errors are the way that the stream adapter
+ // signals close right now
+ stream->Close();
+ return;
+ }
+
+ if (r == rtc::SR_BLOCK)
+ break;
+
+ ASSERT_EQ(rtc::SR_SUCCESS, r);
+ LOG(LS_INFO) << "Read " << bread;
+
+ // Now parse the datagram
+ ASSERT_EQ(packet_size_, bread);
+ unsigned char* ptr_to_buffer = buffer;
+ uint32_t packet_num = *(reinterpret_cast<uint32_t *>(ptr_to_buffer));
+
+ for (size_t i = 4; i < packet_size_; i++) {
+ ASSERT_EQ((packet_num & 0xff), buffer[i]);
+ }
+ received_.insert(packet_num);
+ }
+ }
+
+ virtual void TestTransfer(int count) {
+ count_ = count;
+
+ WriteData();
+
+ EXPECT_TRUE_WAIT(sent_ == count_, 10000);
+ LOG(LS_INFO) << "sent_ == " << sent_;
+
+ if (damage_) {
+ WAIT(false, 2000);
+ EXPECT_EQ(0U, received_.size());
+ } else if (loss_ == 0) {
+ EXPECT_EQ_WAIT(static_cast<size_t>(sent_), received_.size(), 1000);
+ } else {
+ LOG(LS_INFO) << "Sent " << sent_ << " packets; received " <<
+ received_.size();
+ }
+ };
+
+ private:
+ size_t packet_size_;
+ int count_;
+ int sent_;
+ std::set<int> received_;
+};
+
+
+rtc::StreamResult SSLDummyStream::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ *written = data_len;
+
+ LOG(LS_INFO) << "Writing to loopback " << data_len;
+
+ if (first_packet_) {
+ first_packet_ = false;
+ if (test_->GetLoseFirstPacket()) {
+ LOG(LS_INFO) << "Losing initial packet of length " << data_len;
+ return rtc::SR_SUCCESS;
+ }
+ }
+
+ return test_->DataWritten(this, data, data_len, written, error);
+};
+
+class SSLStreamAdapterTestDTLSFromPEMStrings : public SSLStreamAdapterTestDTLS {
+ public:
+ SSLStreamAdapterTestDTLSFromPEMStrings() :
+ SSLStreamAdapterTestDTLS(kCERT_PEM, kRSA_PRIVATE_KEY_PEM) {
+ }
+};
+
+// Basic tests: TLS
+
+// Test that we cannot read/write if we have not yet handshaked.
+// This test only applies to NSS because OpenSSL has passthrough
+// semantics for I/O before the handshake is started.
+#if SSL_USE_NSS
+TEST_F(SSLStreamAdapterTestTLS, TestNoReadWriteBeforeConnect) {
+ rtc::StreamResult rv;
+ char block[kBlockSize];
+ size_t dummy;
+
+ rv = client_ssl_->Write(block, sizeof(block), &dummy, NULL);
+ ASSERT_EQ(rtc::SR_BLOCK, rv);
+
+ rv = client_ssl_->Read(block, sizeof(block), &dummy, NULL);
+ ASSERT_EQ(rtc::SR_BLOCK, rv);
+}
+#endif
+
+
+// Test that we can make a handshake work
+TEST_F(SSLStreamAdapterTestTLS, TestTLSConnect) {
+ TestHandshake();
+};
+
+// Test transfer -- trivial
+TEST_F(SSLStreamAdapterTestTLS, TestTLSTransfer) {
+ TestHandshake();
+ TestTransfer(100000);
+};
+
+// Test read-write after close.
+TEST_F(SSLStreamAdapterTestTLS, ReadWriteAfterClose) {
+ TestHandshake();
+ TestTransfer(100000);
+ client_ssl_->Close();
+
+ rtc::StreamResult rv;
+ char block[kBlockSize];
+ size_t dummy;
+
+ // It's an error to write after closed.
+ rv = client_ssl_->Write(block, sizeof(block), &dummy, NULL);
+ ASSERT_EQ(rtc::SR_ERROR, rv);
+
+ // But after closed read gives you EOS.
+ rv = client_ssl_->Read(block, sizeof(block), &dummy, NULL);
+ ASSERT_EQ(rtc::SR_EOS, rv);
+};
+
+// Test a handshake with a bogus peer digest
+TEST_F(SSLStreamAdapterTestTLS, TestTLSBogusDigest) {
+ SetPeerIdentitiesByDigest(false);
+ TestHandshake(false);
+};
+
+// Test moving a bunch of data
+
+// Basic tests: DTLS
+// Test that we can make a handshake work
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSConnect) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ TestHandshake();
+};
+
+// Test that we can make a handshake work if the first packet in
+// each direction is lost. This gives us predictable loss
+// rather than having to tune random
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSConnectWithLostFirstPacket) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ SetLoseFirstPacket(true);
+ TestHandshake();
+};
+
+// Test a handshake with loss and delay
+TEST_F(SSLStreamAdapterTestDTLS,
+ TestDTLSConnectWithLostFirstPacketDelay2s) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ SetLoseFirstPacket(true);
+ SetDelay(2000);
+ SetHandshakeWait(20000);
+ TestHandshake();
+};
+
+// Test a handshake with small MTU
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSConnectWithSmallMtu) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ SetMtu(700);
+ SetHandshakeWait(20000);
+ TestHandshake();
+};
+
+// Test transfer -- trivial
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSTransfer) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ TestHandshake();
+ TestTransfer(100);
+};
+
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSTransferWithLoss) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ TestHandshake();
+ SetLoss(10);
+ TestTransfer(100);
+};
+
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSTransferWithDamage) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ SetDamage(); // Must be called first because first packet
+ // write happens at end of handshake.
+ TestHandshake();
+ TestTransfer(100);
+};
+
+// Test DTLS-SRTP with all high ciphers
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpHigh) {
+ MAYBE_SKIP_TEST(HaveDtlsSrtp);
+ std::vector<std::string> high;
+ high.push_back(kAES_CM_HMAC_SHA1_80);
+ SetDtlsSrtpCiphers(high, true);
+ SetDtlsSrtpCiphers(high, false);
+ TestHandshake();
+
+ std::string client_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCipher(true, &client_cipher));
+ std::string server_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCipher(false, &server_cipher));
+
+ ASSERT_EQ(client_cipher, server_cipher);
+ ASSERT_EQ(client_cipher, kAES_CM_HMAC_SHA1_80);
+};
+
+// Test DTLS-SRTP with all low ciphers
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpLow) {
+ MAYBE_SKIP_TEST(HaveDtlsSrtp);
+ std::vector<std::string> low;
+ low.push_back(kAES_CM_HMAC_SHA1_32);
+ SetDtlsSrtpCiphers(low, true);
+ SetDtlsSrtpCiphers(low, false);
+ TestHandshake();
+
+ std::string client_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCipher(true, &client_cipher));
+ std::string server_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCipher(false, &server_cipher));
+
+ ASSERT_EQ(client_cipher, server_cipher);
+ ASSERT_EQ(client_cipher, kAES_CM_HMAC_SHA1_32);
+};
+
+
+// Test DTLS-SRTP with a mismatch -- should not converge
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpHighLow) {
+ MAYBE_SKIP_TEST(HaveDtlsSrtp);
+ std::vector<std::string> high;
+ high.push_back(kAES_CM_HMAC_SHA1_80);
+ std::vector<std::string> low;
+ low.push_back(kAES_CM_HMAC_SHA1_32);
+ SetDtlsSrtpCiphers(high, true);
+ SetDtlsSrtpCiphers(low, false);
+ TestHandshake();
+
+ std::string client_cipher;
+ ASSERT_FALSE(GetDtlsSrtpCipher(true, &client_cipher));
+ std::string server_cipher;
+ ASSERT_FALSE(GetDtlsSrtpCipher(false, &server_cipher));
+};
+
+// Test DTLS-SRTP with each side being mixed -- should select high
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpMixed) {
+ MAYBE_SKIP_TEST(HaveDtlsSrtp);
+ std::vector<std::string> mixed;
+ mixed.push_back(kAES_CM_HMAC_SHA1_80);
+ mixed.push_back(kAES_CM_HMAC_SHA1_32);
+ SetDtlsSrtpCiphers(mixed, true);
+ SetDtlsSrtpCiphers(mixed, false);
+ TestHandshake();
+
+ std::string client_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCipher(true, &client_cipher));
+ std::string server_cipher;
+ ASSERT_TRUE(GetDtlsSrtpCipher(false, &server_cipher));
+
+ ASSERT_EQ(client_cipher, server_cipher);
+ ASSERT_EQ(client_cipher, kAES_CM_HMAC_SHA1_80);
+};
+
+// Test an exporter
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSExporter) {
+ MAYBE_SKIP_TEST(HaveExporter);
+ TestHandshake();
+ unsigned char client_out[20];
+ unsigned char server_out[20];
+
+ bool result;
+ result = ExportKeyingMaterial(kExporterLabel,
+ kExporterContext, kExporterContextLen,
+ true, true,
+ client_out, sizeof(client_out));
+ ASSERT_TRUE(result);
+
+ result = ExportKeyingMaterial(kExporterLabel,
+ kExporterContext, kExporterContextLen,
+ true, false,
+ server_out, sizeof(server_out));
+ ASSERT_TRUE(result);
+
+ ASSERT_TRUE(!memcmp(client_out, server_out, sizeof(client_out)));
+}
+
+// Test that certificates which are not yet valid are not rejected.
+TEST_F(SSLStreamAdapterTestDTLS, TestCertNotYetValid) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ long one_day = 60 * 60 * 24;
+ // Make the certificates not valid until one day later.
+ ResetIdentitiesWithValidity(one_day, one_day);
+ TestHandshake();
+}
+
+// Test that expired certificates are not rejected.
+TEST_F(SSLStreamAdapterTestDTLS, TestCertExpired) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ long one_day = 60 * 60 * 24;
+ // Make the certificates already expired.
+ ResetIdentitiesWithValidity(-one_day, -one_day);
+ TestHandshake();
+}
+
+// Test data transfer using certs created from strings.
+TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestTransfer) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ TestHandshake();
+ TestTransfer(100);
+}
+
+// Test getting the remote certificate.
+TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestDTLSGetPeerCertificate) {
+ MAYBE_SKIP_TEST(HaveDtls);
+
+ // Peer certificates haven't been received yet.
+ rtc::scoped_ptr<rtc::SSLCertificate> client_peer_cert;
+ ASSERT_FALSE(GetPeerCertificate(true, client_peer_cert.accept()));
+ ASSERT_FALSE(client_peer_cert != NULL);
+
+ rtc::scoped_ptr<rtc::SSLCertificate> server_peer_cert;
+ ASSERT_FALSE(GetPeerCertificate(false, server_peer_cert.accept()));
+ ASSERT_FALSE(server_peer_cert != NULL);
+
+ TestHandshake();
+
+ // The client should have a peer certificate after the handshake.
+ ASSERT_TRUE(GetPeerCertificate(true, client_peer_cert.accept()));
+ ASSERT_TRUE(client_peer_cert != NULL);
+
+ // It's not kCERT_PEM.
+ std::string client_peer_string = client_peer_cert->ToPEMString();
+ ASSERT_NE(kCERT_PEM, client_peer_string);
+
+ // It must not have a chain, because the test certs are self-signed.
+ rtc::SSLCertChain* client_peer_chain;
+ ASSERT_FALSE(client_peer_cert->GetChain(&client_peer_chain));
+
+ // The server should have a peer certificate after the handshake.
+ ASSERT_TRUE(GetPeerCertificate(false, server_peer_cert.accept()));
+ ASSERT_TRUE(server_peer_cert != NULL);
+
+ // It's kCERT_PEM
+ ASSERT_EQ(kCERT_PEM, server_peer_cert->ToPEMString());
+
+ // It must not have a chain, because the test certs are self-signed.
+ rtc::SSLCertChain* server_peer_chain;
+ ASSERT_FALSE(server_peer_cert->GetChain(&server_peer_chain));
+}
diff --git a/chromium/third_party/webrtc/base/sslstreamadapterhelper.cc b/chromium/third_party/webrtc/base/sslstreamadapterhelper.cc
new file mode 100644
index 00000000000..d9c6afd408c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslstreamadapterhelper.cc
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <vector>
+
+#if HAVE_CONFIG_H
+#include "config.h"
+#endif // HAVE_CONFIG_H
+
+#include "webrtc/base/sslstreamadapterhelper.h"
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+
+void SSLStreamAdapterHelper::SetIdentity(SSLIdentity* identity) {
+ ASSERT(identity_.get() == NULL);
+ identity_.reset(identity);
+}
+
+void SSLStreamAdapterHelper::SetServerRole(SSLRole role) {
+ role_ = role;
+}
+
+int SSLStreamAdapterHelper::StartSSLWithServer(const char* server_name) {
+ ASSERT(server_name != NULL && server_name[0] != '\0');
+ ssl_server_name_ = server_name;
+ return StartSSL();
+}
+
+int SSLStreamAdapterHelper::StartSSLWithPeer() {
+ ASSERT(ssl_server_name_.empty());
+ // It is permitted to specify peer_certificate_ only later.
+ return StartSSL();
+}
+
+void SSLStreamAdapterHelper::SetMode(SSLMode mode) {
+ ASSERT(state_ == SSL_NONE);
+ ssl_mode_ = mode;
+}
+
+StreamState SSLStreamAdapterHelper::GetState() const {
+ switch (state_) {
+ case SSL_WAIT:
+ case SSL_CONNECTING:
+ return SS_OPENING;
+ case SSL_CONNECTED:
+ return SS_OPEN;
+ default:
+ return SS_CLOSED;
+ };
+ // not reached
+}
+
+bool SSLStreamAdapterHelper::GetPeerCertificate(SSLCertificate** cert) const {
+ if (!peer_certificate_)
+ return false;
+
+ *cert = peer_certificate_->GetReference();
+ return true;
+}
+
+bool SSLStreamAdapterHelper::SetPeerCertificateDigest(
+ const std::string &digest_alg,
+ const unsigned char* digest_val,
+ size_t digest_len) {
+ ASSERT(peer_certificate_.get() == NULL);
+ ASSERT(peer_certificate_digest_algorithm_.empty());
+ ASSERT(ssl_server_name_.empty());
+ size_t expected_len;
+
+ if (!GetDigestLength(digest_alg, &expected_len)) {
+ LOG(LS_WARNING) << "Unknown digest algorithm: " << digest_alg;
+ return false;
+ }
+ if (expected_len != digest_len)
+ return false;
+
+ peer_certificate_digest_value_.SetData(digest_val, digest_len);
+ peer_certificate_digest_algorithm_ = digest_alg;
+
+ return true;
+}
+
+void SSLStreamAdapterHelper::Error(const char* context, int err, bool signal) {
+ LOG(LS_WARNING) << "SSLStreamAdapterHelper::Error("
+ << context << ", " << err << "," << signal << ")";
+ state_ = SSL_ERROR;
+ ssl_error_code_ = err;
+ Cleanup();
+ if (signal)
+ StreamAdapterInterface::OnEvent(stream(), SE_CLOSE, err);
+}
+
+void SSLStreamAdapterHelper::Close() {
+ Cleanup();
+ ASSERT(state_ == SSL_CLOSED || state_ == SSL_ERROR);
+ StreamAdapterInterface::Close();
+}
+
+int SSLStreamAdapterHelper::StartSSL() {
+ ASSERT(state_ == SSL_NONE);
+
+ if (StreamAdapterInterface::GetState() != SS_OPEN) {
+ state_ = SSL_WAIT;
+ return 0;
+ }
+
+ state_ = SSL_CONNECTING;
+ int err = BeginSSL();
+ if (err) {
+ Error("BeginSSL", err, false);
+ return err;
+ }
+
+ return 0;
+}
+
+} // namespace rtc
+
diff --git a/chromium/third_party/webrtc/base/sslstreamadapterhelper.h b/chromium/third_party/webrtc/base/sslstreamadapterhelper.h
new file mode 100644
index 00000000000..ef06597b822
--- /dev/null
+++ b/chromium/third_party/webrtc/base/sslstreamadapterhelper.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SSLSTREAMADAPTERHELPER_H_
+#define WEBRTC_BASE_SSLSTREAMADAPTERHELPER_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/sslidentity.h"
+#include "webrtc/base/sslstreamadapter.h"
+
+namespace rtc {
+
+// SSLStreamAdapterHelper : A stream adapter which implements much
+// of the logic that is common between the known implementations
+// (NSS and OpenSSL)
+class SSLStreamAdapterHelper : public SSLStreamAdapter {
+ public:
+ explicit SSLStreamAdapterHelper(StreamInterface* stream)
+ : SSLStreamAdapter(stream),
+ state_(SSL_NONE),
+ role_(SSL_CLIENT),
+ ssl_error_code_(0), // Not meaningful yet
+ ssl_mode_(SSL_MODE_TLS) {}
+
+
+ // Overrides of SSLStreamAdapter
+ virtual void SetIdentity(SSLIdentity* identity);
+ virtual void SetServerRole(SSLRole role = SSL_SERVER);
+ virtual void SetMode(SSLMode mode);
+
+ virtual int StartSSLWithServer(const char* server_name);
+ virtual int StartSSLWithPeer();
+
+ virtual bool SetPeerCertificateDigest(const std::string& digest_alg,
+ const unsigned char* digest_val,
+ size_t digest_len);
+ virtual bool GetPeerCertificate(SSLCertificate** cert) const;
+ virtual StreamState GetState() const;
+ virtual void Close();
+
+ protected:
+ // Internal helper methods
+ // The following method returns 0 on success and a negative
+  // error code on failure. The error code may be -1 or may come from the
+  // underlying implementation, so unfortunately it cannot be interpreted
+  // reliably.
+
+ // Perform SSL negotiation steps.
+ int ContinueSSL();
+
+ // Error handler helper. signal is given as true for errors in
+ // asynchronous contexts (when an error code was not returned
+ // through some other method), and in that case an SE_CLOSE event is
+ // raised on the stream with the specified error.
+ // A 0 error means a graceful close, otherwise there is not really enough
+ // context to interpret the error code.
+ virtual void Error(const char* context, int err, bool signal);
+
+  // Must be implemented by descendants.
+ virtual int BeginSSL() = 0;
+ virtual void Cleanup() = 0;
+ virtual bool GetDigestLength(const std::string& algorithm,
+ size_t* length) = 0;
+
+ enum SSLState {
+ // Before calling one of the StartSSL methods, data flows
+ // in clear text.
+ SSL_NONE,
+ SSL_WAIT, // waiting for the stream to open to start SSL negotiation
+ SSL_CONNECTING, // SSL negotiation in progress
+ SSL_CONNECTED, // SSL stream successfully established
+ SSL_ERROR, // some SSL error occurred, stream is closed
+ SSL_CLOSED // Clean close
+ };
+
+ // MSG_MAX is the maximum generic stream message number.
+ enum { MSG_DTLS_TIMEOUT = MSG_MAX + 1 };
+
+ SSLState state_;
+ SSLRole role_;
+ int ssl_error_code_; // valid when state_ == SSL_ERROR
+
+ // Our key and certificate, mostly useful in peer-to-peer mode.
+ scoped_ptr<SSLIdentity> identity_;
+ // in traditional mode, the server name that the server's certificate
+ // must specify. Empty in peer-to-peer mode.
+ std::string ssl_server_name_;
+ // The peer's certificate. Only used for GetPeerCertificate.
+ scoped_ptr<SSLCertificate> peer_certificate_;
+
+ // The digest of the certificate that the peer must present.
+ Buffer peer_certificate_digest_value_;
+ std::string peer_certificate_digest_algorithm_;
+
+ // Do DTLS or not
+ SSLMode ssl_mode_;
+
+ private:
+ // Go from state SSL_NONE to either SSL_CONNECTING or SSL_WAIT,
+ // depending on whether the underlying stream is already open or
+ // not. Returns 0 on success and a negative value on error.
+ int StartSSL();
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SSLSTREAMADAPTERHELPER_H_
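
For orientation, the minimal backend contract the helper expects, as a hypothetical sketch not taken from this change (a real backend also overrides Read()/Write() to push data through its SSL engine):

class ExampleBackendAdapter : public rtc::SSLStreamAdapterHelper {
 public:
  explicit ExampleBackendAdapter(rtc::StreamInterface* stream)
      : rtc::SSLStreamAdapterHelper(stream) {}

 protected:
  virtual int BeginSSL() {
    // Start the handshake on the wrapped stream; return 0 on success or a
    // negative error code, which the helper reports through Error().
    return 0;
  }
  virtual void Cleanup() {
    // Release backend handles; called from Close() and on errors.
  }
  virtual bool GetDigestLength(const std::string& algorithm, size_t* length) {
    if (algorithm != rtc::DIGEST_SHA_1)
      return false;
    *length = 20;  // SHA-1 digest size in bytes.
    return true;
  }
};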
diff --git a/chromium/third_party/webrtc/base/stream.cc b/chromium/third_party/webrtc/base/stream.cc
new file mode 100644
index 00000000000..9aa10d7735b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/stream.cc
@@ -0,0 +1,1335 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_POSIX)
+#include <sys/file.h>
+#endif // WEBRTC_POSIX
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <errno.h>
+#include <string>
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#define fileno _fileno
+#endif
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamInterface
+///////////////////////////////////////////////////////////////////////////////
+StreamInterface::~StreamInterface() {
+}
+
+StreamResult StreamInterface::WriteAll(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ StreamResult result = SR_SUCCESS;
+ size_t total_written = 0, current_written;
+ while (total_written < data_len) {
+ result = Write(static_cast<const char*>(data) + total_written,
+ data_len - total_written, &current_written, error);
+ if (result != SR_SUCCESS)
+ break;
+ total_written += current_written;
+ }
+ if (written)
+ *written = total_written;
+ return result;
+}
+
+StreamResult StreamInterface::ReadAll(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ StreamResult result = SR_SUCCESS;
+ size_t total_read = 0, current_read;
+ while (total_read < buffer_len) {
+ result = Read(static_cast<char*>(buffer) + total_read,
+ buffer_len - total_read, &current_read, error);
+ if (result != SR_SUCCESS)
+ break;
+ total_read += current_read;
+ }
+ if (read)
+ *read = total_read;
+ return result;
+}
+
+StreamResult StreamInterface::ReadLine(std::string* line) {
+ line->clear();
+ StreamResult result = SR_SUCCESS;
+ while (true) {
+ char ch;
+ result = Read(&ch, sizeof(ch), NULL, NULL);
+ if (result != SR_SUCCESS) {
+ break;
+ }
+ if (ch == '\n') {
+ break;
+ }
+ line->push_back(ch);
+ }
+ if (!line->empty()) { // give back the line we've collected so far with
+ result = SR_SUCCESS; // a success code. Otherwise return the last code
+ }
+ return result;
+}
+
+void StreamInterface::PostEvent(Thread* t, int events, int err) {
+ t->Post(this, MSG_POST_EVENT, new StreamEventData(events, err));
+}
+
+void StreamInterface::PostEvent(int events, int err) {
+ PostEvent(Thread::Current(), events, err);
+}
+
+StreamInterface::StreamInterface() {
+}
+
+void StreamInterface::OnMessage(Message* msg) {
+ if (MSG_POST_EVENT == msg->message_id) {
+ StreamEventData* pe = static_cast<StreamEventData*>(msg->pdata);
+ SignalEvent(this, pe->events, pe->error);
+ delete msg->pdata;
+ }
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamAdapterInterface
+///////////////////////////////////////////////////////////////////////////////
+
+StreamAdapterInterface::StreamAdapterInterface(StreamInterface* stream,
+ bool owned)
+ : stream_(stream), owned_(owned) {
+ if (NULL != stream_)
+ stream_->SignalEvent.connect(this, &StreamAdapterInterface::OnEvent);
+}
+
+void StreamAdapterInterface::Attach(StreamInterface* stream, bool owned) {
+ if (NULL != stream_)
+ stream_->SignalEvent.disconnect(this);
+ if (owned_)
+ delete stream_;
+ stream_ = stream;
+ owned_ = owned;
+ if (NULL != stream_)
+ stream_->SignalEvent.connect(this, &StreamAdapterInterface::OnEvent);
+}
+
+StreamInterface* StreamAdapterInterface::Detach() {
+ if (NULL != stream_)
+ stream_->SignalEvent.disconnect(this);
+ StreamInterface* stream = stream_;
+ stream_ = NULL;
+ return stream;
+}
+
+StreamAdapterInterface::~StreamAdapterInterface() {
+ if (owned_)
+ delete stream_;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamTap
+///////////////////////////////////////////////////////////////////////////////
+
+StreamTap::StreamTap(StreamInterface* stream, StreamInterface* tap)
+ : StreamAdapterInterface(stream), tap_(), tap_result_(SR_SUCCESS),
+ tap_error_(0) {
+ AttachTap(tap);
+}
+
+void StreamTap::AttachTap(StreamInterface* tap) {
+ tap_.reset(tap);
+}
+
+StreamInterface* StreamTap::DetachTap() {
+ return tap_.release();
+}
+
+StreamResult StreamTap::GetTapResult(int* error) {
+ if (error) {
+ *error = tap_error_;
+ }
+ return tap_result_;
+}
+
+StreamResult StreamTap::Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ size_t backup_read;
+ if (!read) {
+ read = &backup_read;
+ }
+ StreamResult res = StreamAdapterInterface::Read(buffer, buffer_len,
+ read, error);
+ if ((res == SR_SUCCESS) && (tap_result_ == SR_SUCCESS)) {
+ tap_result_ = tap_->WriteAll(buffer, *read, NULL, &tap_error_);
+ }
+ return res;
+}
+
+StreamResult StreamTap::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ size_t backup_written;
+ if (!written) {
+ written = &backup_written;
+ }
+ StreamResult res = StreamAdapterInterface::Write(data, data_len,
+ written, error);
+ if ((res == SR_SUCCESS) && (tap_result_ == SR_SUCCESS)) {
+ tap_result_ = tap_->WriteAll(data, *written, NULL, &tap_error_);
+ }
+ return res;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamSegment
+///////////////////////////////////////////////////////////////////////////////
+
+StreamSegment::StreamSegment(StreamInterface* stream)
+ : StreamAdapterInterface(stream), start_(SIZE_UNKNOWN), pos_(0),
+ length_(SIZE_UNKNOWN) {
+ // It's ok for this to fail, in which case start_ is left as SIZE_UNKNOWN.
+ stream->GetPosition(&start_);
+}
+
+StreamSegment::StreamSegment(StreamInterface* stream, size_t length)
+ : StreamAdapterInterface(stream), start_(SIZE_UNKNOWN), pos_(0),
+ length_(length) {
+ // It's ok for this to fail, in which case start_ is left as SIZE_UNKNOWN.
+ stream->GetPosition(&start_);
+}
+
+StreamResult StreamSegment::Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ if (SIZE_UNKNOWN != length_) {
+ if (pos_ >= length_)
+ return SR_EOS;
+ buffer_len = _min(buffer_len, length_ - pos_);
+ }
+ size_t backup_read;
+ if (!read) {
+ read = &backup_read;
+ }
+ StreamResult result = StreamAdapterInterface::Read(buffer, buffer_len,
+ read, error);
+ if (SR_SUCCESS == result) {
+ pos_ += *read;
+ }
+ return result;
+}
+
+bool StreamSegment::SetPosition(size_t position) {
+ if (SIZE_UNKNOWN == start_)
+ return false; // Not seekable
+ if ((SIZE_UNKNOWN != length_) && (position > length_))
+ return false; // Seek past end of segment
+ if (!StreamAdapterInterface::SetPosition(start_ + position))
+ return false;
+ pos_ = position;
+ return true;
+}
+
+bool StreamSegment::GetPosition(size_t* position) const {
+ if (SIZE_UNKNOWN == start_)
+ return false; // Not seekable
+ if (!StreamAdapterInterface::GetPosition(position))
+ return false;
+ if (position) {
+ ASSERT(*position >= start_);
+ *position -= start_;
+ }
+ return true;
+}
+
+bool StreamSegment::GetSize(size_t* size) const {
+ if (!StreamAdapterInterface::GetSize(size))
+ return false;
+ if (size) {
+ if (SIZE_UNKNOWN != start_) {
+ ASSERT(*size >= start_);
+ *size -= start_;
+ }
+ if (SIZE_UNKNOWN != length_) {
+ *size = _min(*size, length_);
+ }
+ }
+ return true;
+}
+
+bool StreamSegment::GetAvailable(size_t* size) const {
+ if (!StreamAdapterInterface::GetAvailable(size))
+ return false;
+ if (size && (SIZE_UNKNOWN != length_))
+ *size = _min(*size, length_ - pos_);
+ return true;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// NullStream
+///////////////////////////////////////////////////////////////////////////////
+
+NullStream::NullStream() {
+}
+
+NullStream::~NullStream() {
+}
+
+StreamState NullStream::GetState() const {
+ return SS_OPEN;
+}
+
+StreamResult NullStream::Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ if (error) *error = -1;
+ return SR_ERROR;
+}
+
+StreamResult NullStream::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ if (written) *written = data_len;
+ return SR_SUCCESS;
+}
+
+void NullStream::Close() {
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// FileStream
+///////////////////////////////////////////////////////////////////////////////
+
+FileStream::FileStream() : file_(NULL) {
+}
+
+FileStream::~FileStream() {
+ FileStream::Close();
+}
+
+bool FileStream::Open(const std::string& filename, const char* mode,
+ int* error) {
+ Close();
+#if defined(WEBRTC_WIN)
+ std::wstring wfilename;
+ if (Utf8ToWindowsFilename(filename, &wfilename)) {
+ file_ = _wfopen(wfilename.c_str(), ToUtf16(mode).c_str());
+ } else {
+ if (error) {
+ *error = -1;
+    }
+    return false;
+ }
+#else
+ file_ = fopen(filename.c_str(), mode);
+#endif
+ if (!file_ && error) {
+ *error = errno;
+ }
+ return (file_ != NULL);
+}
+
+bool FileStream::OpenShare(const std::string& filename, const char* mode,
+ int shflag, int* error) {
+ Close();
+#if defined(WEBRTC_WIN)
+ std::wstring wfilename;
+ if (Utf8ToWindowsFilename(filename, &wfilename)) {
+ file_ = _wfsopen(wfilename.c_str(), ToUtf16(mode).c_str(), shflag);
+ if (!file_ && error) {
+ *error = errno;
+ return false;
+ }
+ return file_ != NULL;
+ } else {
+ if (error) {
+ *error = -1;
+ }
+ return false;
+ }
+#else
+ return Open(filename, mode, error);
+#endif
+}
+
+bool FileStream::DisableBuffering() {
+ if (!file_)
+ return false;
+ return (setvbuf(file_, NULL, _IONBF, 0) == 0);
+}
+
+StreamState FileStream::GetState() const {
+ return (file_ == NULL) ? SS_CLOSED : SS_OPEN;
+}
+
+StreamResult FileStream::Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ if (!file_)
+ return SR_EOS;
+ size_t result = fread(buffer, 1, buffer_len, file_);
+ if ((result == 0) && (buffer_len > 0)) {
+ if (feof(file_))
+ return SR_EOS;
+ if (error)
+ *error = errno;
+ return SR_ERROR;
+ }
+ if (read)
+ *read = result;
+ return SR_SUCCESS;
+}
+
+StreamResult FileStream::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ if (!file_)
+ return SR_EOS;
+ size_t result = fwrite(data, 1, data_len, file_);
+ if ((result == 0) && (data_len > 0)) {
+ if (error)
+ *error = errno;
+ return SR_ERROR;
+ }
+ if (written)
+ *written = result;
+ return SR_SUCCESS;
+}
+
+void FileStream::Close() {
+ if (file_) {
+ DoClose();
+ file_ = NULL;
+ }
+}
+
+bool FileStream::SetPosition(size_t position) {
+ if (!file_)
+ return false;
+ return (fseek(file_, static_cast<int>(position), SEEK_SET) == 0);
+}
+
+bool FileStream::GetPosition(size_t* position) const {
+ ASSERT(NULL != position);
+ if (!file_)
+ return false;
+ long result = ftell(file_);
+ if (result < 0)
+ return false;
+ if (position)
+ *position = result;
+ return true;
+}
+
+bool FileStream::GetSize(size_t* size) const {
+ ASSERT(NULL != size);
+ if (!file_)
+ return false;
+ struct stat file_stats;
+ if (fstat(fileno(file_), &file_stats) != 0)
+ return false;
+ if (size)
+ *size = file_stats.st_size;
+ return true;
+}
+
+bool FileStream::GetAvailable(size_t* size) const {
+ ASSERT(NULL != size);
+ if (!GetSize(size))
+ return false;
+ long result = ftell(file_);
+ if (result < 0)
+ return false;
+ if (size)
+ *size -= result;
+ return true;
+}
+
+bool FileStream::ReserveSize(size_t size) {
+ // TODO: extend the file to the proper length
+ return true;
+}
+
+bool FileStream::GetSize(const std::string& filename, size_t* size) {
+ struct stat file_stats;
+ if (stat(filename.c_str(), &file_stats) != 0)
+ return false;
+ *size = file_stats.st_size;
+ return true;
+}
+
+bool FileStream::Flush() {
+ if (file_) {
+ return (0 == fflush(file_));
+ }
+  // Attempting to flush a stream that is not open.
+ ASSERT(false);
+ return false;
+}
+
+#if defined(WEBRTC_POSIX) && !defined(__native_client__)
+
+bool FileStream::TryLock() {
+ if (file_ == NULL) {
+ // Stream not open.
+ ASSERT(false);
+ return false;
+ }
+
+ return flock(fileno(file_), LOCK_EX|LOCK_NB) == 0;
+}
+
+bool FileStream::Unlock() {
+ if (file_ == NULL) {
+ // Stream not open.
+ ASSERT(false);
+ return false;
+ }
+
+ return flock(fileno(file_), LOCK_UN) == 0;
+}
+
+#endif
+
+void FileStream::DoClose() {
+ fclose(file_);
+}
+
+CircularFileStream::CircularFileStream(size_t max_size)
+ : max_write_size_(max_size),
+ position_(0),
+ marked_position_(max_size / 2),
+ last_write_position_(0),
+ read_segment_(READ_LATEST),
+ read_segment_available_(0) {
+}
+
+bool CircularFileStream::Open(
+ const std::string& filename, const char* mode, int* error) {
+ if (!FileStream::Open(filename.c_str(), mode, error))
+ return false;
+
+ if (strchr(mode, "r") != NULL) { // Opened in read mode.
+ // Check if the buffer has been overwritten and determine how to read the
+ // log in time sequence.
+ size_t file_size;
+ GetSize(&file_size);
+ if (file_size == position_) {
+ // The buffer has not been overwritten yet. Read 0 .. file_size
+ read_segment_ = READ_LATEST;
+ read_segment_available_ = file_size;
+ } else {
+      // The buffer has been overwritten. There are three segments: the first
+ // one is 0 .. marked_position_, which is the marked earliest log. The
+ // second one is position_ .. file_size, which is the middle log. The
+ // last one is marked_position_ .. position_, which is the latest log.
+ read_segment_ = READ_MARKED;
+ read_segment_available_ = marked_position_;
+ last_write_position_ = position_;
+ }
+
+ // Read from the beginning.
+ position_ = 0;
+ SetPosition(position_);
+ }
+
+ return true;
+}
+
+StreamResult CircularFileStream::Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ if (read_segment_available_ == 0) {
+ size_t file_size;
+ switch (read_segment_) {
+ case READ_MARKED: // Finished READ_MARKED and start READ_MIDDLE.
+ read_segment_ = READ_MIDDLE;
+ position_ = last_write_position_;
+ SetPosition(position_);
+ GetSize(&file_size);
+ read_segment_available_ = file_size - position_;
+ break;
+
+ case READ_MIDDLE: // Finished READ_MIDDLE and start READ_LATEST.
+ read_segment_ = READ_LATEST;
+ position_ = marked_position_;
+ SetPosition(position_);
+ read_segment_available_ = last_write_position_ - position_;
+ break;
+
+ default: // Finished READ_LATEST and return EOS.
+ return rtc::SR_EOS;
+ }
+ }
+
+ size_t local_read;
+ if (!read) read = &local_read;
+
+ size_t to_read = rtc::_min(buffer_len, read_segment_available_);
+ rtc::StreamResult result
+ = rtc::FileStream::Read(buffer, to_read, read, error);
+ if (result == rtc::SR_SUCCESS) {
+ read_segment_available_ -= *read;
+ position_ += *read;
+ }
+ return result;
+}
+
+StreamResult CircularFileStream::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ if (position_ >= max_write_size_) {
+ ASSERT(position_ == max_write_size_);
+ position_ = marked_position_;
+ SetPosition(position_);
+ }
+
+ size_t local_written;
+ if (!written) written = &local_written;
+
+ size_t to_eof = max_write_size_ - position_;
+ size_t to_write = rtc::_min(data_len, to_eof);
+ rtc::StreamResult result
+ = rtc::FileStream::Write(data, to_write, written, error);
+ if (result == rtc::SR_SUCCESS) {
+ position_ += *written;
+ }
+ return result;
+}
+
+AsyncWriteStream::~AsyncWriteStream() {
+ write_thread_->Clear(this, 0, NULL);
+ ClearBufferAndWrite();
+
+ CritScope cs(&crit_stream_);
+ stream_.reset();
+}
+
+// This is needed by some stream writers, such as RtpDumpWriter.
+bool AsyncWriteStream::GetPosition(size_t* position) const {
+ CritScope cs(&crit_stream_);
+ return stream_->GetPosition(position);
+}
+
+// This is needed by some stream writers, such as the plugin log writers.
+StreamResult AsyncWriteStream::Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ CritScope cs(&crit_stream_);
+ return stream_->Read(buffer, buffer_len, read, error);
+}
+
+void AsyncWriteStream::Close() {
+ if (state_ == SS_CLOSED) {
+ return;
+ }
+
+ write_thread_->Clear(this, 0, NULL);
+ ClearBufferAndWrite();
+
+ CritScope cs(&crit_stream_);
+ stream_->Close();
+ state_ = SS_CLOSED;
+}
+
+StreamResult AsyncWriteStream::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ if (state_ == SS_CLOSED) {
+ return SR_ERROR;
+ }
+
+ size_t previous_buffer_length = 0;
+ {
+ CritScope cs(&crit_buffer_);
+ previous_buffer_length = buffer_.length();
+ buffer_.AppendData(data, data_len);
+ }
+
+ if (previous_buffer_length == 0) {
+    // The buffer was empty, so no Post for this object is pending on
+    // write_thread_; post one now. If data was already buffered, the earlier
+    // Post is still outstanding and will pick up the new data, so there is no
+    // need to re-Post.
+ write_thread_->Post(this, 0, NULL);
+ }
+ // Return immediately, assuming that it works.
+ if (written) {
+ *written = data_len;
+ }
+ return SR_SUCCESS;
+}
+
+void AsyncWriteStream::OnMessage(rtc::Message* pmsg) {
+ ClearBufferAndWrite();
+}
+
+bool AsyncWriteStream::Flush() {
+ if (state_ == SS_CLOSED) {
+ return false;
+ }
+
+ ClearBufferAndWrite();
+
+ CritScope cs(&crit_stream_);
+ return stream_->Flush();
+}
+
+void AsyncWriteStream::ClearBufferAndWrite() {
+ Buffer to_write;
+ {
+ CritScope cs_buffer(&crit_buffer_);
+ buffer_.TransferTo(&to_write);
+ }
+
+ if (to_write.length() > 0) {
+ CritScope cs(&crit_stream_);
+ stream_->WriteAll(to_write.data(), to_write.length(), NULL, NULL);
+ }
+}
+
+#if defined(WEBRTC_POSIX) && !defined(__native_client__)
+
+// The FileStream destructor has to be redefined identically here; otherwise
+// the base-class destructor would end up invoking FileStream's DoClose()
+// (via Close()) instead of the subclass's.
+POpenStream::~POpenStream() {
+ POpenStream::Close();
+}
+
+bool POpenStream::Open(const std::string& subcommand,
+ const char* mode,
+ int* error) {
+ Close();
+ file_ = popen(subcommand.c_str(), mode);
+ if (file_ == NULL) {
+ if (error)
+ *error = errno;
+ return false;
+ }
+ return true;
+}
+
+bool POpenStream::OpenShare(const std::string& subcommand, const char* mode,
+ int shflag, int* error) {
+ return Open(subcommand, mode, error);
+}
+
+void POpenStream::DoClose() {
+ wait_status_ = pclose(file_);
+}
+
+#endif
+
+///////////////////////////////////////////////////////////////////////////////
+// MemoryStream
+///////////////////////////////////////////////////////////////////////////////
+
+MemoryStreamBase::MemoryStreamBase()
+ : buffer_(NULL), buffer_length_(0), data_length_(0),
+ seek_position_(0) {
+}
+
+StreamState MemoryStreamBase::GetState() const {
+ return SS_OPEN;
+}
+
+StreamResult MemoryStreamBase::Read(void* buffer, size_t bytes,
+ size_t* bytes_read, int* error) {
+ if (seek_position_ >= data_length_) {
+ return SR_EOS;
+ }
+ size_t available = data_length_ - seek_position_;
+ if (bytes > available) {
+ // Read partial buffer
+ bytes = available;
+ }
+ memcpy(buffer, &buffer_[seek_position_], bytes);
+ seek_position_ += bytes;
+ if (bytes_read) {
+ *bytes_read = bytes;
+ }
+ return SR_SUCCESS;
+}
+
+StreamResult MemoryStreamBase::Write(const void* buffer, size_t bytes,
+ size_t* bytes_written, int* error) {
+ size_t available = buffer_length_ - seek_position_;
+ if (0 == available) {
+ // Increase buffer size to the larger of:
+ // a) new position rounded up to next 256 bytes
+ // b) double the previous length
+ size_t new_buffer_length = _max(((seek_position_ + bytes) | 0xFF) + 1,
+ buffer_length_ * 2);
+ StreamResult result = DoReserve(new_buffer_length, error);
+ if (SR_SUCCESS != result) {
+ return result;
+ }
+ ASSERT(buffer_length_ >= new_buffer_length);
+ available = buffer_length_ - seek_position_;
+ }
+
+ if (bytes > available) {
+ bytes = available;
+ }
+ memcpy(&buffer_[seek_position_], buffer, bytes);
+ seek_position_ += bytes;
+ if (data_length_ < seek_position_) {
+ data_length_ = seek_position_;
+ }
+ if (bytes_written) {
+ *bytes_written = bytes;
+ }
+ return SR_SUCCESS;
+}
+
+void MemoryStreamBase::Close() {
+ // nothing to do
+}
+
+bool MemoryStreamBase::SetPosition(size_t position) {
+ if (position > data_length_)
+ return false;
+ seek_position_ = position;
+ return true;
+}
+
+bool MemoryStreamBase::GetPosition(size_t* position) const {
+ if (position)
+ *position = seek_position_;
+ return true;
+}
+
+bool MemoryStreamBase::GetSize(size_t* size) const {
+ if (size)
+ *size = data_length_;
+ return true;
+}
+
+bool MemoryStreamBase::GetAvailable(size_t* size) const {
+ if (size)
+ *size = data_length_ - seek_position_;
+ return true;
+}
+
+bool MemoryStreamBase::ReserveSize(size_t size) {
+ return (SR_SUCCESS == DoReserve(size, NULL));
+}
+
+StreamResult MemoryStreamBase::DoReserve(size_t size, int* error) {
+ return (buffer_length_ >= size) ? SR_SUCCESS : SR_EOS;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+MemoryStream::MemoryStream()
+ : buffer_alloc_(NULL) {
+}
+
+MemoryStream::MemoryStream(const char* data)
+ : buffer_alloc_(NULL) {
+ SetData(data, strlen(data));
+}
+
+MemoryStream::MemoryStream(const void* data, size_t length)
+ : buffer_alloc_(NULL) {
+ SetData(data, length);
+}
+
+MemoryStream::~MemoryStream() {
+ delete [] buffer_alloc_;
+}
+
+void MemoryStream::SetData(const void* data, size_t length) {
+ data_length_ = buffer_length_ = length;
+ delete [] buffer_alloc_;
+ buffer_alloc_ = new char[buffer_length_ + kAlignment];
+ buffer_ = reinterpret_cast<char*>(ALIGNP(buffer_alloc_, kAlignment));
+ memcpy(buffer_, data, data_length_);
+ seek_position_ = 0;
+}
+
+StreamResult MemoryStream::DoReserve(size_t size, int* error) {
+ if (buffer_length_ >= size)
+ return SR_SUCCESS;
+
+ if (char* new_buffer_alloc = new char[size + kAlignment]) {
+ char* new_buffer = reinterpret_cast<char*>(
+ ALIGNP(new_buffer_alloc, kAlignment));
+ memcpy(new_buffer, buffer_, data_length_);
+ delete [] buffer_alloc_;
+ buffer_alloc_ = new_buffer_alloc;
+ buffer_ = new_buffer;
+ buffer_length_ = size;
+ return SR_SUCCESS;
+ }
+
+ if (error) {
+ *error = ENOMEM;
+ }
+ return SR_ERROR;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+ExternalMemoryStream::ExternalMemoryStream() {
+}
+
+ExternalMemoryStream::ExternalMemoryStream(void* data, size_t length) {
+ SetData(data, length);
+}
+
+ExternalMemoryStream::~ExternalMemoryStream() {
+}
+
+void ExternalMemoryStream::SetData(void* data, size_t length) {
+ data_length_ = buffer_length_ = length;
+ buffer_ = static_cast<char*>(data);
+ seek_position_ = 0;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// FifoBuffer
+///////////////////////////////////////////////////////////////////////////////
+
+FifoBuffer::FifoBuffer(size_t size)
+ : state_(SS_OPEN), buffer_(new char[size]), buffer_length_(size),
+ data_length_(0), read_position_(0), owner_(Thread::Current()) {
+ // all events are done on the owner_ thread
+}
+
+FifoBuffer::FifoBuffer(size_t size, Thread* owner)
+ : state_(SS_OPEN), buffer_(new char[size]), buffer_length_(size),
+ data_length_(0), read_position_(0), owner_(owner) {
+ // all events are done on the owner_ thread
+}
+
+FifoBuffer::~FifoBuffer() {
+}
+
+bool FifoBuffer::GetBuffered(size_t* size) const {
+ CritScope cs(&crit_);
+ *size = data_length_;
+ return true;
+}
+
+bool FifoBuffer::SetCapacity(size_t size) {
+ CritScope cs(&crit_);
+ if (data_length_ > size) {
+ return false;
+ }
+
+ if (size != buffer_length_) {
+ char* buffer = new char[size];
+ const size_t copy = data_length_;
+ const size_t tail_copy = _min(copy, buffer_length_ - read_position_);
+ memcpy(buffer, &buffer_[read_position_], tail_copy);
+ memcpy(buffer + tail_copy, &buffer_[0], copy - tail_copy);
+ buffer_.reset(buffer);
+ read_position_ = 0;
+ buffer_length_ = size;
+ }
+ return true;
+}
+
+StreamResult FifoBuffer::ReadOffset(void* buffer, size_t bytes,
+ size_t offset, size_t* bytes_read) {
+ CritScope cs(&crit_);
+ return ReadOffsetLocked(buffer, bytes, offset, bytes_read);
+}
+
+StreamResult FifoBuffer::WriteOffset(const void* buffer, size_t bytes,
+ size_t offset, size_t* bytes_written) {
+ CritScope cs(&crit_);
+ return WriteOffsetLocked(buffer, bytes, offset, bytes_written);
+}
+
+StreamState FifoBuffer::GetState() const {
+ return state_;
+}
+
+StreamResult FifoBuffer::Read(void* buffer, size_t bytes,
+ size_t* bytes_read, int* error) {
+ CritScope cs(&crit_);
+ const bool was_writable = data_length_ < buffer_length_;
+ size_t copy = 0;
+ StreamResult result = ReadOffsetLocked(buffer, bytes, 0, &copy);
+
+ if (result == SR_SUCCESS) {
+ // If read was successful then adjust the read position and number of
+ // bytes buffered.
+ read_position_ = (read_position_ + copy) % buffer_length_;
+ data_length_ -= copy;
+ if (bytes_read) {
+ *bytes_read = copy;
+ }
+
+ // if we were full before, and now we're not, post an event
+ if (!was_writable && copy > 0) {
+ PostEvent(owner_, SE_WRITE, 0);
+ }
+ }
+ return result;
+}
+
+StreamResult FifoBuffer::Write(const void* buffer, size_t bytes,
+ size_t* bytes_written, int* error) {
+ CritScope cs(&crit_);
+
+ const bool was_readable = (data_length_ > 0);
+ size_t copy = 0;
+ StreamResult result = WriteOffsetLocked(buffer, bytes, 0, &copy);
+
+ if (result == SR_SUCCESS) {
+ // If write was successful then adjust the number of readable bytes.
+ data_length_ += copy;
+ if (bytes_written) {
+ *bytes_written = copy;
+ }
+
+ // if we didn't have any data to read before, and now we do, post an event
+ if (!was_readable && copy > 0) {
+ PostEvent(owner_, SE_READ, 0);
+ }
+ }
+ return result;
+}
+
+void FifoBuffer::Close() {
+ CritScope cs(&crit_);
+ state_ = SS_CLOSED;
+}
+
+const void* FifoBuffer::GetReadData(size_t* size) {
+ CritScope cs(&crit_);
+ *size = (read_position_ + data_length_ <= buffer_length_) ?
+ data_length_ : buffer_length_ - read_position_;
+ return &buffer_[read_position_];
+}
+
+void FifoBuffer::ConsumeReadData(size_t size) {
+ CritScope cs(&crit_);
+ ASSERT(size <= data_length_);
+ const bool was_writable = data_length_ < buffer_length_;
+ read_position_ = (read_position_ + size) % buffer_length_;
+ data_length_ -= size;
+ if (!was_writable && size > 0) {
+ PostEvent(owner_, SE_WRITE, 0);
+ }
+}
+
+void* FifoBuffer::GetWriteBuffer(size_t* size) {
+ CritScope cs(&crit_);
+ if (state_ == SS_CLOSED) {
+ return NULL;
+ }
+
+ // if empty, reset the write position to the beginning, so we can get
+ // the biggest possible block
+ if (data_length_ == 0) {
+ read_position_ = 0;
+ }
+
+ const size_t write_position = (read_position_ + data_length_)
+ % buffer_length_;
+ *size = (write_position > read_position_ || data_length_ == 0) ?
+ buffer_length_ - write_position : read_position_ - write_position;
+ return &buffer_[write_position];
+}
+
+void FifoBuffer::ConsumeWriteBuffer(size_t size) {
+ CritScope cs(&crit_);
+ ASSERT(size <= buffer_length_ - data_length_);
+ const bool was_readable = (data_length_ > 0);
+ data_length_ += size;
+ if (!was_readable && size > 0) {
+ PostEvent(owner_, SE_READ, 0);
+ }
+}
+
+bool FifoBuffer::GetWriteRemaining(size_t* size) const {
+ CritScope cs(&crit_);
+ *size = buffer_length_ - data_length_;
+ return true;
+}
+
+StreamResult FifoBuffer::ReadOffsetLocked(void* buffer,
+ size_t bytes,
+ size_t offset,
+ size_t* bytes_read) {
+ if (offset >= data_length_) {
+ return (state_ != SS_CLOSED) ? SR_BLOCK : SR_EOS;
+ }
+
+ const size_t available = data_length_ - offset;
+ const size_t read_position = (read_position_ + offset) % buffer_length_;
+ const size_t copy = _min(bytes, available);
+ const size_t tail_copy = _min(copy, buffer_length_ - read_position);
+ char* const p = static_cast<char*>(buffer);
+ memcpy(p, &buffer_[read_position], tail_copy);
+ memcpy(p + tail_copy, &buffer_[0], copy - tail_copy);
+
+ if (bytes_read) {
+ *bytes_read = copy;
+ }
+ return SR_SUCCESS;
+}
+
+StreamResult FifoBuffer::WriteOffsetLocked(const void* buffer,
+ size_t bytes,
+ size_t offset,
+ size_t* bytes_written) {
+ if (state_ == SS_CLOSED) {
+ return SR_EOS;
+ }
+
+ if (data_length_ + offset >= buffer_length_) {
+ return SR_BLOCK;
+ }
+
+ const size_t available = buffer_length_ - data_length_ - offset;
+ const size_t write_position = (read_position_ + data_length_ + offset)
+ % buffer_length_;
+ const size_t copy = _min(bytes, available);
+ const size_t tail_copy = _min(copy, buffer_length_ - write_position);
+ const char* const p = static_cast<const char*>(buffer);
+ memcpy(&buffer_[write_position], p, tail_copy);
+ memcpy(&buffer_[0], p + tail_copy, copy - tail_copy);
+
+ if (bytes_written) {
+ *bytes_written = copy;
+ }
+ return SR_SUCCESS;
+}
+
+
+
+///////////////////////////////////////////////////////////////////////////////
+// LoggingAdapter
+///////////////////////////////////////////////////////////////////////////////
+
+LoggingAdapter::LoggingAdapter(StreamInterface* stream, LoggingSeverity level,
+ const std::string& label, bool hex_mode)
+ : StreamAdapterInterface(stream), level_(level), hex_mode_(hex_mode) {
+ set_label(label);
+}
+
+void LoggingAdapter::set_label(const std::string& label) {
+ label_.assign("[");
+ label_.append(label);
+ label_.append("]");
+}
+
+StreamResult LoggingAdapter::Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ size_t local_read; if (!read) read = &local_read;
+ StreamResult result = StreamAdapterInterface::Read(buffer, buffer_len, read,
+ error);
+ if (result == SR_SUCCESS) {
+ LogMultiline(level_, label_.c_str(), true, buffer, *read, hex_mode_, &lms_);
+ }
+ return result;
+}
+
+StreamResult LoggingAdapter::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ size_t local_written;
+ if (!written) written = &local_written;
+ StreamResult result = StreamAdapterInterface::Write(data, data_len, written,
+ error);
+ if (result == SR_SUCCESS) {
+ LogMultiline(level_, label_.c_str(), false, data, *written, hex_mode_,
+ &lms_);
+ }
+ return result;
+}
+
+void LoggingAdapter::Close() {
+ LogMultiline(level_, label_.c_str(), false, NULL, 0, hex_mode_, &lms_);
+ LogMultiline(level_, label_.c_str(), true, NULL, 0, hex_mode_, &lms_);
+ LOG_V(level_) << label_ << " Closed locally";
+ StreamAdapterInterface::Close();
+}
+
+void LoggingAdapter::OnEvent(StreamInterface* stream, int events, int err) {
+ if (events & SE_OPEN) {
+ LOG_V(level_) << label_ << " Open";
+ } else if (events & SE_CLOSE) {
+ LogMultiline(level_, label_.c_str(), false, NULL, 0, hex_mode_, &lms_);
+ LogMultiline(level_, label_.c_str(), true, NULL, 0, hex_mode_, &lms_);
+ LOG_V(level_) << label_ << " Closed with error: " << err;
+ }
+ StreamAdapterInterface::OnEvent(stream, events, err);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// StringStream - Reads/Writes to an external std::string
+///////////////////////////////////////////////////////////////////////////////
+
+StringStream::StringStream(std::string& str)
+ : str_(str), read_pos_(0), read_only_(false) {
+}
+
+StringStream::StringStream(const std::string& str)
+ : str_(const_cast<std::string&>(str)), read_pos_(0), read_only_(true) {
+}
+
+StreamState StringStream::GetState() const {
+ return SS_OPEN;
+}
+
+StreamResult StringStream::Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ size_t available = _min(buffer_len, str_.size() - read_pos_);
+ if (!available)
+ return SR_EOS;
+ memcpy(buffer, str_.data() + read_pos_, available);
+ read_pos_ += available;
+ if (read)
+ *read = available;
+ return SR_SUCCESS;
+}
+
+StreamResult StringStream::Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ if (read_only_) {
+ if (error) {
+ *error = -1;
+ }
+ return SR_ERROR;
+ }
+ str_.append(static_cast<const char*>(data),
+ static_cast<const char*>(data) + data_len);
+ if (written)
+ *written = data_len;
+ return SR_SUCCESS;
+}
+
+void StringStream::Close() {
+}
+
+bool StringStream::SetPosition(size_t position) {
+ if (position > str_.size())
+ return false;
+ read_pos_ = position;
+ return true;
+}
+
+bool StringStream::GetPosition(size_t* position) const {
+ if (position)
+ *position = read_pos_;
+ return true;
+}
+
+bool StringStream::GetSize(size_t* size) const {
+ if (size)
+ *size = str_.size();
+ return true;
+}
+
+bool StringStream::GetAvailable(size_t* size) const {
+ if (size)
+ *size = str_.size() - read_pos_;
+ return true;
+}
+
+bool StringStream::ReserveSize(size_t size) {
+ if (read_only_)
+ return false;
+ str_.reserve(size);
+ return true;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamReference
+///////////////////////////////////////////////////////////////////////////////
+
+StreamReference::StreamReference(StreamInterface* stream)
+ : StreamAdapterInterface(stream, false) {
+ // owner set to false so the destructor does not free the stream.
+ stream_ref_count_ = new StreamRefCount(stream);
+}
+
+StreamInterface* StreamReference::NewReference() {
+ stream_ref_count_->AddReference();
+ return new StreamReference(stream_ref_count_, stream());
+}
+
+StreamReference::~StreamReference() {
+ stream_ref_count_->Release();
+}
+
+StreamReference::StreamReference(StreamRefCount* stream_ref_count,
+ StreamInterface* stream)
+ : StreamAdapterInterface(stream, false),
+ stream_ref_count_(stream_ref_count) {
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+StreamResult Flow(StreamInterface* source,
+ char* buffer, size_t buffer_len,
+ StreamInterface* sink,
+ size_t* data_len /* = NULL */) {
+ ASSERT(buffer_len > 0);
+
+ StreamResult result;
+ size_t count, read_pos, write_pos;
+ if (data_len) {
+ read_pos = *data_len;
+ } else {
+ read_pos = 0;
+ }
+
+ bool end_of_stream = false;
+ do {
+ // Read until buffer is full, end of stream, or error
+ while (!end_of_stream && (read_pos < buffer_len)) {
+ result = source->Read(buffer + read_pos, buffer_len - read_pos,
+ &count, NULL);
+ if (result == SR_EOS) {
+ end_of_stream = true;
+ } else if (result != SR_SUCCESS) {
+ if (data_len) {
+ *data_len = read_pos;
+ }
+ return result;
+ } else {
+ read_pos += count;
+ }
+ }
+
+ // Write until buffer is empty, or error (including end of stream)
+ write_pos = 0;
+ while (write_pos < read_pos) {
+ result = sink->Write(buffer + write_pos, read_pos - write_pos,
+ &count, NULL);
+ if (result != SR_SUCCESS) {
+ if (data_len) {
+ *data_len = read_pos - write_pos;
+ if (write_pos > 0) {
+ memmove(buffer, buffer + write_pos, *data_len);
+ }
+ }
+ return result;
+ }
+ write_pos += count;
+ }
+
+ read_pos = 0;
+ } while (!end_of_stream);
+
+ if (data_len) {
+ *data_len = 0;
+ }
+ return SR_SUCCESS;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/stream.h b/chromium/third_party/webrtc/base/stream.h
new file mode 100644
index 00000000000..00ded372c8c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/stream.h
@@ -0,0 +1,820 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_STREAM_H_
+#define WEBRTC_BASE_STREAM_H_
+
+#include <stdio.h>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sigslot.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamInterface is a generic asynchronous stream interface, supporting read,
+// write, and close operations, and asynchronous signalling of state changes.
+// The interface is designed with file, memory, and socket implementations in
+// mind. Some implementations offer extended operations, such as seeking.
+///////////////////////////////////////////////////////////////////////////////
+
+// The following enumerations are declared outside of the StreamInterface
+// class for brevity in use.
+
+// The SS_OPENING state indicates that the stream will signal open or closed
+// in the future.
+enum StreamState { SS_CLOSED, SS_OPENING, SS_OPEN };
+
+// Stream read/write methods return this value to indicate various success
+// and failure conditions described below.
+enum StreamResult { SR_ERROR, SR_SUCCESS, SR_BLOCK, SR_EOS };
+
+// StreamEvents are used to asynchronously signal state transitions. The flags
+// may be combined.
+// SE_OPEN: The stream has transitioned to the SS_OPEN state
+// SE_CLOSE: The stream has transitioned to the SS_CLOSED state
+// SE_READ: Data is available, so Read is likely to not return SR_BLOCK
+// SE_WRITE: Data can be written, so Write is likely to not return SR_BLOCK
+enum StreamEvent { SE_OPEN = 1, SE_READ = 2, SE_WRITE = 4, SE_CLOSE = 8 };
+
+class Thread;
+
+struct StreamEventData : public MessageData {
+ int events, error;
+ StreamEventData(int ev, int er) : events(ev), error(er) { }
+};
+
+class StreamInterface : public MessageHandler {
+ public:
+ enum {
+ MSG_POST_EVENT = 0xF1F1, MSG_MAX = MSG_POST_EVENT
+ };
+
+ virtual ~StreamInterface();
+
+ virtual StreamState GetState() const = 0;
+
+ // Read attempts to fill buffer of size buffer_len. Write attempts to send
+  // data_len bytes stored in data. The variables read and written are set
+  // only on SR_SUCCESS (see below). Likewise, error is only set on SR_ERROR.
+ // Read and Write return a value indicating:
+ // SR_ERROR: an error occurred, which is returned in a non-null error
+ // argument. Interpretation of the error requires knowledge of the
+ // stream's concrete type, which limits its usefulness.
+  //  SR_SUCCESS: some number of bytes were successfully read or written,
+  //    which is returned in the non-null read/written argument.
+ // SR_BLOCK: the stream is in non-blocking mode, and the operation would
+ // block, or the stream is in SS_OPENING state.
+ // SR_EOS: the end-of-stream has been reached, or the stream is in the
+ // SS_CLOSED state.
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) = 0;
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error) = 0;
+ // Attempt to transition to the SS_CLOSED state. SE_CLOSE will not be
+ // signalled as a result of this call.
+ virtual void Close() = 0;
+
+ // Streams may signal one or more StreamEvents to indicate state changes.
+  // The first argument identifies the stream on which the state change occurred.
+ // The second argument is a bit-wise combination of StreamEvents.
+ // If SE_CLOSE is signalled, then the third argument is the associated error
+ // code. Otherwise, the value is undefined.
+ // Note: Not all streams will support asynchronous event signalling. However,
+ // SS_OPENING and SR_BLOCK returned from stream member functions imply that
+ // certain events will be raised in the future.
+ sigslot::signal3<StreamInterface*, int, int> SignalEvent;
+
+ // Like calling SignalEvent, but posts a message to the specified thread,
+ // which will call SignalEvent. This helps unroll the stack and prevent
+ // re-entrancy.
+ void PostEvent(Thread* t, int events, int err);
+ // Like the aforementioned method, but posts to the current thread.
+ void PostEvent(int events, int err);
+
+ //
+ // OPTIONAL OPERATIONS
+ //
+ // Not all implementations will support the following operations. In general,
+  // a stream will only support an operation if it is reasonably efficient to do
+ // so. For example, while a socket could buffer incoming data to support
+ // seeking, it will not do so. Instead, a buffering stream adapter should
+ // be used.
+ //
+ // Even though several of these operations are related, you should
+ // always use whichever operation is most relevant. For example, you may
+ // be tempted to use GetSize() and GetPosition() to deduce the result of
+ // GetAvailable(). However, a stream which is read-once may support the
+ // latter operation but not the former.
+ //
+
+ // The following four methods are used to avoid copying data multiple times.
+
+ // GetReadData returns a pointer to a buffer which is owned by the stream.
+ // The buffer contains data_len bytes. NULL is returned if no data is
+ // available, or if the method fails. If the caller processes the data, it
+ // must call ConsumeReadData with the number of processed bytes. GetReadData
+ // does not require a matching call to ConsumeReadData if the data is not
+ // processed. Read and ConsumeReadData invalidate the buffer returned by
+ // GetReadData.
+ virtual const void* GetReadData(size_t* data_len) { return NULL; }
+ virtual void ConsumeReadData(size_t used) {}
+
+ // GetWriteBuffer returns a pointer to a buffer which is owned by the stream.
+ // The buffer has a capacity of buf_len bytes. NULL is returned if there is
+  // no buffer available, or if the method fails. The caller may write data to
+  // the buffer, and then call ConsumeWriteBuffer with the number of bytes
+  // written. GetWriteBuffer does not require a matching call to
+  // ConsumeWriteBuffer if no data is written. Write, ForceWrite, and
+  // ConsumeWriteBuffer invalidate the buffer returned by GetWriteBuffer.
+ // TODO: Allow the caller to specify a minimum buffer size. If the specified
+ // amount of buffer is not yet available, return NULL and Signal SE_WRITE
+ // when it is available. If the requested amount is too large, return an
+ // error.
+ virtual void* GetWriteBuffer(size_t* buf_len) { return NULL; }
+ virtual void ConsumeWriteBuffer(size_t used) {}
+
+ // Write data_len bytes found in data, circumventing any throttling which
+  // could cause SR_BLOCK to be returned. Returns true if all the data
+ // was written. Otherwise, the method is unsupported, or an unrecoverable
+ // error occurred, and the error value is set. This method should be used
+ // sparingly to write critical data which should not be throttled. A stream
+ // which cannot circumvent its blocking constraints should not implement this
+ // method.
+ // NOTE: This interface is being considered experimentally at the moment. It
+ // would be used by JUDP and BandwidthStream as a way to circumvent certain
+ // soft limits in writing.
+ //virtual bool ForceWrite(const void* data, size_t data_len, int* error) {
+ // if (error) *error = -1;
+ // return false;
+ //}
+
+ // Seek to a byte offset from the beginning of the stream. Returns false if
+ // the stream does not support seeking, or cannot seek to the specified
+ // position.
+ virtual bool SetPosition(size_t position) { return false; }
+
+ // Get the byte offset of the current position from the start of the stream.
+ // Returns false if the position is not known.
+ virtual bool GetPosition(size_t* position) const { return false; }
+
+ // Get the byte length of the entire stream. Returns false if the length
+ // is not known.
+ virtual bool GetSize(size_t* size) const { return false; }
+
+ // Return the number of Read()-able bytes remaining before end-of-stream.
+ // Returns false if not known.
+ virtual bool GetAvailable(size_t* size) const { return false; }
+
+ // Return the number of Write()-able bytes remaining before end-of-stream.
+ // Returns false if not known.
+ virtual bool GetWriteRemaining(size_t* size) const { return false; }
+
+ // Return true if flush is successful.
+ virtual bool Flush() { return false; }
+
+ // Communicates the amount of data which will be written to the stream. The
+  // stream may choose to preallocate memory to accommodate this data. The
+  // stream may return false to indicate that there is not enough room (i.e.,
+ // Write will return SR_EOS/SR_ERROR at some point). Note that calling this
+ // function should not affect the existing state of data in the stream.
+ virtual bool ReserveSize(size_t size) { return true; }
+
+ //
+ // CONVENIENCE METHODS
+ //
+ // These methods are implemented in terms of other methods, for convenience.
+ //
+
+ // Seek to the start of the stream.
+ inline bool Rewind() { return SetPosition(0); }
+
+ // WriteAll is a helper function which repeatedly calls Write until all the
+ // data is written, or something other than SR_SUCCESS is returned. Note that
+ // unlike Write, the argument 'written' is always set, and may be non-zero
+ // on results other than SR_SUCCESS. The remaining arguments have the
+ // same semantics as Write.
+ StreamResult WriteAll(const void* data, size_t data_len,
+ size_t* written, int* error);
+
+  // Similar to WriteAll. Calls Read until buffer_len bytes have been read, or
+ // until a non-SR_SUCCESS result is returned. 'read' is always set.
+ StreamResult ReadAll(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+
+ // ReadLine is a helper function which repeatedly calls Read until it hits
+ // the end-of-line character, or something other than SR_SUCCESS.
+ // TODO: this is too inefficient to keep here. Break this out into a buffered
+ // readline object or adapter
+ StreamResult ReadLine(std::string* line);
+
+ protected:
+ StreamInterface();
+
+ // MessageHandler Interface
+ virtual void OnMessage(Message* msg);
+
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(StreamInterface);
+};
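+
+// Usage sketch (illustrative): a simple read loop over the result codes
+// documented above. Assumes |stream| is already readable; SR_BLOCK simply
+// ends the loop here, while a real caller would wait for SE_READ and retry.
+// The helper name is only an example.
+inline StreamResult ExampleReadToString(StreamInterface* stream,
+                                        std::string* out) {
+  char buf[1024];
+  for (;;) {
+    size_t read = 0;
+    int error = 0;
+    StreamResult r = stream->Read(buf, sizeof(buf), &read, &error);
+    if (r != SR_SUCCESS)
+      return r;  // SR_EOS on normal completion, SR_BLOCK/SR_ERROR otherwise.
+    out->append(buf, read);
+  }
+}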
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamAdapterInterface is a convenient base-class for adapting a stream.
+// By default, all operations are pass-through. Override the methods for which
+// you require adaptation. Streams should really be reference-counted.
+// In the meantime, use the owned flag to indicate whether the adapter should
+// own the adapted stream.
+///////////////////////////////////////////////////////////////////////////////
+
+class StreamAdapterInterface : public StreamInterface,
+ public sigslot::has_slots<> {
+ public:
+ explicit StreamAdapterInterface(StreamInterface* stream, bool owned = true);
+
+ // Core Stream Interface
+ virtual StreamState GetState() const {
+ return stream_->GetState();
+ }
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ return stream_->Read(buffer, buffer_len, read, error);
+ }
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ return stream_->Write(data, data_len, written, error);
+ }
+ virtual void Close() {
+ stream_->Close();
+ }
+
+ // Optional Stream Interface
+ /* Note: Many stream adapters were implemented prior to this Read/Write
+ interface. Therefore, a simple pass through of data in those cases may
+ be broken. At a later time, we should do a once-over pass of all
+ adapters, and make them compliant with these interfaces, after which this
+ code can be uncommented.
+ virtual const void* GetReadData(size_t* data_len) {
+ return stream_->GetReadData(data_len);
+ }
+ virtual void ConsumeReadData(size_t used) {
+ stream_->ConsumeReadData(used);
+ }
+
+ virtual void* GetWriteBuffer(size_t* buf_len) {
+ return stream_->GetWriteBuffer(buf_len);
+ }
+ virtual void ConsumeWriteBuffer(size_t used) {
+ stream_->ConsumeWriteBuffer(used);
+ }
+ */
+
+ /* Note: This interface is currently undergoing evaluation.
+ virtual bool ForceWrite(const void* data, size_t data_len, int* error) {
+ return stream_->ForceWrite(data, data_len, error);
+ }
+ */
+
+ virtual bool SetPosition(size_t position) {
+ return stream_->SetPosition(position);
+ }
+ virtual bool GetPosition(size_t* position) const {
+ return stream_->GetPosition(position);
+ }
+ virtual bool GetSize(size_t* size) const {
+ return stream_->GetSize(size);
+ }
+ virtual bool GetAvailable(size_t* size) const {
+ return stream_->GetAvailable(size);
+ }
+ virtual bool GetWriteRemaining(size_t* size) const {
+ return stream_->GetWriteRemaining(size);
+ }
+ virtual bool ReserveSize(size_t size) {
+ return stream_->ReserveSize(size);
+ }
+ virtual bool Flush() {
+ return stream_->Flush();
+ }
+
+ void Attach(StreamInterface* stream, bool owned = true);
+ StreamInterface* Detach();
+
+ protected:
+ virtual ~StreamAdapterInterface();
+
+ // Note that the adapter presents itself as the origin of the stream events,
+ // since users of the adapter may not recognize the adapted object.
+ virtual void OnEvent(StreamInterface* stream, int events, int err) {
+ SignalEvent(this, events, err);
+ }
+ StreamInterface* stream() { return stream_; }
+
+ private:
+ StreamInterface* stream_;
+ bool owned_;
+ DISALLOW_EVIL_CONSTRUCTORS(StreamAdapterInterface);
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamTap is a non-modifying, pass-through adapter, which copies all data
+// in either direction to the tap. Note that errors or blocking on writing to
+// the tap will prevent further tap writes from occurring.
+///////////////////////////////////////////////////////////////////////////////
+
+class StreamTap : public StreamAdapterInterface {
+ public:
+ explicit StreamTap(StreamInterface* stream, StreamInterface* tap);
+
+ void AttachTap(StreamInterface* tap);
+ StreamInterface* DetachTap();
+ StreamResult GetTapResult(int* error);
+
+ // StreamAdapterInterface Interface
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+
+ private:
+ scoped_ptr<StreamInterface> tap_;
+ StreamResult tap_result_;
+ int tap_error_;
+ DISALLOW_EVIL_CONSTRUCTORS(StreamTap);
+};
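+
+// Usage sketch (illustrative): copy everything written to |stream| into |tap|
+// as well, e.g. into a capture stream kept for debugging. With the default
+// owned flag the tap adopts |stream|, and |tap| is held by the internal
+// scoped_ptr, so both are released when |tapped| goes out of scope.
+inline StreamResult ExampleWriteThroughTap(StreamInterface* stream,
+                                           StreamInterface* tap,
+                                           const void* data, size_t len) {
+  StreamTap tapped(stream, tap);
+  size_t written = 0;
+  int error = 0;
+  StreamResult res = tapped.WriteAll(data, len, &written, &error);
+  int tap_error = 0;
+  if (tapped.GetTapResult(&tap_error) != SR_SUCCESS) {
+    LOG(LS_WARNING) << "Tap copy failed, error=" << tap_error;
+  }
+  return res;
+}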
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamSegment adapts a read stream, to expose a subset of the adapted
+// stream's data. This is useful for cases where a stream contains multiple
+// documents concatenated together. StreamSegment can expose a subset of
+// the data as an independent stream, including support for rewinding and
+// seeking.
+///////////////////////////////////////////////////////////////////////////////
+
+class StreamSegment : public StreamAdapterInterface {
+ public:
+ // The current position of the adapted stream becomes the beginning of the
+ // segment. If a length is specified, it bounds the length of the segment.
+ explicit StreamSegment(StreamInterface* stream);
+ explicit StreamSegment(StreamInterface* stream, size_t length);
+
+ // StreamAdapterInterface Interface
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+ virtual bool SetPosition(size_t position);
+ virtual bool GetPosition(size_t* position) const;
+ virtual bool GetSize(size_t* size) const;
+ virtual bool GetAvailable(size_t* size) const;
+
+ private:
+ size_t start_, pos_, length_;
+ DISALLOW_EVIL_CONSTRUCTORS(StreamSegment);
+};
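+
+// Usage sketch (illustrative): expose the next |length| bytes of |source| as
+// an independent, seekable stream, e.g. one document out of several that were
+// concatenated together. The segment adopts |source| with the default owned
+// flag; the helper name is only an example.
+inline StreamResult ExampleReadSegment(StreamInterface* source, size_t length,
+                                       std::string* out) {
+  StreamSegment segment(source, length);
+  char buf[256];
+  size_t read = 0;
+  StreamResult r;
+  while ((r = segment.Read(buf, sizeof(buf), &read, NULL)) == SR_SUCCESS) {
+    out->append(buf, read);  // Positions are relative to the segment start.
+  }
+  return r;  // SR_EOS once |length| bytes have been read or |source| ends.
+}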
+
+///////////////////////////////////////////////////////////////////////////////
+// NullStream gives errors on read, and silently discards all written data.
+///////////////////////////////////////////////////////////////////////////////
+
+class NullStream : public StreamInterface {
+ public:
+ NullStream();
+ virtual ~NullStream();
+
+ // StreamInterface Interface
+ virtual StreamState GetState() const;
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+ virtual void Close();
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// FileStream is a simple implementation of a StreamInterface, which does not
+// support asynchronous notification.
+///////////////////////////////////////////////////////////////////////////////
+
+class FileStream : public StreamInterface {
+ public:
+ FileStream();
+ virtual ~FileStream();
+
+ // The semantics of filename and mode are the same as stdio's fopen
+ virtual bool Open(const std::string& filename, const char* mode, int* error);
+ virtual bool OpenShare(const std::string& filename, const char* mode,
+ int shflag, int* error);
+
+ // By default, reads and writes are buffered for efficiency. Disabling
+ // buffering causes writes to block until the bytes on disk are updated.
+ virtual bool DisableBuffering();
+
+ virtual StreamState GetState() const;
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+ virtual void Close();
+ virtual bool SetPosition(size_t position);
+ virtual bool GetPosition(size_t* position) const;
+ virtual bool GetSize(size_t* size) const;
+ virtual bool GetAvailable(size_t* size) const;
+ virtual bool ReserveSize(size_t size);
+
+ virtual bool Flush();
+
+#if defined(WEBRTC_POSIX) && !defined(__native_client__)
+  // Tries to acquire an exclusive lock on the file.
+ // Use OpenShare(...) on win32 to get similar functionality.
+ bool TryLock();
+ bool Unlock();
+#endif
+
+ // Note: Deprecated in favor of Filesystem::GetFileSize().
+ static bool GetSize(const std::string& filename, size_t* size);
+
+ protected:
+ virtual void DoClose();
+
+ FILE* file_;
+
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(FileStream);
+};
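+
+// Usage sketch (illustrative): write a blob to disk using stdio-style mode
+// strings. The helper name and error handling here are only an example.
+inline bool ExampleWriteFile(const std::string& path,
+                             const void* data, size_t len) {
+  FileStream file;
+  int error = 0;
+  if (!file.Open(path, "wb", &error)) {
+    LOG(LS_ERROR) << "Open failed, error=" << error;
+    return false;
+  }
+  size_t written = 0;
+  StreamResult res = file.WriteAll(data, len, &written, &error);
+  file.Close();  // Also happens automatically in the destructor.
+  return res == SR_SUCCESS;
+}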
+
+// A stream that caps the output at a certain size, dropping content from the
+// middle of the logical stream and maintaining equal parts of the start/end of
+// the logical stream.
+class CircularFileStream : public FileStream {
+ public:
+ explicit CircularFileStream(size_t max_size);
+
+ virtual bool Open(const std::string& filename, const char* mode, int* error);
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+
+ private:
+ enum ReadSegment {
+ READ_MARKED, // Read 0 .. marked_position_
+ READ_MIDDLE, // Read position_ .. file_size
+ READ_LATEST, // Read marked_position_ .. position_ if the buffer was
+ // overwritten or 0 .. position_ otherwise.
+ };
+
+ size_t max_write_size_;
+ size_t position_;
+ size_t marked_position_;
+ size_t last_write_position_;
+ ReadSegment read_segment_;
+ size_t read_segment_available_;
+};
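+
+// Usage sketch (illustrative): keep a log file capped at a few kilobytes.
+// Once the cap is reached the middle of the log is overwritten while the
+// start and the most recent output survive; reopening the same object for
+// reading then replays the surviving data in time order.
+inline bool ExampleCappedLogRoundTrip(const std::string& path,
+                                      const std::string& text,
+                                      std::string* replay) {
+  CircularFileStream log(4096);
+  int error = 0;
+  size_t count = 0;
+  if (!log.Open(path, "wb", &error))
+    return false;
+  log.WriteAll(text.data(), text.size(), &count, &error);
+  log.Close();
+  if (!log.Open(path, "rb", &error))
+    return false;
+  char buf[512];
+  while (log.Read(buf, sizeof(buf), &count, &error) == SR_SUCCESS)
+    replay->append(buf, count);
+  return true;
+}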
+
+// A stream which pushes writes onto a separate thread and
+// returns from the write call immediately.
+class AsyncWriteStream : public StreamInterface {
+ public:
+ // Takes ownership of the stream, but not the thread.
+ AsyncWriteStream(StreamInterface* stream, rtc::Thread* write_thread)
+ : stream_(stream),
+ write_thread_(write_thread),
+ state_(stream ? stream->GetState() : SS_CLOSED) {
+ }
+
+ virtual ~AsyncWriteStream();
+
+ // StreamInterface Interface
+ virtual StreamState GetState() const { return state_; }
+ // This is needed by some stream writers, such as RtpDumpWriter.
+ virtual bool GetPosition(size_t* position) const;
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+ virtual void Close();
+ virtual bool Flush();
+
+ protected:
+ // From MessageHandler
+ virtual void OnMessage(rtc::Message* pmsg);
+ virtual void ClearBufferAndWrite();
+
+ private:
+ rtc::scoped_ptr<StreamInterface> stream_;
+ Thread* write_thread_;
+ StreamState state_;
+ Buffer buffer_;
+ mutable CriticalSection crit_stream_;
+ CriticalSection crit_buffer_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(AsyncWriteStream);
+};
+
+
+#if defined(WEBRTC_POSIX) && !defined(__native_client__)
+// A FileStream that is actually not a file, but the output or input of a
+// sub-command. See "man 3 popen" for documentation of the underlying OS popen()
+// function.
+class POpenStream : public FileStream {
+ public:
+ POpenStream() : wait_status_(-1) {}
+ virtual ~POpenStream();
+
+ virtual bool Open(const std::string& subcommand, const char* mode,
+ int* error);
+ // Same as Open(). shflag is ignored.
+ virtual bool OpenShare(const std::string& subcommand, const char* mode,
+ int shflag, int* error);
+
+ // Returns the wait status from the last Close() of an Open()'ed stream, or
+ // -1 if no Open()+Close() has been done on this object. Meaning of the number
+ // is documented in "man 2 wait".
+ int GetWaitStatus() const { return wait_status_; }
+
+ protected:
+ virtual void DoClose();
+
+ private:
+ int wait_status_;
+};
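+
+// Usage sketch (illustrative): capture the standard output of a sub-command,
+// e.g. ExampleRunCommand("ls /tmp", &out). Returns true only if the command
+// ran and exited with status 0; the helper name is only an example.
+inline bool ExampleRunCommand(const std::string& command, std::string* output) {
+  POpenStream proc;
+  int error = 0;
+  if (!proc.Open(command, "r", &error))
+    return false;
+  char buf[512];
+  size_t read = 0;
+  while (proc.Read(buf, sizeof(buf), &read, &error) == SR_SUCCESS)
+    output->append(buf, read);
+  proc.Close();
+  return proc.GetWaitStatus() == 0;  // See "man 2 wait" for the encoding.
+}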
+#endif // WEBRTC_POSIX
+
+///////////////////////////////////////////////////////////////////////////////
+// MemoryStream is a simple implementation of a StreamInterface over in-memory
+// data. Data is read and written at the current seek position. Reads return
+// end-of-stream when they reach the end of data. Writes actually extend the
+// end of data mark.
+///////////////////////////////////////////////////////////////////////////////
+
+class MemoryStreamBase : public StreamInterface {
+ public:
+ virtual StreamState GetState() const;
+ virtual StreamResult Read(void* buffer, size_t bytes, size_t* bytes_read,
+ int* error);
+ virtual StreamResult Write(const void* buffer, size_t bytes,
+ size_t* bytes_written, int* error);
+ virtual void Close();
+ virtual bool SetPosition(size_t position);
+ virtual bool GetPosition(size_t* position) const;
+ virtual bool GetSize(size_t* size) const;
+ virtual bool GetAvailable(size_t* size) const;
+ virtual bool ReserveSize(size_t size);
+
+ char* GetBuffer() { return buffer_; }
+ const char* GetBuffer() const { return buffer_; }
+
+ protected:
+ MemoryStreamBase();
+
+ virtual StreamResult DoReserve(size_t size, int* error);
+
+  // Invariant: 0 <= seek_position_ <= data_length_ <= buffer_length_
+ char* buffer_;
+ size_t buffer_length_;
+ size_t data_length_;
+ size_t seek_position_;
+
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(MemoryStreamBase);
+};
+
+// MemoryStream dynamically resizes to accommodate written data.
+
+class MemoryStream : public MemoryStreamBase {
+ public:
+ MemoryStream();
+ explicit MemoryStream(const char* data); // Calls SetData(data, strlen(data))
+ MemoryStream(const void* data, size_t length); // Calls SetData(data, length)
+ virtual ~MemoryStream();
+
+ void SetData(const void* data, size_t length);
+
+ protected:
+ virtual StreamResult DoReserve(size_t size, int* error);
+ // Memory Streams are aligned for efficiency.
+ static const int kAlignment = 16;
+ char* buffer_alloc_;
+};
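+
+// Usage sketch (illustrative): treat an in-memory blob as a stream; reading
+// past the data returns SR_EOS, while writing past it grows the buffer.
+inline bool ExampleMemoryRoundTrip(std::string* out) {
+  MemoryStream stream("hello");  // Equivalent to SetData("hello", 5).
+  char buf[8];
+  size_t count = 0;
+  if (stream.Read(buf, sizeof(buf), &count, NULL) != SR_SUCCESS)
+    return false;
+  out->assign(buf, count);  // "hello" (a partial read of 5 bytes).
+  return stream.Write(" more", 5, &count, NULL) == SR_SUCCESS;
+}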
+
+// ExternalMemoryStream adapts an external memory buffer, so writes which would
+// extend past the end of the buffer will return end-of-stream.
+
+class ExternalMemoryStream : public MemoryStreamBase {
+ public:
+ ExternalMemoryStream();
+ ExternalMemoryStream(void* data, size_t length);
+ virtual ~ExternalMemoryStream();
+
+ void SetData(void* data, size_t length);
+};
+
+// FifoBuffer allows for efficient, thread-safe buffering of data between
+// writer and reader. As the data can wrap around the end of the buffer,
+// MemoryStreamBase can't help us here.
+
+class FifoBuffer : public StreamInterface {
+ public:
+ // Creates a FIFO buffer with the specified capacity.
+ explicit FifoBuffer(size_t length);
+ // Creates a FIFO buffer with the specified capacity and owner
+ FifoBuffer(size_t length, Thread* owner);
+ virtual ~FifoBuffer();
+ // Gets the amount of data currently readable from the buffer.
+ bool GetBuffered(size_t* data_len) const;
+ // Resizes the buffer to the specified capacity. Fails if data_length_ > size
+ bool SetCapacity(size_t length);
+
+  // Reads into |buffer| at an offset, in bytes, from the current read
+  // position. This method does not adjust the read position or the number of
+  // available bytes; the caller must call ConsumeReadData() to do that.
+ StreamResult ReadOffset(void* buffer, size_t bytes, size_t offset,
+ size_t* bytes_read);
+
+  // Writes |buffer| at an offset, in bytes, from the current write position.
+  // This method does not adjust the number of buffered bytes; the caller must
+  // call ConsumeWriteBuffer() to do that.
+ StreamResult WriteOffset(const void* buffer, size_t bytes, size_t offset,
+ size_t* bytes_written);
+
+ // StreamInterface methods
+ virtual StreamState GetState() const;
+ virtual StreamResult Read(void* buffer, size_t bytes,
+ size_t* bytes_read, int* error);
+ virtual StreamResult Write(const void* buffer, size_t bytes,
+ size_t* bytes_written, int* error);
+ virtual void Close();
+ virtual const void* GetReadData(size_t* data_len);
+ virtual void ConsumeReadData(size_t used);
+ virtual void* GetWriteBuffer(size_t* buf_len);
+ virtual void ConsumeWriteBuffer(size_t used);
+ virtual bool GetWriteRemaining(size_t* size) const;
+
+ private:
+ // Helper method that implements ReadOffset. Caller must acquire a lock
+ // when calling this method.
+ StreamResult ReadOffsetLocked(void* buffer, size_t bytes, size_t offset,
+ size_t* bytes_read);
+
+ // Helper method that implements WriteOffset. Caller must acquire a lock
+ // when calling this method.
+ StreamResult WriteOffsetLocked(const void* buffer, size_t bytes,
+ size_t offset, size_t* bytes_written);
+
+ StreamState state_; // keeps the opened/closed state of the stream
+ scoped_ptr<char[]> buffer_; // the allocated buffer
+ size_t buffer_length_; // size of the allocated buffer
+ size_t data_length_; // amount of readable data in the buffer
+ size_t read_position_; // offset to the readable data
+ Thread* owner_; // stream callbacks are dispatched on this thread
+ mutable CriticalSection crit_; // object lock
+ DISALLOW_EVIL_CONSTRUCTORS(FifoBuffer);
+};
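+
+// Usage sketch (illustrative): move data through a FifoBuffer without an
+// extra copy on the consumer side, using the direct-access methods described
+// for StreamInterface above. A plain byte loop stands in for memcpy so the
+// sketch needs no extra includes.
+inline size_t ExampleFifoDirectAccess(FifoBuffer* fifo,
+                                      const char* data, size_t len) {
+  // Producer: write straight into the FIFO's own storage.
+  size_t room = 0;
+  char* wbuf = static_cast<char*>(fifo->GetWriteBuffer(&room));
+  if (!wbuf)
+    return 0;
+  const size_t n = (len < room) ? len : room;
+  for (size_t i = 0; i < n; ++i)
+    wbuf[i] = data[i];
+  fifo->ConsumeWriteBuffer(n);
+
+  // Consumer: inspect the contiguous readable region, then mark it consumed.
+  size_t avail = 0;
+  if (fifo->GetReadData(&avail) != NULL)
+    fifo->ConsumeReadData(avail);
+  return avail;
+}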
+
+///////////////////////////////////////////////////////////////////////////////
+
+class LoggingAdapter : public StreamAdapterInterface {
+ public:
+ LoggingAdapter(StreamInterface* stream, LoggingSeverity level,
+ const std::string& label, bool hex_mode = false);
+
+ void set_label(const std::string& label);
+
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+ virtual void Close();
+
+ protected:
+ virtual void OnEvent(StreamInterface* stream, int events, int err);
+
+ private:
+ LoggingSeverity level_;
+ std::string label_;
+ bool hex_mode_;
+ LogMultilineState lms_;
+
+ DISALLOW_EVIL_CONSTRUCTORS(LoggingAdapter);
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// StringStream - Reads/Writes to an external std::string
+///////////////////////////////////////////////////////////////////////////////
+
+class StringStream : public StreamInterface {
+ public:
+ explicit StringStream(std::string& str);
+ explicit StringStream(const std::string& str);
+
+ virtual StreamState GetState() const;
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error);
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error);
+ virtual void Close();
+ virtual bool SetPosition(size_t position);
+ virtual bool GetPosition(size_t* position) const;
+ virtual bool GetSize(size_t* size) const;
+ virtual bool GetAvailable(size_t* size) const;
+ virtual bool ReserveSize(size_t size);
+
+ private:
+ std::string& str_;
+ size_t read_pos_;
+ bool read_only_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamReference - A reference counting stream adapter
+///////////////////////////////////////////////////////////////////////////////
+
+// Keep in mind that the streams and adapters defined in this file are
+// not thread-safe, so this has limited uses.
+
+// A StreamRefCount holds the reference count and a pointer to the
+// wrapped stream. It deletes the wrapped stream when there are no
+// more references. We can then have multiple StreamReference
+// instances pointing to one StreamRefCount, all wrapping the same
+// stream.
+
+class StreamReference : public StreamAdapterInterface {
+ class StreamRefCount;
+ public:
+ // Constructor for the first reference to a stream
+ // Note: get more references through NewReference(). Use this
+ // constructor only once on a given stream.
+ explicit StreamReference(StreamInterface* stream);
+ StreamInterface* GetStream() { return stream(); }
+ StreamInterface* NewReference();
+ virtual ~StreamReference();
+
+ private:
+ class StreamRefCount {
+ public:
+ explicit StreamRefCount(StreamInterface* stream)
+ : stream_(stream), ref_count_(1) {
+ }
+ void AddReference() {
+ CritScope lock(&cs_);
+ ++ref_count_;
+ }
+ void Release() {
+ int ref_count;
+ { // Atomic ops would have been a better fit here.
+ CritScope lock(&cs_);
+ ref_count = --ref_count_;
+ }
+ if (ref_count == 0) {
+ delete stream_;
+ delete this;
+ }
+ }
+ private:
+ StreamInterface* stream_;
+ int ref_count_;
+ CriticalSection cs_;
+ DISALLOW_EVIL_CONSTRUCTORS(StreamRefCount);
+ };
+
+ // Constructor for adding references
+ explicit StreamReference(StreamRefCount* stream_ref_count,
+ StreamInterface* stream);
+
+ StreamRefCount* stream_ref_count_;
+ DISALLOW_EVIL_CONSTRUCTORS(StreamReference);
+};
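+
+// Usage sketch (illustrative): share one underlying stream between two
+// holders; the wrapped stream is deleted only when the last reference is
+// released. The streams themselves remain non-thread-safe.
+inline void ExampleShareStream(StreamInterface* stream) {
+  StreamReference* first = new StreamReference(stream);
+  StreamInterface* second = first->NewReference();
+  delete first;   // |stream| stays alive; |second| still refers to it.
+  delete second;  // Last reference released; |stream| is deleted here.
+}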
+
+///////////////////////////////////////////////////////////////////////////////
+
+// Flow attempts to move bytes from source to sink via buffer of size
+// buffer_len. The function returns SR_SUCCESS when source reaches
+// end-of-stream (returns SR_EOS) and all the data has been written
+// successfully to sink. Alternately, if source returns SR_BLOCK or SR_ERROR,
+// or if sink returns SR_BLOCK, SR_ERROR, or SR_EOS, then the function
+// immediately returns with the unexpected StreamResult value.
+// data_len is the length of the valid data in buffer. On error, it is set to
+// the number of bytes that were read from source but could not be written to
+// sink. As an input parameter, it indicates how many bytes in buffer are
+// already valid and should be written to sink first (see the sketch below).
+StreamResult Flow(StreamInterface* source,
+ char* buffer, size_t buffer_len,
+ StreamInterface* sink, size_t* data_len = NULL);
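+//
+// Illustrative usage sketch (assumes |source| and |sink| are open
+// StreamInterface instances, e.g. FifoBuffer objects):
+//   char scratch[4096];
+//   size_t pending = 0;
+//   StreamResult res = Flow(source, scratch, sizeof(scratch), sink, &pending);
+//   // res == SR_SUCCESS once source hit SR_EOS and everything reached sink;
+//   // on any other result, |pending| bytes sit in scratch, read from source
+//   // but not yet written to sink.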
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_STREAM_H_
diff --git a/chromium/third_party/webrtc/base/stream_unittest.cc b/chromium/third_party/webrtc/base/stream_unittest.cc
new file mode 100644
index 00000000000..a6664d71621
--- /dev/null
+++ b/chromium/third_party/webrtc/base/stream_unittest.cc
@@ -0,0 +1,492 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// TestStream
+///////////////////////////////////////////////////////////////////////////////
+
+class TestStream : public StreamInterface {
+ public:
+ TestStream() : pos_(0) { }
+
+ virtual StreamState GetState() const { return SS_OPEN; }
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ unsigned char* uc_buffer = static_cast<unsigned char*>(buffer);
+ for (size_t i = 0; i < buffer_len; ++i) {
+ uc_buffer[i] = static_cast<unsigned char>(pos_++);
+ }
+ if (read)
+ *read = buffer_len;
+ return SR_SUCCESS;
+ }
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ if (error)
+ *error = -1;
+ return SR_ERROR;
+ }
+ virtual void Close() { }
+ virtual bool SetPosition(size_t position) {
+ pos_ = position;
+ return true;
+ }
+ virtual bool GetPosition(size_t* position) const {
+ if (position) *position = pos_;
+ return true;
+ }
+ virtual bool GetSize(size_t* size) const {
+ return false;
+ }
+ virtual bool GetAvailable(size_t* size) const {
+ return false;
+ }
+
+ private:
+ size_t pos_;
+};
+
+bool VerifyTestBuffer(unsigned char* buffer, size_t len,
+ unsigned char value) {
+ bool passed = true;
+ for (size_t i = 0; i < len; ++i) {
+ if (buffer[i] != value++) {
+ passed = false;
+ break;
+ }
+ }
+ // Ensure that we don't pass again without re-writing
+ memset(buffer, 0, len);
+ return passed;
+}
+
+void SeekTest(StreamInterface* stream, const unsigned char value) {
+ size_t bytes;
+ unsigned char buffer[13] = { 0 };
+ const size_t kBufSize = sizeof(buffer);
+
+ EXPECT_EQ(stream->Read(buffer, kBufSize, &bytes, NULL), SR_SUCCESS);
+ EXPECT_EQ(bytes, kBufSize);
+ EXPECT_TRUE(VerifyTestBuffer(buffer, kBufSize, value));
+ EXPECT_TRUE(stream->GetPosition(&bytes));
+ EXPECT_EQ(13U, bytes);
+
+ EXPECT_TRUE(stream->SetPosition(7));
+
+ EXPECT_EQ(stream->Read(buffer, kBufSize, &bytes, NULL), SR_SUCCESS);
+ EXPECT_EQ(bytes, kBufSize);
+ EXPECT_TRUE(VerifyTestBuffer(buffer, kBufSize, value + 7));
+ EXPECT_TRUE(stream->GetPosition(&bytes));
+ EXPECT_EQ(20U, bytes);
+}
+
+TEST(StreamSegment, TranslatesPosition) {
+ TestStream* test = new TestStream;
+ // Verify behavior of original stream
+ SeekTest(test, 0);
+ StreamSegment* segment = new StreamSegment(test);
+ // Verify behavior of adapted stream (all values offset by 20)
+ SeekTest(segment, 20);
+ delete segment;
+}
+
+TEST(StreamSegment, SupportsArtificialTermination) {
+ TestStream* test = new TestStream;
+
+ size_t bytes;
+ unsigned char buffer[5000] = { 0 };
+ const size_t kBufSize = sizeof(buffer);
+
+ {
+ StreamInterface* stream = test;
+
+ // Read a lot of bytes
+ EXPECT_EQ(stream->Read(buffer, kBufSize, &bytes, NULL), SR_SUCCESS);
+ EXPECT_EQ(bytes, kBufSize);
+ EXPECT_TRUE(VerifyTestBuffer(buffer, kBufSize, 0));
+
+ // Test seeking far ahead
+ EXPECT_TRUE(stream->SetPosition(12345));
+
+ // Read a bunch more bytes
+ EXPECT_EQ(stream->Read(buffer, kBufSize, &bytes, NULL), SR_SUCCESS);
+ EXPECT_EQ(bytes, kBufSize);
+ EXPECT_TRUE(VerifyTestBuffer(buffer, kBufSize, 12345 % 256));
+ }
+
+ // Create a segment of test stream in range [100,600)
+ EXPECT_TRUE(test->SetPosition(100));
+ StreamSegment* segment = new StreamSegment(test, 500);
+
+ {
+ StreamInterface* stream = segment;
+
+ EXPECT_EQ(stream->Read(buffer, kBufSize, &bytes, NULL), SR_SUCCESS);
+ EXPECT_EQ(500U, bytes);
+ EXPECT_TRUE(VerifyTestBuffer(buffer, 500, 100));
+ EXPECT_EQ(stream->Read(buffer, kBufSize, &bytes, NULL), SR_EOS);
+
+ // Test seeking past "end" of stream
+ EXPECT_FALSE(stream->SetPosition(12345));
+ EXPECT_FALSE(stream->SetPosition(501));
+
+ // Test seeking to end (edge case)
+ EXPECT_TRUE(stream->SetPosition(500));
+ EXPECT_EQ(stream->Read(buffer, kBufSize, &bytes, NULL), SR_EOS);
+
+ // Test seeking to start
+ EXPECT_TRUE(stream->SetPosition(0));
+ EXPECT_EQ(stream->Read(buffer, kBufSize, &bytes, NULL), SR_SUCCESS);
+ EXPECT_EQ(500U, bytes);
+ EXPECT_TRUE(VerifyTestBuffer(buffer, 500, 100));
+ EXPECT_EQ(stream->Read(buffer, kBufSize, &bytes, NULL), SR_EOS);
+ }
+
+ delete segment;
+}
+
+TEST(FifoBufferTest, TestAll) {
+ const size_t kSize = 16;
+ const char in[kSize * 2 + 1] = "0123456789ABCDEFGHIJKLMNOPQRSTUV";
+ char out[kSize * 2];
+ void* p;
+ const void* q;
+ size_t bytes;
+ FifoBuffer buf(kSize);
+ StreamInterface* stream = &buf;
+
+ // Test assumptions about base state
+ EXPECT_EQ(SS_OPEN, stream->GetState());
+ EXPECT_EQ(SR_BLOCK, stream->Read(out, kSize, &bytes, NULL));
+ EXPECT_TRUE(NULL != stream->GetReadData(&bytes));
+ EXPECT_EQ((size_t)0, bytes);
+ stream->ConsumeReadData(0);
+ EXPECT_TRUE(NULL != stream->GetWriteBuffer(&bytes));
+ EXPECT_EQ(kSize, bytes);
+ stream->ConsumeWriteBuffer(0);
+
+ // Try a full write
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize, &bytes, NULL));
+ EXPECT_EQ(kSize, bytes);
+
+ // Try a write that should block
+ EXPECT_EQ(SR_BLOCK, stream->Write(in, kSize, &bytes, NULL));
+
+ // Try a full read
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize, &bytes, NULL));
+ EXPECT_EQ(kSize, bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize));
+
+ // Try a read that should block
+ EXPECT_EQ(SR_BLOCK, stream->Read(out, kSize, &bytes, NULL));
+
+ // Try a too-big write
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize * 2, &bytes, NULL));
+ EXPECT_EQ(bytes, kSize);
+
+ // Try a too-big read
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize * 2, &bytes, NULL));
+ EXPECT_EQ(kSize, bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize));
+
+ // Try some small writes and reads
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize / 2));
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize / 2));
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize / 2));
+
+ // Try wraparound reads and writes in the following pattern
+ // WWWWWWWWWWWW.... 0123456789AB....
+ // RRRRRRRRXXXX.... ........89AB....
+ // WWWW....XXXXWWWW 4567....89AB0123
+ // XXXX....RRRRXXXX 4567........0123
+ // XXXXWWWWWWWWXXXX 4567012345670123
+ // RRRRXXXXXXXXRRRR ....01234567....
+ // ....RRRRRRRR.... ................
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize * 3 / 4, &bytes, NULL));
+ EXPECT_EQ(kSize * 3 / 4, bytes);
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize / 2));
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize / 4, &bytes, NULL));
+ EXPECT_EQ(kSize / 4 , bytes);
+ EXPECT_EQ(0, memcmp(in + kSize / 2, out, kSize / 4));
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2 , bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize / 2));
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(kSize / 2 , bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize / 2));
+
+ // Use GetWriteBuffer to reset the read_position for the next tests
+ stream->GetWriteBuffer(&bytes);
+ stream->ConsumeWriteBuffer(0);
+
+ // Try using GetReadData to do a full read
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize, &bytes, NULL));
+ q = stream->GetReadData(&bytes);
+ EXPECT_TRUE(NULL != q);
+ EXPECT_EQ(kSize, bytes);
+ EXPECT_EQ(0, memcmp(q, in, kSize));
+ stream->ConsumeReadData(kSize);
+ EXPECT_EQ(SR_BLOCK, stream->Read(out, kSize, &bytes, NULL));
+
+ // Try using GetReadData to do some small reads
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize, &bytes, NULL));
+ q = stream->GetReadData(&bytes);
+ EXPECT_TRUE(NULL != q);
+ EXPECT_EQ(kSize, bytes);
+ EXPECT_EQ(0, memcmp(q, in, kSize / 2));
+ stream->ConsumeReadData(kSize / 2);
+ q = stream->GetReadData(&bytes);
+ EXPECT_TRUE(NULL != q);
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(0, memcmp(q, in + kSize / 2, kSize / 2));
+ stream->ConsumeReadData(kSize / 2);
+ EXPECT_EQ(SR_BLOCK, stream->Read(out, kSize, &bytes, NULL));
+
+ // Try using GetReadData in a wraparound case
+ // WWWWWWWWWWWWWWWW 0123456789ABCDEF
+ // RRRRRRRRRRRRXXXX ............CDEF
+ // WWWWWWWW....XXXX 01234567....CDEF
+ // ............RRRR 01234567........
+ // RRRRRRRR........ ................
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize, &bytes, NULL));
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize * 3 / 4, &bytes, NULL));
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize / 2, &bytes, NULL));
+ q = stream->GetReadData(&bytes);
+ EXPECT_TRUE(NULL != q);
+ EXPECT_EQ(kSize / 4, bytes);
+ EXPECT_EQ(0, memcmp(q, in + kSize * 3 / 4, kSize / 4));
+ stream->ConsumeReadData(kSize / 4);
+ q = stream->GetReadData(&bytes);
+ EXPECT_TRUE(NULL != q);
+ EXPECT_EQ(kSize / 2, bytes);
+ EXPECT_EQ(0, memcmp(q, in, kSize / 2));
+ stream->ConsumeReadData(kSize / 2);
+
+ // Use GetWriteBuffer to reset the read_position for the next tests
+ stream->GetWriteBuffer(&bytes);
+ stream->ConsumeWriteBuffer(0);
+
+ // Try using GetWriteBuffer to do a full write
+ p = stream->GetWriteBuffer(&bytes);
+ EXPECT_TRUE(NULL != p);
+ EXPECT_EQ(kSize, bytes);
+ memcpy(p, in, kSize);
+ stream->ConsumeWriteBuffer(kSize);
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize, &bytes, NULL));
+ EXPECT_EQ(kSize, bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize));
+
+ // Try using GetWriteBuffer to do some small writes
+ p = stream->GetWriteBuffer(&bytes);
+ EXPECT_TRUE(NULL != p);
+ EXPECT_EQ(kSize, bytes);
+ memcpy(p, in, kSize / 2);
+ stream->ConsumeWriteBuffer(kSize / 2);
+ p = stream->GetWriteBuffer(&bytes);
+ EXPECT_TRUE(NULL != p);
+ EXPECT_EQ(kSize / 2, bytes);
+ memcpy(p, in + kSize / 2, kSize / 2);
+ stream->ConsumeWriteBuffer(kSize / 2);
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize, &bytes, NULL));
+ EXPECT_EQ(kSize, bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize));
+
+ // Try using GetWriteBuffer in a wraparound case
+ // WWWWWWWWWWWW.... 0123456789AB....
+ // RRRRRRRRXXXX.... ........89AB....
+ // ........XXXXWWWW ........89AB0123
+ // WWWW....XXXXXXXX 4567....89AB0123
+ // RRRR....RRRRRRRR ................
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize * 3 / 4, &bytes, NULL));
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize / 2, &bytes, NULL));
+ p = stream->GetWriteBuffer(&bytes);
+ EXPECT_TRUE(NULL != p);
+ EXPECT_EQ(kSize / 4, bytes);
+ memcpy(p, in, kSize / 4);
+ stream->ConsumeWriteBuffer(kSize / 4);
+ p = stream->GetWriteBuffer(&bytes);
+ EXPECT_TRUE(NULL != p);
+ EXPECT_EQ(kSize / 2, bytes);
+ memcpy(p, in + kSize / 4, kSize / 4);
+ stream->ConsumeWriteBuffer(kSize / 4);
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize * 3 / 4, &bytes, NULL));
+ EXPECT_EQ(kSize * 3 / 4, bytes);
+ EXPECT_EQ(0, memcmp(in + kSize / 2, out, kSize / 4));
+ EXPECT_EQ(0, memcmp(in, out + kSize / 4, kSize / 4));
+
+ // Check that the stream is now empty
+ EXPECT_EQ(SR_BLOCK, stream->Read(out, kSize, &bytes, NULL));
+
+ // Try growing the buffer
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize, &bytes, NULL));
+ EXPECT_EQ(kSize, bytes);
+ EXPECT_TRUE(buf.SetCapacity(kSize * 2));
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in + kSize, kSize, &bytes, NULL));
+ EXPECT_EQ(kSize, bytes);
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize * 2, &bytes, NULL));
+ EXPECT_EQ(kSize * 2, bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize * 2));
+
+ // Try shrinking the buffer
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize, &bytes, NULL));
+ EXPECT_EQ(kSize, bytes);
+ EXPECT_TRUE(buf.SetCapacity(kSize));
+ EXPECT_EQ(SR_BLOCK, stream->Write(in, kSize, &bytes, NULL));
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize, &bytes, NULL));
+ EXPECT_EQ(kSize, bytes);
+ EXPECT_EQ(0, memcmp(in, out, kSize));
+
+ // Write to the stream, close it, read the remaining bytes
+ EXPECT_EQ(SR_SUCCESS, stream->Write(in, kSize / 2, &bytes, NULL));
+ stream->Close();
+ EXPECT_EQ(SS_CLOSED, stream->GetState());
+ EXPECT_EQ(SR_EOS, stream->Write(in, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(SR_SUCCESS, stream->Read(out, kSize / 2, &bytes, NULL));
+ EXPECT_EQ(0, memcmp(in, out, kSize / 2));
+ EXPECT_EQ(SR_EOS, stream->Read(out, kSize / 2, &bytes, NULL));
+}
+
+TEST(FifoBufferTest, FullBufferCheck) {
+ FifoBuffer buff(10);
+ buff.ConsumeWriteBuffer(10);
+
+ size_t free;
+ EXPECT_TRUE(buff.GetWriteBuffer(&free) != NULL);
+ EXPECT_EQ(0U, free);
+}
+
+TEST(FifoBufferTest, WriteOffsetAndReadOffset) {
+ const size_t kSize = 16;
+ const char in[kSize * 2 + 1] = "0123456789ABCDEFGHIJKLMNOPQRSTUV";
+ char out[kSize * 2];
+ FifoBuffer buf(kSize);
+
+ // Write 14 bytes.
+ EXPECT_EQ(SR_SUCCESS, buf.Write(in, 14, NULL, NULL));
+
+ // Make sure data is in |buf|.
+ size_t buffered;
+ EXPECT_TRUE(buf.GetBuffered(&buffered));
+ EXPECT_EQ(14u, buffered);
+
+ // Read 10 bytes.
+ buf.ConsumeReadData(10);
+
+  // There should now be 12 bytes of available space.
+ size_t remaining;
+ EXPECT_TRUE(buf.GetWriteRemaining(&remaining));
+ EXPECT_EQ(12u, remaining);
+
+ // Write at offset 12, this should fail.
+ EXPECT_EQ(SR_BLOCK, buf.WriteOffset(in, 10, 12, NULL));
+
+ // Write 8 bytes at offset 4, this wraps around the buffer.
+ EXPECT_EQ(SR_SUCCESS, buf.WriteOffset(in, 8, 4, NULL));
+
+  // The amount of available space remains the same until we call
+  // ConsumeWriteBuffer().
+ EXPECT_TRUE(buf.GetWriteRemaining(&remaining));
+ EXPECT_EQ(12u, remaining);
+ buf.ConsumeWriteBuffer(12);
+
+  // There are 4 bytes bypassed and 4 bytes not yet read, so skip them and
+  // verify the 8 bytes written.
+ size_t read;
+ EXPECT_EQ(SR_SUCCESS, buf.ReadOffset(out, 8, 8, &read));
+ EXPECT_EQ(8u, read);
+ EXPECT_EQ(0, memcmp(out, in, 8));
+
+ // There should still be 16 bytes available for reading.
+ EXPECT_TRUE(buf.GetBuffered(&buffered));
+ EXPECT_EQ(16u, buffered);
+
+ // Read at offset 16, this should fail since we don't have that much data.
+ EXPECT_EQ(SR_BLOCK, buf.ReadOffset(out, 10, 16, NULL));
+}
+
+TEST(AsyncWriteTest, TestWrite) {
+ FifoBuffer* buf = new FifoBuffer(100);
+ AsyncWriteStream stream(buf, Thread::Current());
+ EXPECT_EQ(SS_OPEN, stream.GetState());
+
+ // Write "abc". Will go to the logging thread, which is the current
+ // thread.
+ stream.Write("abc", 3, NULL, NULL);
+ char bytes[100];
+ size_t count;
+ // Messages on the thread's queue haven't been processed, so "abc"
+ // hasn't been written yet.
+ EXPECT_NE(SR_SUCCESS, buf->ReadOffset(&bytes, 3, 0, &count));
+ // Now we process the messages on the thread's queue, so "abc" has
+ // been written.
+ EXPECT_TRUE_WAIT(SR_SUCCESS == buf->ReadOffset(&bytes, 3, 0, &count), 10);
+ EXPECT_EQ(3u, count);
+ EXPECT_EQ(0, memcmp(bytes, "abc", 3));
+
+ // Write "def". Will go to the logging thread, which is the current
+ // thread.
+ stream.Write("d", 1, &count, NULL);
+ stream.Write("e", 1, &count, NULL);
+ stream.Write("f", 1, &count, NULL);
+ EXPECT_EQ(1u, count);
+ // Messages on the thread's queue haven't been processed, so "def"
+ // hasn't been written yet.
+ EXPECT_NE(SR_SUCCESS, buf->ReadOffset(&bytes, 3, 3, &count));
+ // Flush() causes the message to be processed, so "def" has now been
+ // written.
+ stream.Flush();
+ EXPECT_EQ(SR_SUCCESS, buf->ReadOffset(&bytes, 3, 3, &count));
+ EXPECT_EQ(3u, count);
+ EXPECT_EQ(0, memcmp(bytes, "def", 3));
+
+ // Write "xyz". Will go to the logging thread, which is the current
+ // thread.
+ stream.Write("xyz", 3, &count, NULL);
+ EXPECT_EQ(3u, count);
+ // Messages on the thread's queue haven't been processed, so "xyz"
+ // hasn't been written yet.
+ EXPECT_NE(SR_SUCCESS, buf->ReadOffset(&bytes, 3, 6, &count));
+ // Close() causes the message to be processed, so "xyz" has now been
+ // written.
+ stream.Close();
+ EXPECT_EQ(SR_SUCCESS, buf->ReadOffset(&bytes, 3, 6, &count));
+ EXPECT_EQ(3u, count);
+ EXPECT_EQ(0, memcmp(bytes, "xyz", 3));
+ EXPECT_EQ(SS_CLOSED, stream.GetState());
+
+  // It's closed, so the writes should fail.
+  EXPECT_EQ(SR_ERROR, stream.Write("000", 3, NULL, NULL));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/stringdigest.h b/chromium/third_party/webrtc/base/stringdigest.h
new file mode 100644
index 00000000000..7cf6f329af1
--- /dev/null
+++ b/chromium/third_party/webrtc/base/stringdigest.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_STRINGDIGEST_H_
+#define WEBRTC_BASE_STRINGDIGEST_H_
+
+// TODO: Update remaining callers to use messagedigest.h instead
+#include "webrtc/base/messagedigest.h"
+
+#endif // WEBRTC_BASE_STRINGDIGEST_H_
diff --git a/chromium/third_party/webrtc/base/stringencode.cc b/chromium/third_party/webrtc/base/stringencode.cc
new file mode 100644
index 00000000000..1e0a1493cdd
--- /dev/null
+++ b/chromium/third_party/webrtc/base/stringencode.cc
@@ -0,0 +1,657 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/stringencode.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/stringutils.h"
+
+namespace rtc {
+
+/////////////////////////////////////////////////////////////////////////////
+// String Encoding Utilities
+/////////////////////////////////////////////////////////////////////////////
+
+size_t escape(char * buffer, size_t buflen,
+ const char * source, size_t srclen,
+ const char * illegal, char escape) {
+ ASSERT(NULL != buffer); // TODO: estimate output size
+ if (buflen <= 0)
+ return 0;
+
+ size_t srcpos = 0, bufpos = 0;
+ while ((srcpos < srclen) && (bufpos + 1 < buflen)) {
+ char ch = source[srcpos++];
+ if ((ch == escape) || ::strchr(illegal, ch)) {
+ if (bufpos + 2 >= buflen)
+ break;
+ buffer[bufpos++] = escape;
+ }
+ buffer[bufpos++] = ch;
+ }
+
+ buffer[bufpos] = '\0';
+ return bufpos;
+}
+
+size_t unescape(char * buffer, size_t buflen,
+ const char * source, size_t srclen,
+ char escape) {
+ ASSERT(NULL != buffer); // TODO: estimate output size
+ if (buflen <= 0)
+ return 0;
+
+ size_t srcpos = 0, bufpos = 0;
+ while ((srcpos < srclen) && (bufpos + 1 < buflen)) {
+ char ch = source[srcpos++];
+ if ((ch == escape) && (srcpos < srclen)) {
+ ch = source[srcpos++];
+ }
+ buffer[bufpos++] = ch;
+ }
+ buffer[bufpos] = '\0';
+ return bufpos;
+}
+
+size_t encode(char * buffer, size_t buflen,
+ const char * source, size_t srclen,
+ const char * illegal, char escape) {
+ ASSERT(NULL != buffer); // TODO: estimate output size
+ if (buflen <= 0)
+ return 0;
+
+ size_t srcpos = 0, bufpos = 0;
+ while ((srcpos < srclen) && (bufpos + 1 < buflen)) {
+ char ch = source[srcpos++];
+ if ((ch != escape) && !::strchr(illegal, ch)) {
+ buffer[bufpos++] = ch;
+ } else if (bufpos + 3 >= buflen) {
+ break;
+ } else {
+ buffer[bufpos+0] = escape;
+ buffer[bufpos+1] = hex_encode((static_cast<unsigned char>(ch) >> 4) & 0xF);
+ buffer[bufpos+2] = hex_encode((static_cast<unsigned char>(ch) ) & 0xF);
+ bufpos += 3;
+ }
+ }
+ buffer[bufpos] = '\0';
+ return bufpos;
+}
+
+size_t decode(char * buffer, size_t buflen,
+ const char * source, size_t srclen,
+ char escape) {
+ if (buflen <= 0)
+ return 0;
+
+ unsigned char h1, h2;
+ size_t srcpos = 0, bufpos = 0;
+ while ((srcpos < srclen) && (bufpos + 1 < buflen)) {
+ char ch = source[srcpos++];
+ if ((ch == escape)
+ && (srcpos + 1 < srclen)
+ && hex_decode(source[srcpos], &h1)
+ && hex_decode(source[srcpos+1], &h2)) {
+ buffer[bufpos++] = (h1 << 4) | h2;
+ srcpos += 2;
+ } else {
+ buffer[bufpos++] = ch;
+ }
+ }
+ buffer[bufpos] = '\0';
+ return bufpos;
+}
+
+const char* unsafe_filename_characters() {
+ // It might be better to have a single specification which is the union of
+ // all operating systems, unless one system is overly restrictive.
+#if defined(WEBRTC_WIN)
+ return "\\/:*?\"<>|";
+#else // !WEBRTC_WIN
+ // TODO
+ ASSERT(false);
+ return "";
+#endif // !WEBRTC_WIN
+}
+
+const unsigned char URL_UNSAFE = 0x1; // 0-33 "#$%&+,/:;<=>?@[\]^`{|} 127
+const unsigned char XML_UNSAFE = 0x2; // "&'<>
+const unsigned char HTML_UNSAFE = 0x2; // "&'<>
+
+// ! " # $ % & ' ( ) * + , - . / 0 1 2 3 4 6 5 7 8 9 : ; < = > ?
+//@ A B C D E F G H I J K L M N O P Q R S T U V W X Y Z [ \ ] ^ _
+//` a b c d e f g h i j k l m n o p q r s t u v w x y z { | } ~
+
+const unsigned char ASCII_CLASS[128] = {
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,0,3,1,1,1,3,2,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,3,1,3,1,
+ 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,
+ 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,
+};
+
+size_t url_encode(char * buffer, size_t buflen,
+ const char * source, size_t srclen) {
+ if (NULL == buffer)
+ return srclen * 3 + 1;
+ if (buflen <= 0)
+ return 0;
+
+ size_t srcpos = 0, bufpos = 0;
+ while ((srcpos < srclen) && (bufpos + 1 < buflen)) {
+ unsigned char ch = source[srcpos++];
+ if ((ch < 128) && (ASCII_CLASS[ch] & URL_UNSAFE)) {
+ if (bufpos + 3 >= buflen) {
+ break;
+ }
+ buffer[bufpos+0] = '%';
+ buffer[bufpos+1] = hex_encode((ch >> 4) & 0xF);
+ buffer[bufpos+2] = hex_encode((ch ) & 0xF);
+ bufpos += 3;
+ } else {
+ buffer[bufpos++] = ch;
+ }
+ }
+ buffer[bufpos] = '\0';
+ return bufpos;
+}
+
+size_t url_decode(char * buffer, size_t buflen,
+ const char * source, size_t srclen) {
+ if (NULL == buffer)
+ return srclen + 1;
+ if (buflen <= 0)
+ return 0;
+
+ unsigned char h1, h2;
+ size_t srcpos = 0, bufpos = 0;
+ while ((srcpos < srclen) && (bufpos + 1 < buflen)) {
+ unsigned char ch = source[srcpos++];
+ if (ch == '+') {
+ buffer[bufpos++] = ' ';
+ } else if ((ch == '%')
+ && (srcpos + 1 < srclen)
+ && hex_decode(source[srcpos], &h1)
+ && hex_decode(source[srcpos+1], &h2))
+ {
+ buffer[bufpos++] = (h1 << 4) | h2;
+ srcpos += 2;
+ } else {
+ buffer[bufpos++] = ch;
+ }
+ }
+ buffer[bufpos] = '\0';
+ return bufpos;
+}
+
+size_t utf8_decode(const char* source, size_t srclen, unsigned long* value) {
+ const unsigned char* s = reinterpret_cast<const unsigned char*>(source);
+ if ((s[0] & 0x80) == 0x00) { // Check s[0] == 0xxxxxxx
+ *value = s[0];
+ return 1;
+ }
+ if ((srclen < 2) || ((s[1] & 0xC0) != 0x80)) { // Check s[1] != 10xxxxxx
+ return 0;
+ }
+ // Accumulate the trailer byte values in value16, and combine it with the
+ // relevant bits from s[0], once we've determined the sequence length.
+ unsigned long value16 = (s[1] & 0x3F);
+ if ((s[0] & 0xE0) == 0xC0) { // Check s[0] == 110xxxxx
+ *value = ((s[0] & 0x1F) << 6) | value16;
+ return 2;
+ }
+ if ((srclen < 3) || ((s[2] & 0xC0) != 0x80)) { // Check s[2] != 10xxxxxx
+ return 0;
+ }
+ value16 = (value16 << 6) | (s[2] & 0x3F);
+ if ((s[0] & 0xF0) == 0xE0) { // Check s[0] == 1110xxxx
+ *value = ((s[0] & 0x0F) << 12) | value16;
+ return 3;
+ }
+ if ((srclen < 4) || ((s[3] & 0xC0) != 0x80)) { // Check s[3] != 10xxxxxx
+ return 0;
+ }
+ value16 = (value16 << 6) | (s[3] & 0x3F);
+ if ((s[0] & 0xF8) == 0xF0) { // Check s[0] == 11110xxx
+ *value = ((s[0] & 0x07) << 18) | value16;
+ return 4;
+ }
+ return 0;
+}
+
+size_t utf8_encode(char* buffer, size_t buflen, unsigned long value) {
+ if ((value <= 0x7F) && (buflen >= 1)) {
+ buffer[0] = static_cast<unsigned char>(value);
+ return 1;
+ }
+ if ((value <= 0x7FF) && (buflen >= 2)) {
+ buffer[0] = 0xC0 | static_cast<unsigned char>(value >> 6);
+ buffer[1] = 0x80 | static_cast<unsigned char>(value & 0x3F);
+ return 2;
+ }
+ if ((value <= 0xFFFF) && (buflen >= 3)) {
+ buffer[0] = 0xE0 | static_cast<unsigned char>(value >> 12);
+ buffer[1] = 0x80 | static_cast<unsigned char>((value >> 6) & 0x3F);
+ buffer[2] = 0x80 | static_cast<unsigned char>(value & 0x3F);
+ return 3;
+ }
+ if ((value <= 0x1FFFFF) && (buflen >= 4)) {
+ buffer[0] = 0xF0 | static_cast<unsigned char>(value >> 18);
+ buffer[1] = 0x80 | static_cast<unsigned char>((value >> 12) & 0x3F);
+ buffer[2] = 0x80 | static_cast<unsigned char>((value >> 6) & 0x3F);
+ buffer[3] = 0x80 | static_cast<unsigned char>(value & 0x3F);
+ return 4;
+ }
+ return 0;
+}
+
+size_t html_encode(char * buffer, size_t buflen,
+ const char * source, size_t srclen) {
+ ASSERT(NULL != buffer); // TODO: estimate output size
+ if (buflen <= 0)
+ return 0;
+
+ size_t srcpos = 0, bufpos = 0;
+ while ((srcpos < srclen) && (bufpos + 1 < buflen)) {
+ unsigned char ch = source[srcpos];
+ if (ch < 128) {
+ srcpos += 1;
+ if (ASCII_CLASS[ch] & HTML_UNSAFE) {
+ const char * escseq = 0;
+ size_t esclen = 0;
+ switch (ch) {
+ case '<': escseq = "&lt;"; esclen = 4; break;
+ case '>': escseq = "&gt;"; esclen = 4; break;
+ case '\'': escseq = "&#39;"; esclen = 5; break;
+ case '\"': escseq = "&quot;"; esclen = 6; break;
+ case '&': escseq = "&amp;"; esclen = 5; break;
+ default: ASSERT(false);
+ }
+ if (bufpos + esclen >= buflen) {
+ break;
+ }
+ memcpy(buffer + bufpos, escseq, esclen);
+ bufpos += esclen;
+ } else {
+ buffer[bufpos++] = ch;
+ }
+ } else {
+ // Largest value is 0x1FFFFF => &#2097151; (10 characters)
+ char escseq[11];
+ unsigned long val;
+ if (size_t vallen = utf8_decode(&source[srcpos], srclen - srcpos, &val)) {
+ srcpos += vallen;
+ } else {
+ // Not a valid utf8 sequence, just use the raw character.
+ val = static_cast<unsigned char>(source[srcpos++]);
+ }
+ size_t esclen = sprintfn(escseq, ARRAY_SIZE(escseq), "&#%lu;", val);
+ if (bufpos + esclen >= buflen) {
+ break;
+ }
+ memcpy(buffer + bufpos, escseq, esclen);
+ bufpos += esclen;
+ }
+ }
+ buffer[bufpos] = '\0';
+ return bufpos;
+}
+
+size_t html_decode(char * buffer, size_t buflen,
+ const char * source, size_t srclen) {
+ ASSERT(NULL != buffer); // TODO: estimate output size
+ return xml_decode(buffer, buflen, source, srclen);
+}
+
+size_t xml_encode(char * buffer, size_t buflen,
+ const char * source, size_t srclen) {
+ ASSERT(NULL != buffer); // TODO: estimate output size
+ if (buflen <= 0)
+ return 0;
+
+ size_t srcpos = 0, bufpos = 0;
+ while ((srcpos < srclen) && (bufpos + 1 < buflen)) {
+ unsigned char ch = source[srcpos++];
+ if ((ch < 128) && (ASCII_CLASS[ch] & XML_UNSAFE)) {
+ const char * escseq = 0;
+ size_t esclen = 0;
+ switch (ch) {
+ case '<': escseq = "&lt;"; esclen = 4; break;
+ case '>': escseq = "&gt;"; esclen = 4; break;
+ case '\'': escseq = "&apos;"; esclen = 6; break;
+ case '\"': escseq = "&quot;"; esclen = 6; break;
+ case '&': escseq = "&amp;"; esclen = 5; break;
+ default: ASSERT(false);
+ }
+ if (bufpos + esclen >= buflen) {
+ break;
+ }
+ memcpy(buffer + bufpos, escseq, esclen);
+ bufpos += esclen;
+ } else {
+ buffer[bufpos++] = ch;
+ }
+ }
+ buffer[bufpos] = '\0';
+ return bufpos;
+}
+
+size_t xml_decode(char * buffer, size_t buflen,
+ const char * source, size_t srclen) {
+ ASSERT(NULL != buffer); // TODO: estimate output size
+ if (buflen <= 0)
+ return 0;
+
+ size_t srcpos = 0, bufpos = 0;
+ while ((srcpos < srclen) && (bufpos + 1 < buflen)) {
+ unsigned char ch = source[srcpos++];
+ if (ch != '&') {
+ buffer[bufpos++] = ch;
+ } else if ((srcpos + 2 < srclen)
+ && (memcmp(source + srcpos, "lt;", 3) == 0)) {
+ buffer[bufpos++] = '<';
+ srcpos += 3;
+ } else if ((srcpos + 2 < srclen)
+ && (memcmp(source + srcpos, "gt;", 3) == 0)) {
+ buffer[bufpos++] = '>';
+ srcpos += 3;
+ } else if ((srcpos + 4 < srclen)
+ && (memcmp(source + srcpos, "apos;", 5) == 0)) {
+ buffer[bufpos++] = '\'';
+ srcpos += 5;
+ } else if ((srcpos + 4 < srclen)
+ && (memcmp(source + srcpos, "quot;", 5) == 0)) {
+ buffer[bufpos++] = '\"';
+ srcpos += 5;
+ } else if ((srcpos + 3 < srclen)
+ && (memcmp(source + srcpos, "amp;", 4) == 0)) {
+ buffer[bufpos++] = '&';
+ srcpos += 4;
+ } else if ((srcpos < srclen) && (source[srcpos] == '#')) {
+ int int_base = 10;
+ if ((srcpos + 1 < srclen) && (source[srcpos+1] == 'x')) {
+ int_base = 16;
+ srcpos += 1;
+ }
+ char * ptr;
+ // TODO: Fix hack (ptr may go past end of data)
+ unsigned long val = strtoul(source + srcpos + 1, &ptr, int_base);
+ if ((static_cast<size_t>(ptr - source) < srclen) && (*ptr == ';')) {
+ srcpos = ptr - source + 1;
+ } else {
+ // Not a valid escape sequence.
+ break;
+ }
+ if (size_t esclen = utf8_encode(buffer + bufpos, buflen - bufpos, val)) {
+ bufpos += esclen;
+ } else {
+ // Not enough room to encode the character, or illegal character
+ break;
+ }
+ } else {
+ // Unrecognized escape sequence.
+ break;
+ }
+ }
+ buffer[bufpos] = '\0';
+ return bufpos;
+}
+
+static const char HEX[] = "0123456789abcdef";
+
+char hex_encode(unsigned char val) {
+ ASSERT(val < 16);
+ return (val < 16) ? HEX[val] : '!';
+}
+
+bool hex_decode(char ch, unsigned char* val) {
+ if ((ch >= '0') && (ch <= '9')) {
+ *val = ch - '0';
+ } else if ((ch >= 'A') && (ch <= 'Z')) {
+ *val = (ch - 'A') + 10;
+ } else if ((ch >= 'a') && (ch <= 'z')) {
+ *val = (ch - 'a') + 10;
+ } else {
+ return false;
+ }
+ return true;
+}
+
+size_t hex_encode(char* buffer, size_t buflen,
+ const char* csource, size_t srclen) {
+ return hex_encode_with_delimiter(buffer, buflen, csource, srclen, 0);
+}
+
+size_t hex_encode_with_delimiter(char* buffer, size_t buflen,
+ const char* csource, size_t srclen,
+ char delimiter) {
+ ASSERT(NULL != buffer); // TODO: estimate output size
+ if (buflen == 0)
+ return 0;
+
+ // Init and check bounds.
+ const unsigned char* bsource =
+ reinterpret_cast<const unsigned char*>(csource);
+ size_t srcpos = 0, bufpos = 0;
+ size_t needed = delimiter ? (srclen * 3) : (srclen * 2 + 1);
+ if (buflen < needed)
+ return 0;
+
+ while (srcpos < srclen) {
+ unsigned char ch = bsource[srcpos++];
+ buffer[bufpos ] = hex_encode((ch >> 4) & 0xF);
+ buffer[bufpos+1] = hex_encode((ch ) & 0xF);
+ bufpos += 2;
+
+ // Don't write a delimiter after the last byte.
+ if (delimiter && (srcpos < srclen)) {
+ buffer[bufpos] = delimiter;
+ ++bufpos;
+ }
+ }
+
+ // Null terminate.
+ buffer[bufpos] = '\0';
+ return bufpos;
+}
+
+std::string hex_encode(const char* source, size_t srclen) {
+ return hex_encode_with_delimiter(source, srclen, 0);
+}
+
+std::string hex_encode_with_delimiter(const char* source, size_t srclen,
+ char delimiter) {
+ const size_t kBufferSize = srclen * 3;
+ char* buffer = STACK_ARRAY(char, kBufferSize);
+ size_t length = hex_encode_with_delimiter(buffer, kBufferSize,
+ source, srclen, delimiter);
+ ASSERT(srclen == 0 || length > 0);
+ return std::string(buffer, length);
+}
+
+size_t hex_decode(char * cbuffer, size_t buflen,
+ const char * source, size_t srclen) {
+ return hex_decode_with_delimiter(cbuffer, buflen, source, srclen, 0);
+}
+
+size_t hex_decode_with_delimiter(char* cbuffer, size_t buflen,
+ const char* source, size_t srclen,
+ char delimiter) {
+ ASSERT(NULL != cbuffer); // TODO: estimate output size
+ if (buflen == 0)
+ return 0;
+
+ // Init and bounds check.
+ unsigned char* bbuffer = reinterpret_cast<unsigned char*>(cbuffer);
+ size_t srcpos = 0, bufpos = 0;
+ size_t needed = (delimiter) ? (srclen + 1) / 3 : srclen / 2;
+ if (buflen < needed)
+ return 0;
+
+ while (srcpos < srclen) {
+ if ((srclen - srcpos) < 2) {
+ // This means we have an odd number of bytes.
+ return 0;
+ }
+
+ unsigned char h1, h2;
+ if (!hex_decode(source[srcpos], &h1) ||
+ !hex_decode(source[srcpos + 1], &h2))
+ return 0;
+
+ bbuffer[bufpos++] = (h1 << 4) | h2;
+ srcpos += 2;
+
+ // Remove the delimiter if needed.
+ if (delimiter && (srclen - srcpos) > 1) {
+ if (source[srcpos] != delimiter)
+ return 0;
+ ++srcpos;
+ }
+ }
+
+ return bufpos;
+}
+
+size_t hex_decode(char* buffer, size_t buflen, const std::string& source) {
+ return hex_decode_with_delimiter(buffer, buflen, source, 0);
+}
+size_t hex_decode_with_delimiter(char* buffer, size_t buflen,
+ const std::string& source, char delimiter) {
+ return hex_decode_with_delimiter(buffer, buflen,
+ source.c_str(), source.length(), delimiter);
+}
+
+size_t transform(std::string& value, size_t maxlen, const std::string& source,
+ Transform t) {
+ char* buffer = STACK_ARRAY(char, maxlen + 1);
+ size_t length = t(buffer, maxlen + 1, source.data(), source.length());
+ value.assign(buffer, length);
+ return length;
+}
+
+std::string s_transform(const std::string& source, Transform t) {
+  // Ask the transformation function to approximate the destination size
+  // (it returns an upper bound).
+ size_t maxlen = t(NULL, 0, source.data(), source.length());
+ char * buffer = STACK_ARRAY(char, maxlen);
+ size_t len = t(buffer, maxlen, source.data(), source.length());
+ std::string result(buffer, len);
+ return result;
+}
+
+size_t tokenize(const std::string& source, char delimiter,
+ std::vector<std::string>* fields) {
+ ASSERT(NULL != fields);
+ fields->clear();
+ size_t last = 0;
+ for (size_t i = 0; i < source.length(); ++i) {
+ if (source[i] == delimiter) {
+ if (i != last) {
+ fields->push_back(source.substr(last, i - last));
+ }
+ last = i + 1;
+ }
+ }
+ if (last != source.length()) {
+ fields->push_back(source.substr(last, source.length() - last));
+ }
+ return fields->size();
+}
+
+size_t tokenize_append(const std::string& source, char delimiter,
+ std::vector<std::string>* fields) {
+ if (!fields) return 0;
+
+ std::vector<std::string> new_fields;
+ tokenize(source, delimiter, &new_fields);
+ fields->insert(fields->end(), new_fields.begin(), new_fields.end());
+ return fields->size();
+}
+
+size_t tokenize(const std::string& source, char delimiter, char start_mark,
+ char end_mark, std::vector<std::string>* fields) {
+ if (!fields) return 0;
+ fields->clear();
+
+ std::string remain_source = source;
+ while (!remain_source.empty()) {
+ size_t start_pos = remain_source.find(start_mark);
+ if (std::string::npos == start_pos) break;
+ std::string pre_mark;
+ if (start_pos > 0) {
+ pre_mark = remain_source.substr(0, start_pos - 1);
+ }
+
+ ++start_pos;
+ size_t end_pos = remain_source.find(end_mark, start_pos);
+ if (std::string::npos == end_pos) break;
+
+ // We have found the matching marks. First tokenize the pre-mask. Then add
+ // the marked part as a single field. Finally, loop back for the post-mark.
+ tokenize_append(pre_mark, delimiter, fields);
+ fields->push_back(remain_source.substr(start_pos, end_pos - start_pos));
+ remain_source = remain_source.substr(end_pos + 1);
+ }
+
+ return tokenize_append(remain_source, delimiter, fields);
+}
+
+size_t split(const std::string& source, char delimiter,
+ std::vector<std::string>* fields) {
+ ASSERT(NULL != fields);
+ fields->clear();
+ size_t last = 0;
+ for (size_t i = 0; i < source.length(); ++i) {
+ if (source[i] == delimiter) {
+ fields->push_back(source.substr(last, i - last));
+ last = i + 1;
+ }
+ }
+ fields->push_back(source.substr(last, source.length() - last));
+ return fields->size();
+}
+
+char make_char_safe_for_filename(char c) {
+ if (c < 32)
+ return '_';
+
+ switch (c) {
+ case '<':
+ case '>':
+ case ':':
+ case '"':
+ case '/':
+ case '\\':
+ case '|':
+ case '*':
+ case '?':
+ return '_';
+
+ default:
+ return c;
+ }
+}
+
+/*
+void sprintf(std::string& value, size_t maxlen, const char * format, ...) {
+ char * buffer = STACK_ARRAY(char, maxlen + 1);
+ va_list args;
+ va_start(args, format);
+ value.assign(buffer, vsprintfn(buffer, maxlen + 1, format, args));
+ va_end(args);
+}
+*/
+
+/////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/stringencode.h b/chromium/third_party/webrtc/base/stringencode.h
new file mode 100644
index 00000000000..b6c666f98a7
--- /dev/null
+++ b/chromium/third_party/webrtc/base/stringencode.h
@@ -0,0 +1,210 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_STRINGENCODE_H_
+#define WEBRTC_BASE_STRINGENCODE_H_
+
+#include <string>
+#include <sstream>
+#include <vector>
+
+#include "webrtc/base/common.h"
+
+namespace rtc {
+
+//////////////////////////////////////////////////////////////////////
+// String Encoding Utilities
+//////////////////////////////////////////////////////////////////////
+
+// Convert an unsigned value to its utf8 representation. Returns the length
+// of the encoded string, or 0 if the encoding is longer than buflen - 1.
+size_t utf8_encode(char* buffer, size_t buflen, unsigned long value);
+// Decode the utf8 encoded value pointed to by source. Returns the number of
+// bytes used by the encoding, or 0 if the encoding is invalid.
+size_t utf8_decode(const char* source, size_t srclen, unsigned long* value);
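+//
+// Illustrative sketch (U+20AC, the euro sign, encodes to three bytes):
+//   char buf[4];
+//   size_t len = utf8_encode(buf, sizeof(buf), 0x20AC);  // len == 3
+//   unsigned long value;
+//   size_t used = utf8_decode(buf, len, &value);         // used == 3,
+//                                                        // value == 0x20AC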
+
+// Escaping prefixes illegal characters with the escape character. Compact, but
+// illegal characters still appear in the string.
+size_t escape(char * buffer, size_t buflen,
+ const char * source, size_t srclen,
+ const char * illegal, char escape);
+// Note: in-place unescaping (buffer == source) is allowed.
+size_t unescape(char * buffer, size_t buflen,
+ const char * source, size_t srclen,
+ char escape);
+
+// Encoding replaces illegal characters with the escape character and 2 hex
+// chars, so it's a little less compact than escape, but completely removes
+// illegal characters. Note that hex digits should not be used as illegal
+// characters.
+size_t encode(char * buffer, size_t buflen,
+ const char * source, size_t srclen,
+ const char * illegal, char escape);
+// Note: in-place decoding (buffer == source) is allowed.
+size_t decode(char * buffer, size_t buflen,
+ const char * source, size_t srclen,
+ char escape);
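+//
+// Illustrative sketch, using '%' as the escape character and ',' as the only
+// illegal character:
+//   char buf[16];
+//   size_t len = escape(buf, sizeof(buf), "a,b", 3, ",", '%');  // buf: "a%,b"
+//   len = unescape(buf, sizeof(buf), buf, len, '%');            // buf: "a,b"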
+
+// Returns a list of characters that may be unsafe for use in the name of a
+// file, suitable for passing to the 'illegal' member of escape or encode.
+const char* unsafe_filename_characters();
+
+// url_encode is an encode operation with a predefined set of illegal characters
+// and escape character (for use in URLs, obviously).
+size_t url_encode(char * buffer, size_t buflen,
+ const char * source, size_t srclen);
+// Note: in-place decoding (buffer == source) is allowed.
+size_t url_decode(char * buffer, size_t buflen,
+ const char * source, size_t srclen);
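+//
+// Illustrative sketch:
+//   char buf[32];
+//   size_t len = url_encode(buf, sizeof(buf), "a b&c", 5);  // buf: "a%20b%26c"
+//   len = url_decode(buf, sizeof(buf), buf, len);           // buf: "a b&c"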
+
+// html_encode prevents data embedded in html from containing markup.
+size_t html_encode(char * buffer, size_t buflen,
+ const char * source, size_t srclen);
+// Note: in-place decoding (buffer == source) is allowed.
+size_t html_decode(char * buffer, size_t buflen,
+ const char * source, size_t srclen);
+
+// xml_encode makes data suitable for inside xml attributes and values.
+size_t xml_encode(char * buffer, size_t buflen,
+ const char * source, size_t srclen);
+// Note: in-place decoding (buffer == source) is allowed.
+size_t xml_decode(char * buffer, size_t buflen,
+ const char * source, size_t srclen);
+
+// Convert an unsigned value from 0 to 15 to the hex character equivalent...
+char hex_encode(unsigned char val);
+// ...and vice-versa.
+bool hex_decode(char ch, unsigned char* val);
+
+// hex_encode shows the hex representation of binary data in ascii.
+size_t hex_encode(char* buffer, size_t buflen,
+ const char* source, size_t srclen);
+
+// hex_encode, but separate each byte representation with a delimiter.
+// |delimiter| == 0 means no delimiter.
+// If the buffer is too short, we return 0.
+size_t hex_encode_with_delimiter(char* buffer, size_t buflen,
+ const char* source, size_t srclen,
+ char delimiter);
+
+// Helper functions for hex_encode.
+std::string hex_encode(const char* source, size_t srclen);
+std::string hex_encode_with_delimiter(const char* source, size_t srclen,
+ char delimiter);
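+//
+// Illustrative sketch:
+//   const char data[] = { 0x01, 0x7f };
+//   std::string plain = hex_encode(data, sizeof(data));    // "017f"
+//   std::string dotted =
+//       hex_encode_with_delimiter(data, sizeof(data), ':');  // "01:7f"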
+
+// hex_decode converts ascii hex to binary.
+size_t hex_decode(char* buffer, size_t buflen,
+ const char* source, size_t srclen);
+
+// hex_decode, assuming that there is a delimiter between every byte
+// pair.
+// |delimiter| == 0 means no delimiter.
+// If the buffer is too short or the data is invalid, we return 0.
+size_t hex_decode_with_delimiter(char* buffer, size_t buflen,
+ const char* source, size_t srclen,
+ char delimiter);
+
+// Helper functions for hex_decode.
+size_t hex_decode(char* buffer, size_t buflen, const std::string& source);
+size_t hex_decode_with_delimiter(char* buffer, size_t buflen,
+ const std::string& source, char delimiter);
+
+// Apply any suitable string transform (including the ones above) to an STL
+// string. Stack-allocated temporary space is used for the transformation,
+// so value and source may refer to the same string.
+typedef size_t (*Transform)(char * buffer, size_t buflen,
+ const char * source, size_t srclen);
+size_t transform(std::string& value, size_t maxlen, const std::string& source,
+ Transform t);
+
+// Return the result of applying transform t to source.
+std::string s_transform(const std::string& source, Transform t);
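+//
+// Illustrative sketch: xml_encode expands a character to at most 6 bytes
+// ("&quot;"), which bounds the scratch space transform() needs.
+//   std::string src = "a<b>\"c\"";
+//   std::string dst;
+//   transform(dst, src.length() * 6, src, xml_encode);
+//   // dst == "a&lt;b&gt;&quot;c&quot;"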
+
+// Convenience wrappers.
+inline std::string s_url_encode(const std::string& source) {
+ return s_transform(source, url_encode);
+}
+inline std::string s_url_decode(const std::string& source) {
+ return s_transform(source, url_decode);
+}
+
+// Splits the source string into multiple fields separated by delimiter,
+// with duplicates of delimiter creating empty fields.
+size_t split(const std::string& source, char delimiter,
+ std::vector<std::string>* fields);
+
+// Splits the source string into multiple fields separated by delimiter,
+// with duplicates of delimiter ignored. Trailing delimiter ignored.
+size_t tokenize(const std::string& source, char delimiter,
+ std::vector<std::string>* fields);
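+//
+// Illustrative sketch of the split/tokenize difference:
+//   std::vector<std::string> f;
+//   split("a,,b", ',', &f);     // {"a", "", "b"}  -- empty field kept
+//   f.clear();
+//   tokenize("a,,b", ',', &f);  // {"a", "b"}      -- empty field dropped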
+
+// Tokenize and append the tokens to fields. Return the new size of fields.
+size_t tokenize_append(const std::string& source, char delimiter,
+ std::vector<std::string>* fields);
+
+// Splits the source string into multiple fields separated by delimiter, with
+// duplicates of delimiter ignored. Trailing delimiter ignored. A substring in
+// between the start_mark and the end_mark is treated as a single field. Return
+// the size of fields. For example, if source is "filename
+// \"/Library/Application Support/media content.txt\"", delimiter is ' ', and
+// the start_mark and end_mark are '"', this method returns two fields:
+// "filename" and "/Library/Application Support/media content.txt".
+size_t tokenize(const std::string& source, char delimiter, char start_mark,
+ char end_mark, std::vector<std::string>* fields);
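+//
+// Illustrative sketch:
+//   std::vector<std::string> fields;
+//   tokenize("filename \"my file.txt\" readonly", ' ', '"', '"', &fields);
+//   // fields == {"filename", "my file.txt", "readonly"}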
+
+// Safe sprintf to std::string
+//void sprintf(std::string& value, size_t maxlen, const char * format, ...)
+// PRINTF_FORMAT(3);
+
+// Convert arbitrary values to/from a string.
+
+template <class T>
+static bool ToString(const T &t, std::string* s) {
+ ASSERT(NULL != s);
+ std::ostringstream oss;
+ oss << std::boolalpha << t;
+ *s = oss.str();
+ return !oss.fail();
+}
+
+template <class T>
+static bool FromString(const std::string& s, T* t) {
+ ASSERT(NULL != t);
+ std::istringstream iss(s);
+ iss >> std::boolalpha >> *t;
+ return !iss.fail();
+}
+
+// Inline versions of the string conversion routines.
+
+template<typename T>
+static inline std::string ToString(const T& val) {
+ std::string str; ToString(val, &str); return str;
+}
+
+template<typename T>
+static inline T FromString(const std::string& str) {
+ T val; FromString(str, &val); return val;
+}
+
+template<typename T>
+static inline T FromString(const T& defaultValue, const std::string& str) {
+ T val(defaultValue); FromString(str, &val); return val;
+}
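+//
+// Illustrative sketch:
+//   std::string s = ToString(42);     // s == "42"
+//   int n = FromString<int>("123");   // n == 123
+//   bool flag;
+//   FromString("true", &flag);        // flag == true (std::boolalpha parsing)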
+
+// Simple function to strip out characters which shouldn't be
+// used in filenames.
+char make_char_safe_for_filename(char c);
+
+//////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_STRINGENCODE_H_
diff --git a/chromium/third_party/webrtc/base/stringencode_unittest.cc b/chromium/third_party/webrtc/base/stringencode_unittest.cc
new file mode 100644
index 00000000000..c9e726ecb5b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/stringencode_unittest.cc
@@ -0,0 +1,385 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+
+namespace rtc {
+
+TEST(Utf8EncodeTest, EncodeDecode) {
+ const struct Utf8Test {
+ const char* encoded;
+ size_t encsize, enclen;
+ unsigned long decoded;
+ } kTests[] = {
+ { "a ", 5, 1, 'a' },
+ { "\x7F ", 5, 1, 0x7F },
+ { "\xC2\x80 ", 5, 2, 0x80 },
+ { "\xDF\xBF ", 5, 2, 0x7FF },
+ { "\xE0\xA0\x80 ", 5, 3, 0x800 },
+ { "\xEF\xBF\xBF ", 5, 3, 0xFFFF },
+ { "\xF0\x90\x80\x80 ", 5, 4, 0x10000 },
+ { "\xF0\x90\x80\x80 ", 3, 0, 0x10000 },
+ { "\xF0\xF0\x80\x80 ", 5, 0, 0 },
+ { "\xF0\x90\x80 ", 5, 0, 0 },
+ { "\x90\x80\x80 ", 5, 0, 0 },
+ { NULL, 0, 0 },
+ };
+ for (size_t i = 0; kTests[i].encoded; ++i) {
+ unsigned long val = 0;
+ ASSERT_EQ(kTests[i].enclen, utf8_decode(kTests[i].encoded,
+ kTests[i].encsize,
+ &val));
+ unsigned long result = (kTests[i].enclen == 0) ? 0 : kTests[i].decoded;
+ ASSERT_EQ(result, val);
+
+ if (kTests[i].decoded == 0) {
+ // Not an interesting encoding test case
+ continue;
+ }
+
+ char buffer[5];
+ memset(buffer, 0x01, ARRAY_SIZE(buffer));
+ ASSERT_EQ(kTests[i].enclen, utf8_encode(buffer,
+ kTests[i].encsize,
+ kTests[i].decoded));
+ ASSERT_TRUE(memcmp(buffer, kTests[i].encoded, kTests[i].enclen) == 0);
+ // Make sure remainder of buffer is unchanged
+ ASSERT_TRUE(memory_check(buffer + kTests[i].enclen,
+ 0x1,
+ ARRAY_SIZE(buffer) - kTests[i].enclen));
+ }
+}
+
+class HexEncodeTest : public testing::Test {
+ public:
+ HexEncodeTest() : enc_res_(0), dec_res_(0) {
+ for (size_t i = 0; i < sizeof(data_); ++i) {
+ data_[i] = (i + 128) & 0xff;
+ }
+ memset(decoded_, 0x7f, sizeof(decoded_));
+ }
+
+ char data_[10];
+ char encoded_[31];
+ char decoded_[11];
+ size_t enc_res_;
+ size_t dec_res_;
+};
+
+// Test that we can convert to/from hex with no delimiter.
+TEST_F(HexEncodeTest, TestWithNoDelimiter) {
+ enc_res_ = hex_encode(encoded_, sizeof(encoded_), data_, sizeof(data_));
+ ASSERT_EQ(sizeof(data_) * 2, enc_res_);
+ ASSERT_STREQ("80818283848586878889", encoded_);
+ dec_res_ = hex_decode(decoded_, sizeof(decoded_), encoded_, enc_res_);
+ ASSERT_EQ(sizeof(data_), dec_res_);
+ ASSERT_EQ(0, memcmp(data_, decoded_, dec_res_));
+}
+
+// Test that we can convert to/from hex with a colon delimiter.
+TEST_F(HexEncodeTest, TestWithDelimiter) {
+ enc_res_ = hex_encode_with_delimiter(encoded_, sizeof(encoded_),
+ data_, sizeof(data_), ':');
+ ASSERT_EQ(sizeof(data_) * 3 - 1, enc_res_);
+ ASSERT_STREQ("80:81:82:83:84:85:86:87:88:89", encoded_);
+ dec_res_ = hex_decode_with_delimiter(decoded_, sizeof(decoded_),
+ encoded_, enc_res_, ':');
+ ASSERT_EQ(sizeof(data_), dec_res_);
+ ASSERT_EQ(0, memcmp(data_, decoded_, dec_res_));
+}
+
+// Test that encoding with one delimiter and decoding with another fails.
+TEST_F(HexEncodeTest, TestWithWrongDelimiter) {
+ enc_res_ = hex_encode_with_delimiter(encoded_, sizeof(encoded_),
+ data_, sizeof(data_), ':');
+ ASSERT_EQ(sizeof(data_) * 3 - 1, enc_res_);
+ dec_res_ = hex_decode_with_delimiter(decoded_, sizeof(decoded_),
+ encoded_, enc_res_, '/');
+ ASSERT_EQ(0U, dec_res_);
+}
+
+// Test that encoding without a delimiter and decoding with one fails.
+TEST_F(HexEncodeTest, TestExpectedDelimiter) {
+ enc_res_ = hex_encode(encoded_, sizeof(encoded_), data_, sizeof(data_));
+ ASSERT_EQ(sizeof(data_) * 2, enc_res_);
+ dec_res_ = hex_decode_with_delimiter(decoded_, sizeof(decoded_),
+ encoded_, enc_res_, ':');
+ ASSERT_EQ(0U, dec_res_);
+}
+
+// Test that encoding with a delimiter and decoding without one fails.
+TEST_F(HexEncodeTest, TestExpectedNoDelimiter) {
+ enc_res_ = hex_encode_with_delimiter(encoded_, sizeof(encoded_),
+ data_, sizeof(data_), ':');
+ ASSERT_EQ(sizeof(data_) * 3 - 1, enc_res_);
+ dec_res_ = hex_decode(decoded_, sizeof(decoded_), encoded_, enc_res_);
+ ASSERT_EQ(0U, dec_res_);
+}
+
+// Test that we handle a zero-length buffer with no delimiter.
+TEST_F(HexEncodeTest, TestZeroLengthNoDelimiter) {
+ enc_res_ = hex_encode(encoded_, sizeof(encoded_), "", 0);
+ ASSERT_EQ(0U, enc_res_);
+ dec_res_ = hex_decode(decoded_, sizeof(decoded_), encoded_, enc_res_);
+ ASSERT_EQ(0U, dec_res_);
+}
+
+// Test that we handle a zero-length buffer with a delimiter.
+TEST_F(HexEncodeTest, TestZeroLengthWithDelimiter) {
+ enc_res_ = hex_encode_with_delimiter(encoded_, sizeof(encoded_), "", 0, ':');
+ ASSERT_EQ(0U, enc_res_);
+ dec_res_ = hex_decode_with_delimiter(decoded_, sizeof(decoded_),
+ encoded_, enc_res_, ':');
+ ASSERT_EQ(0U, dec_res_);
+}
+
+// Test the std::string variants that take no delimiter.
+TEST_F(HexEncodeTest, TestHelpersNoDelimiter) {
+ std::string result = hex_encode(data_, sizeof(data_));
+ ASSERT_EQ("80818283848586878889", result);
+ dec_res_ = hex_decode(decoded_, sizeof(decoded_), result);
+ ASSERT_EQ(sizeof(data_), dec_res_);
+ ASSERT_EQ(0, memcmp(data_, decoded_, dec_res_));
+}
+
+// Test the std::string variants that use a delimiter.
+TEST_F(HexEncodeTest, TestHelpersWithDelimiter) {
+ std::string result = hex_encode_with_delimiter(data_, sizeof(data_), ':');
+ ASSERT_EQ("80:81:82:83:84:85:86:87:88:89", result);
+ dec_res_ = hex_decode_with_delimiter(decoded_, sizeof(decoded_), result, ':');
+ ASSERT_EQ(sizeof(data_), dec_res_);
+ ASSERT_EQ(0, memcmp(data_, decoded_, dec_res_));
+}
+
+// Test that encoding into a too-small output buffer (without delimiter) fails.
+TEST_F(HexEncodeTest, TestEncodeTooShort) {
+ enc_res_ = hex_encode_with_delimiter(encoded_, sizeof(data_) * 2,
+ data_, sizeof(data_), 0);
+ ASSERT_EQ(0U, enc_res_);
+}
+
+// Test that encoding into a too-small output buffer (with delimiter) fails.
+TEST_F(HexEncodeTest, TestEncodeWithDelimiterTooShort) {
+ enc_res_ = hex_encode_with_delimiter(encoded_, sizeof(data_) * 3 - 1,
+ data_, sizeof(data_), ':');
+ ASSERT_EQ(0U, enc_res_);
+}
+
+// Test that decoding into a too-small output buffer fails.
+TEST_F(HexEncodeTest, TestDecodeTooShort) {
+ dec_res_ = hex_decode_with_delimiter(decoded_, 4, "0123456789", 10, 0);
+ ASSERT_EQ(0U, dec_res_);
+ ASSERT_EQ(0x7f, decoded_[4]);
+}
+
+// Test that decoding non-hex data fails.
+TEST_F(HexEncodeTest, TestDecodeBogusData) {
+ dec_res_ = hex_decode_with_delimiter(decoded_, sizeof(decoded_), "xyz", 3, 0);
+ ASSERT_EQ(0U, dec_res_);
+}
+
+// Test that decoding an odd number of hex characters fails.
+TEST_F(HexEncodeTest, TestDecodeOddHexDigits) {
+ dec_res_ = hex_decode_with_delimiter(decoded_, sizeof(decoded_), "012", 3, 0);
+ ASSERT_EQ(0U, dec_res_);
+}
+
+// Test that decoding a string with too many delimiters fails.
+TEST_F(HexEncodeTest, TestDecodeWithDelimiterTooManyDelimiters) {
+ dec_res_ = hex_decode_with_delimiter(decoded_, 4, "01::23::45::67", 14, ':');
+ ASSERT_EQ(0U, dec_res_);
+}
+
+// Test that decoding a string with a leading delimiter fails.
+TEST_F(HexEncodeTest, TestDecodeWithDelimiterLeadingDelimiter) {
+ dec_res_ = hex_decode_with_delimiter(decoded_, 4, ":01:23:45:67", 12, ':');
+ ASSERT_EQ(0U, dec_res_);
+}
+
+// Test that decoding a string with a trailing delimiter fails.
+TEST_F(HexEncodeTest, TestDecodeWithDelimiterTrailingDelimiter) {
+ dec_res_ = hex_decode_with_delimiter(decoded_, 4, "01:23:45:67:", 12, ':');
+ ASSERT_EQ(0U, dec_res_);
+}
+
+// Tests counting substrings.
+TEST(TokenizeTest, CountSubstrings) {
+ std::vector<std::string> fields;
+
+ EXPECT_EQ(5ul, tokenize("one two three four five", ' ', &fields));
+ fields.clear();
+ EXPECT_EQ(1ul, tokenize("one", ' ', &fields));
+
+ // Extra spaces should be ignored.
+ fields.clear();
+ EXPECT_EQ(5ul, tokenize(" one two three four five ", ' ', &fields));
+ fields.clear();
+ EXPECT_EQ(1ul, tokenize(" one ", ' ', &fields));
+ fields.clear();
+ EXPECT_EQ(0ul, tokenize(" ", ' ', &fields));
+}
+
+// Tests comparing substrings.
+TEST(TokenizeTest, CompareSubstrings) {
+ std::vector<std::string> fields;
+
+ tokenize("find middle one", ' ', &fields);
+ ASSERT_EQ(3ul, fields.size());
+ ASSERT_STREQ("middle", fields.at(1).c_str());
+ fields.clear();
+
+ // Extra spaces should be ignored.
+ tokenize(" find middle one ", ' ', &fields);
+ ASSERT_EQ(3ul, fields.size());
+ ASSERT_STREQ("middle", fields.at(1).c_str());
+ fields.clear();
+ tokenize(" ", ' ', &fields);
+ ASSERT_EQ(0ul, fields.size());
+}
+
+TEST(TokenizeTest, TokenizeAppend) {
+ ASSERT_EQ(0ul, tokenize_append("A B C", ' ', NULL));
+
+ std::vector<std::string> fields;
+
+ tokenize_append("A B C", ' ', &fields);
+ ASSERT_EQ(3ul, fields.size());
+ ASSERT_STREQ("B", fields.at(1).c_str());
+
+ tokenize_append("D E", ' ', &fields);
+ ASSERT_EQ(5ul, fields.size());
+ ASSERT_STREQ("B", fields.at(1).c_str());
+ ASSERT_STREQ("E", fields.at(4).c_str());
+}
+
+TEST(TokenizeTest, TokenizeWithMarks) {
+ ASSERT_EQ(0ul, tokenize("D \"A B", ' ', '(', ')', NULL));
+
+ std::vector<std::string> fields;
+ tokenize("A B C", ' ', '"', '"', &fields);
+ ASSERT_EQ(3ul, fields.size());
+ ASSERT_STREQ("C", fields.at(2).c_str());
+
+ tokenize("\"A B\" C", ' ', '"', '"', &fields);
+ ASSERT_EQ(2ul, fields.size());
+ ASSERT_STREQ("A B", fields.at(0).c_str());
+
+ tokenize("D \"A B\" C", ' ', '"', '"', &fields);
+ ASSERT_EQ(3ul, fields.size());
+ ASSERT_STREQ("D", fields.at(0).c_str());
+ ASSERT_STREQ("A B", fields.at(1).c_str());
+
+ tokenize("D \"A B\" C \"E F\"", ' ', '"', '"', &fields);
+ ASSERT_EQ(4ul, fields.size());
+ ASSERT_STREQ("D", fields.at(0).c_str());
+ ASSERT_STREQ("A B", fields.at(1).c_str());
+ ASSERT_STREQ("E F", fields.at(3).c_str());
+
+ // No matching marks.
+ tokenize("D \"A B", ' ', '"', '"', &fields);
+ ASSERT_EQ(3ul, fields.size());
+ ASSERT_STREQ("D", fields.at(0).c_str());
+ ASSERT_STREQ("\"A", fields.at(1).c_str());
+
+ tokenize("D (A B) C (E F) G", ' ', '(', ')', &fields);
+ ASSERT_EQ(5ul, fields.size());
+ ASSERT_STREQ("D", fields.at(0).c_str());
+ ASSERT_STREQ("A B", fields.at(1).c_str());
+ ASSERT_STREQ("E F", fields.at(3).c_str());
+}
+
+// Tests counting substrings.
+TEST(SplitTest, CountSubstrings) {
+ std::vector<std::string> fields;
+
+ EXPECT_EQ(5ul, split("one,two,three,four,five", ',', &fields));
+ fields.clear();
+ EXPECT_EQ(1ul, split("one", ',', &fields));
+
+ // Empty fields between commas count.
+ fields.clear();
+ EXPECT_EQ(5ul, split("one,,three,four,five", ',', &fields));
+ fields.clear();
+ EXPECT_EQ(3ul, split(",three,", ',', &fields));
+ fields.clear();
+ EXPECT_EQ(1ul, split("", ',', &fields));
+}
+
+// Tests comparing substrings.
+TEST(SplitTest, CompareSubstrings) {
+ std::vector<std::string> fields;
+
+ split("find,middle,one", ',', &fields);
+ ASSERT_EQ(3ul, fields.size());
+ ASSERT_STREQ("middle", fields.at(1).c_str());
+ fields.clear();
+
+ // Empty fields between commas count.
+ split("find,,middle,one", ',', &fields);
+ ASSERT_EQ(4ul, fields.size());
+ ASSERT_STREQ("middle", fields.at(2).c_str());
+ fields.clear();
+ split("", ',', &fields);
+ ASSERT_EQ(1ul, fields.size());
+ ASSERT_STREQ("", fields.at(0).c_str());
+}
+
+TEST(BoolTest, DecodeValid) {
+ bool value;
+ EXPECT_TRUE(FromString("true", &value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(FromString("true,", &value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(FromString("true , true", &value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(FromString("true ,\n false", &value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(FromString(" true \n", &value));
+ EXPECT_TRUE(value);
+
+ EXPECT_TRUE(FromString("false", &value));
+ EXPECT_FALSE(value);
+ EXPECT_TRUE(FromString(" false ", &value));
+ EXPECT_FALSE(value);
+ EXPECT_TRUE(FromString(" false, ", &value));
+ EXPECT_FALSE(value);
+
+ EXPECT_TRUE(FromString<bool>("true\n"));
+ EXPECT_FALSE(FromString<bool>("false\n"));
+}
+
+TEST(BoolTest, DecodeInvalid) {
+ bool value;
+ EXPECT_FALSE(FromString("True", &value));
+ EXPECT_FALSE(FromString("TRUE", &value));
+ EXPECT_FALSE(FromString("False", &value));
+ EXPECT_FALSE(FromString("FALSE", &value));
+ EXPECT_FALSE(FromString("0", &value));
+ EXPECT_FALSE(FromString("1", &value));
+ EXPECT_FALSE(FromString("0,", &value));
+ EXPECT_FALSE(FromString("1,", &value));
+ EXPECT_FALSE(FromString("1,0", &value));
+ EXPECT_FALSE(FromString("1.", &value));
+ EXPECT_FALSE(FromString("1.0", &value));
+ EXPECT_FALSE(FromString("", &value));
+ EXPECT_FALSE(FromString<bool>("false\nfalse"));
+}
+
+TEST(BoolTest, RoundTrip) {
+ bool value;
+ EXPECT_TRUE(FromString(ToString(true), &value));
+ EXPECT_TRUE(value);
+ EXPECT_TRUE(FromString(ToString(false), &value));
+ EXPECT_FALSE(value);
+}
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/stringutils.cc b/chromium/third_party/webrtc/base/stringutils.cc
new file mode 100644
index 00000000000..041708d3d53
--- /dev/null
+++ b/chromium/third_party/webrtc/base/stringutils.cc
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/common.h"
+
+namespace rtc {
+
+bool memory_check(const void* memory, int c, size_t count) {
+ const char* char_memory = static_cast<const char*>(memory);
+ char char_c = static_cast<char>(c);
+ for (size_t i = 0; i < count; ++i) {
+ if (char_memory[i] != char_c) {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool string_match(const char* target, const char* pattern) {
+ while (*pattern) {
+ if (*pattern == '*') {
+ if (!*++pattern) {
+ return true;
+ }
+ while (*target) {
+ if ((toupper(*pattern) == toupper(*target))
+ && string_match(target + 1, pattern + 1)) {
+ return true;
+ }
+ ++target;
+ }
+ return false;
+ } else {
+ if (toupper(*pattern) != toupper(*target)) {
+ return false;
+ }
+ ++target;
+ ++pattern;
+ }
+ }
+ return !*target;
+}
+
+#if defined(WEBRTC_WIN)
+int ascii_string_compare(const wchar_t* s1, const char* s2, size_t n,
+ CharacterTransformation transformation) {
+ wchar_t c1, c2;
+ while (true) {
+ if (n-- == 0) return 0;
+ c1 = transformation(*s1);
+ // Double check that characters are not UTF-8
+ ASSERT(static_cast<unsigned char>(*s2) < 128);
+ // Note: *s2 gets implicitly promoted to wchar_t
+ c2 = transformation(*s2);
+ if (c1 != c2) return (c1 < c2) ? -1 : 1;
+ if (!c1) return 0;
+ ++s1;
+ ++s2;
+ }
+}
+
+size_t asccpyn(wchar_t* buffer, size_t buflen,
+ const char* source, size_t srclen) {
+ if (buflen <= 0)
+ return 0;
+
+ if (srclen == SIZE_UNKNOWN) {
+ srclen = strlenn(source, buflen - 1);
+ } else if (srclen >= buflen) {
+ srclen = buflen - 1;
+ }
+#if _DEBUG
+ // Double check that characters are not UTF-8
+ for (size_t pos = 0; pos < srclen; ++pos)
+ ASSERT(static_cast<unsigned char>(source[pos]) < 128);
+#endif // _DEBUG
+ std::copy(source, source + srclen, buffer);
+ buffer[srclen] = 0;
+ return srclen;
+}
+
+#endif // WEBRTC_WIN
+
+void replace_substrs(const char *search,
+ size_t search_len,
+ const char *replace,
+ size_t replace_len,
+ std::string *s) {
+ size_t pos = 0;
+ while ((pos = s->find(search, pos, search_len)) != std::string::npos) {
+ s->replace(pos, search_len, replace, replace_len);
+ pos += replace_len;
+ }
+}
+
+bool starts_with(const char *s1, const char *s2) {
+ return strncmp(s1, s2, strlen(s2)) == 0;
+}
+
+bool ends_with(const char *s1, const char *s2) {
+ size_t s1_length = strlen(s1);
+ size_t s2_length = strlen(s2);
+
+ if (s2_length > s1_length) {
+ return false;
+ }
+
+ const char* start = s1 + (s1_length - s2_length);
+ return strncmp(start, s2, s2_length) == 0;
+}
+
+static const char kWhitespace[] = " \n\r\t";
+
+std::string string_trim(const std::string& s) {
+ std::string::size_type first = s.find_first_not_of(kWhitespace);
+ std::string::size_type last = s.find_last_not_of(kWhitespace);
+
+ if (first == std::string::npos || last == std::string::npos) {
+ return std::string("");
+ }
+
+ return s.substr(first, last - first + 1);
+}
+
+} // namespace rtc
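
A minimal usage sketch for the helpers defined above (the function name NormalizeId is illustrative, not part of WebRTC):

#include <string>
#include "webrtc/base/stringutils.h"

// Trims whitespace, replaces '-' with '_', and checks the result's affixes.
bool NormalizeId(std::string* s) {
  *s = rtc::string_trim(*s);                  // " foo-bar " -> "foo-bar"
  rtc::replace_substrs("-", 1, "_", 1, s);    // "foo-bar"   -> "foo_bar"
  return rtc::starts_with(s->c_str(), "foo") &&
         rtc::ends_with(s->c_str(), "bar");
}
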
diff --git a/chromium/third_party/webrtc/base/stringutils.h b/chromium/third_party/webrtc/base/stringutils.h
new file mode 100644
index 00000000000..25990e0af46
--- /dev/null
+++ b/chromium/third_party/webrtc/base/stringutils.h
@@ -0,0 +1,318 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_STRINGUTILS_H__
+#define WEBRTC_BASE_STRINGUTILS_H__
+
+#include <ctype.h>
+#include <stdarg.h>
+#include <stdio.h>
+#include <string.h>
+
+#if defined(WEBRTC_WIN)
+#include <malloc.h>
+#include <wchar.h>
+#define alloca _alloca
+#endif // WEBRTC_WIN
+
+#if defined(WEBRTC_POSIX)
+#ifdef BSD
+#include <stdlib.h>
+#else // BSD
+#include <alloca.h>
+#endif // !BSD
+#endif // WEBRTC_POSIX
+
+#include <string>
+
+#include "webrtc/base/basictypes.h"
+
+///////////////////////////////////////////////////////////////////////////////
+// Generic string/memory utilities
+///////////////////////////////////////////////////////////////////////////////
+
+#define STACK_ARRAY(TYPE, LEN) static_cast<TYPE*>(::alloca((LEN)*sizeof(TYPE)))
+
+namespace rtc {
+
+// Complement to memset. Verifies memory consists of count bytes of value c.
+bool memory_check(const void* memory, int c, size_t count);
+
+// Determines whether the simple wildcard pattern matches target.
+// Alpha characters in pattern match case-insensitively.
+// Asterisks in pattern match 0 or more characters.
+// Ex: string_match("www.TEST.GOOGLE.COM", "www.*.com") -> true
+bool string_match(const char* target, const char* pattern);
+
+} // namespace rtc
+
+///////////////////////////////////////////////////////////////////////////////
+// Rename a bunch of common string functions so they are consistent across
+// platforms and between char and wchar_t variants.
+// Here is the full list of functions that are unified:
+// strlen, strcmp, stricmp, strncmp, strnicmp
+// strchr, vsnprintf, strtoul, tolowercase
+// tolowercase is like tolower, but not compatible with end-of-file value
+//
+// It's not clear if we will ever use wchar_t strings on unix. In theory,
+// all strings should be Utf8 all the time, except when interfacing with Win32
+// APIs that require Utf16.
+///////////////////////////////////////////////////////////////////////////////
+
+inline char tolowercase(char c) {
+ return static_cast<char>(tolower(c));
+}
+
+#if defined(WEBRTC_WIN)
+
+inline size_t strlen(const wchar_t* s) {
+ return wcslen(s);
+}
+inline int strcmp(const wchar_t* s1, const wchar_t* s2) {
+ return wcscmp(s1, s2);
+}
+inline int stricmp(const wchar_t* s1, const wchar_t* s2) {
+ return _wcsicmp(s1, s2);
+}
+inline int strncmp(const wchar_t* s1, const wchar_t* s2, size_t n) {
+ return wcsncmp(s1, s2, n);
+}
+inline int strnicmp(const wchar_t* s1, const wchar_t* s2, size_t n) {
+ return _wcsnicmp(s1, s2, n);
+}
+inline const wchar_t* strchr(const wchar_t* s, wchar_t c) {
+ return wcschr(s, c);
+}
+inline const wchar_t* strstr(const wchar_t* haystack, const wchar_t* needle) {
+ return wcsstr(haystack, needle);
+}
+#ifndef vsnprintf
+inline int vsnprintf(wchar_t* buf, size_t n, const wchar_t* fmt, va_list args) {
+ return _vsnwprintf(buf, n, fmt, args);
+}
+#endif // !vsnprintf
+inline unsigned long strtoul(const wchar_t* snum, wchar_t** end, int base) {
+ return wcstoul(snum, end, base);
+}
+inline wchar_t tolowercase(wchar_t c) {
+ return static_cast<wchar_t>(towlower(c));
+}
+
+#endif // WEBRTC_WIN
+
+#if defined(WEBRTC_POSIX)
+
+inline int _stricmp(const char* s1, const char* s2) {
+ return strcasecmp(s1, s2);
+}
+inline int _strnicmp(const char* s1, const char* s2, size_t n) {
+ return strncasecmp(s1, s2, n);
+}
+
+#endif // WEBRTC_POSIX
+
+///////////////////////////////////////////////////////////////////////////////
+// Traits simplifies porting string functions to be CTYPE-agnostic
+///////////////////////////////////////////////////////////////////////////////
+
+namespace rtc {
+
+const size_t SIZE_UNKNOWN = static_cast<size_t>(-1);
+
+template<class CTYPE>
+struct Traits {
+ // STL string type
+ //typedef XXX string;
+ // Null-terminated string
+ //inline static const CTYPE* empty_str();
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// String utilities which work with char or wchar_t
+///////////////////////////////////////////////////////////////////////////////
+
+template<class CTYPE>
+inline const CTYPE* nonnull(const CTYPE* str, const CTYPE* def_str = NULL) {
+ return str ? str : (def_str ? def_str : Traits<CTYPE>::empty_str());
+}
+
+template<class CTYPE>
+const CTYPE* strchr(const CTYPE* str, const CTYPE* chs) {
+ for (size_t i=0; str[i]; ++i) {
+ for (size_t j=0; chs[j]; ++j) {
+ if (str[i] == chs[j]) {
+ return str + i;
+ }
+ }
+ }
+ return 0;
+}
+
+template<class CTYPE>
+const CTYPE* strchrn(const CTYPE* str, size_t slen, CTYPE ch) {
+ for (size_t i=0; i<slen && str[i]; ++i) {
+ if (str[i] == ch) {
+ return str + i;
+ }
+ }
+ return 0;
+}
+
+template<class CTYPE>
+size_t strlenn(const CTYPE* buffer, size_t buflen) {
+ size_t bufpos = 0;
+ while (buffer[bufpos] && (bufpos < buflen)) {
+ ++bufpos;
+ }
+ return bufpos;
+}
+
+// Safe versions of strncpy, strncat, snprintf and vsnprintf that always
+// null-terminate.
+
+template<class CTYPE>
+size_t strcpyn(CTYPE* buffer, size_t buflen,
+ const CTYPE* source, size_t srclen = SIZE_UNKNOWN) {
+ if (buflen <= 0)
+ return 0;
+
+ if (srclen == SIZE_UNKNOWN) {
+ srclen = strlenn(source, buflen - 1);
+ } else if (srclen >= buflen) {
+ srclen = buflen - 1;
+ }
+ memcpy(buffer, source, srclen * sizeof(CTYPE));
+ buffer[srclen] = 0;
+ return srclen;
+}
+
+template<class CTYPE>
+size_t strcatn(CTYPE* buffer, size_t buflen,
+ const CTYPE* source, size_t srclen = SIZE_UNKNOWN) {
+ if (buflen <= 0)
+ return 0;
+
+ size_t bufpos = strlenn(buffer, buflen - 1);
+ return bufpos + strcpyn(buffer + bufpos, buflen - bufpos, source, srclen);
+}
+
+// Some compilers (clang specifically) require vsprintfn be defined before
+// sprintfn.
+template<class CTYPE>
+size_t vsprintfn(CTYPE* buffer, size_t buflen, const CTYPE* format,
+ va_list args) {
+ int len = vsnprintf(buffer, buflen, format, args);
+ if ((len < 0) || (static_cast<size_t>(len) >= buflen)) {
+ len = static_cast<int>(buflen - 1);
+ buffer[len] = 0;
+ }
+ return len;
+}
+
+template<class CTYPE>
+size_t sprintfn(CTYPE* buffer, size_t buflen, const CTYPE* format, ...);
+template<class CTYPE>
+size_t sprintfn(CTYPE* buffer, size_t buflen, const CTYPE* format, ...) {
+ va_list args;
+ va_start(args, format);
+ size_t len = vsprintfn(buffer, buflen, format, args);
+ va_end(args);
+ return len;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// Allow safe comparing and copying ascii (not UTF-8) with both wide and
+// non-wide character strings.
+///////////////////////////////////////////////////////////////////////////////
+
+inline int asccmp(const char* s1, const char* s2) {
+ return strcmp(s1, s2);
+}
+inline int ascicmp(const char* s1, const char* s2) {
+ return _stricmp(s1, s2);
+}
+inline int ascncmp(const char* s1, const char* s2, size_t n) {
+ return strncmp(s1, s2, n);
+}
+inline int ascnicmp(const char* s1, const char* s2, size_t n) {
+ return _strnicmp(s1, s2, n);
+}
+inline size_t asccpyn(char* buffer, size_t buflen,
+ const char* source, size_t srclen = SIZE_UNKNOWN) {
+ return strcpyn(buffer, buflen, source, srclen);
+}
+
+#if defined(WEBRTC_WIN)
+
+typedef wchar_t(*CharacterTransformation)(wchar_t);
+inline wchar_t identity(wchar_t c) { return c; }
+int ascii_string_compare(const wchar_t* s1, const char* s2, size_t n,
+ CharacterTransformation transformation);
+
+inline int asccmp(const wchar_t* s1, const char* s2) {
+ return ascii_string_compare(s1, s2, static_cast<size_t>(-1), identity);
+}
+inline int ascicmp(const wchar_t* s1, const char* s2) {
+ return ascii_string_compare(s1, s2, static_cast<size_t>(-1), tolowercase);
+}
+inline int ascncmp(const wchar_t* s1, const char* s2, size_t n) {
+ return ascii_string_compare(s1, s2, n, identity);
+}
+inline int ascnicmp(const wchar_t* s1, const char* s2, size_t n) {
+ return ascii_string_compare(s1, s2, n, tolowercase);
+}
+size_t asccpyn(wchar_t* buffer, size_t buflen,
+ const char* source, size_t srclen = SIZE_UNKNOWN);
+
+#endif // WEBRTC_WIN
+
+///////////////////////////////////////////////////////////////////////////////
+// Traits<char> specializations
+///////////////////////////////////////////////////////////////////////////////
+
+template<>
+struct Traits<char> {
+ typedef std::string string;
+ inline static const char* empty_str() { return ""; }
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// Traits<wchar_t> specializations (Windows only, currently)
+///////////////////////////////////////////////////////////////////////////////
+
+#if defined(WEBRTC_WIN)
+
+template<>
+struct Traits<wchar_t> {
+ typedef std::wstring string;
+  inline static const wchar_t* empty_str() { return L""; }
+};
+
+#endif // WEBRTC_WIN
+
+// Replaces all occurrences of "search" with "replace".
+void replace_substrs(const char *search,
+ size_t search_len,
+ const char *replace,
+ size_t replace_len,
+ std::string *s);
+
+// True iff s1 starts with s2.
+bool starts_with(const char *s1, const char *s2);
+
+// True iff s1 ends with s2.
+bool ends_with(const char *s1, const char *s2);
+
+// Remove leading and trailing whitespaces.
+std::string string_trim(const std::string& s);
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_STRINGUTILS_H__
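
A rough illustration of the always-null-terminating copy/format helpers and the ASCII comparison wrappers declared above (FormatAndCompare is a hypothetical name):

#include "webrtc/base/stringutils.h"

bool FormatAndCompare() {
  char buf[16];
  // strcpyn and sprintfn truncate if needed but always null-terminate.
  rtc::strcpyn(buf, sizeof(buf), "0123456789abcdefXYZ");  // buf == "0123456789abcde"
  rtc::sprintfn(buf, sizeof(buf), "%d-%d", 12, 34);       // buf == "12-34"
  // ascicmp compares case-insensitively, so this returns true.
  return rtc::ascicmp("HELLO", "hello") == 0;
}
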
diff --git a/chromium/third_party/webrtc/base/stringutils_unittest.cc b/chromium/third_party/webrtc/base/stringutils_unittest.cc
new file mode 100644
index 00000000000..b82290d0aa6
--- /dev/null
+++ b/chromium/third_party/webrtc/base/stringutils_unittest.cc
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/common.h"
+
+namespace rtc {
+
+// Tests for string_match().
+
+TEST(string_matchTest, Matches) {
+ EXPECT_TRUE( string_match("A.B.C.D", "a.b.c.d"));
+ EXPECT_TRUE( string_match("www.TEST.GOOGLE.COM", "www.*.com"));
+ EXPECT_TRUE( string_match("127.0.0.1", "12*.0.*1"));
+ EXPECT_TRUE( string_match("127.1.0.21", "12*.0.*1"));
+ EXPECT_FALSE(string_match("127.0.0.0", "12*.0.*1"));
+ EXPECT_FALSE(string_match("127.0.0.0", "12*.0.*1"));
+ EXPECT_FALSE(string_match("127.1.1.21", "12*.0.*1"));
+}
+
+// It's not clear if we will ever use wchar_t strings on unix. In theory,
+// all strings should be Utf8 all the time, except when interfacing with Win32
+// APIs that require Utf16.
+
+#if defined(WEBRTC_WIN)
+
+// Tests for ascii_string_compare().
+
+// Tests NULL input.
+TEST(ascii_string_compareTest, NullInput) {
+ // The following results in an access violation in
+ // ascii_string_compare. Is this a bug or by design? stringutils.h
+ // should document the expected behavior in this case.
+
+ // EXPECT_EQ(0, ascii_string_compare(NULL, NULL, 1, identity));
+}
+
+// Tests comparing two strings of different lengths.
+TEST(ascii_string_compareTest, DifferentLengths) {
+ EXPECT_EQ(-1, ascii_string_compare(L"Test", "Test1", 5, identity));
+}
+
+// Tests the case where the buffer size is smaller than the string
+// lengths.
+TEST(ascii_string_compareTest, SmallBuffer) {
+ EXPECT_EQ(0, ascii_string_compare(L"Test", "Test1", 3, identity));
+}
+
+// Tests the case where the buffer is not full.
+TEST(ascii_string_compareTest, LargeBuffer) {
+ EXPECT_EQ(0, ascii_string_compare(L"Test", "Test", 10, identity));
+}
+
+// Tests comparing two equal strings.
+TEST(ascii_string_compareTest, Equal) {
+ EXPECT_EQ(0, ascii_string_compare(L"Test", "Test", 5, identity));
+ EXPECT_EQ(0, ascii_string_compare(L"TeSt", "tEsT", 5, tolowercase));
+}
+
+// Tests comparing a smaller string to a larger one.
+TEST(ascii_string_compareTest, LessThan) {
+ EXPECT_EQ(-1, ascii_string_compare(L"abc", "abd", 4, identity));
+ EXPECT_EQ(-1, ascii_string_compare(L"ABC", "abD", 5, tolowercase));
+}
+
+// Tests comparing a larger string to a smaller one.
+TEST(ascii_string_compareTest, GreaterThan) {
+ EXPECT_EQ(1, ascii_string_compare(L"xyz", "xy", 5, identity));
+ EXPECT_EQ(1, ascii_string_compare(L"abc", "ABB", 5, tolowercase));
+}
+#endif // WEBRTC_WIN
+
+TEST(string_trim_Test, Trimming) {
+ EXPECT_EQ("temp", string_trim("\n\r\t temp \n\r\t"));
+ EXPECT_EQ("temp\n\r\t temp", string_trim(" temp\n\r\t temp "));
+ EXPECT_EQ("temp temp", string_trim("temp temp"));
+ EXPECT_EQ("", string_trim(" \r\n\t"));
+ EXPECT_EQ("", string_trim(""));
+}
+
+TEST(string_startsTest, StartsWith) {
+ EXPECT_TRUE(starts_with("foobar", "foo"));
+ EXPECT_TRUE(starts_with("foobar", "foobar"));
+ EXPECT_TRUE(starts_with("foobar", ""));
+ EXPECT_TRUE(starts_with("", ""));
+ EXPECT_FALSE(starts_with("foobar", "bar"));
+ EXPECT_FALSE(starts_with("foobar", "foobarbaz"));
+ EXPECT_FALSE(starts_with("", "f"));
+}
+
+TEST(string_endsTest, EndsWith) {
+ EXPECT_TRUE(ends_with("foobar", "bar"));
+ EXPECT_TRUE(ends_with("foobar", "foobar"));
+ EXPECT_TRUE(ends_with("foobar", ""));
+ EXPECT_TRUE(ends_with("", ""));
+ EXPECT_FALSE(ends_with("foobar", "foo"));
+ EXPECT_FALSE(ends_with("foobar", "foobarbaz"));
+ EXPECT_FALSE(ends_with("", "f"));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/systeminfo.cc b/chromium/third_party/webrtc/base/systeminfo.cc
new file mode 100644
index 00000000000..213c272b3fb
--- /dev/null
+++ b/chromium/third_party/webrtc/base/systeminfo.cc
@@ -0,0 +1,518 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/systeminfo.h"
+
+#if defined(WEBRTC_WIN)
+#include <winsock2.h>
+#ifndef EXCLUDE_D3D9
+#include <d3d9.h>
+#endif
+#include <intrin.h> // for __cpuid()
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <ApplicationServices/ApplicationServices.h>
+#include <CoreServices/CoreServices.h>
+#elif defined(WEBRTC_LINUX)
+#include <unistd.h>
+#endif
+#if defined(WEBRTC_MAC)
+#include <sys/sysctl.h>
+#endif
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/win32.h"
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include "webrtc/base/macconversion.h"
+#elif defined(WEBRTC_LINUX)
+#include "webrtc/base/linux.h"
+#endif
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stringutils.h"
+
+namespace rtc {
+
+// See Also: http://msdn.microsoft.com/en-us/library/ms683194(v=vs.85).aspx
+#if defined(WEBRTC_WIN)
+typedef BOOL (WINAPI *LPFN_GLPI)(
+ PSYSTEM_LOGICAL_PROCESSOR_INFORMATION,
+ PDWORD);
+
+static void GetProcessorInformation(int* physical_cpus, int* cache_size) {
+ // GetLogicalProcessorInformation() is available on Windows XP SP3 and beyond.
+ LPFN_GLPI glpi = reinterpret_cast<LPFN_GLPI>(GetProcAddress(
+ GetModuleHandle(L"kernel32"),
+ "GetLogicalProcessorInformation"));
+ if (NULL == glpi) {
+ return;
+ }
+ // Determine buffer size, allocate and get processor information.
+ // Size can change between calls (unlikely), so a loop is done.
+ DWORD return_length = 0;
+ scoped_ptr<SYSTEM_LOGICAL_PROCESSOR_INFORMATION[]> infos;
+ while (!glpi(infos.get(), &return_length)) {
+ if (GetLastError() == ERROR_INSUFFICIENT_BUFFER) {
+ infos.reset(new SYSTEM_LOGICAL_PROCESSOR_INFORMATION[
+ return_length / sizeof(SYSTEM_LOGICAL_PROCESSOR_INFORMATION)]);
+ } else {
+ return;
+ }
+ }
+ *physical_cpus = 0;
+ *cache_size = 0;
+ for (size_t i = 0;
+ i < return_length / sizeof(SYSTEM_LOGICAL_PROCESSOR_INFORMATION); ++i) {
+ if (infos[i].Relationship == RelationProcessorCore) {
+ ++*physical_cpus;
+ } else if (infos[i].Relationship == RelationCache) {
+ int next_cache_size = static_cast<int>(infos[i].Cache.Size);
+ if (next_cache_size >= *cache_size) {
+ *cache_size = next_cache_size;
+ }
+ }
+ }
+ return;
+}
+#else
+// TODO(fbarchard): Use gcc 4.4 provided cpuid intrinsic
+// 32 bit fpic requires ebx be preserved
+#if (defined(__pic__) || defined(__APPLE__)) && defined(__i386__)
+static inline void __cpuid(int cpu_info[4], int info_type) {
+ __asm__ volatile ( // NOLINT
+ "mov %%ebx, %%edi\n"
+ "cpuid\n"
+ "xchg %%edi, %%ebx\n"
+ : "=a"(cpu_info[0]), "=D"(cpu_info[1]), "=c"(cpu_info[2]), "=d"(cpu_info[3])
+ : "a"(info_type)
+ ); // NOLINT
+}
+#elif defined(__i386__) || defined(__x86_64__)
+static inline void __cpuid(int cpu_info[4], int info_type) {
+ __asm__ volatile ( // NOLINT
+ "cpuid\n"
+ : "=a"(cpu_info[0]), "=b"(cpu_info[1]), "=c"(cpu_info[2]), "=d"(cpu_info[3])
+ : "a"(info_type)
+ ); // NOLINT
+}
+#endif
+#endif // WEBRTC_WIN
+
+// Note(fbarchard):
+// Family and model are extended family and extended model. 8 bits each.
+SystemInfo::SystemInfo()
+ : physical_cpus_(1), logical_cpus_(1), cache_size_(0),
+ cpu_family_(0), cpu_model_(0), cpu_stepping_(0),
+ cpu_speed_(0), memory_(0) {
+ // Initialize the basic information.
+#if defined(__arm__) || defined(_M_ARM)
+ cpu_arch_ = SI_ARCH_ARM;
+#elif defined(__x86_64__) || defined(_M_X64)
+ cpu_arch_ = SI_ARCH_X64;
+#elif defined(__i386__) || defined(_M_IX86)
+ cpu_arch_ = SI_ARCH_X86;
+#else
+ cpu_arch_ = SI_ARCH_UNKNOWN;
+#endif
+
+#if defined(WEBRTC_WIN)
+ SYSTEM_INFO si;
+ GetSystemInfo(&si);
+ logical_cpus_ = si.dwNumberOfProcessors;
+ GetProcessorInformation(&physical_cpus_, &cache_size_);
+ if (physical_cpus_ <= 0) {
+ physical_cpus_ = logical_cpus_;
+ }
+ cpu_family_ = si.wProcessorLevel;
+ cpu_model_ = si.wProcessorRevision >> 8;
+ cpu_stepping_ = si.wProcessorRevision & 0xFF;
+#elif defined(WEBRTC_MAC)
+ uint32_t sysctl_value;
+ size_t length = sizeof(sysctl_value);
+ if (!sysctlbyname("hw.physicalcpu_max", &sysctl_value, &length, NULL, 0)) {
+ physical_cpus_ = static_cast<int>(sysctl_value);
+ }
+ length = sizeof(sysctl_value);
+ if (!sysctlbyname("hw.logicalcpu_max", &sysctl_value, &length, NULL, 0)) {
+ logical_cpus_ = static_cast<int>(sysctl_value);
+ }
+ uint64_t sysctl_value64;
+ length = sizeof(sysctl_value64);
+ if (!sysctlbyname("hw.l3cachesize", &sysctl_value64, &length, NULL, 0)) {
+ cache_size_ = static_cast<int>(sysctl_value64);
+ }
+ if (!cache_size_) {
+ length = sizeof(sysctl_value64);
+ if (!sysctlbyname("hw.l2cachesize", &sysctl_value64, &length, NULL, 0)) {
+ cache_size_ = static_cast<int>(sysctl_value64);
+ }
+ }
+ length = sizeof(sysctl_value);
+ if (!sysctlbyname("machdep.cpu.family", &sysctl_value, &length, NULL, 0)) {
+ cpu_family_ = static_cast<int>(sysctl_value);
+ }
+ length = sizeof(sysctl_value);
+ if (!sysctlbyname("machdep.cpu.model", &sysctl_value, &length, NULL, 0)) {
+ cpu_model_ = static_cast<int>(sysctl_value);
+ }
+ length = sizeof(sysctl_value);
+ if (!sysctlbyname("machdep.cpu.stepping", &sysctl_value, &length, NULL, 0)) {
+ cpu_stepping_ = static_cast<int>(sysctl_value);
+ }
+#elif defined(__native_client__)
+ // TODO(ryanpetrie): Implement this via PPAPI when it's available.
+#else // WEBRTC_LINUX
+ ProcCpuInfo proc_info;
+ if (proc_info.LoadFromSystem()) {
+ proc_info.GetNumCpus(&logical_cpus_);
+ proc_info.GetNumPhysicalCpus(&physical_cpus_);
+ proc_info.GetCpuFamily(&cpu_family_);
+#if defined(CPU_X86)
+ // These values only apply to x86 systems.
+ proc_info.GetSectionIntValue(0, "model", &cpu_model_);
+ proc_info.GetSectionIntValue(0, "stepping", &cpu_stepping_);
+ proc_info.GetSectionIntValue(0, "cpu MHz", &cpu_speed_);
+ proc_info.GetSectionIntValue(0, "cache size", &cache_size_);
+ cache_size_ *= 1024;
+#endif
+ }
+ // ProcCpuInfo reads cpu speed from "cpu MHz" under /proc/cpuinfo.
+ // But that number is a moving target which can change on-the-fly according to
+ // many factors including system workload.
+ // See /sys/devices/system/cpu/cpu0/cpufreq/scaling_available_governors.
+ // The one in /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq is more
+ // accurate. We use it as our cpu speed when it is available.
+ // cpuinfo_max_freq is measured in KHz and requires conversion to MHz.
+ int max_freq = rtc::ReadCpuMaxFreq();
+ if (max_freq > 0) {
+ cpu_speed_ = max_freq / 1000;
+ }
+#endif
+// For L2 CacheSize see also
+// http://www.flounder.com/cpuid_explorer2.htm#CPUID(0x800000006)
+#ifdef CPU_X86
+ if (cache_size_ == 0) {
+ int cpu_info[4];
+ __cpuid(cpu_info, 0x80000000); // query maximum extended cpuid function.
+ if (static_cast<uint32>(cpu_info[0]) >= 0x80000006) {
+ __cpuid(cpu_info, 0x80000006);
+ cache_size_ = (cpu_info[2] >> 16) * 1024;
+ }
+ }
+#endif
+}
+
+// Return the number of cpu threads available to the system.
+int SystemInfo::GetMaxCpus() {
+ return logical_cpus_;
+}
+
+// Return the number of cpu cores available to the system.
+int SystemInfo::GetMaxPhysicalCpus() {
+ return physical_cpus_;
+}
+
+// Return the number of cpus available to the process. Since affinity can be
+// changed on the fly, do not cache this value.
+// Can be affected by heat.
+int SystemInfo::GetCurCpus() {
+ int cur_cpus;
+#if defined(WEBRTC_WIN)
+ DWORD_PTR process_mask, system_mask;
+ ::GetProcessAffinityMask(::GetCurrentProcess(), &process_mask, &system_mask);
+ for (cur_cpus = 0; process_mask; ++cur_cpus) {
+ // Sparse-ones algorithm. There are slightly faster methods out there but
+ // they are unintuitive and won't make a difference on a single dword.
+ process_mask &= (process_mask - 1);
+ }
+#elif defined(WEBRTC_MAC)
+ uint32_t sysctl_value;
+ size_t length = sizeof(sysctl_value);
+ int error = sysctlbyname("hw.ncpu", &sysctl_value, &length, NULL, 0);
+ cur_cpus = !error ? static_cast<int>(sysctl_value) : 1;
+#else
+ // Linux, Solaris, WEBRTC_ANDROID
+ cur_cpus = static_cast<int>(sysconf(_SC_NPROCESSORS_ONLN));
+#endif
+ return cur_cpus;
+}
+
+// Return the type of this CPU.
+SystemInfo::Architecture SystemInfo::GetCpuArchitecture() {
+ return cpu_arch_;
+}
+
+// Returns the vendor string from the cpu, e.g. "GenuineIntel", "AuthenticAMD".
+// See "Intel Processor Identification and the CPUID Instruction"
+// (Intel document number: 241618)
+std::string SystemInfo::GetCpuVendor() {
+ if (cpu_vendor_.empty()) {
+#if defined(CPU_X86)
+ int cpu_info[4];
+ __cpuid(cpu_info, 0);
+ cpu_info[0] = cpu_info[1]; // Reorder output
+ cpu_info[1] = cpu_info[3];
+ cpu_info[2] = cpu_info[2];
+ cpu_info[3] = 0;
+ cpu_vendor_ = std::string(reinterpret_cast<char*>(&cpu_info[0]));
+#elif defined(CPU_ARM)
+ cpu_vendor_ = std::string("ARM");
+#else
+ cpu_vendor_ = std::string("Undefined");
+#endif
+ }
+ return cpu_vendor_;
+}
+
+int SystemInfo::GetCpuCacheSize() {
+ return cache_size_;
+}
+
+// Return the "family" of this CPU.
+int SystemInfo::GetCpuFamily() {
+ return cpu_family_;
+}
+
+// Return the "model" of this CPU.
+int SystemInfo::GetCpuModel() {
+ return cpu_model_;
+}
+
+// Return the "stepping" of this CPU.
+int SystemInfo::GetCpuStepping() {
+ return cpu_stepping_;
+}
+
+// Return the clockrate of the primary processor in Mhz. This value can be
+// cached. Returns -1 on error.
+int SystemInfo::GetMaxCpuSpeed() {
+ if (cpu_speed_) {
+ return cpu_speed_;
+ }
+#if defined(WEBRTC_WIN)
+ HKEY key;
+ static const WCHAR keyName[] =
+ L"HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0";
+
+ if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, keyName , 0, KEY_QUERY_VALUE, &key)
+ == ERROR_SUCCESS) {
+ DWORD data, len;
+ len = sizeof(data);
+
+ if (RegQueryValueEx(key, L"~Mhz", 0, 0, reinterpret_cast<LPBYTE>(&data),
+ &len) == ERROR_SUCCESS) {
+ cpu_speed_ = data;
+ } else {
+ LOG(LS_WARNING) << "Failed to query registry value HKLM\\" << keyName
+ << "\\~Mhz";
+ cpu_speed_ = -1;
+ }
+
+ RegCloseKey(key);
+ } else {
+ LOG(LS_WARNING) << "Failed to open registry key HKLM\\" << keyName;
+ cpu_speed_ = -1;
+ }
+#elif defined(WEBRTC_MAC)
+ uint64_t sysctl_value;
+ size_t length = sizeof(sysctl_value);
+ int error = sysctlbyname("hw.cpufrequency_max", &sysctl_value, &length,
+ NULL, 0);
+ cpu_speed_ = !error ? static_cast<int>(sysctl_value/1000000) : -1;
+#else
+ // TODO(fbarchard): Implement using proc/cpuinfo
+ cpu_speed_ = 0;
+#endif
+ return cpu_speed_;
+}
+
+// Dynamically check the current clockrate, which could be reduced because of
+// powersaving profiles. Eventually for windows we want to query WMI for
+// root\WMI::ProcessorPerformance.InstanceName="Processor_Number_0".frequency
+int SystemInfo::GetCurCpuSpeed() {
+#if defined(WEBRTC_WIN)
+ // TODO(fbarchard): Add WMI check, requires COM initialization
+ // NOTE(fbarchard): Testable on Sandy Bridge.
+ return GetMaxCpuSpeed();
+#elif defined(WEBRTC_MAC)
+ uint64_t sysctl_value;
+ size_t length = sizeof(sysctl_value);
+ int error = sysctlbyname("hw.cpufrequency", &sysctl_value, &length, NULL, 0);
+ return !error ? static_cast<int>(sysctl_value/1000000) : GetMaxCpuSpeed();
+#else // WEBRTC_LINUX
+ // TODO(fbarchard): Use proc/cpuinfo for Cur speed on Linux.
+ return GetMaxCpuSpeed();
+#endif
+}
+
+// Returns the amount of installed physical memory in Bytes. Cacheable.
+// Returns -1 on error.
+int64 SystemInfo::GetMemorySize() {
+ if (memory_) {
+ return memory_;
+ }
+
+#if defined(WEBRTC_WIN)
+ MEMORYSTATUSEX status = {0};
+ status.dwLength = sizeof(status);
+
+ if (GlobalMemoryStatusEx(&status)) {
+ memory_ = status.ullTotalPhys;
+ } else {
+ LOG_GLE(LS_WARNING) << "GlobalMemoryStatusEx failed.";
+ memory_ = -1;
+ }
+
+#elif defined(WEBRTC_MAC)
+ size_t len = sizeof(memory_);
+ int error = sysctlbyname("hw.memsize", &memory_, &len, NULL, 0);
+ if (error || memory_ == 0) {
+ memory_ = -1;
+ }
+#else // WEBRTC_LINUX
+ memory_ = static_cast<int64>(sysconf(_SC_PHYS_PAGES)) *
+ static_cast<int64>(sysconf(_SC_PAGESIZE));
+ if (memory_ < 0) {
+ LOG(LS_WARNING) << "sysconf(_SC_PHYS_PAGES) failed."
+ << "sysconf(_SC_PHYS_PAGES) " << sysconf(_SC_PHYS_PAGES)
+ << "sysconf(_SC_PAGESIZE) " << sysconf(_SC_PAGESIZE);
+ memory_ = -1;
+ }
+#endif
+
+ return memory_;
+}
+
+
+// Return the name of the machine model we are currently running on.
+// This is a human readable string that consists of the name and version
+// number of the hardware, e.g. 'MacBookAir1,1'. Returns an empty string if
+// the model cannot be determined. The string is cached for subsequent calls.
+std::string SystemInfo::GetMachineModel() {
+ if (!machine_model_.empty()) {
+ return machine_model_;
+ }
+
+#if defined(WEBRTC_MAC)
+ char buffer[128];
+ size_t length = sizeof(buffer);
+ int error = sysctlbyname("hw.model", buffer, &length, NULL, 0);
+ if (!error) {
+ machine_model_.assign(buffer, length - 1);
+ } else {
+ machine_model_.clear();
+ }
+#else
+ machine_model_ = "Not available";
+#endif
+
+ return machine_model_;
+}
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+// Helper functions to query IOKit for video hardware properties.
+static CFTypeRef SearchForProperty(io_service_t port, CFStringRef name) {
+ return IORegistryEntrySearchCFProperty(port, kIOServicePlane,
+ name, kCFAllocatorDefault,
+ kIORegistryIterateRecursively | kIORegistryIterateParents);
+}
+
+static void GetProperty(io_service_t port, CFStringRef name, int* value) {
+ if (!value) return;
+ CFTypeRef ref = SearchForProperty(port, name);
+ if (ref) {
+ CFTypeID refType = CFGetTypeID(ref);
+ if (CFNumberGetTypeID() == refType) {
+ CFNumberRef number = reinterpret_cast<CFNumberRef>(ref);
+ p_convertCFNumberToInt(number, value);
+ } else if (CFDataGetTypeID() == refType) {
+ CFDataRef data = reinterpret_cast<CFDataRef>(ref);
+ if (CFDataGetLength(data) == sizeof(UInt32)) {
+ *value = *reinterpret_cast<const UInt32*>(CFDataGetBytePtr(data));
+ }
+ }
+ CFRelease(ref);
+ }
+}
+
+static void GetProperty(io_service_t port, CFStringRef name,
+ std::string* value) {
+ if (!value) return;
+ CFTypeRef ref = SearchForProperty(port, name);
+ if (ref) {
+ CFTypeID refType = CFGetTypeID(ref);
+ if (CFStringGetTypeID() == refType) {
+ CFStringRef stringRef = reinterpret_cast<CFStringRef>(ref);
+ p_convertHostCFStringRefToCPPString(stringRef, *value);
+ } else if (CFDataGetTypeID() == refType) {
+ CFDataRef dataRef = reinterpret_cast<CFDataRef>(ref);
+ *value = std::string(reinterpret_cast<const char*>(
+ CFDataGetBytePtr(dataRef)), CFDataGetLength(dataRef));
+ }
+ CFRelease(ref);
+ }
+}
+#endif
+
+// Fills a struct with information on the graphics adapter and returns true
+// iff successful.
+bool SystemInfo::GetGpuInfo(GpuInfo *info) {
+ if (!info) return false;
+#if defined(WEBRTC_WIN) && !defined(EXCLUDE_D3D9)
+ D3DADAPTER_IDENTIFIER9 identifier;
+ HRESULT hr = E_FAIL;
+ HINSTANCE d3d_lib = LoadLibrary(L"d3d9.dll");
+
+ if (d3d_lib) {
+ typedef IDirect3D9* (WINAPI *D3DCreate9Proc)(UINT);
+ D3DCreate9Proc d3d_create_proc = reinterpret_cast<D3DCreate9Proc>(
+ GetProcAddress(d3d_lib, "Direct3DCreate9"));
+ if (d3d_create_proc) {
+ IDirect3D9* d3d = d3d_create_proc(D3D_SDK_VERSION);
+ if (d3d) {
+ hr = d3d->GetAdapterIdentifier(D3DADAPTER_DEFAULT, 0, &identifier);
+ d3d->Release();
+ }
+ }
+ FreeLibrary(d3d_lib);
+ }
+
+ if (hr != D3D_OK) {
+ LOG(LS_ERROR) << "Failed to access Direct3D9 information.";
+ return false;
+ }
+
+ info->device_name = identifier.DeviceName;
+ info->description = identifier.Description;
+ info->vendor_id = identifier.VendorId;
+ info->device_id = identifier.DeviceId;
+ info->driver = identifier.Driver;
+ // driver_version format: product.version.subversion.build
+ std::stringstream ss;
+ ss << HIWORD(identifier.DriverVersion.HighPart) << "."
+ << LOWORD(identifier.DriverVersion.HighPart) << "."
+ << HIWORD(identifier.DriverVersion.LowPart) << "."
+ << LOWORD(identifier.DriverVersion.LowPart);
+ info->driver_version = ss.str();
+ return true;
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ // We'll query the IOKit for the gpu of the main display.
+ io_service_t display_service_port = CGDisplayIOServicePort(
+ kCGDirectMainDisplay);
+ GetProperty(display_service_port, CFSTR("vendor-id"), &info->vendor_id);
+ GetProperty(display_service_port, CFSTR("device-id"), &info->device_id);
+ GetProperty(display_service_port, CFSTR("model"), &info->description);
+ return true;
+#else // WEBRTC_LINUX
+ // TODO(fbarchard): Implement this on Linux
+ return false;
+#endif
+}
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/systeminfo.h b/chromium/third_party/webrtc/base/systeminfo.h
new file mode 100644
index 00000000000..44088629bae
--- /dev/null
+++ b/chromium/third_party/webrtc/base/systeminfo.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_SYSTEMINFO_H__
+#define WEBRTC_BASE_SYSTEMINFO_H__
+
+#include <string>
+
+#include "webrtc/base/basictypes.h"
+
+namespace rtc {
+
+class SystemInfo {
+ public:
+ enum Architecture {
+ SI_ARCH_UNKNOWN = -1,
+ SI_ARCH_X86 = 0,
+ SI_ARCH_X64 = 1,
+ SI_ARCH_ARM = 2
+ };
+
+ SystemInfo();
+
+ // The number of CPU Cores in the system.
+ int GetMaxPhysicalCpus();
+ // The number of CPU Threads in the system.
+ int GetMaxCpus();
+ // The number of CPU Threads currently available to this process.
+ int GetCurCpus();
+ // Identity of the CPUs.
+ Architecture GetCpuArchitecture();
+ std::string GetCpuVendor();
+ int GetCpuFamily();
+ int GetCpuModel();
+ int GetCpuStepping();
+ // Return size of CPU cache in bytes. Uses largest available cache (L3).
+ int GetCpuCacheSize();
+ // Estimated speed of the CPUs, in MHz. e.g. 2400 for 2.4 GHz
+ int GetMaxCpuSpeed();
+ int GetCurCpuSpeed();
+ // Total amount of physical memory, in bytes.
+ int64 GetMemorySize();
+ // The model name of the machine, e.g. "MacBookAir1,1"
+ std::string GetMachineModel();
+
+ // The gpu identifier
+ struct GpuInfo {
+ GpuInfo() : vendor_id(0), device_id(0) {}
+ std::string device_name;
+ std::string description;
+ int vendor_id;
+ int device_id;
+ std::string driver;
+ std::string driver_version;
+ };
+ bool GetGpuInfo(GpuInfo *info);
+
+ private:
+ int physical_cpus_;
+ int logical_cpus_;
+ int cache_size_;
+ Architecture cpu_arch_;
+ std::string cpu_vendor_;
+ int cpu_family_;
+ int cpu_model_;
+ int cpu_stepping_;
+ int cpu_speed_;
+ int64 memory_;
+ std::string machine_model_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_SYSTEMINFO_H__
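
A small sketch of reading these values and converting units (MHz to GHz, bytes to MB); DescribeHost is an illustrative name, not a WebRTC API:

#include <sstream>
#include <string>

#include "webrtc/base/systeminfo.h"

std::string DescribeHost() {
  rtc::SystemInfo info;
  std::stringstream ss;
  ss << info.GetMachineModel() << ": "
     << info.GetMaxPhysicalCpus() << " cores, "
     << info.GetMaxCpus() << " threads, "
     << info.GetMaxCpuSpeed() / 1000.0 << " GHz, "          // reported in MHz
     << info.GetMemorySize() / (1024 * 1024) << " MB RAM";  // bytes; -1 on error
  return ss.str();
}
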
diff --git a/chromium/third_party/webrtc/base/systeminfo_unittest.cc b/chromium/third_party/webrtc/base/systeminfo_unittest.cc
new file mode 100644
index 00000000000..fec553582a3
--- /dev/null
+++ b/chromium/third_party/webrtc/base/systeminfo_unittest.cc
@@ -0,0 +1,194 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/systeminfo.h"
+
+#if defined(CPU_X86) || defined(CPU_ARM)
+TEST(SystemInfoTest, CpuVendorNonEmpty) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "CpuVendor: " << info.GetCpuVendor();
+ EXPECT_FALSE(info.GetCpuVendor().empty());
+}
+
+// Tests that vendor identification is Intel, AMD, or ARM.
+// See Also http://en.wikipedia.org/wiki/CPUID
+TEST(SystemInfoTest, CpuVendorIntelAMDARM) {
+ rtc::SystemInfo info;
+#if defined(CPU_X86)
+ EXPECT_TRUE(rtc::string_match(info.GetCpuVendor().c_str(),
+ "GenuineIntel") ||
+ rtc::string_match(info.GetCpuVendor().c_str(),
+ "AuthenticAMD"));
+#elif defined(CPU_ARM)
+ EXPECT_TRUE(rtc::string_match(info.GetCpuVendor().c_str(), "ARM"));
+#endif
+}
+#endif // defined(CPU_X86) || defined(CPU_ARM)
+
+// Tests CpuArchitecture matches expectations.
+TEST(SystemInfoTest, GetCpuArchitecture) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "CpuArchitecture: " << info.GetCpuArchitecture();
+ rtc::SystemInfo::Architecture architecture = info.GetCpuArchitecture();
+#if defined(CPU_X86) || defined(CPU_ARM)
+ if (sizeof(intptr_t) == 8) {
+ EXPECT_EQ(rtc::SystemInfo::SI_ARCH_X64, architecture);
+ } else if (sizeof(intptr_t) == 4) {
+#if defined(CPU_ARM)
+ EXPECT_EQ(rtc::SystemInfo::SI_ARCH_ARM, architecture);
+#else
+ EXPECT_EQ(rtc::SystemInfo::SI_ARCH_X86, architecture);
+#endif
+ }
+#endif
+}
+
+// Tests Cpu Cache Size
+TEST(SystemInfoTest, CpuCacheSize) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "CpuCacheSize: " << info.GetCpuCacheSize();
+ EXPECT_GE(info.GetCpuCacheSize(), 8192); // 8 KB min cache
+ EXPECT_LE(info.GetCpuCacheSize(), 1024 * 1024 * 1024); // 1 GB max cache
+}
+
+// Tests MachineModel is set. On Mac test machine model is known.
+TEST(SystemInfoTest, MachineModelKnown) {
+ rtc::SystemInfo info;
+ EXPECT_FALSE(info.GetMachineModel().empty());
+ const char *machine_model = info.GetMachineModel().c_str();
+ LOG(LS_INFO) << "MachineModel: " << machine_model;
+ bool known = true;
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ // Full list as of May 2012. Update when new OSX based models are added.
+ known = rtc::string_match(machine_model, "MacBookPro*") ||
+ rtc::string_match(machine_model, "MacBookAir*") ||
+ rtc::string_match(machine_model, "MacBook*") ||
+ rtc::string_match(machine_model, "MacPro*") ||
+ rtc::string_match(machine_model, "Macmini*") ||
+ rtc::string_match(machine_model, "iMac*") ||
+ rtc::string_match(machine_model, "Xserve*");
+#elif !defined(WEBRTC_IOS)
+ // All other machines return Not available.
+ known = rtc::string_match(info.GetMachineModel().c_str(),
+ "Not available");
+#endif
+ if (!known) {
+ LOG(LS_WARNING) << "Machine Model Unknown: " << machine_model;
+ }
+}
+
+// Tests maximum cpu clockrate.
+TEST(SystemInfoTest, CpuMaxCpuSpeed) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "MaxCpuSpeed: " << info.GetMaxCpuSpeed();
+ EXPECT_GT(info.GetMaxCpuSpeed(), 0);
+ EXPECT_LT(info.GetMaxCpuSpeed(), 100000); // 100 Ghz
+}
+
+// Tests current cpu clockrate.
+TEST(SystemInfoTest, CpuCurCpuSpeed) {
+ rtc::SystemInfo info;
+  LOG(LS_INFO) << "CurCpuSpeed: " << info.GetCurCpuSpeed();
+  EXPECT_GT(info.GetCurCpuSpeed(), 0);
+  EXPECT_LT(info.GetCurCpuSpeed(), 100000);  // 100 GHz
+}
+
+// Tests physical memory size.
+TEST(SystemInfoTest, MemorySize) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "MemorySize: " << info.GetMemorySize();
+ EXPECT_GT(info.GetMemorySize(), -1);
+}
+
+// Tests number of logical cpus available to the system.
+TEST(SystemInfoTest, MaxCpus) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "MaxCpus: " << info.GetMaxCpus();
+ EXPECT_GT(info.GetMaxCpus(), 0);
+}
+
+// Tests number of physical cpus available to the system.
+TEST(SystemInfoTest, MaxPhysicalCpus) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "MaxPhysicalCpus: " << info.GetMaxPhysicalCpus();
+ EXPECT_GT(info.GetMaxPhysicalCpus(), 0);
+ EXPECT_LE(info.GetMaxPhysicalCpus(), info.GetMaxCpus());
+}
+
+// Tests number of logical cpus available to the process.
+TEST(SystemInfoTest, CurCpus) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "CurCpus: " << info.GetCurCpus();
+ EXPECT_GT(info.GetCurCpus(), 0);
+ EXPECT_LE(info.GetCurCpus(), info.GetMaxCpus());
+}
+
+#ifdef CPU_X86
+// CPU family/model/stepping is only available on x86. The following tests
+// verify that they are set when running on x86 CPUs. Valid
+// Family/Model/Stepping values are non-zero on known CPUs.
+
+// Tests Intel CPU Family identification.
+TEST(SystemInfoTest, CpuFamily) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "CpuFamily: " << info.GetCpuFamily();
+ EXPECT_GT(info.GetCpuFamily(), 0);
+}
+
+// Tests Intel CPU Model identification.
+TEST(SystemInfoTest, CpuModel) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "CpuModel: " << info.GetCpuModel();
+ EXPECT_GT(info.GetCpuModel(), 0);
+}
+
+// Tests Intel CPU Stepping identification.
+TEST(SystemInfoTest, CpuStepping) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "CpuStepping: " << info.GetCpuStepping();
+ EXPECT_GT(info.GetCpuStepping(), 0);
+}
+#else // CPU_X86
+// If not running on an x86 CPU, the following tests expect the functions to
+// return 0.
+TEST(SystemInfoTest, CpuFamily) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "CpuFamily: " << info.GetCpuFamily();
+ EXPECT_EQ(0, info.GetCpuFamily());
+}
+
+// Tests Intel CPU Model identification.
+TEST(SystemInfoTest, CpuModel) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "CpuModel: " << info.GetCpuModel();
+ EXPECT_EQ(0, info.GetCpuModel());
+}
+
+// Tests Intel CPU Stepping identification.
+TEST(SystemInfoTest, CpuStepping) {
+ rtc::SystemInfo info;
+ LOG(LS_INFO) << "CpuStepping: " << info.GetCpuStepping();
+ EXPECT_EQ(0, info.GetCpuStepping());
+}
+#endif // CPU_X86
+
+#if WEBRTC_WIN && !defined(EXCLUDE_D3D9)
+TEST(SystemInfoTest, GpuInfo) {
+ rtc::SystemInfo info;
+ rtc::SystemInfo::GpuInfo gi;
+ EXPECT_TRUE(info.GetGpuInfo(&gi));
+ LOG(LS_INFO) << "GpuDriver: " << gi.driver;
+ EXPECT_FALSE(gi.driver.empty());
+ LOG(LS_INFO) << "GpuDriverVersion: " << gi.driver_version;
+ EXPECT_FALSE(gi.driver_version.empty());
+}
+#endif
diff --git a/chromium/third_party/webrtc/base/task.cc b/chromium/third_party/webrtc/base/task.cc
new file mode 100644
index 00000000000..ed9f426267e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/task.cc
@@ -0,0 +1,272 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/task.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/taskrunner.h"
+
+namespace rtc {
+
+int32 Task::unique_id_seed_ = 0;
+
+Task::Task(TaskParent *parent)
+ : TaskParent(this, parent),
+ state_(STATE_INIT),
+ blocked_(false),
+ done_(false),
+ aborted_(false),
+ busy_(false),
+ error_(false),
+ start_time_(0),
+ timeout_time_(0),
+ timeout_seconds_(0),
+ timeout_suspended_(false) {
+ unique_id_ = unique_id_seed_++;
+
+ // sanity check that we didn't roll-over our id seed
+ ASSERT(unique_id_ < unique_id_seed_);
+}
+
+Task::~Task() {
+ // Is this task being deleted in the correct manner?
+ ASSERT(!done_ || GetRunner()->is_ok_to_delete(this));
+ ASSERT(state_ == STATE_INIT || done_);
+ ASSERT(state_ == STATE_INIT || blocked_);
+
+ // If the task is being deleted without being done, it
+ // means that it hasn't been removed from its parent.
+ // This happens if a task is deleted outside of TaskRunner.
+ if (!done_) {
+ Stop();
+ }
+}
+
+int64 Task::CurrentTime() {
+ return GetRunner()->CurrentTime();
+}
+
+int64 Task::ElapsedTime() {
+ return CurrentTime() - start_time_;
+}
+
+void Task::Start() {
+ if (state_ != STATE_INIT)
+ return;
+ // Set the start time before starting the task. Otherwise if the task
+ // finishes quickly and deletes the Task object, setting start_time_
+ // will crash.
+ start_time_ = CurrentTime();
+ GetRunner()->StartTask(this);
+}
+
+void Task::Step() {
+ if (done_) {
+#ifdef _DEBUG
+ // we do not know how !blocked_ happens when done_ - should be impossible.
+ // But it causes problems, so in retail build, we force blocked_, and
+ // under debug we assert.
+ ASSERT(blocked_);
+#else
+ blocked_ = true;
+#endif
+ return;
+ }
+
+ // Async Error() was called
+ if (error_) {
+ done_ = true;
+ state_ = STATE_ERROR;
+ blocked_ = true;
+// obsolete - an errored task is not considered done now
+// SignalDone();
+
+ Stop();
+#ifdef _DEBUG
+ // verify that stop removed this from its parent
+ ASSERT(!parent()->IsChildTask(this));
+#endif
+ return;
+ }
+
+ busy_ = true;
+ int new_state = Process(state_);
+ busy_ = false;
+
+ if (aborted_) {
+ Abort(true); // no need to wake because we're awake
+ return;
+ }
+
+ if (new_state == STATE_BLOCKED) {
+ blocked_ = true;
+ // Let the timeout continue
+ } else {
+ state_ = new_state;
+ blocked_ = false;
+ ResetTimeout();
+ }
+
+ if (new_state == STATE_DONE) {
+ done_ = true;
+ } else if (new_state == STATE_ERROR) {
+ done_ = true;
+ error_ = true;
+ }
+
+ if (done_) {
+// obsolete - call this yourself
+// SignalDone();
+
+ Stop();
+#if _DEBUG
+ // verify that stop removed this from its parent
+ ASSERT(!parent()->IsChildTask(this));
+#endif
+ blocked_ = true;
+ }
+}
+
+void Task::Abort(bool nowake) {
+ // Why only check for done_ (instead of "aborted_ || done_")?
+ //
+ // If aborted_ && !done_, it means the logic for aborting still
+ // needs to be executed (because busy_ must have been true when
+ // Abort() was previously called).
+ if (done_)
+ return;
+ aborted_ = true;
+ if (!busy_) {
+ done_ = true;
+ blocked_ = true;
+ error_ = true;
+
+ // "done_" is set before calling "Stop()" to ensure that this code
+ // doesn't execute more than once (recursively) for the same task.
+ Stop();
+#ifdef _DEBUG
+ // verify that stop removed this from its parent
+ ASSERT(!parent()->IsChildTask(this));
+#endif
+ if (!nowake) {
+ // WakeTasks to self-delete.
+ // Don't call Wake() because it is a no-op after "done_" is set.
+      // Even if Wake() did run, it clears "blocked_" which isn't desirable.
+ GetRunner()->WakeTasks();
+ }
+ }
+}
+
+void Task::Wake() {
+ if (done_)
+ return;
+ if (blocked_) {
+ blocked_ = false;
+ GetRunner()->WakeTasks();
+ }
+}
+
+void Task::Error() {
+ if (error_ || done_)
+ return;
+ error_ = true;
+ Wake();
+}
+
+std::string Task::GetStateName(int state) const {
+ switch (state) {
+ case STATE_BLOCKED: return "BLOCKED";
+ case STATE_INIT: return "INIT";
+ case STATE_START: return "START";
+ case STATE_DONE: return "DONE";
+ case STATE_ERROR: return "ERROR";
+ case STATE_RESPONSE: return "RESPONSE";
+ }
+ return "??";
+}
+
+int Task::Process(int state) {
+ int newstate = STATE_ERROR;
+
+ if (TimedOut()) {
+ ClearTimeout();
+ newstate = OnTimeout();
+ SignalTimeout();
+ } else {
+ switch (state) {
+ case STATE_INIT:
+ newstate = STATE_START;
+ break;
+ case STATE_START:
+ newstate = ProcessStart();
+ break;
+ case STATE_RESPONSE:
+ newstate = ProcessResponse();
+ break;
+ case STATE_DONE:
+ case STATE_ERROR:
+ newstate = STATE_BLOCKED;
+ break;
+ }
+ }
+
+ return newstate;
+}
+
+void Task::Stop() {
+ // No need to wake because we're either awake or in abort
+ TaskParent::OnStopped(this);
+}
+
+void Task::set_timeout_seconds(const int timeout_seconds) {
+ timeout_seconds_ = timeout_seconds;
+ ResetTimeout();
+}
+
+bool Task::TimedOut() {
+ return timeout_seconds_ &&
+ timeout_time_ &&
+ CurrentTime() >= timeout_time_;
+}
+
+void Task::ResetTimeout() {
+ int64 previous_timeout_time = timeout_time_;
+ bool timeout_allowed = (state_ != STATE_INIT)
+ && (state_ != STATE_DONE)
+ && (state_ != STATE_ERROR);
+ if (timeout_seconds_ && timeout_allowed && !timeout_suspended_)
+ timeout_time_ = CurrentTime() +
+ (timeout_seconds_ * kSecToMsec * kMsecTo100ns);
+ else
+ timeout_time_ = 0;
+
+ GetRunner()->UpdateTaskTimeout(this, previous_timeout_time);
+}
+
+void Task::ClearTimeout() {
+ int64 previous_timeout_time = timeout_time_;
+ timeout_time_ = 0;
+ GetRunner()->UpdateTaskTimeout(this, previous_timeout_time);
+}
+
+void Task::SuspendTimeout() {
+ if (!timeout_suspended_) {
+ timeout_suspended_ = true;
+ ResetTimeout();
+ }
+}
+
+void Task::ResumeTimeout() {
+ if (timeout_suspended_) {
+ timeout_suspended_ = false;
+ ResetTimeout();
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/task.h b/chromium/third_party/webrtc/base/task.h
new file mode 100644
index 00000000000..77d767a78c2
--- /dev/null
+++ b/chromium/third_party/webrtc/base/task.h
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TASK_H__
+#define WEBRTC_BASE_TASK_H__
+
+#include <string>
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/taskparent.h"
+
+/////////////////////////////////////////////////////////////////////
+//
+// TASK
+//
+/////////////////////////////////////////////////////////////////////
+//
+// Task is a state machine infrastructure. States are pushed forward by
+// pushing forwards a TaskRunner that holds on to all Tasks. The purpose
+// of Task is threefold:
+//
+// (1) It manages ongoing work on the UI thread. Multitasking without
+// threads, keeping it easy, keeping it real. :-) It does this by
+// organizing a set of states for each task. When you return from your
+// Process*() function, you return an integer for the next state. You do
+// not go onto the next state yourself. Every time you enter a state,
+// you check to see if you can do anything yet. If not, you return
+// STATE_BLOCKED. If you _could_ do anything, do not return
+// STATE_BLOCKED - even if you end up in the same state, return
+// STATE_mysamestate. When you are done, return STATE_DONE and then the
+// task will self-delete sometime afterwards.
+//
+// (2) It helps you avoid all those reentrancy problems when you chain
+// too many triggers on one thread. Basically if you want to tell a task
+// to process something for you, you feed your task some information and
+// then you Wake() it. Don't tell it to process it right away. If it
+// might be working on something as you send it information, you may want
+// to have a queue in the task.
+//
+// (3) Finally it helps manage parent tasks and children. If a parent
+// task gets aborted, all the children tasks are too. The nice thing
+// about this, for example, is if you have one parent task that
+// represents, say, an Xmpp connection, then you can spawn a whole bunch
+// of infinite lifetime child tasks and not worry about cleaning them up.
+// When the parent task goes to STATE_DONE, the task engine will make
+// sure all those children are aborted and get deleted.
+//
+// Notice that Task has a few built-in states, e.g.,
+//
+// STATE_INIT - the task isn't running yet
+// STATE_START - the task is in its first state
+// STATE_RESPONSE - the task is in its second state
+// STATE_DONE - the task is done
+//
+// STATE_ERROR - indicates an error - we should audit the error code in
+// light of any usage of it to see if it should be improved. When I
+// first put down the task stuff I didn't have a good sense of what was
+// needed for Abort and Error, and now the subclasses of Task will ground
+// the design in a stronger way.
+//
+// STATE_NEXT - the first undefined state number. (like WM_USER) - you
+// can start defining more task states there.
+//
+// When you define more task states, just override Process(int state) and
+// add your own switch statement. If you want to delegate to
+// Task::Process, you can effectively delegate to its switch statement.
+// No fancy method pointers or such - this is all just pretty low tech,
+// easy to debug, and fast.
+//
+// Also notice that Task has some primitive built-in timeout functionality.
+//
+// A timeout is defined as "the task stays in STATE_BLOCKED longer than
+// timeout_seconds_."
+//
+// Descendant classes can override this behavior by calling the
+// various protected methods to change the timeout behavior. For
+// instance, a descendant might call SuspendTimeout() when it knows
+// that it isn't waiting for anything that might timeout, but isn't
+// yet in the STATE_DONE state.
+//
+
+namespace rtc {
+
+// Executes a sequence of steps
+class Task : public TaskParent {
+ public:
+ Task(TaskParent *parent);
+ virtual ~Task();
+
+ int32 unique_id() { return unique_id_; }
+
+ void Start();
+ void Step();
+ int GetState() const { return state_; }
+ bool HasError() const { return (GetState() == STATE_ERROR); }
+ bool Blocked() const { return blocked_; }
+ bool IsDone() const { return done_; }
+ int64 ElapsedTime();
+
+ // Called from outside to stop task without any more callbacks
+ void Abort(bool nowake = false);
+
+ bool TimedOut();
+
+ int64 timeout_time() const { return timeout_time_; }
+ int timeout_seconds() const { return timeout_seconds_; }
+ void set_timeout_seconds(int timeout_seconds);
+
+ sigslot::signal0<> SignalTimeout;
+
+ // Called inside the task to signal that the task may be unblocked
+ void Wake();
+
+ protected:
+
+ enum {
+ STATE_BLOCKED = -1,
+ STATE_INIT = 0,
+ STATE_START = 1,
+ STATE_DONE = 2,
+ STATE_ERROR = 3,
+ STATE_RESPONSE = 4,
+ STATE_NEXT = 5, // Subclasses which need more states start here and higher
+ };
+
+ // Called inside to advise that the task should wake and signal an error
+ void Error();
+
+ int64 CurrentTime();
+
+ virtual std::string GetStateName(int state) const;
+ virtual int Process(int state);
+ virtual void Stop();
+ virtual int ProcessStart() = 0;
+ virtual int ProcessResponse() { return STATE_DONE; }
+
+ void ResetTimeout();
+ void ClearTimeout();
+
+ void SuspendTimeout();
+ void ResumeTimeout();
+
+ protected:
+ virtual int OnTimeout() {
+ // by default, we are finished after timing out
+ return STATE_DONE;
+ }
+
+ private:
+ void Done();
+
+ int state_;
+ bool blocked_;
+ bool done_;
+ bool aborted_;
+ bool busy_;
+ bool error_;
+ int64 start_time_;
+ int64 timeout_time_;
+ int timeout_seconds_;
+ bool timeout_suspended_;
+ int32 unique_id_;
+
+ static int32 unique_id_seed_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_TASK_H__
diff --git a/chromium/third_party/webrtc/base/task_unittest.cc b/chromium/third_party/webrtc/base/task_unittest.cc
new file mode 100644
index 00000000000..8831259c60e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/task_unittest.cc
@@ -0,0 +1,545 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_POSIX)
+#include <sys/time.h>
+#endif // WEBRTC_POSIX
+
+// TODO: Remove this once the cause of sporadic failures in these
+// tests is tracked down.
+#include <iostream>
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#endif // WEBRTC_WIN
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/task.h"
+#include "webrtc/base/taskrunner.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+static int64 GetCurrentTime() {
+ return static_cast<int64>(Time()) * 10000;
+}
+
+// feel free to change these numbers. Note that '0' won't work, though
+#define STUCK_TASK_COUNT 5
+#define HAPPY_TASK_COUNT 20
+
+// This is a generic timeout task which, when it signals timeout, will
+// include the unique ID of the task in the signal (we don't use this
+// in production code because we haven't yet had occasion to generate
+// an array of the same type of task).
+
+class IdTimeoutTask : public Task, public sigslot::has_slots<> {
+ public:
+ explicit IdTimeoutTask(TaskParent *parent) : Task(parent) {
+ SignalTimeout.connect(this, &IdTimeoutTask::OnLocalTimeout);
+ }
+
+ sigslot::signal1<const int> SignalTimeoutId;
+ sigslot::signal1<const int> SignalDoneId;
+
+ virtual int ProcessStart() {
+ return STATE_RESPONSE;
+ }
+
+ void OnLocalTimeout() {
+ SignalTimeoutId(unique_id());
+ }
+
+ protected:
+ virtual void Stop() {
+ SignalDoneId(unique_id());
+ Task::Stop();
+ }
+};
+
+class StuckTask : public IdTimeoutTask {
+ public:
+ explicit StuckTask(TaskParent *parent) : IdTimeoutTask(parent) {}
+ virtual int ProcessStart() {
+ return STATE_BLOCKED;
+ }
+};
+
+class HappyTask : public IdTimeoutTask {
+ public:
+ explicit HappyTask(TaskParent *parent) : IdTimeoutTask(parent) {
+ time_to_perform_ = rand() % (STUCK_TASK_COUNT / 2);
+ }
+ virtual int ProcessStart() {
+ if (ElapsedTime() > (time_to_perform_ * 1000 * 10000))
+ return STATE_RESPONSE;
+ else
+ return STATE_BLOCKED;
+ }
+
+ private:
+ int time_to_perform_;
+};
+
+// Simple implementation of a task runner whose clock is rtc::Time()
+// scaled to the 100-ns units that TaskRunner::CurrentTime() expects
+// (the same units that Windows' GetSystemTimeAsFileTime() uses).
+
+class MyTaskRunner : public TaskRunner {
+ public:
+ virtual void WakeTasks() { RunTasks(); }
+ virtual int64 CurrentTime() {
+ return GetCurrentTime();
+ }
+
+ bool timeout_change() const {
+ return timeout_change_;
+ }
+
+ void clear_timeout_change() {
+ timeout_change_ = false;
+ }
+ protected:
+ virtual void OnTimeoutChange() {
+ timeout_change_ = true;
+ }
+ bool timeout_change_;
+};
+
+//
+// this unit test is primarily concerned (for now) with the timeout
+// functionality in tasks. It works as follows:
+//
+// * Create a bunch of tasks, some "stuck" (i.e., guaranteed to time out)
+//   and some "happy" (which finish quickly, before their timeout).
+// * Set the timeout on the "stuck" tasks to some number of seconds between
+//   1 and the number of stuck tasks.
+// * Start all the stuck & happy tasks in random order.
+// * Wait "number of stuck tasks" seconds and make sure everything timed out.
+
+class TaskTest : public sigslot::has_slots<> {
+ public:
+ TaskTest() {}
+
+ // no need to delete any tasks; the task runner owns them
+ ~TaskTest() {}
+
+ void Start() {
+ // create and configure tasks
+ for (int i = 0; i < STUCK_TASK_COUNT; ++i) {
+ stuck_[i].task_ = new StuckTask(&task_runner_);
+ stuck_[i].task_->SignalTimeoutId.connect(this,
+ &TaskTest::OnTimeoutStuck);
+ stuck_[i].timed_out_ = false;
+ stuck_[i].xlat_ = stuck_[i].task_->unique_id();
+ stuck_[i].task_->set_timeout_seconds(i + 1);
+ LOG(LS_INFO) << "Task " << stuck_[i].xlat_ << " created with timeout "
+ << stuck_[i].task_->timeout_seconds();
+ }
+
+ for (int i = 0; i < HAPPY_TASK_COUNT; ++i) {
+ happy_[i].task_ = new HappyTask(&task_runner_);
+ happy_[i].task_->SignalTimeoutId.connect(this,
+ &TaskTest::OnTimeoutHappy);
+ happy_[i].task_->SignalDoneId.connect(this,
+ &TaskTest::OnDoneHappy);
+ happy_[i].timed_out_ = false;
+ happy_[i].xlat_ = happy_[i].task_->unique_id();
+ }
+
+ // start all the tasks in random order
+ int stuck_index = 0;
+ int happy_index = 0;
+ for (int i = 0; i < STUCK_TASK_COUNT + HAPPY_TASK_COUNT; ++i) {
+ if ((stuck_index < STUCK_TASK_COUNT) &&
+ (happy_index < HAPPY_TASK_COUNT)) {
+ if (rand() % 2 == 1) {
+ stuck_[stuck_index++].task_->Start();
+ } else {
+ happy_[happy_index++].task_->Start();
+ }
+ } else if (stuck_index < STUCK_TASK_COUNT) {
+ stuck_[stuck_index++].task_->Start();
+ } else {
+ happy_[happy_index++].task_->Start();
+ }
+ }
+
+ for (int i = 0; i < STUCK_TASK_COUNT; ++i) {
+ std::cout << "Stuck task #" << i << " timeout is " <<
+ stuck_[i].task_->timeout_seconds() << " at " <<
+ stuck_[i].task_->timeout_time() << std::endl;
+ }
+
+ // just a little self-check to make sure we started all the tasks
+ ASSERT_EQ(STUCK_TASK_COUNT, stuck_index);
+ ASSERT_EQ(HAPPY_TASK_COUNT, happy_index);
+
+ // run the unblocked tasks
+ LOG(LS_INFO) << "Running tasks";
+ task_runner_.RunTasks();
+
+ std::cout << "Start time is " << GetCurrentTime() << std::endl;
+
+ // give all the stuck tasks time to timeout
+ for (int i = 0; !task_runner_.AllChildrenDone() && i < STUCK_TASK_COUNT;
+ ++i) {
+ Thread::Current()->ProcessMessages(1000);
+ for (int j = 0; j < HAPPY_TASK_COUNT; ++j) {
+ if (happy_[j].task_) {
+ happy_[j].task_->Wake();
+ }
+ }
+ LOG(LS_INFO) << "Polling tasks";
+ task_runner_.PollTasks();
+ }
+
+    // We see occasional test failures here due to the stuck tasks not having
+    // timed out yet, which seems like it should be impossible. To help track
+ // this down we have added logging of the timing information, which we send
+ // directly to stdout so that we get it in opt builds too.
+ std::cout << "End time is " << GetCurrentTime() << std::endl;
+ }
+
+ void OnTimeoutStuck(const int id) {
+ LOG(LS_INFO) << "Timed out task " << id;
+
+ int i;
+ for (i = 0; i < STUCK_TASK_COUNT; ++i) {
+ if (stuck_[i].xlat_ == id) {
+ stuck_[i].timed_out_ = true;
+ stuck_[i].task_ = NULL;
+ break;
+ }
+ }
+
+ // getting a bad ID here is a failure, but let's continue
+ // running to see what else might go wrong
+ EXPECT_LT(i, STUCK_TASK_COUNT);
+ }
+
+ void OnTimeoutHappy(const int id) {
+ int i;
+ for (i = 0; i < HAPPY_TASK_COUNT; ++i) {
+ if (happy_[i].xlat_ == id) {
+ happy_[i].timed_out_ = true;
+ happy_[i].task_ = NULL;
+ break;
+ }
+ }
+
+ // getting a bad ID here is a failure, but let's continue
+ // running to see what else might go wrong
+ EXPECT_LT(i, HAPPY_TASK_COUNT);
+ }
+
+ void OnDoneHappy(const int id) {
+ int i;
+ for (i = 0; i < HAPPY_TASK_COUNT; ++i) {
+ if (happy_[i].xlat_ == id) {
+ happy_[i].task_ = NULL;
+ break;
+ }
+ }
+
+ // getting a bad ID here is a failure, but let's continue
+ // running to see what else might go wrong
+ EXPECT_LT(i, HAPPY_TASK_COUNT);
+ }
+
+ void check_passed() {
+ EXPECT_TRUE(task_runner_.AllChildrenDone());
+
+ // make sure none of our happy tasks timed out
+ for (int i = 0; i < HAPPY_TASK_COUNT; ++i) {
+ EXPECT_FALSE(happy_[i].timed_out_);
+ }
+
+ // make sure all of our stuck tasks timed out
+ for (int i = 0; i < STUCK_TASK_COUNT; ++i) {
+ EXPECT_TRUE(stuck_[i].timed_out_);
+ if (!stuck_[i].timed_out_) {
+ std::cout << "Stuck task #" << i << " timeout is at "
+ << stuck_[i].task_->timeout_time() << std::endl;
+ }
+ }
+
+ std::cout.flush();
+ }
+
+ private:
+ struct TaskInfo {
+ IdTimeoutTask *task_;
+ bool timed_out_;
+ int xlat_;
+ };
+
+ MyTaskRunner task_runner_;
+ TaskInfo stuck_[STUCK_TASK_COUNT];
+ TaskInfo happy_[HAPPY_TASK_COUNT];
+};
+
+TEST(start_task_test, Timeout) {
+ TaskTest task_test;
+ task_test.Start();
+ task_test.check_passed();
+}
+
+// Test for aborting the task while it is running
+
+class AbortTask : public Task {
+ public:
+ explicit AbortTask(TaskParent *parent) : Task(parent) {
+ set_timeout_seconds(1);
+ }
+
+ virtual int ProcessStart() {
+ Abort();
+ return STATE_NEXT;
+ }
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(AbortTask);
+};
+
+class TaskAbortTest : public sigslot::has_slots<> {
+ public:
+ TaskAbortTest() {}
+
+ // no need to delete any tasks; the task runner owns them
+ ~TaskAbortTest() {}
+
+ void Start() {
+ Task *abort_task = new AbortTask(&task_runner_);
+ abort_task->SignalTimeout.connect(this, &TaskAbortTest::OnTimeout);
+ abort_task->Start();
+
+ // run the task
+ task_runner_.RunTasks();
+ }
+
+ private:
+ void OnTimeout() {
+ FAIL() << "Task timed out instead of aborting.";
+ }
+
+ MyTaskRunner task_runner_;
+ DISALLOW_EVIL_CONSTRUCTORS(TaskAbortTest);
+};
+
+TEST(start_task_test, Abort) {
+ TaskAbortTest abort_test;
+ abort_test.Start();
+}
+
+// Test for aborting a task to verify that it does the Wake operation
+// which gets it deleted.
+
+class SetBoolOnDeleteTask : public Task {
+ public:
+ SetBoolOnDeleteTask(TaskParent *parent, bool *set_when_deleted)
+ : Task(parent),
+ set_when_deleted_(set_when_deleted) {
+ EXPECT_TRUE(NULL != set_when_deleted);
+ EXPECT_FALSE(*set_when_deleted);
+ }
+
+ virtual ~SetBoolOnDeleteTask() {
+ *set_when_deleted_ = true;
+ }
+
+ virtual int ProcessStart() {
+ return STATE_BLOCKED;
+ }
+
+ private:
+ bool* set_when_deleted_;
+ DISALLOW_EVIL_CONSTRUCTORS(SetBoolOnDeleteTask);
+};
+
+class AbortShouldWakeTest : public sigslot::has_slots<> {
+ public:
+ AbortShouldWakeTest() {}
+
+ // no need to delete any tasks; the task runner owns them
+ ~AbortShouldWakeTest() {}
+
+ void Start() {
+ bool task_deleted = false;
+ Task *task_to_abort = new SetBoolOnDeleteTask(&task_runner_, &task_deleted);
+ task_to_abort->Start();
+
+ // Task::Abort() should call TaskRunner::WakeTasks(). WakeTasks calls
+ // TaskRunner::RunTasks() immediately which should delete the task.
+ task_to_abort->Abort();
+ EXPECT_TRUE(task_deleted);
+
+ if (!task_deleted) {
+ // avoid a crash (due to referencing a local variable)
+ // if the test fails.
+ task_runner_.RunTasks();
+ }
+ }
+
+ private:
+ void OnTimeout() {
+ FAIL() << "Task timed out instead of aborting.";
+ }
+
+ MyTaskRunner task_runner_;
+ DISALLOW_EVIL_CONSTRUCTORS(AbortShouldWakeTest);
+};
+
+TEST(start_task_test, AbortShouldWake) {
+ AbortShouldWakeTest abort_should_wake_test;
+ abort_should_wake_test.Start();
+}
+
+// Validate that TaskRunner's OnTimeoutChange gets called appropriately:
+// * When a task calls UpdateTaskTimeout
+// * When the next timeout task's timeout time passes (i.e. it times out)
+class TimeoutChangeTest : public sigslot::has_slots<> {
+ public:
+ TimeoutChangeTest()
+ : task_count_(ARRAY_SIZE(stuck_tasks_)) {}
+
+ // no need to delete any tasks; the task runner owns them
+ ~TimeoutChangeTest() {}
+
+ void Start() {
+ for (int i = 0; i < task_count_; ++i) {
+ stuck_tasks_[i] = new StuckTask(&task_runner_);
+ stuck_tasks_[i]->set_timeout_seconds(i + 2);
+ stuck_tasks_[i]->SignalTimeoutId.connect(this,
+ &TimeoutChangeTest::OnTimeoutId);
+ }
+
+ for (int i = task_count_ - 1; i >= 0; --i) {
+ stuck_tasks_[i]->Start();
+ }
+ task_runner_.clear_timeout_change();
+
+ // At this point, our timeouts are set as follows
+ // task[0] is 2 seconds, task[1] at 3 seconds, etc.
+
+ stuck_tasks_[0]->set_timeout_seconds(2);
+ // Now, task[0] is 2 seconds, task[1] at 3 seconds...
+ // so timeout change shouldn't be called.
+ EXPECT_FALSE(task_runner_.timeout_change());
+ task_runner_.clear_timeout_change();
+
+ stuck_tasks_[0]->set_timeout_seconds(1);
+    // Now task[0] is at 1 second, task[1] at 3 seconds...
+    // The smallest timeout got smaller, so timeout change should be called.
+ EXPECT_TRUE(task_runner_.timeout_change());
+ task_runner_.clear_timeout_change();
+
+ stuck_tasks_[1]->set_timeout_seconds(2);
+    // Now task[0] is at 1 second, task[1] at 2 seconds...
+    // The smallest timeout is still 1 second, so no timeout change.
+ EXPECT_FALSE(task_runner_.timeout_change());
+ task_runner_.clear_timeout_change();
+
+ while (task_count_ > 0) {
+ int previous_count = task_count_;
+ task_runner_.PollTasks();
+ if (previous_count != task_count_) {
+ // We only get here when a task times out. When that
+ // happens, the timeout change should get called because
+ // the smallest timeout is now in the past.
+ EXPECT_TRUE(task_runner_.timeout_change());
+ task_runner_.clear_timeout_change();
+ }
+ Thread::Current()->socketserver()->Wait(500, false);
+ }
+ }
+
+ private:
+ void OnTimeoutId(const int id) {
+ for (int i = 0; i < ARRAY_SIZE(stuck_tasks_); ++i) {
+ if (stuck_tasks_[i] && stuck_tasks_[i]->unique_id() == id) {
+ task_count_--;
+ stuck_tasks_[i] = NULL;
+ break;
+ }
+ }
+ }
+
+ MyTaskRunner task_runner_;
+  StuckTask* stuck_tasks_[3];
+ int task_count_;
+ DISALLOW_EVIL_CONSTRUCTORS(TimeoutChangeTest);
+};
+
+TEST(start_task_test, TimeoutChange) {
+ TimeoutChangeTest timeout_change_test;
+ timeout_change_test.Start();
+}
+
+class DeleteTestTaskRunner : public TaskRunner {
+ public:
+ DeleteTestTaskRunner() {
+ }
+ virtual void WakeTasks() { }
+ virtual int64 CurrentTime() {
+ return GetCurrentTime();
+ }
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(DeleteTestTaskRunner);
+};
+
+TEST(unstarted_task_test, DeleteTask) {
+ // This test ensures that we don't
+ // crash if a task is deleted without running it.
+ DeleteTestTaskRunner task_runner;
+ HappyTask* happy_task = new HappyTask(&task_runner);
+ happy_task->Start();
+
+ // try deleting the task directly
+ HappyTask* child_happy_task = new HappyTask(happy_task);
+ delete child_happy_task;
+
+ // run the unblocked tasks
+ task_runner.RunTasks();
+}
+
+TEST(unstarted_task_test, DoNotDeleteTask1) {
+ // This test ensures that we don't
+ // crash if a task runner is deleted without
+ // running a certain task.
+ DeleteTestTaskRunner task_runner;
+ HappyTask* happy_task = new HappyTask(&task_runner);
+ happy_task->Start();
+
+ HappyTask* child_happy_task = new HappyTask(happy_task);
+ child_happy_task->Start();
+
+ // Never run the tasks
+}
+
+TEST(unstarted_task_test, DoNotDeleteTask2) {
+ // This test ensures that we don't
+  // crash if a task runner is deleted with a
+ // task that has never been started.
+ DeleteTestTaskRunner task_runner;
+ HappyTask* happy_task = new HappyTask(&task_runner);
+ happy_task->Start();
+
+ // Do not start the task.
+ // Note: this leaks memory, so don't do this.
+ // Instead, always run your tasks or delete them.
+ new HappyTask(happy_task);
+
+ // run the unblocked tasks
+ task_runner.RunTasks();
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/taskparent.cc b/chromium/third_party/webrtc/base/taskparent.cc
new file mode 100644
index 00000000000..edc146fd227
--- /dev/null
+++ b/chromium/third_party/webrtc/base/taskparent.cc
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "webrtc/base/taskparent.h"
+
+#include "webrtc/base/task.h"
+#include "webrtc/base/taskrunner.h"
+
+namespace rtc {
+
+TaskParent::TaskParent(Task* derived_instance, TaskParent *parent)
+ : parent_(parent) {
+ ASSERT(derived_instance != NULL);
+ ASSERT(parent != NULL);
+ runner_ = parent->GetRunner();
+ parent_->AddChild(derived_instance);
+ Initialize();
+}
+
+TaskParent::TaskParent(TaskRunner *derived_instance)
+ : parent_(NULL),
+ runner_(derived_instance) {
+ ASSERT(derived_instance != NULL);
+ Initialize();
+}
+
+// Does common initialization of member variables
+void TaskParent::Initialize() {
+ children_.reset(new ChildSet());
+ child_error_ = false;
+}
+
+void TaskParent::AddChild(Task *child) {
+ children_->insert(child);
+}
+
+#ifdef _DEBUG
+bool TaskParent::IsChildTask(Task *task) {
+ ASSERT(task != NULL);
+ return task->parent_ == this && children_->find(task) != children_->end();
+}
+#endif
+
+bool TaskParent::AllChildrenDone() {
+ for (ChildSet::iterator it = children_->begin();
+ it != children_->end();
+ ++it) {
+ if (!(*it)->IsDone())
+ return false;
+ }
+ return true;
+}
+
+bool TaskParent::AnyChildError() {
+ return child_error_;
+}
+
+void TaskParent::AbortAllChildren() {
+ if (children_->size() > 0) {
+#ifdef _DEBUG
+ runner_->IncrementAbortCount();
+#endif
+
+ ChildSet copy = *children_;
+ for (ChildSet::iterator it = copy.begin(); it != copy.end(); ++it) {
+ (*it)->Abort(true); // Note we do not wake
+ }
+
+#ifdef _DEBUG
+ runner_->DecrementAbortCount();
+#endif
+ }
+}
+
+void TaskParent::OnStopped(Task *task) {
+ AbortAllChildren();
+ parent_->OnChildStopped(task);
+}
+
+void TaskParent::OnChildStopped(Task *child) {
+ if (child->HasError())
+ child_error_ = true;
+ children_->erase(child);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/taskparent.h b/chromium/third_party/webrtc/base/taskparent.h
new file mode 100644
index 00000000000..a3832024e16
--- /dev/null
+++ b/chromium/third_party/webrtc/base/taskparent.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TASKPARENT_H__
+#define WEBRTC_BASE_TASKPARENT_H__
+
+#include <set>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace rtc {
+
+class Task;
+class TaskRunner;
+
+class TaskParent {
+ public:
+ TaskParent(Task *derived_instance, TaskParent *parent);
+ explicit TaskParent(TaskRunner *derived_instance);
+ virtual ~TaskParent() { }
+
+ TaskParent *GetParent() { return parent_; }
+ TaskRunner *GetRunner() { return runner_; }
+
+ bool AllChildrenDone();
+ bool AnyChildError();
+#ifdef _DEBUG
+ bool IsChildTask(Task *task);
+#endif
+
+ protected:
+ void OnStopped(Task *task);
+ void AbortAllChildren();
+ TaskParent *parent() {
+ return parent_;
+ }
+
+ private:
+ void Initialize();
+ void OnChildStopped(Task *child);
+ void AddChild(Task *child);
+
+ TaskParent *parent_;
+ TaskRunner *runner_;
+ bool child_error_;
+ typedef std::set<Task *> ChildSet;
+ scoped_ptr<ChildSet> children_;
+ DISALLOW_EVIL_CONSTRUCTORS(TaskParent);
+};
+
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_TASKPARENT_H__
diff --git a/chromium/third_party/webrtc/base/taskrunner.cc b/chromium/third_party/webrtc/base/taskrunner.cc
new file mode 100644
index 00000000000..bc4ab5e44fc
--- /dev/null
+++ b/chromium/third_party/webrtc/base/taskrunner.cc
@@ -0,0 +1,224 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "webrtc/base/taskrunner.h"
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/task.h"
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+TaskRunner::TaskRunner()
+ : TaskParent(this),
+ next_timeout_task_(NULL),
+ tasks_running_(false)
+#ifdef _DEBUG
+ , abort_count_(0),
+ deleting_task_(NULL)
+#endif
+{
+}
+
+TaskRunner::~TaskRunner() {
+ // this kills and deletes children silently!
+ AbortAllChildren();
+ InternalRunTasks(true);
+}
+
+void TaskRunner::StartTask(Task * task) {
+ tasks_.push_back(task);
+
+  // the task we just started could be about to time out --
+  // make sure our "next timeout task" is correct
+ UpdateTaskTimeout(task, 0);
+
+ WakeTasks();
+}
+
+void TaskRunner::RunTasks() {
+ InternalRunTasks(false);
+}
+
+void TaskRunner::InternalRunTasks(bool in_destructor) {
+ // This shouldn't run while an abort is happening.
+ // If that occurs, then tasks may be deleted in this method,
+ // but pointers to them will still be in the
+ // "ChildSet copy" in TaskParent::AbortAllChildren.
+ // Subsequent use of those task may cause data corruption or crashes.
+ ASSERT(!abort_count_);
+ // Running continues until all tasks are Blocked (ok for a small # of tasks)
+ if (tasks_running_) {
+ return; // don't reenter
+ }
+
+ tasks_running_ = true;
+
+ int64 previous_timeout_time = next_task_timeout();
+
+  bool did_run = true;
+ while (did_run) {
+ did_run = false;
+ // use indexing instead of iterators because tasks_ may grow
+ for (size_t i = 0; i < tasks_.size(); ++i) {
+ while (!tasks_[i]->Blocked()) {
+ tasks_[i]->Step();
+ did_run = true;
+ }
+ }
+ }
+ // Tasks are deleted when running has paused
+ bool need_timeout_recalc = false;
+ for (size_t i = 0; i < tasks_.size(); ++i) {
+ if (tasks_[i]->IsDone()) {
+ Task* task = tasks_[i];
+ if (next_timeout_task_ &&
+ task->unique_id() == next_timeout_task_->unique_id()) {
+ next_timeout_task_ = NULL;
+ need_timeout_recalc = true;
+ }
+
+#ifdef _DEBUG
+ deleting_task_ = task;
+#endif
+ delete task;
+#ifdef _DEBUG
+ deleting_task_ = NULL;
+#endif
+ tasks_[i] = NULL;
+ }
+ }
+ // Finally, remove nulls
+ std::vector<Task *>::iterator it;
+ it = std::remove(tasks_.begin(),
+ tasks_.end(),
+ reinterpret_cast<Task *>(NULL));
+
+ tasks_.erase(it, tasks_.end());
+
+ if (need_timeout_recalc)
+ RecalcNextTimeout(NULL);
+
+ // Make sure that adjustments are done to account
+ // for any timeout changes (but don't call this
+ // while being destroyed since it calls a pure virtual function).
+ if (!in_destructor)
+ CheckForTimeoutChange(previous_timeout_time);
+
+ tasks_running_ = false;
+}
+
+void TaskRunner::PollTasks() {
+  // See if our "next potentially timed-out task" has indeed timed out.
+  // If it has, wake it up, then queue up the next task in line.
+  // Repeat while we have new timed-out tasks.
+ // TODO: We need to guard against WakeTasks not updating
+ // next_timeout_task_. Maybe also add documentation in the header file once
+ // we understand this code better.
+ Task* old_timeout_task = NULL;
+ while (next_timeout_task_ &&
+ old_timeout_task != next_timeout_task_ &&
+ next_timeout_task_->TimedOut()) {
+ old_timeout_task = next_timeout_task_;
+ next_timeout_task_->Wake();
+ WakeTasks();
+ }
+}
+
+int64 TaskRunner::next_task_timeout() const {
+ if (next_timeout_task_) {
+ return next_timeout_task_->timeout_time();
+ }
+ return 0;
+}
+
+// This function gets called frequently -- when each task changes
+// state to something other than DONE, ERROR or BLOCKED, it calls
+// ResetTimeout(), which will call this function to make sure that
+// the next timeout-able task hasn't changed. The logic in this function
+// prevents RecalcNextTimeout() from getting called in most cases,
+// effectively making the task scheduler O(1) instead of O(N).
+
+void TaskRunner::UpdateTaskTimeout(Task* task,
+ int64 previous_task_timeout_time) {
+ ASSERT(task != NULL);
+ int64 previous_timeout_time = next_task_timeout();
+ bool task_is_timeout_task = next_timeout_task_ != NULL &&
+ task->unique_id() == next_timeout_task_->unique_id();
+ if (task_is_timeout_task) {
+ previous_timeout_time = previous_task_timeout_time;
+ }
+
+ // if the relevant task has a timeout, then
+ // check to see if it's closer than the current
+ // "about to timeout" task
+ if (task->timeout_time()) {
+ if (next_timeout_task_ == NULL ||
+ (task->timeout_time() <= next_timeout_task_->timeout_time())) {
+ next_timeout_task_ = task;
+ }
+ } else if (task_is_timeout_task) {
+ // otherwise, if the task doesn't have a timeout,
+ // and it used to be our "about to timeout" task,
+ // walk through all the tasks looking for the real
+ // "about to timeout" task
+ RecalcNextTimeout(task);
+ }
+
+  // Note: when tasks_running_ is true, the running routine
+  // (TaskRunner::InternalRunTasks) is responsible for calling
+  // CheckForTimeoutChange.
+ if (!tasks_running_) {
+ CheckForTimeoutChange(previous_timeout_time);
+ }
+}
+
+void TaskRunner::RecalcNextTimeout(Task *exclude_task) {
+ // walk through all the tasks looking for the one
+ // which satisfies the following:
+ // it's not finished already
+ // we're not excluding it
+ // it has the closest timeout time
+
+ int64 next_timeout_time = 0;
+ next_timeout_task_ = NULL;
+
+ for (size_t i = 0; i < tasks_.size(); ++i) {
+ Task *task = tasks_[i];
+ // if the task isn't complete, and it actually has a timeout time
+ if (!task->IsDone() && (task->timeout_time() > 0))
+ // if it doesn't match our "exclude" task
+ if (exclude_task == NULL ||
+ exclude_task->unique_id() != task->unique_id())
+ // if its timeout time is sooner than our current timeout time
+ if (next_timeout_time == 0 ||
+ task->timeout_time() <= next_timeout_time) {
+ // set this task as our next-to-timeout
+ next_timeout_time = task->timeout_time();
+ next_timeout_task_ = task;
+ }
+ }
+}
+
+void TaskRunner::CheckForTimeoutChange(int64 previous_timeout_time) {
+ int64 next_timeout = next_task_timeout();
+ bool timeout_change = (previous_timeout_time == 0 && next_timeout != 0) ||
+ next_timeout < previous_timeout_time ||
+ (previous_timeout_time <= CurrentTime() &&
+ previous_timeout_time != next_timeout);
+ if (timeout_change) {
+ OnTimeoutChange();
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/taskrunner.h b/chromium/third_party/webrtc/base/taskrunner.h
new file mode 100644
index 00000000000..629c2d3ac13
--- /dev/null
+++ b/chromium/third_party/webrtc/base/taskrunner.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TASKRUNNER_H__
+#define WEBRTC_BASE_TASKRUNNER_H__
+
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/taskparent.h"
+
+namespace rtc {
+class Task;
+
+const int64 kSecToMsec = 1000;
+const int64 kMsecTo100ns = 10000;
+const int64 kSecTo100ns = kSecToMsec * kMsecTo100ns;
+
+class TaskRunner : public TaskParent, public sigslot::has_slots<> {
+ public:
+ TaskRunner();
+ virtual ~TaskRunner();
+
+ virtual void WakeTasks() = 0;
+
+  // Returns the current time in 100ns units. It is used for
+  // determining timeouts. The origin is not important; only the units
+  // matter, and the value must not roll over while the computer is
+  // running.
+  //
+  // On Windows, GetSystemTimeAsFileTime is the typical implementation.
+  virtual int64 CurrentTime() = 0;
+
+ void StartTask(Task *task);
+ void RunTasks();
+ void PollTasks();
+
+ void UpdateTaskTimeout(Task *task, int64 previous_task_timeout_time);
+
+#ifdef _DEBUG
+ bool is_ok_to_delete(Task* task) {
+ return task == deleting_task_;
+ }
+
+ void IncrementAbortCount() {
+ ++abort_count_;
+ }
+
+ void DecrementAbortCount() {
+ --abort_count_;
+ }
+#endif
+
+ // Returns the next absolute time when a task times out
+ // OR "0" if there is no next timeout.
+ int64 next_task_timeout() const;
+
+ protected:
+  // The primary usage of this method is to know if
+  // a callback timer needs to be set up or adjusted.
+  // This method will be called
+  // * when the next_task_timeout() becomes a smaller value OR
+  // * when next_task_timeout() has changed values and the previous
+  //   value is in the past.
+  //
+  // If the next_task_timeout moves to the future, this method will *not*
+  // get called (because the subclass should check next_task_timeout()
+  // when its timer goes off to see if it needs to set up a new timer).
+  //
+  // Note that this may be called conservatively, i.e. it may be
+  // called even when no timeout change has actually happened.
+ virtual void OnTimeoutChange() {
+ // by default, do nothing.
+ }
+
+ private:
+ void InternalRunTasks(bool in_destructor);
+ void CheckForTimeoutChange(int64 previous_timeout_time);
+
+ std::vector<Task *> tasks_;
+ Task *next_timeout_task_;
+ bool tasks_running_;
+#ifdef _DEBUG
+ int abort_count_;
+ Task* deleting_task_;
+#endif
+
+ void RecalcNextTimeout(Task *exclude_task);
+};
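+
+// For illustration, a TaskRunner subclass satisfying the CurrentTime()
+// and OnTimeoutChange() contracts above might look like the sketch below
+// (ThreadTaskRunner is a hypothetical name; the clock conversion assumes
+// rtc::Time() returns milliseconds):
+//
+//   class ThreadTaskRunner : public TaskRunner {
+//    public:
+//     virtual void WakeTasks() { RunTasks(); }
+//     virtual int64 CurrentTime() {
+//       // Convert milliseconds to the 100-ns units CurrentTime() must
+//       // return.
+//       return static_cast<int64>(Time()) * kMsecTo100ns;
+//     }
+//    protected:
+//     virtual void OnTimeoutChange() {
+//       // The next timeout moved closer (or into the past); a real
+//       // implementation would (re)arm its wake-up timer here and then
+//       // call PollTasks() when that timer fires.
+//     }
+//   };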
+
+} // namespace rtc
+
+#endif  // WEBRTC_BASE_TASKRUNNER_H__
diff --git a/chromium/third_party/webrtc/base/template_util.h b/chromium/third_party/webrtc/base/template_util.h
new file mode 100644
index 00000000000..f0bf39c5f9d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/template_util.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2011 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TEMPLATE_UTIL_H_
+#define WEBRTC_BASE_TEMPLATE_UTIL_H_
+
+#include <stddef.h> // For size_t.
+
+namespace rtc {
+
+// template definitions from tr1
+
+template<class T, T v>
+struct integral_constant {
+ static const T value = v;
+ typedef T value_type;
+ typedef integral_constant<T, v> type;
+};
+
+template <class T, T v> const T integral_constant<T, v>::value;
+
+typedef integral_constant<bool, true> true_type;
+typedef integral_constant<bool, false> false_type;
+
+template <class T> struct is_pointer : false_type {};
+template <class T> struct is_pointer<T*> : true_type {};
+
+template <class T, class U> struct is_same : public false_type {};
+template <class T> struct is_same<T,T> : true_type {};
+
+template<class> struct is_array : public false_type {};
+template<class T, size_t n> struct is_array<T[n]> : public true_type {};
+template<class T> struct is_array<T[]> : public true_type {};
+
+template <class T> struct is_non_const_reference : false_type {};
+template <class T> struct is_non_const_reference<T&> : true_type {};
+template <class T> struct is_non_const_reference<const T&> : false_type {};
+
+template <class T> struct is_void : false_type {};
+template <> struct is_void<void> : true_type {};
+
+namespace internal {
+
+// Types YesType and NoType are guaranteed such that sizeof(YesType) <
+// sizeof(NoType).
+typedef char YesType;
+
+struct NoType {
+ YesType dummy[2];
+};
+
+// This class is an implementation detail for is_convertible, and you
+// don't need to know how it works to use is_convertible. For those
+// who care: we declare two different functions, one whose argument is
+// of type To and one with a variadic argument list. We give them
+// return types of different size, so we can use sizeof to trick the
+// compiler into telling us which function it would have chosen if we
+// had called it with an argument of type From. See Alexandrescu's
+// _Modern C++ Design_ for more details on this sort of trick.
+
+struct ConvertHelper {
+ template <typename To>
+ static YesType Test(To);
+
+ template <typename To>
+ static NoType Test(...);
+
+ template <typename From>
+ static From& Create();
+};
+
+// Used to determine if a type is a struct/union/class. Inspired by Boost's
+// is_class type_trait implementation.
+struct IsClassHelper {
+ template <typename C>
+ static YesType Test(void(C::*)(void));
+
+ template <typename C>
+ static NoType Test(...);
+};
+
+} // namespace internal
+
+// Inherits from true_type if From is convertible to To, false_type otherwise.
+//
+// Note that if the type is convertible, this will be a true_type REGARDLESS
+// of whether or not the conversion would emit a warning.
+template <typename From, typename To>
+struct is_convertible
+ : integral_constant<bool,
+ sizeof(internal::ConvertHelper::Test<To>(
+ internal::ConvertHelper::Create<From>())) ==
+ sizeof(internal::YesType)> {
+};
+
+template <typename T>
+struct is_class
+ : integral_constant<bool,
+ sizeof(internal::IsClassHelper::Test<T>(0)) ==
+ sizeof(internal::YesType)> {
+};
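+
+// For illustration, a typical compile-time use of these traits might look
+// like this (Base, Derived and TraitExamples are hypothetical names):
+//
+//   struct Base {};
+//   struct Derived : Base {};
+//
+//   void TraitExamples() {
+//     // Each ::value below is a compile-time constant.
+//     bool ptr_convertible = is_convertible<Derived*, Base*>::value;  // true
+//     bool int_is_class = is_class<int>::value;                       // false
+//     bool base_is_class = is_class<Base>::value;                     // true
+//     (void)ptr_convertible; (void)int_is_class; (void)base_is_class;
+//   }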
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_TEMPLATE_UTIL_H_
diff --git a/chromium/third_party/webrtc/base/testbase64.h b/chromium/third_party/webrtc/base/testbase64.h
new file mode 100644
index 00000000000..39dd00ce389
--- /dev/null
+++ b/chromium/third_party/webrtc/base/testbase64.h
@@ -0,0 +1,5 @@
+/* This file was generated by googleclient/talk/binary2header.sh */
+
+static unsigned char testbase64[] = {
+0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01, 0x02, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xe1, 0x0d, 0x07, 0x45, 0x78, 0x69, 0x66, 0x00, 0x00, 0x4d, 0x4d, 0x00, 0x2a, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0c, 0x01, 0x0e, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x9e, 0x01, 0x0f, 0x00, 0x02, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0xbe, 0x01, 0x10, 0x00, 0x02, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0xc3, 0x01, 0x12, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x1a, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xcc, 0x01, 0x1b, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xd4, 0x01, 0x28, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 0x00, 0x01, 0x31, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xdc, 0x01, 0x32, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xf0, 0x01, 0x3c, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x01, 0x04, 0x02, 0x13, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 0x00, 0x87, 0x69, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x01, 0x14, 0x00, 0x00, 0x02, 0xc4, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, 0x53, 0x4f, 0x4e, 0x59, 0x00, 0x44, 0x53, 0x43, 0x2d, 0x50, 0x32, 0x30, 0x30, 0x00, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x41, 0x64, 0x6f, 0x62, 0x65, 0x20, 0x50, 0x68, 0x6f, 0x74, 0x6f, 0x73, 0x68, 0x6f, 0x70, 0x20, 0x37, 0x2e, 0x30, 0x00, 0x32, 0x30, 0x30, 0x37, 0x3a, 0x30, 0x31, 0x3a, 0x33, 0x30, 0x20, 0x32, 0x33, 0x3a, 0x31, 0x30, 0x3a, 0x30, 0x34, 0x00, 0x4d, 0x61, 0x63, 0x20, 0x4f, 0x53, 0x20, 0x58, 0x20, 0x31, 0x30, 0x2e, 0x34, 0x2e, 0x38, 0x00, 0x00, 0x1c, 0x82, 0x9a, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02, 0x6a, 0x82, 0x9d, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02, 0x72, 0x88, 0x22, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 0x00, 0x88, 0x27, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x64, 0x00, 0x00, 0x90, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x04, 0x30, 0x32, 0x32, 0x30, 0x90, 0x03, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x02, 0x7a, 0x90, 0x04, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x02, 0x8e, 0x91, 0x01, 0x00, 0x07, 0x00, 0x00, 0x00, 0x04, 0x01, 0x02, 0x03, 0x00, 0x91, 0x02, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02, 0xa2, 0x92, 0x04, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02, 0xaa, 0x92, 0x05, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02, 0xb2, 0x92, 0x07, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x05, 0x00, 0x00, 0x92, 0x08, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x92, 0x09, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x0f, 0x00, 0x00, 0x92, 0x0a, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02, 0xba, 0xa0, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x04, 0x30, 0x31, 0x30, 0x30, 0xa0, 0x01, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0xff, 0xff, 0x00, 0x00, 0xa0, 0x02, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x64, 0xa0, 0x03, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x64, 0xa3, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x01, 0x03, 0x00, 0x00, 0x00, 0xa3, 0x01, 0x00, 0x07, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0xa4, 0x01, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0xa4, 0x02, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0xa4, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 
0x00, 0x00, 0x00, 0x00, 0xa4, 0x06, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0xa4, 0x08, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0xa4, 0x09, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0xa4, 0x0a, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x01, 0x90, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x0a, 0x32, 0x30, 0x30, 0x37, 0x3a, 0x30, 0x31, 0x3a, 0x32, 0x30, 0x20, 0x32, 0x33, 0x3a, 0x30, 0x35, 0x3a, 0x35, 0x32, 0x00, 0x32, 0x30, 0x30, 0x37, 0x3a, 0x30, 0x31, 0x3a, 0x32, 0x30, 0x20, 0x32, 0x33, 0x3a, 0x30, 0x35, 0x3a, 0x35, 0x32, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x4f, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x06, 0x01, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x06, 0x00, 0x00, 0x01, 0x1a, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x03, 0x12, 0x01, 0x1b, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x03, 0x1a, 0x01, 0x28, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 0x00, 0x02, 0x01, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x03, 0x22, 0x02, 0x02, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x09, 0xdd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01, 0x02, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xed, 0x00, 0x0c, 0x41, 0x64, 0x6f, 0x62, 0x65, 0x5f, 0x43, 0x4d, 0x00, 0x02, 0xff, 0xee, 0x00, 0x0e, 0x41, 0x64, 0x6f, 0x62, 0x65, 0x00, 0x64, 0x80, 0x00, 0x00, 0x00, 0x01, 0xff, 0xdb, 0x00, 0x84, 0x00, 0x0c, 0x08, 0x08, 0x08, 0x09, 0x08, 0x0c, 0x09, 0x09, 0x0c, 0x11, 0x0b, 0x0a, 0x0b, 0x11, 0x15, 0x0f, 0x0c, 0x0c, 0x0f, 0x15, 0x18, 0x13, 0x13, 0x15, 0x13, 0x13, 0x18, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x01, 0x0d, 0x0b, 0x0b, 0x0d, 0x0e, 0x0d, 0x10, 0x0e, 0x0e, 0x10, 0x14, 0x0e, 0x0e, 0x0e, 0x14, 0x14, 0x0e, 0x0e, 0x0e, 0x0e, 0x14, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x11, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0xff, 0xc0, 0x00, 0x11, 0x08, 0x00, 0x64, 0x00, 0x64, 0x03, 0x01, 0x22, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xdd, 0x00, 0x04, 0x00, 0x07, 0xff, 0xc4, 0x01, 0x3f, 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x01, 0x02, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x01, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x10, 0x00, 0x01, 0x04, 0x01, 0x03, 0x02, 0x04, 0x02, 0x05, 0x07, 0x06, 0x08, 0x05, 0x03, 0x0c, 0x33, 0x01, 0x00, 0x02, 0x11, 0x03, 0x04, 0x21, 0x12, 0x31, 0x05, 0x41, 0x51, 0x61, 0x13, 0x22, 0x71, 0x81, 0x32, 0x06, 0x14, 0x91, 0xa1, 0xb1, 0x42, 0x23, 0x24, 0x15, 0x52, 0xc1, 0x62, 0x33, 0x34, 0x72, 0x82, 0xd1, 0x43, 0x07, 0x25, 0x92, 0x53, 0xf0, 0xe1, 0xf1, 0x63, 0x73, 0x35, 0x16, 0xa2, 0xb2, 0x83, 0x26, 0x44, 0x93, 0x54, 0x64, 0x45, 0xc2, 0xa3, 0x74, 0x36, 0x17, 0xd2, 0x55, 0xe2, 
0x65, 0xf2, 0xb3, 0x84, 0xc3, 0xd3, 0x75, 0xe3, 0xf3, 0x46, 0x27, 0x94, 0xa4, 0x85, 0xb4, 0x95, 0xc4, 0xd4, 0xe4, 0xf4, 0xa5, 0xb5, 0xc5, 0xd5, 0xe5, 0xf5, 0x56, 0x66, 0x76, 0x86, 0x96, 0xa6, 0xb6, 0xc6, 0xd6, 0xe6, 0xf6, 0x37, 0x47, 0x57, 0x67, 0x77, 0x87, 0x97, 0xa7, 0xb7, 0xc7, 0xd7, 0xe7, 0xf7, 0x11, 0x00, 0x02, 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x05, 0x06, 0x07, 0x07, 0x06, 0x05, 0x35, 0x01, 0x00, 0x02, 0x11, 0x03, 0x21, 0x31, 0x12, 0x04, 0x41, 0x51, 0x61, 0x71, 0x22, 0x13, 0x05, 0x32, 0x81, 0x91, 0x14, 0xa1, 0xb1, 0x42, 0x23, 0xc1, 0x52, 0xd1, 0xf0, 0x33, 0x24, 0x62, 0xe1, 0x72, 0x82, 0x92, 0x43, 0x53, 0x15, 0x63, 0x73, 0x34, 0xf1, 0x25, 0x06, 0x16, 0xa2, 0xb2, 0x83, 0x07, 0x26, 0x35, 0xc2, 0xd2, 0x44, 0x93, 0x54, 0xa3, 0x17, 0x64, 0x45, 0x55, 0x36, 0x74, 0x65, 0xe2, 0xf2, 0xb3, 0x84, 0xc3, 0xd3, 0x75, 0xe3, 0xf3, 0x46, 0x94, 0xa4, 0x85, 0xb4, 0x95, 0xc4, 0xd4, 0xe4, 0xf4, 0xa5, 0xb5, 0xc5, 0xd5, 0xe5, 0xf5, 0x56, 0x66, 0x76, 0x86, 0x96, 0xa6, 0xb6, 0xc6, 0xd6, 0xe6, 0xf6, 0x27, 0x37, 0x47, 0x57, 0x67, 0x77, 0x87, 0x97, 0xa7, 0xb7, 0xc7, 0xff, 0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x11, 0x03, 0x11, 0x00, 0x3f, 0x00, 0xf2, 0xed, 0xb2, 0x8d, 0x4d, 0x45, 0xcd, 0x2f, 0x3f, 0x44, 0x68, 0x93, 0xc3, 0x58, 0xc8, 0xf1, 0x1f, 0x8a, 0x33, 0x86, 0xda, 0x58, 0xc1, 0xa0, 0x02, 0x4f, 0xc4, 0xa1, 0x69, 0xa5, 0x9b, 0x5b, 0x4b, 0x84, 0x73, 0xdf, 0xc9, 0x15, 0xf8, 0xe3, 0xd1, 0x0e, 0x07, 0x93, 0xf3, 0xd1, 0x0f, 0x1c, 0x17, 0xef, 0x2e, 0x3b, 0x5b, 0xdc, 0xff, 0x00, 0xdf, 0x42, 0xbf, 0x8f, 0x8e, 0xdc, 0x82, 0xca, 0xd8, 0x37, 0x11, 0xa9, 0x3d, 0x82, 0x69, 0x2b, 0xc4, 0x6d, 0xc9, 0x75, 0x25, 0xbc, 0xf7, 0xec, 0xa1, 0xb5, 0x74, 0x19, 0x5d, 0x2e, 0x8a, 0x9a, 0x4b, 0x89, 0x7d, 0xc4, 0x68, 0xc6, 0xf6, 0xfe, 0xb2, 0xa0, 0x30, 0x1d, 0x60, 0x86, 0x88, 0x8d, 0x49, 0x3e, 0x01, 0x11, 0x20, 0xa3, 0x8c, 0xb9, 0xb1, 0xaa, 0x62, 0xad, 0xbf, 0x18, 0x97, 0x43, 0x47, 0x1d, 0xd2, 0xaf, 0x04, 0xd9, 0xb8, 0xc8, 0x0d, 0x68, 0xe4, 0xf7, 0x3e, 0x48, 0xf1, 0x05, 0xbc, 0x25, 0xaa, 0x07, 0x71, 0xd9, 0x14, 0x78, 0xf6, 0x49, 0xb5, 0x90, 0xfd, 0xa7, 0xc6, 0x14, 0xfd, 0x1b, 0x1c, 0xff, 0x00, 0x4d, 0x8d, 0x2e, 0x73, 0x8c, 0x35, 0xa3, 0x52, 0x4f, 0x92, 0x48, 0xa6, 0x1a, 0x24, 0xb6, 0x2a, 0xfa, 0xa5, 0x9e, 0x60, 0x64, 0x39, 0x94, 0x13, 0xcb, 0x27, 0x73, 0x80, 0xf3, 0x0c, 0xf6, 0xff, 0x00, 0xd2, 0x5a, 0x78, 0xbf, 0x53, 0x71, 0xf6, 0x01, 0x75, 0xb6, 0x97, 0x6a, 0x25, 0xa1, 0xad, 0x1f, 0xf4, 0xb7, 0x23, 0x48, 0xb7, 0x94, 0x84, 0x97, 0x5b, 0xff, 0x00, 0x32, 0xa9, 0xdd, 0xfc, 0xed, 0x9b, 0x7e, 0x0d, 0x9e, 0x52, 0x4a, 0x95, 0x61, 0xff, 0xd0, 0xf3, 0x3b, 0xa7, 0x70, 0xee, 0x01, 0x8f, 0xb9, 0x59, 0xfa, 0x7e, 0xdf, 0xe4, 0xc8, 0xf9, 0x2a, 0xc2, 0x5c, 0x63, 0xc3, 0x54, 0x67, 0x87, 0x6e, 0x10, 0x35, 0x68, 0xd4, 0x79, 0x1e, 0x53, 0x4a, 0xe0, 0xdc, 0xe9, 0xb8, 0x1f, 0x6a, 0xda, 0x6c, 0x25, 0x94, 0x37, 0xb0, 0xd0, 0xb8, 0xad, 0x67, 0xe4, 0x55, 0x8a, 0x5b, 0x8b, 0x82, 0xc0, 0x6f, 0x76, 0x80, 0x34, 0x49, 0x05, 0x2e, 0x9e, 0xc6, 0x1c, 0x66, 0x31, 0xba, 0x10, 0x23, 0xe0, 0xaf, 0xe1, 0x61, 0x53, 0x43, 0x8d, 0x81, 0xb3, 0x67, 0xef, 0x9e, 0x49, 0x2a, 0x12, 0x6c, 0xb6, 0x63, 0x1a, 0x0c, 0x31, 0xba, 0x55, 0xcd, 0xac, 0xfa, 0x8e, 0xdf, 0x91, 0x6e, 0x91, 0xd9, 0xb3, 0xc9, 0x73, 0x90, 0x7a, 0xab, 0x6a, 0xc2, 0xa4, 0x60, 0xe2, 0x8f, 0xd2, 0x38, 0x03, 0x7d, 0x9e, 0x0d, 0xff, 0x00, 0xcc, 0xd6, 0xd3, 0x6b, 0x71, 0x67, 0xd2, 0x3e, 0x64, 0x72, 0xab, 0xdb, 0x8d, 0x54, 0x39, 0xc5, 0x83, 0x6b, 0x3d, 0xee, 0x2e, 0xd4, 0x92, 0x3c, 0x4a, 0x56, 0xba, 0xb4, 0x79, 0x5c, 0xf7, 0xb2, 0x96, 0x6c, 0x8d, 0xaf, 0x80, 0x48, 0x3c, 0xf0, 0xb2, 0x1f, 0x63, 0x9c, 0xe9, 0x3f, 
0x24, 0x5c, 0xdb, 0xdd, 0x76, 0x43, 0xde, 0xfd, 0x5c, 0xe3, 0x24, 0xfc, 0x50, 0x00, 0x93, 0x0a, 0x78, 0x8a, 0x0d, 0x49, 0xca, 0xcf, 0x93, 0x63, 0x1b, 0x7d, 0xd7, 0x57, 0x50, 0xd5, 0xef, 0x70, 0x6b, 0x4f, 0xc7, 0x45, 0xdb, 0x74, 0x9e, 0x8d, 0x5e, 0x33, 0x83, 0xd8, 0x37, 0xdd, 0xc3, 0xac, 0x3d, 0xbf, 0x92, 0xc5, 0x5b, 0xea, 0xbf, 0xd5, 0x62, 0xc0, 0xdc, 0xbc, 0xbd, 0x2d, 0x22, 0x5a, 0xcf, 0xdd, 0x69, 0xff, 0x00, 0xd1, 0x8e, 0x5d, 0xa5, 0x38, 0xb5, 0xb0, 0x00, 0xc6, 0xc4, 0x24, 0x4a, 0xd6, 0x8d, 0x18, 0x04, 0x49, 0x88, 0x9e, 0x55, 0xd6, 0x61, 0xb0, 0xc1, 0x70, 0x32, 0xdd, 0x3c, 0x95, 0xda, 0xf1, 0xfe, 0xf5, 0x62, 0xbc, 0x76, 0x8e, 0x75, 0x28, 0x02, 0xa2, 0xe7, 0x7d, 0x92, 0xb9, 0x84, 0x96, 0x96, 0xda, 0xf7, 0x70, 0x12, 0x4e, 0x5a, 0xff, 0x00, 0xff, 0xd1, 0xf3, 0x7a, 0x21, 0xaf, 0xde, 0xef, 0xa2, 0x22, 0x55, 0xfc, 0x5a, 0xbd, 0x42, 0xfb, 0x08, 0xfa, 0x67, 0x4f, 0x82, 0xcd, 0x6d, 0x85, 0xc0, 0x56, 0x3b, 0x90, 0xb7, 0xf0, 0x2a, 0x0e, 0x63, 0x58, 0x3b, 0xf2, 0xa3, 0x9e, 0x8c, 0xb8, 0x86, 0xbe, 0x49, 0xf1, 0x2c, 0x0c, 0x86, 0xb4, 0x4c, 0x69, 0xe4, 0xaf, 0x6e, 0xcc, 0x6b, 0x7d, 0x46, 0xb3, 0x70, 0xec, 0x38, 0x51, 0x7d, 0x02, 0x8a, 0xc7, 0xa6, 0xd9, 0x20, 0x68, 0x0f, 0x8f, 0x8a, 0xcf, 0xc9, 0xc2, 0xea, 0x59, 0x5b, 0x48, 0xb0, 0x91, 0xae, 0xe6, 0xc9, 0x03, 0xc9, 0x30, 0x51, 0x66, 0xd4, 0x0d, 0xad, 0xbd, 0x5f, 0x53, 0xcc, 0x6b, 0xb6, 0x90, 0x5a, 0x3b, 0x83, 0x0b, 0x43, 0x17, 0x31, 0xd6, 0xc3, 0x6e, 0x12, 0x3b, 0x79, 0xac, 0xc1, 0x89, 0x47, 0xd9, 0xe8, 0x63, 0x98, 0x45, 0xed, 0x6c, 0x5a, 0xf1, 0xa0, 0x27, 0xc5, 0x5b, 0xc3, 0x6f, 0xa6, 0xe0, 0x1c, 0x7d, 0xb3, 0xa2, 0x69, 0x34, 0x7b, 0xae, 0x1a, 0x8d, 0x45, 0x17, 0x9d, 0xeb, 0xfd, 0x21, 0xd8, 0xb9, 0xae, 0xb5, 0x80, 0xbb, 0x1e, 0xd2, 0x5c, 0xd7, 0x78, 0x13, 0xf9, 0xae, 0x4b, 0xea, 0xc7, 0x4a, 0x39, 0xbd, 0x55, 0xb3, 0xed, 0x66, 0x38, 0xf5, 0x09, 0x22, 0x41, 0x23, 0xe8, 0x37, 0xfb, 0x4b, 0xa1, 0xeb, 0xd6, 0xfe, 0x88, 0x31, 0xbf, 0x41, 0xc0, 0xee, 0xd2, 0x74, 0x02, 0x78, 0x53, 0xfa, 0x97, 0x43, 0x19, 0x85, 0x65, 0xff, 0x00, 0x9d, 0x71, 0x33, 0xe4, 0x1a, 0x7d, 0x8d, 0x53, 0x42, 0x56, 0x35, 0x6b, 0xe5, 0x80, 0x06, 0xc7, 0x57, 0xa7, 0xc4, 0xa9, 0xdb, 0xb6, 0x81, 0x1f, 0xeb, 0xd9, 0x69, 0x56, 0xc2, 0xd0, 0x00, 0xe5, 0x55, 0xc0, 0x12, 0xc2, 0xd7, 0x4e, 0xa2, 0x5a, 0x7c, 0x0a, 0xd0, 0x63, 0x9a, 0xd1, 0xaf, 0xd2, 0xe2, 0x3c, 0x12, 0x62, 0x66, 0xc6, 0x42, 0x23, 0x5a, 0x49, 0x8f, 0x10, 0xa2, 0xd2, 0x3e, 0x28, 0x9d, 0xc4, 0x88, 0x09, 0x29, 0x16, 0xc3, 0x3c, 0x24, 0x8d, 0xe6, 0x92, 0x72, 0x1f, 0xff, 0xd2, 0xf3, 0xbb, 0xb0, 0xfe, 0xcb, 0x99, 0xe9, 0xce, 0xf6, 0x88, 0x2d, 0x77, 0x91, 0x5b, 0x3d, 0x3d, 0xd0, 0xe6, 0x90, 0xa9, 0x65, 0x57, 0x38, 0x95, 0xdd, 0xcb, 0x9a, 0x7d, 0xce, 0xf2, 0x3f, 0x44, 0x23, 0x60, 0x58, 0x76, 0xe9, 0xca, 0x8c, 0xea, 0x1b, 0x31, 0x02, 0x32, 0x23, 0xea, 0xee, 0xb1, 0xcd, 0xb0, 0xc7, 0x87, 0x74, 0x7a, 0xeb, 0x70, 0x1a, 0x71, 0xe1, 0xfe, 0xe4, 0x1c, 0x1d, 0xae, 0xe5, 0x69, 0xd8, 0xfa, 0x99, 0x50, 0x0d, 0x1a, 0xf7, 0x2a, 0x3a, 0x0c, 0xf4, 0x1a, 0x8e, 0xc7, 0x27, 0x5d, 0xbf, 0x18, 0x41, 0xdc, 0xc2, 0xf0, 0x7f, 0x74, 0xf6, 0x3a, 0x22, 0x66, 0xdb, 0x68, 0xc6, 0x80, 0x48, 0x6b, 0x88, 0x06, 0x39, 0x0d, 0xee, 0xaa, 0x1f, 0xb3, 0xd5, 0x1b, 0x83, 0xd8, 0x3b, 0x38, 0x8f, 0x69, 0xfe, 0xdf, 0xd1, 0x4d, 0x29, 0xa1, 0x4c, 0x7a, 0xf4, 0xbf, 0xa7, 0x92, 0xcf, 0xa5, 0x20, 0x08, 0xf3, 0xf6, 0xff, 0x00, 0x15, 0xbb, 0xd1, 0x31, 0xd9, 0x5e, 0x3d, 0x75, 0x56, 0x36, 0x88, 0x00, 0x81, 0xe0, 0x16, 0x5e, 0x55, 0x74, 0x3f, 0x00, 0x9d, 0xe0, 0xcc, 0x69, 0xe7, 0x3a, 0x2d, 0xbe, 0x90, 0x00, 0xa9, 0xae, 0xef, 0x1f, 0x95, 0x4b, 0x0d, 0x9a, 0xdc, 0xc7, 0x45, 0xfe, 0xb1, 0x7d, 
0x60, 0xa7, 0xa1, 0xe0, 0x1f, 0x4e, 0x1d, 0x99, 0x69, 0x02, 0x9a, 0xcf, 0x1f, 0xca, 0x7b, 0xbf, 0x90, 0xc5, 0xc2, 0xb3, 0xeb, 0x57, 0xd6, 0x03, 0x6b, 0xae, 0x39, 0xb6, 0x82, 0xe3, 0x31, 0xa1, 0x68, 0xf2, 0x6b, 0x5c, 0x12, 0xfa, 0xe1, 0x91, 0x66, 0x47, 0x5d, 0xb8, 0x3b, 0x4f, 0x44, 0x36, 0xb6, 0x8f, 0x28, 0xdd, 0xff, 0x00, 0x7e, 0x46, 0xab, 0x12, 0x2b, 0x65, 0x55, 0x32, 0xa7, 0x62, 0xb6, 0xbd, 0xf7, 0x64, 0x10, 0xdb, 0x03, 0x9f, 0x1b, 0x9e, 0xc7, 0xd9, 0xb8, 0x3b, 0x1f, 0x67, 0xf3, 0x6c, 0x52, 0x80, 0xd7, 0x7d, 0x0f, 0xea, 0x7f, 0x5d, 0x1d, 0x67, 0xa6, 0x0b, 0x1e, 0x47, 0xda, 0x69, 0x3b, 0x2e, 0x03, 0xc7, 0xf3, 0x5f, 0x1f, 0xf0, 0x8b, 0xa1, 0x02, 0x46, 0xba, 0x79, 0xaf, 0x32, 0xff, 0x00, 0x16, 0xad, 0xca, 0x1d, 0x57, 0x2a, 0xdc, 0x79, 0x18, 0x41, 0xb0, 0xf6, 0x9e, 0xe4, 0x9f, 0xd0, 0x8f, 0xeb, 0x31, 0xab, 0xd2, 0x83, 0xa4, 0xcb, 0x8c, 0xb8, 0xa0, 0x42, 0x12, 0x7b, 0x67, 0x9f, 0x2f, 0xf5, 0x09, 0x26, 0x96, 0xc4, 0xce, 0xa9, 0x20, 0xa7, 0xff, 0xd3, 0xf3, 0x2f, 0xb4, 0x5d, 0xe9, 0x0a, 0xb7, 0x9f, 0x4c, 0x19, 0xdb, 0x3a, 0x2d, 0x5e, 0x94, 0xfd, 0xc4, 0xb7, 0xc5, 0x62, 0xf9, 0x2b, 0xfd, 0x2e, 0xe3, 0x5d, 0xe0, 0x7c, 0x13, 0x48, 0xd1, 0x92, 0x12, 0xa9, 0x0b, 0x7a, 0xbc, 0x2d, 0xc2, 0x7f, 0x92, 0x60, 0xab, 0x4e, 0x79, 0x2e, 0x00, 0xf0, 0xaa, 0xe1, 0xda, 0x3d, 0x43, 0xfc, 0xad, 0x55, 0xbb, 0x80, 0x79, 0x81, 0xa0, 0xe6, 0x54, 0x32, 0x6d, 0x02, 0xbe, 0xf3, 0x61, 0x81, 0xa8, 0x44, 0x14, 0x03, 0x59, 0x0e, 0x1c, 0xf6, 0x1f, 0xdc, 0xb2, 0xec, 0xa3, 0x23, 0x77, 0xe8, 0x6e, 0x70, 0xf2, 0x25, 0x1f, 0x1f, 0x17, 0xa9, 0x6d, 0x71, 0x36, 0x97, 0x47, 0x00, 0xa4, 0x02, 0xe0, 0x2c, 0x7c, 0xc1, 0xab, 0xd5, 0x31, 0x85, 0x35, 0xd4, 0xe6, 0x13, 0x02, 0xd6, 0x4b, 0x67, 0x48, 0x2b, 0xa9, 0xe9, 0x2e, 0x02, 0xb6, 0x4f, 0x82, 0xe5, 0x7a, 0x95, 0x19, 0xc6, 0x87, 0x3d, 0xfb, 0xa2, 0xb8, 0x79, 0x1e, 0x4d, 0x3b, 0x96, 0xcf, 0x4f, 0xbd, 0xcd, 0xa2, 0xa2, 0x1f, 0xa0, 0x82, 0xd3, 0xfc, 0x97, 0x05, 0x24, 0x36, 0x6b, 0xf3, 0x31, 0xa2, 0x35, 0x79, 0xef, 0xad, 0xf8, 0xae, 0xaf, 0xaf, 0xd8, 0xf2, 0xd8, 0x6d, 0xed, 0x6b, 0xda, 0x7b, 0x18, 0x1b, 0x5d, 0xff, 0x00, 0x52, 0xb1, 0x6d, 0xf0, 0x81, 0x31, 0xca, 0xf4, 0x6e, 0xb1, 0x80, 0xce, 0xb1, 0x84, 0xc0, 0x21, 0xb7, 0xd6, 0x77, 0x31, 0xd1, 0x27, 0xc1, 0xcd, 0xfe, 0xd2, 0xe3, 0xec, 0xe8, 0x1d, 0x45, 0x96, 0xb0, 0x9a, 0xb7, 0x87, 0x3f, 0x68, 0x2d, 0xf7, 0x01, 0x1f, 0xbe, 0xd1, 0xf4, 0x7f, 0xb4, 0xa4, 0x0d, 0x77, 0xbb, 0xfa, 0x8f, 0x80, 0x3a, 0x7f, 0x43, 0xaa, 0xe2, 0xdf, 0xd2, 0x65, 0x7e, 0x95, 0xe4, 0x0f, 0x1f, 0xa1, 0xfe, 0x6b, 0x16, 0x9f, 0x52, 0xfa, 0xc1, 0xd3, 0xba, 0x6d, 0x26, 0xdc, 0xac, 0x86, 0xd4, 0xd9, 0x0d, 0x31, 0x2e, 0x74, 0x9e, 0xdb, 0x59, 0x2e, 0x55, 0xe8, 0xc9, 0xb2, 0x96, 0xd5, 0x4b, 0x9f, 0xb8, 0x6d, 0xda, 0x1c, 0x04, 0x09, 0x03, 0xfe, 0x8a, 0xc6, 0xfa, 0xd3, 0xf5, 0x6a, 0xbe, 0xbb, 0x5b, 0x2e, 0xc6, 0xb5, 0x94, 0xe6, 0xd5, 0x20, 0x97, 0x7d, 0x1b, 0x1b, 0xf9, 0xad, 0x7c, 0x7d, 0x17, 0xb7, 0xf3, 0x1e, 0x92, 0x1b, 0x7f, 0xf8, 0xe0, 0x7d, 0x59, 0xdd, 0xfd, 0x32, 0xd8, 0x8f, 0xa5, 0xe8, 0x3a, 0x12, 0x5c, 0x3f, 0xfc, 0xc4, 0xfa, 0xc3, 0xb3, 0x77, 0xa7, 0x56, 0xed, 0xdb, 0x76, 0x7a, 0x8d, 0xdd, 0x1f, 0xbf, 0xfd, 0x44, 0x92, 0x56, 0x8f, 0xff, 0xd4, 0xf2, 0xe8, 0x86, 0x17, 0x1e, 0xfa, 0x04, 0x56, 0x4b, 0x43, 0x6c, 0x6f, 0x2d, 0xe5, 0x46, 0x01, 0x64, 0x2b, 0x14, 0x32, 0x5b, 0xb4, 0xa0, 0x52, 0x1d, 0xde, 0x9b, 0x94, 0xdb, 0xab, 0x6b, 0x81, 0xf7, 0x05, 0xb0, 0xd7, 0x07, 0xb2, 0x27, 0x55, 0xc6, 0x57, 0x65, 0xd8, 0x76, 0x6e, 0x64, 0xed, 0xee, 0x16, 0xce, 0x27, 0x57, 0x63, 0xda, 0x0c, 0xc2, 0x8e, 0x51, 0x67, 0x84, 0xfa, 0x1d, 0xdd, 0x62, 0xc7, 0x07, 0xe9, 0xf7, 0xa3, 0xd6, 0x6c, 0x02, 
0x41, 0x55, 0x31, 0xf3, 0x2b, 0xb3, 0xba, 0x2b, 0x2e, 0x68, 0x24, 0x1d, 0x47, 0x64, 0xca, 0xa6, 0x50, 0x41, 0x65, 0x90, 0x6c, 0xb1, 0xa5, 0xae, 0x33, 0x23, 0x51, 0xe4, 0xab, 0x7d, 0x5d, 0xcb, 0xb6, 0xcc, 0x37, 0xd0, 0x40, 0x73, 0x71, 0xde, 0x58, 0x09, 0xe7, 0x6f, 0x2c, 0x44, 0xc9, 0xc9, 0xae, 0xba, 0x9d, 0x63, 0x88, 0x01, 0xa0, 0x95, 0x9d, 0xf5, 0x3f, 0x2a, 0xe6, 0x67, 0xdb, 0x50, 0x83, 0x55, 0xad, 0x36, 0x3e, 0x78, 0x10, 0x74, 0x77, 0xfd, 0x2d, 0xaa, 0x4c, 0x7d, 0x58, 0x73, 0x91, 0xa0, 0x0f, 0x51, 0x45, 0xb7, 0x33, 0xdd, 0x58, 0x69, 0x1d, 0xd8, 0x0c, 0x9f, 0x96, 0x88, 0x19, 0x99, 0x19, 0xac, 0xcf, 0xa3, 0xd2, 0xad, 0xb5, 0xdb, 0x76, 0x8f, 0xad, 0xc4, 0xea, 0xcf, 0xdf, 0x7e, 0xdf, 0xdd, 0xfc, 0xd5, 0xa3, 0x5e, 0x43, 0x2b, 0x6b, 0xb2, 0xad, 0x3b, 0x6a, 0xa4, 0x13, 0xa7, 0x04, 0xac, 0x7a, 0x6f, 0xb3, 0x23, 0x26, 0xcc, 0xfb, 0xb4, 0x75, 0x8e, 0x01, 0x83, 0xf7, 0x58, 0x3e, 0x8b, 0x53, 0xa7, 0x2a, 0x1a, 0x31, 0x42, 0x36, 0x5d, 0x4c, 0x9a, 0xf2, 0xdc, 0xc6, 0xfe, 0x98, 0xb4, 0x34, 0xcb, 0x48, 0x0a, 0x8f, 0xdb, 0xb2, 0xeb, 0x76, 0xd6, 0x07, 0x5c, 0x59, 0xc9, 0x64, 0x8f, 0x93, 0xa7, 0x73, 0x16, 0x83, 0xaf, 0x0e, 0xa4, 0x33, 0xef, 0x50, 0xc5, 0x0c, 0xda, 0x59, 0x10, 0x06, 0x8a, 0x2e, 0x29, 0x0e, 0xac, 0xc2, 0x31, 0x3d, 0x36, 0x69, 0x7e, 0xd6, 0xcc, 0xf5, 0x3d, 0x6f, 0xb3, 0xeb, 0x1b, 0x76, 0xef, 0x3b, 0xa3, 0xfa, 0xc9, 0x2b, 0x5f, 0x66, 0x6f, 0xa9, 0x1e, 0x73, 0xf2, 0x49, 0x2e, 0x39, 0xf7, 0x4f, 0xb7, 0x8d, 0xff, 0xd5, 0xf3, 0x26, 0xfe, 0x0a, 0xc5, 0x1b, 0xa7, 0xcb, 0xb2, 0xcf, 0x49, 0x03, 0xb2, 0x46, 0xee, 0xd9, 0xd9, 0xb3, 0xf4, 0x9f, 0x25, 0x4a, 0xdf, 0x4b, 0x77, 0xe8, 0x27, 0xd4, 0xef, 0x1c, 0x2a, 0x29, 0x26, 0xc5, 0x7c, 0x9d, 0x6c, 0x7f, 0xb7, 0x6e, 0x1b, 0x26, 0x7f, 0x05, 0xa3, 0xfe, 0x53, 0x8d, 0x62, 0x57, 0x30, 0x92, 0x12, 0xfa, 0x2f, 0x86, 0xdf, 0xa4, 0xec, 0x67, 0xfe, 0xd0, 0xf4, 0xff, 0x00, 0x4d, 0xfc, 0xdf, 0x78, 0xe1, 0x68, 0x7d, 0x54, 0x99, 0xbf, 0x6f, 0xf3, 0xbe, 0xdf, 0x8e, 0xdd, 0x7f, 0xef, 0xeb, 0x97, 0x49, 0x3e, 0x3b, 0x7f, 0x06, 0x2c, 0x9f, 0x37, 0x5f, 0xf0, 0x9f, 0x4c, 0xeb, 0x7b, 0xbf, 0x67, 0x55, 0xe8, 0xff, 0x00, 0x31, 0xbc, 0x7a, 0x9e, 0x31, 0xdb, 0xfe, 0x92, 0xae, 0x37, 0x7a, 0x4d, 0xdb, 0xe2, 0x17, 0x9d, 0xa4, 0xa3, 0xc9, 0xba, 0xfc, 0x7b, 0x7d, 0x5f, 0x52, 0xa7, 0x7e, 0xd1, 0x28, 0xf8, 0xf3, 0xb0, 0xc7, 0x32, 0xbc, 0x99, 0x24, 0xc5, 0xe3, 0xab, 0xeb, 0x1f, 0xa4, 0xf5, 0xfc, 0xe1, 0x25, 0xe4, 0xe9, 0x24, 0x97, 0xff, 0xd9, 0xff, 0xed, 0x2e, 0x1c, 0x50, 0x68, 0x6f, 0x74, 0x6f, 0x73, 0x68, 0x6f, 0x70, 0x20, 0x33, 0x2e, 0x30, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2b, 0x1c, 0x02, 0x00, 0x00, 0x02, 0x00, 0x02, 0x1c, 0x02, 0x78, 0x00, 0x1f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0xfb, 0x09, 0xa6, 0xbd, 0x07, 0x4c, 0x2a, 0x36, 0x9d, 0x8f, 0xe2, 0xcc, 0x57, 0xa9, 0xac, 0x85, 0x38, 0x42, 0x49, 0x4d, 0x03, 0xea, 0x00, 0x00, 0x00, 0x00, 0x1d, 0xb0, 0x3c, 0x3f, 0x78, 0x6d, 0x6c, 0x20, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x3d, 0x22, 0x31, 0x2e, 0x30, 0x22, 0x20, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x3d, 0x22, 0x55, 0x54, 0x46, 0x2d, 0x38, 0x22, 0x3f, 0x3e, 0x0a, 0x3c, 0x21, 0x44, 0x4f, 0x43, 0x54, 0x59, 0x50, 0x45, 0x20, 0x70, 0x6c, 0x69, 0x73, 0x74, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x22, 0x2d, 0x2f, 0x2f, 0x41, 0x70, 0x70, 0x6c, 0x65, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x72, 0x2f, 
0x2f, 0x44, 0x54, 0x44, 0x20, 0x50, 0x4c, 0x49, 0x53, 0x54, 0x20, 0x31, 0x2e, 0x30, 0x2f, 0x2f, 0x45, 0x4e, 0x22, 0x20, 0x22, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 0x77, 0x77, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x44, 0x54, 0x44, 0x73, 0x2f, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x2d, 0x31, 0x2e, 0x30, 0x2e, 0x64, 0x74, 0x64, 0x22, 0x3e, 0x0a, 0x3c, 0x70, 0x6c, 0x69, 0x73, 0x74, 0x20, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x3d, 0x22, 0x31, 0x2e, 0x30, 0x22, 0x3e, 0x0a, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x48, 0x6f, 0x72, 0x69, 0x7a, 0x6f, 0x6e, 0x74, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x48, 0x6f, 0x72, 0x69, 0x7a, 0x6f, 0x6e, 0x74, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x37, 0x32, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x2d, 0x33, 0x30, 0x54, 0x32, 0x32, 0x3a, 0x30, 0x38, 0x3a, 0x34, 0x31, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 
0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x30, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x4f, 0x72, 0x69, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x4f, 0x72, 0x69, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x31, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x2d, 0x33, 0x30, 0x54, 0x32, 0x32, 0x3a, 0x30, 0x38, 0x3a, 0x34, 0x31, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 
0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x30, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x53, 0x63, 0x61, 0x6c, 0x69, 0x6e, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x53, 0x63, 0x61, 0x6c, 0x69, 0x6e, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x31, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x2d, 0x33, 0x30, 0x54, 0x32, 0x32, 0x3a, 0x30, 0x38, 0x3a, 0x34, 0x31, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x30, 0x3c, 0x2f, 0x69, 
0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x56, 0x65, 0x72, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x56, 0x65, 0x72, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x37, 0x32, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x2d, 0x33, 0x30, 0x54, 0x32, 0x32, 0x3a, 0x30, 0x38, 0x3a, 0x34, 0x31, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x30, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 
0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x56, 0x65, 0x72, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x63, 0x61, 0x6c, 0x69, 0x6e, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x56, 0x65, 0x72, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x53, 0x63, 0x61, 0x6c, 0x69, 0x6e, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x31, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x2d, 0x33, 0x30, 0x54, 0x32, 0x32, 0x3a, 0x30, 0x38, 0x3a, 0x34, 0x31, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x30, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x2f, 
0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x73, 0x75, 0x62, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x70, 0x61, 0x70, 0x65, 0x72, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x5f, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x41, 0x64, 0x6a, 0x75, 0x73, 0x74, 0x65, 0x64, 0x50, 0x61, 0x67, 0x65, 0x52, 0x65, 0x63, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x41, 0x64, 0x6a, 0x75, 0x73, 0x74, 0x65, 0x64, 0x50, 0x61, 0x67, 0x65, 0x52, 0x65, 0x63, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x30, 0x2e, 0x30, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x30, 0x2e, 0x30, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x37, 0x33, 0x34, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x35, 0x37, 0x36, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 
0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x2d, 0x33, 0x30, 0x54, 0x32, 0x32, 0x3a, 0x30, 0x38, 0x3a, 0x34, 0x31, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x30, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x41, 0x64, 0x6a, 0x75, 0x73, 0x74, 0x65, 0x64, 0x50, 0x61, 0x70, 0x65, 0x72, 0x52, 0x65, 0x63, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x4d, 0x41, 0x64, 0x6a, 0x75, 0x73, 0x74, 0x65, 0x64, 0x50, 0x61, 0x70, 0x65, 0x72, 0x52, 0x65, 0x63, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x2d, 0x31, 0x38, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x2d, 0x31, 0x38, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x37, 0x37, 0x34, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x35, 0x39, 0x34, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 
0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x2d, 0x33, 0x30, 0x54, 0x32, 0x32, 0x3a, 0x30, 0x38, 0x3a, 0x34, 0x31, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x30, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x70, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x50, 0x4d, 0x50, 0x61, 0x70, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x70, 0x6d, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x70, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x50, 0x4d, 0x50, 0x61, 0x70, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x6e, 0x61, 0x2d, 0x6c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 
0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x70, 0x6d, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x33, 0x2d, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x54, 0x31, 0x37, 0x3a, 0x34, 0x39, 0x3a, 0x33, 0x36, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x31, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x70, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x50, 0x4d, 0x55, 0x6e, 0x61, 0x64, 0x6a, 0x75, 0x73, 0x74, 0x65, 0x64, 0x50, 0x61, 0x67, 0x65, 0x52, 0x65, 0x63, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x70, 0x6d, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x70, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x50, 0x4d, 0x55, 0x6e, 0x61, 0x64, 0x6a, 0x75, 0x73, 0x74, 0x65, 0x64, 0x50, 0x61, 0x67, 0x65, 0x52, 0x65, 0x63, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 
0x09, 0x09, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x30, 0x2e, 0x30, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x30, 0x2e, 0x30, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x37, 0x33, 0x34, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x35, 0x37, 0x36, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x2d, 0x33, 0x30, 0x54, 0x32, 0x32, 0x3a, 0x30, 0x38, 0x3a, 0x34, 0x31, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x30, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x70, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x50, 0x4d, 0x55, 0x6e, 0x61, 0x64, 0x6a, 0x75, 0x73, 0x74, 0x65, 0x64, 0x50, 0x61, 0x70, 0x65, 0x72, 0x52, 0x65, 0x63, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x70, 0x6d, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 
0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x70, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x50, 0x4d, 0x55, 0x6e, 0x61, 0x64, 0x6a, 0x75, 0x73, 0x74, 0x65, 0x64, 0x50, 0x61, 0x70, 0x65, 0x72, 0x52, 0x65, 0x63, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x2d, 0x31, 0x38, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x2d, 0x31, 0x38, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x37, 0x37, 0x34, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x35, 0x39, 0x34, 0x3c, 0x2f, 0x72, 0x65, 0x61, 0x6c, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x2d, 0x33, 0x30, 0x54, 0x32, 0x32, 0x3a, 0x30, 0x38, 0x3a, 0x34, 0x31, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x30, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x70, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x70, 0x70, 0x64, 0x2e, 0x50, 0x4d, 0x50, 0x61, 0x70, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 
0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x70, 0x6d, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x70, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x70, 0x70, 0x64, 0x2e, 0x50, 0x4d, 0x50, 0x61, 0x70, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x55, 0x53, 0x20, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x70, 0x6d, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x6d, 0x6f, 0x64, 0x44, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x32, 0x30, 0x30, 0x33, 0x2d, 0x30, 0x37, 0x2d, 0x30, 0x31, 0x54, 0x31, 0x37, 0x3a, 0x34, 0x39, 0x3a, 0x33, 0x36, 0x5a, 0x3c, 0x2f, 0x64, 0x61, 0x74, 0x65, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x31, 0x3c, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x09, 0x3c, 0x2f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x41, 0x50, 0x49, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x73, 0x74, 
0x72, 0x69, 0x6e, 0x67, 0x3e, 0x30, 0x30, 0x2e, 0x32, 0x30, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x63, 0x6b, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x2f, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x70, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x41, 0x50, 0x49, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x30, 0x30, 0x2e, 0x32, 0x30, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x63, 0x6b, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x2f, 0x3e, 0x0a, 0x09, 0x3c, 0x6b, 0x65, 0x79, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x74, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3c, 0x2f, 0x6b, 0x65, 0x79, 0x3e, 0x0a, 0x09, 0x3c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3e, 0x0a, 0x3c, 0x2f, 0x64, 0x69, 0x63, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, 0x70, 0x6c, 0x69, 0x73, 0x74, 0x3e, 0x0a, 0x38, 0x42, 0x49, 0x4d, 0x03, 0xe9, 0x00, 0x00, 0x00, 0x00, 0x00, 0x78, 0x00, 0x03, 0x00, 0x00, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0x00, 0x00, 0x02, 0xde, 0x02, 0x40, 0xff, 0xee, 0xff, 0xee, 0x03, 0x06, 0x02, 0x52, 0x03, 0x67, 0x05, 0x28, 0x03, 0xfc, 0x00, 0x02, 0x00, 0x00, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0x00, 0x00, 0x02, 0xd8, 0x02, 0x28, 0x00, 0x01, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x01, 0x00, 0x03, 0x03, 0x03, 0x00, 0x00, 0x00, 0x01, 0x7f, 0xff, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x68, 0x08, 0x00, 0x19, 0x01, 0x90, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x03, 0xed, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x48, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x26, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x3f, 0x80, 0x00, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x1e, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x1e, 0x38, 0x42, 0x49, 0x4d, 0x03, 0xf3, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x27, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x38, 0x42, 0x49, 0x4d, 0x03, 0xf5, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x00, 0x2f, 0x66, 0x66, 0x00, 0x01, 0x00, 0x6c, 0x66, 0x66, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x2f, 0x66, 0x66, 0x00, 0x01, 0x00, 0xa1, 0x99, 0x9a, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x32, 0x00, 0x00, 0x00, 0x01, 0x00, 0x5a, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x35, 0x00, 0x00, 0x00, 0x01, 0x00, 0x2d, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x38, 0x42, 0x49, 0x4d, 0x03, 0xf8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x70, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03, 0xe8, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03, 0xe8, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03, 0xe8, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03, 0xe8, 0x00, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02, 0x40, 0x00, 0x00, 0x02, 0x40, 0x00, 0x00, 0x00, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x1a, 0x00, 0x00, 0x00, 0x00, 0x03, 0x45, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x08, 0x00, 0x44, 0x00, 0x53, 0x00, 0x43, 0x00, 0x30, 0x00, 0x32, 0x00, 0x33, 0x00, 0x32, 0x00, 0x35, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x6e, 0x75, 0x6c, 0x6c, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x06, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x73, 0x4f, 0x62, 0x6a, 0x63, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0x63, 0x74, 0x31, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x54, 0x6f, 0x70, 0x20, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4c, 0x65, 0x66, 0x74, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x42, 0x74, 0x6f, 0x6d, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x52, 0x67, 0x68, 0x74, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 
0x00, 0x64, 0x00, 0x00, 0x00, 0x06, 0x73, 0x6c, 0x69, 0x63, 0x65, 0x73, 0x56, 0x6c, 0x4c, 0x73, 0x00, 0x00, 0x00, 0x01, 0x4f, 0x62, 0x6a, 0x63, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x73, 0x6c, 0x69, 0x63, 0x65, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x07, 0x73, 0x6c, 0x69, 0x63, 0x65, 0x49, 0x44, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x49, 0x44, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x65, 0x6e, 0x75, 0x6d, 0x00, 0x00, 0x00, 0x0c, 0x45, 0x53, 0x6c, 0x69, 0x63, 0x65, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x00, 0x00, 0x00, 0x0d, 0x61, 0x75, 0x74, 0x6f, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x00, 0x00, 0x00, 0x00, 0x54, 0x79, 0x70, 0x65, 0x65, 0x6e, 0x75, 0x6d, 0x00, 0x00, 0x00, 0x0a, 0x45, 0x53, 0x6c, 0x69, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x00, 0x00, 0x00, 0x00, 0x49, 0x6d, 0x67, 0x20, 0x00, 0x00, 0x00, 0x06, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x73, 0x4f, 0x62, 0x6a, 0x63, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0x63, 0x74, 0x31, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x54, 0x6f, 0x70, 0x20, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4c, 0x65, 0x66, 0x74, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x42, 0x74, 0x6f, 0x6d, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x52, 0x67, 0x68, 0x74, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x03, 0x75, 0x72, 0x6c, 0x54, 0x45, 0x58, 0x54, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x6e, 0x75, 0x6c, 0x6c, 0x54, 0x45, 0x58, 0x54, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4d, 0x73, 0x67, 0x65, 0x54, 0x45, 0x58, 0x54, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x61, 0x6c, 0x74, 0x54, 0x61, 0x67, 0x54, 0x45, 0x58, 0x54, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x63, 0x65, 0x6c, 0x6c, 0x54, 0x65, 0x78, 0x74, 0x49, 0x73, 0x48, 0x54, 0x4d, 0x4c, 0x62, 0x6f, 0x6f, 0x6c, 0x01, 0x00, 0x00, 0x00, 0x08, 0x63, 0x65, 0x6c, 0x6c, 0x54, 0x65, 0x78, 0x74, 0x54, 0x45, 0x58, 0x54, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x68, 0x6f, 0x72, 0x7a, 0x41, 0x6c, 0x69, 0x67, 0x6e, 0x65, 0x6e, 0x75, 0x6d, 0x00, 0x00, 0x00, 0x0f, 0x45, 0x53, 0x6c, 0x69, 0x63, 0x65, 0x48, 0x6f, 0x72, 0x7a, 0x41, 0x6c, 0x69, 0x67, 0x6e, 0x00, 0x00, 0x00, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x00, 0x00, 0x00, 0x09, 0x76, 0x65, 0x72, 0x74, 0x41, 0x6c, 0x69, 0x67, 0x6e, 0x65, 0x6e, 0x75, 0x6d, 0x00, 0x00, 0x00, 0x0f, 0x45, 0x53, 0x6c, 0x69, 0x63, 0x65, 0x56, 0x65, 0x72, 0x74, 0x41, 0x6c, 0x69, 0x67, 0x6e, 0x00, 0x00, 0x00, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x00, 0x00, 0x00, 0x0b, 0x62, 0x67, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x54, 0x79, 0x70, 0x65, 0x65, 0x6e, 0x75, 0x6d, 0x00, 0x00, 0x00, 0x11, 0x45, 0x53, 0x6c, 0x69, 0x63, 0x65, 0x42, 0x47, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x54, 0x79, 0x70, 0x65, 0x00, 0x00, 0x00, 0x00, 0x4e, 0x6f, 0x6e, 0x65, 0x00, 0x00, 0x00, 0x09, 0x74, 0x6f, 0x70, 0x4f, 0x75, 0x74, 0x73, 0x65, 0x74, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x6c, 0x65, 0x66, 0x74, 0x4f, 0x75, 0x74, 0x73, 0x65, 0x74, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x62, 0x6f, 0x74, 0x74, 0x6f, 0x6d, 0x4f, 0x75, 0x74, 0x73, 0x65, 0x74, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0b, 0x72, 0x69, 0x67, 
0x68, 0x74, 0x4f, 0x75, 0x74, 0x73, 0x65, 0x74, 0x6c, 0x6f, 0x6e, 0x67, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x09, 0xf9, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x01, 0x2c, 0x00, 0x00, 0x75, 0x30, 0x00, 0x00, 0x09, 0xdd, 0x00, 0x18, 0x00, 0x01, 0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01, 0x02, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xed, 0x00, 0x0c, 0x41, 0x64, 0x6f, 0x62, 0x65, 0x5f, 0x43, 0x4d, 0x00, 0x02, 0xff, 0xee, 0x00, 0x0e, 0x41, 0x64, 0x6f, 0x62, 0x65, 0x00, 0x64, 0x80, 0x00, 0x00, 0x00, 0x01, 0xff, 0xdb, 0x00, 0x84, 0x00, 0x0c, 0x08, 0x08, 0x08, 0x09, 0x08, 0x0c, 0x09, 0x09, 0x0c, 0x11, 0x0b, 0x0a, 0x0b, 0x11, 0x15, 0x0f, 0x0c, 0x0c, 0x0f, 0x15, 0x18, 0x13, 0x13, 0x15, 0x13, 0x13, 0x18, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x01, 0x0d, 0x0b, 0x0b, 0x0d, 0x0e, 0x0d, 0x10, 0x0e, 0x0e, 0x10, 0x14, 0x0e, 0x0e, 0x0e, 0x14, 0x14, 0x0e, 0x0e, 0x0e, 0x0e, 0x14, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x11, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0xff, 0xc0, 0x00, 0x11, 0x08, 0x00, 0x64, 0x00, 0x64, 0x03, 0x01, 0x22, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xdd, 0x00, 0x04, 0x00, 0x07, 0xff, 0xc4, 0x01, 0x3f, 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x01, 0x02, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x01, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x10, 0x00, 0x01, 0x04, 0x01, 0x03, 0x02, 0x04, 0x02, 0x05, 0x07, 0x06, 0x08, 0x05, 0x03, 0x0c, 0x33, 0x01, 0x00, 0x02, 0x11, 0x03, 0x04, 0x21, 0x12, 0x31, 0x05, 0x41, 0x51, 0x61, 0x13, 0x22, 0x71, 0x81, 0x32, 0x06, 0x14, 0x91, 0xa1, 0xb1, 0x42, 0x23, 0x24, 0x15, 0x52, 0xc1, 0x62, 0x33, 0x34, 0x72, 0x82, 0xd1, 0x43, 0x07, 0x25, 0x92, 0x53, 0xf0, 0xe1, 0xf1, 0x63, 0x73, 0x35, 0x16, 0xa2, 0xb2, 0x83, 0x26, 0x44, 0x93, 0x54, 0x64, 0x45, 0xc2, 0xa3, 0x74, 0x36, 0x17, 0xd2, 0x55, 0xe2, 0x65, 0xf2, 0xb3, 0x84, 0xc3, 0xd3, 0x75, 0xe3, 0xf3, 0x46, 0x27, 0x94, 0xa4, 0x85, 0xb4, 0x95, 0xc4, 0xd4, 0xe4, 0xf4, 0xa5, 0xb5, 0xc5, 0xd5, 0xe5, 0xf5, 0x56, 0x66, 0x76, 0x86, 0x96, 0xa6, 0xb6, 0xc6, 0xd6, 0xe6, 0xf6, 0x37, 0x47, 0x57, 0x67, 0x77, 0x87, 0x97, 0xa7, 0xb7, 0xc7, 0xd7, 0xe7, 0xf7, 0x11, 0x00, 0x02, 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x05, 0x06, 0x07, 0x07, 0x06, 0x05, 0x35, 0x01, 0x00, 0x02, 0x11, 0x03, 0x21, 0x31, 0x12, 0x04, 0x41, 0x51, 0x61, 0x71, 0x22, 0x13, 0x05, 0x32, 0x81, 0x91, 0x14, 0xa1, 0xb1, 0x42, 0x23, 0xc1, 0x52, 0xd1, 0xf0, 0x33, 0x24, 0x62, 0xe1, 0x72, 0x82, 0x92, 0x43, 0x53, 0x15, 0x63, 0x73, 0x34, 0xf1, 0x25, 0x06, 0x16, 0xa2, 0xb2, 0x83, 0x07, 0x26, 0x35, 0xc2, 0xd2, 0x44, 0x93, 0x54, 0xa3, 0x17, 0x64, 0x45, 0x55, 0x36, 0x74, 0x65, 0xe2, 0xf2, 0xb3, 0x84, 0xc3, 0xd3, 0x75, 0xe3, 0xf3, 0x46, 0x94, 0xa4, 0x85, 0xb4, 0x95, 0xc4, 0xd4, 0xe4, 0xf4, 0xa5, 0xb5, 0xc5, 
0xd5, 0xe5, 0xf5, 0x56, 0x66, 0x76, 0x86, 0x96, 0xa6, 0xb6, 0xc6, 0xd6, 0xe6, 0xf6, 0x27, 0x37, 0x47, 0x57, 0x67, 0x77, 0x87, 0x97, 0xa7, 0xb7, 0xc7, 0xff, 0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x11, 0x03, 0x11, 0x00, 0x3f, 0x00, 0xf2, 0xed, 0xb2, 0x8d, 0x4d, 0x45, 0xcd, 0x2f, 0x3f, 0x44, 0x68, 0x93, 0xc3, 0x58, 0xc8, 0xf1, 0x1f, 0x8a, 0x33, 0x86, 0xda, 0x58, 0xc1, 0xa0, 0x02, 0x4f, 0xc4, 0xa1, 0x69, 0xa5, 0x9b, 0x5b, 0x4b, 0x84, 0x73, 0xdf, 0xc9, 0x15, 0xf8, 0xe3, 0xd1, 0x0e, 0x07, 0x93, 0xf3, 0xd1, 0x0f, 0x1c, 0x17, 0xef, 0x2e, 0x3b, 0x5b, 0xdc, 0xff, 0x00, 0xdf, 0x42, 0xbf, 0x8f, 0x8e, 0xdc, 0x82, 0xca, 0xd8, 0x37, 0x11, 0xa9, 0x3d, 0x82, 0x69, 0x2b, 0xc4, 0x6d, 0xc9, 0x75, 0x25, 0xbc, 0xf7, 0xec, 0xa1, 0xb5, 0x74, 0x19, 0x5d, 0x2e, 0x8a, 0x9a, 0x4b, 0x89, 0x7d, 0xc4, 0x68, 0xc6, 0xf6, 0xfe, 0xb2, 0xa0, 0x30, 0x1d, 0x60, 0x86, 0x88, 0x8d, 0x49, 0x3e, 0x01, 0x11, 0x20, 0xa3, 0x8c, 0xb9, 0xb1, 0xaa, 0x62, 0xad, 0xbf, 0x18, 0x97, 0x43, 0x47, 0x1d, 0xd2, 0xaf, 0x04, 0xd9, 0xb8, 0xc8, 0x0d, 0x68, 0xe4, 0xf7, 0x3e, 0x48, 0xf1, 0x05, 0xbc, 0x25, 0xaa, 0x07, 0x71, 0xd9, 0x14, 0x78, 0xf6, 0x49, 0xb5, 0x90, 0xfd, 0xa7, 0xc6, 0x14, 0xfd, 0x1b, 0x1c, 0xff, 0x00, 0x4d, 0x8d, 0x2e, 0x73, 0x8c, 0x35, 0xa3, 0x52, 0x4f, 0x92, 0x48, 0xa6, 0x1a, 0x24, 0xb6, 0x2a, 0xfa, 0xa5, 0x9e, 0x60, 0x64, 0x39, 0x94, 0x13, 0xcb, 0x27, 0x73, 0x80, 0xf3, 0x0c, 0xf6, 0xff, 0x00, 0xd2, 0x5a, 0x78, 0xbf, 0x53, 0x71, 0xf6, 0x01, 0x75, 0xb6, 0x97, 0x6a, 0x25, 0xa1, 0xad, 0x1f, 0xf4, 0xb7, 0x23, 0x48, 0xb7, 0x94, 0x84, 0x97, 0x5b, 0xff, 0x00, 0x32, 0xa9, 0xdd, 0xfc, 0xed, 0x9b, 0x7e, 0x0d, 0x9e, 0x52, 0x4a, 0x95, 0x61, 0xff, 0xd0, 0xf3, 0x3b, 0xa7, 0x70, 0xee, 0x01, 0x8f, 0xb9, 0x59, 0xfa, 0x7e, 0xdf, 0xe4, 0xc8, 0xf9, 0x2a, 0xc2, 0x5c, 0x63, 0xc3, 0x54, 0x67, 0x87, 0x6e, 0x10, 0x35, 0x68, 0xd4, 0x79, 0x1e, 0x53, 0x4a, 0xe0, 0xdc, 0xe9, 0xb8, 0x1f, 0x6a, 0xda, 0x6c, 0x25, 0x94, 0x37, 0xb0, 0xd0, 0xb8, 0xad, 0x67, 0xe4, 0x55, 0x8a, 0x5b, 0x8b, 0x82, 0xc0, 0x6f, 0x76, 0x80, 0x34, 0x49, 0x05, 0x2e, 0x9e, 0xc6, 0x1c, 0x66, 0x31, 0xba, 0x10, 0x23, 0xe0, 0xaf, 0xe1, 0x61, 0x53, 0x43, 0x8d, 0x81, 0xb3, 0x67, 0xef, 0x9e, 0x49, 0x2a, 0x12, 0x6c, 0xb6, 0x63, 0x1a, 0x0c, 0x31, 0xba, 0x55, 0xcd, 0xac, 0xfa, 0x8e, 0xdf, 0x91, 0x6e, 0x91, 0xd9, 0xb3, 0xc9, 0x73, 0x90, 0x7a, 0xab, 0x6a, 0xc2, 0xa4, 0x60, 0xe2, 0x8f, 0xd2, 0x38, 0x03, 0x7d, 0x9e, 0x0d, 0xff, 0x00, 0xcc, 0xd6, 0xd3, 0x6b, 0x71, 0x67, 0xd2, 0x3e, 0x64, 0x72, 0xab, 0xdb, 0x8d, 0x54, 0x39, 0xc5, 0x83, 0x6b, 0x3d, 0xee, 0x2e, 0xd4, 0x92, 0x3c, 0x4a, 0x56, 0xba, 0xb4, 0x79, 0x5c, 0xf7, 0xb2, 0x96, 0x6c, 0x8d, 0xaf, 0x80, 0x48, 0x3c, 0xf0, 0xb2, 0x1f, 0x63, 0x9c, 0xe9, 0x3f, 0x24, 0x5c, 0xdb, 0xdd, 0x76, 0x43, 0xde, 0xfd, 0x5c, 0xe3, 0x24, 0xfc, 0x50, 0x00, 0x93, 0x0a, 0x78, 0x8a, 0x0d, 0x49, 0xca, 0xcf, 0x93, 0x63, 0x1b, 0x7d, 0xd7, 0x57, 0x50, 0xd5, 0xef, 0x70, 0x6b, 0x4f, 0xc7, 0x45, 0xdb, 0x74, 0x9e, 0x8d, 0x5e, 0x33, 0x83, 0xd8, 0x37, 0xdd, 0xc3, 0xac, 0x3d, 0xbf, 0x92, 0xc5, 0x5b, 0xea, 0xbf, 0xd5, 0x62, 0xc0, 0xdc, 0xbc, 0xbd, 0x2d, 0x22, 0x5a, 0xcf, 0xdd, 0x69, 0xff, 0x00, 0xd1, 0x8e, 0x5d, 0xa5, 0x38, 0xb5, 0xb0, 0x00, 0xc6, 0xc4, 0x24, 0x4a, 0xd6, 0x8d, 0x18, 0x04, 0x49, 0x88, 0x9e, 0x55, 0xd6, 0x61, 0xb0, 0xc1, 0x70, 0x32, 0xdd, 0x3c, 0x95, 0xda, 0xf1, 0xfe, 0xf5, 0x62, 0xbc, 0x76, 0x8e, 0x75, 0x28, 0x02, 0xa2, 0xe7, 0x7d, 0x92, 0xb9, 0x84, 0x96, 0x96, 0xda, 0xf7, 0x70, 0x12, 0x4e, 0x5a, 0xff, 0x00, 0xff, 0xd1, 0xf3, 0x7a, 0x21, 0xaf, 0xde, 0xef, 0xa2, 0x22, 0x55, 0xfc, 0x5a, 0xbd, 0x42, 0xfb, 0x08, 0xfa, 0x67, 0x4f, 0x82, 0xcd, 0x6d, 0x85, 0xc0, 0x56, 0x3b, 0x90, 
0xb7, 0xf0, 0x2a, 0x0e, 0x63, 0x58, 0x3b, 0xf2, 0xa3, 0x9e, 0x8c, 0xb8, 0x86, 0xbe, 0x49, 0xf1, 0x2c, 0x0c, 0x86, 0xb4, 0x4c, 0x69, 0xe4, 0xaf, 0x6e, 0xcc, 0x6b, 0x7d, 0x46, 0xb3, 0x70, 0xec, 0x38, 0x51, 0x7d, 0x02, 0x8a, 0xc7, 0xa6, 0xd9, 0x20, 0x68, 0x0f, 0x8f, 0x8a, 0xcf, 0xc9, 0xc2, 0xea, 0x59, 0x5b, 0x48, 0xb0, 0x91, 0xae, 0xe6, 0xc9, 0x03, 0xc9, 0x30, 0x51, 0x66, 0xd4, 0x0d, 0xad, 0xbd, 0x5f, 0x53, 0xcc, 0x6b, 0xb6, 0x90, 0x5a, 0x3b, 0x83, 0x0b, 0x43, 0x17, 0x31, 0xd6, 0xc3, 0x6e, 0x12, 0x3b, 0x79, 0xac, 0xc1, 0x89, 0x47, 0xd9, 0xe8, 0x63, 0x98, 0x45, 0xed, 0x6c, 0x5a, 0xf1, 0xa0, 0x27, 0xc5, 0x5b, 0xc3, 0x6f, 0xa6, 0xe0, 0x1c, 0x7d, 0xb3, 0xa2, 0x69, 0x34, 0x7b, 0xae, 0x1a, 0x8d, 0x45, 0x17, 0x9d, 0xeb, 0xfd, 0x21, 0xd8, 0xb9, 0xae, 0xb5, 0x80, 0xbb, 0x1e, 0xd2, 0x5c, 0xd7, 0x78, 0x13, 0xf9, 0xae, 0x4b, 0xea, 0xc7, 0x4a, 0x39, 0xbd, 0x55, 0xb3, 0xed, 0x66, 0x38, 0xf5, 0x09, 0x22, 0x41, 0x23, 0xe8, 0x37, 0xfb, 0x4b, 0xa1, 0xeb, 0xd6, 0xfe, 0x88, 0x31, 0xbf, 0x41, 0xc0, 0xee, 0xd2, 0x74, 0x02, 0x78, 0x53, 0xfa, 0x97, 0x43, 0x19, 0x85, 0x65, 0xff, 0x00, 0x9d, 0x71, 0x33, 0xe4, 0x1a, 0x7d, 0x8d, 0x53, 0x42, 0x56, 0x35, 0x6b, 0xe5, 0x80, 0x06, 0xc7, 0x57, 0xa7, 0xc4, 0xa9, 0xdb, 0xb6, 0x81, 0x1f, 0xeb, 0xd9, 0x69, 0x56, 0xc2, 0xd0, 0x00, 0xe5, 0x55, 0xc0, 0x12, 0xc2, 0xd7, 0x4e, 0xa2, 0x5a, 0x7c, 0x0a, 0xd0, 0x63, 0x9a, 0xd1, 0xaf, 0xd2, 0xe2, 0x3c, 0x12, 0x62, 0x66, 0xc6, 0x42, 0x23, 0x5a, 0x49, 0x8f, 0x10, 0xa2, 0xd2, 0x3e, 0x28, 0x9d, 0xc4, 0x88, 0x09, 0x29, 0x16, 0xc3, 0x3c, 0x24, 0x8d, 0xe6, 0x92, 0x72, 0x1f, 0xff, 0xd2, 0xf3, 0xbb, 0xb0, 0xfe, 0xcb, 0x99, 0xe9, 0xce, 0xf6, 0x88, 0x2d, 0x77, 0x91, 0x5b, 0x3d, 0x3d, 0xd0, 0xe6, 0x90, 0xa9, 0x65, 0x57, 0x38, 0x95, 0xdd, 0xcb, 0x9a, 0x7d, 0xce, 0xf2, 0x3f, 0x44, 0x23, 0x60, 0x58, 0x76, 0xe9, 0xca, 0x8c, 0xea, 0x1b, 0x31, 0x02, 0x32, 0x23, 0xea, 0xee, 0xb1, 0xcd, 0xb0, 0xc7, 0x87, 0x74, 0x7a, 0xeb, 0x70, 0x1a, 0x71, 0xe1, 0xfe, 0xe4, 0x1c, 0x1d, 0xae, 0xe5, 0x69, 0xd8, 0xfa, 0x99, 0x50, 0x0d, 0x1a, 0xf7, 0x2a, 0x3a, 0x0c, 0xf4, 0x1a, 0x8e, 0xc7, 0x27, 0x5d, 0xbf, 0x18, 0x41, 0xdc, 0xc2, 0xf0, 0x7f, 0x74, 0xf6, 0x3a, 0x22, 0x66, 0xdb, 0x68, 0xc6, 0x80, 0x48, 0x6b, 0x88, 0x06, 0x39, 0x0d, 0xee, 0xaa, 0x1f, 0xb3, 0xd5, 0x1b, 0x83, 0xd8, 0x3b, 0x38, 0x8f, 0x69, 0xfe, 0xdf, 0xd1, 0x4d, 0x29, 0xa1, 0x4c, 0x7a, 0xf4, 0xbf, 0xa7, 0x92, 0xcf, 0xa5, 0x20, 0x08, 0xf3, 0xf6, 0xff, 0x00, 0x15, 0xbb, 0xd1, 0x31, 0xd9, 0x5e, 0x3d, 0x75, 0x56, 0x36, 0x88, 0x00, 0x81, 0xe0, 0x16, 0x5e, 0x55, 0x74, 0x3f, 0x00, 0x9d, 0xe0, 0xcc, 0x69, 0xe7, 0x3a, 0x2d, 0xbe, 0x90, 0x00, 0xa9, 0xae, 0xef, 0x1f, 0x95, 0x4b, 0x0d, 0x9a, 0xdc, 0xc7, 0x45, 0xfe, 0xb1, 0x7d, 0x60, 0xa7, 0xa1, 0xe0, 0x1f, 0x4e, 0x1d, 0x99, 0x69, 0x02, 0x9a, 0xcf, 0x1f, 0xca, 0x7b, 0xbf, 0x90, 0xc5, 0xc2, 0xb3, 0xeb, 0x57, 0xd6, 0x03, 0x6b, 0xae, 0x39, 0xb6, 0x82, 0xe3, 0x31, 0xa1, 0x68, 0xf2, 0x6b, 0x5c, 0x12, 0xfa, 0xe1, 0x91, 0x66, 0x47, 0x5d, 0xb8, 0x3b, 0x4f, 0x44, 0x36, 0xb6, 0x8f, 0x28, 0xdd, 0xff, 0x00, 0x7e, 0x46, 0xab, 0x12, 0x2b, 0x65, 0x55, 0x32, 0xa7, 0x62, 0xb6, 0xbd, 0xf7, 0x64, 0x10, 0xdb, 0x03, 0x9f, 0x1b, 0x9e, 0xc7, 0xd9, 0xb8, 0x3b, 0x1f, 0x67, 0xf3, 0x6c, 0x52, 0x80, 0xd7, 0x7d, 0x0f, 0xea, 0x7f, 0x5d, 0x1d, 0x67, 0xa6, 0x0b, 0x1e, 0x47, 0xda, 0x69, 0x3b, 0x2e, 0x03, 0xc7, 0xf3, 0x5f, 0x1f, 0xf0, 0x8b, 0xa1, 0x02, 0x46, 0xba, 0x79, 0xaf, 0x32, 0xff, 0x00, 0x16, 0xad, 0xca, 0x1d, 0x57, 0x2a, 0xdc, 0x79, 0x18, 0x41, 0xb0, 0xf6, 0x9e, 0xe4, 0x9f, 0xd0, 0x8f, 0xeb, 0x31, 0xab, 0xd2, 0x83, 0xa4, 0xcb, 0x8c, 0xb8, 0xa0, 0x42, 0x12, 0x7b, 0x67, 0x9f, 0x2f, 0xf5, 0x09, 0x26, 0x96, 
0xc4, 0xce, 0xa9, 0x20, 0xa7, 0xff, 0xd3, 0xf3, 0x2f, 0xb4, 0x5d, 0xe9, 0x0a, 0xb7, 0x9f, 0x4c, 0x19, 0xdb, 0x3a, 0x2d, 0x5e, 0x94, 0xfd, 0xc4, 0xb7, 0xc5, 0x62, 0xf9, 0x2b, 0xfd, 0x2e, 0xe3, 0x5d, 0xe0, 0x7c, 0x13, 0x48, 0xd1, 0x92, 0x12, 0xa9, 0x0b, 0x7a, 0xbc, 0x2d, 0xc2, 0x7f, 0x92, 0x60, 0xab, 0x4e, 0x79, 0x2e, 0x00, 0xf0, 0xaa, 0xe1, 0xda, 0x3d, 0x43, 0xfc, 0xad, 0x55, 0xbb, 0x80, 0x79, 0x81, 0xa0, 0xe6, 0x54, 0x32, 0x6d, 0x02, 0xbe, 0xf3, 0x61, 0x81, 0xa8, 0x44, 0x14, 0x03, 0x59, 0x0e, 0x1c, 0xf6, 0x1f, 0xdc, 0xb2, 0xec, 0xa3, 0x23, 0x77, 0xe8, 0x6e, 0x70, 0xf2, 0x25, 0x1f, 0x1f, 0x17, 0xa9, 0x6d, 0x71, 0x36, 0x97, 0x47, 0x00, 0xa4, 0x02, 0xe0, 0x2c, 0x7c, 0xc1, 0xab, 0xd5, 0x31, 0x85, 0x35, 0xd4, 0xe6, 0x13, 0x02, 0xd6, 0x4b, 0x67, 0x48, 0x2b, 0xa9, 0xe9, 0x2e, 0x02, 0xb6, 0x4f, 0x82, 0xe5, 0x7a, 0x95, 0x19, 0xc6, 0x87, 0x3d, 0xfb, 0xa2, 0xb8, 0x79, 0x1e, 0x4d, 0x3b, 0x96, 0xcf, 0x4f, 0xbd, 0xcd, 0xa2, 0xa2, 0x1f, 0xa0, 0x82, 0xd3, 0xfc, 0x97, 0x05, 0x24, 0x36, 0x6b, 0xf3, 0x31, 0xa2, 0x35, 0x79, 0xef, 0xad, 0xf8, 0xae, 0xaf, 0xaf, 0xd8, 0xf2, 0xd8, 0x6d, 0xed, 0x6b, 0xda, 0x7b, 0x18, 0x1b, 0x5d, 0xff, 0x00, 0x52, 0xb1, 0x6d, 0xf0, 0x81, 0x31, 0xca, 0xf4, 0x6e, 0xb1, 0x80, 0xce, 0xb1, 0x84, 0xc0, 0x21, 0xb7, 0xd6, 0x77, 0x31, 0xd1, 0x27, 0xc1, 0xcd, 0xfe, 0xd2, 0xe3, 0xec, 0xe8, 0x1d, 0x45, 0x96, 0xb0, 0x9a, 0xb7, 0x87, 0x3f, 0x68, 0x2d, 0xf7, 0x01, 0x1f, 0xbe, 0xd1, 0xf4, 0x7f, 0xb4, 0xa4, 0x0d, 0x77, 0xbb, 0xfa, 0x8f, 0x80, 0x3a, 0x7f, 0x43, 0xaa, 0xe2, 0xdf, 0xd2, 0x65, 0x7e, 0x95, 0xe4, 0x0f, 0x1f, 0xa1, 0xfe, 0x6b, 0x16, 0x9f, 0x52, 0xfa, 0xc1, 0xd3, 0xba, 0x6d, 0x26, 0xdc, 0xac, 0x86, 0xd4, 0xd9, 0x0d, 0x31, 0x2e, 0x74, 0x9e, 0xdb, 0x59, 0x2e, 0x55, 0xe8, 0xc9, 0xb2, 0x96, 0xd5, 0x4b, 0x9f, 0xb8, 0x6d, 0xda, 0x1c, 0x04, 0x09, 0x03, 0xfe, 0x8a, 0xc6, 0xfa, 0xd3, 0xf5, 0x6a, 0xbe, 0xbb, 0x5b, 0x2e, 0xc6, 0xb5, 0x94, 0xe6, 0xd5, 0x20, 0x97, 0x7d, 0x1b, 0x1b, 0xf9, 0xad, 0x7c, 0x7d, 0x17, 0xb7, 0xf3, 0x1e, 0x92, 0x1b, 0x7f, 0xf8, 0xe0, 0x7d, 0x59, 0xdd, 0xfd, 0x32, 0xd8, 0x8f, 0xa5, 0xe8, 0x3a, 0x12, 0x5c, 0x3f, 0xfc, 0xc4, 0xfa, 0xc3, 0xb3, 0x77, 0xa7, 0x56, 0xed, 0xdb, 0x76, 0x7a, 0x8d, 0xdd, 0x1f, 0xbf, 0xfd, 0x44, 0x92, 0x56, 0x8f, 0xff, 0xd4, 0xf2, 0xe8, 0x86, 0x17, 0x1e, 0xfa, 0x04, 0x56, 0x4b, 0x43, 0x6c, 0x6f, 0x2d, 0xe5, 0x46, 0x01, 0x64, 0x2b, 0x14, 0x32, 0x5b, 0xb4, 0xa0, 0x52, 0x1d, 0xde, 0x9b, 0x94, 0xdb, 0xab, 0x6b, 0x81, 0xf7, 0x05, 0xb0, 0xd7, 0x07, 0xb2, 0x27, 0x55, 0xc6, 0x57, 0x65, 0xd8, 0x76, 0x6e, 0x64, 0xed, 0xee, 0x16, 0xce, 0x27, 0x57, 0x63, 0xda, 0x0c, 0xc2, 0x8e, 0x51, 0x67, 0x84, 0xfa, 0x1d, 0xdd, 0x62, 0xc7, 0x07, 0xe9, 0xf7, 0xa3, 0xd6, 0x6c, 0x02, 0x41, 0x55, 0x31, 0xf3, 0x2b, 0xb3, 0xba, 0x2b, 0x2e, 0x68, 0x24, 0x1d, 0x47, 0x64, 0xca, 0xa6, 0x50, 0x41, 0x65, 0x90, 0x6c, 0xb1, 0xa5, 0xae, 0x33, 0x23, 0x51, 0xe4, 0xab, 0x7d, 0x5d, 0xcb, 0xb6, 0xcc, 0x37, 0xd0, 0x40, 0x73, 0x71, 0xde, 0x58, 0x09, 0xe7, 0x6f, 0x2c, 0x44, 0xc9, 0xc9, 0xae, 0xba, 0x9d, 0x63, 0x88, 0x01, 0xa0, 0x95, 0x9d, 0xf5, 0x3f, 0x2a, 0xe6, 0x67, 0xdb, 0x50, 0x83, 0x55, 0xad, 0x36, 0x3e, 0x78, 0x10, 0x74, 0x77, 0xfd, 0x2d, 0xaa, 0x4c, 0x7d, 0x58, 0x73, 0x91, 0xa0, 0x0f, 0x51, 0x45, 0xb7, 0x33, 0xdd, 0x58, 0x69, 0x1d, 0xd8, 0x0c, 0x9f, 0x96, 0x88, 0x19, 0x99, 0x19, 0xac, 0xcf, 0xa3, 0xd2, 0xad, 0xb5, 0xdb, 0x76, 0x8f, 0xad, 0xc4, 0xea, 0xcf, 0xdf, 0x7e, 0xdf, 0xdd, 0xfc, 0xd5, 0xa3, 0x5e, 0x43, 0x2b, 0x6b, 0xb2, 0xad, 0x3b, 0x6a, 0xa4, 0x13, 0xa7, 0x04, 0xac, 0x7a, 0x6f, 0xb3, 0x23, 0x26, 0xcc, 0xfb, 0xb4, 0x75, 0x8e, 0x01, 0x83, 0xf7, 0x58, 0x3e, 0x8b, 0x53, 0xa7, 0x2a, 0x1a, 0x31, 
0x42, 0x36, 0x5d, 0x4c, 0x9a, 0xf2, 0xdc, 0xc6, 0xfe, 0x98, 0xb4, 0x34, 0xcb, 0x48, 0x0a, 0x8f, 0xdb, 0xb2, 0xeb, 0x76, 0xd6, 0x07, 0x5c, 0x59, 0xc9, 0x64, 0x8f, 0x93, 0xa7, 0x73, 0x16, 0x83, 0xaf, 0x0e, 0xa4, 0x33, 0xef, 0x50, 0xc5, 0x0c, 0xda, 0x59, 0x10, 0x06, 0x8a, 0x2e, 0x29, 0x0e, 0xac, 0xc2, 0x31, 0x3d, 0x36, 0x69, 0x7e, 0xd6, 0xcc, 0xf5, 0x3d, 0x6f, 0xb3, 0xeb, 0x1b, 0x76, 0xef, 0x3b, 0xa3, 0xfa, 0xc9, 0x2b, 0x5f, 0x66, 0x6f, 0xa9, 0x1e, 0x73, 0xf2, 0x49, 0x2e, 0x39, 0xf7, 0x4f, 0xb7, 0x8d, 0xff, 0xd5, 0xf3, 0x26, 0xfe, 0x0a, 0xc5, 0x1b, 0xa7, 0xcb, 0xb2, 0xcf, 0x49, 0x03, 0xb2, 0x46, 0xee, 0xd9, 0xd9, 0xb3, 0xf4, 0x9f, 0x25, 0x4a, 0xdf, 0x4b, 0x77, 0xe8, 0x27, 0xd4, 0xef, 0x1c, 0x2a, 0x29, 0x26, 0xc5, 0x7c, 0x9d, 0x6c, 0x7f, 0xb7, 0x6e, 0x1b, 0x26, 0x7f, 0x05, 0xa3, 0xfe, 0x53, 0x8d, 0x62, 0x57, 0x30, 0x92, 0x12, 0xfa, 0x2f, 0x86, 0xdf, 0xa4, 0xec, 0x67, 0xfe, 0xd0, 0xf4, 0xff, 0x00, 0x4d, 0xfc, 0xdf, 0x78, 0xe1, 0x68, 0x7d, 0x54, 0x99, 0xbf, 0x6f, 0xf3, 0xbe, 0xdf, 0x8e, 0xdd, 0x7f, 0xef, 0xeb, 0x97, 0x49, 0x3e, 0x3b, 0x7f, 0x06, 0x2c, 0x9f, 0x37, 0x5f, 0xf0, 0x9f, 0x4c, 0xeb, 0x7b, 0xbf, 0x67, 0x55, 0xe8, 0xff, 0x00, 0x31, 0xbc, 0x7a, 0x9e, 0x31, 0xdb, 0xfe, 0x92, 0xae, 0x37, 0x7a, 0x4d, 0xdb, 0xe2, 0x17, 0x9d, 0xa4, 0xa3, 0xc9, 0xba, 0xfc, 0x7b, 0x7d, 0x5f, 0x52, 0xa7, 0x7e, 0xd1, 0x28, 0xf8, 0xf3, 0xb0, 0xc7, 0x32, 0xbc, 0x99, 0x24, 0xc5, 0xe3, 0xab, 0xeb, 0x1f, 0xa4, 0xf5, 0xfc, 0xe1, 0x25, 0xe4, 0xe9, 0x24, 0x97, 0xff, 0xd9, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x21, 0x00, 0x00, 0x00, 0x00, 0x00, 0x55, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x41, 0x00, 0x64, 0x00, 0x6f, 0x00, 0x62, 0x00, 0x65, 0x00, 0x20, 0x00, 0x50, 0x00, 0x68, 0x00, 0x6f, 0x00, 0x74, 0x00, 0x6f, 0x00, 0x73, 0x00, 0x68, 0x00, 0x6f, 0x00, 0x70, 0x00, 0x00, 0x00, 0x13, 0x00, 0x41, 0x00, 0x64, 0x00, 0x6f, 0x00, 0x62, 0x00, 0x65, 0x00, 0x20, 0x00, 0x50, 0x00, 0x68, 0x00, 0x6f, 0x00, 0x74, 0x00, 0x6f, 0x00, 0x73, 0x00, 0x68, 0x00, 0x6f, 0x00, 0x70, 0x00, 0x20, 0x00, 0x37, 0x00, 0x2e, 0x00, 0x30, 0x00, 0x00, 0x00, 0x01, 0x00, 0x38, 0x42, 0x49, 0x4d, 0x04, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0xff, 0xe1, 0x15, 0x67, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x6e, 0x73, 0x2e, 0x61, 0x64, 0x6f, 0x62, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x78, 0x61, 0x70, 0x2f, 0x31, 0x2e, 0x30, 0x2f, 0x00, 0x3c, 0x3f, 0x78, 0x70, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x20, 0x62, 0x65, 0x67, 0x69, 0x6e, 0x3d, 0x27, 0xef, 0xbb, 0xbf, 0x27, 0x20, 0x69, 0x64, 0x3d, 0x27, 0x57, 0x35, 0x4d, 0x30, 0x4d, 0x70, 0x43, 0x65, 0x68, 0x69, 0x48, 0x7a, 0x72, 0x65, 0x53, 0x7a, 0x4e, 0x54, 0x63, 0x7a, 0x6b, 0x63, 0x39, 0x64, 0x27, 0x3f, 0x3e, 0x0a, 0x3c, 0x3f, 0x61, 0x64, 0x6f, 0x62, 0x65, 0x2d, 0x78, 0x61, 0x70, 0x2d, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x20, 0x65, 0x73, 0x63, 0x3d, 0x22, 0x43, 0x52, 0x22, 0x3f, 0x3e, 0x0a, 0x3c, 0x78, 0x3a, 0x78, 0x61, 0x70, 0x6d, 0x65, 0x74, 0x61, 0x20, 0x78, 0x6d, 0x6c, 0x6e, 0x73, 0x3a, 0x78, 0x3d, 0x27, 0x61, 0x64, 0x6f, 0x62, 0x65, 0x3a, 0x6e, 0x73, 0x3a, 0x6d, 0x65, 0x74, 0x61, 0x2f, 0x27, 0x20, 0x78, 0x3a, 0x78, 0x61, 0x70, 0x74, 0x6b, 0x3d, 0x27, 0x58, 0x4d, 0x50, 0x20, 0x74, 0x6f, 0x6f, 0x6c, 0x6b, 0x69, 0x74, 0x20, 0x32, 0x2e, 0x38, 0x2e, 0x32, 0x2d, 0x33, 0x33, 0x2c, 0x20, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x20, 0x31, 0x2e, 0x35, 0x27, 0x3e, 0x0a, 0x3c, 0x72, 0x64, 0x66, 0x3a, 0x52, 0x44, 0x46, 0x20, 0x78, 0x6d, 0x6c, 0x6e, 0x73, 0x3a, 0x72, 0x64, 0x66, 0x3d, 0x27, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 
0x77, 0x77, 0x2e, 0x77, 0x33, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x31, 0x39, 0x39, 0x39, 0x2f, 0x30, 0x32, 0x2f, 0x32, 0x32, 0x2d, 0x72, 0x64, 0x66, 0x2d, 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x2d, 0x6e, 0x73, 0x23, 0x27, 0x20, 0x78, 0x6d, 0x6c, 0x6e, 0x73, 0x3a, 0x69, 0x58, 0x3d, 0x27, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x6e, 0x73, 0x2e, 0x61, 0x64, 0x6f, 0x62, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x69, 0x58, 0x2f, 0x31, 0x2e, 0x30, 0x2f, 0x27, 0x3e, 0x0a, 0x0a, 0x20, 0x3c, 0x72, 0x64, 0x66, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x61, 0x62, 0x6f, 0x75, 0x74, 0x3d, 0x27, 0x75, 0x75, 0x69, 0x64, 0x3a, 0x32, 0x32, 0x64, 0x30, 0x32, 0x62, 0x30, 0x61, 0x2d, 0x62, 0x32, 0x34, 0x39, 0x2d, 0x31, 0x31, 0x64, 0x62, 0x2d, 0x38, 0x61, 0x66, 0x38, 0x2d, 0x39, 0x31, 0x64, 0x35, 0x34, 0x30, 0x33, 0x66, 0x39, 0x32, 0x66, 0x39, 0x27, 0x0a, 0x20, 0x20, 0x78, 0x6d, 0x6c, 0x6e, 0x73, 0x3a, 0x70, 0x64, 0x66, 0x3d, 0x27, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x6e, 0x73, 0x2e, 0x61, 0x64, 0x6f, 0x62, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x64, 0x66, 0x2f, 0x31, 0x2e, 0x33, 0x2f, 0x27, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x21, 0x2d, 0x2d, 0x20, 0x70, 0x64, 0x66, 0x3a, 0x53, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x20, 0x69, 0x73, 0x20, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x65, 0x64, 0x20, 0x2d, 0x2d, 0x3e, 0x0a, 0x20, 0x3c, 0x2f, 0x72, 0x64, 0x66, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x3c, 0x72, 0x64, 0x66, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x61, 0x62, 0x6f, 0x75, 0x74, 0x3d, 0x27, 0x75, 0x75, 0x69, 0x64, 0x3a, 0x32, 0x32, 0x64, 0x30, 0x32, 0x62, 0x30, 0x61, 0x2d, 0x62, 0x32, 0x34, 0x39, 0x2d, 0x31, 0x31, 0x64, 0x62, 0x2d, 0x38, 0x61, 0x66, 0x38, 0x2d, 0x39, 0x31, 0x64, 0x35, 0x34, 0x30, 0x33, 0x66, 0x39, 0x32, 0x66, 0x39, 0x27, 0x0a, 0x20, 0x20, 0x78, 0x6d, 0x6c, 0x6e, 0x73, 0x3a, 0x70, 0x68, 0x6f, 0x74, 0x6f, 0x73, 0x68, 0x6f, 0x70, 0x3d, 0x27, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x6e, 0x73, 0x2e, 0x61, 0x64, 0x6f, 0x62, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x68, 0x6f, 0x74, 0x6f, 0x73, 0x68, 0x6f, 0x70, 0x2f, 0x31, 0x2e, 0x30, 0x2f, 0x27, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x21, 0x2d, 0x2d, 0x20, 0x70, 0x68, 0x6f, 0x74, 0x6f, 0x73, 0x68, 0x6f, 0x70, 0x3a, 0x43, 0x61, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x73, 0x20, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x65, 0x64, 0x20, 0x2d, 0x2d, 0x3e, 0x0a, 0x20, 0x3c, 0x2f, 0x72, 0x64, 0x66, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x3c, 0x72, 0x64, 0x66, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x61, 0x62, 0x6f, 0x75, 0x74, 0x3d, 0x27, 0x75, 0x75, 0x69, 0x64, 0x3a, 0x32, 0x32, 0x64, 0x30, 0x32, 0x62, 0x30, 0x61, 0x2d, 0x62, 0x32, 0x34, 0x39, 0x2d, 0x31, 0x31, 0x64, 0x62, 0x2d, 0x38, 0x61, 0x66, 0x38, 0x2d, 0x39, 0x31, 0x64, 0x35, 0x34, 0x30, 0x33, 0x66, 0x39, 0x32, 0x66, 0x39, 0x27, 0x0a, 0x20, 0x20, 0x78, 0x6d, 0x6c, 0x6e, 0x73, 0x3a, 0x78, 0x61, 0x70, 0x3d, 0x27, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x6e, 0x73, 0x2e, 0x61, 0x64, 0x6f, 0x62, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x78, 0x61, 0x70, 0x2f, 0x31, 0x2e, 0x30, 0x2f, 0x27, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x21, 0x2d, 0x2d, 0x20, 0x78, 0x61, 0x70, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x73, 0x20, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x65, 0x64, 0x20, 0x2d, 0x2d, 0x3e, 0x0a, 0x20, 0x3c, 0x2f, 0x72, 0x64, 0x66, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 
0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x3c, 0x72, 0x64, 0x66, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x61, 0x62, 0x6f, 0x75, 0x74, 0x3d, 0x27, 0x75, 0x75, 0x69, 0x64, 0x3a, 0x32, 0x32, 0x64, 0x30, 0x32, 0x62, 0x30, 0x61, 0x2d, 0x62, 0x32, 0x34, 0x39, 0x2d, 0x31, 0x31, 0x64, 0x62, 0x2d, 0x38, 0x61, 0x66, 0x38, 0x2d, 0x39, 0x31, 0x64, 0x35, 0x34, 0x30, 0x33, 0x66, 0x39, 0x32, 0x66, 0x39, 0x27, 0x0a, 0x20, 0x20, 0x78, 0x6d, 0x6c, 0x6e, 0x73, 0x3a, 0x78, 0x61, 0x70, 0x4d, 0x4d, 0x3d, 0x27, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x6e, 0x73, 0x2e, 0x61, 0x64, 0x6f, 0x62, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x78, 0x61, 0x70, 0x2f, 0x31, 0x2e, 0x30, 0x2f, 0x6d, 0x6d, 0x2f, 0x27, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x78, 0x61, 0x70, 0x4d, 0x4d, 0x3a, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x44, 0x3e, 0x61, 0x64, 0x6f, 0x62, 0x65, 0x3a, 0x64, 0x6f, 0x63, 0x69, 0x64, 0x3a, 0x70, 0x68, 0x6f, 0x74, 0x6f, 0x73, 0x68, 0x6f, 0x70, 0x3a, 0x32, 0x32, 0x64, 0x30, 0x32, 0x62, 0x30, 0x36, 0x2d, 0x62, 0x32, 0x34, 0x39, 0x2d, 0x31, 0x31, 0x64, 0x62, 0x2d, 0x38, 0x61, 0x66, 0x38, 0x2d, 0x39, 0x31, 0x64, 0x35, 0x34, 0x30, 0x33, 0x66, 0x39, 0x32, 0x66, 0x39, 0x3c, 0x2f, 0x78, 0x61, 0x70, 0x4d, 0x4d, 0x3a, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x44, 0x3e, 0x0a, 0x20, 0x3c, 0x2f, 0x72, 0x64, 0x66, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x3c, 0x72, 0x64, 0x66, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x61, 0x62, 0x6f, 0x75, 0x74, 0x3d, 0x27, 0x75, 0x75, 0x69, 0x64, 0x3a, 0x32, 0x32, 0x64, 0x30, 0x32, 0x62, 0x30, 0x61, 0x2d, 0x62, 0x32, 0x34, 0x39, 0x2d, 0x31, 0x31, 0x64, 0x62, 0x2d, 0x38, 0x61, 0x66, 0x38, 0x2d, 0x39, 0x31, 0x64, 0x35, 0x34, 0x30, 0x33, 0x66, 0x39, 0x32, 0x66, 0x39, 0x27, 0x0a, 0x20, 0x20, 0x78, 0x6d, 0x6c, 0x6e, 0x73, 0x3a, 0x64, 0x63, 0x3d, 0x27, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x70, 0x75, 0x72, 0x6c, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x64, 0x63, 0x2f, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x31, 0x2e, 0x31, 0x2f, 0x27, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x63, 0x3a, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x3c, 0x72, 0x64, 0x66, 0x3a, 0x41, 0x6c, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x72, 0x64, 0x66, 0x3a, 0x6c, 0x69, 0x20, 0x78, 0x6d, 0x6c, 0x3a, 0x6c, 0x61, 0x6e, 0x67, 0x3d, 0x27, 0x78, 0x2d, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x3e, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x72, 0x64, 0x66, 0x3a, 0x6c, 0x69, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x72, 0x64, 0x66, 0x3a, 0x41, 0x6c, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x63, 0x3a, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x3c, 0x2f, 0x72, 0x64, 0x66, 0x3a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, 0x72, 0x64, 0x66, 0x3a, 0x52, 0x44, 0x46, 0x3e, 0x0a, 0x3c, 0x2f, 0x78, 0x3a, 0x78, 0x61, 0x70, 0x6d, 0x65, 0x74, 0x61, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x0a, 0x3c, 0x3f, 0x78, 0x70, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x20, 0x65, 0x6e, 0x64, 0x3d, 0x27, 0x77, 0x27, 0x3f, 0x3e, 0xff, 0xee, 0x00, 0x0e, 0x41, 0x64, 0x6f, 0x62, 0x65, 0x00, 0x64, 0x40, 0x00, 0x00, 0x00, 0x01, 0xff, 0xdb, 0x00, 0x84, 0x00, 0x04, 0x03, 0x03, 0x03, 0x03, 0x03, 0x04, 0x03, 0x03, 0x04, 0x06, 0x04, 0x03, 0x04, 0x06, 0x07, 0x05, 0x04, 0x04, 0x05, 0x07, 0x08, 0x06, 0x06, 0x07, 0x06, 0x06, 0x08, 0x0a, 0x08, 0x09, 0x09, 0x09, 0x09, 0x08, 0x0a, 0x0a, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0a, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x01, 0x04, 0x05, 0x05, 0x08, 0x07, 0x08, 0x0f, 0x0a, 0x0a, 0x0f, 0x14, 0x0e, 0x0e, 0x0e, 0x14, 0x14, 0x0e, 0x0e, 0x0e, 0x0e, 0x14, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x11, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x11, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0xff, 0xc0, 0x00, 0x11, 0x08, 0x00, 0x64, 0x00, 0x64, 0x03, 0x01, 0x11, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xdd, 0x00, 0x04, 0x00, 0x0d, 0xff, 0xc4, 0x01, 0xa2, 0x00, 0x00, 0x00, 0x07, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x05, 0x03, 0x02, 0x06, 0x01, 0x00, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x01, 0x00, 0x02, 0x02, 0x03, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x10, 0x00, 0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x02, 0x06, 0x07, 0x03, 0x04, 0x02, 0x06, 0x02, 0x73, 0x01, 0x02, 0x03, 0x11, 0x04, 0x00, 0x05, 0x21, 0x12, 0x31, 0x41, 0x51, 0x06, 0x13, 0x61, 0x22, 0x71, 0x81, 0x14, 0x32, 0x91, 0xa1, 0x07, 0x15, 0xb1, 0x42, 0x23, 0xc1, 0x52, 0xd1, 0xe1, 0x33, 0x16, 0x62, 0xf0, 0x24, 0x72, 0x82, 0xf1, 0x25, 0x43, 0x34, 0x53, 0x92, 0xa2, 0xb2, 0x63, 0x73, 0xc2, 0x35, 0x44, 0x27, 0x93, 0xa3, 0xb3, 0x36, 0x17, 0x54, 0x64, 0x74, 0xc3, 0xd2, 0xe2, 0x08, 0x26, 0x83, 0x09, 0x0a, 0x18, 0x19, 0x84, 0x94, 0x45, 0x46, 0xa4, 0xb4, 0x56, 0xd3, 0x55, 0x28, 0x1a, 0xf2, 0xe3, 0xf3, 0xc4, 0xd4, 0xe4, 0xf4, 0x65, 0x75, 0x85, 0x95, 0xa5, 0xb5, 0xc5, 0xd5, 0xe5, 0xf5, 0x66, 0x76, 0x86, 0x96, 0xa6, 0xb6, 0xc6, 0xd6, 0xe6, 0xf6, 0x37, 0x47, 0x57, 0x67, 0x77, 0x87, 0x97, 0xa7, 0xb7, 0xc7, 0xd7, 0xe7, 0xf7, 0x38, 0x48, 0x58, 0x68, 0x78, 0x88, 0x98, 0xa8, 0xb8, 0xc8, 0xd8, 0xe8, 0xf8, 0x29, 0x39, 0x49, 0x59, 0x69, 0x79, 0x89, 0x99, 0xa9, 0xb9, 0xc9, 0xd9, 0xe9, 0xf9, 0x2a, 0x3a, 0x4a, 0x5a, 0x6a, 0x7a, 0x8a, 0x9a, 0xaa, 0xba, 0xca, 0xda, 0xea, 0xfa, 0x11, 0x00, 0x02, 0x02, 0x01, 0x02, 0x03, 0x05, 0x05, 0x04, 0x05, 0x06, 0x04, 0x08, 0x03, 0x03, 0x6d, 0x01, 0x00, 0x02, 0x11, 0x03, 0x04, 0x21, 0x12, 0x31, 0x41, 0x05, 0x51, 0x13, 0x61, 0x22, 0x06, 0x71, 0x81, 0x91, 0x32, 0xa1, 0xb1, 0xf0, 0x14, 0xc1, 0xd1, 0xe1, 0x23, 0x42, 0x15, 0x52, 0x62, 0x72, 0xf1, 0x33, 0x24, 0x34, 0x43, 0x82, 0x16, 0x92, 0x53, 0x25, 0xa2, 0x63, 0xb2, 0xc2, 0x07, 0x73, 0xd2, 0x35, 0xe2, 0x44, 0x83, 0x17, 0x54, 0x93, 0x08, 0x09, 0x0a, 0x18, 0x19, 0x26, 0x36, 0x45, 0x1a, 0x27, 0x64, 0x74, 0x55, 0x37, 0xf2, 0xa3, 0xb3, 0xc3, 0x28, 0x29, 0xd3, 0xe3, 0xf3, 0x84, 0x94, 0xa4, 0xb4, 0xc4, 0xd4, 0xe4, 0xf4, 0x65, 0x75, 0x85, 0x95, 0xa5, 0xb5, 0xc5, 0xd5, 0xe5, 0xf5, 0x46, 0x56, 0x66, 0x76, 0x86, 0x96, 0xa6, 0xb6, 0xc6, 0xd6, 0xe6, 0xf6, 0x47, 0x57, 0x67, 0x77, 0x87, 0x97, 0xa7, 0xb7, 0xc7, 0xd7, 0xe7, 0xf7, 0x38, 0x48, 0x58, 0x68, 0x78, 0x88, 0x98, 0xa8, 0xb8, 0xc8, 0xd8, 0xe8, 0xf8, 0x39, 0x49, 0x59, 0x69, 0x79, 0x89, 0x99, 0xa9, 0xb9, 0xc9, 0xd9, 0xe9, 0xf9, 0x2a, 0x3a, 0x4a, 0x5a, 0x6a, 0x7a, 0x8a, 0x9a, 0xaa, 0xba, 0xca, 0xda, 0xea, 0xfa, 0xff, 0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x11, 0x03, 0x11, 0x00, 0x3f, 0x00, 0xf0, 0x67, 0xa6, 0x5c, 0x0f, 0x01, 0xd4, 0x7e, 0x18, 0x12, 0x98, 0xe9, 0xd6, 0x2d, 0x34, 0x6d, 0x70, 0xdf, 0xdc, 0xa1, 0xe3, 0xec, 0x5b, 0xfb, 0x32, 0x24, 0xb2, 0x01, 0x1f, 0x15, 0xa4, 0x52, 0x4a, 0x82, 0x31, 0xf1, 0xfe, 0xd1, 0x3d, 0x14, 0x64, 0x49, 0x64, 0x22, 0x98, 0xcf, 0xa5, 0x46, 0x6c, 0x16, 0x55, 0x71, 0x56, 0x62, 0x28, 0x07, 0xc5, 0x45, 0x15, 
0xa0, 0xc8, 0x89, 0x33, 0xe1, 0x63, 0xd2, 0xd8, 0x34, 0x44, 0x17, 0xa0, 0x2c, 0x4d, 0x16, 0xbb, 0xed, 0xdc, 0xf8, 0x64, 0xc1, 0x6b, 0x31, 0x42, 0x18, 0x8e, 0xc7, 0xb5, 0x2a, 0x7d, 0xb2, 0x56, 0xc5, 0x61, 0x8c, 0xf2, 0xa0, 0x1b, 0x1e, 0x83, 0x0d, 0xa1, 0x63, 0x50, 0x1f, 0x97, 0x7c, 0x2a, 0xa9, 0x1a, 0x9a, 0x86, 0x4f, 0xb4, 0xb4, 0x38, 0x0a, 0xa6, 0x0b, 0xb8, 0x0c, 0x05, 0x14, 0xf8, 0x76, 0x3e, 0x19, 0x14, 0xb6, 0x78, 0xf8, 0x8c, 0x2a, 0xd5, 0x01, 0xdc, 0x6f, 0x8a, 0x1a, 0xe3, 0x8d, 0xab, 0xff, 0xd0, 0xf0, 0xec, 0xe9, 0x15, 0xb5, 0xb9, 0x5a, 0x7c, 0x4c, 0xa2, 0x9e, 0x24, 0xf5, 0xca, 0xc6, 0xe5, 0x99, 0xd9, 0x34, 0x99, 0x04, 0x3a, 0x7d, 0xb5, 0xba, 0xd5, 0x51, 0x63, 0x0e, 0xc7, 0xc5, 0x9b, 0x73, 0xf8, 0xe4, 0x6f, 0x76, 0xca, 0xd9, 0xda, 0x54, 0x6d, 0x72, 0x2e, 0x1a, 0x57, 0x11, 0x44, 0x40, 0x0d, 0x27, 0x7a, 0x0f, 0xd9, 0x5f, 0x12, 0x69, 0x4c, 0x84, 0xcd, 0x36, 0xe3, 0x85, 0xb2, 0xcd, 0x2f, 0x4a, 0x8b, 0x58, 0x36, 0xf6, 0x76, 0xa8, 0x64, 0x64, 0x3c, 0xa4, 0x93, 0xaa, 0x25, 0x3c, 0x49, 0xda, 0xa4, 0xe5, 0x26, 0x54, 0xe4, 0x8c, 0x7c, 0x5c, 0x93, 0x4d, 0x67, 0xc9, 0x3a, 0x6e, 0x9f, 0x13, 0xb4, 0xce, 0xf7, 0x3a, 0x9b, 0xad, 0x52, 0xd6, 0x2a, 0xd1, 0x49, 0xee, 0xc7, 0xf8, 0x64, 0x46, 0x42, 0x4e, 0xcd, 0x92, 0xc2, 0x00, 0xdd, 0x8a, 0x47, 0xe5, 0x69, 0x6e, 0xd4, 0xa4, 0x08, 0x16, 0x83, 0x9c, 0x8c, 0xdd, 0x95, 0x6b, 0xb9, 0xf6, 0xef, 0x97, 0x78, 0x94, 0xe3, 0x78, 0x04, 0xa4, 0xf3, 0xe8, 0xee, 0x64, 0xe1, 0x12, 0x10, 0x05, 0x6a, 0xc7, 0xc0, 0x6f, 0x53, 0xf3, 0xc9, 0x89, 0xb4, 0x9c, 0x4e, 0xb4, 0xf2, 0xd3, 0xde, 0x7a, 0xd2, 0x19, 0x16, 0x38, 0x61, 0x5d, 0xd9, 0x88, 0x05, 0x9c, 0xf4, 0x0a, 0x0f, 0x5f, 0x73, 0x84, 0xe4, 0xa4, 0xc7, 0x0d, 0xa5, 0xf1, 0x59, 0xba, 0x5c, 0x08, 0x98, 0x6f, 0xc8, 0x20, 0xfa, 0x4e, 0x4e, 0xf6, 0x69, 0xe1, 0xa2, 0x89, 0xfd, 0x1f, 0x77, 0x2c, 0xe6, 0xce, 0xd6, 0x17, 0x9a, 0x69, 0xdb, 0xd3, 0x86, 0x18, 0xc1, 0x67, 0x77, 0x26, 0x80, 0x28, 0x1b, 0x93, 0x88, 0x41, 0x0f, 0x40, 0xb0, 0xfc, 0x87, 0xf3, 0x43, 0x98, 0xd7, 0x58, 0x96, 0xdb, 0x4d, 0x91, 0x88, 0xe5, 0x6c, 0x58, 0xdc, 0x5c, 0x2a, 0xf7, 0x2c, 0xb1, 0xfc, 0x20, 0x8f, 0x02, 0xd9, 0x65, 0x06, 0xbe, 0x26, 0x6f, 0xa2, 0x7f, 0xce, 0x3d, 0x69, 0x26, 0xdd, 0x13, 0x52, 0xbf, 0xbd, 0x92, 0x62, 0x59, 0x4c, 0x90, 0xac, 0x50, 0x45, 0x5e, 0xbb, 0x09, 0x03, 0x12, 0x29, 0x84, 0x00, 0xc4, 0xc9, 0x11, 0xff, 0x00, 0x42, 0xe7, 0xa7, 0x7a, 0xd4, 0xfd, 0x21, 0x79, 0xe9, 0x78, 0x71, 0x8b, 0x95, 0x39, 0x75, 0xaf, 0x4e, 0x98, 0x78, 0x42, 0x38, 0xdf, 0xff, 0xd1, 0xf0, 0xe6, 0xa0, 0x58, 0xc8, 0x84, 0x9a, 0xaa, 0x30, 0x55, 0xf9, 0x0a, 0x6f, 0x90, 0x0c, 0xca, 0x72, 0x48, 0xb8, 0x1e, 0x89, 0xa7, 0x23, 0x17, 0x24, 0xff, 0x00, 0x61, 0xb6, 0x54, 0x76, 0x6e, 0x1b, 0xa7, 0xbe, 0x50, 0xf2, 0xc1, 0xd7, 0x4c, 0x52, 0x5e, 0x33, 0x5b, 0xe9, 0x10, 0xf4, 0x54, 0x3c, 0x5e, 0x77, 0xee, 0x49, 0xec, 0x2b, 0xb6, 0x63, 0xe4, 0xc9, 0xc3, 0xef, 0x73, 0xf0, 0xe1, 0x32, 0x1b, 0xf2, 0x7a, 0x05, 0xce, 0xad, 0x65, 0xa1, 0x98, 0xb4, 0x0f, 0x2a, 0x5b, 0x23, 0xeb, 0x12, 0x00, 0x88, 0xb0, 0xa8, 0x66, 0x46, 0x3d, 0xea, 0x7b, 0xfb, 0x9e, 0x99, 0x89, 0xbc, 0x8d, 0x97, 0x3a, 0x34, 0x05, 0x32, 0x5d, 0x1f, 0xc9, 0x1a, 0x8c, 0x36, 0x8c, 0x6f, 0x66, 0xfa, 0xc6, 0xb7, 0x7d, 0xf0, 0x94, 0x04, 0xf0, 0x88, 0xc9, 0xd5, 0x9d, 0x8d, 0x4b, 0x11, 0xd4, 0x9f, 0xbb, 0x25, 0xc5, 0xdc, 0xa2, 0x03, 0x99, 0x4b, 0xbc, 0xf3, 0x0d, 0x97, 0x96, 0x74, 0xe5, 0xf2, 0xb6, 0x80, 0x95, 0xbd, 0x99, 0x15, 0xf5, 0x4b, 0xd2, 0x37, 0x58, 0x46, 0xd4, 0x27, 0xc5, 0xce, 0xc1, 0x7c, 0x30, 0x8e, 0x68, 0x94, 0x7b, 0x9e, 0x6d, 0xe6, 0x7b, 0x9b, 0x5d, 0x3a, 0xd8, 0xdb, 0x32, 0xfa, 0x77, 0x65, 0x15, 0xe4, 0x57, 0xa7, 0x21, 0x55, 
0x04, 0x57, 0xef, 0xd8, 0x66, 0x56, 0x38, 0x19, 0x1b, 0xe8, 0xe0, 0x67, 0x98, 0xc7, 0x1a, 0x1c, 0xde, 0x71, 0x71, 0x79, 0x2c, 0xf2, 0xfa, 0x8c, 0x48, 0xec, 0xb5, 0x24, 0x9a, 0x0c, 0xce, 0x75, 0x29, 0xae, 0x8c, 0x67, 0xd4, 0xb5, 0x0b, 0x4b, 0x04, 0x05, 0xef, 0x2e, 0x66, 0x8e, 0x18, 0x08, 0x15, 0xdd, 0x8f, 0x11, 0xb0, 0xeb, 0x4c, 0x04, 0x5b, 0x21, 0x2a, 0x7d, 0x41, 0xe4, 0x4f, 0xcb, 0xcb, 0x5d, 0x12, 0x45, 0xb8, 0xb7, 0x53, 0x71, 0xaa, 0x9f, 0x86, 0x5b, 0xd6, 0x50, 0x4a, 0xed, 0xba, 0x46, 0x77, 0x00, 0x13, 0xd4, 0x8c, 0x85, 0xd3, 0x12, 0x6d, 0xeb, 0x1a, 0x67, 0x95, 0xd9, 0x39, 0x39, 0x50, 0xac, 0xff, 0x00, 0x6f, 0xc4, 0xff, 0x00, 0x1c, 0x81, 0x92, 0xb2, 0x6b, 0x6d, 0x02, 0xdd, 0xbd, 0x36, 0x92, 0x36, 0x2d, 0x1f, 0xc0, 0x2a, 0x0b, 0x28, 0x1b, 0x91, 0x41, 0xf4, 0x9c, 0xb6, 0x25, 0x81, 0x46, 0xfe, 0x81, 0xb5, 0xad, 0x3d, 0xba, 0x57, 0xb7, 0xf9, 0xf6, 0xc9, 0xb0, 0x7f, 0xff, 0xd2, 0xf0, 0xe2, 0x86, 0x95, 0xc4, 0x67, 0x7e, 0x3f, 0x11, 0xf7, 0xa8, 0x19, 0x06, 0x69, 0x8d, 0xca, 0xca, 0x24, 0x8f, 0xd3, 0x52, 0x24, 0x89, 0x47, 0x25, 0x1f, 0xcb, 0x20, 0xf8, 0xb2, 0xb2, 0x76, 0x6e, 0x88, 0x36, 0xf6, 0x6f, 0x2a, 0xc1, 0x6e, 0xfa, 0x45, 0xad, 0xbc, 0x3f, 0x0b, 0x46, 0x81, 0x4d, 0x46, 0xea, 0x7a, 0x9a, 0x83, 0x9a, 0xa9, 0xdd, 0xbb, 0xec, 0x7b, 0x06, 0x5b, 0xe5, 0xcf, 0x2e, 0x69, 0xfa, 0x5c, 0xcd, 0x7b, 0x14, 0x5e, 0xa5, 0xee, 0xf5, 0xb8, 0x7d, 0xdd, 0x99, 0xba, 0xef, 0x91, 0x16, 0x5b, 0x36, 0xb6, 0x65, 0x0d, 0xac, 0xb2, 0x5b, 0xed, 0x34, 0x81, 0x7a, 0xbb, 0x46, 0x40, 0x6a, 0x9e, 0xb4, 0x39, 0x31, 0x13, 0x49, 0xda, 0xd2, 0x9b, 0xed, 0x1e, 0xc4, 0x24, 0xb3, 0x35, 0xb2, 0x88, 0x60, 0x06, 0xe6, 0x56, 0x98, 0x96, 0x79, 0x1e, 0x31, 0x51, 0xc9, 0x8f, 0xcb, 0x00, 0xe6, 0xb3, 0xe4, 0xf9, 0x2b, 0xcc, 0x7a, 0x94, 0xda, 0x96, 0xa9, 0x71, 0x77, 0x70, 0x79, 0xcd, 0x33, 0x97, 0x76, 0x3f, 0xcc, 0xc6, 0xa6, 0x9f, 0x2e, 0x99, 0xb9, 0xc6, 0x2a, 0x21, 0xe6, 0x73, 0xca, 0xe6, 0x4a, 0x51, 0x1a, 0x99, 0x1c, 0x28, 0x04, 0x93, 0xd0, 0x0e, 0xa4, 0xe4, 0xda, 0x5f, 0x50, 0xfe, 0x4a, 0xfe, 0x48, 0xb5, 0xb2, 0xc1, 0xe6, 0x1f, 0x31, 0x7e, 0xef, 0x52, 0x91, 0x43, 0xc3, 0x6e, 0x77, 0xf4, 0x22, 0x6d, 0xbf, 0xe4, 0x63, 0x0e, 0xbf, 0xca, 0x36, 0xeb, 0x5c, 0x84, 0xa5, 0x48, 0x7d, 0x3b, 0x61, 0xa1, 0xdb, 0x5b, 0x2c, 0x71, 0xda, 0x45, 0xc4, 0x28, 0x00, 0x81, 0xdb, 0x31, 0xc9, 0xb4, 0xb2, 0x3b, 0x5d, 0x27, 0xa5, 0x05, 0x1b, 0xc7, 0xdb, 0x10, 0xa9, 0xbd, 0xa6, 0x93, 0x0c, 0x75, 0xe4, 0x39, 0x35, 0x41, 0x3d, 0xc5, 0x06, 0xdb, 0x8e, 0xfd, 0x46, 0x5b, 0x1d, 0x98, 0x95, 0x4f, 0x46, 0xdb, 0xd5, 0xfb, 0x29, 0x5e, 0x9d, 0x0d, 0x32, 0xeb, 0x61, 0x4f, 0xff, 0xd3, 0xf1, 0x46, 0x9a, 0x16, 0x1b, 0x91, 0x71, 0x28, 0xac, 0x4a, 0x14, 0x30, 0x3e, 0x19, 0x54, 0xb9, 0x36, 0xc7, 0x9b, 0x2d, 0xd1, 0x6c, 0x45, 0xe3, 0xdc, 0xde, 0xc8, 0x95, 0x5b, 0x87, 0xf8, 0x41, 0x1d, 0x10, 0x54, 0x01, 0x98, 0x79, 0x25, 0xd1, 0xda, 0xe9, 0xe1, 0xb5, 0x9e, 0xac, 0xeb, 0x42, 0xba, 0x8e, 0xdf, 0x8c, 0x31, 0x21, 0x70, 0xb4, 0x5d, 0xbe, 0xc5, 0x7c, 0x2b, 0xed, 0xe1, 0x94, 0x18, 0xb9, 0x51, 0x3d, 0x03, 0x2c, 0x13, 0x6b, 0xf1, 0x42, 0x6e, 0xe2, 0xb7, 0x12, 0xa0, 0xdd, 0x50, 0x9f, 0x4f, 0x6f, 0xa7, 0x6f, 0xc7, 0x03, 0x61, 0xa0, 0x83, 0xb5, 0xf3, 0x97, 0x98, 0x20, 0x9c, 0x44, 0xea, 0xd0, 0xad, 0x48, 0x64, 0x90, 0x21, 0xd8, 0x9f, 0xa7, 0xa6, 0x44, 0xca, 0x99, 0xc6, 0x36, 0xcb, 0x74, 0x5d, 0x7e, 0x5b, 0xfe, 0x31, 0x6a, 0x31, 0xf3, 0x8c, 0xd0, 0xad, 0x40, 0xa3, 0x1f, 0x7c, 0x44, 0xd6, 0x51, 0xd9, 0xe0, 0x5f, 0x9a, 0x7e, 0x41, 0x9f, 0x40, 0xf3, 0x14, 0xba, 0x85, 0xba, 0x34, 0xba, 0x2d, 0xfb, 0x34, 0xd0, 0xcf, 0x4f, 0xb0, 0xce, 0x6a, 0x51, 0xe9, 0xb0, 0x20, 0xf4, 0xf1, 0x19, 0xb2, 0xc3, 0x90, 
0x11, 0x4e, 0x97, 0x55, 0x80, 0x83, 0xc4, 0x17, 0x7e, 0x4c, 0x79, 0x19, 0xfc, 0xd1, 0xe7, 0x78, 0x4b, 0x91, 0x1d, 0xae, 0x92, 0xa6, 0xf6, 0x46, 0x75, 0xe4, 0xad, 0x22, 0x1f, 0xdd, 0xa1, 0x07, 0xb3, 0x1e, 0xfe, 0xd9, 0x92, 0xeb, 0x4b, 0xed, 0xfd, 0x0a, 0xc2, 0x63, 0x27, 0xa4, 0x88, 0x17, 0x60, 0x49, 0x35, 0xdc, 0x8e, 0xa5, 0x7d, 0xab, 0xd3, 0x28, 0x90, 0x50, 0xcd, 0xed, 0x2d, 0xda, 0x15, 0x55, 0x51, 0xf1, 0x1a, 0x0a, 0xf7, 0x39, 0x5d, 0xaa, 0x77, 0x6f, 0x01, 0x8e, 0xa7, 0x7d, 0xfa, 0xff, 0x00, 0x66, 0x10, 0xa8, 0xb8, 0x63, 0x76, 0x90, 0xa8, 0x20, 0x06, 0x56, 0xdb, 0x61, 0xda, 0xbd, 0x4f, 0xcb, 0x24, 0x15, 0x0f, 0xf5, 0x66, 0xe5, 0x5f, 0x4c, 0x53, 0xc3, 0xb7, 0xce, 0x99, 0x6b, 0x17, 0xff, 0xd4, 0xf0, 0xec, 0x57, 0x6f, 0x32, 0xa5, 0xa4, 0x43, 0x76, 0x75, 0xa9, 0xf1, 0x03, 0xfa, 0x64, 0x08, 0x6c, 0x8e, 0xfb, 0x3d, 0x7f, 0xcb, 0x16, 0x2b, 0x3d, 0xbc, 0x16, 0xa3, 0x66, 0x6d, 0x98, 0xfb, 0x1e, 0xb9, 0xac, 0xc8, 0x77, 0xb7, 0x7d, 0x01, 0xb3, 0x37, 0xb8, 0xd3, 0x46, 0x95, 0x68, 0x86, 0xd2, 0x2e, 0x4e, 0xab, 0xf0, 0x23, 0x11, 0x4e, 0x5f, 0xcd, 0x98, 0xe7, 0x25, 0x96, 0x71, 0x83, 0x0f, 0xd6, 0x3c, 0xb9, 0xe7, 0x0d, 0x7c, 0x41, 0x22, 0x5e, 0xb3, 0x20, 0x0c, 0x65, 0x80, 0xc8, 0x63, 0x8e, 0xbb, 0x95, 0xa5, 0x07, 0xeb, 0xcc, 0xac, 0x73, 0x83, 0x4e, 0x5c, 0x59, 0x09, 0xd8, 0xec, 0xc8, 0x57, 0x41, 0xd3, 0x4e, 0x95, 0xa5, 0x5b, 0x4b, 0x6a, 0xcb, 0xab, 0x43, 0x10, 0x4b, 0xeb, 0x85, 0xa2, 0x2c, 0x8e, 0x3f, 0x68, 0x54, 0xf5, 0x00, 0xd3, 0x97, 0x7a, 0x65, 0x79, 0xa6, 0x24, 0x76, 0x6f, 0xd3, 0x62, 0x96, 0x30, 0x78, 0xcb, 0x21, 0xf2, 0xf4, 0x22, 0xce, 0x54, 0x8e, 0x46, 0x26, 0x10, 0x7e, 0x0a, 0xf5, 0xd8, 0xf5, 0x1f, 0x31, 0x98, 0x83, 0x73, 0xb3, 0x91, 0xcd, 0x67, 0xe6, 0x7d, 0xe8, 0x16, 0x69, 0x6f, 0x10, 0x1f, 0x54, 0x9a, 0x37, 0xf5, 0x41, 0x5e, 0x7f, 0x0a, 0x29, 0x62, 0x02, 0xf8, 0x9c, 0xc8, 0x8c, 0x77, 0x6a, 0x99, 0xa0, 0x89, 0xff, 0x00, 0x9c, 0x74, 0xd2, 0xed, 0xed, 0xfc, 0xbb, 0x7b, 0xaa, 0x9a, 0x7d, 0x62, 0xfe, 0x46, 0x2d, 0xfe, 0x4c, 0x51, 0x31, 0x11, 0xa9, 0xf6, 0xef, 0x9b, 0x30, 0x5e, 0x7b, 0x38, 0xdd, 0xf4, 0x7f, 0x95, 0x94, 0xbc, 0x12, 0x43, 0x30, 0x6a, 0xb2, 0xf3, 0x86, 0x40, 0x3e, 0xcb, 0xd7, 0x6a, 0xd7, 0xb1, 0xe9, 0x8f, 0x37, 0x19, 0x97, 0x41, 0x2c, 0x71, 0x20, 0xf5, 0x36, 0x9c, 0x55, 0x78, 0x1d, 0x8a, 0x91, 0xd7, 0x11, 0x14, 0x5a, 0x3e, 0x19, 0x03, 0x10, 0x6b, 0xca, 0xbd, 0x86, 0xf8, 0x9d, 0x95, 0x18, 0x36, 0x65, 0x2e, 0xbc, 0x54, 0x1f, 0xa2, 0x99, 0x00, 0x59, 0x2a, 0x6f, 0x5e, 0x55, 0x15, 0xe9, 0x5f, 0xc3, 0x2f, 0xb6, 0x14, 0xff, 0x00, 0xff, 0xd5, 0xf1, 0x95, 0xfe, 0x80, 0x74, 0x0d, 0x7c, 0xd9, 0x89, 0x3d, 0x78, 0x57, 0x8b, 0xc5, 0x28, 0xe8, 0x55, 0xf7, 0x1f, 0x48, 0xca, 0x38, 0xb8, 0x83, 0x9f, 0x93, 0x07, 0x85, 0x3a, 0x7a, 0x6f, 0x95, 0x66, 0x2b, 0x2c, 0x4c, 0x0d, 0x14, 0x00, 0x3e, 0x9c, 0xc3, 0x98, 0x76, 0xb8, 0x45, 0xbd, 0x02, 0xde, 0x48, 0xee, 0xdc, 0xa0, 0x15, 0xe2, 0x2b, 0xc8, 0x8a, 0x8a, 0xfd, 0x3b, 0x66, 0x3f, 0x00, 0x73, 0x84, 0x2d, 0x36, 0xb5, 0xb5, 0x9e, 0x35, 0x1c, 0x29, 0xc4, 0xfe, 0xc8, 0x04, 0x7f, 0xc4, 0x69, 0x91, 0xe1, 0x67, 0x2c, 0x4a, 0xd2, 0xe9, 0x4e, 0xe3, 0xd4, 0xf4, 0x81, 0x5a, 0x12, 0xc5, 0x41, 0x3f, 0x79, 0x38, 0x9b, 0x60, 0x20, 0x07, 0x34, 0xb0, 0xc9, 0x03, 0x5c, 0x23, 0x03, 0x53, 0x13, 0x56, 0x88, 0xdf, 0x09, 0xda, 0x9b, 0xd3, 0xb6, 0x52, 0x0e, 0xec, 0xe4, 0x29, 0x24, 0xfc, 0xd0, 0xe7, 0x75, 0xe5, 0x57, 0x6b, 0x61, 0xfb, 0xf0, 0xca, 0xaa, 0x57, 0xa8, 0xe6, 0x78, 0x1a, 0x7d, 0xf9, 0x95, 0x8a, 0x5e, 0xa0, 0xe3, 0x67, 0x8f, 0xa0, 0xbd, 0x5b, 0xf2, 0xdf, 0x4a, 0x82, 0xcb, 0x4a, 0xb3, 0xb0, 0xb4, 0x41, 0x0a, 0x70, 0x48, 0xd9, 0x57, 0x60, 0x51, 0x3a, 0x8f, 0xbc, 0xe6, 0x7b, 
0xcb, 0xe4, 0x3b, 0xa7, 0x3f, 0x9b, 0x9f, 0x9a, 0xba, 0x77, 0xe5, 0x5f, 0x95, 0x9c, 0x59, 0x94, 0x9f, 0xcd, 0x37, 0x8c, 0xa9, 0xa6, 0xd9, 0x39, 0xaa, 0xd0, 0x7d, 0xa9, 0x1c, 0x03, 0x5e, 0x09, 0xff, 0x00, 0x0c, 0x76, 0xcb, 0x62, 0x2d, 0xa5, 0xf2, 0x85, 0xbf, 0xe7, 0x87, 0xe6, 0xa3, 0x5e, 0x4d, 0xa8, 0xc9, 0xe6, 0x8b, 0xd5, 0x69, 0x5c, 0xb0, 0x4a, 0xab, 0xc4, 0xb5, 0x35, 0x0a, 0xaa, 0xea, 0x40, 0x03, 0xa0, 0xf6, 0xcb, 0x40, 0x4d, 0x3e, 0xdb, 0xff, 0x00, 0x9c, 0x7f, 0xfc, 0xce, 0x4f, 0xcc, 0xbf, 0x26, 0x25, 0xe5, 0xd3, 0x2f, 0xe9, 0xdd, 0x3d, 0xfe, 0xab, 0xa9, 0xaa, 0xd2, 0xa6, 0x40, 0x2a, 0xb2, 0x71, 0x00, 0x01, 0xea, 0x0d, 0xe8, 0x3a, 0x64, 0x25, 0x16, 0x1c, 0x8b, 0xd9, 0x51, 0x39, 0x28, 0x12, 0x51, 0x41, 0xfd, 0xa3, 0xd2, 0xb9, 0x4f, 0x0d, 0x33, 0xb5, 0xf4, 0x87, 0x9d, 0x79, 0x0e, 0xb4, 0xaf, 0x6a, 0xf8, 0xf1, 0xf0, 0xc9, 0xda, 0xbf, 0xff, 0xd6, 0xf2, 0xc6, 0xb5, 0x68, 0x64, 0xd0, 0x6d, 0x35, 0x20, 0x39, 0xcd, 0x13, 0x0f, 0x5e, 0x61, 0xfc, 0x8f, 0x40, 0x8b, 0x5e, 0xe0, 0x66, 0x1c, 0x4f, 0xaa, 0x9d, 0xe6, 0xa6, 0x1e, 0x91, 0x2e, 0xa9, 0x87, 0x95, 0xee, 0x9c, 0xc5, 0x55, 0x34, 0x60, 0x40, 0xae, 0x57, 0x30, 0xd9, 0xa7, 0x95, 0xbd, 0x6f, 0xcb, 0x26, 0x39, 0x40, 0x0d, 0x4e, 0xc0, 0x9f, 0x9e, 0x50, 0x5d, 0xac, 0x79, 0x33, 0x8b, 0xbb, 0x9b, 0x3b, 0x6b, 0x35, 0x48, 0x54, 0x09, 0x29, 0x56, 0x7f, 0xe1, 0x86, 0x72, 0x00, 0x2c, 0x6e, 0xf7, 0x63, 0x3e, 0x63, 0xbd, 0xbd, 0x5d, 0x20, 0x2a, 0xb3, 0xa4, 0x33, 0x48, 0xab, 0x21, 0x43, 0xf1, 0x2c, 0x47, 0xed, 0x1d, 0xbc, 0x73, 0x18, 0x9b, 0x64, 0x28, 0x96, 0x3a, 0xc7, 0x49, 0xb0, 0xf4, 0xcc, 0xe9, 0x73, 0x6c, 0xb4, 0xf8, 0x67, 0x92, 0x32, 0x21, 0x70, 0x7b, 0x89, 0x05, 0x57, 0xef, 0x38, 0x28, 0x94, 0x4a, 0x7d, 0x13, 0x7d, 0x6a, 0xd3, 0x4c, 0xb8, 0xf2, 0xc3, 0xc8, 0x2e, 0x03, 0xf3, 0xe2, 0x7d, 0x33, 0xb7, 0xc5, 0xcc, 0x71, 0x03, 0xc6, 0xb9, 0x64, 0x06, 0xe2, 0x9a, 0xf2, 0x4f, 0xd2, 0x6d, 0xe9, 0xfe, 0x41, 0x45, 0x5b, 0x18, 0x66, 0xa5, 0x64, 0x09, 0xf4, 0xd5, 0xb7, 0xcd, 0x93, 0xc7, 0xcf, 0x9b, 0xe5, 0x6f, 0xf9, 0xc8, 0x0d, 0x56, 0xeb, 0x59, 0xfc, 0xce, 0xd5, 0x12, 0x61, 0xc4, 0x69, 0xe9, 0x0d, 0xa4, 0x4b, 0xfe, 0x48, 0x40, 0xd5, 0x3e, 0xe4, 0xb6, 0x64, 0x8e, 0x4c, 0x02, 0x61, 0x65, 0xa0, 0x14, 0xb4, 0xb6, 0xb0, 0xb1, 0xb6, 0xb2, 0x97, 0xcb, 0xf1, 0x5a, 0x2d, 0xc6, 0xa5, 0xac, 0xb4, 0x70, 0x5d, 0xc7, 0x3d, 0xc1, 0x51, 0x24, 0x91, 0xc9, 0x31, 0x75, 0x6b, 0x70, 0x9f, 0x14, 0x68, 0x01, 0x46, 0xe4, 0xb5, 0xa3, 0x17, 0xcb, 0x40, 0x61, 0x6f, 0x47, 0xff, 0x00, 0x9c, 0x3a, 0x8f, 0x5b, 0x4f, 0x3c, 0x6b, 0xb7, 0xfa, 0x30, 0x91, 0x3c, 0xa4, 0xb1, 0x95, 0xb9, 0x82, 0x42, 0x0a, 0xbc, 0x8e, 0xe4, 0xdb, 0xa9, 0xef, 0xc9, 0x17, 0x91, 0x24, 0x7c, 0xb2, 0x05, 0x64, 0xfb, 0x75, 0x64, 0x32, 0x39, 0x69, 0x5b, 0x9c, 0xad, 0xb9, 0xdb, 0xa7, 0xb5, 0x3b, 0x53, 0x2a, 0x21, 0x41, 0x44, 0xf3, 0x8b, 0x8f, 0x2e, 0x43, 0x9d, 0x2b, 0xd4, 0x57, 0x23, 0x41, 0x36, 0xff, 0x00, 0xff, 0xd7, 0xf0, 0xc0, 0xd5, 0xb5, 0x11, 0x64, 0xb6, 0x3f, 0x59, 0x90, 0xd9, 0xab, 0x06, 0xf4, 0x79, 0x7c, 0x3b, 0x74, 0xc8, 0x08, 0x8b, 0xb6, 0xe3, 0x96, 0x55, 0x57, 0xb3, 0x3e, 0xf2, 0x35, 0xc7, 0xd6, 0x0b, 0x45, 0x5d, 0xdc, 0x8a, 0x7d, 0xd9, 0x8d, 0x94, 0x3b, 0x3d, 0x1c, 0x9e, 0xc3, 0xe5, 0xc3, 0x2c, 0x7c, 0xc5, 0x0f, 0xee, 0xdb, 0x8b, 0x0c, 0xc4, 0x26, 0x9d, 0xa0, 0x9a, 0x7d, 0x2c, 0xe5, 0xe4, 0x55, 0x7f, 0xee, 0xc1, 0x15, 0x04, 0xd0, 0x12, 0x3c, 0x72, 0x89, 0x1b, 0x2c, 0xcc, 0xa8, 0x2a, 0x8b, 0x87, 0xbb, 0x63, 0x1a, 0x28, 0x65, 0xf0, 0xed, 0xf2, 0xc3, 0xc2, 0x0a, 0x06, 0x4a, 0x46, 0xc7, 0xa5, 0xa3, 0x59, 0xc8, 0xb2, 0xc7, 0x45, 0x22, 0x9c, 0x14, 0x54, 0x10, 0x46, 0xf5, 0x1d, 0x32, 0x5c, 0x14, 0x14, 0xe4, 0x32, 0x2f, 
0x3a, 0xf3, 0xb6, 0x90, 0x9a, 0x6d, 0xae, 0x9f, 0x3d, 0xab, 0xb8, 0x8a, 0x3b, 0xf8, 0x39, 0x44, 0x58, 0xf0, 0x08, 0xd5, 0x14, 0xa5, 0x7b, 0x65, 0x98, 0x8e, 0xfb, 0xb5, 0x67, 0x87, 0xa5, 0xef, 0x5e, 0x44, 0x96, 0x35, 0xb5, 0xb6, 0x59, 0x36, 0xfd, 0xd8, 0xa0, 0xf1, 0x20, 0x53, 0x33, 0xc0, 0x79, 0x59, 0x73, 0x7c, 0xd7, 0xf9, 0xfb, 0xa2, 0xcd, 0x67, 0xf9, 0xa7, 0x7b, 0x72, 0xf1, 0x71, 0x83, 0x53, 0x86, 0x0b, 0x98, 0x24, 0x22, 0x8a, 0xcc, 0x88, 0x23, 0x7f, 0xb8, 0xae, 0xf9, 0x7c, 0x50, 0x1e, 0x5f, 0x7c, 0x48, 0x21, 0x44, 0x6b, 0xce, 0x9b, 0xb0, 0x1b, 0x9e, 0xf5, 0xaf, 0x8e, 0x4d, 0x5f, 0x7a, 0x7f, 0xce, 0x34, 0xf9, 0x5d, 0x3c, 0xa3, 0xf9, 0x69, 0x63, 0xa9, 0x3c, 0x27, 0xeb, 0xda, 0xe1, 0x37, 0xd7, 0x2e, 0xaa, 0xdb, 0x06, 0xda, 0x30, 0x49, 0xfe, 0x54, 0x03, 0x03, 0x49, 0xdc, 0xb3, 0xaf, 0x38, 0xfe, 0x6a, 0xf9, 0x47, 0xc9, 0x3a, 0x74, 0x97, 0xfa, 0xf6, 0xaf, 0x15, 0x85, 0xb8, 0x75, 0x89, 0xb8, 0x87, 0x9a, 0x72, 0xee, 0x2a, 0x14, 0x24, 0x60, 0xb1, 0xa8, 0xdf, 0x07, 0x0b, 0x2d, 0xcb, 0xcf, 0x7f, 0xe8, 0x6a, 0xff, 0x00, 0x26, 0xbd, 0x6a, 0x7f, 0x89, 0x2f, 0xf8, 0x52, 0x9e, 0xb7, 0xe8, 0xb9, 0xb8, 0x57, 0xc2, 0x95, 0xe9, 0x8f, 0x08, 0x5a, 0x2f, 0xff, 0xd0, 0xf0, 0x4d, 0x40, 0xaa, 0xd7, 0x00, 0x64, 0xcb, 0x3c, 0x97, 0xa8, 0xb5, 0x9e, 0xa3, 0x1a, 0xd6, 0x84, 0x95, 0x3f, 0x45, 0x72, 0x9c, 0xa2, 0xc3, 0x99, 0xa5, 0x9d, 0x49, 0xf4, 0x17, 0x97, 0xaf, 0x63, 0x17, 0x52, 0x6f, 0xf0, 0xc8, 0x43, 0x6f, 0x9a, 0xe9, 0x07, 0x70, 0x0e, 0xec, 0x83, 0x51, 0x44, 0xb8, 0x61, 0x1a, 0x9e, 0x11, 0xd3, 0x91, 0x60, 0x68, 0x6b, 0xd3, 0x31, 0x4f, 0x36, 0xd3, 0x4c, 0x52, 0xef, 0x4c, 0xd5, 0x0c, 0xc4, 0x69, 0xda, 0x94, 0xc8, 0x3a, 0xf0, 0x66, 0x07, 0x73, 0xe0, 0x40, 0xfd, 0x79, 0x93, 0x12, 0x1c, 0x9c, 0x32, 0xc7, 0xfc, 0x41, 0x33, 0xd2, 0xb4, 0x6f, 0x38, 0x98, 0x65, 0x76, 0xbf, 0x69, 0x42, 0xd0, 0xaa, 0xc9, 0xde, 0x95, 0xad, 0x28, 0x46, 0x4e, 0xac, 0x39, 0x77, 0x80, 0x11, 0xbf, 0xd8, 0xc7, 0x7c, 0xe1, 0xa5, 0xf9, 0x92, 0x4d, 0x32, 0x5b, 0x8b, 0x93, 0x27, 0xa7, 0x68, 0x56, 0xe2, 0x45, 0xda, 0x85, 0x61, 0x6e, 0x67, 0xad, 0x6b, 0xb0, 0x38, 0xc2, 0x81, 0xe4, 0xc7, 0x52, 0x31, 0x1c, 0x67, 0x86, 0x5b, 0xbd, 0x37, 0xca, 0x7a, 0x94, 0xb1, 0x69, 0xb6, 0x2e, 0xb7, 0x15, 0x48, 0xc2, 0xb4, 0x52, 0x53, 0xac, 0x32, 0xaf, 0xb1, 0xed, 0x9b, 0x10, 0x36, 0x78, 0x5c, 0x9f, 0x51, 0x64, 0x1f, 0x98, 0x3e, 0x58, 0xb6, 0xfc, 0xc8, 0xf2, 0xe5, 0xbc, 0x68, 0x52, 0x2d, 0x5a, 0xd1, 0x84, 0xb6, 0xf3, 0x95, 0x0e, 0xc0, 0x85, 0xe2, 0xcb, 0xd8, 0xd1, 0xbb, 0xe4, 0xc1, 0xa6, 0x97, 0xce, 0x17, 0x5f, 0x95, 0xde, 0x6d, 0xb6, 0xbe, 0xb7, 0x69, 0x34, 0xf3, 0x3c, 0x72, 0xcf, 0xe8, 0xa3, 0x45, 0x49, 0x95, 0x4a, 0x90, 0x3e, 0x35, 0x5a, 0x95, 0x1d, 0xfe, 0x21, 0x93, 0x4d, 0xbe, 0xd2, 0xd2, 0xf5, 0x8b, 0xbd, 0x32, 0x2d, 0x3f, 0x4c, 0x9a, 0xe4, 0xca, 0x9e, 0x90, 0x85, 0x65, 0x55, 0x08, 0x85, 0x91, 0x01, 0x3b, 0x0a, 0x05, 0xe9, 0xb0, 0xc0, 0x5a, 0xc3, 0xcd, 0x3f, 0x3b, 0x7f, 0x26, 0xec, 0xff, 0x00, 0x35, 0x6d, 0x6d, 0xb5, 0x3d, 0x16, 0xfe, 0x0d, 0x3b, 0xcd, 0x96, 0x01, 0x92, 0x46, 0x9e, 0xa2, 0x0b, 0xc8, 0xb7, 0x28, 0x92, 0x71, 0xfb, 0x2e, 0xa7, 0xec, 0x3d, 0x0f, 0xc2, 0x68, 0x71, 0x05, 0x95, 0xd3, 0xe7, 0x9f, 0xfa, 0x16, 0x2f, 0xcd, 0x7f, 0x43, 0xd6, 0xfa, 0xa5, 0x97, 0xab, 0xeb, 0x7a, 0x5f, 0x55, 0xfa, 0xec, 0x5e, 0xaf, 0x0f, 0xf7, 0xed, 0x2b, 0x4e, 0x15, 0xff, 0x00, 0x65, 0xdf, 0x8e, 0x14, 0xf1, 0xbf, 0xff, 0xd1, 0xf0, 0x5a, 0xa7, 0x18, 0x5e, 0x56, 0x1f, 0x68, 0x71, 0x5f, 0xa7, 0xbe, 0x2a, 0x98, 0xdb, 0xfa, 0x90, 0x24, 0x37, 0xb0, 0xfd, 0xb8, 0xa8, 0x58, 0x78, 0xae, 0x43, 0xc9, 0xb4, 0x6d, 0xbb, 0xda, 0x3c, 0xa1, 0xad, 0x43, 0xa8, 0xda, 0xc5, 0x2a, 0x3d, 0x26, 0x5a, 
0x02, 0x2b, 0xbe, 0x60, 0x64, 0x8d, 0x17, 0x6f, 0x8b, 0x20, 0x90, 0x7a, 0x3c, 0x32, 0x8b, 0xa8, 0x02, 0xf3, 0xfd, 0xe0, 0x1b, 0x11, 0x98, 0x66, 0x3b, 0xb9, 0x62, 0x54, 0x83, 0x36, 0xf2, 0xa4, 0xe4, 0x29, 0x34, 0xeb, 0xc8, 0x74, 0xae, 0x0d, 0xc3, 0x65, 0x82, 0x13, 0x6b, 0x57, 0xba, 0x54, 0xe4, 0x8c, 0x41, 0x1b, 0x75, 0xa7, 0xe0, 0x72, 0x5c, 0x4c, 0x84, 0x50, 0x5a, 0xb3, 0xdd, 0xdd, 0xc3, 0x24, 0x33, 0xb1, 0x60, 0xe0, 0x86, 0x52, 0x45, 0x38, 0xd2, 0x87, 0x24, 0x26, 0x6d, 0x8c, 0xe1, 0x41, 0x25, 0xfc, 0xa3, 0xd7, 0x2f, 0x6f, 0x3c, 0xbf, 0x73, 0xa5, 0xb2, 0x2c, 0xd1, 0x69, 0x17, 0x2f, 0x6b, 0x14, 0x8c, 0x0f, 0x21, 0x0d, 0x79, 0x46, 0x09, 0x15, 0xed, 0xb7, 0x4e, 0xd9, 0xb9, 0x8b, 0xcb, 0xe4, 0xa2, 0x5e, 0xa3, 0xa6, 0xdf, 0x6a, 0x36, 0xe4, 0xcd, 0x69, 0x1c, 0x4e, 0x84, 0x7c, 0x76, 0xab, 0x21, 0x67, 0xa8, 0xa7, 0xd9, 0xf8, 0x4d, 0x2b, 0xf3, 0xc3, 0x4d, 0x49, 0x57, 0x98, 0x75, 0x6f, 0x31, 0xda, 0xf9, 0xa3, 0x4b, 0xfd, 0x1f, 0x69, 0x1d, 0xae, 0xa1, 0xa9, 0x7e, 0xee, 0xe6, 0xd2, 0x79, 0x18, 0xf3, 0xb5, 0x1f, 0xee, 0xd9, 0x0a, 0x01, 0x4e, 0x3f, 0xb3, 0x4d, 0xf2, 0x9c, 0xb9, 0x04, 0x05, 0xb7, 0xe2, 0x87, 0x1e, 0xdd, 0x19, 0x3e, 0xaf, 0x6b, 0xae, 0xcb, 0x6d, 0x13, 0x0d, 0x45, 0xa2, 0x8e, 0x06, 0xe5, 0x13, 0x2a, 0x02, 0x01, 0x5e, 0x82, 0xb5, 0x04, 0xe6, 0x11, 0xd4, 0xcd, 0xda, 0x43, 0x49, 0x8e, 0xb7, 0xdc, 0xb1, 0x51, 0xe6, 0x4d, 0x76, 0xd2, 0x61, 0x15, 0xaa, 0x4b, 0xa8, 0xc9, 0x6e, 0x49, 0x79, 0x20, 0xe6, 0x8c, 0x49, 0xad, 0x43, 0x16, 0xe4, 0xa7, 0xaf, 0x43, 0xd3, 0x26, 0x35, 0x75, 0xcd, 0xa8, 0xe8, 0x87, 0x46, 0xbf, 0xc7, 0x9a, 0xff, 0x00, 0xd6, 0xbf, 0x48, 0xfe, 0x88, 0xfd, 0xe7, 0x0f, 0xab, 0xfa, 0x3f, 0x58, 0x7f, 0x5f, 0x8d, 0x3f, 0x9f, 0xa7, 0x5e, 0xd4, 0xc3, 0xf9, 0xd1, 0x7c, 0xb6, 0x47, 0xe4, 0x3a, 0x5b, 0xff, 0xd2, 0xf0, 0xb7, 0xa6, 0x1e, 0xdf, 0xd3, 0xf6, 0xa5, 0x71, 0x54, 0xdb, 0x4b, 0x80, 0x3c, 0x42, 0x26, 0xee, 0x29, 0xbe, 0x51, 0x23, 0x4e, 0x44, 0x05, 0x84, 0x45, 0xa5, 0xd5, 0xf7, 0x97, 0x2e, 0xfd, 0x6b, 0x6a, 0x98, 0x09, 0xab, 0xc7, 0xfc, 0x46, 0x3b, 0x4c, 0x26, 0x32, 0x30, 0x3e, 0x4f, 0x49, 0xd0, 0xfc, 0xfb, 0x05, 0xd4, 0x4a, 0x7d, 0x40, 0xac, 0x3a, 0x8e, 0x84, 0x1c, 0xc5, 0x96, 0x2a, 0x73, 0xe1, 0x9c, 0x16, 0x6d, 0xa5, 0x79, 0x86, 0xd6, 0xec, 0x80, 0x5a, 0xa0, 0xf5, 0xca, 0xcc, 0x5c, 0xa1, 0x2b, 0x1b, 0x26, 0x30, 0x6a, 0x31, 0x46, 0xcf, 0x1c, 0x87, 0x94, 0x64, 0x9e, 0x3d, 0xb6, 0xf0, 0xca, 0xa8, 0x39, 0x51, 0x99, 0x42, 0x6b, 0x1a, 0xc5, 0xa5, 0xa5, 0x94, 0xf7, 0x92, 0xc8, 0xaa, 0xb1, 0x23, 0x30, 0x04, 0xf8, 0x0e, 0x9f, 0x4e, 0x4a, 0x11, 0xb2, 0xd5, 0x9b, 0x25, 0x06, 0x1b, 0xff, 0x00, 0x38, 0xfd, 0xad, 0xdf, 0xda, 0xf9, 0xa2, 0xfe, 0xc5, 0x42, 0xbe, 0x9b, 0x7f, 0x0b, 0xdd, 0xdd, 0x07, 0xaf, 0x14, 0x68, 0xd8, 0x71, 0x6d, 0xbb, 0x90, 0xfc, 0x73, 0x6e, 0xf2, 0xf2, 0xdd, 0xf4, 0xad, 0xa6, 0xab, 0x6d, 0x69, 0x14, 0xfa, 0xee, 0xa0, 0xe2, 0x0b, 0x0d, 0x39, 0x19, 0xfe, 0x11, 0xc5, 0x1a, 0x4a, 0x1d, 0x8f, 0x73, 0x4f, 0xf8, 0x96, 0x0b, 0x40, 0x8d, 0xec, 0xf3, 0x6d, 0x3f, 0x52, 0xba, 0xd6, 0x35, 0x8b, 0xbf, 0x36, 0x6a, 0x5f, 0x0d, 0xc5, 0xdc, 0xa8, 0xb6, 0xa8, 0x7a, 0xc5, 0x6c, 0x9b, 0x22, 0x0f, 0xa3, 0x73, 0x9a, 0xbc, 0xb3, 0xe2, 0x36, 0xed, 0xb1, 0x43, 0x80, 0x53, 0xd0, 0xa7, 0xd4, 0x44, 0xfa, 0x7a, 0xda, 0x83, 0xbd, 0x3e, 0x2f, 0xa7, 0x2b, 0xad, 0x9b, 0xb8, 0x8d, 0xa8, 0xe8, 0x91, 0xdb, 0xfa, 0x2d, 0x6f, 0xc3, 0x8a, 0x2d, 0x56, 0xa3, 0xad, 0x4f, 0x5c, 0xa4, 0x0d, 0xdc, 0xa3, 0xca, 0xd0, 0xbf, 0xa1, 0xe3, 0xfa, 0xe7, 0x0f, 0xf2, 0xb9, 0x57, 0xbf, 0x1a, 0xe4, 0xb8, 0x57, 0xc5, 0xdd, 0xff, 0xd3, 0xf0, 0xcc, 0x5d, 0x7b, 0x70, 0xc5, 0x53, 0x6d, 0x2f, 0xd5, 0xe4, 0x69, 0xfd, 0xdf, 0xec, 0xd7, 0xad, 
0x7d, 0xb2, 0x8c, 0x8d, 0xd8, 0xed, 0x91, 0x9f, 0x43, 0xea, 0xe7, 0xeb, 0x94, 0xad, 0x3e, 0x1e, 0x95, 0xfc, 0x72, 0x81, 0x7d, 0x1c, 0x9d, 0xba, 0xb1, 0x7b, 0xdf, 0xa9, 0x7a, 0xdf, 0xee, 0x2f, 0xd4, 0xfa, 0xe7, 0xed, 0x7a, 0x7f, 0xdd, 0xff, 0x00, 0xb2, 0xae, 0x64, 0x0b, 0xea, 0xe3, 0x9a, 0xbf, 0x4a, 0x6f, 0xa4, 0xff, 0x00, 0x89, 0xbd, 0x45, 0xfa, 0xb5, 0x79, 0xf7, 0xeb, 0xc7, 0xe9, 0xae, 0x57, 0x2e, 0x17, 0x23, 0x1f, 0x89, 0xd1, 0x99, 0x8f, 0xf1, 0xa7, 0x11, 0xcf, 0xd3, 0xf5, 0x29, 0xb5, 0x6b, 0xd3, 0xe8, 0xcc, 0x7f, 0x45, 0xb9, 0xa3, 0xc5, 0x62, 0xbe, 0x68, 0xff, 0x00, 0x15, 0xfd, 0x4c, 0xfe, 0x90, 0xaf, 0xd4, 0xab, 0xf1, 0x7a, 0x7f, 0x62, 0x9d, 0xab, 0xdf, 0x32, 0xb1, 0x70, 0x5e, 0xdc, 0xdc, 0x2d, 0x47, 0x8b, 0x5e, 0xae, 0x4c, 0xbf, 0xf2, 0x37, 0x9f, 0x3d, 0x5b, 0xd2, 0xff, 0x00, 0x8e, 0x87, 0xee, 0x29, 0x5a, 0xf2, 0xf4, 0xaa, 0xd4, 0xa5, 0x36, 0xa7, 0x3a, 0x57, 0xfd, 0x8e, 0x64, 0x3a, 0xf2, 0xf6, 0xbf, 0xcc, 0x7f, 0x5b, 0xfc, 0x23, 0xa7, 0xfe, 0x8e, 0xff, 0x00, 0x8e, 0x37, 0xd6, 0x63, 0xfa, 0xe5, 0x2b, 0xcb, 0x87, 0xec, 0xd6, 0xbd, 0xb9, 0x7d, 0xac, 0xc7, 0xcd, 0x7c, 0x2d, 0xf8, 0x2b, 0x89, 0x26, 0x8f, 0xd4, 0xfa, 0x94, 0x3e, 0x85, 0x29, 0xc9, 0x69, 0xfc, 0x33, 0x58, 0x5d, 0x9c, 0x79, 0xb2, 0xbb, 0x0f, 0xac, 0x7a, 0x2b, 0xea, 0x75, 0xef, 0x92, 0x0c, 0x53, 0x3d, 0x2f, 0xd4, 0xfa, 0xbb, 0xfa, 0x74, 0xf5, 0x39, 0x9a, 0xd7, 0xe7, 0x80, 0x53, 0x79, 0xba, 0x5b, 0xfe, 0x97, 0xfa, 0x4b, 0xfc, 0xba, 0x7f, 0xb1, 0xc7, 0xab, 0x1e, 0x8f, 0xff, 0xd9
+};
diff --git a/chromium/third_party/webrtc/base/testclient.cc b/chromium/third_party/webrtc/base/testclient.cc
new file mode 100644
index 00000000000..32670e21a9e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/testclient.cc
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/testclient.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+// DESIGN: Each packet received is put into a list of packets.
+// Callers can retrieve received packets from any thread by calling
+// NextPacket.
+
+TestClient::TestClient(AsyncPacketSocket* socket)
+ : socket_(socket), ready_to_send_(false) {
+ packets_ = new std::vector<Packet*>();
+ socket_->SignalReadPacket.connect(this, &TestClient::OnPacket);
+ socket_->SignalReadyToSend.connect(this, &TestClient::OnReadyToSend);
+}
+
+TestClient::~TestClient() {
+ delete socket_;
+ for (unsigned i = 0; i < packets_->size(); i++)
+ delete (*packets_)[i];
+ delete packets_;
+}
+
+bool TestClient::CheckConnState(AsyncPacketSocket::State state) {
+  // Wait up to kTimeout for the socket to reach the desired state.
+ uint32 end = TimeAfter(kTimeout);
+ while (socket_->GetState() != state && TimeUntil(end) > 0)
+ Thread::Current()->ProcessMessages(1);
+ return (socket_->GetState() == state);
+}
+
+int TestClient::Send(const char* buf, size_t size) {
+ rtc::PacketOptions options;
+ return socket_->Send(buf, size, options);
+}
+
+int TestClient::SendTo(const char* buf, size_t size,
+ const SocketAddress& dest) {
+ rtc::PacketOptions options;
+ return socket_->SendTo(buf, size, dest, options);
+}
+
+TestClient::Packet* TestClient::NextPacket() {
+ // If no packets are currently available, we go into a get/dispatch loop for
+ // at most 1 second. If, during the loop, a packet arrives, then we can stop
+ // early and return it.
+
+ // Note that the case where no packet arrives is important. We often want to
+ // test that a packet does not arrive.
+
+ // Note also that we only try to pump our current thread's message queue.
+ // Pumping another thread's queue could lead to messages being dispatched from
+ // the wrong thread to non-thread-safe objects.
+
+ uint32 end = TimeAfter(kTimeout);
+ while (TimeUntil(end) > 0) {
+ {
+ CritScope cs(&crit_);
+ if (packets_->size() != 0) {
+ break;
+ }
+ }
+ Thread::Current()->ProcessMessages(1);
+ }
+
+ // Return the first packet placed in the queue.
+ Packet* packet = NULL;
+ CritScope cs(&crit_);
+ if (packets_->size() > 0) {
+ packet = packets_->front();
+ packets_->erase(packets_->begin());
+ }
+
+ return packet;
+}
+
+bool TestClient::CheckNextPacket(const char* buf, size_t size,
+ SocketAddress* addr) {
+ bool res = false;
+ Packet* packet = NextPacket();
+ if (packet) {
+ res = (packet->size == size && memcmp(packet->buf, buf, size) == 0);
+ if (addr)
+ *addr = packet->addr;
+ delete packet;
+ }
+ return res;
+}
+
+bool TestClient::CheckNoPacket() {
+ bool res;
+ Packet* packet = NextPacket();
+ res = (packet == NULL);
+ delete packet;
+ return res;
+}
+
+int TestClient::GetError() {
+ return socket_->GetError();
+}
+
+int TestClient::SetOption(Socket::Option opt, int value) {
+ return socket_->SetOption(opt, value);
+}
+
+bool TestClient::ready_to_send() const {
+ return ready_to_send_;
+}
+
+void TestClient::OnPacket(AsyncPacketSocket* socket, const char* buf,
+ size_t size, const SocketAddress& remote_addr,
+ const PacketTime& packet_time) {
+ CritScope cs(&crit_);
+ packets_->push_back(new Packet(remote_addr, buf, size));
+}
+
+void TestClient::OnReadyToSend(AsyncPacketSocket* socket) {
+ ready_to_send_ = true;
+}
+
+TestClient::Packet::Packet(const SocketAddress& a, const char* b, size_t s)
+ : addr(a), buf(0), size(s) {
+ buf = new char[size];
+ memcpy(buf, b, size);
+}
+
+TestClient::Packet::Packet(const Packet& p)
+ : addr(p.addr), buf(0), size(p.size) {
+ buf = new char[size];
+ memcpy(buf, p.buf, size);
+}
+
+TestClient::Packet::~Packet() {
+ delete[] buf;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/testclient.h b/chromium/third_party/webrtc/base/testclient.h
new file mode 100644
index 00000000000..d56f948b049
--- /dev/null
+++ b/chromium/third_party/webrtc/base/testclient.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TESTCLIENT_H_
+#define WEBRTC_BASE_TESTCLIENT_H_
+
+#include <vector>
+#include "webrtc/base/asyncudpsocket.h"
+#include "webrtc/base/criticalsection.h"
+
+namespace rtc {
+
+// A simple client that can send TCP or UDP data and check that it receives
+// what it expects to receive. Useful for testing server functionality.
+class TestClient : public sigslot::has_slots<> {
+ public:
+ // Records the contents of a packet that was received.
+ struct Packet {
+ Packet(const SocketAddress& a, const char* b, size_t s);
+ Packet(const Packet& p);
+ virtual ~Packet();
+
+ SocketAddress addr;
+ char* buf;
+ size_t size;
+ };
+
+ // Creates a client that will send and receive with the given socket and
+ // will post itself messages with the given thread.
+ explicit TestClient(AsyncPacketSocket* socket);
+ ~TestClient();
+
+ SocketAddress address() const { return socket_->GetLocalAddress(); }
+ SocketAddress remote_address() const { return socket_->GetRemoteAddress(); }
+
+ // Checks that the socket moves to the specified connect state.
+ bool CheckConnState(AsyncPacketSocket::State state);
+
+ // Checks that the socket is connected to the remote side.
+ bool CheckConnected() {
+ return CheckConnState(AsyncPacketSocket::STATE_CONNECTED);
+ }
+
+  // Sends using the client's socket.
+ int Send(const char* buf, size_t size);
+
+  // Sends using the client's socket to the given destination.
+ int SendTo(const char* buf, size_t size, const SocketAddress& dest);
+
+ // Returns the next packet received by the client or 0 if none is received
+ // within a reasonable amount of time. The caller must delete the packet
+ // when done with it.
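+  // Example usage (illustrative sketch, assuming a TestClient named |client|):
+  //   TestClient::Packet* packet = client.NextPacket();
+  //   if (packet != NULL) {
+  //     // Inspect packet->buf, packet->size and packet->addr as needed.
+  //     delete packet;
+  //   }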
+ Packet* NextPacket();
+
+  // Checks that the next packet has the given contents. If addr is non-NULL,
+  // the remote address that the packet was sent from is returned through it.
+ bool CheckNextPacket(const char* buf, size_t len, SocketAddress* addr);
+
+ // Checks that no packets have arrived or will arrive in the next second.
+ bool CheckNoPacket();
+
+ int GetError();
+ int SetOption(Socket::Option opt, int value);
+
+ bool ready_to_send() const;
+
+ private:
+ static const int kTimeout = 1000;
+ // Workaround for the fact that AsyncPacketSocket::GetConnState doesn't exist.
+ Socket::ConnState GetState();
+ // Slot for packets read on the socket.
+ void OnPacket(AsyncPacketSocket* socket, const char* buf, size_t len,
+ const SocketAddress& remote_addr,
+ const PacketTime& packet_time);
+ void OnReadyToSend(AsyncPacketSocket* socket);
+
+ CriticalSection crit_;
+ AsyncPacketSocket* socket_;
+ std::vector<Packet*>* packets_;
+ bool ready_to_send_;
+ DISALLOW_EVIL_CONSTRUCTORS(TestClient);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_TESTCLIENT_H_
diff --git a/chromium/third_party/webrtc/base/testclient_unittest.cc b/chromium/third_party/webrtc/base/testclient_unittest.cc
new file mode 100644
index 00000000000..c282668678e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/testclient_unittest.cc
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/testclient.h"
+#include "webrtc/base/testechoserver.h"
+#include "webrtc/base/thread.h"
+
+using namespace rtc;
+
+void TestUdpInternal(const SocketAddress& loopback) {
+ Thread *main = Thread::Current();
+ AsyncSocket* socket = main->socketserver()
+ ->CreateAsyncSocket(loopback.family(), SOCK_DGRAM);
+ socket->Bind(loopback);
+
+ TestClient client(new AsyncUDPSocket(socket));
+ SocketAddress addr = client.address(), from;
+ EXPECT_EQ(3, client.SendTo("foo", 3, addr));
+ EXPECT_TRUE(client.CheckNextPacket("foo", 3, &from));
+ EXPECT_EQ(from, addr);
+ EXPECT_TRUE(client.CheckNoPacket());
+}
+
+void TestTcpInternal(const SocketAddress& loopback) {
+ Thread *main = Thread::Current();
+ TestEchoServer server(main, loopback);
+
+ AsyncSocket* socket = main->socketserver()
+ ->CreateAsyncSocket(loopback.family(), SOCK_STREAM);
+ AsyncTCPSocket* tcp_socket = AsyncTCPSocket::Create(
+ socket, loopback, server.address());
+ ASSERT_TRUE(tcp_socket != NULL);
+
+ TestClient client(tcp_socket);
+ SocketAddress addr = client.address(), from;
+ EXPECT_TRUE(client.CheckConnected());
+ EXPECT_EQ(3, client.Send("foo", 3));
+ EXPECT_TRUE(client.CheckNextPacket("foo", 3, &from));
+ EXPECT_EQ(from, server.address());
+ EXPECT_TRUE(client.CheckNoPacket());
+}
+
+// Tests whether the TestClient can send UDP to itself.
+TEST(TestClientTest, TestUdpIPv4) {
+ TestUdpInternal(SocketAddress("127.0.0.1", 0));
+}
+
+TEST(TestClientTest, TestUdpIPv6) {
+ if (HasIPv6Enabled()) {
+ TestUdpInternal(SocketAddress("::1", 0));
+ } else {
+ LOG(LS_INFO) << "Skipping IPv6 test.";
+ }
+}
+
+// Tests whether the TestClient can connect to a server and exchange data.
+TEST(TestClientTest, TestTcpIPv4) {
+ TestTcpInternal(SocketAddress("127.0.0.1", 0));
+}
+
+TEST(TestClientTest, TestTcpIPv6) {
+ if (HasIPv6Enabled()) {
+ TestTcpInternal(SocketAddress("::1", 0));
+ } else {
+ LOG(LS_INFO) << "Skipping IPv6 test.";
+ }
+}
diff --git a/chromium/third_party/webrtc/base/testechoserver.h b/chromium/third_party/webrtc/base/testechoserver.h
new file mode 100644
index 00000000000..733b320ddeb
--- /dev/null
+++ b/chromium/third_party/webrtc/base/testechoserver.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TESTECHOSERVER_H_
+#define WEBRTC_BASE_TESTECHOSERVER_H_
+
+#include <list>
+#include "webrtc/base/asynctcpsocket.h"
+#include "webrtc/base/socketaddress.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+// A test echo server that echoes back any packets sent to it.
+// Useful for unit tests.
+class TestEchoServer : public sigslot::has_slots<> {
+ public:
+ TestEchoServer(Thread* thread, const SocketAddress& addr)
+ : server_socket_(thread->socketserver()->CreateAsyncSocket(addr.family(),
+ SOCK_STREAM)) {
+ server_socket_->Bind(addr);
+ server_socket_->Listen(5);
+ server_socket_->SignalReadEvent.connect(this, &TestEchoServer::OnAccept);
+ }
+ ~TestEchoServer() {
+ for (ClientList::iterator it = client_sockets_.begin();
+ it != client_sockets_.end(); ++it) {
+ delete *it;
+ }
+ }
+
+ SocketAddress address() const { return server_socket_->GetLocalAddress(); }
+
+ private:
+ void OnAccept(AsyncSocket* socket) {
+ AsyncSocket* raw_socket = socket->Accept(NULL);
+ if (raw_socket) {
+ AsyncTCPSocket* packet_socket = new AsyncTCPSocket(raw_socket, false);
+ packet_socket->SignalReadPacket.connect(this, &TestEchoServer::OnPacket);
+ packet_socket->SignalClose.connect(this, &TestEchoServer::OnClose);
+ client_sockets_.push_back(packet_socket);
+ }
+ }
+ void OnPacket(AsyncPacketSocket* socket, const char* buf, size_t size,
+ const SocketAddress& remote_addr,
+ const PacketTime& packet_time) {
+ rtc::PacketOptions options;
+ socket->Send(buf, size, options);
+ }
+ void OnClose(AsyncPacketSocket* socket, int err) {
+ ClientList::iterator it =
+ std::find(client_sockets_.begin(), client_sockets_.end(), socket);
+ client_sockets_.erase(it);
+ Thread::Current()->Dispose(socket);
+ }
+
+ typedef std::list<AsyncTCPSocket*> ClientList;
+ scoped_ptr<AsyncSocket> server_socket_;
+ ClientList client_sockets_;
+ DISALLOW_EVIL_CONSTRUCTORS(TestEchoServer);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_TESTECHOSERVER_H_
diff --git a/chromium/third_party/webrtc/base/testutils.h b/chromium/third_party/webrtc/base/testutils.h
new file mode 100644
index 00000000000..74fed45cdfe
--- /dev/null
+++ b/chromium/third_party/webrtc/base/testutils.h
@@ -0,0 +1,629 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TESTUTILS_H__
+#define WEBRTC_BASE_TESTUTILS_H__
+
+// Utilities for testing rtc infrastructure in unittests
+
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+#include <X11/Xlib.h>
+#include <X11/extensions/Xrandr.h>
+
+// X defines a few macros that stomp on types that gunit.h uses.
+#undef None
+#undef Bool
+#endif
+
+#include <map>
+#include <vector>
+#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/thread.h"
+
+namespace testing {
+
+using namespace rtc;
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamSink - Monitors asynchronously signalled events from StreamInterface
+// or AsyncSocket (which should probably be a StreamInterface).
+///////////////////////////////////////////////////////////////////////////////
+
+// Note: Any event that is an error is treated as SSE_ERROR instead of that
+// event.
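+//
+// Example usage (illustrative sketch, for some monitored StreamInterface*
+// |stream|):
+//   StreamSink sink;
+//   sink.Monitor(stream);
+//   ... exercise the stream ...
+//   EXPECT_TRUE(sink.Check(stream, SSE_READ));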
+
+enum StreamSinkEvent {
+ SSE_OPEN = SE_OPEN,
+ SSE_READ = SE_READ,
+ SSE_WRITE = SE_WRITE,
+ SSE_CLOSE = SE_CLOSE,
+ SSE_ERROR = 16
+};
+
+class StreamSink : public sigslot::has_slots<> {
+ public:
+ void Monitor(StreamInterface* stream) {
+ stream->SignalEvent.connect(this, &StreamSink::OnEvent);
+ events_.erase(stream);
+ }
+ void Unmonitor(StreamInterface* stream) {
+ stream->SignalEvent.disconnect(this);
+ // In case you forgot to unmonitor a previous object with this address
+ events_.erase(stream);
+ }
+ bool Check(StreamInterface* stream, StreamSinkEvent event, bool reset = true) {
+ return DoCheck(stream, event, reset);
+ }
+ int Events(StreamInterface* stream, bool reset = true) {
+ return DoEvents(stream, reset);
+ }
+
+ void Monitor(AsyncSocket* socket) {
+ socket->SignalConnectEvent.connect(this, &StreamSink::OnConnectEvent);
+ socket->SignalReadEvent.connect(this, &StreamSink::OnReadEvent);
+ socket->SignalWriteEvent.connect(this, &StreamSink::OnWriteEvent);
+ socket->SignalCloseEvent.connect(this, &StreamSink::OnCloseEvent);
+ // In case you forgot to unmonitor a previous object with this address
+ events_.erase(socket);
+ }
+ void Unmonitor(AsyncSocket* socket) {
+ socket->SignalConnectEvent.disconnect(this);
+ socket->SignalReadEvent.disconnect(this);
+ socket->SignalWriteEvent.disconnect(this);
+ socket->SignalCloseEvent.disconnect(this);
+ events_.erase(socket);
+ }
+ bool Check(AsyncSocket* socket, StreamSinkEvent event, bool reset = true) {
+ return DoCheck(socket, event, reset);
+ }
+ int Events(AsyncSocket* socket, bool reset = true) {
+ return DoEvents(socket, reset);
+ }
+
+ private:
+ typedef std::map<void*,int> EventMap;
+
+ void OnEvent(StreamInterface* stream, int events, int error) {
+ if (error) {
+ events = SSE_ERROR;
+ }
+ AddEvents(stream, events);
+ }
+ void OnConnectEvent(AsyncSocket* socket) {
+ AddEvents(socket, SSE_OPEN);
+ }
+ void OnReadEvent(AsyncSocket* socket) {
+ AddEvents(socket, SSE_READ);
+ }
+ void OnWriteEvent(AsyncSocket* socket) {
+ AddEvents(socket, SSE_WRITE);
+ }
+ void OnCloseEvent(AsyncSocket* socket, int error) {
+ AddEvents(socket, (0 == error) ? SSE_CLOSE : SSE_ERROR);
+ }
+
+ void AddEvents(void* obj, int events) {
+ EventMap::iterator it = events_.find(obj);
+ if (events_.end() == it) {
+ events_.insert(EventMap::value_type(obj, events));
+ } else {
+ it->second |= events;
+ }
+ }
+ bool DoCheck(void* obj, StreamSinkEvent event, bool reset) {
+ EventMap::iterator it = events_.find(obj);
+ if ((events_.end() == it) || (0 == (it->second & event))) {
+ return false;
+ }
+ if (reset) {
+ it->second &= ~event;
+ }
+ return true;
+ }
+ int DoEvents(void* obj, bool reset) {
+ EventMap::iterator it = events_.find(obj);
+ if (events_.end() == it)
+ return 0;
+ int events = it->second;
+ if (reset) {
+ it->second = 0;
+ }
+ return events;
+ }
+
+ EventMap events_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// StreamSource - Implements stream interface and simulates asynchronous
+// events on the stream, without a network. Also buffers written data.
+///////////////////////////////////////////////////////////////////////////////
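+// Example usage (illustrative sketch): queue canned data while the stream is
+// still opening, then open it so it signals SE_OPEN and SE_READ:
+//   StreamSource source;
+//   source.SetState(SS_OPENING);
+//   source.QueueString("HTTP/1.1 200 OK\r\n\r\n");
+//   source.SetState(SS_OPEN);  // fires SignalEvent with SE_OPEN | SE_READ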
+
+class StreamSource : public StreamInterface {
+public:
+ StreamSource() {
+ Clear();
+ }
+
+ void Clear() {
+ readable_data_.clear();
+ written_data_.clear();
+ state_ = SS_CLOSED;
+ read_block_ = 0;
+ write_block_ = SIZE_UNKNOWN;
+ }
+ void QueueString(const char* data) {
+ QueueData(data, strlen(data));
+ }
+ void QueueStringF(const char* format, ...) {
+ va_list args;
+ va_start(args, format);
+ char buffer[1024];
+ size_t len = vsprintfn(buffer, sizeof(buffer), format, args);
+ ASSERT(len < sizeof(buffer) - 1);
+ va_end(args);
+ QueueData(buffer, len);
+ }
+ void QueueData(const char* data, size_t len) {
+ readable_data_.insert(readable_data_.end(), data, data + len);
+ if ((SS_OPEN == state_) && (readable_data_.size() == len)) {
+ SignalEvent(this, SE_READ, 0);
+ }
+ }
+ std::string ReadData() {
+ std::string data;
+ // avoid accessing written_data_[0] if it is undefined
+ if (written_data_.size() > 0) {
+ data.insert(0, &written_data_[0], written_data_.size());
+ }
+ written_data_.clear();
+ return data;
+ }
+ void SetState(StreamState state) {
+ int events = 0;
+ if ((SS_OPENING == state_) && (SS_OPEN == state)) {
+ events |= SE_OPEN;
+ if (!readable_data_.empty()) {
+ events |= SE_READ;
+ }
+ } else if ((SS_CLOSED != state_) && (SS_CLOSED == state)) {
+ events |= SE_CLOSE;
+ }
+ state_ = state;
+ if (events) {
+ SignalEvent(this, events, 0);
+ }
+ }
+  // Will cause Read to block when there are pos bytes or fewer in the read
+  // queue.
+  void SetReadBlock(size_t pos) { read_block_ = pos; }
+  // Will cause Write to block once there are pos bytes or more in the write
+  // queue.
+  void SetWriteBlock(size_t pos) { write_block_ = pos; }
+
+ virtual StreamState GetState() const { return state_; }
+ virtual StreamResult Read(void* buffer, size_t buffer_len,
+ size_t* read, int* error) {
+ if (SS_CLOSED == state_) {
+ if (error) *error = -1;
+ return SR_ERROR;
+ }
+ if ((SS_OPENING == state_) || (readable_data_.size() <= read_block_)) {
+ return SR_BLOCK;
+ }
+ size_t count = _min(buffer_len, readable_data_.size() - read_block_);
+ memcpy(buffer, &readable_data_[0], count);
+ size_t new_size = readable_data_.size() - count;
+ // Avoid undefined access beyond the last element of the vector.
+ // This only happens when new_size is 0.
+ if (count < readable_data_.size()) {
+ memmove(&readable_data_[0], &readable_data_[count], new_size);
+ }
+ readable_data_.resize(new_size);
+ if (read) *read = count;
+ return SR_SUCCESS;
+ }
+ virtual StreamResult Write(const void* data, size_t data_len,
+ size_t* written, int* error) {
+ if (SS_CLOSED == state_) {
+ if (error) *error = -1;
+ return SR_ERROR;
+ }
+ if (SS_OPENING == state_) {
+ return SR_BLOCK;
+ }
+ if (SIZE_UNKNOWN != write_block_) {
+ if (written_data_.size() >= write_block_) {
+ return SR_BLOCK;
+ }
+ if (data_len > (write_block_ - written_data_.size())) {
+ data_len = write_block_ - written_data_.size();
+ }
+ }
+ if (written) *written = data_len;
+ const char* cdata = static_cast<const char*>(data);
+ written_data_.insert(written_data_.end(), cdata, cdata + data_len);
+ return SR_SUCCESS;
+ }
+ virtual void Close() { state_ = SS_CLOSED; }
+
+private:
+ typedef std::vector<char> Buffer;
+ Buffer readable_data_, written_data_;
+ StreamState state_;
+ size_t read_block_, write_block_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// SocketTestClient
+// Creates a simulated client for testing. Works on real and virtual networks.
+///////////////////////////////////////////////////////////////////////////////
+
+class SocketTestClient : public sigslot::has_slots<> {
+public:
+ SocketTestClient() {
+ Init(NULL, AF_INET);
+ }
+ SocketTestClient(AsyncSocket* socket) {
+ Init(socket, socket->GetLocalAddress().family());
+ }
+ SocketTestClient(const SocketAddress& address) {
+ Init(NULL, address.family());
+ socket_->Connect(address);
+ }
+
+ AsyncSocket* socket() { return socket_.get(); }
+
+ void QueueString(const char* data) {
+ QueueData(data, strlen(data));
+ }
+ void QueueStringF(const char* format, ...) {
+ va_list args;
+ va_start(args, format);
+ char buffer[1024];
+ size_t len = vsprintfn(buffer, sizeof(buffer), format, args);
+ ASSERT(len < sizeof(buffer) - 1);
+ va_end(args);
+ QueueData(buffer, len);
+ }
+ void QueueData(const char* data, size_t len) {
+ send_buffer_.insert(send_buffer_.end(), data, data + len);
+ if (Socket::CS_CONNECTED == socket_->GetState()) {
+ Flush();
+ }
+ }
+  std::string ReadData() {
+    std::string data;
+    // Avoid accessing recv_buffer_[0] when the buffer is empty.
+    if (!recv_buffer_.empty()) {
+      data.assign(&recv_buffer_[0], recv_buffer_.size());
+    }
+    recv_buffer_.clear();
+    return data;
+  }
+
+ bool IsConnected() const {
+ return (Socket::CS_CONNECTED == socket_->GetState());
+ }
+ bool IsClosed() const {
+ return (Socket::CS_CLOSED == socket_->GetState());
+ }
+
+private:
+ typedef std::vector<char> Buffer;
+
+ void Init(AsyncSocket* socket, int family) {
+ if (!socket) {
+ socket = Thread::Current()->socketserver()
+ ->CreateAsyncSocket(family, SOCK_STREAM);
+ }
+ socket_.reset(socket);
+ socket_->SignalConnectEvent.connect(this,
+ &SocketTestClient::OnConnectEvent);
+ socket_->SignalReadEvent.connect(this, &SocketTestClient::OnReadEvent);
+ socket_->SignalWriteEvent.connect(this, &SocketTestClient::OnWriteEvent);
+ socket_->SignalCloseEvent.connect(this, &SocketTestClient::OnCloseEvent);
+ }
+
+ void Flush() {
+ size_t sent = 0;
+ while (sent < send_buffer_.size()) {
+ int result = socket_->Send(&send_buffer_[sent],
+ send_buffer_.size() - sent);
+ if (result > 0) {
+ sent += result;
+ } else {
+ break;
+ }
+ }
+ size_t new_size = send_buffer_.size() - sent;
+ memmove(&send_buffer_[0], &send_buffer_[sent], new_size);
+ send_buffer_.resize(new_size);
+ }
+
+ void OnConnectEvent(AsyncSocket* socket) {
+ if (!send_buffer_.empty()) {
+ Flush();
+ }
+ }
+ void OnReadEvent(AsyncSocket* socket) {
+ char data[64 * 1024];
+ int result = socket_->Recv(data, ARRAY_SIZE(data));
+ if (result > 0) {
+ recv_buffer_.insert(recv_buffer_.end(), data, data + result);
+ }
+ }
+ void OnWriteEvent(AsyncSocket* socket) {
+ if (!send_buffer_.empty()) {
+ Flush();
+ }
+ }
+ void OnCloseEvent(AsyncSocket* socket, int error) {
+ }
+
+ scoped_ptr<AsyncSocket> socket_;
+ Buffer send_buffer_, recv_buffer_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// SocketTestServer
+// Creates a simulated server for testing. Works on real and virtual networks.
+///////////////////////////////////////////////////////////////////////////////
+
+class SocketTestServer : public sigslot::has_slots<> {
+ public:
+ SocketTestServer(const SocketAddress& address)
+ : socket_(Thread::Current()->socketserver()
+ ->CreateAsyncSocket(address.family(), SOCK_STREAM))
+ {
+ socket_->SignalReadEvent.connect(this, &SocketTestServer::OnReadEvent);
+ socket_->Bind(address);
+ socket_->Listen(5);
+ }
+ virtual ~SocketTestServer() {
+ clear();
+ }
+
+ size_t size() const { return clients_.size(); }
+ SocketTestClient* client(size_t index) const { return clients_[index]; }
+ SocketTestClient* operator[](size_t index) const { return client(index); }
+
+ void clear() {
+ for (size_t i=0; i<clients_.size(); ++i) {
+ delete clients_[i];
+ }
+ clients_.clear();
+ }
+
+ private:
+ void OnReadEvent(AsyncSocket* socket) {
+ AsyncSocket* accepted =
+ static_cast<AsyncSocket*>(socket_->Accept(NULL));
+ if (!accepted)
+ return;
+ clients_.push_back(new SocketTestClient(accepted));
+ }
+
+ scoped_ptr<AsyncSocket> socket_;
+ std::vector<SocketTestClient*> clients_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+// Generic Utilities
+///////////////////////////////////////////////////////////////////////////////
+
+inline bool ReadFile(const char* filename, std::string* contents) {
+ FILE* fp = fopen(filename, "rb");
+ if (!fp)
+ return false;
+ char buffer[1024*64];
+ size_t read;
+ contents->clear();
+ while ((read = fread(buffer, 1, sizeof(buffer), fp))) {
+ contents->append(buffer, read);
+ }
+ bool success = (0 != feof(fp));
+ fclose(fp);
+ return success;
+}
+
+// Look in parent dir for parallel directory.
+inline rtc::Pathname GetSiblingDirectory(
+ const std::string& parallel_dir) {
+ rtc::Pathname path = rtc::Filesystem::GetCurrentDirectory();
+ while (!path.empty()) {
+ rtc::Pathname potential_parallel_dir = path;
+ potential_parallel_dir.AppendFolder(parallel_dir);
+ if (rtc::Filesystem::IsFolder(potential_parallel_dir)) {
+ return potential_parallel_dir;
+ }
+
+ path.SetFolder(path.parent_folder());
+ }
+ return path;
+}
+
+inline rtc::Pathname GetGoogle3Directory() {
+ return GetSiblingDirectory("google3");
+}
+
+inline rtc::Pathname GetTalkDirectory() {
+ return GetSiblingDirectory("talk");
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// Unittest predicates which are similar to STREQ, but for raw memory
+///////////////////////////////////////////////////////////////////////////////
+
+inline AssertionResult CmpHelperMemEq(const char* expected_expression,
+ const char* expected_length_expression,
+ const char* actual_expression,
+ const char* actual_length_expression,
+ const void* expected,
+ size_t expected_length,
+ const void* actual,
+ size_t actual_length)
+{
+ if ((expected_length == actual_length)
+ && (0 == memcmp(expected, actual, expected_length))) {
+ return AssertionSuccess();
+ }
+
+ Message msg;
+ msg << "Value of: " << actual_expression
+ << " [" << actual_length_expression << "]";
+ if (true) { //!actual_value.Equals(actual_expression)) {
+ size_t buffer_size = actual_length * 2 + 1;
+ char* buffer = STACK_ARRAY(char, buffer_size);
+ hex_encode(buffer, buffer_size,
+ reinterpret_cast<const char*>(actual), actual_length);
+ msg << "\n Actual: " << buffer << " [" << actual_length << "]";
+ }
+
+ msg << "\nExpected: " << expected_expression
+ << " [" << expected_length_expression << "]";
+ if (true) { //!expected_value.Equals(expected_expression)) {
+ size_t buffer_size = expected_length * 2 + 1;
+ char* buffer = STACK_ARRAY(char, buffer_size);
+ hex_encode(buffer, buffer_size,
+ reinterpret_cast<const char*>(expected), expected_length);
+ msg << "\nWhich is: " << buffer << " [" << expected_length << "]";
+ }
+
+ return AssertionFailure(msg);
+}
+
+inline AssertionResult CmpHelperFileEq(const char* expected_expression,
+ const char* expected_length_expression,
+ const char* actual_filename,
+ const void* expected,
+ size_t expected_length,
+ const char* filename)
+{
+ std::string contents;
+ if (!ReadFile(filename, &contents)) {
+ Message msg;
+ msg << "File '" << filename << "' could not be read.";
+ return AssertionFailure(msg);
+ }
+ return CmpHelperMemEq(expected_expression, expected_length_expression,
+ actual_filename, "",
+ expected, expected_length,
+ contents.c_str(), contents.size());
+}
+
+#define EXPECT_MEMEQ(expected, expected_length, actual, actual_length) \
+ EXPECT_PRED_FORMAT4(::testing::CmpHelperMemEq, expected, expected_length, \
+ actual, actual_length)
+
+#define ASSERT_MEMEQ(expected, expected_length, actual, actual_length) \
+ ASSERT_PRED_FORMAT4(::testing::CmpHelperMemEq, expected, expected_length, \
+ actual, actual_length)
+
+#define EXPECT_FILEEQ(expected, expected_length, filename) \
+ EXPECT_PRED_FORMAT3(::testing::CmpHelperFileEq, expected, expected_length, \
+ filename)
+
+#define ASSERT_FILEEQ(expected, expected_length, filename) \
+ ASSERT_PRED_FORMAT3(::testing::CmpHelperFileEq, expected, expected_length, \
+ filename)
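+
+// Example usage (illustrative sketch, for a received buffer |buf| of length
+// |len|):
+//   static const char kExpected[] = {0x01, 0x02, 0x03};
+//   EXPECT_MEMEQ(kExpected, sizeof(kExpected), buf, len);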
+
+///////////////////////////////////////////////////////////////////////////////
+// Helpers for initializing constant memory with integers in a particular byte
+// order
+///////////////////////////////////////////////////////////////////////////////
+
+#define BYTE_CAST(x) static_cast<uint8>((x) & 0xFF)
+
+// Declare an N-bit integer as a little-endian sequence of bytes
+#define LE16(x) BYTE_CAST(((uint16)x) >> 0), BYTE_CAST(((uint16)x) >> 8)
+
+#define LE32(x) BYTE_CAST(((uint32)x) >> 0), BYTE_CAST(((uint32)x) >> 8), \
+ BYTE_CAST(((uint32)x) >> 16), BYTE_CAST(((uint32)x) >> 24)
+
+#define LE64(x) BYTE_CAST(((uint64)x) >> 0), BYTE_CAST(((uint64)x) >> 8), \
+ BYTE_CAST(((uint64)x) >> 16), BYTE_CAST(((uint64)x) >> 24), \
+ BYTE_CAST(((uint64)x) >> 32), BYTE_CAST(((uint64)x) >> 40), \
+ BYTE_CAST(((uint64)x) >> 48), BYTE_CAST(((uint64)x) >> 56)
+
+// Declare an N-bit integer as a big-endian (Internet) sequence of bytes
+#define BE16(x) BYTE_CAST(((uint16)x) >> 8), BYTE_CAST(((uint16)x) >> 0)
+
+#define BE32(x) BYTE_CAST(((uint32)x) >> 24), BYTE_CAST(((uint32)x) >> 16), \
+ BYTE_CAST(((uint32)x) >> 8), BYTE_CAST(((uint32)x) >> 0)
+
+#define BE64(x) BYTE_CAST(((uint64)x) >> 56), BYTE_CAST(((uint64)x) >> 48), \
+ BYTE_CAST(((uint64)x) >> 40), BYTE_CAST(((uint64)x) >> 32), \
+ BYTE_CAST(((uint64)x) >> 24), BYTE_CAST(((uint64)x) >> 16), \
+ BYTE_CAST(((uint64)x) >> 8), BYTE_CAST(((uint64)x) >> 0)
+
+// Declare an N-bit integer as a this-endian (local machine) sequence of bytes
+#ifndef BIG_ENDIAN
+#define BIG_ENDIAN 1
+#endif // BIG_ENDIAN
+
+#if BIG_ENDIAN
+#define TE16 BE16
+#define TE32 BE32
+#define TE64 BE64
+#else // !BIG_ENDIAN
+#define TE16 LE16
+#define TE32 LE32
+#define TE64 LE64
+#endif // !BIG_ENDIAN
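+
+// Example usage (illustrative sketch): a wire-format header with a big-endian
+// 16-bit type of 0x1234 followed by a big-endian 32-bit length of 8 could be
+// declared as:
+//   static const unsigned char kHeader[] = {BE16(0x1234), BE32(8)};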
+
+///////////////////////////////////////////////////////////////////////////////
+
+// Helpers for determining if X/screencasting is available (on linux).
+
+#define MAYBE_SKIP_SCREENCAST_TEST() \
+ if (!testing::IsScreencastingAvailable()) { \
+ LOG(LS_WARNING) << "Skipping test, since it doesn't have the requisite " \
+ << "X environment for screen capture."; \
+ return; \
+ } \
+
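+// Example usage (illustrative sketch; the test name is hypothetical):
+//   TEST(ScreenCapturerTest, CapturesFrame) {
+//     MAYBE_SKIP_SCREENCAST_TEST();
+//     // ... test body that needs an X display ...
+//   }
+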
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+struct XDisplay {
+ XDisplay() : display_(XOpenDisplay(NULL)) { }
+ ~XDisplay() { if (display_) XCloseDisplay(display_); }
+ bool IsValid() const { return display_ != NULL; }
+ operator Display*() { return display_; }
+ private:
+ Display* display_;
+};
+#endif
+
+// Returns true if screencasting is available. When false, anything that uses
+// screencasting features may fail.
+inline bool IsScreencastingAvailable() {
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ XDisplay display;
+ if (!display.IsValid()) {
+ LOG(LS_WARNING) << "No X Display available.";
+ return false;
+ }
+ int ignored_int, major_version, minor_version;
+ if (!XRRQueryExtension(display, &ignored_int, &ignored_int) ||
+ !XRRQueryVersion(display, &major_version, &minor_version) ||
+ major_version < 1 ||
+ (major_version < 2 && minor_version < 3)) {
+ LOG(LS_WARNING) << "XRandr version: " << major_version << "."
+ << minor_version;
+ LOG(LS_WARNING) << "XRandr is not supported or is too old (pre 1.3).";
+ return false;
+ }
+#endif
+ return true;
+}
+} // namespace testing
+
+#endif // WEBRTC_BASE_TESTUTILS_H__
diff --git a/chromium/third_party/webrtc/base/thread.cc b/chromium/third_party/webrtc/base/thread.cc
new file mode 100644
index 00000000000..49a299d6529
--- /dev/null
+++ b/chromium/third_party/webrtc/base/thread.cc
@@ -0,0 +1,560 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/thread.h"
+
+#ifndef __has_feature
+#define __has_feature(x) 0 // Compatibility with non-clang or LLVM compilers.
+#endif // __has_feature
+
+#if defined(WEBRTC_WIN)
+#include <comdef.h>
+#elif defined(WEBRTC_POSIX)
+#include <time.h>
+#endif
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/timeutils.h"
+
+#if !__has_feature(objc_arc) && (defined(WEBRTC_MAC))
+#include "webrtc/base/maccocoathreadhelper.h"
+#include "webrtc/base/scoped_autorelease_pool.h"
+#endif
+
+namespace rtc {
+
+ThreadManager* ThreadManager::Instance() {
+ LIBJINGLE_DEFINE_STATIC_LOCAL(ThreadManager, thread_manager, ());
+ return &thread_manager;
+}
+
+// static
+Thread* Thread::Current() {
+ return ThreadManager::Instance()->CurrentThread();
+}
+
+#if defined(WEBRTC_POSIX)
+ThreadManager::ThreadManager() {
+ pthread_key_create(&key_, NULL);
+#ifndef NO_MAIN_THREAD_WRAPPING
+ WrapCurrentThread();
+#endif
+#if !__has_feature(objc_arc) && (defined(WEBRTC_MAC))
+  // Under Automatic Reference Counting (ARC), you cannot use autorelease pools
+  // directly; you use @autoreleasepool blocks instead. Also, we are
+  // maintaining thread safety using immutability within the context of GCD
+  // dispatch queues in this case.
+ InitCocoaMultiThreading();
+#endif
+}
+
+ThreadManager::~ThreadManager() {
+#if __has_feature(objc_arc)
+ @autoreleasepool
+#elif defined(WEBRTC_MAC)
+ // This is called during exit, at which point apparently no NSAutoreleasePools
+ // are available; but we might still need them to do cleanup (or we get the
+ // "no autoreleasepool in place, just leaking" warning when exiting).
+ ScopedAutoreleasePool pool;
+#endif
+ {
+ UnwrapCurrentThread();
+ pthread_key_delete(key_);
+ }
+}
+
+Thread *ThreadManager::CurrentThread() {
+ return static_cast<Thread *>(pthread_getspecific(key_));
+}
+
+void ThreadManager::SetCurrentThread(Thread *thread) {
+ pthread_setspecific(key_, thread);
+}
+#endif
+
+#if defined(WEBRTC_WIN)
+ThreadManager::ThreadManager() {
+ key_ = TlsAlloc();
+#ifndef NO_MAIN_THREAD_WRAPPING
+ WrapCurrentThread();
+#endif
+}
+
+ThreadManager::~ThreadManager() {
+ UnwrapCurrentThread();
+ TlsFree(key_);
+}
+
+Thread *ThreadManager::CurrentThread() {
+ return static_cast<Thread *>(TlsGetValue(key_));
+}
+
+void ThreadManager::SetCurrentThread(Thread *thread) {
+ TlsSetValue(key_, thread);
+}
+#endif
+
+Thread *ThreadManager::WrapCurrentThread() {
+ Thread* result = CurrentThread();
+ if (NULL == result) {
+ result = new Thread();
+ result->WrapCurrentWithThreadManager(this);
+ }
+ return result;
+}
+
+void ThreadManager::UnwrapCurrentThread() {
+ Thread* t = CurrentThread();
+ if (t && !(t->IsOwned())) {
+ t->UnwrapCurrent();
+ delete t;
+ }
+}
+
+struct ThreadInit {
+ Thread* thread;
+ Runnable* runnable;
+};
+
+Thread::Thread(SocketServer* ss)
+ : MessageQueue(ss),
+ priority_(PRIORITY_NORMAL),
+ running_(true, false),
+#if defined(WEBRTC_WIN)
+ thread_(NULL),
+ thread_id_(0),
+#endif
+ owned_(true) {
+ SetName("Thread", this); // default name
+}
+
+Thread::~Thread() {
+ Stop();
+ Clear(NULL);
+}
+
+bool Thread::SleepMs(int milliseconds) {
+#if defined(WEBRTC_WIN)
+ ::Sleep(milliseconds);
+ return true;
+#else
+ // POSIX has both a usleep() and a nanosleep(), but the former is deprecated,
+ // so we use nanosleep() even though it has greater precision than necessary.
+ struct timespec ts;
+ ts.tv_sec = milliseconds / 1000;
+ ts.tv_nsec = (milliseconds % 1000) * 1000000;
+ int ret = nanosleep(&ts, NULL);
+ if (ret != 0) {
+ LOG_ERR(LS_WARNING) << "nanosleep() returning early";
+ return false;
+ }
+ return true;
+#endif
+}
+
+bool Thread::SetName(const std::string& name, const void* obj) {
+ if (running()) return false;
+ name_ = name;
+ if (obj) {
+ char buf[16];
+ sprintfn(buf, sizeof(buf), " 0x%p", obj);
+ name_ += buf;
+ }
+ return true;
+}
+
+bool Thread::SetPriority(ThreadPriority priority) {
+#if defined(WEBRTC_WIN)
+ if (running()) {
+ BOOL ret = FALSE;
+ if (priority == PRIORITY_NORMAL) {
+ ret = ::SetThreadPriority(thread_, THREAD_PRIORITY_NORMAL);
+ } else if (priority == PRIORITY_HIGH) {
+ ret = ::SetThreadPriority(thread_, THREAD_PRIORITY_HIGHEST);
+ } else if (priority == PRIORITY_ABOVE_NORMAL) {
+ ret = ::SetThreadPriority(thread_, THREAD_PRIORITY_ABOVE_NORMAL);
+ } else if (priority == PRIORITY_IDLE) {
+ ret = ::SetThreadPriority(thread_, THREAD_PRIORITY_IDLE);
+ }
+ if (!ret) {
+ return false;
+ }
+ }
+ priority_ = priority;
+ return true;
+#else
+ // TODO: Implement for Linux/Mac if possible.
+ if (running()) return false;
+ priority_ = priority;
+ return true;
+#endif
+}
+
+bool Thread::Start(Runnable* runnable) {
+ ASSERT(owned_);
+ if (!owned_) return false;
+ ASSERT(!running());
+ if (running()) return false;
+
+ Restart(); // reset fStop_ if the thread is being restarted
+
+ // Make sure that ThreadManager is created on the main thread before
+ // we start a new thread.
+ ThreadManager::Instance();
+
+ ThreadInit* init = new ThreadInit;
+ init->thread = this;
+ init->runnable = runnable;
+#if defined(WEBRTC_WIN)
+ DWORD flags = 0;
+ if (priority_ != PRIORITY_NORMAL) {
+ flags = CREATE_SUSPENDED;
+ }
+ thread_ = CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE)PreRun, init, flags,
+ &thread_id_);
+ if (thread_) {
+ running_.Set();
+ if (priority_ != PRIORITY_NORMAL) {
+ SetPriority(priority_);
+ ::ResumeThread(thread_);
+ }
+ } else {
+ return false;
+ }
+#elif defined(WEBRTC_POSIX)
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+
+ // Thread priorities are not supported in NaCl.
+#if !defined(__native_client__)
+ if (priority_ != PRIORITY_NORMAL) {
+ if (priority_ == PRIORITY_IDLE) {
+ // There is no POSIX-standard way to set a below-normal priority for an
+ // individual thread (only whole process), so let's not support it.
+ LOG(LS_WARNING) << "PRIORITY_IDLE not supported";
+ } else {
+ // Set real-time round-robin policy.
+ if (pthread_attr_setschedpolicy(&attr, SCHED_RR) != 0) {
+ LOG(LS_ERROR) << "pthread_attr_setschedpolicy";
+ }
+ struct sched_param param;
+ if (pthread_attr_getschedparam(&attr, &param) != 0) {
+ LOG(LS_ERROR) << "pthread_attr_getschedparam";
+ } else {
+ // The numbers here are arbitrary.
+ if (priority_ == PRIORITY_HIGH) {
+ param.sched_priority = 6; // 6 = HIGH
+ } else {
+ ASSERT(priority_ == PRIORITY_ABOVE_NORMAL);
+ param.sched_priority = 4; // 4 = ABOVE_NORMAL
+ }
+ if (pthread_attr_setschedparam(&attr, &param) != 0) {
+ LOG(LS_ERROR) << "pthread_attr_setschedparam";
+ }
+ }
+ }
+ }
+#endif // !defined(__native_client__)
+
+ int error_code = pthread_create(&thread_, &attr, PreRun, init);
+ if (0 != error_code) {
+ LOG(LS_ERROR) << "Unable to create pthread, error " << error_code;
+ return false;
+ }
+ running_.Set();
+#endif
+ return true;
+}
+
+void Thread::Join() {
+ if (running()) {
+ ASSERT(!IsCurrent());
+#if defined(WEBRTC_WIN)
+ WaitForSingleObject(thread_, INFINITE);
+ CloseHandle(thread_);
+ thread_ = NULL;
+ thread_id_ = 0;
+#elif defined(WEBRTC_POSIX)
+ void *pv;
+ pthread_join(thread_, &pv);
+#endif
+ running_.Reset();
+ }
+}
+
+#if defined(WEBRTC_WIN)
+// As seen on MSDN.
+// http://msdn.microsoft.com/en-us/library/xcb2z8hs(VS.71).aspx
+#define MSDEV_SET_THREAD_NAME 0x406D1388
+typedef struct tagTHREADNAME_INFO {
+ DWORD dwType;
+ LPCSTR szName;
+ DWORD dwThreadID;
+ DWORD dwFlags;
+} THREADNAME_INFO;
+
+void SetThreadName(DWORD dwThreadID, LPCSTR szThreadName) {
+ THREADNAME_INFO info;
+ info.dwType = 0x1000;
+ info.szName = szThreadName;
+ info.dwThreadID = dwThreadID;
+ info.dwFlags = 0;
+
+ __try {
+ RaiseException(MSDEV_SET_THREAD_NAME, 0, sizeof(info) / sizeof(DWORD),
+ reinterpret_cast<ULONG_PTR*>(&info));
+ }
+ __except(EXCEPTION_CONTINUE_EXECUTION) {
+ }
+}
+#endif // WEBRTC_WIN
+
+void* Thread::PreRun(void* pv) {
+ ThreadInit* init = static_cast<ThreadInit*>(pv);
+ ThreadManager::Instance()->SetCurrentThread(init->thread);
+#if defined(WEBRTC_WIN)
+ SetThreadName(GetCurrentThreadId(), init->thread->name_.c_str());
+#elif defined(WEBRTC_POSIX)
+ // TODO: See if naming exists for pthreads.
+#endif
+#if __has_feature(objc_arc)
+ @autoreleasepool
+#elif defined(WEBRTC_MAC)
+ // Make sure the new thread has an autoreleasepool
+ ScopedAutoreleasePool pool;
+#endif
+ {
+ if (init->runnable) {
+ init->runnable->Run(init->thread);
+ } else {
+ init->thread->Run();
+ }
+ delete init;
+ return NULL;
+ }
+}
+
+void Thread::Run() {
+ ProcessMessages(kForever);
+}
+
+bool Thread::IsOwned() {
+ return owned_;
+}
+
+void Thread::Stop() {
+ MessageQueue::Quit();
+ Join();
+}
+
+void Thread::Send(MessageHandler *phandler, uint32 id, MessageData *pdata) {
+ if (fStop_)
+ return;
+
+  // Sent messages are delivered to the MessageHandler directly, in the context
+  // of this thread, like Win32 SendMessage. If we are already in the right
+  // context, call the handler directly.
+
+ Message msg;
+ msg.phandler = phandler;
+ msg.message_id = id;
+ msg.pdata = pdata;
+ if (IsCurrent()) {
+ phandler->OnMessage(&msg);
+ return;
+ }
+
+ AutoThread thread;
+ Thread *current_thread = Thread::Current();
+ ASSERT(current_thread != NULL); // AutoThread ensures this
+
+ bool ready = false;
+ {
+ CritScope cs(&crit_);
+ _SendMessage smsg;
+ smsg.thread = current_thread;
+ smsg.msg = msg;
+ smsg.ready = &ready;
+ sendlist_.push_back(smsg);
+ }
+
+ // Wait for a reply
+
+ ss_->WakeUp();
+
+ bool waited = false;
+ crit_.Enter();
+ while (!ready) {
+ crit_.Leave();
+ current_thread->ReceiveSends();
+ current_thread->socketserver()->Wait(kForever, false);
+ waited = true;
+ crit_.Enter();
+ }
+ crit_.Leave();
+
+ // Our Wait loop above may have consumed some WakeUp events for this
+ // MessageQueue, that weren't relevant to this Send. Losing these WakeUps can
+ // cause problems for some SocketServers.
+ //
+ // Concrete example:
+ // Win32SocketServer on thread A calls Send on thread B. While processing the
+ // message, thread B Posts a message to A. We consume the wakeup for that
+ // Post while waiting for the Send to complete, which means that when we exit
+ // this loop, we need to issue another WakeUp, or else the Posted message
+ // won't be processed in a timely manner.
+
+ if (waited) {
+ current_thread->socketserver()->WakeUp();
+ }
+}
+
+void Thread::ReceiveSends() {
+ // Receive a sent message. Cleanup scenarios:
+ // - thread sending exits: We don't allow this, since thread can exit
+ // only via Join, so Send must complete.
+ // - thread receiving exits: Wakeup/set ready in Thread::Clear()
+ // - object target cleared: Wakeup/set ready in Thread::Clear()
+ crit_.Enter();
+ while (!sendlist_.empty()) {
+ _SendMessage smsg = sendlist_.front();
+ sendlist_.pop_front();
+ crit_.Leave();
+ smsg.msg.phandler->OnMessage(&smsg.msg);
+ crit_.Enter();
+ *smsg.ready = true;
+ smsg.thread->socketserver()->WakeUp();
+ }
+ crit_.Leave();
+}
+
+void Thread::Clear(MessageHandler *phandler, uint32 id,
+ MessageList* removed) {
+ CritScope cs(&crit_);
+
+ // Remove messages on sendlist_ with phandler
+ // Object target cleared: remove from send list, wakeup/set ready
+ // if sender not NULL.
+
+ std::list<_SendMessage>::iterator iter = sendlist_.begin();
+ while (iter != sendlist_.end()) {
+ _SendMessage smsg = *iter;
+ if (smsg.msg.Match(phandler, id)) {
+ if (removed) {
+ removed->push_back(smsg.msg);
+ } else {
+ delete smsg.msg.pdata;
+ }
+ iter = sendlist_.erase(iter);
+ *smsg.ready = true;
+ smsg.thread->socketserver()->WakeUp();
+ continue;
+ }
+ ++iter;
+ }
+
+ MessageQueue::Clear(phandler, id, removed);
+}
+
+bool Thread::ProcessMessages(int cmsLoop) {
+ uint32 msEnd = (kForever == cmsLoop) ? 0 : TimeAfter(cmsLoop);
+ int cmsNext = cmsLoop;
+
+ while (true) {
+#if __has_feature(objc_arc)
+ @autoreleasepool
+#elif defined(WEBRTC_MAC)
+ // see: http://developer.apple.com/library/mac/#documentation/Cocoa/Reference/Foundation/Classes/NSAutoreleasePool_Class/Reference/Reference.html
+    // Each thread is supposed to have an autorelease pool. Also, for event
+    // loops like this, an autorelease pool needs to be created and
+    // drained/released for each cycle.
+ ScopedAutoreleasePool pool;
+#endif
+ {
+ Message msg;
+ if (!Get(&msg, cmsNext))
+ return !IsQuitting();
+ Dispatch(&msg);
+
+ if (cmsLoop != kForever) {
+ cmsNext = TimeUntil(msEnd);
+ if (cmsNext < 0)
+ return true;
+ }
+ }
+ }
+}
+
+bool Thread::WrapCurrent() {
+ return WrapCurrentWithThreadManager(ThreadManager::Instance());
+}
+
+bool Thread::WrapCurrentWithThreadManager(ThreadManager* thread_manager) {
+ if (running())
+ return false;
+#if defined(WEBRTC_WIN)
+ // We explicitly ask for no rights other than synchronization.
+ // This gives us the best chance of succeeding.
+ thread_ = OpenThread(SYNCHRONIZE, FALSE, GetCurrentThreadId());
+ if (!thread_) {
+ LOG_GLE(LS_ERROR) << "Unable to get handle to thread.";
+ return false;
+ }
+ thread_id_ = GetCurrentThreadId();
+#elif defined(WEBRTC_POSIX)
+ thread_ = pthread_self();
+#endif
+ owned_ = false;
+ running_.Set();
+ thread_manager->SetCurrentThread(this);
+ return true;
+}
+
+void Thread::UnwrapCurrent() {
+ // Clears the platform-specific thread-specific storage.
+ ThreadManager::Instance()->SetCurrentThread(NULL);
+#if defined(WEBRTC_WIN)
+ if (!CloseHandle(thread_)) {
+ LOG_GLE(LS_ERROR) << "When unwrapping thread, failed to close handle.";
+ }
+#endif
+ running_.Reset();
+}
+
+
+AutoThread::AutoThread(SocketServer* ss) : Thread(ss) {
+ if (!ThreadManager::Instance()->CurrentThread()) {
+ ThreadManager::Instance()->SetCurrentThread(this);
+ }
+}
+
+AutoThread::~AutoThread() {
+ Stop();
+ if (ThreadManager::Instance()->CurrentThread() == this) {
+ ThreadManager::Instance()->SetCurrentThread(NULL);
+ }
+}
+
+#if defined(WEBRTC_WIN)
+void ComThread::Run() {
+ HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+ ASSERT(SUCCEEDED(hr));
+ if (SUCCEEDED(hr)) {
+ Thread::Run();
+ CoUninitialize();
+ } else {
+ LOG(LS_ERROR) << "CoInitialize failed, hr=" << hr;
+ }
+}
+#endif
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/thread.h b/chromium/third_party/webrtc/base/thread.h
new file mode 100644
index 00000000000..38727464b3f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/thread.h
@@ -0,0 +1,294 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_THREAD_H_
+#define WEBRTC_BASE_THREAD_H_
+
+#include <algorithm>
+#include <list>
+#include <string>
+#include <vector>
+
+#if defined(WEBRTC_POSIX)
+#include <pthread.h>
+#endif
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/messagequeue.h"
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#endif
+
+namespace rtc {
+
+class Thread;
+
+class ThreadManager {
+ public:
+ ThreadManager();
+ ~ThreadManager();
+
+ static ThreadManager* Instance();
+
+ Thread* CurrentThread();
+ void SetCurrentThread(Thread* thread);
+
+ // Returns a thread object with its thread_ ivar set
+ // to whatever the OS uses to represent the thread.
+ // If there already *is* a Thread object corresponding to this thread,
+ // this method will return that. Otherwise it creates a new Thread
+ // object whose wrapped() method will return true, and whose
+ // handle will, on Win32, be opened with only synchronization privileges -
+  // if you need more privileges, rather than changing this method, please
+ // write additional code to adjust the privileges, or call a different
+ // factory method of your own devising, because this one gets used in
+ // unexpected contexts (like inside browser plugins) and it would be a
+ // shame to break it. It is also conceivable on Win32 that we won't even
+ // be able to get synchronization privileges, in which case the result
+ // will have a NULL handle.
+ Thread *WrapCurrentThread();
+ void UnwrapCurrentThread();
+
+ private:
+#if defined(WEBRTC_POSIX)
+ pthread_key_t key_;
+#endif
+
+#if defined(WEBRTC_WIN)
+ DWORD key_;
+#endif
+
+ DISALLOW_COPY_AND_ASSIGN(ThreadManager);
+};
+
+struct _SendMessage {
+ _SendMessage() {}
+ Thread *thread;
+ Message msg;
+ bool *ready;
+};
+
+enum ThreadPriority {
+ PRIORITY_IDLE = -1,
+ PRIORITY_NORMAL = 0,
+ PRIORITY_ABOVE_NORMAL = 1,
+ PRIORITY_HIGH = 2,
+};
+
+class Runnable {
+ public:
+ virtual ~Runnable() {}
+ virtual void Run(Thread* thread) = 0;
+
+ protected:
+ Runnable() {}
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(Runnable);
+};
+
+// WARNING! SUBCLASSES MUST CALL Stop() IN THEIR DESTRUCTORS! See ~Thread().
+
+class Thread : public MessageQueue {
+ public:
+ explicit Thread(SocketServer* ss = NULL);
+ // NOTE: ALL SUBCLASSES OF Thread MUST CALL Stop() IN THEIR DESTRUCTORS (or
+ // guarantee Stop() is explicitly called before the subclass is destroyed).
+ // This is required to avoid a data race between the destructor modifying the
+ // vtable, and the Thread::PreRun calling the virtual method Run().
+ virtual ~Thread();
+
+ static Thread* Current();
+
+ bool IsCurrent() const {
+ return Current() == this;
+ }
+
+ // Sleeps the calling thread for the specified number of milliseconds, during
+ // which time no processing is performed. Returns false if sleeping was
+ // interrupted by a signal (POSIX only).
+ static bool SleepMs(int millis);
+
+ // Sets the thread's name, for debugging. Must be called before Start().
+ // If |obj| is non-NULL, its value is appended to |name|.
+ const std::string& name() const { return name_; }
+ bool SetName(const std::string& name, const void* obj);
+
+ // Sets the thread's priority. Must be called before Start().
+ ThreadPriority priority() const { return priority_; }
+ bool SetPriority(ThreadPriority priority);
+
+ // Starts the execution of the thread.
+ bool Start(Runnable* runnable = NULL);
+
+ // Tells the thread to stop and waits until it is joined.
+ // Never call Stop on the current thread. Instead use the inherited Quit
+ // function which will exit the base MessageQueue without terminating the
+ // underlying OS thread.
+ virtual void Stop();
+
+ // By default, Thread::Run() calls ProcessMessages(kForever). To do other
+ // work, override Run(). To receive and dispatch messages, call
+ // ProcessMessages occasionally.
+ virtual void Run();
+
+ virtual void Send(MessageHandler *phandler, uint32 id = 0,
+ MessageData *pdata = NULL);
+
+ // Convenience method to invoke a functor on another thread. Caller must
+ // provide the |ReturnT| template argument, which cannot (easily) be deduced.
+ // Uses Send() internally, which blocks the current thread until execution
+ // is complete.
+ // Ex: bool result = thread.Invoke<bool>(&MyFunctionReturningBool);
+ template <class ReturnT, class FunctorT>
+ ReturnT Invoke(const FunctorT& functor) {
+ FunctorMessageHandler<ReturnT, FunctorT> handler(functor);
+ Send(&handler);
+ return handler.result();
+ }
+
+ // From MessageQueue
+ virtual void Clear(MessageHandler *phandler, uint32 id = MQID_ANY,
+ MessageList* removed = NULL);
+ virtual void ReceiveSends();
+
+ // ProcessMessages will process I/O and dispatch messages until:
+ // 1) cms milliseconds have elapsed (returns true)
+ // 2) Stop() is called (returns false)
+ bool ProcessMessages(int cms);
+
+ // Returns true if this is a thread that we created using the standard
+ // constructor, false if it was created by a call to
+ // ThreadManager::WrapCurrentThread(). The main thread of an application
+ // is generally not owned, since the OS representation of the thread
+ // obviously exists before we can get to it.
+ // You cannot call Start on non-owned threads.
+ bool IsOwned();
+
+#if defined(WEBRTC_WIN)
+ HANDLE GetHandle() const {
+ return thread_;
+ }
+ DWORD GetId() const {
+ return thread_id_;
+ }
+#elif defined(WEBRTC_POSIX)
+ pthread_t GetPThread() {
+ return thread_;
+ }
+#endif
+
+ // WrapCurrent() should be called when a thread is created by some
+ // non-standard means (e.g. a derived implementation of rtc::Thread) and
+ // therefore cannot be started by calling Start(). It sets the started
+ // flag to true and the owned flag to false, and must be called from the
+ // thread being wrapped.
+ // NOTE: These methods are intended for derived classes only; they are
+ // public here solely for testing.
+ bool WrapCurrent();
+ void UnwrapCurrent();
+
+ // Expose private method running() for tests.
+ //
+ // DANGER: this is a terrible public API. Most callers that might want to
+ // call this likely do not have enough control/knowledge of the Thread in
+ // question to guarantee that the returned value remains true for the duration
+ // of whatever code is conditionally executing because of the return value!
+ bool RunningForTest() { return running(); }
+ // This is a legacy call-site that probably doesn't need to exist in the first
+ // place.
+ // TODO(fischman): delete once the ASSERT added in channelmanager.cc sticks
+ // for a month (ETA 2014/06/22).
+ bool RunningForChannelManager() { return running(); }
+
+ protected:
+ // Blocks the calling thread until this thread has terminated.
+ void Join();
+
+ private:
+ static void *PreRun(void *pv);
+
+ // ThreadManager calls this instead of WrapCurrent() because
+ // ThreadManager::Instance() cannot be used while ThreadManager is
+ // being created.
+ bool WrapCurrentWithThreadManager(ThreadManager* thread_manager);
+
+ // Return true if the thread was started and hasn't yet stopped.
+ bool running() { return running_.Wait(0); }
+
+ std::list<_SendMessage> sendlist_;
+ std::string name_;
+ ThreadPriority priority_;
+ Event running_; // Signalled means running.
+
+#if defined(WEBRTC_POSIX)
+ pthread_t thread_;
+#endif
+
+#if defined(WEBRTC_WIN)
+ HANDLE thread_;
+ DWORD thread_id_;
+#endif
+
+ bool owned_;
+
+ friend class ThreadManager;
+
+ DISALLOW_COPY_AND_ASSIGN(Thread);
+};
+
+// AutoThread automatically installs itself at construction and
+// uninstalls itself at destruction, if a Thread object is
+// _not already_ associated with the current OS thread.
+
+class AutoThread : public Thread {
+ public:
+ explicit AutoThread(SocketServer* ss = 0);
+ virtual ~AutoThread();
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AutoThread);
+};
+
+// Win32 extension for threads that need to use COM
+#if defined(WEBRTC_WIN)
+class ComThread : public Thread {
+ public:
+ ComThread() {}
+ virtual ~ComThread() { Stop(); }
+
+ protected:
+ virtual void Run();
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(ComThread);
+};
+#endif
+
+// Provides an easy way to install/uninstall a socketserver on a thread.
+class SocketServerScope {
+ public:
+ explicit SocketServerScope(SocketServer* ss) {
+ old_ss_ = Thread::Current()->socketserver();
+ Thread::Current()->set_socketserver(ss);
+ }
+ ~SocketServerScope() {
+ Thread::Current()->set_socketserver(old_ss_);
+ }
+
+ private:
+ SocketServer* old_ss_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(SocketServerScope);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_THREAD_H_
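
For orientation, a minimal usage sketch of the Thread API declared above; the ComputeAnswer functor and ThreadUsageSketch function are illustrative (they mirror the functors in thread_unittest.cc further down) and are not part of the patch.

// Illustrative only; not part of the patch.
struct ComputeAnswer {
  int operator()() { return 42; }
};

void ThreadUsageSketch() {
  rtc::Thread worker;
  worker.SetName("worker", NULL);  // must be set before Start()
  worker.Start();
  // Invoke blocks the calling thread until the functor has run on |worker|.
  int answer = worker.Invoke<int>(ComputeAnswer());
  (void)answer;   // 42
  worker.Stop();  // join the thread before it goes out of scope
}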
diff --git a/chromium/third_party/webrtc/base/thread_checker.h b/chromium/third_party/webrtc/base/thread_checker.h
new file mode 100644
index 00000000000..eee9315533d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/thread_checker.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/threading/thread_checker.h.
+
+#ifndef WEBRTC_BASE_THREAD_CHECKER_H_
+#define WEBRTC_BASE_THREAD_CHECKER_H_
+
+// Apart from debug builds, we also enable the thread checker in
+// builds with DCHECK_ALWAYS_ON so that trybots and waterfall bots
+// with this define will get the same level of thread checking as
+// debug bots.
+//
+// Note that this does not perfectly match situations where DCHECK is
+// enabled. For example a non-official release build may have
+// DCHECK_ALWAYS_ON undefined (and therefore ThreadChecker would be
+// disabled) but have DCHECKs enabled at runtime.
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+#define ENABLE_THREAD_CHECKER 1
+#else
+#define ENABLE_THREAD_CHECKER 0
+#endif
+
+#include "webrtc/base/thread_checker_impl.h"
+
+namespace rtc {
+
+// Do-nothing implementation, for use in release mode.
+//
+// Note: You should almost always use the ThreadChecker class to get the
+// right version for your build configuration.
+class ThreadCheckerDoNothing {
+ public:
+ bool CalledOnValidThread() const {
+ return true;
+ }
+
+ void DetachFromThread() {}
+};
+
+// ThreadChecker is a helper class used to help verify that some methods of a
+// class are called from the same thread. It provides identical functionality to
+// base::NonThreadSafe, but it is meant to be held as a member variable, rather
+// than inherited from base::NonThreadSafe.
+//
+// While inheriting from base::NonThreadSafe may give a clear indication about
+// the thread-safety of a class, it may also lead to violations of the style
+// guide with regard to multiple inheritance. The choice between having a
+// ThreadChecker member and inheriting from base::NonThreadSafe should be based
+// on whether:
+// - Derived classes need to know the thread they belong to, as opposed to
+// having that functionality fully encapsulated in the base class.
+// - Derived classes should be able to reassign the base class to another
+// thread, via DetachFromThread.
+//
+// If neither of these are true, then having a ThreadChecker member and calling
+// CalledOnValidThread is the preferable solution.
+//
+// Example:
+// class MyClass {
+// public:
+// void Foo() {
+// DCHECK(thread_checker_.CalledOnValidThread());
+// ... (do stuff) ...
+// }
+//
+// private:
+// ThreadChecker thread_checker_;
+// }
+//
+// In Release mode, CalledOnValidThread will always return true.
+#if ENABLE_THREAD_CHECKER
+class ThreadChecker : public ThreadCheckerImpl {
+};
+#else
+class ThreadChecker : public ThreadCheckerDoNothing {
+};
+#endif // ENABLE_THREAD_CHECKER
+
+#undef ENABLE_THREAD_CHECKER
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_THREAD_CHECKER_H_
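
A small sketch of the detach-and-reattach pattern described above, where an object is built on one thread and then handed off for exclusive use on another; the Worker class and the use of assert() are illustrative (matching the style of thread_checker_unittest.cc below) and are not part of the patch.

// Illustrative only; not part of the patch.
#include <assert.h>

class Worker {
 public:
  // Call on the construction thread, before handing the object off.
  void PrepareForHandoff() { thread_checker_.DetachFromThread(); }

  // After the handoff, the first call re-binds the checker to the new thread.
  void DoWork() { assert(thread_checker_.CalledOnValidThread()); }

 private:
  rtc::ThreadChecker thread_checker_;
};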
diff --git a/chromium/third_party/webrtc/base/thread_checker_impl.cc b/chromium/third_party/webrtc/base/thread_checker_impl.cc
new file mode 100644
index 00000000000..4a7455d3075
--- /dev/null
+++ b/chromium/third_party/webrtc/base/thread_checker_impl.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/threading/thread_checker_impl.cc.
+
+#include "webrtc/base/thread_checker_impl.h"
+
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+ThreadCheckerImpl::ThreadCheckerImpl()
+ : valid_thread_() {
+ EnsureThreadIdAssigned();
+}
+
+ThreadCheckerImpl::~ThreadCheckerImpl() {
+}
+
+bool ThreadCheckerImpl::CalledOnValidThread() const {
+ CritScope scoped_lock(&lock_);
+ EnsureThreadIdAssigned();
+ return valid_thread_->IsCurrent();
+}
+
+void ThreadCheckerImpl::DetachFromThread() {
+ CritScope scoped_lock(&lock_);
+ valid_thread_ = NULL;
+}
+
+void ThreadCheckerImpl::EnsureThreadIdAssigned() const {
+ if (!valid_thread_) {
+ valid_thread_ = Thread::Current();
+ }
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/thread_checker_impl.h b/chromium/third_party/webrtc/base/thread_checker_impl.h
new file mode 100644
index 00000000000..1d776b5ebcb
--- /dev/null
+++ b/chromium/third_party/webrtc/base/thread_checker_impl.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/threading/thread_checker_impl.h.
+
+#ifndef WEBRTC_BASE_THREAD_CHECKER_IMPL_H_
+#define WEBRTC_BASE_THREAD_CHECKER_IMPL_H_
+
+#include "webrtc/base/criticalsection.h"
+
+namespace rtc {
+
+class Thread;
+
+// Real implementation of ThreadChecker, for use in debug mode, or
+// for temporary use in release mode (e.g. to CHECK on a threading issue
+// seen only in the wild).
+//
+// Note: You should almost always use the ThreadChecker class to get the
+// right version for your build configuration.
+class ThreadCheckerImpl {
+ public:
+ ThreadCheckerImpl();
+ ~ThreadCheckerImpl();
+
+ bool CalledOnValidThread() const;
+
+ // Changes the thread that is checked for in CalledOnValidThread. This may
+ // be useful when an object is created on one thread and then used
+ // exclusively on another thread.
+ void DetachFromThread();
+
+ private:
+ void EnsureThreadIdAssigned() const;
+
+ mutable CriticalSection lock_;
+ // This is mutable so that CalledOnValidThread can set it.
+ // It's guarded by |lock_|.
+ mutable const Thread* valid_thread_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_THREAD_CHECKER_IMPL_H_
diff --git a/chromium/third_party/webrtc/base/thread_checker_unittest.cc b/chromium/third_party/webrtc/base/thread_checker_unittest.cc
new file mode 100644
index 00000000000..13c1da5e20c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/thread_checker_unittest.cc
@@ -0,0 +1,205 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/threading/thread_checker_unittest.cc.
+
+#include <assert.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/base/scoped_ptr.h"
+
+// Duplicated from base/threading/thread_checker.h so that we can be
+// good citizens there and undef the macro.
+#if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
+#define ENABLE_THREAD_CHECKER 1
+#else
+#define ENABLE_THREAD_CHECKER 0
+#endif
+
+namespace rtc {
+
+namespace {
+
+// Simple class to exercise the basics of ThreadChecker.
+// Both the destructor and DoStuff should verify that they were
+// called on the same thread as the constructor.
+class ThreadCheckerClass : public ThreadChecker {
+ public:
+ ThreadCheckerClass() {}
+
+ // Verifies that it was called on the same thread as the constructor.
+ void DoStuff() {
+ assert(CalledOnValidThread());
+ }
+
+ void DetachFromThread() {
+ ThreadChecker::DetachFromThread();
+ }
+
+ static void MethodOnDifferentThreadImpl();
+ static void DetachThenCallFromDifferentThreadImpl();
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(ThreadCheckerClass);
+};
+
+// Calls ThreadCheckerClass::DoStuff on another thread.
+class CallDoStuffOnThread : public Thread {
+ public:
+ explicit CallDoStuffOnThread(ThreadCheckerClass* thread_checker_class)
+ : Thread(),
+ thread_checker_class_(thread_checker_class) {
+ SetName("call_do_stuff_on_thread", NULL);
+ }
+
+ virtual void Run() OVERRIDE {
+ thread_checker_class_->DoStuff();
+ }
+
+ // New method. Needed since Thread::Join is protected, and it is called by
+ // the TEST.
+ void Join() {
+ Thread::Join();
+ }
+
+ private:
+ ThreadCheckerClass* thread_checker_class_;
+
+ DISALLOW_COPY_AND_ASSIGN(CallDoStuffOnThread);
+};
+
+// Deletes ThreadCheckerClass on a different thread.
+class DeleteThreadCheckerClassOnThread : public Thread {
+ public:
+ explicit DeleteThreadCheckerClassOnThread(
+ ThreadCheckerClass* thread_checker_class)
+ : Thread(),
+ thread_checker_class_(thread_checker_class) {
+ SetName("delete_thread_checker_class_on_thread", NULL);
+ }
+
+ virtual void Run() OVERRIDE {
+ thread_checker_class_.reset();
+ }
+
+ // New method. Needed since Thread::Join is protected, and it is called by
+ // the TEST.
+ void Join() {
+ Thread::Join();
+ }
+
+ private:
+ scoped_ptr<ThreadCheckerClass> thread_checker_class_;
+
+ DISALLOW_COPY_AND_ASSIGN(DeleteThreadCheckerClassOnThread);
+};
+
+} // namespace
+
+TEST(ThreadCheckerTest, CallsAllowedOnSameThread) {
+ scoped_ptr<ThreadCheckerClass> thread_checker_class(
+ new ThreadCheckerClass);
+
+ // Verify that DoStuff doesn't assert.
+ thread_checker_class->DoStuff();
+
+ // Verify that the destructor doesn't assert.
+ thread_checker_class.reset();
+}
+
+TEST(ThreadCheckerTest, DestructorAllowedOnDifferentThread) {
+ scoped_ptr<ThreadCheckerClass> thread_checker_class(
+ new ThreadCheckerClass);
+
+ // Verify that the destructor doesn't assert
+ // when called on a different thread.
+ DeleteThreadCheckerClassOnThread delete_on_thread(
+ thread_checker_class.release());
+
+ delete_on_thread.Start();
+ delete_on_thread.Join();
+}
+
+TEST(ThreadCheckerTest, DetachFromThread) {
+ scoped_ptr<ThreadCheckerClass> thread_checker_class(
+ new ThreadCheckerClass);
+
+ // Verify that DoStuff doesn't assert when called on a different thread after
+ // a call to DetachFromThread.
+ thread_checker_class->DetachFromThread();
+ CallDoStuffOnThread call_on_thread(thread_checker_class.get());
+
+ call_on_thread.Start();
+ call_on_thread.Join();
+}
+
+#if GTEST_HAS_DEATH_TEST || !ENABLE_THREAD_CHECKER
+
+void ThreadCheckerClass::MethodOnDifferentThreadImpl() {
+ scoped_ptr<ThreadCheckerClass> thread_checker_class(
+ new ThreadCheckerClass);
+
+ // DoStuff should assert in debug builds only when called on a
+ // different thread.
+ CallDoStuffOnThread call_on_thread(thread_checker_class.get());
+
+ call_on_thread.Start();
+ call_on_thread.Join();
+}
+
+#if ENABLE_THREAD_CHECKER
+TEST(ThreadCheckerDeathTest, MethodNotAllowedOnDifferentThreadInDebug) {
+ ASSERT_DEATH({
+ ThreadCheckerClass::MethodOnDifferentThreadImpl();
+ }, "");
+}
+#else
+TEST(ThreadCheckerTest, MethodAllowedOnDifferentThreadInRelease) {
+ ThreadCheckerClass::MethodOnDifferentThreadImpl();
+}
+#endif // ENABLE_THREAD_CHECKER
+
+void ThreadCheckerClass::DetachThenCallFromDifferentThreadImpl() {
+ scoped_ptr<ThreadCheckerClass> thread_checker_class(
+ new ThreadCheckerClass);
+
+ // DoStuff doesn't assert when called on a different thread
+ // after a call to DetachFromThread.
+ thread_checker_class->DetachFromThread();
+ CallDoStuffOnThread call_on_thread(thread_checker_class.get());
+
+ call_on_thread.Start();
+ call_on_thread.Join();
+
+ // DoStuff should assert in debug builds only after moving to
+ // another thread.
+ thread_checker_class->DoStuff();
+}
+
+#if ENABLE_THREAD_CHECKER
+TEST(ThreadCheckerDeathTest, DetachFromThreadInDebug) {
+ ASSERT_DEATH({
+ ThreadCheckerClass::DetachThenCallFromDifferentThreadImpl();
+ }, "");
+}
+#else
+TEST(ThreadCheckerTest, DetachFromThreadInRelease) {
+ ThreadCheckerClass::DetachThenCallFromDifferentThreadImpl();
+}
+#endif // ENABLE_THREAD_CHECKER
+
+#endif // GTEST_HAS_DEATH_TEST || !ENABLE_THREAD_CHECKER
+
+// Just in case we ever get lumped together with other compilation units.
+#undef ENABLE_THREAD_CHECKER
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/thread_unittest.cc b/chromium/third_party/webrtc/base/thread_unittest.cc
new file mode 100644
index 00000000000..6a54ac7b399
--- /dev/null
+++ b/chromium/third_party/webrtc/base/thread_unittest.cc
@@ -0,0 +1,444 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/asyncinvoker.h"
+#include "webrtc/base/asyncudpsocket.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/socketaddress.h"
+#include "webrtc/base/thread.h"
+
+#if defined(WEBRTC_WIN)
+#include <comdef.h> // NOLINT
+#endif
+
+using namespace rtc;
+
+// Generates a sequence of numbers (collaboratively).
+class TestGenerator {
+ public:
+ TestGenerator() : last(0), count(0) {}
+
+ int Next(int prev) {
+ int result = prev + last;
+ last = result;
+ count += 1;
+ return result;
+ }
+
+ int last;
+ int count;
+};
+
+struct TestMessage : public MessageData {
+ explicit TestMessage(int v) : value(v) {}
+ virtual ~TestMessage() {}
+
+ int value;
+};
+
+// Receives on a socket and sends by posting messages.
+class SocketClient : public TestGenerator, public sigslot::has_slots<> {
+ public:
+ SocketClient(AsyncSocket* socket, const SocketAddress& addr,
+ Thread* post_thread, MessageHandler* phandler)
+ : socket_(AsyncUDPSocket::Create(socket, addr)),
+ post_thread_(post_thread),
+ post_handler_(phandler) {
+ socket_->SignalReadPacket.connect(this, &SocketClient::OnPacket);
+ }
+
+ ~SocketClient() {
+ delete socket_;
+ }
+
+ SocketAddress address() const { return socket_->GetLocalAddress(); }
+
+ void OnPacket(AsyncPacketSocket* socket, const char* buf, size_t size,
+ const SocketAddress& remote_addr,
+ const PacketTime& packet_time) {
+ EXPECT_EQ(size, sizeof(uint32));
+ uint32 prev = reinterpret_cast<const uint32*>(buf)[0];
+ uint32 result = Next(prev);
+
+ post_thread_->PostDelayed(200, post_handler_, 0, new TestMessage(result));
+ }
+
+ private:
+ AsyncUDPSocket* socket_;
+ Thread* post_thread_;
+ MessageHandler* post_handler_;
+};
+
+// Receives messages and sends on a socket.
+class MessageClient : public MessageHandler, public TestGenerator {
+ public:
+ MessageClient(Thread* pth, Socket* socket)
+ : socket_(socket) {
+ }
+
+ virtual ~MessageClient() {
+ delete socket_;
+ }
+
+ virtual void OnMessage(Message *pmsg) {
+ TestMessage* msg = static_cast<TestMessage*>(pmsg->pdata);
+ int result = Next(msg->value);
+ EXPECT_GE(socket_->Send(&result, sizeof(result)), 0);
+ delete msg;
+ }
+
+ private:
+ Socket* socket_;
+};
+
+class CustomThread : public rtc::Thread {
+ public:
+ CustomThread() {}
+ virtual ~CustomThread() { Stop(); }
+ bool Start() { return false; }
+};
+
+
+// A thread that does nothing when it runs and signals an event
+// when it is destroyed.
+class SignalWhenDestroyedThread : public Thread {
+ public:
+ SignalWhenDestroyedThread(Event* event)
+ : event_(event) {
+ }
+
+ virtual ~SignalWhenDestroyedThread() {
+ Stop();
+ event_->Set();
+ }
+
+ virtual void Run() {
+ // Do nothing.
+ }
+
+ private:
+ Event* event_;
+};
+
+// Function objects to test Thread::Invoke.
+struct FunctorA {
+ int operator()() { return 42; }
+};
+class FunctorB {
+ public:
+ explicit FunctorB(bool* flag) : flag_(flag) {}
+ void operator()() { if (flag_) *flag_ = true; }
+ private:
+ bool* flag_;
+};
+struct FunctorC {
+ int operator()() {
+ Thread::Current()->ProcessMessages(50);
+ return 24;
+ }
+};
+
+// See: https://code.google.com/p/webrtc/issues/detail?id=2409
+TEST(ThreadTest, DISABLED_Main) {
+ const SocketAddress addr("127.0.0.1", 0);
+
+ // Create the messaging client on its own thread.
+ Thread th1;
+ Socket* socket = th1.socketserver()->CreateAsyncSocket(addr.family(),
+ SOCK_DGRAM);
+ MessageClient msg_client(&th1, socket);
+
+ // Create the socket client on its own thread.
+ Thread th2;
+ AsyncSocket* asocket =
+ th2.socketserver()->CreateAsyncSocket(addr.family(), SOCK_DGRAM);
+ SocketClient sock_client(asocket, addr, &th1, &msg_client);
+
+ socket->Connect(sock_client.address());
+
+ th1.Start();
+ th2.Start();
+
+ // Get the messages started.
+ th1.PostDelayed(100, &msg_client, 0, new TestMessage(1));
+
+ // Give the clients a little while to run.
+ // Messages will be processed at 100, 300, 500, 700, 900.
+ Thread* th_main = Thread::Current();
+ th_main->ProcessMessages(1000);
+
+ // Stop the sending client. Give the receiver a bit longer to run, in case
+ // it is running on a machine that is under load (e.g. the build machine).
+ th1.Stop();
+ th_main->ProcessMessages(200);
+ th2.Stop();
+
+ // Make sure the results were correct
+ EXPECT_EQ(5, msg_client.count);
+ EXPECT_EQ(34, msg_client.last);
+ EXPECT_EQ(5, sock_client.count);
+ EXPECT_EQ(55, sock_client.last);
+}
+
+// Test that setting thread names doesn't cause a malfunction.
+// There's no easy way to verify the name was set properly at this time.
+TEST(ThreadTest, Names) {
+ // Default name
+ Thread *thread;
+ thread = new Thread();
+ EXPECT_TRUE(thread->Start());
+ thread->Stop();
+ delete thread;
+ thread = new Thread();
+ // Name with no object parameter
+ EXPECT_TRUE(thread->SetName("No object", NULL));
+ EXPECT_TRUE(thread->Start());
+ thread->Stop();
+ delete thread;
+ // Really long name
+ thread = new Thread();
+ EXPECT_TRUE(thread->SetName("Abcdefghijklmnopqrstuvwxyz1234567890", this));
+ EXPECT_TRUE(thread->Start());
+ thread->Stop();
+ delete thread;
+}
+
+// Test that setting thread priorities doesn't cause a malfunction.
+// There's no easy way to verify the priority was set properly at this time.
+TEST(ThreadTest, Priorities) {
+ Thread *thread;
+ thread = new Thread();
+ EXPECT_TRUE(thread->SetPriority(PRIORITY_HIGH));
+ EXPECT_TRUE(thread->Start());
+ thread->Stop();
+ delete thread;
+ thread = new Thread();
+ EXPECT_TRUE(thread->SetPriority(PRIORITY_ABOVE_NORMAL));
+ EXPECT_TRUE(thread->Start());
+ thread->Stop();
+ delete thread;
+
+ thread = new Thread();
+ EXPECT_TRUE(thread->Start());
+#if defined(WEBRTC_WIN)
+ EXPECT_TRUE(thread->SetPriority(PRIORITY_ABOVE_NORMAL));
+#else
+ EXPECT_FALSE(thread->SetPriority(PRIORITY_ABOVE_NORMAL));
+#endif
+ thread->Stop();
+ delete thread;
+
+}
+
+TEST(ThreadTest, Wrap) {
+ Thread* current_thread = Thread::Current();
+ current_thread->UnwrapCurrent();
+ CustomThread* cthread = new CustomThread();
+ EXPECT_TRUE(cthread->WrapCurrent());
+ EXPECT_TRUE(cthread->RunningForTest());
+ EXPECT_FALSE(cthread->IsOwned());
+ cthread->UnwrapCurrent();
+ EXPECT_FALSE(cthread->RunningForTest());
+ delete cthread;
+ current_thread->WrapCurrent();
+}
+
+TEST(ThreadTest, Invoke) {
+ // Create and start the thread.
+ Thread thread;
+ thread.Start();
+ // Try calling functors.
+ EXPECT_EQ(42, thread.Invoke<int>(FunctorA()));
+ bool called = false;
+ FunctorB f2(&called);
+ thread.Invoke<void>(f2);
+ EXPECT_TRUE(called);
+ // Try calling bare functions.
+ struct LocalFuncs {
+ static int Func1() { return 999; }
+ static void Func2() {}
+ };
+ EXPECT_EQ(999, thread.Invoke<int>(&LocalFuncs::Func1));
+ thread.Invoke<void>(&LocalFuncs::Func2);
+}
+
+class AsyncInvokeTest : public testing::Test {
+ public:
+ void IntCallback(int value) {
+ EXPECT_EQ(expected_thread_, Thread::Current());
+ int_value_ = value;
+ }
+ void AsyncInvokeIntCallback(AsyncInvoker* invoker, Thread* thread) {
+ expected_thread_ = thread;
+ invoker->AsyncInvoke(thread, FunctorC(),
+ &AsyncInvokeTest::IntCallback,
+ static_cast<AsyncInvokeTest*>(this));
+ invoke_started_.Set();
+ }
+ void SetExpectedThreadForIntCallback(Thread* thread) {
+ expected_thread_ = thread;
+ }
+
+ protected:
+ enum { kWaitTimeout = 1000 };
+ AsyncInvokeTest()
+ : int_value_(0),
+ invoke_started_(true, false),
+ expected_thread_(NULL) {}
+
+ int int_value_;
+ Event invoke_started_;
+ Thread* expected_thread_;
+};
+
+TEST_F(AsyncInvokeTest, FireAndForget) {
+ AsyncInvoker invoker;
+ // Create and start the thread.
+ Thread thread;
+ thread.Start();
+ // Try calling functor.
+ bool called = false;
+ invoker.AsyncInvoke<void>(&thread, FunctorB(&called));
+ EXPECT_TRUE_WAIT(called, kWaitTimeout);
+}
+
+TEST_F(AsyncInvokeTest, WithCallback) {
+ AsyncInvoker invoker;
+ // Create and start the thread.
+ Thread thread;
+ thread.Start();
+ // Try calling functor.
+ SetExpectedThreadForIntCallback(Thread::Current());
+ invoker.AsyncInvoke(&thread, FunctorA(),
+ &AsyncInvokeTest::IntCallback,
+ static_cast<AsyncInvokeTest*>(this));
+ EXPECT_EQ_WAIT(42, int_value_, kWaitTimeout);
+}
+
+TEST_F(AsyncInvokeTest, CancelInvoker) {
+ // Create and start the thread.
+ Thread thread;
+ thread.Start();
+ // Try destroying invoker during call.
+ {
+ AsyncInvoker invoker;
+ invoker.AsyncInvoke(&thread, FunctorC(),
+ &AsyncInvokeTest::IntCallback,
+ static_cast<AsyncInvokeTest*>(this));
+ }
+ // With invoker gone, callback should be cancelled.
+ Thread::Current()->ProcessMessages(kWaitTimeout);
+ EXPECT_EQ(0, int_value_);
+}
+
+TEST_F(AsyncInvokeTest, CancelCallingThread) {
+ AsyncInvoker invoker;
+ { // Create and start the thread.
+ Thread thread;
+ thread.Start();
+ // Try calling functor.
+ thread.Invoke<void>(Bind(&AsyncInvokeTest::AsyncInvokeIntCallback,
+ static_cast<AsyncInvokeTest*>(this),
+ &invoker, Thread::Current()));
+ // Wait for the call to begin.
+ ASSERT_TRUE(invoke_started_.Wait(kWaitTimeout));
+ }
+ // Calling thread is gone. Return message shouldn't happen.
+ Thread::Current()->ProcessMessages(kWaitTimeout);
+ EXPECT_EQ(0, int_value_);
+}
+
+TEST_F(AsyncInvokeTest, KillInvokerBeforeExecute) {
+ Thread thread;
+ thread.Start();
+ {
+ AsyncInvoker invoker;
+ // Try calling functor.
+ thread.Invoke<void>(Bind(&AsyncInvokeTest::AsyncInvokeIntCallback,
+ static_cast<AsyncInvokeTest*>(this),
+ &invoker, Thread::Current()));
+ // Wait for the call to begin.
+ ASSERT_TRUE(invoke_started_.Wait(kWaitTimeout));
+ }
+ // Invoker is destroyed. Function should not execute.
+ Thread::Current()->ProcessMessages(kWaitTimeout);
+ EXPECT_EQ(0, int_value_);
+}
+
+TEST_F(AsyncInvokeTest, Flush) {
+ AsyncInvoker invoker;
+ bool flag1 = false;
+ bool flag2 = false;
+ // Queue two async calls to the current thread.
+ invoker.AsyncInvoke<void>(Thread::Current(),
+ FunctorB(&flag1));
+ invoker.AsyncInvoke<void>(Thread::Current(),
+ FunctorB(&flag2));
+ // Because we haven't pumped messages, these should not have run yet.
+ EXPECT_FALSE(flag1);
+ EXPECT_FALSE(flag2);
+ // Force them to run now.
+ invoker.Flush(Thread::Current());
+ EXPECT_TRUE(flag1);
+ EXPECT_TRUE(flag2);
+}
+
+TEST_F(AsyncInvokeTest, FlushWithIds) {
+ AsyncInvoker invoker;
+ bool flag1 = false;
+ bool flag2 = false;
+ // Queue two async calls to the current thread, one with a message id.
+ invoker.AsyncInvoke<void>(Thread::Current(),
+ FunctorB(&flag1),
+ 5);
+ invoker.AsyncInvoke<void>(Thread::Current(),
+ FunctorB(&flag2));
+ // Because we haven't pumped messages, these should not have run yet.
+ EXPECT_FALSE(flag1);
+ EXPECT_FALSE(flag2);
+ // Execute pending calls with id == 5.
+ invoker.Flush(Thread::Current(), 5);
+ EXPECT_TRUE(flag1);
+ EXPECT_FALSE(flag2);
+ flag1 = false;
+ // Execute all pending calls. The id == 5 call should not execute again.
+ invoker.Flush(Thread::Current());
+ EXPECT_FALSE(flag1);
+ EXPECT_TRUE(flag2);
+}
+
+
+#if defined(WEBRTC_WIN)
+class ComThreadTest : public testing::Test, public MessageHandler {
+ public:
+ ComThreadTest() : done_(false) {}
+ protected:
+ virtual void OnMessage(Message* message) {
+ HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+ // S_FALSE means the thread was already initialized for a multithreaded apartment.
+ EXPECT_EQ(S_FALSE, hr);
+ if (SUCCEEDED(hr)) {
+ CoUninitialize();
+ }
+ done_ = true;
+ }
+ bool done_;
+};
+
+TEST_F(ComThreadTest, ComInited) {
+ Thread* thread = new ComThread();
+ EXPECT_TRUE(thread->Start());
+ thread->Post(this, 0);
+ EXPECT_TRUE_WAIT(done_, 1000);
+ delete thread;
+}
+#endif
diff --git a/chromium/third_party/webrtc/base/timeutils.cc b/chromium/third_party/webrtc/base/timeutils.cc
new file mode 100644
index 00000000000..dcf83e3dd90
--- /dev/null
+++ b/chromium/third_party/webrtc/base/timeutils.cc
@@ -0,0 +1,203 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+
+#if defined(WEBRTC_POSIX)
+#include <sys/time.h>
+#if defined(WEBRTC_MAC)
+#include <mach/mach_time.h>
+#endif
+#endif
+
+#if defined(WEBRTC_WIN)
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <mmsystem.h>
+#endif
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/timeutils.h"
+
+#define EFFICIENT_IMPLEMENTATION 1
+
+namespace rtc {
+
+const uint32 HALF = 0x80000000;
+
+uint64 TimeNanos() {
+ int64 ticks = 0;
+#if defined(WEBRTC_MAC)
+ static mach_timebase_info_data_t timebase;
+ if (timebase.denom == 0) {
+ // Get the timebase if this is the first time we run.
+ // Recommended by Apple's QA1398.
+ VERIFY(KERN_SUCCESS == mach_timebase_info(&timebase));
+ }
+ // Use timebase to convert absolute time tick units into nanoseconds.
+ ticks = mach_absolute_time() * timebase.numer / timebase.denom;
+#elif defined(WEBRTC_POSIX)
+ struct timespec ts;
+ // TODO: Do we need to handle the case when CLOCK_MONOTONIC
+ // is not supported?
+ clock_gettime(CLOCK_MONOTONIC, &ts);
+ ticks = kNumNanosecsPerSec * static_cast<int64>(ts.tv_sec) +
+ static_cast<int64>(ts.tv_nsec);
+#elif defined(WEBRTC_WIN)
+ static volatile LONG last_timegettime = 0;
+ static volatile int64 num_wrap_timegettime = 0;
+ volatile LONG* last_timegettime_ptr = &last_timegettime;
+ DWORD now = timeGetTime();
+ // Atomically update the last gotten time
+ DWORD old = InterlockedExchange(last_timegettime_ptr, now);
+ if (now < old) {
+ // If now is earlier than old, there may have been a race between
+ // threads.
+ // 0x0fffffff is roughly 3.1 days; the code will not take that long
+ // to execute, so it must have been a wraparound.
+ if (old > 0xf0000000 && now < 0x0fffffff) {
+ num_wrap_timegettime++;
+ }
+ }
+ ticks = now + (num_wrap_timegettime << 32);
+ // TODO: Calculate with nanosecond precision. Otherwise, we're just
+ // wasting a multiply and divide when doing Time() on Windows.
+ ticks = ticks * kNumNanosecsPerMillisec;
+#endif
+ return ticks;
+}
+
+uint32 Time() {
+ return static_cast<uint32>(TimeNanos() / kNumNanosecsPerMillisec);
+}
+
+uint64 TimeMicros() {
+ return static_cast<uint64>(TimeNanos() / kNumNanosecsPerMicrosec);
+}
+
+#if defined(WEBRTC_WIN)
+static const uint64 kFileTimeToUnixTimeEpochOffset = 116444736000000000ULL;
+
+struct timeval {
+ long tv_sec, tv_usec; // NOLINT
+};
+
+// Emulate POSIX gettimeofday().
+// Based on breakpad/src/third_party/glog/src/utilities.cc
+static int gettimeofday(struct timeval *tv, void *tz) {
+ // FILETIME is measured in 100-nanosecond intervals since 1601-01-01 UTC.
+ FILETIME ft;
+ GetSystemTimeAsFileTime(&ft);
+
+ LARGE_INTEGER li;
+ li.LowPart = ft.dwLowDateTime;
+ li.HighPart = ft.dwHighDateTime;
+
+ // Convert to seconds and microseconds since Unix time Epoch.
+ int64 micros = (li.QuadPart - kFileTimeToUnixTimeEpochOffset) / 10;
+ tv->tv_sec = static_cast<long>(micros / kNumMicrosecsPerSec); // NOLINT
+ tv->tv_usec = static_cast<long>(micros % kNumMicrosecsPerSec); // NOLINT
+
+ return 0;
+}
+
+// Emulate POSIX gmtime_r().
+static struct tm *gmtime_r(const time_t *timep, struct tm *result) {
+ // On Windows, gmtime is thread safe.
+ struct tm *tm = gmtime(timep); // NOLINT
+ if (tm == NULL) {
+ return NULL;
+ }
+ *result = *tm;
+ return result;
+}
+#endif // WEBRTC_WIN
+
+void CurrentTmTime(struct tm *tm, int *microseconds) {
+ struct timeval timeval;
+ if (gettimeofday(&timeval, NULL) < 0) {
+ // Incredibly unlikely code path.
+ timeval.tv_sec = timeval.tv_usec = 0;
+ }
+ time_t secs = timeval.tv_sec;
+ gmtime_r(&secs, tm);
+ *microseconds = timeval.tv_usec;
+}
+
+uint32 TimeAfter(int32 elapsed) {
+ ASSERT(elapsed >= 0);
+ ASSERT(static_cast<uint32>(elapsed) < HALF);
+ return Time() + elapsed;
+}
+
+bool TimeIsBetween(uint32 earlier, uint32 middle, uint32 later) {
+ if (earlier <= later) {
+ return ((earlier <= middle) && (middle <= later));
+ } else {
+ return !((later < middle) && (middle < earlier));
+ }
+}
+
+bool TimeIsLaterOrEqual(uint32 earlier, uint32 later) {
+#if EFFICIENT_IMPLEMENTATION
+ int32 diff = later - earlier;
+ return (diff >= 0 && static_cast<uint32>(diff) < HALF);
+#else
+ const bool later_or_equal = TimeIsBetween(earlier, later, earlier + HALF);
+ return later_or_equal;
+#endif
+}
+
+bool TimeIsLater(uint32 earlier, uint32 later) {
+#if EFFICIENT_IMPLEMENTATION
+ int32 diff = later - earlier;
+ return (diff > 0 && static_cast<uint32>(diff) < HALF);
+#else
+ const bool earlier_or_equal = TimeIsBetween(later, earlier, later + HALF);
+ return !earlier_or_equal;
+#endif
+}
+
+int32 TimeDiff(uint32 later, uint32 earlier) {
+#if EFFICIENT_IMPLEMENTATION
+ return later - earlier;
+#else
+ const bool later_or_equal = TimeIsBetween(earlier, later, earlier + HALF);
+ if (later_or_equal) {
+ if (earlier <= later) {
+ return static_cast<long>(later - earlier);
+ } else {
+ return static_cast<long>(later + (UINT32_MAX - earlier) + 1);
+ }
+ } else {
+ if (later <= earlier) {
+ return -static_cast<long>(earlier - later);
+ } else {
+ return -static_cast<long>(earlier + (UINT32_MAX - later) + 1);
+ }
+ }
+#endif
+}
+
+TimestampWrapAroundHandler::TimestampWrapAroundHandler()
+ : last_ts_(0), num_wrap_(0) {}
+
+int64 TimestampWrapAroundHandler::Unwrap(uint32 ts) {
+ if (ts < last_ts_) {
+ if (last_ts_ > 0xf0000000 && ts < 0x0fffffff) {
+ ++num_wrap_;
+ }
+ }
+ last_ts_ = ts;
+ int64_t unwrapped_ts = ts + (num_wrap_ << 32);
+ return unwrapped_ts;
+}
+
+} // namespace rtc
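
A worked example of the modular arithmetic the efficient TimeDiff() path above relies on; the specific values are illustrative.

// Illustrative only; not part of the patch.
// With 32-bit modular arithmetic, a timestamp taken just after a wrap still
// compares correctly against one taken just before it:
//   later   = 0x00000014 (20)
//   earlier = 0xfffffff6
//   later - earlier == 0x0000001e as uint32, so TimeDiff(later, earlier) == 30
//   and TimeDiff(earlier, later) == -30.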
diff --git a/chromium/third_party/webrtc/base/timeutils.h b/chromium/third_party/webrtc/base/timeutils.h
new file mode 100644
index 00000000000..ca041a7d113
--- /dev/null
+++ b/chromium/third_party/webrtc/base/timeutils.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2005 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TIMEUTILS_H_
+#define WEBRTC_BASE_TIMEUTILS_H_
+
+#include <time.h>
+
+#include "webrtc/base/basictypes.h"
+
+namespace rtc {
+
+static const int64 kNumMillisecsPerSec = INT64_C(1000);
+static const int64 kNumMicrosecsPerSec = INT64_C(1000000);
+static const int64 kNumNanosecsPerSec = INT64_C(1000000000);
+
+static const int64 kNumMicrosecsPerMillisec = kNumMicrosecsPerSec /
+ kNumMillisecsPerSec;
+static const int64 kNumNanosecsPerMillisec = kNumNanosecsPerSec /
+ kNumMillisecsPerSec;
+static const int64 kNumNanosecsPerMicrosec = kNumNanosecsPerSec /
+ kNumMicrosecsPerSec;
+
+// January 1970, in NTP milliseconds.
+static const int64 kJan1970AsNtpMillisecs = INT64_C(2208988800000);
+
+typedef uint32 TimeStamp;
+
+// Returns the current time in milliseconds.
+uint32 Time();
+// Returns the current time in microseconds.
+uint64 TimeMicros();
+// Returns the current time in nanoseconds.
+uint64 TimeNanos();
+
+// Stores current time in *tm and microseconds in *microseconds.
+void CurrentTmTime(struct tm *tm, int *microseconds);
+
+// Returns a future timestamp, 'elapsed' milliseconds from now.
+uint32 TimeAfter(int32 elapsed);
+
+// Comparisons between time values, which can wrap around.
+bool TimeIsBetween(uint32 earlier, uint32 middle, uint32 later); // Inclusive
+bool TimeIsLaterOrEqual(uint32 earlier, uint32 later); // Inclusive
+bool TimeIsLater(uint32 earlier, uint32 later); // Exclusive
+
+// Returns the later of two timestamps.
+inline uint32 TimeMax(uint32 ts1, uint32 ts2) {
+ return TimeIsLaterOrEqual(ts1, ts2) ? ts2 : ts1;
+}
+
+// Returns the earlier of two timestamps.
+inline uint32 TimeMin(uint32 ts1, uint32 ts2) {
+ return TimeIsLaterOrEqual(ts1, ts2) ? ts1 : ts2;
+}
+
+// Number of milliseconds that would elapse between 'earlier' and 'later'
+// timestamps. The value is negative if 'later' occurs before 'earlier'.
+int32 TimeDiff(uint32 later, uint32 earlier);
+
+// The number of milliseconds that have elapsed since 'earlier'.
+inline int32 TimeSince(uint32 earlier) {
+ return TimeDiff(Time(), earlier);
+}
+
+// The number of milliseconds that will elapse between now and 'later'.
+inline int32 TimeUntil(uint32 later) {
+ return TimeDiff(later, Time());
+}
+
+// Converts a unix timestamp in nanoseconds to an NTP timestamp in ms.
+inline int64 UnixTimestampNanosecsToNtpMillisecs(int64 unix_ts_ns) {
+ return unix_ts_ns / kNumNanosecsPerMillisec + kJan1970AsNtpMillisecs;
+}
+
+class TimestampWrapAroundHandler {
+ public:
+ TimestampWrapAroundHandler();
+
+ int64 Unwrap(uint32 ts);
+
+ private:
+ uint32 last_ts_;
+ int64 num_wrap_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_TIMEUTILS_H_
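
A brief sketch of how the wraparound-safe helpers and TimestampWrapAroundHandler declared above are intended to be used; the function names are illustrative and not part of the patch.

// Illustrative only; not part of the patch.
void DeadlineSketch() {
  uint32 deadline = rtc::TimeAfter(5000);  // 5 seconds from now
  while (rtc::TimeUntil(deadline) > 0) {
    // ... poll or do work; the comparison stays correct even if the
    // 32-bit millisecond clock wraps in between ...
  }
}

// Turn a wrapping 32-bit timestamp stream into a monotonically growing
// 64-bit one.
int64 ExtendTimestamp(rtc::TimestampWrapAroundHandler* handler, uint32 ts) {
  return handler->Unwrap(ts);
}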
diff --git a/chromium/third_party/webrtc/base/timeutils_unittest.cc b/chromium/third_party/webrtc/base/timeutils_unittest.cc
new file mode 100644
index 00000000000..087fb0c28bc
--- /dev/null
+++ b/chromium/third_party/webrtc/base/timeutils_unittest.cc
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+TEST(TimeTest, TimeInMs) {
+ uint32 ts_earlier = Time();
+ Thread::SleepMs(100);
+ uint32 ts_now = Time();
+ // Allow for the thread to wake up ~20ms early.
+ EXPECT_GE(ts_now, ts_earlier + 80);
+ // Make sure Time() is not returning in a smaller unit, like microseconds.
+ EXPECT_LT(ts_now, ts_earlier + 1000);
+}
+
+TEST(TimeTest, Comparison) {
+ // Obtain two different times, in known order
+ TimeStamp ts_earlier = Time();
+ Thread::SleepMs(100);
+ TimeStamp ts_now = Time();
+ EXPECT_NE(ts_earlier, ts_now);
+
+ // Common comparisons
+ EXPECT_TRUE( TimeIsLaterOrEqual(ts_earlier, ts_now));
+ EXPECT_TRUE( TimeIsLater( ts_earlier, ts_now));
+ EXPECT_FALSE(TimeIsLaterOrEqual(ts_now, ts_earlier));
+ EXPECT_FALSE(TimeIsLater( ts_now, ts_earlier));
+
+ // Edge cases
+ EXPECT_TRUE( TimeIsLaterOrEqual(ts_earlier, ts_earlier));
+ EXPECT_FALSE(TimeIsLater( ts_earlier, ts_earlier));
+
+ // Obtain a third time
+ TimeStamp ts_later = TimeAfter(100);
+ EXPECT_NE(ts_now, ts_later);
+ EXPECT_TRUE( TimeIsLater(ts_now, ts_later));
+ EXPECT_TRUE( TimeIsLater(ts_earlier, ts_later));
+
+ // Common comparisons
+ EXPECT_TRUE( TimeIsBetween(ts_earlier, ts_now, ts_later));
+ EXPECT_FALSE(TimeIsBetween(ts_earlier, ts_later, ts_now));
+ EXPECT_FALSE(TimeIsBetween(ts_now, ts_earlier, ts_later));
+ EXPECT_TRUE( TimeIsBetween(ts_now, ts_later, ts_earlier));
+ EXPECT_TRUE( TimeIsBetween(ts_later, ts_earlier, ts_now));
+ EXPECT_FALSE(TimeIsBetween(ts_later, ts_now, ts_earlier));
+
+ // Edge cases
+ EXPECT_TRUE( TimeIsBetween(ts_earlier, ts_earlier, ts_earlier));
+ EXPECT_TRUE( TimeIsBetween(ts_earlier, ts_earlier, ts_later));
+ EXPECT_TRUE( TimeIsBetween(ts_earlier, ts_later, ts_later));
+
+ // Earlier of two times
+ EXPECT_EQ(ts_earlier, TimeMin(ts_earlier, ts_earlier));
+ EXPECT_EQ(ts_earlier, TimeMin(ts_earlier, ts_now));
+ EXPECT_EQ(ts_earlier, TimeMin(ts_earlier, ts_later));
+ EXPECT_EQ(ts_earlier, TimeMin(ts_now, ts_earlier));
+ EXPECT_EQ(ts_earlier, TimeMin(ts_later, ts_earlier));
+
+ // Later of two times
+ EXPECT_EQ(ts_earlier, TimeMax(ts_earlier, ts_earlier));
+ EXPECT_EQ(ts_now, TimeMax(ts_earlier, ts_now));
+ EXPECT_EQ(ts_later, TimeMax(ts_earlier, ts_later));
+ EXPECT_EQ(ts_now, TimeMax(ts_now, ts_earlier));
+ EXPECT_EQ(ts_later, TimeMax(ts_later, ts_earlier));
+}
+
+TEST(TimeTest, Intervals) {
+ TimeStamp ts_earlier = Time();
+ TimeStamp ts_later = TimeAfter(500);
+
+ // We can't depend on ts_later and ts_earlier being exactly 500 apart,
+ // since time elapses between the calls to Time() and TimeAfter(500).
+ EXPECT_LE(500, TimeDiff(ts_later, ts_earlier));
+ EXPECT_GE(-500, TimeDiff(ts_earlier, ts_later));
+
+ // Time has elapsed since ts_earlier
+ EXPECT_GE(TimeSince(ts_earlier), 0);
+
+ // ts_earlier is earlier than now, so TimeUntil ts_earlier is -ve
+ EXPECT_LE(TimeUntil(ts_earlier), 0);
+
+ // ts_later likely hasn't happened yet, so TimeSince could be -ve
+ // but within 500
+ EXPECT_GE(TimeSince(ts_later), -500);
+
+ // TimeUntil ts_later is at most 500
+ EXPECT_LE(TimeUntil(ts_later), 500);
+}
+
+TEST(TimeTest, BoundaryComparison) {
+ // Obtain two different times, in known order
+ TimeStamp ts_earlier = static_cast<TimeStamp>(-50);
+ TimeStamp ts_later = ts_earlier + 100;
+ EXPECT_NE(ts_earlier, ts_later);
+
+ // Common comparisons
+ EXPECT_TRUE( TimeIsLaterOrEqual(ts_earlier, ts_later));
+ EXPECT_TRUE( TimeIsLater( ts_earlier, ts_later));
+ EXPECT_FALSE(TimeIsLaterOrEqual(ts_later, ts_earlier));
+ EXPECT_FALSE(TimeIsLater( ts_later, ts_earlier));
+
+ // Earlier of two times
+ EXPECT_EQ(ts_earlier, TimeMin(ts_earlier, ts_earlier));
+ EXPECT_EQ(ts_earlier, TimeMin(ts_earlier, ts_later));
+ EXPECT_EQ(ts_earlier, TimeMin(ts_later, ts_earlier));
+
+ // Later of two times
+ EXPECT_EQ(ts_earlier, TimeMax(ts_earlier, ts_earlier));
+ EXPECT_EQ(ts_later, TimeMax(ts_earlier, ts_later));
+ EXPECT_EQ(ts_later, TimeMax(ts_later, ts_earlier));
+
+ // Interval
+ EXPECT_EQ(100, TimeDiff(ts_later, ts_earlier));
+ EXPECT_EQ(-100, TimeDiff(ts_earlier, ts_later));
+}
+
+TEST(TimeTest, DISABLED_CurrentTmTime) {
+ struct tm tm;
+ int microseconds;
+
+ time_t before = ::time(NULL);
+ CurrentTmTime(&tm, &microseconds);
+ time_t after = ::time(NULL);
+
+ // Assert that 'tm' represents a time between 'before' and 'after'.
+ // mktime() uses local time, so we have to compensate for that.
+ time_t local_delta = before - ::mktime(::gmtime(&before)); // NOLINT
+ time_t t = ::mktime(&tm) + local_delta;
+
+ EXPECT_TRUE(before <= t && t <= after);
+ EXPECT_TRUE(0 <= microseconds && microseconds < 1000000);
+}
+
+class TimestampWrapAroundHandlerTest : public testing::Test {
+ public:
+ TimestampWrapAroundHandlerTest() {}
+
+ protected:
+ TimestampWrapAroundHandler wraparound_handler_;
+};
+
+TEST_F(TimestampWrapAroundHandlerTest, Unwrap) {
+ uint32 ts = 0xfffffff2;
+ int64 unwrapped_ts = ts;
+ EXPECT_EQ(ts, wraparound_handler_.Unwrap(ts));
+ ts = 2;
+ unwrapped_ts += 0x10;
+ EXPECT_EQ(unwrapped_ts, wraparound_handler_.Unwrap(ts));
+ ts = 0xfffffff2;
+ unwrapped_ts += 0xfffffff0;
+ EXPECT_EQ(unwrapped_ts, wraparound_handler_.Unwrap(ts));
+ ts = 0;
+ unwrapped_ts += 0xe;
+ EXPECT_EQ(unwrapped_ts, wraparound_handler_.Unwrap(ts));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/timing.cc b/chromium/third_party/webrtc/base/timing.cc
new file mode 100644
index 00000000000..aa1fc4290b5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/timing.cc
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/timing.h"
+#include "webrtc/base/timeutils.h"
+
+#if defined(WEBRTC_POSIX)
+#include <errno.h>
+#include <math.h>
+#include <sys/time.h>
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <mach/mach.h>
+#include <mach/clock.h>
+#endif
+#elif defined(WEBRTC_WIN)
+#include <sys/timeb.h>
+#include "webrtc/base/win32.h"
+#endif
+
+namespace rtc {
+
+Timing::Timing() {
+#if defined(WEBRTC_WIN)
+ // This may fail, but we handle failure gracefully in the methods
+ // that use it (by falling back to an alternative sleep method).
+ //
+ // TODO: Make it possible for the user to tell if IdleWait will
+ // be done at a lower resolution because of this.
+ timer_handle_ = CreateWaitableTimer(NULL, // Security attributes.
+ FALSE, // Manual reset?
+ NULL); // Timer name.
+#endif
+}
+
+Timing::~Timing() {
+#if defined(WEBRTC_WIN)
+ if (timer_handle_ != NULL)
+ CloseHandle(timer_handle_);
+#endif
+}
+
+double Timing::WallTimeNow() {
+#if defined(WEBRTC_POSIX)
+ struct timeval time;
+ gettimeofday(&time, NULL);
+ // Convert from second (1.0) and microsecond (1e-6).
+ return (static_cast<double>(time.tv_sec) +
+ static_cast<double>(time.tv_usec) * 1.0e-6);
+
+#elif defined(WEBRTC_WIN)
+ struct _timeb time;
+ _ftime(&time);
+ // Convert from second (1.0) and milliseconds (1e-3).
+ return (static_cast<double>(time.time) +
+ static_cast<double>(time.millitm) * 1.0e-3);
+#endif
+}
+
+double Timing::TimerNow() {
+ return (static_cast<double>(TimeNanos()) / kNumNanosecsPerSec);
+}
+
+double Timing::BusyWait(double period) {
+ double start_time = TimerNow();
+ while (TimerNow() - start_time < period) {
+ }
+ return TimerNow() - start_time;
+}
+
+double Timing::IdleWait(double period) {
+ double start_time = TimerNow();
+
+#if defined(WEBRTC_POSIX)
+ double sec_int, sec_frac = modf(period, &sec_int);
+ struct timespec ts;
+ ts.tv_sec = static_cast<time_t>(sec_int);
+ ts.tv_nsec = static_cast<long>(sec_frac * 1.0e9); // NOLINT
+
+ // NOTE(liulk): for the NOLINT above, long is the appropriate POSIX
+ // type.
+
+ // POSIX nanosleep may be interrupted by signals.
+ while (nanosleep(&ts, &ts) == -1 && errno == EINTR) {
+ }
+
+#elif defined(WEBRTC_WIN)
+ if (timer_handle_ != NULL) {
+ LARGE_INTEGER due_time;
+
+ // Negative indicates relative time. The unit is 100 nanoseconds.
+ due_time.QuadPart = -LONGLONG(period * 1.0e7);
+
+ SetWaitableTimer(timer_handle_, &due_time, 0, NULL, NULL, TRUE);
+ WaitForSingleObject(timer_handle_, INFINITE);
+ } else {
+ // Still attempts to sleep with lesser resolution.
+ // The unit is in milliseconds.
+ Sleep(DWORD(period * 1.0e3));
+ }
+#endif
+
+ return TimerNow() - start_time;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/timing.h b/chromium/third_party/webrtc/base/timing.h
new file mode 100644
index 00000000000..58b17a9fb7e
--- /dev/null
+++ b/chromium/third_party/webrtc/base/timing.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TIMING_H_
+#define WEBRTC_BASE_TIMING_H_
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32.h"
+#endif
+
+namespace rtc {
+
+class Timing {
+ public:
+ Timing();
+ virtual ~Timing();
+
+ // WallTimeNow() returns the current wall-clock time in seconds,
+ // with 10-millisecond resolution.
+ virtual double WallTimeNow();
+
+ // TimerNow() is like WallTimeNow(), but is monotonically
+ // increasing. It returns seconds with a resolution of 10 microseconds
+ // or better. Although the timer and wall-clock time use the same
+ // unit, they do not necessarily correlate, because wall-clock time
+ // may be adjusted backwards and hence is not monotonic.
+ // Made virtual so that a fake can be substituted in tests.
+ virtual double TimerNow();
+
+ // BusyWait() exhausts CPU as long as the time elapsed is less than
+ // the specified interval in seconds. Returns the actual waiting
+ // time based on TimerNow() measurement.
+ double BusyWait(double period);
+
+ // IdleWait() relinquishes control of the CPU for the specified period in
+ // seconds. It uses the highest-resolution sleep mechanism available,
+ // but does not otherwise guarantee accuracy. Returns the
+ // actual waiting time as measured by TimerNow().
+ //
+ // This function is not re-entrant for an object. Create a fresh
+ // Timing object for each thread.
+ double IdleWait(double period);
+
+ private:
+#if defined(WEBRTC_WIN)
+ HANDLE timer_handle_;
+#endif
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_TIMING_H_
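
A minimal sketch of measuring a sleep with the Timing class declared above, using one instance per thread as the IdleWait() comment requires; the function name is illustrative and not part of the patch.

// Illustrative only; not part of the patch.
void IdleWaitSketch() {
  rtc::Timing timing;
  double start = timing.TimerNow();      // seconds, monotonic
  double slept = timing.IdleWait(0.25);  // ask for ~250 ms
  double elapsed = timing.TimerNow() - start;
  // Both |slept| and |elapsed| are in seconds and should be >= 0.25,
  // subject to scheduler accuracy.
  (void)slept;
  (void)elapsed;
}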
diff --git a/chromium/third_party/webrtc/base/transformadapter.cc b/chromium/third_party/webrtc/base/transformadapter.cc
new file mode 100644
index 00000000000..76b750c2913
--- /dev/null
+++ b/chromium/third_party/webrtc/base/transformadapter.cc
@@ -0,0 +1,185 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/transformadapter.h"
+
+#include <string.h>
+
+#include "webrtc/base/common.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+
+TransformAdapter::TransformAdapter(StreamInterface * stream,
+ TransformInterface * transform,
+ bool direction_read)
+ : StreamAdapterInterface(stream), transform_(transform),
+ direction_read_(direction_read), state_(ST_PROCESSING), len_(0) {
+}
+
+TransformAdapter::~TransformAdapter() {
+ TransformAdapter::Close();
+ delete transform_;
+}
+
+StreamResult
+TransformAdapter::Read(void * buffer, size_t buffer_len,
+ size_t * read, int * error) {
+ if (!direction_read_)
+ return SR_EOS;
+
+ while (state_ != ST_ERROR) {
+ if (state_ == ST_COMPLETE)
+ return SR_EOS;
+
+ // Buffer more data
+ if ((state_ == ST_PROCESSING) && (len_ < sizeof(buffer_))) {
+ size_t subread;
+ StreamResult result = StreamAdapterInterface::Read(
+ buffer_ + len_,
+ sizeof(buffer_) - len_,
+ &subread,
+ &error_);
+ if (result == SR_BLOCK) {
+ return SR_BLOCK;
+ } else if (result == SR_ERROR) {
+ state_ = ST_ERROR;
+ break;
+ } else if (result == SR_EOS) {
+ state_ = ST_FLUSHING;
+ } else {
+ len_ += subread;
+ }
+ }
+
+ // Process buffered data
+ size_t in_len = len_;
+ size_t out_len = buffer_len;
+ StreamResult result = transform_->Transform(buffer_, &in_len,
+ buffer, &out_len,
+ (state_ == ST_FLUSHING));
+ ASSERT(result != SR_BLOCK);
+ if (result == SR_EOS) {
+ // Note: Don't signal SR_EOS this iteration, unless out_len is zero
+ state_ = ST_COMPLETE;
+ } else if (result == SR_ERROR) {
+ state_ = ST_ERROR;
+ error_ = -1; // TODO: propagate error
+ break;
+ } else if ((out_len == 0) && (state_ == ST_FLUSHING)) {
+ // If there is no output AND no more input, then something is wrong
+ state_ = ST_ERROR;
+ error_ = -1; // TODO: better error code?
+ break;
+ }
+
+ len_ -= in_len;
+ if (len_ > 0)
+ memmove(buffer_, buffer_ + in_len, len_);
+
+ if (out_len == 0)
+ continue;
+
+ if (read)
+ *read = out_len;
+ return SR_SUCCESS;
+ }
+
+ if (error)
+ *error = error_;
+ return SR_ERROR;
+}
+
+StreamResult
+TransformAdapter::Write(const void * data, size_t data_len,
+ size_t * written, int * error) {
+ if (direction_read_)
+ return SR_EOS;
+
+ size_t bytes_written = 0;
+ while (state_ != ST_ERROR) {
+ if (state_ == ST_COMPLETE)
+ return SR_EOS;
+
+ if (len_ < sizeof(buffer_)) {
+ // Process buffered data
+ size_t in_len = data_len;
+ size_t out_len = sizeof(buffer_) - len_;
+ StreamResult result = transform_->Transform(data, &in_len,
+ buffer_ + len_, &out_len,
+ (state_ == ST_FLUSHING));
+
+ ASSERT(result != SR_BLOCK);
+ if (result == SR_EOS) {
+ // Note: Don't signal SR_EOS this iteration, unless no data written
+ state_ = ST_COMPLETE;
+ } else if (result == SR_ERROR) {
+ ASSERT(false); // When this happens, think about what should be done
+ state_ = ST_ERROR;
+ error_ = -1; // TODO: propagate error
+ break;
+ }
+
+ len_ = out_len;
+ bytes_written = in_len;
+ }
+
+ size_t pos = 0;
+ while (pos < len_) {
+ size_t subwritten;
+ StreamResult result = StreamAdapterInterface::Write(buffer_ + pos,
+ len_ - pos,
+ &subwritten,
+ &error_);
+ if (result == SR_BLOCK) {
+ ASSERT(false); // TODO: we should handle this
+ return SR_BLOCK;
+ } else if (result == SR_ERROR) {
+ state_ = ST_ERROR;
+ break;
+ } else if (result == SR_EOS) {
+ state_ = ST_COMPLETE;
+ break;
+ }
+
+ pos += subwritten;
+ }
+
+ len_ -= pos;
+ if (len_ > 0)
+ memmove(buffer_, buffer_ + pos, len_);
+
+ if (bytes_written == 0)
+ continue;
+
+ if (written)
+ *written = bytes_written;
+ return SR_SUCCESS;
+ }
+
+ if (error)
+ *error = error_;
+ return SR_ERROR;
+}
+
+void
+TransformAdapter::Close() {
+ if (!direction_read_ && (state_ == ST_PROCESSING)) {
+ state_ = ST_FLUSHING;
+ do {
+ Write(0, 0, NULL, NULL);
+ } while (state_ == ST_FLUSHING);
+ }
+ state_ = ST_COMPLETE;
+ StreamAdapterInterface::Close();
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/transformadapter.h b/chromium/third_party/webrtc/base/transformadapter.h
new file mode 100644
index 00000000000..ad24438ea85
--- /dev/null
+++ b/chromium/third_party/webrtc/base/transformadapter.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TRANSFORMADAPTER_H__
+#define WEBRTC_BASE_TRANSFORMADAPTER_H__
+
+#include "webrtc/base/stream.h"
+
+namespace rtc {
+///////////////////////////////////////////////////////////////////////////////
+
+class TransformInterface {
+public:
+ virtual ~TransformInterface() { }
+
+ // Transform should convert the in_len bytes of input into the out_len-sized
+ // output buffer. If flush is true, there will be no more data following
+ // input.
+ // After the transformation, in_len contains the number of bytes consumed, and
+ // out_len contains the number of bytes ready in output.
+ // Note: Transform should not return SR_BLOCK, as there is no asynchronous
+ // notification available.
+ virtual StreamResult Transform(const void * input, size_t * in_len,
+ void * output, size_t * out_len,
+ bool flush) = 0;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+// TransformAdapter causes all data passed through to be transformed by the
+// supplied TransformInterface object, which may apply compression, encryption,
+// etc.
+
+class TransformAdapter : public StreamAdapterInterface {
+public:
+ // Note that the transformation is unidirectional, in the direction specified
+ // by the constructor. Operations in the opposite direction result in SR_EOS.
+ TransformAdapter(StreamInterface * stream,
+ TransformInterface * transform,
+ bool direction_read);
+ virtual ~TransformAdapter();
+
+ virtual StreamResult Read(void * buffer, size_t buffer_len,
+ size_t * read, int * error);
+ virtual StreamResult Write(const void * data, size_t data_len,
+ size_t * written, int * error);
+ virtual void Close();
+
+  // A priori, we can't tell what the transformation does to the stream length.
+ virtual bool GetAvailable(size_t* size) const { return false; }
+ virtual bool ReserveSize(size_t size) { return true; }
+
+ // Transformations might not be restartable
+ virtual bool Rewind() { return false; }
+
+private:
+ enum State { ST_PROCESSING, ST_FLUSHING, ST_COMPLETE, ST_ERROR };
+ enum { BUFFER_SIZE = 1024 };
+
+ TransformInterface * transform_;
+ bool direction_read_;
+ State state_;
+ int error_;
+
+ char buffer_[BUFFER_SIZE];
+ size_t len_;
+};
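+
+// Editor's usage sketch (not part of the original header). Assumes the
+// hypothetical XorTransform above and a MemoryStream from stream.h as the
+// wrapped stream; ownership/cleanup details are omitted:
+//
+//   StreamInterface* sink = new MemoryStream();
+//   TransformAdapter adapter(sink, new XorTransform(0x5A),
+//                            false);  // false: transform data on Write()
+//   size_t written;
+//   adapter.Write("hello", 5, &written, NULL);
+//   adapter.Close();  // flushes any buffered, transformed data to the sink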
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_TRANSFORMADAPTER_H__
diff --git a/chromium/third_party/webrtc/base/unittest_main.cc b/chromium/third_party/webrtc/base/unittest_main.cc
new file mode 100644
index 00000000000..5d412d5ed5b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/unittest_main.cc
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2007 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+//
+// A reusable entry point for gunit tests.
+
+#if defined(WEBRTC_WIN)
+#include <crtdbg.h>
+#endif
+
+#include "webrtc/base/flags.h"
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+
+DEFINE_bool(help, false, "prints this message");
+DEFINE_string(log, "", "logging options to use");
+#if defined(WEBRTC_WIN)
+DEFINE_int(crt_break_alloc, -1, "memory allocation to break on");
+DEFINE_bool(default_error_handlers, false,
+ "leave the default exception/dbg handler functions in place");
+
+void TestInvalidParameterHandler(const wchar_t* expression,
+ const wchar_t* function,
+ const wchar_t* file,
+ unsigned int line,
+ uintptr_t pReserved) {
+ LOG(LS_ERROR) << "InvalidParameter Handler called. Exiting.";
+ LOG(LS_ERROR) << expression << std::endl << function << std::endl << file
+ << std::endl << line;
+ exit(1);
+}
+void TestPureCallHandler() {
+ LOG(LS_ERROR) << "Purecall Handler called. Exiting.";
+ exit(1);
+}
+int TestCrtReportHandler(int report_type, char* msg, int* retval) {
+ LOG(LS_ERROR) << "CrtReport Handler called...";
+ LOG(LS_ERROR) << msg;
+ if (report_type == _CRT_ASSERT) {
+ exit(1);
+ } else {
+ *retval = 0;
+ return TRUE;
+ }
+}
+#endif // WEBRTC_WIN
+
+int main(int argc, char** argv) {
+ testing::InitGoogleTest(&argc, argv);
+ rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, false);
+ if (FLAG_help) {
+ rtc::FlagList::Print(NULL, false);
+ return 0;
+ }
+
+#if defined(WEBRTC_WIN)
+ if (!FLAG_default_error_handlers) {
+ // Make sure any errors don't throw dialogs hanging the test run.
+ _set_invalid_parameter_handler(TestInvalidParameterHandler);
+ _set_purecall_handler(TestPureCallHandler);
+ _CrtSetReportHook2(_CRT_RPTHOOK_INSTALL, TestCrtReportHandler);
+ }
+
+#ifdef _DEBUG // Turn on memory leak checking on Windows.
+ _CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF |_CRTDBG_LEAK_CHECK_DF);
+ if (FLAG_crt_break_alloc >= 0) {
+ _crtBreakAlloc = FLAG_crt_break_alloc;
+ }
+#endif // _DEBUG
+#endif // WEBRTC_WIN
+
+ rtc::Filesystem::SetOrganizationName("google");
+ rtc::Filesystem::SetApplicationName("unittest");
+
+  // By default, log timestamps. Allow overrides by use of a --log flag.
+ rtc::LogMessage::LogTimestamps();
+ if (*FLAG_log != '\0') {
+ rtc::LogMessage::ConfigureLogging(FLAG_log, "unittest.log");
+ }
+
+ int res = RUN_ALL_TESTS();
+
+ // clean up logging so we don't appear to leak memory.
+ rtc::LogMessage::ConfigureLogging("", "");
+
+#if defined(WEBRTC_WIN)
+ // Unhook crt function so that we don't ever log after statics have been
+ // uninitialized.
+ if (!FLAG_default_error_handlers)
+ _CrtSetReportHook2(_CRT_RPTHOOK_REMOVE, TestCrtReportHandler);
+#endif
+
+ return res;
+}
diff --git a/chromium/third_party/webrtc/base/unixfilesystem.cc b/chromium/third_party/webrtc/base/unixfilesystem.cc
new file mode 100644
index 00000000000..081d561dbac
--- /dev/null
+++ b/chromium/third_party/webrtc/base/unixfilesystem.cc
@@ -0,0 +1,572 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/unixfilesystem.h"
+
+#include <errno.h>
+#include <fcntl.h>
+#include <stdlib.h>
+#include <sys/stat.h>
+#include <unistd.h>
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <Carbon/Carbon.h>
+#include <IOKit/IOCFBundle.h>
+#include <sys/statvfs.h>
+#include "webrtc/base/macutils.h"
+#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
+
+#if defined(WEBRTC_POSIX) && !defined(WEBRTC_MAC) || defined(WEBRTC_IOS)
+#include <sys/types.h>
+#if defined(WEBRTC_ANDROID)
+#include <sys/statfs.h>
+#elif !defined(__native_client__)
+#include <sys/statvfs.h>
+#endif // !defined(__native_client__)
+#include <limits.h>
+#include <pwd.h>
+#include <stdio.h>
+#endif // WEBRTC_POSIX && !WEBRTC_MAC || WEBRTC_IOS
+
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+#include <ctype.h>
+#include <algorithm>
+#endif
+
+#if defined(__native_client__) && !defined(__GLIBC__)
+#include <sys/syslimits.h>
+#endif
+
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/stringutils.h"
+
+#if defined(WEBRTC_IOS)
+// Defined in iosfilesystem.mm. No header file to discourage use
+// elsewhere; other places should use GetApp{Data,Temp}Folder() in
+// this file. Don't copy/paste. I mean it.
+char* IOSDataDirectory();
+char* IOSTempDirectory();
+void IOSAppName(rtc::Pathname* path);
+#endif
+
+namespace rtc {
+
+#if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS)
+char* UnixFilesystem::app_temp_path_ = NULL;
+#else
+char* UnixFilesystem::provided_app_data_folder_ = NULL;
+char* UnixFilesystem::provided_app_temp_folder_ = NULL;
+
+void UnixFilesystem::SetAppDataFolder(const std::string& folder) {
+ delete [] provided_app_data_folder_;
+ provided_app_data_folder_ = CopyString(folder);
+}
+
+void UnixFilesystem::SetAppTempFolder(const std::string& folder) {
+ delete [] provided_app_temp_folder_;
+ provided_app_temp_folder_ = CopyString(folder);
+}
+#endif
+
+UnixFilesystem::UnixFilesystem() {
+#if defined(WEBRTC_IOS)
+ if (!provided_app_data_folder_)
+ provided_app_data_folder_ = IOSDataDirectory();
+ if (!provided_app_temp_folder_)
+ provided_app_temp_folder_ = IOSTempDirectory();
+#endif
+}
+
+UnixFilesystem::~UnixFilesystem() {}
+
+bool UnixFilesystem::CreateFolder(const Pathname &path, mode_t mode) {
+ std::string pathname(path.pathname());
+ int len = pathname.length();
+ if ((len == 0) || (pathname[len - 1] != '/'))
+ return false;
+
+ struct stat st;
+ int res = ::stat(pathname.c_str(), &st);
+ if (res == 0) {
+ // Something exists at this location, check if it is a directory
+ return S_ISDIR(st.st_mode) != 0;
+ } else if (errno != ENOENT) {
+ // Unexpected error
+ return false;
+ }
+
+ // Directory doesn't exist, look up one directory level
+ do {
+ --len;
+ } while ((len > 0) && (pathname[len - 1] != '/'));
+
+ if (!CreateFolder(Pathname(pathname.substr(0, len)), mode)) {
+ return false;
+ }
+
+ LOG(LS_INFO) << "Creating folder: " << pathname;
+ return (0 == ::mkdir(pathname.c_str(), mode));
+}
+
+bool UnixFilesystem::CreateFolder(const Pathname &path) {
+ return CreateFolder(path, 0755);
+}
+
+FileStream *UnixFilesystem::OpenFile(const Pathname &filename,
+ const std::string &mode) {
+ FileStream *fs = new FileStream();
+ if (fs && !fs->Open(filename.pathname().c_str(), mode.c_str(), NULL)) {
+ delete fs;
+ fs = NULL;
+ }
+ return fs;
+}
+
+bool UnixFilesystem::CreatePrivateFile(const Pathname &filename) {
+ int fd = open(filename.pathname().c_str(),
+ O_RDWR | O_CREAT | O_EXCL,
+ S_IRUSR | S_IWUSR);
+ if (fd < 0) {
+ LOG_ERR(LS_ERROR) << "open() failed.";
+ return false;
+ }
+ // Don't need to keep the file descriptor.
+ if (close(fd) < 0) {
+ LOG_ERR(LS_ERROR) << "close() failed.";
+ // Continue.
+ }
+ return true;
+}
+
+bool UnixFilesystem::DeleteFile(const Pathname &filename) {
+ LOG(LS_INFO) << "Deleting file:" << filename.pathname();
+
+ if (!IsFile(filename)) {
+ ASSERT(IsFile(filename));
+ return false;
+ }
+ return ::unlink(filename.pathname().c_str()) == 0;
+}
+
+bool UnixFilesystem::DeleteEmptyFolder(const Pathname &folder) {
+ LOG(LS_INFO) << "Deleting folder" << folder.pathname();
+
+ if (!IsFolder(folder)) {
+ ASSERT(IsFolder(folder));
+ return false;
+ }
+ std::string no_slash(folder.pathname(), 0, folder.pathname().length()-1);
+ return ::rmdir(no_slash.c_str()) == 0;
+}
+
+bool UnixFilesystem::GetTemporaryFolder(Pathname &pathname, bool create,
+ const std::string *append) {
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ FSRef fr;
+ if (0 != FSFindFolder(kOnAppropriateDisk, kTemporaryFolderType,
+ kCreateFolder, &fr))
+ return false;
+ unsigned char buffer[NAME_MAX+1];
+ if (0 != FSRefMakePath(&fr, buffer, ARRAY_SIZE(buffer)))
+ return false;
+ pathname.SetPathname(reinterpret_cast<char*>(buffer), "");
+#elif defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
+ ASSERT(provided_app_temp_folder_ != NULL);
+ pathname.SetPathname(provided_app_temp_folder_, "");
+#else // !WEBRTC_MAC || WEBRTC_IOS && !WEBRTC_ANDROID
+ if (const char* tmpdir = getenv("TMPDIR")) {
+ pathname.SetPathname(tmpdir, "");
+ } else if (const char* tmp = getenv("TMP")) {
+ pathname.SetPathname(tmp, "");
+ } else {
+#ifdef P_tmpdir
+ pathname.SetPathname(P_tmpdir, "");
+#else // !P_tmpdir
+ pathname.SetPathname("/tmp/", "");
+#endif // !P_tmpdir
+ }
+#endif // !WEBRTC_MAC || WEBRTC_IOS && !WEBRTC_ANDROID
+ if (append) {
+ ASSERT(!append->empty());
+ pathname.AppendFolder(*append);
+ }
+ return !create || CreateFolder(pathname);
+}
+
+std::string UnixFilesystem::TempFilename(const Pathname &dir,
+ const std::string &prefix) {
+ int len = dir.pathname().size() + prefix.size() + 2 + 6;
+ char *tempname = new char[len];
+
+ snprintf(tempname, len, "%s/%sXXXXXX", dir.pathname().c_str(),
+ prefix.c_str());
+ int fd = ::mkstemp(tempname);
+ if (fd != -1)
+ ::close(fd);
+ std::string ret(tempname);
+ delete[] tempname;
+
+ return ret;
+}
+
+bool UnixFilesystem::MoveFile(const Pathname &old_path,
+ const Pathname &new_path) {
+ if (!IsFile(old_path)) {
+ ASSERT(IsFile(old_path));
+ return false;
+ }
+ LOG(LS_VERBOSE) << "Moving " << old_path.pathname()
+ << " to " << new_path.pathname();
+ if (rename(old_path.pathname().c_str(), new_path.pathname().c_str()) != 0) {
+ if (errno != EXDEV)
+ return false;
+ if (!CopyFile(old_path, new_path))
+ return false;
+ if (!DeleteFile(old_path))
+ return false;
+ }
+ return true;
+}
+
+bool UnixFilesystem::MoveFolder(const Pathname &old_path,
+ const Pathname &new_path) {
+ if (!IsFolder(old_path)) {
+ ASSERT(IsFolder(old_path));
+ return false;
+ }
+ LOG(LS_VERBOSE) << "Moving " << old_path.pathname()
+ << " to " << new_path.pathname();
+ if (rename(old_path.pathname().c_str(), new_path.pathname().c_str()) != 0) {
+ if (errno != EXDEV)
+ return false;
+ if (!CopyFolder(old_path, new_path))
+ return false;
+ if (!DeleteFolderAndContents(old_path))
+ return false;
+ }
+ return true;
+}
+
+bool UnixFilesystem::IsFolder(const Pathname &path) {
+ struct stat st;
+ if (stat(path.pathname().c_str(), &st) < 0)
+ return false;
+ return S_ISDIR(st.st_mode);
+}
+
+bool UnixFilesystem::CopyFile(const Pathname &old_path,
+ const Pathname &new_path) {
+ LOG(LS_VERBOSE) << "Copying " << old_path.pathname()
+ << " to " << new_path.pathname();
+ char buf[256];
+ size_t len;
+
+ StreamInterface *source = OpenFile(old_path, "rb");
+ if (!source)
+ return false;
+
+ StreamInterface *dest = OpenFile(new_path, "wb");
+ if (!dest) {
+ delete source;
+ return false;
+ }
+
+ while (source->Read(buf, sizeof(buf), &len, NULL) == SR_SUCCESS)
+ dest->Write(buf, len, NULL, NULL);
+
+ delete source;
+ delete dest;
+ return true;
+}
+
+bool UnixFilesystem::IsTemporaryPath(const Pathname& pathname) {
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
+ ASSERT(provided_app_temp_folder_ != NULL);
+#endif
+
+ const char* const kTempPrefixes[] = {
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
+ provided_app_temp_folder_,
+#else
+ "/tmp/", "/var/tmp/",
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ "/private/tmp/", "/private/var/tmp/", "/private/var/folders/",
+#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
+#endif // WEBRTC_ANDROID || WEBRTC_IOS
+ };
+ for (size_t i = 0; i < ARRAY_SIZE(kTempPrefixes); ++i) {
+ if (0 == strncmp(pathname.pathname().c_str(), kTempPrefixes[i],
+ strlen(kTempPrefixes[i])))
+ return true;
+ }
+ return false;
+}
+
+bool UnixFilesystem::IsFile(const Pathname& pathname) {
+ struct stat st;
+ int res = ::stat(pathname.pathname().c_str(), &st);
+ // Treat symlinks, named pipes, etc. all as files.
+ return res == 0 && !S_ISDIR(st.st_mode);
+}
+
+bool UnixFilesystem::IsAbsent(const Pathname& pathname) {
+ struct stat st;
+ int res = ::stat(pathname.pathname().c_str(), &st);
+ // Note: we specifically maintain ENOTDIR as an error, because that implies
+ // that you could not call CreateFolder(pathname).
+ return res != 0 && ENOENT == errno;
+}
+
+bool UnixFilesystem::GetFileSize(const Pathname& pathname, size_t *size) {
+ struct stat st;
+ if (::stat(pathname.pathname().c_str(), &st) != 0)
+ return false;
+ *size = st.st_size;
+ return true;
+}
+
+bool UnixFilesystem::GetFileTime(const Pathname& path, FileTimeType which,
+ time_t* time) {
+ struct stat st;
+ if (::stat(path.pathname().c_str(), &st) != 0)
+ return false;
+ switch (which) {
+ case FTT_CREATED:
+ *time = st.st_ctime;
+ break;
+ case FTT_MODIFIED:
+ *time = st.st_mtime;
+ break;
+ case FTT_ACCESSED:
+ *time = st.st_atime;
+ break;
+ default:
+ return false;
+ }
+ return true;
+}
+
+bool UnixFilesystem::GetAppPathname(Pathname* path) {
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ ProcessSerialNumber psn = { 0, kCurrentProcess };
+ CFDictionaryRef procinfo = ProcessInformationCopyDictionary(&psn,
+ kProcessDictionaryIncludeAllInformationMask);
+ if (NULL == procinfo)
+ return false;
+ CFStringRef cfpath = (CFStringRef) CFDictionaryGetValue(procinfo,
+ kIOBundleExecutableKey);
+ std::string path8;
+ bool success = ToUtf8(cfpath, &path8);
+ CFRelease(procinfo);
+ if (success)
+ path->SetPathname(path8);
+ return success;
+#elif defined(__native_client__)
+ return false;
+#elif IOS
+ IOSAppName(path);
+ return true;
+#else // WEBRTC_MAC && !defined(WEBRTC_IOS)
+ char buffer[PATH_MAX + 2];
+ ssize_t len = readlink("/proc/self/exe", buffer, ARRAY_SIZE(buffer) - 1);
+ if ((len <= 0) || (len == PATH_MAX + 1))
+ return false;
+ buffer[len] = '\0';
+ path->SetPathname(buffer);
+ return true;
+#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
+}
+
+bool UnixFilesystem::GetAppDataFolder(Pathname* path, bool per_user) {
+ ASSERT(!organization_name_.empty());
+ ASSERT(!application_name_.empty());
+
+ // First get the base directory for app data.
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ if (per_user) {
+ // Use ~/Library/Application Support/<orgname>/<appname>/
+ FSRef fr;
+ if (0 != FSFindFolder(kUserDomain, kApplicationSupportFolderType,
+ kCreateFolder, &fr))
+ return false;
+ unsigned char buffer[NAME_MAX+1];
+ if (0 != FSRefMakePath(&fr, buffer, ARRAY_SIZE(buffer)))
+ return false;
+ path->SetPathname(reinterpret_cast<char*>(buffer), "");
+ } else {
+ // TODO
+ return false;
+ }
+#elif defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) // && !WEBRTC_MAC || WEBRTC_IOS
+ ASSERT(provided_app_data_folder_ != NULL);
+ path->SetPathname(provided_app_data_folder_, "");
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) // && !WEBRTC_MAC && !WEBRTC_IOS && !WEBRTC_ANDROID
+ if (per_user) {
+ // We follow the recommendations in
+ // http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
+ // It specifies separate directories for data and config files, but
+ // GetAppDataFolder() does not distinguish. We just return the config dir
+ // path.
+ const char* xdg_config_home = getenv("XDG_CONFIG_HOME");
+ if (xdg_config_home) {
+ path->SetPathname(xdg_config_home, "");
+ } else {
+ // XDG says to default to $HOME/.config. We also support falling back to
+ // other synonyms for HOME if for some reason it is not defined.
+ const char* homedir;
+ if (const char* home = getenv("HOME")) {
+ homedir = home;
+ } else if (const char* dotdir = getenv("DOTDIR")) {
+ homedir = dotdir;
+ } else if (passwd* pw = getpwuid(geteuid())) {
+ homedir = pw->pw_dir;
+ } else {
+ return false;
+ }
+ path->SetPathname(homedir, "");
+ path->AppendFolder(".config");
+ }
+ } else {
+ // XDG does not define a standard directory for writable global data. Let's
+ // just use this.
+ path->SetPathname("/var/cache/", "");
+ }
+#endif // !WEBRTC_MAC && !WEBRTC_LINUX
+
+ // Now add on a sub-path for our app.
+#if defined(WEBRTC_MAC) || defined(WEBRTC_ANDROID)
+ path->AppendFolder(organization_name_);
+ path->AppendFolder(application_name_);
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ // XDG says to use a single directory level, so we concatenate the org and app
+ // name with a hyphen. We also do the Linuxy thing and convert to all
+ // lowercase with no spaces.
+ std::string subdir(organization_name_);
+ subdir.append("-");
+ subdir.append(application_name_);
+ replace_substrs(" ", 1, "", 0, &subdir);
+ std::transform(subdir.begin(), subdir.end(), subdir.begin(), ::tolower);
+ path->AppendFolder(subdir);
+#endif
+ if (!CreateFolder(*path, 0700)) {
+ return false;
+ }
+#if !defined(__native_client__)
+ // If the folder already exists, it may have the wrong mode or be owned by
+ // someone else, both of which are security problems. Setting the mode
+ // avoids both issues since it will fail if the path is not owned by us.
+ if (0 != ::chmod(path->pathname().c_str(), 0700)) {
+ LOG_ERR(LS_ERROR) << "Can't set mode on " << path;
+ return false;
+ }
+#endif
+ return true;
+}
+
+bool UnixFilesystem::GetAppTempFolder(Pathname* path) {
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
+ ASSERT(provided_app_temp_folder_ != NULL);
+ path->SetPathname(provided_app_temp_folder_);
+ return true;
+#else
+ ASSERT(!application_name_.empty());
+ // TODO: Consider whether we are worried about thread safety.
+ if (app_temp_path_ != NULL && strlen(app_temp_path_) > 0) {
+ path->SetPathname(app_temp_path_);
+ return true;
+ }
+
+ // Create a random directory as /tmp/<appname>-<pid>-<timestamp>
+ char buffer[128];
+ sprintfn(buffer, ARRAY_SIZE(buffer), "-%d-%d",
+ static_cast<int>(getpid()),
+ static_cast<int>(time(0)));
+ std::string folder(application_name_);
+ folder.append(buffer);
+ if (!GetTemporaryFolder(*path, true, &folder))
+ return false;
+
+ delete [] app_temp_path_;
+ app_temp_path_ = CopyString(path->pathname());
+ // TODO: atexit(DeleteFolderAndContents(app_temp_path_));
+ return true;
+#endif
+}
+
+bool UnixFilesystem::GetDiskFreeSpace(const Pathname& path, int64 *freebytes) {
+#ifdef __native_client__
+ return false;
+#else // __native_client__
+ ASSERT(NULL != freebytes);
+ // TODO: Consider making relative paths absolute using cwd.
+ // TODO: When popping off a symlink, push back on the components of the
+ // symlink, so we don't jump out of the target disk inadvertently.
+ Pathname existing_path(path.folder(), "");
+ while (!existing_path.folder().empty() && IsAbsent(existing_path)) {
+ existing_path.SetFolder(existing_path.parent_folder());
+ }
+#if defined(WEBRTC_ANDROID)
+ struct statfs vfs;
+ memset(&vfs, 0, sizeof(vfs));
+ if (0 != statfs(existing_path.pathname().c_str(), &vfs))
+ return false;
+#else
+ struct statvfs vfs;
+ memset(&vfs, 0, sizeof(vfs));
+ if (0 != statvfs(existing_path.pathname().c_str(), &vfs))
+ return false;
+#endif // WEBRTC_ANDROID
+#if defined(WEBRTC_LINUX)
+ *freebytes = static_cast<int64>(vfs.f_bsize) * vfs.f_bavail;
+#elif defined(WEBRTC_MAC)
+ *freebytes = static_cast<int64>(vfs.f_frsize) * vfs.f_bavail;
+#endif
+
+ return true;
+#endif // !__native_client__
+}
+
+Pathname UnixFilesystem::GetCurrentDirectory() {
+ Pathname cwd;
+ char buffer[PATH_MAX];
+ char *path = getcwd(buffer, PATH_MAX);
+
+ if (!path) {
+ LOG_ERR(LS_ERROR) << "getcwd() failed";
+ return cwd; // returns empty pathname
+ }
+ cwd.SetFolder(std::string(path));
+
+ return cwd;
+}
+
+char* UnixFilesystem::CopyString(const std::string& str) {
+ size_t size = str.length() + 1;
+
+ char* buf = new char[size];
+ if (!buf) {
+ return NULL;
+ }
+
+ strcpyn(buf, size, str.c_str());
+ return buf;
+}
+
+} // namespace rtc
+
+#if defined(__native_client__)
+extern "C" int __attribute__((weak))
+link(const char* oldpath, const char* newpath) {
+ errno = EACCES;
+ return -1;
+}
+#endif
diff --git a/chromium/third_party/webrtc/base/unixfilesystem.h b/chromium/third_party/webrtc/base/unixfilesystem.h
new file mode 100644
index 00000000000..7b6c20edde2
--- /dev/null
+++ b/chromium/third_party/webrtc/base/unixfilesystem.h
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_UNIXFILESYSTEM_H_
+#define WEBRTC_BASE_UNIXFILESYSTEM_H_
+
+#include <sys/types.h>
+
+#include "webrtc/base/fileutils.h"
+
+namespace rtc {
+
+class UnixFilesystem : public FilesystemInterface {
+ public:
+ UnixFilesystem();
+ virtual ~UnixFilesystem();
+
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
+ // Android does not have a native code API to fetch the app data or temp
+ // folders. That needs to be passed into this class from Java. Similarly, iOS
+ // only supports an Objective-C API for fetching the folder locations, so that
+ // needs to be passed in here from Objective-C. Or at least that used to be
+ // the case; now the ctor will do the work if necessary and possible.
+ // TODO(fischman): add an Android version that uses JNI and drop the
+ // SetApp*Folder() APIs once external users stop using them.
+ static void SetAppDataFolder(const std::string& folder);
+ static void SetAppTempFolder(const std::string& folder);
+#endif
+
+ // Opens a file. Returns an open StreamInterface if function succeeds.
+ // Otherwise, returns NULL.
+ virtual FileStream *OpenFile(const Pathname &filename,
+ const std::string &mode);
+
+ // Atomically creates an empty file accessible only to the current user if one
+ // does not already exist at the given path, otherwise fails.
+ virtual bool CreatePrivateFile(const Pathname &filename);
+
+ // This will attempt to delete the file located at filename.
+  // It will fail with VERIFY if you pass it a non-existent file or a directory.
+ virtual bool DeleteFile(const Pathname &filename);
+
+  // This will attempt to delete the folder located at 'folder'.
+  // It ASSERTs and returns false if you pass it a non-existent folder or a
+ // plain file.
+ virtual bool DeleteEmptyFolder(const Pathname &folder);
+
+ // Creates a directory. This will call itself recursively to create /foo/bar
+ // even if /foo does not exist. All created directories are created with the
+ // given mode.
+ // Returns TRUE if function succeeds
+ virtual bool CreateFolder(const Pathname &pathname, mode_t mode);
+
+ // As above, with mode = 0755.
+ virtual bool CreateFolder(const Pathname &pathname);
+
+ // This moves a file from old_path to new_path, where "file" can be a plain
+ // file or directory, which will be moved recursively.
+ // Returns true if function succeeds.
+ virtual bool MoveFile(const Pathname &old_path, const Pathname &new_path);
+ virtual bool MoveFolder(const Pathname &old_path, const Pathname &new_path);
+
+  // This copies a file from old_path to new_path where "file" can be a plain
+ // file or directory, which will be copied recursively.
+ // Returns true if function succeeds
+ virtual bool CopyFile(const Pathname &old_path, const Pathname &new_path);
+
+ // Returns true if a pathname is a directory
+ virtual bool IsFolder(const Pathname& pathname);
+
+ // Returns true if pathname represents a temporary location on the system.
+ virtual bool IsTemporaryPath(const Pathname& pathname);
+
+  // Returns true if pathname represents an existing file
+ virtual bool IsFile(const Pathname& pathname);
+
+  // Returns true if pathname refers to no filesystem object and every parent
+  // directory either exists or is also absent.
+ virtual bool IsAbsent(const Pathname& pathname);
+
+ virtual std::string TempFilename(const Pathname &dir,
+ const std::string &prefix);
+
+  // A folder appropriate for storing temporary files (contents are
+  // automatically deleted when the program exits).
+ virtual bool GetTemporaryFolder(Pathname &path, bool create,
+ const std::string *append);
+
+ virtual bool GetFileSize(const Pathname& path, size_t* size);
+ virtual bool GetFileTime(const Pathname& path, FileTimeType which,
+ time_t* time);
+
+ // Returns the path to the running application.
+ virtual bool GetAppPathname(Pathname* path);
+
+ virtual bool GetAppDataFolder(Pathname* path, bool per_user);
+
+ // Get a temporary folder that is unique to the current user and application.
+ virtual bool GetAppTempFolder(Pathname* path);
+
+ virtual bool GetDiskFreeSpace(const Pathname& path, int64 *freebytes);
+
+ // Returns the absolute path of the current directory.
+ virtual Pathname GetCurrentDirectory();
+
+ private:
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
+ static char* provided_app_data_folder_;
+ static char* provided_app_temp_folder_;
+#else
+ static char* app_temp_path_;
+#endif
+
+ static char* CopyString(const std::string& str);
+};
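+
+// Editor's usage sketch (not part of the original header). Note that this
+// implementation's CreateFolder() expects the pathname to end in '/' (see
+// unixfilesystem.cc); the paths and prefix below are purely illustrative:
+//
+//   UnixFilesystem fs;
+//   Pathname dir("/tmp/myapp/cache/");
+//   if (fs.CreateFolder(dir)) {               // creates missing parents too
+//     std::string name = fs.TempFilename(dir, "blob");
+//     fs.DeleteFile(Pathname(name));
+//   }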
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_UNIXFILESYSTEM_H_
diff --git a/chromium/third_party/webrtc/base/urlencode.cc b/chromium/third_party/webrtc/base/urlencode.cc
new file mode 100644
index 00000000000..b152829ae83
--- /dev/null
+++ b/chromium/third_party/webrtc/base/urlencode.cc
@@ -0,0 +1,183 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/urlencode.h"
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/stringutils.h"
+
+static int HexPairValue(const char * code) {
+ int value = 0;
+ const char * pch = code;
+ for (;;) {
+ int digit = *pch++;
+ if (digit >= '0' && digit <= '9') {
+ value += digit - '0';
+ }
+ else if (digit >= 'A' && digit <= 'F') {
+ value += digit - 'A' + 10;
+ }
+ else if (digit >= 'a' && digit <= 'f') {
+ value += digit - 'a' + 10;
+ }
+ else {
+ return -1;
+ }
+ if (pch == code + 2)
+ return value;
+ value <<= 4;
+ }
+}
+
+static int InternalUrlDecode(const char *source, char *dest,
+ bool encode_space_as_plus) {
+ char * start = dest;
+
+ while (*source) {
+ switch (*source) {
+ case '+':
+ if (encode_space_as_plus) {
+ *(dest++) = ' ';
+ } else {
+ *dest++ = *source;
+ }
+ break;
+ case '%':
+ if (source[1] && source[2]) {
+ int value = HexPairValue(source + 1);
+ if (value >= 0) {
+ *(dest++) = value;
+ source += 2;
+ }
+ else {
+ *dest++ = '?';
+ }
+ }
+ else {
+ *dest++ = '?';
+ }
+ break;
+ default:
+ *dest++ = *source;
+ }
+ source++;
+ }
+
+ *dest = 0;
+ return static_cast<int>(dest - start);
+}
+
+static bool IsValidUrlChar(char ch, bool unsafe_only) {
+ if (unsafe_only) {
+ return !(ch <= ' ' || strchr("\\\"^&`<>[]{}", ch));
+ } else {
+ return isalnum(ch) || strchr("-_.!~*'()", ch);
+ }
+}
+
+namespace rtc {
+
+int UrlDecode(const char *source, char *dest) {
+ return InternalUrlDecode(source, dest, true);
+}
+
+int UrlDecodeWithoutEncodingSpaceAsPlus(const char *source, char *dest) {
+ return InternalUrlDecode(source, dest, false);
+}
+
+int InternalUrlEncode(const char *source, char *dest, unsigned int max,
+ bool encode_space_as_plus, bool unsafe_only) {
+ static const char *digits = "0123456789ABCDEF";
+ if (max == 0) {
+ return 0;
+ }
+
+ char *start = dest;
+ while (static_cast<unsigned>(dest - start) < max && *source) {
+ unsigned char ch = static_cast<unsigned char>(*source);
+ if (*source == ' ' && encode_space_as_plus && !unsafe_only) {
+ *dest++ = '+';
+ } else if (IsValidUrlChar(ch, unsafe_only)) {
+ *dest++ = *source;
+ } else {
+ if (static_cast<unsigned>(dest - start) + 4 > max) {
+ break;
+ }
+ *dest++ = '%';
+ *dest++ = digits[(ch >> 4) & 0x0F];
+ *dest++ = digits[ ch & 0x0F];
+ }
+ source++;
+ }
+ ASSERT(static_cast<unsigned int>(dest - start) < max);
+ *dest = 0;
+
+ return static_cast<int>(dest - start);
+}
+
+int UrlEncode(const char *source, char *dest, unsigned max) {
+ return InternalUrlEncode(source, dest, max, true, false);
+}
+
+int UrlEncodeWithoutEncodingSpaceAsPlus(const char *source, char *dest,
+ unsigned max) {
+ return InternalUrlEncode(source, dest, max, false, false);
+}
+
+int UrlEncodeOnlyUnsafeChars(const char *source, char *dest, unsigned max) {
+ return InternalUrlEncode(source, dest, max, false, true);
+}
+
+std::string
+InternalUrlDecodeString(const std::string & encoded,
+ bool encode_space_as_plus) {
+ size_t needed_length = encoded.length() + 1;
+ char* buf = STACK_ARRAY(char, needed_length);
+ InternalUrlDecode(encoded.c_str(), buf, encode_space_as_plus);
+ return buf;
+}
+
+std::string
+UrlDecodeString(const std::string & encoded) {
+ return InternalUrlDecodeString(encoded, true);
+}
+
+std::string
+UrlDecodeStringWithoutEncodingSpaceAsPlus(const std::string & encoded) {
+ return InternalUrlDecodeString(encoded, false);
+}
+
+std::string
+InternalUrlEncodeString(const std::string & decoded,
+ bool encode_space_as_plus,
+ bool unsafe_only) {
+ int needed_length = static_cast<int>(decoded.length()) * 3 + 1;
+ char* buf = STACK_ARRAY(char, needed_length);
+ InternalUrlEncode(decoded.c_str(), buf, needed_length,
+ encode_space_as_plus, unsafe_only);
+ return buf;
+}
+
+std::string
+UrlEncodeString(const std::string & decoded) {
+ return InternalUrlEncodeString(decoded, true, false);
+}
+
+std::string
+UrlEncodeStringWithoutEncodingSpaceAsPlus(const std::string & decoded) {
+ return InternalUrlEncodeString(decoded, false, false);
+}
+
+std::string
+UrlEncodeStringForOnlyUnsafeChars(const std::string & decoded) {
+ return InternalUrlEncodeString(decoded, false, true);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/urlencode.h b/chromium/third_party/webrtc/base/urlencode.h
new file mode 100644
index 00000000000..fc10f3880c5
--- /dev/null
+++ b/chromium/third_party/webrtc/base/urlencode.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2008 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef _URLENCODE_H_
+#define _URLENCODE_H_
+
+#include <string>
+
+namespace rtc {
+
+// Decode all encoded characters. Also decode + as space.
+int UrlDecode(const char *source, char *dest);
+
+// Decode all encoded characters.
+int UrlDecodeWithoutEncodingSpaceAsPlus(const char *source, char *dest);
+
+// Encode all characters except alphas, numbers, and -_.!~*'()
+// Also encode space as +.
+int UrlEncode(const char *source, char *dest, unsigned max);
+
+// Encode all characters except alphas, numbers, and -_.!~*'()
+int UrlEncodeWithoutEncodingSpaceAsPlus(const char *source, char *dest,
+ unsigned max);
+
+// Encode only unsafe chars, including \ "^&`<>[]{}
+// Also encode space as %20, instead of +
+int UrlEncodeOnlyUnsafeChars(const char *source, char *dest, unsigned max);
+
+std::string UrlDecodeString(const std::string & encoded);
+std::string UrlDecodeStringWithoutEncodingSpaceAsPlus(
+ const std::string & encoded);
+std::string UrlEncodeString(const std::string & decoded);
+std::string UrlEncodeStringWithoutEncodingSpaceAsPlus(
+ const std::string & decoded);
+std::string UrlEncodeStringForOnlyUnsafeChars(const std::string & decoded);
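+
+// Editor's usage sketch (not part of the original header). The caller must
+// size dest for worst-case expansion (up to three bytes per input character
+// plus a terminating NUL); the values below follow the unit tests:
+//
+//   char dest[16];
+//   int len = UrlEncode("A^ ", dest, sizeof(dest));  // dest == "A%5E+", len == 5
+//   std::string plain = UrlDecodeString("A%5E+");    // plain == "A^ "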
+
+}  // namespace rtc
+
+#endif  // _URLENCODE_H_
diff --git a/chromium/third_party/webrtc/base/urlencode_unittest.cc b/chromium/third_party/webrtc/base/urlencode_unittest.cc
new file mode 100644
index 00000000000..52169132e22
--- /dev/null
+++ b/chromium/third_party/webrtc/base/urlencode_unittest.cc
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/urlencode.h"
+
+using rtc::UrlEncode;
+
+TEST(Urlencode, SourceTooLong) {
+ char source[] = "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"
+ "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^";
+ char dest[1];
+ ASSERT_EQ(0, UrlEncode(source, dest, ARRAY_SIZE(dest)));
+ ASSERT_EQ('\0', dest[0]);
+
+ dest[0] = 'a';
+ ASSERT_EQ(0, UrlEncode(source, dest, 0));
+ ASSERT_EQ('a', dest[0]);
+}
+
+TEST(Urlencode, OneCharacterConversion) {
+ char source[] = "^";
+ char dest[4];
+ ASSERT_EQ(3, UrlEncode(source, dest, ARRAY_SIZE(dest)));
+ ASSERT_STREQ("%5E", dest);
+}
+
+TEST(Urlencode, ShortDestinationNoEncoding) {
+ // In this case we have a destination that would not be
+ // big enough to hold an encoding but is big enough to
+ // hold the text given.
+ char source[] = "aa";
+ char dest[3];
+ ASSERT_EQ(2, UrlEncode(source, dest, ARRAY_SIZE(dest)));
+ ASSERT_STREQ("aa", dest);
+}
+
+TEST(Urlencode, ShortDestinationEncoding) {
+ // In this case we have a destination that is not
+ // big enough to hold the encoding.
+ char source[] = "&";
+ char dest[3];
+ ASSERT_EQ(0, UrlEncode(source, dest, ARRAY_SIZE(dest)));
+ ASSERT_EQ('\0', dest[0]);
+}
+
+TEST(Urlencode, Encoding1) {
+ char source[] = "A^ ";
+ char dest[8];
+ ASSERT_EQ(5, UrlEncode(source, dest, ARRAY_SIZE(dest)));
+ ASSERT_STREQ("A%5E+", dest);
+}
+
+TEST(Urlencode, Encoding2) {
+ char source[] = "A^ ";
+ char dest[8];
+ ASSERT_EQ(7, rtc::UrlEncodeWithoutEncodingSpaceAsPlus(source, dest,
+ ARRAY_SIZE(dest)));
+ ASSERT_STREQ("A%5E%20", dest);
+}
+
+TEST(Urldecode, Decoding1) {
+ char source[] = "A%5E+";
+ char dest[8];
+ ASSERT_EQ(3, rtc::UrlDecode(source, dest));
+ ASSERT_STREQ("A^ ", dest);
+}
+
+TEST(Urldecode, Decoding2) {
+ char source[] = "A%5E+";
+ char dest[8];
+ ASSERT_EQ(3, rtc::UrlDecodeWithoutEncodingSpaceAsPlus(source, dest));
+ ASSERT_STREQ("A^+", dest);
+}
diff --git a/chromium/third_party/webrtc/base/versionparsing.cc b/chromium/third_party/webrtc/base/versionparsing.cc
new file mode 100644
index 00000000000..c3f982ff6ec
--- /dev/null
+++ b/chromium/third_party/webrtc/base/versionparsing.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/versionparsing.h"
+
+#include <stdlib.h>
+
+namespace rtc {
+
+bool ParseVersionString(const std::string& version_str,
+ int num_expected_segments,
+ int version[]) {
+ size_t pos = 0;
+ for (int i = 0;;) {
+ size_t dot_pos = version_str.find('.', pos);
+ size_t n;
+ if (dot_pos == std::string::npos) {
+ // npos here is a special value meaning "to the end of the string"
+ n = std::string::npos;
+ } else {
+ n = dot_pos - pos;
+ }
+
+ version[i] = atoi(version_str.substr(pos, n).c_str());
+
+ if (++i >= num_expected_segments) break;
+
+ if (dot_pos == std::string::npos) {
+ // Previous segment was not terminated by a dot, but there's supposed to
+ // be more segments, so that's an error.
+ return false;
+ }
+ pos = dot_pos + 1;
+ }
+ return true;
+}
+
+int CompareVersions(const int version1[],
+ const int version2[],
+ int num_segments) {
+ for (int i = 0; i < num_segments; ++i) {
+ int diff = version1[i] - version2[i];
+ if (diff != 0) {
+ return diff;
+ }
+ }
+ return 0;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/versionparsing.h b/chromium/third_party/webrtc/base/versionparsing.h
new file mode 100644
index 00000000000..be2d3323813
--- /dev/null
+++ b/chromium/third_party/webrtc/base/versionparsing.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_VERSIONPARSING_H_
+#define WEBRTC_BASE_VERSIONPARSING_H_
+
+#include <string>
+
+namespace rtc {
+
+// Parses a version string into an array. "num_expected_segments" must be the
+// number of numerical segments that the version is expected to have (e.g.,
+// "1.1.2.0" has 4). "version" must be an array of that length to hold the
+// parsed numbers.
+// Returns "true" iff successful.
+bool ParseVersionString(const std::string& version_str,
+ int num_expected_segments,
+ int version[]);
+
+// Computes the lexicographical order of two versions. The return value
+// indicates the order in the standard way (e.g., see strcmp()).
+int CompareVersions(const int version1[],
+ const int version2[],
+ int num_segments);
+
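+// Editor's usage sketch (not part of the original header):
+//
+//   int ver[4];
+//   if (ParseVersionString("1.1.2.0", 4, ver)) {
+//     static const int kMinimum[4] = {1, 0, 0, 0};
+//     if (CompareVersions(ver, kMinimum, 4) >= 0) {
+//       // running version is at least 1.0.0.0
+//     }
+//   }
+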
+} // namespace rtc
+
+#endif // WEBRTC_BASE_VERSIONPARSING_H_
diff --git a/chromium/third_party/webrtc/base/versionparsing_unittest.cc b/chromium/third_party/webrtc/base/versionparsing_unittest.cc
new file mode 100644
index 00000000000..51156991a63
--- /dev/null
+++ b/chromium/third_party/webrtc/base/versionparsing_unittest.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/versionparsing.h"
+
+#include "webrtc/base/gunit.h"
+
+namespace rtc {
+
+static const int kExampleSegments = 4;
+
+typedef int ExampleVersion[kExampleSegments];
+
+TEST(VersionParsing, TestGoodParse) {
+ ExampleVersion ver;
+ std::string str1("1.1.2.0");
+ static const ExampleVersion expect1 = {1, 1, 2, 0};
+ EXPECT_TRUE(ParseVersionString(str1, kExampleSegments, ver));
+ EXPECT_EQ(0, CompareVersions(ver, expect1, kExampleSegments));
+ std::string str2("2.0.0.1");
+ static const ExampleVersion expect2 = {2, 0, 0, 1};
+ EXPECT_TRUE(ParseVersionString(str2, kExampleSegments, ver));
+ EXPECT_EQ(0, CompareVersions(ver, expect2, kExampleSegments));
+}
+
+TEST(VersionParsing, TestBadParse) {
+ ExampleVersion ver;
+ std::string str1("1.1.2");
+ EXPECT_FALSE(ParseVersionString(str1, kExampleSegments, ver));
+ std::string str2("");
+ EXPECT_FALSE(ParseVersionString(str2, kExampleSegments, ver));
+  std::string str3("garbage");
+ EXPECT_FALSE(ParseVersionString(str3, kExampleSegments, ver));
+}
+
+TEST(VersionParsing, TestCompare) {
+ static const ExampleVersion ver1 = {1, 0, 21, 0};
+ static const ExampleVersion ver2 = {1, 1, 2, 0};
+ static const ExampleVersion ver3 = {1, 1, 3, 0};
+ static const ExampleVersion ver4 = {1, 1, 3, 9861};
+
+ // Test that every combination of comparisons has the expected outcome.
+ EXPECT_EQ(0, CompareVersions(ver1, ver1, kExampleSegments));
+ EXPECT_EQ(0, CompareVersions(ver2, ver2, kExampleSegments));
+ EXPECT_EQ(0, CompareVersions(ver3, ver3, kExampleSegments));
+ EXPECT_EQ(0, CompareVersions(ver4, ver4, kExampleSegments));
+
+ EXPECT_GT(0, CompareVersions(ver1, ver2, kExampleSegments));
+ EXPECT_LT(0, CompareVersions(ver2, ver1, kExampleSegments));
+
+ EXPECT_GT(0, CompareVersions(ver1, ver3, kExampleSegments));
+ EXPECT_LT(0, CompareVersions(ver3, ver1, kExampleSegments));
+
+ EXPECT_GT(0, CompareVersions(ver1, ver4, kExampleSegments));
+ EXPECT_LT(0, CompareVersions(ver4, ver1, kExampleSegments));
+
+ EXPECT_GT(0, CompareVersions(ver2, ver3, kExampleSegments));
+ EXPECT_LT(0, CompareVersions(ver3, ver2, kExampleSegments));
+
+ EXPECT_GT(0, CompareVersions(ver2, ver4, kExampleSegments));
+ EXPECT_LT(0, CompareVersions(ver4, ver2, kExampleSegments));
+
+ EXPECT_GT(0, CompareVersions(ver3, ver4, kExampleSegments));
+ EXPECT_LT(0, CompareVersions(ver4, ver3, kExampleSegments));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/virtualsocket_unittest.cc b/chromium/third_party/webrtc/base/virtualsocket_unittest.cc
new file mode 100644
index 00000000000..253d2c5bedb
--- /dev/null
+++ b/chromium/third_party/webrtc/base/virtualsocket_unittest.cc
@@ -0,0 +1,1001 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <time.h>
+#if defined(WEBRTC_POSIX)
+#include <netinet/in.h>
+#endif
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/testclient.h"
+#include "webrtc/base/testutils.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/base/virtualsocketserver.h"
+
+using namespace rtc;
+
+// Sends at a constant rate but with random packet sizes.
+struct Sender : public MessageHandler {
+ Sender(Thread* th, AsyncSocket* s, uint32 rt)
+ : thread(th), socket(new AsyncUDPSocket(s)),
+ done(false), rate(rt), count(0) {
+ last_send = rtc::Time();
+ thread->PostDelayed(NextDelay(), this, 1);
+ }
+
+ uint32 NextDelay() {
+ uint32 size = (rand() % 4096) + 1;
+ return 1000 * size / rate;
+ }
+
+ void OnMessage(Message* pmsg) {
+ ASSERT_EQ(1u, pmsg->message_id);
+
+ if (done)
+ return;
+
+ uint32 cur_time = rtc::Time();
+ uint32 delay = cur_time - last_send;
+ uint32 size = rate * delay / 1000;
+ size = std::min<uint32>(size, 4096);
+ size = std::max<uint32>(size, sizeof(uint32));
+
+ count += size;
+ memcpy(dummy, &cur_time, sizeof(cur_time));
+ socket->Send(dummy, size, options);
+
+ last_send = cur_time;
+ thread->PostDelayed(NextDelay(), this, 1);
+ }
+
+ Thread* thread;
+ scoped_ptr<AsyncUDPSocket> socket;
+ rtc::PacketOptions options;
+ bool done;
+ uint32 rate; // bytes per second
+ uint32 count;
+ uint32 last_send;
+ char dummy[4096];
+};
+
+struct Receiver : public MessageHandler, public sigslot::has_slots<> {
+ Receiver(Thread* th, AsyncSocket* s, uint32 bw)
+ : thread(th), socket(new AsyncUDPSocket(s)), bandwidth(bw), done(false),
+ count(0), sec_count(0), sum(0), sum_sq(0), samples(0) {
+ socket->SignalReadPacket.connect(this, &Receiver::OnReadPacket);
+ thread->PostDelayed(1000, this, 1);
+ }
+
+ ~Receiver() {
+ thread->Clear(this);
+ }
+
+ void OnReadPacket(AsyncPacketSocket* s, const char* data, size_t size,
+ const SocketAddress& remote_addr,
+ const PacketTime& packet_time) {
+ ASSERT_EQ(socket.get(), s);
+ ASSERT_GE(size, 4U);
+
+ count += size;
+ sec_count += size;
+
+ uint32 send_time = *reinterpret_cast<const uint32*>(data);
+ uint32 recv_time = rtc::Time();
+ uint32 delay = recv_time - send_time;
+ sum += delay;
+ sum_sq += delay * delay;
+ samples += 1;
+ }
+
+ void OnMessage(Message* pmsg) {
+ ASSERT_EQ(1u, pmsg->message_id);
+
+ if (done)
+ return;
+
+ // It is always possible for us to receive more than expected because
+ // packets can be further delayed in delivery.
+ if (bandwidth > 0)
+ ASSERT_TRUE(sec_count <= 5 * bandwidth / 4);
+ sec_count = 0;
+ thread->PostDelayed(1000, this, 1);
+ }
+
+ Thread* thread;
+ scoped_ptr<AsyncUDPSocket> socket;
+ uint32 bandwidth;
+ bool done;
+ size_t count;
+ size_t sec_count;
+ double sum;
+ double sum_sq;
+ uint32 samples;
+};
+
+class VirtualSocketServerTest : public testing::Test {
+ public:
+ VirtualSocketServerTest() : ss_(new VirtualSocketServer(NULL)),
+ kIPv4AnyAddress(IPAddress(INADDR_ANY), 0),
+ kIPv6AnyAddress(IPAddress(in6addr_any), 0) {
+ }
+
+ void CheckAddressIncrementalization(const SocketAddress& post,
+ const SocketAddress& pre) {
+ EXPECT_EQ(post.port(), pre.port() + 1);
+ IPAddress post_ip = post.ipaddr();
+ IPAddress pre_ip = pre.ipaddr();
+ EXPECT_EQ(pre_ip.family(), post_ip.family());
+ if (post_ip.family() == AF_INET) {
+ in_addr pre_ipv4 = pre_ip.ipv4_address();
+ in_addr post_ipv4 = post_ip.ipv4_address();
+ int difference = ntohl(post_ipv4.s_addr) - ntohl(pre_ipv4.s_addr);
+ EXPECT_EQ(1, difference);
+ } else if (post_ip.family() == AF_INET6) {
+ in6_addr post_ip6 = post_ip.ipv6_address();
+ in6_addr pre_ip6 = pre_ip.ipv6_address();
+ uint32* post_as_ints = reinterpret_cast<uint32*>(&post_ip6.s6_addr);
+ uint32* pre_as_ints = reinterpret_cast<uint32*>(&pre_ip6.s6_addr);
+ EXPECT_EQ(post_as_ints[3], pre_as_ints[3] + 1);
+ }
+ }
+
+ void BasicTest(const SocketAddress& initial_addr) {
+ AsyncSocket* socket = ss_->CreateAsyncSocket(initial_addr.family(),
+ SOCK_DGRAM);
+ socket->Bind(initial_addr);
+ SocketAddress server_addr = socket->GetLocalAddress();
+ // Make sure VSS didn't switch families on us.
+ EXPECT_EQ(server_addr.family(), initial_addr.family());
+
+ TestClient* client1 = new TestClient(new AsyncUDPSocket(socket));
+ AsyncSocket* socket2 =
+ ss_->CreateAsyncSocket(initial_addr.family(), SOCK_DGRAM);
+ TestClient* client2 = new TestClient(new AsyncUDPSocket(socket2));
+
+ SocketAddress client2_addr;
+ EXPECT_EQ(3, client2->SendTo("foo", 3, server_addr));
+ EXPECT_TRUE(client1->CheckNextPacket("foo", 3, &client2_addr));
+
+ SocketAddress client1_addr;
+ EXPECT_EQ(6, client1->SendTo("bizbaz", 6, client2_addr));
+ EXPECT_TRUE(client2->CheckNextPacket("bizbaz", 6, &client1_addr));
+ EXPECT_EQ(client1_addr, server_addr);
+
+ SocketAddress empty = EmptySocketAddressWithFamily(initial_addr.family());
+ for (int i = 0; i < 10; i++) {
+ client2 = new TestClient(AsyncUDPSocket::Create(ss_, empty));
+
+ SocketAddress next_client2_addr;
+ EXPECT_EQ(3, client2->SendTo("foo", 3, server_addr));
+ EXPECT_TRUE(client1->CheckNextPacket("foo", 3, &next_client2_addr));
+ CheckAddressIncrementalization(next_client2_addr, client2_addr);
+ // EXPECT_EQ(next_client2_addr.port(), client2_addr.port() + 1);
+
+ SocketAddress server_addr2;
+ EXPECT_EQ(6, client1->SendTo("bizbaz", 6, next_client2_addr));
+ EXPECT_TRUE(client2->CheckNextPacket("bizbaz", 6, &server_addr2));
+ EXPECT_EQ(server_addr2, server_addr);
+
+ client2_addr = next_client2_addr;
+ }
+ }
+
+ // initial_addr should be made from either INADDR_ANY or in6addr_any.
+ void ConnectTest(const SocketAddress& initial_addr) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+ const SocketAddress kEmptyAddr =
+ EmptySocketAddressWithFamily(initial_addr.family());
+
+ // Create client
+ AsyncSocket* client = ss_->CreateAsyncSocket(initial_addr.family(),
+ SOCK_STREAM);
+ sink.Monitor(client);
+ EXPECT_EQ(client->GetState(), AsyncSocket::CS_CLOSED);
+ EXPECT_TRUE(client->GetLocalAddress().IsNil());
+
+ // Create server
+ AsyncSocket* server = ss_->CreateAsyncSocket(initial_addr.family(),
+ SOCK_STREAM);
+ sink.Monitor(server);
+ EXPECT_NE(0, server->Listen(5)); // Bind required
+ EXPECT_EQ(0, server->Bind(initial_addr));
+ EXPECT_EQ(server->GetLocalAddress().family(), initial_addr.family());
+ EXPECT_EQ(0, server->Listen(5));
+ EXPECT_EQ(server->GetState(), AsyncSocket::CS_CONNECTING);
+
+ // No pending server connections
+ EXPECT_FALSE(sink.Check(server, testing::SSE_READ));
+ EXPECT_TRUE(NULL == server->Accept(&accept_addr));
+ EXPECT_EQ(AF_UNSPEC, accept_addr.family());
+
+ // Attempt connect to listening socket
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+ EXPECT_NE(client->GetLocalAddress(), kEmptyAddr); // Implicit Bind
+ EXPECT_NE(AF_UNSPEC, client->GetLocalAddress().family()); // Implicit Bind
+ EXPECT_NE(client->GetLocalAddress(), server->GetLocalAddress());
+
+ // Client is connecting
+ EXPECT_EQ(client->GetState(), AsyncSocket::CS_CONNECTING);
+ EXPECT_FALSE(sink.Check(client, testing::SSE_OPEN));
+ EXPECT_FALSE(sink.Check(client, testing::SSE_CLOSE));
+
+ ss_->ProcessMessagesUntilIdle();
+
+ // Client still connecting
+ EXPECT_EQ(client->GetState(), AsyncSocket::CS_CONNECTING);
+ EXPECT_FALSE(sink.Check(client, testing::SSE_OPEN));
+ EXPECT_FALSE(sink.Check(client, testing::SSE_CLOSE));
+
+ // Server has pending connection
+ EXPECT_TRUE(sink.Check(server, testing::SSE_READ));
+ Socket* accepted = server->Accept(&accept_addr);
+ EXPECT_TRUE(NULL != accepted);
+ EXPECT_NE(accept_addr, kEmptyAddr);
+ EXPECT_EQ(accepted->GetRemoteAddress(), accept_addr);
+
+ EXPECT_EQ(accepted->GetState(), AsyncSocket::CS_CONNECTED);
+ EXPECT_EQ(accepted->GetLocalAddress(), server->GetLocalAddress());
+ EXPECT_EQ(accepted->GetRemoteAddress(), client->GetLocalAddress());
+
+ ss_->ProcessMessagesUntilIdle();
+
+ // Client has connected
+ EXPECT_EQ(client->GetState(), AsyncSocket::CS_CONNECTED);
+ EXPECT_TRUE(sink.Check(client, testing::SSE_OPEN));
+ EXPECT_FALSE(sink.Check(client, testing::SSE_CLOSE));
+ EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress());
+ EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress());
+ }
+
+ void ConnectToNonListenerTest(const SocketAddress& initial_addr) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+ const SocketAddress nil_addr;
+ const SocketAddress empty_addr =
+ EmptySocketAddressWithFamily(initial_addr.family());
+
+ // Create client
+ AsyncSocket* client = ss_->CreateAsyncSocket(initial_addr.family(),
+ SOCK_STREAM);
+ sink.Monitor(client);
+
+ // Create server
+ AsyncSocket* server = ss_->CreateAsyncSocket(initial_addr.family(),
+ SOCK_STREAM);
+ sink.Monitor(server);
+ EXPECT_EQ(0, server->Bind(initial_addr));
+ EXPECT_EQ(server->GetLocalAddress().family(), initial_addr.family());
+ // Attempt connect to non-listening socket
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+
+ ss_->ProcessMessagesUntilIdle();
+
+ // No pending server connections
+ EXPECT_FALSE(sink.Check(server, testing::SSE_READ));
+ EXPECT_TRUE(NULL == server->Accept(&accept_addr));
+ EXPECT_EQ(accept_addr, nil_addr);
+
+ // Connection failed
+ EXPECT_EQ(client->GetState(), AsyncSocket::CS_CLOSED);
+ EXPECT_FALSE(sink.Check(client, testing::SSE_OPEN));
+ EXPECT_TRUE(sink.Check(client, testing::SSE_ERROR));
+ EXPECT_EQ(client->GetRemoteAddress(), nil_addr);
+ }
+
+ void CloseDuringConnectTest(const SocketAddress& initial_addr) {
+ testing::StreamSink sink;
+ SocketAddress accept_addr;
+ const SocketAddress empty_addr =
+ EmptySocketAddressWithFamily(initial_addr.family());
+
+ // Create client and server
+ scoped_ptr<AsyncSocket> client(ss_->CreateAsyncSocket(initial_addr.family(),
+ SOCK_STREAM));
+ sink.Monitor(client.get());
+ scoped_ptr<AsyncSocket> server(ss_->CreateAsyncSocket(initial_addr.family(),
+ SOCK_STREAM));
+ sink.Monitor(server.get());
+
+ // Initiate connect
+ EXPECT_EQ(0, server->Bind(initial_addr));
+ EXPECT_EQ(server->GetLocalAddress().family(), initial_addr.family());
+
+ EXPECT_EQ(0, server->Listen(5));
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+
+ // Server close before socket enters accept queue
+ EXPECT_FALSE(sink.Check(server.get(), testing::SSE_READ));
+ server->Close();
+
+ ss_->ProcessMessagesUntilIdle();
+
+ // Result: connection failed
+ EXPECT_EQ(client->GetState(), AsyncSocket::CS_CLOSED);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_ERROR));
+
+ server.reset(ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+
+ // Initiate connect
+ EXPECT_EQ(0, server->Bind(initial_addr));
+ EXPECT_EQ(server->GetLocalAddress().family(), initial_addr.family());
+
+ EXPECT_EQ(0, server->Listen(5));
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+
+ ss_->ProcessMessagesUntilIdle();
+
+ // Server close while socket is in accept queue
+ EXPECT_TRUE(sink.Check(server.get(), testing::SSE_READ));
+ server->Close();
+
+ ss_->ProcessMessagesUntilIdle();
+
+ // Result: connection failed
+ EXPECT_EQ(client->GetState(), AsyncSocket::CS_CLOSED);
+ EXPECT_TRUE(sink.Check(client.get(), testing::SSE_ERROR));
+
+ // New server
+ server.reset(ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM));
+ sink.Monitor(server.get());
+
+ // Initiate connect
+ EXPECT_EQ(0, server->Bind(initial_addr));
+ EXPECT_EQ(server->GetLocalAddress().family(), initial_addr.family());
+
+ EXPECT_EQ(0, server->Listen(5));
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+
+ ss_->ProcessMessagesUntilIdle();
+
+ // Server accepts connection
+ EXPECT_TRUE(sink.Check(server.get(), testing::SSE_READ));
+ scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ ASSERT_TRUE(NULL != accepted.get());
+ sink.Monitor(accepted.get());
+
+    // Client closes before the connection completes
+ EXPECT_EQ(accepted->GetState(), AsyncSocket::CS_CONNECTED);
+
+ // Connected message has not been processed yet.
+ EXPECT_EQ(client->GetState(), AsyncSocket::CS_CONNECTING);
+ client->Close();
+
+ ss_->ProcessMessagesUntilIdle();
+
+ // Result: accepted socket closes
+ EXPECT_EQ(accepted->GetState(), AsyncSocket::CS_CLOSED);
+ EXPECT_TRUE(sink.Check(accepted.get(), testing::SSE_CLOSE));
+ EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE));
+ }
+
+ void CloseTest(const SocketAddress& initial_addr) {
+ testing::StreamSink sink;
+ const SocketAddress kEmptyAddr;
+
+ // Create clients
+ AsyncSocket* a = ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM);
+ sink.Monitor(a);
+ a->Bind(initial_addr);
+ EXPECT_EQ(a->GetLocalAddress().family(), initial_addr.family());
+
+
+ scoped_ptr<AsyncSocket> b(ss_->CreateAsyncSocket(initial_addr.family(),
+ SOCK_STREAM));
+ sink.Monitor(b.get());
+ b->Bind(initial_addr);
+ EXPECT_EQ(b->GetLocalAddress().family(), initial_addr.family());
+
+ EXPECT_EQ(0, a->Connect(b->GetLocalAddress()));
+ EXPECT_EQ(0, b->Connect(a->GetLocalAddress()));
+
+ ss_->ProcessMessagesUntilIdle();
+
+ EXPECT_TRUE(sink.Check(a, testing::SSE_OPEN));
+ EXPECT_EQ(a->GetState(), AsyncSocket::CS_CONNECTED);
+ EXPECT_EQ(a->GetRemoteAddress(), b->GetLocalAddress());
+
+ EXPECT_TRUE(sink.Check(b.get(), testing::SSE_OPEN));
+ EXPECT_EQ(b->GetState(), AsyncSocket::CS_CONNECTED);
+ EXPECT_EQ(b->GetRemoteAddress(), a->GetLocalAddress());
+
+ EXPECT_EQ(1, a->Send("a", 1));
+ b->Close();
+ EXPECT_EQ(1, a->Send("b", 1));
+
+ ss_->ProcessMessagesUntilIdle();
+
+ char buffer[10];
+ EXPECT_FALSE(sink.Check(b.get(), testing::SSE_READ));
+ EXPECT_EQ(-1, b->Recv(buffer, 10));
+
+ EXPECT_TRUE(sink.Check(a, testing::SSE_CLOSE));
+ EXPECT_EQ(a->GetState(), AsyncSocket::CS_CLOSED);
+ EXPECT_EQ(a->GetRemoteAddress(), kEmptyAddr);
+
+ // No signal for Closer
+ EXPECT_FALSE(sink.Check(b.get(), testing::SSE_CLOSE));
+ EXPECT_EQ(b->GetState(), AsyncSocket::CS_CLOSED);
+ EXPECT_EQ(b->GetRemoteAddress(), kEmptyAddr);
+ }
+
+ void TcpSendTest(const SocketAddress& initial_addr) {
+ testing::StreamSink sink;
+ const SocketAddress kEmptyAddr;
+
+ // Connect two sockets
+ AsyncSocket* a = ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM);
+ sink.Monitor(a);
+ a->Bind(initial_addr);
+ EXPECT_EQ(a->GetLocalAddress().family(), initial_addr.family());
+
+ AsyncSocket* b = ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM);
+ sink.Monitor(b);
+ b->Bind(initial_addr);
+ EXPECT_EQ(b->GetLocalAddress().family(), initial_addr.family());
+
+ EXPECT_EQ(0, a->Connect(b->GetLocalAddress()));
+ EXPECT_EQ(0, b->Connect(a->GetLocalAddress()));
+
+ ss_->ProcessMessagesUntilIdle();
+
+ const size_t kBufferSize = 2000;
+ ss_->set_send_buffer_capacity(kBufferSize);
+ ss_->set_recv_buffer_capacity(kBufferSize);
+
+ const size_t kDataSize = 5000;
+ char send_buffer[kDataSize], recv_buffer[kDataSize];
+ for (size_t i = 0; i < kDataSize; ++i)
+ send_buffer[i] = static_cast<char>(i % 256);
+ memset(recv_buffer, 0, sizeof(recv_buffer));
+ size_t send_pos = 0, recv_pos = 0;
+
+ // Can't send more than send buffer in one write
+ int result = a->Send(send_buffer + send_pos, kDataSize - send_pos);
+ EXPECT_EQ(static_cast<int>(kBufferSize), result);
+ send_pos += result;
+
+ ss_->ProcessMessagesUntilIdle();
+ EXPECT_FALSE(sink.Check(a, testing::SSE_WRITE));
+ EXPECT_TRUE(sink.Check(b, testing::SSE_READ));
+
+ // Receive buffer is already filled, fill send buffer again
+ result = a->Send(send_buffer + send_pos, kDataSize - send_pos);
+ EXPECT_EQ(static_cast<int>(kBufferSize), result);
+ send_pos += result;
+
+ ss_->ProcessMessagesUntilIdle();
+ EXPECT_FALSE(sink.Check(a, testing::SSE_WRITE));
+ EXPECT_FALSE(sink.Check(b, testing::SSE_READ));
+
+ // No more room in send or receive buffer
+ result = a->Send(send_buffer + send_pos, kDataSize - send_pos);
+ EXPECT_EQ(-1, result);
+ EXPECT_TRUE(a->IsBlocking());
+
+ // Read a subset of the data
+ result = b->Recv(recv_buffer + recv_pos, 500);
+ EXPECT_EQ(500, result);
+ recv_pos += result;
+
+ ss_->ProcessMessagesUntilIdle();
+ EXPECT_TRUE(sink.Check(a, testing::SSE_WRITE));
+ EXPECT_TRUE(sink.Check(b, testing::SSE_READ));
+
+ // Room for more on the sending side
+ result = a->Send(send_buffer + send_pos, kDataSize - send_pos);
+ EXPECT_EQ(500, result);
+ send_pos += result;
+
+ // Empty the recv buffer
+ while (true) {
+ result = b->Recv(recv_buffer + recv_pos, kDataSize - recv_pos);
+ if (result < 0) {
+ EXPECT_EQ(-1, result);
+ EXPECT_TRUE(b->IsBlocking());
+ break;
+ }
+ recv_pos += result;
+ }
+
+ ss_->ProcessMessagesUntilIdle();
+ EXPECT_TRUE(sink.Check(b, testing::SSE_READ));
+
+ // Continue to empty the recv buffer
+ while (true) {
+ result = b->Recv(recv_buffer + recv_pos, kDataSize - recv_pos);
+ if (result < 0) {
+ EXPECT_EQ(-1, result);
+ EXPECT_TRUE(b->IsBlocking());
+ break;
+ }
+ recv_pos += result;
+ }
+
+ // Send last of the data
+ result = a->Send(send_buffer + send_pos, kDataSize - send_pos);
+ EXPECT_EQ(500, result);
+ send_pos += result;
+
+ ss_->ProcessMessagesUntilIdle();
+ EXPECT_TRUE(sink.Check(b, testing::SSE_READ));
+
+ // Receive the last of the data
+ while (true) {
+ result = b->Recv(recv_buffer + recv_pos, kDataSize - recv_pos);
+ if (result < 0) {
+ EXPECT_EQ(-1, result);
+ EXPECT_TRUE(b->IsBlocking());
+ break;
+ }
+ recv_pos += result;
+ }
+
+ ss_->ProcessMessagesUntilIdle();
+ EXPECT_FALSE(sink.Check(b, testing::SSE_READ));
+
+ // The received data matches the sent data
+ EXPECT_EQ(kDataSize, send_pos);
+ EXPECT_EQ(kDataSize, recv_pos);
+ EXPECT_EQ(0, memcmp(recv_buffer, send_buffer, kDataSize));
+ }
+
+ void TcpSendsPacketsInOrderTest(const SocketAddress& initial_addr) {
+ const SocketAddress kEmptyAddr;
+
+ // Connect two sockets
+ AsyncSocket* a = ss_->CreateAsyncSocket(initial_addr.family(),
+ SOCK_STREAM);
+ AsyncSocket* b = ss_->CreateAsyncSocket(initial_addr.family(),
+ SOCK_STREAM);
+ a->Bind(initial_addr);
+ EXPECT_EQ(a->GetLocalAddress().family(), initial_addr.family());
+
+ b->Bind(initial_addr);
+ EXPECT_EQ(b->GetLocalAddress().family(), initial_addr.family());
+
+ EXPECT_EQ(0, a->Connect(b->GetLocalAddress()));
+ EXPECT_EQ(0, b->Connect(a->GetLocalAddress()));
+ ss_->ProcessMessagesUntilIdle();
+
+ // First, deliver all packets in 0 ms.
+ char buffer[2] = { 0, 0 };
+ const char cNumPackets = 10;
+ for (char i = 0; i < cNumPackets; ++i) {
+ buffer[0] = '0' + i;
+ EXPECT_EQ(1, a->Send(buffer, 1));
+ }
+
+ ss_->ProcessMessagesUntilIdle();
+
+ for (char i = 0; i < cNumPackets; ++i) {
+ EXPECT_EQ(1, b->Recv(buffer, sizeof(buffer)));
+ EXPECT_EQ(static_cast<char>('0' + i), buffer[0]);
+ }
+
+ // Next, deliver packets at random intervals
+ const uint32 mean = 50;
+ const uint32 stddev = 50;
+
+ ss_->set_delay_mean(mean);
+ ss_->set_delay_stddev(stddev);
+ ss_->UpdateDelayDistribution();
+
+ for (char i = 0; i < cNumPackets; ++i) {
+ buffer[0] = 'A' + i;
+ EXPECT_EQ(1, a->Send(buffer, 1));
+ }
+
+ ss_->ProcessMessagesUntilIdle();
+
+ for (char i = 0; i < cNumPackets; ++i) {
+ EXPECT_EQ(1, b->Recv(buffer, sizeof(buffer)));
+ EXPECT_EQ(static_cast<char>('A' + i), buffer[0]);
+ }
+ }
+
+ void BandwidthTest(const SocketAddress& initial_addr) {
+ AsyncSocket* send_socket =
+ ss_->CreateAsyncSocket(initial_addr.family(), SOCK_DGRAM);
+ AsyncSocket* recv_socket =
+ ss_->CreateAsyncSocket(initial_addr.family(), SOCK_DGRAM);
+ ASSERT_EQ(0, send_socket->Bind(initial_addr));
+ ASSERT_EQ(0, recv_socket->Bind(initial_addr));
+ EXPECT_EQ(send_socket->GetLocalAddress().family(), initial_addr.family());
+ EXPECT_EQ(recv_socket->GetLocalAddress().family(), initial_addr.family());
+ ASSERT_EQ(0, send_socket->Connect(recv_socket->GetLocalAddress()));
+
+ uint32 bandwidth = 64 * 1024;
+ ss_->set_bandwidth(bandwidth);
+
+ Thread* pthMain = Thread::Current();
+ Sender sender(pthMain, send_socket, 80 * 1024);
+ Receiver receiver(pthMain, recv_socket, bandwidth);
+
+ pthMain->ProcessMessages(5000);
+ sender.done = true;
+ pthMain->ProcessMessages(5000);
+
+ ASSERT_TRUE(receiver.count >= 5 * 3 * bandwidth / 4);
+ ASSERT_TRUE(receiver.count <= 6 * bandwidth); // queue could drain for 1s
+
+ ss_->set_bandwidth(0);
+ }
+
+ void DelayTest(const SocketAddress& initial_addr) {
+ time_t seed = ::time(NULL);
+ LOG(LS_VERBOSE) << "seed = " << seed;
+ srand(static_cast<unsigned int>(seed));
+
+ const uint32 mean = 2000;
+ const uint32 stddev = 500;
+
+ ss_->set_delay_mean(mean);
+ ss_->set_delay_stddev(stddev);
+ ss_->UpdateDelayDistribution();
+
+ AsyncSocket* send_socket =
+ ss_->CreateAsyncSocket(initial_addr.family(), SOCK_DGRAM);
+ AsyncSocket* recv_socket =
+ ss_->CreateAsyncSocket(initial_addr.family(), SOCK_DGRAM);
+ ASSERT_EQ(0, send_socket->Bind(initial_addr));
+ ASSERT_EQ(0, recv_socket->Bind(initial_addr));
+ EXPECT_EQ(send_socket->GetLocalAddress().family(), initial_addr.family());
+ EXPECT_EQ(recv_socket->GetLocalAddress().family(), initial_addr.family());
+ ASSERT_EQ(0, send_socket->Connect(recv_socket->GetLocalAddress()));
+
+ Thread* pthMain = Thread::Current();
+ // Avg packet size is 2K, so at 200KB/s for 10s, we should see about
+ // 1000 packets, which is necessary to get a good distribution.
+ Sender sender(pthMain, send_socket, 100 * 2 * 1024);
+ Receiver receiver(pthMain, recv_socket, 0);
+
+ pthMain->ProcessMessages(10000);
+ sender.done = receiver.done = true;
+ ss_->ProcessMessagesUntilIdle();
+
+ const double sample_mean = receiver.sum / receiver.samples;
+ double num =
+ receiver.samples * receiver.sum_sq - receiver.sum * receiver.sum;
+ double den = receiver.samples * (receiver.samples - 1);
+ const double sample_stddev = sqrt(num / den);
+ LOG(LS_VERBOSE) << "mean=" << sample_mean << " stddev=" << sample_stddev;
+
+ EXPECT_LE(500u, receiver.samples);
+ // We initially used a 0.1 fudge factor, but on the build machine, we
+ // have seen the value differ by as much as 0.13.
+ EXPECT_NEAR(mean, sample_mean, 0.15 * mean);
+ EXPECT_NEAR(stddev, sample_stddev, 0.15 * stddev);
+
+ ss_->set_delay_mean(0);
+ ss_->set_delay_stddev(0);
+ ss_->UpdateDelayDistribution();
+ }
+
+ // Test cross-family communication between a client bound to client_addr and a
+ // server bound to server_addr. shouldSucceed indicates if communication is
+ // expected to work or not.
+ void CrossFamilyConnectionTest(const SocketAddress& client_addr,
+ const SocketAddress& server_addr,
+ bool shouldSucceed) {
+ testing::StreamSink sink;
+ SocketAddress accept_address;
+ const SocketAddress kEmptyAddr;
+
+    // Client is bound to client_addr
+ AsyncSocket* client = ss_->CreateAsyncSocket(client_addr.family(),
+ SOCK_STREAM);
+ sink.Monitor(client);
+ EXPECT_EQ(client->GetState(), AsyncSocket::CS_CLOSED);
+ EXPECT_EQ(client->GetLocalAddress(), kEmptyAddr);
+ client->Bind(client_addr);
+
+    // Server is bound to server_addr. Whether the client can reach it depends
+    // on the address families involved (see shouldSucceed).
+ AsyncSocket* server = ss_->CreateAsyncSocket(server_addr.family(),
+ SOCK_STREAM);
+ sink.Monitor(server);
+ server->Bind(server_addr);
+ server->Listen(5);
+
+ if (shouldSucceed) {
+ EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
+ ss_->ProcessMessagesUntilIdle();
+ EXPECT_TRUE(sink.Check(server, testing::SSE_READ));
+ Socket* accepted = server->Accept(&accept_address);
+ EXPECT_TRUE(NULL != accepted);
+ EXPECT_NE(kEmptyAddr, accept_address);
+ ss_->ProcessMessagesUntilIdle();
+ EXPECT_TRUE(sink.Check(client, testing::SSE_OPEN));
+ EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress());
+ } else {
+ // Check that the connection failed.
+ EXPECT_EQ(-1, client->Connect(server->GetLocalAddress()));
+ ss_->ProcessMessagesUntilIdle();
+
+ EXPECT_FALSE(sink.Check(server, testing::SSE_READ));
+ EXPECT_TRUE(NULL == server->Accept(&accept_address));
+ EXPECT_EQ(accept_address, kEmptyAddr);
+ EXPECT_EQ(client->GetState(), AsyncSocket::CS_CLOSED);
+ EXPECT_FALSE(sink.Check(client, testing::SSE_OPEN));
+ EXPECT_EQ(client->GetRemoteAddress(), kEmptyAddr);
+ }
+ }
+
+ // Test cross-family datagram sending between a client bound to client_addr
+ // and a server bound to server_addr. shouldSucceed indicates if sending is
+  // expected to succeed or not.
+ void CrossFamilyDatagramTest(const SocketAddress& client_addr,
+ const SocketAddress& server_addr,
+ bool shouldSucceed) {
+ AsyncSocket* socket = ss_->CreateAsyncSocket(SOCK_DGRAM);
+ socket->Bind(server_addr);
+ SocketAddress bound_server_addr = socket->GetLocalAddress();
+ TestClient* client1 = new TestClient(new AsyncUDPSocket(socket));
+
+ AsyncSocket* socket2 = ss_->CreateAsyncSocket(SOCK_DGRAM);
+ socket2->Bind(client_addr);
+ TestClient* client2 = new TestClient(new AsyncUDPSocket(socket2));
+ SocketAddress client2_addr;
+
+ if (shouldSucceed) {
+ EXPECT_EQ(3, client2->SendTo("foo", 3, bound_server_addr));
+ EXPECT_TRUE(client1->CheckNextPacket("foo", 3, &client2_addr));
+ SocketAddress client1_addr;
+ EXPECT_EQ(6, client1->SendTo("bizbaz", 6, client2_addr));
+ EXPECT_TRUE(client2->CheckNextPacket("bizbaz", 6, &client1_addr));
+ EXPECT_EQ(client1_addr, bound_server_addr);
+ } else {
+ EXPECT_EQ(-1, client2->SendTo("foo", 3, bound_server_addr));
+ EXPECT_FALSE(client1->CheckNextPacket("foo", 3, 0));
+ }
+ }
+
+ protected:
+ virtual void SetUp() {
+ Thread::Current()->set_socketserver(ss_);
+ }
+ virtual void TearDown() {
+ Thread::Current()->set_socketserver(NULL);
+ }
+
+ VirtualSocketServer* ss_;
+ const SocketAddress kIPv4AnyAddress;
+ const SocketAddress kIPv6AnyAddress;
+};
+
+TEST_F(VirtualSocketServerTest, basic_v4) {
+ SocketAddress ipv4_test_addr(IPAddress(INADDR_ANY), 5000);
+ BasicTest(ipv4_test_addr);
+}
+
+TEST_F(VirtualSocketServerTest, basic_v6) {
+ SocketAddress ipv6_test_addr(IPAddress(in6addr_any), 5000);
+ BasicTest(ipv6_test_addr);
+}
+
+TEST_F(VirtualSocketServerTest, connect_v4) {
+ ConnectTest(kIPv4AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, connect_v6) {
+ ConnectTest(kIPv6AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, connect_to_non_listener_v4) {
+ ConnectToNonListenerTest(kIPv4AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, connect_to_non_listener_v6) {
+ ConnectToNonListenerTest(kIPv6AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, close_during_connect_v4) {
+ CloseDuringConnectTest(kIPv4AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, close_during_connect_v6) {
+ CloseDuringConnectTest(kIPv6AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, close_v4) {
+ CloseTest(kIPv4AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, close_v6) {
+ CloseTest(kIPv6AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, tcp_send_v4) {
+ TcpSendTest(kIPv4AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, tcp_send_v6) {
+ TcpSendTest(kIPv6AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, TcpSendsPacketsInOrder_v4) {
+ TcpSendsPacketsInOrderTest(kIPv4AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, TcpSendsPacketsInOrder_v6) {
+ TcpSendsPacketsInOrderTest(kIPv6AnyAddress);
+}
+
+TEST_F(VirtualSocketServerTest, bandwidth_v4) {
+ SocketAddress ipv4_test_addr(IPAddress(INADDR_ANY), 1000);
+ BandwidthTest(ipv4_test_addr);
+}
+
+TEST_F(VirtualSocketServerTest, bandwidth_v6) {
+ SocketAddress ipv6_test_addr(IPAddress(in6addr_any), 1000);
+ BandwidthTest(ipv6_test_addr);
+}
+
+TEST_F(VirtualSocketServerTest, delay_v4) {
+ SocketAddress ipv4_test_addr(IPAddress(INADDR_ANY), 1000);
+ DelayTest(ipv4_test_addr);
+}
+
+// See: https://code.google.com/p/webrtc/issues/detail?id=2409
+TEST_F(VirtualSocketServerTest, DISABLED_delay_v6) {
+ SocketAddress ipv6_test_addr(IPAddress(in6addr_any), 1000);
+ DelayTest(ipv6_test_addr);
+}
+
+// Works, receiving socket sees 127.0.0.2.
+TEST_F(VirtualSocketServerTest, CanConnectFromMappedIPv6ToIPv4Any) {
+ CrossFamilyConnectionTest(SocketAddress("::ffff:127.0.0.2", 0),
+ SocketAddress("0.0.0.0", 5000),
+ true);
+}
+
+// Fails.
+TEST_F(VirtualSocketServerTest, CantConnectFromUnMappedIPv6ToIPv4Any) {
+ CrossFamilyConnectionTest(SocketAddress("::2", 0),
+ SocketAddress("0.0.0.0", 5000),
+ false);
+}
+
+// Fails.
+TEST_F(VirtualSocketServerTest, CantConnectFromUnMappedIPv6ToMappedIPv6) {
+ CrossFamilyConnectionTest(SocketAddress("::2", 0),
+ SocketAddress("::ffff:127.0.0.1", 5000),
+ false);
+}
+
+// Works. receiving socket sees ::ffff:127.0.0.2.
+TEST_F(VirtualSocketServerTest, CanConnectFromIPv4ToIPv6Any) {
+ CrossFamilyConnectionTest(SocketAddress("127.0.0.2", 0),
+ SocketAddress("::", 5000),
+ true);
+}
+
+// Fails.
+TEST_F(VirtualSocketServerTest, CantConnectFromIPv4ToUnMappedIPv6) {
+ CrossFamilyConnectionTest(SocketAddress("127.0.0.2", 0),
+ SocketAddress("::1", 5000),
+ false);
+}
+
+// Works. Receiving socket sees ::ffff:127.0.0.1.
+TEST_F(VirtualSocketServerTest, CanConnectFromIPv4ToMappedIPv6) {
+ CrossFamilyConnectionTest(SocketAddress("127.0.0.1", 0),
+ SocketAddress("::ffff:127.0.0.2", 5000),
+ true);
+}
+
+// Works, receiving socket sees a result from GetNextIP.
+TEST_F(VirtualSocketServerTest, CanConnectFromUnboundIPv6ToIPv4Any) {
+ CrossFamilyConnectionTest(SocketAddress("::", 0),
+ SocketAddress("0.0.0.0", 5000),
+ true);
+}
+
+// Works, receiving socket sees whatever GetNextIP gave the client.
+TEST_F(VirtualSocketServerTest, CanConnectFromUnboundIPv4ToIPv6Any) {
+ CrossFamilyConnectionTest(SocketAddress("0.0.0.0", 0),
+ SocketAddress("::", 5000),
+ true);
+}
+
+TEST_F(VirtualSocketServerTest, CanSendDatagramFromUnboundIPv4ToIPv6Any) {
+ CrossFamilyDatagramTest(SocketAddress("0.0.0.0", 0),
+ SocketAddress("::", 5000),
+ true);
+}
+
+TEST_F(VirtualSocketServerTest, CanSendDatagramFromMappedIPv6ToIPv4Any) {
+ CrossFamilyDatagramTest(SocketAddress("::ffff:127.0.0.1", 0),
+ SocketAddress("0.0.0.0", 5000),
+ true);
+}
+
+TEST_F(VirtualSocketServerTest, CantSendDatagramFromUnMappedIPv6ToIPv4Any) {
+ CrossFamilyDatagramTest(SocketAddress("::2", 0),
+ SocketAddress("0.0.0.0", 5000),
+ false);
+}
+
+TEST_F(VirtualSocketServerTest, CantSendDatagramFromUnMappedIPv6ToMappedIPv6) {
+ CrossFamilyDatagramTest(SocketAddress("::2", 0),
+ SocketAddress("::ffff:127.0.0.1", 5000),
+ false);
+}
+
+TEST_F(VirtualSocketServerTest, CanSendDatagramFromIPv4ToIPv6Any) {
+ CrossFamilyDatagramTest(SocketAddress("127.0.0.2", 0),
+ SocketAddress("::", 5000),
+ true);
+}
+
+TEST_F(VirtualSocketServerTest, CantSendDatagramFromIPv4ToUnMappedIPv6) {
+ CrossFamilyDatagramTest(SocketAddress("127.0.0.2", 0),
+ SocketAddress("::1", 5000),
+ false);
+}
+
+TEST_F(VirtualSocketServerTest, CanSendDatagramFromIPv4ToMappedIPv6) {
+ CrossFamilyDatagramTest(SocketAddress("127.0.0.1", 0),
+ SocketAddress("::ffff:127.0.0.2", 5000),
+ true);
+}
+
+TEST_F(VirtualSocketServerTest, CanSendDatagramFromUnboundIPv6ToIPv4Any) {
+ CrossFamilyDatagramTest(SocketAddress("::", 0),
+ SocketAddress("0.0.0.0", 5000),
+ true);
+}
+
+TEST_F(VirtualSocketServerTest, CreatesStandardDistribution) {
+ const uint32 kTestMean[] = { 10, 100, 333, 1000 };
+ const double kTestDev[] = { 0.25, 0.1, 0.01 };
+ // TODO: The current code only works for 1000 data points or more.
+ const uint32 kTestSamples[] = { /*10, 100,*/ 1000 };
+ for (size_t midx = 0; midx < ARRAY_SIZE(kTestMean); ++midx) {
+ for (size_t didx = 0; didx < ARRAY_SIZE(kTestDev); ++didx) {
+ for (size_t sidx = 0; sidx < ARRAY_SIZE(kTestSamples); ++sidx) {
+ ASSERT_LT(0u, kTestSamples[sidx]);
+ const uint32 kStdDev =
+ static_cast<uint32>(kTestDev[didx] * kTestMean[midx]);
+ VirtualSocketServer::Function* f =
+ VirtualSocketServer::CreateDistribution(kTestMean[midx],
+ kStdDev,
+ kTestSamples[sidx]);
+ ASSERT_TRUE(NULL != f);
+ ASSERT_EQ(kTestSamples[sidx], f->size());
+ double sum = 0;
+ for (uint32 i = 0; i < f->size(); ++i) {
+ sum += (*f)[i].second;
+ }
+ const double mean = sum / f->size();
+ double sum_sq_dev = 0;
+ for (uint32 i = 0; i < f->size(); ++i) {
+ double dev = (*f)[i].second - mean;
+ sum_sq_dev += dev * dev;
+ }
+ const double stddev = sqrt(sum_sq_dev / f->size());
+ EXPECT_NEAR(kTestMean[midx], mean, 0.1 * kTestMean[midx])
+ << "M=" << kTestMean[midx]
+ << " SD=" << kStdDev
+ << " N=" << kTestSamples[sidx];
+ EXPECT_NEAR(kStdDev, stddev, 0.1 * kStdDev)
+ << "M=" << kTestMean[midx]
+ << " SD=" << kStdDev
+ << " N=" << kTestSamples[sidx];
+ delete f;
+ }
+ }
+ }
+}
diff --git a/chromium/third_party/webrtc/base/virtualsocketserver.cc b/chromium/third_party/webrtc/base/virtualsocketserver.cc
new file mode 100644
index 00000000000..f8e8ddeb811
--- /dev/null
+++ b/chromium/third_party/webrtc/base/virtualsocketserver.cc
@@ -0,0 +1,1101 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/virtualsocketserver.h"
+
+#include <errno.h>
+#include <math.h>
+
+#include <algorithm>
+#include <map>
+#include <vector>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/socketaddresspair.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+#if defined(WEBRTC_WIN)
+const in_addr kInitialNextIPv4 = { {0x01, 0, 0, 0} };
+#else
+// This value is entirely arbitrary, hence the lack of concern about endianness.
+const in_addr kInitialNextIPv4 = { 0x01000000 };
+#endif
+// Starts at ::2 so as to not cause confusion with ::1.
+const in6_addr kInitialNextIPv6 = { { {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2
+ } } };
+
+const uint16 kFirstEphemeralPort = 49152;
+const uint16 kLastEphemeralPort = 65535;
+const uint16 kEphemeralPortCount = kLastEphemeralPort - kFirstEphemeralPort + 1;
+const uint32 kDefaultNetworkCapacity = 64 * 1024;
+const uint32 kDefaultTcpBufferSize = 32 * 1024;
+
+const uint32 UDP_HEADER_SIZE = 28; // IP + UDP headers
+const uint32 TCP_HEADER_SIZE = 40; // IP + TCP headers
+const uint32 TCP_MSS = 1400; // Maximum segment size
+
+// Note: The current algorithm doesn't work for sample sizes smaller than this.
+const int NUM_SAMPLES = 1000;
+
+enum {
+ MSG_ID_PACKET,
+ MSG_ID_CONNECT,
+ MSG_ID_DISCONNECT,
+};
+
+// Packets are passed between sockets as messages. We copy the data just like
+// the kernel does.
+class Packet : public MessageData {
+ public:
+ Packet(const char* data, size_t size, const SocketAddress& from)
+ : size_(size), consumed_(0), from_(from) {
+ ASSERT(NULL != data);
+ data_ = new char[size_];
+ memcpy(data_, data, size_);
+ }
+
+ virtual ~Packet() {
+ delete[] data_;
+ }
+
+ const char* data() const { return data_ + consumed_; }
+ size_t size() const { return size_ - consumed_; }
+ const SocketAddress& from() const { return from_; }
+
+ // Remove the first size bytes from the data.
+ void Consume(size_t size) {
+ ASSERT(size + consumed_ < size_);
+ consumed_ += size;
+ }
+
+ private:
+ char* data_;
+ size_t size_, consumed_;
+ SocketAddress from_;
+};
+
+struct MessageAddress : public MessageData {
+ explicit MessageAddress(const SocketAddress& a) : addr(a) { }
+ SocketAddress addr;
+};
+
+// Implements the socket interface using the virtual network. Packets are
+// passed as messages using the message queue of the socket server.
+class VirtualSocket : public AsyncSocket, public MessageHandler {
+ public:
+ VirtualSocket(VirtualSocketServer* server, int family, int type, bool async)
+ : server_(server), family_(family), type_(type), async_(async),
+ state_(CS_CLOSED), error_(0), listen_queue_(NULL),
+ write_enabled_(false),
+ network_size_(0), recv_buffer_size_(0), bound_(false), was_any_(false) {
+ ASSERT((type_ == SOCK_DGRAM) || (type_ == SOCK_STREAM));
+ ASSERT(async_ || (type_ != SOCK_STREAM)); // We only support async streams
+ }
+
+ virtual ~VirtualSocket() {
+ Close();
+
+ for (RecvBuffer::iterator it = recv_buffer_.begin();
+ it != recv_buffer_.end(); ++it) {
+ delete *it;
+ }
+ }
+
+ virtual SocketAddress GetLocalAddress() const {
+ return local_addr_;
+ }
+
+ virtual SocketAddress GetRemoteAddress() const {
+ return remote_addr_;
+ }
+
+ // Used by server sockets to set the local address without binding.
+ void SetLocalAddress(const SocketAddress& addr) {
+ local_addr_ = addr;
+ }
+
+ virtual int Bind(const SocketAddress& addr) {
+ if (!local_addr_.IsNil()) {
+ error_ = EINVAL;
+ return -1;
+ }
+ local_addr_ = addr;
+ int result = server_->Bind(this, &local_addr_);
+ if (result != 0) {
+ local_addr_.Clear();
+ error_ = EADDRINUSE;
+ } else {
+ bound_ = true;
+ was_any_ = addr.IsAnyIP();
+ }
+ return result;
+ }
+
+ virtual int Connect(const SocketAddress& addr) {
+ return InitiateConnect(addr, true);
+ }
+
+ virtual int Close() {
+ if (!local_addr_.IsNil() && bound_) {
+ // Remove from the binding table.
+ server_->Unbind(local_addr_, this);
+ bound_ = false;
+ }
+
+ if (SOCK_STREAM == type_) {
+ // Cancel pending sockets
+ if (listen_queue_) {
+ while (!listen_queue_->empty()) {
+ SocketAddress addr = listen_queue_->front();
+
+ // Disconnect listening socket.
+ server_->Disconnect(server_->LookupBinding(addr));
+ listen_queue_->pop_front();
+ }
+ delete listen_queue_;
+ listen_queue_ = NULL;
+ }
+ // Disconnect stream sockets
+ if (CS_CONNECTED == state_) {
+ // Disconnect remote socket, check if it is a child of a server socket.
+ VirtualSocket* socket =
+ server_->LookupConnection(local_addr_, remote_addr_);
+ if (!socket) {
+        // Not a child of a server socket, so check whether it is bound.
+        // TODO: If the remote end is a server socket with no children,
+        // this lookup finds the server socket itself and disconnects it,
+        // which might lead to unexpected results. How should this be fixed?
+ socket = server_->LookupBinding(remote_addr_);
+ }
+ server_->Disconnect(socket);
+
+ // Remove mapping for both directions.
+ server_->RemoveConnection(remote_addr_, local_addr_);
+ server_->RemoveConnection(local_addr_, remote_addr_);
+ }
+ // Cancel potential connects
+ MessageList msgs;
+ if (server_->msg_queue_) {
+ server_->msg_queue_->Clear(this, MSG_ID_CONNECT, &msgs);
+ }
+ for (MessageList::iterator it = msgs.begin(); it != msgs.end(); ++it) {
+ ASSERT(NULL != it->pdata);
+ MessageAddress* data = static_cast<MessageAddress*>(it->pdata);
+
+ // Lookup remote side.
+ VirtualSocket* socket = server_->LookupConnection(local_addr_,
+ data->addr);
+ if (socket) {
+          // Server socket, remote side is a socket retrieved by
+ // accept. Accepted sockets are not bound so we will not
+ // find it by looking in the bindings table.
+ server_->Disconnect(socket);
+ server_->RemoveConnection(local_addr_, data->addr);
+ } else {
+ server_->Disconnect(server_->LookupBinding(data->addr));
+ }
+ delete data;
+ }
+ // Clear incoming packets and disconnect messages
+ if (server_->msg_queue_) {
+ server_->msg_queue_->Clear(this);
+ }
+ }
+
+ state_ = CS_CLOSED;
+ local_addr_.Clear();
+ remote_addr_.Clear();
+ return 0;
+ }
+
+ virtual int Send(const void *pv, size_t cb) {
+ if (CS_CONNECTED != state_) {
+ error_ = ENOTCONN;
+ return -1;
+ }
+ if (SOCK_DGRAM == type_) {
+ return SendUdp(pv, cb, remote_addr_);
+ } else {
+ return SendTcp(pv, cb);
+ }
+ }
+
+ virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr) {
+ if (SOCK_DGRAM == type_) {
+ return SendUdp(pv, cb, addr);
+ } else {
+ if (CS_CONNECTED != state_) {
+ error_ = ENOTCONN;
+ return -1;
+ }
+ return SendTcp(pv, cb);
+ }
+ }
+
+ virtual int Recv(void *pv, size_t cb) {
+ SocketAddress addr;
+ return RecvFrom(pv, cb, &addr);
+ }
+
+ virtual int RecvFrom(void *pv, size_t cb, SocketAddress *paddr) {
+ // If we don't have a packet, then either error or wait for one to arrive.
+ if (recv_buffer_.empty()) {
+ if (async_) {
+ error_ = EAGAIN;
+ return -1;
+ }
+ while (recv_buffer_.empty()) {
+ Message msg;
+ server_->msg_queue_->Get(&msg);
+ server_->msg_queue_->Dispatch(&msg);
+ }
+ }
+
+ // Return the packet at the front of the queue.
+ Packet* packet = recv_buffer_.front();
+ size_t data_read = _min(cb, packet->size());
+ memcpy(pv, packet->data(), data_read);
+ *paddr = packet->from();
+
+ if (data_read < packet->size()) {
+ packet->Consume(data_read);
+ } else {
+ recv_buffer_.pop_front();
+ delete packet;
+ }
+
+ if (SOCK_STREAM == type_) {
+ bool was_full = (recv_buffer_size_ == server_->recv_buffer_capacity_);
+ recv_buffer_size_ -= data_read;
+ if (was_full) {
+ VirtualSocket* sender = server_->LookupBinding(remote_addr_);
+ ASSERT(NULL != sender);
+ server_->SendTcp(sender);
+ }
+ }
+
+ return static_cast<int>(data_read);
+ }
+
+ virtual int Listen(int backlog) {
+ ASSERT(SOCK_STREAM == type_);
+ ASSERT(CS_CLOSED == state_);
+ if (local_addr_.IsNil()) {
+ error_ = EINVAL;
+ return -1;
+ }
+ ASSERT(NULL == listen_queue_);
+ listen_queue_ = new ListenQueue;
+ state_ = CS_CONNECTING;
+ return 0;
+ }
+
+ virtual VirtualSocket* Accept(SocketAddress *paddr) {
+ if (NULL == listen_queue_) {
+ error_ = EINVAL;
+ return NULL;
+ }
+ while (!listen_queue_->empty()) {
+ VirtualSocket* socket = new VirtualSocket(server_, AF_INET, type_,
+ async_);
+
+ // Set the new local address to the same as this server socket.
+ socket->SetLocalAddress(local_addr_);
+ // Sockets made from a socket that 'was Any' need to inherit that.
+ socket->set_was_any(was_any_);
+ SocketAddress remote_addr(listen_queue_->front());
+ int result = socket->InitiateConnect(remote_addr, false);
+ listen_queue_->pop_front();
+ if (result != 0) {
+ delete socket;
+ continue;
+ }
+ socket->CompleteConnect(remote_addr, false);
+ if (paddr) {
+ *paddr = remote_addr;
+ }
+ return socket;
+ }
+ error_ = EWOULDBLOCK;
+ return NULL;
+ }
+
+ virtual int GetError() const {
+ return error_;
+ }
+
+ virtual void SetError(int error) {
+ error_ = error;
+ }
+
+ virtual ConnState GetState() const {
+ return state_;
+ }
+
+ virtual int GetOption(Option opt, int* value) {
+ OptionsMap::const_iterator it = options_map_.find(opt);
+ if (it == options_map_.end()) {
+ return -1;
+ }
+ *value = it->second;
+ return 0; // 0 is success to emulate getsockopt()
+ }
+
+ virtual int SetOption(Option opt, int value) {
+ options_map_[opt] = value;
+ return 0; // 0 is success to emulate setsockopt()
+ }
+
+  virtual int EstimateMTU(uint16* mtu) {
+    if (CS_CONNECTED != state_)
+      return ENOTCONN;
+    // The virtual network imposes no real MTU limit; report the largest value
+    // representable in the 16-bit out-parameter and return 0 for success.
+    *mtu = 65535;
+    return 0;
+  }
+
+ void OnMessage(Message *pmsg) {
+ if (pmsg->message_id == MSG_ID_PACKET) {
+ //ASSERT(!local_addr_.IsAny());
+ ASSERT(NULL != pmsg->pdata);
+ Packet* packet = static_cast<Packet*>(pmsg->pdata);
+
+ recv_buffer_.push_back(packet);
+
+ if (async_) {
+ SignalReadEvent(this);
+ }
+ } else if (pmsg->message_id == MSG_ID_CONNECT) {
+ ASSERT(NULL != pmsg->pdata);
+ MessageAddress* data = static_cast<MessageAddress*>(pmsg->pdata);
+ if (listen_queue_ != NULL) {
+ listen_queue_->push_back(data->addr);
+ if (async_) {
+ SignalReadEvent(this);
+ }
+ } else if ((SOCK_STREAM == type_) && (CS_CONNECTING == state_)) {
+ CompleteConnect(data->addr, true);
+ } else {
+ LOG(LS_VERBOSE) << "Socket at " << local_addr_ << " is not listening";
+ server_->Disconnect(server_->LookupBinding(data->addr));
+ }
+ delete data;
+ } else if (pmsg->message_id == MSG_ID_DISCONNECT) {
+ ASSERT(SOCK_STREAM == type_);
+ if (CS_CLOSED != state_) {
+ int error = (CS_CONNECTING == state_) ? ECONNREFUSED : 0;
+ state_ = CS_CLOSED;
+ remote_addr_.Clear();
+ if (async_) {
+ SignalCloseEvent(this, error);
+ }
+ }
+ } else {
+ ASSERT(false);
+ }
+ }
+
+ bool was_any() { return was_any_; }
+ void set_was_any(bool was_any) { was_any_ = was_any; }
+
+ private:
+ struct NetworkEntry {
+ size_t size;
+ uint32 done_time;
+ };
+
+ typedef std::deque<SocketAddress> ListenQueue;
+ typedef std::deque<NetworkEntry> NetworkQueue;
+ typedef std::vector<char> SendBuffer;
+ typedef std::list<Packet*> RecvBuffer;
+ typedef std::map<Option, int> OptionsMap;
+
+ int InitiateConnect(const SocketAddress& addr, bool use_delay) {
+ if (!remote_addr_.IsNil()) {
+ error_ = (CS_CONNECTED == state_) ? EISCONN : EINPROGRESS;
+ return -1;
+ }
+ if (local_addr_.IsNil()) {
+ // If there's no local address set, grab a random one in the correct AF.
+ int result = 0;
+ if (addr.ipaddr().family() == AF_INET) {
+ result = Bind(SocketAddress("0.0.0.0", 0));
+ } else if (addr.ipaddr().family() == AF_INET6) {
+ result = Bind(SocketAddress("::", 0));
+ }
+ if (result != 0) {
+ return result;
+ }
+ }
+ if (type_ == SOCK_DGRAM) {
+ remote_addr_ = addr;
+ state_ = CS_CONNECTED;
+ } else {
+ int result = server_->Connect(this, addr, use_delay);
+ if (result != 0) {
+ error_ = EHOSTUNREACH;
+ return -1;
+ }
+ state_ = CS_CONNECTING;
+ }
+ return 0;
+ }
+
+ void CompleteConnect(const SocketAddress& addr, bool notify) {
+ ASSERT(CS_CONNECTING == state_);
+ remote_addr_ = addr;
+ state_ = CS_CONNECTED;
+ server_->AddConnection(remote_addr_, local_addr_, this);
+ if (async_ && notify) {
+ SignalConnectEvent(this);
+ }
+ }
+
+ int SendUdp(const void* pv, size_t cb, const SocketAddress& addr) {
+ // If we have not been assigned a local port, then get one.
+ if (local_addr_.IsNil()) {
+ local_addr_ = EmptySocketAddressWithFamily(addr.ipaddr().family());
+ int result = server_->Bind(this, &local_addr_);
+ if (result != 0) {
+ local_addr_.Clear();
+ error_ = EADDRINUSE;
+ return result;
+ }
+ }
+
+ // Send the data in a message to the appropriate socket.
+ return server_->SendUdp(this, static_cast<const char*>(pv), cb, addr);
+ }
+
+ int SendTcp(const void* pv, size_t cb) {
+ size_t capacity = server_->send_buffer_capacity_ - send_buffer_.size();
+ if (0 == capacity) {
+ write_enabled_ = true;
+ error_ = EWOULDBLOCK;
+ return -1;
+ }
+ size_t consumed = _min(cb, capacity);
+ const char* cpv = static_cast<const char*>(pv);
+ send_buffer_.insert(send_buffer_.end(), cpv, cpv + consumed);
+ server_->SendTcp(this);
+ return static_cast<int>(consumed);
+ }
+
+ VirtualSocketServer* server_;
+ int family_;
+ int type_;
+ bool async_;
+ ConnState state_;
+ int error_;
+ SocketAddress local_addr_;
+ SocketAddress remote_addr_;
+
+ // Pending sockets which can be Accepted
+ ListenQueue* listen_queue_;
+
+ // Data which tcp has buffered for sending
+ SendBuffer send_buffer_;
+ bool write_enabled_;
+
+ // Critical section to protect the recv_buffer and queue_
+ CriticalSection crit_;
+
+ // Network model that enforces bandwidth and capacity constraints
+ NetworkQueue network_;
+ size_t network_size_;
+
+ // Data which has been received from the network
+ RecvBuffer recv_buffer_;
+ // The amount of data which is in flight or in recv_buffer_
+ size_t recv_buffer_size_;
+
+ // Is this socket bound?
+ bool bound_;
+
+ // When we bind a socket to Any, VSS's Bind gives it another address. For
+ // dual-stack sockets, we want to distinguish between sockets that were
+ // explicitly given a particular address and sockets that had one picked
+ // for them by VSS.
+ bool was_any_;
+
+ // Store the options that are set
+ OptionsMap options_map_;
+
+ friend class VirtualSocketServer;
+};
+
+VirtualSocketServer::VirtualSocketServer(SocketServer* ss)
+ : server_(ss), server_owned_(false), msg_queue_(NULL), stop_on_idle_(false),
+ network_delay_(Time()), next_ipv4_(kInitialNextIPv4),
+ next_ipv6_(kInitialNextIPv6), next_port_(kFirstEphemeralPort),
+ bindings_(new AddressMap()), connections_(new ConnectionMap()),
+ bandwidth_(0), network_capacity_(kDefaultNetworkCapacity),
+ send_buffer_capacity_(kDefaultTcpBufferSize),
+ recv_buffer_capacity_(kDefaultTcpBufferSize),
+ delay_mean_(0), delay_stddev_(0), delay_samples_(NUM_SAMPLES),
+ delay_dist_(NULL), drop_prob_(0.0) {
+ if (!server_) {
+ server_ = new PhysicalSocketServer();
+ server_owned_ = true;
+ }
+ UpdateDelayDistribution();
+}
+
+VirtualSocketServer::~VirtualSocketServer() {
+ delete bindings_;
+ delete connections_;
+ delete delay_dist_;
+ if (server_owned_) {
+ delete server_;
+ }
+}
+
+IPAddress VirtualSocketServer::GetNextIP(int family) {
+ if (family == AF_INET) {
+ IPAddress next_ip(next_ipv4_);
+ next_ipv4_.s_addr =
+ HostToNetwork32(NetworkToHost32(next_ipv4_.s_addr) + 1);
+ return next_ip;
+ } else if (family == AF_INET6) {
+ IPAddress next_ip(next_ipv6_);
+ uint32* as_ints = reinterpret_cast<uint32*>(&next_ipv6_.s6_addr);
+ as_ints[3] += 1;
+ return next_ip;
+ }
+ return IPAddress();
+}
+
+uint16 VirtualSocketServer::GetNextPort() {
+ uint16 port = next_port_;
+ if (next_port_ < kLastEphemeralPort) {
+ ++next_port_;
+ } else {
+ next_port_ = kFirstEphemeralPort;
+ }
+ return port;
+}
+
+Socket* VirtualSocketServer::CreateSocket(int type) {
+ return CreateSocket(AF_INET, type);
+}
+
+Socket* VirtualSocketServer::CreateSocket(int family, int type) {
+ return CreateSocketInternal(family, type);
+}
+
+AsyncSocket* VirtualSocketServer::CreateAsyncSocket(int type) {
+ return CreateAsyncSocket(AF_INET, type);
+}
+
+AsyncSocket* VirtualSocketServer::CreateAsyncSocket(int family, int type) {
+ return CreateSocketInternal(family, type);
+}
+
+VirtualSocket* VirtualSocketServer::CreateSocketInternal(int family, int type) {
+ return new VirtualSocket(this, family, type, true);
+}
+
+void VirtualSocketServer::SetMessageQueue(MessageQueue* msg_queue) {
+ msg_queue_ = msg_queue;
+ if (msg_queue_) {
+ msg_queue_->SignalQueueDestroyed.connect(this,
+ &VirtualSocketServer::OnMessageQueueDestroyed);
+ }
+}
+
+bool VirtualSocketServer::Wait(int cmsWait, bool process_io) {
+ ASSERT(msg_queue_ == Thread::Current());
+ if (stop_on_idle_ && Thread::Current()->empty()) {
+ return false;
+ }
+ return socketserver()->Wait(cmsWait, process_io);
+}
+
+void VirtualSocketServer::WakeUp() {
+ socketserver()->WakeUp();
+}
+
+bool VirtualSocketServer::ProcessMessagesUntilIdle() {
+ ASSERT(msg_queue_ == Thread::Current());
+ stop_on_idle_ = true;
+ while (!msg_queue_->empty()) {
+ Message msg;
+ if (msg_queue_->Get(&msg, kForever)) {
+ msg_queue_->Dispatch(&msg);
+ }
+ }
+ stop_on_idle_ = false;
+ return !msg_queue_->IsQuitting();
+}
+
+int VirtualSocketServer::Bind(VirtualSocket* socket,
+ const SocketAddress& addr) {
+ ASSERT(NULL != socket);
+ // Address must be completely specified at this point
+ ASSERT(!IPIsUnspec(addr.ipaddr()));
+ ASSERT(addr.port() != 0);
+
+ // Normalize the address (turns v6-mapped addresses into v4-addresses).
+ SocketAddress normalized(addr.ipaddr().Normalized(), addr.port());
+
+ AddressMap::value_type entry(normalized, socket);
+ return bindings_->insert(entry).second ? 0 : -1;
+}
+
+int VirtualSocketServer::Bind(VirtualSocket* socket, SocketAddress* addr) {
+ ASSERT(NULL != socket);
+
+ if (IPIsAny(addr->ipaddr())) {
+ addr->SetIP(GetNextIP(addr->ipaddr().family()));
+ } else if (!IPIsUnspec(addr->ipaddr())) {
+ addr->SetIP(addr->ipaddr().Normalized());
+ } else {
+ ASSERT(false);
+ }
+
+ if (addr->port() == 0) {
+ for (int i = 0; i < kEphemeralPortCount; ++i) {
+ addr->SetPort(GetNextPort());
+ if (bindings_->find(*addr) == bindings_->end()) {
+ break;
+ }
+ }
+ }
+
+ return Bind(socket, *addr);
+}
+
+VirtualSocket* VirtualSocketServer::LookupBinding(const SocketAddress& addr) {
+ SocketAddress normalized(addr.ipaddr().Normalized(),
+ addr.port());
+ AddressMap::iterator it = bindings_->find(normalized);
+ return (bindings_->end() != it) ? it->second : NULL;
+}
+
+int VirtualSocketServer::Unbind(const SocketAddress& addr,
+ VirtualSocket* socket) {
+ SocketAddress normalized(addr.ipaddr().Normalized(),
+ addr.port());
+ ASSERT((*bindings_)[normalized] == socket);
+ bindings_->erase(bindings_->find(normalized));
+ return 0;
+}
+
+void VirtualSocketServer::AddConnection(const SocketAddress& local,
+ const SocketAddress& remote,
+ VirtualSocket* remote_socket) {
+ // Add this socket pair to our routing table. This will allow
+ // multiple clients to connect to the same server address.
+ SocketAddress local_normalized(local.ipaddr().Normalized(),
+ local.port());
+ SocketAddress remote_normalized(remote.ipaddr().Normalized(),
+ remote.port());
+ SocketAddressPair address_pair(local_normalized, remote_normalized);
+ connections_->insert(std::pair<SocketAddressPair,
+ VirtualSocket*>(address_pair, remote_socket));
+}
+
+VirtualSocket* VirtualSocketServer::LookupConnection(
+ const SocketAddress& local,
+ const SocketAddress& remote) {
+ SocketAddress local_normalized(local.ipaddr().Normalized(),
+ local.port());
+ SocketAddress remote_normalized(remote.ipaddr().Normalized(),
+ remote.port());
+ SocketAddressPair address_pair(local_normalized, remote_normalized);
+ ConnectionMap::iterator it = connections_->find(address_pair);
+ return (connections_->end() != it) ? it->second : NULL;
+}
+
+void VirtualSocketServer::RemoveConnection(const SocketAddress& local,
+ const SocketAddress& remote) {
+ SocketAddress local_normalized(local.ipaddr().Normalized(),
+ local.port());
+ SocketAddress remote_normalized(remote.ipaddr().Normalized(),
+ remote.port());
+ SocketAddressPair address_pair(local_normalized, remote_normalized);
+ connections_->erase(address_pair);
+}
+
+static double Random() {
+ return static_cast<double>(rand()) / RAND_MAX;
+}
+
+int VirtualSocketServer::Connect(VirtualSocket* socket,
+ const SocketAddress& remote_addr,
+ bool use_delay) {
+ uint32 delay = use_delay ? GetRandomTransitDelay() : 0;
+ VirtualSocket* remote = LookupBinding(remote_addr);
+ if (!CanInteractWith(socket, remote)) {
+ LOG(LS_INFO) << "Address family mismatch between "
+ << socket->GetLocalAddress() << " and " << remote_addr;
+ return -1;
+ }
+ if (remote != NULL) {
+ SocketAddress addr = socket->GetLocalAddress();
+ msg_queue_->PostDelayed(delay, remote, MSG_ID_CONNECT,
+ new MessageAddress(addr));
+ } else {
+ LOG(LS_INFO) << "No one listening at " << remote_addr;
+ msg_queue_->PostDelayed(delay, socket, MSG_ID_DISCONNECT);
+ }
+ return 0;
+}
+
+bool VirtualSocketServer::Disconnect(VirtualSocket* socket) {
+ if (socket) {
+ // Remove the mapping.
+ msg_queue_->Post(socket, MSG_ID_DISCONNECT);
+ return true;
+ }
+ return false;
+}
+
+int VirtualSocketServer::SendUdp(VirtualSocket* socket,
+ const char* data, size_t data_size,
+ const SocketAddress& remote_addr) {
+ // See if we want to drop this packet.
+ if (Random() < drop_prob_) {
+ LOG(LS_VERBOSE) << "Dropping packet: bad luck";
+ return static_cast<int>(data_size);
+ }
+
+ VirtualSocket* recipient = LookupBinding(remote_addr);
+ if (!recipient) {
+ // Make a fake recipient for address family checking.
+ scoped_ptr<VirtualSocket> dummy_socket(
+ CreateSocketInternal(AF_INET, SOCK_DGRAM));
+ dummy_socket->SetLocalAddress(remote_addr);
+ if (!CanInteractWith(socket, dummy_socket.get())) {
+ LOG(LS_VERBOSE) << "Incompatible address families: "
+ << socket->GetLocalAddress() << " and " << remote_addr;
+ return -1;
+ }
+ LOG(LS_VERBOSE) << "No one listening at " << remote_addr;
+ return static_cast<int>(data_size);
+ }
+
+ if (!CanInteractWith(socket, recipient)) {
+ LOG(LS_VERBOSE) << "Incompatible address families: "
+ << socket->GetLocalAddress() << " and " << remote_addr;
+ return -1;
+ }
+
+ CritScope cs(&socket->crit_);
+
+ uint32 cur_time = Time();
+ PurgeNetworkPackets(socket, cur_time);
+
+ // Determine whether we have enough bandwidth to accept this packet. To do
+ // this, we need to update the send queue. Once we know it's current size,
+ // we know whether we can fit this packet.
+ //
+ // NOTE: There are better algorithms for maintaining such a queue (such as
+ // "Derivative Random Drop"); however, this algorithm is a more accurate
+ // simulation of what a normal network would do.
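+  //
+  // As a rough illustration with the defaults above: network_capacity_ is
+  // 64 KB and UDP_HEADER_SIZE is 28, so a 1000-byte payload occupies 1028
+  // bytes of queue space and roughly 63 such packets can be in flight from
+  // one sender before packets start being dropped.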
+
+ size_t packet_size = data_size + UDP_HEADER_SIZE;
+ if (socket->network_size_ + packet_size > network_capacity_) {
+ LOG(LS_VERBOSE) << "Dropping packet: network capacity exceeded";
+ return static_cast<int>(data_size);
+ }
+
+ AddPacketToNetwork(socket, recipient, cur_time, data, data_size,
+ UDP_HEADER_SIZE, false);
+
+ return static_cast<int>(data_size);
+}
+
+void VirtualSocketServer::SendTcp(VirtualSocket* socket) {
+ // TCP can't send more data than will fill up the receiver's buffer.
+ // We track the data that is in the buffer plus data in flight using the
+ // recipient's recv_buffer_size_. Anything beyond that must be stored in the
+ // sender's buffer. We will trigger the buffered data to be sent when data
+ // is read from the recv_buffer.
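+  // (See VirtualSocket::RecvFrom: when a previously full receive buffer is
+  // drained, it looks up the sender and calls SendTcp again so the sender's
+  // buffered data can make progress.)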
+
+ // Lookup the local/remote pair in the connections table.
+ VirtualSocket* recipient = LookupConnection(socket->local_addr_,
+ socket->remote_addr_);
+ if (!recipient) {
+ LOG(LS_VERBOSE) << "Sending data to no one.";
+ return;
+ }
+
+ CritScope cs(&socket->crit_);
+
+ uint32 cur_time = Time();
+ PurgeNetworkPackets(socket, cur_time);
+
+ while (true) {
+ size_t available = recv_buffer_capacity_ - recipient->recv_buffer_size_;
+ size_t max_data_size = _min<size_t>(available, TCP_MSS - TCP_HEADER_SIZE);
+ size_t data_size = _min(socket->send_buffer_.size(), max_data_size);
+ if (0 == data_size)
+ break;
+
+ AddPacketToNetwork(socket, recipient, cur_time, &socket->send_buffer_[0],
+ data_size, TCP_HEADER_SIZE, true);
+ recipient->recv_buffer_size_ += data_size;
+
+ size_t new_buffer_size = socket->send_buffer_.size() - data_size;
+ // Avoid undefined access beyond the last element of the vector.
+ // This only happens when new_buffer_size is 0.
+ if (data_size < socket->send_buffer_.size()) {
+ // memmove is required for potentially overlapping source/destination.
+ memmove(&socket->send_buffer_[0], &socket->send_buffer_[data_size],
+ new_buffer_size);
+ }
+ socket->send_buffer_.resize(new_buffer_size);
+ }
+
+ if (socket->write_enabled_
+ && (socket->send_buffer_.size() < send_buffer_capacity_)) {
+ socket->write_enabled_ = false;
+ socket->SignalWriteEvent(socket);
+ }
+}
+
+void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender,
+ VirtualSocket* recipient,
+ uint32 cur_time,
+ const char* data,
+ size_t data_size,
+ size_t header_size,
+ bool ordered) {
+ VirtualSocket::NetworkEntry entry;
+ entry.size = data_size + header_size;
+
+ sender->network_size_ += entry.size;
+ uint32 send_delay = SendDelay(static_cast<uint32>(sender->network_size_));
+ entry.done_time = cur_time + send_delay;
+ sender->network_.push_back(entry);
+
+ // Find the delay for crossing the many virtual hops of the network.
+ uint32 transit_delay = GetRandomTransitDelay();
+
+ // Post the packet as a message to be delivered (on our own thread)
+ Packet* p = new Packet(data, data_size, sender->local_addr_);
+ uint32 ts = TimeAfter(send_delay + transit_delay);
+ if (ordered) {
+ // Ensure that new packets arrive after previous ones
+ // TODO: consider ordering on a per-socket basis, since this
+    // introduces artificial delay.
+ ts = TimeMax(ts, network_delay_);
+ }
+ msg_queue_->PostAt(ts, recipient, MSG_ID_PACKET, p);
+ network_delay_ = TimeMax(ts, network_delay_);
+}
+
+void VirtualSocketServer::PurgeNetworkPackets(VirtualSocket* socket,
+ uint32 cur_time) {
+ while (!socket->network_.empty() &&
+ (socket->network_.front().done_time <= cur_time)) {
+ ASSERT(socket->network_size_ >= socket->network_.front().size);
+ socket->network_size_ -= socket->network_.front().size;
+ socket->network_.pop_front();
+ }
+}
+
+uint32 VirtualSocketServer::SendDelay(uint32 size) {
+ if (bandwidth_ == 0)
+ return 0;
+ else
+ return 1000 * size / bandwidth_;
+}
+
+#if 0
+void PrintFunction(std::vector<std::pair<double, double> >* f) {
+ return;
+ double sum = 0;
+ for (uint32 i = 0; i < f->size(); ++i) {
+ std::cout << (*f)[i].first << '\t' << (*f)[i].second << std::endl;
+ sum += (*f)[i].second;
+ }
+ if (!f->empty()) {
+ const double mean = sum / f->size();
+ double sum_sq_dev = 0;
+ for (uint32 i = 0; i < f->size(); ++i) {
+ double dev = (*f)[i].second - mean;
+ sum_sq_dev += dev * dev;
+ }
+ std::cout << "Mean = " << mean << " StdDev = "
+ << sqrt(sum_sq_dev / f->size()) << std::endl;
+ }
+}
+#endif // <unused>
+
+void VirtualSocketServer::UpdateDelayDistribution() {
+ Function* dist = CreateDistribution(delay_mean_, delay_stddev_,
+ delay_samples_);
+ // We take a lock just to make sure we don't leak memory.
+ {
+ CritScope cs(&delay_crit_);
+ delete delay_dist_;
+ delay_dist_ = dist;
+ }
+}
+
+static double PI = 4 * atan(1.0);
+
+static double Normal(double x, double mean, double stddev) {
+ double a = (x - mean) * (x - mean) / (2 * stddev * stddev);
+ return exp(-a) / (stddev * sqrt(2 * PI));
+}
+
+#if 0 // static unused gives a warning
+static double Pareto(double x, double min, double k) {
+ if (x < min)
+ return 0;
+ else
+ return k * std::pow(min, k) / std::pow(x, k+1);
+}
+#endif
+
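+// CreateDistribution (below) builds an inverse-CDF lookup table: the normal
+// PDF is sampled over roughly +/- 4 standard deviations, accumulated into a
+// CDF, inverted, and resampled uniformly on [0, 1]. GetRandomTransitDelay()
+// then picks a uniformly random entry from that table, which yields
+// (approximately) normally distributed transit delays.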
+VirtualSocketServer::Function* VirtualSocketServer::CreateDistribution(
+ uint32 mean, uint32 stddev, uint32 samples) {
+ Function* f = new Function();
+
+ if (0 == stddev) {
+ f->push_back(Point(mean, 1.0));
+ } else {
+ double start = 0;
+ if (mean >= 4 * static_cast<double>(stddev))
+ start = mean - 4 * static_cast<double>(stddev);
+ double end = mean + 4 * static_cast<double>(stddev);
+
+ for (uint32 i = 0; i < samples; i++) {
+ double x = start + (end - start) * i / (samples - 1);
+ double y = Normal(x, mean, stddev);
+ f->push_back(Point(x, y));
+ }
+ }
+ return Resample(Invert(Accumulate(f)), 0, 1, samples);
+}
+
+uint32 VirtualSocketServer::GetRandomTransitDelay() {
+ size_t index = rand() % delay_dist_->size();
+ double delay = (*delay_dist_)[index].second;
+ //LOG_F(LS_INFO) << "random[" << index << "] = " << delay;
+ return static_cast<uint32>(delay);
+}
+
+struct FunctionDomainCmp {
+ bool operator()(const VirtualSocketServer::Point& p1,
+ const VirtualSocketServer::Point& p2) {
+ return p1.first < p2.first;
+ }
+ bool operator()(double v1, const VirtualSocketServer::Point& p2) {
+ return v1 < p2.first;
+ }
+ bool operator()(const VirtualSocketServer::Point& p1, double v2) {
+ return p1.first < v2;
+ }
+};
+
+VirtualSocketServer::Function* VirtualSocketServer::Accumulate(Function* f) {
+ ASSERT(f->size() >= 1);
+ double v = 0;
+ for (Function::size_type i = 0; i < f->size() - 1; ++i) {
+ double dx = (*f)[i + 1].first - (*f)[i].first;
+ double avgy = ((*f)[i + 1].second + (*f)[i].second) / 2;
+ (*f)[i].second = v;
+ v = v + dx * avgy;
+ }
+ (*f)[f->size()-1].second = v;
+ return f;
+}
+
+VirtualSocketServer::Function* VirtualSocketServer::Invert(Function* f) {
+ for (Function::size_type i = 0; i < f->size(); ++i)
+ std::swap((*f)[i].first, (*f)[i].second);
+
+ std::sort(f->begin(), f->end(), FunctionDomainCmp());
+ return f;
+}
+
+VirtualSocketServer::Function* VirtualSocketServer::Resample(
+ Function* f, double x1, double x2, uint32 samples) {
+ Function* g = new Function();
+
+ for (size_t i = 0; i < samples; i++) {
+ double x = x1 + (x2 - x1) * i / (samples - 1);
+ double y = Evaluate(f, x);
+ g->push_back(Point(x, y));
+ }
+
+ delete f;
+ return g;
+}
+
+double VirtualSocketServer::Evaluate(Function* f, double x) {
+ Function::iterator iter =
+ std::lower_bound(f->begin(), f->end(), x, FunctionDomainCmp());
+ if (iter == f->begin()) {
+ return (*f)[0].second;
+ } else if (iter == f->end()) {
+ ASSERT(f->size() >= 1);
+ return (*f)[f->size() - 1].second;
+ } else if (iter->first == x) {
+ return iter->second;
+ } else {
+ double x1 = (iter - 1)->first;
+ double y1 = (iter - 1)->second;
+ double x2 = iter->first;
+ double y2 = iter->second;
+ return y1 + (y2 - y1) * (x - x1) / (x2 - x1);
+ }
+}
+
+bool VirtualSocketServer::CanInteractWith(VirtualSocket* local,
+ VirtualSocket* remote) {
+ if (!local || !remote) {
+ return false;
+ }
+ IPAddress local_ip = local->GetLocalAddress().ipaddr();
+ IPAddress remote_ip = remote->GetLocalAddress().ipaddr();
+ IPAddress local_normalized = local_ip.Normalized();
+ IPAddress remote_normalized = remote_ip.Normalized();
+ // Check if the addresses are the same family after Normalization (turns
+  // mapped IPv6 addresses into IPv4 addresses).
+ // This will stop unmapped V6 addresses from talking to mapped V6 addresses.
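+  // For example, ::ffff:127.0.0.1 normalizes to 127.0.0.1 and can therefore
+  // interact with a plain IPv4 socket, while an unmapped address such as ::2
+  // cannot.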
+ if (local_normalized.family() == remote_normalized.family()) {
+ return true;
+ }
+
+  // If the local IP is IPv4 and the remote IP is :: (and the remote socket is
+  // not IPV6_V6ONLY), the sockets can interact.
+ int remote_v6_only = 0;
+ remote->GetOption(Socket::OPT_IPV6_V6ONLY, &remote_v6_only);
+ if (local_ip.family() == AF_INET && !remote_v6_only && IPIsAny(remote_ip)) {
+ return true;
+ }
+ // Same check, backwards.
+ int local_v6_only = 0;
+ local->GetOption(Socket::OPT_IPV6_V6ONLY, &local_v6_only);
+ if (remote_ip.family() == AF_INET && !local_v6_only && IPIsAny(local_ip)) {
+ return true;
+ }
+
+ // Check to see if either socket was explicitly bound to IPv6-any.
+ // These sockets can talk with anyone.
+ if (local_ip.family() == AF_INET6 && local->was_any()) {
+ return true;
+ }
+ if (remote_ip.family() == AF_INET6 && remote->was_any()) {
+ return true;
+ }
+
+ return false;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/virtualsocketserver.h b/chromium/third_party/webrtc/base/virtualsocketserver.h
new file mode 100644
index 00000000000..87e35364cef
--- /dev/null
+++ b/chromium/third_party/webrtc/base/virtualsocketserver.h
@@ -0,0 +1,234 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_VIRTUALSOCKETSERVER_H_
+#define WEBRTC_BASE_VIRTUALSOCKETSERVER_H_
+
+#include <assert.h>
+
+#include <deque>
+#include <map>
+
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/socketserver.h"
+
+namespace rtc {
+
+class VirtualSocket;
+class SocketAddressPair;
+
+// Simulates a network in the same manner as a loopback interface. The
+// interface can create as many addresses as you want. All of the sockets
+// created by this network will be able to communicate with one another, unless
+// they are bound to addresses from incompatible families.
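+//
+// Typical test usage (a sketch; the unit tests give complete examples):
+//   VirtualSocketServer* vss = new VirtualSocketServer(NULL);
+//   Thread::Current()->set_socketserver(vss);
+//   // ... create sockets with vss->CreateAsyncSocket(...), run the test ...
+//   Thread::Current()->set_socketserver(NULL);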
+class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> {
+ public:
+ // TODO: Add "owned" parameter.
+ // If "owned" is set, the supplied socketserver will be deleted later.
+ explicit VirtualSocketServer(SocketServer* ss);
+ virtual ~VirtualSocketServer();
+
+ SocketServer* socketserver() { return server_; }
+
+ // Limits the network bandwidth (maximum bytes per second). Zero means that
+ // all sends occur instantly. Defaults to 0.
+ uint32 bandwidth() const { return bandwidth_; }
+ void set_bandwidth(uint32 bandwidth) { bandwidth_ = bandwidth; }
+
+ // Limits the amount of data which can be in flight on the network without
+ // packet loss (on a per sender basis). Defaults to 64 KB.
+ uint32 network_capacity() const { return network_capacity_; }
+ void set_network_capacity(uint32 capacity) {
+ network_capacity_ = capacity;
+ }
+
+ // The amount of data which can be buffered by tcp on the sender's side
+ uint32 send_buffer_capacity() const { return send_buffer_capacity_; }
+ void set_send_buffer_capacity(uint32 capacity) {
+ send_buffer_capacity_ = capacity;
+ }
+
+ // The amount of data which can be buffered by tcp on the receiver's side
+ uint32 recv_buffer_capacity() const { return recv_buffer_capacity_; }
+ void set_recv_buffer_capacity(uint32 capacity) {
+ recv_buffer_capacity_ = capacity;
+ }
+
+ // Controls the (transit) delay for packets sent in the network. This does
+  // not include the time required to sit in the send queue. Both of these
+ // values are measured in milliseconds. Defaults to no delay.
+ uint32 delay_mean() const { return delay_mean_; }
+ uint32 delay_stddev() const { return delay_stddev_; }
+ uint32 delay_samples() const { return delay_samples_; }
+ void set_delay_mean(uint32 delay_mean) { delay_mean_ = delay_mean; }
+ void set_delay_stddev(uint32 delay_stddev) {
+ delay_stddev_ = delay_stddev;
+ }
+ void set_delay_samples(uint32 delay_samples) {
+ delay_samples_ = delay_samples;
+ }
+
+ // If the (transit) delay parameters are modified, this method should be
+ // called to recompute the new distribution.
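+  // For example (sketch, with vss a VirtualSocketServer*):
+  //   vss->set_delay_mean(100);    // 100 ms average transit delay
+  //   vss->set_delay_stddev(25);   // 25 ms standard deviation
+  //   vss->UpdateDelayDistribution();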
+ void UpdateDelayDistribution();
+
+ // Controls the (uniform) probability that any sent packet is dropped. This
+ // is separate from calculations to drop based on queue size.
+ double drop_probability() { return drop_prob_; }
+ void set_drop_probability(double drop_prob) {
+ assert((0 <= drop_prob) && (drop_prob <= 1));
+ drop_prob_ = drop_prob;
+ }
+
+ // SocketFactory:
+ virtual Socket* CreateSocket(int type);
+ virtual Socket* CreateSocket(int family, int type);
+
+ virtual AsyncSocket* CreateAsyncSocket(int type);
+ virtual AsyncSocket* CreateAsyncSocket(int family, int type);
+
+ // SocketServer:
+ virtual void SetMessageQueue(MessageQueue* queue);
+ virtual bool Wait(int cms, bool process_io);
+ virtual void WakeUp();
+
+ typedef std::pair<double, double> Point;
+ typedef std::vector<Point> Function;
+
+ static Function* CreateDistribution(uint32 mean, uint32 stddev,
+ uint32 samples);
+
+ // Similar to Thread::ProcessMessages, but it only processes messages until
+ // there are no immediate messages or pending network traffic. Returns false
+ // if Thread::Stop() was called.
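+  // Tests typically call this after Connect()/Send() so that all queued
+  // virtual-network traffic has been delivered before assertions are made.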
+ bool ProcessMessagesUntilIdle();
+
+ protected:
+ // Returns a new IP not used before in this network.
+ IPAddress GetNextIP(int family);
+ uint16 GetNextPort();
+
+ VirtualSocket* CreateSocketInternal(int family, int type);
+
+  // Binds the given socket to addr, assigning an IP and port if necessary.
+ int Bind(VirtualSocket* socket, SocketAddress* addr);
+
+ // Binds the given socket to the given (fully-defined) address.
+ int Bind(VirtualSocket* socket, const SocketAddress& addr);
+
+ // Find the socket bound to the given address
+ VirtualSocket* LookupBinding(const SocketAddress& addr);
+
+ int Unbind(const SocketAddress& addr, VirtualSocket* socket);
+
+ // Adds a mapping between this socket pair and the socket.
+ void AddConnection(const SocketAddress& client,
+ const SocketAddress& server,
+ VirtualSocket* socket);
+
+ // Find the socket pair corresponding to this server address.
+ VirtualSocket* LookupConnection(const SocketAddress& client,
+ const SocketAddress& server);
+
+ void RemoveConnection(const SocketAddress& client,
+ const SocketAddress& server);
+
+ // Connects the given socket to the socket at the given address
+ int Connect(VirtualSocket* socket, const SocketAddress& remote_addr,
+ bool use_delay);
+
+ // Sends a disconnect message to the socket at the given address
+ bool Disconnect(VirtualSocket* socket);
+
+ // Sends the given packet to the socket at the given address (if one exists).
+ int SendUdp(VirtualSocket* socket, const char* data, size_t data_size,
+ const SocketAddress& remote_addr);
+
+ // Moves as much data as possible from the sender's buffer to the network
+ void SendTcp(VirtualSocket* socket);
+
+ // Places a packet on the network.
+ void AddPacketToNetwork(VirtualSocket* socket, VirtualSocket* recipient,
+ uint32 cur_time, const char* data, size_t data_size,
+ size_t header_size, bool ordered);
+
+ // Removes stale packets from the network
+ void PurgeNetworkPackets(VirtualSocket* socket, uint32 cur_time);
+
+ // Computes the number of milliseconds required to send a packet of this size.
+ uint32 SendDelay(uint32 size);
+
+ // Returns a random transit delay chosen from the appropriate distribution.
+ uint32 GetRandomTransitDelay();
+
+ // Basic operations on functions. Those that return a function also take
+ // ownership of the function given (and hence, may modify or delete it).
+ static Function* Accumulate(Function* f);
+ static Function* Invert(Function* f);
+ static Function* Resample(Function* f, double x1, double x2, uint32 samples);
+ static double Evaluate(Function* f, double x);
+
+ // NULL out our message queue if it goes away. Necessary in the case where
+ // our lifetime is greater than that of the thread we are using, since we
+ // try to send Close messages for all connected sockets when we shut down.
+ void OnMessageQueueDestroyed() { msg_queue_ = NULL; }
+
+ // Determine if two sockets should be able to communicate.
+ // We don't (currently) specify an address family for sockets; instead,
+ // the currently bound address is used to infer the address family.
+ // Any socket that is not explicitly bound to an IPv4 address is assumed to be
+ // dual-stack capable.
+ // This function tests if two addresses can communicate, as well as the
+ // sockets to which they may be bound (the addresses may or may not yet be
+ // bound to the sockets).
+ // First the addresses are tested (after normalization):
+ // If both have the same family, then communication is OK.
+ // If only one is IPv4 then false, unless the other is bound to ::.
+ // This applies even if the IPv4 address is 0.0.0.0.
+ // The socket arguments are optional; the sockets are checked to see if they
+ // were explicitly bound to IPv6-any ('::'), and if so communication is
+ // permitted.
+ // NB: This scheme doesn't permit non-dualstack IPv6 sockets.
+ static bool CanInteractWith(VirtualSocket* local, VirtualSocket* remote);
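+
+ // Applying the rules above to hypothetical sockets (illustration only):
+ //   CanInteractWith(bound_to_ipv6_any, bound_to_ipv4)       -> true
+ //   CanInteractWith(bound_to_ipv4, bound_to_ipv4)           -> true
+ //   CanInteractWith(bound_to_specific_ipv6, bound_to_ipv4)  -> false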
+
+ private:
+ friend class VirtualSocket;
+
+ typedef std::map<SocketAddress, VirtualSocket*> AddressMap;
+ typedef std::map<SocketAddressPair, VirtualSocket*> ConnectionMap;
+
+ SocketServer* server_;
+ bool server_owned_;
+ MessageQueue* msg_queue_;
+ bool stop_on_idle_;
+ uint32 network_delay_;
+ in_addr next_ipv4_;
+ in6_addr next_ipv6_;
+ uint16 next_port_;
+ AddressMap* bindings_;
+ ConnectionMap* connections_;
+
+ uint32 bandwidth_;
+ uint32 network_capacity_;
+ uint32 send_buffer_capacity_;
+ uint32 recv_buffer_capacity_;
+ uint32 delay_mean_;
+ uint32 delay_stddev_;
+ uint32 delay_samples_;
+ Function* delay_dist_;
+ CriticalSection delay_crit_;
+
+ double drop_prob_;
+ DISALLOW_EVIL_CONSTRUCTORS(VirtualSocketServer);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_VIRTUALSOCKETSERVER_H_
diff --git a/chromium/third_party/webrtc/base/win32.cc b/chromium/third_party/webrtc/base/win32.cc
new file mode 100644
index 00000000000..8f566122523
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32.cc
@@ -0,0 +1,456 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/win32.h"
+
+#include <winsock2.h>
+#include <ws2tcpip.h>
+#include <algorithm>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/byteorder.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+// Helper function declarations for inet_ntop/inet_pton.
+static const char* inet_ntop_v4(const void* src, char* dst, socklen_t size);
+static const char* inet_ntop_v6(const void* src, char* dst, socklen_t size);
+static int inet_pton_v4(const char* src, void* dst);
+static int inet_pton_v6(const char* src, void* dst);
+
+// Implementation of inet_ntop (create a printable representation of an
+// ip address). XP doesn't have its own inet_ntop, and
+// WSAAddressToString requires both IPv6 to be installed and for Winsock
+// to be initialized.
+const char* win32_inet_ntop(int af, const void *src,
+ char* dst, socklen_t size) {
+ if (!src || !dst) {
+ return NULL;
+ }
+ switch (af) {
+ case AF_INET: {
+ return inet_ntop_v4(src, dst, size);
+ }
+ case AF_INET6: {
+ return inet_ntop_v6(src, dst, size);
+ }
+ }
+ return NULL;
+}
+
+// As above, but for inet_pton. Implements inet_pton for v4 and v6.
+// Note that our inet_ntop will output normal 'dotted' v4 addresses only.
+int win32_inet_pton(int af, const char* src, void* dst) {
+ if (!src || !dst) {
+ return 0;
+ }
+ if (af == AF_INET) {
+ return inet_pton_v4(src, dst);
+ } else if (af == AF_INET6) {
+ return inet_pton_v6(src, dst);
+ }
+ return -1;
+}
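+
+// Illustrative usage sketch (|v4| and |printable| are hypothetical local
+// buffers):
+//   in_addr v4;
+//   if (win32_inet_pton(AF_INET, "192.168.0.1", &v4) == 1) {
+//     char printable[INET_ADDRSTRLEN];
+//     win32_inet_ntop(AF_INET, &v4, printable, sizeof(printable));
+//     // |printable| now holds "192.168.0.1".
+//   }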
+
+// Helper function for inet_ntop for IPv4 addresses.
+// Outputs "dotted-quad" decimal notation.
+const char* inet_ntop_v4(const void* src, char* dst, socklen_t size) {
+ if (size < INET_ADDRSTRLEN) {
+ return NULL;
+ }
+ const struct in_addr* as_in_addr =
+ reinterpret_cast<const struct in_addr*>(src);
+ rtc::sprintfn(dst, size, "%d.%d.%d.%d",
+ as_in_addr->S_un.S_un_b.s_b1,
+ as_in_addr->S_un.S_un_b.s_b2,
+ as_in_addr->S_un.S_un_b.s_b3,
+ as_in_addr->S_un.S_un_b.s_b4);
+ return dst;
+}
+
+// Helper function for inet_ntop for IPv6 addresses.
+const char* inet_ntop_v6(const void* src, char* dst, socklen_t size) {
+ if (size < INET6_ADDRSTRLEN) {
+ return NULL;
+ }
+ const uint16* as_shorts =
+ reinterpret_cast<const uint16*>(src);
+ int runpos[8];
+ int current = 1;
+ int max = 1;
+ int maxpos = -1;
+ int run_array_size = ARRAY_SIZE(runpos);
+ // Run over the address marking runs of 0s.
+ for (int i = 0; i < run_array_size; ++i) {
+ if (as_shorts[i] == 0) {
+ runpos[i] = current;
+ if (current > max) {
+ maxpos = i;
+ max = current;
+ }
+ ++current;
+ } else {
+ runpos[i] = -1;
+ current = 1;
+ }
+ }
+
+ if (max > 1) {
+ int tmpmax = maxpos;
+ // Run back through, setting -1 for all but the longest run.
+ for (int i = run_array_size - 1; i >= 0; i--) {
+ if (i > tmpmax) {
+ runpos[i] = -1;
+ } else if (runpos[i] == -1) {
+ // We're less than maxpos, we hit a -1, so the 'good' run is done.
+ // Setting tmpmax -1 means all remaining positions get set to -1.
+ tmpmax = -1;
+ }
+ }
+ }
+
+ char* cursor = dst;
+ // Print IPv4 compatible and IPv4 mapped addresses using the IPv4 helper.
+ // These addresses have an initial run of either ten zero-bytes followed
+ // by 0xFFFF, or an initial run of twelve zero-bytes.
+ if (runpos[0] == 1 && (maxpos == 5 ||
+ (maxpos == 4 && as_shorts[5] == 0xFFFF))) {
+ *cursor++ = ':';
+ *cursor++ = ':';
+ if (maxpos == 4) {
+ cursor += rtc::sprintfn(cursor, INET6_ADDRSTRLEN - 2, "ffff:");
+ }
+ const struct in_addr* as_v4 =
+ reinterpret_cast<const struct in_addr*>(&(as_shorts[6]));
+ inet_ntop_v4(as_v4, cursor,
+ static_cast<socklen_t>(INET6_ADDRSTRLEN - (cursor - dst)));
+ } else {
+ for (int i = 0; i < run_array_size; ++i) {
+ if (runpos[i] == -1) {
+ cursor += rtc::sprintfn(cursor,
+ INET6_ADDRSTRLEN - (cursor - dst),
+ "%x", NetworkToHost16(as_shorts[i]));
+ if (i != 7 && runpos[i + 1] != 1) {
+ *cursor++ = ':';
+ }
+ } else if (runpos[i] == 1) {
+ // Entered the run; print the colons and skip the run.
+ *cursor++ = ':';
+ *cursor++ = ':';
+ i += (max - 1);
+ }
+ }
+ }
+ return dst;
+}
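+
+// For illustration: the 16-byte address
+// 20 01 0d b8 00 00 00 00 00 00 00 00 00 00 00 01 is rendered by the routine
+// above as "2001:db8::1"; the longest run of zero shorts is collapsed to "::".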
+
+// Helper function for inet_pton for IPv4 addresses.
+// |src| points to a character string containing an IPv4 network address in
+// dotted-decimal format, "ddd.ddd.ddd.ddd", where ddd is a decimal number
+// of up to three digits in the range 0 to 255.
+// The address is converted and copied to dst,
+// which must be sizeof(struct in_addr) (4) bytes (32 bits) long.
+int inet_pton_v4(const char* src, void* dst) {
+ const int kIpv4AddressSize = 4;
+ int found = 0;
+ const char* src_pos = src;
+ unsigned char result[kIpv4AddressSize] = {0};
+
+ while (*src_pos != '\0') {
+ // strtol won't treat leading whitespace characters as an error,
+ // so check that the field starts with a digit before passing it to strtol.
+ if (!isdigit(*src_pos)) {
+ return 0;
+ }
+ char* end_pos;
+ long value = strtol(src_pos, &end_pos, 10);
+ if (value < 0 || value > 255 || src_pos == end_pos) {
+ return 0;
+ }
+ ++found;
+ if (found > kIpv4AddressSize) {
+ return 0;
+ }
+ result[found - 1] = static_cast<unsigned char>(value);
+ src_pos = end_pos;
+ if (*src_pos == '.') {
+ // There's more.
+ ++src_pos;
+ } else if (*src_pos != '\0') {
+ // If it's neither '.' nor '\0' then return fail.
+ return 0;
+ }
+ }
+ if (found != kIpv4AddressSize) {
+ return 0;
+ }
+ memcpy(dst, result, sizeof(result));
+ return 1;
+}
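+
+// For illustration: inet_pton_v4("10.0.0.255", dst) writes the bytes
+// {0x0A, 0x00, 0x00, 0xFF} to dst and returns 1, while inputs such as
+// "10.0.0.256" or "10.0.0" are rejected with a return value of 0.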
+
+// Helper function for inet_pton for IPv6 addresses.
+int inet_pton_v6(const char* src, void* dst) {
+ // sscanf will catch any other invalid characters, but it parses 0xnnnn as
+ // hex. Check for a literal 'x' in the input string.
+ const char* readcursor = src;
+ char c = *readcursor++;
+ while (c) {
+ if (c == 'x') {
+ return 0;
+ }
+ c = *readcursor++;
+ }
+ readcursor = src;
+
+ struct in6_addr an_addr;
+ memset(&an_addr, 0, sizeof(an_addr));
+
+ uint16* addr_cursor = reinterpret_cast<uint16*>(&an_addr.s6_addr[0]);
+ uint16* addr_end = reinterpret_cast<uint16*>(&an_addr.s6_addr[16]);
+ bool seencompressed = false;
+
+ // Addresses that start with "::" (i.e., a run of initial zeros) or
+ // "::ffff:" can potentially be IPv4 mapped or compatibility addresses.
+ // These have dotted-style IPv4 addresses on the end (e.g. "::192.168.7.1").
+ if (*readcursor == ':' && *(readcursor+1) == ':' &&
+ *(readcursor + 2) != 0) {
+ // Check for periods, which we'll take as a sign of v4 addresses.
+ const char* addrstart = readcursor + 2;
+ if (rtc::strchr(addrstart, ".")) {
+ const char* colon = rtc::strchr(addrstart, "::");
+ if (colon) {
+ uint16 a_short;
+ int bytesread = 0;
+ if (sscanf(addrstart, "%hx%n", &a_short, &bytesread) != 1 ||
+ a_short != 0xFFFF || bytesread != 4) {
+ // Colons + periods means it has to be ::ffff:a.b.c.d, but it wasn't.
+ return 0;
+ } else {
+ an_addr.s6_addr[10] = 0xFF;
+ an_addr.s6_addr[11] = 0xFF;
+ addrstart = colon + 1;
+ }
+ }
+ struct in_addr v4;
+ if (inet_pton_v4(addrstart, &v4.s_addr)) {
+ memcpy(&an_addr.s6_addr[12], &v4, sizeof(v4));
+ memcpy(dst, &an_addr, sizeof(an_addr));
+ return 1;
+ } else {
+ // Invalid v4 address.
+ return 0;
+ }
+ }
+ }
+
+ // For addresses without a trailing IPv4 component ('normal' IPv6 addresses).
+ while (*readcursor != 0 && addr_cursor < addr_end) {
+ if (*readcursor == ':') {
+ if (*(readcursor + 1) == ':') {
+ if (seencompressed) {
+ // Can only have one compressed run of zeroes ("::") per address.
+ return 0;
+ }
+ // Hit a compressed run. Count colons to figure out how much of the
+ // address is skipped.
+ readcursor += 2;
+ const char* coloncounter = readcursor;
+ int coloncount = 0;
+ if (*coloncounter == 0) {
+ // Special case - trailing ::.
+ addr_cursor = addr_end;
+ } else {
+ while (*coloncounter) {
+ if (*coloncounter == ':') {
+ ++coloncount;
+ }
+ ++coloncounter;
+ }
+ // (coloncount + 1) is the number of shorts left in the address.
+ addr_cursor = addr_end - (coloncount + 1);
+ seencompressed = true;
+ }
+ } else {
+ ++readcursor;
+ }
+ } else {
+ uint16 word;
+ int bytesread = 0;
+ if (sscanf(readcursor, "%hx%n", &word, &bytesread) != 1) {
+ return 0;
+ } else {
+ *addr_cursor = HostToNetwork16(word);
+ ++addr_cursor;
+ readcursor += bytesread;
+ if (*readcursor != ':' && *readcursor != '\0') {
+ return 0;
+ }
+ }
+ }
+ }
+
+ if (*readcursor != '\0' || addr_cursor < addr_end) {
+ // Catches addresses too short or too long.
+ return 0;
+ }
+ memcpy(dst, &an_addr, sizeof(an_addr));
+ return 1;
+}
+
+//
+// Unix time is in seconds relative to 1/1/1970. So we compute the Windows
+// FILETIME of that time/date, then we add/subtract in appropriate units to
+// convert to/from unix time.
+// The units of FILETIME are 100ns intervals, so by multiplying by or dividing
+// by 10000000, we can convert to/from seconds.
+//
+// FileTime = UnixTime*10000000 + FileTime(1970)
+// UnixTime = (FileTime-FileTime(1970))/10000000
+//
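+// For example: FileTime(1970) is 116444736000000000 (the 11644473600 seconds
+// between 1601-01-01 and 1970-01-01, expressed in 100 ns units), so a
+// FILETIME of 116444736000000000 + 10000000 corresponds to a Unix time of 1,
+// i.e. 1970-01-01 00:00:01 UTC.
+//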
+
+void FileTimeToUnixTime(const FILETIME& ft, time_t* ut) {
+ ASSERT(NULL != ut);
+
+ // FILETIME has an earlier date base than time_t (1/1/1970), so subtract off
+ // the difference.
+ SYSTEMTIME base_st;
+ memset(&base_st, 0, sizeof(base_st));
+ base_st.wDay = 1;
+ base_st.wMonth = 1;
+ base_st.wYear = 1970;
+
+ FILETIME base_ft;
+ SystemTimeToFileTime(&base_st, &base_ft);
+
+ ULARGE_INTEGER base_ul, current_ul;
+ memcpy(&base_ul, &base_ft, sizeof(FILETIME));
+ memcpy(&current_ul, &ft, sizeof(FILETIME));
+
+ // Divide by big number to convert to seconds, then subtract out the 1970
+ // base date value.
+ const ULONGLONG RATIO = 10000000;
+ *ut = static_cast<time_t>((current_ul.QuadPart - base_ul.QuadPart) / RATIO);
+}
+
+void UnixTimeToFileTime(const time_t& ut, FILETIME* ft) {
+ ASSERT(NULL != ft);
+
+ // FILETIME has an earlier date base than time_t (1/1/1970), so add in
+ // the difference.
+ SYSTEMTIME base_st;
+ memset(&base_st, 0, sizeof(base_st));
+ base_st.wDay = 1;
+ base_st.wMonth = 1;
+ base_st.wYear = 1970;
+
+ FILETIME base_ft;
+ SystemTimeToFileTime(&base_st, &base_ft);
+
+ ULARGE_INTEGER base_ul;
+ memcpy(&base_ul, &base_ft, sizeof(FILETIME));
+
+ // Multiply by big number to convert to 100ns units, then add in the 1970
+ // base date value.
+ const ULONGLONG RATIO = 10000000;
+ ULARGE_INTEGER current_ul;
+ current_ul.QuadPart = base_ul.QuadPart + static_cast<int64>(ut) * RATIO;
+ memcpy(ft, &current_ul, sizeof(FILETIME));
+}
+
+bool Utf8ToWindowsFilename(const std::string& utf8, std::wstring* filename) {
+ // TODO: Integrate into fileutils.h
+ // TODO: Handle wide and non-wide cases via TCHAR?
+ // TODO: Skip \\?\ processing if the length is not > MAX_PATH?
+ // TODO: Write unittests
+
+ // Convert to Utf16
+ int wlen = ::MultiByteToWideChar(CP_UTF8, 0, utf8.c_str(),
+ static_cast<int>(utf8.length() + 1), NULL,
+ 0);
+ if (0 == wlen) {
+ return false;
+ }
+ wchar_t* wfilename = STACK_ARRAY(wchar_t, wlen);
+ if (0 == ::MultiByteToWideChar(CP_UTF8, 0, utf8.c_str(),
+ static_cast<int>(utf8.length() + 1),
+ wfilename, wlen)) {
+ return false;
+ }
+ // Replace forward slashes with backslashes
+ std::replace(wfilename, wfilename + wlen, L'/', L'\\');
+ // Convert to complete filename
+ DWORD full_len = ::GetFullPathName(wfilename, 0, NULL, NULL);
+ if (0 == full_len) {
+ return false;
+ }
+ wchar_t* filepart = NULL;
+ wchar_t* full_filename = STACK_ARRAY(wchar_t, full_len + 6);
+ wchar_t* start = full_filename + 6;
+ if (0 == ::GetFullPathName(wfilename, full_len, start, &filepart)) {
+ return false;
+ }
+ // Add long-path prefix
+ const wchar_t kLongPathPrefix[] = L"\\\\?\\UNC";
+ if ((start[0] != L'\\') || (start[1] != L'\\')) {
+ // Non-unc path: <pathname>
+ // Becomes: \\?\<pathname>
+ start -= 4;
+ ASSERT(start >= full_filename);
+ memcpy(start, kLongPathPrefix, 4 * sizeof(wchar_t));
+ } else if (start[2] != L'?') {
+ // Unc path: \\<server>\<pathname>
+ // Becomes: \\?\UNC\<server>\<pathname>
+ start -= 6;
+ ASSERT(start >= full_filename);
+ memcpy(start, kLongPathPrefix, 7 * sizeof(wchar_t));
+ } else {
+ // Already in long-path form.
+ }
+ filename->assign(start);
+ return true;
+}
+
+bool GetOsVersion(int* major, int* minor, int* build) {
+ OSVERSIONINFO info = {0};
+ info.dwOSVersionInfoSize = sizeof(info);
+ if (GetVersionEx(&info)) {
+ if (major) *major = info.dwMajorVersion;
+ if (minor) *minor = info.dwMinorVersion;
+ if (build) *build = info.dwBuildNumber;
+ return true;
+ }
+ return false;
+}
+
+bool GetCurrentProcessIntegrityLevel(int* level) {
+ bool ret = false;
+ HANDLE process = ::GetCurrentProcess(), token;
+ if (OpenProcessToken(process, TOKEN_QUERY | TOKEN_QUERY_SOURCE, &token)) {
+ DWORD size;
+ if (!GetTokenInformation(token, TokenIntegrityLevel, NULL, 0, &size) &&
+ GetLastError() == ERROR_INSUFFICIENT_BUFFER) {
+
+ char* buf = STACK_ARRAY(char, size);
+ TOKEN_MANDATORY_LABEL* til =
+ reinterpret_cast<TOKEN_MANDATORY_LABEL*>(buf);
+ if (GetTokenInformation(token, TokenIntegrityLevel, til, size, &size)) {
+
+ DWORD count = *GetSidSubAuthorityCount(til->Label.Sid);
+ *level = *GetSidSubAuthority(til->Label.Sid, count - 1);
+ ret = true;
+ }
+ }
+ CloseHandle(token);
+ }
+ return ret;
+}
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/win32.h b/chromium/third_party/webrtc/base/win32.h
new file mode 100644
index 00000000000..bf5da254ce9
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_WIN32_H_
+#define WEBRTC_BASE_WIN32_H_
+
+#if defined(WEBRTC_WIN)
+
+#ifndef WIN32_LEAN_AND_MEAN
+#define WIN32_LEAN_AND_MEAN
+#endif
+
+// Make sure we don't get min/max macros
+#ifndef NOMINMAX
+#define NOMINMAX
+#endif
+
+#include <winsock2.h>
+#include <windows.h>
+
+#ifndef SECURITY_MANDATORY_LABEL_AUTHORITY
+// Add defines that we use if we are compiling against older SDKs.
+#define SECURITY_MANDATORY_MEDIUM_RID (0x00002000L)
+#define TokenIntegrityLevel static_cast<TOKEN_INFORMATION_CLASS>(0x19)
+typedef struct _TOKEN_MANDATORY_LABEL {
+ SID_AND_ATTRIBUTES Label;
+} TOKEN_MANDATORY_LABEL, *PTOKEN_MANDATORY_LABEL;
+#endif // SECURITY_MANDATORY_LABEL_AUTHORITY
+
+#undef SetPort
+
+#include <string>
+
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/basictypes.h"
+
+namespace rtc {
+
+const char* win32_inet_ntop(int af, const void *src, char* dst, socklen_t size);
+int win32_inet_pton(int af, const char* src, void *dst);
+
+///////////////////////////////////////////////////////////////////////////////
+
+inline std::wstring ToUtf16(const char* utf8, size_t len) {
+ int len16 = ::MultiByteToWideChar(CP_UTF8, 0, utf8, static_cast<int>(len),
+ NULL, 0);
+ wchar_t* ws = STACK_ARRAY(wchar_t, len16);
+ ::MultiByteToWideChar(CP_UTF8, 0, utf8, static_cast<int>(len), ws, len16);
+ return std::wstring(ws, len16);
+}
+
+inline std::wstring ToUtf16(const std::string& str) {
+ return ToUtf16(str.data(), str.length());
+}
+
+inline std::string ToUtf8(const wchar_t* wide, size_t len) {
+ int len8 = ::WideCharToMultiByte(CP_UTF8, 0, wide, static_cast<int>(len),
+ NULL, 0, NULL, NULL);
+ char* ns = STACK_ARRAY(char, len8);
+ ::WideCharToMultiByte(CP_UTF8, 0, wide, static_cast<int>(len), ns, len8,
+ NULL, NULL);
+ return std::string(ns, len8);
+}
+
+inline std::string ToUtf8(const wchar_t* wide) {
+ return ToUtf8(wide, wcslen(wide));
+}
+
+inline std::string ToUtf8(const std::wstring& wstr) {
+ return ToUtf8(wstr.data(), wstr.length());
+}
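+
+// Usage note (sketch): given a UTF-8 std::string |s|, ToUtf16(s) yields the
+// equivalent std::wstring for wide-character Win32 APIs, and
+// ToUtf8(ToUtf16(s)) round-trips back to |s|, since both directions use
+// CP_UTF8.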
+
+// Convert FILETIME to time_t
+void FileTimeToUnixTime(const FILETIME& ft, time_t* ut);
+
+// Convert time_t to FILETIME
+void UnixTimeToFileTime(const time_t& ut, FILETIME * ft);
+
+// Convert a Utf8 path representation to a non-length-limited Unicode pathname.
+bool Utf8ToWindowsFilename(const std::string& utf8, std::wstring* filename);
+
+// Convert a FILETIME to a UInt64
+inline uint64 ToUInt64(const FILETIME& ft) {
+ ULARGE_INTEGER r = {ft.dwLowDateTime, ft.dwHighDateTime};
+ return r.QuadPart;
+}
+
+enum WindowsMajorVersions {
+ kWindows2000 = 5,
+ kWindowsVista = 6,
+};
+bool GetOsVersion(int* major, int* minor, int* build);
+
+inline bool IsWindowsVistaOrLater() {
+ int major;
+ return (GetOsVersion(&major, NULL, NULL) && major >= kWindowsVista);
+}
+
+inline bool IsWindowsXpOrLater() {
+ int major, minor;
+ return (GetOsVersion(&major, &minor, NULL) &&
+ (major >= kWindowsVista ||
+ (major == kWindows2000 && minor >= 1)));
+}
+
+// Determine the current integrity level of the process.
+bool GetCurrentProcessIntegrityLevel(int* level);
+
+inline bool IsCurrentProcessLowIntegrity() {
+ int level;
+ return (GetCurrentProcessIntegrityLevel(&level) &&
+ level < SECURITY_MANDATORY_MEDIUM_RID);
+}
+
+bool AdjustCurrentProcessPrivilege(const TCHAR* privilege, bool to_enable);
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_WIN
+#endif // WEBRTC_BASE_WIN32_H_
diff --git a/chromium/third_party/webrtc/base/win32_unittest.cc b/chromium/third_party/webrtc/base/win32_unittest.cc
new file mode 100644
index 00000000000..0050c77264f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32_unittest.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/win32.h"
+#include "webrtc/base/winping.h"
+
+#if !defined(WEBRTC_WIN)
+#error Only for Windows
+#endif
+
+namespace rtc {
+
+class Win32Test : public testing::Test {
+ public:
+ Win32Test() {
+ }
+};
+
+TEST_F(Win32Test, FileTimeToUInt64Test) {
+ FILETIME ft;
+ ft.dwHighDateTime = 0xBAADF00D;
+ ft.dwLowDateTime = 0xFEED3456;
+
+ uint64 expected = 0xBAADF00DFEED3456;
+ EXPECT_EQ(expected, ToUInt64(ft));
+}
+
+TEST_F(Win32Test, WinPingTest) {
+ WinPing ping;
+ ASSERT_TRUE(ping.IsValid());
+
+ // Test valid ping cases.
+ WinPing::PingResult result = ping.Ping(IPAddress(INADDR_LOOPBACK), 20, 50, 1,
+ false);
+ ASSERT_EQ(WinPing::PING_SUCCESS, result);
+ if (HasIPv6Enabled()) {
+ WinPing::PingResult v6result = ping.Ping(IPAddress(in6addr_loopback), 20,
+ 50, 1, false);
+ ASSERT_EQ(WinPing::PING_SUCCESS, v6result);
+ }
+
+ // Test invalid parameter cases.
+ ASSERT_EQ(WinPing::PING_INVALID_PARAMS, ping.Ping(
+ IPAddress(INADDR_LOOPBACK), 0, 50, 1, false));
+ ASSERT_EQ(WinPing::PING_INVALID_PARAMS, ping.Ping(
+ IPAddress(INADDR_LOOPBACK), 20, 0, 1, false));
+ ASSERT_EQ(WinPing::PING_INVALID_PARAMS, ping.Ping(
+ IPAddress(INADDR_LOOPBACK), 20, 50, 0, false));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/win32filesystem.cc b/chromium/third_party/webrtc/base/win32filesystem.cc
new file mode 100644
index 00000000000..73f8ef0cf2f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32filesystem.cc
@@ -0,0 +1,460 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/win32filesystem.h"
+
+#include "webrtc/base/win32.h"
+#include <shellapi.h>
+#include <shlobj.h>
+#include <tchar.h>
+
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/stream.h"
+#include "webrtc/base/stringutils.h"
+
+// In several places in this file, we test the integrity level of the process
+// before calling GetLongPathName. We do this because calling GetLongPathName
+// when running under protected mode IE (a low integrity process) can result in
+// a virtualized path being returned, which is wrong if you only plan to read.
+// TODO: Waiting to hear back from IE team on whether this is the
+// best approach; IEIsProtectedModeProcess is another possible solution.
+
+namespace rtc {
+
+bool Win32Filesystem::CreateFolder(const Pathname &pathname) {
+ if (pathname.pathname().empty() || !pathname.filename().empty())
+ return false;
+
+ std::wstring path16;
+ if (!Utf8ToWindowsFilename(pathname.pathname(), &path16))
+ return false;
+
+ DWORD res = ::GetFileAttributes(path16.c_str());
+ if (res != INVALID_FILE_ATTRIBUTES) {
+ // Something exists at this location, check if it is a directory
+ return ((res & FILE_ATTRIBUTE_DIRECTORY) != 0);
+ } else if ((GetLastError() != ERROR_FILE_NOT_FOUND)
+ && (GetLastError() != ERROR_PATH_NOT_FOUND)) {
+ // Unexpected error
+ return false;
+ }
+
+ // Directory doesn't exist, look up one directory level
+ if (!pathname.parent_folder().empty()) {
+ Pathname parent(pathname);
+ parent.SetFolder(pathname.parent_folder());
+ if (!CreateFolder(parent)) {
+ return false;
+ }
+ }
+
+ return (::CreateDirectory(path16.c_str(), NULL) != 0);
+}
+
+FileStream *Win32Filesystem::OpenFile(const Pathname &filename,
+ const std::string &mode) {
+ FileStream *fs = new FileStream();
+ if (fs && !fs->Open(filename.pathname().c_str(), mode.c_str(), NULL)) {
+ delete fs;
+ fs = NULL;
+ }
+ return fs;
+}
+
+bool Win32Filesystem::CreatePrivateFile(const Pathname &filename) {
+ // To make the file private to the current user, we first must construct a
+ // SECURITY_DESCRIPTOR specifying an ACL. This code is mostly based upon
+ // http://msdn.microsoft.com/en-us/library/ms707085%28VS.85%29.aspx
+
+ // Get the current process token.
+ HANDLE process_token = INVALID_HANDLE_VALUE;
+ if (!::OpenProcessToken(::GetCurrentProcess(),
+ TOKEN_QUERY,
+ &process_token)) {
+ LOG_ERR(LS_ERROR) << "OpenProcessToken() failed";
+ return false;
+ }
+
+ // Get the size of its TOKEN_USER structure. Return value is not checked
+ // because we expect it to fail.
+ DWORD token_user_size = 0;
+ (void)::GetTokenInformation(process_token,
+ TokenUser,
+ NULL,
+ 0,
+ &token_user_size);
+
+ // Get the TOKEN_USER structure.
+ scoped_ptr<char[]> token_user_bytes(new char[token_user_size]);
+ PTOKEN_USER token_user = reinterpret_cast<PTOKEN_USER>(
+ token_user_bytes.get());
+ memset(token_user, 0, token_user_size);
+ BOOL success = ::GetTokenInformation(process_token,
+ TokenUser,
+ token_user,
+ token_user_size,
+ &token_user_size);
+ // We're now done with this.
+ ::CloseHandle(process_token);
+ if (!success) {
+ LOG_ERR(LS_ERROR) << "GetTokenInformation() failed";
+ return false;
+ }
+
+ if (!IsValidSid(token_user->User.Sid)) {
+ LOG_ERR(LS_ERROR) << "Current process has invalid user SID";
+ return false;
+ }
+
+ // Compute size needed for an ACL that allows access to just this user.
+ int acl_size = sizeof(ACL) + sizeof(ACCESS_ALLOWED_ACE) - sizeof(DWORD) +
+ GetLengthSid(token_user->User.Sid);
+
+ // Allocate it.
+ scoped_ptr<char[]> acl_bytes(new char[acl_size]);
+ PACL acl = reinterpret_cast<PACL>(acl_bytes.get());
+ memset(acl, 0, acl_size);
+ if (!::InitializeAcl(acl, acl_size, ACL_REVISION)) {
+ LOG_ERR(LS_ERROR) << "InitializeAcl() failed";
+ return false;
+ }
+
+ // Allow access to only the current user.
+ if (!::AddAccessAllowedAce(acl,
+ ACL_REVISION,
+ GENERIC_READ | GENERIC_WRITE | STANDARD_RIGHTS_ALL,
+ token_user->User.Sid)) {
+ LOG_ERR(LS_ERROR) << "AddAccessAllowedAce() failed";
+ return false;
+ }
+
+ // Now make the security descriptor.
+ SECURITY_DESCRIPTOR security_descriptor;
+ if (!::InitializeSecurityDescriptor(&security_descriptor,
+ SECURITY_DESCRIPTOR_REVISION)) {
+ LOG_ERR(LS_ERROR) << "InitializeSecurityDescriptor() failed";
+ return false;
+ }
+
+ // Put the ACL in it.
+ if (!::SetSecurityDescriptorDacl(&security_descriptor,
+ TRUE,
+ acl,
+ FALSE)) {
+ LOG_ERR(LS_ERROR) << "SetSecurityDescriptorDacl() failed";
+ return false;
+ }
+
+ // Finally create the file.
+ SECURITY_ATTRIBUTES security_attributes;
+ security_attributes.nLength = sizeof(security_attributes);
+ security_attributes.lpSecurityDescriptor = &security_descriptor;
+ security_attributes.bInheritHandle = FALSE;
+ HANDLE handle = ::CreateFile(
+ ToUtf16(filename.pathname()).c_str(),
+ GENERIC_READ | GENERIC_WRITE,
+ FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE,
+ &security_attributes,
+ CREATE_NEW,
+ 0,
+ NULL);
+ if (INVALID_HANDLE_VALUE == handle) {
+ LOG_ERR(LS_ERROR) << "CreateFile() failed";
+ return false;
+ }
+ if (!::CloseHandle(handle)) {
+ LOG_ERR(LS_ERROR) << "CloseFile() failed";
+ // Continue.
+ }
+ return true;
+}
+
+bool Win32Filesystem::DeleteFile(const Pathname &filename) {
+ LOG(LS_INFO) << "Deleting file " << filename.pathname();
+ if (!IsFile(filename)) {
+ ASSERT(IsFile(filename));
+ return false;
+ }
+ return ::DeleteFile(ToUtf16(filename.pathname()).c_str()) != 0;
+}
+
+bool Win32Filesystem::DeleteEmptyFolder(const Pathname &folder) {
+ LOG(LS_INFO) << "Deleting folder " << folder.pathname();
+
+ std::string no_slash(folder.pathname(), 0, folder.pathname().length()-1);
+ return ::RemoveDirectory(ToUtf16(no_slash).c_str()) != 0;
+}
+
+bool Win32Filesystem::GetTemporaryFolder(Pathname &pathname, bool create,
+ const std::string *append) {
+ wchar_t buffer[MAX_PATH + 1];
+ if (!::GetTempPath(ARRAY_SIZE(buffer), buffer))
+ return false;
+ if (!IsCurrentProcessLowIntegrity() &&
+ !::GetLongPathName(buffer, buffer, ARRAY_SIZE(buffer)))
+ return false;
+ size_t len = strlen(buffer);
+ if ((len > 0) && (buffer[len-1] != '\\')) {
+ len += strcpyn(buffer + len, ARRAY_SIZE(buffer) - len, L"\\");
+ }
+ if (len >= ARRAY_SIZE(buffer) - 1)
+ return false;
+ pathname.clear();
+ pathname.SetFolder(ToUtf8(buffer));
+ if (append != NULL) {
+ ASSERT(!append->empty());
+ pathname.AppendFolder(*append);
+ }
+ return !create || CreateFolder(pathname);
+}
+
+std::string Win32Filesystem::TempFilename(const Pathname &dir,
+ const std::string &prefix) {
+ wchar_t filename[MAX_PATH];
+ if (::GetTempFileName(ToUtf16(dir.pathname()).c_str(),
+ ToUtf16(prefix).c_str(), 0, filename) != 0)
+ return ToUtf8(filename);
+ ASSERT(false);
+ return "";
+}
+
+bool Win32Filesystem::MoveFile(const Pathname &old_path,
+ const Pathname &new_path) {
+ if (!IsFile(old_path)) {
+ ASSERT(IsFile(old_path));
+ return false;
+ }
+ LOG(LS_INFO) << "Moving " << old_path.pathname()
+ << " to " << new_path.pathname();
+ return ::MoveFile(ToUtf16(old_path.pathname()).c_str(),
+ ToUtf16(new_path.pathname()).c_str()) != 0;
+}
+
+bool Win32Filesystem::MoveFolder(const Pathname &old_path,
+ const Pathname &new_path) {
+ if (!IsFolder(old_path)) {
+ ASSERT(IsFolder(old_path));
+ return false;
+ }
+ LOG(LS_INFO) << "Moving " << old_path.pathname()
+ << " to " << new_path.pathname();
+ if (::MoveFile(ToUtf16(old_path.pathname()).c_str(),
+ ToUtf16(new_path.pathname()).c_str()) == 0) {
+ if (::GetLastError() != ERROR_NOT_SAME_DEVICE) {
+ LOG_GLE(LS_ERROR) << "Failed to move file";
+ return false;
+ }
+ if (!CopyFolder(old_path, new_path))
+ return false;
+ if (!DeleteFolderAndContents(old_path))
+ return false;
+ }
+ return true;
+}
+
+bool Win32Filesystem::IsFolder(const Pathname &path) {
+ WIN32_FILE_ATTRIBUTE_DATA data = {0};
+ if (0 == ::GetFileAttributesEx(ToUtf16(path.pathname()).c_str(),
+ GetFileExInfoStandard, &data))
+ return false;
+ return (data.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) ==
+ FILE_ATTRIBUTE_DIRECTORY;
+}
+
+bool Win32Filesystem::IsFile(const Pathname &path) {
+ WIN32_FILE_ATTRIBUTE_DATA data = {0};
+ if (0 == ::GetFileAttributesEx(ToUtf16(path.pathname()).c_str(),
+ GetFileExInfoStandard, &data))
+ return false;
+ return (data.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) == 0;
+}
+
+bool Win32Filesystem::IsAbsent(const Pathname& path) {
+ WIN32_FILE_ATTRIBUTE_DATA data = {0};
+ if (0 != ::GetFileAttributesEx(ToUtf16(path.pathname()).c_str(),
+ GetFileExInfoStandard, &data))
+ return false;
+ DWORD err = ::GetLastError();
+ return (ERROR_FILE_NOT_FOUND == err || ERROR_PATH_NOT_FOUND == err);
+}
+
+bool Win32Filesystem::CopyFile(const Pathname &old_path,
+ const Pathname &new_path) {
+ return ::CopyFile(ToUtf16(old_path.pathname()).c_str(),
+ ToUtf16(new_path.pathname()).c_str(), TRUE) != 0;
+}
+
+bool Win32Filesystem::IsTemporaryPath(const Pathname& pathname) {
+ TCHAR buffer[MAX_PATH + 1];
+ if (!::GetTempPath(ARRAY_SIZE(buffer), buffer))
+ return false;
+ if (!IsCurrentProcessLowIntegrity() &&
+ !::GetLongPathName(buffer, buffer, ARRAY_SIZE(buffer)))
+ return false;
+ return (::strnicmp(ToUtf16(pathname.pathname()).c_str(),
+ buffer, strlen(buffer)) == 0);
+}
+
+bool Win32Filesystem::GetFileSize(const Pathname &pathname, size_t *size) {
+ WIN32_FILE_ATTRIBUTE_DATA data = {0};
+ if (::GetFileAttributesEx(ToUtf16(pathname.pathname()).c_str(),
+ GetFileExInfoStandard, &data) == 0)
+ return false;
+ *size = data.nFileSizeLow;
+ return true;
+}
+
+bool Win32Filesystem::GetFileTime(const Pathname& path, FileTimeType which,
+ time_t* time) {
+ WIN32_FILE_ATTRIBUTE_DATA data = {0};
+ if (::GetFileAttributesEx(ToUtf16(path.pathname()).c_str(),
+ GetFileExInfoStandard, &data) == 0)
+ return false;
+ switch (which) {
+ case FTT_CREATED:
+ FileTimeToUnixTime(data.ftCreationTime, time);
+ break;
+ case FTT_MODIFIED:
+ FileTimeToUnixTime(data.ftLastWriteTime, time);
+ break;
+ case FTT_ACCESSED:
+ FileTimeToUnixTime(data.ftLastAccessTime, time);
+ break;
+ default:
+ return false;
+ }
+ return true;
+}
+
+bool Win32Filesystem::GetAppPathname(Pathname* path) {
+ TCHAR buffer[MAX_PATH + 1];
+ if (0 == ::GetModuleFileName(NULL, buffer, ARRAY_SIZE(buffer)))
+ return false;
+ path->SetPathname(ToUtf8(buffer));
+ return true;
+}
+
+bool Win32Filesystem::GetAppDataFolder(Pathname* path, bool per_user) {
+ ASSERT(!organization_name_.empty());
+ ASSERT(!application_name_.empty());
+ TCHAR buffer[MAX_PATH + 1];
+ int csidl = per_user ? CSIDL_LOCAL_APPDATA : CSIDL_COMMON_APPDATA;
+ if (!::SHGetSpecialFolderPath(NULL, buffer, csidl, TRUE))
+ return false;
+ if (!IsCurrentProcessLowIntegrity() &&
+ !::GetLongPathName(buffer, buffer, ARRAY_SIZE(buffer)))
+ return false;
+ size_t len = strcatn(buffer, ARRAY_SIZE(buffer), __T("\\"));
+ len += strcpyn(buffer + len, ARRAY_SIZE(buffer) - len,
+ ToUtf16(organization_name_).c_str());
+ if ((len > 0) && (buffer[len-1] != __T('\\'))) {
+ len += strcpyn(buffer + len, ARRAY_SIZE(buffer) - len, __T("\\"));
+ }
+ len += strcpyn(buffer + len, ARRAY_SIZE(buffer) - len,
+ ToUtf16(application_name_).c_str());
+ if ((len > 0) && (buffer[len-1] != __T('\\'))) {
+ len += strcpyn(buffer + len, ARRAY_SIZE(buffer) - len, __T("\\"));
+ }
+ if (len >= ARRAY_SIZE(buffer) - 1)
+ return false;
+ path->clear();
+ path->SetFolder(ToUtf8(buffer));
+ return CreateFolder(*path);
+}
+
+bool Win32Filesystem::GetAppTempFolder(Pathname* path) {
+ if (!GetAppPathname(path))
+ return false;
+ std::string filename(path->filename());
+ return GetTemporaryFolder(*path, true, &filename);
+}
+
+bool Win32Filesystem::GetDiskFreeSpace(const Pathname& path, int64 *freebytes) {
+ if (!freebytes) {
+ return false;
+ }
+ char drive[4];
+ std::wstring drive16;
+ const wchar_t* target_drive = NULL;
+ if (path.GetDrive(drive, sizeof(drive))) {
+ drive16 = ToUtf16(drive);
+ target_drive = drive16.c_str();
+ } else if (path.folder().substr(0, 2) == "\\\\") {
+ // UNC path, fail.
+ // TODO: Handle UNC paths.
+ return false;
+ } else {
+ // The path is probably relative. GetDriveType and GetDiskFreeSpaceEx
+ // use the current drive if NULL is passed as the drive name.
+ // TODO: Add method to Pathname to determine if the path is relative.
+ // TODO: Add method to Pathname to convert a path to absolute.
+ }
+ UINT driveType = ::GetDriveType(target_drive);
+ if (driveType == DRIVE_REMOTE || driveType == DRIVE_UNKNOWN) {
+ LOG(LS_VERBOSE) << " remote or unknown drive " << drive;
+ return false;
+ }
+
+ int64 totalNumberOfBytes; // receives the number of bytes on disk
+ int64 totalNumberOfFreeBytes; // receives the free bytes on disk
+ // Make sure the sizes still match on 64-bit machines.
+ // TODO: Replace with a compile-time assert.
+ ASSERT(sizeof(ULARGE_INTEGER) == sizeof(uint64)); //NOLINT
+ if (::GetDiskFreeSpaceEx(target_drive,
+ (PULARGE_INTEGER)freebytes,
+ (PULARGE_INTEGER)&totalNumberOfBytes,
+ (PULARGE_INTEGER)&totalNumberOfFreeBytes)) {
+ return true;
+ } else {
+ LOG(LS_VERBOSE) << " GetDiskFreeSpaceEx returns error ";
+ return false;
+ }
+}
+
+Pathname Win32Filesystem::GetCurrentDirectory() {
+ Pathname cwd;
+ int path_len = 0;
+ scoped_ptr<wchar_t[]> path;
+ do {
+ int needed = ::GetCurrentDirectory(path_len, path.get());
+ if (needed == 0) {
+ // Error.
+ LOG_GLE(LS_ERROR) << "::GetCurrentDirectory() failed";
+ return cwd; // returns empty pathname
+ }
+ if (needed <= path_len) {
+ // It wrote successfully.
+ break;
+ }
+ // Else need to re-alloc for "needed".
+ path.reset(new wchar_t[needed]);
+ path_len = needed;
+ } while (true);
+ cwd.SetFolder(ToUtf8(path.get()));
+ return cwd;
+}
+
+// TODO: Consider overriding DeleteFolderAndContents for speed and potentially
+// better OS integration (recycle bin?)
+/*
+ std::wstring temp_path16 = ToUtf16(temp_path.pathname());
+ temp_path16.append(1, '*');
+ temp_path16.append(1, '\0');
+
+ SHFILEOPSTRUCT file_op = { 0 };
+ file_op.wFunc = FO_DELETE;
+ file_op.pFrom = temp_path16.c_str();
+ file_op.fFlags = FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_SILENT;
+ return (0 == SHFileOperation(&file_op));
+*/
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/win32filesystem.h b/chromium/third_party/webrtc/base/win32filesystem.h
new file mode 100644
index 00000000000..3cd5373e341
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32filesystem.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef _WEBRTC_BASE_WIN32FILESYSTEM_H__
+#define _WEBRTC_BASE_WIN32FILESYSTEM_H__
+
+#include "fileutils.h"
+
+namespace rtc {
+
+class Win32Filesystem : public FilesystemInterface {
+ public:
+ // Opens a file. Returns an open StreamInterface if the function succeeds.
+ // Otherwise, returns NULL.
+ virtual FileStream *OpenFile(const Pathname &filename,
+ const std::string &mode);
+
+ // Atomically creates an empty file accessible only to the current user if one
+ // does not already exist at the given path, otherwise fails.
+ virtual bool CreatePrivateFile(const Pathname &filename);
+
+ // This will attempt to delete the path located at filename.
+ // If the path points to a folder, it will fail with VERIFY
+ virtual bool DeleteFile(const Pathname &filename);
+
+ // This will attempt to delete an empty folder. If the path does not point to
+ // a folder, it fails with VERIFY. If the folder is not empty, it fails normally
+ virtual bool DeleteEmptyFolder(const Pathname &folder);
+
+ // Creates a directory. This will call itself recursively to create /foo/bar even if
+ // /foo does not exist.
+ // Returns TRUE if the function succeeds.
+ virtual bool CreateFolder(const Pathname &pathname);
+
+ // This moves a file from old_path to new_path. If the new path is on a
+ // different volume than the old, it will attempt to copy and then delete
+ // the file.
+ // Returns true if the file is successfully moved.
+ virtual bool MoveFile(const Pathname &old_path, const Pathname &new_path);
+
+ // Moves a folder from old_path to new_path. If the new path is on a different
+ // volume from the old, it will attempt to Copy and then Delete the folder
+ // Returns true if the folder is successfully moved
+ virtual bool MoveFolder(const Pathname &old_path, const Pathname &new_path);
+
+ // This copies a file from old_path to new_path.
+ // Returns true if the function succeeds.
+ virtual bool CopyFile(const Pathname &old_path, const Pathname &new_path);
+
+ // Returns true if a pathname is a directory
+ virtual bool IsFolder(const Pathname& pathname);
+
+ // Returns true if a file exists at path
+ virtual bool IsFile(const Pathname &path);
+
+ // Returns true if pathname refers to no filesystem object and every parent
+ // directory either exists or is also absent.
+ virtual bool IsAbsent(const Pathname& pathname);
+
+ // Returns true if pathname represents a temporary location on the system.
+ virtual bool IsTemporaryPath(const Pathname& pathname);
+
+ // All of the following functions set pathname and return true if successful.
+ // Returned paths always include a trailing backslash.
+ // If create is true, the path will be recursively created.
+ // If append is non-NULL, it will be appended (and possibly created).
+
+ virtual std::string TempFilename(const Pathname &dir, const std::string &prefix);
+
+ virtual bool GetFileSize(const Pathname& path, size_t* size);
+ virtual bool GetFileTime(const Pathname& path, FileTimeType which,
+ time_t* time);
+
+ // A folder appropriate for storing temporary files (contents are
+ // automatically deleted when the program exits).
+ virtual bool GetTemporaryFolder(Pathname &path, bool create,
+ const std::string *append);
+
+ // Returns the path to the running application.
+ virtual bool GetAppPathname(Pathname* path);
+
+ virtual bool GetAppDataFolder(Pathname* path, bool per_user);
+
+ // Get a temporary folder that is unique to the current user and application.
+ virtual bool GetAppTempFolder(Pathname* path);
+
+ virtual bool GetDiskFreeSpace(const Pathname& path, int64 *freebytes);
+
+ virtual Pathname GetCurrentDirectory();
+};
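+
+// Illustrative usage sketch (the "demo" prefix is hypothetical):
+//   Win32Filesystem fs;
+//   Pathname tmp;
+//   if (fs.GetTemporaryFolder(tmp, true, NULL)) {
+//     std::string scratch = fs.TempFilename(tmp, "demo");
+//     // |scratch| names a unique, already-created file under |tmp|.
+//   }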
+
+} // namespace rtc
+
+#endif // WEBRTC_WINFILESYSTEM_H__
diff --git a/chromium/third_party/webrtc/base/win32regkey.cc b/chromium/third_party/webrtc/base/win32regkey.cc
new file mode 100644
index 00000000000..1ed0d4ea291
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32regkey.cc
@@ -0,0 +1,1102 @@
+/*
+ * Copyright 2003 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Registry configuration wrappers class implementation
+//
+// Change made by S. Ganesh - ganesh@google.com:
+// Use SHQueryValueEx instead of RegQueryValueEx throughout.
+// A call to the SHLWAPI function is essentially a call to the standard
+// function but with post-processing:
+// * to fix REG_SZ or REG_EXPAND_SZ data that is not properly null-terminated;
+// * to expand REG_EXPAND_SZ data.
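+//
+// Illustrative usage of the instance API (sketch; the key and value names
+// here are hypothetical):
+//   RegKey key;
+//   if (SUCCEEDED(key.Open(HKEY_CURRENT_USER, L"Software\\Example"))) {
+//     DWORD enabled = 0;
+//     key.GetValue(L"Enabled", &enabled);
+//     key.Close();
+//   }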
+
+#include "webrtc/base/win32regkey.h"
+
+#include <shlwapi.h>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace rtc {
+
+RegKey::RegKey() {
+ h_key_ = NULL;
+}
+
+RegKey::~RegKey() {
+ Close();
+}
+
+HRESULT RegKey::Create(HKEY parent_key, const wchar_t* key_name) {
+ return Create(parent_key,
+ key_name,
+ REG_NONE,
+ REG_OPTION_NON_VOLATILE,
+ KEY_ALL_ACCESS,
+ NULL,
+ NULL);
+}
+
+HRESULT RegKey::Open(HKEY parent_key, const wchar_t* key_name) {
+ return Open(parent_key, key_name, KEY_ALL_ACCESS);
+}
+
+bool RegKey::HasValue(const TCHAR* value_name) const {
+ return (ERROR_SUCCESS == ::RegQueryValueEx(h_key_, value_name, NULL,
+ NULL, NULL, NULL));
+}
+
+HRESULT RegKey::SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD value) {
+ ASSERT(full_key_name != NULL);
+
+ return SetValueStaticHelper(full_key_name, value_name, REG_DWORD, &value);
+}
+
+HRESULT RegKey::SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD64 value) {
+ ASSERT(full_key_name != NULL);
+
+ return SetValueStaticHelper(full_key_name, value_name, REG_QWORD, &value);
+}
+
+HRESULT RegKey::SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ float value) {
+ ASSERT(full_key_name != NULL);
+
+ return SetValueStaticHelper(full_key_name, value_name,
+ REG_BINARY, &value, sizeof(value));
+}
+
+HRESULT RegKey::SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ double value) {
+ ASSERT(full_key_name != NULL);
+
+ return SetValueStaticHelper(full_key_name, value_name,
+ REG_BINARY, &value, sizeof(value));
+}
+
+HRESULT RegKey::SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ const TCHAR* value) {
+ ASSERT(full_key_name != NULL);
+ ASSERT(value != NULL);
+
+ return SetValueStaticHelper(full_key_name, value_name,
+ REG_SZ, const_cast<wchar_t*>(value));
+}
+
+HRESULT RegKey::SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ const uint8* value,
+ DWORD byte_count) {
+ ASSERT(full_key_name != NULL);
+
+ return SetValueStaticHelper(full_key_name, value_name, REG_BINARY,
+ const_cast<uint8*>(value), byte_count);
+}
+
+HRESULT RegKey::SetValueMultiSZ(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ const uint8* value,
+ DWORD byte_count) {
+ ASSERT(full_key_name != NULL);
+
+ return SetValueStaticHelper(full_key_name, value_name, REG_MULTI_SZ,
+ const_cast<uint8*>(value), byte_count);
+}
+
+HRESULT RegKey::GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD* value) {
+ ASSERT(full_key_name != NULL);
+ ASSERT(value != NULL);
+
+ return GetValueStaticHelper(full_key_name, value_name, REG_DWORD, value);
+}
+
+HRESULT RegKey::GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD64* value) {
+ ASSERT(full_key_name != NULL);
+ ASSERT(value != NULL);
+
+ return GetValueStaticHelper(full_key_name, value_name, REG_QWORD, value);
+}
+
+HRESULT RegKey::GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ float* value) {
+ ASSERT(value != NULL);
+ ASSERT(full_key_name != NULL);
+
+ DWORD byte_count = 0;
+ scoped_ptr<byte[]> buffer;
+ HRESULT hr = GetValueStaticHelper(full_key_name, value_name,
+ REG_BINARY, buffer.accept(), &byte_count);
+ if (SUCCEEDED(hr)) {
+ ASSERT(byte_count == sizeof(*value));
+ if (byte_count == sizeof(*value)) {
+ *value = *reinterpret_cast<float*>(buffer.get());
+ }
+ }
+ return hr;
+}
+
+HRESULT RegKey::GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ double* value) {
+ ASSERT(value != NULL);
+ ASSERT(full_key_name != NULL);
+
+ DWORD byte_count = 0;
+ scoped_ptr<byte[]> buffer;
+ HRESULT hr = GetValueStaticHelper(full_key_name, value_name,
+ REG_BINARY, buffer.accept(), &byte_count);
+ if (SUCCEEDED(hr)) {
+ ASSERT(byte_count == sizeof(*value));
+ if (byte_count == sizeof(*value)) {
+ *value = *reinterpret_cast<double*>(buffer.get());
+ }
+ }
+ return hr;
+}
+
+HRESULT RegKey::GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ wchar_t** value) {
+ ASSERT(full_key_name != NULL);
+ ASSERT(value != NULL);
+
+ return GetValueStaticHelper(full_key_name, value_name, REG_SZ, value);
+}
+
+HRESULT RegKey::GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ std::wstring* value) {
+ ASSERT(full_key_name != NULL);
+ ASSERT(value != NULL);
+
+ scoped_ptr<wchar_t[]> buffer;
+ HRESULT hr = RegKey::GetValue(full_key_name, value_name, buffer.accept());
+ if (SUCCEEDED(hr)) {
+ value->assign(buffer.get());
+ }
+ return hr;
+}
+
+HRESULT RegKey::GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ std::vector<std::wstring>* value) {
+ ASSERT(full_key_name != NULL);
+ ASSERT(value != NULL);
+
+ return GetValueStaticHelper(full_key_name, value_name, REG_MULTI_SZ, value);
+}
+
+HRESULT RegKey::GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ uint8** value,
+ DWORD* byte_count) {
+ ASSERT(full_key_name != NULL);
+ ASSERT(value != NULL);
+ ASSERT(byte_count != NULL);
+
+ return GetValueStaticHelper(full_key_name, value_name,
+ REG_BINARY, value, byte_count);
+}
+
+HRESULT RegKey::DeleteSubKey(const wchar_t* key_name) {
+ ASSERT(key_name != NULL);
+ ASSERT(h_key_ != NULL);
+
+ LONG res = ::RegDeleteKey(h_key_, key_name);
+ HRESULT hr = HRESULT_FROM_WIN32(res);
+ if (hr == HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND) ||
+ hr == HRESULT_FROM_WIN32(ERROR_PATH_NOT_FOUND)) {
+ hr = S_FALSE;
+ }
+ return hr;
+}
+
+HRESULT RegKey::DeleteValue(const wchar_t* value_name) {
+ ASSERT(h_key_ != NULL);
+
+ LONG res = ::RegDeleteValue(h_key_, value_name);
+ HRESULT hr = HRESULT_FROM_WIN32(res);
+ if (hr == HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND) ||
+ hr == HRESULT_FROM_WIN32(ERROR_PATH_NOT_FOUND)) {
+ hr = S_FALSE;
+ }
+ return hr;
+}
+
+HRESULT RegKey::Close() {
+ HRESULT hr = S_OK;
+ if (h_key_ != NULL) {
+ LONG res = ::RegCloseKey(h_key_);
+ hr = HRESULT_FROM_WIN32(res);
+ h_key_ = NULL;
+ }
+ return hr;
+}
+
+HRESULT RegKey::Create(HKEY parent_key,
+ const wchar_t* key_name,
+ wchar_t* lpszClass,
+ DWORD options,
+ REGSAM sam_desired,
+ LPSECURITY_ATTRIBUTES lpSecAttr,
+ LPDWORD lpdwDisposition) {
+ ASSERT(key_name != NULL);
+ ASSERT(parent_key != NULL);
+
+ DWORD dw = 0;
+ HKEY h_key = NULL;
+ LONG res = ::RegCreateKeyEx(parent_key, key_name, 0, lpszClass, options,
+ sam_desired, lpSecAttr, &h_key, &dw);
+ HRESULT hr = HRESULT_FROM_WIN32(res);
+
+ if (lpdwDisposition) {
+ *lpdwDisposition = dw;
+ }
+
+ // we have to close the currently opened key
+ // before replacing it with the new one
+ if (hr == S_OK) {
+ hr = Close();
+ ASSERT(hr == S_OK);
+ h_key_ = h_key;
+ }
+ return hr;
+}
+
+HRESULT RegKey::Open(HKEY parent_key,
+ const wchar_t* key_name,
+ REGSAM sam_desired) {
+ ASSERT(key_name != NULL);
+ ASSERT(parent_key != NULL);
+
+ HKEY h_key = NULL;
+ LONG res = ::RegOpenKeyEx(parent_key, key_name, 0, sam_desired, &h_key);
+ HRESULT hr = HRESULT_FROM_WIN32(res);
+
+ // we have to close the currently opened key
+ // before replacing it with the new one
+ if (hr == S_OK) {
+ // close the currently opened key if any
+ hr = Close();
+ ASSERT(hr == S_OK);
+ h_key_ = h_key;
+ }
+ return hr;
+}
+
+// save the key and all of its subkeys and values to a file
+HRESULT RegKey::Save(const wchar_t* full_key_name, const wchar_t* file_name) {
+ ASSERT(full_key_name != NULL);
+ ASSERT(file_name != NULL);
+
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+ if (!h_key) {
+ return E_FAIL;
+ }
+
+ RegKey key;
+ HRESULT hr = key.Open(h_key, key_name.c_str(), KEY_READ);
+ if (FAILED(hr)) {
+ return hr;
+ }
+
+ AdjustCurrentProcessPrivilege(SE_BACKUP_NAME, true);
+ LONG res = ::RegSaveKey(key.h_key_, file_name, NULL);
+ AdjustCurrentProcessPrivilege(SE_BACKUP_NAME, false);
+
+ return HRESULT_FROM_WIN32(res);
+}
+
+// restore the key and all of its subkeys and values that were saved to a file
+HRESULT RegKey::Restore(const wchar_t* full_key_name,
+ const wchar_t* file_name) {
+ ASSERT(full_key_name != NULL);
+ ASSERT(file_name != NULL);
+
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+ if (!h_key) {
+ return E_FAIL;
+ }
+
+ RegKey key;
+ HRESULT hr = key.Open(h_key, key_name.c_str(), KEY_WRITE);
+ if (FAILED(hr)) {
+ return hr;
+ }
+
+ AdjustCurrentProcessPrivilege(SE_RESTORE_NAME, true);
+ LONG res = ::RegRestoreKey(key.h_key_, file_name, REG_FORCE_RESTORE);
+ AdjustCurrentProcessPrivilege(SE_RESTORE_NAME, false);
+
+ return HRESULT_FROM_WIN32(res);
+}
+
+// check if the current key has the specified subkey
+bool RegKey::HasSubkey(const wchar_t* key_name) const {
+ ASSERT(key_name != NULL);
+
+ RegKey key;
+ HRESULT hr = key.Open(h_key_, key_name, KEY_READ);
+ key.Close();
+ return hr == S_OK;
+}
+
+// static flush key
+HRESULT RegKey::FlushKey(const wchar_t* full_key_name) {
+ ASSERT(full_key_name != NULL);
+
+ HRESULT hr = HRESULT_FROM_WIN32(ERROR_PATH_NOT_FOUND);
+ // get the root HKEY
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+
+ if (h_key != NULL) {
+ LONG res = ::RegFlushKey(h_key);
+ hr = HRESULT_FROM_WIN32(res);
+ }
+ return hr;
+}
+
+// static SET helper
+HRESULT RegKey::SetValueStaticHelper(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD type,
+ LPVOID value,
+ DWORD byte_count) {
+ ASSERT(full_key_name != NULL);
+
+ HRESULT hr = HRESULT_FROM_WIN32(ERROR_PATH_NOT_FOUND);
+ // get the root HKEY
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+
+ if (h_key != NULL) {
+ RegKey key;
+ hr = key.Create(h_key, key_name.c_str());
+ if (hr == S_OK) {
+ switch (type) {
+ case REG_DWORD:
+ hr = key.SetValue(value_name, *(static_cast<DWORD*>(value)));
+ break;
+ case REG_QWORD:
+ hr = key.SetValue(value_name, *(static_cast<DWORD64*>(value)));
+ break;
+ case REG_SZ:
+ hr = key.SetValue(value_name, static_cast<const wchar_t*>(value));
+ break;
+ case REG_BINARY:
+ hr = key.SetValue(value_name, static_cast<const uint8*>(value),
+ byte_count);
+ break;
+ case REG_MULTI_SZ:
+ hr = key.SetValue(value_name, static_cast<const uint8*>(value),
+ byte_count, type);
+ break;
+ default:
+ ASSERT(false);
+ hr = HRESULT_FROM_WIN32(ERROR_DATATYPE_MISMATCH);
+ break;
+ }
+ // close the key after writing
+ HRESULT temp_hr = key.Close();
+ if (hr == S_OK) {
+ hr = temp_hr;
+ }
+ }
+ }
+ return hr;
+}
+
+// static GET helper
+HRESULT RegKey::GetValueStaticHelper(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD type,
+ LPVOID value,
+ DWORD* byte_count) {
+ ASSERT(full_key_name != NULL);
+
+ HRESULT hr = HRESULT_FROM_WIN32(ERROR_PATH_NOT_FOUND);
+ // get the root HKEY
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+
+ if (h_key != NULL) {
+ RegKey key;
+ hr = key.Open(h_key, key_name.c_str(), KEY_READ);
+ if (hr == S_OK) {
+ switch (type) {
+ case REG_DWORD:
+ hr = key.GetValue(value_name, reinterpret_cast<DWORD*>(value));
+ break;
+ case REG_QWORD:
+ hr = key.GetValue(value_name, reinterpret_cast<DWORD64*>(value));
+ break;
+ case REG_SZ:
+ hr = key.GetValue(value_name, reinterpret_cast<wchar_t**>(value));
+ break;
+ case REG_MULTI_SZ:
+ hr = key.GetValue(value_name, reinterpret_cast<
+ std::vector<std::wstring>*>(value));
+ break;
+ case REG_BINARY:
+ hr = key.GetValue(value_name, reinterpret_cast<uint8**>(value),
+ byte_count);
+ break;
+ default:
+ ASSERT(false);
+ hr = HRESULT_FROM_WIN32(ERROR_DATATYPE_MISMATCH);
+ break;
+ }
+ // close the key after reading
+ HRESULT temp_hr = key.Close();
+ if (hr == S_OK) {
+ hr = temp_hr;
+ }
+ }
+ }
+ return hr;
+}
+
+// GET helper
+HRESULT RegKey::GetValueHelper(const wchar_t* value_name,
+ DWORD* type,
+ uint8** value,
+ DWORD* byte_count) const {
+ ASSERT(byte_count != NULL);
+ ASSERT(value != NULL);
+ ASSERT(type != NULL);
+
+ // init return buffer
+ *value = NULL;
+
+ // get the size of the return data buffer
+ LONG res = ::SHQueryValueEx(h_key_, value_name, NULL, type, NULL, byte_count);
+ HRESULT hr = HRESULT_FROM_WIN32(res);
+
+ if (hr == S_OK) {
+ // if the value length is 0, nothing to do
+ if (*byte_count != 0) {
+ // allocate the buffer
+ *value = new byte[*byte_count];
+ ASSERT(*value != NULL);
+
+ // make the call again to get the data
+ res = ::SHQueryValueEx(h_key_, value_name, NULL,
+ type, *value, byte_count);
+ hr = HRESULT_FROM_WIN32(res);
+ ASSERT(hr == S_OK);
+ }
+ }
+ return hr;
+}
+
+// Int32 Get
+HRESULT RegKey::GetValue(const wchar_t* value_name, DWORD* value) const {
+ ASSERT(value != NULL);
+
+ DWORD type = 0;
+ DWORD byte_count = sizeof(DWORD);
+ LONG res = ::SHQueryValueEx(h_key_, value_name, NULL, &type,
+ value, &byte_count);
+ HRESULT hr = HRESULT_FROM_WIN32(res);
+ ASSERT((hr != S_OK) || (type == REG_DWORD));
+ ASSERT((hr != S_OK) || (byte_count == sizeof(DWORD)));
+ return hr;
+}
+
+// Int64 Get
+HRESULT RegKey::GetValue(const wchar_t* value_name, DWORD64* value) const {
+ ASSERT(value != NULL);
+
+ DWORD type = 0;
+ DWORD byte_count = sizeof(DWORD64);
+ LONG res = ::SHQueryValueEx(h_key_, value_name, NULL, &type,
+ value, &byte_count);
+ HRESULT hr = HRESULT_FROM_WIN32(res);
+ ASSERT((hr != S_OK) || (type == REG_QWORD));
+ ASSERT((hr != S_OK) || (byte_count == sizeof(DWORD64)));
+ return hr;
+}
+
+// String Get
+HRESULT RegKey::GetValue(const wchar_t* value_name, wchar_t** value) const {
+ ASSERT(value != NULL);
+
+ DWORD byte_count = 0;
+ DWORD type = 0;
+
+ // first get the size of the string buffer
+ LONG res = ::SHQueryValueEx(h_key_, value_name, NULL,
+ &type, NULL, &byte_count);
+ HRESULT hr = HRESULT_FROM_WIN32(res);
+
+ if (hr == S_OK) {
+ // allocate room for the string and a terminating \0
+ *value = new wchar_t[(byte_count / sizeof(wchar_t)) + 1];
+
+ if ((*value) != NULL) {
+ if (byte_count != 0) {
+ // make the call again
+ res = ::SHQueryValueEx(h_key_, value_name, NULL, &type,
+ *value, &byte_count);
+ hr = HRESULT_FROM_WIN32(res);
+ } else {
+ (*value)[0] = L'\0';
+ }
+
+ ASSERT((hr != S_OK) || (type == REG_SZ) ||
+ (type == REG_MULTI_SZ) || (type == REG_EXPAND_SZ));
+ } else {
+ hr = E_OUTOFMEMORY;
+ }
+ }
+
+ return hr;
+}
+
+// get a string value
+HRESULT RegKey::GetValue(const wchar_t* value_name, std::wstring* value) const {
+ ASSERT(value != NULL);
+
+ DWORD byte_count = 0;
+ DWORD type = 0;
+
+ // first get the size of the string buffer
+ LONG res = ::SHQueryValueEx(h_key_, value_name, NULL,
+ &type, NULL, &byte_count);
+ HRESULT hr = HRESULT_FROM_WIN32(res);
+
+ if (hr == S_OK) {
+ if (byte_count != 0) {
+ // Allocate some memory and make the call again
+ value->resize(byte_count / sizeof(wchar_t) + 1);
+ res = ::SHQueryValueEx(h_key_, value_name, NULL, &type,
+ &value->at(0), &byte_count);
+ hr = HRESULT_FROM_WIN32(res);
+ value->resize(wcslen(value->data()));
+ } else {
+ value->clear();
+ }
+
+ ASSERT((hr != S_OK) || (type == REG_SZ) ||
+ (type == REG_MULTI_SZ) || (type == REG_EXPAND_SZ));
+ }
+
+ return hr;
+}
+
+// convert REG_MULTI_SZ bytes to string array
+HRESULT RegKey::MultiSZBytesToStringArray(const uint8* buffer,
+ DWORD byte_count,
+ std::vector<std::wstring>* value) {
+ ASSERT(buffer != NULL);
+ ASSERT(value != NULL);
+
+ const wchar_t* data = reinterpret_cast<const wchar_t*>(buffer);
+ DWORD data_len = byte_count / sizeof(wchar_t);
+ value->clear();
+ if (data_len > 1) {
+ // must be terminated by two null characters
+ if (data[data_len - 1] != 0 || data[data_len - 2] != 0) {
+ return E_INVALIDARG;
+ }
+
+ // put null-terminated strings into arrays
+ while (*data) {
+ std::wstring str(data);
+ value->push_back(str);
+ data += str.length() + 1;
+ }
+ }
+ return S_OK;
+}
+
+// get a std::vector<std::wstring> value from REG_MULTI_SZ type
+HRESULT RegKey::GetValue(const wchar_t* value_name,
+ std::vector<std::wstring>* value) const {
+ ASSERT(value != NULL);
+
+ DWORD byte_count = 0;
+ DWORD type = 0;
+ uint8* buffer = 0;
+
+ // first get the size of the buffer
+ HRESULT hr = GetValueHelper(value_name, &type, &buffer, &byte_count);
+ ASSERT((hr != S_OK) || (type == REG_MULTI_SZ));
+
+ if (SUCCEEDED(hr)) {
+ hr = MultiSZBytesToStringArray(buffer, byte_count, value);
+ }
+
+ return hr;
+}
+
+// Binary data Get
+HRESULT RegKey::GetValue(const wchar_t* value_name,
+ uint8** value,
+ DWORD* byte_count) const {
+ ASSERT(byte_count != NULL);
+ ASSERT(value != NULL);
+
+ DWORD type = 0;
+ HRESULT hr = GetValueHelper(value_name, &type, value, byte_count);
+ ASSERT((hr != S_OK) || (type == REG_MULTI_SZ) || (type == REG_BINARY));
+ return hr;
+}
+
+// Raw data get
+HRESULT RegKey::GetValue(const wchar_t* value_name,
+ uint8** value,
+ DWORD* byte_count,
+                         DWORD* type) const {
+ ASSERT(type != NULL);
+ ASSERT(byte_count != NULL);
+ ASSERT(value != NULL);
+
+ return GetValueHelper(value_name, type, value, byte_count);
+}
+
+// Int32 set
+HRESULT RegKey::SetValue(const wchar_t* value_name, DWORD value) const {
+ ASSERT(h_key_ != NULL);
+
+ LONG res = ::RegSetValueEx(h_key_, value_name, NULL, REG_DWORD,
+ reinterpret_cast<const uint8*>(&value),
+ sizeof(DWORD));
+ return HRESULT_FROM_WIN32(res);
+}
+
+// Int64 set
+HRESULT RegKey::SetValue(const wchar_t* value_name, DWORD64 value) const {
+ ASSERT(h_key_ != NULL);
+
+ LONG res = ::RegSetValueEx(h_key_, value_name, NULL, REG_QWORD,
+ reinterpret_cast<const uint8*>(&value),
+ sizeof(DWORD64));
+ return HRESULT_FROM_WIN32(res);
+}
+
+// String set
+HRESULT RegKey::SetValue(const wchar_t* value_name,
+ const wchar_t* value) const {
+ ASSERT(value != NULL);
+ ASSERT(h_key_ != NULL);
+
+ LONG res = ::RegSetValueEx(h_key_, value_name, NULL, REG_SZ,
+ reinterpret_cast<const uint8*>(value),
+ (lstrlen(value) + 1) * sizeof(wchar_t));
+ return HRESULT_FROM_WIN32(res);
+}
+
+// Binary data set
+HRESULT RegKey::SetValue(const wchar_t* value_name,
+ const uint8* value,
+ DWORD byte_count) const {
+ ASSERT(h_key_ != NULL);
+
+ // special case - if 'value' is NULL make sure byte_count is zero
+ if (value == NULL) {
+ byte_count = 0;
+ }
+
+ LONG res = ::RegSetValueEx(h_key_, value_name, NULL,
+ REG_BINARY, value, byte_count);
+ return HRESULT_FROM_WIN32(res);
+}
+
+// Raw data set
+HRESULT RegKey::SetValue(const wchar_t* value_name,
+ const uint8* value,
+ DWORD byte_count,
+ DWORD type) const {
+ ASSERT(value != NULL);
+ ASSERT(h_key_ != NULL);
+
+ LONG res = ::RegSetValueEx(h_key_, value_name, NULL, type, value, byte_count);
+ return HRESULT_FROM_WIN32(res);
+}
+
+bool RegKey::HasKey(const wchar_t* full_key_name) {
+ ASSERT(full_key_name != NULL);
+
+ // get the root HKEY
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+
+ if (h_key != NULL) {
+ RegKey key;
+ HRESULT hr = key.Open(h_key, key_name.c_str(), KEY_READ);
+ key.Close();
+ return S_OK == hr;
+ }
+ return false;
+}
+
+// static version of HasValue
+bool RegKey::HasValue(const wchar_t* full_key_name, const wchar_t* value_name) {
+ ASSERT(full_key_name != NULL);
+
+ bool has_value = false;
+ // get the root HKEY
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+
+ if (h_key != NULL) {
+ RegKey key;
+ if (key.Open(h_key, key_name.c_str(), KEY_READ) == S_OK) {
+ has_value = key.HasValue(value_name);
+ key.Close();
+ }
+ }
+ return has_value;
+}
+
+HRESULT RegKey::GetValueType(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD* value_type) {
+ ASSERT(full_key_name != NULL);
+ ASSERT(value_type != NULL);
+
+ *value_type = REG_NONE;
+
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+
+ RegKey key;
+ HRESULT hr = key.Open(h_key, key_name.c_str(), KEY_READ);
+ if (SUCCEEDED(hr)) {
+ LONG res = ::SHQueryValueEx(key.h_key_, value_name, NULL, value_type,
+ NULL, NULL);
+ if (res != ERROR_SUCCESS) {
+ hr = HRESULT_FROM_WIN32(res);
+ }
+ }
+
+ return hr;
+}
+
+HRESULT RegKey::DeleteKey(const wchar_t* full_key_name) {
+ ASSERT(full_key_name != NULL);
+
+ return DeleteKey(full_key_name, true);
+}
+
+HRESULT RegKey::DeleteKey(const wchar_t* full_key_name, bool recursively) {
+ ASSERT(full_key_name != NULL);
+
+ // need to open the parent key first
+ // get the root HKEY
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+
+ // get the parent key
+ std::wstring parent_key(GetParentKeyInfo(&key_name));
+
+ RegKey key;
+ HRESULT hr = key.Open(h_key, parent_key.c_str());
+
+ if (hr == S_OK) {
+ hr = recursively ? key.RecurseDeleteSubKey(key_name.c_str())
+ : key.DeleteSubKey(key_name.c_str());
+ } else if (hr == HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND) ||
+ hr == HRESULT_FROM_WIN32(ERROR_PATH_NOT_FOUND)) {
+ hr = S_FALSE;
+ }
+
+ key.Close();
+ return hr;
+}
+
+HRESULT RegKey::DeleteValue(const wchar_t* full_key_name,
+ const wchar_t* value_name) {
+ ASSERT(full_key_name != NULL);
+
+ HRESULT hr = HRESULT_FROM_WIN32(ERROR_PATH_NOT_FOUND);
+ // get the root HKEY
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+
+ if (h_key != NULL) {
+ RegKey key;
+ hr = key.Open(h_key, key_name.c_str());
+ if (hr == S_OK) {
+ hr = key.DeleteValue(value_name);
+ key.Close();
+ }
+ }
+ return hr;
+}
+
+HRESULT RegKey::RecurseDeleteSubKey(const wchar_t* key_name) {
+ ASSERT(key_name != NULL);
+
+ RegKey key;
+ HRESULT hr = key.Open(h_key_, key_name);
+
+ if (hr == S_OK) {
+    // enumerate all subkeys of this key and recursively delete them
+ FILETIME time = {0};
+ wchar_t key_name_buf[kMaxKeyNameChars] = {0};
+ DWORD key_name_buf_size = kMaxKeyNameChars;
+ while (hr == S_OK &&
+ ::RegEnumKeyEx(key.h_key_, 0, key_name_buf, &key_name_buf_size,
+ NULL, NULL, NULL, &time) == ERROR_SUCCESS) {
+ hr = key.RecurseDeleteSubKey(key_name_buf);
+
+ // restore the buffer size
+ key_name_buf_size = kMaxKeyNameChars;
+ }
+ // close the top key
+ key.Close();
+ }
+
+ if (hr == S_OK) {
+    // the key has no more child keys
+ // delete the key and all of its values
+ hr = DeleteSubKey(key_name);
+ }
+
+ return hr;
+}
+
+HKEY RegKey::GetRootKeyInfo(std::wstring* full_key_name) {
+ ASSERT(full_key_name != NULL);
+
+ HKEY h_key = NULL;
+ // get the root HKEY
+ size_t index = full_key_name->find(L'\\');
+ std::wstring root_key;
+
+  if (index == std::wstring::npos) {
+ root_key = *full_key_name;
+ *full_key_name = L"";
+ } else {
+ root_key = full_key_name->substr(0, index);
+ *full_key_name = full_key_name->substr(index + 1,
+ full_key_name->length() - index - 1);
+ }
+
+ for (std::wstring::iterator iter = root_key.begin();
+ iter != root_key.end(); ++iter) {
+ *iter = toupper(*iter);
+ }
+
+ if (!root_key.compare(L"HKLM") ||
+ !root_key.compare(L"HKEY_LOCAL_MACHINE")) {
+ h_key = HKEY_LOCAL_MACHINE;
+ } else if (!root_key.compare(L"HKCU") ||
+ !root_key.compare(L"HKEY_CURRENT_USER")) {
+ h_key = HKEY_CURRENT_USER;
+ } else if (!root_key.compare(L"HKU") ||
+ !root_key.compare(L"HKEY_USERS")) {
+ h_key = HKEY_USERS;
+ } else if (!root_key.compare(L"HKCR") ||
+ !root_key.compare(L"HKEY_CLASSES_ROOT")) {
+ h_key = HKEY_CLASSES_ROOT;
+ }
+
+ return h_key;
+}
+
+
+// Returns true if this key name is 'safe' for deletion
+// (doesn't specify a key root)
+bool RegKey::SafeKeyNameForDeletion(const wchar_t* key_name) {
+ ASSERT(key_name != NULL);
+ std::wstring key(key_name);
+
+ HKEY root_key = GetRootKeyInfo(&key);
+
+ if (!root_key) {
+ key = key_name;
+ }
+ if (key.empty()) {
+ return false;
+ }
+ bool found_subkey = false, backslash_found = false;
+  for (size_t i = 0; i < key.length(); ++i) {
+ if (key[i] == L'\\') {
+ backslash_found = true;
+ } else if (backslash_found) {
+ found_subkey = true;
+ break;
+ }
+ }
+ return (root_key == HKEY_USERS) ? found_subkey : true;
+}
+
+std::wstring RegKey::GetParentKeyInfo(std::wstring* key_name) {
+ ASSERT(key_name != NULL);
+
+ // get the parent key
+ size_t index = key_name->rfind(L'\\');
+ std::wstring parent_key;
+  if (index == std::wstring::npos) {
+ parent_key = L"";
+ } else {
+ parent_key = key_name->substr(0, index);
+ *key_name = key_name->substr(index + 1, key_name->length() - index - 1);
+ }
+
+ return parent_key;
+}
+
+// get the number of values for this key
+uint32 RegKey::GetValueCount() {
+ DWORD num_values = 0;
+
+ if (ERROR_SUCCESS != ::RegQueryInfoKey(
+ h_key_, // key handle
+ NULL, // buffer for class name
+ NULL, // size of class string
+ NULL, // reserved
+ NULL, // number of subkeys
+ NULL, // longest subkey size
+ NULL, // longest class string
+ &num_values, // number of values for this key
+ NULL, // longest value name
+ NULL, // longest value data
+ NULL, // security descriptor
+ NULL)) { // last write time
+ ASSERT(false);
+ }
+ return num_values;
+}
+
+// Enumerators for the value_names for this key
+
+// Called to get the value name for the given value name index.
+// Use GetValueCount() to get the total value name count for this key.
+// Returns failure if there is no value at the specified index.
+HRESULT RegKey::GetValueNameAt(int index, std::wstring* value_name,
+ DWORD* type) {
+ ASSERT(value_name != NULL);
+
+ LONG res = ERROR_SUCCESS;
+ wchar_t value_name_buf[kMaxValueNameChars] = {0};
+ DWORD value_name_buf_size = kMaxValueNameChars;
+ res = ::RegEnumValue(h_key_, index, value_name_buf, &value_name_buf_size,
+ NULL, type, NULL, NULL);
+
+ if (res == ERROR_SUCCESS) {
+ value_name->assign(value_name_buf);
+ }
+
+ return HRESULT_FROM_WIN32(res);
+}
+
+uint32 RegKey::GetSubkeyCount() {
+  // number of subkeys for this key
+ DWORD num_subkeys = 0;
+
+ if (ERROR_SUCCESS != ::RegQueryInfoKey(
+ h_key_, // key handle
+ NULL, // buffer for class name
+ NULL, // size of class string
+ NULL, // reserved
+ &num_subkeys, // number of subkeys
+ NULL, // longest subkey size
+ NULL, // longest class string
+ NULL, // number of values for this key
+ NULL, // longest value name
+ NULL, // longest value data
+ NULL, // security descriptor
+ NULL)) { // last write time
+ ASSERT(false);
+ }
+ return num_subkeys;
+}
+
+HRESULT RegKey::GetSubkeyNameAt(int index, std::wstring* key_name) {
+ ASSERT(key_name != NULL);
+
+ LONG res = ERROR_SUCCESS;
+ wchar_t key_name_buf[kMaxKeyNameChars] = {0};
+ DWORD key_name_buf_size = kMaxKeyNameChars;
+
+ res = ::RegEnumKeyEx(h_key_, index, key_name_buf, &key_name_buf_size,
+ NULL, NULL, NULL, NULL);
+
+ if (res == ERROR_SUCCESS) {
+ key_name->assign(key_name_buf);
+ }
+
+ return HRESULT_FROM_WIN32(res);
+}
+
+// Returns true if the key is empty: no subkeys and no values
+bool RegKey::IsKeyEmpty(const wchar_t* full_key_name) {
+ ASSERT(full_key_name != NULL);
+
+ bool is_empty = true;
+
+ // Get the root HKEY
+ std::wstring key_name(full_key_name);
+ HKEY h_key = GetRootKeyInfo(&key_name);
+
+ // Open the key to check
+ if (h_key != NULL) {
+ RegKey key;
+ HRESULT hr = key.Open(h_key, key_name.c_str(), KEY_READ);
+ if (SUCCEEDED(hr)) {
+ is_empty = key.GetSubkeyCount() == 0 && key.GetValueCount() == 0;
+ key.Close();
+ }
+ }
+
+ return is_empty;
+}
+
+bool AdjustCurrentProcessPrivilege(const TCHAR* privilege, bool to_enable) {
+ ASSERT(privilege != NULL);
+
+ bool ret = false;
+ HANDLE token;
+ if (::OpenProcessToken(::GetCurrentProcess(),
+ TOKEN_ADJUST_PRIVILEGES | TOKEN_QUERY, &token)) {
+ LUID luid;
+ memset(&luid, 0, sizeof(luid));
+ if (::LookupPrivilegeValue(NULL, privilege, &luid)) {
+ TOKEN_PRIVILEGES privs;
+ privs.PrivilegeCount = 1;
+ privs.Privileges[0].Luid = luid;
+ privs.Privileges[0].Attributes = to_enable ? SE_PRIVILEGE_ENABLED : 0;
+ if (::AdjustTokenPrivileges(token, FALSE, &privs, 0, NULL, 0)) {
+ ret = true;
+ } else {
+ LOG_GLE(LS_ERROR) << "AdjustTokenPrivileges failed";
+ }
+ } else {
+ LOG_GLE(LS_ERROR) << "LookupPrivilegeValue failed";
+ }
+ CloseHandle(token);
+ } else {
+ LOG_GLE(LS_ERROR) << "OpenProcessToken(GetCurrentProcess) failed";
+ }
+
+ return ret;
+}
+
+} // namespace rtc
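A minimal usage sketch of the static helpers implemented above, for illustration only (it is not part of this change). The key path L"HKCU\\Software\\Example" and the value names are placeholders; the "HKCU" prefix is parsed off by GetRootKeyInfo(), and the buffer returned by the wchar_t** getter is allocated with new[] and owned by the caller.

#include "webrtc/base/win32regkey.h"

void RegKeyStaticUsageSketch() {
  const wchar_t kKey[] = L"HKCU\\Software\\Example";  // hypothetical key

  // Write a DWORD and a string value through the static setters.
  HRESULT hr = rtc::RegKey::SetValue(kKey, L"Answer", static_cast<DWORD>(42));
  if (SUCCEEDED(hr)) {
    hr = rtc::RegKey::SetValue(kKey, L"Name", L"example");
  }

  // Read them back. The wchar_t** overload allocates with new[], so the
  // caller owns the returned buffer and must delete[] it.
  DWORD answer = 0;
  wchar_t* name = NULL;
  if (SUCCEEDED(rtc::RegKey::GetValue(kKey, L"Answer", &answer)) &&
      SUCCEEDED(rtc::RegKey::GetValue(kKey, L"Name", &name))) {
    // ... use 'answer' and 'name' ...
    delete[] name;
  }

  // Remove the illustration key again; DeleteKey() deletes recursively.
  rtc::RegKey::DeleteKey(kKey);
}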
diff --git a/chromium/third_party/webrtc/base/win32regkey.h b/chromium/third_party/webrtc/base/win32regkey.h
new file mode 100644
index 00000000000..b33d4dc2b39
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32regkey.h
@@ -0,0 +1,337 @@
+/*
+ * Copyright 2003 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Registry configuration wrapper class
+//
+// Offers static functions for convenient,
+// fast access to individual values.
+//
+// Also provides a wrapper class for efficient
+// batch operations on the values of a given registry key.
+//
+
+#ifndef WEBRTC_BASE_WIN32REGKEY_H_
+#define WEBRTC_BASE_WIN32REGKEY_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/win32.h"
+
+namespace rtc {
+
+// maximum sizes of registry key and value names
+const int kMaxKeyNameChars = 255 + 1;
+const int kMaxValueNameChars = 16383 + 1;
+
+class RegKey {
+ public:
+ // constructor
+ RegKey();
+
+ // destructor
+ ~RegKey();
+
+ // create a reg key
+ HRESULT Create(HKEY parent_key, const wchar_t* key_name);
+
+ HRESULT Create(HKEY parent_key,
+ const wchar_t* key_name,
+ wchar_t* reg_class,
+ DWORD options,
+ REGSAM sam_desired,
+ LPSECURITY_ATTRIBUTES lp_sec_attr,
+ LPDWORD lp_disposition);
+
+ // open an existing reg key
+ HRESULT Open(HKEY parent_key, const wchar_t* key_name);
+
+ HRESULT Open(HKEY parent_key, const wchar_t* key_name, REGSAM sam_desired);
+
+ // close this reg key
+ HRESULT Close();
+
+ // check if the key has a specified value
+ bool HasValue(const wchar_t* value_name) const;
+
+ // get the number of values for this key
+ uint32 GetValueCount();
+
+  // Called to get the value name for the given value name index.
+  // Use GetValueCount() to get the total value name count for this key.
+  // Returns failure if there is no value at the specified index.
+  // If you modify the key while enumerating, the indexes will be out of order.
+  // Since the index order is not guaranteed, you need to restart your
+  // enumeration loop.
+  // 'type' refers to REG_DWORD, REG_QWORD, etc.
+  // 'type' can be NULL if the value type is not of interest.
+ HRESULT GetValueNameAt(int index, std::wstring* value_name, DWORD* type);
+
+ // check if the current key has the specified subkey
+ bool HasSubkey(const wchar_t* key_name) const;
+
+ // get the number of subkeys for this key
+ uint32 GetSubkeyCount();
+
+ // Called to get the key name for the given key index
+ // Use GetSubkeyCount() to get the total count for this key
+ // Returns failure if no key at the specified index
+ // If you modify the key while enumerating, the indexes will be out of order.
+ // Since the index order is not guaranteed, you need to reset your counting
+ // loop.
+ HRESULT GetSubkeyNameAt(int index, std::wstring* key_name);
+
+ // SETTERS
+
+  // set an int32 value - use when writing multiple values to a key
+ HRESULT SetValue(const wchar_t* value_name, DWORD value) const;
+
+ // set an int64 value
+ HRESULT SetValue(const wchar_t* value_name, DWORD64 value) const;
+
+ // set a string value
+ HRESULT SetValue(const wchar_t* value_name, const wchar_t* value) const;
+
+ // set binary data
+ HRESULT SetValue(const wchar_t* value_name,
+ const uint8* value,
+ DWORD byte_count) const;
+
+ // set raw data, including type
+ HRESULT SetValue(const wchar_t* value_name,
+ const uint8* value,
+ DWORD byte_count,
+ DWORD type) const;
+
+ // GETTERS
+
+ // get an int32 value
+ HRESULT GetValue(const wchar_t* value_name, DWORD* value) const;
+
+ // get an int64 value
+ HRESULT GetValue(const wchar_t* value_name, DWORD64* value) const;
+
+ // get a string value - the caller must free the return buffer
+ HRESULT GetValue(const wchar_t* value_name, wchar_t** value) const;
+
+ // get a string value
+ HRESULT GetValue(const wchar_t* value_name, std::wstring* value) const;
+
+ // get a std::vector<std::wstring> value from REG_MULTI_SZ type
+ HRESULT GetValue(const wchar_t* value_name,
+ std::vector<std::wstring>* value) const;
+
+ // get binary data - the caller must free the return buffer
+ HRESULT GetValue(const wchar_t* value_name,
+ uint8** value,
+ DWORD* byte_count) const;
+
+ // get raw data, including type - the caller must free the return buffer
+ HRESULT GetValue(const wchar_t* value_name,
+ uint8** value,
+ DWORD* byte_count,
+ DWORD* type) const;
+
+ // STATIC VERSIONS
+
+ // flush
+ static HRESULT FlushKey(const wchar_t* full_key_name);
+
+ // check if a key exists
+ static bool HasKey(const wchar_t* full_key_name);
+
+ // check if the key has a specified value
+ static bool HasValue(const wchar_t* full_key_name, const wchar_t* value_name);
+
+ // SETTERS
+
+ // STATIC int32 set
+ static HRESULT SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD value);
+
+ // STATIC int64 set
+ static HRESULT SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD64 value);
+
+ // STATIC float set
+ static HRESULT SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ float value);
+
+ // STATIC double set
+ static HRESULT SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ double value);
+
+ // STATIC string set
+ static HRESULT SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ const wchar_t* value);
+
+ // STATIC binary data set
+ static HRESULT SetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ const uint8* value,
+ DWORD byte_count);
+
+ // STATIC multi-string set
+ static HRESULT SetValueMultiSZ(const wchar_t* full_key_name,
+ const TCHAR* value_name,
+ const uint8* value,
+ DWORD byte_count);
+
+ // GETTERS
+
+ // STATIC int32 get
+ static HRESULT GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD* value);
+
+ // STATIC int64 get
+ //
+ // Note: if you are using time64 you should
+ // likely use GetLimitedTimeValue (util.h) instead of this method.
+ static HRESULT GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD64* value);
+
+ // STATIC float get
+ static HRESULT GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ float* value);
+
+ // STATIC double get
+ static HRESULT GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ double* value);
+
+ // STATIC string get
+ // Note: the caller must free the return buffer for wchar_t* version
+ static HRESULT GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ wchar_t** value);
+ static HRESULT GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ std::wstring* value);
+
+ // STATIC REG_MULTI_SZ get
+ static HRESULT GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ std::vector<std::wstring>* value);
+
+ // STATIC get binary data - the caller must free the return buffer
+ static HRESULT GetValue(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ uint8** value,
+ DWORD* byte_count);
+
+ // Get type of a registry value
+ static HRESULT GetValueType(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD* value_type);
+
+ // delete a subkey of the current key (with no subkeys)
+ HRESULT DeleteSubKey(const wchar_t* key_name);
+
+ // recursively delete a sub key of the current key (and all its subkeys)
+ HRESULT RecurseDeleteSubKey(const wchar_t* key_name);
+
+ // STATIC version of delete key - handles nested keys also
+ // delete a key and all its sub-keys recursively
+ // Returns S_FALSE if key didn't exist, S_OK if deletion was successful,
+ // and failure otherwise.
+ static HRESULT DeleteKey(const wchar_t* full_key_name);
+
+ // STATIC version of delete key
+ // delete a key recursively or non-recursively
+ // Returns S_FALSE if key didn't exist, S_OK if deletion was successful,
+ // and failure otherwise.
+ static HRESULT DeleteKey(const wchar_t* full_key_name, bool recursive);
+
+ // delete the specified value
+ HRESULT DeleteValue(const wchar_t* value_name);
+
+ // STATIC version of delete value
+ // Returns S_FALSE if key didn't exist, S_OK if deletion was successful,
+ // and failure otherwise.
+ static HRESULT DeleteValue(const wchar_t* full_key_name,
+ const wchar_t* value_name);
+
+ // Peek inside (use a RegKey as a smart wrapper around a registry handle)
+ HKEY key() { return h_key_; }
+
+  // helper function to get the root HKEY from a full key name string
+  // modifies the argument in place to hold the remaining key path and
+  // returns the root HKEY
+  // e.g. HKLM\\Software\\Google\... returns HKEY_LOCAL_MACHINE and leaves
+  // "Software\\Google\..."
+  // Necessary for the static versions that use the full name of the reg key
+ static HKEY GetRootKeyInfo(std::wstring* full_key_name);
+
+ // Returns true if this key name is 'safe' for deletion (doesn't specify a key
+ // root)
+ static bool SafeKeyNameForDeletion(const wchar_t* key_name);
+
+ // save the key and all of its subkeys and values to a file
+ static HRESULT Save(const wchar_t* full_key_name, const wchar_t* file_name);
+
+ // restore the key and all of its subkeys and values which are saved into a
+ // file
+ static HRESULT Restore(const wchar_t* full_key_name,
+ const wchar_t* file_name);
+
+  // Returns true if the key is empty: no subkeys and no values
+ static bool IsKeyEmpty(const wchar_t* full_key_name);
+
+ private:
+
+ // helper function to get any value from the registry
+ // used when the size of the data is unknown
+ HRESULT GetValueHelper(const wchar_t* value_name,
+ DWORD* type, uint8** value,
+ DWORD* byte_count) const;
+
+  // helper function to get the parent key name and the subkey from a string
+  // modifies the argument in place to hold the leaf subkey and returns the
+  // parent key name
+  // Necessary for the static versions that use the full name of the reg key
+ static std::wstring GetParentKeyInfo(std::wstring* key_name);
+
+ // common SET Helper for the static case
+ static HRESULT SetValueStaticHelper(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD type,
+ LPVOID value,
+ DWORD byte_count = 0);
+
+ // common GET Helper for the static case
+ static HRESULT GetValueStaticHelper(const wchar_t* full_key_name,
+ const wchar_t* value_name,
+ DWORD type,
+ LPVOID value,
+ DWORD* byte_count = NULL);
+
+ // convert REG_MULTI_SZ bytes to string array
+ static HRESULT MultiSZBytesToStringArray(const uint8* buffer,
+ DWORD byte_count,
+ std::vector<std::wstring>* value);
+
+ // the HKEY for the current key
+ HKEY h_key_;
+
+ // for unittest
+ friend void RegKeyHelperFunctionsTest();
+
+ DISALLOW_EVIL_CONSTRUCTORS(RegKey);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_WIN32REGKEY_H_
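A small sketch of the instance API declared above, again illustrative rather than part of the commit: open a key read-only and walk its value names with GetValueCount()/GetValueNameAt(), honoring the caveat that indexes are only meaningful while the key is not modified. The key path is a placeholder.

#include "webrtc/base/win32regkey.h"

void DumpValueNamesSketch() {
  rtc::RegKey key;
  if (key.Open(HKEY_CURRENT_USER, L"Software\\Example", KEY_READ) != S_OK) {
    return;  // nothing to enumerate
  }
  uint32 count = key.GetValueCount();
  for (uint32 i = 0; i < count; ++i) {
    std::wstring name;
    DWORD type = 0;
    // Restart the loop if values are added or removed while enumerating,
    // since the index order is not guaranteed in that case.
    if (key.GetValueNameAt(static_cast<int>(i), &name, &type) == S_OK) {
      // ... inspect 'name' and 'type' (REG_DWORD, REG_SZ, ...) ...
    }
  }
  key.Close();
}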
diff --git a/chromium/third_party/webrtc/base/win32regkey_unittest.cc b/chromium/third_party/webrtc/base/win32regkey_unittest.cc
new file mode 100644
index 00000000000..1e77381820b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32regkey_unittest.cc
@@ -0,0 +1,590 @@
+/*
+ * Copyright 2003 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unittest for registry access API
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/win32regkey.h"
+
+namespace rtc {
+
+#ifndef EXPECT_SUCCEEDED
+#define EXPECT_SUCCEEDED(x) EXPECT_TRUE(SUCCEEDED(x))
+#endif
+
+#ifndef EXPECT_FAILED
+#define EXPECT_FAILED(x) EXPECT_TRUE(FAILED(x))
+#endif
+
+#define kBaseKey L"Software\\Google\\__TEST"
+#define kSubkeyName L"subkey_test"
+
+const wchar_t kRkey1[] = kBaseKey;
+const wchar_t kRkey1SubkeyName[] = kSubkeyName;
+const wchar_t kRkey1Subkey[] = kBaseKey L"\\" kSubkeyName;
+const wchar_t kFullRkey1[] = L"HKCU\\" kBaseKey;
+const wchar_t kFullRkey1Subkey[] = L"HKCU\\" kBaseKey L"\\" kSubkeyName;
+
+const wchar_t kValNameInt[] = L"Int32 Value";
+const DWORD kIntVal = 20;
+const DWORD kIntVal2 = 30;
+
+const wchar_t kValNameInt64[] = L"Int64 Value";
+const DWORD64 kIntVal64 = 119600064000000000uI64;
+
+const wchar_t kValNameFloat[] = L"Float Value";
+const float kFloatVal = 12.3456789f;
+
+const wchar_t kValNameDouble[] = L"Double Value";
+const double kDoubleVal = 98.7654321;
+
+const wchar_t kValNameStr[] = L"Str Value";
+const wchar_t kStrVal[] = L"Some string data 1";
+const wchar_t kStrVal2[] = L"Some string data 2";
+
+const wchar_t kValNameBinary[] = L"Binary Value";
+const char kBinaryVal[] = "Some binary data abcdefghi 1";
+const char kBinaryVal2[] = "Some binary data abcdefghi 2";
+
+const wchar_t kValNameMultiStr[] = L"MultiStr Value";
+const wchar_t kMultiSZ[] = L"abc\0def\0P12345\0";
+const wchar_t kEmptyMultiSZ[] = L"";
+const wchar_t kInvalidMultiSZ[] = {L'6', L'7', L'8'};
+
+// friend function of RegKey
+void RegKeyHelperFunctionsTest() {
+ // Try out some dud values
+ std::wstring temp_key = L"";
+ EXPECT_TRUE(RegKey::GetRootKeyInfo(&temp_key) == NULL);
+ EXPECT_STREQ(temp_key.c_str(), L"");
+
+ temp_key = L"a";
+ EXPECT_TRUE(RegKey::GetRootKeyInfo(&temp_key) == NULL);
+ EXPECT_STREQ(temp_key.c_str(), L"");
+
+ // The basics
+ temp_key = L"HKLM\\a";
+ EXPECT_EQ(RegKey::GetRootKeyInfo(&temp_key), HKEY_LOCAL_MACHINE);
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ temp_key = L"HKEY_LOCAL_MACHINE\\a";
+ EXPECT_EQ(RegKey::GetRootKeyInfo(&temp_key), HKEY_LOCAL_MACHINE);
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ temp_key = L"HKCU\\a";
+ EXPECT_EQ(RegKey::GetRootKeyInfo(&temp_key), HKEY_CURRENT_USER);
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ temp_key = L"HKEY_CURRENT_USER\\a";
+ EXPECT_EQ(RegKey::GetRootKeyInfo(&temp_key), HKEY_CURRENT_USER);
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ temp_key = L"HKU\\a";
+ EXPECT_EQ(RegKey::GetRootKeyInfo(&temp_key), HKEY_USERS);
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ temp_key = L"HKEY_USERS\\a";
+ EXPECT_EQ(RegKey::GetRootKeyInfo(&temp_key), HKEY_USERS);
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ temp_key = L"HKCR\\a";
+ EXPECT_EQ(RegKey::GetRootKeyInfo(&temp_key), HKEY_CLASSES_ROOT);
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ temp_key = L"HKEY_CLASSES_ROOT\\a";
+ EXPECT_EQ(RegKey::GetRootKeyInfo(&temp_key), HKEY_CLASSES_ROOT);
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ // Make sure it is case insensitive
+ temp_key = L"hkcr\\a";
+ EXPECT_EQ(RegKey::GetRootKeyInfo(&temp_key), HKEY_CLASSES_ROOT);
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ temp_key = L"hkey_CLASSES_ROOT\\a";
+ EXPECT_EQ(RegKey::GetRootKeyInfo(&temp_key), HKEY_CLASSES_ROOT);
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ //
+ // Test RegKey::GetParentKeyInfo
+ //
+
+ // dud cases
+ temp_key = L"";
+ EXPECT_STREQ(RegKey::GetParentKeyInfo(&temp_key).c_str(), L"");
+ EXPECT_STREQ(temp_key.c_str(), L"");
+
+ temp_key = L"a";
+ EXPECT_STREQ(RegKey::GetParentKeyInfo(&temp_key).c_str(), L"");
+ EXPECT_STREQ(temp_key.c_str(), L"a");
+
+ temp_key = L"a\\b";
+ EXPECT_STREQ(RegKey::GetParentKeyInfo(&temp_key).c_str(), L"a");
+ EXPECT_STREQ(temp_key.c_str(), L"b");
+
+ temp_key = L"\\b";
+ EXPECT_STREQ(RegKey::GetParentKeyInfo(&temp_key).c_str(), L"");
+ EXPECT_STREQ(temp_key.c_str(), L"b");
+
+ // Some regular cases
+ temp_key = L"HKEY_CLASSES_ROOT\\moon";
+ EXPECT_STREQ(RegKey::GetParentKeyInfo(&temp_key).c_str(),
+ L"HKEY_CLASSES_ROOT");
+ EXPECT_STREQ(temp_key.c_str(), L"moon");
+
+ temp_key = L"HKEY_CLASSES_ROOT\\moon\\doggy";
+ EXPECT_STREQ(RegKey::GetParentKeyInfo(&temp_key).c_str(),
+ L"HKEY_CLASSES_ROOT\\moon");
+ EXPECT_STREQ(temp_key.c_str(), L"doggy");
+
+ //
+ // Test MultiSZBytesToStringArray
+ //
+
+ std::vector<std::wstring> result;
+ EXPECT_SUCCEEDED(RegKey::MultiSZBytesToStringArray(
+ reinterpret_cast<const uint8*>(kMultiSZ), sizeof(kMultiSZ), &result));
+ EXPECT_EQ(result.size(), 3);
+ EXPECT_STREQ(result[0].c_str(), L"abc");
+ EXPECT_STREQ(result[1].c_str(), L"def");
+ EXPECT_STREQ(result[2].c_str(), L"P12345");
+
+ EXPECT_SUCCEEDED(RegKey::MultiSZBytesToStringArray(
+ reinterpret_cast<const uint8*>(kEmptyMultiSZ),
+ sizeof(kEmptyMultiSZ), &result));
+ EXPECT_EQ(result.size(), 0);
+ EXPECT_FALSE(SUCCEEDED(RegKey::MultiSZBytesToStringArray(
+ reinterpret_cast<const uint8*>(kInvalidMultiSZ),
+ sizeof(kInvalidMultiSZ), &result)));
+}
+
+TEST(RegKeyTest, RegKeyHelperFunctionsTest) {
+ RegKeyHelperFunctionsTest();
+}
+
+TEST(RegKeyTest, RegKeyNonStaticFunctionsTest) {
+ DWORD int_val = 0;
+ DWORD64 int64_val = 0;
+ wchar_t* str_val = NULL;
+ uint8* binary_val = NULL;
+ DWORD uint8_count = 0;
+
+ // Just in case...
+  // make sure that no test key residue is left from previous aborted runs
+ RegKey::DeleteKey(kFullRkey1);
+
+ // initial state
+ RegKey r_key;
+ EXPECT_TRUE(r_key.key() == NULL);
+
+ // create a reg key
+ EXPECT_SUCCEEDED(r_key.Create(HKEY_CURRENT_USER, kRkey1));
+
+ // do the create twice - it should return the already created one
+ EXPECT_SUCCEEDED(r_key.Create(HKEY_CURRENT_USER, kRkey1));
+
+ // now do an open - should work just fine
+ EXPECT_SUCCEEDED(r_key.Open(HKEY_CURRENT_USER, kRkey1));
+
+  // get a nonexistent value
+ EXPECT_EQ(r_key.GetValue(kValNameInt, &int_val),
+ HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND));
+
+ // set and get some values
+
+ // set an INT 32
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameInt, kIntVal));
+
+ // check that the value exists
+ EXPECT_TRUE(r_key.HasValue(kValNameInt));
+
+ // read it back
+ EXPECT_SUCCEEDED(r_key.GetValue(kValNameInt, &int_val));
+ EXPECT_EQ(int_val, kIntVal);
+
+ // set it again!
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameInt, kIntVal2));
+
+ // read it again
+ EXPECT_SUCCEEDED(r_key.GetValue(kValNameInt, &int_val));
+ EXPECT_EQ(int_val, kIntVal2);
+
+ // delete the value
+ EXPECT_SUCCEEDED(r_key.DeleteValue(kValNameInt));
+
+ // check that the value is gone
+ EXPECT_FALSE(r_key.HasValue(kValNameInt));
+
+ // set an INT 64
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameInt64, kIntVal64));
+
+ // check that the value exists
+ EXPECT_TRUE(r_key.HasValue(kValNameInt64));
+
+ // read it back
+ EXPECT_SUCCEEDED(r_key.GetValue(kValNameInt64, &int64_val));
+ EXPECT_EQ(int64_val, kIntVal64);
+
+ // delete the value
+ EXPECT_SUCCEEDED(r_key.DeleteValue(kValNameInt64));
+
+ // check that the value is gone
+ EXPECT_FALSE(r_key.HasValue(kValNameInt64));
+
+ // set a string
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameStr, kStrVal));
+
+ // check that the value exists
+ EXPECT_TRUE(r_key.HasValue(kValNameStr));
+
+ // read it back
+ EXPECT_SUCCEEDED(r_key.GetValue(kValNameStr, &str_val));
+ EXPECT_TRUE(lstrcmp(str_val, kStrVal) == 0);
+ delete[] str_val;
+
+ // set it again
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameStr, kStrVal2));
+
+ // read it again
+ EXPECT_SUCCEEDED(r_key.GetValue(kValNameStr, &str_val));
+ EXPECT_TRUE(lstrcmp(str_val, kStrVal2) == 0);
+ delete[] str_val;
+
+ // delete the value
+ EXPECT_SUCCEEDED(r_key.DeleteValue(kValNameStr));
+
+ // check that the value is gone
+  EXPECT_FALSE(r_key.HasValue(kValNameStr));
+
+ // set a binary value
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameBinary,
+ reinterpret_cast<const uint8*>(kBinaryVal), sizeof(kBinaryVal) - 1));
+
+ // check that the value exists
+ EXPECT_TRUE(r_key.HasValue(kValNameBinary));
+
+ // read it back
+ EXPECT_SUCCEEDED(r_key.GetValue(kValNameBinary, &binary_val, &uint8_count));
+ EXPECT_TRUE(memcmp(binary_val, kBinaryVal, sizeof(kBinaryVal) - 1) == 0);
+ delete[] binary_val;
+
+ // set it again
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameBinary,
+ reinterpret_cast<const uint8*>(kBinaryVal2), sizeof(kBinaryVal) - 1));
+
+ // read it again
+ EXPECT_SUCCEEDED(r_key.GetValue(kValNameBinary, &binary_val, &uint8_count));
+ EXPECT_TRUE(memcmp(binary_val, kBinaryVal2, sizeof(kBinaryVal2) - 1) == 0);
+ delete[] binary_val;
+
+ // delete the value
+ EXPECT_SUCCEEDED(r_key.DeleteValue(kValNameBinary));
+
+ // check that the value is gone
+ EXPECT_FALSE(r_key.HasValue(kValNameBinary));
+
+ // set some values and check the total count
+
+ // set an INT 32
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameInt, kIntVal));
+
+ // set an INT 64
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameInt64, kIntVal64));
+
+ // set a string
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameStr, kStrVal));
+
+ // set a binary value
+ EXPECT_SUCCEEDED(r_key.SetValue(kValNameBinary,
+ reinterpret_cast<const uint8*>(kBinaryVal), sizeof(kBinaryVal) - 1));
+
+ // get the value count
+ uint32 value_count = r_key.GetValueCount();
+ EXPECT_EQ(value_count, 4);
+
+ // check the value names
+ std::wstring value_name;
+ DWORD type = 0;
+
+ EXPECT_SUCCEEDED(r_key.GetValueNameAt(0, &value_name, &type));
+ EXPECT_STREQ(value_name.c_str(), kValNameInt);
+ EXPECT_EQ(type, REG_DWORD);
+
+ EXPECT_SUCCEEDED(r_key.GetValueNameAt(1, &value_name, &type));
+ EXPECT_STREQ(value_name.c_str(), kValNameInt64);
+ EXPECT_EQ(type, REG_QWORD);
+
+ EXPECT_SUCCEEDED(r_key.GetValueNameAt(2, &value_name, &type));
+ EXPECT_STREQ(value_name.c_str(), kValNameStr);
+ EXPECT_EQ(type, REG_SZ);
+
+ EXPECT_SUCCEEDED(r_key.GetValueNameAt(3, &value_name, &type));
+ EXPECT_STREQ(value_name.c_str(), kValNameBinary);
+ EXPECT_EQ(type, REG_BINARY);
+
+ // check that there are no more values
+ EXPECT_FAILED(r_key.GetValueNameAt(4, &value_name, &type));
+
+ uint32 subkey_count = r_key.GetSubkeyCount();
+ EXPECT_EQ(subkey_count, 0);
+
+ // now create a subkey and make sure we can get the name
+ RegKey temp_key;
+ EXPECT_SUCCEEDED(temp_key.Create(HKEY_CURRENT_USER, kRkey1Subkey));
+
+ // check the subkey exists
+ EXPECT_TRUE(r_key.HasSubkey(kRkey1SubkeyName));
+
+ // check the name
+ EXPECT_EQ(r_key.GetSubkeyCount(), 1);
+
+ std::wstring subkey_name;
+ EXPECT_SUCCEEDED(r_key.GetSubkeyNameAt(0, &subkey_name));
+ EXPECT_STREQ(subkey_name.c_str(), kRkey1SubkeyName);
+
+  // delete the subkey
+  EXPECT_SUCCEEDED(r_key.DeleteSubKey(kRkey1SubkeyName));
+
+ // close this key
+ EXPECT_SUCCEEDED(r_key.Close());
+
+ // whack the whole key
+ EXPECT_SUCCEEDED(RegKey::DeleteKey(kFullRkey1));
+}
+
+TEST(RegKeyTest, RegKeyStaticFunctionsTest) {
+ DWORD int_val = 0;
+ DWORD64 int64_val = 0;
+ float float_val = 0;
+ double double_val = 0;
+ wchar_t* str_val = NULL;
+ std::wstring wstr_val;
+ uint8* binary_val = NULL;
+ DWORD uint8_count = 0;
+
+ // Just in case...
+  // make sure that no test key residue is left from previous aborted runs
+ RegKey::DeleteKey(kFullRkey1);
+
+  // get a nonexistent value from a nonexistent key
+ EXPECT_EQ(RegKey::GetValue(kFullRkey1, kValNameInt, &int_val),
+ HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND));
+
+ // set int32
+ EXPECT_SUCCEEDED(RegKey::SetValue(kFullRkey1, kValNameInt, kIntVal));
+
+ // check that the value exists
+ EXPECT_TRUE(RegKey::HasValue(kFullRkey1, kValNameInt));
+
+  // get a nonexistent value from an existing key
+ EXPECT_EQ(RegKey::GetValue(kFullRkey1, L"bogus", &int_val),
+ HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND));
+
+ // read it back
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameInt, &int_val));
+ EXPECT_EQ(int_val, kIntVal);
+
+ // delete the value
+ EXPECT_SUCCEEDED(RegKey::DeleteValue(kFullRkey1, kValNameInt));
+
+ // check that the value is gone
+ EXPECT_FALSE(RegKey::HasValue(kFullRkey1, kValNameInt));
+
+ // set int64
+ EXPECT_SUCCEEDED(RegKey::SetValue(kFullRkey1, kValNameInt64, kIntVal64));
+
+ // check that the value exists
+ EXPECT_TRUE(RegKey::HasValue(kFullRkey1, kValNameInt64));
+
+ // read it back
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameInt64, &int64_val));
+ EXPECT_EQ(int64_val, kIntVal64);
+
+ // delete the value
+ EXPECT_SUCCEEDED(RegKey::DeleteValue(kFullRkey1, kValNameInt64));
+
+ // check that the value is gone
+ EXPECT_FALSE(RegKey::HasValue(kFullRkey1, kValNameInt64));
+
+ // set float
+ EXPECT_SUCCEEDED(RegKey::SetValue(kFullRkey1, kValNameFloat, kFloatVal));
+
+ // check that the value exists
+ EXPECT_TRUE(RegKey::HasValue(kFullRkey1, kValNameFloat));
+
+ // read it back
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameFloat, &float_val));
+ EXPECT_EQ(float_val, kFloatVal);
+
+ // delete the value
+ EXPECT_SUCCEEDED(RegKey::DeleteValue(kFullRkey1, kValNameFloat));
+
+ // check that the value is gone
+ EXPECT_FALSE(RegKey::HasValue(kFullRkey1, kValNameFloat));
+ EXPECT_FAILED(RegKey::GetValue(kFullRkey1, kValNameFloat, &float_val));
+
+ // set double
+ EXPECT_SUCCEEDED(RegKey::SetValue(kFullRkey1, kValNameDouble, kDoubleVal));
+
+ // check that the value exists
+ EXPECT_TRUE(RegKey::HasValue(kFullRkey1, kValNameDouble));
+
+ // read it back
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameDouble, &double_val));
+ EXPECT_EQ(double_val, kDoubleVal);
+
+ // delete the value
+ EXPECT_SUCCEEDED(RegKey::DeleteValue(kFullRkey1, kValNameDouble));
+
+ // check that the value is gone
+ EXPECT_FALSE(RegKey::HasValue(kFullRkey1, kValNameDouble));
+ EXPECT_FAILED(RegKey::GetValue(kFullRkey1, kValNameDouble, &double_val));
+
+ // set string
+ EXPECT_SUCCEEDED(RegKey::SetValue(kFullRkey1, kValNameStr, kStrVal));
+
+ // check that the value exists
+ EXPECT_TRUE(RegKey::HasValue(kFullRkey1, kValNameStr));
+
+ // read it back
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameStr, &str_val));
+ EXPECT_TRUE(lstrcmp(str_val, kStrVal) == 0);
+ delete[] str_val;
+
+ // read it back in std::wstring
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameStr, &wstr_val));
+ EXPECT_STREQ(wstr_val.c_str(), kStrVal);
+
+  // get a nonexistent value from an existing key
+ EXPECT_EQ(RegKey::GetValue(kFullRkey1, L"bogus", &str_val),
+ HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND));
+
+ // delete the value
+ EXPECT_SUCCEEDED(RegKey::DeleteValue(kFullRkey1, kValNameStr));
+
+ // check that the value is gone
+ EXPECT_FALSE(RegKey::HasValue(kFullRkey1, kValNameStr));
+
+ // set binary
+ EXPECT_SUCCEEDED(RegKey::SetValue(kFullRkey1, kValNameBinary,
+ reinterpret_cast<const uint8*>(kBinaryVal), sizeof(kBinaryVal)-1));
+
+ // check that the value exists
+ EXPECT_TRUE(RegKey::HasValue(kFullRkey1, kValNameBinary));
+
+ // read it back
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameBinary,
+ &binary_val, &uint8_count));
+ EXPECT_TRUE(memcmp(binary_val, kBinaryVal, sizeof(kBinaryVal)-1) == 0);
+ delete[] binary_val;
+
+ // delete the value
+ EXPECT_SUCCEEDED(RegKey::DeleteValue(kFullRkey1, kValNameBinary));
+
+ // check that the value is gone
+ EXPECT_FALSE(RegKey::HasValue(kFullRkey1, kValNameBinary));
+
+ // special case - set a binary value with length 0
+ EXPECT_SUCCEEDED(RegKey::SetValue(kFullRkey1, kValNameBinary,
+ reinterpret_cast<const uint8*>(kBinaryVal), 0));
+
+ // check that the value exists
+ EXPECT_TRUE(RegKey::HasValue(kFullRkey1, kValNameBinary));
+
+ // read it back
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameBinary,
+ &binary_val, &uint8_count));
+ EXPECT_EQ(uint8_count, 0);
+ EXPECT_TRUE(binary_val == NULL);
+ delete[] binary_val;
+
+ // delete the value
+ EXPECT_SUCCEEDED(RegKey::DeleteValue(kFullRkey1, kValNameBinary));
+
+ // check that the value is gone
+ EXPECT_FALSE(RegKey::HasValue(kFullRkey1, kValNameBinary));
+
+ // special case - set a NULL binary value
+ EXPECT_SUCCEEDED(RegKey::SetValue(kFullRkey1, kValNameBinary, NULL, 100));
+
+ // check that the value exists
+ EXPECT_TRUE(RegKey::HasValue(kFullRkey1, kValNameBinary));
+
+ // read it back
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameBinary,
+ &binary_val, &uint8_count));
+ EXPECT_EQ(uint8_count, 0);
+ EXPECT_TRUE(binary_val == NULL);
+ delete[] binary_val;
+
+ // delete the value
+ EXPECT_SUCCEEDED(RegKey::DeleteValue(kFullRkey1, kValNameBinary));
+
+ // check that the value is gone
+ EXPECT_FALSE(RegKey::HasValue(kFullRkey1, kValNameBinary));
+
+ // test read/write REG_MULTI_SZ value
+ std::vector<std::wstring> result;
+ EXPECT_SUCCEEDED(RegKey::SetValueMultiSZ(kFullRkey1, kValNameMultiStr,
+ reinterpret_cast<const uint8*>(kMultiSZ), sizeof(kMultiSZ)));
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameMultiStr, &result));
+ EXPECT_EQ(result.size(), 3);
+ EXPECT_STREQ(result[0].c_str(), L"abc");
+ EXPECT_STREQ(result[1].c_str(), L"def");
+ EXPECT_STREQ(result[2].c_str(), L"P12345");
+ EXPECT_SUCCEEDED(RegKey::SetValueMultiSZ(kFullRkey1, kValNameMultiStr,
+ reinterpret_cast<const uint8*>(kEmptyMultiSZ), sizeof(kEmptyMultiSZ)));
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameMultiStr, &result));
+ EXPECT_EQ(result.size(), 0);
+ // writing REG_MULTI_SZ value will automatically add ending null characters
+ EXPECT_SUCCEEDED(RegKey::SetValueMultiSZ(kFullRkey1, kValNameMultiStr,
+ reinterpret_cast<const uint8*>(kInvalidMultiSZ), sizeof(kInvalidMultiSZ)));
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1, kValNameMultiStr, &result));
+ EXPECT_EQ(result.size(), 1);
+ EXPECT_STREQ(result[0].c_str(), L"678");
+
+  // Run the following test only on a dev machine,
+  // because the build machine might not have admin privileges.
+#ifdef IS_PRIVATE_BUILD
+ // get a temp file name
+ wchar_t temp_path[MAX_PATH] = {0};
+ EXPECT_LT(::GetTempPath(ARRAY_SIZE(temp_path), temp_path),
+ static_cast<DWORD>(ARRAY_SIZE(temp_path)));
+ wchar_t temp_file[MAX_PATH] = {0};
+ EXPECT_NE(::GetTempFileName(temp_path, L"rkut_",
+ ::GetTickCount(), temp_file), 0);
+
+ // test save
+ EXPECT_SUCCEEDED(RegKey::SetValue(kFullRkey1Subkey, kValNameInt, kIntVal));
+ EXPECT_SUCCEEDED(RegKey::SetValue(kFullRkey1Subkey, kValNameInt64, kIntVal64));
+ EXPECT_SUCCEEDED(RegKey::Save(kFullRkey1Subkey, temp_file));
+ EXPECT_SUCCEEDED(RegKey::DeleteValue(kFullRkey1Subkey, kValNameInt));
+ EXPECT_SUCCEEDED(RegKey::DeleteValue(kFullRkey1Subkey, kValNameInt64));
+
+ // test restore
+ EXPECT_SUCCEEDED(RegKey::Restore(kFullRkey1Subkey, temp_file));
+ int_val = 0;
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1Subkey, kValNameInt, &int_val));
+ EXPECT_EQ(int_val, kIntVal);
+ int64_val = 0;
+ EXPECT_SUCCEEDED(RegKey::GetValue(kFullRkey1Subkey,
+ kValNameInt64,
+ &int64_val));
+ EXPECT_EQ(int64_val, kIntVal64);
+
+ // delete the temp file
+ EXPECT_EQ(TRUE, ::DeleteFile(temp_file));
+#endif
+
+ // whack the whole key
+ EXPECT_SUCCEEDED(RegKey::DeleteKey(kFullRkey1));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/win32securityerrors.cc b/chromium/third_party/webrtc/base/win32securityerrors.cc
new file mode 100644
index 00000000000..71fe466a906
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32securityerrors.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/win32.h"
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+
+extern const ConstantLabel SECURITY_ERRORS[];
+
+const ConstantLabel SECURITY_ERRORS[] = {
+ KLABEL(SEC_I_COMPLETE_AND_CONTINUE),
+ KLABEL(SEC_I_COMPLETE_NEEDED),
+ KLABEL(SEC_I_CONTEXT_EXPIRED),
+ KLABEL(SEC_I_CONTINUE_NEEDED),
+ KLABEL(SEC_I_INCOMPLETE_CREDENTIALS),
+ KLABEL(SEC_I_RENEGOTIATE),
+ KLABEL(SEC_E_CERT_EXPIRED),
+ KLABEL(SEC_E_INCOMPLETE_MESSAGE),
+ KLABEL(SEC_E_INSUFFICIENT_MEMORY),
+ KLABEL(SEC_E_INTERNAL_ERROR),
+ KLABEL(SEC_E_INVALID_HANDLE),
+ KLABEL(SEC_E_INVALID_TOKEN),
+ KLABEL(SEC_E_LOGON_DENIED),
+ KLABEL(SEC_E_NO_AUTHENTICATING_AUTHORITY),
+ KLABEL(SEC_E_NO_CREDENTIALS),
+ KLABEL(SEC_E_NOT_OWNER),
+ KLABEL(SEC_E_OK),
+ KLABEL(SEC_E_SECPKG_NOT_FOUND),
+ KLABEL(SEC_E_TARGET_UNKNOWN),
+ KLABEL(SEC_E_UNKNOWN_CREDENTIALS),
+ KLABEL(SEC_E_UNSUPPORTED_FUNCTION),
+ KLABEL(SEC_E_UNTRUSTED_ROOT),
+ KLABEL(SEC_E_WRONG_PRINCIPAL),
+ LASTLABEL
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/win32socketinit.cc b/chromium/third_party/webrtc/base/win32socketinit.cc
new file mode 100644
index 00000000000..02a6c26f409
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32socketinit.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/win32socketinit.h"
+
+#include "webrtc/base/win32.h"
+
+namespace rtc {
+
+// Please don't remove this function.
+void EnsureWinsockInit() {
+ // The default implementation uses a global initializer, so WSAStartup
+ // happens at module load time. Thus we don't need to do anything here.
+ // The hook is provided so that a client that statically links with
+ // libjingle can override it, to provide its own initialization.
+}
+
+#if defined(WEBRTC_WIN)
+class WinsockInitializer {
+ public:
+ WinsockInitializer() {
+ WSADATA wsaData;
+ WORD wVersionRequested = MAKEWORD(1, 0);
+ err_ = WSAStartup(wVersionRequested, &wsaData);
+ }
+ ~WinsockInitializer() {
+ if (!err_)
+ WSACleanup();
+ }
+ int error() {
+ return err_;
+ }
+ private:
+ int err_;
+};
+WinsockInitializer g_winsockinit;
+#endif
+
+} // namespace rtc
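Purely as an illustration of the hook described in the comment above (not part of this change): a client that statically links the library could compile its own replacement for this translation unit and perform Winsock startup lazily instead of at module load. The 2.2 version request and the lazy-init pattern are assumptions, not something the code above requires.

#include "webrtc/base/win32socketinit.h"

#include <winsock2.h>

namespace rtc {

// Hypothetical client-supplied replacement; it would substitute for the
// definition in win32socketinit.cc above.
void EnsureWinsockInit() {
  // Not thread-safe as written; a real client would add its own guard or
  // call this once during startup.
  static bool initialized = false;
  if (!initialized) {
    WSADATA wsa_data;
    if (WSAStartup(MAKEWORD(2, 2), &wsa_data) == 0) {
      initialized = true;
    }
  }
}

}  // namespace rtc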
diff --git a/chromium/third_party/webrtc/base/win32socketinit.h b/chromium/third_party/webrtc/base/win32socketinit.h
new file mode 100644
index 00000000000..46d27cba05d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32socketinit.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_WIN32SOCKETINIT_H_
+#define WEBRTC_BASE_WIN32SOCKETINIT_H_
+
+namespace rtc {
+
+void EnsureWinsockInit();
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_WIN32SOCKETINIT_H_
diff --git a/chromium/third_party/webrtc/base/win32socketserver.cc b/chromium/third_party/webrtc/base/win32socketserver.cc
new file mode 100644
index 00000000000..d0b736c58cf
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32socketserver.cc
@@ -0,0 +1,850 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/win32socketserver.h"
+#include "webrtc/base/byteorder.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/winping.h"
+#include "webrtc/base/win32window.h"
+#include <ws2tcpip.h> // NOLINT
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// Win32Socket
+///////////////////////////////////////////////////////////////////////////////
+
+// TODO: Move this to a common place where PhysicalSocketServer can
+// share it.
+// Standard MTUs
+static const uint16 PACKET_MAXIMUMS[] = {
+ 65535, // Theoretical maximum, Hyperchannel
+ 32000, // Nothing
+ 17914, // 16Mb IBM Token Ring
+ 8166, // IEEE 802.4
+ // 4464 // IEEE 802.5 (4Mb max)
+ 4352, // FDDI
+ // 2048, // Wideband Network
+ 2002, // IEEE 802.5 (4Mb recommended)
+  // 1536,  // Experimental Ethernet Networks
+ // 1500, // Ethernet, Point-to-Point (default)
+ 1492, // IEEE 802.3
+ 1006, // SLIP, ARPANET
+ // 576, // X.25 Networks
+ // 544, // DEC IP Portal
+ // 512, // NETBIOS
+ 508, // IEEE 802/Source-Rt Bridge, ARCNET
+ 296, // Point-to-Point (low delay)
+ 68, // Official minimum
+ 0, // End of list marker
+};
+
+static const int IP_HEADER_SIZE = 20u;
+static const int ICMP_HEADER_SIZE = 8u;
+static const int ICMP_PING_TIMEOUT_MILLIS = 10000u;
+
+// TODO: Enable for production builds also? Use FormatMessage?
+#ifdef _DEBUG
+LPCSTR WSAErrorToString(int error, LPCSTR *description_result) {
+ LPCSTR string = "Unspecified";
+ LPCSTR description = "Unspecified description";
+ switch (error) {
+ case ERROR_SUCCESS:
+ string = "SUCCESS";
+ description = "Operation succeeded";
+ break;
+ case WSAEWOULDBLOCK:
+ string = "WSAEWOULDBLOCK";
+ description = "Using a non-blocking socket, will notify later";
+ break;
+ case WSAEACCES:
+ string = "WSAEACCES";
+ description = "Access denied, or sharing violation";
+ break;
+ case WSAEADDRNOTAVAIL:
+ string = "WSAEADDRNOTAVAIL";
+ description = "Address is not valid in this context";
+ break;
+ case WSAENETDOWN:
+ string = "WSAENETDOWN";
+ description = "Network is down";
+ break;
+ case WSAENETUNREACH:
+ string = "WSAENETUNREACH";
+ description = "Network is up, but unreachable";
+ break;
+ case WSAENETRESET:
+ string = "WSANETRESET";
+ description = "Connection has been reset due to keep-alive activity";
+ break;
+ case WSAECONNABORTED:
+ string = "WSAECONNABORTED";
+ description = "Aborted by host";
+ break;
+ case WSAECONNRESET:
+ string = "WSAECONNRESET";
+ description = "Connection reset by host";
+ break;
+ case WSAETIMEDOUT:
+ string = "WSAETIMEDOUT";
+ description = "Timed out, host failed to respond";
+ break;
+ case WSAECONNREFUSED:
+ string = "WSAECONNREFUSED";
+ description = "Host actively refused connection";
+ break;
+ case WSAEHOSTDOWN:
+ string = "WSAEHOSTDOWN";
+ description = "Host is down";
+ break;
+ case WSAEHOSTUNREACH:
+ string = "WSAEHOSTUNREACH";
+ description = "Host is unreachable";
+ break;
+ case WSAHOST_NOT_FOUND:
+ string = "WSAHOST_NOT_FOUND";
+ description = "No such host is known";
+ break;
+ }
+ if (description_result) {
+ *description_result = description;
+ }
+ return string;
+}
+
+void ReportWSAError(LPCSTR context, int error, const SocketAddress& address) {
+ LPCSTR description_string;
+ LPCSTR error_string = WSAErrorToString(error, &description_string);
+ LOG(LS_INFO) << context << " = " << error
+ << " (" << error_string << ":" << description_string << ") ["
+ << address.ToString() << "]";
+}
+#else
+void ReportWSAError(LPCSTR context, int error, const SocketAddress& address) {}
+#endif
+
+/////////////////////////////////////////////////////////////////////////////
+// Win32Socket::EventSink
+/////////////////////////////////////////////////////////////////////////////
+
+#define WM_SOCKETNOTIFY (WM_USER + 50)
+#define WM_DNSNOTIFY (WM_USER + 51)
+
+struct Win32Socket::DnsLookup {
+ HANDLE handle;
+ uint16 port;
+ char buffer[MAXGETHOSTSTRUCT];
+};
+
+class Win32Socket::EventSink : public Win32Window {
+ public:
+ explicit EventSink(Win32Socket * parent) : parent_(parent) { }
+
+ void Dispose();
+
+ virtual bool OnMessage(UINT uMsg, WPARAM wParam, LPARAM lParam,
+ LRESULT& result);
+ virtual void OnNcDestroy();
+
+ private:
+ bool OnSocketNotify(UINT uMsg, WPARAM wParam, LPARAM lParam, LRESULT& result);
+ bool OnDnsNotify(WPARAM wParam, LPARAM lParam, LRESULT& result);
+
+ Win32Socket * parent_;
+};
+
+void Win32Socket::EventSink::Dispose() {
+ parent_ = NULL;
+ if (::IsWindow(handle())) {
+ ::DestroyWindow(handle());
+ } else {
+ delete this;
+ }
+}
+
+bool Win32Socket::EventSink::OnMessage(UINT uMsg, WPARAM wParam,
+ LPARAM lParam, LRESULT& result) {
+ switch (uMsg) {
+ case WM_SOCKETNOTIFY:
+ case WM_TIMER:
+ return OnSocketNotify(uMsg, wParam, lParam, result);
+ case WM_DNSNOTIFY:
+ return OnDnsNotify(wParam, lParam, result);
+ }
+ return false;
+}
+
+bool Win32Socket::EventSink::OnSocketNotify(UINT uMsg, WPARAM wParam,
+ LPARAM lParam, LRESULT& result) {
+ result = 0;
+
+ int wsa_event = WSAGETSELECTEVENT(lParam);
+ int wsa_error = WSAGETSELECTERROR(lParam);
+
+ // Treat connect timeouts as close notifications
+ if (uMsg == WM_TIMER) {
+ wsa_event = FD_CLOSE;
+ wsa_error = WSAETIMEDOUT;
+ }
+
+ if (parent_)
+ parent_->OnSocketNotify(static_cast<SOCKET>(wParam), wsa_event, wsa_error);
+ return true;
+}
+
+bool Win32Socket::EventSink::OnDnsNotify(WPARAM wParam, LPARAM lParam,
+ LRESULT& result) {
+ result = 0;
+
+ int error = WSAGETASYNCERROR(lParam);
+ if (parent_)
+ parent_->OnDnsNotify(reinterpret_cast<HANDLE>(wParam), error);
+ return true;
+}
+
+void Win32Socket::EventSink::OnNcDestroy() {
+ if (parent_) {
+ LOG(LS_ERROR) << "EventSink hwnd is being destroyed, but the event sink"
+ " hasn't yet been disposed.";
+ } else {
+ delete this;
+ }
+}
+
+/////////////////////////////////////////////////////////////////////////////
+// Win32Socket
+/////////////////////////////////////////////////////////////////////////////
+
+Win32Socket::Win32Socket()
+ : socket_(INVALID_SOCKET), error_(0), state_(CS_CLOSED), connect_time_(0),
+ closing_(false), close_error_(0), sink_(NULL), dns_(NULL) {
+}
+
+Win32Socket::~Win32Socket() {
+ Close();
+}
+
+bool Win32Socket::CreateT(int family, int type) {
+ Close();
+ int proto = (SOCK_DGRAM == type) ? IPPROTO_UDP : IPPROTO_TCP;
+ socket_ = ::WSASocket(family, type, proto, NULL, NULL, 0);
+ if (socket_ == INVALID_SOCKET) {
+ UpdateLastError();
+ return false;
+ }
+ if ((SOCK_DGRAM == type) && !SetAsync(FD_READ | FD_WRITE)) {
+ return false;
+ }
+ return true;
+}
+
+int Win32Socket::Attach(SOCKET s) {
+ ASSERT(socket_ == INVALID_SOCKET);
+ if (socket_ != INVALID_SOCKET)
+ return SOCKET_ERROR;
+
+ ASSERT(s != INVALID_SOCKET);
+ if (s == INVALID_SOCKET)
+ return SOCKET_ERROR;
+
+ socket_ = s;
+ state_ = CS_CONNECTED;
+
+ if (!SetAsync(FD_READ | FD_WRITE | FD_CLOSE))
+ return SOCKET_ERROR;
+
+ return 0;
+}
+
+void Win32Socket::SetTimeout(int ms) {
+ if (sink_)
+ ::SetTimer(sink_->handle(), 1, ms, 0);
+}
+
+SocketAddress Win32Socket::GetLocalAddress() const {
+ sockaddr_storage addr = {0};
+ socklen_t addrlen = sizeof(addr);
+ int result = ::getsockname(socket_, reinterpret_cast<sockaddr*>(&addr),
+ &addrlen);
+ SocketAddress address;
+ if (result >= 0) {
+ SocketAddressFromSockAddrStorage(addr, &address);
+ } else {
+ LOG(LS_WARNING) << "GetLocalAddress: unable to get local addr, socket="
+ << socket_;
+ }
+ return address;
+}
+
+SocketAddress Win32Socket::GetRemoteAddress() const {
+ sockaddr_storage addr = {0};
+ socklen_t addrlen = sizeof(addr);
+ int result = ::getpeername(socket_, reinterpret_cast<sockaddr*>(&addr),
+ &addrlen);
+ SocketAddress address;
+ if (result >= 0) {
+ SocketAddressFromSockAddrStorage(addr, &address);
+ } else {
+ LOG(LS_WARNING) << "GetRemoteAddress: unable to get remote addr, socket="
+ << socket_;
+ }
+ return address;
+}
+
+int Win32Socket::Bind(const SocketAddress& addr) {
+ ASSERT(socket_ != INVALID_SOCKET);
+ if (socket_ == INVALID_SOCKET)
+ return SOCKET_ERROR;
+
+ sockaddr_storage saddr;
+ size_t len = addr.ToSockAddrStorage(&saddr);
+ int err = ::bind(socket_,
+ reinterpret_cast<sockaddr*>(&saddr),
+ static_cast<int>(len));
+ UpdateLastError();
+ return err;
+}
+
+int Win32Socket::Connect(const SocketAddress& addr) {
+ if (state_ != CS_CLOSED) {
+ SetError(EALREADY);
+ return SOCKET_ERROR;
+ }
+
+ if (!addr.IsUnresolvedIP()) {
+ return DoConnect(addr);
+ }
+
+ LOG_F(LS_INFO) << "async dns lookup (" << addr.hostname() << ")";
+ DnsLookup * dns = new DnsLookup;
+ if (!sink_) {
+ // Explicitly create the sink ourselves here; we can't rely on SetAsync
+ // because we don't have a socket_ yet.
+ CreateSink();
+ }
+ // TODO: Replace with IPv6 compatible lookup.
+ dns->handle = WSAAsyncGetHostByName(sink_->handle(), WM_DNSNOTIFY,
+ addr.hostname().c_str(), dns->buffer,
+ sizeof(dns->buffer));
+
+ if (!dns->handle) {
+ LOG_F(LS_ERROR) << "WSAAsyncGetHostByName error: " << WSAGetLastError();
+ delete dns;
+ UpdateLastError();
+ Close();
+ return SOCKET_ERROR;
+ }
+
+ dns->port = addr.port();
+ dns_ = dns;
+ state_ = CS_CONNECTING;
+ return 0;
+}
+
+int Win32Socket::DoConnect(const SocketAddress& addr) {
+ if ((socket_ == INVALID_SOCKET) && !CreateT(addr.family(), SOCK_STREAM)) {
+ return SOCKET_ERROR;
+ }
+ if (!SetAsync(FD_READ | FD_WRITE | FD_CONNECT | FD_CLOSE)) {
+ return SOCKET_ERROR;
+ }
+
+ sockaddr_storage saddr = {0};
+ size_t len = addr.ToSockAddrStorage(&saddr);
+ connect_time_ = Time();
+ int result = connect(socket_,
+ reinterpret_cast<SOCKADDR*>(&saddr),
+ static_cast<int>(len));
+ if (result != SOCKET_ERROR) {
+ state_ = CS_CONNECTED;
+ } else {
+ int code = WSAGetLastError();
+ if (code == WSAEWOULDBLOCK) {
+ state_ = CS_CONNECTING;
+ } else {
+ ReportWSAError("WSAAsync:connect", code, addr);
+ error_ = code;
+ Close();
+ return SOCKET_ERROR;
+ }
+ }
+ addr_ = addr;
+
+ return 0;
+}
+
+int Win32Socket::GetError() const {
+ return error_;
+}
+
+void Win32Socket::SetError(int error) {
+ error_ = error;
+}
+
+Socket::ConnState Win32Socket::GetState() const {
+ return state_;
+}
+
+int Win32Socket::GetOption(Option opt, int* value) {
+ int slevel;
+ int sopt;
+ if (TranslateOption(opt, &slevel, &sopt) == -1)
+ return -1;
+
+ char* p = reinterpret_cast<char*>(value);
+  int optlen = sizeof(*value);
+ return ::getsockopt(socket_, slevel, sopt, p, &optlen);
+}
+
+int Win32Socket::SetOption(Option opt, int value) {
+ int slevel;
+ int sopt;
+ if (TranslateOption(opt, &slevel, &sopt) == -1)
+ return -1;
+
+ const char* p = reinterpret_cast<const char*>(&value);
+ return ::setsockopt(socket_, slevel, sopt, p, sizeof(value));
+}
+
+int Win32Socket::Send(const void* buffer, size_t length) {
+ int sent = ::send(socket_,
+ reinterpret_cast<const char*>(buffer),
+ static_cast<int>(length),
+ 0);
+ UpdateLastError();
+ return sent;
+}
+
+int Win32Socket::SendTo(const void* buffer, size_t length,
+ const SocketAddress& addr) {
+ sockaddr_storage saddr;
+ size_t addr_len = addr.ToSockAddrStorage(&saddr);
+ int sent = ::sendto(socket_, reinterpret_cast<const char*>(buffer),
+ static_cast<int>(length), 0,
+ reinterpret_cast<sockaddr*>(&saddr),
+ static_cast<int>(addr_len));
+ UpdateLastError();
+ return sent;
+}
+
+int Win32Socket::Recv(void* buffer, size_t length) {
+ int received = ::recv(socket_, static_cast<char*>(buffer),
+ static_cast<int>(length), 0);
+ UpdateLastError();
+ if (closing_ && received <= static_cast<int>(length))
+ PostClosed();
+ return received;
+}
+
+int Win32Socket::RecvFrom(void* buffer, size_t length,
+ SocketAddress* out_addr) {
+ sockaddr_storage saddr;
+ socklen_t addr_len = sizeof(saddr);
+ int received = ::recvfrom(socket_, static_cast<char*>(buffer),
+ static_cast<int>(length), 0,
+ reinterpret_cast<sockaddr*>(&saddr), &addr_len);
+ UpdateLastError();
+ if (received != SOCKET_ERROR)
+ SocketAddressFromSockAddrStorage(saddr, out_addr);
+ if (closing_ && received <= static_cast<int>(length))
+ PostClosed();
+ return received;
+}
+
+int Win32Socket::Listen(int backlog) {
+ int err = ::listen(socket_, backlog);
+ if (!SetAsync(FD_ACCEPT))
+ return SOCKET_ERROR;
+
+ UpdateLastError();
+ if (err == 0)
+ state_ = CS_CONNECTING;
+ return err;
+}
+
+Win32Socket* Win32Socket::Accept(SocketAddress* out_addr) {
+ sockaddr_storage saddr;
+ socklen_t addr_len = sizeof(saddr);
+ SOCKET s = ::accept(socket_, reinterpret_cast<sockaddr*>(&saddr), &addr_len);
+ UpdateLastError();
+ if (s == INVALID_SOCKET)
+ return NULL;
+ if (out_addr)
+ SocketAddressFromSockAddrStorage(saddr, out_addr);
+ Win32Socket* socket = new Win32Socket;
+ if (0 == socket->Attach(s))
+ return socket;
+ delete socket;
+ return NULL;
+}
+
+int Win32Socket::Close() {
+ int err = 0;
+ if (socket_ != INVALID_SOCKET) {
+ err = ::closesocket(socket_);
+ socket_ = INVALID_SOCKET;
+ closing_ = false;
+ close_error_ = 0;
+ UpdateLastError();
+ }
+ if (dns_) {
+ WSACancelAsyncRequest(dns_->handle);
+ delete dns_;
+ dns_ = NULL;
+ }
+ if (sink_) {
+ sink_->Dispose();
+ sink_ = NULL;
+ }
+ addr_.Clear();
+ state_ = CS_CLOSED;
+ return err;
+}
+
+int Win32Socket::EstimateMTU(uint16* mtu) {
+ SocketAddress addr = GetRemoteAddress();
+ if (addr.IsAny()) {
+ error_ = ENOTCONN;
+ return -1;
+ }
+
+ WinPing ping;
+ if (!ping.IsValid()) {
+ error_ = EINVAL; // can't think of a better error ID
+ return -1;
+ }
+
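+  // Probe progressively smaller payloads from PACKET_MAXIMUMS until the ping
+  // no longer reports PING_TOO_LARGE; that packet size is the MTU estimate.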
+ for (int level = 0; PACKET_MAXIMUMS[level + 1] > 0; ++level) {
+ int32 size = PACKET_MAXIMUMS[level] - IP_HEADER_SIZE - ICMP_HEADER_SIZE;
+ WinPing::PingResult result = ping.Ping(addr.ipaddr(), size,
+ ICMP_PING_TIMEOUT_MILLIS, 1, false);
+ if (result == WinPing::PING_FAIL) {
+ error_ = EINVAL; // can't think of a better error ID
+ return -1;
+ }
+ if (result != WinPing::PING_TOO_LARGE) {
+ *mtu = PACKET_MAXIMUMS[level];
+ return 0;
+ }
+ }
+
+ ASSERT(false);
+ return 0;
+}
+
+void Win32Socket::CreateSink() {
+ ASSERT(NULL == sink_);
+
+ // Create window
+ sink_ = new EventSink(this);
+ sink_->Create(NULL, L"EventSink", 0, 0, 0, 0, 10, 10);
+}
+
+bool Win32Socket::SetAsync(int events) {
+ if (NULL == sink_) {
+ CreateSink();
+ ASSERT(NULL != sink_);
+ }
+
+ // start the async select
+ if (WSAAsyncSelect(socket_, sink_->handle(), WM_SOCKETNOTIFY, events)
+ == SOCKET_ERROR) {
+ UpdateLastError();
+ Close();
+ return false;
+ }
+
+ return true;
+}
+
+bool Win32Socket::HandleClosed(int close_error) {
+ // WM_CLOSE will be received before all data has been read, so we need to
+ // hold on to it until the read buffer has been drained.
+ char ch;
+ closing_ = true;
+ close_error_ = close_error;
+ return (::recv(socket_, &ch, 1, MSG_PEEK) <= 0);
+}
+
+void Win32Socket::PostClosed() {
+ // If we see that the buffer is indeed drained, then send the close.
+ closing_ = false;
+ ::PostMessage(sink_->handle(), WM_SOCKETNOTIFY,
+ socket_, WSAMAKESELECTREPLY(FD_CLOSE, close_error_));
+}
+
+void Win32Socket::UpdateLastError() {
+ error_ = WSAGetLastError();
+}
+
+int Win32Socket::TranslateOption(Option opt, int* slevel, int* sopt) {
+ switch (opt) {
+ case OPT_DONTFRAGMENT:
+ *slevel = IPPROTO_IP;
+ *sopt = IP_DONTFRAGMENT;
+ break;
+ case OPT_RCVBUF:
+ *slevel = SOL_SOCKET;
+ *sopt = SO_RCVBUF;
+ break;
+ case OPT_SNDBUF:
+ *slevel = SOL_SOCKET;
+ *sopt = SO_SNDBUF;
+ break;
+ case OPT_NODELAY:
+ *slevel = IPPROTO_TCP;
+ *sopt = TCP_NODELAY;
+ break;
+ case OPT_DSCP:
+ LOG(LS_WARNING) << "Socket::OPT_DSCP not supported.";
+ return -1;
+ default:
+ ASSERT(false);
+ return -1;
+ }
+ return 0;
+}
+
+void Win32Socket::OnSocketNotify(SOCKET socket, int event, int error) {
+ // Ignore events if we're already closed.
+ if (socket != socket_)
+ return;
+
+ error_ = error;
+ switch (event) {
+ case FD_CONNECT:
+ if (error != ERROR_SUCCESS) {
+ ReportWSAError("WSAAsync:connect notify", error, addr_);
+#ifdef _DEBUG
+ int32 duration = TimeSince(connect_time_);
+ LOG(LS_INFO) << "WSAAsync:connect error (" << duration
+ << " ms), faking close";
+#endif
+ state_ = CS_CLOSED;
+ // If you get an error connecting, close doesn't really do anything
+ // and it certainly doesn't send back any close notification, but
+ // we really only maintain a few states, so it is easiest to get
+ // back into a known state by pretending that a close happened, even
+ // though the connect event never did occur.
+ SignalCloseEvent(this, error);
+ } else {
+#ifdef _DEBUG
+ int32 duration = TimeSince(connect_time_);
+ LOG(LS_INFO) << "WSAAsync:connect (" << duration << " ms)";
+#endif
+ state_ = CS_CONNECTED;
+ SignalConnectEvent(this);
+ }
+ break;
+
+ case FD_ACCEPT:
+ case FD_READ:
+ if (error != ERROR_SUCCESS) {
+ ReportWSAError("WSAAsync:read notify", error, addr_);
+ } else {
+ SignalReadEvent(this);
+ }
+ break;
+
+ case FD_WRITE:
+ if (error != ERROR_SUCCESS) {
+ ReportWSAError("WSAAsync:write notify", error, addr_);
+ } else {
+ SignalWriteEvent(this);
+ }
+ break;
+
+ case FD_CLOSE:
+ if (HandleClosed(error)) {
+ ReportWSAError("WSAAsync:close notify", error, addr_);
+ state_ = CS_CLOSED;
+ SignalCloseEvent(this, error);
+ }
+ break;
+ }
+}
+
+void Win32Socket::OnDnsNotify(HANDLE task, int error) {
+ if (!dns_ || dns_->handle != task)
+ return;
+
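+  // On success, WSAAsyncGetHostByName has written a hostent into dns_->buffer;
+  // take the first (IPv4) address and convert it to host byte order.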
+ uint32 ip = 0;
+ if (error == 0) {
+ hostent* pHost = reinterpret_cast<hostent*>(dns_->buffer);
+ uint32 net_ip = *reinterpret_cast<uint32*>(pHost->h_addr_list[0]);
+ ip = NetworkToHost32(net_ip);
+ }
+
+ LOG_F(LS_INFO) << "(" << IPAddress(ip).ToSensitiveString()
+ << ", " << error << ")";
+
+ if (error == 0) {
+ SocketAddress address(ip, dns_->port);
+ error = DoConnect(address);
+ } else {
+ Close();
+ }
+
+ if (error) {
+ error_ = error;
+ SignalCloseEvent(this, error_);
+ } else {
+ delete dns_;
+ dns_ = NULL;
+ }
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// Win32SocketServer
+// Provides rtc base services on top of a Win32 GUI thread.
+///////////////////////////////////////////////////////////////////////////////
+
+static UINT s_wm_wakeup_id = 0;
+const TCHAR Win32SocketServer::kWindowName[] = L"libjingle Message Window";
+
+Win32SocketServer::Win32SocketServer(MessageQueue* message_queue)
+ : message_queue_(message_queue),
+ wnd_(this),
+ posted_(false),
+ hdlg_(NULL) {
+ if (s_wm_wakeup_id == 0)
+ s_wm_wakeup_id = RegisterWindowMessage(L"WM_WAKEUP");
+ if (!wnd_.Create(NULL, kWindowName, 0, 0, 0, 0, 0, 0)) {
+ LOG_GLE(LS_ERROR) << "Failed to create message window.";
+ }
+}
+
+Win32SocketServer::~Win32SocketServer() {
+ if (wnd_.handle() != NULL) {
+ KillTimer(wnd_.handle(), 1);
+ wnd_.Destroy();
+ }
+}
+
+Socket* Win32SocketServer::CreateSocket(int type) {
+ return CreateSocket(AF_INET, type);
+}
+
+Socket* Win32SocketServer::CreateSocket(int family, int type) {
+ return CreateAsyncSocket(family, type);
+}
+
+AsyncSocket* Win32SocketServer::CreateAsyncSocket(int type) {
+ return CreateAsyncSocket(AF_INET, type);
+}
+
+AsyncSocket* Win32SocketServer::CreateAsyncSocket(int family, int type) {
+ Win32Socket* socket = new Win32Socket;
+ if (socket->CreateT(family, type)) {
+ return socket;
+ }
+ delete socket;
+ return NULL;
+}
+
+void Win32SocketServer::SetMessageQueue(MessageQueue* queue) {
+ message_queue_ = queue;
+}
+
+bool Win32SocketServer::Wait(int cms, bool process_io) {
+ BOOL b;
+ if (process_io) {
+ // Spin the Win32 message pump at least once, and as long as requested.
+ // This is the Thread::ProcessMessages case.
+ uint32 start = Time();
+ do {
+ MSG msg;
+ SetTimer(wnd_.handle(), 0, cms, NULL);
+      // Get the next available message. If we have a modeless dialog, give
+      // the message to IsDialogMessage, which will return true if it
+ // was a message for the dialog that it handled internally.
+ // Otherwise, dispatch as usual via Translate/DispatchMessage.
+ b = GetMessage(&msg, NULL, 0, 0);
+ if (b == -1) {
+ LOG_GLE(LS_ERROR) << "GetMessage failed.";
+ return false;
+      } else if (b) {
+ if (!hdlg_ || !IsDialogMessage(hdlg_, &msg)) {
+ TranslateMessage(&msg);
+ DispatchMessage(&msg);
+ }
+ }
+ KillTimer(wnd_.handle(), 0);
+ } while (b && TimeSince(start) < cms);
+ } else if (cms != 0) {
+ // Sit and wait forever for a WakeUp. This is the Thread::Send case.
+ ASSERT(cms == -1);
+ MSG msg;
+ b = GetMessage(&msg, NULL, s_wm_wakeup_id, s_wm_wakeup_id);
+ {
+ CritScope scope(&cs_);
+ posted_ = false;
+ }
+ } else {
+ // No-op (cms == 0 && !process_io). This is the Pump case.
+ b = TRUE;
+ }
+ return (b != FALSE);
+}
+
+void Win32SocketServer::WakeUp() {
+ if (wnd_.handle()) {
+ // Set the "message pending" flag, if not already set.
+ {
+ CritScope scope(&cs_);
+ if (posted_)
+ return;
+ posted_ = true;
+ }
+
+ PostMessage(wnd_.handle(), s_wm_wakeup_id, 0, 0);
+ }
+}
+
+void Win32SocketServer::Pump() {
+ // Clear the "message pending" flag.
+ {
+ CritScope scope(&cs_);
+ posted_ = false;
+ }
+
+ // Dispatch all the messages that are currently in our queue. If new messages
+ // are posted during the dispatch, they will be handled in the next Pump.
+  // We use max(1, ...) to make sure we try to dispatch at least once, since
+  // this allows us to process "sent" messages that are not included in the
+  // size() count.
+ Message msg;
+ for (size_t max_messages_to_process = _max<size_t>(1, message_queue_->size());
+ max_messages_to_process > 0 && message_queue_->Get(&msg, 0, false);
+ --max_messages_to_process) {
+ message_queue_->Dispatch(&msg);
+ }
+
+ // Anything remaining?
+ int delay = message_queue_->GetDelay();
+ if (delay == -1) {
+ KillTimer(wnd_.handle(), 1);
+ } else {
+ SetTimer(wnd_.handle(), 1, delay, NULL);
+ }
+}
+
+bool Win32SocketServer::MessageWindow::OnMessage(UINT wm, WPARAM wp,
+ LPARAM lp, LRESULT& lr) {
+ bool handled = false;
+ if (wm == s_wm_wakeup_id || (wm == WM_TIMER && wp == 1)) {
+ ss_->Pump();
+ lr = 0;
+ handled = true;
+ }
+ return handled;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/win32socketserver.h b/chromium/third_party/webrtc/base/win32socketserver.h
new file mode 100644
index 00000000000..a03f6c028cf
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32socketserver.h
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_WIN32SOCKETSERVER_H_
+#define WEBRTC_BASE_WIN32SOCKETSERVER_H_
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/socketserver.h"
+#include "webrtc/base/socketfactory.h"
+#include "webrtc/base/socket.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/win32window.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// Win32Socket
+///////////////////////////////////////////////////////////////////////////////
+
+class Win32Socket : public AsyncSocket {
+ public:
+ Win32Socket();
+ virtual ~Win32Socket();
+
+ bool CreateT(int family, int type);
+
+ int Attach(SOCKET s);
+ void SetTimeout(int ms);
+
+ // AsyncSocket Interface
+ virtual SocketAddress GetLocalAddress() const;
+ virtual SocketAddress GetRemoteAddress() const;
+ virtual int Bind(const SocketAddress& addr);
+ virtual int Connect(const SocketAddress& addr);
+ virtual int Send(const void *buffer, size_t length);
+ virtual int SendTo(const void *buffer, size_t length, const SocketAddress& addr);
+ virtual int Recv(void *buffer, size_t length);
+ virtual int RecvFrom(void *buffer, size_t length, SocketAddress *out_addr);
+ virtual int Listen(int backlog);
+ virtual Win32Socket *Accept(SocketAddress *out_addr);
+ virtual int Close();
+ virtual int GetError() const;
+ virtual void SetError(int error);
+ virtual ConnState GetState() const;
+ virtual int EstimateMTU(uint16* mtu);
+ virtual int GetOption(Option opt, int* value);
+ virtual int SetOption(Option opt, int value);
+
+ private:
+ void CreateSink();
+ bool SetAsync(int events);
+ int DoConnect(const SocketAddress& addr);
+ bool HandleClosed(int close_error);
+ void PostClosed();
+ void UpdateLastError();
+ static int TranslateOption(Option opt, int* slevel, int* sopt);
+
+ void OnSocketNotify(SOCKET socket, int event, int error);
+ void OnDnsNotify(HANDLE task, int error);
+
+ SOCKET socket_;
+ int error_;
+ ConnState state_;
+ SocketAddress addr_; // address that we connected to (see DoConnect)
+ uint32 connect_time_;
+ bool closing_;
+ int close_error_;
+
+ class EventSink;
+ friend class EventSink;
+ EventSink * sink_;
+
+ struct DnsLookup;
+ DnsLookup * dns_;
+};
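+
+// Typical asynchronous use (illustrative sketch, not part of this change):
+// create the socket, hook the sigslot events inherited from AsyncSocket, and
+// then Connect(); completion and errors are reported via SignalConnectEvent,
+// SignalReadEvent and SignalCloseEvent.
+//
+//   Win32Socket* sock = new Win32Socket();
+//   sock->CreateT(AF_INET, SOCK_STREAM);
+//   sock->SignalConnectEvent.connect(&handler, &Handler::OnConnect);
+//   sock->SignalReadEvent.connect(&handler, &Handler::OnRead);
+//   sock->Connect(SocketAddress("example.com", 80));
+//
+// where |handler| derives from sigslot::has_slots<> and the handler methods
+// take an AsyncSocket* argument.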
+
+///////////////////////////////////////////////////////////////////////////////
+// Win32SocketServer
+///////////////////////////////////////////////////////////////////////////////
+
+class Win32SocketServer : public SocketServer {
+ public:
+ explicit Win32SocketServer(MessageQueue* message_queue);
+ virtual ~Win32SocketServer();
+
+ void set_modeless_dialog(HWND hdlg) {
+ hdlg_ = hdlg;
+ }
+
+ // SocketServer Interface
+ virtual Socket* CreateSocket(int type);
+ virtual Socket* CreateSocket(int family, int type);
+
+ virtual AsyncSocket* CreateAsyncSocket(int type);
+ virtual AsyncSocket* CreateAsyncSocket(int family, int type);
+
+ virtual void SetMessageQueue(MessageQueue* queue);
+ virtual bool Wait(int cms, bool process_io);
+ virtual void WakeUp();
+
+ void Pump();
+
+ HWND handle() { return wnd_.handle(); }
+
+ private:
+ class MessageWindow : public Win32Window {
+ public:
+ explicit MessageWindow(Win32SocketServer* ss) : ss_(ss) {}
+ private:
+ virtual bool OnMessage(UINT msg, WPARAM wp, LPARAM lp, LRESULT& result);
+ Win32SocketServer* ss_;
+ };
+
+ static const TCHAR kWindowName[];
+ MessageQueue *message_queue_;
+ MessageWindow wnd_;
+ CriticalSection cs_;
+ bool posted_;
+ HWND hdlg_;
+};
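+
+// A GUI thread typically installs this server on the current rtc::Thread so
+// that libjingle messages get pumped by the Windows message loop (illustrative
+// sketch, not part of this change):
+//
+//   rtc::Win32SocketServer server(NULL);
+//   rtc::SocketServerScope scope(&server);  // swap it in for this thread
+//   // Run the normal Win32 message loop; Pump() is driven by WM_TIMER and by
+//   // the wakeup message that WakeUp() posts to the hidden message window.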
+
+///////////////////////////////////////////////////////////////////////////////
+// Win32Thread. Automatically pumps Windows messages.
+///////////////////////////////////////////////////////////////////////////////
+
+class Win32Thread : public Thread {
+ public:
+ Win32Thread() : ss_(this), id_(0) {
+ set_socketserver(&ss_);
+ }
+ virtual ~Win32Thread() {
+ Stop();
+ set_socketserver(NULL);
+ }
+ virtual void Run() {
+ id_ = GetCurrentThreadId();
+ Thread::Run();
+ id_ = 0;
+ }
+ virtual void Quit() {
+ PostThreadMessage(id_, WM_QUIT, 0, 0);
+ }
+ private:
+ Win32SocketServer ss_;
+ DWORD id_;
+};
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_WIN
+
+#endif // WEBRTC_BASE_WIN32SOCKETSERVER_H_
diff --git a/chromium/third_party/webrtc/base/win32socketserver_unittest.cc b/chromium/third_party/webrtc/base/win32socketserver_unittest.cc
new file mode 100644
index 00000000000..1d3ef2ea37f
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32socketserver_unittest.cc
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/socket_unittest.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/win32socketserver.h"
+
+namespace rtc {
+
+// Test that Win32SocketServer::Wait works as expected.
+TEST(Win32SocketServerTest, TestWait) {
+ Win32SocketServer server(NULL);
+ uint32 start = Time();
+ server.Wait(1000, true);
+ EXPECT_GE(TimeSince(start), 1000);
+}
+
+// Test that Win32SocketServer::Pump does not touch general Windows messages.
+TEST(Win32SocketServerTest, TestPump) {
+ Win32SocketServer server(NULL);
+ SocketServerScope scope(&server);
+ EXPECT_EQ(TRUE, PostMessage(NULL, WM_USER, 999, 0));
+ server.Pump();
+ MSG msg;
+ EXPECT_EQ(TRUE, PeekMessage(&msg, NULL, WM_USER, 0, PM_REMOVE));
+ EXPECT_EQ(WM_USER, msg.message);
+ EXPECT_EQ(999, msg.wParam);
+}
+
+// Test that Win32Socket passes all the generic Socket tests.
+class Win32SocketTest : public SocketTest {
+ protected:
+ Win32SocketTest() : server_(NULL), scope_(&server_) {}
+ Win32SocketServer server_;
+ SocketServerScope scope_;
+};
+
+TEST_F(Win32SocketTest, TestConnectIPv4) {
+ SocketTest::TestConnectIPv4();
+}
+
+TEST_F(Win32SocketTest, TestConnectIPv6) {
+ SocketTest::TestConnectIPv6();
+}
+
+TEST_F(Win32SocketTest, TestConnectWithDnsLookupIPv4) {
+ SocketTest::TestConnectWithDnsLookupIPv4();
+}
+
+TEST_F(Win32SocketTest, TestConnectWithDnsLookupIPv6) {
+ SocketTest::TestConnectWithDnsLookupIPv6();
+}
+
+TEST_F(Win32SocketTest, TestConnectFailIPv4) {
+ SocketTest::TestConnectFailIPv4();
+}
+
+TEST_F(Win32SocketTest, TestConnectFailIPv6) {
+ SocketTest::TestConnectFailIPv6();
+}
+
+TEST_F(Win32SocketTest, TestConnectWithDnsLookupFailIPv4) {
+ SocketTest::TestConnectWithDnsLookupFailIPv4();
+}
+
+TEST_F(Win32SocketTest, TestConnectWithDnsLookupFailIPv6) {
+ SocketTest::TestConnectWithDnsLookupFailIPv6();
+}
+
+TEST_F(Win32SocketTest, TestConnectWithClosedSocketIPv4) {
+ SocketTest::TestConnectWithClosedSocketIPv4();
+}
+
+TEST_F(Win32SocketTest, TestConnectWithClosedSocketIPv6) {
+ SocketTest::TestConnectWithClosedSocketIPv6();
+}
+
+TEST_F(Win32SocketTest, TestConnectWhileNotClosedIPv4) {
+ SocketTest::TestConnectWhileNotClosedIPv4();
+}
+
+TEST_F(Win32SocketTest, TestConnectWhileNotClosedIPv6) {
+ SocketTest::TestConnectWhileNotClosedIPv6();
+}
+
+TEST_F(Win32SocketTest, TestServerCloseDuringConnectIPv4) {
+ SocketTest::TestServerCloseDuringConnectIPv4();
+}
+
+TEST_F(Win32SocketTest, TestServerCloseDuringConnectIPv6) {
+ SocketTest::TestServerCloseDuringConnectIPv6();
+}
+
+TEST_F(Win32SocketTest, TestClientCloseDuringConnectIPv4) {
+ SocketTest::TestClientCloseDuringConnectIPv4();
+}
+
+TEST_F(Win32SocketTest, TestClientCloseDuringConnectIPv6) {
+ SocketTest::TestClientCloseDuringConnectIPv6();
+}
+
+TEST_F(Win32SocketTest, TestServerCloseIPv4) {
+ SocketTest::TestServerCloseIPv4();
+}
+
+TEST_F(Win32SocketTest, TestServerCloseIPv6) {
+ SocketTest::TestServerCloseIPv6();
+}
+
+TEST_F(Win32SocketTest, TestCloseInClosedCallbackIPv4) {
+ SocketTest::TestCloseInClosedCallbackIPv4();
+}
+
+TEST_F(Win32SocketTest, TestCloseInClosedCallbackIPv6) {
+ SocketTest::TestCloseInClosedCallbackIPv6();
+}
+
+TEST_F(Win32SocketTest, TestSocketServerWaitIPv4) {
+ SocketTest::TestSocketServerWaitIPv4();
+}
+
+TEST_F(Win32SocketTest, TestSocketServerWaitIPv6) {
+ SocketTest::TestSocketServerWaitIPv6();
+}
+
+TEST_F(Win32SocketTest, TestTcpIPv4) {
+ SocketTest::TestTcpIPv4();
+}
+
+TEST_F(Win32SocketTest, TestTcpIPv6) {
+ SocketTest::TestTcpIPv6();
+}
+
+TEST_F(Win32SocketTest, TestUdpIPv4) {
+ SocketTest::TestUdpIPv4();
+}
+
+TEST_F(Win32SocketTest, TestUdpIPv6) {
+ SocketTest::TestUdpIPv6();
+}
+
+TEST_F(Win32SocketTest, TestGetSetOptionsIPv4) {
+ SocketTest::TestGetSetOptionsIPv4();
+}
+
+TEST_F(Win32SocketTest, TestGetSetOptionsIPv6) {
+ SocketTest::TestGetSetOptionsIPv6();
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/win32toolhelp.h b/chromium/third_party/webrtc/base/win32toolhelp.h
new file mode 100644
index 00000000000..dfafdb317f1
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32toolhelp.h
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_BASE_WIN32TOOLHELP_H_
+#define WEBRTC_BASE_WIN32TOOLHELP_H_
+
+#if !defined(WEBRTC_WIN)
+#error WEBRTC_WIN Only
+#endif
+
+#include "webrtc/base/win32.h"
+
+// Should be included first, but that causes redefinitions.
+#include <tlhelp32.h>
+
+#include "webrtc/base/constructormagic.h"
+
+namespace rtc {
+
+// The toolhelp API used to enumerate processes and their modules
+// on Windows is very repetitive and clunky to use. This little
+// template wraps it to make it a little more programmer friendly.
+//
+// Traits: a traits type that adapts the enumerator to the corresponding
+// win32 toolhelp API. Each traits class needs to:
+// - define the type of the enumerated data as a public symbol Type
+//
+// - implement bool First(HANDLE, T*), which normally calls an
+//   Xxxx32First method in the toolhelp API, e.g. Process32First(...)
+//
+// - implement bool Next(HANDLE, T*), which normally calls an
+//   Xxxx32Next method in the toolhelp API, e.g. Process32Next(...)
+//
+// - implement bool CloseHandle(HANDLE)
+//
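+// A caller typically drives an enumerator like this (illustrative sketch, not
+// part of the wrapper itself):
+//
+//   ProcessEnumerator processes;        // snapshot of all running processes
+//   while (processes.Next()) {
+//     const PROCESSENTRY32& entry = processes.current();
+//     // ... inspect entry.szExeFile, entry.th32ProcessID, ...
+//   }
+//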
+template<typename Traits>
+class ToolhelpEnumeratorBase {
+ public:
+ ToolhelpEnumeratorBase(HANDLE snapshot)
+ : snapshot_(snapshot), broken_(false), first_(true) {
+
+ // Clear out the Traits::Type structure instance.
+ Zero(&current_);
+ }
+
+ virtual ~ToolhelpEnumeratorBase() {
+ Close();
+ }
+
+  // Moves forward to the next object using the traits First and Next
+  // functions. If either First or Next ever indicates a failure, all
+  // subsequent calls to this method will fail; the enumerator
+  // object is considered broken.
+ bool Next() {
+ if (!Valid()) {
+ return false;
+ }
+
+ // Move the iteration forward.
+ current_.dwSize = sizeof(typename Traits::Type);
+ bool incr_ok = false;
+ if (first_) {
+ incr_ok = Traits::First(snapshot_, &current_);
+ first_ = false;
+ } else {
+ incr_ok = Traits::Next(snapshot_, &current_);
+ }
+
+ if (!incr_ok) {
+ Zero(&current_);
+ broken_ = true;
+ }
+
+ return incr_ok;
+ }
+
+ const typename Traits::Type& current() const {
+ return current_;
+ }
+
+ void Close() {
+ if (snapshot_ != INVALID_HANDLE_VALUE) {
+ Traits::CloseHandle(snapshot_);
+ snapshot_ = INVALID_HANDLE_VALUE;
+ }
+ }
+
+ private:
+ // Checks the state of the snapshot handle.
+ bool Valid() {
+ return snapshot_ != INVALID_HANDLE_VALUE && !broken_;
+ }
+
+ static void Zero(typename Traits::Type* buff) {
+ ZeroMemory(buff, sizeof(typename Traits::Type));
+ }
+
+ HANDLE snapshot_;
+ typename Traits::Type current_;
+ bool broken_;
+ bool first_;
+};
+
+class ToolhelpTraits {
+ public:
+ static HANDLE CreateSnapshot(uint32 flags, uint32 process_id) {
+ return CreateToolhelp32Snapshot(flags, process_id);
+ }
+
+ static bool CloseHandle(HANDLE handle) {
+ return ::CloseHandle(handle) == TRUE;
+ }
+};
+
+class ToolhelpProcessTraits : public ToolhelpTraits {
+ public:
+ typedef PROCESSENTRY32 Type;
+
+ static bool First(HANDLE handle, Type* t) {
+ return ::Process32First(handle, t) == TRUE;
+ }
+
+ static bool Next(HANDLE handle, Type* t) {
+ return ::Process32Next(handle, t) == TRUE;
+ }
+};
+
+class ProcessEnumerator : public ToolhelpEnumeratorBase<ToolhelpProcessTraits> {
+ public:
+ ProcessEnumerator()
+ : ToolhelpEnumeratorBase(
+ ToolhelpProcessTraits::CreateSnapshot(TH32CS_SNAPPROCESS, 0)) {
+ }
+
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(ProcessEnumerator);
+};
+
+class ToolhelpModuleTraits : public ToolhelpTraits {
+ public:
+ typedef MODULEENTRY32 Type;
+
+ static bool First(HANDLE handle, Type* t) {
+ return ::Module32First(handle, t) == TRUE;
+ }
+
+ static bool Next(HANDLE handle, Type* t) {
+ return ::Module32Next(handle, t) == TRUE;
+ }
+};
+
+class ModuleEnumerator : public ToolhelpEnumeratorBase<ToolhelpModuleTraits> {
+ public:
+ explicit ModuleEnumerator(uint32 process_id)
+ : ToolhelpEnumeratorBase(
+ ToolhelpModuleTraits::CreateSnapshot(TH32CS_SNAPMODULE,
+ process_id)) {
+ }
+
+ private:
+ DISALLOW_EVIL_CONSTRUCTORS(ModuleEnumerator);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_WIN32TOOLHELP_H_
diff --git a/chromium/third_party/webrtc/base/win32toolhelp_unittest.cc b/chromium/third_party/webrtc/base/win32toolhelp_unittest.cc
new file mode 100644
index 00000000000..280f2ec98d0
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32toolhelp_unittest.cc
@@ -0,0 +1,278 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/pathutils.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/win32toolhelp.h"
+
+namespace rtc {
+
+typedef struct {
+ // Required to match the toolhelp api struct 'design'.
+ DWORD dwSize;
+ int a;
+ uint32 b;
+} TestData;
+
+class Win32ToolhelpTest : public testing::Test {
+ public:
+ Win32ToolhelpTest() {
+ }
+
+ HANDLE AsHandle() {
+ return reinterpret_cast<HANDLE>(this);
+ }
+
+ static Win32ToolhelpTest* AsFixture(HANDLE handle) {
+ return reinterpret_cast<Win32ToolhelpTest*>(handle);
+ }
+
+ static bool First(HANDLE handle, TestData* d) {
+ Win32ToolhelpTest* tst = Win32ToolhelpTest::AsFixture(handle);
+    // This method should be called only once per test.
+    // If it is called more than once it returns false, which
+    // should break the test.
+ EXPECT_EQ(0, tst->first_called_); // Just to be safe.
+ if (tst->first_called_ > 0) {
+ return false;
+ }
+
+ *d = kTestData[0];
+ tst->index_ = 1;
+ ++(tst->first_called_);
+ return true;
+ }
+
+ static bool Next(HANDLE handle, TestData* d) {
+ Win32ToolhelpTest* tst = Win32ToolhelpTest::AsFixture(handle);
+ ++(tst->next_called_);
+
+ if (tst->index_ >= kTestDataSize) {
+      return false;
+ }
+
+ *d = kTestData[tst->index_];
+ ++(tst->index_);
+ return true;
+ }
+
+ static bool Fail(HANDLE handle, TestData* d) {
+ Win32ToolhelpTest* tst = Win32ToolhelpTest::AsFixture(handle);
+ ++(tst->fail_called_);
+ return false;
+ }
+
+ static bool CloseHandle(HANDLE handle) {
+ Win32ToolhelpTest* tst = Win32ToolhelpTest::AsFixture(handle);
+ ++(tst->close_handle_called_);
+ return true;
+ }
+
+ protected:
+ virtual void SetUp() {
+ fail_called_ = 0;
+ first_called_ = 0;
+ next_called_ = 0;
+ close_handle_called_ = 0;
+ index_ = 0;
+ }
+
+ static bool AllZero(const TestData& data) {
+ return data.dwSize == 0 && data.a == 0 && data.b == 0;
+ }
+
+ static bool Equals(const TestData& expected, const TestData& actual) {
+ return expected.dwSize == actual.dwSize
+ && expected.a == actual.a
+ && expected.b == actual.b;
+ }
+
+ bool CheckCallCounters(int first, int next, int fail, int close) {
+ bool match = first_called_ == first && next_called_ == next
+ && fail_called_ == fail && close_handle_called_ == close;
+
+ if (!match) {
+ LOG(LS_ERROR) << "Expected: ("
+ << first << ", "
+ << next << ", "
+ << fail << ", "
+ << close << ")";
+
+ LOG(LS_ERROR) << "Actual: ("
+ << first_called_ << ", "
+ << next_called_ << ", "
+ << fail_called_ << ", "
+ << close_handle_called_ << ")";
+ }
+ return match;
+ }
+
+ static const int kTestDataSize = 3;
+ static const TestData kTestData[];
+ int index_;
+ int first_called_;
+ int fail_called_;
+ int next_called_;
+ int close_handle_called_;
+};
+
+const TestData Win32ToolhelpTest::kTestData[] = {
+ {1, 1, 1}, {2, 2, 2}, {3, 3, 3}
+};
+
+
+class TestTraits {
+ public:
+ typedef TestData Type;
+
+ static bool First(HANDLE handle, Type* t) {
+ return Win32ToolhelpTest::First(handle, t);
+ }
+
+ static bool Next(HANDLE handle, Type* t) {
+ return Win32ToolhelpTest::Next(handle, t);
+ }
+
+ static bool CloseHandle(HANDLE handle) {
+ return Win32ToolhelpTest::CloseHandle(handle);
+ }
+};
+
+class BadFirstTraits {
+ public:
+ typedef TestData Type;
+
+ static bool First(HANDLE handle, Type* t) {
+ return Win32ToolhelpTest::Fail(handle, t);
+ }
+
+ static bool Next(HANDLE handle, Type* t) {
+ // This should never be called.
+ ADD_FAILURE();
+ return false;
+ }
+
+ static bool CloseHandle(HANDLE handle) {
+ return Win32ToolhelpTest::CloseHandle(handle);
+ }
+};
+
+class BadNextTraits {
+ public:
+ typedef TestData Type;
+
+ static bool First(HANDLE handle, Type* t) {
+ return Win32ToolhelpTest::First(handle, t);
+ }
+
+ static bool Next(HANDLE handle, Type* t) {
+ return Win32ToolhelpTest::Fail(handle, t);
+ }
+
+ static bool CloseHandle(HANDLE handle) {
+ return Win32ToolhelpTest::CloseHandle(handle);
+ }
+};
+
+// The toolhelp enumerator is normally inherited, but most of
+// these tests only exercise the methods from the
+// traits, so a typedef is used to make the
+// test code easier to read.
+typedef rtc::ToolhelpEnumeratorBase<TestTraits> EnumeratorForTest;
+
+TEST_F(Win32ToolhelpTest, TestNextWithInvalidCtorHandle) {
+ EnumeratorForTest t(INVALID_HANDLE_VALUE);
+
+ EXPECT_FALSE(t.Next());
+ EXPECT_TRUE(CheckCallCounters(0, 0, 0, 0));
+}
+
+// Tests that Next() returns false if the traits First
+// function fails.
+TEST_F(Win32ToolhelpTest, TestNextFirstFails) {
+ typedef rtc::ToolhelpEnumeratorBase<BadFirstTraits> BadEnumerator;
+ rtc::scoped_ptr<BadEnumerator> t(new BadEnumerator(AsHandle()));
+
+ // If next ever fails it shall always fail.
+ EXPECT_FALSE(t->Next());
+ EXPECT_FALSE(t->Next());
+ EXPECT_FALSE(t->Next());
+ t.reset();
+ EXPECT_TRUE(CheckCallCounters(0, 0, 1, 1));
+}
+
+// Tests that Next() returns false if the traits Next
+// function fails.
+TEST_F(Win32ToolhelpTest, TestNextNextFails) {
+ typedef rtc::ToolhelpEnumeratorBase<BadNextTraits> BadEnumerator;
+ rtc::scoped_ptr<BadEnumerator> t(new BadEnumerator(AsHandle()));
+
+ // If next ever fails it shall always fail. No more calls
+ // shall be dispatched to Next(...).
+ EXPECT_TRUE(t->Next());
+ EXPECT_FALSE(t->Next());
+ EXPECT_FALSE(t->Next());
+ t.reset();
+ EXPECT_TRUE(CheckCallCounters(1, 0, 1, 1));
+}
+
+
+// Tests that current() returns an object that is all zeros
+// if Next() hasn't been called.
+TEST_F(Win32ToolhelpTest, TestCurrentNextNotCalled) {
+ rtc::scoped_ptr<EnumeratorForTest> t(new EnumeratorForTest(AsHandle()));
+ EXPECT_TRUE(AllZero(t->current()));
+ t.reset();
+ EXPECT_TRUE(CheckCallCounters(0, 0, 0, 1));
+}
+
+// Tests the simple everything-works path through the code.
+TEST_F(Win32ToolhelpTest, TestCurrentNextCalled) {
+ rtc::scoped_ptr<EnumeratorForTest> t(new EnumeratorForTest(AsHandle()));
+
+ EXPECT_TRUE(t->Next());
+ EXPECT_TRUE(Equals(t->current(), kTestData[0]));
+ EXPECT_TRUE(t->Next());
+ EXPECT_TRUE(Equals(t->current(), kTestData[1]));
+ EXPECT_TRUE(t->Next());
+ EXPECT_TRUE(Equals(t->current(), kTestData[2]));
+ EXPECT_FALSE(t->Next());
+ t.reset();
+ EXPECT_TRUE(CheckCallCounters(1, 3, 0, 1));
+}
+
+TEST_F(Win32ToolhelpTest, TestCurrentProcess) {
+ WCHAR buf[MAX_PATH];
+ GetModuleFileName(NULL, buf, ARRAY_SIZE(buf));
+ std::wstring name = ToUtf16(Pathname(ToUtf8(buf)).filename());
+
+ rtc::ProcessEnumerator processes;
+ bool found = false;
+ while (processes.Next()) {
+ if (!name.compare(processes.current().szExeFile)) {
+ found = true;
+ break;
+ }
+ }
+ EXPECT_TRUE(found);
+
+ rtc::ModuleEnumerator modules(processes.current().th32ProcessID);
+ found = false;
+ while (modules.Next()) {
+ if (!name.compare(modules.current().szModule)) {
+ found = true;
+ break;
+ }
+ }
+ EXPECT_TRUE(found);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/win32window.cc b/chromium/third_party/webrtc/base/win32window.cc
new file mode 100644
index 00000000000..4d410140541
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32window.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/win32window.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// Win32Window
+///////////////////////////////////////////////////////////////////////////////
+
+static const wchar_t kWindowBaseClassName[] = L"WindowBaseClass";
+HINSTANCE Win32Window::instance_ = NULL;
+ATOM Win32Window::window_class_ = 0;
+
+Win32Window::Win32Window() : wnd_(NULL) {
+}
+
+Win32Window::~Win32Window() {
+ ASSERT(NULL == wnd_);
+}
+
+bool Win32Window::Create(HWND parent, const wchar_t* title, DWORD style,
+ DWORD exstyle, int x, int y, int cx, int cy) {
+ if (wnd_) {
+ // Window already exists.
+ return false;
+ }
+
+ if (!window_class_) {
+ if (!GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS |
+ GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT,
+ reinterpret_cast<LPCWSTR>(&Win32Window::WndProc),
+ &instance_)) {
+ LOG_GLE(LS_ERROR) << "GetModuleHandleEx failed";
+ return false;
+ }
+
+ // Class not registered, register it.
+ WNDCLASSEX wcex;
+ memset(&wcex, 0, sizeof(wcex));
+ wcex.cbSize = sizeof(wcex);
+ wcex.hInstance = instance_;
+ wcex.lpfnWndProc = &Win32Window::WndProc;
+ wcex.lpszClassName = kWindowBaseClassName;
+ window_class_ = ::RegisterClassEx(&wcex);
+ if (!window_class_) {
+ LOG_GLE(LS_ERROR) << "RegisterClassEx failed";
+ return false;
+ }
+ }
+ wnd_ = ::CreateWindowEx(exstyle, kWindowBaseClassName, title, style,
+ x, y, cx, cy, parent, NULL, instance_, this);
+ return (NULL != wnd_);
+}
+
+void Win32Window::Destroy() {
+ VERIFY(::DestroyWindow(wnd_) != FALSE);
+}
+
+void Win32Window::Shutdown() {
+ if (window_class_) {
+ ::UnregisterClass(MAKEINTATOM(window_class_), instance_);
+ window_class_ = 0;
+ }
+}
+
+bool Win32Window::OnMessage(UINT uMsg, WPARAM wParam, LPARAM lParam,
+ LRESULT& result) {
+ switch (uMsg) {
+ case WM_CLOSE:
+ if (!OnClose()) {
+ result = 0;
+ return true;
+ }
+ break;
+ }
+ return false;
+}
+
+LRESULT Win32Window::WndProc(HWND hwnd, UINT uMsg,
+ WPARAM wParam, LPARAM lParam) {
+ Win32Window* that = reinterpret_cast<Win32Window*>(
+ ::GetWindowLongPtr(hwnd, GWLP_USERDATA));
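+  // On WM_CREATE, the Win32Window pointer arrives via
+  // CREATESTRUCT::lpCreateParams (passed as the last argument to
+  // CreateWindowEx in Create()); stash it in the window user data so that
+  // later messages can be routed to this instance's OnMessage().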
+ if (!that && (WM_CREATE == uMsg)) {
+ CREATESTRUCT* cs = reinterpret_cast<CREATESTRUCT*>(lParam);
+ that = static_cast<Win32Window*>(cs->lpCreateParams);
+ that->wnd_ = hwnd;
+ ::SetWindowLongPtr(hwnd, GWLP_USERDATA, reinterpret_cast<LONG_PTR>(that));
+ }
+ if (that) {
+ LRESULT result;
+ bool handled = that->OnMessage(uMsg, wParam, lParam, result);
+ if (WM_DESTROY == uMsg) {
+ for (HWND child = ::GetWindow(hwnd, GW_CHILD); child;
+ child = ::GetWindow(child, GW_HWNDNEXT)) {
+ LOG(LS_INFO) << "Child window: " << static_cast<void*>(child);
+ }
+ }
+ if (WM_NCDESTROY == uMsg) {
+ ::SetWindowLongPtr(hwnd, GWLP_USERDATA, NULL);
+ that->wnd_ = NULL;
+ that->OnNcDestroy();
+ }
+ if (handled) {
+ return result;
+ }
+ }
+ return ::DefWindowProc(hwnd, uMsg, wParam, lParam);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/win32window.h b/chromium/third_party/webrtc/base/win32window.h
new file mode 100644
index 00000000000..c0ba6b23d26
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32window.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_WIN32WINDOW_H_
+#define WEBRTC_BASE_WIN32WINDOW_H_
+
+#if defined(WEBRTC_WIN)
+
+#include "webrtc/base/win32.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// Win32Window
+///////////////////////////////////////////////////////////////////////////////
+
+class Win32Window {
+ public:
+ Win32Window();
+ virtual ~Win32Window();
+
+ HWND handle() const { return wnd_; }
+
+ bool Create(HWND parent, const wchar_t* title, DWORD style, DWORD exstyle,
+ int x, int y, int cx, int cy);
+ void Destroy();
+
+ // Call this when your DLL unloads.
+ static void Shutdown();
+
+ protected:
+ virtual bool OnMessage(UINT uMsg, WPARAM wParam, LPARAM lParam,
+ LRESULT& result);
+
+ virtual bool OnClose() { return true; }
+ virtual void OnNcDestroy() { }
+
+ private:
+ static LRESULT CALLBACK WndProc(HWND hwnd, UINT uMsg, WPARAM wParam,
+ LPARAM lParam);
+
+ HWND wnd_;
+ static HINSTANCE instance_;
+ static ATOM window_class_;
+};
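+
+// Win32Window is used by subclassing and overriding OnMessage (illustrative
+// sketch, not part of this change):
+//
+//   class MyWindow : public rtc::Win32Window {
+//     virtual bool OnMessage(UINT uMsg, WPARAM wParam, LPARAM lParam,
+//                            LRESULT& result) {
+//       // Handle the message, set |result| and return true, or return false
+//       // to fall through to DefWindowProc.
+//       return false;
+//     }
+//   };
+//   MyWindow wnd;
+//   wnd.Create(NULL, L"My Window", 0, 0, 0, 0, 100, 100);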
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_WIN
+
+#endif // WEBRTC_BASE_WIN32WINDOW_H_
diff --git a/chromium/third_party/webrtc/base/win32window_unittest.cc b/chromium/third_party/webrtc/base/win32window_unittest.cc
new file mode 100644
index 00000000000..5dba67eb520
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32window_unittest.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/win32window.h"
+#include "webrtc/base/logging.h"
+
+static const LRESULT kDummyResult = 0x1234ABCD;
+
+class TestWindow : public rtc::Win32Window {
+ public:
+ TestWindow() : destroyed_(false) { memset(&msg_, 0, sizeof(msg_)); }
+ const MSG& msg() const { return msg_; }
+ bool destroyed() const { return destroyed_; }
+
+ virtual bool OnMessage(UINT uMsg, WPARAM wParam,
+ LPARAM lParam, LRESULT& result) {
+ msg_.message = uMsg;
+ msg_.wParam = wParam;
+ msg_.lParam = lParam;
+ result = kDummyResult;
+ return true;
+ }
+ virtual void OnNcDestroy() {
+ destroyed_ = true;
+ }
+
+ private:
+ MSG msg_;
+ bool destroyed_;
+};
+
+TEST(Win32WindowTest, Basics) {
+ TestWindow wnd;
+ EXPECT_TRUE(wnd.handle() == NULL);
+ EXPECT_FALSE(wnd.destroyed());
+ EXPECT_TRUE(wnd.Create(0, L"Test", 0, 0, 0, 0, 100, 100));
+ EXPECT_TRUE(wnd.handle() != NULL);
+ EXPECT_EQ(kDummyResult, ::SendMessage(wnd.handle(), WM_USER, 1, 2));
+ EXPECT_EQ(WM_USER, wnd.msg().message);
+ EXPECT_EQ(1, wnd.msg().wParam);
+ EXPECT_EQ(2, wnd.msg().lParam);
+ wnd.Destroy();
+ EXPECT_TRUE(wnd.handle() == NULL);
+ EXPECT_TRUE(wnd.destroyed());
+}
+
+TEST(Win32WindowTest, MultipleWindows) {
+ TestWindow wnd1, wnd2;
+ EXPECT_TRUE(wnd1.Create(0, L"Test", 0, 0, 0, 0, 100, 100));
+ EXPECT_TRUE(wnd2.Create(0, L"Test", 0, 0, 0, 0, 100, 100));
+ EXPECT_TRUE(wnd1.handle() != NULL);
+ EXPECT_TRUE(wnd2.handle() != NULL);
+ wnd1.Destroy();
+ wnd2.Destroy();
+ EXPECT_TRUE(wnd2.handle() == NULL);
+ EXPECT_TRUE(wnd1.handle() == NULL);
+}
diff --git a/chromium/third_party/webrtc/base/win32windowpicker.cc b/chromium/third_party/webrtc/base/win32windowpicker.cc
new file mode 100644
index 00000000000..b4550ae4a47
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32windowpicker.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/base/win32windowpicker.h"
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+
+namespace {
+
+// Window class names that we want to filter out.
+const char kProgramManagerClass[] = "Progman";
+const char kButtonClass[] = "Button";
+
+} // namespace
+
+BOOL CALLBACK Win32WindowPicker::EnumProc(HWND hwnd, LPARAM l_param) {
+ WindowDescriptionList* descriptions =
+ reinterpret_cast<WindowDescriptionList*>(l_param);
+
+ // Skip windows that are invisible, minimized, have no title, or are owned,
+ // unless they have the app window style set. Except for minimized windows,
+ // this is what Alt-Tab does.
+  // TODO: Figure out how to grab a thumbnail of a minimized window and
+  // include it in the list.
+ int len = GetWindowTextLength(hwnd);
+ HWND owner = GetWindow(hwnd, GW_OWNER);
+ LONG exstyle = GetWindowLong(hwnd, GWL_EXSTYLE);
+ if (len == 0 || IsIconic(hwnd) || !IsWindowVisible(hwnd) ||
+ (owner && !(exstyle & WS_EX_APPWINDOW))) {
+    // TODO: Investigate if windows without a title could still be interesting
+    // to share. We could use the name of the process as the title:
+ //
+ // GetWindowThreadProcessId()
+ // OpenProcess()
+ // QueryFullProcessImageName()
+ return TRUE;
+ }
+
+ // Skip the Program Manager window and the Start button.
+ TCHAR class_name_w[500];
+ ::GetClassName(hwnd, class_name_w, 500);
+ std::string class_name = ToUtf8(class_name_w);
+ if (class_name == kProgramManagerClass || class_name == kButtonClass) {
+ // We don't want the Program Manager window nor the Start button.
+ return TRUE;
+ }
+
+ TCHAR window_title[500];
+ GetWindowText(hwnd, window_title, ARRAY_SIZE(window_title));
+ std::string title = ToUtf8(window_title);
+
+ WindowId id(hwnd);
+ WindowDescription desc(id, title);
+ descriptions->push_back(desc);
+ return TRUE;
+}
+
+BOOL CALLBACK Win32WindowPicker::MonitorEnumProc(HMONITOR h_monitor,
+ HDC hdc_monitor,
+ LPRECT lprc_monitor,
+ LPARAM l_param) {
+ DesktopDescriptionList* desktop_desc =
+ reinterpret_cast<DesktopDescriptionList*>(l_param);
+
+ DesktopId id(h_monitor, static_cast<int>(desktop_desc->size()));
+ // TODO: Figure out an appropriate desktop title.
+ DesktopDescription desc(id, "");
+
+ // Determine whether it's the primary monitor.
+ MONITORINFO monitor_info = {0};
+ monitor_info.cbSize = sizeof(monitor_info);
+ bool primary = (GetMonitorInfo(h_monitor, &monitor_info) &&
+ (monitor_info.dwFlags & MONITORINFOF_PRIMARY) != 0);
+ desc.set_primary(primary);
+
+ desktop_desc->push_back(desc);
+ return TRUE;
+}
+
+Win32WindowPicker::Win32WindowPicker() {
+}
+
+bool Win32WindowPicker::Init() {
+ return true;
+}
+
+// TODO: Consider changing the enumeration to clear() descriptions
+// before appending.
+bool Win32WindowPicker::GetWindowList(WindowDescriptionList* descriptions) {
+ LPARAM desc = reinterpret_cast<LPARAM>(descriptions);
+ return EnumWindows(Win32WindowPicker::EnumProc, desc) != FALSE;
+}
+
+bool Win32WindowPicker::GetDesktopList(DesktopDescriptionList* descriptions) {
+  // Create a fresh DesktopDescriptionList so that we can use
+  // desktop_desc.size() in MonitorEnumProc to compute the desktop index.
+ DesktopDescriptionList desktop_desc;
+ HDC hdc = GetDC(NULL);
+ bool success = false;
+ if (EnumDisplayMonitors(hdc, NULL, Win32WindowPicker::MonitorEnumProc,
+ reinterpret_cast<LPARAM>(&desktop_desc)) != FALSE) {
+ // Append the desktop descriptions to the end of the returned descriptions.
+ descriptions->insert(descriptions->end(), desktop_desc.begin(),
+ desktop_desc.end());
+ success = true;
+ }
+ ReleaseDC(NULL, hdc);
+ return success;
+}
+
+bool Win32WindowPicker::GetDesktopDimensions(const DesktopId& id,
+ int* width,
+ int* height) {
+ MONITORINFOEX monitor_info;
+ monitor_info.cbSize = sizeof(MONITORINFOEX);
+ if (!GetMonitorInfo(id.id(), &monitor_info)) {
+ return false;
+ }
+ *width = monitor_info.rcMonitor.right - monitor_info.rcMonitor.left;
+ *height = monitor_info.rcMonitor.bottom - monitor_info.rcMonitor.top;
+ return true;
+}
+
+bool Win32WindowPicker::IsVisible(const WindowId& id) {
+ return (::IsWindow(id.id()) != FALSE && ::IsWindowVisible(id.id()) != FALSE);
+}
+
+bool Win32WindowPicker::MoveToFront(const WindowId& id) {
+ return SetForegroundWindow(id.id()) != FALSE;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/win32windowpicker.h b/chromium/third_party/webrtc/base/win32windowpicker.h
new file mode 100644
index 00000000000..9c84bfd9878
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32windowpicker.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_BASE_WIN32WINDOWPICKER_H_
+#define WEBRTC_BASE_WIN32WINDOWPICKER_H_
+
+#include "webrtc/base/win32.h"
+#include "webrtc/base/windowpicker.h"
+
+namespace rtc {
+
+class Win32WindowPicker : public WindowPicker {
+ public:
+ Win32WindowPicker();
+ virtual bool Init();
+ virtual bool IsVisible(const WindowId& id);
+ virtual bool MoveToFront(const WindowId& id);
+ virtual bool GetWindowList(WindowDescriptionList* descriptions);
+ virtual bool GetDesktopList(DesktopDescriptionList* descriptions);
+ virtual bool GetDesktopDimensions(const DesktopId& id, int* width,
+ int* height);
+
+ protected:
+ static BOOL CALLBACK EnumProc(HWND hwnd, LPARAM l_param);
+ static BOOL CALLBACK MonitorEnumProc(HMONITOR h_monitor,
+ HDC hdc_monitor,
+ LPRECT lprc_monitor,
+ LPARAM l_param);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_WIN32WINDOWPICKER_H_
diff --git a/chromium/third_party/webrtc/base/win32windowpicker_unittest.cc b/chromium/third_party/webrtc/base/win32windowpicker_unittest.cc
new file mode 100644
index 00000000000..71e8af6bf28
--- /dev/null
+++ b/chromium/third_party/webrtc/base/win32windowpicker_unittest.cc
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/win32window.h"
+#include "webrtc/base/win32windowpicker.h"
+#include "webrtc/base/windowpicker.h"
+
+#if !defined(WEBRTC_WIN)
+#error Only for Windows
+#endif
+
+namespace rtc {
+
+static const TCHAR* kVisibleWindowTitle = L"Visible Window";
+static const TCHAR* kInvisibleWindowTitle = L"Invisible Window";
+
+class Win32WindowPickerForTest : public Win32WindowPicker {
+ public:
+ Win32WindowPickerForTest() {
+ EXPECT_TRUE(visible_window_.Create(NULL, kVisibleWindowTitle, WS_VISIBLE,
+ 0, 0, 0, 0, 0));
+ EXPECT_TRUE(invisible_window_.Create(NULL, kInvisibleWindowTitle, 0,
+ 0, 0, 0, 0, 0));
+ }
+
+ ~Win32WindowPickerForTest() {
+ visible_window_.Destroy();
+ invisible_window_.Destroy();
+ }
+
+ virtual bool GetWindowList(WindowDescriptionList* descriptions) {
+ if (!Win32WindowPicker::EnumProc(visible_window_.handle(),
+ reinterpret_cast<LPARAM>(descriptions))) {
+ return false;
+ }
+ if (!Win32WindowPicker::EnumProc(invisible_window_.handle(),
+ reinterpret_cast<LPARAM>(descriptions))) {
+ return false;
+ }
+ return true;
+ }
+
+ Win32Window* visible_window() {
+ return &visible_window_;
+ }
+
+ Win32Window* invisible_window() {
+ return &invisible_window_;
+ }
+
+ private:
+ Win32Window visible_window_;
+ Win32Window invisible_window_;
+};
+
+TEST(Win32WindowPickerTest, TestGetWindowList) {
+ Win32WindowPickerForTest window_picker;
+ WindowDescriptionList descriptions;
+ EXPECT_TRUE(window_picker.GetWindowList(&descriptions));
+ EXPECT_EQ(1, descriptions.size());
+ WindowDescription desc = descriptions.front();
+ EXPECT_EQ(window_picker.visible_window()->handle(), desc.id().id());
+ TCHAR window_title[500];
+ GetWindowText(window_picker.visible_window()->handle(), window_title,
+ ARRAY_SIZE(window_title));
+ EXPECT_EQ(0, wcscmp(window_title, kVisibleWindowTitle));
+}
+
+TEST(Win32WindowPickerTest, TestIsVisible) {
+ Win32WindowPickerForTest window_picker;
+ HWND visible_id = window_picker.visible_window()->handle();
+ HWND invisible_id = window_picker.invisible_window()->handle();
+ EXPECT_TRUE(window_picker.IsVisible(WindowId(visible_id)));
+ EXPECT_FALSE(window_picker.IsVisible(WindowId(invisible_id)));
+}
+
+TEST(Win32WindowPickerTest, TestMoveToFront) {
+ Win32WindowPickerForTest window_picker;
+ HWND visible_id = window_picker.visible_window()->handle();
+ HWND invisible_id = window_picker.invisible_window()->handle();
+
+  // There are a number of conditions where SetForegroundWindow might
+  // fail, depending on the state of the calling process. To be on the
+  // safe side we don't expect MoveToFront to return true; we just test
+  // that we don't crash.
+ window_picker.MoveToFront(WindowId(visible_id));
+ window_picker.MoveToFront(WindowId(invisible_id));
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/window.h b/chromium/third_party/webrtc/base/window.h
new file mode 100644
index 00000000000..d9610265248
--- /dev/null
+++ b/chromium/third_party/webrtc/base/window.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_WINDOW_H_
+#define WEBRTC_BASE_WINDOW_H_
+
+#include "webrtc/base/stringencode.h"
+
+// Define platform specific window types.
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+typedef unsigned long Window; // Avoid include <X11/Xlib.h>.
+#elif defined(WEBRTC_WIN)
+// We commonly include win32.h in webrtc/base so just include it here.
+#include "webrtc/base/win32.h" // Include HWND, HMONITOR.
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+typedef unsigned int CGWindowID;
+typedef unsigned int CGDirectDisplayID;
+#endif
+
+namespace rtc {
+
+class WindowId {
+ public:
+ // Define WindowT for each platform.
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ typedef Window WindowT;
+#elif defined(WEBRTC_WIN)
+ typedef HWND WindowT;
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ typedef CGWindowID WindowT;
+#else
+ typedef unsigned int WindowT;
+#endif
+
+ static WindowId Cast(uint64 id) {
+#if defined(WEBRTC_WIN)
+ return WindowId(reinterpret_cast<WindowId::WindowT>(id));
+#else
+ return WindowId(static_cast<WindowId::WindowT>(id));
+#endif
+ }
+
+ static uint64 Format(const WindowT& id) {
+#if defined(WEBRTC_WIN)
+ return static_cast<uint64>(reinterpret_cast<uintptr_t>(id));
+#else
+ return static_cast<uint64>(id);
+#endif
+ }
+
+ WindowId() : id_(0) {}
+ WindowId(const WindowT& id) : id_(id) {} // NOLINT
+ const WindowT& id() const { return id_; }
+ bool IsValid() const { return id_ != 0; }
+ bool Equals(const WindowId& other) const {
+ return id_ == other.id();
+ }
+
+ private:
+ WindowT id_;
+};
+
+class DesktopId {
+ public:
+ // Define DesktopT for each platform.
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+ typedef Window DesktopT;
+#elif defined(WEBRTC_WIN)
+ typedef HMONITOR DesktopT;
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ typedef CGDirectDisplayID DesktopT;
+#else
+ typedef unsigned int DesktopT;
+#endif
+
+ static DesktopId Cast(int id, int index) {
+#if defined(WEBRTC_WIN)
+ return DesktopId(reinterpret_cast<DesktopId::DesktopT>(id), index);
+#else
+ return DesktopId(static_cast<DesktopId::DesktopT>(id), index);
+#endif
+ }
+
+ DesktopId() : id_(0), index_(-1) {}
+ DesktopId(const DesktopT& id, int index) // NOLINT
+ : id_(id), index_(index) {
+ }
+ const DesktopT& id() const { return id_; }
+ int index() const { return index_; }
+ bool IsValid() const { return index_ != -1; }
+ bool Equals(const DesktopId& other) const {
+ return id_ == other.id() && index_ == other.index();
+ }
+
+ private:
+ // Id is the platform specific desktop identifier.
+ DesktopT id_;
+ // Index is the desktop index as enumerated by each platform.
+ // Desktop capturer typically takes the index instead of id.
+ int index_;
+};
+
+// Window event types.
+enum WindowEvent {
+ WE_RESIZE = 0,
+ WE_CLOSE = 1,
+ WE_MINIMIZE = 2,
+ WE_RESTORE = 3,
+};
+
+inline std::string ToString(const WindowId& window) {
+ return ToString(window.id());
+}
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_WINDOW_H_
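Editorial sketch, not part of the patch: the Cast()/Format() pair above lets callers pass platform window handles around as plain 64-bit integers (for IPC or logging) and recover the native type later. A minimal illustration of that round trip, assuming only the uint64 typedef that window.h already pulls in:

#include "webrtc/base/window.h"

// Round-trips a raw 64-bit id through the platform handle type. The result
// equals |raw_id| whenever the value fits in the native WindowT.
uint64 RoundTripWindowId(uint64 raw_id) {
  rtc::WindowId id = rtc::WindowId::Cast(raw_id);
  if (!id.IsValid())
    return 0;  // 0 is the "no window" value, per IsValid().
  return rtc::WindowId::Format(id.id());
}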
diff --git a/chromium/third_party/webrtc/base/windowpicker.h b/chromium/third_party/webrtc/base/windowpicker.h
new file mode 100644
index 00000000000..3ae7b0e4915
--- /dev/null
+++ b/chromium/third_party/webrtc/base/windowpicker.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_WINDOWPICKER_H_
+#define WEBRTC_BASE_WINDOWPICKER_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/window.h"
+
+namespace rtc {
+
+class WindowDescription {
+ public:
+ WindowDescription() : id_() {}
+ WindowDescription(const WindowId& id, const std::string& title)
+ : id_(id), title_(title) {
+ }
+ const WindowId& id() const { return id_; }
+ void set_id(const WindowId& id) { id_ = id; }
+ const std::string& title() const { return title_; }
+ void set_title(const std::string& title) { title_ = title; }
+
+ private:
+ WindowId id_;
+ std::string title_;
+};
+
+class DesktopDescription {
+ public:
+ DesktopDescription() : id_() {}
+ DesktopDescription(const DesktopId& id, const std::string& title)
+ : id_(id), title_(title), primary_(false) {
+ }
+ const DesktopId& id() const { return id_; }
+ void set_id(const DesktopId& id) { id_ = id; }
+ const std::string& title() const { return title_; }
+ void set_title(const std::string& title) { title_ = title; }
+ // Indicates whether it is the primary desktop in the system.
+ bool primary() const { return primary_; }
+ void set_primary(bool primary) { primary_ = primary; }
+
+ private:
+ DesktopId id_;
+ std::string title_;
+ bool primary_;
+};
+
+typedef std::vector<WindowDescription> WindowDescriptionList;
+typedef std::vector<DesktopDescription> DesktopDescriptionList;
+
+class WindowPicker {
+ public:
+ virtual ~WindowPicker() {}
+ virtual bool Init() = 0;
+
+  // TODO: Move these two methods to window.h when we no longer need to load
+ // CoreGraphics dynamically.
+ virtual bool IsVisible(const WindowId& id) = 0;
+ virtual bool MoveToFront(const WindowId& id) = 0;
+
+  // Gets a list of window descriptions and appends to descriptions.
+ // Returns true if successful.
+ virtual bool GetWindowList(WindowDescriptionList* descriptions) = 0;
+ // Gets a list of desktop descriptions and appends to descriptions.
+ // Returns true if successful.
+ virtual bool GetDesktopList(DesktopDescriptionList* descriptions) = 0;
+ // Gets the width and height of a desktop.
+ // Returns true if successful.
+ virtual bool GetDesktopDimensions(const DesktopId& id, int* width,
+ int* height) = 0;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_WINDOWPICKER_H_
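Editorial sketch, not part of the patch: one way a caller might combine the pure-virtual methods above, raising the first visible window it finds. The concrete picker instance is assumed to come from the factory introduced further down.

#include "webrtc/base/windowpicker.h"

// Brings the first visible enumerated window to the front. Returns false if
// enumeration fails or no visible window is found.
bool RaiseFirstVisibleWindow(rtc::WindowPicker* picker) {
  rtc::WindowDescriptionList windows;
  if (!picker->GetWindowList(&windows))
    return false;
  for (size_t i = 0; i < windows.size(); ++i) {
    if (picker->IsVisible(windows[i].id()))
      return picker->MoveToFront(windows[i].id());
  }
  return false;
}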
diff --git a/chromium/third_party/webrtc/base/windowpicker_unittest.cc b/chromium/third_party/webrtc/base/windowpicker_unittest.cc
new file mode 100644
index 00000000000..edd01bc0b2c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/windowpicker_unittest.cc
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/testutils.h"
+#include "webrtc/base/window.h"
+#include "webrtc/base/windowpicker.h"
+#include "webrtc/base/windowpickerfactory.h"
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+# define DISABLE_ON_MAC(name) DISABLED_ ## name
+#else
+# define DISABLE_ON_MAC(name) name
+#endif
+
+TEST(WindowPickerTest, GetWindowList) {
+ MAYBE_SKIP_SCREENCAST_TEST();
+ if (!rtc::WindowPickerFactory::IsSupported()) {
+ LOG(LS_INFO) << "skipping test: window capturing is not supported with "
+ << "current configuration.";
+ }
+ rtc::scoped_ptr<rtc::WindowPicker> picker(
+ rtc::WindowPickerFactory::CreateWindowPicker());
+ EXPECT_TRUE(picker->Init());
+ rtc::WindowDescriptionList descriptions;
+ EXPECT_TRUE(picker->GetWindowList(&descriptions));
+}
+
+// TODO(hughv) Investigate why this fails on pulse but not locally after
+// upgrading to XCode 4.5. The failure is GetDesktopList returning FALSE.
+TEST(WindowPickerTest, DISABLE_ON_MAC(GetDesktopList)) {
+ MAYBE_SKIP_SCREENCAST_TEST();
+ if (!rtc::WindowPickerFactory::IsSupported()) {
+ LOG(LS_INFO) << "skipping test: window capturing is not supported with "
+ << "current configuration.";
+ }
+ rtc::scoped_ptr<rtc::WindowPicker> picker(
+ rtc::WindowPickerFactory::CreateWindowPicker());
+ EXPECT_TRUE(picker->Init());
+ rtc::DesktopDescriptionList descriptions;
+ EXPECT_TRUE(picker->GetDesktopList(&descriptions));
+ if (descriptions.size() > 0) {
+ int width = 0;
+ int height = 0;
+ EXPECT_TRUE(picker->GetDesktopDimensions(descriptions[0].id(), &width,
+ &height));
+ EXPECT_GT(width, 0);
+ EXPECT_GT(height, 0);
+
+    // Test |primary()|. Exactly one desktop should be the primary.
+ bool found_primary = false;
+ for (rtc::DesktopDescriptionList::iterator it = descriptions.begin();
+ it != descriptions.end(); ++it) {
+ if (it->primary()) {
+ EXPECT_FALSE(found_primary);
+ found_primary = true;
+ }
+ }
+ EXPECT_TRUE(found_primary);
+ }
+}
diff --git a/chromium/third_party/webrtc/base/windowpickerfactory.h b/chromium/third_party/webrtc/base/windowpickerfactory.h
new file mode 100644
index 00000000000..00a4cc469ed
--- /dev/null
+++ b/chromium/third_party/webrtc/base/windowpickerfactory.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_WINDOWPICKERFACTORY_H_
+#define WEBRTC_BASE_WINDOWPICKERFACTORY_H_
+
+#if defined(WEBRTC_WIN)
+#include "webrtc/base/win32windowpicker.h"
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include "webrtc/base/macutils.h"
+#include "webrtc/base/macwindowpicker.h"
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+#include "webrtc/base/linuxwindowpicker.h"
+#endif
+
+#include "webrtc/base/windowpicker.h"
+
+namespace rtc {
+
+class WindowPickerFactory {
+ public:
+ virtual ~WindowPickerFactory() {}
+
+ // Instance method for dependency injection.
+ virtual WindowPicker* Create() {
+ return CreateWindowPicker();
+ }
+
+ static WindowPicker* CreateWindowPicker() {
+#if defined(WEBRTC_WIN)
+ return new Win32WindowPicker();
+#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ return new MacWindowPicker();
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) && defined(HAVE_X11)
+ return new LinuxWindowPicker();
+#else
+ return NULL;
+#endif
+ }
+
+ static bool IsSupported() {
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ return GetOSVersionName() >= kMacOSLeopard;
+#else
+ return true;
+#endif
+ }
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_WINDOWPICKERFACTORY_H_
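Editorial sketch, not part of the patch: the intended call sequence for the factory, including the NULL that CreateWindowPicker() returns on platforms without a picker (for example a Linux build without HAVE_X11). rtc::scoped_ptr from webrtc/base/scoped_ptr.h is assumed, as in the unit test above.

#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/windowpickerfactory.h"

// Fills |desktops| with the available desktops; returns false if window
// picking is unsupported, unavailable, or fails to initialize.
bool ListDesktops(rtc::DesktopDescriptionList* desktops) {
  if (!rtc::WindowPickerFactory::IsSupported())
    return false;  // e.g. Mac releases older than Leopard.
  rtc::scoped_ptr<rtc::WindowPicker> picker(
      rtc::WindowPickerFactory::CreateWindowPicker());
  if (!picker.get() || !picker->Init())
    return false;  // NULL picker or platform-specific init failure.
  return picker->GetDesktopList(desktops);
}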
diff --git a/chromium/third_party/webrtc/base/winfirewall.cc b/chromium/third_party/webrtc/base/winfirewall.cc
new file mode 100644
index 00000000000..97e6d151814
--- /dev/null
+++ b/chromium/third_party/webrtc/base/winfirewall.cc
@@ -0,0 +1,155 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/winfirewall.h"
+
+#include "webrtc/base/win32.h"
+
+#include <comdef.h>
+#include <netfw.h>
+
+#define RELEASE(lpUnk) do { \
+ if ((lpUnk) != NULL) { \
+ (lpUnk)->Release(); \
+ (lpUnk) = NULL; \
+ } \
+} while (0)
+
+namespace rtc {
+
+//////////////////////////////////////////////////////////////////////
+// WinFirewall
+//////////////////////////////////////////////////////////////////////
+
+WinFirewall::WinFirewall() : mgr_(NULL), policy_(NULL), profile_(NULL) {
+}
+
+WinFirewall::~WinFirewall() {
+ Shutdown();
+}
+
+bool WinFirewall::Initialize(HRESULT* result) {
+ if (mgr_) {
+ if (result) {
+ *result = S_OK;
+ }
+ return true;
+ }
+
+ HRESULT hr = CoCreateInstance(__uuidof(NetFwMgr),
+ 0, CLSCTX_INPROC_SERVER,
+ __uuidof(INetFwMgr),
+ reinterpret_cast<void **>(&mgr_));
+ if (SUCCEEDED(hr) && (mgr_ != NULL))
+ hr = mgr_->get_LocalPolicy(&policy_);
+ if (SUCCEEDED(hr) && (policy_ != NULL))
+ hr = policy_->get_CurrentProfile(&profile_);
+
+ if (result)
+ *result = hr;
+ return SUCCEEDED(hr) && (profile_ != NULL);
+}
+
+void WinFirewall::Shutdown() {
+ RELEASE(profile_);
+ RELEASE(policy_);
+ RELEASE(mgr_);
+}
+
+bool WinFirewall::Enabled() const {
+ if (!profile_)
+ return false;
+
+ VARIANT_BOOL fwEnabled = VARIANT_FALSE;
+ profile_->get_FirewallEnabled(&fwEnabled);
+ return (fwEnabled != VARIANT_FALSE);
+}
+
+bool WinFirewall::QueryAuthorized(const char* filename, bool* authorized)
+ const {
+ return QueryAuthorizedW(ToUtf16(filename).c_str(), authorized);
+}
+
+bool WinFirewall::QueryAuthorizedW(const wchar_t* filename, bool* authorized)
+ const {
+ *authorized = false;
+ bool success = false;
+
+ if (!profile_)
+ return false;
+
+ _bstr_t bfilename = filename;
+
+ INetFwAuthorizedApplications* apps = NULL;
+ HRESULT hr = profile_->get_AuthorizedApplications(&apps);
+ if (SUCCEEDED(hr) && (apps != NULL)) {
+ INetFwAuthorizedApplication* app = NULL;
+ hr = apps->Item(bfilename, &app);
+ if (SUCCEEDED(hr) && (app != NULL)) {
+ VARIANT_BOOL fwEnabled = VARIANT_FALSE;
+ hr = app->get_Enabled(&fwEnabled);
+ app->Release();
+
+ if (SUCCEEDED(hr)) {
+ success = true;
+ *authorized = (fwEnabled != VARIANT_FALSE);
+ }
+ } else if (hr == HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND)) {
+ // No entry in list of authorized apps
+ success = true;
+ } else {
+ // Unexpected error
+ }
+ apps->Release();
+ }
+
+ return success;
+}
+
+bool WinFirewall::AddApplication(const char* filename,
+ const char* friendly_name,
+ bool authorized,
+ HRESULT* result) {
+ return AddApplicationW(ToUtf16(filename).c_str(),
+ ToUtf16(friendly_name).c_str(), authorized, result);
+}
+
+bool WinFirewall::AddApplicationW(const wchar_t* filename,
+ const wchar_t* friendly_name,
+ bool authorized,
+ HRESULT* result) {
+ INetFwAuthorizedApplications* apps = NULL;
+ HRESULT hr = profile_->get_AuthorizedApplications(&apps);
+ if (SUCCEEDED(hr) && (apps != NULL)) {
+ INetFwAuthorizedApplication* app = NULL;
+ hr = CoCreateInstance(__uuidof(NetFwAuthorizedApplication),
+ 0, CLSCTX_INPROC_SERVER,
+ __uuidof(INetFwAuthorizedApplication),
+ reinterpret_cast<void **>(&app));
+ if (SUCCEEDED(hr) && (app != NULL)) {
+ _bstr_t bstr = filename;
+ hr = app->put_ProcessImageFileName(bstr);
+ bstr = friendly_name;
+ if (SUCCEEDED(hr))
+ hr = app->put_Name(bstr);
+ if (SUCCEEDED(hr))
+ hr = app->put_Enabled(authorized ? VARIANT_TRUE : VARIANT_FALSE);
+ if (SUCCEEDED(hr))
+ hr = apps->Add(app);
+ app->Release();
+ }
+ apps->Release();
+ }
+ if (result)
+ *result = hr;
+ return SUCCEEDED(hr);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/winfirewall.h b/chromium/third_party/webrtc/base/winfirewall.h
new file mode 100644
index 00000000000..a74631bafc9
--- /dev/null
+++ b/chromium/third_party/webrtc/base/winfirewall.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_WINFIREWALL_H_
+#define WEBRTC_BASE_WINFIREWALL_H_
+
+#ifndef _HRESULT_DEFINED
+#define _HRESULT_DEFINED
+typedef long HRESULT;  // Can't forward-declare a typedef; don't need all the Windows headers.
+#endif // !_HRESULT_DEFINED
+
+struct INetFwMgr;
+struct INetFwPolicy;
+struct INetFwProfile;
+
+namespace rtc {
+
+//////////////////////////////////////////////////////////////////////
+// WinFirewall
+//////////////////////////////////////////////////////////////////////
+
+class WinFirewall {
+ public:
+ WinFirewall();
+ ~WinFirewall();
+
+ bool Initialize(HRESULT* result);
+ void Shutdown();
+
+ bool Enabled() const;
+ bool QueryAuthorized(const char* filename, bool* authorized) const;
+ bool QueryAuthorizedW(const wchar_t* filename, bool* authorized) const;
+
+ bool AddApplication(const char* filename, const char* friendly_name,
+ bool authorized, HRESULT* result);
+ bool AddApplicationW(const wchar_t* filename, const wchar_t* friendly_name,
+ bool authorized, HRESULT* result);
+
+ private:
+ INetFwMgr* mgr_;
+ INetFwPolicy* policy_;
+ INetFwProfile* profile_;
+};
+
+//////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_WINFIREWALL_H_
diff --git a/chromium/third_party/webrtc/base/winfirewall_unittest.cc b/chromium/third_party/webrtc/base/winfirewall_unittest.cc
new file mode 100644
index 00000000000..e5c3d6ac189
--- /dev/null
+++ b/chromium/third_party/webrtc/base/winfirewall_unittest.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/winfirewall.h"
+
+#include <objbase.h>
+
+namespace rtc {
+
+TEST(WinFirewallTest, ReadStatus) {
+ ::CoInitialize(NULL);
+ WinFirewall fw;
+ HRESULT hr;
+ bool authorized;
+
+ EXPECT_FALSE(fw.QueryAuthorized("bogus.exe", &authorized));
+ EXPECT_TRUE(fw.Initialize(&hr));
+ EXPECT_EQ(S_OK, hr);
+
+ EXPECT_TRUE(fw.QueryAuthorized("bogus.exe", &authorized));
+
+ // Unless we mock out INetFwMgr we can't really have an expectation either way
+ // about whether we're authorized. It will depend on the settings of the
+ // machine running the test. Same goes for AddApplication.
+
+ fw.Shutdown();
+ EXPECT_FALSE(fw.QueryAuthorized("bogus.exe", &authorized));
+
+ ::CoUninitialize();
+}
+
+} // namespace rtc
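Editorial sketch, not part of the patch: the unit test above only exercises the read-only query path, so here is a hedged sketch of the write path through AddApplication(). The executable path and friendly name are placeholders, and, as in the test, COM must already be initialized on the calling thread.

#include "webrtc/base/winfirewall.h"

// Ensures |exe_path| appears as an enabled entry in the XP-era authorized
// applications list wrapped by WinFirewall. The destructor releases the COM
// interfaces, so no explicit Shutdown() call is required here.
bool EnsureAuthorized(const char* exe_path, const char* friendly_name) {
  rtc::WinFirewall fw;
  HRESULT hr = 0;
  if (!fw.Initialize(&hr))
    return false;  // |hr| carries the COM error code.
  bool authorized = false;
  if (fw.QueryAuthorized(exe_path, &authorized) && authorized)
    return true;  // Already on the list and enabled.
  return fw.AddApplication(exe_path, friendly_name, true, &hr);
}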
diff --git a/chromium/third_party/webrtc/base/winping.cc b/chromium/third_party/webrtc/base/winping.cc
new file mode 100644
index 00000000000..cbb0847bb23
--- /dev/null
+++ b/chromium/third_party/webrtc/base/winping.cc
@@ -0,0 +1,359 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/winping.h"
+
+#include <assert.h>
+#include <Iphlpapi.h>
+
+#include "webrtc/base/byteorder.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/ipaddress.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/nethelpers.h"
+#include "webrtc/base/socketaddress.h"
+
+namespace rtc {
+
+//////////////////////////////////////////////////////////////////////
+// Found in IPExport.h
+//////////////////////////////////////////////////////////////////////
+
+typedef struct icmp_echo_reply {
+ ULONG Address; // Replying address
+ ULONG Status; // Reply IP_STATUS
+ ULONG RoundTripTime; // RTT in milliseconds
+ USHORT DataSize; // Reply data size in bytes
+ USHORT Reserved; // Reserved for system use
+ PVOID Data; // Pointer to the reply data
+ struct ip_option_information Options; // Reply options
+} ICMP_ECHO_REPLY, * PICMP_ECHO_REPLY;
+
+typedef struct icmpv6_echo_reply_lh {
+ sockaddr_in6 Address;
+ ULONG Status;
+ unsigned int RoundTripTime;
+} ICMPV6_ECHO_REPLY, *PICMPV6_ECHO_REPLY;
+
+//
+// IP_STATUS codes returned from IP APIs
+//
+
+#define IP_STATUS_BASE 11000
+
+#define IP_SUCCESS 0
+#define IP_BUF_TOO_SMALL (IP_STATUS_BASE + 1)
+#define IP_DEST_NET_UNREACHABLE (IP_STATUS_BASE + 2)
+#define IP_DEST_HOST_UNREACHABLE (IP_STATUS_BASE + 3)
+#define IP_DEST_PROT_UNREACHABLE (IP_STATUS_BASE + 4)
+#define IP_DEST_PORT_UNREACHABLE (IP_STATUS_BASE + 5)
+#define IP_NO_RESOURCES (IP_STATUS_BASE + 6)
+#define IP_BAD_OPTION (IP_STATUS_BASE + 7)
+#define IP_HW_ERROR (IP_STATUS_BASE + 8)
+#define IP_PACKET_TOO_BIG (IP_STATUS_BASE + 9)
+#define IP_REQ_TIMED_OUT (IP_STATUS_BASE + 10)
+#define IP_BAD_REQ (IP_STATUS_BASE + 11)
+#define IP_BAD_ROUTE (IP_STATUS_BASE + 12)
+#define IP_TTL_EXPIRED_TRANSIT (IP_STATUS_BASE + 13)
+#define IP_TTL_EXPIRED_REASSEM (IP_STATUS_BASE + 14)
+#define IP_PARAM_PROBLEM (IP_STATUS_BASE + 15)
+#define IP_SOURCE_QUENCH (IP_STATUS_BASE + 16)
+#define IP_OPTION_TOO_BIG (IP_STATUS_BASE + 17)
+#define IP_BAD_DESTINATION (IP_STATUS_BASE + 18)
+
+#define IP_ADDR_DELETED (IP_STATUS_BASE + 19)
+#define IP_SPEC_MTU_CHANGE (IP_STATUS_BASE + 20)
+#define IP_MTU_CHANGE (IP_STATUS_BASE + 21)
+#define IP_UNLOAD (IP_STATUS_BASE + 22)
+#define IP_ADDR_ADDED (IP_STATUS_BASE + 23)
+#define IP_MEDIA_CONNECT (IP_STATUS_BASE + 24)
+#define IP_MEDIA_DISCONNECT (IP_STATUS_BASE + 25)
+#define IP_BIND_ADAPTER (IP_STATUS_BASE + 26)
+#define IP_UNBIND_ADAPTER (IP_STATUS_BASE + 27)
+#define IP_DEVICE_DOES_NOT_EXIST (IP_STATUS_BASE + 28)
+#define IP_DUPLICATE_ADDRESS (IP_STATUS_BASE + 29)
+#define IP_INTERFACE_METRIC_CHANGE (IP_STATUS_BASE + 30)
+#define IP_RECONFIG_SECFLTR (IP_STATUS_BASE + 31)
+#define IP_NEGOTIATING_IPSEC (IP_STATUS_BASE + 32)
+#define IP_INTERFACE_WOL_CAPABILITY_CHANGE (IP_STATUS_BASE + 33)
+#define IP_DUPLICATE_IPADD (IP_STATUS_BASE + 34)
+
+#define IP_GENERAL_FAILURE (IP_STATUS_BASE + 50)
+#define MAX_IP_STATUS IP_GENERAL_FAILURE
+#define IP_PENDING (IP_STATUS_BASE + 255)
+
+//
+// Values used in the IP header Flags field.
+//
+#define IP_FLAG_DF 0x2 // Don't fragment this packet.
+
+//
+// Supported IP Option Types.
+//
+// These types define the options which may be used in the OptionsData field
+// of the ip_option_information structure. See RFC 791 for a complete
+// description of each.
+//
+#define IP_OPT_EOL 0 // End of list option
+#define IP_OPT_NOP 1 // No operation
+#define IP_OPT_SECURITY 0x82 // Security option
+#define IP_OPT_LSRR 0x83 // Loose source route
+#define IP_OPT_SSRR 0x89 // Strict source route
+#define IP_OPT_RR 0x7 // Record route
+#define IP_OPT_TS 0x44 // Timestamp
+#define IP_OPT_SID 0x88 // Stream ID (obsolete)
+#define IP_OPT_ROUTER_ALERT 0x94 // Router Alert Option
+
+#define MAX_OPT_SIZE 40 // Maximum length of IP options in bytes
+
+//////////////////////////////////////////////////////////////////////
+// Global Constants and Types
+//////////////////////////////////////////////////////////////////////
+
+const char * const ICMP_DLL_NAME = "Iphlpapi.dll";
+const char * const ICMP_CREATE_FUNC = "IcmpCreateFile";
+const char * const ICMP_CLOSE_FUNC = "IcmpCloseHandle";
+const char * const ICMP_SEND_FUNC = "IcmpSendEcho";
+const char * const ICMP6_CREATE_FUNC = "Icmp6CreateFile";
+const char * const ICMP6_CLOSE_FUNC = "Icmp6CloseHandle";
+const char * const ICMP6_SEND_FUNC = "Icmp6SendEcho2";
+
+inline uint32 ReplySize(uint32 data_size, int family) {
+ if (family == AF_INET) {
+ // A ping error message is 8 bytes long, so make sure we allow for at least
+ // 8 bytes of reply data.
+ return sizeof(ICMP_ECHO_REPLY) + rtc::_max<uint32>(8, data_size);
+ } else if (family == AF_INET6) {
+    // Per MSDN, Icmp6SendEcho2 needs at least one ICMPV6_ECHO_REPLY,
+ // 8 bytes for ICMP header, _and_ an IO_BLOCK_STATUS (2 pointers),
+ // in addition to the data size.
+ return sizeof(ICMPV6_ECHO_REPLY) + data_size + 8 + (2 * sizeof(DWORD*));
+ } else {
+ return 0;
+ }
+}
+
+//////////////////////////////////////////////////////////////////////
+// WinPing
+//////////////////////////////////////////////////////////////////////
+
+WinPing::WinPing()
+ : dll_(0), hping_(INVALID_HANDLE_VALUE), create_(0), close_(0), send_(0),
+ create6_(0), send6_(0), data_(0), dlen_(0), reply_(0),
+ rlen_(0), valid_(false) {
+
+ dll_ = LoadLibraryA(ICMP_DLL_NAME);
+ if (!dll_) {
+ LOG(LERROR) << "LoadLibrary: " << GetLastError();
+ return;
+ }
+
+ create_ = (PIcmpCreateFile) GetProcAddress(dll_, ICMP_CREATE_FUNC);
+ close_ = (PIcmpCloseHandle) GetProcAddress(dll_, ICMP_CLOSE_FUNC);
+ send_ = (PIcmpSendEcho) GetProcAddress(dll_, ICMP_SEND_FUNC);
+ if (!create_ || !close_ || !send_) {
+ LOG(LERROR) << "GetProcAddress(ICMP_*): " << GetLastError();
+ return;
+ }
+ hping_ = create_();
+ if (hping_ == INVALID_HANDLE_VALUE) {
+ LOG(LERROR) << "IcmpCreateFile: " << GetLastError();
+ return;
+ }
+
+ if (HasIPv6Enabled()) {
+ create6_ = (PIcmp6CreateFile) GetProcAddress(dll_, ICMP6_CREATE_FUNC);
+ send6_ = (PIcmp6SendEcho2) GetProcAddress(dll_, ICMP6_SEND_FUNC);
+ if (!create6_ || !send6_) {
+ LOG(LERROR) << "GetProcAddress(ICMP6_*): " << GetLastError();
+ return;
+ }
+ hping6_ = create6_();
+ if (hping6_ == INVALID_HANDLE_VALUE) {
+ LOG(LERROR) << "Icmp6CreateFile: " << GetLastError();
+ }
+ }
+
+ dlen_ = 0;
+ rlen_ = ReplySize(dlen_, AF_INET);
+ data_ = new char[dlen_];
+ reply_ = new char[rlen_];
+
+ valid_ = true;
+}
+
+WinPing::~WinPing() {
+ if ((hping_ != INVALID_HANDLE_VALUE) && close_) {
+ if (!close_(hping_))
+ LOG(WARNING) << "IcmpCloseHandle: " << GetLastError();
+ }
+ if ((hping6_ != INVALID_HANDLE_VALUE) && close_) {
+ if (!close_(hping6_)) {
+ LOG(WARNING) << "Icmp6CloseHandle: " << GetLastError();
+ }
+ }
+
+ if (dll_)
+ FreeLibrary(dll_);
+
+ delete[] data_;
+ delete[] reply_;
+}
+
+WinPing::PingResult WinPing::Ping(
+ IPAddress ip, uint32 data_size, uint32 timeout, uint8 ttl,
+ bool allow_fragments) {
+
+ if (data_size == 0 || timeout == 0 || ttl == 0) {
+ LOG(LERROR) << "IcmpSendEcho: data_size/timeout/ttl is 0.";
+ return PING_INVALID_PARAMS;
+ }
+
+ assert(IsValid());
+
+ IP_OPTION_INFORMATION ipopt;
+ memset(&ipopt, 0, sizeof(ipopt));
+ if (!allow_fragments)
+ ipopt.Flags |= IP_FLAG_DF;
+ ipopt.Ttl = ttl;
+
+ uint32 reply_size = ReplySize(data_size, ip.family());
+
+ if (data_size > dlen_) {
+ delete [] data_;
+ dlen_ = data_size;
+ data_ = new char[dlen_];
+ memset(data_, 'z', dlen_);
+ }
+
+ if (reply_size > rlen_) {
+ delete [] reply_;
+ rlen_ = reply_size;
+ reply_ = new char[rlen_];
+ }
+ DWORD result = 0;
+ if (ip.family() == AF_INET) {
+ result = send_(hping_, ip.ipv4_address().S_un.S_addr,
+ data_, uint16(data_size), &ipopt,
+ reply_, reply_size, timeout);
+ } else if (ip.family() == AF_INET6) {
+ sockaddr_in6 src = {0};
+ sockaddr_in6 dst = {0};
+ src.sin6_family = AF_INET6;
+ dst.sin6_family = AF_INET6;
+ dst.sin6_addr = ip.ipv6_address();
+ result = send6_(hping6_, NULL, NULL, NULL,
+ &src, &dst,
+ data_, int16(data_size), &ipopt,
+ reply_, reply_size, timeout);
+ }
+ if (result == 0) {
+ DWORD error = GetLastError();
+ if (error == IP_PACKET_TOO_BIG)
+ return PING_TOO_LARGE;
+ if (error == IP_REQ_TIMED_OUT)
+ return PING_TIMEOUT;
+ LOG(LERROR) << "IcmpSendEcho(" << ip.ToSensitiveString()
+ << ", " << data_size << "): " << error;
+ return PING_FAIL;
+ }
+
+ return PING_SUCCESS;
+}
+
+//////////////////////////////////////////////////////////////////////
+// Microsoft Documentation
+//////////////////////////////////////////////////////////////////////
+//
+// Routine Name:
+//
+// IcmpCreateFile
+//
+// Routine Description:
+//
+// Opens a handle on which ICMP Echo Requests can be issued.
+//
+// Arguments:
+//
+// None.
+//
+// Return Value:
+//
+// An open file handle or INVALID_HANDLE_VALUE. Extended error information
+// is available by calling GetLastError().
+//
+//////////////////////////////////////////////////////////////////////
+//
+// Routine Name:
+//
+// IcmpCloseHandle
+//
+// Routine Description:
+//
+//     Closes a handle opened by IcmpCreateFile.
+//
+// Arguments:
+//
+// IcmpHandle - The handle to close.
+//
+// Return Value:
+//
+// TRUE if the handle was closed successfully, otherwise FALSE. Extended
+// error information is available by calling GetLastError().
+//
+//////////////////////////////////////////////////////////////////////
+//
+// Routine Name:
+//
+// IcmpSendEcho
+//
+// Routine Description:
+//
+// Sends an ICMP Echo request and returns any replies. The
+// call returns when the timeout has expired or the reply buffer
+// is filled.
+//
+// Arguments:
+//
+// IcmpHandle - An open handle returned by ICMPCreateFile.
+//
+// DestinationAddress - The destination of the echo request.
+//
+// RequestData - A buffer containing the data to send in the
+// request.
+//
+// RequestSize - The number of bytes in the request data buffer.
+//
+// RequestOptions - Pointer to the IP header options for the request.
+// May be NULL.
+//
+// ReplyBuffer - A buffer to hold any replies to the request.
+// On return, the buffer will contain an array of
+// ICMP_ECHO_REPLY structures followed by the
+// options and data for the replies. The buffer
+// should be large enough to hold at least one
+// ICMP_ECHO_REPLY structure plus
+// MAX(RequestSize, 8) bytes of data since an ICMP
+// error message contains 8 bytes of data.
+//
+// ReplySize - The size in bytes of the reply buffer.
+//
+// Timeout - The time in milliseconds to wait for replies.
+//
+// Return Value:
+//
+// Returns the number of ICMP_ECHO_REPLY structures stored in ReplyBuffer.
+// The status of each reply is contained in the structure. If the return
+// value is zero, extended error information is available via
+// GetLastError().
+//
+//////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/winping.h b/chromium/third_party/webrtc/base/winping.h
new file mode 100644
index 00000000000..75f82b7b4ad
--- /dev/null
+++ b/chromium/third_party/webrtc/base/winping.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_WINPING_H__
+#define WEBRTC_BASE_WINPING_H__
+
+#if defined(WEBRTC_WIN)
+
+#include "webrtc/base/win32.h"
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/IPAddress.h"
+
+namespace rtc {
+
+// This class wraps a Win32 API for doing ICMP pinging. This API, unlike the
+// normal socket APIs (as implemented on Win9x), will return an error if an
+// ICMP packet with the don't-fragment bit set is too large. This means this
+// class can be used to detect the MTU to a given address.
+
+typedef struct ip_option_information {
+ UCHAR Ttl; // Time To Live
+ UCHAR Tos; // Type Of Service
+ UCHAR Flags; // IP header flags
+ UCHAR OptionsSize; // Size in bytes of options data
+ PUCHAR OptionsData; // Pointer to options data
+} IP_OPTION_INFORMATION, * PIP_OPTION_INFORMATION;
+
+typedef HANDLE (WINAPI *PIcmpCreateFile)();
+
+typedef BOOL (WINAPI *PIcmpCloseHandle)(HANDLE icmp_handle);
+
+typedef HANDLE (WINAPI *PIcmp6CreateFile)();
+
+typedef BOOL (WINAPI *PIcmp6CloseHandle)(HANDLE icmp_handle);
+
+typedef DWORD (WINAPI *PIcmpSendEcho)(
+ HANDLE IcmpHandle,
+ ULONG DestinationAddress,
+ LPVOID RequestData,
+ WORD RequestSize,
+ PIP_OPTION_INFORMATION RequestOptions,
+ LPVOID ReplyBuffer,
+ DWORD ReplySize,
+ DWORD Timeout);
+
+typedef DWORD (WINAPI *PIcmp6SendEcho2)(
+ HANDLE IcmpHandle,
+ HANDLE Event,
+ FARPROC ApcRoutine,
+ PVOID ApcContext,
+ struct sockaddr_in6 *SourceAddress,
+ struct sockaddr_in6 *DestinationAddress,
+ LPVOID RequestData,
+ WORD RequestSize,
+ PIP_OPTION_INFORMATION RequestOptions,
+ LPVOID ReplyBuffer,
+ DWORD ReplySize,
+ DWORD Timeout
+);
+
+class WinPing {
+public:
+ WinPing();
+ ~WinPing();
+
+ // Determines whether the class was initialized correctly.
+ bool IsValid() { return valid_; }
+
+ // Attempts to send a ping with the given parameters.
+ enum PingResult { PING_FAIL, PING_INVALID_PARAMS,
+ PING_TOO_LARGE, PING_TIMEOUT, PING_SUCCESS };
+ PingResult Ping(
+ IPAddress ip, uint32 data_size, uint32 timeout_millis, uint8 ttl,
+ bool allow_fragments);
+
+private:
+ HMODULE dll_;
+ HANDLE hping_;
+ HANDLE hping6_;
+ PIcmpCreateFile create_;
+ PIcmpCloseHandle close_;
+ PIcmpSendEcho send_;
+ PIcmp6CreateFile create6_;
+ PIcmp6SendEcho2 send6_;
+ char* data_;
+ uint32 dlen_;
+ char* reply_;
+ uint32 rlen_;
+ bool valid_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_WIN
+
+#endif // WEBRTC_BASE_WINPING_H__
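Editorial sketch, not part of the patch: the class comment notes that the don't-fragment behaviour makes MTU probing possible, because oversized probes come back as PING_TOO_LARGE instead of being fragmented. A rough binary search over the ICMP payload size, with made-up timeout and TTL values:

#include "webrtc/base/ipaddress.h"
#include "webrtc/base/winping.h"

// Windows-only: finds the largest ICMP payload that gets through with the
// don't-fragment bit set. Returns 0 if no probe succeeds or the ICMP helper
// DLL could not be loaded.
uint32 ProbePayloadMtu(const rtc::IPAddress& ip) {
  rtc::WinPing pinger;
  if (!pinger.IsValid())
    return 0;
  uint32 lo = 0;
  uint32 hi = 1472;  // 1500 minus IP and ICMP headers, a common upper bound.
  while (lo < hi) {
    uint32 mid = (lo + hi + 1) / 2;
    rtc::WinPing::PingResult result = pinger.Ping(
        ip, mid, 2000 /* timeout ms */, 64 /* ttl */, false /* no fragments */);
    if (result == rtc::WinPing::PING_SUCCESS) {
      lo = mid;
    } else if (result == rtc::WinPing::PING_TOO_LARGE) {
      hi = mid - 1;
    } else {
      return 0;  // Timeout or hard failure; do not guess.
    }
  }
  return lo;
}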
diff --git a/chromium/third_party/webrtc/base/worker.cc b/chromium/third_party/webrtc/base/worker.cc
new file mode 100644
index 00000000000..1b48b9b1d8b
--- /dev/null
+++ b/chromium/third_party/webrtc/base/worker.cc
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/worker.h"
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+enum {
+ MSG_HAVEWORK = 0,
+};
+
+Worker::Worker() : worker_thread_(NULL) {}
+
+Worker::~Worker() {
+ // We need to already be stopped before being destroyed. We cannot call
+ // StopWork() from here because the subclass's data has already been
+ // destructed, so OnStop() cannot be called.
+ ASSERT(!worker_thread_);
+}
+
+bool Worker::StartWork() {
+ rtc::Thread *me = rtc::Thread::Current();
+ if (worker_thread_) {
+ if (worker_thread_ == me) {
+ // Already working on this thread, so nothing to do.
+ return true;
+ } else {
+ LOG(LS_ERROR) << "Automatically switching threads is not supported";
+ ASSERT(false);
+ return false;
+ }
+ }
+ worker_thread_ = me;
+ OnStart();
+ return true;
+}
+
+bool Worker::StopWork() {
+ if (!worker_thread_) {
+ // Already not working, so nothing to do.
+ return true;
+ } else if (worker_thread_ != rtc::Thread::Current()) {
+ LOG(LS_ERROR) << "Stopping from a different thread is not supported";
+ ASSERT(false);
+ return false;
+ }
+ OnStop();
+ worker_thread_->Clear(this, MSG_HAVEWORK);
+ worker_thread_ = NULL;
+ return true;
+}
+
+void Worker::HaveWork() {
+ ASSERT(worker_thread_ != NULL);
+ worker_thread_->Post(this, MSG_HAVEWORK);
+}
+
+void Worker::OnMessage(rtc::Message *msg) {
+ ASSERT(msg->message_id == MSG_HAVEWORK);
+ ASSERT(worker_thread_ == rtc::Thread::Current());
+ OnHaveWork();
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/worker.h b/chromium/third_party/webrtc/base/worker.h
new file mode 100644
index 00000000000..694a78057b9
--- /dev/null
+++ b/chromium/third_party/webrtc/base/worker.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_WORKER_H_
+#define WEBRTC_BASE_WORKER_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/messagehandler.h"
+
+namespace rtc {
+
+class Thread;
+
+// A worker is an object that performs some specific long-lived task in an
+// event-driven manner.
+// The only method that should be considered thread-safe is HaveWork(), which
+// allows you to signal the availability of work from any thread. All other
+// methods are thread-hostile. Specifically:
+// StartWork()/StopWork() should not be called concurrently with themselves or
+// each other, and it is an error to call them while the worker is running on
+// a different thread.
+// The destructor may not be called if the worker is currently running
+// (regardless of the thread), but you can call StopWork() in a subclass's
+// destructor.
+class Worker : private MessageHandler {
+ public:
+ Worker();
+
+ // Destroys this Worker, but it must have already been stopped via StopWork().
+ virtual ~Worker();
+
+ // Attaches the worker to the current thread and begins processing work if not
+ // already doing so.
+ bool StartWork();
+ // Stops processing work if currently doing so and detaches from the current
+ // thread.
+ bool StopWork();
+
+ protected:
+ // Signal that work is available to be done. May only be called within the
+  // lifetime of an OnStart()/OnStop() pair.
+ void HaveWork();
+
+ // These must be implemented by a subclass.
+ // Called on the worker thread to start working.
+ virtual void OnStart() = 0;
+ // Called on the worker thread when work has been signalled via HaveWork().
+ virtual void OnHaveWork() = 0;
+ // Called on the worker thread to stop working. Upon return, any pending
+ // OnHaveWork() calls are cancelled.
+ virtual void OnStop() = 0;
+
+ private:
+ // Inherited from MessageHandler.
+ virtual void OnMessage(Message *msg);
+
+ // The thread that is currently doing the work.
+ Thread *worker_thread_;
+
+ DISALLOW_COPY_AND_ASSIGN(Worker);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_WORKER_H_
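Editorial sketch, not part of the patch: a minimal subclass that follows the contract spelled out in the header comment. HaveWork() is only legal between StartWork() and StopWork(), and the CriticalSection-guarded queue is an assumed detail rather than something the header prescribes.

#include <list>
#include <string>

#include "webrtc/base/criticalsection.h"
#include "webrtc/base/worker.h"

class LogWorker : public rtc::Worker {
 public:
  // Callable from any thread, but only between StartWork() and StopWork().
  void QueueLine(const std::string& line) {
    {
      rtc::CritScope lock(&crit_);
      pending_.push_back(line);
    }
    HaveWork();  // Wakes the worker thread via MSG_HAVEWORK.
  }

 protected:
  virtual void OnStart() {}  // Nothing to set up in this sketch.
  virtual void OnStop() {}   // StopWork() clears any pending wake-ups.
  virtual void OnHaveWork() {
    std::list<std::string> batch;
    {
      rtc::CritScope lock(&crit_);
      batch.swap(pending_);
    }
    // Drain |batch| here, on the thread that called StartWork().
  }

 private:
  rtc::CriticalSection crit_;
  std::list<std::string> pending_;
};

StartWork() would be called once on the thread meant to drain the queue, and StopWork() on that same thread before the object is destroyed, since the Worker destructor asserts that it has already been stopped.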
diff --git a/chromium/third_party/webrtc/build/OWNERS b/chromium/third_party/webrtc/build/OWNERS
index ec3a85255a4..2978427ca2a 100644
--- a/chromium/third_party/webrtc/build/OWNERS
+++ b/chromium/third_party/webrtc/build/OWNERS
@@ -1,4 +1,9 @@
-fischman@webrtc.org
-kjellander@webrtc.org
-wu@webrtc.org
+fischman@webrtc.org
+kjellander@webrtc.org
+wu@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/build/apk_tests.gyp b/chromium/third_party/webrtc/build/apk_tests.gyp
index 4a1f6e0e273..01859753e20 100644
--- a/chromium/third_party/webrtc/build/apk_tests.gyp
+++ b/chromium/third_party/webrtc/build/apk_tests.gyp
@@ -61,20 +61,6 @@
],
},
{
- 'target_name': 'metrics_unittests_apk',
- 'type': 'none',
- 'variables': {
- 'test_suite_name': 'metrics_unittests',
- 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)metrics_unittests<(SHARED_LIB_SUFFIX)',
- },
- 'dependencies': [
- '<(webrtc_root)/test/metrics.gyp:metrics_unittests',
- ],
- 'includes': [
- '../../../build/apk_test.gypi',
- ],
- },
- {
'target_name': 'modules_tests_apk',
'type': 'none',
'variables': {
@@ -103,20 +89,6 @@
],
},
{
- 'target_name': 'neteq_unittests_apk',
- 'type': 'none',
- 'variables': {
- 'test_suite_name': 'neteq_unittests',
- 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)neteq_unittests<(SHARED_LIB_SUFFIX)',
- },
- 'dependencies': [
- '<(webrtc_root)/modules/modules.gyp:neteq_unittests',
- ],
- 'includes': [
- '../../../build/apk_test.gypi',
- ],
- },
- {
'target_name': 'system_wrappers_unittests_apk',
'type': 'none',
'variables': {
@@ -213,7 +185,50 @@
'includes': [
'../../../build/apk_test.gypi',
],
- },
+ },
+ {
+ 'target_name': 'audio_codec_speed_tests_apk',
+ 'type': 'none',
+ 'variables': {
+ 'test_suite_name': 'audio_codec_speed_tests',
+ 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)audio_codec_speed_tests<(SHARED_LIB_SUFFIX)',
+ },
+ 'dependencies': [
+ '<(webrtc_root)/modules/modules.gyp:audio_codec_speed_tests',
+ ],
+ 'includes': [
+ '../../../build/apk_test.gypi',
+ ],
+ },
+ {
+ 'target_name': 'video_capture_tests_apk',
+ 'type': 'none',
+ 'variables': {
+ 'test_suite_name': 'video_capture_tests',
+ 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)video_capture_tests<(SHARED_LIB_SUFFIX)',
+ },
+ 'dependencies': [
+ '<(webrtc_root)/modules/modules.gyp:video_capture_tests',
+ 'video_capture_java',
+ ],
+ 'includes': [
+ '../../../build/apk_test.gypi',
+ ],
+ },
+ {
+ # Used only by video_capture_tests_apk above, and impossible to use in the
+ # standalone build, which is why it's declared here instead of under
+ # modules/video_capture/ (to avoid the need for a forked _noop.gyp file
+ # like this file has; see comment at the top of this file).
+ 'target_name': 'video_capture_java',
+ 'type': 'none',
+ 'variables': {
+ 'java_in_dir': '<(webrtc_root)/modules/video_capture/android/java',
+ },
+ 'includes': [
+ '../../../build/java.gypi',
+ ],
+ },
],
}
diff --git a/chromium/third_party/webrtc/build/apk_tests_noop.gyp b/chromium/third_party/webrtc/build/apk_tests_noop.gyp
index 98c4af9aa86..3523e79bb13 100644
--- a/chromium/third_party/webrtc/build/apk_tests_noop.gyp
+++ b/chromium/third_party/webrtc/build/apk_tests_noop.gyp
@@ -22,10 +22,6 @@
'type': 'none',
},
{
- 'target_name': 'metrics_unittests',
- 'type': 'none',
- },
- {
'target_name': 'modules_tests_apk',
'type': 'none',
},
@@ -34,10 +30,6 @@
'type': 'none',
},
{
- 'target_name': 'neteq_unittests_apk',
- 'type': 'none',
- },
- {
'target_name': 'system_wrappers_unittests_apk',
'type': 'none',
},
@@ -65,5 +57,13 @@
'target_name': 'webrtc_perf_tests_apk',
'type': 'none',
},
+ {
+ 'target_name': 'audio_codec_speed_tests_apk',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'video_capture_tests_apk',
+ 'type': 'none',
+ },
],
}
diff --git a/chromium/third_party/webrtc/build/common.gypi b/chromium/third_party/webrtc/build/common.gypi
index aab65ea7a59..cbc398e4004 100644
--- a/chromium/third_party/webrtc/build/common.gypi
+++ b/chromium/third_party/webrtc/build/common.gypi
@@ -14,8 +14,7 @@
'variables': {
'variables': {
'variables': {
- # This will be set to zero in the supplement.gypi triggered by a
- # gclient hook in the standalone build.
+ # This will already be set to zero by supplement.gypi
'build_with_chromium%': 1,
},
'build_with_chromium%': '<(build_with_chromium)',
@@ -26,11 +25,13 @@
'webrtc_root%': '<(DEPTH)/third_party/webrtc',
'apk_tests_path%': '<(DEPTH)/third_party/webrtc/build/apk_tests.gyp',
'modules_java_gyp_path%': '<(DEPTH)/third_party/webrtc/modules/modules_java_chromium.gyp',
+ 'gen_core_neon_offsets_gyp%': '<(DEPTH)/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp',
}, {
'build_with_libjingle%': 0,
'webrtc_root%': '<(DEPTH)/webrtc',
'apk_tests_path%': '<(DEPTH)/webrtc/build/apk_test_noop.gyp',
'modules_java_gyp_path%': '<(DEPTH)/webrtc/modules/modules_java.gyp',
+ 'gen_core_neon_offsets_gyp%':'<(DEPTH)/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp',
}],
],
},
@@ -39,7 +40,7 @@
'webrtc_root%': '<(webrtc_root)',
'apk_tests_path%': '<(apk_tests_path)',
'modules_java_gyp_path%': '<(modules_java_gyp_path)',
-
+ 'gen_core_neon_offsets_gyp%': '<(gen_core_neon_offsets_gyp)',
'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8',
'rbe_components_path%': '<(webrtc_root)/modules/remote_bitrate_estimator',
'include_opus%': 1,
@@ -49,9 +50,16 @@
'webrtc_root%': '<(webrtc_root)',
'apk_tests_path%': '<(apk_tests_path)',
'modules_java_gyp_path%': '<(modules_java_gyp_path)',
+ 'gen_core_neon_offsets_gyp%': '<(gen_core_neon_offsets_gyp)',
'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
'include_opus%': '<(include_opus)',
'rbe_components_path%': '<(rbe_components_path)',
+ 'external_libraries%': '0',
+ 'json_root%': '<(DEPTH)/third_party/jsoncpp/source/include/',
+      # openssl needs to be defined or gyp will complain. It is only used
+      # when providing external libraries, so just use the current directory
+      # as a placeholder.
+ 'ssl_root%': '.',
# The Chromium common.gypi we use treats all gyp files without
# chromium_code==1 as third party code. This disables many of the
@@ -61,6 +69,9 @@
# third party code will still have the reduced warning settings.
'chromium_code': 1,
+ # Set to 1 to enable code coverage on Linux using the gcov library.
+ 'coverage%': 0,
+
# Remote bitrate estimator logging/plotting.
'enable_bwe_test_logging%': 0,
@@ -80,9 +91,14 @@
'enable_protobuf%': 1,
# Disable these to not build components which can be externally provided.
+ 'build_json%': 1,
'build_libjpeg%': 1,
'build_libyuv%': 1,
'build_libvpx%': 1,
+ 'build_ssl%': 1,
+
+ # Disable by default
+ 'have_dbus_glib%': 0,
# Enable to use the Mozilla internal settings.
'build_with_mozilla%': 0,
@@ -110,9 +126,6 @@
# Exclude internal video render module in Chromium build.
'include_internal_video_render%': 0,
-
- # Include ndk cpu features in Chromium build.
- 'include_ndk_cpu_features%': 1,
}, { # Settings for the standalone (not-in-Chromium) build.
# TODO(andrew): For now, disable the Chrome plugins, which causes a
# flood of chromium-style warnings. Investigate enabling them:
@@ -123,7 +136,6 @@
'include_internal_audio_device%': 1,
'include_internal_video_capture%': 1,
'include_internal_video_render%': 1,
- 'include_ndk_cpu_features%': 0,
}],
['build_with_libjingle==1', {
'include_tests%': 0,
@@ -135,7 +147,6 @@
['OS=="ios"', {
'build_libjpeg%': 0,
'enable_protobuf%': 0,
- 'include_tests%': 0,
}],
['target_arch=="arm" or target_arch=="armv7"', {
'prefer_fixed_point%': 1,
@@ -144,9 +155,6 @@
},
'target_defaults': {
'include_dirs': [
- # Allow includes to be prefixed with webrtc/ in case it is not an
- # immediate subdirectory of <(DEPTH).
- '../..',
# To include the top-level directory when building in Chrome, so we can
# use full paths (e.g. headers inside testing/ or third_party/).
'<(DEPTH)',
@@ -161,6 +169,14 @@
'WEBRTC_MOZILLA_BUILD',
],
}],
+ ['have_dbus_glib==1', {
+ 'defines': [
+ 'HAVE_DBUS_GLIB',
+ ],
+ 'cflags': [
+ '<!@(pkg-config --cflags dbus-glib-1)',
+ ],
+ }],
['enable_video==1', {
'defines': ['WEBRTC_MODULE_UTILITY_VIDEO',],
}],
@@ -168,17 +184,36 @@
'defines': [
# Changes settings for Chromium build.
'WEBRTC_CHROMIUM_BUILD',
+ 'LOGGING_INSIDE_WEBRTC',
+ ],
+ 'include_dirs': [
+ # overrides must be included first as that is the mechanism for
+ # selecting the override headers in Chromium.
+ '../overrides',
+ # Allow includes to be prefixed with webrtc/ in case it is not an
+ # immediate subdirectory of <(DEPTH).
+ '../..',
],
}, {
'conditions': [
['os_posix==1', {
- 'cflags': [
- '-Wextra',
- # We need to repeat some flags from Chromium's common.gypi here
- # that get overridden by -Wextra.
- '-Wno-unused-parameter',
- '-Wno-missing-field-initializers',
- '-Wno-strict-overflow',
+ 'conditions': [
+ # -Wextra is currently disabled in Chromium's common.gypi. Enable
+ # for targets that can handle it. For Android/arm64 right now
+ # there will be an 'enumeral and non-enumeral type in conditional
+ # expression' warning in android_tools/ndk_experimental's version
+ # of stlport.
+ # See: https://code.google.com/p/chromium/issues/detail?id=379699
+ ['target_arch!="arm64" or OS!="android"', {
+ 'cflags': [
+ '-Wextra',
+ # We need to repeat some flags from Chromium's common.gypi
+ # here that get overridden by -Wextra.
+ '-Wno-unused-parameter',
+ '-Wno-missing-field-initializers',
+ '-Wno-strict-overflow',
+ ],
+ }],
],
'cflags_cc': [
'-Wnon-virtual-dtor',
@@ -186,6 +221,11 @@
'-Woverloaded-virtual',
],
}],
+ ['clang==1', {
+ 'cflags': [
+ '-Wthread-safety',
+ ],
+ }],
],
}],
['target_arch=="arm" or target_arch=="armv7"', {
@@ -258,6 +298,18 @@
}],
],
}],
+ ['coverage==1 and OS=="linux"', {
+ 'cflags': [ '-ftest-coverage',
+ '-fprofile-arcs' ],
+ 'link_settings': { 'libraries': [ '-lgcov' ] },
+ }],
+ ['os_posix==1', {
+ # For access to standard POSIXish features, use WEBRTC_POSIX instead of
+ # a more specific macro.
+ 'defines': [
+ 'WEBRTC_POSIX',
+ ],
+ }],
['OS=="ios"', {
'defines': [
'WEBRTC_MAC',
@@ -313,9 +365,6 @@
}],
], # conditions
'direct_dependent_settings': {
- 'include_dirs': [
- '../..',
- ],
'conditions': [
['build_with_mozilla==1', {
'defines': [
@@ -328,6 +377,16 @@
# Changes settings for Chromium build.
'WEBRTC_CHROMIUM_BUILD',
],
+ 'include_dirs': [
+ # overrides must be included first as that is the mechanism for
+ # selecting the override headers in Chromium.
+ '../overrides',
+ '../..',
+ ],
+ }, {
+ 'include_dirs': [
+ '../..',
+ ],
}],
['OS=="mac"', {
'defines': [
@@ -363,6 +422,13 @@
}]
],
}],
+ ['os_posix==1', {
+ # For access to standard POSIXish features, use WEBRTC_POSIX instead
+ # of a more specific macro.
+ 'defines': [
+ 'WEBRTC_POSIX',
+ ],
+ }],
],
},
}, # target_defaults
diff --git a/chromium/third_party/webrtc/build/download_vs_toolchain.py b/chromium/third_party/webrtc/build/download_vs_toolchain.py
new file mode 100644
index 00000000000..2462bdceda6
--- /dev/null
+++ b/chromium/third_party/webrtc/build/download_vs_toolchain.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This script is used to run the vs_toolchain.py script to download the
+# Visual Studio toolchain. It's just a temporary measure while waiting for the
+# Chrome team to move find_depot_tools into src/build to get rid of these
+# workarounds (similar one in gyp_webrtc).
+
+import os
+import sys
+
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+checkout_root = os.path.abspath(os.path.join(script_dir, os.pardir, os.pardir))
+sys.path.insert(0, os.path.join(checkout_root, 'build'))
+sys.path.insert(0, os.path.join(checkout_root, 'tools', 'find_depot_tools'))
+
+
+import vs_toolchain
+
+
+if __name__ == '__main__':
+ sys.exit(vs_toolchain.main())
diff --git a/chromium/third_party/webrtc/build/generate_asm_header.gypi b/chromium/third_party/webrtc/build/generate_asm_header.gypi
deleted file mode 100644
index 4600bb19a9f..00000000000
--- a/chromium/third_party/webrtc/build/generate_asm_header.gypi
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-# This file is meant to be included into a target to provide an action
-# to generate C header files. These headers include definitions
-# that can be used in ARM assembly files.
-#
-# To use this, create a gyp target with the following form:
-# {
-# 'target_name': 'my_asm_headers_lib',
-# 'type': 'static_library',
-# 'sources': [
-# 'foo.c',
-# 'bar.c',
-# ],
-# 'includes': ['path/to/this/gypi/file'],
-# }
-#
-# The headers are guaranteed to be generated before any
-# source files, even within this target, are compiled.
-#
-# The 'asm_header_dir' variable specifies the path suffix that output
-# files are generated under.
-
-# TODO(kma): port this block from Android into other build systems.
-{
- 'variables': {
- 'out_dir': '<(SHARED_INTERMEDIATE_DIR)/<(asm_header_dir)',
- 'process_outputs_as_sources': 1,
- 'conditions': [
- # We only support Android and iOS.
- ['OS=="android"', {
- 'compiler_to_use':
- '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <(android_toolchain)/*-gcc)',
- 'compiler_options': '-I<(webrtc_root)/.. -I<@(android_ndk_include) -S',
- 'pattern_to_detect': 'offset_',
- }],
- ['OS=="ios"', {
- 'compiler_to_use': 'clang',
- 'compiler_options':
- '-arch armv7 -I<(webrtc_root)/.. -isysroot $(SDKROOT) -S',
- 'pattern_to_detect': '_offset_',
- }],
- ]
- },
- 'rules': [
- {
- 'rule_name': 'generate_asm_header',
- 'extension': 'c',
- 'inputs': [
- 'generate_asm_header.py',
- ],
- 'outputs': [
- '<(out_dir)/<(RULE_INPUT_ROOT).h',
- ],
- 'action': [
- 'python',
- '<(webrtc_root)/build/generate_asm_header.py',
- '--compiler=<(compiler_to_use)',
- '--options=<(compiler_options)',
- '--pattern=<(pattern_to_detect)',
- '--dir=<(out_dir)',
- '<(RULE_INPUT_PATH)',
- ],
- 'message': 'Generating assembly header files',
- 'process_outputs_as_sources': 1,
- },
- ],
- 'direct_dependent_settings': {
- 'include_dirs': ['<(out_dir)',],
- },
- # This target exports a hard dependency because it generates header files.
- 'hard_dependency': 1,
-}
diff --git a/chromium/third_party/webrtc/build/generate_asm_header.py b/chromium/third_party/webrtc/build/generate_asm_header.py
deleted file mode 100644
index c159d507419..00000000000
--- a/chromium/third_party/webrtc/build/generate_asm_header.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-"""This script is a tool to generate special header files from input
-C source files.
-
-It first assembles the input source files to generate intermediate assembly
-files (*.s). Then it parses the .s files and finds declarations of variables
-whose names start with the string specified as the third argument in the
-command-line, translates the variable names and values into constant defines
-and writes them into header files.
-"""
-
-import os
-import re
-import subprocess
-import sys
-from optparse import OptionParser
-
-def main(argv):
- parser = OptionParser()
- usage = 'Usage: %prog [options] input_filename'
- parser.set_usage(usage)
- parser.add_option('--compiler', default = 'gcc', help = 'compiler name')
- parser.add_option('--options', default = '-S', help = 'compiler options')
- parser.add_option('--pattern', default = 'offset_', help = 'A match pattern'
- ' used for searching the relevant constants.')
- parser.add_option('--dir', default = '.', help = 'output directory')
- (options, args) = parser.parse_args()
-
- # Generate complete intermediate and header file names.
- input_filename = args[0]
- output_root = (options.dir + '/' +
- os.path.splitext(os.path.basename(input_filename))[0])
- interim_filename = output_root + '.s'
- out_filename = output_root + '.h'
-
- # Set the shell command with the compiler and options inputs.
- compiler_command = (options.compiler + " " + options.options + " " +
- input_filename + " -o " + interim_filename)
-
- # Run the shell command and generate the intermediate file.
- subprocess.check_call(compiler_command, shell=True)
-
- interim_file = open(interim_filename) # The intermediate file.
- out_file = open(out_filename, 'w') # The output header file.
-
- # Generate the output header file.
- while True:
- line = interim_file.readline()
- if not line: break
- if line.startswith(options.pattern):
- # Find name of the next constant and write to the output file.
- const_name = re.sub(r'^_', '', line.split(':')[0])
- out_file.write('#define %s ' % const_name)
-
- # Find value of the constant we just found and write to the output file.
- line = interim_file.readline()
- const_value = filter(str.isdigit, line.split(' ')[0])
- if const_value != '':
- out_file.write('%s\n' % const_value)
-
- interim_file.close()
- out_file.close()
-
-if __name__ == "__main__":
- main(sys.argv[1:])
diff --git a/chromium/third_party/webrtc/build/gyp_webrtc b/chromium/third_party/webrtc/build/gyp_webrtc
new file mode 100755
index 00000000000..4d5ae791caa
--- /dev/null
+++ b/chromium/third_party/webrtc/build/gyp_webrtc
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This script is used to run GYP for WebRTC. It contains selected parts of the
+# main function from the src/build/gyp_chromium file.
+
+import glob
+import os
+import shlex
+import sys
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+checkout_root = os.path.abspath(os.path.join(script_dir, os.pardir, os.pardir))
+
+sys.path.insert(0, os.path.join(checkout_root, 'build'))
+sys.path.insert(0, os.path.join(checkout_root, 'tools', 'find_depot_tools'))
+import gyp_chromium
+import gyp_helper
+import vs_toolchain
+
+sys.path.insert(0, os.path.join(checkout_root, 'tools', 'gyp', 'pylib'))
+import gyp
+
+
+if __name__ == '__main__':
+ args = sys.argv[1:]
+
+ if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
+ print 'Skipping gyp_webrtc due to GYP_CHROMIUM_NO_ACTION env var.'
+ sys.exit(0)
+
+ if 'SKIP_WEBRTC_GYP_ENV' not in os.environ:
+ # Update the environment based on webrtc.gyp_env
+ gyp_env_path = os.path.join(os.path.dirname(checkout_root),
+ 'webrtc.gyp_env')
+ gyp_helper.apply_gyp_environment_from_file(gyp_env_path)
+
+ # This could give false positives since it doesn't actually do real option
+ # parsing. Oh well.
+ gyp_file_specified = False
+ for arg in args:
+ if arg.endswith('.gyp'):
+ gyp_file_specified = True
+ break
+
+ # If we didn't get a file, assume 'all.gyp' in the root of the checkout.
+ if not gyp_file_specified:
+ args.append(os.path.join(checkout_root, 'all.gyp'))
+
+  # There shouldn't be a circular dependency relationship between .gyp files.
+ args.append('--no-circular-check')
+
+ # Default to ninja unless GYP_GENERATORS is set.
+ if not os.environ.get('GYP_GENERATORS'):
+ os.environ['GYP_GENERATORS'] = 'ninja'
+
+ vs2013_runtime_dll_dirs = None
+ if int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')):
+ vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
+
+ # Enforce gyp syntax checking. This adds about 20% execution time.
+ args.append('--check')
+
+ supplemental_includes = gyp_chromium.GetSupplementalFiles()
+ gn_vars_dict = gyp_chromium.GetGypVars(supplemental_includes)
+
+ # Automatically turn on crosscompile support for platforms that need it.
+ if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
+ gn_vars_dict.get('OS') in ['android', 'ios'],
+ 'GYP_CROSSCOMPILE' not in os.environ)):
+ os.environ['GYP_CROSSCOMPILE'] = '1'
+
+ args.extend(['-I' + i for i in
+ gyp_chromium.additional_include_files(supplemental_includes,
+ args)])
+
+ # Set the gyp depth variable to the root of the checkout.
+ args.append('--depth=' + os.path.relpath(checkout_root))
+
+ print 'Updating projects from gyp files...'
+ sys.stdout.flush()
+
+ # Off we go...
+ gyp_rc = gyp.main(args)
+
+ if vs2013_runtime_dll_dirs:
+ x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
+ vs_toolchain.CopyVsRuntimeDlls(
+ os.path.join(checkout_root, gyp_chromium.GetOutputDirectory()),
+ (x86_runtime, x64_runtime))
+
+ sys.exit(gyp_rc)
diff --git a/chromium/third_party/webrtc/build/gyp_webrtc.py b/chromium/third_party/webrtc/build/gyp_webrtc.py
new file mode 100644
index 00000000000..87d8a57ddf0
--- /dev/null
+++ b/chromium/third_party/webrtc/build/gyp_webrtc.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file is (possibly, depending on the Python version) imported by
+# gyp_webrtc when GYP_PARALLEL=1, which creates sub-processes
+# through the multiprocessing library.
+
+# In Python 2.6 on Windows (fixed in 2.7), importing doesn't find modules
+# whose files don't end in .py (and aren't directories with an
+# __init__.py). This wrapper makes "import gyp_webrtc" work with
+# those old versions and also makes it possible to execute gyp_webrtc.py
+# directly on Windows, where the extension is useful.
+
+import os
+
+path = os.path.abspath(os.path.split(__file__)[0])
+execfile(os.path.join(path, 'gyp_webrtc'))
diff --git a/chromium/third_party/webrtc/build/isolate.gypi b/chromium/third_party/webrtc/build/isolate.gypi
index 510c47e2639..ce90575e84f 100644
--- a/chromium/third_party/webrtc/build/isolate.gypi
+++ b/chromium/third_party/webrtc/build/isolate.gypi
@@ -12,6 +12,11 @@
# build/common.gypi is different for the standalone and Chromium builds. Gyp
# doesn't permit conditional inclusion or variable expansion in include paths.
# http://code.google.com/p/gyp/wiki/InputFormatReference#Including_Other_Files
+#
+# Local modifications:
+# * Removed include of '../chrome/version.gypi'.
+# * Removed passing of the version_full variable created in version.gypi:
+# '--extra-variable', 'version_full=<(version_full)',
# This file is meant to be included into a target to provide a rule
# to "build" .isolate files into a .isolated file.
@@ -44,6 +49,9 @@
#
# The generated .isolated file will be:
# <(PRODUCT_DIR)/foo_test.isolated
+#
+# See http://dev.chromium.org/developers/testing/isolated-testing/for-swes
+# for more information.
{
'rules': [
@@ -54,7 +62,6 @@
# Files that are known to be involved in this step.
'<(DEPTH)/tools/swarming_client/isolate.py',
'<(DEPTH)/tools/swarming_client/run_isolated.py',
- '<(DEPTH)/tools/swarming_client/googletest/run_test_cases.py',
# Disable file tracking by the build driver for now. This means the
# project must have the proper build-time dependency for their runtime
@@ -71,47 +78,51 @@
'outputs': [
'<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
],
+ 'action': [
+ 'python',
+ '<(DEPTH)/tools/swarming_client/isolate.py',
+ '<(test_isolation_mode)',
+ '--result', '<@(_outputs)',
+ '--isolate', '<(RULE_INPUT_PATH)',
+
+ # Variables should use the -V FOO=<(FOO) form so frequent values,
+ # like '0' or '1', aren't stripped out by GYP. Run 'isolate.py help' for
+ # more details.
+ #
+ # This list needs to be kept in sync with the cmd line options
+ # in src/build/android/pylib/gtest/setup.py.
+
+ # Path variables are used to replace file paths when loading a .isolate
+ # file.
+ '--path-variable', 'DEPTH', '<(DEPTH)',
+ '--path-variable', 'PRODUCT_DIR', '<(PRODUCT_DIR) ',
+
+ '--config-variable', 'OS=<(OS)',
+ '--config-variable', 'chromeos=<(chromeos)',
+ '--config-variable', 'component=<(component)',
+ # TODO(kbr): move this to chrome_tests.gypi:gles2_conform_tests_run
+ # once support for user-defined config variables is added.
+ '--config-variable',
+ 'internal_gles2_conform_tests=<(internal_gles2_conform_tests)',
+ '--config-variable', 'icu_use_data_file_flag=<(icu_use_data_file_flag)',
+ '--config-variable', 'use_openssl=<(use_openssl)',
+ ],
'conditions': [
- ["test_isolation_outdir==''", {
- 'action': [
- 'python',
- '<(DEPTH)/tools/swarming_client/isolate.py',
- '<(test_isolation_mode)',
- # GYP will eliminate duplicate arguments so '<(PRODUCT_DIR)' cannot
- # be provided twice. To work around this behavior, append '/'.
- #
- # Also have a space after <(PRODUCT_DIR) or visual studio will
- # escape the argument wrappping " with the \ and merge it into
- # the following arguments.
- #
- # Other variables should use the -V FOO=<(FOO) form so frequent
- # values, like '0' or '1', aren't stripped out by GYP.
- '--outdir', '<(PRODUCT_DIR)/ ',
- '--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR) ',
- '--variable', 'OS=<(OS)',
- '--result', '<@(_outputs)',
- '--isolate', '<(RULE_INPUT_PATH)',
- ],
- }, {
+ # Note: When gyp merges lists, it appends them to the old value.
+ ['OS=="mac"', {
+ # <(mac_product_name) can contain a space, so don't use FOO=<(FOO)
+ # form.
'action': [
- 'python',
- '<(DEPTH)/tools/swarming_client/isolate.py',
- '<(test_isolation_mode)',
- '--outdir', '<(test_isolation_outdir)',
- # See comment above.
- '--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR) ',
- '--variable', 'OS=<(OS)',
- '--result', '<@(_outputs)',
- '--isolate', '<(RULE_INPUT_PATH)',
+ '--extra-variable', 'mac_product_name', '<(mac_product_name)',
],
}],
+ ["test_isolation_outdir!=''", {
+ 'action': [ '--isolate-server', '<(test_isolation_outdir)' ],
+ }],
['test_isolation_fail_on_missing == 0', {
- 'action': ['--ignore_broken_items'],
- },
- ],
+ 'action': ['--ignore_broken_items'],
+ }],
],
-
- 'msvs_cygwin_shell': 0,
},
],
}
diff --git a/chromium/third_party/webrtc/build/merge_libs.gyp b/chromium/third_party/webrtc/build/merge_libs.gyp
index c31b259de1c..4f8cdac85e8 100644
--- a/chromium/third_party/webrtc/build/merge_libs.gyp
+++ b/chromium/third_party/webrtc/build/merge_libs.gyp
@@ -10,7 +10,7 @@
'includes': ['common.gypi',],
'variables': {
'merge_libs_dependencies': [
- '../video_engine/video_engine.gyp:video_engine_core',
+ '../webrtc.gyp:webrtc',
],
},
'targets': [
@@ -31,7 +31,7 @@
'actions': [
{
'variables': {
- 'output_lib_name': 'webrtc',
+ 'output_lib_name': 'webrtc_merged',
'output_lib': '<(PRODUCT_DIR)/<(STATIC_LIB_PREFIX)<(output_lib_name)<(STATIC_LIB_SUFFIX)',
},
'action_name': 'merge_libs',
diff --git a/chromium/third_party/webrtc/build/merge_libs.py b/chromium/third_party/webrtc/build/merge_libs.py
index 5edae3964e6..1e78c68125f 100644
--- a/chromium/third_party/webrtc/build/merge_libs.py
+++ b/chromium/third_party/webrtc/build/merge_libs.py
@@ -21,7 +21,8 @@ def FindFiles(path, pattern):
"""Finds files matching |pattern| under |path|.
Returns a list of file paths matching |pattern|, by walking the directory tree
- under |path|. Filenames containing the string 'do_not_use' are excluded.
+ under |path|. Filenames containing the string 'do_not_use' or 'protoc' are
+ excluded.
Args:
path: The root path for the search.
@@ -34,7 +35,7 @@ def FindFiles(path, pattern):
files = []
for root, _, filenames in os.walk(path):
for filename in fnmatch.filter(filenames, pattern):
- if 'do_not_use' not in filename:
+ if 'do_not_use' not in filename and 'protoc' not in filename:
# We use the relative path here to avoid "argument list too long"
# errors on Linux.
files.append(os.path.relpath(os.path.join(root, filename)))
diff --git a/chromium/third_party/webrtc/build/protoc.gypi b/chromium/third_party/webrtc/build/protoc.gypi
index a996c5c4054..5e486f16c2e 100644
--- a/chromium/third_party/webrtc/build/protoc.gypi
+++ b/chromium/third_party/webrtc/build/protoc.gypi
@@ -105,7 +105,6 @@
'--cpp_out', '<(cc_generator_options)<(cc_dir)',
'--python_out', '<(py_dir)',
],
- 'msvs_cygwin_shell': 0,
'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
'process_outputs_as_sources': 1,
},
diff --git a/chromium/third_party/webrtc/build/webrtc.gni b/chromium/third_party/webrtc/build/webrtc.gni
new file mode 100644
index 00000000000..e269a262c70
--- /dev/null
+++ b/chromium/third_party/webrtc/build/webrtc.gni
@@ -0,0 +1,57 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+declare_args() {
+ # Assume Chromium build for now, since that's the priority case for getting GN
+ # up and running with WebRTC.
+ build_with_chromium = true
+ build_with_libjingle = true
+
+ if (build_with_libjingle) {
+ include_tests = false
+ restrict_webrtc_logging = true
+ } else {
+ include_tests = true
+ restrict_webrtc_logging = false
+ }
+
+ # Adds video support to dependencies shared by voice and video engine.
+ # This should normally be enabled; the intended use is to disable only
+ # when building voice engine exclusively.
+ enable_video = true
+
+ # Selects fixed-point code where possible.
+ prefer_fixed_point = false
+
+ build_libjpeg = true
+ # Enables the use of protocol buffers for debug recordings.
+ enable_protobuf = true
+
+ # Disable by default.
+ have_dbus_glib = false
+
+ # Enable to use the Mozilla internal settings.
+ build_with_mozilla = false
+
+ # Define the MIPS architecture variant, MIPS DSP variant and MIPS FPU.
+ # This may change in accordance with Chromium's MIPS flags.
+ mips_arch_variant = "mips32r1"
+ mips_dsp_rev = 0
+ mips_fpu = true
+
+ enable_android_opensl = true
+
+ if (is_ios) {
+ build_libjpeg = false
+ enable_protobuf = false
+ }
+
+ if (cpu_arch == "arm") {
+ prefer_fixed_point = true
+ }
+}
diff --git a/chromium/third_party/webrtc/call.h b/chromium/third_party/webrtc/call.h
index 7ee2592452d..480d73ec68e 100644
--- a/chromium/third_party/webrtc/call.h
+++ b/chromium/third_party/webrtc/call.h
@@ -7,8 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_CALL_H_
-#define WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_CALL_H_
+#ifndef WEBRTC_CALL_H_
+#define WEBRTC_CALL_H_
#include <string>
#include <vector>
@@ -25,12 +25,32 @@ const char* Version();
class PacketReceiver {
public:
- virtual bool DeliverPacket(const uint8_t* packet, size_t length) = 0;
+ enum DeliveryStatus {
+ DELIVERY_OK,
+ DELIVERY_UNKNOWN_SSRC,
+ DELIVERY_PACKET_ERROR,
+ };
+
+ virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
+ size_t length) = 0;
protected:
virtual ~PacketReceiver() {}
};
+// Callback interface for reporting when a system overuse is detected.
+// The detection is based on the jitter of incoming captured frames.
+class OveruseCallback {
+ public:
+ // Called as soon as an overuse is detected.
+ virtual void OnOveruse() = 0;
+ // Called periodically when the system is not overused any longer.
+ virtual void OnNormalUse() = 0;
+
+ protected:
+ virtual ~OveruseCallback() {}
+};
+
// A Call instance can contain several send and/or receive streams. All streams
// are assumed to have the same remote endpoint and will share bitrate estimates
// etc.
@@ -40,21 +60,25 @@ class Call {
explicit Config(newapi::Transport* send_transport)
: webrtc_config(NULL),
send_transport(send_transport),
- overuse_detection(false),
voice_engine(NULL),
- trace_callback(NULL),
- trace_filter(kTraceDefault) {}
+ overuse_callback(NULL),
+ start_bitrate_bps(-1) {}
webrtc::Config* webrtc_config;
newapi::Transport* send_transport;
- bool overuse_detection;
// VoiceEngine used for audio/video synchronization for this Call.
VoiceEngine* voice_engine;
- TraceCallback* trace_callback;
- uint32_t trace_filter;
+ // Callback for overuse and normal usage based on the jitter of incoming
+ // captured frames. 'NULL' disables the callback.
+ OveruseCallback* overuse_callback;
+
+ // Start bitrate used before a valid bitrate estimate is calculated. '-1'
+ // lets the call decide the start bitrate.
+ // Note: This currently only affects video.
+ int start_bitrate_bps;
};
static Call* Create(const Call::Config& config);
@@ -62,16 +86,13 @@ class Call {
static Call* Create(const Call::Config& config,
const webrtc::Config& webrtc_config);
- virtual std::vector<VideoCodec> GetVideoCodecs() = 0;
-
virtual VideoSendStream::Config GetDefaultSendConfig() = 0;
virtual VideoSendStream* CreateVideoSendStream(
- const VideoSendStream::Config& config) = 0;
+ const VideoSendStream::Config& config,
+ const std::vector<VideoStream>& video_streams,
+ const void* encoder_settings) = 0;
- // Returns the internal state of the send stream, for resume sending with a
- // new stream with different settings.
- // Note: Only the last returned send-stream state is valid.
virtual void DestroyVideoSendStream(VideoSendStream* send_stream) = 0;
virtual VideoReceiveStream::Config GetDefaultReceiveConfig() = 0;
@@ -98,4 +119,4 @@ class Call {
};
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_CALL_H_
+#endif // WEBRTC_CALL_H_
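
A minimal standalone sketch (not part of the patch) of how the new PacketReceiver::DeliveryStatus and OveruseCallback interfaces are meant to be used; it mirrors the declarations added above, and LoggingReceiver/LoggingOveruseCallback are illustrative names only.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

class PacketReceiver {
 public:
  enum DeliveryStatus {
    DELIVERY_OK,
    DELIVERY_UNKNOWN_SSRC,
    DELIVERY_PACKET_ERROR,
  };
  virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
                                       size_t length) = 0;
 protected:
  virtual ~PacketReceiver() {}
};

class OveruseCallback {
 public:
  virtual void OnOveruse() = 0;
  virtual void OnNormalUse() = 0;
 protected:
  virtual ~OveruseCallback() {}
};

// Example receiver that rejects empty packets and reports everything else OK.
class LoggingReceiver : public PacketReceiver {
 public:
  virtual DeliveryStatus DeliverPacket(const uint8_t* packet, size_t length) {
    if (packet == NULL || length == 0)
      return DELIVERY_PACKET_ERROR;
    printf("Delivered %u bytes\n", static_cast<unsigned>(length));
    return DELIVERY_OK;
  }
};

// Example overuse callback that simply logs the state transitions.
class LoggingOveruseCallback : public OveruseCallback {
 public:
  virtual void OnOveruse() { printf("Overuse detected\n"); }
  virtual void OnNormalUse() { printf("Back to normal use\n"); }
};

int main() {
  LoggingReceiver receiver;
  const uint8_t packet[4] = {0, 1, 2, 3};
  PacketReceiver::DeliveryStatus status =
      receiver.DeliverPacket(packet, sizeof(packet));
  printf("status=%d\n", static_cast<int>(status));

  LoggingOveruseCallback overuse;
  overuse.OnOveruse();    // Would be wired into Call::Config::overuse_callback.
  overuse.OnNormalUse();
  return 0;
}
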
diff --git a/chromium/third_party/webrtc/common.gyp b/chromium/third_party/webrtc/common.gyp
new file mode 100644
index 00000000000..b6b6354ab90
--- /dev/null
+++ b/chromium/third_party/webrtc/common.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': ['build/common.gypi'],
+ 'targets': [
+ {
+ 'target_name': 'webrtc_common',
+ 'type': 'static_library',
+ 'sources': [
+ 'config.h',
+ 'config.cc',
+ ],
+ },
+ ],
+}
diff --git a/chromium/third_party/webrtc/common_audio/OWNERS b/chromium/third_party/webrtc/common_audio/OWNERS
index 84582f2c990..83d774b2a57 100644
--- a/chromium/third_party/webrtc/common_audio/OWNERS
+++ b/chromium/third_party/webrtc/common_audio/OWNERS
@@ -2,3 +2,10 @@ bjornv@webrtc.org
tina.legrand@webrtc.org
jan.skoglund@webrtc.org
andrew@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're making
+# structural changes, please get a review from a reviewer listed in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/common_audio/audio_util.cc b/chromium/third_party/webrtc/common_audio/audio_util.cc
index a6114fdf484..0c961e1ad74 100644
--- a/chromium/third_party/webrtc/common_audio/audio_util.cc
+++ b/chromium/third_party/webrtc/common_audio/audio_util.cc
@@ -14,28 +14,19 @@
namespace webrtc {
-void Deinterleave(const int16_t* interleaved, int samples_per_channel,
- int num_channels, int16_t** deinterleaved) {
- for (int i = 0; i < num_channels; i++) {
- int16_t* channel = deinterleaved[i];
- int interleaved_idx = i;
- for (int j = 0; j < samples_per_channel; j++) {
- channel[j] = interleaved[interleaved_idx];
- interleaved_idx += num_channels;
- }
- }
+void RoundToInt16(const float* src, int size, int16_t* dest) {
+ for (int i = 0; i < size; ++i)
+ dest[i] = RoundToInt16(src[i]);
}
-void Interleave(const int16_t* const* deinterleaved, int samples_per_channel,
- int num_channels, int16_t* interleaved) {
- for (int i = 0; i < num_channels; ++i) {
- const int16_t* channel = deinterleaved[i];
- int interleaved_idx = i;
- for (int j = 0; j < samples_per_channel; j++) {
- interleaved[interleaved_idx] = channel[j];
- interleaved_idx += num_channels;
- }
- }
+void ScaleAndRoundToInt16(const float* src, int size, int16_t* dest) {
+ for (int i = 0; i < size; ++i)
+ dest[i] = ScaleAndRoundToInt16(src[i]);
+}
+
+void ScaleToFloat(const int16_t* src, int size, float* dest) {
+ for (int i = 0; i < size; ++i)
+ dest[i] = ScaleToFloat(src[i]);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_audio/audio_util_unittest.cc b/chromium/third_party/webrtc/common_audio/audio_util_unittest.cc
index c724e378f52..bf9ad812523 100644
--- a/chromium/third_party/webrtc/common_audio/audio_util_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/audio_util_unittest.cc
@@ -16,22 +16,46 @@ namespace webrtc {
void ExpectArraysEq(const int16_t* ref, const int16_t* test, int length) {
for (int i = 0; i < length; ++i) {
- EXPECT_EQ(test[i], ref[i]);
+ EXPECT_EQ(ref[i], test[i]);
}
}
-TEST(AudioUtilTest, Clamp) {
- EXPECT_EQ(1000.f, ClampInt16(1000.f));
- EXPECT_EQ(32767.f, ClampInt16(32767.5f));
- EXPECT_EQ(-32768.f, ClampInt16(-32768.5f));
+void ExpectArraysEq(const float* ref, const float* test, int length) {
+ for (int i = 0; i < length; ++i) {
+ EXPECT_FLOAT_EQ(ref[i], test[i]);
+ }
+}
+
+TEST(AudioUtilTest, RoundToInt16) {
+ const int kSize = 7;
+ const float kInput[kSize] = {
+ 0.f, 0.4f, 0.5f, -0.4f, -0.5f, 32768.f, -32769.f};
+ const int16_t kReference[kSize] = {0, 0, 1, 0, -1, 32767, -32768};
+ int16_t output[kSize];
+ RoundToInt16(kInput, kSize, output);
+ ExpectArraysEq(kReference, output, kSize);
+}
+
+TEST(AudioUtilTest, ScaleAndRoundToInt16) {
+ const int kSize = 9;
+ const float kInput[kSize] = {
+ 0.f, 0.4f / 32767.f, 0.6f / 32767.f, -0.4f / 32768.f, -0.6f / 32768.f,
+ 1.f, -1.f, 1.1f, -1.1f};
+ const int16_t kReference[kSize] = {
+ 0, 0, 1, 0, -1, 32767, -32768, 32767, -32768};
+ int16_t output[kSize];
+ ScaleAndRoundToInt16(kInput, kSize, output);
+ ExpectArraysEq(kReference, output, kSize);
}
-TEST(AudioUtilTest, Round) {
- EXPECT_EQ(0, RoundToInt16(0.f));
- EXPECT_EQ(0, RoundToInt16(0.4f));
- EXPECT_EQ(1, RoundToInt16(0.5f));
- EXPECT_EQ(0, RoundToInt16(-0.4f));
- EXPECT_EQ(-1, RoundToInt16(-0.5f));
+TEST(AudioUtilTest, ScaleToFloat) {
+ const int kSize = 7;
+ const int16_t kInput[kSize] = {0, 1, -1, 16384, -16384, 32767, -32768};
+ const float kReference[kSize] = {
+ 0.f, 1.f / 32767.f, -1.f / 32768.f, 16384.f / 32767.f, -0.5f, 1.f, -1.f};
+ float output[kSize];
+ ScaleToFloat(kInput, kSize, output);
+ ExpectArraysEq(kReference, output, kSize);
}
TEST(AudioUtilTest, InterleavingStereo) {
@@ -44,12 +68,12 @@ TEST(AudioUtilTest, InterleavingStereo) {
Deinterleave(kInterleaved, kSamplesPerChannel, kNumChannels, deinterleaved);
const int16_t kRefLeft[] = {2, 4, 8, 16};
const int16_t kRefRight[] = {3, 9, 27, 81};
- ExpectArraysEq(left, kRefLeft, kSamplesPerChannel);
- ExpectArraysEq(right, kRefRight, kSamplesPerChannel);
+ ExpectArraysEq(kRefLeft, left, kSamplesPerChannel);
+ ExpectArraysEq(kRefRight, right, kSamplesPerChannel);
int16_t interleaved[kLength];
Interleave(deinterleaved, kSamplesPerChannel, kNumChannels, interleaved);
- ExpectArraysEq(interleaved, kInterleaved, kLength);
+ ExpectArraysEq(kInterleaved, interleaved, kLength);
}
TEST(AudioUtilTest, InterleavingMonoIsIdentical) {
@@ -59,11 +83,11 @@ TEST(AudioUtilTest, InterleavingMonoIsIdentical) {
int16_t mono[kSamplesPerChannel];
int16_t* deinterleaved[] = {mono};
Deinterleave(kInterleaved, kSamplesPerChannel, kNumChannels, deinterleaved);
- ExpectArraysEq(mono, kInterleaved, kSamplesPerChannel);
+ ExpectArraysEq(kInterleaved, mono, kSamplesPerChannel);
int16_t interleaved[kSamplesPerChannel];
Interleave(deinterleaved, kSamplesPerChannel, kNumChannels, interleaved);
- ExpectArraysEq(interleaved, mono, kSamplesPerChannel);
+ ExpectArraysEq(mono, interleaved, kSamplesPerChannel);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_audio/common_audio.gyp b/chromium/third_party/webrtc/common_audio/common_audio.gyp
index fa0135eff77..3bed4e4d80e 100644
--- a/chromium/third_party/webrtc/common_audio/common_audio.gyp
+++ b/chromium/third_party/webrtc/common_audio/common_audio.gyp
@@ -30,6 +30,10 @@
},
'sources': [
'audio_util.cc',
+ 'fir_filter.cc',
+ 'fir_filter.h',
+ 'fir_filter_neon.h',
+ 'fir_filter_sse.h',
'include/audio_util.h',
'resampler/include/push_resampler.h',
'resampler/include/resampler.h',
@@ -152,6 +156,7 @@
'target_name': 'common_audio_sse2',
'type': 'static_library',
'sources': [
+ 'fir_filter_sse.cc',
'resampler/sinc_resampler_sse.cc',
],
'cflags': ['-msse2',],
@@ -168,6 +173,7 @@
'type': 'static_library',
'includes': ['../build/arm_neon.gypi',],
'sources': [
+ 'fir_filter_neon.cc',
'resampler/sinc_resampler_neon.cc',
'signal_processing/cross_correlation_neon.S',
'signal_processing/downsample_fast_neon.S',
@@ -190,6 +196,7 @@
],
'sources': [
'audio_util_unittest.cc',
+ 'fir_filter_unittest.cc',
'resampler/resampler_unittest.cc',
'resampler/push_resampler_unittest.cc',
'resampler/push_sinc_resampler_unittest.cc',
@@ -208,7 +215,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -219,7 +226,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'targets': [
{
'target_name': 'common_audio_unittests_apk_target',
diff --git a/chromium/third_party/webrtc/common_audio/common_audio_unittests.isolate b/chromium/third_party/webrtc/common_audio/common_audio_unittests.isolate
index 49f1e984b0e..cc5e6ab4587 100644
--- a/chromium/third_party/webrtc/common_audio/common_audio_unittests.isolate
+++ b/chromium/third_party/webrtc/common_audio/common_audio_unittests.isolate
@@ -8,27 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/common_audio_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/common_audio_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/common_audio/fir_filter.cc b/chromium/third_party/webrtc/common_audio/fir_filter.cc
new file mode 100644
index 00000000000..a5411b888b4
--- /dev/null
+++ b/chromium/third_party/webrtc/common_audio/fir_filter.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_audio/fir_filter.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include "webrtc/common_audio/fir_filter_neon.h"
+#include "webrtc/common_audio/fir_filter_sse.h"
+#include "webrtc/system_wrappers/interface/cpu_features_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class FIRFilterC : public FIRFilter {
+ public:
+ FIRFilterC(const float* coefficients,
+ size_t coefficients_length);
+
+ virtual void Filter(const float* in, size_t length, float* out) OVERRIDE;
+
+ private:
+ size_t coefficients_length_;
+ size_t state_length_;
+ scoped_ptr<float[]> coefficients_;
+ scoped_ptr<float[]> state_;
+};
+
+FIRFilter* FIRFilter::Create(const float* coefficients,
+ size_t coefficients_length,
+ size_t max_input_length) {
+ if (!coefficients || coefficients_length <= 0 || max_input_length <= 0) {
+ assert(false);
+ return NULL;
+ }
+
+ FIRFilter* filter = NULL;
+// If we know the minimum architecture at compile time, avoid CPU detection.
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+#if defined(__SSE2__)
+ filter =
+ new FIRFilterSSE2(coefficients, coefficients_length, max_input_length);
+#else
+ // x86 CPU detection required.
+ if (WebRtc_GetCPUInfo(kSSE2)) {
+ filter =
+ new FIRFilterSSE2(coefficients, coefficients_length, max_input_length);
+ } else {
+ filter = new FIRFilterC(coefficients, coefficients_length);
+ }
+#endif
+#elif defined(WEBRTC_ARCH_ARM_V7)
+#if defined(WEBRTC_ARCH_ARM_NEON)
+ filter =
+ new FIRFilterNEON(coefficients, coefficients_length, max_input_length);
+#else
+ // ARM CPU detection required.
+ if (WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) {
+ filter =
+ new FIRFilterNEON(coefficients, coefficients_length, max_input_length);
+ } else {
+ filter = new FIRFilterC(coefficients, coefficients_length);
+ }
+#endif
+#else
+ filter = new FIRFilterC(coefficients, coefficients_length);
+#endif
+
+ return filter;
+}
+
+FIRFilterC::FIRFilterC(const float* coefficients, size_t coefficients_length)
+ : coefficients_length_(coefficients_length),
+ state_length_(coefficients_length - 1),
+ coefficients_(new float[coefficients_length_]),
+ state_(new float[state_length_]) {
+ for (size_t i = 0; i < coefficients_length_; ++i) {
+ coefficients_[i] = coefficients[coefficients_length_ - i - 1];
+ }
+ memset(state_.get(), 0.f, state_length_ * sizeof(state_[0]));
+}
+
+void FIRFilterC::Filter(const float* in, size_t length, float* out) {
+ assert(length > 0);
+
+ // Convolves the input signal |in| with the filter kernel |coefficients_|
+ // taking into account the previous state.
+ for (size_t i = 0; i < length; ++i) {
+ out[i] = 0.f;
+ size_t j;
+ for (j = 0; state_length_ > i && j < state_length_ - i; ++j) {
+ out[i] += state_[i + j] * coefficients_[j];
+ }
+ for (; j < coefficients_length_; ++j) {
+ out[i] += in[j + i - state_length_] * coefficients_[j];
+ }
+ }
+
+ // Update current state.
+ if (length >= state_length_) {
+ memcpy(
+ state_.get(), &in[length - state_length_], state_length_ * sizeof(*in));
+ } else {
+ memmove(state_.get(),
+ &state_[length],
+ (state_length_ - length) * sizeof(state_[0]));
+ memcpy(&state_[state_length_ - length], in, length * sizeof(*in));
+ }
+}
+
+} // namespace webrtc
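
A standalone reference sketch (assumption: not part of the patch) of the state-update rule used by FIRFilterC::Filter above: the state always keeps the last state_length input samples, whether the processed block was longer or shorter than the state.

#include <stdio.h>
#include <string.h>
#include <vector>

static void UpdateState(std::vector<float>* state, const float* in,
                        size_t length) {
  const size_t state_length = state->size();
  if (length >= state_length) {
    // Long block: the new state is simply the tail of the input.
    memcpy(&(*state)[0], &in[length - state_length],
           state_length * sizeof(*in));
  } else {
    // Short block: shift the old state left and append the whole input.
    memmove(&(*state)[0], &(*state)[length],
            (state_length - length) * sizeof(float));
    memcpy(&(*state)[state_length - length], in, length * sizeof(*in));
  }
}

int main() {
  std::vector<float> state(4, 0.f);                    // state_length == 4.
  const float block1[2] = {1.f, 2.f};                  // Shorter than state.
  const float block2[5] = {3.f, 4.f, 5.f, 6.f, 7.f};   // Longer than state.
  UpdateState(&state, block1, 2);                      // state: 0 0 1 2
  UpdateState(&state, block2, 5);                      // state: 4 5 6 7
  for (size_t i = 0; i < state.size(); ++i)
    printf("%.0f ", state[i]);
  printf("\n");
  return 0;
}
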
diff --git a/chromium/third_party/webrtc/common_audio/fir_filter.h b/chromium/third_party/webrtc/common_audio/fir_filter.h
new file mode 100644
index 00000000000..a5dc6eced1c
--- /dev/null
+++ b/chromium/third_party/webrtc/common_audio/fir_filter.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_AUDIO_FIR_FILTER_H_
+#define WEBRTC_COMMON_AUDIO_FIR_FILTER_H_
+
+#include <string.h>
+
+namespace webrtc {
+
+// Finite Impulse Response filter using floating-point arithmetic.
+class FIRFilter {
+ public:
+ // Creates a filter with the given coefficients. All initial state values will
+ // be zeros.
+ // The length of the chunks fed to the filter should never be greater than
+ // |max_input_length|. This is needed because, when vectorizing, it is
+ // necessary to concatenate the input after the state, and resizing this array
+ // dynamically is expensive.
+ static FIRFilter* Create(const float* coefficients,
+ size_t coefficients_length,
+ size_t max_input_length);
+
+ virtual ~FIRFilter() {}
+
+ // Filters the supplied |in| data.
+ // |out| must be preallocated and hold at least |length| elements.
+ virtual void Filter(const float* in, size_t length, float* out) = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_COMMON_AUDIO_FIR_FILTER_H_
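
A minimal in-tree usage sketch of the factory and Filter() declared above, following the same pattern as the unit test below; the coefficient and input values here are arbitrary.

#include <stddef.h>
#include <stdio.h>

#include "webrtc/common_audio/fir_filter.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

int main() {
  const float kCoefficients[] = {0.25f, 0.25f, 0.25f, 0.25f};  // 4-tap average.
  const size_t kNumCoefficients =
      sizeof(kCoefficients) / sizeof(kCoefficients[0]);
  const float kInput[] = {1.f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f};
  const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]);
  float output[kInputLength];

  // |max_input_length| must bound every chunk later passed to Filter().
  webrtc::scoped_ptr<webrtc::FIRFilter> filter(webrtc::FIRFilter::Create(
      kCoefficients, kNumCoefficients, kInputLength));
  filter->Filter(kInput, kInputLength, output);

  // After the initial zero state is flushed (3 samples), the moving average
  // of a constant input settles at the input value.
  for (size_t i = 0; i < kInputLength; ++i)
    printf("%f\n", output[i]);
  return 0;
}
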
diff --git a/chromium/third_party/webrtc/common_audio/fir_filter_neon.cc b/chromium/third_party/webrtc/common_audio/fir_filter_neon.cc
new file mode 100644
index 00000000000..97a75db0f27
--- /dev/null
+++ b/chromium/third_party/webrtc/common_audio/fir_filter_neon.cc
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_audio/fir_filter_neon.h"
+
+#include <arm_neon.h>
+#include <assert.h>
+#include <string.h>
+
+#include "webrtc/system_wrappers/interface/aligned_malloc.h"
+
+namespace webrtc {
+
+FIRFilterNEON::FIRFilterNEON(const float* coefficients,
+ size_t coefficients_length,
+ size_t max_input_length)
+ : // Closest higher multiple of four.
+ coefficients_length_((coefficients_length + 3) & ~0x03),
+ state_length_(coefficients_length_ - 1),
+ coefficients_(static_cast<float*>(
+ AlignedMalloc(sizeof(float) * coefficients_length_, 16))),
+ state_(static_cast<float*>(
+ AlignedMalloc(sizeof(float) * (max_input_length + state_length_),
+ 16))) {
+ // Add zeros at the end of the coefficients.
+ size_t padding = coefficients_length_ - coefficients_length;
+ memset(coefficients_.get(), 0.f, padding * sizeof(coefficients_[0]));
+ // The coefficients are reversed to compensate for the order in which the
+ // input samples are acquired (most recent last).
+ for (size_t i = 0; i < coefficients_length; ++i) {
+ coefficients_[i + padding] = coefficients[coefficients_length - i - 1];
+ }
+ memset(state_.get(),
+ 0.f,
+ (max_input_length + state_length_) * sizeof(state_[0]));
+}
+
+void FIRFilterNEON::Filter(const float* in, size_t length, float* out) {
+ assert(length > 0);
+
+ memcpy(&state_[state_length_], in, length * sizeof(*in));
+
+ // Convolves the input signal |in| with the filter kernel |coefficients_|
+ // taking into account the previous state.
+ for (size_t i = 0; i < length; ++i) {
+ float* in_ptr = &state_[i];
+ float* coef_ptr = coefficients_.get();
+
+ float32x4_t m_sum = vmovq_n_f32(0);
+ float32x4_t m_in;
+
+ for (size_t j = 0; j < coefficients_length_; j += 4) {
+ m_in = vld1q_f32(in_ptr + j);
+ m_sum = vmlaq_f32(m_sum, m_in, vld1q_f32(coef_ptr + j));
+ }
+
+ float32x2_t m_half = vadd_f32(vget_high_f32(m_sum), vget_low_f32(m_sum));
+ out[i] = vget_lane_f32(vpadd_f32(m_half, m_half), 0);
+ }
+
+ // Update current state.
+ memmove(state_.get(), &state_[length], state_length_ * sizeof(state_[0]));
+}
+
+} // namespace webrtc
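
A standalone sketch of the coefficient layout used by the NEON (and SSE2) constructors above: the length is rounded up to the next multiple of four, the front is zero-padded, and the coefficients are stored reversed. Values are illustrative.

#include <stdio.h>
#include <vector>

int main() {
  const float kCoefficients[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f};
  const size_t kLength = sizeof(kCoefficients) / sizeof(kCoefficients[0]);

  // Closest higher multiple of four, as in the constructors above.
  const size_t padded_length = (kLength + 3) & ~static_cast<size_t>(0x03);
  const size_t padding = padded_length - kLength;

  std::vector<float> padded(padded_length, 0.f);
  for (size_t i = 0; i < kLength; ++i)
    padded[i + padding] = kCoefficients[kLength - i - 1];

  // Prints: 0 0 0 0.11 0.7 0.5 0.3 0.2
  for (size_t i = 0; i < padded_length; ++i)
    printf("%g ", padded[i]);
  printf("\n");
  return 0;
}
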
diff --git a/chromium/third_party/webrtc/common_audio/fir_filter_neon.h b/chromium/third_party/webrtc/common_audio/fir_filter_neon.h
new file mode 100644
index 00000000000..df41c9256cf
--- /dev/null
+++ b/chromium/third_party/webrtc/common_audio/fir_filter_neon.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_AUDIO_FIR_FILTER_NEON_H_
+#define WEBRTC_COMMON_AUDIO_FIR_FILTER_NEON_H_
+
+#include "webrtc/common_audio/fir_filter.h"
+#include "webrtc/system_wrappers/interface/aligned_malloc.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class FIRFilterNEON : public FIRFilter {
+ public:
+ FIRFilterNEON(const float* coefficients,
+ size_t coefficients_length,
+ size_t max_input_length);
+
+ virtual void Filter(const float* in, size_t length, float* out) OVERRIDE;
+
+ private:
+ size_t coefficients_length_;
+ size_t state_length_;
+ scoped_ptr<float[], AlignedFreeDeleter> coefficients_;
+ scoped_ptr<float[], AlignedFreeDeleter> state_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_COMMON_AUDIO_FIR_FILTER_NEON_H_
diff --git a/chromium/third_party/webrtc/common_audio/fir_filter_sse.cc b/chromium/third_party/webrtc/common_audio/fir_filter_sse.cc
new file mode 100644
index 00000000000..7d873a735c5
--- /dev/null
+++ b/chromium/third_party/webrtc/common_audio/fir_filter_sse.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_audio/fir_filter_sse.h"
+
+#include <assert.h>
+#include <string.h>
+#include <xmmintrin.h>
+
+#include "webrtc/system_wrappers/interface/aligned_malloc.h"
+
+namespace webrtc {
+
+FIRFilterSSE2::FIRFilterSSE2(const float* coefficients,
+ size_t coefficients_length,
+ size_t max_input_length)
+ : // Closest higher multiple of four.
+ coefficients_length_((coefficients_length + 3) & ~0x03),
+ state_length_(coefficients_length_ - 1),
+ coefficients_(static_cast<float*>(
+ AlignedMalloc(sizeof(float) * coefficients_length_, 16))),
+ state_(static_cast<float*>(
+ AlignedMalloc(sizeof(float) * (max_input_length + state_length_),
+ 16))) {
+ // Add zeros at the end of the coefficients.
+ size_t padding = coefficients_length_ - coefficients_length;
+ memset(coefficients_.get(), 0.f, padding * sizeof(coefficients_[0]));
+ // The coefficients are reversed to compensate for the order in which the
+ // input samples are acquired (most recent last).
+ for (size_t i = 0; i < coefficients_length; ++i) {
+ coefficients_[i + padding] = coefficients[coefficients_length - i - 1];
+ }
+ memset(state_.get(),
+ 0.f,
+ (max_input_length + state_length_) * sizeof(state_[0]));
+}
+
+void FIRFilterSSE2::Filter(const float* in, size_t length, float* out) {
+ assert(length > 0);
+
+ memcpy(&state_[state_length_], in, length * sizeof(*in));
+
+ // Convolves the input signal |in| with the filter kernel |coefficients_|
+ // taking into account the previous state.
+ for (size_t i = 0; i < length; ++i) {
+ float* in_ptr = &state_[i];
+ float* coef_ptr = coefficients_.get();
+
+ __m128 m_sum = _mm_setzero_ps();
+ __m128 m_in;
+
+ // The input is loaded differently depending on whether the pointer is
+ // 16-byte aligned.
+ if (reinterpret_cast<uintptr_t>(in_ptr) & 0x0F) {
+ for (size_t j = 0; j < coefficients_length_; j += 4) {
+ m_in = _mm_loadu_ps(in_ptr + j);
+ m_sum = _mm_add_ps(m_sum, _mm_mul_ps(m_in, _mm_load_ps(coef_ptr + j)));
+ }
+ } else {
+ for (size_t j = 0; j < coefficients_length_; j += 4) {
+ m_in = _mm_load_ps(in_ptr + j);
+ m_sum = _mm_add_ps(m_sum, _mm_mul_ps(m_in, _mm_load_ps(coef_ptr + j)));
+ }
+ }
+ m_sum = _mm_add_ps(_mm_movehl_ps(m_sum, m_sum), m_sum);
+ _mm_store_ss(out + i, _mm_add_ss(m_sum, _mm_shuffle_ps(m_sum, m_sum, 1)));
+ }
+
+ // Update current state.
+ memmove(state_.get(), &state_[length], state_length_ * sizeof(state_[0]));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_audio/fir_filter_sse.h b/chromium/third_party/webrtc/common_audio/fir_filter_sse.h
new file mode 100644
index 00000000000..a0b9164ef9e
--- /dev/null
+++ b/chromium/third_party/webrtc/common_audio/fir_filter_sse.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_AUDIO_FIR_FILTER_SSE_H_
+#define WEBRTC_COMMON_AUDIO_FIR_FILTER_SSE_H_
+
+#include "webrtc/common_audio/fir_filter.h"
+#include "webrtc/system_wrappers/interface/aligned_malloc.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class FIRFilterSSE2 : public FIRFilter {
+ public:
+ FIRFilterSSE2(const float* coefficients,
+ size_t coefficients_length,
+ size_t max_input_length);
+
+ virtual void Filter(const float* in, size_t length, float* out) OVERRIDE;
+
+ private:
+ size_t coefficients_length_;
+ size_t state_length_;
+ scoped_ptr<float[], AlignedFreeDeleter> coefficients_;
+ scoped_ptr<float[], AlignedFreeDeleter> state_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_COMMON_AUDIO_FIR_FILTER_SSE_H_
diff --git a/chromium/third_party/webrtc/common_audio/fir_filter_unittest.cc b/chromium/third_party/webrtc/common_audio/fir_filter_unittest.cc
new file mode 100644
index 00000000000..01c716197d6
--- /dev/null
+++ b/chromium/third_party/webrtc/common_audio/fir_filter_unittest.cc
@@ -0,0 +1,207 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_audio/fir_filter.h"
+
+#include <string.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+static const float kCoefficients[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f};
+static const size_t kCoefficientsLength = sizeof(kCoefficients) /
+ sizeof(kCoefficients[0]);
+
+static const float kInput[] = {1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f,
+ 8.f, 9.f, 10.f};
+static const size_t kInputLength = sizeof(kInput) /
+ sizeof(kInput[0]);
+
+void VerifyOutput(const float* expected_output,
+ const float* output,
+ size_t length) {
+ EXPECT_EQ(0, memcmp(expected_output,
+ output,
+ length * sizeof(expected_output[0])));
+}
+
+TEST(FIRFilterTest, FilterAsIdentity) {
+ const float kCoefficients[] = {1.f, 0.f, 0.f, 0.f, 0.f};
+ float output[kInputLength];
+ scoped_ptr<FIRFilter> filter(FIRFilter::Create(
+ kCoefficients, kCoefficientsLength, kInputLength));
+ filter->Filter(kInput, kInputLength, output);
+
+ VerifyOutput(kInput, output, kInputLength);
+}
+
+TEST(FIRFilterTest, FilterUsedAsScalarMultiplication) {
+ const float kCoefficients[] = {5.f, 0.f, 0.f, 0.f, 0.f};
+ float output[kInputLength];
+ scoped_ptr<FIRFilter> filter(FIRFilter::Create(
+ kCoefficients, kCoefficientsLength, kInputLength));
+ filter->Filter(kInput, kInputLength, output);
+
+ EXPECT_FLOAT_EQ(5.f, output[0]);
+ EXPECT_FLOAT_EQ(20.f, output[3]);
+ EXPECT_FLOAT_EQ(25.f, output[4]);
+ EXPECT_FLOAT_EQ(50.f, output[kInputLength - 1]);
+}
+
+TEST(FIRFilterTest, FilterUsedAsInputShifting) {
+ const float kCoefficients[] = {0.f, 0.f, 0.f, 0.f, 1.f};
+ float output[kInputLength];
+ scoped_ptr<FIRFilter> filter(FIRFilter::Create(
+ kCoefficients, kCoefficientsLength, kInputLength));
+ filter->Filter(kInput, kInputLength, output);
+
+ EXPECT_FLOAT_EQ(0.f, output[0]);
+ EXPECT_FLOAT_EQ(0.f, output[3]);
+ EXPECT_FLOAT_EQ(1.f, output[4]);
+ EXPECT_FLOAT_EQ(2.f, output[5]);
+ EXPECT_FLOAT_EQ(6.f, output[kInputLength - 1]);
+}
+
+TEST(FIRFilterTest, FilterUsedAsArbitraryWeighting) {
+ float output[kInputLength];
+ scoped_ptr<FIRFilter> filter(FIRFilter::Create(
+ kCoefficients, kCoefficientsLength, kInputLength));
+ filter->Filter(kInput, kInputLength, output);
+
+ EXPECT_FLOAT_EQ(0.2f, output[0]);
+ EXPECT_FLOAT_EQ(3.4f, output[3]);
+ EXPECT_FLOAT_EQ(5.21f, output[4]);
+ EXPECT_FLOAT_EQ(7.02f, output[5]);
+ EXPECT_FLOAT_EQ(14.26f, output[kInputLength - 1]);
+}
+
+TEST(FIRFilterTest, FilterInLengthLesserOrEqualToCoefficientsLength) {
+ float output[kInputLength];
+ scoped_ptr<FIRFilter> filter(
+ FIRFilter::Create(kCoefficients, kCoefficientsLength, 2));
+ filter->Filter(kInput, 2, output);
+
+ EXPECT_FLOAT_EQ(0.2f, output[0]);
+ EXPECT_FLOAT_EQ(0.7f, output[1]);
+ filter.reset(FIRFilter::Create(
+ kCoefficients, kCoefficientsLength, kCoefficientsLength));
+ filter->Filter(kInput, kCoefficientsLength, output);
+
+ EXPECT_FLOAT_EQ(0.2f, output[0]);
+ EXPECT_FLOAT_EQ(3.4f, output[3]);
+ EXPECT_FLOAT_EQ(5.21f, output[4]);
+}
+
+TEST(FIRFilterTest, MultipleFilterCalls) {
+ float output[kInputLength];
+ scoped_ptr<FIRFilter> filter(
+ FIRFilter::Create(kCoefficients, kCoefficientsLength, 3));
+ filter->Filter(kInput, 2, output);
+ EXPECT_FLOAT_EQ(0.2f, output[0]);
+ EXPECT_FLOAT_EQ(0.7f, output[1]);
+
+ filter->Filter(kInput, 2, output);
+ EXPECT_FLOAT_EQ(1.3f, output[0]);
+ EXPECT_FLOAT_EQ(2.4f, output[1]);
+
+ filter->Filter(kInput, 2, output);
+ EXPECT_FLOAT_EQ(2.81f, output[0]);
+ EXPECT_FLOAT_EQ(2.62f, output[1]);
+
+ filter->Filter(kInput, 2, output);
+ EXPECT_FLOAT_EQ(2.81f, output[0]);
+ EXPECT_FLOAT_EQ(2.62f, output[1]);
+
+ filter->Filter(&kInput[3], 3, output);
+ EXPECT_FLOAT_EQ(3.41f, output[0]);
+ EXPECT_FLOAT_EQ(4.12f, output[1]);
+ EXPECT_FLOAT_EQ(6.21f, output[2]);
+
+ filter->Filter(&kInput[3], 3, output);
+ EXPECT_FLOAT_EQ(8.12f, output[0]);
+ EXPECT_FLOAT_EQ(9.14f, output[1]);
+ EXPECT_FLOAT_EQ(9.45f, output[2]);
+}
+
+TEST(FIRFilterTest, VerifySampleBasedVsBlockBasedFiltering) {
+ float output_block_based[kInputLength];
+ scoped_ptr<FIRFilter> filter(FIRFilter::Create(
+ kCoefficients, kCoefficientsLength, kInputLength));
+ filter->Filter(kInput, kInputLength, output_block_based);
+
+ float output_sample_based[kInputLength];
+ filter.reset(FIRFilter::Create(kCoefficients, kCoefficientsLength, 1));
+ for (size_t i = 0; i < kInputLength; ++i) {
+ filter->Filter(&kInput[i], 1, &output_sample_based[i]);
+ }
+
+ EXPECT_EQ(0, memcmp(output_sample_based,
+ output_block_based,
+ kInputLength * sizeof(output_block_based[0])));
+}
+
+TEST(FIRFilterTest, SimplestHighPassFilter) {
+ const float kCoefficients[] = {1.f, -1.f};
+ const size_t kCoefficientsLength = sizeof(kCoefficients) /
+ sizeof(kCoefficients[0]);
+
+ float kConstantInput[] = {1.f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f};
+ const size_t kConstantInputLength = sizeof(kConstantInput) /
+ sizeof(kConstantInput[0]);
+
+ float output[kConstantInputLength];
+ scoped_ptr<FIRFilter> filter(FIRFilter::Create(
+ kCoefficients, kCoefficientsLength, kConstantInputLength));
+ filter->Filter(kConstantInput, kConstantInputLength, output);
+ EXPECT_FLOAT_EQ(1.f, output[0]);
+ for (size_t i = kCoefficientsLength - 1; i < kConstantInputLength; ++i) {
+ EXPECT_FLOAT_EQ(0.f, output[i]);
+ }
+}
+
+TEST(FIRFilterTest, SimplestLowPassFilter) {
+ const float kCoefficients[] = {1.f, 1.f};
+ const size_t kCoefficientsLength = sizeof(kCoefficients) /
+ sizeof(kCoefficients[0]);
+
+ float kHighFrequencyInput[] = {-1.f, 1.f, -1.f, 1.f, -1.f, 1.f, -1.f, 1.f};
+ const size_t kHighFrequencyInputLength = sizeof(kHighFrequencyInput) /
+ sizeof(kHighFrequencyInput[0]);
+
+ float output[kHighFrequencyInputLength];
+ scoped_ptr<FIRFilter> filter(FIRFilter::Create(
+ kCoefficients, kCoefficientsLength, kHighFrequencyInputLength));
+ filter->Filter(kHighFrequencyInput, kHighFrequencyInputLength, output);
+ EXPECT_FLOAT_EQ(-1.f, output[0]);
+ for (size_t i = kCoefficientsLength - 1; i < kHighFrequencyInputLength; ++i) {
+ EXPECT_FLOAT_EQ(0.f, output[i]);
+ }
+}
+
+TEST(FIRFilterTest, SameOutputWhenSwapedCoefficientsAndInput) {
+ float output[kCoefficientsLength];
+ float output_swaped[kCoefficientsLength];
+ scoped_ptr<FIRFilter> filter(FIRFilter::Create(
+ kCoefficients, kCoefficientsLength, kCoefficientsLength));
+ // Use kCoefficientsLength for in_length to get same-length outputs.
+ filter->Filter(kInput, kCoefficientsLength, output);
+
+ filter.reset(FIRFilter::Create(
+ kInput, kCoefficientsLength, kCoefficientsLength));
+ filter->Filter(kCoefficients, kCoefficientsLength, output_swaped);
+
+ for (size_t i = 0 ; i < kCoefficientsLength; ++i) {
+ EXPECT_FLOAT_EQ(output[i], output_swaped[i]);
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_audio/include/audio_util.h b/chromium/third_party/webrtc/common_audio/include/audio_util.h
index 5e86c1f06ac..18fdbe2ad0c 100644
--- a/chromium/third_party/webrtc/common_audio/include/audio_util.h
+++ b/chromium/third_party/webrtc/common_audio/include/audio_util.h
@@ -11,36 +11,83 @@
#ifndef WEBRTC_COMMON_AUDIO_INCLUDE_AUDIO_UTIL_H_
#define WEBRTC_COMMON_AUDIO_INCLUDE_AUDIO_UTIL_H_
+#include <limits>
+
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
-// Clamp the floating |value| to the range representable by an int16_t.
-static inline float ClampInt16(float value) {
- const float kMaxInt16 = 32767.f;
- const float kMinInt16 = -32768.f;
- return value < kMinInt16 ? kMinInt16 :
- (value > kMaxInt16 ? kMaxInt16 : value);
+typedef std::numeric_limits<int16_t> limits_int16;
+
+static inline int16_t RoundToInt16(float v) {
+ const float kMaxRound = limits_int16::max() - 0.5f;
+ const float kMinRound = limits_int16::min() + 0.5f;
+ if (v > 0)
+ return v >= kMaxRound ? limits_int16::max() :
+ static_cast<int16_t>(v + 0.5f);
+ return v <= kMinRound ? limits_int16::min() :
+ static_cast<int16_t>(v - 0.5f);
}
-// Return a rounded int16_t of the floating |value|. Doesn't handle overflow;
-// use ClampInt16 if necessary.
-static inline int16_t RoundToInt16(float value) {
- return static_cast<int16_t>(value < 0.f ? value - 0.5f : value + 0.5f);
+// Scale (from [-1, 1]) and round to full-range int16 with clamping.
+static inline int16_t ScaleAndRoundToInt16(float v) {
+ if (v > 0)
+ return v >= 1 ? limits_int16::max() :
+ static_cast<int16_t>(v * limits_int16::max() + 0.5f);
+ return v <= -1 ? limits_int16::min() :
+ static_cast<int16_t>(-v * limits_int16::min() - 0.5f);
}
+// Scale to float [-1, 1].
+static inline float ScaleToFloat(int16_t v) {
+ const float kMaxInt16Inverse = 1.f / limits_int16::max();
+ const float kMinInt16Inverse = 1.f / limits_int16::min();
+ return v * (v > 0 ? kMaxInt16Inverse : -kMinInt16Inverse);
+}
+
+// Round |size| elements of |src| to int16 with clamping and write to |dest|.
+void RoundToInt16(const float* src, int size, int16_t* dest);
+
+// Scale (from [-1, 1]) and round |size| elements of |src| to full-range int16
+// with clamping and write to |dest|.
+void ScaleAndRoundToInt16(const float* src, int size, int16_t* dest);
+
+// Scale |size| elements of |src| to float [-1, 1] and write to |dest|.
+void ScaleToFloat(const int16_t* src, int size, float* dest);
+
// Deinterleave audio from |interleaved| to the channel buffers pointed to
// by |deinterleaved|. There must be sufficient space allocated in the
// |deinterleaved| buffers (|num_channel| buffers with |samples_per_channel|
// per buffer).
-void Deinterleave(const int16_t* interleaved, int samples_per_channel,
- int num_channels, int16_t** deinterleaved);
+template <typename T>
+void Deinterleave(const T* interleaved, int samples_per_channel,
+ int num_channels, T** deinterleaved) {
+ for (int i = 0; i < num_channels; ++i) {
+ T* channel = deinterleaved[i];
+ int interleaved_idx = i;
+ for (int j = 0; j < samples_per_channel; ++j) {
+ channel[j] = interleaved[interleaved_idx];
+ interleaved_idx += num_channels;
+ }
+ }
+}
// Interleave audio from the channel buffers pointed to by |deinterleaved| to
// |interleaved|. There must be sufficient space allocated in |interleaved|
// (|samples_per_channel| * |num_channels|).
-void Interleave(const int16_t* const* deinterleaved, int samples_per_channel,
- int num_channels, int16_t* interleaved);
+template <typename T>
+void Interleave(const T* const* deinterleaved, int samples_per_channel,
+ int num_channels, T* interleaved) {
+ for (int i = 0; i < num_channels; ++i) {
+ const T* channel = deinterleaved[i];
+ int interleaved_idx = i;
+ for (int j = 0; j < samples_per_channel; ++j) {
+ interleaved[interleaved_idx] = channel[j];
+ interleaved_idx += num_channels;
+ }
+ }
+}
} // namespace webrtc
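
An in-tree usage sketch of the new conversion helpers and the now-templated Deinterleave/Interleave (assumes linking against common_audio; the sample values are arbitrary).

#include <stdio.h>

#include "webrtc/common_audio/include/audio_util.h"

int main() {
  // int16 -> float [-1, 1] -> int16 round trip.
  const int16_t samples[4] = {0, 16384, 32767, -32768};
  float as_float[4];
  int16_t back[4];
  webrtc::ScaleToFloat(samples, 4, as_float);
  webrtc::ScaleAndRoundToInt16(as_float, 4, back);
  for (int i = 0; i < 4; ++i)
    printf("%d -> %f -> %d\n", samples[i], as_float[i], back[i]);

  // The deinterleaving helpers now work on float buffers as well.
  const float interleaved[6] = {1.f, 10.f, 2.f, 20.f, 3.f, 30.f};
  float left[3];
  float right[3];
  float* deinterleaved[] = {left, right};
  webrtc::Deinterleave(interleaved, 3, 2, deinterleaved);
  printf("left: %g %g %g  right: %g %g %g\n",
         left[0], left[1], left[2], right[0], right[1], right[2]);
  return 0;
}
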
diff --git a/chromium/third_party/webrtc/common_audio/resampler/include/push_resampler.h b/chromium/third_party/webrtc/common_audio/resampler/include/push_resampler.h
index 770a992618a..f04dc0f3e81 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/include/push_resampler.h
+++ b/chromium/third_party/webrtc/common_audio/resampler/include/push_resampler.h
@@ -20,6 +20,7 @@ class PushSincResampler;
// Wraps PushSincResampler to provide stereo support.
// TODO(ajm): add support for an arbitrary number of channels.
+template <typename T>
class PushResampler {
public:
PushResampler();
@@ -32,22 +33,18 @@ class PushResampler {
// Returns the total number of samples provided in destination (e.g. 32 kHz,
// 2 channel audio gives 640 samples).
- int Resample(const int16_t* src, int src_length, int16_t* dst,
- int dst_capacity);
+ int Resample(const T* src, int src_length, T* dst, int dst_capacity);
private:
- int ResampleSinc(const int16_t* src, int src_length, int16_t* dst,
- int dst_capacity);
-
scoped_ptr<PushSincResampler> sinc_resampler_;
scoped_ptr<PushSincResampler> sinc_resampler_right_;
int src_sample_rate_hz_;
int dst_sample_rate_hz_;
int num_channels_;
- scoped_array<int16_t> src_left_;
- scoped_array<int16_t> src_right_;
- scoped_array<int16_t> dst_left_;
- scoped_array<int16_t> dst_right_;
+ scoped_ptr<T[]> src_left_;
+ scoped_ptr<T[]> src_right_;
+ scoped_ptr<T[]> dst_left_;
+ scoped_ptr<T[]> dst_right_;
};
} // namespace webrtc
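
An in-tree sketch of the templated resampler: 10 ms of 32 kHz stereo int16 resampled to 48 kHz, following the sizing rule in the comment above (each 10 ms buffer holds num_channels * rate / 100 samples).

#include <stdio.h>
#include <vector>

#include "webrtc/common_audio/resampler/include/push_resampler.h"

int main() {
  webrtc::PushResampler<int16_t> resampler;
  if (resampler.InitializeIfNeeded(32000, 48000, 2) != 0) {
    printf("init failed\n");
    return 1;
  }
  std::vector<int16_t> src(32000 / 100 * 2, 0);  // 640 samples of silence.
  std::vector<int16_t> dst(48000 / 100 * 2);     // 960-sample capacity.
  int produced = resampler.Resample(&src[0], static_cast<int>(src.size()),
                                    &dst[0], static_cast<int>(dst.size()));
  printf("produced %d samples\n", produced);     // 960 for 48 kHz stereo.
  return 0;
}
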
diff --git a/chromium/third_party/webrtc/common_audio/resampler/push_resampler.cc b/chromium/third_party/webrtc/common_audio/resampler/push_resampler.cc
index 29944187d82..973c8f74f76 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/push_resampler.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/push_resampler.cc
@@ -18,22 +18,21 @@
namespace webrtc {
-PushResampler::PushResampler()
+template <typename T>
+PushResampler<T>::PushResampler()
: src_sample_rate_hz_(0),
dst_sample_rate_hz_(0),
- num_channels_(0),
- src_left_(NULL),
- src_right_(NULL),
- dst_left_(NULL),
- dst_right_(NULL) {
+ num_channels_(0) {
}
-PushResampler::~PushResampler() {
+template <typename T>
+PushResampler<T>::~PushResampler() {
}
-int PushResampler::InitializeIfNeeded(int src_sample_rate_hz,
- int dst_sample_rate_hz,
- int num_channels) {
+template <typename T>
+int PushResampler<T>::InitializeIfNeeded(int src_sample_rate_hz,
+ int dst_sample_rate_hz,
+ int num_channels) {
if (src_sample_rate_hz == src_sample_rate_hz_ &&
dst_sample_rate_hz == dst_sample_rate_hz_ &&
num_channels == num_channels_)
@@ -53,10 +52,10 @@ int PushResampler::InitializeIfNeeded(int src_sample_rate_hz,
sinc_resampler_.reset(new PushSincResampler(src_size_10ms_mono,
dst_size_10ms_mono));
if (num_channels_ == 2) {
- src_left_.reset(new int16_t[src_size_10ms_mono]);
- src_right_.reset(new int16_t[src_size_10ms_mono]);
- dst_left_.reset(new int16_t[dst_size_10ms_mono]);
- dst_right_.reset(new int16_t[dst_size_10ms_mono]);
+ src_left_.reset(new T[src_size_10ms_mono]);
+ src_right_.reset(new T[src_size_10ms_mono]);
+ dst_left_.reset(new T[dst_size_10ms_mono]);
+ dst_right_.reset(new T[dst_size_10ms_mono]);
sinc_resampler_right_.reset(new PushSincResampler(src_size_10ms_mono,
dst_size_10ms_mono));
}
@@ -64,8 +63,9 @@ int PushResampler::InitializeIfNeeded(int src_sample_rate_hz,
return 0;
}
-int PushResampler::Resample(const int16_t* src, int src_length,
- int16_t* dst, int dst_capacity) {
+template <typename T>
+int PushResampler<T>::Resample(const T* src, int src_length, T* dst,
+ int dst_capacity) {
const int src_size_10ms = src_sample_rate_hz_ * num_channels_ / 100;
const int dst_size_10ms = dst_sample_rate_hz_ * num_channels_ / 100;
if (src_length != src_size_10ms || dst_capacity < dst_size_10ms)
@@ -74,13 +74,13 @@ int PushResampler::Resample(const int16_t* src, int src_length,
if (src_sample_rate_hz_ == dst_sample_rate_hz_) {
// The old resampler provides this memcpy facility in the case of matching
// sample rates, so reproduce it here for the sinc resampler.
- memcpy(dst, src, src_length * sizeof(int16_t));
+ memcpy(dst, src, src_length * sizeof(T));
return src_length;
}
if (num_channels_ == 2) {
const int src_length_mono = src_length / num_channels_;
const int dst_capacity_mono = dst_capacity / num_channels_;
- int16_t* deinterleaved[] = {src_left_.get(), src_right_.get()};
+ T* deinterleaved[] = {src_left_.get(), src_right_.get()};
Deinterleave(src, src_length_mono, num_channels_, deinterleaved);
int dst_length_mono =
@@ -98,4 +98,8 @@ int PushResampler::Resample(const int16_t* src, int src_length,
}
}
+// Explicitly generate the required template instantiations.
+template class PushResampler<int16_t>;
+template class PushResampler<float>;
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_audio/resampler/push_resampler_unittest.cc b/chromium/third_party/webrtc/common_audio/resampler/push_resampler_unittest.cc
index c40923bf987..4449f4c6331 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/push_resampler_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/push_resampler_unittest.cc
@@ -16,7 +16,7 @@
namespace webrtc {
TEST(PushResamplerTest, VerifiesInputParameters) {
- PushResampler resampler;
+ PushResampler<int16_t> resampler;
EXPECT_EQ(-1, resampler.InitializeIfNeeded(-1, 16000, 1));
EXPECT_EQ(-1, resampler.InitializeIfNeeded(16000, -1, 1));
EXPECT_EQ(-1, resampler.InitializeIfNeeded(16000, 16000, 0));
diff --git a/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler.cc b/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler.cc
index 1fb72dc76cf..027555902b8 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler.cc
@@ -9,22 +9,23 @@
*/
#include "webrtc/common_audio/include/audio_util.h"
-#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
+#include <assert.h>
#include <string.h>
+#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
+
namespace webrtc {
-PushSincResampler::PushSincResampler(int source_frames,
- int destination_frames)
+PushSincResampler::PushSincResampler(int source_frames, int destination_frames)
: resampler_(new SincResampler(source_frames * 1.0 / destination_frames,
- source_frames, this)),
- float_buffer_(new float[destination_frames]),
+ source_frames,
+ this)),
source_ptr_(NULL),
+ source_ptr_int_(NULL),
destination_frames_(destination_frames),
first_pass_(true),
- source_available_(0) {
-}
+ source_available_(0) {}
PushSincResampler::~PushSincResampler() {
}
@@ -33,6 +34,21 @@ int PushSincResampler::Resample(const int16_t* source,
int source_length,
int16_t* destination,
int destination_capacity) {
+ if (!float_buffer_.get())
+ float_buffer_.reset(new float[destination_frames_]);
+
+ source_ptr_int_ = source;
+ // Pass NULL as the float source to have Run() read from the int16 source.
+ Resample(NULL, source_length, float_buffer_.get(), destination_frames_);
+ RoundToInt16(float_buffer_.get(), destination_frames_, destination);
+ source_ptr_int_ = NULL;
+ return destination_frames_;
+}
+
+int PushSincResampler::Resample(const float* source,
+ int source_length,
+ float* destination,
+ int destination_capacity) {
assert(source_length == resampler_->request_frames());
assert(destination_capacity >= destination_frames_);
// Cache the source pointer. Calling Resample() will immediately trigger
@@ -54,17 +70,14 @@ int PushSincResampler::Resample(const int16_t* source,
// request in order to prime the buffer with a single Run() request for
// |source_frames|.
if (first_pass_)
- resampler_->Resample(resampler_->ChunkSize(), float_buffer_.get());
+ resampler_->Resample(resampler_->ChunkSize(), destination);
- resampler_->Resample(destination_frames_, float_buffer_.get());
- for (int i = 0; i < destination_frames_; ++i)
- destination[i] = RoundToInt16(ClampInt16(float_buffer_[i]));
+ resampler_->Resample(destination_frames_, destination);
source_ptr_ = NULL;
return destination_frames_;
}
void PushSincResampler::Run(int frames, float* destination) {
- assert(source_ptr_ != NULL);
// Ensure we are only asked for the available samples. This would fail if
// Run() was triggered more than once per Resample() call.
assert(source_available_ == frames);
@@ -74,11 +87,16 @@ void PushSincResampler::Run(int frames, float* destination) {
// discarded, as described in Resample().
memset(destination, 0, frames * sizeof(float));
first_pass_ = false;
+ return;
+ }
+
+ if (source_ptr_) {
+ memcpy(destination, source_ptr_, frames * sizeof(float));
} else {
for (int i = 0; i < frames; ++i)
- destination[i] = static_cast<float>(source_ptr_[i]);
- source_available_ -= frames;
+ destination[i] = static_cast<float>(source_ptr_int_[i]);
}
+ source_available_ -= frames;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler.h b/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler.h
index 82b9045fce4..df724e2e5ed 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler.h
+++ b/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler.h
@@ -11,8 +11,8 @@
#ifndef WEBRTC_COMMON_AUDIO_RESAMPLER_PUSH_SINC_RESAMPLER_H_
#define WEBRTC_COMMON_AUDIO_RESAMPLER_PUSH_SINC_RESAMPLER_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_audio/resampler/sinc_resampler.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
@@ -35,16 +35,24 @@ class PushSincResampler : public SincResamplerCallback {
// to |destination_frames|).
int Resample(const int16_t* source, int source_frames,
int16_t* destination, int destination_capacity);
+ int Resample(const float* source,
+ int source_frames,
+ float* destination,
+ int destination_capacity);
// Implements SincResamplerCallback.
virtual void Run(int frames, float* destination) OVERRIDE;
SincResampler* get_resampler_for_testing() { return resampler_.get(); }
+ static float AlgorithmicDelaySeconds(int source_rate_hz) {
+ return 1.f / source_rate_hz * SincResampler::kKernelSize / 2;
+ }
private:
scoped_ptr<SincResampler> resampler_;
- scoped_array<float> float_buffer_;
- const int16_t* source_ptr_;
+ scoped_ptr<float[]> float_buffer_;
+ const float* source_ptr_;
+ const int16_t* source_ptr_int_;
const int destination_frames_;
// True on the first call to Resample(), to prime the SincResampler buffer.
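For orientation, the new AlgorithmicDelaySeconds() helper is just half the kernel length divided by the source rate. A quick check of the arithmetic, assuming SincResampler::kKernelSize is 32 (the constant is defined outside this hunk):

#include <stdio.h>

// Prints the modelled resampler delay for some common source rates. With a
// 32-tap kernel, 48 kHz gives 32 / 2 / 48000 = 16 / 48000 s, about 0.33 ms.
int main() {
  const int kAssumedKernelSize = 32;  // Assumed value; see sinc_resampler.h.
  const int rates_hz[] = {8000, 16000, 32000, 48000};
  for (int i = 0; i < 4; ++i) {
    const float delay_s = 1.f / rates_hz[i] * kAssumedKernelSize / 2;
    printf("%d Hz -> %.3f ms\n", rates_hz[i], delay_s * 1000);
  }
  return 0;
}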
diff --git a/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler_unittest.cc b/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler_unittest.cc
index d5005c6d05b..1ca4fdf9364 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler_unittest.cc
@@ -12,6 +12,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_audio/include/audio_util.h"
#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
#include "webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@@ -34,6 +35,9 @@ class PushSincResamplerTest
virtual ~PushSincResamplerTest() {}
protected:
+ void ResampleBenchmarkTest(bool int_format);
+ void ResampleTest(bool int_format);
+
int input_rate_;
int output_rate_;
double rms_error_;
@@ -47,20 +51,18 @@ class ZeroSource : public SincResamplerCallback {
}
};
-// Disabled because it takes too long to run routinely. Use for performance
-// benchmarking when needed.
-TEST_P(PushSincResamplerTest, DISABLED_ResampleBenchmark) {
+void PushSincResamplerTest::ResampleBenchmarkTest(bool int_format) {
const int input_samples = input_rate_ / 100;
const int output_samples = output_rate_ / 100;
- const int kResampleIterations = 200000;
+ const int kResampleIterations = 500000;
// Source for data to be resampled.
ZeroSource resampler_source;
- scoped_array<float> resampled_destination(new float[output_samples]);
- scoped_array<float> source(new float[input_samples]);
- scoped_array<int16_t> source_int(new int16_t[input_samples]);
- scoped_array<int16_t> destination_int(new int16_t[output_samples]);
+ scoped_ptr<float[]> resampled_destination(new float[output_samples]);
+ scoped_ptr<float[]> source(new float[input_samples]);
+ scoped_ptr<int16_t[]> source_int(new int16_t[input_samples]);
+ scoped_ptr<int16_t[]> destination_int(new int16_t[output_samples]);
resampler_source.Run(input_samples, source.get());
for (int i = 0; i < input_samples; ++i) {
@@ -82,10 +84,22 @@ TEST_P(PushSincResamplerTest, DISABLED_ResampleBenchmark) {
PushSincResampler resampler(input_samples, output_samples);
start = TickTime::Now();
- for (int i = 0; i < kResampleIterations; ++i) {
- EXPECT_EQ(output_samples,
- resampler.Resample(source_int.get(), input_samples,
- destination_int.get(), output_samples));
+ if (int_format) {
+ for (int i = 0; i < kResampleIterations; ++i) {
+ EXPECT_EQ(output_samples,
+ resampler.Resample(source_int.get(),
+ input_samples,
+ destination_int.get(),
+ output_samples));
+ }
+ } else {
+ for (int i = 0; i < kResampleIterations; ++i) {
+ EXPECT_EQ(output_samples,
+ resampler.Resample(source.get(),
+ input_samples,
+ resampled_destination.get(),
+ output_samples));
+ }
}
double total_time_us = (TickTime::Now() - start).Microseconds();
printf("PushSincResampler took %.2f us per frame; which is a %.1f%% overhead "
@@ -93,8 +107,18 @@ TEST_P(PushSincResamplerTest, DISABLED_ResampleBenchmark) {
(total_time_us - total_time_sinc_us) / total_time_sinc_us * 100);
}
+// Disabled because it takes too long to run routinely. Use for performance
+// benchmarking when needed.
+TEST_P(PushSincResamplerTest, DISABLED_BenchmarkInt) {
+ ResampleBenchmarkTest(true);
+}
+
+TEST_P(PushSincResamplerTest, DISABLED_BenchmarkFloat) {
+ ResampleBenchmarkTest(false);
+}
+
// Tests resampling using a given input and output sample rate.
-TEST_P(PushSincResamplerTest, Resample) {
+void PushSincResamplerTest::ResampleTest(bool int_format) {
// Make comparisons using one second of data.
static const double kTestDurationSecs = 1;
// 10 ms blocks.
@@ -115,11 +139,11 @@ TEST_P(PushSincResamplerTest, Resample) {
// TODO(dalecurtis): If we switch to AVX/SSE optimization, we'll need to
// allocate these on 32-byte boundaries and ensure they're sized % 32 bytes.
- scoped_array<float> resampled_destination(new float[output_samples]);
- scoped_array<float> pure_destination(new float[output_samples]);
- scoped_array<float> source(new float[input_samples]);
- scoped_array<int16_t> source_int(new int16_t[input_block_size]);
- scoped_array<int16_t> destination_int(new int16_t[output_block_size]);
+ scoped_ptr<float[]> resampled_destination(new float[output_samples]);
+ scoped_ptr<float[]> pure_destination(new float[output_samples]);
+ scoped_ptr<float[]> source(new float[input_samples]);
+ scoped_ptr<int16_t[]> source_int(new int16_t[input_block_size]);
+ scoped_ptr<int16_t[]> destination_int(new int16_t[output_block_size]);
// The sinc resampler has an implicit delay of approximately half the kernel
// size at the input sample rate. By moving to a push model, this delay
@@ -134,17 +158,27 @@ TEST_P(PushSincResamplerTest, Resample) {
// With the PushSincResampler, we produce the signal block-by-10ms-block
// rather than in a single pass, to exercise how it will be used in WebRTC.
resampler_source.Run(input_samples, source.get());
- for (int i = 0; i < kNumBlocks; ++i) {
- for (int j = 0; j < input_block_size; ++j) {
- source_int[j] = static_cast<int16_t>(floor(32767 *
- source[i * input_block_size + j] + 0.5));
+ if (int_format) {
+ for (int i = 0; i < kNumBlocks; ++i) {
+ ScaleAndRoundToInt16(
+ &source[i * input_block_size], input_block_size, source_int.get());
+ EXPECT_EQ(output_block_size,
+ resampler.Resample(source_int.get(),
+ input_block_size,
+ destination_int.get(),
+ output_block_size));
+ ScaleToFloat(destination_int.get(),
+ output_block_size,
+ &resampled_destination[i * output_block_size]);
}
- EXPECT_EQ(output_block_size,
- resampler.Resample(source_int.get(), input_block_size,
- destination_int.get(), output_block_size));
- for (int j = 0; j < output_block_size; ++j) {
- resampled_destination[i * output_block_size + j] =
- static_cast<float>(destination_int[j]) / 32767;
+ } else {
+ for (int i = 0; i < kNumBlocks; ++i) {
+ EXPECT_EQ(
+ output_block_size,
+ resampler.Resample(&source[i * input_block_size],
+ input_block_size,
+ &resampled_destination[i * output_block_size],
+ output_block_size));
}
}
@@ -204,13 +238,19 @@ TEST_P(PushSincResamplerTest, Resample) {
EXPECT_LE(high_freq_max_error, kHighFrequencyMaxError);
}
+TEST_P(PushSincResamplerTest, ResampleInt) { ResampleTest(true); }
+
+TEST_P(PushSincResamplerTest, ResampleFloat) { ResampleTest(false); }
+
// Almost all conversions have an RMS error of around -14 dbFS.
static const double kResamplingRMSError = -14.42;
// Thresholds chosen arbitrarily based on what each resampling reported during
// testing. All thresholds are in dbFS, http://en.wikipedia.org/wiki/DBFS.
INSTANTIATE_TEST_CASE_P(
- PushSincResamplerTest, PushSincResamplerTest, testing::Values(
+ PushSincResamplerTest,
+ PushSincResamplerTest,
+ testing::Values(
// First run through the rates tested in SincResamplerTest. The
// thresholds are identical.
//
@@ -261,7 +301,7 @@ INSTANTIATE_TEST_CASE_P(
// practice anyway.
// To 8 kHz
- std::tr1::make_tuple(8000, 8000, kResamplingRMSError, -75.51),
+ std::tr1::make_tuple(8000, 8000, kResamplingRMSError, -75.50),
std::tr1::make_tuple(16000, 8000, -18.56, -28.79),
std::tr1::make_tuple(32000, 8000, -20.36, -14.13),
std::tr1::make_tuple(44100, 8000, -21.00, -11.39),
@@ -278,7 +318,7 @@ INSTANTIATE_TEST_CASE_P(
// To 32 kHz
std::tr1::make_tuple(8000, 32000, kResamplingRMSError, -70.30),
std::tr1::make_tuple(16000, 32000, kResamplingRMSError, -75.51),
- std::tr1::make_tuple(32000, 32000, kResamplingRMSError, -75.56),
+ std::tr1::make_tuple(32000, 32000, kResamplingRMSError, -75.51),
std::tr1::make_tuple(44100, 32000, -16.44, -51.10),
std::tr1::make_tuple(48000, 32000, -16.90, -44.03),
std::tr1::make_tuple(96000, 32000, -19.61, -18.04),
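The third and fourth tuple values above are RMS and high-frequency error thresholds in dBFS, measured against a full-scale (±1.0) reference signal. A minimal sketch of how an RMS error figure in dBFS is derived under that full-scale assumption (this mirrors, but is not copied from, the test's own measurement code):

#include <math.h>

// Returns the RMS difference between |actual| and |expected| in dBFS, with
// 1.0 treated as full scale.
double RmsErrorDbfs(const float* actual, const float* expected, int len) {
  double sum_sq = 0.0;
  for (int i = 0; i < len; ++i) {
    const double diff = actual[i] - expected[i];
    sum_sq += diff * diff;
  }
  return 20.0 * log10(sqrt(sum_sq / len));
}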
diff --git a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.cc b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.cc
index 05c00276e15..84f8125b59d 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.cc
@@ -90,6 +90,7 @@
#include "webrtc/system_wrappers/interface/cpu_features_wrapper.h"
#include "webrtc/typedefs.h"
+#include <assert.h>
#include <math.h>
#include <string.h>
@@ -114,13 +115,12 @@ static double SincScaleFactor(double io_ratio) {
}
// If we know the minimum architecture at compile time, avoid CPU detection.
-// iOS lies about its architecture, so we also need to exclude it here.
-#if defined(WEBRTC_ARCH_X86_FAMILY) && !defined(WEBRTC_IOS)
-#if defined(__SSE__)
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+#if defined(__SSE2__)
#define CONVOLVE_FUNC Convolve_SSE
void SincResampler::InitializeCPUSpecificFeatures() {}
#else
-// X86 CPU detection required. Function will be set by
+// x86 CPU detection required. Function will be set by
// InitializeCPUSpecificFeatures().
// TODO(dalecurtis): Once Chrome moves to an SSE baseline this can be removed.
#define CONVOLVE_FUNC convolve_proc_
@@ -134,7 +134,7 @@ void SincResampler::InitializeCPUSpecificFeatures() {
#define CONVOLVE_FUNC Convolve_NEON
void SincResampler::InitializeCPUSpecificFeatures() {}
#else
-// NEON CPU detection required. Function will be set by
+// ARM CPU detection required. Function will be set by
// InitializeCPUSpecificFeatures().
#define CONVOLVE_FUNC convolve_proc_
@@ -165,12 +165,12 @@ SincResampler::SincResampler(double io_sample_rate_ratio,
AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))),
input_buffer_(static_cast<float*>(
AlignedMalloc(sizeof(float) * input_buffer_size_, 16))),
-#if defined(WEBRTC_RESAMPLER_CPU_DETECTION)
+#if defined(WEBRTC_CPU_DETECTION)
convolve_proc_(NULL),
#endif
r1_(input_buffer_.get()),
r2_(input_buffer_.get() + kKernelSize / 2) {
-#if defined(WEBRTC_RESAMPLER_CPU_DETECTION)
+#if defined(WEBRTC_CPU_DETECTION)
InitializeCPUSpecificFeatures();
assert(convolve_proc_);
#endif
@@ -223,20 +223,20 @@ void SincResampler::InitializeKernel() {
for (int i = 0; i < kKernelSize; ++i) {
const int idx = i + offset_idx * kKernelSize;
const float pre_sinc = M_PI * (i - kKernelSize / 2 - subsample_offset);
- kernel_pre_sinc_storage_.get()[idx] = pre_sinc;
+ kernel_pre_sinc_storage_[idx] = pre_sinc;
// Compute Blackman window, matching the offset of the sinc().
const float x = (i - subsample_offset) / kKernelSize;
const float window = kA0 - kA1 * cos(2.0 * M_PI * x) + kA2
* cos(4.0 * M_PI * x);
- kernel_window_storage_.get()[idx] = window;
+ kernel_window_storage_[idx] = window;
// Compute the sinc with offset, then window the sinc() function and store
// at the correct offset.
if (pre_sinc == 0) {
- kernel_storage_.get()[idx] = sinc_scale_factor * window;
+ kernel_storage_[idx] = sinc_scale_factor * window;
} else {
- kernel_storage_.get()[idx] =
+ kernel_storage_[idx] =
window * sin(sinc_scale_factor * pre_sinc) / pre_sinc;
}
}
@@ -257,13 +257,13 @@ void SincResampler::SetRatio(double io_sample_rate_ratio) {
for (int offset_idx = 0; offset_idx <= kKernelOffsetCount; ++offset_idx) {
for (int i = 0; i < kKernelSize; ++i) {
const int idx = i + offset_idx * kKernelSize;
- const float window = kernel_window_storage_.get()[idx];
- const float pre_sinc = kernel_pre_sinc_storage_.get()[idx];
+ const float window = kernel_window_storage_[idx];
+ const float pre_sinc = kernel_pre_sinc_storage_[idx];
if (pre_sinc == 0) {
- kernel_storage_.get()[idx] = sinc_scale_factor * window;
+ kernel_storage_[idx] = sinc_scale_factor * window;
} else {
- kernel_storage_.get()[idx] =
+ kernel_storage_[idx] =
window * sin(sinc_scale_factor * pre_sinc) / pre_sinc;
}
}
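SetRatio() can now index the kernel tables directly instead of going through .get(); the tables cache the pre-sinc argument and the Blackman window separately so only the final product has to be recomputed when the ratio changes. A sketch of the per-tap math, assuming the classic Blackman coefficients (the actual kA0/kA1/kA2 values are not visible in this hunk):

#include <math.h>

// Computes one windowed-sinc tap the way InitializeKernel()/SetRatio() do:
// combine the cached window and pre-sinc argument with the current scale
// factor, falling back to sinc_scale_factor * window at the sinc singularity
// (since sin(s * t) / t -> s as t -> 0).
float WindowedSincTap(int i, float subsample_offset, double sinc_scale_factor,
                      int kernel_size) {
  const float kA0 = 0.42f, kA1 = 0.50f, kA2 = 0.08f;  // Assumed coefficients.
  const float pre_sinc = M_PI * (i - kernel_size / 2 - subsample_offset);
  const float x = (i - subsample_offset) / kernel_size;
  const float window =
      kA0 - kA1 * cos(2.0 * M_PI * x) + kA2 * cos(4.0 * M_PI * x);
  return pre_sinc == 0 ? sinc_scale_factor * window
                       : window * sin(sinc_scale_factor * pre_sinc) / pre_sinc;
}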
diff --git a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.h b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.h
index 60abd6128f9..71ade798c7e 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.h
+++ b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.h
@@ -14,19 +14,12 @@
#ifndef WEBRTC_COMMON_AUDIO_RESAMPLER_SINC_RESAMPLER_H_
#define WEBRTC_COMMON_AUDIO_RESAMPLER_SINC_RESAMPLER_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/aligned_malloc.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/testsupport/gtest_prod_util.h"
#include "webrtc/typedefs.h"
-#if (defined(WEBRTC_ARCH_X86_FAMILY) && !defined(WEBRTC_IOS) && \
- !defined(__SSE__)) || \
- (defined(WEBRTC_ARCH_ARM_V7) && !defined(WEBRTC_ARCH_ARM_NEON))
-// Convenience define.
-#define WEBRTC_RESAMPLER_CPU_DETECTION
-#endif
-
namespace webrtc {
// Callback class for providing more data into the resampler. Expects |frames|
@@ -106,9 +99,8 @@ class SincResampler {
void InitializeCPUSpecificFeatures();
// Compute convolution of |k1| and |k2| over |input_ptr|, resultant sums are
- // linearly interpolated using |kernel_interpolation_factor|. On x86, the
- // underlying implementation is chosen at run time based on SSE support. On
- // ARM, NEON support is chosen at compile time based on compilation flags.
+ // linearly interpolated using |kernel_interpolation_factor|. On x86 and ARM
+ // the underlying implementation is chosen at run time.
static float Convolve_C(const float* input_ptr, const float* k1,
const float* k2, double kernel_interpolation_factor);
#if defined(WEBRTC_ARCH_X86_FAMILY)
@@ -146,18 +138,18 @@ class SincResampler {
// Contains kKernelOffsetCount kernels back-to-back, each of size kKernelSize.
// The kernel offsets are sub-sample shifts of a windowed sinc shifted from
// 0.0 to 1.0 sample.
- scoped_ptr_malloc<float, AlignedFree> kernel_storage_;
- scoped_ptr_malloc<float, AlignedFree> kernel_pre_sinc_storage_;
- scoped_ptr_malloc<float, AlignedFree> kernel_window_storage_;
+ scoped_ptr<float[], AlignedFreeDeleter> kernel_storage_;
+ scoped_ptr<float[], AlignedFreeDeleter> kernel_pre_sinc_storage_;
+ scoped_ptr<float[], AlignedFreeDeleter> kernel_window_storage_;
// Data from the source is copied into this buffer for each processing pass.
- scoped_ptr_malloc<float, AlignedFree> input_buffer_;
+ scoped_ptr<float[], AlignedFreeDeleter> input_buffer_;
// Stores the runtime selection of which Convolve function to use.
// TODO(ajm): Move to using a global static which must only be initialized
// once by the user. We're not doing this initially, because we don't have
// e.g. a LazyInstance helper in webrtc.
-#if defined(WEBRTC_RESAMPLER_CPU_DETECTION)
+#if defined(WEBRTC_CPU_DETECTION)
typedef float (*ConvolveProc)(const float*, const float*, const float*,
double);
ConvolveProc convolve_proc_;
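WEBRTC_RESAMPLER_CPU_DETECTION has been folded into the shared WEBRTC_CPU_DETECTION define, but the pattern is unchanged: probe the CPU once at construction and call through a function pointer afterwards. A stand-alone sketch of that pattern (the two Convolve* functions below are illustrative stand-ins for SincResampler::Convolve_C / Convolve_SSE):

// Run-time dispatch: pick the widest available implementation once, then use
// the function pointer on the hot path.
typedef float (*ConvolveProc)(const float* input, const float* k1,
                              const float* k2, double interpolation);

static float ConvolveGeneric(const float*, const float*, const float*, double) {
  return 0.0f;  // Placeholder body for the sketch.
}
static float ConvolveSse2(const float*, const float*, const float*, double) {
  return 0.0f;  // Placeholder body for the sketch.
}

static ConvolveProc ChooseConvolve(bool cpu_has_sse2) {
  return cpu_has_sse2 ? ConvolveSse2 : ConvolveGeneric;
}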
diff --git a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc
index c085cfc20e2..97908625d2d 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc
@@ -62,7 +62,7 @@ TEST(SincResamplerTest, ChunkedResample) {
static const int kChunks = 2;
int max_chunk_size = resampler.ChunkSize() * kChunks;
- scoped_array<float> resampled_destination(new float[max_chunk_size]);
+ scoped_ptr<float[]> resampled_destination(new float[max_chunk_size]);
// Verify requesting ChunkSize() frames causes a single callback.
EXPECT_CALL(mock_source, Run(_, _))
@@ -81,7 +81,7 @@ TEST(SincResamplerTest, Flush) {
MockSource mock_source;
SincResampler resampler(kSampleRateRatio, SincResampler::kDefaultRequestSize,
&mock_source);
- scoped_array<float> resampled_destination(new float[resampler.ChunkSize()]);
+ scoped_ptr<float[]> resampled_destination(new float[resampler.ChunkSize()]);
// Fill the resampler with junk data.
EXPECT_CALL(mock_source, Run(_, _))
@@ -266,7 +266,7 @@ TEST_P(SincResamplerTest, Resample) {
// Force an update to the sample rate ratio to ensure dynamic sample rate
// changes are working correctly.
- scoped_array<float> kernel(new float[SincResampler::kKernelStorageSize]);
+ scoped_ptr<float[]> kernel(new float[SincResampler::kKernelStorageSize]);
memcpy(kernel.get(), resampler.get_kernel_for_testing(),
SincResampler::kKernelStorageSize);
resampler.SetRatio(M_PI);
@@ -278,8 +278,8 @@ TEST_P(SincResamplerTest, Resample) {
// TODO(dalecurtis): If we switch to AVX/SSE optimization, we'll need to
// allocate these on 32-byte boundaries and ensure they're sized % 32 bytes.
- scoped_array<float> resampled_destination(new float[output_samples]);
- scoped_array<float> pure_destination(new float[output_samples]);
+ scoped_ptr<float[]> resampled_destination(new float[output_samples]);
+ scoped_ptr<float[]> pure_destination(new float[output_samples]);
// Generate resampled signal.
resampler.Resample(output_samples, resampled_destination.get());
diff --git a/chromium/third_party/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h b/chromium/third_party/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h
index 6a5cbf5c2b2..7e9fe75e3db 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h
+++ b/chromium/third_party/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h
@@ -14,8 +14,8 @@
#ifndef WEBRTC_COMMON_AUDIO_RESAMPLER_SINUSOIDAL_LINEAR_CHIRP_SOURCE_H_
#define WEBRTC_COMMON_AUDIO_RESAMPLER_SINUSOIDAL_LINEAR_CHIRP_SOURCE_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_audio/resampler/sinc_resampler.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
namespace webrtc {
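The headers in this area all swap system_wrappers/interface/constructor_magic.h for webrtc/base/constructormagic.h; the macro they pull in is used in the usual way, for example (class name illustrative):

#include "webrtc/base/constructormagic.h"

// Declares a class non-copyable via the macro provided by the new header.
class ChirpSourceLike {
 public:
  ChirpSourceLike() {}

 private:
  DISALLOW_COPY_AND_ASSIGN(ChirpSourceLike);
};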
diff --git a/chromium/third_party/webrtc/common_audio/signal_processing/complex_fft.c b/chromium/third_party/webrtc/common_audio/signal_processing/complex_fft.c
index 2e8eb323c88..c82306473a4 100644
--- a/chromium/third_party/webrtc/common_audio/signal_processing/complex_fft.c
+++ b/chromium/third_party/webrtc/common_audio/signal_processing/complex_fft.c
@@ -105,7 +105,6 @@ int WebRtcSpl_ComplexFFT(int16_t frfi[], int stages, int mode)
#ifdef WEBRTC_ARCH_ARM_V7
int32_t wri = 0;
- int32_t frfi_r = 0;
__asm __volatile("pkhbt %0, %1, %2, lsl #16" : "=r"(wri) :
"r"((int32_t)wr), "r"((int32_t)wi));
#endif
@@ -115,19 +114,19 @@ int WebRtcSpl_ComplexFFT(int16_t frfi[], int stages, int mode)
j = i + l;
#ifdef WEBRTC_ARCH_ARM_V7
+ register int32_t frfi_r;
__asm __volatile(
- "pkhbt %[frfi_r], %[frfi_even], %[frfi_odd], lsl #16\n\t"
- "smlsd %[tr32], %[wri], %[frfi_r], %[cfftrnd]\n\t"
- "smladx %[ti32], %[wri], %[frfi_r], %[cfftrnd]\n\t"
- :[frfi_r]"+r"(frfi_r),
- [tr32]"=r"(tr32),
- [ti32]"=r"(ti32)
- :[frfi_even]"r"((int32_t)frfi[2*j]),
- [frfi_odd]"r"((int32_t)frfi[2*j +1]),
- [wri]"r"(wri),
- [cfftrnd]"r"(CFFTRND)
- );
-
+ "pkhbt %[frfi_r], %[frfi_even], %[frfi_odd],"
+ " lsl #16\n\t"
+ "smlsd %[tr32], %[wri], %[frfi_r], %[cfftrnd]\n\t"
+ "smladx %[ti32], %[wri], %[frfi_r], %[cfftrnd]\n\t"
+ :[frfi_r]"=&r"(frfi_r),
+ [tr32]"=&r"(tr32),
+ [ti32]"=r"(ti32)
+ :[frfi_even]"r"((int32_t)frfi[2*j]),
+ [frfi_odd]"r"((int32_t)frfi[2*j +1]),
+ [wri]"r"(wri),
+ [cfftrnd]"r"(CFFTRND));
#else
tr32 = WEBRTC_SPL_MUL_16_16(wr, frfi[2 * j])
- WEBRTC_SPL_MUL_16_16(wi, frfi[2 * j + 1]) + CFFTRND;
@@ -252,7 +251,6 @@ int WebRtcSpl_ComplexIFFT(int16_t frfi[], int stages, int mode)
#ifdef WEBRTC_ARCH_ARM_V7
int32_t wri = 0;
- int32_t frfi_r = 0;
__asm __volatile("pkhbt %0, %1, %2, lsl #16" : "=r"(wri) :
"r"((int32_t)wr), "r"((int32_t)wi));
#endif
@@ -262,12 +260,13 @@ int WebRtcSpl_ComplexIFFT(int16_t frfi[], int stages, int mode)
j = i + l;
#ifdef WEBRTC_ARCH_ARM_V7
+ register int32_t frfi_r;
__asm __volatile(
"pkhbt %[frfi_r], %[frfi_even], %[frfi_odd], lsl #16\n\t"
"smlsd %[tr32], %[wri], %[frfi_r], %[cifftrnd]\n\t"
"smladx %[ti32], %[wri], %[frfi_r], %[cifftrnd]\n\t"
- :[frfi_r]"+r"(frfi_r),
- [tr32]"=r"(tr32),
+ :[frfi_r]"=&r"(frfi_r),
+ [tr32]"=&r"(tr32),
[ti32]"=r"(ti32)
:[frfi_even]"r"((int32_t)frfi[2*j]),
[frfi_odd]"r"((int32_t)frfi[2*j +1]),
diff --git a/chromium/third_party/webrtc/common_audio/signal_processing/include/signal_processing_library.h b/chromium/third_party/webrtc/common_audio/signal_processing/include/signal_processing_library.h
index c567beba5ea..3a5d51cc1f6 100644
--- a/chromium/third_party/webrtc/common_audio/signal_processing/include/signal_processing_library.h
+++ b/chromium/third_party/webrtc/common_audio/signal_processing/include/signal_processing_library.h
@@ -27,7 +27,6 @@
#define WEBRTC_SPL_WORD32_MAX (int32_t)0x7fffffff
#define WEBRTC_SPL_WORD32_MIN (int32_t)0x80000000
#define WEBRTC_SPL_MAX_LPC_ORDER 14
-#define WEBRTC_SPL_MAX_SEED_USED 0x80000000L
#define WEBRTC_SPL_MIN(A, B) (A < B ? A : B) // Get min value
#define WEBRTC_SPL_MAX(A, B) (A > B ? A : B) // Get max value
// TODO(kma/bjorn): For the next two macros, investigate how to correct the code
@@ -54,12 +53,8 @@
((int32_t) ((int32_t)(a) * (int32_t)(b)))
#define WEBRTC_SPL_UMUL(a, b) \
((uint32_t) ((uint32_t)(a) * (uint32_t)(b)))
-#define WEBRTC_SPL_UMUL_RSFT16(a, b) \
- ((uint32_t) ((uint32_t)(a) * (uint32_t)(b)) >> 16)
#define WEBRTC_SPL_UMUL_16_16(a, b) \
((uint32_t) (uint16_t)(a) * (uint16_t)(b))
-#define WEBRTC_SPL_UMUL_16_16_RSFT16(a, b) \
- (((uint32_t) (uint16_t)(a) * (uint16_t)(b)) >> 16)
#define WEBRTC_SPL_UMUL_32_16(a, b) \
((uint32_t) ((uint32_t)(a) * (uint16_t)(b)))
#define WEBRTC_SPL_UMUL_32_16_RSFT16(a, b) \
@@ -83,11 +78,6 @@
#define WEBRTC_SPL_MUL_32_32_RSFT32(a32a, a32b, b32) \
((int32_t)(WEBRTC_SPL_MUL_16_32_RSFT16(a32a, b32) \
+ (WEBRTC_SPL_MUL_16_32_RSFT16(a32b, b32) >> 16)))
-#define WEBRTC_SPL_MUL_32_32_RSFT32BI(a32, b32) \
- ((int32_t)(WEBRTC_SPL_MUL_16_32_RSFT16(( \
- (int16_t)(a32 >> 16)), b32) + \
- (WEBRTC_SPL_MUL_16_32_RSFT16(( \
- (int16_t)((a32 & 0x0000FFFF) >> 1)), b32) >> 15)))
#endif
#endif
@@ -107,8 +97,6 @@
#define WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(a, b, c) \
((WEBRTC_SPL_MUL_16_16(a, b) + ((int32_t) \
(((int32_t)1) << ((c) - 1)))) >> (c))
-#define WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(a, b) \
- ((WEBRTC_SPL_MUL_16_16(a, b) + ((int32_t) (1 << 14))) >> 15)
// C + the 32 most significant bits of A * B
#define WEBRTC_SPL_SCALEDIFF32(A, B, C) \
@@ -120,10 +108,7 @@
#define WEBRTC_SPL_SUB_SAT_W32(a, b) WebRtcSpl_SubSatW32(a, b)
#define WEBRTC_SPL_ADD_SAT_W16(a, b) WebRtcSpl_AddSatW16(a, b)
-#define WEBRTC_SPL_SUB_SAT_W16(a, b) WebRtcSpl_SubSatW16(a, b)
-// We cannot do casting here due to signed/unsigned problem
-#define WEBRTC_SPL_IS_NEG(a) ((a) & 0x80000000)
// Shifting with negative numbers allowed
// Positive means left shift
#define WEBRTC_SPL_SHIFT_W16(x, c) \
@@ -138,14 +123,9 @@
#define WEBRTC_SPL_RSHIFT_W32(x, c) ((x) >> (c))
#define WEBRTC_SPL_LSHIFT_W32(x, c) ((x) << (c))
-#define WEBRTC_SPL_RSHIFT_U16(x, c) ((uint16_t)(x) >> (c))
-#define WEBRTC_SPL_LSHIFT_U16(x, c) ((uint16_t)(x) << (c))
#define WEBRTC_SPL_RSHIFT_U32(x, c) ((uint32_t)(x) >> (c))
#define WEBRTC_SPL_LSHIFT_U32(x, c) ((uint32_t)(x) << (c))
-#define WEBRTC_SPL_VNEW(t, n) (t *) malloc (sizeof (t) * (n))
-#define WEBRTC_SPL_FREE free
-
#define WEBRTC_SPL_RAND(a) \
((int16_t)(WEBRTC_SPL_MUL_16_16_RSFT((a), 18816, 7) & 0x00007fff))
@@ -675,7 +655,6 @@ void WebRtcSpl_SqrtOfOneMinusXSquared(int16_t* in_vector,
// Randomization functions. Implementations collected in
// randomization_functions.c and descriptions at bottom of this file.
-uint32_t WebRtcSpl_IncreaseSeed(uint32_t* seed);
int16_t WebRtcSpl_RandU(uint32_t* seed);
int16_t WebRtcSpl_RandN(uint32_t* seed);
int16_t WebRtcSpl_RandUArray(int16_t* vector,
@@ -996,12 +975,14 @@ void WebRtcSpl_UpsampleBy2(const int16_t* in, int16_t len,
* END OF RESAMPLING FUNCTIONS
************************************************************/
void WebRtcSpl_AnalysisQMF(const int16_t* in_data,
+ int in_data_length,
int16_t* low_band,
int16_t* high_band,
int32_t* filter_state1,
int32_t* filter_state2);
void WebRtcSpl_SynthesisQMF(const int16_t* low_band,
const int16_t* high_band,
+ int band_length,
int16_t* out_data,
int32_t* filter_state1,
int32_t* filter_state2);
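The QMF entry points above now take explicit length arguments instead of assuming a fixed 160-sample band. A minimal round-trip sketch, assuming a 10 ms frame at 32 kHz and zero-initialized filter states (six words per state, matching the filter_state[0..5] updates in splitting_filter.c):

#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"

// Splits one 320-sample frame into 160-sample low/high bands and merges them
// back. States must persist across frames in real use; they are zeroed here
// only to keep the sketch self-contained.
void SplitAndMergeFrame(const int16_t in[320], int16_t out[320]) {
  int16_t low[160];
  int16_t high[160];
  int32_t analysis_state1[6] = {0};
  int32_t analysis_state2[6] = {0};
  int32_t synthesis_state1[6] = {0};
  int32_t synthesis_state2[6] = {0};
  WebRtcSpl_AnalysisQMF(in, 320, low, high, analysis_state1, analysis_state2);
  WebRtcSpl_SynthesisQMF(low, high, 160, out,
                         synthesis_state1, synthesis_state2);
}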
diff --git a/chromium/third_party/webrtc/common_audio/signal_processing/include/spl_inl_armv7.h b/chromium/third_party/webrtc/common_audio/signal_processing/include/spl_inl_armv7.h
index fdbcb434380..a437a5556de 100644
--- a/chromium/third_party/webrtc/common_audio/signal_processing/include/spl_inl_armv7.h
+++ b/chromium/third_party/webrtc/common_audio/signal_processing/include/spl_inl_armv7.h
@@ -41,7 +41,7 @@ static __inline int32_t WEBRTC_SPL_MUL_32_32_RSFT32(int16_t a,
__asm __volatile (
"pkhbt %[tmp], %[b], %[a], lsl #16\n\t"
"smmulr %[tmp], %[tmp], %[c]\n\t"
- :[tmp]"+r"(tmp)
+ :[tmp]"+&r"(tmp)
:[a]"r"(a),
[b]"r"(b),
[c]"r"(c)
diff --git a/chromium/third_party/webrtc/common_audio/signal_processing/randomization_functions.c b/chromium/third_party/webrtc/common_audio/signal_processing/randomization_functions.c
index e2711fd9f50..73f24093c2f 100644
--- a/chromium/third_party/webrtc/common_audio/signal_processing/randomization_functions.c
+++ b/chromium/third_party/webrtc/common_audio/signal_processing/randomization_functions.c
@@ -11,7 +11,6 @@
/*
* This file contains implementations of the randomization functions
- * WebRtcSpl_IncreaseSeed()
* WebRtcSpl_RandU()
* WebRtcSpl_RandN()
* WebRtcSpl_RandUArray()
@@ -22,6 +21,8 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+static const uint32_t kMaxSeedUsed = 0x80000000;
+
static const int16_t kRandNTable[] = {
9178, -7260, 40, 10189, 4894, -3531, -13779, 14764,
-4008, -8884, -8990, 1008, 7368, 5184, 3251, -5817,
@@ -89,31 +90,26 @@ static const int16_t kRandNTable[] = {
2374, -5797, 11839, 8940, -11874, 18213, 2855, 10492
};
-uint32_t WebRtcSpl_IncreaseSeed(uint32_t *seed)
-{
- seed[0] = (seed[0] * ((int32_t)69069) + 1) & (WEBRTC_SPL_MAX_SEED_USED - 1);
- return seed[0];
+static uint32_t IncreaseSeed(uint32_t* seed) {
+ seed[0] = (seed[0] * ((int32_t)69069) + 1) & (kMaxSeedUsed - 1);
+ return seed[0];
}
-int16_t WebRtcSpl_RandU(uint32_t *seed)
-{
- return (int16_t)(WebRtcSpl_IncreaseSeed(seed) >> 16);
+int16_t WebRtcSpl_RandU(uint32_t* seed) {
+ return (int16_t)(IncreaseSeed(seed) >> 16);
}
-int16_t WebRtcSpl_RandN(uint32_t *seed)
-{
- return kRandNTable[WebRtcSpl_IncreaseSeed(seed) >> 23];
+int16_t WebRtcSpl_RandN(uint32_t* seed) {
+ return kRandNTable[IncreaseSeed(seed) >> 23];
}
-// Creates an array of uniformly distributed variables
+// Creates an array of uniformly distributed variables.
int16_t WebRtcSpl_RandUArray(int16_t* vector,
int16_t vector_length,
- uint32_t* seed)
-{
- int i;
- for (i = 0; i < vector_length; i++)
- {
- vector[i] = WebRtcSpl_RandU(seed);
- }
- return vector_length;
+ uint32_t* seed) {
+ int i;
+ for (i = 0; i < vector_length; i++) {
+ vector[i] = WebRtcSpl_RandU(seed);
+ }
+ return vector_length;
}
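Folding WebRtcSpl_IncreaseSeed() into a static IncreaseSeed() keeps the same generator: a 69069-multiplier LCG reduced to 31 bits by the kMaxSeedUsed mask. A worked example of the first draw, matching the values now checked in signal_processing_unittest.cc further down:

#include <assert.h>
#include <stdint.h>

// One step of the generator: seed' = (seed * 69069 + 1) mod 2^31. Starting
// from 100000, 100000 * 69069 + 1 overflows uint32_t to 2611932705, and the
// 31-bit mask leaves 464449057; RandU() then returns the top 16 bits of the
// 31-bit state, 464449057 >> 16 = 7086.
void CheckFirstDraw() {
  uint32_t seed = 100000;
  seed = (seed * 69069u + 1u) & 0x7fffffffu;
  assert(seed == 464449057u);
  assert((int16_t)(seed >> 16) == 7086);
}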
diff --git a/chromium/third_party/webrtc/common_audio/signal_processing/real_fft_unittest.cc b/chromium/third_party/webrtc/common_audio/signal_processing/real_fft_unittest.cc
index fa98836b9aa..9bd35cd68b4 100644
--- a/chromium/third_party/webrtc/common_audio/signal_processing/real_fft_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/signal_processing/real_fft_unittest.cc
@@ -10,6 +10,7 @@
#include "webrtc/common_audio/signal_processing/include/real_fft.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/typedefs.h"
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/chromium/third_party/webrtc/common_audio/signal_processing/signal_processing_unittest.cc b/chromium/third_party/webrtc/common_audio/signal_processing/signal_processing_unittest.cc
index a1bf0d5e8ed..81ca3694505 100644
--- a/chromium/third_party/webrtc/common_audio/signal_processing/signal_processing_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/signal_processing/signal_processing_unittest.cc
@@ -46,9 +46,7 @@ TEST_F(SplTest, MacroTest) {
EXPECT_EQ(-2147483645, WEBRTC_SPL_MUL(a, b));
EXPECT_EQ(2147483651u, WEBRTC_SPL_UMUL(a, b));
b = WEBRTC_SPL_WORD16_MAX >> 1;
- EXPECT_EQ(65535u, WEBRTC_SPL_UMUL_RSFT16(a, b));
EXPECT_EQ(1073627139u, WEBRTC_SPL_UMUL_16_16(a, b));
- EXPECT_EQ(16382u, WEBRTC_SPL_UMUL_16_16_RSFT16(a, b));
EXPECT_EQ(4294918147u, WEBRTC_SPL_UMUL_32_16(a, b));
EXPECT_EQ(65535u, WEBRTC_SPL_UMUL_32_16_RSFT16(a, b));
EXPECT_EQ(-49149, WEBRTC_SPL_MUL_16_U16(a, b));
@@ -63,15 +61,12 @@ TEST_F(SplTest, MacroTest) {
EXPECT_EQ(-3, WEBRTC_SPL_MUL_16_32_RSFT14(a, b));
EXPECT_EQ(-24, WEBRTC_SPL_MUL_16_32_RSFT11(a, b));
- int a32 = WEBRTC_SPL_WORD32_MAX;
int a32a = (WEBRTC_SPL_WORD32_MAX >> 16);
int a32b = (WEBRTC_SPL_WORD32_MAX & 0x0000ffff);
EXPECT_EQ(5, WEBRTC_SPL_MUL_32_32_RSFT32(a32a, a32b, A));
- EXPECT_EQ(5, WEBRTC_SPL_MUL_32_32_RSFT32BI(a32, A));
EXPECT_EQ(-12288, WEBRTC_SPL_MUL_16_16_RSFT(a, b, 2));
EXPECT_EQ(-12287, WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(a, b, 2));
- EXPECT_EQ(-1, WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(a, b));
EXPECT_EQ(16380, WEBRTC_SPL_ADD_SAT_W32(a, b));
EXPECT_EQ(21, WEBRTC_SPL_SAT(a, A, B));
@@ -80,9 +75,6 @@ TEST_F(SplTest, MacroTest) {
EXPECT_EQ(16386, WEBRTC_SPL_SUB_SAT_W32(a, b));
EXPECT_EQ(16380, WEBRTC_SPL_ADD_SAT_W16(a, b));
- EXPECT_EQ(16386, WEBRTC_SPL_SUB_SAT_W16(a, b));
-
- EXPECT_TRUE(WEBRTC_SPL_IS_NEG(b));
// Shifting with negative numbers allowed
int shift_amount = 1; // Workaround compiler warning using variable here.
@@ -97,8 +89,6 @@ TEST_F(SplTest, MacroTest) {
EXPECT_EQ(8191, WEBRTC_SPL_RSHIFT_W32(a, 1));
EXPECT_EQ(32766, WEBRTC_SPL_LSHIFT_W32(a, 1));
- EXPECT_EQ(8191, WEBRTC_SPL_RSHIFT_U16(a, 1));
- EXPECT_EQ(32766, WEBRTC_SPL_LSHIFT_U16(a, 1));
EXPECT_EQ(8191u, WEBRTC_SPL_RSHIFT_U32(a, 1));
EXPECT_EQ(32766u, WEBRTC_SPL_LSHIFT_U32(a, 1));
@@ -117,16 +107,12 @@ TEST_F(SplTest, MacroTest) {
WEBRTC_SPL_WORD32_MAX));
EXPECT_EQ(0x3fffffff, WEBRTC_SPL_MUL_32_32_RSFT32(WEBRTC_SPL_WORD16_MAX,
0xffff, WEBRTC_SPL_WORD32_MAX));
- EXPECT_EQ(0x3fffffff, WEBRTC_SPL_MUL_32_32_RSFT32BI(WEBRTC_SPL_WORD32_MAX,
- WEBRTC_SPL_WORD32_MAX));
#else
EXPECT_EQ(-1073741823,
WEBRTC_SPL_MUL_16_32_RSFT16(WEBRTC_SPL_WORD16_MIN,
WEBRTC_SPL_WORD32_MAX));
EXPECT_EQ(0x3fff7ffe, WEBRTC_SPL_MUL_32_32_RSFT32(WEBRTC_SPL_WORD16_MAX,
0xffff, WEBRTC_SPL_WORD32_MAX));
- EXPECT_EQ(0x3ffffffd, WEBRTC_SPL_MUL_32_32_RSFT32BI(WEBRTC_SPL_WORD32_MAX,
- WEBRTC_SPL_WORD32_MAX));
#endif
}
@@ -497,7 +483,7 @@ TEST_F(SplTest, RandTest) {
int16_t b16[kVectorSize];
uint32_t bSeed = 100000;
- EXPECT_EQ(464449057u, WebRtcSpl_IncreaseSeed(&bSeed));
+ EXPECT_EQ(7086, WebRtcSpl_RandU(&bSeed));
EXPECT_EQ(31565, WebRtcSpl_RandU(&bSeed));
EXPECT_EQ(-9786, WebRtcSpl_RandN(&bSeed));
EXPECT_EQ(kVectorSize, WebRtcSpl_RandUArray(b16, kVectorSize, &bSeed));
diff --git a/chromium/third_party/webrtc/common_audio/signal_processing/spl_init.c b/chromium/third_party/webrtc/common_audio/signal_processing/spl_init.c
index 454e13ba973..762f9e420e1 100644
--- a/chromium/third_party/webrtc/common_audio/signal_processing/spl_init.c
+++ b/chromium/third_party/webrtc/common_audio/signal_processing/spl_init.c
@@ -65,8 +65,10 @@ static void InitPointersToNeon() {
WebRtcSpl_MinValueW32 = WebRtcSpl_MinValueW32Neon;
WebRtcSpl_CrossCorrelation = WebRtcSpl_CrossCorrelationNeon;
WebRtcSpl_DownsampleFast = WebRtcSpl_DownsampleFastNeon;
+ /* TODO(henrik.lundin): re-enable NEON when the crash from bug 3243 is
+ understood. */
WebRtcSpl_ScaleAndAddVectorsWithRound =
- WebRtcSpl_ScaleAndAddVectorsWithRoundNeon;
+ WebRtcSpl_ScaleAndAddVectorsWithRoundC;
WebRtcSpl_CreateRealFFT = WebRtcSpl_CreateRealFFTNeon;
WebRtcSpl_FreeRealFFT = WebRtcSpl_FreeRealFFTNeon;
WebRtcSpl_RealForwardFFT = WebRtcSpl_RealForwardFFTNeon;
diff --git a/chromium/third_party/webrtc/common_audio/signal_processing/splitting_filter.c b/chromium/third_party/webrtc/common_audio/signal_processing/splitting_filter.c
index cf6ec9d7b16..dbda042277c 100644
--- a/chromium/third_party/webrtc/common_audio/signal_processing/splitting_filter.c
+++ b/chromium/third_party/webrtc/common_audio/signal_processing/splitting_filter.c
@@ -15,10 +15,12 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-// Number of samples in a low/high-band frame.
+#include <assert.h>
+
+// Maximum number of samples in a low/high-band frame.
enum
{
- kBandFrameLength = 160
+ kMaxBandFrameLength = 240 // 10 ms at 48 kHz.
};
// QMF filter coefficients in Q16.
@@ -116,34 +118,37 @@ void WebRtcSpl_AllPassQMF(int32_t* in_data, int16_t data_length,
filter_state[5] = out_data[data_length - 1]; // y[N-1], becomes y[-1] next time
}
-void WebRtcSpl_AnalysisQMF(const int16_t* in_data, int16_t* low_band,
- int16_t* high_band, int32_t* filter_state1,
- int32_t* filter_state2)
+void WebRtcSpl_AnalysisQMF(const int16_t* in_data, int in_data_length,
+ int16_t* low_band, int16_t* high_band,
+ int32_t* filter_state1, int32_t* filter_state2)
{
int16_t i;
int16_t k;
int32_t tmp;
- int32_t half_in1[kBandFrameLength];
- int32_t half_in2[kBandFrameLength];
- int32_t filter1[kBandFrameLength];
- int32_t filter2[kBandFrameLength];
+ int32_t half_in1[kMaxBandFrameLength];
+ int32_t half_in2[kMaxBandFrameLength];
+ int32_t filter1[kMaxBandFrameLength];
+ int32_t filter2[kMaxBandFrameLength];
+ const int band_length = in_data_length / 2;
+ assert(in_data_length % 2 == 0);
+ assert(band_length <= kMaxBandFrameLength);
// Split even and odd samples. Also shift them to Q10.
- for (i = 0, k = 0; i < kBandFrameLength; i++, k += 2)
+ for (i = 0, k = 0; i < band_length; i++, k += 2)
{
half_in2[i] = WEBRTC_SPL_LSHIFT_W32((int32_t)in_data[k], 10);
half_in1[i] = WEBRTC_SPL_LSHIFT_W32((int32_t)in_data[k + 1], 10);
}
// All pass filter even and odd samples, independently.
- WebRtcSpl_AllPassQMF(half_in1, kBandFrameLength, filter1, WebRtcSpl_kAllPassFilter1,
- filter_state1);
- WebRtcSpl_AllPassQMF(half_in2, kBandFrameLength, filter2, WebRtcSpl_kAllPassFilter2,
- filter_state2);
+ WebRtcSpl_AllPassQMF(half_in1, band_length, filter1,
+ WebRtcSpl_kAllPassFilter1, filter_state1);
+ WebRtcSpl_AllPassQMF(half_in2, band_length, filter2,
+ WebRtcSpl_kAllPassFilter2, filter_state2);
// Take the sum and difference of filtered version of odd and even
// branches to get upper & lower band.
- for (i = 0; i < kBandFrameLength; i++)
+ for (i = 0; i < band_length; i++)
{
tmp = filter1[i] + filter2[i] + 1024;
tmp = WEBRTC_SPL_RSHIFT_W32(tmp, 11);
@@ -156,20 +161,21 @@ void WebRtcSpl_AnalysisQMF(const int16_t* in_data, int16_t* low_band,
}
void WebRtcSpl_SynthesisQMF(const int16_t* low_band, const int16_t* high_band,
- int16_t* out_data, int32_t* filter_state1,
- int32_t* filter_state2)
+ int band_length, int16_t* out_data,
+ int32_t* filter_state1, int32_t* filter_state2)
{
int32_t tmp;
- int32_t half_in1[kBandFrameLength];
- int32_t half_in2[kBandFrameLength];
- int32_t filter1[kBandFrameLength];
- int32_t filter2[kBandFrameLength];
+ int32_t half_in1[kMaxBandFrameLength];
+ int32_t half_in2[kMaxBandFrameLength];
+ int32_t filter1[kMaxBandFrameLength];
+ int32_t filter2[kMaxBandFrameLength];
int16_t i;
int16_t k;
+ assert(band_length <= kMaxBandFrameLength);
// Obtain the sum and difference channels out of upper and lower-band channels.
// Also shift to Q10 domain.
- for (i = 0; i < kBandFrameLength; i++)
+ for (i = 0; i < band_length; i++)
{
tmp = (int32_t)low_band[i] + (int32_t)high_band[i];
half_in1[i] = WEBRTC_SPL_LSHIFT_W32(tmp, 10);
@@ -178,15 +184,15 @@ void WebRtcSpl_SynthesisQMF(const int16_t* low_band, const int16_t* high_band,
}
// all-pass filter the sum and difference channels
- WebRtcSpl_AllPassQMF(half_in1, kBandFrameLength, filter1, WebRtcSpl_kAllPassFilter2,
- filter_state1);
- WebRtcSpl_AllPassQMF(half_in2, kBandFrameLength, filter2, WebRtcSpl_kAllPassFilter1,
- filter_state2);
+ WebRtcSpl_AllPassQMF(half_in1, band_length, filter1,
+ WebRtcSpl_kAllPassFilter2, filter_state1);
+ WebRtcSpl_AllPassQMF(half_in2, band_length, filter2,
+ WebRtcSpl_kAllPassFilter1, filter_state2);
// The filtered signals are even and odd samples of the output. Combine
// them. The signals are in Q10; shift them back to Q0 and take care of
// saturation.
- for (i = 0, k = 0; i < kBandFrameLength; i++)
+ for (i = 0, k = 0; i < band_length; i++)
{
tmp = WEBRTC_SPL_RSHIFT_W32(filter2[i] + 512, 10);
out_data[k++] = WebRtcSpl_SatW32ToW16(tmp);
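The stack buffers above are now sized for the largest supported frame instead of a fixed 160 samples: a 10 ms frame at 48 kHz is 480 samples, and the QMF splits it into two bands of 240, which is where kMaxBandFrameLength comes from. A one-line check of that arithmetic:

// 48000 samples/s * 10 ms = 480 samples per frame, split into two 240-sample
// bands, so the worst-case band length is 240.
static_assert(48000 / 100 / 2 == 240,
              "10 ms at 48 kHz yields 240-sample bands");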
diff --git a/chromium/third_party/webrtc/common_audio/vad/include/webrtc_vad.h b/chromium/third_party/webrtc/common_audio/vad/include/webrtc_vad.h
index f6e959fa354..053827303b1 100644
--- a/chromium/third_party/webrtc/common_audio/vad/include/webrtc_vad.h
+++ b/chromium/third_party/webrtc/common_audio/vad/include/webrtc_vad.h
@@ -34,9 +34,7 @@ int WebRtcVad_Create(VadInst** handle);
// Frees the dynamic memory of a specified VAD instance.
//
// - handle [i] : Pointer to VAD instance that should be freed.
-//
-// returns : 0 - (OK), -1 - (NULL pointer in)
-int WebRtcVad_Free(VadInst* handle);
+void WebRtcVad_Free(VadInst* handle);
// Initializes a VAD instance.
//
@@ -71,7 +69,7 @@ int WebRtcVad_set_mode(VadInst* handle, int mode);
// returns : 1 - (Active Voice),
// 0 - (Non-active Voice),
// -1 - (Error)
-int WebRtcVad_Process(VadInst* handle, int fs, int16_t* audio_frame,
+int WebRtcVad_Process(VadInst* handle, int fs, const int16_t* audio_frame,
int frame_length);
// Checks for valid combinations of |rate| and |frame_length|. We support 10,
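With WebRtcVad_Free() now returning void and WebRtcVad_Process() taking a const frame, a full create/init/process/free cycle looks like the sketch below (16 kHz with a 160-sample, 10 ms frame is illustrative, and mode 0 is assumed to be the least aggressive setting):

#include "webrtc/common_audio/vad/include/webrtc_vad.h"

// Classifies one 10 ms, 16 kHz frame. Returns 1 for active voice, 0 for
// non-active voice, -1 on error, mirroring WebRtcVad_Process().
int ClassifyFrame(const int16_t frame[160]) {
  VadInst* vad = NULL;
  if (WebRtcVad_Create(&vad) != 0)
    return -1;
  int result = -1;
  if (WebRtcVad_Init(vad) == 0 && WebRtcVad_set_mode(vad, 0) == 0)
    result = WebRtcVad_Process(vad, 16000, frame, 160);
  WebRtcVad_Free(vad);  // No status to check anymore.
  return result;
}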
diff --git a/chromium/third_party/webrtc/common_audio/vad/vad_core.c b/chromium/third_party/webrtc/common_audio/vad/vad_core.c
index 80c31f481f6..98da6eaf0b7 100644
--- a/chromium/third_party/webrtc/common_audio/vad/vad_core.c
+++ b/chromium/third_party/webrtc/common_audio/vad/vad_core.c
@@ -603,7 +603,7 @@ int WebRtcVad_set_mode_core(VadInstT* self, int mode) {
// Calculate VAD decision by first extracting feature values and then calculate
// probability for both speech and background noise.
-int WebRtcVad_CalcVad48khz(VadInstT* inst, int16_t* speech_frame,
+int WebRtcVad_CalcVad48khz(VadInstT* inst, const int16_t* speech_frame,
int frame_length) {
int vad;
int i;
@@ -628,7 +628,7 @@ int WebRtcVad_CalcVad48khz(VadInstT* inst, int16_t* speech_frame,
return vad;
}
-int WebRtcVad_CalcVad32khz(VadInstT* inst, int16_t* speech_frame,
+int WebRtcVad_CalcVad32khz(VadInstT* inst, const int16_t* speech_frame,
int frame_length)
{
int len, vad;
@@ -650,7 +650,7 @@ int WebRtcVad_CalcVad32khz(VadInstT* inst, int16_t* speech_frame,
return vad;
}
-int WebRtcVad_CalcVad16khz(VadInstT* inst, int16_t* speech_frame,
+int WebRtcVad_CalcVad16khz(VadInstT* inst, const int16_t* speech_frame,
int frame_length)
{
int len, vad;
@@ -666,7 +666,7 @@ int WebRtcVad_CalcVad16khz(VadInstT* inst, int16_t* speech_frame,
return vad;
}
-int WebRtcVad_CalcVad8khz(VadInstT* inst, int16_t* speech_frame,
+int WebRtcVad_CalcVad8khz(VadInstT* inst, const int16_t* speech_frame,
int frame_length)
{
int16_t feature_vector[kNumChannels], total_power;
diff --git a/chromium/third_party/webrtc/common_audio/vad/vad_core.h b/chromium/third_party/webrtc/common_audio/vad/vad_core.h
index d6c1da2710d..202963d8c68 100644
--- a/chromium/third_party/webrtc/common_audio/vad/vad_core.h
+++ b/chromium/third_party/webrtc/common_audio/vad/vad_core.h
@@ -85,9 +85,9 @@ int WebRtcVad_set_mode_core(VadInstT* self, int mode);
/****************************************************************************
* WebRtcVad_CalcVad48khz(...)
- * WebRtcVad_CalcVad32khz(...)
- * WebRtcVad_CalcVad16khz(...)
- * WebRtcVad_CalcVad8khz(...)
+ * WebRtcVad_CalcVad32khz(...)
+ * WebRtcVad_CalcVad16khz(...)
+ * WebRtcVad_CalcVad8khz(...)
*
* Calculate probability for active speech and make VAD decision.
*
@@ -103,13 +103,13 @@ int WebRtcVad_set_mode_core(VadInstT* self, int mode);
* 0 - No active speech
* 1-6 - Active speech
*/
-int WebRtcVad_CalcVad48khz(VadInstT* inst, int16_t* speech_frame,
+int WebRtcVad_CalcVad48khz(VadInstT* inst, const int16_t* speech_frame,
int frame_length);
-int WebRtcVad_CalcVad32khz(VadInstT* inst, int16_t* speech_frame,
+int WebRtcVad_CalcVad32khz(VadInstT* inst, const int16_t* speech_frame,
int frame_length);
-int WebRtcVad_CalcVad16khz(VadInstT* inst, int16_t* speech_frame,
+int WebRtcVad_CalcVad16khz(VadInstT* inst, const int16_t* speech_frame,
int frame_length);
-int WebRtcVad_CalcVad8khz(VadInstT* inst, int16_t* speech_frame,
+int WebRtcVad_CalcVad8khz(VadInstT* inst, const int16_t* speech_frame,
int frame_length);
#endif // WEBRTC_COMMON_AUDIO_VAD_VAD_CORE_H_
diff --git a/chromium/third_party/webrtc/common_audio/vad/vad_sp.c b/chromium/third_party/webrtc/common_audio/vad/vad_sp.c
index 41deb3d057c..e981ad23e27 100644
--- a/chromium/third_party/webrtc/common_audio/vad/vad_sp.c
+++ b/chromium/third_party/webrtc/common_audio/vad/vad_sp.c
@@ -24,7 +24,7 @@ static const int16_t kSmoothingUp = 32439; // 0.99 in Q15.
// TODO(bjornv): Move this function to vad_filterbank.c.
// Downsampling filter based on splitting filter and allpass functions.
-void WebRtcVad_Downsampling(int16_t* signal_in,
+void WebRtcVad_Downsampling(const int16_t* signal_in,
int16_t* signal_out,
int32_t* filter_state,
int in_length) {
diff --git a/chromium/third_party/webrtc/common_audio/vad/vad_sp.h b/chromium/third_party/webrtc/common_audio/vad/vad_sp.h
index f84876a0122..b5e62593c0f 100644
--- a/chromium/third_party/webrtc/common_audio/vad/vad_sp.h
+++ b/chromium/third_party/webrtc/common_audio/vad/vad_sp.h
@@ -30,7 +30,7 @@
//
// Output:
// - signal_out : Downsampled signal (of length |in_length| / 2).
-void WebRtcVad_Downsampling(int16_t* signal_in,
+void WebRtcVad_Downsampling(const int16_t* signal_in,
int16_t* signal_out,
int32_t* filter_state,
int in_length);
diff --git a/chromium/third_party/webrtc/common_audio/vad/vad_unittest.cc b/chromium/third_party/webrtc/common_audio/vad/vad_unittest.cc
index 1d73d34a40f..a1127ad244a 100644
--- a/chromium/third_party/webrtc/common_audio/vad/vad_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/vad/vad_unittest.cc
@@ -70,7 +70,6 @@ TEST_F(VadTest, ApiTest) {
// NULL instance tests
EXPECT_EQ(-1, WebRtcVad_Create(NULL));
EXPECT_EQ(-1, WebRtcVad_Init(NULL));
- EXPECT_EQ(-1, WebRtcVad_Free(NULL));
EXPECT_EQ(-1, WebRtcVad_set_mode(NULL, kModes[0]));
EXPECT_EQ(-1, WebRtcVad_Process(NULL, kRates[0], speech, kFrameLengths[0]));
@@ -121,7 +120,7 @@ TEST_F(VadTest, ApiTest) {
}
}
- EXPECT_EQ(0, WebRtcVad_Free(handle));
+ WebRtcVad_Free(handle);
}
TEST_F(VadTest, ValidRatesFrameLengths) {
diff --git a/chromium/third_party/webrtc/common_audio/vad/webrtc_vad.c b/chromium/third_party/webrtc/common_audio/vad/webrtc_vad.c
index 3acd3c37d8d..8a9b9317d8c 100644
--- a/chromium/third_party/webrtc/common_audio/vad/webrtc_vad.c
+++ b/chromium/third_party/webrtc/common_audio/vad/webrtc_vad.c
@@ -44,14 +44,8 @@ int WebRtcVad_Create(VadInst** handle) {
return 0;
}
-int WebRtcVad_Free(VadInst* handle) {
- if (handle == NULL) {
- return -1;
- }
-
+void WebRtcVad_Free(VadInst* handle) {
free(handle);
-
- return 0;
}
// TODO(bjornv): Move WebRtcVad_InitCore() code here.
@@ -74,7 +68,7 @@ int WebRtcVad_set_mode(VadInst* handle, int mode) {
return WebRtcVad_set_mode_core(self, mode);
}
-int WebRtcVad_Process(VadInst* handle, int fs, int16_t* audio_frame,
+int WebRtcVad_Process(VadInst* handle, int fs, const int16_t* audio_frame,
int frame_length) {
int vad = -1;
VadInstT* self = (VadInstT*) handle;
diff --git a/chromium/third_party/webrtc/common_types.h b/chromium/third_party/webrtc/common_types.h
index 3d47b86132e..6892a83f084 100644
--- a/chromium/third_party/webrtc/common_types.h
+++ b/chromium/third_party/webrtc/common_types.h
@@ -11,6 +11,12 @@
#ifndef WEBRTC_COMMON_TYPES_H_
#define WEBRTC_COMMON_TYPES_H_
+#include <stddef.h>
+#include <string.h>
+
+#include <string>
+#include <vector>
+
#include "webrtc/typedefs.h"
#if defined(_MSC_VER)
@@ -33,7 +39,7 @@
#define RTP_PAYLOAD_NAME_SIZE 32
-#if defined(WEBRTC_WIN)
+#if defined(WEBRTC_WIN) || defined(WIN32)
// Compares two strings without regard to case.
#define STR_CASE_CMP(s1, s2) ::_stricmp(s1, s2)
// Compares characters of two strings without regard to case.
@@ -89,7 +95,6 @@ enum TraceModule
kTraceAudioDevice = 0x0012,
kTraceVideoRenderer = 0x0014,
kTraceVideoCapture = 0x0015,
- kTraceVideoPreocessing = 0x0016,
kTraceRemoteBitrateEstimator = 0x0017,
};
@@ -157,71 +162,6 @@ enum FrameType
kVideoFrameDelta = 4, // depends on the previous frame
};
-// Interface for encrypting and decrypting regular data and rtp/rtcp packets.
-// Implement this interface if you wish to provide an encryption scheme to
-// the voice or video engines.
-class Encryption
-{
-public:
- // Encrypt the given data.
- //
- // Args:
- // channel: The channel to encrypt data for.
- // in_data: The data to encrypt. This data is bytes_in bytes long.
- // out_data: The buffer to write the encrypted data to. You may write more
- // bytes of encrypted data than what you got as input, up to a maximum
- // of webrtc::kViEMaxMtu if you are encrypting in the video engine, or
- // webrtc::kVoiceEngineMaxIpPacketSizeBytes for the voice engine.
- // bytes_in: The number of bytes in the input buffer.
- // bytes_out: The number of bytes written in out_data.
- virtual void encrypt(
- int channel,
- unsigned char* in_data,
- unsigned char* out_data,
- int bytes_in,
- int* bytes_out) = 0;
-
- // Decrypts the given data. This should reverse the effects of encrypt().
- //
- // Args:
- // channel_no: The channel to decrypt data for.
- // in_data: The data to decrypt. This data is bytes_in bytes long.
- // out_data: The buffer to write the decrypted data to. You may write more
- // bytes of decrypted data than what you got as input, up to a maximum
- // of webrtc::kViEMaxMtu if you are encrypting in the video engine, or
- // webrtc::kVoiceEngineMaxIpPacketSizeBytes for the voice engine.
- // bytes_in: The number of bytes in the input buffer.
- // bytes_out: The number of bytes written in out_data.
- virtual void decrypt(
- int channel,
- unsigned char* in_data,
- unsigned char* out_data,
- int bytes_in,
- int* bytes_out) = 0;
-
- // Encrypts a RTCP packet. Otherwise, this method has the same contract as
- // encrypt().
- virtual void encrypt_rtcp(
- int channel,
- unsigned char* in_data,
- unsigned char* out_data,
- int bytes_in,
- int* bytes_out) = 0;
-
- // Decrypts a RTCP packet. Otherwise, this method has the same contract as
- // decrypt().
- virtual void decrypt_rtcp(
- int channel,
- unsigned char* in_data,
- unsigned char* out_data,
- int bytes_in,
- int* bytes_out) = 0;
-
-protected:
- virtual ~Encryption() {}
- Encryption() {}
-};
-
// External transport callback interface
class Transport
{
@@ -257,6 +197,24 @@ class RtcpStatisticsCallback {
uint32_t ssrc) = 0;
};
+// Statistics for RTCP packet types.
+struct RtcpPacketTypeCounter {
+ RtcpPacketTypeCounter()
+ : nack_packets(0),
+ fir_packets(0),
+ pli_packets(0) {}
+
+ void Add(const RtcpPacketTypeCounter& other) {
+ nack_packets += other.nack_packets;
+ fir_packets += other.fir_packets;
+ pli_packets += other.pli_packets;
+ }
+
+ uint32_t nack_packets;
+ uint32_t fir_packets;
+ uint32_t pli_packets;
+};
+
// Data usage statistics for a (rtp) stream
struct StreamDataCounters {
StreamDataCounters()
@@ -315,14 +273,26 @@ class FrameCountObserver {
// ==================================================================
// Each codec supported can be described by this structure.
-struct CodecInst
-{
- int pltype;
- char plname[RTP_PAYLOAD_NAME_SIZE];
- int plfreq;
- int pacsize;
- int channels;
- int rate; // bits/sec unlike {start,min,max}Bitrate elsewhere in this file!
+struct CodecInst {
+ int pltype;
+ char plname[RTP_PAYLOAD_NAME_SIZE];
+ int plfreq;
+ int pacsize;
+ int channels;
+ int rate; // bits/sec unlike {start,min,max}Bitrate elsewhere in this file!
+
+ bool operator==(const CodecInst& other) const {
+ return pltype == other.pltype &&
+ (STR_CASE_CMP(plname, other.plname) == 0) &&
+ plfreq == other.plfreq &&
+ pacsize == other.pacsize &&
+ channels == other.channels &&
+ rate == other.rate;
+ }
+
+ bool operator!=(const CodecInst& other) const {
+ return !(*this == other);
+ }
};
// RTP
@@ -498,6 +468,7 @@ enum AudioLayers
kAudioLinuxPulse = 4
};
+// TODO(henrika): to be removed.
enum NetEqModes // NetEQ playout configurations
{
// Optimized trade-off between low delay and jitter robustness for two-way
@@ -514,6 +485,7 @@ enum NetEqModes // NetEQ playout configurations
kNetEqOff = 3,
};
+// TODO(henrika): to be removed.
enum OnHoldModes // On Hold direction
{
kHoldSendAndPlay = 0, // Put both sending and playing in on-hold state.
@@ -521,6 +493,7 @@ enum OnHoldModes // On Hold direction
kHoldPlayOnly // Put only playing in on-hold state.
};
+// TODO(henrika): to be removed.
enum AmrMode
{
kRfc3267BwEfficient = 0,
@@ -584,23 +557,34 @@ enum VP8ResilienceMode {
};
// VP8 specific
-struct VideoCodecVP8
-{
- bool pictureLossIndicationOn;
- bool feedbackModeOn;
- VideoCodecComplexity complexity;
- VP8ResilienceMode resilience;
- unsigned char numberOfTemporalLayers;
- bool denoisingOn;
- bool errorConcealmentOn;
- bool automaticResizeOn;
- bool frameDroppingOn;
- int keyFrameInterval;
-};
-
-// Unknown specific
-struct VideoCodecGeneric
-{
+struct VideoCodecVP8 {
+ bool pictureLossIndicationOn;
+ bool feedbackModeOn;
+ VideoCodecComplexity complexity;
+ VP8ResilienceMode resilience;
+ unsigned char numberOfTemporalLayers;
+ bool denoisingOn;
+ bool errorConcealmentOn;
+ bool automaticResizeOn;
+ bool frameDroppingOn;
+ int keyFrameInterval;
+
+ bool operator==(const VideoCodecVP8& other) const {
+ return pictureLossIndicationOn == other.pictureLossIndicationOn &&
+ feedbackModeOn == other.feedbackModeOn &&
+ complexity == other.complexity &&
+ resilience == other.resilience &&
+ numberOfTemporalLayers == other.numberOfTemporalLayers &&
+ denoisingOn == other.denoisingOn &&
+ errorConcealmentOn == other.errorConcealmentOn &&
+ automaticResizeOn == other.automaticResizeOn &&
+ frameDroppingOn == other.frameDroppingOn &&
+ keyFrameInterval == other.keyFrameInterval;
+ }
+
+ bool operator!=(const VideoCodecVP8& other) const {
+ return !(*this == other);
+ }
};
// Video codec types
@@ -617,21 +601,33 @@ enum VideoCodecType
union VideoCodecUnion
{
VideoCodecVP8 VP8;
- VideoCodecGeneric Generic;
};
// Simulcast is when the same stream is encoded multiple times with different
// settings such as resolution.
-struct SimulcastStream
-{
- unsigned short width;
- unsigned short height;
- unsigned char numberOfTemporalLayers;
- unsigned int maxBitrate; // kilobits/sec.
- unsigned int targetBitrate; // kilobits/sec.
- unsigned int minBitrate; // kilobits/sec.
- unsigned int qpMax; // minimum quality
+struct SimulcastStream {
+ unsigned short width;
+ unsigned short height;
+ unsigned char numberOfTemporalLayers;
+ unsigned int maxBitrate; // kilobits/sec.
+ unsigned int targetBitrate; // kilobits/sec.
+ unsigned int minBitrate; // kilobits/sec.
+ unsigned int qpMax; // minimum quality
+
+ bool operator==(const SimulcastStream& other) const {
+ return width == other.width &&
+ height == other.height &&
+ numberOfTemporalLayers == other.numberOfTemporalLayers &&
+ maxBitrate == other.maxBitrate &&
+ targetBitrate == other.targetBitrate &&
+ minBitrate == other.minBitrate &&
+ qpMax == other.qpMax;
+ }
+
+ bool operator!=(const SimulcastStream& other) const {
+ return !(*this == other);
+ }
};
enum VideoCodecMode {
@@ -640,31 +636,60 @@ enum VideoCodecMode {
};
// Common video codec properties
-struct VideoCodec
-{
- VideoCodecType codecType;
- char plName[kPayloadNameSize];
- unsigned char plType;
-
- unsigned short width;
- unsigned short height;
-
- unsigned int startBitrate; // kilobits/sec.
- unsigned int maxBitrate; // kilobits/sec.
- unsigned int minBitrate; // kilobits/sec.
- unsigned char maxFramerate;
-
- VideoCodecUnion codecSpecific;
-
- unsigned int qpMax;
- unsigned char numberOfSimulcastStreams;
- SimulcastStream simulcastStream[kMaxSimulcastStreams];
-
- VideoCodecMode mode;
+struct VideoCodec {
+ VideoCodecType codecType;
+ char plName[kPayloadNameSize];
+ unsigned char plType;
+
+ unsigned short width;
+ unsigned short height;
+
+ unsigned int startBitrate; // kilobits/sec.
+ unsigned int maxBitrate; // kilobits/sec.
+ unsigned int minBitrate; // kilobits/sec.
+ unsigned int targetBitrate; // kilobits/sec.
+
+ unsigned char maxFramerate;
+
+ VideoCodecUnion codecSpecific;
+
+ unsigned int qpMax;
+ unsigned char numberOfSimulcastStreams;
+ SimulcastStream simulcastStream[kMaxSimulcastStreams];
+
+ VideoCodecMode mode;
+
+ // When using an external encoder/decoder, this allows passing
+ // extra options without requiring webrtc to be aware of them.
+ Config* extra_options;
+
+ bool operator==(const VideoCodec& other) const {
+ bool ret = codecType == other.codecType &&
+ (STR_CASE_CMP(plName, other.plName) == 0) &&
+ plType == other.plType &&
+ width == other.width &&
+ height == other.height &&
+ startBitrate == other.startBitrate &&
+ maxBitrate == other.maxBitrate &&
+ minBitrate == other.minBitrate &&
+ targetBitrate == other.targetBitrate &&
+ maxFramerate == other.maxFramerate &&
+ qpMax == other.qpMax &&
+ numberOfSimulcastStreams == other.numberOfSimulcastStreams &&
+ mode == other.mode;
+ if (ret && codecType == kVideoCodecVP8) {
+ ret &= (codecSpecific.VP8 == other.codecSpecific.VP8);
+ }
+
+ for (unsigned char i = 0; i < other.numberOfSimulcastStreams && ret; ++i) {
+ ret &= (simulcastStream[i] == other.simulcastStream[i]);
+ }
+ return ret;
+ }
- // When using an external encoder/decoder this allows to pass
- // extra options without requiring webrtc to be aware of them.
- Config* extra_options;
+ bool operator!=(const VideoCodec& other) const {
+ return !(*this == other);
+ }
};
// Bandwidth over-use detector options. These are used to drive
@@ -697,20 +722,67 @@ struct OverUseDetectorOptions {
// This structure will have the information about when packet is actually
// received by socket.
struct PacketTime {
- PacketTime() : timestamp(-1), max_error_us(-1) {}
- PacketTime(int64_t timestamp, int64_t max_error_us)
- : timestamp(timestamp), max_error_us(max_error_us) {
+ PacketTime() : timestamp(-1), not_before(-1) {}
+ PacketTime(int64_t timestamp, int64_t not_before)
+ : timestamp(timestamp), not_before(not_before) {
+ }
+
+ int64_t timestamp; // Receive time after socket delivers the data.
+ int64_t not_before; // Earliest possible time the data could have arrived,
+ // indicating the potential error in the |timestamp|
+ // value, in case the system is busy.
+ // For example, the time of the last select() call.
+ // If unknown, this value will be set to zero.
+};
+
+struct RTPHeaderExtension {
+ RTPHeaderExtension()
+ : hasTransmissionTimeOffset(false),
+ transmissionTimeOffset(0),
+ hasAbsoluteSendTime(false),
+ absoluteSendTime(0),
+ hasAudioLevel(false),
+ audioLevel(0) {}
+
+ bool hasTransmissionTimeOffset;
+ int32_t transmissionTimeOffset;
+ bool hasAbsoluteSendTime;
+ uint32_t absoluteSendTime;
+
+ // Audio Level includes both level in dBov and voiced/unvoiced bit. See:
+ // https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
+ bool hasAudioLevel;
+ uint8_t audioLevel;
+};
+
+struct RTPHeader {
+ RTPHeader()
+ : markerBit(false),
+ payloadType(0),
+ sequenceNumber(0),
+ timestamp(0),
+ ssrc(0),
+ numCSRCs(0),
+ paddingLength(0),
+ headerLength(0),
+ payload_type_frequency(0),
+ extension() {
+ memset(&arrOfCSRCs, 0, sizeof(arrOfCSRCs));
}
- int64_t timestamp; // Receive time after socket delivers the data.
- int64_t max_error_us; // Earliest possible time the data could have arrived,
- // indicating the potential error in the |timestamp|
- // value,in case the system is busy.
- // For example, the time of the last select() call.
- // If unknown, this value will be set to zero.
+ bool markerBit;
+ uint8_t payloadType;
+ uint16_t sequenceNumber;
+ uint32_t timestamp;
+ uint32_t ssrc;
+ uint8_t numCSRCs;
+ uint32_t arrOfCSRCs[kRtpCsrcSize];
+ uint8_t paddingLength;
+ uint16_t headerLength;
+ int payload_type_frequency;
+ RTPHeaderExtension extension;
};
} // namespace webrtc
#endif // WEBRTC_COMMON_TYPES_H_
-
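For orientation, a minimal sketch of how the PacketTime and RTPHeader types added in this hunk might be filled in by receiving code; the helper names and literal values below are illustrative assumptions, not part of the patch.

#include <stdint.h>

#include "webrtc/common_types.h"

namespace {

webrtc::PacketTime MakePacketTime(int64_t receive_time_us,
                                  int64_t last_select_time_us) {
  // |timestamp| is when the socket delivered the data; |not_before| bounds
  // how early the data could actually have arrived.
  return webrtc::PacketTime(receive_time_us, last_select_time_us);
}

webrtc::RTPHeader MakeExampleHeader() {
  webrtc::RTPHeader header;  // The default constructor zeroes every field.
  header.payloadType = 96;
  header.sequenceNumber = 4711;
  header.timestamp = 90000;
  header.ssrc = 0x12345678;
  header.extension.hasAbsoluteSendTime = true;
  header.extension.absoluteSendTime = 0;  // Arbitrary example value.
  return header;
}

}  // namespace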
diff --git a/chromium/third_party/webrtc/common_video/OWNERS b/chromium/third_party/webrtc/common_video/OWNERS
index 7183cf21392..0c328fa9f99 100644
--- a/chromium/third_party/webrtc/common_video/OWNERS
+++ b/chromium/third_party/webrtc/common_video/OWNERS
@@ -2,3 +2,10 @@ stefan@webrtc.org
mikhal@webrtc.org
marpan@webrtc.org
henrik.lundin@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/common_video/common_video_unittests.gyp b/chromium/third_party/webrtc/common_video/common_video_unittests.gyp
index 9523361cda8..91a11edacbb 100644
--- a/chromium/third_party/webrtc/common_video/common_video_unittests.gyp
+++ b/chromium/third_party/webrtc/common_video/common_video_unittests.gyp
@@ -32,7 +32,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -43,7 +43,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'targets': [
{
'target_name': 'common_video_unittests_apk_target',
diff --git a/chromium/third_party/webrtc/common_video/common_video_unittests.isolate b/chromium/third_party/webrtc/common_video/common_video_unittests.isolate
index a95bd414485..d33366c27e7 100644
--- a/chromium/third_party/webrtc/common_video/common_video_unittests.isolate
+++ b/chromium/third_party/webrtc/common_video/common_video_unittests.isolate
@@ -8,29 +8,27 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/common_video_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../DEPS',
- '../../resources/foreman_cif.yuv',
- '../../testing/test_env.py',
+ '<(DEPTH)/DEPS',
+ '<(DEPTH)/resources/foreman_cif.yuv',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/common_video_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/common_video/i420_video_frame.cc b/chromium/third_party/webrtc/common_video/i420_video_frame.cc
index e369ffe108f..fdc2bbc2305 100644
--- a/chromium/third_party/webrtc/common_video/i420_video_frame.cc
+++ b/chromium/third_party/webrtc/common_video/i420_video_frame.cc
@@ -10,6 +10,8 @@
#include "webrtc/common_video/interface/i420_video_frame.h"
+#include <string.h>
+
#include <algorithm> // swap
namespace webrtc {
@@ -18,6 +20,7 @@ I420VideoFrame::I420VideoFrame()
: width_(0),
height_(0),
timestamp_(0),
+ ntp_time_ms_(0),
render_time_ms_(0) {}
I420VideoFrame::~I420VideoFrame() {}
@@ -37,6 +40,7 @@ int I420VideoFrame::CreateEmptyFrame(int width, int height,
v_plane_.CreateEmptyPlane(size_v, stride_v, size_v);
// Creating empty frame - reset all values.
timestamp_ = 0;
+ ntp_time_ms_ = 0;
render_time_ms_ = 0;
return 0;
}
@@ -71,10 +75,20 @@ int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
if (ret < 0)
return ret;
timestamp_ = videoFrame.timestamp_;
+ ntp_time_ms_ = videoFrame.ntp_time_ms_;
render_time_ms_ = videoFrame.render_time_ms_;
return 0;
}
+I420VideoFrame* I420VideoFrame::CloneFrame() const {
+ scoped_ptr<I420VideoFrame> new_frame(new I420VideoFrame());
+ if (new_frame->CopyFrame(*this) == -1) {
+ // CopyFrame failed.
+ return NULL;
+ }
+ return new_frame.release();
+}
+
void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
y_plane_.Swap(videoFrame->y_plane_);
u_plane_.Swap(videoFrame->u_plane_);
@@ -82,6 +96,7 @@ void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
std::swap(width_, videoFrame->width_);
std::swap(height_, videoFrame->height_);
std::swap(timestamp_, videoFrame->timestamp_);
+ std::swap(ntp_time_ms_, videoFrame->ntp_time_ms_);
std::swap(render_time_ms_, videoFrame->render_time_ms_);
}
diff --git a/chromium/third_party/webrtc/common_video/i420_video_frame_unittest.cc b/chromium/third_party/webrtc/common_video/i420_video_frame_unittest.cc
index 5c738bd791e..ca01fd0cd52 100644
--- a/chromium/third_party/webrtc/common_video/i420_video_frame_unittest.cc
+++ b/chromium/third_party/webrtc/common_video/i420_video_frame_unittest.cc
@@ -19,8 +19,8 @@
namespace webrtc {
-bool EqualFrames(const I420VideoFrame& videoFrame1,
- const I420VideoFrame& videoFrame2);
+bool EqualFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
bool EqualFramesExceptSize(const I420VideoFrame& frame1,
const I420VideoFrame& frame2);
int ExpectedSize(int plane_stride, int image_height, PlaneType type);
@@ -49,10 +49,12 @@ TEST(TestI420VideoFrame, WidthHeightValues) {
EXPECT_EQ(valid_value, frame.height());
EXPECT_EQ(invalid_value, frame.set_height(0));
EXPECT_EQ(valid_value, frame.height());
- frame.set_timestamp(100u);
- EXPECT_EQ(100u, frame.timestamp());
- frame.set_render_time_ms(100);
- EXPECT_EQ(100, frame.render_time_ms());
+ frame.set_timestamp(123u);
+ EXPECT_EQ(123u, frame.timestamp());
+ frame.set_ntp_time_ms(456);
+ EXPECT_EQ(456, frame.ntp_time_ms());
+ frame.set_render_time_ms(789);
+ EXPECT_EQ(789, frame.render_time_ms());
}
TEST(TestI420VideoFrame, SizeAllocation) {
@@ -82,7 +84,8 @@ TEST(TestI420VideoFrame, ResetSize) {
TEST(TestI420VideoFrame, CopyFrame) {
I420VideoFrame frame1, frame2;
uint32_t timestamp = 1;
- int64_t render_time_ms = 1;
+ int64_t ntp_time_ms = 2;
+ int64_t render_time_ms = 3;
int stride_y = 15;
int stride_u = 10;
int stride_v = 10;
@@ -92,6 +95,7 @@ TEST(TestI420VideoFrame, CopyFrame) {
EXPECT_EQ(0, frame1.CreateEmptyFrame(width, height,
stride_y, stride_u, stride_v));
frame1.set_timestamp(timestamp);
+ frame1.set_ntp_time_ms(ntp_time_ms);
frame1.set_render_time_ms(render_time_ms);
const int kSizeY = 225;
const int kSizeU = 80;
@@ -118,6 +122,29 @@ TEST(TestI420VideoFrame, CopyFrame) {
EXPECT_TRUE(EqualFrames(frame1, frame2));
}
+TEST(TestI420VideoFrame, CloneFrame) {
+ I420VideoFrame frame1;
+ scoped_ptr<I420VideoFrame> frame2;
+ const int kSizeY = 225;
+ const int kSizeU = 80;
+ const int kSizeV = 80;
+ uint8_t buffer_y[kSizeY];
+ uint8_t buffer_u[kSizeU];
+ uint8_t buffer_v[kSizeV];
+ memset(buffer_y, 16, kSizeY);
+ memset(buffer_u, 8, kSizeU);
+ memset(buffer_v, 4, kSizeV);
+ frame1.CreateFrame(
+ kSizeY, buffer_y, kSizeU, buffer_u, kSizeV, buffer_v, 20, 20, 20, 10, 10);
+ frame1.set_timestamp(1);
+ frame1.set_ntp_time_ms(2);
+ frame1.set_render_time_ms(3);
+
+ frame2.reset(frame1.CloneFrame());
+ EXPECT_TRUE(frame2.get() != NULL);
+ EXPECT_TRUE(EqualFrames(frame1, *frame2));
+}
+
TEST(TestI420VideoFrame, CopyBuffer) {
I420VideoFrame frame1, frame2;
int width = 15;
@@ -151,7 +178,8 @@ TEST(TestI420VideoFrame, CopyBuffer) {
TEST(TestI420VideoFrame, FrameSwap) {
I420VideoFrame frame1, frame2;
uint32_t timestamp1 = 1;
- int64_t render_time_ms1 = 1;
+ int64_t ntp_time_ms1 = 2;
+ int64_t render_time_ms1 = 3;
int stride_y1 = 15;
int stride_u1 = 10;
int stride_v1 = 10;
@@ -160,8 +188,9 @@ TEST(TestI420VideoFrame, FrameSwap) {
const int kSizeY1 = 225;
const int kSizeU1 = 80;
const int kSizeV1 = 80;
- uint32_t timestamp2 = 2;
- int64_t render_time_ms2 = 4;
+ uint32_t timestamp2 = 4;
+ int64_t ntp_time_ms2 = 5;
+ int64_t render_time_ms2 = 6;
int stride_y2 = 30;
int stride_u2 = 20;
int stride_v2 = 20;
@@ -174,6 +203,7 @@ TEST(TestI420VideoFrame, FrameSwap) {
EXPECT_EQ(0, frame1.CreateEmptyFrame(width1, height1,
stride_y1, stride_u1, stride_v1));
frame1.set_timestamp(timestamp1);
+ frame1.set_ntp_time_ms(ntp_time_ms1);
frame1.set_render_time_ms(render_time_ms1);
// Set memory for frame1.
uint8_t buffer_y1[kSizeY1];
@@ -190,6 +220,7 @@ TEST(TestI420VideoFrame, FrameSwap) {
EXPECT_EQ(0, frame2.CreateEmptyFrame(width2, height2,
stride_y2, stride_u2, stride_v2));
frame2.set_timestamp(timestamp2);
+  frame2.set_ntp_time_ms(ntp_time_ms2);
frame2.set_render_time_ms(render_time_ms2);
// Set memory for frame2.
uint8_t buffer_y2[kSizeY2];
@@ -226,28 +257,24 @@ TEST(TestI420VideoFrame, RefCountedInstantiation) {
bool EqualFrames(const I420VideoFrame& frame1,
const I420VideoFrame& frame2) {
- if (!EqualFramesExceptSize(frame1, frame2))
- return false;
- // Compare allocated memory size.
- bool ret = true;
- ret |= (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane));
- ret |= (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane));
- ret |= (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane));
- return ret;
+ return (EqualFramesExceptSize(frame1, frame2) &&
+ (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
+ (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
+ (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)));
}
bool EqualFramesExceptSize(const I420VideoFrame& frame1,
const I420VideoFrame& frame2) {
- bool ret = true;
- ret |= (frame1.width() == frame2.width());
- ret |= (frame1.height() == frame2.height());
- ret |= (frame1.stride(kYPlane) == frame2.stride(kYPlane));
- ret |= (frame1.stride(kUPlane) == frame2.stride(kUPlane));
- ret |= (frame1.stride(kVPlane) == frame2.stride(kVPlane));
- ret |= (frame1.timestamp() == frame2.timestamp());
- ret |= (frame1.render_time_ms() == frame2.render_time_ms());
- if (!ret)
+ if ((frame1.width() != frame2.width()) ||
+ (frame1.height() != frame2.height()) ||
+ (frame1.stride(kYPlane) != frame2.stride(kYPlane)) ||
+ (frame1.stride(kUPlane) != frame2.stride(kUPlane)) ||
+ (frame1.stride(kVPlane) != frame2.stride(kVPlane)) ||
+ (frame1.timestamp() != frame2.timestamp()) ||
+ (frame1.ntp_time_ms() != frame2.ntp_time_ms()) ||
+ (frame1.render_time_ms() != frame2.render_time_ms())) {
return false;
+ }
// Memory should be equal for the minimum of the two sizes.
int size_y = std::min(frame1.allocated_size(kYPlane),
frame2.allocated_size(kYPlane));
@@ -255,13 +282,9 @@ bool EqualFramesExceptSize(const I420VideoFrame& frame1,
frame2.allocated_size(kUPlane));
int size_v = std::min(frame1.allocated_size(kVPlane),
frame2.allocated_size(kVPlane));
- int ret_val = 0;
- ret_val += memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), size_y);
- ret_val += memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), size_u);
- ret_val += memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), size_v);
- if (ret_val == 0)
- return true;
- return false;
+ return (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), size_y) == 0 &&
+ memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), size_u) == 0 &&
+ memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), size_v) == 0);
}
int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
diff --git a/chromium/third_party/webrtc/common_video/interface/i420_video_frame.h b/chromium/third_party/webrtc/common_video/interface/i420_video_frame.h
index 45f2ec3039c..5f7a572bdad 100644
--- a/chromium/third_party/webrtc/common_video/interface/i420_video_frame.h
+++ b/chromium/third_party/webrtc/common_video/interface/i420_video_frame.h
@@ -15,6 +15,8 @@
//
// Storing and handling of YUV (I420) video frames.
+#include <assert.h>
+
#include "webrtc/common_video/plane.h"
#include "webrtc/system_wrappers/interface/scoped_refptr.h"
#include "webrtc/typedefs.h"
@@ -49,13 +51,13 @@ class I420VideoFrame {
// on set dimensions - height and plane stride.
// If required size is bigger than the allocated one, new buffers of adequate
// size will be allocated.
- // Return value: 0 on success ,-1 on error.
+ // Return value: 0 on success, -1 on error.
virtual int CreateEmptyFrame(int width, int height,
int stride_y, int stride_u, int stride_v);
// CreateFrame: Sets the frame's members and buffers. If required size is
// bigger than allocated one, new buffers of adequate size will be allocated.
- // Return value: 0 on success ,-1 on error.
+ // Return value: 0 on success, -1 on error.
virtual int CreateFrame(int size_y, const uint8_t* buffer_y,
int size_u, const uint8_t* buffer_u,
int size_v, const uint8_t* buffer_v,
@@ -64,9 +66,13 @@ class I420VideoFrame {
// Copy frame: If required size is bigger than allocated one, new buffers of
// adequate size will be allocated.
- // Return value: 0 on success ,-1 on error.
+ // Return value: 0 on success, -1 on error.
virtual int CopyFrame(const I420VideoFrame& videoFrame);
+ // Make a copy of |this|. The caller owns the returned frame.
+ // Return value: a new frame on success, NULL on error.
+ virtual I420VideoFrame* CloneFrame() const;
+
// Swap Frame.
virtual void SwapFrame(I420VideoFrame* videoFrame);
@@ -99,6 +105,14 @@ class I420VideoFrame {
// Get frame timestamp (90kHz).
virtual uint32_t timestamp() const {return timestamp_;}
+  // Set capture NTP time in milliseconds.
+ virtual void set_ntp_time_ms(int64_t ntp_time_ms) {
+ ntp_time_ms_ = ntp_time_ms;
+ }
+
+  // Get capture NTP time in milliseconds.
+ virtual int64_t ntp_time_ms() const {return ntp_time_ms_;}
+
// Set render time in milliseconds.
virtual void set_render_time_ms(int64_t render_time_ms) {render_time_ms_ =
render_time_ms;}
@@ -136,6 +150,7 @@ class I420VideoFrame {
int width_;
int height_;
uint32_t timestamp_;
+ int64_t ntp_time_ms_;
int64_t render_time_ms_;
}; // I420VideoFrame
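A short usage sketch of the CloneFrame() and ntp_time_ms() additions above, assuming a caller that owns the returned copy; the dimensions and timestamps are arbitrary.

#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

void CloneExample() {
  webrtc::I420VideoFrame frame;
  if (frame.CreateEmptyFrame(640, 480, 640, 320, 320) != 0)
    return;  // Allocation failed.
  frame.set_timestamp(90000);    // RTP timestamp, 90 kHz clock.
  frame.set_ntp_time_ms(1234);   // Capture time, NTP, in milliseconds.
  frame.set_render_time_ms(5678);

  // The caller owns the returned copy; NULL signals a failed CopyFrame().
  webrtc::scoped_ptr<webrtc::I420VideoFrame> copy(frame.CloneFrame());
  if (copy.get() != NULL) {
    // |copy| now carries the same timestamps and (empty) pixel buffers.
  }
}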
diff --git a/chromium/third_party/webrtc/common_video/interface/texture_video_frame.h b/chromium/third_party/webrtc/common_video/interface/texture_video_frame.h
index e905ea7338b..2c625ab57d7 100644
--- a/chromium/third_party/webrtc/common_video/interface/texture_video_frame.h
+++ b/chromium/third_party/webrtc/common_video/interface/texture_video_frame.h
@@ -49,6 +49,7 @@ class TextureVideoFrame : public I420VideoFrame {
int stride_u,
int stride_v) OVERRIDE;
virtual int CopyFrame(const I420VideoFrame& videoFrame) OVERRIDE;
+ virtual I420VideoFrame* CloneFrame() const OVERRIDE;
virtual void SwapFrame(I420VideoFrame* videoFrame) OVERRIDE;
virtual uint8_t* buffer(PlaneType type) OVERRIDE;
virtual const uint8_t* buffer(PlaneType type) const OVERRIDE;
diff --git a/chromium/third_party/webrtc/common_video/interface/video_image.h b/chromium/third_party/webrtc/common_video/interface/video_image.h
index a7b65fd2cd5..c8df436b3cc 100644
--- a/chromium/third_party/webrtc/common_video/interface/video_image.h
+++ b/chromium/third_party/webrtc/common_video/interface/video_image.h
@@ -46,6 +46,7 @@ public:
: _encodedWidth(0),
_encodedHeight(0),
_timeStamp(0),
+ ntp_time_ms_(0),
capture_time_ms_(0),
_frameType(kDeltaFrame),
_buffer(buffer),
@@ -56,6 +57,8 @@ public:
uint32_t _encodedWidth;
uint32_t _encodedHeight;
uint32_t _timeStamp;
+ // NTP time of the capture time in local timebase in milliseconds.
+ int64_t ntp_time_ms_;
int64_t capture_time_ms_;
VideoFrameType _frameType;
uint8_t* _buffer;
diff --git a/chromium/third_party/webrtc/common_video/libyuv/libyuv_unittest.cc b/chromium/third_party/webrtc/common_video/libyuv/libyuv_unittest.cc
index 3df520ed6cb..0abe7f3cc0e 100644
--- a/chromium/third_party/webrtc/common_video/libyuv/libyuv_unittest.cc
+++ b/chromium/third_party/webrtc/common_video/libyuv/libyuv_unittest.cc
@@ -84,7 +84,7 @@ class TestLibYuv : public ::testing::Test {
FILE* source_file_;
I420VideoFrame orig_frame_;
- scoped_array<uint8_t> orig_buffer_;
+ scoped_ptr<uint8_t[]> orig_buffer_;
const int width_;
const int height_;
const int size_y_;
@@ -147,7 +147,7 @@ TEST_F(TestLibYuv, ConvertTest) {
(width_ + 1) / 2,
(width_ + 1) / 2));
printf("\nConvert #%d I420 <-> I420 \n", j);
- scoped_array<uint8_t> out_i420_buffer(new uint8_t[frame_length_]);
+ scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0,
@@ -162,7 +162,7 @@ TEST_F(TestLibYuv, ConvertTest) {
j++;
printf("\nConvert #%d I420 <-> RGB24\n", j);
- scoped_array<uint8_t> res_rgb_buffer2(new uint8_t[width_ * height_ * 3]);
+ scoped_ptr<uint8_t[]> res_rgb_buffer2(new uint8_t[width_ * height_ * 3]);
// Align the stride values for the output frame.
int stride_y = 0;
int stride_uv = 0;
@@ -184,7 +184,7 @@ TEST_F(TestLibYuv, ConvertTest) {
j++;
printf("\nConvert #%d I420 <-> UYVY\n", j);
- scoped_array<uint8_t> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
+ scoped_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
@@ -196,8 +196,8 @@ TEST_F(TestLibYuv, ConvertTest) {
j++;
printf("\nConvert #%d I420 <-> YV12\n", j);
- scoped_array<uint8_t> outYV120Buffer(new uint8_t[frame_length_]);
- scoped_array<uint8_t> res_i420_buffer(new uint8_t[frame_length_]);
+ scoped_ptr<uint8_t[]> outYV120Buffer(new uint8_t[frame_length_]);
+ scoped_ptr<uint8_t[]> res_i420_buffer(new uint8_t[frame_length_]);
I420VideoFrame yv12_frame;
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYV12, 0, outYV120Buffer.get()));
yv12_frame.CreateFrame(size_y_, outYV120Buffer.get(),
@@ -218,7 +218,7 @@ TEST_F(TestLibYuv, ConvertTest) {
j++;
printf("\nConvert #%d I420 <-> YUY2\n", j);
- scoped_array<uint8_t> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
+ scoped_ptr<uint8_t[]> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
@@ -231,7 +231,7 @@ TEST_F(TestLibYuv, ConvertTest) {
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
printf("\nConvert #%d I420 <-> RGB565\n", j);
- scoped_array<uint8_t> out_rgb565_buffer(new uint8_t[width_ * height_ * 2]);
+ scoped_ptr<uint8_t[]> out_rgb565_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB565, 0,
out_rgb565_buffer.get()));
@@ -250,7 +250,7 @@ TEST_F(TestLibYuv, ConvertTest) {
EXPECT_GT(ceil(psnr), 40);
printf("\nConvert #%d I420 <-> ARGB8888\n", j);
- scoped_array<uint8_t> out_argb8888_buffer(new uint8_t[width_ * height_ * 4]);
+ scoped_ptr<uint8_t[]> out_argb8888_buffer(new uint8_t[width_ * height_ * 4]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kARGB, 0,
out_argb8888_buffer.get()));
@@ -283,7 +283,7 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
EXPECT_EQ(0,res_i420_frame.CreateEmptyFrame(width_, height_,
stride_y, stride_uv, stride_uv));
- scoped_array<uint8_t> out_i420_buffer(new uint8_t[frame_length_]);
+ scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0,
diff --git a/chromium/third_party/webrtc/common_video/libyuv/scaler_unittest.cc b/chromium/third_party/webrtc/common_video/libyuv/scaler_unittest.cc
index fee10df718e..f186d82d89d 100644
--- a/chromium/third_party/webrtc/common_video/libyuv/scaler_unittest.cc
+++ b/chromium/third_party/webrtc/common_video/libyuv/scaler_unittest.cc
@@ -99,7 +99,7 @@ TEST_F(TestScaler, ScaleSendingBufferTooSmall) {
kI420, kI420,
kScalePoint));
I420VideoFrame test_frame2;
- scoped_array<uint8_t> orig_buffer(new uint8_t[frame_length_]);
+ scoped_ptr<uint8_t[]> orig_buffer(new uint8_t[frame_length_]);
EXPECT_GT(fread(orig_buffer.get(), 1, frame_length_, source_file_), 0U);
test_frame_.CreateFrame(size_y_, orig_buffer.get(),
size_uv_, orig_buffer.get() + size_y_,
@@ -442,7 +442,7 @@ void TestScaler::ScaleSequence(ScaleMethod method,
total_clock = 0;
int frame_count = 0;
int src_required_size = CalcBufferSize(kI420, src_width, src_height);
- scoped_array<uint8_t> frame_buffer(new uint8_t[src_required_size]);
+ scoped_ptr<uint8_t[]> frame_buffer(new uint8_t[src_required_size]);
int size_y = src_width * src_height;
int size_uv = ((src_width + 1) / 2) * ((src_height + 1) / 2);
diff --git a/chromium/third_party/webrtc/common_video/plane.cc b/chromium/third_party/webrtc/common_video/plane.cc
index 68d32cd4591..3776de1323e 100644
--- a/chromium/third_party/webrtc/common_video/plane.cc
+++ b/chromium/third_party/webrtc/common_video/plane.cc
@@ -20,8 +20,7 @@ namespace webrtc {
static const int kBufferAlignment = 64;
Plane::Plane()
- : buffer_(NULL),
- allocated_size_(0),
+ : allocated_size_(0),
plane_size_(0),
stride_(0) {}
@@ -42,8 +41,8 @@ int Plane::MaybeResize(int new_size) {
return -1;
if (new_size <= allocated_size_)
return 0;
- Allocator<uint8_t>::scoped_ptr_aligned new_buffer(
- AlignedMalloc<uint8_t>(new_size, kBufferAlignment));
+ scoped_ptr<uint8_t, AlignedFreeDeleter> new_buffer(static_cast<uint8_t*>(
+ AlignedMalloc(new_size, kBufferAlignment)));
if (buffer_.get()) {
memcpy(new_buffer.get(), buffer_.get(), plane_size_);
}
diff --git a/chromium/third_party/webrtc/common_video/plane.h b/chromium/third_party/webrtc/common_video/plane.h
index 1b74f37ec05..4031e03b4ce 100644
--- a/chromium/third_party/webrtc/common_video/plane.h
+++ b/chromium/third_party/webrtc/common_video/plane.h
@@ -12,6 +12,7 @@
#define COMMON_VIDEO_PLANE_H
#include "webrtc/system_wrappers/interface/aligned_malloc.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -63,7 +64,7 @@ class Plane {
// Return value: 0 on success ,-1 on error.
int MaybeResize(int new_size);
- Allocator<uint8_t>::scoped_ptr_aligned buffer_;
+ scoped_ptr<uint8_t, AlignedFreeDeleter> buffer_;
int allocated_size_;
int plane_size_;
int stride_;
diff --git a/chromium/third_party/webrtc/common_video/texture_video_frame.cc b/chromium/third_party/webrtc/common_video/texture_video_frame.cc
index ea53dc25cf2..f301d19c8e8 100644
--- a/chromium/third_party/webrtc/common_video/texture_video_frame.cc
+++ b/chromium/third_party/webrtc/common_video/texture_video_frame.cc
@@ -12,14 +12,6 @@
#include <assert.h>
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#define NOTREACHED() \
- do { \
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, "Not reached"); \
- assert(false); \
- } while (0)
-
namespace webrtc {
TextureVideoFrame::TextureVideoFrame(NativeHandle* handle,
@@ -41,7 +33,7 @@ int TextureVideoFrame::CreateEmptyFrame(int width,
int stride_y,
int stride_u,
int stride_v) {
- NOTREACHED();
+ assert(false); // Should not be called.
return -1;
}
@@ -56,46 +48,51 @@ int TextureVideoFrame::CreateFrame(int size_y,
int stride_y,
int stride_u,
int stride_v) {
- NOTREACHED();
+ assert(false); // Should not be called.
return -1;
}
int TextureVideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
- NOTREACHED();
+ assert(false); // Should not be called.
return -1;
}
+I420VideoFrame* TextureVideoFrame::CloneFrame() const {
+ return new TextureVideoFrame(
+ handle_, width(), height(), timestamp(), render_time_ms());
+}
+
void TextureVideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
- NOTREACHED();
+ assert(false); // Should not be called.
}
uint8_t* TextureVideoFrame::buffer(PlaneType type) {
- NOTREACHED();
+ assert(false); // Should not be called.
return NULL;
}
const uint8_t* TextureVideoFrame::buffer(PlaneType type) const {
- NOTREACHED();
+ assert(false); // Should not be called.
return NULL;
}
int TextureVideoFrame::allocated_size(PlaneType type) const {
- NOTREACHED();
+ assert(false); // Should not be called.
return -1;
}
int TextureVideoFrame::stride(PlaneType type) const {
- NOTREACHED();
+ assert(false); // Should not be called.
return -1;
}
bool TextureVideoFrame::IsZeroSize() const {
- NOTREACHED();
+ assert(false); // Should not be called.
return true;
}
void TextureVideoFrame::ResetSize() {
- NOTREACHED();
+ assert(false); // Should not be called.
}
void* TextureVideoFrame::native_handle() const { return handle_.get(); }
diff --git a/chromium/third_party/webrtc/common_video/texture_video_frame_unittest.cc b/chromium/third_party/webrtc/common_video/texture_video_frame_unittest.cc
index 04e09a67d8f..408f5f6120d 100644
--- a/chromium/third_party/webrtc/common_video/texture_video_frame_unittest.cc
+++ b/chromium/third_party/webrtc/common_video/texture_video_frame_unittest.cc
@@ -8,9 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/common_video/interface/texture_video_frame.h"
+
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_video/interface/native_handle.h"
-#include "webrtc/common_video/interface/texture_video_frame.h"
namespace webrtc {
@@ -27,6 +28,9 @@ class NativeHandleImpl : public NativeHandle {
int32_t ref_count_;
};
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+
TEST(TestTextureVideoFrame, InitialValues) {
NativeHandleImpl handle;
TextureVideoFrame frame(&handle, 640, 480, 100, 10);
@@ -55,4 +59,21 @@ TEST(TestTextureVideoFrame, RefCount) {
EXPECT_EQ(0, handle.ref_count());
}
+TEST(TestTextureVideoFrame, CloneFrame) {
+ NativeHandleImpl handle;
+ TextureVideoFrame frame1(&handle, 640, 480, 100, 200);
+ scoped_ptr<I420VideoFrame> frame2(frame1.CloneFrame());
+ EXPECT_TRUE(frame2.get() != NULL);
+ EXPECT_TRUE(EqualTextureFrames(frame1, *frame2));
+}
+
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ return ((frame1.native_handle() == frame2.native_handle()) &&
+ (frame1.width() == frame2.width()) &&
+ (frame1.height() == frame2.height()) &&
+ (frame1.timestamp() == frame2.timestamp()) &&
+ (frame1.render_time_ms() == frame2.render_time_ms()));
+}
+
} // namespace webrtc
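A brief sketch of what the CloneFrame() override tested above implies for callers: the clone shares the native handle and copies only metadata. The function below is illustrative, not part of the test.

#include "webrtc/common_video/interface/native_handle.h"
#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

void CloneTextureExample(webrtc::NativeHandle* handle) {
  webrtc::TextureVideoFrame frame(handle, 640, 480, 100, 10);
  webrtc::scoped_ptr<webrtc::I420VideoFrame> clone(frame.CloneFrame());
  // |clone| refers to the same native handle; calling buffer() or stride()
  // on either frame still asserts, as for any texture-backed frame.
}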
diff --git a/chromium/third_party/webrtc/config.cc b/chromium/third_party/webrtc/config.cc
new file mode 100644
index 00000000000..e0324b9e475
--- /dev/null
+++ b/chromium/third_party/webrtc/config.cc
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/config.h"
+
+#include <sstream>
+#include <string>
+
+namespace webrtc {
+std::string FecConfig::ToString() const {
+ std::stringstream ss;
+ ss << "{ulpfec_payload_type: " << ulpfec_payload_type;
+ ss << ", red_payload_type: " << red_payload_type;
+ ss << '}';
+ return ss.str();
+}
+
+std::string RtpExtension::ToString() const {
+ std::stringstream ss;
+ ss << "{name: " << name;
+ ss << ", id: " << id;
+ ss << '}';
+ return ss.str();
+}
+
+std::string VideoStream::ToString() const {
+ std::stringstream ss;
+ ss << "{width: " << width;
+ ss << ", height: " << height;
+ ss << ", max_framerate: " << max_framerate;
+  ss << ", min_bitrate_bps: " << min_bitrate_bps;
+  ss << ", target_bitrate_bps: " << target_bitrate_bps;
+  ss << ", max_bitrate_bps: " << max_bitrate_bps;
+ ss << ", max_qp: " << max_qp;
+
+ ss << ", temporal_layers: {";
+ for (size_t i = 0; i < temporal_layers.size(); ++i) {
+ ss << temporal_layers[i];
+ if (i != temporal_layers.size() - 1)
+ ss << "}, {";
+ }
+ ss << '}';
+
+ ss << '}';
+ return ss.str();
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/config.h b/chromium/third_party/webrtc/config.h
index 3ff3bb861f7..7717bbad9e3 100644
--- a/chromium/third_party/webrtc/config.h
+++ b/chromium/third_party/webrtc/config.h
@@ -16,6 +16,7 @@
#include <string>
#include <vector>
+#include "webrtc/common_types.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -33,6 +34,15 @@ struct RtpStatistics {
std::string c_name;
};
+struct StreamStats {
+ StreamStats() : key_frames(0), delta_frames(0), bitrate_bps(0) {}
+ uint32_t key_frames;
+ uint32_t delta_frames;
+ int32_t bitrate_bps;
+ StreamDataCounters rtp_stats;
+ RtcpStatistics rtcp_stats;
+};
+
// Settings for NACK, see RFC 4585 for details.
struct NackConfig {
NackConfig() : rtp_history_ms(0) {}
@@ -47,6 +57,7 @@ struct NackConfig {
// payload types to '-1' to disable.
struct FecConfig {
FecConfig() : ulpfec_payload_type(-1), red_payload_type(-1) {}
+ std::string ToString() const;
// Payload type used for ULPFEC packets.
int ulpfec_payload_type;
@@ -54,28 +65,42 @@ struct FecConfig {
int red_payload_type;
};
-// Settings for RTP retransmission payload format, see RFC 4588 for details.
-struct RtxConfig {
- RtxConfig() : rtx_payload_type(0), video_payload_type(0) {}
- // SSRCs to use for the RTX streams.
- std::vector<uint32_t> ssrcs;
-
- // Payload type to use for the RTX stream.
- int rtx_payload_type;
-
- // Original video payload this RTX stream is used for.
- int video_payload_type;
-};
-
// RTP header extension to use for the video stream, see RFC 5285.
struct RtpExtension {
- static const char* kTOffset;
- static const char* kAbsSendTime;
RtpExtension(const char* name, int id) : name(name), id(id) {}
+ std::string ToString() const;
// TODO(mflodman) Add API to query supported extensions.
+ static const char* kTOffset;
+ static const char* kAbsSendTime;
std::string name;
int id;
};
+
+struct VideoStream {
+ VideoStream()
+ : width(0),
+ height(0),
+ max_framerate(-1),
+ min_bitrate_bps(-1),
+ target_bitrate_bps(-1),
+ max_bitrate_bps(-1),
+ max_qp(-1) {}
+ std::string ToString() const;
+
+ size_t width;
+ size_t height;
+ int max_framerate;
+
+ int min_bitrate_bps;
+ int target_bitrate_bps;
+ int max_bitrate_bps;
+
+ int max_qp;
+
+ // Bitrate thresholds for enabling additional temporal layers.
+ std::vector<int> temporal_layers;
+};
+
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_CONFIG_H_
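A minimal sketch of populating the new VideoStream and FecConfig structs and logging them through their ToString() helpers; the bitrate and payload-type values are arbitrary examples, not recommended defaults.

#include <stdio.h>

#include "webrtc/config.h"

void DumpExampleConfig() {
  webrtc::VideoStream stream;
  stream.width = 1280;
  stream.height = 720;
  stream.max_framerate = 30;
  stream.min_bitrate_bps = 300000;
  stream.target_bitrate_bps = 2000000;
  stream.max_bitrate_bps = 2500000;
  stream.max_qp = 56;
  stream.temporal_layers.push_back(400000);  // Threshold for a second layer.

  webrtc::FecConfig fec;
  fec.ulpfec_payload_type = 117;
  fec.red_payload_type = 116;

  printf("%s\n%s\n", stream.ToString().c_str(), fec.ToString().c_str());
}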
diff --git a/chromium/third_party/webrtc/engine_configurations.h b/chromium/third_party/webrtc/engine_configurations.h
index 8294c9afc83..be858b8e5ce 100644
--- a/chromium/third_party/webrtc/engine_configurations.h
+++ b/chromium/third_party/webrtc/engine_configurations.h
@@ -63,7 +63,6 @@
#define WEBRTC_VOICE_ENGINE_AGC // Near-end AGC
#define WEBRTC_VOICE_ENGINE_ECHO // Near-end AEC
#define WEBRTC_VOICE_ENGINE_NR // Near-end NS
-#define WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
#if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS)
#define WEBRTC_VOICE_ENGINE_TYPING_DETECTION // Typing detection
@@ -84,11 +83,6 @@
#define WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
#define WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
-#ifndef WEBRTC_CHROMIUM_BUILD
-#define WEBRTC_VOICE_ENGINE_CALL_REPORT_API
-#define WEBRTC_VOICE_ENGINE_ENCRYPTION_API
-#endif
-
// ============================================================================
// VideoEngine
// ============================================================================
@@ -102,7 +96,6 @@
#define WEBRTC_VIDEO_ENGINE_CAPTURE_API
#define WEBRTC_VIDEO_ENGINE_CODEC_API
-#define WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
#define WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
#define WEBRTC_VIDEO_ENGINE_RENDER_API
#define WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
diff --git a/chromium/third_party/webrtc/examples/OWNERS b/chromium/third_party/webrtc/examples/OWNERS
new file mode 100644
index 00000000000..5c2e168242a
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/OWNERS
@@ -0,0 +1,2 @@
+fischman@webrtc.org
+henrike@webrtc.org
\ No newline at end of file
diff --git a/chromium/third_party/webrtc/examples/android/OWNERS b/chromium/third_party/webrtc/examples/android/OWNERS
index 11c49701198..475705e2eb8 100644
--- a/chromium/third_party/webrtc/examples/android/OWNERS
+++ b/chromium/third_party/webrtc/examples/android/OWNERS
@@ -1,2 +1,2 @@
-fischman@webrtc.org
-henrike@webrtc.org
+fischman@webrtc.org
+henrike@webrtc.org
diff --git a/chromium/third_party/webrtc/examples/android/media_demo/jni/on_load.cc b/chromium/third_party/webrtc/examples/android/media_demo/jni/on_load.cc
index 27a2394b3d5..9fc4ca92b21 100644
--- a/chromium/third_party/webrtc/examples/android/media_demo/jni/on_load.cc
+++ b/chromium/third_party/webrtc/examples/android/media_demo/jni/on_load.cc
@@ -38,7 +38,7 @@ JOWW(void, NativeWebRtcContextRegistry_register)(
jobject context) {
webrtc_examples::SetVoeDeviceObjects(g_vm);
webrtc_examples::SetVieDeviceObjects(g_vm);
- CHECK(webrtc::VideoEngine::SetAndroidObjects(g_vm) == 0,
+ CHECK(webrtc::VideoEngine::SetAndroidObjects(g_vm, context) == 0,
"Failed to register android objects to video engine");
CHECK(webrtc::VoiceEngine::SetAndroidObjects(g_vm, jni, context) == 0,
"Failed to register android objects to voice engine");
@@ -47,6 +47,8 @@ JOWW(void, NativeWebRtcContextRegistry_register)(
JOWW(void, NativeWebRtcContextRegistry_unRegister)(
JNIEnv* jni,
jclass) {
+ CHECK(webrtc::VideoEngine::SetAndroidObjects(NULL, NULL) == 0,
+ "Failed to unregister android objects from video engine");
CHECK(webrtc::VoiceEngine::SetAndroidObjects(NULL, NULL, NULL) == 0,
"Failed to unregister android objects from voice engine");
webrtc_examples::ClearVieDeviceObjects();
diff --git a/chromium/third_party/webrtc/examples/android/opensl_loopback/AndroidManifest.xml b/chromium/third_party/webrtc/examples/android/opensl_loopback/AndroidManifest.xml
new file mode 100644
index 00000000000..3d32a7afcd0
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/android/opensl_loopback/AndroidManifest.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ android:versionCode="1" package="org.webrtc.app" android:versionName="1.07">
+ <application android:icon="@drawable/logo"
+ android:label="@string/app_name"
+ android:debuggable="true">
+ <activity android:name=".OpenSlDemo"
+ android:label="@string/app_name"
+ android:screenOrientation="landscape"
+ >
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ <action android:name="android.intent.action.HEADSET_PLUG"/>
+ </intent-filter>
+ </activity>
+ </application>
+
+ <uses-sdk android:minSdkVersion="14" />
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.WAKE_LOCK" />
+</manifest>
diff --git a/chromium/third_party/webrtc/examples/android/opensl_loopback/README b/chromium/third_party/webrtc/examples/android/opensl_loopback/README
new file mode 100644
index 00000000000..59f6de9dbba
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/android/opensl_loopback/README
@@ -0,0 +1,23 @@
+This directory contains an app for measuring the total delay from the native
+OpenSL implementation. Note that it just loops audio back from mic to speakers.
+
+Prerequisites:
+- Make sure gclient is checking out tools necessary to target Android: your
+ .gclient file should contain a line like:
+ target_os = ['android']
+ Make sure to re-run gclient sync after adding this to download the tools.
+- Env vars need to be set up to target Android; easiest way to do this is to run
+ (from the libjingle trunk directory):
+ . ./build/android/envsetup.sh
+ Note that this clobbers any previously-set $GYP_DEFINES so it must be done
+ before the next item.
+- Set up webrtc-related GYP variables:
+ export GYP_DEFINES="$GYP_DEFINES java_home=</path/to/JDK>
+ enable_android_opensl=1"
+- Finally, run "gclient runhooks" to generate Android-targeting .ninja files.
+
+Example of building & using the app:
+
+cd <path/to/repository>/trunk
+ninja -C out/Debug OpenSlDemo
+adb install -r out/Debug/OpenSlDemo-debug.apk
\ No newline at end of file
diff --git a/chromium/third_party/webrtc/examples/android/opensl_loopback/build.xml b/chromium/third_party/webrtc/examples/android/opensl_loopback/build.xml
new file mode 100644
index 00000000000..b6e033a6a82
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/android/opensl_loopback/build.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="OpenSlDemo" default="help">
+
+ <!-- The local.properties file is created and updated by the 'android' tool.
+ It contains the path to the SDK. It should *NOT* be checked into
+ Version Control Systems. -->
+ <property file="local.properties" />
+
+ <!-- The ant.properties file can be created by you. It is only edited by the
+ 'android' tool to add properties to it.
+ This is the place to change some Ant specific build properties.
+ Here are some properties you may want to change/update:
+
+ source.dir
+ The name of the source directory. Default is 'src'.
+ out.dir
+ The name of the output directory. Default is 'bin'.
+
+ For other overridable properties, look at the beginning of the rules
+ files in the SDK, at tools/ant/build.xml
+
+ Properties related to the SDK location or the project target should
+ be updated using the 'android' tool with the 'update' action.
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems.
+
+ -->
+ <property file="ant.properties" />
+
+ <!-- if sdk.dir was not set from one of the property file, then
+ get it from the ANDROID_SDK_ROOT env var.
+ This must be done before we load project.properties since
+ the proguard config can use sdk.dir -->
+ <property environment="env" />
+ <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
+ <isset property="env.ANDROID_SDK_ROOT" />
+ </condition>
+
+ <!-- The project.properties file is created and updated by the 'android'
+ tool, as well as ADT.
+
+ This contains project specific properties such as project target, and library
+ dependencies. Lower level build properties are stored in ant.properties
+ (or in .classpath for Eclipse projects).
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems. -->
+ <loadproperties srcFile="project.properties" />
+
+ <!-- quick check on sdk.dir -->
+ <fail
+ message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_SDK_ROOT environment variable."
+ unless="sdk.dir"
+ />
+
+ <!--
+ Import per project custom build rules if present at the root of the project.
+ This is the place to put custom intermediary targets such as:
+ -pre-build
+ -pre-compile
+ -post-compile (This is typically used for code obfuscation.
+ Compiled code location: ${out.classes.absolute.dir}
+ If this is not done in place, override ${out.dex.input.absolute.dir})
+ -post-package
+ -post-build
+ -pre-clean
+ -->
+ <import file="custom_rules.xml" optional="true" />
+
+ <!-- Import the actual build file.
+
+ To customize existing targets, there are two options:
+ - Customize only one target:
+ - copy/paste the target into this file, *before* the
+ <import> task.
+ - customize it to your needs.
+ - Customize the whole content of build.xml
+ - copy/paste the content of the rules files (minus the top node)
+ into this file, replacing the <import> task.
+ - customize to your needs.
+
+ ***********************
+ ****** IMPORTANT ******
+ ***********************
+ In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
+ in order to avoid having your file be overridden by tools such as "android update project"
+ -->
+ <!-- version-tag: 1 -->
+ <import file="${sdk.dir}/tools/ant/build.xml" />
+
+</project>
diff --git a/chromium/third_party/webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.cc b/chromium/third_party/webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.cc
new file mode 100644
index 00000000000..23b60eebf31
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.cc
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/audio_device/android/opensles_common.h"
+#include "webrtc/modules/audio_device/android/audio_common.h"
+
+namespace webrtc {
+
+FakeAudioDeviceBuffer::FakeAudioDeviceBuffer()
+ : fifo_(kNumBuffers),
+ next_available_buffer_(0),
+ record_channels_(0),
+ play_channels_(0) {
+ buf_.reset(new scoped_ptr<int8_t[]>[kNumBuffers]);
+ for (int i = 0; i < kNumBuffers; ++i) {
+ buf_[i].reset(new int8_t[buffer_size_bytes()]);
+ }
+}
+
+int32_t FakeAudioDeviceBuffer::SetRecordingSampleRate(uint32_t fsHz) {
+ assert(static_cast<int>(fsHz) == sample_rate());
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::SetPlayoutSampleRate(uint32_t fsHz) {
+ assert(static_cast<int>(fsHz) == sample_rate());
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::SetRecordingChannels(uint8_t channels) {
+ assert(channels > 0);
+ record_channels_ = channels;
+ assert((play_channels_ == 0) ||
+ (record_channels_ == play_channels_));
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::SetPlayoutChannels(uint8_t channels) {
+ assert(channels > 0);
+ play_channels_ = channels;
+ assert((record_channels_ == 0) ||
+ (record_channels_ == play_channels_));
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::SetRecordedBuffer(const void* audioBuffer,
+ uint32_t nSamples) {
+ assert(audioBuffer);
+ assert(fifo_.size() < fifo_.capacity());
+ assert(nSamples == kDefaultBufSizeInSamples);
+ int8_t* buffer = buf_[next_available_buffer_].get();
+ next_available_buffer_ = (next_available_buffer_ + 1) % kNumBuffers;
+ memcpy(buffer, audioBuffer, nSamples * sizeof(int16_t));
+ fifo_.Push(buffer);
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::RequestPlayoutData(uint32_t nSamples) {
+ assert(nSamples == kDefaultBufSizeInSamples);
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::GetPlayoutData(void* audioBuffer) {
+ assert(audioBuffer);
+ if (fifo_.size() < 1) {
+    // Play out silence until there is data available.
+ memset(audioBuffer, 0, buffer_size_bytes());
+ return buffer_size_samples();
+ }
+ int8_t* buffer = fifo_.Pop();
+ memcpy(audioBuffer, buffer, buffer_size_bytes());
+ return buffer_size_samples();
+}
+
+int FakeAudioDeviceBuffer::sample_rate() const {
+ return audio_manager_.low_latency_supported() ?
+ audio_manager_.native_output_sample_rate() : kDefaultSampleRate;
+}
+
+int FakeAudioDeviceBuffer::buffer_size_samples() const {
+ return sample_rate() * 10 / 1000;
+}
+
+int FakeAudioDeviceBuffer::buffer_size_bytes() const {
+ return buffer_size_samples() * kNumChannels * sizeof(int16_t);
+}
+
+
+void FakeAudioDeviceBuffer::ClearBuffer() {
+ while (fifo_.size() != 0) {
+ fifo_.Pop();
+ }
+ next_available_buffer_ = 0;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.h b/chromium/third_party/webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.h
new file mode 100644
index 00000000000..1ef866cb34f
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_EXAMPLES_ANDROID_OPENSL_LOOPBACK_FAKE_AUDIO_DEVICE_BUFFER_H_
+#define WEBRTC_EXAMPLES_ANDROID_OPENSL_LOOPBACK_FAKE_AUDIO_DEVICE_BUFFER_H_
+
+#include "webrtc/modules/audio_device/android/audio_manager_jni.h"
+#include "webrtc/modules/audio_device/android/single_rw_fifo.h"
+#include "webrtc/modules/audio_device/audio_device_buffer.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+// Fake AudioDeviceBuffer implementation that returns audio data that is pushed
+// to it. It implements all APIs used by the OpenSL implementation.
+class FakeAudioDeviceBuffer : public AudioDeviceBuffer {
+ public:
+ FakeAudioDeviceBuffer();
+ virtual ~FakeAudioDeviceBuffer() {}
+
+ virtual int32_t SetRecordingSampleRate(uint32_t fsHz);
+ virtual int32_t SetPlayoutSampleRate(uint32_t fsHz);
+ virtual int32_t SetRecordingChannels(uint8_t channels);
+ virtual int32_t SetPlayoutChannels(uint8_t channels);
+ virtual int32_t SetRecordedBuffer(const void* audioBuffer,
+ uint32_t nSamples);
+ virtual void SetVQEData(int playDelayMS,
+ int recDelayMS,
+ int clockDrift) {}
+ virtual int32_t DeliverRecordedData() { return 0; }
+ virtual int32_t RequestPlayoutData(uint32_t nSamples);
+ virtual int32_t GetPlayoutData(void* audioBuffer);
+
+ void ClearBuffer();
+
+ private:
+ enum {
+ // Each buffer contains 10 ms of data since that is what OpenSlesInput
+ // delivers. Keep 7 buffers which would cover 70 ms of data. These buffers
+ // are needed because of jitter between OpenSl recording and playing.
+ kNumBuffers = 7,
+ };
+ int sample_rate() const;
+ int buffer_size_samples() const;
+ int buffer_size_bytes() const;
+
+ // Java API handle
+ AudioManagerJni audio_manager_;
+
+ SingleRwFifo fifo_;
+ scoped_ptr<scoped_ptr<int8_t[]>[]> buf_;
+ int next_available_buffer_;
+
+ uint8_t record_channels_;
+ uint8_t play_channels_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_EXAMPLES_ANDROID_OPENSL_LOOPBACK_FAKE_AUDIO_DEVICE_BUFFER_H_
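A rough sketch, under stated assumptions, of driving the fake buffer declared above directly: channel counts must match, an empty FIFO plays out silence, and ClearBuffer() resets the queue. The output array below is sized generously as a guess; real callers go through the OpenSL input/output classes instead.

#include <stdint.h>

#include "webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.h"

void PrimeAndClear(webrtc::FakeAudioDeviceBuffer* buffer) {
  // Record and playout channel counts must match (asserted in the setters).
  buffer->SetRecordingChannels(1);
  buffer->SetPlayoutChannels(1);

  // With an empty FIFO, GetPlayoutData() writes silence and returns the
  // 10 ms buffer size in samples.
  int16_t playout[4096] = {0};  // Assumed large enough for any sample rate.
  buffer->GetPlayoutData(playout);

  buffer->ClearBuffer();  // Drop any queued 10 ms blocks.
}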
diff --git a/chromium/third_party/webrtc/examples/android/opensl_loopback/jni/opensl_runner.cc b/chromium/third_party/webrtc/examples/android/opensl_loopback/jni/opensl_runner.cc
new file mode 100644
index 00000000000..5b7c092343b
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/android/opensl_loopback/jni/opensl_runner.cc
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <jni.h>
+
+#include "webrtc/examples/android/opensl_loopback/fake_audio_device_buffer.h"
+#include "webrtc/modules/audio_device/android/audio_device_template.h"
+#include "webrtc/modules/audio_device/android/audio_record_jni.h"
+#include "webrtc/modules/audio_device/android/audio_track_jni.h"
+#include "webrtc/modules/audio_device/android/opensles_input.h"
+#include "webrtc/modules/audio_device/android/opensles_output.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+// Java globals
+static JavaVM* g_vm = NULL;
+static jclass g_osr = NULL;
+
+namespace webrtc {
+
+template <class InputType, class OutputType>
+class OpenSlRunnerTemplate {
+ public:
+ OpenSlRunnerTemplate()
+ : output_(0),
+ input_(0, &output_) {
+ output_.AttachAudioBuffer(&audio_buffer_);
+ if (output_.Init() != 0) {
+ assert(false);
+ }
+ if (output_.InitPlayout() != 0) {
+ assert(false);
+ }
+ input_.AttachAudioBuffer(&audio_buffer_);
+ if (input_.Init() != 0) {
+ assert(false);
+ }
+ if (input_.InitRecording() != 0) {
+ assert(false);
+ }
+ }
+
+ ~OpenSlRunnerTemplate() {}
+
+ void StartPlayRecord() {
+ output_.StartPlayout();
+ input_.StartRecording();
+ }
+
+ void StopPlayRecord() {
+ // There are large enough buffers to compensate for recording and playing
+ // jitter such that the timing of stopping playing or recording should not
+ // result in over or underrun.
+ input_.StopRecording();
+ output_.StopPlayout();
+ audio_buffer_.ClearBuffer();
+ }
+
+ private:
+ OutputType output_;
+ InputType input_;
+ FakeAudioDeviceBuffer audio_buffer_;
+};
+
+class OpenSlRunner
+ : public OpenSlRunnerTemplate<OpenSlesInput, OpenSlesOutput> {
+ public:
+ // Global class implementing native code.
+ static OpenSlRunner* g_runner;
+
+
+ OpenSlRunner() {}
+ virtual ~OpenSlRunner() {}
+
+ static JNIEXPORT void JNICALL RegisterApplicationContext(
+ JNIEnv* env,
+ jobject obj,
+ jobject context) {
+ assert(!g_runner); // Should only be called once.
+ OpenSlesInput::SetAndroidAudioDeviceObjects(g_vm, env, context);
+ OpenSlesOutput::SetAndroidAudioDeviceObjects(g_vm, env, context);
+ g_runner = new OpenSlRunner();
+ }
+
+ static JNIEXPORT void JNICALL Start(JNIEnv * env, jobject) {
+ g_runner->StartPlayRecord();
+ }
+
+ static JNIEXPORT void JNICALL Stop(JNIEnv * env, jobject) {
+ g_runner->StopPlayRecord();
+ }
+};
+
+OpenSlRunner* OpenSlRunner::g_runner = NULL;
+
+} // namespace webrtc
+
+jint JNI_OnLoad(JavaVM* vm, void* reserved) {
+ // Only called once.
+ assert(!g_vm);
+ JNIEnv* env;
+ if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
+ return -1;
+ }
+
+ jclass local_osr = env->FindClass("org/webrtc/app/OpenSlRunner");
+ assert(local_osr != NULL);
+ g_osr = static_cast<jclass>(env->NewGlobalRef(local_osr));
+ JNINativeMethod nativeFunctions[] = {
+ {"RegisterApplicationContext", "(Landroid/content/Context;)V",
+ reinterpret_cast<void*>(
+ &webrtc::OpenSlRunner::RegisterApplicationContext)},
+ {"Start", "()V", reinterpret_cast<void*>(&webrtc::OpenSlRunner::Start)},
+ {"Stop", "()V", reinterpret_cast<void*>(&webrtc::OpenSlRunner::Stop)}
+ };
+ int ret_val = env->RegisterNatives(g_osr, nativeFunctions, 3);
+ if (ret_val != 0) {
+ assert(false);
+ }
+ g_vm = vm;
+ return JNI_VERSION_1_6;
+}
diff --git a/chromium/third_party/webrtc/examples/android/opensl_loopback/project.properties b/chromium/third_party/webrtc/examples/android/opensl_loopback/project.properties
new file mode 100644
index 00000000000..8459f9b8107
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/android/opensl_loopback/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-19
+
+java.compilerargs=-Xlint:all -Werror
diff --git a/chromium/third_party/webrtc/examples/android/opensl_loopback/res/drawable/logo.png b/chromium/third_party/webrtc/examples/android/opensl_loopback/res/drawable/logo.png
new file mode 100644
index 00000000000..a07c69fa5a0
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/android/opensl_loopback/res/drawable/logo.png
Binary files differ
diff --git a/chromium/third_party/webrtc/examples/android/opensl_loopback/res/layout/open_sl_demo.xml b/chromium/third_party/webrtc/examples/android/opensl_loopback/res/layout/open_sl_demo.xml
new file mode 100644
index 00000000000..1efad733668
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/android/opensl_loopback/res/layout/open_sl_demo.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="vertical"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:gravity="bottom">
+ <TextView android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:layout_weight="1"
+ android:layout_gravity="top"
+ android:text="About: This application, when started, loops back audio as quickly as the native OpenSL implementation allows. Just starting it will lead to a feedback loop. It can be used to measure delay with the proper hardware. Using it as is has little utility." />
+ <Button android:id="@+id/btStartStopCall"
+ android:layout_width="100dip"
+ android:layout_height="wrap_content"
+ android:text="@string/startCall"
+ android:layout_gravity="center"/>
+ <Button android:id="@+id/btExit"
+ android:layout_width="100dip"
+ android:layout_height="wrap_content"
+ android:layout_gravity="center"
+ android:text="@string/exit"/>
+</LinearLayout >
diff --git a/chromium/third_party/webrtc/examples/android/opensl_loopback/res/values/strings.xml b/chromium/third_party/webrtc/examples/android/opensl_loopback/res/values/strings.xml
new file mode 100644
index 00000000000..f51980624f0
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/android/opensl_loopback/res/values/strings.xml
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="app_name">WebRTCOpenSLLoopback</string>
+ <string name="startCall">StartCall</string>
+ <string name="stopCall">StopCall</string>
+ <string name="exit">Exit</string>
+</resources>
diff --git a/chromium/third_party/webrtc/experiments.h b/chromium/third_party/webrtc/experiments.h
index 0fe1cd1c568..b03d248cda7 100644
--- a/chromium/third_party/webrtc/experiments.h
+++ b/chromium/third_party/webrtc/experiments.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_EXPERIMENTS_H_
#define WEBRTC_EXPERIMENTS_H_
+#include "webrtc/typedefs.h"
+
namespace webrtc {
struct PaddingStrategy {
PaddingStrategy()
@@ -21,5 +23,30 @@ struct PaddingStrategy {
const bool redundant_payloads;
};
+
+struct RemoteBitrateEstimatorMinRate {
+ RemoteBitrateEstimatorMinRate() : min_rate(30000) {}
+ RemoteBitrateEstimatorMinRate(uint32_t min_rate) : min_rate(min_rate) {}
+
+ uint32_t min_rate;
+};
+
+struct SkipEncodingUnusedStreams {
+ SkipEncodingUnusedStreams() : enabled(false) {}
+ explicit SkipEncodingUnusedStreams(bool set_enabled)
+ : enabled(set_enabled) {}
+ virtual ~SkipEncodingUnusedStreams() {}
+
+ const bool enabled;
+};
+
+struct AimdRemoteRateControl {
+ AimdRemoteRateControl() : enabled(false) {}
+ explicit AimdRemoteRateControl(bool set_enabled)
+ : enabled(set_enabled) {}
+ virtual ~AimdRemoteRateControl() {}
+
+ const bool enabled;
+};
} // namespace webrtc
#endif // WEBRTC_EXPERIMENTS_H_
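A small sketch of how the experiment structs added above might be consumed; how they are actually plumbed through the rest of webrtc (for example via a Config object) is not shown and is assumed here.

#include "webrtc/experiments.h"

bool UseAimdRateControl(const webrtc::AimdRemoteRateControl& experiment) {
  return experiment.enabled;  // Defaults to false.
}

webrtc::RemoteBitrateEstimatorMinRate MakeMinRateExperiment() {
  // Raise the floor from the 30000 bps default to 50000 bps.
  return webrtc::RemoteBitrateEstimatorMinRate(50000);
}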
diff --git a/chromium/third_party/webrtc/frame_callback.h b/chromium/third_party/webrtc/frame_callback.h
index cfb07d8b161..1d73f4a0472 100644
--- a/chromium/third_party/webrtc/frame_callback.h
+++ b/chromium/third_party/webrtc/frame_callback.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_FRAME_CALLBACK_H_
#define WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_FRAME_CALLBACK_H_
+#include <stddef.h>
+
#include "webrtc/common_types.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/OWNERS b/chromium/third_party/webrtc/modules/OWNERS
new file mode 100644
index 00000000000..bbffda7e492
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/OWNERS
@@ -0,0 +1,6 @@
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c
index 5c23f7ab71e..c66be2e484a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c
@@ -67,9 +67,9 @@ int WebRtcIsacfix_EncHistMulti(Bitstr_enc *streamData,
W_upper_LSB = W_upper & 0x0000FFFF;
W_upper_MSB = WEBRTC_SPL_RSHIFT_W32(W_upper, 16);
W_lower = WEBRTC_SPL_UMUL(W_upper_MSB, cdfLo);
- W_lower += WEBRTC_SPL_UMUL_RSFT16(W_upper_LSB, cdfLo);
+ W_lower += ((W_upper_LSB * cdfLo) >> 16);
W_upper = WEBRTC_SPL_UMUL(W_upper_MSB, cdfHi);
- W_upper += WEBRTC_SPL_UMUL_RSFT16(W_upper_LSB, cdfHi);
+ W_upper += ((W_upper_LSB * cdfHi) >> 16);
/* shift interval such that it begins at zero */
W_upper -= ++W_lower;
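
The replacement above expands the WEBRTC_SPL_UMUL_RSFT16 macro into a plain shift so the intent is visible at the call site. As a minimal sketch of the arithmetic (illustrative names, not a WebRTC API), the 32-bit interval width is split into 16-bit halves so that each partial product of the Q16 scaling fits in 32 bits:

#include <stdint.h>

/* Computes (w_upper * cdf_q16) >> 16 without a 64-bit multiply by
 * splitting w_upper into its 16-bit MSB and LSB halves, mirroring the
 * W_upper_MSB / W_upper_LSB terms in the hunk above. */
static uint32_t scale_interval_q16(uint32_t w_upper, uint16_t cdf_q16) {
  uint32_t msb = w_upper >> 16;
  uint32_t lsb = w_upper & 0xFFFF;
  return msb * cdf_q16 + ((lsb * cdf_q16) >> 16);
}
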
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
index b540ed5eef2..9391fb3c1d0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
@@ -17,7 +17,6 @@
#include "arith_routins.h"
-
/* Tables for piecewise linear cdf functions: y = k*x */
/* x Points for function piecewise() in Q15 */
@@ -248,7 +247,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
int16_t envCount;
uint16_t tmpARSpecQ8 = 0;
int k, i;
-
+ int offset = 0;
/* point to beginning of stream buffer */
streamPtr = streamData->stream + streamData->stream_index;
@@ -304,7 +303,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
- W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+ W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16;
if (streamVal > W_tmp)
{
@@ -313,7 +312,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
- W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+ W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16;
while (streamVal > W_tmp)
{
@@ -323,7 +322,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
- W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+ W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16;
/* error check */
if (W_lower == W_tmp) {
@@ -342,7 +341,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
- W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+ W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16;
while ( !(streamVal > W_tmp) )
{
@@ -352,7 +351,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
- W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+ W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16;
/* error check */
if (W_upper == W_tmp){
@@ -377,14 +376,27 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
* W_upper < 2^24 */
while ( !(W_upper & 0xFF000000) )
{
- /* read next byte from stream */
- if (streamData->full == 0) {
- streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8) | (*streamPtr++ & 0x00FF);
- streamData->full = 1;
+ if (streamPtr < streamData->stream + streamData->stream_size) {
+ /* read next byte from stream */
+ if (streamData->full == 0) {
+ streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8) | (*streamPtr++ & 0x00FF);
+ streamData->full = 1;
+ } else {
+ streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8) |
+ ((*streamPtr) >> 8);
+ streamData->full = 0;
+ }
} else {
- streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8) |
- WEBRTC_SPL_RSHIFT_U16(*streamPtr, 8);
- streamData->full = 0;
+ /* The read would go past the end of the stream. This can happen for
+ * the last two or three bytes; it is inherent to how the algorithm is
+ * implemented. Do not read from the bit stream; insert zeros instead. */
+ streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8);
+ if (streamData->full == 0) {
+ offset++; // We would have incremented the pointer in this case.
+ streamData->full = 1;
+ } else {
+ streamData->full = 0;
+ }
}
W_upper = WEBRTC_SPL_LSHIFT_W32(W_upper, 8);
}
@@ -392,7 +404,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
envCount++;
}
- streamData->stream_index = streamPtr - streamData->stream;
+ streamData->stream_index = streamPtr + offset - streamData->stream;
streamData->W_upper = W_upper;
streamData->streamval = streamVal;
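
The new branch above stops the decoder from reading past the end of the bitstream and shifts in zero bytes instead, while offset records how far the read pointer would have advanced so stream_index stays consistent. A minimal sketch of that pattern, with simplified names and types that are not the WebRTC structs:

#include <stddef.h>
#include <stdint.h>

/* Return the next byte of the stream, or a zero byte once the read
 * position has reached the end of the buffer. The overrun counter plays
 * the role of the 'offset' variable above: it counts reads that would
 * have advanced the pointer past the end. */
static uint8_t read_byte_or_zero(const uint8_t* stream, size_t size,
                                 size_t* pos, int* overrun) {
  if (*pos < size) {
    return stream[(*pos)++];  /* Normal case: consume a real byte. */
  }
  ++*overrun;                 /* Past the end: note the virtual advance... */
  return 0;                   /* ...and insert a zero byte instead. */
}
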
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
index 88c7e1abe84..2f649324e77 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
@@ -179,6 +179,21 @@ void WebRtcIsacfix_FilterMaLoopNeon(int16_t input0,
int32_t* ptr2);
#endif
+#if defined(MIPS32_LE)
+int WebRtcIsacfix_AutocorrMIPS(int32_t* __restrict r,
+ const int16_t* __restrict x,
+ int16_t N,
+ int16_t order,
+ int16_t* __restrict scale);
+
+void WebRtcIsacfix_FilterMaLoopMIPS(int16_t input0,
+ int16_t input1,
+ int32_t input2,
+ int32_t* ptr0,
+ int32_t* ptr1,
+ int32_t* ptr2);
+#endif
+
/* Function pointers associated with the above functions. */
typedef int (*AutocorrFix)(int32_t* __restrict r,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c
index 6bccb8c8393..6bd5843cd97 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c
@@ -326,7 +326,7 @@ int16_t WebRtcIsacfix_DecodePlcImpl(int16_t *signal_out16,
for( k = 0; k < lag0; k++ )
{
corr = WEBRTC_SPL_ADD_SAT_W32( corr, WEBRTC_SPL_ABS_W32(
- WEBRTC_SPL_SUB_SAT_W16(
+ WebRtcSpl_SubSatW16(
(ISACdec_obj->plcstr_obj).lastPitchLP[k],
(ISACdec_obj->plcstr_obj).prevPitchInvIn[
FRAMESAMPLES_HALF - 2*lag0 - 10 + i + k ] ) ) );
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c
index e209c0ee59a..daf0d629993 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c
@@ -15,18 +15,21 @@
*
*/
-#include "arith_routins.h"
-#include "bandwidth_estimator.h"
-#include "codec.h"
-#include "pitch_gain_tables.h"
-#include "pitch_lag_tables.h"
-#include "entropy_coding.h"
-#include "lpc_tables.h"
-#include "lpc_masking_model.h"
-#include "pitch_estimator.h"
-#include "structs.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h"
+
+#include <assert.h>
#include <stdio.h>
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h"
+
int WebRtcIsacfix_EncodeImpl(int16_t *in,
ISACFIX_EncInst_t *ISACenc_obj,
@@ -450,12 +453,14 @@ int WebRtcIsacfix_EncodeImpl(int16_t *in,
while (stream_length < MinBytes)
{
+ assert(stream_length >= 0);
if (stream_length & 0x0001){
ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed );
ISACenc_obj->bitstr_obj.stream[ WEBRTC_SPL_RSHIFT_W16(stream_length, 1) ] |= (uint16_t)(ISACenc_obj->bitstr_seed & 0xFF);
} else {
ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed );
- ISACenc_obj->bitstr_obj.stream[ WEBRTC_SPL_RSHIFT_W16(stream_length, 1) ] = WEBRTC_SPL_LSHIFT_U16(ISACenc_obj->bitstr_seed, 8);
+ ISACenc_obj->bitstr_obj.stream[stream_length / 2] =
+ ((uint16_t)ISACenc_obj->bitstr_seed << 8);
}
stream_length++;
}
@@ -467,7 +472,8 @@ int WebRtcIsacfix_EncodeImpl(int16_t *in,
}
else {
ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] &= 0x00FF;
- ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] += WEBRTC_SPL_LSHIFT_U16((MinBytes - usefulstr_len) & 0x00FF, 8);
+ ISACenc_obj->bitstr_obj.stream[usefulstr_len >> 1] +=
+ ((uint16_t)((MinBytes - usefulstr_len) & 0x00FF) << 8);
}
}
else
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
index 28d10357245..3fefc1a5dcc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
@@ -58,6 +58,17 @@ void WebRtcIsacfix_AllpassFilter2FixDec16Neon(
int32_t *filter_state_ch2);
#endif
+#if defined(MIPS_DSP_R1_LE)
+void WebRtcIsacfix_AllpassFilter2FixDec16MIPS(
+ int16_t *data_ch1,
+ int16_t *data_ch2,
+ const int16_t *factor_ch1,
+ const int16_t *factor_ch2,
+ const int length,
+ int32_t *filter_state_ch1,
+ int32_t *filter_state_ch2);
+#endif
+
#if defined(__cplusplus) || defined(c_plusplus)
}
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c
index 9c9d098aeef..64557e132d0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c
@@ -102,8 +102,8 @@ void WebRtcIsacfix_HighpassFilterFixDec32(int16_t *io,
#ifdef WEBRTC_ARCH_ARM_V7
{
- int tmp_coeff0 = 0;
- int tmp_coeff1 = 0;
+ register int tmp_coeff0;
+ register int tmp_coeff1;
__asm __volatile(
"ldr %[tmp_coeff0], [%[coeff]]\n\t"
"ldr %[tmp_coeff1], [%[coeff], #4]\n\t"
@@ -113,12 +113,12 @@ void WebRtcIsacfix_HighpassFilterFixDec32(int16_t *io,
"ldr %[tmp_coeff1], [%[coeff], #12]\n\t"
"smmulr %[a1], %[tmp_coeff0], %[state0]\n\t"
"smmulr %[b1], %[tmp_coeff1], %[state1]\n\t"
- :[a2]"+r"(a2),
- [b2]"+r"(b2),
- [a1]"+r"(a1),
- [b1]"+r"(b1),
- [tmp_coeff0]"+r"(tmp_coeff0),
- [tmp_coeff1]"+r"(tmp_coeff1)
+ :[a2]"=&r"(a2),
+ [b2]"=&r"(b2),
+ [a1]"=&r"(a1),
+ [b1]"=r"(b1),
+ [tmp_coeff0]"=&r"(tmp_coeff0),
+ [tmp_coeff1]"=&r"(tmp_coeff1)
:[coeff]"r"(coefficient),
[state0]"r"(state0),
[state1]"r"(state1)
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_mips.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_mips.c
new file mode 100644
index 00000000000..1887745b7c0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_mips.c
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h"
+
+// WebRtcIsacfix_AllpassFilter2FixDec16 function optimized for the MIPS DSP platform.
+// Bit-exact with WebRtcIsacfix_AllpassFilter2FixDec16C from filterbanks.c
+void WebRtcIsacfix_AllpassFilter2FixDec16MIPS(
+ int16_t *data_ch1, // Input and output in channel 1, in Q0
+ int16_t *data_ch2, // Input and output in channel 2, in Q0
+ const int16_t *factor_ch1, // Scaling factor for channel 1, in Q15
+ const int16_t *factor_ch2, // Scaling factor for channel 2, in Q15
+ const int length, // Length of the data buffers
+ int32_t *filter_state_ch1, // Filter state for channel 1, in Q16
+ int32_t *filter_state_ch2) { // Filter state for channel 2, in Q16
+
+ int32_t st0_ch1, st1_ch1; // channel1 state variables
+ int32_t st0_ch2, st1_ch2; // channel2 state variables
+ int32_t f_ch10, f_ch11, f_ch20, f_ch21; // factor variables
+ int32_t r0, r1, r2, r3, r4, r5; // temporary register variables
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ // Load all the state and factor variables
+ "lh %[f_ch10], 0(%[factor_ch1]) \n\t"
+ "lh %[f_ch20], 0(%[factor_ch2]) \n\t"
+ "lh %[f_ch11], 2(%[factor_ch1]) \n\t"
+ "lh %[f_ch21], 2(%[factor_ch2]) \n\t"
+ "lw %[st0_ch1], 0(%[filter_state_ch1]) \n\t"
+ "lw %[st1_ch1], 4(%[filter_state_ch1]) \n\t"
+ "lw %[st0_ch2], 0(%[filter_state_ch2]) \n\t"
+ "lw %[st1_ch2], 4(%[filter_state_ch2]) \n\t"
+ // Allpass filtering loop
+ "1: \n\t"
+ "lh %[r0], 0(%[data_ch1]) \n\t"
+ "lh %[r1], 0(%[data_ch2]) \n\t"
+ "addiu %[length], %[length], -1 \n\t"
+ "mul %[r2], %[r0], %[f_ch10] \n\t"
+ "mul %[r3], %[r1], %[f_ch20] \n\t"
+ "sll %[r0], %[r0], 16 \n\t"
+ "sll %[r1], %[r1], 16 \n\t"
+ "sll %[r2], %[r2], 1 \n\t"
+ "addq_s.w %[r2], %[r2], %[st0_ch1] \n\t"
+ "sll %[r3], %[r3], 1 \n\t"
+ "addq_s.w %[r3], %[r3], %[st0_ch2] \n\t"
+ "sra %[r2], %[r2], 16 \n\t"
+ "mul %[st0_ch1], %[f_ch10], %[r2] \n\t"
+ "sra %[r3], %[r3], 16 \n\t"
+ "mul %[st0_ch2], %[f_ch20], %[r3] \n\t"
+ "mul %[r4], %[r2], %[f_ch11] \n\t"
+ "mul %[r5], %[r3], %[f_ch21] \n\t"
+ "sll %[st0_ch1], %[st0_ch1], 1 \n\t"
+ "subq_s.w %[st0_ch1], %[r0], %[st0_ch1] \n\t"
+ "sll %[st0_ch2], %[st0_ch2], 1 \n\t"
+ "subq_s.w %[st0_ch2], %[r1], %[st0_ch2] \n\t"
+ "sll %[r4], %[r4], 1 \n\t"
+ "addq_s.w %[r4], %[r4], %[st1_ch1] \n\t"
+ "sll %[r5], %[r5], 1 \n\t"
+ "addq_s.w %[r5], %[r5], %[st1_ch2] \n\t"
+ "sra %[r4], %[r4], 16 \n\t"
+ "mul %[r0], %[r4], %[f_ch11] \n\t"
+ "sra %[r5], %[r5], 16 \n\t"
+ "mul %[r1], %[r5], %[f_ch21] \n\t"
+ "sh %[r4], 0(%[data_ch1]) \n\t"
+ "sh %[r5], 0(%[data_ch2]) \n\t"
+ "addiu %[data_ch1], %[data_ch1], 2 \n\t"
+ "sll %[r2], %[r2], 16 \n\t"
+ "sll %[r0], %[r0], 1 \n\t"
+ "subq_s.w %[st1_ch1], %[r2], %[r0] \n\t"
+ "sll %[r3], %[r3], 16 \n\t"
+ "sll %[r1], %[r1], 1 \n\t"
+ "subq_s.w %[st1_ch2], %[r3], %[r1] \n\t"
+ "bgtz %[length], 1b \n\t"
+ " addiu %[data_ch2], %[data_ch2], 2 \n\t"
+ // Store channel states
+ "sw %[st0_ch1], 0(%[filter_state_ch1]) \n\t"
+ "sw %[st1_ch1], 4(%[filter_state_ch1]) \n\t"
+ "sw %[st0_ch2], 0(%[filter_state_ch2]) \n\t"
+ "sw %[st1_ch2], 4(%[filter_state_ch2]) \n\t"
+ ".set pop \n\t"
+ : [f_ch10] "=&r" (f_ch10), [f_ch20] "=&r" (f_ch20),
+ [f_ch11] "=&r" (f_ch11), [f_ch21] "=&r" (f_ch21),
+ [st0_ch1] "=&r" (st0_ch1), [st1_ch1] "=&r" (st1_ch1),
+ [st0_ch2] "=&r" (st0_ch2), [st1_ch2] "=&r" (st1_ch2),
+ [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2),
+ [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5)
+ : [factor_ch1] "r" (factor_ch1), [factor_ch2] "r" (factor_ch2),
+ [filter_state_ch1] "r" (filter_state_ch1),
+ [filter_state_ch2] "r" (filter_state_ch2),
+ [data_ch1] "r" (data_ch1), [data_ch2] "r" (data_ch2),
+ [length] "r" (length)
+ : "memory", "hi", "lo"
+ );
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_mips.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_mips.c
new file mode 100644
index 00000000000..056dc275d39
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_mips.c
@@ -0,0 +1,365 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h"
+
+// MIPS-optimized implementation of the autocorrelation function in fixed point.
+// NOTE! Different from SPLIB-version in how it scales the signal.
+int WebRtcIsacfix_AutocorrMIPS(int32_t* __restrict r,
+ const int16_t* __restrict x,
+ int16_t N,
+ int16_t order,
+ int16_t* __restrict scale) {
+ int i = 0;
+ int16_t scaling = 0;
+ int16_t* in = (int16_t*)x;
+ int loop_size = (int)(N >> 3);
+ int count = (int)(N & 7);
+ // Declare temporary variables used as register values.
+ int32_t r0, r1, r2, r3;
+#if !defined(MIPS_DSP_R2_LE)
+ // For non-DSPR2 optimizations 4 more registers are used.
+ int32_t r4, r5, r6, r7;
+#endif
+
+ // Calculate r[0] and scaling needed.
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "mult $0, $0 \n\t"
+ // Loop is unrolled 8 times, set accumulator to zero in branch delay slot.
+ "beqz %[loop_size], 2f \n\t"
+ " mult $0, $0 \n\t"
+ "1: \n\t"
+ // Load 8 samples per loop iteration.
+#if defined(MIPS_DSP_R2_LE)
+ "ulw %[r0], 0(%[in]) \n\t"
+ "ulw %[r1], 4(%[in]) \n\t"
+ "ulw %[r2], 8(%[in]) \n\t"
+ "ulw %[r3], 12(%[in]) \n\t"
+#else
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 2(%[in]) \n\t"
+ "lh %[r2], 4(%[in]) \n\t"
+ "lh %[r3], 6(%[in]) \n\t"
+ "lh %[r4], 8(%[in]) \n\t"
+ "lh %[r5], 10(%[in]) \n\t"
+ "lh %[r6], 12(%[in]) \n\t"
+ "lh %[r7], 14(%[in]) \n\t"
+#endif
+ "addiu %[loop_size], %[loop_size], -1 \n\t"
+ // Multiply and accumulate.
+#if defined(MIPS_DSP_R2_LE)
+ "dpa.w.ph $ac0, %[r0], %[r0] \n\t"
+ "dpa.w.ph $ac0, %[r1], %[r1] \n\t"
+ "dpa.w.ph $ac0, %[r2], %[r2] \n\t"
+ "dpa.w.ph $ac0, %[r3], %[r3] \n\t"
+#else
+ "madd %[r0], %[r0] \n\t"
+ "madd %[r1], %[r1] \n\t"
+ "madd %[r2], %[r2] \n\t"
+ "madd %[r3], %[r3] \n\t"
+ "madd %[r4], %[r4] \n\t"
+ "madd %[r5], %[r5] \n\t"
+ "madd %[r6], %[r6] \n\t"
+ "madd %[r7], %[r7] \n\t"
+#endif
+ "bnez %[loop_size], 1b \n\t"
+ " addiu %[in], %[in], 16 \n\t"
+ "2: \n\t"
+ "beqz %[count], 4f \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ " extr.w %[r0], $ac0, 31 \n\t"
+#else
+ " mfhi %[r2] \n\t"
+#endif
+ // Process remaining samples (if any).
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "madd %[r0], %[r0] \n\t"
+ "bnez %[count], 3b \n\t"
+ " addiu %[in], %[in], 2 \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "extr.w %[r0], $ac0, 31 \n\t"
+#else
+ "mfhi %[r2] \n\t"
+#endif
+ "4: \n\t"
+#if !defined(MIPS_DSP_R1_LE)
+ "mflo %[r3] \n\t"
+ "sll %[r0], %[r2], 1 \n\t"
+ "srl %[r1], %[r3], 31 \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+#endif
+ // Calculate scaling (the value of shifting).
+ "clz %[r1], %[r0] \n\t"
+ "addiu %[r1], %[r1], -32 \n\t"
+ "subu %[scaling], $0, %[r1] \n\t"
+ "slti %[r1], %[r0], 0x1 \n\t"
+ "movn %[scaling], $0, %[r1] \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "extrv.w %[r0], $ac0, %[scaling] \n\t"
+ "mfhi %[r2], $ac0 \n\t"
+#else
+ "addiu %[r1], %[scaling], -32 \n\t"
+ "subu %[r1], $0, %[r1] \n\t"
+ "sllv %[r1], %[r2], %[r1] \n\t"
+ "srlv %[r0], %[r3], %[scaling] \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+#endif
+ "slti %[r1], %[scaling], 32 \n\t"
+ "movz %[r0], %[r2], %[r1] \n\t"
+ ".set pop \n\t"
+ : [loop_size] "+r" (loop_size), [in] "+r" (in), [r0] "=&r" (r0),
+ [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3),
+#if !defined(MIPS_DSP_R2_LE)
+ [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7),
+#endif
+ [count] "+r" (count), [scaling] "=r" (scaling)
+ : [N] "r" (N)
+ : "memory", "hi", "lo"
+ );
+ r[0] = r0;
+
+ // Correlation calculation is divided into 3 cases depending on the scaling
+ // value (different accumulator manipulation is needed). Three slightly
+ // different loops are written in order to avoid branches inside the loops.
+ if (scaling == 0) {
+ // In this case, the result will be in the low part of the accumulator.
+ for (i = 1; i < order + 1; i++) {
+ in = (int16_t*)x;
+ int16_t* in1 = (int16_t*)x + i;
+ count = N - i;
+ loop_size = (count) >> 2;
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "mult $0, $0 \n\t"
+ "beqz %[loop_size], 2f \n\t"
+ " andi %[count], %[count], 0x3 \n\t"
+ // Loop processing 4 pairs of samples per iteration.
+ "1: \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "ulw %[r0], 0(%[in]) \n\t"
+ "ulw %[r1], 0(%[in1]) \n\t"
+ "ulw %[r2], 4(%[in]) \n\t"
+ "ulw %[r3], 4(%[in1]) \n\t"
+#else
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "lh %[r2], 2(%[in]) \n\t"
+ "lh %[r3], 2(%[in1]) \n\t"
+ "lh %[r4], 4(%[in]) \n\t"
+ "lh %[r5], 4(%[in1]) \n\t"
+ "lh %[r6], 6(%[in]) \n\t"
+ "lh %[r7], 6(%[in1]) \n\t"
+#endif
+ "addiu %[loop_size], %[loop_size], -1 \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "dpa.w.ph $ac0, %[r0], %[r1] \n\t"
+ "dpa.w.ph $ac0, %[r2], %[r3] \n\t"
+#else
+ "madd %[r0], %[r1] \n\t"
+ "madd %[r2], %[r3] \n\t"
+ "madd %[r4], %[r5] \n\t"
+ "madd %[r6], %[r7] \n\t"
+#endif
+ "addiu %[in], %[in], 8 \n\t"
+ "bnez %[loop_size], 1b \n\t"
+ " addiu %[in1], %[in1], 8 \n\t"
+ "2: \n\t"
+ "beqz %[count], 4f \n\t"
+ " mflo %[r0] \n\t"
+ // Process remaining samples (if any).
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "madd %[r0], %[r1] \n\t"
+ "bnez %[count], 3b \n\t"
+ " addiu %[in1], %[in1], 2 \n\t"
+ "mflo %[r0] \n\t"
+ "4: \n\t"
+ ".set pop \n\t"
+ : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1),
+#if !defined(MIPS_DSP_R2_LE)
+ [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7),
+#endif
+ [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3),
+ [count] "+r" (count)
+ :
+ : "memory", "hi", "lo"
+ );
+ r[i] = r0;
+ }
+ } else if (scaling == 32) {
+ // In this case, the result will be in the high part of the accumulator.
+ for (i = 1; i < order + 1; i++) {
+ in = (int16_t*)x;
+ int16_t* in1 = (int16_t*)x + i;
+ count = N - i;
+ loop_size = (count) >> 2;
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "mult $0, $0 \n\t"
+ "beqz %[loop_size], 2f \n\t"
+ " andi %[count], %[count], 0x3 \n\t"
+ // Loop processing 4 pairs of samples per iteration.
+ "1: \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "ulw %[r0], 0(%[in]) \n\t"
+ "ulw %[r1], 0(%[in1]) \n\t"
+ "ulw %[r2], 4(%[in]) \n\t"
+ "ulw %[r3], 4(%[in1]) \n\t"
+#else
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "lh %[r2], 2(%[in]) \n\t"
+ "lh %[r3], 2(%[in1]) \n\t"
+ "lh %[r4], 4(%[in]) \n\t"
+ "lh %[r5], 4(%[in1]) \n\t"
+ "lh %[r6], 6(%[in]) \n\t"
+ "lh %[r7], 6(%[in1]) \n\t"
+#endif
+ "addiu %[loop_size], %[loop_size], -1 \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "dpa.w.ph $ac0, %[r0], %[r1] \n\t"
+ "dpa.w.ph $ac0, %[r2], %[r3] \n\t"
+#else
+ "madd %[r0], %[r1] \n\t"
+ "madd %[r2], %[r3] \n\t"
+ "madd %[r4], %[r5] \n\t"
+ "madd %[r6], %[r7] \n\t"
+#endif
+ "addiu %[in], %[in], 8 \n\t"
+ "bnez %[loop_size], 1b \n\t"
+ " addiu %[in1], %[in1], 8 \n\t"
+ "2: \n\t"
+ "beqz %[count], 4f \n\t"
+ " mfhi %[r0] \n\t"
+ // Process remaining samples (if any).
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "madd %[r0], %[r1] \n\t"
+ "bnez %[count], 3b \n\t"
+ " addiu %[in1], %[in1], 2 \n\t"
+ "mfhi %[r0] \n\t"
+ "4: \n\t"
+ ".set pop \n\t"
+ : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1),
+#if !defined(MIPS_DSP_R2_LE)
+ [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7),
+#endif
+ [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3),
+ [count] "+r" (count)
+ :
+ : "memory", "hi", "lo"
+ );
+ r[i] = r0;
+ }
+ } else {
+ // In this case, the result is obtained by combining the low and high parts
+ // of the accumulator.
+#if !defined(MIPS_DSP_R1_LE)
+ int32_t tmp_shift = 32 - scaling;
+#endif
+ for (i = 1; i < order + 1; i++) {
+ in = (int16_t*)x;
+ int16_t* in1 = (int16_t*)x + i;
+ count = N - i;
+ loop_size = (count) >> 2;
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "mult $0, $0 \n\t"
+ "beqz %[loop_size], 2f \n\t"
+ " andi %[count], %[count], 0x3 \n\t"
+ "1: \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "ulw %[r0], 0(%[in]) \n\t"
+ "ulw %[r1], 0(%[in1]) \n\t"
+ "ulw %[r2], 4(%[in]) \n\t"
+ "ulw %[r3], 4(%[in1]) \n\t"
+#else
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "lh %[r2], 2(%[in]) \n\t"
+ "lh %[r3], 2(%[in1]) \n\t"
+ "lh %[r4], 4(%[in]) \n\t"
+ "lh %[r5], 4(%[in1]) \n\t"
+ "lh %[r6], 6(%[in]) \n\t"
+ "lh %[r7], 6(%[in1]) \n\t"
+#endif
+ "addiu %[loop_size], %[loop_size], -1 \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "dpa.w.ph $ac0, %[r0], %[r1] \n\t"
+ "dpa.w.ph $ac0, %[r2], %[r3] \n\t"
+#else
+ "madd %[r0], %[r1] \n\t"
+ "madd %[r2], %[r3] \n\t"
+ "madd %[r4], %[r5] \n\t"
+ "madd %[r6], %[r7] \n\t"
+#endif
+ "addiu %[in], %[in], 8 \n\t"
+ "bnez %[loop_size], 1b \n\t"
+ " addiu %[in1], %[in1], 8 \n\t"
+ "2: \n\t"
+ "beqz %[count], 4f \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ " extrv.w %[r0], $ac0, %[scaling] \n\t"
+#else
+ " mfhi %[r0] \n\t"
+#endif
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "madd %[r0], %[r1] \n\t"
+ "bnez %[count], 3b \n\t"
+ " addiu %[in1], %[in1], 2 \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "extrv.w %[r0], $ac0, %[scaling] \n\t"
+#else
+ "mfhi %[r0] \n\t"
+#endif
+ "4: \n\t"
+#if !defined(MIPS_DSP_R1_LE)
+ "mflo %[r1] \n\t"
+ "sllv %[r0], %[r0], %[tmp_shift] \n\t"
+ "srlv %[r1], %[r1], %[scaling] \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+#endif
+ ".set pop \n\t"
+ : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1),
+#if !defined(MIPS_DSP_R2_LE)
+ [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7),
+#endif
+ [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3),
+ [count] "+r" (count)
+ : [scaling] "r" (scaling)
+#if !defined(MIPS_DSP_R1_LE)
+ , [tmp_shift] "r" (tmp_shift)
+#endif
+ : "memory", "hi", "lo"
+ );
+ r[i] = r0;
+ }
+ }
+ *scale = scaling;
+
+ return (order + 1);
+}
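
For reference, the routine above is the MIPS counterpart of the generic fixed-point autocorrelation: it accumulates the zero-lag energy, derives a right shift ("scaling") so the result fits in 32 bits, and applies the same shift to every lag. A rough plain-C sketch of that behaviour (a hypothetical helper, not the WebRTC reference implementation and not bit-exact with it):

#include <stdint.h>

static int autocorr_scaled(int32_t* r, const int16_t* x, int n, int order,
                           int16_t* scale) {
  int64_t energy = 0;
  int64_t sum;
  int scaling = 0;
  int lag, i;
  for (i = 0; i < n; ++i)
    energy += (int32_t)x[i] * x[i];      /* Zero-lag energy, 64-bit. */
  while ((energy >> scaling) > INT32_MAX)
    ++scaling;                           /* Shift until it fits in 32 bits. */
  for (lag = 0; lag <= order; ++lag) {
    sum = 0;
    for (i = 0; i < n - lag; ++i)
      sum += (int32_t)x[i] * x[i + lag];
    r[lag] = (int32_t)(sum >> scaling);  /* Same shift for every lag. */
  }
  *scale = (int16_t)scaling;
  return order + 1;
}
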
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
index 8baa30738f6..7635908094e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
@@ -179,7 +179,7 @@ int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst)
}
/****************************************************************************
- * WebRtcAecm_InitNeon(...)
+ * WebRtcIsacfix_InitNeon(...)
*
* This function initializes function pointers for ARM Neon platform.
*/
@@ -200,6 +200,23 @@ static void WebRtcIsacfix_InitNeon(void) {
#endif
/****************************************************************************
+ * WebRtcIsacfix_InitMIPS(...)
+ *
+ * This function initializes function pointers for MIPS platform.
+ */
+
+#if defined(MIPS32_LE)
+static void WebRtcIsacfix_InitMIPS(void) {
+ WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrMIPS;
+ WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopMIPS;
+#if defined(MIPS_DSP_R1_LE)
+ WebRtcIsacfix_AllpassFilter2FixDec16 =
+ WebRtcIsacfix_AllpassFilter2FixDec16MIPS;
+#endif
+}
+#endif
+
+/****************************************************************************
* WebRtcIsacfix_EncoderInit(...)
*
* This function initializes a ISAC instance prior to the encoder calls.
@@ -296,6 +313,10 @@ int16_t WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct *ISAC_main_inst,
WebRtcIsacfix_InitNeon();
#endif
+#if defined(MIPS32_LE)
+ WebRtcIsacfix_InitMIPS();
+#endif
+
return statusInit;
}
@@ -587,15 +608,11 @@ int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst,
{
ISACFIX_SubStruct *ISAC_inst;
Bitstr_dec streamdata;
- uint16_t partOfStream[5];
#ifndef WEBRTC_ARCH_BIG_ENDIAN
int k;
#endif
int16_t err;
- /* Set stream pointer to point at partOfStream */
- streamdata.stream = (uint16_t *)partOfStream;
-
/* typecast pointer to real structure */
ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
@@ -675,15 +692,11 @@ int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
{
ISACFIX_SubStruct *ISAC_inst;
Bitstr_dec streamdata;
- uint16_t partOfStream[5];
#ifndef WEBRTC_ARCH_BIG_ENDIAN
int k;
#endif
int16_t err;
- /* Set stream pointer to point at partOfStream */
- streamdata.stream = (uint16_t *)partOfStream;
-
/* typecast pointer to real structure */
ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
@@ -790,7 +803,7 @@ int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
return -1;
}
- (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (uint16_t *)encoded;
+ ISAC_inst->ISACdec_obj.bitstr_obj.stream_size = (len + 1) >> 1;
/* convert bitstream from int16_t to bytes */
#ifndef WEBRTC_ARCH_BIG_ENDIAN
@@ -891,7 +904,7 @@ int16_t WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
return -1;
}
- (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (uint16_t *)encoded;
+ ISAC_inst->ISACdec_obj.bitstr_obj.stream_size = (len + 1) >> 1;
/* convert bitstream from int16_t to bytes */
#ifndef WEBRTC_ARCH_BIG_ENDIAN
@@ -1266,15 +1279,11 @@ int16_t WebRtcIsacfix_ReadFrameLen(const int16_t* encoded,
int16_t* frameLength)
{
Bitstr_dec streamdata;
- uint16_t partOfStream[5];
#ifndef WEBRTC_ARCH_BIG_ENDIAN
int k;
#endif
int16_t err;
- /* Set stream pointer to point at partOfStream */
- streamdata.stream = (uint16_t *)partOfStream;
-
streamdata.W_upper = 0xFFFFFFFF;
streamdata.streamval = 0;
streamdata.stream_index = 0;
@@ -1315,15 +1324,11 @@ int16_t WebRtcIsacfix_ReadBwIndex(const int16_t* encoded,
int16_t* rateIndex)
{
Bitstr_dec streamdata;
- uint16_t partOfStream[5];
#ifndef WEBRTC_ARCH_BIG_ENDIAN
int k;
#endif
int16_t err;
- /* Set stream pointer to point at partOfStream */
- streamdata.stream = (uint16_t *)partOfStream;
-
streamdata.W_upper = 0xFFFFFFFF;
streamdata.streamval = 0;
streamdata.stream_index = 0;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi
index 87c98606a11..a18a803d659 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi
@@ -85,6 +85,30 @@
'pitch_filter_c.c',
],
}],
+ ['target_arch=="mipsel"', {
+ 'sources': [
+ 'filters_mips.c',
+ 'lattice_mips.c',
+ ],
+ 'sources!': [
+ 'lattice_c.c',
+ ],
+ 'conditions': [
+ ['mips_dsp_rev>0', {
+ 'sources': [
+ 'filterbanks_mips.c',
+ ],
+ }],
+ ['mips_dsp_rev>1', {
+ 'sources': [
+ 'pitch_filter_mips.c',
+ ],
+ 'sources!': [
+ 'pitch_filter_c.c',
+ ],
+ }],
+ ],
+ }],
],
},
],
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c
new file mode 100644
index 00000000000..c596922168e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c
@@ -0,0 +1,327 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/settings.h"
+#include "webrtc/typedefs.h"
+
+// Filter ar_g_Q0[] and ar_f_Q0[] through an AR filter with coefficients
+// cth_Q15[] and sth_Q15[].
+void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0, // Input samples
+ int16_t* ar_f_Q0, // Input samples
+ int16_t* cth_Q15, // Filter coefficients
+ int16_t* sth_Q15, // Filter coefficients
+ int16_t order_coef) { // order of the filter
+ int n = 0;
+
+ for (n = 0; n < HALF_SUBFRAMELEN - 1; n++) {
+ int count = order_coef - 1;
+ int offset;
+#if !defined(MIPS_DSP_R1_LE)
+ int16_t* tmp_cth;
+ int16_t* tmp_sth;
+ int16_t* tmp_arg;
+ int32_t max_q16 = 0x7fff;
+ int32_t min_q16 = 0xffff8000;
+#endif
+ // Declare variables used as temporary registers.
+ int32_t r0, r1, r2, t0, t1, t2, t_ar;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "bltz %[count], 2f \n\t"
+ " lh %[t_ar], 0(%[tmp]) \n\t"
+ // Inner loop
+ "1: \n\t"
+ "sll %[offset], %[count], 1 \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "lhx %[r0], %[offset](%[cth_Q15]) \n\t"
+ "lhx %[r1], %[offset](%[sth_Q15]) \n\t"
+ "lhx %[r2], %[offset](%[ar_g_Q0]) \n\t"
+#else
+ "addu %[tmp_cth], %[cth_Q15], %[offset] \n\t"
+ "addu %[tmp_sth], %[sth_Q15], %[offset] \n\t"
+ "addu %[tmp_arg], %[ar_g_Q0], %[offset] \n\t"
+ "lh %[r0], 0(%[tmp_cth]) \n\t"
+ "lh %[r1], 0(%[tmp_sth]) \n\t"
+ "lh %[r2], 0(%[tmp_arg]) \n\t"
+#endif
+ "mul %[t0], %[r0], %[t_ar] \n\t"
+ "mul %[t1], %[r1], %[t_ar] \n\t"
+ "mul %[t2], %[r1], %[r2] \n\t"
+ "mul %[r0], %[r0], %[r2] \n\t"
+ "subu %[t0], %[t0], %[t2] \n\t"
+ "addu %[t1], %[t1], %[r0] \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "shra_r.w %[t1], %[t1], 15 \n\t"
+ "shra_r.w %[t0], %[t0], 15 \n\t"
+#else
+ "addiu %[t1], %[t1], 0x4000 \n\t"
+ "sra %[t1], %[t1], 15 \n\t"
+ "addiu %[t0], %[t0], 0x4000 \n\t"
+ "sra %[t0], %[t0], 15 \n\t"
+#endif
+ "addiu %[offset], %[offset], 2 \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "shll_s.w %[t1], %[t1], 16 \n\t"
+ "shll_s.w %[t_ar], %[t0], 16 \n\t"
+#else
+ "slt %[r0], %[t1], %[max_q16] \n\t"
+ "slt %[r1], %[t0], %[max_q16] \n\t"
+ "movz %[t1], %[max_q16], %[r0] \n\t"
+ "movz %[t0], %[max_q16], %[r1] \n\t"
+#endif
+ "addu %[offset], %[offset], %[ar_g_Q0] \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "sra %[t1], %[t1], 16 \n\t"
+ "sra %[t_ar], %[t_ar], 16 \n\t"
+#else
+ "slt %[r0], %[t1], %[min_q16] \n\t"
+ "slt %[r1], %[t0], %[min_q16] \n\t"
+ "movn %[t1], %[min_q16], %[r0] \n\t"
+ "movn %[t0], %[min_q16], %[r1] \n\t"
+ "addu %[t_ar], $zero, %[t0] \n\t"
+#endif
+ "sh %[t1], 0(%[offset]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[count], %[count], -1 \n\t"
+ "2: \n\t"
+ "sh %[t_ar], 0(%[tmp]) \n\t"
+ "sh %[t_ar], 0(%[ar_g_Q0]) \n\t"
+ ".set pop \n\t"
+ : [t_ar] "=&r" (t_ar), [count] "+r" (count), [offset] "=&r" (offset),
+ [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [t0] "=&r" (t0),
+#if !defined(MIPS_DSP_R1_LE)
+ [tmp_cth] "=&r" (tmp_cth), [tmp_sth] "=&r" (tmp_sth),
+ [tmp_arg] "=&r" (tmp_arg),
+#endif
+ [t1] "=&r" (t1), [t2] "=&r" (t2)
+ : [tmp] "r" (&ar_f_Q0[n+1]), [cth_Q15] "r" (cth_Q15),
+#if !defined(MIPS_DSP_R1_LE)
+ [max_q16] "r" (max_q16), [min_q16] "r" (min_q16),
+#endif
+ [sth_Q15] "r" (sth_Q15), [ar_g_Q0] "r" (ar_g_Q0)
+ : "memory", "hi", "lo"
+ );
+ }
+}
+
+// MIPS optimization of the inner loop used for function
+// WebRtcIsacfix_NormLatticeFilterMa(). It does:
+//
+// for 0 <= n < HALF_SUBFRAMELEN - 1:
+//   *ptr2 = input2 * ((*ptr2) + input0 * (*ptr0));
+// *ptr1 = input1 * (*ptr0) + input0 * (*ptr2);
+//
+// Note: functions WebRtcIsacfix_FilterMaLoopMIPS and WebRtcIsacfix_FilterMaLoopC
+// are not bit-exact. The accuracy of the MIPS function is the same or better.
+void WebRtcIsacfix_FilterMaLoopMIPS(int16_t input0, // Filter coefficient
+ int16_t input1, // Filter coefficient
+ int32_t input2, // Inverse coeff (1/input1)
+ int32_t* ptr0, // Sample buffer
+ int32_t* ptr1, // Sample buffer
+ int32_t* ptr2) { // Sample buffer
+#if defined(MIPS_DSP_R2_LE)
+ // MIPS DSPR2 version. The 4 available accumulators allow unrolling the loop
+ // 4 times. This variant is not bit-exact with WebRtcIsacfix_FilterMaLoopC,
+ // since we are exploiting 64-bit accumulators. The accuracy of the MIPS DSPR2
+ // function is the same or better.
+ int n = (HALF_SUBFRAMELEN - 1) >> 2;
+ int m = (HALF_SUBFRAMELEN - 1) & 3;
+
+ int r0, r1, r2, r3;
+ int t0, t1, t2, t3;
+ int s0, s1, s2, s3;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "lw %[r0], 0(%[ptr0]) \n\t"
+ "lw %[r1], 4(%[ptr0]) \n\t"
+ "lw %[r2], 8(%[ptr0]) \n\t"
+ "lw %[r3], 12(%[ptr0]) \n\t"
+ "mult $ac0, %[r0], %[input0] \n\t"
+ "mult $ac1, %[r1], %[input0] \n\t"
+ "mult $ac2, %[r2], %[input0] \n\t"
+ "mult $ac3, %[r3], %[input0] \n\t"
+ "lw %[t0], 0(%[ptr2]) \n\t"
+ "extr_rs.w %[s0], $ac0, 15 \n\t"
+ "extr_rs.w %[s1], $ac1, 15 \n\t"
+ "extr_rs.w %[s2], $ac2, 15 \n\t"
+ "extr_rs.w %[s3], $ac3, 15 \n\t"
+ "lw %[t1], 4(%[ptr2]) \n\t"
+ "lw %[t2], 8(%[ptr2]) \n\t"
+ "lw %[t3], 12(%[ptr2]) \n\t"
+ "addu %[t0], %[t0], %[s0] \n\t"
+ "addu %[t1], %[t1], %[s1] \n\t"
+ "addu %[t2], %[t2], %[s2] \n\t"
+ "addu %[t3], %[t3], %[s3] \n\t"
+ "mult $ac0, %[t0], %[input2] \n\t"
+ "mult $ac1, %[t1], %[input2] \n\t"
+ "mult $ac2, %[t2], %[input2] \n\t"
+ "mult $ac3, %[t3], %[input2] \n\t"
+ "addiu %[ptr0], %[ptr0], 16 \n\t"
+ "extr_rs.w %[t0], $ac0, 16 \n\t"
+ "extr_rs.w %[t1], $ac1, 16 \n\t"
+ "extr_rs.w %[t2], $ac2, 16 \n\t"
+ "extr_rs.w %[t3], $ac3, 16 \n\t"
+ "addiu %[n], %[n], -1 \n\t"
+ "mult $ac0, %[r0], %[input1] \n\t"
+ "mult $ac1, %[r1], %[input1] \n\t"
+ "mult $ac2, %[r2], %[input1] \n\t"
+ "mult $ac3, %[r3], %[input1] \n\t"
+ "sw %[t0], 0(%[ptr2]) \n\t"
+ "extr_rs.w %[s0], $ac0, 15 \n\t"
+ "extr_rs.w %[s1], $ac1, 15 \n\t"
+ "extr_rs.w %[s2], $ac2, 15 \n\t"
+ "extr_rs.w %[s3], $ac3, 15 \n\t"
+ "sw %[t1], 4(%[ptr2]) \n\t"
+ "sw %[t2], 8(%[ptr2]) \n\t"
+ "sw %[t3], 12(%[ptr2]) \n\t"
+ "mult $ac0, %[t0], %[input0] \n\t"
+ "mult $ac1, %[t1], %[input0] \n\t"
+ "mult $ac2, %[t2], %[input0] \n\t"
+ "mult $ac3, %[t3], %[input0] \n\t"
+ "addiu %[ptr2], %[ptr2], 16 \n\t"
+ "extr_rs.w %[t0], $ac0, 15 \n\t"
+ "extr_rs.w %[t1], $ac1, 15 \n\t"
+ "extr_rs.w %[t2], $ac2, 15 \n\t"
+ "extr_rs.w %[t3], $ac3, 15 \n\t"
+ "addu %[t0], %[t0], %[s0] \n\t"
+ "addu %[t1], %[t1], %[s1] \n\t"
+ "addu %[t2], %[t2], %[s2] \n\t"
+ "addu %[t3], %[t3], %[s3] \n\t"
+ "sw %[t0], 0(%[ptr1]) \n\t"
+ "sw %[t1], 4(%[ptr1]) \n\t"
+ "sw %[t2], 8(%[ptr1]) \n\t"
+ "sw %[t3], 12(%[ptr1]) \n\t"
+ "bgtz %[n], 1b \n\t"
+ " addiu %[ptr1], %[ptr1], 16 \n\t"
+ "beq %[m], %0, 3f \n\t"
+ " nop \n\t"
+ "2: \n\t"
+ "lw %[r0], 0(%[ptr0]) \n\t"
+ "lw %[t0], 0(%[ptr2]) \n\t"
+ "addiu %[ptr0], %[ptr0], 4 \n\t"
+ "mult $ac0, %[r0], %[input0] \n\t"
+ "mult $ac1, %[r0], %[input1] \n\t"
+ "extr_rs.w %[r1], $ac0, 15 \n\t"
+ "extr_rs.w %[t1], $ac1, 15 \n\t"
+ "addu %[t0], %[t0], %[r1] \n\t"
+ "mult $ac0, %[t0], %[input2] \n\t"
+ "extr_rs.w %[t0], $ac0, 16 \n\t"
+ "sw %[t0], 0(%[ptr2]) \n\t"
+ "mult $ac0, %[t0], %[input0] \n\t"
+ "addiu %[ptr2], %[ptr2], 4 \n\t"
+ "addiu %[m], %[m], -1 \n\t"
+ "extr_rs.w %[t0], $ac0, 15 \n\t"
+ "addu %[t0], %[t0], %[t1] \n\t"
+ "sw %[t0], 0(%[ptr1]) \n\t"
+ "bgtz %[m], 2b \n\t"
+ " addiu %[ptr1], %[ptr1], 4 \n\t"
+ "3: \n\t"
+ ".set pop \n\t"
+ : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2),
+ [r3] "=&r" (r3), [t0] "=&r" (t0), [t1] "=&r" (t1),
+ [t2] "=&r" (t2), [t3] "=&r" (t3), [s0] "=&r" (s0),
+ [s1] "=&r" (s1), [s2] "=&r" (s2), [s3] "=&r" (s3),
+ [ptr0] "+r" (ptr0), [ptr1] "+r" (ptr1), [m] "+r" (m),
+ [ptr2] "+r" (ptr2), [n] "+r" (n)
+ : [input0] "r" (input0), [input1] "r" (input1),
+ [input2] "r" (input2)
+ : "memory", "hi", "lo", "$ac1hi", "$ac1lo", "$ac2hi",
+ "$ac2lo", "$ac3hi", "$ac3lo"
+ );
+#else
+ // Non-DSPR2 version of the function. The accumulators are avoided because of
+ // their large latencies. This variant is bit-exact with the C code.
+ int n = HALF_SUBFRAMELEN - 1;
+ int32_t t16a, t16b;
+ int32_t r0, r1, r2, r3, r4;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "sra %[t16a], %[input2], 16 \n\t"
+ "andi %[t16b], %[input2], 0xFFFF \n\t"
+#if defined(MIPS32R2_LE)
+ "seh %[t16b], %[t16b] \n\t"
+ "seh %[input0], %[input0] \n\t"
+ "seh %[input1], %[input1] \n\t"
+#else
+ "sll %[t16b], %[t16b], 16 \n\t"
+ "sra %[t16b], %[t16b], 16 \n\t"
+ "sll %[input0], %[input0], 16 \n\t"
+ "sra %[input0], %[input0], 16 \n\t"
+ "sll %[input1], %[input1], 16 \n\t"
+ "sra %[input1], %[input1], 16 \n\t"
+#endif
+ "addiu %[r0], %[t16a], 1 \n\t"
+ "slt %[r1], %[t16b], $zero \n\t"
+ "movn %[t16a], %[r0], %[r1] \n\t"
+ "1: \n\t"
+ "lw %[r0], 0(%[ptr0]) \n\t"
+ "lw %[r1], 0(%[ptr2]) \n\t"
+ "addiu %[ptr0], %[ptr0], 4 \n\t"
+ "sra %[r2], %[r0], 16 \n\t"
+ "andi %[r0], %[r0], 0xFFFF \n\t"
+ "mul %[r3], %[r2], %[input0] \n\t"
+ "mul %[r4], %[r0], %[input0] \n\t"
+ "mul %[r2], %[r2], %[input1] \n\t"
+ "mul %[r0], %[r0], %[input1] \n\t"
+ "addiu %[ptr2], %[ptr2], 4 \n\t"
+ "sll %[r3], %[r3], 1 \n\t"
+ "sra %[r4], %[r4], 1 \n\t"
+ "addiu %[r4], %[r4], 0x2000 \n\t"
+ "sra %[r4], %[r4], 14 \n\t"
+ "addu %[r3], %[r3], %[r4] \n\t"
+ "addu %[r1], %[r1], %[r3] \n\t"
+ "sra %[r3], %[r1], 16 \n\t"
+ "andi %[r4], %[r1], 0xFFFF \n\t"
+ "sra %[r4], %[r4], 1 \n\t"
+ "mul %[r1], %[r1], %[t16a] \n\t"
+ "mul %[r3], %[r3], %[t16b] \n\t"
+ "mul %[r4], %[r4], %[t16b] \n\t"
+ "sll %[r2], %[r2], 1 \n\t"
+ "sra %[r0], %[r0], 1 \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "addu %[r0], %[r0], %[r2] \n\t"
+ "addiu %[n], %[n], -1 \n\t"
+ "addu %[r1], %[r1], %[r3] \n\t"
+ "addiu %[r4], %[r4], 0x4000 \n\t"
+ "sra %[r4], %[r4], 15 \n\t"
+ "addu %[r1], %[r1], %[r4] \n\t"
+ "sra %[r2], %[r1], 16 \n\t"
+ "andi %[r3], %[r1], 0xFFFF \n\t"
+ "mul %[r3], %[r3], %[input0] \n\t"
+ "mul %[r2], %[r2], %[input0] \n\t"
+ "sw %[r1], -4(%[ptr2]) \n\t"
+ "sra %[r3], %[r3], 1 \n\t"
+ "addiu %[r3], %[r3], 0x2000 \n\t"
+ "sra %[r3], %[r3], 14 \n\t"
+ "addu %[r0], %[r0], %[r3] \n\t"
+ "sll %[r2], %[r2], 1 \n\t"
+ "addu %[r0], %[r0], %[r2] \n\t"
+ "sw %[r0], 0(%[ptr1]) \n\t"
+ "bgtz %[n], 1b \n\t"
+ " addiu %[ptr1], %[ptr1], 4 \n\t"
+ ".set pop \n\t"
+ : [t16a] "=&r" (t16a), [t16b] "=&r" (t16b), [r0] "=&r" (r0),
+ [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3),
+ [r4] "=&r" (r4), [ptr0] "+r" (ptr0), [ptr1] "+r" (ptr1),
+ [ptr2] "+r" (ptr2), [n] "+r" (n)
+ : [input0] "r" (input0), [input1] "r" (input1),
+ [input2] "r" (input2)
+ : "hi", "lo", "memory"
+ );
+#endif
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
index 0dc8174399e..deba0d5e29f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
@@ -834,13 +834,15 @@ void WebRtcIsacfix_GetLpcCoef(int16_t *inLoQ0,
/* bandwidth expansion */
for (n = 1; n <= ORDERLO; n++) {
- a_LOQ11[n] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(kPolyVecLo[n-1], a_LOQ11[n]);
+ a_LOQ11[n] = (int16_t) ((WEBRTC_SPL_MUL_16_16(
+ kPolyVecLo[n-1], a_LOQ11[n]) + ((int32_t) (1 << 14))) >> 15);
}
polyHI[0] = a_HIQ12[0];
for (n = 1; n <= ORDERHI; n++) {
- a_HIQ12[n] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(kPolyVecHi[n-1], a_HIQ12[n]);
+ a_HIQ12[n] = (int16_t) ((WEBRTC_SPL_MUL_16_16(
+ kPolyVecHi[n-1], a_HIQ12[n]) + ((int32_t) (1 << 14))) >> 15);
polyHI[n] = a_HIQ12[n];
}
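
The hunk above replaces WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND with its expansion so the rounding is explicit: multiply two 16-bit values, add 1 << 14 so the subsequent shift rounds to nearest, then shift right by 15. A small illustration of the same arithmetic (a hypothetical helper, not a WebRTC API):

#include <stdint.h>

/* Rounded Q15 multiply: (a * b + 2^14) >> 15.
 * Example: 0x4000 * 0x4000 (0.5 * 0.5 in Q15) yields 0x2000 (0.25). */
static int16_t mul_q15_round(int16_t a, int16_t b) {
  return (int16_t)(((int32_t)a * b + (1 << 14)) >> 15);
}
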
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c
new file mode 100644
index 00000000000..8334f7eb18b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c
@@ -0,0 +1,133 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h"
+
+void WebRtcIsacfix_PitchFilterCore(int loopNumber,
+ int16_t gain,
+ int index,
+ int16_t sign,
+ int16_t* inputState,
+ int16_t* outputBuf2,
+ const int16_t* coefficient,
+ int16_t* inputBuf,
+ int16_t* outputBuf,
+ int* index2) {
+ int ind2t = *index2;
+ int i = 0;
+ int16_t* out2_pos2 = &outputBuf2[PITCH_BUFFSIZE - (index + 2)] + ind2t;
+ int32_t w1, w2, w3, w4, w5, gain32, sign32;
+ int32_t coef1, coef2, coef3, coef4, coef5 = 0;
+ // Define damp factors as int32_t (pair of int16_t)
+ int32_t kDampF0 = 0x0000F70A;
+ int32_t kDampF1 = 0x51EC2000;
+ int32_t kDampF2 = 0xF70A2000;
+ int16_t* input1 = inputBuf + ind2t;
+ int16_t* output1 = outputBuf + ind2t;
+ int16_t* output2 = outputBuf2 + ind2t + PITCH_BUFFSIZE;
+
+ // Load coefficients outside the loop and sign-extend gain and sign
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lwl %[coef1], 3(%[coefficient]) \n\t"
+ "lwl %[coef2], 7(%[coefficient]) \n\t"
+ "lwl %[coef3], 11(%[coefficient]) \n\t"
+ "lwl %[coef4], 15(%[coefficient]) \n\t"
+ "lwr %[coef1], 0(%[coefficient]) \n\t"
+ "lwr %[coef2], 4(%[coefficient]) \n\t"
+ "lwr %[coef3], 8(%[coefficient]) \n\t"
+ "lwr %[coef4], 12(%[coefficient]) \n\t"
+ "lhu %[coef5], 16(%[coefficient]) \n\t"
+ "seh %[gain32], %[gain] \n\t"
+ "seh %[sign32], %[sign] \n\t"
+ ".set pop \n\t"
+ : [coef1] "=&r" (coef1), [coef2] "=&r" (coef2), [coef3] "=&r" (coef3),
+ [coef4] "=&r" (coef4), [coef5] "=&r" (coef5), [gain32] "=&r" (gain32),
+ [sign32] "=&r" (sign32)
+ : [coefficient] "r" (coefficient), [gain] "r" (gain),
+ [sign] "r" (sign)
+ : "memory"
+ );
+
+ for (i = 0; i < loopNumber; i++) {
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ // Filter to get fractional pitch
+ "li %[w1], 8192 \n\t"
+ "mtlo %[w1] \n\t"
+ "mthi $0 \n\t"
+ "lwl %[w1], 3(%[out2_pos2]) \n\t"
+ "lwl %[w2], 7(%[out2_pos2]) \n\t"
+ "lwl %[w3], 11(%[out2_pos2]) \n\t"
+ "lwl %[w4], 15(%[out2_pos2]) \n\t"
+ "lwr %[w1], 0(%[out2_pos2]) \n\t"
+ "lwr %[w2], 4(%[out2_pos2]) \n\t"
+ "lwr %[w3], 8(%[out2_pos2]) \n\t"
+ "lwr %[w4], 12(%[out2_pos2]) \n\t"
+ "lhu %[w5], 16(%[out2_pos2]) \n\t"
+ "dpa.w.ph $ac0, %[w1], %[coef1] \n\t"
+ "dpa.w.ph $ac0, %[w2], %[coef2] \n\t"
+ "dpa.w.ph $ac0, %[w3], %[coef3] \n\t"
+ "dpa.w.ph $ac0, %[w4], %[coef4] \n\t"
+ "dpa.w.ph $ac0, %[w5], %[coef5] \n\t"
+ "addiu %[out2_pos2], %[out2_pos2], 2 \n\t"
+ "mthi $0, $ac1 \n\t"
+ "lwl %[w2], 3(%[inputState]) \n\t"
+ "lwl %[w3], 7(%[inputState]) \n\t"
+ // Fractional pitch shift & saturation
+ "extr_s.h %[w1], $ac0, 14 \n\t"
+ "li %[w4], 16384 \n\t"
+ "lwr %[w2], 0(%[inputState]) \n\t"
+ "lwr %[w3], 4(%[inputState]) \n\t"
+ "mtlo %[w4], $ac1 \n\t"
+ // Shift low pass filter state
+ "swl %[w2], 5(%[inputState]) \n\t"
+ "swl %[w3], 9(%[inputState]) \n\t"
+ "mul %[w1], %[gain32], %[w1] \n\t"
+ "swr %[w2], 2(%[inputState]) \n\t"
+ "swr %[w3], 6(%[inputState]) \n\t"
+ // Low pass filter accumulation
+ "dpa.w.ph $ac1, %[kDampF1], %[w2] \n\t"
+ "dpa.w.ph $ac1, %[kDampF2], %[w3] \n\t"
+ "lh %[w4], 0(%[input1]) \n\t"
+ "addiu %[input1], %[input1], 2 \n\t"
+ "shra_r.w %[w1], %[w1], 12 \n\t"
+ "sh %[w1], 0(%[inputState]) \n\t"
+ "dpa.w.ph $ac1, %[kDampF0], %[w1] \n\t"
+ // Low pass filter shift & saturation
+ "extr_s.h %[w2], $ac1, 15 \n\t"
+ "mul %[w2], %[w2], %[sign32] \n\t"
+ // Buffer update
+ "subu %[w2], %[w4], %[w2] \n\t"
+ "shll_s.w %[w2], %[w2], 16 \n\t"
+ "sra %[w2], %[w2], 16 \n\t"
+ "sh %[w2], 0(%[output1]) \n\t"
+ "addu %[w2], %[w2], %[w4] \n\t"
+ "shll_s.w %[w2], %[w2], 16 \n\t"
+ "addiu %[output1], %[output1], 2 \n\t"
+ "sra %[w2], %[w2], 16 \n\t"
+ "sh %[w2], 0(%[output2]) \n\t"
+ "addiu %[output2], %[output2], 2 \n\t"
+ ".set pop \n\t"
+ : [w1] "=&r" (w1), [w2] "=&r" (w2), [w3] "=&r" (w3), [w4] "=&r" (w4),
+ [w5] "=&r" (w5), [input1] "+r" (input1), [out2_pos2] "+r" (out2_pos2),
+ [output1] "+r" (output1), [output2] "+r" (output2)
+ : [coefficient] "r" (coefficient), [inputState] "r" (inputState),
+ [gain32] "r" (gain32), [sign32] "r" (sign32), [kDampF0] "r" (kDampF0),
+ [kDampF1] "r" (kDampF1), [kDampF2] "r" (kDampF2),
+ [coef1] "r" (coef1), [coef2] "r" (coef2), [coef3] "r" (coef3),
+ [coef4] "r" (coef4), [coef5] "r" (coef5)
+ : "hi", "lo", "$ac1hi", "$ac1lo", "memory"
+ );
+ }
+ (*index2) += loopNumber;
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h
index 4d043566369..bd20ba0165a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h
@@ -26,13 +26,14 @@
/* Bitstream struct for decoder */
typedef struct Bitstreamstruct_dec {
- uint16_t *stream; /* Pointer to bytestream to decode */
+ uint16_t stream[STREAM_MAXW16_60MS]; /* Bytestream to decode, stored in an array */
uint32_t W_upper; /* Upper boundary of interval W */
uint32_t streamval;
uint16_t stream_index; /* Index to the current position in bytestream */
int16_t full; /* 0 - first byte in memory filled, second empty*/
/* 1 - both bytes are empty (we just filled the previous memory */
+ int stream_size; /* Size of the stream, in 16-bit words. */
} Bitstr_dec;
/* Bitstream struct for encoder */
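
With this change Bitstr_dec owns its bitstream storage instead of aliasing a caller-provided pointer, and the new stream_size field (set elsewhere in this patch to (len + 1) >> 1) lets the decoder bounds-check its reads. A simplified sketch of that ownership model, with hypothetical names and a stand-in value for STREAM_MAXW16_60MS:

#include <stdint.h>
#include <string.h>

#define MAX_STREAM_W16 300  /* Stand-in for STREAM_MAXW16_60MS. */

typedef struct {
  uint16_t stream[MAX_STREAM_W16];  /* Owned copy of the bitstream. */
  int stream_size;                  /* Valid 16-bit words in stream[]. */
} BitstreamBuf;

/* Copy an encoded payload into the fixed buffer, rounding the byte count
 * up to whole 16-bit words and rejecting oversized input. */
static int bitstream_buf_set(BitstreamBuf* buf,
                             const uint8_t* encoded, int len_bytes) {
  int words = (len_bytes + 1) >> 1;
  if (len_bytes < 0 || words > MAX_STREAM_W16) return -1;
  memset(buf->stream, 0, sizeof(buf->stream));
  memcpy(buf->stream, encoded, (size_t)len_bytes);
  buf->stream_size = words;
  return 0;
}
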
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S
index 46682ac556a..6713b28695c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S
@@ -42,7 +42,11 @@ DEFINE_FUNCTION WebRtcIsacfix_Time2SpecNeon
add r5, sp, #(16 + FRAMESAMPLES * 2) @ tmpimQ16;
adr r9, WebRtcIsacfix_kCosTab1
+#if defined(__APPLE__)
+ mov r6, #:lower16:(WebRtcIsacfix_kSinTab1 - WebRtcIsacfix_kCosTab1)
+#else
mov r6, #(WebRtcIsacfix_kSinTab1 - WebRtcIsacfix_kCosTab1)
+#endif
add r10, r9, r6 @ WebRtcIsacfix_kSinTab1
vmov.u32 q14, #0 @ Initialize the maximum values for tmpInIm.
@@ -455,7 +459,12 @@ TransformAndFindMax:
bgt TransformAndFindMax
adr r10, WebRtcIsacfix_kSinTab1
+#if defined(__APPLE__)
+ mov r2, #:lower16:(WebRtcIsacfix_kSinTab1 - WebRtcIsacfix_kCosTab1)
+#else
mov r2, #(WebRtcIsacfix_kSinTab1 - WebRtcIsacfix_kCosTab1)
+#endif
+
sub r11, r10, r2 @ WebRtcIsacfix_kCosTab1
@ Find the maximum value in the Neon registers
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h
index f937b3453fa..76a61e6d33c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h
@@ -14,7 +14,7 @@
/*
* Define the fixed-point numeric formats
*/
-#include "typedefs.h"
+#include "webrtc/typedefs.h"
typedef struct WebRtcISACStruct ISACStruct;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
index 66bf06d472b..9ae69a0bbf2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
@@ -1449,13 +1449,18 @@ void WebRtcIsac_EncodeRc(int16_t* RCQ15, Bitstr* streamdata) {
/* quantize reflection coefficients (add noise feedback?) */
for (k = 0; k < AR_ORDER; k++) {
index[k] = WebRtcIsac_kQArRcInitIndex[k];
-
+ // The safe-guards in the following while conditions are there to suppress
+ // gcc 4.8.3 warnings (Issue 2888). Otherwise, the first and last elements of
+ // |WebRtcIsac_kQArBoundaryLevels| are such that the following search can
+ // *never* cause an out-of-bounds read.
if (RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k]]) {
- while (RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k] + 1]) {
+ while (index[k] + 1 < NUM_AR_RC_QUANT_BAUNDARY &&
+ RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k] + 1]) {
index[k]++;
}
} else {
- while (RCQ15[k] < WebRtcIsac_kQArBoundaryLevels[--index[k]]) ;
+ while (index[k] > 0 &&
+ RCQ15[k] < WebRtcIsac_kQArBoundaryLevels[--index[k]]) ;
}
RCQ15[k] = *(WebRtcIsac_kQArRcLevelsPtr[k] + index[k]);
}
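
The guarded conditions above bound the quantizer-index search at both ends of the boundary table. A compact sketch of the same idea with hypothetical names (not the WebRTC function, and not byte-for-byte equivalent to the loop above):

#include <stdint.h>

/* Walk a sorted boundary table up or down from start_index to find the
 * quantization interval containing value, with explicit bounds so the
 * scan can never index outside [0, num_boundaries). */
static int find_quant_index(int16_t value, const int16_t* boundaries,
                            int num_boundaries, int start_index) {
  int index = start_index;
  if (value > boundaries[index]) {
    while (index + 1 < num_boundaries && value > boundaries[index + 1])
      ++index;
  } else {
    while (index > 0 && value < boundaries[index - 1])
      --index;
  }
  return index;
}
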
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c
index f3f1650b42b..fa54a8d873c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c
@@ -15,20 +15,21 @@
*
*/
-#include "isac.h"
-#include "bandwidth_estimator.h"
-#include "crc.h"
-#include "entropy_coding.h"
-#include "codec.h"
-#include "structs.h"
-#include "signal_processing_library.h"
-#include "lpc_shape_swb16_tables.h"
-#include "os_specific_inline.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
+#include <math.h>
#include <stdio.h>
-#include <string.h>
#include <stdlib.h>
-#include <math.h>
+#include <string.h>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/codec.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/crc.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/os_specific_inline.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/structs.h"
#define BIT_MASK_DEC_INIT 0x0001
#define BIT_MASK_ENC_INIT 0x0002
@@ -273,7 +274,7 @@ int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst) {
ISACMainStruct* instISAC;
if (ISAC_main_inst != NULL) {
- instISAC = (ISACMainStruct*)WEBRTC_SPL_VNEW(ISACMainStruct, 1);
+ instISAC = (ISACMainStruct*)malloc(sizeof(ISACMainStruct));
*ISAC_main_inst = (ISACStruct*)instISAC;
if (*ISAC_main_inst != NULL) {
instISAC->errorCode = 0;
@@ -306,7 +307,7 @@ int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst) {
*/
int16_t WebRtcIsac_Free(ISACStruct* ISAC_main_inst) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
- WEBRTC_SPL_FREE(instISAC);
+ free(instISAC);
return 0;
}
@@ -552,8 +553,8 @@ int16_t WebRtcIsac_Encode(ISACStruct* ISAC_main_inst,
}
if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) {
- WebRtcSpl_AnalysisQMF(speech_in_ptr, speechInLB, speechInUB,
- instISAC->analysisFBState1,
+ WebRtcSpl_AnalysisQMF(speech_in_ptr, SWBFRAMESAMPLES_10ms, speechInLB,
+ speechInUB, instISAC->analysisFBState1,
instISAC->analysisFBState2);
/* Convert from fixed to floating point. */
@@ -1314,7 +1315,7 @@ static int16_t Decode(ISACStruct* ISAC_main_inst,
speechIdx = 0;
while (speechIdx < numSamplesLB) {
WebRtcSpl_SynthesisQMF(&outFrameLB[speechIdx], &outFrameUB[speechIdx],
- &decoded[(speechIdx << 1)],
+ FRAMESAMPLES_10ms, &decoded[(speechIdx << 1)],
instISAC->synthesisFBState1,
instISAC->synthesisFBState2);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c
index 9eae0555f65..0f6d889225d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c
@@ -13,68 +13,69 @@
/********************* AR Coefficient Tables ************************/
/* cdf for quantized reflection coefficient 1 */
-const uint16_t WebRtcIsac_kQArRc1Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc1Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 129, 7707, 57485, 65495, 65527, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 2 */
-const uint16_t WebRtcIsac_kQArRc2Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc2Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 7, 531, 25298, 64525, 65526, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 3 */
-const uint16_t WebRtcIsac_kQArRc3Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc3Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 6, 620, 22898, 64843, 65527, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 4 */
-const uint16_t WebRtcIsac_kQArRc4Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc4Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 6, 35, 10034, 60733, 65506, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 5 */
-const uint16_t WebRtcIsac_kQArRc5Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc5Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 6, 36, 7567, 56727, 65385, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 6 */
-const uint16_t WebRtcIsac_kQArRc6Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc6Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 6, 14, 6579, 57360, 65409, 65529, 65531,
65533, 65535};
/* representation levels for quantized reflection coefficient 1 */
-const int16_t WebRtcIsac_kQArRc1Levels[11] = {
+const int16_t WebRtcIsac_kQArRc1Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 2 */
-const int16_t WebRtcIsac_kQArRc2Levels[11] = {
+const int16_t WebRtcIsac_kQArRc2Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 3 */
-const int16_t WebRtcIsac_kQArRc3Levels[11] = {
+const int16_t WebRtcIsac_kQArRc3Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 4 */
-const int16_t WebRtcIsac_kQArRc4Levels[11] = {
+const int16_t WebRtcIsac_kQArRc4Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 5 */
-const int16_t WebRtcIsac_kQArRc5Levels[11] = {
+const int16_t WebRtcIsac_kQArRc5Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29503, -24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 6 */
-const int16_t WebRtcIsac_kQArRc6Levels[11] = {
+const int16_t WebRtcIsac_kQArRc6Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104
};
/* quantization boundary levels for reflection coefficients */
-const int16_t WebRtcIsac_kQArBoundaryLevels[12] = {
--32768, -31441, -27566, -21458, -13612, -4663, 4663, 13612, 21458, 27566, 31441, 32767
+const int16_t WebRtcIsac_kQArBoundaryLevels[NUM_AR_RC_QUANT_BAUNDARY] = {
+-32768, -31441, -27566, -21458, -13612, -4663, 4663, 13612, 21458, 27566, 31441,
+32767
};
/* initial index for AR reflection coefficient quantizer and cdf table search */
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h
index 22fe6a2102c..989cb367bff 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h
@@ -21,27 +21,29 @@
#include "structs.h"
+#define NUM_AR_RC_QUANT_BAUNDARY 12
+
/********************* AR Coefficient Tables ************************/
/* cdf for quantized reflection coefficient 1 */
-extern const uint16_t WebRtcIsac_kQArRc1Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc1Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* cdf for quantized reflection coefficient 2 */
-extern const uint16_t WebRtcIsac_kQArRc2Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc2Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* cdf for quantized reflection coefficient 3 */
-extern const uint16_t WebRtcIsac_kQArRc3Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc3Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* cdf for quantized reflection coefficient 4 */
-extern const uint16_t WebRtcIsac_kQArRc4Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc4Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* cdf for quantized reflection coefficient 5 */
-extern const uint16_t WebRtcIsac_kQArRc5Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc5Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* cdf for quantized reflection coefficient 6 */
-extern const uint16_t WebRtcIsac_kQArRc6Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc6Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* quantization boundary levels for reflection coefficients */
-extern const int16_t WebRtcIsac_kQArBoundaryLevels[12];
+extern const int16_t WebRtcIsac_kQArBoundaryLevels[NUM_AR_RC_QUANT_BAUNDARY];
/* initial indices for AR reflection coefficient quantizer and cdf table search */
extern const uint16_t WebRtcIsac_kQArRcInitIndex[AR_ORDER];
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h
index 1bd73e75bd0..62c890c84bc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h
@@ -18,10 +18,9 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_STRUCTS_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_STRUCTS_H_
-
-#include "typedefs.h"
-#include "settings.h"
-#include "isac.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/settings.h"
+#include "webrtc/typedefs.h"
typedef struct Bitstreamstruct {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h
index 1370aff06a6..7998fdbdebf 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INTERFACE_OPUS_INTERFACE_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INTERFACE_OPUS_INTERFACE_H_
-#include "typedefs.h"
+#include "webrtc/typedefs.h"
#ifdef __cplusplus
extern "C" {
@@ -59,6 +59,64 @@ int16_t WebRtcOpus_Encode(OpusEncInst* inst, int16_t* audio_in, int16_t samples,
*/
int16_t WebRtcOpus_SetBitRate(OpusEncInst* inst, int32_t rate);
+/****************************************************************************
+ * WebRtcOpus_SetPacketLossRate(...)
+ *
+ * This function configures the encoder's expected packet loss percentage.
+ *
+ * Input:
+ * - inst : Encoder context
+ * - loss_rate : loss percentage in the range 0-100, inclusive.
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_SetPacketLossRate(OpusEncInst* inst, int32_t loss_rate);
+
+/* TODO(minyue): Check whether an API to check the FEC and the packet loss rate
+ * is needed. It might not be very useful since there are not many use cases and
+ * the caller can always maintain the states. */
+
+/****************************************************************************
+ * WebRtcOpus_EnableFec()
+ *
+ * This function enables FEC for encoding.
+ *
+ * Input:
+ * - inst : Encoder context
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_EnableFec(OpusEncInst* inst);
+
+/****************************************************************************
+ * WebRtcOpus_DisableFec()
+ *
+ * This function disables FEC for encoding.
+ *
+ * Input:
+ * - inst : Encoder context
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_DisableFec(OpusEncInst* inst);
+
+/*
+ * WebRtcOpus_SetComplexity(...)
+ *
+ * This function adjusts the computational complexity of the encoder, with the
+ * same effect as applying the OPUS_SET_COMPLEXITY CTL to the Opus encoder.
+ *
+ * Input:
+ * - inst : Encoder context
+ * - complexity : New target complexity (0-10, inclusive)
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_SetComplexity(OpusEncInst* inst, int32_t complexity);
+
int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, int channels);
int16_t WebRtcOpus_DecoderFree(OpusDecInst* inst);
@@ -113,6 +171,7 @@ int16_t WebRtcOpus_Decode(OpusDecInst* inst, const int16_t* encoded,
int16_t WebRtcOpus_DecodeSlave(OpusDecInst* inst, const int16_t* encoded,
int16_t encoded_bytes, int16_t* decoded,
int16_t* audio_type);
+
/****************************************************************************
* WebRtcOpus_DecodePlc(...)
* TODO(tlegrand): Remove master and slave functions when NetEq4 is in place.
@@ -138,6 +197,28 @@ int16_t WebRtcOpus_DecodePlcSlave(OpusDecInst* inst, int16_t* decoded,
int16_t number_of_lost_frames);
/****************************************************************************
+ * WebRtcOpus_DecodeFec(...)
+ *
+ * This function decodes the FEC data from an Opus packet into one or more audio
+ * frames at the ACM interface's sampling rate (32 kHz).
+ *
+ * Input:
+ * - inst : Decoder context
+ * - encoded : Encoded data
+ * - encoded_bytes : Bytes in encoded vector
+ *
+ * Output:
+ * - decoded : The decoded vector (previous frame)
+ *
+ * Return value : >0 - Samples per channel in decoded vector
+ * 0 - No FEC data in the packet
+ * -1 - Error
+ */
+int16_t WebRtcOpus_DecodeFec(OpusDecInst* inst, const uint8_t* encoded,
+ int16_t encoded_bytes, int16_t* decoded,
+ int16_t* audio_type);
+
+/****************************************************************************
* WebRtcOpus_DurationEst(...)
*
* This function calculates the duration of an opus packet.
@@ -152,6 +233,40 @@ int WebRtcOpus_DurationEst(OpusDecInst* inst,
const uint8_t* payload,
int payload_length_bytes);
+/* TODO(minyue): Check whether it is needed to add a decoder context to the
+ * arguments, like WebRtcOpus_DurationEst(...). In fact, the packet itself tells
+ * the duration. The decoder context in WebRtcOpus_DurationEst(...) is not used.
+ * So it may be advisable to remove it from WebRtcOpus_DurationEst(...). */
+
+/****************************************************************************
+ * WebRtcOpus_FecDurationEst(...)
+ *
+ * This function calculates the duration of the FEC data within an opus packet.
+ * Input:
+ * - payload : Encoded data pointer
+ * - payload_length_bytes : Bytes of encoded data
+ *
+ * Return value : >0 - The duration of the FEC data in the
+ * packet in samples.
+ * 0 - No FEC data in the packet.
+ */
+int WebRtcOpus_FecDurationEst(const uint8_t* payload,
+ int payload_length_bytes);
+
+/****************************************************************************
+ * WebRtcOpus_PacketHasFec(...)
+ *
+ * This function detects if an opus packet has FEC.
+ * Input:
+ * - payload : Encoded data pointer
+ * - payload_length_bytes : Bytes of encoded data
+ *
+ * Return value : 0 - the packet does NOT contain FEC.
+ * 1 - the packet contains FEC.
+ */
+int WebRtcOpus_PacketHasFec(const uint8_t* payload,
+ int payload_length_bytes);
+
#ifdef __cplusplus
} // extern "C"
#endif
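
Taken together, the additions above give a send-side knob (enable in-band FEC and report the expected loss rate) and a receive-side recovery path (check a packet for FEC, decode the embedded copy of the previous frame). A hedged usage sketch follows, mirroring how opus_fec_test.cc drives the API; the helper names and the buffer-sizing remark are assumptions, not part of this header.

#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"

/* Sender side: enable in-band FEC and tell the encoder the expected loss. */
static void ConfigureFec(OpusEncInst* encoder, int32_t expected_loss_percent) {
  /* Both calls return 0 on success and -1 on error, per the declarations
   * above. */
  WebRtcOpus_EnableFec(encoder);
  WebRtcOpus_SetPacketLossRate(encoder, expected_loss_percent);
}

/* Receiver side: recover the previous (lost) frame from the current packet's
 * FEC data if present, otherwise fall back to packet loss concealment.
 * |decoded| must be large enough for one frame at the 32 kHz output rate. */
static int16_t RecoverPreviousFrame(OpusDecInst* decoder,
                                    const uint8_t* packet,
                                    int16_t packet_bytes,
                                    int16_t* decoded) {
  int16_t audio_type;
  if (WebRtcOpus_PacketHasFec(packet, packet_bytes) == 1) {
    return WebRtcOpus_DecodeFec(decoder, packet, packet_bytes, decoded,
                                &audio_type);
  }
  return WebRtcOpus_DecodePlc(decoder, decoded, 1);
}
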
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus.gypi b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus.gypi
index 406870232bb..b1dedd7d4a6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus.gypi
@@ -32,4 +32,26 @@
],
},
],
+ 'conditions': [
+ ['include_tests==1', {
+ 'targets': [
+ {
+ 'target_name': 'webrtc_opus_fec_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'webrtc_opus',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ 'sources': [
+ 'opus_fec_test.cc',
+ ],
+ },
+ ],
+ }],
+ ],
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc
new file mode 100644
index 00000000000..fb4cb04f361
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc
@@ -0,0 +1,249 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+using ::std::string;
+using ::std::tr1::tuple;
+using ::std::tr1::make_tuple;
+using ::std::tr1::get;
+using ::testing::TestWithParam;
+using ::testing::ValuesIn;
+
+namespace webrtc {
+
+// Define coding parameter as <channels, bit_rate, filename, extension>.
+typedef tuple<int, int, string, string> coding_param;
+typedef struct mode mode;
+
+struct mode {
+ bool fec;
+ uint8_t target_packet_loss_rate;
+};
+
+const int kOpusBlockDurationMs = 20;
+const int kOpusInputSamplingKhz = 48;
+const int kOpusOutputSamplingKhz = 32;
+
+class OpusFecTest : public TestWithParam<coding_param> {
+ protected:
+ OpusFecTest();
+
+ virtual void SetUp();
+ virtual void TearDown();
+
+ virtual void EncodeABlock();
+
+ virtual void DecodeABlock(bool lost_previous, bool lost_current);
+
+ int block_duration_ms_;
+ int input_sampling_khz_;
+ int output_sampling_khz_;
+
+ // Number of samples-per-channel in a frame.
+ int input_length_sample_;
+
+ // Expected output number of samples-per-channel in a frame.
+ int output_length_sample_;
+
+ int channels_;
+ int bit_rate_;
+
+ size_t data_pointer_;
+ size_t loop_length_samples_;
+ int max_bytes_;
+ int encoded_bytes_;
+
+ WebRtcOpusEncInst* opus_encoder_;
+ WebRtcOpusDecInst* opus_decoder_;
+
+ string in_filename_;
+
+ scoped_ptr<int16_t[]> in_data_;
+ scoped_ptr<int16_t[]> out_data_;
+ scoped_ptr<uint8_t[]> bit_stream_;
+};
+
+void OpusFecTest::SetUp() {
+ channels_ = get<0>(GetParam());
+ bit_rate_ = get<1>(GetParam());
+ printf("Coding %d channel signal at %d bps.\n", channels_, bit_rate_);
+
+ in_filename_ = test::ResourcePath(get<2>(GetParam()), get<3>(GetParam()));
+
+ FILE* fp = fopen(in_filename_.c_str(), "rb");
+ ASSERT_FALSE(fp == NULL);
+
+ // Obtain file size.
+ fseek(fp, 0, SEEK_END);
+ loop_length_samples_ = ftell(fp) / sizeof(int16_t);
+ rewind(fp);
+
+ // Allocate memory to contain the whole file.
+ in_data_.reset(new int16_t[loop_length_samples_ +
+ input_length_sample_ * channels_]);
+
+ // Copy the file into the buffer.
+ ASSERT_EQ(fread(&in_data_[0], sizeof(int16_t), loop_length_samples_, fp),
+ loop_length_samples_);
+ fclose(fp);
+
+ // The audio will be used in a looped manner. To ease the acquisition of an
+ // audio frame that crosses the end of the excerpt, we add an extra block
+ // length of samples to the end of the array, starting over again from the
+  // beginning of the array. Audio frames that cross the end of the excerpt
+  // then always appear as one contiguous block of memory.
+ memcpy(&in_data_[loop_length_samples_], &in_data_[0],
+ input_length_sample_ * channels_ * sizeof(int16_t));
+
+ // Maximum number of bytes in output bitstream.
+ max_bytes_ = input_length_sample_ * channels_ * sizeof(int16_t);
+
+ out_data_.reset(new int16_t[2 * output_length_sample_ * channels_]);
+ bit_stream_.reset(new uint8_t[max_bytes_]);
+
+ // Create encoder memory.
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_encoder_, channels_));
+ EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_decoder_, channels_));
+ // Set bitrate.
+ EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_encoder_, bit_rate_));
+}
+
+void OpusFecTest::TearDown() {
+ // Free memory.
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_));
+}
+
+OpusFecTest::OpusFecTest()
+ : block_duration_ms_(kOpusBlockDurationMs),
+ input_sampling_khz_(kOpusInputSamplingKhz),
+ output_sampling_khz_(kOpusOutputSamplingKhz),
+ input_length_sample_(block_duration_ms_ * input_sampling_khz_),
+ output_length_sample_(block_duration_ms_ * output_sampling_khz_),
+ data_pointer_(0),
+ max_bytes_(0),
+ encoded_bytes_(0),
+ opus_encoder_(NULL),
+ opus_decoder_(NULL) {
+}
+
+void OpusFecTest::EncodeABlock() {
+ int16_t value = WebRtcOpus_Encode(opus_encoder_,
+ &in_data_[data_pointer_],
+ input_length_sample_,
+ max_bytes_, &bit_stream_[0]);
+ EXPECT_GT(value, 0);
+
+ encoded_bytes_ = value;
+}
+
+void OpusFecTest::DecodeABlock(bool lost_previous, bool lost_current) {
+ int16_t audio_type;
+ int16_t value_1 = 0, value_2 = 0;
+
+ if (lost_previous) {
+ // Decode previous frame.
+ if (!lost_current &&
+ WebRtcOpus_PacketHasFec(&bit_stream_[0], encoded_bytes_) == 1) {
+ value_1 = WebRtcOpus_DecodeFec(opus_decoder_, &bit_stream_[0],
+ encoded_bytes_, &out_data_[0],
+ &audio_type);
+ } else {
+ value_1 = WebRtcOpus_DecodePlc(opus_decoder_, &out_data_[0], 1);
+ }
+ EXPECT_EQ(output_length_sample_, value_1);
+ }
+
+ if (!lost_current) {
+ // Decode current frame.
+ value_2 = WebRtcOpus_DecodeNew(opus_decoder_, &bit_stream_[0],
+ encoded_bytes_,
+ &out_data_[value_1 * channels_],
+ &audio_type);
+ EXPECT_EQ(output_length_sample_, value_2);
+ }
+}
+
+TEST_P(OpusFecTest, RandomPacketLossTest) {
+ const int kDurationMs = 200000;
+ int time_now_ms, fec_frames;
+ int actual_packet_loss_rate;
+ bool lost_current, lost_previous;
+ mode mode_set[3] = {{true, 0},
+ {false, 0},
+ {true, 50}};
+
+ lost_current = false;
+ for (int i = 0; i < 3; i++) {
+ if (mode_set[i].fec) {
+ EXPECT_EQ(0, WebRtcOpus_EnableFec(opus_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_SetPacketLossRate(opus_encoder_,
+ mode_set[i].target_packet_loss_rate));
+ printf("FEC is ON, target at packet loss rate %d percent.\n",
+ mode_set[i].target_packet_loss_rate);
+ } else {
+ EXPECT_EQ(0, WebRtcOpus_DisableFec(opus_encoder_));
+ printf("FEC is OFF.\n");
+ }
+ // In this test, we let the target packet loss rate match the actual rate.
+ actual_packet_loss_rate = mode_set[i].target_packet_loss_rate;
+ // Run every mode a certain time.
+ time_now_ms = 0;
+ fec_frames = 0;
+ while (time_now_ms < kDurationMs) {
+ // Encode & decode.
+ EncodeABlock();
+
+ // Check if payload has FEC.
+ int16_t fec = WebRtcOpus_PacketHasFec(&bit_stream_[0], encoded_bytes_);
+
+ // If FEC is disabled or the target packet loss rate is set to 0, there
+ // should be no FEC in the bit stream.
+ if (!mode_set[i].fec || mode_set[i].target_packet_loss_rate == 0) {
+ EXPECT_EQ(fec, 0);
+ } else if (fec == 1) {
+ fec_frames++;
+ }
+
+ lost_previous = lost_current;
+ lost_current = rand() < actual_packet_loss_rate * (RAND_MAX / 100);
+ DecodeABlock(lost_previous, lost_current);
+
+ time_now_ms += block_duration_ms_;
+
+ // |data_pointer_| is incremented and wrapped across
+ // |loop_length_samples_|.
+ data_pointer_ = (data_pointer_ + input_length_sample_ * channels_) %
+ loop_length_samples_;
+ }
+ if (mode_set[i].fec) {
+      printf("%.2f percent of frames have FEC.\n",
+ static_cast<float>(fec_frames) * block_duration_ms_ / 2000);
+ }
+ }
+}
+
+const coding_param param_set[] =
+ {make_tuple(1, 64000, string("audio_coding/testfile32kHz"),
+ string("pcm")),
+ make_tuple(1, 32000, string("audio_coding/testfile32kHz"),
+ string("pcm")),
+ make_tuple(2, 64000, string("audio_coding/teststereo32kHz"),
+ string("pcm"))};
+
+// Test cases: mono at 64 kbps and 32 kbps, and stereo at 64 kbps.
+INSTANTIATE_TEST_CASE_P(AllTest, OpusFecTest,
+ ValuesIn(param_set));
+
+} // namespace webrtc
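
The loss model in RandomPacketLossTest is a per-packet Bernoulli draw: rand() is roughly uniform on [0, RAND_MAX], so comparing it against loss_rate * (RAND_MAX / 100) marks a packet as lost with probability close to loss_rate percent. A minimal sketch of that check; the function name is chosen here for illustration only.

#include <stdlib.h>

/* Returns non-zero with probability of about |loss_rate_percent| / 100. */
static int PacketIsLost(int loss_rate_percent) {
  return rand() < loss_rate_percent * (RAND_MAX / 100);
}
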
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.c
index 98b924f219c..24fc4fc405a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.c
@@ -103,7 +103,40 @@ int16_t WebRtcOpus_Encode(OpusEncInst* inst, int16_t* audio_in, int16_t samples,
int16_t WebRtcOpus_SetBitRate(OpusEncInst* inst, int32_t rate) {
if (inst) {
- return opus_encoder_ctl(inst->encoder, OPUS_SET_BITRATE(rate));
+ return opus_encoder_ctl(inst->encoder, OPUS_SET_BITRATE(rate));
+ } else {
+ return -1;
+ }
+}
+
+int16_t WebRtcOpus_SetPacketLossRate(OpusEncInst* inst, int32_t loss_rate) {
+ if (inst) {
+ return opus_encoder_ctl(inst->encoder,
+ OPUS_SET_PACKET_LOSS_PERC(loss_rate));
+ } else {
+ return -1;
+ }
+}
+
+int16_t WebRtcOpus_EnableFec(OpusEncInst* inst) {
+ if (inst) {
+ return opus_encoder_ctl(inst->encoder, OPUS_SET_INBAND_FEC(1));
+ } else {
+ return -1;
+ }
+}
+
+int16_t WebRtcOpus_DisableFec(OpusEncInst* inst) {
+ if (inst) {
+ return opus_encoder_ctl(inst->encoder, OPUS_SET_INBAND_FEC(0));
+ } else {
+ return -1;
+ }
+}
+
+int16_t WebRtcOpus_SetComplexity(OpusEncInst* inst, int32_t complexity) {
+ if (inst) {
+ return opus_encoder_ctl(inst->encoder, OPUS_SET_COMPLEXITY(complexity));
} else {
return -1;
}
@@ -217,6 +250,23 @@ static int DecodeNative(OpusDecoder* inst, const int16_t* encoded,
return -1;
}
+static int DecodeFec(OpusDecoder* inst, const int16_t* encoded,
+ int16_t encoded_bytes, int frame_size,
+ int16_t* decoded, int16_t* audio_type) {
+ unsigned char* coded = (unsigned char*) encoded;
+ opus_int16* audio = (opus_int16*) decoded;
+
+ int res = opus_decode(inst, coded, encoded_bytes, audio, frame_size, 1);
+
+ /* TODO(tlegrand): set to DTX for zero-length packets? */
+ *audio_type = 0;
+
+ if (res > 0) {
+ return res;
+ }
+ return -1;
+}
+
/* Resample from 48 to 32 kHz. Length of state is assumed to be
* kWebRtcOpusStateSize (7).
*/
@@ -542,6 +592,52 @@ int16_t WebRtcOpus_DecodePlcSlave(OpusDecInst* inst, int16_t* decoded,
return resampled_samples;
}
+int16_t WebRtcOpus_DecodeFec(OpusDecInst* inst, const uint8_t* encoded,
+ int16_t encoded_bytes, int16_t* decoded,
+ int16_t* audio_type) {
+ /* |buffer| is big enough for 120 ms (the largest Opus packet size) of stereo
+ * audio at 48 kHz. */
+ int16_t buffer[kWebRtcOpusMaxFrameSize];
+ int16_t* coded = (int16_t*)encoded;
+ int decoded_samples;
+ int resampled_samples;
+ int fec_samples;
+
+ if (WebRtcOpus_PacketHasFec(encoded, encoded_bytes) != 1) {
+ return 0;
+ }
+
+ fec_samples = opus_packet_get_samples_per_frame(encoded, 48000);
+
+ /* Decode to a temporary buffer. */
+ decoded_samples = DecodeFec(inst->decoder_left, coded, encoded_bytes,
+ fec_samples, buffer, audio_type);
+ if (decoded_samples < 0) {
+ return -1;
+ }
+
+  /* In the mono case, just resample the decoded frame from 48 kHz to 32 kHz.
+   * If stereo, we need to de-interleave the stereo output into blocks with
+   * left and right channel. Each block is resampled to 32 kHz, and then
+   * interleaved again. */
+ if (inst->channels == 2) {
+ /* De-interleave and resample. */
+ resampled_samples = WebRtcOpus_DeInterleaveResample(inst,
+ buffer,
+ decoded_samples,
+ decoded);
+ } else {
+ /* Resample from 48 kHz to 32 kHz. Filter state memory for left channel is
+ * used for mono signals. */
+ resampled_samples = WebRtcOpus_Resample48to32(buffer,
+ decoded_samples,
+ inst->state_48_32_left,
+ decoded);
+ }
+
+ return resampled_samples;
+}
+
int WebRtcOpus_DurationEst(OpusDecInst* inst,
const uint8_t* payload,
int payload_length_bytes) {
@@ -562,3 +658,79 @@ int WebRtcOpus_DurationEst(OpusDecInst* inst,
samples = samples * 2 / 3;
return samples;
}
+
+int WebRtcOpus_FecDurationEst(const uint8_t* payload,
+ int payload_length_bytes) {
+ int samples;
+ if (WebRtcOpus_PacketHasFec(payload, payload_length_bytes) != 1) {
+ return 0;
+ }
+
+ samples = opus_packet_get_samples_per_frame(payload, 48000);
+ if (samples < 480 || samples > 5760) {
+ /* Invalid payload duration. */
+ return 0;
+ }
+ /* Compensate for the down-sampling from 48 kHz to 32 kHz.
+ * This should be removed when the resampling in WebRtcOpus_Decode is
+ * removed. */
+ samples = samples * 2 / 3;
+ return samples;
+}
+
+int WebRtcOpus_PacketHasFec(const uint8_t* payload,
+ int payload_length_bytes) {
+ int frames, channels, payload_length_ms;
+ int n;
+ opus_int16 frame_sizes[48];
+ const unsigned char *frame_data[48];
+
+ if (payload == NULL || payload_length_bytes <= 0)
+ return 0;
+
+ /* In CELT_ONLY mode, packets should not have FEC. */
+ if (payload[0] & 0x80)
+ return 0;
+
+ payload_length_ms = opus_packet_get_samples_per_frame(payload, 48000) / 48;
+ if (10 > payload_length_ms)
+ payload_length_ms = 10;
+
+ channels = opus_packet_get_nb_channels(payload);
+
+ switch (payload_length_ms) {
+ case 10:
+ case 20: {
+ frames = 1;
+ break;
+ }
+ case 40: {
+ frames = 2;
+ break;
+ }
+ case 60: {
+ frames = 3;
+ break;
+ }
+ default: {
+      return 0;  // This is in fact an invalid packet.
+ }
+ }
+
+ /* The following is to parse the LBRR flags. */
+ if (opus_packet_parse(payload, payload_length_bytes, NULL, frame_data,
+ frame_sizes, NULL) < 0) {
+ return 0;
+ }
+
+ if (frame_sizes[0] <= 1) {
+ return 0;
+ }
+
+ for (n = 0; n < channels; n++) {
+ if (frame_data[0][0] & (0x80 >> ((n + 1) * (frames + 1) - 1)))
+ return 1;
+ }
+
+ return 0;
+}
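
A quick worked check of the 2/3 scaling that both WebRtcOpus_DurationEst and WebRtcOpus_FecDurationEst apply: opus_packet_get_samples_per_frame reports the frame size at 48 kHz, e.g. 960 samples for a 20 ms frame, but the decoder currently resamples its output to 32 kHz, where the same 20 ms spans 960 * 2 / 3 = 640 samples. That is the value callers should expect until the resampling mentioned in the TODO comments is removed.
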
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
new file mode 100644
index 00000000000..16099c6d93a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
+#include "webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h"
+
+using ::std::string;
+using ::std::tr1::make_tuple;
+using ::testing::ValuesIn;
+
+namespace webrtc {
+
+static const int kOpusBlockDurationMs = 20;
+static const int kOpusInputSamplingKhz = 48;
+static const int kOpustOutputSamplingKhz = 32;
+
+class OpusSpeedTest : public AudioCodecSpeedTest {
+ protected:
+ OpusSpeedTest();
+ virtual void SetUp() OVERRIDE;
+ virtual void TearDown() OVERRIDE;
+ virtual float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
+ int max_bytes, int* encoded_bytes);
+ virtual float DecodeABlock(const uint8_t* bit_stream, int encoded_bytes,
+ int16_t* out_data);
+ WebRtcOpusEncInst* opus_encoder_;
+ WebRtcOpusDecInst* opus_decoder_;
+};
+
+OpusSpeedTest::OpusSpeedTest()
+ : AudioCodecSpeedTest(kOpusBlockDurationMs,
+ kOpusInputSamplingKhz,
+ kOpustOutputSamplingKhz),
+ opus_encoder_(NULL),
+ opus_decoder_(NULL) {
+}
+
+void OpusSpeedTest::SetUp() {
+ AudioCodecSpeedTest::SetUp();
+ /* Create encoder memory. */
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_encoder_, channels_));
+ EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_decoder_, channels_));
+ /* Set bitrate. */
+ EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_encoder_, bit_rate_));
+}
+
+void OpusSpeedTest::TearDown() {
+ AudioCodecSpeedTest::TearDown();
+ /* Free memory. */
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_));
+}
+
+float OpusSpeedTest::EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
+ int max_bytes, int* encoded_bytes) {
+ clock_t clocks = clock();
+ int value = WebRtcOpus_Encode(opus_encoder_, in_data,
+ input_length_sample_, max_bytes,
+ bit_stream);
+ clocks = clock() - clocks;
+ EXPECT_GT(value, 0);
+ *encoded_bytes = value;
+ return 1000.0 * clocks / CLOCKS_PER_SEC;
+}
+
+float OpusSpeedTest::DecodeABlock(const uint8_t* bit_stream,
+ int encoded_bytes, int16_t* out_data) {
+ int value;
+ int16_t audio_type;
+ clock_t clocks = clock();
+ value = WebRtcOpus_DecodeNew(opus_decoder_, bit_stream, encoded_bytes,
+ out_data, &audio_type);
+ clocks = clock() - clocks;
+ EXPECT_EQ(output_length_sample_, value);
+ return 1000.0 * clocks / CLOCKS_PER_SEC;
+}
+
+#define ADD_TEST(complexity) \
+TEST_P(OpusSpeedTest, OpusSetComplexityTest##complexity) { \
+ /* Test audio length in second. */ \
+ size_t kDurationSec = 400; \
+ /* Set complexity. */ \
+ printf("Setting complexity to %d ...\n", complexity); \
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_encoder_, complexity)); \
+ EncodeDecode(kDurationSec); \
+}
+
+ADD_TEST(10);
+ADD_TEST(9);
+ADD_TEST(8);
+ADD_TEST(7);
+ADD_TEST(6);
+ADD_TEST(5);
+ADD_TEST(4);
+ADD_TEST(3);
+ADD_TEST(2);
+ADD_TEST(1);
+ADD_TEST(0);
+
+// List all test cases: (channels, bit rate, filename, extension, save output).
+const coding_param param_set[] =
+ {make_tuple(1, 64000, string("audio_coding/speech_mono_32_48kHz"),
+ string("pcm"), true),
+ make_tuple(1, 32000, string("audio_coding/speech_mono_32_48kHz"),
+ string("pcm"), true),
+ make_tuple(2, 64000, string("audio_coding/music_stereo_48kHz"),
+ string("pcm"), true)};
+
+INSTANTIATE_TEST_CASE_P(AllTest, OpusSpeedTest,
+ ValuesIn(param_set));
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
index b699cf9df5e..ed876cd1050 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
@@ -202,6 +202,27 @@ TEST_F(OpusTest, OpusSetBitRate) {
EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_stereo_encoder_));
}
+TEST_F(OpusTest, OpusSetComplexity) {
+ // Test without creating encoder memory.
+ EXPECT_EQ(-1, WebRtcOpus_SetComplexity(opus_mono_encoder_, 9));
+ EXPECT_EQ(-1, WebRtcOpus_SetComplexity(opus_stereo_encoder_, 9));
+
+ // Create encoder memory, try with different complexities.
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_mono_encoder_, 1));
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_stereo_encoder_, 2));
+
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_mono_encoder_, 0));
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_stereo_encoder_, 0));
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_mono_encoder_, 10));
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_stereo_encoder_, 10));
+ EXPECT_EQ(-1, WebRtcOpus_SetComplexity(opus_mono_encoder_, 11));
+ EXPECT_EQ(-1, WebRtcOpus_SetComplexity(opus_stereo_encoder_, 11));
+
+ // Free memory.
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_mono_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_stereo_encoder_));
+}
+
// Encode and decode one frame (stereo), initialize the decoder and
// decode once more.
TEST_F(OpusTest, OpusDecodeInit) {
@@ -265,6 +286,47 @@ TEST_F(OpusTest, OpusDecodeInit) {
EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_stereo_decoder_new_));
}
+TEST_F(OpusTest, OpusEnableDisableFec) {
+ // Test without creating encoder memory.
+ EXPECT_EQ(-1, WebRtcOpus_EnableFec(opus_mono_encoder_));
+ EXPECT_EQ(-1, WebRtcOpus_DisableFec(opus_stereo_encoder_));
+
+ // Create encoder memory, try with different bitrates.
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_mono_encoder_, 1));
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_stereo_encoder_, 2));
+
+ EXPECT_EQ(0, WebRtcOpus_EnableFec(opus_mono_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_EnableFec(opus_stereo_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_DisableFec(opus_mono_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_DisableFec(opus_stereo_encoder_));
+
+ // Free memory.
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_mono_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_stereo_encoder_));
+}
+
+TEST_F(OpusTest, OpusSetPacketLossRate) {
+ // Test without creating encoder memory.
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_mono_encoder_, 50));
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_stereo_encoder_, 50));
+
+ // Create encoder memory, try with different bitrates.
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_mono_encoder_, 1));
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_stereo_encoder_, 2));
+
+ EXPECT_EQ(0, WebRtcOpus_SetPacketLossRate(opus_mono_encoder_, 50));
+ EXPECT_EQ(0, WebRtcOpus_SetPacketLossRate(opus_stereo_encoder_, 50));
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_mono_encoder_, -1));
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_stereo_encoder_, -1));
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_mono_encoder_, 101));
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_stereo_encoder_, 101));
+
+ // Free memory.
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_mono_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_stereo_encoder_));
+}
+
+
// PLC in mono mode.
TEST_F(OpusTest, OpusDecodePlcMono) {
// Create encoder memory.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/OWNERS
new file mode 100644
index 00000000000..bbffda7e492
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/OWNERS
@@ -0,0 +1,6 @@
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc
new file mode 100644
index 00000000000..c7cafdff9ba
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+using ::std::tr1::get;
+
+namespace webrtc {
+
+AudioCodecSpeedTest::AudioCodecSpeedTest(int block_duration_ms,
+ int input_sampling_khz,
+ int output_sampling_khz)
+ : block_duration_ms_(block_duration_ms),
+ input_sampling_khz_(input_sampling_khz),
+ output_sampling_khz_(output_sampling_khz),
+ input_length_sample_(block_duration_ms_ * input_sampling_khz_),
+ output_length_sample_(block_duration_ms_ * output_sampling_khz_),
+ data_pointer_(0),
+ loop_length_samples_(0),
+ max_bytes_(0),
+ encoded_bytes_(0),
+ encoding_time_ms_(0.0),
+ decoding_time_ms_(0.0),
+ out_file_(NULL) {
+}
+
+void AudioCodecSpeedTest::SetUp() {
+ channels_ = get<0>(GetParam());
+ bit_rate_ = get<1>(GetParam());
+ in_filename_ = test::ResourcePath(get<2>(GetParam()), get<3>(GetParam()));
+ save_out_data_ = get<4>(GetParam());
+
+ FILE* fp = fopen(in_filename_.c_str(), "rb");
+ assert(fp != NULL);
+
+ // Obtain file size.
+ fseek(fp, 0, SEEK_END);
+ loop_length_samples_ = ftell(fp) / sizeof(int16_t);
+ rewind(fp);
+
+ // Allocate memory to contain the whole file.
+ in_data_.reset(new int16_t[loop_length_samples_ +
+ input_length_sample_ * channels_]);
+
+ data_pointer_ = 0;
+
+ // Copy the file into the buffer.
+ ASSERT_EQ(fread(&in_data_[0], sizeof(int16_t), loop_length_samples_, fp),
+ loop_length_samples_);
+ fclose(fp);
+
+ // Add an extra block length of samples to the end of the array, starting
+ // over again from the beginning of the array. This is done to simplify
+ // the reading process when reading over the end of the loop.
+ memcpy(&in_data_[loop_length_samples_], &in_data_[0],
+ input_length_sample_ * channels_ * sizeof(int16_t));
+
+ max_bytes_ = input_length_sample_ * channels_ * sizeof(int16_t);
+ out_data_.reset(new int16_t[output_length_sample_ * channels_]);
+ bit_stream_.reset(new uint8_t[max_bytes_]);
+
+ if (save_out_data_) {
+ std::string out_filename =
+ ::testing::UnitTest::GetInstance()->current_test_info()->name();
+
+ // Erase '/'
+ size_t found;
+ while ((found = out_filename.find('/')) != std::string::npos)
+ out_filename.replace(found, 1, "_");
+
+ out_filename = test::OutputPath() + out_filename + ".pcm";
+
+ out_file_ = fopen(out_filename.c_str(), "wb");
+ assert(out_file_ != NULL);
+
+ printf("Output to be saved in %s.\n", out_filename.c_str());
+ }
+}
+
+void AudioCodecSpeedTest::TearDown() {
+ if (save_out_data_) {
+ fclose(out_file_);
+ }
+}
+
+void AudioCodecSpeedTest::EncodeDecode(size_t audio_duration_sec) {
+ size_t time_now_ms = 0;
+ float time_ms;
+
+ printf("Coding %d kHz-sampled %d-channel audio at %d bps ...\n",
+ input_sampling_khz_, channels_, bit_rate_);
+
+ while (time_now_ms < audio_duration_sec * 1000) {
+ // Encode & decode.
+ time_ms = EncodeABlock(&in_data_[data_pointer_], &bit_stream_[0],
+ max_bytes_, &encoded_bytes_);
+ encoding_time_ms_ += time_ms;
+ time_ms = DecodeABlock(&bit_stream_[0], encoded_bytes_, &out_data_[0]);
+ decoding_time_ms_ += time_ms;
+ if (save_out_data_) {
+ fwrite(&out_data_[0], sizeof(int16_t),
+ output_length_sample_ * channels_, out_file_);
+ }
+ data_pointer_ = (data_pointer_ + input_length_sample_ * channels_) %
+ loop_length_samples_;
+ time_now_ms += block_duration_ms_;
+ }
+
+ printf("Encoding: %.2f%% real time,\nDecoding: %.2f%% real time.\n",
+ (encoding_time_ms_ / audio_duration_sec) / 10.0,
+ (decoding_time_ms_ / audio_duration_sec) / 10.0);
+}
+
+} // namespace webrtc
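
The final printf reports codec speed as a percentage of real time: encoding_time_ms_ accumulates milliseconds of processing over audio_duration_sec seconds of audio, so encoding_time_ms_ / audio_duration_sec is milliseconds spent per second (1000 ms) of audio, and dividing by 10 turns that into a percentage. For example, 8000 ms of encoding time over 400 s of audio gives 8000 / 400 = 20 ms per second, i.e. 2% of real time.
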
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h
new file mode 100644
index 00000000000..2c9b45e4f86
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_
+
+#include <string>
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Define coding parameter as
+// <channels, bit_rate, file_name, extension, if_save_output>.
+typedef std::tr1::tuple<int, int, std::string, std::string, bool> coding_param;
+
+class AudioCodecSpeedTest : public testing::TestWithParam<coding_param> {
+ protected:
+ AudioCodecSpeedTest(int block_duration_ms,
+ int input_sampling_khz,
+ int output_sampling_khz);
+ virtual void SetUp();
+ virtual void TearDown();
+
+ // EncodeABlock(...) does the following:
+  // 1. encodes a block of audio, stored in |in_data|,
+  // 2. saves the bit stream to |bit_stream|, which is |max_bytes| bytes in size,
+  // 3. assigns |encoded_bytes| the length of the bit stream (in bytes),
+  // 4. returns the time (in milliseconds) spent on the actual encoding.
+ virtual float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
+ int max_bytes, int* encoded_bytes) = 0;
+
+ // DecodeABlock(...) does the following:
+  // 1. decodes the bit stream in |bit_stream|, which is |encoded_bytes| bytes
+  //    long,
+  // 2. saves the decoded audio in |out_data|,
+  // 3. returns the time (in milliseconds) spent on the actual decoding.
+ virtual float DecodeABlock(const uint8_t* bit_stream, int encoded_bytes,
+ int16_t* out_data) = 0;
+
+  // Encodes and decodes |audio_duration| seconds of audio, recording the
+  // runtime of encoding and decoding separately.
+ void EncodeDecode(size_t audio_duration);
+
+ int block_duration_ms_;
+ int input_sampling_khz_;
+ int output_sampling_khz_;
+
+ // Number of samples-per-channel in a frame.
+ int input_length_sample_;
+
+ // Expected output number of samples-per-channel in a frame.
+ int output_length_sample_;
+
+ scoped_ptr<int16_t[]> in_data_;
+ scoped_ptr<int16_t[]> out_data_;
+ size_t data_pointer_;
+ size_t loop_length_samples_;
+ scoped_ptr<uint8_t[]> bit_stream_;
+
+ // Maximum number of bytes in output bitstream for a frame of audio.
+ int max_bytes_;
+
+ int encoded_bytes_;
+ float encoding_time_ms_;
+ float decoding_time_ms_;
+ FILE* out_file_;
+
+ int channels_;
+
+  // Bit rate, in bits per second.
+ int bit_rate_;
+
+ std::string in_filename_;
+
+ // Determines whether to save the output to file.
+ bool save_out_data_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.gypi b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.gypi
new file mode 100644
index 00000000000..4d675e10cfa
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.gypi
@@ -0,0 +1,71 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'audio_codec_speed_tests',
+ 'type': '<(gtest_target_type)',
+ 'dependencies': [
+ 'audio_processing',
+ 'iSACFix',
+ 'webrtc_opus',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ ],
+ 'sources': [
+ 'audio_codec_speed_test.h',
+ 'audio_codec_speed_test.cc',
+ '<(webrtc_root)/modules/audio_coding/codecs/opus/opus_speed_test.cc',
+ '<(webrtc_root)/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc',
+ ],
+ 'conditions': [
+ # TODO(henrike): remove build_with_chromium==1 when the bots are
+ # using Chromium's buildbots.
+ ['build_with_chromium==1 and OS=="android"', {
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ ],
+ }],
+ ],
+ }],
+ 'conditions': [
+ # TODO(henrike): remove build_with_chromium==1 when the bots are using
+ # Chromium's buildbots.
+ ['build_with_chromium==1 and OS=="android"', {
+ 'targets': [
+ {
+ 'target_name': 'audio_codec_speed_tests_apk_target',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):audio_codec_speed_tests_apk',
+ ],
+ },
+ ],
+ }],
+ ['test_isolation_mode != "noop"', {
+ 'targets': [
+ {
+ 'target_name': 'audio_codec_speed_tests_run',
+ 'type': 'none',
+ 'dependencies': [
+ 'audio_codec_speed_tests',
+ ],
+ 'includes': [
+ '../../../../build/isolate.gypi',
+ 'audio_codec_speed_tests.isolate',
+ ],
+ 'sources': [
+ 'audio_codec_speed_tests.isolate',
+ ],
+ },
+ ],
+ }],
+ ],
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.isolate b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.isolate
new file mode 100644
index 00000000000..8c5a2bd0ec0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.isolate
@@ -0,0 +1,40 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'conditions': [
+ ['OS=="android"', {
+ 'variables': {
+ 'isolate_dependency_untracked': [
+ '<(DEPTH)/resources/',
+ '<(DEPTH)/data/',
+ ],
+ },
+ }],
+ ['OS=="linux" or OS=="mac" or OS=="win"', {
+ 'variables': {
+ 'command': [
+ '<(DEPTH)/testing/test_env.py',
+ '<(PRODUCT_DIR)/audio_codec_speed_tests<(EXECUTABLE_SUFFIX)',
+ ],
+ 'isolate_dependency_touched': [
+ '<(DEPTH)/DEPS',
+ ],
+ 'isolate_dependency_tracked': [
+ '<(DEPTH)/resources/audio_coding/music_stereo_48kHz.pcm',
+ '<(DEPTH)/resources/audio_coding/speech_mono_16kHz.pcm',
+ '<(DEPTH)/resources/audio_coding/speech_mono_32_48kHz.pcm',
+ '<(DEPTH)/testing/test_env.py',
+ '<(PRODUCT_DIR)/audio_codec_speed_tests<(EXECUTABLE_SUFFIX)',
+ ],
+ 'isolate_dependency_untracked': [
+ '<(DEPTH)/tools/swarming_client/',
+ ],
+ },
+ }],
+ ],
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/main/OWNERS
index e1e6256ca48..83880d21dc8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/OWNERS
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/OWNERS
@@ -1,3 +1,4 @@
tina.legrand@webrtc.org
turaj@webrtc.org
jan.skoglund@webrtc.org
+henrik.lundin@webrtc.org
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc
index fd30a137ae0..e55b6c4660f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc
@@ -20,7 +20,7 @@
#include <assert.h>
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
#include "webrtc/system_wrappers/interface/trace.h"
// Includes needed to create the codecs.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h
index 98869efeead..65be793e37b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h
@@ -18,7 +18,7 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_g722.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_g722.h
index 7216a574af8..6197a9d93ad 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_g722.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_g722.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G722_H_
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
typedef struct WebRtcG722EncInst G722EncInst;
typedef struct WebRtcG722DecInst G722DecInst;
@@ -40,7 +41,8 @@ class ACMG722 : public ACMGenericCodec {
int32_t Add10MsDataSafe(const uint32_t timestamp,
const int16_t* data,
const uint16_t length_smpl,
- const uint8_t audio_channel);
+ const uint8_t audio_channel)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
void DestructEncoderSafe();
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc
index aa8e8be0637..a4808c0e384 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc
@@ -26,7 +26,7 @@ namespace acm2 {
// Enum for CNG
enum {
kMaxPLCParamsCNG = WEBRTC_CNG_MAX_LPC_ORDER,
- kNewCNGNumPLCParams = 8
+ kNewCNGNumLPCParams = 8
};
// Interval for sending new CNG parameters (SID frames) is 100 msec.
@@ -56,9 +56,10 @@ ACMGenericCodec::ACMGenericCodec()
vad_mode_(VADNormal),
dtx_enabled_(false),
ptr_dtx_inst_(NULL),
- num_lpc_params_(kNewCNGNumPLCParams),
+ num_lpc_params_(kNewCNGNumLPCParams),
sent_cn_previous_(false),
prev_frame_cng_(0),
+ has_internal_fec_(false),
neteq_decode_lock_(NULL),
codec_wrapper_lock_(*RWLockWrapper::CreateRWLock()),
last_timestamp_(0xD87F3F9F),
@@ -546,7 +547,7 @@ void ACMGenericCodec::DestructEncoder() {
WebRtcCng_FreeEnc(ptr_dtx_inst_);
ptr_dtx_inst_ = NULL;
}
- num_lpc_params_ = kNewCNGNumPLCParams;
+ num_lpc_params_ = kNewCNGNumLPCParams;
DestructEncoderSafe();
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h
index d41580fff54..fa21ca015aa 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h
@@ -13,9 +13,10 @@
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/system_wrappers/interface/trace.h"
#define MAX_FRAME_SIZE_10MSEC 6
@@ -560,6 +561,46 @@ class ACMGenericCodec {
//
virtual AudioDecoder* Decoder(int /* codec_id */) { return NULL; }
+ ///////////////////////////////////////////////////////////////////////////
+ // bool HasInternalFEC()
+ // Used to check if the codec has internal FEC.
+ //
+ // Return value:
+ // true if the codec has an internal FEC, e.g. Opus.
+ // false otherwise.
+ //
+ bool HasInternalFEC() const { return has_internal_fec_; }
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetFEC();
+  // Enables or disables the codec's internal FEC. Has no effect on codecs
+  // that do not provide internal FEC.
+ //
+ // Input:
+ // -enable_fec : if true FEC will be enabled otherwise the FEC is
+ // disabled.
+ //
+ // Return value:
+ // -1 if failed, or the codec does not support FEC
+ // 0 if succeeded.
+ //
+ virtual int SetFEC(bool /* enable_fec */) { return -1; }
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetPacketLossRate()
+  // Sets the expected packet loss rate for encoding. Some encoders adapt to
+  // the expected loss rate, e.g. through FEC, to make the stream less
+  // sensitive to packet losses. Has no effect on other codecs.
+ //
+ // Input:
+ // -loss_rate : expected packet loss rate (0 -- 100 inclusive).
+ //
+ // Return value:
+ // -1 if failed,
+ // 0 if succeeded or packet loss rate is ignored.
+ //
+ virtual int SetPacketLossRate(int /* loss_rate */) { return 0; }
+
protected:
///////////////////////////////////////////////////////////////////////////
// All the functions with FunctionNameSafe(...) contain the actual
@@ -576,7 +617,8 @@ class ACMGenericCodec {
virtual int32_t Add10MsDataSafe(const uint32_t timestamp,
const int16_t* data,
const uint16_t length,
- const uint8_t audio_channel);
+ const uint8_t audio_channel)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// See EncoderParam() for the description of function, input(s)/output(s)
@@ -588,14 +630,15 @@ class ACMGenericCodec {
// See ResetEncoder() for the description of function, input(s)/output(s)
// and return value.
//
- int16_t ResetEncoderSafe();
+ int16_t ResetEncoderSafe() EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// See InitEncoder() for the description of function, input(s)/output(s)
// and return value.
//
int16_t InitEncoderSafe(WebRtcACMCodecParams* codec_params,
- bool force_initialization);
+ bool force_initialization)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// See InitDecoder() for the description of function, input(s)/output(s)
@@ -641,7 +684,8 @@ class ACMGenericCodec {
// See SetVAD() for the description of function, input(s)/output(s) and
// return value.
//
- int16_t SetVADSafe(bool* enable_dtx, bool* enable_vad, ACMVADMode* mode);
+ int16_t SetVADSafe(bool* enable_dtx, bool* enable_vad, ACMVADMode* mode)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// See ReplaceInternalDTX() for the description of function, input and
@@ -678,7 +722,8 @@ class ACMGenericCodec {
// -1 if failed,
// 0 if succeeded.
//
- int16_t EnableVAD(ACMVADMode mode);
+ int16_t EnableVAD(ACMVADMode mode)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// int16_t DisableVAD()
@@ -688,7 +733,7 @@ class ACMGenericCodec {
// -1 if failed,
// 0 if succeeded.
//
- int16_t DisableVAD();
+ int16_t DisableVAD() EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// int16_t EnableDTX()
@@ -699,7 +744,7 @@ class ACMGenericCodec {
// -1 if failed,
// 0 if succeeded.
//
- virtual int16_t EnableDTX();
+ virtual int16_t EnableDTX() EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// int16_t DisableDTX()
@@ -710,7 +755,7 @@ class ACMGenericCodec {
// -1 if failed,
// 0 if succeeded.
//
- virtual int16_t DisableDTX();
+ virtual int16_t DisableDTX() EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// int16_t InternalEncode()
@@ -838,7 +883,8 @@ class ACMGenericCodec {
//
int16_t ProcessFrameVADDTX(uint8_t* bitstream,
int16_t* bitstream_len_byte,
- int16_t* samples_processed);
+ int16_t* samples_processed)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// CurrentRate()
@@ -885,19 +931,23 @@ class ACMGenericCodec {
// True if the encoder instance initialized
bool encoder_initialized_;
- bool registered_in_neteq_;
+ const bool registered_in_neteq_; // TODO(henrik.lundin) Remove?
// VAD/DTX
bool has_internal_dtx_;
- WebRtcVadInst* ptr_vad_inst_;
- bool vad_enabled_;
- ACMVADMode vad_mode_;
- int16_t vad_label_[MAX_FRAME_SIZE_10MSEC];
- bool dtx_enabled_;
- WebRtcCngEncInst* ptr_dtx_inst_;
- uint8_t num_lpc_params_;
- bool sent_cn_previous_;
- int16_t prev_frame_cng_;
+ WebRtcVadInst* ptr_vad_inst_ GUARDED_BY(codec_wrapper_lock_);
+ bool vad_enabled_ GUARDED_BY(codec_wrapper_lock_);
+ ACMVADMode vad_mode_ GUARDED_BY(codec_wrapper_lock_);
+ int16_t vad_label_[MAX_FRAME_SIZE_10MSEC] GUARDED_BY(codec_wrapper_lock_);
+ bool dtx_enabled_ GUARDED_BY(codec_wrapper_lock_);
+ WebRtcCngEncInst* ptr_dtx_inst_ GUARDED_BY(codec_wrapper_lock_);
+ uint8_t num_lpc_params_ // TODO(henrik.lundin) Delete and
+ GUARDED_BY(codec_wrapper_lock_); // replace with kNewCNGNumLPCParams.
+ bool sent_cn_previous_ GUARDED_BY(codec_wrapper_lock_);
+ int16_t prev_frame_cng_ GUARDED_BY(codec_wrapper_lock_);
+
+ // FEC.
+ bool has_internal_fec_;
WebRtcACMCodecParams encoder_params_;
@@ -909,7 +959,7 @@ class ACMGenericCodec {
// such as buffers and state variables.
RWLockWrapper& codec_wrapper_lock_;
- uint32_t last_timestamp_;
+ uint32_t last_timestamp_ GUARDED_BY(codec_wrapper_lock_);
uint32_t unique_id_;
};
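
The GUARDED_BY and EXCLUSIVE_LOCKS_REQUIRED macros above come from the newly included thread_annotations.h and are checked by Clang's thread-safety analysis. A minimal sketch of the same locking pattern ACMGenericCodec uses here, where a public method takes the lock and then calls a *Safe() helper that requires it; the class and member names below are invented for illustration:

// Illustrative sketch only; SketchCodec and SetRate() are not part of the patch.
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_annotations.h"

class SketchCodec {
 public:
  explicit SketchCodec(webrtc::RWLockWrapper& lock) : lock_(lock), rate_(0) {}

  int SetRate(int rate) {
    webrtc::WriteLockScoped wl(lock_);  // Acquire before calling the *Safe() helper.
    return SetRateSafe(rate);
  }

 private:
  // The annotation documents that callers must already hold |lock_|.
  int SetRateSafe(int rate) EXCLUSIVE_LOCKS_REQUIRED(lock_) {
    rate_ = rate;
    return 0;
  }

  webrtc::RWLockWrapper& lock_;
  int rate_ GUARDED_BY(lock_);  // Only accessed while |lock_| is held.
};
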
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.cc
index e27284212fd..9fbcdd4cd8b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.cc
@@ -14,7 +14,8 @@
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#ifdef WEBRTC_CODEC_ISAC
@@ -59,14 +60,15 @@ static const int32_t kIsacRatesSwb[NR_ISAC_BANDWIDTHS] = {
#if (!defined(WEBRTC_CODEC_ISAC) && !defined(WEBRTC_CODEC_ISACFX))
ACMISAC::ACMISAC(int16_t /* codec_id */)
- : codec_inst_ptr_(NULL),
+ : codec_inst_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ codec_inst_ptr_(NULL),
is_enc_initialized_(false),
isac_coding_mode_(CHANNEL_INDEPENDENT),
enforce_frame_size_(false),
isac_currentBN_(32000),
samples_in10MsAudio_(160), // Initiates to 16 kHz mode.
- audio_decoder_(NULL),
- decoder_initialized_(false) {}
+ decoder_initialized_(false) {
+}
ACMISAC::~ACMISAC() {
return;
@@ -261,81 +263,14 @@ static uint16_t ACMISACFixGetDecSampRate(ACM_ISAC_STRUCT* /* inst */) {
#endif
-// Decoder class to be injected into NetEq.
-class AcmAudioDecoderIsac : public AudioDecoder {
- public:
- AcmAudioDecoderIsac(int codec_id, void* state)
- : AudioDecoder(ACMCodecDB::neteq_decoders_[codec_id]) {
- state_ = state;
- }
-
- // ACMISAC is the owner of the object where |state_| is pointing to.
- // Therefore, it should not be deleted in this destructor.
- virtual ~AcmAudioDecoderIsac() {}
-
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type;
- int ret = ACM_ISAC_DECODE_B(static_cast<ACM_ISAC_STRUCT*>(state_),
- reinterpret_cast<const uint16_t*>(encoded),
- static_cast<int16_t>(encoded_len), decoded,
- &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
- }
-
- virtual bool HasDecodePlc() const { return true; }
-
- virtual int DecodePlc(int num_frames, int16_t* decoded) {
- return ACM_ISAC_DECODEPLC(static_cast<ACM_ISAC_STRUCT*>(state_),
- decoded, static_cast<int16_t>(num_frames));
- }
-
- virtual int Init() {
- return 0; // We expect that the initialized instance is injected in the
- // constructor.
- }
-
- virtual int IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp) {
- return ACM_ISAC_DECODE_BWE(static_cast<ACM_ISAC_STRUCT*>(state_),
- reinterpret_cast<const uint16_t*>(payload),
- static_cast<uint32_t>(payload_len),
- rtp_sequence_number,
- rtp_timestamp,
- arrival_timestamp);
- }
-
- virtual int DecodeRedundant(const uint8_t* encoded,
- size_t encoded_len, int16_t* decoded,
- SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = ACM_ISAC_DECODERCU(static_cast<ACM_ISAC_STRUCT*>(state_),
- reinterpret_cast<const uint16_t*>(encoded),
- static_cast<int16_t>(encoded_len), decoded,
- &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
- }
-
- virtual int ErrorCode() {
- return ACM_ISAC_GETERRORCODE(static_cast<ACM_ISAC_STRUCT*>(state_));
- }
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AcmAudioDecoderIsac);
-};
-
ACMISAC::ACMISAC(int16_t codec_id)
- : is_enc_initialized_(false),
+ : AudioDecoder(ACMCodecDB::neteq_decoders_[codec_id]),
+ codec_inst_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ is_enc_initialized_(false),
isac_coding_mode_(CHANNEL_INDEPENDENT),
enforce_frame_size_(false),
isac_current_bn_(32000),
samples_in_10ms_audio_(160), // Initiates to 16 kHz mode.
- audio_decoder_(NULL),
decoder_initialized_(false) {
codec_id_ = codec_id;
@@ -345,14 +280,10 @@ ACMISAC::ACMISAC(int16_t codec_id)
return;
}
codec_inst_ptr_->inst = NULL;
+ state_ = codec_inst_ptr_;
}
ACMISAC::~ACMISAC() {
- if (audio_decoder_ != NULL) {
- delete audio_decoder_;
- audio_decoder_ = NULL;
- }
-
if (codec_inst_ptr_ != NULL) {
if (codec_inst_ptr_->inst != NULL) {
ACM_ISAC_FREE(codec_inst_ptr_->inst);
@@ -364,6 +295,34 @@ ACMISAC::~ACMISAC() {
return;
}
+int16_t ACMISAC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
+ // set decoder sampling frequency.
+ if (codec_params->codec_inst.plfreq == 32000 ||
+ codec_params->codec_inst.plfreq == 48000) {
+ UpdateDecoderSampFreq(ACMCodecDB::kISACSWB);
+ } else {
+ UpdateDecoderSampFreq(ACMCodecDB::kISAC);
+ }
+
+ // In one-way communication we may never register a send codec. However,
+ // we still want the BWE to work properly, so it has to be initialized.
+ // The BWE is initialized when the iSAC encoder is initialized, so we
+ // initialize the encoder here if needed.
+ if (!encoder_initialized_) {
+ // Since we don't require a valid rate or packet size when initializing
+ // the decoder, we set valid values before initializing the encoder.
+ codec_params->codec_inst.rate = kIsacWbDefaultRate;
+ codec_params->codec_inst.pacsize = kIsacPacSize960;
+ if (InternalInitEncoder(codec_params) < 0) {
+ return -1;
+ }
+ encoder_initialized_ = true;
+ }
+
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ return ACM_ISAC_DECODERINIT(codec_inst_ptr_->inst);
+}
+
ACMGenericCodec* ACMISAC::CreateInstance(void) { return NULL; }
int16_t ACMISAC::InternalEncode(uint8_t* bitstream,
@@ -375,6 +334,7 @@ int16_t ACMISAC::InternalEncode(uint8_t* bitstream,
// at the first 10ms pushed in to iSAC if the bit-rate is low, this is
// sort of a bug in iSAC. To address this, we treat iSAC as the
// following.
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (codec_inst_ptr_ == NULL) {
return -1;
}
@@ -428,6 +388,7 @@ int16_t ACMISAC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
if (UpdateEncoderSampFreq((uint16_t)codec_params->codec_inst.plfreq) < 0) {
return -1;
}
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (ACM_ISAC_ENCODERINIT(codec_inst_ptr_->inst, isac_coding_mode_) < 0) {
return -1;
}
@@ -450,38 +411,8 @@ int16_t ACMISAC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
return 0;
}
-int16_t ACMISAC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
-
- // set decoder sampling frequency.
- if (codec_params->codec_inst.plfreq == 32000 ||
- codec_params->codec_inst.plfreq == 48000) {
- UpdateDecoderSampFreq(ACMCodecDB::kISACSWB);
- } else {
- UpdateDecoderSampFreq(ACMCodecDB::kISAC);
- }
-
- // in a one-way communication we may never register send-codec.
- // However we like that the BWE to work properly so it has to
- // be initialized. The BWE is initialized when iSAC encoder is initialized.
- // Therefore, we need this.
- if (!encoder_initialized_) {
- // Since we don't require a valid rate or a valid packet size when
- // initializing the decoder, we set valid values before initializing encoder
- codec_params->codec_inst.rate = kIsacWbDefaultRate;
- codec_params->codec_inst.pacsize = kIsacPacSize960;
- if (InternalInitEncoder(codec_params) < 0) {
- return -1;
- }
- encoder_initialized_ = true;
- }
-
- return ACM_ISAC_DECODERINIT(codec_inst_ptr_->inst);
-}
-
int16_t ACMISAC::InternalCreateEncoder() {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (codec_inst_ptr_ == NULL) {
return -1;
}
@@ -493,19 +424,6 @@ int16_t ACMISAC::InternalCreateEncoder() {
return status;
}
-void ACMISAC::DestructEncoderSafe() {
- // codec with shared instance cannot delete.
- encoder_initialized_ = false;
- return;
-}
-
-void ACMISAC::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- ACM_ISAC_FREE(static_cast<ACM_ISAC_STRUCT *>(ptr_inst));
- }
- return;
-}
-
int16_t ACMISAC::Transcode(uint8_t* bitstream,
int16_t* bitstream_len_byte,
int16_t q_bwe,
@@ -513,6 +431,7 @@ int16_t ACMISAC::Transcode(uint8_t* bitstream,
bool is_red) {
int16_t jitter_info = 0;
// transcode from a higher rate to lower rate sanity check
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (codec_inst_ptr_ == NULL) {
return -1;
}
@@ -530,7 +449,27 @@ int16_t ACMISAC::Transcode(uint8_t* bitstream,
}
}
+void ACMISAC::UpdateFrameLen() {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ frame_len_smpl_ = ACM_ISAC_GETNEWFRAMELEN(codec_inst_ptr_->inst);
+ encoder_params_.codec_inst.pacsize = frame_len_smpl_;
+}
+
+void ACMISAC::DestructEncoderSafe() {
+ // codec with shared instance cannot delete.
+ encoder_initialized_ = false;
+ return;
+}
+
+void ACMISAC::InternalDestructEncoderInst(void* ptr_inst) {
+ if (ptr_inst != NULL) {
+ ACM_ISAC_FREE(static_cast<ACM_ISAC_STRUCT *>(ptr_inst));
+ }
+ return;
+}
+
int16_t ACMISAC::SetBitRateSafe(int32_t bit_rate) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (codec_inst_ptr_ == NULL) {
return -1;
}
@@ -594,6 +533,7 @@ int32_t ACMISAC::GetEstimatedBandwidthSafe() {
int samp_rate;
// Get bandwidth information
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
ACM_ISAC_GETSENDBWE(codec_inst_ptr_->inst, &bandwidth_index, &delay_index);
// Validity check of index
@@ -615,6 +555,7 @@ int32_t ACMISAC::SetEstimatedBandwidthSafe(int32_t estimated_bandwidth) {
int16_t bandwidth_index;
// Check sample frequency and choose appropriate table
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
samp_rate = ACM_ISAC_GETENCSAMPRATE(codec_inst_ptr_->inst);
if (samp_rate == 16000) {
@@ -657,6 +598,7 @@ int32_t ACMISAC::GetRedPayloadSafe(
return -1;
#else
uint8_t* red_payload, int16_t* payload_bytes) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
int16_t bytes =
WebRtcIsac_GetRedPayload(
codec_inst_ptr_->inst, reinterpret_cast<int16_t*>(red_payload));
@@ -672,6 +614,7 @@ int16_t ACMISAC::UpdateDecoderSampFreq(
#ifdef WEBRTC_CODEC_ISAC
int16_t codec_id) {
// The decoder supports only wideband and super-wideband.
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (ACMCodecDB::kISAC == codec_id) {
return WebRtcIsac_SetDecSampRate(codec_inst_ptr_->inst, 16000);
} else if (ACMCodecDB::kISACSWB == codec_id ||
@@ -700,6 +643,7 @@ int16_t ACMISAC::UpdateEncoderSampFreq(
in_audio_ix_read_ = 0;
in_audio_ix_write_ = 0;
in_timestamp_ix_write_ = 0;
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (WebRtcIsac_SetEncSampRate(codec_inst_ptr_->inst,
encoder_samp_freq_hz) < 0) {
return -1;
@@ -718,6 +662,7 @@ int16_t ACMISAC::UpdateEncoderSampFreq(
}
int16_t ACMISAC::EncoderSampFreq(uint16_t* samp_freq_hz) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
*samp_freq_hz = ACM_ISAC_GETENCSAMPRATE(codec_inst_ptr_->inst);
return 0;
}
@@ -730,6 +675,7 @@ int32_t ACMISAC::ConfigISACBandwidthEstimator(
{
uint16_t samp_freq_hz;
EncoderSampFreq(&samp_freq_hz);
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
// TODO(turajs): at 32kHz we hardcode calling with 30ms and enforce
// the frame-size otherwise we might get error. Revise if
// control-bwe is changed.
@@ -749,26 +695,25 @@ int32_t ACMISAC::ConfigISACBandwidthEstimator(
return -1;
}
UpdateFrameLen();
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &isac_current_bn_);
return 0;
}
int32_t ACMISAC::SetISACMaxPayloadSize(const uint16_t max_payload_len_bytes) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
return ACM_ISAC_SETMAXPAYLOADSIZE(codec_inst_ptr_->inst,
max_payload_len_bytes);
}
int32_t ACMISAC::SetISACMaxRate(const uint32_t max_rate_bit_per_sec) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
return ACM_ISAC_SETMAXRATE(codec_inst_ptr_->inst, max_rate_bit_per_sec);
}
-void ACMISAC::UpdateFrameLen() {
- frame_len_smpl_ = ACM_ISAC_GETNEWFRAMELEN(codec_inst_ptr_->inst);
- encoder_params_.codec_inst.pacsize = frame_len_smpl_;
-}
-
void ACMISAC::CurrentRate(int32_t* rate_bit_per_sec) {
if (isac_coding_mode_ == ADAPTIVE) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, rate_bit_per_sec);
}
}
@@ -784,12 +729,71 @@ int16_t ACMISAC::REDPayloadISAC(const int32_t isac_rate,
return status;
}
-AudioDecoder* ACMISAC::Decoder(int codec_id) {
- if (audio_decoder_)
- return audio_decoder_;
+int ACMISAC::Decode(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) {
+ int16_t temp_type;
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ int ret =
+ ACM_ISAC_DECODE_B(static_cast<ACM_ISAC_STRUCT*>(codec_inst_ptr_->inst),
+ reinterpret_cast<const uint16_t*>(encoded),
+ static_cast<int16_t>(encoded_len),
+ decoded,
+ &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int ACMISAC::DecodePlc(int num_frames, int16_t* decoded) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ return ACM_ISAC_DECODEPLC(
+ static_cast<ACM_ISAC_STRUCT*>(codec_inst_ptr_->inst),
+ decoded,
+ static_cast<int16_t>(num_frames));
+}
+
+int ACMISAC::IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ return ACM_ISAC_DECODE_BWE(
+ static_cast<ACM_ISAC_STRUCT*>(codec_inst_ptr_->inst),
+ reinterpret_cast<const uint16_t*>(payload),
+ static_cast<uint32_t>(payload_len),
+ rtp_sequence_number,
+ rtp_timestamp,
+ arrival_timestamp);
+}
+
+int ACMISAC::DecodeRedundant(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ int16_t ret =
+ ACM_ISAC_DECODERCU(static_cast<ACM_ISAC_STRUCT*>(codec_inst_ptr_->inst),
+ reinterpret_cast<const uint16_t*>(encoded),
+ static_cast<int16_t>(encoded_len),
+ decoded,
+ &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int ACMISAC::ErrorCode() {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ return ACM_ISAC_GETERRORCODE(
+ static_cast<ACM_ISAC_STRUCT*>(codec_inst_ptr_->inst));
+}
+AudioDecoder* ACMISAC::Decoder(int codec_id) {
// Create iSAC instance if it does not exist.
if (!encoder_exist_) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
assert(codec_inst_ptr_->inst == NULL);
encoder_initialized_ = false;
decoder_initialized_ = false;
@@ -822,8 +826,7 @@ AudioDecoder* ACMISAC::Decoder(int codec_id) {
decoder_initialized_ = true;
}
- audio_decoder_ = new AcmAudioDecoderIsac(codec_id, codec_inst_ptr_->inst);
- return audio_decoder_;
+ return this;
}
#endif
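
With the AcmAudioDecoderIsac wrapper gone, ACMISAC serializes access to the shared instance itself: every method that touches |codec_inst_ptr_| takes |codec_inst_crit_sect_| first. A reduced sketch of that pattern; the class name and the FakeInst stand-in below are not the real code:

#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

struct FakeInst { int last_error; };  // Stand-in for the codec's C state.

class LockedCodecState {
 public:
  LockedCodecState()
      : crit_sect_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
        inst_(new FakeInst()) {}
  ~LockedCodecState() { delete inst_; }

  int ErrorCode() {
    // Same shape as ACMISAC::ErrorCode() above: lock, then touch the state.
    webrtc::CriticalSectionScoped lock(crit_sect_.get());
    return inst_->last_error;
  }

 private:
  const webrtc::scoped_ptr<webrtc::CriticalSectionWrapper> crit_sect_;
  FakeInst* inst_;  // GUARDED_BY(crit_sect_) in the real header.
};
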
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.h
index a3227d5d0b3..3249526f7d8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.h
@@ -12,86 +12,119 @@
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_ISAC_H_
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
namespace webrtc {
+class CriticalSectionWrapper;
+
namespace acm2 {
struct ACMISACInst;
-class AcmAudioDecoderIsac;
enum IsacCodingMode {
ADAPTIVE,
CHANNEL_INDEPENDENT
};
-class ACMISAC : public ACMGenericCodec {
+class ACMISAC : public ACMGenericCodec, AudioDecoder {
public:
explicit ACMISAC(int16_t codec_id);
~ACMISAC();
- // for FEC
- ACMGenericCodec* CreateInstance(void);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams* codec_params);
- int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
+ // Methods below are inherited from ACMGenericCodec.
+ ACMGenericCodec* CreateInstance(void) OVERRIDE;
- int16_t InternalInitEncoder(WebRtcACMCodecParams* codec_params);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) OVERRIDE;
- int16_t InternalInitDecoder(WebRtcACMCodecParams* codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams* codec_params) OVERRIDE;
- int16_t UpdateDecoderSampFreq(int16_t codec_id);
+ int16_t UpdateDecoderSampFreq(int16_t codec_id) OVERRIDE;
- int16_t UpdateEncoderSampFreq(uint16_t samp_freq_hz);
+ int16_t UpdateEncoderSampFreq(uint16_t samp_freq_hz) OVERRIDE;
- int16_t EncoderSampFreq(uint16_t* samp_freq_hz);
+ int16_t EncoderSampFreq(uint16_t* samp_freq_hz) OVERRIDE;
int32_t ConfigISACBandwidthEstimator(const uint8_t init_frame_size_msec,
const uint16_t init_rate_bit_per_sec,
- const bool enforce_frame_size);
+ const bool enforce_frame_size) OVERRIDE;
- int32_t SetISACMaxPayloadSize(const uint16_t max_payload_len_bytes);
+ int32_t SetISACMaxPayloadSize(const uint16_t max_payload_len_bytes) OVERRIDE;
- int32_t SetISACMaxRate(const uint32_t max_rate_bit_per_sec);
+ int32_t SetISACMaxRate(const uint32_t max_rate_bit_per_sec) OVERRIDE;
int16_t REDPayloadISAC(const int32_t isac_rate,
const int16_t isac_bw_estimate,
uint8_t* payload,
- int16_t* payload_len_bytes);
+ int16_t* payload_len_bytes) OVERRIDE;
- protected:
- void DestructEncoderSafe();
+ // Methods below are inherited from AudioDecoder.
+ virtual int Decode(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) OVERRIDE;
- int16_t SetBitRateSafe(const int32_t bit_rate);
+ virtual bool HasDecodePlc() const OVERRIDE { return true; }
- int32_t GetEstimatedBandwidthSafe();
+ virtual int DecodePlc(int num_frames, int16_t* decoded) OVERRIDE;
- int32_t SetEstimatedBandwidthSafe(int32_t estimated_bandwidth);
+ virtual int Init() OVERRIDE { return 0; }
- int32_t GetRedPayloadSafe(uint8_t* red_payload, int16_t* payload_bytes);
+ virtual int IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp) OVERRIDE;
- int16_t InternalCreateEncoder();
+ virtual int DecodeRedundant(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) OVERRIDE;
- void InternalDestructEncoderInst(void* ptr_inst);
+ virtual int ErrorCode() OVERRIDE;
+ protected:
int16_t Transcode(uint8_t* bitstream,
int16_t* bitstream_len_byte,
int16_t q_bwe,
int32_t rate,
bool is_red);
- void CurrentRate(int32_t* rate_bit_per_sec);
-
void UpdateFrameLen();
- virtual AudioDecoder* Decoder(int codec_id);
+ // Methods below are inherited from ACMGenericCodec.
+ void DestructEncoderSafe() OVERRIDE;
+
+ int16_t SetBitRateSafe(const int32_t bit_rate) OVERRIDE;
+
+ int32_t GetEstimatedBandwidthSafe() OVERRIDE;
+
+ int32_t SetEstimatedBandwidthSafe(int32_t estimated_bandwidth) OVERRIDE;
+
+ int32_t GetRedPayloadSafe(uint8_t* red_payload,
+ int16_t* payload_bytes) OVERRIDE;
+
+ int16_t InternalCreateEncoder() OVERRIDE;
+
+ void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
+
+ void CurrentRate(int32_t* rate_bit_per_sec) OVERRIDE;
+
+ virtual AudioDecoder* Decoder(int codec_id) OVERRIDE;
- ACMISACInst* codec_inst_ptr_;
+ // |codec_inst_crit_sect_| protects |codec_inst_ptr_|.
+ const scoped_ptr<CriticalSectionWrapper> codec_inst_crit_sect_;
+ ACMISACInst* codec_inst_ptr_ GUARDED_BY(codec_inst_crit_sect_);
bool is_enc_initialized_;
IsacCodingMode isac_coding_mode_;
bool enforce_frame_size_;
int32_t isac_current_bn_;
uint16_t samples_in_10ms_audio_;
- AcmAudioDecoderIsac* audio_decoder_;
bool decoder_initialized_;
};
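
Since ACMISAC is now its own AudioDecoder, Decoder() returns this, and that pointer is what AcmReceiver later hands to NetEq as an external decoder (see the RegisterExternalDecoder change further down). A hypothetical glue function showing that flow; the helper itself is not part of the patch:

#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"

int RegisterSelfDecoder(webrtc::NetEq* neteq,
                        webrtc::acm2::ACMGenericCodec* codec,
                        int codec_id,
                        webrtc::NetEqDecoder neteq_decoder,
                        uint8_t payload_type) {
  // For ACMISAC this now returns the codec object itself.
  webrtc::AudioDecoder* decoder = codec->Decoder(codec_id);
  if (decoder == NULL)
    return -1;
  return neteq->RegisterExternalDecoder(decoder, neteq_decoder, payload_type);
}
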
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.cc
index c00a9203a9d..544c932f39f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.cc
@@ -75,6 +75,8 @@ ACMOpus::ACMOpus(int16_t codec_id)
// Opus has internal DTX, but we don't use it for now.
has_internal_dtx_ = false;
+ has_internal_fec_ = true;
+
if (codec_id_ != ACMCodecDB::kOpus) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"Wrong codec id for Opus.");
@@ -140,6 +142,20 @@ int16_t ACMOpus::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
// Store bitrate.
bitrate_ = codec_params->codec_inst.rate;
+ // TODO(tlegrand): Remove this code when we have proper APIs to set the
+ // complexity at a higher level.
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) || defined(WEBRTC_ARCH_ARM)
+ // If we are on Android, iOS and/or ARM, use a lower complexity setting by
+ // default, to reduce the encoder's CPU load.
+ const int kOpusComplexity5 = 5;
+ ret = WebRtcOpus_SetComplexity(encoder_inst_ptr_, kOpusComplexity5);
+ if (ret < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
+ "Setting complexity failed for Opus");
+ return ret;
+ }
+#endif
+
return 0;
}
@@ -184,6 +200,31 @@ int16_t ACMOpus::SetBitRateSafe(const int32_t rate) {
return -1;
}
+int ACMOpus::SetFEC(bool enable_fec) {
+ // Ask the encoder to enable FEC.
+ if (enable_fec) {
+ if (WebRtcOpus_EnableFec(encoder_inst_ptr_) == 0) {
+ fec_enabled_ = true;
+ return 0;
+ }
+ } else {
+ if (WebRtcOpus_DisableFec(encoder_inst_ptr_) == 0) {
+ fec_enabled_ = false;
+ return 0;
+ }
+ }
+ return -1;
+}
+
+int ACMOpus::SetPacketLossRate(int loss_rate) {
+ // Ask the encoder to change the target packet loss rate.
+ if (WebRtcOpus_SetPacketLossRate(encoder_inst_ptr_, loss_rate) == 0) {
+ packet_loss_rate_ = loss_rate;
+ return 0;
+ }
+ return -1;
+}
+
#endif // WEBRTC_CODEC_OPUS
} // namespace acm2
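
A hypothetical caller wiring the two new entry points together: enable Opus in-band FEC, then keep the encoder informed of the observed loss rate. The helper below is illustrative and not part of the patch:

#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"

void ConfigureCodecFec(webrtc::acm2::ACMGenericCodec* codec,
                       int observed_loss_percent) {
  if (!codec->HasInternalFEC())
    return;  // Codec has no in-band FEC; SetFEC() would just return -1.
  if (codec->SetFEC(true) != 0)
    return;  // Encoder refused to enable FEC.
  // 0..100, per the SetPacketLossRate() contract in acm_generic_codec.h.
  codec->SetPacketLossRate(observed_loss_percent);
}
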
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.h
index a346e3c8ff3..07ce0721686 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.h
@@ -32,6 +32,10 @@ class ACMOpus : public ACMGenericCodec {
int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ virtual int SetFEC(bool enable_fec) OVERRIDE;
+
+ virtual int SetPacketLossRate(int loss_rate) OVERRIDE;
+
protected:
void DestructEncoderSafe();
@@ -43,8 +47,11 @@ class ACMOpus : public ACMGenericCodec {
WebRtcOpusEncInst* encoder_inst_ptr_;
uint16_t sample_freq_;
- uint16_t bitrate_;
+ int32_t bitrate_;
int channels_;
+
+ bool fec_enabled_;
+ int packet_loss_rate_;
};
} // namespace acm2
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc
index ac92198f92b..cb7c4184079 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc
@@ -21,8 +21,9 @@
#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
#include "webrtc/modules/audio_coding/main/acm2/nack.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
@@ -35,7 +36,6 @@ namespace acm2 {
namespace {
-const int kNeteqInitSampleRateHz = 16000;
const int kNackThresholdPackets = 2;
// |vad_activity_| field of |audio_frame| is set to |previous_audio_activity_|
@@ -117,21 +117,23 @@ bool IsCng(int codec_id) {
} // namespace
-AcmReceiver::AcmReceiver()
- : id_(0),
- neteq_(NetEq::Create(kNeteqInitSampleRateHz)),
+AcmReceiver::AcmReceiver(const AudioCodingModule::Config& config)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ id_(config.id),
last_audio_decoder_(-1), // Invalid value.
- decode_lock_(RWLockWrapper::CreateRWLock()),
- neteq_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vad_enabled_(true),
- previous_audio_activity_(AudioFrame::kVadUnknown),
- current_sample_rate_hz_(kNeteqInitSampleRateHz),
+ previous_audio_activity_(AudioFrame::kVadPassive),
+ current_sample_rate_hz_(config.neteq_config.sample_rate_hz),
nack_(),
nack_enabled_(false),
+ neteq_(NetEq::Create(config.neteq_config)),
+ decode_lock_(RWLockWrapper::CreateRWLock()),
+ vad_enabled_(true),
+ clock_(config.clock),
av_sync_(false),
initial_delay_manager_(),
missing_packets_sync_stream_(),
late_packets_sync_stream_() {
+ assert(clock_);
for (int n = 0; n < ACMCodecDB::kMaxNumCodecs; ++n) {
decoders_[n].registered = false;
}
@@ -148,7 +150,6 @@ AcmReceiver::AcmReceiver()
AcmReceiver::~AcmReceiver() {
delete neteq_;
delete decode_lock_;
- delete neteq_crit_sect_;
}
int AcmReceiver::SetMinimumDelay(int delay_ms) {
@@ -162,7 +163,7 @@ int AcmReceiver::SetInitialDelay(int delay_ms) {
if (delay_ms < 0 || delay_ms > 10000) {
return -1;
}
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (delay_ms == 0) {
av_sync_ = false;
@@ -206,7 +207,7 @@ int AcmReceiver::LeastRequiredDelayMs() const {
}
int AcmReceiver::current_sample_rate_hz() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
return current_sample_rate_hz_;
}
@@ -269,7 +270,7 @@ int AcmReceiver::InsertPacket(const WebRtcRTPHeader& rtp_header,
const RTPHeader* header = &rtp_header.header; // Just a shorthand.
{
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
int codec_id = RtpHeaderToCodecIndex(*header, incoming_payload);
if (codec_id < 0) {
@@ -328,7 +329,7 @@ int AcmReceiver::InsertPacket(const WebRtcRTPHeader& rtp_header,
rtp_header, receive_timestamp, packet_type, new_codec, sample_rate_hz,
missing_packets_sync_stream_.get());
}
- }
+ } // |crit_sect_| is released.
{
WriteLockScoped lock_codecs(*decode_lock_); // Lock to prevent an encoding.
@@ -359,7 +360,7 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
{
// Accessing members, take the lock.
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (av_sync_) {
assert(initial_delay_manager_.get());
@@ -404,7 +405,7 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
}
// Accessing members, take the lock.
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
// Update NACK.
int decoded_sequence_num = 0;
@@ -426,9 +427,13 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
if (ptr_audio_buffer == audio_buffer_) {
// Data is written to local buffer.
if (need_resampling) {
- samples_per_channel = resampler_.Resample10Msec(
- audio_buffer_, current_sample_rate_hz_, desired_freq_hz,
- num_channels, audio_frame->data_);
+ samples_per_channel =
+ resampler_.Resample10Msec(audio_buffer_,
+ current_sample_rate_hz_,
+ desired_freq_hz,
+ num_channels,
+ AudioFrame::kMaxDataSizeSamples,
+ audio_frame->data_);
if (samples_per_channel < 0) {
LOG_FERR0(LS_ERROR, "AcmReceiver::GetAudio") << "Resampler Failed.";
return -1;
@@ -442,9 +447,13 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
// Data is written into |audio_frame|.
if (need_resampling) {
// We might end up here ONLY if codec is changed.
- samples_per_channel = resampler_.Resample10Msec(
- audio_frame->data_, current_sample_rate_hz_, desired_freq_hz,
- num_channels, audio_buffer_);
+ samples_per_channel =
+ resampler_.Resample10Msec(audio_frame->data_,
+ current_sample_rate_hz_,
+ desired_freq_hz,
+ num_channels,
+ AudioFrame::kMaxDataSizeSamples,
+ audio_buffer_);
if (samples_per_channel < 0) {
LOG_FERR0(LS_ERROR, "AcmReceiver::GetAudio") << "Resampler Failed.";
return -1;
@@ -463,6 +472,19 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
SetAudioFrameActivityAndType(vad_enabled_, type, audio_frame);
previous_audio_activity_ = audio_frame->vad_activity_;
call_stats_.DecodedByNetEq(audio_frame->speech_type_);
+
+ // Computes the RTP timestamp of the first sample in |audio_frame| from
+ // |GetPlayoutTimestamp|, which is the timestamp of the last sample of
+ // |audio_frame|.
+ uint32_t playout_timestamp = 0;
+ if (GetPlayoutTimestamp(&playout_timestamp)) {
+ audio_frame->timestamp_ =
+ playout_timestamp - audio_frame->samples_per_channel_;
+ } else {
+ // Remains 0 until we have a valid |playout_timestamp|.
+ audio_frame->timestamp_ = 0;
+ }
+
return 0;
}
@@ -473,19 +495,25 @@ int32_t AcmReceiver::AddCodec(int acm_codec_id,
assert(acm_codec_id >= 0 && acm_codec_id < ACMCodecDB::kMaxNumCodecs);
NetEqDecoder neteq_decoder = ACMCodecDB::neteq_decoders_[acm_codec_id];
- CriticalSectionScoped lock(neteq_crit_sect_);
+ // Make sure the right decoder is registered for Opus.
+ if (neteq_decoder == kDecoderOpus && channels == 2) {
+ neteq_decoder = kDecoderOpus_2ch;
+ }
+
+ CriticalSectionScoped lock(crit_sect_.get());
// The corresponding NetEq decoder ID.
// If this coder has been registered before.
if (decoders_[acm_codec_id].registered) {
- if (decoders_[acm_codec_id].payload_type == payload_type) {
+ if (decoders_[acm_codec_id].payload_type == payload_type &&
+ decoders_[acm_codec_id].channels == channels) {
// Re-registering the same codec with the same payload-type. Do nothing
// and return.
return 0;
}
- // Changing the payload-type of this codec. First unregister. Then register
- // with new payload-type.
+ // Changing the payload-type or number of channels for this codec.
+ // First unregister. Then register with new payload-type/channels.
if (neteq_->RemovePayloadType(decoders_[acm_codec_id].payload_type) !=
NetEq::kOK) {
LOG_F(LS_ERROR) << "Cannot remover payload "
@@ -499,8 +527,7 @@ int32_t AcmReceiver::AddCodec(int acm_codec_id,
ret_val = neteq_->RegisterPayloadType(neteq_decoder, payload_type);
} else {
ret_val = neteq_->RegisterExternalDecoder(
- audio_decoder, neteq_decoder,
- ACMCodecDB::database_[acm_codec_id].plfreq, payload_type);
+ audio_decoder, neteq_decoder, payload_type);
}
if (ret_val != NetEq::kOK) {
LOG_FERR3(LS_ERROR, "AcmReceiver::AddCodec", acm_codec_id, payload_type,
@@ -519,13 +546,13 @@ int32_t AcmReceiver::AddCodec(int acm_codec_id,
void AcmReceiver::EnableVad() {
neteq_->EnableVad();
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
vad_enabled_ = true;
}
void AcmReceiver::DisableVad() {
neteq_->DisableVad();
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
vad_enabled_ = false;
}
@@ -537,7 +564,7 @@ void AcmReceiver::FlushBuffers() {
// many as it can.
int AcmReceiver::RemoveAllCodecs() {
int ret_val = 0;
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
for (int n = 0; n < ACMCodecDB::kMaxNumCodecs; ++n) {
if (decoders_[n].registered) {
if (neteq_->RemovePayloadType(decoders_[n].payload_type) == 0) {
@@ -557,15 +584,13 @@ int AcmReceiver::RemoveAllCodecs() {
int AcmReceiver::RemoveCodec(uint8_t payload_type) {
int codec_index = PayloadType2CodecIndex(payload_type);
if (codec_index < 0) { // Such a payload-type is not registered.
- LOG(LS_WARNING) << "payload_type " << payload_type << " is not registered,"
- " no action is taken.";
return 0;
}
if (neteq_->RemovePayloadType(payload_type) != NetEq::kOK) {
LOG_FERR1(LS_ERROR, "AcmReceiver::RemoveCodec", payload_type);
return -1;
}
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
decoders_[codec_index].registered = false;
if (last_audio_decoder_ == codec_index)
last_audio_decoder_ = -1; // Codec is removed, invalidate last decoder.
@@ -573,26 +598,27 @@ int AcmReceiver::RemoveCodec(uint8_t payload_type) {
}
void AcmReceiver::set_id(int id) {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
id_ = id;
}
-uint32_t AcmReceiver::PlayoutTimestamp() {
+bool AcmReceiver::GetPlayoutTimestamp(uint32_t* timestamp) {
if (av_sync_) {
assert(initial_delay_manager_.get());
- if (initial_delay_manager_->buffering())
- return initial_delay_manager_->playout_timestamp();
+ if (initial_delay_manager_->buffering()) {
+ return initial_delay_manager_->GetPlayoutTimestamp(timestamp);
+ }
}
- return neteq_->PlayoutTimestamp();
+ return neteq_->GetPlayoutTimestamp(timestamp);
}
int AcmReceiver::last_audio_codec_id() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
return last_audio_decoder_;
}
int AcmReceiver::last_audio_payload_type() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (last_audio_decoder_ < 0)
return -1;
assert(decoders_[last_audio_decoder_].registered);
@@ -600,7 +626,7 @@ int AcmReceiver::last_audio_payload_type() const {
}
int AcmReceiver::RedPayloadType() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (ACMCodecDB::kRED < 0 ||
!decoders_[ACMCodecDB::kRED].registered) {
LOG_F(LS_WARNING) << "RED is not registered.";
@@ -610,9 +636,8 @@ int AcmReceiver::RedPayloadType() const {
}
int AcmReceiver::LastAudioCodec(CodecInst* codec) const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (last_audio_decoder_ < 0) {
- LOG_F(LS_WARNING) << "No audio payload is received, yet.";
return -1;
}
assert(decoders_[last_audio_decoder_].registered);
@@ -636,6 +661,7 @@ void AcmReceiver::NetworkStatistics(ACMNetworkStatistics* acm_stat) {
acm_stat->currentPreemptiveRate = neteq_stat.preemptive_rate;
acm_stat->currentAccelerateRate = neteq_stat.accelerate_rate;
acm_stat->clockDriftPPM = neteq_stat.clockdrift_ppm;
+ acm_stat->addedSamples = neteq_stat.added_zero_samples;
std::vector<int> waiting_times;
neteq_->WaitingTimes(&waiting_times);
@@ -665,7 +691,7 @@ void AcmReceiver::NetworkStatistics(ACMNetworkStatistics* acm_stat) {
int AcmReceiver::DecoderByPayloadType(uint8_t payload_type,
CodecInst* codec) const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
int codec_index = PayloadType2CodecIndex(payload_type);
if (codec_index < 0) {
LOG_FERR1(LS_ERROR, "AcmReceiver::DecoderByPayloadType", payload_type);
@@ -691,7 +717,7 @@ int AcmReceiver::EnableNack(size_t max_nack_list_size) {
if (max_nack_list_size == 0 || max_nack_list_size > Nack::kNackListSizeLimit)
return -1;
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (!nack_enabled_) {
nack_.reset(Nack::Create(kNackThresholdPackets));
nack_enabled_ = true;
@@ -707,14 +733,14 @@ int AcmReceiver::EnableNack(size_t max_nack_list_size) {
}
void AcmReceiver::DisableNack() {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
nack_.reset(); // Memory is released.
nack_enabled_ = false;
}
std::vector<uint16_t> AcmReceiver::GetNackList(
int round_trip_time_ms) const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (round_trip_time_ms < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
"GetNackList: round trip time cannot be negative."
@@ -730,7 +756,7 @@ std::vector<uint16_t> AcmReceiver::GetNackList(
void AcmReceiver::ResetInitialDelay() {
{
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
av_sync_ = false;
initial_delay_manager_.reset(NULL);
missing_packets_sync_stream_.reset(NULL);
@@ -752,13 +778,9 @@ bool AcmReceiver::GetSilence(int desired_sample_rate_hz, AudioFrame* frame) {
// exceeds a threshold.
int num_packets;
int max_num_packets;
- int buffer_size_byte;
- int max_buffer_size_byte;
const float kBufferingThresholdScale = 0.9f;
- neteq_->PacketBufferStatistics(&num_packets, &max_num_packets,
- &buffer_size_byte, &max_buffer_size_byte);
- if (num_packets > max_num_packets * kBufferingThresholdScale ||
- buffer_size_byte > max_buffer_size_byte * kBufferingThresholdScale) {
+ neteq_->PacketBufferStatistics(&num_packets, &max_num_packets);
+ if (num_packets > max_num_packets * kBufferingThresholdScale) {
initial_delay_manager_->DisableBuffering();
return false;
}
@@ -771,7 +793,6 @@ bool AcmReceiver::GetSilence(int desired_sample_rate_hz, AudioFrame* frame) {
current_sample_rate_hz_ = ACMCodecDB::database_[last_audio_decoder_].plfreq;
frame->num_channels_ = decoders_[last_audio_decoder_].channels;
} else {
- current_sample_rate_hz_ = kNeteqInitSampleRateHz;
frame->num_channels_ = 1;
}
@@ -785,7 +806,6 @@ bool AcmReceiver::GetSilence(int desired_sample_rate_hz, AudioFrame* frame) {
frame->samples_per_channel_ = frame->sample_rate_hz_ / 100; // Always 10 ms.
frame->speech_type_ = AudioFrame::kCNG;
frame->vad_activity_ = AudioFrame::kVadPassive;
- frame->energy_ = 0;
int samples = frame->samples_per_channel_ * frame->num_channels_;
memset(frame->data_, 0, samples * sizeof(int16_t));
return true;
@@ -815,7 +835,7 @@ uint32_t AcmReceiver::NowInTimestamp(int decoder_sampling_rate) const {
// We masked 6 most significant bits of 32-bit so there is no overflow in
// the conversion from milliseconds to timestamp.
const uint32_t now_in_ms = static_cast<uint32_t>(
- TickTime::MillisecondTimestamp() & 0x03ffffff);
+ clock_->TimeInMilliseconds() & 0x03ffffff);
return static_cast<uint32_t>(
(decoder_sampling_rate / 1000) * now_in_ms);
}
@@ -839,7 +859,7 @@ void AcmReceiver::InsertStreamOfSyncPackets(
void AcmReceiver::GetDecodingCallStatistics(
AudioDecodingCallStats* stats) const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
*stats = call_stats_.GetDecodingStatistics();
}
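
AcmReceiver is now constructed from an AudioCodingModule::Config that carries the id, the clock and the NetEq settings it previously picked up from globals. A construction sketch; the helper function and the chosen values are illustrative:

#include "webrtc/modules/audio_coding/main/acm2/acm_receiver.h"
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
#include "webrtc/system_wrappers/interface/clock.h"

webrtc::acm2::AcmReceiver* CreateReceiver() {
  webrtc::AudioCodingModule::Config config;
  config.id = 1;
  config.clock = webrtc::Clock::GetRealTimeClock();  // Injected, no TickTime.
  config.neteq_config.sample_rate_hz = 16000;  // Replaces kNeteqInitSampleRateHz.
  return new webrtc::acm2::AcmReceiver(config);
}
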
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.h
index 81eb5206b8a..b6898f73f9a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.h
@@ -20,9 +20,10 @@
#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
#include "webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -47,9 +48,7 @@ class AcmReceiver {
};
// Constructor of the class
- AcmReceiver();
-
- explicit AcmReceiver(NetEq* neteq);
+ explicit AcmReceiver(const AudioCodingModule::Config& config);
// Destructor of the class.
~AcmReceiver();
@@ -244,9 +243,10 @@ class AcmReceiver {
void set_id(int id); // TODO(turajs): can be inline.
//
- // Returns the RTP timestamp of the last sample delivered by GetAudio().
+ // Gets the RTP timestamp of the last sample delivered by GetAudio().
+ // Returns true if the RTP timestamp is valid, otherwise false.
//
- uint32_t PlayoutTimestamp();
+ bool GetPlayoutTimestamp(uint32_t* timestamp);
//
// Return the index of the codec associated with the last non-CNG/non-DTMF
@@ -328,7 +328,8 @@ class AcmReceiver {
private:
int PayloadType2CodecIndex(uint8_t payload_type) const;
- bool GetSilence(int desired_sample_rate_hz, AudioFrame* frame);
+ bool GetSilence(int desired_sample_rate_hz, AudioFrame* frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
int GetNumSyncPacketToInsert(uint16_t received_squence_number);
@@ -339,20 +340,23 @@ class AcmReceiver {
void InsertStreamOfSyncPackets(InitialDelayManager::SyncStream* sync_stream);
- int id_;
+ scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ int id_; // TODO(henrik.lundin) Make const.
+ int last_audio_decoder_ GUARDED_BY(crit_sect_);
+ AudioFrame::VADActivity previous_audio_activity_ GUARDED_BY(crit_sect_);
+ int current_sample_rate_hz_ GUARDED_BY(crit_sect_);
+ ACMResampler resampler_ GUARDED_BY(crit_sect_);
+ // Used in GetAudio, declared as member to avoid allocating every 10ms.
+ // TODO(henrik.lundin) Stack-allocate in GetAudio instead?
+ int16_t audio_buffer_[AudioFrame::kMaxDataSizeSamples] GUARDED_BY(crit_sect_);
+ scoped_ptr<Nack> nack_ GUARDED_BY(crit_sect_);
+ bool nack_enabled_ GUARDED_BY(crit_sect_);
+ CallStatistics call_stats_ GUARDED_BY(crit_sect_);
NetEq* neteq_;
Decoder decoders_[ACMCodecDB::kMaxNumCodecs];
- int last_audio_decoder_;
RWLockWrapper* decode_lock_;
- CriticalSectionWrapper* neteq_crit_sect_;
bool vad_enabled_;
- AudioFrame::VADActivity previous_audio_activity_;
- int current_sample_rate_hz_;
- ACMResampler resampler_;
- // Used in GetAudio, declared as member to avoid allocating every 10ms.
- int16_t audio_buffer_[AudioFrame::kMaxDataSizeSamples];
- scoped_ptr<Nack> nack_;
- bool nack_enabled_;
+ Clock* clock_; // TODO(henrik.lundin) Make const if possible.
// Indicates if a non-zero initial delay is set, and the receiver is in
// AV-sync mode.
@@ -366,8 +370,6 @@ class AcmReceiver {
// initial delay is set.
scoped_ptr<InitialDelayManager::SyncStream> missing_packets_sync_stream_;
scoped_ptr<InitialDelayManager::SyncStream> late_packets_sync_stream_;
-
- CallStatistics call_stats_;
};
} // namespace acm2
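
Callers of the old PlayoutTimestamp() accessor now have to check the boolean result before using the value. A hypothetical call site:

#include "webrtc/modules/audio_coding/main/acm2/acm_receiver.h"

uint32_t LatestPlayoutTimestamp(webrtc::acm2::AcmReceiver* receiver,
                                uint32_t previous_estimate) {
  uint32_t playout_ts = 0;
  if (receiver->GetPlayoutTimestamp(&playout_ts))
    return playout_ts;       // Valid RTP timestamp of the last delivered sample.
  return previous_estimate;  // Nothing decoded yet; keep the old value.
}
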
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc
index 712eeb26877..4234f146474 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc
@@ -16,7 +16,8 @@
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
#include "webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
+#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/test_suite.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -42,12 +43,14 @@ class AcmReceiverTest : public AudioPacketizationCallback,
public ::testing::Test {
protected:
AcmReceiverTest()
- : receiver_(new AcmReceiver),
- acm_(new AudioCodingModuleImpl(0)),
- timestamp_(0),
+ : timestamp_(0),
packet_sent_(false),
last_packet_send_timestamp_(timestamp_),
- last_frame_type_(kFrameEmpty) {}
+ last_frame_type_(kFrameEmpty) {
+ AudioCodingModule::Config config;
+ acm_.reset(new AudioCodingModuleImpl(config));
+ receiver_.reset(new AcmReceiver(config));
+ }
~AcmReceiverTest() {}
@@ -302,55 +305,6 @@ TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(PostdecodingVad)) {
EXPECT_EQ(AudioFrame::kVadUnknown, frame.vad_activity_);
}
-TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(FlushBuffer)) {
- const int id = ACMCodecDB::kISAC;
- EXPECT_EQ(0, receiver_->AddCodec(id, codecs_[id].pltype, codecs_[id].channels,
- NULL));
- const int kNumPackets = 5;
- const int num_10ms_frames = codecs_[id].pacsize / (codecs_[id].plfreq / 100);
- for (int n = 0; n < kNumPackets; ++n)
- InsertOnePacketOfSilence(id);
- ACMNetworkStatistics statistics;
- receiver_->NetworkStatistics(&statistics);
- ASSERT_EQ(num_10ms_frames * kNumPackets * 10, statistics.currentBufferSize);
-
- receiver_->FlushBuffers();
- receiver_->NetworkStatistics(&statistics);
- ASSERT_EQ(0, statistics.currentBufferSize);
-}
-
-TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(PlayoutTimestamp)) {
- const int id = ACMCodecDB::kPCM16Bwb;
- EXPECT_EQ(0, receiver_->AddCodec(id, codecs_[id].pltype, codecs_[id].channels,
- NULL));
- receiver_->SetPlayoutMode(fax);
- const int kNumPackets = 5;
- const int num_10ms_frames = codecs_[id].pacsize / (codecs_[id].plfreq / 100);
- uint32_t expected_timestamp;
- AudioFrame frame;
- int ts_offset = 0;
- bool first_audio_frame = true;
- for (int n = 0; n < kNumPackets; ++n) {
- packet_sent_ = false;
- InsertOnePacketOfSilence(id);
- ASSERT_TRUE(packet_sent_);
- expected_timestamp = last_packet_send_timestamp_;
- for (int k = 0; k < num_10ms_frames; ++k) {
- ASSERT_EQ(0, receiver_->GetAudio(codecs_[id].plfreq, &frame));
- if (first_audio_frame) {
- // There is an offset in playout timestamps. Perhaps, it is related to
- // initial delay that NetEq applies
- ts_offset = receiver_->PlayoutTimestamp() - expected_timestamp;
- first_audio_frame = false;
- } else {
- EXPECT_EQ(expected_timestamp + ts_offset,
- receiver_->PlayoutTimestamp());
- }
- expected_timestamp += codecs_[id].plfreq / 100; // Increment by 10 ms.
- }
- }
-}
-
TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(LastAudioCodec)) {
const int kCodecId[] = {
ACMCodecDB::kISAC, ACMCodecDB::kPCMA, ACMCodecDB::kISACSWB,
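
The fixture now builds both objects from a single AudioCodingModule::Config, as in this stripped-down version of the new setup; the include paths and fixture name are assumptions, and only the construction path is shown:

#include "gtest/gtest.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_receiver.h"
#include "webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h"
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

class AcmReceiverConfigTest : public ::testing::Test {
 protected:
  virtual void SetUp() {
    webrtc::AudioCodingModule::Config config;  // Defaults suffice for construction.
    acm_.reset(new webrtc::acm2::AudioCodingModuleImpl(config));
    receiver_.reset(new webrtc::acm2::AcmReceiver(config));
  }

  webrtc::scoped_ptr<webrtc::acm2::AudioCodingModuleImpl> acm_;
  webrtc::scoped_ptr<webrtc::acm2::AcmReceiver> receiver_;
};
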
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc
index 3abe4f1ec46..97d87b1b3a4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc
@@ -10,61 +10,59 @@
#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
+#include <assert.h>
#include <string.h>
#include "webrtc/common_audio/resampler/include/resampler.h"
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
-
namespace acm2 {
-ACMResampler::ACMResampler()
- : resampler_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {
+ACMResampler::ACMResampler() {
}
ACMResampler::~ACMResampler() {
- delete resampler_crit_sect_;
}
int ACMResampler::Resample10Msec(const int16_t* in_audio,
int in_freq_hz,
int out_freq_hz,
int num_audio_channels,
+ int out_capacity_samples,
int16_t* out_audio) {
- CriticalSectionScoped cs(resampler_crit_sect_);
-
+ int in_length = in_freq_hz * num_audio_channels / 100;
+ int out_length = out_freq_hz * num_audio_channels / 100;
if (in_freq_hz == out_freq_hz) {
- size_t length = static_cast<size_t>(in_freq_hz * num_audio_channels / 100);
- memcpy(out_audio, in_audio, length * sizeof(int16_t));
- return static_cast<int16_t>(in_freq_hz / 100);
+ if (out_capacity_samples < in_length) {
+ assert(false);
+ return -1;
+ }
+ memcpy(out_audio, in_audio, in_length * sizeof(int16_t));
+ return in_length / num_audio_channels;
}
- // |maxLen| is maximum number of samples for 10ms at 48kHz.
- int max_len = 480 * num_audio_channels;
- int length_in = (in_freq_hz / 100) * num_audio_channels;
- int out_len;
-
- ResamplerType type = (num_audio_channels == 1) ? kResamplerSynchronous :
- kResamplerSynchronousStereo;
-
- if (resampler_.ResetIfNeeded(in_freq_hz, out_freq_hz, type) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, 0,
- "Error in reset of resampler");
+ if (resampler_.InitializeIfNeeded(in_freq_hz, out_freq_hz,
+ num_audio_channels) != 0) {
+ LOG_FERR3(LS_ERROR, InitializeIfNeeded, in_freq_hz, out_freq_hz,
+ num_audio_channels);
return -1;
}
- if (resampler_.Push(in_audio, length_in, out_audio, max_len, out_len) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, 0,
- "Error in resampler: resampler.Push");
+ out_length =
+ resampler_.Resample(in_audio, in_length, out_audio, out_capacity_samples);
+ if (out_length == -1) {
+ LOG_FERR4(LS_ERROR,
+ Resample,
+ in_audio,
+ in_length,
+ out_audio,
+ out_capacity_samples);
return -1;
}
- return out_len / num_audio_channels;
+ return out_length / num_audio_channels;
}
} // namespace acm2
-
} // namespace webrtc
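
Resample10Msec now takes the capacity of the output buffer explicitly, matching the AcmReceiver call sites above that pass AudioFrame::kMaxDataSizeSamples. A minimal caller sketch; the wrapper function is hypothetical:

#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
#include "webrtc/modules/interface/module_common_types.h"

int Downsample10Ms(webrtc::acm2::ACMResampler* resampler,
                   const int16_t* in_48k_mono,  // 480 samples (10 ms @ 48 kHz).
                   int16_t* out) {
  // Returns samples per channel on success, -1 on failure.
  return resampler->Resample10Msec(in_48k_mono,
                                   48000,  // in_freq_hz
                                   16000,  // out_freq_hz
                                   1,      // num_audio_channels
                                   webrtc::AudioFrame::kMaxDataSizeSamples,
                                   out);
}
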
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.h
index e992955f5f3..a8fc6b6f26a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.h
@@ -11,13 +11,10 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RESAMPLER_H_
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RESAMPLER_H_
-#include "webrtc/common_audio/resampler/include/resampler.h"
+#include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/typedefs.h"
namespace webrtc {
-
-class CriticalSectionWrapper;
-
namespace acm2 {
class ACMResampler {
@@ -29,16 +26,14 @@ class ACMResampler {
int in_freq_hz,
int out_freq_hz,
int num_audio_channels,
+ int out_capacity_samples,
int16_t* out_audio);
private:
- // Use the Resampler class.
- Resampler resampler_;
- CriticalSectionWrapper* resampler_crit_sect_;
+ PushResampler<int16_t> resampler_;
};
} // namespace acm2
-
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RESAMPLER_H_
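
Used directly, the new PushResampler member follows the same two-step pattern the rewritten Resample10Msec body shows: InitializeIfNeeded() with the rates and channel count, then Resample() with source length and destination capacity. A sketch, with parameter names assumed from those call sites:

#include "webrtc/common_audio/resampler/include/push_resampler.h"

int PushOneBlock(webrtc::PushResampler<int16_t>* resampler,
                 const int16_t* in, int in_samples,
                 int16_t* out, int out_capacity) {
  if (resampler->InitializeIfNeeded(32000, 16000, 1) != 0)
    return -1;  // Unsupported rate combination.
  return resampler->Resample(in, in_samples, out, out_capacity);
}
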
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc
index 60ed69cb29c..eca909cc49c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc
@@ -13,22 +13,21 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
#include "webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h"
-#include "webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
-const char kLegacyAcmVersion[] = "acm1";
-const char kExperimentalAcmVersion[] = "acm2";
-
// Create module
AudioCodingModule* AudioCodingModule::Create(int id) {
- return new acm1::AudioCodingModuleImpl(id, Clock::GetRealTimeClock());
+ return Create(id, Clock::GetRealTimeClock());
}
AudioCodingModule* AudioCodingModule::Create(int id, Clock* clock) {
- return new acm1::AudioCodingModuleImpl(id, clock);
+ AudioCodingModule::Config config;
+ config.id = id;
+ config.clock = clock;
+ return new acm2::AudioCodingModuleImpl(config);
}
// Get number of supported codecs
@@ -95,13 +94,4 @@ bool AudioCodingModule::IsCodecValid(const CodecInst& codec) {
}
}
-AudioCodingModule* AudioCodingModuleFactory::Create(int id) const {
- return new acm1::AudioCodingModuleImpl(static_cast<int32_t>(id),
- Clock::GetRealTimeClock());
-}
-
-AudioCodingModule* NewAudioCodingModuleFactory::Create(int id) const {
- return new acm2::AudioCodingModuleImpl(id);
-}
-
} // namespace webrtc
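
With the acm1 implementation and the factory classes gone, both Create() overloads end up in the Config-based acm2 constructor, so existing callers are unaffected. A typical test-oriented call; the helper is hypothetical:

#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
#include "webrtc/system_wrappers/interface/clock.h"

webrtc::AudioCodingModule* CreateAcmWithClock(webrtc::Clock* clock) {
  // Passing a simulated clock keeps time-driven tests deterministic.
  return webrtc::AudioCodingModule::Create(/*id=*/0, clock);
}
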
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi
index f51c3bf7d74..90dad6c55c0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi
@@ -7,16 +7,36 @@
# be found in the AUTHORS file in the root of the source tree.
{
+ 'variables': {
+ 'audio_coding_dependencies': [
+ 'CNG',
+ 'G711',
+ 'G722',
+ 'iLBC',
+ 'iSAC',
+ 'iSACFix',
+ 'PCM16B',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ ],
+ 'audio_coding_defines': [],
+ 'conditions': [
+ ['include_opus==1', {
+ 'audio_coding_dependencies': ['webrtc_opus',],
+ 'audio_coding_defines': ['WEBRTC_CODEC_OPUS',],
+ }],
+ ],
+ },
'targets': [
{
- 'target_name': 'acm2',
+ 'target_name': 'audio_coding_module',
'type': 'static_library',
'defines': [
'<@(audio_coding_defines)',
],
'dependencies': [
'<@(audio_coding_dependencies)',
- 'NetEq4',
+ 'neteq',
],
'include_dirs': [
'../interface',
@@ -93,4 +113,45 @@
],
},
],
+ 'conditions': [
+ ['include_tests==1', {
+ 'targets': [
+ {
+ 'target_name': 'delay_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'audio_coding_module',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/test/test.gyp:test_support',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ ],
+ 'sources': [
+ '../test/delay_test.cc',
+ '../test/Channel.cc',
+ '../test/PCMFile.cc',
+ '../test/utility.cc',
+ ],
+ }, # delay_test
+ {
+ 'target_name': 'insert_packet_with_timing',
+ 'type': 'executable',
+ 'dependencies': [
+ 'audio_coding_module',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/test/test.gyp:test_support',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ ],
+ 'sources': [
+ '../test/insert_packet_with_timing.cc',
+ '../test/Channel.cc',
+ '../test/PCMFile.cc',
+ ],
+ }, # insert_packet_with_timing
+ ],
+ }],
+ ],
}
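The new 'audio_coding_defines' variable forwards WEBRTC_CODEC_OPUS to the compiler when include_opus==1. A hypothetical illustration (not code from this patch) of how such a define is consumed on the C++ side:

    // Hypothetical illustration: a define exported via 'audio_coding_defines'
    // is typically consumed with a preprocessor guard, so Opus-specific code is
    // only built when include_opus==1.
    #ifdef WEBRTC_CODEC_OPUS
    static const bool kOpusSupported = true;
    #else
    static const bool kOpusSupported = false;
    #endif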
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc
index 4c64e07dd5c..a07e8543347 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc
@@ -39,11 +39,11 @@ enum {
kMaxPacketSize = 2560
};
-// Maximum number of payloads that can be packed in one RED payload. For
-// regular FEC, we only pack two payloads. In case of dual-streaming, in worst
-// case we might pack 3 payloads in one RED payload.
+// Maximum number of payloads that can be packed in one RED packet. For
+// regular RED, we only pack two payloads. In case of dual-streaming, in worst
+// case we might pack 3 payloads in one RED packet.
enum {
- kNumFecFragmentationVectors = 2,
+ kNumRedFragmentationVectors = 2,
kMaxNumFragmentationVectors = 3
};
@@ -114,9 +114,10 @@ static int TimestampLessThan(uint32_t t1, uint32_t t2) {
} // namespace
-AudioCodingModuleImpl::AudioCodingModuleImpl(int id)
- : packetization_callback_(NULL),
- id_(id),
+AudioCodingModuleImpl::AudioCodingModuleImpl(
+ const AudioCodingModule::Config& config)
+ : acm_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ id_(config.id),
expected_codec_ts_(0xD87F3F9F),
expected_in_ts_(0xD87F3F9F),
send_codec_inst_(),
@@ -131,18 +132,20 @@ AudioCodingModuleImpl::AudioCodingModuleImpl(int id)
stereo_send_(false),
current_send_codec_idx_(-1),
send_codec_registered_(false),
- acm_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vad_callback_(NULL),
+ receiver_(config),
is_first_red_(true),
- fec_enabled_(false),
- last_fec_timestamp_(0),
+ red_enabled_(false),
+ last_red_timestamp_(0),
+ codec_fec_enabled_(false),
previous_pltype_(255),
aux_rtp_header_(NULL),
receiver_initialized_(false),
- callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
secondary_send_codec_inst_(),
codec_timestamp_(expected_codec_ts_),
- first_10ms_data_(false) {
+ first_10ms_data_(false),
+ callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ packetization_callback_(NULL),
+ vad_callback_(NULL) {
// Nullify send codec memory, set payload type and set codec name to
// invalid values.
@@ -159,8 +162,6 @@ AudioCodingModuleImpl::AudioCodingModuleImpl(int id)
mirror_codec_idx_[i] = -1;
}
- receiver_.set_id(id_);
-
// Allocate memory for RED.
red_buffer_ = new uint8_t[MAX_PAYLOAD_SIZE_BYTE];
@@ -201,7 +202,7 @@ AudioCodingModuleImpl::AudioCodingModuleImpl(int id)
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"Cannot initialize receiver");
}
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id, "Created");
+ WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id_, "Created");
}
AudioCodingModuleImpl::~AudioCodingModuleImpl() {
@@ -349,7 +350,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
int16_t len_bytes = MAX_PAYLOAD_SIZE_BYTE;
WebRtcACMEncodingType encoding_type;
if (secondary_encoder_->Encode(red_buffer_, &len_bytes,
- &last_fec_timestamp_,
+ &last_red_timestamp_,
&encoding_type) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"ProcessDual(): Encoding of secondary encoder Failed");
@@ -372,7 +373,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
index_primary = secondary_ready_to_encode ?
TimestampLessThan(primary_timestamp, secondary_timestamp) : 0;
index_primary += has_previous_payload ?
- TimestampLessThan(primary_timestamp, last_fec_timestamp_) : 0;
+ TimestampLessThan(primary_timestamp, last_red_timestamp_) : 0;
}
if (secondary_ready_to_encode) {
@@ -384,7 +385,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
if (has_previous_payload) {
index_previous_secondary = primary_ready_to_encode ?
- (1 - TimestampLessThan(primary_timestamp, last_fec_timestamp_)) : 0;
+ (1 - TimestampLessThan(primary_timestamp, last_red_timestamp_)) : 0;
// If secondary is ready it always has a timestamp larger than the previous
// secondary. So the index is either 0 or 1.
index_previous_secondary += secondary_ready_to_encode ? 1 : 0;
@@ -405,7 +406,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
} else if (index_secondary == 0) {
current_timestamp = secondary_timestamp;
} else {
- current_timestamp = last_fec_timestamp_;
+ current_timestamp = last_red_timestamp_;
}
fragmentation_.fragmentationVectorSize = 0;
@@ -420,7 +421,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
fragmentation_.fragmentationPlType[index_previous_secondary] =
secondary_send_codec_inst_.pltype;
fragmentation_.fragmentationTimeDiff[index_previous_secondary] =
- static_cast<uint16_t>(current_timestamp - last_fec_timestamp_);
+ static_cast<uint16_t>(current_timestamp - last_red_timestamp_);
fragmentation_.fragmentationVectorSize++;
}
@@ -462,7 +463,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
{
CriticalSectionScoped lock(callback_crit_sect_);
if (packetization_callback_ != NULL) {
- // Callback with payload data, including redundant data (FEC/RED).
+ // Callback with payload data, including redundant data (RED).
if (packetization_callback_->SendData(kAudioFrameSpeech,
my_red_payload_type,
current_timestamp, stream,
@@ -495,7 +496,7 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
FrameType frame_type = kAudioFrameSpeech;
uint8_t current_payload_type = 0;
bool has_data_to_send = false;
- bool fec_active = false;
+ bool red_active = false;
RTPFragmentationHeader my_fragmentation;
// Keep the scope of the ACM critical section limited.
@@ -562,15 +563,15 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
// Redundancy encode is done here. The two bitstreams packetized into
// one RTP packet and the fragmentation points are set.
// Only apply RED on speech data.
- if ((fec_enabled_) &&
+ if ((red_enabled_) &&
((encoding_type == kActiveNormalEncoded) ||
(encoding_type == kPassiveNormalEncoded))) {
- // FEC is enabled within this scope.
+ // RED is enabled within this scope.
//
// Note that, a special solution exists for iSAC since it is the only
// codec for which GetRedPayload has a non-empty implementation.
//
- // Summary of the FEC scheme below (use iSAC as example):
+ // Summary of the RED scheme below (use iSAC as example):
//
// 1st (is_first_red_ is true) encoded iSAC frame (primary #1) =>
// - call GetRedPayload() and store redundancy for packet #1 in
@@ -581,7 +582,7 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
// - store primary #2 in 1st fragment of RED buffer and send the
// combined packet
// - the transmitted packet contains primary #2 (new) and
- // reduncancy for packet #1 (old)
+ // redundancy for packet #1 (old)
// - call GetRedPayload() and store redundancy for packet #2 in
// second fragment of RED buffer
//
@@ -604,19 +605,19 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
//
// Hence, even if every second packet is dropped, perfect
// reconstruction is possible.
- fec_active = true;
+ red_active = true;
has_data_to_send = false;
// Skip the following part for the first packet in a RED session.
if (!is_first_red_) {
- // Rearrange stream such that FEC packets are included.
+ // Rearrange stream such that RED packets are included.
// Replace stream now that we have stored current stream.
memcpy(stream + fragmentation_.fragmentationOffset[1], red_buffer_,
fragmentation_.fragmentationLength[1]);
// Update the fragmentation time difference vector, in number of
// timestamps.
uint16_t time_since_last = static_cast<uint16_t>(
- rtp_timestamp - last_fec_timestamp_);
+ rtp_timestamp - last_red_timestamp_);
// Update fragmentation vectors.
fragmentation_.fragmentationPlType[1] =
@@ -630,7 +631,7 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
// Insert new packet payload type.
fragmentation_.fragmentationPlType[0] = current_payload_type;
- last_fec_timestamp_ = rtp_timestamp;
+ last_red_timestamp_ = rtp_timestamp;
// Can be modified by the GetRedPayload() call if iSAC is utilized.
red_length_bytes = length_bytes;
@@ -650,7 +651,7 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
if (codecs_[current_send_codec_idx_]->GetRedPayload(
red_buffer_, &red_length_bytes) == -1) {
// The codec was not iSAC => use current encoder output as redundant
- // data instead (trivial FEC scheme).
+ // data instead (trivial RED scheme).
memcpy(red_buffer_, stream, red_length_bytes);
}
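The comments in the hunks above describe the RED packing: fragment 0 of the RTPFragmentationHeader carries the new primary payload and fragment 1 the redundant copy of the previous packet. A simplified sketch of reading that layout back, using only the header fields referenced above (not part of the ACM implementation):

    // Sketch: fragment 0 = new primary payload, fragment 1 = redundant copy of
    // the previous packet, offset by fragmentationTimeDiff[1] timestamp units.
    #include "webrtc/modules/interface/module_common_types.h"

    size_t TotalRedPayloadBytes(const webrtc::RTPFragmentationHeader& frag) {
      size_t total = 0;
      for (size_t i = 0; i < frag.fragmentationVectorSize; ++i)
        total += frag.fragmentationLength[i];
      return total;
    }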
@@ -658,7 +659,7 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
// Update payload type with RED payload type.
current_payload_type = red_pltype_;
// We have packed 2 payloads.
- fragmentation_.fragmentationVectorSize = kNumFecFragmentationVectors;
+ fragmentation_.fragmentationVectorSize = kNumRedFragmentationVectors;
// Copy to local variable, as it will be used outside ACM lock.
my_fragmentation.CopyFrom(fragmentation_);
@@ -672,8 +673,8 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
CriticalSectionScoped lock(callback_crit_sect_);
if (packetization_callback_ != NULL) {
- if (fec_active) {
- // Callback with payload data, including redundant data (FEC/RED).
+ if (red_active) {
+ // Callback with payload data, including redundant data (RED).
packetization_callback_->SendData(frame_type, current_payload_type,
rtp_timestamp, stream, length_bytes,
&my_fragmentation);
@@ -713,14 +714,14 @@ int AudioCodingModuleImpl::InitializeSender() {
}
}
- // Initialize FEC/RED.
+ // Initialize RED.
is_first_red_ = true;
- if (fec_enabled_ || secondary_encoder_.get() != NULL) {
+ if (red_enabled_ || secondary_encoder_.get() != NULL) {
if (red_buffer_ != NULL) {
memset(red_buffer_, 0, MAX_PAYLOAD_SIZE_BYTE);
}
- if (fec_enabled_) {
- ResetFragmentation(kNumFecFragmentationVectors);
+ if (red_enabled_) {
+ ResetFragmentation(kNumRedFragmentationVectors);
} else {
ResetFragmentation(0);
}
@@ -1031,10 +1032,20 @@ int AudioCodingModuleImpl::RegisterSendCodec(const CodecInst& send_codec) {
// Everything is fine so we can replace the previous codec with this one.
if (send_codec_registered_) {
- // If we change codec we start fresh with FEC.
+ // If we change codec we start fresh with RED.
// This is not strictly required by the standard.
is_first_red_ = true;
codec_ptr->SetVAD(&dtx_enabled_, &vad_enabled_, &vad_mode_);
+
+ if (!codec_ptr->HasInternalFEC()) {
+ codec_fec_enabled_ = false;
+ } else {
+ if (codec_ptr->SetFEC(codec_fec_enabled_) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot set codec FEC");
+ return -1;
+ }
+ }
}
current_send_codec_idx_ = codec_id;
@@ -1120,8 +1131,18 @@ int AudioCodingModuleImpl::RegisterSendCodec(const CodecInst& send_codec) {
}
send_codec_inst_.rate = send_codec.rate;
}
- previous_pltype_ = send_codec_inst_.pltype;
+ if (!codecs_[codec_id]->HasInternalFEC()) {
+ codec_fec_enabled_ = false;
+ } else {
+ if (codecs_[codec_id]->SetFEC(codec_fec_enabled_) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot set codec FEC");
+ return -1;
+ }
+ }
+
+ previous_pltype_ = send_codec_inst_.pltype;
return 0;
}
}
@@ -1205,11 +1226,7 @@ int AudioCodingModuleImpl::Add10MsData(
return -1;
}
- // Allow for 8, 16, 32 and 48kHz input audio.
- if ((audio_frame.sample_rate_hz_ != 8000)
- && (audio_frame.sample_rate_hz_ != 16000)
- && (audio_frame.sample_rate_hz_ != 32000)
- && (audio_frame.sample_rate_hz_ != 48000)) {
+ if (audio_frame.sample_rate_hz_ > 48000) {
assert(false);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"Cannot Add 10 ms audio, input frequency not valid");
@@ -1365,13 +1382,17 @@ int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame,
// The result of the resampler is written to output frame.
dest_ptr_audio = preprocess_frame_.data_;
- preprocess_frame_.samples_per_channel_ = resampler_.Resample10Msec(
- src_ptr_audio, in_frame.sample_rate_hz_, send_codec_inst_.plfreq,
- preprocess_frame_.num_channels_, dest_ptr_audio);
+ preprocess_frame_.samples_per_channel_ =
+ resampler_.Resample10Msec(src_ptr_audio,
+ in_frame.sample_rate_hz_,
+ send_codec_inst_.plfreq,
+ preprocess_frame_.num_channels_,
+ AudioFrame::kMaxDataSizeSamples,
+ dest_ptr_audio);
if (preprocess_frame_.samples_per_channel_ < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot add 10 ms audio, resmapling failed");
+ "Cannot add 10 ms audio, resampling failed");
return -1;
}
preprocess_frame_.sample_rate_hz_ = send_codec_inst_.plfreq;
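Resample10Msec() now also receives the capacity of the destination buffer (AudioFrame::kMaxDataSizeSamples). A small illustrative check of why that bound exists, assuming the resampler writes sample_rate/100 samples per channel for a 10 ms block:

    #include "webrtc/modules/interface/module_common_types.h"

    // Illustrative only: a 10 ms block at |out_rate_hz| must fit into the
    // destination AudioFrame, which is what the new capacity argument enforces.
    bool FitsInAudioFrame(int out_rate_hz, int num_channels) {
      const int samples_per_channel = out_rate_hz / 100;  // 10 ms of audio.
      return samples_per_channel * num_channels <=
             webrtc::AudioFrame::kMaxDataSizeSamples;
    }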
@@ -1384,42 +1405,87 @@ int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame,
}
/////////////////////////////////////////
-// (FEC) Forward Error Correction
+// (RED) Redundant Coding
//
-bool AudioCodingModuleImpl::FECStatus() const {
+bool AudioCodingModuleImpl::REDStatus() const {
CriticalSectionScoped lock(acm_crit_sect_);
- return fec_enabled_;
+
+ return red_enabled_;
}
-// Configure FEC status i.e on/off.
-int AudioCodingModuleImpl::SetFECStatus(
+// Configure RED status i.e on/off.
+int AudioCodingModuleImpl::SetREDStatus(
#ifdef WEBRTC_CODEC_RED
- bool enable_fec) {
+ bool enable_red) {
CriticalSectionScoped lock(acm_crit_sect_);
- if (fec_enabled_ != enable_fec) {
+ if (enable_red == true && codec_fec_enabled_ == true) {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
+ "Codec internal FEC and RED cannot be co-enabled.");
+ return -1;
+ }
+
+ if (red_enabled_ != enable_red) {
// Reset the RED buffer.
memset(red_buffer_, 0, MAX_PAYLOAD_SIZE_BYTE);
// Reset fragmentation buffers.
- ResetFragmentation(kNumFecFragmentationVectors);
- // Set fec_enabled_.
- fec_enabled_ = enable_fec;
+ ResetFragmentation(kNumRedFragmentationVectors);
+ // Set red_enabled_.
+ red_enabled_ = enable_red;
}
- is_first_red_ = true; // Make sure we restart FEC.
+ is_first_red_ = true; // Make sure we restart RED.
return 0;
#else
- bool /* enable_fec */) {
- fec_enabled_ = false;
+ bool /* enable_red */) {
+ red_enabled_ = false;
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- " WEBRTC_CODEC_RED is undefined => fec_enabled_ = %d",
- fec_enabled_);
+ " WEBRTC_CODEC_RED is undefined => red_enabled_ = %d",
+ red_enabled_);
return -1;
#endif
}
/////////////////////////////////////////
+// (FEC) Forward Error Correction (codec internal)
+//
+
+bool AudioCodingModuleImpl::CodecFEC() const {
+ return codec_fec_enabled_;
+}
+
+int AudioCodingModuleImpl::SetCodecFEC(bool enable_codec_fec) {
+ CriticalSectionScoped lock(acm_crit_sect_);
+
+ if (enable_codec_fec == true && red_enabled_ == true) {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
+ "Codec internal FEC and RED cannot be co-enabled.");
+ return -1;
+ }
+
+ // Set codec FEC.
+ if (HaveValidEncoder("SetCodecFEC") &&
+ codecs_[current_send_codec_idx_]->SetFEC(enable_codec_fec) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Set codec internal FEC failed.");
+ return -1;
+ }
+ codec_fec_enabled_ = enable_codec_fec;
+ return 0;
+}
+
+int AudioCodingModuleImpl::SetPacketLossRate(int loss_rate) {
+ if (HaveValidEncoder("SetPacketLossRate") &&
+ codecs_[current_send_codec_idx_]->SetPacketLossRate(loss_rate) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Set packet loss rate failed.");
+ return -1;
+ }
+ return 0;
+}
+
+/////////////////////////////////////////
// (VAD) Voice Activity Detection
//
int AudioCodingModuleImpl::SetVAD(bool enable_dtx,
@@ -1710,8 +1776,6 @@ int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
}
audio_frame->id_ = id_;
- audio_frame->energy_ = 0;
- audio_frame->timestamp_ = 0;
return 0;
}
@@ -1770,6 +1834,7 @@ int AudioCodingModuleImpl::IncomingPayload(const uint8_t* incoming_payload,
aux_rtp_header_->type.Audio.channel = 1;
}
+ aux_rtp_header_->header.timestamp = timestamp;
IncomingPacket(incoming_payload, payload_length, *aux_rtp_header_);
// Get ready for the next payload.
aux_rtp_header_->header.sequenceNumber++;
@@ -1851,8 +1916,7 @@ int AudioCodingModuleImpl::ConfigISACBandwidthEstimator(
}
int AudioCodingModuleImpl::PlayoutTimestamp(uint32_t* timestamp) {
- *timestamp = receiver_.PlayoutTimestamp();
- return 0;
+ return receiver_.GetPlayoutTimestamp(timestamp) ? 0 : -1;
}
bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const {
@@ -1976,10 +2040,6 @@ int AudioCodingModuleImpl::LeastRequiredDelayMs() const {
return receiver_.LeastRequiredDelayMs();
}
-const char* AudioCodingModuleImpl::Version() const {
- return kExperimentalAcmVersion;
-}
-
void AudioCodingModuleImpl::GetDecodingCallStatistics(
AudioDecodingCallStats* call_stats) const {
receiver_.GetDecodingCallStatistics(call_stats);
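PlayoutTimestamp() above now propagates failure from the receiver instead of always returning 0. A caller-side sketch, assuming the public declaration matches the implementation signature shown here:

    // Sketch: the return value must now be checked before using |timestamp|.
    bool QueryPlayoutTimestamp(webrtc::AudioCodingModule* acm,
                               uint32_t* timestamp) {
      return acm->PlayoutTimestamp(timestamp) == 0;
    }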
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h
index bc4ea0f7a66..e54202bf6da 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h
@@ -19,11 +19,11 @@
#include "webrtc/modules/audio_coding/main/acm2/acm_receiver.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
namespace webrtc {
class CriticalSectionWrapper;
-class RWLockWrapper;
namespace acm2 {
@@ -32,11 +32,9 @@ class ACMGenericCodec;
class AudioCodingModuleImpl : public AudioCodingModule {
public:
- explicit AudioCodingModuleImpl(int id);
+ explicit AudioCodingModuleImpl(const AudioCodingModule::Config& config);
~AudioCodingModuleImpl();
- virtual const char* Version() const;
-
// Change the unique identifier of this object.
virtual int32_t ChangeUniqueId(const int32_t id);
@@ -94,14 +92,27 @@ class AudioCodingModuleImpl : public AudioCodingModule {
int Add10MsData(const AudioFrame& audio_frame);
/////////////////////////////////////////
- // (FEC) Forward Error Correction
+ // (RED) Redundant Coding
+ //
+
+ // Configure RED status i.e. on/off.
+ int SetREDStatus(bool enable_red);
+
+ // Get RED status.
+ bool REDStatus() const;
+
+ /////////////////////////////////////////
+ // (FEC) Forward Error Correction (codec internal)
//
- // Configure FEC status i.e on/off.
- int SetFECStatus(bool enable_fec);
+ // Configure FEC status i.e. on/off.
+ int SetCodecFEC(bool enable_codec_fec);
// Get FEC status.
- bool FECStatus() const;
+ bool CodecFEC() const;
+
+ // Set target packet loss rate
+ int SetPacketLossRate(int loss_rate);
/////////////////////////////////////////
// (VAD) Voice Activity Detection
@@ -235,13 +246,14 @@ class AudioCodingModuleImpl : public AudioCodingModule {
ACMGenericCodec* CreateCodec(const CodecInst& codec);
- int InitializeReceiverSafe();
+ int InitializeReceiverSafe() EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
bool HaveValidEncoder(const char* caller_name) const;
// Set VAD/DTX status. This function does not acquire a lock, and it is
// created to be called only from inside a critical section.
- int SetVADSafe(bool enable_dtx, bool enable_vad, ACMVADMode mode);
+ int SetVADSafe(bool enable_dtx, bool enable_vad, ACMVADMode mode)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
// Process buffered audio when dual-streaming is not enabled (When RED is
// enabled still this function is used.)
@@ -263,18 +275,22 @@ class AudioCodingModuleImpl : public AudioCodingModule {
// -1: if encountering an error.
// 0: otherwise.
int PreprocessToAddData(const AudioFrame& in_frame,
- const AudioFrame** ptr_out);
+ const AudioFrame** ptr_out)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
// Change required states after starting to receive the codec corresponding
// to |index|.
int UpdateUponReceivingCodec(int index);
- int EncodeFragmentation(int fragmentation_index, int payload_type,
+ int EncodeFragmentation(int fragmentation_index,
+ int payload_type,
uint32_t current_timestamp,
ACMGenericCodec* encoder,
- uint8_t* stream);
+ uint8_t* stream)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
- void ResetFragmentation(int vector_size);
+ void ResetFragmentation(int vector_size)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
// Get a pointer to AudioDecoder of the given codec. For some codecs, e.g.
// iSAC, encoding and decoding have to be performed on a shared
@@ -289,50 +305,50 @@ class AudioCodingModuleImpl : public AudioCodingModule {
int GetAudioDecoder(const CodecInst& codec, int codec_id,
int mirror_id, AudioDecoder** decoder);
- AudioPacketizationCallback* packetization_callback_;
-
- int id_;
- uint32_t expected_codec_ts_;
- uint32_t expected_in_ts_;
- CodecInst send_codec_inst_;
-
- uint8_t cng_nb_pltype_;
- uint8_t cng_wb_pltype_;
- uint8_t cng_swb_pltype_;
- uint8_t cng_fb_pltype_;
-
- uint8_t red_pltype_;
- bool vad_enabled_;
- bool dtx_enabled_;
- ACMVADMode vad_mode_;
+ CriticalSectionWrapper* acm_crit_sect_;
+ int id_; // TODO(henrik.lundin) Make const.
+ uint32_t expected_codec_ts_ GUARDED_BY(acm_crit_sect_);
+ uint32_t expected_in_ts_ GUARDED_BY(acm_crit_sect_);
+ CodecInst send_codec_inst_ GUARDED_BY(acm_crit_sect_);
+
+ uint8_t cng_nb_pltype_ GUARDED_BY(acm_crit_sect_);
+ uint8_t cng_wb_pltype_ GUARDED_BY(acm_crit_sect_);
+ uint8_t cng_swb_pltype_ GUARDED_BY(acm_crit_sect_);
+ uint8_t cng_fb_pltype_ GUARDED_BY(acm_crit_sect_);
+
+ uint8_t red_pltype_ GUARDED_BY(acm_crit_sect_);
+ bool vad_enabled_ GUARDED_BY(acm_crit_sect_);
+ bool dtx_enabled_ GUARDED_BY(acm_crit_sect_);
+ ACMVADMode vad_mode_ GUARDED_BY(acm_crit_sect_);
ACMGenericCodec* codecs_[ACMCodecDB::kMaxNumCodecs];
int mirror_codec_idx_[ACMCodecDB::kMaxNumCodecs];
- bool stereo_send_;
+ bool stereo_send_ GUARDED_BY(acm_crit_sect_);
int current_send_codec_idx_;
bool send_codec_registered_;
- ACMResampler resampler_;
+ ACMResampler resampler_ GUARDED_BY(acm_crit_sect_);
AcmReceiver receiver_;
- CriticalSectionWrapper* acm_crit_sect_;
- ACMVADCallback* vad_callback_;
- // RED/FEC.
- bool is_first_red_;
- bool fec_enabled_;
+ // RED.
+ bool is_first_red_ GUARDED_BY(acm_crit_sect_);
+ bool red_enabled_ GUARDED_BY(acm_crit_sect_);
// TODO(turajs): |red_buffer_| is allocated in the constructor; why keep it as
// a pointer and not an array? If concerned about the memory, then add a
// set-up function to allocate it only when it is going to be used, i.e.
- // FEC or Dual-streaming is enabled.
- uint8_t* red_buffer_;
+ // RED or Dual-streaming is enabled.
+ uint8_t* red_buffer_ GUARDED_BY(acm_crit_sect_);
// TODO(turajs): we actually don't need |fragmentation_| as a member variable.
// It is sufficient to keep the length & payload type of previous payload in
// member variables.
- RTPFragmentationHeader fragmentation_;
- uint32_t last_fec_timestamp_;
+ RTPFragmentationHeader fragmentation_ GUARDED_BY(acm_crit_sect_);
+ uint32_t last_red_timestamp_ GUARDED_BY(acm_crit_sect_);
+
+ // Codec internal FEC
+ bool codec_fec_enabled_;
// This is to keep track of CN instances where we can send DTMFs.
- uint8_t previous_pltype_;
+ uint8_t previous_pltype_ GUARDED_BY(acm_crit_sect_);
// Used when payloads are pushed into ACM without any RTP info
// One example is when pre-encoded bit-stream is pushed from
@@ -342,15 +358,18 @@ class AudioCodingModuleImpl : public AudioCodingModule {
// be used in other methods, locks need to be taken.
WebRtcRTPHeader* aux_rtp_header_;
- bool receiver_initialized_;
+ bool receiver_initialized_ GUARDED_BY(acm_crit_sect_);
- CriticalSectionWrapper* callback_crit_sect_;
+ AudioFrame preprocess_frame_ GUARDED_BY(acm_crit_sect_);
+ CodecInst secondary_send_codec_inst_ GUARDED_BY(acm_crit_sect_);
+ scoped_ptr<ACMGenericCodec> secondary_encoder_ GUARDED_BY(acm_crit_sect_);
+ uint32_t codec_timestamp_ GUARDED_BY(acm_crit_sect_);
+ bool first_10ms_data_ GUARDED_BY(acm_crit_sect_);
- AudioFrame preprocess_frame_;
- CodecInst secondary_send_codec_inst_;
- scoped_ptr<ACMGenericCodec> secondary_encoder_;
- uint32_t codec_timestamp_;
- bool first_10ms_data_;
+ CriticalSectionWrapper* callback_crit_sect_;
+ AudioPacketizationCallback* packetization_callback_
+ GUARDED_BY(callback_crit_sect_);
+ ACMVADCallback* vad_callback_ GUARDED_BY(callback_crit_sect_);
};
} // namespace acm2
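The header now annotates members and methods with GUARDED_BY and EXCLUSIVE_LOCKS_REQUIRED from thread_annotations.h so that Clang's thread-safety analysis can check the locking contract. A minimal sketch of the same pattern (illustrative, not code from this patch):

    #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
    #include "webrtc/system_wrappers/interface/thread_annotations.h"

    // Illustrative only: the lock/annotation pattern used above, reduced to a
    // single counter. -Wthread-safety can then verify that |value_| is only
    // touched while |crit_| is held.
    class Counter {
     public:
      Counter()
          : crit_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
            value_(0) {}
      ~Counter() { delete crit_; }

      void Increment() {
        webrtc::CriticalSectionScoped lock(crit_);
        IncrementLocked();  // OK: the lock is held, matching the annotation.
      }

     private:
      void IncrementLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_) { ++value_; }

      webrtc::CriticalSectionWrapper* crit_;
      int value_ GUARDED_BY(crit_);
    };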
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest.cc
new file mode 100644
index 00000000000..37cd70e5e84
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest.cc
@@ -0,0 +1,514 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/compile_assert.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/sleep.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
+#include "webrtc/system_wrappers/interface/thread_wrapper.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+
+namespace webrtc {
+
+const int kSampleRateHz = 16000;
+const int kNumSamples10ms = kSampleRateHz / 100;
+const int kFrameSizeMs = 10; // Multiple of 10.
+const int kFrameSizeSamples = kFrameSizeMs / 10 * kNumSamples10ms;
+const int kPayloadSizeBytes = kFrameSizeSamples * sizeof(int16_t);
+const uint8_t kPayloadType = 111;
+
+class RtpUtility {
+ public:
+ RtpUtility(int samples_per_packet, uint8_t payload_type)
+ : samples_per_packet_(samples_per_packet), payload_type_(payload_type) {}
+
+ virtual ~RtpUtility() {}
+
+ void Populate(WebRtcRTPHeader* rtp_header) {
+ rtp_header->header.sequenceNumber = 0xABCD;
+ rtp_header->header.timestamp = 0xABCDEF01;
+ rtp_header->header.payloadType = payload_type_;
+ rtp_header->header.markerBit = false;
+ rtp_header->header.ssrc = 0x1234;
+ rtp_header->header.numCSRCs = 0;
+ rtp_header->frameType = kAudioFrameSpeech;
+
+ rtp_header->header.payload_type_frequency = kSampleRateHz;
+ rtp_header->type.Audio.channel = 1;
+ rtp_header->type.Audio.isCNG = false;
+ }
+
+ void Forward(WebRtcRTPHeader* rtp_header) {
+ ++rtp_header->header.sequenceNumber;
+ rtp_header->header.timestamp += samples_per_packet_;
+ }
+
+ private:
+ int samples_per_packet_;
+ uint8_t payload_type_;
+};
+
+class PacketizationCallbackStub : public AudioPacketizationCallback {
+ public:
+ PacketizationCallbackStub()
+ : num_calls_(0),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {}
+
+ virtual int32_t SendData(
+ FrameType frame_type,
+ uint8_t payload_type,
+ uint32_t timestamp,
+ const uint8_t* payload_data,
+ uint16_t payload_len_bytes,
+ const RTPFragmentationHeader* fragmentation) OVERRIDE {
+ CriticalSectionScoped lock(crit_sect_.get());
+ ++num_calls_;
+ last_payload_vec_.assign(payload_data, payload_data + payload_len_bytes);
+ return 0;
+ }
+
+ int num_calls() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return num_calls_;
+ }
+
+ int last_payload_len_bytes() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return last_payload_vec_.size();
+ }
+
+ void SwapBuffers(std::vector<uint8_t>* payload) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ last_payload_vec_.swap(*payload);
+ }
+
+ private:
+ int num_calls_ GUARDED_BY(crit_sect_);
+ std::vector<uint8_t> last_payload_vec_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<CriticalSectionWrapper> crit_sect_;
+};
+
+class AudioCodingModuleTest : public ::testing::Test {
+ protected:
+ AudioCodingModuleTest()
+ : id_(1),
+ rtp_utility_(new RtpUtility(kFrameSizeSamples, kPayloadType)),
+ clock_(Clock::GetRealTimeClock()) {}
+
+ ~AudioCodingModuleTest() {}
+
+ void TearDown() {}
+
+ void SetUp() {
+ acm_.reset(AudioCodingModule::Create(id_, clock_));
+
+ RegisterCodec();
+
+ rtp_utility_->Populate(&rtp_header_);
+
+ input_frame_.sample_rate_hz_ = kSampleRateHz;
+ input_frame_.num_channels_ = 1;
+ input_frame_.samples_per_channel_ = kSampleRateHz * 10 / 1000; // 10 ms.
+ COMPILE_ASSERT(kSampleRateHz * 10 / 1000 <= AudioFrame::kMaxDataSizeSamples,
+ audio_frame_too_small);
+ memset(input_frame_.data_,
+ 0,
+ input_frame_.samples_per_channel_ * sizeof(input_frame_.data_[0]));
+
+ ASSERT_EQ(0, acm_->RegisterTransportCallback(&packet_cb_));
+ }
+
+ virtual void RegisterCodec() {
+ AudioCodingModule::Codec("L16", &codec_, kSampleRateHz, 1);
+ codec_.pltype = kPayloadType;
+
+ // Register L16 codec in ACM.
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec_));
+ ASSERT_EQ(0, acm_->RegisterSendCodec(codec_));
+ }
+
+ virtual void InsertPacketAndPullAudio() {
+ InsertPacket();
+ PullAudio();
+ }
+
+ virtual void InsertPacket() {
+ const uint8_t kPayload[kPayloadSizeBytes] = {0};
+ ASSERT_EQ(0,
+ acm_->IncomingPacket(kPayload, kPayloadSizeBytes, rtp_header_));
+ rtp_utility_->Forward(&rtp_header_);
+ }
+
+ virtual void PullAudio() {
+ AudioFrame audio_frame;
+ ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &audio_frame));
+ }
+
+ virtual void InsertAudio() {
+ ASSERT_EQ(0, acm_->Add10MsData(input_frame_));
+ input_frame_.timestamp_ += kNumSamples10ms;
+ }
+
+ virtual void Encode() {
+ int32_t encoded_bytes = acm_->Process();
+ // Expect to get one packet with two bytes per sample, or no packet at all,
+ // depending on how many 10 ms blocks go into |codec_.pacsize|.
+ EXPECT_TRUE(encoded_bytes == 2 * codec_.pacsize || encoded_bytes == 0);
+ }
+
+ const int id_;
+ scoped_ptr<RtpUtility> rtp_utility_;
+ scoped_ptr<AudioCodingModule> acm_;
+ PacketizationCallbackStub packet_cb_;
+ WebRtcRTPHeader rtp_header_;
+ AudioFrame input_frame_;
+ CodecInst codec_;
+ Clock* clock_;
+};
+
+// Check if the statistics are initialized correctly. Before any call to ACM
+// all fields have to be zero.
+TEST_F(AudioCodingModuleTest, DISABLED_ON_ANDROID(InitializedToZero)) {
+ AudioDecodingCallStats stats;
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(0, stats.calls_to_neteq);
+ EXPECT_EQ(0, stats.calls_to_silence_generator);
+ EXPECT_EQ(0, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(0, stats.decoded_plc);
+ EXPECT_EQ(0, stats.decoded_plc_cng);
+}
+
+// Apply an initial playout delay. Calls to AudioCodingModule::PlayoutData10ms()
+// should result in generating silence, check the associated field.
+TEST_F(AudioCodingModuleTest, DISABLED_ON_ANDROID(SilenceGeneratorCalled)) {
+ AudioDecodingCallStats stats;
+ const int kInitialDelay = 100;
+
+ acm_->SetInitialPlayoutDelay(kInitialDelay);
+
+ int num_calls = 0;
+ for (int time_ms = 0; time_ms < kInitialDelay;
+ time_ms += kFrameSizeMs, ++num_calls) {
+ InsertPacketAndPullAudio();
+ }
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(0, stats.calls_to_neteq);
+ EXPECT_EQ(num_calls, stats.calls_to_silence_generator);
+ EXPECT_EQ(0, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(0, stats.decoded_plc);
+ EXPECT_EQ(0, stats.decoded_plc_cng);
+}
+
+// Insert some packets and pull audio. Check statistics are valid. Then,
+// simulate packet loss and check if PLC and PLC-to-CNG statistics are
+// correctly updated.
+TEST_F(AudioCodingModuleTest, DISABLED_ON_ANDROID(NetEqCalls)) {
+ AudioDecodingCallStats stats;
+ const int kNumNormalCalls = 10;
+
+ for (int num_calls = 0; num_calls < kNumNormalCalls; ++num_calls) {
+ InsertPacketAndPullAudio();
+ }
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(kNumNormalCalls, stats.calls_to_neteq);
+ EXPECT_EQ(0, stats.calls_to_silence_generator);
+ EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(0, stats.decoded_plc);
+ EXPECT_EQ(0, stats.decoded_plc_cng);
+
+ const int kNumPlc = 3;
+ const int kNumPlcCng = 5;
+
+ // Simulate packet-loss. NetEq first performs PLC then PLC fades to CNG.
+ for (int n = 0; n < kNumPlc + kNumPlcCng; ++n) {
+ PullAudio();
+ }
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(kNumNormalCalls + kNumPlc + kNumPlcCng, stats.calls_to_neteq);
+ EXPECT_EQ(0, stats.calls_to_silence_generator);
+ EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(kNumPlc, stats.decoded_plc);
+ EXPECT_EQ(kNumPlcCng, stats.decoded_plc_cng);
+}
+
+TEST_F(AudioCodingModuleTest, VerifyOutputFrame) {
+ AudioFrame audio_frame;
+ const int kSampleRateHz = 32000;
+ EXPECT_EQ(0, acm_->PlayoutData10Ms(kSampleRateHz, &audio_frame));
+ EXPECT_EQ(id_, audio_frame.id_);
+ EXPECT_EQ(0u, audio_frame.timestamp_);
+ EXPECT_GT(audio_frame.num_channels_, 0);
+ EXPECT_EQ(kSampleRateHz / 100, audio_frame.samples_per_channel_);
+ EXPECT_EQ(kSampleRateHz, audio_frame.sample_rate_hz_);
+}
+
+TEST_F(AudioCodingModuleTest, FailOnZeroDesiredFrequency) {
+ AudioFrame audio_frame;
+ EXPECT_EQ(-1, acm_->PlayoutData10Ms(0, &audio_frame));
+}
+
+// A multi-threaded test for ACM. This base class uses the PCM16b 16 kHz
+// codec, while the derived class AcmIsacMtTest uses iSAC.
+class AudioCodingModuleMtTest : public AudioCodingModuleTest {
+ protected:
+ static const int kNumPackets = 500;
+ static const int kNumPullCalls = 500;
+
+ AudioCodingModuleMtTest()
+ : AudioCodingModuleTest(),
+ send_thread_(ThreadWrapper::CreateThread(CbSendThread,
+ this,
+ kRealtimePriority,
+ "send")),
+ insert_packet_thread_(ThreadWrapper::CreateThread(CbInsertPacketThread,
+ this,
+ kRealtimePriority,
+ "insert_packet")),
+ pull_audio_thread_(ThreadWrapper::CreateThread(CbPullAudioThread,
+ this,
+ kRealtimePriority,
+ "pull_audio")),
+ test_complete_(EventWrapper::Create()),
+ send_count_(0),
+ insert_packet_count_(0),
+ pull_audio_count_(0),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ next_insert_packet_time_ms_(0),
+ fake_clock_(new SimulatedClock(0)) {
+ clock_ = fake_clock_.get();
+ }
+
+ void SetUp() {
+ AudioCodingModuleTest::SetUp();
+ StartThreads();
+ }
+
+ void StartThreads() {
+ unsigned int thread_id = 0;
+ ASSERT_TRUE(send_thread_->Start(thread_id));
+ ASSERT_TRUE(insert_packet_thread_->Start(thread_id));
+ ASSERT_TRUE(pull_audio_thread_->Start(thread_id));
+ }
+
+ void TearDown() {
+ AudioCodingModuleTest::TearDown();
+ pull_audio_thread_->Stop();
+ send_thread_->Stop();
+ insert_packet_thread_->Stop();
+ }
+
+ EventTypeWrapper RunTest() {
+ return test_complete_->Wait(10 * 60 * 1000); // 10 minutes' timeout.
+ }
+
+ virtual bool TestDone() {
+ if (packet_cb_.num_calls() > kNumPackets) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (pull_audio_count_ > kNumPullCalls) {
+ // Both conditions for completion are met. End the test.
+ return true;
+ }
+ }
+ return false;
+ }
+
+ static bool CbSendThread(void* context) {
+ return reinterpret_cast<AudioCodingModuleMtTest*>(context)->CbSendImpl();
+ }
+
+ // The send thread doesn't have to care about the current simulated time,
+ // since only the AcmReceiver is using the clock.
+ bool CbSendImpl() {
+ SleepMs(1);
+ if (HasFatalFailure()) {
+ // End the test early if a fatal failure (ASSERT_*) has occurred.
+ test_complete_->Set();
+ }
+ ++send_count_;
+ InsertAudio();
+ Encode();
+ if (TestDone()) {
+ test_complete_->Set();
+ }
+ return true;
+ }
+
+ static bool CbInsertPacketThread(void* context) {
+ return reinterpret_cast<AudioCodingModuleMtTest*>(context)
+ ->CbInsertPacketImpl();
+ }
+
+ bool CbInsertPacketImpl() {
+ SleepMs(1);
+ {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) {
+ return true;
+ }
+ next_insert_packet_time_ms_ += 10;
+ }
+ // Now we're not holding the crit sect when calling ACM.
+ ++insert_packet_count_;
+ InsertPacket();
+ return true;
+ }
+
+ static bool CbPullAudioThread(void* context) {
+ return reinterpret_cast<AudioCodingModuleMtTest*>(context)
+ ->CbPullAudioImpl();
+ }
+
+ bool CbPullAudioImpl() {
+ SleepMs(1);
+ {
+ CriticalSectionScoped lock(crit_sect_.get());
+ // Don't let the insert thread fall behind.
+ if (next_insert_packet_time_ms_ < clock_->TimeInMilliseconds()) {
+ return true;
+ }
+ ++pull_audio_count_;
+ }
+ // Now we're not holding the crit sect when calling ACM.
+ PullAudio();
+ fake_clock_->AdvanceTimeMilliseconds(10);
+ return true;
+ }
+
+ scoped_ptr<ThreadWrapper> send_thread_;
+ scoped_ptr<ThreadWrapper> insert_packet_thread_;
+ scoped_ptr<ThreadWrapper> pull_audio_thread_;
+ const scoped_ptr<EventWrapper> test_complete_;
+ int send_count_;
+ int insert_packet_count_;
+ int pull_audio_count_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ int64_t next_insert_packet_time_ms_ GUARDED_BY(crit_sect_);
+ scoped_ptr<SimulatedClock> fake_clock_;
+};
+
+TEST_F(AudioCodingModuleMtTest, DoTest) {
+ EXPECT_EQ(kEventSignaled, RunTest());
+}
+
+// This is a multi-threaded ACM test using iSAC. The test encodes audio
+// from a PCM file. The most recent encoded frame is used as input to the
+// receiving part. Depending on timing, it may happen that the same RTP packet
+// is inserted into the receiver multiple times, but this is a valid use-case,
+// and simplifies the test code a lot.
+class AcmIsacMtTest : public AudioCodingModuleMtTest {
+ protected:
+ static const int kNumPackets = 500;
+ static const int kNumPullCalls = 500;
+
+ AcmIsacMtTest()
+ : AudioCodingModuleMtTest(),
+ last_packet_number_(0) {}
+
+ ~AcmIsacMtTest() {}
+
+ void SetUp() {
+ AudioCodingModuleTest::SetUp();
+
+ // Set up input audio source to read from specified file, loop after 5
+ // seconds, and deliver blocks of 10 ms.
+ const std::string input_file_name =
+ webrtc::test::ResourcePath("audio_coding/speech_mono_16kHz", "pcm");
+ audio_loop_.Init(input_file_name, 5 * kSampleRateHz, kNumSamples10ms);
+
+ // Generate one packet to have something to insert.
+ int loop_counter = 0;
+ while (packet_cb_.last_payload_len_bytes() == 0) {
+ InsertAudio();
+ Encode();
+ ASSERT_LT(loop_counter++, 10);
+ }
+ // Set |last_packet_number_| to one less than |num_calls| so that the packet
+ // will be fetched in the next InsertPacket() call.
+ last_packet_number_ = packet_cb_.num_calls() - 1;
+
+ StartThreads();
+ }
+
+ virtual void RegisterCodec() {
+ COMPILE_ASSERT(kSampleRateHz == 16000, test_designed_for_isac_16khz);
+ AudioCodingModule::Codec("ISAC", &codec_, kSampleRateHz, 1);
+ codec_.pltype = kPayloadType;
+
+ // Register iSAC codec in ACM, effectively unregistering the PCM16B codec
+ // registered in AudioCodingModuleTest::SetUp().
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec_));
+ ASSERT_EQ(0, acm_->RegisterSendCodec(codec_));
+ }
+
+ void InsertPacket() {
+ int num_calls = packet_cb_.num_calls(); // Store locally for thread safety.
+ if (num_calls > last_packet_number_) {
+ // Get the new payload out from the callback handler.
+ // Note that since we swap buffers here instead of directly inserting
+ // a pointer to the data in |packet_cb_|, we avoid locking the callback
+ // for the duration of the IncomingPacket() call.
+ packet_cb_.SwapBuffers(&last_payload_vec_);
+ ASSERT_GT(last_payload_vec_.size(), 0u);
+ rtp_utility_->Forward(&rtp_header_);
+ last_packet_number_ = num_calls;
+ }
+ ASSERT_GT(last_payload_vec_.size(), 0u);
+ ASSERT_EQ(
+ 0,
+ acm_->IncomingPacket(
+ &last_payload_vec_[0], last_payload_vec_.size(), rtp_header_));
+ }
+
+ void InsertAudio() {
+ memcpy(input_frame_.data_, audio_loop_.GetNextBlock(), kNumSamples10ms);
+ AudioCodingModuleTest::InsertAudio();
+ }
+
+ void Encode() { ASSERT_GE(acm_->Process(), 0); }
+
+ // This method is the same as AudioCodingModuleMtTest::TestDone(), but here
+ // it is using the constants defined in this class (i.e., shorter test run).
+ virtual bool TestDone() {
+ if (packet_cb_.num_calls() > kNumPackets) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (pull_audio_count_ > kNumPullCalls) {
+ // Both conditions for completion are met. End the test.
+ return true;
+ }
+ }
+ return false;
+ }
+
+ int last_packet_number_;
+ std::vector<uint8_t> last_payload_vec_;
+ test::AudioLoop audio_loop_;
+};
+
+TEST_F(AcmIsacMtTest, DoTest) {
+ EXPECT_EQ(kEventSignaled, RunTest());
+}
+
+} // namespace webrtc
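The multi-threaded tests above are built on ThreadWrapper with a static callback that returns true to keep the thread running. A stripped-down sketch of that pattern, reusing only the calls that appear in the test (the Ticker class is hypothetical):

    #include "webrtc/system_wrappers/interface/scoped_ptr.h"
    #include "webrtc/system_wrappers/interface/thread_wrapper.h"

    static bool TickCb(void* context);

    class Ticker {
     public:
      Ticker()
          : thread_(webrtc::ThreadWrapper::CreateThread(
                TickCb, this, webrtc::kRealtimePriority, "ticker")),
            ticks_(0) {}

      bool Start() {
        unsigned int thread_id = 0;
        return thread_->Start(thread_id);
      }
      void Stop() { thread_->Stop(); }

      // Returning true keeps the thread running; a real implementation would
      // guard shared state, as the tests above do with CriticalSectionWrapper.
      bool Tick() { ++ticks_; return true; }

     private:
      webrtc::scoped_ptr<webrtc::ThreadWrapper> thread_;
      int ticks_;
    };

    static bool TickCb(void* context) {
      return reinterpret_cast<Ticker*>(context)->Tick();
    }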
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/call_statistics.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/call_statistics.cc
index 9153325afaf..4c3e9fc3939 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/call_statistics.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/call_statistics.cc
@@ -10,7 +10,7 @@
#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-#include <cassert>
+#include <assert.h>
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc
index c2b218cb6cf..786fb2e5275 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc
@@ -219,6 +219,14 @@ void InitialDelayManager::LatePackets(
return;
}
+bool InitialDelayManager::GetPlayoutTimestamp(uint32_t* playout_timestamp) {
+ if (!buffering_) {
+ return false;
+ }
+ *playout_timestamp = playout_timestamp_;
+ return true;
+}
+
void InitialDelayManager::DisableBuffering() {
buffering_ = false;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h
index 3c5ba3c0139..6edc1150843 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h
@@ -65,8 +65,9 @@ class InitialDelayManager {
// sequence of late (or perhaps missing) packets is computed.
void LatePackets(uint32_t timestamp_now, SyncStream* sync_stream);
- // Playout timestamp, valid when buffering.
- uint32_t playout_timestamp() { return playout_timestamp_; }
+ // Get playout timestamp.
+ // Returns true if the timestamp is valid (when buffering), otherwise false.
+ bool GetPlayoutTimestamp(uint32_t* playout_timestamp);
// True if buffered audio is less than the given initial delay (specified at
// the constructor). Buffering might be disabled by the client of this class.
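GetPlayoutTimestamp() replaces the unconditional playout_timestamp() accessor, so callers must check the return value before trusting the output parameter. A sketch, assuming the caller sits in the same namespace as InitialDelayManager:

    void MaybeUsePlayoutTimestamp(InitialDelayManager* manager) {
      uint32_t playout_timestamp = 0;
      if (manager->GetPlayoutTimestamp(&playout_timestamp)) {
        // Only reached while the manager is still buffering; the value is valid.
      }
    }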
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc
index 1e129f37e90..38b7cfc2714 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include <cstring>
+#include <string.h>
#include "gtest/gtest.h"
#include "webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h"
@@ -359,7 +359,9 @@ TEST_F(InitialDelayManagerTest, BufferingAudio) {
EXPECT_TRUE(manager_->buffering());
const uint32_t expected_playout_timestamp = rtp_info_.header.timestamp -
kInitDelayMs * kSamplingRateHz / 1000;
- EXPECT_EQ(expected_playout_timestamp, manager_->playout_timestamp());
+ uint32_t actual_playout_timestamp = 0;
+ EXPECT_TRUE(manager_->GetPlayoutTimestamp(&actual_playout_timestamp));
+ EXPECT_EQ(expected_playout_timestamp, actual_playout_timestamp);
NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc
index 8011d8856c0..5837c31a899 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc
@@ -398,7 +398,7 @@ TEST(NackTest, ChangeOfListSizeAppliedAndOldElementsRemoved) {
// Packet lost more than NACK-list size limit.
uint16_t num_lost_packets = kNackThreshold + kNackListSize + 5;
- scoped_array<uint16_t> seq_num_lost(new uint16_t[num_lost_packets]);
+ scoped_ptr<uint16_t[]> seq_num_lost(new uint16_t[num_lost_packets]);
for (int n = 0; n < num_lost_packets; ++n) {
seq_num_lost[n] = ++seq_num;
}
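scoped_array is replaced by the array specialization of scoped_ptr, which provides operator[] and releases the buffer with delete[]. A small sketch of the replacement pattern (the helper function is illustrative):

    #include "webrtc/system_wrappers/interface/scoped_ptr.h"

    void FillSequenceNumbers(int num_lost_packets, uint16_t first_seq_num) {
      webrtc::scoped_ptr<uint16_t[]> seq_num_lost(new uint16_t[num_lost_packets]);
      for (int n = 0; n < num_lost_packets; ++n)
        seq_num_lost[n] = static_cast<uint16_t>(first_seq_num + n);
      // delete[] happens automatically when |seq_num_lost| goes out of scope.
    }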
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/interface/audio_coding_module.h b/chromium/third_party/webrtc/modules/audio_coding/main/interface/audio_coding_module.h
index db45addde22..cb0953aa400 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/interface/audio_coding_module.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/interface/audio_coding_module.h
@@ -15,7 +15,9 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
#include "webrtc/modules/interface/module.h"
+#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -25,7 +27,6 @@ struct CodecInst;
struct WebRtcRTPHeader;
class AudioFrame;
class RTPFragmentationHeader;
-class Clock;
#define WEBRTC_10MS_PCM_AUDIO 960 // 16 bits super wideband 48 kHz
@@ -73,15 +74,22 @@ class ACMVQMonCallback {
const uint16_t delayMS) = 0; // average delay in ms
};
-// Version string for testing, to distinguish instances of ACM1 from ACM2.
-extern const char kLegacyAcmVersion[];
-extern const char kExperimentalAcmVersion[];
-
class AudioCodingModule: public Module {
protected:
AudioCodingModule() {}
public:
+ struct Config {
+ Config()
+ : id(0),
+ neteq_config(),
+ clock(Clock::GetRealTimeClock()) {}
+
+ int id;
+ NetEq::Config neteq_config;
+ Clock* clock;
+ };
+
///////////////////////////////////////////////////////////////////////////
// Creation and destruction of a ACM.
//
@@ -178,11 +186,6 @@ class AudioCodingModule: public Module {
//
static bool IsCodecValid(const CodecInst& codec);
- // Returns the version of ACM. This facilitates distinguishing instances of
- // ACM1 from ACM2 while testing. This API will be removed when ACM1 is
- // completely removed.
- virtual const char* Version() const = 0;
-
///////////////////////////////////////////////////////////////////////////
// Sender
//
@@ -370,12 +373,12 @@ class AudioCodingModule: public Module {
virtual int32_t Add10MsData(const AudioFrame& audio_frame) = 0;
///////////////////////////////////////////////////////////////////////////
- // (FEC) Forward Error Correction
+ // (RED) Redundant Coding
//
///////////////////////////////////////////////////////////////////////////
- // int32_t SetFECStatus(const bool enable)
- // configure FEC status i.e. on/off.
+ // int32_t SetREDStatus()
+ // configure RED status i.e. on/off.
//
// RFC 2198 describes a solution which has a single payload type which
// signifies a packet with redundancy. That packet then becomes a container,
@@ -385,27 +388,69 @@ class AudioCodingModule: public Module {
// since each encapsulated payload must be preceded by a header indicating
// the type of data enclosed.
//
- // This means that FEC is actually a RED scheme.
+ // Input:
+ // -enable_red : if true RED is enabled, otherwise RED is
+ // disabled.
+ //
+ // Return value:
+ // -1 if failed to set RED status,
+ // 0 if succeeded.
+ //
+ virtual int32_t SetREDStatus(bool enable_red) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // bool REDStatus()
+ // Get RED status
+ //
+ // Return value:
+ // true if RED is enabled,
+ // false if RED is disabled.
+ //
+ virtual bool REDStatus() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // (FEC) Forward Error Correction (codec internal)
+ //
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t SetCodecFEC()
+ // Configures codec internal FEC status, i.e. on/off. Has no effect on codecs
+ // that do not provide internal FEC.
//
// Input:
- // -enable_fec : if true FEC is enabled, otherwise FEC is
+ // -enable_codec_fec : if true, codec FEC will be enabled; otherwise it is
// disabled.
//
// Return value:
- // -1 if failed to set FEC status,
+ // -1 if failed, or the codec does not support FEC
// 0 if succeeded.
//
- virtual int32_t SetFECStatus(const bool enable_fec) = 0;
+ virtual int SetCodecFEC(bool enable_codec_fec) = 0;
///////////////////////////////////////////////////////////////////////////
- // bool FECStatus()
- // Get FEC status
+ // bool CodecFEC()
+ // Gets status of codec internal FEC.
//
- // Return value
+ // Return value:
// true if FEC is enabled,
// false if FEC is disabled.
//
- virtual bool FECStatus() const = 0;
+ virtual bool CodecFEC() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetPacketLossRate()
+ // Sets the expected packet loss rate for encoding. Some encoders provide
+ // packet-loss-aware encoding to make the stream less sensitive to packet
+ // losses, e.g. through FEC. Has no effect on codecs that do not provide such
+ // encoding.
+ //
+ // Input:
+ // -packet_loss_rate : expected packet loss rate (0 -- 100 inclusive).
+ //
+ // Return value:
+ // -1 if failed to set packet loss rate,
+ // 0 if succeeded.
+ //
+ virtual int SetPacketLossRate(int packet_loss_rate) = 0;
///////////////////////////////////////////////////////////////////////////
// (VAD) Voice Activity Detection
@@ -936,20 +981,6 @@ class AudioCodingModule: public Module {
AudioDecodingCallStats* call_stats) const = 0;
};
-struct AudioCodingModuleFactory {
- AudioCodingModuleFactory() {}
- virtual ~AudioCodingModuleFactory() {}
-
- virtual AudioCodingModule* Create(int id) const;
-};
-
-struct NewAudioCodingModuleFactory : AudioCodingModuleFactory {
- NewAudioCodingModuleFactory() {}
- virtual ~NewAudioCodingModuleFactory() {}
-
- virtual AudioCodingModule* Create(int id) const;
-};
-
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_H_
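Taken together, the new interface splits RFC 2198 redundancy (SetREDStatus) from codec-internal FEC (SetCodecFEC/SetPacketLossRate), and the implementation rejects enabling both at once. A caller-side sketch using only the declarations in this header (the helper and the 10% loss figure are illustrative):

    #include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
    #include "webrtc/system_wrappers/interface/scoped_ptr.h"

    bool ConfigureRedundancy(int id, bool prefer_codec_fec) {
      webrtc::AudioCodingModule::Config config;
      config.id = id;  // |clock| and |neteq_config| keep their defaults.
      webrtc::scoped_ptr<webrtc::AudioCodingModule> acm(
          webrtc::AudioCodingModule::Create(config.id, config.clock));

      // RED and codec-internal FEC cannot be co-enabled, so pick one.
      if (prefer_codec_fec) {
        if (acm->SetCodecFEC(true) != 0)
          return false;  // E.g. the current send codec has no internal FEC.
        return acm->SetPacketLossRate(10) == 0;  // Expect roughly 10% loss.
      }
      return acm->SetREDStatus(true) == 0;
    }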
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/Android.mk b/chromium/third_party/webrtc/modules/audio_coding/main/source/Android.mk
deleted file mode 100644
index 90214a9c408..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/Android.mk
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-include $(LOCAL_PATH)/../../../../../android-webrtc.mk
-
-LOCAL_ARM_MODE := arm
-LOCAL_MODULE_CLASS := STATIC_LIBRARIES
-LOCAL_MODULE := libwebrtc_audio_coding
-LOCAL_MODULE_TAGS := optional
-LOCAL_CPP_EXTENSION := .cc
-LOCAL_SRC_FILES := \
- acm_cng.cc \
- acm_codec_database.cc \
- acm_dtmf_detection.cc \
- acm_dtmf_playout.cc \
- acm_g722.cc \
- acm_generic_codec.cc \
- acm_ilbc.cc \
- acm_isac.cc \
- acm_neteq.cc \
- acm_pcm16b.cc \
- acm_pcma.cc \
- acm_pcmu.cc \
- acm_red.cc \
- acm_resampler.cc \
- audio_coding_module.cc \
- audio_coding_module_impl.cc
-
-# Flags passed to both C and C++ files.
-LOCAL_CFLAGS := \
- $(MY_WEBRTC_COMMON_DEFS)
-
-LOCAL_C_INCLUDES := \
- $(LOCAL_PATH)/../interface \
- $(LOCAL_PATH)/../../codecs/cng/include \
- $(LOCAL_PATH)/../../codecs/g711/include \
- $(LOCAL_PATH)/../../codecs/g722/include \
- $(LOCAL_PATH)/../../codecs/ilbc/interface \
- $(LOCAL_PATH)/../../codecs/iSAC/main/interface \
- $(LOCAL_PATH)/../../codecs/iSAC/fix/interface \
- $(LOCAL_PATH)/../../codecs/pcm16b/include \
- $(LOCAL_PATH)/../../neteq/interface \
- $(LOCAL_PATH)/../../../.. \
- $(LOCAL_PATH)/../../../interface \
- $(LOCAL_PATH)/../../../../common_audio/resampler/include \
- $(LOCAL_PATH)/../../../../common_audio/signal_processing/include \
- $(LOCAL_PATH)/../../../../common_audio/vad/include \
- $(LOCAL_PATH)/../../../../system_wrappers/interface
-
-LOCAL_SHARED_LIBRARIES := \
- libcutils \
- libdl \
- libstlport
-
-ifndef NDK_ROOT
-include external/stlport/libstlport.mk
-endif
-include $(BUILD_STATIC_LIBRARY)
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.cc
deleted file mode 100644
index d398607789b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.cc
+++ /dev/null
@@ -1,430 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_amr.h"
-
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_AMR
-// NOTE! GSM AMR is not included in the open-source package. The following
-// interface file is needed:
-//
-// /modules/audio_coding/codecs/amr/main/interface/amr_interface.h
-//
-// The API in the header file should match the one below.
-//
-// int16_t WebRtcAmr_CreateEnc(AMR_encinst_t_** enc_inst);
-// int16_t WebRtcAmr_CreateDec(AMR_decinst_t_** dec_inst);
-// int16_t WebRtcAmr_FreeEnc(AMR_encinst_t_* enc_inst);
-// int16_t WebRtcAmr_FreeDec(AMR_decinst_t_* dec_inst);
-// int16_t WebRtcAmr_Encode(AMR_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t*output,
-// int16_t mode);
-// int16_t WebRtcAmr_EncoderInit(AMR_encinst_t_* enc_inst,
-// int16_t dtx_mode);
-// int16_t WebRtcAmr_EncodeBitmode(AMR_encinst_t_* enc_inst,
-// int format);
-// int16_t WebRtcAmr_Decode(AMR_decinst_t_* dec_inst);
-// int16_t WebRtcAmr_DecodePlc(AMR_decinst_t_* dec_inst);
-// int16_t WebRtcAmr_DecoderInit(AMR_decinst_t_* dec_inst);
-// int16_t WebRtcAmr_DecodeBitmode(AMR_decinst_t_* dec_inst,
-// int format);
-#include "amr_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_AMR
-ACMAMR::ACMAMR(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- encoding_mode_(-1), // Invalid value.
- encoding_rate_(0), // Invalid value.
- encoder_packing_format_(AMRBandwidthEfficient),
- decoder_packing_format_(AMRBandwidthEfficient) {
- return;
-}
-
-ACMAMR::~ACMAMR() {
- return;
-}
-
-int16_t ACMAMR::InternalEncode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMAMR::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMAMR::EnableDTX() {
- return -1;
-}
-
-int16_t ACMAMR::DisableDTX() {
- return -1;
-}
-
-int16_t ACMAMR::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMAMR::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMAMR::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMAMR::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMAMR::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMAMR::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMAMR::DestructDecoderSafe() {
- return;
-}
-
-int16_t ACMAMR::SetBitRateSafe(const int32_t /* rate */) {
- return -1;
-}
-
-void ACMAMR::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMAMR::SetAMREncoderPackingFormat(
- ACMAMRPackingFormat /* packing_format */) {
- return -1;
-}
-
-ACMAMRPackingFormat ACMAMR::AMREncoderPackingFormat() const {
- return AMRUndefined;
-}
-
-int16_t ACMAMR::SetAMRDecoderPackingFormat(
- ACMAMRPackingFormat /* packing_format */) {
- return -1;
-}
-
-ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const {
- return AMRUndefined;
-}
-
-#else //===================== Actual Implementation =======================
-
-#define WEBRTC_AMR_MR475 0
-#define WEBRTC_AMR_MR515 1
-#define WEBRTC_AMR_MR59 2
-#define WEBRTC_AMR_MR67 3
-#define WEBRTC_AMR_MR74 4
-#define WEBRTC_AMR_MR795 5
-#define WEBRTC_AMR_MR102 6
-#define WEBRTC_AMR_MR122 7
-
-ACMAMR::ACMAMR(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- encoding_mode_(-1), // invalid value
- encoding_rate_(0) { // invalid value
- codec_id_ = codec_id;
- has_internal_dtx_ = true;
- encoder_packing_format_ = AMRBandwidthEfficient;
- decoder_packing_format_ = AMRBandwidthEfficient;
- return;
-}
-
-ACMAMR::~ACMAMR() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcAmr_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcAmr_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMAMR::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- int16_t vad_decision = 1;
- // Sanity check that the rate is set correctly. This check could be
- // skipped: if the rate were not set correctly, the initialization flag
- // would be false and we would not get here.
- if ((encoding_mode_ < WEBRTC_AMR_MR475) ||
- (encoding_mode_ > WEBRTC_AMR_MR122)) {
- *bitstream_len_byte = 0;
- return -1;
- }
- *bitstream_len_byte = WebRtcAmr_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- (int16_t*)bitstream,
- encoding_mode_);
-
- // Update VAD, if internal DTX is used
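- // The threshold below scales with the frame length: for a 160-sample
- // frame it is 7 bytes, roughly the size of an AMR SID/no-data frame, so a
- // payload that small is presumed to contain no speech and the per-10 ms
- // VAD labels are set accordingly.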
- if (has_internal_dtx_ && dtx_enabled_) {
- if (*bitstream_len_byte <= (7 * frame_len_smpl_ / 160)) {
- vad_decision = 0;
- }
- for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
- vad_label_[n] = vad_decision;
- }
- }
- // increment the read index
- in_audio_ix_read_ += frame_len_smpl_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMAMR::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMAMR::EnableDTX() {
- if (dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exists
- // enable DTX
- if (WebRtcAmr_EncoderInit(encoder_inst_ptr_, 1) < 0) {
- return -1;
- }
- dtx_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-}
-
-int16_t ACMAMR::DisableDTX() {
- if (!dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exists
- // disable DTX
- if (WebRtcAmr_EncoderInit(encoder_inst_ptr_, 0) < 0) {
- return -1;
- }
- dtx_enabled_ = false;
- return 0;
- } else {
- // Encoder doesn't exist, therefore disabling is harmless.
- return 0;
- }
-}
-
-int16_t ACMAMR::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
- status += (WebRtcAmr_EncoderInit(
- encoder_inst_ptr_, ((codec_params->enable_dtx) ? 1 : 0)) < 0) ? -1 : 0;
- status += (WebRtcAmr_EncodeBitmode(
- encoder_inst_ptr_, encoder_packing_format_) < 0) ? -1 : 0;
- return (status < 0) ? -1 : 0;
-}
-
-int16_t ACMAMR::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- int16_t status =
- ((WebRtcAmr_DecoderInit(decoder_inst_ptr_) < 0) ? -1 : 0);
- status += WebRtcAmr_DecodeBitmode(decoder_inst_ptr_, decoder_packing_format_);
- return (status < 0) ? -1 : 0;
-}
-
-int32_t ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- // TODO: Log error.
- return -1;
- }
- // Fill up the structure by calling
- // "SET_CODEC_PAR" and "SET_AMR_FUNCTIONS".
- // Then call NetEQ to add the codec to its
- // database.
- SET_CODEC_PAR((codec_def), kDecoderAMR, codec_inst.pltype, decoder_inst_ptr_,
- 8000);
- SET_AMR_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMAMR::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMAMR::InternalCreateEncoder() {
- return WebRtcAmr_CreateEnc(&encoder_inst_ptr_);
-}
-
-void ACMAMR::DestructEncoderSafe() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcAmr_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- // There is no encoder; reset the following state.
- encoder_exist_ = false;
- encoder_initialized_ = false;
- encoding_mode_ = -1; // invalid value
- encoding_rate_ = 0; // invalid value
-}
-
-int16_t ACMAMR::InternalCreateDecoder() {
- return WebRtcAmr_CreateDec(&decoder_inst_ptr_);
-}
-
-void ACMAMR::DestructDecoderSafe() {
- if (decoder_inst_ptr_ != NULL) {
- WebRtcAmr_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- // There is no decoder instance; reset the following state.
- decoder_exist_ = false;
- decoder_initialized_ = false;
-}
-
-int16_t ACMAMR::SetBitRateSafe(const int32_t rate) {
- switch (rate) {
- case 4750: {
- encoding_mode_ = WEBRTC_AMR_MR475;
- encoding_rate_ = 4750;
- break;
- }
- case 5150: {
- encoding_mode_ = WEBRTC_AMR_MR515;
- encoding_rate_ = 5150;
- break;
- }
- case 5900: {
- encoding_mode_ = WEBRTC_AMR_MR59;
- encoding_rate_ = 5900;
- break;
- }
- case 6700: {
- encoding_mode_ = WEBRTC_AMR_MR67;
- encoding_rate_ = 6700;
- break;
- }
- case 7400: {
- encoding_mode_ = WEBRTC_AMR_MR74;
- encoding_rate_ = 7400;
- break;
- }
- case 7950: {
- encoding_mode_ = WEBRTC_AMR_MR795;
- encoding_rate_ = 7950;
- break;
- }
- case 10200: {
- encoding_mode_ = WEBRTC_AMR_MR102;
- encoding_rate_ = 10200;
- break;
- }
- case 12200: {
- encoding_mode_ = WEBRTC_AMR_MR122;
- encoding_rate_ = 12200;
- break;
- }
- default: {
- return -1;
- }
- }
- return 0;
-}
-
-void ACMAMR::InternalDestructEncoderInst(void* ptr_inst) {
- // Free the memory where ptr_inst is pointing to
- if (ptr_inst != NULL) {
- WebRtcAmr_FreeEnc(reinterpret_cast<AMR_encinst_t_*>(ptr_inst));
- }
- return;
-}
-
-int16_t ACMAMR::SetAMREncoderPackingFormat(
- ACMAMRPackingFormat packing_format) {
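- // In bandwidth-efficient packing the speech bits are concatenated without
- // padding, while octet-aligned packing pads each field to whole octets and
- // file storage uses the AMR file format (cf. RFC 4867). Anything else is
- // rejected.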
- if ((packing_format != AMRBandwidthEfficient) &&
- (packing_format != AMROctetAlligned) &&
- (packing_format != AMRFileStorage)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Invalid AMR Encoder packing-format.");
- return -1;
- } else {
- if (WebRtcAmr_EncodeBitmode(encoder_inst_ptr_, packing_format) < 0) {
- return -1;
- } else {
- encoder_packing_format_ = packing_format;
- return 0;
- }
- }
-}
-
-ACMAMRPackingFormat ACMAMR::AMREncoderPackingFormat() const {
- return encoder_packing_format_;
-}
-
-int16_t ACMAMR::SetAMRDecoderPackingFormat(
- ACMAMRPackingFormat packing_format) {
- if ((packing_format != AMRBandwidthEfficient) &&
- (packing_format != AMROctetAlligned) &&
- (packing_format != AMRFileStorage)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Invalid AMR decoder packing-format.");
- return -1;
- } else {
- if (WebRtcAmr_DecodeBitmode(decoder_inst_ptr_, packing_format) < 0) {
- return -1;
- } else {
- decoder_packing_format_ = packing_format;
- return 0;
- }
- }
-}
-
-ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const {
- return decoder_packing_format_;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.h
deleted file mode 100644
index 19c657246a2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct AMR_encinst_t_;
-struct AMR_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMAMR : public ACMGenericCodec {
- public:
- explicit ACMAMR(int16_t codec_id);
- virtual ~ACMAMR();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- int16_t SetAMREncoderPackingFormat(const ACMAMRPackingFormat packing_format);
-
- ACMAMRPackingFormat AMREncoderPackingFormat() const;
-
- int16_t SetAMRDecoderPackingFormat(const ACMAMRPackingFormat packing_format);
-
- ACMAMRPackingFormat AMRDecoderPackingFormat() const;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t rate) OVERRIDE;
-
- virtual int16_t EnableDTX() OVERRIDE;
-
- virtual int16_t DisableDTX() OVERRIDE;
-
- AMR_encinst_t_* encoder_inst_ptr_;
- AMR_decinst_t_* decoder_inst_ptr_;
- int16_t encoding_mode_;
- int16_t encoding_rate_;
- ACMAMRPackingFormat encoder_packing_format_;
- ACMAMRPackingFormat decoder_packing_format_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.cc
deleted file mode 100644
index 8b1b58d03cf..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.cc
+++ /dev/null
@@ -1,436 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_amrwb.h"
-
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_AMRWB
-// NOTE! GSM AMR-wb is not included in the open-source package. The
-// following interface file is needed:
-//
-// /modules/audio_coding/codecs/amrwb/main/interface/amrwb_interface.h
-//
-// The API in the header file should match the one below.
-//
-// int16_t WebRtcAmrWb_CreateEnc(AMRWB_encinst_t_** enc_inst);
-// int16_t WebRtcAmrWb_CreateDec(AMRWB_decinst_t_** dec_inst);
-// int16_t WebRtcAmrWb_FreeEnc(AMRWB_encinst_t_* enc_inst);
-// int16_t WebRtcAmrWb_FreeDec(AMRWB_decinst_t_* dec_inst);
-// int16_t WebRtcAmrWb_Encode(AMRWB_encinst_t_* enc_inst, int16_t* input,
-// int16_t len, int16_t* output, int16_t mode);
-// int16_t WebRtcAmrWb_EncoderInit(AMRWB_encinst_t_* enc_inst,
-// int16_t dtx_mode);
-// int16_t WebRtcAmrWb_EncodeBitmode(AMRWB_encinst_t_* enc_inst,
-// int format);
-// int16_t WebRtcAmrWb_Decode(AMRWB_decinst_t_* dec_inst);
-// int16_t WebRtcAmrWb_DecodePlc(AMRWB_decinst_t_* dec_inst);
-// int16_t WebRtcAmrWb_DecoderInit(AMRWB_decinst_t_* dec_inst);
-// int16_t WebRtcAmrWb_DecodeBitmode(AMRWB_decinst_t_* dec_inst,
-// int format);
-#include "amrwb_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_AMRWB
-ACMAMRwb::ACMAMRwb(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- encoding_mode_(-1), // invalid value
- encoding_rate_(0), // invalid value
- encoder_packing_format_(AMRBandwidthEfficient),
- decoder_packing_format_(AMRBandwidthEfficient) {
-}
-
-ACMAMRwb::~ACMAMRwb() {
-}
-
-int16_t ACMAMRwb::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMAMRwb::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMAMRwb::EnableDTX() {
- return -1;
-}
-
-int16_t ACMAMRwb::DisableDTX() {
- return -1;
-}
-
-int16_t ACMAMRwb::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMAMRwb::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec*
-ACMAMRwb::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMAMRwb::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMAMRwb::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMAMRwb::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMAMRwb::DestructDecoderSafe() {
- return;
-}
-
-int16_t ACMAMRwb::SetBitRateSafe(const int32_t /* rate */) {
- return -1;
-}
-
-void ACMAMRwb::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMAMRwb::SetAMRwbEncoderPackingFormat(
- ACMAMRPackingFormat /* packing_format */) {
- return -1;
-}
-
-ACMAMRPackingFormat ACMAMRwb::AMRwbEncoderPackingFormat() const {
- return AMRUndefined;
-}
-
-int16_t ACMAMRwb::SetAMRwbDecoderPackingFormat(
- ACMAMRPackingFormat /* packing_format */) {
- return -1;
-}
-
-ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const {
- return AMRUndefined;
-}
-
-#else //===================== Actual Implementation =======================
-
-#define AMRWB_MODE_7k 0
-#define AMRWB_MODE_9k 1
-#define AMRWB_MODE_12k 2
-#define AMRWB_MODE_14k 3
-#define AMRWB_MODE_16k 4
-#define AMRWB_MODE_18k 5
-#define AMRWB_MODE_20k 6
-#define AMRWB_MODE_23k 7
-#define AMRWB_MODE_24k 8
-
-ACMAMRwb::ACMAMRwb(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- encoding_mode_(-1), // invalid value
- encoding_rate_(0) { // invalid value
- codec_id_ = codec_id;
- has_internal_dtx_ = true;
- encoder_packing_format_ = AMRBandwidthEfficient;
- decoder_packing_format_ = AMRBandwidthEfficient;
- return;
-}
-
-ACMAMRwb::~ACMAMRwb() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcAmrWb_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcAmrWb_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMAMRwb::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- int16_t vad_decision = 1;
- // Sanity check that the rate is set correctly. This check could be
- // skipped: if the rate were not set correctly, the initialization flag
- // would be false and we would not get here.
- if ((encoding_mode_ < AMRWB_MODE_7k) || (encoding_mode_ > AMRWB_MODE_24k)) {
- *bitstream_len_byte = 0;
- return -1;
- }
- *bitstream_len_byte = WebRtcAmrWb_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- (int16_t*)bitstream,
- encoding_mode_);
-
- // Update VAD, if internal DTX is used
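- // Same heuristic as in ACMAMR::InternalEncode: a payload this small is
- // presumed to be a DTX/SID frame and the per-10 ms VAD labels are set
- // accordingly.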
- if (has_internal_dtx_ && dtx_enabled_) {
- if (*bitstream_len_byte <= (7 * frame_len_smpl_ / 160)) {
- vad_decision = 0;
- }
- for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
- vad_label_[n] = vad_decision;
- }
- }
- // Increment the read index. This tells the caller how far we have
- // advanced in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMAMRwb::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMAMRwb::EnableDTX() {
- if (dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exists
- // enable DTX
- if (WebRtcAmrWb_EncoderInit(encoder_inst_ptr_, 1) < 0) {
- return -1;
- }
- dtx_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-}
-
-int16_t ACMAMRwb::DisableDTX() {
- if (!dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exists
- // disable DTX
- if (WebRtcAmrWb_EncoderInit(encoder_inst_ptr_, 0) < 0) {
- return -1;
- }
- dtx_enabled_ = false;
- return 0;
- } else {
- // Encoder doesn't exist, therefore disabling is harmless.
- return 0;
- }
-}
-
-int16_t ACMAMRwb::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- // sanity check
- if (encoder_inst_ptr_ == NULL) {
- return -1;
- }
-
- int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
- status += (WebRtcAmrWb_EncoderInit(
- encoder_inst_ptr_, ((codec_params->enable_dtx) ? 1 : 0)) < 0) ? -1 : 0;
- status += (WebRtcAmrWb_EncodeBitmode(
- encoder_inst_ptr_, encoder_packing_format_) < 0) ? -1 : 0;
- return (status < 0) ? -1 : 0;
-}
-
-int16_t ACMAMRwb::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- int16_t status = WebRtcAmrWb_DecodeBitmode(decoder_inst_ptr_,
- decoder_packing_format_);
- status += ((WebRtcAmrWb_DecoderInit(decoder_inst_ptr_) < 0) ? -1 : 0);
- return (status < 0) ? -1 : 0;
-}
-
-int32_t ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- return -1;
- }
-
- // Fill up the structure by calling
- // "SET_CODEC_PAR" and "SET_AMRWB_FUNCTIONS".
- // Then call NetEQ to add the codec to its
- // database.
- SET_CODEC_PAR((codec_def), kDecoderAMRWB, codec_inst.pltype,
- decoder_inst_ptr_, 16000);
- SET_AMRWB_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMAMRwb::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMAMRwb::InternalCreateEncoder() {
- return WebRtcAmrWb_CreateEnc(&encoder_inst_ptr_);
-}
-
-void ACMAMRwb::DestructEncoderSafe() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcAmrWb_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- // There is no encoder; reset the following state.
- encoder_exist_ = false;
- encoder_initialized_ = false;
- encoding_mode_ = -1; // invalid value
- encoding_rate_ = 0;
-}
-
-int16_t ACMAMRwb::InternalCreateDecoder() {
- return WebRtcAmrWb_CreateDec(&decoder_inst_ptr_);
-}
-
-void ACMAMRwb::DestructDecoderSafe() {
- if (decoder_inst_ptr_ != NULL) {
- WebRtcAmrWb_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- // There is no decoder instance; reset the following state.
- decoder_exist_ = false;
- decoder_initialized_ = false;
-}
-
-int16_t ACMAMRwb::SetBitRateSafe(const int32_t rate) {
- switch (rate) {
- case 7000: {
- encoding_mode_ = AMRWB_MODE_7k;
- encoding_rate_ = 7000;
- break;
- }
- case 9000: {
- encoding_mode_ = AMRWB_MODE_9k;
- encoding_rate_ = 9000;
- break;
- }
- case 12000: {
- encoding_mode_ = AMRWB_MODE_12k;
- encoding_rate_ = 12000;
- break;
- }
- case 14000: {
- encoding_mode_ = AMRWB_MODE_14k;
- encoding_rate_ = 14000;
- break;
- }
- case 16000: {
- encoding_mode_ = AMRWB_MODE_16k;
- encoding_rate_ = 16000;
- break;
- }
- case 18000: {
- encoding_mode_ = AMRWB_MODE_18k;
- encoding_rate_ = 18000;
- break;
- }
- case 20000: {
- encoding_mode_ = AMRWB_MODE_20k;
- encoding_rate_ = 20000;
- break;
- }
- case 23000: {
- encoding_mode_ = AMRWB_MODE_23k;
- encoding_rate_ = 23000;
- break;
- }
- case 24000: {
- encoding_mode_ = AMRWB_MODE_24k;
- encoding_rate_ = 24000;
- break;
- }
- default: {
- return -1;
- }
- }
- return 0;
-}
-
-void ACMAMRwb::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcAmrWb_FreeEnc(static_cast<AMRWB_encinst_t_*>(ptr_inst));
- }
- return;
-}
-
-int16_t ACMAMRwb::SetAMRwbEncoderPackingFormat(
- ACMAMRPackingFormat packing_format) {
- if ((packing_format != AMRBandwidthEfficient) &&
- (packing_format != AMROctetAlligned) &&
- (packing_format != AMRFileStorage)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Invalid AMRwb encoder packing-format.");
- return -1;
- } else {
- if (WebRtcAmrWb_EncodeBitmode(encoder_inst_ptr_, packing_format) < 0) {
- return -1;
- } else {
- encoder_packing_format_ = packing_format;
- return 0;
- }
- }
-}
-
-ACMAMRPackingFormat ACMAMRwb::AMRwbEncoderPackingFormat() const {
- return encoder_packing_format_;
-}
-
-int16_t ACMAMRwb::SetAMRwbDecoderPackingFormat(
- ACMAMRPackingFormat packing_format) {
- if ((packing_format != AMRBandwidthEfficient) &&
- (packing_format != AMROctetAlligned) &&
- (packing_format != AMRFileStorage)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Invalid AMRwb decoder packing-format.");
- return -1;
- } else {
- if (WebRtcAmrWb_DecodeBitmode(decoder_inst_ptr_, packing_format) < 0) {
- return -1;
- } else {
- decoder_packing_format_ = packing_format;
- return 0;
- }
- }
-}
-
-ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const {
- return decoder_packing_format_;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.h
deleted file mode 100644
index 25934187e55..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.h
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct AMRWB_encinst_t_;
-struct AMRWB_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMAMRwb : public ACMGenericCodec {
- public:
- explicit ACMAMRwb(int16_t codec_id);
- virtual ~ACMAMRwb();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t SetAMRwbEncoderPackingFormat(
- const ACMAMRPackingFormat packing_format);
-
- virtual ACMAMRPackingFormat AMRwbEncoderPackingFormat() const;
-
- virtual int16_t SetAMRwbDecoderPackingFormat(
- const ACMAMRPackingFormat packing_format);
-
- virtual ACMAMRPackingFormat AMRwbDecoderPackingFormat() const;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t rate) OVERRIDE;
-
- virtual int16_t EnableDTX() OVERRIDE;
-
- virtual int16_t DisableDTX() OVERRIDE;
-
- AMRWB_encinst_t_* encoder_inst_ptr_;
- AMRWB_decinst_t_* decoder_inst_ptr_;
-
- int16_t encoding_mode_;
- int16_t encoding_rate_;
- ACMAMRPackingFormat encoder_packing_format_;
- ACMAMRPackingFormat decoder_packing_format_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.cc
deleted file mode 100644
index 3b838143d4a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.cc
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_celt.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_CELT
- // NOTE! Celt is not included in the open-source package. Modify this file or
- // your codec API to match the function calls and the name of the Celt API
- // file that is used.
-#include "celt_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_CELT
-
-ACMCELT::ACMCELT(int16_t /* codec_id */)
- : enc_inst_ptr_(NULL),
- dec_inst_ptr_(NULL),
- sampling_freq_(0),
- bitrate_(0),
- channels_(1),
- dec_channels_(1) {
- return;
-}
-
-ACMCELT::~ACMCELT() {
- return;
-}
-
-int16_t ACMCELT::InternalEncode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMCELT::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMCELT::InternalInitEncoder(WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMCELT::InternalInitDecoder(WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMCELT::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMCELT::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMCELT::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMCELT::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMCELT::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMCELT::DestructDecoderSafe() {
- return;
-}
-
-void ACMCELT::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-bool ACMCELT::IsTrueStereoCodec() {
- return true;
-}
-
-int16_t ACMCELT::SetBitRateSafe(const int32_t /*rate*/) {
- return -1;
-}
-
-void ACMCELT::SplitStereoPacket(uint8_t* /*payload*/,
- int32_t* /*payload_length*/) {}
-
-#else //===================== Actual Implementation =======================
-
-ACMCELT::ACMCELT(int16_t codec_id)
- : enc_inst_ptr_(NULL),
- dec_inst_ptr_(NULL),
- sampling_freq_(32000), // Default sampling frequency.
- bitrate_(64000), // Default rate.
- channels_(1), // Default send mono.
- dec_channels_(1) { // Default receive mono.
- // TODO(tlegrand): remove later when ACMGenericCodec has a new constructor.
- codec_id_ = codec_id;
-
- return;
-}
-
-ACMCELT::~ACMCELT() {
- if (enc_inst_ptr_ != NULL) {
- WebRtcCelt_FreeEnc(enc_inst_ptr_);
- enc_inst_ptr_ = NULL;
- }
- if (dec_inst_ptr_ != NULL) {
- WebRtcCelt_FreeDec(dec_inst_ptr_);
- dec_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMCELT::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = 0;
-
- // Call Encoder.
- *bitstream_len_byte = WebRtcCelt_Encode(enc_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- bitstream);
-
- // Increment the read index. This tells the caller how far we have
- // advanced in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * channels_;
-
- if (*bitstream_len_byte < 0) {
- // Error reported from the encoder.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: Encode error for Celt");
- *bitstream_len_byte = 0;
- return -1;
- }
-
- return *bitstream_len_byte;
-}
-
-int16_t ACMCELT::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMCELT::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- // Set bitrate and check that it is within the valid range.
- int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
- if (status < 0) {
- return -1;
- }
-
- // If number of channels changed we need to re-create memory.
- if (codec_params->codec_inst.channels != channels_) {
- WebRtcCelt_FreeEnc(enc_inst_ptr_);
- enc_inst_ptr_ = NULL;
- // Store new number of channels.
- channels_ = codec_params->codec_inst.channels;
- if (WebRtcCelt_CreateEnc(&enc_inst_ptr_, channels_) < 0) {
- return -1;
- }
- }
-
- // Initiate encoder.
- if (WebRtcCelt_EncoderInit(enc_inst_ptr_, channels_, bitrate_) >= 0) {
- return 0;
- } else {
- return -1;
- }
-}
-
-int16_t ACMCELT::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
- // If number of channels changed we need to re-create memory.
- if (codec_params->codec_inst.channels != dec_channels_) {
- WebRtcCelt_FreeDec(dec_inst_ptr_);
- dec_inst_ptr_ = NULL;
- // Store new number of channels.
- dec_channels_ = codec_params->codec_inst.channels;
- if (WebRtcCelt_CreateDec(&dec_inst_ptr_, dec_channels_) < 0) {
- return -1;
- }
- }
-
- // Initiate decoder, both master and slave parts.
- if (WebRtcCelt_DecoderInit(dec_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: init decoder failed for Celt.");
- return -1;
- }
- if (WebRtcCelt_DecoderInitSlave(dec_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: init decoder failed for Celt.");
- return -1;
- }
- return 0;
-}
-
-int32_t ACMCELT::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodecDef: Decoder uninitialized for Celt");
- return -1;
- }
-
- // Fill up the structure by calling
- // "SET_CODEC_PAR" and "SET_CELT_FUNCTIONS" or "SET_CELTSLAVE_FUNCTIONS".
- // Then call NetEQ to add the codec to its
- // database.
- if (codec_inst.channels == 1) {
- SET_CODEC_PAR(codec_def, kDecoderCELT_32, codec_inst.pltype, dec_inst_ptr_,
- 32000);
- } else {
- SET_CODEC_PAR(codec_def, kDecoderCELT_32_2ch, codec_inst.pltype,
- dec_inst_ptr_, 32000);
- }
-
- // If this is the master of NetEQ, the regular decoder will be added;
- // otherwise the slave decoder will be used.
- if (is_master_) {
- SET_CELT_FUNCTIONS(codec_def);
- } else {
- SET_CELTSLAVE_FUNCTIONS(codec_def);
- }
- return 0;
-}
-
-ACMGenericCodec* ACMCELT::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMCELT::InternalCreateEncoder() {
- if (WebRtcCelt_CreateEnc(&enc_inst_ptr_, num_channels_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: create encoder failed for Celt");
- return -1;
- }
- channels_ = num_channels_;
- return 0;
-}
-
-void ACMCELT::DestructEncoderSafe() {
- encoder_exist_ = false;
- encoder_initialized_ = false;
- if (enc_inst_ptr_ != NULL) {
- WebRtcCelt_FreeEnc(enc_inst_ptr_);
- enc_inst_ptr_ = NULL;
- }
-}
-
-int16_t ACMCELT::InternalCreateDecoder() {
- if (WebRtcCelt_CreateDec(&dec_inst_ptr_, dec_channels_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateDecoder: create decoder failed for Celt");
- return -1;
- }
-
- return 0;
-}
-
-void ACMCELT::DestructDecoderSafe() {
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (dec_inst_ptr_ != NULL) {
- WebRtcCelt_FreeDec(dec_inst_ptr_);
- dec_inst_ptr_ = NULL;
- }
-}
-
-void ACMCELT::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcCelt_FreeEnc(static_cast<CELT_encinst_t*>(ptr_inst));
- }
- return;
-}
-
-bool ACMCELT::IsTrueStereoCodec() {
- return true;
-}
-
-int16_t ACMCELT::SetBitRateSafe(const int32_t rate) {
- // Check that rate is in the valid range.
- if ((rate >= 48000) && (rate <= 128000)) {
- // Store new rate.
- bitrate_ = rate;
-
- // Initiate encoder with new rate.
- if (WebRtcCelt_EncoderInit(enc_inst_ptr_, channels_, bitrate_) >= 0) {
- return 0;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: Failed to initiate Celt with rate %d",
- rate);
- return -1;
- }
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: Invalid rate Celt, %d", rate);
- return -1;
- }
-}
-
- // Copy the stereo packet so that NetEQ will insert it into both master and
- // slave.
-void ACMCELT::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
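- // The payload is duplicated in place, so the caller is expected to have
- // allocated room for at least twice the original payload length.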
- // Duplicate the payload.
- memcpy(&payload[*payload_length], &payload[0],
- sizeof(uint8_t) * (*payload_length));
- // Double the size of the packet.
- *payload_length *= 2;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.h
deleted file mode 100644
index 4a4610e0d47..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct CELT_encinst_t_;
-struct CELT_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMCELT : public ACMGenericCodec {
- public:
- explicit ACMCELT(int16_t codec_id);
- virtual ~ACMCELT();
-
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual bool IsTrueStereoCodec() OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t rate) OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-
- CELT_encinst_t_* enc_inst_ptr_;
- CELT_decinst_t_* dec_inst_ptr_;
- uint16_t sampling_freq_;
- int32_t bitrate_;
- uint16_t channels_;
- uint16_t dec_channels_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.cc
deleted file mode 100644
index 6f3a5057e0b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.cc
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_cng.h"
-
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMCNG::ACMCNG(int16_t codec_id) {
- encoder_inst_ptr_ = NULL;
- decoder_inst_ptr_ = NULL;
- codec_id_ = codec_id;
- samp_freq_hz_ = ACMCodecDB::CodecFreq(codec_id_);
- return;
-}
-
-ACMCNG::~ACMCNG() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcCng_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcCng_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
- // CNG is not like a regular encoder; this function should not be called
- // under normal circumstances. Instead, the call is made from inside
- // ACMGenericCodec::ProcessFrameVADDTX.
-int16_t ACMCNG::InternalEncode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMCNG::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
- // CNG is not like a regular encoder; this function should not be called
- // under normal circumstances. Instead, the call is made from inside
- // ACMGenericCodec::ProcessFrameVADDTX.
-int16_t ACMCNG::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMCNG::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return WebRtcCng_InitDec(decoder_inst_ptr_);
-}
-
-int32_t ACMCNG::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- // TODO(tlegrand): log error
- return -1;
- }
- // Fill up the structure by calling
- // "SET_CODEC_PAR" and "SET_CNG_FUNCTIONS".
- // Then return the structure to NetEQ to add the codec to its
- // database.
-
- if (samp_freq_hz_ == 8000 || samp_freq_hz_ == 16000 ||
- samp_freq_hz_ == 32000 || samp_freq_hz_ == 48000) {
- SET_CODEC_PAR((codec_def), kDecoderCNG, codec_inst.pltype,
- decoder_inst_ptr_, samp_freq_hz_);
- SET_CNG_FUNCTIONS((codec_def));
- return 0;
- } else {
- return -1;
- }
-}
-
-ACMGenericCodec* ACMCNG::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMCNG::InternalCreateEncoder() {
- if (WebRtcCng_CreateEnc(&encoder_inst_ptr_) < 0) {
- encoder_inst_ptr_ = NULL;
- return -1;
- } else {
- return 0;
- }
-}
-
-void ACMCNG::DestructEncoderSafe() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcCng_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- encoder_exist_ = false;
- encoder_initialized_ = false;
-}
-
-int16_t ACMCNG::InternalCreateDecoder() {
- if (WebRtcCng_CreateDec(&decoder_inst_ptr_) < 0) {
- decoder_inst_ptr_ = NULL;
- return -1;
- } else {
- return 0;
- }
-}
-
-void ACMCNG::DestructDecoderSafe() {
- if (decoder_inst_ptr_ != NULL) {
- WebRtcCng_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- decoder_exist_ = false;
- decoder_initialized_ = false;
-}
-
-void ACMCNG::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcCng_FreeEnc(static_cast<CNG_enc_inst*>(ptr_inst));
- }
- return;
-}
-
-int16_t ACMCNG::EnableDTX() { return -1; }
-int16_t ACMCNG::DisableDTX() { return -1; }
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.h
deleted file mode 100644
index 728312d55fb..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct WebRtcCngEncInst;
-struct WebRtcCngDecInst;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMCNG: public ACMGenericCodec {
- public:
- explicit ACMCNG(int16_t codec_id);
- virtual ~ACMCNG();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual int16_t EnableDTX() OVERRIDE;
- virtual int16_t DisableDTX() OVERRIDE;
-
- WebRtcCngEncInst* encoder_inst_ptr_;
- WebRtcCngDecInst* decoder_inst_ptr_;
- uint16_t samp_freq_hz_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.cc
deleted file mode 100644
index 138effd6a9f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.cc
+++ /dev/null
@@ -1,956 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file generates databases with information about all supported audio
- * codecs.
- */
-
-// TODO(tlegrand): Change constant input pointers in all functions to constant
-// references, where appropriate.
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-// Includes needed to create the codecs.
-// G.711, PCM mu-law and A-law.
-#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_pcma.h"
-#include "webrtc/modules/audio_coding/main/source/acm_pcmu.h"
-// CNG.
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/main/source/acm_cng.h"
-// NetEQ.
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#ifdef WEBRTC_CODEC_ISAC
-#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
-#endif
-#ifdef WEBRTC_CODEC_ISACFX
-#include "webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h"
-#endif
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
-#include "webrtc/modules/audio_coding/main/source/acm_isac.h"
-#include "webrtc/modules/audio_coding/main/source/acm_isac_macros.h"
-#endif
-#ifdef WEBRTC_CODEC_PCM16
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/modules/audio_coding/main/source/acm_pcm16b.h"
-#endif
-#ifdef WEBRTC_CODEC_ILBC
-#include "webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h"
-#include "webrtc/modules/audio_coding/main/source/acm_ilbc.h"
-#endif
-#ifdef WEBRTC_CODEC_AMR
-#include "webrtc/modules/audio_coding/codecs/amr/include/amr_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_amr.h"
-#endif
-#ifdef WEBRTC_CODEC_AMRWB
-#include "webrtc/modules/audio_coding/codecs/amrwb/include/amrwb_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_amrwb.h"
-#endif
-#ifdef WEBRTC_CODEC_CELT
-#include "webrtc/modules/audio_coding/codecs/celt/include/celt_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_celt.h"
-#endif
-#ifdef WEBRTC_CODEC_G722
-#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_g722.h"
-#endif
-#ifdef WEBRTC_CODEC_G722_1
-#include "webrtc/modules/audio_coding/codecs/g7221/include/g7221_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_g7221.h"
-#endif
-#ifdef WEBRTC_CODEC_G722_1C
-#include "webrtc/modules/audio_coding/codecs/g7221c/include/g7221c_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_g7221c.h"
-#endif
-#ifdef WEBRTC_CODEC_G729
-#include "webrtc/modules/audio_coding/codecs/g729/include/g729_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_g729.h"
-#endif
-#ifdef WEBRTC_CODEC_G729_1
-#include "webrtc/modules/audio_coding/codecs/g7291/include/g7291_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_g7291.h"
-#endif
-#ifdef WEBRTC_CODEC_GSMFR
-#include "webrtc/modules/audio_coding/codecs/gsmfr/include/gsmfr_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_gsmfr.h"
-#endif
-#ifdef WEBRTC_CODEC_OPUS
-#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_opus.h"
-#endif
-#ifdef WEBRTC_CODEC_SPEEX
-#include "webrtc/modules/audio_coding/codecs/speex/include/speex_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_speex.h"
-#endif
-#ifdef WEBRTC_CODEC_AVT
-#include "webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h"
-#endif
-#ifdef WEBRTC_CODEC_RED
-#include "webrtc/modules/audio_coding/main/source/acm_red.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
- // Payload types not yet in use:
-// 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68,
-// 67, 66, 65
-
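- // Each entry below is a CodecInst:
- // {payload type, name, sampling frequency (Hz), packet size (samples),
- // number of channels, default rate (bits/s)}.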
-const CodecInst ACMCodecDB::database_[] = {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- {103, "ISAC", 16000, kIsacPacSize480, 1, kIsacWbDefaultRate},
-# if (defined(WEBRTC_CODEC_ISAC))
- {104, "ISAC", 32000, kIsacPacSize960, 1, kIsacSwbDefaultRate},
- {105, "ISAC", 48000, kIsacPacSize1440, 1, kIsacSwbDefaultRate},
-# endif
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- // Mono
- {107, "L16", 8000, 80, 1, 128000},
- {108, "L16", 16000, 160, 1, 256000},
- {109, "L16", 32000, 320, 1, 512000},
- // Stereo
- {111, "L16", 8000, 80, 2, 128000},
- {112, "L16", 16000, 160, 2, 256000},
- {113, "L16", 32000, 320, 2, 512000},
-#endif
- // G.711, PCM mu-law and A-law.
- // Mono
- {0, "PCMU", 8000, 160, 1, 64000},
- {8, "PCMA", 8000, 160, 1, 64000},
- // Stereo
- {110, "PCMU", 8000, 160, 2, 64000},
- {118, "PCMA", 8000, 160, 2, 64000},
-#ifdef WEBRTC_CODEC_ILBC
- {102, "ILBC", 8000, 240, 1, 13300},
-#endif
-#ifdef WEBRTC_CODEC_AMR
- {114, "AMR", 8000, 160, 1, 12200},
-#endif
-#ifdef WEBRTC_CODEC_AMRWB
- {115, "AMR-WB", 16000, 320, 1, 20000},
-#endif
-#ifdef WEBRTC_CODEC_CELT
- // Mono
- {116, "CELT", 32000, 640, 1, 64000},
- // Stereo
- {117, "CELT", 32000, 640, 2, 64000},
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- {9, "G722", 16000, 320, 1, 64000},
- // Stereo
- {119, "G722", 16000, 320, 2, 64000},
-#endif
-#ifdef WEBRTC_CODEC_G722_1
- {92, "G7221", 16000, 320, 1, 32000},
- {91, "G7221", 16000, 320, 1, 24000},
- {90, "G7221", 16000, 320, 1, 16000},
-#endif
-#ifdef WEBRTC_CODEC_G722_1C
- {89, "G7221", 32000, 640, 1, 48000},
- {88, "G7221", 32000, 640, 1, 32000},
- {87, "G7221", 32000, 640, 1, 24000},
-#endif
-#ifdef WEBRTC_CODEC_G729
- {18, "G729", 8000, 240, 1, 8000},
-#endif
-#ifdef WEBRTC_CODEC_G729_1
- {86, "G7291", 16000, 320, 1, 32000},
-#endif
-#ifdef WEBRTC_CODEC_GSMFR
- {3, "GSM", 8000, 160, 1, 13200},
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Opus internally supports 48, 24, 16, 12, 8 kHz.
- // Mono and stereo.
- {120, "opus", 48000, 960, 2, 64000},
-#endif
-#ifdef WEBRTC_CODEC_SPEEX
- {85, "speex", 8000, 160, 1, 11000},
- {84, "speex", 16000, 320, 1, 22000},
-#endif
- // Comfort noise for three different sampling frequencies (four when
- // 48 kHz is enabled).
- {13, "CN", 8000, 240, 1, 0},
- {98, "CN", 16000, 480, 1, 0},
- {99, "CN", 32000, 960, 1, 0},
-#ifdef ENABLE_48000_HZ
- {100, "CN", 48000, 1440, 1, 0},
-#endif
-#ifdef WEBRTC_CODEC_AVT
- {106, "telephone-event", 8000, 240, 1, 0},
-#endif
-#ifdef WEBRTC_CODEC_RED
- {127, "red", 8000, 0, 1, 0},
-#endif
- // To prevent compile errors due to trailing commas.
- {-1, "Null", -1, -1, -1, -1}
-};
-
- // Create database with all codec settings at compile time.
- // Each entry needs the following parameters in the given order:
- // number of allowed packet sizes, a vector with the allowed packet sizes,
- // basic block samples, and the maximum number of channels supported.
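- // For example, the first iSAC entry {2, {kIsacPacSize480, kIsacPacSize960},
- // 0, 1} allows two packet sizes (480 and 960 samples), uses a basic block
- // of 0 samples, and supports at most one channel.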
-const ACMCodecDB::CodecSettings ACMCodecDB::codec_settings_[] = {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- {2, {kIsacPacSize480, kIsacPacSize960}, 0, 1},
-# if (defined(WEBRTC_CODEC_ISAC))
- {1, {kIsacPacSize960}, 0, 1},
- {1, {kIsacPacSize1440}, 0, 1},
-# endif
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- // Mono
- {4, {80, 160, 240, 320}, 0, 2},
- {4, {160, 320, 480, 640}, 0, 2},
- {2, {320, 640}, 0, 2},
- // Stereo
- {4, {80, 160, 240, 320}, 0, 2},
- {4, {160, 320, 480, 640}, 0, 2},
- {2, {320, 640}, 0, 2},
-#endif
- // G.711, PCM mu-law and A-law.
- // Mono
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
- // Stereo
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
-#ifdef WEBRTC_CODEC_ILBC
- {4, {160, 240, 320, 480}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_AMR
- {3, {160, 320, 480}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_AMRWB
- {3, {320, 640, 960}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_CELT
- // Mono
- {1, {640}, 0, 2},
- // Stereo
- {1, {640}, 0, 2},
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- {6, {160, 320, 480, 640, 800, 960}, 0, 2},
- // Stereo
- {6, {160, 320, 480, 640, 800, 960}, 0, 2},
-#endif
-#ifdef WEBRTC_CODEC_G722_1
- {1, {320}, 320, 1},
- {1, {320}, 320, 1},
- {1, {320}, 320, 1},
-#endif
-#ifdef WEBRTC_CODEC_G722_1C
- {1, {640}, 640, 1},
- {1, {640}, 640, 1},
- {1, {640}, 640, 1},
-#endif
-#ifdef WEBRTC_CODEC_G729
- {6, {80, 160, 240, 320, 400, 480}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_G729_1
- {3, {320, 640, 960}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_GSMFR
- {3, {160, 320, 480}, 160, 1},
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Opus supports frames shorter than 10 ms,
- // but using them does not help us.
- // Mono and stereo.
- {4, {480, 960, 1920, 2880}, 0, 2},
-#endif
-#ifdef WEBRTC_CODEC_SPEEX
- {3, {160, 320, 480}, 0, 1},
- {3, {320, 640, 960}, 0, 1},
-#endif
- // Comfort noise for three different sampling frequencies.
- {1, {240}, 240, 1},
- {1, {480}, 480, 1},
- {1, {960}, 960, 1},
-#ifdef ENABLE_48000_HZ
- {1, {1440}, 1440, 1},
-#endif
-#ifdef WEBRTC_CODEC_AVT
- {1, {240}, 240, 1},
-#endif
-#ifdef WEBRTC_CODEC_RED
- {1, {0}, 0, 1},
-#endif
- // To prevent compile errors due to trailing commas.
- {-1, {-1}, -1, -1}
-};
-
-// Create a database of all NetEQ decoders at compile time.
-const WebRtcNetEQDecoder ACMCodecDB::neteq_decoders_[] = {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- kDecoderISAC,
-# if (defined(WEBRTC_CODEC_ISAC))
- kDecoderISACswb,
- kDecoderISACfb,
-# endif
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- // Mono
- kDecoderPCM16B,
- kDecoderPCM16Bwb,
- kDecoderPCM16Bswb32kHz,
- // Stereo
- kDecoderPCM16B_2ch,
- kDecoderPCM16Bwb_2ch,
- kDecoderPCM16Bswb32kHz_2ch,
-#endif
- // G.711, PCM mu-law and A-law.
- // Mono
- kDecoderPCMu,
- kDecoderPCMa,
- // Stereo
- kDecoderPCMu_2ch,
- kDecoderPCMa_2ch,
-#ifdef WEBRTC_CODEC_ILBC
- kDecoderILBC,
-#endif
-#ifdef WEBRTC_CODEC_AMR
- kDecoderAMR,
-#endif
-#ifdef WEBRTC_CODEC_AMRWB
- kDecoderAMRWB,
-#endif
-#ifdef WEBRTC_CODEC_CELT
- // Mono
- kDecoderCELT_32,
- // Stereo
- kDecoderCELT_32_2ch,
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- kDecoderG722,
- // Stereo
- kDecoderG722_2ch,
-#endif
-#ifdef WEBRTC_CODEC_G722_1
- kDecoderG722_1_32,
- kDecoderG722_1_24,
- kDecoderG722_1_16,
-#endif
-#ifdef WEBRTC_CODEC_G722_1C
- kDecoderG722_1C_48,
- kDecoderG722_1C_32,
- kDecoderG722_1C_24,
-#endif
-#ifdef WEBRTC_CODEC_G729
- kDecoderG729,
-#endif
-#ifdef WEBRTC_CODEC_G729_1
- kDecoderG729_1,
-#endif
-#ifdef WEBRTC_CODEC_GSMFR
- kDecoderGSMFR,
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Mono and stereo.
- kDecoderOpus,
-#endif
-#ifdef WEBRTC_CODEC_SPEEX
- kDecoderSPEEX_8,
- kDecoderSPEEX_16,
-#endif
- // Comfort noise for three different sampling frequencies.
- kDecoderCNG,
- kDecoderCNG,
- kDecoderCNG,
-#ifdef ENABLE_48000_HZ
- kDecoderCNG,
-#endif
-#ifdef WEBRTC_CODEC_AVT
- kDecoderAVT,
-#endif
-#ifdef WEBRTC_CODEC_RED
- kDecoderRED,
-#endif
- kDecoderReservedEnd
-};
-
-// Get codec information from database.
- // TODO(tlegrand): replace memcpy with a pointer to the database memory.
-int ACMCodecDB::Codec(int codec_id, CodecInst* codec_inst) {
- // Error check to see that codec_id is not out of bounds.
- if ((codec_id < 0) || (codec_id >= kNumCodecs)) {
- return -1;
- }
-
- // Copy database information for the codec to the output.
- memcpy(codec_inst, &database_[codec_id], sizeof(CodecInst));
-
- return 0;
-}
-
-// Enumerator for error codes when asking for codec database id.
-enum {
- kInvalidCodec = -10,
- kInvalidPayloadtype = -30,
- kInvalidPacketSize = -40,
- kInvalidRate = -50
-};
-
- // Gets the codec id number from the database. If there is a mismatch in
- // the codec settings, the function returns an error code.
- // NOTE! The first mismatch found determines the return value.
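- // On success the codec id (>= 0) is returned; on a mismatch one of the
- // negative error codes from the enum above is returned.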
-int ACMCodecDB::CodecNumber(const CodecInst* codec_inst, int* mirror_id) {
- // Look for a matching codec in the database.
- int codec_id = CodecId(codec_inst);
-
- // Checks if we found a matching codec.
- if (codec_id == -1) {
- return kInvalidCodec;
- }
-
- // Checks the validity of the payload type.
- if (!ValidPayloadType(codec_inst->pltype)) {
- return kInvalidPayloadtype;
- }
-
- // Comfort Noise is a special case; packet size and rate are not checked.
- if (STR_CASE_CMP(database_[codec_id].plname, "CN") == 0) {
- *mirror_id = codec_id;
- return codec_id;
- }
-
- // RED is a special case; packet size and rate are not checked.
- if (STR_CASE_CMP(database_[codec_id].plname, "red") == 0) {
- *mirror_id = codec_id;
- return codec_id;
- }
-
- // Checks the validity of packet size.
- if (codec_settings_[codec_id].num_packet_sizes > 0) {
- bool packet_size_ok = false;
- int i;
- int packet_size_samples;
- for (i = 0; i < codec_settings_[codec_id].num_packet_sizes; i++) {
- packet_size_samples =
- codec_settings_[codec_id].packet_sizes_samples[i];
- if (codec_inst->pacsize == packet_size_samples) {
- packet_size_ok = true;
- break;
- }
- }
-
- if (!packet_size_ok) {
- return kInvalidPacketSize;
- }
- }
-
- if (codec_inst->pacsize < 1) {
- return kInvalidPacketSize;
- }
-
- // Check the validity of rate. Codecs with multiple rates have their own
- // function for this.
- *mirror_id = codec_id;
- if (STR_CASE_CMP("isac", codec_inst->plname) == 0) {
- if (IsISACRateValid(codec_inst->rate)) {
- // Set mirror_id to iSAC WB, which is created only once and used both for
- // iSAC WB and SWB, because they need to share the same struct.
- *mirror_id = kISAC;
- return codec_id;
- } else {
- return kInvalidRate;
- }
- } else if (STR_CASE_CMP("ilbc", codec_inst->plname) == 0) {
- return IsILBCRateValid(codec_inst->rate, codec_inst->pacsize)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("amr", codec_inst->plname) == 0) {
- return IsAMRRateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("amr-wb", codec_inst->plname) == 0) {
- return IsAMRwbRateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("g7291", codec_inst->plname) == 0) {
- return IsG7291RateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("opus", codec_inst->plname) == 0) {
- return IsOpusRateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("speex", codec_inst->plname) == 0) {
- return IsSpeexRateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("celt", codec_inst->plname) == 0) {
- return IsCeltRateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- }
-
- return IsRateValid(codec_id, codec_inst->rate) ?
- codec_id : kInvalidRate;
-}
-
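
For orientation, the CodecNumber() routine deleted above applies its checks in a fixed order (codec lookup, payload type, packet size, rate) and returns the negative code of the first failing check. The following is a minimal, self-contained sketch of that first-mismatch-wins pattern; the names (Entry, Validate) and the simplified checks are illustrative only and are not part of WebRTC.

// Illustrative only: a compact, standalone version of the validation order
// used by ACMCodecDB::CodecNumber(). The first failing check decides the
// (negative) return value.
#include <algorithm>
#include <vector>

namespace codec_db_sketch {

enum { kOk = 0, kBadPayloadType = -30, kBadPacketSize = -40, kBadRate = -50 };

struct Entry {
  std::vector<int> packet_sizes_samples;  // Allowed packet sizes (may be empty).
  int rate;                               // Single allowed rate.
};

int Validate(const Entry& entry, int payload_type, int pacsize, int rate) {
  if (payload_type < 0 || payload_type > 127)
    return kBadPayloadType;
  if (!entry.packet_sizes_samples.empty() &&
      std::find(entry.packet_sizes_samples.begin(),
                entry.packet_sizes_samples.end(),
                pacsize) == entry.packet_sizes_samples.end())
    return kBadPacketSize;
  return entry.rate == rate ? kOk : kBadRate;
}

// Example: Validate({{160, 320}, 64000}, 0, 240, 64000) == kBadPacketSize,
// because the packet-size check fails before the rate is even looked at.

}  // namespace codec_db_sketch
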
-// Looks for a matching payload name, frequency, and channels in the
-// codec list. Need to check all three since some codecs have several codec
-// entries with different frequencies and/or channels.
-// Does not check other codec settings, such as payload type and packet size.
-// Returns the id of the codec, or -1 if no match is found.
-int ACMCodecDB::CodecId(const CodecInst* codec_inst) {
- return (CodecId(codec_inst->plname, codec_inst->plfreq,
- codec_inst->channels));
-}
-
-int ACMCodecDB::CodecId(const char* payload_name, int frequency, int channels) {
- for (int id = 0; id < kNumCodecs; id++) {
- bool name_match = false;
- bool frequency_match = false;
- bool channels_match = false;
-
- // Payload name, sampling frequency and number of channels need to match.
- // NOTE! If |frequency| is -1, the frequency is not applicable and the
- // frequency check always passes, as for RED.
- name_match = (STR_CASE_CMP(database_[id].plname, payload_name) == 0);
- frequency_match = (frequency == database_[id].plfreq) || (frequency == -1);
- // The number of channels must match for all codecs but Opus.
- if (STR_CASE_CMP(payload_name, "opus") != 0) {
- channels_match = (channels == database_[id].channels);
- } else {
- // For opus we just check that number of channels is valid.
- channels_match = (channels == 1 || channels == 2);
- }
-
- if (name_match && frequency_match && channels_match) {
- // We have found a matching codec in the list.
- return id;
- }
- }
-
- // We didn't find a matching codec.
- return -1;
-}
-// Gets codec id number, and mirror id, from database for the receiver.
-int ACMCodecDB::ReceiverCodecNumber(const CodecInst* codec_inst,
- int* mirror_id) {
- // Look for a matching codec in the database.
- int codec_id = CodecId(codec_inst);
-
- // Set |mirror_id| to |codec_id|, except for iSAC. In case of iSAC we always
- // set |mirror_id| to iSAC WB (kISAC), which is created only once and used
- // both for iSAC WB and SWB, because they need to share the same struct.
- if (STR_CASE_CMP(codec_inst->plname, "ISAC") != 0) {
- *mirror_id = codec_id;
- } else {
- *mirror_id = kISAC;
- }
-
- return codec_id;
-}
-
-// Returns the codec sampling frequency for codec with id = "codec_id" in
-// database.
-int ACMCodecDB::CodecFreq(int codec_id) {
- // Error check to see that codec_id is not out of bounds.
- if (codec_id < 0 || codec_id >= kNumCodecs) {
- return -1;
- }
-
- return database_[codec_id].plfreq;
-}
-
-// Returns the codec's basic coding block size in samples.
-int ACMCodecDB::BasicCodingBlock(int codec_id) {
- // Error check to see that codec_id is not out of bounds.
- if (codec_id < 0 || codec_id >= kNumCodecs) {
- return -1;
- }
-
- return codec_settings_[codec_id].basic_block_samples;
-}
-
-// Returns the NetEQ decoder database.
-const WebRtcNetEQDecoder* ACMCodecDB::NetEQDecoders() {
- return neteq_decoders_;
-}
-
-// Gets the mirror id. The id is used for codecs that share a settings struct
-// even though they need different payload types.
-int ACMCodecDB::MirrorID(int codec_id) {
- if (STR_CASE_CMP(database_[codec_id].plname, "isac") == 0) {
- return kISAC;
- } else {
- return codec_id;
- }
-}
-
-// Creates memory/instance for storing codec state.
-ACMGenericCodec* ACMCodecDB::CreateCodecInstance(const CodecInst* codec_inst) {
- // All we have support for right now.
- if (!STR_CASE_CMP(codec_inst->plname, "ISAC")) {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- return new ACMISAC(kISAC);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "PCMU")) {
- if (codec_inst->channels == 1) {
- return new ACMPCMU(kPCMU);
- } else {
- return new ACMPCMU(kPCMU_2ch);
- }
- } else if (!STR_CASE_CMP(codec_inst->plname, "PCMA")) {
- if (codec_inst->channels == 1) {
- return new ACMPCMA(kPCMA);
- } else {
- return new ACMPCMA(kPCMA_2ch);
- }
- } else if (!STR_CASE_CMP(codec_inst->plname, "ILBC")) {
-#ifdef WEBRTC_CODEC_ILBC
- return new ACMILBC(kILBC);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "AMR")) {
-#ifdef WEBRTC_CODEC_AMR
- return new ACMAMR(kGSMAMR);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "AMR-WB")) {
-#ifdef WEBRTC_CODEC_AMRWB
- return new ACMAMRwb(kGSMAMRWB);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "CELT")) {
-#ifdef WEBRTC_CODEC_CELT
- if (codec_inst->channels == 1) {
- return new ACMCELT(kCELT32);
- } else {
- return new ACMCELT(kCELT32_2ch);
- }
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "G722")) {
-#ifdef WEBRTC_CODEC_G722
- if (codec_inst->channels == 1) {
- return new ACMG722(kG722);
- } else {
- return new ACMG722(kG722_2ch);
- }
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "G7221")) {
- switch (codec_inst->plfreq) {
- case 16000: {
-#ifdef WEBRTC_CODEC_G722_1
- int codec_id;
- switch (codec_inst->rate) {
- case 16000 : {
- codec_id = kG722_1_16;
- break;
- }
- case 24000 : {
- codec_id = kG722_1_24;
- break;
- }
- case 32000 : {
- codec_id = kG722_1_32;
- break;
- }
- default: {
- return NULL;
- }
- }
- return new ACMG722_1(codec_id);
-#endif
- }
- case 32000: {
-#ifdef WEBRTC_CODEC_G722_1C
- int codec_id;
- switch (codec_inst->rate) {
- case 24000 : {
- codec_id = kG722_1C_24;
- break;
- }
- case 32000 : {
- codec_id = kG722_1C_32;
- break;
- }
- case 48000 : {
- codec_id = kG722_1C_48;
- break;
- }
- default: {
- return NULL;
- }
- }
- return new ACMG722_1C(codec_id);
-#endif
- }
- }
- } else if (!STR_CASE_CMP(codec_inst->plname, "CN")) {
- // For CN we need to check sampling frequency to know what codec to create.
- int codec_id;
- switch (codec_inst->plfreq) {
- case 8000: {
- codec_id = kCNNB;
- break;
- }
- case 16000: {
- codec_id = kCNWB;
- break;
- }
- case 32000: {
- codec_id = kCNSWB;
- break;
- }
-#ifdef ENABLE_48000_HZ
- case 48000: {
- codec_id = kCNFB;
- break;
- }
-#endif
- default: {
- return NULL;
- }
- }
- return new ACMCNG(codec_id);
- } else if (!STR_CASE_CMP(codec_inst->plname, "G729")) {
-#ifdef WEBRTC_CODEC_G729
- return new ACMG729(kG729);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "G7291")) {
-#ifdef WEBRTC_CODEC_G729_1
- return new ACMG729_1(kG729_1);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "opus")) {
-#ifdef WEBRTC_CODEC_OPUS
- return new ACMOpus(kOpus);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "speex")) {
-#ifdef WEBRTC_CODEC_SPEEX
- int codec_id;
- switch (codec_inst->plfreq) {
- case 8000: {
- codec_id = kSPEEX8;
- break;
- }
- case 16000: {
- codec_id = kSPEEX16;
- break;
- }
- default: {
- return NULL;
- }
- }
- return new ACMSPEEX(codec_id);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "CN")) {
- // For CN we need to check sampling frequency to know what codec to create.
- int codec_id;
- switch (codec_inst->plfreq) {
- case 8000: {
- codec_id = kCNNB;
- break;
- }
- case 16000: {
- codec_id = kCNWB;
- break;
- }
- case 32000: {
- codec_id = kCNSWB;
- break;
- }
-#ifdef ENABLE_48000_HZ
- case 48000: {
- codec_id = kCNFB;
- break;
- }
-#endif
- default: {
- return NULL;
- }
- }
- return new ACMCNG(codec_id);
- } else if (!STR_CASE_CMP(codec_inst->plname, "L16")) {
-#ifdef WEBRTC_CODEC_PCM16
- // For L16 we need to check sampling frequency to know what codec to create.
- int codec_id;
- if (codec_inst->channels == 1) {
- switch (codec_inst->plfreq) {
- case 8000: {
- codec_id = kPCM16B;
- break;
- }
- case 16000: {
- codec_id = kPCM16Bwb;
- break;
- }
- case 32000: {
- codec_id = kPCM16Bswb32kHz;
- break;
- }
- default: {
- return NULL;
- }
- }
- } else {
- switch (codec_inst->plfreq) {
- case 8000: {
- codec_id = kPCM16B_2ch;
- break;
- }
- case 16000: {
- codec_id = kPCM16Bwb_2ch;
- break;
- }
- case 32000: {
- codec_id = kPCM16Bswb32kHz_2ch;
- break;
- }
- default: {
- return NULL;
- }
- }
- }
- return new ACMPCM16B(codec_id);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "telephone-event")) {
-#ifdef WEBRTC_CODEC_AVT
- return new ACMDTMFPlayout(kAVT);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "red")) {
-#ifdef WEBRTC_CODEC_RED
- return new ACMRED(kRED);
-#endif
- }
- return NULL;
-}
-
-// Checks if the bitrate is valid for the codec.
-bool ACMCodecDB::IsRateValid(int codec_id, int rate) {
- if (database_[codec_id].rate == rate) {
- return true;
- } else {
- return false;
- }
-}
-
-// Checks if the bitrate is valid for iSAC.
-bool ACMCodecDB::IsISACRateValid(int rate) {
- if ((rate == -1) || ((rate <= 56000) && (rate >= 10000))) {
- return true;
- } else {
- return false;
- }
-}
-
-// Checks if the bitrate is valid for iLBC.
-bool ACMCodecDB::IsILBCRateValid(int rate, int frame_size_samples) {
- if (((frame_size_samples == 240) || (frame_size_samples == 480)) &&
- (rate == 13300)) {
- return true;
- } else if (((frame_size_samples == 160) || (frame_size_samples == 320)) &&
- (rate == 15200)) {
- return true;
- } else {
- return false;
- }
-}
-
-// Check if the bitrate is valid for the GSM-AMR.
-bool ACMCodecDB::IsAMRRateValid(int rate) {
- switch (rate) {
- case 4750:
- case 5150:
- case 5900:
- case 6700:
- case 7400:
- case 7950:
- case 10200:
- case 12200: {
- return true;
- }
- default: {
- return false;
- }
- }
-}
-
-// Check if the bitrate is valid for GSM-AMR-WB.
-bool ACMCodecDB::IsAMRwbRateValid(int rate) {
- switch (rate) {
- case 7000:
- case 9000:
- case 12000:
- case 14000:
- case 16000:
- case 18000:
- case 20000:
- case 23000:
- case 24000: {
- return true;
- }
- default: {
- return false;
- }
- }
-}
-
-// Check if the bitrate is valid for G.729.1.
-bool ACMCodecDB::IsG7291RateValid(int rate) {
- switch (rate) {
- case 8000:
- case 12000:
- case 14000:
- case 16000:
- case 18000:
- case 20000:
- case 22000:
- case 24000:
- case 26000:
- case 28000:
- case 30000:
- case 32000: {
- return true;
- }
- default: {
- return false;
- }
- }
-}
-
-// Checks if the bitrate is valid for Speex.
-bool ACMCodecDB::IsSpeexRateValid(int rate) {
- if (rate > 2000) {
- return true;
- } else {
- return false;
- }
-}
-
-// Checks if the bitrate is valid for Opus.
-bool ACMCodecDB::IsOpusRateValid(int rate) {
- if ((rate < 6000) || (rate > 510000)) {
- return false;
- }
- return true;
-}
-
-// Checks if the bitrate is valid for Celt.
-bool ACMCodecDB::IsCeltRateValid(int rate) {
- if ((rate >= 48000) && (rate <= 128000)) {
- return true;
- } else {
- return false;
- }
-}
-
-// Checks if the payload type is in the valid range.
-bool ACMCodecDB::ValidPayloadType(int payload_type) {
- if ((payload_type < 0) || (payload_type > 127)) {
- return false;
- }
- return true;
-}
-
-} // namespace acm1
-
-} // namespace webrtc
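
The CodecId() lookup deleted above matches on payload name, frequency, and channel count, with two quirks: a frequency of -1 always matches, and Opus accepts either 1 or 2 channels regardless of what the database entry says. Below is a self-contained sketch of that matching rule; the three-entry table and the FindCodec() name are made up for illustration, and strcasecmp is assumed to be available from a POSIX toolchain.

#include <strings.h>  // strcasecmp (assumed POSIX).

namespace codec_db_sketch {

struct DbEntry {
  const char* plname;
  int plfreq;
  int channels;
};

// Hypothetical, trimmed-down stand-in for ACMCodecDB::database_.
const DbEntry kDb[] = {
    {"PCMU", 8000, 1}, {"PCMU", 8000, 2}, {"opus", 48000, 2},
};

int FindCodec(const char* name, int frequency, int channels) {
  const int num_entries = static_cast<int>(sizeof(kDb) / sizeof(kDb[0]));
  for (int id = 0; id < num_entries; ++id) {
    const bool name_match = strcasecmp(kDb[id].plname, name) == 0;
    // A frequency of -1 means "not applicable" and always matches.
    const bool freq_match = (frequency == -1) || (frequency == kDb[id].plfreq);
    // Channels must match exactly, except for Opus where 1 or 2 is accepted.
    const bool channels_match = (strcasecmp(name, "opus") == 0)
                                    ? (channels == 1 || channels == 2)
                                    : (channels == kDb[id].channels);
    if (name_match && freq_match && channels_match)
      return id;
  }
  return -1;  // No matching entry.
}

// Example: FindCodec("opus", 48000, 1) returns 2 even though the table entry
// is stereo, mirroring the Opus special case in CodecId() above.

}  // namespace codec_db_sketch
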
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.h
deleted file mode 100644
index 7a7054dd1aa..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.h
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file generates databases with information about all supported audio
- * codecs.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-// TODO(tlegrand): replace class ACMCodecDB with a namespace.
-class ACMCodecDB {
- public:
- // Enum with array indexes for the supported codecs. NOTE! The order MUST
- // be the same as when creating the database in acm_codec_database.cc.
- enum {
- kNone = -1
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- , kISAC
-# if (defined(WEBRTC_CODEC_ISAC))
- , kISACSWB
- , kISACFB
-# endif
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- // Mono
- , kPCM16B
- , kPCM16Bwb
- , kPCM16Bswb32kHz
- // Stereo
- , kPCM16B_2ch
- , kPCM16Bwb_2ch
- , kPCM16Bswb32kHz_2ch
-#endif
- // Mono
- , kPCMU
- , kPCMA
- // Stereo
- , kPCMU_2ch
- , kPCMA_2ch
-#ifdef WEBRTC_CODEC_ILBC
- , kILBC
-#endif
-#ifdef WEBRTC_CODEC_AMR
- , kGSMAMR
-#endif
-#ifdef WEBRTC_CODEC_AMRWB
- , kGSMAMRWB
-#endif
-#ifdef WEBRTC_CODEC_CELT
- // Mono
- , kCELT32
- // Stereo
- , kCELT32_2ch
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- , kG722
- // Stereo
- , kG722_2ch
-#endif
-#ifdef WEBRTC_CODEC_G722_1
- , kG722_1_32
- , kG722_1_24
- , kG722_1_16
-#endif
-#ifdef WEBRTC_CODEC_G722_1C
- , kG722_1C_48
- , kG722_1C_32
- , kG722_1C_24
-#endif
-#ifdef WEBRTC_CODEC_G729
- , kG729
-#endif
-#ifdef WEBRTC_CODEC_G729_1
- , kG729_1
-#endif
-#ifdef WEBRTC_CODEC_GSMFR
- , kGSMFR
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Mono and stereo
- , kOpus
-#endif
-#ifdef WEBRTC_CODEC_SPEEX
- , kSPEEX8
- , kSPEEX16
-#endif
- , kCNNB
- , kCNWB
- , kCNSWB
-#ifdef ENABLE_48000_HZ
- , kCNFB
-#endif
-#ifdef WEBRTC_CODEC_AVT
- , kAVT
-#endif
-#ifdef WEBRTC_CODEC_RED
- , kRED
-#endif
- , kNumCodecs
- };
-
- // Set unsupported codecs to -1
-#ifndef WEBRTC_CODEC_ISAC
- enum {kISACSWB = -1};
- enum {kISACFB = -1};
-# ifndef WEBRTC_CODEC_ISACFX
- enum {kISAC = -1};
-# endif
-#endif
-#ifndef WEBRTC_CODEC_PCM16
- // Mono
- enum {kPCM16B = -1};
- enum {kPCM16Bwb = -1};
- enum {kPCM16Bswb32kHz = -1};
- // Stereo
- enum {kPCM16B_2ch = -1};
- enum {kPCM16Bwb_2ch = -1};
- enum {kPCM16Bswb32kHz_2ch = -1};
-#endif
- // 48 kHz not supported, always set to -1.
- enum {kPCM16Bswb48kHz = -1};
-#ifndef WEBRTC_CODEC_ILBC
- enum {kILBC = -1};
-#endif
-#ifndef WEBRTC_CODEC_AMR
- enum {kGSMAMR = -1};
-#endif
-#ifndef WEBRTC_CODEC_AMRWB
- enum {kGSMAMRWB = -1};
-#endif
-#ifndef WEBRTC_CODEC_CELT
- // Mono
- enum {kCELT32 = -1};
- // Stereo
- enum {kCELT32_2ch = -1};
-#endif
-#ifndef WEBRTC_CODEC_G722
- // Mono
- enum {kG722 = -1};
- // Stereo
- enum {kG722_2ch = -1};
-#endif
-#ifndef WEBRTC_CODEC_G722_1
- enum {kG722_1_32 = -1};
- enum {kG722_1_24 = -1};
- enum {kG722_1_16 = -1};
-#endif
-#ifndef WEBRTC_CODEC_G722_1C
- enum {kG722_1C_48 = -1};
- enum {kG722_1C_32 = -1};
- enum {kG722_1C_24 = -1};
-#endif
-#ifndef WEBRTC_CODEC_G729
- enum {kG729 = -1};
-#endif
-#ifndef WEBRTC_CODEC_G729_1
- enum {kG729_1 = -1};
-#endif
-#ifndef WEBRTC_CODEC_GSMFR
- enum {kGSMFR = -1};
-#endif
-#ifndef WEBRTC_CODEC_SPEEX
- enum {kSPEEX8 = -1};
- enum {kSPEEX16 = -1};
-#endif
-#ifndef WEBRTC_CODEC_OPUS
- // Mono and stereo
- enum {kOpus = -1};
-#endif
-#ifndef WEBRTC_CODEC_AVT
- enum {kAVT = -1};
-#endif
-#ifndef WEBRTC_CODEC_RED
- enum {kRED = -1};
-#endif
-
- // kMaxNumCodecs - Maximum number of codecs that can be activated in one
- // build.
- // kMaxNumPacketSize - Maximum number of allowed packet sizes for one codec.
- // These might need to be increased when adding a new codec to the database.
- static const int kMaxNumCodecs = 50;
- static const int kMaxNumPacketSize = 6;
-
- // Codec specific settings
- //
- // num_packet_sizes - number of allowed packet sizes.
- // packet_sizes_samples - list of the allowed packet sizes.
- // basic_block_samples - assigned a value different from 0 if the codec
- // needs to be fed a specific number of samples
- // that can differ from the packet size.
- // channel_support - number of channels supported for encoding;
- // 1 = mono, 2 = stereo, etc.
- struct CodecSettings {
- int num_packet_sizes;
- int packet_sizes_samples[kMaxNumPacketSize];
- int basic_block_samples;
- int channel_support;
- };
-
- // Gets codec information from the database at the position given by
- // [codec_id].
- // Input:
- // [codec_id] - number that specifies at what position in the database to
- // get the information.
- // Output:
- // [codec_inst] - filled with information about the codec.
- // Return:
- // 0 if successful, otherwise -1.
- static int Codec(int codec_id, CodecInst* codec_inst);
-
- // Returns codec id and mirror id from database, given the information
- // received in the input [codec_inst]. Mirror id is a number that tells
- // where to find the codec's memory (instance). The number is either the
- // same as codec id (most common), or a number pointing at a different
- // entry in the database, if the codec has several entries with different
- // payload types. This is used for codecs that must share one struct even if
- // the payload type differs.
- // One example is the codec iSAC which has the same struct for both 16 and
- // 32 kHz, but they have different entries in the database. Let's say the
- // function is called with iSAC 32 kHz. The function will return 1 as that is
- // the entry in the database, and [mirror_id] = 0, as that is the entry for
- // iSAC 16 kHz, which holds the shared memory.
- // Input:
- // [codec_inst] - Information about the codec for which we require the
- // database id.
- // Output:
- // [mirror_id] - mirror id, which most often is the same as the return
- // value, see above.
- // Return:
- // codec id if successful, otherwise < 0.
- static int CodecNumber(const CodecInst* codec_inst, int* mirror_id);
- static int CodecId(const CodecInst* codec_inst);
- static int CodecId(const char* payload_name, int frequency, int channels);
- static int ReceiverCodecNumber(const CodecInst* codec_inst, int* mirror_id);
-
- // Returns the codec sampling frequency for codec with id = "codec_id" in
- // database.
- // TODO(tlegrand): Check if function is needed, or if we can change
- // to access database directly.
- // Input:
- // [codec_id] - number that specifies at what position in the database to
- // get the information.
- // Return:
- // codec sampling frequency if successful, otherwise -1.
- static int CodecFreq(int codec_id);
-
- // Return the codec's basic coding block size in samples.
- // TODO(tlegrand): Check if function is needed, or if we can change
- // to access database directly.
- // Input:
- // [codec_id] - number that specifies at what position in the database to
- // get the information.
- // Return:
- // codec basic block size if successful, otherwise -1.
- static int BasicCodingBlock(int codec_id);
-
- // Returns the NetEQ decoder database.
- static const WebRtcNetEQDecoder* NetEQDecoders();
-
- // Returns mirror id, which is a number that tells where to find the codec's
- // memory (instance). It is either the same as codec id (most common), or a
- // number pointing at a different entry in the database, if the codec has
- // several entries with different payload types. This is used for codecs that
- // must share a struct even if the payload type differs.
- // TODO(tlegrand): Check if function is needed, or if we can change
- // to access database directly.
- // Input:
- // [codec_id] - number that specifies codec's position in the database.
- // Return:
- // Mirror id on success, otherwise -1.
- static int MirrorID(int codec_id);
-
- // Create memory/instance for storing codec state.
- // Input:
- // [codec_inst] - information about codec. Only name of codec, "plname", is
- // used in this function.
- static ACMGenericCodec* CreateCodecInstance(const CodecInst* codec_inst);
-
- // Checks if the bitrate is valid for the codec.
- // Input:
- // [codec_id] - number that specifies codec's position in the database.
- // [rate] - bitrate to check.
- // [frame_size_samples] - (used for iLBC) specifies which frame size to go
- // with the rate.
- static bool IsRateValid(int codec_id, int rate);
- static bool IsISACRateValid(int rate);
- static bool IsILBCRateValid(int rate, int frame_size_samples);
- static bool IsAMRRateValid(int rate);
- static bool IsAMRwbRateValid(int rate);
- static bool IsG7291RateValid(int rate);
- static bool IsSpeexRateValid(int rate);
- static bool IsOpusRateValid(int rate);
- static bool IsCeltRateValid(int rate);
-
- // Check if the payload type is valid, meaning that it is in the valid range
- // of 0 to 127.
- // Input:
- // [payload_type] - payload type.
- static bool ValidPayloadType(int payload_type);
-
- // Databases with information about the supported codecs
- // database_ - stored information about all codecs: payload type, name,
- // sampling frequency, packet size in samples, default channel
- // support, and default rate.
- // codec_settings_ - stored codec settings: number of allowed packet sizes,
- // a vector with the allowed packet sizes, basic block
- // samples, and max number of channels that are supported.
- // neteq_decoders_ - list of supported decoders in NetEQ.
- static const CodecInst database_[kMaxNumCodecs];
- static const CodecSettings codec_settings_[kMaxNumCodecs];
- static const WebRtcNetEQDecoder neteq_decoders_[kMaxNumCodecs];
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
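
The header above documents the "mirror id" mechanism at length: a codec such as iSAC has several database entries (16 kHz and 32 kHz) but must share a single codec instance, so lookups return both the codec id and the id of the entry that owns the shared memory. The following self-contained sketch shows that idea in miniature; the slot table and the MirrorId()/GetOrCreate() names are illustrative, not the WebRTC API.

#include <array>
#include <memory>

namespace codec_db_sketch {

struct CodecState {  // Stand-in for an ACMGenericCodec-style wrapper.
  int sample_rate_hz;
};

constexpr int kIsacWb = 0;   // 16 kHz entry; owns the shared instance.
constexpr int kIsacSwb = 1;  // 32 kHz entry; mirrors kIsacWb.

int MirrorId(int codec_id) {
  return codec_id == kIsacSwb ? kIsacWb : codec_id;
}

// One instance slot per *mirror* id, not per codec id.
std::array<std::unique_ptr<CodecState>, 2> instances;

CodecState* GetOrCreate(int codec_id, int sample_rate_hz) {
  const int slot = MirrorId(codec_id);
  if (!instances[slot])
    instances[slot].reset(new CodecState{sample_rate_hz});
  return instances[slot].get();
}

// GetOrCreate(kIsacWb, 16000) and GetOrCreate(kIsacSwb, 32000) return the
// same pointer: both entries map to slot 0, so they share one state object.

}  // namespace codec_db_sketch
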
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc
deleted file mode 100644
index edb62987689..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h"
-
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMDTMFDetection::ACMDTMFDetection() {}
-
-ACMDTMFDetection::~ACMDTMFDetection() {}
-
-int16_t ACMDTMFDetection::Enable(ACMCountries /* cpt */) {
- return -1;
-}
-
-int16_t ACMDTMFDetection::Disable() {
- return -1;
-}
-
-int16_t ACMDTMFDetection::Detect(
- const int16_t* /* in_audio_buff */,
- const uint16_t /* in_buff_len_word16 */,
- const int32_t /* in_freq_hz */,
- bool& /* tone_detected */,
- int16_t& /* tone */) {
- return -1;
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h
deleted file mode 100644
index 74553107a36..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
-
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_resampler.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMDTMFDetection {
- public:
- ACMDTMFDetection();
- ~ACMDTMFDetection();
- int16_t Enable(ACMCountries cpt = ACMDisableCountryDetection);
- int16_t Disable();
- int16_t Detect(const int16_t* in_audio_buff,
- const uint16_t in_buff_len_word16,
- const int32_t in_freq_hz,
- bool& tone_detected,
- int16_t& tone);
-
- private:
- ACMResampler resampler_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc
deleted file mode 100644
index 32195e6fe82..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_AVT
-
-ACMDTMFPlayout::ACMDTMFPlayout(
- int16_t /* codec_id */) {
- return;
-}
-
-ACMDTMFPlayout::~ACMDTMFPlayout() {
- return;
-}
-
-int16_t ACMDTMFPlayout::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMDTMFPlayout::DecodeSafe(
- uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMDTMFPlayout::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMDTMFPlayout::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMDTMFPlayout::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMDTMFPlayout::InternalCreateEncoder() {
- return -1;
-}
-
-int16_t ACMDTMFPlayout::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMDTMFPlayout::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-void ACMDTMFPlayout::DestructEncoderSafe() {
- return;
-}
-
-void ACMDTMFPlayout::DestructDecoderSafe() {
- return;
-}
-
-#else //===================== Actual Implementation =======================
-
-ACMDTMFPlayout::ACMDTMFPlayout(int16_t codec_id) {
- codec_id_ = codec_id;
-}
-
-ACMDTMFPlayout::~ACMDTMFPlayout() {
- return;
-}
-
-int16_t ACMDTMFPlayout::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return 0;
-}
-
-int16_t ACMDTMFPlayout::DecodeSafe(
- uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMDTMFPlayout::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization,
- // DTMFPlayout has no instance
- return 0;
-}
-
-int16_t ACMDTMFPlayout::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization,
- // DTMFPlayout has no instance
- return 0;
-}
-
-int32_t ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_AVT_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- SET_CODEC_PAR((codec_def), kDecoderAVT, codec_inst.pltype, NULL, 8000);
- SET_AVT_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMDTMFPlayout::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMDTMFPlayout::InternalCreateEncoder() {
- // DTMFPlayout has no instance
- return 0;
-}
-
-int16_t ACMDTMFPlayout::InternalCreateDecoder() {
- // DTMFPlayout has no instance
- return 0;
-}
-
-void ACMDTMFPlayout::InternalDestructEncoderInst(void* /* ptr_inst */) {
- // DTMFPlayout has no instance
- return;
-}
-
-void ACMDTMFPlayout::DestructEncoderSafe() {
- // DTMFPlayout has no instance
- return;
-}
-
-void ACMDTMFPlayout::DestructDecoderSafe() {
- // DTMFPlayout has no instance
- return;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h
deleted file mode 100644
index 46175f59e66..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMDTMFPlayout: public ACMGenericCodec {
- public:
- explicit ACMDTMFPlayout(int16_t codec_id);
- virtual ~ACMDTMFPlayout();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.cc
deleted file mode 100644
index 1c19109b6a3..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.cc
+++ /dev/null
@@ -1,358 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_g722.h"
-
-#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_G722
-
-ACMG722::ACMG722(int16_t /* codec_id */)
- : ptr_enc_str_(NULL),
- ptr_dec_str_(NULL),
- encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL) {}
-
-ACMG722::~ACMG722() {}
-
-int32_t ACMG722::Add10MsDataSafe(
- const uint32_t /* timestamp */,
- const int16_t* /* data */,
- const uint16_t /* length_smpl */,
- const uint8_t /* audio_channel */) {
- return -1;
-}
-
-int16_t ACMG722::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMG722::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMG722::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMG722::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMG722::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMG722::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMG722::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMG722::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMG722::DestructDecoderSafe() {
- return;
-}
-
-void ACMG722::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-void ACMG722::SplitStereoPacket(uint8_t* /*payload*/,
- int32_t* /*payload_length*/) {}
-
-#else //===================== Actual Implementation =======================
-
-// Encoder and decoder memory
-struct ACMG722EncStr {
- G722EncInst* inst; // instance for left channel in case of stereo
- G722EncInst* inst_right; // instance for right channel in case of stereo
-};
-struct ACMG722DecStr {
- G722DecInst* inst; // instance for left channel in case of stereo
- G722DecInst* inst_right; // instance for right channel in case of stereo
-};
-
-ACMG722::ACMG722(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL) {
- // Encoder
- ptr_enc_str_ = new ACMG722EncStr;
- if (ptr_enc_str_ != NULL) {
- ptr_enc_str_->inst = NULL;
- ptr_enc_str_->inst_right = NULL;
- }
- // Decoder
- ptr_dec_str_ = new ACMG722DecStr;
- if (ptr_dec_str_ != NULL) {
- ptr_dec_str_->inst = NULL;
- ptr_dec_str_->inst_right = NULL; // Not used
- }
- codec_id_ = codec_id;
- return;
-}
-
-ACMG722::~ACMG722() {
- // Encoder
- if (ptr_enc_str_ != NULL) {
- if (ptr_enc_str_->inst != NULL) {
- WebRtcG722_FreeEncoder(ptr_enc_str_->inst);
- ptr_enc_str_->inst = NULL;
- }
- if (ptr_enc_str_->inst_right != NULL) {
- WebRtcG722_FreeEncoder(ptr_enc_str_->inst_right);
- ptr_enc_str_->inst_right = NULL;
- }
- delete ptr_enc_str_;
- ptr_enc_str_ = NULL;
- }
- // Decoder
- if (ptr_dec_str_ != NULL) {
- if (ptr_dec_str_->inst != NULL) {
- WebRtcG722_FreeDecoder(ptr_dec_str_->inst);
- ptr_dec_str_->inst = NULL;
- }
- if (ptr_dec_str_->inst_right != NULL) {
- WebRtcG722_FreeDecoder(ptr_dec_str_->inst_right);
- ptr_dec_str_->inst_right = NULL;
- }
- delete ptr_dec_str_;
- ptr_dec_str_ = NULL;
- }
- return;
-}
-
-int32_t ACMG722::Add10MsDataSafe(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length_smpl,
- const uint8_t audio_channel) {
- return ACMGenericCodec::Add10MsDataSafe((timestamp >> 1), data, length_smpl,
- audio_channel);
-}
-
-int16_t ACMG722::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- // If stereo, split input signal in left and right channel before encoding
- if (num_channels_ == 2) {
- int16_t left_channel[960];
- int16_t right_channel[960];
- uint8_t out_left[480];
- uint8_t out_right[480];
- int16_t len_in_bytes;
- for (int i = 0, j = 0; i < frame_len_smpl_ * 2; i += 2, j++) {
- left_channel[j] = in_audio_[in_audio_ix_read_ + i];
- right_channel[j] = in_audio_[in_audio_ix_read_ + i + 1];
- }
- len_in_bytes = WebRtcG722_Encode(encoder_inst_ptr_, left_channel,
- frame_len_smpl_,
- (int16_t*)out_left);
- len_in_bytes += WebRtcG722_Encode(encoder_inst_ptr_right_, right_channel,
- frame_len_smpl_,
- (int16_t*)out_right);
- *bitstream_len_byte = len_in_bytes;
-
- // Interleave the 4 bits per sample from left and right channel
- for (int i = 0, j = 0; i < len_in_bytes; i += 2, j++) {
- bitstream[i] = (out_left[j] & 0xF0) + (out_right[j] >> 4);
- bitstream[i + 1] = ((out_left[j] & 0x0F) << 4) + (out_right[j] & 0x0F);
- }
- } else {
- *bitstream_len_byte = WebRtcG722_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- (int16_t*)bitstream);
- }
-
- // Increment the read index; this tells the caller how far
- // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMG722::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMG722::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- if (codec_params->codec_inst.channels == 2) {
- // Create codec struct for right channel
- if (ptr_enc_str_->inst_right == NULL) {
- WebRtcG722_CreateEncoder(&ptr_enc_str_->inst_right);
- if (ptr_enc_str_->inst_right == NULL) {
- return -1;
- }
- }
- encoder_inst_ptr_right_ = ptr_enc_str_->inst_right;
- if (WebRtcG722_EncoderInit(encoder_inst_ptr_right_) < 0) {
- return -1;
- }
- }
-
- return WebRtcG722_EncoderInit(encoder_inst_ptr_);
-}
-
-int16_t ACMG722::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return WebRtcG722_DecoderInit(decoder_inst_ptr_);
-}
-
-int32_t ACMG722::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- // TODO(turajs): log error
- return -1;
- }
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_G722_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- if (codec_inst.channels == 1) {
- SET_CODEC_PAR(codec_def, kDecoderG722, codec_inst.pltype, decoder_inst_ptr_,
- 16000);
- } else {
- SET_CODEC_PAR(codec_def, kDecoderG722_2ch, codec_inst.pltype,
- decoder_inst_ptr_, 16000);
- }
- SET_G722_FUNCTIONS(codec_def);
- return 0;
-}
-
-ACMGenericCodec* ACMG722::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722::InternalCreateEncoder() {
- if (ptr_enc_str_ == NULL) {
- // This structure must be created in the constructor.
- // If it is still NULL, then there is a problem and
- // we don't continue.
- return -1;
- }
- WebRtcG722_CreateEncoder(&ptr_enc_str_->inst);
- if (ptr_enc_str_->inst == NULL) {
- return -1;
- }
- encoder_inst_ptr_ = ptr_enc_str_->inst;
- return 0;
-}
-
-void ACMG722::DestructEncoderSafe() {
- if (ptr_enc_str_ != NULL) {
- if (ptr_enc_str_->inst != NULL) {
- WebRtcG722_FreeEncoder(ptr_enc_str_->inst);
- ptr_enc_str_->inst = NULL;
- }
- }
- encoder_exist_ = false;
- encoder_initialized_ = false;
-}
-
-int16_t ACMG722::InternalCreateDecoder() {
- if (ptr_dec_str_ == NULL) {
- // This structure must be created in the constructor.
- // If it is still NULL, then there is a problem and
- // we don't continue.
- return -1;
- }
-
- WebRtcG722_CreateDecoder(&ptr_dec_str_->inst);
- if (ptr_dec_str_->inst == NULL) {
- return -1;
- }
- decoder_inst_ptr_ = ptr_dec_str_->inst;
- return 0;
-}
-
-void ACMG722::DestructDecoderSafe() {
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (ptr_dec_str_ != NULL) {
- if (ptr_dec_str_->inst != NULL) {
- WebRtcG722_FreeDecoder(ptr_dec_str_->inst);
- ptr_dec_str_->inst = NULL;
- }
- }
-}
-
-void ACMG722::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcG722_FreeEncoder(static_cast<G722EncInst*>(ptr_inst));
- }
- return;
-}
-
-// Split the stereo packet and place left and right channel after each other
-// in the payload vector.
-void ACMG722::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- uint8_t right_byte;
-
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
- // Regroup the 4 bits/sample to |l1 l2| |r1 r2| |l3 l4| |r3 r4| ...,
- // where "lx" is 4 bits representing left sample number x, and "rx" the right
- // sample. Two samples fit in one byte, represented with |...|.
- for (int i = 0; i < *payload_length; i += 2) {
- right_byte = ((payload[i] & 0x0F) << 4) + (payload[i + 1] & 0x0F);
- payload[i] = (payload[i] & 0xF0) + (payload[i + 1] >> 4);
- payload[i + 1] = right_byte;
- }
-
- // In each loop iteration, move one byte representing the right channel to
- // the end of the bytestream vector. After looping, the data is reordered to:
- // |l1 l2| |l3 l4| ... |l(N-1) lN| |r1 r2| |r3 r4| ... |r(N-1) r(N)|,
- // where N is the total number of samples.
- for (int i = 0; i < *payload_length / 2; i++) {
- right_byte = payload[i + 1];
- memmove(&payload[i + 1], &payload[i + 2], *payload_length - i - 2);
- payload[*payload_length - 1] = right_byte;
- }
-}
-
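
The two loops in SplitStereoPacket() above are easier to follow on a concrete buffer: starting from the interleaved encoder output |l1 r1| |l2 r2| |l3 r3| |l4 r4|, the first loop regroups nibbles into |l1 l2| |r1 r2| |l3 l4| |r3 r4|, and the second loop moves the right-channel bytes to the back, ending with |l1 l2| |l3 l4| |r1 r2| |r3 r4|. Below is a self-contained sketch of the same regrouping; the function name is illustrative, not part of WebRTC.

#include <cstdint>
#include <cstring>

// Illustrative standalone version of the regrouping done by
// ACMG722::SplitStereoPacket(): |payload| holds 4-bit samples interleaved as
// |l1 r1| |l2 r2| ... and is rearranged in place to all-left-then-all-right.
void SplitInterleavedNibbles(uint8_t* payload, int payload_length) {
  // Pass 1: within each byte pair, build a left byte followed by a right byte.
  for (int i = 0; i + 1 < payload_length; i += 2) {
    const uint8_t right = static_cast<uint8_t>(
        ((payload[i] & 0x0F) << 4) | (payload[i + 1] & 0x0F));
    payload[i] = static_cast<uint8_t>(
        (payload[i] & 0xF0) | (payload[i + 1] >> 4));
    payload[i + 1] = right;
  }
  // Pass 2: move each right-channel byte to the end of the buffer, so all
  // left bytes end up first, followed by all right bytes.
  for (int i = 0; i < payload_length / 2; ++i) {
    const uint8_t right = payload[i + 1];
    std::memmove(&payload[i + 1], &payload[i + 2], payload_length - i - 2);
    payload[payload_length - 1] = right;
  }
}
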
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.h
deleted file mode 100644
index cf7ebe1e223..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-typedef struct WebRtcG722EncInst G722EncInst;
-typedef struct WebRtcG722DecInst G722DecInst;
-
-namespace webrtc {
-
-namespace acm1 {
-
-// forward declaration
-struct ACMG722EncStr;
-struct ACMG722DecStr;
-
-class ACMG722 : public ACMGenericCodec {
- public:
- explicit ACMG722(int16_t codec_id);
- virtual ~ACMG722();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual int32_t Add10MsDataSafe(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length_smpl,
- const uint8_t audio_channel) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-
- ACMG722EncStr* ptr_enc_str_;
- ACMG722DecStr* ptr_dec_str_;
-
- G722EncInst* encoder_inst_ptr_;
- G722EncInst* encoder_inst_ptr_right_; // Prepared for stereo
- G722DecInst* decoder_inst_ptr_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.cc
deleted file mode 100644
index ed172fd3e1b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.cc
+++ /dev/null
@@ -1,500 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_g7221.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_G722_1
-// NOTE! G.722.1 is not included in the open-source package. The following
-// interface file is needed:
-//
-// /modules/audio_coding/codecs/g7221/main/interface/g7221_interface.h
-//
-// The API in the header file should match the one below.
-//
-// int16_t WebRtcG7221_CreateEnc16(G722_1_16_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_CreateEnc24(G722_1_24_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_CreateEnc32(G722_1_32_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_CreateDec16(G722_1_16_decinst_t_** dec_inst);
-// int16_t WebRtcG7221_CreateDec24(G722_1_24_decinst_t_** dec_inst);
-// int16_t WebRtcG7221_CreateDec32(G722_1_32_decinst_t_** dec_inst);
-//
-// int16_t WebRtcG7221_FreeEnc16(G722_1_16_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_FreeEnc24(G722_1_24_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_FreeEnc32(G722_1_32_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_FreeDec16(G722_1_16_decinst_t_** dec_inst);
-// int16_t WebRtcG7221_FreeDec24(G722_1_24_decinst_t_** dec_inst);
-// int16_t WebRtcG7221_FreeDec32(G722_1_32_decinst_t_** dec_inst);
-//
-// int16_t WebRtcG7221_EncoderInit16(G722_1_16_encinst_t_* enc_inst);
-// int16_t WebRtcG7221_EncoderInit24(G722_1_24_encinst_t_* enc_inst);
-// int16_t WebRtcG7221_EncoderInit32(G722_1_32_encinst_t_* enc_inst);
-// int16_t WebRtcG7221_DecoderInit16(G722_1_16_decinst_t_* dec_inst);
-// int16_t WebRtcG7221_DecoderInit24(G722_1_24_decinst_t_* dec_inst);
-// int16_t WebRtcG7221_DecoderInit32(G722_1_32_decinst_t_* dec_inst);
-//
-// int16_t WebRtcG7221_Encode16(G722_1_16_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221_Encode24(G722_1_24_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221_Encode32(G722_1_32_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-//
-// int16_t WebRtcG7221_Decode16(G722_1_16_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221_Decode24(G722_1_24_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221_Decode32(G722_1_32_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-//
-// int16_t WebRtcG7221_DecodePlc16(G722_1_16_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-// int16_t WebRtcG7221_DecodePlc24(G722_1_24_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-// int16_t WebRtcG7221_DecodePlc32(G722_1_32_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-#include "g7221_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_G722_1
-
-ACMG722_1::ACMG722_1(int16_t /* codec_id */)
- : operational_rate_(-1),
- encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL),
- encoder_inst16_ptr_(NULL),
- encoder_inst16_ptr_right_(NULL),
- encoder_inst24_ptr_(NULL),
- encoder_inst24_ptr_right_(NULL),
- encoder_inst32_ptr_(NULL),
- encoder_inst32_ptr_right_(NULL),
- decoder_inst16_ptr_(NULL),
- decoder_inst24_ptr_(NULL),
- decoder_inst32_ptr_(NULL) {
- return;
-}
-
-ACMG722_1::~ACMG722_1() {
- return;
-}
-
-int16_t ACMG722_1::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMG722_1::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMG722_1::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMG722_1::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMG722_1::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722_1::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMG722_1::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMG722_1::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMG722_1::DestructDecoderSafe() {
- return;
-}
-
-void ACMG722_1::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-#else //===================== Actual Implementation =======================
-ACMG722_1::ACMG722_1(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL),
- encoder_inst16_ptr_(NULL),
- encoder_inst16_ptr_right_(NULL),
- encoder_inst24_ptr_(NULL),
- encoder_inst24_ptr_right_(NULL),
- encoder_inst32_ptr_(NULL),
- encoder_inst32_ptr_right_(NULL),
- decoder_inst16_ptr_(NULL),
- decoder_inst24_ptr_(NULL),
- decoder_inst32_ptr_(NULL) {
- codec_id_ = codec_id;
- if (codec_id_ == ACMCodecDB::kG722_1_16) {
- operational_rate_ = 16000;
- } else if (codec_id_ == ACMCodecDB::kG722_1_24) {
- operational_rate_ = 24000;
- } else if (codec_id_ == ACMCodecDB::kG722_1_32) {
- operational_rate_ = 32000;
- } else {
- operational_rate_ = -1;
- }
- return;
-}
-
-ACMG722_1::~ACMG722_1() {
- if (encoder_inst_ptr_ != NULL) {
- delete encoder_inst_ptr_;
- encoder_inst_ptr_ = NULL;
- }
- if (encoder_inst_ptr_right_ != NULL) {
- delete encoder_inst_ptr_right_;
- encoder_inst_ptr_right_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- delete decoder_inst_ptr_;
- decoder_inst_ptr_ = NULL;
- }
-
- switch (operational_rate_) {
- case 16000: {
- encoder_inst16_ptr_ = NULL;
- encoder_inst16_ptr_right_ = NULL;
- decoder_inst16_ptr_ = NULL;
- break;
- }
- case 24000: {
- encoder_inst24_ptr_ = NULL;
- encoder_inst24_ptr_right_ = NULL;
- decoder_inst24_ptr_ = NULL;
- break;
- }
- case 32000: {
- encoder_inst32_ptr_ = NULL;
- encoder_inst32_ptr_right_ = NULL;
- decoder_inst32_ptr_ = NULL;
- break;
- }
- default: {
- break;
- }
- }
- return;
-}
-
-int16_t ACMG722_1::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- int16_t left_channel[320];
- int16_t right_channel[320];
- int16_t len_in_bytes;
- int16_t out_bits[160];
-
- // If stereo, split input signal in left and right channel before encoding
- if (num_channels_ == 2) {
- for (int i = 0, j = 0; i < frame_len_smpl_ * 2; i += 2, j++) {
- left_channel[j] = in_audio_[in_audio_ix_read_ + i];
- right_channel[j] = in_audio_[in_audio_ix_read_ + i + 1];
- }
- } else {
- memcpy(left_channel, &in_audio_[in_audio_ix_read_], 320);
- }
-
- switch (operational_rate_) {
- case 16000: {
- len_in_bytes = WebRtcG7221_Encode16(encoder_inst16_ptr_, left_channel,
- 320, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221_Encode16(encoder_inst16_ptr_right_,
- right_channel, 320,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- case 24000: {
- len_in_bytes = WebRtcG7221_Encode24(encoder_inst24_ptr_, left_channel,
- 320, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221_Encode24(encoder_inst24_ptr_right_,
- right_channel, 320,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- case 32000: {
- len_in_bytes = WebRtcG7221_Encode32(encoder_inst32_ptr_, left_channel,
- 320, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221_Encode32(encoder_inst32_ptr_right_,
- right_channel, 320,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitEncode: Wrong rate for G722_1.");
- return -1;
- }
- }
- memcpy(bitstream, out_bits, len_in_bytes);
- *bitstream_len_byte = len_in_bytes;
-
- // Increment the read index; this tells the caller how far
- // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += 320 * num_channels_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMG722_1::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMG722_1::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- int16_t ret;
-
- switch (operational_rate_) {
- case 16000: {
- ret = WebRtcG7221_EncoderInit16(encoder_inst16_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221_EncoderInit16(encoder_inst16_ptr_);
- }
- case 24000: {
- ret = WebRtcG7221_EncoderInit24(encoder_inst24_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221_EncoderInit24(encoder_inst24_ptr_);
- }
- case 32000: {
- ret = WebRtcG7221_EncoderInit32(encoder_inst32_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221_EncoderInit32(encoder_inst32_ptr_);
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding,
- unique_id_, "InternalInitEncoder: Wrong rate for G722_1.");
- return -1;
- }
- }
-}
-
-int16_t ACMG722_1::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- switch (operational_rate_) {
- case 16000: {
- return WebRtcG7221_DecoderInit16(decoder_inst16_ptr_);
- }
- case 24000: {
- return WebRtcG7221_DecoderInit24(decoder_inst24_ptr_);
- }
- case 32000: {
- return WebRtcG7221_DecoderInit32(decoder_inst32_ptr_);
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: Wrong rate for G722_1.");
- return -1;
- }
- }
-}
-
-int32_t ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
-    // TODO: Log the error.
- return -1;
- }
-  // NetEQ has an array of pointers to WebRtcNetEQ_CodecDef.
-  // Get an entry of that array (the NetEQ wrapper will allocate memory)
-  // by calling "netEq->CodecDef", where "NETEQ_CODEC_G722_1_XX" would
-  // be the index of the entry.
-  // Fill up the given structure by calling
-  // "SET_CODEC_PAR" & "SET_G722_1_XX_FUNCTIONS".
-  // Then return the structure back to NetEQ to add the codec to its
-  // database.
- switch (operational_rate_) {
- case 16000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1_16, codec_inst.pltype,
- decoder_inst16_ptr_, 16000);
- SET_G722_1_16_FUNCTIONS((codec_def));
- break;
- }
- case 24000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1_24, codec_inst.pltype,
- decoder_inst24_ptr_, 16000);
- SET_G722_1_24_FUNCTIONS((codec_def));
- break;
- }
- case 32000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1_32, codec_inst.pltype,
- decoder_inst32_ptr_, 16000);
- SET_G722_1_32_FUNCTIONS((codec_def));
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodecDef: Wrong rate for G722_1.");
- return -1;
- }
- }
- return 0;
-}
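The comment at the top of CodecDef() describes the registration contract with NetEQ: fill a WebRtcNetEQ_CodecDef entry with the codec kind, payload type, decoder state, and sample rate, then hand it back so NetEQ can add the codec to its database. Purely as an illustration of what the SET_CODEC_PAR-style fill amounts to (the struct and helper below are hypothetical; the real macros live in webrtc_neteq_help_macros.h and their field names may differ):

// Hypothetical, simplified codec-definition entry; illustrates what the
// SET_CODEC_PAR / SET_G722_1_XX_FUNCTIONS macros conceptually fill in.
struct ExampleCodecDef {
  int codec_kind;      // e.g. kDecoderG722_1_16
  int payload_type;    // RTP payload type from CodecInst
  void* codec_state;   // rate-specific decoder instance
  int sample_rate_hz;  // clock rate registered with NetEQ
  // ... plus function pointers for decode, PLC, init, etc.
};

static void FillExampleCodecDef(ExampleCodecDef* def, int codec_kind,
                                int payload_type, void* decoder_state,
                                int sample_rate_hz) {
  def->codec_kind = codec_kind;
  def->payload_type = payload_type;
  def->codec_state = decoder_state;
  def->sample_rate_hz = sample_rate_hz;
}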
-
-ACMGenericCodec* ACMG722_1::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722_1::InternalCreateEncoder() {
- if ((encoder_inst_ptr_ == NULL) || (encoder_inst_ptr_right_ == NULL)) {
- return -1;
- }
- switch (operational_rate_) {
- case 16000: {
- WebRtcG7221_CreateEnc16(&encoder_inst16_ptr_);
- WebRtcG7221_CreateEnc16(&encoder_inst16_ptr_right_);
- break;
- }
- case 24000: {
- WebRtcG7221_CreateEnc24(&encoder_inst24_ptr_);
- WebRtcG7221_CreateEnc24(&encoder_inst24_ptr_right_);
- break;
- }
- case 32000: {
- WebRtcG7221_CreateEnc32(&encoder_inst32_ptr_);
- WebRtcG7221_CreateEnc32(&encoder_inst32_ptr_right_);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: Wrong rate for G722_1.");
- return -1;
- }
- }
- return 0;
-}
-
-void ACMG722_1::DestructEncoderSafe() {
- encoder_exist_ = false;
- encoder_initialized_ = false;
- if (encoder_inst_ptr_ != NULL) {
- delete encoder_inst_ptr_;
- encoder_inst_ptr_ = NULL;
- }
- if (encoder_inst_ptr_right_ != NULL) {
- delete encoder_inst_ptr_right_;
- encoder_inst_ptr_right_ = NULL;
- }
- encoder_inst16_ptr_ = NULL;
- encoder_inst24_ptr_ = NULL;
- encoder_inst32_ptr_ = NULL;
-}
-
-int16_t ACMG722_1::InternalCreateDecoder() {
- if (decoder_inst_ptr_ == NULL) {
- return -1;
- }
- switch (operational_rate_) {
- case 16000: {
- WebRtcG7221_CreateDec16(&decoder_inst16_ptr_);
- break;
- }
- case 24000: {
- WebRtcG7221_CreateDec24(&decoder_inst24_ptr_);
- break;
- }
- case 32000: {
- WebRtcG7221_CreateDec32(&decoder_inst32_ptr_);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateDecoder: Wrong rate for G722_1.");
- return -1;
- }
- }
- return 0;
-}
-
-void ACMG722_1::DestructDecoderSafe() {
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (decoder_inst_ptr_ != NULL) {
- delete decoder_inst_ptr_;
- decoder_inst_ptr_ = NULL;
- }
- decoder_inst16_ptr_ = NULL;
- decoder_inst24_ptr_ = NULL;
- decoder_inst32_ptr_ = NULL;
-}
-
-void ACMG722_1::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- delete ptr_inst;
- }
- return;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.h
deleted file mode 100644
index 8ea66742c97..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.h
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct G722_1_16_encinst_t_;
-struct G722_1_16_decinst_t_;
-struct G722_1_24_encinst_t_;
-struct G722_1_24_decinst_t_;
-struct G722_1_32_encinst_t_;
-struct G722_1_32_decinst_t_;
-struct G722_1_Inst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMG722_1: public ACMGenericCodec {
- public:
- explicit ACMG722_1(int16_t codec_id);
- ~ACMG722_1();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio, int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(void* ptr_inst);
-
- int32_t operational_rate_;
-
- G722_1_Inst_t_* encoder_inst_ptr_;
- G722_1_Inst_t_* encoder_inst_ptr_right_; // Used in stereo mode
- G722_1_Inst_t_* decoder_inst_ptr_;
-
-  // Only one set of these pointers is valid at any given time
- G722_1_16_encinst_t_* encoder_inst16_ptr_;
- G722_1_16_encinst_t_* encoder_inst16_ptr_right_;
- G722_1_24_encinst_t_* encoder_inst24_ptr_;
- G722_1_24_encinst_t_* encoder_inst24_ptr_right_;
- G722_1_32_encinst_t_* encoder_inst32_ptr_;
- G722_1_32_encinst_t_* encoder_inst32_ptr_right_;
-
-  // Only one of these pointers is valid at any given time
- G722_1_16_decinst_t_* decoder_inst16_ptr_;
- G722_1_24_decinst_t_* decoder_inst24_ptr_;
- G722_1_32_decinst_t_* decoder_inst32_ptr_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.cc
deleted file mode 100644
index 96caba0a08f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.cc
+++ /dev/null
@@ -1,510 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_g7221c.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_G722_1C
-// NOTE! G.722.1C is not included in the open-source package. The following
-// interface file is needed:
-//
-// /modules/audio_coding/codecs/g7221c/main/interface/g7221c_interface.h
-//
-// The API in the header file should match the one below.
-//
-
-// int16_t WebRtcG7221C_CreateEnc24(G722_1C_24_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_CreateEnc32(G722_1C_32_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_CreateEnc48(G722_1C_48_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_CreateDec24(G722_1C_24_decinst_t_** dec_inst);
-// int16_t WebRtcG7221C_CreateDec32(G722_1C_32_decinst_t_** dec_inst);
-// int16_t WebRtcG7221C_CreateDec48(G722_1C_48_decinst_t_** dec_inst);
-//
-// int16_t WebRtcG7221C_FreeEnc24(G722_1C_24_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_FreeEnc32(G722_1C_32_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_FreeEnc48(G722_1C_48_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_FreeDec24(G722_1C_24_decinst_t_** dec_inst);
-// int16_t WebRtcG7221C_FreeDec32(G722_1C_32_decinst_t_** dec_inst);
-// int16_t WebRtcG7221C_FreeDec48(G722_1C_48_decinst_t_** dec_inst);
-//
-// int16_t WebRtcG7221C_EncoderInit24(G722_1C_24_encinst_t_* enc_inst);
-// int16_t WebRtcG7221C_EncoderInit32(G722_1C_32_encinst_t_* enc_inst);
-// int16_t WebRtcG7221C_EncoderInit48(G722_1C_48_encinst_t_* enc_inst);
-// int16_t WebRtcG7221C_DecoderInit24(G722_1C_24_decinst_t_* dec_inst);
-// int16_t WebRtcG7221C_DecoderInit32(G722_1C_32_decinst_t_* dec_inst);
-// int16_t WebRtcG7221C_DecoderInit48(G722_1C_48_decinst_t_* dec_inst);
-//
-// int16_t WebRtcG7221C_Encode24(G722_1C_24_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221C_Encode32(G722_1C_32_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221C_Encode48(G722_1C_48_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-//
-// int16_t WebRtcG7221C_Decode24(G722_1C_24_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221C_Decode32(G722_1C_32_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221C_Decode48(G722_1C_48_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-//
-// int16_t WebRtcG7221C_DecodePlc24(G722_1C_24_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-// int16_t WebRtcG7221C_DecodePlc32(G722_1C_32_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-// int16_t WebRtcG7221C_DecodePlc48(G722_1C_48_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-#include "g7221c_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_G722_1C
-
-ACMG722_1C::ACMG722_1C(int16_t /* codec_id */)
- : operational_rate_(-1),
- encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL),
- encoder_inst24_ptr_(NULL),
- encoder_inst24_ptr_right_(NULL),
- encoder_inst32_ptr_(NULL),
- encoder_inst32_ptr_right_(NULL),
- encoder_inst48_ptr_(NULL),
- encoder_inst48_ptr_right_(NULL),
- decoder_inst24_ptr_(NULL),
- decoder_inst32_ptr_(NULL),
- decoder_inst48_ptr_(NULL) {
- return;
-}
-
-ACMG722_1C::~ACMG722_1C() {
- return;
-}
-
-int16_t ACMG722_1C::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMG722_1C::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMG722_1C::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMG722_1C::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMG722_1C::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722_1C::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMG722_1C::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMG722_1C::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMG722_1C::DestructDecoderSafe() {
- return;
-}
-
-void ACMG722_1C::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-#else //===================== Actual Implementation =======================
-ACMG722_1C::ACMG722_1C(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL),
- encoder_inst24_ptr_(NULL),
- encoder_inst24_ptr_right_(NULL),
- encoder_inst32_ptr_(NULL),
- encoder_inst32_ptr_right_(NULL),
- encoder_inst48_ptr_(NULL),
- encoder_inst48_ptr_right_(NULL),
- decoder_inst24_ptr_(NULL),
- decoder_inst32_ptr_(NULL),
- decoder_inst48_ptr_(NULL) {
- codec_id_ = codec_id;
- if (codec_id_ == ACMCodecDB::kG722_1C_24) {
- operational_rate_ = 24000;
- } else if (codec_id_ == ACMCodecDB::kG722_1C_32) {
- operational_rate_ = 32000;
- } else if (codec_id_ == ACMCodecDB::kG722_1C_48) {
- operational_rate_ = 48000;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Wrong codec id for G722_1c.");
- operational_rate_ = -1;
- }
- return;
-}
-
-ACMG722_1C::~ACMG722_1C() {
- if (encoder_inst_ptr_ != NULL) {
- delete encoder_inst_ptr_;
- encoder_inst_ptr_ = NULL;
- }
- if (encoder_inst_ptr_right_ != NULL) {
- delete encoder_inst_ptr_right_;
- encoder_inst_ptr_right_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- delete decoder_inst_ptr_;
- decoder_inst_ptr_ = NULL;
- }
-
- switch (operational_rate_) {
- case 24000: {
- encoder_inst24_ptr_ = NULL;
- encoder_inst24_ptr_right_ = NULL;
- decoder_inst24_ptr_ = NULL;
- break;
- }
- case 32000: {
- encoder_inst32_ptr_ = NULL;
- encoder_inst32_ptr_right_ = NULL;
- decoder_inst32_ptr_ = NULL;
- break;
- }
- case 48000: {
- encoder_inst48_ptr_ = NULL;
- encoder_inst48_ptr_right_ = NULL;
- decoder_inst48_ptr_ = NULL;
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Wrong rate for G722_1c.");
- break;
- }
- }
- return;
-}
-
-int16_t ACMG722_1C::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- int16_t left_channel[640];
- int16_t right_channel[640];
- int16_t len_in_bytes;
- int16_t out_bits[240];
-
-  // If stereo, split the input signal into left and right channels before
-  // encoding
- if (num_channels_ == 2) {
- for (int i = 0, j = 0; i < frame_len_smpl_ * 2; i += 2, j++) {
- left_channel[j] = in_audio_[in_audio_ix_read_ + i];
- right_channel[j] = in_audio_[in_audio_ix_read_ + i + 1];
- }
- } else {
-    memcpy(left_channel, &in_audio_[in_audio_ix_read_],
-           640 * sizeof(int16_t));
- }
-
- switch (operational_rate_) {
- case 24000: {
- len_in_bytes = WebRtcG7221C_Encode24(encoder_inst24_ptr_, left_channel,
- 640, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221C_Encode24(encoder_inst24_ptr_right_,
- right_channel, 640,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- case 32000: {
- len_in_bytes = WebRtcG7221C_Encode32(encoder_inst32_ptr_, left_channel,
- 640, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221C_Encode32(encoder_inst32_ptr_right_,
- right_channel, 640,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- case 48000: {
- len_in_bytes = WebRtcG7221C_Encode48(encoder_inst48_ptr_, left_channel,
- 640, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221C_Encode48(encoder_inst48_ptr_right_,
- right_channel, 640,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: Wrong rate for G722_1c.");
- return -1;
- }
- }
-
- memcpy(bitstream, out_bits, len_in_bytes);
- *bitstream_len_byte = len_in_bytes;
-
-  // Increment the read index; this tells the caller how far we have
-  // advanced in reading the audio buffer.
- in_audio_ix_read_ += 640 * num_channels_;
-
- return *bitstream_len_byte;
-}
-
-int16_t ACMG722_1C::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMG722_1C::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- int16_t ret;
-
- switch (operational_rate_) {
- case 24000: {
- ret = WebRtcG7221C_EncoderInit24(encoder_inst24_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221C_EncoderInit24(encoder_inst24_ptr_);
- }
- case 32000: {
- ret = WebRtcG7221C_EncoderInit32(encoder_inst32_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221C_EncoderInit32(encoder_inst32_ptr_);
- }
- case 48000: {
- ret = WebRtcG7221C_EncoderInit48(encoder_inst48_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221C_EncoderInit48(encoder_inst48_ptr_);
- }
- default: {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
-                   "InternalInitEncoder: Wrong rate for G722_1c.");
- return -1;
- }
- }
-}
-
-int16_t ACMG722_1C::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- switch (operational_rate_) {
- case 24000: {
- return WebRtcG7221C_DecoderInit24(decoder_inst24_ptr_);
- }
- case 32000: {
- return WebRtcG7221C_DecoderInit32(decoder_inst32_ptr_);
- }
- case 48000: {
- return WebRtcG7221C_DecoderInit48(decoder_inst48_ptr_);
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: Wrong rate for G722_1c.");
- return -1;
- }
- }
-}
-
-int32_t ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
-                 "CodecDef: decoder not initialized for G722_1c");
- return -1;
- }
-  // NetEQ has an array of pointers to WebRtcNetEQ_CodecDef.
-  // Get an entry of that array (the NetEQ wrapper will allocate memory)
-  // by calling "netEq->CodecDef", where "NETEQ_CODEC_G722_1_XX" would
-  // be the index of the entry.
-  // Fill up the given structure by calling
-  // "SET_CODEC_PAR" & "SET_G722_1C_XX_FUNCTIONS".
-  // Then return the structure back to NetEQ to add the codec to its
-  // database.
- switch (operational_rate_) {
- case 24000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1C_24, codec_inst.pltype,
- decoder_inst24_ptr_, 32000);
- SET_G722_1C_24_FUNCTIONS((codec_def));
- break;
- }
- case 32000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1C_32, codec_inst.pltype,
- decoder_inst32_ptr_, 32000);
- SET_G722_1C_32_FUNCTIONS((codec_def));
- break;
- }
- case 48000: {
-      SET_CODEC_PAR((codec_def), kDecoderG722_1C_48, codec_inst.pltype,
- decoder_inst48_ptr_, 32000);
- SET_G722_1C_48_FUNCTIONS((codec_def));
- break;
- }
- default: {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
-                   "CodecDef: Wrong rate for G722_1c.");
- return -1;
- }
- }
- return 0;
-}
-
-ACMGenericCodec* ACMG722_1C::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722_1C::InternalCreateEncoder() {
- if ((encoder_inst_ptr_ == NULL) || (encoder_inst_ptr_right_ == NULL)) {
- return -1;
- }
- switch (operational_rate_) {
- case 24000: {
- WebRtcG7221C_CreateEnc24(&encoder_inst24_ptr_);
- WebRtcG7221C_CreateEnc24(&encoder_inst24_ptr_right_);
- break;
- }
- case 32000: {
- WebRtcG7221C_CreateEnc32(&encoder_inst32_ptr_);
- WebRtcG7221C_CreateEnc32(&encoder_inst32_ptr_right_);
- break;
- }
- case 48000: {
- WebRtcG7221C_CreateEnc48(&encoder_inst48_ptr_);
- WebRtcG7221C_CreateEnc48(&encoder_inst48_ptr_right_);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: Wrong rate for G722_1c.");
- return -1;
- }
- }
- return 0;
-}
-
-void ACMG722_1C::DestructEncoderSafe() {
- encoder_exist_ = false;
- encoder_initialized_ = false;
- if (encoder_inst_ptr_ != NULL) {
- delete encoder_inst_ptr_;
- encoder_inst_ptr_ = NULL;
- }
- if (encoder_inst_ptr_right_ != NULL) {
- delete encoder_inst_ptr_right_;
- encoder_inst_ptr_right_ = NULL;
- }
- encoder_inst24_ptr_ = NULL;
- encoder_inst32_ptr_ = NULL;
- encoder_inst48_ptr_ = NULL;
-}
-
-int16_t ACMG722_1C::InternalCreateDecoder() {
- if (decoder_inst_ptr_ == NULL) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
-                 "InternalCreateDecoder: cannot create decoder");
- return -1;
- }
- switch (operational_rate_) {
- case 24000: {
- WebRtcG7221C_CreateDec24(&decoder_inst24_ptr_);
- break;
- }
- case 32000: {
- WebRtcG7221C_CreateDec32(&decoder_inst32_ptr_);
- break;
- }
- case 48000: {
- WebRtcG7221C_CreateDec48(&decoder_inst48_ptr_);
- break;
- }
- default: {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
-                   "InternalCreateDecoder: Wrong rate for G722_1c.");
- return -1;
- }
- }
- return 0;
-}
-
-void ACMG722_1C::DestructDecoderSafe() {
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (decoder_inst_ptr_ != NULL) {
- delete decoder_inst_ptr_;
- decoder_inst_ptr_ = NULL;
- }
- decoder_inst24_ptr_ = NULL;
- decoder_inst32_ptr_ = NULL;
- decoder_inst48_ptr_ = NULL;
-}
-
-void ACMG722_1C::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- delete ptr_inst;
- }
- return;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.h
deleted file mode 100644
index d8875aa2fbf..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.h
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221C_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221C_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct G722_1C_24_encinst_t_;
-struct G722_1C_24_decinst_t_;
-struct G722_1C_32_encinst_t_;
-struct G722_1C_32_decinst_t_;
-struct G722_1C_48_encinst_t_;
-struct G722_1C_48_decinst_t_;
-struct G722_1_Inst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMG722_1C : public ACMGenericCodec {
- public:
- explicit ACMG722_1C(int16_t codec_id);
- ~ACMG722_1C();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(
- uint8_t* bitstream,
- int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(
- WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(
- WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(
- uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(
- WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(
- void* ptr_inst);
-
- int32_t operational_rate_;
-
- G722_1_Inst_t_* encoder_inst_ptr_;
- G722_1_Inst_t_* encoder_inst_ptr_right_; // Used in stereo mode
- G722_1_Inst_t_* decoder_inst_ptr_;
-
-  // Only one set of these pointers is valid at any given time
- G722_1C_24_encinst_t_* encoder_inst24_ptr_;
- G722_1C_24_encinst_t_* encoder_inst24_ptr_right_;
- G722_1C_32_encinst_t_* encoder_inst32_ptr_;
- G722_1C_32_encinst_t_* encoder_inst32_ptr_right_;
- G722_1C_48_encinst_t_* encoder_inst48_ptr_;
- G722_1C_48_encinst_t_* encoder_inst48_ptr_right_;
-
-  // Only one of these pointers is valid at any given time
- G722_1C_24_decinst_t_* decoder_inst24_ptr_;
- G722_1C_32_decinst_t_* decoder_inst32_ptr_;
- G722_1C_48_decinst_t_* decoder_inst48_ptr_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221C_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.cc
deleted file mode 100644
index 406bb61e48d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.cc
+++ /dev/null
@@ -1,366 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_g729.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_G729
-// NOTE! G.729 is not included in the open-source package. Modify this file
-// or your codec API to match the function calls and names of the G.729 API
-// file you use.
-#include "g729_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_G729
-
-ACMG729::ACMG729(int16_t /* codec_id */)
-: encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- return;
-}
-
-ACMG729::~ACMG729() {
- return;
-}
-
-int16_t ACMG729::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMG729::EnableDTX() {
- return -1;
-}
-
-int16_t ACMG729::DisableDTX() {
- return -1;
-}
-
-int32_t ACMG729::ReplaceInternalDTXSafe(
- const bool /*replace_internal_dtx */) {
- return -1;
-}
-
-int32_t ACMG729::IsInternalDTXReplacedSafe(
- bool* /* internal_dtx_replaced */) {
- return -1;
-}
-
-int16_t ACMG729::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMG729::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMG729::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMG729::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMG729::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG729::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMG729::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMG729::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMG729::DestructDecoderSafe() {
- return;
-}
-
-void ACMG729::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-#else //===================== Actual Implementation =======================
-ACMG729::ACMG729(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- codec_id_ = codec_id;
- has_internal_dtx_ = true;
- return;
-}
-
-ACMG729::~ACMG729() {
- if (encoder_inst_ptr_ != NULL) {
- // Delete encoder memory
- WebRtcG729_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- // Delete decoder memory
- WebRtcG729_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMG729::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- // Initialize before entering the loop
- int16_t num_encoded_samples = 0;
- int16_t tmp_len_byte = 0;
- int16_t vad_decision = 0;
- *bitstream_len_byte = 0;
- while (num_encoded_samples < frame_len_smpl_) {
- // Call G.729 encoder with pointer to encoder memory, input
-    // audio, number of samples and bitstream
- tmp_len_byte = WebRtcG729_Encode(
- encoder_inst_ptr_, &in_audio_[in_audio_ix_read_], 80,
- (int16_t*)(&(bitstream[*bitstream_len_byte])));
-
-    // Increment the read index; this tells the caller how far we have
-    // advanced in reading the audio buffer.
- in_audio_ix_read_ += 80;
-
- // sanity check
- if (tmp_len_byte < 0) {
- // error has happened
- *bitstream_len_byte = 0;
- return -1;
- }
-
- // increment number of written bytes
- *bitstream_len_byte += tmp_len_byte;
- switch (tmp_len_byte) {
- case 0: {
- if (0 == num_encoded_samples) {
-          // This is the first 10 ms in this packet and no data was
-          // generated; perhaps DTX is enabled and the codec is not
-          // producing any bit-stream for this 10 ms. We do not
-          // continue encoding this frame.
- return 0;
- }
- break;
- }
- case 2: {
- // check if G.729 internal DTX is enabled
- if (has_internal_dtx_ && dtx_enabled_) {
- vad_decision = 0;
- for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
- vad_label_[n] = vad_decision;
- }
- }
-        // We got a SID frame and have to send out this packet no matter
-        // how much audio we have encoded.
- return *bitstream_len_byte;
- }
- case 10: {
- vad_decision = 1;
-        // This is a valid length; just continue encoding.
- break;
- }
- default: {
- return -1;
- }
- }
-
- // update number of encoded samples
- num_encoded_samples += 80;
- }
-
- // update VAD decision vector
- if (has_internal_dtx_ && !vad_decision && dtx_enabled_) {
- for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
- vad_label_[n] = vad_decision;
- }
- }
-
- // done encoding, return number of encoded bytes
- return *bitstream_len_byte;
-}
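The switch above keys on the number of bytes the G.729 encoder returns per 10 ms call: 0 means DTX produced nothing yet, 2 bytes is a SID (comfort-noise) frame, and 10 bytes is a regular speech frame. A small sketch mirroring that classification (hypothetical helper, not part of the original code):

#include <stdint.h>

enum class G729FrameKind { kEmpty, kSid, kSpeech, kInvalid };

// Classify a single 10 ms G.729 encoder output by its byte count,
// matching the cases handled in InternalEncode() above.
static G729FrameKind ClassifyG729Frame(int16_t len_bytes) {
  switch (len_bytes) {
    case 0:
      return G729FrameKind::kEmpty;    // DTX active, nothing produced.
    case 2:
      return G729FrameKind::kSid;      // Comfort-noise (SID) frame.
    case 10:
      return G729FrameKind::kSpeech;   // Regular 10 ms speech frame.
    default:
      return G729FrameKind::kInvalid;  // Anything else is an error.
  }
}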
-
-int16_t ACMG729::EnableDTX() {
- if (dtx_enabled_) {
- // DTX already enabled, do nothing
- return 0;
- } else if (encoder_exist_) {
- // Re-init the G.729 encoder to turn on DTX
- if (WebRtcG729_EncoderInit(encoder_inst_ptr_, 1) < 0) {
- return -1;
- }
- dtx_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-}
-
-int16_t ACMG729::DisableDTX() {
- if (!dtx_enabled_) {
-    // DTX already disabled, do nothing
- return 0;
- } else if (encoder_exist_) {
-    // Re-init the G.729 encoder to turn off DTX
- if (WebRtcG729_EncoderInit(encoder_inst_ptr_, 0) < 0) {
- return -1;
- }
- dtx_enabled_ = false;
- return 0;
- } else {
-    // Encoder doesn't exist, therefore disabling is harmless
- return 0;
- }
-}
-
-int32_t ACMG729::ReplaceInternalDTXSafe(const bool replace_internal_dtx) {
-  // This function is used to disable the G.729 built-in DTX and use an
-  // external one instead.
-
- if (replace_internal_dtx == has_internal_dtx_) {
- // Make sure we keep the DTX/VAD setting if possible
- bool old_enable_dtx = dtx_enabled_;
- bool old_enable_vad = vad_enabled_;
- ACMVADMode old_mode = vad_mode_;
- if (replace_internal_dtx) {
- // Disable internal DTX before enabling external DTX
- DisableDTX();
- } else {
- // Disable external DTX before enabling internal
- ACMGenericCodec::DisableDTX();
- }
- has_internal_dtx_ = !replace_internal_dtx;
- int16_t status = SetVADSafe(old_enable_dtx, old_enable_vad, old_mode);
- // Check if VAD status has changed from inactive to active, or if error was
- // reported
- if (status == 1) {
- vad_enabled_ = true;
- return status;
- } else if (status < 0) {
- has_internal_dtx_ = replace_internal_dtx;
- return -1;
- }
- }
- return 0;
-}
-
-int32_t ACMG729::IsInternalDTXReplacedSafe(bool* internal_dtx_replaced) {
-  // Get status of whether DTX is replaced or not
- *internal_dtx_replaced = !has_internal_dtx_;
- return 0;
-}
-
-int16_t ACMG729::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
-  // This function is not used. The G.729 decoder is called from inside NetEQ.
- return 0;
-}
-
-int16_t ACMG729::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- // Init G.729 encoder
- return WebRtcG729_EncoderInit(encoder_inst_ptr_,
- ((codec_params->enable_dtx) ? 1 : 0));
-}
-
-int16_t ACMG729::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // Init G.729 decoder
- return WebRtcG729_DecoderInit(decoder_inst_ptr_);
-}
-
-int32_t ACMG729::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
-    // TODO: Log the error.
- return -1;
- }
-
-  // Fill up the structure by calling
-  // "SET_CODEC_PAR" & "SET_G729_FUNCTIONS".
-  // Then call NetEQ to add the codec to its
-  // database.
- SET_CODEC_PAR((codec_def), kDecoderG729, codec_inst.pltype, decoder_inst_ptr_,
- 8000);
- SET_G729_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMG729::CreateInstance(void) {
- // Function not used
- return NULL;
-}
-
-int16_t ACMG729::InternalCreateEncoder() {
- // Create encoder memory
- return WebRtcG729_CreateEnc(&encoder_inst_ptr_);
-}
-
-void ACMG729::DestructEncoderSafe() {
- // Free encoder memory
- encoder_exist_ = false;
- encoder_initialized_ = false;
- if (encoder_inst_ptr_ != NULL) {
- WebRtcG729_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
-}
-
-int16_t ACMG729::InternalCreateDecoder() {
- // Create decoder memory
- return WebRtcG729_CreateDec(&decoder_inst_ptr_);
-}
-
-void ACMG729::DestructDecoderSafe() {
- // Free decoder memory
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (decoder_inst_ptr_ != NULL) {
- WebRtcG729_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
-}
-
-void ACMG729::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcG729_FreeEnc((G729_encinst_t_*) ptr_inst);
- }
- return;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.h
deleted file mode 100644
index 5cfff63b69a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct G729_encinst_t_;
-struct G729_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMG729 : public ACMGenericCodec {
- public:
- explicit ACMG729(int16_t codec_id);
- ~ACMG729();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(void* ptr_inst);
-
- int16_t EnableDTX();
-
- int16_t DisableDTX();
-
- int32_t ReplaceInternalDTXSafe(const bool replace_internal_dtx);
-
- int32_t IsInternalDTXReplacedSafe(bool* internal_dtx_replaced);
-
- G729_encinst_t_* encoder_inst_ptr_;
- G729_decinst_t_* decoder_inst_ptr_;
-
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.cc
deleted file mode 100644
index 0da6c99d21e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.cc
+++ /dev/null
@@ -1,349 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_g7291.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#ifdef WEBRTC_CODEC_G729_1
-// NOTE! G.729.1 is not included in the open-source package. Modify this file
-// or your codec API to match the function calls and names of the G.729.1 API
-// file you use.
-#include "g7291_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_G729_1
-
-ACMG729_1::ACMG729_1(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- my_rate_(32000),
- flag_8khz_(0),
- flag_g729_mode_(0) {
- return;
-}
-
-ACMG729_1::~ACMG729_1() {
- return;
-}
-
-int16_t ACMG729_1::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMG729_1::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMG729_1::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMG729_1::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMG729_1::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMG729_1::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG729_1::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMG729_1::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMG729_1::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMG729_1::DestructDecoderSafe() {
- return;
-}
-
-void ACMG729_1::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMG729_1::SetBitRateSafe(const int32_t /*rate*/) {
- return -1;
-}
-
-#else //===================== Actual Implementation =======================
-
-struct G729_1_inst_t_;
-
-ACMG729_1::ACMG729_1(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- my_rate_(32000), // Default rate.
- flag_8khz_(0),
- flag_g729_mode_(0) {
-  // TODO(tlegrand): We should add codec_id as an input variable to the
- // constructor of ACMGenericCodec.
- codec_id_ = codec_id;
- return;
-}
-
-ACMG729_1::~ACMG729_1() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcG7291_Free(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcG7291_Free(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMG729_1::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
-
- // Initialize before entering the loop
- int16_t num_encoded_samples = 0;
- *bitstream_len_byte = 0;
-
- int16_t byte_length_frame = 0;
-
- // Derive number of 20ms frames per encoded packet.
- // [1,2,3] <=> [20,40,60]ms <=> [320,640,960] samples
- int16_t num_20ms_frames = (frame_len_smpl_ / 320);
- // Byte length for the frame. +1 is for rate information.
- byte_length_frame = my_rate_ / (8 * 50) * num_20ms_frames + (1 -
- flag_g729_mode_);
-
-  // The following might be revised if we have G729.1 Annex C (support for DTX).
- do {
- *bitstream_len_byte = WebRtcG7291_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- (int16_t*) bitstream,
- my_rate_, num_20ms_frames);
-
-    // Increment the read index; this tells the caller how far we have
-    // advanced in reading the audio buffer.
- in_audio_ix_read_ += 160;
-
- // sanity check
- if (*bitstream_len_byte < 0) {
- // error has happened
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: Encode error for G729_1");
- *bitstream_len_byte = 0;
- return -1;
- }
-
- num_encoded_samples += 160;
- } while (*bitstream_len_byte == 0);
-
- // This criteria will change if we have Annex C.
- if (*bitstream_len_byte != byte_length_frame) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: Encode error for G729_1");
- *bitstream_len_byte = 0;
- return -1;
- }
-
- if (num_encoded_samples != frame_len_smpl_) {
- *bitstream_len_byte = 0;
- return -1;
- }
-
- return *bitstream_len_byte;
-}
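The expected packet length checked above follows from the bit rate: my_rate_ / (8 * 50) is the number of bytes per 20 ms frame, multiplied by the number of 20 ms frames in the packet, plus one extra byte of rate information when flag_g729_mode_ is 0. For example, 32000 bit/s with a single 20 ms frame gives 80 + 1 = 81 bytes. A minimal sketch of that arithmetic (hypothetical helper mirroring the formula above):

#include <stdint.h>

// Expected G.729.1 packet size in bytes for |rate_bps|, |num_20ms_frames|
// 20 ms frames, and |g729_mode| (0 adds one byte of rate information),
// mirroring the formula used in InternalEncode() above.
static int16_t ExpectedG7291PacketBytes(int32_t rate_bps,
                                        int16_t num_20ms_frames,
                                        int16_t g729_mode) {
  return static_cast<int16_t>(rate_bps / (8 * 50) * num_20ms_frames +
                              (1 - g729_mode));
}

// Example: ExpectedG7291PacketBytes(32000, 1, 0) == 81.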
-
-int16_t ACMG729_1::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMG729_1::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
-  // Set the bit rate and initialize.
- my_rate_ = codec_params->codec_inst.rate;
- return SetBitRateSafe((uint32_t) my_rate_);
-}
-
-int16_t ACMG729_1::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- if (WebRtcG7291_DecoderInit(decoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: init decoder failed for G729_1");
- return -1;
- }
- return 0;
-}
-
-int32_t ACMG729_1::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
-                 "CodecDef: Decoder uninitialized for G729_1");
- return -1;
- }
-
-  // Fill up the structure by calling
-  // "SET_CODEC_PAR" & "SET_G729_1_FUNCTIONS".
-  // Then call NetEQ to add the codec to its
-  // database.
- SET_CODEC_PAR((codec_def), kDecoderG729_1, codec_inst.pltype,
- decoder_inst_ptr_, 16000);
- SET_G729_1_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMG729_1::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG729_1::InternalCreateEncoder() {
- if (WebRtcG7291_Create(&encoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: create encoder failed for G729_1");
- return -1;
- }
- return 0;
-}
-
-void ACMG729_1::DestructEncoderSafe() {
- encoder_exist_ = false;
- encoder_initialized_ = false;
- if (encoder_inst_ptr_ != NULL) {
- WebRtcG7291_Free(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
-}
-
-int16_t ACMG729_1::InternalCreateDecoder() {
- if (WebRtcG7291_Create(&decoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateDecoder: create decoder failed for G729_1");
- return -1;
- }
- return 0;
-}
-
-void ACMG729_1::DestructDecoderSafe() {
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (decoder_inst_ptr_ != NULL) {
- WebRtcG7291_Free(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
-}
-
-void ACMG729_1::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- // WebRtcG7291_Free((G729_1_inst_t*)ptrInst);
- }
- return;
-}
-
-int16_t ACMG729_1::SetBitRateSafe(const int32_t rate) {
-  // Allowed rates: { 8000, 12000, 14000, 16000, 18000, 20000,
-  //                 22000, 24000, 26000, 28000, 30000, 32000};
-  // TODO(tlegrand): This check exists in one other place too. Should be
-  // possible to reuse code.
- switch (rate) {
- case 8000: {
- my_rate_ = 8000;
- break;
- }
- case 12000: {
- my_rate_ = 12000;
- break;
- }
- case 14000: {
- my_rate_ = 14000;
- break;
- }
- case 16000: {
- my_rate_ = 16000;
- break;
- }
- case 18000: {
- my_rate_ = 18000;
- break;
- }
- case 20000: {
- my_rate_ = 20000;
- break;
- }
- case 22000: {
- my_rate_ = 22000;
- break;
- }
- case 24000: {
- my_rate_ = 24000;
- break;
- }
- case 26000: {
- my_rate_ = 26000;
- break;
- }
- case 28000: {
- my_rate_ = 28000;
- break;
- }
- case 30000: {
- my_rate_ = 30000;
- break;
- }
- case 32000: {
- my_rate_ = 32000;
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: Invalid rate G729_1");
- return -1;
- }
- }
-
- // Re-init with new rate
- if (WebRtcG7291_EncoderInit(encoder_inst_ptr_, my_rate_, flag_8khz_,
- flag_g729_mode_) >= 0) {
- encoder_params_.codec_inst.rate = my_rate_;
- return 0;
- } else {
- return -1;
- }
-}
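Since the TODO above notes that this bit-rate check is duplicated elsewhere, one way to factor it out would be a small lookup helper over the allowed-rate table; a hypothetical sketch, not part of the original code:

#include <stddef.h>
#include <stdint.h>

// Returns true if |rate| is one of the bit rates supported by G.729.1
// (8000, and 12000 to 32000 in 2000 bit/s steps), as listed above.
static bool IsValidG7291Rate(int32_t rate) {
  static const int32_t kRates[] = {8000,  12000, 14000, 16000, 18000, 20000,
                                   22000, 24000, 26000, 28000, 30000, 32000};
  for (size_t i = 0; i < sizeof(kRates) / sizeof(kRates[0]); ++i) {
    if (rate == kRates[i])
      return true;
  }
  return false;
}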
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.h
deleted file mode 100644
index bac7faf8368..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7291_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7291_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct G729_1_inst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMG729_1 : public ACMGenericCodec {
- public:
- explicit ACMG729_1(int16_t codec_id);
- ~ACMG729_1();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(void* ptr_inst);
-
- int16_t SetBitRateSafe(const int32_t rate);
-
- G729_1_inst_t_* encoder_inst_ptr_;
- G729_1_inst_t_* decoder_inst_ptr_;
-
- uint16_t my_rate_;
- int16_t flag_8khz_;
- int16_t flag_g729_mode_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7291_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc
deleted file mode 100644
index 4e53b873a1f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc
+++ /dev/null
@@ -1,1263 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-#include <assert.h>
-#include <string.h>
-
-#include "webrtc/common_audio/vad/include/webrtc_vad.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-// Enum for CNG
-enum {
- kMaxPLCParamsCNG = WEBRTC_CNG_MAX_LPC_ORDER,
- kNewCNGNumPLCParams = 8
-};
-
-// Interval for sending new CNG parameters (SID frames) is 100 msec.
-enum {
- kCngSidIntervalMsec = 100
-};
-
-// We set some of the variables to invalid values as a checkpoint
-// for whether proper initialization has happened. Another approach is
-// to initialize to a default codec that we are sure is always included.
-ACMGenericCodec::ACMGenericCodec()
- : in_audio_ix_write_(0),
- in_audio_ix_read_(0),
- in_timestamp_ix_write_(0),
- in_audio_(NULL),
- in_timestamp_(NULL),
- frame_len_smpl_(-1), // invalid value
- num_channels_(1),
- codec_id_(-1), // invalid value
- num_missed_samples_(0),
- encoder_exist_(false),
- decoder_exist_(false),
- encoder_initialized_(false),
- decoder_initialized_(false),
- registered_in_neteq_(false),
- has_internal_dtx_(false),
- ptr_vad_inst_(NULL),
- vad_enabled_(false),
- vad_mode_(VADNormal),
- dtx_enabled_(false),
- ptr_dtx_inst_(NULL),
- num_lpc_params_(kNewCNGNumPLCParams),
- sent_cn_previous_(false),
- is_master_(true),
- prev_frame_cng_(0),
- neteq_decode_lock_(NULL),
- codec_wrapper_lock_(*RWLockWrapper::CreateRWLock()),
- last_encoded_timestamp_(0),
- last_timestamp_(0xD87F3F9F),
- is_audio_buff_fresh_(true),
- unique_id_(0) {
- // Initialize VAD vector.
- for (int i = 0; i < MAX_FRAME_SIZE_10MSEC; i++) {
- vad_label_[i] = 0;
- }
- // Nullify memory for encoder and decoder, and set payload type to an
- // invalid value.
- memset(&encoder_params_, 0, sizeof(WebRtcACMCodecParams));
- encoder_params_.codec_inst.pltype = -1;
- memset(&decoder_params_, 0, sizeof(WebRtcACMCodecParams));
- decoder_params_.codec_inst.pltype = -1;
-}
-
-ACMGenericCodec::~ACMGenericCodec() {
- // Check all the members which are pointers, and if they are not NULL
- // delete/free them.
- if (ptr_vad_inst_ != NULL) {
- WebRtcVad_Free(ptr_vad_inst_);
- ptr_vad_inst_ = NULL;
- }
- if (in_audio_ != NULL) {
- delete[] in_audio_;
- in_audio_ = NULL;
- }
- if (in_timestamp_ != NULL) {
- delete[] in_timestamp_;
- in_timestamp_ = NULL;
- }
- if (ptr_dtx_inst_ != NULL) {
- WebRtcCng_FreeEnc(ptr_dtx_inst_);
- ptr_dtx_inst_ = NULL;
- }
- delete &codec_wrapper_lock_;
-}
-
-int32_t ACMGenericCodec::Add10MsData(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length_smpl,
- const uint8_t audio_channel) {
- WriteLockScoped wl(codec_wrapper_lock_);
- return Add10MsDataSafe(timestamp, data, length_smpl, audio_channel);
-}
-
-int32_t ACMGenericCodec::Add10MsDataSafe(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length_smpl,
- const uint8_t audio_channel) {
- // The codec expects to get data in correct sampling rate. Get the sampling
- // frequency of the codec.
- uint16_t plfreq_hz;
- if (EncoderSampFreq(plfreq_hz) < 0) {
- return -1;
- }
-
- // Sanity check to make sure the length of the input corresponds to 10 ms.
- if ((plfreq_hz / 100) != length_smpl) {
- // This is not 10 ms of audio, given the sampling frequency of the codec.
- return -1;
- }
-
- if (last_timestamp_ == timestamp) {
- // Same timestamp as the last time, overwrite.
- if ((in_audio_ix_write_ >= length_smpl * audio_channel) &&
- (in_timestamp_ix_write_ > 0)) {
- in_audio_ix_write_ -= length_smpl * audio_channel;
- in_timestamp_ix_write_--;
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, unique_id_,
- "Adding 10ms with previous timestamp, overwriting the "
- "previous 10ms");
- } else {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, unique_id_,
- "Adding 10ms with previous timestamp, this will sound bad");
- }
- }
-
- last_timestamp_ = timestamp;
-
- // If the data exceeds the buffer size, we throw away the oldest data and
- // add the newly received 10 msec at the end.
- if ((in_audio_ix_write_ + length_smpl * audio_channel) >
- AUDIO_BUFFER_SIZE_W16) {
- // Get the number of samples to be overwritten.
- int16_t missed_samples = in_audio_ix_write_ + length_smpl * audio_channel -
- AUDIO_BUFFER_SIZE_W16;
-
- // Move the data (overwrite the old data).
- memmove(in_audio_, in_audio_ + missed_samples,
- (AUDIO_BUFFER_SIZE_W16 - length_smpl * audio_channel) *
- sizeof(int16_t));
-
- // Copy the new data.
- memcpy(in_audio_ + (AUDIO_BUFFER_SIZE_W16 - length_smpl * audio_channel),
- data, length_smpl * audio_channel * sizeof(int16_t));
-
- // Get the number of 10 ms blocks which are overwritten.
-    int16_t missed_10ms_blocks = static_cast<int16_t>(
- (missed_samples / audio_channel * 100) / plfreq_hz);
-
- // Move the timestamps.
- memmove(in_timestamp_, in_timestamp_ + missed_10ms_blocks,
- (in_timestamp_ix_write_ - missed_10ms_blocks) * sizeof(uint32_t));
- in_timestamp_ix_write_ -= missed_10ms_blocks;
- assert(in_timestamp_ix_write_ >= 0);
- in_timestamp_[in_timestamp_ix_write_] = timestamp;
- in_timestamp_ix_write_++;
-
- // Buffer is full.
- in_audio_ix_write_ = AUDIO_BUFFER_SIZE_W16;
- IncreaseNoMissedSamples(missed_samples);
- is_audio_buff_fresh_ = false;
- return -missed_samples;
- }
-
- // Store the input data in our data buffer.
- memcpy(in_audio_ + in_audio_ix_write_, data,
- length_smpl * audio_channel * sizeof(int16_t));
- in_audio_ix_write_ += length_smpl * audio_channel;
-
- assert(in_timestamp_ix_write_ < TIMESTAMP_BUFFER_SIZE_W32);
- assert(in_timestamp_ix_write_ >= 0);
-
- in_timestamp_[in_timestamp_ix_write_] = timestamp;
- in_timestamp_ix_write_++;
- is_audio_buff_fresh_ = false;
- return 0;
-}
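The overflow branch above implements a drop-oldest policy: when a new 10 ms block would overrun the fixed audio buffer, the oldest samples (and the corresponding timestamps) are shifted out and the new block is appended at the end. A minimal standalone sketch of that policy on a bare sample buffer, without the timestamp bookkeeping (hypothetical helper):

#include <stdint.h>
#include <string.h>

// Append |num_new| samples to a fixed-capacity buffer, dropping the oldest
// samples when there is not enough room. |*write_index| is the current fill
// level and is updated in place. Returns the number of samples dropped.
// Assumes 0 < num_new <= capacity.
static int AppendDroppingOldest(int16_t* buffer, int capacity,
                                int* write_index, const int16_t* new_samples,
                                int num_new) {
  int dropped = 0;
  if (*write_index + num_new > capacity) {
    dropped = *write_index + num_new - capacity;
    // Shift the surviving samples to the start of the buffer.
    memmove(buffer, buffer + dropped,
            (*write_index - dropped) * sizeof(int16_t));
    *write_index -= dropped;
  }
  memcpy(buffer + *write_index, new_samples, num_new * sizeof(int16_t));
  *write_index += num_new;
  return dropped;
}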
-
-bool ACMGenericCodec::HasFrameToEncode() const {
- ReadLockScoped lockCodec(codec_wrapper_lock_);
- if (in_audio_ix_write_ < frame_len_smpl_ * num_channels_)
- return false;
- return true;
-}
-
-int16_t ACMGenericCodec::Encode(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- uint32_t* timestamp,
- WebRtcACMEncodingType* encoding_type) {
- if (!HasFrameToEncode()) {
- // There is not enough audio
- *timestamp = 0;
- *bitstream_len_byte = 0;
-    // Doesn't really matter what this parameter is set to.
- *encoding_type = kNoEncoding;
- return 0;
- }
- WriteLockScoped lockCodec(codec_wrapper_lock_);
- ReadLockScoped lockNetEq(*neteq_decode_lock_);
-
-  // Not all codecs accept the whole frame to be pushed into the encoder at
-  // once. Some codecs need to be fed a specific number of samples different
-  // from the frame size. If this is the case, |my_basic_coding_block_smpl|
-  // will report a number different from 0, and we will loop over calls to
-  // the encoder further down, until we have encoded a complete frame.
- const int16_t my_basic_coding_block_smpl =
- ACMCodecDB::BasicCodingBlock(codec_id_);
- if (my_basic_coding_block_smpl < 0 || !encoder_initialized_ ||
- !encoder_exist_) {
- // This should not happen, but in case it does, report no encoding done.
- *timestamp = 0;
- *bitstream_len_byte = 0;
- *encoding_type = kNoEncoding;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EncodeSafe: error, basic coding sample block is negative");
- return -1;
- }
- // This makes the internal encoder read from the beginning of the buffer.
- in_audio_ix_read_ = 0;
- *timestamp = in_timestamp_[0];
-
-  // Process the audio through VAD. The function will set |vad_label_|.
-  // If VAD is disabled all entries in |vad_label_| are set to ONE (active).
- int16_t status = 0;
- int16_t dtx_processed_samples = 0;
- status = ProcessFrameVADDTX(bitstream, bitstream_len_byte,
- &dtx_processed_samples);
- if (status < 0) {
- *timestamp = 0;
- *bitstream_len_byte = 0;
- *encoding_type = kNoEncoding;
- } else {
- if (dtx_processed_samples > 0) {
-      // DTX has processed some samples, and even if a bit-stream is generated
-      // we should not do any encoding (normally there won't be enough data).
-
-      // Setting the following makes sure that the move of audio data and
-      // timestamps is done correctly.
- in_audio_ix_read_ = dtx_processed_samples;
-      // This will let the owner of ACMGenericCodec know that the
-      // generated bit-stream is DTX, so the correct payload type is used.
- uint16_t samp_freq_hz;
- EncoderSampFreq(samp_freq_hz);
- if (samp_freq_hz == 8000) {
- *encoding_type = kPassiveDTXNB;
- } else if (samp_freq_hz == 16000) {
- *encoding_type = kPassiveDTXWB;
- } else if (samp_freq_hz == 32000) {
- *encoding_type = kPassiveDTXSWB;
- } else if (samp_freq_hz == 48000) {
- *encoding_type = kPassiveDTXFB;
- } else {
- status = -1;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EncodeSafe: Wrong sampling frequency for DTX.");
- }
-
- // Transport empty frame if we have an empty bitstream.
- if ((*bitstream_len_byte == 0) &&
- (sent_cn_previous_ ||
- ((in_audio_ix_write_ - in_audio_ix_read_) <= 0))) {
- // Makes sure we transmit an empty frame.
- *bitstream_len_byte = 1;
- *encoding_type = kNoEncoding;
- }
- sent_cn_previous_ = true;
- } else {
- // We should encode the audio frame. Either VAD and/or DTX is off, or the
- // audio was considered "active".
-
- sent_cn_previous_ = false;
- if (my_basic_coding_block_smpl == 0) {
- // This codec can handle all allowed frame sizes as basic coding block.
- status = InternalEncode(bitstream, bitstream_len_byte);
- if (status < 0) {
-          // TODO(tlegrand): Maybe reset the encoder to be fresh for the next
-          // frame.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding,
- unique_id_, "EncodeSafe: error in internal_encode");
- *bitstream_len_byte = 0;
- *encoding_type = kNoEncoding;
- }
- } else {
-        // A basic coding block for this codec is defined, so we loop over the
-        // audio in steps of the basic coding block.
- int16_t tmp_bitstream_len_byte;
-
- // Reset the variables which will be incremented in the loop.
- *bitstream_len_byte = 0;
- do {
- status = InternalEncode(&bitstream[*bitstream_len_byte],
- &tmp_bitstream_len_byte);
- *bitstream_len_byte += tmp_bitstream_len_byte;
-
-          // Guard against errors and too large payloads.
- if ((status < 0) || (*bitstream_len_byte > MAX_PAYLOAD_SIZE_BYTE)) {
-            // An error has happened, and even if we are in the middle of a
-            // full frame we have to exit. Whatever bits are in the buffer are
-            // probably corrupted, so we ignore them.
- *bitstream_len_byte = 0;
- *encoding_type = kNoEncoding;
- // We might have come here because of the second condition.
- status = -1;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding,
- unique_id_, "EncodeSafe: error in InternalEncode");
- // break from the loop
- break;
- }
- } while (in_audio_ix_read_ < frame_len_smpl_ * num_channels_);
- }
- if (status >= 0) {
- *encoding_type = (vad_label_[0] == 1) ? kActiveNormalEncoded :
- kPassiveNormalEncoded;
- // Transport empty frame if we have an empty bitstream.
- if ((*bitstream_len_byte == 0) &&
- ((in_audio_ix_write_ - in_audio_ix_read_) <= 0)) {
- // Makes sure we transmit an empty frame.
- *bitstream_len_byte = 1;
- *encoding_type = kNoEncoding;
- }
- }
- }
- }
-
-  // Move the timestamp buffer according to the number of 10 ms blocks
-  // that have been read.
- uint16_t samp_freq_hz;
- EncoderSampFreq(samp_freq_hz);
- int16_t num_10ms_blocks = static_cast<int16_t>(
- (in_audio_ix_read_ / num_channels_ * 100) / samp_freq_hz);
- if (in_timestamp_ix_write_ > num_10ms_blocks) {
- memmove(in_timestamp_, in_timestamp_ + num_10ms_blocks,
- (in_timestamp_ix_write_ - num_10ms_blocks) * sizeof(int32_t));
- }
- in_timestamp_ix_write_ -= num_10ms_blocks;
- assert(in_timestamp_ix_write_ >= 0);
- // Remove encoded audio and move next audio to be encoded to the beginning
- // of the buffer. Accordingly, adjust the read and write indices.
- if (in_audio_ix_read_ < in_audio_ix_write_) {
- memmove(in_audio_, &in_audio_[in_audio_ix_read_],
- (in_audio_ix_write_ - in_audio_ix_read_) * sizeof(int16_t));
- }
- in_audio_ix_write_ -= in_audio_ix_read_;
-  assert(in_audio_ix_write_ >= 0);
- in_audio_ix_read_ = 0;
- last_encoded_timestamp_ = *timestamp;
- return (status < 0) ? (-1) : (*bitstream_len_byte);
-}
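For orientation, here is a minimal arithmetic sketch (not part of the original file) of the 10 ms bookkeeping at the end of Encode(). It assumes a 16 kHz mono encoder that has just consumed a 20 ms frame and reuses the same formula the function applies when shifting the timestamp buffer.

// Standalone illustration with assumed values; not WebRTC code.
#include <cassert>
#include <cstdint>

int main() {
  const uint16_t samp_freq_hz = 16000;  // assumed encoder sampling rate
  const int num_channels = 1;           // assumed mono
  const int in_audio_ix_read = 320;     // 20 ms * 16 samples/ms consumed

  // Same expression as in Encode(): samples consumed per channel, converted
  // to the number of 10 ms timestamp slots that can be shifted out.
  const int16_t num_10ms_blocks = static_cast<int16_t>(
      (in_audio_ix_read / num_channels * 100) / samp_freq_hz);

  assert(num_10ms_blocks == 2);  // two 10 ms timestamps are retired
  return 0;
}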
-
-int16_t ACMGenericCodec::Decode(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) {
- WriteLockScoped wl(codec_wrapper_lock_);
- return DecodeSafe(bitstream, bitstream_len_byte, audio, audio_samples,
- speech_type);
-}
-
-bool ACMGenericCodec::EncoderInitialized() {
- ReadLockScoped rl(codec_wrapper_lock_);
- return encoder_initialized_;
-}
-
-bool ACMGenericCodec::DecoderInitialized() {
- ReadLockScoped rl(codec_wrapper_lock_);
- return decoder_initialized_;
-}
-
-int32_t ACMGenericCodec::RegisterInNetEq(ACMNetEQ* neteq,
- const CodecInst& codec_inst) {
- WebRtcNetEQ_CodecDef codec_def;
- WriteLockScoped wl(codec_wrapper_lock_);
-
- if (CodecDef(codec_def, codec_inst) < 0) {
- // Failed to register the decoder.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "RegisterInNetEq: error, failed to register");
- registered_in_neteq_ = false;
- return -1;
- } else {
- if (neteq->AddCodec(&codec_def, is_master_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "RegisterInNetEq: error, failed to add codec");
- registered_in_neteq_ = false;
- return -1;
- }
- // Succeeded registering the decoder.
- registered_in_neteq_ = true;
- return 0;
- }
-}
-
-int16_t ACMGenericCodec::EncoderParams(WebRtcACMCodecParams* enc_params) {
- ReadLockScoped rl(codec_wrapper_lock_);
- return EncoderParamsSafe(enc_params);
-}
-
-int16_t ACMGenericCodec::EncoderParamsSafe(WebRtcACMCodecParams* enc_params) {
- // Codec parameters are valid only if the encoder is initialized.
- if (encoder_initialized_) {
- int32_t current_rate;
- memcpy(enc_params, &encoder_params_, sizeof(WebRtcACMCodecParams));
- current_rate = enc_params->codec_inst.rate;
- CurrentRate(current_rate);
- enc_params->codec_inst.rate = current_rate;
- return 0;
- } else {
- enc_params->codec_inst.plname[0] = '\0';
- enc_params->codec_inst.pltype = -1;
- enc_params->codec_inst.pacsize = 0;
- enc_params->codec_inst.rate = 0;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EncoderParamsSafe: error, encoder not initialized");
- return -1;
- }
-}
-
-bool ACMGenericCodec::DecoderParams(WebRtcACMCodecParams* dec_params,
- const uint8_t payload_type) {
- ReadLockScoped rl(codec_wrapper_lock_);
- return DecoderParamsSafe(dec_params, payload_type);
-}
-
-bool ACMGenericCodec::DecoderParamsSafe(WebRtcACMCodecParams* dec_params,
- const uint8_t payload_type) {
- // Decoder parameters are valid only if decoder is initialized.
- if (decoder_initialized_) {
- if (payload_type == decoder_params_.codec_inst.pltype) {
- memcpy(dec_params, &decoder_params_, sizeof(WebRtcACMCodecParams));
- return true;
- }
- }
-
- dec_params->codec_inst.plname[0] = '\0';
- dec_params->codec_inst.pltype = -1;
- dec_params->codec_inst.pacsize = 0;
- dec_params->codec_inst.rate = 0;
- return false;
-}
-
-int16_t ACMGenericCodec::ResetEncoder() {
- WriteLockScoped lockCodec(codec_wrapper_lock_);
- ReadLockScoped lockNetEq(*neteq_decode_lock_);
- return ResetEncoderSafe();
-}
-
-int16_t ACMGenericCodec::ResetEncoderSafe() {
- if (!encoder_exist_ || !encoder_initialized_) {
-    // We don't reset if the encoder doesn't exist or isn't initialized yet.
- return 0;
- }
-
- in_audio_ix_write_ = 0;
- in_audio_ix_read_ = 0;
- in_timestamp_ix_write_ = 0;
- num_missed_samples_ = 0;
- is_audio_buff_fresh_ = true;
- memset(in_audio_, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t));
- memset(in_timestamp_, 0, TIMESTAMP_BUFFER_SIZE_W32 * sizeof(int32_t));
-
- // Store DTX/VAD parameters.
- bool enable_vad = vad_enabled_;
- bool enable_dtx = dtx_enabled_;
- ACMVADMode mode = vad_mode_;
-
- // Reset the encoder.
- if (InternalResetEncoder() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "ResetEncoderSafe: error in reset encoder");
- return -1;
- }
-
- // Disable DTX & VAD to delete the states and have a fresh start.
- DisableDTX();
- DisableVAD();
-
- // Set DTX/VAD.
- int status = SetVADSafe(&enable_dtx, &enable_vad, &mode);
- dtx_enabled_ = enable_dtx;
- vad_enabled_ = enable_vad;
- vad_mode_ = mode;
- return status;
-}
-
-int16_t ACMGenericCodec::InternalResetEncoder() {
-  // Call the codec's internal encoder initialization/reset function.
- return InternalInitEncoder(&encoder_params_);
-}
-
-int16_t ACMGenericCodec::InitEncoder(WebRtcACMCodecParams* codec_params,
- bool force_initialization) {
- WriteLockScoped lockCodec(codec_wrapper_lock_);
- ReadLockScoped lockNetEq(*neteq_decode_lock_);
- return InitEncoderSafe(codec_params, force_initialization);
-}
-
-int16_t ACMGenericCodec::InitEncoderSafe(WebRtcACMCodecParams* codec_params,
- bool force_initialization) {
- // Check if we got a valid set of parameters.
- int mirrorID;
- int codec_number = ACMCodecDB::CodecNumber(&(codec_params->codec_inst),
- &mirrorID);
- if (codec_number < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitEncoderSafe: error, codec number negative");
- return -1;
- }
- // Check if the parameters are for this codec.
- if ((codec_id_ >= 0) && (codec_id_ != codec_number) &&
- (codec_id_ != mirrorID)) {
- // The current codec is not the same as the one given by codec_params.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitEncoderSafe: current codec is not the same as the one "
- "given by codec_params");
- return -1;
- }
-
- if (!CanChangeEncodingParam(codec_params->codec_inst)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitEncoderSafe: cannot change encoding parameters");
- return -1;
- }
-
- if (encoder_initialized_ && !force_initialization) {
- // The encoder is already initialized, and we don't want to force
- // initialization.
- return 0;
- }
- int16_t status;
- if (!encoder_exist_) {
- // New encoder, start with creating.
- encoder_initialized_ = false;
- status = CreateEncoder();
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitEncoderSafe: cannot create encoder");
- return -1;
- } else {
- encoder_exist_ = true;
- }
- }
- frame_len_smpl_ = (codec_params->codec_inst).pacsize;
- num_channels_ = codec_params->codec_inst.channels;
- status = InternalInitEncoder(codec_params);
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitEncoderSafe: error in init encoder");
- encoder_initialized_ = false;
- return -1;
- } else {
- // Store encoder parameters.
- memcpy(&encoder_params_, codec_params, sizeof(WebRtcACMCodecParams));
- encoder_initialized_ = true;
- if (in_audio_ == NULL) {
- in_audio_ = new int16_t[AUDIO_BUFFER_SIZE_W16];
- if (in_audio_ == NULL) {
- return -1;
- }
- }
- if (in_timestamp_ == NULL) {
- in_timestamp_ = new uint32_t[TIMESTAMP_BUFFER_SIZE_W32];
- if (in_timestamp_ == NULL) {
- return -1;
- }
- }
- // Fresh start for audio buffer.
- is_audio_buff_fresh_ = true;
- memset(in_audio_, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t));
- memset(in_timestamp_, 0, sizeof(uint32_t) * TIMESTAMP_BUFFER_SIZE_W32);
- in_audio_ix_write_ = 0;
- in_audio_ix_read_ = 0;
- in_timestamp_ix_write_ = 0;
- }
- status = SetVADSafe(&codec_params->enable_dtx, &codec_params->enable_vad,
- &codec_params->vad_mode);
- return status;
-}
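As a rough illustration of how this initialization path might be driven from outside, here is a hedged sketch (not from the original file). It assumes the caller owns a concrete ACMGenericCodec subclass, and the field values below (payload name "L16", payload type 96, 10 ms packets at 16 kHz) are illustrative guesses at plausible codec-database entries, not recommended settings.

// Hypothetical helper; the codec pointer and all parameter values are
// assumptions made for this sketch.
#include <cstring>

#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"

namespace webrtc {
namespace acm1 {

int16_t InitSendCodec(ACMGenericCodec* codec) {
  WebRtcACMCodecParams params;
  memset(&params, 0, sizeof(params));
  strncpy(params.codec_inst.plname, "L16",
          sizeof(params.codec_inst.plname) - 1);  // assumed codec
  params.codec_inst.pltype = 96;     // assumed dynamic payload type
  params.codec_inst.plfreq = 16000;  // assumed sampling rate
  params.codec_inst.pacsize = 160;   // 10 ms at 16 kHz
  params.codec_inst.rate = 256000;   // 16 kHz * 16-bit linear PCM
  params.codec_inst.channels = 1;    // mono
  params.enable_dtx = false;
  params.enable_vad = false;
  params.vad_mode = VADNormal;
  // Force (re)initialization regardless of the current encoder state.
  return codec->InitEncoder(&params, true);
}

}  // namespace acm1
}  // namespace webrtc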
-
-// TODO(tlegrand): Remove the function CanChangeEncodingParam. Returns true
-// for all codecs.
-bool ACMGenericCodec::CanChangeEncodingParam(CodecInst& /*codec_inst*/) {
- return true;
-}
-
-void ACMGenericCodec::CurrentRate(int32_t& /* rate_bps */) {
- return;
-}
-
-int16_t ACMGenericCodec::InitDecoder(WebRtcACMCodecParams* codec_params,
- bool force_initialization) {
-  WriteLockScoped lockCodec(codec_wrapper_lock_);
- WriteLockScoped lockNetEq(*neteq_decode_lock_);
- return InitDecoderSafe(codec_params, force_initialization);
-}
-
-int16_t ACMGenericCodec::InitDecoderSafe(WebRtcACMCodecParams* codec_params,
- bool force_initialization) {
- int mirror_id;
- // Check if we got a valid set of parameters.
- int codec_number = ACMCodecDB::ReceiverCodecNumber(&codec_params->codec_inst,
- &mirror_id);
- if (codec_number < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitDecoderSafe: error, invalid codec number");
- return -1;
- }
- // Check if the parameters are for this codec.
- if ((codec_id_ >= 0) && (codec_id_ != codec_number) &&
- (codec_id_ != mirror_id)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitDecoderSafe: current codec is not the same as the one "
- "given by codec_params");
- // The current codec is not the same as the one given by codec_params.
- return -1;
- }
-
- if (decoder_initialized_ && !force_initialization) {
- // The decoder is already initialized, and we don't want to force
- // initialization.
- return 0;
- }
-
- int16_t status;
- if (!decoder_exist_) {
- // New decoder, start with creating.
- decoder_initialized_ = false;
- status = CreateDecoder();
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitDecoderSafe: cannot create decoder");
- return -1;
- } else {
- decoder_exist_ = true;
- }
- }
-
- status = InternalInitDecoder(codec_params);
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitDecoderSafe: cannot init decoder");
- decoder_initialized_ = false;
- return -1;
- } else {
- // Store decoder parameters.
- SaveDecoderParamSafe(codec_params);
- decoder_initialized_ = true;
- }
- return 0;
-}
-
-int16_t ACMGenericCodec::ResetDecoder(int16_t payload_type) {
- WriteLockScoped lockCodec(codec_wrapper_lock_);
- WriteLockScoped lockNetEq(*neteq_decode_lock_);
- return ResetDecoderSafe(payload_type);
-}
-
-int16_t ACMGenericCodec::ResetDecoderSafe(int16_t payload_type) {
- WebRtcACMCodecParams decoder_params;
- if (!decoder_exist_ || !decoder_initialized_) {
- return 0;
- }
-  // Initialization of the decoder should work for all codecs. For codecs that
-  // need to keep some state, an overloaded implementation of
-  // |DecoderParamsSafe| exists.
- DecoderParamsSafe(&decoder_params, static_cast<uint8_t>(payload_type));
- return InternalInitDecoder(&decoder_params);
-}
-
-void ACMGenericCodec::ResetNoMissedSamples() {
- WriteLockScoped cs(codec_wrapper_lock_);
- num_missed_samples_ = 0;
-}
-
-void ACMGenericCodec::IncreaseNoMissedSamples(const int16_t num_samples) {
- num_missed_samples_ += num_samples;
-}
-
-// Get the number of missed samples, this can be public.
-uint32_t ACMGenericCodec::NoMissedSamples() const {
- ReadLockScoped cs(codec_wrapper_lock_);
- return num_missed_samples_;
-}
-
-void ACMGenericCodec::DestructEncoder() {
- WriteLockScoped wl(codec_wrapper_lock_);
-
- // Disable VAD and delete the instance.
- if (ptr_vad_inst_ != NULL) {
- WebRtcVad_Free(ptr_vad_inst_);
- ptr_vad_inst_ = NULL;
- }
- vad_enabled_ = false;
- vad_mode_ = VADNormal;
-
- // Disable DTX and delete the instance.
- dtx_enabled_ = false;
- if (ptr_dtx_inst_ != NULL) {
- WebRtcCng_FreeEnc(ptr_dtx_inst_);
- ptr_dtx_inst_ = NULL;
- }
- num_lpc_params_ = kNewCNGNumPLCParams;
-
- DestructEncoderSafe();
-}
-
-void ACMGenericCodec::DestructDecoder() {
- WriteLockScoped wl(codec_wrapper_lock_);
- decoder_params_.codec_inst.pltype = -1;
- DestructDecoderSafe();
-}
-
-int16_t ACMGenericCodec::SetBitRate(const int32_t bitrate_bps) {
- WriteLockScoped wl(codec_wrapper_lock_);
- return SetBitRateSafe(bitrate_bps);
-}
-
-int16_t ACMGenericCodec::SetBitRateSafe(const int32_t bitrate_bps) {
- // If the codec can change the bit-rate this function is overloaded.
- // Otherwise the only acceptable value is the one that is in the database.
- CodecInst codec_params;
- if (ACMCodecDB::Codec(codec_id_, &codec_params) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: error in ACMCodecDB::Codec");
- return -1;
- }
- if (codec_params.rate != bitrate_bps) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: rate value is not acceptable");
- return -1;
- } else {
- return 0;
- }
-}
-
-// iSAC specific functions:
-int32_t ACMGenericCodec::GetEstimatedBandwidth() {
- WriteLockScoped wl(codec_wrapper_lock_);
- return GetEstimatedBandwidthSafe();
-}
-
-int32_t ACMGenericCodec::GetEstimatedBandwidthSafe() {
- // All codecs but iSAC will return -1.
- return -1;
-}
-
-int32_t ACMGenericCodec::SetEstimatedBandwidth(int32_t estimated_bandwidth) {
- WriteLockScoped wl(codec_wrapper_lock_);
- return SetEstimatedBandwidthSafe(estimated_bandwidth);
-}
-
-int32_t ACMGenericCodec::SetEstimatedBandwidthSafe(
- int32_t /*estimated_bandwidth*/) {
- // All codecs but iSAC will return -1.
- return -1;
-}
-// End of iSAC specific functions.
-
-int32_t ACMGenericCodec::GetRedPayload(uint8_t* red_payload,
- int16_t* payload_bytes) {
- WriteLockScoped wl(codec_wrapper_lock_);
- return GetRedPayloadSafe(red_payload, payload_bytes);
-}
-
-int32_t ACMGenericCodec::GetRedPayloadSafe(uint8_t* /* red_payload */,
- int16_t* /* payload_bytes */) {
- return -1; // Do nothing by default.
-}
-
-int16_t ACMGenericCodec::CreateEncoder() {
- int16_t status = 0;
- if (!encoder_exist_) {
- status = InternalCreateEncoder();
- // We just created the codec and obviously it is not initialized.
- encoder_initialized_ = false;
- }
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CreateEncoder: error in internal create encoder");
- encoder_exist_ = false;
- } else {
- encoder_exist_ = true;
- }
- return status;
-}
-
-int16_t ACMGenericCodec::CreateDecoder() {
- int16_t status = 0;
- if (!decoder_exist_) {
- status = InternalCreateDecoder();
- // Decoder just created and obviously it is not initialized.
- decoder_initialized_ = false;
- }
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CreateDecoder: error in internal create decoder");
- decoder_exist_ = false;
- } else {
- decoder_exist_ = true;
- }
- return status;
-}
-
-void ACMGenericCodec::DestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WriteLockScoped lockCodec(codec_wrapper_lock_);
- ReadLockScoped lockNetEq(*neteq_decode_lock_);
- InternalDestructEncoderInst(ptr_inst);
- }
-}
-
-// Get the current audio buffer including read and write states, and timestamps.
-int16_t ACMGenericCodec::AudioBuffer(WebRtcACMAudioBuff& audio_buff) {
- ReadLockScoped cs(codec_wrapper_lock_);
- memcpy(audio_buff.in_audio, in_audio_,
- AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t));
- audio_buff.in_audio_ix_read = in_audio_ix_read_;
- audio_buff.in_audio_ix_write = in_audio_ix_write_;
- memcpy(audio_buff.in_timestamp, in_timestamp_,
- TIMESTAMP_BUFFER_SIZE_W32 * sizeof(uint32_t));
- audio_buff.in_timestamp_ix_write = in_timestamp_ix_write_;
- audio_buff.last_timestamp = last_timestamp_;
- return 0;
-}
-
-// Set the audio buffer.
-int16_t ACMGenericCodec::SetAudioBuffer(WebRtcACMAudioBuff& audio_buff) {
- WriteLockScoped cs(codec_wrapper_lock_);
- memcpy(in_audio_, audio_buff.in_audio,
- AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t));
- in_audio_ix_read_ = audio_buff.in_audio_ix_read;
- in_audio_ix_write_ = audio_buff.in_audio_ix_write;
- memcpy(in_timestamp_, audio_buff.in_timestamp,
- TIMESTAMP_BUFFER_SIZE_W32 * sizeof(uint32_t));
- in_timestamp_ix_write_ = audio_buff.in_timestamp_ix_write;
- last_timestamp_ = audio_buff.last_timestamp;
- is_audio_buff_fresh_ = false;
- return 0;
-}
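Since AudioBuffer() and SetAudioBuffer() exist to synchronize two codec instances, a hedged sketch of that hand-off is shown below (not from the original file). The two codec pointers are assumed to be concrete subclasses owned by the caller, and the helper name is an assumption.

#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"

namespace webrtc {
namespace acm1 {

// Hypothetical helper: copy the buffered audio, indices and timestamps from
// one codec wrapper into another so that both encode from the same state.
int16_t SyncAudioBuffers(ACMGenericCodec* from, ACMGenericCodec* to) {
  WebRtcACMAudioBuff audio_buff;
  if (from->AudioBuffer(audio_buff) < 0) {
    return -1;  // failed to copy out the source buffer
  }
  return to->SetAudioBuffer(audio_buff);
}

}  // namespace acm1
}  // namespace webrtc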
-
-uint32_t ACMGenericCodec::LastEncodedTimestamp() const {
- ReadLockScoped cs(codec_wrapper_lock_);
- return last_encoded_timestamp_;
-}
-
-uint32_t ACMGenericCodec::EarliestTimestamp() const {
- ReadLockScoped cs(codec_wrapper_lock_);
- return in_timestamp_[0];
-}
-
-int16_t ACMGenericCodec::SetVAD(bool* enable_dtx, bool* enable_vad,
- ACMVADMode* mode) {
- WriteLockScoped cs(codec_wrapper_lock_);
- return SetVADSafe(enable_dtx, enable_vad, mode);
-}
-
-int16_t ACMGenericCodec::SetVADSafe(bool* enable_dtx, bool* enable_vad,
- ACMVADMode* mode) {
- if (!STR_CASE_CMP(encoder_params_.codec_inst.plname, "OPUS") ||
-      encoder_params_.codec_inst.channels == 2) {
- // VAD/DTX is not supported for Opus (even if sending mono), or other
- // stereo codecs.
- DisableDTX();
- DisableVAD();
- *enable_dtx = false;
- *enable_vad = false;
- return 0;
- }
-
- if (*enable_dtx) {
- // Make G729 AnnexB a special case.
- if (!STR_CASE_CMP(encoder_params_.codec_inst.plname, "G729")
- && !has_internal_dtx_) {
- if (ACMGenericCodec::EnableDTX() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetVADSafe: error in enable DTX");
- *enable_dtx = false;
- *enable_vad = vad_enabled_;
- return -1;
- }
- } else {
- if (EnableDTX() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetVADSafe: error in enable DTX");
- *enable_dtx = false;
- *enable_vad = vad_enabled_;
- return -1;
- }
- }
-
-    // If the codec does not have internal DTX (the normal case), enabling DTX
-    // requires an active VAD. '*enable_dtx == true' overrides the VAD status.
-    // If the codec has internal DTX we practically don't need WebRtc VAD;
-    // however, we let the user turn it on if they need callbacks on silence.
- if (!has_internal_dtx_) {
- // DTX is enabled, and VAD will be activated.
- *enable_vad = true;
- }
- } else {
- // Make G729 AnnexB a special case.
- if (!STR_CASE_CMP(encoder_params_.codec_inst.plname, "G729")
- && !has_internal_dtx_) {
- ACMGenericCodec::DisableDTX();
- *enable_dtx = false;
- } else {
- DisableDTX();
- *enable_dtx = false;
- }
- }
-
- int16_t status = (*enable_vad) ? EnableVAD(*mode) : DisableVAD();
- if (status < 0) {
- // Failed to set VAD, disable DTX.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetVADSafe: error in enable VAD");
- DisableDTX();
- *enable_dtx = false;
- *enable_vad = false;
- }
- return status;
-}
-
-int16_t ACMGenericCodec::EnableDTX() {
- if (has_internal_dtx_) {
-    // We should not be here if we have internal DTX; in that case this
-    // function should be overloaded by the derived class.
- return -1;
- }
- if (!dtx_enabled_) {
- if (WebRtcCng_CreateEnc(&ptr_dtx_inst_) < 0) {
- ptr_dtx_inst_ = NULL;
- return -1;
- }
- uint16_t freq_hz;
- EncoderSampFreq(freq_hz);
- if (WebRtcCng_InitEnc(ptr_dtx_inst_, freq_hz, kCngSidIntervalMsec,
- num_lpc_params_) < 0) {
-      // Couldn't initialize; free the memory and return -1.
- WebRtcCng_FreeEnc(ptr_dtx_inst_);
- ptr_dtx_inst_ = NULL;
- return -1;
- }
- dtx_enabled_ = true;
- }
- return 0;
-}
-
-int16_t ACMGenericCodec::DisableDTX() {
- if (has_internal_dtx_) {
-    // We should not be here if we have internal DTX; in that case this
-    // function should be overloaded by the derived class.
- return -1;
- }
- if (ptr_dtx_inst_ != NULL) {
- WebRtcCng_FreeEnc(ptr_dtx_inst_);
- ptr_dtx_inst_ = NULL;
- }
- dtx_enabled_ = false;
- return 0;
-}
-
-int16_t ACMGenericCodec::EnableVAD(ACMVADMode mode) {
- if ((mode < VADNormal) || (mode > VADVeryAggr)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EnableVAD: error in VAD mode range");
- return -1;
- }
-
- if (!vad_enabled_) {
- if (WebRtcVad_Create(&ptr_vad_inst_) < 0) {
- ptr_vad_inst_ = NULL;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EnableVAD: error in create VAD");
- return -1;
- }
- if (WebRtcVad_Init(ptr_vad_inst_) < 0) {
- WebRtcVad_Free(ptr_vad_inst_);
- ptr_vad_inst_ = NULL;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EnableVAD: error in init VAD");
- return -1;
- }
- }
-
- // Set the VAD mode to the given value.
- if (WebRtcVad_set_mode(ptr_vad_inst_, mode) < 0) {
- // We failed to set the mode and we have to return -1. If we already have a
- // working VAD (vad_enabled_ == true) then we leave it to work. Otherwise,
- // the following will be executed.
- if (!vad_enabled_) {
-      // We just created the instance but cannot set the mode, so we have to
-      // free the memory.
- WebRtcVad_Free(ptr_vad_inst_);
- ptr_vad_inst_ = NULL;
- }
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, unique_id_,
- "EnableVAD: failed to set the VAD mode");
- return -1;
- }
- vad_mode_ = mode;
- vad_enabled_ = true;
- return 0;
-}
-
-int16_t ACMGenericCodec::DisableVAD() {
- if (ptr_vad_inst_ != NULL) {
- WebRtcVad_Free(ptr_vad_inst_);
- ptr_vad_inst_ = NULL;
- }
- vad_enabled_ = false;
- return 0;
-}
-
-int32_t ACMGenericCodec::ReplaceInternalDTX(const bool replace_internal_dtx) {
- WriteLockScoped cs(codec_wrapper_lock_);
- return ReplaceInternalDTXSafe(replace_internal_dtx);
-}
-
-int32_t ACMGenericCodec::ReplaceInternalDTXSafe(
- const bool /* replace_internal_dtx */) {
- return -1;
-}
-
-int32_t ACMGenericCodec::IsInternalDTXReplaced(bool* internal_dtx_replaced) {
- WriteLockScoped cs(codec_wrapper_lock_);
- return IsInternalDTXReplacedSafe(internal_dtx_replaced);
-}
-
-int32_t ACMGenericCodec::IsInternalDTXReplacedSafe(
- bool* internal_dtx_replaced) {
- *internal_dtx_replaced = false;
- return 0;
-}
-
-int16_t ACMGenericCodec::ProcessFrameVADDTX(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- int16_t* samples_processed) {
- if (!vad_enabled_) {
-    // VAD not enabled, set all |vad_label_[]| to 1 (speech detected).
- for (int n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
- vad_label_[n] = 1;
- }
- *samples_processed = 0;
- return 0;
- }
-
- uint16_t freq_hz;
- EncoderSampFreq(freq_hz);
-
- // Calculate number of samples in 10 ms blocks, and number ms in one frame.
- int16_t samples_in_10ms = static_cast<int16_t>(freq_hz / 100);
- int32_t frame_len_ms = static_cast<int32_t>(frame_len_smpl_) * 1000 / freq_hz;
- int16_t status;
-
- // Vector for storing maximum 30 ms of mono audio at 48 kHz.
- int16_t audio[1440];
-
- // Calculate number of VAD-blocks to process, and number of samples in each
- // block.
- int num_samples_to_process[2];
- if (frame_len_ms == 40) {
- // 20 ms in each VAD block.
- num_samples_to_process[0] = num_samples_to_process[1] = 2 * samples_in_10ms;
- } else {
-    // For 10-30 ms frame sizes the second VAD block will be zero ms;
-    // for 50 and 60 ms frames the first VAD block will be 30 ms.
- num_samples_to_process[0] =
- (frame_len_ms > 30) ? 3 * samples_in_10ms : frame_len_smpl_;
- num_samples_to_process[1] = frame_len_smpl_ - num_samples_to_process[0];
- }
-
- int offset = 0;
- int loops = (num_samples_to_process[1] > 0) ? 2 : 1;
- for (int i = 0; i < loops; i++) {
- // TODO(turajs): Do we need to care about VAD together with stereo?
- // If stereo, calculate mean of the two channels.
- if (num_channels_ == 2) {
- for (int j = 0; j < num_samples_to_process[i]; j++) {
- audio[j] = (in_audio_[(offset + j) * 2] +
- in_audio_[(offset + j) * 2 + 1]) / 2;
- }
- offset = num_samples_to_process[0];
- } else {
-      // Mono; copy data from in_audio_ to continue working on.
- memcpy(audio, in_audio_, sizeof(int16_t) * num_samples_to_process[i]);
- }
-
- // Call VAD.
- status = static_cast<int16_t>(WebRtcVad_Process(ptr_vad_inst_,
- static_cast<int>(freq_hz),
- audio,
- num_samples_to_process[i]));
- vad_label_[i] = status;
-
- if (status < 0) {
-      // This will force the data to be removed from the buffer.
- *samples_processed += num_samples_to_process[i];
- return -1;
- }
-
-    // If the VAD decision is non-active, update DTX. NOTE! We only do this if
-    // the first part of a frame gets the VAD decision "inactive". Otherwise
-    // DTX might say it is time to transmit a SID frame, but we will encode the
-    // whole frame, because the first part is active.
- *samples_processed = 0;
- if ((status == 0) && (i == 0) && dtx_enabled_ && !has_internal_dtx_) {
- int16_t bitstream_len;
- int num_10ms_frames = num_samples_to_process[i] / samples_in_10ms;
- *bitstream_len_byte = 0;
- for (int n = 0; n < num_10ms_frames; n++) {
- // This block is (passive) && (vad enabled). If first CNG after
- // speech, force SID by setting last parameter to "1".
- status = WebRtcCng_Encode(ptr_dtx_inst_, &audio[n * samples_in_10ms],
- samples_in_10ms, bitstream, &bitstream_len,
- !prev_frame_cng_);
- if (status < 0) {
- return -1;
- }
-
-        // Mark that the previous frame was CNG.
- prev_frame_cng_ = 1;
-
- *samples_processed += samples_in_10ms * num_channels_;
-
- // |bitstream_len_byte| will only be > 0 once per 100 ms.
- *bitstream_len_byte += bitstream_len;
- }
-
- // Check if all samples got processed by the DTX.
- if (*samples_processed != num_samples_to_process[i] * num_channels_) {
- // Set to zero since something went wrong. Shouldn't happen.
- *samples_processed = 0;
- }
- } else {
-      // Mark that the previous frame was not CNG.
- prev_frame_cng_ = 0;
- }
-
- if (*samples_processed > 0) {
- // The block contains inactive speech, and is processed by DTX.
- // Discontinue running VAD.
- break;
- }
- }
-
- return status;
-}
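To make the block splitting above concrete, here is a small standalone sketch (not from the original file) that evaluates the same partitioning rule for a few assumed frame lengths at 16 kHz; the 40 ms case is handled separately in the code above as two 20 ms blocks.

// Standalone illustration with assumed values; not WebRTC code.
#include <cassert>

int main() {
  const int samples_in_10ms = 160;  // assumed 16 kHz encoder rate

  // 20 ms frame: a single VAD block of 320 samples, second block empty.
  {
    const int frame_len_ms = 20;
    const int frame_len_smpl = 320;
    const int block0 =
        (frame_len_ms > 30) ? 3 * samples_in_10ms : frame_len_smpl;
    const int block1 = frame_len_smpl - block0;
    assert(block0 == 320 && block1 == 0);
  }

  // 60 ms frame: the first VAD block is 30 ms (480 samples), and the
  // remaining 480 samples go into the second block.
  {
    const int frame_len_ms = 60;
    const int frame_len_smpl = 960;
    const int block0 =
        (frame_len_ms > 30) ? 3 * samples_in_10ms : frame_len_smpl;
    const int block1 = frame_len_smpl - block0;
    assert(block0 == 480 && block1 == 480);
  }
  return 0;
}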
-
-int16_t ACMGenericCodec::SamplesLeftToEncode() {
- ReadLockScoped rl(codec_wrapper_lock_);
- return (frame_len_smpl_ <= in_audio_ix_write_) ? 0 :
- (frame_len_smpl_ - in_audio_ix_write_);
-}
-
-void ACMGenericCodec::SetUniqueID(const uint32_t id) {
- unique_id_ = id;
-}
-
-bool ACMGenericCodec::IsAudioBufferFresh() const {
- ReadLockScoped rl(codec_wrapper_lock_);
- return is_audio_buff_fresh_;
-}
-
-int16_t ACMGenericCodec::UpdateDecoderSampFreq(int16_t /* codec_id */) {
- return 0;
-}
-
-// This function is replaced by codec specific functions for some codecs.
-int16_t ACMGenericCodec::EncoderSampFreq(uint16_t& samp_freq_hz) {
- int32_t f;
- f = ACMCodecDB::CodecFreq(codec_id_);
- if (f < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EncoderSampFreq: codec frequency is negative");
- return -1;
- } else {
- samp_freq_hz = static_cast<uint16_t>(f);
- return 0;
- }
-}
-
-int32_t ACMGenericCodec::ConfigISACBandwidthEstimator(
- const uint8_t /* init_frame_size_msec */,
- const uint16_t /* init_rate_bit_per_sec */,
- const bool /* enforce_frame_size */) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, unique_id_,
- "The send-codec is not iSAC, failed to config iSAC bandwidth "
- "estimator.");
- return -1;
-}
-
-int32_t ACMGenericCodec::SetISACMaxRate(
- const uint32_t /* max_rate_bit_per_sec */) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, unique_id_,
- "The send-codec is not iSAC, failed to set iSAC max rate.");
- return -1;
-}
-
-int32_t ACMGenericCodec::SetISACMaxPayloadSize(
- const uint16_t /* max_payload_len_bytes */) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, unique_id_,
- "The send-codec is not iSAC, failed to set iSAC max "
- "payload-size.");
- return -1;
-}
-
-void ACMGenericCodec::SaveDecoderParam(
- const WebRtcACMCodecParams* codec_params) {
- WriteLockScoped wl(codec_wrapper_lock_);
- SaveDecoderParamSafe(codec_params);
-}
-
-void ACMGenericCodec::SaveDecoderParamSafe(
- const WebRtcACMCodecParams* codec_params) {
- memcpy(&decoder_params_, codec_params, sizeof(WebRtcACMCodecParams));
-}
-
-int16_t ACMGenericCodec::UpdateEncoderSampFreq(
- uint16_t /* samp_freq_hz */) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
-               "A change in sampling frequency was requested while the "
-               "current send-codec supports only one sampling rate.");
- return -1;
-}
-
-void ACMGenericCodec::SetIsMaster(bool is_master) {
- WriteLockScoped wl(codec_wrapper_lock_);
- is_master_ = is_master;
-}
-
-int16_t ACMGenericCodec::REDPayloadISAC(const int32_t /* isac_rate */,
- const int16_t /* isac_bw_estimate */,
- uint8_t* /* payload */,
- int16_t* /* payload_len_bytes */) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error: REDPayloadISAC is an iSAC specific function");
- return -1;
-}
-
-bool ACMGenericCodec::IsTrueStereoCodec() { return false; }
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.h
deleted file mode 100644
index c1f9cdc554a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.h
+++ /dev/null
@@ -1,1224 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
-
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#define MAX_FRAME_SIZE_10MSEC 6
-
-// forward declaration
-struct WebRtcVadInst;
-struct WebRtcCngEncInst;
-
-namespace webrtc {
-
-// forward declaration
-struct CodecInst;
-struct WebRtcACMCodecParams;
-
-namespace acm1 {
-
-class ACMNetEQ;
-
-class ACMGenericCodec {
- public:
- ///////////////////////////////////////////////////////////////////////////
- // Constructor of the class
- //
- ACMGenericCodec();
-
- ///////////////////////////////////////////////////////////////////////////
- // Destructor of the class.
- //
- virtual ~ACMGenericCodec();
-
- ///////////////////////////////////////////////////////////////////////////
- // ACMGenericCodec* CreateInstance();
- // The function will be used for FEC. It is not implemented yet.
- //
- virtual ACMGenericCodec* CreateInstance() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t Encode()
-  // The function is called to perform an encoding of the audio stored in the
-  // audio buffer. An encoding is performed only if enough audio, i.e. equal
-  // to the frame-size of the codec, exists. The audio frame will be processed
-  // by VAD and CN/DTX if required. There are a few different cases.
- //
- // A) Neither VAD nor DTX is active; the frame is encoded by the encoder.
- //
- // B) VAD is enabled but not DTX; in this case the audio is processed by VAD
- // and encoded by the encoder. The "*encoding_type" will be either
- // "kActiveNormalEncode" or "kPassiveNormalEncode" if frame is active or
- // passive, respectively.
- //
- // C) DTX is enabled; if the codec has internal VAD/DTX we just encode the
- // frame by the encoder. Otherwise, the frame is passed through VAD and
- // if identified as passive, then it will be processed by CN/DTX. If the
- // frame is active it will be encoded by the encoder.
- //
- // This function acquires the appropriate locks and calls EncodeSafe() for
- // the actual processing.
- //
- // Outputs:
- // -bitstream : a buffer where bit-stream will be written to.
- // -bitstream_len_byte : contains the length of the bit-stream in
- // bytes.
- // -timestamp : contains the RTP timestamp, this is the
- // sampling time of the first sample encoded
- // (measured in number of samples).
- // -encoding_type : contains the type of encoding applied on the
- // audio samples. The alternatives are
- // (c.f. acm_common_types.h)
- // -kNoEncoding:
-  //                                  there was not enough data to encode, or
-  //                                  some error happened so that we could
-  //                                  not do the encoding.
- // -kActiveNormalEncoded:
- // the audio frame is active and encoded by
- // the given codec.
- // -kPassiveNormalEncoded:
- // the audio frame is passive but coded with
- // the given codec (NO DTX).
-  //                               -kPassiveDTXWB:
-  //                                  the audio frame is passive and was
-  //                                  encoded with wide-band CN.
-  //                               -kPassiveDTXNB:
-  //                                  the audio frame is passive and was
-  //                                  encoded with narrow-band CN.
- //
- // Return value:
-  //   -1 if an error occurred, otherwise the length of the bit-stream in
- // bytes.
- //
- int16_t Encode(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- uint32_t* timestamp,
- WebRtcACMEncodingType* encoding_type);
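A hedged usage sketch of this call sequence follows (not from the original file). It assumes the caller holds a concrete ACMGenericCodec subclass and a packet buffer of at least MAX_PAYLOAD_SIZE_BYTE bytes (the limit used by the implementation); HasFrameToEncode() is checked first so that a zero-length result is not mistaken for an error. The helper name is an assumption.

#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"

namespace webrtc {
namespace acm1 {

// Hypothetical helper: returns the payload length in bytes, 0 if there was
// not enough audio buffered, or -1 on error.
int16_t EncodeIfReady(ACMGenericCodec* codec, uint8_t* packet_buffer) {
  if (!codec->HasFrameToEncode()) {
    return 0;  // not enough audio buffered for a full frame yet
  }
  int16_t len_bytes = 0;
  uint32_t rtp_timestamp = 0;
  WebRtcACMEncodingType encoding_type = kNoEncoding;
  const int16_t result =
      codec->Encode(packet_buffer, &len_bytes, &rtp_timestamp, &encoding_type);
  // |encoding_type| tells the caller whether the payload is normal speech,
  // CN/DTX, or nothing at all, so the right payload type can be chosen.
  return (result < 0) ? -1 : len_bytes;
}

}  // namespace acm1
}  // namespace webrtc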
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t Decode()
- // This function is used to decode a given bit-stream, without engaging
- // NetEQ.
- //
- // This function acquires the appropriate locks and calls DecodeSafe() for
- // the actual processing. Please note that this is not functional yet.
- //
- // Inputs:
- // -bitstream : a buffer where bit-stream will be read.
- // -bitstream_len_byte : the length of the bit-stream in bytes.
- //
- // Outputs:
-  //   -audio              : pointer to a buffer where the audio will be
-  //                         written.
-  //   -audio_samples      : number of audio samples produced by decoding
-  //                         the given bit-stream.
- // -speech_type : speech type (for future use).
- //
- // Return value:
- // -1 if failed to decode,
- // 0 if succeeded.
- //
- int16_t Decode(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // void SplitStereoPacket()
-  // This function is used to split stereo payloads into left and right
-  // channels. Codecs which have stereo support have their own implementation
-  // of this function.
- //
- // Input/Output:
- // -payload : a vector with the received payload data.
- // The function will reorder the data so that
- // first half holds the left channel data, and the
- // second half the right channel data.
- // -payload_length : length of payload in bytes. Will be changed to
- // twice the input in case of true stereo, where
- // we simply copy the data and return it both for
- // left channel and right channel decoding.
- //
- virtual void SplitStereoPacket(uint8_t* /* payload */,
- int32_t* /* payload_length */) {}
-
- ///////////////////////////////////////////////////////////////////////////
- // bool EncoderInitialized();
- //
- // Return value:
- // True if the encoder is successfully initialized,
- // false otherwise.
- //
- bool EncoderInitialized();
-
- ///////////////////////////////////////////////////////////////////////////
- // bool DecoderInitialized();
- //
- // Return value:
- // True if the decoder is successfully initialized,
- // false otherwise.
- //
- bool DecoderInitialized();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t EncoderParams()
- // It is called to get encoder parameters. It will call
- // EncoderParamsSafe() in turn.
- //
- // Output:
-  //   -enc_params         : a buffer where the encoder parameters are
-  //                         written to. If the encoder is not
-  //                         initialized this buffer is filled with
-  //                         invalid values.
- // Return value:
- // -1 if the encoder is not initialized,
- // 0 otherwise.
- //
- int16_t EncoderParams(WebRtcACMCodecParams *enc_params);
-
- ///////////////////////////////////////////////////////////////////////////
-  // bool DecoderParams(...)
- // It is called to get decoder parameters. It will call DecoderParamsSafe()
- // in turn.
- //
- // Output:
-  //   -dec_params         : a buffer where the decoder parameters are
-  //                         written to. If the decoder is not initialized
-  //                         this buffer is filled with invalid values.
- //
-  // Return value:
-  //   false if the decoder is not initialized,
-  //   true otherwise.
- //
- //
- bool DecoderParams(WebRtcACMCodecParams *dec_params,
- const uint8_t payload_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InitEncoder(...)
- // This function is called to initialize the encoder with the given
- // parameters.
- //
- // Input:
- // -codec_params : parameters of encoder.
- // -force_initialization: if false the initialization is invoked only if
- // the encoder is not initialized. If true the
- // encoder is forced to (re)initialize.
- //
- // Return value:
- // 0 if could initialize successfully,
- // -1 if failed to initialize.
- //
- //
- int16_t InitEncoder(WebRtcACMCodecParams* codec_params,
- bool force_initialization);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InitDecoder()
- // This function is called to initialize the decoder with the given
- // parameters. (c.f. acm_common_defs.h & common_types.h for the
- // definition of the structure)
- //
- // Input:
- // -codec_params : parameters of decoder.
-  //   -force_initialization: if false the initialization is invoked only
-  //                          if the decoder is not initialized. If true
-  //                          the decoder is forced to (re)initialize.
- //
- // Return value:
- // 0 if could initialize successfully,
- // -1 if failed to initialize.
- //
- //
- int16_t InitDecoder(WebRtcACMCodecParams* codec_params,
- bool force_initialization);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t RegisterInNetEq(...)
- // This function is called to register the decoder in NetEq, with the given
- // payload type.
- //
- // Inputs:
- // -neteq : pointer to NetEq Instance
-  //   -codec_inst         : instance with the settings of the codec
- //
- // Return values
- // -1 if failed to register,
-  //    0 if successfully registered.
- //
- int32_t RegisterInNetEq(ACMNetEQ* neteq, const CodecInst& codec_inst);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t Add10MsData(...)
- // This function is called to add 10 ms of audio to the audio buffer of
- // the codec.
- //
- // Inputs:
-  //   -timestamp          : the timestamp of the 10 ms audio. The timestamp
-  //                         is the sampling time of the first sample,
-  //                         measured in number of samples.
- // -data : a buffer that contains the audio. The codec
-  //                         expects to get the audio at the correct sampling
-  //                         frequency.
- // -length : the length of the audio buffer
-  //   -audio_channel      : number of audio channels; 1 for mono and
-  //                         2 for stereo
- //
- // Return values:
- // -1 if failed
- // 0 otherwise.
- //
- int32_t Add10MsData(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length,
- const uint8_t audio_channel);
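Below is a hedged sketch (not from the original file) of feeding one 10 ms block into this buffer. It assumes 16 kHz mono audio, so each call passes 160 samples and advances the timestamp by the same amount; the helper name and the sample-rate choice are assumptions for illustration.

#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"

namespace webrtc {
namespace acm1 {

// Hypothetical helper: push one 10 ms block of 16 kHz mono PCM and keep the
// RTP timestamp (measured in samples) in step with the audio.
int32_t Push10MsAudio(ACMGenericCodec* codec, const int16_t* pcm_10ms,
                      uint32_t* rtp_timestamp) {
  const uint16_t kSamplesPer10Ms = 160;  // assumed 16 kHz, mono
  const int32_t ret =
      codec->Add10MsData(*rtp_timestamp, pcm_10ms, kSamplesPer10Ms, 1);
  *rtp_timestamp += kSamplesPer10Ms;  // sampling time of the next block
  return ret;
}

}  // namespace acm1
}  // namespace webrtc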
-
- ///////////////////////////////////////////////////////////////////////////
- // uint32_t NoMissedSamples()
-  // This function returns the number of samples which are overwritten in
-  // the audio buffer. The audio samples are overwritten if the input audio
-  // buffer is full but Add10MsData() is still called. (We might remove this
-  // function if it is not used.)
- //
- // Return Value:
- // Number of samples which are overwritten.
- //
- uint32_t NoMissedSamples() const;
-
- ///////////////////////////////////////////////////////////////////////////
- // void ResetNoMissedSamples()
- // This function resets the number of overwritten samples to zero.
- // (We might remove this function if we remove NoMissedSamples())
- //
- void ResetNoMissedSamples();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t SetBitRate()
- // The function is called to set the encoding rate.
- //
- // Input:
- // -bitrate_bps : encoding rate in bits per second
- //
- // Return value:
- // -1 if failed to set the rate, due to invalid input or given
- // codec is not rate-adjustable.
- // 0 if the rate is adjusted successfully
- //
- int16_t SetBitRate(const int32_t bitrate_bps);
-
- ///////////////////////////////////////////////////////////////////////////
- // DestructEncoderInst()
- // This API is used in conferencing. It will free the memory that is pointed
- // by |ptr_inst|. |ptr_inst| is a pointer to encoder instance, created and
- // filled up by calling EncoderInst(...).
- //
- // Inputs:
- // -ptr_inst : pointer to an encoder instance to be deleted.
- //
- //
- void DestructEncoderInst(void* ptr_inst);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t AudioBuffer()
-  // This is used when synchronization of codecs is required. There are cases
-  // where the audio buffers of two codecs have to be synced. By calling this
-  // function one can get the audio buffer and other related parameters, such
-  // as timestamps.
- //
- // Output:
- // -audio_buff : a pointer to WebRtcACMAudioBuff where the audio
- // buffer of this codec will be written to.
- //
- // Return value:
- // -1 if fails to copy the audio buffer,
- // 0 if succeeded.
- //
- int16_t AudioBuffer(WebRtcACMAudioBuff& audio_buff);
-
- ///////////////////////////////////////////////////////////////////////////
- // uint32_t EarliestTimestamp()
- // Returns the timestamp of the first 10 ms in audio buffer. This is used
- // to identify if a synchronization of two encoders is required.
- //
- // Return value:
- // timestamp of the first 10 ms audio in the audio buffer.
- //
- uint32_t EarliestTimestamp() const;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t SetAudioBuffer()
- // This function is called to set the audio buffer and the associated
- // parameters to a given value.
- //
- // Return value:
- // -1 if fails to copy the audio buffer,
- // 0 if succeeded.
- //
- int16_t SetAudioBuffer(WebRtcACMAudioBuff& audio_buff);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t SetVAD()
- // This is called to set VAD & DTX. If the codec has internal DTX, it will
- // be used. If DTX is enabled and the codec does not have internal DTX,
- // WebRtc-VAD will be used to decide if the frame is active. If DTX is
- // disabled but VAD is enabled the audio is passed through VAD to label it
- // as active or passive, but the frame is encoded normally. However the
- // bit-stream is labeled properly so that ACM::Process() can use this
- // information. In case of failure, the previous states of the VAD & DTX
- // are kept.
- //
- // Input/Output:
- // -enable_dtx : if true DTX will be enabled otherwise the DTX is
- // disabled. If codec has internal DTX that will be
- // used, otherwise WebRtc-CNG is used. In the latter
- // case VAD is automatically activated.
- // -enable_vad : if true WebRtc-VAD is enabled, otherwise VAD is
- // disabled, except for the case that DTX is enabled
- // but codec doesn't have internal DTX. In this case
- // VAD is enabled regardless of the value of
- // |enable_vad|.
- // -mode : this specifies the aggressiveness of VAD.
- //
- // Return value
- // -1 if failed to set DTX & VAD as specified,
- // 0 if succeeded.
- //
- int16_t SetVAD(bool* enable_dtx,
- bool* enable_vad,
- ACMVADMode* mode);
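The in/out nature of these arguments is easy to miss, so here is a hedged sketch (not from the original file): DTX is requested with VAD initially off, and after the call the three variables reflect what the codec actually applied (for example, VAD is forced on when DTX needs WebRtc VAD). The helper name and the chosen aggressiveness are assumptions.

#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"

namespace webrtc {
namespace acm1 {

// Hypothetical helper: enable DTX and read back the resulting VAD/DTX state.
int16_t EnableDtx(ACMGenericCodec* codec) {
  bool enable_dtx = true;
  bool enable_vad = false;        // may be forced to true by the call
  ACMVADMode mode = VADVeryAggr;  // assumed aggressiveness
  const int16_t status = codec->SetVAD(&enable_dtx, &enable_vad, &mode);
  // On return, |enable_dtx|, |enable_vad| and |mode| hold the state the codec
  // actually ended up in, which may differ from what was requested.
  return status;
}

}  // namespace acm1
}  // namespace webrtc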
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t ReplaceInternalDTX()
- // This is called to replace the codec internal DTX with WebRtc DTX.
-  // This is only valid for G729 where the user has the possibility to replace
-  // AnnexB with WebRtc DTX. For other codecs this function has no effect.
- //
- // Input:
- // -replace_internal_dtx : if true the internal DTX is replaced with WebRtc.
- //
- // Return value
- // -1 if failed to replace internal DTX,
- // 0 if succeeded.
- //
- int32_t ReplaceInternalDTX(const bool replace_internal_dtx);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t IsInternalDTXReplaced()
- // This is called to check if the codec internal DTX is replaced by WebRtc
-  // DTX. This is only valid for G729 where the user has the possibility to
-  // replace AnnexB with WebRtc DTX. For other codecs this function has no
-  // effect.
- //
- // Output:
- // -internal_dtx_replaced: if true the internal DTX is replaced with WebRtc.
- //
- // Return value
- // -1 if failed to check
- // 0 if succeeded.
- //
- int32_t IsInternalDTXReplaced(bool* internal_dtx_replaced);
-
- ///////////////////////////////////////////////////////////////////////////
- // void SetNetEqDecodeLock()
- // Passes the NetEq lock to the codec.
- //
- // Input:
- // -neteq_decode_lock : pointer to the lock associated with NetEQ of ACM.
- //
- void SetNetEqDecodeLock(RWLockWrapper* neteq_decode_lock) {
- neteq_decode_lock_ = neteq_decode_lock;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // bool HasInternalDTX()
- // Used to check if the codec has internal DTX.
- //
- // Return value:
- // true if the codec has an internal DTX, e.g. G729,
- // false otherwise.
- //
- bool HasInternalDTX() const {
- return has_internal_dtx_;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t GetEstimatedBandwidth()
- // Used to get decoder estimated bandwidth. Only iSAC will provide a value.
- //
- //
- // Return value:
- // -1 if fails to get decoder estimated bandwidth,
- // >0 estimated bandwidth in bits/sec.
- //
- int32_t GetEstimatedBandwidth();
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t SetEstimatedBandwidth()
-  // Used to set the estimated bandwidth sent out-of-band from the other side.
-  // Only iSAC will have use for the value.
- //
- // Input:
- // -estimated_bandwidth: estimated bandwidth in bits/sec
- //
- // Return value:
- // -1 if fails to set estimated bandwidth,
- // 0 on success.
- //
- int32_t SetEstimatedBandwidth(int32_t estimated_bandwidth);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t GetRedPayload()
- // Used to get codec specific RED payload (if such is implemented).
- // Currently only done in iSAC.
- //
- // Outputs:
- // -red_payload : a pointer to the data for RED payload.
- // -payload_bytes : number of bytes in RED payload.
- //
- // Return value:
- // -1 if fails to get codec specific RED,
- // 0 if succeeded.
- //
- int32_t GetRedPayload(uint8_t* red_payload,
- int16_t* payload_bytes);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t ResetEncoder()
- // By calling this function you would re-initialize the encoder with the
- // current parameters. All the settings, e.g. VAD/DTX, frame-size... should
- // remain unchanged. (In case of iSAC we don't want to lose BWE history.)
- //
- // Return value
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t ResetEncoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t ResetEncoder()
- // By calling this function you would re-initialize the decoder with the
- // current parameters.
- //
- // Return value
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t ResetDecoder(int16_t payload_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // void DestructEncoder()
- // This function is called to delete the encoder instance, if possible, to
- // have a fresh start. For codecs where encoder and decoder share the same
- // instance we cannot delete the encoder and instead we will initialize the
- // encoder. We also delete VAD and DTX if they have been created.
- //
- void DestructEncoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // void DestructDecoder()
- // This function is called to delete the decoder instance, if possible, to
- // have a fresh start. For codecs where encoder and decoder share the same
- // instance we cannot delete the encoder and instead we will initialize the
- // decoder. Before deleting decoder instance it has to be removed from the
- // NetEq list.
- //
- void DestructDecoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t SamplesLeftToEncode()
- // Returns the number of samples required to be able to do encoding.
- //
- // Return value:
- // Number of samples.
- //
- int16_t SamplesLeftToEncode();
-
- ///////////////////////////////////////////////////////////////////////////
- // uint32_t LastEncodedTimestamp()
- // Returns the timestamp of the last frame it encoded.
- //
- // Return value:
- // Timestamp.
- //
- uint32_t LastEncodedTimestamp() const;
-
- ///////////////////////////////////////////////////////////////////////////
- // SetUniqueID()
- // Set a unique ID for the codec to be used for tracing and debugging
- //
- // Input
- // -id : A number to identify the codec.
- //
- void SetUniqueID(const uint32_t id);
-
- ///////////////////////////////////////////////////////////////////////////
- // IsAudioBufferFresh()
-  // Specifies whether any audio has ever been injected into this codec.
- //
- // Return value
-  //   -true; no audio has been fed into this codec yet.
-  //   -false; audio has already been fed to the codec.
- //
- bool IsAudioBufferFresh() const;
-
- ///////////////////////////////////////////////////////////////////////////
- // UpdateDecoderSampFreq()
-  // For most of the codecs this function does nothing. It must be
-  // implemented for those codecs where one codec instance serves as the
-  // decoder for different flavors of the codec. One example is iSAC. There,
-  // iSAC 16 kHz and iSAC 32 kHz are treated as two different codecs with
-  // different payload types; however, there is only one iSAC instance to
-  // decode. The reason is that we would like to decode and encode with the
-  // same codec instance for the bandwidth estimator to work.
-  //
-  // Each time that we receive a new payload type, we call this function to
-  // prepare the decoder associated with the new payload. Normally, decoders
-  // don't have to do anything. For iSAC the decoder has to change its
-  // sampling rate. The input parameter specifies the current flavor of the
-  // codec in the codec database. For instance, if we just got a SWB payload
-  // then the input parameter is ACMCodecDB::isacswb.
- //
- // Input:
- // -codec_id : the ID of the codec associated with the
- // payload type that we just received.
- //
- // Return value:
- // 0 if succeeded in updating the decoder.
- // -1 if failed to update.
- //
- virtual int16_t UpdateDecoderSampFreq(int16_t /* codec_id */);
-
- ///////////////////////////////////////////////////////////////////////////
- // UpdateEncoderSampFreq()
- // Call this function to update the encoder sampling frequency. This
- // is for codecs where one payload-name supports several encoder sampling
-  // frequencies. Otherwise, to change the sampling frequency we need to
-  // register a new codec. ACM will consider that as registration of a new
- // codec, not a change in parameter. For iSAC, switching from WB to SWB
- // is treated as a change in parameter. Therefore, we need this function.
- //
- // Input:
- // -samp_freq_hz : encoder sampling frequency.
- //
- // Return value:
- // -1 if failed, or if this is meaningless for the given codec.
- // 0 if succeeded.
- //
- virtual int16_t UpdateEncoderSampFreq(
- uint16_t samp_freq_hz);
-
- ///////////////////////////////////////////////////////////////////////////
- // EncoderSampFreq()
- // Get the sampling frequency that the encoder (WebRtc wrapper) expects.
- //
- // Output:
- // -samp_freq_hz : sampling frequency, in Hertz, which the encoder
- // should be fed with.
- //
- // Return value:
- // -1 if failed to output sampling rate.
- // 0 if the sample rate is returned successfully.
- //
- virtual int16_t EncoderSampFreq(uint16_t& samp_freq_hz);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t ConfigISACBandwidthEstimator()
- // Call this function to configure the bandwidth estimator of ISAC.
- // During the adaptation of bit-rate, iSAC automatically adjusts the
- // frame-size (either 30 or 60 ms) to save on RTP header. The initial
- // frame-size can be specified by the first argument. The configuration also
- // regards the initial estimate of bandwidths. The estimator starts from
- // this point and converges to the actual bottleneck. This is given by the
- // second parameter. Furthermore, it is also possible to control the
- // adaptation of frame-size. This is specified by the last parameter.
- //
- // Input:
-  //   -init_frame_size_msec : initial frame-size in milliseconds. For iSAC-wb
- // 30 ms and 60 ms (default) are acceptable values,
- // and for iSAC-swb 30 ms is the only acceptable
- // value. Zero indicates default value.
- // -init_rate_bps : initial estimate of the bandwidth. Values
- // between 10000 and 58000 are acceptable.
- // -enforce_frame_size : if true, the frame-size will not be adapted.
- //
- // Return value:
- // -1 if failed to configure the bandwidth estimator,
- // 0 if the configuration was successfully applied.
- //
- virtual int32_t ConfigISACBandwidthEstimator(
- const uint8_t init_frame_size_msec,
- const uint16_t init_rate_bps,
- const bool enforce_frame_size);
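A hedged sketch of a call to this configuration follows (not from the original file); the numbers are picked from the ranges documented above purely for illustration, and the call only succeeds when the send codec actually is iSAC. The helper name is an assumption.

#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"

namespace webrtc {
namespace acm1 {

// Hypothetical helper with assumed, illustrative values.
int32_t ConfigureIsacBwe(ACMGenericCodec* isac_codec) {
  return isac_codec->ConfigISACBandwidthEstimator(
      30,      // initial frame-size in ms (30 or 60 for iSAC-wb)
      32000,   // initial bandwidth estimate in bits/sec (10000..58000)
      false);  // false: let iSAC keep adapting the frame-size
}

}  // namespace acm1
}  // namespace webrtc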
-
- ///////////////////////////////////////////////////////////////////////////
- // SetISACMaxPayloadSize()
- // Set the maximum payload size of iSAC packets. No iSAC payload,
- // regardless of its frame-size, may exceed the given limit. For
- // an iSAC payload of size B bits and frame-size T sec we have;
- // (B < max_payload_len_bytes * 8) and (B/T < max_rate_bit_per_sec), c.f.
- // SetISACMaxRate().
- //
- // Input:
- // -max_payload_len_bytes : maximum payload size in bytes.
- //
- // Return value:
- // -1 if failed to set the maximum payload-size.
- // 0 if the given length is set successfully.
- //
- virtual int32_t SetISACMaxPayloadSize(
- const uint16_t max_payload_len_bytes);
-
- ///////////////////////////////////////////////////////////////////////////
- // SetISACMaxRate()
- // Set the maximum instantaneous rate of iSAC. For a payload of B bits
- // with a frame-size of T sec the instantaneous rate is B/T bits per
- // second. Therefore, (B/T < max_rate_bit_per_sec) and
- // (B < max_payload_len_bytes * 8) are always satisfied for iSAC payloads,
- // c.f SetISACMaxPayloadSize().
- //
- // Input:
- // -max_rate_bps : maximum instantaneous bit-rate given in bits/sec.
- //
- // Return value:
- // -1 if failed to set the maximum rate.
- // 0 if the maximum rate is set successfully.
- //
- virtual int32_t SetISACMaxRate(const uint32_t max_rate_bps);
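
The two limits above work together: for an iSAC payload of B bits spanning T seconds, both B < max_payload_len_bytes * 8 and B/T < max_rate_bps must hold. A small illustrative check of that arithmetic (a helper for this sketch only, not part of the class):

    // Illustrative check of the constraints documented above.
    inline bool WithinIsacLimits(int payload_bytes, double frame_size_sec,
                                 int max_payload_len_bytes, int max_rate_bps) {
      const int bits = payload_bytes * 8;
      const double rate_bps = bits / frame_size_sec;
      // Example: a 120-byte payload for a 30 ms frame is 960 bits at 32000 bps.
      return bits < max_payload_len_bytes * 8 && rate_bps < max_rate_bps;
    }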
-
- ///////////////////////////////////////////////////////////////////////////
-  // SaveDecoderParam()
-  // Save the parameters of the decoder.
-  //
-  // Input:
-  //   -codec_params       : pointer to a structure where the parameters of
-  //                         the decoder are stored.
- //
- void SaveDecoderParam(const WebRtcACMCodecParams* codec_params);
-
- int32_t FrameSize() {
- return frame_len_smpl_;
- }
-
- void SetIsMaster(bool is_master);
-
- ///////////////////////////////////////////////////////////////////////////
- // REDPayloadISAC()
- // This is an iSAC-specific function. The function is called to get RED
- // payload from a default-encoder.
- //
- // Inputs:
- // -isac_rate : the target rate of the main payload. A RED
- // payload is generated according to the rate of
- // main payload. Note that we are not specifying the
- // rate of RED payload, but the main payload.
- // -isac_bw_estimate : bandwidth information should be inserted in
- // RED payload.
- //
- // Output:
-  //   -payload            : pointer to a buffer where the RED payload will be
-  //                         written to.
-  //   -payload_len_bytes  : a place-holder to write the length of the RED
-  //                         payload in bytes.
- //
- // Return value:
- // -1 if an error occurs, otherwise the length of the payload (in Bytes)
- // is returned.
- //
- virtual int16_t REDPayloadISAC(const int32_t isac_rate,
- const int16_t isac_bw_estimate,
- uint8_t* payload,
- int16_t* payload_len_bytes);
-
- ///////////////////////////////////////////////////////////////////////////
- // IsTrueStereoCodec()
-  // Call to see if the current encoder is a true stereo codec. This function
-  // should be overridden by codecs which are true stereo codecs.
- // Return value:
- // -true if stereo codec
- // -false if not stereo codec.
- //
- virtual bool IsTrueStereoCodec();
-
- ///////////////////////////////////////////////////////////////////////////
- // HasFrameToEncode()
- // Returns true if there is enough audio buffered for encoding, such that
- // calling Encode() will return a payload.
- //
- bool HasFrameToEncode() const;
-
- protected:
- ///////////////////////////////////////////////////////////////////////////
- // All the functions with FunctionNameSafe(...) contain the actual
- // implementation of FunctionName(...). FunctionName() acquires an
- // appropriate lock and calls FunctionNameSafe() to do the actual work.
- // Therefore, for the description of functionality, input/output arguments
-  // and return value, we refer to FunctionName().
- //
-
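A rough sketch of the lock-then-delegate pattern just described, assuming the codebase's scoped RW-lock helper; the actual wrappers in acm_generic_codec.cc may also acquire the NetEQ decode lock.

    // Sketch of the FunctionName()/FunctionNameSafe() split described above.
    int16_t ACMGenericCodec::ResetEncoder() {
      WriteLockScoped lock(codec_wrapper_lock_);  // Guard the wrapper state.
      return ResetEncoderSafe();                  // Does the work under the lock.
    }
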
- ///////////////////////////////////////////////////////////////////////////
- // See Decode() for the description of function, input(s)/output(s) and
- // return value.
- //
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
-  // See Add10MsData() for the description of function, input(s)/output(s)
- // and return value.
- //
- virtual int32_t Add10MsDataSafe(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length,
- const uint8_t audio_channel);
-
- ///////////////////////////////////////////////////////////////////////////
- // See RegisterInNetEq() for the description of function,
- // input(s)/output(s) and return value.
- //
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // See EncoderParam() for the description of function, input(s)/output(s)
- // and return value.
- //
- int16_t EncoderParamsSafe(WebRtcACMCodecParams *enc_params);
-
- ///////////////////////////////////////////////////////////////////////////
- // See DecoderParam for the description of function, input(s)/output(s)
- // and return value.
- //
-  // Note:
-  // Any class where a single instance handles several flavors of the
-  // same codec, so that several payload types are associated with the
-  // same instance, has to implement this function.
-  //
-  // Currently only iSAC implements it. A single iSAC instance is used
-  // for decoding both WB & SWB streams. At any given moment both WB & SWB
-  // can be registered as receive codecs. Hence two payload types are
-  // associated with a single codec instance.
- //
- virtual bool DecoderParamsSafe(WebRtcACMCodecParams *dec_params,
- const uint8_t payload_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // See ResetEncoder() for the description of function, input(s)/output(s)
- // and return value.
- //
- int16_t ResetEncoderSafe();
-
- ///////////////////////////////////////////////////////////////////////////
- // See InitEncoder() for the description of function, input(s)/output(s)
- // and return value.
- //
- int16_t InitEncoderSafe(WebRtcACMCodecParams *codec_params,
- bool force_initialization);
-
- ///////////////////////////////////////////////////////////////////////////
- // See InitDecoder() for the description of function, input(s)/output(s)
- // and return value.
- //
- int16_t InitDecoderSafe(WebRtcACMCodecParams *codec_params,
- bool force_initialization);
-
- ///////////////////////////////////////////////////////////////////////////
- // See ResetDecoder() for the description of function, input(s)/output(s)
- // and return value.
- //
- int16_t ResetDecoderSafe(int16_t payload_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // See DestructEncoder() for the description of function,
- // input(s)/output(s) and return value.
- //
- virtual void DestructEncoderSafe() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // See DestructDecoder() for the description of function,
- // input(s)/output(s) and return value.
- //
- virtual void DestructDecoderSafe() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // See SetBitRate() for the description of function, input(s)/output(s)
- // and return value.
- //
- // Any codec that can change the bit-rate has to implement this.
- //
- virtual int16_t SetBitRateSafe(const int32_t bitrate_bps);
-
- ///////////////////////////////////////////////////////////////////////////
- // See GetEstimatedBandwidth() for the description of function,
- // input(s)/output(s) and return value.
- //
- virtual int32_t GetEstimatedBandwidthSafe();
-
- ///////////////////////////////////////////////////////////////////////////
- // See SetEstimatedBandwidth() for the description of function,
- // input(s)/output(s) and return value.
- //
- virtual int32_t SetEstimatedBandwidthSafe(
- int32_t estimated_bandwidth);
-
- ///////////////////////////////////////////////////////////////////////////
- // See GetRedPayload() for the description of function, input(s)/output(s)
- // and return value.
- //
- virtual int32_t GetRedPayloadSafe(uint8_t* red_payload,
- int16_t* payload_bytes);
-
- ///////////////////////////////////////////////////////////////////////////
- // See SetVAD() for the description of function, input(s)/output(s) and
- // return value.
- //
- int16_t SetVADSafe(bool* enable_dtx,
- bool* enable_vad,
- ACMVADMode* mode);
-
- ///////////////////////////////////////////////////////////////////////////
- // See ReplaceInternalDTX() for the description of function, input and
- // return value.
- //
- virtual int32_t ReplaceInternalDTXSafe(const bool replace_internal_dtx);
-
- ///////////////////////////////////////////////////////////////////////////
- // See IsInternalDTXReplaced() for the description of function, input and
- // return value.
- //
- virtual int32_t IsInternalDTXReplacedSafe(bool* internal_dtx_replaced);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t CreateEncoder()
- // Creates the encoder instance.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t CreateEncoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t CreateDecoder()
- // Creates the decoder instance.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t CreateDecoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t EnableVAD();
-  // Enables VAD with the given mode. The VAD instance will be created if
-  // it does not exist.
- //
- // Input:
- // -mode : VAD mode c.f. audio_coding_module_typedefs.h for
- // the options.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t EnableVAD(ACMVADMode mode);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t DisableVAD()
- // Disables VAD.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t DisableVAD();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t EnableDTX()
- // Enables DTX. This method should be overwritten for codecs which have
- // internal DTX.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t EnableDTX();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t DisableDTX()
- // Disables usage of DTX. This method should be overwritten for codecs which
- // have internal DTX.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t DisableDTX();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalEncode()
- // This is a codec-specific function called in EncodeSafe() to actually
- // encode a frame of audio.
- //
- // Outputs:
- // -bitstream : pointer to a buffer where the bit-stream is
- // written to.
- // -bitstream_len_byte : the length of the bit-stream in bytes,
- // a negative value indicates error.
- //
- // Return value:
- // -1 if failed,
- // otherwise the length of the bit-stream is returned.
- //
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalInitEncoder()
- // This is a codec-specific function called in InitEncoderSafe(), it has to
- // do all codec-specific operation to initialize the encoder given the
- // encoder parameters.
- //
- // Input:
-  //   -codec_params       : pointer to a structure that contains parameters to
-  //                         initialize the encoder.
-  //                         Set codec_params->codec_inst.rate to -1 for
-  //                         iSAC to operate in adaptive mode.
-  //                         (TODO: if frame-length is -1, the frame-length will
-  //                         be adjusted automatically; otherwise the given
-  //                         frame-length is enforced.)
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams *codec_params) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalInitDecoder()
- // This is a codec-specific function called in InitDecoderSafe(), it has to
- // do all codec-specific operation to initialize the decoder given the
- // decoder parameters.
- //
- // Input:
-  //   -codec_params       : pointer to a structure that contains parameters to
-  //                         initialize the decoder.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams *codec_params) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // void IncreaseNoMissedSamples()
- // This method is called to increase the number of samples that are
- // overwritten in the audio buffer.
- //
- // Input:
- // -num_samples : the number of overwritten samples is incremented
- // by this value.
- //
- void IncreaseNoMissedSamples(const int16_t num_samples);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalCreateEncoder()
-  // This is a codec-specific method called in CreateEncoderSafe(); it is
-  // supposed to perform all codec-specific operations to create the encoder
-  // instance.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t InternalCreateEncoder() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalCreateDecoder()
-  // This is a codec-specific method called in CreateDecoderSafe(); it is
-  // supposed to perform all codec-specific operations to create the decoder
-  // instance.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t InternalCreateDecoder() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // void InternalDestructEncoderInst()
-  // This is a codec-specific method, used in conferencing, called from
-  // DestructEncoderInst(). The input argument is a pointer to the encoder
-  // instance (the codec instance for codecs whose encoder and decoder share
-  // the same instance). This method is called to free the memory that
-  // |ptr_inst| is pointing to.
- //
- // Input:
- // -ptr_inst : pointer to encoder instance.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual void InternalDestructEncoderInst(void* ptr_inst) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalResetEncoder()
-  // This method is called to reset the states of the encoder. However, the
-  // current parameters, e.g. frame-length, should remain as they are. For
-  // most of the codecs a re-initialization of the encoder is what needs to
-  // be done. But for iSAC we would like to keep the BWE history, so we cannot
-  // re-initialize. As soon as such an API is implemented in iSAC, this method
-  // has to be overridden in the ACMISAC class.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t InternalResetEncoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t ProcessFrameVADDTX()
-  // This function is called when a full frame of audio is available. It will
-  // break the audio frame into blocks such that each block can be processed
-  // by VAD & CN/DTX. If a frame is divided into two blocks, there are two
-  // cases. First, if the first block is active, the second block will not be
-  // processed by CN/DTX but only by VAD, and we return to the caller with
-  // '*samples_processed' set to zero; the audio frame will then be encoded
-  // by the encoder. Second, if the first block is inactive and is processed
-  // by CN/DTX, we stop processing the next block and return to the caller,
-  // which is EncodeSafe(), with "*samples_processed" equal to the number of
-  // samples in the first block.
- //
- // Output:
- // -bitstream : pointer to a buffer where DTX frame, if
- // generated, will be written to.
- // -bitstream_len_byte : contains the length of bit-stream in bytes, if
- // generated. Zero if no bit-stream is generated.
-  //   -samples_processed  : contains the number of samples that CN has
-  //                         actually processed. Those samples will not be
-  //                         encoded by the encoder. If it contains zero, the
-  //                         frame has been identified as active by VAD.
-  //                         Note that "*samples_processed" might be non-zero
-  //                         while "*bitstream_len_byte" is zero.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t ProcessFrameVADDTX(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- int16_t* samples_processed);
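
A simplified sketch of the caller-side logic described above; the buffers dtx_bitstream, bitstream and the length variables are assumed from the surrounding EncodeSafe() context and are not shown here.

    // Simplified handling of the two cases described above.
    int16_t samples_processed = 0;
    int16_t dtx_len_bytes = 0;
    if (ProcessFrameVADDTX(dtx_bitstream, &dtx_len_bytes,
                           &samples_processed) < 0) {
      // VAD/DTX processing failed.
    } else if (samples_processed == 0) {
      // VAD marked the frame active: encode the whole frame normally.
      InternalEncode(bitstream, &stream_len_bytes);
    } else {
      // CN/DTX consumed 'samples_processed' samples; a CN payload may or may
      // not have been produced (dtx_len_bytes can still be zero).
    }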
-
- ///////////////////////////////////////////////////////////////////////////
- // CanChangeEncodingParam()
-  // Check if the codec parameters can be changed. In conferencing, codec
-  // parameters normally cannot be changed. The exception is the bit-rate of
-  // iSAC.
- //
- // return value:
- // -true if codec parameters are allowed to change.
- // -false otherwise.
- //
- virtual bool CanChangeEncodingParam(CodecInst& codec_inst);
-
- ///////////////////////////////////////////////////////////////////////////
- // CurrentRate()
- // Call to get the current encoding rate of the encoder. This function
- // should be overwritten for codecs which automatically change their
- // target rate. One example is iSAC. The output of the function is the
- // current target rate.
- //
- // Output:
- // -rate_bps : the current target rate of the codec.
- //
- virtual void CurrentRate(int32_t& /* rate_bps */);
-
- virtual void SaveDecoderParamSafe(const WebRtcACMCodecParams* codec_params);
-
-  // &in_audio_[in_audio_ix_write_] always points to where new audio can be
-  // written.
- int16_t in_audio_ix_write_;
-
- // &in_audio_[in_audio_ix_read_] points to where audio has to be read from
- int16_t in_audio_ix_read_;
-
- int16_t in_timestamp_ix_write_;
-
-  // Where the audio is stored before encoding.
-  // To save memory, the following buffer can be allocated
-  // dynamically for 80 ms, depending on the sampling frequency
-  // of the codec.
- int16_t* in_audio_;
- uint32_t* in_timestamp_;
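
An assumed sketch (fragment only) of how these members interact; the real logic lives in Add10MsDataSafe() and in the codec-specific InternalEncode() implementations further down in this change. The names incoming and length are hypothetical.

    // Producer (Add10MsDataSafe): append 10 ms of audio at the write index.
    for (int i = 0; i < length; ++i)
      in_audio_[in_audio_ix_write_ + i] = incoming[i];
    in_audio_ix_write_ += length;

    // Consumer (a codec's InternalEncode): encode one frame starting at the
    // read index, then advance it, as the GSM-FR and iLBC encoders below do.
    in_audio_ix_read_ += frame_len_smpl_;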
-
- int16_t frame_len_smpl_;
- uint16_t num_channels_;
-
- // This will point to a static database of the supported codecs
- int16_t codec_id_;
-
-  // This accounts for the number of samples that were not encoded.
-  // The case is rare: samples are missed either due to an overwrite
-  // at the input buffer or due to an encoding error.
- uint32_t num_missed_samples_;
-
-  // True if the encoder instance is created.
- bool encoder_exist_;
- bool decoder_exist_;
-  // True if the encoder instance is initialized.
- bool encoder_initialized_;
- bool decoder_initialized_;
-
- bool registered_in_neteq_;
-
- // VAD/DTX
- bool has_internal_dtx_;
- WebRtcVadInst* ptr_vad_inst_;
- bool vad_enabled_;
- ACMVADMode vad_mode_;
- int16_t vad_label_[MAX_FRAME_SIZE_10MSEC];
- bool dtx_enabled_;
- WebRtcCngEncInst* ptr_dtx_inst_;
- uint8_t num_lpc_params_;
- bool sent_cn_previous_;
- bool is_master_;
- int16_t prev_frame_cng_;
-
- WebRtcACMCodecParams encoder_params_;
- WebRtcACMCodecParams decoder_params_;
-
- // Used as a global lock for all available decoders
- // so that no decoder is used when NetEQ decodes.
- RWLockWrapper* neteq_decode_lock_;
- // Used to lock wrapper internal data
- // such as buffers and state variables.
- RWLockWrapper& codec_wrapper_lock_;
-
- uint32_t last_encoded_timestamp_;
- uint32_t last_timestamp_;
- bool is_audio_buff_fresh_;
- uint32_t unique_id_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc
deleted file mode 100644
index 5ea0c56d9f2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc
+++ /dev/null
@@ -1,267 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_gsmfr.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#ifdef WEBRTC_CODEC_GSMFR
-// NOTE! GSM-FR is not included in the open-source package. Modify this file
-// or your codec API to match the function calls and names of the GSM-FR API
-// you use.
-#include "gsmfr_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_GSMFR
-
-ACMGSMFR::ACMGSMFR(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- return;
-}
-
-ACMGSMFR::~ACMGSMFR() {
- return;
-}
-
-int16_t ACMGSMFR::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMGSMFR::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMGSMFR::EnableDTX() {
- return -1;
-}
-
-int16_t ACMGSMFR::DisableDTX() {
- return -1;
-}
-
-int16_t ACMGSMFR::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMGSMFR::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMGSMFR::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMGSMFR::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMGSMFR::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMGSMFR::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMGSMFR::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMGSMFR::DestructDecoderSafe() {
- return;
-}
-
-void ACMGSMFR::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-#else //===================== Actual Implementation =======================
-
-ACMGSMFR::ACMGSMFR(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- codec_id_ = codec_id;
- has_internal_dtx_ = true;
- return;
-}
-
-ACMGSMFR::~ACMGSMFR() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcGSMFR_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcGSMFR_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMGSMFR::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = WebRtcGSMFR_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- (int16_t*)bitstream);
-  // Increment the read index. This tells the caller how far
-  // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMGSMFR::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMGSMFR::EnableDTX() {
- if (dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) {
- if (WebRtcGSMFR_EncoderInit(encoder_inst_ptr_, 1) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EnableDTX: cannot init encoder for GSMFR");
- return -1;
- }
- dtx_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-}
-
-int16_t ACMGSMFR::DisableDTX() {
- if (!dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) {
- if (WebRtcGSMFR_EncoderInit(encoder_inst_ptr_, 0) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "DisableDTX: cannot init encoder for GSMFR");
- return -1;
- }
- dtx_enabled_ = false;
- return 0;
- } else {
-    // Encoder doesn't exist, therefore disabling is harmless.
- return 0;
- }
-}
-
-int16_t ACMGSMFR::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- if (WebRtcGSMFR_EncoderInit(encoder_inst_ptr_,
- ((codec_params->enable_dtx) ? 1 : 0)) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitEncoder: cannot init encoder for GSMFR");
- }
- return 0;
-}
-
-int16_t ACMGSMFR::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- if (WebRtcGSMFR_DecoderInit(decoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: cannot init decoder for GSMFR");
- return -1;
- }
- return 0;
-}
-
-int32_t ACMGSMFR::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodecDef: decoder is not initialized for GSMFR");
- return -1;
- }
-  // Fill up the structure by calling
-  // "SET_CODEC_PAR" & "SET_GSMFR_FUNCTIONS".
-  // Then call NetEQ to add the codec to its
-  // database.
- SET_CODEC_PAR((codec_def), kDecoderGSMFR, codec_inst.pltype,
- decoder_inst_ptr_, 8000);
- SET_GSMFR_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMGSMFR::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMGSMFR::InternalCreateEncoder() {
- if (WebRtcGSMFR_CreateEnc(&encoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: cannot create instance for GSMFR "
- "encoder");
- return -1;
- }
- return 0;
-}
-
-void ACMGSMFR::DestructEncoderSafe() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcGSMFR_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- encoder_exist_ = false;
- encoder_initialized_ = false;
-}
-
-int16_t ACMGSMFR::InternalCreateDecoder() {
- if (WebRtcGSMFR_CreateDec(&decoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateDecoder: cannot create instance for GSMFR "
- "decoder");
- return -1;
- }
- return 0;
-}
-
-void ACMGSMFR::DestructDecoderSafe() {
- if (decoder_inst_ptr_ != NULL) {
- WebRtcGSMFR_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- decoder_exist_ = false;
- decoder_initialized_ = false;
-}
-
-void ACMGSMFR::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcGSMFR_FreeEnc((GSMFR_encinst_t_*) ptr_inst);
- }
- return;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.h
deleted file mode 100644
index aa499734af9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct GSMFR_encinst_t_;
-struct GSMFR_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMGSMFR : public ACMGenericCodec {
- public:
- explicit ACMGSMFR(int16_t codec_id);
- ~ACMGSMFR();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(void* ptr_inst);
-
- int16_t EnableDTX();
-
- int16_t DisableDTX();
-
- GSMFR_encinst_t_* encoder_inst_ptr_;
- GSMFR_decinst_t_* decoder_inst_ptr_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.cc
deleted file mode 100644
index 0f8049e8047..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.cc
+++ /dev/null
@@ -1,259 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#include "webrtc/modules/audio_coding/main/source/acm_ilbc.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_ILBC
-#include "webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_ILBC
-
-ACMILBC::ACMILBC(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- return;
-}
-
-ACMILBC::~ACMILBC() {
- return;
-}
-
-int16_t ACMILBC::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMILBC::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMILBC::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMILBC::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMILBC::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMILBC::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMILBC::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMILBC::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMILBC::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMILBC::DestructDecoderSafe() {
- return;
-}
-
-void ACMILBC::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMILBC::SetBitRateSafe(const int32_t /* rate */) {
- return -1;
-}
-
-#else //===================== Actual Implementation =======================
-
-ACMILBC::ACMILBC(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- codec_id_ = codec_id;
- return;
-}
-
-ACMILBC::~ACMILBC() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcIlbcfix_EncoderFree(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcIlbcfix_DecoderFree(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMILBC::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = WebRtcIlbcfix_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- (int16_t*)bitstream);
- if (*bitstream_len_byte < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: error in encode for ILBC");
- return -1;
- }
-  // Increment the read index. This tells the caller how far
-  // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMILBC::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMILBC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- // initialize with a correct processing block length
- if ((160 == (codec_params->codec_inst).pacsize) ||
- (320 == (codec_params->codec_inst).pacsize)) {
- // processing block of 20ms
- return WebRtcIlbcfix_EncoderInit(encoder_inst_ptr_, 20);
- } else if ((240 == (codec_params->codec_inst).pacsize) ||
- (480 == (codec_params->codec_inst).pacsize)) {
- // processing block of 30ms
- return WebRtcIlbcfix_EncoderInit(encoder_inst_ptr_, 30);
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitEncoder: invalid processing block");
- return -1;
- }
-}
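
The pacsize checks above follow from iLBC's 8 kHz sample rate: 160 samples is 20 ms and 240 samples is 30 ms, while 320 and 480 correspond to two such frames per packet. A one-line check of that arithmetic (C++11 static_assert, for illustration only):

    // Frame-duration arithmetic behind the pacsize checks above (8 kHz audio).
    static_assert(160 * 1000 / 8000 == 20, "160 samples at 8 kHz is 20 ms");
    static_assert(240 * 1000 / 8000 == 30, "240 samples at 8 kHz is 30 ms");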
-
-int16_t ACMILBC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
- // initialize with a correct processing block length
- if ((160 == (codec_params->codec_inst).pacsize) ||
- (320 == (codec_params->codec_inst).pacsize)) {
- // processing block of 20ms
- return WebRtcIlbcfix_DecoderInit(decoder_inst_ptr_, 20);
- } else if ((240 == (codec_params->codec_inst).pacsize) ||
- (480 == (codec_params->codec_inst).pacsize)) {
- // processing block of 30ms
- return WebRtcIlbcfix_DecoderInit(decoder_inst_ptr_, 30);
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: invalid processing block");
- return -1;
- }
-}
-
-int32_t ACMILBC::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
-                 "CodecDef: decoder not initialized for ILBC");
- return -1;
- }
-  // Fill up the structure by calling
-  // "SET_CODEC_PAR" & "SET_ILBC_FUNCTIONS".
-  // Then return the structure back to NetEQ to add the codec to its
-  // database.
- SET_CODEC_PAR((codec_def), kDecoderILBC, codec_inst.pltype, decoder_inst_ptr_,
- 8000);
- SET_ILBC_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMILBC::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMILBC::InternalCreateEncoder() {
- if (WebRtcIlbcfix_EncoderCreate(&encoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: cannot create instance for ILBC "
- "encoder");
- return -1;
- }
- return 0;
-}
-
-void ACMILBC::DestructEncoderSafe() {
- encoder_initialized_ = false;
- encoder_exist_ = false;
- if (encoder_inst_ptr_ != NULL) {
- WebRtcIlbcfix_EncoderFree(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
-}
-
-int16_t ACMILBC::InternalCreateDecoder() {
- if (WebRtcIlbcfix_DecoderCreate(&decoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateDecoder: cannot create instance for ILBC "
- "decoder");
- return -1;
- }
- return 0;
-}
-
-void ACMILBC::DestructDecoderSafe() {
- decoder_initialized_ = false;
- decoder_exist_ = false;
- if (decoder_inst_ptr_ != NULL) {
- WebRtcIlbcfix_DecoderFree(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
-}
-
-void ACMILBC::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcIlbcfix_EncoderFree((iLBC_encinst_t_*) ptr_inst);
- }
- return;
-}
-
-int16_t ACMILBC::SetBitRateSafe(const int32_t rate) {
- // Check that rate is valid. No need to store the value
- if (rate == 13300) {
- WebRtcIlbcfix_EncoderInit(encoder_inst_ptr_, 30);
- } else if (rate == 15200) {
- WebRtcIlbcfix_EncoderInit(encoder_inst_ptr_, 20);
- } else {
- return -1;
- }
- encoder_params_.codec_inst.rate = rate;
-
- return 0;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.h
deleted file mode 100644
index bd2495fe316..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct iLBC_encinst_t_;
-struct iLBC_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMILBC : public ACMGenericCodec {
- public:
- explicit ACMILBC(int16_t codec_id);
- virtual ~ACMILBC();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t rate) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- iLBC_encinst_t_* encoder_inst_ptr_;
- iLBC_decinst_t_* decoder_inst_ptr_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.cc
deleted file mode 100644
index 61fa32f6d6d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.cc
+++ /dev/null
@@ -1,903 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#include "webrtc/modules/audio_coding/main/source/acm_isac.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_ISAC
-#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
-#include "webrtc/modules/audio_coding/main/source/acm_isac_macros.h"
-#endif
-
-#ifdef WEBRTC_CODEC_ISACFX
-#include "webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h"
-#include "webrtc/modules/audio_coding/main/source/acm_isac_macros.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-// We need this, otherwise we cannot use a forward declaration
-// in the header file.
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
-struct ACMISACInst {
- ACM_ISAC_STRUCT *inst;
-};
-#endif
-
-#define ISAC_MIN_RATE 10000
-#define ISAC_MAX_RATE 56000
-
-// Tables for bandwidth estimates
-#define NR_ISAC_BANDWIDTHS 24
-static const int32_t kIsacRatesWb[NR_ISAC_BANDWIDTHS] = {
- 10000, 11100, 12300, 13700, 15200, 16900,
- 18800, 20900, 23300, 25900, 28700, 31900,
- 10100, 11200, 12400, 13800, 15300, 17000,
- 18900, 21000, 23400, 26000, 28800, 32000
-};
-
-static const int32_t kIsacRatesSwb[NR_ISAC_BANDWIDTHS] = {
- 10000, 11000, 12400, 13800, 15300, 17000,
- 18900, 21000, 23200, 25400, 27600, 29800,
- 32000, 34100, 36300, 38500, 40700, 42900,
- 45100, 47300, 49500, 51700, 53900, 56000,
-};
-
-#if (!defined(WEBRTC_CODEC_ISAC) && !defined(WEBRTC_CODEC_ISACFX))
-
-ACMISAC::ACMISAC(int16_t /* codec_id */)
- : codec_inst_ptr_(NULL),
- is_enc_initialized_(false),
- isac_coding_mode_(CHANNEL_INDEPENDENT),
- enforce_frame_size_(false),
- isac_currentBN_(32000),
- samples_in10MsAudio_(160) { // Initiates to 16 kHz mode.
- // Initiate decoder parameters for the 32 kHz mode.
- memset(&decoder_params32kHz_, 0, sizeof(WebRtcACMCodecParams));
- decoder_params32kHz_.codec_inst.pltype = -1;
-
- return;
-}
-
-ACMISAC::~ACMISAC() {
- return;
-}
-
-ACMGenericCodec* ACMISAC::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMISAC::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMISAC::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMISAC::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMISAC::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMISAC::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMISAC::DestructDecoderSafe() {
- return;
-}
-
-int16_t ACMISAC::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMISAC::DestructEncoderSafe() {
- return;
-}
-
-int32_t ACMISAC::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-void ACMISAC::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMISAC::DeliverCachedIsacData(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */,
- uint32_t* /* timestamp */,
- WebRtcACMEncodingType* /* encoding_type */,
- const uint16_t /* isac_rate */,
- const uint8_t /* isac_bw_estimate */) {
- return -1;
-}
-
-int16_t ACMISAC::Transcode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */,
- int16_t /* q_bwe */,
- int32_t /* scale */,
- bool /* is_red */) {
- return -1;
-}
-
-int16_t ACMISAC::SetBitRateSafe(int32_t /* bit_rate */) {
- return -1;
-}
-
-int32_t ACMISAC::GetEstimatedBandwidthSafe() {
- return -1;
-}
-
-int32_t ACMISAC::SetEstimatedBandwidthSafe(
- int32_t /* estimated_bandwidth */) {
- return -1;
-}
-
-int32_t ACMISAC::GetRedPayloadSafe(uint8_t* /* red_payload */,
- int16_t* /* payload_bytes */) {
- return -1;
-}
-
-int16_t ACMISAC::UpdateDecoderSampFreq(int16_t /* codec_id */) {
- return -1;
-}
-
-int16_t ACMISAC::UpdateEncoderSampFreq(
- uint16_t /* encoder_samp_freq_hz */) {
- return -1;
-}
-
-int16_t ACMISAC::EncoderSampFreq(uint16_t& /* samp_freq_hz */) {
- return -1;
-}
-
-int32_t ACMISAC::ConfigISACBandwidthEstimator(
- const uint8_t /* init_frame_size_msec */,
- const uint16_t /* init_rate_bit_per_sec */,
- const bool /* enforce_frame_size */) {
- return -1;
-}
-
-int32_t ACMISAC::SetISACMaxPayloadSize(
- const uint16_t /* max_payload_len_bytes */) {
- return -1;
-}
-
-int32_t ACMISAC::SetISACMaxRate(
- const uint32_t /* max_rate_bit_per_sec */) {
- return -1;
-}
-
-void ACMISAC::UpdateFrameLen() {
- return;
-}
-
-void ACMISAC::CurrentRate(int32_t& /*rate_bit_per_sec */) {
- return;
-}
-
-bool
-ACMISAC::DecoderParamsSafe(
- WebRtcACMCodecParams* /* dec_params */,
- const uint8_t /* payload_type */) {
- return false;
-}
-
-void
-ACMISAC::SaveDecoderParamSafe(
- const WebRtcACMCodecParams* /* codec_params */) {
- return;
-}
-
-int16_t ACMISAC::REDPayloadISAC(
- const int32_t /* isac_rate */,
- const int16_t /* isac_bw_estimate */,
- uint8_t* /* payload */,
- int16_t* /* payload_len_bytes */) {
- return -1;
-}
-
-#else //===================== Actual Implementation =======================
-
-#ifdef WEBRTC_CODEC_ISACFX
-
-// How the scaling is computed. iSAC computes a gain based on the
-// bottleneck. It uses the following expression for that:
-//
-// G(BN_kbps) = pow(10, (a + b * BN_kbps + c * BN_kbps * BN_kbps) / 20.0)
-//              / 3.4641;
-//
-// where, for a 30 ms frame length, we have
-//
-// a = -23; b = 0.48; c = 0;
-//
-// Since the default encoder operates at 32 kbps, the scale is
-//
-// S(BN_kbps) = G(BN_kbps) / G(32);
-
-#define ISAC_NUM_SUPPORTED_RATES 9
-
-static const uint16_t kIsacSuportedRates[ISAC_NUM_SUPPORTED_RATES] = {
- 32000, 30000, 26000, 23000, 21000,
- 19000, 17000, 15000, 12000
-};
-
-static const float kIsacScale[ISAC_NUM_SUPPORTED_RATES] = {
- 1.0f, 0.8954f, 0.7178f, 0.6081f, 0.5445f,
- 0.4875f, 0.4365f, 0.3908f, 0.3311f
-};
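
A small numerical check of the formula in the comment above: with a = -23, b = 0.48, c = 0, the ratio G(BN)/G(32) reproduces the hard-coded kIsacScale entries, e.g. roughly 0.718 at 26 kbps versus the 0.7178 table value. The helper names below are hypothetical.

    #include <cmath>

    // Worked example of the gain/scale expression documented above (30 ms frames).
    static double IsacGain(double bn_kbps) {
      const double a = -23.0, b = 0.48, c = 0.0;
      return std::pow(10.0, (a + b * bn_kbps + c * bn_kbps * bn_kbps) / 20.0) /
             3.4641;
    }

    static double IsacScale(double bn_kbps) {
      return IsacGain(bn_kbps) / IsacGain(32.0);  // ~0.718 for bn_kbps == 26.
    }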
-
-enum IsacSamplingRate {
- kIsacWideband = 16,
- kIsacSuperWideband = 32
-};
-
-static float ACMISACFixTranscodingScale(uint16_t rate) {
- // find the scale for transcoding, the scale is rounded
- // downward
- float scale = -1;
- for (int16_t n = 0; n < ISAC_NUM_SUPPORTED_RATES; n++) {
- if (rate >= kIsacSuportedRates[n]) {
- scale = kIsacScale[n];
- break;
- }
- }
- return scale;
-}
-
-static void ACMISACFixGetSendBitrate(ACM_ISAC_STRUCT* inst,
- int32_t* bottleneck) {
- *bottleneck = WebRtcIsacfix_GetUplinkBw(inst);
-}
-
-static int16_t ACMISACFixGetNewBitstream(ACM_ISAC_STRUCT* inst,
- int16_t bwe_index,
- int16_t /* jitter_index */,
- int32_t rate,
- int16_t* bitstream,
- bool is_red) {
- if (is_red) {
- // RED not supported with iSACFIX
- return -1;
- }
- float scale = ACMISACFixTranscodingScale((uint16_t) rate);
- return WebRtcIsacfix_GetNewBitStream(inst, bwe_index, scale, bitstream);
-}
-
-static int16_t ACMISACFixGetSendBWE(ACM_ISAC_STRUCT* inst,
- int16_t* rate_index,
- int16_t* /* dummy */) {
- int16_t local_rate_index;
- int16_t status = WebRtcIsacfix_GetDownLinkBwIndex(inst,
- &local_rate_index);
- if (status < 0) {
- return -1;
- } else {
- *rate_index = local_rate_index;
- return 0;
- }
-}
-
-static int16_t ACMISACFixControlBWE(ACM_ISAC_STRUCT* inst,
- int32_t rate_bps,
- int16_t frame_size_ms,
- int16_t enforce_frame_size) {
- return WebRtcIsacfix_ControlBwe(inst, (int16_t) rate_bps, frame_size_ms,
- enforce_frame_size);
-}
-
-static int16_t ACMISACFixControl(ACM_ISAC_STRUCT* inst,
- int32_t rate_bps,
- int16_t frame_size_ms) {
- return WebRtcIsacfix_Control(inst, (int16_t) rate_bps, frame_size_ms);
-}
-
-// The following two functions should have the same signatures as their
-// counterparts in floating-point iSAC, i.e. WebRtcIsac_EncSampRate &
-// WebRtcIsac_DecSampRate.
-static uint16_t ACMISACFixGetEncSampRate(ACM_ISAC_STRUCT* /* inst */) {
- return 16000;
-}
-
-static uint16_t ACMISACFixGetDecSampRate(ACM_ISAC_STRUCT* /* inst */) {
- return 16000;
-}
-
-#endif
-
-ACMISAC::ACMISAC(int16_t codec_id)
- : is_enc_initialized_(false),
- isac_coding_mode_(CHANNEL_INDEPENDENT),
- enforce_frame_size_(false),
- isac_current_bn_(32000),
- samples_in_10ms_audio_(160) { // Initiates to 16 kHz mode.
- codec_id_ = codec_id;
-
- // Create codec instance.
- codec_inst_ptr_ = new ACMISACInst;
- if (codec_inst_ptr_ == NULL) {
- return;
- }
- codec_inst_ptr_->inst = NULL;
-
- // Initiate decoder parameters for the 32 kHz mode.
- memset(&decoder_params_32khz_, 0, sizeof(WebRtcACMCodecParams));
- decoder_params_32khz_.codec_inst.pltype = -1;
-
- // TODO(tlegrand): Check if the following is really needed, now that
- // ACMGenericCodec has been updated to initialize this value.
- // Initialize values that can be used uninitialized otherwise
- decoder_params_.codec_inst.pltype = -1;
-}
-
-ACMISAC::~ACMISAC() {
- if (codec_inst_ptr_ != NULL) {
- if (codec_inst_ptr_->inst != NULL) {
- ACM_ISAC_FREE(codec_inst_ptr_->inst);
- codec_inst_ptr_->inst = NULL;
- }
- delete codec_inst_ptr_;
- codec_inst_ptr_ = NULL;
- }
- return;
-}
-
-ACMGenericCodec* ACMISAC::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMISAC::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
-  // iSAC takes 10 ms of audio every time we call the encoder; therefore,
-  // it should be treated like codecs with a non-zero 'basic coding block',
-  // and the following 'while-loop' should not be necessary.
-  // However, due to a mistake in the codec, the frame-size might change
-  // for the first 10 ms pushed into iSAC if the bit-rate is low. This is
-  // sort of a bug in iSAC; to address it we treat iSAC as
-  // follows.
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
- *bitstream_len_byte = 0;
- while ((*bitstream_len_byte == 0) && (in_audio_ix_read_ < frame_len_smpl_)) {
- if (in_audio_ix_read_ > in_audio_ix_write_) {
- // something is wrong.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
-                   "The actual frame-size of iSAC appears to be larger than "
- "expected. All audio pushed in but no bit-stream is "
- "generated.");
- return -1;
- }
- *bitstream_len_byte = ACM_ISAC_ENCODE(codec_inst_ptr_->inst,
- &in_audio_[in_audio_ix_read_],
- (int16_t*)bitstream);
-    // Increment the read index. This tells the caller how far
-    // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += samples_in_10ms_audio_;
- }
- if (*bitstream_len_byte == 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, unique_id_,
- "ISAC Has encoded the whole frame but no bit-stream is "
- "generated.");
- }
-
-  // A packet is generated. iSAC, when set in adaptive mode, may change
-  // the frame length, and we would like to update the bottleneck value as
-  // well, although updating the bottleneck is not crucial.
- if ((*bitstream_len_byte > 0) && (isac_coding_mode_ == ADAPTIVE)) {
- ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &isac_current_bn_);
- }
- UpdateFrameLen();
- return *bitstream_len_byte;
-}
-
-int16_t ACMISAC::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_sample */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMISAC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- // if rate is set to -1 then iSAC has to be in adaptive mode
- if (codec_params->codec_inst.rate == -1) {
- isac_coding_mode_ = ADAPTIVE;
- } else if ((codec_params->codec_inst.rate >= ISAC_MIN_RATE) &&
- (codec_params->codec_inst.rate <= ISAC_MAX_RATE)) {
- // sanity check that rate is in acceptable range
- isac_coding_mode_ = CHANNEL_INDEPENDENT;
- isac_current_bn_ = codec_params->codec_inst.rate;
- } else {
- return -1;
- }
-
- // we need to set the encoder sampling frequency.
- if (UpdateEncoderSampFreq((uint16_t) codec_params->codec_inst.plfreq)
- < 0) {
- return -1;
- }
- if (ACM_ISAC_ENCODERINIT(codec_inst_ptr_->inst, isac_coding_mode_) < 0) {
- return -1;
- }
-
- // apply the frame-size and rate if operating in
- // channel-independent mode
- if (isac_coding_mode_ == CHANNEL_INDEPENDENT) {
- if (ACM_ISAC_CONTROL(codec_inst_ptr_->inst,
- codec_params->codec_inst.rate,
- codec_params->codec_inst.pacsize /
- (codec_params->codec_inst.plfreq / 1000)) < 0) {
- return -1;
- }
- } else {
-    // We need this for the adaptive case, and it has to be called
-    // after initialization.
- ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &isac_current_bn_);
- }
- frame_len_smpl_ = ACM_ISAC_GETNEWFRAMELEN(codec_inst_ptr_->inst);
- return 0;
-}
-
-int16_t ACMISAC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
-
- // set decoder sampling frequency.
- if (codec_params->codec_inst.plfreq == 32000 ||
- codec_params->codec_inst.plfreq == 48000) {
- UpdateDecoderSampFreq(ACMCodecDB::kISACSWB);
- } else {
- UpdateDecoderSampFreq(ACMCodecDB::kISAC);
- }
-
-  // In a one-way communication we may never register a send codec.
-  // However, we would like the BWE to work properly, so it has to be
-  // initialized. The BWE is initialized when the iSAC encoder is
-  // initialized. Therefore, we need this.
- if (!encoder_initialized_) {
- // Since we don't require a valid rate or a valid packet size when
- // initializing the decoder, we set valid values before initializing encoder
- codec_params->codec_inst.rate = kIsacWbDefaultRate;
- codec_params->codec_inst.pacsize = kIsacPacSize960;
- if (InternalInitEncoder(codec_params) < 0) {
- return -1;
- }
- encoder_initialized_ = true;
- }
-
- return ACM_ISAC_DECODERINIT(codec_inst_ptr_->inst);
-}
-
-int16_t ACMISAC::InternalCreateDecoder() {
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
- int16_t status = ACM_ISAC_CREATE(&(codec_inst_ptr_->inst));
-
- // specific to codecs with one instance for encoding and decoding
- encoder_initialized_ = false;
- if (status < 0) {
- encoder_exist_ = false;
- } else {
- encoder_exist_ = true;
- }
- return status;
-}
-
-void ACMISAC::DestructDecoderSafe() {
-  // A codec with a shared instance cannot be deleted here.
- decoder_initialized_ = false;
- return;
-}
-
-int16_t ACMISAC::InternalCreateEncoder() {
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
- int16_t status = ACM_ISAC_CREATE(&(codec_inst_ptr_->inst));
-
- // specific to codecs with one instance for encoding and decoding
- decoder_initialized_ = false;
- if (status < 0) {
- decoder_exist_ = false;
- } else {
- decoder_exist_ = true;
- }
- return status;
-}
-
-void ACMISAC::DestructEncoderSafe() {
-  // A codec with a shared instance cannot be deleted here.
- encoder_initialized_ = false;
- return;
-}
-
-int32_t ACMISAC::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- // Sanity checks
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
- if (!decoder_initialized_ || !decoder_exist_) {
- return -1;
- }
-  // Fill up the structure by calling
-  // "SET_CODEC_PAR" & "SET_ISAC_FUNCTIONS".
-  // Then call NetEQ to add the codec to its
-  // database.
- if (codec_inst.plfreq == 16000) {
- SET_CODEC_PAR((codec_def), kDecoderISAC, codec_inst.pltype,
- codec_inst_ptr_->inst, 16000);
-#ifdef WEBRTC_CODEC_ISAC
- SET_ISAC_FUNCTIONS((codec_def));
-#else
- SET_ISACfix_FUNCTIONS((codec_def));
-#endif
- } else {
-#ifdef WEBRTC_CODEC_ISAC
- // Decoder is either @ 16 kHz or 32 kHz. Even if encoder is set @ 48 kHz
- // decoding is @ 32 kHz.
- if (codec_inst.plfreq == 32000) {
- SET_CODEC_PAR((codec_def), kDecoderISACswb, codec_inst.pltype,
- codec_inst_ptr_->inst, 32000);
- SET_ISACSWB_FUNCTIONS((codec_def));
- } else {
- SET_CODEC_PAR((codec_def), kDecoderISACfb, codec_inst.pltype,
- codec_inst_ptr_->inst, 32000);
- SET_ISACFB_FUNCTIONS((codec_def));
- }
-#else
- return -1;
-#endif
- }
- return 0;
-}
-
-void ACMISAC::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- ACM_ISAC_FREE((ACM_ISAC_STRUCT *) ptr_inst);
- }
- return;
-}
-
-int16_t ACMISAC::Transcode(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- int16_t q_bwe,
- int32_t rate,
- bool is_red) {
- int16_t jitter_info = 0;
-  // Transcode from a higher rate to a lower rate. Sanity check:
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
-
- *bitstream_len_byte = ACM_ISAC_GETNEWBITSTREAM(codec_inst_ptr_->inst, q_bwe,
- jitter_info, rate,
- (int16_t*)bitstream,
- (is_red) ? 1 : 0);
-
- if (*bitstream_len_byte < 0) {
- // error happened
- *bitstream_len_byte = 0;
- return -1;
- } else {
- return *bitstream_len_byte;
- }
-}
-
-int16_t ACMISAC::SetBitRateSafe(int32_t bit_rate) {
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
- uint16_t encoder_samp_freq;
- EncoderSampFreq(encoder_samp_freq);
- bool reinit = false;
- // change the BN of iSAC
- if (bit_rate == -1) {
- // ADAPTIVE MODE
- // Check if it was already in adaptive mode
- if (isac_coding_mode_ != ADAPTIVE) {
- // was not in adaptive, then set the mode to adaptive
- // and flag for re-initialization
- isac_coding_mode_ = ADAPTIVE;
- reinit = true;
- }
- } else if ((bit_rate >= ISAC_MIN_RATE) && (bit_rate <= ISAC_MAX_RATE)) {
-    // Sanity check that the rate is valid.
-    // Check if it was in channel-independent mode before.
- if (isac_coding_mode_ != CHANNEL_INDEPENDENT) {
- // was not in channel independent, set the mode to
- // channel-independent and flag for re-initialization
- isac_coding_mode_ = CHANNEL_INDEPENDENT;
- reinit = true;
- }
- // store the bottleneck
- isac_current_bn_ = (uint16_t) bit_rate;
- } else {
-    // Invalid rate.
- return -1;
- }
-
- int16_t status = 0;
- if (reinit) {
- // initialize and check if it is successful
- if (ACM_ISAC_ENCODERINIT(codec_inst_ptr_->inst, isac_coding_mode_) < 0) {
- // failed initialization
- return -1;
- }
- }
- if (isac_coding_mode_ == CHANNEL_INDEPENDENT) {
- status = ACM_ISAC_CONTROL(
- codec_inst_ptr_->inst, isac_current_bn_,
- (encoder_samp_freq == 32000 || encoder_samp_freq == 48000) ? 30 :
- (frame_len_smpl_ / 16));
- if (status < 0) {
- status = -1;
- }
- }
-
- // Update encoder parameters
- encoder_params_.codec_inst.rate = bit_rate;
-
- UpdateFrameLen();
- return status;
-}
-
-int32_t ACMISAC::GetEstimatedBandwidthSafe() {
- int16_t bandwidth_index = 0;
- int16_t delay_index = 0;
- int samp_rate;
-
- // Get bandwidth information
- ACM_ISAC_GETSENDBWE(codec_inst_ptr_->inst, &bandwidth_index, &delay_index);
-
-  // Validity check of index.
- if ((bandwidth_index < 0) || (bandwidth_index >= NR_ISAC_BANDWIDTHS)) {
- return -1;
- }
-
- // Check sample frequency
- samp_rate = ACM_ISAC_GETDECSAMPRATE(codec_inst_ptr_->inst);
- if (samp_rate == 16000) {
- return kIsacRatesWb[bandwidth_index];
- } else {
- return kIsacRatesSwb[bandwidth_index];
- }
-}
-
-int32_t ACMISAC::SetEstimatedBandwidthSafe(
- int32_t estimated_bandwidth) {
- int samp_rate;
- int16_t bandwidth_index;
-
- // Check sample frequency and choose appropriate table
- samp_rate = ACM_ISAC_GETENCSAMPRATE(codec_inst_ptr_->inst);
-
- if (samp_rate == 16000) {
- // Search through the WB rate table to find the index
- bandwidth_index = NR_ISAC_BANDWIDTHS / 2 - 1;
- for (int i = 0; i < (NR_ISAC_BANDWIDTHS / 2); i++) {
- if (estimated_bandwidth == kIsacRatesWb[i]) {
- bandwidth_index = i;
- break;
- } else if (estimated_bandwidth
- == kIsacRatesWb[i + NR_ISAC_BANDWIDTHS / 2]) {
- bandwidth_index = i + NR_ISAC_BANDWIDTHS / 2;
- break;
- } else if (estimated_bandwidth < kIsacRatesWb[i]) {
- bandwidth_index = i;
- break;
- }
- }
- } else {
- // Search through the SWB rate table to find the index
- bandwidth_index = NR_ISAC_BANDWIDTHS - 1;
- for (int i = 0; i < NR_ISAC_BANDWIDTHS; i++) {
- if (estimated_bandwidth <= kIsacRatesSwb[i]) {
- bandwidth_index = i;
- break;
- }
- }
- }
-
- // Set iSAC Bandwidth Estimate
- ACM_ISAC_SETBWE(codec_inst_ptr_->inst, bandwidth_index);
-
- return 0;
-}
-
-int32_t ACMISAC::GetRedPayloadSafe(
-#if (!defined(WEBRTC_CODEC_ISAC))
- uint8_t* /* red_payload */, int16_t* /* payload_bytes */) {
- return -1;
-#else
- uint8_t* red_payload, int16_t* payload_bytes) {
- int16_t bytes = WebRtcIsac_GetRedPayload(codec_inst_ptr_->inst,
- (int16_t*)red_payload);
- if (bytes < 0) {
- return -1;
- }
- *payload_bytes = bytes;
- return 0;
-#endif
-}
-
-int16_t ACMISAC::UpdateDecoderSampFreq(
-#ifdef WEBRTC_CODEC_ISAC
- int16_t codec_id) {
- // The decoder supports only wideband and super-wideband.
- if (ACMCodecDB::kISAC == codec_id) {
- return WebRtcIsac_SetDecSampRate(codec_inst_ptr_->inst, 16000);
- } else if (ACMCodecDB::kISACSWB == codec_id ||
- ACMCodecDB::kISACFB == codec_id) {
- return WebRtcIsac_SetDecSampRate(codec_inst_ptr_->inst, 32000);
- } else {
- return -1;
- }
-#else
- int16_t /* codec_id */) {
- return 0;
-#endif
-}
-
-int16_t ACMISAC::UpdateEncoderSampFreq(
-#ifdef WEBRTC_CODEC_ISAC
- uint16_t encoder_samp_freq_hz) {
- uint16_t current_samp_rate_hz;
- EncoderSampFreq(current_samp_rate_hz);
-
- if (current_samp_rate_hz != encoder_samp_freq_hz) {
- if ((encoder_samp_freq_hz != 16000) &&
- (encoder_samp_freq_hz != 32000) &&
- (encoder_samp_freq_hz != 48000)) {
- return -1;
- } else {
- in_audio_ix_read_ = 0;
- in_audio_ix_write_ = 0;
- in_timestamp_ix_write_ = 0;
- if (WebRtcIsac_SetEncSampRate(codec_inst_ptr_->inst,
- encoder_samp_freq_hz) < 0) {
- return -1;
- }
- samples_in_10ms_audio_ = encoder_samp_freq_hz / 100;
- frame_len_smpl_ = ACM_ISAC_GETNEWFRAMELEN(codec_inst_ptr_->inst);
- encoder_params_.codec_inst.pacsize = frame_len_smpl_;
- encoder_params_.codec_inst.plfreq = encoder_samp_freq_hz;
- return 0;
- }
- }
-#else
-    uint16_t /* encoder_samp_freq_hz */) {
-#endif
- return 0;
-}
-
-int16_t ACMISAC::EncoderSampFreq(uint16_t& samp_freq_hz) {
- samp_freq_hz = ACM_ISAC_GETENCSAMPRATE(codec_inst_ptr_->inst);
- return 0;
-}
-
-int32_t ACMISAC::ConfigISACBandwidthEstimator(
- const uint8_t init_frame_size_msec,
- const uint16_t init_rate_bit_per_sec,
- const bool enforce_frame_size) {
- int16_t status;
- {
- uint16_t samp_freq_hz;
- EncoderSampFreq(samp_freq_hz);
-    // TODO(turajs): At 32 kHz we hard-code a 30 ms frame size and enforce it;
-    // otherwise we might get an error. Revise if control-bwe is changed.
- if (samp_freq_hz == 32000 || samp_freq_hz == 48000) {
- status = ACM_ISAC_CONTROL_BWE(codec_inst_ptr_->inst,
- init_rate_bit_per_sec, 30, 1);
- } else {
- status = ACM_ISAC_CONTROL_BWE(codec_inst_ptr_->inst,
- init_rate_bit_per_sec,
- init_frame_size_msec,
- enforce_frame_size ? 1 : 0);
- }
- }
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Couldn't config iSAC BWE.");
- return -1;
- }
- UpdateFrameLen();
- ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &isac_current_bn_);
- return 0;
-}
-
-int32_t ACMISAC::SetISACMaxPayloadSize(
- const uint16_t max_payload_len_bytes) {
- return ACM_ISAC_SETMAXPAYLOADSIZE(codec_inst_ptr_->inst,
- max_payload_len_bytes);
-}
-
-int32_t ACMISAC::SetISACMaxRate(
- const uint32_t max_rate_bit_per_sec) {
- return ACM_ISAC_SETMAXRATE(codec_inst_ptr_->inst, max_rate_bit_per_sec);
-}
-
-void ACMISAC::UpdateFrameLen() {
- frame_len_smpl_ = ACM_ISAC_GETNEWFRAMELEN(codec_inst_ptr_->inst);
- encoder_params_.codec_inst.pacsize = frame_len_smpl_;
-}
-
-void ACMISAC::CurrentRate(int32_t& rate_bit_per_sec) {
- if (isac_coding_mode_ == ADAPTIVE) {
- ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &rate_bit_per_sec);
- }
-}
-
-bool ACMISAC::DecoderParamsSafe(WebRtcACMCodecParams* dec_params,
- const uint8_t payload_type) {
- if (decoder_initialized_) {
- if (payload_type == decoder_params_.codec_inst.pltype) {
- memcpy(dec_params, &decoder_params_, sizeof(WebRtcACMCodecParams));
- return true;
- }
- if (payload_type == decoder_params_32khz_.codec_inst.pltype) {
- memcpy(dec_params, &decoder_params_32khz_, sizeof(WebRtcACMCodecParams));
- return true;
- }
- }
- return false;
-}
-
-void ACMISAC::SaveDecoderParamSafe(const WebRtcACMCodecParams* codec_params) {
- // set decoder sampling frequency.
- if (codec_params->codec_inst.plfreq == 32000 ||
- codec_params->codec_inst.plfreq == 48000) {
- memcpy(&decoder_params_32khz_, codec_params, sizeof(WebRtcACMCodecParams));
- } else {
- memcpy(&decoder_params_, codec_params, sizeof(WebRtcACMCodecParams));
- }
-}
-
-int16_t ACMISAC::REDPayloadISAC(const int32_t isac_rate,
- const int16_t isac_bw_estimate,
- uint8_t* payload,
- int16_t* payload_len_bytes) {
- int16_t status;
- ReadLockScoped rl(codec_wrapper_lock_);
- status = Transcode(payload, payload_len_bytes, isac_bw_estimate, isac_rate,
- true);
- return status;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
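
The wideband branch of ACMISAC::SetEstimatedBandwidthSafe() above scans a rate table whose first and second halves hold rates for two different operating points, preferring an exact match in either half and otherwise stopping at the first table rate above the estimate. A minimal standalone sketch of that lookup; the table values and the name WbBandwidthIndex are hypothetical stand-ins (the real values live in kIsacRatesWb, which is not part of this hunk):

#include <stdint.h>

// Hypothetical stand-in for kIsacRatesWb; in the real table the two halves
// correspond to different frame sizes.
static const int kNumBandwidths = 8;
static const int32_t kRatesWb[kNumBandwidths] = {
    10000, 11700, 13300, 15000, 10000, 11700, 13300, 15000};

// Mirrors the wideband search in SetEstimatedBandwidthSafe().
static int16_t WbBandwidthIndex(int32_t estimated_bandwidth) {
  int16_t index = kNumBandwidths / 2 - 1;  // Default, as in the original loop.
  for (int i = 0; i < kNumBandwidths / 2; ++i) {
    if (estimated_bandwidth == kRatesWb[i]) {
      index = i;
      break;
    } else if (estimated_bandwidth == kRatesWb[i + kNumBandwidths / 2]) {
      index = i + kNumBandwidths / 2;
      break;
    } else if (estimated_bandwidth < kRatesWb[i]) {
      index = i;
      break;
    }
  }
  return index;
}
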
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.h
deleted file mode 100644
index 20b6c5391be..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.h
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-struct ACMISACInst;
-
-enum IsacCodingMode {
- ADAPTIVE,
- CHANNEL_INDEPENDENT
-};
-
-class ACMISAC : public ACMGenericCodec {
- public:
- explicit ACMISAC(int16_t codec_id);
- virtual ~ACMISAC();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- int16_t DeliverCachedIsacData(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- uint32_t* timestamp,
- WebRtcACMEncodingType* encoding_type,
- const uint16_t isac_rate,
- const uint8_t isac_bwestimate);
-
- int16_t DeliverCachedData(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */,
- uint32_t* /* timestamp */,
- WebRtcACMEncodingType* /* encoding_type */) {
- return -1;
- }
-
- virtual int16_t UpdateDecoderSampFreq(int16_t codec_id) OVERRIDE;
-
- virtual int16_t UpdateEncoderSampFreq(uint16_t samp_freq_hz) OVERRIDE;
-
- virtual int16_t EncoderSampFreq(uint16_t& samp_freq_hz) OVERRIDE;
-
- virtual int32_t ConfigISACBandwidthEstimator(
- const uint8_t init_frame_size_msec,
- const uint16_t init_rate_bit_per_sec,
- const bool enforce_frame_size) OVERRIDE;
-
- virtual int32_t SetISACMaxPayloadSize(
- const uint16_t max_payload_len_bytes) OVERRIDE;
-
- virtual int32_t SetISACMaxRate(const uint32_t max_rate_bit_per_sec) OVERRIDE;
-
- virtual int16_t REDPayloadISAC(const int32_t isac_rate,
- const int16_t isac_bw_estimate,
- uint8_t* payload,
- int16_t* payload_len_bytes) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t bit_rate) OVERRIDE;
-
- virtual int32_t GetEstimatedBandwidthSafe() OVERRIDE;
-
- virtual int32_t SetEstimatedBandwidthSafe(
- int32_t estimated_bandwidth) OVERRIDE;
-
- virtual int32_t GetRedPayloadSafe(uint8_t* red_payload,
- int16_t* payload_bytes) OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- int16_t Transcode(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- int16_t q_bwe,
- int32_t rate,
- bool is_red);
-
- virtual void CurrentRate(int32_t& rate_bit_per_sec) OVERRIDE;
-
- void UpdateFrameLen();
-
- virtual bool DecoderParamsSafe(WebRtcACMCodecParams* dec_params,
- const uint8_t payload_type) OVERRIDE;
-
- virtual void SaveDecoderParamSafe(
- const WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- ACMISACInst* codec_inst_ptr_;
- bool is_enc_initialized_;
- IsacCodingMode isac_coding_mode_;
- bool enforce_frame_size_;
- int32_t isac_current_bn_;
- uint16_t samples_in_10ms_audio_;
- WebRtcACMCodecParams decoder_params_32khz_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac_macros.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac_macros.h
deleted file mode 100644
index 01e1e44b3e6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac_macros.h
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
-
-#include "webrtc/engine_configurations.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifdef WEBRTC_CODEC_ISAC
-#define ACM_ISAC_CREATE WebRtcIsac_Create
-#define ACM_ISAC_FREE WebRtcIsac_Free
-#define ACM_ISAC_ENCODERINIT WebRtcIsac_EncoderInit
-#define ACM_ISAC_ENCODE WebRtcIsac_Encode
-#define ACM_ISAC_DECODERINIT WebRtcIsac_DecoderInit
-#define ACM_ISAC_DECODE_BWE WebRtcIsac_UpdateBwEstimate
-#define ACM_ISAC_DECODE_B WebRtcIsac_Decode
-#define ACM_ISAC_DECODEPLC WebRtcIsac_DecodePlc
-#define ACM_ISAC_CONTROL WebRtcIsac_Control
-#define ACM_ISAC_CONTROL_BWE WebRtcIsac_ControlBwe
-#define ACM_ISAC_GETFRAMELEN WebRtcIsac_ReadFrameLen
-#define ACM_ISAC_GETERRORCODE WebRtcIsac_GetErrorCode
-#define ACM_ISAC_GETSENDBITRATE WebRtcIsac_GetUplinkBw
-#define ACM_ISAC_SETMAXPAYLOADSIZE WebRtcIsac_SetMaxPayloadSize
-#define ACM_ISAC_SETMAXRATE WebRtcIsac_SetMaxRate
-#define ACM_ISAC_GETNEWBITSTREAM WebRtcIsac_GetNewBitStream
-#define ACM_ISAC_GETSENDBWE WebRtcIsac_GetDownLinkBwIndex
-#define ACM_ISAC_SETBWE WebRtcIsac_UpdateUplinkBw
-#define ACM_ISAC_GETBWE WebRtcIsac_ReadBwIndex
-#define ACM_ISAC_GETNEWFRAMELEN WebRtcIsac_GetNewFrameLen
-#define ACM_ISAC_STRUCT ISACStruct
-#define ACM_ISAC_GETENCSAMPRATE WebRtcIsac_EncSampRate
-#define ACM_ISAC_GETDECSAMPRATE WebRtcIsac_DecSampRate
-#endif
-
-#ifdef WEBRTC_CODEC_ISACFX
-#define ACM_ISAC_CREATE WebRtcIsacfix_Create
-#define ACM_ISAC_FREE WebRtcIsacfix_Free
-#define ACM_ISAC_ENCODERINIT WebRtcIsacfix_EncoderInit
-#define ACM_ISAC_ENCODE WebRtcIsacfix_Encode
-#define ACM_ISAC_DECODERINIT WebRtcIsacfix_DecoderInit
-#define ACM_ISAC_DECODE_BWE WebRtcIsacfix_UpdateBwEstimate
-#define ACM_ISAC_DECODE_B WebRtcIsacfix_Decode
-#define ACM_ISAC_DECODEPLC WebRtcIsacfix_DecodePlc
-#define ACM_ISAC_CONTROL ACMISACFixControl // local Impl
-#define ACM_ISAC_CONTROL_BWE ACMISACFixControlBWE // local Impl
-#define ACM_ISAC_GETFRAMELEN WebRtcIsacfix_ReadFrameLen
-#define ACM_ISAC_GETERRORCODE WebRtcIsacfix_GetErrorCode
-#define ACM_ISAC_GETSENDBITRATE ACMISACFixGetSendBitrate // local Impl
-#define ACM_ISAC_SETMAXPAYLOADSIZE WebRtcIsacfix_SetMaxPayloadSize
-#define ACM_ISAC_SETMAXRATE WebRtcIsacfix_SetMaxRate
-#define ACM_ISAC_GETNEWBITSTREAM ACMISACFixGetNewBitstream // local Impl
-#define ACM_ISAC_GETSENDBWE ACMISACFixGetSendBWE // local Impl
-#define ACM_ISAC_SETBWE WebRtcIsacfix_UpdateUplinkBw
-#define ACM_ISAC_GETBWE WebRtcIsacfix_ReadBwIndex
-#define ACM_ISAC_GETNEWFRAMELEN WebRtcIsacfix_GetNewFrameLen
-#define ACM_ISAC_STRUCT ISACFIX_MainStruct
-#define ACM_ISAC_GETENCSAMPRATE ACMISACFixGetEncSampRate // local Impl
-#define ACM_ISAC_GETDECSAMPRATE ACMISACFixGetDecSampRate // local Impl
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
-
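
The point of this macro layer is that the ACM wrapper above is written once against the ACM_ISAC_* names, and the build flag decides whether those names expand to the floating-point (WebRtcIsac_*) or fixed-point (WebRtcIsacfix_*) backend. A self-contained sketch of the same compile-time dispatch pattern; the backend functions, the USE_FLOAT_BACKEND flag, and MY_ENCODERINIT are hypothetical stand-ins, not WebRTC symbols:

// Two stand-in backends; the real pair would be WebRtcIsac_EncoderInit and
// WebRtcIsacfix_EncoderInit.
static int FloatEncoderInit() { return 0; }
static int FixedEncoderInit() { return 0; }

// The build flag selects what the single macro name expands to, just as
// WEBRTC_CODEC_ISAC / WEBRTC_CODEC_ISACFX do for ACM_ISAC_ENCODERINIT above.
#ifdef USE_FLOAT_BACKEND
#define MY_ENCODERINIT FloatEncoderInit
#else
#define MY_ENCODERINIT FixedEncoderInit
#endif

int main() {
  // Caller code is written once against the macro name.
  return MY_ENCODERINIT();
}
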
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.cc
deleted file mode 100644
index 154cc54d004..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.cc
+++ /dev/null
@@ -1,1151 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-
-#include <stdlib.h> // malloc
-
-#include <algorithm> // sort
-#include <vector>
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/system_wrappers/interface/trace_event.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-#define RTP_HEADER_SIZE 12
-#define NETEQ_INIT_FREQ 8000
-#define NETEQ_INIT_FREQ_KHZ (NETEQ_INIT_FREQ/1000)
-#define NETEQ_ERR_MSG_LEN_BYTE (WEBRTC_NETEQ_MAX_ERROR_NAME + 1)
-
-ACMNetEQ::ACMNetEQ()
- : id_(0),
- current_samp_freq_khz_(NETEQ_INIT_FREQ_KHZ),
- avt_playout_(false),
- playout_mode_(voice),
- neteq_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vad_status_(false),
- vad_mode_(VADNormal),
- decode_lock_(RWLockWrapper::CreateRWLock()),
- num_slaves_(0),
- received_stereo_(false),
- master_slave_info_(NULL),
- previous_audio_activity_(AudioFrame::kVadUnknown),
- callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- min_of_max_num_packets_(0),
- min_of_buffer_size_bytes_(0),
- per_packet_overhead_bytes_(0),
- av_sync_(false),
- minimum_delay_ms_(0),
- maximum_delay_ms_(0) {
- for (int n = 0; n < MAX_NUM_SLAVE_NETEQ + 1; n++) {
- is_initialized_[n] = false;
- ptr_vadinst_[n] = NULL;
- inst_[n] = NULL;
- inst_mem_[n] = NULL;
- neteq_packet_buffer_[n] = NULL;
- }
-}
-
-ACMNetEQ::~ACMNetEQ() {
- {
- CriticalSectionScoped lock(neteq_crit_sect_);
- RemoveNetEQSafe(0); // Master.
- RemoveSlavesSafe();
- }
- if (neteq_crit_sect_ != NULL) {
- delete neteq_crit_sect_;
- }
-
- if (decode_lock_ != NULL) {
- delete decode_lock_;
- }
-
- if (callback_crit_sect_ != NULL) {
- delete callback_crit_sect_;
- }
-}
-
-int32_t ACMNetEQ::Init() {
- CriticalSectionScoped lock(neteq_crit_sect_);
-
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (InitByIdxSafe(idx) < 0) {
- return -1;
- }
- // delete VAD instance and start fresh if required.
- if (ptr_vadinst_[idx] != NULL) {
- WebRtcVad_Free(ptr_vadinst_[idx]);
- ptr_vadinst_[idx] = NULL;
- }
- if (vad_status_) {
-      // VAD has to be enabled.
- if (EnableVADByIdxSafe(idx) < 0) {
- // Failed to enable VAD.
-        // Delete the VAD instance if it was created.
- if (ptr_vadinst_[idx] != NULL) {
- WebRtcVad_Free(ptr_vadinst_[idx]);
- ptr_vadinst_[idx] = NULL;
- }
-        // We are at NetEq initialization; if enabling VAD failed,
-        // we delete the NetEq instance.
- if (inst_mem_[idx] != NULL) {
- free(inst_mem_[idx]);
- inst_mem_[idx] = NULL;
- inst_[idx] = NULL;
- }
- is_initialized_[idx] = false;
- return -1;
- }
- }
- is_initialized_[idx] = true;
- }
- if (EnableVAD() == -1) {
- return -1;
- }
- return 0;
-}
-
-int16_t ACMNetEQ::InitByIdxSafe(const int16_t idx) {
- int memory_size_bytes;
- if (WebRtcNetEQ_AssignSize(&memory_size_bytes) != 0) {
- LogError("AssignSize", idx);
- return -1;
- }
-
- if (inst_mem_[idx] != NULL) {
- free(inst_mem_[idx]);
- inst_mem_[idx] = NULL;
- inst_[idx] = NULL;
- }
- inst_mem_[idx] = malloc(memory_size_bytes);
- if (inst_mem_[idx] == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "InitByIdxSafe: NetEq Initialization error: could not "
- "allocate memory for NetEq");
- is_initialized_[idx] = false;
- return -1;
- }
- if (WebRtcNetEQ_Assign(&inst_[idx], inst_mem_[idx]) != 0) {
- if (inst_mem_[idx] != NULL) {
- free(inst_mem_[idx]);
- inst_mem_[idx] = NULL;
- inst_[idx] = NULL;
- }
- LogError("Assign", idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "InitByIdxSafe: NetEq Initialization error: could not Assign");
- is_initialized_[idx] = false;
- return -1;
- }
- if (WebRtcNetEQ_Init(inst_[idx], NETEQ_INIT_FREQ) != 0) {
- if (inst_mem_[idx] != NULL) {
- free(inst_mem_[idx]);
- inst_mem_[idx] = NULL;
- inst_[idx] = NULL;
- }
- LogError("Init", idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "InitByIdxSafe: NetEq Initialization error: could not "
- "initialize NetEq");
- is_initialized_[idx] = false;
- return -1;
- }
- is_initialized_[idx] = true;
- return 0;
-}
-
-int16_t ACMNetEQ::EnableVADByIdxSafe(const int16_t idx) {
- if (ptr_vadinst_[idx] == NULL) {
- if (WebRtcVad_Create(&ptr_vadinst_[idx]) < 0) {
- ptr_vadinst_[idx] = NULL;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "EnableVADByIdxSafe: NetEq Initialization error: could not "
- "create VAD");
- return -1;
- }
- }
-
- if (WebRtcNetEQ_SetVADInstance(
- inst_[idx], ptr_vadinst_[idx],
- (WebRtcNetEQ_VADInitFunction) WebRtcVad_Init,
- (WebRtcNetEQ_VADSetmodeFunction) WebRtcVad_set_mode,
- (WebRtcNetEQ_VADFunction) WebRtcVad_Process) < 0) {
- LogError("setVADinstance", idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "EnableVADByIdxSafe: NetEq Initialization error: could not "
- "set VAD instance");
- return -1;
- }
-
- if (WebRtcNetEQ_SetVADMode(inst_[idx], vad_mode_) < 0) {
- LogError("setVADmode", idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "EnableVADByIdxSafe: NetEq Initialization error: could not "
- "set VAD mode");
- return -1;
- }
- return 0;
-}
-
-int32_t ACMNetEQ::AllocatePacketBuffer(
- const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs) {
-  // WebRtcNetEQ_GetRecommendedBufferSize() expects plain int pointers, so the
-  // buffer-size variables below have to be int to avoid a compiler error
-  // (or casts).
-
- CriticalSectionScoped lock(neteq_crit_sect_);
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (AllocatePacketBufferByIdxSafe(used_codecs, num_codecs, idx) < 0) {
- return -1;
- }
- }
- return 0;
-}
-
-int16_t ACMNetEQ::AllocatePacketBufferByIdxSafe(
- const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs,
- const int16_t idx) {
- int max_num_packets;
- int buffer_size_in_bytes;
- int per_packet_overhead_bytes;
-
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AllocatePacketBufferByIdxSafe: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_GetRecommendedBufferSize(inst_[idx], used_codecs,
- num_codecs,
- kTCPXLargeJitter,
- &max_num_packets,
- &buffer_size_in_bytes,
- &per_packet_overhead_bytes) != 0) {
- LogError("GetRecommendedBufferSize", idx);
- return -1;
- }
- if (idx == 0) {
- min_of_buffer_size_bytes_ = buffer_size_in_bytes;
- min_of_max_num_packets_ = max_num_packets;
- per_packet_overhead_bytes_ = per_packet_overhead_bytes;
- } else {
- min_of_buffer_size_bytes_ = std::min(min_of_buffer_size_bytes_,
- buffer_size_in_bytes);
- min_of_max_num_packets_ = std::min(min_of_max_num_packets_,
- max_num_packets);
- }
- if (neteq_packet_buffer_[idx] != NULL) {
- free(neteq_packet_buffer_[idx]);
- neteq_packet_buffer_[idx] = NULL;
- }
-
- neteq_packet_buffer_[idx] = (int16_t *) malloc(buffer_size_in_bytes);
- if (neteq_packet_buffer_[idx] == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AllocatePacketBufferByIdxSafe: NetEq Initialization error: "
- "could not allocate memory for NetEq Packet Buffer");
- return -1;
- }
- if (WebRtcNetEQ_AssignBuffer(inst_[idx], max_num_packets,
- neteq_packet_buffer_[idx],
- buffer_size_in_bytes) != 0) {
- if (neteq_packet_buffer_[idx] != NULL) {
- free(neteq_packet_buffer_[idx]);
- neteq_packet_buffer_[idx] = NULL;
- }
- LogError("AssignBuffer", idx);
- return -1;
- }
- return 0;
-}
-
-int32_t ACMNetEQ::SetAVTPlayout(const bool enable) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (avt_playout_ != enable) {
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetAVTPlayout: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_SetAVTPlayout(inst_[idx], (enable) ? 1 : 0) < 0) {
- LogError("SetAVTPlayout", idx);
- return -1;
- }
- }
- }
- avt_playout_ = enable;
- return 0;
-}
-
-bool ACMNetEQ::avt_playout() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- return avt_playout_;
-}
-
-int32_t ACMNetEQ::CurrentSampFreqHz() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "CurrentSampFreqHz: NetEq is not initialized.");
- return -1;
- }
- return (int32_t)(1000 * current_samp_freq_khz_);
-}
-
-int32_t ACMNetEQ::SetPlayoutMode(const AudioPlayoutMode mode) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (playout_mode_ == mode)
- return 0;
-
- enum WebRtcNetEQPlayoutMode playout_mode = kPlayoutOff;
- enum WebRtcNetEQBGNMode background_noise_mode = kBGNOn;
- switch (mode) {
- case voice:
- playout_mode = kPlayoutOn;
- background_noise_mode = kBGNOn;
- break;
- case fax:
- playout_mode = kPlayoutFax;
- WebRtcNetEQ_GetBGNMode(inst_[0], &background_noise_mode); // No change.
- break;
- case streaming:
- playout_mode = kPlayoutStreaming;
- background_noise_mode = kBGNOff;
- break;
- case off:
- playout_mode = kPlayoutOff;
- background_noise_mode = kBGNOff;
- break;
- }
-
- int err = 0;
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetPlayoutMode: NetEq is not initialized.");
- return -1;
- }
-
- if (WebRtcNetEQ_SetPlayoutMode(inst_[idx], playout_mode) < 0) {
- LogError("SetPlayoutMode", idx);
- err = -1;
- }
-
- if (WebRtcNetEQ_SetBGNMode(inst_[idx], kBGNOff) < 0) {
- LogError("SetPlayoutMode::SetBGNMode", idx);
- err = -1;
- }
- }
- if (err == 0)
- playout_mode_ = mode;
- return err;
-}
-
-AudioPlayoutMode ACMNetEQ::playout_mode() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- return playout_mode_;
-}
-
-int32_t ACMNetEQ::NetworkStatistics(
- ACMNetworkStatistics* statistics) const {
- WebRtcNetEQ_NetworkStatistics stats;
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "NetworkStatistics: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_GetNetworkStatistics(inst_[0], &stats) == 0) {
- statistics->currentAccelerateRate = stats.currentAccelerateRate;
- statistics->currentBufferSize = stats.currentBufferSize;
- statistics->jitterPeaksFound = (stats.jitterPeaksFound > 0);
- statistics->currentDiscardRate = stats.currentDiscardRate;
- statistics->currentExpandRate = stats.currentExpandRate;
- statistics->currentPacketLossRate = stats.currentPacketLossRate;
- statistics->currentPreemptiveRate = stats.currentPreemptiveRate;
- statistics->preferredBufferSize = stats.preferredBufferSize;
- statistics->clockDriftPPM = stats.clockDriftPPM;
- statistics->addedSamples = stats.addedSamples;
- } else {
- LogError("getNetworkStatistics", 0);
- return -1;
- }
- const int kArrayLen = 100;
- int waiting_times[kArrayLen];
- int waiting_times_len = WebRtcNetEQ_GetRawFrameWaitingTimes(inst_[0],
- kArrayLen,
- waiting_times);
- if (waiting_times_len > 0) {
- std::vector<int> waiting_times_vec(waiting_times,
- waiting_times + waiting_times_len);
- std::sort(waiting_times_vec.begin(), waiting_times_vec.end());
- size_t size = waiting_times_vec.size();
- assert(size == static_cast<size_t>(waiting_times_len));
- if (size % 2 == 0) {
- statistics->medianWaitingTimeMs = (waiting_times_vec[size / 2 - 1] +
- waiting_times_vec[size / 2]) / 2;
- } else {
- statistics->medianWaitingTimeMs = waiting_times_vec[size / 2];
- }
- statistics->minWaitingTimeMs = waiting_times_vec.front();
- statistics->maxWaitingTimeMs = waiting_times_vec.back();
- double sum = 0;
- for (size_t i = 0; i < size; ++i) {
- sum += waiting_times_vec[i];
- }
- statistics->meanWaitingTimeMs = static_cast<int>(sum / size);
- } else if (waiting_times_len == 0) {
- statistics->meanWaitingTimeMs = -1;
- statistics->medianWaitingTimeMs = -1;
- statistics->minWaitingTimeMs = -1;
- statistics->maxWaitingTimeMs = -1;
- } else {
- LogError("getRawFrameWaitingTimes", 0);
- return -1;
- }
- return 0;
-}
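
For reference, a self-contained sketch of the waiting-time statistics computed just above: sort the raw values, average the two middle entries when the count is even, and fall back to -1 when there is no data. The function name is illustrative only:

#include <algorithm>
#include <vector>

// Same median rule that NetworkStatistics() applies to the raw frame
// waiting times.
static int MedianWaitingTimeMs(std::vector<int> waiting_times_ms) {
  if (waiting_times_ms.empty())
    return -1;  // Matches the "no data" convention above.
  std::sort(waiting_times_ms.begin(), waiting_times_ms.end());
  const size_t size = waiting_times_ms.size();
  if (size % 2 == 0)
    return (waiting_times_ms[size / 2 - 1] + waiting_times_ms[size / 2]) / 2;
  return waiting_times_ms[size / 2];
}
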
-
-// Should only be called in AV-sync mode.
-int ACMNetEQ::RecIn(const WebRtcRTPHeader& rtp_info,
- uint32_t receive_timestamp) {
- assert(av_sync_);
-
- // Translate to NetEq structure.
- WebRtcNetEQ_RTPInfo neteq_rtpinfo;
- neteq_rtpinfo.payloadType = rtp_info.header.payloadType;
- neteq_rtpinfo.sequenceNumber = rtp_info.header.sequenceNumber;
- neteq_rtpinfo.timeStamp = rtp_info.header.timestamp;
- neteq_rtpinfo.SSRC = rtp_info.header.ssrc;
- neteq_rtpinfo.markerBit = rtp_info.header.markerBit;
-
- CriticalSectionScoped lock(neteq_crit_sect_);
-
- // Master should be initialized.
- assert(is_initialized_[0]);
-
- // Push into Master.
- int status = WebRtcNetEQ_RecInSyncRTP(inst_[0], &neteq_rtpinfo,
- receive_timestamp);
- if (status < 0) {
- LogError("RecInSyncRTP", 0);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn (sync): NetEq, error in pushing in Master");
- return -1;
- }
-
- // If the received stream is stereo, insert a sync payload into slave.
- if (rtp_info.type.Audio.channel == 2) {
- // Slave should be initialized.
- assert(is_initialized_[1]);
-
- // PUSH into Slave
- status = WebRtcNetEQ_RecInSyncRTP(inst_[1], &neteq_rtpinfo,
- receive_timestamp);
- if (status < 0) {
- LogError("RecInRTPStruct", 1);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn (sync): NetEq, error in pushing in Slave");
- return -1;
- }
- }
- return status;
-}
-
-int32_t ACMNetEQ::RecIn(const uint8_t* incoming_payload,
- const int32_t length_payload,
- const WebRtcRTPHeader& rtp_info,
- uint32_t receive_timestamp) {
- int16_t payload_length = static_cast<int16_t>(length_payload);
-
- // Translate to NetEq structure.
- WebRtcNetEQ_RTPInfo neteq_rtpinfo;
- neteq_rtpinfo.payloadType = rtp_info.header.payloadType;
- neteq_rtpinfo.sequenceNumber = rtp_info.header.sequenceNumber;
- neteq_rtpinfo.timeStamp = rtp_info.header.timestamp;
- neteq_rtpinfo.SSRC = rtp_info.header.ssrc;
- neteq_rtpinfo.markerBit = rtp_info.header.markerBit;
-
- CriticalSectionScoped lock(neteq_crit_sect_);
-
- int status;
- // In case of stereo payload, first half of the data should be pushed into
- // master, and the second half into slave.
- if (rtp_info.type.Audio.channel == 2) {
- payload_length = payload_length / 2;
- }
-
- // Check that master is initialized.
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn: NetEq is not initialized.");
- return -1;
- }
- // Push into Master.
- status = WebRtcNetEQ_RecInRTPStruct(inst_[0], &neteq_rtpinfo,
- incoming_payload, payload_length,
- receive_timestamp);
- if (status < 0) {
- LogError("RecInRTPStruct", 0);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn: NetEq, error in pushing in Master");
- return -1;
- }
-
-  // If the received stream is stereo, insert the second half of the packet
-  // into the slave.
- if (rtp_info.type.Audio.channel == 2) {
- if (!is_initialized_[1]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn: NetEq is not initialized.");
- return -1;
- }
- // Push into Slave.
- status = WebRtcNetEQ_RecInRTPStruct(inst_[1], &neteq_rtpinfo,
- &incoming_payload[payload_length],
- payload_length, receive_timestamp);
- if (status < 0) {
- LogError("RecInRTPStruct", 1);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn: NetEq, error in pushing in Slave");
- return -1;
- }
- }
-
- return 0;
-}
-
-int32_t ACMNetEQ::RecOut(AudioFrame& audio_frame) {
- enum WebRtcNetEQOutputType type;
- int16_t payload_len_sample;
- enum WebRtcNetEQOutputType type_master;
- enum WebRtcNetEQOutputType type_slave;
-
- int16_t payload_len_sample_slave;
-
- CriticalSectionScoped lockNetEq(neteq_crit_sect_);
-
- if (!received_stereo_) {
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq is not initialized.");
- return -1;
- }
- {
- WriteLockScoped lockCodec(*decode_lock_);
- if (WebRtcNetEQ_RecOut(inst_[0], &(audio_frame.data_[0]),
- &payload_len_sample) != 0) {
- LogError("RecOut", 0);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq, error in pulling out for mono case");
- // Check for errors that can be recovered from:
- // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
- int error_code = WebRtcNetEQ_GetErrorCode(inst_[0]);
- if (error_code != 2003) {
- // Cannot recover; return an error
- return -1;
- }
- }
- }
- WebRtcNetEQ_GetSpeechOutputType(inst_[0], &type);
- audio_frame.num_channels_ = 1;
- } else {
- if (!is_initialized_[0] || !is_initialized_[1]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq is not initialized.");
- return -1;
- }
- int16_t payload_master[480];
- int16_t payload_slave[480];
- {
- WriteLockScoped lockCodec(*decode_lock_);
- if (WebRtcNetEQ_RecOutMasterSlave(inst_[0], payload_master,
- &payload_len_sample, master_slave_info_,
- 1) != 0) {
- LogError("RecOutMasterSlave", 0);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq, error in pulling out for master");
-
- // Check for errors that can be recovered from:
- // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
- int error_code = WebRtcNetEQ_GetErrorCode(inst_[0]);
- if (error_code != 2003) {
- // Cannot recover; return an error
- return -1;
- }
- }
- if (WebRtcNetEQ_RecOutMasterSlave(inst_[1], payload_slave,
- &payload_len_sample_slave,
- master_slave_info_, 0) != 0) {
- LogError("RecOutMasterSlave", 1);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq, error in pulling out for slave");
-
- // Check for errors that can be recovered from:
- // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
- int error_code = WebRtcNetEQ_GetErrorCode(inst_[1]);
- if (error_code != 2003) {
- // Cannot recover; return an error
- return -1;
- }
- }
- }
-
- if (payload_len_sample != payload_len_sample_slave) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
-                   "RecOut: mismatch between the length of the decoded audio "
- "by Master (%d samples) and Slave (%d samples).",
- payload_len_sample, payload_len_sample_slave);
- if (payload_len_sample > payload_len_sample_slave) {
- memset(&payload_slave[payload_len_sample_slave], 0,
- (payload_len_sample - payload_len_sample_slave) *
- sizeof(int16_t));
- }
- }
-
- for (int16_t n = 0; n < payload_len_sample; n++) {
- audio_frame.data_[n << 1] = payload_master[n];
- audio_frame.data_[(n << 1) + 1] = payload_slave[n];
- }
- audio_frame.num_channels_ = 2;
-
- WebRtcNetEQ_GetSpeechOutputType(inst_[0], &type_master);
- WebRtcNetEQ_GetSpeechOutputType(inst_[1], &type_slave);
- if ((type_master == kOutputNormal) || (type_slave == kOutputNormal)) {
- type = kOutputNormal;
- } else {
- type = type_master;
- }
- }
-
- audio_frame.samples_per_channel_ =
- static_cast<uint16_t>(payload_len_sample);
- // NetEq always returns 10 ms of audio.
- current_samp_freq_khz_ =
- static_cast<float>(audio_frame.samples_per_channel_) / 10.0f;
- audio_frame.sample_rate_hz_ = audio_frame.samples_per_channel_ * 100;
- if (vad_status_) {
- if (type == kOutputVADPassive) {
- audio_frame.vad_activity_ = AudioFrame::kVadPassive;
- audio_frame.speech_type_ = AudioFrame::kNormalSpeech;
- } else if (type == kOutputNormal) {
- audio_frame.vad_activity_ = AudioFrame::kVadActive;
- audio_frame.speech_type_ = AudioFrame::kNormalSpeech;
- } else if (type == kOutputPLC) {
- audio_frame.vad_activity_ = previous_audio_activity_;
- audio_frame.speech_type_ = AudioFrame::kPLC;
- } else if (type == kOutputCNG) {
- audio_frame.vad_activity_ = AudioFrame::kVadPassive;
- audio_frame.speech_type_ = AudioFrame::kCNG;
- } else {
- audio_frame.vad_activity_ = AudioFrame::kVadPassive;
- audio_frame.speech_type_ = AudioFrame::kPLCCNG;
- }
- } else {
- // Always return kVadUnknown when receive VAD is inactive
- audio_frame.vad_activity_ = AudioFrame::kVadUnknown;
- if (type == kOutputNormal) {
- audio_frame.speech_type_ = AudioFrame::kNormalSpeech;
- } else if (type == kOutputPLC) {
- audio_frame.speech_type_ = AudioFrame::kPLC;
- } else if (type == kOutputPLCtoCNG) {
- audio_frame.speech_type_ = AudioFrame::kPLCCNG;
- } else if (type == kOutputCNG) {
- audio_frame.speech_type_ = AudioFrame::kCNG;
- } else {
-      // type is kOutputVADPassive, which we don't expect to get when
-      // vad_status_ is false.
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq returned kVadPassive while vad_status_ is "
- "false.");
- audio_frame.vad_activity_ = AudioFrame::kVadUnknown;
- audio_frame.speech_type_ = AudioFrame::kNormalSpeech;
- }
- }
- previous_audio_activity_ = audio_frame.vad_activity_;
-
- WebRtcNetEQ_ProcessingActivity processing_stats;
- WebRtcNetEQ_GetProcessingActivity(inst_[0], &processing_stats);
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "ACM::RecOut accelerate_bgn=%d accelerate_normal=%d"
- " expand_bgn=%d expand_normal=%d"
- " preemptive_bgn=%d preemptive_normal=%d"
- " merge_bgn=%d merge_normal=%d",
- processing_stats.accelerate_bgn_samples,
- processing_stats.accelerate_normal_samples,
- processing_stats.expand_bgn_sampels,
- processing_stats.expand_normal_samples,
- processing_stats.preemptive_expand_bgn_samples,
- processing_stats.preemptive_expand_normal_samples,
- processing_stats.merge_expand_bgn_samples,
- processing_stats.merge_expand_normal_samples);
- return 0;
-}
-
-// When ACMGenericCodec has set the codec specific parameters in codec_def
-// it calls AddCodec() to add the new codec to the NetEQ database.
-int32_t ACMNetEQ::AddCodec(WebRtcNetEQ_CodecDef* codec_def,
- bool to_master) {
- if (codec_def == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ACMNetEQ::AddCodec: error, codec_def is NULL");
- return -1;
- }
- CriticalSectionScoped lock(neteq_crit_sect_);
-
- int16_t idx;
- if (to_master) {
- idx = 0;
- } else {
- idx = 1;
- }
-
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ACMNetEQ::AddCodec: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_CodecDbAdd(inst_[idx], codec_def) < 0) {
- LogError("CodecDB_Add", idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ACMNetEQ::AddCodec: NetEq, error in adding codec");
- return -1;
- } else {
- return 0;
- }
-}
-
-// Creates a Word16 RTP packet out of a Word8 payload and an rtp info struct.
-// Must be byte order safe.
-void ACMNetEQ::RTPPack(int16_t* rtp_packet, const int8_t* payload,
- const int32_t payload_length_bytes,
- const WebRtcRTPHeader& rtp_info) {
- int32_t idx = 0;
- WEBRTC_SPL_SET_BYTE(rtp_packet, (int8_t) 0x80, idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet, rtp_info.header.payloadType, idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.sequenceNumber), 1),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.sequenceNumber), 0),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.timestamp), 3),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.timestamp), 2),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.timestamp), 1),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.timestamp), 0),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.ssrc), 3), idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet, WEBRTC_SPL_GET_BYTE(&(rtp_info.header.ssrc),
- 2), idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet, WEBRTC_SPL_GET_BYTE(&(rtp_info.header.ssrc),
- 1), idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet, WEBRTC_SPL_GET_BYTE(&(rtp_info.header.ssrc),
- 0), idx);
- idx++;
- for (int16_t i = 0; i < payload_length_bytes; i++) {
- WEBRTC_SPL_SET_BYTE(rtp_packet, payload[i], idx);
- idx++;
- }
- if (payload_length_bytes & 1) {
-    // Our 16-bit buffer is one byte too large; set that last byte to zero.
- WEBRTC_SPL_SET_BYTE(rtp_packet, 0x0, idx);
- }
-}
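
RTPPack() above writes the 12-byte RTP header most-significant byte first into a 16-bit word buffer through the WEBRTC_SPL_SET_BYTE/GET_BYTE macros. A byte-oriented sketch of the same header layout, which may be easier to follow; PackRtpHeader is a hypothetical name and, like the original, it does not fold the marker bit into the second byte:

#include <stdint.h>

// Packs a minimal RTP header (version 2, no CSRCs) in network byte order,
// mirroring the field order used by ACMNetEQ::RTPPack().
static void PackRtpHeader(uint8_t* packet, uint8_t payload_type,
                          uint16_t sequence_number, uint32_t timestamp,
                          uint32_t ssrc) {
  packet[0] = 0x80;  // V=2, P=0, X=0, CC=0.
  packet[1] = payload_type;
  packet[2] = static_cast<uint8_t>(sequence_number >> 8);
  packet[3] = static_cast<uint8_t>(sequence_number & 0xFF);
  packet[4] = static_cast<uint8_t>(timestamp >> 24);
  packet[5] = static_cast<uint8_t>(timestamp >> 16);
  packet[6] = static_cast<uint8_t>(timestamp >> 8);
  packet[7] = static_cast<uint8_t>(timestamp & 0xFF);
  packet[8] = static_cast<uint8_t>(ssrc >> 24);
  packet[9] = static_cast<uint8_t>(ssrc >> 16);
  packet[10] = static_cast<uint8_t>(ssrc >> 8);
  packet[11] = static_cast<uint8_t>(ssrc & 0xFF);
}
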
-
-int16_t ACMNetEQ::EnableVAD() {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (vad_status_) {
- return 0;
- }
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetVADStatus: NetEq is not initialized.");
- return -1;
- }
- // VAD was off and we have to turn it on
- if (EnableVADByIdxSafe(idx) < 0) {
- return -1;
- }
-
- // Set previous VAD status to PASSIVE
- previous_audio_activity_ = AudioFrame::kVadPassive;
- }
- vad_status_ = true;
- return 0;
-}
-
-ACMVADMode ACMNetEQ::vad_mode() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- return vad_mode_;
-}
-
-int16_t ACMNetEQ::SetVADMode(const ACMVADMode mode) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if ((mode < VADNormal) || (mode > VADVeryAggr)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetVADMode: NetEq error: could not set VAD mode, mode is not "
- "supported");
- return -1;
- } else {
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetVADMode: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_SetVADMode(inst_[idx], mode) < 0) {
- LogError("SetVADmode", idx);
- return -1;
- }
- }
- vad_mode_ = mode;
- return 0;
- }
-}
-
-int32_t ACMNetEQ::FlushBuffers() {
- CriticalSectionScoped lock(neteq_crit_sect_);
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "FlushBuffers: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_FlushBuffers(inst_[idx]) < 0) {
- LogError("FlushBuffers", idx);
- return -1;
- }
- }
- return 0;
-}
-
-int16_t ACMNetEQ::RemoveCodec(WebRtcNetEQDecoder codec_idx,
- bool is_stereo) {
- // sanity check
- if ((codec_idx <= kDecoderReservedStart) ||
- (codec_idx >= kDecoderReservedEnd)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RemoveCodec: NetEq error: could not Remove Codec, codec "
- "index out of range");
- return -1;
- }
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RemoveCodec: NetEq is not initialized.");
- return -1;
- }
-
- if (WebRtcNetEQ_CodecDbRemove(inst_[0], codec_idx) < 0) {
- LogError("CodecDB_Remove", 0);
- return -1;
- }
-
- if (is_stereo) {
- if (WebRtcNetEQ_CodecDbRemove(inst_[1], codec_idx) < 0) {
- LogError("CodecDB_Remove", 1);
- return -1;
- }
- }
-
- return 0;
-}
-
-int16_t ACMNetEQ::SetBackgroundNoiseMode(
- const ACMBackgroundNoiseMode mode) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetBackgroundNoiseMode: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_SetBGNMode(inst_[idx], (WebRtcNetEQBGNMode) mode) < 0) {
- LogError("SetBGNMode", idx);
- return -1;
- }
- }
- return 0;
-}
-
-int16_t ACMNetEQ::BackgroundNoiseMode(ACMBackgroundNoiseMode& mode) {
- WebRtcNetEQBGNMode my_mode;
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "BackgroundNoiseMode: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_GetBGNMode(inst_[0], &my_mode) < 0) {
- LogError("WebRtcNetEQ_GetBGNMode", 0);
- return -1;
- } else {
- mode = (ACMBackgroundNoiseMode) my_mode;
- }
- return 0;
-}
-
-void ACMNetEQ::set_id(int32_t id) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- id_ = id;
-}
-
-void ACMNetEQ::LogError(const char* neteq_func_name,
- const int16_t idx) const {
- char error_name[NETEQ_ERR_MSG_LEN_BYTE];
- char my_func_name[50];
- int neteq_error_code = WebRtcNetEQ_GetErrorCode(inst_[idx]);
- WebRtcNetEQ_GetErrorName(neteq_error_code, error_name,
- NETEQ_ERR_MSG_LEN_BYTE - 1);
- strncpy(my_func_name, neteq_func_name, 49);
- error_name[NETEQ_ERR_MSG_LEN_BYTE - 1] = '\0';
- my_func_name[49] = '\0';
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "NetEq-%d Error in function %s, error-code: %d, error-string: "
- " %s", idx, my_func_name, neteq_error_code, error_name);
-}
-
-int32_t ACMNetEQ::PlayoutTimestamp(uint32_t& timestamp) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (WebRtcNetEQ_GetSpeechTimeStamp(inst_[0], &timestamp) < 0) {
- LogError("GetSpeechTimeStamp", 0);
- return -1;
- } else {
- return 0;
- }
-}
-
-void ACMNetEQ::RemoveSlaves() {
- CriticalSectionScoped lock(neteq_crit_sect_);
- RemoveSlavesSafe();
-}
-
-void ACMNetEQ::RemoveSlavesSafe() {
- for (int i = 1; i < num_slaves_ + 1; i++) {
- RemoveNetEQSafe(i);
- }
-
- if (master_slave_info_ != NULL) {
- free(master_slave_info_);
- master_slave_info_ = NULL;
- }
- num_slaves_ = 0;
-}
-
-void ACMNetEQ::RemoveNetEQSafe(int index) {
- if (inst_mem_[index] != NULL) {
- free(inst_mem_[index]);
- inst_mem_[index] = NULL;
- inst_[index] = NULL;
- }
- if (neteq_packet_buffer_[index] != NULL) {
- free(neteq_packet_buffer_[index]);
- neteq_packet_buffer_[index] = NULL;
- }
- if (ptr_vadinst_[index] != NULL) {
- WebRtcVad_Free(ptr_vadinst_[index]);
- ptr_vadinst_[index] = NULL;
- }
-}
-
-int16_t ACMNetEQ::AddSlave(const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- const int16_t slave_idx = 1;
- if (num_slaves_ < 1) {
-    // Initialize the receiver; this also sets up VAD.
- if (InitByIdxSafe(slave_idx) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not Initialize");
- return -1;
- }
-
- // Allocate buffer.
- if (AllocatePacketBufferByIdxSafe(used_codecs, num_codecs,
- slave_idx) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not Allocate Packet "
- "Buffer");
- return -1;
- }
-
- if (master_slave_info_ != NULL) {
- free(master_slave_info_);
- master_slave_info_ = NULL;
- }
- int ms_info_size = WebRtcNetEQ_GetMasterSlaveInfoSize();
- master_slave_info_ = malloc(ms_info_size);
-
- if (master_slave_info_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not Allocate memory for "
- "Master-Slave Info");
- return -1;
- }
-
-    // We accept this as an initialized NetEQ; the rest is to synchronize
-    // the Slave with the Master.
- num_slaves_ = 1;
- is_initialized_[slave_idx] = true;
-
- // Set AVT
- if (WebRtcNetEQ_SetAVTPlayout(inst_[slave_idx],
- (avt_playout_) ? 1 : 0) < 0) {
- LogError("SetAVTPlayout", slave_idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not set AVT playout.");
- return -1;
- }
-
- // Set Background Noise
- WebRtcNetEQBGNMode current_mode;
- if (WebRtcNetEQ_GetBGNMode(inst_[0], &current_mode) < 0) {
- LogError("GetBGNMode", 0);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
-                   "AddSlave: AddSlave Failed, Could not Get BGN from "
- "Master.");
- return -1;
- }
-
- if (WebRtcNetEQ_SetBGNMode(inst_[slave_idx],
- (WebRtcNetEQBGNMode) current_mode) < 0) {
- LogError("SetBGNMode", slave_idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not set BGN mode.");
- return -1;
- }
-
- enum WebRtcNetEQPlayoutMode playout_mode = kPlayoutOff;
- switch (playout_mode_) {
- case voice:
- playout_mode = kPlayoutOn;
- break;
- case fax:
- playout_mode = kPlayoutFax;
- break;
- case streaming:
- playout_mode = kPlayoutStreaming;
- break;
- case off:
- playout_mode = kPlayoutOff;
- break;
- }
- if (WebRtcNetEQ_SetPlayoutMode(inst_[slave_idx], playout_mode) < 0) {
- LogError("SetPlayoutMode", 1);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not Set Playout Mode.");
- return -1;
- }
-
- // Set AV-sync for the slave.
- WebRtcNetEQ_EnableAVSync(inst_[slave_idx], av_sync_ ? 1 : 0);
-
- // Set minimum delay.
- if (minimum_delay_ms_ > 0)
- WebRtcNetEQ_SetMinimumDelay(inst_[slave_idx], minimum_delay_ms_);
-
- // Set maximum delay.
- if (maximum_delay_ms_ > 0)
- WebRtcNetEQ_SetMaximumDelay(inst_[slave_idx], maximum_delay_ms_);
- }
-
- return 0;
-}
-
-void ACMNetEQ::set_received_stereo(bool received_stereo) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- received_stereo_ = received_stereo;
-}
-
-uint8_t ACMNetEQ::num_slaves() {
- CriticalSectionScoped lock(neteq_crit_sect_);
- return num_slaves_;
-}
-
-void ACMNetEQ::EnableAVSync(bool enable) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- av_sync_ = enable;
- for (int i = 0; i < num_slaves_ + 1; ++i) {
- assert(is_initialized_[i]);
- WebRtcNetEQ_EnableAVSync(inst_[i], enable ? 1 : 0);
- }
-}
-
-int ACMNetEQ::SetMinimumDelay(int minimum_delay_ms) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- for (int i = 0; i < num_slaves_ + 1; ++i) {
- assert(is_initialized_[i]);
- if (WebRtcNetEQ_SetMinimumDelay(inst_[i], minimum_delay_ms) < 0)
- return -1;
- }
- minimum_delay_ms_ = minimum_delay_ms;
- return 0;
-}
-
-int ACMNetEQ::SetMaximumDelay(int maximum_delay_ms) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- for (int i = 0; i < num_slaves_ + 1; ++i) {
- assert(is_initialized_[i]);
- if (WebRtcNetEQ_SetMaximumDelay(inst_[i], maximum_delay_ms) < 0)
- return -1;
- }
- maximum_delay_ms_ = maximum_delay_ms;
- return 0;
-}
-
-int ACMNetEQ::LeastRequiredDelayMs() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- assert(is_initialized_[0]);
-
- // Sufficient to query the master.
- return WebRtcNetEQ_GetRequiredDelayMs(inst_[0]);
-}
-
-bool ACMNetEQ::DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (WebRtcNetEQ_DecodedRtpInfo(inst_[0], sequence_number, timestamp) < 0)
- return false;
- return true;
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.h
deleted file mode 100644
index e52ddc79571..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.h
+++ /dev/null
@@ -1,399 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
-
-#include "webrtc/common_audio/vad/include/webrtc_vad.h"
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class RWLockWrapper;
-struct CodecInst;
-
-namespace acm1 {
-
-#define MAX_NUM_SLAVE_NETEQ 1
-
-class ACMNetEQ {
- public:
- enum JitterBuffer {
- kMasterJb = 0,
- kSlaveJb = 1
- };
-
- // Constructor of the class
- ACMNetEQ();
-
- // Destructor of the class.
- ~ACMNetEQ();
-
- //
- // Init()
- // Allocates memory for NetEQ and VAD and initializes them.
- //
- // Return value : 0 if ok.
- // -1 if NetEQ or VAD returned an error or
- // if out of memory.
- //
- int32_t Init();
-
- //
- // RecIn()
- // Gives the payload to NetEQ.
- //
- // Input:
- // - incoming_payload : Incoming audio payload.
- // - length_payload : Length of incoming audio payload.
- // - rtp_info : RTP header for the incoming payload containing
- // information about payload type, sequence number,
- // timestamp, SSRC and marker bit.
- // - receive_timestamp : received timestamp.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t RecIn(const uint8_t* incoming_payload,
- const int32_t length_payload,
- const WebRtcRTPHeader& rtp_info,
- uint32_t receive_timestamp);
-
- //
- // RecIn()
-  // Inserts a sync payload into NetEq. Should only be called if |av_sync_| is
-  // enabled.
- //
- // Input:
- // - rtp_info : RTP header for the incoming payload containing
- // information about payload type, sequence number,
- // timestamp, SSRC and marker bit.
- // - receive_timestamp : received timestamp.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int RecIn(const WebRtcRTPHeader& rtp_info, uint32_t receive_timestamp);
-
- //
- // RecOut()
- // Asks NetEQ for 10 ms of decoded audio.
- //
- // Input:
-  //        -audio_frame         : an audio frame where output data and
- // associated parameters are written to.
- //
- // Return value : 0 if ok.
- // -1 if NetEQ returned an error.
- //
- int32_t RecOut(AudioFrame& audio_frame);
-
- //
- // AddCodec()
- // Adds a new codec to the NetEQ codec database.
- //
- // Input:
- // - codec_def : The codec to be added.
- // - to_master : true if the codec has to be added to Master
- // NetEq, otherwise will be added to the Slave
- // NetEQ.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t AddCodec(WebRtcNetEQ_CodecDef *codec_def,
- bool to_master = true);
-
- //
- // AllocatePacketBuffer()
- // Allocates the NetEQ packet buffer.
- //
- // Input:
- // - used_codecs : An array of the codecs to be used by NetEQ.
- // - num_codecs : Number of codecs in used_codecs.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t AllocatePacketBuffer(const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs);
-
- //
- // SetAVTPlayout()
- // Enable/disable playout of AVT payloads.
- //
- // Input:
- // - enable : Enable if true, disable if false.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t SetAVTPlayout(const bool enable);
-
- //
- // AVTPlayout()
- // Get the current AVT playout state.
- //
- // Return value : True if AVT playout is enabled.
- // False if AVT playout is disabled.
- //
- bool avt_playout() const;
-
- //
- // CurrentSampFreqHz()
- // Get the current sampling frequency in Hz.
- //
- // Return value : Sampling frequency in Hz.
- //
- int32_t CurrentSampFreqHz() const;
-
- //
- // SetPlayoutMode()
-  // Sets the playout mode to voice, fax, streaming, or off.
- //
- // Input:
- // - mode : The playout mode to be used, voice,
- // fax, or streaming.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t SetPlayoutMode(const AudioPlayoutMode mode);
-
- //
- // PlayoutMode()
- // Get the current playout mode.
- //
- // Return value : The current playout mode.
- //
- AudioPlayoutMode playout_mode() const;
-
- //
- // NetworkStatistics()
- // Get the current network statistics from NetEQ.
- //
- // Output:
- // - statistics : The current network statistics.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t NetworkStatistics(ACMNetworkStatistics* statistics) const;
-
- //
- // VADMode()
- // Get the current VAD Mode.
- //
- // Return value : The current VAD mode.
- //
- ACMVADMode vad_mode() const;
-
- //
- // SetVADMode()
- // Set the VAD mode.
- //
- // Input:
- // - mode : The new VAD mode.
- //
- // Return value : 0 if ok.
- // -1 if an error occurred.
- //
- int16_t SetVADMode(const ACMVADMode mode);
-
- //
- // DecodeLock()
- // Get the decode lock used to protect decoder instances while decoding.
- //
- // Return value : Pointer to the decode lock.
- //
- RWLockWrapper* DecodeLock() const {
- return decode_lock_;
- }
-
- //
- // FlushBuffers()
- // Flushes the NetEQ packet and speech buffers.
- //
- // Return value : 0 if ok.
- // -1 if NetEQ returned an error.
- //
- int32_t FlushBuffers();
-
- //
- // RemoveCodec()
- // Removes a codec from the NetEQ codec database.
- //
- // Input:
- // - codec_idx : Codec to be removed.
- //
- // Return value : 0 if ok.
- // -1 if an error occurred.
- //
- int16_t RemoveCodec(WebRtcNetEQDecoder codec_idx,
- bool is_stereo = false);
-
- //
- // SetBackgroundNoiseMode()
- // Set the mode of the background noise.
- //
- // Input:
- // - mode : an enumerator specifying the mode of the
- // background noise.
- //
- // Return value : 0 if succeeded,
- // -1 if failed to set the mode.
- //
- int16_t SetBackgroundNoiseMode(const ACMBackgroundNoiseMode mode);
-
- //
- // BackgroundNoiseMode()
-  // Returns the mode of the background noise.
- //
- // Return value : The mode of background noise.
- //
- int16_t BackgroundNoiseMode(ACMBackgroundNoiseMode& mode);
-
- void set_id(int32_t id);
-
- int32_t PlayoutTimestamp(uint32_t& timestamp);
-
- void set_received_stereo(bool received_stereo);
-
- uint8_t num_slaves();
-
- // Delete all slaves.
- void RemoveSlaves();
-
- int16_t AddSlave(const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs);
-
- void BufferSpec(int& num_packets, int& size_bytes, int& overhead_bytes) {
- num_packets = min_of_max_num_packets_;
- size_bytes = min_of_buffer_size_bytes_;
- overhead_bytes = per_packet_overhead_bytes_;
- }
-
- //
- // Set AV-sync mode.
- //
- void EnableAVSync(bool enable);
-
- //
- // Get sequence number and timestamp of the last decoded RTP.
- //
- bool DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const;
-
- //
-  // Set a minimum delay in NetEq. Unless channel conditions dictate a longer
-  // delay, the given delay is maintained by NetEq.
- //
- int SetMinimumDelay(int minimum_delay_ms);
-
- //
- // Set a maximum delay in NetEq.
- //
- int SetMaximumDelay(int maximum_delay_ms);
-
- //
-  // The shortest latency, in milliseconds, required by the jitter buffer.
-  // This is computed based on inter-arrival times and the playout mode of
-  // NetEq. The actual delay is the maximum of the least-required delay and
-  // the minimum delay specified by the SetMinimumPlayoutDelay() API.
-  //
-  int LeastRequiredDelayMs() const;
-
- private:
- //
- // RTPPack()
-  // Packs the payload data and a WebRtcRTPHeader into an RTP packet stored
-  // as an array of 16-bit (Word16) words.
- //
- // Input:
- // - payload : Payload to be packetized.
- // - payload_length_bytes : Length of the payload in bytes.
- // - rtp_info : RTP header structure.
- //
- // Output:
- // - rtp_packet : The RTP packet.
- //
- static void RTPPack(int16_t* rtp_packet, const int8_t* payload,
- const int32_t payload_length_bytes,
- const WebRtcRTPHeader& rtp_info);
-
- void LogError(const char* neteq_func_name, const int16_t idx) const;
-
- int16_t InitByIdxSafe(const int16_t idx);
-
- //
- // EnableVAD()
- // Enable VAD.
- //
- // Return value : 0 if ok.
- // -1 if an error occurred.
- //
- int16_t EnableVAD();
-
- int16_t EnableVADByIdxSafe(const int16_t idx);
-
- int16_t AllocatePacketBufferByIdxSafe(
- const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs,
- const int16_t idx);
-
- // Delete the NetEQ corresponding to |index|.
- void RemoveNetEQSafe(int index);
-
- void RemoveSlavesSafe();
-
- void* inst_[MAX_NUM_SLAVE_NETEQ + 1];
- void* inst_mem_[MAX_NUM_SLAVE_NETEQ + 1];
-
- int16_t* neteq_packet_buffer_[MAX_NUM_SLAVE_NETEQ + 1];
-
- int32_t id_;
- float current_samp_freq_khz_;
- bool avt_playout_;
- AudioPlayoutMode playout_mode_;
- CriticalSectionWrapper* neteq_crit_sect_;
-
- WebRtcVadInst* ptr_vadinst_[MAX_NUM_SLAVE_NETEQ + 1];
-
- bool vad_status_;
- ACMVADMode vad_mode_;
- RWLockWrapper* decode_lock_;
- bool is_initialized_[MAX_NUM_SLAVE_NETEQ + 1];
- uint8_t num_slaves_;
- bool received_stereo_;
- void* master_slave_info_;
- AudioFrame::VADActivity previous_audio_activity_;
-
- CriticalSectionWrapper* callback_crit_sect_;
- // Minimum of "max number of packets," among all NetEq instances.
- int min_of_max_num_packets_;
- // Minimum of buffer-size among all NetEq instances.
- int min_of_buffer_size_bytes_;
- int per_packet_overhead_bytes_;
-
- // Keep track of AV-sync. Just used to set the slave when a slave is added.
- bool av_sync_;
-
- int minimum_delay_ms_;
- int maximum_delay_ms_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc
deleted file mode 100644
index 8b973ba230f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This file contains unit tests for ACM's NetEQ wrapper (class ACMNetEQ).
-
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-
-#include <stdlib.h>
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class AcmNetEqTest : public ::testing::Test {
- protected:
- static const size_t kMaxPayloadLen = 5760; // 60 ms, 48 kHz, 16 bit samples.
- static const int kPcm16WbPayloadType = 94;
- AcmNetEqTest() {}
- virtual void SetUp();
- virtual void TearDown() {}
-
- void InsertZeroPacket(uint16_t sequence_number,
- uint32_t timestamp,
- uint8_t payload_type,
- uint32_t ssrc,
- bool marker_bit,
- size_t len_payload_bytes);
- void PullData(int expected_num_samples);
-
- ACMNetEQ neteq_;
-};
-
-void AcmNetEqTest::SetUp() {
- ASSERT_EQ(0, neteq_.Init());
- ASSERT_EQ(0, neteq_.AllocatePacketBuffer(ACMCodecDB::NetEQDecoders(),
- ACMCodecDB::kNumCodecs));
- WebRtcNetEQ_CodecDef codec_def;
- SET_CODEC_PAR(codec_def, kDecoderPCM16Bwb, kPcm16WbPayloadType, NULL, 16000);
- SET_PCM16B_WB_FUNCTIONS(codec_def);
- ASSERT_EQ(0, neteq_.AddCodec(&codec_def, true));
-}
-
-void AcmNetEqTest::InsertZeroPacket(uint16_t sequence_number,
- uint32_t timestamp,
- uint8_t payload_type,
- uint32_t ssrc,
- bool marker_bit,
- size_t len_payload_bytes) {
- ASSERT_TRUE(len_payload_bytes <= kMaxPayloadLen);
- uint16_t payload[kMaxPayloadLen] = {0};
- WebRtcRTPHeader rtp_header;
- rtp_header.header.sequenceNumber = sequence_number;
- rtp_header.header.timestamp = timestamp;
- rtp_header.header.ssrc = ssrc;
- rtp_header.header.payloadType = payload_type;
- rtp_header.header.markerBit = marker_bit;
- rtp_header.type.Audio.channel = 1;
- // Receive timestamp can be set to send timestamp in this test.
- ASSERT_EQ(0, neteq_.RecIn(reinterpret_cast<uint8_t*>(payload),
- len_payload_bytes, rtp_header, timestamp));
-}
-
-void AcmNetEqTest::PullData(int expected_num_samples) {
- AudioFrame out_frame;
- ASSERT_EQ(0, neteq_.RecOut(out_frame));
- ASSERT_EQ(expected_num_samples, out_frame.samples_per_channel_);
-}
-
-TEST_F(AcmNetEqTest, NetworkStatistics) {
- // Use fax mode to avoid time-scaling. This is to simplify the testing of
- // packet waiting times in the packet buffer.
- neteq_.SetPlayoutMode(fax);
-  // Insert 30 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
- int num_frames = 30;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- int i, j;
- for (i = 0; i < num_frames; ++i) {
- InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
- kPayloadBytes);
- }
- // Pull out data once.
- PullData(kSamples);
- // Insert one more packet (to produce different mean and median).
- i = num_frames;
- InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
- kPayloadBytes);
- // Pull out all data.
- for (j = 1; j < num_frames + 1; ++j) {
- PullData(kSamples);
- }
-
- ACMNetworkStatistics stats;
- ASSERT_EQ(0, neteq_.NetworkStatistics(&stats));
- EXPECT_EQ(0, stats.currentBufferSize);
- EXPECT_EQ(0, stats.preferredBufferSize);
- EXPECT_FALSE(stats.jitterPeaksFound);
- EXPECT_EQ(0, stats.currentPacketLossRate);
- EXPECT_EQ(0, stats.currentDiscardRate);
- EXPECT_EQ(0, stats.currentExpandRate);
- EXPECT_EQ(0, stats.currentPreemptiveRate);
- EXPECT_EQ(0, stats.currentAccelerateRate);
- EXPECT_EQ(-916, stats.clockDriftPPM); // Initial value is slightly off.
- EXPECT_EQ(300, stats.maxWaitingTimeMs);
- EXPECT_EQ(10, stats.minWaitingTimeMs);
- EXPECT_EQ(159, stats.meanWaitingTimeMs);
- EXPECT_EQ(160, stats.medianWaitingTimeMs);
-}
-
-TEST_F(AcmNetEqTest, TestZeroLengthWaitingTimesVector) {
- // Insert one packet.
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- int i = 0;
- InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
- kPayloadBytes);
- // Do not pull out any data.
-
- ACMNetworkStatistics stats;
- ASSERT_EQ(0, neteq_.NetworkStatistics(&stats));
- EXPECT_EQ(0, stats.currentBufferSize);
- EXPECT_EQ(0, stats.preferredBufferSize);
- EXPECT_FALSE(stats.jitterPeaksFound);
- EXPECT_EQ(0, stats.currentPacketLossRate);
- EXPECT_EQ(0, stats.currentDiscardRate);
- EXPECT_EQ(0, stats.currentExpandRate);
- EXPECT_EQ(0, stats.currentPreemptiveRate);
- EXPECT_EQ(0, stats.currentAccelerateRate);
- EXPECT_EQ(-916, stats.clockDriftPPM); // Initial value is slightly off.
- EXPECT_EQ(-1, stats.minWaitingTimeMs);
- EXPECT_EQ(-1, stats.maxWaitingTimeMs);
- EXPECT_EQ(-1, stats.meanWaitingTimeMs);
- EXPECT_EQ(-1, stats.medianWaitingTimeMs);
-}
-
-} // namespace acm1
-
-} // namespace webrtc
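
The ACMNetEQ wrapper removed above is driven through a short setup sequence (Init, AllocatePacketBuffer, AddCodec, SetPlayoutMode). Below is a minimal usage sketch, assuming the pre-acm2 acm1 tree that this change deletes and using only the calls and constants that appear in the header and unit test above; SetUpNetEq is an illustrative helper, not part of the deleted code.

    // Sketch only: mirrors AcmNetEqTest::SetUp() above.
    #include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
    #include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
    #include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
    #include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"

    namespace webrtc {
    namespace acm1 {

    // Returns 0 on success, a negative value if NetEQ reported an error.
    int SetUpNetEq(ACMNetEQ* neteq, int pcm16_wb_payload_type) {
      if (neteq->Init() != 0)
        return -1;
      // Size the packet buffer for every decoder in the codec database.
      if (neteq->AllocatePacketBuffer(ACMCodecDB::NetEQDecoders(),
                                      ACMCodecDB::kNumCodecs) != 0)
        return -1;
      // Register PCM16B wideband, exactly as the unit test does.
      WebRtcNetEQ_CodecDef codec_def;
      SET_CODEC_PAR(codec_def, kDecoderPCM16Bwb, pcm16_wb_payload_type, NULL,
                    16000);
      SET_PCM16B_WB_FUNCTIONS(codec_def);
      if (neteq->AddCodec(&codec_def, true) != 0)
        return -1;
      // Fax mode avoids time-scaling, as noted in the unit test above.
      return neteq->SetPlayoutMode(fax);
    }

    }  // namespace acm1
    }  // namespace webrtc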
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.cc
deleted file mode 100644
index 413f3715fc6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.cc
+++ /dev/null
@@ -1,319 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_opus.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_OPUS
-#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_OPUS
-
-ACMOpus::ACMOpus(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- sample_freq_(0),
- bitrate_(0),
- channels_(1) {
- return;
-}
-
-ACMOpus::~ACMOpus() {
- return;
-}
-
-int16_t ACMOpus::InternalEncode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMOpus::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMOpus::InternalInitEncoder(WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMOpus::InternalInitDecoder(WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMOpus::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMOpus::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMOpus::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMOpus::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMOpus::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMOpus::DestructDecoderSafe() {
- return;
-}
-
-void ACMOpus::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMOpus::SetBitRateSafe(const int32_t /*rate*/) {
- return -1;
-}
-
-bool ACMOpus::IsTrueStereoCodec() {
- return true;
-}
-
-void ACMOpus::SplitStereoPacket(uint8_t* /*payload*/,
- int32_t* /*payload_length*/) {}
-
-#else //===================== Actual Implementation =======================
-
-ACMOpus::ACMOpus(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- sample_freq_(32000), // Default sampling frequency.
- bitrate_(20000), // Default bit-rate.
- channels_(1) { // Default mono
- codec_id_ = codec_id;
-
- // Opus has internal DTX, but we don't use it for now.
- has_internal_dtx_ = false;
-
- if (codec_id_ != ACMCodecDB::kOpus) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Wrong codec id for Opus.");
- sample_freq_ = -1;
- bitrate_ = -1;
- }
- return;
-}
-
-ACMOpus::~ACMOpus() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcOpus_EncoderFree(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcOpus_DecoderFree(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMOpus::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- // Call Encoder.
- *bitstream_len_byte = WebRtcOpus_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- MAX_PAYLOAD_SIZE_BYTE, bitstream);
- // Check for error reported from encoder.
- if (*bitstream_len_byte < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: Encode error for Opus");
- *bitstream_len_byte = 0;
- return -1;
- }
-
- // Increment the read index. This tells the caller how far
- // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * channels_;
-
- return *bitstream_len_byte;
-}
-
-int16_t ACMOpus::DecodeSafe(uint8_t* bitstream, int16_t bitstream_len_byte,
- int16_t* audio, int16_t* audio_samples,
- int8_t* speech_type) {
- return 0;
-}
-
-int16_t ACMOpus::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- int16_t ret;
- if (encoder_inst_ptr_ != NULL) {
- WebRtcOpus_EncoderFree(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- ret = WebRtcOpus_EncoderCreate(&encoder_inst_ptr_,
- codec_params->codec_inst.channels);
- // Store number of channels.
- channels_ = codec_params->codec_inst.channels;
-
- if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Encoder creation failed for Opus");
- return ret;
- }
- ret = WebRtcOpus_SetBitRate(encoder_inst_ptr_,
- codec_params->codec_inst.rate);
- if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Setting initial bitrate failed for Opus");
- return ret;
- }
-
- // Store bitrate.
- bitrate_ = codec_params->codec_inst.rate;
-
- return 0;
-}
-
-int16_t ACMOpus::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
- if (decoder_inst_ptr_ == NULL) {
- if (WebRtcOpus_DecoderCreate(&decoder_inst_ptr_,
- codec_params->codec_inst.channels) < 0) {
- return -1;
- }
- }
-
- // Number of channels in decoder should match the number in |codec_params|.
- assert(codec_params->codec_inst.channels ==
- WebRtcOpus_DecoderChannels(decoder_inst_ptr_));
-
- if (WebRtcOpus_DecoderInit(decoder_inst_ptr_) < 0) {
- return -1;
- }
- if (WebRtcOpus_DecoderInitSlave(decoder_inst_ptr_) < 0) {
- return -1;
- }
- return 0;
-}
-
-int32_t ACMOpus::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
-                 "CodecDef: Decoder uninitialized for Opus");
- return -1;
- }
-
-  // Fill up the structure by calling "SET_CODEC_PAR" & "SET_OPUS_FUNCTIONS".
-  // Then call NetEQ to add the codec to its database.
-  // TODO(tlegrand): The decoder is registered in NetEQ as a 32 kHz decoder.
-  // This holds until we have a full 48 kHz system and can remove the
-  // downsampling in the Opus decoder wrapper.
- SET_CODEC_PAR(codec_def, kDecoderOpus, codec_inst.pltype,
- decoder_inst_ptr_, 32000);
-
- // If this is the master of NetEQ, regular decoder will be added, otherwise
- // the slave decoder will be used.
- if (is_master_) {
- SET_OPUS_FUNCTIONS(codec_def);
- } else {
- SET_OPUSSLAVE_FUNCTIONS(codec_def);
- }
-
- return 0;
-}
-
-ACMGenericCodec* ACMOpus::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMOpus::InternalCreateEncoder() {
- // Real encoder will be created in InternalInitEncoder.
- return 0;
-}
-
-void ACMOpus::DestructEncoderSafe() {
- if (encoder_inst_ptr_) {
- WebRtcOpus_EncoderFree(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
-}
-
-int16_t ACMOpus::InternalCreateDecoder() {
- // Real decoder will be created in InternalInitDecoder
- return 0;
-}
-
-void ACMOpus::DestructDecoderSafe() {
- decoder_initialized_ = false;
- if (decoder_inst_ptr_) {
- WebRtcOpus_DecoderFree(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
-}
-
-void ACMOpus::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcOpus_EncoderFree(reinterpret_cast<OpusEncInst*>(ptr_inst));
- }
- return;
-}
-
-int16_t ACMOpus::SetBitRateSafe(const int32_t rate) {
- if (rate < 6000 || rate > 510000) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: Invalid rate Opus");
- return -1;
- }
-
- bitrate_ = rate;
-
- // Ask the encoder for the new rate.
- if (WebRtcOpus_SetBitRate(encoder_inst_ptr_, bitrate_) >= 0) {
- encoder_params_.codec_inst.rate = bitrate_;
- return 0;
- }
-
- return -1;
-}
-
-bool ACMOpus::IsTrueStereoCodec() {
- return true;
-}
-
-// Copy the stereo packet so that NetEq will insert it into both master and slave.
-void ACMOpus::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
- // Duplicate the payload.
- memcpy(&payload[*payload_length], &payload[0],
- sizeof(uint8_t) * (*payload_length));
- // Double the size of the packet.
- *payload_length *= 2;
-}
-
-#endif // WEBRTC_CODEC_OPUS
-
-} // namespace acm1
-
-} // namespace webrtc
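
The encoder setup in the file above amounts to WebRtcOpus_EncoderCreate followed by WebRtcOpus_SetBitRate, with SetBitRateSafe rejecting rates outside [6000, 510000] bps. Here is a caller-side sketch of that sequence, assuming the WebRtcOpus_* functions behave exactly as they are invoked in this file; CreateOpusEncoder is a hypothetical helper, not part of the original code.

    #include <stddef.h>

    #include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
    #include "webrtc/typedefs.h"

    // Creates an Opus encoder and applies an initial bitrate, mirroring
    // ACMOpus::InternalInitEncoder() and ACMOpus::SetBitRateSafe() above.
    int CreateOpusEncoder(int channels, int32_t bitrate_bps,
                          WebRtcOpusEncInst** encoder_out) {
      WebRtcOpusEncInst* encoder = NULL;
      if (WebRtcOpus_EncoderCreate(&encoder, channels) < 0)
        return -1;
      // The wrapper above rejects rates outside [6000, 510000] bps.
      if (bitrate_bps < 6000 || bitrate_bps > 510000 ||
          WebRtcOpus_SetBitRate(encoder, bitrate_bps) < 0) {
        WebRtcOpus_EncoderFree(encoder);
        return -1;
      }
      *encoder_out = encoder;
      return 0;
    }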
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.h
deleted file mode 100644
index 1e586ff41ad..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
-
-#include "webrtc/common_audio/resampler/include/resampler.h"
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-struct WebRtcOpusEncInst;
-struct WebRtcOpusDecInst;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMOpus : public ACMGenericCodec {
- public:
- explicit ACMOpus(int16_t codec_id);
- virtual ~ACMOpus();
-
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t rate) OVERRIDE;
-
- virtual bool IsTrueStereoCodec() OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-
- WebRtcOpusEncInst* encoder_inst_ptr_;
- WebRtcOpusDecInst* decoder_inst_ptr_;
- uint16_t sample_freq_;
- uint32_t bitrate_;
- int channels_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc
deleted file mode 100644
index 6fe12f757ca..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc
+++ /dev/null
@@ -1,251 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_pcm16b.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_PCM16
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_PCM16
-
-ACMPCM16B::ACMPCM16B(int16_t /* codec_id */) {
- return;
-}
-
-ACMPCM16B::~ACMPCM16B() {
- return;
-}
-
-int16_t ACMPCM16B::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMPCM16B::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMPCM16B::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMPCM16B::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMPCM16B::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMPCM16B::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMPCM16B::InternalCreateEncoder() {
- return -1;
-}
-
-int16_t ACMPCM16B::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMPCM16B::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-void ACMPCM16B::DestructEncoderSafe() {
- return;
-}
-
-void ACMPCM16B::DestructDecoderSafe() {
- return;
-}
-
-void ACMPCM16B::SplitStereoPacket(uint8_t* /*payload*/,
- int32_t* /*payload_length*/) {
-}
-
-#else //===================== Actual Implementation =======================
-ACMPCM16B::ACMPCM16B(int16_t codec_id) {
- codec_id_ = codec_id;
- sampling_freq_hz_ = ACMCodecDB::CodecFreq(codec_id_);
-}
-
-ACMPCM16B::~ACMPCM16B() {
- return;
-}
-
-int16_t ACMPCM16B::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = WebRtcPcm16b_Encode(&in_audio_[in_audio_ix_read_],
- frame_len_smpl_ * num_channels_,
- bitstream);
-  // Increment the read index to tell the caller how far we have gone
-  // forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMPCM16B::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMPCM16B::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCM16B::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int32_t ACMPCM16B::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
-  // Fill up the structure by calling "SET_CODEC_PAR" and the appropriate
-  // "SET_PCM16B_*_FUNCTIONS" macro. Then call NetEQ to add the codec to its
-  // database.
- if (codec_inst.channels == 1) {
- switch (sampling_freq_hz_) {
- case 8000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16B, codec_inst.pltype, NULL, 8000);
- SET_PCM16B_FUNCTIONS(codec_def);
- break;
- }
- case 16000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16Bwb, codec_inst.pltype, NULL,
- 16000);
- SET_PCM16B_WB_FUNCTIONS(codec_def);
- break;
- }
- case 32000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16Bswb32kHz, codec_inst.pltype,
- NULL, 32000);
- SET_PCM16B_SWB32_FUNCTIONS(codec_def);
- break;
- }
- default: {
- return -1;
- }
- }
- } else {
- switch (sampling_freq_hz_) {
- case 8000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16B_2ch, codec_inst.pltype, NULL,
- 8000);
- SET_PCM16B_FUNCTIONS(codec_def);
- break;
- }
- case 16000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16Bwb_2ch, codec_inst.pltype,
- NULL, 16000);
- SET_PCM16B_WB_FUNCTIONS(codec_def);
- break;
- }
- case 32000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16Bswb32kHz_2ch, codec_inst.pltype,
- NULL, 32000);
- SET_PCM16B_SWB32_FUNCTIONS(codec_def);
- break;
- }
- default: {
- return -1;
- }
- }
- }
- return 0;
-}
-
-ACMGenericCodec* ACMPCM16B::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMPCM16B::InternalCreateEncoder() {
- // PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCM16B::InternalCreateDecoder() {
- // PCM has no instance.
- return 0;
-}
-
-void ACMPCM16B::InternalDestructEncoderInst(void* /* ptr_inst */) {
- // PCM has no instance.
- return;
-}
-
-void ACMPCM16B::DestructEncoderSafe() {
- // PCM has no instance.
- encoder_exist_ = false;
- encoder_initialized_ = false;
- return;
-}
-
-void ACMPCM16B::DestructDecoderSafe() {
- // PCM has no instance.
- decoder_exist_ = false;
- decoder_initialized_ = false;
- return;
-}
-
-// Split the stereo packet and place the left and right channels after each
-// other in the payload vector.
-void ACMPCM16B::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- uint8_t right_byte_msb;
- uint8_t right_byte_lsb;
-
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
-  // In each iteration, move the two bytes representing the right channel and
-  // place them at the end of the bytestream vector. After looping, the data
-  // is reordered to:
-  // l1 l2 l3 l4 ... l(N-1) lN r1 r2 r3 r4 ... r(N-1) rN,
-  // where N is the number of samples per channel.
-
- for (int i = 0; i < *payload_length / 2; i += 2) {
- right_byte_msb = payload[i + 2];
- right_byte_lsb = payload[i + 3];
- memmove(&payload[i + 2], &payload[i + 4], *payload_length - i - 4);
- payload[*payload_length - 2] = right_byte_msb;
- payload[*payload_length - 1] = right_byte_lsb;
- }
-}
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
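
The in-place reordering done by ACMPCM16B::SplitStereoPacket above (and, with one byte per sample, by the G.711 wrappers later in this change) is easiest to follow on a concrete buffer. Below is a self-contained sketch of the same interleaved-to-planar layout, using a scratch buffer instead of the original's in-place memmove; SplitInterleavedPcm16 is an illustrative name, not part of the deleted code.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Reorders an interleaved stereo 16-bit payload (l1 r1 l2 r2 ...) into the
    // planar layout NetEQ's master/slave split expects (l1 ... lN r1 ... rN).
    void SplitInterleavedPcm16(std::vector<uint8_t>* payload) {
      const size_t n = payload->size();
      std::vector<uint8_t> planar(n);
      size_t left = 0, right = n / 2;
      for (size_t i = 0; i + 3 < n; i += 4) {  // 4 bytes per L/R sample pair.
        planar[left++] = (*payload)[i];        // left sample, byte 0
        planar[left++] = (*payload)[i + 1];    // left sample, byte 1
        planar[right++] = (*payload)[i + 2];   // right sample, byte 0
        planar[right++] = (*payload)[i + 3];   // right sample, byte 1
      }
      payload->swap(planar);
    }

    int main() {
      // Two stereo samples: L1=0x0102, R1=0x0304, L2=0x0506, R2=0x0708.
      std::vector<uint8_t> p = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08};
      SplitInterleavedPcm16(&p);
      for (uint8_t b : p) printf("%02x ", b);  // 01 02 05 06 03 04 07 08
      printf("\n");
      return 0;
    }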
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.h
deleted file mode 100644
index a97589b57a9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMPCM16B : public ACMGenericCodec {
- public:
- explicit ACMPCM16B(int16_t codec_id);
- virtual ~ACMPCM16B();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-
- int32_t sampling_freq_hz_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.cc
deleted file mode 100644
index 9e5514a9e9e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.cc
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_pcma.h"
-
-#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-// Codec interface
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMPCMA::ACMPCMA(int16_t codec_id) {
- codec_id_ = codec_id;
-}
-
-ACMPCMA::~ACMPCMA() {
- return;
-}
-
-int16_t ACMPCMA::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = WebRtcG711_EncodeA(NULL, &in_audio_[in_audio_ix_read_],
- frame_len_smpl_ * num_channels_,
- (int16_t*) bitstream);
-  // Increment the read index to tell the caller how far we have gone
-  // forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMPCMA::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMPCMA::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCMA::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int32_t ACMPCMA::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
-  // Fill up the structure by calling
-  // "SET_CODEC_PAR" & "SET_PCMA_FUNCTIONS".
-  // Then call NetEQ to add the codec to its database.
- if (codec_inst.channels == 1) {
- // Mono mode.
- SET_CODEC_PAR(codec_def, kDecoderPCMa, codec_inst.pltype, NULL, 8000);
- } else {
- // Stereo mode.
- SET_CODEC_PAR(codec_def, kDecoderPCMa_2ch, codec_inst.pltype, NULL, 8000);
- }
- SET_PCMA_FUNCTIONS(codec_def);
- return 0;
-}
-
-ACMGenericCodec* ACMPCMA::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMPCMA::InternalCreateEncoder() {
- // PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCMA::InternalCreateDecoder() {
- // PCM has no instance.
- return 0;
-}
-
-void ACMPCMA::InternalDestructEncoderInst(void* /* ptr_inst */) {
- // PCM has no instance.
- return;
-}
-
-void ACMPCMA::DestructEncoderSafe() {
- // PCM has no instance.
- return;
-}
-
-void ACMPCMA::DestructDecoderSafe() {
- // PCM has no instance.
- decoder_initialized_ = false;
- decoder_exist_ = false;
- return;
-}
-
-// Split the stereo packet and place the left and right channels after each
-// other in the payload vector.
-void ACMPCMA::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- uint8_t right_byte;
-
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
-  // In each iteration, move the one byte representing the right channel and
-  // place it at the end of the bytestream vector. After looping, the data is
-  // reordered to:
-  // l1 l2 l3 l4 ... l(N-1) lN r1 r2 r3 r4 ... r(N-1) rN,
-  // where N is the number of samples per channel.
- for (int i = 0; i < *payload_length / 2; i++) {
- right_byte = payload[i + 1];
- memmove(&payload[i + 1], &payload[i + 2], *payload_length - i - 2);
- payload[*payload_length - 1] = right_byte;
- }
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.h
deleted file mode 100644
index cb506eaa6e9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMPCMA : public ACMGenericCodec {
- public:
- explicit ACMPCMA(int16_t codec_id);
- virtual ~ACMPCMA();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.cc
deleted file mode 100644
index 6f4eb27aa7a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.cc
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_pcmu.h"
-
-#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-// Codec interface
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMPCMU::ACMPCMU(int16_t codec_id) {
- codec_id_ = codec_id;
-}
-
-ACMPCMU::~ACMPCMU() {
- return;
-}
-
-int16_t ACMPCMU::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = WebRtcG711_EncodeU(NULL, &in_audio_[in_audio_ix_read_],
- frame_len_smpl_ * num_channels_,
- (int16_t*)bitstream);
-  // Increment the read index to tell the caller how far we have gone
-  // forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMPCMU::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMPCMU::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCMU::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int32_t ACMPCMU::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
-  // Fill up the structure by calling
-  // "SET_CODEC_PAR" & "SET_PCMU_FUNCTIONS".
-  // Then call NetEQ to add the codec to its database.
- if (codec_inst.channels == 1) {
- // Mono mode.
- SET_CODEC_PAR(codec_def, kDecoderPCMu, codec_inst.pltype, NULL, 8000);
- } else {
- // Stereo mode.
- SET_CODEC_PAR(codec_def, kDecoderPCMu_2ch, codec_inst.pltype, NULL, 8000);
- }
- SET_PCMU_FUNCTIONS(codec_def);
- return 0;
-}
-
-ACMGenericCodec* ACMPCMU::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMPCMU::InternalCreateEncoder() {
- // PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCMU::InternalCreateDecoder() {
- // PCM has no instance.
- return 0;
-}
-
-void ACMPCMU::InternalDestructEncoderInst(void* /* ptr_inst */) {
- // PCM has no instance.
- return;
-}
-
-void ACMPCMU::DestructEncoderSafe() {
- // PCM has no instance.
- encoder_exist_ = false;
- encoder_initialized_ = false;
- return;
-}
-
-void ACMPCMU::DestructDecoderSafe() {
- // PCM has no instance.
- decoder_initialized_ = false;
- decoder_exist_ = false;
- return;
-}
-
-// Split the stereo packet and place the left and right channels after each
-// other in the payload vector.
-void ACMPCMU::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- uint8_t right_byte;
-
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
-  // In each iteration, move the one byte representing the right channel and
-  // place it at the end of the bytestream vector. After looping, the data is
-  // reordered to:
-  // l1 l2 l3 l4 ... l(N-1) lN r1 r2 r3 r4 ... r(N-1) rN,
-  // where N is the number of samples per channel.
- for (int i = 0; i < *payload_length / 2; i++) {
- right_byte = payload[i + 1];
- memmove(&payload[i + 1], &payload[i + 2], *payload_length - i - 2);
- payload[*payload_length - 1] = right_byte;
- }
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.h
deleted file mode 100644
index ea401d59c96..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMPCMU : public ACMGenericCodec {
- public:
- explicit ACMPCMU(int16_t codec_id);
- virtual ~ACMPCMU();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.cc
deleted file mode 100644
index 0d8134c171a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.cc
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_red.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMRED::ACMRED(int16_t codec_id) {
- codec_id_ = codec_id;
-}
-
-ACMRED::~ACMRED() {
- return;
-}
-
-int16_t ACMRED::InternalEncode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- // RED is never used as an encoder
- // RED has no instance
- return 0;
-}
-
-int16_t ACMRED::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMRED::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization,
- // RED has no instance
- return 0;
-}
-
-int16_t ACMRED::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization,
- // RED has no instance
- return 0;
-}
-
-int32_t ACMRED::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
-    // TODO: Log error.
- return -1;
- }
-
-  // Fill up the structure by calling
-  // "SET_CODEC_PAR" & "SET_RED_FUNCTIONS".
-  // Then call NetEQ to add the codec to its
-  // database.
- SET_CODEC_PAR((codec_def), kDecoderRED, codec_inst.pltype, NULL, 8000);
- SET_RED_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMRED::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMRED::InternalCreateEncoder() {
- // RED has no instance
- return 0;
-}
-
-int16_t ACMRED::InternalCreateDecoder() {
- // RED has no instance
- return 0;
-}
-
-void ACMRED::InternalDestructEncoderInst(void* /* ptr_inst */) {
- // RED has no instance
- return;
-}
-
-void ACMRED::DestructEncoderSafe() {
- // RED has no instance
- return;
-}
-
-void ACMRED::DestructDecoderSafe() {
- // RED has no instance
- return;
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.h
deleted file mode 100644
index ede18b5218e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMRED : public ACMGenericCodec {
- public:
- explicit ACMRED(int16_t codec_id);
- virtual ~ACMRED();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.cc
deleted file mode 100644
index 50ddab1d8b9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.cc
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_resampler.h"
-
-#include <string.h>
-
-#include "webrtc/common_audio/resampler/include/push_resampler.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMResampler::ACMResampler() {
-}
-
-ACMResampler::~ACMResampler() {
-}
-
-int16_t ACMResampler::Resample10Msec(const int16_t* in_audio,
- int32_t in_freq_hz,
- int16_t* out_audio,
- int32_t out_freq_hz,
- uint8_t num_audio_channels) {
- if (in_freq_hz == out_freq_hz) {
- size_t length = static_cast<size_t>(in_freq_hz * num_audio_channels / 100);
- memcpy(out_audio, in_audio, length * sizeof(int16_t));
- return static_cast<int16_t>(in_freq_hz / 100);
- }
-
-  // |max_length| is the maximum number of samples for 10 ms at 48 kHz.
- // TODO(turajs): is this actually the capacity of the |out_audio| buffer?
- int max_length = 480 * num_audio_channels;
- int in_length = in_freq_hz / 100 * num_audio_channels;
-
- if (resampler_.InitializeIfNeeded(in_freq_hz, out_freq_hz,
- num_audio_channels) != 0) {
- LOG_FERR3(LS_ERROR, InitializeIfNeeded, in_freq_hz, out_freq_hz,
- num_audio_channels);
- return -1;
- }
-
- int out_length = resampler_.Resample(in_audio, in_length, out_audio,
- max_length);
- if (out_length == -1) {
- LOG_FERR4(LS_ERROR, Resample, in_audio, in_length, out_audio, max_length);
- return -1;
- }
-
- return out_length / num_audio_channels;
-}
-
-} // namespace acm1
-
-} // namespace webrtc
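
Resample10Msec above sizes everything around 10 ms blocks: the input holds in_freq_hz / 100 samples per channel, the 480 * num_audio_channels cap corresponds to 10 ms at 48 kHz, and the return value is the per-channel output length. A small self-contained sketch of that arithmetic; SamplesPer10Ms is an illustrative helper, not part of the deleted code.

    #include <stdio.h>

    // Samples carried by one 10 ms block at the given rate and channel count.
    static int SamplesPer10Ms(int sample_rate_hz, int channels) {
      return sample_rate_hz / 100 * channels;
    }

    int main() {
      printf("16 kHz, mono  : %d samples per 10 ms\n", SamplesPer10Ms(16000, 1));  // 160
      printf("32 kHz, stereo: %d samples per 10 ms\n", SamplesPer10Ms(32000, 2));  // 640
      printf("48 kHz, stereo: %d samples per 10 ms\n", SamplesPer10Ms(48000, 2));  // 960
      return 0;
    }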
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.h
deleted file mode 100644
index b50e722c443..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
-
-#include "webrtc/common_audio/resampler/include/push_resampler.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMResampler {
- public:
- ACMResampler();
- ~ACMResampler();
-
- int16_t Resample10Msec(const int16_t* in_audio,
- const int32_t in_freq_hz,
- int16_t* out_audio,
- const int32_t out_freq_hz,
- uint8_t num_audio_channels);
-
- private:
- PushResampler resampler_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.cc
deleted file mode 100644
index 1567929d860..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.cc
+++ /dev/null
@@ -1,471 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_speex.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_SPEEX
-// NOTE! Speex is not included in the open-source package. Modify this file or
-// your codec API to match the function calls and names of the Speex API you
-// use.
-#include "speex_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_SPEEX
-ACMSPEEX::ACMSPEEX(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- compl_mode_(0),
- vbr_enabled_(false),
- encoding_rate_(-1),
- sampling_frequency_(-1),
- samples_in_20ms_audio_(-1) {
- return;
-}
-
-ACMSPEEX::~ACMSPEEX() {
- return;
-}
-
-int16_t ACMSPEEX::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMSPEEX::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMSPEEX::EnableDTX() {
- return -1;
-}
-
-int16_t ACMSPEEX::DisableDTX() {
- return -1;
-}
-
-int16_t ACMSPEEX::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMSPEEX::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMSPEEX::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMSPEEX::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMSPEEX::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMSPEEX::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMSPEEX::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMSPEEX::DestructDecoderSafe() {
- return;
-}
-
-int16_t ACMSPEEX::SetBitRateSafe(const int32_t /* rate */) {
- return -1;
-}
-
-void ACMSPEEX::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-#ifdef UNUSEDSPEEX
-int16_t ACMSPEEX::EnableVBR() {
- return -1;
-}
-
-int16_t ACMSPEEX::DisableVBR() {
- return -1;
-}
-
-int16_t ACMSPEEX::SetComplMode(int16_t mode) {
- return -1;
-}
-#endif
-
-#else //===================== Actual Implementation =======================
-
-ACMSPEEX::ACMSPEEX(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- codec_id_ = codec_id;
-
-  // Set the sampling frequency, frame size, and rate for Speex.
- if (codec_id_ == ACMCodecDB::kSPEEX8) {
- sampling_frequency_ = 8000;
- samples_in_20ms_audio_ = 160;
- encoding_rate_ = 11000;
- } else if (codec_id_ == ACMCodecDB::kSPEEX16) {
- sampling_frequency_ = 16000;
- samples_in_20ms_audio_ = 320;
- encoding_rate_ = 22000;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Wrong codec id for Speex.");
-
- sampling_frequency_ = -1;
- samples_in_20ms_audio_ = -1;
- encoding_rate_ = -1;
- }
-
- has_internal_dtx_ = true;
- dtx_enabled_ = false;
- vbr_enabled_ = false;
- compl_mode_ = 3; // default complexity value
-
- return;
-}
-
-ACMSPEEX::~ACMSPEEX() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcSpeex_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcSpeex_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMSPEEX::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- int16_t status;
- int16_t num_encoded_samples = 0;
- int16_t n = 0;
-
- while (num_encoded_samples < frame_len_smpl_) {
- status = WebRtcSpeex_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_], encoding_rate_);
-
-    // Increment the read index to tell the caller how far we have gone
-    // forward in reading the audio buffer.
- in_audio_ix_read_ += samples_in_20ms_audio_;
- num_encoded_samples += samples_in_20ms_audio_;
-
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error in Speex encoder");
- return status;
- }
-
- // Update VAD, if internal DTX is used
- if (has_internal_dtx_ && dtx_enabled_) {
- vad_label_[n++] = status;
- vad_label_[n++] = status;
- }
-
- if (status == 0) {
-      // This frame is detected as inactive. We need to send whatever has
-      // been encoded so far.
- *bitstream_len_byte = WebRtcSpeex_GetBitstream(encoder_inst_ptr_,
- (int16_t*)bitstream);
- return *bitstream_len_byte;
- }
- }
-
- *bitstream_len_byte = WebRtcSpeex_GetBitstream(encoder_inst_ptr_,
- (int16_t*)bitstream);
- return *bitstream_len_byte;
-}
-
-int16_t ACMSPEEX::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMSPEEX::EnableDTX() {
- if (dtx_enabled_) {
- return 0;
-  } else if (encoder_exist_) {  // Check if the encoder exists.
- // enable DTX
- if (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, (vbr_enabled_ ? 1 : 0),
- compl_mode_, 1) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot enable DTX for Speex");
- return -1;
- }
- dtx_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-
- return 0;
-}
-
-int16_t ACMSPEEX::DisableDTX() {
- if (!dtx_enabled_) {
- return 0;
-  } else if (encoder_exist_) {  // Check if the encoder exists.
- // disable DTX
- if (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, (vbr_enabled_ ? 1 : 0),
- compl_mode_, 0) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot disable DTX for Speex");
- return -1;
- }
- dtx_enabled_ = false;
- return 0;
- } else {
-    // The encoder doesn't exist, therefore disabling is harmless.
- return 0;
- }
-
- return 0;
-}
-
-int16_t ACMSPEEX::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- // sanity check
- if (encoder_inst_ptr_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot initialize Speex encoder, instance does not exist");
- return -1;
- }
-
- int16_t status = SetBitRateSafe((codec_params->codecInstant).rate);
- status +=
- (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, vbr_enabled_, compl_mode_,
- ((codec_params->enable_dtx) ? 1 : 0)) < 0) ?
- -1 : 0;
-
- if (status >= 0) {
- return 0;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error in initialization of Speex encoder");
- return -1;
- }
-}
-
-int16_t ACMSPEEX::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- int16_t status;
-
- // sanity check
- if (decoder_inst_ptr_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot initialize Speex decoder, instance does not exist");
- return -1;
- }
- status = ((WebRtcSpeex_DecoderInit(decoder_inst_ptr_) < 0) ? -1 : 0);
-
- if (status >= 0) {
- return 0;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error in initialization of Speex decoder");
- return -1;
- }
-}
-
-int32_t ACMSPEEX::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error, Speex decoder is not initialized");
- return -1;
- }
-
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_SPEEX_FUNCTIONS".
- // Then call NetEQ to add the codec to its
- // database.
-
- switch (sampling_frequency_) {
- case 8000: {
- SET_CODEC_PAR((codec_def), kDecoderSPEEX_8, codec_inst.pltype,
- decoder_inst_ptr_, 8000);
- break;
- }
- case 16000: {
- SET_CODEC_PAR((codec_def), kDecoderSPEEX_16, codec_inst.pltype,
- decoder_inst_ptr_, 16000);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Unsupported sampling frequency for Speex");
-
- return -1;
- }
- }
-
- SET_SPEEX_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMSPEEX::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMSPEEX::InternalCreateEncoder() {
- return WebRtcSpeex_CreateEnc(&encoder_inst_ptr_, sampling_frequency_);
-}
-
-void ACMSPEEX::DestructEncoderSafe() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcSpeex_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- // There is no encoder; set the following.
- encoder_exist_ = false;
- encoder_initialized_ = false;
- encoding_rate_ = 0;
-}
-
-int16_t ACMSPEEX::InternalCreateDecoder() {
- return WebRtcSpeex_CreateDec(&decoder_inst_ptr_, sampling_frequency_, 1);
-}
-
-void ACMSPEEX::DestructDecoderSafe() {
- if (decoder_inst_ptr_ != NULL) {
- WebRtcSpeex_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- // There is no decoder instance; set the following.
- decoder_exist_ = false;
- decoder_initialized_ = false;
-}
-
-int16_t ACMSPEEX::SetBitRateSafe(const int32_t rate) {
- // Check if changed rate
- if (rate == encoding_rate_) {
- return 0;
- } else if (rate > 2000) {
- encoding_rate_ = rate;
- encoder_params_.codecInstant.rate = rate;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Unsupported encoding rate for Speex");
-
- return -1;
- }
-
- return 0;
-}
-
-void ACMSPEEX::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcSpeex_FreeEnc((SPEEX_encinst_t_*) ptr_inst);
- }
- return;
-}
-
-#ifdef UNUSEDSPEEX
-
- // This API is currently not in use. To be able to enable/disable VBR, an
- // ACM API needs to be added.
-int16_t ACMSPEEX::EnableVBR() {
- if (vbr_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exists
- // enable Variable Bit Rate (VBR)
- if (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, 1, compl_mode_,
- (dtx_enabled_ ? 1 : 0)) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot enable VBR mode for Speex");
-
- return -1;
- }
- vbr_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-}
-
- // This API is currently not in use. To be able to enable/disable VBR, an
- // ACM API needs to be added.
-int16_t ACMSPEEX::DisableVBR() {
- if (!vbr_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exists
- // disable VBR
- if (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, 0, compl_mode_,
- (dtx_enabled_ ? 1 : 0)) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot disable VBR for Speex");
-
- return -1;
- }
- vbr_enabled_ = false;
- return 0;
- } else {
- // Encoder doesn't exist, therefore disabling is harmless.
- return 0;
- }
-}
-
- // This API is currently not in use. To be able to set the complexity mode,
- // an ACM API needs to be added.
-int16_t ACMSPEEX::SetComplMode(int16_t mode) {
- // Check if new mode
- if (mode == compl_mode_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exists
- // Set new mode
- if (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, 0, mode,
- (dtx_enabled_ ? 1 : 0)) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error in complexity mode for Speex");
- return -1;
- }
- compl_mode_ = mode;
- return 0;
- } else {
- // Encoder doesn't exist, therefore setting the mode is harmless.
- return 0;
- }
-}
-
-#endif
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.h
deleted file mode 100644
index 762aea8d9c2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.h
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct SPEEX_encinst_t_;
-struct SPEEX_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMSPEEX : public ACMGenericCodec {
- public:
- explicit ACMSPEEX(int16_t codec_id);
- ~ACMSPEEX();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(void* ptr_inst);
-
- int16_t SetBitRateSafe(const int32_t rate);
-
- int16_t EnableDTX();
-
- int16_t DisableDTX();
-
-#ifdef UNUSEDSPEEX
- int16_t EnableVBR();
-
- int16_t DisableVBR();
-
- int16_t SetComplMode(int16_t mode);
-#endif
-
- SPEEX_encinst_t_* encoder_inst_ptr_;
- SPEEX_decinst_t_* decoder_inst_ptr_;
- int16_t compl_mode_;
- bool vbr_enabled_;
- int32_t encoding_rate_;
- int16_t sampling_frequency_;
- uint16_t samples_in_20ms_audio_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi b/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi
deleted file mode 100644
index a0389b03ef8..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'variables': {
- 'audio_coding_dependencies': [
- 'CNG',
- 'G711',
- 'G722',
- 'iLBC',
- 'iSAC',
- 'iSACFix',
- 'PCM16B',
- 'NetEq',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
- '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
- ],
- 'audio_coding_defines': [],
- 'conditions': [
- ['include_opus==1', {
- 'audio_coding_dependencies': ['webrtc_opus',],
- 'audio_coding_defines': ['WEBRTC_CODEC_OPUS',],
- }],
- ],
- },
- 'targets': [
- {
- 'target_name': 'audio_coding_module',
- 'type': 'static_library',
- 'defines': [
- '<@(audio_coding_defines)',
- ],
- 'dependencies': [
- '<@(audio_coding_dependencies)',
- 'acm2',
- ],
- 'include_dirs': [
- '../interface',
- '../../../interface',
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- '../interface',
- '../../../interface',
- '<(webrtc_root)',
- ],
- },
- 'sources': [
- '../interface/audio_coding_module.h',
- '../interface/audio_coding_module_typedefs.h',
- 'acm_amr.cc',
- 'acm_amr.h',
- 'acm_amrwb.cc',
- 'acm_amrwb.h',
- 'acm_celt.cc',
- 'acm_celt.h',
- 'acm_cng.cc',
- 'acm_cng.h',
- 'acm_codec_database.cc',
- 'acm_codec_database.h',
- 'acm_dtmf_detection.cc',
- 'acm_dtmf_detection.h',
- 'acm_dtmf_playout.cc',
- 'acm_dtmf_playout.h',
- 'acm_g722.cc',
- 'acm_g722.h',
- 'acm_g7221.cc',
- 'acm_g7221.h',
- 'acm_g7221c.cc',
- 'acm_g7221c.h',
- 'acm_g729.cc',
- 'acm_g729.h',
- 'acm_g7291.cc',
- 'acm_g7291.h',
- 'acm_generic_codec.cc',
- 'acm_generic_codec.h',
- 'acm_gsmfr.cc',
- 'acm_gsmfr.h',
- 'acm_ilbc.cc',
- 'acm_ilbc.h',
- 'acm_isac.cc',
- 'acm_isac.h',
- 'acm_isac_macros.h',
- 'acm_neteq.cc',
- 'acm_neteq.h',
- 'acm_opus.cc',
- 'acm_opus.h',
- 'acm_speex.cc',
- 'acm_speex.h',
- 'acm_pcm16b.cc',
- 'acm_pcm16b.h',
- 'acm_pcma.cc',
- 'acm_pcma.h',
- 'acm_pcmu.cc',
- 'acm_pcmu.h',
- 'acm_red.cc',
- 'acm_red.h',
- 'acm_resampler.cc',
- 'acm_resampler.h',
- 'audio_coding_module_impl.cc',
- 'audio_coding_module_impl.h',
- ],
- },
- ],
- 'conditions': [
- ['include_tests==1', {
- 'targets': [
- {
- 'target_name': 'delay_test',
- 'type': 'executable',
- 'dependencies': [
- 'audio_coding_module',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/test/test.gyp:test_support',
- '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- ],
- 'sources': [
- '../test/delay_test.cc',
- '../test/Channel.cc',
- '../test/PCMFile.cc',
- '../test/utility.cc',
- ],
- }, # delay_test
- {
- 'target_name': 'insert_packet_with_timing',
- 'type': 'executable',
- 'dependencies': [
- 'audio_coding_module',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/test/test.gyp:test_support',
- '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- ],
- 'sources': [
- '../test/insert_packet_with_timing.cc',
- '../test/Channel.cc',
- '../test/PCMFile.cc',
- ],
- }, # delay_test
- ],
- }],
- ],
- 'includes': [
- '../acm2/audio_coding_module.gypi',
- ],
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc
deleted file mode 100644
index 556f530ecfd..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc
+++ /dev/null
@@ -1,3048 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h"
-
-#include <assert.h>
-#include <stdlib.h>
-
-#include <algorithm> // For std::max.
-
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-#include "webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h"
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-#include "webrtc/modules/audio_coding/main/source/acm_resampler.h"
-#include "webrtc/modules/audio_coding/main/acm2/nack.h"
-#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/system_wrappers/interface/trace_event.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-enum {
- kACMToneEnd = 999
-};
-
-// Maximum number of bytes in one packet (PCM16B, 20 ms packets, stereo).
-enum {
- kMaxPacketSize = 2560
-};
-
-// Maximum number of payloads that can be packed in one RED payload. For
- // regular FEC, we only pack two payloads. In case of dual-streaming, in the
- // worst case we might pack 3 payloads in one RED payload.
-enum {
- kNumFecFragmentationVectors = 2,
- kMaxNumFragmentationVectors = 3
-};
-
-static const uint32_t kMaskTimestamp = 0x03ffffff;
-static const int kDefaultTimestampDiff = 960; // 20 ms @ 48 kHz.
-
- // If packet N has arrived, all packets prior to N - |kNackThresholdPackets|
- // which are not received are considered lost, and appear in the NACK list.
-static const int kNackThresholdPackets = 2;
-
-namespace {
-
-bool IsCodecRED(const CodecInst* codec) {
- return (STR_CASE_CMP(codec->plname, "RED") == 0);
-}
-
-bool IsCodecRED(int index) {
- return (IsCodecRED(&ACMCodecDB::database_[index]));
-}
-
-bool IsCodecCN(const CodecInst* codec) {
- return (STR_CASE_CMP(codec->plname, "CN") == 0);
-}
-
-bool IsCodecCN(int index) {
- return (IsCodecCN(&ACMCodecDB::database_[index]));
-}
-
-// Stereo-to-mono can be used as in-place.
-int DownMix(const AudioFrame& frame, int length_out_buff, int16_t* out_buff) {
- if (length_out_buff < frame.samples_per_channel_) {
- return -1;
- }
- for (int n = 0; n < frame.samples_per_channel_; ++n)
- out_buff[n] = (frame.data_[2 * n] + frame.data_[2 * n + 1]) >> 1;
- return 0;
-}
-
-// Mono-to-stereo can be used as in-place.
-int UpMix(const AudioFrame& frame, int length_out_buff, int16_t* out_buff) {
- if (length_out_buff < frame.samples_per_channel_) {
- return -1;
- }
- for (int n = frame.samples_per_channel_ - 1; n >= 0; --n) {
- out_buff[2 * n + 1] = frame.data_[n];
- out_buff[2 * n] = frame.data_[n];
- }
- return 0;
-}
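A quick illustration of the in-place down-mix and up-mix rules above, with made-up sample values (standalone snippet, not part of the original file):

#include <cassert>
#include <cstdint>

int main() {
  // One interleaved stereo pair: L = 1000, R = 3000.
  int16_t stereo[2] = {1000, 3000};
  // DownMix averages the pair with an arithmetic shift, as above.
  int16_t mono = static_cast<int16_t>((stereo[0] + stereo[1]) >> 1);
  assert(mono == 2000);
  // UpMix duplicates the mono sample into both channels.
  int16_t up[2] = {mono, mono};
  assert(up[0] == 2000 && up[1] == 2000);
  return 0;
}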
-
-// Return 1 if timestamp t1 is less than timestamp t2, while compensating for
-// wrap-around.
-int TimestampLessThan(uint32_t t1, uint32_t t2) {
- uint32_t kHalfFullRange = static_cast<uint32_t>(0xFFFFFFFF) / 2;
- if (t1 == t2) {
- return 0;
- } else if (t1 < t2) {
- if (t2 - t1 < kHalfFullRange)
- return 1;
- return 0;
- } else {
- if (t1 - t2 < kHalfFullRange)
- return 0;
- return 1;
- }
-}
-
-} // namespace
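The wrap-around comparison above is easiest to see with concrete values. A minimal standalone restatement (illustrative only; it mirrors the logic of TimestampLessThan above):

#include <cassert>
#include <cstdint>

static int TimestampLessThanExample(uint32_t t1, uint32_t t2) {
  const uint32_t kHalfFullRange = static_cast<uint32_t>(0xFFFFFFFF) / 2;
  if (t1 == t2) return 0;
  if (t1 < t2) return (t2 - t1 < kHalfFullRange) ? 1 : 0;
  return (t1 - t2 < kHalfFullRange) ? 0 : 1;
}

int main() {
  assert(TimestampLessThanExample(100, 200) == 1);   // plain ordering
  assert(TimestampLessThanExample(200, 100) == 0);
  // Wrapped case: 0xFFFFFFF0 is treated as "earlier" than 0x10.
  assert(TimestampLessThanExample(0xFFFFFFF0u, 0x10u) == 1);
  assert(TimestampLessThanExample(0x10u, 0xFFFFFFF0u) == 0);
  return 0;
}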
-
-AudioCodingModuleImpl::AudioCodingModuleImpl(const int32_t id, Clock* clock)
- : packetization_callback_(NULL),
- id_(id),
- last_timestamp_(0xD87F3F9F),
- last_in_timestamp_(0xD87F3F9F),
- send_codec_inst_(),
- cng_nb_pltype_(255),
- cng_wb_pltype_(255),
- cng_swb_pltype_(255),
- cng_fb_pltype_(255),
- red_pltype_(255),
- vad_enabled_(false),
- dtx_enabled_(false),
- vad_mode_(VADNormal),
- stereo_receive_registered_(false),
- stereo_send_(false),
- prev_received_channel_(0),
- expected_channels_(1),
- current_send_codec_idx_(-1),
- current_receive_codec_idx_(-1),
- send_codec_registered_(false),
- acm_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vad_callback_(NULL),
- last_recv_audio_codec_pltype_(255),
- is_first_red_(true),
- fec_enabled_(false),
- last_fec_timestamp_(0),
- receive_red_pltype_(255),
- previous_pltype_(255),
- dummy_rtp_header_(NULL),
- recv_pl_frame_size_smpls_(0),
- receiver_initialized_(false),
- dtmf_detector_(NULL),
- dtmf_callback_(NULL),
- last_detected_tone_(kACMToneEnd),
- callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- secondary_send_codec_inst_(),
- initial_delay_ms_(0),
- num_packets_accumulated_(0),
- num_bytes_accumulated_(0),
- accumulated_audio_ms_(0),
- first_payload_received_(false),
- last_incoming_send_timestamp_(0),
- track_neteq_buffer_(false),
- playout_ts_(0),
- av_sync_(false),
- last_timestamp_diff_(kDefaultTimestampDiff),
- last_sequence_number_(0),
- last_ssrc_(0),
- last_packet_was_sync_(false),
- clock_(clock),
- nack_(),
- nack_enabled_(false) {
-
- // Nullify send codec memory, set payload type and set codec name to
- // invalid values.
- const char no_name[] = "noCodecRegistered";
- strncpy(send_codec_inst_.plname, no_name, RTP_PAYLOAD_NAME_SIZE - 1);
- send_codec_inst_.pltype = -1;
-
- strncpy(secondary_send_codec_inst_.plname, no_name,
- RTP_PAYLOAD_NAME_SIZE - 1);
- secondary_send_codec_inst_.pltype = -1;
-
- for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
- codecs_[i] = NULL;
- registered_pltypes_[i] = -1;
- stereo_receive_[i] = false;
- slave_codecs_[i] = NULL;
- mirror_codec_idx_[i] = -1;
- }
-
- neteq_.set_id(id_);
-
- // Allocate memory for RED.
- red_buffer_ = new uint8_t[MAX_PAYLOAD_SIZE_BYTE];
-
- // TODO(turajs): This might not be exactly how this class is supposed to work.
- // The external usage might be that |fragmentationVectorSize| has to match
- // the allocated space for the member-arrays, while here, we allocate
- // according to the maximum number of fragmentations and change
- // |fragmentationVectorSize| on-the-fly based on actual number of
- // fragmentations. However, due to copying to local variable before calling
- // SendData, the RTP module receives a "valid" fragmentation, where allocated
- // space matches |fragmentationVectorSize|, therefore, this should not cause
- // any problem. A better approach is not using RTPFragmentationHeader as
- // member variable, instead, use an ACM-specific structure to hold RED-related
- // data. See module_common_type.h for the definition of
- // RTPFragmentationHeader.
- fragmentation_.VerifyAndAllocateFragmentationHeader(
- kMaxNumFragmentationVectors);
-
- // Register the default payload type for RED and for CNG at sampling rates of
- // 8, 16, 32 and 48 kHz.
- for (int i = (ACMCodecDB::kNumCodecs - 1); i >= 0; i--) {
- if (IsCodecRED(i)) {
- red_pltype_ = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
- } else if (IsCodecCN(i)) {
- if (ACMCodecDB::database_[i].plfreq == 8000) {
- cng_nb_pltype_ = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
- } else if (ACMCodecDB::database_[i].plfreq == 16000) {
- cng_wb_pltype_ = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
- } else if (ACMCodecDB::database_[i].plfreq == 32000) {
- cng_swb_pltype_ = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
- } else if (ACMCodecDB::database_[i].plfreq == 48000) {
- cng_fb_pltype_ = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
- }
- }
- }
-
- if (InitializeReceiverSafe() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot initialize receiver");
- }
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id, "Created");
-}
-
-AudioCodingModuleImpl::~AudioCodingModuleImpl() {
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- current_send_codec_idx_ = -1;
-
- for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
- if (codecs_[i] != NULL) {
- // True stereo codecs share the same memory for master and
- // slave, so the slave codec needs to be nullified here, since the
- // memory will be deleted.
- if (slave_codecs_[i] == codecs_[i]) {
- slave_codecs_[i] = NULL;
- }
-
- // Mirror index holds the address of the codec memory.
- assert(mirror_codec_idx_[i] > -1);
- if (codecs_[mirror_codec_idx_[i]] != NULL) {
- delete codecs_[mirror_codec_idx_[i]];
- codecs_[mirror_codec_idx_[i]] = NULL;
- }
-
- codecs_[i] = NULL;
- }
-
- if (slave_codecs_[i] != NULL) {
- // Delete memory for stereo usage of mono codecs.
- assert(mirror_codec_idx_[i] > -1);
- if (slave_codecs_[mirror_codec_idx_[i]] != NULL) {
- delete slave_codecs_[mirror_codec_idx_[i]];
- slave_codecs_[mirror_codec_idx_[i]] = NULL;
- }
- slave_codecs_[i] = NULL;
- }
- }
-
- if (dtmf_detector_ != NULL) {
- delete dtmf_detector_;
- dtmf_detector_ = NULL;
- }
- if (dummy_rtp_header_ != NULL) {
- delete dummy_rtp_header_;
- dummy_rtp_header_ = NULL;
- }
- if (red_buffer_ != NULL) {
- delete[] red_buffer_;
- red_buffer_ = NULL;
- }
- }
-
- delete callback_crit_sect_;
- callback_crit_sect_ = NULL;
-
- delete acm_crit_sect_;
- acm_crit_sect_ = NULL;
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id_,
- "Destroyed");
-}
-
-int32_t AudioCodingModuleImpl::ChangeUniqueId(const int32_t id) {
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- id_ = id;
-
- for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
- if (codecs_[i] != NULL) {
- codecs_[i]->SetUniqueID(id);
- }
- }
- }
-
- neteq_.set_id(id_);
- return 0;
-}
-
- // Returns the number of milliseconds until the module wants a
-// worker thread to call Process.
-int32_t AudioCodingModuleImpl::TimeUntilNextProcess() {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("TimeUntilNextProcess")) {
- return -1;
- }
- return codecs_[current_send_codec_idx_]->SamplesLeftToEncode() /
- (send_codec_inst_.plfreq / 1000);
-}
-
-int32_t AudioCodingModuleImpl::Process() {
- bool dual_stream;
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- dual_stream = (secondary_encoder_.get() != NULL);
- }
- if (dual_stream) {
- return ProcessDualStream();
- }
- return ProcessSingleStream();
-}
-
-int AudioCodingModuleImpl::EncodeFragmentation(int fragmentation_index,
- int payload_type,
- uint32_t current_timestamp,
- ACMGenericCodec* encoder,
- uint8_t* stream) {
- int16_t len_bytes = MAX_PAYLOAD_SIZE_BYTE;
- uint32_t rtp_timestamp;
- WebRtcACMEncodingType encoding_type;
- if (encoder->Encode(stream, &len_bytes, &rtp_timestamp, &encoding_type) < 0) {
- return -1;
- }
- assert(encoding_type == kActiveNormalEncoded);
- assert(len_bytes > 0);
-
- fragmentation_.fragmentationLength[fragmentation_index] = len_bytes;
- fragmentation_.fragmentationPlType[fragmentation_index] = payload_type;
- fragmentation_.fragmentationTimeDiff[fragmentation_index] =
- static_cast<uint16_t>(current_timestamp - rtp_timestamp);
- fragmentation_.fragmentationVectorSize++;
- return len_bytes;
-}
-
-// Primary payloads are sent immediately, whereas a single secondary payload is
-// buffered to be combined with "the next payload."
-// Normally "the next payload" would be a primary payload. In case two
-// consecutive secondary payloads are generated with no primary payload in
-// between, then two secondary payloads are packed in one RED.
-int AudioCodingModuleImpl::ProcessDualStream() {
- uint8_t stream[kMaxNumFragmentationVectors * MAX_PAYLOAD_SIZE_BYTE];
- uint32_t current_timestamp;
- int16_t length_bytes = 0;
- RTPFragmentationHeader my_fragmentation;
-
- uint8_t my_red_payload_type;
-
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- // Check that there is an encoder registered before proceeding.
- if (!HaveValidEncoder("ProcessDualStream") ||
- secondary_encoder_.get() == NULL) {
- return -1;
- }
- ACMGenericCodec* primary_encoder = codecs_[current_send_codec_idx_];
- // True if the primary encoder has a full frame of audio to generate a payload.
- bool primary_ready_to_encode = primary_encoder->HasFrameToEncode();
- // True if the secondary encoder has a frame of audio to generate a payload.
- bool secondary_ready_to_encode = secondary_encoder_->HasFrameToEncode();
-
- if (!primary_ready_to_encode && !secondary_ready_to_encode) {
- // Nothing to send.
- return 0;
- }
- int len_bytes_previous_secondary = static_cast<int>(
- fragmentation_.fragmentationLength[2]);
- assert(len_bytes_previous_secondary <= MAX_PAYLOAD_SIZE_BYTE);
- bool has_previous_payload = len_bytes_previous_secondary > 0;
-
- uint32_t primary_timestamp = primary_encoder->EarliestTimestamp();
- uint32_t secondary_timestamp = secondary_encoder_->EarliestTimestamp();
-
- if (!has_previous_payload && !primary_ready_to_encode &&
- secondary_ready_to_encode) {
- // Secondary payload will be the ONLY bit-stream. Encode by secondary
- // encoder, store the payload, and return. No packet is sent.
- int16_t len_bytes = MAX_PAYLOAD_SIZE_BYTE;
- WebRtcACMEncodingType encoding_type;
- if (secondary_encoder_->Encode(red_buffer_, &len_bytes,
- &last_fec_timestamp_,
- &encoding_type) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ProcessDual(): Encoding of secondary encoder Failed");
- return -1;
- }
- assert(len_bytes > 0);
- assert(encoding_type == kActiveNormalEncoded);
- assert(len_bytes <= MAX_PAYLOAD_SIZE_BYTE);
- fragmentation_.fragmentationLength[2] = len_bytes;
- return 0;
- }
-
- // Initialize with invalid but distinct values, so that we can later
- // sanity-check that they are different.
- int index_primary = -1;
- int index_secondary = -2;
- int index_previous_secondary = -3;
-
- if (primary_ready_to_encode) {
- index_primary = secondary_ready_to_encode ?
- TimestampLessThan(primary_timestamp, secondary_timestamp) : 0;
- index_primary += has_previous_payload ?
- TimestampLessThan(primary_timestamp, last_fec_timestamp_) : 0;
- }
-
- if (secondary_ready_to_encode) {
- // The timestamp of the secondary payload can only be less than that of the
- // primary payload, but is always larger than the timestamp of the previous
- // secondary payload.
- index_secondary = primary_ready_to_encode ?
- (1 - TimestampLessThan(primary_timestamp, secondary_timestamp)) : 0;
- }
-
- if (has_previous_payload) {
- index_previous_secondary = primary_ready_to_encode ?
- (1 - TimestampLessThan(primary_timestamp, last_fec_timestamp_)) : 0;
- // If the secondary is ready, it always has a timestamp larger than the
- // previous secondary. So the index is either 0 or 1.
- index_previous_secondary += secondary_ready_to_encode ? 1 : 0;
- }
-
- // Indices must not be equal.
- assert(index_primary != index_secondary);
- assert(index_primary != index_previous_secondary);
- assert(index_secondary != index_previous_secondary);
-
- // One of the payloads has to be at position zero.
- assert(index_primary == 0 || index_secondary == 0 ||
- index_previous_secondary == 0);
-
- // Timestamp of the RED payload.
- if (index_primary == 0) {
- current_timestamp = primary_timestamp;
- } else if (index_secondary == 0) {
- current_timestamp = secondary_timestamp;
- } else {
- current_timestamp = last_fec_timestamp_;
- }
-
- fragmentation_.fragmentationVectorSize = 0;
- if (has_previous_payload) {
- assert(index_previous_secondary >= 0 &&
- index_previous_secondary < kMaxNumFragmentationVectors);
- assert(len_bytes_previous_secondary <= MAX_PAYLOAD_SIZE_BYTE);
- memcpy(&stream[index_previous_secondary * MAX_PAYLOAD_SIZE_BYTE],
- red_buffer_, sizeof(stream[0]) * len_bytes_previous_secondary);
- fragmentation_.fragmentationLength[index_previous_secondary] =
- len_bytes_previous_secondary;
- fragmentation_.fragmentationPlType[index_previous_secondary] =
- secondary_send_codec_inst_.pltype;
- fragmentation_.fragmentationTimeDiff[index_previous_secondary] =
- static_cast<uint16_t>(current_timestamp - last_fec_timestamp_);
- fragmentation_.fragmentationVectorSize++;
- }
-
- if (primary_ready_to_encode) {
- assert(index_primary >= 0 && index_primary < kMaxNumFragmentationVectors);
- int i = index_primary * MAX_PAYLOAD_SIZE_BYTE;
- if (EncodeFragmentation(index_primary, send_codec_inst_.pltype,
- current_timestamp, primary_encoder,
- &stream[i]) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ProcessDualStream(): Encoding of primary encoder Failed");
- return -1;
- }
- }
-
- if (secondary_ready_to_encode) {
- assert(index_secondary >= 0 &&
- index_secondary < kMaxNumFragmentationVectors - 1);
- int i = index_secondary * MAX_PAYLOAD_SIZE_BYTE;
- if (EncodeFragmentation(index_secondary,
- secondary_send_codec_inst_.pltype,
- current_timestamp, secondary_encoder_.get(),
- &stream[i]) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ProcessDualStream(): Encoding of secondary encoder "
- "Failed");
- return -1;
- }
- }
- // Copy to local variable, as it will be used outside the ACM lock.
- my_fragmentation.CopyFrom(fragmentation_);
- my_red_payload_type = red_pltype_;
- length_bytes = 0;
- for (int n = 0; n < fragmentation_.fragmentationVectorSize; n++) {
- length_bytes += fragmentation_.fragmentationLength[n];
- }
- }
-
- {
- CriticalSectionScoped lock(callback_crit_sect_);
- if (packetization_callback_ != NULL) {
- // Callback with payload data, including redundant data (FEC/RED).
- if (packetization_callback_->SendData(kAudioFrameSpeech,
- my_red_payload_type,
- current_timestamp, stream,
- length_bytes,
- &my_fragmentation) < 0) {
- return -1;
- }
- }
- }
-
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- // Now that data is sent, clean up fragmentation.
- ResetFragmentation(0);
- }
- return 0;
-}
-
-// Process any pending tasks such as timeouts.
-int AudioCodingModuleImpl::ProcessSingleStream() {
- // Make room for 1 RED payload.
- uint8_t stream[2 * MAX_PAYLOAD_SIZE_BYTE];
- int16_t length_bytes = 2 * MAX_PAYLOAD_SIZE_BYTE;
- int16_t red_length_bytes = length_bytes;
- uint32_t rtp_timestamp;
- int16_t status;
- WebRtcACMEncodingType encoding_type;
- FrameType frame_type = kAudioFrameSpeech;
- uint8_t current_payload_type = 0;
- bool has_data_to_send = false;
- bool fec_active = false;
- RTPFragmentationHeader my_fragmentation;
-
- // Keep the scope of the ACM critical section limited.
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- // Check that there is an encoder registered before proceeding.
- if (!HaveValidEncoder("ProcessSingleStream")) {
- return -1;
- }
- status = codecs_[current_send_codec_idx_]->Encode(stream, &length_bytes,
- &rtp_timestamp,
- &encoding_type);
- if (status < 0) {
- // Encode failed.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ProcessSingleStream(): Encoding Failed");
- length_bytes = 0;
- return -1;
- } else if (status == 0) {
- // Not enough data.
- return 0;
- } else {
- switch (encoding_type) {
- case kNoEncoding: {
- current_payload_type = previous_pltype_;
- frame_type = kFrameEmpty;
- length_bytes = 0;
- break;
- }
- case kActiveNormalEncoded:
- case kPassiveNormalEncoded: {
- current_payload_type = static_cast<uint8_t>(send_codec_inst_.pltype);
- frame_type = kAudioFrameSpeech;
- break;
- }
- case kPassiveDTXNB: {
- current_payload_type = cng_nb_pltype_;
- frame_type = kAudioFrameCN;
- is_first_red_ = true;
- break;
- }
- case kPassiveDTXWB: {
- current_payload_type = cng_wb_pltype_;
- frame_type = kAudioFrameCN;
- is_first_red_ = true;
- break;
- }
- case kPassiveDTXSWB: {
- current_payload_type = cng_swb_pltype_;
- frame_type = kAudioFrameCN;
- is_first_red_ = true;
- break;
- }
- case kPassiveDTXFB: {
- current_payload_type = cng_fb_pltype_;
- frame_type = kAudioFrameCN;
- is_first_red_ = true;
- break;
- }
- }
- has_data_to_send = true;
- previous_pltype_ = current_payload_type;
-
- // Redundancy encoding is done here. The two bitstreams are packetized into
- // one RTP packet and the fragmentation points are set.
- // Only apply RED on speech data.
- if ((fec_enabled_) &&
- ((encoding_type == kActiveNormalEncoded) ||
- (encoding_type == kPassiveNormalEncoded))) {
- // FEC is enabled within this scope.
- //
- // Note that, a special solution exists for iSAC since it is the only
- // codec for which GetRedPayload has a non-empty implementation.
- //
- // Summary of the FEC scheme below (use iSAC as example):
- //
- // 1st (is_first_red_ is true) encoded iSAC frame (primary #1) =>
- // - call GetRedPayload() and store redundancy for packet #1 in
- // second fragment of RED buffer (old data)
- // - drop the primary iSAC frame
- // - don't call SendData
- // 2nd (is_first_red_ is false) encoded iSAC frame (primary #2) =>
- // - store primary #2 in 1st fragment of RED buffer and send the
- // combined packet
- // - the transmitted packet contains primary #2 (new) and
- // redundancy for packet #1 (old)
- // - call GetRedPayload() and store redundancy for packet #2 in
- // second fragment of RED buffer
- //
- // ...
- //
- // Nth encoded iSAC frame (primary #N) =>
- // - store primary #N in 1st fragment of RED buffer and send the
- // combined packet
- // - the transmitted packet contains primary #N (new) and
- // redundancy for packet #(N-1) (old)
- // - call GetRedPayload() and store redundancy for packet #N in
- // second fragment of RED buffer
- //
- // For all other codecs, GetRedPayload does nothing and returns -1 =>
- // redundant data is only a copy.
- //
- // First combined packet contains : #2 (new) and #1 (old)
- // Second combined packet contains: #3 (new) and #2 (old)
- // Third combined packet contains : #4 (new) and #3 (old)
- //
- // Hence, even if every second packet is dropped, perfect
- // reconstruction is possible.
- fec_active = true;
-
- has_data_to_send = false;
- // Skip the following part for the first packet in a RED session.
- if (!is_first_red_) {
- // Rearrange stream such that FEC packets are included.
- // Replace stream now that we have stored current stream.
- memcpy(stream + fragmentation_.fragmentationOffset[1], red_buffer_,
- fragmentation_.fragmentationLength[1]);
- // Update the fragmentation time difference vector, in number of
- // timestamps.
- uint16_t time_since_last = static_cast<uint16_t>(rtp_timestamp -
- last_fec_timestamp_);
-
- // Update fragmentation vectors.
- fragmentation_.fragmentationPlType[1] =
- fragmentation_.fragmentationPlType[0];
- fragmentation_.fragmentationTimeDiff[1] = time_since_last;
- has_data_to_send = true;
- }
-
- // Insert new packet length.
- fragmentation_.fragmentationLength[0] = length_bytes;
-
- // Insert new packet payload type.
- fragmentation_.fragmentationPlType[0] = current_payload_type;
- last_fec_timestamp_ = rtp_timestamp;
-
- // Can be modified by the GetRedPayload() call if iSAC is utilized.
- red_length_bytes = length_bytes;
-
- // A fragmentation header is provided => packetization according to
- // RFC 2198 (RTP Payload for Redundant Audio Data) will be used.
- // First fragment is the current data (new).
- // Second fragment is the previous data (old).
- length_bytes = static_cast<int16_t>(
- fragmentation_.fragmentationLength[0] +
- fragmentation_.fragmentationLength[1]);
-
- // Get, and store, redundant data from the encoder based on the recently
- // encoded frame.
- // NOTE - only iSAC contains an implementation; all other codecs do
- // nothing and return -1.
- if (codecs_[current_send_codec_idx_]->GetRedPayload(
- red_buffer_,
- &red_length_bytes) == -1) {
- // The codec was not iSAC => use current encoder output as redundant
- // data instead (trivial FEC scheme).
- memcpy(red_buffer_, stream, red_length_bytes);
- }
-
- is_first_red_ = false;
- // Update payload type with RED payload type.
- current_payload_type = red_pltype_;
- // We have packed 2 payloads.
- fragmentation_.fragmentationVectorSize = kNumFecFragmentationVectors;
-
- // Copy to local variable, as it will be used outside ACM lock.
- my_fragmentation.CopyFrom(fragmentation_);
- // Store RED length.
- fragmentation_.fragmentationLength[1] = red_length_bytes;
- }
- }
- }
-
- if (has_data_to_send) {
- CriticalSectionScoped lock(callback_crit_sect_);
-
- if (packetization_callback_ != NULL) {
- if (fec_active) {
- // Callback with payload data, including redundant data (FEC/RED).
- packetization_callback_->SendData(frame_type, current_payload_type,
- rtp_timestamp, stream,
- length_bytes,
- &my_fragmentation);
- } else {
- // Callback with payload data.
- packetization_callback_->SendData(frame_type, current_payload_type,
- rtp_timestamp, stream,
- length_bytes, NULL);
- }
- }
-
- if (vad_callback_ != NULL) {
- // Callback with VAD decision.
- vad_callback_->InFrameType(static_cast<int16_t>(encoding_type));
- }
- }
- return length_bytes;
-}
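A minimal sketch of the two-fragment RED bookkeeping described in the comments above, assuming a simplified stand-in for RTPFragmentationHeader and ignoring the offset handling of the real code: fragment 0 carries the newly encoded primary payload, fragment 1 the previous one, with its age recorded as a timestamp difference.

#include <cstdint>
#include <cstring>

// Simplified stand-in for RTPFragmentationHeader (illustrative only).
struct MiniFragmentation {
  uint32_t length[2];
  uint8_t payload_type[2];
  uint16_t time_diff[2];
  int vector_size;
};

// Packs the new payload as fragment 0 and the previous payload as fragment 1,
// placing the old bytes directly after the new ones in |stream|.
void PackRed(uint8_t* stream, const uint8_t* new_payload, uint32_t new_len,
             const uint8_t* old_payload, uint32_t old_len,
             uint8_t codec_pltype, uint32_t rtp_timestamp,
             uint32_t old_timestamp, MiniFragmentation* frag) {
  std::memcpy(stream, new_payload, new_len);
  frag->length[0] = new_len;
  frag->payload_type[0] = codec_pltype;
  frag->time_diff[0] = 0;

  std::memcpy(stream + new_len, old_payload, old_len);
  frag->length[1] = old_len;
  frag->payload_type[1] = codec_pltype;
  frag->time_diff[1] = static_cast<uint16_t>(rtp_timestamp - old_timestamp);
  frag->vector_size = 2;  // Two payloads packed, as in the code above.
}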
-
-/////////////////////////////////////////
-// Sender
-//
-
-// Initialize send codec.
-int32_t AudioCodingModuleImpl::InitializeSender() {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Start with invalid values.
- send_codec_registered_ = false;
- current_send_codec_idx_ = -1;
- send_codec_inst_.plname[0] = '\0';
-
- // Delete all encoders to start fresh.
- for (int id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
- if (codecs_[id] != NULL) {
- codecs_[id]->DestructEncoder();
- }
- }
-
- // Initialize FEC/RED.
- is_first_red_ = true;
- if (fec_enabled_ || secondary_encoder_.get() != NULL) {
- if (red_buffer_ != NULL) {
- memset(red_buffer_, 0, MAX_PAYLOAD_SIZE_BYTE);
- }
- if (fec_enabled_) {
- ResetFragmentation(kNumFecFragmentationVectors);
- } else {
- ResetFragmentation(0);
- }
- }
-
- return 0;
-}
-
-int32_t AudioCodingModuleImpl::ResetEncoder() {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (!HaveValidEncoder("ResetEncoder")) {
- return -1;
- }
- return codecs_[current_send_codec_idx_]->ResetEncoder();
-}
-
-void AudioCodingModuleImpl::UnregisterSendCodec() {
- CriticalSectionScoped lock(acm_crit_sect_);
- send_codec_registered_ = false;
- current_send_codec_idx_ = -1;
- // If send Codec is unregistered then remove the secondary codec as well.
- if (secondary_encoder_.get() != NULL)
- secondary_encoder_.reset();
- return;
-}
-
-ACMGenericCodec* AudioCodingModuleImpl::CreateCodec(const CodecInst& codec) {
- ACMGenericCodec* my_codec = NULL;
-
- my_codec = ACMCodecDB::CreateCodecInstance(&codec);
- if (my_codec == NULL) {
- // Error, could not create the codec.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ACMCodecDB::CreateCodecInstance() failed in CreateCodec()");
- return my_codec;
- }
- my_codec->SetUniqueID(id_);
- my_codec->SetNetEqDecodeLock(neteq_.DecodeLock());
-
- return my_codec;
-}
-
- // Check if the given codec is valid to be registered as a send codec.
-static int IsValidSendCodec(const CodecInst& send_codec,
- bool is_primary_encoder,
- int acm_id,
- int* mirror_id) {
- if ((send_codec.channels != 1) && (send_codec.channels != 2)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "Wrong number of channels (%d, only mono and stereo are "
- "supported) for %s encoder", send_codec.channels,
- is_primary_encoder ? "primary" : "secondary");
- return -1;
- }
-
- int codec_id = ACMCodecDB::CodecNumber(&send_codec, mirror_id);
- if (codec_id < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "Invalid settings for the send codec.");
- return -1;
- }
-
- // TODO(tlegrand): Remove this check. Already taken care of in
- // ACMCodecDB::CodecNumber().
- // Check if the payload-type is valid
- if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "Invalid payload-type %d for %s.", send_codec.pltype,
- send_codec.plname);
- return -1;
- }
-
- // Telephone-event cannot be a send codec.
- if (!STR_CASE_CMP(send_codec.plname, "telephone-event")) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "telephone-event cannot be a send codec");
- *mirror_id = -1;
- return -1;
- }
-
- if (ACMCodecDB::codec_settings_[codec_id].channel_support
- < send_codec.channels) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "%d number of channels not supported for %s.",
- send_codec.channels, send_codec.plname);
- *mirror_id = -1;
- return -1;
- }
-
- if (!is_primary_encoder) {
- // If registering the secondary encoder, then RED and CN are not valid
- // choices as encoder.
- if (IsCodecRED(&send_codec)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "RED cannot be secondary codec");
- *mirror_id = -1;
- return -1;
- }
-
- if (IsCodecCN(&send_codec)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "DTX cannot be secondary codec");
- *mirror_id = -1;
- return -1;
- }
- }
- return codec_id;
-}
-
-int AudioCodingModuleImpl::RegisterSecondarySendCodec(
- const CodecInst& send_codec) {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (!send_codec_registered_) {
- return -1;
- }
- // Primary and Secondary codecs should have the same sampling rates.
- if (send_codec.plfreq != send_codec_inst_.plfreq) {
- return -1;
- }
- int mirror_id;
- int codec_id = IsValidSendCodec(send_codec, false, id_, &mirror_id);
- if (codec_id < 0) {
- return -1;
- }
- ACMGenericCodec* encoder = CreateCodec(send_codec);
- WebRtcACMCodecParams codec_params;
- // Initialize the codec before registering. For secondary codec VAD & DTX are
- // disabled.
- memcpy(&(codec_params.codec_inst), &send_codec, sizeof(CodecInst));
- codec_params.enable_vad = false;
- codec_params.enable_dtx = false;
- codec_params.vad_mode = VADNormal;
- // Force initialization.
- if (encoder->InitEncoder(&codec_params, true) < 0) {
- // Could not initialize, therefore cannot be registered.
- delete encoder;
- return -1;
- }
- secondary_encoder_.reset(encoder);
- memcpy(&secondary_send_codec_inst_, &send_codec, sizeof(send_codec));
-
- // Disable VAD & DTX.
- SetVADSafe(false, false, VADNormal);
-
- // Cleaning.
- if (red_buffer_) {
- memset(red_buffer_, 0, MAX_PAYLOAD_SIZE_BYTE);
- }
- ResetFragmentation(0);
- return 0;
-}
-
-void AudioCodingModuleImpl::UnregisterSecondarySendCodec() {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (secondary_encoder_.get() == NULL) {
- return;
- }
- secondary_encoder_.reset();
- ResetFragmentation(0);
-}
-
-int AudioCodingModuleImpl::SecondarySendCodec(
- CodecInst* secondary_codec) const {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (secondary_encoder_.get() == NULL) {
- return -1;
- }
- memcpy(secondary_codec, &secondary_send_codec_inst_,
- sizeof(secondary_send_codec_inst_));
- return 0;
-}
-
-// Can be called multiple times for Codec, CNG, RED.
-int32_t AudioCodingModuleImpl::RegisterSendCodec(
- const CodecInst& send_codec) {
- int mirror_id;
- int codec_id = IsValidSendCodec(send_codec, true, id_, &mirror_id);
-
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Check for reported errors from function IsValidSendCodec().
- if (codec_id < 0) {
- if (!send_codec_registered_) {
- // This value has to be set to -1 if there is no codec registered.
- current_send_codec_idx_ = -1;
- }
- return -1;
- }
-
- // RED can be registered with another payload type. If not registered, a
- // default payload type is used.
- if (IsCodecRED(&send_codec)) {
- // TODO(tlegrand): Remove this check. Already taken care of in
- // ACMCodecDB::CodecNumber().
- // Check if the payload-type is valid
- if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Invalid payload-type %d for %s.", send_codec.pltype,
- send_codec.plname);
- return -1;
- }
- // Set RED payload type.
- red_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- return 0;
- }
-
- // CNG can be registered with another payload type. If not registered, the
- // default payload types from the codec database will be used.
- if (IsCodecCN(&send_codec)) {
- // CNG is registered.
- switch (send_codec.plfreq) {
- case 8000: {
- cng_nb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- break;
- }
- case 16000: {
- cng_wb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- break;
- }
- case 32000: {
- cng_swb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- break;
- }
- case 48000: {
- cng_fb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RegisterSendCodec() failed, invalid frequency for CNG "
- "registration");
- return -1;
- }
- }
- return 0;
- }
-
- // Set stereo, and make sure VAD and DTX are turned off.
- if (send_codec.channels == 2) {
- stereo_send_ = true;
- if (vad_enabled_ || dtx_enabled_) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "VAD/DTX is turned off, not supported when sending stereo.");
- }
- vad_enabled_ = false;
- dtx_enabled_ = false;
- } else {
- stereo_send_ = false;
- }
-
- // Check if the codec is already registered as send codec.
- bool is_send_codec;
- if (send_codec_registered_) {
- int send_codec_mirror_id;
- int send_codec_id = ACMCodecDB::CodecNumber(&send_codec_inst_,
- &send_codec_mirror_id);
- assert(send_codec_id >= 0);
- is_send_codec = (send_codec_id == codec_id) ||
- (mirror_id == send_codec_mirror_id);
- } else {
- is_send_codec = false;
- }
-
- // If there is a secondary codec registered and the new send codec has a
- // sampling rate different from that of the secondary codec, then unregister
- // the secondary codec.
- if (secondary_encoder_.get() != NULL &&
- secondary_send_codec_inst_.plfreq != send_codec.plfreq) {
- secondary_encoder_.reset();
- ResetFragmentation(0);
- }
-
- // If new codec, or new settings, register.
- if (!is_send_codec) {
- if (codecs_[mirror_id] == NULL) {
- codecs_[mirror_id] = CreateCodec(send_codec);
- if (codecs_[mirror_id] == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Create the codec");
- return -1;
- }
- mirror_codec_idx_[mirror_id] = mirror_id;
- }
-
- if (mirror_id != codec_id) {
- codecs_[codec_id] = codecs_[mirror_id];
- mirror_codec_idx_[codec_id] = mirror_id;
- }
-
- ACMGenericCodec* codec_ptr = codecs_[codec_id];
- WebRtcACMCodecParams codec_params;
-
- memcpy(&(codec_params.codec_inst), &send_codec, sizeof(CodecInst));
- codec_params.enable_vad = vad_enabled_;
- codec_params.enable_dtx = dtx_enabled_;
- codec_params.vad_mode = vad_mode_;
- // Force initialization.
- if (codec_ptr->InitEncoder(&codec_params, true) < 0) {
- // Could not initialize the encoder.
-
- // Check if already have a registered codec.
- // Depending on that different messages are logged.
- if (!send_codec_registered_) {
- current_send_codec_idx_ = -1;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot initialize the encoder, no encoder is registered");
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Initialize the encoder, continue encoding with "
- "the previously registered codec");
- }
- return -1;
- }
-
- // Update states.
- dtx_enabled_ = codec_params.enable_dtx;
- vad_enabled_ = codec_params.enable_vad;
- vad_mode_ = codec_params.vad_mode;
-
- // Everything is fine so we can replace the previous codec with this one.
- if (send_codec_registered_) {
- // If we change codec we start fresh with FEC.
- // This is not strictly required by the standard.
- is_first_red_ = true;
-
- codec_ptr->SetVAD(&dtx_enabled_, &vad_enabled_, &vad_mode_);
- }
-
- current_send_codec_idx_ = codec_id;
- send_codec_registered_ = true;
- memcpy(&send_codec_inst_, &send_codec, sizeof(CodecInst));
- previous_pltype_ = send_codec_inst_.pltype;
- return 0;
- } else {
- // If the codec is the same as the one already registered, check if any
- // parameters have changed compared to the current values.
- // If any parameter is valid then apply it and record.
- bool force_init = false;
-
- if (mirror_id != codec_id) {
- codecs_[codec_id] = codecs_[mirror_id];
- mirror_codec_idx_[codec_id] = mirror_id;
- }
-
- // Check the payload type.
- if (send_codec.pltype != send_codec_inst_.pltype) {
- // At this point check if the given payload type is valid.
- // Record it later when the sampling frequency is changed
- // successfully.
- if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Out of range payload type");
- return -1;
- }
- }
-
- // If ONE instance of a codec supports multiple sampling frequencies, then
- // we need to take care of it here. One such codec is iSAC: both WB and SWB
- // are encoded and decoded with one iSAC instance. Therefore, we need to
- // update the encoder frequency if required.
- if (send_codec_inst_.plfreq != send_codec.plfreq) {
- force_init = true;
-
- // If sampling frequency is changed we have to start fresh with RED.
- is_first_red_ = true;
- }
-
- // If packet size or number of channels has changed, we need to
- // re-initialize the encoder.
- if (send_codec_inst_.pacsize != send_codec.pacsize) {
- force_init = true;
- }
- if (send_codec_inst_.channels != send_codec.channels) {
- force_init = true;
- }
-
- if (force_init) {
- WebRtcACMCodecParams codec_params;
-
- memcpy(&(codec_params.codec_inst), &send_codec, sizeof(CodecInst));
- codec_params.enable_vad = vad_enabled_;
- codec_params.enable_dtx = dtx_enabled_;
- codec_params.vad_mode = vad_mode_;
-
- // Force initialization.
- if (codecs_[current_send_codec_idx_]->InitEncoder(&codec_params,
- true) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Could not change the codec packet-size.");
- return -1;
- }
-
- send_codec_inst_.plfreq = send_codec.plfreq;
- send_codec_inst_.pacsize = send_codec.pacsize;
- send_codec_inst_.channels = send_codec.channels;
- }
-
- // If the change of sampling frequency has been successful then
- // we store the payload-type.
- send_codec_inst_.pltype = send_codec.pltype;
-
- // Check if a change in Rate is required.
- if (send_codec.rate != send_codec_inst_.rate) {
- if (codecs_[codec_id]->SetBitRate(send_codec.rate) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Could not change the codec rate.");
- return -1;
- }
- send_codec_inst_.rate = send_codec.rate;
- }
- previous_pltype_ = send_codec_inst_.pltype;
-
- return 0;
- }
-}
-
-// Get current send codec.
-int32_t AudioCodingModuleImpl::SendCodec(
- CodecInst* current_codec) const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendCodec()");
- CriticalSectionScoped lock(acm_crit_sect_);
-
- assert(current_codec);
- if (!send_codec_registered_) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendCodec Failed, no codec is registered");
-
- return -1;
- }
- WebRtcACMCodecParams encoder_param;
- codecs_[current_send_codec_idx_]->EncoderParams(&encoder_param);
- encoder_param.codec_inst.pltype = send_codec_inst_.pltype;
- memcpy(current_codec, &(encoder_param.codec_inst), sizeof(CodecInst));
-
- return 0;
-}
-
-// Get current send frequency.
-int32_t AudioCodingModuleImpl::SendFrequency() const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendFrequency()");
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!send_codec_registered_) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendFrequency Failed, no codec is registered");
-
- return -1;
- }
-
- return send_codec_inst_.plfreq;
-}
-
-// Get encode bitrate.
-// Adaptive rate codecs return their current encode target rate, while other
- // codecs return their long-term average or their fixed rate.
-int32_t AudioCodingModuleImpl::SendBitrate() const {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!send_codec_registered_) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendBitrate Failed, no codec is registered");
-
- return -1;
- }
-
- WebRtcACMCodecParams encoder_param;
- codecs_[current_send_codec_idx_]->EncoderParams(&encoder_param);
-
- return encoder_param.codec_inst.rate;
-}
-
-// Set available bandwidth, inform the encoder about the estimated bandwidth
-// received from the remote party.
-int32_t AudioCodingModuleImpl::SetReceivedEstimatedBandwidth(
- const int32_t bw) {
- return codecs_[current_send_codec_idx_]->SetEstimatedBandwidth(bw);
-}
-
-// Register a transport callback which will be called to deliver
-// the encoded buffers.
-int32_t AudioCodingModuleImpl::RegisterTransportCallback(
- AudioPacketizationCallback* transport) {
- CriticalSectionScoped lock(callback_crit_sect_);
- packetization_callback_ = transport;
- return 0;
-}
-
-// Add 10MS of raw (PCM) audio data to the encoder.
-int32_t AudioCodingModuleImpl::Add10MsData(
- const AudioFrame& audio_frame) {
- if (audio_frame.samples_per_channel_ <= 0) {
- assert(false);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, payload length is negative or "
- "zero");
- return -1;
- }
-
- // Allow for 8, 16, 32 and 48kHz input audio.
- if ((audio_frame.sample_rate_hz_ != 8000)
- && (audio_frame.sample_rate_hz_ != 16000)
- && (audio_frame.sample_rate_hz_ != 32000)
- && (audio_frame.sample_rate_hz_ != 48000)) {
- assert(false);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, input frequency not valid");
- return -1;
- }
-
- // Check that the length and frequency match. We currently just support raw PCM.
- if ((audio_frame.sample_rate_hz_ / 100)
- != audio_frame.samples_per_channel_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, input frequency and length doesn't"
- " match");
- return -1;
- }
-
- if (audio_frame.num_channels_ != 1 && audio_frame.num_channels_ != 2) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, invalid number of channels.");
- return -1;
- }
-
- CriticalSectionScoped lock(acm_crit_sect_);
- // Do we have a codec registered?
- if (!HaveValidEncoder("Add10MsData")) {
- return -1;
- }
-
- const AudioFrame* ptr_frame;
- // Perform a resampling, also down-mix if it is required and can be
- // performed before resampling (a down mix prior to resampling will take
- // place if both primary and secondary encoders are mono and input is in
- // stereo).
- if (PreprocessToAddData(audio_frame, &ptr_frame) < 0) {
- return -1;
- }
- TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Audio", ptr_frame->timestamp_,
- "now", clock_->TimeInMilliseconds());
-
- // Check whether we need an up-mix or a down-mix.
- bool remix = ptr_frame->num_channels_ != send_codec_inst_.channels;
- if (secondary_encoder_.get() != NULL) {
- remix = remix ||
- (ptr_frame->num_channels_ != secondary_send_codec_inst_.channels);
- }
-
- // If a re-mix is required (up or down), this buffer will store re-mixed
- // version of the input.
- int16_t buffer[WEBRTC_10MS_PCM_AUDIO];
- if (remix) {
- if (ptr_frame->num_channels_ == 1) {
- if (UpMix(*ptr_frame, WEBRTC_10MS_PCM_AUDIO, buffer) < 0)
- return -1;
- } else {
- if (DownMix(*ptr_frame, WEBRTC_10MS_PCM_AUDIO, buffer) < 0)
- return -1;
- }
- }
-
- // When adding data to encoders this pointer is pointing to an audio buffer
- // with correct number of channels.
- const int16_t* ptr_audio = ptr_frame->data_;
-
- // For pushing data to primary, point the |ptr_audio| to correct buffer.
- if (send_codec_inst_.channels != ptr_frame->num_channels_)
- ptr_audio = buffer;
-
- if (codecs_[current_send_codec_idx_]->Add10MsData(
- ptr_frame->timestamp_, ptr_audio, ptr_frame->samples_per_channel_,
- send_codec_inst_.channels) < 0)
- return -1;
-
- if (secondary_encoder_.get() != NULL) {
- // For pushing data to secondary, point the |ptr_audio| to correct buffer.
- ptr_audio = ptr_frame->data_;
- if (secondary_send_codec_inst_.channels != ptr_frame->num_channels_)
- ptr_audio = buffer;
-
- if (secondary_encoder_->Add10MsData(
- ptr_frame->timestamp_, ptr_audio, ptr_frame->samples_per_channel_,
- secondary_send_codec_inst_.channels) < 0)
- return -1;
- }
-
- return 0;
-}
-
- // Perform a resampling and down-mix if required. We down-mix only if the
- // encoder is mono and the input is stereo. In case of dual-streaming, both
- // encoders have to be mono for the down-mix to take place.
-// |*ptr_out| will point to the pre-processed audio-frame. If no pre-processing
-// is required, |*ptr_out| points to |in_frame|.
-int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame,
- const AudioFrame** ptr_out) {
- // Primary and secondary (if exists) should have the same sampling rate.
- assert((secondary_encoder_.get() != NULL) ?
- secondary_send_codec_inst_.plfreq == send_codec_inst_.plfreq : true);
-
- bool resample = static_cast<int32_t>(in_frame.sample_rate_hz_) !=
- send_codec_inst_.plfreq;
-
- // This variable is true if the primary codec and the secondary codec (if it
- // exists) are both mono and the input is stereo.
- bool down_mix;
- if (secondary_encoder_.get() != NULL) {
- down_mix = (in_frame.num_channels_ == 2) &&
- (send_codec_inst_.channels == 1) &&
- (secondary_send_codec_inst_.channels == 1);
- } else {
- down_mix = (in_frame.num_channels_ == 2) &&
- (send_codec_inst_.channels == 1);
- }
-
- if (!down_mix && !resample) {
- // No pre-processing is required.
- last_in_timestamp_ = in_frame.timestamp_;
- last_timestamp_ = in_frame.timestamp_;
- *ptr_out = &in_frame;
- return 0;
- }
-
- *ptr_out = &preprocess_frame_;
- preprocess_frame_.num_channels_ = in_frame.num_channels_;
- int16_t audio[WEBRTC_10MS_PCM_AUDIO];
- const int16_t* src_ptr_audio = in_frame.data_;
- int16_t* dest_ptr_audio = preprocess_frame_.data_;
- if (down_mix) {
- // If resampling is required, the output of the down-mix is written into a
- // local buffer; otherwise, it is written to the output frame.
- if (resample)
- dest_ptr_audio = audio;
- if (DownMix(in_frame, WEBRTC_10MS_PCM_AUDIO, dest_ptr_audio) < 0)
- return -1;
- preprocess_frame_.num_channels_ = 1;
- // Set the input of the resampler to the down-mixed signal.
- src_ptr_audio = audio;
- }
-
- preprocess_frame_.timestamp_ = in_frame.timestamp_;
- preprocess_frame_.samples_per_channel_ = in_frame.samples_per_channel_;
- preprocess_frame_.sample_rate_hz_ = in_frame.sample_rate_hz_;
- // If it is required, we have to do a resampling.
- if (resample) {
- // The result of the resampler is written to output frame.
- dest_ptr_audio = preprocess_frame_.data_;
-
- uint32_t timestamp_diff;
-
- // Calculate the timestamp of this frame.
- if (last_in_timestamp_ > in_frame.timestamp_) {
- // A wrap around has happened.
- timestamp_diff = (static_cast<uint32_t>(0xFFFFFFFF) - last_in_timestamp_)
- + in_frame.timestamp_;
- } else {
- timestamp_diff = in_frame.timestamp_ - last_in_timestamp_;
- }
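- // The timestamp difference is rescaled from the input rate to the send
- // codec rate, e.g. a step of 480 samples at 48 kHz input corresponds to
- // 320 samples at a 32 kHz send codec.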
- preprocess_frame_.timestamp_ = last_timestamp_ +
- static_cast<uint32_t>(timestamp_diff *
- (static_cast<double>(send_codec_inst_.plfreq) /
- static_cast<double>(in_frame.sample_rate_hz_)));
-
- preprocess_frame_.samples_per_channel_ = input_resampler_.Resample10Msec(
- src_ptr_audio, in_frame.sample_rate_hz_, dest_ptr_audio,
- send_codec_inst_.plfreq, preprocess_frame_.num_channels_);
-
- if (preprocess_frame_.samples_per_channel_ < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot add 10 ms audio, resmapling failed");
- return -1;
- }
- preprocess_frame_.sample_rate_hz_ = send_codec_inst_.plfreq;
- }
- last_in_timestamp_ = in_frame.timestamp_;
- last_timestamp_ = preprocess_frame_.timestamp_;
-
- return 0;
-}
-
-/////////////////////////////////////////
-// (FEC) Forward Error Correction
-//
-
-bool AudioCodingModuleImpl::FECStatus() const {
- CriticalSectionScoped lock(acm_crit_sect_);
- return fec_enabled_;
-}
-
- // Configure FEC status, i.e., on/off.
-int32_t
-AudioCodingModuleImpl::SetFECStatus(
-#ifdef WEBRTC_CODEC_RED
- const bool enable_fec) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
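- // FEC in this module is realized via RED (RFC 2198) redundant payloads,
- // so toggling the status resets the RED buffer and fragmentation state.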
- if (fec_enabled_ != enable_fec) {
- // Reset the RED buffer.
- memset(red_buffer_, 0, MAX_PAYLOAD_SIZE_BYTE);
-
- // Reset fragmentation buffers.
- ResetFragmentation(kNumFecFragmentationVectors);
- // Set fec_enabled_.
- fec_enabled_ = enable_fec;
- }
- is_first_red_ = true; // Make sure we restart FEC.
- return 0;
-#else
- const bool /* enable_fec */) {
- fec_enabled_ = false;
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- " WEBRTC_CODEC_RED is undefined => fec_enabled_ = %d",
- fec_enabled_);
- return -1;
-#endif
-}
-
-/////////////////////////////////////////
-// (VAD) Voice Activity Detection
-//
-int32_t AudioCodingModuleImpl::SetVAD(bool enable_dtx, bool enable_vad,
- ACMVADMode mode) {
- CriticalSectionScoped lock(acm_crit_sect_);
- return SetVADSafe(enable_dtx, enable_vad, mode);
-}
-
-int AudioCodingModuleImpl::SetVADSafe(bool enable_dtx, bool enable_vad,
- ACMVADMode mode) {
- // Sanity check of the mode.
- if ((mode != VADNormal) && (mode != VADLowBitrate)
- && (mode != VADAggr) && (mode != VADVeryAggr)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Invalid VAD Mode %d, no change is made to VAD/DTX status",
- static_cast<int>(mode));
- return -1;
- }
-
- // Check that the send codec is mono. We don't support VAD/DTX for stereo
- // sending.
- if ((enable_dtx || enable_vad) && stereo_send_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "VAD/DTX not supported for stereo sending.");
- dtx_enabled_ = false;
- vad_enabled_ = false;
- vad_mode_ = mode;
- return -1;
- }
-
- // We don't support VAD/DTX when dual-streaming is enabled, i.e.
- // secondary-encoder is registered.
- if ((enable_dtx || enable_vad) && secondary_encoder_.get() != NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "VAD/DTX not supported when dual-streaming is enabled.");
- dtx_enabled_ = false;
- vad_enabled_ = false;
- vad_mode_ = mode;
- return -1;
- }
-
- // Store VAD/DTX settings. Values can be changed in the call to "SetVAD"
- // below.
- dtx_enabled_ = enable_dtx;
- vad_enabled_ = enable_vad;
- vad_mode_ = mode;
-
- // If a send codec is registered, set VAD/DTX for the codec.
- if (HaveValidEncoder("SetVAD")) {
- if (codecs_[current_send_codec_idx_]->SetVAD(&dtx_enabled_, &vad_enabled_,
- &vad_mode_) < 0) {
- // SetVAD failed.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetVAD failed");
- dtx_enabled_ = false;
- vad_enabled_ = false;
- return -1;
- }
- }
-
- return 0;
-}
-
-// Get VAD/DTX settings.
-// TODO(tlegrand): Change this method to void.
-int32_t AudioCodingModuleImpl::VAD(bool* dtx_enabled, bool* vad_enabled,
- ACMVADMode* mode) const {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- *dtx_enabled = dtx_enabled_;
- *vad_enabled = vad_enabled_;
- *mode = vad_mode_;
-
- return 0;
-}
-
-/////////////////////////////////////////
-// Receiver
-//
-
-int32_t AudioCodingModuleImpl::InitializeReceiver() {
- CriticalSectionScoped lock(acm_crit_sect_);
- return InitializeReceiverSafe();
-}
-
-// Initialize receiver, resets codec database etc.
-int32_t AudioCodingModuleImpl::InitializeReceiverSafe() {
- initial_delay_ms_ = 0;
- num_packets_accumulated_ = 0;
- num_bytes_accumulated_ = 0;
- accumulated_audio_ms_ = 0;
- first_payload_received_ = 0;
- last_incoming_send_timestamp_ = 0;
- track_neteq_buffer_ = false;
- playout_ts_ = 0;
- // If the receiver is already initialized then we want to destroy any
- // existing decoders. After a call to this function, we should have a clean
- // start-up.
- if (receiver_initialized_) {
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (UnregisterReceiveCodecSafe(i) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "InitializeReceiver() failed, Could not unregister codec");
- return -1;
- }
- }
- }
- if (neteq_.Init() != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "InitializeReceiver() failed, Could not initialize NetEQ");
- return -1;
- }
- neteq_.set_id(id_);
- if (neteq_.AllocatePacketBuffer(ACMCodecDB::NetEQDecoders(),
- ACMCodecDB::kNumCodecs) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "NetEQ cannot allocate_packet Buffer");
- return -1;
- }
-
- // Register RED and CN.
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (IsCodecRED(i) || IsCodecCN(i)) {
- if (RegisterRecCodecMSSafe(ACMCodecDB::database_[i], i, i,
- ACMNetEQ::kMasterJb) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot register master codec.");
- return -1;
- }
- registered_pltypes_[i] = ACMCodecDB::database_[i].pltype;
- }
- }
-
- receiver_initialized_ = true;
- return 0;
-}
-
-// Reset the decoder state.
-int32_t AudioCodingModuleImpl::ResetDecoder() {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- for (int id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
- if ((codecs_[id] != NULL) && (registered_pltypes_[id] != -1)) {
- if (codecs_[id]->ResetDecoder(registered_pltypes_[id]) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ResetDecoder failed:");
- return -1;
- }
- }
- }
- return neteq_.FlushBuffers();
-}
-
-// Get current receive frequency.
-int32_t AudioCodingModuleImpl::ReceiveFrequency() const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "ReceiveFrequency()");
- WebRtcACMCodecParams codec_params;
-
- CriticalSectionScoped lock(acm_crit_sect_);
- if (DecoderParamByPlType(last_recv_audio_codec_pltype_, codec_params) < 0) {
- return neteq_.CurrentSampFreqHz();
- } else if (codec_params.codec_inst.plfreq == 48000) {
- // TODO(tlegrand): Remove this option when we have full 48 kHz support.
- return 32000;
- } else {
- return codec_params.codec_inst.plfreq;
- }
-}
-
-// Get current playout frequency.
-int32_t AudioCodingModuleImpl::PlayoutFrequency() const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "PlayoutFrequency()");
-
- CriticalSectionScoped lock(acm_crit_sect_);
-
- return neteq_.CurrentSampFreqHz();
-}
-
-// Register possible receive codecs, can be called multiple times,
-// for codecs, CNG (NB, WB and SWB), DTMF, RED.
-int32_t AudioCodingModuleImpl::RegisterReceiveCodec(
- const CodecInst& receive_codec) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (receive_codec.channels > 2) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "More than 2 audio channel is not supported.");
- return -1;
- }
-
- int mirror_id;
- int codec_id = ACMCodecDB::ReceiverCodecNumber(&receive_codec, &mirror_id);
-
- if (codec_id < 0 || codec_id >= ACMCodecDB::kNumCodecs) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Wrong codec params to be registered as receive codec");
- return -1;
- }
- // Check if the payload-type is valid.
- if (!ACMCodecDB::ValidPayloadType(receive_codec.pltype)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Invalid payload-type %d for %s.", receive_codec.pltype,
- receive_codec.plname);
- return -1;
- }
-
- if (!receiver_initialized_) {
- if (InitializeReceiverSafe() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot initialize reciver, so failed registering a codec.");
- return -1;
- }
- }
-
- // If codec already registered, unregister. Except for CN where we only
- // unregister if payload type is changing.
- if ((registered_pltypes_[codec_id] == receive_codec.pltype)
- && IsCodecCN(&receive_codec)) {
- // Codec already registered as receiver with this payload type. Nothing
- // to be done.
- return 0;
- } else if (registered_pltypes_[codec_id] != -1) {
- if (UnregisterReceiveCodecSafe(codec_id) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot register master codec.");
- return -1;
- }
- }
-
- if (RegisterRecCodecMSSafe(receive_codec, codec_id, mirror_id,
- ACMNetEQ::kMasterJb) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot register master codec.");
- return -1;
- }
-
- // TODO(andrew): Refactor how the slave is initialized. Can we instead
- // always start up a slave and pre-register CN and RED? We should be able
- // to get rid of stereo_receive_registered_.
- // http://code.google.com/p/webrtc/issues/detail?id=453
-
- // Register stereo codecs with the slave, or, if we've already seen a
- // stereo codec, register CN or RED as a special case.
- if (receive_codec.channels == 2 ||
- (stereo_receive_registered_ && (IsCodecCN(&receive_codec) ||
- IsCodecRED(&receive_codec)))) {
- // TODO(andrew): refactor this block to combine with InitStereoSlave().
-
- if (!stereo_receive_registered_) {
- // This is the first time a stereo codec has been registered. Make
- // some stereo preparations.
-
- // Add a stereo slave.
- assert(neteq_.num_slaves() == 0);
- if (neteq_.AddSlave(ACMCodecDB::NetEQDecoders(),
- ACMCodecDB::kNumCodecs) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot add slave jitter buffer to NetEQ.");
- return -1;
- }
-
- // Register any existing CN or RED codecs with the slave and as stereo.
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (registered_pltypes_[i] != -1 && (IsCodecRED(i) || IsCodecCN(i))) {
- stereo_receive_[i] = true;
-
- CodecInst codec;
- memcpy(&codec, &ACMCodecDB::database_[i], sizeof(CodecInst));
- codec.pltype = registered_pltypes_[i];
- if (RegisterRecCodecMSSafe(codec, i, i, ACMNetEQ::kSlaveJb) < 0) {
- WEBRTC_TRACE(kTraceError, kTraceAudioCoding, id_,
- "Cannot register slave codec.");
- return -1;
- }
- }
- }
- }
-
- if (RegisterRecCodecMSSafe(receive_codec, codec_id, mirror_id,
- ACMNetEQ::kSlaveJb) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot register slave codec.");
- return -1;
- }
-
- if (!stereo_receive_[codec_id] &&
- (last_recv_audio_codec_pltype_ == receive_codec.pltype)) {
- // The last received payload type is the same as the one we are
- // registering. Expected number of channels to receive is one (mono),
- // but we are now registering the receiving codec as stereo (number of
- // channels is 2).
- // Set |last_recv_audio_codec_pltype_| to an invalid value to trigger a
- // flush in NetEq, and a reset of expected number of channels next time a
- // packet is received in AudioCodingModuleImpl::IncomingPacket().
- last_recv_audio_codec_pltype_ = -1;
- }
-
- stereo_receive_[codec_id] = true;
- stereo_receive_registered_ = true;
- } else {
- if (last_recv_audio_codec_pltype_ == receive_codec.pltype &&
- expected_channels_ == 2) {
- // The last received payload type is the same as the one we are
- // registering. Expected number of channels to receive is two (stereo),
- // but we are now registering the receiving codec as mono (number of
- // channels is 1).
- // Set |last_recv_audio_codec_pltype_| to an invalid value to trigger a
- // flush in NetEq, and a reset of expected number of channels next time a
- // packet is received in AudioCodingModuleImpl::IncomingPacket().
- last_recv_audio_codec_pltype_ = -1;
- }
- stereo_receive_[codec_id] = false;
- }
-
- registered_pltypes_[codec_id] = receive_codec.pltype;
-
- if (IsCodecRED(&receive_codec)) {
- receive_red_pltype_ = receive_codec.pltype;
- }
- return 0;
-}
-
-int32_t AudioCodingModuleImpl::RegisterRecCodecMSSafe(
- const CodecInst& receive_codec, int16_t codec_id,
- int16_t mirror_id, ACMNetEQ::JitterBuffer jitter_buffer) {
- ACMGenericCodec** codecs;
- if (jitter_buffer == ACMNetEQ::kMasterJb) {
- codecs = &codecs_[0];
- } else if (jitter_buffer == ACMNetEQ::kSlaveJb) {
- codecs = &slave_codecs_[0];
- if (codecs_[codec_id]->IsTrueStereoCodec()) {
- // True stereo codecs need to use the same codec memory
- // for both master and slave.
- slave_codecs_[mirror_id] = codecs_[mirror_id];
- mirror_codec_idx_[mirror_id] = mirror_id;
- }
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RegisterReceiveCodecMSSafe failed, jitter_buffer is neither "
- "master or slave ");
- return -1;
- }
-
- if (codecs[mirror_id] == NULL) {
- codecs[mirror_id] = CreateCodec(receive_codec);
- if (codecs[mirror_id] == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot create codec to register as receive codec");
- return -1;
- }
- mirror_codec_idx_[mirror_id] = mirror_id;
- }
- if (mirror_id != codec_id) {
- codecs[codec_id] = codecs[mirror_id];
- mirror_codec_idx_[codec_id] = mirror_id;
- }
-
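- // Tell the codec whether it serves the master or the slave jitter buffer.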
- codecs[codec_id]->SetIsMaster(jitter_buffer == ACMNetEQ::kMasterJb);
-
- int16_t status = 0;
- WebRtcACMCodecParams codec_params;
- memcpy(&(codec_params.codec_inst), &receive_codec, sizeof(CodecInst));
- codec_params.enable_vad = false;
- codec_params.enable_dtx = false;
- codec_params.vad_mode = VADNormal;
- if (!codecs[codec_id]->DecoderInitialized()) {
- // Force initialization.
- status = codecs[codec_id]->InitDecoder(&codec_params, true);
- if (status < 0) {
- // Could not initialize the decoder; we don't want to
- // continue if initialization failed.
- WEBRTC_TRACE(
- webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "could not initialize the receive codec, codec not registered");
-
- return -1;
- }
- } else if (mirror_id != codec_id) {
- // Currently this only happens for iSAC.
- // We have to store the decoder parameters.
- codecs[codec_id]->SaveDecoderParam(&codec_params);
- }
-
- if (codecs[codec_id]->RegisterInNetEq(&neteq_, receive_codec) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Receive codec could not be registered in NetEQ");
- return -1;
- }
- // Guarantee that the same payload-type that is
- // registered in NetEQ is stored in the codec.
- codecs[codec_id]->SaveDecoderParam(&codec_params);
-
- return status;
-}
-
-// Get current received codec.
-int32_t AudioCodingModuleImpl::ReceiveCodec(
- CodecInst* current_codec) const {
- WebRtcACMCodecParams decoder_param;
- CriticalSectionScoped lock(acm_crit_sect_);
-
- for (int id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
- if (codecs_[id] != NULL) {
- if (codecs_[id]->DecoderInitialized()) {
- if (codecs_[id]->DecoderParams(&decoder_param,
- last_recv_audio_codec_pltype_)) {
- memcpy(current_codec, &decoder_param.codec_inst,
- sizeof(CodecInst));
- return 0;
- }
- }
- }
- }
-
- // If we are here then we haven't found any codec. Set codec pltype to -1 to
- // indicate that the structure is invalid and return -1.
- current_codec->pltype = -1;
- return -1;
-}
-
-// Incoming packet from network parsed and ready for decode.
-int32_t AudioCodingModuleImpl::IncomingPacket(
- const uint8_t* incoming_payload,
- const int32_t payload_length,
- const WebRtcRTPHeader& rtp_info) {
- WebRtcRTPHeader rtp_header;
-
- memcpy(&rtp_header, &rtp_info, sizeof(WebRtcRTPHeader));
-
- if (payload_length < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "IncomingPacket() Error, payload-length cannot be negative");
- return -1;
- }
-
- {
- // Store the payload type. This will be used to retrieve "received codec"
- // and "received frequency."
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Check whether any packets were missed between the last injected packet and
- // the latest received packet. If so, and we are in AV-sync mode, then we
- // would like to fill the gap. This shouldn't be the first payload.
- if (av_sync_ && first_payload_received_ &&
- rtp_info.header.sequenceNumber > last_sequence_number_ + 1) {
- // If the last packet pushed was a sync-packet, account for all missing
- // packets. Otherwise, leave some room for PLC.
- if (last_packet_was_sync_) {
- while (rtp_info.header.sequenceNumber > last_sequence_number_ + 2) {
- PushSyncPacketSafe();
- }
- } else {
- // Leave room for two packets so that NetEq can perform PLC.
- if (rtp_info.header.sequenceNumber > last_sequence_number_ + 3) {
- last_sequence_number_ += 2;
- last_incoming_send_timestamp_ += last_timestamp_diff_ * 2;
- last_receive_timestamp_ += 2 * last_timestamp_diff_;
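- // Skipping two packets leaves a two-packet gap for NetEq to conceal (PLC);
- // the loop below fills the rest of the gap with sync packets.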
- while (rtp_info.header.sequenceNumber > last_sequence_number_ + 1)
- PushSyncPacketSafe();
- }
- }
- }
-
- uint8_t my_payload_type;
-
- // Check if this is an RED payload.
- if (rtp_info.header.payloadType == receive_red_pltype_) {
- // Get the primary payload-type.
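- // In RED (RFC 2198) the first octet of each block header carries the F bit
- // in the MSB and the block payload type in the lower 7 bits, hence the
- // 0x7F mask.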
- my_payload_type = incoming_payload[0] & 0x7F;
- } else {
- my_payload_type = rtp_info.header.payloadType;
- }
-
- // If the payload is audio, check whether the received payload differs from
- // the previous one.
- if (!rtp_info.type.Audio.isCNG) {
- // This is Audio not CNG.
-
- if (my_payload_type != last_recv_audio_codec_pltype_) {
- // We detected a change in payload type. This matters for iSAC since
- // we are going to use ONE iSAC instance for decoding both WB and
- // SWB payloads. If the payload has changed, there might be a need to
- // reset the sampling rate of the decoder, depending on what we have
- // received "now".
- for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
- if (registered_pltypes_[i] == my_payload_type) {
- if (UpdateUponReceivingCodec(i) != 0)
- return -1;
- break;
- }
- }
- // The codec has changed, so there might be a jump in timestamp; therefore,
- // we have to reset some variables that track the NetEq buffer.
- if (track_neteq_buffer_ || av_sync_) {
- last_incoming_send_timestamp_ = rtp_info.header.timestamp;
- }
-
- if (nack_enabled_) {
- assert(nack_.get());
- // Codec is changed, reset NACK and update sampling rate.
- nack_->Reset();
- nack_->UpdateSampleRate(
- ACMCodecDB::database_[current_receive_codec_idx_].plfreq);
- }
- }
- last_recv_audio_codec_pltype_ = my_payload_type;
- }
-
- // Current timestamp based on the receiver sampling frequency.
- last_receive_timestamp_ = NowTimestamp(current_receive_codec_idx_);
-
- if (nack_enabled_) {
- assert(nack_.get());
- nack_->UpdateLastReceivedPacket(rtp_header.header.sequenceNumber,
- rtp_header.header.timestamp);
- }
- }
-
- int per_neteq_payload_length = payload_length;
- // Split the payload for stereo packets, so that the first half of the payload
- // vector holds the left channel and the second half holds the right channel.
- if (expected_channels_ == 2) {
- if (!rtp_info.type.Audio.isCNG) {
- // Create a new vector for the payload, maximum payload size.
- int32_t length = payload_length;
- uint8_t payload[kMaxPacketSize];
- assert(payload_length <= kMaxPacketSize);
- memcpy(payload, incoming_payload, payload_length);
- codecs_[current_receive_codec_idx_]->SplitStereoPacket(payload, &length);
- rtp_header.type.Audio.channel = 2;
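- // Each jitter buffer (master and slave) receives half of the split payload.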
- per_neteq_payload_length = length / 2;
- // Insert packet into NetEQ.
- if (neteq_.RecIn(payload, length, rtp_header,
- last_receive_timestamp_) < 0)
- return -1;
- } else {
- // If we receive a CNG packet while expecting stereo, we ignore the
- // packet and continue. CNG is not supported for stereo.
- return 0;
- }
- } else {
- if (neteq_.RecIn(incoming_payload, payload_length, rtp_header,
- last_receive_timestamp_) < 0)
- return -1;
- }
-
- {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // UpdateBufferingSafe() uses |last_incoming_send_timestamp_|, so it should
- // be called before the next block.
- if (track_neteq_buffer_)
- UpdateBufferingSafe(rtp_header, per_neteq_payload_length);
-
- if (av_sync_) {
- if (rtp_info.header.sequenceNumber == last_sequence_number_ + 1) {
- last_timestamp_diff_ = rtp_info.header.timestamp -
- last_incoming_send_timestamp_;
- }
- last_sequence_number_ = rtp_info.header.sequenceNumber;
- last_ssrc_ = rtp_info.header.ssrc;
- last_packet_was_sync_ = false;
- }
-
- if (av_sync_ || track_neteq_buffer_) {
- last_incoming_send_timestamp_ = rtp_info.header.timestamp;
- }
-
- // Set the following regardless of whether we track the NetEq buffer or are
- // in AV-sync mode, but only if the received packet is not CNG.
- if (!rtp_info.type.Audio.isCNG)
- first_payload_received_ = true;
- }
- return 0;
-}
-
-int AudioCodingModuleImpl::UpdateUponReceivingCodec(int index) {
- if (codecs_[index] == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceAudioCoding, id_,
- "IncomingPacket() error: payload type found but "
- "corresponding codec is NULL");
- return -1;
- }
- codecs_[index]->UpdateDecoderSampFreq(index);
- neteq_.set_received_stereo(stereo_receive_[index]);
- current_receive_codec_idx_ = index;
-
- // If we have a change in the expected number of channels, flush packet
- // buffers in NetEQ.
- if ((stereo_receive_[index] && (expected_channels_ == 1)) ||
- (!stereo_receive_[index] && (expected_channels_ == 2))) {
- neteq_.FlushBuffers();
- codecs_[index]->ResetDecoder(registered_pltypes_[index]);
- }
-
- if (stereo_receive_[index] && (expected_channels_ == 1)) {
- // When switching from a mono to a stereo codec, reset the slave.
- if (InitStereoSlave() != 0)
- return -1;
- }
-
- // Store number of channels we expect to receive for the current payload type.
- if (stereo_receive_[index]) {
- expected_channels_ = 2;
- } else {
- expected_channels_ = 1;
- }
-
- // Reset previous received channel.
- prev_received_channel_ = 0;
- return 0;
-}
-
-bool AudioCodingModuleImpl::IsCodecForSlave(int index) const {
- return (registered_pltypes_[index] != -1 && stereo_receive_[index]);
-}
-
-int AudioCodingModuleImpl::InitStereoSlave() {
- neteq_.RemoveSlaves();
-
- if (neteq_.AddSlave(ACMCodecDB::NetEQDecoders(),
- ACMCodecDB::kNumCodecs) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot add slave jitter buffer to NetEQ.");
- return -1;
- }
-
- // Register all needed codecs with slave.
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (codecs_[i] != NULL && IsCodecForSlave(i)) {
- WebRtcACMCodecParams decoder_params;
- if (codecs_[i]->DecoderParams(&decoder_params, registered_pltypes_[i])) {
- if (RegisterRecCodecMSSafe(decoder_params.codec_inst,
- i, ACMCodecDB::MirrorID(i),
- ACMNetEQ::kSlaveJb) < 0) {
- WEBRTC_TRACE(kTraceError, kTraceAudioCoding, id_,
- "Cannot register slave codec.");
- return -1;
- }
- }
- }
- }
- return 0;
-}
-
-int AudioCodingModuleImpl::SetMinimumPlayoutDelay(int time_ms) {
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- // Don't let the extra delay be modified while accumulating buffers in NetEq.
- if (track_neteq_buffer_ && first_payload_received_)
- return 0;
- }
- return neteq_.SetMinimumDelay(time_ms);
-}
-
-int AudioCodingModuleImpl::SetMaximumPlayoutDelay(int time_ms) {
- return neteq_.SetMaximumDelay(time_ms);
-}
-
-// Get Dtmf playout status.
-bool AudioCodingModuleImpl::DtmfPlayoutStatus() const {
-#ifndef WEBRTC_CODEC_AVT
- return false;
-#else
- return neteq_.avt_playout();
-#endif
-}
-
- // Configure Dtmf playout status, i.e., whether to play out the incoming
- // out-of-band Dtmf tone.
-int32_t AudioCodingModuleImpl::SetDtmfPlayoutStatus(
-#ifndef WEBRTC_CODEC_AVT
- const bool /* enable */) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "SetDtmfPlayoutStatus() failed: AVT is not supported.");
- return -1;
-#else
- const bool enable) {
- return neteq_.SetAVTPlayout(enable);
-#endif
-}
-
- // Estimate the bandwidth based on the incoming stream; needed for one-way
- // audio where RTCP sends the BW estimate.
- // This is also done in the RTP module.
-int32_t AudioCodingModuleImpl::DecoderEstimatedBandwidth() const {
- CodecInst codec;
- int16_t codec_id = -1;
- int pltype_wb;
- int pltype_swb;
-
- // Get iSAC settings.
- for (int id = 0; id < ACMCodecDB::kNumCodecs; id++) {
- // Store codec settings for codec number |id| in the output struct.
- ACMCodecDB::Codec(id, &codec);
-
- if (!STR_CASE_CMP(codec.plname, "isac")) {
- codec_id = 1;
- pltype_wb = codec.pltype;
-
- ACMCodecDB::Codec(id + 1, &codec);
- pltype_swb = codec.pltype;
-
- break;
- }
- }
-
- if (codec_id < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "DecoderEstimatedBandwidth failed");
- return -1;
- }
-
- if ((last_recv_audio_codec_pltype_ == pltype_wb) ||
- (last_recv_audio_codec_pltype_ == pltype_swb)) {
- return codecs_[codec_id]->GetEstimatedBandwidth();
- } else {
- return -1;
- }
-}
-
-// Set playout mode for: voice, fax, or streaming.
-int32_t AudioCodingModuleImpl::SetPlayoutMode(
- const AudioPlayoutMode mode) {
- if ((mode != voice) && (mode != fax) && (mode != streaming) &&
- (mode != off)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Invalid playout mode.");
- return -1;
- }
- return neteq_.SetPlayoutMode(mode);
-}
-
-// Get playout mode voice, fax.
-AudioPlayoutMode AudioCodingModuleImpl::PlayoutMode() const {
- return neteq_.playout_mode();
-}
-
-// Get 10 milliseconds of raw audio data to play out.
- // Automatically resamples to the requested frequency.
-int32_t AudioCodingModuleImpl::PlayoutData10Ms(
- int32_t desired_freq_hz, AudioFrame* audio_frame) {
- TRACE_EVENT_ASYNC_BEGIN0("webrtc", "ACM::PlayoutData10Ms", this);
- bool stereo_mode;
-
- if (GetSilence(desired_freq_hz, audio_frame)) {
- TRACE_EVENT_ASYNC_END1("webrtc", "ACM::PlayoutData10Ms", this,
- "silence", true);
- return 0; // Silence is generated, return.
- }
-
- // RecOut always returns 10 ms.
- if (neteq_.RecOut(audio_frame_) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "PlayoutData failed, RecOut Failed");
- return -1;
- }
- int decoded_seq_num;
- uint32_t decoded_timestamp;
- bool update_nack =
- neteq_.DecodedRtpInfo(&decoded_seq_num, &decoded_timestamp) &&
- nack_enabled_; // Update NACK only if it is enabled.
- audio_frame->num_channels_ = audio_frame_.num_channels_;
- audio_frame->vad_activity_ = audio_frame_.vad_activity_;
- audio_frame->speech_type_ = audio_frame_.speech_type_;
-
- stereo_mode = (audio_frame_.num_channels_ > 1);
-
- // For stereo playout:
- // Master and Slave samples are interleaved starting with Master.
- const uint16_t receive_freq =
- static_cast<uint16_t>(audio_frame_.sample_rate_hz_);
- bool tone_detected = false;
- int16_t last_detected_tone;
- int16_t tone;
-
- // Limit the scope of ACM Critical section.
- {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Update call statistics.
- call_stats_.DecodedByNetEq(audio_frame->speech_type_);
-
- if (update_nack) {
- assert(nack_.get());
- nack_->UpdateLastDecodedPacket(decoded_seq_num, decoded_timestamp);
- }
-
- // If we are in AV-sync and have already received an audio packet, but the
- // latest packet is too late, then insert a sync packet.
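- // "Too late" here means that the current time, in timestamp units, is more
- // than five packet durations past the last received packet.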
- if (av_sync_ && first_payload_received_ &&
- NowTimestamp(current_receive_codec_idx_) > 5 * last_timestamp_diff_ +
- last_receive_timestamp_) {
- if (!last_packet_was_sync_) {
- // If the last packet inserted was a regular packet, skip two
- // packets to give room for PLC.
- last_incoming_send_timestamp_ += 2 * last_timestamp_diff_;
- last_sequence_number_ += 2;
- last_receive_timestamp_ += 2 * last_timestamp_diff_;
- }
-
- // One sync packet.
- if (PushSyncPacketSafe() < 0)
- return -1;
- }
-
- if ((receive_freq != desired_freq_hz) && (desired_freq_hz != -1)) {
- TRACE_EVENT_ASYNC_END2("webrtc", "ACM::PlayoutData10Ms", this,
- "seqnum", decoded_seq_num,
- "now", clock_->TimeInMilliseconds());
- // Resample payload_data.
- int16_t temp_len = output_resampler_.Resample10Msec(
- audio_frame_.data_, receive_freq, audio_frame->data_,
- desired_freq_hz, audio_frame_.num_channels_);
-
- if (temp_len < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "PlayoutData failed, resampler failed");
- return -1;
- }
-
- // Set the payload data length from the resampler.
- audio_frame->samples_per_channel_ = static_cast<uint16_t>(temp_len);
- // Set the sampling frequency.
- audio_frame->sample_rate_hz_ = desired_freq_hz;
- } else {
- TRACE_EVENT_ASYNC_END2("webrtc", "ACM::PlayoutData10Ms", this,
- "seqnum", decoded_seq_num,
- "now", clock_->TimeInMilliseconds());
- memcpy(audio_frame->data_, audio_frame_.data_,
- audio_frame_.samples_per_channel_ * audio_frame->num_channels_
- * sizeof(int16_t));
- // Set the payload length.
- audio_frame->samples_per_channel_ =
- audio_frame_.samples_per_channel_;
- // Set the sampling frequency.
- audio_frame->sample_rate_hz_ = receive_freq;
- }
-
- // Tone detection done for master channel.
- if (dtmf_detector_ != NULL) {
- // Dtmf Detection.
- if (audio_frame->sample_rate_hz_ == 8000) {
- // Use audio_frame->data_ so that the Dtmf detector doesn't
- // need resampling.
- if (!stereo_mode) {
- dtmf_detector_->Detect(audio_frame->data_,
- audio_frame->samples_per_channel_,
- audio_frame->sample_rate_hz_, tone_detected,
- tone);
- } else {
- // We are in 8 kHz so the master channel needs only 80 samples.
- int16_t master_channel[80];
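- // Stereo samples are interleaved starting with the master channel, so
- // the even-indexed samples (n << 1) form the master channel.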
- for (int n = 0; n < 80; n++) {
- master_channel[n] = audio_frame->data_[n << 1];
- }
- dtmf_detector_->Detect(master_channel,
- audio_frame->samples_per_channel_,
- audio_frame->sample_rate_hz_, tone_detected,
- tone);
- }
- } else {
- // Do the detection on the audio that we got from NetEQ (audio_frame_).
- if (!stereo_mode) {
- dtmf_detector_->Detect(audio_frame_.data_,
- audio_frame_.samples_per_channel_,
- receive_freq, tone_detected, tone);
- } else {
- int16_t master_channel[WEBRTC_10MS_PCM_AUDIO];
- for (int n = 0; n < audio_frame_.samples_per_channel_; n++) {
- master_channel[n] = audio_frame_.data_[n << 1];
- }
- dtmf_detector_->Detect(master_channel,
- audio_frame_.samples_per_channel_,
- receive_freq, tone_detected, tone);
- }
- }
- }
-
- // We want to do this while we are in acm_crit_sect_.
- // (We don't really need to initialize the following
- // variable, but Linux complains if we don't.)
- last_detected_tone = kACMToneEnd;
- if (tone_detected) {
- last_detected_tone = last_detected_tone_;
- last_detected_tone_ = tone;
- }
- }
-
- if (tone_detected) {
- // We will deal with callback here, so enter callback critical section.
- CriticalSectionScoped lock(callback_crit_sect_);
-
- if (dtmf_callback_ != NULL) {
- if (tone != kACMToneEnd) {
- // just a tone
- dtmf_callback_->IncomingDtmf(static_cast<uint8_t>(tone), false);
- } else if ((tone == kACMToneEnd) && (last_detected_tone != kACMToneEnd)) {
- // The tone is "END" and the previously detected tone is
- // not "END," so call fir an end.
- dtmf_callback_->IncomingDtmf(static_cast<uint8_t>(last_detected_tone),
- true);
- }
- }
- }
-
- audio_frame->id_ = id_;
- audio_frame->energy_ = -1;
- audio_frame->timestamp_ = 0;
-
- return 0;
-}
-
-/////////////////////////////////////////
-// Statistics
-//
-
-int32_t AudioCodingModuleImpl::NetworkStatistics(
- ACMNetworkStatistics* statistics) {
- int32_t status;
- status = neteq_.NetworkStatistics(statistics);
- return status;
-}
-
-void AudioCodingModuleImpl::DestructEncoderInst(void* inst) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "DestructEncoderInst()");
- if (!HaveValidEncoder("DestructEncoderInst")) {
- return;
- }
-
- codecs_[current_send_codec_idx_]->DestructEncoderInst(inst);
-}
-
-int16_t AudioCodingModuleImpl::AudioBuffer(
- WebRtcACMAudioBuff& buffer) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "AudioBuffer()");
- if (!HaveValidEncoder("AudioBuffer")) {
- return -1;
- }
- buffer.last_in_timestamp = last_in_timestamp_;
- return codecs_[current_send_codec_idx_]->AudioBuffer(buffer);
-}
-
-int16_t AudioCodingModuleImpl::SetAudioBuffer(
- WebRtcACMAudioBuff& buffer) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "SetAudioBuffer()");
- if (!HaveValidEncoder("SetAudioBuffer")) {
- return -1;
- }
- return codecs_[current_send_codec_idx_]->SetAudioBuffer(buffer);
-}
-
-uint32_t AudioCodingModuleImpl::EarliestTimestamp() const {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "EarliestTimestamp()");
- if (!HaveValidEncoder("EarliestTimestamp")) {
- return -1;
- }
- return codecs_[current_send_codec_idx_]->EarliestTimestamp();
-}
-
-int32_t AudioCodingModuleImpl::RegisterVADCallback(
- ACMVADCallback* vad_callback) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "RegisterVADCallback()");
- CriticalSectionScoped lock(callback_crit_sect_);
- vad_callback_ = vad_callback;
- return 0;
-}
-
-// TODO(turajs): Remove this API if it is not used.
-// TODO(tlegrand): Modify this function to work for stereo, and add tests.
-// TODO(turajs): Receive timestamp in this method is incremented by frame-size
-// and does not reflect the true receive frame-size. Therefore, subsequent
-// jitter computations are not accurate.
-int32_t AudioCodingModuleImpl::IncomingPayload(
- const uint8_t* incoming_payload, const int32_t payload_length,
- const uint8_t payload_type, const uint32_t timestamp) {
- if (payload_length < 0) {
- // Log error in trace file.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "IncomingPacket() Error, payload-length cannot be negative");
- return -1;
- }
-
- if (dummy_rtp_header_ == NULL) {
- // This is the first time that we are using |dummy_rtp_header_|
- // so we have to create it.
- WebRtcACMCodecParams codec_params;
- dummy_rtp_header_ = new WebRtcRTPHeader;
- if (dummy_rtp_header_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "IncomingPayload() Error, out of memory");
- return -1;
- }
- dummy_rtp_header_->header.payloadType = payload_type;
- // Doesn't matter in this case.
- dummy_rtp_header_->header.ssrc = 0;
- dummy_rtp_header_->header.markerBit = false;
- // Start with random numbers.
- dummy_rtp_header_->header.sequenceNumber = rand();
- dummy_rtp_header_->header.timestamp =
- (static_cast<uint32_t>(rand()) << 16) +
- static_cast<uint32_t>(rand());
- dummy_rtp_header_->type.Audio.channel = 1;
-
- if (DecoderParamByPlType(payload_type, codec_params) < 0) {
- // We didn't find a codec with the given payload.
- // Something is wrong, so we exit; but first delete |dummy_rtp_header_|
- // and set it to NULL to start clean next time.
- delete dummy_rtp_header_;
- dummy_rtp_header_ = NULL;
- return -1;
- }
- recv_pl_frame_size_smpls_ = codec_params.codec_inst.pacsize;
- }
-
- if (payload_type != dummy_rtp_header_->header.payloadType) {
- // Payload type has changed since the last time; we might need to
- // update the frame size.
- WebRtcACMCodecParams codec_params;
- if (DecoderParamByPlType(payload_type, codec_params) < 0) {
- // We didn't find a codec with the given payload.
- return -1;
- }
- recv_pl_frame_size_smpls_ = codec_params.codec_inst.pacsize;
- dummy_rtp_header_->header.payloadType = payload_type;
- }
-
- if (timestamp > 0) {
- dummy_rtp_header_->header.timestamp = timestamp;
- }
-
- // Store the payload type. This will be used to retrieve "received codec"
- // and "received frequency."
- last_recv_audio_codec_pltype_ = payload_type;
-
- last_receive_timestamp_ += recv_pl_frame_size_smpls_;
- // Insert in NetEQ.
- if (neteq_.RecIn(incoming_payload, payload_length, *dummy_rtp_header_,
- last_receive_timestamp_) < 0) {
- return -1;
- }
-
- // Get ready for the next payload.
- dummy_rtp_header_->header.sequenceNumber++;
- dummy_rtp_header_->header.timestamp += recv_pl_frame_size_smpls_;
- return 0;
-}
-
-int16_t AudioCodingModuleImpl::DecoderParamByPlType(
- const uint8_t payload_type,
- WebRtcACMCodecParams& codec_params) const {
- CriticalSectionScoped lock(acm_crit_sect_);
- for (int16_t id = 0; id < ACMCodecDB::kMaxNumCodecs;
- id++) {
- if (codecs_[id] != NULL) {
- if (codecs_[id]->DecoderInitialized()) {
- if (codecs_[id]->DecoderParams(&codec_params, payload_type)) {
- return 0;
- }
- }
- }
- }
- // If we are here, it means that we could not find a
- // codec with that payload type. Reset the values to
- // unacceptable values and return -1.
- codec_params.codec_inst.plname[0] = '\0';
- codec_params.codec_inst.pacsize = 0;
- codec_params.codec_inst.rate = 0;
- codec_params.codec_inst.pltype = -1;
- return -1;
-}
-
-int16_t AudioCodingModuleImpl::DecoderListIDByPlName(
- const char* name, const uint16_t frequency) const {
- WebRtcACMCodecParams codec_params;
- CriticalSectionScoped lock(acm_crit_sect_);
- for (int16_t id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
- if ((codecs_[id] != NULL)) {
- if (codecs_[id]->DecoderInitialized()) {
- assert(registered_pltypes_[id] >= 0);
- assert(registered_pltypes_[id] <= 255);
- codecs_[id]->DecoderParams(
- &codec_params, static_cast<uint8_t>(registered_pltypes_[id]));
- if (!STR_CASE_CMP(codec_params.codec_inst.plname, name)) {
- // Check if the given sampling frequency matches.
- // A zero sampling frequency means that matching the names
- // is sufficient and we don't need to check the
- // frequencies.
- // Currently, only iSAC has one name but two
- // sampling frequencies.
- if ((frequency == 0)||
- (codec_params.codec_inst.plfreq == frequency)) {
- return id;
- }
- }
- }
- }
- }
- // If we are here, it means that we could not find a
- // codec with that name (and frequency). Return -1.
- return -1;
-}
-
-int32_t AudioCodingModuleImpl::LastEncodedTimestamp(
- uint32_t& timestamp) const {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (!HaveValidEncoder("LastEncodedTimestamp")) {
- return -1;
- }
- timestamp = codecs_[current_send_codec_idx_]->LastEncodedTimestamp();
- return 0;
-}
-
-int32_t AudioCodingModuleImpl::ReplaceInternalDTXWithWebRtc(
- bool use_webrtc_dtx) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("ReplaceInternalDTXWithWebRtc")) {
- WEBRTC_TRACE(
- webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot replace codec internal DTX when no send codec is registered.");
- return -1;
- }
-
- int32_t res = codecs_[current_send_codec_idx_]->ReplaceInternalDTX(
- use_webrtc_dtx);
- // Check if VAD is turned on, or if there is any error.
- if (res == 1) {
- vad_enabled_ = true;
- } else if (res < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Failed to set ReplaceInternalDTXWithWebRtc(%d)",
- use_webrtc_dtx);
- return res;
- }
-
- return 0;
-}
-
-int32_t AudioCodingModuleImpl::IsInternalDTXReplacedWithWebRtc(
- bool* uses_webrtc_dtx) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("IsInternalDTXReplacedWithWebRtc")) {
- return -1;
- }
- if (codecs_[current_send_codec_idx_]->IsInternalDTXReplaced(uses_webrtc_dtx)
- < 0) {
- return -1;
- }
- return 0;
-}
-
-int AudioCodingModuleImpl::SetISACMaxRate(int max_bit_per_sec) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("SetISACMaxRate")) {
- return -1;
- }
-
- return codecs_[current_send_codec_idx_]->SetISACMaxRate(max_bit_per_sec);
-}
-
-int AudioCodingModuleImpl::SetISACMaxPayloadSize(int max_size_bytes) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("SetISACMaxPayloadSize")) {
- return -1;
- }
-
- return codecs_[current_send_codec_idx_]->SetISACMaxPayloadSize(
- max_size_bytes);
-}
-
-int32_t AudioCodingModuleImpl::ConfigISACBandwidthEstimator(
- int frame_size_ms,
- int rate_bit_per_sec,
- bool enforce_frame_size) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("ConfigISACBandwidthEstimator")) {
- return -1;
- }
-
- return codecs_[current_send_codec_idx_]->ConfigISACBandwidthEstimator(
- frame_size_ms, rate_bit_per_sec, enforce_frame_size);
-}
-
-int32_t AudioCodingModuleImpl::PlayoutTimestamp(
- uint32_t* timestamp) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "PlayoutTimestamp()");
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (track_neteq_buffer_) {
- *timestamp = playout_ts_;
- return 0;
- }
- }
- return neteq_.PlayoutTimestamp(*timestamp);
-}
-
-bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const {
- if ((!send_codec_registered_) || (current_send_codec_idx_ < 0) ||
- (current_send_codec_idx_ >= ACMCodecDB::kNumCodecs)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "%s failed: No send codec is registered.", caller_name);
- return false;
- }
- if ((current_send_codec_idx_ < 0) ||
- (current_send_codec_idx_ >= ACMCodecDB::kNumCodecs)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "%s failed: Send codec index out of range.", caller_name);
- return false;
- }
- if (codecs_[current_send_codec_idx_] == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "%s failed: Send codec is NULL pointer.", caller_name);
- return false;
- }
- return true;
-}
-
-int AudioCodingModuleImpl::UnregisterReceiveCodec(uint8_t payload_type) {
- CriticalSectionScoped lock(acm_crit_sect_);
- int id;
-
- // Search through the list of registered payload types.
- for (id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
- if (registered_pltypes_[id] == payload_type) {
- // We have found the id registered with the payload type.
- break;
- }
- }
-
- if (id >= ACMCodecDB::kNumCodecs) {
- // Payload type was not registered. No need to unregister.
- return 0;
- }
-
- // Unregister the codec with the given payload type.
- return UnregisterReceiveCodecSafe(id);
-}
-
-int32_t AudioCodingModuleImpl::UnregisterReceiveCodecSafe(
- const int16_t codec_id) {
- const WebRtcNetEQDecoder *neteq_decoder = ACMCodecDB::NetEQDecoders();
- int16_t mirror_id = ACMCodecDB::MirrorID(codec_id);
- bool stereo_receiver = false;
-
- if (codecs_[codec_id] != NULL) {
- if (registered_pltypes_[codec_id] != -1) {
- // Store stereo information for future use.
- stereo_receiver = stereo_receive_[codec_id];
-
- // Before deleting the decoder instance, unregister it from NetEQ.
- if (neteq_.RemoveCodec(neteq_decoder[codec_id],
- stereo_receive_[codec_id]) < 0) {
- CodecInst codec;
- ACMCodecDB::Codec(codec_id, &codec);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Unregistering %s-%d from NetEQ failed.", codec.plname,
- codec.plfreq);
- return -1;
- }
-
- // CN is a special case for NetEQ, all three sampling frequencies
- // are unregistered if one is deleted.
- if (IsCodecCN(codec_id)) {
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (IsCodecCN(i)) {
- stereo_receive_[i] = false;
- registered_pltypes_[i] = -1;
- }
- }
- } else {
- if (codec_id == mirror_id) {
- codecs_[codec_id]->DestructDecoder();
- if (stereo_receive_[codec_id]) {
- slave_codecs_[codec_id]->DestructDecoder();
- stereo_receive_[codec_id] = false;
- }
- }
- }
-
- // Check if this is the last registered stereo receive codec.
- if (stereo_receiver) {
- bool no_stereo = true;
-
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (stereo_receive_[i]) {
- // We still have stereo codecs registered.
- no_stereo = false;
- break;
- }
- }
-
- // If we don't have any stereo codecs left, change status.
- if (no_stereo) {
- neteq_.RemoveSlaves(); // No longer need the slave.
- stereo_receive_registered_ = false;
- }
- }
- }
- }
-
- if (registered_pltypes_[codec_id] == receive_red_pltype_) {
- // RED is going to be unregistered, set to an invalid value.
- receive_red_pltype_ = 255;
- }
- registered_pltypes_[codec_id] = -1;
-
- return 0;
-}
-
-int32_t AudioCodingModuleImpl::REDPayloadISAC(
- const int32_t isac_rate, const int16_t isac_bw_estimate,
- uint8_t* payload, int16_t* length_bytes) {
- if (!HaveValidEncoder("EncodeData")) {
- return -1;
- }
- int16_t status;
- status = codecs_[current_send_codec_idx_]->REDPayloadISAC(isac_rate,
- isac_bw_estimate,
- payload,
- length_bytes);
- return status;
-}
-
-void AudioCodingModuleImpl::ResetFragmentation(int vector_size) {
- for (int n = 0; n < kMaxNumFragmentationVectors; n++) {
- fragmentation_.fragmentationOffset[n] = n * MAX_PAYLOAD_SIZE_BYTE;
- }
- memset(fragmentation_.fragmentationLength, 0, kMaxNumFragmentationVectors *
- sizeof(fragmentation_.fragmentationLength[0]));
- memset(fragmentation_.fragmentationTimeDiff, 0, kMaxNumFragmentationVectors *
- sizeof(fragmentation_.fragmentationTimeDiff[0]));
- memset(fragmentation_.fragmentationPlType, 0, kMaxNumFragmentationVectors *
- sizeof(fragmentation_.fragmentationPlType[0]));
- fragmentation_.fragmentationVectorSize =
- static_cast<uint16_t>(vector_size);
-}
-
-// TODO(turajs): Add second parameter to enable/disable AV-sync.
-int AudioCodingModuleImpl::SetInitialPlayoutDelay(int delay_ms) {
- if (delay_ms < 0 || delay_ms > 10000) {
- return -1;
- }
-
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Receiver should be initialized before this call is processed.
- if (!receiver_initialized_) {
- InitializeReceiverSafe();
- }
-
- if (first_payload_received_) {
- // Too late for this API. Only works before a call is started.
- return -1;
- }
- initial_delay_ms_ = delay_ms;
-
- // If the initial delay is zero, the NetEq buffer should not be tracked, and
- // we don't want to be in AV-sync mode.
- track_neteq_buffer_ = delay_ms > 0;
- av_sync_ = delay_ms > 0;
-
- neteq_.EnableAVSync(av_sync_);
- return neteq_.SetMinimumDelay(delay_ms);
-}
-
-bool AudioCodingModuleImpl::GetSilence(int desired_sample_rate_hz,
- AudioFrame* frame) {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (initial_delay_ms_ == 0 || !track_neteq_buffer_) {
- return false;
- }
-
- if (accumulated_audio_ms_ >= initial_delay_ms_) {
- // We have enough data stored to match our initial delay target.
- track_neteq_buffer_ = false;
- return false;
- }
-
- // Record call to silence generator.
- call_stats_.DecodedBySilenceGenerator();
-
- // We stop accumulating packets if the number of packets or the total size
- // exceeds a threshold.
- int max_num_packets;
- int buffer_size_bytes;
- int per_payload_overhead_bytes;
- neteq_.BufferSpec(max_num_packets, buffer_size_bytes,
- per_payload_overhead_bytes);
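- // Approximate the total buffer usage as the payload bytes plus a fixed
- // per-packet overhead.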
- int total_bytes_accumulated = num_bytes_accumulated_ +
- num_packets_accumulated_ * per_payload_overhead_bytes;
- if (num_packets_accumulated_ > max_num_packets * 0.9 ||
- total_bytes_accumulated > buffer_size_bytes * 0.9) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "GetSilence: Initial delay couldn't be achieved."
- " num_packets_accumulated=%d, total_bytes_accumulated=%d",
- num_packets_accumulated_, num_bytes_accumulated_);
- track_neteq_buffer_ = false;
- return false;
- }
-
- if (desired_sample_rate_hz > 0) {
- frame->sample_rate_hz_ = desired_sample_rate_hz;
- } else {
- frame->sample_rate_hz_ = 0;
- if (current_receive_codec_idx_ >= 0) {
- frame->sample_rate_hz_ =
- ACMCodecDB::database_[current_receive_codec_idx_].plfreq;
- } else {
- // No payload received yet, use the default sampling rate of NetEq.
- frame->sample_rate_hz_ = neteq_.CurrentSampFreqHz();
- }
- }
- frame->num_channels_ = expected_channels_;
- frame->samples_per_channel_ = frame->sample_rate_hz_ / 100; // Always 10 ms.
- frame->speech_type_ = AudioFrame::kCNG;
- frame->vad_activity_ = AudioFrame::kVadPassive;
- frame->energy_ = 0;
- int samples = frame->samples_per_channel_ * frame->num_channels_;
- memset(frame->data_, 0, samples * sizeof(int16_t));
- return true;
-}
-
-// Must be called within the scope of ACM critical section.
-int AudioCodingModuleImpl::PushSyncPacketSafe() {
- assert(av_sync_);
- last_sequence_number_++;
- last_incoming_send_timestamp_ += last_timestamp_diff_;
- last_receive_timestamp_ += last_timestamp_diff_;
-
- WebRtcRTPHeader rtp_info;
- rtp_info.header.payloadType = last_recv_audio_codec_pltype_;
- rtp_info.header.ssrc = last_ssrc_;
- rtp_info.header.markerBit = false;
- rtp_info.header.sequenceNumber = last_sequence_number_;
- rtp_info.header.timestamp = last_incoming_send_timestamp_;
- rtp_info.type.Audio.channel = stereo_receive_[current_receive_codec_idx_] ?
- 2 : 1;
- last_packet_was_sync_ = true;
- int payload_len_bytes = neteq_.RecIn(rtp_info, last_receive_timestamp_);
-
- if (payload_len_bytes < 0)
- return -1;
-
- // This is to account for sync packets inserted during the buffering phase.
- if (track_neteq_buffer_)
- UpdateBufferingSafe(rtp_info, payload_len_bytes);
-
- return 0;
-}
-
-// Must be called within the scope of ACM critical section.
-void AudioCodingModuleImpl::UpdateBufferingSafe(const WebRtcRTPHeader& rtp_info,
- int payload_len_bytes) {
- const int in_sample_rate_khz =
- (ACMCodecDB::database_[current_receive_codec_idx_].plfreq / 1000);
- if (first_payload_received_ &&
- rtp_info.header.timestamp > last_incoming_send_timestamp_ &&
- in_sample_rate_khz > 0) {
- accumulated_audio_ms_ += (rtp_info.header.timestamp -
- last_incoming_send_timestamp_) / in_sample_rate_khz;
- }
-
- num_packets_accumulated_++;
- num_bytes_accumulated_ += payload_len_bytes;
-
- playout_ts_ = static_cast<uint32_t>(
- rtp_info.header.timestamp - static_cast<uint32_t>(
- initial_delay_ms_ * in_sample_rate_khz));
-}
-
-uint32_t AudioCodingModuleImpl::NowTimestamp(int codec_id) {
- // Down-cast the time to (32-6) bits since we only care about
- // the least significant bits. (32-6) bits cover 2^(32-6) = 67108864 ms.
- // We mask out the 6 most significant bits of the 32-bit value so that we
- // don't lose resolution in the following multiplication.
- int sample_rate_khz = ACMCodecDB::database_[codec_id].plfreq / 1000;
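- // One millisecond corresponds to sample_rate_khz timestamp ticks, e.g. 32
- // ticks per ms for a 32 kHz codec.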
- const uint32_t now_in_ms = static_cast<uint32_t>(
- clock_->TimeInMilliseconds() & kMaskTimestamp);
- return static_cast<uint32_t>(sample_rate_khz * now_in_ms);
-}
-
-std::vector<uint16_t> AudioCodingModuleImpl::GetNackList(
- int round_trip_time_ms) const {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (round_trip_time_ms < 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "GetNackList: round trip time cannot be negative."
- " round_trip_time_ms=%d", round_trip_time_ms);
- }
- if (nack_enabled_ && round_trip_time_ms >= 0) {
- assert(nack_.get());
- return nack_->GetNackList(round_trip_time_ms);
- }
- std::vector<uint16_t> empty_list;
- return empty_list;
-}
-
-int AudioCodingModuleImpl::LeastRequiredDelayMs() const {
- return std::max(neteq_.LeastRequiredDelayMs(), initial_delay_ms_);
-}
-
-int AudioCodingModuleImpl::EnableNack(size_t max_nack_list_size) {
- // Don't do anything if |max_nack_list_size| is out of range.
- if (max_nack_list_size == 0 ||
- max_nack_list_size > acm2::Nack::kNackListSizeLimit)
- return -1;
-
- CriticalSectionScoped lock(acm_crit_sect_);
- if (!nack_enabled_) {
- nack_.reset(acm2::Nack::Create(kNackThresholdPackets));
- nack_enabled_ = true;
-
- // Sampling rate might need to be updated if we change from disabled to
- // enabled. Do it if the receive codec is valid.
- if (current_receive_codec_idx_ >= 0) {
- nack_->UpdateSampleRate(
- ACMCodecDB::database_[current_receive_codec_idx_].plfreq);
- }
- }
- return nack_->SetMaxNackListSize(max_nack_list_size);
-}
-
-void AudioCodingModuleImpl::DisableNack() {
- CriticalSectionScoped lock(acm_crit_sect_);
- nack_.reset(); // Memory is released.
- nack_enabled_ = false;
-}
-
-const char* AudioCodingModuleImpl::Version() const {
- return kLegacyAcmVersion;
-}
-
-void AudioCodingModuleImpl::GetDecodingCallStatistics(
- AudioDecodingCallStats* call_stats) const {
- CriticalSectionScoped lock(acm_crit_sect_);
- *call_stats = call_stats_.GetDecodingStatistics();
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h
deleted file mode 100644
index f0b22f11465..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h
+++ /dev/null
@@ -1,455 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
-
-#include <vector>
-
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/main/source/acm_resampler.h"
-#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-
-namespace webrtc {
-
-struct WebRtcACMAudioBuff;
-struct WebRtcACMCodecParams;
-class CriticalSectionWrapper;
-class RWLockWrapper;
-class Clock;
-
-namespace acm2 {
-class Nack;
-}
-
-namespace acm1 {
-
-class ACMDTMFDetection;
-class ACMGenericCodec;
-
-class AudioCodingModuleImpl : public AudioCodingModule {
- public:
- AudioCodingModuleImpl(const int32_t id, Clock* clock);
- ~AudioCodingModuleImpl();
-
- virtual const char* Version() const;
-
- // Change the unique identifier of this object.
- virtual int32_t ChangeUniqueId(const int32_t id);
-
- // Returns the number of milliseconds until the module wants a worker thread
- // to call Process.
- int32_t TimeUntilNextProcess();
-
- // Process any pending tasks such as timeouts.
- int32_t Process();
-
- /////////////////////////////////////////
- // Sender
- //
-
- // Initialize send codec.
- int32_t InitializeSender();
-
- // Reset send codec.
- int32_t ResetEncoder();
-
- // Can be called multiple times for Codec, CNG, RED.
- int32_t RegisterSendCodec(const CodecInst& send_codec);
-
- // Register Secondary codec for dual-streaming. Dual-streaming is activated
- // right after the secondary codec is registered.
- int RegisterSecondarySendCodec(const CodecInst& send_codec);
-
- // Unregister the secondary codec. Dual-streaming is deactivated right after
- // deregistering secondary codec.
- void UnregisterSecondarySendCodec();
-
- // Get the secondary codec.
- int SecondarySendCodec(CodecInst* secondary_codec) const;
-
- // Get current send codec.
- int32_t SendCodec(CodecInst* current_codec) const;
-
- // Get current send frequency.
- int32_t SendFrequency() const;
-
- // Get encode bit-rate.
- // Adaptive rate codecs return their current encode target rate, while other
- // codecs return their long-term average or their fixed rate.
- int32_t SendBitrate() const;
-
- // Set available bandwidth; informs the encoder about the
- // estimated bandwidth received from the remote party.
- virtual int32_t SetReceivedEstimatedBandwidth(const int32_t bw);
-
- // Register a transport callback which will be
- // called to deliver the encoded buffers.
- int32_t RegisterTransportCallback(AudioPacketizationCallback* transport);
-
- // Add 10 ms of raw (PCM) audio data to the encoder.
- int32_t Add10MsData(const AudioFrame& audio_frame);
-
- /////////////////////////////////////////
- // (FEC) Forward Error Correction
- //
-
- // Configure FEC status, i.e. on/off.
- int32_t SetFECStatus(const bool enable_fec);
-
- // Get FEC status.
- bool FECStatus() const;
-
- /////////////////////////////////////////
- // (VAD) Voice Activity Detection
- // and
- // (CNG) Comfort Noise Generation
- //
-
- int32_t SetVAD(bool enable_dtx = true,
- bool enable_vad = false,
- ACMVADMode mode = VADNormal);
-
- int32_t VAD(bool* dtx_enabled, bool* vad_enabled, ACMVADMode* mode) const;
-
- int32_t RegisterVADCallback(ACMVADCallback* vad_callback);
-
- /////////////////////////////////////////
- // Receiver
- //
-
- // Initialize receiver, resets codec database etc.
- int32_t InitializeReceiver();
-
- // Reset the decoder state.
- int32_t ResetDecoder();
-
- // Get current receive frequency.
- int32_t ReceiveFrequency() const;
-
- // Get current playout frequency.
- int32_t PlayoutFrequency() const;
-
- // Register possible receive codecs; can be called multiple times,
- // for codecs, CNG, DTMF, RED.
- int32_t RegisterReceiveCodec(const CodecInst& receive_codec);
-
- // Get current received codec.
- int32_t ReceiveCodec(CodecInst* current_codec) const;
-
- // Incoming packet from network parsed and ready for decode.
- int32_t IncomingPacket(const uint8_t* incoming_payload,
- const int32_t payload_length,
- const WebRtcRTPHeader& rtp_info);
-
- // Incoming payloads without RTP info; the RTP info will be created in ACM.
- // One usage of this API is when pre-encoded files are pushed into ACM.
- int32_t IncomingPayload(const uint8_t* incoming_payload,
- const int32_t payload_length,
- const uint8_t payload_type,
- const uint32_t timestamp = 0);
-
- // NetEq minimum playout delay (used for lip-sync). The actual target delay
- // is the max of |time_ms| and the required delay dictated by the channel.
- int SetMinimumPlayoutDelay(int time_ms);
-
- // NetEq maximum playout delay. The actual target delay is the min of
- // |time_ms| and the required delay dictated by the channel.
- int SetMaximumPlayoutDelay(int time_ms);
-
- // The shortest latency, in milliseconds, required by jitter buffer. This
- // is computed based on inter-arrival times and playout mode of NetEq. The
- // actual delay is the maximum of least-required-delay and the minimum-delay
- // specified by the SetMinimumPlayoutDelay() API.
- //
- int LeastRequiredDelayMs() const;
-
- // Configure DTMF playout status, i.e. whether to play out incoming
- // out-of-band DTMF tones.
- int32_t SetDtmfPlayoutStatus(const bool enable);
-
- // Get DTMF playout status.
- bool DtmfPlayoutStatus() const;
-
- // Estimate the bandwidth based on the incoming stream; needed for
- // one-way audio where RTCP sends the BW estimate.
- // This is also done in the RTP module.
- int32_t DecoderEstimatedBandwidth() const;
-
- // Set playout mode voice, fax.
- int32_t SetPlayoutMode(const AudioPlayoutMode mode);
-
- // Get playout mode voice, fax.
- AudioPlayoutMode PlayoutMode() const;
-
- // Get playout timestamp.
- int32_t PlayoutTimestamp(uint32_t* timestamp);
-
- // Get 10 milliseconds of raw audio data to play out, and automatically
- // resample to the requested frequency if > 0.
- int32_t PlayoutData10Ms(int32_t desired_freq_hz,
- AudioFrame* audio_frame);
-
- /////////////////////////////////////////
- // Statistics
- //
-
- int32_t NetworkStatistics(ACMNetworkStatistics* statistics);
-
- void DestructEncoderInst(void* inst);
-
- int16_t AudioBuffer(WebRtcACMAudioBuff& buffer);
-
- // Get RED payload for iSAC. The method is called when 'this' ACM is
- // the default ACM.
- int32_t REDPayloadISAC(const int32_t isac_rate,
- const int16_t isac_bw_estimate,
- uint8_t* payload,
- int16_t* length_bytes);
-
- int16_t SetAudioBuffer(WebRtcACMAudioBuff& buffer);
-
- uint32_t EarliestTimestamp() const;
-
- int32_t LastEncodedTimestamp(uint32_t& timestamp) const;
-
- int32_t ReplaceInternalDTXWithWebRtc(const bool use_webrtc_dtx);
-
- int32_t IsInternalDTXReplacedWithWebRtc(bool* uses_webrtc_dtx);
-
- int SetISACMaxRate(int max_bit_per_sec);
-
- int SetISACMaxPayloadSize(int max_size_bytes);
-
- int32_t ConfigISACBandwidthEstimator(
- int frame_size_ms,
- int rate_bit_per_sec,
- bool enforce_frame_size = false);
-
- int UnregisterReceiveCodec(uint8_t payload_type);
-
- std::vector<uint16_t> GetNackList(int round_trip_time_ms) const;
-
- protected:
- void UnregisterSendCodec();
-
- int32_t UnregisterReceiveCodecSafe(const int16_t id);
-
- ACMGenericCodec* CreateCodec(const CodecInst& codec);
-
- int16_t DecoderParamByPlType(const uint8_t payload_type,
- WebRtcACMCodecParams& codec_params) const;
-
- int16_t DecoderListIDByPlName(
- const char* name, const uint16_t frequency = 0) const;
-
- int32_t InitializeReceiverSafe();
-
- bool HaveValidEncoder(const char* caller_name) const;
-
- int32_t RegisterRecCodecMSSafe(const CodecInst& receive_codec,
- int16_t codec_id,
- int16_t mirror_id,
- ACMNetEQ::JitterBuffer jitter_buffer);
-
- // Set VAD/DTX status. This function does not acquire a lock, and it is
- // created to be called only from inside a critical section.
- int SetVADSafe(bool enable_dtx, bool enable_vad, ACMVADMode mode);
-
- // Process buffered audio when dual-streaming is not enabled. (This function
- // is still used when RED is enabled.)
- int ProcessSingleStream();
-
- // Process buffered audio when dual-streaming is enabled, i.e. secondary send
- // codec is registered.
- int ProcessDualStream();
-
- // Preprocessing of input audio, including resampling and down-mixing if
- // required, before pushing audio into encoder's buffer.
- //
- // in_frame: input audio-frame
- // ptr_out: pointer to output audio_frame. If no preprocessing is required
- // |ptr_out| will be pointing to |in_frame|, otherwise pointing to
- // |preprocess_frame_|.
- //
- // Return value:
- // -1: if encountering an error.
- // 0: otherwise.
- int PreprocessToAddData(const AudioFrame& in_frame,
- const AudioFrame** ptr_out);
-
- // Set initial playout delay.
- // -delay_ms: delay in milliseconds.
- //
- // Return value:
- // -1: if cannot set the delay.
- // 0: if delay set successfully.
- int SetInitialPlayoutDelay(int delay_ms);
-
- // Enable NACK and set the maximum size of the NACK list.
- int EnableNack(size_t max_nack_list_size);
-
- // Disable NACK.
- void DisableNack();
-
- void GetDecodingCallStatistics(AudioDecodingCallStats* call_stats) const;
-
- private:
- // Change required states after starting to receive the codec corresponding
- // to |index|.
- int UpdateUponReceivingCodec(int index);
-
- // Remove all slaves and initialize a stereo slave with required codecs
- // from the master.
- int InitStereoSlave();
-
- // Returns true if the codec's |index| is registered with the master and
- // is a stereo codec, RED or CN.
- bool IsCodecForSlave(int index) const;
-
- int EncodeFragmentation(int fragmentation_index, int payload_type,
- uint32_t current_timestamp,
- ACMGenericCodec* encoder,
- uint8_t* stream);
-
- void ResetFragmentation(int vector_size);
-
- bool GetSilence(int desired_sample_rate_hz, AudioFrame* frame);
-
- // Push a synchronization packet into NetEq. Such packets result in a frame
- // of zeros (not decoded by the corresponding decoder). The size of the frame
- // is the same as the last decoded frame. NetEq has a special payload for this.
- // Call within the scope of ACM critical section.
- int PushSyncPacketSafe();
-
- // Update the parameters required in the initial phase of buffering, when an
- // initial playout delay is requested. Call within the scope of the ACM
- // critical section.
- void UpdateBufferingSafe(const WebRtcRTPHeader& rtp_info,
- int payload_len_bytes);
-
- //
- // Return the timestamp of the current time, computed according to the
- // sampling rate of the codec identified by |codec_id|.
- //
- uint32_t NowTimestamp(int codec_id);
-
- AudioPacketizationCallback* packetization_callback_;
- int32_t id_;
- uint32_t last_timestamp_;
- uint32_t last_in_timestamp_;
- CodecInst send_codec_inst_;
- uint8_t cng_nb_pltype_;
- uint8_t cng_wb_pltype_;
- uint8_t cng_swb_pltype_;
- uint8_t cng_fb_pltype_;
- uint8_t red_pltype_;
- bool vad_enabled_;
- bool dtx_enabled_;
- ACMVADMode vad_mode_;
- ACMGenericCodec* codecs_[ACMCodecDB::kMaxNumCodecs];
- ACMGenericCodec* slave_codecs_[ACMCodecDB::kMaxNumCodecs];
- int16_t mirror_codec_idx_[ACMCodecDB::kMaxNumCodecs];
- bool stereo_receive_[ACMCodecDB::kMaxNumCodecs];
- bool stereo_receive_registered_;
- bool stereo_send_;
- int prev_received_channel_;
- int expected_channels_;
- int32_t current_send_codec_idx_;
- int current_receive_codec_idx_;
- bool send_codec_registered_;
- ACMResampler input_resampler_;
- ACMResampler output_resampler_;
- ACMNetEQ neteq_;
- CriticalSectionWrapper* acm_crit_sect_;
- ACMVADCallback* vad_callback_;
- uint8_t last_recv_audio_codec_pltype_;
-
- // RED/FEC.
- bool is_first_red_;
- bool fec_enabled_;
- // TODO(turajs): |red_buffer_| is allocated in the constructor; why keep it
- // as a pointer and not an array? If concerned about the memory, then add a
- // set-up function to allocate it only when it is going to be used, i.e. when
- // FEC or dual-streaming is enabled.
- uint8_t* red_buffer_;
- // TODO(turajs): we actually don't need |fragmentation_| as a member variable.
- // It is sufficient to keep the length & payload type of previous payload in
- // member variables.
- RTPFragmentationHeader fragmentation_;
- uint32_t last_fec_timestamp_;
- // If no RED is registered as receive codec this
- // will have an invalid value.
- uint8_t receive_red_pltype_;
-
- // This is to keep track of CN instances where we can send DTMFs.
- uint8_t previous_pltype_;
-
- // This keeps track of payload types associated with codecs_[].
- // We define it as a signed variable and initialize it with -1 to indicate
- // unused elements.
- int16_t registered_pltypes_[ACMCodecDB::kMaxNumCodecs];
-
- // Used when payloads are pushed into ACM without any RTP info.
- // One example is when a pre-encoded bit-stream is pushed from
- // a file.
- WebRtcRTPHeader* dummy_rtp_header_;
- uint16_t recv_pl_frame_size_smpls_;
-
- bool receiver_initialized_;
- ACMDTMFDetection* dtmf_detector_;
-
- AudioCodingFeedback* dtmf_callback_;
- int16_t last_detected_tone_;
- CriticalSectionWrapper* callback_crit_sect_;
-
- AudioFrame audio_frame_;
- AudioFrame preprocess_frame_;
- CodecInst secondary_send_codec_inst_;
- scoped_ptr<ACMGenericCodec> secondary_encoder_;
-
- // Initial delay.
- int initial_delay_ms_;
- int num_packets_accumulated_;
- int num_bytes_accumulated_;
- int accumulated_audio_ms_;
- int first_payload_received_;
- uint32_t last_incoming_send_timestamp_;
- bool track_neteq_buffer_;
- uint32_t playout_ts_;
-
- // Whether AV-sync is enabled. In AV-sync mode, sync packets are pushed
- // during long packet losses.
- bool av_sync_;
-
- // Latest send timestamp difference of two consecutive packets.
- uint32_t last_timestamp_diff_;
- uint16_t last_sequence_number_;
- uint32_t last_ssrc_;
- bool last_packet_was_sync_;
- int64_t last_receive_timestamp_;
-
- Clock* clock_;
- scoped_ptr<acm2::Nack> nack_;
- bool nack_enabled_;
-
- acm2::CallStatistics call_stats_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
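The deleted header above documents how the legacy acm1 module exposes playout-delay control (minimum/maximum delay and the least required delay computed by NetEq). A minimal, hedged sketch of how those calls fit together, assuming the same methods are also exposed on the public AudioCodingModule interface; the 80/500 ms values are illustrative only and not part of the patch:

// Sketch only; |acm| and the delay values are illustrative.
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"

void ConfigurePlayoutDelay(webrtc::AudioCodingModule* acm) {
  acm->SetMinimumPlayoutDelay(80);   // Target at least 80 ms (e.g. lip-sync).
  acm->SetMaximumPlayoutDelay(500);  // Never buffer more than 500 ms.
  // NetEq's own requirement, based on inter-arrival jitter; the actual target
  // delay is the maximum of this and the minimum set above.
  int least_required_ms = acm->LeastRequiredDelayMs();
  (void)least_required_ms;
}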
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/Android.mk b/chromium/third_party/webrtc/modules/audio_coding/neteq/Android.mk
deleted file mode 100644
index 84267becf2c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/Android.mk
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-include $(LOCAL_PATH)/../../../../android-webrtc.mk
-
-LOCAL_ARM_MODE := arm
-LOCAL_MODULE_CLASS := STATIC_LIBRARIES
-LOCAL_MODULE := libwebrtc_neteq
-LOCAL_MODULE_TAGS := optional
-LOCAL_SRC_FILES := \
- accelerate.c \
- automode.c \
- bgn_update.c \
- bufstats_decision.c \
- cng_internal.c \
- codec_db.c \
- correlator.c \
- dsp.c \
- dsp_helpfunctions.c \
- dtmf_buffer.c \
- dtmf_tonegen.c \
- expand.c \
- mcu_address_init.c \
- mcu_dsp_common.c \
- mcu_reset.c \
- merge.c \
- min_distortion.c \
- mix_voice_unvoice.c \
- mute_signal.c \
- normal.c \
- packet_buffer.c \
- peak_detection.c \
- preemptive_expand.c \
- random_vector.c \
- recin.c \
- recout.c \
- rtcp.c \
- rtp.c \
- set_fs.c \
- signal_mcu.c \
- split_and_insert.c \
- unmute_signal.c \
- webrtc_neteq.c
-
-# Flags passed to both C and C++ files.
-LOCAL_CFLAGS := \
- $(MY_WEBRTC_COMMON_DEFS) \
- '-DNETEQ_VOICEENGINE_CODECS'
-
-LOCAL_C_INCLUDES := \
- $(LOCAL_PATH)/interface \
- $(LOCAL_PATH)/../codecs/cng/include \
- $(LOCAL_PATH)/../../.. \
- $(LOCAL_PATH)/../../../common_audio/signal_processing/include
-
-LOCAL_SHARED_LIBRARIES := \
- libcutils \
- libdl \
- libstlport
-
-ifndef NDK_ROOT
-include external/stlport/libstlport.mk
-endif
-include $(BUILD_STATIC_LIBRARY)
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/neteq/OWNERS
index b5c79cef499..072e754998f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/OWNERS
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/OWNERS
@@ -1,3 +1,11 @@
henrik.lundin@webrtc.org
tina.legrand@webrtc.org
turaj@webrtc.org
+minyue@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.c
deleted file mode 100644
index a345a8fdcd2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.c
+++ /dev/null
@@ -1,493 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the Accelerate algorithm that is used to reduce
- * the delay by removing a part of the audio stream.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-
-#define ACCELERATE_CORR_LEN 50
-#define ACCELERATE_MIN_LAG 10
-#define ACCELERATE_MAX_LAG 60
-#define ACCELERATE_DOWNSAMPLED_LEN (ACCELERATE_CORR_LEN + ACCELERATE_MAX_LAG)
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_downSampSpeech 110 0 109
- int32_t pw32_corr 2*50 110 209
- int16_t pw16_corr 50 0 49
-
- Total: 110+2*50
- */
-
-#define SCRATCH_PW16_DS_SPEECH 0
-#define SCRATCH_PW32_CORR ACCELERATE_DOWNSAMPLED_LEN
-#define SCRATCH_PW16_CORR 0
-
-/****************************************************************************
- * WebRtcNetEQ_Accelerate(...)
- *
- * This function tries to shorten the audio data by removing one or several
- * pitch periods. The operation is only carried out if the correlation is
- * strong or if the signal energy is very low.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to newly decoded speech.
- * - len : Length of decoded speech.
- * - BGNonly : If non-zero, Accelerate will only remove the last
- *                      DEFAULT_TIME_ADJUST milliseconds of the input.
- * No signal matching is done.
- *
- * Output:
- * - inst : Updated instance
- * - outData : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Number of samples written to outData.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- const int16_t *pw16_decoded, int len,
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly)
-{
-
-#ifdef SCRATCH
- /* Use scratch memory for internal temporary vectors */
- int16_t *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
- int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
- int16_t *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
-#else
- /* Allocate memory for temporary vectors */
- int16_t pw16_downSampSpeech[ACCELERATE_DOWNSAMPLED_LEN];
- int32_t pw32_corr[ACCELERATE_CORR_LEN];
- int16_t pw16_corr[ACCELERATE_CORR_LEN];
-#endif
- int16_t w16_decodedMax = 0;
- int16_t w16_tmp;
- int16_t w16_tmp2;
- int32_t w32_tmp;
- int32_t w32_tmp2;
-
- const int16_t w16_startLag = ACCELERATE_MIN_LAG;
- const int16_t w16_endLag = ACCELERATE_MAX_LAG;
- const int16_t w16_corrLen = ACCELERATE_CORR_LEN;
- const int16_t *pw16_vec1, *pw16_vec2;
- int16_t *pw16_vectmp;
- int16_t w16_inc, w16_startfact;
- int16_t w16_bestIndex, w16_bestVal;
- int16_t w16_VAD = 1;
- int16_t fsMult;
- int16_t fsMult120;
- int32_t w32_en1, w32_en2, w32_cc;
- int16_t w16_en1, w16_en2;
- int16_t w16_en1Scale, w16_en2Scale;
- int16_t w16_sqrtEn1En2;
- int16_t w16_bestCorr = 0;
- int ok;
-
-#ifdef NETEQ_STEREO
- MasterSlaveInfo *msInfo = inst->msInfo;
-#endif
-
- fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */
-
- /* Pre-calculate common multiplication with fsMult */
- fsMult120 = (int16_t) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
-
- inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
-
- /* Sanity check for len variable; must be (almost) 30 ms
- (120*fsMult + max(bestIndex)) */
- if (len < (int16_t) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult))
- {
- /* Length of decoded data too short */
- inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
- *pw16_len = len;
-
- /* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return NETEQ_OTHER_ERROR;
- }
-
- /***********************************/
- /* Special operations for BGN only */
- /***********************************/
-
- /* Check if "background noise only" flag is set */
- if (BGNonly)
- {
- /* special operation for BGN only; simply remove a chunk of data */
- w16_bestIndex = DEFAULT_TIME_ADJUST * WEBRTC_SPL_LSHIFT_W16(fsMult, 3); /* X*fs/1000 */
-
- /* Sanity check for bestIndex */
- if (w16_bestIndex > len)
- { /* not good, do nothing instead */
- inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
- *pw16_len = len;
-
- /* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return NETEQ_OTHER_ERROR;
- }
-
- /* set length parameter */
- *pw16_len = len - w16_bestIndex; /* we remove bestIndex samples */
-
- /* copy to output */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, *pw16_len);
-
- /* set mode */
- inst->w16_mode = MODE_LOWEN_ACCELERATE;
-
- /* update statistics */
- inst->statInst.accelerateLength += w16_bestIndex;
- /* Short-term activity statistics. */
- inst->activity_stats.accelerate_bgn_samples += w16_bestIndex;
-
- return 0;
- } /* end of special code for BGN mode */
-
-#ifdef NETEQ_STEREO
-
- /* Sanity for msInfo */
- if (msInfo == NULL)
- {
- /* this should not happen here */
- return MASTER_SLAVE_ERROR;
- }
-
- if (msInfo->msMode != NETEQ_SLAVE)
- {
- /* Find correlation lag only for non-slave instances */
-
-#endif
-
- /****************************************************************/
- /* Find the strongest correlation lag by downsampling to 4 kHz, */
- /* calculating correlation for downsampled signal and finding */
- /* the strongest correlation peak. */
- /****************************************************************/
-
- /* find maximum absolute value */
- w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
-
- /* downsample the decoded speech to 4 kHz */
- ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
- ACCELERATE_DOWNSAMPLED_LEN, 1 /* compensate delay*/);
- if (ok != 0)
- {
- /* error */
- inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
- *pw16_len = len;
- /* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
- return NETEQ_OTHER_ERROR;
- }
-
- /*
- * Set scaling factor for cross correlation to protect against overflow
- * (log2(50) => 6)
- */
- w16_tmp = 6 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- /* Perform correlation from lag 10 to lag 60 in 4 kHz domain */
- WebRtcNetEQ_CrossCorr(
- pw32_corr, &pw16_downSampSpeech[w16_endLag],
- &pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
- (int16_t) (w16_endLag - w16_startLag), w16_tmp, -1);
-
- /* Normalize correlation to 14 bits and put in a int16_t vector */
- w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
- w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_corrLen, pw32_corr, w16_tmp);
-
-#ifdef NETEQ_STEREO
- } /* end if (msInfo->msMode != NETEQ_SLAVE) */
-
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
- /* Find the strongest correlation peak by using the parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corr, (int16_t) w16_corrLen, 1, fsMult,
- &w16_bestIndex, &w16_bestVal);
- /* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */
-
- /* Compensate bestIndex for displaced starting position */
- w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
- /* 20*fsMult <= bestIndex <= 119*fsMult */
-
- msInfo->bestIndex = w16_bestIndex;
- }
- else if (msInfo->msMode == NETEQ_SLAVE)
- {
- if (msInfo->extraInfo == ACC_FAIL)
- {
- /* Master has signaled an unsuccessful accelerate */
- w16_bestIndex = 0;
- }
- else
- {
- /* Get best index from master */
- w16_bestIndex = msInfo->bestIndex;
- }
- }
- else
- {
- /* Invalid mode */
- return MASTER_SLAVE_ERROR;
- }
-
-#else /* NETEQ_STEREO */
-
- /* Find the strongest correlation peak by using the parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corr, (int16_t) w16_corrLen, 1, fsMult,
- &w16_bestIndex, &w16_bestVal);
- /* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */
-
- /* Compensate bestIndex for displaced starting position */
- w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
- /* 20*fsMult <= bestIndex <= 119*fsMult */
-
-#endif /* NETEQ_STEREO */
-
-#ifdef NETEQ_STEREO
-
- if (msInfo->msMode != NETEQ_SLAVE)
- {
- /* Calculate correlation only for non-slave instances */
-
-#endif /* NETEQ_STEREO */
-
- /*****************************************************/
- /* Calculate correlation bestCorr for the found lag. */
- /* Also do a simple VAD decision. */
- /*****************************************************/
-
- /*
- * Calculate scaling to ensure that bestIndex samples can be square-summed
- * without overflowing
- */
- w16_tmp = (31
- - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax)));
- w16_tmp += (31 - WebRtcSpl_NormW32(w16_bestIndex));
- w16_tmp -= 31;
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- /* vec1 starts at 15 ms minus one pitch period */
- pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
- /* vec2 start at 15 ms */
- pw16_vec2 = &pw16_decoded[fsMult120];
-
- /* Calculate energies for vec1 and vec2 */
- w32_en1 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1,
- (int16_t*) pw16_vec1, w16_bestIndex, w16_tmp);
- w32_en2 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec2,
- (int16_t*) pw16_vec2, w16_bestIndex, w16_tmp);
-
- /* Calculate cross-correlation at the found lag */
- w32_cc = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1, (int16_t*) pw16_vec2,
- w16_bestIndex, w16_tmp);
-
- /* Check VAD constraint
- ((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
- w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_en1 + w32_en2, 4); /* (en1+en2)/(2*8) */
- if (inst->BGNInst.w16_initialized == 1)
- {
- w32_tmp2 = inst->BGNInst.w32_energy;
- }
- else
- {
- /* if BGN parameters have not been estimated, use a fixed threshold */
- w32_tmp2 = 75000;
- }
- w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
- w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
- w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
- w16_tmp2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
- w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);
-
- /* Scale w32_tmp properly before comparing with w32_tmp2 */
- /* (w16_tmp is scaling before energy calculation, thus 2*w16_tmp) */
- if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
- {
- /* Cannot scale only w32_tmp, must scale w32_tmp2 too */
- int16_t tempshift = WebRtcSpl_NormW32(w32_tmp);
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
- w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
- WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
- }
- else
- {
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp,
- WEBRTC_SPL_LSHIFT_W32(w16_tmp,1));
- }
-
- if (w32_tmp <= w32_tmp2) /*((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
- {
- /* The signal seems to be passive speech */
- w16_VAD = 0;
- w16_bestCorr = 0; /* Correlation does not matter */
- }
- else
- {
- /* The signal is active speech */
- w16_VAD = 1;
-
- /* Calculate correlation (cc/sqrt(en1*en2)) */
-
- /* Start with calculating scale values */
- w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
- w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
- w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
- w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
-
- /* Make sure total scaling is even (to simplify scale factor after sqrt) */
- if ((w16_en1Scale + w16_en2Scale) & 1)
- {
- w16_en1Scale += 1;
- }
-
- /* Convert energies to int16_t */
- w16_en1 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
- w16_en2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
-
- /* Calculate energy product */
- w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
-
- /* Calculate square-root of energy product */
- w16_sqrtEn1En2 = (int16_t) WebRtcSpl_SqrtFloor(w32_tmp);
-
- /* Calculate cc/sqrt(en1*en2) in Q14 */
- w16_tmp = 14 - WEBRTC_SPL_RSHIFT_W16(w16_en1Scale+w16_en2Scale, 1);
- w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
- w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
- w16_bestCorr = (int16_t) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
- w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
- }
-
-#ifdef NETEQ_STEREO
-
- } /* end if (msInfo->msMode != NETEQ_SLAVE) */
-
-#endif /* NETEQ_STEREO */
-
- /************************************************/
- /* Check accelerate criteria and remove samples */
- /************************************************/
-
- /* Check for strong correlation (>0.9) or passive speech */
-#ifdef NETEQ_STEREO
- if ((((w16_bestCorr > 14746) || (w16_VAD == 0)) && (msInfo->msMode != NETEQ_SLAVE))
- || ((msInfo->msMode == NETEQ_SLAVE) && (msInfo->extraInfo != ACC_FAIL)))
-#else
- if ((w16_bestCorr > 14746) || (w16_VAD == 0))
-#endif
- {
- /* Do accelerate operation by overlap add */
-
- /*
- * Calculate cross-fading slope so that the fading factor goes from
- * 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
- */
- w16_inc = (int16_t) WebRtcSpl_DivW32W16((int32_t) 16384,
- (int16_t) (w16_bestIndex + 1)); /* in Q14 */
-
- /* Initiate fading factor */
- w16_startfact = 16384 - w16_inc;
-
- /* vec1 starts at 15 ms minus one pitch period */
- pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
- /* vec2 start at 15 ms */
- pw16_vec2 = &pw16_decoded[fsMult120];
-
- /* Copy unmodified part [0 to 15 ms minus 1 pitch period] */
- w16_tmp = (fsMult120 - w16_bestIndex);
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, w16_tmp);
-
- /* Generate interpolated part of length bestIndex (1 pitch period) */
- pw16_vectmp = pw16_outData + w16_tmp; /* start of interpolation output */
- /* Reuse mixing function from Expand */
- WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (int16_t*) pw16_vec1,
- (int16_t*) pw16_vec2, &w16_startfact, w16_inc, w16_bestIndex);
-
- /* Move the last part (also unmodified) */
- /* Take from decoded at 15 ms + 1 pitch period */
- pw16_vec2 = &pw16_decoded[fsMult120 + w16_bestIndex];
- WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[fsMult120], pw16_vec2,
- (int16_t) (len - fsMult120 - w16_bestIndex));
-
- /* Set the mode flag */
- if (w16_VAD)
- {
- inst->w16_mode = MODE_SUCCESS_ACCELERATE;
- }
- else
- {
- inst->w16_mode = MODE_LOWEN_ACCELERATE;
- }
-
- /* Calculate resulting length = original length - pitch period */
- *pw16_len = len - w16_bestIndex;
-
- /* Update in-call statistics */
- inst->statInst.accelerateLength += w16_bestIndex;
- /* Short-term activity statistics. */
- inst->activity_stats.accelarate_normal_samples += w16_bestIndex;
-
- return 0;
- }
- else
- {
- /* Accelerate not allowed */
-
-#ifdef NETEQ_STEREO
- /* Signal to slave(s) that this was unsuccessful */
- if (msInfo->msMode == NETEQ_MASTER)
- {
- msInfo->extraInfo = ACC_FAIL;
- }
-#endif
-
- /* Set mode flag to unsuccessful accelerate */
- inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
-
- /* Length is unmodified */
- *pw16_len = len;
-
- /* Simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return 0;
- }
-}
-
-#undef SCRATCH_PW16_DS_SPEECH
-#undef SCRATCH_PW32_CORR
-#undef SCRATCH_PW16_CORR
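For orientation, the strong-correlation test above (w16_bestCorr > 14746) works in Q14 fixed point, where 1.0 is represented as 16384, so 14746 corresponds to a correlation of roughly 0.9. A small stand-alone illustration (not part of the patch):

// Illustration of the Q14 threshold used above; constants copied from the check.
#include <cstdio>

int main() {
  const int kQ14One = 1 << 14;      // 1.0 in Q14 == 16384.
  const int kThresholdQ14 = 14746;  // Value compared against w16_bestCorr.
  std::printf("threshold ~= %.3f\n",
              static_cast<double>(kThresholdQ14) / kQ14One);  // Prints ~0.900.
  return 0;
}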
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.cc
new file mode 100644
index 00000000000..6acd778a233
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.cc
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/accelerate.h"
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+
+namespace webrtc {
+
+Accelerate::ReturnCodes Accelerate::Process(
+ const int16_t* input,
+ size_t input_length,
+ AudioMultiVector* output,
+ int16_t* length_change_samples) {
+ // Input length must be (almost) 30 ms.
+ static const int k15ms = 120; // 15 ms = 120 samples at 8 kHz sample rate.
+ if (num_channels_ == 0 || static_cast<int>(input_length) / num_channels_ <
+ (2 * k15ms - 1) * fs_mult_) {
+ // Length of input data too short to do accelerate. Simply move all data
+ // from input to output.
+ output->PushBackInterleaved(input, input_length);
+ return kError;
+ }
+ return TimeStretch::Process(input, input_length, output,
+ length_change_samples);
+}
+
+void Accelerate::SetParametersForPassiveSpeech(size_t /*len*/,
+ int16_t* best_correlation,
+ int* /*peak_index*/) const {
+ // When the signal does not contain any active speech, the correlation does
+ // not matter. Simply set it to zero.
+ *best_correlation = 0;
+}
+
+Accelerate::ReturnCodes Accelerate::CheckCriteriaAndStretch(
+ const int16_t* input, size_t input_length, size_t peak_index,
+ int16_t best_correlation, bool active_speech,
+ AudioMultiVector* output) const {
+ // Check for strong correlation or passive speech.
+ if ((best_correlation > kCorrelationThreshold) || !active_speech) {
+ // Do accelerate operation by overlap add.
+
+ // Pre-calculate common multiplication with |fs_mult_|.
+ // 120 corresponds to 15 ms.
+ size_t fs_mult_120 = fs_mult_ * 120;
+
+ assert(fs_mult_120 >= peak_index); // Should be handled in Process().
+ // Copy first part; 0 to 15 ms.
+ output->PushBackInterleaved(input, fs_mult_120 * num_channels_);
+ // Copy the |peak_index| starting at 15 ms to |temp_vector|.
+ AudioMultiVector temp_vector(num_channels_);
+ temp_vector.PushBackInterleaved(&input[fs_mult_120 * num_channels_],
+ peak_index * num_channels_);
+ // Cross-fade |temp_vector| onto the end of |output|.
+ output->CrossFade(temp_vector, peak_index);
+ // Copy the last unmodified part, 15 ms + pitch period until the end.
+ output->PushBackInterleaved(
+ &input[(fs_mult_120 + peak_index) * num_channels_],
+ input_length - (fs_mult_120 + peak_index) * num_channels_);
+
+ if (active_speech) {
+ return kSuccess;
+ } else {
+ return kSuccessLowEnergy;
+ }
+ } else {
+    // Accelerate not allowed. Simply move all data from input to output.
+ output->PushBackInterleaved(input, input_length);
+ return kNoStretch;
+ }
+}
+
+Accelerate* AccelerateFactory::Create(
+ int sample_rate_hz,
+ size_t num_channels,
+ const BackgroundNoise& background_noise) const {
+ return new Accelerate(sample_rate_hz, num_channels, background_noise);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.h
new file mode 100644
index 00000000000..2da999326a3
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_ACCELERATE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_ACCELERATE_H_
+
+#include <assert.h>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/time_stretch.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class BackgroundNoise;
+
+// This class implements the Accelerate operation. Most of the work is done
+// in the base class TimeStretch, which is shared with the PreemptiveExpand
+// operation. In the Accelerate class, the operations that are specific to
+// Accelerate are implemented.
+class Accelerate : public TimeStretch {
+ public:
+ Accelerate(int sample_rate_hz, size_t num_channels,
+ const BackgroundNoise& background_noise)
+ : TimeStretch(sample_rate_hz, num_channels, background_noise) {
+ }
+
+ virtual ~Accelerate() {}
+
+ // This method performs the actual Accelerate operation. The samples are
+ // read from |input|, of length |input_length| elements, and are written to
+ // |output|. The number of samples removed through time-stretching is
+ // is provided in the output |length_change_samples|. The method returns
+ // the outcome of the operation as an enumerator value.
+ ReturnCodes Process(const int16_t* input,
+ size_t input_length,
+ AudioMultiVector* output,
+ int16_t* length_change_samples);
+
+ protected:
+ // Sets the parameters |best_correlation| and |peak_index| to suitable
+ // values when the signal contains no active speech.
+ virtual void SetParametersForPassiveSpeech(size_t len,
+ int16_t* best_correlation,
+ int* peak_index) const OVERRIDE;
+
+ // Checks the criteria for performing the time-stretching operation and,
+ // if possible, performs the time-stretching.
+ virtual ReturnCodes CheckCriteriaAndStretch(
+ const int16_t* input, size_t input_length, size_t peak_index,
+ int16_t best_correlation, bool active_speech,
+ AudioMultiVector* output) const OVERRIDE;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(Accelerate);
+};
+
+struct AccelerateFactory {
+ AccelerateFactory() {}
+ virtual ~AccelerateFactory() {}
+
+ virtual Accelerate* Create(int sample_rate_hz,
+ size_t num_channels,
+ const BackgroundNoise& background_noise) const;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_ACCELERATE_H_
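A hedged usage sketch of the new class (not part of the patch): it assumes BackgroundNoise and AudioMultiVector are constructible from a channel count, as the surrounding NetEq code suggests, and the sample rate and channel count are illustrative.

// Sketch only; object names and parameter values are illustrative.
#include "webrtc/modules/audio_coding/neteq/accelerate.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

void ShortenDecodedAudio(const int16_t* decoded, size_t decoded_len) {
  const int kSampleRateHz = 8000;
  const size_t kChannels = 1;
  webrtc::BackgroundNoise background_noise(kChannels);  // Assumed constructor.
  webrtc::AccelerateFactory factory;
  webrtc::scoped_ptr<webrtc::Accelerate> accelerate(
      factory.Create(kSampleRateHz, kChannels, background_noise));
  webrtc::AudioMultiVector output(kChannels);
  int16_t samples_removed = 0;
  // Removes up to one pitch period if the correlation criterion is met.
  accelerate->Process(decoded, decoded_len, &output, &samples_removed);
}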
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.cc
new file mode 100644
index 00000000000..cc4bc97c30c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.cc
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/audio_classifier.h"
+
+#include <assert.h>
+#include <string.h>
+
+namespace webrtc {
+
+static const int kDefaultSampleRateHz = 48000;
+static const int kDefaultFrameRateHz = 50;
+static const int kDefaultFrameSizeSamples =
+ kDefaultSampleRateHz / kDefaultFrameRateHz;
+static const float kDefaultThreshold = 0.5f;
+
+AudioClassifier::AudioClassifier()
+ : analysis_info_(),
+ is_music_(false),
+ music_probability_(0),
+      // This actually assigns the pointer to a static constant struct
+      // rather than creating a new struct, so |celt_mode_| does not
+      // need to be deleted.
+ celt_mode_(opus_custom_mode_create(kDefaultSampleRateHz,
+ kDefaultFrameSizeSamples,
+ NULL)),
+ analysis_state_() {
+ assert(celt_mode_);
+}
+
+AudioClassifier::~AudioClassifier() {}
+
+bool AudioClassifier::Analysis(const int16_t* input,
+ int input_length,
+ int channels) {
+ // Must be 20 ms frames at 48 kHz sampling.
+ assert((input_length / channels) == kDefaultFrameSizeSamples);
+
+ // Only mono or stereo are allowed.
+ assert(channels == 1 || channels == 2);
+
+ // Call Opus' classifier, defined in
+ // "third_party/opus/src/src/analysis.h", with lsb_depth = 16.
+ // Also uses a down-mixing function downmix_int, defined in
+ // "third_party/opus/src/src/opus_private.h", with
+ // constants c1 = 0, and c2 = -2.
+ run_analysis(&analysis_state_,
+ celt_mode_,
+ input,
+ kDefaultFrameSizeSamples,
+ kDefaultFrameSizeSamples,
+ 0,
+ -2,
+ channels,
+ kDefaultSampleRateHz,
+ 16,
+ downmix_int,
+ &analysis_info_);
+ music_probability_ = analysis_info_.music_prob;
+ is_music_ = music_probability_ > kDefaultThreshold;
+ return is_music_;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.h
new file mode 100644
index 00000000000..e7b7807dbed
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_CLASSIFIER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_CLASSIFIER_H_
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+#include "celt.h"
+#include "analysis.h"
+#include "opus_private.h"
+#if defined(__cplusplus)
+}
+#endif
+
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// This class provides a speech/music classification and is a wrapper over the
+// Opus classifier. It currently only supports 48 kHz mono or stereo with a
+// frame size of 20 ms.
+
+class AudioClassifier {
+ public:
+ AudioClassifier();
+ virtual ~AudioClassifier();
+
+ // Classifies one frame of audio data in input,
+ // input_length : must be channels * 960;
+ // channels : must be 1 (mono) or 2 (stereo).
+ bool Analysis(const int16_t* input, int input_length, int channels);
+
+ // Gets the current classification : true = music, false = speech.
+ virtual bool is_music() const { return is_music_; }
+
+ // Gets the current music probability.
+ float music_probability() const { return music_probability_; }
+
+ private:
+ AnalysisInfo analysis_info_;
+ bool is_music_;
+ float music_probability_;
+ const CELTMode* celt_mode_;
+ TonalityAnalysisState analysis_state_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_CLASSIFIER_H_
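A hedged usage sketch for the classifier declared above (not part of the patch); the 960-sample frame size and mono channel count follow the requirements stated in the header comments.

// Sketch only; the caller is assumed to supply 20 ms of 48 kHz mono audio.
#include "webrtc/modules/audio_coding/neteq/audio_classifier.h"

bool FrameIsMusic(webrtc::AudioClassifier* classifier,
                  const int16_t* frame, int frame_samples) {
  // For mono, |frame_samples| must be 960 (20 ms at 48 kHz).
  bool is_music = classifier->Analysis(frame, frame_samples, 1);
  // music_probability() exposes the underlying Opus estimate; is_music
  // reflects the 0.5 threshold applied in Analysis().
  return is_music;
}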
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier_unittest.cc
new file mode 100644
index 00000000000..cf623ca0884
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier_unittest.cc
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/audio_classifier.h"
+
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <string>
+
+#include "gtest/gtest.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+static const size_t kFrameSize = 960;
+
+TEST(AudioClassifierTest, AllZeroInput) {
+ int16_t in_mono[kFrameSize] = {0};
+
+ // Test all-zero vectors and let the classifier converge from its default
+ // to the expected value.
+ AudioClassifier zero_classifier;
+ for (int i = 0; i < 100; ++i) {
+ zero_classifier.Analysis(in_mono, kFrameSize, 1);
+ }
+ EXPECT_TRUE(zero_classifier.is_music());
+}
+
+void RunAnalysisTest(const std::string& audio_filename,
+ const std::string& data_filename,
+ size_t channels) {
+ AudioClassifier classifier;
+ scoped_ptr<int16_t[]> in(new int16_t[channels * kFrameSize]);
+ bool is_music_ref;
+
+ FILE* audio_file = fopen(audio_filename.c_str(), "rb");
+ ASSERT_TRUE(audio_file != NULL) << "Failed to open file " << audio_filename
+ << std::endl;
+ FILE* data_file = fopen(data_filename.c_str(), "rb");
+  ASSERT_TRUE(data_file != NULL) << "Failed to open file " << data_filename
+ << std::endl;
+ while (fread(in.get(), sizeof(int16_t), channels * kFrameSize, audio_file) ==
+ channels * kFrameSize) {
+ bool is_music =
+ classifier.Analysis(in.get(), channels * kFrameSize, channels);
+ EXPECT_EQ(is_music, classifier.is_music());
+ ASSERT_EQ(1u, fread(&is_music_ref, sizeof(is_music_ref), 1, data_file));
+ EXPECT_EQ(is_music_ref, is_music);
+ }
+ fclose(audio_file);
+ fclose(data_file);
+}
+
+TEST(AudioClassifierTest, DoAnalysisMono) {
+ RunAnalysisTest(test::ResourcePath("short_mixed_mono_48", "pcm"),
+ test::ResourcePath("short_mixed_mono_48", "dat"),
+ 1);
+}
+
+TEST(AudioClassifierTest, DoAnalysisStereo) {
+ RunAnalysisTest(test::ResourcePath("short_mixed_stereo_48", "pcm"),
+ test::ResourcePath("short_mixed_stereo_48", "dat"),
+ 2);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder.cc
new file mode 100644
index 00000000000..f539bb2e1ed
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder.cc
@@ -0,0 +1,264 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
+
+namespace webrtc {
+
+int AudioDecoder::DecodeRedundant(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) {
+ return Decode(encoded, encoded_len, decoded, speech_type);
+}
+
+bool AudioDecoder::HasDecodePlc() const { return false; }
+
+int AudioDecoder::DecodePlc(int num_frames, int16_t* decoded) { return -1; }
+
+int AudioDecoder::IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp) {
+ return 0;
+}
+
+int AudioDecoder::ErrorCode() { return 0; }
+
+int AudioDecoder::PacketDuration(const uint8_t* encoded, size_t encoded_len) {
+ return kNotImplemented;
+}
+
+int AudioDecoder::PacketDurationRedundant(const uint8_t* encoded,
+ size_t encoded_len) const {
+ return kNotImplemented;
+}
+
+bool AudioDecoder::PacketHasFec(const uint8_t* encoded,
+ size_t encoded_len) const {
+ return false;
+}
+
+NetEqDecoder AudioDecoder::codec_type() const { return codec_type_; }
+
+bool AudioDecoder::CodecSupported(NetEqDecoder codec_type) {
+ switch (codec_type) {
+ case kDecoderPCMu:
+ case kDecoderPCMa:
+ case kDecoderPCMu_2ch:
+ case kDecoderPCMa_2ch:
+#ifdef WEBRTC_CODEC_ILBC
+ case kDecoderILBC:
+#endif
+#if defined(WEBRTC_CODEC_ISACFX) || defined(WEBRTC_CODEC_ISAC)
+ case kDecoderISAC:
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+ case kDecoderISACswb:
+ case kDecoderISACfb:
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+ case kDecoderPCM16B:
+ case kDecoderPCM16Bwb:
+ case kDecoderPCM16Bswb32kHz:
+ case kDecoderPCM16Bswb48kHz:
+ case kDecoderPCM16B_2ch:
+ case kDecoderPCM16Bwb_2ch:
+ case kDecoderPCM16Bswb32kHz_2ch:
+ case kDecoderPCM16Bswb48kHz_2ch:
+ case kDecoderPCM16B_5ch:
+#endif
+#ifdef WEBRTC_CODEC_G722
+ case kDecoderG722:
+ case kDecoderG722_2ch:
+#endif
+#ifdef WEBRTC_CODEC_CELT
+ case kDecoderCELT_32:
+ case kDecoderCELT_32_2ch:
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ case kDecoderOpus:
+ case kDecoderOpus_2ch:
+#endif
+ case kDecoderRED:
+ case kDecoderAVT:
+ case kDecoderCNGnb:
+ case kDecoderCNGwb:
+ case kDecoderCNGswb32kHz:
+ case kDecoderCNGswb48kHz:
+ case kDecoderArbitrary: {
+ return true;
+ }
+ default: {
+ return false;
+ }
+ }
+}
+
+int AudioDecoder::CodecSampleRateHz(NetEqDecoder codec_type) {
+ switch (codec_type) {
+ case kDecoderPCMu:
+ case kDecoderPCMa:
+ case kDecoderPCMu_2ch:
+ case kDecoderPCMa_2ch:
+#ifdef WEBRTC_CODEC_ILBC
+ case kDecoderILBC:
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+ case kDecoderPCM16B:
+ case kDecoderPCM16B_2ch:
+ case kDecoderPCM16B_5ch:
+#endif
+ case kDecoderCNGnb: {
+ return 8000;
+ }
+#if defined(WEBRTC_CODEC_ISACFX) || defined(WEBRTC_CODEC_ISAC)
+ case kDecoderISAC:
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+ case kDecoderPCM16Bwb:
+ case kDecoderPCM16Bwb_2ch:
+#endif
+#ifdef WEBRTC_CODEC_G722
+ case kDecoderG722:
+ case kDecoderG722_2ch:
+#endif
+ case kDecoderCNGwb: {
+ return 16000;
+ }
+#ifdef WEBRTC_CODEC_ISAC
+ case kDecoderISACswb:
+ case kDecoderISACfb:
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+ case kDecoderPCM16Bswb32kHz:
+ case kDecoderPCM16Bswb32kHz_2ch:
+#endif
+#ifdef WEBRTC_CODEC_CELT
+ case kDecoderCELT_32:
+ case kDecoderCELT_32_2ch:
+#endif
+ case kDecoderCNGswb32kHz: {
+ return 32000;
+ }
+#ifdef WEBRTC_CODEC_PCM16
+ case kDecoderPCM16Bswb48kHz:
+ case kDecoderPCM16Bswb48kHz_2ch: {
+ return 48000;
+ }
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ case kDecoderOpus:
+ case kDecoderOpus_2ch: {
+ return 32000;
+ }
+#endif
+ case kDecoderCNGswb48kHz: {
+ // TODO(tlegrand): Remove limitation once ACM has full 48 kHz support.
+ return 32000;
+ }
+ default: {
+ return -1; // Undefined sample rate.
+ }
+ }
+}
+
+AudioDecoder* AudioDecoder::CreateAudioDecoder(NetEqDecoder codec_type) {
+ if (!CodecSupported(codec_type)) {
+ return NULL;
+ }
+ switch (codec_type) {
+ case kDecoderPCMu:
+ return new AudioDecoderPcmU;
+ case kDecoderPCMa:
+ return new AudioDecoderPcmA;
+ case kDecoderPCMu_2ch:
+ return new AudioDecoderPcmUMultiCh(2);
+ case kDecoderPCMa_2ch:
+ return new AudioDecoderPcmAMultiCh(2);
+#ifdef WEBRTC_CODEC_ILBC
+ case kDecoderILBC:
+ return new AudioDecoderIlbc;
+#endif
+#if defined(WEBRTC_CODEC_ISACFX)
+ case kDecoderISAC:
+ return new AudioDecoderIsacFix;
+#elif defined(WEBRTC_CODEC_ISAC)
+ case kDecoderISAC:
+ return new AudioDecoderIsac;
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+ case kDecoderISACswb:
+ return new AudioDecoderIsacSwb;
+ case kDecoderISACfb:
+ return new AudioDecoderIsacFb;
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+ case kDecoderPCM16B:
+ case kDecoderPCM16Bwb:
+ case kDecoderPCM16Bswb32kHz:
+ case kDecoderPCM16Bswb48kHz:
+ return new AudioDecoderPcm16B(codec_type);
+ case kDecoderPCM16B_2ch:
+ case kDecoderPCM16Bwb_2ch:
+ case kDecoderPCM16Bswb32kHz_2ch:
+ case kDecoderPCM16Bswb48kHz_2ch:
+ case kDecoderPCM16B_5ch:
+ return new AudioDecoderPcm16BMultiCh(codec_type);
+#endif
+#ifdef WEBRTC_CODEC_G722
+ case kDecoderG722:
+ return new AudioDecoderG722;
+ case kDecoderG722_2ch:
+ return new AudioDecoderG722Stereo;
+#endif
+#ifdef WEBRTC_CODEC_CELT
+ case kDecoderCELT_32:
+ case kDecoderCELT_32_2ch:
+ return new AudioDecoderCelt(codec_type);
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ case kDecoderOpus:
+ case kDecoderOpus_2ch:
+ return new AudioDecoderOpus(codec_type);
+#endif
+ case kDecoderCNGnb:
+ case kDecoderCNGwb:
+ case kDecoderCNGswb32kHz:
+ case kDecoderCNGswb48kHz:
+ return new AudioDecoderCng(codec_type);
+ case kDecoderRED:
+ case kDecoderAVT:
+ case kDecoderArbitrary:
+ default: {
+ return NULL;
+ }
+ }
+}
+
+AudioDecoder::SpeechType AudioDecoder::ConvertSpeechType(int16_t type) {
+ switch (type) {
+ case 0: // TODO(hlundin): Both iSAC and Opus return 0 for speech.
+ case 1:
+ return kSpeech;
+ case 2:
+ return kComfortNoise;
+ default:
+ assert(false);
+ return kSpeech;
+ }
+}
+
+} // namespace webrtc
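A hedged sketch of the factory added above (not part of the patch): CreateAudioDecoder() returns NULL for codec types that are unsupported or compiled out, so callers are expected to check the result.

// Sketch only; PCMu is used purely as an example of an always-supported codec.
#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"

webrtc::AudioDecoder* CreatePcmuDecoder() {
  if (!webrtc::AudioDecoder::CodecSupported(webrtc::kDecoderPCMu))
    return NULL;
  // For G.711 u-law, CodecSampleRateHz() reports 8000 Hz (see table above).
  webrtc::AudioDecoder* decoder =
      webrtc::AudioDecoder::CreateAudioDecoder(webrtc::kDecoderPCMu);
  return decoder;
}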
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
new file mode 100644
index 00000000000..6c7269a35fe
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
@@ -0,0 +1,516 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
+
+#include <assert.h>
+#include <string.h> // memmove
+
+#ifdef WEBRTC_CODEC_CELT
+#include "webrtc/modules/audio_coding/codecs/celt/include/celt_interface.h"
+#endif
+#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
+#ifdef WEBRTC_CODEC_G722
+#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+#include "webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h"
+#endif
+#ifdef WEBRTC_CODEC_ISACFX
+#include "webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h"
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#endif
+
+namespace webrtc {
+
+// PCMu
+int AudioDecoderPcmU::Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcG711_DecodeU(
+ state_, reinterpret_cast<int16_t*>(const_cast<uint8_t*>(encoded)),
+ static_cast<int16_t>(encoded_len), decoded, &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int AudioDecoderPcmU::PacketDuration(const uint8_t* encoded,
+ size_t encoded_len) {
+ // One encoded byte per sample per channel.
+ return static_cast<int>(encoded_len / channels_);
+}
+
+// PCMa
+int AudioDecoderPcmA::Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcG711_DecodeA(
+ state_, reinterpret_cast<int16_t*>(const_cast<uint8_t*>(encoded)),
+ static_cast<int16_t>(encoded_len), decoded, &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int AudioDecoderPcmA::PacketDuration(const uint8_t* encoded,
+ size_t encoded_len) {
+ // One encoded byte per sample per channel.
+ return static_cast<int>(encoded_len / channels_);
+}
+
+// PCM16B
+#ifdef WEBRTC_CODEC_PCM16
+AudioDecoderPcm16B::AudioDecoderPcm16B(enum NetEqDecoder type)
+ : AudioDecoder(type) {
+ assert(type == kDecoderPCM16B ||
+ type == kDecoderPCM16Bwb ||
+ type == kDecoderPCM16Bswb32kHz ||
+ type == kDecoderPCM16Bswb48kHz);
+}
+
+int AudioDecoderPcm16B::Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcPcm16b_DecodeW16(
+ state_, reinterpret_cast<int16_t*>(const_cast<uint8_t*>(encoded)),
+ static_cast<int16_t>(encoded_len), decoded, &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int AudioDecoderPcm16B::PacketDuration(const uint8_t* encoded,
+ size_t encoded_len) {
+  // Two encoded bytes per sample per channel.
+ return static_cast<int>(encoded_len / (2 * channels_));
+}
+
+AudioDecoderPcm16BMultiCh::AudioDecoderPcm16BMultiCh(
+ enum NetEqDecoder type)
+ : AudioDecoderPcm16B(kDecoderPCM16B) { // This will be changed below.
+ codec_type_ = type; // Changing to actual type here.
+ switch (codec_type_) {
+ case kDecoderPCM16B_2ch:
+ case kDecoderPCM16Bwb_2ch:
+ case kDecoderPCM16Bswb32kHz_2ch:
+ case kDecoderPCM16Bswb48kHz_2ch:
+ channels_ = 2;
+ break;
+ case kDecoderPCM16B_5ch:
+ channels_ = 5;
+ break;
+ default:
+ assert(false);
+ }
+}
+#endif
+
+// iLBC
+#ifdef WEBRTC_CODEC_ILBC
+AudioDecoderIlbc::AudioDecoderIlbc() : AudioDecoder(kDecoderILBC) {
+ WebRtcIlbcfix_DecoderCreate(reinterpret_cast<iLBC_decinst_t**>(&state_));
+}
+
+AudioDecoderIlbc::~AudioDecoderIlbc() {
+ WebRtcIlbcfix_DecoderFree(static_cast<iLBC_decinst_t*>(state_));
+}
+
+int AudioDecoderIlbc::Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcIlbcfix_Decode(static_cast<iLBC_decinst_t*>(state_),
+ reinterpret_cast<const int16_t*>(encoded),
+ static_cast<int16_t>(encoded_len), decoded,
+ &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int AudioDecoderIlbc::DecodePlc(int num_frames, int16_t* decoded) {
+ return WebRtcIlbcfix_NetEqPlc(static_cast<iLBC_decinst_t*>(state_),
+ decoded, num_frames);
+}
+
+int AudioDecoderIlbc::Init() {
+ return WebRtcIlbcfix_Decoderinit30Ms(static_cast<iLBC_decinst_t*>(state_));
+}
+#endif
+
+// iSAC float
+#ifdef WEBRTC_CODEC_ISAC
+AudioDecoderIsac::AudioDecoderIsac() : AudioDecoder(kDecoderISAC) {
+ WebRtcIsac_Create(reinterpret_cast<ISACStruct**>(&state_));
+ WebRtcIsac_SetDecSampRate(static_cast<ISACStruct*>(state_), 16000);
+}
+
+AudioDecoderIsac::~AudioDecoderIsac() {
+ WebRtcIsac_Free(static_cast<ISACStruct*>(state_));
+}
+
+int AudioDecoderIsac::Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcIsac_Decode(static_cast<ISACStruct*>(state_),
+ reinterpret_cast<const uint16_t*>(encoded),
+ static_cast<int16_t>(encoded_len), decoded,
+ &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int AudioDecoderIsac::DecodeRedundant(const uint8_t* encoded,
+ size_t encoded_len, int16_t* decoded,
+ SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcIsac_DecodeRcu(static_cast<ISACStruct*>(state_),
+ reinterpret_cast<const uint16_t*>(encoded),
+ static_cast<int16_t>(encoded_len), decoded,
+ &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int AudioDecoderIsac::DecodePlc(int num_frames, int16_t* decoded) {
+ return WebRtcIsac_DecodePlc(static_cast<ISACStruct*>(state_),
+ decoded, num_frames);
+}
+
+int AudioDecoderIsac::Init() {
+ return WebRtcIsac_DecoderInit(static_cast<ISACStruct*>(state_));
+}
+
+int AudioDecoderIsac::IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp) {
+ return WebRtcIsac_UpdateBwEstimate(static_cast<ISACStruct*>(state_),
+ reinterpret_cast<const uint16_t*>(payload),
+ static_cast<int32_t>(payload_len),
+ rtp_sequence_number,
+ rtp_timestamp,
+ arrival_timestamp);
+}
+
+int AudioDecoderIsac::ErrorCode() {
+ return WebRtcIsac_GetErrorCode(static_cast<ISACStruct*>(state_));
+}
+
+// iSAC SWB
+AudioDecoderIsacSwb::AudioDecoderIsacSwb() : AudioDecoderIsac() {
+ codec_type_ = kDecoderISACswb;
+ WebRtcIsac_SetDecSampRate(static_cast<ISACStruct*>(state_), 32000);
+}
+
+// iSAC FB
+AudioDecoderIsacFb::AudioDecoderIsacFb() : AudioDecoderIsacSwb() {
+ codec_type_ = kDecoderISACfb;
+}
+#endif
+
+// iSAC fix
+#ifdef WEBRTC_CODEC_ISACFX
+AudioDecoderIsacFix::AudioDecoderIsacFix() : AudioDecoder(kDecoderISAC) {
+ WebRtcIsacfix_Create(reinterpret_cast<ISACFIX_MainStruct**>(&state_));
+}
+
+AudioDecoderIsacFix::~AudioDecoderIsacFix() {
+ WebRtcIsacfix_Free(static_cast<ISACFIX_MainStruct*>(state_));
+}
+
+int AudioDecoderIsacFix::Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcIsacfix_Decode(static_cast<ISACFIX_MainStruct*>(state_),
+ reinterpret_cast<const uint16_t*>(encoded),
+ static_cast<int16_t>(encoded_len), decoded,
+ &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int AudioDecoderIsacFix::Init() {
+ return WebRtcIsacfix_DecoderInit(static_cast<ISACFIX_MainStruct*>(state_));
+}
+
+int AudioDecoderIsacFix::IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp) {
+ return WebRtcIsacfix_UpdateBwEstimate(
+ static_cast<ISACFIX_MainStruct*>(state_),
+ reinterpret_cast<const uint16_t*>(payload),
+ static_cast<int32_t>(payload_len),
+ rtp_sequence_number, rtp_timestamp, arrival_timestamp);
+}
+
+int AudioDecoderIsacFix::ErrorCode() {
+ return WebRtcIsacfix_GetErrorCode(static_cast<ISACFIX_MainStruct*>(state_));
+}
+#endif
+
+// G.722
+#ifdef WEBRTC_CODEC_G722
+AudioDecoderG722::AudioDecoderG722() : AudioDecoder(kDecoderG722) {
+ WebRtcG722_CreateDecoder(reinterpret_cast<G722DecInst**>(&state_));
+}
+
+AudioDecoderG722::~AudioDecoderG722() {
+ WebRtcG722_FreeDecoder(static_cast<G722DecInst*>(state_));
+}
+
+int AudioDecoderG722::Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcG722_Decode(
+ static_cast<G722DecInst*>(state_),
+ const_cast<int16_t*>(reinterpret_cast<const int16_t*>(encoded)),
+ static_cast<int16_t>(encoded_len), decoded, &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int AudioDecoderG722::Init() {
+ return WebRtcG722_DecoderInit(static_cast<G722DecInst*>(state_));
+}
+
+int AudioDecoderG722::PacketDuration(const uint8_t* encoded,
+ size_t encoded_len) {
+ // 1/2 encoded byte per sample per channel.
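+  // For example, an 80-byte mono payload covers 2 * 80 / 1 = 160 samples
+  // (10 ms at 16 kHz).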
+ return static_cast<int>(2 * encoded_len / channels_);
+}
+
+AudioDecoderG722Stereo::AudioDecoderG722Stereo()
+ : AudioDecoderG722(),
+ state_left_(state_), // Base member |state_| is used for left channel.
+ state_right_(NULL) {
+ channels_ = 2;
+ // |state_left_| already created by the base class AudioDecoderG722.
+ WebRtcG722_CreateDecoder(reinterpret_cast<G722DecInst**>(&state_right_));
+}
+
+AudioDecoderG722Stereo::~AudioDecoderG722Stereo() {
+ // |state_left_| will be freed by the base class AudioDecoderG722.
+ WebRtcG722_FreeDecoder(static_cast<G722DecInst*>(state_right_));
+}
+
+int AudioDecoderG722Stereo::Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ // De-interleave the bit-stream into two separate payloads.
+ uint8_t* encoded_deinterleaved = new uint8_t[encoded_len];
+ SplitStereoPacket(encoded, encoded_len, encoded_deinterleaved);
+ // Decode left and right.
+ int16_t ret = WebRtcG722_Decode(
+ static_cast<G722DecInst*>(state_left_),
+ reinterpret_cast<int16_t*>(encoded_deinterleaved),
+ static_cast<int16_t>(encoded_len / 2), decoded, &temp_type);
+ if (ret >= 0) {
+ int decoded_len = ret;
+ ret = WebRtcG722_Decode(
+ static_cast<G722DecInst*>(state_right_),
+ reinterpret_cast<int16_t*>(&encoded_deinterleaved[encoded_len / 2]),
+        static_cast<int16_t>(encoded_len / 2), &decoded[decoded_len],
+        &temp_type);
+ if (ret == decoded_len) {
+ decoded_len += ret;
+ // Interleave output.
+ for (int k = decoded_len / 2; k < decoded_len; k++) {
+ int16_t temp = decoded[k];
+ memmove(&decoded[2 * k - decoded_len + 2],
+ &decoded[2 * k - decoded_len + 1],
+ (decoded_len - k - 1) * sizeof(int16_t));
+ decoded[2 * k - decoded_len + 1] = temp;
+ }
+ ret = decoded_len; // Return total number of samples.
+ }
+ }
+ *speech_type = ConvertSpeechType(temp_type);
+ delete [] encoded_deinterleaved;
+ return ret;
+}
+
+int AudioDecoderG722Stereo::Init() {
+ int ret = WebRtcG722_DecoderInit(static_cast<G722DecInst*>(state_right_));
+ if (ret != 0) {
+ return ret;
+ }
+ return AudioDecoderG722::Init();
+}
+
+// Split the stereo packet and place left and right channel after each other
+// in the output array.
+void AudioDecoderG722Stereo::SplitStereoPacket(const uint8_t* encoded,
+ size_t encoded_len,
+ uint8_t* encoded_deinterleaved) {
+ assert(encoded);
+ // Regroup the 4 bits/sample so |l1 l2| |r1 r2| |l3 l4| |r3 r4| ...,
+ // where "lx" is 4 bits representing left sample number x, and "rx" right
+ // sample. Two samples fit in one byte, represented with |...|.
+ for (size_t i = 0; i + 1 < encoded_len; i += 2) {
+ uint8_t right_byte = ((encoded[i] & 0x0F) << 4) + (encoded[i + 1] & 0x0F);
+ encoded_deinterleaved[i] = (encoded[i] & 0xF0) + (encoded[i + 1] >> 4);
+ encoded_deinterleaved[i + 1] = right_byte;
+ }
+
+ // Move one byte representing right channel each loop, and place it at the
+ // end of the bytestream vector. After looping the data is reordered to:
+ // |l1 l2| |l3 l4| ... |l(N-1) lN| |r1 r2| |r3 r4| ... |r(N-1) r(N)|,
+ // where N is the total number of samples.
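+  // For example, with encoded_len == 4 bytes the full transformation is:
+  //   input bytes:      |l1 r1| |l2 r2| |l3 r3| |l4 r4|
+  //   after regrouping: |l1 l2| |r1 r2| |l3 l4| |r3 r4|
+  //   after reordering: |l1 l2| |l3 l4| |r1 r2| |r3 r4|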
+ for (size_t i = 0; i < encoded_len / 2; i++) {
+ uint8_t right_byte = encoded_deinterleaved[i + 1];
+ memmove(&encoded_deinterleaved[i + 1], &encoded_deinterleaved[i + 2],
+ encoded_len - i - 2);
+ encoded_deinterleaved[encoded_len - 1] = right_byte;
+ }
+}
+#endif
+
+// CELT
+#ifdef WEBRTC_CODEC_CELT
+AudioDecoderCelt::AudioDecoderCelt(enum NetEqDecoder type)
+ : AudioDecoder(type) {
+ assert(type == kDecoderCELT_32 || type == kDecoderCELT_32_2ch);
+ if (type == kDecoderCELT_32) {
+ channels_ = 1;
+ } else {
+ channels_ = 2;
+ }
+ WebRtcCelt_CreateDec(reinterpret_cast<CELT_decinst_t**>(&state_),
+ static_cast<int>(channels_));
+}
+
+AudioDecoderCelt::~AudioDecoderCelt() {
+ WebRtcCelt_FreeDec(static_cast<CELT_decinst_t*>(state_));
+}
+
+int AudioDecoderCelt::Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default to speech.
+ int ret = WebRtcCelt_DecodeUniversal(static_cast<CELT_decinst_t*>(state_),
+ encoded, static_cast<int>(encoded_len),
+ decoded, &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ if (ret < 0) {
+ return -1;
+ }
+ // Return the total number of samples.
+ return ret * static_cast<int>(channels_);
+}
+
+int AudioDecoderCelt::Init() {
+ return WebRtcCelt_DecoderInit(static_cast<CELT_decinst_t*>(state_));
+}
+
+bool AudioDecoderCelt::HasDecodePlc() const { return true; }
+
+int AudioDecoderCelt::DecodePlc(int num_frames, int16_t* decoded) {
+ int ret = WebRtcCelt_DecodePlc(static_cast<CELT_decinst_t*>(state_),
+ decoded, num_frames);
+ if (ret < 0) {
+ return -1;
+ }
+ // Return the total number of samples.
+ return ret * static_cast<int>(channels_);
+}
+#endif
+
+// Opus
+#ifdef WEBRTC_CODEC_OPUS
+AudioDecoderOpus::AudioDecoderOpus(enum NetEqDecoder type)
+ : AudioDecoder(type) {
+ if (type == kDecoderOpus_2ch) {
+ channels_ = 2;
+ } else {
+ channels_ = 1;
+ }
+ WebRtcOpus_DecoderCreate(reinterpret_cast<OpusDecInst**>(&state_),
+ static_cast<int>(channels_));
+}
+
+AudioDecoderOpus::~AudioDecoderOpus() {
+ WebRtcOpus_DecoderFree(static_cast<OpusDecInst*>(state_));
+}
+
+int AudioDecoderOpus::Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcOpus_DecodeNew(static_cast<OpusDecInst*>(state_), encoded,
+ static_cast<int16_t>(encoded_len), decoded,
+ &temp_type);
+ if (ret > 0)
+ ret *= static_cast<int16_t>(channels_); // Return total number of samples.
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int AudioDecoderOpus::DecodeRedundant(const uint8_t* encoded,
+ size_t encoded_len, int16_t* decoded,
+ SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcOpus_DecodeFec(static_cast<OpusDecInst*>(state_), encoded,
+ static_cast<int16_t>(encoded_len), decoded,
+ &temp_type);
+ if (ret > 0)
+ ret *= static_cast<int16_t>(channels_); // Return total number of samples.
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int AudioDecoderOpus::Init() {
+ return WebRtcOpus_DecoderInitNew(static_cast<OpusDecInst*>(state_));
+}
+
+int AudioDecoderOpus::PacketDuration(const uint8_t* encoded,
+ size_t encoded_len) {
+ return WebRtcOpus_DurationEst(static_cast<OpusDecInst*>(state_),
+ encoded, static_cast<int>(encoded_len));
+}
+
+int AudioDecoderOpus::PacketDurationRedundant(const uint8_t* encoded,
+ size_t encoded_len) const {
+ return WebRtcOpus_FecDurationEst(encoded, static_cast<int>(encoded_len));
+}
+
+bool AudioDecoderOpus::PacketHasFec(const uint8_t* encoded,
+ size_t encoded_len) const {
+ int fec;
+ fec = WebRtcOpus_PacketHasFec(encoded, static_cast<int>(encoded_len));
+ return (fec == 1);
+}
+#endif
+
+AudioDecoderCng::AudioDecoderCng(enum NetEqDecoder type)
+ : AudioDecoder(type) {
+  assert(type == kDecoderCNGnb || type == kDecoderCNGwb ||
+         type == kDecoderCNGswb32kHz || type == kDecoderCNGswb48kHz);
+ WebRtcCng_CreateDec(reinterpret_cast<CNG_dec_inst**>(&state_));
+ assert(state_);
+}
+
+AudioDecoderCng::~AudioDecoderCng() {
+ if (state_) {
+ WebRtcCng_FreeDec(static_cast<CNG_dec_inst*>(state_));
+ }
+}
+
+int AudioDecoderCng::Init() {
+ assert(state_);
+ return WebRtcCng_InitDec(static_cast<CNG_dec_inst*>(state_));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
new file mode 100644
index 00000000000..265d660bd79
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
@@ -0,0 +1,276 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_DECODER_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_DECODER_IMPL_H_
+
+#include <assert.h>
+
+#ifndef AUDIO_DECODER_UNITTEST
+// If this is compiled as part of the audio_decoder_unittest, the codec
+// selection is made in the gypi file instead of in engine_configurations.h.
+#include "webrtc/engine_configurations.h"
+#endif
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class AudioDecoderPcmU : public AudioDecoder {
+ public:
+ AudioDecoderPcmU() : AudioDecoder(kDecoderPCMu) {}
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual int Init() { return 0; }
+ virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmU);
+};
+
+class AudioDecoderPcmA : public AudioDecoder {
+ public:
+ AudioDecoderPcmA() : AudioDecoder(kDecoderPCMa) {}
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual int Init() { return 0; }
+ virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmA);
+};
+
+class AudioDecoderPcmUMultiCh : public AudioDecoderPcmU {
+ public:
+ explicit AudioDecoderPcmUMultiCh(size_t channels) : AudioDecoderPcmU() {
+ assert(channels > 0);
+ channels_ = channels;
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmUMultiCh);
+};
+
+class AudioDecoderPcmAMultiCh : public AudioDecoderPcmA {
+ public:
+ explicit AudioDecoderPcmAMultiCh(size_t channels) : AudioDecoderPcmA() {
+ assert(channels > 0);
+ channels_ = channels;
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmAMultiCh);
+};
+
+#ifdef WEBRTC_CODEC_PCM16
+// This class handles all four types (i.e., sample rates) of PCM16B codecs.
+// The type is specified in the constructor parameter |type|.
+class AudioDecoderPcm16B : public AudioDecoder {
+ public:
+ explicit AudioDecoderPcm16B(enum NetEqDecoder type);
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual int Init() { return 0; }
+ virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcm16B);
+};
+
+// This class handles all four types (i.e., sample rates) of PCM16B codecs.
+// The type is specified in the constructor parameter |type|, and the number
+// of channels is derived from the type.
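+// For example, kDecoderPCM16Bswb32kHz_2ch yields channels_ == 2, and
+// kDecoderPCM16B_5ch yields channels_ == 5.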
+class AudioDecoderPcm16BMultiCh : public AudioDecoderPcm16B {
+ public:
+ explicit AudioDecoderPcm16BMultiCh(enum NetEqDecoder type);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcm16BMultiCh);
+};
+#endif
+
+#ifdef WEBRTC_CODEC_ILBC
+class AudioDecoderIlbc : public AudioDecoder {
+ public:
+ AudioDecoderIlbc();
+ virtual ~AudioDecoderIlbc();
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual bool HasDecodePlc() const { return true; }
+ virtual int DecodePlc(int num_frames, int16_t* decoded);
+ virtual int Init();
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderIlbc);
+};
+#endif
+
+#ifdef WEBRTC_CODEC_ISAC
+class AudioDecoderIsac : public AudioDecoder {
+ public:
+ AudioDecoderIsac();
+ virtual ~AudioDecoderIsac();
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual int DecodeRedundant(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual bool HasDecodePlc() const { return true; }
+ virtual int DecodePlc(int num_frames, int16_t* decoded);
+ virtual int Init();
+ virtual int IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp);
+ virtual int ErrorCode();
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderIsac);
+};
+
+class AudioDecoderIsacSwb : public AudioDecoderIsac {
+ public:
+ AudioDecoderIsacSwb();
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderIsacSwb);
+};
+
+class AudioDecoderIsacFb : public AudioDecoderIsacSwb {
+ public:
+ AudioDecoderIsacFb();
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderIsacFb);
+};
+#endif
+
+#ifdef WEBRTC_CODEC_ISACFX
+class AudioDecoderIsacFix : public AudioDecoder {
+ public:
+ AudioDecoderIsacFix();
+ virtual ~AudioDecoderIsacFix();
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual int Init();
+ virtual int IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp);
+ virtual int ErrorCode();
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderIsacFix);
+};
+#endif
+
+#ifdef WEBRTC_CODEC_G722
+class AudioDecoderG722 : public AudioDecoder {
+ public:
+ AudioDecoderG722();
+ virtual ~AudioDecoderG722();
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual bool HasDecodePlc() const { return false; }
+ virtual int Init();
+ virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722);
+};
+
+class AudioDecoderG722Stereo : public AudioDecoderG722 {
+ public:
+ AudioDecoderG722Stereo();
+ virtual ~AudioDecoderG722Stereo();
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual int Init();
+
+ private:
+ // Splits the stereo-interleaved payload in |encoded| into separate payloads
+ // for left and right channels. The separated payloads are written to
+  // |encoded_deinterleaved|, which must hold at least |encoded_len| bytes.
+ // The left channel starts at offset 0, while the right channel starts at
+ // offset encoded_len / 2 into |encoded_deinterleaved|.
+ void SplitStereoPacket(const uint8_t* encoded, size_t encoded_len,
+ uint8_t* encoded_deinterleaved);
+
+ void* const state_left_;
+ void* state_right_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722Stereo);
+};
+#endif
+
+#ifdef WEBRTC_CODEC_CELT
+class AudioDecoderCelt : public AudioDecoder {
+ public:
+ explicit AudioDecoderCelt(enum NetEqDecoder type);
+ virtual ~AudioDecoderCelt();
+
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual int Init();
+ virtual bool HasDecodePlc() const;
+ virtual int DecodePlc(int num_frames, int16_t* decoded);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderCelt);
+};
+#endif
+
+#ifdef WEBRTC_CODEC_OPUS
+class AudioDecoderOpus : public AudioDecoder {
+ public:
+ explicit AudioDecoderOpus(enum NetEqDecoder type);
+ virtual ~AudioDecoderOpus();
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual int DecodeRedundant(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+ virtual int Init();
+ virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
+ virtual int PacketDurationRedundant(const uint8_t* encoded,
+ size_t encoded_len) const;
+ virtual bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderOpus);
+};
+#endif
+
+// AudioDecoderCng is a special type of AudioDecoder. It inherits from
+// AudioDecoder just to fit in the DecoderDatabase. None of the class methods
+// should be used, except the constructor, destructor, and accessors.
+// TODO(hlundin): Consider the possibility of creating a super-class of
+// AudioDecoder that is stored in DecoderDatabase. Then AudioDecoder and a
+// specific CngDecoder class could both inherit from that class.
+class AudioDecoderCng : public AudioDecoder {
+ public:
+ explicit AudioDecoderCng(enum NetEqDecoder type);
+ virtual ~AudioDecoderCng();
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) { return -1; }
+ virtual int Init();
+ virtual int IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp) { return -1; }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderCng);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_DECODER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc
new file mode 100644
index 00000000000..f82644cbc26
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc
@@ -0,0 +1,931 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#include <string>
+
+#include "gtest/gtest.h"
+#include "webrtc/common_audio/resampler/include/resampler.h"
+#ifdef WEBRTC_CODEC_CELT
+#include "webrtc/modules/audio_coding/codecs/celt/include/celt_interface.h"
+#endif
+#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
+#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
+#include "webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
+#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/system_wrappers/interface/data_log.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+class AudioDecoderTest : public ::testing::Test {
+ protected:
+ AudioDecoderTest()
+ : input_fp_(NULL),
+ input_(NULL),
+ encoded_(NULL),
+ decoded_(NULL),
+ frame_size_(0),
+ data_length_(0),
+ encoded_bytes_(0),
+ channels_(1),
+ decoder_(NULL) {
+ input_file_ = webrtc::test::ProjectRootPath() +
+ "resources/audio_coding/testfile32kHz.pcm";
+ }
+
+ virtual ~AudioDecoderTest() {}
+
+ virtual void SetUp() {
+ // Create arrays.
+ ASSERT_GT(data_length_, 0u) << "The test must set data_length_ > 0";
+ input_ = new int16_t[data_length_];
+ encoded_ = new uint8_t[data_length_ * 2];
+ decoded_ = new int16_t[data_length_ * channels_];
+ // Open input file.
+ input_fp_ = fopen(input_file_.c_str(), "rb");
+ ASSERT_TRUE(input_fp_ != NULL) << "Failed to open file " << input_file_;
+ // Read data to |input_|.
+ ASSERT_EQ(data_length_,
+ fread(input_, sizeof(int16_t), data_length_, input_fp_)) <<
+ "Could not read enough data from file";
+ // Logging to view input and output in Matlab.
+ // Use 'gyp -Denable_data_logging=1' to enable logging.
+ DataLog::CreateLog();
+ DataLog::AddTable("CodecTest");
+ DataLog::AddColumn("CodecTest", "input", 1);
+ DataLog::AddColumn("CodecTest", "output", 1);
+ }
+
+ virtual void TearDown() {
+ delete decoder_;
+ decoder_ = NULL;
+ // Close input file.
+ fclose(input_fp_);
+ // Delete arrays.
+ delete [] input_;
+ input_ = NULL;
+ delete [] encoded_;
+ encoded_ = NULL;
+ delete [] decoded_;
+ decoded_ = NULL;
+ // Close log.
+ DataLog::ReturnLog();
+ }
+
+ virtual void InitEncoder() { }
+
+ // This method must be implemented for all tests derived from this class.
+ virtual int EncodeFrame(const int16_t* input, size_t input_len,
+ uint8_t* output) = 0;
+
+  // Encodes and decodes audio. The absolute difference between the input and
+  // output is compared vs |tolerance|, and the mean-squared error is compared
+  // with |mse|. The encoded stream should contain |expected_bytes| bytes. For
+  // stereo audio, the absolute difference between the two channels is
+  // compared vs |channel_diff_tolerance|.
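+  // Example (parameters from the iLBC test below):
+  //   EncodeDecodeTest(500, 6808, 2.13e6, 80);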
+ void EncodeDecodeTest(size_t expected_bytes, int tolerance, double mse,
+ int delay = 0, int channel_diff_tolerance = 0) {
+ ASSERT_GE(tolerance, 0) << "Test must define a tolerance >= 0";
+ ASSERT_GE(channel_diff_tolerance, 0) <<
+ "Test must define a channel_diff_tolerance >= 0";
+ size_t processed_samples = 0u;
+ encoded_bytes_ = 0u;
+ InitEncoder();
+ EXPECT_EQ(0, decoder_->Init());
+ while (processed_samples + frame_size_ <= data_length_) {
+ size_t enc_len = EncodeFrame(&input_[processed_samples], frame_size_,
+ &encoded_[encoded_bytes_]);
+ AudioDecoder::SpeechType speech_type;
+ size_t dec_len = decoder_->Decode(&encoded_[encoded_bytes_], enc_len,
+ &decoded_[processed_samples *
+ channels_],
+ &speech_type);
+ EXPECT_EQ(frame_size_ * channels_, dec_len);
+ encoded_bytes_ += enc_len;
+ processed_samples += frame_size_;
+ }
+    // For some codecs it doesn't make sense to check the expected number of
+    // bytes, since the number can vary between platforms. Opus and iSAC are
+    // such codecs. In that case, |expected_bytes| is set to 0.
+ if (expected_bytes) {
+ EXPECT_EQ(expected_bytes, encoded_bytes_);
+ }
+ CompareInputOutput(processed_samples, tolerance, delay);
+ if (channels_ == 2)
+ CompareTwoChannels(processed_samples, channel_diff_tolerance);
+ EXPECT_LE(MseInputOutput(processed_samples, delay), mse);
+ }
+
+ // The absolute difference between the input and output (the first channel) is
+ // compared vs |tolerance|. The parameter |delay| is used to correct for codec
+ // delays.
+ virtual void CompareInputOutput(size_t num_samples, int tolerance,
+ int delay) const {
+ assert(num_samples <= data_length_);
+ for (unsigned int n = 0; n < num_samples - delay; ++n) {
+ ASSERT_NEAR(input_[n], decoded_[channels_ * n + delay], tolerance) <<
+ "Exit test on first diff; n = " << n;
+ DataLog::InsertCell("CodecTest", "input", input_[n]);
+ DataLog::InsertCell("CodecTest", "output", decoded_[channels_ * n]);
+ DataLog::NextRow("CodecTest");
+ }
+ }
+
+  // The absolute difference between the two channels of a stereo signal is
+  // compared vs |tolerance|.
+ virtual void CompareTwoChannels(size_t samples_per_channel,
+ int tolerance) const {
+ assert(samples_per_channel <= data_length_);
+ for (unsigned int n = 0; n < samples_per_channel; ++n)
+ ASSERT_NEAR(decoded_[channels_ * n], decoded_[channels_ * n + 1],
+ tolerance) << "Stereo samples differ.";
+ }
+
+ // Calculates mean-squared error between input and output (the first channel).
+ // The parameter |delay| is used to correct for codec delays.
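+  // That is, MSE = sum_{n=0}^{num_samples - delay - 1}
+  //   (input[n] - decoded[channels_ * n + delay])^2 / (num_samples - delay).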
+ virtual double MseInputOutput(size_t num_samples, int delay) const {
+ assert(num_samples <= data_length_);
+ if (num_samples == 0) return 0.0;
+ double squared_sum = 0.0;
+ for (unsigned int n = 0; n < num_samples - delay; ++n) {
+ squared_sum += (input_[n] - decoded_[channels_ * n + delay]) *
+ (input_[n] - decoded_[channels_ * n + delay]);
+ }
+ return squared_sum / (num_samples - delay);
+ }
+
+ // Encodes a payload and decodes it twice with decoder re-init before each
+ // decode. Verifies that the decoded result is the same.
+ void ReInitTest() {
+ uint8_t* encoded = encoded_;
+ uint8_t* encoded_copy = encoded_ + 2 * frame_size_;
+ int16_t* output1 = decoded_;
+ int16_t* output2 = decoded_ + frame_size_;
+ InitEncoder();
+ size_t enc_len = EncodeFrame(input_, frame_size_, encoded);
+ size_t dec_len;
+ // Copy payload since iSAC fix destroys it during decode.
+ // Issue: http://code.google.com/p/webrtc/issues/detail?id=845.
+ // TODO(hlundin): Remove if the iSAC bug gets fixed.
+ memcpy(encoded_copy, encoded, enc_len);
+ AudioDecoder::SpeechType speech_type1, speech_type2;
+ EXPECT_EQ(0, decoder_->Init());
+ dec_len = decoder_->Decode(encoded, enc_len, output1, &speech_type1);
+ EXPECT_EQ(frame_size_ * channels_, dec_len);
+ // Re-init decoder and decode again.
+ EXPECT_EQ(0, decoder_->Init());
+ dec_len = decoder_->Decode(encoded_copy, enc_len, output2, &speech_type2);
+ EXPECT_EQ(frame_size_ * channels_, dec_len);
+ for (unsigned int n = 0; n < frame_size_; ++n) {
+ ASSERT_EQ(output1[n], output2[n]) << "Exit test on first diff; n = " << n;
+ }
+ EXPECT_EQ(speech_type1, speech_type2);
+ }
+
+ // Call DecodePlc and verify that the correct number of samples is produced.
+ void DecodePlcTest() {
+ InitEncoder();
+ size_t enc_len = EncodeFrame(input_, frame_size_, encoded_);
+ AudioDecoder::SpeechType speech_type;
+ EXPECT_EQ(0, decoder_->Init());
+ size_t dec_len =
+ decoder_->Decode(encoded_, enc_len, decoded_, &speech_type);
+ EXPECT_EQ(frame_size_ * channels_, dec_len);
+ // Call DecodePlc and verify that we get one frame of data.
+ // (Overwrite the output from the above Decode call, but that does not
+ // matter.)
+ dec_len = decoder_->DecodePlc(1, decoded_);
+ EXPECT_EQ(frame_size_ * channels_, dec_len);
+ }
+
+ std::string input_file_;
+ FILE* input_fp_;
+ int16_t* input_;
+ uint8_t* encoded_;
+ int16_t* decoded_;
+ size_t frame_size_;
+ size_t data_length_;
+ size_t encoded_bytes_;
+ size_t channels_;
+ AudioDecoder* decoder_;
+};
+
+class AudioDecoderPcmUTest : public AudioDecoderTest {
+ protected:
+ AudioDecoderPcmUTest() : AudioDecoderTest() {
+ frame_size_ = 160;
+ data_length_ = 10 * frame_size_;
+ decoder_ = new AudioDecoderPcmU;
+ assert(decoder_);
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ int enc_len_bytes =
+ WebRtcG711_EncodeU(NULL, const_cast<int16_t*>(input),
+ static_cast<int>(input_len_samples),
+ reinterpret_cast<int16_t*>(output));
+ EXPECT_EQ(input_len_samples, static_cast<size_t>(enc_len_bytes));
+ return enc_len_bytes;
+ }
+};
+
+class AudioDecoderPcmATest : public AudioDecoderTest {
+ protected:
+ AudioDecoderPcmATest() : AudioDecoderTest() {
+ frame_size_ = 160;
+ data_length_ = 10 * frame_size_;
+ decoder_ = new AudioDecoderPcmA;
+ assert(decoder_);
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ int enc_len_bytes =
+ WebRtcG711_EncodeA(NULL, const_cast<int16_t*>(input),
+ static_cast<int>(input_len_samples),
+ reinterpret_cast<int16_t*>(output));
+ EXPECT_EQ(input_len_samples, static_cast<size_t>(enc_len_bytes));
+ return enc_len_bytes;
+ }
+};
+
+class AudioDecoderPcm16BTest : public AudioDecoderTest {
+ protected:
+ AudioDecoderPcm16BTest() : AudioDecoderTest() {
+ frame_size_ = 160;
+ data_length_ = 10 * frame_size_;
+ decoder_ = new AudioDecoderPcm16B(kDecoderPCM16B);
+ assert(decoder_);
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ int enc_len_bytes = WebRtcPcm16b_EncodeW16(
+ const_cast<int16_t*>(input), static_cast<int>(input_len_samples),
+ reinterpret_cast<int16_t*>(output));
+ EXPECT_EQ(2 * input_len_samples, static_cast<size_t>(enc_len_bytes));
+ return enc_len_bytes;
+ }
+};
+
+class AudioDecoderIlbcTest : public AudioDecoderTest {
+ protected:
+ AudioDecoderIlbcTest() : AudioDecoderTest() {
+ frame_size_ = 240;
+ data_length_ = 10 * frame_size_;
+ decoder_ = new AudioDecoderIlbc;
+ assert(decoder_);
+ WebRtcIlbcfix_EncoderCreate(&encoder_);
+ }
+
+ ~AudioDecoderIlbcTest() {
+ WebRtcIlbcfix_EncoderFree(encoder_);
+ }
+
+ virtual void InitEncoder() {
+ ASSERT_EQ(0, WebRtcIlbcfix_EncoderInit(encoder_, 30)); // 30 ms.
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ int enc_len_bytes =
+ WebRtcIlbcfix_Encode(encoder_, input,
+ static_cast<int>(input_len_samples),
+ reinterpret_cast<int16_t*>(output));
+ EXPECT_EQ(50, enc_len_bytes);
+ return enc_len_bytes;
+ }
+
+ // Overload the default test since iLBC's function WebRtcIlbcfix_NetEqPlc does
+ // not return any data. It simply resets a few states and returns 0.
+ void DecodePlcTest() {
+ InitEncoder();
+ size_t enc_len = EncodeFrame(input_, frame_size_, encoded_);
+ AudioDecoder::SpeechType speech_type;
+ EXPECT_EQ(0, decoder_->Init());
+ size_t dec_len =
+ decoder_->Decode(encoded_, enc_len, decoded_, &speech_type);
+ EXPECT_EQ(frame_size_, dec_len);
+ // Simply call DecodePlc and verify that we get 0 as return value.
+ EXPECT_EQ(0, decoder_->DecodePlc(1, decoded_));
+ }
+
+ iLBC_encinst_t* encoder_;
+};
+
+class AudioDecoderIsacFloatTest : public AudioDecoderTest {
+ protected:
+ AudioDecoderIsacFloatTest() : AudioDecoderTest() {
+ input_size_ = 160;
+ frame_size_ = 480;
+ data_length_ = 10 * frame_size_;
+ decoder_ = new AudioDecoderIsac;
+ assert(decoder_);
+ WebRtcIsac_Create(&encoder_);
+ WebRtcIsac_SetEncSampRate(encoder_, 16000);
+ }
+
+ ~AudioDecoderIsacFloatTest() {
+ WebRtcIsac_Free(encoder_);
+ }
+
+ virtual void InitEncoder() {
+ ASSERT_EQ(0, WebRtcIsac_EncoderInit(encoder_, 1)); // Fixed mode.
+ ASSERT_EQ(0, WebRtcIsac_Control(encoder_, 32000, 30)); // 32 kbps, 30 ms.
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ // Insert 3 * 10 ms. Expect non-zero output on third call.
+ EXPECT_EQ(0, WebRtcIsac_Encode(encoder_, input,
+ reinterpret_cast<int16_t*>(output)));
+ input += input_size_;
+ EXPECT_EQ(0, WebRtcIsac_Encode(encoder_, input,
+ reinterpret_cast<int16_t*>(output)));
+ input += input_size_;
+ int enc_len_bytes =
+ WebRtcIsac_Encode(encoder_, input, reinterpret_cast<int16_t*>(output));
+ EXPECT_GT(enc_len_bytes, 0);
+ return enc_len_bytes;
+ }
+
+ ISACStruct* encoder_;
+ int input_size_;
+};
+
+class AudioDecoderIsacSwbTest : public AudioDecoderTest {
+ protected:
+ AudioDecoderIsacSwbTest() : AudioDecoderTest() {
+ input_size_ = 320;
+ frame_size_ = 960;
+ data_length_ = 10 * frame_size_;
+ decoder_ = new AudioDecoderIsacSwb;
+ assert(decoder_);
+ WebRtcIsac_Create(&encoder_);
+ WebRtcIsac_SetEncSampRate(encoder_, 32000);
+ }
+
+ ~AudioDecoderIsacSwbTest() {
+ WebRtcIsac_Free(encoder_);
+ }
+
+ virtual void InitEncoder() {
+ ASSERT_EQ(0, WebRtcIsac_EncoderInit(encoder_, 1)); // Fixed mode.
+ ASSERT_EQ(0, WebRtcIsac_Control(encoder_, 32000, 30)); // 32 kbps, 30 ms.
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ // Insert 3 * 10 ms. Expect non-zero output on third call.
+ EXPECT_EQ(0, WebRtcIsac_Encode(encoder_, input,
+ reinterpret_cast<int16_t*>(output)));
+ input += input_size_;
+ EXPECT_EQ(0, WebRtcIsac_Encode(encoder_, input,
+ reinterpret_cast<int16_t*>(output)));
+ input += input_size_;
+ int enc_len_bytes =
+ WebRtcIsac_Encode(encoder_, input, reinterpret_cast<int16_t*>(output));
+ EXPECT_GT(enc_len_bytes, 0);
+ return enc_len_bytes;
+ }
+
+ ISACStruct* encoder_;
+ int input_size_;
+};
+
+// This test is identical to AudioDecoderIsacSwbTest, except that it creates
+// an AudioDecoderIsacFb decoder object.
+class AudioDecoderIsacFbTest : public AudioDecoderIsacSwbTest {
+ protected:
+ AudioDecoderIsacFbTest() : AudioDecoderIsacSwbTest() {
+ // Delete the |decoder_| that was created by AudioDecoderIsacSwbTest and
+ // create an AudioDecoderIsacFb object instead.
+ delete decoder_;
+ decoder_ = new AudioDecoderIsacFb;
+ assert(decoder_);
+ }
+};
+
+class AudioDecoderIsacFixTest : public AudioDecoderTest {
+ protected:
+ AudioDecoderIsacFixTest() : AudioDecoderTest() {
+ input_size_ = 160;
+ frame_size_ = 480;
+ data_length_ = 10 * frame_size_;
+ decoder_ = new AudioDecoderIsacFix;
+ assert(decoder_);
+ WebRtcIsacfix_Create(&encoder_);
+ }
+
+ ~AudioDecoderIsacFixTest() {
+ WebRtcIsacfix_Free(encoder_);
+ }
+
+ virtual void InitEncoder() {
+ ASSERT_EQ(0, WebRtcIsacfix_EncoderInit(encoder_, 1)); // Fixed mode.
+ ASSERT_EQ(0,
+ WebRtcIsacfix_Control(encoder_, 32000, 30)); // 32 kbps, 30 ms.
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ // Insert 3 * 10 ms. Expect non-zero output on third call.
+ EXPECT_EQ(0, WebRtcIsacfix_Encode(encoder_, input,
+ reinterpret_cast<int16_t*>(output)));
+ input += input_size_;
+ EXPECT_EQ(0, WebRtcIsacfix_Encode(encoder_, input,
+ reinterpret_cast<int16_t*>(output)));
+ input += input_size_;
+ int enc_len_bytes = WebRtcIsacfix_Encode(
+ encoder_, input, reinterpret_cast<int16_t*>(output));
+ EXPECT_GT(enc_len_bytes, 0);
+ return enc_len_bytes;
+ }
+
+ ISACFIX_MainStruct* encoder_;
+ int input_size_;
+};
+
+class AudioDecoderG722Test : public AudioDecoderTest {
+ protected:
+ AudioDecoderG722Test() : AudioDecoderTest() {
+ frame_size_ = 160;
+ data_length_ = 10 * frame_size_;
+ decoder_ = new AudioDecoderG722;
+ assert(decoder_);
+ WebRtcG722_CreateEncoder(&encoder_);
+ }
+
+ ~AudioDecoderG722Test() {
+ WebRtcG722_FreeEncoder(encoder_);
+ }
+
+ virtual void InitEncoder() {
+ ASSERT_EQ(0, WebRtcG722_EncoderInit(encoder_));
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ int enc_len_bytes =
+ WebRtcG722_Encode(encoder_, const_cast<int16_t*>(input),
+ static_cast<int>(input_len_samples),
+ reinterpret_cast<int16_t*>(output));
+ EXPECT_EQ(80, enc_len_bytes);
+ return enc_len_bytes;
+ }
+
+ G722EncInst* encoder_;
+};
+
+class AudioDecoderG722StereoTest : public AudioDecoderG722Test {
+ protected:
+ AudioDecoderG722StereoTest() : AudioDecoderG722Test() {
+ channels_ = 2;
+ // Delete the |decoder_| that was created by AudioDecoderG722Test and
+ // create an AudioDecoderG722Stereo object instead.
+ delete decoder_;
+ decoder_ = new AudioDecoderG722Stereo;
+ assert(decoder_);
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ uint8_t* temp_output = new uint8_t[data_length_ * 2];
+ // Encode a mono payload using the base test class.
+ int mono_enc_len_bytes =
+ AudioDecoderG722Test::EncodeFrame(input, input_len_samples,
+ temp_output);
+ // The bit-stream consists of 4-bit samples:
+ // +--------+--------+--------+
+ // | s0 s1 | s2 s3 | s4 s5 |
+ // +--------+--------+--------+
+ //
+ // Duplicate them to the |output| such that the stereo stream becomes:
+ // +--------+--------+--------+
+ // | s0 s0 | s1 s1 | s2 s2 |
+ // +--------+--------+--------+
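+    // For example, a mono byte 0xAB (s0 = 0xA, s1 = 0xB) becomes the two
+    // stereo bytes 0xAA and 0xBB.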
+ EXPECT_LE(mono_enc_len_bytes * 2, static_cast<int>(data_length_ * 2));
+ uint8_t* output_ptr = output;
+ for (int i = 0; i < mono_enc_len_bytes; ++i) {
+ *output_ptr = (temp_output[i] & 0xF0) + (temp_output[i] >> 4);
+ ++output_ptr;
+ *output_ptr = (temp_output[i] << 4) + (temp_output[i] & 0x0F);
+ ++output_ptr;
+ }
+ delete [] temp_output;
+ return mono_enc_len_bytes * 2;
+ }
+};
+
+#ifdef WEBRTC_CODEC_CELT
+class AudioDecoderCeltTest : public AudioDecoderTest {
+ protected:
+ static const int kEncodingRateBitsPerSecond = 64000;
+ AudioDecoderCeltTest() : AudioDecoderTest(), encoder_(NULL) {
+ frame_size_ = 640;
+ data_length_ = 10 * frame_size_;
+ decoder_ = AudioDecoder::CreateAudioDecoder(kDecoderCELT_32);
+ assert(decoder_);
+ WebRtcCelt_CreateEnc(&encoder_, static_cast<int>(channels_));
+ }
+
+ ~AudioDecoderCeltTest() {
+ WebRtcCelt_FreeEnc(encoder_);
+ }
+
+ virtual void InitEncoder() {
+ assert(encoder_);
+ ASSERT_EQ(0, WebRtcCelt_EncoderInit(
+ encoder_, static_cast<int>(channels_), kEncodingRateBitsPerSecond));
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ assert(encoder_);
+ return WebRtcCelt_Encode(encoder_, input, output);
+ }
+
+ CELT_encinst_t* encoder_;
+};
+
+class AudioDecoderCeltStereoTest : public AudioDecoderTest {
+ protected:
+ static const int kEncodingRateBitsPerSecond = 64000;
+ AudioDecoderCeltStereoTest() : AudioDecoderTest(), encoder_(NULL) {
+ channels_ = 2;
+ frame_size_ = 640;
+ data_length_ = 10 * frame_size_;
+ decoder_ = AudioDecoder::CreateAudioDecoder(kDecoderCELT_32_2ch);
+ assert(decoder_);
+ stereo_input_ = new int16_t[frame_size_ * channels_];
+ WebRtcCelt_CreateEnc(&encoder_, static_cast<int>(channels_));
+ }
+
+ ~AudioDecoderCeltStereoTest() {
+ delete [] stereo_input_;
+ WebRtcCelt_FreeEnc(encoder_);
+ }
+
+ virtual void InitEncoder() {
+ assert(encoder_);
+ ASSERT_EQ(0, WebRtcCelt_EncoderInit(
+ encoder_, static_cast<int>(channels_), kEncodingRateBitsPerSecond));
+ }
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ assert(encoder_);
+ assert(stereo_input_);
+ for (size_t n = 0; n < frame_size_; ++n) {
+ stereo_input_[n * 2] = stereo_input_[n * 2 + 1] = input[n];
+ }
+ return WebRtcCelt_Encode(encoder_, stereo_input_, output);
+ }
+
+ int16_t* stereo_input_;
+ CELT_encinst_t* encoder_;
+};
+
+#endif
+
+class AudioDecoderOpusTest : public AudioDecoderTest {
+ protected:
+ AudioDecoderOpusTest() : AudioDecoderTest() {
+ frame_size_ = 320;
+ data_length_ = 10 * frame_size_;
+ decoder_ = new AudioDecoderOpus(kDecoderOpus);
+ assert(decoder_);
+ WebRtcOpus_EncoderCreate(&encoder_, 1);
+ }
+
+ ~AudioDecoderOpusTest() {
+ WebRtcOpus_EncoderFree(encoder_);
+ }
+
+ virtual void InitEncoder() {}
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ // Upsample from 32 to 48 kHz.
+ Resampler rs;
+ rs.Reset(32000, 48000, kResamplerSynchronous);
+ const int max_resamp_len_samples = static_cast<int>(input_len_samples) *
+ 3 / 2;
+ int16_t* resamp_input = new int16_t[max_resamp_len_samples];
+ int resamp_len_samples;
+ EXPECT_EQ(0, rs.Push(input, static_cast<int>(input_len_samples),
+ resamp_input, max_resamp_len_samples,
+ resamp_len_samples));
+ EXPECT_EQ(max_resamp_len_samples, resamp_len_samples);
+ int enc_len_bytes =
+ WebRtcOpus_Encode(encoder_, resamp_input, resamp_len_samples,
+ static_cast<int>(data_length_), output);
+ EXPECT_GT(enc_len_bytes, 0);
+ delete [] resamp_input;
+ return enc_len_bytes;
+ }
+
+ OpusEncInst* encoder_;
+};
+
+class AudioDecoderOpusStereoTest : public AudioDecoderTest {
+ protected:
+ AudioDecoderOpusStereoTest() : AudioDecoderTest() {
+ channels_ = 2;
+ frame_size_ = 320;
+ data_length_ = 10 * frame_size_;
+ decoder_ = new AudioDecoderOpus(kDecoderOpus_2ch);
+ assert(decoder_);
+ WebRtcOpus_EncoderCreate(&encoder_, 2);
+ }
+
+ ~AudioDecoderOpusStereoTest() {
+ WebRtcOpus_EncoderFree(encoder_);
+ }
+
+ virtual void InitEncoder() {}
+
+ virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
+ uint8_t* output) {
+ // Create stereo by duplicating each sample in |input|.
+ const int input_stereo_samples = static_cast<int>(input_len_samples) * 2;
+ int16_t* input_stereo = new int16_t[input_stereo_samples];
+ for (size_t i = 0; i < input_len_samples; i++)
+ input_stereo[i * 2] = input_stereo[i * 2 + 1] = input[i];
+ // Upsample from 32 to 48 kHz.
+ Resampler rs;
+ rs.Reset(32000, 48000, kResamplerSynchronousStereo);
+ const int max_resamp_len_samples = input_stereo_samples * 3 / 2;
+ int16_t* resamp_input = new int16_t[max_resamp_len_samples];
+ int resamp_len_samples;
+ EXPECT_EQ(0, rs.Push(input_stereo, input_stereo_samples, resamp_input,
+ max_resamp_len_samples, resamp_len_samples));
+ EXPECT_EQ(max_resamp_len_samples, resamp_len_samples);
+ int enc_len_bytes =
+ WebRtcOpus_Encode(encoder_, resamp_input, resamp_len_samples / 2,
+ static_cast<int16_t>(data_length_), output);
+ EXPECT_GT(enc_len_bytes, 0);
+ delete [] resamp_input;
+ delete [] input_stereo;
+ return enc_len_bytes;
+ }
+
+ OpusEncInst* encoder_;
+};
+
+TEST_F(AudioDecoderPcmUTest, EncodeDecode) {
+ int tolerance = 251;
+ double mse = 1734.0;
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMu));
+ EncodeDecodeTest(data_length_, tolerance, mse);
+ ReInitTest();
+ EXPECT_FALSE(decoder_->HasDecodePlc());
+}
+
+TEST_F(AudioDecoderPcmATest, EncodeDecode) {
+ int tolerance = 308;
+ double mse = 1931.0;
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMa));
+ EncodeDecodeTest(data_length_, tolerance, mse);
+ ReInitTest();
+ EXPECT_FALSE(decoder_->HasDecodePlc());
+}
+
+TEST_F(AudioDecoderPcm16BTest, EncodeDecode) {
+ int tolerance = 0;
+ double mse = 0.0;
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16B));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bwb));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb32kHz));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb48kHz));
+ EncodeDecodeTest(2 * data_length_, tolerance, mse);
+ ReInitTest();
+ EXPECT_FALSE(decoder_->HasDecodePlc());
+}
+
+TEST_F(AudioDecoderIlbcTest, EncodeDecode) {
+ int tolerance = 6808;
+ double mse = 2.13e6;
+ int delay = 80; // Delay from input to output.
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderILBC));
+ EncodeDecodeTest(500, tolerance, mse, delay);
+ ReInitTest();
+ EXPECT_TRUE(decoder_->HasDecodePlc());
+ DecodePlcTest();
+}
+
+TEST_F(AudioDecoderIsacFloatTest, EncodeDecode) {
+ int tolerance = 3399;
+ double mse = 434951.0;
+ int delay = 48; // Delay from input to output.
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISAC));
+ EncodeDecodeTest(0, tolerance, mse, delay);
+ ReInitTest();
+ EXPECT_TRUE(decoder_->HasDecodePlc());
+ DecodePlcTest();
+}
+
+TEST_F(AudioDecoderIsacSwbTest, EncodeDecode) {
+ int tolerance = 19757;
+ double mse = 8.18e6;
+ int delay = 160; // Delay from input to output.
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISACswb));
+ EncodeDecodeTest(0, tolerance, mse, delay);
+ ReInitTest();
+ EXPECT_TRUE(decoder_->HasDecodePlc());
+ DecodePlcTest();
+}
+
+TEST_F(AudioDecoderIsacFbTest, EncodeDecode) {
+ int tolerance = 19757;
+ double mse = 8.18e6;
+ int delay = 160; // Delay from input to output.
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISACswb));
+ EncodeDecodeTest(0, tolerance, mse, delay);
+ ReInitTest();
+ EXPECT_TRUE(decoder_->HasDecodePlc());
+ DecodePlcTest();
+}
+
+TEST_F(AudioDecoderIsacFixTest, DISABLED_EncodeDecode) {
+ int tolerance = 11034;
+ double mse = 3.46e6;
+ int delay = 54; // Delay from input to output.
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISAC));
+ EncodeDecodeTest(735, tolerance, mse, delay);
+ ReInitTest();
+ EXPECT_FALSE(decoder_->HasDecodePlc());
+}
+
+TEST_F(AudioDecoderG722Test, EncodeDecode) {
+ int tolerance = 6176;
+ double mse = 238630.0;
+ int delay = 22; // Delay from input to output.
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderG722));
+ EncodeDecodeTest(data_length_ / 2, tolerance, mse, delay);
+ ReInitTest();
+ EXPECT_FALSE(decoder_->HasDecodePlc());
+}
+
+TEST_F(AudioDecoderG722StereoTest, CreateAndDestroy) {
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderG722_2ch));
+}
+
+TEST_F(AudioDecoderG722StereoTest, EncodeDecode) {
+ int tolerance = 6176;
+ int channel_diff_tolerance = 0;
+ double mse = 238630.0;
+ int delay = 22; // Delay from input to output.
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderG722_2ch));
+ EncodeDecodeTest(data_length_, tolerance, mse, delay, channel_diff_tolerance);
+ ReInitTest();
+ EXPECT_FALSE(decoder_->HasDecodePlc());
+}
+
+TEST_F(AudioDecoderOpusTest, EncodeDecode) {
+ int tolerance = 6176;
+ double mse = 238630.0;
+ int delay = 22; // Delay from input to output.
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus));
+ EncodeDecodeTest(0, tolerance, mse, delay);
+ ReInitTest();
+ EXPECT_FALSE(decoder_->HasDecodePlc());
+}
+
+TEST_F(AudioDecoderOpusStereoTest, EncodeDecode) {
+ int tolerance = 6176;
+ int channel_diff_tolerance = 0;
+ double mse = 238630.0;
+ int delay = 22; // Delay from input to output.
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus_2ch));
+ EncodeDecodeTest(0, tolerance, mse, delay, channel_diff_tolerance);
+ ReInitTest();
+ EXPECT_FALSE(decoder_->HasDecodePlc());
+}
+
+#ifdef WEBRTC_CODEC_CELT
+// In the two following CELT tests, the low amplitude of the test signal
+// allows us to use such low error thresholds, i.e., |tolerance| and |mse|.
+// Furthermore, in general, stereo signals with identical channels do not
+// result in identical encoded channels.
+TEST_F(AudioDecoderCeltTest, EncodeDecode) {
+ int tolerance = 20;
+ double mse = 17.0;
+ int delay = 80; // Delay from input to output in samples.
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32));
+ EncodeDecodeTest(1600, tolerance, mse, delay);
+ ReInitTest();
+ EXPECT_TRUE(decoder_->HasDecodePlc());
+ DecodePlcTest();
+}
+
+TEST_F(AudioDecoderCeltStereoTest, EncodeDecode) {
+ int tolerance = 20;
+  // Even if both channels are identical, CELT does not necessarily decode
+  // identical channels. However, for this input it does.
+ int channel_diff_tolerance = 0;
+ double mse = 20.0;
+ // Delay from input to output in samples, accounting for stereo.
+ int delay = 160;
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32_2ch));
+ EncodeDecodeTest(1600, tolerance, mse, delay, channel_diff_tolerance);
+ ReInitTest();
+ EXPECT_TRUE(decoder_->HasDecodePlc());
+ DecodePlcTest();
+}
+#endif
+
+TEST(AudioDecoder, CodecSampleRateHz) {
+ EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCMu));
+ EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCMa));
+ EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCMu_2ch));
+ EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCMa_2ch));
+ EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderILBC));
+ EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderISAC));
+ EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderISACswb));
+ EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderISACfb));
+ EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16B));
+ EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bwb));
+ EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bswb32kHz));
+ EXPECT_EQ(48000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bswb48kHz));
+ EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16B_2ch));
+ EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bwb_2ch));
+ EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bswb32kHz_2ch));
+ EXPECT_EQ(48000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bswb48kHz_2ch));
+ EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16B_5ch));
+ EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderG722));
+ EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderG722_2ch));
+ EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderRED));
+ EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderAVT));
+ EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderCNGnb));
+ EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderCNGwb));
+ EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderCNGswb32kHz));
+ // TODO(tlegrand): Change 32000 to 48000 below once ACM has 48 kHz support.
+ EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderCNGswb48kHz));
+ EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderArbitrary));
+ EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderOpus));
+ EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderOpus_2ch));
+#ifdef WEBRTC_CODEC_CELT
+ EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32));
+ EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32_2ch));
+#else
+ EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32));
+ EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32_2ch));
+#endif
+}
+
+TEST(AudioDecoder, CodecSupported) {
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMu));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMa));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMu_2ch));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMa_2ch));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderILBC));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISAC));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISACswb));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISACfb));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16B));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bwb));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb32kHz));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb48kHz));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16B_2ch));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bwb_2ch));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb32kHz_2ch));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb48kHz_2ch));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16B_5ch));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderG722));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderG722_2ch));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderRED));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderAVT));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCNGnb));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCNGwb));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCNGswb32kHz));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCNGswb48kHz));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderArbitrary));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus_2ch));
+#ifdef WEBRTC_CODEC_CELT
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32));
+ EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32_2ch));
+#else
+ EXPECT_FALSE(AudioDecoder::CodecSupported(kDecoderCELT_32));
+ EXPECT_FALSE(AudioDecoder::CodecSupported(kDecoderCELT_32_2ch));
+#endif
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittests.isolate b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittests.isolate
index bb57e74b34f..bb57e74b34f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittests.isolate
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittests.isolate
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc
new file mode 100644
index 00000000000..5a208a6972a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc
@@ -0,0 +1,213 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+AudioMultiVector::AudioMultiVector(size_t N) {
+ assert(N > 0);
+ if (N < 1) N = 1;
+ for (size_t n = 0; n < N; ++n) {
+ channels_.push_back(new AudioVector);
+ }
+ num_channels_ = N;
+}
+
+AudioMultiVector::AudioMultiVector(size_t N, size_t initial_size) {
+ assert(N > 0);
+ if (N < 1) N = 1;
+ for (size_t n = 0; n < N; ++n) {
+ channels_.push_back(new AudioVector(initial_size));
+ }
+ num_channels_ = N;
+}
+
+AudioMultiVector::~AudioMultiVector() {
+ std::vector<AudioVector*>::iterator it = channels_.begin();
+ while (it != channels_.end()) {
+ delete (*it);
+ ++it;
+ }
+}
+
+void AudioMultiVector::Clear() {
+ for (size_t i = 0; i < num_channels_; ++i) {
+ channels_[i]->Clear();
+ }
+}
+
+void AudioMultiVector::Zeros(size_t length) {
+ for (size_t i = 0; i < num_channels_; ++i) {
+ channels_[i]->Clear();
+ channels_[i]->Extend(length);
+ }
+}
+
+void AudioMultiVector::CopyFrom(AudioMultiVector* copy_to) const {
+ if (copy_to) {
+ for (size_t i = 0; i < num_channels_; ++i) {
+ channels_[i]->CopyFrom(&(*copy_to)[i]);
+ }
+ }
+}
+
+void AudioMultiVector::PushBackInterleaved(const int16_t* append_this,
+ size_t length) {
+ assert(length % num_channels_ == 0);
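+  // For example, with num_channels_ == 2, the interleaved input
+  // {l0, r0, l1, r1} appends {l0, l1} to channel 0 and {r0, r1} to channel 1.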
+ if (num_channels_ == 1) {
+ // Special case to avoid extra allocation and data shuffling.
+ channels_[0]->PushBack(append_this, length);
+ return;
+ }
+ size_t length_per_channel = length / num_channels_;
+ int16_t* temp_array = new int16_t[length_per_channel]; // Temporary storage.
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ // Copy elements to |temp_array|.
+ // Set |source_ptr| to first element of this channel.
+ const int16_t* source_ptr = &append_this[channel];
+ for (size_t i = 0; i < length_per_channel; ++i) {
+ temp_array[i] = *source_ptr;
+ source_ptr += num_channels_; // Jump to next element of this channel.
+ }
+ channels_[channel]->PushBack(temp_array, length_per_channel);
+ }
+ delete [] temp_array;
+}
+
+void AudioMultiVector::PushBack(const AudioMultiVector& append_this) {
+ assert(num_channels_ == append_this.num_channels_);
+ if (num_channels_ == append_this.num_channels_) {
+ for (size_t i = 0; i < num_channels_; ++i) {
+ channels_[i]->PushBack(append_this[i]);
+ }
+ }
+}
+
+void AudioMultiVector::PushBackFromIndex(const AudioMultiVector& append_this,
+ size_t index) {
+ assert(index < append_this.Size());
+ index = std::min(index, append_this.Size() - 1);
+ size_t length = append_this.Size() - index;
+ assert(num_channels_ == append_this.num_channels_);
+ if (num_channels_ == append_this.num_channels_) {
+ for (size_t i = 0; i < num_channels_; ++i) {
+ channels_[i]->PushBack(&append_this[i][index], length);
+ }
+ }
+}
+
+void AudioMultiVector::PopFront(size_t length) {
+ for (size_t i = 0; i < num_channels_; ++i) {
+ channels_[i]->PopFront(length);
+ }
+}
+
+void AudioMultiVector::PopBack(size_t length) {
+ for (size_t i = 0; i < num_channels_; ++i) {
+ channels_[i]->PopBack(length);
+ }
+}
+
+size_t AudioMultiVector::ReadInterleaved(size_t length,
+ int16_t* destination) const {
+ return ReadInterleavedFromIndex(0, length, destination);
+}
+
+size_t AudioMultiVector::ReadInterleavedFromIndex(size_t start_index,
+ size_t length,
+ int16_t* destination) const {
+ if (!destination) {
+ return 0;
+ }
+ size_t index = 0; // Number of elements written to |destination| so far.
+ assert(start_index <= Size());
+ start_index = std::min(start_index, Size());
+ if (length + start_index > Size()) {
+ length = Size() - start_index;
+ }
+ if (num_channels_ == 1) {
+ // Special case to avoid the nested for loop below.
+ memcpy(destination, &(*this)[0][start_index], length * sizeof(int16_t));
+ return length;
+ }
+ for (size_t i = 0; i < length; ++i) {
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ destination[index] = (*this)[channel][i + start_index];
+ ++index;
+ }
+ }
+ return index;
+}
+
+size_t AudioMultiVector::ReadInterleavedFromEnd(size_t length,
+ int16_t* destination) const {
+ length = std::min(length, Size()); // Cannot read more than Size() elements.
+ return ReadInterleavedFromIndex(Size() - length, length, destination);
+}
+
+void AudioMultiVector::OverwriteAt(const AudioMultiVector& insert_this,
+ size_t length,
+ size_t position) {
+ assert(num_channels_ == insert_this.num_channels_);
+ // Cap |length| at the length of |insert_this|.
+ assert(length <= insert_this.Size());
+ length = std::min(length, insert_this.Size());
+ if (num_channels_ == insert_this.num_channels_) {
+ for (size_t i = 0; i < num_channels_; ++i) {
+ channels_[i]->OverwriteAt(&insert_this[i][0], length, position);
+ }
+ }
+}
+
+void AudioMultiVector::CrossFade(const AudioMultiVector& append_this,
+ size_t fade_length) {
+ assert(num_channels_ == append_this.num_channels_);
+ if (num_channels_ == append_this.num_channels_) {
+ for (size_t i = 0; i < num_channels_; ++i) {
+ channels_[i]->CrossFade(append_this[i], fade_length);
+ }
+ }
+}
+
+size_t AudioMultiVector::Size() const {
+ assert(channels_[0]);
+ return channels_[0]->Size();
+}
+
+void AudioMultiVector::AssertSize(size_t required_size) {
+ if (Size() < required_size) {
+ size_t extend_length = required_size - Size();
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ channels_[channel]->Extend(extend_length);
+ }
+ }
+}
+
+bool AudioMultiVector::Empty() const {
+ assert(channels_[0]);
+ return channels_[0]->Empty();
+}
+
+const AudioVector& AudioMultiVector::operator[](size_t index) const {
+ return *(channels_[index]);
+}
+
+AudioVector& AudioMultiVector::operator[](size_t index) {
+ return *(channels_[index]);
+}
+
+} // namespace webrtc
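
PushBackInterleaved() and the ReadInterleaved*() methods above assume the usual channel-interleaved layout, where sample i of channel c sits at index i * num_channels + c. The following is a minimal standalone sketch of that de-interleaving step, using plain std::vector and hypothetical names rather than the AudioVector class:

#include <cstddef>
#include <cstdint>
#include <vector>

// Splits an interleaved buffer [c0 c1 ... c0 c1 ...] into one vector per
// channel. |interleaved_length| is assumed to be a multiple of |num_channels|.
std::vector<std::vector<int16_t> > Deinterleave(const int16_t* interleaved,
                                                size_t interleaved_length,
                                                size_t num_channels) {
  const size_t samples_per_channel = interleaved_length / num_channels;
  std::vector<std::vector<int16_t> > channels(num_channels);
  for (size_t c = 0; c < num_channels; ++c) {
    channels[c].reserve(samples_per_channel);
    for (size_t i = 0; i < samples_per_channel; ++i) {
      // Stride through the buffer with step |num_channels|, as the
      // implementation above does with |source_ptr|.
      channels[c].push_back(interleaved[i * num_channels + c]);
    }
  }
  return channels;
}
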
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.h
new file mode 100644
index 00000000000..908de936d54
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_MULTI_VECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_MULTI_VECTOR_H_
+
+#include <string.h> // Access to size_t.
+
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_vector.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class AudioMultiVector {
+ public:
+ // Creates an empty AudioMultiVector with |N| audio channels. |N| must be
+ // larger than 0.
+ explicit AudioMultiVector(size_t N);
+
+ // Creates an AudioMultiVector with |N| audio channels, each channel having
+ // an initial size of |initial_size| elements. |N| must be larger than 0.
+ AudioMultiVector(size_t N, size_t initial_size);
+
+ virtual ~AudioMultiVector();
+
+ // Deletes all values and makes the vector empty.
+ virtual void Clear();
+
+ // Clears the vector and inserts |length| zeros into each channel.
+ virtual void Zeros(size_t length);
+
+ // Copies all values from this vector to |copy_to|. Any contents in |copy_to|
+ // are deleted. After the operation is done, |copy_to| will be an exact
+ // replica of this object. The source and the destination must have the same
+ // number of channels.
+ virtual void CopyFrom(AudioMultiVector* copy_to) const;
+
+ // Appends the contents of array |append_this| to the end of this
+ // object. The array is assumed to be channel-interleaved. |length| must be
+ // a multiple of this object's number of channels. The length of this object
+ // is increased by |length| divided by the number of channels.
+ virtual void PushBackInterleaved(const int16_t* append_this, size_t length);
+
+ // Appends the contents of AudioMultiVector |append_this| to this object. The
+ // length of this object is increased by the length of |append_this|.
+ virtual void PushBack(const AudioMultiVector& append_this);
+
+ // Appends the contents of AudioMultiVector |append_this| to this object,
+ // taken from |index| up until the end of |append_this|. The length of this
+ // object is increased.
+ virtual void PushBackFromIndex(const AudioMultiVector& append_this,
+ size_t index);
+
+ // Removes |length| elements from the beginning of this object, from each
+ // channel.
+ virtual void PopFront(size_t length);
+
+ // Removes |length| elements from the end of this object, from each
+ // channel.
+ virtual void PopBack(size_t length);
+
+ // Reads |length| samples from each channel and writes them interleaved to
+ // |destination|. The total number of elements written to |destination| is
+ // returned, i.e., |length| * number of channels. If the AudioMultiVector
+ // contains fewer than |length| samples per channel, this is reflected in the
+ // return value.
+ virtual size_t ReadInterleaved(size_t length, int16_t* destination) const;
+
+ // Like ReadInterleaved() above, but reads from |start_index| instead of from
+ // the beginning.
+ virtual size_t ReadInterleavedFromIndex(size_t start_index,
+ size_t length,
+ int16_t* destination) const;
+
+ // Like ReadInterleaved() above, but reads from the end instead of from
+ // the beginning.
+ virtual size_t ReadInterleavedFromEnd(size_t length,
+ int16_t* destination) const;
+
+ // Overwrites each channel in this AudioMultiVector with values taken from
+ // |insert_this|. The values are taken from the beginning of |insert_this| and
+ // are inserted starting at |position|. |length| values are written into each
+ // channel. If |length| and |position| are selected such that the new data
+ // extends beyond the end of the current AudioVector, the vector is extended
+ // to accommodate the new data. |length| is limited to the length of
+ // |insert_this|.
+ virtual void OverwriteAt(const AudioMultiVector& insert_this,
+ size_t length,
+ size_t position);
+
+ // Appends |append_this| to the end of the current vector. Lets the two
+ // vectors overlap by |fade_length| samples (per channel), and cross-fade
+ // linearly in this region.
+ virtual void CrossFade(const AudioMultiVector& append_this,
+ size_t fade_length);
+
+ // Returns the number of channels.
+ virtual size_t Channels() const { return num_channels_; }
+
+ // Returns the number of elements per channel in this AudioMultiVector.
+ virtual size_t Size() const;
+
+ // Verifies that each channel can hold at least |required_size| elements. If
+ // not, extends each channel accordingly.
+ virtual void AssertSize(size_t required_size);
+
+ virtual bool Empty() const;
+
+ // Accesses and modifies a channel (i.e., an AudioVector object) of this
+ // AudioMultiVector.
+ const AudioVector& operator[](size_t index) const;
+ AudioVector& operator[](size_t index);
+
+ protected:
+ std::vector<AudioVector*> channels_;
+ size_t num_channels_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioMultiVector);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_MULTI_VECTOR_H_
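
A short usage sketch of the interface declared above; the function name and the sample values are illustrative only, and the header is assumed to be on the include path as elsewhere in the module:

#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"

namespace {

// Builds a two-channel AudioMultiVector from an interleaved stereo buffer and
// reads the last frame back in interleaved form.
void AudioMultiVectorSketch() {
  webrtc::AudioMultiVector stereo(2);
  const int16_t interleaved[] = {10, 20, 11, 21, 12, 22};  // L R L R L R.
  stereo.PushBackInterleaved(interleaved, 6);  // Three samples per channel.
  // stereo.Channels() == 2 and stereo.Size() == 3 at this point.
  int16_t last_frame[2];
  stereo.ReadInterleavedFromEnd(1, last_frame);  // last_frame == {12, 22}.
}

}  // namespace
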
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector_unittest.cc
new file mode 100644
index 00000000000..94760385264
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector_unittest.cc
@@ -0,0 +1,308 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#include <string>
+
+#include "gtest/gtest.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// This is a value-parameterized test. The test cases are instantiated with
+// different values for the test parameter, which is used to determine the
+// number of channels in the AudioMultiVector. Note that it is not possible
+// to combine typed testing with value-parameterized testing, and since the
+// tests for AudioVector already cover a number of different type parameters,
+// this test focuses on different numbers of channels while keeping the
+// value type constant.
+
+class AudioMultiVectorTest : public ::testing::TestWithParam<size_t> {
+ protected:
+ AudioMultiVectorTest()
+ : num_channels_(GetParam()), // Get the test parameter.
+ interleaved_length_(num_channels_ * array_length()) {
+ array_interleaved_ = new int16_t[num_channels_ * array_length()];
+ }
+
+ ~AudioMultiVectorTest() {
+ delete [] array_interleaved_;
+ }
+
+ virtual void SetUp() {
+ // Populate test arrays.
+ for (size_t i = 0; i < array_length(); ++i) {
+ array_[i] = static_cast<int16_t>(i);
+ }
+ int16_t* ptr = array_interleaved_;
+ // Write 100, 101, 102, ... for first channel.
+ // Write 200, 201, 202, ... for second channel.
+ // And so on.
+ for (size_t i = 0; i < array_length(); ++i) {
+ for (size_t j = 1; j <= num_channels_; ++j) {
+ *ptr = j * 100 + i;
+ ++ptr;
+ }
+ }
+ }
+
+ size_t array_length() const {
+ return sizeof(array_) / sizeof(array_[0]);
+ }
+
+ const size_t num_channels_;
+ size_t interleaved_length_;
+ int16_t array_[10];
+ int16_t* array_interleaved_;
+};
+
+// Create and destroy AudioMultiVector objects, both empty and with a predefined
+// length.
+TEST_P(AudioMultiVectorTest, CreateAndDestroy) {
+ AudioMultiVector vec1(num_channels_);
+ EXPECT_TRUE(vec1.Empty());
+ EXPECT_EQ(num_channels_, vec1.Channels());
+ EXPECT_EQ(0u, vec1.Size());
+
+ size_t initial_size = 17;
+ AudioMultiVector vec2(num_channels_, initial_size);
+ EXPECT_FALSE(vec2.Empty());
+ EXPECT_EQ(num_channels_, vec2.Channels());
+ EXPECT_EQ(initial_size, vec2.Size());
+}
+
+// Test the subscript operator [] for getting and setting.
+TEST_P(AudioMultiVectorTest, SubscriptOperator) {
+ AudioMultiVector vec(num_channels_, array_length());
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ for (size_t i = 0; i < array_length(); ++i) {
+ vec[channel][i] = static_cast<int16_t>(i);
+ // Make sure to use the const version.
+ const AudioVector& audio_vec = vec[channel];
+ EXPECT_EQ(static_cast<int16_t>(i), audio_vec[i]);
+ }
+ }
+}
+
+// Test the PushBackInterleaved method and the CopyFrom method. The Clear
+// method is also invoked.
+TEST_P(AudioMultiVectorTest, PushBackInterleavedAndCopy) {
+ AudioMultiVector vec(num_channels_);
+ vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
+ AudioMultiVector vec_copy(num_channels_);
+ vec.CopyFrom(&vec_copy); // Copy from |vec| to |vec_copy|.
+ ASSERT_EQ(num_channels_, vec.Channels());
+ ASSERT_EQ(array_length(), vec.Size());
+ ASSERT_EQ(num_channels_, vec_copy.Channels());
+ ASSERT_EQ(array_length(), vec_copy.Size());
+ for (size_t channel = 0; channel < vec.Channels(); ++channel) {
+ for (size_t i = 0; i < array_length(); ++i) {
+ EXPECT_EQ(static_cast<int16_t>((channel + 1) * 100 + i), vec[channel][i]);
+ EXPECT_EQ(vec[channel][i], vec_copy[channel][i]);
+ }
+ }
+
+ // Clear |vec| and verify that it is empty.
+ vec.Clear();
+ EXPECT_TRUE(vec.Empty());
+
+ // Now copy the empty vector and verify that the copy becomes empty too.
+ vec.CopyFrom(&vec_copy);
+ EXPECT_TRUE(vec_copy.Empty());
+}
+
+// Try to copy to a NULL pointer. Nothing should happen.
+TEST_P(AudioMultiVectorTest, CopyToNull) {
+ AudioMultiVector vec(num_channels_);
+ AudioMultiVector* vec_copy = NULL;
+ vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
+ vec.CopyFrom(vec_copy);
+}
+
+// Test the PushBack method with another AudioMultiVector as input argument.
+TEST_P(AudioMultiVectorTest, PushBackVector) {
+ AudioMultiVector vec1(num_channels_, array_length());
+ AudioMultiVector vec2(num_channels_, array_length());
+ // Set the first vector to [0, 1, ..., array_length() - 1] +
+ // 100 * channel_number.
+ // Set the second vector to [array_length(), array_length() + 1, ...,
+ // 2 * array_length() - 1] + 100 * channel_number.
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ for (size_t i = 0; i < array_length(); ++i) {
+ vec1[channel][i] = static_cast<int16_t>(i + 100 * channel);
+ vec2[channel][i] =
+ static_cast<int16_t>(i + 100 * channel + array_length());
+ }
+ }
+ // Append vec2 to the back of vec1.
+ vec1.PushBack(vec2);
+ ASSERT_EQ(2u * array_length(), vec1.Size());
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ for (size_t i = 0; i < 2 * array_length(); ++i) {
+ EXPECT_EQ(static_cast<int16_t>(i + 100 * channel), vec1[channel][i]);
+ }
+ }
+}
+
+// Test the PushBackFromIndex method.
+TEST_P(AudioMultiVectorTest, PushBackFromIndex) {
+ AudioMultiVector vec1(num_channels_);
+ vec1.PushBackInterleaved(array_interleaved_, interleaved_length_);
+ AudioMultiVector vec2(num_channels_);
+
+ // Append vec1 to the back of vec2 (which is empty). Read vec1 starting from
+ // the second-to-last element.
+ vec2.PushBackFromIndex(vec1, array_length() - 2);
+ ASSERT_EQ(2u, vec2.Size());
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ for (size_t i = 0; i < 2; ++i) {
+ EXPECT_EQ(array_interleaved_[channel + num_channels_ *
+ (array_length() - 2 + i)], vec2[channel][i]);
+ }
+ }
+}
+
+// Starts with pushing some values to the vector, then test the Zeros method.
+TEST_P(AudioMultiVectorTest, Zeros) {
+ AudioMultiVector vec(num_channels_);
+ vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
+ vec.Zeros(2 * array_length());
+ ASSERT_EQ(num_channels_, vec.Channels());
+ ASSERT_EQ(2u * array_length(), vec.Size());
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ for (size_t i = 0; i < 2 * array_length(); ++i) {
+ EXPECT_EQ(0, vec[channel][i]);
+ }
+ }
+}
+
+// Test the ReadInterleaved method.
+TEST_P(AudioMultiVectorTest, ReadInterleaved) {
+ AudioMultiVector vec(num_channels_);
+ vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
+ int16_t* output = new int16_t[interleaved_length_];
+ // Read 5 samples.
+ size_t read_samples = 5;
+ EXPECT_EQ(num_channels_ * read_samples,
+ vec.ReadInterleaved(read_samples, output));
+ EXPECT_EQ(0,
+ memcmp(array_interleaved_, output, read_samples * sizeof(int16_t)));
+
+ // Read too many samples. Expect to get all samples from the vector.
+ EXPECT_EQ(interleaved_length_,
+ vec.ReadInterleaved(array_length() + 1, output));
+ EXPECT_EQ(0,
+ memcmp(array_interleaved_, output, read_samples * sizeof(int16_t)));
+
+ delete [] output;
+}
+
+// Try to read to a NULL pointer. Expected to return 0.
+TEST_P(AudioMultiVectorTest, ReadInterleavedToNull) {
+ AudioMultiVector vec(num_channels_);
+ vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
+ int16_t* output = NULL;
+ // Read 5 samples.
+ size_t read_samples = 5;
+ EXPECT_EQ(0u, vec.ReadInterleaved(read_samples, output));
+}
+
+// Test the PopFront method.
+TEST_P(AudioMultiVectorTest, PopFront) {
+ AudioMultiVector vec(num_channels_);
+ vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
+ vec.PopFront(1); // Remove one element from each channel.
+ ASSERT_EQ(array_length() - 1u, vec.Size());
+ // Let |ptr| point to the second element of the first channel in the
+ // interleaved array.
+ int16_t* ptr = &array_interleaved_[num_channels_];
+ for (size_t i = 0; i < array_length() - 1; ++i) {
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ EXPECT_EQ(*ptr, vec[channel][i]);
+ ++ptr;
+ }
+ }
+ vec.PopFront(array_length()); // Remove more elements than vector size.
+ EXPECT_EQ(0u, vec.Size());
+}
+
+// Test the PopBack method.
+TEST_P(AudioMultiVectorTest, PopBack) {
+ AudioMultiVector vec(num_channels_);
+ vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
+ vec.PopBack(1); // Remove one element from each channel.
+ ASSERT_EQ(array_length() - 1u, vec.Size());
+ // Let |ptr| point to the first element of the first channel in the
+ // interleaved array.
+ int16_t* ptr = array_interleaved_;
+ for (size_t i = 0; i < array_length() - 1; ++i) {
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ EXPECT_EQ(*ptr, vec[channel][i]);
+ ++ptr;
+ }
+ }
+ vec.PopBack(array_length()); // Remove more elements than vector size.
+ EXPECT_EQ(0u, vec.Size());
+}
+
+// Test the AssertSize method.
+TEST_P(AudioMultiVectorTest, AssertSize) {
+ AudioMultiVector vec(num_channels_, array_length());
+ EXPECT_EQ(array_length(), vec.Size());
+ // Start with asserting with smaller sizes than already allocated.
+ vec.AssertSize(0);
+ vec.AssertSize(array_length() - 1);
+ // Nothing should have changed.
+ EXPECT_EQ(array_length(), vec.Size());
+ // Assert with one element longer than already allocated.
+ vec.AssertSize(array_length() + 1);
+ // Expect vector to have grown.
+ EXPECT_EQ(array_length() + 1, vec.Size());
+ // Also check the individual AudioVectors.
+ for (size_t channel = 0; channel < vec.Channels(); ++channel) {
+ EXPECT_EQ(array_length() + 1u, vec[channel].Size());
+ }
+}
+
+// Test the OverwriteAt method with another AudioMultiVector as input argument.
+TEST_P(AudioMultiVectorTest, OverwriteAt) {
+ AudioMultiVector vec1(num_channels_);
+ vec1.PushBackInterleaved(array_interleaved_, interleaved_length_);
+ AudioMultiVector vec2(num_channels_);
+ vec2.Zeros(3); // 3 zeros in each channel.
+ // Overwrite part of vec1 with the contents of vec2, starting at position 5.
+ vec1.OverwriteAt(vec2, 3, 5);
+ // Verify result.
+ // Length remains the same.
+ ASSERT_EQ(array_length(), vec1.Size());
+ int16_t* ptr = array_interleaved_;
+ for (size_t i = 0; i < array_length() - 1; ++i) {
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ if (i >= 5 && i <= 7) {
+ // Elements 5, 6, 7 should have been replaced with zeros.
+ EXPECT_EQ(0, vec1[channel][i]);
+ } else {
+ EXPECT_EQ(*ptr, vec1[channel][i]);
+ }
+ ++ptr;
+ }
+ }
+}
+
+INSTANTIATE_TEST_CASE_P(TestNumChannels,
+ AudioMultiVectorTest,
+ ::testing::Values(static_cast<size_t>(1),
+ static_cast<size_t>(2),
+ static_cast<size_t>(5)));
+} // namespace webrtc
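
The comment at the top of this test file describes gtest value-parameterized testing. As a minimal sketch of the same mechanism, with a hypothetical fixture name unrelated to the real tests above:

#include <cstddef>

#include "gtest/gtest.h"

// The single test parameter plays the role of the channel count.
class ChannelCountTest : public ::testing::TestWithParam<size_t> {};

TEST_P(ChannelCountTest, ParameterIsPositive) {
  // GetParam() returns the value this instantiation was created with.
  EXPECT_GT(GetParam(), 0u);
}

// Runs ParameterIsPositive once for each listed channel count.
INSTANTIATE_TEST_CASE_P(ChannelCounts,
                        ChannelCountTest,
                        ::testing::Values(static_cast<size_t>(1),
                                          static_cast<size_t>(2),
                                          static_cast<size_t>(5)));
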
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc
new file mode 100644
index 00000000000..d9fb4e58c24
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc
@@ -0,0 +1,165 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/audio_vector.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+void AudioVector::Clear() {
+ first_free_ix_ = 0;
+}
+
+void AudioVector::CopyFrom(AudioVector* copy_to) const {
+ if (copy_to) {
+ copy_to->Reserve(Size());
+ assert(copy_to->capacity_ >= Size());
+ memcpy(copy_to->array_.get(), array_.get(), Size() * sizeof(int16_t));
+ copy_to->first_free_ix_ = first_free_ix_;
+ }
+}
+
+void AudioVector::PushFront(const AudioVector& prepend_this) {
+ size_t insert_length = prepend_this.Size();
+ Reserve(Size() + insert_length);
+ memmove(&array_[insert_length], &array_[0], Size() * sizeof(int16_t));
+ memcpy(&array_[0], &prepend_this.array_[0], insert_length * sizeof(int16_t));
+ first_free_ix_ += insert_length;
+}
+
+void AudioVector::PushFront(const int16_t* prepend_this, size_t length) {
+ // Same operation as InsertAt at the beginning of the vector.
+ InsertAt(prepend_this, length, 0);
+}
+
+void AudioVector::PushBack(const AudioVector& append_this) {
+ PushBack(append_this.array_.get(), append_this.Size());
+}
+
+void AudioVector::PushBack(const int16_t* append_this, size_t length) {
+ Reserve(Size() + length);
+ memcpy(&array_[first_free_ix_], append_this, length * sizeof(int16_t));
+ first_free_ix_ += length;
+}
+
+void AudioVector::PopFront(size_t length) {
+ if (length >= Size()) {
+ // Remove all elements.
+ Clear();
+ } else {
+ size_t remaining_samples = Size() - length;
+ memmove(&array_[0], &array_[length], remaining_samples * sizeof(int16_t));
+ first_free_ix_ -= length;
+ }
+}
+
+void AudioVector::PopBack(size_t length) {
+ // Never remove more than what is in the array.
+ length = std::min(length, Size());
+ first_free_ix_ -= length;
+}
+
+void AudioVector::Extend(size_t extra_length) {
+ Reserve(Size() + extra_length);
+ memset(&array_[first_free_ix_], 0, extra_length * sizeof(int16_t));
+ first_free_ix_ += extra_length;
+}
+
+void AudioVector::InsertAt(const int16_t* insert_this,
+ size_t length,
+ size_t position) {
+ Reserve(Size() + length);
+ // Cap the position at the current vector length, to be sure the iterator
+ // does not extend beyond the end of the vector.
+ position = std::min(Size(), position);
+ int16_t* insert_position_ptr = &array_[position];
+ size_t samples_to_move = Size() - position;
+ memmove(insert_position_ptr + length, insert_position_ptr,
+ samples_to_move * sizeof(int16_t));
+ memcpy(insert_position_ptr, insert_this, length * sizeof(int16_t));
+ first_free_ix_ += length;
+}
+
+void AudioVector::InsertZerosAt(size_t length,
+ size_t position) {
+ Reserve(Size() + length);
+ // Cap the position at the current vector length, to be sure the iterator
+ // does not extend beyond the end of the vector.
+ position = std::min(Size(), position);
+ int16_t* insert_position_ptr = &array_[position];
+ size_t samples_to_move = Size() - position;
+ memmove(insert_position_ptr + length, insert_position_ptr,
+ samples_to_move * sizeof(int16_t));
+ memset(insert_position_ptr, 0, length * sizeof(int16_t));
+ first_free_ix_ += length;
+}
+
+void AudioVector::OverwriteAt(const int16_t* insert_this,
+ size_t length,
+ size_t position) {
+ // Cap the insert position at the current array length.
+ position = std::min(Size(), position);
+ Reserve(position + length);
+ memcpy(&array_[position], insert_this, length * sizeof(int16_t));
+ if (position + length > Size()) {
+ // Array was expanded.
+ first_free_ix_ += position + length - Size();
+ }
+}
+
+void AudioVector::CrossFade(const AudioVector& append_this,
+ size_t fade_length) {
+ // Fade length cannot be longer than the current vector or |append_this|.
+ assert(fade_length <= Size());
+ assert(fade_length <= append_this.Size());
+ fade_length = std::min(fade_length, Size());
+ fade_length = std::min(fade_length, append_this.Size());
+ size_t position = Size() - fade_length;
+ // Cross fade the overlapping regions.
+ // |alpha| is the mixing factor in Q14.
+ // TODO(hlundin): Consider skipping +1 in the denominator to produce a
+ // smoother cross-fade, in particular at the end of the fade.
+ int alpha_step = 16384 / (static_cast<int>(fade_length) + 1);
+ int alpha = 16384;
+ for (size_t i = 0; i < fade_length; ++i) {
+ alpha -= alpha_step;
+ array_[position + i] = (alpha * array_[position + i] +
+ (16384 - alpha) * append_this[i] + 8192) >> 14;
+ }
+ assert(alpha >= 0); // Verify that the slope was correct.
+ // Append what is left of |append_this|.
+ size_t samples_to_push_back = append_this.Size() - fade_length;
+ if (samples_to_push_back > 0)
+ PushBack(&append_this[fade_length], samples_to_push_back);
+}
+
+const int16_t& AudioVector::operator[](size_t index) const {
+ return array_[index];
+}
+
+int16_t& AudioVector::operator[](size_t index) {
+ return array_[index];
+}
+
+void AudioVector::Reserve(size_t n) {
+ if (capacity_ < n) {
+ scoped_ptr<int16_t[]> temp_array(new int16_t[n]);
+ memcpy(temp_array.get(), array_.get(), Size() * sizeof(int16_t));
+ array_.swap(temp_array);
+ capacity_ = n;
+ }
+}
+
+} // namespace webrtc
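
AudioVector::CrossFade() above performs the linear mix in Q14 fixed point, with alpha starting at 16384 and stepping down by 16384 / (fade_length + 1) per sample. A floating-point sketch of the same ramp, kept here only as an illustration of the arithmetic and not as the shipped implementation:

#include <cstddef>
#include <cstdint>

// Cross-fades |fade_length| samples from |a| (fading out) into |b| (fading
// in), writing the mixed samples back into |a|.
void CrossFadeFloatSketch(int16_t* a, const int16_t* b, size_t fade_length) {
  const double alpha_step = 1.0 / (fade_length + 1);
  double alpha = 1.0;  // Weight of |a|; corresponds to 16384 in Q14.
  for (size_t i = 0; i < fade_length; ++i) {
    alpha -= alpha_step;  // Mirrors the Q14 decrement above.
    const double mixed = alpha * a[i] + (1.0 - alpha) * b[i];
    // Round to the nearest integer (the fixed-point code adds 8192 before
    // shifting right by 14 for the same purpose).
    a[i] = static_cast<int16_t>(mixed + 0.5);
  }
}
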
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h
new file mode 100644
index 00000000000..f8aabdb8e8e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_VECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_VECTOR_H_
+
+#include <string.h> // Access to size_t.
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class AudioVector {
+ public:
+ // Creates an empty AudioVector.
+ AudioVector()
+ : array_(new int16_t[kDefaultInitialSize]),
+ first_free_ix_(0),
+ capacity_(kDefaultInitialSize) {}
+
+ // Creates an AudioVector with an initial size.
+ explicit AudioVector(size_t initial_size)
+ : array_(new int16_t[initial_size]),
+ first_free_ix_(initial_size),
+ capacity_(initial_size) {
+ memset(array_.get(), 0, initial_size * sizeof(int16_t));
+ }
+
+ virtual ~AudioVector() {}
+
+ // Deletes all values and makes the vector empty.
+ virtual void Clear();
+
+ // Copies all values from this vector to |copy_to|. Any contents in |copy_to|
+ // are deleted before the copy operation. After the operation is done,
+ // |copy_to| will be an exact replica of this object.
+ virtual void CopyFrom(AudioVector* copy_to) const;
+
+ // Prepends the contents of AudioVector |prepend_this| to this object. The
+ // length of this object is increased by the length of |prepend_this|.
+ virtual void PushFront(const AudioVector& prepend_this);
+
+ // Same as above, but with an array |prepend_this| of |length| elements as
+ // source.
+ virtual void PushFront(const int16_t* prepend_this, size_t length);
+
+ // Same as PushFront but will append to the end of this object.
+ virtual void PushBack(const AudioVector& append_this);
+
+ // Same as PushFront but will append to the end of this object.
+ virtual void PushBack(const int16_t* append_this, size_t length);
+
+ // Removes |length| elements from the beginning of this object.
+ virtual void PopFront(size_t length);
+
+ // Removes |length| elements from the end of this object.
+ virtual void PopBack(size_t length);
+
+ // Extends this object with |extra_length| elements at the end. The new
+ // elements are initialized to zero.
+ virtual void Extend(size_t extra_length);
+
+ // Inserts |length| elements taken from the array |insert_this| and inserts
+ // them at |position|. The length of the AudioVector is increased by |length|.
+ // |position| = 0 means that the new values are prepended to the vector.
+ // |position| = Size() means that the new values are appended to the vector.
+ virtual void InsertAt(const int16_t* insert_this, size_t length,
+ size_t position);
+
+ // Like InsertAt, but inserts |length| zero elements at |position|.
+ virtual void InsertZerosAt(size_t length, size_t position);
+
+ // Overwrites |length| elements of this AudioVector with values taken from the
+ // array |insert_this|, starting at |position|. The definition of |position|
+ // is the same as for InsertAt(). If |length| and |position| are selected
+ // such that the new data extends beyond the end of the current AudioVector,
+ // the vector is extended to accommodate the new data.
+ virtual void OverwriteAt(const int16_t* insert_this,
+ size_t length,
+ size_t position);
+
+ // Appends |append_this| to the end of the current vector. Lets the two
+ // vectors overlap by |fade_length| samples, and cross-fade linearly in this
+ // region.
+ virtual void CrossFade(const AudioVector& append_this, size_t fade_length);
+
+ // Returns the number of elements in this AudioVector.
+ virtual size_t Size() const { return first_free_ix_; }
+
+ // Returns true if this AudioVector is empty.
+ virtual bool Empty() const { return (first_free_ix_ == 0); }
+
+ // Accesses and modifies an element of AudioVector.
+ const int16_t& operator[](size_t index) const;
+ int16_t& operator[](size_t index);
+
+ private:
+ static const size_t kDefaultInitialSize = 10;
+
+ void Reserve(size_t n);
+
+ scoped_ptr<int16_t[]> array_;
+ size_t first_free_ix_; // The first index after the last sample in array_.
+ // Note that this index may point outside of array_.
+ size_t capacity_; // Allocated number of samples in the array.
+
+ DISALLOW_COPY_AND_ASSIGN(AudioVector);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_VECTOR_H_
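
A small usage sketch of the class declared above, focused on the OverwriteAt() behaviour described in its comment (writes that run past the current end grow the vector); the function name and values are illustrative:

#include "webrtc/modules/audio_coding/neteq/audio_vector.h"

namespace {

void AudioVectorSketch() {
  webrtc::AudioVector vec;
  const int16_t samples[] = {1, 2, 3, 4, 5};
  vec.PushBack(samples, 5);       // vec = {1, 2, 3, 4, 5}, Size() == 5.
  const int16_t patch[] = {9, 9, 9};
  vec.OverwriteAt(patch, 3, 4);   // Starts at index 4 and runs past the end.
  // vec = {1, 2, 3, 4, 9, 9, 9}; Size() is now 7 because the vector grew.
}

}  // namespace
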
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc
new file mode 100644
index 00000000000..50da1fb46c4
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc
@@ -0,0 +1,394 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/audio_vector.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#include <string>
+
+#include "gtest/gtest.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class AudioVectorTest : public ::testing::Test {
+ protected:
+ virtual void SetUp() {
+ // Populate test array.
+ for (size_t i = 0; i < array_length(); ++i) {
+ array_[i] = i;
+ }
+ }
+
+ size_t array_length() const {
+ return sizeof(array_) / sizeof(array_[0]);
+ }
+
+ int16_t array_[10];
+};
+
+// Create and destroy AudioVector objects, both empty and with a predefined
+// length.
+TEST_F(AudioVectorTest, CreateAndDestroy) {
+ AudioVector vec1;
+ EXPECT_TRUE(vec1.Empty());
+ EXPECT_EQ(0u, vec1.Size());
+
+ size_t initial_size = 17;
+ AudioVector vec2(initial_size);
+ EXPECT_FALSE(vec2.Empty());
+ EXPECT_EQ(initial_size, vec2.Size());
+}
+
+// Test the subscript operator [] for getting and setting.
+TEST_F(AudioVectorTest, SubscriptOperator) {
+ AudioVector vec(array_length());
+ for (size_t i = 0; i < array_length(); ++i) {
+ vec[i] = static_cast<int16_t>(i);
+ const int16_t& value = vec[i]; // Make sure to use the const version.
+ EXPECT_EQ(static_cast<int16_t>(i), value);
+ }
+}
+
+// Test the PushBack method and the CopyFrom method. The Clear method is also
+// invoked.
+TEST_F(AudioVectorTest, PushBackAndCopy) {
+ AudioVector vec;
+ AudioVector vec_copy;
+ vec.PushBack(array_, array_length());
+ vec.CopyFrom(&vec_copy); // Copy from |vec| to |vec_copy|.
+ ASSERT_EQ(array_length(), vec.Size());
+ ASSERT_EQ(array_length(), vec_copy.Size());
+ for (size_t i = 0; i < array_length(); ++i) {
+ EXPECT_EQ(array_[i], vec[i]);
+ EXPECT_EQ(array_[i], vec_copy[i]);
+ }
+
+ // Clear |vec| and verify that it is empty.
+ vec.Clear();
+ EXPECT_TRUE(vec.Empty());
+
+ // Now copy the empty vector and verify that the copy becomes empty too.
+ vec.CopyFrom(&vec_copy);
+ EXPECT_TRUE(vec_copy.Empty());
+}
+
+// Try to copy to a NULL pointer. Nothing should happen.
+TEST_F(AudioVectorTest, CopyToNull) {
+ AudioVector vec;
+ AudioVector* vec_copy = NULL;
+ vec.PushBack(array_, array_length());
+ vec.CopyFrom(vec_copy);
+}
+
+// Test the PushBack method with another AudioVector as input argument.
+TEST_F(AudioVectorTest, PushBackVector) {
+ static const size_t kLength = 10;
+ AudioVector vec1(kLength);
+ AudioVector vec2(kLength);
+ // Set the first vector to [0, 1, ..., kLength - 1].
+ // Set the second vector to [kLength, kLength + 1, ..., 2 * kLength - 1].
+ for (size_t i = 0; i < kLength; ++i) {
+ vec1[i] = static_cast<int16_t>(i);
+ vec2[i] = static_cast<int16_t>(i + kLength);
+ }
+ // Append vec2 to the back of vec1.
+ vec1.PushBack(vec2);
+ ASSERT_EQ(2 * kLength, vec1.Size());
+ for (size_t i = 0; i < 2 * kLength; ++i) {
+ EXPECT_EQ(static_cast<int16_t>(i), vec1[i]);
+ }
+}
+
+// Test the PushFront method.
+TEST_F(AudioVectorTest, PushFront) {
+ AudioVector vec;
+ vec.PushFront(array_, array_length());
+ ASSERT_EQ(array_length(), vec.Size());
+ for (size_t i = 0; i < array_length(); ++i) {
+ EXPECT_EQ(array_[i], vec[i]);
+ }
+}
+
+// Test the PushFront method with another AudioVector as input argument.
+TEST_F(AudioVectorTest, PushFrontVector) {
+ static const size_t kLength = 10;
+ AudioVector vec1(kLength);
+ AudioVector vec2(kLength);
+ // Set the first vector to [0, 1, ..., kLength - 1].
+ // Set the second vector to [kLength, kLength + 1, ..., 2 * kLength - 1].
+ for (size_t i = 0; i < kLength; ++i) {
+ vec1[i] = static_cast<int16_t>(i);
+ vec2[i] = static_cast<int16_t>(i + kLength);
+ }
+ // Prepend vec1 to the front of vec2.
+ vec2.PushFront(vec1);
+ ASSERT_EQ(2 * kLength, vec2.Size());
+ for (size_t i = 0; i < 2 * kLength; ++i) {
+ EXPECT_EQ(static_cast<int16_t>(i), vec2[i]);
+ }
+}
+
+// Test the PopFront method.
+TEST_F(AudioVectorTest, PopFront) {
+ AudioVector vec;
+ vec.PushBack(array_, array_length());
+ vec.PopFront(1); // Remove one element.
+ EXPECT_EQ(array_length() - 1u, vec.Size());
+ for (size_t i = 0; i < array_length() - 1; ++i) {
+ EXPECT_EQ(static_cast<int16_t>(i + 1), vec[i]);
+ }
+ vec.PopFront(array_length()); // Remove more elements than vector size.
+ EXPECT_EQ(0u, vec.Size());
+}
+
+// Test the PopBack method.
+TEST_F(AudioVectorTest, PopBack) {
+ AudioVector vec;
+ vec.PushBack(array_, array_length());
+ vec.PopBack(1); // Remove one element.
+ EXPECT_EQ(array_length() - 1u, vec.Size());
+ for (size_t i = 0; i < array_length() - 1; ++i) {
+ EXPECT_EQ(static_cast<int16_t>(i), vec[i]);
+ }
+ vec.PopBack(array_length()); // Remove more elements than vector size.
+ EXPECT_EQ(0u, vec.Size());
+}
+
+// Test the Extend method.
+TEST_F(AudioVectorTest, Extend) {
+ AudioVector vec;
+ vec.PushBack(array_, array_length());
+ vec.Extend(5); // Extend with 5 elements, which should all be zeros.
+ ASSERT_EQ(array_length() + 5u, vec.Size());
+ // Verify that all are zero.
+ for (size_t i = array_length(); i < array_length() + 5; ++i) {
+ EXPECT_EQ(0, vec[i]);
+ }
+}
+
+// Test the InsertAt method with an insert position in the middle of the vector.
+TEST_F(AudioVectorTest, InsertAt) {
+ AudioVector vec;
+ vec.PushBack(array_, array_length());
+ static const int kNewLength = 5;
+ int16_t new_array[kNewLength];
+ // Set array elements to {100, 101, 102, ... }.
+ for (int i = 0; i < kNewLength; ++i) {
+ new_array[i] = 100 + i;
+ }
+ int insert_position = 5;
+ vec.InsertAt(new_array, kNewLength, insert_position);
+ // Verify that the vector looks as follows:
+ // {0, 1, ..., |insert_position| - 1, 100, 101, ..., 100 + kNewLength - 1,
+ // |insert_position|, |insert_position| + 1, ..., kLength - 1}.
+ size_t pos = 0;
+ for (int i = 0; i < insert_position; ++i) {
+ EXPECT_EQ(array_[i], vec[pos]);
+ ++pos;
+ }
+ for (int i = 0; i < kNewLength; ++i) {
+ EXPECT_EQ(new_array[i], vec[pos]);
+ ++pos;
+ }
+ for (size_t i = insert_position; i < array_length(); ++i) {
+ EXPECT_EQ(array_[i], vec[pos]);
+ ++pos;
+ }
+}
+
+// Test the InsertZerosAt method with an insert position in the middle of the
+// vector. Use the InsertAt method as reference.
+TEST_F(AudioVectorTest, InsertZerosAt) {
+ AudioVector vec;
+ AudioVector vec_ref;
+ vec.PushBack(array_, array_length());
+ vec_ref.PushBack(array_, array_length());
+ static const int kNewLength = 5;
+ int insert_position = 5;
+ vec.InsertZerosAt(kNewLength, insert_position);
+ int16_t new_array[kNewLength] = {0}; // All zero elements.
+ vec_ref.InsertAt(new_array, kNewLength, insert_position);
+ // Verify that the vectors are identical.
+ ASSERT_EQ(vec_ref.Size(), vec.Size());
+ for (size_t i = 0; i < vec.Size(); ++i) {
+ EXPECT_EQ(vec_ref[i], vec[i]);
+ }
+}
+
+// Test the InsertAt method with an insert position at the start of the vector.
+TEST_F(AudioVectorTest, InsertAtBeginning) {
+ AudioVector vec;
+ vec.PushBack(array_, array_length());
+ static const int kNewLength = 5;
+ int16_t new_array[kNewLength];
+ // Set array elements to {100, 101, 102, ... }.
+ for (int i = 0; i < kNewLength; ++i) {
+ new_array[i] = 100 + i;
+ }
+ int insert_position = 0;
+ vec.InsertAt(new_array, kNewLength, insert_position);
+ // Verify that the vector looks as follows:
+ // {100, 101, ..., 100 + kNewLength - 1,
+ // 0, 1, ..., kLength - 1}.
+ size_t pos = 0;
+ for (int i = 0; i < kNewLength; ++i) {
+ EXPECT_EQ(new_array[i], vec[pos]);
+ ++pos;
+ }
+ for (size_t i = insert_position; i < array_length(); ++i) {
+ EXPECT_EQ(array_[i], vec[pos]);
+ ++pos;
+ }
+}
+
+// Test the InsertAt method with an insert position at the end of the vector.
+TEST_F(AudioVectorTest, InsertAtEnd) {
+ AudioVector vec;
+ vec.PushBack(array_, array_length());
+ static const int kNewLength = 5;
+ int16_t new_array[kNewLength];
+ // Set array elements to {100, 101, 102, ... }.
+ for (int i = 0; i < kNewLength; ++i) {
+ new_array[i] = 100 + i;
+ }
+ int insert_position = array_length();
+ vec.InsertAt(new_array, kNewLength, insert_position);
+ // Verify that the vector looks as follows:
+ // {0, 1, ..., kLength - 1, 100, 101, ..., 100 + kNewLength - 1 }.
+ size_t pos = 0;
+ for (size_t i = 0; i < array_length(); ++i) {
+ EXPECT_EQ(array_[i], vec[pos]);
+ ++pos;
+ }
+ for (int i = 0; i < kNewLength; ++i) {
+ EXPECT_EQ(new_array[i], vec[pos]);
+ ++pos;
+ }
+}
+
+// Test the InsertAt method with an insert position beyond the end of the
+// vector. Verify that a position beyond the end of the vector does not lead to
+// an error. The expected outcome is the same as if the vector end was used as
+// input position. That is, the input position should be capped at the maximum
+// allowed value.
+TEST_F(AudioVectorTest, InsertBeyondEnd) {
+ AudioVector vec;
+ vec.PushBack(array_, array_length());
+ static const int kNewLength = 5;
+ int16_t new_array[kNewLength];
+ // Set array elements to {100, 101, 102, ... }.
+ for (int i = 0; i < kNewLength; ++i) {
+ new_array[i] = 100 + i;
+ }
+ int insert_position = array_length() + 10; // Too large.
+ vec.InsertAt(new_array, kNewLength, insert_position);
+ // Verify that the vector looks as follows:
+ // {0, 1, ..., kLength - 1, 100, 101, ..., 100 + kNewLength - 1 }.
+ size_t pos = 0;
+ for (size_t i = 0; i < array_length(); ++i) {
+ EXPECT_EQ(array_[i], vec[pos]);
+ ++pos;
+ }
+ for (int i = 0; i < kNewLength; ++i) {
+ EXPECT_EQ(new_array[i], vec[pos]);
+ ++pos;
+ }
+}
+
+// Test the OverwriteAt method with a position such that all of the new values
+// fit within the old vector.
+TEST_F(AudioVectorTest, OverwriteAt) {
+ AudioVector vec;
+ vec.PushBack(array_, array_length());
+ static const int kNewLength = 5;
+ int16_t new_array[kNewLength];
+ // Set array elements to {100, 101, 102, ... }.
+ for (int i = 0; i < kNewLength; ++i) {
+ new_array[i] = 100 + i;
+ }
+ size_t insert_position = 2;
+ vec.OverwriteAt(new_array, kNewLength, insert_position);
+ // Verify that the vector looks as follows:
+ // {0, ..., |insert_position| - 1, 100, 101, ..., 100 + kNewLength - 1,
+ // |insert_position|, |insert_position| + 1, ..., kLength - 1}.
+ size_t pos = 0;
+ for (pos = 0; pos < insert_position; ++pos) {
+ EXPECT_EQ(array_[pos], vec[pos]);
+ }
+ for (int i = 0; i < kNewLength; ++i) {
+ EXPECT_EQ(new_array[i], vec[pos]);
+ ++pos;
+ }
+ for (; pos < array_length(); ++pos) {
+ EXPECT_EQ(array_[pos], vec[pos]);
+ }
+}
+
+// Test the OverwriteAt method with a position such that some of the new values
+// extend beyond the end of the current vector. This is valid, and the vector is
+// expected to expand to accommodate the new values.
+TEST_F(AudioVectorTest, OverwriteBeyondEnd) {
+ AudioVector vec;
+ vec.PushBack(array_, array_length());
+ static const int kNewLength = 5;
+ int16_t new_array[kNewLength];
+ // Set array elements to {100, 101, 102, ... }.
+ for (int i = 0; i < kNewLength; ++i) {
+ new_array[i] = 100 + i;
+ }
+ int insert_position = array_length() - 2;
+ vec.OverwriteAt(new_array, kNewLength, insert_position);
+ ASSERT_EQ(array_length() - 2u + kNewLength, vec.Size());
+ // Verify that the vector looks as follows:
+ // {0, ..., |insert_position| - 1, 100, 101, ..., 100 + kNewLength - 1,
+ // |insert_position|, |insert_position| + 1, ..., kLength - 1}.
+ int pos = 0;
+ for (pos = 0; pos < insert_position; ++pos) {
+ EXPECT_EQ(array_[pos], vec[pos]);
+ }
+ for (int i = 0; i < kNewLength; ++i) {
+ EXPECT_EQ(new_array[i], vec[pos]);
+ ++pos;
+ }
+ // Verify that we checked to the end of |vec|.
+ EXPECT_EQ(vec.Size(), static_cast<size_t>(pos));
+}
+
+TEST_F(AudioVectorTest, CrossFade) {
+ static const size_t kLength = 100;
+ static const size_t kFadeLength = 10;
+ AudioVector vec1(kLength);
+ AudioVector vec2(kLength);
+ // Set all vector elements to 0 in |vec1| and 100 in |vec2|.
+ for (size_t i = 0; i < kLength; ++i) {
+ vec1[i] = 0;
+ vec2[i] = 100;
+ }
+ vec1.CrossFade(vec2, kFadeLength);
+ ASSERT_EQ(2 * kLength - kFadeLength, vec1.Size());
+ // First part untouched.
+ for (size_t i = 0; i < kLength - kFadeLength; ++i) {
+ EXPECT_EQ(0, vec1[i]);
+ }
+ // Check mixing zone.
+ for (size_t i = 0 ; i < kFadeLength; ++i) {
+ EXPECT_NEAR((i + 1) * 100 / (kFadeLength + 1),
+ vec1[kLength - kFadeLength + i], 1);
+ }
+ // Second part untouched.
+ for (size_t i = kLength; i < vec1.Size(); ++i) {
+ EXPECT_EQ(100, vec1[i]);
+ }
+}
+
+} // namespace webrtc
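
The mixing-zone expectation in the CrossFade test above follows directly from the Q14 ramp in AudioVector::CrossFade(): the weight of vec2 at mixing-zone index i is (i + 1) / (kFadeLength + 1), so with vec1 holding 0 and vec2 holding 100 the mixed samples are approximately 100 * (i + 1) / 11, i.e. 9, 18, 27, ..., 90, which is what EXPECT_NEAR(..., 1) checks with one unit of tolerance for fixed-point rounding.
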
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.c
deleted file mode 100644
index 4dbd81ed665..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.c
+++ /dev/null
@@ -1,783 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the implementation of automatic buffer level optimization.
- */
-
-#include "automode.h"
-
-#include <assert.h>
-
-#include "signal_processing_library.h"
-
-#include "neteq_defines.h"
-
-#ifdef NETEQ_DELAY_LOGGING
-/* special code for offline delay logging */
-#include <stdio.h>
-#include "delay_logging.h"
-
-extern FILE *delay_fid2; /* file pointer to delay log file */
-#endif /* NETEQ_DELAY_LOGGING */
-
-// These two functions are copied from module_common_types.h, but adapted for C.
-int WebRtcNetEQ_IsNewerSequenceNumber(uint16_t sequence_number,
- uint16_t prev_sequence_number) {
- return sequence_number != prev_sequence_number &&
- ((uint16_t) (sequence_number - prev_sequence_number)) < 0x8000;
-}
-
-int WebRtcNetEQ_IsNewerTimestamp(uint32_t timestamp, uint32_t prev_timestamp) {
- return timestamp != prev_timestamp &&
- ((uint32_t) (timestamp - prev_timestamp)) < 0x80000000;
-}
-
-int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
- uint16_t seqNumber, uint32_t timeStamp,
- int32_t fsHz, int mdCodec, int streamingMode)
-{
- uint32_t timeIat; /* inter-arrival time */
- int i;
- int32_t tempsum = 0; /* temp summation */
- int32_t tempvar; /* temporary variable */
- int retval = 0; /* return value */
- int16_t packetLenSamp; /* packet speech length in samples */
-
- /****************/
- /* Sanity check */
- /****************/
-
- if (maxBufLen <= 1 || fsHz <= 0)
- {
- /* maxBufLen must be at least 2 and fsHz must both be strictly positive */
- return -1;
- }
-
- /****************************/
- /* Update packet statistics */
- /****************************/
-
- /* Try calculating packet length from current and previous timestamps */
- if (!WebRtcNetEQ_IsNewerTimestamp(timeStamp, inst->lastTimeStamp) ||
- !WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo))
- {
- /* Wrong timestamp or sequence order; revert to backup plan */
- packetLenSamp = inst->packetSpeechLenSamp; /* use stored value */
- }
- else
- {
- /* calculate timestamps per packet */
- packetLenSamp = (int16_t) WebRtcSpl_DivU32U16(timeStamp - inst->lastTimeStamp,
- seqNumber - inst->lastSeqNo);
- }
-
- /* Check that the packet size is positive; if not, the statistics cannot be updated. */
- if (inst->firstPacketReceived && packetLenSamp > 0)
- { /* packet size ok */
-
- /* calculate inter-arrival time in integer packets (rounding down) */
- timeIat = WebRtcSpl_DivW32W16(inst->packetIatCountSamp, packetLenSamp);
-
- /* Special operations for streaming mode */
- if (streamingMode != 0)
- {
- /*
- * Calculate IAT in Q8, including fractions of a packet (i.e., more accurate
- * than timeIat).
- */
- int16_t timeIatQ8 = (int16_t) WebRtcSpl_DivW32W16(
- WEBRTC_SPL_LSHIFT_W32(inst->packetIatCountSamp, 8), packetLenSamp);
-
- /*
- * Calculate cumulative sum iat with sequence number compensation (ideal arrival
- * times makes this sum zero).
- */
- inst->cSumIatQ8 += (timeIatQ8
- - WEBRTC_SPL_LSHIFT_W32(seqNumber - inst->lastSeqNo, 8));
-
- /* subtract drift term */
- inst->cSumIatQ8 -= CSUM_IAT_DRIFT;
-
- /* ensure not negative */
- inst->cSumIatQ8 = WEBRTC_SPL_MAX(inst->cSumIatQ8, 0);
-
- /* remember max */
- if (inst->cSumIatQ8 > inst->maxCSumIatQ8)
- {
- inst->maxCSumIatQ8 = inst->cSumIatQ8;
- inst->maxCSumUpdateTimer = 0;
- }
-
- /* too long since the last maximum was observed; decrease max value */
- if (inst->maxCSumUpdateTimer > (uint32_t) WEBRTC_SPL_MUL_32_16(fsHz,
- MAX_STREAMING_PEAK_PERIOD))
- {
- inst->maxCSumIatQ8 -= 4; /* remove 1000*4/256 = 15.6 ms/s */
- }
- } /* end of streaming mode */
-
- /* check for discontinuous packet sequence and re-ordering */
- if (WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo + 1))
- {
- /* Compensate for gap in the sequence numbers.
- * Reduce IAT with expected extra time due to lost packets, but ensure that
- * the IAT is not negative.
- */
- timeIat -= WEBRTC_SPL_MIN(timeIat,
- (uint16_t) (seqNumber - (uint16_t) (inst->lastSeqNo + 1)));
- }
- else if (!WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo))
- {
- /* compensate for re-ordering */
- timeIat += (uint16_t) (inst->lastSeqNo + 1 - seqNumber);
- }
-
- /* saturate IAT at maximum value */
- timeIat = WEBRTC_SPL_MIN( timeIat, MAX_IAT );
-
- /* update iatProb = forgetting_factor * iatProb for all elements */
- for (i = 0; i <= MAX_IAT; i++)
- {
- int32_t tempHi, tempLo; /* Temporary variables */
-
- /*
- * Multiply iatProbFact (Q15) with iatProb (Q30) and right-shift 15 steps
- * to come back to Q30. The operation is done in two steps:
- */
-
- /*
- * 1) Multiply the high 16 bits (15 bits + sign) of iatProb. Shift iatProb
- * 16 steps right to get the high 16 bits in a int16_t prior to
- * multiplication, and left-shift with 1 afterwards to come back to
- * Q30 = (Q15 * (Q30>>16)) << 1.
- */
- tempHi = WEBRTC_SPL_MUL_16_16(inst->iatProbFact,
- (int16_t) WEBRTC_SPL_RSHIFT_W32(inst->iatProb[i], 16));
- tempHi = WEBRTC_SPL_LSHIFT_W32(tempHi, 1); /* left-shift 1 step */
-
- /*
- * 2) Isolate and multiply the low 16 bits of iatProb. Right-shift 15 steps
- * afterwards to come back to Q30 = (Q15 * Q30) >> 15.
- */
- tempLo = inst->iatProb[i] & 0x0000FFFF; /* sift out the 16 low bits */
- tempLo = WEBRTC_SPL_MUL_16_U16(inst->iatProbFact,
- (uint16_t) tempLo);
- tempLo = WEBRTC_SPL_RSHIFT_W32(tempLo, 15);
-
- /* Finally, add the high and low parts */
- inst->iatProb[i] = tempHi + tempLo;
-
- /* Sum all vector elements while we are at it... */
- tempsum += inst->iatProb[i];
- }
-
- /*
- * Increase the probability for the currently observed inter-arrival time
- * with 1 - iatProbFact. The factor is in Q15, iatProb in Q30;
- * hence, left-shift 15 steps to obtain result in Q30.
- */
- inst->iatProb[timeIat] += (32768 - inst->iatProbFact) << 15;
-
- tempsum += (32768 - inst->iatProbFact) << 15; /* add to vector sum */
-
- /*
- * Update iatProbFact (changes only during the first seconds after reset)
- * The factor converges to IAT_PROB_FACT.
- */
- inst->iatProbFact += (IAT_PROB_FACT - inst->iatProbFact + 3) >> 2;
-
- /* iatProb should sum up to 1 (in Q30). */
- tempsum -= 1 << 30; /* should be zero */
-
- /* Check if it does, correct if it doesn't. */
- if (tempsum > 0)
- {
- /* tempsum too large => decrease a few values in the beginning */
- i = 0;
- while (i <= MAX_IAT && tempsum > 0)
- {
- /* Remove iatProb[i] / 16 from iatProb, but not more than tempsum */
- tempvar = WEBRTC_SPL_MIN(tempsum, inst->iatProb[i] >> 4);
- inst->iatProb[i++] -= tempvar;
- tempsum -= tempvar;
- }
- }
- else if (tempsum < 0)
- {
- /* tempsum too small => increase a few values in the beginning */
- i = 0;
- while (i <= MAX_IAT && tempsum < 0)
- {
- /* Add iatProb[i] / 16 to iatProb, but not more than tempsum */
- tempvar = WEBRTC_SPL_MIN(-tempsum, inst->iatProb[i] >> 4);
- inst->iatProb[i++] += tempvar;
- tempsum += tempvar;
- }
- }
-
- /* Calculate optimal buffer level based on updated statistics */
- tempvar = (int32_t) WebRtcNetEQ_CalcOptimalBufLvl(inst, fsHz, mdCodec, timeIat,
- streamingMode);
- if (tempvar > 0)
- {
- int high_lim_delay;
- /* Convert the minimum delay from milliseconds to packets in Q8.
- * |fsHz| is sampling rate in Hertz, and |packetLenSamp|
- * is the number of samples per packet (according to the last
- * decoding).
- */
- int32_t minimum_delay_q8 = ((inst->minimum_delay_ms *
- (fsHz / 1000)) << 8) / packetLenSamp;
-
- int32_t maximum_delay_q8 = ((inst->maximum_delay_ms *
- (fsHz / 1000)) << 8) / packetLenSamp;
-
- inst->optBufLevel = tempvar;
-
- if (streamingMode != 0)
- {
- inst->optBufLevel = WEBRTC_SPL_MAX(inst->optBufLevel,
- inst->maxCSumIatQ8);
- }
-
- /* The required delay. */
- inst->required_delay_q8 = inst->optBufLevel;
-
- // Maintain the target delay.
- inst->optBufLevel = WEBRTC_SPL_MAX(inst->optBufLevel,
- minimum_delay_q8);
-
- if (maximum_delay_q8 > 0) {
- // Make sure that max is at least one packet length.
- maximum_delay_q8 = WEBRTC_SPL_MAX(maximum_delay_q8, (1 << 8));
- inst->optBufLevel = WEBRTC_SPL_MIN(inst->optBufLevel,
- maximum_delay_q8);
- }
- /*********/
- /* Limit */
- /*********/
-
- /* Subtract extra delay from maxBufLen */
- if (inst->extraDelayMs > 0 && inst->packetSpeechLenSamp > 0)
- {
- maxBufLen -= inst->extraDelayMs / inst->packetSpeechLenSamp * fsHz / 1000;
- maxBufLen = WEBRTC_SPL_MAX(maxBufLen, 1); // sanity: at least one packet
- }
-
- maxBufLen = WEBRTC_SPL_LSHIFT_W32(maxBufLen, 8); /* shift to Q8 */
-
- /* Enforce upper limit; 75% of maxBufLen */
- /* 1/2 + 1/4 = 75% */
- high_lim_delay = (maxBufLen >> 1) + (maxBufLen >> 2);
- inst->optBufLevel = WEBRTC_SPL_MIN(inst->optBufLevel,
- high_lim_delay);
- inst->required_delay_q8 = WEBRTC_SPL_MIN(inst->required_delay_q8,
- high_lim_delay);
- }
- else
- {
- retval = (int) tempvar;
- }
-
- } /* end if */
-
- /*******************************/
- /* Update post-call statistics */
- /*******************************/
-
- /* Calculate inter-arrival time in ms = packetIatCountSamp / (fsHz / 1000) */
- timeIat = WEBRTC_SPL_UDIV(
- WEBRTC_SPL_UMUL_32_16(inst->packetIatCountSamp, (int16_t) 1000),
- (uint32_t) fsHz);
-
- /* Increase counter corresponding to current inter-arrival time */
- if (timeIat > 2000)
- {
- inst->countIAT2000ms++;
- }
- else if (timeIat > 1000)
- {
- inst->countIAT1000ms++;
- }
- else if (timeIat > 500)
- {
- inst->countIAT500ms++;
- }
-
- if (timeIat > inst->longestIATms)
- {
- /* update maximum value */
- inst->longestIATms = timeIat;
- }
-
- /***********************************/
- /* Prepare for next packet arrival */
- /***********************************/
-
- inst->packetIatCountSamp = 0; /* reset inter-arrival time counter */
-
- inst->lastSeqNo = seqNumber; /* remember current sequence number */
-
- inst->lastTimeStamp = timeStamp; /* remember current timestamp */
-
- inst->firstPacketReceived = 1;
-
- return retval;
-}
-
-
-int16_t WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, int32_t fsHz,
- int mdCodec, uint32_t timeIatPkts,
- int streamingMode)
-{
-
- int32_t sum1 = 1 << 30; /* assign to 1 in Q30 */
- int16_t B;
- uint16_t Bopt;
- int i;
- int32_t betaInv; /* optimization parameter */
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- int temp_var;
-#endif
-
- /****************/
- /* Sanity check */
- /****************/
-
- if (fsHz <= 0)
- {
- /* fsHz must be strictly positive */
- return -1;
- }
-
- /***********************************************/
- /* Get betaInv parameter based on playout mode */
- /***********************************************/
-
- if (streamingMode)
- {
- /* streaming (listen-only) mode */
- betaInv = AUTOMODE_STREAMING_BETA_INV_Q30;
- }
- else
- {
- /* normal mode */
- betaInv = AUTOMODE_BETA_INV_Q30;
- }
-
- /*******************************************************************/
- /* Calculate optimal buffer level without considering jitter peaks */
- /*******************************************************************/
-
- /*
- * Find the B for which the probability of observing an inter-arrival time larger
- * than or equal to B is less than or equal to betaInv.
- */
- B = 0; /* start from the beginning of iatProb */
- sum1 -= inst->iatProb[B]; /* ensure that optimal level is not less than 1 */
-
- do
- {
- /*
- * Subtract the probabilities one by one until the sum is no longer greater
- * than betaInv.
- */
- sum1 -= inst->iatProb[++B];
- }
- while ((sum1 > betaInv) && (B < MAX_IAT));
-
- Bopt = B; /* This is our primary value for the optimal buffer level Bopt */
-
- if (mdCodec)
- {
- /*
- * Use alternative cost function when multiple description codec is in use.
- * Do not have to re-calculate all points, just back off a few steps from
- * previous value of B.
- */
- int32_t sum2 = sum1; /* copy sum1 */
-
- while ((sum2 <= betaInv + inst->iatProb[Bopt]) && (Bopt > 0))
- {
- /* Go backwards in the sum until the modified cost function solution is found */
- sum2 += inst->iatProb[Bopt--];
- }
-
- Bopt++; /* This is the optimal level when using an MD codec */
-
- /* Now, Bopt and B can have different values. */
- }
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF;
- if (fwrite( &temp_var, sizeof(int), 1, delay_fid2 ) != 1) {
- return -1;
- }
- temp_var = (int) (Bopt * inst->packetSpeechLenSamp);
-#endif
-
- /******************************************************************/
- /* Make levelFiltFact adaptive: Larger B <=> larger levelFiltFact */
- /******************************************************************/
-
- switch (B)
- {
- case 0:
- case 1:
- {
- inst->levelFiltFact = 251;
- break;
- }
- case 2:
- case 3:
- {
- inst->levelFiltFact = 252;
- break;
- }
- case 4:
- case 5:
- case 6:
- case 7:
- {
- inst->levelFiltFact = 253;
- break;
- }
- default: /* B > 7 */
- {
- inst->levelFiltFact = 254;
- break;
- }
- }
-
- /************************/
- /* Peak mode operations */
- /************************/
-
- /* Compare current IAT with peak threshold
- *
- * If IAT > optimal level + threshold (+1 for MD codecs)
- * or if IAT > 2 * optimal level (note: the optimal level is still in whole packets here; it is scaled to Q8 further down):
- */
- if (timeIatPkts > (uint32_t) (Bopt + inst->peakThresholdPkt + (mdCodec != 0))
- || timeIatPkts > (uint32_t) WEBRTC_SPL_LSHIFT_U16(Bopt, 1))
- {
- /* A peak is observed */
-
- if (inst->peakIndex == -1)
- {
- /* this is the first peak; prepare for next peak */
- inst->peakIndex = 0;
- /* set the mode-disable counter */
- inst->peakModeDisabled = WEBRTC_SPL_LSHIFT_W16(1, NUM_PEAKS_REQUIRED-2);
- }
- else if (inst->peakIatCountSamp
- <= (uint32_t) WEBRTC_SPL_MUL_32_16(fsHz, MAX_PEAK_PERIOD))
- {
- /* This is not the first peak and the period time is valid */
-
- /* store time elapsed since last peak */
- inst->peakPeriodSamp[inst->peakIndex] = inst->peakIatCountSamp;
-
- /* saturate height to 16 bits */
- inst->peakHeightPkt[inst->peakIndex] =
- (int16_t) WEBRTC_SPL_MIN(timeIatPkts, WEBRTC_SPL_WORD16_MAX);
-
- /* increment peakIndex and wrap/modulo */
- inst->peakIndex = (inst->peakIndex + 1) & PEAK_INDEX_MASK;
-
- /* process peak vectors */
- inst->curPeakHeight = 0;
- inst->curPeakPeriod = 0;
-
- for (i = 0; i < NUM_PEAKS; i++)
- {
- /* Find maximum of peak heights and peak periods */
- inst->curPeakHeight
- = WEBRTC_SPL_MAX(inst->curPeakHeight, inst->peakHeightPkt[i]);
- inst->curPeakPeriod
- = WEBRTC_SPL_MAX(inst->curPeakPeriod, inst->peakPeriodSamp[i]);
-
- }
-
- inst->peakModeDisabled >>= 1; /* decrease mode-disable "counter" */
-
- }
- else if (inst->peakIatCountSamp > (uint32_t) WEBRTC_SPL_MUL_32_16(fsHz,
- WEBRTC_SPL_LSHIFT_W16(MAX_PEAK_PERIOD, 1)))
- {
- /*
- * More than 2 * MAX_PEAK_PERIOD has elapsed since last peak;
- * too long time => reset peak statistics
- */
- inst->curPeakHeight = 0;
- inst->curPeakPeriod = 0;
- for (i = 0; i < NUM_PEAKS; i++)
- {
- inst->peakHeightPkt[i] = 0;
- inst->peakPeriodSamp[i] = 0;
- }
-
- inst->peakIndex = -1; /* Next peak is first peak */
- inst->peakIatCountSamp = 0;
- }
-
- inst->peakIatCountSamp = 0; /* Reset peak interval timer */
- } /* end if peak is observed */
-
- /* Evaluate peak mode conditions */
-
- /*
- * If not disabled (enough peaks have been observed) and
- * time since last peak is less than two peak periods.
- */
- inst->peakFound = 0;
- if ((!inst->peakModeDisabled) && (inst->peakIatCountSamp
- <= WEBRTC_SPL_LSHIFT_W32(inst->curPeakPeriod , 1)))
- {
- /* Engage peak mode */
- inst->peakFound = 1;
- /* Set optimal buffer level to curPeakHeight (if it's not already larger) */
- Bopt = WEBRTC_SPL_MAX(Bopt, inst->curPeakHeight);
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- temp_var = (int) -(Bopt * inst->packetSpeechLenSamp);
-#endif
- }
-
- /* Scale Bopt to Q8 */
- Bopt = WEBRTC_SPL_LSHIFT_U16(Bopt,8);
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- if (fwrite( &temp_var, sizeof(int), 1, delay_fid2 ) != 1) {
- return -1;
- }
-#endif
-
- /* Sanity check: Bopt must be strictly positive */
- if (Bopt <= 0)
- {
- Bopt = WEBRTC_SPL_LSHIFT_W16(1, 8); /* 1 in Q8 */
- }
-
- return Bopt; /* return value in Q8 */
-}
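The search at the top of WebRtcNetEQ_CalcOptimalBufLvl above picks the smallest B for which the probability of seeing an inter-arrival time of B packets or more has dropped to betaInv. A minimal floating-point sketch of that idea (illustration only, not part of this change; iat_prob and the function name are stand-ins for inst->iatProb, which the real code keeps in Q30):

#include <vector>

// Smallest B (>= 1) with P(IAT >= B) <= beta_inv; mirrors the do/while loop
// in WebRtcNetEQ_CalcOptimalBufLvl, but in floating point for readability.
// Assumes iat_prob has at least two entries and sums to (at most) 1.
int OptimalBufferLevelSketch(const std::vector<double>& iat_prob,
                             double beta_inv) {
  double tail = 1.0;        // P(IAT >= 0) is 1 by definition.
  int b = 0;
  tail -= iat_prob[b];      // Guarantees the returned level is at least 1.
  do {
    tail -= iat_prob[++b];  // Peel off probabilities until the tail is small.
  } while (tail > beta_inv && b + 1 < static_cast<int>(iat_prob.size()));
  return b;                 // Optimal buffer level in whole packets.
}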
-
-
-int WebRtcNetEQ_BufferLevelFilter(int32_t curSizeMs8, AutomodeInst_t *inst,
- int sampPerCall, int16_t fsMult)
-{
-
- int16_t curSizeFrames;
-
- /****************/
- /* Sanity check */
- /****************/
-
- if (sampPerCall <= 0 || fsMult <= 0)
- {
- /* sampPerCall and fsMult must both be strictly positive */
- return -1;
- }
-
- /* Check if packet size has been detected */
- if (inst->packetSpeechLenSamp > 0)
- {
- /*
- * Current buffer level in packet lengths
- * = (curSizeMs8 * fsMult) / packetSpeechLenSamp
- */
- curSizeFrames = (int16_t) WebRtcSpl_DivW32W16(
- WEBRTC_SPL_MUL_32_16(curSizeMs8, fsMult), inst->packetSpeechLenSamp);
- }
- else
- {
- curSizeFrames = 0;
- }
-
- /* Filter buffer level */
- if (inst->levelFiltFact > 0) /* check that filter factor is set */
- {
- /* Filter:
- * buffLevelFilt = levelFiltFact * buffLevelFilt
- * + (1-levelFiltFact) * curSizeFrames
- *
- * levelFiltFact is in Q8
- */
- inst->buffLevelFilt = ((inst->levelFiltFact * inst->buffLevelFilt) >> 8) +
- (256 - inst->levelFiltFact) * curSizeFrames;
- }
-
- /* Account for time-scale operations (accelerate and pre-emptive expand) */
- if (inst->prevTimeScale)
- {
- /*
- * Time-scaling has been performed since last filter update.
- * Subtract the sampleMemory from buffLevelFilt after converting sampleMemory
- * from samples to packets in Q8. Make sure that the filtered value is
- * non-negative.
- */
- inst->buffLevelFilt = WEBRTC_SPL_MAX( inst->buffLevelFilt -
- WebRtcSpl_DivW32W16(
- WEBRTC_SPL_LSHIFT_W32(inst->sampleMemory, 8), /* sampleMemory in Q8 */
- inst->packetSpeechLenSamp ), /* divide by packetSpeechLenSamp */
- 0);
-
- /*
- * Reset flag and set timescaleHoldOff timer to prevent further time-scaling
- * for some time.
- */
- inst->prevTimeScale = 0;
- inst->timescaleHoldOff = AUTOMODE_TIMESCALE_LIMIT;
- }
-
- /* Update time counters and HoldOff timer */
- inst->packetIatCountSamp += sampPerCall; /* packet inter-arrival time */
- inst->peakIatCountSamp += sampPerCall; /* peak inter-arrival time */
- inst->timescaleHoldOff >>= 1; /* time-scaling limiter */
- inst->maxCSumUpdateTimer += sampPerCall; /* cumulative-sum timer */
-
- return 0;
-
-}
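A small numeric illustration of the Q8 filter step above (values chosen only for the example):

#include <cstdio>

// One update of the Q8 buffer-level filter with example values.
int main() {
  int level_filt_fact = 253;       // forgetting factor in Q8
  int buff_level_filt = 2 * 256;   // previous filtered level: 2.0 packets (Q8)
  int cur_size_frames = 3;         // current buffer level: 3 packets (Q0)
  buff_level_filt = ((level_filt_fact * buff_level_filt) >> 8) +
                    (256 - level_filt_fact) * cur_size_frames;
  // 506 + 9 = 515, i.e. about 2.01 packets: the level creeps toward 3.
  std::printf("%d (%.2f packets)\n", buff_level_filt, buff_level_filt / 256.0);
  return 0;
}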
-
-
-int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, int16_t newLenSamp,
- int32_t fsHz)
-{
-
- /* Sanity check for newLenSamp and fsHz */
- if (newLenSamp <= 0 || fsHz <= 0)
- {
- return -1;
- }
-
- inst->packetSpeechLenSamp = newLenSamp; /* Store packet size in instance */
-
- /* Make NetEQ wait for first regular packet before starting the timer */
- inst->lastPackCNGorDTMF = 1;
-
- inst->packetIatCountSamp = 0; /* Reset packet time counter */
-
- /*
- * Calculate peak threshold from packet size. The threshold is defined as
- * the (fractional) number of packets that corresponds to PEAK_HEIGHT
- * (in Q8 seconds). That is, threshold = PEAK_HEIGHT/256 * fsHz / packLen.
- */
- inst->peakThresholdPkt = (uint16_t) WebRtcSpl_DivW32W16ResW16(
- WEBRTC_SPL_MUL_16_16_RSFT(PEAK_HEIGHT,
- (int16_t) WEBRTC_SPL_RSHIFT_W32(fsHz, 6), 2), inst->packetSpeechLenSamp);
-
- return 0;
-}
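To make the fixed-point expression above concrete (illustrative numbers, not from this change): with PEAK_HEIGHT = 20 (about 0.08 s in Q8), fsHz = 8000 and 20 ms packets (packetSpeechLenSamp = 160),

    peakThresholdPkt = ((20 * (8000 >> 6)) >> 2) / 160 = 625 / 160 = 3,

which matches the stated definition PEAK_HEIGHT/256 * fsHz / packLen = 0.078 * 8000 / 160 = 3.9, truncated to 3 packets by the integer division.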
-
-
-int WebRtcNetEQ_ResetAutomode(AutomodeInst_t *inst, int maxBufLenPackets)
-{
-
- int i;
- uint16_t tempprob = 0x4002; /* 16384 + 2 = 100000000000010 binary; */
-
- /* Sanity check for maxBufLenPackets */
- if (maxBufLenPackets <= 1)
- {
- /* Invalid value; set to 10 instead (arbitrary small number) */
- maxBufLenPackets = 10;
- }
-
- /* Reset filtered buffer level */
- inst->buffLevelFilt = 0;
-
- /* Reset packet size to unknown */
- inst->packetSpeechLenSamp = 0;
-
- /*
- * Flag that last packet was special payload, so that automode will treat the next speech
- * payload as the first payload received.
- */
- inst->lastPackCNGorDTMF = 1;
-
- /* Reset peak detection parameters */
- inst->peakModeDisabled = 1; /* disable peak mode */
- inst->peakIatCountSamp = 0;
- inst->peakIndex = -1; /* indicates that no peak is registered */
- inst->curPeakHeight = 0;
- inst->curPeakPeriod = 0;
- for (i = 0; i < NUM_PEAKS; i++)
- {
- inst->peakHeightPkt[i] = 0;
- inst->peakPeriodSamp[i] = 0;
- }
-
- /*
- * Set the iatProb PDF vector to an exponentially decaying distribution
- * iatProb[i] = 0.5^(i+1), i = 0, 1, 2, ...
- * iatProb is in Q30.
- */
- for (i = 0; i <= MAX_IAT; i++)
- {
- /* iatProb[i] = 0.5^(i+1) = iatProb[i-1] / 2 */
- tempprob = WEBRTC_SPL_RSHIFT_U16(tempprob, 1);
- /* store in PDF vector */
- inst->iatProb[i] = WEBRTC_SPL_LSHIFT_W32((int32_t) tempprob, 16);
- }
-
- /*
- * Calculate the optimal buffer level corresponding to the initial PDF.
- * No need to call WebRtcNetEQ_CalcOptimalBufLvl() since we have just hard-coded
- * all the variables that the buffer level depends on => we know the result
- */
- inst->optBufLevel = WEBRTC_SPL_MIN(4,
- (maxBufLenPackets >> 1) + (maxBufLenPackets >> 2)); /* 75% of maxBufLenPackets */
- inst->required_delay_q8 = inst->optBufLevel;
- inst->levelFiltFact = 253;
-
- /*
- * Reset the iat update forgetting factor to 0 to make the impact of the first
- * incoming packets greater.
- */
- inst->iatProbFact = 0;
-
- /* Reset packet inter-arrival time counter */
- inst->packetIatCountSamp = 0;
-
- /* Clear time-scaling related variables */
- inst->prevTimeScale = 0;
- inst->timescaleHoldOff = AUTOMODE_TIMESCALE_LIMIT; /* don't allow time-scaling immediately */
-
- inst->cSumIatQ8 = 0;
- inst->maxCSumIatQ8 = 0;
-
- return 0;
-}
-
-int32_t WebRtcNetEQ_AverageIAT(const AutomodeInst_t *inst) {
- int i;
- int32_t sum_q24 = 0;
- assert(inst);
- for (i = 0; i <= MAX_IAT; ++i) {
- /* Shift 6 to fit worst case: 2^30 * 64. */
- sum_q24 += (inst->iatProb[i] >> 6) * i;
- }
- /* Subtract the nominal inter-arrival time 1 = 2^24 in Q24. */
- sum_q24 -= (1 << 24);
- /*
- * Multiply with 1000000 / 2^24 = 15625 / 2^18 to get in parts-per-million.
- * Shift 7 to Q17 first, then multiply with 15625 and shift another 11.
- */
- return ((sum_q24 >> 7) * 15625) >> 11;
-}
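As a sanity check on the scaling above (an illustrative case, not from this change): if all of the probability mass in iatProb sat at an inter-arrival time of 2 packets, i.e. iatProb[2] were approximately 2^30, then

    sum_q24 = (2^30 >> 6) * 2 = 2^25, and after subtracting the nominal 2^24 we get sum_q24 = 2^24,

so the return value is ((2^24 >> 7) * 15625) >> 11 = 15625 * 64 = 1,000,000 ppm; packets arriving with twice the nominal spacing read as +100%.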
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.h
deleted file mode 100644
index c5dd829b834..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.h
+++ /dev/null
@@ -1,274 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the functionality for automatic buffer level optimization.
- */
-
-#ifndef AUTOMODE_H
-#define AUTOMODE_H
-
-#include "typedefs.h"
-
-/*************/
-/* Constants */
-/*************/
-
-/* The beta parameter defines the trade-off between delay and underrun probability. */
-/* It is defined through its inverse in Q30 */
-#define AUTOMODE_BETA_INV_Q30 53687091 /* 1/20 in Q30 */
-#define AUTOMODE_STREAMING_BETA_INV_Q30 536871 /* 1/2000 in Q30 */
-
-/* Forgetting factor for the inter-arrival time statistics */
-#define IAT_PROB_FACT 32745 /* 0.9993 in Q15 */
-
-/* Maximum inter-arrival time to register (in "packet-times") */
-#define MAX_IAT 64
-#define PEAK_HEIGHT 20 /* 0.08s in Q8 */
-
-/* The value (1<<5) sets maximum accelerate "speed" to about 100 ms/s */
-#define AUTOMODE_TIMESCALE_LIMIT (1<<5)
-
-/* Peak mode related parameters */
-/* Number of peaks in peak vector; must be a power of 2 */
-#define NUM_PEAKS 8
-
-/* Must be NUM_PEAKS-1 */
-#define PEAK_INDEX_MASK 0x0007
-
-/* Longest accepted peak distance */
-#define MAX_PEAK_PERIOD 10
-#define MAX_STREAMING_PEAK_PERIOD 600 /* 10 minutes */
-
-/* Number of peaks required before peak mode can be engaged */
-#define NUM_PEAKS_REQUIRED 3
-
-/* Drift term for cumulative sum */
-#define CSUM_IAT_DRIFT 2
-
-/*******************/
-/* Automode struct */
-/*******************/
-
-/* The automode struct is a sub-struct of the
- bufstats-struct (BufstatsInst_t). */
-
-typedef struct
-{
-
- /* Filtered current buffer level */
- uint16_t levelFiltFact; /* filter forgetting factor in Q8 */
- int buffLevelFilt; /* filtered buffer level in Q8 */
-
- /* Inter-arrival time (iat) statistics */
- int32_t iatProb[MAX_IAT + 1]; /* iat probabilities in Q30 */
- int16_t iatProbFact; /* iat forgetting factor in Q15 */
- uint32_t packetIatCountSamp; /* time (in timestamps) elapsed since last
- packet arrival, based on RecOut calls */
- int optBufLevel; /* current optimal buffer level in Q8 */
-
- /* Packet related information */
- int16_t packetSpeechLenSamp; /* speech samples per incoming packet */
- int16_t lastPackCNGorDTMF; /* indicates that the last received packet
- contained special information */
- uint16_t lastSeqNo; /* sequence number for last packet received */
- uint32_t lastTimeStamp; /* timestamp for the last packet received */
- int firstPacketReceived; /* set to zero implicitly when the instance is
- filled with zeros */
- int32_t sampleMemory; /* memory position for keeping track of how many
- samples we cut during expand */
- int16_t prevTimeScale; /* indicates that the last mode was an accelerate
- or pre-emptive expand operation */
- uint32_t timescaleHoldOff; /* counter that is shifted one step right each
- RecOut call; time-scaling allowed when it has
- reached 0 */
- int16_t extraDelayMs; /* extra delay for sync with video */
-
- int minimum_delay_ms; /* Desired delay, NetEq maintains this amount of
- delay unless jitter statistics suggests a higher value. */
- int maximum_delay_ms; /* Max desired delay, NetEq will not go above this
- amount of delay even if jitter statistics suggests a higher value. */
-
- int required_delay_q8; /* Smallest delay required. This is computed
- according to inter-arrival time and playout mode. It has the same unit
- as |optBufLevel|. */
-
- /* Peak-detection */
- /* vector with the latest peak periods (peak spacing in samples) */
- uint32_t peakPeriodSamp[NUM_PEAKS];
- /* vector with the latest peak heights (in packets) */
- int16_t peakHeightPkt[NUM_PEAKS];
- int16_t peakIndex; /* index for the vectors peakPeriodSamp and peakHeightPkt;
- -1 if still waiting for first peak */
- uint16_t peakThresholdPkt; /* definition of peak (in packets);
- calculated from PEAK_HEIGHT */
- uint32_t peakIatCountSamp; /* samples elapsed since last peak was observed */
- uint32_t curPeakPeriod; /* current maximum of peakPeriodSamp vector */
- int16_t curPeakHeight; /* derived from peakHeightPkt vector;
- used as optimal buffer level in peak mode */
- int16_t peakModeDisabled; /* ==0 if peak mode can be engaged; >0 if not */
- uint16_t peakFound; /* 1 if peaks are detected and extra delay is applied;
- * 0 otherwise. */
-
- /* Post-call statistics */
- uint32_t countIAT500ms; /* number of times we got small network outage */
- uint32_t countIAT1000ms; /* number of times we got medium network outage */
- uint32_t countIAT2000ms; /* number of times we got large network outage */
- uint32_t longestIATms; /* mSec duration of longest network outage */
-
- int16_t cSumIatQ8; /* cumulative sum of inter-arrival times */
- int16_t maxCSumIatQ8; /* max cumulative sum IAT */
- uint32_t maxCSumUpdateTimer;/* time elapsed since maximum was observed */
-} AutomodeInst_t;
-
-/*************/
-/* Functions */
-/*************/
-
-/****************************************************************************
- * WebRtcNetEQ_UpdateIatStatistics(...)
- *
- * Update the packet inter-arrival time statistics when a new packet arrives.
- * This function should be called for every arriving packet, with some
- * exceptions when using DTX/VAD and DTMF. A new optimal buffer level is
- * calculated after the update.
- *
- * Input:
- * - inst : Automode instance
- * - maxBufLen : Maximum number of packets the buffer can hold
- * - seqNumber : RTP sequence number of incoming packet
- * - timeStamp : RTP timestamp of incoming packet
- * - fsHz : Sample rate in Hz
- * - mdCodec : Non-zero if the current codec is a multiple-
- * description codec
- * - streamingMode : A non-zero value will increase jitter robustness (and delay)
- *
- * Output:
- * - inst : Updated automode instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
- uint16_t seqNumber, uint32_t timeStamp,
- int32_t fsHz, int mdCodec, int streamingMode);
-
-/****************************************************************************
- * WebRtcNetEQ_CalcOptimalBufLvl(...)
- *
- * Calculate the optimal buffer level based on packet inter-arrival time
- * statistics.
- *
- * Input:
- * - inst : Automode instance
- * - fsHz : Sample rate in Hz
- * - mdCodec : Non-zero if the current codec is a multiple-
- * description codec
- * - timeIatPkts : Currently observed inter-arrival time in packets
- * - streamingMode : A non-zero value will increase jitter robustness (and delay)
- *
- * Output:
- * - inst : Updated automode instance
- *
- * Return value : >0 - Optimal buffer level
- * <0 - Error
- */
-
-int16_t WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, int32_t fsHz,
- int mdCodec, uint32_t timeIatPkts,
- int streamingMode);
-
-/****************************************************************************
- * WebRtcNetEQ_BufferLevelFilter(...)
- *
- * Update filtered buffer level. The function must be called once for each
- * RecOut call, since the timing of automode hinges on counters that are
- * updated by this function.
- *
- * Input:
- * - curSizeMs8 : Total length of unused speech data in packet buffer
- * and sync buffer, in ms * 8
- * - inst : Automode instance
- * - sampPerCall : Number of samples per RecOut call
- * - fsMult : Sample rate in Hz divided by 8000
- *
- * Output:
- * - inst : Updated automode instance
- *
- * Return value : 0 - Ok
- * : <0 - Error
- */
-
-int WebRtcNetEQ_BufferLevelFilter(int32_t curSizeMs8, AutomodeInst_t *inst,
- int sampPerCall, int16_t fsMult);
-
-/****************************************************************************
- * WebRtcNetEQ_SetPacketSpeechLen(...)
- *
- * Provide the number of speech samples extracted from a packet to the
- * automode instance. Several of the calculations within automode depend
- * on knowing the packet size.
- *
- *
- * Input:
- * - inst : Automode instance
- * - newLenSamp : Number of speech samples in each incoming packet
- * - fsHz : Sample rate in Hz
- *
- * Output:
- * - inst : Updated automode instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, int16_t newLenSamp,
- int32_t fsHz);
-
-/****************************************************************************
- * WebRtcNetEQ_ResetAutomode(...)
- *
- * Reset the automode instance.
- *
- *
- * Input:
- * - inst : Automode instance
- * - maxBufLenPackets : Maximum number of packets that the packet
- * buffer can hold (>1)
- *
- * Output:
- * - inst : Updated automode instance
- *
- * Return value : 0 - Ok
- */
-
-int WebRtcNetEQ_ResetAutomode(AutomodeInst_t *inst, int maxBufLenPackets);
-
-/****************************************************************************
- * WebRtcNetEQ_AverageIAT(...)
- *
- * Calculate the average inter-arrival time based on current statistics.
- * The average is expressed in parts per million relative to the nominal. That is,
- * if the average inter-arrival time is equal to the nominal frame time,
- * the return value is zero. A positive value corresponds to packet spacing
- * being too large, while a negative value means that the packets arrive with
- * less spacing than expected.
- *
- *
- * Input:
- * - inst : Automode instance.
- *
- * Return value : Average relative inter-arrival time in parts per million.
- */
-
-int32_t WebRtcNetEQ_AverageIAT(const AutomodeInst_t *inst);
-
-#endif /* AUTOMODE_H */
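Taken together, the functions declared above are driven in a fixed pattern: reset once, feed every arriving speech packet into the IAT statistics, and update the buffer-level filter on every RecOut call. A sketch of that call order, based only on the documentation above (buffer capacity, packet values and the function name are assumed for illustration; error handling omitted):

#include "automode.h"

void ExampleAutomodeUsage(void) {
  enum { kMaxBufPackets = 50 };  // assumed packet-buffer capacity
  AutomodeInst_t inst = {0};     // zero-fill, as the struct comments expect
  WebRtcNetEQ_ResetAutomode(&inst, kMaxBufPackets);

  /* When a speech packet arrives (values assumed: 20 ms of 8 kHz audio). */
  WebRtcNetEQ_SetPacketSpeechLen(&inst, /*newLenSamp=*/160, /*fsHz=*/8000);
  WebRtcNetEQ_UpdateIatStatistics(&inst, kMaxBufPackets, /*seqNumber=*/1234,
                                  /*timeStamp=*/160000, /*fsHz=*/8000,
                                  /*mdCodec=*/0, /*streamingMode=*/0);

  /* Once per RecOut call: 80 samples of output at 8 kHz, fsMult = 1, with the
     current buffer content expressed in ms * 8. */
  WebRtcNetEQ_BufferLevelFilter(/*curSizeMs8=*/40 * 8, &inst,
                                /*sampPerCall=*/80, /*fsMult=*/1);
}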
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc
new file mode 100644
index 00000000000..e00c4f65e5c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc
@@ -0,0 +1,260 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+
+#include <assert.h>
+#include <string.h> // memcpy
+
+#include <algorithm> // min, max
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
+
+namespace webrtc {
+
+BackgroundNoise::BackgroundNoise(size_t num_channels)
+ : num_channels_(num_channels),
+ channel_parameters_(new ChannelParameters[num_channels_]),
+ mode_(kBgnOn) {
+ Reset();
+}
+
+BackgroundNoise::~BackgroundNoise() {}
+
+void BackgroundNoise::Reset() {
+ initialized_ = false;
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ channel_parameters_[channel].Reset();
+ }
+ // Keep mode_ as it is.
+}
+
+void BackgroundNoise::Update(const AudioMultiVector& input,
+ const PostDecodeVad& vad) {
+ if (vad.running() && vad.active_speech()) {
+ // Do not update the background noise parameters if we know that the signal
+ // is active speech.
+ return;
+ }
+
+ int32_t auto_correlation[kMaxLpcOrder + 1];
+ int16_t filter_output[kMaxLpcOrder + kResidualLength];
+ int16_t reflection_coefficients[kMaxLpcOrder];
+ int16_t lpc_coefficients[kMaxLpcOrder + 1];
+
+ for (size_t channel_ix = 0; channel_ix < num_channels_; ++channel_ix) {
+ ChannelParameters& parameters = channel_parameters_[channel_ix];
+ int16_t temp_signal_array[kVecLen + kMaxLpcOrder] = {0};
+ int16_t* temp_signal = &temp_signal_array[kMaxLpcOrder];
+ memcpy(temp_signal,
+ &input[channel_ix][input.Size() - kVecLen],
+ sizeof(int16_t) * kVecLen);
+
+ int32_t sample_energy = CalculateAutoCorrelation(temp_signal, kVecLen,
+ auto_correlation);
+
+ if ((!vad.running() &&
+ sample_energy < parameters.energy_update_threshold) ||
+ (vad.running() && !vad.active_speech())) {
+ // Generate LPC coefficients.
+ if (auto_correlation[0] > 0) {
+ // Regardless of whether the filter is actually updated or not,
+ // update energy threshold levels, since we have in fact observed
+ // a low energy signal.
+ if (sample_energy < parameters.energy_update_threshold) {
+ // Never go under 1.0 in average sample energy.
+ parameters.energy_update_threshold = std::max(sample_energy, 1);
+ parameters.low_energy_update_threshold = 0;
+ }
+
+ // Only update BGN if filter is stable, i.e., if return value from
+ // Levinson-Durbin function is 1.
+ if (WebRtcSpl_LevinsonDurbin(auto_correlation, lpc_coefficients,
+ reflection_coefficients,
+ kMaxLpcOrder) != 1) {
+ return;
+ }
+ } else {
+ // Center value in auto-correlation is not positive. Do not update.
+ return;
+ }
+
+ // Generate the CNG gain factor by looking at the energy of the residual.
+ WebRtcSpl_FilterMAFastQ12(temp_signal + kVecLen - kResidualLength,
+ filter_output, lpc_coefficients,
+ kMaxLpcOrder + 1, kResidualLength);
+ int32_t residual_energy = WebRtcSpl_DotProductWithScale(filter_output,
+ filter_output,
+ kResidualLength,
+ 0);
+
+ // Check spectral flatness.
+ // Comparing the residual variance with the input signal variance tells
+ // if the spectrum is flat or not.
+ // If 20 * residual_energy >= sample_energy << 6, the spectrum is flat
+ // enough. Also ensure that the energy is non-zero.
+ if ((residual_energy * 20 >= (sample_energy << 6)) &&
+ (sample_energy > 0)) {
+ // Spectrum is flat enough; save filter parameters.
+ // |temp_signal| + |kVecLen| - |kMaxLpcOrder| points at the first of the
+ // |kMaxLpcOrder| samples in the residual signal, which will form the
+ // filter state for the next noise generation.
+ SaveParameters(channel_ix, lpc_coefficients,
+ temp_signal + kVecLen - kMaxLpcOrder, sample_energy,
+ residual_energy);
+ }
+ } else {
+ // Will only happen if post-decode VAD is disabled and |sample_energy| is
+ // not low enough. Increase the threshold for update so that it increases
+ // by a factor 4 in 4 seconds.
+ IncrementEnergyThreshold(channel_ix, sample_energy);
+ }
+ }
+ return;
+}
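One way to read the flatness test in Update() above (an interpretation of the code, not a statement from the source): residual_energy is summed over kResidualLength = 64 samples while sample_energy is already an energy per sample, so

    20 * residual_energy >= sample_energy << 6

is the same as requiring residual_energy / 64 >= sample_energy / 20, i.e. the LPC residual keeps at least 1/20 of the per-sample input energy (a prediction gain below roughly 13 dB), which the code treats as a spectrum flat enough to model as background noise.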
+
+int32_t BackgroundNoise::Energy(size_t channel) const {
+ assert(channel < num_channels_);
+ return channel_parameters_[channel].energy;
+}
+
+void BackgroundNoise::SetMuteFactor(size_t channel, int16_t value) {
+ assert(channel < num_channels_);
+ channel_parameters_[channel].mute_factor = value;
+}
+
+int16_t BackgroundNoise::MuteFactor(size_t channel) const {
+ assert(channel < num_channels_);
+ return channel_parameters_[channel].mute_factor;
+}
+
+const int16_t* BackgroundNoise::Filter(size_t channel) const {
+ assert(channel < num_channels_);
+ return channel_parameters_[channel].filter;
+}
+
+const int16_t* BackgroundNoise::FilterState(size_t channel) const {
+ assert(channel < num_channels_);
+ return channel_parameters_[channel].filter_state;
+}
+
+void BackgroundNoise::SetFilterState(size_t channel, const int16_t* input,
+ size_t length) {
+ assert(channel < num_channels_);
+ length = std::min(length, static_cast<size_t>(kMaxLpcOrder));
+ memcpy(channel_parameters_[channel].filter_state, input,
+ length * sizeof(int16_t));
+}
+
+int16_t BackgroundNoise::Scale(size_t channel) const {
+ assert(channel < num_channels_);
+ return channel_parameters_[channel].scale;
+}
+int16_t BackgroundNoise::ScaleShift(size_t channel) const {
+ assert(channel < num_channels_);
+ return channel_parameters_[channel].scale_shift;
+}
+
+int32_t BackgroundNoise::CalculateAutoCorrelation(
+ const int16_t* signal, int length, int32_t* auto_correlation) const {
+ int16_t signal_max = WebRtcSpl_MaxAbsValueW16(signal, length);
+ int correlation_scale = kLogVecLen -
+ WebRtcSpl_NormW32(signal_max * signal_max);
+ correlation_scale = std::max(0, correlation_scale);
+
+ static const int kCorrelationStep = -1;
+ WebRtcSpl_CrossCorrelation(auto_correlation, signal, signal, length,
+ kMaxLpcOrder + 1, correlation_scale,
+ kCorrelationStep);
+
+ // Number of shifts to normalize energy to energy/sample.
+ int energy_sample_shift = kLogVecLen - correlation_scale;
+ return auto_correlation[0] >> energy_sample_shift;
+}
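The scaling in CalculateAutoCorrelation() above can be unpacked as follows (a reading of the code, assuming WebRtcSpl_CrossCorrelation applies the requested right shift to each accumulated product sum): auto_correlation[0] is approximately sum(x^2) >> correlation_scale over the kVecLen = 256 window, so shifting it a further kLogVecLen - correlation_scale bits yields sum(x^2) >> kLogVecLen, i.e. the average energy per sample, which is what the function returns. The removed bgn_update.c in this same change performs the equivalent normalization with w16_enSampleShift = 8 - w16_tmp.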
+
+void BackgroundNoise::IncrementEnergyThreshold(size_t channel,
+ int32_t sample_energy) {
+ // TODO(hlundin): Simplify the below threshold update. What this code
+ // does is simply "threshold += (increment * threshold) >> 16", but due
+ // to the limited-width operations, it is not exactly the same. The
+ // difference should be inaudible, but bit-exactness would not be
+ // maintained.
+ assert(channel < num_channels_);
+ ChannelParameters& parameters = channel_parameters_[channel];
+ int32_t temp_energy =
+ WEBRTC_SPL_MUL_16_16_RSFT(kThresholdIncrement,
+ parameters.low_energy_update_threshold, 16);
+ temp_energy += kThresholdIncrement *
+ (parameters.energy_update_threshold & 0xFF);
+ temp_energy += (kThresholdIncrement *
+ ((parameters.energy_update_threshold>>8) & 0xFF)) << 8;
+ parameters.low_energy_update_threshold += temp_energy;
+
+ parameters.energy_update_threshold += kThresholdIncrement *
+ (parameters.energy_update_threshold>>16);
+ parameters.energy_update_threshold +=
+ parameters.low_energy_update_threshold >> 16;
+ parameters.low_energy_update_threshold =
+ parameters.low_energy_update_threshold & 0x0FFFF;
+
+ // Update maximum energy.
+ // Decrease by a factor 1/1024 each time.
+ parameters.max_energy = parameters.max_energy -
+ (parameters.max_energy >> 10);
+ if (sample_energy > parameters.max_energy) {
+ parameters.max_energy = sample_energy;
+ }
+
+ // Set |energy_update_threshold| to no less than 60 dB lower than
+ // |max_energy|. Adding 524288 ensures proper rounding.
+ int32_t energy_update_threshold = (parameters.max_energy + 524288) >> 20;
+ if (energy_update_threshold > parameters.energy_update_threshold) {
+ parameters.energy_update_threshold = energy_update_threshold;
+ }
+}
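The TODO above notes that the update amounts to "threshold += (increment * threshold) >> 16", split into 16-bit halves to stay within narrow operations. A sketch of that plain form (not bit-exact with the function above, only equivalent to first order; the function name is hypothetical and 229 is kThresholdIncrement, 0.0035 in Q16):

#include <cstdint>

// Plain-form version of the threshold bump described in the TODO comment.
int32_t SimplifiedThresholdIncrement(int32_t threshold) {
  const int32_t kThresholdIncrementQ16 = 229;  // 0.0035 in Q16.
  return threshold + static_cast<int32_t>(
      (static_cast<int64_t>(kThresholdIncrementQ16) * threshold) >> 16);
}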
+
+void BackgroundNoise::SaveParameters(size_t channel,
+ const int16_t* lpc_coefficients,
+ const int16_t* filter_state,
+ int32_t sample_energy,
+ int32_t residual_energy) {
+ assert(channel < num_channels_);
+ ChannelParameters& parameters = channel_parameters_[channel];
+ memcpy(parameters.filter, lpc_coefficients,
+ (kMaxLpcOrder+1) * sizeof(int16_t));
+ memcpy(parameters.filter_state, filter_state,
+ kMaxLpcOrder * sizeof(int16_t));
+ // Save energy level and update energy threshold levels.
+ // Never get under 1.0 in average sample energy.
+ parameters.energy = std::max(sample_energy, 1);
+ parameters.energy_update_threshold = parameters.energy;
+ parameters.low_energy_update_threshold = 0;
+
+ // Normalize residual_energy to 29 or 30 bits before sqrt.
+ int norm_shift = WebRtcSpl_NormW32(residual_energy) - 1;
+ if (norm_shift & 0x1) {
+ norm_shift -= 1; // Even number of shifts required.
+ }
+ assert(norm_shift >= 0); // Should always be positive.
+ residual_energy = residual_energy << norm_shift;
+
+ // Calculate scale and shift factor.
+ parameters.scale = WebRtcSpl_SqrtFloor(residual_energy);
+ // Add 13 to the |scale_shift_|, since the random numbers table is in
+ // Q13.
+ // TODO(hlundin): Move the "13" to where the |scale_shift_| is used?
+ parameters.scale_shift = 13 + ((kLogResidualLength + norm_shift) / 2);
+
+ initialized_ = true;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.h
new file mode 100644
index 00000000000..8fb310ea8b6
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_BACKGROUND_NOISE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_BACKGROUND_NOISE_H_
+
+#include <string.h> // size_t
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class PostDecodeVad;
+
+// This class handles estimation of background noise parameters.
+class BackgroundNoise {
+ public:
+ // TODO(hlundin): For 48 kHz support, increase kMaxLpcOrder to 10.
+ // Will work anyway, but probably sound a little worse.
+ static const int kMaxLpcOrder = 8; // 32000 / 8000 + 4.
+
+ explicit BackgroundNoise(size_t num_channels);
+ virtual ~BackgroundNoise();
+
+ void Reset();
+
+ // Updates the parameter estimates based on the signal currently in the
+ // |sync_buffer|, and on the latest decision in |vad| if it is running.
+ void Update(const AudioMultiVector& sync_buffer,
+ const PostDecodeVad& vad);
+
+ // Returns |energy_| for |channel|.
+ int32_t Energy(size_t channel) const;
+
+ // Sets the value of |mute_factor_| for |channel| to |value|.
+ void SetMuteFactor(size_t channel, int16_t value);
+
+ // Returns |mute_factor_| for |channel|.
+ int16_t MuteFactor(size_t channel) const;
+
+ // Returns a pointer to |filter_| for |channel|.
+ const int16_t* Filter(size_t channel) const;
+
+ // Returns a pointer to |filter_state_| for |channel|.
+ const int16_t* FilterState(size_t channel) const;
+
+ // Copies |length| elements from |input| to the filter state. Will not copy
+ // more than |kMaxLpcOrder| elements.
+ void SetFilterState(size_t channel, const int16_t* input, size_t length);
+
+ // Returns |scale_| for |channel|.
+ int16_t Scale(size_t channel) const;
+
+ // Returns |scale_shift_| for |channel|.
+ int16_t ScaleShift(size_t channel) const;
+
+ // Accessors.
+ bool initialized() const { return initialized_; }
+ NetEqBackgroundNoiseMode mode() const { return mode_; }
+
+ // Sets the mode of the background noise playout for cases when there is long
+ // duration of packet loss.
+ void set_mode(NetEqBackgroundNoiseMode mode) { mode_ = mode; }
+
+ private:
+ static const int kThresholdIncrement = 229; // 0.0035 in Q16.
+ static const int kVecLen = 256;
+ static const int kLogVecLen = 8; // log2(kVecLen).
+ static const int kResidualLength = 64;
+ static const int kLogResidualLength = 6; // log2(kResidualLength)
+
+ struct ChannelParameters {
+ // Constructor.
+ ChannelParameters() {
+ Reset();
+ }
+
+ void Reset() {
+ energy = 2500;
+ max_energy = 0;
+ energy_update_threshold = 500000;
+ low_energy_update_threshold = 0;
+ memset(filter_state, 0, sizeof(filter_state));
+ memset(filter, 0, sizeof(filter));
+ filter[0] = 4096;
+ mute_factor = 0;
+ scale = 20000;
+ scale_shift = 24;
+ }
+
+ int32_t energy;
+ int32_t max_energy;
+ int32_t energy_update_threshold;
+ int32_t low_energy_update_threshold;
+ int16_t filter_state[kMaxLpcOrder];
+ int16_t filter[kMaxLpcOrder + 1];
+ int16_t mute_factor;
+ int16_t scale;
+ int16_t scale_shift;
+ };
+
+ int32_t CalculateAutoCorrelation(const int16_t* signal,
+ int length,
+ int32_t* auto_correlation) const;
+
+ // Increments the energy threshold by a factor 1 + |kThresholdIncrement|.
+ void IncrementEnergyThreshold(size_t channel, int32_t sample_energy);
+
+ // Updates the filter parameters.
+ void SaveParameters(size_t channel,
+ const int16_t* lpc_coefficients,
+ const int16_t* filter_state,
+ int32_t sample_energy,
+ int32_t residual_energy);
+
+ size_t num_channels_;
+ scoped_ptr<ChannelParameters[]> channel_parameters_;
+ bool initialized_;
+ NetEqBackgroundNoiseMode mode_;
+
+ DISALLOW_COPY_AND_ASSIGN(BackgroundNoise);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_BACKGROUND_NOISE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise_unittest.cc
new file mode 100644
index 00000000000..0aee62c97be
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise_unittest.cc
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for BackgroundNoise class.
+
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+
+#include "gtest/gtest.h"
+
+namespace webrtc {
+
+TEST(BackgroundNoise, CreateAndDestroy) {
+ size_t channels = 1;
+ BackgroundNoise bgn(channels);
+}
+
+// TODO(hlundin): Write more tests.
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/bgn_update.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/bgn_update.c
deleted file mode 100644
index 4d660ff55b6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/bgn_update.c
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the function for updating the background noise estimate.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-
-/* Scratch usage:
- Designed for BGN_LPC_ORDER <= 10
-
- Type Name size startpos endpos
- int32_t pw32_autoCorr 22 0 21 (Length (BGN_LPC_ORDER + 1)*2)
- int16_t pw16_tempVec 10 22 31 (Length BGN_LPC_ORDER)
- int16_t pw16_rc 10 32 41 (Length BGN_LPC_ORDER)
- int16_t pw16_outVec 74 0 73 (Length BGN_LPC_ORDER + 64)
-
- Total: 74
- */
-
-#if (BGN_LPC_ORDER > 10) && (defined SCRATCH)
-#error BGN_LPC_ORDER is too large for current scratch memory allocation
-#endif
-
-#define SCRATCH_PW32_AUTO_CORR 0
-#define SCRATCH_PW16_TEMP_VEC 22
-#define SCRATCH_PW16_RC 32
-#define SCRATCH_PW16_OUT_VEC 0
-
-#define NETEQFIX_BGNFRAQINCQ16 229 /* 0.0035 in Q16 */
-
-/****************************************************************************
- * WebRtcNetEQ_BGNUpdate(...)
- *
- * This function updates the background noise parameter estimates.
- *
- * Input:
- * - inst : NetEQ instance, where the speech history is stored.
- * - scratchPtr : Pointer to scratch vector.
- *
- * Output:
- * - inst : Updated information about the BGN characteristics.
- *
- * Return value : No return value
- */
-
-void WebRtcNetEQ_BGNUpdate(
-#ifdef SCRATCH
- DSPInst_t *inst, int16_t *pw16_scratchPtr
-#else
- DSPInst_t *inst
-#endif
-)
-{
- const int16_t w16_vecLen = 256;
- BGNInst_t *BGN_Inst = &(inst->BGNInst);
-#ifdef SCRATCH
- int32_t *pw32_autoCorr = (int32_t*) (pw16_scratchPtr + SCRATCH_PW32_AUTO_CORR);
- int16_t *pw16_tempVec = pw16_scratchPtr + SCRATCH_PW16_TEMP_VEC;
- int16_t *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
- int16_t *pw16_outVec = pw16_scratchPtr + SCRATCH_PW16_OUT_VEC;
-#else
- int32_t pw32_autoCorr[BGN_LPC_ORDER + 1];
- int16_t pw16_tempVec[BGN_LPC_ORDER];
- int16_t pw16_outVec[BGN_LPC_ORDER + 64];
- int16_t pw16_rc[BGN_LPC_ORDER];
-#endif
- int16_t pw16_A[BGN_LPC_ORDER + 1];
- int32_t w32_tmp;
- int16_t *pw16_vec;
- int16_t w16_maxSample;
- int16_t w16_tmp, w16_tmp2;
- int16_t w16_enSampleShift;
- int32_t w32_en, w32_enBGN;
- int32_t w32_enUpdateThreashold;
- int16_t stability;
-
- pw16_vec = inst->pw16_speechHistory + inst->w16_speechHistoryLen - w16_vecLen;
-
-#ifdef NETEQ_VAD
- if( !inst->VADInst.VADEnabled /* we are not using post-decode VAD */
- || inst->VADInst.VADDecision == 0 )
- { /* ... or, post-decode VAD says passive speaker */
-#endif /* NETEQ_VAD */
-
- /*Insert zeros to guarantee that boundary values do not distort autocorrelation */
- WEBRTC_SPL_MEMCPY_W16(pw16_tempVec, pw16_vec - BGN_LPC_ORDER, BGN_LPC_ORDER);
- WebRtcSpl_MemSetW16(pw16_vec - BGN_LPC_ORDER, 0, BGN_LPC_ORDER);
-
- w16_maxSample = WebRtcSpl_MaxAbsValueW16(pw16_vec, w16_vecLen);
- w16_tmp = 8 /* log2(w16_veclen) = 8 */
- - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_maxSample, w16_maxSample));
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- WebRtcNetEQ_CrossCorr(pw32_autoCorr, pw16_vec, pw16_vec, w16_vecLen, BGN_LPC_ORDER + 1,
- w16_tmp, -1);
-
- /* Copy back data */
- WEBRTC_SPL_MEMCPY_W16(pw16_vec - BGN_LPC_ORDER, pw16_tempVec, BGN_LPC_ORDER);
-
- w16_enSampleShift = 8 - w16_tmp; /* Number of shifts to get energy/sample */
- /* pw32_autoCorr[0]>>w16_enSampleShift */
- w32_en = WEBRTC_SPL_RSHIFT_W32(pw32_autoCorr[0], w16_enSampleShift);
- if ((w32_en < BGN_Inst->w32_energyUpdate
-#ifdef NETEQ_VAD
- /* post-decode VAD disabled and w32_en sufficiently low */
- && !inst->VADInst.VADEnabled)
- /* ... or, post-decode VAD says passive speaker */
- || (inst->VADInst.VADEnabled && inst->VADInst.VADDecision == 0)
-#else
- ) /* just close the extra parenthesis */
-#endif /* NETEQ_VAD */
- )
- {
- /* Generate LPC coefficients */
- if (pw32_autoCorr[0] > 0)
- {
- /* regardless of whether the filter is actually updated or not,
- update energy threshold levels, since we have in fact observed
- a low energy signal */
- if (w32_en < BGN_Inst->w32_energyUpdate)
- {
- /* Never get under 1.0 in average sample energy */
- BGN_Inst->w32_energyUpdate = WEBRTC_SPL_MAX(w32_en, 1);
- BGN_Inst->w32_energyUpdateLow = 0;
- }
-
- stability = WebRtcSpl_LevinsonDurbin(pw32_autoCorr, pw16_A, pw16_rc, BGN_LPC_ORDER);
- /* Only update BGN if filter is stable */
- if (stability != 1)
- {
- return;
- }
- }
- else
- {
- /* Do not update */
- return;
- }
- /* Generate the CNG gain factor by looking at the energy of the residual */
- WebRtcSpl_FilterMAFastQ12(pw16_vec + w16_vecLen - 64, pw16_outVec, pw16_A,
- BGN_LPC_ORDER + 1, 64);
- w32_enBGN = WebRtcNetEQ_DotW16W16(pw16_outVec, pw16_outVec, 64, 0);
- /* Dot product should never overflow since it is BGN and residual! */
-
- /*
- * Check spectral flatness
- * Comparing the residual variance with the input signal variance tells
- * if the spectrum is flat or not.
- * (20*w32_enBGN) >= (w32_en<<6)
- * Also ensure that the energy is non-zero.
- */
- if ((WEBRTC_SPL_MUL_32_16(w32_enBGN, 20) >= WEBRTC_SPL_LSHIFT_W32(w32_en, 6))
- && (w32_en > 0))
- {
- /* spectrum is flat enough; save filter parameters */
-
- WEBRTC_SPL_MEMCPY_W16(BGN_Inst->pw16_filter, pw16_A, BGN_LPC_ORDER+1);
- WEBRTC_SPL_MEMCPY_W16(BGN_Inst->pw16_filterState,
- pw16_vec + w16_vecLen - BGN_LPC_ORDER, BGN_LPC_ORDER);
-
- /* Save energy level */
- BGN_Inst->w32_energy = WEBRTC_SPL_MAX(w32_en, 1);
-
- /* Update energy threshold levels */
- /* Never get under 1.0 in average sample energy */
- BGN_Inst->w32_energyUpdate = WEBRTC_SPL_MAX(w32_en, 1);
- BGN_Inst->w32_energyUpdateLow = 0;
-
- /* Normalize w32_enBGN to 29 or 30 bits before sqrt */
- w16_tmp2 = WebRtcSpl_NormW32(w32_enBGN) - 1;
- if (w16_tmp2 & 0x1)
- {
- w16_tmp2 -= 1; /* Even number of shifts required */
- }
- w32_enBGN = WEBRTC_SPL_SHIFT_W32(w32_enBGN, w16_tmp2);
-
- /* Calculate scale and shift factor */
- BGN_Inst->w16_scale = (int16_t) WebRtcSpl_SqrtFloor(w32_enBGN);
- BGN_Inst->w16_scaleShift = 13 + ((6 + w16_tmp2) >> 1); /* RANDN table is in Q13, */
- /* 6=log2(64) */
-
- BGN_Inst->w16_initialized = 1;
- }
-
- }
- else
- {
- /*
- * Will only happen if post-decode VAD is disabled and w32_en is not low enough.
- * Increase the threshold for update so that it increases by a factor 4 in four
- * seconds.
- * energy = energy * 1.0035
- */
- w32_tmp = WEBRTC_SPL_MUL_16_16_RSFT(NETEQFIX_BGNFRAQINCQ16,
- BGN_Inst->w32_energyUpdateLow, 16);
- w32_tmp += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
- (int16_t)(BGN_Inst->w32_energyUpdate & 0xFF));
- w32_tmp += (WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
- (int16_t)((BGN_Inst->w32_energyUpdate>>8) & 0xFF)) << 8);
- BGN_Inst->w32_energyUpdateLow += w32_tmp;
-
- BGN_Inst->w32_energyUpdate += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
- (int16_t)(BGN_Inst->w32_energyUpdate>>16));
- BGN_Inst->w32_energyUpdate += BGN_Inst->w32_energyUpdateLow >> 16;
- BGN_Inst->w32_energyUpdateLow = (BGN_Inst->w32_energyUpdateLow & 0x0FFFF);
-
- /* Update maximum energy */
- /* Decrease by a factor 1/1024 each time */
- BGN_Inst->w32_energyMax = BGN_Inst->w32_energyMax - (BGN_Inst->w32_energyMax >> 10);
- if (w32_en > BGN_Inst->w32_energyMax)
- {
- BGN_Inst->w32_energyMax = w32_en;
- }
-
- /* Set update level to at minimum 60.21 dB lower than the maximum energy */
- w32_enUpdateThreashold = (BGN_Inst->w32_energyMax + 524288) >> 20;
- if (w32_enUpdateThreashold > BGN_Inst->w32_energyUpdate)
- {
- BGN_Inst->w32_energyUpdate = w32_enUpdateThreashold;
- }
- }
-
-#ifdef NETEQ_VAD
-} /* closing initial if-statement */
-#endif /* NETEQ_VAD */
-
- return;
-}
-
-#undef SCRATCH_PW32_AUTO_CORR
-#undef SCRATCH_PW16_TEMP_VEC
-#undef SCRATCH_PW16_RC
-#undef SCRATCH_PW16_OUT_VEC
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc
new file mode 100644
index 00000000000..0388b195024
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+
+#include <algorithm> // Provide access to std::max.
+
+namespace webrtc {
+
+BufferLevelFilter::BufferLevelFilter() {
+ Reset();
+}
+
+void BufferLevelFilter::Reset() {
+ filtered_current_level_ = 0;
+ level_factor_ = 253;
+}
+
+void BufferLevelFilter::Update(int buffer_size_packets,
+ int time_stretched_samples,
+ int packet_len_samples) {
+ // Filter:
+ // |filtered_current_level_| = |level_factor_| * |filtered_current_level_| +
+ // (1 - |level_factor_|) * |buffer_size_packets|
+ // |level_factor_| and |filtered_current_level_| are in Q8.
+ // |buffer_size_packets| is in Q0.
+ filtered_current_level_ = ((level_factor_ * filtered_current_level_) >> 8) +
+ ((256 - level_factor_) * buffer_size_packets);
+
+ // Account for time-scale operations (accelerate and pre-emptive expand).
+ if (time_stretched_samples && packet_len_samples > 0) {
+ // Time-scaling has been performed since last filter update. Subtract the
+ // value of |time_stretched_samples| from |filtered_current_level_| after
+ // converting |time_stretched_samples| from samples to packets in Q8.
+ // Make sure that the filtered value remains non-negative.
+ filtered_current_level_ = std::max(0,
+ filtered_current_level_ -
+ (time_stretched_samples << 8) / packet_len_samples);
+ }
+}
+
+void BufferLevelFilter::SetTargetBufferLevel(int target_buffer_level) {
+ if (target_buffer_level <= 1) {
+ level_factor_ = 251;
+ } else if (target_buffer_level <= 3) {
+ level_factor_ = 252;
+ } else if (target_buffer_level <= 7) {
+ level_factor_ = 253;
+ } else {
+ level_factor_ = 254;
+ }
+}
+} // namespace webrtc
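A minimal usage sketch of the new BufferLevelFilter (values and the function name are illustrative; this is not how NetEq wires the class up internally):

#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"

int FilteredLevelExample() {
  webrtc::BufferLevelFilter filter;
  filter.SetTargetBufferLevel(3);  // Selects filter coefficient 252/256.
  // Feed a constant buffer level of 5 packets for ten update cycles, with no
  // time-stretching and 160-sample (20 ms at 8 kHz) packets.
  for (int i = 0; i < 10; ++i) {
    filter.Update(/*buffer_size_packets=*/5,
                  /*time_stretched_samples=*/0,
                  /*packet_len_samples=*/160);
  }
  // The filtered level is kept in Q8; >> 8 converts back to whole packets.
  return filter.filtered_current_level() >> 8;
}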
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.h
new file mode 100644
index 00000000000..48f7f564c98
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_BUFFER_LEVEL_FILTER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_BUFFER_LEVEL_FILTER_H_
+
+#include "webrtc/base/constructormagic.h"
+
+namespace webrtc {
+
+class BufferLevelFilter {
+ public:
+ BufferLevelFilter();
+ virtual ~BufferLevelFilter() {}
+ virtual void Reset();
+
+ // Updates the filter. Current buffer size is |buffer_size_packets| (Q0).
+ // If |time_stretched_samples| is non-zero, the value is converted to the
+ // corresponding number of packets, and is subtracted from the filtered
+ // value (thus bypassing the filter operation). |packet_len_samples| is the
+ // number of audio samples carried in each incoming packet.
+ virtual void Update(int buffer_size_packets, int time_stretched_samples,
+ int packet_len_samples);
+
+ // Set the current target buffer level (obtained from
+ // DelayManager::base_target_level()). Used to select the appropriate
+ // filter coefficient.
+ virtual void SetTargetBufferLevel(int target_buffer_level);
+
+ virtual int filtered_current_level() const { return filtered_current_level_; }
+
+ private:
+ int level_factor_; // Filter factor for the buffer level filter in Q8.
+ int filtered_current_level_; // Filtered current buffer level in Q8.
+
+ DISALLOW_COPY_AND_ASSIGN(BufferLevelFilter);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_BUFFER_LEVEL_FILTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter_unittest.cc
new file mode 100644
index 00000000000..9589099d4a0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter_unittest.cc
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for BufferLevelFilter class.
+
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+
+#include <math.h> // Access to pow function.
+
+#include "gtest/gtest.h"
+
+namespace webrtc {
+
+TEST(BufferLevelFilter, CreateAndDestroy) {
+ BufferLevelFilter* filter = new BufferLevelFilter();
+ EXPECT_EQ(0, filter->filtered_current_level());
+ delete filter;
+}
+
+TEST(BufferLevelFilter, ConvergenceTest) {
+ BufferLevelFilter filter;
+ for (int times = 10; times <= 50; times += 10) {
+ for (int value = 100; value <= 200; value += 10) {
+ filter.Reset();
+ filter.SetTargetBufferLevel(1); // Makes filter coefficient 251/256.
+ std::ostringstream ss;
+ ss << "times = " << times << ", value = " << value;
+ SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
+ for (int i = 0; i < times; ++i) {
+ filter.Update(value, 0 /* time_stretched_samples */,
+ 160 /* packet_len_samples */);
+ }
+ // Expect the filtered value to be (theoretically)
+ // (1 - (251/256) ^ |times|) * |value|.
+ double expected_value_double =
+ (1 - pow(251.0 / 256.0, times)) * value;
+ int expected_value = static_cast<int>(expected_value_double);
+ // filtered_current_level() returns the value in Q8.
+ // The actual value may differ slightly from the expected value due to
+ // intermediate-stage rounding errors in the filter implementation.
+ // This is why we have to use EXPECT_NEAR with a tolerance of +/-1.
+ EXPECT_NEAR(expected_value, filter.filtered_current_level() >> 8, 1);
+ }
+ }
+}
+
+// Verify that target buffer level impacts on the filter convergence.
+TEST(BufferLevelFilter, FilterFactor) {
+ BufferLevelFilter filter;
+ // Update 10 times with value 100.
+ const int kTimes = 10;
+ const int kValue = 100;
+
+ filter.SetTargetBufferLevel(3); // Makes filter coefficient 252/256.
+ for (int i = 0; i < kTimes; ++i) {
+ filter.Update(kValue, 0 /* time_stretched_samples */,
+ 160 /* packet_len_samples */);
+ }
+ // Expect the filtered value to be
+ // (1 - (252/256) ^ |kTimes|) * |kValue|.
+ int expected_value = 14;
+ // filtered_current_level() returns the value in Q8.
+ EXPECT_EQ(expected_value, filter.filtered_current_level() >> 8);
+
+ filter.Reset();
+ filter.SetTargetBufferLevel(7); // Makes filter coefficient 253/256.
+ for (int i = 0; i < kTimes; ++i) {
+ filter.Update(kValue, 0 /* time_stretched_samples */,
+ 160 /* packet_len_samples */);
+ }
+ // Expect the filtered value to be
+ // (1 - (253/256) ^ |kTimes|) * |kValue|.
+ expected_value = 11;
+ // filtered_current_level() returns the value in Q8.
+ EXPECT_EQ(expected_value, filter.filtered_current_level() >> 8);
+
+ filter.Reset();
+ filter.SetTargetBufferLevel(8); // Makes filter coefficient 254/256.
+ for (int i = 0; i < kTimes; ++i) {
+ filter.Update(kValue, 0 /* time_stretched_samples */,
+ 160 /* packet_len_samples */);
+ }
+ // Expect the filtered value to be
+ // (1 - (254/256) ^ |kTimes|) * |kValue|.
+ expected_value = 7;
+ // filtered_current_level() returns the value in Q8.
+ EXPECT_EQ(expected_value, filter.filtered_current_level() >> 8);
+}
+
+
+TEST(BufferLevelFilter, TimeStretchedSamples) {
+ BufferLevelFilter filter;
+ filter.SetTargetBufferLevel(1); // Makes filter coefficient 251/256.
+ // Update 10 times with value 100.
+ const int kTimes = 10;
+ const int kValue = 100;
+ const int kPacketSizeSamples = 160;
+ const int kNumPacketsStretched = 2;
+ const int kTimeStretchedSamples = kNumPacketsStretched * kPacketSizeSamples;
+ for (int i = 0; i < kTimes; ++i) {
+ // Packet size set to 0. Do not expect the parameter
+ // |kTimeStretchedSamples| to have any effect.
+ filter.Update(kValue, kTimeStretchedSamples, 0 /* packet_len_samples */);
+ }
+ // Expect the filtered value to be
+ // (1 - (251/256) ^ |kTimes|) * |kValue|.
+ const int kExpectedValue = 17;
+ // filtered_current_level() returns the value in Q8.
+ EXPECT_EQ(kExpectedValue, filter.filtered_current_level() >> 8);
+
+ // Update filter again, now with non-zero value for packet length.
+ // Set the current filtered value to be the input, in order to isolate the
+ // impact of |kTimeStretchedSamples|.
+ filter.Update(filter.filtered_current_level() >> 8, kTimeStretchedSamples,
+ kPacketSizeSamples);
+ EXPECT_EQ(kExpectedValue - kNumPacketsStretched,
+ filter.filtered_current_level() >> 8);
+ // Try negative value and verify that we come back to the previous result.
+ filter.Update(filter.filtered_current_level() >> 8, -kTimeStretchedSamples,
+ kPacketSizeSamples);
+ EXPECT_EQ(kExpectedValue, filter.filtered_current_level() >> 8);
+}
+
+TEST(BufferLevelFilter, TimeStretchedSamplesNegativeUnevenFrames) {
+ BufferLevelFilter filter;
+ filter.SetTargetBufferLevel(1); // Makes filter coefficient 251/256.
+ // Update 10 times with value 100.
+ const int kTimes = 10;
+ const int kValue = 100;
+ const int kPacketSizeSamples = 160;
+ const int kTimeStretchedSamples = -3.1415 * kPacketSizeSamples;
+ for (int i = 0; i < kTimes; ++i) {
+ // Packet size set to 0. Do not expect the parameter
+ // |kTimeStretchedSamples| to have any effect.
+ filter.Update(kValue, kTimeStretchedSamples, 0 /* packet_len_samples */);
+ }
+ // Expect the filtered value to be
+ // (1 - (251/256) ^ |kTimes|) * |kValue|.
+ const int kExpectedValue = 17;
+ // filtered_current_level() returns the value in Q8.
+ EXPECT_EQ(kExpectedValue, filter.filtered_current_level() >> 8);
+
+ // Update filter again, now with non-zero value for packet length.
+ // Set the current filtered value to be the input, in order to isolate the
+ // impact of |kTimeStretchedSamples|.
+ filter.Update(filter.filtered_current_level() >> 8, kTimeStretchedSamples,
+ kPacketSizeSamples);
+ EXPECT_EQ(21, filter.filtered_current_level() >> 8);
+ // Try negative value and verify that we come back to the previous result.
+ filter.Update(filter.filtered_current_level() >> 8, -kTimeStretchedSamples,
+ kPacketSizeSamples);
+ EXPECT_EQ(kExpectedValue, filter.filtered_current_level() >> 8);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_stats.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_stats.h
deleted file mode 100644
index 722f477ea04..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_stats.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Calculates and stores the packet buffer statistics.
- */
-
-#ifndef BUFFER_STATS_H
-#define BUFFER_STATS_H
-
-#include "automode.h"
-#include "webrtc_neteq.h" /* to define enum WebRtcNetEQPlayoutMode */
-
-/* NetEQ related decisions */
-#define BUFSTATS_DO_NORMAL 0
-#define BUFSTATS_DO_ACCELERATE 1
-#define BUFSTATS_DO_MERGE 2
-#define BUFSTATS_DO_EXPAND 3
-#define BUFSTAT_REINIT 4
-#define BUFSTATS_DO_RFC3389CNG_PACKET 5
-#define BUFSTATS_DO_RFC3389CNG_NOPACKET 6
-#define BUFSTATS_DO_INTERNAL_CNG_NOPACKET 7
-#define BUFSTATS_DO_PREEMPTIVE_EXPAND 8
-#define BUFSTAT_REINIT_DECODER 9
-#define BUFSTATS_DO_DTMF_ONLY 10
-/* Decisions related to when NetEQ is switched off (or in FAX mode) */
-#define BUFSTATS_DO_ALTERNATIVE_PLC 11
-#define BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS 12
-#define BUFSTATS_DO_AUDIO_REPETITION 13
-#define BUFSTATS_DO_AUDIO_REPETITION_INC_TS 14
-
-/* Reinit decoder states after this number of expands (upon arrival of new packet) */
-#define REINIT_AFTER_EXPANDS 100
-
-/* Wait no longer than this number of RecOut calls before using an "early" packet */
-#define MAX_WAIT_FOR_PACKET 10
-
-/* CNG modes */
-#define CNG_OFF 0
-#define CNG_RFC3389_ON 1
-#define CNG_INTERNAL_ON 2
-
-typedef struct
-{
-
- /* store statistical data here */
- int16_t w16_cngOn; /* remember if CNG is interrupted by other event (e.g. DTMF) */
- int16_t w16_noExpand;
- int32_t uw32_CNGplayedTS;
-
- /* VQmon data */
- uint16_t avgDelayMsQ8;
- int16_t maxDelayMs;
-
- AutomodeInst_t Automode_inst;
-
-} BufstatsInst_t;
-
-/****************************************************************************
- * WebRtcNetEQ_BufstatsDecision()
- *
- * Gives a decision about what action is currently desired
- *
- *
- * Input:
- * inst: The bufstat instance
- * cur_size: Current buffer size in ms in Q3 domain
- * targetTS: The desired timestamp to start playout from
- * availableTS: The closest future value available in buffer
- * noPacket: 1 if no packet is available; makes availableTS undefined
- * prevPlayMode: mode of the last NetEQ playout
- * timestampsPerCall: number of timestamps per 10 ms
- *
- * Output:
- * Returns: A decision, as defined above (see top of file)
- *
- */
-
-uint16_t WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, int16_t frameSize,
- int32_t cur_size, uint32_t targetTS,
- uint32_t availableTS, int noPacket,
- int cngPacket, int prevPlayMode,
- enum WebRtcNetEQPlayoutMode playoutMode,
- int timestampsPerCall, int NoOfExpandCalls,
- int16_t fs_mult,
- int16_t lastModeBGNonly, int playDtmf);
-
-#endif
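For orientation, the decision codes defined in the header above are consumed by the legacy NetEQ playout code roughly as in the following sketch (hypothetical and illustrative only; the function name and the annotations are assumptions, while the BUFSTATS_*/BUFSTAT_* macros come from the header being removed):

#include <stdint.h>

#include "buffer_stats.h"  // Legacy header removed by this change.

// Hypothetical consumer of a buffer-statistics decision.
static void HandleBufstatsDecision(uint16_t decision) {
  switch (decision) {
    case BUFSTATS_DO_NORMAL:             // Decode and play the next packet.
      break;
    case BUFSTATS_DO_ACCELERATE:         // Time-compress audio to drain the buffer.
      break;
    case BUFSTATS_DO_PREEMPTIVE_EXPAND:  // Time-stretch audio to build up the buffer.
      break;
    case BUFSTATS_DO_MERGE:              // Smooth the transition after an expand.
      break;
    case BUFSTATS_DO_EXPAND:             // Conceal a missing packet.
      break;
    case BUFSTAT_REINIT:                 // Reset state, e.g. after an error.
      break;
    default:                             // CNG, DTMF-only and repetition cases.
      break;
  }
}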
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/bufstats_decision.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/bufstats_decision.c
deleted file mode 100644
index 352e0507746..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/bufstats_decision.c
+++ /dev/null
@@ -1,427 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the function where the main decision logic for buffer level
- * adaptation happens.
- */
-
-#include "buffer_stats.h"
-
-#include <assert.h>
-
-#include "signal_processing_library.h"
-
-#include "automode.h"
-#include "neteq_defines.h"
-#include "neteq_error_codes.h"
-#include "webrtc_neteq.h"
-
-#define NETEQ_BUFSTAT_20MS_Q7 2560 /* = 20 ms in Q7 */
-
-uint16_t WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, int16_t frameSize,
- int32_t cur_size, uint32_t targetTS,
- uint32_t availableTS, int noPacket,
- int cngPacket, int prevPlayMode,
- enum WebRtcNetEQPlayoutMode playoutMode,
- int timestampsPerCall, int NoOfExpandCalls,
- int16_t fs_mult,
- int16_t lastModeBGNonly, int playDtmf)
-{
-
- int currentDelayMs;
- int32_t currSizeSamples = cur_size;
- int extraDelayPacketsQ8 = 0;
-
- /* Avoid overflow if the buffer size is really large (cur_size is limited to 256 ms) */
- int32_t curr_sizeQ7 = WEBRTC_SPL_LSHIFT_W32(cur_size, 4);
- int level_limit_hi, level_limit_lo;
-
- inst->Automode_inst.prevTimeScale &= (prevPlayMode == MODE_SUCCESS_ACCELERATE
- || prevPlayMode == MODE_LOWEN_ACCELERATE || prevPlayMode == MODE_SUCCESS_PREEMPTIVE
- || prevPlayMode == MODE_LOWEN_PREEMPTIVE);
-
- if ((prevPlayMode != MODE_RFC3389CNG) && (prevPlayMode != MODE_CODEC_INTERNAL_CNG))
- {
- /*
- * Do not update buffer history if currently playing CNG
- * since it will bias the filtered buffer level.
- */
- WebRtcNetEQ_BufferLevelFilter(cur_size, &(inst->Automode_inst), timestampsPerCall,
- fs_mult);
- }
- else
- {
- /* only update time counters */
- inst->Automode_inst.packetIatCountSamp += timestampsPerCall; /* packet inter-arrival time */
- inst->Automode_inst.peakIatCountSamp += timestampsPerCall; /* peak inter-arrival time */
- inst->Automode_inst.timescaleHoldOff >>= 1; /* time-scaling limiter */
- }
- cur_size = WEBRTC_SPL_MIN(curr_sizeQ7, WEBRTC_SPL_WORD16_MAX);
-
- /* Calculate VQmon related variables */
- /* avgDelay = avgDelay*(511/512) + currentDelay*(1/512) (sample ms delay in Q8) */
- inst->avgDelayMsQ8 = (int16_t) (WEBRTC_SPL_MUL_16_16_RSFT(inst->avgDelayMsQ8,511,9)
- + (cur_size >> 9));
-
- /* Update maximum delay if needed */
- currentDelayMs = (curr_sizeQ7 >> 7);
- if (currentDelayMs > inst->maxDelayMs)
- {
- inst->maxDelayMs = currentDelayMs;
- }
-
- /* NetEQ is on with normal or streaming mode */
- if (playoutMode == kPlayoutOn || playoutMode == kPlayoutStreaming)
- {
- /* Guard for errors, so that it should not get stuck in error mode */
- if (prevPlayMode == MODE_ERROR)
- {
- if (noPacket)
- {
- return BUFSTATS_DO_EXPAND;
- }
- else
- {
- return BUFSTAT_REINIT;
- }
- }
-
- if (prevPlayMode != MODE_EXPAND && prevPlayMode != MODE_FADE_TO_BGN)
- {
- inst->w16_noExpand = 1;
- }
- else
- {
- inst->w16_noExpand = 0;
- }
-
- if (cngPacket)
- {
- /* signed difference between wanted and available TS */
- int32_t diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
- int32_t optimal_level_samp = (inst->Automode_inst.optBufLevel *
- inst->Automode_inst.packetSpeechLenSamp) >> 8;
- int32_t excess_waiting_time_samp = -diffTS - optimal_level_samp;
-
- if (excess_waiting_time_samp > optimal_level_samp / 2)
- {
- /* The waiting time for this packet will be longer than 1.5
- * times the wanted buffer delay. Advance the clock to cut
- * waiting time down to the optimal.
- */
- inst->uw32_CNGplayedTS += excess_waiting_time_samp;
- diffTS += excess_waiting_time_samp;
- }
-
- if ((diffTS) < 0 && (prevPlayMode == MODE_RFC3389CNG))
- {
- /* Not time to play this packet yet. Wait another round before using this
- * packet. Keep on playing CNG from previous CNG parameters. */
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
-
- /* otherwise, go for the CNG packet now */
- return BUFSTATS_DO_RFC3389CNG_PACKET;
- }
-
- /*Check for expand/cng */
- if (noPacket)
- {
- if (inst->w16_cngOn == CNG_RFC3389_ON)
- {
- /* keep on playing CNG */
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- else if (inst->w16_cngOn == CNG_INTERNAL_ON)
- {
- /* keep on playing internal CNG */
- return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
- }
- else if (playDtmf == 1)
- {
- /* we have no audio data, but can play DTMF */
- return BUFSTATS_DO_DTMF_ONLY;
- }
- else
- {
- /* nothing to play => do Expand */
- return BUFSTATS_DO_EXPAND;
- }
- }
-
- /*
- * If the expand period was very long, reset NetEQ since it is likely that the
- * sender was restarted.
- */
- if (NoOfExpandCalls > REINIT_AFTER_EXPANDS) return BUFSTAT_REINIT_DECODER;
-
- /* Calculate extra delay in Q8 packets */
- if (inst->Automode_inst.extraDelayMs > 0 && inst->Automode_inst.packetSpeechLenSamp
- > 0)
- {
-
- /* (extra delay in packets, in Q8) */
- extraDelayPacketsQ8 =
- ((inst->Automode_inst.extraDelayMs * 8 * fs_mult) << 8) /
- inst->Automode_inst.packetSpeechLenSamp;
- }
-
- /* Check if needed packet is available */
- if (targetTS == availableTS)
- {
-
- /* If last mode was not expand, and there is no DTMF to play */
- if (inst->w16_noExpand == 1 && playDtmf == 0)
- {
- /* If so check for accelerate */
-
- level_limit_lo = ((inst->Automode_inst.optBufLevel) >> 1) /* 50 % */
- + ((inst->Automode_inst.optBufLevel) >> 2); /* ... + 25% = 75% */
-
- /* set upper limit to optBufLevel, but make sure that window is at least 20ms */
- level_limit_hi = WEBRTC_SPL_MAX(inst->Automode_inst.optBufLevel,
- level_limit_lo +
- WebRtcSpl_DivW32W16ResW16((WEBRTC_SPL_MUL(20*8, fs_mult) << 8),
- inst->Automode_inst.packetSpeechLenSamp));
-
- /* if extra delay is non-zero, add it */
- if (extraDelayPacketsQ8 > 0)
- {
- level_limit_hi += extraDelayPacketsQ8;
- level_limit_lo += extraDelayPacketsQ8;
- }
-
- if (((inst->Automode_inst.buffLevelFilt >= level_limit_hi) &&
- (inst->Automode_inst.timescaleHoldOff == 0)) ||
- (inst->Automode_inst.buffLevelFilt >= level_limit_hi << 2))
- {
- /*
- * Buffer level higher than limit and time-scaling allowed,
- * OR buffer level _really_ high.
- */
- return BUFSTATS_DO_ACCELERATE;
- }
- else if ((inst->Automode_inst.buffLevelFilt < level_limit_lo)
- && (inst->Automode_inst.timescaleHoldOff == 0))
- {
- return BUFSTATS_DO_PREEMPTIVE_EXPAND;
- }
- }
- return BUFSTATS_DO_NORMAL;
- }
-
- /* Check for Merge */
- else if (availableTS > targetTS)
- {
-
- /* Check that we do not play a packet "too early" */
- if ((prevPlayMode == MODE_EXPAND)
- && (availableTS - targetTS
- < (uint32_t) WEBRTC_SPL_MUL_16_16((int16_t)timestampsPerCall,
- (int16_t)REINIT_AFTER_EXPANDS))
- && (NoOfExpandCalls < MAX_WAIT_FOR_PACKET)
- && (availableTS
- > targetTS
- + WEBRTC_SPL_MUL_16_16((int16_t)timestampsPerCall,
- (int16_t)NoOfExpandCalls))
- && (inst->Automode_inst.buffLevelFilt <= inst->Automode_inst.optBufLevel
- + extraDelayPacketsQ8))
- {
- if (playDtmf == 1)
- {
- /* we still have DTMF to play, so do not perform expand */
- return BUFSTATS_DO_DTMF_ONLY;
- }
- else
- {
- /* nothing to play */
- return BUFSTATS_DO_EXPAND;
- }
- }
-
- /* If previous was CNG period or BGNonly then no merge is needed */
- if ((prevPlayMode == MODE_RFC3389CNG) || (prevPlayMode == MODE_CODEC_INTERNAL_CNG)
- || lastModeBGNonly)
- {
- /*
- * Keep the same delay as before the CNG (or maximum 70 ms in buffer as safety
- * precaution), but make sure that the number of samples in buffer is no
- * higher than 4 times the optimal level.
- */
- int32_t diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
- int val = ((inst->Automode_inst.optBufLevel +
- extraDelayPacketsQ8) *
- inst->Automode_inst.packetSpeechLenSamp) >> 6;
- if (diffTS >= 0 || val < currSizeSamples)
- {
- /* it is time to play this new packet */
- return BUFSTATS_DO_NORMAL;
- }
- else
- {
- /* it is too early to play this new packet => keep on playing CNG */
- if (prevPlayMode == MODE_RFC3389CNG)
- {
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- else if (prevPlayMode == MODE_CODEC_INTERNAL_CNG)
- {
- return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
- }
- else if (playDtmf == 1)
- {
- /* we have no audio data, but can play DTMF */
- return BUFSTATS_DO_DTMF_ONLY;
- }
- else /* lastModeBGNonly */
- {
- /* signal expand, but this will result in BGN again */
- return BUFSTATS_DO_EXPAND;
- }
- }
- }
-
- /* Do not merge unless we have done an Expand before (for complexity reasons) */
- if ((inst->w16_noExpand == 0) || ((frameSize < timestampsPerCall) && (cur_size
- > NETEQ_BUFSTAT_20MS_Q7)))
- {
- return BUFSTATS_DO_MERGE;
- }
- else if (playDtmf == 1)
- {
- /* play DTMF instead of expand */
- return BUFSTATS_DO_DTMF_ONLY;
- }
- else
- {
- return BUFSTATS_DO_EXPAND;
- }
- }
- }
- else
- { /* kPlayoutOff or kPlayoutFax */
- if (cngPacket)
- {
- if (((int32_t) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
- {
- /* time to play this packet now */
- return BUFSTATS_DO_RFC3389CNG_PACKET;
- }
- else
- {
- /* wait before playing this packet */
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- }
- if (noPacket)
- {
- /*
- * No packet =>
- * 1. If in CNG mode play as usual
- * 2. Otherwise use other method to generate data and hold TS value
- */
- if (inst->w16_cngOn == CNG_RFC3389_ON)
- {
- /* keep on playing CNG */
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- else if (inst->w16_cngOn == CNG_INTERNAL_ON)
- {
- /* keep on playing internal CNG */
- return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
- }
- else
- {
- /* nothing to play => invent some data to play out */
- if (playoutMode == kPlayoutOff)
- {
- return BUFSTATS_DO_ALTERNATIVE_PLC;
- }
- else if (playoutMode == kPlayoutFax)
- {
- return BUFSTATS_DO_AUDIO_REPETITION;
- }
- else
- {
- /* UNDEFINED, should not get here... */
- assert(0);
- return BUFSTAT_REINIT;
- }
- }
- }
- else if (targetTS == availableTS)
- {
- return BUFSTATS_DO_NORMAL;
- }
- else
- {
- if (((int32_t) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
- {
- return BUFSTATS_DO_NORMAL;
- }
- else if (playoutMode == kPlayoutOff)
- {
- /*
- * If currently playing CNG, continue with that. Don't increase TS
- * since uw32_CNGplayedTS will be increased.
- */
- if (inst->w16_cngOn == CNG_RFC3389_ON)
- {
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- else if (inst->w16_cngOn == CNG_INTERNAL_ON)
- {
- return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
- }
- else
- {
- /*
- * Otherwise, do PLC and increase TS while waiting for the time to
- * play this packet.
- */
- return BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS;
- }
- }
- else if (playoutMode == kPlayoutFax)
- {
- /*
- * If currently playing CNG, continue with that. Don't increase TS since
- * uw32_CNGplayedTS will be increased.
- */
- if (inst->w16_cngOn == CNG_RFC3389_ON)
- {
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- else if (inst->w16_cngOn == CNG_INTERNAL_ON)
- {
- return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
- }
- else
- {
- /*
- * Otherwise, do audio repetition and increase TS while waiting for the
- * time to play this packet.
- */
- return BUFSTATS_DO_AUDIO_REPETITION_INC_TS;
- }
- }
- else
- {
- /* UNDEFINED, should not get here... */
- assert(0);
- return BUFSTAT_REINIT;
- }
- }
- }
- /* We should not get here (but sometimes we do anyway...) */
- return BUFSTAT_REINIT;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/cng_internal.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/cng_internal.c
deleted file mode 100644
index cb4878fee57..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/cng_internal.c
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the function for obtaining comfort noise from noise parameters
- * according to IETF RFC 3389.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-#include "webrtc_cng.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-
-/****************************************************************************
- * WebRtcNetEQ_Cng(...)
- *
- * This function produces CNG according to RFC 3389.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - len : Number of samples to produce (max 640 or
- * 640 - fsHz*5/8000 for first-time CNG, governed by
- * the definition of WEBRTC_CNG_MAX_OUTSIZE_ORDER in
- * webrtc_cng.h)
- *
- * Output:
- * - pw16_outData : Output CNG
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-#ifdef NETEQ_CNG_CODEC
-/* Must compile NetEQ with CNG support to enable this function */
-
-int WebRtcNetEQ_Cng(DSPInst_t *inst, int16_t *pw16_outData, int len)
-{
- int16_t w16_winMute = 0; /* mixing factor for overlap data */
- int16_t w16_winUnMute = 0; /* mixing factor for comfort noise */
- int16_t w16_winMuteInc = 0; /* mixing factor increment (negative) */
- int16_t w16_winUnMuteInc = 0; /* mixing factor increment */
- int i;
-
- /*
- * Check if last RecOut call was other than RFC3389,
- * that is, this call is the first of a CNG period.
- */
- if (inst->w16_mode != MODE_RFC3389CNG)
- {
- /* Reset generation and overlap slightly with old data */
-
- /* Generate len samples + overlap */
- if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData,
- (int16_t) (len + inst->ExpandInst.w16_overlap), 1) < 0)
- {
- /* error returned */
- return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
- }
-
- /* Set windowing parameters depending on sample rate */
- if (inst->fs == 8000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_8KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_8KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_8KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC;
-#ifdef NETEQ_WIDEBAND
- }
- else if (inst->fs == 16000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_16KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_16KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_16KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- }
- else if (inst->fs == 32000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_32KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_32KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_32KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- }
- else if (inst->fs == 48000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_48KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_48KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_48KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC;
-#endif
- }
- else
- {
- /* Unsupported sample rate (should not be possible) */
- return NETEQ_OTHER_ERROR;
- }
-
- /* Do overlap add between new vector and overlap */
- for (i = 0; i < inst->ExpandInst.w16_overlap; i++)
- {
- /* overlapVec[i] = WinMute * overlapVec[i] + WinUnMute * outData[i] */
- inst->ExpandInst.pw16_overlapVec[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- WEBRTC_SPL_MUL_16_16(
- inst->ExpandInst.pw16_overlapVec[i], w16_winMute) +
- WEBRTC_SPL_MUL_16_16(pw16_outData[i], w16_winUnMute)
- + 16384, 15); /* shift with proper rounding */
-
- w16_winMute += w16_winMuteInc; /* decrease mute factor (inc<0) */
- w16_winUnMute += w16_winUnMuteInc; /* increase unmute factor (inc>0) */
-
- }
-
- /*
- * Shift the contents of the outData buffer by overlap samples, since we
- * already used these first samples in the overlapVec above
- */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_outData+inst->ExpandInst.w16_overlap, len);
-
- }
- else
- {
- /* This is a subsequent CNG call; no special overlap needed */
-
- /* Generate len samples */
- if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData, (int16_t) len, 0) < 0)
- {
- /* error returned */
- return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
- }
- }
-
- return 0;
-
-}
-
-#endif /* NETEQ_CNG_CODEC */
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.c
deleted file mode 100644
index bb34f5e58d3..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.c
+++ /dev/null
@@ -1,782 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of the codec database.
- */
-
-#include "codec_db.h"
-
-#include <string.h> /* to define NULL */
-
-#include "signal_processing_library.h"
-
-#include "neteq_error_codes.h"
-
-/*
- * Resets the codec database.
- */
-
-int WebRtcNetEQ_DbReset(CodecDbInst_t *inst)
-{
- int i;
-
- WebRtcSpl_MemSetW16((int16_t*) inst, 0,
- sizeof(CodecDbInst_t) / sizeof(int16_t));
-
- for (i = 0; i < NUM_TOTAL_CODECS; i++)
- {
- inst->position[i] = -1;
- }
-
- for (i = 0; i < NUM_CODECS; i++)
- {
- inst->payloadType[i] = -1;
- }
-
- for (i = 0; i < NUM_CNG_CODECS; i++)
- {
- inst->CNGpayloadType[i] = -1;
- }
-
- return 0;
-}
-
-/*
- * Adds a new codec to the database.
- */
-
-int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
- int16_t payloadType, FuncDecode funcDecode,
- FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
- FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
- FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
- FuncUpdBWEst funcUpdBWEst, FuncDurationEst funcDurationEst,
- FuncGetErrorCode funcGetErrorCode, void* codec_state,
- uint16_t codec_fs)
-{
-
- int temp;
- int insertCNGcodec = 0, overwriteCNGcodec = 0, CNGpos = -1;
-
-#ifndef NETEQ_RED_CODEC
- if (codec == kDecoderRED)
- {
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
-#endif
- if (((int) codec <= (int) kDecoderReservedStart) || ((int) codec
- >= (int) kDecoderReservedEnd))
- {
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
-
- if ((codec_fs != 8000)
-#ifdef NETEQ_WIDEBAND
- &&(codec_fs!=16000)
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- &&(codec_fs!=32000)
-#endif
-#if defined(NETEQ_48KHZ_WIDEBAND) || defined(NETEQ_OPUS_CODEC)
- &&(codec_fs!=48000)
-#endif
- )
- {
- return CODEC_DB_UNSUPPORTED_FS;
- }
-
- /* Ensure that the codec type is supported */
- switch (codec)
- {
-#ifdef NETEQ_PCM16B_CODEC
- case kDecoderPCM16B :
- case kDecoderPCM16B_2ch :
-#endif
-#ifdef NETEQ_G711_CODEC
- case kDecoderPCMu :
- case kDecoderPCMa :
- case kDecoderPCMu_2ch :
- case kDecoderPCMa_2ch :
-#endif
-#ifdef NETEQ_ILBC_CODEC
- case kDecoderILBC :
-#endif
-#ifdef NETEQ_ISAC_CODEC
- case kDecoderISAC :
-#endif
-#ifdef NETEQ_ISAC_SWB_CODEC
- case kDecoderISACswb :
-#endif
-#ifdef NETEQ_ISAC_FB_CODEC
- case kDecoderISACfb :
-#endif
-#ifdef NETEQ_OPUS_CODEC
- case kDecoderOpus :
-#endif
-#ifdef NETEQ_G722_CODEC
- case kDecoderG722 :
- case kDecoderG722_2ch :
-#endif
-#ifdef NETEQ_WIDEBAND
- case kDecoderPCM16Bwb :
- case kDecoderPCM16Bwb_2ch :
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- case kDecoderPCM16Bswb32kHz :
- case kDecoderPCM16Bswb32kHz_2ch :
-#endif
-#ifdef NETEQ_CNG_CODEC
- case kDecoderCNG :
-#endif
-#ifdef NETEQ_ATEVENT_DECODE
- case kDecoderAVT :
-#endif
-#ifdef NETEQ_RED_CODEC
- case kDecoderRED :
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- case kDecoderPCM16Bswb48kHz :
-#endif
-#ifdef NETEQ_ARBITRARY_CODEC
- case kDecoderArbitrary:
-#endif
-#ifdef NETEQ_G729_CODEC
- case kDecoderG729:
-#endif
-#ifdef NETEQ_G729_1_CODEC
- case kDecoderG729_1 :
-#endif
-#ifdef NETEQ_G726_CODEC
- case kDecoderG726_16 :
- case kDecoderG726_24 :
- case kDecoderG726_32 :
- case kDecoderG726_40 :
-#endif
-#ifdef NETEQ_G722_1_CODEC
- case kDecoderG722_1_16 :
- case kDecoderG722_1_24 :
- case kDecoderG722_1_32 :
-#endif
-#ifdef NETEQ_G722_1C_CODEC
- case kDecoderG722_1C_24 :
- case kDecoderG722_1C_32 :
- case kDecoderG722_1C_48 :
-#endif
-#ifdef NETEQ_SPEEX_CODEC
- case kDecoderSPEEX_8 :
- case kDecoderSPEEX_16 :
-#endif
-#ifdef NETEQ_CELT_CODEC
- case kDecoderCELT_32 :
- case kDecoderCELT_32_2ch :
-#endif
-#ifdef NETEQ_GSMFR_CODEC
- case kDecoderGSMFR :
-#endif
-#ifdef NETEQ_AMR_CODEC
- case kDecoderAMR :
-#endif
-#ifdef NETEQ_AMRWB_CODEC
- case kDecoderAMRWB :
-#endif
- {
- /* If we end up here, the inserted codec is supported => Do nothing */
- break;
- }
- default:
- {
- /* If we get to this point, the inserted codec is not supported */
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
- }
-
- /* Check to see if payload type is taken */
- if (WebRtcNetEQ_DbGetCodec(inst, payloadType) > 0)
- {
- return CODEC_DB_PAYLOAD_TAKEN;
- }
-
- /* Special case for CNG codecs */
- if (codec == kDecoderCNG)
- {
- /* check if this is first CNG codec to be registered */
- if (WebRtcNetEQ_DbGetPayload(inst, codec) == CODEC_DB_NOT_EXIST2)
- {
- /* no other CNG codec found */
- insertCNGcodec = 1;
- }
-
- /* find the appropriate insert position in CNG payload vector */
- switch (codec_fs)
- {
- case 8000:
- CNGpos = 0;
- /*
- * The 8 kHz CNG payload type is the one associated with the regular codec DB
- * and should override any other setting.
- * Overwrite if this isn't the first CNG codec.
- */
- overwriteCNGcodec = !insertCNGcodec;
- break;
-#ifdef NETEQ_WIDEBAND
- case 16000:
- CNGpos = 1;
- break;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- case 32000:
- CNGpos = 2;
- break;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- case 48000:
- CNGpos = 3;
- break;
-#endif
- default:
- /* If we get to this point, the inserted codec is not supported */
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
-
- /* insert CNG payload type */
- inst->CNGpayloadType[CNGpos] = payloadType;
-
- }
-
- if ((codec != kDecoderCNG) || (insertCNGcodec == 1) || (overwriteCNGcodec == 1))
- {
- /* Check if we have reached the maximum numbers of simultaneous codecs */
- if (inst->nrOfCodecs == NUM_CODECS) return CODEC_DB_FULL;
-
- /* Check whether the codec has already been registered in the DB; if so,
- remove it and re-register it according to the new spec */
- if ((inst->position[codec] != -1) && (overwriteCNGcodec != 1))
- { /* if registering multiple CNG codecs, don't remove, just overwrite */
- WebRtcNetEQ_DbRemove(inst, codec);
- }
-
- if (overwriteCNGcodec == 1)
- {
- temp = inst->position[codec];
- }
- else
- {
- temp = inst->nrOfCodecs; /* Store this codec's position */
- inst->position[codec] = temp;
- inst->nrOfCodecs++;
- }
-
- inst->payloadType[temp] = payloadType;
-
- /* Copy to database */
- inst->codec_state[temp] = codec_state;
- inst->funcDecode[temp] = funcDecode;
- inst->funcDecodeRCU[temp] = funcDecodeRCU;
- inst->funcAddLatePkt[temp] = funcAddLatePkt;
- inst->funcDecodeInit[temp] = funcDecodeInit;
- inst->funcDecodePLC[temp] = funcDecodePLC;
- inst->funcGetMDinfo[temp] = funcGetMDinfo;
- inst->funcGetPitch[temp] = funcGetPitch;
- inst->funcUpdBWEst[temp] = funcUpdBWEst;
- inst->funcDurationEst[temp] = funcDurationEst;
- inst->funcGetErrorCode[temp] = funcGetErrorCode;
- inst->codec_fs[temp] = codec_fs;
-
- }
-
- return 0;
-}
-
-/*
- * Removes a codec from the database.
- */
-
-int WebRtcNetEQ_DbRemove(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec)
-{
- int i;
- int pos = -1;
-
-#ifndef NETEQ_RED_CODEC
- if (codec == kDecoderRED)
- {
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
-#endif
- if (((int) codec <= (int) kDecoderReservedStart) || ((int) codec
- >= (int) kDecoderReservedEnd))
- {
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
-
- pos = inst->position[codec];
- if (pos == -1)
- {
- return CODEC_DB_NOT_EXIST4;
- }
- else
- {
- /* Remove this codec */
- inst->position[codec] = -1;
- for (i = pos; i < (inst->nrOfCodecs - 1); i++)
- {
- inst->payloadType[i] = inst->payloadType[i + 1];
- inst->codec_state[i] = inst->codec_state[i + 1];
- inst->funcDecode[i] = inst->funcDecode[i + 1];
- inst->funcDecodeRCU[i] = inst->funcDecodeRCU[i + 1];
- inst->funcAddLatePkt[i] = inst->funcAddLatePkt[i + 1];
- inst->funcDecodeInit[i] = inst->funcDecodeInit[i + 1];
- inst->funcDecodePLC[i] = inst->funcDecodePLC[i + 1];
- inst->funcGetMDinfo[i] = inst->funcGetMDinfo[i + 1];
- inst->funcGetPitch[i] = inst->funcGetPitch[i + 1];
- inst->funcDurationEst[i] = inst->funcDurationEst[i + 1];
- inst->funcUpdBWEst[i] = inst->funcUpdBWEst[i + 1];
- inst->funcGetErrorCode[i] = inst->funcGetErrorCode[i + 1];
- inst->codec_fs[i] = inst->codec_fs[i + 1];
- }
- inst->payloadType[i] = -1;
- inst->codec_state[i] = NULL;
- inst->funcDecode[i] = NULL;
- inst->funcDecodeRCU[i] = NULL;
- inst->funcAddLatePkt[i] = NULL;
- inst->funcDecodeInit[i] = NULL;
- inst->funcDecodePLC[i] = NULL;
- inst->funcGetMDinfo[i] = NULL;
- inst->funcGetPitch[i] = NULL;
- inst->funcDurationEst[i] = NULL;
- inst->funcUpdBWEst[i] = NULL;
- inst->funcGetErrorCode[i] = NULL;
- inst->codec_fs[i] = 0;
- /* Move down all the codecs above this one */
- for (i = 0; i < NUM_TOTAL_CODECS; i++)
- {
- if (inst->position[i] >= pos)
- {
- inst->position[i] = inst->position[i] - 1;
- }
- }
- inst->nrOfCodecs--;
-
- if (codec == kDecoderCNG)
- {
- /* also remove all registered CNG payload types */
- for (i = 0; i < NUM_CNG_CODECS; i++)
- {
- inst->CNGpayloadType[i] = -1;
- }
- }
- }
- return 0;
-}
-
-/*
- * Get the decoder function pointers for a codec.
- */
-
-int WebRtcNetEQ_DbGetPtrs(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
- CodecFuncInst_t *ptr_inst)
-{
-
- int pos = inst->position[codec];
- if ((codec <= kDecoderReservedStart) || (codec >= kDecoderReservedEnd) || (codec
- > NUM_TOTAL_CODECS))
- {
- /* ERROR */
- pos = -1;
- }
- if (pos >= 0)
- {
- ptr_inst->codec_state = inst->codec_state[pos];
- ptr_inst->funcAddLatePkt = inst->funcAddLatePkt[pos];
- ptr_inst->funcDecode = inst->funcDecode[pos];
- ptr_inst->funcDecodeRCU = inst->funcDecodeRCU[pos];
- ptr_inst->funcDecodeInit = inst->funcDecodeInit[pos];
- ptr_inst->funcDecodePLC = inst->funcDecodePLC[pos];
- ptr_inst->funcGetMDinfo = inst->funcGetMDinfo[pos];
- ptr_inst->funcUpdBWEst = inst->funcUpdBWEst[pos];
- ptr_inst->funcGetErrorCode = inst->funcGetErrorCode[pos];
- ptr_inst->codec_fs = inst->codec_fs[pos];
- return 0;
- }
- else
- {
- WebRtcSpl_MemSetW16((int16_t*) ptr_inst, 0,
- sizeof(CodecFuncInst_t) / sizeof(int16_t));
- return CODEC_DB_NOT_EXIST1;
- }
-}
-
-/*
- * Returns payload number given a codec identifier.
- */
-
-int WebRtcNetEQ_DbGetPayload(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codecID)
-{
- if (inst->position[codecID] == -1)
- return CODEC_DB_NOT_EXIST2;
- else
- return (inst->payloadType[inst->position[codecID]]);
-
-}
-
-/*
- * Returns codec identifier given a payload number.
- * Returns -1 if the payload type does not exist.
- */
-
-int WebRtcNetEQ_DbGetCodec(const CodecDbInst_t *inst, int payloadType)
-{
- int i, pos;
-
- for (i = 0; i < NUM_TOTAL_CODECS; i++)
- {
- pos = inst->position[i];
- if (pos != -1)
- {
- if (inst->payloadType[pos] == payloadType) return i;
- }
- }
-
- /* did not find payload type */
- /* check if it's a CNG codec */
- if (WebRtcNetEQ_DbIsCNGPayload(inst, payloadType))
- {
- return kDecoderCNG;
- }
-
- /* found no match */
- return CODEC_DB_NOT_EXIST3;
-}
-
-/*
- * Extracts the payload split information for the codec with the specified codecID.
- */
-
-int WebRtcNetEQ_DbGetSplitInfo(SplitInfo_t *inst, enum WebRtcNetEQDecoder codecID,
- int codedsize)
-{
-
- switch (codecID)
- {
-#ifdef NETEQ_ISAC_CODEC
- case kDecoderISAC:
-#endif
-#ifdef NETEQ_ISAC_SWB_CODEC
- case kDecoderISACswb:
-#endif
-#ifdef NETEQ_ISAC_FB_CODEC
- case kDecoderISACfb:
-#endif
-#ifdef NETEQ_OPUS_CODEC
- case kDecoderOpus:
-#endif
-#ifdef NETEQ_ARBITRARY_CODEC
- case kDecoderArbitrary:
-#endif
-#ifdef NETEQ_AMR_CODEC
- case kDecoderAMR:
-#endif
-#ifdef NETEQ_AMRWB_CODEC
- case kDecoderAMRWB:
-#endif
-#ifdef NETEQ_G726_CODEC
- /* Treat G726 as non-splittable to simplify the implementation */
- case kDecoderG726_16:
- case kDecoderG726_24:
- case kDecoderG726_32:
- case kDecoderG726_40:
-#endif
-#ifdef NETEQ_SPEEX_CODEC
- case kDecoderSPEEX_8:
- case kDecoderSPEEX_16:
-#endif
-#ifdef NETEQ_CELT_CODEC
- case kDecoderCELT_32 :
- case kDecoderCELT_32_2ch :
-#endif
-#ifdef NETEQ_G729_1_CODEC
- case kDecoderG729_1:
-#endif
- {
- /* These codecs' payloads are not splittable */
- inst->deltaBytes = NO_SPLIT;
- return 0;
- }
-
- /*
- * Sample-based coders are a special case.
- * In this case, deltaTime signals the number of bytes per timestamp unit,
- * times 2, in the log2 domain.
- */
-#if (defined NETEQ_G711_CODEC)
- case kDecoderPCMu:
- case kDecoderPCMa:
- case kDecoderPCMu_2ch:
- case kDecoderPCMa_2ch:
- {
- inst->deltaBytes = -12;
- inst->deltaTime = 1;
- return 0;
- }
-#endif
-#if (defined NETEQ_G722_CODEC)
- case kDecoderG722:
- case kDecoderG722_2ch:
- {
- inst->deltaBytes = -14;
- inst->deltaTime = 0;
- return 0;
- }
-#endif
-#if (defined NETEQ_PCM16B_CODEC)
- case kDecoderPCM16B:
- case kDecoderPCM16B_2ch:
- {
- inst->deltaBytes = -12;
- inst->deltaTime = 2;
- return 0;
- }
-#endif
-#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_WIDEBAND))
- case kDecoderPCM16Bwb:
- case kDecoderPCM16Bwb_2ch:
- {
- inst->deltaBytes = -14;
- inst->deltaTime = 2;
- return 0;
- }
-#endif
-#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_32KHZ_WIDEBAND))
- case kDecoderPCM16Bswb32kHz:
- case kDecoderPCM16Bswb32kHz_2ch:
- {
- inst->deltaBytes = -18;
- inst->deltaTime = 2;
- return 0;
- }
-#endif
-#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_48KHZ_WIDEBAND))
- case kDecoderPCM16Bswb48kHz:
- {
- inst->deltaBytes = -22;
- inst->deltaTime = 2;
- return 0;
- }
-#endif
-
- /* Splittable payloads */
-#ifdef NETEQ_G722_1_CODEC
- case kDecoderG722_1_16:
- {
- inst->deltaBytes = 40;
- inst->deltaTime = 320;
- return 0;
- }
- case kDecoderG722_1_24:
- {
- inst->deltaBytes = 60;
- inst->deltaTime = 320;
- return 0;
- }
- case kDecoderG722_1_32:
- {
- inst->deltaBytes = 80;
- inst->deltaTime = 320;
- return 0;
- }
-#endif
-#ifdef NETEQ_G722_1C_CODEC
- case kDecoderG722_1C_24:
- {
- inst->deltaBytes = 60;
- inst->deltaTime = 640;
- return 0;
- }
- case kDecoderG722_1C_32:
- {
- inst->deltaBytes = 80;
- inst->deltaTime = 640;
- return 0;
- }
- case kDecoderG722_1C_48:
- {
- inst->deltaBytes = 120;
- inst->deltaTime = 640;
- return 0;
- }
-#endif
-#ifdef NETEQ_G729_CODEC
- case kDecoderG729:
- {
- inst->deltaBytes = 10;
- inst->deltaTime = 80;
- return 0;
- }
-#endif
-#ifdef NETEQ_ILBC_CODEC
- case kDecoderILBC:
- {
- /* Check for splitting of iLBC packets.
- * If the payload size is a multiple of 50 bytes it should be split into 30 ms frames.
- * If the payload size is a multiple of 38 bytes it should be split into 20 ms frames.
- * The least common multiple of 38 and 50 is 950, so the payload size must be less than
- * 950 bytes in order to resolve the frames unambiguously.
- * Currently max 12 frames in one bundle. (A small standalone illustration of this rule follows this file's diff.)
- */
- switch (codedsize)
- {
- case 50:
- case 100:
- case 150:
- case 200:
- case 250:
- case 300:
- case 350:
- case 400:
- case 450:
- case 500:
- case 550:
- case 600:
- {
- inst->deltaBytes = 50;
- inst->deltaTime = 240;
- break;
- }
- case 38:
- case 76:
- case 114:
- case 152:
- case 190:
- case 228:
- case 266:
- case 304:
- case 342:
- case 380:
- case 418:
- case 456:
- {
- inst->deltaBytes = 38;
- inst->deltaTime = 160;
- break;
- }
- default:
- {
- return AMBIGUOUS_ILBC_FRAME_SIZE; /* Something not supported... */
- }
- }
- return 0;
- }
-#endif
-#ifdef NETEQ_GSMFR_CODEC
- case kDecoderGSMFR:
- {
- inst->deltaBytes = 33;
- inst->deltaTime = 160;
- return 0;
- }
-#endif
- default:
- { /*Unknown codec */
- inst->deltaBytes = NO_SPLIT;
- return CODEC_DB_UNKNOWN_CODEC;
- }
- } /* end of switch */
-}
-
-/*
- * Returns 1 if codec is multiple description, 0 otherwise.
- * NOTE: This function is a stub, since there currently are no MD codecs.
- */
-int WebRtcNetEQ_DbIsMDCodec(enum WebRtcNetEQDecoder codecID)
-{
- if (0) /* Add test for MD codecs here */
- return 1;
- else
- return 0;
-}
-
-/*
- * Returns 1 if payload type is registered as a CNG codec, 0 otherwise
- */
-int WebRtcNetEQ_DbIsCNGPayload(const CodecDbInst_t *inst, int payloadType)
-{
-#ifdef NETEQ_CNG_CODEC
- int i;
-
- for(i=0; i<NUM_CNG_CODECS; i++)
- {
- if( (inst->CNGpayloadType[i] != -1) && (inst->CNGpayloadType[i] == payloadType) )
- {
- return 1;
- }
- }
-#endif
-
- return 0;
-
-}
-
-/*
- * Return the sample rate for the codec with the given payload type, 0 if error
- */
-uint16_t WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType)
-{
- int i;
- CodecFuncInst_t codecInst;
-
- /* Sanity */
- if (inst == NULL)
- {
- /* return 0 Hz */
- return 0;
- }
-
- /* Check among CNG payloads */
- for (i = 0; i < NUM_CNG_CODECS; i++)
- {
- if ((inst->CNGpayloadType[i] != -1) && (inst->CNGpayloadType[i] == payloadType))
- {
- switch (i)
- {
-#ifdef NETEQ_WIDEBAND
- case 1:
- return 16000;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- case 2:
- return 32000;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- case 3:
- return 48000;
-#endif
- default:
- return 8000;
- }
- }
- }
-
- /* Not a CNG payload, check the other payloads */
- i = WebRtcNetEQ_DbGetCodec(inst, payloadType);
- if (i >= 0)
- {
- if (WebRtcNetEQ_DbGetPtrs(inst, (enum WebRtcNetEQDecoder) i, &codecInst) != 0)
- {
- /* Unexpected error, return 0 Hz */
- return 0;
- }
- return codecInst.codec_fs;
- }
-
- /* If we end up here, we got an error, return 0 Hz */
- return 0;
-
-}
-
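The iLBC bundling rule described in the comment inside WebRtcNetEQ_DbGetSplitInfo() above can be illustrated with a small standalone C++ sketch (names are hypothetical; this is not code from the patch): a payload whose size is a multiple of 50 bytes resolves to 30 ms frames (240 timestamps at 8 kHz), a multiple of 38 bytes resolves to 20 ms frames (160 timestamps), and anything else is ambiguous.

#include <cstdio>

struct IlbcSplit {
  int delta_bytes;  // Bytes per frame.
  int delta_time;   // Timestamps per frame.
  int num_frames;
};

// Returns true if the payload size resolves unambiguously.
bool ResolveIlbcSplit(int payload_bytes, IlbcSplit* out) {
  if (payload_bytes > 0 && payload_bytes <= 600 && payload_bytes % 50 == 0) {
    out->delta_bytes = 50;  // 30 ms frames.
    out->delta_time = 240;
    out->num_frames = payload_bytes / 50;
    return true;
  }
  if (payload_bytes > 0 && payload_bytes <= 456 && payload_bytes % 38 == 0) {
    out->delta_bytes = 38;  // 20 ms frames.
    out->delta_time = 160;
    out->num_frames = payload_bytes / 38;
    return true;
  }
  return false;  // Corresponds to AMBIGUOUS_ILBC_FRAME_SIZE.
}

int main() {
  IlbcSplit split;
  if (ResolveIlbcSplit(100, &split)) {
    std::printf("%d frames of %d bytes (%d timestamps each)\n",
                split.num_frames, split.delta_bytes, split.delta_time);
  }
  return 0;
}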
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.h
deleted file mode 100644
index cc4b48e6f26..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.h
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Interface for the codec database.
- */
-
-#ifndef CODEC_DB_H
-#define CODEC_DB_H
-
-#include "typedefs.h"
-
-#include "webrtc_neteq.h"
-#include "codec_db_defines.h"
-#include "neteq_defines.h"
-
-#if defined(NETEQ_48KHZ_WIDEBAND)
- #define NUM_CNG_CODECS 4
-#elif defined(NETEQ_32KHZ_WIDEBAND)
- #define NUM_CNG_CODECS 3
-#elif defined(NETEQ_WIDEBAND)
- #define NUM_CNG_CODECS 2
-#else
- #define NUM_CNG_CODECS 1
-#endif
-
-typedef struct
-{
-
- int16_t position[NUM_TOTAL_CODECS];
- int16_t nrOfCodecs;
-
- int16_t payloadType[NUM_CODECS];
- FuncDecode funcDecode[NUM_CODECS];
- FuncDecode funcDecodeRCU[NUM_CODECS];
- FuncDecodePLC funcDecodePLC[NUM_CODECS];
- FuncDecodeInit funcDecodeInit[NUM_CODECS];
- FuncAddLatePkt funcAddLatePkt[NUM_CODECS];
- FuncGetMDinfo funcGetMDinfo[NUM_CODECS];
- FuncGetPitchInfo funcGetPitch[NUM_CODECS];
- FuncUpdBWEst funcUpdBWEst[NUM_CODECS];
- FuncDurationEst funcDurationEst[NUM_CODECS];
- FuncGetErrorCode funcGetErrorCode[NUM_CODECS];
- void * codec_state[NUM_CODECS];
- uint16_t codec_fs[NUM_CODECS];
- int16_t CNGpayloadType[NUM_CNG_CODECS];
-
-} CodecDbInst_t;
-
-#define NO_SPLIT -1 /* codec payload cannot be split */
-
-typedef struct
-{
- int16_t deltaBytes;
- int16_t deltaTime;
-} SplitInfo_t;
-
-/*
- * Resets the codec database.
- */
-int WebRtcNetEQ_DbReset(CodecDbInst_t *inst);
-
-/*
- * Adds a new codec to the database.
- */
-int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
- int16_t payloadType, FuncDecode funcDecode,
- FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
- FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
- FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
- FuncUpdBWEst funcUpdBWEst, FuncDurationEst funcDurationEst,
- FuncGetErrorCode funcGetErrorCode, void* codec_state,
- uint16_t codec_fs);
-
-/*
- * Removes a codec from the database.
- */
-int WebRtcNetEQ_DbRemove(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec);
-
-/*
- * Get the decoder function pointers for a codec.
- */
-int WebRtcNetEQ_DbGetPtrs(CodecDbInst_t *inst, enum WebRtcNetEQDecoder,
- CodecFuncInst_t *ptr_inst);
-
-/*
- * Returns payload number given a codec identifier.
- */
-
-int WebRtcNetEQ_DbGetPayload(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codecID);
-
-/*
- * Returns codec identifier given a payload number.
- */
-
-int WebRtcNetEQ_DbGetCodec(const CodecDbInst_t *inst, int payloadType);
-
-/*
- * Extracts the payload split information for the codec with the specified codecID.
- */
-
-int WebRtcNetEQ_DbGetSplitInfo(SplitInfo_t *inst, enum WebRtcNetEQDecoder codecID,
- int codedsize);
-
-/*
- * Returns 1 if codec is multiple description type, 0 otherwise.
- */
-int WebRtcNetEQ_DbIsMDCodec(enum WebRtcNetEQDecoder codecID);
-
-/*
- * Returns 1 if payload type is registered as a CNG codec, 0 otherwise.
- */
-int WebRtcNetEQ_DbIsCNGPayload(const CodecDbInst_t *inst, int payloadType);
-
-/*
- * Return the sample rate for the codec with the given payload type, 0 if error.
- */
-uint16_t WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType);
-
-#endif
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db_defines.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db_defines.h
deleted file mode 100644
index d97306a333c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db_defines.h
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Some definitions related to the codec database.
- */
-
-#ifndef CODEC_DB_DEFINES_H
-#define CODEC_DB_DEFINES_H
-
-#include "typedefs.h"
-
-#define NUM_CODECS 47 /* probably too large with the limited set of supported codecs*/
-#define NUM_TOTAL_CODECS kDecoderReservedEnd
-
-/*
- * Pointer to decoder function.
- */
-typedef int16_t (*FuncDecode)(void* state, int16_t* encoded, int16_t len,
- int16_t* decoded, int16_t* speechType);
-
-/*
- * Pointer to PLC function.
- */
-typedef int16_t (*FuncDecodePLC)(void* state, int16_t* decodec,
- int16_t frames);
-
-/*
- * Pointer to decoder init function.
- */
-typedef int16_t (*FuncDecodeInit)(void* state);
-
-/*
- * Pointer to add late packet function.
- */
-typedef int16_t
- (*FuncAddLatePkt)(void* state, int16_t* encoded, int16_t len);
-
-/*
- * Pointer to get MD info function.
- */
-typedef int16_t (*FuncGetMDinfo)(void* state);
-
-/*
- * Pointer to pitch info function.
- * Return 0 for unvoiced, -1 if pitch is not available.
- */
-typedef int16_t (*FuncGetPitchInfo)(void* state, int16_t* encoded,
- int16_t* length);
-
-/*
- * Pointer to the update bandwidth estimate function
- */
-typedef int16_t (*FuncUpdBWEst)(void* state, const uint16_t *encoded,
- int32_t packet_size,
- uint16_t rtp_seq_number, uint32_t send_ts,
- uint32_t arr_ts);
-
-/*
- * Pointer to the frame size estimate function.
- * Returns the estimated number of samples in the packet.
- */
-typedef int (*FuncDurationEst)(void* state, const uint8_t* payload,
- int payload_length_bytes);
-
-/*
- * Pointer to error code function
- */
-typedef int16_t (*FuncGetErrorCode)(void* state);
-
-typedef struct CodecFuncInst_t_
-{
-
- FuncDecode funcDecode;
- FuncDecode funcDecodeRCU;
- FuncDecodePLC funcDecodePLC;
- FuncDecodeInit funcDecodeInit;
- FuncAddLatePkt funcAddLatePkt;
- FuncGetMDinfo funcGetMDinfo;
- FuncUpdBWEst funcUpdBWEst; /* Currently in use for the ISAC family (without LC) only*/
- FuncDurationEst funcDurationEst;
- FuncGetErrorCode funcGetErrorCode;
- void * codec_state;
- uint16_t codec_fs;
- uint32_t timeStamp;
-
-} CodecFuncInst_t;
-
-#endif
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc
new file mode 100644
index 00000000000..31bb40c9275
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc
@@ -0,0 +1,135 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/comfort_noise.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+
+namespace webrtc {
+
+void ComfortNoise::Reset() {
+ first_call_ = true;
+ internal_error_code_ = 0;
+}
+
+int ComfortNoise::UpdateParameters(Packet* packet) {
+ assert(packet); // Existence is verified by caller.
+ // Get comfort noise decoder.
+ AudioDecoder* cng_decoder = decoder_database_->GetDecoder(
+ packet->header.payloadType);
+ if (!cng_decoder) {
+ delete [] packet->payload;
+ delete packet;
+ return kUnknownPayloadType;
+ }
+ decoder_database_->SetActiveCngDecoder(packet->header.payloadType);
+ CNG_dec_inst* cng_inst = static_cast<CNG_dec_inst*>(cng_decoder->state());
+ int16_t ret = WebRtcCng_UpdateSid(cng_inst,
+ packet->payload,
+ packet->payload_length);
+ delete [] packet->payload;
+ delete packet;
+ if (ret < 0) {
+ internal_error_code_ = WebRtcCng_GetErrorCodeDec(cng_inst);
+ return kInternalError;
+ }
+ return kOK;
+}
+
+int ComfortNoise::Generate(size_t requested_length,
+ AudioMultiVector* output) {
+ // TODO(hlundin): Change to an enumerator and skip assert.
+ assert(fs_hz_ == 8000 || fs_hz_ == 16000 || fs_hz_ == 32000 ||
+ fs_hz_ == 48000);
+ // Not adapted for multi-channel yet.
+ if (output->Channels() != 1) {
+ return kMultiChannelNotSupported;
+ }
+
+ size_t number_of_samples = requested_length;
+ int16_t new_period = 0;
+ if (first_call_) {
+ // Generate noise and overlap slightly with old data.
+ number_of_samples = requested_length + overlap_length_;
+ new_period = 1;
+ }
+ output->AssertSize(number_of_samples);
+ // Get the decoder from the database.
+ AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ if (!cng_decoder) {
+ return kUnknownPayloadType;
+ }
+ CNG_dec_inst* cng_inst = static_cast<CNG_dec_inst*>(cng_decoder->state());
+ // The expression &(*output)[0][0] is a pointer to the first element in
+ // the first channel.
+ if (WebRtcCng_Generate(cng_inst, &(*output)[0][0],
+ static_cast<int16_t>(number_of_samples),
+ new_period) < 0) {
+ // Error returned.
+ output->Zeros(requested_length);
+ internal_error_code_ = WebRtcCng_GetErrorCodeDec(cng_inst);
+ return kInternalError;
+ }
+
+ if (first_call_) {
+ // Set tapering window parameters. Values are in Q15.
+ int16_t muting_window; // Mixing factor for overlap data.
+ int16_t muting_window_increment; // Mixing factor increment (negative).
+ int16_t unmuting_window; // Mixing factor for comfort noise.
+ int16_t unmuting_window_increment; // Mixing factor increment.
+ if (fs_hz_ == 8000) {
+ muting_window = DspHelper::kMuteFactorStart8kHz;
+ muting_window_increment = DspHelper::kMuteFactorIncrement8kHz;
+ unmuting_window = DspHelper::kUnmuteFactorStart8kHz;
+ unmuting_window_increment = DspHelper::kUnmuteFactorIncrement8kHz;
+ } else if (fs_hz_ == 16000) {
+ muting_window = DspHelper::kMuteFactorStart16kHz;
+ muting_window_increment = DspHelper::kMuteFactorIncrement16kHz;
+ unmuting_window = DspHelper::kUnmuteFactorStart16kHz;
+ unmuting_window_increment = DspHelper::kUnmuteFactorIncrement16kHz;
+ } else if (fs_hz_ == 32000) {
+ muting_window = DspHelper::kMuteFactorStart32kHz;
+ muting_window_increment = DspHelper::kMuteFactorIncrement32kHz;
+ unmuting_window = DspHelper::kUnmuteFactorStart32kHz;
+ unmuting_window_increment = DspHelper::kUnmuteFactorIncrement32kHz;
+ } else { // fs_hz_ == 48000
+ muting_window = DspHelper::kMuteFactorStart48kHz;
+ muting_window_increment = DspHelper::kMuteFactorIncrement48kHz;
+ unmuting_window = DspHelper::kUnmuteFactorStart48kHz;
+ unmuting_window_increment = DspHelper::kUnmuteFactorIncrement48kHz;
+ }
+
+ // Do overlap-add between new vector and overlap.
+ size_t start_ix = sync_buffer_->Size() - overlap_length_;
+ for (size_t i = 0; i < overlap_length_; i++) {
+ // Mix in Q15 with rounding: muting_window * old sample + unmuting_window * comfort noise.
+ // The expression (*output)[0][i] is the i-th element in the first
+ // channel.
+ (*sync_buffer_)[0][start_ix + i] =
+ (((*sync_buffer_)[0][start_ix + i] * muting_window) +
+ ((*output)[0][i] * unmuting_window) + 16384) >> 15;
+ muting_window += muting_window_increment;
+ unmuting_window += unmuting_window_increment;
+ }
+ // Remove |overlap_length_| samples from the front of |output| since they
+ // were mixed into |sync_buffer_| above.
+ output->PopFront(overlap_length_);
+ }
+ first_call_ = false;
+ return kOK;
+}
+
+} // namespace webrtc
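The Q15 overlap-add in ComfortNoise::Generate() above mixes the tail of the sync buffer with the newly generated noise using complementary window factors. A tiny numeric illustration (not from the patch; the helper name is made up) shows that with both factors at 16384, i.e. 0.5 in Q15, the mix reduces to a rounded average:

#include <cstdint>
#include <cstdio>

// Same arithmetic as the loop in Generate(): multiply in Q15, add 16384 for
// rounding, and shift back down to Q0. The two window factors are expected to
// sum to roughly 32768 (1.0 in Q15).
int16_t MixQ15(int16_t old_sample, int16_t new_sample,
               int16_t muting_window, int16_t unmuting_window) {
  return static_cast<int16_t>(
      (old_sample * muting_window + new_sample * unmuting_window + 16384) >> 15);
}

int main() {
  // 0.5 * 1000 + 0.5 * 2000 = 1500.
  std::printf("%d\n", MixQ15(1000, 2000, 16384, 16384));
  return 0;
}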
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h
new file mode 100644
index 00000000000..d4655962456
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_COMFORT_NOISE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_COMFORT_NOISE_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class DecoderDatabase;
+class SyncBuffer;
+struct Packet;
+
+// This class acts as an interface to the CNG generator.
+class ComfortNoise {
+ public:
+ enum ReturnCodes {
+ kOK = 0,
+ kUnknownPayloadType,
+ kInternalError,
+ kMultiChannelNotSupported
+ };
+
+ ComfortNoise(int fs_hz, DecoderDatabase* decoder_database,
+ SyncBuffer* sync_buffer)
+ : fs_hz_(fs_hz),
+ first_call_(true),
+ overlap_length_(5 * fs_hz_ / 8000),
+ decoder_database_(decoder_database),
+ sync_buffer_(sync_buffer),
+ internal_error_code_(0) {
+ }
+
+ // Resets the state. Should be called before each new comfort noise period.
+ void Reset();
+
+ // Updates the comfort noise generator with the parameters in |packet|.
+ // Takes ownership of, and deletes, the packet.
+ int UpdateParameters(Packet* packet);
+
+ // Generates |requested_length| samples of comfort noise and writes them to
+ // |output|. If this is the first call after Reset() (or the first call after
+ // creating the object), it will also mix comfort noise into the end of the
+ // SyncBuffer object provided in the constructor.
+ int Generate(size_t requested_length, AudioMultiVector* output);
+
+ // Returns the last error code that was produced by the comfort noise
+ // decoder. Returns 0 if no error has been encountered since the last reset.
+ int internal_error_code() { return internal_error_code_; }
+
+ private:
+ int fs_hz_;
+ bool first_call_;
+ size_t overlap_length_;
+ DecoderDatabase* decoder_database_;
+ SyncBuffer* sync_buffer_;
+ int internal_error_code_;
+ DISALLOW_COPY_AND_ASSIGN(ComfortNoise);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_COMFORT_NOISE_H_
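A rough usage sketch of the ComfortNoise class declared above (illustrative only: the wrapper function and the way the SID packet is obtained are assumptions, and AudioMultiVector(1) is assumed to construct a single-channel vector):

#include "webrtc/modules/audio_coding/neteq/comfort_noise.h"

namespace webrtc {

// Hypothetical helper: feed one SID packet and produce 10 ms of comfort noise.
void PlayTenMsOfComfortNoise(DecoderDatabase* decoder_database,
                             SyncBuffer* sync_buffer,
                             Packet* sid_packet) {
  ComfortNoise cn(8000, decoder_database, sync_buffer);
  cn.Reset();  // Start a new comfort noise period.
  // ComfortNoise takes ownership of |sid_packet| and deletes it.
  if (cn.UpdateParameters(sid_packet) != ComfortNoise::kOK) {
    return;  // cn.internal_error_code() holds the CNG decoder error, if any.
  }
  AudioMultiVector output(1);  // One channel.
  // 80 samples is 10 ms at 8 kHz; the first call also mixes noise into the
  // tail of |sync_buffer|.
  if (cn.Generate(80, &output) != ComfortNoise::kOK) {
    // Handle the error; see cn.internal_error_code().
  }
}

}  // namespace webrtc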
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise_unittest.cc
new file mode 100644
index 00000000000..6a1bbe0d143
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise_unittest.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for ComfortNoise class.
+
+#include "webrtc/modules/audio_coding/neteq/comfort_noise.h"
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+
+namespace webrtc {
+
+TEST(ComfortNoise, CreateAndDestroy) {
+ int fs = 8000;
+ MockDecoderDatabase db;
+ SyncBuffer sync_buffer(1, 1000);
+ ComfortNoise cn(fs, &db, &sync_buffer);
+ EXPECT_CALL(db, Die()); // Called when |db| goes out of scope.
+}
+
+// TODO(hlundin): Write more tests.
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/correlator.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/correlator.c
deleted file mode 100644
index 0a4404a432b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/correlator.c
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_corrVec 62 0 61
- int16_t pw16_data_ds 124 0 123
- int32_t pw32_corr 2*54 124 231
-
- Total: 232
- */
-
-#define SCRATCH_pw16_corrVec 0
-#define SCRATCH_pw16_data_ds 0
-#define SCRATCH_pw32_corr 124
-
-#define NETEQ_CORRELATOR_DSVECLEN 124 /* 124 = 60 + 10 + 54 */
-
-int16_t WebRtcNetEQ_Correlator(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_data,
- int16_t w16_dataLen,
- int16_t *pw16_corrOut,
- int16_t *pw16_corrScale)
-{
- int16_t w16_corrLen = 60;
-#ifdef SCRATCH
- int16_t *pw16_data_ds = pw16_scratchPtr + SCRATCH_pw16_corrVec;
- int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_pw32_corr);
- /* int16_t *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;*/
-#else
- int16_t pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN];
- int32_t pw32_corr[54];
- /* int16_t pw16_corrVec[4+54+4];*/
-#endif
- /* int16_t *pw16_corr=&pw16_corrVec[4];*/
- int16_t w16_maxVal;
- int32_t w32_maxVal;
- int16_t w16_normVal;
- int16_t w16_normVal2;
- /* int16_t w16_corrUpsLen;*/
- int16_t *pw16_B = NULL;
- int16_t w16_Blen = 0;
- int16_t w16_factor = 0;
-
- /* Set constants depending on frequency used */
- if (inst->fs == 8000)
- {
- w16_Blen = 3;
- w16_factor = 2;
- pw16_B = (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl;
-#ifdef NETEQ_WIDEBAND
- }
- else if (inst->fs==16000)
- {
- w16_Blen = 5;
- w16_factor = 4;
- pw16_B = (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- }
- else if (inst->fs==32000)
- {
- w16_Blen = 7;
- w16_factor = 8;
- pw16_B = (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- }
- else /* if inst->fs==48000 */
- {
- w16_Blen = 7;
- w16_factor = 12;
- pw16_B = (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl;
-#endif
- }
-
- /* Downsample data in order to work on a 4 kHz sampled signal */
- WebRtcSpl_DownsampleFast(
- pw16_data + w16_dataLen - (NETEQ_CORRELATOR_DSVECLEN * w16_factor),
- (int16_t) (NETEQ_CORRELATOR_DSVECLEN * w16_factor), pw16_data_ds,
- NETEQ_CORRELATOR_DSVECLEN, pw16_B, w16_Blen, w16_factor, (int16_t) 0);
-
- /* Normalize downsampled vector to using entire 16 bit */
- w16_maxVal = WebRtcSpl_MaxAbsValueW16(pw16_data_ds, 124);
- w16_normVal = 16 - WebRtcSpl_NormW32((int32_t) w16_maxVal);
- WebRtcSpl_VectorBitShiftW16(pw16_data_ds, NETEQ_CORRELATOR_DSVECLEN, pw16_data_ds,
- w16_normVal);
-
- /* Correlate from lag 10 to lag 60 (20..120 in NB and 40..240 in WB) */
-
- WebRtcNetEQ_CrossCorr(
- pw32_corr, &pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN - w16_corrLen],
- &pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN - w16_corrLen - 10], 60, 54,
- 6 /*maxValue... shifts*/, -1);
-
- /*
- * Move data from w32 to w16 vector.
- * Normalize downsampled vector to using all 14 bits
- */
- w32_maxVal = WebRtcSpl_MaxAbsValueW32(pw32_corr, 54);
- w16_normVal2 = 18 - WebRtcSpl_NormW32(w32_maxVal);
- w16_normVal2 = WEBRTC_SPL_MAX(w16_normVal2, 0);
-
- WebRtcSpl_VectorBitShiftW32ToW16(pw16_corrOut, 54, pw32_corr, w16_normVal2);
-
- /* Total scale factor (right shifts) of correlation value */
- *pw16_corrScale = 2 * w16_normVal + 6 + w16_normVal2;
-
- return (50 + 1);
-}
-
-#undef SCRATCH_pw16_corrVec
-#undef SCRATCH_pw16_data_ds
-#undef SCRATCH_pw32_corr
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc
new file mode 100644
index 00000000000..5fb054c7850
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc
@@ -0,0 +1,185 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
+
+#include <algorithm>
+
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic_fax.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic_normal.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+DecisionLogic* DecisionLogic::Create(int fs_hz,
+ int output_size_samples,
+ NetEqPlayoutMode playout_mode,
+ DecoderDatabase* decoder_database,
+ const PacketBuffer& packet_buffer,
+ DelayManager* delay_manager,
+ BufferLevelFilter* buffer_level_filter) {
+ switch (playout_mode) {
+ case kPlayoutOn:
+ case kPlayoutStreaming:
+ return new DecisionLogicNormal(fs_hz,
+ output_size_samples,
+ playout_mode,
+ decoder_database,
+ packet_buffer,
+ delay_manager,
+ buffer_level_filter);
+ case kPlayoutFax:
+ case kPlayoutOff:
+ return new DecisionLogicFax(fs_hz,
+ output_size_samples,
+ playout_mode,
+ decoder_database,
+ packet_buffer,
+ delay_manager,
+ buffer_level_filter);
+ }
+ // This line cannot be reached, but must be here to avoid compiler errors.
+ assert(false);
+ return NULL;
+}
+
+DecisionLogic::DecisionLogic(int fs_hz,
+ int output_size_samples,
+ NetEqPlayoutMode playout_mode,
+ DecoderDatabase* decoder_database,
+ const PacketBuffer& packet_buffer,
+ DelayManager* delay_manager,
+ BufferLevelFilter* buffer_level_filter)
+ : decoder_database_(decoder_database),
+ packet_buffer_(packet_buffer),
+ delay_manager_(delay_manager),
+ buffer_level_filter_(buffer_level_filter),
+ cng_state_(kCngOff),
+ generated_noise_samples_(0),
+ packet_length_samples_(0),
+ sample_memory_(0),
+ prev_time_scale_(false),
+ timescale_hold_off_(kMinTimescaleInterval),
+ num_consecutive_expands_(0),
+ playout_mode_(playout_mode) {
+ delay_manager_->set_streaming_mode(playout_mode_ == kPlayoutStreaming);
+ SetSampleRate(fs_hz, output_size_samples);
+}
+
+void DecisionLogic::Reset() {
+ cng_state_ = kCngOff;
+ generated_noise_samples_ = 0;
+ packet_length_samples_ = 0;
+ sample_memory_ = 0;
+ prev_time_scale_ = false;
+ timescale_hold_off_ = 0;
+ num_consecutive_expands_ = 0;
+}
+
+void DecisionLogic::SoftReset() {
+ packet_length_samples_ = 0;
+ sample_memory_ = 0;
+ prev_time_scale_ = false;
+ timescale_hold_off_ = kMinTimescaleInterval;
+}
+
+void DecisionLogic::SetSampleRate(int fs_hz, int output_size_samples) {
+ // TODO(hlundin): Change to an enumerator and skip assert.
+ assert(fs_hz == 8000 || fs_hz == 16000 || fs_hz == 32000 || fs_hz == 48000);
+ fs_mult_ = fs_hz / 8000;
+ output_size_samples_ = output_size_samples;
+}
+
+Operations DecisionLogic::GetDecision(const SyncBuffer& sync_buffer,
+ const Expand& expand,
+ int decoder_frame_length,
+ const RTPHeader* packet_header,
+ Modes prev_mode,
+ bool play_dtmf, bool* reset_decoder) {
+ if (prev_mode == kModeRfc3389Cng ||
+ prev_mode == kModeCodecInternalCng ||
+ prev_mode == kModeExpand) {
+ // If last mode was CNG (or Expand, since this could be covering up for
+ // a lost CNG packet), increase the |generated_noise_samples_| counter.
+ generated_noise_samples_ += output_size_samples_;
+ // Remember that CNG is on. This is needed if comfort noise is interrupted
+ // by DTMF.
+ if (prev_mode == kModeRfc3389Cng) {
+ cng_state_ = kCngRfc3389On;
+ } else if (prev_mode == kModeCodecInternalCng) {
+ cng_state_ = kCngInternalOn;
+ }
+ }
+
+ const int samples_left = static_cast<int>(
+ sync_buffer.FutureLength() - expand.overlap_length());
+ const int cur_size_samples =
+ samples_left + packet_buffer_.NumSamplesInBuffer(decoder_database_,
+ decoder_frame_length);
+ LOG(LS_VERBOSE) << "Buffers: " << packet_buffer_.NumPacketsInBuffer() <<
+ " packets * " << decoder_frame_length << " samples/packet + " <<
+ samples_left << " samples in sync buffer = " << cur_size_samples;
+
+ prev_time_scale_ = prev_time_scale_ &&
+ (prev_mode == kModeAccelerateSuccess ||
+ prev_mode == kModeAccelerateLowEnergy ||
+ prev_mode == kModePreemptiveExpandSuccess ||
+ prev_mode == kModePreemptiveExpandLowEnergy);
+
+ FilterBufferLevel(cur_size_samples, prev_mode);
+
+ return GetDecisionSpecialized(sync_buffer, expand, decoder_frame_length,
+ packet_header, prev_mode, play_dtmf,
+ reset_decoder);
+}
+
+void DecisionLogic::ExpandDecision(Operations operation) {
+ if (operation == kExpand) {
+ num_consecutive_expands_++;
+ } else {
+ num_consecutive_expands_ = 0;
+ }
+}
+
+void DecisionLogic::FilterBufferLevel(int buffer_size_samples,
+ Modes prev_mode) {
+ const int elapsed_time_ms = output_size_samples_ / (8 * fs_mult_);
+ delay_manager_->UpdateCounters(elapsed_time_ms);
+
+ // Do not update buffer history if currently playing CNG since it will bias
+ // the filtered buffer level.
+ if ((prev_mode != kModeRfc3389Cng) && (prev_mode != kModeCodecInternalCng)) {
+ buffer_level_filter_->SetTargetBufferLevel(
+ delay_manager_->base_target_level());
+
+ int buffer_size_packets = 0;
+ if (packet_length_samples_ > 0) {
+ // Calculate size in packets.
+ buffer_size_packets = buffer_size_samples / packet_length_samples_;
+ }
+ int sample_memory_local = 0;
+ if (prev_time_scale_) {
+ sample_memory_local = sample_memory_;
+ timescale_hold_off_ = kMinTimescaleInterval;
+ }
+ buffer_level_filter_->Update(buffer_size_packets, sample_memory_local,
+ packet_length_samples_);
+ prev_time_scale_ = false;
+ }
+
+ timescale_hold_off_ = std::max(timescale_hold_off_ - 1, 0);
+}
+
+} // namespace webrtc
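A minimal standalone sketch (not part of the patch) of the samples-to-milliseconds conversion that FilterBufferLevel() uses to drive DelayManager::UpdateCounters(); the helper name ElapsedTimeMs is illustrative only. Since fs_mult_ is the sample rate divided by 8000, 8 * fs_mult_ is exactly the number of samples per millisecond.

    // Sketch only: mirrors elapsed_time_ms = output_size_samples_ / (8 * fs_mult_).
    #include <cassert>

    int ElapsedTimeMs(int output_size_samples, int fs_mult) {
      // 8 * fs_mult is samples per millisecond (fs_mult = fs_hz / 8000).
      return output_size_samples / (8 * fs_mult);
    }

    int main() {
      assert(ElapsedTimeMs(80, 1) == 10);   // 8 kHz, one 10 ms block.
      assert(ElapsedTimeMs(160, 2) == 10);  // 16 kHz, one 10 ms block.
      assert(ElapsedTimeMs(480, 6) == 10);  // 48 kHz, one 10 ms block.
      return 0;
    }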
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h
new file mode 100644
index 00000000000..672ce939d41
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class BufferLevelFilter;
+class DecoderDatabase;
+class DelayManager;
+class Expand;
+class PacketBuffer;
+class SyncBuffer;
+struct RTPHeader;
+
+// This is the base class for the decision tree implementations. Derived classes
+// must implement the method GetDecisionSpecialized().
+class DecisionLogic {
+ public:
+ // Static factory function which creates different types of objects depending
+ // on the |playout_mode|.
+ static DecisionLogic* Create(int fs_hz,
+ int output_size_samples,
+ NetEqPlayoutMode playout_mode,
+ DecoderDatabase* decoder_database,
+ const PacketBuffer& packet_buffer,
+ DelayManager* delay_manager,
+ BufferLevelFilter* buffer_level_filter);
+
+ // Constructor.
+ DecisionLogic(int fs_hz,
+ int output_size_samples,
+ NetEqPlayoutMode playout_mode,
+ DecoderDatabase* decoder_database,
+ const PacketBuffer& packet_buffer,
+ DelayManager* delay_manager,
+ BufferLevelFilter* buffer_level_filter);
+
+ // Destructor.
+ virtual ~DecisionLogic() {}
+
+ // Resets object to a clean state.
+ void Reset();
+
+ // Resets parts of the state. Typically done when switching codecs.
+ void SoftReset();
+
+ // Sets the sample rate and the output block size.
+ void SetSampleRate(int fs_hz, int output_size_samples);
+
+ // Returns the operation that should be done next. |sync_buffer| and |expand|
+ // are provided for reference. |decoder_frame_length| is the number of samples
+ // obtained from the last decoded frame. If there is a packet available, the
+ // packet header should be supplied in |packet_header|; otherwise it should
+ // be NULL. The mode resulting from the last call to NetEqImpl::GetAudio is
+ // supplied in |prev_mode|. If there is a DTMF event to play, |play_dtmf|
+ // should be set to true. The output variable |reset_decoder| will be set to
+ // true if a reset is required; otherwise it is left unchanged (i.e., it can
+ // remain true if it was true before the call).
+ // This method ends with calling GetDecisionSpecialized to get the actual
+ // return value.
+ Operations GetDecision(const SyncBuffer& sync_buffer,
+ const Expand& expand,
+ int decoder_frame_length,
+ const RTPHeader* packet_header,
+ Modes prev_mode,
+ bool play_dtmf,
+ bool* reset_decoder);
+
+ // These methods test the |cng_state_| for different conditions.
+ bool CngRfc3389On() const { return cng_state_ == kCngRfc3389On; }
+ bool CngOff() const { return cng_state_ == kCngOff; }
+
+ // Resets the |cng_state_| to kCngOff.
+ void SetCngOff() { cng_state_ = kCngOff; }
+
+ // Reports back to DecisionLogic whether the decision to do expand remains or
+ // not. Note that this is necessary, since an expand decision can be changed
+ // to kNormal in NetEqImpl::GetDecision if there is still enough data in the
+ // sync buffer.
+ virtual void ExpandDecision(Operations operation);
+
+ // Adds |value| to |sample_memory_|.
+ void AddSampleMemory(int32_t value) {
+ sample_memory_ += value;
+ }
+
+ // Accessors and mutators.
+ void set_sample_memory(int32_t value) { sample_memory_ = value; }
+ int generated_noise_samples() const { return generated_noise_samples_; }
+ void set_generated_noise_samples(int value) {
+ generated_noise_samples_ = value;
+ }
+ int packet_length_samples() const { return packet_length_samples_; }
+ void set_packet_length_samples(int value) {
+ packet_length_samples_ = value;
+ }
+ void set_prev_time_scale(bool value) { prev_time_scale_ = value; }
+ NetEqPlayoutMode playout_mode() const { return playout_mode_; }
+
+ protected:
+ // The value 6 sets maximum time-stretch rate to about 100 ms/s.
+ static const int kMinTimescaleInterval = 6;
+
+ enum CngState {
+ kCngOff,
+ kCngRfc3389On,
+ kCngInternalOn
+ };
+
+ // Returns the operation that should be done next. |sync_buffer| and |expand|
+ // are provided for reference. |decoder_frame_length| is the number of samples
+ // obtained from the last decoded frame. If there is a packet available, the
+ // packet header should be supplied in |packet_header|; otherwise it should
+ // be NULL. The mode resulting from the last call to NetEqImpl::GetAudio is
+ // supplied in |prev_mode|. If there is a DTMF event to play, |play_dtmf|
+ // should be set to true. The output variable |reset_decoder| will be set to
+ // true if a reset is required; otherwise it is left unchanged (i.e., it can
+ // remain true if it was true before the call).
+ // Should be implemented by derived classes.
+ virtual Operations GetDecisionSpecialized(const SyncBuffer& sync_buffer,
+ const Expand& expand,
+ int decoder_frame_length,
+ const RTPHeader* packet_header,
+ Modes prev_mode,
+ bool play_dtmf,
+ bool* reset_decoder) = 0;
+
+ // Updates the |buffer_level_filter_| with the current buffer level
+ // |buffer_size_packets|.
+ void FilterBufferLevel(int buffer_size_packets, Modes prev_mode);
+
+ DecoderDatabase* decoder_database_;
+ const PacketBuffer& packet_buffer_;
+ DelayManager* delay_manager_;
+ BufferLevelFilter* buffer_level_filter_;
+ int fs_mult_;
+ int output_size_samples_;
+ CngState cng_state_; // Remember if comfort noise is interrupted by other
+ // event (e.g., DTMF).
+ int generated_noise_samples_;
+ int packet_length_samples_;
+ int sample_memory_;
+ bool prev_time_scale_;
+ int timescale_hold_off_;
+ int num_consecutive_expands_;
+ const NetEqPlayoutMode playout_mode_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(DecisionLogic);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_H_
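The header above documents that Create() hands back a different subclass depending on the playout mode. A minimal standalone sketch of that dispatch pattern (not part of the patch), with placeholder types: Base, Normal and Fax stand in for DecisionLogic, DecisionLogicNormal and DecisionLogicFax.

    #include <cassert>
    #include <memory>

    enum PlayoutMode { kOn, kStreaming, kFax, kOff };

    struct Base {
      virtual ~Base() {}
      virtual bool IsFax() const = 0;
    };
    struct Normal : Base { bool IsFax() const override { return false; } };
    struct Fax : Base { bool IsFax() const override { return true; } };

    // kPlayoutOn/kPlayoutStreaming share one implementation, kPlayoutFax/kPlayoutOff the other.
    std::unique_ptr<Base> Create(PlayoutMode mode) {
      switch (mode) {
        case kOn:
        case kStreaming:
          return std::unique_ptr<Base>(new Normal);
        case kFax:
        case kOff:
          return std::unique_ptr<Base>(new Fax);
      }
      return nullptr;  // Unreachable; silences compilers that require a return.
    }

    int main() {
      assert(!Create(kStreaming)->IsFax());
      assert(Create(kOff)->IsFax());
      return 0;
    }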
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc
new file mode 100644
index 00000000000..08a4c4cb646
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/decision_logic_fax.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+
+namespace webrtc {
+
+Operations DecisionLogicFax::GetDecisionSpecialized(
+ const SyncBuffer& sync_buffer,
+ const Expand& expand,
+ int decoder_frame_length,
+ const RTPHeader* packet_header,
+ Modes prev_mode,
+ bool play_dtmf,
+ bool* reset_decoder) {
+ assert(playout_mode_ == kPlayoutFax || playout_mode_ == kPlayoutOff);
+ uint32_t target_timestamp = sync_buffer.end_timestamp();
+ uint32_t available_timestamp = 0;
+ int is_cng_packet = 0;
+ if (packet_header) {
+ available_timestamp = packet_header->timestamp;
+ is_cng_packet =
+ decoder_database_->IsComfortNoise(packet_header->payloadType);
+ }
+ if (is_cng_packet) {
+ if (static_cast<int32_t>((generated_noise_samples_ + target_timestamp)
+ - available_timestamp) >= 0) {
+ // Time to play this packet now.
+ return kRfc3389Cng;
+ } else {
+ // Wait before playing this packet.
+ return kRfc3389CngNoPacket;
+ }
+ }
+ if (!packet_header) {
+ // No packet. If in CNG mode, play as usual. Otherwise, use another method to
+ // generate data.
+ if (cng_state_ == kCngRfc3389On) {
+ // Continue playing comfort noise.
+ return kRfc3389CngNoPacket;
+ } else if (cng_state_ == kCngInternalOn) {
+ // Continue playing codec-internal comfort noise.
+ return kCodecInternalCng;
+ } else {
+ // Nothing to play. Generate some data to play out.
+ switch (playout_mode_) {
+ case kPlayoutOff:
+ return kAlternativePlc;
+ case kPlayoutFax:
+ return kAudioRepetition;
+ default:
+ assert(false);
+ return kUndefined;
+ }
+ }
+ } else if (target_timestamp == available_timestamp) {
+ return kNormal;
+ } else {
+ if (static_cast<int32_t>((generated_noise_samples_ + target_timestamp)
+ - available_timestamp) >= 0) {
+ return kNormal;
+ } else {
+ // If currently playing comfort noise, continue with that. Do not
+ // increase the timestamp counter since generated_noise_samples_ will
+ // be increased.
+ if (cng_state_ == kCngRfc3389On) {
+ return kRfc3389CngNoPacket;
+ } else if (cng_state_ == kCngInternalOn) {
+ return kCodecInternalCng;
+ } else {
+ // Otherwise, do packet-loss concealment and increase the
+ // timestamp while waiting for the time to play this packet.
+ switch (playout_mode_) {
+ case kPlayoutOff:
+ return kAlternativePlcIncreaseTimestamp;
+ case kPlayoutFax:
+ return kAudioRepetitionIncreaseTimestamp;
+ default:
+ assert(0);
+ return kUndefined;
+ }
+ }
+ }
+ }
+}
+
+
+} // namespace webrtc
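The repeated test static_cast<int32_t>((generated_noise_samples_ + target_timestamp) - available_timestamp) >= 0 decides whether the packet's playout time has been reached. A standalone sketch (not part of the patch) showing why the cast keeps the comparison correct across 32-bit RTP timestamp wrap-around; the helper name TimeToPlay is illustrative only.

    #include <cassert>
    #include <cstdint>

    bool TimeToPlay(uint32_t generated_noise_samples,
                    uint32_t target_timestamp,
                    uint32_t available_timestamp) {
      // Unsigned subtraction wraps modulo 2^32; interpreting the difference as
      // signed gives the correct "before or after" answer for nearby timestamps.
      return static_cast<int32_t>((generated_noise_samples + target_timestamp) -
                                  available_timestamp) >= 0;
    }

    int main() {
      assert(TimeToPlay(0, 1000, 800));         // Packet timestamp already reached.
      assert(!TimeToPlay(0, 1000, 1500));       // Packet is 500 samples early: wait.
      assert(TimeToPlay(0, 100, 0xFFFFFF00u));  // Still correct across the wrap.
      return 0;
    }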
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h
new file mode 100644
index 00000000000..01a948fa429
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_FAX_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_FAX_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Implementation of the DecisionLogic class for playout modes kPlayoutFax and
+// kPlayoutOff.
+class DecisionLogicFax : public DecisionLogic {
+ public:
+ // Constructor.
+ DecisionLogicFax(int fs_hz,
+ int output_size_samples,
+ NetEqPlayoutMode playout_mode,
+ DecoderDatabase* decoder_database,
+ const PacketBuffer& packet_buffer,
+ DelayManager* delay_manager,
+ BufferLevelFilter* buffer_level_filter)
+ : DecisionLogic(fs_hz, output_size_samples, playout_mode,
+ decoder_database, packet_buffer, delay_manager,
+ buffer_level_filter) {
+ }
+
+ // Destructor.
+ virtual ~DecisionLogicFax() {}
+
+ protected:
+ // Returns the operation that should be done next. |sync_buffer| and |expand|
+ // are provided for reference. |decoder_frame_length| is the number of samples
+ // obtained from the last decoded frame. If there is a packet available, the
+ // packet header should be supplied in |packet_header|; otherwise it should
+ // be NULL. The mode resulting from the last call to NetEqImpl::GetAudio is
+ // supplied in |prev_mode|. If there is a DTMF event to play, |play_dtmf|
+ // should be set to true. The output variable |reset_decoder| will be set to
+ // true if a reset is required; otherwise it is left unchanged (i.e., it can
+ // remain true if it was true before the call).
+ virtual Operations GetDecisionSpecialized(const SyncBuffer& sync_buffer,
+ const Expand& expand,
+ int decoder_frame_length,
+ const RTPHeader* packet_header,
+ Modes prev_mode,
+ bool play_dtmf,
+ bool* reset_decoder) OVERRIDE;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(DecisionLogicFax);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_FAX_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
new file mode 100644
index 00000000000..97a8843ae0a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
@@ -0,0 +1,235 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/decision_logic_normal.h"
+
+#include <assert.h>
+
+#include <algorithm>
+
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+#include "webrtc/modules/interface/module_common_types.h"
+
+namespace webrtc {
+
+Operations DecisionLogicNormal::GetDecisionSpecialized(
+ const SyncBuffer& sync_buffer,
+ const Expand& expand,
+ int decoder_frame_length,
+ const RTPHeader* packet_header,
+ Modes prev_mode,
+ bool play_dtmf,
+ bool* reset_decoder) {
+ assert(playout_mode_ == kPlayoutOn || playout_mode_ == kPlayoutStreaming);
+ // Guard for errors, to avoid getting stuck in error mode.
+ if (prev_mode == kModeError) {
+ if (!packet_header) {
+ return kExpand;
+ } else {
+ return kUndefined; // Use kUndefined to flag for a reset.
+ }
+ }
+
+ uint32_t target_timestamp = sync_buffer.end_timestamp();
+ uint32_t available_timestamp = 0;
+ bool is_cng_packet = false;
+ if (packet_header) {
+ available_timestamp = packet_header->timestamp;
+ is_cng_packet =
+ decoder_database_->IsComfortNoise(packet_header->payloadType);
+ }
+
+ if (is_cng_packet) {
+ return CngOperation(prev_mode, target_timestamp, available_timestamp);
+ }
+
+ // Handle the case with no packet at all available (except maybe DTMF).
+ if (!packet_header) {
+ return NoPacket(play_dtmf);
+ }
+
+ // If the expand period was very long, reset NetEQ since it is likely that the
+ // sender was restarted.
+ if (num_consecutive_expands_ > kReinitAfterExpands) {
+ *reset_decoder = true;
+ return kNormal;
+ }
+
+ // Check if the required packet is available.
+ if (target_timestamp == available_timestamp) {
+ return ExpectedPacketAvailable(prev_mode, play_dtmf);
+ } else if (IsNewerTimestamp(available_timestamp, target_timestamp)) {
+ return FuturePacketAvailable(sync_buffer, expand, decoder_frame_length,
+ prev_mode, target_timestamp,
+ available_timestamp, play_dtmf);
+ } else {
+ // This implies that available_timestamp < target_timestamp, which can
+ // happen when a new stream or codec is received. Signal for a reset.
+ return kUndefined;
+ }
+}
+
+Operations DecisionLogicNormal::CngOperation(Modes prev_mode,
+ uint32_t target_timestamp,
+ uint32_t available_timestamp) {
+ // Signed difference between target and available timestamp.
+ int32_t timestamp_diff = (generated_noise_samples_ + target_timestamp) -
+ available_timestamp;
+ int32_t optimal_level_samp =
+ (delay_manager_->TargetLevel() * packet_length_samples_) >> 8;
+ int32_t excess_waiting_time_samp = -timestamp_diff - optimal_level_samp;
+
+ if (excess_waiting_time_samp > optimal_level_samp / 2) {
+ // The waiting time for this packet will be longer than 1.5
+ // times the wanted buffer delay. Advance the clock to cut
+ // waiting time down to the optimal.
+ generated_noise_samples_ += excess_waiting_time_samp;
+ timestamp_diff += excess_waiting_time_samp;
+ }
+
+ if (timestamp_diff < 0 && prev_mode == kModeRfc3389Cng) {
+ // Not time to play this packet yet. Wait another round before using this
+ // packet. Keep on playing CNG from previous CNG parameters.
+ return kRfc3389CngNoPacket;
+ } else {
+ // Otherwise, go for the CNG packet now.
+ return kRfc3389Cng;
+ }
+}
+
+Operations DecisionLogicNormal::NoPacket(bool play_dtmf) {
+ if (cng_state_ == kCngRfc3389On) {
+ // Keep on playing comfort noise.
+ return kRfc3389CngNoPacket;
+ } else if (cng_state_ == kCngInternalOn) {
+ // Keep on playing codec internal comfort noise.
+ return kCodecInternalCng;
+ } else if (play_dtmf) {
+ return kDtmf;
+ } else {
+ // Nothing to play, do expand.
+ return kExpand;
+ }
+}
+
+Operations DecisionLogicNormal::ExpectedPacketAvailable(Modes prev_mode,
+ bool play_dtmf) {
+ if (prev_mode != kModeExpand && !play_dtmf) {
+ // Check criterion for time-stretching.
+ int low_limit, high_limit;
+ delay_manager_->BufferLimits(&low_limit, &high_limit);
+ if ((buffer_level_filter_->filtered_current_level() >= high_limit &&
+ TimescaleAllowed()) ||
+ buffer_level_filter_->filtered_current_level() >= high_limit << 2) {
+ // Buffer level higher than limit and time-scaling allowed,
+ // or buffer level really high.
+ return kAccelerate;
+ } else if ((buffer_level_filter_->filtered_current_level() < low_limit)
+ && TimescaleAllowed()) {
+ return kPreemptiveExpand;
+ }
+ }
+ return kNormal;
+}
+
+Operations DecisionLogicNormal::FuturePacketAvailable(
+ const SyncBuffer& sync_buffer,
+ const Expand& expand,
+ int decoder_frame_length,
+ Modes prev_mode,
+ uint32_t target_timestamp,
+ uint32_t available_timestamp,
+ bool play_dtmf) {
+ // Required packet is not available, but a future packet is.
+ // Check if we should continue with an ongoing expand because the new packet
+ // is too far into the future.
+ uint32_t timestamp_leap = available_timestamp - target_timestamp;
+ if ((prev_mode == kModeExpand) &&
+ !ReinitAfterExpands(timestamp_leap) &&
+ !MaxWaitForPacket() &&
+ PacketTooEarly(timestamp_leap) &&
+ UnderTargetLevel()) {
+ if (play_dtmf) {
+ // Still have DTMF to play, so do not do expand.
+ return kDtmf;
+ } else {
+ // Nothing to play.
+ return kExpand;
+ }
+ }
+
+ const int samples_left = static_cast<int>(sync_buffer.FutureLength() -
+ expand.overlap_length());
+ const int cur_size_samples = samples_left +
+ packet_buffer_.NumPacketsInBuffer() * decoder_frame_length;
+
+ // If previous was comfort noise, then no merge is needed.
+ if (prev_mode == kModeRfc3389Cng ||
+ prev_mode == kModeCodecInternalCng) {
+ // Keep the same delay as before the CNG (or maximum 70 ms in buffer as
+ // safety precaution), but make sure that the number of samples in buffer
+ // is no higher than 4 times the optimal level. (Note that TargetLevel()
+ // is in Q8.)
+ int32_t timestamp_diff = (generated_noise_samples_ + target_timestamp) -
+ available_timestamp;
+ if (timestamp_diff >= 0 ||
+ cur_size_samples >
+ 4 * ((delay_manager_->TargetLevel() * packet_length_samples_) >> 8)) {
+ // Time to play this new packet.
+ return kNormal;
+ } else {
+ // Too early to play this new packet; keep on playing comfort noise.
+ if (prev_mode == kModeRfc3389Cng) {
+ return kRfc3389CngNoPacket;
+ } else { // prev_mode == kModeCodecInternalCng.
+ return kCodecInternalCng;
+ }
+ }
+ }
+ // Do not merge unless we have done an expand before.
+ // (Convert kAllowMergeWithoutExpandMs from ms to samples by multiplying with
+ // fs_mult_ * 8 = fs / 1000.)
+ if (prev_mode == kModeExpand ||
+ (decoder_frame_length < output_size_samples_ &&
+ cur_size_samples > kAllowMergeWithoutExpandMs * fs_mult_ * 8)) {
+ return kMerge;
+ } else if (play_dtmf) {
+ // Play DTMF instead of expand.
+ return kDtmf;
+ } else {
+ return kExpand;
+ }
+}
+
+bool DecisionLogicNormal::UnderTargetLevel() const {
+ return buffer_level_filter_->filtered_current_level() <=
+ delay_manager_->TargetLevel();
+}
+
+bool DecisionLogicNormal::ReinitAfterExpands(uint32_t timestamp_leap) const {
+ return timestamp_leap >=
+ static_cast<uint32_t>(output_size_samples_ * kReinitAfterExpands);
+}
+
+bool DecisionLogicNormal::PacketTooEarly(uint32_t timestamp_leap) const {
+ return timestamp_leap >
+ static_cast<uint32_t>(output_size_samples_ * num_consecutive_expands_);
+}
+
+bool DecisionLogicNormal::MaxWaitForPacket() const {
+ return num_consecutive_expands_ >= kMaxWaitForPacket;
+}
+
+} // namespace webrtc
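DelayManager::TargetLevel() is expressed in Q8 packets, so CngOperation() converts it to samples with (TargetLevel() * packet_length_samples_) >> 8 before comparing it with the waiting time. A standalone arithmetic sketch of that conversion (not part of the patch); OptimalLevelSamples is an illustrative name.

    #include <cassert>
    #include <cstdint>

    int32_t OptimalLevelSamples(int target_level_q8, int packet_length_samples) {
      // Q8 value times samples, then shift right by 8 to drop the fractional bits.
      return (static_cast<int32_t>(target_level_q8) * packet_length_samples) >> 8;
    }

    int main() {
      assert(OptimalLevelSamples(2 * 256, 160) == 320);  // 2.0 packets of 160 samples.
      assert(OptimalLevelSamples(384, 160) == 240);      // 1.5 packets of 160 samples.
      return 0;
    }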
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h
new file mode 100644
index 00000000000..a339d160f29
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_NORMAL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_NORMAL_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Implementation of the DecisionLogic class for playout modes kPlayoutOn and
+// kPlayoutStreaming.
+class DecisionLogicNormal : public DecisionLogic {
+ public:
+ // Constructor.
+ DecisionLogicNormal(int fs_hz,
+ int output_size_samples,
+ NetEqPlayoutMode playout_mode,
+ DecoderDatabase* decoder_database,
+ const PacketBuffer& packet_buffer,
+ DelayManager* delay_manager,
+ BufferLevelFilter* buffer_level_filter)
+ : DecisionLogic(fs_hz, output_size_samples, playout_mode,
+ decoder_database, packet_buffer, delay_manager,
+ buffer_level_filter) {
+ }
+
+ // Destructor.
+ virtual ~DecisionLogicNormal() {}
+
+ protected:
+ static const int kAllowMergeWithoutExpandMs = 20; // 20 ms.
+ static const int kReinitAfterExpands = 100;
+ static const int kMaxWaitForPacket = 10;
+
+ // Returns the operation that should be done next. |sync_buffer| and |expand|
+ // are provided for reference. |decoder_frame_length| is the number of samples
+ // obtained from the last decoded frame. If there is a packet available, the
+ // packet header should be supplied in |packet_header|; otherwise it should
+ // be NULL. The mode resulting from the last call to NetEqImpl::GetAudio is
+ // supplied in |prev_mode|. If there is a DTMF event to play, |play_dtmf|
+ // should be set to true. The output variable |reset_decoder| will be set to
+ // true if a reset is required; otherwise it is left unchanged (i.e., it can
+ // remain true if it was true before the call).
+ virtual Operations GetDecisionSpecialized(const SyncBuffer& sync_buffer,
+ const Expand& expand,
+ int decoder_frame_length,
+ const RTPHeader* packet_header,
+ Modes prev_mode, bool play_dtmf,
+ bool* reset_decoder);
+
+ // Returns the operation to do given that the expected packet is not
+ // available, but a packet further into the future is at hand.
+ virtual Operations FuturePacketAvailable(
+ const SyncBuffer& sync_buffer,
+ const Expand& expand,
+ int decoder_frame_length, Modes prev_mode,
+ uint32_t target_timestamp,
+ uint32_t available_timestamp,
+ bool play_dtmf);
+
+ // Returns the operation to do given that the expected packet is available.
+ virtual Operations ExpectedPacketAvailable(Modes prev_mode, bool play_dtmf);
+
+ // Returns the operation given that no packets are available (except maybe
+ // a DTMF event, flagged by setting |play_dtmf| true).
+ virtual Operations NoPacket(bool play_dtmf);
+
+ private:
+ // Returns the operation given that the next available packet is a comfort
+ // noise payload (RFC 3389 only, not codec-internal).
+ Operations CngOperation(Modes prev_mode, uint32_t target_timestamp,
+ uint32_t available_timestamp);
+
+ // Checks if enough time has elapsed since the last successful timescale
+ // operation was done (i.e., accelerate or preemptive expand).
+ bool TimescaleAllowed() const { return timescale_hold_off_ == 0; }
+
+ // Checks if the current (filtered) buffer level is under the target level.
+ bool UnderTargetLevel() const;
+
+ // Checks if |timestamp_leap| is so long into the future that a reset due
+ // to exceeding kReinitAfterExpands will be done.
+ bool ReinitAfterExpands(uint32_t timestamp_leap) const;
+
+ // Checks if we still have not done enough expands to cover the distance from
+ // the last decoded packet to the next available packet, the distance being
+ // conveyed in |timestamp_leap|.
+ bool PacketTooEarly(uint32_t timestamp_leap) const;
+
+ // Checks if num_consecutive_expands_ >= kMaxWaitForPacket.
+ bool MaxWaitForPacket() const;
+
+ DISALLOW_COPY_AND_ASSIGN(DecisionLogicNormal);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_NORMAL_H_
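TimescaleAllowed() only returns true once timescale_hold_off_ has counted down to zero; the counter is reset to kMinTimescaleInterval after a successful time-stretch and decremented once per output block in FilterBufferLevel(). A standalone sketch of that countdown (not part of the patch):

    #include <algorithm>
    #include <cassert>

    int main() {
      const int kMinTimescaleInterval = 6;     // Same constant as in decision_logic.h.
      int hold_off = kMinTimescaleInterval;    // Just after a time-stretch operation.
      int blocks = 0;
      while (hold_off != 0) {
        hold_off = std::max(hold_off - 1, 0);  // One output block processed.
        ++blocks;
      }
      assert(blocks == 6);  // Time-stretching is allowed again after six blocks.
      return 0;
    }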
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
new file mode 100644
index 00000000000..f9056a6cbae
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for DecisionLogic class and derived classes.
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+
+namespace webrtc {
+
+TEST(DecisionLogic, CreateAndDestroy) {
+ int fs_hz = 8000;
+ int output_size_samples = fs_hz / 100; // Samples per 10 ms.
+ DecoderDatabase decoder_database;
+ PacketBuffer packet_buffer(10);
+ DelayPeakDetector delay_peak_detector;
+ DelayManager delay_manager(240, &delay_peak_detector);
+ BufferLevelFilter buffer_level_filter;
+ DecisionLogic* logic = DecisionLogic::Create(fs_hz, output_size_samples,
+ kPlayoutOn, &decoder_database,
+ packet_buffer, &delay_manager,
+ &buffer_level_filter);
+ delete logic;
+ logic = DecisionLogic::Create(fs_hz, output_size_samples,
+ kPlayoutStreaming,
+ &decoder_database,
+ packet_buffer, &delay_manager,
+ &buffer_level_filter);
+ delete logic;
+ logic = DecisionLogic::Create(fs_hz, output_size_samples,
+ kPlayoutFax,
+ &decoder_database,
+ packet_buffer, &delay_manager,
+ &buffer_level_filter);
+ delete logic;
+ logic = DecisionLogic::Create(fs_hz, output_size_samples,
+ kPlayoutOff,
+ &decoder_database,
+ packet_buffer, &delay_manager,
+ &buffer_level_filter);
+ delete logic;
+}
+
+// TODO(hlundin): Write more tests.
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc
new file mode 100644
index 00000000000..5049962b45b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc
@@ -0,0 +1,260 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+
+#include <assert.h>
+#include <utility> // pair
+
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+
+namespace webrtc {
+
+DecoderDatabase::DecoderDatabase()
+ : active_decoder_(-1), active_cng_decoder_(-1) {}
+
+DecoderDatabase::~DecoderDatabase() {}
+
+DecoderDatabase::DecoderInfo::~DecoderInfo() {
+ if (!external) delete decoder;
+}
+
+bool DecoderDatabase::Empty() const { return decoders_.empty(); }
+
+int DecoderDatabase::Size() const { return static_cast<int>(decoders_.size()); }
+
+void DecoderDatabase::Reset() {
+ decoders_.clear();
+ active_decoder_ = -1;
+ active_cng_decoder_ = -1;
+}
+
+int DecoderDatabase::RegisterPayload(uint8_t rtp_payload_type,
+ NetEqDecoder codec_type) {
+ if (rtp_payload_type > kMaxRtpPayloadType) {
+ return kInvalidRtpPayloadType;
+ }
+ if (!AudioDecoder::CodecSupported(codec_type)) {
+ return kCodecNotSupported;
+ }
+ int fs_hz = AudioDecoder::CodecSampleRateHz(codec_type);
+ std::pair<DecoderMap::iterator, bool> ret;
+ DecoderInfo info(codec_type, fs_hz, NULL, false);
+ ret = decoders_.insert(std::make_pair(rtp_payload_type, info));
+ if (ret.second == false) {
+ // Database already contains a decoder with type |rtp_payload_type|.
+ return kDecoderExists;
+ }
+ return kOK;
+}
+
+int DecoderDatabase::InsertExternal(uint8_t rtp_payload_type,
+ NetEqDecoder codec_type,
+ int fs_hz,
+ AudioDecoder* decoder) {
+ if (rtp_payload_type > 0x7F) {
+ return kInvalidRtpPayloadType;
+ }
+ if (!AudioDecoder::CodecSupported(codec_type)) {
+ return kCodecNotSupported;
+ }
+ if (fs_hz != 8000 && fs_hz != 16000 && fs_hz != 32000 && fs_hz != 48000) {
+ return kInvalidSampleRate;
+ }
+ if (!decoder) {
+ return kInvalidPointer;
+ }
+ decoder->Init();
+ std::pair<DecoderMap::iterator, bool> ret;
+ DecoderInfo info(codec_type, fs_hz, decoder, true);
+ ret = decoders_.insert(
+ std::pair<uint8_t, DecoderInfo>(rtp_payload_type, info));
+ if (ret.second == false) {
+ // Database already contains a decoder with type |rtp_payload_type|.
+ return kDecoderExists;
+ }
+ return kOK;
+}
+
+int DecoderDatabase::Remove(uint8_t rtp_payload_type) {
+ if (decoders_.erase(rtp_payload_type) == 0) {
+ // No decoder with that |rtp_payload_type|.
+ return kDecoderNotFound;
+ }
+ if (active_decoder_ == rtp_payload_type) {
+ active_decoder_ = -1; // No active decoder.
+ }
+ if (active_cng_decoder_ == rtp_payload_type) {
+ active_cng_decoder_ = -1; // No active CNG decoder.
+ }
+ return kOK;
+}
+
+const DecoderDatabase::DecoderInfo* DecoderDatabase::GetDecoderInfo(
+ uint8_t rtp_payload_type) const {
+ DecoderMap::const_iterator it = decoders_.find(rtp_payload_type);
+ if (it == decoders_.end()) {
+ // Decoder not found.
+ return NULL;
+ }
+ return &(*it).second;
+}
+
+uint8_t DecoderDatabase::GetRtpPayloadType(
+ NetEqDecoder codec_type) const {
+ DecoderMap::const_iterator it;
+ for (it = decoders_.begin(); it != decoders_.end(); ++it) {
+ if ((*it).second.codec_type == codec_type) {
+ // Match found.
+ return (*it).first;
+ }
+ }
+ // No match.
+ return kRtpPayloadTypeError;
+}
+
+AudioDecoder* DecoderDatabase::GetDecoder(uint8_t rtp_payload_type) {
+ if (IsDtmf(rtp_payload_type) || IsRed(rtp_payload_type)) {
+ // These are not real decoders.
+ return NULL;
+ }
+ DecoderMap::iterator it = decoders_.find(rtp_payload_type);
+ if (it == decoders_.end()) {
+ // Decoder not found.
+ return NULL;
+ }
+ DecoderInfo* info = &(*it).second;
+ if (!info->decoder) {
+ // Create the decoder object.
+ AudioDecoder* decoder = AudioDecoder::CreateAudioDecoder(info->codec_type);
+ assert(decoder); // Should not be able to have an unsupported codec here.
+ info->decoder = decoder;
+ info->decoder->Init();
+ }
+ return info->decoder;
+}
+
+bool DecoderDatabase::IsType(uint8_t rtp_payload_type,
+ NetEqDecoder codec_type) const {
+ DecoderMap::const_iterator it = decoders_.find(rtp_payload_type);
+ if (it == decoders_.end()) {
+ // Decoder not found.
+ return false;
+ }
+ return ((*it).second.codec_type == codec_type);
+}
+
+bool DecoderDatabase::IsComfortNoise(uint8_t rtp_payload_type) const {
+ if (IsType(rtp_payload_type, kDecoderCNGnb) ||
+ IsType(rtp_payload_type, kDecoderCNGwb) ||
+ IsType(rtp_payload_type, kDecoderCNGswb32kHz) ||
+ IsType(rtp_payload_type, kDecoderCNGswb48kHz)) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool DecoderDatabase::IsDtmf(uint8_t rtp_payload_type) const {
+ return IsType(rtp_payload_type, kDecoderAVT);
+}
+
+bool DecoderDatabase::IsRed(uint8_t rtp_payload_type) const {
+ return IsType(rtp_payload_type, kDecoderRED);
+}
+
+int DecoderDatabase::SetActiveDecoder(uint8_t rtp_payload_type,
+ bool* new_decoder) {
+ // Check that |rtp_payload_type| exists in the database.
+ DecoderMap::const_iterator it = decoders_.find(rtp_payload_type);
+ if (it == decoders_.end()) {
+ // Decoder not found.
+ return kDecoderNotFound;
+ }
+ assert(new_decoder);
+ *new_decoder = false;
+ if (active_decoder_ < 0) {
+ // This is the first active decoder.
+ *new_decoder = true;
+ } else if (active_decoder_ != rtp_payload_type) {
+ // Moving from one active decoder to another. Delete the first one.
+ DecoderMap::iterator it = decoders_.find(active_decoder_);
+ if (it == decoders_.end()) {
+ // Decoder not found. This should not be possible.
+ assert(false);
+ return kDecoderNotFound;
+ }
+ if (!(*it).second.external) {
+ // Delete the AudioDecoder object, unless it is an externally created
+ // decoder.
+ delete (*it).second.decoder;
+ (*it).second.decoder = NULL;
+ }
+ *new_decoder = true;
+ }
+ active_decoder_ = rtp_payload_type;
+ return kOK;
+}
+
+AudioDecoder* DecoderDatabase::GetActiveDecoder() {
+ if (active_decoder_ < 0) {
+ // No active decoder.
+ return NULL;
+ }
+ return GetDecoder(active_decoder_);
+}
+
+int DecoderDatabase::SetActiveCngDecoder(uint8_t rtp_payload_type) {
+ // Check that |rtp_payload_type| exists in the database.
+ DecoderMap::const_iterator it = decoders_.find(rtp_payload_type);
+ if (it == decoders_.end()) {
+ // Decoder not found.
+ return kDecoderNotFound;
+ }
+ if (active_cng_decoder_ >= 0 && active_cng_decoder_ != rtp_payload_type) {
+ // Moving from one active CNG decoder to another. Delete the first one.
+ DecoderMap::iterator it = decoders_.find(active_cng_decoder_);
+ if (it == decoders_.end()) {
+ // Decoder not found. This should not be possible.
+ assert(false);
+ return kDecoderNotFound;
+ }
+ if (!(*it).second.external) {
+ // Delete the AudioDecoder object, unless it is an externally created
+ // decoder.
+ delete (*it).second.decoder;
+ (*it).second.decoder = NULL;
+ }
+ }
+ active_cng_decoder_ = rtp_payload_type;
+ return kOK;
+}
+
+AudioDecoder* DecoderDatabase::GetActiveCngDecoder() {
+ if (active_cng_decoder_ < 0) {
+ // No active CNG decoder.
+ return NULL;
+ }
+ return GetDecoder(active_cng_decoder_);
+}
+
+int DecoderDatabase::CheckPayloadTypes(const PacketList& packet_list) const {
+ PacketList::const_iterator it;
+ for (it = packet_list.begin(); it != packet_list.end(); ++it) {
+ if (decoders_.find((*it)->header.payloadType) == decoders_.end()) {
+ // Payload type is not found.
+ return kDecoderNotFound;
+ }
+ }
+ return kOK;
+}
+
+
+} // namespace webrtc
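RegisterPayload() and InsertExternal() both rely on std::map::insert() refusing to overwrite an existing key and reporting the collision through the bool in the returned pair; that is how a duplicate |rtp_payload_type| turns into kDecoderExists. A standalone sketch of the idiom (not part of the patch), with std::string standing in for DecoderInfo:

    #include <cassert>
    #include <cstdint>
    #include <map>
    #include <string>

    int main() {
      std::map<uint8_t, std::string> decoders;
      std::pair<std::map<uint8_t, std::string>::iterator, bool> ret =
          decoders.insert(std::make_pair(uint8_t(0), std::string("PCMu")));
      assert(ret.second);             // First registration of payload type 0 succeeds.
      ret = decoders.insert(std::make_pair(uint8_t(0), std::string("iSAC")));
      assert(!ret.second);            // Second registration is rejected (kDecoderExists).
      assert(decoders[0] == "PCMu");  // The original entry is left untouched.
      return 0;
    }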
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h
new file mode 100644
index 00000000000..8a03f2123be
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h
@@ -0,0 +1,158 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECODER_DATABASE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECODER_DATABASE_H_
+
+#include <map>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_types.h" // NULL
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declaration.
+class AudioDecoder;
+
+class DecoderDatabase {
+ public:
+ enum DatabaseReturnCodes {
+ kOK = 0,
+ kInvalidRtpPayloadType = -1,
+ kCodecNotSupported = -2,
+ kInvalidSampleRate = -3,
+ kDecoderExists = -4,
+ kDecoderNotFound = -5,
+ kInvalidPointer = -6
+ };
+
+ // Struct used to store decoder info in the database.
+ struct DecoderInfo {
+ // Constructors.
+ DecoderInfo()
+ : codec_type(kDecoderArbitrary),
+ fs_hz(8000),
+ decoder(NULL),
+ external(false) {
+ }
+ DecoderInfo(NetEqDecoder ct, int fs, AudioDecoder* dec, bool ext)
+ : codec_type(ct),
+ fs_hz(fs),
+ decoder(dec),
+ external(ext) {
+ }
+ // Destructor. (Defined in decoder_database.cc.)
+ ~DecoderInfo();
+
+ NetEqDecoder codec_type;
+ int fs_hz;
+ AudioDecoder* decoder;
+ bool external;
+ };
+
+ static const uint8_t kMaxRtpPayloadType = 0x7F; // Max for a 7-bit number.
+ // Maximum value for 8 bits, and an invalid RTP payload type (since it is
+ // only 7 bits).
+ static const uint8_t kRtpPayloadTypeError = 0xFF;
+
+ DecoderDatabase();
+
+ virtual ~DecoderDatabase();
+
+ // Returns true if the database is empty.
+ virtual bool Empty() const;
+
+ // Returns the number of decoders registered in the database.
+ virtual int Size() const;
+
+ // Resets the database, erasing all registered payload types, and deleting
+ // any AudioDecoder objects that were not externally created and inserted
+ // using InsertExternal().
+ virtual void Reset();
+
+ // Registers |rtp_payload_type| as a decoder of type |codec_type|. Returns
+ // kOK on success; otherwise an error code.
+ virtual int RegisterPayload(uint8_t rtp_payload_type,
+ NetEqDecoder codec_type);
+
+ // Registers an externally created AudioDecoder object, and associates it
+ // as a decoder of type |codec_type| with |rtp_payload_type|.
+ virtual int InsertExternal(uint8_t rtp_payload_type,
+ NetEqDecoder codec_type,
+ int fs_hz, AudioDecoder* decoder);
+
+ // Removes the entry for |rtp_payload_type| from the database.
+ // Returns kDecoderNotFound or kOK depending on the outcome of the operation.
+ virtual int Remove(uint8_t rtp_payload_type);
+
+ // Returns a pointer to the DecoderInfo struct for |rtp_payload_type|. If
+ // no decoder is registered with that |rtp_payload_type|, NULL is returned.
+ virtual const DecoderInfo* GetDecoderInfo(uint8_t rtp_payload_type) const;
+
+ // Returns one RTP payload type associated with |codec_type|, or
+ // kRtpPayloadTypeError if no entry exists for that value. Note that one
+ // |codec_type| may be registered with several RTP payload types, and the
+ // method may return any of them.
+ virtual uint8_t GetRtpPayloadType(NetEqDecoder codec_type) const;
+
+ // Returns a pointer to the AudioDecoder object associated with
+ // |rtp_payload_type|, or NULL if none is registered. If the AudioDecoder
+ // object does not exist for that decoder, the object is created.
+ virtual AudioDecoder* GetDecoder(uint8_t rtp_payload_type);
+
+ // Returns true if |rtp_payload_type| is registered as a |codec_type|.
+ virtual bool IsType(uint8_t rtp_payload_type,
+ NetEqDecoder codec_type) const;
+
+ // Returns true if |rtp_payload_type| is registered as comfort noise.
+ virtual bool IsComfortNoise(uint8_t rtp_payload_type) const;
+
+ // Returns true if |rtp_payload_type| is registered as DTMF.
+ virtual bool IsDtmf(uint8_t rtp_payload_type) const;
+
+ // Returns true if |rtp_payload_type| is registered as RED.
+ virtual bool IsRed(uint8_t rtp_payload_type) const;
+
+ // Sets the active decoder to be |rtp_payload_type|. If this call results in a
+ // change of active decoder, |new_decoder| is set to true. The previous active
+ // decoder's AudioDecoder object is deleted.
+ virtual int SetActiveDecoder(uint8_t rtp_payload_type, bool* new_decoder);
+
+ // Returns the current active decoder, or NULL if no active decoder exists.
+ virtual AudioDecoder* GetActiveDecoder();
+
+ // Sets the active comfort noise decoder to be |rtp_payload_type|. If this
+ // call results in a change of active comfort noise decoder, the previous
+ // active decoder's AudioDecoder object is deleted.
+ virtual int SetActiveCngDecoder(uint8_t rtp_payload_type);
+
+ // Returns the current active comfort noise decoder, or NULL if no active
+ // comfort noise decoder exists.
+ virtual AudioDecoder* GetActiveCngDecoder();
+
+ // Returns kOK if all packets in |packet_list| carry payload types that are
+ // registered in the database. Otherwise, returns kDecoderNotFound.
+ virtual int CheckPayloadTypes(const PacketList& packet_list) const;
+
+ private:
+ typedef std::map<uint8_t, DecoderInfo> DecoderMap;
+
+ DecoderMap decoders_;
+ int active_decoder_;
+ int active_cng_decoder_;
+
+ DISALLOW_COPY_AND_ASSIGN(DecoderDatabase);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECODER_DATABASE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
new file mode 100644
index 00000000000..d0c6f5ae891
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
@@ -0,0 +1,228 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#include <string>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+
+#include "webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+
+namespace webrtc {
+
+TEST(DecoderDatabase, CreateAndDestroy) {
+ DecoderDatabase db;
+ EXPECT_EQ(0, db.Size());
+ EXPECT_TRUE(db.Empty());
+}
+
+TEST(DecoderDatabase, InsertAndRemove) {
+ DecoderDatabase db;
+ const uint8_t kPayloadType = 0;
+ EXPECT_EQ(DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadType, kDecoderPCMu));
+ EXPECT_EQ(1, db.Size());
+ EXPECT_FALSE(db.Empty());
+ EXPECT_EQ(DecoderDatabase::kOK, db.Remove(kPayloadType));
+ EXPECT_EQ(0, db.Size());
+ EXPECT_TRUE(db.Empty());
+}
+
+TEST(DecoderDatabase, GetDecoderInfo) {
+ DecoderDatabase db;
+ const uint8_t kPayloadType = 0;
+ EXPECT_EQ(DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadType, kDecoderPCMu));
+ const DecoderDatabase::DecoderInfo* info;
+ info = db.GetDecoderInfo(kPayloadType);
+ ASSERT_TRUE(info != NULL);
+ EXPECT_EQ(kDecoderPCMu, info->codec_type);
+ EXPECT_EQ(NULL, info->decoder);
+ EXPECT_EQ(8000, info->fs_hz);
+ EXPECT_FALSE(info->external);
+ info = db.GetDecoderInfo(kPayloadType + 1); // Other payload type.
+ EXPECT_TRUE(info == NULL); // Should not be found.
+}
+
+TEST(DecoderDatabase, GetRtpPayloadType) {
+ DecoderDatabase db;
+ const uint8_t kPayloadType = 0;
+ EXPECT_EQ(DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadType, kDecoderPCMu));
+ EXPECT_EQ(kPayloadType, db.GetRtpPayloadType(kDecoderPCMu));
+ const uint8_t expected_value = DecoderDatabase::kRtpPayloadTypeError;
+ EXPECT_EQ(expected_value,
+ db.GetRtpPayloadType(kDecoderISAC)); // iSAC is not registered.
+}
+
+TEST(DecoderDatabase, GetDecoder) {
+ DecoderDatabase db;
+ const uint8_t kPayloadType = 0;
+ EXPECT_EQ(DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadType, kDecoderPCM16B));
+ AudioDecoder* dec = db.GetDecoder(kPayloadType);
+ ASSERT_TRUE(dec != NULL);
+}
+
+TEST(DecoderDatabase, TypeTests) {
+ DecoderDatabase db;
+ const uint8_t kPayloadTypePcmU = 0;
+ const uint8_t kPayloadTypeCng = 13;
+ const uint8_t kPayloadTypeDtmf = 100;
+ const uint8_t kPayloadTypeRed = 101;
+ const uint8_t kPayloadNotUsed = 102;
+ // Load into database.
+ EXPECT_EQ(DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadTypePcmU, kDecoderPCMu));
+ EXPECT_EQ(DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadTypeCng, kDecoderCNGnb));
+ EXPECT_EQ(DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadTypeDtmf, kDecoderAVT));
+ EXPECT_EQ(DecoderDatabase::kOK,
+ db.RegisterPayload(kPayloadTypeRed, kDecoderRED));
+ EXPECT_EQ(4, db.Size());
+ // Test.
+ EXPECT_FALSE(db.IsComfortNoise(kPayloadNotUsed));
+ EXPECT_FALSE(db.IsDtmf(kPayloadNotUsed));
+ EXPECT_FALSE(db.IsRed(kPayloadNotUsed));
+ EXPECT_FALSE(db.IsComfortNoise(kPayloadTypePcmU));
+ EXPECT_FALSE(db.IsDtmf(kPayloadTypePcmU));
+ EXPECT_FALSE(db.IsRed(kPayloadTypePcmU));
+ EXPECT_FALSE(db.IsType(kPayloadTypePcmU, kDecoderISAC));
+ EXPECT_TRUE(db.IsType(kPayloadTypePcmU, kDecoderPCMu));
+ EXPECT_TRUE(db.IsComfortNoise(kPayloadTypeCng));
+ EXPECT_TRUE(db.IsDtmf(kPayloadTypeDtmf));
+ EXPECT_TRUE(db.IsRed(kPayloadTypeRed));
+}
+
+TEST(DecoderDatabase, ExternalDecoder) {
+ DecoderDatabase db;
+ const uint8_t kPayloadType = 0;
+ MockAudioDecoder decoder;
+ // Load into database.
+ EXPECT_EQ(DecoderDatabase::kOK,
+ db.InsertExternal(kPayloadType, kDecoderPCMu, 8000,
+ &decoder));
+ EXPECT_EQ(1, db.Size());
+ // Get decoder and make sure we get the external one.
+ EXPECT_EQ(&decoder, db.GetDecoder(kPayloadType));
+ // Get the decoder info struct and check it too.
+ const DecoderDatabase::DecoderInfo* info;
+ info = db.GetDecoderInfo(kPayloadType);
+ ASSERT_TRUE(info != NULL);
+ EXPECT_EQ(kDecoderPCMu, info->codec_type);
+ EXPECT_EQ(&decoder, info->decoder);
+ EXPECT_EQ(8000, info->fs_hz);
+ EXPECT_TRUE(info->external);
+ // Expect not to delete the decoder when removing it from the database, since
+ // it was declared externally.
+ EXPECT_CALL(decoder, Die()).Times(0);
+ EXPECT_EQ(DecoderDatabase::kOK, db.Remove(kPayloadType));
+ EXPECT_TRUE(db.Empty());
+
+ EXPECT_CALL(decoder, Die()).Times(1); // Will be called when |db| is deleted.
+}
+
+TEST(DecoderDatabase, CheckPayloadTypes) {
+ DecoderDatabase db;
+ // Load a number of payloads into the database. Payload types are 0, 1, ...,
+ // while the decoder type is the same for all payload types (this does not
+ // matter for the test).
+ const int kNumPayloads = 10;
+ for (uint8_t payload_type = 0; payload_type < kNumPayloads; ++payload_type) {
+ EXPECT_EQ(DecoderDatabase::kOK,
+ db.RegisterPayload(payload_type, kDecoderArbitrary));
+ }
+ PacketList packet_list;
+ for (int i = 0; i < kNumPayloads + 1; ++i) {
+ // Create packet with payload type |i|. The last packet will have a payload
+ // type that is not registered in the decoder database.
+ Packet* packet = new Packet;
+ packet->header.payloadType = i;
+ packet_list.push_back(packet);
+ }
+
+ // Expect to return false, since the last packet is of an unknown type.
+ EXPECT_EQ(DecoderDatabase::kDecoderNotFound,
+ db.CheckPayloadTypes(packet_list));
+
+ delete packet_list.back();
+ packet_list.pop_back(); // Remove the unknown one.
+
+ EXPECT_EQ(DecoderDatabase::kOK, db.CheckPayloadTypes(packet_list));
+
+ // Delete all packets.
+ PacketList::iterator it = packet_list.begin();
+ while (it != packet_list.end()) {
+ delete packet_list.front();
+ it = packet_list.erase(it);
+ }
+}
+
+// Test the methods for setting and getting active speech and CNG decoders.
+TEST(DecoderDatabase, ActiveDecoders) {
+ DecoderDatabase db;
+ // Load payload types.
+ ASSERT_EQ(DecoderDatabase::kOK, db.RegisterPayload(0, kDecoderPCMu));
+ ASSERT_EQ(DecoderDatabase::kOK, db.RegisterPayload(103, kDecoderISAC));
+ ASSERT_EQ(DecoderDatabase::kOK, db.RegisterPayload(13, kDecoderCNGnb));
+ // Verify that no decoders are active from the start.
+ EXPECT_EQ(NULL, db.GetActiveDecoder());
+ EXPECT_EQ(NULL, db.GetActiveCngDecoder());
+
+ // Set active speech codec.
+ bool changed; // Should be true when the active decoder changed.
+ EXPECT_EQ(DecoderDatabase::kOK, db.SetActiveDecoder(0, &changed));
+ EXPECT_TRUE(changed);
+ AudioDecoder* decoder = db.GetActiveDecoder();
+ ASSERT_FALSE(decoder == NULL); // Should get a decoder here.
+ EXPECT_EQ(kDecoderPCMu, decoder->codec_type());
+
+ // Set the same again. Expect no change.
+ EXPECT_EQ(DecoderDatabase::kOK, db.SetActiveDecoder(0, &changed));
+ EXPECT_FALSE(changed);
+ decoder = db.GetActiveDecoder();
+ ASSERT_FALSE(decoder == NULL); // Should get a decoder here.
+ EXPECT_EQ(kDecoderPCMu, decoder->codec_type());
+
+ // Change active decoder.
+ EXPECT_EQ(DecoderDatabase::kOK, db.SetActiveDecoder(103, &changed));
+ EXPECT_TRUE(changed);
+ decoder = db.GetActiveDecoder();
+ ASSERT_FALSE(decoder == NULL); // Should get a decoder here.
+ EXPECT_EQ(kDecoderISAC, decoder->codec_type());
+
+ // Remove the active decoder, and verify that the active becomes NULL.
+ EXPECT_EQ(DecoderDatabase::kOK, db.Remove(103));
+ EXPECT_EQ(NULL, db.GetActiveDecoder());
+
+ // Set active CNG codec.
+ EXPECT_EQ(DecoderDatabase::kOK, db.SetActiveCngDecoder(13));
+ decoder = db.GetActiveCngDecoder();
+ ASSERT_FALSE(decoder == NULL); // Should get a decoder here.
+ EXPECT_EQ(kDecoderCNGnb, decoder->codec_type());
+
+ // Remove the active CNG decoder, and verify that the active becomes NULL.
+ EXPECT_EQ(DecoderDatabase::kOK, db.Remove(13));
+ EXPECT_EQ(NULL, db.GetActiveCngDecoder());
+
+ // Try to set non-existing codecs as active.
+ EXPECT_EQ(DecoderDatabase::kDecoderNotFound,
+ db.SetActiveDecoder(17, &changed));
+ EXPECT_EQ(DecoderDatabase::kDecoderNotFound,
+ db.SetActiveCngDecoder(17));
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/defines.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/defines.h
new file mode 100644
index 00000000000..33d1bd9c3f0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/defines.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DEFINES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DEFINES_H_
+
+namespace webrtc {
+
+enum Operations {
+ kNormal = 0,
+ kMerge,
+ kExpand,
+ kAccelerate,
+ kPreemptiveExpand,
+ kRfc3389Cng,
+ kRfc3389CngNoPacket,
+ kCodecInternalCng,
+ kDtmf,
+ kAlternativePlc,
+ kAlternativePlcIncreaseTimestamp,
+ kAudioRepetition,
+ kAudioRepetitionIncreaseTimestamp,
+ kUndefined = -1
+};
+
+enum Modes {
+ kModeNormal = 0,
+ kModeExpand,
+ kModeMerge,
+ kModeAccelerateSuccess,
+ kModeAccelerateLowEnergy,
+ kModeAccelerateFail,
+ kModePreemptiveExpandSuccess,
+ kModePreemptiveExpandLowEnergy,
+ kModePreemptiveExpandFail,
+ kModeRfc3389Cng,
+ kModeCodecInternalCng,
+ kModeDtmf,
+ kModeError,
+ kModeUndefined = -1
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DEFINES_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_logging.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_logging.h
deleted file mode 100644
index 04b1c401528..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_logging.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Contains definitions for the delay logging functionality. Only used for debugging and
- * tracing purposes.
- */
-
-#ifndef DELAY_LOGGING_H
-#define DELAY_LOGGING_H
-
-#define NETEQ_DELAY_LOGGING_VERSION_STRING "2.0"
-
-#define NETEQ_DELAY_LOGGING_SIGNAL_RECIN 1
-#define NETEQ_DELAY_LOGGING_SIGNAL_FLUSH 2
-#define NETEQ_DELAY_LOGGING_SIGNAL_CLOCK 3
-#define NETEQ_DELAY_LOGGING_SIGNAL_EOF 4
-#define NETEQ_DELAY_LOGGING_SIGNAL_DECODE 5
-#define NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS 6
-#define NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO 7
-#define NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO 8
-#define NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO 9
-#define NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO 10
-#define NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF 11
-#define NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC 12
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc
new file mode 100644
index 00000000000..a935561eff4
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc
@@ -0,0 +1,425 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+
+#include <assert.h>
+#include <math.h>
+
+#include <algorithm> // max, min
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+DelayManager::DelayManager(int max_packets_in_buffer,
+ DelayPeakDetector* peak_detector)
+ : first_packet_received_(false),
+ max_packets_in_buffer_(max_packets_in_buffer),
+ iat_vector_(kMaxIat + 1, 0),
+ iat_factor_(0),
+ packet_iat_count_ms_(0),
+ base_target_level_(4), // In Q0 domain.
+ target_level_(base_target_level_ << 8), // In Q8 domain.
+ packet_len_ms_(0),
+ streaming_mode_(false),
+ last_seq_no_(0),
+ last_timestamp_(0),
+ minimum_delay_ms_(0),
+ least_required_delay_ms_(target_level_),
+ maximum_delay_ms_(target_level_),
+ iat_cumulative_sum_(0),
+ max_iat_cumulative_sum_(0),
+ max_timer_ms_(0),
+ peak_detector_(*peak_detector),
+ last_pack_cng_or_dtmf_(1) {
+ assert(peak_detector); // Should never be NULL.
+ Reset();
+}
+
+DelayManager::~DelayManager() {}
+
+const DelayManager::IATVector& DelayManager::iat_vector() const {
+ return iat_vector_;
+}
+
+// Set the histogram vector to an exponentially decaying distribution
+// iat_vector_[i] = 0.5^(i+1), i = 0, 1, 2, ...
+// iat_vector_ is in Q30.
+void DelayManager::ResetHistogram() {
+ // Set temp_prob to (slightly more than) 1 in Q14. This ensures that the sum
+ // of iat_vector_ is 1.
+ uint16_t temp_prob = 0x4002; // 16384 + 2 = 100000000000010 binary.
+ IATVector::iterator it = iat_vector_.begin();
+ for (; it < iat_vector_.end(); it++) {
+ temp_prob >>= 1;
+ (*it) = temp_prob << 16;
+ }
+ base_target_level_ = 4;
+ target_level_ = base_target_level_ << 8;
+}
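+
+// A worked pass through the loop above, assuming the default kMaxIat = 64
+// (65 histogram bins): temp_prob starts at 16386 and is halved before each
+// write, so the bins become 8193, 4096, 2048, ..., 1, 0, ... in Q14, each then
+// shifted up to Q30. Their sum is 8193 + (4096 + ... + 1) = 16384 = 1 << 14,
+// which after the 16-bit shift is exactly 1 << 30, i.e., probability 1 in Q30.
+// The "+2" in the start value 0x4002 compensates for truncating the tail.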
+
+int DelayManager::Update(uint16_t sequence_number,
+ uint32_t timestamp,
+ int sample_rate_hz) {
+ if (sample_rate_hz <= 0) {
+ return -1;
+ }
+
+ if (!first_packet_received_) {
+ // Prepare for next packet arrival.
+ packet_iat_count_ms_ = 0;
+ last_seq_no_ = sequence_number;
+ last_timestamp_ = timestamp;
+ first_packet_received_ = true;
+ return 0;
+ }
+
+ // Try calculating packet length from current and previous timestamps.
+ int packet_len_ms;
+ if (!IsNewerTimestamp(timestamp, last_timestamp_) ||
+ !IsNewerSequenceNumber(sequence_number, last_seq_no_)) {
+ // Wrong timestamp or sequence order; use stored value.
+ packet_len_ms = packet_len_ms_;
+ } else {
+ // Calculate timestamps per packet and derive packet length in ms.
+ int packet_len_samp =
+ static_cast<uint32_t>(timestamp - last_timestamp_) /
+ static_cast<uint16_t>(sequence_number - last_seq_no_);
+ packet_len_ms = (1000 * packet_len_samp) / sample_rate_hz;
+ }
+
+ if (packet_len_ms > 0) {
+ // Cannot update statistics unless |packet_len_ms| is valid.
+ // Calculate inter-arrival time (IAT) in integer "packet times"
+ // (rounding down). This is the value used as index to the histogram
+ // vector |iat_vector_|.
+ int iat_packets = packet_iat_count_ms_ / packet_len_ms;
+
+ if (streaming_mode_) {
+ UpdateCumulativeSums(packet_len_ms, sequence_number);
+ }
+
+ // Check for discontinuous packet sequence and re-ordering.
+ if (IsNewerSequenceNumber(sequence_number, last_seq_no_ + 1)) {
+ // Compensate for gap in the sequence numbers. Reduce IAT with the
+ // expected extra time due to lost packets, but ensure that the IAT is
+ // not negative.
+ iat_packets -= static_cast<uint16_t>(sequence_number - last_seq_no_ - 1);
+ iat_packets = std::max(iat_packets, 0);
+ } else if (!IsNewerSequenceNumber(sequence_number, last_seq_no_)) {
+ iat_packets += static_cast<uint16_t>(last_seq_no_ + 1 - sequence_number);
+ }
+
+ // Saturate IAT at maximum value.
+ const int max_iat = kMaxIat;
+ iat_packets = std::min(iat_packets, max_iat);
+ UpdateHistogram(iat_packets);
+ // Calculate new |target_level_| based on updated statistics.
+ target_level_ = CalculateTargetLevel(iat_packets);
+ if (streaming_mode_) {
+ target_level_ = std::max(target_level_, max_iat_cumulative_sum_);
+ }
+
+ LimitTargetLevel();
+ } // End if (packet_len_ms > 0).
+
+ // Prepare for next packet arrival.
+ packet_iat_count_ms_ = 0;
+ last_seq_no_ = sequence_number;
+ last_timestamp_ = timestamp;
+ return 0;
+}
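+
+// A numeric sketch of Update() with assumed values: at 8000 Hz and a timestamp
+// increment of 160 between consecutive sequence numbers, packet_len_samp = 160
+// and packet_len_ms = 1000 * 160 / 8000 = 20 ms. If 45 ms have elapsed since
+// the previous arrival, iat_packets = 45 / 20 = 2 (rounded down). Had one
+// packet been lost (a sequence-number gap of 2), the gap compensation above
+// would subtract 1, giving iat_packets = 1 before the histogram update.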
+
+void DelayManager::UpdateCumulativeSums(int packet_len_ms,
+ uint16_t sequence_number) {
+  // Calculate IAT in Q8, including fractions of a packet (i.e., more
+  // accurate than |iat_packets|).
+ int iat_packets_q8 = (packet_iat_count_ms_ << 8) / packet_len_ms;
+ // Calculate cumulative sum IAT with sequence number compensation. The sum
+ // is zero if there is no clock-drift.
+ iat_cumulative_sum_ += (iat_packets_q8 -
+ (static_cast<int>(sequence_number - last_seq_no_) << 8));
+ // Subtract drift term.
+ iat_cumulative_sum_ -= kCumulativeSumDrift;
+ // Ensure not negative.
+ iat_cumulative_sum_ = std::max(iat_cumulative_sum_, 0);
+ if (iat_cumulative_sum_ > max_iat_cumulative_sum_) {
+ // Found a new maximum.
+ max_iat_cumulative_sum_ = iat_cumulative_sum_;
+ max_timer_ms_ = 0;
+ }
+ if (max_timer_ms_ > kMaxStreamingPeakPeriodMs) {
+ // Too long since the last maximum was observed; decrease max value.
+ max_iat_cumulative_sum_ -= kCumulativeSumDrift;
+ }
+}
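+
+// A numeric sketch of the cumulative sum with assumed values: packet_len_ms =
+// 20 and packet_iat_count_ms_ = 30 give iat_packets_q8 = (30 << 8) / 20 = 384,
+// i.e., 1.5 packets in Q8. With a sequence-number step of 1, 1 << 8 = 256 is
+// subtracted, leaving 128 (half a packet of excess delay), and the drift term
+// kCumulativeSumDrift = 2 brings the increment to 126. Without clock drift the
+// IAT term averages out against the sequence-number term, so the drift
+// subtraction keeps |iat_cumulative_sum_| near its floor of 0.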
+
+// Each element in the vector is first multiplied by the forgetting factor
+// |iat_factor_|. The vector element indicated by |iat_packets| is then
+// increased (additively) by 1 - |iat_factor_|. This way, the probability of
+// |iat_packets| is slightly increased, while the sum of the histogram remains
+// constant (=1).
+// Due to inaccuracies in the fixed-point arithmetic, the histogram may no
+// longer sum up to 1 (in Q30) after the update. To correct this, a correction
+// term is added or subtracted from the first element (or elements) of the
+// vector.
+// The forgetting factor |iat_factor_| is also updated. When the DelayManager
+// is reset, the factor is set to 0 to facilitate rapid convergence in the
+// beginning. With each update of the histogram, the factor is increased towards
+// the steady-state value |kIatFactor_|.
+void DelayManager::UpdateHistogram(size_t iat_packets) {
+ assert(iat_packets < iat_vector_.size());
+ int vector_sum = 0; // Sum up the vector elements as they are processed.
+ // Multiply each element in |iat_vector_| with |iat_factor_|.
+ for (IATVector::iterator it = iat_vector_.begin();
+ it != iat_vector_.end(); ++it) {
+ *it = (static_cast<int64_t>(*it) * iat_factor_) >> 15;
+ vector_sum += *it;
+ }
+
+ // Increase the probability for the currently observed inter-arrival time
+ // by 1 - |iat_factor_|. The factor is in Q15, |iat_vector_| in Q30.
+ // Thus, left-shift 15 steps to obtain result in Q30.
+ iat_vector_[iat_packets] += (32768 - iat_factor_) << 15;
+ vector_sum += (32768 - iat_factor_) << 15; // Add to vector sum.
+
+ // |iat_vector_| should sum up to 1 (in Q30), but it may not due to
+ // fixed-point rounding errors.
+ vector_sum -= 1 << 30; // Should be zero. Compensate if not.
+ if (vector_sum != 0) {
+ // Modify a few values early in |iat_vector_|.
+ int flip_sign = vector_sum > 0 ? -1 : 1;
+ IATVector::iterator it = iat_vector_.begin();
+ while (it != iat_vector_.end() && abs(vector_sum) > 0) {
+ // Add/subtract 1/16 of the element, but not more than |vector_sum|.
+ int correction = flip_sign * std::min(abs(vector_sum), (*it) >> 4);
+ *it += correction;
+ vector_sum += correction;
+ ++it;
+ }
+ }
+ assert(vector_sum == 0); // Verify that the above is correct.
+
+ // Update |iat_factor_| (changes only during the first seconds after a reset).
+ // The factor converges to |kIatFactor_|.
+ iat_factor_ += (kIatFactor_ - iat_factor_ + 3) >> 2;
+}
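+
+// A numeric sketch of the forgetting factor: at the steady state kIatFactor_ =
+// 32745 (0.9993 in Q15), every bin keeps 32745/32768 of its mass and the
+// observed bin gains (32768 - 32745) << 15 = 753664 in Q30, roughly 0.0007 of
+// the total probability, so a converged histogram adapts slowly. The last line
+// above moves |iat_factor_| a quarter of the remaining distance towards
+// kIatFactor_ on every update, so after a reset it is within ~10% of the
+// steady-state value after about eight packets.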
+
+// Enforces upper and lower limits for |target_level_|. The upper limit is
+// chosen as the minimum of i) 75% of |max_packets_in_buffer_|, to leave some
+// headroom for natural fluctuations around the target, and ii) the equivalent
+// of |maximum_delay_ms_| in packets. Note that in practice, if no
+// |maximum_delay_ms_| is specified, this does not have any impact, since the
+// target level is far below the buffer capacity in all reasonable cases.
+// The lower limit is the equivalent of |minimum_delay_ms_| in packets.
+// |least_required_delay_ms_| is updated while the above limits are applied.
+// TODO(hlundin): Move this check to the buffer logistics class.
+void DelayManager::LimitTargetLevel() {
+ least_required_delay_ms_ = (target_level_ * packet_len_ms_) >> 8;
+
+ if (packet_len_ms_ > 0 && minimum_delay_ms_ > 0) {
+ int minimum_delay_packet_q8 = (minimum_delay_ms_ << 8) / packet_len_ms_;
+ target_level_ = std::max(target_level_, minimum_delay_packet_q8);
+ }
+
+ if (maximum_delay_ms_ > 0 && packet_len_ms_ > 0) {
+ int maximum_delay_packet_q8 = (maximum_delay_ms_ << 8) / packet_len_ms_;
+ target_level_ = std::min(target_level_, maximum_delay_packet_q8);
+ }
+
+  // Shift to Q8, then take 75%.
+ int max_buffer_packets_q8 = (3 * (max_packets_in_buffer_ << 8)) / 4;
+ target_level_ = std::min(target_level_, max_buffer_packets_q8);
+
+ // Sanity check, at least 1 packet (in Q8).
+ target_level_ = std::max(target_level_, 1 << 8);
+}
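+
+// A numeric sketch of the limits with assumed values: max_packets_in_buffer_ =
+// 240, packet_len_ms_ = 20, minimum_delay_ms_ = 100 and no maximum delay. The
+// lower bound is (100 << 8) / 20 = 1280, i.e., 5 packets in Q8, and the upper
+// cap is 3 * (240 << 8) / 4 = 46080, i.e., 180 packets in Q8. A target level
+// of, say, 3 packets (768 in Q8) would therefore be raised to 1280 here.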
+
+int DelayManager::CalculateTargetLevel(int iat_packets) {
+ int limit_probability = kLimitProbability;
+ if (streaming_mode_) {
+ limit_probability = kLimitProbabilityStreaming;
+ }
+
+ // Calculate target buffer level from inter-arrival time histogram.
+ // Find the |iat_index| for which the probability of observing an
+ // inter-arrival time larger than or equal to |iat_index| is less than or
+ // equal to |limit_probability|. The sought probability is estimated using
+// the histogram as a reverse cumulative distribution: the sum of elements from
+ // the end up until |iat_index|. Now, since the sum of all elements is 1
+ // (in Q30) by definition, and since the solution is often a low value for
+ // |iat_index|, it is more efficient to start with |sum| = 1 and subtract
+ // elements from the start of the histogram.
+ size_t index = 0; // Start from the beginning of |iat_vector_|.
+ int sum = 1 << 30; // Assign to 1 in Q30.
+ sum -= iat_vector_[index]; // Ensure that target level is >= 1.
+
+ do {
+ // Subtract the probabilities one by one until the sum is no longer greater
+ // than limit_probability.
+ ++index;
+ sum -= iat_vector_[index];
+ } while ((sum > limit_probability) && (index < iat_vector_.size() - 1));
+
+ // This is the base value for the target buffer level.
+ int target_level = static_cast<int>(index);
+ base_target_level_ = static_cast<int>(index);
+
+ // Update detector for delay peaks.
+ bool delay_peak_found = peak_detector_.Update(iat_packets, target_level);
+ if (delay_peak_found) {
+ target_level = std::max(target_level, peak_detector_.MaxPeakHeight());
+ }
+
+ // Sanity check. |target_level| must be strictly positive.
+ target_level = std::max(target_level, 1);
+ // Scale to Q8 and assign to member variable.
+ target_level_ = target_level << 8;
+ return target_level_;
+}
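+
+// A numeric sketch with an assumed histogram of roughly [0.80, 0.12, 0.05,
+// 0.02, 0.01, ...] (in Q30) and limit_probability = 1/20: after the initial
+// subtraction, sum = 0.20 at index 0; the loop then gives sum = 0.08 at index
+// 1 and sum = 0.03 at index 2, where it stops since 0.03 <= 0.05. The base
+// target level becomes 2 packets. If the peak detector also reports a recent
+// peak of height 5, the final target is 5 packets, i.e., 5 << 8 = 1280 in Q8.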
+
+int DelayManager::SetPacketAudioLength(int length_ms) {
+ if (length_ms <= 0) {
+ LOG_F(LS_ERROR) << "length_ms = " << length_ms;
+ return -1;
+ }
+ packet_len_ms_ = length_ms;
+ peak_detector_.SetPacketAudioLength(packet_len_ms_);
+ packet_iat_count_ms_ = 0;
+ last_pack_cng_or_dtmf_ = 1; // TODO(hlundin): Legacy. Remove?
+ return 0;
+}
+
+
+void DelayManager::Reset() {
+ packet_len_ms_ = 0; // Packet size unknown.
+ streaming_mode_ = false;
+ peak_detector_.Reset();
+ ResetHistogram(); // Resets target levels too.
+ iat_factor_ = 0; // Adapt the histogram faster for the first few packets.
+ packet_iat_count_ms_ = 0;
+ max_timer_ms_ = 0;
+ iat_cumulative_sum_ = 0;
+ max_iat_cumulative_sum_ = 0;
+ last_pack_cng_or_dtmf_ = 1;
+}
+
+int DelayManager::AverageIAT() const {
+ int32_t sum_q24 = 0;
+ // Using an int for the upper limit of the following for-loop so the
+ // loop-counter can be int. Otherwise we need a cast where |sum_q24| is
+ // updated.
+ const int iat_vec_size = static_cast<int>(iat_vector_.size());
+ assert(iat_vector_.size() == 65); // Algorithm is hard-coded for this size.
+ for (int i = 0; i < iat_vec_size; ++i) {
+ // Shift 6 to fit worst case: 2^30 * 64.
+ sum_q24 += (iat_vector_[i] >> 6) * i;
+ }
+ // Subtract the nominal inter-arrival time 1 = 2^24 in Q24.
+ sum_q24 -= (1 << 24);
+ // Multiply with 1000000 / 2^24 = 15625 / 2^18 to get in parts-per-million.
+ // Shift 7 to Q17 first, then multiply with 15625 and shift another 11.
+ return ((sum_q24 >> 7) * 15625) >> 11;
+}
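+
+// A numeric sketch: if the histogram mean is 1.01 packets, sum_q24 is about
+// 0.01 * 2^24 = 167772 after the nominal 1 << 24 is subtracted, and
+// ((167772 >> 7) * 15625) >> 11 = 9994, i.e., roughly +10000 parts-per-million.
+// In other words, packets arrive on average 1% further apart than nominal.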
+
+bool DelayManager::PeakFound() const {
+ return peak_detector_.peak_found();
+}
+
+void DelayManager::UpdateCounters(int elapsed_time_ms) {
+ packet_iat_count_ms_ += elapsed_time_ms;
+ peak_detector_.IncrementCounter(elapsed_time_ms);
+ max_timer_ms_ += elapsed_time_ms;
+}
+
+void DelayManager::ResetPacketIatCount() { packet_iat_count_ms_ = 0; }
+
+// Note that |lower_limit| and |higher_limit| are not assigned to
+// |minimum_delay_ms_| and |maximum_delay_ms_| defined by the client of this
+// class. They are computed from |target_level_| and used for decision making.
+void DelayManager::BufferLimits(int* lower_limit, int* higher_limit) const {
+ if (!lower_limit || !higher_limit) {
+ LOG_F(LS_ERROR) << "NULL pointers supplied as input";
+ assert(false);
+ return;
+ }
+
+ int window_20ms = 0x7FFF; // Default large value for legacy bit-exactness.
+ if (packet_len_ms_ > 0) {
+ window_20ms = (20 << 8) / packet_len_ms_;
+ }
+
+ // |target_level_| is in Q8 already.
+ *lower_limit = (target_level_ * 3) / 4;
+ // |higher_limit| is equal to |target_level_|, but should at
+  // least be 20 ms higher than |lower_limit|.
+ *higher_limit = std::max(target_level_, *lower_limit + window_20ms);
+}
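+
+// A numeric sketch matching the expectations in delay_manager_unittest.cc:
+// with target_level_ = 2 << 8 = 512 and packet_len_ms_ = 20, window_20ms =
+// (20 << 8) / 20 = 256, so *lower_limit = 512 * 3 / 4 = 384 (1.5 packets in
+// Q8) and *higher_limit = max(512, 384 + 256) = 640 (2.5 packets in Q8).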
+
+int DelayManager::TargetLevel() const {
+ return target_level_;
+}
+
+void DelayManager::LastDecoderType(NetEqDecoder decoder_type) {
+ if (decoder_type == kDecoderAVT ||
+ decoder_type == kDecoderCNGnb ||
+ decoder_type == kDecoderCNGwb ||
+ decoder_type == kDecoderCNGswb32kHz ||
+ decoder_type == kDecoderCNGswb48kHz) {
+ last_pack_cng_or_dtmf_ = 1;
+ } else if (last_pack_cng_or_dtmf_ != 0) {
+ last_pack_cng_or_dtmf_ = -1;
+ }
+}
+
+bool DelayManager::SetMinimumDelay(int delay_ms) {
+ // Minimum delay shouldn't be more than maximum delay, if any maximum is set.
+  // Also, if the packet length is known, check that |delay_ms| is less than
+  // 75% of the buffer capacity |max_packets_in_buffer_| (converted to ms).
+ if ((maximum_delay_ms_ > 0 && delay_ms > maximum_delay_ms_) ||
+ (packet_len_ms_ > 0 &&
+ delay_ms > 3 * max_packets_in_buffer_ * packet_len_ms_ / 4)) {
+ return false;
+ }
+ minimum_delay_ms_ = delay_ms;
+ return true;
+}
+
+bool DelayManager::SetMaximumDelay(int delay_ms) {
+ if (delay_ms == 0) {
+ // Zero input unsets the maximum delay.
+ maximum_delay_ms_ = 0;
+ return true;
+ } else if (delay_ms < minimum_delay_ms_ || delay_ms < packet_len_ms_) {
+ // Maximum delay shouldn't be less than minimum delay or less than a packet.
+ return false;
+ }
+ maximum_delay_ms_ = delay_ms;
+ return true;
+}
+
+int DelayManager::least_required_delay_ms() const {
+ return least_required_delay_ms_;
+}
+
+int DelayManager::base_target_level() const { return base_target_level_; }
+void DelayManager::set_streaming_mode(bool value) { streaming_mode_ = value; }
+int DelayManager::last_pack_cng_or_dtmf() const {
+ return last_pack_cng_or_dtmf_;
+}
+
+void DelayManager::set_last_pack_cng_or_dtmf(int value) {
+ last_pack_cng_or_dtmf_ = value;
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h
new file mode 100644
index 00000000000..96b5e19ebdc
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_MANAGER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_MANAGER_H_
+
+#include <string.h> // Provide access to size_t.
+
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declaration.
+class DelayPeakDetector;
+
+class DelayManager {
+ public:
+ typedef std::vector<int> IATVector;
+
+ // Create a DelayManager object. Notify the delay manager that the packet
+ // buffer can hold no more than |max_packets_in_buffer| packets (i.e., this
+ // is the number of packet slots in the buffer). Supply a PeakDetector
+ // object to the DelayManager.
+ DelayManager(int max_packets_in_buffer, DelayPeakDetector* peak_detector);
+
+ virtual ~DelayManager();
+
+ // Read the inter-arrival time histogram. Mainly for testing purposes.
+ virtual const IATVector& iat_vector() const;
+
+ // Updates the delay manager with a new incoming packet, with
+ // |sequence_number| and |timestamp| from the RTP header. This updates the
+ // inter-arrival time histogram and other statistics, as well as the
+ // associated DelayPeakDetector. A new target buffer level is calculated.
+ // Returns 0 on success, -1 on failure (invalid sample rate).
+ virtual int Update(uint16_t sequence_number,
+ uint32_t timestamp,
+ int sample_rate_hz);
+
+ // Calculates a new target buffer level. Called from the Update() method.
+ // Sets target_level_ (in Q8) and returns the same value. Also calculates
+ // and updates base_target_level_, which is the target buffer level before
+ // taking delay peaks into account.
+ virtual int CalculateTargetLevel(int iat_packets);
+
+ // Notifies the DelayManager of how much audio data is carried in each packet.
+ // The method updates the DelayPeakDetector too, and resets the inter-arrival
+ // time counter. Returns 0 on success, -1 on failure.
+ virtual int SetPacketAudioLength(int length_ms);
+
+ // Resets the DelayManager and the associated DelayPeakDetector.
+ virtual void Reset();
+
+ // Calculates the average inter-arrival time deviation from the histogram.
+ // The result is returned as parts-per-million deviation from the nominal
+ // inter-arrival time. That is, if the average inter-arrival time is equal to
+ // the nominal frame time, the return value is zero. A positive value
+ // corresponds to packet spacing being too large, while a negative value means
+ // that the packets arrive with less spacing than expected.
+ virtual int AverageIAT() const;
+
+ // Returns true if peak-mode is active. That is, delay peaks were observed
+ // recently. This method simply asks for the same information from the
+ // DelayPeakDetector object.
+ virtual bool PeakFound() const;
+
+ // Notifies the counters in DelayManager and DelayPeakDetector that
+ // |elapsed_time_ms| have elapsed.
+ virtual void UpdateCounters(int elapsed_time_ms);
+
+ // Reset the inter-arrival time counter to 0.
+ virtual void ResetPacketIatCount();
+
+ // Writes the lower and higher limits which the buffer level should stay
+ // within to the corresponding pointers. The values are in (fractions of)
+ // packets in Q8.
+ virtual void BufferLimits(int* lower_limit, int* higher_limit) const;
+
+  // Gets the target buffer level, in (fractions of) packets in Q8. This value
+  // takes any minimum or maximum delay set through SetMinimumDelay() and
+  // SetMaximumDelay() into account.
+ virtual int TargetLevel() const;
+
+ virtual void LastDecoderType(NetEqDecoder decoder_type);
+
+ // Accessors and mutators.
+  // Assuming |delay_ms| is in the valid range.
+ virtual bool SetMinimumDelay(int delay_ms);
+ virtual bool SetMaximumDelay(int delay_ms);
+ virtual int least_required_delay_ms() const;
+ virtual int base_target_level() const;
+ virtual void set_streaming_mode(bool value);
+ virtual int last_pack_cng_or_dtmf() const;
+ virtual void set_last_pack_cng_or_dtmf(int value);
+
+ private:
+ static const int kLimitProbability = 53687091; // 1/20 in Q30.
+ static const int kLimitProbabilityStreaming = 536871; // 1/2000 in Q30.
+ static const int kMaxStreamingPeakPeriodMs = 600000; // 10 minutes in ms.
+ static const int kCumulativeSumDrift = 2; // Drift term for cumulative sum
+ // |iat_cumulative_sum_|.
+ // Steady-state forgetting factor for |iat_vector_|, 0.9993 in Q15.
+ static const int kIatFactor_ = 32745;
+ static const int kMaxIat = 64; // Max inter-arrival time to register.
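+
+  // As a check on the Q-format values above: 2^30 / 20 = 53687091.2 and
+  // 2^30 / 2000 = 536870.9, which round to the two probability limits, and
+  // 0.9993 * 2^15 = 32745.1, which rounds to kIatFactor_.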
+
+ // Sets |iat_vector_| to the default start distribution and sets the
+ // |base_target_level_| and |target_level_| to the corresponding values.
+ void ResetHistogram();
+
+ // Updates |iat_cumulative_sum_| and |max_iat_cumulative_sum_|. (These are
+ // used by the streaming mode.) This method is called by Update().
+ void UpdateCumulativeSums(int packet_len_ms, uint16_t sequence_number);
+
+ // Updates the histogram |iat_vector_|. The probability for inter-arrival time
+ // equal to |iat_packets| (in integer packets) is increased slightly, while
+ // all other entries are decreased. This method is called by Update().
+ void UpdateHistogram(size_t iat_packets);
+
+  // Makes sure that |target_level_| stays within its limits, taking
+  // |max_packets_in_buffer_|, |minimum_delay_ms_| and |maximum_delay_ms_| into
+  // account. This method is called by Update().
+ void LimitTargetLevel();
+
+ bool first_packet_received_;
+ const int max_packets_in_buffer_; // Capacity of the packet buffer.
+ IATVector iat_vector_; // Histogram of inter-arrival times.
+ int iat_factor_; // Forgetting factor for updating the IAT histogram (Q15).
+ int packet_iat_count_ms_; // Milliseconds elapsed since last packet.
+ int base_target_level_; // Currently preferred buffer level before peak
+ // detection and streaming mode (Q0).
+ // TODO(turajs) change the comment according to the implementation of
+ // minimum-delay.
+ int target_level_; // Currently preferred buffer level in (fractions)
+ // of packets (Q8), before adding any extra delay.
+ int packet_len_ms_; // Length of audio in each incoming packet [ms].
+ bool streaming_mode_;
+ uint16_t last_seq_no_; // Sequence number for last received packet.
+ uint32_t last_timestamp_; // Timestamp for the last received packet.
+ int minimum_delay_ms_; // Externally set minimum delay.
+  int least_required_delay_ms_;  // Smallest preferred buffer level in ms,
+                                 // i.e., |target_level_| converted to ms,
+                                 // before applying |minimum_delay_ms_| and/or
+                                 // |maximum_delay_ms_|.
+ int maximum_delay_ms_; // Externally set maximum allowed delay.
+ int iat_cumulative_sum_; // Cumulative sum of delta inter-arrival times.
+ int max_iat_cumulative_sum_; // Max of |iat_cumulative_sum_|.
+ int max_timer_ms_; // Time elapsed since maximum was observed.
+ DelayPeakDetector& peak_detector_;
+ int last_pack_cng_or_dtmf_;
+
+ DISALLOW_COPY_AND_ASSIGN(DelayManager);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_MANAGER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc
new file mode 100644
index 00000000000..6f9733234d5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc
@@ -0,0 +1,297 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for DelayManager class.
+
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+
+#include <math.h>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h"
+
+namespace webrtc {
+
+using ::testing::Return;
+using ::testing::_;
+
+class DelayManagerTest : public ::testing::Test {
+ protected:
+ static const int kMaxNumberOfPackets = 240;
+ static const int kTimeStepMs = 10;
+ static const int kFs = 8000;
+ static const int kFrameSizeMs = 20;
+ static const int kTsIncrement = kFrameSizeMs * kFs / 1000;
+
+ DelayManagerTest();
+ virtual void SetUp();
+ virtual void TearDown();
+  void SetPacketAudioLength(int length_ms);
+ void InsertNextPacket();
+ void IncreaseTime(int inc_ms);
+
+ DelayManager* dm_;
+ MockDelayPeakDetector detector_;
+ uint16_t seq_no_;
+ uint32_t ts_;
+};
+
+DelayManagerTest::DelayManagerTest()
+ : dm_(NULL),
+ seq_no_(0x1234),
+ ts_(0x12345678) {
+}
+
+void DelayManagerTest::SetUp() {
+ EXPECT_CALL(detector_, Reset())
+ .Times(1);
+ dm_ = new DelayManager(kMaxNumberOfPackets, &detector_);
+}
+
+void DelayManagerTest::SetPacketAudioLength(int length_ms) {
+  EXPECT_CALL(detector_, SetPacketAudioLength(length_ms));
+  dm_->SetPacketAudioLength(length_ms);
+}
+
+void DelayManagerTest::InsertNextPacket() {
+ EXPECT_EQ(0, dm_->Update(seq_no_, ts_, kFs));
+ seq_no_ += 1;
+ ts_ += kTsIncrement;
+}
+
+void DelayManagerTest::IncreaseTime(int inc_ms) {
+ for (int t = 0; t < inc_ms; t += kTimeStepMs) {
+ EXPECT_CALL(detector_, IncrementCounter(kTimeStepMs))
+ .Times(1);
+ dm_->UpdateCounters(kTimeStepMs);
+ }
+}
+
+void DelayManagerTest::TearDown() {
+ EXPECT_CALL(detector_, Die());
+ delete dm_;
+}
+
+TEST_F(DelayManagerTest, CreateAndDestroy) {
+ // Nothing to do here. The test fixture creates and destroys the DelayManager
+ // object.
+}
+
+TEST_F(DelayManagerTest, VectorInitialization) {
+ const DelayManager::IATVector& vec = dm_->iat_vector();
+ double sum = 0.0;
+ for (size_t i = 0; i < vec.size(); i++) {
+ EXPECT_NEAR(ldexp(pow(0.5, static_cast<int>(i + 1)), 30), vec[i], 65536);
+ // Tolerance 65536 in Q30 corresponds to a delta of approximately 0.00006.
+ sum += vec[i];
+ }
+ EXPECT_EQ(1 << 30, static_cast<int>(sum)); // Should be 1 in Q30.
+}
+
+TEST_F(DelayManagerTest, SetPacketAudioLength) {
+ const int kLengthMs = 30;
+ // Expect DelayManager to pass on the new length to the detector object.
+ EXPECT_CALL(detector_, SetPacketAudioLength(kLengthMs))
+ .Times(1);
+ EXPECT_EQ(0, dm_->SetPacketAudioLength(kLengthMs));
+ EXPECT_EQ(-1, dm_->SetPacketAudioLength(-1)); // Illegal parameter value.
+}
+
+TEST_F(DelayManagerTest, PeakFound) {
+ // Expect DelayManager to pass on the question to the detector.
+ // Call twice, and let the detector return true the first time and false the
+ // second time.
+ EXPECT_CALL(detector_, peak_found())
+ .WillOnce(Return(true))
+ .WillOnce(Return(false));
+ EXPECT_TRUE(dm_->PeakFound());
+ EXPECT_FALSE(dm_->PeakFound());
+}
+
+TEST_F(DelayManagerTest, UpdateCounters) {
+ // Expect DelayManager to pass on the counter update to the detector.
+ EXPECT_CALL(detector_, IncrementCounter(kTimeStepMs))
+ .Times(1);
+ dm_->UpdateCounters(kTimeStepMs);
+}
+
+TEST_F(DelayManagerTest, UpdateNormal) {
+ SetPacketAudioLength(kFrameSizeMs);
+ // First packet arrival.
+ InsertNextPacket();
+ // Advance time by one frame size.
+ IncreaseTime(kFrameSizeMs);
+ // Second packet arrival.
+ // Expect detector update method to be called once with inter-arrival time
+ // equal to 1 packet, and (base) target level equal to 1 as well.
+ // Return false to indicate no peaks found.
+ EXPECT_CALL(detector_, Update(1, 1))
+ .WillOnce(Return(false));
+ InsertNextPacket();
+ EXPECT_EQ(1 << 8, dm_->TargetLevel()); // In Q8.
+ EXPECT_EQ(1, dm_->base_target_level());
+ int lower, higher;
+ dm_->BufferLimits(&lower, &higher);
+ // Expect |lower| to be 75% of target level, and |higher| to be target level,
+ // but also at least 20 ms higher than |lower|, which is the limiting case
+ // here.
+ EXPECT_EQ((1 << 8) * 3 / 4, lower);
+ EXPECT_EQ(lower + (20 << 8) / kFrameSizeMs, higher);
+}
+
+TEST_F(DelayManagerTest, UpdateLongInterArrivalTime) {
+ SetPacketAudioLength(kFrameSizeMs);
+ // First packet arrival.
+ InsertNextPacket();
+  // Advance time by two frame sizes.
+ IncreaseTime(2 * kFrameSizeMs);
+ // Second packet arrival.
+  // Expect detector update method to be called once with inter-arrival time
+  // equal to 2 packets, and (base) target level equal to 2 as well.
+ // Return false to indicate no peaks found.
+ EXPECT_CALL(detector_, Update(2, 2))
+ .WillOnce(Return(false));
+ InsertNextPacket();
+ EXPECT_EQ(2 << 8, dm_->TargetLevel()); // In Q8.
+ EXPECT_EQ(2, dm_->base_target_level());
+ int lower, higher;
+ dm_->BufferLimits(&lower, &higher);
+ // Expect |lower| to be 75% of target level, and |higher| to be target level,
+ // but also at least 20 ms higher than |lower|, which is the limiting case
+ // here.
+ EXPECT_EQ((2 << 8) * 3 / 4, lower);
+ EXPECT_EQ(lower + (20 << 8) / kFrameSizeMs, higher);
+}
+
+TEST_F(DelayManagerTest, UpdatePeakFound) {
+ SetPacketAudioLength(kFrameSizeMs);
+ // First packet arrival.
+ InsertNextPacket();
+ // Advance time by one frame size.
+ IncreaseTime(kFrameSizeMs);
+ // Second packet arrival.
+ // Expect detector update method to be called once with inter-arrival time
+ // equal to 1 packet, and (base) target level equal to 1 as well.
+ // Return true to indicate that peaks are found. Let the peak height be 5.
+ EXPECT_CALL(detector_, Update(1, 1))
+ .WillOnce(Return(true));
+ EXPECT_CALL(detector_, MaxPeakHeight())
+ .WillOnce(Return(5));
+ InsertNextPacket();
+ EXPECT_EQ(5 << 8, dm_->TargetLevel());
+ EXPECT_EQ(1, dm_->base_target_level()); // Base target level is w/o peaks.
+ int lower, higher;
+ dm_->BufferLimits(&lower, &higher);
+ // Expect |lower| to be 75% of target level, and |higher| to be target level.
+ EXPECT_EQ((5 << 8) * 3 / 4, lower);
+ EXPECT_EQ(5 << 8, higher);
+}
+
+TEST_F(DelayManagerTest, TargetDelay) {
+ SetPacketAudioLength(kFrameSizeMs);
+ // First packet arrival.
+ InsertNextPacket();
+ // Advance time by one frame size.
+ IncreaseTime(kFrameSizeMs);
+ // Second packet arrival.
+ // Expect detector update method to be called once with inter-arrival time
+ // equal to 1 packet, and (base) target level equal to 1 as well.
+ // Return false to indicate no peaks found.
+ EXPECT_CALL(detector_, Update(1, 1))
+ .WillOnce(Return(false));
+ InsertNextPacket();
+ const int kExpectedTarget = 1;
+ EXPECT_EQ(kExpectedTarget << 8, dm_->TargetLevel()); // In Q8.
+ EXPECT_EQ(1, dm_->base_target_level());
+ int lower, higher;
+ dm_->BufferLimits(&lower, &higher);
+ // Expect |lower| to be 75% of base target level, and |higher| to be
+ // lower + 20 ms headroom.
+ EXPECT_EQ((1 << 8) * 3 / 4, lower);
+ EXPECT_EQ(lower + (20 << 8) / kFrameSizeMs, higher);
+}
+
+TEST_F(DelayManagerTest, MaxAndRequiredDelay) {
+ const int kExpectedTarget = 5;
+ const int kTimeIncrement = kExpectedTarget * kFrameSizeMs;
+ SetPacketAudioLength(kFrameSizeMs);
+ // First packet arrival.
+ InsertNextPacket();
+ // Second packet arrival.
+  // Expect detector update method to be called with inter-arrival time equal
+  // to |kExpectedTarget| packets. Return true to indicate peaks found.
+ EXPECT_CALL(detector_, Update(kExpectedTarget, _))
+ .WillRepeatedly(Return(true));
+ EXPECT_CALL(detector_, MaxPeakHeight())
+ .WillRepeatedly(Return(kExpectedTarget));
+ IncreaseTime(kTimeIncrement);
+ InsertNextPacket();
+
+ // No limit is set.
+ EXPECT_EQ(kExpectedTarget << 8, dm_->TargetLevel());
+
+ int kMaxDelayPackets = kExpectedTarget - 2;
+ int kMaxDelayMs = kMaxDelayPackets * kFrameSizeMs;
+ EXPECT_TRUE(dm_->SetMaximumDelay(kMaxDelayMs));
+ IncreaseTime(kTimeIncrement);
+ InsertNextPacket();
+ EXPECT_EQ(kExpectedTarget * kFrameSizeMs, dm_->least_required_delay_ms());
+ EXPECT_EQ(kMaxDelayPackets << 8, dm_->TargetLevel());
+
+ // Target level at least should be one packet.
+ EXPECT_FALSE(dm_->SetMaximumDelay(kFrameSizeMs - 1));
+}
+
+TEST_F(DelayManagerTest, MinAndRequiredDelay) {
+ const int kExpectedTarget = 5;
+ const int kTimeIncrement = kExpectedTarget * kFrameSizeMs;
+ SetPacketAudioLength(kFrameSizeMs);
+ // First packet arrival.
+ InsertNextPacket();
+ // Second packet arrival.
+  // Expect detector update method to be called with inter-arrival time equal
+  // to |kExpectedTarget| packets. Return true to indicate peaks found.
+ EXPECT_CALL(detector_, Update(kExpectedTarget, _))
+ .WillRepeatedly(Return(true));
+ EXPECT_CALL(detector_, MaxPeakHeight())
+ .WillRepeatedly(Return(kExpectedTarget));
+ IncreaseTime(kTimeIncrement);
+ InsertNextPacket();
+
+ // No limit is applied.
+ EXPECT_EQ(kExpectedTarget << 8, dm_->TargetLevel());
+
+ int kMinDelayPackets = kExpectedTarget + 2;
+ int kMinDelayMs = kMinDelayPackets * kFrameSizeMs;
+ dm_->SetMinimumDelay(kMinDelayMs);
+ IncreaseTime(kTimeIncrement);
+ InsertNextPacket();
+ EXPECT_EQ(kExpectedTarget * kFrameSizeMs, dm_->least_required_delay_ms());
+ EXPECT_EQ(kMinDelayPackets << 8, dm_->TargetLevel());
+}
+
+TEST_F(DelayManagerTest, Failures) {
+ // Wrong sample rate.
+ EXPECT_EQ(-1, dm_->Update(0, 0, -1));
+ // Wrong packet size.
+ EXPECT_EQ(-1, dm_->SetPacketAudioLength(0));
+ EXPECT_EQ(-1, dm_->SetPacketAudioLength(-1));
+
+ // Minimum delay higher than a maximum delay is not accepted.
+ EXPECT_TRUE(dm_->SetMaximumDelay(10));
+ EXPECT_FALSE(dm_->SetMinimumDelay(20));
+
+ // Maximum delay less than minimum delay is not accepted.
+ EXPECT_TRUE(dm_->SetMaximumDelay(100));
+ EXPECT_TRUE(dm_->SetMinimumDelay(80));
+ EXPECT_FALSE(dm_->SetMaximumDelay(60));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc
new file mode 100644
index 00000000000..5996d7d197a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
+
+#include <algorithm> // max
+
+namespace webrtc {
+
+// The DelayPeakDetector keeps track of severe inter-arrival times, called
+// delay peaks. When a peak is observed, the "height" (the time elapsed since
+// the previous packet arrival) and the peak "period" (the time since the last
+// observed peak) are recorded in a list. When enough peaks have been observed,
+// peak-mode is engaged and the DelayManager asks the DelayPeakDetector for
+// the worst peak height.
+
+DelayPeakDetector::DelayPeakDetector()
+ : peak_found_(false),
+ peak_detection_threshold_(0),
+ peak_period_counter_ms_(-1) {
+}
+
+void DelayPeakDetector::Reset() {
+ peak_period_counter_ms_ = -1; // Indicate that next peak is the first.
+ peak_found_ = false;
+ peak_history_.clear();
+}
+
+// Calculates the threshold in number of packets.
+void DelayPeakDetector::SetPacketAudioLength(int length_ms) {
+ if (length_ms > 0) {
+ peak_detection_threshold_ = kPeakHeightMs / length_ms;
+ }
+}
+
+int DelayPeakDetector::MaxPeakHeight() const {
+ int max_height = -1; // Returns -1 for an empty history.
+ std::list<Peak>::const_iterator it;
+ for (it = peak_history_.begin(); it != peak_history_.end(); ++it) {
+ max_height = std::max(max_height, it->peak_height_packets);
+ }
+ return max_height;
+}
+
+int DelayPeakDetector::MaxPeakPeriod() const {
+ int max_period = -1; // Returns -1 for an empty history.
+ std::list<Peak>::const_iterator it;
+ for (it = peak_history_.begin(); it != peak_history_.end(); ++it) {
+ max_period = std::max(max_period, it->period_ms);
+ }
+ return max_period;
+}
+
+bool DelayPeakDetector::Update(int inter_arrival_time, int target_level) {
+ if (inter_arrival_time > target_level + peak_detection_threshold_ ||
+ inter_arrival_time > 2 * target_level) {
+ // A delay peak is observed.
+ if (peak_period_counter_ms_ == -1) {
+ // This is the first peak. Reset the period counter.
+ peak_period_counter_ms_ = 0;
+ } else if (peak_period_counter_ms_ <= kMaxPeakPeriodMs) {
+ // This is not the first peak, and the period is valid.
+      // Store peak data in the list.
+ Peak peak_data;
+ peak_data.period_ms = peak_period_counter_ms_;
+ peak_data.peak_height_packets = inter_arrival_time;
+ peak_history_.push_back(peak_data);
+ while (peak_history_.size() > kMaxNumPeaks) {
+ // Delete the oldest data point.
+ peak_history_.pop_front();
+ }
+ peak_period_counter_ms_ = 0;
+ } else if (peak_period_counter_ms_ <= 2 * kMaxPeakPeriodMs) {
+ // Invalid peak due to too long period. Reset period counter and start
+ // looking for next peak.
+ peak_period_counter_ms_ = 0;
+ } else {
+ // More than 2 times the maximum period has elapsed since the last peak
+      // was registered. It seems that the network conditions have changed.
+ // Reset the peak statistics.
+ Reset();
+ }
+ }
+ return CheckPeakConditions();
+}
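+
+// A numeric sketch of the peak condition with assumed values: for 30 ms
+// packets, the threshold set in SetPacketAudioLength() is 78 / 30 = 2 packets.
+// With a target level of 1, an inter-arrival time of 4 packets (e.g., a packet
+// delayed by an extra 100 ms) satisfies both 4 > 1 + 2 and 4 > 2 * 1, so it is
+// registered as a peak. Once kMinPeaksToTrigger peaks have been stored with
+// valid periods, CheckPeakConditions() reports peak-mode as active.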
+
+void DelayPeakDetector::IncrementCounter(int inc_ms) {
+ if (peak_period_counter_ms_ >= 0) {
+ peak_period_counter_ms_ += inc_ms;
+ }
+}
+
+bool DelayPeakDetector::CheckPeakConditions() {
+ size_t s = peak_history_.size();
+ if (s >= kMinPeaksToTrigger &&
+ peak_period_counter_ms_ <= 2 * MaxPeakPeriod()) {
+ peak_found_ = true;
+ } else {
+ peak_found_ = false;
+ }
+ return peak_found_;
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h
new file mode 100644
index 00000000000..8bf6aba8b55
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_PEAK_DETECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_PEAK_DETECTOR_H_
+
+#include <string.h> // size_t
+
+#include <list>
+
+#include "webrtc/base/constructormagic.h"
+
+namespace webrtc {
+
+class DelayPeakDetector {
+ public:
+ DelayPeakDetector();
+ virtual ~DelayPeakDetector() {}
+ virtual void Reset();
+
+ // Notifies the DelayPeakDetector of how much audio data is carried in each
+ // packet.
+ virtual void SetPacketAudioLength(int length_ms);
+
+ // Returns true if peak-mode is active. That is, delay peaks were observed
+ // recently.
+ virtual bool peak_found() { return peak_found_; }
+
+ // Calculates and returns the maximum delay peak height. Returns -1 if no
+ // delay peaks have been observed recently. The unit is number of packets.
+ virtual int MaxPeakHeight() const;
+
+ // Calculates and returns the maximum delay peak distance in ms.
+ // Returns -1 if no delay peaks have been observed recently.
+ virtual int MaxPeakPeriod() const;
+
+ // Updates the DelayPeakDetector with a new inter-arrival time (in packets)
+ // and the current target buffer level (needed to decide if a peak is observed
+ // or not). Returns true if peak-mode is active, false if not.
+ virtual bool Update(int inter_arrival_time, int target_level);
+
+  // Increments |peak_period_counter_ms_| by |inc_ms|. The counter is only
+  // incremented if it is non-negative; a negative value denotes that no peak
+  // has been observed yet.
+ virtual void IncrementCounter(int inc_ms);
+
+ private:
+ static const size_t kMaxNumPeaks = 8;
+ static const size_t kMinPeaksToTrigger = 2;
+ static const int kPeakHeightMs = 78;
+ static const int kMaxPeakPeriodMs = 10000;
+
+ typedef struct {
+ int period_ms;
+ int peak_height_packets;
+ } Peak;
+
+ bool CheckPeakConditions();
+
+ std::list<Peak> peak_history_;
+ bool peak_found_;
+ int peak_detection_threshold_;
+ int peak_period_counter_ms_;
+
+ DISALLOW_COPY_AND_ASSIGN(DelayPeakDetector);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_PEAK_DETECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc
new file mode 100644
index 00000000000..080309be014
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for DelayPeakDetector class.
+
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
+
+#include "gtest/gtest.h"
+
+namespace webrtc {
+
+TEST(DelayPeakDetector, CreateAndDestroy) {
+ DelayPeakDetector* detector = new DelayPeakDetector();
+ EXPECT_FALSE(detector->peak_found());
+ delete detector;
+}
+
+TEST(DelayPeakDetector, EmptyHistory) {
+ DelayPeakDetector detector;
+ EXPECT_EQ(-1, detector.MaxPeakHeight());
+ EXPECT_EQ(-1, detector.MaxPeakPeriod());
+}
+
+// Inject a series of packet arrivals into the detector. Three of the packets
+// have suffered delays. After the third delay peak, peak-mode is expected to
+// start. This should then continue until it is disengaged due to lack of peaks.
+TEST(DelayPeakDetector, TriggerPeakMode) {
+ DelayPeakDetector detector;
+ const int kPacketSizeMs = 30;
+ detector.SetPacketAudioLength(kPacketSizeMs);
+
+ // Load up normal arrival times; 0 ms, 30 ms, 60 ms, 90 ms, ...
+ const int kNumPackets = 1000;
+ int arrival_times_ms[kNumPackets];
+ for (int i = 0; i < kNumPackets; ++i) {
+ arrival_times_ms[i] = i * kPacketSizeMs;
+ }
+
+ // Delay three packets.
+ const int kPeakDelayMs = 100;
+ // First delay peak.
+ arrival_times_ms[100] += kPeakDelayMs;
+ // Second delay peak.
+ arrival_times_ms[200] += kPeakDelayMs;
+ // Third delay peak. Trigger peak-mode after this packet.
+ arrival_times_ms[400] += kPeakDelayMs;
+ // The second peak period is the longest, 200 packets.
+ const int kWorstPeakPeriod = 200 * kPacketSizeMs;
+ int peak_mode_start_ms = arrival_times_ms[400];
+ // Expect to disengage after no peaks are observed for two period times.
+ int peak_mode_end_ms = peak_mode_start_ms + 2 * kWorstPeakPeriod;
+
+ // Load into detector.
+ int time = 0;
+ int next = 1; // Start with the second packet to get a proper IAT.
+ while (next < kNumPackets) {
+ while (next < kNumPackets && arrival_times_ms[next] <= time) {
+ int iat_packets = (arrival_times_ms[next] - arrival_times_ms[next - 1]) /
+ kPacketSizeMs;
+ const int kTargetBufferLevel = 1; // Define peaks to be iat > 2.
+ if (time < peak_mode_start_ms || time > peak_mode_end_ms) {
+ EXPECT_FALSE(detector.Update(iat_packets, kTargetBufferLevel));
+ } else {
+ EXPECT_TRUE(detector.Update(iat_packets, kTargetBufferLevel));
+ EXPECT_EQ(kWorstPeakPeriod, detector.MaxPeakPeriod());
+ EXPECT_EQ(kPeakDelayMs / kPacketSizeMs + 1, detector.MaxPeakHeight());
+ }
+ ++next;
+ }
+ detector.IncrementCounter(10);
+ time += 10; // Increase time 10 ms.
+ }
+}
+
+// Same test as TriggerPeakMode, but with base target buffer level increased to
+// 2, in order to raise the bar for delay peaks to inter-arrival times > 4.
+// The delay pattern produces peaks with an inter-arrival time of 4 packets,
+// which does not exceed 4 and thus should not trigger peak-mode.
+TEST(DelayPeakDetector, DoNotTriggerPeakMode) {
+ DelayPeakDetector detector;
+ const int kPacketSizeMs = 30;
+ detector.SetPacketAudioLength(kPacketSizeMs);
+
+ // Load up normal arrival times; 0 ms, 30 ms, 60 ms, 90 ms, ...
+ const int kNumPackets = 1000;
+ int arrival_times_ms[kNumPackets];
+ for (int i = 0; i < kNumPackets; ++i) {
+ arrival_times_ms[i] = i * kPacketSizeMs;
+ }
+
+ // Delay three packets.
+ const int kPeakDelayMs = 100;
+ // First delay peak.
+ arrival_times_ms[100] += kPeakDelayMs;
+ // Second delay peak.
+ arrival_times_ms[200] += kPeakDelayMs;
+ // Third delay peak.
+ arrival_times_ms[400] += kPeakDelayMs;
+
+ // Load into detector.
+ int time = 0;
+ int next = 1; // Start with the second packet to get a proper IAT.
+ while (next < kNumPackets) {
+ while (next < kNumPackets && arrival_times_ms[next] <= time) {
+ int iat_packets = (arrival_times_ms[next] - arrival_times_ms[next - 1]) /
+ kPacketSizeMs;
+ const int kTargetBufferLevel = 2; // Define peaks to be iat > 4.
+ EXPECT_FALSE(detector.Update(iat_packets, kTargetBufferLevel));
+ ++next;
+ }
+ detector.IncrementCounter(10);
+ time += 10; // Increase time 10 ms.
+ }
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.c
deleted file mode 100644
index ea2fa87d501..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.c
+++ /dev/null
@@ -1,532 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains some DSP initialization functions and
- * constant table definitions.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "neteq_error_codes.h"
-
-/* Filter coefficients used when downsampling from the indicated
- sample rates (8, 16, 32, 48 kHz) to 4 kHz.
- Coefficients are in Q12. */
-
-/* {0.3, 0.4, 0.3} */
-const int16_t WebRtcNetEQ_kDownsample8kHzTbl[] = { 1229, 1638, 1229 };
-
-#ifdef NETEQ_WIDEBAND
-/* {0.15, 0.2, 0.3, 0.2, 0.15} */
-const int16_t WebRtcNetEQ_kDownsample16kHzTbl[] =
-{ 614, 819, 1229, 819, 614};
-#endif
-
-#ifdef NETEQ_32KHZ_WIDEBAND
-/* {0.1425, 0.1251, 0.1525, 0.1628, 0.1525, 0.1251, 0.1425} */
-const int16_t WebRtcNetEQ_kDownsample32kHzTbl[] =
-{ 584, 512, 625, 667, 625, 512, 584};
-#endif
-
-#ifdef NETEQ_48KHZ_WIDEBAND
-/* {0.2487, 0.0952, 0.1042, 0.1074, 0.1042, 0.0952, 0.2487} */
-const int16_t WebRtcNetEQ_kDownsample48kHzTbl[] =
-{ 1019, 390, 427, 440, 427, 390, 1019};
-#endif
-
-/* Constants used in expand function WebRtcNetEQ_Expand */
-
-/* Q12: -1.264421 + 4.8659148*x - 4.0092827*x^2 + 1.4100529*x^3 */
-const int16_t WebRtcNetEQ_kMixFractionFuncTbl[4] = { -5179, 19931, -16422, 5776 };
-
-/* Tabulated divisions to save complexity */
-/* 1049/{0, .., 6} */
-const int16_t WebRtcNetEQ_k1049div[7] = { 0, 1049, 524, 349, 262, 209, 174 };
-
-/* 2097/{0, .., 6} */
-const int16_t WebRtcNetEQ_k2097div[7] = { 0, 2097, 1048, 699, 524, 419, 349 };
-
-/* 5243/{0, .., 6} */
-const int16_t WebRtcNetEQ_k5243div[7] = { 0, 5243, 2621, 1747, 1310, 1048, 873 };
-
-#ifdef WEBRTC_NETEQ_40BITACC_TEST
-/*
- * Run NetEQ with simulated 40-bit accumulator to run bit-exact to a DSP
- * implementation where the main (spl and NetEQ) functions have been
- * 40-bit optimized. For testing purposes.
- */
-
-/****************************************************************************
- * WebRtcNetEQ_40BitAccCrossCorr(...)
- *
- * Calculates the Cross correlation between two sequences seq1 and seq2. Seq1
- * is fixed and seq2 slides as the pointer is increased with step
- *
- * Input:
- * - seq1 : First sequence (fixed throughout the correlation)
- * - seq2 : Second sequence (slided step_seq2 for each
- * new correlation)
- * - dimSeq : Number of samples to use in the cross correlation.
- * Should be no larger than 1024 to avoid overflow.
- * - dimCrossCorr : Number of CrossCorrelations to calculate (start
- * position for seq2 is updated for each new one)
- * - rShift : Number of right shifts to use
- * - step_seq2 : How many (positive or negative) steps the seq2
- * pointer should be updated for each new cross
- * correlation value
- *
- * Output:
- * - crossCorr : The cross correlation in Q-rShift
- */
-
-void WebRtcNetEQ_40BitAccCrossCorr(int32_t *crossCorr,
- int16_t *seq1,
- int16_t *seq2,
- int16_t dimSeq,
- int16_t dimCrossCorr,
- int16_t rShift,
- int16_t step_seq2)
-{
- int i, j;
- int16_t *seq1Ptr, *seq2Ptr;
- int64_t acc;
-
- for (i = 0; i < dimCrossCorr; i++)
- {
- /* Set the pointer to the static vector, set the pointer to
- the sliding vector and initialize crossCorr */
- seq1Ptr = seq1;
- seq2Ptr = seq2 + (step_seq2 * i);
- acc = 0;
-
- /* Perform the cross correlation */
- for (j = 0; j < dimSeq; j++)
- {
- acc += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
- seq1Ptr++;
- seq2Ptr++;
- }
-
- (*crossCorr) = (int32_t) (acc >> rShift);
- crossCorr++;
- }
-}
-
-/****************************************************************************
- * WebRtcNetEQ_40BitAccDotW16W16(...)
- *
- * Calculates the dot product between two vectors (int16_t)
- *
- * Input:
- * - vector1 : Vector 1
- * - vector2 : Vector 2
- * - len : Number of samples in vector
- * Should be no larger than 1024 to avoid overflow.
- * - scaling : The number of left shifts required to avoid overflow
- * in the dot product
- * Return value : The dot product
- */
-
-int32_t WebRtcNetEQ_40BitAccDotW16W16(int16_t *vector1,
- int16_t *vector2,
- int len,
- int scaling)
-{
- int32_t sum;
- int i;
- int64_t acc;
-
- acc = 0;
- for (i = 0; i < len; i++)
- {
- acc += WEBRTC_SPL_MUL_16_16(*vector1++, *vector2++);
- }
-
- sum = (int32_t) (acc >> scaling);
-
- return(sum);
-}
-
-#endif /* WEBRTC_NETEQ_40BITACC_TEST */
-
-/****************************************************************************
- * WebRtcNetEQ_DSPInit(...)
- *
- * Initializes DSP side of NetEQ.
- *
- * Input:
- * - inst : NetEq DSP instance
- * - fs : Initial sample rate (may change when decoding data)
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- * : non-zero - error
- */
-
-int WebRtcNetEQ_DSPInit(DSPInst_t *inst, uint16_t fs)
-{
-
- int res = 0;
- int16_t fs_mult;
-
- /* Pointers and values to save before clearing the instance */
-#ifdef NETEQ_CNG_CODEC
- void *savedPtr1 = inst->CNG_Codec_inst;
-#endif
- void *savedPtr2 = inst->pw16_readAddress;
- void *savedPtr3 = inst->pw16_writeAddress;
- void *savedPtr4 = inst->main_inst;
-#ifdef NETEQ_VAD
- void *savedVADptr = inst->VADInst.VADState;
- VADInitFunction savedVADinit = inst->VADInst.initFunction;
- VADSetmodeFunction savedVADsetmode = inst->VADInst.setmodeFunction;
- VADFunction savedVADfunc = inst->VADInst.VADFunction;
- int16_t savedVADEnabled = inst->VADInst.VADEnabled;
- int savedVADMode = inst->VADInst.VADMode;
-#endif /* NETEQ_VAD */
- DSPStats_t saveStats;
- int16_t saveMsPerCall = inst->millisecondsPerCall;
- enum BGNMode saveBgnMode = inst->BGNInst.bgnMode;
-#ifdef NETEQ_STEREO
- MasterSlaveInfo* saveMSinfo = inst->msInfo;
-#endif
-
- /* copy contents of statInst to avoid clearing */WEBRTC_SPL_MEMCPY_W16(&saveStats, &(inst->statInst),
- sizeof(DSPStats_t)/sizeof(int16_t));
-
- /* check that the sample rate is valid */
- if ((fs != 8000)
-#ifdef NETEQ_WIDEBAND
- &&(fs!=16000)
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- &&(fs!=32000)
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- &&(fs!=48000)
-#endif
- )
- {
- /* invalid rate */
- return (CODEC_DB_UNSUPPORTED_FS);
- }
-
- /* calcualte fs/8000 */
- fs_mult = WebRtcSpl_DivW32W16ResW16(fs, 8000);
-
- /* Set everything to zero since most variables should be zero at start */
- WebRtcSpl_MemSetW16((int16_t *) inst, 0, sizeof(DSPInst_t) / sizeof(int16_t));
-
- /* Restore saved pointers */
-#ifdef NETEQ_CNG_CODEC
- inst->CNG_Codec_inst = (CNG_dec_inst *)savedPtr1;
-#endif
- inst->pw16_readAddress = (int16_t *) savedPtr2;
- inst->pw16_writeAddress = (int16_t *) savedPtr3;
- inst->main_inst = savedPtr4;
-#ifdef NETEQ_VAD
- inst->VADInst.VADState = savedVADptr;
- inst->VADInst.initFunction = savedVADinit;
- inst->VADInst.setmodeFunction = savedVADsetmode;
- inst->VADInst.VADFunction = savedVADfunc;
- inst->VADInst.VADEnabled = savedVADEnabled;
- inst->VADInst.VADMode = savedVADMode;
-#endif /* NETEQ_VAD */
-
- /* Initialize main part */
- inst->fs = fs;
- inst->millisecondsPerCall = saveMsPerCall;
- inst->timestampsPerCall = inst->millisecondsPerCall * 8 * fs_mult;
- inst->ExpandInst.w16_overlap = 5 * fs_mult;
- inst->endPosition = 565 * fs_mult;
- inst->curPosition = inst->endPosition - inst->ExpandInst.w16_overlap;
- inst->w16_seedInc = 1;
- inst->uw16_seed = 777;
- inst->w16_muteFactor = 16384; /* 1.0 in Q14 */
- inst->w16_frameLen = 3 * inst->timestampsPerCall; /* Dummy initialize to 30ms */
-
- inst->w16_speechHistoryLen = 256 * fs_mult;
- inst->pw16_speechHistory = &inst->speechBuffer[inst->endPosition
- - inst->w16_speechHistoryLen];
- inst->ExpandInst.pw16_overlapVec = &(inst->pw16_speechHistory[inst->w16_speechHistoryLen
- - inst->ExpandInst.w16_overlap]);
-
- /* Reusage of memory in speechBuffer inside Expand */
- inst->ExpandInst.pw16_expVecs[0] = &inst->speechBuffer[0];
- inst->ExpandInst.pw16_expVecs[1] = &inst->speechBuffer[126 * fs_mult];
- inst->ExpandInst.pw16_arState = &inst->speechBuffer[2 * 126 * fs_mult];
- inst->ExpandInst.pw16_arFilter = &inst->speechBuffer[2 * 126 * fs_mult
- + UNVOICED_LPC_ORDER];
- /* Ends at 2*126*fs_mult+UNVOICED_LPC_ORDER+(UNVOICED_LPC_ORDER+1) */
-
- inst->ExpandInst.w16_expandMuteFactor = 16384; /* 1.0 in Q14 */
-
- /* Initialize BGN part */
- inst->BGNInst.pw16_filter[0] = 4096;
- inst->BGNInst.w16_scale = 20000;
- inst->BGNInst.w16_scaleShift = 24;
- inst->BGNInst.w32_energyUpdate = 500000;
- inst->BGNInst.w32_energyUpdateLow = 0;
- inst->BGNInst.w32_energy = 2500;
- inst->BGNInst.w16_initialized = 0;
- inst->BGNInst.bgnMode = saveBgnMode;
-
-    /* Recreate statistics counters */
-    WEBRTC_SPL_MEMCPY_W16(&(inst->statInst), &saveStats,
- sizeof(DSPStats_t)/sizeof(int16_t));
-
-#ifdef NETEQ_STEREO
- /* Write back the pointer. */
- inst->msInfo = saveMSinfo;
-#endif
-
-#ifdef NETEQ_CNG_CODEC
- if (inst->CNG_Codec_inst!=NULL)
- {
- /* initialize comfort noise generator */
- res |= WebRtcCng_InitDec(inst->CNG_Codec_inst);
- }
-#endif
-
-#ifdef NETEQ_VAD
- /* initialize PostDecode VAD instance
- (don't bother checking for NULL instance, this is done inside init function) */
- res |= WebRtcNetEQ_InitVAD(&inst->VADInst, fs);
-#endif /* NETEQ_VAD */
-
- return (res);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_AddressInit(...)
- *
- * Initializes the shared-memory communication on the DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - data2McuAddress : Pointer to memory where DSP writes / MCU reads
- * - data2DspAddress : Pointer to memory where MCU writes / DSP reads
- * - mainInst : NetEQ main instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_AddressInit(DSPInst_t *inst, const void *data2McuAddress,
- const void *data2DspAddress, const void *mainInst)
-{
-
- /* set shared-memory addresses in the DSP instance */
- inst->pw16_readAddress = (int16_t *) data2DspAddress;
- inst->pw16_writeAddress = (int16_t *) data2McuAddress;
-
- /* set pointer to main NetEQ instance */
- inst->main_inst = (void *) mainInst;
-
- /* set output frame size to 10 ms = 80 samples in narrowband */
- inst->millisecondsPerCall = 10;
- inst->timestampsPerCall = 80;
-
- return (0);
-
-}
-
-/****************************************************************************
- * WebRtcNetEQ_ClearInCallStats(...)
- *
- * Reset in-call statistics variables on DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_ClearInCallStats(DSPInst_t *inst)
-{
- /* Reset statistics counters */
- inst->statInst.accelerateLength = 0;
- inst->statInst.expandLength = 0;
- inst->statInst.preemptiveLength = 0;
- inst->statInst.addedSamples = 0;
- return (0);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_ClearPostCallStats(...)
- *
- * Reset post-call statistics variables on DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_ClearPostCallStats(DSPInst_t *inst)
-{
-
- /* Reset statistics counters */
- inst->statInst.expandedVoiceSamples = 0;
- inst->statInst.expandedNoiseSamples = 0;
- return (0);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_ClearActivityStats(...)
- *
- * Reset processing activity statistics.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- */
-
-void WebRtcNetEQ_ClearActivityStats(DSPInst_t *inst) {
- memset(&inst->activity_stats, 0, sizeof(ActivityStats));
-}
-
-#ifdef NETEQ_VAD
-
-/****************************************************************************
- * WebRtcNetEQ_InitVAD(...)
- *
- * Initializes post-decode VAD instance.
- *
- * Input:
- * - VADinst : PostDecodeVAD instance
- * - fs : Initial sample rate
- *
- * Output:
- * - VADinst : Updated instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, uint16_t fs)
-{
-
- int res = 0;
-
- /* initially, disable the post-decode VAD */
- VADInst->VADEnabled = 0;
-
- if (VADInst->VADState != NULL /* if VAD state is provided */
- && VADInst->initFunction != NULL /* and all function ... */
- && VADInst->setmodeFunction != NULL /* ... pointers ... */
- && VADInst->VADFunction != NULL) /* ... are defined */
- {
- res = VADInst->initFunction( VADInst->VADState ); /* call VAD init function */
- res |= WebRtcNetEQ_SetVADModeInternal( VADInst, VADInst->VADMode );
-
- if (res!=0)
- {
- /* something is wrong; play it safe and set the VADstate to NULL */
- VADInst->VADState = NULL;
- }
- else if (fs<=16000)
- {
- /* enable VAD if NB or WB (VAD cannot handle SWB) */
- VADInst->VADEnabled = 1;
- }
- }
-
- /* reset SID/CNG interval counter */
- VADInst->SIDintervalCounter = 0;
-
- /* initialize with active-speaker decision */
- VADInst->VADDecision = 1;
-
- return(res);
-
-}
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADModeInternal(...)
- *
- * Set the VAD mode in the VAD struct, and communicate it to the VAD instance
- * if it exists.
- *
- * Input:
- * - VADinst : PostDecodeVAD instance
- * - mode : Mode number passed on to the VAD function
- *
- * Output:
- * - VADinst : Updated instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADModeInternal(PostDecodeVAD_t *VADInst, int mode)
-{
-
- int res = 0;
-
- VADInst->VADMode = mode;
-
- if (VADInst->VADState != NULL)
- {
- /* call setmode function */
- res = VADInst->setmodeFunction(VADInst->VADState, mode);
- }
-
- return(res);
-
-}
-
-#endif /* NETEQ_VAD */
-
-/****************************************************************************
- * WebRtcNetEQ_FlushSpeechBuffer(...)
- *
- * Flush the speech buffer.
- *
- * Input:
- * - inst : NetEq DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- * : non-zero - error
- */
-
-int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst)
-{
- int16_t fs_mult;
-
-    /* calculate fs/8000 */
- fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
-
- /* clear buffer */
- WebRtcSpl_MemSetW16(inst->speechBuffer, 0, SPEECH_BUF_SIZE);
- inst->endPosition = 565 * fs_mult;
- inst->curPosition = inst->endPosition - inst->ExpandInst.w16_overlap;
-
- return 0;
-}
-
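Note: the sizing arithmetic in WebRtcNetEQ_DSPInit above reduces to a few fs-dependent constants. The standalone sketch below (illustrative only, not part of the patch) reproduces that arithmetic for a 16 kHz instance with 10 ms RecOut calls.

#include <cstdint>
#include <cstdio>

int main() {
  const uint16_t fs = 16000;                     // initial sample rate
  const int16_t ms_per_call = 10;                // default set in AddressInit
  const int fs_mult = fs / 8000;                 // 2 for wideband
  const int timestamps_per_call = ms_per_call * 8 * fs_mult;  // 160 samples
  const int end_position = 565 * fs_mult;        // active part of speechBuffer
  const int overlap = 5 * fs_mult;               // ExpandInst.w16_overlap
  const int cur_position = end_position - overlap;
  const int speech_history_len = 256 * fs_mult;  // history kept for Expand
  std::printf("fs_mult=%d ts/call=%d end=%d cur=%d hist=%d\n", fs_mult,
              timestamps_per_call, end_position, cur_position,
              speech_history_len);               // prints 2 160 1130 1120 512
  return 0;
}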
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.h
deleted file mode 100644
index 9371938d5f6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.h
+++ /dev/null
@@ -1,807 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains some DSP initialization functions,
- * constant table definitions and other parameters.
- * Also contains definitions of all DSP-side data structures.
- */
-
-
-#ifndef DSP_H
-#define DSP_H
-
-#include "typedefs.h"
-
-#include "webrtc_cng.h"
-
-#include "codec_db_defines.h"
-#include "neteq_defines.h"
-#include "neteq_statistics.h"
-
-#ifdef NETEQ_ATEVENT_DECODE
-#include "dtmf_tonegen.h"
-#endif
-
-
-
-/*****************************/
-/* Pre-processor definitions */
-/*****************************/
-
-/* FSMULT is the sample rate divided by 8000 */
-#if defined(NETEQ_48KHZ_WIDEBAND)
- #define FSMULT 6
-#elif defined(NETEQ_32KHZ_WIDEBAND)
- #define FSMULT 4
-#elif defined(NETEQ_WIDEBAND)
- #define FSMULT 2
-#else
- #define FSMULT 1
-#endif
-
-/* Size of the speech buffer (or synchronization buffer). */
-/* 60 ms decoding + 10 ms syncbuff + 0.625ms lookahead */
-#define SPEECH_BUF_SIZE (565 * FSMULT)
-
-/* Misc definitions */
-#define BGN_LPC_ORDER (4 + FSMULT) /* 5, 6, 8, or 10 */
-#define UNVOICED_LPC_ORDER 6
-#define RANDVEC_NO_OF_SAMPLES 256
-
-/* Number of milliseconds to remove/add during accelerate/pre-emptive expand
- under BGNonly operation */
-#define DEFAULT_TIME_ADJUST 8
-
-/* Number of RecOut calls without CNG/SID before re-enabling post-decode VAD */
-#define POST_DECODE_VAD_AUTO_ENABLE 3000
-
-/* 8kHz windowing in Q15 (over 5 samples) */
-#define NETEQ_OVERLAP_WINMUTE_8KHZ_START 27307
-#define NETEQ_OVERLAP_WINMUTE_8KHZ_INC -5461
-#define NETEQ_OVERLAP_WINUNMUTE_8KHZ_START 5461
-#define NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC 5461
-/* 16kHz windowing in Q15 (over 10 samples) */
-#define NETEQ_OVERLAP_WINMUTE_16KHZ_START 29789
-#define NETEQ_OVERLAP_WINMUTE_16KHZ_INC -2979
-#define NETEQ_OVERLAP_WINUNMUTE_16KHZ_START 2979
-#define NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC 2979
-/* 32kHz windowing in Q15 (over 20 samples) */
-#define NETEQ_OVERLAP_WINMUTE_32KHZ_START 31208
-#define NETEQ_OVERLAP_WINMUTE_32KHZ_INC -1560
-#define NETEQ_OVERLAP_WINUNMUTE_32KHZ_START 1560
-#define NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC 1560
-/* 48kHz windowing in Q15 (over 30 samples) */
-#define NETEQ_OVERLAP_WINMUTE_48KHZ_START 31711
-#define NETEQ_OVERLAP_WINMUTE_48KHZ_INC -1057
-#define NETEQ_OVERLAP_WINUNMUTE_48KHZ_START 1057
-#define NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC 1057
-
-/* Fade BGN towards zero after this many Expand calls */
-#define FADE_BGN_TIME 200
-
-
-/*******************/
-/* Constant tables */
-/*******************/
-
-extern const int16_t WebRtcNetEQ_kDownsample8kHzTbl[];
-extern const int16_t WebRtcNetEQ_kDownsample16kHzTbl[];
-extern const int16_t WebRtcNetEQ_kDownsample32kHzTbl[];
-extern const int16_t WebRtcNetEQ_kDownsample48kHzTbl[];
-extern const int16_t WebRtcNetEQ_kRandnTbl[];
-extern const int16_t WebRtcNetEQ_kMixFractionFuncTbl[];
-extern const int16_t WebRtcNetEQ_k1049div[];
-extern const int16_t WebRtcNetEQ_k2097div[];
-extern const int16_t WebRtcNetEQ_k5243div[];
-
-
-
-/************/
-/* Typedefs */
-/************/
-
-enum BGNMode
-{
- BGN_ON, /* default "normal" behavior with eternal noise */
- BGN_FADE, /* noise fades to zero after some time */
- BGN_OFF /* background noise is always zero */
-};
-
-#ifdef NETEQ_STEREO
-enum MasterSlaveMode
-{
- NETEQ_MONO, /* stand-alone instance */
- NETEQ_MASTER, /* master instance in a spatial/stereo configuration */
- NETEQ_SLAVE /* slave instance in a spatial/stereo configuration */
-};
-
-enum MasterSlaveExtraInfo
-{
- NO_INFO, /* no info to convey */
- ACC_FAIL, /* signal that accelerate failed */
- PE_EXP_FAIL, /* signal that pre-emptive expand failed */
- DTMF_OVERDUB, /* signal that DTMF overdub is generated */
- DTMF_ONLY /* signal that DTMF only is played */
-};
-#endif
-
-/****************************/
-/* DSP-side data structures */
-/****************************/
-
-/* Background noise (BGN) instance for storing BGN parameters
- (sub-instance of NETEQDSP_inst) */
-typedef struct BGNInst_t_
-{
-
- int32_t w32_energy;
- int32_t w32_energyMax;
- int32_t w32_energyUpdate;
- int32_t w32_energyUpdateLow;
- int16_t pw16_filterState[BGN_LPC_ORDER];
- int16_t pw16_filter[BGN_LPC_ORDER + 1];
- int16_t w16_mutefactor;
- int16_t w16_scale;
- int16_t w16_scaleShift;
- int16_t w16_initialized;
- enum BGNMode bgnMode;
-
-} BGNInst_t;
-
-/* Expansion instance (sub-instance of NETEQDSP_inst) */
-typedef struct ExpandInst_t_
-{
-
- int16_t w16_overlap; /* Constant, 5 for NB and 10 for WB */
- int16_t w16_consecExp; /* Number of consecutive expand calls */
- int16_t *pw16_arFilter; /* length [UNVOICED_LPC_ORDER+1] */
- int16_t *pw16_arState; /* length [UNVOICED_LPC_ORDER] */
- int16_t w16_arGain;
- int16_t w16_arGainScale;
- int16_t w16_vFraction; /* Q14 */
- int16_t w16_currentVFraction; /* Q14 */
- int16_t *pw16_expVecs[2];
- int16_t w16_lags[3];
- int16_t w16_maxLag;
- int16_t *pw16_overlapVec; /* last samples of speech history */
- int16_t w16_lagsDirection;
- int16_t w16_lagsPosition;
- int16_t w16_expandMuteFactor; /* Q14 */
- int16_t w16_stopMuting;
- int16_t w16_onset;
- int16_t w16_muteSlope; /* Q20 */
-
-} ExpandInst_t;
-
-#ifdef NETEQ_VAD
-
-/*
- * VAD function pointer types, replicating the typedefs in webrtc_neteq_internal.h.
- * These function pointers match the definitions of WebRtc VAD functions WebRtcVad_Init,
- * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in webrtc_vad.h.
- */
-typedef int (*VADInitFunction)(void *VAD_inst);
-typedef int (*VADSetmodeFunction)(void *VAD_inst, int mode);
-typedef int (*VADFunction)(void *VAD_inst, int fs, int16_t *frame,
- int frameLen);
-
-/* Post-decode VAD instance (sub-instance of NETEQDSP_inst) */
-typedef struct PostDecodeVAD_t_
-{
-
- void *VADState; /* pointer to a VAD instance */
-
- int16_t VADEnabled; /* 1 if enabled, 0 if disabled */
- int VADMode; /* mode parameter to pass to the VAD function */
- int VADDecision; /* 1 for active, 0 for passive */
- int16_t SIDintervalCounter; /* reset when decoding CNG/SID frame,
- increment for each recout call */
-
- /* Function pointers */
- VADInitFunction initFunction; /* VAD init function */
- VADSetmodeFunction setmodeFunction; /* VAD setmode function */
- VADFunction VADFunction; /* VAD function */
-
-} PostDecodeVAD_t;
-
-#endif /* NETEQ_VAD */
-
-#ifdef NETEQ_STEREO
-#define MAX_MS_DECODES 10
-
-typedef struct
-{
- /* Stand-alone, master, or slave */
- enum MasterSlaveMode msMode;
-
- enum MasterSlaveExtraInfo extraInfo;
-
- uint16_t instruction;
- int16_t distLag;
- int16_t corrLag;
- int16_t bestIndex;
-
- uint32_t endTimestamp;
- uint16_t samplesLeftWithOverlap;
-
-} MasterSlaveInfo;
-#endif
-
-
-/* "Main" NetEQ DSP instance */
-typedef struct DSPInst_t_
-{
-
- /* MCU/DSP Communication layer */
- int16_t *pw16_readAddress;
- int16_t *pw16_writeAddress;
- void *main_inst;
-
- /* Output frame size in ms and samples */
- int16_t millisecondsPerCall;
- int16_t timestampsPerCall;
-
- /*
- * Example of speech buffer
- *
- * -----------------------------------------------------------
- * | History T-60 to T | Future |
- * -----------------------------------------------------------
- * ^ ^
- * | |
- * curPosition endPosition
- *
- * History is gradually shifted out to the left when inserting
- * new data at the end.
- */
-
- int16_t speechBuffer[SPEECH_BUF_SIZE]; /* History/future speech buffer */
- int curPosition; /* Next sample to play */
- int endPosition; /* Position that ends future data */
- uint32_t endTimestamp; /* Timestamp value at end of future data */
- uint32_t videoSyncTimestamp; /* (Estimated) timestamp of the last
- played sample (usually same as
- endTimestamp-(endPosition-curPosition)
- except during Expand and CNG) */
- uint16_t fs; /* sample rate in Hz */
- int16_t w16_frameLen; /* decoder frame length in samples */
- int16_t w16_mode; /* operation used during last RecOut call */
- int16_t w16_muteFactor; /* speech mute factor in Q14 */
- int16_t *pw16_speechHistory; /* beginning of speech history during Expand */
- int16_t w16_speechHistoryLen; /* 256 for NB and 512 for WB */
-
- /* random noise seed parameters */
- int16_t w16_seedInc;
- uint32_t uw16_seed;
-
- /* VQmon related variable */
- int16_t w16_concealedTS;
-
- /*****************/
- /* Sub-instances */
- /*****************/
-
- /* Decoder data */
- CodecFuncInst_t codec_ptr_inst;
-
-#ifdef NETEQ_CNG_CODEC
- /* CNG "decoder" instance */
- CNG_dec_inst *CNG_Codec_inst;
-#endif /* NETEQ_CNG_CODEC */
-
-#ifdef NETEQ_ATEVENT_DECODE
- /* DTMF generator instance */
- dtmf_tone_inst_t DTMFInst;
-#endif /* NETEQ_ATEVENT_DECODE */
-
-#ifdef NETEQ_VAD
- /* Post-decode VAD instance */
- PostDecodeVAD_t VADInst;
-#endif /* NETEQ_VAD */
-
- /* Expand instance (defined above) */
- ExpandInst_t ExpandInst;
-
- /* Background noise instance (defined above) */
- BGNInst_t BGNInst;
-
- /* Internal statistics instance */
- DSPStats_t statInst;
-
- /* Internal instance for short-term processing activity. */
- ActivityStats activity_stats;
-
-#ifdef NETEQ_STEREO
- /* Pointer to Master/Slave info */
- MasterSlaveInfo *msInfo;
-#endif
-
-} DSPInst_t;
-
-
-/*************************/
-/* Function declarations */
-/*************************/
-
-/****************************************************************************
- * WebRtcNetEQ_DSPInit(...)
- *
- * Initializes DSP side of NetEQ.
- *
- * Input:
- * - inst : NetEq DSP instance
- * - fs : Initial sample rate (may change when decoding data)
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- * : non-zero - error
- */
-
-int WebRtcNetEQ_DSPInit(DSPInst_t *inst, uint16_t fs);
-
-/****************************************************************************
- * WebRtcNetEQ_AddressInit(...)
- *
- * Initializes the shared-memory communication on the DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - data2McuAddress : Pointer to memory where DSP writes / MCU reads
- * - data2DspAddress : Pointer to memory where MCU writes / DSP reads
- * - mainInst : NetEQ main instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_AddressInit(DSPInst_t *inst, const void *data2McuAddress,
- const void *data2DspAddress, const void *mainInst);
-
-/****************************************************************************
- * WebRtcNetEQ_ClearInCallStats(...)
- *
- * Reset in-call statistics variables on DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_ClearInCallStats(DSPInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_ClearPostCallStats(...)
- *
- * Reset post-call statistics variables on DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_ClearPostCallStats(DSPInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_ClearActivityStats(...)
- *
- * Reset processing activity statistics.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- */
-
-void WebRtcNetEQ_ClearActivityStats(DSPInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_RecOutInternal(...)
- *
- * This function asks NetEQ for more speech/audio data.
- *
- * Input:
- * - inst : NetEQ instance, i.e. the user that requests more
- * speech/audio data.
- * - outdata : Pointer to a memory space where the output data
- * should be stored.
- * - BGNonly : If non-zero, RecOut will only produce background
- * noise. It will still draw packets from the packet
- * buffer, but they will never be decoded.
- * - av_sync : 1 if NetEQ is in AV-sync, 0 otherwise.
- *
- * Output:
- * - inst : Updated user information
- * - len : Number of samples that were outputted from NetEq
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, int16_t *pw16_outData,
- int16_t *pw16_len, int16_t BGNonly, int av_sync);
-
-/****************************************************************************
- * WebRtcNetEQ_Normal(...)
- *
- * This function can modify the data that is played out in Normal mode, for
- * example to adjust the signal gain. The length of the signal cannot be
- * changed.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector
- * - decoded : Pointer to vector of new data from decoder
- * - len : Number of input samples
- *
- * Output:
- * - inst : Updated user information
- *              - pw16_len      : Pointer to variable where the number of samples
- * produced will be written
- *
- * Return value : >=0 - Number of samples written to outData
- * -1 - Error
- */
-
-int WebRtcNetEQ_Normal(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_decoded, int16_t len,
- int16_t *pw16_outData, int16_t *pw16_len);
-
-/****************************************************************************
- * WebRtcNetEQ_Expand(...)
- *
- * This function produces one "chunk" of expansion data (PLC audio). The
- * length of the produced audio depends on the speech history.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector
- * - BGNonly : If non-zero, Expand will only produce background
- * noise.
- * - pw16_len : Desired number of samples (only for BGN mode).
- *
- * Output:
- * - inst : Updated user information
- * - outdata : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Number of samples that were outputted from NetEq
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_Expand(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly);
-
-/****************************************************************************
- * WebRtcNetEQ_GenerateBGN(...)
- *
- * This function generates and writes len samples of background noise to the
- * output vector. The Expand function will be called repeatedly until the
- * correct number of samples is produced.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector
- * - len : Desired length of produced BGN.
- *
- *
- * Output:
- * - pw16_outData : Pointer to a memory space where the output data
- * should be stored
- *
- * Return value : >=0 - Number of noise samples produced and written
- * to output
- * -1 - Error
- */
-
-int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_outData, int16_t len);
-
-/****************************************************************************
- * WebRtcNetEQ_PreEmptiveExpand(...)
- *
- * This function tries to extend the audio data by repeating one or several
- * pitch periods. The operation is only carried out if the correlation is
- * strong or if the signal energy is very low. The algorithm is the
- * inverse of the Accelerate algorithm.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to newly decoded speech.
- * - len : Length of decoded speech.
- * - oldDataLen : Length of the part of decoded that has already been played out.
- * - BGNonly : If non-zero, Pre-emptive Expand will only copy
- *                        the first DEFAULT_TIME_ADJUST milliseconds of the
- * input and append to the end. No signal matching is
- * done.
- *
- * Output:
- * - inst : Updated instance
- * - outData : Pointer to a memory space where the output data
- * should be stored. The vector must be at least
- * min(len + 120*fs/8000, NETEQ_MAX_OUTPUT_SIZE)
- * elements long.
- * - pw16_len : Number of samples written to outData.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- const int16_t *pw16_decoded, int len, int oldDataLen,
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly);
-
-/****************************************************************************
- * WebRtcNetEQ_Accelerate(...)
- *
- * This function tries to shorten the audio data by removing one or several
- * pitch periods. The operation is only carried out if the correlation is
- * strong or if the signal energy is very low.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to newly decoded speech.
- * - len : Length of decoded speech.
- * - BGNonly : If non-zero, Accelerate will only remove the last
- *                        DEFAULT_TIME_ADJUST milliseconds of the input.
- * No signal matching is done.
- *
- *
- * Output:
- * - inst : Updated instance
- * - outData : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Number of samples written to outData.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- const int16_t *pw16_decoded, int len,
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly);
-
-/****************************************************************************
- * WebRtcNetEQ_Merge(...)
- *
- * This function is used to merge new data from the decoder with the existing
- * stream in the synchronization buffer. The merge operation is typically
- * done after a packet loss, where the end of the expanded data does not
- * fit naturally with the new decoded data.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to new decoded speech.
- * - len : Number of samples in pw16_decoded.
- *
- *
- * Output:
- * - inst : Updated user information
- * - outData : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Number of samples written to pw16_outData
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_Merge(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_decoded, int len, int16_t *pw16_outData,
- int16_t *pw16_len);
-
-/****************************************************************************
- * WebRtcNetEQ_Cng(...)
- *
- * This function produces CNG according to RFC 3389
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - len : Number of samples to produce
- *
- * Output:
- * - pw16_outData : Output CNG
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-#ifdef NETEQ_CNG_CODEC
-/* Must compile NetEQ with CNG support to enable this function */
-
-int WebRtcNetEQ_Cng(DSPInst_t *inst, int16_t *pw16_outData, int len);
-
-#endif /* NETEQ_CNG_CODEC */
-
-/****************************************************************************
- * WebRtcNetEQ_BGNUpdate(...)
- *
- * This function updates the background noise parameter estimates.
- *
- * Input:
- * - inst : NetEQ instance, where the speech history is stored.
- * - scratchPtr : Pointer to scratch vector.
- *
- * Output:
- * - inst : Updated information about the BGN characteristics.
- *
- * Return value : No return value
- */
-
-void WebRtcNetEQ_BGNUpdate(
-#ifdef SCRATCH
- DSPInst_t *inst, int16_t *pw16_scratchPtr
-#else
- DSPInst_t *inst
-#endif
- );
-
-#ifdef NETEQ_VAD
-/* Functions used by post-decode VAD */
-
-/****************************************************************************
- * WebRtcNetEQ_InitVAD(...)
- *
- * Initializes post-decode VAD instance.
- *
- * Input:
- * - VADinst : PostDecodeVAD instance
- * - fs : Initial sample rate
- *
- * Output:
- * - VADinst : Updated instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, uint16_t fs);
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADModeInternal(...)
- *
- * Set the VAD mode in the VAD struct, and communicate it to the VAD instance
- * if it exists.
- *
- * Input:
- * - VADinst : PostDecodeVAD instance
- * - mode : Mode number passed on to the VAD function
- *
- * Output:
- * - VADinst : Updated instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADModeInternal(PostDecodeVAD_t *VADInst, int mode);
-
-#endif /* NETEQ_VAD */
-
-/****************************************************************************
- * WebRtcNetEQ_FlushSpeechBuffer(...)
- *
- * Flush the speech buffer.
- *
- * Input:
- * - inst : NetEq DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- * : non-zero - error
- */
-
-int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst);
-
-#ifndef WEBRTC_NETEQ_40BITACC_TEST
-
-#include "signal_processing_library.h"
-/* Map to regular SPL functions */
-#define WebRtcNetEQ_CrossCorr WebRtcSpl_CrossCorrelation
-#define WebRtcNetEQ_DotW16W16 WebRtcSpl_DotProductWithScale
-
-#else /* WEBRTC_NETEQ_40BITACC_TEST defined */
-/* Run NetEQ with simulated 40-bit accumulator to run bit-exact to a DSP
- implementation where the main (splib and NetEQ) functions have been
- 40-bit optimized. */
-
-/* Map to special 40-bit optimized functions, defined below */
-#define WebRtcNetEQ_CrossCorr WebRtcNetEQ_40BitAccCrossCorr
-#define WebRtcNetEQ_DotW16W16 WebRtcNetEQ_40BitAccDotW16W16
-
-/****************************************************************************
- * WebRtcNetEQ_40BitAccCrossCorr(...)
- *
- * Calculates the Cross correlation between two sequences seq1 and seq2. Seq1
- * is fixed and seq2 slides as the pointer is increased with step
- *
- * Input:
- * - seq1 : First sequence (fixed throughout the correlation)
- *      - seq2          : Second sequence (shifted by step_seq2 for each
- * new correlation)
- * - dimSeq : Number of samples to use in the cross correlation.
- * Should be no larger than 1024 to avoid overflow.
- * - dimCrossCorr : Number of CrossCorrelations to calculate (start
- * position for seq2 is updated for each new one)
- * - rShift : Number of right shifts to use
- * - step_seq2 : How many (positive or negative) steps the seq2
- * pointer should be updated for each new cross
- * correlation value
- *
- * Output:
- * - crossCorr : The cross correlation in Q-rShift
- */
-
-void WebRtcNetEQ_40BitAccCrossCorr(int32_t *crossCorr, int16_t *seq1,
- int16_t *seq2, int16_t dimSeq,
- int16_t dimCrossCorr, int16_t rShift,
- int16_t step_seq2);
-
-/****************************************************************************
- * WebRtcNetEQ_40BitAccDotW16W16(...)
- *
- * Calculates the dot product between two vectors (int16_t)
- *
- * Input:
- * - vector1 : Vector 1
- * - vector2 : Vector 2
- * - len : Number of samples in vector
- * Should be no larger than 1024 to avoid overflow.
- * - scaling : The number of right shifts (after multiplication)
- * required to avoid overflow in the dot product.
- * Return value : The dot product
- */
-
-int32_t WebRtcNetEQ_40BitAccDotW16W16(int16_t *vector1, int16_t *vector2,
- int len, int scaling);
-
-#endif /* WEBRTC_NETEQ_40BITACC_TEST */
-
-#endif /* DSP_H */
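The WEBRTC_NETEQ_40BITACC_TEST section above swaps in 40-bit-accumulator versions of the cross correlation and dot product. A minimal sketch of the dot-product contract (accumulate 16x16-bit products in a wide accumulator, then right-shift by the scaling factor), with an illustrative name and a 64-bit accumulator standing in for the simulated 40-bit one:

#include <cstdint>

int32_t DotProductWithScale(const int16_t* v1, const int16_t* v2,
                            int length, int scaling) {
  int64_t acc = 0;  // wide accumulator in place of the 40-bit one
  for (int i = 0; i < length; ++i) {
    acc += static_cast<int32_t>(v1[i]) * v2[i];  // 16x16 -> 32-bit product
  }
  return static_cast<int32_t>(acc >> scaling);   // scale down to fit 32 bits
}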
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc
new file mode 100644
index 00000000000..7451ae26f8e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc
@@ -0,0 +1,353 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
+
+#include <assert.h>
+#include <string.h> // Access to memset.
+
+#include <algorithm> // Access to min, max.
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+
+namespace webrtc {
+
+// Table of constants used in method DspHelper::ParabolicFit().
+const int16_t DspHelper::kParabolaCoefficients[17][3] = {
+ { 120, 32, 64 },
+ { 140, 44, 75 },
+ { 150, 50, 80 },
+ { 160, 57, 85 },
+ { 180, 72, 96 },
+ { 200, 89, 107 },
+ { 210, 98, 112 },
+ { 220, 108, 117 },
+ { 240, 128, 128 },
+ { 260, 150, 139 },
+ { 270, 162, 144 },
+ { 280, 174, 149 },
+ { 300, 200, 160 },
+ { 320, 228, 171 },
+ { 330, 242, 176 },
+ { 340, 257, 181 },
+ { 360, 288, 192 } };
+
+// Filter coefficients used when downsampling from the indicated sample rates
+// (8, 16, 32, 48 kHz) to 4 kHz. Coefficients are in Q12. The corresponding Q0
+// values are provided in the comments before each array.
+
+// Q0 values: {0.3, 0.4, 0.3}.
+const int16_t DspHelper::kDownsample8kHzTbl[3] = { 1229, 1638, 1229 };
+
+// Q0 values: {0.15, 0.2, 0.3, 0.2, 0.15}.
+const int16_t DspHelper::kDownsample16kHzTbl[5] = { 614, 819, 1229, 819, 614 };
+
+// Q0 values: {0.1425, 0.1251, 0.1525, 0.1628, 0.1525, 0.1251, 0.1425}.
+const int16_t DspHelper::kDownsample32kHzTbl[7] = {
+ 584, 512, 625, 667, 625, 512, 584 };
+
+// Q0 values: {0.2487, 0.0952, 0.1042, 0.1074, 0.1042, 0.0952, 0.2487}.
+const int16_t DspHelper::kDownsample48kHzTbl[7] = {
+ 1019, 390, 427, 440, 427, 390, 1019 };
+
+int DspHelper::RampSignal(const int16_t* input,
+ size_t length,
+ int factor,
+ int increment,
+ int16_t* output) {
+ int factor_q20 = (factor << 6) + 32;
+ // TODO(hlundin): Add 32 to factor_q20 when converting back to Q14?
+ for (size_t i = 0; i < length; ++i) {
+ output[i] = (factor * input[i] + 8192) >> 14;
+ factor_q20 += increment;
+ factor_q20 = std::max(factor_q20, 0); // Never go negative.
+ factor = std::min(factor_q20 >> 6, 16384);
+ }
+ return factor;
+}
+
+int DspHelper::RampSignal(int16_t* signal,
+ size_t length,
+ int factor,
+ int increment) {
+ return RampSignal(signal, length, factor, increment, signal);
+}
+
+int DspHelper::RampSignal(AudioMultiVector* signal,
+ size_t start_index,
+ size_t length,
+ int factor,
+ int increment) {
+ assert(start_index + length <= signal->Size());
+ if (start_index + length > signal->Size()) {
+ // Wrong parameters. Do nothing and return the scale factor unaltered.
+ return factor;
+ }
+ int end_factor = 0;
+ // Loop over the channels, starting at the same |factor| each time.
+ for (size_t channel = 0; channel < signal->Channels(); ++channel) {
+ end_factor =
+ RampSignal(&(*signal)[channel][start_index], length, factor, increment);
+ }
+ return end_factor;
+}
+
+void DspHelper::PeakDetection(int16_t* data, int data_length,
+ int num_peaks, int fs_mult,
+ int* peak_index, int16_t* peak_value) {
+ int16_t min_index = 0;
+ int16_t max_index = 0;
+
+ for (int i = 0; i <= num_peaks - 1; i++) {
+ if (num_peaks == 1) {
+ // Single peak. The parabola fit assumes that an extra point is
+ // available; worst case it gets a zero on the high end of the signal.
+ // TODO(hlundin): This can potentially get much worse. It breaks the
+ // API contract, that the length of |data| is |data_length|.
+ data_length++;
+ }
+
+ peak_index[i] = WebRtcSpl_MaxIndexW16(data, data_length - 1);
+
+ if (i != num_peaks - 1) {
+ min_index = std::max(0, peak_index[i] - 2);
+ max_index = std::min(data_length - 1, peak_index[i] + 2);
+ }
+
+ if ((peak_index[i] != 0) && (peak_index[i] != (data_length - 2))) {
+ ParabolicFit(&data[peak_index[i] - 1], fs_mult, &peak_index[i],
+ &peak_value[i]);
+ } else {
+ if (peak_index[i] == data_length - 2) {
+ if (data[peak_index[i]] > data[peak_index[i] + 1]) {
+ ParabolicFit(&data[peak_index[i] - 1], fs_mult, &peak_index[i],
+ &peak_value[i]);
+ } else if (data[peak_index[i]] <= data[peak_index[i] + 1]) {
+ // Linear approximation.
+ peak_value[i] = (data[peak_index[i]] + data[peak_index[i] + 1]) >> 1;
+ peak_index[i] = (peak_index[i] * 2 + 1) * fs_mult;
+ }
+ } else {
+ peak_value[i] = data[peak_index[i]];
+ peak_index[i] = peak_index[i] * 2 * fs_mult;
+ }
+ }
+
+ if (i != num_peaks - 1) {
+ memset(&data[min_index], 0,
+ sizeof(data[0]) * (max_index - min_index + 1));
+ }
+ }
+}
+
+void DspHelper::ParabolicFit(int16_t* signal_points, int fs_mult,
+ int* peak_index, int16_t* peak_value) {
+ uint16_t fit_index[13];
+ if (fs_mult == 1) {
+ fit_index[0] = 0;
+ fit_index[1] = 8;
+ fit_index[2] = 16;
+ } else if (fs_mult == 2) {
+ fit_index[0] = 0;
+ fit_index[1] = 4;
+ fit_index[2] = 8;
+ fit_index[3] = 12;
+ fit_index[4] = 16;
+ } else if (fs_mult == 4) {
+ fit_index[0] = 0;
+ fit_index[1] = 2;
+ fit_index[2] = 4;
+ fit_index[3] = 6;
+ fit_index[4] = 8;
+ fit_index[5] = 10;
+ fit_index[6] = 12;
+ fit_index[7] = 14;
+ fit_index[8] = 16;
+ } else {
+ fit_index[0] = 0;
+ fit_index[1] = 1;
+ fit_index[2] = 3;
+ fit_index[3] = 4;
+ fit_index[4] = 5;
+ fit_index[5] = 7;
+ fit_index[6] = 8;
+ fit_index[7] = 9;
+ fit_index[8] = 11;
+ fit_index[9] = 12;
+ fit_index[10] = 13;
+ fit_index[11] = 15;
+ fit_index[12] = 16;
+ }
+
+ // num = -3 * signal_points[0] + 4 * signal_points[1] - signal_points[2];
+ // den = signal_points[0] - 2 * signal_points[1] + signal_points[2];
+ int32_t num = (signal_points[0] * -3) + (signal_points[1] * 4)
+ - signal_points[2];
+ int32_t den = signal_points[0] + (signal_points[1] * -2) + signal_points[2];
+ int32_t temp = num * 120;
+ int flag = 1;
+ int16_t stp = kParabolaCoefficients[fit_index[fs_mult]][0]
+ - kParabolaCoefficients[fit_index[fs_mult - 1]][0];
+ int16_t strt = (kParabolaCoefficients[fit_index[fs_mult]][0]
+ + kParabolaCoefficients[fit_index[fs_mult - 1]][0]) / 2;
+ int16_t lmt;
+ if (temp < -den * strt) {
+ lmt = strt - stp;
+ while (flag) {
+ if ((flag == fs_mult) || (temp > -den * lmt)) {
+ *peak_value = (den * kParabolaCoefficients[fit_index[fs_mult - flag]][1]
+ + num * kParabolaCoefficients[fit_index[fs_mult - flag]][2]
+ + signal_points[0] * 256) / 256;
+ *peak_index = *peak_index * 2 * fs_mult - flag;
+ flag = 0;
+ } else {
+ flag++;
+ lmt -= stp;
+ }
+ }
+ } else if (temp > -den * (strt + stp)) {
+ lmt = strt + 2 * stp;
+ while (flag) {
+ if ((flag == fs_mult) || (temp < -den * lmt)) {
+ int32_t temp_term_1 =
+ den * kParabolaCoefficients[fit_index[fs_mult+flag]][1];
+ int32_t temp_term_2 =
+ num * kParabolaCoefficients[fit_index[fs_mult+flag]][2];
+ int32_t temp_term_3 = signal_points[0] * 256;
+ *peak_value = (temp_term_1 + temp_term_2 + temp_term_3) / 256;
+ *peak_index = *peak_index * 2 * fs_mult + flag;
+ flag = 0;
+ } else {
+ flag++;
+ lmt += stp;
+ }
+ }
+ } else {
+ *peak_value = signal_points[1];
+ *peak_index = *peak_index * 2 * fs_mult;
+ }
+}
+
+int DspHelper::MinDistortion(const int16_t* signal, int min_lag,
+ int max_lag, int length,
+ int32_t* distortion_value) {
+ int best_index = -1;
+ int32_t min_distortion = WEBRTC_SPL_WORD32_MAX;
+ for (int i = min_lag; i <= max_lag; i++) {
+ int32_t sum_diff = 0;
+ const int16_t* data1 = signal;
+ const int16_t* data2 = signal - i;
+ for (int j = 0; j < length; j++) {
+ sum_diff += WEBRTC_SPL_ABS_W32(data1[j] - data2[j]);
+ }
+ // Compare with previous minimum.
+ if (sum_diff < min_distortion) {
+ min_distortion = sum_diff;
+ best_index = i;
+ }
+ }
+ *distortion_value = min_distortion;
+ return best_index;
+}
+
+void DspHelper::CrossFade(const int16_t* input1, const int16_t* input2,
+ size_t length, int16_t* mix_factor,
+ int16_t factor_decrement, int16_t* output) {
+ int16_t factor = *mix_factor;
+ int16_t complement_factor = 16384 - factor;
+ for (size_t i = 0; i < length; i++) {
+ output[i] =
+ (factor * input1[i] + complement_factor * input2[i] + 8192) >> 14;
+ factor -= factor_decrement;
+ complement_factor += factor_decrement;
+ }
+ *mix_factor = factor;
+}
+
+void DspHelper::UnmuteSignal(const int16_t* input, size_t length,
+ int16_t* factor, int16_t increment,
+ int16_t* output) {
+ uint16_t factor_16b = *factor;
+ int32_t factor_32b = (static_cast<int32_t>(factor_16b) << 6) + 32;
+ for (size_t i = 0; i < length; i++) {
+ output[i] = (factor_16b * input[i] + 8192) >> 14;
+ factor_32b = std::max(factor_32b + increment, 0);
+ factor_16b = std::min(16384, factor_32b >> 6);
+ }
+ *factor = factor_16b;
+}
+
+void DspHelper::MuteSignal(int16_t* signal, int16_t mute_slope, size_t length) {
+ int32_t factor = (16384 << 6) + 32;
+ for (size_t i = 0; i < length; i++) {
+ signal[i] = ((factor >> 6) * signal[i] + 8192) >> 14;
+ factor -= mute_slope;
+ }
+}
+
+int DspHelper::DownsampleTo4kHz(const int16_t* input, size_t input_length,
+ int output_length, int input_rate_hz,
+ bool compensate_delay, int16_t* output) {
+ // Set filter parameters depending on input frequency.
+ // NOTE: The phase delay values are wrong compared to the true phase delay
+ // of the filters. However, the error is preserved (through the +1 term) for
+ // consistency.
+ const int16_t* filter_coefficients; // Filter coefficients.
+ int16_t filter_length; // Number of coefficients.
+ int16_t filter_delay; // Phase delay in samples.
+ int16_t factor; // Conversion rate (inFsHz / 8000).
+ switch (input_rate_hz) {
+ case 8000: {
+ filter_length = 3;
+ factor = 2;
+ filter_coefficients = kDownsample8kHzTbl;
+ filter_delay = 1 + 1;
+ break;
+ }
+ case 16000: {
+ filter_length = 5;
+ factor = 4;
+ filter_coefficients = kDownsample16kHzTbl;
+ filter_delay = 2 + 1;
+ break;
+ }
+ case 32000: {
+ filter_length = 7;
+ factor = 8;
+ filter_coefficients = kDownsample32kHzTbl;
+ filter_delay = 3 + 1;
+ break;
+ }
+ case 48000: {
+ filter_length = 7;
+ factor = 12;
+ filter_coefficients = kDownsample48kHzTbl;
+ filter_delay = 3 + 1;
+ break;
+ }
+ default: {
+ assert(false);
+ return -1;
+ }
+ }
+
+ if (!compensate_delay) {
+ // Disregard delay compensation.
+ filter_delay = 0;
+ }
+
+ // Returns -1 if input signal is too short; 0 otherwise.
+ return WebRtcSpl_DownsampleFast(
+ &input[filter_length - 1], static_cast<int>(input_length) -
+ (filter_length - 1), output, output_length, filter_coefficients,
+ filter_length, factor, filter_delay);
+}
+
+} // namespace webrtc
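A usage sketch for DspHelper::CrossFade defined above (illustrative, not part of the patch): the mix factor is in Q14, so 16384 means full weight on the first input, and the per-sample decrement moves the weight towards the second input.

#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"

// Hypothetical helper: fade from |frame_a| to |frame_b| over one 10 ms frame
// at 8 kHz (80 samples).
void CrossFadeFrames(const int16_t* frame_a, const int16_t* frame_b,
                     int16_t* out) {
  const size_t kLength = 80;
  int16_t mix_factor = 16384;  // 1.0 in Q14: start fully on frame_a
  const int16_t kDecrement = static_cast<int16_t>(16384 / kLength);
  webrtc::DspHelper::CrossFade(frame_a, frame_b, kLength, &mix_factor,
                               kDecrement, out);
  // |mix_factor| now holds the Q14 weight applied after the last sample.
}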
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h
new file mode 100644
index 00000000000..af4f4d6c88c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h
@@ -0,0 +1,136 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DSP_HELPER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DSP_HELPER_H_
+
+#include <string.h> // Access to size_t.
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// This class contains various signal processing functions, all implemented as
+// static methods.
+class DspHelper {
+ public:
+ // Filter coefficients used when downsampling from the indicated sample rates
+ // (8, 16, 32, 48 kHz) to 4 kHz. Coefficients are in Q12.
+ static const int16_t kDownsample8kHzTbl[3];
+ static const int16_t kDownsample16kHzTbl[5];
+ static const int16_t kDownsample32kHzTbl[7];
+ static const int16_t kDownsample48kHzTbl[7];
+
+ // Constants used to mute and unmute over 5 samples. The coefficients are
+ // in Q15.
+ static const int kMuteFactorStart8kHz = 27307;
+ static const int kMuteFactorIncrement8kHz = -5461;
+ static const int kUnmuteFactorStart8kHz = 5461;
+ static const int kUnmuteFactorIncrement8kHz = 5461;
+ static const int kMuteFactorStart16kHz = 29789;
+ static const int kMuteFactorIncrement16kHz = -2979;
+ static const int kUnmuteFactorStart16kHz = 2979;
+ static const int kUnmuteFactorIncrement16kHz = 2979;
+ static const int kMuteFactorStart32kHz = 31208;
+ static const int kMuteFactorIncrement32kHz = -1560;
+ static const int kUnmuteFactorStart32kHz = 1560;
+ static const int kUnmuteFactorIncrement32kHz = 1560;
+ static const int kMuteFactorStart48kHz = 31711;
+ static const int kMuteFactorIncrement48kHz = -1057;
+ static const int kUnmuteFactorStart48kHz = 1057;
+ static const int kUnmuteFactorIncrement48kHz = 1057;
+
+ // Multiplies the signal with a gradually changing factor.
+ // The first sample is multiplied with |factor| (in Q14). For each sample,
+ // |factor| is increased (additive) by the |increment| (in Q20), which can
+ // be negative. Returns the scale factor after the last increment.
+ static int RampSignal(const int16_t* input,
+ size_t length,
+ int factor,
+ int increment,
+ int16_t* output);
+
+ // Same as above, but with the samples of |signal| being modified in-place.
+ static int RampSignal(int16_t* signal,
+ size_t length,
+ int factor,
+ int increment);
+
+ // Same as above, but processes |length| samples from |signal|, starting at
+ // |start_index|.
+ static int RampSignal(AudioMultiVector* signal,
+ size_t start_index,
+ size_t length,
+ int factor,
+ int increment);
+
+ // Peak detection with parabolic fit. Looks for |num_peaks| maxima in |data|,
+ // having length |data_length| and sample rate multiplier |fs_mult|. The peak
+ // locations and values are written to the arrays |peak_index| and
+ // |peak_value|, respectively. Both arrays must hold at least |num_peaks|
+ // elements.
+ static void PeakDetection(int16_t* data, int data_length,
+ int num_peaks, int fs_mult,
+ int* peak_index, int16_t* peak_value);
+
+ // Estimates the height and location of a maximum. The three values in the
+ // array |signal_points| are used as basis for a parabolic fit, which is then
+ // used to find the maximum in an interpolated signal. The |signal_points| are
+ // assumed to be from a 4 kHz signal, while the maximum, written to
+ // |peak_index| and |peak_value| is given in the full sample rate, as
+ // indicated by the sample rate multiplier |fs_mult|.
+ static void ParabolicFit(int16_t* signal_points, int fs_mult,
+ int* peak_index, int16_t* peak_value);
+
+ // Calculates the sum-abs-diff for |signal| when compared to a displaced
+ // version of itself. Returns the displacement lag that results in the minimum
+ // distortion. The resulting distortion is written to |distortion_value|.
+ // The values of |min_lag| and |max_lag| are boundaries for the search.
+ static int MinDistortion(const int16_t* signal, int min_lag,
+ int max_lag, int length, int32_t* distortion_value);
+
+ // Mixes |length| samples from |input1| and |input2| together and writes the
+ // result to |output|. The gain for |input1| starts at |mix_factor| (Q14) and
+ // is decreased by |factor_decrement| (Q14) for each sample. The gain for
+ // |input2| is the complement 16384 - mix_factor.
+ static void CrossFade(const int16_t* input1, const int16_t* input2,
+ size_t length, int16_t* mix_factor,
+ int16_t factor_decrement, int16_t* output);
+
+ // Scales |input| with an increasing gain. Applies |factor| (Q14) to the first
+ // sample and increases the gain by |increment| (Q20) for each sample. The
+ // result is written to |output|. |length| samples are processed.
+ static void UnmuteSignal(const int16_t* input, size_t length, int16_t* factor,
+ int16_t increment, int16_t* output);
+
+ // Starts at unity gain and gradually fades out |signal|. For each sample,
+ // the gain is reduced by |mute_slope| (Q14). |length| samples are processed.
+ static void MuteSignal(int16_t* signal, int16_t mute_slope, size_t length);
+
+  // Downsamples |input| from |input_rate_hz| to 4 kHz sample rate. The input
+ // has |input_length| samples, and the method will write |output_length|
+ // samples to |output|. Compensates for the phase delay of the downsampling
+ // filters if |compensate_delay| is true. Returns -1 if the input is too short
+ // to produce |output_length| samples, otherwise 0.
+ static int DownsampleTo4kHz(const int16_t* input, size_t input_length,
+ int output_length, int input_rate_hz,
+ bool compensate_delay, int16_t* output);
+
+ private:
+ // Table of constants used in method DspHelper::ParabolicFit().
+ static const int16_t kParabolaCoefficients[17][3];
+
+ DISALLOW_COPY_AND_ASSIGN(DspHelper);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DSP_HELPER_H_
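A usage sketch for DspHelper::DownsampleTo4kHz declared above, with illustrative values (not part of the patch): decimating 32 kHz input to 4 kHz before correlation analysis. The input must be somewhat longer than 8 times the requested output length to cover the filter, otherwise the call returns -1.

#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"

int DownsampleForCorrelation(const int16_t* input_32khz, size_t input_length,
                             int16_t* output_4khz) {
  const int kOutputLength = 110;  // 4 kHz samples to produce (illustrative)
  return webrtc::DspHelper::DownsampleTo4kHz(input_32khz, input_length,
                                             kOutputLength, 32000,
                                             true /* compensate_delay */,
                                             output_4khz);
}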
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper_unittest.cc
new file mode 100644
index 00000000000..cbceff61945
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper_unittest.cc
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+TEST(DspHelper, RampSignalArray) {
+ static const int kLen = 100;
+ int16_t input[kLen];
+ int16_t output[kLen];
+ // Fill input with 1000.
+ for (int i = 0; i < kLen; ++i) {
+ input[i] = 1000;
+ }
+ int start_factor = 0;
+ // Ramp from 0 to 1 (in Q14) over the array. Note that |increment| is in Q20,
+ // while the factor is in Q14, hence the shift by 6.
+ int increment = (16384 << 6) / kLen;
+
+ // Test first method.
+ int stop_factor = DspHelper::RampSignal(input, kLen, start_factor, increment,
+ output);
+ EXPECT_EQ(16383, stop_factor); // Almost reach 1 in Q14.
+ for (int i = 0; i < kLen; ++i) {
+ EXPECT_EQ(1000 * i / kLen, output[i]);
+ }
+
+ // Test second method. (Note that this modifies |input|.)
+ stop_factor = DspHelper::RampSignal(input, kLen, start_factor, increment);
+ EXPECT_EQ(16383, stop_factor); // Almost reach 1 in Q14.
+ for (int i = 0; i < kLen; ++i) {
+ EXPECT_EQ(1000 * i / kLen, input[i]);
+ }
+}
+
+TEST(DspHelper, RampSignalAudioMultiVector) {
+ static const int kLen = 100;
+ static const int kChannels = 5;
+ AudioMultiVector input(kChannels, kLen * 3);
+ // Fill input with 1000.
+ for (int i = 0; i < kLen * 3; ++i) {
+ for (int channel = 0; channel < kChannels; ++channel) {
+ input[channel][i] = 1000;
+ }
+ }
+ // We want to start ramping at |start_index| and keep ramping for |kLen|
+ // samples.
+ int start_index = kLen;
+ int start_factor = 0;
+ // Ramp from 0 to 1 (in Q14) in |kLen| samples. Note that |increment| is in
+ // Q20, while the factor is in Q14, hence the shift by 6.
+ int increment = (16384 << 6) / kLen;
+
+ int stop_factor = DspHelper::RampSignal(&input, start_index, kLen,
+ start_factor, increment);
+ EXPECT_EQ(16383, stop_factor); // Almost reach 1 in Q14.
+ // Verify that the first |kLen| samples are left untouched.
+ int i;
+ for (i = 0; i < kLen; ++i) {
+ for (int channel = 0; channel < kChannels; ++channel) {
+ EXPECT_EQ(1000, input[channel][i]);
+ }
+ }
+ // Verify that the next block of |kLen| samples are ramped.
+ for (; i < 2 * kLen; ++i) {
+ for (int channel = 0; channel < kChannels; ++channel) {
+ EXPECT_EQ(1000 * (i - kLen) / kLen, input[channel][i]);
+ }
+ }
+ // Verify the last |kLen| samples are left untouched.
+ for (; i < 3 * kLen; ++i) {
+ for (int channel = 0; channel < kChannels; ++channel) {
+ EXPECT_EQ(1000, input[channel][i]);
+ }
+ }
+}
+} // namespace webrtc
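The tests above lean on the Q20/Q14 relationship between the ramp increment and the factor (a shift of 6 bits). A small illustrative helper, assuming the same arithmetic as in the tests, for choosing an increment that completes a full 0-to-unity ramp over a given duration:

#include <cstdint>

// Q20 increment for DspHelper::RampSignal so that the Q14 factor reaches
// 16384 (unity) after |duration_ms| of audio at |fs_hz|.
int RampIncrementQ20(int duration_ms, int fs_hz) {
  const int samples = duration_ms * fs_hz / 1000;
  return (16384 << 6) / samples;  // Q14 target shifted to Q20, spread evenly
}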
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c
deleted file mode 100644
index ef721d55997..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains some help functions that did not fit elsewhere.
- */
-
-#include "dsp_helpfunctions.h"
-
-
-int16_t WebRtcNetEQ_CalcFsMult(uint16_t fsHz)
-{
- switch (fsHz)
- {
- case 8000:
- {
- return 1;
- }
- case 16000:
- {
- return 2;
- }
- case 32000:
- {
- return 4;
- }
- case 48000:
- {
- return 6;
- }
- default:
- {
- return 1;
- }
- }
-}
-
-
-int WebRtcNetEQ_DownSampleTo4kHz(const int16_t *in, int inLen, uint16_t inFsHz,
- int16_t *out, int outLen, int compensateDelay)
-{
- int16_t *B; /* filter coefficients */
- int16_t Blen; /* number of coefficients */
- int16_t filterDelay; /* phase delay in samples */
- int16_t factor; /* conversion rate (inFsHz/8000) */
- int ok;
-
- /* Set constants depending on frequency used */
- /* NOTE: The phase delay values are wrong compared to the true phase delay
- of the filters. However, the error is preserved (through the +1 term)
- for consistency. */
- switch (inFsHz)
- {
- case 8000:
- {
- Blen = 3;
- factor = 2;
- B = (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl;
- filterDelay = 1 + 1;
- break;
- }
-#ifdef NETEQ_WIDEBAND
- case 16000:
- {
- Blen = 5;
- factor = 4;
- B = (int16_t*) WebRtcNetEQ_kDownsample16kHzTbl;
- filterDelay = 2 + 1;
- break;
- }
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- case 32000:
- {
- Blen = 7;
- factor = 8;
- B = (int16_t*) WebRtcNetEQ_kDownsample32kHzTbl;
- filterDelay = 3 + 1;
- break;
- }
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- case 48000:
- {
- Blen = 7;
- factor = 12;
- B = (int16_t*) WebRtcNetEQ_kDownsample48kHzTbl;
- filterDelay = 3 + 1;
- break;
- }
-#endif
- default:
- {
- /* unsupported or wrong sample rate */
- return -1;
- }
- }
-
- if (!compensateDelay)
- {
- /* disregard delay compensation */
- filterDelay = 0;
- }
-
- ok = WebRtcSpl_DownsampleFast((int16_t*) &in[Blen - 1],
- (int16_t) (inLen - (Blen - 1)), /* number of input samples */
- out, (int16_t) outLen, /* number of output samples to produce */
- B, Blen, factor, filterDelay); /* filter parameters */
-
- return ok; /* return value is -1 if input signal is too short */
-
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h
deleted file mode 100644
index 11119f1b825..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h
+++ /dev/null
@@ -1,220 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Various help functions used by the DSP functions.
- */
-
-#ifndef DSP_HELPFUNCTIONS_H
-#define DSP_HELPFUNCTIONS_H
-
-#include "typedefs.h"
-
-#include "dsp.h"
-
-/****************************************************************************
- * WebRtcNetEQ_Correlator(...)
- *
- * Calculate signal correlation.
- *
- * Input:
- * - inst : DSP instance
- * - data : Speech history to do expand from (older history in data[-4..-1])
- * - dataLen : Length of data
- *
- * Output:
- * - corrOut : CC of downsampled signal
- * - corrScale : Scale factor for correlation (-Qdomain)
- *
- * Return value : Length of correlated data
- */
-
-int16_t WebRtcNetEQ_Correlator(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_data, int16_t w16_dataLen,
- int16_t *pw16_corrOut,
- int16_t *pw16_corrScale);
-
-/****************************************************************************
- * WebRtcNetEQ_PeakDetection(...)
- *
- * Peak detection with parabolic fit.
- *
- * Input:
- * - data : Data sequence for peak detection
- * - dataLen : Length of data
- * - nmbPeaks : Number of peaks to detect
- * - fs_mult : Sample rate multiplier
- *
- * Output:
- * - corrIndex : Index of the peak
- * - winner : Value of the peak
- *
- * Return value : 0 for ok
- */
-
-int16_t WebRtcNetEQ_PeakDetection(int16_t *pw16_data, int16_t w16_dataLen,
- int16_t w16_nmbPeaks, int16_t fs_mult,
- int16_t *pw16_corrIndex,
- int16_t *pw16_winners);
-
-/****************************************************************************
- * WebRtcNetEQ_PrblFit(...)
- *
- * Three-point parabola fit.
- *
- * Input:
- * - 3pts : Three input samples
- * - fs_mult : Sample rate multiplier
- *
- * Output:
- * - Ind : Index of the peak
- * - outVal : Value of the peak
- *
- * Return value : 0 for ok
- */
-
-int16_t WebRtcNetEQ_PrblFit(int16_t *pw16_3pts, int16_t *pw16_Ind,
- int16_t *pw16_outVal, int16_t fs_mult);
-
-/****************************************************************************
- * WebRtcNetEQ_MinDistortion(...)
- *
- * Find the lag that results in minimum distortion.
- *
- * Input:
- *      - data          : Start of speech to compute distortion over; the second vector is assumed
- * to be data[-Lag]
- * - minLag : Start lag
- * - maxLag : End lag
- * - len : Length to correlate
- *
- * Output:
- *      - dist          : Distortion value
- *
- * Return value : Lag for minimum distortion
- */
-
-int16_t WebRtcNetEQ_MinDistortion(const int16_t *pw16_data,
- int16_t w16_minLag, int16_t w16_maxLag,
- int16_t len, int32_t *pw16_dist);
-
-/****************************************************************************
- * WebRtcNetEQ_RandomVec(...)
- *
- * Generate random vector.
- *
- * Input:
- * - seed : Current seed (input/output)
- * - len : Number of samples to generate
- * - incVal : Jump step
- *
- * Output:
- * - randVec : Generated random vector
- */
-
-void WebRtcNetEQ_RandomVec(uint32_t *w32_seed, int16_t *pw16_randVec,
- int16_t w16_len, int16_t w16_incval);
-
-/****************************************************************************
- * WebRtcNetEQ_MixVoiceUnvoice(...)
- *
- * Mix voiced and unvoiced signal.
- *
- * Input:
- * - voicedVec : Voiced input signal
- * - unvoicedVec : Unvoiced input signal
- * - current_vfraction : Current mixing factor
- * - vfraction_change : Mixing factor change per sample
- * - N : Number of samples
- *
- * Output:
- * - outData : Mixed signal
- */
-
-void WebRtcNetEQ_MixVoiceUnvoice(int16_t *pw16_outData, int16_t *pw16_voicedVec,
- int16_t *pw16_unvoicedVec,
- int16_t *w16_current_vfraction,
- int16_t w16_vfraction_change, int16_t N);
-
-/****************************************************************************
- * WebRtcNetEQ_UnmuteSignal(...)
- *
- * Gradually reduce attenuation.
- *
- * Input:
- * - inVec : Input signal
- * - startMuteFact : Starting attenuation
- * - unmuteFact : Factor to "unmute" with (Q20)
- * - N : Number of samples
- *
- * Output:
- * - outVec : Output signal
- */
-
-void WebRtcNetEQ_UnmuteSignal(int16_t *pw16_inVec, int16_t *startMuteFact,
- int16_t *pw16_outVec, int16_t unmuteFact,
- int16_t N);
-
-/****************************************************************************
- * WebRtcNetEQ_MuteSignal(...)
- *
- * Gradually increase attenuation.
- *
- * Input:
- * - inout : Input/output signal
- * - muteSlope : Slope of muting
- * - N : Number of samples
- */
-
-void WebRtcNetEQ_MuteSignal(int16_t *pw16_inout, int16_t muteSlope,
- int16_t N);
-
-/****************************************************************************
- * WebRtcNetEQ_CalcFsMult(...)
- *
- * Calculate the sample rate divided by 8000.
- *
- * Input:
- * - fsHz : Sample rate in Hz in {8000, 16000, 32000, 48000}.
- *
- * Return value : fsHz/8000 for the valid values, 1 for other inputs
- */
-
-int16_t WebRtcNetEQ_CalcFsMult(uint16_t fsHz);
-
-/****************************************************************************
- * WebRtcNetEQ_DownSampleTo4kHz(...)
- *
- * Lowpass filter and downsample a signal to 4 kHz sample rate.
- *
- * Input:
- * - in : Input signal samples.
- * - inLen : Number of input samples.
- * - inFsHz : Input sample rate in Hz.
- * - outLen : Desired number of samples in decimated signal.
- *      - compensateDelay : If non-zero, compensate for the phase delay
- * of the anti-alias filter.
- *
- * Output:
- * - out : Output signal samples.
- *
- * Return value : 0 - Ok
- * -1 - Error
- *
- */
-
-int WebRtcNetEQ_DownSampleTo4kHz(const int16_t *in, int inLen, uint16_t inFsHz,
- int16_t *out, int outLen, int compensateDelay);
-
-#endif
-
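For reference, the contract documented above for WebRtcNetEQ_CalcFsMult (fsHz/8000 for the four supported rates, 1 otherwise) boils down to a lookup of this shape; the sketch below is an illustration of that contract, not the removed implementation:

    #include <cstdint>

    // Sample-rate multiplier as documented for WebRtcNetEQ_CalcFsMult above:
    // fs_hz / 8000 for the supported rates, 1 for anything else.
    int16_t CalcFsMultSketch(uint16_t fs_hz) {
      switch (fs_hz) {
        case 8000:  return 1;
        case 16000: return 2;
        case 32000: return 4;
        case 48000: return 6;
        default:    return 1;  // Unsupported rates fall back to 1.
      }
    }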
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.c
deleted file mode 100644
index 1788635c7df..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.c
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of packet buffer for DTMF messages.
- */
-
-#include "dtmf_buffer.h"
-
-#include "typedefs.h" /* to define endianness */
-#include "signal_processing_library.h"
-
-#include "neteq_error_codes.h"
-
-
-#ifdef NETEQ_ATEVENT_DECODE
-
-int16_t WebRtcNetEQ_DtmfRemoveEvent(dtmf_inst_t *DTMFdec_inst)
-{
-
- int i;
- for (i = 0; i < 3; i++)
- {
- DTMFdec_inst->EventQueue[i] = DTMFdec_inst->EventQueue[i + 1];
- DTMFdec_inst->EventQueueVolume[i] = DTMFdec_inst->EventQueueVolume[i + 1];
- DTMFdec_inst->EventQueueEnded[i] = DTMFdec_inst->EventQueueEnded[i + 1];
- DTMFdec_inst->EventQueueStartTime[i] = DTMFdec_inst->EventQueueStartTime[i + 1];
- DTMFdec_inst->EventQueueEndTime[i] = DTMFdec_inst->EventQueueEndTime[i + 1];
- }
- DTMFdec_inst->EventBufferSize--;
- DTMFdec_inst->EventQueue[3] = -1;
- DTMFdec_inst->EventQueueVolume[3] = 0;
- DTMFdec_inst->EventQueueEnded[3] = 0;
- DTMFdec_inst->EventQueueStartTime[3] = 0;
- DTMFdec_inst->EventQueueEndTime[3] = 0;
-
- return 0;
-}
-
-int16_t WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, uint16_t fs,
- int16_t MaxPLCtime)
-{
- int i;
- if (((fs != 8000) && (fs != 16000) && (fs != 32000) && (fs != 48000)) || (MaxPLCtime < 0))
- {
- return DTMF_DEC_PARAMETER_ERROR;
- }
- if (fs == 8000)
- DTMFdec_inst->framelen = 80;
- else if (fs == 16000)
- DTMFdec_inst->framelen = 160;
- else if (fs == 32000)
- DTMFdec_inst->framelen = 320;
- else
- /* fs == 48000 */
- DTMFdec_inst->framelen = 480;
-
- DTMFdec_inst->MaxPLCtime = MaxPLCtime;
- DTMFdec_inst->CurrentPLCtime = 0;
- DTMFdec_inst->EventBufferSize = 0;
- for (i = 0; i < 4; i++)
- {
- DTMFdec_inst->EventQueue[i] = -1;
- DTMFdec_inst->EventQueueVolume[i] = 0;
- DTMFdec_inst->EventQueueEnded[i] = 0;
- DTMFdec_inst->EventQueueStartTime[i] = 0;
- DTMFdec_inst->EventQueueEndTime[i] = 0;
- }
- return 0;
-}
-
-int16_t WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
- const int16_t *encoded, int16_t len,
- uint32_t timeStamp)
-{
-
- int i;
- int16_t value;
- const int16_t *EventStart;
- int16_t endEvent;
- int16_t Volume;
- int16_t Duration;
- int16_t position = -1;
-
- /* Extract event */
- if (len == 4)
- {
- EventStart = encoded;
-#ifdef WEBRTC_ARCH_BIG_ENDIAN
- value=((*EventStart)>>8);
- endEvent=((*EventStart)&0x80)>>7;
- Volume=((*EventStart)&0x3F);
- Duration=EventStart[1];
-#else
- value = ((*EventStart) & 0xFF);
- endEvent = ((*EventStart) & 0x8000) >> 15;
- Volume = ((*EventStart) & 0x3F00) >> 8;
- Duration = (((((uint16_t) EventStart[1]) >> 8) & 0xFF)
- | (((uint16_t) (EventStart[1] & 0xFF)) << 8));
-#endif
- /* Only events between 0-15 are supported (DTMF tones) */
- if ((value < 0) || (value > 15))
- {
- return 0;
- }
-
-    /* Discard all DTMF tones with really low volume (< -36 dBm0) */
- if (Volume > 36)
- {
- return 0;
- }
-
- /*Are there any unended events of the same type? */
- for (i = 0; i < DTMFdec_inst->EventBufferSize; i++)
- {
- /* Going through the whole queue even when we have found a match will
- ensure that we add to the latest applicable event */
- if ((DTMFdec_inst->EventQueue[i] == value) && (!DTMFdec_inst->EventQueueEnded[i]
- || endEvent)) position = i;
- }
- if (position > -1)
- {
- DTMFdec_inst->EventQueueVolume[position] = Volume;
- if ((timeStamp + Duration) > DTMFdec_inst->EventQueueEndTime[position]) DTMFdec_inst->EventQueueEndTime[position]
- = DTMFdec_inst->EventQueueStartTime[position] + Duration;
- if (endEvent) DTMFdec_inst->EventQueueEnded[position] = 1;
- }
- else
- {
- if (DTMFdec_inst->EventBufferSize == MAX_DTMF_QUEUE_SIZE)
- { /* Buffer full */
- /* Remove one event */
- DTMFdec_inst->EventBufferSize--;
- }
- /* Store data in the instance on a new position*/
- DTMFdec_inst->EventQueue[DTMFdec_inst->EventBufferSize] = value;
- DTMFdec_inst->EventQueueVolume[DTMFdec_inst->EventBufferSize] = Volume;
- DTMFdec_inst->EventQueueEnded[DTMFdec_inst->EventBufferSize] = endEvent;
- DTMFdec_inst->EventQueueStartTime[DTMFdec_inst->EventBufferSize] = timeStamp;
- DTMFdec_inst->EventQueueEndTime[DTMFdec_inst->EventBufferSize] = timeStamp
- + Duration;
- DTMFdec_inst->EventBufferSize++;
- }
- return 0;
- }
- return DTMF_INSERT_ERROR;
-}
-
-int16_t WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, int16_t *event,
- int16_t *volume, uint32_t currTimeStamp)
-{
-
- if (DTMFdec_inst->EventBufferSize < 1) return 0; /* No events to play */
-
- /* We have events, is it time to play them? */
- if (currTimeStamp < DTMFdec_inst->EventQueueStartTime[0])
- {
- /*No, just return zero */
- return 0;
- }
-
- /* Continue on the event that is currently ongoing */
- *event = DTMFdec_inst->EventQueue[0];
- *volume = DTMFdec_inst->EventQueueVolume[0];
-
- if (DTMFdec_inst->EventQueueEndTime[0] >= (currTimeStamp + DTMFdec_inst->framelen))
- {
-
-        /* Still at least frameLen to play */
-
- DTMFdec_inst->CurrentPLCtime = 0;
- if ((DTMFdec_inst->EventQueueEndTime[0] == (currTimeStamp + DTMFdec_inst->framelen))
- && (DTMFdec_inst->EventQueueEnded[0]))
- { /* We are done */
- /*Remove the event from Queue*/
- WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
- }
- return DTMFdec_inst->framelen;
-
- }
- else
- {
- if ((DTMFdec_inst->EventQueueEnded[0]) || (DTMFdec_inst->EventQueue[1] > -1))
- {
- /*
- * Less than frameLen to play and end of event or already received next event.
-             * Give out a whole frame size of audio to simplify things.
- */
-
- /*Remove the event from Queue*/
- WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
- DTMFdec_inst->CurrentPLCtime = 0;
-
- return DTMFdec_inst->framelen;
-
- }
- else
- {
- /* Less than frameLen to play and not end of event. */
- DTMFdec_inst->CurrentPLCtime = (int16_t) (currTimeStamp
- - DTMFdec_inst->EventQueueEndTime[0]);
-
- if ((DTMFdec_inst->CurrentPLCtime > DTMFdec_inst->MaxPLCtime)
- || (DTMFdec_inst->CurrentPLCtime < -DTMFdec_inst->MaxPLCtime))
- {
- /*Remove the event from queue*/
- WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
- DTMFdec_inst->CurrentPLCtime = 0;
- }
-
- /* If we have a new event that it's time to play */
- if ((DTMFdec_inst->EventQueue[1] > -1) && (DTMFdec_inst->EventQueueStartTime[1]
- >= (currTimeStamp + DTMFdec_inst->framelen)))
- {
- /*Remove the event from queue*/
- WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
- DTMFdec_inst->CurrentPLCtime = 0;
- }
-
- return DTMFdec_inst->framelen;
- }
- }
-}
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.cc
new file mode 100644
index 00000000000..91debee14e8
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.cc
@@ -0,0 +1,226 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
+
+#include <assert.h>
+#include <algorithm> // max
+
+// Modify the code to obtain backwards bit-exactness. Once bit-exactness is no
+// longer required, this #define should be removed (and the code that it
+// enables).
+#define LEGACY_BITEXACT
+
+namespace webrtc {
+
+// The ParseEvent method parses 4 bytes from |payload| according to this format
+// from RFC 4733:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | event |E|R| volume | duration |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// Legend (adapted from RFC 4733)
+// - event: The event field is a number between 0 and 255 identifying a
+// specific telephony event. The buffer will not accept any event
+// numbers larger than 15.
+// - E: If set to a value of one, the "end" bit indicates that this
+// packet contains the end of the event. For long-lasting events
+// that have to be split into segments, only the final packet for
+// the final segment will have the E bit set.
+// - R: Reserved.
+// - volume: For DTMF digits and other events representable as tones, this
+// field describes the power level of the tone, expressed in dBm0
+// after dropping the sign. Power levels range from 0 to -63 dBm0.
+// Thus, larger values denote lower volume. The buffer discards
+// values larger than 36 (i.e., lower than -36 dBm0).
+// - duration: The duration field indicates the duration of the event or segment
+// being reported, in timestamp units, expressed as an unsigned
+// integer in network byte order. For a non-zero value, the event
+// or segment began at the instant identified by the RTP timestamp
+// and has so far lasted as long as indicated by this parameter.
+// The event may or may not have ended. If the event duration
+// exceeds the maximum representable by the duration field, the
+// event is split into several contiguous segments. The buffer will
+// discard zero-duration events.
+//
+int DtmfBuffer::ParseEvent(uint32_t rtp_timestamp,
+ const uint8_t* payload,
+ int payload_length_bytes,
+ DtmfEvent* event) {
+ if (!payload || !event) {
+ return kInvalidPointer;
+ }
+ if (payload_length_bytes < 4) {
+ return kPayloadTooShort;
+ }
+
+ event->event_no = payload[0];
+ event->end_bit = ((payload[1] & 0x80) != 0);
+ event->volume = (payload[1] & 0x3F);
+ event->duration = payload[2] << 8 | payload[3];
+ event->timestamp = rtp_timestamp;
+ return kOK;
+}
+
+// Inserts a DTMF event into the buffer. The event should be parsed from the
+// bit stream using the ParseEvent method above before inserting it in the
+// buffer.
+// DTMF events can be quite long, and in most cases the duration of the event
+// is not known when the first packet describing it is sent. To deal with that,
+// the RFC 4733 specifies that multiple packets are sent for one and the same
+// event as it is being created (typically, as the user is pressing the key).
+// These packets will all share the same start timestamp and event number,
+// while the duration will be the cumulative duration from the start. When
+// inserting a new event, the InsertEvent method tries to find a matching event
+// already in the buffer. If so, the new event is simply merged with the
+// existing one.
+int DtmfBuffer::InsertEvent(const DtmfEvent& event) {
+ if (event.event_no < 0 || event.event_no > 15 ||
+ event.volume < 0 || event.volume > 36 ||
+ event.duration <= 0 || event.duration > 65535) {
+ return kInvalidEventParameters;
+ }
+ DtmfList::iterator it = buffer_.begin();
+ while (it != buffer_.end()) {
+ if (MergeEvents(it, event)) {
+ // A matching event was found and the new event was merged.
+ return kOK;
+ }
+ ++it;
+ }
+ buffer_.push_back(event);
+ // Sort the buffer using CompareEvents to rank the events.
+ buffer_.sort(CompareEvents);
+ return kOK;
+}
+
+bool DtmfBuffer::GetEvent(uint32_t current_timestamp, DtmfEvent* event) {
+ DtmfList::iterator it = buffer_.begin();
+ while (it != buffer_.end()) {
+ // |event_end| is an estimate of where the current event ends. If the end
+ // bit is set, we know that the event ends at |timestamp| + |duration|.
+ uint32_t event_end = it->timestamp + it->duration;
+#ifdef LEGACY_BITEXACT
+ bool next_available = false;
+#endif
+ if (!it->end_bit) {
+ // If the end bit is not set, we allow extrapolation of the event for
+ // some time.
+ event_end += max_extrapolation_samples_;
+ DtmfList::iterator next = it;
+ ++next;
+ if (next != buffer_.end()) {
+ // If there is a next event in the buffer, we will not extrapolate over
+ // the start of that new event.
+ event_end = std::min(event_end, next->timestamp);
+#ifdef LEGACY_BITEXACT
+ next_available = true;
+#endif
+ }
+ }
+ if (current_timestamp >= it->timestamp
+ && current_timestamp <= event_end) { // TODO(hlundin): Change to <.
+ // Found a matching event.
+ if (event) {
+ event->event_no = it->event_no;
+ event->end_bit = it->end_bit;
+ event->volume = it->volume;
+ event->duration = it->duration;
+ event->timestamp = it->timestamp;
+ }
+#ifdef LEGACY_BITEXACT
+ if (it->end_bit &&
+ current_timestamp + frame_len_samples_ >= event_end) {
+ // We are done playing this. Erase the event.
+ buffer_.erase(it);
+ }
+#endif
+ return true;
+ } else if (current_timestamp > event_end) { // TODO(hlundin): Change to >=.
+ // Erase old event. Operation returns a valid pointer to the next element
+ // in the list.
+#ifdef LEGACY_BITEXACT
+ if (!next_available) {
+ if (event) {
+ event->event_no = it->event_no;
+ event->end_bit = it->end_bit;
+ event->volume = it->volume;
+ event->duration = it->duration;
+ event->timestamp = it->timestamp;
+ }
+ it = buffer_.erase(it);
+ return true;
+ } else {
+ it = buffer_.erase(it);
+ }
+#else
+ it = buffer_.erase(it);
+#endif
+ } else {
+ ++it;
+ }
+ }
+ return false;
+}
+
+int DtmfBuffer::SetSampleRate(int fs_hz) {
+ if (fs_hz != 8000 &&
+ fs_hz != 16000 &&
+ fs_hz != 32000 &&
+ fs_hz != 48000) {
+ return kInvalidSampleRate;
+ }
+ max_extrapolation_samples_ = 7 * fs_hz / 100;
+ frame_len_samples_ = fs_hz / 100;
+ return kOK;
+}
+
+// The method returns true if the two events are considered to be the same.
+// They are defined as equal if they share the same timestamp and event number.
+// The special case with long-lasting events that have to be split into segments
+// is not handled in this method. These will be treated as separate events in
+// the buffer.
+bool DtmfBuffer::SameEvent(const DtmfEvent& a, const DtmfEvent& b) {
+ return (a.event_no == b.event_no) && (a.timestamp == b.timestamp);
+}
+
+bool DtmfBuffer::MergeEvents(DtmfList::iterator it, const DtmfEvent& event) {
+ if (SameEvent(*it, event)) {
+ if (!it->end_bit) {
+ // Do not extend the duration of an event for which the end bit was
+ // already received.
+ it->duration = std::max(event.duration, it->duration);
+ }
+ if (event.end_bit) {
+ it->end_bit = true;
+ }
+ return true;
+ } else {
+ return false;
+ }
+}
+
+// Returns true if |a| goes before |b| in the sorting order ("|a| < |b|").
+// The events are ranked using their start timestamp (taking wrap-around into
+// account). In the unlikely situation that two events share the same start
+// timestamp, the event number is used to rank the two. Note that packets
+// that belong to the same events, and therefore share the same start
+// timestamp, have already been merged before the sort method is called.
+bool DtmfBuffer::CompareEvents(const DtmfEvent& a, const DtmfEvent& b) {
+ if (a.timestamp == b.timestamp) {
+ return a.event_no < b.event_no;
+ }
+ // Take wrap-around into account.
+ return (static_cast<uint32_t>(b.timestamp - a.timestamp) < 0xFFFFFFFF / 2);
+}
+} // namespace webrtc
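The RFC 4733 field extraction done by ParseEvent above can be checked in isolation; the snippet below hand-packs a payload and pulls the fields apart with the same shifts and masks (the event, volume and duration values are arbitrary examples):

    #include <cassert>
    #include <cstdint>

    int main() {
      // Hand-packed RFC 4733 payload: event 7, end bit set, volume 17
      // (-17 dBm0), duration 4711 timestamp units.
      const uint8_t payload[4] = {7, 0x80 | 17, 4711 >> 8, 4711 & 0xFF};

      // Same field extraction as DtmfBuffer::ParseEvent above.
      int event_no = payload[0];
      bool end_bit = (payload[1] & 0x80) != 0;
      int volume = payload[1] & 0x3F;
      int duration = (payload[2] << 8) | payload[3];

      assert(event_no == 7 && end_bit && volume == 17 && duration == 4711);
      return 0;
    }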
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.h
index 99c9e6a499b..5dd31c2d2e7 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,94 +8,109 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * Packet buffer for DTMF messages.
- */
-
-#ifndef DTMF_BUFFER_H
-#define DTMF_BUFFER_H
-
-#include "typedefs.h"
-
-#include "neteq_defines.h"
-
-/* Include this code only if ATEVENT (DTMF) is defined in */
-#ifdef NETEQ_ATEVENT_DECODE
-
-#define MAX_DTMF_QUEUE_SIZE 4
-
-typedef struct dtmf_inst_t_
-{
- int16_t MaxPLCtime;
- int16_t CurrentPLCtime;
- int16_t EventQueue[MAX_DTMF_QUEUE_SIZE];
- int16_t EventQueueVolume[MAX_DTMF_QUEUE_SIZE];
- int16_t EventQueueEnded[MAX_DTMF_QUEUE_SIZE];
- uint32_t EventQueueStartTime[MAX_DTMF_QUEUE_SIZE];
- uint32_t EventQueueEndTime[MAX_DTMF_QUEUE_SIZE];
- int16_t EventBufferSize;
- int16_t framelen;
-} dtmf_inst_t;
-
-/****************************************************************************
- * WebRtcNetEQ_DtmfDecoderInit(...)
- *
- * This function initializes a DTMF instance.
- *
- * Input:
- * - DTMF_decinst_t : DTMF instance
- * - fs : The sample rate used for the DTMF
- * - MaxPLCtime : Maximum length for a PLC before zeros should be inserted
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int16_t WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, uint16_t fs,
- int16_t MaxPLCtime);
-
-/****************************************************************************
- * WebRtcNetEQ_DtmfInsertEvent(...)
- *
- * This function decodes a packet with DTMF frames.
- *
- * Input:
- * - DTMFdec_inst : DTMF instance
- * - encoded : Encoded DTMF frame(s)
- * - len : Bytes in encoded vector
- *
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int16_t WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
- const int16_t *encoded, int16_t len,
- uint32_t timeStamp);
-
-/****************************************************************************
- * WebRtcNetEQ_DtmfDecode(...)
- *
- * This function decodes a packet with DTMF frame(s). Output will be the
- * event that should be played for next 10 ms.
- *
- * Input:
- * - DTMFdec_inst : DTMF instance
- * - currTimeStamp : The current playout timestamp
- *
- * Output:
- * - event : Event number to be played
- * - volume : Event volume to be played
- *
- * Return value        : >0 - There is an event to be played
- * 0 - No event to be played
- * -1 - Error
- */
-
-int16_t WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, int16_t *event,
- int16_t *volume, uint32_t currTimeStamp);
-
-#endif /* NETEQ_ATEVENT_DECODE */
-
-#endif /* DTMF_BUFFER_H */
-
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_BUFFER_H_
+
+#include <list>
+#include <string> // size_t
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct DtmfEvent {
+ uint32_t timestamp;
+ int event_no;
+ int volume;
+ int duration;
+ bool end_bit;
+
+ // Constructors
+ DtmfEvent()
+ : timestamp(0),
+ event_no(0),
+ volume(0),
+ duration(0),
+ end_bit(false) {
+ }
+ DtmfEvent(uint32_t ts, int ev, int vol, int dur, bool end)
+ : timestamp(ts),
+ event_no(ev),
+ volume(vol),
+ duration(dur),
+ end_bit(end) {
+ }
+};
+
+// This is the buffer holding DTMF events while waiting for them to be played.
+class DtmfBuffer {
+ public:
+ enum BufferReturnCodes {
+ kOK = 0,
+ kInvalidPointer,
+ kPayloadTooShort,
+ kInvalidEventParameters,
+ kInvalidSampleRate
+ };
+
+ // Set up the buffer for use at sample rate |fs_hz|.
+ explicit DtmfBuffer(int fs_hz) {
+ SetSampleRate(fs_hz);
+ }
+
+ virtual ~DtmfBuffer() {}
+
+ // Flushes the buffer.
+ virtual void Flush() { buffer_.clear(); }
+
+ // Static method to parse 4 bytes from |payload| as a DTMF event (RFC 4733)
+ // and write the parsed information into the struct |event|. Input variable
+ // |rtp_timestamp| is simply copied into the struct.
+ static int ParseEvent(uint32_t rtp_timestamp,
+ const uint8_t* payload,
+ int payload_length_bytes,
+ DtmfEvent* event);
+
+ // Inserts |event| into the buffer. The method looks for a matching event and
+ // merges the two if a match is found.
+ virtual int InsertEvent(const DtmfEvent& event);
+
+ // Checks if a DTMF event should be played at time |current_timestamp|. If so,
+ // the method returns true; otherwise false. The parameters of the event to
+ // play will be written to |event|.
+ virtual bool GetEvent(uint32_t current_timestamp, DtmfEvent* event);
+
+ // Number of events in the buffer.
+ virtual size_t Length() const { return buffer_.size(); }
+
+ virtual bool Empty() const { return buffer_.empty(); }
+
+ // Set a new sample rate.
+ virtual int SetSampleRate(int fs_hz);
+
+ private:
+ typedef std::list<DtmfEvent> DtmfList;
+
+ int max_extrapolation_samples_;
+ int frame_len_samples_; // TODO(hlundin): Remove this later.
+
+ // Compares two events and returns true if they are the same.
+ static bool SameEvent(const DtmfEvent& a, const DtmfEvent& b);
+
+ // Merges |event| to the event pointed out by |it|. The method checks that
+ // the two events are the same (using the SameEvent method), and merges them
+ // if that was the case, returning true. If the events are not the same, false
+ // is returned.
+ bool MergeEvents(DtmfList::iterator it, const DtmfEvent& event);
+
+ // Method used by the sort algorithm to rank events in the buffer.
+ static bool CompareEvents(const DtmfEvent& a, const DtmfEvent& b);
+
+ DtmfList buffer_;
+
+ DISALLOW_COPY_AND_ASSIGN(DtmfBuffer);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_BUFFER_H_
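A rough sketch of how the interface above is meant to be driven: parse an incoming telephone-event payload, insert it, and poll for something to play at the current playout timestamp. HandleDtmfPacket and its parameters are hypothetical glue, not NetEQ code, and error handling is trimmed:

    #include <stdint.h>

    #include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"

    // Hypothetical caller of the DtmfBuffer interface declared above.
    void HandleDtmfPacket(webrtc::DtmfBuffer* buffer,
                          uint32_t rtp_timestamp,
                          const uint8_t* payload,
                          int payload_length_bytes,
                          uint32_t playout_timestamp) {
      webrtc::DtmfEvent event;
      if (webrtc::DtmfBuffer::ParseEvent(rtp_timestamp, payload,
                                         payload_length_bytes,
                                         &event) != webrtc::DtmfBuffer::kOK) {
        return;  // Malformed payload; drop it.
      }
      buffer->InsertEvent(event);  // Merged into an earlier segment if one matches.

      webrtc::DtmfEvent to_play;
      if (buffer->GetEvent(playout_timestamp, &to_play)) {
        // to_play.event_no and to_play.volume would now drive the tone generator.
      }
    }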
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer_unittest.cc
new file mode 100644
index 00000000000..83f981386b7
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer_unittest.cc
@@ -0,0 +1,307 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
+
+#ifdef WIN32
+#include <winsock2.h> // ntohl()
+#else
+#include <arpa/inet.h> // ntohl()
+#endif
+
+#include <iostream>
+
+#include "gtest/gtest.h"
+
+// Modify the tests so that they pass with the modifications done to DtmfBuffer
+// for backwards bit-exactness. Once bit-exactness is no longer required, this
+// #define should be removed (and the code that it enables).
+#define LEGACY_BITEXACT
+
+namespace webrtc {
+
+static int sample_rate_hz = 8000;
+
+static uint32_t MakeDtmfPayload(int event, bool end, int volume, int duration) {
+ uint32_t payload = 0;
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | event |E|R| volume | duration |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ payload |= (event & 0x00FF) << 24;
+ payload |= (end ? 0x00800000 : 0x00000000);
+ payload |= (volume & 0x003F) << 16;
+ payload |= (duration & 0xFFFF);
+ payload = ntohl(payload);
+ return payload;
+}
+
+static bool EqualEvents(const DtmfEvent& a,
+ const DtmfEvent& b) {
+ return (a.duration == b.duration
+ && a.end_bit == b.end_bit
+ && a.event_no == b.event_no
+ && a.timestamp == b.timestamp
+ && a.volume == b.volume);
+}
+
+TEST(DtmfBuffer, CreateAndDestroy) {
+ DtmfBuffer* buffer = new DtmfBuffer(sample_rate_hz);
+ delete buffer;
+}
+
+// Test the event parser.
+TEST(DtmfBuffer, ParseEvent) {
+ int event_no = 7;
+ bool end_bit = true;
+ int volume = 17;
+ int duration = 4711;
+ uint32_t timestamp = 0x12345678;
+ uint32_t payload = MakeDtmfPayload(event_no, end_bit, volume, duration);
+ uint8_t* payload_ptr = reinterpret_cast<uint8_t*>(&payload);
+ DtmfEvent event;
+ EXPECT_EQ(DtmfBuffer::kOK,
+ DtmfBuffer::ParseEvent(timestamp, payload_ptr, sizeof(payload),
+ &event));
+ EXPECT_EQ(duration, event.duration);
+ EXPECT_EQ(end_bit, event.end_bit);
+ EXPECT_EQ(event_no, event.event_no);
+ EXPECT_EQ(timestamp, event.timestamp);
+ EXPECT_EQ(volume, event.volume);
+
+ EXPECT_EQ(DtmfBuffer::kInvalidPointer,
+ DtmfBuffer::ParseEvent(timestamp, NULL, 4, &event));
+
+ EXPECT_EQ(DtmfBuffer::kInvalidPointer,
+ DtmfBuffer::ParseEvent(timestamp, payload_ptr, 4, NULL));
+
+ EXPECT_EQ(DtmfBuffer::kPayloadTooShort,
+ DtmfBuffer::ParseEvent(timestamp, payload_ptr, 3, &event));
+}
+
+TEST(DtmfBuffer, SimpleInsertAndGet) {
+ int event_no = 7;
+ bool end_bit = true;
+ int volume = 17;
+ int duration = 4711;
+ uint32_t timestamp = 0x12345678;
+ DtmfEvent event(timestamp, event_no, volume, duration, end_bit);
+ DtmfBuffer buffer(sample_rate_hz);
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event));
+ EXPECT_EQ(1u, buffer.Length());
+ EXPECT_FALSE(buffer.Empty());
+ DtmfEvent out_event;
+ // Too early to get event.
+ EXPECT_FALSE(buffer.GetEvent(timestamp - 10, &out_event));
+ EXPECT_EQ(1u, buffer.Length());
+ EXPECT_FALSE(buffer.Empty());
+ // Get the event at its starting timestamp.
+ EXPECT_TRUE(buffer.GetEvent(timestamp, &out_event));
+ EXPECT_TRUE(EqualEvents(event, out_event));
+ EXPECT_EQ(1u, buffer.Length());
+ EXPECT_FALSE(buffer.Empty());
+ // Get the event some time into the event.
+ EXPECT_TRUE(buffer.GetEvent(timestamp + duration / 2, &out_event));
+ EXPECT_TRUE(EqualEvents(event, out_event));
+ EXPECT_EQ(1u, buffer.Length());
+ EXPECT_FALSE(buffer.Empty());
+ // Give a "current" timestamp after the event has ended.
+#ifdef LEGACY_BITEXACT
+ EXPECT_TRUE(buffer.GetEvent(timestamp + duration + 10, &out_event));
+#endif
+ EXPECT_FALSE(buffer.GetEvent(timestamp + duration + 10, &out_event));
+ EXPECT_EQ(0u, buffer.Length());
+ EXPECT_TRUE(buffer.Empty());
+}
+
+TEST(DtmfBuffer, MergingPackets) {
+ int event_no = 0;
+ bool end_bit = false;
+ int volume = 17;
+ int duration = 80;
+ uint32_t timestamp = 0x12345678;
+ DtmfEvent event(timestamp, event_no, volume, duration, end_bit);
+ DtmfBuffer buffer(sample_rate_hz);
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event));
+
+ event.duration += 80;
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event));
+
+ event.duration += 80;
+ event.end_bit = true;
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event));
+
+ EXPECT_EQ(1u, buffer.Length());
+
+ DtmfEvent out_event;
+ EXPECT_TRUE(buffer.GetEvent(timestamp, &out_event));
+ EXPECT_TRUE(EqualEvents(event, out_event));
+}
+
+// This test case inserts one shorter event completely overlapped by one longer
+// event. The expected outcome is that only the longer event is played.
+TEST(DtmfBuffer, OverlappingEvents) {
+ int event_no = 0;
+ bool end_bit = true;
+ int volume = 1;
+ int duration = 80;
+ uint32_t timestamp = 0x12345678 + 80;
+ DtmfEvent short_event(timestamp, event_no, volume, duration, end_bit);
+ DtmfBuffer buffer(sample_rate_hz);
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(short_event));
+
+ event_no = 10;
+ end_bit = false;
+ timestamp = 0x12345678;
+ DtmfEvent long_event(timestamp, event_no, volume, duration, end_bit);
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(long_event));
+
+ long_event.duration += 80;
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(long_event));
+
+ long_event.duration += 80;
+ long_event.end_bit = true;
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(long_event));
+
+ EXPECT_EQ(2u, buffer.Length());
+
+ DtmfEvent out_event;
+ // Expect to get the long event.
+ EXPECT_TRUE(buffer.GetEvent(timestamp, &out_event));
+ EXPECT_TRUE(EqualEvents(long_event, out_event));
+ // Expect no more events.
+#ifdef LEGACY_BITEXACT
+ EXPECT_TRUE(buffer.GetEvent(timestamp + long_event.duration + 10,
+ &out_event));
+ EXPECT_TRUE(EqualEvents(long_event, out_event));
+ EXPECT_TRUE(buffer.GetEvent(timestamp + long_event.duration + 10,
+ &out_event));
+ EXPECT_TRUE(EqualEvents(short_event, out_event));
+#else
+ EXPECT_FALSE(buffer.GetEvent(timestamp + long_event.duration + 10,
+ &out_event));
+#endif
+ EXPECT_TRUE(buffer.Empty());
+}
+
+TEST(DtmfBuffer, ExtrapolationTime) {
+ int event_no = 0;
+ bool end_bit = false;
+ int volume = 1;
+ int duration = 80;
+ uint32_t timestamp = 0x12345678;
+ DtmfEvent event1(timestamp, event_no, volume, duration, end_bit);
+ DtmfBuffer buffer(sample_rate_hz);
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event1));
+ EXPECT_EQ(1u, buffer.Length());
+
+ DtmfEvent out_event;
+ // Get the event at the start.
+ EXPECT_TRUE(buffer.GetEvent(timestamp, &out_event));
+ EXPECT_TRUE(EqualEvents(event1, out_event));
+ // Also get the event 100 samples after the end of the event (since we're
+ // missing the end bit).
+ uint32_t timestamp_now = timestamp + duration + 100;
+ EXPECT_TRUE(buffer.GetEvent(timestamp_now, &out_event));
+ EXPECT_TRUE(EqualEvents(event1, out_event));
+ // Insert another event starting back-to-back with the previous event.
+ timestamp += duration;
+ event_no = 1;
+ DtmfEvent event2(timestamp, event_no, volume, duration, end_bit);
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event2));
+ EXPECT_EQ(2u, buffer.Length());
+ // Now we expect to get the new event when supplying |timestamp_now|.
+ EXPECT_TRUE(buffer.GetEvent(timestamp_now, &out_event));
+ EXPECT_TRUE(EqualEvents(event2, out_event));
+  // Expect the first event to be erased now.
+ EXPECT_EQ(1u, buffer.Length());
+ // Move |timestamp_now| to more than 560 samples after the end of the second
+ // event. Expect that event to be erased.
+ timestamp_now = timestamp + duration + 600;
+#ifdef LEGACY_BITEXACT
+ EXPECT_TRUE(buffer.GetEvent(timestamp_now, &out_event));
+#endif
+ EXPECT_FALSE(buffer.GetEvent(timestamp_now, &out_event));
+ EXPECT_TRUE(buffer.Empty());
+}
+
+TEST(DtmfBuffer, TimestampWraparound) {
+ int event_no = 0;
+ bool end_bit = true;
+ int volume = 1;
+ int duration = 80;
+ uint32_t timestamp1 = 0xFFFFFFFF - duration;
+ DtmfEvent event1(timestamp1, event_no, volume, duration, end_bit);
+ uint32_t timestamp2 = 0;
+ DtmfEvent event2(timestamp2, event_no, volume, duration, end_bit);
+ DtmfBuffer buffer(sample_rate_hz);
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event1));
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event2));
+ EXPECT_EQ(2u, buffer.Length());
+ DtmfEvent out_event;
+ EXPECT_TRUE(buffer.GetEvent(timestamp1, &out_event));
+ EXPECT_TRUE(EqualEvents(event1, out_event));
+#ifdef LEGACY_BITEXACT
+ EXPECT_EQ(1u, buffer.Length());
+#else
+ EXPECT_EQ(2u, buffer.Length());
+#endif
+
+ buffer.Flush();
+ // Reverse the insert order. Expect same results.
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event2));
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event1));
+ EXPECT_EQ(2u, buffer.Length());
+ EXPECT_TRUE(buffer.GetEvent(timestamp1, &out_event));
+ EXPECT_TRUE(EqualEvents(event1, out_event));
+#ifdef LEGACY_BITEXACT
+ EXPECT_EQ(1u, buffer.Length());
+#else
+ EXPECT_EQ(2u, buffer.Length());
+#endif
+}
+
+TEST(DtmfBuffer, InvalidEvents) {
+ int event_no = 0;
+ bool end_bit = true;
+ int volume = 1;
+ int duration = 80;
+ uint32_t timestamp = 0x12345678;
+ DtmfEvent event(timestamp, event_no, volume, duration, end_bit);
+ DtmfBuffer buffer(sample_rate_hz);
+
+ // Invalid event number.
+ event.event_no = -1;
+ EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
+ event.event_no = 16;
+ EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
+ event.event_no = 0; // Valid value;
+
+ // Invalid volume.
+ event.volume = -1;
+ EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
+ event.volume = 37;
+ EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
+ event.volume = 0; // Valid value;
+
+ // Invalid duration.
+ event.duration = -1;
+ EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
+ event.duration = 0;
+ EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
+ event.duration = 0xFFFF + 1;
+ EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
+ event.duration = 1; // Valid value;
+
+ // Finish with a valid event, just to verify that all is ok.
+ EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event));
+}
+} // namespace webrtc
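The TimestampWraparound test above depends on the modular ordering used by DtmfBuffer::CompareEvents; the standalone check below applies the same arithmetic to the test's own timestamps (0xFFFFFFFF - 80 and 0):

    #include <cassert>
    #include <cstdint>

    // Returns true if an event starting at |a| should be ordered before one
    // starting at |b|, using the same wrap-around arithmetic as CompareEvents.
    bool StartsBefore(uint32_t a, uint32_t b) {
      return static_cast<uint32_t>(b - a) < 0xFFFFFFFF / 2;
    }

    int main() {
      const uint32_t t1 = 0xFFFFFFFF - 80;  // Just before the 32-bit wrap.
      const uint32_t t2 = 0;                // Just after the wrap.
      assert(StartsBefore(t1, t2));   // t2 - t1 == 81, a small forward step.
      assert(!StartsBefore(t2, t1));  // t1 - t2 is huge, so t1 is not "later".
      return 0;
    }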
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.cc
new file mode 100644
index 00000000000..34c615d70f6
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.cc
@@ -0,0 +1,192 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This class provides a generator for DTMF tones. The tone generation is based
+// on a sinusoid recursion. Each sinusoid is generated using a recursion
+// formula; x[n] = a * x[n-1] - x[n-2], where the coefficient
+// a = 2*cos(2*pi*f/fs). The recursion is started with x[-1] = 0 and
+// x[-2] = sin(2*pi*f/fs). (Note that with this initialization, the resulting
+// sinusoid gets a "negative" rotation; x[n] = sin(-2*pi*f/fs * n + phi), but
+// kept this way due to historical reasons.)
+// TODO(hlundin): Change to positive rotation?
+//
+// Each key on the telephone keypad corresponds to an "event", 0-15. Each event
+// is mapped to a tone pair, with a low and a high frequency. There are four
+// low and four high frequencies, each corresponding to a row and column,
+// respectively, on the keypad as illustrated below.
+//
+// 1209 Hz 1336 Hz 1477 Hz 1633 Hz
+// 697 Hz 1 2 3 12
+// 770 Hz 4 5 6 13
+// 852 Hz 7 8 9 14
+// 941 Hz 10 0 11 15
+
+#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
+
+#include <assert.h>
+
+namespace webrtc {
+
+// The filter coefficient a = 2*cos(2*pi*f/fs) for the low frequency tone, for
+// sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0 through 15.
+// Values are in Q14.
+const int DtmfToneGenerator::kCoeff1[4][16] = {
+ { 24219, 27980, 27980, 27980, 26956, 26956, 26956, 25701, 25701, 25701,
+ 24219, 24219, 27980, 26956, 25701, 24219 },
+ { 30556, 31548, 31548, 31548, 31281, 31281, 31281, 30951, 30951, 30951,
+ 30556, 30556, 31548, 31281, 30951, 30556 },
+ { 32210, 32462, 32462, 32462, 32394, 32394, 32394, 32311, 32311, 32311,
+ 32210, 32210, 32462, 32394, 32311, 32210 },
+ { 32520, 32632, 32632, 32632, 32602, 32602, 32602, 32564, 32564, 32564,
+ 32520, 32520, 32632, 32602, 32564, 32520 } };
+
+// The filter coefficient a = 2*cos(2*pi*f/fs) for the high frequency tone, for
+// sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0 through 15.
+// Values are in Q14.
+const int DtmfToneGenerator::kCoeff2[4][16] = {
+ { 16325, 19073, 16325, 13085, 19073, 16325, 13085, 19073, 16325, 13085,
+ 19073, 13085, 9315, 9315, 9315, 9315},
+ { 28361, 29144, 28361, 27409, 29144, 28361, 27409, 29144, 28361, 27409,
+ 29144, 27409, 26258, 26258, 26258, 26258},
+ { 31647, 31849, 31647, 31400, 31849, 31647, 31400, 31849, 31647, 31400,
+ 31849, 31400, 31098, 31098, 31098, 31098},
+ { 32268, 32359, 32268, 32157, 32359, 32268, 32157, 32359, 32268, 32157,
+ 32359, 32157, 32022, 32022, 32022, 32022} };
+
+// The initialization value x[-2] = sin(2*pi*f/fs) for the low frequency tone,
+// for sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0-15.
+// Values are in Q14.
+const int DtmfToneGenerator::kInitValue1[4][16] = {
+ { 11036, 8528, 8528, 8528, 9315, 9315, 9315, 10163, 10163, 10163, 11036,
+ 11036, 8528, 9315, 10163, 11036},
+ { 5918, 4429, 4429, 4429, 4879, 4879, 4879, 5380, 5380, 5380, 5918, 5918,
+ 4429, 4879, 5380, 5918},
+ { 3010, 2235, 2235, 2235, 2468, 2468, 2468, 2728, 2728, 2728, 3010, 3010,
+ 2235, 2468, 2728, 3010},
+ { 2013, 1493, 1493, 1493, 1649, 1649, 1649, 1823, 1823, 1823, 2013, 2013,
+ 1493, 1649, 1823, 2013 } };
+
+// The initialization value x[-2] = sin(2*pi*f/fs) for the high frequency tone,
+// for sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0-15.
+// Values are in Q14.
+const int DtmfToneGenerator::kInitValue2[4][16] = {
+ { 14206, 13323, 14206, 15021, 13323, 14206, 15021, 13323, 14206, 15021,
+ 13323, 15021, 15708, 15708, 15708, 15708},
+ { 8207, 7490, 8207, 8979, 7490, 8207, 8979, 7490, 8207, 8979, 7490, 8979,
+ 9801, 9801, 9801, 9801},
+ { 4249, 3853, 4249, 4685, 3853, 4249, 4685, 3853, 4249, 4685, 3853, 4685,
+ 5164, 5164, 5164, 5164},
+ { 2851, 2582, 2851, 3148, 2582, 2851, 3148, 2582, 2851, 3148, 2582, 3148,
+ 3476, 3476, 3476, 3476} };
+
+// Amplitude multipliers for volume values 0 through 36, corresponding to
+// 0 dBm0 through -36 dBm0. Values are in Q14.
+const int DtmfToneGenerator::kAmplitude[37] = {
+ 16141, 14386, 12821, 11427, 10184, 9077, 8090, 7210, 6426, 5727, 5104, 4549,
+ 4054, 3614, 3221, 2870, 2558, 2280, 2032, 1811, 1614, 1439, 1282, 1143,
+ 1018, 908, 809, 721, 643, 573, 510, 455, 405, 361, 322, 287, 256 };
+
+// Constructor.
+DtmfToneGenerator::DtmfToneGenerator()
+ : initialized_(false),
+ coeff1_(0),
+ coeff2_(0),
+ amplitude_(0) {
+}
+
+// Initialize the DTMF generator with sample rate fs Hz (8000, 16000, 32000,
+// 48000), event (0-15) and attenuation (0-36 dB).
+// Returns 0 on success, otherwise an error code.
+int DtmfToneGenerator::Init(int fs, int event, int attenuation) {
+ initialized_ = false;
+ int fs_index;
+ if (fs == 8000) {
+ fs_index = 0;
+ } else if (fs == 16000) {
+ fs_index = 1;
+ } else if (fs == 32000) {
+ fs_index = 2;
+ } else if (fs == 48000) {
+ fs_index = 3;
+ } else {
+ assert(false);
+    fs_index = 1;  // Default to 16000 Hz.
+ }
+
+ if (event < 0 || event > 15) {
+ return kParameterError; // Invalid event number.
+ }
+
+ if (attenuation < 0 || attenuation > 36) {
+ return kParameterError; // Invalid attenuation.
+ }
+
+ // Look up oscillator coefficient for low and high frequencies.
+ coeff1_ = kCoeff1[fs_index][event];
+ coeff2_ = kCoeff2[fs_index][event];
+ // Look up amplitude multiplier.
+ amplitude_ = kAmplitude[attenuation];
+ // Initialize sample history.
+ sample_history1_[0] = kInitValue1[fs_index][event];
+ sample_history1_[1] = 0;
+ sample_history2_[0] = kInitValue2[fs_index][event];
+ sample_history2_[1] = 0;
+
+ initialized_ = true;
+ return 0;
+}
+
+// Reset tone generator to uninitialized state.
+void DtmfToneGenerator::Reset() {
+ initialized_ = false;
+}
+
+// Generate num_samples of DTMF signal and write to |output|.
+int DtmfToneGenerator::Generate(int num_samples,
+ AudioMultiVector* output) {
+ if (!initialized_) {
+ return kNotInitialized;
+ }
+
+ if (num_samples < 0 || !output) {
+ return kParameterError;
+ }
+ assert(output->Channels() == 1); // Not adapted for multi-channel yet.
+ if (output->Channels() != 1) {
+ return kStereoNotSupported;
+ }
+
+ output->AssertSize(num_samples);
+ for (int i = 0; i < num_samples; ++i) {
+ // Use recursion formula y[n] = a * y[n - 1] - y[n - 2].
+ int16_t temp_val_low = ((coeff1_ * sample_history1_[1] + 8192) >> 14)
+ - sample_history1_[0];
+ int16_t temp_val_high = ((coeff2_ * sample_history2_[1] + 8192) >> 14)
+ - sample_history2_[0];
+
+ // Update recursion memory.
+ sample_history1_[0] = sample_history1_[1];
+ sample_history1_[1] = temp_val_low;
+ sample_history2_[0] = sample_history2_[1];
+ sample_history2_[1] = temp_val_high;
+
+ // Attenuate the low frequency tone 3 dB.
+ int32_t temp_val = kAmpMultiplier * temp_val_low + (temp_val_high << 15);
+ // Normalize the signal to Q14 with proper rounding.
+ temp_val = (temp_val + 16384) >> 15;
+ // Scale the signal to correct volume.
+ (*output)[0][i] =
+ static_cast<int16_t>((temp_val * amplitude_ + 8192) >> 14);
+ }
+
+ return num_samples;
+}
+
+} // namespace webrtc
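Stripped of the Q14 scaling, the oscillator inside Generate() above is the plain two-tap sine recursion; this floating-point sketch runs the same recursion for the 697 Hz tone at 8 kHz and checks it against sin(), which, with the initialization used in Init(), comes out as sin(-2*pi*f/fs*(n+1)):

    #include <cassert>
    #include <cmath>

    int main() {
      const double pi = 3.14159265358979323846;
      const double f = 697.0;    // Low tone of DTMF event 1.
      const double fs = 8000.0;  // Sample rate in Hz.
      const double a = 2.0 * std::cos(2.0 * pi * f / fs);  // Recursion coefficient.

      // History initialized as in Init(): x[-1] = 0, x[-2] = sin(2*pi*f/fs).
      double x_prev2 = std::sin(2.0 * pi * f / fs);
      double x_prev1 = 0.0;

      for (int n = 0; n < 100; ++n) {
        const double x = a * x_prev1 - x_prev2;  // x[n] = a*x[n-1] - x[n-2].
        // With this start, the recursion produces sin(-2*pi*f/fs * (n + 1)).
        assert(std::fabs(x - std::sin(-2.0 * pi * f / fs * (n + 1))) < 1e-9);
        x_prev2 = x_prev1;
        x_prev1 = x;
      }
      return 0;
    }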
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h
new file mode 100644
index 00000000000..fc1e5e4ad3d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_TONE_GENERATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_TONE_GENERATOR_H_
+
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// This class provides a generator for DTMF tones.
+class DtmfToneGenerator {
+ public:
+ enum ReturnCodes {
+ kNotInitialized = -1,
+ kParameterError = -2,
+ kStereoNotSupported = -3,
+ };
+
+ DtmfToneGenerator();
+ virtual ~DtmfToneGenerator() {}
+ virtual int Init(int fs, int event, int attenuation);
+ virtual void Reset();
+ virtual int Generate(int num_samples, AudioMultiVector* output);
+ virtual bool initialized() const { return initialized_; }
+
+ private:
+ static const int kCoeff1[4][16]; // 1st oscillator model coefficient table.
+ static const int kCoeff2[4][16]; // 2nd oscillator model coefficient table.
+ static const int kInitValue1[4][16]; // Initialization for 1st oscillator.
+ static const int kInitValue2[4][16]; // Initialization for 2nd oscillator.
+ static const int kAmplitude[37]; // Amplitude for 0 through -36 dBm0.
+ static const int16_t kAmpMultiplier = 23171; // 3 dB attenuation (in Q15).
+
+ bool initialized_; // True if generator is initialized properly.
+ int coeff1_; // 1st oscillator coefficient for this event.
+ int coeff2_; // 2nd oscillator coefficient for this event.
+ int amplitude_; // Amplitude for this event.
+ int16_t sample_history1_[2]; // Last 2 samples for the 1st oscillator.
+ int16_t sample_history2_[2]; // Last 2 samples for the 2nd oscillator.
+
+ DISALLOW_COPY_AND_ASSIGN(DtmfToneGenerator);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_TONE_GENERATOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc
new file mode 100644
index 00000000000..94f79dc3456
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc
@@ -0,0 +1,142 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for DtmfToneGenerator class.
+
+#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
+
+#include <math.h>
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+
+namespace webrtc {
+
+TEST(DtmfToneGenerator, CreateAndDestroy) {
+ DtmfToneGenerator* tone_gen = new DtmfToneGenerator();
+ delete tone_gen;
+}
+
+TEST(DtmfToneGenerator, TestErrors) {
+ DtmfToneGenerator tone_gen;
+ const int kNumSamples = 10;
+ AudioMultiVector signal(1); // One channel.
+
+ // Try to generate tones without initializing.
+ EXPECT_EQ(DtmfToneGenerator::kNotInitialized,
+ tone_gen.Generate(kNumSamples, &signal));
+
+ const int fs = 16000; // Valid sample rate.
+ const int event = 7; // Valid event.
+ const int attenuation = 0; // Valid attenuation.
+ // Initialize with invalid event -1.
+ EXPECT_EQ(DtmfToneGenerator::kParameterError,
+ tone_gen.Init(fs, -1, attenuation));
+ // Initialize with invalid event 16.
+ EXPECT_EQ(DtmfToneGenerator::kParameterError,
+ tone_gen.Init(fs, 16, attenuation));
+ // Initialize with invalid attenuation -1.
+ EXPECT_EQ(DtmfToneGenerator::kParameterError, tone_gen.Init(fs, event, -1));
+ // Initialize with invalid attenuation 37.
+ EXPECT_EQ(DtmfToneGenerator::kParameterError, tone_gen.Init(fs, event, 37));
+ EXPECT_FALSE(tone_gen.initialized()); // Should still be uninitialized.
+
+ // Initialize with valid parameters.
+ ASSERT_EQ(0, tone_gen.Init(fs, event, attenuation));
+ EXPECT_TRUE(tone_gen.initialized());
+ // Negative number of samples.
+ EXPECT_EQ(DtmfToneGenerator::kParameterError, tone_gen.Generate(-1, &signal));
+ // NULL pointer to destination.
+ EXPECT_EQ(DtmfToneGenerator::kParameterError,
+ tone_gen.Generate(kNumSamples, NULL));
+}
+
+TEST(DtmfToneGenerator, TestTones) {
+ DtmfToneGenerator tone_gen;
+ const int kAttenuation = 0;
+ const int kNumSamples = 10;
+ AudioMultiVector signal(1); // One channel.
+
+ // Low and high frequencies for events 0 through 15.
+ const double low_freq_hz[] = { 941.0, 697.0, 697.0, 697.0, 770.0, 770.0,
+ 770.0, 852.0, 852.0, 852.0, 941.0, 941.0, 697.0, 770.0, 852.0, 941.0 };
+ const double hi_freq_hz[] = { 1336.0, 1209.0, 1336.0, 1477.0, 1209.0, 1336.0,
+ 1477.0, 1209.0, 1336.0, 1477.0, 1209.0, 1477.0, 1633.0, 1633.0, 1633.0,
+ 1633.0 };
+ const double attenuate_3dB = 23171.0 / 32768; // 3 dB attenuation.
+ const double base_attenuation = 16141.0 / 16384.0; // This is the attenuation
+ // applied to all cases.
+ const int fs_vec[] = { 8000, 16000, 32000, 48000 };
+ for (int f = 0; f < 4; ++f) {
+ int fs = fs_vec[f];
+ for (int event = 0; event <= 15; ++event) {
+ std::ostringstream ss;
+ ss << "Checking event " << event << " at sample rate " << fs;
+ SCOPED_TRACE(ss.str());
+ ASSERT_EQ(0, tone_gen.Init(fs, event, kAttenuation));
+ EXPECT_TRUE(tone_gen.initialized());
+ EXPECT_EQ(kNumSamples, tone_gen.Generate(kNumSamples, &signal));
+
+ double f1 = low_freq_hz[event];
+ double f2 = hi_freq_hz[event];
+ const double pi = 3.14159265358979323846;
+
+ for (int n = 0; n < kNumSamples; ++n) {
+ double x = attenuate_3dB * sin(2.0 * pi * f1 / fs * (-n - 1))
+ + sin(2.0 * pi * f2 / fs * (-n - 1));
+ x *= base_attenuation;
+ x = ldexp(x, 14); // Scale to Q14.
+ static const int kChannel = 0;
+ EXPECT_NEAR(x, static_cast<double>(signal[kChannel][n]), 25);
+ }
+
+ tone_gen.Reset();
+ EXPECT_FALSE(tone_gen.initialized());
+ }
+ }
+}
+
+TEST(DtmfToneGenerator, TestAmplitudes) {
+ DtmfToneGenerator tone_gen;
+ const int kNumSamples = 10;
+ AudioMultiVector signal(1); // One channel.
+ AudioMultiVector ref_signal(1); // One channel.
+
+ const int fs_vec[] = { 8000, 16000, 32000, 48000 };
+ const int event_vec[] = { 0, 4, 9, 13 }; // Test a few events.
+ for (int f = 0; f < 4; ++f) {
+ int fs = fs_vec[f];
+ int event = event_vec[f];
+ // Create full-scale reference.
+ ASSERT_EQ(0, tone_gen.Init(fs, event, 0)); // 0 attenuation.
+ EXPECT_EQ(kNumSamples, tone_gen.Generate(kNumSamples, &ref_signal));
+ // Test every 5 steps (to save time).
+ for (int attenuation = 1; attenuation <= 36; attenuation += 5) {
+ std::ostringstream ss;
+ ss << "Checking event " << event << " at sample rate " << fs;
+ ss << "; attenuation " << attenuation;
+ SCOPED_TRACE(ss.str());
+ ASSERT_EQ(0, tone_gen.Init(fs, event, attenuation));
+ EXPECT_EQ(kNumSamples, tone_gen.Generate(kNumSamples, &signal));
+ for (int n = 0; n < kNumSamples; ++n) {
+ double attenuation_factor =
+ pow(10, -static_cast<double>(attenuation)/20);
+ // Verify that the attenuation is correct.
+ static const int kChannel = 0;
+ EXPECT_NEAR(attenuation_factor * ref_signal[kChannel][n],
+ signal[kChannel][n], 2);
+ }
+
+ tone_gen.Reset();
+ }
+ }
+}
+
+} // namespace webrtc
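The attenuation factor pow(10, -attenuation/20) used in TestAmplitudes above is also how the kAmplitude table in dtmf_tone_generator.cc appears to be constructed: each entry is the 0 dBm0 value 16141 scaled down by that factor. A quick standalone check of a couple of entries (treating that construction as an observation, not a documented guarantee):

    #include <cassert>
    #include <cmath>

    int main() {
      // Entries of DtmfToneGenerator::kAmplitude (Q14), copied from
      // dtmf_tone_generator.cc above.
      const int kAmp0 = 16141;   // 0 dBm0.
      const int kAmp20 = 1614;   // -20 dBm0.
      const int kAmp36 = 256;    // -36 dBm0.

      // Each entry should be the 0 dBm0 value scaled by 10^(-attenuation/20).
      assert(std::lround(kAmp0 * std::pow(10.0, -20.0 / 20.0)) == kAmp20);
      assert(std::lround(kAmp0 * std::pow(10.0, -36.0 / 20.0)) == kAmp36);
      return 0;
    }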
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c
deleted file mode 100644
index 8ea413c76f1..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c
+++ /dev/null
@@ -1,367 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the DTMF tone generator and its parameters.
- *
- * A sinusoid is generated using the recursive oscillator model
- *
- * y[n] = sin(w*n + phi) = 2*cos(w) * y[n-1] - y[n-2]
- * = a * y[n-1] - y[n-2]
- *
- * initialized with
- * y[-2] = 0
- * y[-1] = sin(w)
- *
- * A DTMF signal is a combination of two sinusoids, depending
- * on which event is sent (i.e, which key is pressed). The following
- * table maps each key (event codes in parentheses) into two tones:
- *
- * 1209 Hz 1336 Hz 1477 Hz 1633 Hz
- * 697 Hz 1 (ev. 1) 2 (ev. 2) 3 (ev. 3) A (ev. 12)
- * 770 Hz 4 (ev. 4) 5 (ev. 5) 6 (ev. 6) B (ev. 13)
- * 852 Hz 7 (ev. 7) 8 (ev. 8) 9 (ev. 9) C (ev. 14)
- * 941 Hz * (ev. 10) 0 (ev. 0) # (ev. 11) D (ev. 15)
- *
- * The two tones are added to form the DTMF signal.
- *
- */
-
-#include "dtmf_tonegen.h"
-
-#include "signal_processing_library.h"
-
-#include "neteq_error_codes.h"
-
-#ifdef NETEQ_ATEVENT_DECODE
-/* Must compile NetEQ with DTMF support to enable the functionality */
-
-/*******************/
-/* Constant tables */
-/*******************/
-
-/*
- * All tables corresponding to the oscillator model are organized so that
- * the coefficients for a specific frequency is found in the same position
- * in every table. The positions for the tones follow this layout:
- *
- * dummyVector[8] =
- * {
- * 697 Hz, 770 Hz, 852 Hz, 941 Hz,
- * 1209 Hz, 1336 Hz, 1477 Hz, 1633 Hz
- * };
- */
-
-/*
- * Tables for the constant a = 2*cos(w) = 2*cos(2*pi*f/fs)
- * in the oscillator model, for 8, 16, 32 and 48 kHz sample rate.
- * Table values in Q14.
- */
-
-const int16_t WebRtcNetEQ_dtfm_aTbl8Khz[8] =
-{
- 27980, 26956, 25701, 24219,
- 19073, 16325, 13085, 9315
-};
-
-#ifdef NETEQ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_aTbl16Khz[8]=
-{
- 31548, 31281, 30951, 30556,
- 29144, 28361, 27409, 26258
-};
-#endif
-
-#ifdef NETEQ_32KHZ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_aTbl32Khz[8]=
-{
- 32462, 32394, 32311, 32210,
- 31849, 31647, 31400, 31098
-};
-#endif
-
-#ifdef NETEQ_48KHZ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_aTbl48Khz[8]=
-{
- 32632, 32602, 32564, 32520,
- 32359, 32268, 32157, 32022
-};
-#endif
-
-/*
- * Initialization values y[-1] = sin(w) = sin(2*pi*f/fs), for 8, 16, 32 and 48 kHz sample rate.
- * Table values in Q14.
- */
-
-const int16_t WebRtcNetEQ_dtfm_yInitTab8Khz[8] =
-{
- 8528, 9315, 10163, 11036,
-    13323, 14206, 15021, 15708
-};
-
-#ifdef NETEQ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_yInitTab16Khz[8]=
-{
- 4429, 4879, 5380, 5918,
- 7490, 8207, 8979, 9801
-};
-#endif
-
-#ifdef NETEQ_32KHZ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_yInitTab32Khz[8]=
-{
- 2235, 2468, 2728, 3010,
- 3853, 4249, 4685, 5164
-};
-#endif
-
-#ifdef NETEQ_48KHZ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_yInitTab48Khz[8]=
-{
- 1493, 1649, 1823, 2013,
- 2582, 2851, 3148, 3476
-};
-#endif
-
-/* Volume in dBm0 from 0 to -63, where 0 is the first table entry.
- Everything below -36 is discarded, wherefore the table stops at -36.
- Table entries are in Q14.
- */
-
-const int16_t WebRtcNetEQ_dtfm_dBm0[37] = { 16141, 14386, 12821, 11427, 10184, 9077, 8090,
- 7210, 6426, 5727, 5104, 4549, 4054, 3614,
- 3221, 2870, 2558, 2280, 2032, 1811, 1614,
- 1439, 1282, 1143, 1018, 908, 809, 721, 643,
- 573, 510, 455, 405, 361, 322, 287, 256 };
-
-/****************************************************************************
- * WebRtcNetEQ_DTMFGenerate(...)
- *
- * Generate 10 ms DTMF signal according to input parameters.
- *
- * Input:
- * - DTMFdecInst : DTMF instance
- * - value : DTMF event number (0-15)
- * - volume : Volume of generated signal (0-36)
- * Volume is given in negative dBm0, i.e., volume == 0
- * means 0 dBm0 while volume == 36 mean -36 dBm0.
- * - sampFreq : Sample rate in Hz
- *
- * Output:
- * - signal : Pointer to vector where DTMF signal is stored;
- * Vector must be at least sampFreq/100 samples long.
- * - DTMFdecInst : Updated DTMF instance
- *
- * Return value : >0 - Number of samples written to signal
- * : <0 - error
- */
-
-int16_t WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst, int16_t value,
- int16_t volume, int16_t *signal,
- uint16_t sampFreq, int16_t extFrameLen)
-{
- const int16_t *aTbl; /* pointer to a-coefficient table */
- const int16_t *yInitTable; /* pointer to initialization value table */
- int16_t a1 = 0; /* a-coefficient for first tone (low tone) */
- int16_t a2 = 0; /* a-coefficient for second tone (high tone) */
- int i;
- int frameLen; /* number of samples to generate */
- int lowIndex = 0; /* Default to avoid compiler warnings. */
- int highIndex = 4; /* Default to avoid compiler warnings. */
- int32_t tempVal;
- int16_t tempValLow;
- int16_t tempValHigh;
-
- /* Sanity check for volume */
- if ((volume < 0) || (volume > 36))
- {
- return DTMF_DEC_PARAMETER_ERROR;
- }
-
- /* Sanity check for extFrameLen */
- if (extFrameLen < -1)
- {
- return DTMF_DEC_PARAMETER_ERROR;
- }
-
- /* Select oscillator coefficient tables based on sample rate */
- if (sampFreq == 8000)
- {
- aTbl = WebRtcNetEQ_dtfm_aTbl8Khz;
- yInitTable = WebRtcNetEQ_dtfm_yInitTab8Khz;
- frameLen = 80;
-#ifdef NETEQ_WIDEBAND
- }
- else if (sampFreq == 16000)
- {
- aTbl = WebRtcNetEQ_dtfm_aTbl16Khz;
- yInitTable = WebRtcNetEQ_dtfm_yInitTab16Khz;
- frameLen = 160;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- }
- else if (sampFreq == 32000)
- {
- aTbl = WebRtcNetEQ_dtfm_aTbl32Khz;
- yInitTable = WebRtcNetEQ_dtfm_yInitTab32Khz;
- frameLen = 320;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- }
- else if (sampFreq == 48000)
- {
- aTbl = WebRtcNetEQ_dtfm_aTbl48Khz;
- yInitTable = WebRtcNetEQ_dtfm_yInitTab48Khz;
- frameLen = 480;
-#endif
- }
- else
- {
- /* unsupported sample rate */
- return DTMF_GEN_UNKNOWN_SAMP_FREQ;
- }
-
- if (extFrameLen >= 0)
- {
- frameLen = extFrameLen;
- }
-
- /* select low frequency based on event value */
- switch (value)
- {
- case 1:
- case 2:
- case 3:
- case 12: /* first row on keypad */
- {
- lowIndex = 0; /* low frequency: 697 Hz */
- break;
- }
- case 4:
- case 5:
- case 6:
- case 13: /* second row on keypad */
- {
- lowIndex = 1; /* low frequency: 770 Hz */
- break;
- }
- case 7:
- case 8:
- case 9:
- case 14: /* third row on keypad */
- {
- lowIndex = 2; /* low frequency: 852 Hz */
- break;
- }
- case 0:
- case 10:
- case 11:
- case 15: /* fourth row on keypad */
- {
- lowIndex = 3; /* low frequency: 941 Hz */
- break;
- }
- default:
- {
- return DTMF_DEC_PARAMETER_ERROR;
- }
- } /* end switch */
-
- /* select high frequency based on event value */
- switch (value)
- {
- case 1:
- case 4:
- case 7:
- case 10: /* first column on keypad */
- {
- highIndex = 4; /* high frequency: 1209 Hz */
- break;
- }
- case 2:
- case 5:
- case 8:
- case 0: /* second column on keypad */
- {
- highIndex = 5;/* high frequency: 1336 Hz */
- break;
- }
- case 3:
- case 6:
- case 9:
- case 11: /* third column on keypad */
- {
- highIndex = 6;/* high frequency: 1477 Hz */
- break;
- }
- case 12:
- case 13:
- case 14:
- case 15: /* fourth column on keypad (special) */
- {
- highIndex = 7;/* high frequency: 1633 Hz */
- break;
- }
- } /* end switch */
-
- /* select coefficients based on results from switches above */
- a1 = aTbl[lowIndex]; /* coefficient for first (low) tone */
- a2 = aTbl[highIndex]; /* coefficient for second (high) tone */
-
- if (DTMFdecInst->reinit)
- {
- /* set initial values for the recursive model */
- DTMFdecInst->oldOutputLow[0] = yInitTable[lowIndex];
- DTMFdecInst->oldOutputLow[1] = 0;
- DTMFdecInst->oldOutputHigh[0] = yInitTable[highIndex];
- DTMFdecInst->oldOutputHigh[1] = 0;
-
- /* reset reinit flag */
- DTMFdecInst->reinit = 0;
- }
-
- /* generate signal sample by sample */
- for (i = 0; i < frameLen; i++)
- {
-
-        /* Use the recursion formula y[n] = a*y[n-1] - y[n-2] */
- tempValLow
- = (int16_t) (((WEBRTC_SPL_MUL_16_16(a1, DTMFdecInst->oldOutputLow[1])
- + 8192) >> 14) - DTMFdecInst->oldOutputLow[0]);
- tempValHigh
- = (int16_t) (((WEBRTC_SPL_MUL_16_16(a2, DTMFdecInst->oldOutputHigh[1])
- + 8192) >> 14) - DTMFdecInst->oldOutputHigh[0]);
-
- /* Update recursion memory */
- DTMFdecInst->oldOutputLow[0] = DTMFdecInst->oldOutputLow[1];
- DTMFdecInst->oldOutputLow[1] = tempValLow;
- DTMFdecInst->oldOutputHigh[0] = DTMFdecInst->oldOutputHigh[1];
- DTMFdecInst->oldOutputHigh[1] = tempValHigh;
-
- /* scale high tone with 32768 (15 left shifts)
- and low tone with 23171 (3dB lower than high tone) */
- tempVal = WEBRTC_SPL_MUL_16_16(DTMF_AMP_LOW, tempValLow)
- + WEBRTC_SPL_LSHIFT_W32((int32_t)tempValHigh, 15);
-
- /* Norm the signal to Q14 (with proper rounding) */
- tempVal = (tempVal + 16384) >> 15;
-
-        /* Scale the signal to the correct dBm0 value */
- signal[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- (WEBRTC_SPL_MUL_16_16(tempVal, WebRtcNetEQ_dtfm_dBm0[volume])
- + 8192), 14); /* volume value is in Q14; use proper rounding */
- }
-
- return frameLen;
-
-}
-
-#endif /* NETEQ_ATEVENT_DECODE */
-
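A brief usage sketch may help tie the generator and its tables together, assuming a caller-owned instance and the default 10 ms frame (illustrative only, not part of the removed files; the wrapper name is made up):

/* Sketch: generate one 10 ms burst of DTMF digit 5 at -10 dBm0, 8 kHz. */
#include <string.h>

#include "dtmf_tonegen.h"

static void SketchGenerateDtmfDigit5(void)
{
    dtmf_tone_inst_t inst;
    int16_t buf[80];   /* sampFreq/100 = 80 samples at 8 kHz */
    int16_t written;

    memset(&inst, 0, sizeof(inst));
    inst.reinit = 1;   /* (re)initialize the oscillators on the first call */

    /* value = 5, volume = 10 (i.e. -10 dBm0), extFrameLen = -1 => default 10 ms */
    written = WebRtcNetEQ_DTMFGenerate(&inst, 5, 10, buf, 8000, -1);
    if (written < 0)
    {
        /* DTMF_DEC_PARAMETER_ERROR or DTMF_GEN_UNKNOWN_SAMP_FREQ */
    }
}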
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h
deleted file mode 100644
index 5f4489940c2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the DTMF tone generator function.
- */
-
-#ifndef DTMF_TONEGEN_H
-#define DTMF_TONEGEN_H
-
-#include "typedefs.h"
-
-#include "neteq_defines.h"
-
-#ifdef NETEQ_ATEVENT_DECODE
-/* Must compile NetEQ with DTMF support to enable the functionality */
-
-#define DTMF_AMP_LOW 23171 /* 3 dB lower than the high frequency */
-
-/* The DTMF generator struct (part of DSP main struct DSPInst_t) */
-typedef struct dtmf_tone_inst_t_
-{
-
- int16_t reinit; /* non-zero if the oscillator model should
- be reinitialized for next event */
- int16_t oldOutputLow[2]; /* oscillator recursion history (low tone) */
- int16_t oldOutputHigh[2]; /* oscillator recursion history (high tone) */
-
- int lastDtmfSample; /* index to the first non-DTMF sample in the
- speech history, if non-negative */
-}dtmf_tone_inst_t;
-
-/****************************************************************************
- * WebRtcNetEQ_DTMFGenerate(...)
- *
- * Generate 10 ms DTMF signal according to input parameters.
- *
- * Input:
- * - DTMFdecInst : DTMF instance
- * - value : DTMF event number (0-15)
- * - volume : Volume of generated signal (0-36)
- * Volume is given in negative dBm0, i.e., volume == 0
- *                      means 0 dBm0 while volume == 36 means -36 dBm0.
- * - sampFreq : Sample rate in Hz
- *
- * Output:
- * - signal : Pointer to vector where DTMF signal is stored;
- * Vector must be at least sampFreq/100 samples long.
- * - DTMFdecInst : Updated DTMF instance
- *
- * Return value : >0 - Number of samples written to signal
- * : <0 - Error
- */
-
-int16_t WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst,
- int16_t value,
- int16_t volume,
- int16_t *signal,
- uint16_t sampFreq,
- int16_t frameLen
-);
-
-#endif /* NETEQ_ATEVENT_DECODE */
-
-#endif /* DTMF_TONEGEN_H */
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.c
deleted file mode 100644
index 9959f9222d7..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.c
+++ /dev/null
@@ -1,1220 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This is the function that expands from the speech history, to produce concealment data
- * or to increase the delay.
- */
-
-#include "dsp.h"
-
-#include <assert.h>
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-
-#define CHECK_NO_OF_CORRMAX 3
-#define DISTLEN 20
-#define LPCANALASYSLEN 160
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- (First part of first expand)
- int16_t pw16_bestCorrIndex 3 0 2
- int16_t pw16_bestCorr 3 3 5
- int16_t pw16_bestDistIndex 3 6 8
- int16_t pw16_bestDist 3 9 11
- int16_t pw16_corrVec 102*fs/8000 12 11+102*fs/8000
- func WebRtcNetEQ_Correlator 232 12+102*fs/8000 243+102*fs/8000
-
- (Second part of first expand)
- int32_t pw32_corr2 99*fs/8000+1 0 99*fs/8000
- int32_t pw32_autoCorr 2*7 0 13
- int16_t pw16_rc 6 14 19
-
- Signal combination:
- int16_t pw16_randVec 30+120*fs/8000 0 29+120*fs/8000
- int16_t pw16_scaledRandVec 125*fs/8000 30+120*fs/8000 29+245*fs/8000
- int16_t pw16_unvoicedVecSpace 10+125*fs/8000 30+245*fs/8000 39+370*fs/8000
-
- Total: 40+370*fs/8000 (size depends on UNVOICED_LPC_ORDER and BGN_LPC_ORDER)
- */
-
-#if ((BGN_LPC_ORDER > 10) || (UNVOICED_LPC_ORDER > 10)) && (defined SCRATCH)
-#error BGN_LPC_ORDER and/or UNVOICED_LPC_ORDER are too large for current scratch memory allocation
-#endif
-
-#define SCRATCH_PW16_BEST_CORR_INDEX 0
-#define SCRATCH_PW16_BEST_CORR 3
-#define SCRATCH_PW16_BEST_DIST_INDEX 6
-#define SCRATCH_PW16_BEST_DIST 9
-#define SCRATCH_PW16_CORR_VEC 12
-#define SCRATCH_PW16_CORR2 0
-#define SCRATCH_PW32_AUTO_CORR 0
-#define SCRATCH_PW16_RC 14
-#define SCRATCH_PW16_RAND_VEC 0
-
-#if (defined(NETEQ_48KHZ_WIDEBAND))
-#define SCRATCH_NETEQDSP_CORRELATOR 624
-#define SCRATCH_PW16_SCALED_RAND_VEC 750
-#define SCRATCH_PW16_UNVOICED_VEC_SPACE 1500
-#elif (defined(NETEQ_32KHZ_WIDEBAND))
-#define SCRATCH_NETEQDSP_CORRELATOR 420
-#define SCRATCH_PW16_SCALED_RAND_VEC 510
-#define SCRATCH_PW16_UNVOICED_VEC_SPACE 1010
-#elif (defined(NETEQ_WIDEBAND))
-#define SCRATCH_NETEQDSP_CORRELATOR 216
-#define SCRATCH_PW16_SCALED_RAND_VEC 270
-#define SCRATCH_PW16_UNVOICED_VEC_SPACE 520
-#else /* NB */
-#define SCRATCH_NETEQDSP_CORRELATOR 114
-#define SCRATCH_PW16_SCALED_RAND_VEC 150
-#define SCRATCH_PW16_UNVOICED_VEC_SPACE 275
-#endif
-
-/****************************************************************************
- * WebRtcNetEQ_Expand(...)
- *
- * This function produces one "chunk" of expansion data (PLC audio). The
- * length of the produced audio depends on the speech history.
- *
- * Input:
- * - inst : DSP instance
- * - scratchPtr : Pointer to scratch vector
- * - outdata : Pointer to a memory space where the output data
- * should be stored
- * - BGNonly : If non-zero, "expand" will only produce background noise.
- * - pw16_len : Desired number of samples (only for BGN mode).
- *
- * Output:
- * - inst : Updated instance
- * - pw16_len : Number of samples that were output from NetEq
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_Expand(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly)
-{
-
- int16_t fs_mult;
- ExpandInst_t *ExpandState = &(inst->ExpandInst);
- BGNInst_t *BGNState = &(inst->BGNInst);
- int i;
-#ifdef SCRATCH
- int16_t *pw16_randVec = pw16_scratchPtr + SCRATCH_PW16_RAND_VEC;
- int16_t *pw16_scaledRandVec = pw16_scratchPtr + SCRATCH_PW16_SCALED_RAND_VEC;
- int16_t *pw16_unvoicedVecSpace = pw16_scratchPtr + SCRATCH_PW16_UNVOICED_VEC_SPACE;
-#else
- int16_t pw16_randVec[FSMULT * 120 + 30]; /* 150 for NB and 270 for WB */
- int16_t pw16_scaledRandVec[FSMULT * 125]; /* 125 for NB and 250 for WB */
- int16_t pw16_unvoicedVecSpace[BGN_LPC_ORDER + FSMULT * 125];
-#endif
- /* 125 for NB and 250 for WB etc. Reuse pw16_outData[] for this vector */
- int16_t *pw16_voicedVecStorage = pw16_outData;
- int16_t *pw16_voicedVec = &pw16_voicedVecStorage[ExpandState->w16_overlap];
- int16_t *pw16_unvoicedVec = pw16_unvoicedVecSpace + UNVOICED_LPC_ORDER;
- int16_t *pw16_cngVec = pw16_unvoicedVecSpace + BGN_LPC_ORDER;
- int16_t w16_expVecsLen, w16_lag = 0, w16_expVecPos;
- int16_t w16_randLen;
- int16_t w16_vfractionChange; /* in Q14 */
- int16_t w16_winMute = 0, w16_winMuteInc = 0, w16_winUnMute = 0, w16_winUnMuteInc = 0;
- int32_t w32_tmp;
- int16_t w16_tmp, w16_tmp2;
- int16_t stability;
- enum BGNMode bgnMode = inst->BGNInst.bgnMode;
-
- /* Pre-calculate common multiplications with fs_mult */
- int16_t fsMult4;
- int16_t fsMult20;
- int16_t fsMult120;
- int16_t fsMultDistLen;
- int16_t fsMultLPCAnalasysLen;
-
-#ifdef NETEQ_STEREO
- MasterSlaveInfo *msInfo = inst->msInfo;
-#endif
-
- /* fs is uint16_t (to hold fs=48000) */
- fs_mult = WebRtcNetEQ_CalcFsMult(inst->fs); /* calculate fs/8000 */
-
- /* Pre-calculate common multiplications with fs_mult */
- fsMult4 = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, 4);
- fsMult20 = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, 20);
- fsMult120 = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, 120);
- fsMultDistLen = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, DISTLEN);
- fsMultLPCAnalasysLen = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, LPCANALASYSLEN);
-
- /*
- * Perform all the initial setup if it's the first expansion.
- * If background noise (BGN) only, this setup is not needed.
- */
- if (ExpandState->w16_consecExp == 0 && !BGNonly)
- {
- /* Setup more variables */
-#ifdef SCRATCH
- int32_t *pw32_autoCorr = (int32_t*) (pw16_scratchPtr
- + SCRATCH_PW32_AUTO_CORR);
- int16_t *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
- int16_t *pw16_bestCorrIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR_INDEX;
- int16_t *pw16_bestCorr = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR;
- int16_t *pw16_bestDistIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST_INDEX;
- int16_t *pw16_bestDist = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST;
- int16_t *pw16_corrVec = pw16_scratchPtr + SCRATCH_PW16_CORR_VEC;
- int32_t *pw32_corr2 = (int32_t*) (pw16_scratchPtr + SCRATCH_PW16_CORR2);
-#else
- int32_t pw32_autoCorr[UNVOICED_LPC_ORDER+1];
- int16_t pw16_rc[UNVOICED_LPC_ORDER];
- int16_t pw16_corrVec[FSMULT*102]; /* 102 for NB */
- int16_t pw16_bestCorrIndex[CHECK_NO_OF_CORRMAX];
- int16_t pw16_bestCorr[CHECK_NO_OF_CORRMAX];
- int16_t pw16_bestDistIndex[CHECK_NO_OF_CORRMAX];
- int16_t pw16_bestDist[CHECK_NO_OF_CORRMAX];
- int32_t pw32_corr2[(99*FSMULT)+1];
-#endif
- int32_t pw32_bestDist[CHECK_NO_OF_CORRMAX];
- int16_t w16_ind = 0;
- int16_t w16_corrVecLen;
- int16_t w16_corrScale;
- int16_t w16_distScale;
- int16_t w16_indMin, w16_indMax;
- int16_t w16_len;
- int32_t w32_en1, w32_en2, w32_cc;
- int16_t w16_en1Scale, w16_en2Scale;
- int16_t w16_en1, w16_en2;
- int32_t w32_en1_mul_en2;
- int16_t w16_sqrt_en1en2;
- int16_t w16_ccShiftL;
- int16_t w16_bestcorr; /* Correlation in Q14 */
- int16_t *pw16_vec1, *pw16_vec2;
- int16_t w16_factor;
- int16_t w16_DistLag, w16_CorrLag, w16_diffLag;
- int16_t w16_energyLen;
- int16_t w16_slope;
- int16_t w16_startInd;
- int16_t w16_noOfcorr2;
- int16_t w16_scale;
-
- /* Initialize some variables */
- ExpandState->w16_lagsDirection = 1;
- ExpandState->w16_lagsPosition = -1;
- ExpandState->w16_expandMuteFactor = 16384; /* Start from 1.0 (Q14) */
- BGNState->w16_mutefactor = 0; /* Start with 0 gain for BGN (value in Q14) */
- inst->w16_seedInc = 1;
-
-#ifdef NETEQ_STEREO
- /* Sanity for msInfo */
- if (msInfo == NULL)
- {
- /* this should not happen here */
- return MASTER_SLAVE_ERROR;
- }
-
- /*
- * Do not calculate correlations for slave instance(s)
- * unless lag info from master is corrupt
- */
- if ((msInfo->msMode != NETEQ_SLAVE)
- || ((msInfo->distLag <= 0) || (msInfo->corrLag <= 0)))
- {
-#endif
- /* Calculate correlation vector in downsampled domain (4 kHz sample rate) */
- w16_corrVecLen = WebRtcNetEQ_Correlator(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQDSP_CORRELATOR,
-#endif
- inst->pw16_speechHistory, inst->w16_speechHistoryLen, pw16_corrVec,
- &w16_corrScale);
-
- /* Find peaks in correlation vector using parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corrVec, w16_corrVecLen, CHECK_NO_OF_CORRMAX, fs_mult,
- pw16_bestCorrIndex, pw16_bestCorr);
-
- /*
- * Adjust peak locations; cross-correlation lags start at 2.5 ms
- * (20*fs_mult samples)
- */
- pw16_bestCorrIndex[0] += fsMult20;
- pw16_bestCorrIndex[1] += fsMult20;
- pw16_bestCorrIndex[2] += fsMult20;
-
- /* Calculate distortion around the 3 (CHECK_NO_OF_CORRMAX) best lags */
- w16_distScale = 0;
- for (i = 0; i < CHECK_NO_OF_CORRMAX; i++)
- {
- w16_tmp = fsMult20;
- w16_tmp2 = pw16_bestCorrIndex[i] - fsMult4;
- w16_indMin = WEBRTC_SPL_MAX(w16_tmp, w16_tmp2);
- w16_tmp = fsMult120 - 1;
- w16_tmp2 = pw16_bestCorrIndex[i] + fsMult4;
- w16_indMax = WEBRTC_SPL_MIN(w16_tmp, w16_tmp2);
-
- pw16_bestDistIndex[i] = WebRtcNetEQ_MinDistortion(
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - fsMultDistLen]),
- w16_indMin, w16_indMax, fsMultDistLen, &pw32_bestDist[i]);
-
- w16_distScale
- = WEBRTC_SPL_MAX(16 - WebRtcSpl_NormW32(pw32_bestDist[i]), w16_distScale);
-
- }
-
- /* Shift the distortion values to fit in int16_t */
- WebRtcSpl_VectorBitShiftW32ToW16(pw16_bestDist, CHECK_NO_OF_CORRMAX, pw32_bestDist,
- w16_distScale);
-
- /*
-         * Find the index with the maximum criterion, where crit[i] = bestCorr[i]/bestDist[i].
- * Do this by a cross multiplication.
- */
-
- w32_en1 = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[0],pw16_bestDist[1]);
- w32_en2 = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[1],pw16_bestDist[0]);
- if (w32_en1 >= w32_en2)
- {
- /* 0 wins over 1 */
- w32_en1
- = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[0], pw16_bestDist[2]);
- w32_en2
- = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[2], pw16_bestDist[0]);
- if (w32_en1 >= w32_en2)
- {
- /* 0 wins over 2 */
- w16_ind = 0;
- }
- else
- {
- /* 2 wins over 0 */
- w16_ind = 2;
- }
- }
- else
- {
- /* 1 wins over 0 */
- w32_en1
- = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[1],pw16_bestDist[2]);
- w32_en2
- = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[2],pw16_bestDist[1]);
- if ((int32_t) w32_en1 >= (int32_t) w32_en2)
- {
- /* 1 wins over 2 */
- w16_ind = 1;
- }
- else
- {
- /* 2 wins over 1 */
- w16_ind = 2;
- }
- }
-
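The cross-multiplications above rank the candidates by bestCorr[i]/bestDist[i] without any division: for positive distortion values, corr[a]/dist[a] >= corr[b]/dist[b] exactly when corr[a]*dist[b] >= corr[b]*dist[a]. The same idea in generic form (a sketch only, not part of the removed file; the helper name is made up):

/* Sketch: index maximizing corr[i]/dist[i] without division.
 * Assumes dist[i] > 0 so that cross-multiplication preserves the ordering. */
static int SketchArgMaxRatio(const int16_t *corr, const int16_t *dist, int n)
{
    int best = 0;
    int i;
    for (i = 1; i < n; i++)
    {
        /* corr[i]/dist[i] > corr[best]/dist[best]
           <=> corr[i]*dist[best] > corr[best]*dist[i] */
        if ((int32_t)corr[i] * dist[best] > (int32_t)corr[best] * dist[i])
        {
            best = i;
        }
    }
    return best;
}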
-#ifdef NETEQ_STEREO
- }
-
- /* Store DistLag and CorrLag of the position with highest criteria */
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO)
- || ((msInfo->msMode == NETEQ_SLAVE) && (msInfo->distLag <= 0 || msInfo->corrLag
- <= 0)))
- {
- /* lags not provided externally */
- w16_DistLag = pw16_bestDistIndex[w16_ind];
- w16_CorrLag = pw16_bestCorrIndex[w16_ind];
- if (msInfo->msMode == NETEQ_MASTER)
- {
- msInfo->distLag = w16_DistLag;
- msInfo->corrLag = w16_CorrLag;
- }
- }
- else if (msInfo->msMode == NETEQ_SLAVE)
- {
- /* lags provided externally (from master) */
- w16_DistLag = msInfo->distLag;
- w16_CorrLag = msInfo->corrLag;
-
- /* sanity for lag values */
- if ((w16_DistLag <= 0) || (w16_CorrLag <= 0))
- {
- return MASTER_SLAVE_ERROR;
- }
- }
- else
- {
- /* Invalid mode */
- return MASTER_SLAVE_ERROR;
- }
-#else /* not NETEQ_STEREO */
- w16_DistLag = pw16_bestDistIndex[w16_ind];
- w16_CorrLag = pw16_bestCorrIndex[w16_ind];
-#endif
-
- ExpandState->w16_maxLag = WEBRTC_SPL_MAX(w16_DistLag, w16_CorrLag);
-
- /* Calculate the exact best correlation (in the range within CorrLag-DistLag) */
- w16_len = w16_DistLag + 10;
- w16_len = WEBRTC_SPL_MIN(w16_len, fsMult120);
- w16_len = WEBRTC_SPL_MAX(w16_len, 60 * fs_mult);
-
- w16_startInd = WEBRTC_SPL_MIN(w16_DistLag, w16_CorrLag);
- w16_noOfcorr2 = WEBRTC_SPL_ABS_W16((w16_DistLag-w16_CorrLag)) + 1;
- /* w16_noOfcorr2 maximum value is 99*fs_mult + 1 */
-
- /* Calculate suitable scaling */
- w16_tmp
- = WebRtcSpl_MaxAbsValueW16(
- &inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_startInd
- - w16_noOfcorr2],
- (int16_t) (w16_len + w16_startInd + w16_noOfcorr2 - 1));
- w16_corrScale = ((31 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_tmp, w16_tmp)))
- + (31 - WebRtcSpl_NormW32(w16_len))) - 31;
- w16_corrScale = WEBRTC_SPL_MAX(0, w16_corrScale);
-
- /*
- * Perform the correlation, store in pw32_corr2
- */
-
- WebRtcNetEQ_CrossCorr(pw32_corr2,
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]),
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_startInd]),
- w16_len, w16_noOfcorr2, w16_corrScale, -1);
-
- /* Find maximizing index */
- w16_ind = WebRtcSpl_MaxIndexW32(pw32_corr2, w16_noOfcorr2);
- w32_cc = pw32_corr2[w16_ind]; /* this is maximum correlation */
- w16_ind = w16_ind + w16_startInd; /* correct index for start offset */
-
- /* Calculate energies */
- w32_en1 = WebRtcNetEQ_DotW16W16(
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]),
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]), w16_len,
- w16_corrScale);
- w32_en2 = WebRtcNetEQ_DotW16W16(
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_ind]),
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_ind]),
- w16_len, w16_corrScale);
-
- /* Calculate the correlation value w16_bestcorr */
- if ((w32_en1 > 0) && (w32_en2 > 0))
- {
- w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
- w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
- w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
- w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
- /* Make sure total scaling is even (to simplify scale factor after sqrt) */
- if ((w16_en1Scale + w16_en2Scale) & 1)
- {
- /* if sum is odd */
- w16_en1Scale += 1;
- }
- w16_en1 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
- w16_en2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
- w32_en1_mul_en2 = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
- w16_sqrt_en1en2 = (int16_t) WebRtcSpl_SqrtFloor(w32_en1_mul_en2);
-
- /* Calculate cc/sqrt(en1*en2) in Q14 */
- w16_ccShiftL = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
- w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_ccShiftL);
- w16_bestcorr = (int16_t) WebRtcSpl_DivW32W16(w32_cc, w16_sqrt_en1en2);
- w16_bestcorr = WEBRTC_SPL_MIN(16384, w16_bestcorr); /* set maximum to 1.0 */
-
- }
- else
- {
- /* if either en1 or en2 is zero */
- w16_bestcorr = 0;
- }
-
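In floating point, the quantity computed above is simply the normalized cross-correlation cc/sqrt(en1*en2), capped at 1.0 and forced to 0 when either energy is zero; the fixed-point code keeps the combined energy scaling even so that the shift applied after the square root is a whole number. A reference version (a sketch only, not part of the removed file; the helper name is made up):

/* Sketch: floating-point reference for the Q14 best-correlation value above. */
#include <math.h>

static double SketchNormalizedCorrelation(double cc, double en1, double en2)
{
    if (en1 <= 0.0 || en2 <= 0.0)
    {
        return 0.0;                /* matches the "either energy is zero" branch */
    }
    else
    {
        double r = cc / sqrt(en1 * en2);
        return r > 1.0 ? 1.0 : r;  /* the Q14 code caps at 16384 (1.0) */
    }
}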
- /*
- * Extract the two vectors, pw16_expVecs[0][] and pw16_expVecs[1][],
- * from the SpeechHistory[]
- */
- w16_expVecsLen = ExpandState->w16_maxLag + ExpandState->w16_overlap;
- pw16_vec1 = &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_expVecsLen]);
- pw16_vec2 = pw16_vec1 - w16_DistLag;
- /* Normalize the second vector to the same energy as the first */
- w32_en1 = WebRtcNetEQ_DotW16W16(pw16_vec1, pw16_vec1, w16_expVecsLen, w16_corrScale);
- w32_en2 = WebRtcNetEQ_DotW16W16(pw16_vec2, pw16_vec2, w16_expVecsLen, w16_corrScale);
-
- /*
-         * Confirm that the energy factor sqrt(w32_en1/w32_en2) is within the range 0.5 - 2.0,
-         * i.e., that w32_en1/w32_en2 is within 0.25 - 4.
- */
- if (((w32_en1 >> 2) < w32_en2) && ((w32_en1) > (w32_en2 >> 2)))
- {
-
- /* Energy constraint fulfilled => use both vectors and scale them accordingly */
- w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
- w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
- w16_en1Scale = w16_en2Scale - 13;
-
- /* calculate w32_en1/w32_en2 in Q13 */
- w32_en1_mul_en2 = WebRtcSpl_DivW32W16(
- WEBRTC_SPL_SHIFT_W32(w32_en1, -w16_en1Scale),
- (int16_t) (WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale)));
-
- /* calculate factor in Q13 (sqrt of en1/en2 in Q26) */
- w16_factor = (int16_t) WebRtcSpl_SqrtFloor(
- WEBRTC_SPL_LSHIFT_W32(w32_en1_mul_en2, 13));
-
- /* Copy the two vectors and give them the same energy */
-
- WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[0], pw16_vec1, w16_expVecsLen);
- WebRtcSpl_AffineTransformVector(ExpandState->pw16_expVecs[1], pw16_vec2,
- w16_factor, 4096, 13, w16_expVecsLen);
-
- }
- else
- {
- /* Energy change constraint not fulfilled => only use last vector */
-
- WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[0], pw16_vec1, w16_expVecsLen);
- WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[1], ExpandState->pw16_expVecs[0],
- w16_expVecsLen);
-
- /* Set the w16_factor since it is used by muting slope */
- if (((w32_en1 >> 2) < w32_en2) || (w32_en2 == 0))
- {
- w16_factor = 4096; /* 0.5 in Q13*/
- }
- else
- {
- w16_factor = 16384; /* 2.0 in Q13*/
- }
- }
-
- /* Set the 3 lag values */
- w16_diffLag = w16_DistLag - w16_CorrLag;
- if (w16_diffLag == 0)
- {
- /* DistLag and CorrLag are equal */
- ExpandState->w16_lags[0] = w16_DistLag;
- ExpandState->w16_lags[1] = w16_DistLag;
- ExpandState->w16_lags[2] = w16_DistLag;
- }
- else
- {
- /* DistLag and CorrLag are not equal; use different combinations of the two */
- ExpandState->w16_lags[0] = w16_DistLag; /* DistLag only */
- ExpandState->w16_lags[1] = ((w16_DistLag + w16_CorrLag) >> 1); /* 50/50 */
- /* Third lag, move one half-step towards CorrLag (in both cases) */
- if (w16_diffLag > 0)
- {
- ExpandState->w16_lags[2] = (w16_DistLag + w16_CorrLag - 1) >> 1;
- }
- else
- {
- ExpandState->w16_lags[2] = (w16_DistLag + w16_CorrLag + 1) >> 1;
- }
- }
-
- /*************************************************
- * Calculate the LPC and the gain of the filters *
- *************************************************/
-
- /* Calculate scale value needed for autocorrelation */
- w16_tmp = WebRtcSpl_MaxAbsValueW16(
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - fsMultLPCAnalasysLen]),
- fsMultLPCAnalasysLen);
-
- w16_tmp = 16 - WebRtcSpl_NormW32(w16_tmp);
- w16_tmp = WEBRTC_SPL_MIN(w16_tmp,0);
- w16_tmp = (w16_tmp << 1) + 7;
- w16_tmp = WEBRTC_SPL_MAX(w16_tmp,0);
-
- /* set w16_ind to simplify the following expressions */
- w16_ind = inst->w16_speechHistoryLen - fsMultLPCAnalasysLen - UNVOICED_LPC_ORDER;
-
- /* store first UNVOICED_LPC_ORDER samples in pw16_rc */
-
- WEBRTC_SPL_MEMCPY_W16(pw16_rc, &inst->pw16_speechHistory[w16_ind], UNVOICED_LPC_ORDER);
-
- /* set first samples to zero */
- WebRtcSpl_MemSetW16(&inst->pw16_speechHistory[w16_ind], 0, UNVOICED_LPC_ORDER);
-
- /* Calculate UNVOICED_LPC_ORDER+1 lags of the ACF */
-
- WebRtcNetEQ_CrossCorr(
- pw32_autoCorr, &(inst->pw16_speechHistory[w16_ind + UNVOICED_LPC_ORDER]),
- &(inst->pw16_speechHistory[w16_ind + UNVOICED_LPC_ORDER]), fsMultLPCAnalasysLen,
- UNVOICED_LPC_ORDER + 1, w16_tmp, -1);
-
- /* Recover the stored samples from pw16_rc */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->pw16_speechHistory[w16_ind], pw16_rc, UNVOICED_LPC_ORDER);
-
- if (pw32_autoCorr[0] > 0)
- { /* check that variance is positive */
-
- /* estimate AR filter parameters using Levinson-Durbin algorithm
- (UNVOICED_LPC_ORDER+1 filter coefficients) */
- stability = WebRtcSpl_LevinsonDurbin(pw32_autoCorr, ExpandState->pw16_arFilter,
- pw16_rc, UNVOICED_LPC_ORDER);
-
- /* Only update BGN if filter is stable */
- if (stability != 1)
- {
- /* Set first coefficient to 4096 (1.0 in Q12)*/
- ExpandState->pw16_arFilter[0] = 4096;
- /* Set remaining UNVOICED_LPC_ORDER coefficients to zero */
- WebRtcSpl_MemSetW16(ExpandState->pw16_arFilter + 1, 0, UNVOICED_LPC_ORDER);
- }
-
- }
-
- if (w16_DistLag < 40)
- {
- w16_energyLen = 2 * w16_DistLag;
- }
- else
- {
- w16_energyLen = w16_DistLag;
- }
- w16_randLen = w16_energyLen + 30; /* Startup part */
-
- /* Extract a noise segment */
- if (w16_randLen <= RANDVEC_NO_OF_SAMPLES)
- {
- WEBRTC_SPL_MEMCPY_W16(pw16_randVec,
- (int16_t*) WebRtcNetEQ_kRandnTbl, w16_randLen);
- }
- else
- { /* only applies to SWB where length could be larger than 256 */
-#if FSMULT >= 2 /* Makes pw16_randVec longer than RANDVEC_NO_OF_SAMPLES. */
- WEBRTC_SPL_MEMCPY_W16(pw16_randVec, (int16_t*) WebRtcNetEQ_kRandnTbl,
- RANDVEC_NO_OF_SAMPLES);
- inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
- assert(w16_randLen <= FSMULT * 120 + 30);
- WebRtcNetEQ_RandomVec(&inst->uw16_seed, &pw16_randVec[RANDVEC_NO_OF_SAMPLES],
- (int16_t) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
-#else
- assert(0);
-#endif
- }
-
- /* Set up state vector and calculate scale factor for unvoiced filtering */
-
- WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_arState,
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - UNVOICED_LPC_ORDER]),
- UNVOICED_LPC_ORDER);
- WEBRTC_SPL_MEMCPY_W16(pw16_unvoicedVec - UNVOICED_LPC_ORDER,
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - 128 - UNVOICED_LPC_ORDER]),
- UNVOICED_LPC_ORDER);
- WebRtcSpl_FilterMAFastQ12(&inst->pw16_speechHistory[inst->w16_speechHistoryLen - 128],
- pw16_unvoicedVec, ExpandState->pw16_arFilter, UNVOICED_LPC_ORDER + 1, 128);
- if (WebRtcSpl_MaxAbsValueW16(pw16_unvoicedVec, 128) > 4000)
- {
- w16_scale = 4;
- }
- else
- {
- w16_scale = 0;
- }
- w32_tmp = WebRtcNetEQ_DotW16W16(pw16_unvoicedVec, pw16_unvoicedVec, 128, w16_scale);
-
- /* Normalize w32_tmp to 28 or 29 bits to preserve sqrt() accuracy */
- w16_tmp = WebRtcSpl_NormW32(w32_tmp) - 3;
-        w16_tmp += ((w16_tmp & 0x1) ^ 0x1); /* Make sure we do an odd number of shifts, since we
-                                               already have 7 shifts from the earlier division by 128. */
- w32_tmp = WEBRTC_SPL_SHIFT_W32(w32_tmp, w16_tmp);
- w32_tmp = WebRtcSpl_SqrtFloor(w32_tmp);
- ExpandState->w16_arGainScale = 13 + ((w16_tmp + 7 - w16_scale) >> 1);
- ExpandState->w16_arGain = (int16_t) w32_tmp;
-
- /********************************************************************
- * Calculate vfraction from bestcorr *
- * if (bestcorr>0.480665) *
- * vfraction = ((bestcorr-0.4)/(1-0.4)).^2 *
- * else vfraction = 0 *
- * *
- * approximation (coefficients in Q12): *
- * if (x>0.480665) (y(x)<0.3) *
- * y(x) = -1.264421 + 4.8659148*x - 4.0092827*x^2 + 1.4100529*x^3 *
- * else y(x) = 0; *
- ********************************************************************/
-
- if (w16_bestcorr > 7875)
- {
- /* if x>0.480665 */
- int16_t w16_x1, w16_x2, w16_x3;
- w16_x1 = w16_bestcorr;
- w32_tmp = WEBRTC_SPL_MUL_16_16((int32_t) w16_x1, w16_x1);
- w16_x2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
- w32_tmp = WEBRTC_SPL_MUL_16_16(w16_x1, w16_x2);
- w16_x3 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
- w32_tmp
- = (int32_t) WEBRTC_SPL_LSHIFT_W32((int32_t) WebRtcNetEQ_kMixFractionFuncTbl[0], 14);
- w32_tmp
- += (int32_t) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[1], w16_x1);
- w32_tmp
- += (int32_t) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[2], w16_x2);
- w32_tmp
- += (int32_t) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[3], w16_x3);
- ExpandState->w16_vFraction = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 12);
- ExpandState->w16_vFraction = WEBRTC_SPL_MIN(ExpandState->w16_vFraction, 16384);
- ExpandState->w16_vFraction = WEBRTC_SPL_MAX(ExpandState->w16_vFraction, 0);
- }
- else
- {
- ExpandState->w16_vFraction = 0;
- }
-
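For reference, the target mapping in the comment above is vfraction = ((bestcorr - 0.4)/0.6)^2 for bestcorr > 0.480665 and 0 otherwise; the cubic with Q12 coefficients in WebRtcNetEQ_kMixFractionFuncTbl approximates it in fixed point. A floating-point version of the exact expression (a sketch only, not part of the removed file; the helper name is made up):

/* Sketch: floating-point reference for the voiced-fraction mapping above.
 * Input and output are in the range [0, 1]. */
static double SketchVoicedFraction(double best_corr)
{
    if (best_corr <= 0.480665)
    {
        return 0.0;
    }
    else
    {
        double x = (best_corr - 0.4) / 0.6;
        double y = x * x;
        return y > 1.0 ? 1.0 : y;  /* the Q14 code also caps at 16384 (1.0) */
    }
}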
- /***********************************************************************
- * Calculate muting slope, reuse value from earlier scaling of ExpVecs *
- ***********************************************************************/
- w16_slope = w16_factor;
-
- if (w16_slope > 12288)
- {
- /* w16_slope > 1.5 ? */
- /* Calculate (1-(1/slope))/w16_DistLag = (slope-1)/(w16_DistLag*slope) */
- w32_tmp = w16_slope - 8192;
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 12); /* Value in Q25 (13+12=25) */
- w16_tmp = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(w16_DistLag,
- w16_slope, 8); /* Value in Q5 (13-8=5) */
- w16_tmp = (int16_t) WebRtcSpl_DivW32W16(w32_tmp,
- w16_tmp); /* Res in Q20 (25-5=20) */
-
- if (w16_slope > 14746)
- { /* w16_slope > 1.8 ? */
- ExpandState->w16_muteSlope = (w16_tmp + 1) >> 1;
- }
- else
- {
- ExpandState->w16_muteSlope = (w16_tmp + 4) >> 3;
- }
- ExpandState->w16_onset = 1;
- }
- else if (ExpandState->w16_vFraction > 13107)
- {
- /* w16_vFraction > 0.8 ? */
- if (w16_slope > 8028)
- {
-            /* w16_slope > 0.98 ? */
- ExpandState->w16_muteSlope = 0;
- }
- else
- {
- /* Calculate (1-slope)/w16_DistLag */
- w32_tmp = 8192 - w16_slope;
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 7); /* Value in Q20 (13+7=20) */
- ExpandState->w16_muteSlope = (int16_t) WebRtcSpl_DivW32W16(w32_tmp,
- w16_DistLag); /* Res in Q20 (20-0=20) */
- }
- ExpandState->w16_onset = 0;
- }
- else
- {
- /*
-         * Use the minimum of 0.005 (0.9 on 50 samples in NB) and the slope
-         * ((1-slope)/w16_DistLag)
- */
- w32_tmp = 8192 - w16_slope;
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 7); /* Value in Q20 (13+7=20) */
- w32_tmp = WEBRTC_SPL_MAX(w32_tmp, 0);
- ExpandState->w16_muteSlope = (int16_t) WebRtcSpl_DivW32W16(w32_tmp,
- w16_DistLag); /* Res in Q20 (20-0=20) */
- w16_tmp = WebRtcNetEQ_k5243div[fs_mult]; /* 0.005/fs_mult = 5243/fs_mult */
- ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(w16_tmp, ExpandState->w16_muteSlope);
- ExpandState->w16_onset = 0;
- }
- }
- else
- {
- /* This is not the first Expansion, parameters are already estimated. */
-
- /* Extract a noise segment */
- if (BGNonly) /* If we should produce nothing but background noise */
- {
- if (*pw16_len > 0)
- {
- /*
- * Set length to input parameter length, but not more than length
- * of pw16_randVec
- */
- w16_lag = WEBRTC_SPL_MIN(*pw16_len, FSMULT * 120 + 30);
- }
- else
- {
- /* set length to 15 ms */
- w16_lag = fsMult120;
- }
- w16_randLen = w16_lag;
- }
- else
- {
- w16_randLen = ExpandState->w16_maxLag;
- }
-
- if (w16_randLen <= RANDVEC_NO_OF_SAMPLES)
- {
- inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
- WebRtcNetEQ_RandomVec(&inst->uw16_seed, pw16_randVec, w16_randLen,
- inst->w16_seedInc);
- }
- else
- { /* only applies to SWB where length could be larger than 256 */
-#if FSMULT >= 2 /* Makes pw16_randVec longer than RANDVEC_NO_OF_SAMPLES. */
- inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
- WebRtcNetEQ_RandomVec(&inst->uw16_seed, pw16_randVec, RANDVEC_NO_OF_SAMPLES,
- inst->w16_seedInc);
- inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
- assert(w16_randLen <= FSMULT * 120 + 30);
- WebRtcNetEQ_RandomVec(&inst->uw16_seed, &pw16_randVec[RANDVEC_NO_OF_SAMPLES],
- (int16_t) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
-#else
- assert(0);
-#endif
- }
- } /* end if(first expand or BGNonly) ... else ... */
-
- if (!BGNonly) /* Voiced and unvoiced parts not used if generating BGN only */
- {
-
- /*************************************************
- * Generate signal *
- *************************************************/
-
- /*
- * Voiced part
- */
-
-    /* Linearly mute the w16_currentVFraction value from 1 to vfraction */
- if (ExpandState->w16_consecExp == 0)
- {
- ExpandState->w16_currentVFraction = 16384; /* 1.0 in Q14 */
- }
-
- ExpandState->w16_lagsPosition = ExpandState->w16_lagsPosition
- + ExpandState->w16_lagsDirection;
-
- /* Change direction if needed */
- if (ExpandState->w16_lagsPosition == 0)
- {
- ExpandState->w16_lagsDirection = 1;
- }
- if (ExpandState->w16_lagsPosition == 2)
- {
- ExpandState->w16_lagsDirection = -1;
- }
-
- /* Generate a weighted vector with the selected lag */
- w16_expVecsLen = ExpandState->w16_maxLag + ExpandState->w16_overlap;
- w16_lag = ExpandState->w16_lags[ExpandState->w16_lagsPosition];
- /* Copy lag+overlap data */
- w16_expVecPos = w16_expVecsLen - w16_lag - ExpandState->w16_overlap;
- w16_tmp = w16_lag + ExpandState->w16_overlap;
- if (ExpandState->w16_lagsPosition == 0)
- {
- WEBRTC_SPL_MEMCPY_W16(pw16_voicedVecStorage,
- &(ExpandState->pw16_expVecs[0][w16_expVecPos]), w16_tmp);
- }
- else if (ExpandState->w16_lagsPosition == 1)
- {
- WebRtcSpl_ScaleAndAddVectorsWithRound(&ExpandState->pw16_expVecs[0][w16_expVecPos], 3,
- &ExpandState->pw16_expVecs[1][w16_expVecPos], 1, 2, pw16_voicedVecStorage,
- w16_tmp);
-
- }
- else if (ExpandState->w16_lagsPosition == 2)
- {
- WebRtcSpl_ScaleAndAddVectorsWithRound(&ExpandState->pw16_expVecs[0][w16_expVecPos], 1,
- &ExpandState->pw16_expVecs[1][w16_expVecPos], 1, 1, pw16_voicedVecStorage,
- w16_tmp);
- }
-
- if (inst->fs == 8000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_8KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_8KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_8KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC;
-#ifdef NETEQ_WIDEBAND
- }
- else if (inst->fs == 16000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_16KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_16KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_16KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- }
- else if (inst->fs == 32000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_32KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_32KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_32KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- }
- else /* if (inst->fs==48000) */
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_48KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_48KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_48KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC;
-#endif
- }
-
-    /* Smooth the expanded signal if it has not been muted to a low level and vfraction is larger than 0.5 */
- if ((ExpandState->w16_expandMuteFactor > 819) && (ExpandState->w16_currentVFraction
- > 8192))
- {
- for (i = 0; i < ExpandState->w16_overlap; i++)
- {
- /* Do overlap add between new vector and overlap */
- ExpandState->pw16_overlapVec[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- WEBRTC_SPL_MUL_16_16(ExpandState->pw16_overlapVec[i], w16_winMute) +
- WEBRTC_SPL_MUL_16_16(
- WEBRTC_SPL_MUL_16_16_RSFT(ExpandState->w16_expandMuteFactor,
- pw16_voicedVecStorage[i], 14), w16_winUnMute) + 16384, 15);
- w16_winMute += w16_winMuteInc;
- w16_winUnMute += w16_winUnMuteInc;
- }
- }
- else if (ExpandState->w16_expandMuteFactor == 0
-#ifdef NETEQ_STEREO
- && msInfo->msMode == NETEQ_MONO /* only if mono mode is selected */
-#endif
- )
- {
- /* if ExpandState->w16_expandMuteFactor = 0 => all is CNG component
- set the output length to 15ms (for best CNG production) */
- w16_tmp = fsMult120;
- ExpandState->w16_maxLag = w16_tmp;
- ExpandState->w16_lags[0] = w16_tmp;
- ExpandState->w16_lags[1] = w16_tmp;
- ExpandState->w16_lags[2] = w16_tmp;
- }
-
- /*
- * Unvoiced part
- */
-
- WEBRTC_SPL_MEMCPY_W16(pw16_unvoicedVec - UNVOICED_LPC_ORDER,
- ExpandState->pw16_arState,
- UNVOICED_LPC_ORDER);
- if (ExpandState->w16_arGainScale > 0)
- {
- w32_tmp = ((int32_t) 1) << (ExpandState->w16_arGainScale - 1);
- }
- else
- {
- w32_tmp = 0;
- }
-
- /* Note that shift value can be >16 which complicates things for some DSPs */
- WebRtcSpl_AffineTransformVector(pw16_scaledRandVec, pw16_randVec,
- ExpandState->w16_arGain, w32_tmp, ExpandState->w16_arGainScale, w16_lag);
-
- WebRtcSpl_FilterARFastQ12(pw16_scaledRandVec, pw16_unvoicedVec,
- ExpandState->pw16_arFilter, UNVOICED_LPC_ORDER + 1, w16_lag);
-
- WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_arState,
- &(pw16_unvoicedVec[w16_lag - UNVOICED_LPC_ORDER]),
- UNVOICED_LPC_ORDER);
-
- /*
- * Voiced + Unvoiced
- */
-
- /* For lag =
- <=31*fs_mult => go from 1 to 0 in about 8 ms
- (>=31..<=63)*fs_mult => go from 1 to 0 in about 16 ms
- >=64*fs_mult => go from 1 to 0 in about 32 ms
- */
- w16_tmp = (31 - WebRtcSpl_NormW32(ExpandState->w16_maxLag)) - 5; /* getbits(w16_maxLag) -5 */
- w16_vfractionChange = (int16_t) WEBRTC_SPL_RSHIFT_W32(256, w16_tmp);
- if (ExpandState->w16_stopMuting == 1)
- {
- w16_vfractionChange = 0;
- }
-
- /* Create combined signal (unmuted) by shifting in more and more of unvoiced part */
- w16_tmp = 8 - w16_tmp; /* getbits(w16_vfractionChange) */
- w16_tmp = (ExpandState->w16_currentVFraction - ExpandState->w16_vFraction) >> w16_tmp;
- w16_tmp = WEBRTC_SPL_MIN(w16_tmp, w16_lag);
- WebRtcNetEQ_MixVoiceUnvoice(pw16_outData, pw16_voicedVec, pw16_unvoicedVec,
- &ExpandState->w16_currentVFraction, w16_vfractionChange, w16_tmp);
-
- if (w16_tmp < w16_lag)
- {
- if (w16_vfractionChange != 0)
- {
- ExpandState->w16_currentVFraction = ExpandState->w16_vFraction;
- }
- w16_tmp2 = 16384 - ExpandState->w16_currentVFraction;
- WebRtcSpl_ScaleAndAddVectorsWithRound(pw16_voicedVec + w16_tmp,
- ExpandState->w16_currentVFraction, pw16_unvoicedVec + w16_tmp, w16_tmp2, 14,
- pw16_outData + w16_tmp, (int16_t) (w16_lag - w16_tmp));
- }
-
- /* Select muting factor */
- if (ExpandState->w16_consecExp == 3)
- {
- /* 0.95 on 50 samples in NB (0.0010/fs_mult in Q20) */
- ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(ExpandState->w16_muteSlope,
- WebRtcNetEQ_k1049div[fs_mult]);
- }
- if (ExpandState->w16_consecExp == 7)
- {
- /* 0.90 on 50 samples in NB (0.0020/fs_mult in Q20) */
- ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(ExpandState->w16_muteSlope,
- WebRtcNetEQ_k2097div[fs_mult]);
- }
-
- /* Mute segment according to slope value */
- if ((ExpandState->w16_consecExp != 0) || (ExpandState->w16_onset != 1))
- {
- /* Mute to the previous level, then continue with the muting */
- WebRtcSpl_AffineTransformVector(pw16_outData, pw16_outData,
- ExpandState->w16_expandMuteFactor, 8192, 14, w16_lag);
-
- if ((ExpandState->w16_stopMuting != 1))
- {
- WebRtcNetEQ_MuteSignal(pw16_outData, ExpandState->w16_muteSlope, w16_lag);
-
- w16_tmp = 16384 - (int16_t) ((WEBRTC_SPL_MUL_16_16(w16_lag,
- ExpandState->w16_muteSlope) + 8192) >> 6); /* 20-14 = 6 */
- w16_tmp = (int16_t) ((WEBRTC_SPL_MUL_16_16(w16_tmp,
- ExpandState->w16_expandMuteFactor) + 8192) >> 14);
-
- /* Guard against getting stuck with very small (but sometimes audible) gain */
- if ((ExpandState->w16_consecExp > 3) && (w16_tmp
- >= ExpandState->w16_expandMuteFactor))
- {
- ExpandState->w16_expandMuteFactor = 0;
- }
- else
- {
- ExpandState->w16_expandMuteFactor = w16_tmp;
- }
- }
- }
-
- } /* end if(!BGNonly) */
-
- /*
- * BGN
- */
-
- if (BGNState->w16_initialized == 1)
- {
- /* BGN parameters are initialized; use them */
-
- WEBRTC_SPL_MEMCPY_W16(pw16_cngVec - BGN_LPC_ORDER,
- BGNState->pw16_filterState,
- BGN_LPC_ORDER);
-
- if (BGNState->w16_scaleShift > 1)
- {
- w32_tmp = ((int32_t) 1) << (BGNState->w16_scaleShift - 1);
- }
- else
- {
- w32_tmp = 0;
- }
-
- /* Scale random vector to correct energy level */
- /* Note that shift value can be >16 which complicates things for some DSPs */
- WebRtcSpl_AffineTransformVector(pw16_scaledRandVec, pw16_randVec,
- BGNState->w16_scale, w32_tmp, BGNState->w16_scaleShift, w16_lag);
-
- WebRtcSpl_FilterARFastQ12(pw16_scaledRandVec, pw16_cngVec, BGNState->pw16_filter,
- BGN_LPC_ORDER + 1, w16_lag);
-
- WEBRTC_SPL_MEMCPY_W16(BGNState->pw16_filterState,
- &(pw16_cngVec[w16_lag-BGN_LPC_ORDER]),
- BGN_LPC_ORDER);
-
- /* Unmute the insertion of background noise */
-
- if (bgnMode == BGN_FADE && ExpandState->w16_consecExp >= FADE_BGN_TIME
- && BGNState->w16_mutefactor > 0)
- {
- /* fade BGN to zero */
- /* calculate muting slope, approx 2^18/fsHz */
- int16_t muteFactor;
- if (fs_mult == 1)
- {
- muteFactor = -32;
- }
- else if (fs_mult == 2)
- {
- muteFactor = -16;
- }
- else if (fs_mult == 4)
- {
- muteFactor = -8;
- }
- else
- {
- muteFactor = -5;
- }
- /* use UnmuteSignal function with negative slope */
- WebRtcNetEQ_UnmuteSignal(pw16_cngVec, &BGNState->w16_mutefactor, /* In Q14 */
- pw16_cngVec, muteFactor, /* In Q20 */
- w16_lag);
- }
- else if (BGNState->w16_mutefactor < 16384 && !BGNonly)
- {
- /* if (w16_mutefactor < 1) and not BGN only (since then we use no muting) */
-
- /*
- * If BGN_OFF, or if BNG_FADE has started fading,
- * mutefactor should not be increased.
- */
- if (ExpandState->w16_stopMuting != 1 && bgnMode != BGN_OFF && !(bgnMode
- == BGN_FADE && ExpandState->w16_consecExp >= FADE_BGN_TIME))
- {
- WebRtcNetEQ_UnmuteSignal(pw16_cngVec, &BGNState->w16_mutefactor, /* In Q14 */
- pw16_cngVec, ExpandState->w16_muteSlope, /* In Q20 */
- w16_lag);
- }
- else
- {
- /* BGN_ON and stop muting, or
- * BGN_OFF (mute factor is always 0), or
- * BGN_FADE has reached 0 */
- WebRtcSpl_AffineTransformVector(pw16_cngVec, pw16_cngVec,
- BGNState->w16_mutefactor, 8192, 14, w16_lag);
- }
- }
- }
- else
- {
- /* BGN parameters have not been initialized; use zero noise */
- WebRtcSpl_MemSetW16(pw16_cngVec, 0, w16_lag);
- }
-
- if (BGNonly)
- {
- /* Copy BGN to outdata */
- for (i = 0; i < w16_lag; i++)
- {
- pw16_outData[i] = pw16_cngVec[i];
- }
- }
- else
- {
- /* Add CNG vector to the Voiced + Unvoiced vectors */
- for (i = 0; i < w16_lag; i++)
- {
- pw16_outData[i] = pw16_outData[i] + pw16_cngVec[i];
- }
-
- /* increase call number */
- ExpandState->w16_consecExp = ExpandState->w16_consecExp + 1;
- if (ExpandState->w16_consecExp < 0) /* Guard against overflow */
- ExpandState->w16_consecExp = FADE_BGN_TIME; /* "Arbitrary" large num of expands */
- }
-
- inst->w16_mode = MODE_EXPAND;
- *pw16_len = w16_lag;
-
- /* Update in-call and post-call statistics */
- if (ExpandState->w16_stopMuting != 1 || BGNonly)
- {
- /*
- * Only do this if StopMuting != 1 or if explicitly BGNonly, otherwise Expand is
- * called from Merge or Normal and special measures must be taken.
- */
- inst->statInst.expandLength += (uint32_t) *pw16_len;
- if (ExpandState->w16_expandMuteFactor == 0 || BGNonly)
- {
- /* Only noise expansion */
- inst->statInst.expandedNoiseSamples += *pw16_len;
- /* Short-term activity statistics. */
- inst->activity_stats.expand_bgn_samples += *pw16_len;
- }
- else
- {
- /* Voice expand (note: not necessarily _voiced_) */
- inst->statInst.expandedVoiceSamples += *pw16_len;
- /* Short-term activity statistics. */
- inst->activity_stats.expand_normal_samples += *pw16_len;
- }
- }
-
- return 0;
-}
-
-/****************************************************************************
- * WebRtcNetEQ_GenerateBGN(...)
- *
- * This function generates and writes len samples of background noise to the
- * output vector. The Expand function will be called repeatedly until the
- * correct number of samples is produced.
- *
- * Input:
- * - inst : NetEq instance, i.e. the user that requests more
- * speech/audio data
- * - scratchPtr : Pointer to scratch vector
- * - len : Desired length of produced BGN.
- *
- *
- * Output:
- * - pw16_outData : Pointer to a memory space where the output data
- * should be stored
- *
- * Return value : >=0 - Number of noise samples produced and written
- * to output
- * -1 - Error
- */
-
-int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_outData, int16_t len)
-{
-
- int16_t pos = 0;
- int16_t tempLen = len;
-
- while (tempLen > 0)
- {
- /* while we still need more noise samples, call Expand to obtain background noise */
- WebRtcNetEQ_Expand(inst,
-#ifdef SCRATCH
- pw16_scratchPtr,
-#endif
- &pw16_outData[pos], &tempLen, 1 /*BGNonly*/);
-
- pos += tempLen; /* we got this many samples */
- tempLen = len - pos; /* this is the number of samples we still need */
- }
-
- return pos;
-}
-
-#undef SCRATCH_PW16_BEST_CORR_INDEX
-#undef SCRATCH_PW16_BEST_CORR
-#undef SCRATCH_PW16_BEST_DIST_INDEX
-#undef SCRATCH_PW16_BEST_DIST
-#undef SCRATCH_PW16_CORR_VEC
-#undef SCRATCH_PW16_CORR2
-#undef SCRATCH_PW32_AUTO_CORR
-#undef SCRATCH_PW16_RC
-#undef SCRATCH_PW16_RAND_VEC
-#undef SCRATCH_NETEQDSP_CORRELATOR
-#undef SCRATCH_PW16_SCALED_RAND_VEC
-#undef SCRATCH_PW16_UNVOICED_VEC_SPACE
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc
new file mode 100644
index 00000000000..14a77982234
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc
@@ -0,0 +1,904 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+
+#include <assert.h>
+#include <string.h> // memset
+
+#include <algorithm> // min, max
+#include <limits> // numeric_limits<T>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+
+namespace webrtc {
+
+void Expand::Reset() {
+ first_expand_ = true;
+ consecutive_expands_ = 0;
+ max_lag_ = 0;
+ for (size_t ix = 0; ix < num_channels_; ++ix) {
+ channel_parameters_[ix].expand_vector0.Clear();
+ channel_parameters_[ix].expand_vector1.Clear();
+ }
+}
+
+int Expand::Process(AudioMultiVector* output) {
+ int16_t random_vector[kMaxSampleRate / 8000 * 120 + 30];
+ int16_t scaled_random_vector[kMaxSampleRate / 8000 * 125];
+ static const int kTempDataSize = 3600;
+ int16_t temp_data[kTempDataSize]; // TODO(hlundin) Remove this.
+ int16_t* voiced_vector_storage = temp_data;
+ int16_t* voiced_vector = &voiced_vector_storage[overlap_length_];
+ static const int kNoiseLpcOrder = BackgroundNoise::kMaxLpcOrder;
+ int16_t unvoiced_array_memory[kNoiseLpcOrder + kMaxSampleRate / 8000 * 125];
+ int16_t* unvoiced_vector = unvoiced_array_memory + kUnvoicedLpcOrder;
+ int16_t* noise_vector = unvoiced_array_memory + kNoiseLpcOrder;
+
+ int fs_mult = fs_hz_ / 8000;
+
+ if (first_expand_) {
+ // Perform initial setup if this is the first expansion since last reset.
+ AnalyzeSignal(random_vector);
+ first_expand_ = false;
+ } else {
+ // This is not the first expansion, parameters are already estimated.
+ // Extract a noise segment.
+ int16_t rand_length = max_lag_;
+ // This only applies to SWB where length could be larger than 256.
+ assert(rand_length <= kMaxSampleRate / 8000 * 120 + 30);
+ GenerateRandomVector(2, rand_length, random_vector);
+ }
+
+
+ // Generate signal.
+ UpdateLagIndex();
+
+ // Voiced part.
+ // Generate a weighted vector with the current lag.
+ size_t expansion_vector_length = max_lag_ + overlap_length_;
+ size_t current_lag = expand_lags_[current_lag_index_];
+ // Copy lag+overlap data.
+ size_t expansion_vector_position = expansion_vector_length - current_lag -
+ overlap_length_;
+ size_t temp_length = current_lag + overlap_length_;
+ for (size_t channel_ix = 0; channel_ix < num_channels_; ++channel_ix) {
+ ChannelParameters& parameters = channel_parameters_[channel_ix];
+ if (current_lag_index_ == 0) {
+ // Use only expand_vector0.
+ assert(expansion_vector_position + temp_length <=
+ parameters.expand_vector0.Size());
+ memcpy(voiced_vector_storage,
+ &parameters.expand_vector0[expansion_vector_position],
+ sizeof(int16_t) * temp_length);
+ } else if (current_lag_index_ == 1) {
+ // Mix 3/4 of expand_vector0 with 1/4 of expand_vector1.
+ WebRtcSpl_ScaleAndAddVectorsWithRound(
+ &parameters.expand_vector0[expansion_vector_position], 3,
+ &parameters.expand_vector1[expansion_vector_position], 1, 2,
+ voiced_vector_storage, static_cast<int>(temp_length));
+ } else if (current_lag_index_ == 2) {
+ // Mix 1/2 of expand_vector0 with 1/2 of expand_vector1.
+ assert(expansion_vector_position + temp_length <=
+ parameters.expand_vector0.Size());
+ assert(expansion_vector_position + temp_length <=
+ parameters.expand_vector1.Size());
+ WebRtcSpl_ScaleAndAddVectorsWithRound(
+ &parameters.expand_vector0[expansion_vector_position], 1,
+ &parameters.expand_vector1[expansion_vector_position], 1, 1,
+ voiced_vector_storage, static_cast<int>(temp_length));
+ }
+
+ // Get tapering window parameters. Values are in Q15.
+ int16_t muting_window, muting_window_increment;
+ int16_t unmuting_window, unmuting_window_increment;
+ if (fs_hz_ == 8000) {
+ muting_window = DspHelper::kMuteFactorStart8kHz;
+ muting_window_increment = DspHelper::kMuteFactorIncrement8kHz;
+ unmuting_window = DspHelper::kUnmuteFactorStart8kHz;
+ unmuting_window_increment = DspHelper::kUnmuteFactorIncrement8kHz;
+ } else if (fs_hz_ == 16000) {
+ muting_window = DspHelper::kMuteFactorStart16kHz;
+ muting_window_increment = DspHelper::kMuteFactorIncrement16kHz;
+ unmuting_window = DspHelper::kUnmuteFactorStart16kHz;
+ unmuting_window_increment = DspHelper::kUnmuteFactorIncrement16kHz;
+ } else if (fs_hz_ == 32000) {
+ muting_window = DspHelper::kMuteFactorStart32kHz;
+ muting_window_increment = DspHelper::kMuteFactorIncrement32kHz;
+ unmuting_window = DspHelper::kUnmuteFactorStart32kHz;
+ unmuting_window_increment = DspHelper::kUnmuteFactorIncrement32kHz;
+ } else { // fs_ == 48000
+ muting_window = DspHelper::kMuteFactorStart48kHz;
+ muting_window_increment = DspHelper::kMuteFactorIncrement48kHz;
+ unmuting_window = DspHelper::kUnmuteFactorStart48kHz;
+ unmuting_window_increment = DspHelper::kUnmuteFactorIncrement48kHz;
+ }
+
+    // Smooth the expanded signal if it has not been muted to a low amplitude and
+ // |current_voice_mix_factor| is larger than 0.5.
+ if ((parameters.mute_factor > 819) &&
+ (parameters.current_voice_mix_factor > 8192)) {
+ size_t start_ix = sync_buffer_->Size() - overlap_length_;
+ for (size_t i = 0; i < overlap_length_; i++) {
+ // Do overlap add between new vector and overlap.
+ (*sync_buffer_)[channel_ix][start_ix + i] =
+ (((*sync_buffer_)[channel_ix][start_ix + i] * muting_window) +
+ (((parameters.mute_factor * voiced_vector_storage[i]) >> 14) *
+ unmuting_window) + 16384) >> 15;
+ muting_window += muting_window_increment;
+ unmuting_window += unmuting_window_increment;
+ }
+ } else if (parameters.mute_factor == 0) {
+ // The expanded signal will consist of only comfort noise if
+ // mute_factor = 0. Set the output length to 15 ms for best noise
+ // production.
+ // TODO(hlundin): This has been disabled since the length of
+ // parameters.expand_vector0 and parameters.expand_vector1 no longer
+ // match with expand_lags_, causing invalid reads and writes. Is it a good
+ // idea to enable this again, and solve the vector size problem?
+// max_lag_ = fs_mult * 120;
+// expand_lags_[0] = fs_mult * 120;
+// expand_lags_[1] = fs_mult * 120;
+// expand_lags_[2] = fs_mult * 120;
+ }
+
+ // Unvoiced part.
+ // Filter |scaled_random_vector| through |ar_filter_|.
+ memcpy(unvoiced_vector - kUnvoicedLpcOrder, parameters.ar_filter_state,
+ sizeof(int16_t) * kUnvoicedLpcOrder);
+ int32_t add_constant = 0;
+ if (parameters.ar_gain_scale > 0) {
+ add_constant = 1 << (parameters.ar_gain_scale - 1);
+ }
+ WebRtcSpl_AffineTransformVector(scaled_random_vector, random_vector,
+ parameters.ar_gain, add_constant,
+ parameters.ar_gain_scale,
+ static_cast<int>(current_lag));
+ WebRtcSpl_FilterARFastQ12(scaled_random_vector, unvoiced_vector,
+ parameters.ar_filter, kUnvoicedLpcOrder + 1,
+ static_cast<int>(current_lag));
+ memcpy(parameters.ar_filter_state,
+ &(unvoiced_vector[current_lag - kUnvoicedLpcOrder]),
+ sizeof(int16_t) * kUnvoicedLpcOrder);
+
+ // Combine voiced and unvoiced contributions.
+
+ // Set a suitable cross-fading slope.
+ // For lag =
+ // <= 31 * fs_mult => go from 1 to 0 in about 8 ms;
+ // (>= 31 .. <= 63) * fs_mult => go from 1 to 0 in about 16 ms;
+ // >= 64 * fs_mult => go from 1 to 0 in about 32 ms.
+ // temp_shift = getbits(max_lag_) - 5.
+ int temp_shift = (31 - WebRtcSpl_NormW32(max_lag_)) - 5;
+ int16_t mix_factor_increment = 256 >> temp_shift;
+ if (stop_muting_) {
+ mix_factor_increment = 0;
+ }
+
+ // Create combined signal by shifting in more and more of unvoiced part.
+ temp_shift = 8 - temp_shift; // = getbits(mix_factor_increment).
+ size_t temp_lenght = (parameters.current_voice_mix_factor -
+ parameters.voice_mix_factor) >> temp_shift;
+ temp_lenght = std::min(temp_lenght, current_lag);
+ DspHelper::CrossFade(voiced_vector, unvoiced_vector, temp_lenght,
+ &parameters.current_voice_mix_factor,
+ mix_factor_increment, temp_data);
+
+ // End of cross-fading period was reached before end of expanded signal
+ // path. Mix the rest with a fixed mixing factor.
+ if (temp_lenght < current_lag) {
+ if (mix_factor_increment != 0) {
+ parameters.current_voice_mix_factor = parameters.voice_mix_factor;
+ }
+ int temp_scale = 16384 - parameters.current_voice_mix_factor;
+ WebRtcSpl_ScaleAndAddVectorsWithRound(
+ voiced_vector + temp_lenght, parameters.current_voice_mix_factor,
+ unvoiced_vector + temp_lenght, temp_scale, 14,
+ temp_data + temp_lenght, static_cast<int>(current_lag - temp_lenght));
+ }
+
+ // Select muting slope depending on how many consecutive expands we have
+ // done.
+ if (consecutive_expands_ == 3) {
+ // Let the mute factor decrease from 1.0 to 0.95 in 6.25 ms.
+ // mute_slope = 0.0010 / fs_mult in Q20.
+ parameters.mute_slope = std::max(parameters.mute_slope,
+ static_cast<int16_t>(1049 / fs_mult));
+ }
+ if (consecutive_expands_ == 7) {
+ // Let the mute factor decrease from 1.0 to 0.90 in 6.25 ms.
+ // mute_slope = 0.0020 / fs_mult in Q20.
+ parameters.mute_slope = std::max(parameters.mute_slope,
+ static_cast<int16_t>(2097 / fs_mult));
+ }
+
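The constants above follow directly from the Q20 representation; a short check of the arithmetic (illustrative only, not part of the new file):

// 0.0010 in Q20: 0.0010 * (1 << 20) = 1048.6 -> 1049
// 0.0020 in Q20: 0.0020 * (1 << 20) = 2097.2 -> 2097
// Dividing by fs_mult lowers the per-sample slope at higher rates, so the decay
// per millisecond (and hence the 6.25 ms target) is the same for every sample
// rate.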
+ // Mute segment according to slope value.
+ if ((consecutive_expands_ != 0) || !parameters.onset) {
+ // Mute to the previous level, then continue with the muting.
+ WebRtcSpl_AffineTransformVector(temp_data, temp_data,
+ parameters.mute_factor, 8192,
+ 14, static_cast<int>(current_lag));
+
+ if (!stop_muting_) {
+ DspHelper::MuteSignal(temp_data, parameters.mute_slope, current_lag);
+
+ // Shift by 6 to go from Q20 to Q14.
+ // TODO(hlundin): Adding 8192 before shifting 6 steps seems wrong.
+ // Legacy.
+ int16_t gain = static_cast<int16_t>(16384 -
+ (((current_lag * parameters.mute_slope) + 8192) >> 6));
+ gain = ((gain * parameters.mute_factor) + 8192) >> 14;
+
+ // Guard against getting stuck with very small (but sometimes audible)
+ // gain.
+ if ((consecutive_expands_ > 3) && (gain >= parameters.mute_factor)) {
+ parameters.mute_factor = 0;
+ } else {
+ parameters.mute_factor = gain;
+ }
+ }
+ }
+
+ // Background noise part.
+ GenerateBackgroundNoise(random_vector,
+ channel_ix,
+ channel_parameters_[channel_ix].mute_slope,
+ TooManyExpands(),
+ current_lag,
+ unvoiced_array_memory);
+
+ // Add background noise to the combined voiced-unvoiced signal.
+ for (size_t i = 0; i < current_lag; i++) {
+ temp_data[i] = temp_data[i] + noise_vector[i];
+ }
+ if (channel_ix == 0) {
+ output->AssertSize(current_lag);
+ } else {
+ assert(output->Size() == current_lag);
+ }
+ memcpy(&(*output)[channel_ix][0], temp_data,
+ sizeof(temp_data[0]) * current_lag);
+ }
+
+  // Increase the consecutive expand counter and cap it at
+  // kMaxConsecutiveExpands.
+ consecutive_expands_ = consecutive_expands_ >= kMaxConsecutiveExpands ?
+ kMaxConsecutiveExpands : consecutive_expands_ + 1;
+ return 0;
+}
+
+void Expand::SetParametersForNormalAfterExpand() {
+ current_lag_index_ = 0;
+ lag_index_direction_ = 0;
+ stop_muting_ = true; // Do not mute signal any more.
+}
+
+void Expand::SetParametersForMergeAfterExpand() {
+  current_lag_index_ = -1;  // Out of the 3 possible ones.
+  lag_index_direction_ = 1;  // Make sure we get the "optimal" lag.
+ stop_muting_ = true;
+}
+
+void Expand::InitializeForAnExpandPeriod() {
+ lag_index_direction_ = 1;
+ current_lag_index_ = -1;
+ stop_muting_ = false;
+ random_vector_->set_seed_increment(1);
+ consecutive_expands_ = 0;
+ for (size_t ix = 0; ix < num_channels_; ++ix) {
+ channel_parameters_[ix].current_voice_mix_factor = 16384; // 1.0 in Q14.
+ channel_parameters_[ix].mute_factor = 16384; // 1.0 in Q14.
+ // Start with 0 gain for background noise.
+ background_noise_->SetMuteFactor(ix, 0);
+ }
+}
+
+bool Expand::TooManyExpands() {
+ return consecutive_expands_ >= kMaxConsecutiveExpands;
+}
+
+void Expand::AnalyzeSignal(int16_t* random_vector) {
+ int32_t auto_correlation[kUnvoicedLpcOrder + 1];
+ int16_t reflection_coeff[kUnvoicedLpcOrder];
+ int16_t correlation_vector[kMaxSampleRate / 8000 * 102];
+ int best_correlation_index[kNumCorrelationCandidates];
+ int16_t best_correlation[kNumCorrelationCandidates];
+ int16_t best_distortion_index[kNumCorrelationCandidates];
+ int16_t best_distortion[kNumCorrelationCandidates];
+ int32_t correlation_vector2[(99 * kMaxSampleRate / 8000) + 1];
+ int32_t best_distortion_w32[kNumCorrelationCandidates];
+ static const int kNoiseLpcOrder = BackgroundNoise::kMaxLpcOrder;
+ int16_t unvoiced_array_memory[kNoiseLpcOrder + kMaxSampleRate / 8000 * 125];
+ int16_t* unvoiced_vector = unvoiced_array_memory + kUnvoicedLpcOrder;
+
+ int fs_mult = fs_hz_ / 8000;
+
+ // Pre-calculate common multiplications with fs_mult.
+ int fs_mult_4 = fs_mult * 4;
+ int fs_mult_20 = fs_mult * 20;
+ int fs_mult_120 = fs_mult * 120;
+ int fs_mult_dist_len = fs_mult * kDistortionLength;
+ int fs_mult_lpc_analysis_len = fs_mult * kLpcAnalysisLength;
+
+ const size_t signal_length = 256 * fs_mult;
+ const int16_t* audio_history =
+ &(*sync_buffer_)[0][sync_buffer_->Size() - signal_length];
+
+ // Initialize.
+ InitializeForAnExpandPeriod();
+
+ // Calculate correlation in downsampled domain (4 kHz sample rate).
+ int16_t correlation_scale;
+ int correlation_length = 51; // TODO(hlundin): Legacy bit-exactness.
+  // If it is decided to break bit-exactness, |correlation_length| should be
+  // initialized to the return value of Correlation().
+ Correlation(audio_history, signal_length, correlation_vector,
+ &correlation_scale);
+
+ // Find peaks in correlation vector.
+ DspHelper::PeakDetection(correlation_vector, correlation_length,
+ kNumCorrelationCandidates, fs_mult,
+ best_correlation_index, best_correlation);
+
+ // Adjust peak locations; cross-correlation lags start at 2.5 ms
+ // (20 * fs_mult samples).
+ best_correlation_index[0] += fs_mult_20;
+ best_correlation_index[1] += fs_mult_20;
+ best_correlation_index[2] += fs_mult_20;
+
+ // Calculate distortion around the |kNumCorrelationCandidates| best lags.
+ int distortion_scale = 0;
+ for (int i = 0; i < kNumCorrelationCandidates; i++) {
+ int16_t min_index = std::max(fs_mult_20,
+ best_correlation_index[i] - fs_mult_4);
+ int16_t max_index = std::min(fs_mult_120 - 1,
+ best_correlation_index[i] + fs_mult_4);
+ best_distortion_index[i] = DspHelper::MinDistortion(
+ &(audio_history[signal_length - fs_mult_dist_len]), min_index,
+ max_index, fs_mult_dist_len, &best_distortion_w32[i]);
+ distortion_scale = std::max(16 - WebRtcSpl_NormW32(best_distortion_w32[i]),
+ distortion_scale);
+ }
+ // Shift the distortion values to fit in 16 bits.
+ WebRtcSpl_VectorBitShiftW32ToW16(best_distortion, kNumCorrelationCandidates,
+ best_distortion_w32, distortion_scale);
+
+ // Find the maximizing index |i| of the cost function
+ // f[i] = best_correlation[i] / best_distortion[i].
+ int32_t best_ratio = std::numeric_limits<int32_t>::min();
+ int best_index = -1;
+ for (int i = 0; i < kNumCorrelationCandidates; ++i) {
+ int32_t ratio;
+ if (best_distortion[i] > 0) {
+ ratio = (best_correlation[i] << 16) / best_distortion[i];
+ } else if (best_correlation[i] == 0) {
+      ratio = 0;  // No correlation; set result to zero.
+ } else {
+ ratio = std::numeric_limits<int32_t>::max(); // Denominator is zero.
+ }
+ if (ratio > best_ratio) {
+ best_index = i;
+ best_ratio = ratio;
+ }
+ }
+
+ int distortion_lag = best_distortion_index[best_index];
+ int correlation_lag = best_correlation_index[best_index];
+ max_lag_ = std::max(distortion_lag, correlation_lag);
+
+ // Calculate the exact best correlation in the range between
+ // |correlation_lag| and |distortion_lag|.
+ correlation_length = distortion_lag + 10;
+ correlation_length = std::min(correlation_length, fs_mult_120);
+ correlation_length = std::max(correlation_length, 60 * fs_mult);
+
+ int start_index = std::min(distortion_lag, correlation_lag);
+ int correlation_lags = WEBRTC_SPL_ABS_W16((distortion_lag-correlation_lag))
+ + 1;
+ assert(correlation_lags <= 99 * fs_mult + 1); // Cannot be larger.
+
+ for (size_t channel_ix = 0; channel_ix < num_channels_; ++channel_ix) {
+ ChannelParameters& parameters = channel_parameters_[channel_ix];
+ // Calculate suitable scaling.
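+    // The scale is chosen so that the cross-correlation sum below fits in a
+    // 32-bit word.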
+ int16_t signal_max = WebRtcSpl_MaxAbsValueW16(
+ &audio_history[signal_length - correlation_length - start_index
+ - correlation_lags],
+ correlation_length + start_index + correlation_lags - 1);
+ correlation_scale = ((31 - WebRtcSpl_NormW32(signal_max * signal_max))
+ + (31 - WebRtcSpl_NormW32(correlation_length))) - 31;
+ correlation_scale = std::max(static_cast<int16_t>(0), correlation_scale);
+
+ // Calculate the correlation, store in |correlation_vector2|.
+ WebRtcSpl_CrossCorrelation(
+ correlation_vector2,
+ &(audio_history[signal_length - correlation_length]),
+ &(audio_history[signal_length - correlation_length - start_index]),
+ correlation_length, correlation_lags, correlation_scale, -1);
+
+ // Find maximizing index.
+ best_index = WebRtcSpl_MaxIndexW32(correlation_vector2, correlation_lags);
+ int32_t max_correlation = correlation_vector2[best_index];
+ // Compensate index with start offset.
+ best_index = best_index + start_index;
+
+ // Calculate energies.
+ int32_t energy1 = WebRtcSpl_DotProductWithScale(
+ &(audio_history[signal_length - correlation_length]),
+ &(audio_history[signal_length - correlation_length]),
+ correlation_length, correlation_scale);
+ int32_t energy2 = WebRtcSpl_DotProductWithScale(
+ &(audio_history[signal_length - correlation_length - best_index]),
+ &(audio_history[signal_length - correlation_length - best_index]),
+ correlation_length, correlation_scale);
+
+ // Calculate the correlation coefficient between the two portions of the
+ // signal.
+ int16_t corr_coefficient;
+ if ((energy1 > 0) && (energy2 > 0)) {
+ int energy1_scale = std::max(16 - WebRtcSpl_NormW32(energy1), 0);
+ int energy2_scale = std::max(16 - WebRtcSpl_NormW32(energy2), 0);
+ // Make sure total scaling is even (to simplify scale factor after sqrt).
+ if ((energy1_scale + energy2_scale) & 1) {
+ // If sum is odd, add 1 to make it even.
+ energy1_scale += 1;
+ }
+ int16_t scaled_energy1 = energy1 >> energy1_scale;
+ int16_t scaled_energy2 = energy2 >> energy2_scale;
+ int16_t sqrt_energy_product = WebRtcSpl_SqrtFloor(
+ scaled_energy1 * scaled_energy2);
+ // Calculate max_correlation / sqrt(energy1 * energy2) in Q14.
+ int cc_shift = 14 - (energy1_scale + energy2_scale) / 2;
+ max_correlation = WEBRTC_SPL_SHIFT_W32(max_correlation, cc_shift);
+ corr_coefficient = WebRtcSpl_DivW32W16(max_correlation,
+ sqrt_energy_product);
+ corr_coefficient = std::min(static_cast<int16_t>(16384),
+ corr_coefficient); // Cap at 1.0 in Q14.
+ } else {
+ corr_coefficient = 0;
+ }
+
+ // Extract the two vectors expand_vector0 and expand_vector1 from
+ // |audio_history|.
+ int16_t expansion_length = static_cast<int16_t>(max_lag_ + overlap_length_);
+ const int16_t* vector1 = &(audio_history[signal_length - expansion_length]);
+ const int16_t* vector2 = vector1 - distortion_lag;
+ // Normalize the second vector to the same energy as the first.
+ energy1 = WebRtcSpl_DotProductWithScale(vector1, vector1, expansion_length,
+ correlation_scale);
+ energy2 = WebRtcSpl_DotProductWithScale(vector2, vector2, expansion_length,
+ correlation_scale);
+ // Confirm that amplitude ratio sqrt(energy1 / energy2) is within 0.5 - 2.0,
+    // i.e., energy1 / energy2 is within 0.25 - 4.
+ int16_t amplitude_ratio;
+ if ((energy1 / 4 < energy2) && (energy1 > energy2 / 4)) {
+ // Energy constraint fulfilled. Use both vectors and scale them
+ // accordingly.
+ int16_t scaled_energy2 = std::max(16 - WebRtcSpl_NormW32(energy2), 0);
+ int16_t scaled_energy1 = scaled_energy2 - 13;
+      // Calculate energy1 / energy2 in Q13.
+ int32_t energy_ratio = WebRtcSpl_DivW32W16(
+ WEBRTC_SPL_SHIFT_W32(energy1, -scaled_energy1),
+ WEBRTC_SPL_RSHIFT_W32(energy2, scaled_energy2));
+ // Calculate sqrt ratio in Q13 (sqrt of en1/en2 in Q26).
+ amplitude_ratio = WebRtcSpl_SqrtFloor(energy_ratio << 13);
+ // Copy the two vectors and give them the same energy.
+ parameters.expand_vector0.Clear();
+ parameters.expand_vector0.PushBack(vector1, expansion_length);
+ parameters.expand_vector1.Clear();
+ if (parameters.expand_vector1.Size() <
+ static_cast<size_t>(expansion_length)) {
+ parameters.expand_vector1.Extend(
+ expansion_length - parameters.expand_vector1.Size());
+ }
+ WebRtcSpl_AffineTransformVector(&parameters.expand_vector1[0],
+ const_cast<int16_t*>(vector2),
+ amplitude_ratio,
+ 4096,
+ 13,
+ expansion_length);
+ } else {
+ // Energy change constraint not fulfilled. Only use last vector.
+ parameters.expand_vector0.Clear();
+ parameters.expand_vector0.PushBack(vector1, expansion_length);
+ // Copy from expand_vector0 to expand_vector1.
+ parameters.expand_vector0.CopyFrom(&parameters.expand_vector1);
+      // Set |amplitude_ratio| since it is used by the muting slope.
+ if ((energy1 / 4 < energy2) || (energy2 == 0)) {
+ amplitude_ratio = 4096; // 0.5 in Q13.
+ } else {
+ amplitude_ratio = 16384; // 2.0 in Q13.
+ }
+ }
+
+ // Set the 3 lag values.
+ int lag_difference = distortion_lag - correlation_lag;
+ if (lag_difference == 0) {
+ // |distortion_lag| and |correlation_lag| are equal.
+ expand_lags_[0] = distortion_lag;
+ expand_lags_[1] = distortion_lag;
+ expand_lags_[2] = distortion_lag;
+ } else {
+ // |distortion_lag| and |correlation_lag| are not equal; use different
+ // combinations of the two.
+ // First lag is |distortion_lag| only.
+ expand_lags_[0] = distortion_lag;
+ // Second lag is the average of the two.
+ expand_lags_[1] = (distortion_lag + correlation_lag) / 2;
+ // Third lag is the average again, but rounding towards |correlation_lag|.
+ if (lag_difference > 0) {
+ expand_lags_[2] = (distortion_lag + correlation_lag - 1) / 2;
+ } else {
+ expand_lags_[2] = (distortion_lag + correlation_lag + 1) / 2;
+ }
+ }
+
+ // Calculate the LPC and the gain of the filters.
+ // Calculate scale value needed for auto-correlation.
+ correlation_scale = WebRtcSpl_MaxAbsValueW16(
+ &(audio_history[signal_length - fs_mult_lpc_analysis_len]),
+ fs_mult_lpc_analysis_len);
+
+ correlation_scale = std::min(16 - WebRtcSpl_NormW32(correlation_scale), 0);
+ correlation_scale = std::max(correlation_scale * 2 + 7, 0);
+
+ // Calculate kUnvoicedLpcOrder + 1 lags of the auto-correlation function.
+ size_t temp_index = signal_length - fs_mult_lpc_analysis_len -
+ kUnvoicedLpcOrder;
+ // Copy signal to temporary vector to be able to pad with leading zeros.
+ int16_t* temp_signal = new int16_t[fs_mult_lpc_analysis_len
+ + kUnvoicedLpcOrder];
+ memset(temp_signal, 0,
+ sizeof(int16_t) * (fs_mult_lpc_analysis_len + kUnvoicedLpcOrder));
+ memcpy(&temp_signal[kUnvoicedLpcOrder],
+ &audio_history[temp_index + kUnvoicedLpcOrder],
+ sizeof(int16_t) * fs_mult_lpc_analysis_len);
+ WebRtcSpl_CrossCorrelation(auto_correlation,
+ &temp_signal[kUnvoicedLpcOrder],
+ &temp_signal[kUnvoicedLpcOrder],
+ fs_mult_lpc_analysis_len, kUnvoicedLpcOrder + 1,
+ correlation_scale, -1);
+ delete [] temp_signal;
+
+ // Verify that variance is positive.
+ if (auto_correlation[0] > 0) {
+ // Estimate AR filter parameters using Levinson-Durbin algorithm;
+ // kUnvoicedLpcOrder + 1 filter coefficients.
+ int16_t stability = WebRtcSpl_LevinsonDurbin(auto_correlation,
+ parameters.ar_filter,
+ reflection_coeff,
+ kUnvoicedLpcOrder);
+
+ // Keep filter parameters only if filter is stable.
+ if (stability != 1) {
+ // Set first coefficient to 4096 (1.0 in Q12).
+ parameters.ar_filter[0] = 4096;
+ // Set remaining |kUnvoicedLpcOrder| coefficients to zero.
+ WebRtcSpl_MemSetW16(parameters.ar_filter + 1, 0, kUnvoicedLpcOrder);
+ }
+ }
+
+ if (channel_ix == 0) {
+ // Extract a noise segment.
+ int16_t noise_length;
+ if (distortion_lag < 40) {
+ noise_length = 2 * distortion_lag + 30;
+ } else {
+ noise_length = distortion_lag + 30;
+ }
+ if (noise_length <= RandomVector::kRandomTableSize) {
+ memcpy(random_vector, RandomVector::kRandomTable,
+ sizeof(int16_t) * noise_length);
+ } else {
+ // Only applies to SWB where length could be larger than
+ // |kRandomTableSize|.
+ memcpy(random_vector, RandomVector::kRandomTable,
+ sizeof(int16_t) * RandomVector::kRandomTableSize);
+ assert(noise_length <= kMaxSampleRate / 8000 * 120 + 30);
+ random_vector_->IncreaseSeedIncrement(2);
+ random_vector_->Generate(
+ noise_length - RandomVector::kRandomTableSize,
+ &random_vector[RandomVector::kRandomTableSize]);
+ }
+ }
+
+ // Set up state vector and calculate scale factor for unvoiced filtering.
+ memcpy(parameters.ar_filter_state,
+ &(audio_history[signal_length - kUnvoicedLpcOrder]),
+ sizeof(int16_t) * kUnvoicedLpcOrder);
+ memcpy(unvoiced_vector - kUnvoicedLpcOrder,
+ &(audio_history[signal_length - 128 - kUnvoicedLpcOrder]),
+ sizeof(int16_t) * kUnvoicedLpcOrder);
+ WebRtcSpl_FilterMAFastQ12(
+ const_cast<int16_t*>(&audio_history[signal_length - 128]),
+ unvoiced_vector, parameters.ar_filter, kUnvoicedLpcOrder + 1, 128);
+ int16_t unvoiced_prescale;
+ if (WebRtcSpl_MaxAbsValueW16(unvoiced_vector, 128) > 4000) {
+ unvoiced_prescale = 4;
+ } else {
+ unvoiced_prescale = 0;
+ }
+ int32_t unvoiced_energy = WebRtcSpl_DotProductWithScale(unvoiced_vector,
+ unvoiced_vector,
+ 128,
+ unvoiced_prescale);
+
+ // Normalize |unvoiced_energy| to 28 or 29 bits to preserve sqrt() accuracy.
+ int16_t unvoiced_scale = WebRtcSpl_NormW32(unvoiced_energy) - 3;
+ // Make sure we do an odd number of shifts since we already have 7 shifts
+    // from dividing by 128 earlier. This will make the total scale factor
+ // even, which is suitable for the sqrt.
+ unvoiced_scale += ((unvoiced_scale & 0x1) ^ 0x1);
+ unvoiced_energy = WEBRTC_SPL_SHIFT_W32(unvoiced_energy, unvoiced_scale);
+ int32_t unvoiced_gain = WebRtcSpl_SqrtFloor(unvoiced_energy);
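+    // The square root halves the accumulated scale factor, hence the division
+    // by 2 when deriving |ar_gain_scale| below.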
+ parameters.ar_gain_scale = 13
+ + (unvoiced_scale + 7 - unvoiced_prescale) / 2;
+ parameters.ar_gain = unvoiced_gain;
+
+ // Calculate voice_mix_factor from corr_coefficient.
+ // Let x = corr_coefficient. Then, we compute:
+ // if (x > 0.48)
+ // voice_mix_factor = (-5179 + 19931x - 16422x^2 + 5776x^3) / 4096;
+ // else
+ // voice_mix_factor = 0;
+ if (corr_coefficient > 7875) {
+ int16_t x1, x2, x3;
+ x1 = corr_coefficient; // |corr_coefficient| is in Q14.
+ x2 = (x1 * x1) >> 14; // Shift 14 to keep result in Q14.
+ x3 = (x1 * x2) >> 14;
+ static const int kCoefficients[4] = { -5179, 19931, -16422, 5776 };
+ int32_t temp_sum = kCoefficients[0] << 14;
+ temp_sum += kCoefficients[1] * x1;
+ temp_sum += kCoefficients[2] * x2;
+ temp_sum += kCoefficients[3] * x3;
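+      // |temp_sum| is in Q26; dividing by 4096 brings it back to Q14.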
+ parameters.voice_mix_factor = temp_sum / 4096;
+ parameters.voice_mix_factor = std::min(parameters.voice_mix_factor,
+ static_cast<int16_t>(16384));
+ parameters.voice_mix_factor = std::max(parameters.voice_mix_factor,
+ static_cast<int16_t>(0));
+ } else {
+ parameters.voice_mix_factor = 0;
+ }
+
+ // Calculate muting slope. Reuse value from earlier scaling of
+ // |expand_vector0| and |expand_vector1|.
+ int16_t slope = amplitude_ratio;
+ if (slope > 12288) {
+ // slope > 1.5.
+ // Calculate (1 - (1 / slope)) / distortion_lag =
+ // (slope - 1) / (distortion_lag * slope).
+ // |slope| is in Q13, so 1 corresponds to 8192. Shift up to Q25 before
+ // the division.
+ // Shift the denominator from Q13 to Q5 before the division. The result of
+ // the division will then be in Q20.
+ int16_t temp_ratio = WebRtcSpl_DivW32W16((slope - 8192) << 12,
+ (distortion_lag * slope) >> 8);
+ if (slope > 14746) {
+ // slope > 1.8.
+ // Divide by 2, with proper rounding.
+ parameters.mute_slope = (temp_ratio + 1) / 2;
+ } else {
+ // Divide by 8, with proper rounding.
+ parameters.mute_slope = (temp_ratio + 4) / 8;
+ }
+ parameters.onset = true;
+ } else {
+ // Calculate (1 - slope) / distortion_lag.
+ // Shift |slope| by 7 to Q20 before the division. The result is in Q20.
+ parameters.mute_slope = WebRtcSpl_DivW32W16((8192 - slope) << 7,
+ distortion_lag);
+ if (parameters.voice_mix_factor <= 13107) {
+ // Make sure the mute factor decreases from 1.0 to 0.9 in no more than
+ // 6.25 ms.
+ // mute_slope >= 0.005 / fs_mult in Q20.
+ parameters.mute_slope = std::max(static_cast<int16_t>(5243 / fs_mult),
+ parameters.mute_slope);
+ } else if (slope > 8028) {
+ parameters.mute_slope = 0;
+ }
+ parameters.onset = false;
+ }
+ }
+}
+
+int16_t Expand::Correlation(const int16_t* input, size_t input_length,
+ int16_t* output, int16_t* output_scale) const {
+ // Set parameters depending on sample rate.
+ const int16_t* filter_coefficients;
+ int16_t num_coefficients;
+ int16_t downsampling_factor;
+ if (fs_hz_ == 8000) {
+ num_coefficients = 3;
+ downsampling_factor = 2;
+ filter_coefficients = DspHelper::kDownsample8kHzTbl;
+ } else if (fs_hz_ == 16000) {
+ num_coefficients = 5;
+ downsampling_factor = 4;
+ filter_coefficients = DspHelper::kDownsample16kHzTbl;
+ } else if (fs_hz_ == 32000) {
+ num_coefficients = 7;
+ downsampling_factor = 8;
+ filter_coefficients = DspHelper::kDownsample32kHzTbl;
+ } else { // fs_hz_ == 48000.
+ num_coefficients = 7;
+ downsampling_factor = 12;
+ filter_coefficients = DspHelper::kDownsample48kHzTbl;
+ }
+
+ // Correlate from lag 10 to lag 60 in downsampled domain.
+ // (Corresponds to 20-120 for narrow-band, 40-240 for wide-band, and so on.)
+ static const int kCorrelationStartLag = 10;
+ static const int kNumCorrelationLags = 54;
+ static const int kCorrelationLength = 60;
+ // Downsample to 4 kHz sample rate.
+ static const int kDownsampledLength = kCorrelationStartLag
+ + kNumCorrelationLags + kCorrelationLength;
+ int16_t downsampled_input[kDownsampledLength];
+ static const int kFilterDelay = 0;
+ WebRtcSpl_DownsampleFast(
+ input + input_length - kDownsampledLength * downsampling_factor,
+ kDownsampledLength * downsampling_factor, downsampled_input,
+ kDownsampledLength, filter_coefficients, num_coefficients,
+ downsampling_factor, kFilterDelay);
+
+  // Normalize |downsampled_input| to use all 16 bits.
+ int16_t max_value = WebRtcSpl_MaxAbsValueW16(downsampled_input,
+ kDownsampledLength);
+ int16_t norm_shift = 16 - WebRtcSpl_NormW32(max_value);
+ WebRtcSpl_VectorBitShiftW16(downsampled_input, kDownsampledLength,
+ downsampled_input, norm_shift);
+
+ int32_t correlation[kNumCorrelationLags];
+ static const int kCorrelationShift = 6;
+ WebRtcSpl_CrossCorrelation(
+ correlation,
+ &downsampled_input[kDownsampledLength - kCorrelationLength],
+ &downsampled_input[kDownsampledLength - kCorrelationLength
+ - kCorrelationStartLag],
+ kCorrelationLength, kNumCorrelationLags, kCorrelationShift, -1);
+
+ // Normalize and move data from 32-bit to 16-bit vector.
+ int32_t max_correlation = WebRtcSpl_MaxAbsValueW32(correlation,
+ kNumCorrelationLags);
+ int16_t norm_shift2 = std::max(18 - WebRtcSpl_NormW32(max_correlation), 0);
+ WebRtcSpl_VectorBitShiftW32ToW16(output, kNumCorrelationLags, correlation,
+ norm_shift2);
+ // Total scale factor (right shifts) of correlation value.
+ *output_scale = 2 * norm_shift + kCorrelationShift + norm_shift2;
+ return kNumCorrelationLags;
+}
+
+void Expand::UpdateLagIndex() {
+ current_lag_index_ = current_lag_index_ + lag_index_direction_;
+ // Change direction if needed.
+ if (current_lag_index_ <= 0) {
+ lag_index_direction_ = 1;
+ }
+ if (current_lag_index_ >= kNumLags - 1) {
+ lag_index_direction_ = -1;
+ }
+}
+
+Expand* ExpandFactory::Create(BackgroundNoise* background_noise,
+ SyncBuffer* sync_buffer,
+ RandomVector* random_vector,
+ int fs,
+ size_t num_channels) const {
+ return new Expand(background_noise, sync_buffer, random_vector, fs,
+ num_channels);
+}
+
+// TODO(turajs): This can be moved to BackgroundNoise class.
+void Expand::GenerateBackgroundNoise(int16_t* random_vector,
+ size_t channel,
+ int16_t mute_slope,
+ bool too_many_expands,
+ size_t num_noise_samples,
+ int16_t* buffer) {
+ static const int kNoiseLpcOrder = BackgroundNoise::kMaxLpcOrder;
+ int16_t scaled_random_vector[kMaxSampleRate / 8000 * 125];
+  assert(kMaxSampleRate / 8000 * 125 >= static_cast<int>(num_noise_samples));
+ int16_t* noise_samples = &buffer[kNoiseLpcOrder];
+ if (background_noise_->initialized()) {
+ // Use background noise parameters.
+ memcpy(noise_samples - kNoiseLpcOrder,
+ background_noise_->FilterState(channel),
+ sizeof(int16_t) * kNoiseLpcOrder);
+
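+    // |dc_offset| is the rounding term for the right shift by ScaleShift()
+    // inside the affine transform below.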
+ int dc_offset = 0;
+ if (background_noise_->ScaleShift(channel) > 1) {
+ dc_offset = 1 << (background_noise_->ScaleShift(channel) - 1);
+ }
+
+ // Scale random vector to correct energy level.
+ WebRtcSpl_AffineTransformVector(
+ scaled_random_vector, random_vector,
+ background_noise_->Scale(channel), dc_offset,
+ background_noise_->ScaleShift(channel),
+ static_cast<int>(num_noise_samples));
+
+ WebRtcSpl_FilterARFastQ12(scaled_random_vector, noise_samples,
+ background_noise_->Filter(channel),
+ kNoiseLpcOrder + 1,
+ static_cast<int>(num_noise_samples));
+
+ background_noise_->SetFilterState(
+ channel,
+ &(noise_samples[num_noise_samples - kNoiseLpcOrder]),
+ kNoiseLpcOrder);
+
+ // Unmute the background noise.
+ int16_t bgn_mute_factor = background_noise_->MuteFactor(channel);
+ NetEqBackgroundNoiseMode bgn_mode = background_noise_->mode();
+ if (bgn_mode == kBgnFade && too_many_expands && bgn_mute_factor > 0) {
+ // Fade BGN to zero.
+ // Calculate muting slope, approximately -2^18 / fs_hz.
+ int16_t mute_slope;
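+      // Note that this local |mute_slope| shadows the function argument; the
+      // fade-to-zero case uses its own negative slope.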
+ if (fs_hz_ == 8000) {
+ mute_slope = -32;
+ } else if (fs_hz_ == 16000) {
+ mute_slope = -16;
+ } else if (fs_hz_ == 32000) {
+ mute_slope = -8;
+ } else {
+ mute_slope = -5;
+ }
+ // Use UnmuteSignal function with negative slope.
+ // |bgn_mute_factor| is in Q14. |mute_slope| is in Q20.
+ DspHelper::UnmuteSignal(noise_samples,
+ num_noise_samples,
+ &bgn_mute_factor,
+ mute_slope,
+ noise_samples);
+ } else if (bgn_mute_factor < 16384) {
+      // If mode is kBgnOff, or if kBgnFade has started fading, use the
+      // regular |mute_slope|.
+ if (!stop_muting_ && bgn_mode != kBgnOff &&
+ !(bgn_mode == kBgnFade && too_many_expands)) {
+ DspHelper::UnmuteSignal(noise_samples,
+ static_cast<int>(num_noise_samples),
+ &bgn_mute_factor,
+ mute_slope,
+ noise_samples);
+ } else {
+ // kBgnOn and stop muting, or
+ // kBgnOff (mute factor is always 0), or
+ // kBgnFade has reached 0.
+ WebRtcSpl_AffineTransformVector(noise_samples, noise_samples,
+ bgn_mute_factor, 8192, 14,
+ static_cast<int>(num_noise_samples));
+ }
+ }
+ // Update mute_factor in BackgroundNoise class.
+ background_noise_->SetMuteFactor(channel, bgn_mute_factor);
+ } else {
+ // BGN parameters have not been initialized; use zero noise.
+ memset(noise_samples, 0, sizeof(int16_t) * num_noise_samples);
+ }
+}
+
+void Expand::GenerateRandomVector(int seed_increment,
+ size_t length,
+ int16_t* random_vector) {
+  // TODO(turajs): According to hlundin, the loop should not be needed. It
+  // should be just as good to generate all of the vector in one call.
+ size_t samples_generated = 0;
+ const size_t kMaxRandSamples = RandomVector::kRandomTableSize;
+  while (samples_generated < length) {
+ size_t rand_length = std::min(length - samples_generated, kMaxRandSamples);
+ random_vector_->IncreaseSeedIncrement(seed_increment);
+ random_vector_->Generate(rand_length, &random_vector[samples_generated]);
+ samples_generated += rand_length;
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h
new file mode 100644
index 00000000000..1acf951b980
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h
@@ -0,0 +1,187 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_EXPAND_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_EXPAND_H_
+
+#include <assert.h>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class BackgroundNoise;
+class RandomVector;
+class SyncBuffer;
+
+// This class handles extrapolation of audio data from the sync_buffer to
+// produce packet-loss concealment.
+// TODO(hlundin): Refactor this class to divide the long methods into shorter
+// ones.
+class Expand {
+ public:
+ Expand(BackgroundNoise* background_noise,
+ SyncBuffer* sync_buffer,
+ RandomVector* random_vector,
+ int fs,
+ size_t num_channels)
+ : random_vector_(random_vector),
+ sync_buffer_(sync_buffer),
+ first_expand_(true),
+ fs_hz_(fs),
+ num_channels_(num_channels),
+ consecutive_expands_(0),
+ background_noise_(background_noise),
+ overlap_length_(5 * fs / 8000),
+ lag_index_direction_(0),
+ current_lag_index_(0),
+ stop_muting_(false),
+ channel_parameters_(new ChannelParameters[num_channels_]) {
+ assert(fs == 8000 || fs == 16000 || fs == 32000 || fs == 48000);
+ assert(fs <= kMaxSampleRate); // Should not be possible.
+ assert(num_channels_ > 0);
+ memset(expand_lags_, 0, sizeof(expand_lags_));
+ Reset();
+ }
+
+ virtual ~Expand() {}
+
+ // Resets the object.
+ virtual void Reset();
+
+ // The main method to produce concealment data. The data is appended to the
+ // end of |output|.
+ virtual int Process(AudioMultiVector* output);
+
+ // Prepare the object to do extra expansion during normal operation following
+ // a period of expands.
+ virtual void SetParametersForNormalAfterExpand();
+
+ // Prepare the object to do extra expansion during merge operation following
+ // a period of expands.
+ virtual void SetParametersForMergeAfterExpand();
+
+ // Sets the mute factor for |channel| to |value|.
+ void SetMuteFactor(int16_t value, size_t channel) {
+ assert(channel < num_channels_);
+ channel_parameters_[channel].mute_factor = value;
+ }
+
+ // Returns the mute factor for |channel|.
+ int16_t MuteFactor(size_t channel) {
+ assert(channel < num_channels_);
+ return channel_parameters_[channel].mute_factor;
+ }
+
+ // Accessors and mutators.
+ virtual size_t overlap_length() const { return overlap_length_; }
+ int16_t max_lag() const { return max_lag_; }
+
+ protected:
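+  // When |consecutive_expands_| reaches this limit, TooManyExpands() returns
+  // true and the background noise is allowed to fade (see kBgnFade).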
+ static const int kMaxConsecutiveExpands = 200;
+ void GenerateRandomVector(int seed_increment,
+ size_t length,
+ int16_t* random_vector);
+
+ void GenerateBackgroundNoise(int16_t* random_vector,
+ size_t channel,
+ int16_t mute_slope,
+ bool too_many_expands,
+ size_t num_noise_samples,
+ int16_t* buffer);
+
+ // Initializes member variables at the beginning of an expand period.
+ void InitializeForAnExpandPeriod();
+
+ bool TooManyExpands();
+
+  // Analyzes the signal history in |sync_buffer_|, and sets up all parameters
+ // necessary to produce concealment data.
+ void AnalyzeSignal(int16_t* random_vector);
+
+ RandomVector* random_vector_;
+ SyncBuffer* sync_buffer_;
+ bool first_expand_;
+ const int fs_hz_;
+ const size_t num_channels_;
+ int consecutive_expands_;
+
+ private:
+ static const int kUnvoicedLpcOrder = 6;
+ static const int kNumCorrelationCandidates = 3;
+ static const int kDistortionLength = 20;
+ static const int kLpcAnalysisLength = 160;
+ static const int kMaxSampleRate = 48000;
+ static const int kNumLags = 3;
+
+ struct ChannelParameters {
+ // Constructor.
+ ChannelParameters()
+ : mute_factor(16384),
+ ar_gain(0),
+ ar_gain_scale(0),
+ voice_mix_factor(0),
+ current_voice_mix_factor(0),
+ onset(false),
+ mute_slope(0) {
+ memset(ar_filter, 0, sizeof(ar_filter));
+ memset(ar_filter_state, 0, sizeof(ar_filter_state));
+ }
+ int16_t mute_factor;
+ int16_t ar_filter[kUnvoicedLpcOrder + 1];
+ int16_t ar_filter_state[kUnvoicedLpcOrder];
+ int16_t ar_gain;
+ int16_t ar_gain_scale;
+ int16_t voice_mix_factor; /* Q14 */
+ int16_t current_voice_mix_factor; /* Q14 */
+ AudioVector expand_vector0;
+ AudioVector expand_vector1;
+ bool onset;
+ int16_t mute_slope; /* Q20 */
+ };
+
+ // Calculate the auto-correlation of |input|, with length |input_length|
+ // samples. The correlation is calculated from a downsampled version of
+ // |input|, and is written to |output|. The scale factor is written to
+ // |output_scale|. Returns the length of the correlation vector.
+ int16_t Correlation(const int16_t* input, size_t input_length,
+ int16_t* output, int16_t* output_scale) const;
+
+ void UpdateLagIndex();
+
+ BackgroundNoise* background_noise_;
+ const size_t overlap_length_;
+ int16_t max_lag_;
+ size_t expand_lags_[kNumLags];
+ int lag_index_direction_;
+ int current_lag_index_;
+ bool stop_muting_;
+ scoped_ptr<ChannelParameters[]> channel_parameters_;
+
+ DISALLOW_COPY_AND_ASSIGN(Expand);
+};
+
+struct ExpandFactory {
+ ExpandFactory() {}
+ virtual ~ExpandFactory() {}
+
+ virtual Expand* Create(BackgroundNoise* background_noise,
+ SyncBuffer* sync_buffer,
+ RandomVector* random_vector,
+ int fs,
+ size_t num_channels) const;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_EXPAND_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc
new file mode 100644
index 00000000000..bd39f408ffa
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for Expand class.
+
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+
+namespace webrtc {
+
+TEST(Expand, CreateAndDestroy) {
+ int fs = 8000;
+ size_t channels = 1;
+ BackgroundNoise bgn(channels);
+ SyncBuffer sync_buffer(1, 1000);
+ RandomVector random_vector;
+ Expand expand(&bgn, &sync_buffer, &random_vector, fs, channels);
+}
+
+TEST(Expand, CreateUsingFactory) {
+ int fs = 8000;
+ size_t channels = 1;
+ BackgroundNoise bgn(channels);
+ SyncBuffer sync_buffer(1, 1000);
+ RandomVector random_vector;
+ ExpandFactory expand_factory;
+ Expand* expand =
+ expand_factory.Create(&bgn, &sync_buffer, &random_vector, fs, channels);
+ EXPECT_TRUE(expand != NULL);
+ delete expand;
+}
+
+// TODO(hlundin): Write more tests.
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/audio_decoder.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/audio_decoder.h
new file mode 100644
index 00000000000..9a2fb8b4645
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/audio_decoder.h
@@ -0,0 +1,152 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_AUDIO_DECODER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_AUDIO_DECODER_H_
+
+#include <stdlib.h> // NULL
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+enum NetEqDecoder {
+ kDecoderPCMu,
+ kDecoderPCMa,
+ kDecoderPCMu_2ch,
+ kDecoderPCMa_2ch,
+ kDecoderILBC,
+ kDecoderISAC,
+ kDecoderISACswb,
+ kDecoderISACfb,
+ kDecoderPCM16B,
+ kDecoderPCM16Bwb,
+ kDecoderPCM16Bswb32kHz,
+ kDecoderPCM16Bswb48kHz,
+ kDecoderPCM16B_2ch,
+ kDecoderPCM16Bwb_2ch,
+ kDecoderPCM16Bswb32kHz_2ch,
+ kDecoderPCM16Bswb48kHz_2ch,
+ kDecoderPCM16B_5ch,
+ kDecoderG722,
+ kDecoderG722_2ch,
+ kDecoderRED,
+ kDecoderAVT,
+ kDecoderCNGnb,
+ kDecoderCNGwb,
+ kDecoderCNGswb32kHz,
+ kDecoderCNGswb48kHz,
+ kDecoderArbitrary,
+ kDecoderOpus,
+ kDecoderOpus_2ch,
+ kDecoderCELT_32,
+ kDecoderCELT_32_2ch,
+};
+
+// This is the interface class for decoders in NetEQ. Each codec type will have
+// an implementation of this class.
+class AudioDecoder {
+ public:
+ enum SpeechType {
+ kSpeech = 1,
+ kComfortNoise = 2
+ };
+
+  // Used by PacketDuration below. The value -1 is reserved for errors.
+ enum { kNotImplemented = -2 };
+
+ explicit AudioDecoder(enum NetEqDecoder type)
+ : codec_type_(type),
+ channels_(1),
+ state_(NULL) {
+ }
+
+ virtual ~AudioDecoder() {}
+
+  // Decodes |encoded_len| bytes from |encoded| and writes the result in
+ // |decoded|. The number of samples from all channels produced is in
+ // the return value. If the decoder produced comfort noise, |speech_type|
+ // is set to kComfortNoise, otherwise it is kSpeech.
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) = 0;
+
+  // Same as Decode(), but interfaces to the decoder's redundant decode
+  // function. The default implementation simply calls the regular Decode()
+  // method.
+ virtual int DecodeRedundant(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
+
+ // Indicates if the decoder implements the DecodePlc method.
+ virtual bool HasDecodePlc() const;
+
+ // Calls the packet-loss concealment of the decoder to update the state after
+ // one or several lost packets.
+ virtual int DecodePlc(int num_frames, int16_t* decoded);
+
+ // Initializes the decoder.
+ virtual int Init() = 0;
+
+ // Notifies the decoder of an incoming packet to NetEQ.
+ virtual int IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp);
+
+ // Returns the last error code from the decoder.
+ virtual int ErrorCode();
+
+ // Returns the duration in samples of the payload in |encoded| which is
+ // |encoded_len| bytes long. Returns kNotImplemented if no duration estimate
+ // is available, or -1 in case of an error.
+ virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
+
+  // Returns the duration in samples of the redundant payload in |encoded| which
+ // is |encoded_len| bytes long. Returns kNotImplemented if no duration
+ // estimate is available, or -1 in case of an error.
+ virtual int PacketDurationRedundant(const uint8_t* encoded,
+ size_t encoded_len) const;
+
+ // Detects whether a packet has forward error correction. The packet is
+ // comprised of the samples in |encoded| which is |encoded_len| bytes long.
+ // Returns true if the packet has FEC and false otherwise.
+ virtual bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const;
+
+ virtual NetEqDecoder codec_type() const;
+
+ // Returns the underlying decoder state.
+ void* state() { return state_; }
+
+ // Returns true if |codec_type| is supported.
+ static bool CodecSupported(NetEqDecoder codec_type);
+
+ // Returns the sample rate for |codec_type|.
+ static int CodecSampleRateHz(NetEqDecoder codec_type);
+
+  // Creates an AudioDecoder object of type |codec_type|. Returns NULL for
+  // unsupported codecs, and when creating an AudioDecoder is not
+ // applicable (e.g., for RED and DTMF/AVT types).
+ static AudioDecoder* CreateAudioDecoder(NetEqDecoder codec_type);
+
+ size_t channels() const { return channels_; }
+
+ protected:
+ static SpeechType ConvertSpeechType(int16_t type);
+
+ enum NetEqDecoder codec_type_;
+ size_t channels_;
+ void* state_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoder);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_AUDIO_DECODER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/neteq.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/neteq.h
new file mode 100644
index 00000000000..c67ab12c6ce
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/neteq.h
@@ -0,0 +1,276 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_NETEQ_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_NETEQ_H_
+
+#include <string.h> // Provide access to size_t.
+
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+struct WebRtcRTPHeader;
+
+struct NetEqNetworkStatistics {
+ uint16_t current_buffer_size_ms; // Current jitter buffer size in ms.
+ uint16_t preferred_buffer_size_ms; // Target buffer size in ms.
+ uint16_t jitter_peaks_found; // 1 if adding extra delay due to peaky
+ // jitter; 0 otherwise.
+ uint16_t packet_loss_rate; // Loss rate (network + late) in Q14.
+ uint16_t packet_discard_rate; // Late loss rate in Q14.
+ uint16_t expand_rate; // Fraction (of original stream) of synthesized
+ // speech inserted through expansion (in Q14).
+ uint16_t preemptive_rate; // Fraction of data inserted through pre-emptive
+ // expansion (in Q14).
+ uint16_t accelerate_rate; // Fraction of data removed through acceleration
+ // (in Q14).
+ int32_t clockdrift_ppm; // Average clock-drift in parts-per-million
+ // (positive or negative).
+ int added_zero_samples; // Number of zero samples added in "off" mode.
+};
+
+enum NetEqOutputType {
+ kOutputNormal,
+ kOutputPLC,
+ kOutputCNG,
+ kOutputPLCtoCNG,
+ kOutputVADPassive
+};
+
+enum NetEqPlayoutMode {
+ kPlayoutOn,
+ kPlayoutOff,
+ kPlayoutFax,
+ kPlayoutStreaming
+};
+
+enum NetEqBackgroundNoiseMode {
+ kBgnOn, // Default behavior with eternal noise.
+ kBgnFade, // Noise fades to zero after some time.
+ kBgnOff // Background noise is always zero.
+};
+
+// This is the interface class for NetEq.
+class NetEq {
+ public:
+ struct Config {
+ Config()
+ : sample_rate_hz(16000),
+ enable_audio_classifier(false),
+ max_packets_in_buffer(50),
+ // |max_delay_ms| has the same effect as calling SetMaximumDelay().
+ max_delay_ms(2000) {}
+
+    int sample_rate_hz; // Initial value. Will change with input data.
+ bool enable_audio_classifier;
+ int max_packets_in_buffer;
+ int max_delay_ms;
+ };
+
+ enum ReturnCodes {
+ kOK = 0,
+ kFail = -1,
+ kNotImplemented = -2
+ };
+
+ enum ErrorCodes {
+ kNoError = 0,
+ kOtherError,
+ kInvalidRtpPayloadType,
+ kUnknownRtpPayloadType,
+ kCodecNotSupported,
+ kDecoderExists,
+ kDecoderNotFound,
+ kInvalidSampleRate,
+ kInvalidPointer,
+ kAccelerateError,
+ kPreemptiveExpandError,
+ kComfortNoiseErrorCode,
+ kDecoderErrorCode,
+ kOtherDecoderError,
+ kInvalidOperation,
+ kDtmfParameterError,
+ kDtmfParsingError,
+ kDtmfInsertError,
+ kStereoNotSupported,
+ kSampleUnderrun,
+ kDecodedTooMuch,
+ kFrameSplitError,
+ kRedundancySplitError,
+ kPacketBufferCorruption,
+ kSyncPacketNotAccepted
+ };
+
+ // Creates a new NetEq object, with parameters set in |config|. The |config|
+ // object will only have to be valid for the duration of the call to this
+ // method.
+ static NetEq* Create(const NetEq::Config& config);
+
+ virtual ~NetEq() {}
+
+ // Inserts a new packet into NetEq. The |receive_timestamp| is an indication
+ // of the time when the packet was received, and should be measured with
+ // the same tick rate as the RTP timestamp of the current payload.
+ // Returns 0 on success, -1 on failure.
+ virtual int InsertPacket(const WebRtcRTPHeader& rtp_header,
+ const uint8_t* payload,
+ int length_bytes,
+ uint32_t receive_timestamp) = 0;
+
+  // Inserts a sync-packet into the packet queue. Sync-packets are decoded to
+  // silence and are intended to keep AV-sync intact in the event of long
+  // packet losses when Video NACK is enabled but Audio NACK is not. Clients
+  // of NetEq might insert a sync-packet when they observe that the buffer
+  // level of NetEq is decreasing below a certain threshold, defined by the
+  // application. Sync-packets should have the same payload type as the last
+  // audio payload type, i.e., they cannot have a DTMF or CNG payload type,
+  // nor can a codec change be implied by inserting a sync-packet.
+  // Returns kOK on success, kFail on failure.
+ virtual int InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
+ uint32_t receive_timestamp) = 0;
+
+ // Instructs NetEq to deliver 10 ms of audio data. The data is written to
+ // |output_audio|, which can hold (at least) |max_length| elements.
+ // The number of channels that were written to the output is provided in
+ // the output variable |num_channels|, and each channel contains
+ // |samples_per_channel| elements. If more than one channel is written,
+ // the samples are interleaved.
+ // The speech type is written to |type|, if |type| is not NULL.
+ // Returns kOK on success, or kFail in case of an error.
+ virtual int GetAudio(size_t max_length, int16_t* output_audio,
+ int* samples_per_channel, int* num_channels,
+ NetEqOutputType* type) = 0;
+
+ // Associates |rtp_payload_type| with |codec| and stores the information in
+ // the codec database. Returns 0 on success, -1 on failure.
+ virtual int RegisterPayloadType(enum NetEqDecoder codec,
+ uint8_t rtp_payload_type) = 0;
+
+ // Provides an externally created decoder object |decoder| to insert in the
+ // decoder database. The decoder implements a decoder of type |codec| and
+ // associates it with |rtp_payload_type|. Returns kOK on success,
+ // kFail on failure.
+ virtual int RegisterExternalDecoder(AudioDecoder* decoder,
+ enum NetEqDecoder codec,
+ uint8_t rtp_payload_type) = 0;
+
+ // Removes |rtp_payload_type| from the codec database. Returns 0 on success,
+ // -1 on failure.
+ virtual int RemovePayloadType(uint8_t rtp_payload_type) = 0;
+
+  // Sets a minimum delay in milliseconds for the packet buffer. The minimum
+  // is maintained unless a higher latency is dictated by channel conditions.
+ // Returns true if the minimum is successfully applied, otherwise false is
+ // returned.
+ virtual bool SetMinimumDelay(int delay_ms) = 0;
+
+  // Sets a maximum delay in milliseconds for the packet buffer. The latency
+  // will not exceed the given value, even if the required delay (given the
+  // channel conditions) is higher. Calling this method has the same effect as
+  // setting the |max_delay_ms| value in the NetEq::Config struct.
+ virtual bool SetMaximumDelay(int delay_ms) = 0;
+
+  // The smallest latency required. This is computed based on inter-arrival
+  // time and internal NetEq logic. Note that in computing this latency none of
+  // the user-defined limits (applied by calling SetMinimumDelay() and/or
+  // SetMaximumDelay()) are applied.
+ virtual int LeastRequiredDelayMs() const = 0;
+
+ // Not implemented.
+ virtual int SetTargetDelay() = 0;
+
+ // Not implemented.
+ virtual int TargetDelay() = 0;
+
+ // Not implemented.
+ virtual int CurrentDelay() = 0;
+
+ // Sets the playout mode to |mode|.
+ virtual void SetPlayoutMode(NetEqPlayoutMode mode) = 0;
+
+ // Returns the current playout mode.
+ virtual NetEqPlayoutMode PlayoutMode() const = 0;
+
+ // Writes the current network statistics to |stats|. The statistics are reset
+ // after the call.
+ virtual int NetworkStatistics(NetEqNetworkStatistics* stats) = 0;
+
+ // Writes the last packet waiting times (in ms) to |waiting_times|. The number
+ // of values written is no more than 100, but may be smaller if the interface
+  // is polled again before 100 packets have arrived.
+ virtual void WaitingTimes(std::vector<int>* waiting_times) = 0;
+
+ // Writes the current RTCP statistics to |stats|. The statistics are reset
+ // and a new report period is started with the call.
+ virtual void GetRtcpStatistics(RtcpStatistics* stats) = 0;
+
+  // Same as GetRtcpStatistics(), but does not reset anything.
+ virtual void GetRtcpStatisticsNoReset(RtcpStatistics* stats) = 0;
+
+ // Enables post-decode VAD. When enabled, GetAudio() will return
+ // kOutputVADPassive when the signal contains no speech.
+ virtual void EnableVad() = 0;
+
+ // Disables post-decode VAD.
+ virtual void DisableVad() = 0;
+
+ // Gets the RTP timestamp for the last sample delivered by GetAudio().
+ // Returns true if the RTP timestamp is valid, otherwise false.
+ virtual bool GetPlayoutTimestamp(uint32_t* timestamp) = 0;
+
+ // Not implemented.
+ virtual int SetTargetNumberOfChannels() = 0;
+
+ // Not implemented.
+ virtual int SetTargetSampleRate() = 0;
+
+ // Returns the error code for the last occurred error. If no error has
+ // occurred, 0 is returned.
+ virtual int LastError() = 0;
+
+ // Returns the error code last returned by a decoder (audio or comfort noise).
+ // When LastError() returns kDecoderErrorCode or kComfortNoiseErrorCode, check
+ // this method to get the decoder's error code.
+ virtual int LastDecoderError() = 0;
+
+ // Flushes both the packet buffer and the sync buffer.
+ virtual void FlushBuffers() = 0;
+
+  // Current usage of the packet buffer and its limits.
+ virtual void PacketBufferStatistics(int* current_num_packets,
+ int* max_num_packets) const = 0;
+
+  // Gets the sequence number and timestamp of the latest decoded RTP packet.
+  // This method is meant to facilitate NACK.
+ virtual int DecodedRtpInfo(int* sequence_number,
+ uint32_t* timestamp) const = 0;
+
+ // Sets the background noise mode.
+ virtual void SetBackgroundNoiseMode(NetEqBackgroundNoiseMode mode) = 0;
+
+ // Gets the background noise mode.
+ virtual NetEqBackgroundNoiseMode BackgroundNoiseMode() const = 0;
+
+ protected:
+ NetEq() {}
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(NetEq);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_NETEQ_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h
deleted file mode 100644
index c2a01340b93..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h
+++ /dev/null
@@ -1,230 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This is the main API for NetEQ. Helper macros are located in webrtc_neteq_help_macros.h,
- * while some internal API functions are found in webrtc_neteq_internal.h.
- */
-
-#include "typedefs.h"
-
-#ifndef WEBRTC_NETEQ_H
-#define WEBRTC_NETEQ_H
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-/**********************************************************
- * Definitions
- */
-
-enum WebRtcNetEQDecoder
-{
- kDecoderReservedStart,
- kDecoderPCMu,
- kDecoderPCMa,
- kDecoderPCMu_2ch,
- kDecoderPCMa_2ch,
- kDecoderILBC,
- kDecoderISAC,
- kDecoderISACswb,
- kDecoderISACfb,
- kDecoderPCM16B,
- kDecoderPCM16Bwb,
- kDecoderPCM16Bswb32kHz,
- kDecoderPCM16Bswb48kHz,
- kDecoderPCM16B_2ch,
- kDecoderPCM16Bwb_2ch,
- kDecoderPCM16Bswb32kHz_2ch,
- kDecoderG722,
- kDecoderG722_2ch,
- kDecoderRED,
- kDecoderAVT,
- kDecoderCNG,
- kDecoderArbitrary,
- kDecoderG729,
- kDecoderG729_1,
- kDecoderG726_16,
- kDecoderG726_24,
- kDecoderG726_32,
- kDecoderG726_40,
- kDecoderG722_1_16,
- kDecoderG722_1_24,
- kDecoderG722_1_32,
- kDecoderG722_1C_24,
- kDecoderG722_1C_32,
- kDecoderG722_1C_48,
- kDecoderOpus,
- kDecoderSPEEX_8,
- kDecoderSPEEX_16,
- kDecoderCELT_32,
- kDecoderCELT_32_2ch,
- kDecoderGSMFR,
- kDecoderAMR,
- kDecoderAMRWB,
- kDecoderReservedEnd
-};
-
-enum WebRtcNetEQNetworkType
-{
- kUDPNormal,
- kUDPVideoSync,
- kTCPNormal,
- kTCPLargeJitter,
- kTCPXLargeJitter
-};
-
-enum WebRtcNetEQOutputType
-{
- kOutputNormal,
- kOutputPLC,
- kOutputCNG,
- kOutputPLCtoCNG,
- kOutputVADPassive
-};
-
-enum WebRtcNetEQPlayoutMode
-{
- kPlayoutOn, kPlayoutOff, kPlayoutFax, kPlayoutStreaming
-};
-
-/* Available modes for background noise (inserted after long expands) */
-enum WebRtcNetEQBGNMode
-{
- kBGNOn, /* default "normal" behavior with eternal noise */
- kBGNFade, /* noise fades to zero after some time */
- kBGNOff
-/* background noise is always zero */
-};
-
-/*************************************************
- * Definitions of decoder calls and the default
- * API function calls for each codec
- */
-
-typedef int16_t (*WebRtcNetEQ_FuncDecode)(void* state, int16_t* encoded,
- int16_t len, int16_t* decoded,
- int16_t* speechType);
-typedef int16_t (*WebRtcNetEQ_FuncDecodePLC)(void* state, int16_t* decoded,
- int16_t frames);
-typedef int16_t (*WebRtcNetEQ_FuncDecodeInit)(void* state);
-typedef int16_t (*WebRtcNetEQ_FuncAddLatePkt)(void* state, int16_t* encoded,
- int16_t len);
-typedef int16_t (*WebRtcNetEQ_FuncGetMDinfo)(void* state);
-typedef int16_t (*WebRtcNetEQ_FuncGetPitchInfo)(void* state, int16_t* encoded,
- int16_t* length);
-typedef int16_t (*WebRtcNetEQ_FuncUpdBWEst)(void* state, const uint16_t *encoded,
- int32_t packet_size,
- uint16_t rtp_seq_number,
- uint32_t send_ts,
- uint32_t arr_ts);
-typedef int (*WebRtcNetEQ_FuncDurationEst)(void* state, const uint8_t* payload,
- int payload_length_bytes);
-typedef int16_t (*WebRtcNetEQ_FuncGetErrorCode)(void* state);
-
-/**********************************************************
- * Structures
- */
-
-typedef struct
-{
- enum WebRtcNetEQDecoder codec;
- int16_t payloadType;
- WebRtcNetEQ_FuncDecode funcDecode;
- WebRtcNetEQ_FuncDecode funcDecodeRCU;
- WebRtcNetEQ_FuncDecodePLC funcDecodePLC;
- WebRtcNetEQ_FuncDecodeInit funcDecodeInit;
- WebRtcNetEQ_FuncAddLatePkt funcAddLatePkt;
- WebRtcNetEQ_FuncGetMDinfo funcGetMDinfo;
- WebRtcNetEQ_FuncGetPitchInfo funcGetPitch;
- WebRtcNetEQ_FuncUpdBWEst funcUpdBWEst;
- WebRtcNetEQ_FuncDurationEst funcDurationEst;
- WebRtcNetEQ_FuncGetErrorCode funcGetErrorCode;
- void* codec_state;
- uint16_t codec_fs;
-} WebRtcNetEQ_CodecDef;
-
-typedef struct
-{
- uint16_t fraction_lost;
- uint32_t cum_lost;
- uint32_t ext_max;
- uint32_t jitter;
-} WebRtcNetEQ_RTCPStat;
-
-/**********************************************************
- * NETEQ Functions
- */
-
-/* Info functions */
-
-#define WEBRTC_NETEQ_MAX_ERROR_NAME 40
-int WebRtcNetEQ_GetErrorCode(void *inst);
-int WebRtcNetEQ_GetErrorName(int errorCode, char *errorName, int maxStrLen);
-
-/* Instance memory assign functions */
-
-int WebRtcNetEQ_AssignSize(int *sizeinbytes);
-int WebRtcNetEQ_Assign(void **inst, void *NETEQ_inst_Addr);
-int WebRtcNetEQ_GetRecommendedBufferSize(void *inst, const enum WebRtcNetEQDecoder *codec,
- int noOfCodecs, enum WebRtcNetEQNetworkType nwType,
- int *MaxNoOfPackets, int *sizeinbytes,
- int* per_packet_overhead_bytes);
-int WebRtcNetEQ_AssignBuffer(void *inst, int MaxNoOfPackets, void *NETEQ_Buffer_Addr,
- int sizeinbytes);
-
-/* Init functions */
-
-int WebRtcNetEQ_Init(void *inst, uint16_t fs);
-int WebRtcNetEQ_SetAVTPlayout(void *inst, int PlayoutAVTon);
-int WebRtcNetEQ_SetExtraDelay(void *inst, int DelayInMs);
-int WebRtcNetEQ_SetPlayoutMode(void *inst, enum WebRtcNetEQPlayoutMode playoutMode);
-int WebRtcNetEQ_SetBGNMode(void *inst, enum WebRtcNetEQBGNMode bgnMode);
-int WebRtcNetEQ_GetBGNMode(const void *inst, enum WebRtcNetEQBGNMode *bgnMode);
-
-/* Codec Database functions */
-
-int WebRtcNetEQ_CodecDbReset(void *inst);
-int WebRtcNetEQ_CodecDbAdd(void *inst, WebRtcNetEQ_CodecDef *codecInst);
-int WebRtcNetEQ_CodecDbRemove(void *inst, enum WebRtcNetEQDecoder codec);
-int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, int16_t *UsedEntries,
- int16_t *MaxEntries);
-int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, int16_t Entry,
- enum WebRtcNetEQDecoder *codec);
-
-/* Real-time functions */
-
-int WebRtcNetEQ_RecIn(void *inst, int16_t *p_w16datagramstart, int16_t w16_RTPlen,
- uint32_t uw32_timeRec);
-int WebRtcNetEQ_RecOut(void *inst, int16_t *pw16_outData, int16_t *pw16_len);
-int WebRtcNetEQ_GetRTCPStats(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
-int WebRtcNetEQ_GetRTCPStatsNoReset(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
-int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, uint32_t *timestamp);
-int WebRtcNetEQ_DecodedRtpInfo(const void* inst,
- int* sequence_number,
- uint32_t* timestamp);
-int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outputType);
-
-/* VQmon related functions */
-int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, uint16_t *validVoiceDurationMs,
- uint16_t *concealedVoiceDurationMs,
- uint8_t *concealedVoiceFlags);
-int WebRtcNetEQ_VQmonGetConfiguration(void *inst, uint16_t *absMaxDelayMs,
- uint8_t *adaptationRate);
-int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, uint16_t *avgDelayMs,
- uint16_t *maxDelayMs);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
deleted file mode 100644
index bd93328108e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
+++ /dev/null
@@ -1,454 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains some helper macros that can be used when loading the
- * NetEQ codec database.
- */
-
-#ifndef WEBRTC_NETEQ_HELP_MACROS_H
-#define WEBRTC_NETEQ_HELP_MACROS_H
-
-#ifndef NULL
-#define NULL 0
-#endif
-
-/**********************************************************
- * Help macros for NetEQ initialization
- */
-
-#define SET_CODEC_PAR(inst,decoder,pt,state,fs) \
- inst.codec=decoder; \
- inst.payloadType=pt; \
- inst.codec_state=state; \
- inst.codec_fs=fs;
-
-#define SET_PCMU_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG711_DecodeU; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=WebRtcG711_DurationEst; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_PCMA_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG711_DecodeA; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=WebRtcG711_DurationEst; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_ILBC_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIlbcfix_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcIlbcfix_NetEqPlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIlbcfix_Decoderinit30Ms; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_ISAC_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
- inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
-
-#define SET_ISACfix_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsacfix_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsacfix_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsacfix_UpdateBwEstimate; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsacfix_GetErrorCode;
-
-#define SET_ISACSWB_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
- inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
-
-#define SET_ISACFB_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
- inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
-
-#define SET_G729_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG729_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG729_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG729_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G729_1_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7291_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7291_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcG7291_DecodeBwe; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_PCM16B_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_PCM16B_WB_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_PCM16B_SWB32_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_PCM16B_SWB48_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG722_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG722_DecoderInit;\
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1_16_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc16; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit16; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1_24_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode24; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc24; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit24; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1_32_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode32; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc32; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit32; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1C_24_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode24; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc24; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit24; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1C_32_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode32; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc32; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit32; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1C_48_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode48; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc48; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit48; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_AMR_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcAmr_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcAmr_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcAmr_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_AMRWB_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcAmrWb_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcAmrWb_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcAmrWb_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_GSMFR_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcGSMFR_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcGSMFR_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcGSMFR_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G726_16_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit16; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G726_24_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode24; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit24; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G726_32_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode32; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit32; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G726_40_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode40; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit40; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_OPUS_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcOpus_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcOpus_DecodePlcMaster; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcOpus_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=(WebRtcNetEQ_FuncDurationEst)WebRtcOpus_DurationEst; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_OPUSSLAVE_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcOpus_DecodeSlave; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcOpus_DecodePlcSlave; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcOpus_DecoderInitSlave; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=(WebRtcNetEQ_FuncDurationEst)WebRtcOpus_DurationEst; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_SPEEX_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcSpeex_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcSpeex_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcSpeex_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_CELT_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcCelt_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_CELTSLAVE_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcCelt_DecodeSlave; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInitSlave; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_RED_FUNCTIONS(inst) \
- inst.funcDecode=NULL; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_AVT_FUNCTIONS(inst) \
- inst.funcDecode=NULL; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_CNG_FUNCTIONS(inst) \
- inst.funcDecode=NULL; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#endif /* WEBRTC_NETEQ_HELP_MACROS_H */
-
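For context, the helper macros removed above were used to fill in a codec-database entry before registering it with WebRtcNetEQ_CodecDbAdd() from the public header. A minimal sketch, assuming an already-assigned NetEQ instance neteq_inst and taking the PCMu enumerator name kDecoderPCMu as an assumption (neither appears in this diff):

    /* Sketch only: neteq_inst and kDecoderPCMu are assumptions. SET_CODEC_PAR
     * and SET_PCMU_FUNCTIONS are the macros deleted above. */
    WebRtcNetEQ_CodecDef codec_def;
    SET_CODEC_PAR(codec_def, kDecoderPCMu, 0, NULL, 8000); /* codec, payload type, state, fs */
    SET_PCMU_FUNCTIONS(codec_def);
    if (WebRtcNetEQ_CodecDbAdd(neteq_inst, &codec_def) != 0) {
        /* registration failed */
    }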
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
deleted file mode 100644
index c46a3f62705..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the internal API functions.
- */
-
-#include "typedefs.h"
-
-#ifndef WEBRTC_NETEQ_INTERNAL_H
-#define WEBRTC_NETEQ_INTERNAL_H
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-typedef struct
-{
- uint8_t payloadType;
- uint16_t sequenceNumber;
- uint32_t timeStamp;
- uint32_t SSRC;
- uint8_t markerBit;
-} WebRtcNetEQ_RTPInfo;
-
-/****************************************************************************
- * WebRtcNetEQ_RecInRTPStruct(...)
- *
- * Alternative RecIn function, used when the RTP data has already been
- * parsed into an RTP info struct (WebRtcNetEQ_RTPInfo).
- *
- * Input:
- * - inst : NetEQ instance
- * - rtpInfo : Pointer to RTP info
- * - payloadPtr : Pointer to the RTP payload (first byte after header)
- * - payloadLenBytes : Length (in bytes) of the payload in payloadPtr
- * - timeRec : Receive time (in timestamps of the used codec)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
- const uint8_t *payloadPtr, int16_t payloadLenBytes,
- uint32_t timeRec);
-
-/****************************************************************************
- * WebRtcNetEQ_GetMasterSlaveInfoSize(...)
- *
- * Get size in bytes for master/slave struct msInfo used in
- * WebRtcNetEQ_RecOutMasterSlave.
- *
- * Return value : Struct size in bytes
- *
- */
-
-int WebRtcNetEQ_GetMasterSlaveInfoSize();
-
-/****************************************************************************
- * WebRtcNetEQ_RecOutMasterSlave(...)
- *
- * RecOut function for running several NetEQ instances in master/slave mode.
- * One master can be used to control several slaves.
- * The MasterSlaveInfo struct must be allocated outside NetEQ.
- * Use function WebRtcNetEQ_GetMasterSlaveInfoSize to get the size needed.
- *
- * Input:
- * - inst : NetEQ instance
- * - isMaster : Non-zero indicates that this is the master channel
- * - msInfo : (slave only) Information from master
- *
- * Output:
- * - inst : Updated NetEQ instance
- * - pw16_outData : Pointer to vector where output should be written
- * - pw16_len : Pointer to variable where output length is returned
- * - msInfo : (master only) Information to slave(s)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RecOutMasterSlave(void *inst, int16_t *pw16_outData,
- int16_t *pw16_len, void *msInfo,
- int16_t isMaster);
-
-typedef struct
-{
- uint16_t currentBufferSize; /* Current jitter buffer size in ms. */
- uint16_t preferredBufferSize; /* Preferred buffer size in ms. */
- uint16_t jitterPeaksFound; /* 1 if adding extra delay due to peaky
- * jitter; 0 otherwise. */
- uint16_t currentPacketLossRate; /* Loss rate (network + late) (Q14). */
- uint16_t currentDiscardRate; /* Late loss rate (Q14). */
- uint16_t currentExpandRate; /* Fraction (of original stream) of
- * synthesized speech inserted through
- * expansion (in Q14). */
- uint16_t currentPreemptiveRate; /* Fraction of data inserted through
- * pre-emptive expansion (in Q14). */
- uint16_t currentAccelerateRate; /* Fraction of data removed through
- * acceleration (in Q14). */
- int32_t clockDriftPPM; /* Average clock-drift in parts-per-
- * million (positive or negative). */
- int addedSamples; /* Number of zero samples added in off
- * mode */
-} WebRtcNetEQ_NetworkStatistics;
-
-/*
- * Get the "in-call" statistics from NetEQ.
- * The statistics are reset after the query.
- */
-int WebRtcNetEQ_GetNetworkStatistics(void *inst, WebRtcNetEQ_NetworkStatistics *stats);
-
-
-typedef struct {
- /* Samples removed from background noise only segments. */
- int accelerate_bgn_samples;
-
- /* Samples removed from normal audio segments. */
- int accelerate_normal_samples;
-
- /* Number of samples synthesized during background noise only segments. */
- int expand_bgn_sampels;
-
- /* Number of samples synthesized during normal audio segments. */
- int expand_normal_samples;
-
- /* Number of samples synthesized during background noise only segments,
- * in preemptive mode. */
- int preemptive_expand_bgn_samples;
-
- /* Number of samples synthesized during normal audio segments, in preemptive
- * mode. */
- int preemptive_expand_normal_samples;
-
- /* Number of samples synthesized during background noise only segments,
- * while merging. */
- int merge_expand_bgn_samples;
-
- /* Number of samples synthesized during normal audio segments, while
- * merging. */
- int merge_expand_normal_samples;
-} WebRtcNetEQ_ProcessingActivity;
-
-/*
- * Get the processing activities from NetEQ.
- * The statistics are reset after the query.
- * This API is meant to obtain processing activities in high granularity,
- * e.g. per RecOut() call.
- */
-void WebRtcNetEQ_GetProcessingActivity(void* inst,
- WebRtcNetEQ_ProcessingActivity* stat);
-
-/*
- * Get the raw waiting times for decoded frames. The function writes the last
- * recorded waiting times (from frame arrival to frame decoding) to the memory
- * pointed to by waitingTimeMs. The number of elements written is in the return
- * value. No more than maxLength elements are written. Statistics are reset on
- * each query.
- */
-int WebRtcNetEQ_GetRawFrameWaitingTimes(void *inst,
- int max_length,
- int* waiting_times_ms);
-
-/***********************************************/
-/* Functions for post-decode VAD functionality */
-/***********************************************/
-
-/* NetEQ must be compiled with the flag NETEQ_VAD enabled for these functions to work. */
-
-/*
- * VAD function pointer types
- *
- * These function pointers match the definitions of webrtc VAD functions WebRtcVad_Init,
- * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in webrtc_vad.h.
- */
-typedef int (*WebRtcNetEQ_VADInitFunction)(void *VAD_inst);
-typedef int (*WebRtcNetEQ_VADSetmodeFunction)(void *VAD_inst, int mode);
-typedef int (*WebRtcNetEQ_VADFunction)(void *VAD_inst, int fs,
- int16_t *frame, int frameLen);
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADInstance(...)
- *
- * Provide a pointer to an allocated VAD instance. If this function is never
- * called, or if it is called with a NULL pointer as VAD_inst, the post-decode
- * VAD functionality is disabled. Also provide pointers to init, setmode
- * and VAD functions. These are typically pointers to WebRtcVad_Init,
- * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in the
- * interface file webrtc_vad.h.
- *
- * Input:
- * - NetEQ_inst : NetEQ instance
- * - VADinst : VAD instance
- * - initFunction : Pointer to VAD init function
- * - setmodeFunction : Pointer to VAD setmode function
- * - VADfunction : Pointer to VAD function
- *
- * Output:
- * - NetEQ_inst : Updated NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADInstance(void *NetEQ_inst, void *VAD_inst,
- WebRtcNetEQ_VADInitFunction initFunction,
- WebRtcNetEQ_VADSetmodeFunction setmodeFunction,
- WebRtcNetEQ_VADFunction VADFunction);
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADMode(...)
- *
- * Pass an aggressiveness mode parameter to the post-decode VAD instance.
- * If this function is never called, mode 0 (quality mode) is used as default.
- *
- * Input:
- * - inst : NetEQ instance
- * - mode : mode parameter (same range as WebRtc VAD mode)
- *
- * Output:
- * - inst : Updated NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADMode(void *NetEQ_inst, int mode);
-
-/****************************************************************************
- * WebRtcNetEQ_RecOutNoDecode(...)
- *
- * Special RecOut that does not do any decoding.
- *
- * Input:
- * - inst : NetEQ instance
- *
- * Output:
- * - inst : Updated NetEQ instance
- * - pw16_outData : Pointer to vector where output should be written
- * - pw16_len : Pointer to variable where output length is returned
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RecOutNoDecode(void *inst, int16_t *pw16_outData,
- int16_t *pw16_len);
-
-/****************************************************************************
- * WebRtcNetEQ_FlushBuffers(...)
- *
- * Flush packet and speech buffers. Does not reset codec database or
- * jitter statistics.
- *
- * Input:
- * - inst : NetEQ instance
- *
- * Output:
- * - inst : Updated NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_FlushBuffers(void *inst);
-
-/*****************************************************************************
- * void WebRtcNetEQ_EnableAVSync(...)
- *
- * Enable AV-sync. If enabled, NetEQ screens for sync payloads. For
- * each sync payload a silence frame is generated.
- *
- * Input:
- * - inst : NetEQ instance
- * - enable : non-zero to enable, otherwise disabled.
- *
- * Output:
- * - inst : Updated NetEQ instance
- *
- */
-
-void WebRtcNetEQ_EnableAVSync(void* inst, int enable);
-
-/****************************************************************************
- * WebRtcNetEQ_RecInSyncRTP(...)
- *
- * Insert a sync packet with the given RTP specification.
- *
- * Input:
- * - inst : NetEQ instance
- * - rtpInfo : Pointer to RTP info
- * - receive_timestamp : Receive time (in timestamps of the used codec)
- *
- * Output:
- * - inst : Updated NetEQ instance
- *
- * Return value                 : On success, the number of bytes inserted;
- *                                otherwise -1.
- */
-
-int WebRtcNetEQ_RecInSyncRTP(void* inst,
- WebRtcNetEQ_RTPInfo* rtp_info,
- uint32_t receive_timestamp);
-
-/*
- * Set a minimum latency for the jitter buffer. The overall delay is the max of
- * |minimum_delay_ms| and the latency that is internally computed based on the
- * inter-arrival times.
- */
-int WebRtcNetEQ_SetMinimumDelay(void *inst, int minimum_delay_ms);
-
-/*
- * Set a maximum latency for the jitter buffer. The overall delay is the min of
- * |maximum_delay_ms| and the latency that is internally computed based on the
- * inter-arrival times.
- */
-int WebRtcNetEQ_SetMaximumDelay(void *inst, int maximum_delay_ms);
-
-/*
- * Get the least required delay in milliseconds given inter-arrival times
- * and playout mode.
- */
-int WebRtcNetEQ_GetRequiredDelayMs(const void* inst);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
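For context, the internal API removed above was driven roughly as follows on the receive side. This is a sketch under the assumption that the caller parses the RTP header itself; inst, payload, payload_len_bytes and the header-field variables are placeholders, not names from this diff:

    WebRtcNetEQ_RTPInfo rtp_info;           /* struct deleted above */
    rtp_info.payloadType    = payload_type; /* placeholder values taken from */
    rtp_info.sequenceNumber = seq_number;   /* the caller's own RTP parsing  */
    rtp_info.timeStamp      = rtp_timestamp;
    rtp_info.SSRC           = ssrc;
    rtp_info.markerBit      = marker_bit;
    if (WebRtcNetEQ_RecInRTPStruct(inst, &rtp_info, payload,
                                   payload_len_bytes, receive_timestamp) != 0) {
        /* insertion failed */
    }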
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu.h
deleted file mode 100644
index 931e6dcf561..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu.h
+++ /dev/null
@@ -1,300 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * MCU struct and functions related to the MCU side operations.
- */
-
-#ifndef MCU_H
-#define MCU_H
-
-#include "typedefs.h"
-
-#include "codec_db.h"
-#include "rtcp.h"
-#include "packet_buffer.h"
-#include "buffer_stats.h"
-#include "neteq_statistics.h"
-
-#ifdef NETEQ_ATEVENT_DECODE
-#include "dtmf_buffer.h"
-#endif
-
-#define MAX_ONE_DESC 5 /* cannot do more than this many consecutive one-descriptor decodings */
-#define MAX_LOSS_REPORT_PERIOD 60 /* number of seconds between auto-reset */
-
-enum TsScaling
-{
- kTSnoScaling = 0,
- kTSscalingTwo,
- kTSscalingTwoThirds,
- kTSscalingFourThirds
-};
-
-enum { kLenWaitingTimes = 100 };
-
-typedef struct
-{
-
- int16_t current_Codec;
- int16_t current_Payload;
- uint32_t timeStamp; /* Next timestamp that should be played */
- int16_t millisecondsPerCall;
- uint16_t timestampsPerCall; /* Output chunk size */
- uint16_t fs;
- uint32_t ssrc; /* Current ssrc */
- int16_t new_codec;
- int16_t first_packet;
-
- /* MCU/DSP Communication layer */
- int16_t *pw16_readAddress;
- int16_t *pw16_writeAddress;
- void *main_inst;
-
-    CodecDbInst_t codec_DB_inst;    /* Information about all the codecs, i.e. which
-                                       functions to use and which codepoints have
-                                       been assigned */
-    SplitInfo_t PayloadSplit_inst;  /* Information about how the current codec
-                                       payload should be split */
- WebRtcNetEQ_RTCP_t RTCP_inst; /* RTCP statistics */
- PacketBuf_t PacketBuffer_inst; /* The packet buffer */
- BufstatsInst_t BufferStat_inst; /* Statistics that are used to make decision
- for what the DSP should perform */
-#ifdef NETEQ_ATEVENT_DECODE
- dtmf_inst_t DTMF_inst;
-#endif
- int NoOfExpandCalls;
- int16_t AVT_PlayoutOn;
- enum WebRtcNetEQPlayoutMode NetEqPlayoutMode;
-
- int16_t one_desc; /* Number of times running on one desc */
-
- uint32_t lostTS; /* Number of timestamps lost */
- uint32_t lastReportTS; /* Timestamp elapsed since last report was given */
-
- int waiting_times[kLenWaitingTimes]; /* Waiting time statistics storage. */
- int len_waiting_times;
- int next_waiting_time_index;
-
- uint32_t externalTS;
- uint32_t internalTS;
- int16_t TSscalingInitialized;
- enum TsScaling scalingFactor;
-
-    /* AV-sync enabled. In AV-sync mode NetEQ screens incoming packets for sync
-     * payloads. Sync payloads are not decoded by a decoder but generate an
-     * all-zero signal with the same number of samples as the previously decoded
-     * payload. Also, in AV-sync mode the sample size of a sync payload is
-     * reported as the previous frame size. */
- int av_sync;
-
-#ifdef NETEQ_STEREO
- int usingStereo;
-#endif
-
- /* The sequence number of the latest decoded RTP payload. */
- int decoded_packet_sequence_number;
- uint32_t decoded_packet_timestamp;
-} MCUInst_t;
-
-/****************************************************************************
- * WebRtcNetEQ_McuReset(...)
- *
- * Reset the MCU instance.
- *
- * Input:
- * - inst : MCU instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_McuReset(MCUInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_ResetMcuInCallStats(...)
- *
- * Reset MCU-side statistics variables for the in-call statistics.
- *
- * Input:
- * - inst : MCU instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_ResetMcuInCallStats(MCUInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_ResetWaitingTimeStats(...)
- *
- * Reset waiting-time statistics.
- *
- * Input:
- * - inst : MCU instance.
- *
- * Return value : n/a
- */
-void WebRtcNetEQ_ResetWaitingTimeStats(MCUInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_StoreWaitingTime(...)
- *
- * Log waiting-time to the statistics.
- *
- * Input:
- * - inst : MCU instance.
- * - waiting_time : Waiting time in "RecOut calls" (i.e., 1 call = 10 ms).
- *
- * Return value : n/a
- */
-void WebRtcNetEQ_StoreWaitingTime(MCUInst_t *inst, int waiting_time);
-
-/****************************************************************************
- * WebRtcNetEQ_ResetMcuJitterStat(...)
- *
- * Reset MCU-side statistics variables for the post-call statistics.
- *
- * Input:
- * - inst : MCU instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_ResetMcuJitterStat(MCUInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_McuAddressInit(...)
- *
- * Initializes MCU with read address and write address.
- *
- * Input:
- * - inst : MCU instance
- * - Data2McuAddress : Pointer to MCU address
- * - Data2DspAddress : Pointer to DSP address
- * - main_inst : Pointer to NetEQ main instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_McuAddressInit(MCUInst_t *inst, void * Data2McuAddress,
- void * Data2DspAddress, void *main_inst);
-
-/****************************************************************************
- * WebRtcNetEQ_McuSetFs(...)
- *
- * Sets the sample rate of the MCU instance.
- *
- * Input:
- * - inst : MCU instance
- * - fs_hz : Sample rate in Hz -- 8000, 16000, 32000, (48000)
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, uint16_t fs_hz);
-
-/****************************************************************************
- * WebRtcNetEQ_SignalMcu(...)
- *
- * Signal the MCU that data is available and ask for a RecOut decision.
- *
- * Input:
- * - inst : MCU instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_SignalMcu(MCUInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_RecInInternal(...)
- *
- * This function inserts a packet into the jitter buffer.
- *
- * Input:
- * - MCU_inst : MCU instance
- * - RTPpacket : The RTP packet, parsed into NetEQ's internal RTP struct
- * - uw32_timeRec : Time stamp for the arrival of the packet (not RTP timestamp)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacket,
- uint32_t uw32_timeRec);
-
-/****************************************************************************
- * WebRtcNetEQ_SplitAndInsertPayload(...)
- *
- * Splits the packet according to split_inst and inserts the parts into
- * Buffer_inst.
- *
- * Input:
- *      - packet        : The RTP packet, parsed into NetEQ's internal RTP struct
- *      - Buffer_inst   : The packet buffer to insert into
- *      - split_inst    : Information about how the payload should be split
- *      - flushed       : Output flag indicating whether the packet buffer was
- *                        flushed during insertion
- *      - av_sync       : indicates if AV-sync is enabled, 1 enabled,
- *                        0 disabled.
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t* packet,
- PacketBuf_t* Buffer_inst,
- SplitInfo_t* split_inst,
- int16_t* flushed,
- int av_sync);
-
-/****************************************************************************
- * WebRtcNetEQ_GetTimestampScaling(...)
- *
- * Update information about timestamp scaling for a payload type
- * in MCU_inst->scalingFactor.
- *
- * Input:
- * - MCU_inst : MCU instance
- * - rtpPayloadType : RTP payload number
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_GetTimestampScaling(MCUInst_t *MCU_inst, int rtpPayloadType);
-
-/****************************************************************************
- * WebRtcNetEQ_ScaleTimestampExternalToInternal(...)
- *
- * Convert from external to internal timestamp using current scaling info.
- *
- * Input:
- * - MCU_inst : MCU instance
- * - externalTS : External timestamp
- *
- * Return value : Internal timestamp
- */
-
-uint32_t WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
- uint32_t externalTS);
-
-/****************************************************************************
- * WebRtcNetEQ_ScaleTimestampInternalToExternal(...)
- *
- * Convert from internal to external timestamp using current scaling info.
- *
- * Input:
- *      - MCU_inst          : MCU instance
- *      - internalTS        : Internal timestamp
- *
- * Return value : External timestamp
- */
-
-uint32_t WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
- uint32_t internalTS);
-#endif
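For context, the two scaling helpers declared at the end of the header above were meant to be used as a pair around the packet buffer. A sketch, where mcu is an initialized MCUInst_t and rtp_timestamp comes from an incoming packet (both placeholders):

    /* Incoming side: map the RTP (external) timestamp into NetEQ's internal
     * timescale before the packet is inserted. */
    uint32_t internal_ts =
        WebRtcNetEQ_ScaleTimestampExternalToInternal(&mcu, rtp_timestamp);

    /* Outgoing side: map the internal playout position back to the RTP
     * timescale, e.g. for reporting. */
    uint32_t external_ts =
        WebRtcNetEQ_ScaleTimestampInternalToExternal(&mcu, mcu.timeStamp);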
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_address_init.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_address_init.c
deleted file mode 100644
index 666ecc85612..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_address_init.c
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "mcu.h"
-
-#include <string.h> /* to define NULL */
-
-/*
- * Initializes MCU with read address and write address
- */
-int WebRtcNetEQ_McuAddressInit(MCUInst_t *inst, void * Data2McuAddress,
- void * Data2DspAddress, void *main_inst)
-{
-
- inst->pw16_readAddress = (int16_t*) Data2McuAddress;
- inst->pw16_writeAddress = (int16_t*) Data2DspAddress;
- inst->main_inst = main_inst;
-
- inst->millisecondsPerCall = 10;
-
- /* Do expansions in the beginning */
- if (inst->pw16_writeAddress != NULL) inst->pw16_writeAddress[0] = DSP_INSTR_EXPAND;
-
- return (0);
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c
deleted file mode 100644
index 2c48ec7dde2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Communication between MCU and DSP sides.
- */
-
-#include "mcu_dsp_common.h"
-
-#include <string.h>
-
-/* Initialize instances with read and write address */
-int WebRtcNetEQ_DSPinit(MainInst_t *inst)
-{
- int res = 0;
-
- res |= WebRtcNetEQ_AddressInit(&inst->DSPinst, NULL, NULL, inst);
- res |= WebRtcNetEQ_McuAddressInit(&inst->MCUinst, NULL, NULL, inst);
-
- return res;
-
-}
-
-/* The DSP side will call this function to interrupt the MCU side */
-int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, int16_t *pw16_shared_mem)
-{
- inst->MCUinst.pw16_readAddress = pw16_shared_mem;
- inst->MCUinst.pw16_writeAddress = pw16_shared_mem;
- return WebRtcNetEQ_SignalMcu(&inst->MCUinst);
-}
-
-int WebRtcNetEQ_IsSyncPayload(const void* payload, int payload_len_bytes) {
- if (payload_len_bytes != SYNC_PAYLOAD_LEN_BYTES ||
- memcmp(payload, kSyncPayload, SYNC_PAYLOAD_LEN_BYTES) != 0) {
- return 0;
- }
- return 1;
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h
deleted file mode 100644
index b4ab514bc95..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * The main NetEQ instance, which is where the DSP and MCU sides join.
- */
-
-#ifndef MCU_DSP_COMMON_H
-#define MCU_DSP_COMMON_H
-
-#include "typedefs.h"
-
-#include "dsp.h"
-#include "mcu.h"
-
-/* Define size of shared memory area. */
-#if defined(NETEQ_48KHZ_WIDEBAND)
- #define SHARED_MEM_SIZE (6*640)
-#elif defined(NETEQ_32KHZ_WIDEBAND)
- #define SHARED_MEM_SIZE (4*640)
-#elif defined(NETEQ_WIDEBAND)
- #define SHARED_MEM_SIZE (2*640)
-#else
- #define SHARED_MEM_SIZE 640
-#endif
-
-#define SYNC_PAYLOAD_LEN_BYTES 7
-static const uint8_t kSyncPayload[SYNC_PAYLOAD_LEN_BYTES] = {
- 'a', 'v', 's', 'y', 'n', 'c', '\0' };
-
-/* Struct to hold the NetEQ instance */
-typedef struct
-{
- DSPInst_t DSPinst; /* DSP part of the NetEQ instance */
- MCUInst_t MCUinst; /* MCU part of the NetEQ instance */
- int16_t ErrorCode; /* Store last error code */
-#ifdef NETEQ_STEREO
- int16_t masterSlave; /* 0 = not set, 1 = master, 2 = slave */
-#endif /* NETEQ_STEREO */
-} MainInst_t;
-
-/* Struct used for communication between DSP and MCU sides of NetEQ */
-typedef struct
-{
- uint32_t playedOutTS; /* Timestamp position at end of DSP data */
- uint16_t samplesLeft; /* Number of samples stored */
- int16_t MD; /* Multiple description codec information */
- int16_t lastMode; /* Latest mode of NetEQ playout */
- int16_t frameLen; /* Frame length of previously decoded packet */
-} DSP2MCU_info_t;
-
-/* Initialize instances with read and write address */
-int WebRtcNetEQ_DSPinit(MainInst_t *inst);
-
-/* The DSP side will call this function to interrupt the MCU side */
-int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, int16_t *pw16_shared_mem);
-
-/* Returns 1 if the given payload matches |kSyncPayload|, otherwise 0 is
- * returned. */
-int WebRtcNetEQ_IsSyncPayload(const void* payload, int payload_len_bytes);
-
-#endif
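For context, the sync-payload helper above was typically used to short-circuit decoding of AV-sync packets. A sketch; payload and payload_len_bytes are placeholders supplied by the caller:

    if (WebRtcNetEQ_IsSyncPayload(payload, payload_len_bytes)) {
        /* The payload equals kSyncPayload ("avsync\0"): do not pass it to a
         * decoder; NetEQ generates a silence frame of the previous frame
         * size instead. */
    }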
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_reset.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_reset.c
deleted file mode 100644
index ddbb798af87..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_reset.c
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Reset MCU side data.
- */
-
-#include "mcu.h"
-
-#include <assert.h>
-#include <string.h>
-
-#include "automode.h"
-
-int WebRtcNetEQ_McuReset(MCUInst_t *inst)
-{
-
-#ifdef NETEQ_ATEVENT_DECODE
- int ok;
-#endif
-
- /* MCU/DSP Communication layer */
- inst->pw16_readAddress = NULL;
- inst->pw16_writeAddress = NULL;
- inst->main_inst = NULL;
- inst->one_desc = 0;
- inst->BufferStat_inst.Automode_inst.extraDelayMs = 0;
- inst->BufferStat_inst.Automode_inst.minimum_delay_ms = 0;
- inst->BufferStat_inst.Automode_inst.maximum_delay_ms = 10000;
- inst->NetEqPlayoutMode = kPlayoutOn;
- inst->av_sync = 0;
-
- WebRtcNetEQ_DbReset(&inst->codec_DB_inst);
- memset(&inst->PayloadSplit_inst, 0, sizeof(SplitInfo_t));
-
- /* Clear the Packet buffer and the pointer to memory storage */
- WebRtcNetEQ_PacketBufferFlush(&inst->PacketBuffer_inst);
- inst->PacketBuffer_inst.memorySizeW16 = 0;
- inst->PacketBuffer_inst.maxInsertPositions = 0;
-
- /* Clear the decision and delay history */
- memset(&inst->BufferStat_inst, 0, sizeof(BufstatsInst_t));
-#ifdef NETEQ_ATEVENT_DECODE
- ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 8000, 560);
- if (ok != 0)
- {
- return ok;
- }
-#endif
- inst->NoOfExpandCalls = 0;
- inst->current_Codec = -1;
- inst->current_Payload = -1;
-
- inst->millisecondsPerCall = 10;
- inst->timestampsPerCall = inst->millisecondsPerCall * 8;
- inst->fs = 8000;
- inst->first_packet = 1;
-
- WebRtcNetEQ_ResetMcuInCallStats(inst);
-
- WebRtcNetEQ_ResetWaitingTimeStats(inst);
-
- WebRtcNetEQ_ResetMcuJitterStat(inst);
-
- WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
- inst->PacketBuffer_inst.maxInsertPositions);
-
- return 0;
-}
-
-/*
- * Reset MCU-side statistics variables for the in-call statistics.
- */
-
-int WebRtcNetEQ_ResetMcuInCallStats(MCUInst_t *inst)
-{
- inst->lostTS = 0;
- inst->lastReportTS = 0;
- inst->PacketBuffer_inst.discardedPackets = 0;
-
- return 0;
-}
-
-/*
- * Reset waiting-time statistics.
- */
-
-void WebRtcNetEQ_ResetWaitingTimeStats(MCUInst_t *inst) {
- memset(inst->waiting_times, 0,
- kLenWaitingTimes * sizeof(inst->waiting_times[0]));
- inst->len_waiting_times = 0;
- inst->next_waiting_time_index = 0;
-}
-
-/*
- * Store waiting-time in the statistics.
- */
-
-void WebRtcNetEQ_StoreWaitingTime(MCUInst_t *inst, int waiting_time) {
- assert(inst->next_waiting_time_index < kLenWaitingTimes);
- inst->waiting_times[inst->next_waiting_time_index] = waiting_time;
- inst->next_waiting_time_index++;
- if (inst->next_waiting_time_index >= kLenWaitingTimes) {
- inst->next_waiting_time_index = 0;
- }
- if (inst->len_waiting_times < kLenWaitingTimes) {
- inst->len_waiting_times++;
- }
-}
-
-/*
- * Reset all MCU-side statistics variables for the post-call statistics.
- */
-
-int WebRtcNetEQ_ResetMcuJitterStat(MCUInst_t *inst)
-{
- inst->BufferStat_inst.Automode_inst.countIAT500ms = 0;
- inst->BufferStat_inst.Automode_inst.countIAT1000ms = 0;
- inst->BufferStat_inst.Automode_inst.countIAT2000ms = 0;
- inst->BufferStat_inst.Automode_inst.longestIATms = 0;
-
- return 0;
-}
-
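For context, the waiting-time statistics reset and stored above form a simple ring buffer of the last kLenWaitingTimes values, expressed in RecOut calls (10 ms each). A sketch of how a mean waiting time could be derived from it; this helper is illustrative only and not a function from this codebase:

    static int MeanWaitingTimeMs(const MCUInst_t* inst) {
        int i;
        int sum = 0;
        if (inst->len_waiting_times == 0) {
            return 0;
        }
        for (i = 0; i < inst->len_waiting_times; i++) {
            sum += inst->waiting_times[i];  /* stored in RecOut calls */
        }
        /* One RecOut call corresponds to 10 ms (see millisecondsPerCall). */
        return 10 * sum / inst->len_waiting_times;
    }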
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.c
deleted file mode 100644
index 78da2c7c7db..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.c
+++ /dev/null
@@ -1,570 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This is the function to merge a new packet with expanded data after a packet loss.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-
-/****************************************************************************
- * WebRtcNetEQ_Merge(...)
- *
- * This function merges newly decoded speech with the expanded data generated
- * during the preceding packet loss, cross-fading between the two.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to new decoded speech.
- * - len : Number of samples in pw16_decoded.
- *
- *
- * Output:
- * - inst : Updated user information
- * - outData : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Number of samples written to pw16_outData
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_expanded 210*fs/8000 0 209*fs/8000
- int16_t pw16_expandedLB 100 210*fs/8000 99+210*fs/8000
- int16_t pw16_decodedLB 40 100+210*fs/8000 139+210*fs/8000
- int32_t pw32_corr 2*60 140+210*fs/8000 260+210*fs/8000
- int16_t pw16_corrVec 68 210*fs/8000 67+210*fs/8000
-
- [gap in scratch vector]
-
- func WebRtcNetEQ_Expand 40+370*fs/8000 126*fs/8000 39+496*fs/8000
-
- Total: 40+496*fs/8000
- */
-
-#define SCRATCH_pw16_expanded 0
-#if (defined(NETEQ_48KHZ_WIDEBAND))
-#define SCRATCH_pw16_expandedLB 1260
-#define SCRATCH_pw16_decodedLB 1360
-#define SCRATCH_pw32_corr 1400
-#define SCRATCH_pw16_corrVec 1260
-#define SCRATCH_NETEQ_EXPAND 756
-#elif (defined(NETEQ_32KHZ_WIDEBAND))
-#define SCRATCH_pw16_expandedLB 840
-#define SCRATCH_pw16_decodedLB 940
-#define SCRATCH_pw32_corr 980
-#define SCRATCH_pw16_corrVec 840
-#define SCRATCH_NETEQ_EXPAND 504
-#elif (defined(NETEQ_WIDEBAND))
-#define SCRATCH_pw16_expandedLB 420
-#define SCRATCH_pw16_decodedLB 520
-#define SCRATCH_pw32_corr 560
-#define SCRATCH_pw16_corrVec 420
-#define SCRATCH_NETEQ_EXPAND 252
-#else /* NB */
-#define SCRATCH_pw16_expandedLB 210
-#define SCRATCH_pw16_decodedLB 310
-#define SCRATCH_pw32_corr 350
-#define SCRATCH_pw16_corrVec 210
-#define SCRATCH_NETEQ_EXPAND 126
-#endif
-
-int WebRtcNetEQ_Merge(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_decoded, int len, int16_t *pw16_outData,
- int16_t *pw16_len)
-{
-
- int16_t fs_mult;
- int16_t fs_shift;
- int32_t w32_En_new_frame, w32_En_old_frame;
- int16_t w16_expmax, w16_newmax;
- int16_t w16_tmp, w16_tmp2;
- int32_t w32_tmp;
-#ifdef SCRATCH
- int16_t *pw16_expanded = pw16_scratchPtr + SCRATCH_pw16_expanded;
- int16_t *pw16_expandedLB = pw16_scratchPtr + SCRATCH_pw16_expandedLB;
- int16_t *pw16_decodedLB = pw16_scratchPtr + SCRATCH_pw16_decodedLB;
- int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_pw32_corr);
- int16_t *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;
-#else
- int16_t pw16_expanded[(125+80+5)*FSMULT];
- int16_t pw16_expandedLB[100];
- int16_t pw16_decodedLB[40];
- int32_t pw32_corr[60];
- int16_t pw16_corrVec[4+60+4];
-#endif
- int16_t *pw16_corr = &pw16_corrVec[4];
- int16_t w16_stopPos = 0, w16_bestIndex, w16_interpLen;
- int16_t w16_bestVal; /* bestVal is dummy */
- int16_t w16_startfact, w16_inc;
- int16_t w16_expandedLen;
- int16_t w16_startPos;
- int16_t w16_expLen, w16_newLen = 0;
- int16_t *pw16_decodedOut;
- int16_t w16_muted;
-
- int w16_decodedLen = len;
-
-#ifdef NETEQ_STEREO
- MasterSlaveInfo *msInfo = inst->msInfo;
-#endif
-
- fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
- fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); /* Note that this is not "exact" for 48kHz */
-
- /*************************************
- * Generate data to merge with
- *************************************/
- /*
- * Check how much data that is left since earlier
- * (at least there should be the overlap)...
- */
- w16_startPos = inst->endPosition - inst->curPosition;
- /* Get one extra expansion to merge and overlap with */
- inst->ExpandInst.w16_stopMuting = 1;
- inst->ExpandInst.w16_lagsDirection = 1; /* make sure we get the "optimal" lag */
- inst->ExpandInst.w16_lagsPosition = -1; /* out of the 3 possible ones */
-    w16_expandedLen = 0; /* Currently serves no purpose */
-
- if (w16_startPos >= 210 * FSMULT)
- {
- /*
- * The number of samples available in the sync buffer is more than what fits in
- * pw16_expanded. Keep the first 210*FSMULT samples, but shift them towards the end of
- * the buffer. This is ok, since all of the buffer will be expand data anyway, so as
- * long as the beginning is left untouched, we're fine.
- */
-
- w16_tmp = w16_startPos - 210 * FSMULT; /* length difference */
-
- WEBRTC_SPL_MEMMOVE_W16(&inst->speechBuffer[inst->curPosition+w16_tmp] ,
- &inst->speechBuffer[inst->curPosition], 210*FSMULT);
-
- inst->curPosition += w16_tmp; /* move start position of sync buffer accordingly */
- w16_startPos = 210 * FSMULT; /* this is the truncated length */
- }
-
- WebRtcNetEQ_Expand(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_expanded, /* let Expand write to beginning of pw16_expanded to avoid overflow */
- &w16_newLen, 0);
-
- /*
- * Now shift the data in pw16_expanded to where it belongs.
- * Truncate all that ends up outside the vector.
- */
-
- WEBRTC_SPL_MEMMOVE_W16(&pw16_expanded[w16_startPos], pw16_expanded,
- WEBRTC_SPL_MIN(w16_newLen,
- WEBRTC_SPL_MAX(210*FSMULT - w16_startPos, 0) ) );
-
- inst->ExpandInst.w16_stopMuting = 0;
-
- /* Copy what is left since earlier into the expanded vector */
-
- WEBRTC_SPL_MEMCPY_W16(pw16_expanded, &inst->speechBuffer[inst->curPosition], w16_startPos);
-
- /*
- * Do "ugly" copy and paste from the expanded in order to generate more data
- * to correlate (but not interpolate) with.
- */
- w16_expandedLen = (120 + 80 + 2) * fs_mult;
- w16_expLen = w16_startPos + w16_newLen;
-
- if (w16_expLen < w16_expandedLen)
- {
- while ((w16_expLen + w16_newLen) < w16_expandedLen)
- {
- WEBRTC_SPL_MEMCPY_W16(&pw16_expanded[w16_expLen], &pw16_expanded[w16_startPos],
- w16_newLen);
- w16_expLen += w16_newLen;
- }
-
- /* Copy last part (fraction of a whole expansion) */
-
- WEBRTC_SPL_MEMCPY_W16(&pw16_expanded[w16_expLen], &pw16_expanded[w16_startPos],
- (w16_expandedLen-w16_expLen));
- }
- w16_expLen = w16_expandedLen;
-
- /* Adjust muting factor (main muting factor times expand muting factor) */
- inst->w16_muteFactor
- = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
- inst->ExpandInst.w16_expandMuteFactor, 14);
-
- /* Adjust muting factor if new vector is more or less of the BGN energy */
- len = WEBRTC_SPL_MIN(64*fs_mult, w16_decodedLen);
- w16_expmax = WebRtcSpl_MaxAbsValueW16(pw16_expanded, (int16_t) len);
- w16_newmax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
-
- /* Calculate energy of old data */
- w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_expmax, w16_expmax));
- w16_tmp = WEBRTC_SPL_MAX(w16_tmp,0);
- w32_En_old_frame = WebRtcNetEQ_DotW16W16(pw16_expanded, pw16_expanded, len, w16_tmp);
-
- /* Calculate energy of new data */
- w16_tmp2 = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_newmax, w16_newmax));
- w16_tmp2 = WEBRTC_SPL_MAX(w16_tmp2,0);
- w32_En_new_frame = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, len, w16_tmp2);
-
- /* Align to same Q-domain */
- if (w16_tmp2 > w16_tmp)
- {
- w32_En_old_frame = WEBRTC_SPL_RSHIFT_W32(w32_En_old_frame, (w16_tmp2-w16_tmp));
- }
- else
- {
- w32_En_new_frame = WEBRTC_SPL_RSHIFT_W32(w32_En_new_frame, (w16_tmp-w16_tmp2));
- }
-
- /* Calculate muting factor to use for new frame */
- if (w32_En_new_frame > w32_En_old_frame)
- {
- /* Normalize w32_En_new_frame to 14 bits */
- w16_tmp = WebRtcSpl_NormW32(w32_En_new_frame) - 17;
- w32_En_new_frame = WEBRTC_SPL_SHIFT_W32(w32_En_new_frame, w16_tmp);
-
- /*
- * Put w32_En_old_frame in a domain 14 higher, so that
- * w32_En_old_frame/w32_En_new_frame is in Q14
- */
- w16_tmp = w16_tmp + 14;
- w32_En_old_frame = WEBRTC_SPL_SHIFT_W32(w32_En_old_frame, w16_tmp);
- w16_tmp
- = WebRtcSpl_DivW32W16ResW16(w32_En_old_frame, (int16_t) w32_En_new_frame);
- /* Calculate sqrt(w32_En_old_frame/w32_En_new_frame) in Q14 */
- w16_muted = (int16_t) WebRtcSpl_SqrtFloor(
- WEBRTC_SPL_LSHIFT_W32((int32_t)w16_tmp,14));
- }
- else
- {
- w16_muted = 16384; /* Set = 1.0 when old frame has higher energy than new */
- }
-
-    /* Raise the muting factor to w16_muted if the current w16_muteFactor is lower */
- if (w16_muted > inst->w16_muteFactor)
- {
- inst->w16_muteFactor = WEBRTC_SPL_MIN(w16_muted, 16384);
- }
-
-#ifdef NETEQ_STEREO
-
- /* Sanity for msInfo */
- if (msInfo == NULL)
- {
- /* this should not happen here */
- return MASTER_SLAVE_ERROR;
- }
-
- /* do not downsample and calculate correlations for slave instance(s) */
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
-#endif
-
- /*********************************************
- * Downsample to 4kHz and find best overlap
- *********************************************/
-
- /* Downsample to 4 kHz */
- if (inst->fs == 8000)
- {
- WebRtcSpl_DownsampleFast(&pw16_expanded[2], (int16_t) (w16_expandedLen - 2),
- pw16_expandedLB, (int16_t) (100),
- (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl, (int16_t) 3,
- (int16_t) 2, (int16_t) 0);
- if (w16_decodedLen <= 80)
- {
- /* Not quite long enough, so we have to cheat a bit... */
- int16_t temp_len = w16_decodedLen - 2;
- w16_tmp = temp_len / 2;
- WebRtcSpl_DownsampleFast(&pw16_decoded[2], temp_len,
- pw16_decodedLB, w16_tmp,
- (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl,
- (int16_t) 3, (int16_t) 2, (int16_t) 0);
- WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40 - w16_tmp));
- }
- else
- {
- WebRtcSpl_DownsampleFast(&pw16_decoded[2],
- (int16_t) (w16_decodedLen - 2), pw16_decodedLB,
- (int16_t) (40), (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl,
- (int16_t) 3, (int16_t) 2, (int16_t) 0);
- }
-#ifdef NETEQ_WIDEBAND
- }
- else if (inst->fs==16000)
- {
- WebRtcSpl_DownsampleFast(
- &pw16_expanded[4], (int16_t)(w16_expandedLen-4),
- pw16_expandedLB, (int16_t)(100),
- (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl, (int16_t)5,
- (int16_t)4, (int16_t)0);
- if (w16_decodedLen<=160)
- {
- /* Not quite long enough, so we have to cheat a bit... */
- int16_t temp_len = w16_decodedLen - 4;
- w16_tmp = temp_len / 4;
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[4], temp_len,
- pw16_decodedLB, w16_tmp,
- (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl, (int16_t)5,
- (int16_t)4, (int16_t)0);
- WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
- }
- else
- {
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[4], (int16_t)(w16_decodedLen-4),
- pw16_decodedLB, (int16_t)(40),
- (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl, (int16_t)5,
- (int16_t)4, (int16_t)0);
- }
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- }
- else if (inst->fs==32000)
- {
- /*
- * TODO(hlundin) Why is the offset into pw16_expanded 6?
- */
- WebRtcSpl_DownsampleFast(
- &pw16_expanded[6], (int16_t)(w16_expandedLen-6),
- pw16_expandedLB, (int16_t)(100),
- (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl, (int16_t)7,
- (int16_t)8, (int16_t)0);
- if (w16_decodedLen<=320)
- {
- /* Not quite long enough, so we have to cheat a bit... */
- int16_t temp_len = w16_decodedLen - 6;
- w16_tmp = temp_len / 8;
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[6], temp_len,
- pw16_decodedLB, w16_tmp,
- (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl, (int16_t)7,
- (int16_t)8, (int16_t)0);
- WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
- }
- else
- {
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[6], (int16_t)(w16_decodedLen-6),
- pw16_decodedLB, (int16_t)(40),
- (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl, (int16_t)7,
- (int16_t)8, (int16_t)0);
- }
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- }
- else /* if (inst->fs==48000) */
- {
- /*
- * TODO(hlundin) Why is the offset into pw16_expanded 6?
- */
- WebRtcSpl_DownsampleFast(
- &pw16_expanded[6], (int16_t)(w16_expandedLen-6),
- pw16_expandedLB, (int16_t)(100),
- (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl, (int16_t)7,
- (int16_t)12, (int16_t)0);
- if (w16_decodedLen<=320)
- {
- /* Not quite long enough, so we have to cheat a bit... */
- /*
- * TODO(hlundin): Is this correct? Downsampling is a factor 12
- * but w16_tmp = temp_len / 8.
- * (Was w16_tmp = ((w16_decodedLen-6)>>3) before re-write.)
- */
- int16_t temp_len = w16_decodedLen - 6;
- w16_tmp = temp_len / 8;
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[6], temp_len,
- pw16_decodedLB, w16_tmp,
- (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl, (int16_t)7,
- (int16_t)12, (int16_t)0);
- WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
- }
- else
- {
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[6], (int16_t)(w16_decodedLen-6),
- pw16_decodedLB, (int16_t)(40),
- (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl, (int16_t)7,
- (int16_t)12, (int16_t)0);
- }
-#endif
- }
-
- /* Calculate correlation without any normalization (40 samples) */
- w16_tmp = WebRtcSpl_DivW32W16ResW16((int32_t) inst->ExpandInst.w16_maxLag,
- (int16_t) (fs_mult * 2)) + 1;
- w16_stopPos = WEBRTC_SPL_MIN(60, w16_tmp);
- w32_tmp = WEBRTC_SPL_MUL_16_16(w16_expmax, w16_newmax);
- if (w32_tmp > 26843546)
- {
- w16_tmp = 3;
- }
- else
- {
- w16_tmp = 0;
- }
-
- WebRtcNetEQ_CrossCorr(pw32_corr, pw16_decodedLB, pw16_expandedLB, 40,
- (int16_t) w16_stopPos, w16_tmp, 1);
-
- /* Normalize correlation to 14 bits and put in a int16_t vector */
- WebRtcSpl_MemSetW16(pw16_corrVec, 0, (4 + 60 + 4));
- w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_stopPos);
- w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_stopPos, pw32_corr, w16_tmp);
-
- /* Calculate allowed starting point for peak finding.
- The peak location bestIndex must fulfill two criteria:
- (1) w16_bestIndex+w16_decodedLen < inst->timestampsPerCall+inst->ExpandInst.w16_overlap
- (2) w16_bestIndex+w16_decodedLen < w16_startPos */
- w16_tmp = WEBRTC_SPL_MAX(0, WEBRTC_SPL_MAX(w16_startPos,
- inst->timestampsPerCall+inst->ExpandInst.w16_overlap) - w16_decodedLen);
- /* Downscale starting index to 4kHz domain */
- w16_tmp2 = WebRtcSpl_DivW32W16ResW16((int32_t) w16_tmp,
- (int16_t) (fs_mult << 1));
-
-#ifdef NETEQ_STEREO
- } /* end if (msInfo->msMode != NETEQ_SLAVE) */
-
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
- /* This is master or mono instance; find peak */
- WebRtcNetEQ_PeakDetection(&pw16_corr[w16_tmp2], w16_stopPos, 1, fs_mult, &w16_bestIndex,
- &w16_bestVal);
- w16_bestIndex += w16_tmp; /* compensate for modified starting index */
- msInfo->bestIndex = w16_bestIndex;
- }
- else if (msInfo->msMode == NETEQ_SLAVE)
- {
- /* Get peak location from master instance */
- w16_bestIndex = msInfo->bestIndex;
- }
- else
- {
- /* Invalid mode */
- return MASTER_SLAVE_ERROR;
- }
-
-#else /* NETEQ_STEREO */
-
- /* Find peak */
- WebRtcNetEQ_PeakDetection(&pw16_corr[w16_tmp2], w16_stopPos, 1, fs_mult, &w16_bestIndex,
- &w16_bestVal);
- w16_bestIndex += w16_tmp; /* compensate for modified starting index */
-
-#endif /* NETEQ_STEREO */
-
- /*
- * Ensure that underrun does not occur for the 10 ms case => we have to get at least
- * 10 ms + overlap. (This should never happen thanks to the above modification of
- * the peak-finding starting point.)
- */
- while ((w16_bestIndex + w16_decodedLen) < (inst->timestampsPerCall
- + inst->ExpandInst.w16_overlap) || w16_bestIndex + w16_decodedLen < w16_startPos)
- {
- w16_bestIndex += w16_newLen; /* Jump one lag ahead */
- }
- pw16_decodedOut = pw16_outData + w16_bestIndex;
-
- /* Mute the new decoded data if needed (and unmute it linearly) */
- w16_interpLen = WEBRTC_SPL_MIN(60*fs_mult,
- w16_expandedLen-w16_bestIndex); /* this is the overlapping part of pw16_expanded */
- w16_interpLen = WEBRTC_SPL_MIN(w16_interpLen, w16_decodedLen);
- w16_inc = WebRtcSpl_DivW32W16ResW16(4194,
- fs_mult); /* in Q20, 0.004 for NB and 0.002 for WB */
- if (inst->w16_muteFactor < 16384)
- {
- WebRtcNetEQ_UnmuteSignal(pw16_decoded, &inst->w16_muteFactor, pw16_decoded, w16_inc,
- (int16_t) w16_interpLen);
- WebRtcNetEQ_UnmuteSignal(&pw16_decoded[w16_interpLen], &inst->w16_muteFactor,
- &pw16_decodedOut[w16_interpLen], w16_inc,
- (int16_t) (w16_decodedLen - w16_interpLen));
- }
- else
- {
- /* No muting needed */
-
- WEBRTC_SPL_MEMMOVE_W16(&pw16_decodedOut[w16_interpLen], &pw16_decoded[w16_interpLen],
- (w16_decodedLen-w16_interpLen));
- }
-
- /* Do overlap and interpolate linearly */
- w16_inc = WebRtcSpl_DivW32W16ResW16(16384, (int16_t) (w16_interpLen + 1)); /* Q14 */
- w16_startfact = (16384 - w16_inc);
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_expanded, w16_bestIndex);
- WebRtcNetEQ_MixVoiceUnvoice(pw16_decodedOut, &pw16_expanded[w16_bestIndex], pw16_decoded,
- &w16_startfact, w16_inc, w16_interpLen);
-
- inst->w16_mode = MODE_MERGE;
- inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
-
- /* New added length (w16_startPos samples were borrowed) */
- *pw16_len = w16_bestIndex + w16_decodedLen - w16_startPos;
-
- /* Update VQmon parameter */
- inst->w16_concealedTS += (*pw16_len - w16_decodedLen);
- inst->w16_concealedTS = WEBRTC_SPL_MAX(0, inst->w16_concealedTS);
-
- /* Update in-call and post-call statistics */
- if (inst->ExpandInst.w16_expandMuteFactor == 0)
- {
- /* expansion generates noise only */
- inst->statInst.expandedNoiseSamples += (*pw16_len - w16_decodedLen);
- /* Short-term activity statistics. */
- inst->activity_stats.merge_expand_bgn_samples +=
- (*pw16_len - w16_decodedLen);
- }
- else
- {
- /* expansion generates more than only noise */
- inst->statInst.expandedVoiceSamples += (*pw16_len - w16_decodedLen);
- /* Short-term activity statistics. */
- inst->activity_stats.merge_expand_normal_samples +=
- (*pw16_len - w16_decodedLen);
- }
- inst->statInst.expandLength += (*pw16_len - w16_decodedLen);
-
-
- /* Copy back the first part of the data to the speechHistory */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->curPosition], pw16_outData, w16_startPos);
-
-
- /* Move data to within outData */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, &pw16_outData[w16_startPos], (*pw16_len));
-
- return 0;
-}
-
-#undef SCRATCH_pw16_expanded
-#undef SCRATCH_pw16_expandedLB
-#undef SCRATCH_pw16_decodedLB
-#undef SCRATCH_pw32_corr
-#undef SCRATCH_pw16_corrVec
-#undef SCRATCH_NETEQ_EXPAND
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc
new file mode 100644
index 00000000000..d3d8077516b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc
@@ -0,0 +1,366 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/merge.h"
+
+#include <assert.h>
+#include <string.h> // memmove, memcpy, memset, size_t
+
+#include <algorithm> // min, max
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+int Merge::Process(int16_t* input, size_t input_length,
+ int16_t* external_mute_factor_array,
+ AudioMultiVector* output) {
+ // TODO(hlundin): Change to an enumerator and skip assert.
+ assert(fs_hz_ == 8000 || fs_hz_ == 16000 || fs_hz_ == 32000 ||
+ fs_hz_ == 48000);
+ assert(fs_hz_ <= kMaxSampleRate); // Should not be possible.
+
+ int old_length;
+ int expand_period;
+ // Get expansion data to overlap and mix with.
+ int expanded_length = GetExpandedSignal(&old_length, &expand_period);
+
+ // Transfer input signal to an AudioMultiVector.
+ AudioMultiVector input_vector(num_channels_);
+ input_vector.PushBackInterleaved(input, input_length);
+ size_t input_length_per_channel = input_vector.Size();
+ assert(input_length_per_channel == input_length / num_channels_);
+
+ int16_t best_correlation_index = 0;
+ size_t output_length = 0;
+
+ for (size_t channel = 0; channel < num_channels_; ++channel) {
+ int16_t* input_channel = &input_vector[channel][0];
+ int16_t* expanded_channel = &expanded_[channel][0];
+ int16_t expanded_max, input_max;
+ int16_t new_mute_factor = SignalScaling(
+ input_channel, static_cast<int>(input_length_per_channel),
+ expanded_channel, &expanded_max, &input_max);
+
+ // Adjust muting factor (product of "main" muting factor and expand muting
+ // factor).
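+ // Both factors are in Q14, so their product is in Q28; shifting right by 14
+ // brings the result back to Q14.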
+ int16_t* external_mute_factor = &external_mute_factor_array[channel];
+ *external_mute_factor =
+ (*external_mute_factor * expand_->MuteFactor(channel)) >> 14;
+
+ // Update |external_mute_factor| if it is lower than |new_mute_factor|.
+ if (new_mute_factor > *external_mute_factor) {
+ *external_mute_factor = std::min(new_mute_factor,
+ static_cast<int16_t>(16384));
+ }
+
+ if (channel == 0) {
+ // Downsample, correlate, and find strongest correlation period for the
+ // master (i.e., first) channel only.
+ // Downsample to 4kHz sample rate.
+ Downsample(input_channel, static_cast<int>(input_length_per_channel),
+ expanded_channel, expanded_length);
+
+ // Calculate the lag of the strongest correlation period.
+ best_correlation_index = CorrelateAndPeakSearch(
+ expanded_max, input_max, old_length,
+ static_cast<int>(input_length_per_channel), expand_period);
+ }
+
+ static const int kTempDataSize = 3600;
+ int16_t temp_data[kTempDataSize]; // TODO(hlundin) Remove this.
+ int16_t* decoded_output = temp_data + best_correlation_index;
+
+ // Mute the new decoded data if needed (and unmute it linearly).
+ // This is the overlapping part of expanded_signal.
+ int interpolation_length = std::min(
+ kMaxCorrelationLength * fs_mult_,
+ expanded_length - best_correlation_index);
+ interpolation_length = std::min(interpolation_length,
+ static_cast<int>(input_length_per_channel));
+ if (*external_mute_factor < 16384) {
+ // Set a suitable muting slope (Q20). 0.004 for NB, 0.002 for WB,
+ // and so on.
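+ // (4194 in Q20 is 4194 / 2^20, which is approximately 0.004; dividing by
+ // fs_mult_ scales the slope with the sample rate.)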
+ int increment = 4194 / fs_mult_;
+ *external_mute_factor = DspHelper::RampSignal(input_channel,
+ interpolation_length,
+ *external_mute_factor,
+ increment);
+ DspHelper::UnmuteSignal(&input_channel[interpolation_length],
+ input_length_per_channel - interpolation_length,
+ external_mute_factor, increment,
+ &decoded_output[interpolation_length]);
+ } else {
+ // No muting needed.
+ memmove(
+ &decoded_output[interpolation_length],
+ &input_channel[interpolation_length],
+ sizeof(int16_t) * (input_length_per_channel - interpolation_length));
+ }
+
+ // Do overlap and mix linearly.
+ int increment = 16384 / (interpolation_length + 1); // In Q14.
+ int16_t mute_factor = 16384 - increment;
+ memmove(temp_data, expanded_channel,
+ sizeof(int16_t) * best_correlation_index);
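+ // The cross-fade weights the expanded signal by mute_factor (Q14) and the
+ // new input by (16384 - mute_factor); mute_factor decreases by increment
+ // each sample, so the output ramps linearly from expanded data to new input
+ // over interpolation_length samples.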
+ DspHelper::CrossFade(&expanded_channel[best_correlation_index],
+ input_channel, interpolation_length,
+ &mute_factor, increment, decoded_output);
+
+ output_length = best_correlation_index + input_length_per_channel;
+ if (channel == 0) {
+ assert(output->Empty()); // Output should be empty at this point.
+ output->AssertSize(output_length);
+ } else {
+ assert(output->Size() == output_length);
+ }
+ memcpy(&(*output)[channel][0], temp_data,
+ sizeof(temp_data[0]) * output_length);
+ }
+
+ // Copy back the first part of the data to |sync_buffer_| and remove it from
+ // |output|.
+ sync_buffer_->ReplaceAtIndex(*output, old_length, sync_buffer_->next_index());
+ output->PopFront(old_length);
+
+ // Return new added length. |old_length| samples were borrowed from
+ // |sync_buffer_|.
+ return static_cast<int>(output_length) - old_length;
+}
+
+int Merge::GetExpandedSignal(int* old_length, int* expand_period) {
+ // Check how much data is left from earlier.
+ *old_length = static_cast<int>(sync_buffer_->FutureLength());
+ // Should never be less than overlap_length.
+ assert(*old_length >= static_cast<int>(expand_->overlap_length()));
+ // Generate data to merge the overlap with using expand.
+ expand_->SetParametersForMergeAfterExpand();
+
+ if (*old_length >= 210 * kMaxSampleRate / 8000) {
+ // TODO(hlundin): Write test case for this.
+ // The number of samples available in the sync buffer is more than what fits
+ // in expanded_signal. Keep the first 210 * kMaxSampleRate / 8000 samples,
+ // but shift them towards the end of the buffer. This is ok, since all of
+ // the buffer will be expand data anyway, so as long as the beginning is
+ // left untouched, we're fine.
+ int16_t length_diff = *old_length - 210 * kMaxSampleRate / 8000;
+ sync_buffer_->InsertZerosAtIndex(length_diff, sync_buffer_->next_index());
+ *old_length = 210 * kMaxSampleRate / 8000;
+ // This is the truncated length.
+ }
+ // This assert should always be true thanks to the if statement above.
+ assert(210 * kMaxSampleRate / 8000 - *old_length >= 0);
+
+ AudioMultiVector expanded_temp(num_channels_);
+ expand_->Process(&expanded_temp);
+ *expand_period = static_cast<int>(expanded_temp.Size()); // Samples per
+ // channel.
+
+ expanded_.Clear();
+ // Copy what is left from earlier into the expanded vector.
+ expanded_.PushBackFromIndex(*sync_buffer_, sync_buffer_->next_index());
+ assert(expanded_.Size() == static_cast<size_t>(*old_length));
+ assert(expanded_temp.Size() > 0);
+ // Do an "ugly" copy-and-paste from the expanded data in order to generate
+ // more data to correlate (but not interpolate) with.
+ const int required_length = (120 + 80 + 2) * fs_mult_;
+ if (expanded_.Size() < static_cast<size_t>(required_length)) {
+ while (expanded_.Size() < static_cast<size_t>(required_length)) {
+ // Append one more pitch period each time.
+ expanded_.PushBack(expanded_temp);
+ }
+ // Trim the length to exactly |required_length|.
+ expanded_.PopBack(expanded_.Size() - required_length);
+ }
+ assert(expanded_.Size() >= static_cast<size_t>(required_length));
+ return required_length;
+}
+
+int16_t Merge::SignalScaling(const int16_t* input, int input_length,
+ const int16_t* expanded_signal,
+ int16_t* expanded_max, int16_t* input_max) const {
+ // Adjust the muting factor if the new vector has more or less energy than
+ // the expanded (background noise) signal.
+ const int mod_input_length = std::min(64 * fs_mult_, input_length);
+ *expanded_max = WebRtcSpl_MaxAbsValueW16(expanded_signal, mod_input_length);
+ *input_max = WebRtcSpl_MaxAbsValueW16(input, mod_input_length);
+
+ // Calculate energy of expanded signal.
+ // |log_fs_mult| is log2(fs_mult_), but is not exact for 48000 Hz.
+ int log_fs_mult = 30 - WebRtcSpl_NormW32(fs_mult_);
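+ // (For fs_mult_ = 1, 2, 4 this gives 0, 1, 2; for fs_mult_ = 6 it gives 2,
+ // i.e., floor(log2(6)).)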
+ int expanded_shift = 6 + log_fs_mult
+ - WebRtcSpl_NormW32(*expanded_max * *expanded_max);
+ expanded_shift = std::max(expanded_shift, 0);
+ int32_t energy_expanded = WebRtcSpl_DotProductWithScale(expanded_signal,
+ expanded_signal,
+ mod_input_length,
+ expanded_shift);
+
+ // Calculate energy of input signal.
+ int input_shift = 6 + log_fs_mult -
+ WebRtcSpl_NormW32(*input_max * *input_max);
+ input_shift = std::max(input_shift, 0);
+ int32_t energy_input = WebRtcSpl_DotProductWithScale(input, input,
+ mod_input_length,
+ input_shift);
+
+ // Align to the same Q-domain.
+ if (input_shift > expanded_shift) {
+ energy_expanded = energy_expanded >> (input_shift - expanded_shift);
+ } else {
+ energy_input = energy_input >> (expanded_shift - input_shift);
+ }
+
+ // Calculate muting factor to use for new frame.
+ int16_t mute_factor;
+ if (energy_input > energy_expanded) {
+ // Normalize |energy_input| to 14 bits.
+ int16_t temp_shift = WebRtcSpl_NormW32(energy_input) - 17;
+ energy_input = WEBRTC_SPL_SHIFT_W32(energy_input, temp_shift);
+ // Put |energy_expanded| in a domain 14 higher, so that
+ // energy_expanded / energy_input is in Q14.
+ energy_expanded = WEBRTC_SPL_SHIFT_W32(energy_expanded, temp_shift + 14);
+ // Calculate sqrt(energy_expanded / energy_input) in Q14.
+ mute_factor = WebRtcSpl_SqrtFloor((energy_expanded / energy_input) << 14);
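+ // (The ratio is in Q14; shifting left by 14 makes it Q28, and the integer
+ // square root of a Q28 value is in Q14.)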
+ } else {
+ // Set to 1 (in Q14) when |expanded| has higher energy than |input|.
+ mute_factor = 16384;
+ }
+
+ return mute_factor;
+}
+
+// TODO(hlundin): There are some parameter values in this method that seem
+// strange. Compare with Expand::Correlation.
+void Merge::Downsample(const int16_t* input, int input_length,
+ const int16_t* expanded_signal, int expanded_length) {
+ const int16_t* filter_coefficients;
+ int num_coefficients;
+ int decimation_factor = fs_hz_ / 4000;
+ static const int kCompensateDelay = 0;
+ int length_limit = fs_hz_ / 100; // 10 ms in samples.
+ if (fs_hz_ == 8000) {
+ filter_coefficients = DspHelper::kDownsample8kHzTbl;
+ num_coefficients = 3;
+ } else if (fs_hz_ == 16000) {
+ filter_coefficients = DspHelper::kDownsample16kHzTbl;
+ num_coefficients = 5;
+ } else if (fs_hz_ == 32000) {
+ filter_coefficients = DspHelper::kDownsample32kHzTbl;
+ num_coefficients = 7;
+ } else { // fs_hz_ == 48000
+ filter_coefficients = DspHelper::kDownsample48kHzTbl;
+ num_coefficients = 7;
+ }
+ int signal_offset = num_coefficients - 1;
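+ // Start num_coefficients - 1 samples into the signals, so the FIR filter has
+ // enough preceding samples to use as history.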
+ WebRtcSpl_DownsampleFast(&expanded_signal[signal_offset],
+ expanded_length - signal_offset,
+ expanded_downsampled_, kExpandDownsampLength,
+ filter_coefficients, num_coefficients,
+ decimation_factor, kCompensateDelay);
+ if (input_length <= length_limit) {
+ // Not quite long enough, so we have to cheat a bit.
+ int16_t temp_len = input_length - signal_offset;
+ // TODO(hlundin): Should |downsamp_temp_len| be corrected for round-off
+ // errors? I.e., (temp_len + decimation_factor - 1) / decimation_factor?
+ int16_t downsamp_temp_len = temp_len / decimation_factor;
+ WebRtcSpl_DownsampleFast(&input[signal_offset], temp_len,
+ input_downsampled_, downsamp_temp_len,
+ filter_coefficients, num_coefficients,
+ decimation_factor, kCompensateDelay);
+ memset(&input_downsampled_[downsamp_temp_len], 0,
+ sizeof(int16_t) * (kInputDownsampLength - downsamp_temp_len));
+ } else {
+ WebRtcSpl_DownsampleFast(&input[signal_offset],
+ input_length - signal_offset, input_downsampled_,
+ kInputDownsampLength, filter_coefficients,
+ num_coefficients, decimation_factor,
+ kCompensateDelay);
+ }
+}
+
+int16_t Merge::CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
+ int start_position, int input_length,
+ int expand_period) const {
+ // Calculate correlation without any normalization.
+ const int max_corr_length = kMaxCorrelationLength;
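+ // Convert the maximum lag to the 4 kHz downsampled domain; fs_mult_ * 2
+ // equals fs_hz_ / 4000.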
+ int stop_position_downsamp = std::min(
+ max_corr_length, expand_->max_lag() / (fs_mult_ * 2) + 1);
+ int16_t correlation_shift = 0;
+ if (expanded_max * input_max > 26843546) {
+ correlation_shift = 3;
+ }
+
+ int32_t correlation[kMaxCorrelationLength];
+ WebRtcSpl_CrossCorrelation(correlation, input_downsampled_,
+ expanded_downsampled_, kInputDownsampLength,
+ stop_position_downsamp, correlation_shift, 1);
+
+ // Normalize correlation to 14 bits and copy to a 16-bit array.
+ const int pad_length = static_cast<int>(expand_->overlap_length() - 1);
+ const int correlation_buffer_size = 2 * pad_length + kMaxCorrelationLength;
+ scoped_ptr<int16_t[]> correlation16(new int16_t[correlation_buffer_size]);
+ memset(correlation16.get(), 0, correlation_buffer_size * sizeof(int16_t));
+ int16_t* correlation_ptr = &correlation16[pad_length];
+ int32_t max_correlation = WebRtcSpl_MaxAbsValueW32(correlation,
+ stop_position_downsamp);
+ int16_t norm_shift = std::max(0, 17 - WebRtcSpl_NormW32(max_correlation));
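+ // Right-shifting by norm_shift leaves at most 14 significant bits, so the
+ // values fit in int16_t with headroom.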
+ WebRtcSpl_VectorBitShiftW32ToW16(correlation_ptr, stop_position_downsamp,
+ correlation, norm_shift);
+
+ // Calculate allowed starting point for peak finding.
+ // The peak location bestIndex must fulfill two criteria:
+ // (1) w16_bestIndex + input_length <
+ // timestamps_per_call_ + expand_->overlap_length();
+ // (2) w16_bestIndex + input_length < start_position.
+ int start_index = timestamps_per_call_ +
+ static_cast<int>(expand_->overlap_length());
+ start_index = std::max(start_position, start_index);
+ start_index = std::max(start_index - input_length, 0);
+ // Downscale starting index to 4kHz domain. (fs_mult_ * 2 = fs_hz_ / 4000.)
+ int start_index_downsamp = start_index / (fs_mult_ * 2);
+
+ // Calculate a modified |stop_position_downsamp| to account for the increased
+ // start index |start_index_downsamp| and the effective array length.
+ int modified_stop_pos =
+ std::min(stop_position_downsamp,
+ kMaxCorrelationLength + pad_length - start_index_downsamp);
+ int best_correlation_index;
+ int16_t best_correlation;
+ static const int kNumCorrelationCandidates = 1;
+ DspHelper::PeakDetection(&correlation_ptr[start_index_downsamp],
+ modified_stop_pos, kNumCorrelationCandidates,
+ fs_mult_, &best_correlation_index,
+ &best_correlation);
+ // Compensate for modified start index.
+ best_correlation_index += start_index;
+
+ // Ensure that underrun does not occur for the 10 ms case => we have to get at
+ // least 10 ms + overlap. (This should never happen thanks to the above
+ // modification of the peak-finding starting point.)
+ while ((best_correlation_index + input_length) <
+ static_cast<int>(timestamps_per_call_ + expand_->overlap_length()) ||
+ best_correlation_index + input_length < start_position) {
+ assert(false); // Should never happen.
+ best_correlation_index += expand_period; // Jump one lag ahead.
+ }
+ return best_correlation_index;
+}
+
+int Merge::RequiredFutureSamples() {
+ return static_cast<int>(fs_hz_ / 100 * num_channels_); // 10 ms.
+}
+
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h
new file mode 100644
index 00000000000..1bf0483dfe1
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MERGE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MERGE_H_
+
+#include <assert.h>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class Expand;
+class SyncBuffer;
+
+// This class handles the transition from expansion to normal operation.
+// When a packet is not available for decoding when needed, the expand operation
+// is called to generate extrapolation data. If the missing packet arrives,
+// i.e., it was just delayed, it can be decoded and appended directly to the
+// end of the expanded data (thanks to how the Expand class operates). However,
+// if a later packet arrives instead, the loss is a fact, and the new data must
+// be stitched together with the end of the expanded data. This stitching is
+// what the Merge class does.
+class Merge {
+ public:
+ Merge(int fs_hz, size_t num_channels, Expand* expand, SyncBuffer* sync_buffer)
+ : fs_hz_(fs_hz),
+ num_channels_(num_channels),
+ fs_mult_(fs_hz_ / 8000),
+ timestamps_per_call_(fs_hz_ / 100),
+ expand_(expand),
+ sync_buffer_(sync_buffer),
+ expanded_(num_channels_) {
+ assert(num_channels_ > 0);
+ }
+
+ virtual ~Merge() {}
+
+ // The main method to produce the audio data. The decoded data is supplied in
+ // |input|, having |input_length| samples in total for all channels
+ // (interleaved). The result is written to |output|. The number of channels
+ // allocated in |output| defines the number of channels that will be used when
+ // de-interleaving |input|. The values in |external_mute_factor_array| (Q14)
+ // will be used to scale the audio, and is updated in the process. The array
+ // must have |num_channels_| elements.
+ virtual int Process(int16_t* input, size_t input_length,
+ int16_t* external_mute_factor_array,
+ AudioMultiVector* output);
+
+ virtual int RequiredFutureSamples();
+
+ protected:
+ const int fs_hz_;
+ const size_t num_channels_;
+
+ private:
+ static const int kMaxSampleRate = 48000;
+ static const int kExpandDownsampLength = 100;
+ static const int kInputDownsampLength = 40;
+ static const int kMaxCorrelationLength = 60;
+
+ // Calls |expand_| to get more expansion data to merge with. The data is
+ // written to |expanded_|. Returns the length of the expanded data,
+ // while |expand_period| will be the number of samples in one expansion period
+ // (typically one pitch period). The value of |old_length| will be the number
+ // of samples that were taken from the |sync_buffer_|.
+ int GetExpandedSignal(int* old_length, int* expand_period);
+
+ // Analyzes |input| and |expanded_signal| to find maximum values. Returns
+ // a muting factor (Q14) to be used on the new data.
+ int16_t SignalScaling(const int16_t* input, int input_length,
+ const int16_t* expanded_signal,
+ int16_t* expanded_max, int16_t* input_max) const;
+
+ // Downsamples |input| (|input_length| samples) and |expanded_signal| to
+ // 4 kHz sample rate. The downsampled signals are written to
+ // |input_downsampled_| and |expanded_downsampled_|, respectively.
+ void Downsample(const int16_t* input, int input_length,
+ const int16_t* expanded_signal, int expanded_length);
+
+ // Calculates cross-correlation between |input_downsampled_| and
+ // |expanded_downsampled_|, and finds the correlation maximum. The maximizing
+ // lag is returned.
+ int16_t CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
+ int start_position, int input_length,
+ int expand_period) const;
+
+ const int fs_mult_; // fs_hz_ / 8000.
+ const int timestamps_per_call_;
+ Expand* expand_;
+ SyncBuffer* sync_buffer_;
+ int16_t expanded_downsampled_[kExpandDownsampLength];
+ int16_t input_downsampled_[kInputDownsampLength];
+ AudioMultiVector expanded_;
+
+ DISALLOW_COPY_AND_ASSIGN(Merge);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MERGE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge_unittest.cc
new file mode 100644
index 00000000000..fb5f789ff1e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge_unittest.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for Merge class.
+
+#include "webrtc/modules/audio_coding/neteq/merge.h"
+
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+
+namespace webrtc {
+
+TEST(Merge, CreateAndDestroy) {
+ int fs = 8000;
+ size_t channels = 1;
+ BackgroundNoise bgn(channels);
+ SyncBuffer sync_buffer(1, 1000);
+ RandomVector random_vector;
+ Expand expand(&bgn, &sync_buffer, &random_vector, fs, channels);
+ Merge merge(fs, channels, &expand, &sync_buffer);
+}
+
+// TODO(hlundin): Write more tests.
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/min_distortion.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/min_distortion.c
deleted file mode 100644
index 47e2b442cd6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/min_distortion.c
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Calculate best overlap fit according to distortion measure.
- */
-
-#include "dsp_helpfunctions.h"
-
-#include "signal_processing_library.h"
-
-int16_t WebRtcNetEQ_MinDistortion(const int16_t *pw16_data,
- int16_t w16_minLag, int16_t w16_maxLag,
- int16_t len, int32_t *pw16_dist)
-{
- int i, j;
- const int16_t *pw16_data1;
- const int16_t *pw16_data2;
- int32_t w32_diff;
- int32_t w32_sumdiff;
- int16_t bestIndex = -1;
- int32_t minDist = WEBRTC_SPL_WORD32_MAX;
-
- for (i = w16_minLag; i <= w16_maxLag; i++)
- {
- w32_sumdiff = 0;
- pw16_data1 = pw16_data;
- pw16_data2 = pw16_data - i;
-
- for (j = 0; j < len; j++)
- {
- w32_diff = pw16_data1[j] - pw16_data2[j];
- w32_sumdiff += WEBRTC_SPL_ABS_W32(w32_diff);
- }
-
- /* Compare with previous minimum */
- if (w32_sumdiff < minDist)
- {
- minDist = w32_sumdiff;
- bestIndex = i;
- }
- }
-
- *pw16_dist = minDist;
-
- return bestIndex;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c
deleted file mode 100644
index 6c70d4916a3..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This function mixes a voiced signal with an unvoiced signal and
- * updates the weight on a sample by sample basis.
- */
-
-#include "dsp_helpfunctions.h"
-
-#include "signal_processing_library.h"
-
-void WebRtcNetEQ_MixVoiceUnvoice(int16_t *pw16_outData, int16_t *pw16_voicedVec,
- int16_t *pw16_unvoicedVec,
- int16_t *w16_current_vfraction,
- int16_t w16_vfraction_change, int16_t N)
-{
- int i;
- int16_t w16_tmp2;
- int16_t vfraction = *w16_current_vfraction;
-
- w16_tmp2 = 16384 - vfraction;
- for (i = 0; i < N; i++)
- {
- pw16_outData[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- WEBRTC_SPL_MUL_16_16(vfraction, pw16_voicedVec[i]) +
- WEBRTC_SPL_MUL_16_16(w16_tmp2, pw16_unvoicedVec[i]) + 8192,
- 14);
- vfraction -= w16_vfraction_change;
- w16_tmp2 += w16_vfraction_change;
- }
- *w16_current_vfraction = vfraction;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h
new file mode 100644
index 00000000000..edf3b54e9ea
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_DECODER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_DECODER_H_
+
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockAudioDecoder : public AudioDecoder {
+ public:
+ MockAudioDecoder() : AudioDecoder(kDecoderArbitrary) {}
+ virtual ~MockAudioDecoder() { Die(); }
+ MOCK_METHOD0(Die, void());
+ MOCK_METHOD4(Decode, int(const uint8_t*, size_t, int16_t*,
+ AudioDecoder::SpeechType*));
+ MOCK_CONST_METHOD0(HasDecodePlc, bool());
+ MOCK_METHOD2(DecodePlc, int(int, int16_t*));
+ MOCK_METHOD0(Init, int());
+ MOCK_METHOD5(IncomingPacket, int(const uint8_t*, size_t, uint16_t, uint32_t,
+ uint32_t));
+ MOCK_METHOD0(ErrorCode, int());
+ MOCK_CONST_METHOD0(codec_type, NetEqDecoder());
+ MOCK_METHOD1(CodecSupported, bool(NetEqDecoder));
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_DECODER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_vector.h
new file mode 100644
index 00000000000..a5a787c7aa4
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_vector.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_VECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_VECTOR_H_
+
+#include "webrtc/modules/audio_coding/neteq/audio_vector.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockAudioVector : public AudioVector {
+ public:
+ MOCK_METHOD0(Clear,
+ void());
+ MOCK_CONST_METHOD1(CopyFrom,
+ void(AudioVector* copy_to));
+ MOCK_METHOD1(PushFront,
+ void(const AudioVector& prepend_this));
+ MOCK_METHOD2(PushFront,
+ void(const int16_t* prepend_this, size_t length));
+ MOCK_METHOD1(PushBack,
+ void(const AudioVector& append_this));
+ MOCK_METHOD2(PushBack,
+ void(const int16_t* append_this, size_t length));
+ MOCK_METHOD1(PopFront,
+ void(size_t length));
+ MOCK_METHOD1(PopBack,
+ void(size_t length));
+ MOCK_METHOD1(Extend,
+ void(size_t extra_length));
+ MOCK_METHOD3(InsertAt,
+ void(const int16_t* insert_this, size_t length, size_t position));
+ MOCK_METHOD3(OverwriteAt,
+ void(const int16_t* insert_this, size_t length, size_t position));
+ MOCK_CONST_METHOD0(Size,
+ size_t());
+ MOCK_CONST_METHOD0(Empty,
+ bool());
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_VECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h
new file mode 100644
index 00000000000..d9210668dde
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
+
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockBufferLevelFilter : public BufferLevelFilter {
+ public:
+ virtual ~MockBufferLevelFilter() { Die(); }
+ MOCK_METHOD0(Die,
+ void());
+ MOCK_METHOD0(Reset,
+ void());
+ MOCK_METHOD3(Update,
+ void(int buffer_size_packets, int time_stretched_samples,
+ int packet_len_samples));
+ MOCK_METHOD1(SetTargetBufferLevel,
+ void(int target_buffer_level));
+ MOCK_CONST_METHOD0(filtered_current_level,
+ int());
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
new file mode 100644
index 00000000000..583fa54ba02
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DECODER_DATABASE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DECODER_DATABASE_H_
+
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockDecoderDatabase : public DecoderDatabase {
+ public:
+ virtual ~MockDecoderDatabase() { Die(); }
+ MOCK_METHOD0(Die, void());
+ MOCK_CONST_METHOD0(Empty,
+ bool());
+ MOCK_CONST_METHOD0(Size,
+ int());
+ MOCK_METHOD0(Reset,
+ void());
+ MOCK_METHOD2(RegisterPayload,
+ int(uint8_t rtp_payload_type, NetEqDecoder codec_type));
+ MOCK_METHOD4(InsertExternal,
+ int(uint8_t rtp_payload_type, NetEqDecoder codec_type, int fs_hz,
+ AudioDecoder* decoder));
+ MOCK_METHOD1(Remove,
+ int(uint8_t rtp_payload_type));
+ MOCK_CONST_METHOD1(GetDecoderInfo,
+ const DecoderInfo*(uint8_t rtp_payload_type));
+ MOCK_CONST_METHOD1(GetRtpPayloadType,
+ uint8_t(NetEqDecoder codec_type));
+ MOCK_METHOD1(GetDecoder,
+ AudioDecoder*(uint8_t rtp_payload_type));
+ MOCK_CONST_METHOD2(IsType,
+ bool(uint8_t rtp_payload_type, NetEqDecoder codec_type));
+ MOCK_CONST_METHOD1(IsComfortNoise,
+ bool(uint8_t rtp_payload_type));
+ MOCK_CONST_METHOD1(IsDtmf,
+ bool(uint8_t rtp_payload_type));
+ MOCK_CONST_METHOD1(IsRed,
+ bool(uint8_t rtp_payload_type));
+ MOCK_METHOD2(SetActiveDecoder,
+ int(uint8_t rtp_payload_type, bool* new_decoder));
+ MOCK_METHOD0(GetActiveDecoder,
+ AudioDecoder*());
+ MOCK_METHOD1(SetActiveCngDecoder,
+ int(uint8_t rtp_payload_type));
+ MOCK_METHOD0(GetActiveCngDecoder,
+ AudioDecoder*());
+ MOCK_CONST_METHOD1(CheckPayloadTypes,
+ int(const PacketList& packet_list));
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DECODER_DATABASE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h
new file mode 100644
index 00000000000..c21a1c28c73
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_MANAGER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_MANAGER_H_
+
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockDelayManager : public DelayManager {
+ public:
+ MockDelayManager(int max_packets_in_buffer, DelayPeakDetector* peak_detector)
+ : DelayManager(max_packets_in_buffer, peak_detector) {}
+ virtual ~MockDelayManager() { Die(); }
+ MOCK_METHOD0(Die, void());
+ MOCK_CONST_METHOD0(iat_vector,
+ const IATVector&());
+ MOCK_METHOD3(Update,
+ int(uint16_t sequence_number, uint32_t timestamp, int sample_rate_hz));
+ MOCK_METHOD1(CalculateTargetLevel,
+ int(int iat_packets));
+ MOCK_METHOD1(SetPacketAudioLength,
+ int(int length_ms));
+ MOCK_METHOD0(Reset,
+ void());
+ MOCK_CONST_METHOD0(AverageIAT,
+ int());
+ MOCK_CONST_METHOD0(PeakFound,
+ bool());
+ MOCK_METHOD1(UpdateCounters,
+ void(int elapsed_time_ms));
+ MOCK_METHOD0(ResetPacketIatCount,
+ void());
+ MOCK_CONST_METHOD2(BufferLimits,
+ void(int* lower_limit, int* higher_limit));
+ MOCK_CONST_METHOD0(TargetLevel,
+ int());
+ MOCK_METHOD1(LastDecoderType,
+ void(NetEqDecoder decoder_type));
+ MOCK_METHOD1(set_extra_delay_ms,
+ void(int16_t delay));
+ MOCK_CONST_METHOD0(base_target_level,
+ int());
+ MOCK_METHOD1(set_streaming_mode,
+ void(bool value));
+ MOCK_CONST_METHOD0(last_pack_cng_or_dtmf,
+ int());
+ MOCK_METHOD1(set_last_pack_cng_or_dtmf,
+ void(int value));
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_MANAGER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h
new file mode 100644
index 00000000000..26e09329e69
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
+
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockDelayPeakDetector : public DelayPeakDetector {
+ public:
+ virtual ~MockDelayPeakDetector() { Die(); }
+ MOCK_METHOD0(Die, void());
+ MOCK_METHOD0(Reset, void());
+ MOCK_METHOD1(SetPacketAudioLength, void(int length_ms));
+ MOCK_METHOD0(peak_found, bool());
+ MOCK_CONST_METHOD0(MaxPeakHeight, int());
+ MOCK_CONST_METHOD0(MaxPeakPeriod, int());
+ MOCK_METHOD2(Update, bool(int inter_arrival_time, int target_level));
+ MOCK_METHOD1(IncrementCounter, void(int inc_ms));
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h
new file mode 100644
index 00000000000..0351d6b1e51
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_BUFFER_H_
+
+#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockDtmfBuffer : public DtmfBuffer {
+ public:
+ MockDtmfBuffer(int fs) : DtmfBuffer(fs) {}
+ virtual ~MockDtmfBuffer() { Die(); }
+ MOCK_METHOD0(Die, void());
+ MOCK_METHOD0(Flush,
+ void());
+ MOCK_METHOD1(InsertEvent,
+ int(const DtmfEvent& event));
+ MOCK_METHOD2(GetEvent,
+ bool(uint32_t current_timestamp, DtmfEvent* event));
+ MOCK_CONST_METHOD0(Length,
+ size_t());
+ MOCK_CONST_METHOD0(Empty,
+ bool());
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h
new file mode 100644
index 00000000000..3bed4d152b9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
+
+#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockDtmfToneGenerator : public DtmfToneGenerator {
+ public:
+ virtual ~MockDtmfToneGenerator() { Die(); }
+ MOCK_METHOD0(Die, void());
+ MOCK_METHOD3(Init,
+ int(int fs, int event, int attenuation));
+ MOCK_METHOD0(Reset,
+ void());
+ MOCK_METHOD2(Generate,
+ int(int num_samples, AudioMultiVector* output));
+ MOCK_CONST_METHOD0(initialized,
+ bool());
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h
new file mode 100644
index 00000000000..9522b537e3e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
+
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+
+#include "gmock/gmock.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+using ::testing::_;
+using ::testing::Invoke;
+
+// Implement an external version of the PCM16b decoder. This is a copy from
+// audio_decoder_impl.{cc, h}.
+class ExternalPcm16B : public AudioDecoder {
+ public:
+ explicit ExternalPcm16B(enum NetEqDecoder type)
+ : AudioDecoder(type) {
+ }
+
+ virtual int Decode(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type) {
+ int16_t temp_type;
+ int16_t ret = WebRtcPcm16b_DecodeW16(
+ state_, reinterpret_cast<int16_t*>(const_cast<uint8_t*>(encoded)),
+ static_cast<int16_t>(encoded_len), decoded, &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+ }
+
+ virtual int Init() { return 0; }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(ExternalPcm16B);
+};
+
+// Create a mock of ExternalPcm16B which delegates all calls to the real object.
+// The reason is that we can then track that the correct calls are being made.
+class MockExternalPcm16B : public ExternalPcm16B {
+ public:
+ explicit MockExternalPcm16B(enum NetEqDecoder type)
+ : ExternalPcm16B(type),
+ real_(type) {
+ // By default, all calls are delegated to the real object.
+ ON_CALL(*this, Decode(_, _, _, _))
+ .WillByDefault(Invoke(&real_, &ExternalPcm16B::Decode));
+ ON_CALL(*this, HasDecodePlc())
+ .WillByDefault(Invoke(&real_, &ExternalPcm16B::HasDecodePlc));
+ ON_CALL(*this, DecodePlc(_, _))
+ .WillByDefault(Invoke(&real_, &ExternalPcm16B::DecodePlc));
+ ON_CALL(*this, Init())
+ .WillByDefault(Invoke(&real_, &ExternalPcm16B::Init));
+ ON_CALL(*this, IncomingPacket(_, _, _, _, _))
+ .WillByDefault(Invoke(&real_, &ExternalPcm16B::IncomingPacket));
+ ON_CALL(*this, ErrorCode())
+ .WillByDefault(Invoke(&real_, &ExternalPcm16B::ErrorCode));
+ ON_CALL(*this, codec_type())
+ .WillByDefault(Invoke(&real_, &ExternalPcm16B::codec_type));
+ }
+ virtual ~MockExternalPcm16B() { Die(); }
+
+ MOCK_METHOD0(Die, void());
+ MOCK_METHOD4(Decode,
+ int(const uint8_t* encoded, size_t encoded_len, int16_t* decoded,
+ SpeechType* speech_type));
+ MOCK_CONST_METHOD0(HasDecodePlc,
+ bool());
+ MOCK_METHOD2(DecodePlc,
+ int(int num_frames, int16_t* decoded));
+ MOCK_METHOD0(Init,
+ int());
+ MOCK_METHOD5(IncomingPacket,
+ int(const uint8_t* payload, size_t payload_len,
+ uint16_t rtp_sequence_number, uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp));
+ MOCK_METHOD0(ErrorCode,
+ int());
+ MOCK_CONST_METHOD0(codec_type,
+ NetEqDecoder());
+
+ private:
+ ExternalPcm16B real_;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
new file mode 100644
index 00000000000..2882248c192
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_BUFFER_H_
+
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockPacketBuffer : public PacketBuffer {
+ public:
+ MockPacketBuffer(size_t max_number_of_packets)
+ : PacketBuffer(max_number_of_packets) {}
+ virtual ~MockPacketBuffer() { Die(); }
+ MOCK_METHOD0(Die, void());
+ MOCK_METHOD0(Flush,
+ void());
+ MOCK_CONST_METHOD0(Empty,
+ bool());
+ MOCK_METHOD1(InsertPacket,
+ int(Packet* packet));
+ MOCK_METHOD4(InsertPacketList,
+ int(PacketList* packet_list,
+ const DecoderDatabase& decoder_database,
+ uint8_t* current_rtp_payload_type,
+ uint8_t* current_cng_rtp_payload_type));
+ MOCK_CONST_METHOD1(NextTimestamp,
+ int(uint32_t* next_timestamp));
+ MOCK_CONST_METHOD2(NextHigherTimestamp,
+ int(uint32_t timestamp, uint32_t* next_timestamp));
+ MOCK_CONST_METHOD0(NextRtpHeader,
+ const RTPHeader*());
+ MOCK_METHOD1(GetNextPacket,
+ Packet*(int* discard_count));
+ MOCK_METHOD0(DiscardNextPacket,
+ int());
+ MOCK_METHOD1(DiscardOldPackets,
+ int(uint32_t timestamp_limit));
+ MOCK_CONST_METHOD0(NumPacketsInBuffer,
+ int());
+ MOCK_METHOD1(IncrementWaitingTimes,
+ void(int));
+ MOCK_CONST_METHOD0(current_memory_bytes,
+ int());
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_payload_splitter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_payload_splitter.h
new file mode 100644
index 00000000000..f1665423afd
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_payload_splitter.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PAYLOAD_SPLITTER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PAYLOAD_SPLITTER_H_
+
+#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
+
+#include "gmock/gmock.h"
+
+namespace webrtc {
+
+class MockPayloadSplitter : public PayloadSplitter {
+ public:
+ MOCK_METHOD1(SplitRed,
+ int(PacketList* packet_list));
+ MOCK_METHOD2(SplitFec,
+ int(PacketList* packet_list, DecoderDatabase* decoder_database));
+ MOCK_METHOD2(CheckRedPayloads,
+ int(PacketList* packet_list, const DecoderDatabase& decoder_database));
+ MOCK_METHOD2(SplitAudio,
+ int(PacketList* packet_list, const DecoderDatabase& decoder_database));
+ MOCK_METHOD4(SplitBySamples,
+ void(const Packet* packet, int bytes_per_ms, int timestamps_per_ms,
+ PacketList* new_packets));
+ MOCK_METHOD4(SplitByFrames,
+ int(const Packet* packet, int bytes_per_frame, int timestamps_per_frame,
+ PacketList* new_packets));
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PAYLOAD_SPLITTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mute_signal.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/mute_signal.c
deleted file mode 100644
index 767a71dee1f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mute_signal.c
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This function mutes a signal linearly on a sample by sample basis.
- */
-
-#include "dsp_helpfunctions.h"
-
-#include "signal_processing_library.h"
-
-void WebRtcNetEQ_MuteSignal(int16_t *pw16_inout, int16_t muteSlope,
- int16_t N)
-{
- int i;
- int32_t w32_tmp = 1048608; /* (16384<<6 + 32) */
-
- for (i = 0; i < N; i++)
- {
- pw16_inout[i]
- = (int16_t) ((WEBRTC_SPL_MUL_16_16((int16_t)(w32_tmp>>6), pw16_inout[i])
- + 8192) >> 14);
- w32_tmp -= muteSlope;
- }
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc
new file mode 100644
index 00000000000..7edacde7633
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+
+#include "webrtc/modules/audio_coding/neteq/accelerate.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
+#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
+#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
+
+namespace webrtc {
+
+// Creates all the classes needed and injects them into a new NetEqImpl
+// object. Returns the new object.
+NetEq* NetEq::Create(const NetEq::Config& config) {
+ BufferLevelFilter* buffer_level_filter = new BufferLevelFilter;
+ DecoderDatabase* decoder_database = new DecoderDatabase;
+ DelayPeakDetector* delay_peak_detector = new DelayPeakDetector;
+ DelayManager* delay_manager =
+ new DelayManager(config.max_packets_in_buffer, delay_peak_detector);
+ delay_manager->SetMaximumDelay(config.max_delay_ms);
+ DtmfBuffer* dtmf_buffer = new DtmfBuffer(config.sample_rate_hz);
+ DtmfToneGenerator* dtmf_tone_generator = new DtmfToneGenerator;
+ PacketBuffer* packet_buffer = new PacketBuffer(config.max_packets_in_buffer);
+ PayloadSplitter* payload_splitter = new PayloadSplitter;
+ TimestampScaler* timestamp_scaler = new TimestampScaler(*decoder_database);
+ AccelerateFactory* accelerate_factory = new AccelerateFactory;
+ ExpandFactory* expand_factory = new ExpandFactory;
+ PreemptiveExpandFactory* preemptive_expand_factory =
+ new PreemptiveExpandFactory;
+ return new NetEqImpl(config.sample_rate_hz,
+ buffer_level_filter,
+ decoder_database,
+ delay_manager,
+ delay_peak_detector,
+ dtmf_buffer,
+ dtmf_tone_generator,
+ packet_buffer,
+ payload_splitter,
+ timestamp_scaler,
+ accelerate_factory,
+ expand_factory,
+ preemptive_expand_factory);
+}
+
+} // namespace webrtc
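Create() is the intended way to obtain a NetEq instance, and the caller owns the returned object; the tests elsewhere in this change create it this way and delete it directly. A minimal caller sketch, with illustrative (not authoritative) field values:

NetEq::Config config;
config.sample_rate_hz = 16000;        // Initial output sample rate.
config.max_packets_in_buffer = 50;    // Bound on the packet buffer.
config.max_delay_ms = 2000;           // Cap handed to the DelayManager.
NetEq* neteq = NetEq::Create(config);
// ... register payload types, insert packets, pull audio ...
delete neteq;  // The caller owns the instance.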
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
index 27e5c37aab7..21ccee41e1c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
@@ -7,242 +7,208 @@
# be found in the AUTHORS file in the root of the source tree.
{
+ 'variables': {
+ 'codecs': [
+ 'G711',
+ 'G722',
+ 'PCM16B',
+ 'iLBC',
+ 'iSAC',
+ 'iSACFix',
+ 'CNG',
+ ],
+ 'neteq_defines': [],
+ 'conditions': [
+ ['include_opus==1', {
+ 'codecs': ['webrtc_opus',],
+ 'neteq_defines': ['WEBRTC_CODEC_OPUS',],
+ }],
+ ],
+ 'neteq_dependencies': [
+ '<@(codecs)',
+ '<(DEPTH)/third_party/opus/opus.gyp:opus',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ ],
+ },
'targets': [
{
- 'target_name': 'NetEq',
+ 'target_name': 'neteq',
'type': 'static_library',
'dependencies': [
- 'CNG',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ '<@(neteq_dependencies)',
],
'defines': [
- 'NETEQ_VOICEENGINE_CODECS', # TODO: Should create a Chrome define which
- 'SCRATCH', # specifies a subset of codecs to support.
+ '<@(neteq_defines)',
],
'include_dirs': [
- 'interface',
- '<(webrtc_root)',
+ # Need Opus header files for the audio classifier.
+ '<(DEPTH)/third_party/opus/src/celt',
+ '<(DEPTH)/third_party/opus/src/src',
],
'direct_dependent_settings': {
'include_dirs': [
- 'interface',
- '<(webrtc_root)',
+ # Need Opus header files for the audio classifier.
+ '<(DEPTH)/third_party/opus/src/celt',
+ '<(DEPTH)/third_party/opus/src/src',
],
},
+ 'export_dependent_settings': [
+ '<(DEPTH)/third_party/opus/opus.gyp:opus',
+ ],
'sources': [
- 'interface/webrtc_neteq.h',
- 'interface/webrtc_neteq_help_macros.h',
- 'interface/webrtc_neteq_internal.h',
- 'accelerate.c',
- 'automode.c',
- 'automode.h',
- 'bgn_update.c',
- 'buffer_stats.h',
- 'bufstats_decision.c',
- 'cng_internal.c',
- 'codec_db.c',
- 'codec_db.h',
- 'codec_db_defines.h',
- 'correlator.c',
- 'delay_logging.h',
- 'dsp.c',
- 'dsp.h',
- 'dsp_helpfunctions.c',
- 'dsp_helpfunctions.h',
- 'dtmf_buffer.c',
+ 'interface/audio_decoder.h',
+ 'interface/neteq.h',
+ 'accelerate.cc',
+ 'accelerate.h',
+ 'audio_classifier.cc',
+ 'audio_classifier.h',
+ 'audio_decoder_impl.cc',
+ 'audio_decoder_impl.h',
+ 'audio_decoder.cc',
+ 'audio_multi_vector.cc',
+ 'audio_multi_vector.h',
+ 'audio_vector.cc',
+ 'audio_vector.h',
+ 'background_noise.cc',
+ 'background_noise.h',
+ 'buffer_level_filter.cc',
+ 'buffer_level_filter.h',
+ 'comfort_noise.cc',
+ 'comfort_noise.h',
+ 'decision_logic.cc',
+ 'decision_logic.h',
+ 'decision_logic_fax.cc',
+ 'decision_logic_fax.h',
+ 'decision_logic_normal.cc',
+ 'decision_logic_normal.h',
+ 'decoder_database.cc',
+ 'decoder_database.h',
+ 'defines.h',
+ 'delay_manager.cc',
+ 'delay_manager.h',
+ 'delay_peak_detector.cc',
+ 'delay_peak_detector.h',
+ 'dsp_helper.cc',
+ 'dsp_helper.h',
+ 'dtmf_buffer.cc',
'dtmf_buffer.h',
- 'dtmf_tonegen.c',
- 'dtmf_tonegen.h',
- 'expand.c',
- 'mcu.h',
- 'mcu_address_init.c',
- 'mcu_dsp_common.c',
- 'mcu_dsp_common.h',
- 'mcu_reset.c',
- 'merge.c',
- 'min_distortion.c',
- 'mix_voice_unvoice.c',
- 'mute_signal.c',
- 'neteq_defines.h',
- 'neteq_error_codes.h',
- 'neteq_statistics.h',
- 'normal.c',
- 'packet_buffer.c',
+ 'dtmf_tone_generator.cc',
+ 'dtmf_tone_generator.h',
+ 'expand.cc',
+ 'expand.h',
+ 'merge.cc',
+ 'merge.h',
+ 'neteq_impl.cc',
+ 'neteq_impl.h',
+ 'neteq.cc',
+ 'statistics_calculator.cc',
+ 'statistics_calculator.h',
+ 'normal.cc',
+ 'normal.h',
+ 'packet_buffer.cc',
'packet_buffer.h',
- 'peak_detection.c',
- 'preemptive_expand.c',
- 'random_vector.c',
- 'recin.c',
- 'recout.c',
- 'rtcp.c',
+ 'payload_splitter.cc',
+ 'payload_splitter.h',
+ 'post_decode_vad.cc',
+ 'post_decode_vad.h',
+ 'preemptive_expand.cc',
+ 'preemptive_expand.h',
+ 'random_vector.cc',
+ 'random_vector.h',
+ 'rtcp.cc',
'rtcp.h',
- 'rtp.c',
- 'rtp.h',
- 'set_fs.c',
- 'signal_mcu.c',
- 'split_and_insert.c',
- 'unmute_signal.c',
- 'webrtc_neteq.c',
+ 'sync_buffer.cc',
+ 'sync_buffer.h',
+ 'timestamp_scaler.cc',
+ 'timestamp_scaler.h',
+ 'time_stretch.cc',
+ 'time_stretch.h',
],
},
], # targets
'conditions': [
['include_tests==1', {
+ 'includes': ['neteq_tests.gypi',],
'targets': [
{
- 'target_name': 'neteq_unittests',
+ 'target_name': 'audio_decoder_unittests',
'type': '<(gtest_target_type)',
'dependencies': [
- 'NetEq',
- 'NetEqTestTools',
- 'neteq_unittest_tools',
+ '<@(codecs)',
'<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
'<(webrtc_root)/test/test.gyp:test_support_main',
],
- 'sources': [
- 'webrtc_neteq_unittest.cc',
+ 'defines': [
+ 'AUDIO_DECODER_UNITTEST',
+ 'WEBRTC_CODEC_G722',
+ 'WEBRTC_CODEC_ILBC',
+ 'WEBRTC_CODEC_ISACFX',
+ 'WEBRTC_CODEC_ISAC',
+ 'WEBRTC_CODEC_PCM16',
+ '<@(neteq_defines)',
],
- # Disable warnings to enable Win64 build, issue 1323.
- 'msvs_disabled_warnings': [
- 4267, # size_t to int truncation.
+ 'sources': [
+ 'audio_decoder_impl.cc',
+ 'audio_decoder_impl.h',
+ 'audio_decoder_unittest.cc',
+ 'audio_decoder.cc',
+ 'interface/audio_decoder.h',
],
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
}],
],
- }, # neteq_unittests
- {
- 'target_name': 'NetEqRTPplay',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq', # NetEQ library defined above
- 'NetEqTestTools', # Test helpers
- 'G711',
- 'G722',
- 'PCM16B',
- 'iLBC',
- 'iSAC',
- 'CNG',
- ],
- 'defines': [
- # TODO: Make codec selection conditional on definitions in target NetEq
- 'CODEC_ILBC',
- 'CODEC_PCM16B',
- 'CODEC_G711',
- 'CODEC_G722',
- 'CODEC_ISAC',
- 'CODEC_PCM16B_WB',
- 'CODEC_ISAC_SWB',
- 'CODEC_ISAC_FB',
- 'CODEC_PCM16B_32KHZ',
- 'CODEC_CNGCODEC8',
- 'CODEC_CNGCODEC16',
- 'CODEC_CNGCODEC32',
- 'CODEC_ATEVENT_DECODE',
- 'CODEC_RED',
- ],
- 'include_dirs': [
- '.',
- 'test',
- ],
- 'sources': [
- 'test/NetEqRTPplay.cc',
- ],
- # Disable warnings to enable Win64 build, issue 1323.
- 'msvs_disabled_warnings': [
- 4267, # size_t to int truncation.
- ],
- },
-
- {
- 'target_name': 'neteq3_speed_test',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq',
- 'PCM16B',
- 'neteq_unittest_tools',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- ],
- 'sources': [
- 'test/neteq_speed_test.cc',
- ],
- },
+ }, # audio_decoder_unittests
{
- 'target_name': 'NetEqTestTools',
- # Collection of useful functions used in other tests
+ 'target_name': 'neteq_unittest_tools',
'type': 'static_library',
- 'variables': {
- # Expects RTP packets without payloads when enabled.
- 'neteq_dummy_rtp%': 0,
- },
'dependencies': [
- 'G711',
- 'G722',
- 'PCM16B',
- 'iLBC',
- 'iSAC',
- 'CNG',
- '<(DEPTH)/testing/gtest.gyp:gtest',
+ 'rtp_rtcp',
],
'direct_dependent_settings': {
'include_dirs': [
- 'interface',
- 'test',
+ 'tools',
],
},
- 'defines': [
- # TODO: Make codec selection conditional on definitions in target NetEq
- 'CODEC_ILBC',
- 'CODEC_PCM16B',
- 'CODEC_G711',
- 'CODEC_G722',
- 'CODEC_ISAC',
- 'CODEC_PCM16B_WB',
- 'CODEC_ISAC_SWB',
- 'CODEC_ISAC_FB',
- 'CODEC_PCM16B_32KHZ',
- 'CODEC_CNGCODEC8',
- 'CODEC_CNGCODEC16',
- 'CODEC_CNGCODEC32',
- 'CODEC_ATEVENT_DECODE',
- 'CODEC_RED',
- ],
'include_dirs': [
- 'interface',
- 'test',
+ 'tools',
],
'sources': [
- 'test/NETEQTEST_CodecClass.cc',
- 'test/NETEQTEST_CodecClass.h',
- 'test/NETEQTEST_DummyRTPpacket.cc',
- 'test/NETEQTEST_DummyRTPpacket.h',
- 'test/NETEQTEST_NetEQClass.cc',
- 'test/NETEQTEST_NetEQClass.h',
- 'test/NETEQTEST_RTPpacket.cc',
- 'test/NETEQTEST_RTPpacket.h',
- ],
- # Disable warnings to enable Win64 build, issue 1323.
- 'msvs_disabled_warnings': [
- 4267, # size_t to int truncation.
- ],
- },
+ 'tools/audio_checksum.h',
+ 'tools/audio_loop.cc',
+ 'tools/audio_loop.h',
+ 'tools/audio_sink.h',
+ 'tools/input_audio_file.cc',
+ 'tools/input_audio_file.h',
+ 'tools/output_audio_file.h',
+ 'tools/packet.cc',
+ 'tools/packet.h',
+ 'tools/packet_source.h',
+ 'tools/rtp_file_source.cc',
+ 'tools/rtp_file_source.h',
+ 'tools/rtp_generator.cc',
+ 'tools/rtp_generator.h',
+ ],
+ }, # neteq_unittest_tools
], # targets
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'targets': [
{
- 'target_name': 'neteq_unittests_apk_target',
+ 'target_name': 'audio_decoder_unittests_apk_target',
'type': 'none',
'dependencies': [
- '<(apk_tests_path):neteq_unittests_apk',
+ '<(apk_tests_path):audio_decoder_unittests_apk',
],
},
],
@@ -250,17 +216,17 @@
['test_isolation_mode != "noop"', {
'targets': [
{
- 'target_name': 'neteq_unittests_run',
+ 'target_name': 'audio_decoder_unittests_run',
'type': 'none',
'dependencies': [
- 'neteq_unittests',
+ 'audio_decoder_unittests',
],
'includes': [
'../../../build/isolate.gypi',
- 'neteq_unittests.isolate',
+ 'audio_decoder_unittests.isolate',
],
'sources': [
- 'neteq_unittests.isolate',
+ 'audio_decoder_unittests.isolate',
],
},
],
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_defines.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_defines.h
deleted file mode 100644
index b3b3da5b70e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_defines.h
+++ /dev/null
@@ -1,374 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*****************************************************************************************
- *
- * Compilation flags in NetEQ:
- *
- *****************************************************************************************
- *
- ***** Platform flags ******
- *
- * SCRATCH Run NetEQ with "Scratch memory" to save some stack memory.
- * Definition can be used on all platforms
- *
- ***** Summary flags ******
- *
- * NETEQ_ALL_SPECIAL_CODECS Add support for special codecs (CN/RED/DTMF)
- *
- * NETEQ_ALL_NB_CODECS Add support for all NB codecs (except CN/RED/DTMF)
- *
- * NETEQ_ALL_WB_CODECS Add support for all WB codecs (except CN/RED/DTMF)
- *
- * NETEQ_VOICEENGINE_CODECS Support for all NB, WB and SWB32 codecs and CN, RED and DTMF
- *
- * NETEQ_ALL_CODECS Support for all NB, WB, SWB 32kHz and SWB 48kHz as well as
- * CN, RED and DTMF
- *
- ***** Sampling frequency ******
- * (Note: usually not needed when Summary flags are used)
- *
- * NETEQ_WIDEBAND Wideband enabled
- *
- * NETEQ_32KHZ_WIDEBAND Super wideband @ 32kHz enabled
- *
- * NETEQ_48KHZ_WIDEBAND Super wideband @ 48kHz enabled
- *
- ***** Special Codec ******
- * (Note: not needed if NETEQ_ALL_CODECS is used)
- *
- * NETEQ_RED_CODEC With this flag you enable NetEQ to understand redundancy in
- * the RTP. NetEQ will use the redundancy if it's the same
- * codec
- *
- * NETEQ_CNG_CODEC Enable DTX with the CN payload
- *
- * NETEQ_ATEVENT_DECODE Enable AVT event and play out the corresponding DTMF tone
- *
- ***** Speech Codecs *****
- * (Note: Not needed if Summary flags are used)
- *
- * NETEQ_G711_CODEC Enable G.711 u- and A-law
- *
- * NETEQ_PCM16B_CODEC Enable uncompressed 16-bit
- *
- * NETEQ_ILBC_CODEC Enable iLBC
- *
- * NETEQ_ISAC_CODEC Enable iSAC
- *
- * NETEQ_ISAC_SWB_CODEC Enable iSAC-SWB
- *
- * Note that the decoder of iSAC full-band operates at 32 kHz, that is the
- * decoded signal is at 32 kHz.
- * NETEQ_ISAC_FB_CODEC Enable iSAC-FB
- *
- * NETEQ_G722_CODEC Enable G.722
- *
- * NETEQ_G729_CODEC Enable G.729
- *
- * NETEQ_G729_1_CODEC Enable G.729.1
- *
- * NETEQ_G726_CODEC Enable G.726
- *
- * NETEQ_G722_1_CODEC Enable G722.1
- *
- * NETEQ_G722_1C_CODEC Enable G722.1 Annex C
- *
- * NETEQ_OPUS_CODEC Enable Opus
- *
- * NETEQ_SPEEX_CODEC Enable Speex (at 8 and 16 kHz sample rate)
- *
- * NETEQ_CELT_CODEC Enable Celt (at 32 kHz sample rate)
- *
- * NETEQ_GSMFR_CODEC Enable GSM-FR
- *
- * NETEQ_AMR_CODEC Enable AMR (narrowband)
- *
- * NETEQ_AMRWB_CODEC Enable AMR-WB
- *
- * NETEQ_CNG_CODEC Enable DTX with the CNG payload
- *
- * NETEQ_ATEVENT_DECODE Enable AVT event and play out the corresponding DTMF tone
- *
- ***** Test flags ******
- *
- * WEBRTC_NETEQ_40BITACC_TEST Run NetEQ with simulated 40-bit accumulator to run
- * bit-exact to a DSP implementation where the main (splib
- * and NetEQ) functions have been 40-bit optimized
- *
- *****************************************************************************************
- */
-
-#if !defined NETEQ_DEFINES_H
-#define NETEQ_DEFINES_H
-
-/* Data block structure for MCU to DSP communication:
- *
- *
- * First 3 16-bit words are pre-header that contains instructions and timestamp update
- * Fourth 16-bit word is length of data block 1
- * Rest is payload data
- *
- * 0 48 64 80
- * -------------...----------------------------------------------------------------------
- * | PreHeader ... | Length 1 | Payload data 1 ...... | Lenght 2| Data block 2.... | ...
- * -------------...----------------------------------------------------------------------
- *
- *
- * Preheader:
- * 4 MSB can be either of:
- */
-
-#define DSP_INSTR_NORMAL 0x1000
-/* Payload data will contain the encoded frames */
-
-#define DSP_INSTR_MERGE 0x2000
-/* Payload data block 1 will contain the encoded frame */
-/* Info block will contain the number of missing samples */
-
-#define DSP_INSTR_EXPAND 0x3000
-/* Payload data will be empty */
-
-#define DSP_INSTR_ACCELERATE 0x4000
-/* Payload data will contain the encoded frame */
-
-#define DSP_INSTR_DO_RFC3389CNG 0x5000
-/* Payload data will contain the SID frame if there is one*/
-
-#define DSP_INSTR_DTMF_GENERATE 0x6000
-/* Payload data will be one int16_t with the current DTMF value and one
- * int16_t with the current volume value
- */
-#define DSP_INSTR_NORMAL_ONE_DESC 0x7000
-/* No encoded frames */
-
-#define DSP_INSTR_DO_CODEC_INTERNAL_CNG 0x8000
-/* Codec has a built-in VAD/DTX scheme (use the above for "no transmission") */
-
-#define DSP_INSTR_PREEMPTIVE_EXPAND 0x9000
-/* Payload data will contain the encoded frames, if any */
-
-#define DSP_INSTR_DO_ALTERNATIVE_PLC 0xB000
-/* NetEQ switched off and packet missing... */
-
-#define DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS 0xC000
-/* NetEQ switched off and packet missing... */
-
-#define DSP_INSTR_DO_AUDIO_REPETITION 0xD000
-/* NetEQ switched off and packet missing... */
-
-#define DSP_INSTR_DO_AUDIO_REPETITION_INC_TS 0xE000
-/* NetEQ switched off and packet missing... */
-
-#define DSP_INSTR_FADE_TO_BGN 0xF000
-/* Exception handling: fade out to BGN (expand) */
-
-/*
- * Next 4 bits signal additional data that needs to be transmitted
- */
-
-#define DSP_CODEC_NO_CHANGE 0x0100
-#define DSP_CODEC_NEW_CODEC 0x0200
-#define DSP_CODEC_ADD_LATE_PKT 0x0300
-#define DSP_CODEC_RESET 0x0400
-#define DSP_DTMF_PAYLOAD 0x0010
-
-/*
- * The most significant bit of the payload-length
- * is used to flag whether the associated payload
- * is redundant payload. This currently useful only for
- * iSAC, where redundant payloads have to be treated
- * differently. Every time the length is read it must be
- * masked by DSP_CODEC_MASK_RED_FLAG to ignore the flag.
- * Use DSP_CODEC_RED_FLAG to set or retrieve the flag.
- */
-#define DSP_CODEC_MASK_RED_FLAG 0x7FFF
-#define DSP_CODEC_RED_FLAG 0x8000
-
-/*
- * The first block of payload data consist of decode function pointers,
- * and then the speech blocks.
- *
- */
-
-
-/*
- * The playout modes that NetEq produced (i.e. gives more info about if the
- * Accelerate was successful or not)
- */
-
-#define MODE_NORMAL 0x0000
-#define MODE_EXPAND 0x0001
-#define MODE_MERGE 0x0002
-#define MODE_SUCCESS_ACCELERATE 0x0003
-#define MODE_UNSUCCESS_ACCELERATE 0x0004
-#define MODE_RFC3389CNG 0x0005
-#define MODE_LOWEN_ACCELERATE 0x0006
-#define MODE_DTMF 0x0007
-#define MODE_ONE_DESCRIPTOR 0x0008
-#define MODE_CODEC_INTERNAL_CNG 0x0009
-#define MODE_SUCCESS_PREEMPTIVE 0x000A
-#define MODE_UNSUCCESS_PREEMPTIVE 0x000B
-#define MODE_LOWEN_PREEMPTIVE 0x000C
-#define MODE_FADE_TO_BGN 0x000D
-
-#define MODE_ERROR 0x0010
-
-#define MODE_AWAITING_CODEC_PTR 0x0100
-
-#define MODE_BGN_ONLY 0x0200
-
-#define MODE_MASTER_DTMF_SIGNAL 0x0400
-
-#define MODE_USING_STEREO 0x0800
-
-
-
-/***********************/
-/* Group codec defines */
-/***********************/
-
-#if (defined(NETEQ_ALL_SPECIAL_CODECS))
- #define NETEQ_CNG_CODEC
- #define NETEQ_ATEVENT_DECODE
- #define NETEQ_RED_CODEC
- #define NETEQ_VAD
- #define NETEQ_ARBITRARY_CODEC
-#endif
-
-#if (defined(NETEQ_ALL_NB_CODECS)) /* Except RED, DTMF and CNG */
- #define NETEQ_PCM16B_CODEC
- #define NETEQ_G711_CODEC
- #define NETEQ_ILBC_CODEC
- #define NETEQ_G729_CODEC
- #define NETEQ_G726_CODEC
- #define NETEQ_GSMFR_CODEC
- #define NETEQ_OPUS_CODEC
- #define NETEQ_AMR_CODEC
-#endif
-
-#if (defined(NETEQ_ALL_WB_CODECS)) /* Except RED, DTMF and CNG */
- #define NETEQ_ISAC_CODEC
- #define NETEQ_G722_CODEC
- #define NETEQ_G722_1_CODEC
- #define NETEQ_G729_1_CODEC
- #define NETEQ_OPUS_CODEC
- #define NETEQ_SPEEX_CODEC
- #define NETEQ_AMRWB_CODEC
- #define NETEQ_WIDEBAND
-#endif
-
-#if (defined(NETEQ_ALL_WB32_CODECS)) /* AAC, RED, DTMF and CNG */
- #define NETEQ_ISAC_SWB_CODEC
- #define NETEQ_32KHZ_WIDEBAND
- #define NETEQ_G722_1C_CODEC
- #define NETEQ_CELT_CODEC
- #define NETEQ_OPUS_CODEC
-#endif
-
-#if (defined(NETEQ_VOICEENGINE_CODECS))
- /* Special codecs */
- #define NETEQ_CNG_CODEC
- #define NETEQ_ATEVENT_DECODE
- #define NETEQ_RED_CODEC
- #define NETEQ_VAD
- #define NETEQ_ARBITRARY_CODEC
-
- /* Narrowband codecs */
- #define NETEQ_PCM16B_CODEC
- #define NETEQ_G711_CODEC
- #define NETEQ_ILBC_CODEC
- #define NETEQ_AMR_CODEC
- #define NETEQ_G729_CODEC
- #define NETEQ_GSMFR_CODEC
-
- /* Wideband codecs */
- #define NETEQ_WIDEBAND
- #define NETEQ_ISAC_CODEC
- #define NETEQ_G722_CODEC
- #define NETEQ_G722_1_CODEC
- #define NETEQ_G729_1_CODEC
- #define NETEQ_AMRWB_CODEC
- #define NETEQ_SPEEX_CODEC
-
- /* Super wideband 32kHz codecs */
- #define NETEQ_ISAC_SWB_CODEC
- #define NETEQ_32KHZ_WIDEBAND
- #define NETEQ_G722_1C_CODEC
- #define NETEQ_CELT_CODEC
-
- /* Fullband 48 kHz codecs */
- #define NETEQ_OPUS_CODEC
- #define NETEQ_ISAC_FB_CODEC
-#endif
-
-#if (defined(NETEQ_ALL_CODECS))
- /* Special codecs */
- #define NETEQ_CNG_CODEC
- #define NETEQ_ATEVENT_DECODE
- #define NETEQ_RED_CODEC
- #define NETEQ_VAD
- #define NETEQ_ARBITRARY_CODEC
-
- /* Narrowband codecs */
- #define NETEQ_PCM16B_CODEC
- #define NETEQ_G711_CODEC
- #define NETEQ_ILBC_CODEC
- #define NETEQ_G729_CODEC
- #define NETEQ_G726_CODEC
- #define NETEQ_GSMFR_CODEC
- #define NETEQ_AMR_CODEC
-
- /* Wideband codecs */
- #define NETEQ_WIDEBAND
- #define NETEQ_ISAC_CODEC
- #define NETEQ_G722_CODEC
- #define NETEQ_G722_1_CODEC
- #define NETEQ_G729_1_CODEC
- #define NETEQ_SPEEX_CODEC
- #define NETEQ_AMRWB_CODEC
-
- /* Super wideband 32kHz codecs */
- #define NETEQ_ISAC_SWB_CODEC
- #define NETEQ_32KHZ_WIDEBAND
- #define NETEQ_G722_1C_CODEC
- #define NETEQ_CELT_CODEC
-
- /* Super wideband 48kHz codecs */
- #define NETEQ_48KHZ_WIDEBAND
- #define NETEQ_OPUS_CODEC
- #define NETEQ_ISAC_FB_CODEC
-#endif
-
-/* Max output size from decoding one frame */
-#if defined(NETEQ_48KHZ_WIDEBAND)
- #define NETEQ_MAX_FRAME_SIZE 5760 /* 120 ms super wideband */
- #define NETEQ_MAX_OUTPUT_SIZE 6480 /* 120+15 ms super wideband (120 ms
- * decoded + 15 ms for merge overlap) */
-#elif defined(NETEQ_32KHZ_WIDEBAND)
- #define NETEQ_MAX_FRAME_SIZE 3840 /* 120 ms super wideband */
- #define NETEQ_MAX_OUTPUT_SIZE 4320 /* 120+15 ms super wideband (120 ms
- * decoded + 15 ms for merge overlap) */
-#elif defined(NETEQ_WIDEBAND)
- #define NETEQ_MAX_FRAME_SIZE 1920 /* 120 ms wideband */
- #define NETEQ_MAX_OUTPUT_SIZE 2160 /* 120+15 ms wideband (120 ms decoded +
- * 15 ms for merge overlap) */
-#else
- #define NETEQ_MAX_FRAME_SIZE 960 /* 120 ms narrowband */
- #define NETEQ_MAX_OUTPUT_SIZE 1080 /* 120+15 ms narrowband (120 ms decoded
- * + 15 ms for merge overlap) */
-#endif
-
-
-/* Enable stereo */
-#define NETEQ_STEREO
-
-#endif /* #if !defined NETEQ_DEFINES_H */
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_error_codes.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_error_codes.h
deleted file mode 100644
index ab639d9c3e6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_error_codes.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Definition of error codes.
- *
- * NOTE: When modifying the error codes,
- * also modify the function WebRtcNetEQ_GetErrorCode!
- */
-
-#if !defined NETEQ_ERROR_CODES_H
-#define NETEQ_ERROR_CODES_H
-
-/* Misc Error */
-#define NETEQ_OTHER_ERROR -1000
-
-/* Misc Recout Errors */
-#define FAULTY_INSTRUCTION -1001
-#define FAULTY_NETWORK_TYPE -1002
-#define FAULTY_DELAYVALUE -1003
-#define FAULTY_PLAYOUTMODE -1004
-#define CORRUPT_INSTANCE -1005
-#define ILLEGAL_MASTER_SLAVE_SWITCH -1006
-#define MASTER_SLAVE_ERROR -1007
-
-/* Misc Recout problems */
-#define UNKNOWN_BUFSTAT_DECISION -2001
-#define RECOUT_ERROR_DECODING -2002
-#define RECOUT_ERROR_SAMPLEUNDERRUN -2003
-#define RECOUT_ERROR_DECODED_TOO_MUCH -2004
-
-/* Misc RecIn problems */
-#define RECIN_CNG_ERROR -3001
-#define RECIN_UNKNOWNPAYLOAD -3002
-#define RECIN_BUFFERINSERT_ERROR -3003
-#define RECIN_SYNC_RTP_CHANGED_CODEC -3004
-#define RECIN_SYNC_RTP_NOT_ACCEPTABLE -3005
-
-/* PBUFFER/BUFSTAT ERRORS */
-#define PBUFFER_INIT_ERROR -4001
-#define PBUFFER_INSERT_ERROR1 -4002
-#define PBUFFER_INSERT_ERROR2 -4003
-#define PBUFFER_INSERT_ERROR3 -4004
-#define PBUFFER_INSERT_ERROR4 -4005
-#define PBUFFER_INSERT_ERROR5 -4006
-#define UNKNOWN_G723_HEADER -4007
-#define PBUFFER_NONEXISTING_PACKET -4008
-#define PBUFFER_NOT_INITIALIZED -4009
-#define AMBIGUOUS_ILBC_FRAME_SIZE -4010
-
-/* CODEC DATABASE ERRORS */
-#define CODEC_DB_FULL -5001
-#define CODEC_DB_NOT_EXIST1 -5002
-#define CODEC_DB_NOT_EXIST2 -5003
-#define CODEC_DB_NOT_EXIST3 -5004
-#define CODEC_DB_NOT_EXIST4 -5005
-#define CODEC_DB_UNKNOWN_CODEC -5006
-#define CODEC_DB_PAYLOAD_TAKEN -5007
-#define CODEC_DB_UNSUPPORTED_CODEC -5008
-#define CODEC_DB_UNSUPPORTED_FS -5009
-
-/* DTMF ERRORS */
-#define DTMF_DEC_PARAMETER_ERROR -6001
-#define DTMF_INSERT_ERROR -6002
-#define DTMF_GEN_UNKNOWN_SAMP_FREQ -6003
-#define DTMF_NOT_SUPPORTED -6004
-
-/* RTP/PACKET ERRORS */
-#define RED_SPLIT_ERROR1 -7001
-#define RED_SPLIT_ERROR2 -7002
-#define RTP_TOO_SHORT_PACKET -7003
-#define RTP_CORRUPT_PACKET -7004
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
new file mode 100644
index 00000000000..a40107651d0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
@@ -0,0 +1,210 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Test to verify correct operation for externally created decoders.
+
+#include <string>
+#include <list>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+
+namespace webrtc {
+
+using ::testing::_;
+
+// This test encodes a few packets of PCM16b 32 kHz data and inserts them into
+// two different NetEq instances. The first instance uses the internal version
+// of the decoder object, while the second one uses an externally created
+// decoder object (ExternalPcm16B wrapped in MockExternalPcm16B, both defined
+// in mock_external_decoder_pcm16b.h).
+// The test verifies that the output from both instances matches.
+class NetEqExternalDecoderTest : public ::testing::Test {
+ protected:
+ static const int kTimeStepMs = 10;
+ static const int kMaxBlockSize = 480; // 10 ms @ 48 kHz.
+ static const uint8_t kPayloadType = 95;
+ static const int kSampleRateHz = 32000;
+
+ NetEqExternalDecoderTest()
+ : sample_rate_hz_(kSampleRateHz),
+ samples_per_ms_(sample_rate_hz_ / 1000),
+ frame_size_ms_(10),
+ frame_size_samples_(frame_size_ms_ * samples_per_ms_),
+ output_size_samples_(frame_size_ms_ * samples_per_ms_),
+ external_decoder_(new MockExternalPcm16B(kDecoderPCM16Bswb32kHz)),
+ rtp_generator_(samples_per_ms_),
+ payload_size_bytes_(0),
+ last_send_time_(0),
+ last_arrival_time_(0) {
+ NetEq::Config config;
+ config.sample_rate_hz = sample_rate_hz_;
+ neteq_external_ = NetEq::Create(config);
+ neteq_ = NetEq::Create(config);
+ input_ = new int16_t[frame_size_samples_];
+ encoded_ = new uint8_t[2 * frame_size_samples_];
+ }
+
+ ~NetEqExternalDecoderTest() {
+ delete neteq_external_;
+ delete neteq_;
+ // We will now delete the decoder ourselves, so we expect Die to be called.
+ EXPECT_CALL(*external_decoder_, Die()).Times(1);
+ delete external_decoder_;
+ delete [] input_;
+ delete [] encoded_;
+ }
+
+ virtual void SetUp() {
+ const std::string file_name =
+ webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+ input_file_.reset(new test::InputAudioFile(file_name));
+ assert(sample_rate_hz_ == 32000);
+ NetEqDecoder decoder = kDecoderPCM16Bswb32kHz;
+ EXPECT_CALL(*external_decoder_, Init());
+ // NetEq is not allowed to delete the external decoder (hence Times(0)).
+ EXPECT_CALL(*external_decoder_, Die()).Times(0);
+ ASSERT_EQ(NetEq::kOK,
+ neteq_external_->RegisterExternalDecoder(external_decoder_,
+ decoder,
+ kPayloadType));
+ ASSERT_EQ(NetEq::kOK,
+ neteq_->RegisterPayloadType(decoder, kPayloadType));
+ }
+
+ virtual void TearDown() {}
+
+ int GetNewPackets() {
+ if (!input_file_->Read(frame_size_samples_, input_)) {
+ return -1;
+ }
+ payload_size_bytes_ = WebRtcPcm16b_Encode(input_, frame_size_samples_,
+ encoded_);
+ if (frame_size_samples_ * 2 != payload_size_bytes_) {
+ return -1;
+ }
+ int next_send_time = rtp_generator_.GetRtpHeader(kPayloadType,
+ frame_size_samples_,
+ &rtp_header_);
+ return next_send_time;
+ }
+
+ void VerifyOutput(size_t num_samples) {
+ for (size_t i = 0; i < num_samples; ++i) {
+ ASSERT_EQ(output_[i], output_external_[i]) <<
+ "Diff in sample " << i << ".";
+ }
+ }
+
+ virtual int GetArrivalTime(int send_time) {
+ int arrival_time = last_arrival_time_ + (send_time - last_send_time_);
+ last_send_time_ = send_time;
+ last_arrival_time_ = arrival_time;
+ return arrival_time;
+ }
+
+ virtual bool Lost() { return false; }
+
+ void RunTest(int num_loops) {
+ // Get next input packets (mono and multi-channel).
+ int next_send_time;
+ int next_arrival_time;
+ do {
+ next_send_time = GetNewPackets();
+ ASSERT_NE(-1, next_send_time);
+ next_arrival_time = GetArrivalTime(next_send_time);
+ } while (Lost()); // If lost, immediately read the next packet.
+
+ EXPECT_CALL(*external_decoder_, Decode(_, payload_size_bytes_, _, _))
+ .Times(num_loops);
+
+ int time_now = 0;
+ for (int k = 0; k < num_loops; ++k) {
+ while (time_now >= next_arrival_time) {
+ // Insert packet in regular instance.
+ ASSERT_EQ(NetEq::kOK,
+ neteq_->InsertPacket(rtp_header_, encoded_,
+ payload_size_bytes_,
+ next_arrival_time));
+ // Insert packet in external decoder instance.
+ EXPECT_CALL(*external_decoder_,
+ IncomingPacket(_, payload_size_bytes_,
+ rtp_header_.header.sequenceNumber,
+ rtp_header_.header.timestamp,
+ next_arrival_time));
+ ASSERT_EQ(NetEq::kOK,
+ neteq_external_->InsertPacket(rtp_header_, encoded_,
+ payload_size_bytes_,
+ next_arrival_time));
+ // Get next input packet.
+ do {
+ next_send_time = GetNewPackets();
+ ASSERT_NE(-1, next_send_time);
+ next_arrival_time = GetArrivalTime(next_send_time);
+ } while (Lost()); // If lost, immediately read the next packet.
+ }
+ NetEqOutputType output_type;
+ // Get audio from regular instance.
+ int samples_per_channel;
+ int num_channels;
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->GetAudio(kMaxBlockSize, output_,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(output_size_samples_, samples_per_channel);
+ // Get audio from external decoder instance.
+ ASSERT_EQ(NetEq::kOK,
+ neteq_external_->GetAudio(kMaxBlockSize, output_external_,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(output_size_samples_, samples_per_channel);
+ std::ostringstream ss;
+ ss << "Lap number " << k << ".";
+ SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
+ // Compare mono and multi-channel.
+ ASSERT_NO_FATAL_FAILURE(VerifyOutput(output_size_samples_));
+
+ time_now += kTimeStepMs;
+ }
+ }
+
+ const int sample_rate_hz_;
+ const int samples_per_ms_;
+ const int frame_size_ms_;
+ const int frame_size_samples_;
+ const int output_size_samples_;
+ NetEq* neteq_external_;
+ NetEq* neteq_;
+ MockExternalPcm16B* external_decoder_;
+ test::RtpGenerator rtp_generator_;
+ int16_t* input_;
+ uint8_t* encoded_;
+ int16_t output_[kMaxBlockSize];
+ int16_t output_external_[kMaxBlockSize];
+ WebRtcRTPHeader rtp_header_;
+ int payload_size_bytes_;
+ int last_send_time_;
+ int last_arrival_time_;
+ scoped_ptr<test::InputAudioFile> input_file_;
+};
+
+TEST_F(NetEqExternalDecoderTest, RunTest) {
+ RunTest(100); // Run 100 laps @ 10 ms each in the test loop.
+}
+
+} // namespace webrtc
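The fixture keeps GetArrivalTime() and Lost() virtual on purpose, so a derived test can reshape the arrival pattern or simulate packet loss without touching RunTest(). A hypothetical derived fixture could look like the sketch below; it is not part of this patch, and a real loss test would also need its own TEST_F plus adjusted mock expectations, since RunTest() currently expects Decode() to be called exactly once per loop.

class NetEqExternalDecoderLossSketch : public NetEqExternalDecoderTest {
 protected:
  NetEqExternalDecoderLossSketch() : packet_counter_(0) {}
  virtual bool Lost() {
    ++packet_counter_;
    return (packet_counter_ % 10) == 0;  // Report every tenth packet as lost.
  }
 private:
  int packet_counter_;
};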
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
new file mode 100644
index 00000000000..64a86603943
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
@@ -0,0 +1,1947 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"
+
+#include <assert.h>
+#include <memory.h> // memset
+
+#include <algorithm>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_coding/neteq/accelerate.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/comfort_noise.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/merge.h"
+#include "webrtc/modules/audio_coding/neteq/normal.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
+#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
+#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+// Modify the code to obtain backwards bit-exactness. Once bit-exactness is no
+// longer required, this #define should be removed (and the code that it
+// enables).
+#define LEGACY_BITEXACT
+
+namespace webrtc {
+
+NetEqImpl::NetEqImpl(int fs,
+ BufferLevelFilter* buffer_level_filter,
+ DecoderDatabase* decoder_database,
+ DelayManager* delay_manager,
+ DelayPeakDetector* delay_peak_detector,
+ DtmfBuffer* dtmf_buffer,
+ DtmfToneGenerator* dtmf_tone_generator,
+ PacketBuffer* packet_buffer,
+ PayloadSplitter* payload_splitter,
+ TimestampScaler* timestamp_scaler,
+ AccelerateFactory* accelerate_factory,
+ ExpandFactory* expand_factory,
+ PreemptiveExpandFactory* preemptive_expand_factory,
+ bool create_components)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ buffer_level_filter_(buffer_level_filter),
+ decoder_database_(decoder_database),
+ delay_manager_(delay_manager),
+ delay_peak_detector_(delay_peak_detector),
+ dtmf_buffer_(dtmf_buffer),
+ dtmf_tone_generator_(dtmf_tone_generator),
+ packet_buffer_(packet_buffer),
+ payload_splitter_(payload_splitter),
+ timestamp_scaler_(timestamp_scaler),
+ vad_(new PostDecodeVad()),
+ expand_factory_(expand_factory),
+ accelerate_factory_(accelerate_factory),
+ preemptive_expand_factory_(preemptive_expand_factory),
+ last_mode_(kModeNormal),
+ decoded_buffer_length_(kMaxFrameSize),
+ decoded_buffer_(new int16_t[decoded_buffer_length_]),
+ playout_timestamp_(0),
+ new_codec_(false),
+ timestamp_(0),
+ reset_decoder_(false),
+ current_rtp_payload_type_(0xFF), // Invalid RTP payload type.
+ current_cng_rtp_payload_type_(0xFF), // Invalid RTP payload type.
+ ssrc_(0),
+ first_packet_(true),
+ error_code_(0),
+ decoder_error_code_(0),
+ decoded_packet_sequence_number_(-1),
+ decoded_packet_timestamp_(0) {
+ if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000) {
+ LOG(LS_ERROR) << "Sample rate " << fs << " Hz not supported. " <<
+ "Changing to 8000 Hz.";
+ fs = 8000;
+ }
+ LOG(LS_VERBOSE) << "Create NetEqImpl object with fs = " << fs << ".";
+ fs_hz_ = fs;
+ fs_mult_ = fs / 8000;
+ output_size_samples_ = kOutputSizeMs * 8 * fs_mult_;
+ decoder_frame_length_ = 3 * output_size_samples_;
+ WebRtcSpl_Init();
+ if (create_components) {
+ SetSampleRateAndChannels(fs, 1); // Default is 1 channel.
+ }
+}
+
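For orientation, the sizes derived in the constructor work out as follows. kOutputSizeMs is defined in neteq_impl.h and is not shown in this hunk; it is assumed here to be 10 ms, which matches the 10 ms GetAudio() cadence used by the tests in this change.

// Worked example at fs = 32000 Hz (a sketch, not patch code).
const int fs = 32000;
const int fs_mult = fs / 8000;                             // 4
const int output_size_samples = 10 * 8 * fs_mult;          // 320 samples/channel
const int decoder_frame_length = 3 * output_size_samples;  // 960, initial value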
+NetEqImpl::~NetEqImpl() {
+ LOG(LS_INFO) << "Deleting NetEqImpl object.";
+}
+
+int NetEqImpl::InsertPacket(const WebRtcRTPHeader& rtp_header,
+ const uint8_t* payload,
+ int length_bytes,
+ uint32_t receive_timestamp) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ LOG(LS_VERBOSE) << "InsertPacket: ts=" << rtp_header.header.timestamp <<
+ ", sn=" << rtp_header.header.sequenceNumber <<
+ ", pt=" << static_cast<int>(rtp_header.header.payloadType) <<
+ ", ssrc=" << rtp_header.header.ssrc <<
+ ", len=" << length_bytes;
+ int error = InsertPacketInternal(rtp_header, payload, length_bytes,
+ receive_timestamp, false);
+ if (error != 0) {
+ LOG_FERR1(LS_WARNING, InsertPacketInternal, error);
+ error_code_ = error;
+ return kFail;
+ }
+ return kOK;
+}
+
+int NetEqImpl::InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
+ uint32_t receive_timestamp) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ LOG(LS_VERBOSE) << "InsertPacket-Sync: ts="
+ << rtp_header.header.timestamp <<
+ ", sn=" << rtp_header.header.sequenceNumber <<
+ ", pt=" << static_cast<int>(rtp_header.header.payloadType) <<
+ ", ssrc=" << rtp_header.header.ssrc;
+
+ const uint8_t kSyncPayload[] = { 's', 'y', 'n', 'c' };
+ int error = InsertPacketInternal(
+ rtp_header, kSyncPayload, sizeof(kSyncPayload), receive_timestamp, true);
+
+ if (error != 0) {
+ LOG_FERR1(LS_WARNING, InsertPacketInternal, error);
+ error_code_ = error;
+ return kFail;
+ }
+ return kOK;
+}
+
+int NetEqImpl::GetAudio(size_t max_length, int16_t* output_audio,
+ int* samples_per_channel, int* num_channels,
+ NetEqOutputType* type) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ LOG(LS_VERBOSE) << "GetAudio";
+ int error = GetAudioInternal(max_length, output_audio, samples_per_channel,
+ num_channels);
+ LOG(LS_VERBOSE) << "Produced " << *samples_per_channel <<
+ " samples/channel for " << *num_channels << " channel(s)";
+ if (error != 0) {
+ LOG_FERR1(LS_WARNING, GetAudioInternal, error);
+ error_code_ = error;
+ return kFail;
+ }
+ if (type) {
+ *type = LastOutputType();
+ }
+ return kOK;
+}
+
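InsertPacket()/InsertSyncPacket() on the input side and GetAudio() on the output side form the core of the public interface an application drives: one call per received RTP packet, and one call per 10 ms of playout. A hypothetical receive-side sketch follows; the function names and buffer handling are illustrative and not part of this patch.

// Called for every received RTP packet.
void OnRtpPacket(NetEq* neteq, const WebRtcRTPHeader& header,
                 const uint8_t* payload, int payload_len,
                 uint32_t receive_timestamp) {
  if (neteq->InsertPacket(header, payload, payload_len,
                          receive_timestamp) != NetEq::kOK) {
    // LastError() reports the reason, e.g. an unknown RTP payload type.
    int error = neteq->LastError();
    (void)error;  // An application would log or act on this.
  }
}

// Called every 10 ms, e.g. from the audio device callback.
void On10msTick(NetEq* neteq, int16_t* out, size_t max_samples) {
  int samples_per_channel = 0;
  int num_channels = 0;
  NetEqOutputType type;
  neteq->GetAudio(max_samples, out, &samples_per_channel, &num_channels,
                  &type);
}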
+int NetEqImpl::RegisterPayloadType(enum NetEqDecoder codec,
+ uint8_t rtp_payload_type) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ LOG_API2(static_cast<int>(rtp_payload_type), codec);
+ int ret = decoder_database_->RegisterPayload(rtp_payload_type, codec);
+ if (ret != DecoderDatabase::kOK) {
+ LOG_FERR2(LS_WARNING, RegisterPayload, rtp_payload_type, codec);
+ switch (ret) {
+ case DecoderDatabase::kInvalidRtpPayloadType:
+ error_code_ = kInvalidRtpPayloadType;
+ break;
+ case DecoderDatabase::kCodecNotSupported:
+ error_code_ = kCodecNotSupported;
+ break;
+ case DecoderDatabase::kDecoderExists:
+ error_code_ = kDecoderExists;
+ break;
+ default:
+ error_code_ = kOtherError;
+ }
+ return kFail;
+ }
+ return kOK;
+}
+
+int NetEqImpl::RegisterExternalDecoder(AudioDecoder* decoder,
+ enum NetEqDecoder codec,
+ uint8_t rtp_payload_type) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ LOG_API2(static_cast<int>(rtp_payload_type), codec);
+ if (!decoder) {
+ LOG(LS_ERROR) << "Cannot register external decoder with NULL pointer";
+ assert(false);
+ return kFail;
+ }
+ const int sample_rate_hz = AudioDecoder::CodecSampleRateHz(codec);
+ int ret = decoder_database_->InsertExternal(rtp_payload_type, codec,
+ sample_rate_hz, decoder);
+ if (ret != DecoderDatabase::kOK) {
+ LOG_FERR2(LS_WARNING, InsertExternal, rtp_payload_type, codec);
+ switch (ret) {
+ case DecoderDatabase::kInvalidRtpPayloadType:
+ error_code_ = kInvalidRtpPayloadType;
+ break;
+ case DecoderDatabase::kCodecNotSupported:
+ error_code_ = kCodecNotSupported;
+ break;
+ case DecoderDatabase::kDecoderExists:
+ error_code_ = kDecoderExists;
+ break;
+ case DecoderDatabase::kInvalidSampleRate:
+ error_code_ = kInvalidSampleRate;
+ break;
+ case DecoderDatabase::kInvalidPointer:
+ error_code_ = kInvalidPointer;
+ break;
+ default:
+ error_code_ = kOtherError;
+ }
+ return kFail;
+ }
+ return kOK;
+}
+
+int NetEqImpl::RemovePayloadType(uint8_t rtp_payload_type) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ LOG_API1(static_cast<int>(rtp_payload_type));
+ int ret = decoder_database_->Remove(rtp_payload_type);
+ if (ret == DecoderDatabase::kOK) {
+ return kOK;
+ } else if (ret == DecoderDatabase::kDecoderNotFound) {
+ error_code_ = kDecoderNotFound;
+ } else {
+ error_code_ = kOtherError;
+ }
+ LOG_FERR1(LS_WARNING, Remove, rtp_payload_type);
+ return kFail;
+}
+
+bool NetEqImpl::SetMinimumDelay(int delay_ms) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (delay_ms >= 0 && delay_ms < 10000) {
+ assert(delay_manager_.get());
+ return delay_manager_->SetMinimumDelay(delay_ms);
+ }
+ return false;
+}
+
+bool NetEqImpl::SetMaximumDelay(int delay_ms) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (delay_ms >= 0 && delay_ms < 10000) {
+ assert(delay_manager_.get());
+ return delay_manager_->SetMaximumDelay(delay_ms);
+ }
+ return false;
+}
+
+int NetEqImpl::LeastRequiredDelayMs() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ assert(delay_manager_.get());
+ return delay_manager_->least_required_delay_ms();
+}
+
+void NetEqImpl::SetPlayoutMode(NetEqPlayoutMode mode) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (!decision_logic_.get() || mode != decision_logic_->playout_mode()) {
+ // The reset() method calls delete for the old object.
+ CreateDecisionLogic(mode);
+ }
+}
+
+NetEqPlayoutMode NetEqImpl::PlayoutMode() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ assert(decision_logic_.get());
+ return decision_logic_->playout_mode();
+}
+
+int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ assert(decoder_database_.get());
+ const int total_samples_in_buffers = packet_buffer_->NumSamplesInBuffer(
+ decoder_database_.get(), decoder_frame_length_) +
+ static_cast<int>(sync_buffer_->FutureLength());
+ assert(delay_manager_.get());
+ assert(decision_logic_.get());
+ stats_.GetNetworkStatistics(fs_hz_, total_samples_in_buffers,
+ decoder_frame_length_, *delay_manager_.get(),
+ *decision_logic_.get(), stats);
+ return 0;
+}
+
+void NetEqImpl::WaitingTimes(std::vector<int>* waiting_times) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ stats_.WaitingTimes(waiting_times);
+}
+
+void NetEqImpl::GetRtcpStatistics(RtcpStatistics* stats) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (stats) {
+ rtcp_.GetStatistics(false, stats);
+ }
+}
+
+void NetEqImpl::GetRtcpStatisticsNoReset(RtcpStatistics* stats) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (stats) {
+ rtcp_.GetStatistics(true, stats);
+ }
+}
+
+void NetEqImpl::EnableVad() {
+ CriticalSectionScoped lock(crit_sect_.get());
+ assert(vad_.get());
+ vad_->Enable();
+}
+
+void NetEqImpl::DisableVad() {
+ CriticalSectionScoped lock(crit_sect_.get());
+ assert(vad_.get());
+ vad_->Disable();
+}
+
+bool NetEqImpl::GetPlayoutTimestamp(uint32_t* timestamp) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (first_packet_) {
+ // We don't have a valid RTP timestamp until we have decoded our first
+ // RTP packet.
+ return false;
+ }
+ *timestamp = timestamp_scaler_->ToExternal(playout_timestamp_);
+ return true;
+}
+
+int NetEqImpl::LastError() {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return error_code_;
+}
+
+int NetEqImpl::LastDecoderError() {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return decoder_error_code_;
+}
+
+void NetEqImpl::FlushBuffers() {
+ CriticalSectionScoped lock(crit_sect_.get());
+ LOG_API0();
+ packet_buffer_->Flush();
+ assert(sync_buffer_.get());
+ assert(expand_.get());
+ sync_buffer_->Flush();
+ sync_buffer_->set_next_index(sync_buffer_->next_index() -
+ expand_->overlap_length());
+ // Set to wait for new codec.
+ first_packet_ = true;
+}
+
+void NetEqImpl::PacketBufferStatistics(int* current_num_packets,
+ int* max_num_packets) const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ packet_buffer_->BufferStat(current_num_packets, max_num_packets);
+}
+
+int NetEqImpl::DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (decoded_packet_sequence_number_ < 0)
+ return -1;
+ *sequence_number = decoded_packet_sequence_number_;
+ *timestamp = decoded_packet_timestamp_;
+ return 0;
+}
+
+void NetEqImpl::SetBackgroundNoiseMode(NetEqBackgroundNoiseMode mode) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ assert(background_noise_.get());
+ background_noise_->set_mode(mode);
+}
+
+NetEqBackgroundNoiseMode NetEqImpl::BackgroundNoiseMode() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ assert(background_noise_.get());
+ return background_noise_->mode();
+}
+
+const SyncBuffer* NetEqImpl::sync_buffer_for_test() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return sync_buffer_.get();
+}
+
+// Methods below this line are private.
+
+int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
+ const uint8_t* payload,
+ int length_bytes,
+ uint32_t receive_timestamp,
+ bool is_sync_packet) {
+ if (!payload) {
+ LOG_F(LS_ERROR) << "payload == NULL";
+ return kInvalidPointer;
+ }
+ // Sanity checks for sync-packets.
+ if (is_sync_packet) {
+ if (decoder_database_->IsDtmf(rtp_header.header.payloadType) ||
+ decoder_database_->IsRed(rtp_header.header.payloadType) ||
+ decoder_database_->IsComfortNoise(rtp_header.header.payloadType)) {
+ LOG_F(LS_ERROR) << "Sync-packet with an unacceptable payload type "
+ << rtp_header.header.payloadType;
+ return kSyncPacketNotAccepted;
+ }
+ if (first_packet_ ||
+ rtp_header.header.payloadType != current_rtp_payload_type_ ||
+ rtp_header.header.ssrc != ssrc_) {
+ // Even if |current_rtp_payload_type_| is 0xFF, a sync-packet isn't
+ // accepted.
+ LOG_F(LS_ERROR) << "Changing codec, SSRC or first packet "
+ "with sync-packet.";
+ return kSyncPacketNotAccepted;
+ }
+ }
+ PacketList packet_list;
+ RTPHeader main_header;
+ {
+ // Convert to Packet.
+ // Create |packet| within this separate scope, since it should not be used
+ // directly once it's been inserted in the packet list. This way, |packet|
+ // is not defined outside of this block.
+ Packet* packet = new Packet;
+ packet->header.markerBit = false;
+ packet->header.payloadType = rtp_header.header.payloadType;
+ packet->header.sequenceNumber = rtp_header.header.sequenceNumber;
+ packet->header.timestamp = rtp_header.header.timestamp;
+ packet->header.ssrc = rtp_header.header.ssrc;
+ packet->header.numCSRCs = 0;
+ packet->payload_length = length_bytes;
+ packet->primary = true;
+ packet->waiting_time = 0;
+ packet->payload = new uint8_t[packet->payload_length];
+ packet->sync_packet = is_sync_packet;
+ if (!packet->payload) {
+ LOG_F(LS_ERROR) << "Payload pointer is NULL.";
+ }
+ assert(payload); // Already checked above.
+ memcpy(packet->payload, payload, packet->payload_length);
+ // Insert packet in a packet list.
+ packet_list.push_back(packet);
+ // Save the main payload's header for later.
+ memcpy(&main_header, &packet->header, sizeof(main_header));
+ }
+
+ bool update_sample_rate_and_channels = false;
+ // Reinitialize NetEq if it's needed (changed SSRC or first call).
+ if ((main_header.ssrc != ssrc_) || first_packet_) {
+ rtcp_.Init(main_header.sequenceNumber);
+ first_packet_ = false;
+
+ // Flush the packet buffer and DTMF buffer.
+ packet_buffer_->Flush();
+ dtmf_buffer_->Flush();
+
+ // Store new SSRC.
+ ssrc_ = main_header.ssrc;
+
+ // Update audio buffer timestamp.
+ sync_buffer_->IncreaseEndTimestamp(main_header.timestamp - timestamp_);
+
+ // Update codecs.
+ timestamp_ = main_header.timestamp;
+ current_rtp_payload_type_ = main_header.payloadType;
+
+ // Set MCU to update codec on next SignalMCU call.
+ new_codec_ = true;
+
+ // Reset timestamp scaling.
+ timestamp_scaler_->Reset();
+
+ // Trigger an update of the sampling rate and the number of channels.
+ update_sample_rate_and_channels = true;
+ }
+
+ // Update RTCP statistics, only for regular packets.
+ if (!is_sync_packet)
+ rtcp_.Update(main_header, receive_timestamp);
+
+ // Check for RED payload type, and separate payloads into several packets.
+ if (decoder_database_->IsRed(main_header.payloadType)) {
+ assert(!is_sync_packet); // We had a sanity check for this.
+ if (payload_splitter_->SplitRed(&packet_list) != PayloadSplitter::kOK) {
+ LOG_FERR1(LS_WARNING, SplitRed, packet_list.size());
+ PacketBuffer::DeleteAllPackets(&packet_list);
+ return kRedundancySplitError;
+ }
+ // Only accept a few RED payloads of the same type as the main data,
+ // DTMF events and CNG.
+ payload_splitter_->CheckRedPayloads(&packet_list, *decoder_database_);
+ // Update the stored main payload header since the main payload has now
+ // changed.
+ memcpy(&main_header, &packet_list.front()->header, sizeof(main_header));
+ }
+
+ // Check payload types.
+ if (decoder_database_->CheckPayloadTypes(packet_list) ==
+ DecoderDatabase::kDecoderNotFound) {
+ LOG_FERR1(LS_WARNING, CheckPayloadTypes, packet_list.size());
+ PacketBuffer::DeleteAllPackets(&packet_list);
+ return kUnknownRtpPayloadType;
+ }
+
+ // Scale timestamp to internal domain (only for some codecs).
+ timestamp_scaler_->ToInternal(&packet_list);
+
+ // Process DTMF payloads. Cycle through the list of packets, and pick out any
+ // DTMF payloads found.
+ PacketList::iterator it = packet_list.begin();
+ while (it != packet_list.end()) {
+ Packet* current_packet = (*it);
+ assert(current_packet);
+ assert(current_packet->payload);
+ if (decoder_database_->IsDtmf(current_packet->header.payloadType)) {
+ assert(!current_packet->sync_packet); // We had a sanity check for this.
+ DtmfEvent event;
+ int ret = DtmfBuffer::ParseEvent(
+ current_packet->header.timestamp,
+ current_packet->payload,
+ current_packet->payload_length,
+ &event);
+ if (ret != DtmfBuffer::kOK) {
+ LOG_FERR2(LS_WARNING, ParseEvent, ret,
+ current_packet->payload_length);
+ PacketBuffer::DeleteAllPackets(&packet_list);
+ return kDtmfParsingError;
+ }
+ if (dtmf_buffer_->InsertEvent(event) != DtmfBuffer::kOK) {
+ LOG_FERR0(LS_WARNING, InsertEvent);
+ PacketBuffer::DeleteAllPackets(&packet_list);
+ return kDtmfInsertError;
+ }
+ // TODO(hlundin): Let the destructor of Packet handle the payload.
+ delete [] current_packet->payload;
+ delete current_packet;
+ it = packet_list.erase(it);
+ } else {
+ ++it;
+ }
+ }
+
+ // Check for FEC in packets, and separate payloads into several packets.
+ int ret = payload_splitter_->SplitFec(&packet_list, decoder_database_.get());
+ if (ret != PayloadSplitter::kOK) {
+ LOG_FERR1(LS_WARNING, SplitFec, packet_list.size());
+ PacketBuffer::DeleteAllPackets(&packet_list);
+ switch (ret) {
+ case PayloadSplitter::kUnknownPayloadType:
+ return kUnknownRtpPayloadType;
+ default:
+ return kOtherError;
+ }
+ }
+
+ // Split payloads into smaller chunks. This also verifies that all payloads
+ // are of a known payload type. SplitAudio() method is protected against
+ // sync-packets.
+ ret = payload_splitter_->SplitAudio(&packet_list, *decoder_database_);
+ if (ret != PayloadSplitter::kOK) {
+ LOG_FERR1(LS_WARNING, SplitAudio, packet_list.size());
+ PacketBuffer::DeleteAllPackets(&packet_list);
+ switch (ret) {
+ case PayloadSplitter::kUnknownPayloadType:
+ return kUnknownRtpPayloadType;
+ case PayloadSplitter::kFrameSplitError:
+ return kFrameSplitError;
+ default:
+ return kOtherError;
+ }
+ }
+
+ // Update bandwidth estimate, if the packet is not sync-packet.
+ if (!packet_list.empty() && !packet_list.front()->sync_packet) {
+ // The list can be empty here if we got nothing but DTMF payloads.
+ AudioDecoder* decoder =
+ decoder_database_->GetDecoder(main_header.payloadType);
+ assert(decoder); // Should always get a valid object, since we have
+ // already checked that the payload types are known.
+ decoder->IncomingPacket(packet_list.front()->payload,
+ packet_list.front()->payload_length,
+ packet_list.front()->header.sequenceNumber,
+ packet_list.front()->header.timestamp,
+ receive_timestamp);
+ }
+
+ // Insert packets in buffer.
+ int temp_bufsize = packet_buffer_->NumPacketsInBuffer();
+ ret = packet_buffer_->InsertPacketList(
+ &packet_list,
+ *decoder_database_,
+ &current_rtp_payload_type_,
+ &current_cng_rtp_payload_type_);
+ if (ret == PacketBuffer::kFlushed) {
+ // Reset DSP timestamp etc. if packet buffer flushed.
+ new_codec_ = true;
+ update_sample_rate_and_channels = true;
+ LOG_F(LS_WARNING) << "Packet buffer flushed";
+ } else if (ret != PacketBuffer::kOK) {
+ LOG_FERR1(LS_WARNING, InsertPacketList, packet_list.size());
+ PacketBuffer::DeleteAllPackets(&packet_list);
+ return kOtherError;
+ }
+ if (current_rtp_payload_type_ != 0xFF) {
+ const DecoderDatabase::DecoderInfo* dec_info =
+ decoder_database_->GetDecoderInfo(current_rtp_payload_type_);
+ if (!dec_info) {
+ assert(false); // Already checked that the payload type is known.
+ }
+ }
+
+ if (update_sample_rate_and_channels && !packet_buffer_->Empty()) {
+ // We do not use |current_rtp_payload_type_| to set |payload_type|, but
+ // get the next RTP header from |packet_buffer_| to obtain the payload
+ // type. The reason is the following corner case: if NetEq receives a CNG
+ // packet with a sample rate different from the current CNG, it flushes its
+ // buffer, assuming the send codec must have changed. However, the payload
+ // type of the hypothetically new send codec is not known.
+ const RTPHeader* rtp_header = packet_buffer_->NextRtpHeader();
+ assert(rtp_header);
+ int payload_type = rtp_header->payloadType;
+ AudioDecoder* decoder = decoder_database_->GetDecoder(payload_type);
+ assert(decoder); // Payloads are already checked to be valid.
+ const DecoderDatabase::DecoderInfo* decoder_info =
+ decoder_database_->GetDecoderInfo(payload_type);
+ assert(decoder_info);
+ if (decoder_info->fs_hz != fs_hz_ ||
+ decoder->channels() != algorithm_buffer_->Channels())
+ SetSampleRateAndChannels(decoder_info->fs_hz, decoder->channels());
+ }
+
+ // TODO(hlundin): Move this code to DelayManager class.
+ const DecoderDatabase::DecoderInfo* dec_info =
+ decoder_database_->GetDecoderInfo(main_header.payloadType);
+ assert(dec_info); // Already checked that the payload type is known.
+ delay_manager_->LastDecoderType(dec_info->codec_type);
+ if (delay_manager_->last_pack_cng_or_dtmf() == 0) {
+ // Calculate the total speech length carried in each packet.
+ temp_bufsize = packet_buffer_->NumPacketsInBuffer() - temp_bufsize;
+ temp_bufsize *= decoder_frame_length_;
+
+ if ((temp_bufsize > 0) &&
+ (temp_bufsize != decision_logic_->packet_length_samples())) {
+ decision_logic_->set_packet_length_samples(temp_bufsize);
+ delay_manager_->SetPacketAudioLength((1000 * temp_bufsize) / fs_hz_);
+ }
+
+ // Update statistics.
+ if ((int32_t) (main_header.timestamp - timestamp_) >= 0 &&
+ !new_codec_) {
+ // Only update statistics if the incoming packet is not older than the
+ // last played out packet, and if the new codec flag is not set.
+ delay_manager_->Update(main_header.sequenceNumber, main_header.timestamp,
+ fs_hz_);
+ }
+ } else if (delay_manager_->last_pack_cng_or_dtmf() == -1) {
+ // This is first "normal" packet after CNG or DTMF.
+ // Reset packet time counter and measure time until next packet,
+ // but don't update statistics.
+ delay_manager_->set_last_pack_cng_or_dtmf(0);
+ delay_manager_->ResetPacketIatCount();
+ }
+ return 0;
+}
+
+int NetEqImpl::GetAudioInternal(size_t max_length, int16_t* output,
+ int* samples_per_channel, int* num_channels) {
+ PacketList packet_list;
+ DtmfEvent dtmf_event;
+ Operations operation;
+ bool play_dtmf;
+ int return_value = GetDecision(&operation, &packet_list, &dtmf_event,
+ &play_dtmf);
+ if (return_value != 0) {
+ LOG_FERR1(LS_WARNING, GetDecision, return_value);
+ assert(false);
+ last_mode_ = kModeError;
+ return return_value;
+ }
+ LOG(LS_VERBOSE) << "GetDecision returned operation=" << operation <<
+ " and " << packet_list.size() << " packet(s)";
+
+ AudioDecoder::SpeechType speech_type;
+ int length = 0;
+ int decode_return_value = Decode(&packet_list, &operation,
+ &length, &speech_type);
+
+ assert(vad_.get());
+ bool sid_frame_available =
+ (operation == kRfc3389Cng && !packet_list.empty());
+ vad_->Update(decoded_buffer_.get(), length, speech_type,
+ sid_frame_available, fs_hz_);
+
+ algorithm_buffer_->Clear();
+ switch (operation) {
+ case kNormal: {
+ DoNormal(decoded_buffer_.get(), length, speech_type, play_dtmf);
+ break;
+ }
+ case kMerge: {
+ DoMerge(decoded_buffer_.get(), length, speech_type, play_dtmf);
+ break;
+ }
+ case kExpand: {
+ return_value = DoExpand(play_dtmf);
+ break;
+ }
+ case kAccelerate: {
+ return_value = DoAccelerate(decoded_buffer_.get(), length, speech_type,
+ play_dtmf);
+ break;
+ }
+ case kPreemptiveExpand: {
+ return_value = DoPreemptiveExpand(decoded_buffer_.get(), length,
+ speech_type, play_dtmf);
+ break;
+ }
+ case kRfc3389Cng:
+ case kRfc3389CngNoPacket: {
+ return_value = DoRfc3389Cng(&packet_list, play_dtmf);
+ break;
+ }
+ case kCodecInternalCng: {
+ // This handles the case when there is no transmission and the decoder
+ // should produce internal comfort noise.
+ // TODO(hlundin): Write test for codec-internal CNG.
+ DoCodecInternalCng();
+ break;
+ }
+ case kDtmf: {
+ // TODO(hlundin): Write test for this.
+ return_value = DoDtmf(dtmf_event, &play_dtmf);
+ break;
+ }
+ case kAlternativePlc: {
+ // TODO(hlundin): Write test for this.
+ DoAlternativePlc(false);
+ break;
+ }
+ case kAlternativePlcIncreaseTimestamp: {
+ // TODO(hlundin): Write test for this.
+ DoAlternativePlc(true);
+ break;
+ }
+ case kAudioRepetitionIncreaseTimestamp: {
+ // TODO(hlundin): Write test for this.
+ sync_buffer_->IncreaseEndTimestamp(output_size_samples_);
+ // Skipping break on purpose. Execution should move on into the
+ // next case.
+ }
+ case kAudioRepetition: {
+ // TODO(hlundin): Write test for this.
+      // Copy last |output_size_samples_| from |sync_buffer_| to
+      // |algorithm_buffer_|.
+ algorithm_buffer_->PushBackFromIndex(
+ *sync_buffer_, sync_buffer_->Size() - output_size_samples_);
+ expand_->Reset();
+ break;
+ }
+ case kUndefined: {
+ LOG_F(LS_ERROR) << "Invalid operation kUndefined.";
+ assert(false); // This should not happen.
+ last_mode_ = kModeError;
+ return kInvalidOperation;
+ }
+ } // End of switch.
+ if (return_value < 0) {
+ return return_value;
+ }
+
+ if (last_mode_ != kModeRfc3389Cng) {
+ comfort_noise_->Reset();
+ }
+
+  // Copy from |algorithm_buffer_| to |sync_buffer_|.
+ sync_buffer_->PushBack(*algorithm_buffer_);
+
+ // Extract data from |sync_buffer_| to |output|.
+ size_t num_output_samples_per_channel = output_size_samples_;
+ size_t num_output_samples = output_size_samples_ * sync_buffer_->Channels();
+ if (num_output_samples > max_length) {
+ LOG(LS_WARNING) << "Output array is too short. " << max_length << " < " <<
+ output_size_samples_ << " * " << sync_buffer_->Channels();
+ num_output_samples = max_length;
+ num_output_samples_per_channel = static_cast<int>(
+ max_length / sync_buffer_->Channels());
+ }
+ int samples_from_sync = static_cast<int>(
+ sync_buffer_->GetNextAudioInterleaved(num_output_samples_per_channel,
+ output));
+ *num_channels = static_cast<int>(sync_buffer_->Channels());
+ LOG(LS_VERBOSE) << "Sync buffer (" << *num_channels << " channel(s)):" <<
+ " insert " << algorithm_buffer_->Size() << " samples, extract " <<
+ samples_from_sync << " samples";
+ if (samples_from_sync != output_size_samples_) {
+ LOG_F(LS_ERROR) << "samples_from_sync != output_size_samples_";
+ // TODO(minyue): treatment of under-run, filling zeros
+ memset(output, 0, num_output_samples * sizeof(int16_t));
+ *samples_per_channel = output_size_samples_;
+ return kSampleUnderrun;
+ }
+ *samples_per_channel = output_size_samples_;
+
+ // Should always have overlap samples left in the |sync_buffer_|.
+ assert(sync_buffer_->FutureLength() >= expand_->overlap_length());
+
+ if (play_dtmf) {
+ return_value = DtmfOverdub(dtmf_event, sync_buffer_->Channels(), output);
+ }
+
+ // Update the background noise parameters if last operation wrote data
+ // straight from the decoder to the |sync_buffer_|. That is, none of the
+ // operations that modify the signal can be followed by a parameter update.
+ if ((last_mode_ == kModeNormal) ||
+ (last_mode_ == kModeAccelerateFail) ||
+ (last_mode_ == kModePreemptiveExpandFail) ||
+ (last_mode_ == kModeRfc3389Cng) ||
+ (last_mode_ == kModeCodecInternalCng)) {
+ background_noise_->Update(*sync_buffer_, *vad_.get());
+ }
+
+ if (operation == kDtmf) {
+    // DTMF data was written to the end of |sync_buffer_|.
+ // Update index to end of DTMF data in |sync_buffer_|.
+ sync_buffer_->set_dtmf_index(sync_buffer_->Size());
+ }
+
+ if (last_mode_ != kModeExpand) {
+ // If last operation was not expand, calculate the |playout_timestamp_| from
+ // the |sync_buffer_|. However, do not update the |playout_timestamp_| if it
+ // would be moved "backwards".
+ uint32_t temp_timestamp = sync_buffer_->end_timestamp() -
+ static_cast<uint32_t>(sync_buffer_->FutureLength());
+ if (static_cast<int32_t>(temp_timestamp - playout_timestamp_) > 0) {
+ playout_timestamp_ = temp_timestamp;
+ }
+ } else {
+ // Use dead reckoning to estimate the |playout_timestamp_|.
+ playout_timestamp_ += output_size_samples_;
+ }
+
+ if (decode_return_value) return decode_return_value;
+ return return_value;
+}
+
+int NetEqImpl::GetDecision(Operations* operation,
+ PacketList* packet_list,
+ DtmfEvent* dtmf_event,
+ bool* play_dtmf) {
+ // Initialize output variables.
+ *play_dtmf = false;
+ *operation = kUndefined;
+
+ // Increment time counters.
+ packet_buffer_->IncrementWaitingTimes();
+ stats_.IncreaseCounter(output_size_samples_, fs_hz_);
+
+ assert(sync_buffer_.get());
+ uint32_t end_timestamp = sync_buffer_->end_timestamp();
+ if (!new_codec_) {
+ packet_buffer_->DiscardOldPackets(end_timestamp);
+ }
+ const RTPHeader* header = packet_buffer_->NextRtpHeader();
+
+ if (decision_logic_->CngRfc3389On() || last_mode_ == kModeRfc3389Cng) {
+ // Because of timestamp peculiarities, we have to "manually" disallow using
+ // a CNG packet with the same timestamp as the one that was last played.
+ // This can happen when using redundancy and will cause the timing to shift.
+ while (header && decoder_database_->IsComfortNoise(header->payloadType) &&
+ (end_timestamp >= header->timestamp ||
+ end_timestamp + decision_logic_->generated_noise_samples() >
+ header->timestamp)) {
+ // Don't use this packet, discard it.
+ if (packet_buffer_->DiscardNextPacket() != PacketBuffer::kOK) {
+ assert(false); // Must be ok by design.
+ }
+ // Check buffer again.
+ if (!new_codec_) {
+ packet_buffer_->DiscardOldPackets(end_timestamp);
+ }
+ header = packet_buffer_->NextRtpHeader();
+ }
+ }
+
+ assert(expand_.get());
+ const int samples_left = static_cast<int>(sync_buffer_->FutureLength() -
+ expand_->overlap_length());
+ if (last_mode_ == kModeAccelerateSuccess ||
+ last_mode_ == kModeAccelerateLowEnergy ||
+ last_mode_ == kModePreemptiveExpandSuccess ||
+ last_mode_ == kModePreemptiveExpandLowEnergy) {
+ // Subtract (samples_left + output_size_samples_) from sampleMemory.
+ decision_logic_->AddSampleMemory(-(samples_left + output_size_samples_));
+ }
+
+ // Check if it is time to play a DTMF event.
+ if (dtmf_buffer_->GetEvent(end_timestamp +
+ decision_logic_->generated_noise_samples(),
+ dtmf_event)) {
+ *play_dtmf = true;
+ }
+
+ // Get instruction.
+ assert(sync_buffer_.get());
+ assert(expand_.get());
+ *operation = decision_logic_->GetDecision(*sync_buffer_,
+ *expand_,
+ decoder_frame_length_,
+ header,
+ last_mode_,
+ *play_dtmf,
+ &reset_decoder_);
+
+ // Check if we already have enough samples in the |sync_buffer_|. If so,
+ // change decision to normal, unless the decision was merge, accelerate, or
+ // preemptive expand.
+ if (samples_left >= output_size_samples_ &&
+ *operation != kMerge &&
+ *operation != kAccelerate &&
+ *operation != kPreemptiveExpand) {
+ *operation = kNormal;
+ return 0;
+ }
+
+ decision_logic_->ExpandDecision(*operation);
+
+ // Check conditions for reset.
+ if (new_codec_ || *operation == kUndefined) {
+ // The only valid reason to get kUndefined is that new_codec_ is set.
+ assert(new_codec_);
+ if (*play_dtmf && !header) {
+ timestamp_ = dtmf_event->timestamp;
+ } else {
+ assert(header);
+ if (!header) {
+ LOG_F(LS_ERROR) << "Packet missing where it shouldn't.";
+ return -1;
+ }
+ timestamp_ = header->timestamp;
+ if (*operation == kRfc3389CngNoPacket
+#ifndef LEGACY_BITEXACT
+ // Without this check, it can happen that a non-CNG packet is sent to
+ // the CNG decoder as if it was a SID frame. This is clearly a bug,
+ // but is kept for now to maintain bit-exactness with the test
+ // vectors.
+ && decoder_database_->IsComfortNoise(header->payloadType)
+#endif
+ ) {
+ // Change decision to CNG packet, since we do have a CNG packet, but it
+ // was considered too early to use. Now, use it anyway.
+ *operation = kRfc3389Cng;
+ } else if (*operation != kRfc3389Cng) {
+ *operation = kNormal;
+ }
+ }
+ // Adjust |sync_buffer_| timestamp before setting |end_timestamp| to the
+ // new value.
+ sync_buffer_->IncreaseEndTimestamp(timestamp_ - end_timestamp);
+ end_timestamp = timestamp_;
+ new_codec_ = false;
+ decision_logic_->SoftReset();
+ buffer_level_filter_->Reset();
+ delay_manager_->Reset();
+ stats_.ResetMcu();
+ }
+
+ int required_samples = output_size_samples_;
+ const int samples_10_ms = 80 * fs_mult_;
+ const int samples_20_ms = 2 * samples_10_ms;
+ const int samples_30_ms = 3 * samples_10_ms;
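+  // For example, at 16 kHz fs_mult_ is 2, so samples_10_ms = 160,
+  // samples_20_ms = 320 and samples_30_ms = 480.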
+
+ switch (*operation) {
+ case kExpand: {
+ timestamp_ = end_timestamp;
+ return 0;
+ }
+ case kRfc3389CngNoPacket:
+ case kCodecInternalCng: {
+ return 0;
+ }
+ case kDtmf: {
+ // TODO(hlundin): Write test for this.
+ // Update timestamp.
+ timestamp_ = end_timestamp;
+ if (decision_logic_->generated_noise_samples() > 0 &&
+ last_mode_ != kModeDtmf) {
+ // Make a jump in timestamp due to the recently played comfort noise.
+ uint32_t timestamp_jump = decision_logic_->generated_noise_samples();
+ sync_buffer_->IncreaseEndTimestamp(timestamp_jump);
+ timestamp_ += timestamp_jump;
+ }
+ decision_logic_->set_generated_noise_samples(0);
+ return 0;
+ }
+ case kAccelerate: {
+      // In order to do an accelerate we need at least 30 ms of audio data.
+ if (samples_left >= samples_30_ms) {
+ // Already have enough data, so we do not need to extract any more.
+ decision_logic_->set_sample_memory(samples_left);
+ decision_logic_->set_prev_time_scale(true);
+ return 0;
+ } else if (samples_left >= samples_10_ms &&
+ decoder_frame_length_ >= samples_30_ms) {
+ // Avoid decoding more data as it might overflow the playout buffer.
+ *operation = kNormal;
+ return 0;
+ } else if (samples_left < samples_20_ms &&
+ decoder_frame_length_ < samples_30_ms) {
+ // Build up decoded data by decoding at least 20 ms of audio data. Do
+ // not perform accelerate yet, but wait until we only need to do one
+ // decoding.
+ required_samples = 2 * output_size_samples_;
+ *operation = kNormal;
+ }
+ // If none of the above is true, we have one of two possible situations:
+ // (1) 20 ms <= samples_left < 30 ms and decoder_frame_length_ < 30 ms; or
+ // (2) samples_left < 10 ms and decoder_frame_length_ >= 30 ms.
+ // In either case, we move on with the accelerate decision, and decode one
+ // frame now.
+ break;
+ }
+ case kPreemptiveExpand: {
+ // In order to do a preemptive expand we need at least 30 ms of decoded
+ // audio data.
+ if ((samples_left >= samples_30_ms) ||
+ (samples_left >= samples_10_ms &&
+ decoder_frame_length_ >= samples_30_ms)) {
+ // Already have enough data, so we do not need to extract any more.
+ // Or, avoid decoding more data as it might overflow the playout buffer.
+ // Still try preemptive expand, though.
+ decision_logic_->set_sample_memory(samples_left);
+ decision_logic_->set_prev_time_scale(true);
+ return 0;
+ }
+ if (samples_left < samples_20_ms &&
+ decoder_frame_length_ < samples_30_ms) {
+ // Build up decoded data by decoding at least 20 ms of audio data.
+ // Still try to perform preemptive expand.
+ required_samples = 2 * output_size_samples_;
+ }
+ // Move on with the preemptive expand decision.
+ break;
+ }
+ case kMerge: {
+ required_samples =
+ std::max(merge_->RequiredFutureSamples(), required_samples);
+ break;
+ }
+ default: {
+ // Do nothing.
+ }
+ }
+
+ // Get packets from buffer.
+ int extracted_samples = 0;
+ if (header &&
+ *operation != kAlternativePlc &&
+ *operation != kAlternativePlcIncreaseTimestamp &&
+ *operation != kAudioRepetition &&
+ *operation != kAudioRepetitionIncreaseTimestamp) {
+ sync_buffer_->IncreaseEndTimestamp(header->timestamp - end_timestamp);
+ if (decision_logic_->CngOff()) {
+ // Adjustment of timestamp only corresponds to an actual packet loss
+ // if comfort noise is not played. If comfort noise was just played,
+ // this adjustment of timestamp is only done to get back in sync with the
+ // stream timestamp; no loss to report.
+ stats_.LostSamples(header->timestamp - end_timestamp);
+ }
+
+ if (*operation != kRfc3389Cng) {
+ // We are about to decode and use a non-CNG packet.
+ decision_logic_->SetCngOff();
+ }
+ // Reset CNG timestamp as a new packet will be delivered.
+ // (Also if this is a CNG packet, since playedOutTS is updated.)
+ decision_logic_->set_generated_noise_samples(0);
+
+ extracted_samples = ExtractPackets(required_samples, packet_list);
+ if (extracted_samples < 0) {
+ LOG_F(LS_WARNING) << "Failed to extract packets from buffer.";
+ return kPacketBufferCorruption;
+ }
+ }
+
+ if (*operation == kAccelerate ||
+ *operation == kPreemptiveExpand) {
+ decision_logic_->set_sample_memory(samples_left + extracted_samples);
+ decision_logic_->set_prev_time_scale(true);
+ }
+
+ if (*operation == kAccelerate) {
+ // Check that we have enough data (30ms) to do accelerate.
+ if (extracted_samples + samples_left < samples_30_ms) {
+ // TODO(hlundin): Write test for this.
+ // Not enough, do normal operation instead.
+ *operation = kNormal;
+ }
+ }
+
+ timestamp_ = end_timestamp;
+ return 0;
+}
+
+int NetEqImpl::Decode(PacketList* packet_list, Operations* operation,
+ int* decoded_length,
+ AudioDecoder::SpeechType* speech_type) {
+ *speech_type = AudioDecoder::kSpeech;
+ AudioDecoder* decoder = NULL;
+ if (!packet_list->empty()) {
+ const Packet* packet = packet_list->front();
+ int payload_type = packet->header.payloadType;
+ if (!decoder_database_->IsComfortNoise(payload_type)) {
+ decoder = decoder_database_->GetDecoder(payload_type);
+ assert(decoder);
+ if (!decoder) {
+ LOG_FERR1(LS_WARNING, GetDecoder, payload_type);
+ PacketBuffer::DeleteAllPackets(packet_list);
+ return kDecoderNotFound;
+ }
+ bool decoder_changed;
+ decoder_database_->SetActiveDecoder(payload_type, &decoder_changed);
+ if (decoder_changed) {
+ // We have a new decoder. Re-init some values.
+ const DecoderDatabase::DecoderInfo* decoder_info = decoder_database_
+ ->GetDecoderInfo(payload_type);
+ assert(decoder_info);
+ if (!decoder_info) {
+ LOG_FERR1(LS_WARNING, GetDecoderInfo, payload_type);
+ PacketBuffer::DeleteAllPackets(packet_list);
+ return kDecoderNotFound;
+ }
+ // If sampling rate or number of channels has changed, we need to make
+ // a reset.
+ if (decoder_info->fs_hz != fs_hz_ ||
+ decoder->channels() != algorithm_buffer_->Channels()) {
+ // TODO(tlegrand): Add unittest to cover this event.
+ SetSampleRateAndChannels(decoder_info->fs_hz, decoder->channels());
+ }
+ sync_buffer_->set_end_timestamp(timestamp_);
+ playout_timestamp_ = timestamp_;
+ }
+ }
+ }
+
+ if (reset_decoder_) {
+ // TODO(hlundin): Write test for this.
+ // Reset decoder.
+ if (decoder) {
+ decoder->Init();
+ }
+ // Reset comfort noise decoder.
+ AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ if (cng_decoder) {
+ cng_decoder->Init();
+ }
+ reset_decoder_ = false;
+ }
+
+#ifdef LEGACY_BITEXACT
+ // Due to a bug in old SignalMCU, it could happen that CNG operation was
+ // decided, but a speech packet was provided. The speech packet will be used
+ // to update the comfort noise decoder, as if it was a SID frame, which is
+ // clearly wrong.
+ if (*operation == kRfc3389Cng) {
+ return 0;
+ }
+#endif
+
+ *decoded_length = 0;
+ // Update codec-internal PLC state.
+ if ((*operation == kMerge) && decoder && decoder->HasDecodePlc()) {
+ decoder->DecodePlc(1, &decoded_buffer_[*decoded_length]);
+ }
+
+ int return_value = DecodeLoop(packet_list, operation, decoder,
+ decoded_length, speech_type);
+
+ if (*decoded_length < 0) {
+ // Error returned from the decoder.
+ *decoded_length = 0;
+ sync_buffer_->IncreaseEndTimestamp(decoder_frame_length_);
+ int error_code = 0;
+ if (decoder)
+ error_code = decoder->ErrorCode();
+ if (error_code != 0) {
+ // Got some error code from the decoder.
+ decoder_error_code_ = error_code;
+ return_value = kDecoderErrorCode;
+ } else {
+ // Decoder does not implement error codes. Return generic error.
+ return_value = kOtherDecoderError;
+ }
+ LOG_FERR2(LS_WARNING, DecodeLoop, error_code, packet_list->size());
+ *operation = kExpand; // Do expansion to get data instead.
+ }
+ if (*speech_type != AudioDecoder::kComfortNoise) {
+    // Don't increment the timestamp if the codec returned CNG speech type,
+    // since in that case we will increment the CNGplayedTS counter.
+    // Increase with the number of samples per channel.
+ assert(*decoded_length == 0 ||
+ (decoder && decoder->channels() == sync_buffer_->Channels()));
+ sync_buffer_->IncreaseEndTimestamp(
+ *decoded_length / static_cast<int>(sync_buffer_->Channels()));
+ }
+ return return_value;
+}
+
+int NetEqImpl::DecodeLoop(PacketList* packet_list, Operations* operation,
+ AudioDecoder* decoder, int* decoded_length,
+ AudioDecoder::SpeechType* speech_type) {
+ Packet* packet = NULL;
+ if (!packet_list->empty()) {
+ packet = packet_list->front();
+ }
+ // Do decoding.
+ while (packet &&
+ !decoder_database_->IsComfortNoise(packet->header.payloadType)) {
+ assert(decoder); // At this point, we must have a decoder object.
+    // The number of channels in the |sync_buffer_| should be the same as the
+    // number of decoder channels.
+ assert(sync_buffer_->Channels() == decoder->channels());
+ assert(decoded_buffer_length_ >= kMaxFrameSize * decoder->channels());
+ assert(*operation == kNormal || *operation == kAccelerate ||
+ *operation == kMerge || *operation == kPreemptiveExpand);
+ packet_list->pop_front();
+ int payload_length = packet->payload_length;
+ int16_t decode_length;
+ if (packet->sync_packet) {
+ // Decode to silence with the same frame size as the last decode.
+ LOG(LS_VERBOSE) << "Decoding sync-packet: " <<
+ " ts=" << packet->header.timestamp <<
+ ", sn=" << packet->header.sequenceNumber <<
+ ", pt=" << static_cast<int>(packet->header.payloadType) <<
+ ", ssrc=" << packet->header.ssrc <<
+ ", len=" << packet->payload_length;
+ memset(&decoded_buffer_[*decoded_length], 0, decoder_frame_length_ *
+ decoder->channels() * sizeof(decoded_buffer_[0]));
+ decode_length = decoder_frame_length_;
+ } else if (!packet->primary) {
+ // This is a redundant payload; call the special decoder method.
+ LOG(LS_VERBOSE) << "Decoding packet (redundant):" <<
+ " ts=" << packet->header.timestamp <<
+ ", sn=" << packet->header.sequenceNumber <<
+ ", pt=" << static_cast<int>(packet->header.payloadType) <<
+ ", ssrc=" << packet->header.ssrc <<
+ ", len=" << packet->payload_length;
+ decode_length = decoder->DecodeRedundant(
+ packet->payload, packet->payload_length,
+ &decoded_buffer_[*decoded_length], speech_type);
+ } else {
+ LOG(LS_VERBOSE) << "Decoding packet: ts=" << packet->header.timestamp <<
+ ", sn=" << packet->header.sequenceNumber <<
+ ", pt=" << static_cast<int>(packet->header.payloadType) <<
+ ", ssrc=" << packet->header.ssrc <<
+ ", len=" << packet->payload_length;
+ decode_length = decoder->Decode(packet->payload,
+ packet->payload_length,
+ &decoded_buffer_[*decoded_length],
+ speech_type);
+ }
+
+ delete[] packet->payload;
+ delete packet;
+ packet = NULL;
+ if (decode_length > 0) {
+ *decoded_length += decode_length;
+ // Update |decoder_frame_length_| with number of samples per channel.
+ decoder_frame_length_ = decode_length /
+ static_cast<int>(decoder->channels());
+ LOG(LS_VERBOSE) << "Decoded " << decode_length << " samples (" <<
+ decoder->channels() << " channel(s) -> " << decoder_frame_length_ <<
+ " samples per channel)";
+ } else if (decode_length < 0) {
+ // Error.
+ LOG_FERR2(LS_WARNING, Decode, decode_length, payload_length);
+ *decoded_length = -1;
+ PacketBuffer::DeleteAllPackets(packet_list);
+ break;
+ }
+ if (*decoded_length > static_cast<int>(decoded_buffer_length_)) {
+ // Guard against overflow.
+ LOG_F(LS_WARNING) << "Decoded too much.";
+ PacketBuffer::DeleteAllPackets(packet_list);
+ return kDecodedTooMuch;
+ }
+ if (!packet_list->empty()) {
+ packet = packet_list->front();
+ } else {
+ packet = NULL;
+ }
+ } // End of decode loop.
+
+ // If the list is not empty at this point, either a decoding error terminated
+  // the while-loop, or the list must hold exactly one CNG packet.
+ assert(packet_list->empty() || *decoded_length < 0 ||
+ (packet_list->size() == 1 && packet &&
+ decoder_database_->IsComfortNoise(packet->header.payloadType)));
+ return 0;
+}
+
+void NetEqImpl::DoNormal(const int16_t* decoded_buffer, size_t decoded_length,
+ AudioDecoder::SpeechType speech_type, bool play_dtmf) {
+ assert(normal_.get());
+ assert(mute_factor_array_.get());
+ normal_->Process(decoded_buffer, decoded_length, last_mode_,
+ mute_factor_array_.get(), algorithm_buffer_.get());
+ if (decoded_length != 0) {
+ last_mode_ = kModeNormal;
+ }
+
+ // If last packet was decoded as an inband CNG, set mode to CNG instead.
+ if ((speech_type == AudioDecoder::kComfortNoise)
+ || ((last_mode_ == kModeCodecInternalCng)
+ && (decoded_length == 0))) {
+ // TODO(hlundin): Remove second part of || statement above.
+ last_mode_ = kModeCodecInternalCng;
+ }
+
+ if (!play_dtmf) {
+ dtmf_tone_generator_->Reset();
+ }
+}
+
+void NetEqImpl::DoMerge(int16_t* decoded_buffer, size_t decoded_length,
+ AudioDecoder::SpeechType speech_type, bool play_dtmf) {
+ assert(mute_factor_array_.get());
+ assert(merge_.get());
+ int new_length = merge_->Process(decoded_buffer, decoded_length,
+ mute_factor_array_.get(),
+ algorithm_buffer_.get());
+
+ // Update in-call and post-call statistics.
+ if (expand_->MuteFactor(0) == 0) {
+ // Expand generates only noise.
+ stats_.ExpandedNoiseSamples(new_length - static_cast<int>(decoded_length));
+ } else {
+ // Expansion generates more than only noise.
+ stats_.ExpandedVoiceSamples(new_length - static_cast<int>(decoded_length));
+ }
+
+ last_mode_ = kModeMerge;
+ // If last packet was decoded as an inband CNG, set mode to CNG instead.
+ if (speech_type == AudioDecoder::kComfortNoise) {
+ last_mode_ = kModeCodecInternalCng;
+ }
+ expand_->Reset();
+ if (!play_dtmf) {
+ dtmf_tone_generator_->Reset();
+ }
+}
+
+int NetEqImpl::DoExpand(bool play_dtmf) {
+ while ((sync_buffer_->FutureLength() - expand_->overlap_length()) <
+ static_cast<size_t>(output_size_samples_)) {
+ algorithm_buffer_->Clear();
+ int return_value = expand_->Process(algorithm_buffer_.get());
+ int length = static_cast<int>(algorithm_buffer_->Size());
+
+ // Update in-call and post-call statistics.
+ if (expand_->MuteFactor(0) == 0) {
+ // Expand operation generates only noise.
+ stats_.ExpandedNoiseSamples(length);
+ } else {
+ // Expand operation generates more than only noise.
+ stats_.ExpandedVoiceSamples(length);
+ }
+
+ last_mode_ = kModeExpand;
+
+ if (return_value < 0) {
+ return return_value;
+ }
+
+ sync_buffer_->PushBack(*algorithm_buffer_);
+ algorithm_buffer_->Clear();
+ }
+ if (!play_dtmf) {
+ dtmf_tone_generator_->Reset();
+ }
+ return 0;
+}
+
+int NetEqImpl::DoAccelerate(int16_t* decoded_buffer, size_t decoded_length,
+ AudioDecoder::SpeechType speech_type,
+ bool play_dtmf) {
+ const size_t required_samples = 240 * fs_mult_; // Must have 30 ms.
+ size_t borrowed_samples_per_channel = 0;
+ size_t num_channels = algorithm_buffer_->Channels();
+ size_t decoded_length_per_channel = decoded_length / num_channels;
+ if (decoded_length_per_channel < required_samples) {
+ // Must move data from the |sync_buffer_| in order to get 30 ms.
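+    // Example: at 48 kHz, required_samples is 1440, so a 20 ms decode (960
+    // samples per channel) means that 480 samples per channel are read back
+    // from the end of |sync_buffer_|.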
+ borrowed_samples_per_channel = static_cast<int>(required_samples -
+ decoded_length_per_channel);
+ memmove(&decoded_buffer[borrowed_samples_per_channel * num_channels],
+ decoded_buffer,
+ sizeof(int16_t) * decoded_length);
+ sync_buffer_->ReadInterleavedFromEnd(borrowed_samples_per_channel,
+ decoded_buffer);
+ decoded_length = required_samples * num_channels;
+ }
+
+ int16_t samples_removed;
+ Accelerate::ReturnCodes return_code = accelerate_->Process(
+ decoded_buffer, decoded_length, algorithm_buffer_.get(),
+ &samples_removed);
+ stats_.AcceleratedSamples(samples_removed);
+ switch (return_code) {
+ case Accelerate::kSuccess:
+ last_mode_ = kModeAccelerateSuccess;
+ break;
+ case Accelerate::kSuccessLowEnergy:
+ last_mode_ = kModeAccelerateLowEnergy;
+ break;
+ case Accelerate::kNoStretch:
+ last_mode_ = kModeAccelerateFail;
+ break;
+ case Accelerate::kError:
+ // TODO(hlundin): Map to kModeError instead?
+ last_mode_ = kModeAccelerateFail;
+ return kAccelerateError;
+ }
+
+ if (borrowed_samples_per_channel > 0) {
+ // Copy borrowed samples back to the |sync_buffer_|.
+ size_t length = algorithm_buffer_->Size();
+ if (length < borrowed_samples_per_channel) {
+ // This destroys the beginning of the buffer, but will not cause any
+ // problems.
+ sync_buffer_->ReplaceAtIndex(*algorithm_buffer_,
+ sync_buffer_->Size() -
+ borrowed_samples_per_channel);
+ sync_buffer_->PushFrontZeros(borrowed_samples_per_channel - length);
+ algorithm_buffer_->PopFront(length);
+ assert(algorithm_buffer_->Empty());
+ } else {
+ sync_buffer_->ReplaceAtIndex(*algorithm_buffer_,
+ borrowed_samples_per_channel,
+ sync_buffer_->Size() -
+ borrowed_samples_per_channel);
+ algorithm_buffer_->PopFront(borrowed_samples_per_channel);
+ }
+ }
+
+ // If last packet was decoded as an inband CNG, set mode to CNG instead.
+ if (speech_type == AudioDecoder::kComfortNoise) {
+ last_mode_ = kModeCodecInternalCng;
+ }
+ if (!play_dtmf) {
+ dtmf_tone_generator_->Reset();
+ }
+ expand_->Reset();
+ return 0;
+}
+
+int NetEqImpl::DoPreemptiveExpand(int16_t* decoded_buffer,
+ size_t decoded_length,
+ AudioDecoder::SpeechType speech_type,
+ bool play_dtmf) {
+ const size_t required_samples = 240 * fs_mult_; // Must have 30 ms.
+ size_t num_channels = algorithm_buffer_->Channels();
+ int borrowed_samples_per_channel = 0;
+ int old_borrowed_samples_per_channel = 0;
+ size_t decoded_length_per_channel = decoded_length / num_channels;
+ if (decoded_length_per_channel < required_samples) {
+ // Must move data from the |sync_buffer_| in order to get 30 ms.
+ borrowed_samples_per_channel = static_cast<int>(required_samples -
+ decoded_length_per_channel);
+ // Calculate how many of these were already played out.
+ old_borrowed_samples_per_channel = static_cast<int>(
+ borrowed_samples_per_channel - sync_buffer_->FutureLength());
+ old_borrowed_samples_per_channel = std::max(
+ 0, old_borrowed_samples_per_channel);
+ memmove(&decoded_buffer[borrowed_samples_per_channel * num_channels],
+ decoded_buffer,
+ sizeof(int16_t) * decoded_length);
+ sync_buffer_->ReadInterleavedFromEnd(borrowed_samples_per_channel,
+ decoded_buffer);
+ decoded_length = required_samples * num_channels;
+ }
+
+ int16_t samples_added;
+ PreemptiveExpand::ReturnCodes return_code = preemptive_expand_->Process(
+ decoded_buffer, static_cast<int>(decoded_length),
+ old_borrowed_samples_per_channel,
+ algorithm_buffer_.get(), &samples_added);
+ stats_.PreemptiveExpandedSamples(samples_added);
+ switch (return_code) {
+ case PreemptiveExpand::kSuccess:
+ last_mode_ = kModePreemptiveExpandSuccess;
+ break;
+ case PreemptiveExpand::kSuccessLowEnergy:
+ last_mode_ = kModePreemptiveExpandLowEnergy;
+ break;
+ case PreemptiveExpand::kNoStretch:
+ last_mode_ = kModePreemptiveExpandFail;
+ break;
+ case PreemptiveExpand::kError:
+ // TODO(hlundin): Map to kModeError instead?
+ last_mode_ = kModePreemptiveExpandFail;
+ return kPreemptiveExpandError;
+ }
+
+ if (borrowed_samples_per_channel > 0) {
+ // Copy borrowed samples back to the |sync_buffer_|.
+ sync_buffer_->ReplaceAtIndex(
+ *algorithm_buffer_, borrowed_samples_per_channel,
+ sync_buffer_->Size() - borrowed_samples_per_channel);
+ algorithm_buffer_->PopFront(borrowed_samples_per_channel);
+ }
+
+ // If last packet was decoded as an inband CNG, set mode to CNG instead.
+ if (speech_type == AudioDecoder::kComfortNoise) {
+ last_mode_ = kModeCodecInternalCng;
+ }
+ if (!play_dtmf) {
+ dtmf_tone_generator_->Reset();
+ }
+ expand_->Reset();
+ return 0;
+}
+
+int NetEqImpl::DoRfc3389Cng(PacketList* packet_list, bool play_dtmf) {
+ if (!packet_list->empty()) {
+ // Must have exactly one SID frame at this point.
+ assert(packet_list->size() == 1);
+ Packet* packet = packet_list->front();
+ packet_list->pop_front();
+ if (!decoder_database_->IsComfortNoise(packet->header.payloadType)) {
+#ifdef LEGACY_BITEXACT
+ // This can happen due to a bug in GetDecision. Change the payload type
+ // to a CNG type, and move on. Note that this means that we are in fact
+ // sending a non-CNG payload to the comfort noise decoder for decoding.
+ // Clearly wrong, but will maintain bit-exactness with legacy.
+ if (fs_hz_ == 8000) {
+ packet->header.payloadType =
+ decoder_database_->GetRtpPayloadType(kDecoderCNGnb);
+ } else if (fs_hz_ == 16000) {
+ packet->header.payloadType =
+ decoder_database_->GetRtpPayloadType(kDecoderCNGwb);
+ } else if (fs_hz_ == 32000) {
+ packet->header.payloadType =
+ decoder_database_->GetRtpPayloadType(kDecoderCNGswb32kHz);
+ } else if (fs_hz_ == 48000) {
+ packet->header.payloadType =
+ decoder_database_->GetRtpPayloadType(kDecoderCNGswb48kHz);
+ }
+ assert(decoder_database_->IsComfortNoise(packet->header.payloadType));
+#else
+ LOG(LS_ERROR) << "Trying to decode non-CNG payload as CNG.";
+ return kOtherError;
+#endif
+ }
+ // UpdateParameters() deletes |packet|.
+ if (comfort_noise_->UpdateParameters(packet) ==
+ ComfortNoise::kInternalError) {
+ LOG_FERR0(LS_WARNING, UpdateParameters);
+ algorithm_buffer_->Zeros(output_size_samples_);
+ return -comfort_noise_->internal_error_code();
+ }
+ }
+ int cn_return = comfort_noise_->Generate(output_size_samples_,
+ algorithm_buffer_.get());
+ expand_->Reset();
+ last_mode_ = kModeRfc3389Cng;
+ if (!play_dtmf) {
+ dtmf_tone_generator_->Reset();
+ }
+ if (cn_return == ComfortNoise::kInternalError) {
+ LOG_FERR1(LS_WARNING, comfort_noise_->Generate, cn_return);
+ decoder_error_code_ = comfort_noise_->internal_error_code();
+ return kComfortNoiseErrorCode;
+ } else if (cn_return == ComfortNoise::kUnknownPayloadType) {
+ LOG_FERR1(LS_WARNING, comfort_noise_->Generate, cn_return);
+ return kUnknownRtpPayloadType;
+ }
+ return 0;
+}
+
+void NetEqImpl::DoCodecInternalCng() {
+ int length = 0;
+ // TODO(hlundin): Will probably need a longer buffer for multi-channel.
+ int16_t decoded_buffer[kMaxFrameSize];
+ AudioDecoder* decoder = decoder_database_->GetActiveDecoder();
+ if (decoder) {
+ const uint8_t* dummy_payload = NULL;
+ AudioDecoder::SpeechType speech_type;
+ length = decoder->Decode(dummy_payload, 0, decoded_buffer, &speech_type);
+ }
+ assert(mute_factor_array_.get());
+ normal_->Process(decoded_buffer, length, last_mode_, mute_factor_array_.get(),
+ algorithm_buffer_.get());
+ last_mode_ = kModeCodecInternalCng;
+ expand_->Reset();
+}
+
+int NetEqImpl::DoDtmf(const DtmfEvent& dtmf_event, bool* play_dtmf) {
+  // This block of code, and the block further down that handles
+  // |dtmf_switch|, are commented out. Otherwise, playing out-of-band DTMF
+  // would fail in the VoE test
+  // DtmfTest.ManualSuccessfullySendsOutOfBandTelephoneEvents. This is
+  // equivalent to |dtmf_switch| always being false.
+  //
+  // See http://webrtc-codereview.appspot.com/1195004/ for discussion on this
+  // issue. This change might cause some glitches at the point of switching
+  // from audio to DTMF. Issue 1545 is filed to track this.
+ //
+ // bool dtmf_switch = false;
+ // if ((last_mode_ != kModeDtmf) && dtmf_tone_generator_->initialized()) {
+ // // Special case; see below.
+ // // We must catch this before calling Generate, since |initialized| is
+ // // modified in that call.
+ // dtmf_switch = true;
+ // }
+
+ int dtmf_return_value = 0;
+ if (!dtmf_tone_generator_->initialized()) {
+ // Initialize if not already done.
+ dtmf_return_value = dtmf_tone_generator_->Init(fs_hz_, dtmf_event.event_no,
+ dtmf_event.volume);
+ }
+
+ if (dtmf_return_value == 0) {
+ // Generate DTMF signal.
+ dtmf_return_value = dtmf_tone_generator_->Generate(output_size_samples_,
+ algorithm_buffer_.get());
+ }
+
+ if (dtmf_return_value < 0) {
+ algorithm_buffer_->Zeros(output_size_samples_);
+ return dtmf_return_value;
+ }
+
+ // if (dtmf_switch) {
+ // // This is the special case where the previous operation was DTMF
+ // // overdub, but the current instruction is "regular" DTMF. We must make
+ // // sure that the DTMF does not have any discontinuities. The first DTMF
+ // // sample that we generate now must be played out immediately, therefore
+ // // it must be copied to the speech buffer.
+ // // TODO(hlundin): This code seems incorrect. (Legacy.) Write test and
+ // // verify correct operation.
+ // assert(false);
+ // // Must generate enough data to replace all of the |sync_buffer_|
+ // // "future".
+ // int required_length = sync_buffer_->FutureLength();
+ // assert(dtmf_tone_generator_->initialized());
+ // dtmf_return_value = dtmf_tone_generator_->Generate(required_length,
+ // algorithm_buffer_);
+ // assert((size_t) required_length == algorithm_buffer_->Size());
+ // if (dtmf_return_value < 0) {
+ // algorithm_buffer_->Zeros(output_size_samples_);
+ // return dtmf_return_value;
+ // }
+ //
+ // // Overwrite the "future" part of the speech buffer with the new DTMF
+ // // data.
+ // // TODO(hlundin): It seems that this overwriting has gone lost.
+ // // Not adapted for multi-channel yet.
+ // assert(algorithm_buffer_->Channels() == 1);
+ // if (algorithm_buffer_->Channels() != 1) {
+ // LOG(LS_WARNING) << "DTMF not supported for more than one channel";
+ // return kStereoNotSupported;
+ // }
+ // // Shuffle the remaining data to the beginning of algorithm buffer.
+ // algorithm_buffer_->PopFront(sync_buffer_->FutureLength());
+ // }
+
+ sync_buffer_->IncreaseEndTimestamp(output_size_samples_);
+ expand_->Reset();
+ last_mode_ = kModeDtmf;
+
+ // Set to false because the DTMF is already in the algorithm buffer.
+ *play_dtmf = false;
+ return 0;
+}
+
+void NetEqImpl::DoAlternativePlc(bool increase_timestamp) {
+ AudioDecoder* decoder = decoder_database_->GetActiveDecoder();
+ int length;
+ if (decoder && decoder->HasDecodePlc()) {
+ // Use the decoder's packet-loss concealment.
+ // TODO(hlundin): Will probably need a longer buffer for multi-channel.
+ int16_t decoded_buffer[kMaxFrameSize];
+ length = decoder->DecodePlc(1, decoded_buffer);
+ if (length > 0) {
+ algorithm_buffer_->PushBackInterleaved(decoded_buffer, length);
+ } else {
+ length = 0;
+ }
+ } else {
+ // Do simple zero-stuffing.
+ length = output_size_samples_;
+ algorithm_buffer_->Zeros(length);
+ // By not advancing the timestamp, NetEq inserts samples.
+ stats_.AddZeros(length);
+ }
+ if (increase_timestamp) {
+ sync_buffer_->IncreaseEndTimestamp(length);
+ }
+ expand_->Reset();
+}
+
+int NetEqImpl::DtmfOverdub(const DtmfEvent& dtmf_event, size_t num_channels,
+ int16_t* output) const {
+ size_t out_index = 0;
+ int overdub_length = output_size_samples_; // Default value.
+
+ if (sync_buffer_->dtmf_index() > sync_buffer_->next_index()) {
+ // Special operation for transition from "DTMF only" to "DTMF overdub".
+ out_index = std::min(
+ sync_buffer_->dtmf_index() - sync_buffer_->next_index(),
+ static_cast<size_t>(output_size_samples_));
+ overdub_length = output_size_samples_ - static_cast<int>(out_index);
+ }
+
+ AudioMultiVector dtmf_output(num_channels);
+ int dtmf_return_value = 0;
+ if (!dtmf_tone_generator_->initialized()) {
+ dtmf_return_value = dtmf_tone_generator_->Init(fs_hz_, dtmf_event.event_no,
+ dtmf_event.volume);
+ }
+ if (dtmf_return_value == 0) {
+ dtmf_return_value = dtmf_tone_generator_->Generate(overdub_length,
+ &dtmf_output);
+ assert((size_t) overdub_length == dtmf_output.Size());
+ }
+ dtmf_output.ReadInterleaved(overdub_length, &output[out_index]);
+ return dtmf_return_value < 0 ? dtmf_return_value : 0;
+}
+
+int NetEqImpl::ExtractPackets(int required_samples, PacketList* packet_list) {
+ bool first_packet = true;
+ uint8_t prev_payload_type = 0;
+ uint32_t prev_timestamp = 0;
+ uint16_t prev_sequence_number = 0;
+ bool next_packet_available = false;
+
+ const RTPHeader* header = packet_buffer_->NextRtpHeader();
+ assert(header);
+ if (!header) {
+ return -1;
+ }
+ uint32_t first_timestamp = header->timestamp;
+ int extracted_samples = 0;
+
+ // Packet extraction loop.
+ do {
+ timestamp_ = header->timestamp;
+ int discard_count = 0;
+ Packet* packet = packet_buffer_->GetNextPacket(&discard_count);
+ // |header| may be invalid after the |packet_buffer_| operation.
+ header = NULL;
+ if (!packet) {
+ LOG_FERR1(LS_ERROR, GetNextPacket, discard_count) <<
+ "Should always be able to extract a packet here";
+ assert(false); // Should always be able to extract a packet here.
+ return -1;
+ }
+ stats_.PacketsDiscarded(discard_count);
+    // Store waiting time in ms; packet->waiting_time is in "output blocks".
+ stats_.StoreWaitingTime(packet->waiting_time * kOutputSizeMs);
+ assert(packet->payload_length > 0);
+ packet_list->push_back(packet); // Store packet in list.
+
+ if (first_packet) {
+ first_packet = false;
+ decoded_packet_sequence_number_ = prev_sequence_number =
+ packet->header.sequenceNumber;
+ decoded_packet_timestamp_ = prev_timestamp = packet->header.timestamp;
+ prev_payload_type = packet->header.payloadType;
+ }
+
+ // Store number of extracted samples.
+ int packet_duration = 0;
+ AudioDecoder* decoder = decoder_database_->GetDecoder(
+ packet->header.payloadType);
+ if (decoder) {
+ if (packet->sync_packet) {
+ packet_duration = decoder_frame_length_;
+ } else {
+ packet_duration = packet->primary ?
+ decoder->PacketDuration(packet->payload, packet->payload_length) :
+ decoder->PacketDurationRedundant(packet->payload,
+ packet->payload_length);
+ }
+ } else {
+ LOG_FERR1(LS_WARNING, GetDecoder, packet->header.payloadType) <<
+ "Could not find a decoder for a packet about to be extracted.";
+ assert(false);
+ }
+ if (packet_duration <= 0) {
+ // Decoder did not return a packet duration. Assume that the packet
+ // contains the same number of samples as the previous one.
+ packet_duration = decoder_frame_length_;
+ }
+ extracted_samples = packet->header.timestamp - first_timestamp +
+ packet_duration;
+
+ // Check what packet is available next.
+ header = packet_buffer_->NextRtpHeader();
+ next_packet_available = false;
+ if (header && prev_payload_type == header->payloadType) {
+ int16_t seq_no_diff = header->sequenceNumber - prev_sequence_number;
+ int32_t ts_diff = header->timestamp - prev_timestamp;
+ if (seq_no_diff == 1 ||
+ (seq_no_diff == 0 && ts_diff == decoder_frame_length_)) {
+ // The next sequence number is available, or the next part of a packet
+ // that was split into pieces upon insertion.
+ next_packet_available = true;
+ }
+ prev_sequence_number = header->sequenceNumber;
+ }
+ } while (extracted_samples < required_samples && next_packet_available);
+
+ return extracted_samples;
+}
+
+void NetEqImpl::UpdatePlcComponents(int fs_hz, size_t channels) {
+ // Delete objects and create new ones.
+ expand_.reset(expand_factory_->Create(background_noise_.get(),
+ sync_buffer_.get(), &random_vector_,
+ fs_hz, channels));
+ merge_.reset(new Merge(fs_hz, channels, expand_.get(), sync_buffer_.get()));
+}
+
+void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) {
+ LOG_API2(fs_hz, channels);
+ // TODO(hlundin): Change to an enumerator and skip assert.
+ assert(fs_hz == 8000 || fs_hz == 16000 || fs_hz == 32000 || fs_hz == 48000);
+ assert(channels > 0);
+
+ fs_hz_ = fs_hz;
+ fs_mult_ = fs_hz / 8000;
+ output_size_samples_ = kOutputSizeMs * 8 * fs_mult_;
+ decoder_frame_length_ = 3 * output_size_samples_; // Initialize to 30ms.
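+  // For example, at 16 kHz: fs_mult_ = 2, output_size_samples_ = 160 (10 ms)
+  // and decoder_frame_length_ = 480 (30 ms).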
+
+ last_mode_ = kModeNormal;
+
+ // Create a new array of mute factors and set all to 1.
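+  // (1.0 corresponds to 1 << 14 = 16384 in Q14 fixed-point representation.)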
+ mute_factor_array_.reset(new int16_t[channels]);
+ for (size_t i = 0; i < channels; ++i) {
+ mute_factor_array_[i] = 16384; // 1.0 in Q14.
+ }
+
+ // Reset comfort noise decoder, if there is one active.
+ AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ if (cng_decoder) {
+ cng_decoder->Init();
+ }
+
+ // Reinit post-decode VAD with new sample rate.
+ assert(vad_.get()); // Cannot be NULL here.
+ vad_->Init();
+
+ // Delete algorithm buffer and create a new one.
+ algorithm_buffer_.reset(new AudioMultiVector(channels));
+
+ // Delete sync buffer and create a new one.
+ sync_buffer_.reset(new SyncBuffer(channels, kSyncBufferSize * fs_mult_));
+
+
+ // Delete BackgroundNoise object and create a new one, while preserving its
+ // mode.
+ NetEqBackgroundNoiseMode current_mode = kBgnOn;
+ if (background_noise_.get())
+ current_mode = background_noise_->mode();
+ background_noise_.reset(new BackgroundNoise(channels));
+ background_noise_->set_mode(current_mode);
+
+ // Reset random vector.
+ random_vector_.Reset();
+
+ UpdatePlcComponents(fs_hz, channels);
+
+ // Move index so that we create a small set of future samples (all 0).
+ sync_buffer_->set_next_index(sync_buffer_->next_index() -
+ expand_->overlap_length());
+
+ normal_.reset(new Normal(fs_hz, decoder_database_.get(), *background_noise_,
+ expand_.get()));
+ accelerate_.reset(
+ accelerate_factory_->Create(fs_hz, channels, *background_noise_));
+ preemptive_expand_.reset(preemptive_expand_factory_->Create(
+ fs_hz, channels,
+ *background_noise_,
+ static_cast<int>(expand_->overlap_length())));
+
+ // Delete ComfortNoise object and create a new one.
+ comfort_noise_.reset(new ComfortNoise(fs_hz, decoder_database_.get(),
+ sync_buffer_.get()));
+
+ // Verify that |decoded_buffer_| is long enough.
+ if (decoded_buffer_length_ < kMaxFrameSize * channels) {
+ // Reallocate to larger size.
+ decoded_buffer_length_ = kMaxFrameSize * channels;
+ decoded_buffer_.reset(new int16_t[decoded_buffer_length_]);
+ }
+
+ // Create DecisionLogic if it is not created yet, then communicate new sample
+ // rate and output size to DecisionLogic object.
+ if (!decision_logic_.get()) {
+ CreateDecisionLogic(kPlayoutOn);
+ }
+ decision_logic_->SetSampleRate(fs_hz_, output_size_samples_);
+}
+
+NetEqOutputType NetEqImpl::LastOutputType() {
+ assert(vad_.get());
+ assert(expand_.get());
+ if (last_mode_ == kModeCodecInternalCng || last_mode_ == kModeRfc3389Cng) {
+ return kOutputCNG;
+ } else if (last_mode_ == kModeExpand && expand_->MuteFactor(0) == 0) {
+ // Expand mode has faded down to background noise only (very long expand).
+ return kOutputPLCtoCNG;
+ } else if (last_mode_ == kModeExpand) {
+ return kOutputPLC;
+ } else if (vad_->running() && !vad_->active_speech()) {
+ return kOutputVADPassive;
+ } else {
+ return kOutputNormal;
+ }
+}
+
+void NetEqImpl::CreateDecisionLogic(NetEqPlayoutMode mode) {
+ decision_logic_.reset(DecisionLogic::Create(fs_hz_, output_size_samples_,
+ mode,
+ decoder_database_.get(),
+ *packet_buffer_.get(),
+ delay_manager_.get(),
+ buffer_level_filter_.get()));
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
new file mode 100644
index 00000000000..e92babd8e35
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
@@ -0,0 +1,406 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_NETEQ_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_NETEQ_IMPL_H_
+
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h" // Declare PacketList.
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/rtcp.h"
+#include "webrtc/modules/audio_coding/neteq/statistics_calculator.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class Accelerate;
+class BackgroundNoise;
+class BufferLevelFilter;
+class ComfortNoise;
+class CriticalSectionWrapper;
+class DecisionLogic;
+class DecoderDatabase;
+class DelayManager;
+class DelayPeakDetector;
+class DtmfBuffer;
+class DtmfToneGenerator;
+class Expand;
+class Merge;
+class Normal;
+class PacketBuffer;
+class PayloadSplitter;
+class PostDecodeVad;
+class PreemptiveExpand;
+class RandomVector;
+class SyncBuffer;
+class TimestampScaler;
+struct AccelerateFactory;
+struct DtmfEvent;
+struct ExpandFactory;
+struct PreemptiveExpandFactory;
+
+class NetEqImpl : public webrtc::NetEq {
+ public:
+ // Creates a new NetEqImpl object. The object will assume ownership of all
+ // injected dependencies, and will delete them when done.
+ NetEqImpl(int fs,
+ BufferLevelFilter* buffer_level_filter,
+ DecoderDatabase* decoder_database,
+ DelayManager* delay_manager,
+ DelayPeakDetector* delay_peak_detector,
+ DtmfBuffer* dtmf_buffer,
+ DtmfToneGenerator* dtmf_tone_generator,
+ PacketBuffer* packet_buffer,
+ PayloadSplitter* payload_splitter,
+ TimestampScaler* timestamp_scaler,
+ AccelerateFactory* accelerate_factory,
+ ExpandFactory* expand_factory,
+ PreemptiveExpandFactory* preemptive_expand_factory,
+ bool create_components = true);
+
+ virtual ~NetEqImpl();
+
+ // Inserts a new packet into NetEq. The |receive_timestamp| is an indication
+ // of the time when the packet was received, and should be measured with
+ // the same tick rate as the RTP timestamp of the current payload.
+ // Returns 0 on success, -1 on failure.
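+  //
+  // Minimal usage sketch (illustrative only; |neteq| points to a NetEq
+  // instance and the argument names are caller-side placeholders):
+  //   WebRtcRTPHeader rtp_header;  // From the received RTP packet.
+  //   neteq->InsertPacket(rtp_header, payload, payload_len, receive_ts);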
+ virtual int InsertPacket(const WebRtcRTPHeader& rtp_header,
+ const uint8_t* payload,
+ int length_bytes,
+ uint32_t receive_timestamp);
+
+  // Inserts a sync-packet into the packet queue. Sync-packets are decoded to
+  // silence and are intended to keep AV-sync intact in the event of long
+  // packet losses when Video NACK is enabled but Audio NACK is not. Clients
+  // of NetEq might insert a sync-packet when they observe that the buffer
+  // level of NetEq is dropping below a certain threshold, defined by the
+  // application. Sync-packets should have the same payload type as the last
+  // audio payload type, i.e. they cannot have a DTMF or CNG payload type,
+  // nor can a codec change be implied by inserting a sync-packet.
+ // Returns kOk on success, kFail on failure.
+ virtual int InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
+ uint32_t receive_timestamp);
+
+ // Instructs NetEq to deliver 10 ms of audio data. The data is written to
+ // |output_audio|, which can hold (at least) |max_length| elements.
+ // The number of channels that were written to the output is provided in
+ // the output variable |num_channels|, and each channel contains
+ // |samples_per_channel| elements. If more than one channel is written,
+ // the samples are interleaved.
+ // The speech type is written to |type|, if |type| is not NULL.
+ // Returns kOK on success, or kFail in case of an error.
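+  //
+  // Minimal usage sketch (illustrative only; |neteq| points to a NetEq
+  // instance and buffer sizing is up to the caller), pulling 10 ms of audio
+  // for a stream with at most two channels at up to 48 kHz:
+  //   int16_t out[2 * 480];  // 10 ms * 48 kHz * 2 channels.
+  //   int samples_per_channel;
+  //   int num_channels;
+  //   NetEqOutputType type;
+  //   neteq->GetAudio(2 * 480, out, &samples_per_channel, &num_channels,
+  //                   &type);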
+ virtual int GetAudio(size_t max_length, int16_t* output_audio,
+ int* samples_per_channel, int* num_channels,
+ NetEqOutputType* type);
+
+ // Associates |rtp_payload_type| with |codec| and stores the information in
+ // the codec database. Returns kOK on success, kFail on failure.
+ virtual int RegisterPayloadType(enum NetEqDecoder codec,
+ uint8_t rtp_payload_type);
+
+ // Provides an externally created decoder object |decoder| to insert in the
+ // decoder database. The decoder implements a decoder of type |codec| and
+ // associates it with |rtp_payload_type|. Returns kOK on success, kFail on
+ // failure.
+ virtual int RegisterExternalDecoder(AudioDecoder* decoder,
+ enum NetEqDecoder codec,
+ uint8_t rtp_payload_type);
+
+ // Removes |rtp_payload_type| from the codec database. Returns 0 on success,
+ // -1 on failure.
+ virtual int RemovePayloadType(uint8_t rtp_payload_type);
+
+ virtual bool SetMinimumDelay(int delay_ms);
+
+ virtual bool SetMaximumDelay(int delay_ms);
+
+ virtual int LeastRequiredDelayMs() const;
+
+ virtual int SetTargetDelay() { return kNotImplemented; }
+
+ virtual int TargetDelay() { return kNotImplemented; }
+
+ virtual int CurrentDelay() { return kNotImplemented; }
+
+ // Sets the playout mode to |mode|.
+ virtual void SetPlayoutMode(NetEqPlayoutMode mode);
+
+ // Returns the current playout mode.
+ virtual NetEqPlayoutMode PlayoutMode() const;
+
+ // Writes the current network statistics to |stats|. The statistics are reset
+ // after the call.
+ virtual int NetworkStatistics(NetEqNetworkStatistics* stats);
+
+ // Writes the last packet waiting times (in ms) to |waiting_times|. The number
+ // of values written is no more than 100, but may be smaller if the interface
+  // is polled again before 100 packets have arrived.
+ virtual void WaitingTimes(std::vector<int>* waiting_times);
+
+ // Writes the current RTCP statistics to |stats|. The statistics are reset
+ // and a new report period is started with the call.
+ virtual void GetRtcpStatistics(RtcpStatistics* stats);
+
+  // Same as GetRtcpStatistics(), but does not reset anything.
+ virtual void GetRtcpStatisticsNoReset(RtcpStatistics* stats);
+
+ // Enables post-decode VAD. When enabled, GetAudio() will return
+ // kOutputVADPassive when the signal contains no speech.
+ virtual void EnableVad();
+
+ // Disables post-decode VAD.
+ virtual void DisableVad();
+
+ virtual bool GetPlayoutTimestamp(uint32_t* timestamp);
+
+ virtual int SetTargetNumberOfChannels() { return kNotImplemented; }
+
+ virtual int SetTargetSampleRate() { return kNotImplemented; }
+
+ // Returns the error code for the last occurred error. If no error has
+ // occurred, 0 is returned.
+ virtual int LastError();
+
+ // Returns the error code last returned by a decoder (audio or comfort noise).
+ // When LastError() returns kDecoderErrorCode or kComfortNoiseErrorCode, check
+ // this method to get the decoder's error code.
+ virtual int LastDecoderError();
+
+ // Flushes both the packet buffer and the sync buffer.
+ virtual void FlushBuffers();
+
+ virtual void PacketBufferStatistics(int* current_num_packets,
+ int* max_num_packets) const;
+
+  // Gets the sequence number and timestamp of the latest decoded RTP packet.
+  // This method is intended to facilitate NACK.
+ virtual int DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const;
+
+ // Sets background noise mode.
+ virtual void SetBackgroundNoiseMode(NetEqBackgroundNoiseMode mode);
+
+ // Gets background noise mode.
+ virtual NetEqBackgroundNoiseMode BackgroundNoiseMode() const;
+
+ // This accessor method is only intended for testing purposes.
+ virtual const SyncBuffer* sync_buffer_for_test() const;
+
+ protected:
+ static const int kOutputSizeMs = 10;
+ static const int kMaxFrameSize = 2880; // 60 ms @ 48 kHz.
+ // TODO(hlundin): Provide a better value for kSyncBufferSize.
+ static const int kSyncBufferSize = 2 * kMaxFrameSize;
+
+ // Inserts a new packet into NetEq. This is used by the InsertPacket method
+ // above. Returns 0 on success, otherwise an error code.
+ // TODO(hlundin): Merge this with InsertPacket above?
+ int InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
+ const uint8_t* payload,
+ int length_bytes,
+ uint32_t receive_timestamp,
+ bool is_sync_packet)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Delivers 10 ms of audio data. The data is written to |output|, which can
+ // hold (at least) |max_length| elements. The number of channels that were
+ // written to the output is provided in the output variable |num_channels|,
+ // and each channel contains |samples_per_channel| elements. If more than one
+ // channel is written, the samples are interleaved.
+ // Returns 0 on success, otherwise an error code.
+ int GetAudioInternal(size_t max_length,
+ int16_t* output,
+ int* samples_per_channel,
+ int* num_channels) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+  // Provides a decision to the GetAudioInternal method. The decision of what
+  // to do is written to |operation|. Packets to decode are written to
+ // |packet_list|, and a DTMF event to play is written to |dtmf_event|. When
+ // DTMF should be played, |play_dtmf| is set to true by the method.
+ // Returns 0 on success, otherwise an error code.
+ int GetDecision(Operations* operation,
+ PacketList* packet_list,
+ DtmfEvent* dtmf_event,
+ bool* play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Decodes the speech packets in |packet_list|, and writes the results to
+ // |decoded_buffer|, which is allocated to hold |decoded_buffer_length|
+ // elements. The length of the decoded data is written to |decoded_length|.
+ // The speech type -- speech or (codec-internal) comfort noise -- is written
+ // to |speech_type|. If |packet_list| contains any SID frames for RFC 3389
+ // comfort noise, those are not decoded.
+ int Decode(PacketList* packet_list,
+ Operations* operation,
+ int* decoded_length,
+ AudioDecoder::SpeechType* speech_type)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Sub-method to Decode(). Performs the actual decoding.
+ int DecodeLoop(PacketList* packet_list,
+ Operations* operation,
+ AudioDecoder* decoder,
+ int* decoded_length,
+ AudioDecoder::SpeechType* speech_type)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Sub-method which calls the Normal class to perform the normal operation.
+ void DoNormal(const int16_t* decoded_buffer,
+ size_t decoded_length,
+ AudioDecoder::SpeechType speech_type,
+ bool play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Sub-method which calls the Merge class to perform the merge operation.
+ void DoMerge(int16_t* decoded_buffer,
+ size_t decoded_length,
+ AudioDecoder::SpeechType speech_type,
+ bool play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Sub-method which calls the Expand class to perform the expand operation.
+ int DoExpand(bool play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Sub-method which calls the Accelerate class to perform the accelerate
+ // operation.
+ int DoAccelerate(int16_t* decoded_buffer,
+ size_t decoded_length,
+ AudioDecoder::SpeechType speech_type,
+ bool play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Sub-method which calls the PreemptiveExpand class to perform the
+  // preemptive expand operation.
+ int DoPreemptiveExpand(int16_t* decoded_buffer,
+ size_t decoded_length,
+ AudioDecoder::SpeechType speech_type,
+ bool play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Sub-method which calls the ComfortNoise class to generate RFC 3389 comfort
+ // noise. |packet_list| can either contain one SID frame to update the
+ // noise parameters, or no payload at all, in which case the previously
+ // received parameters are used.
+ int DoRfc3389Cng(PacketList* packet_list, bool play_dtmf)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Calls the audio decoder to generate codec-internal comfort noise when
+ // no packet was received.
+ void DoCodecInternalCng() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Calls the DtmfToneGenerator class to generate DTMF tones.
+ int DoDtmf(const DtmfEvent& dtmf_event, bool* play_dtmf)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Produces packet-loss concealment using alternative methods. If the codec
+ // has an internal PLC, it is called to generate samples. Otherwise, the
+ // method performs zero-stuffing.
+ void DoAlternativePlc(bool increase_timestamp)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Overdub DTMF on top of |output|.
+ int DtmfOverdub(const DtmfEvent& dtmf_event,
+ size_t num_channels,
+ int16_t* output) const EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Extracts packets from |packet_buffer_| to produce at least
+ // |required_samples| samples. The packets are inserted into |packet_list|.
+ // Returns the number of samples that the packets in the list will produce, or
+ // -1 in case of an error.
+ int ExtractPackets(int required_samples, PacketList* packet_list)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Resets various variables and objects to new values based on the sample
+ // rate |fs_hz| and the number of audio channels |channels|.
+ void SetSampleRateAndChannels(int fs_hz, size_t channels)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Returns the output type for the audio produced by the latest call to
+ // GetAudio().
+ NetEqOutputType LastOutputType() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Updates Expand and Merge.
+ virtual void UpdatePlcComponents(int fs_hz, size_t channels)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Creates DecisionLogic object for the given mode.
+ virtual void CreateDecisionLogic(NetEqPlayoutMode mode)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ const scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ const scoped_ptr<BufferLevelFilter> buffer_level_filter_
+ GUARDED_BY(crit_sect_);
+ const scoped_ptr<DecoderDatabase> decoder_database_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<DelayManager> delay_manager_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<DelayPeakDetector> delay_peak_detector_
+ GUARDED_BY(crit_sect_);
+ const scoped_ptr<DtmfBuffer> dtmf_buffer_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<DtmfToneGenerator> dtmf_tone_generator_
+ GUARDED_BY(crit_sect_);
+ const scoped_ptr<PacketBuffer> packet_buffer_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<PayloadSplitter> payload_splitter_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<TimestampScaler> timestamp_scaler_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<PostDecodeVad> vad_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<ExpandFactory> expand_factory_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<AccelerateFactory> accelerate_factory_
+ GUARDED_BY(crit_sect_);
+ const scoped_ptr<PreemptiveExpandFactory> preemptive_expand_factory_
+ GUARDED_BY(crit_sect_);
+
+ scoped_ptr<BackgroundNoise> background_noise_ GUARDED_BY(crit_sect_);
+ scoped_ptr<DecisionLogic> decision_logic_ GUARDED_BY(crit_sect_);
+ scoped_ptr<AudioMultiVector> algorithm_buffer_ GUARDED_BY(crit_sect_);
+ scoped_ptr<SyncBuffer> sync_buffer_ GUARDED_BY(crit_sect_);
+ scoped_ptr<Expand> expand_ GUARDED_BY(crit_sect_);
+ scoped_ptr<Normal> normal_ GUARDED_BY(crit_sect_);
+ scoped_ptr<Merge> merge_ GUARDED_BY(crit_sect_);
+ scoped_ptr<Accelerate> accelerate_ GUARDED_BY(crit_sect_);
+ scoped_ptr<PreemptiveExpand> preemptive_expand_ GUARDED_BY(crit_sect_);
+ RandomVector random_vector_ GUARDED_BY(crit_sect_);
+ scoped_ptr<ComfortNoise> comfort_noise_ GUARDED_BY(crit_sect_);
+ Rtcp rtcp_ GUARDED_BY(crit_sect_);
+ StatisticsCalculator stats_ GUARDED_BY(crit_sect_);
+ int fs_hz_ GUARDED_BY(crit_sect_);
+ int fs_mult_ GUARDED_BY(crit_sect_);
+ int output_size_samples_ GUARDED_BY(crit_sect_);
+ int decoder_frame_length_ GUARDED_BY(crit_sect_);
+ Modes last_mode_ GUARDED_BY(crit_sect_);
+ scoped_ptr<int16_t[]> mute_factor_array_ GUARDED_BY(crit_sect_);
+ size_t decoded_buffer_length_ GUARDED_BY(crit_sect_);
+ scoped_ptr<int16_t[]> decoded_buffer_ GUARDED_BY(crit_sect_);
+ uint32_t playout_timestamp_ GUARDED_BY(crit_sect_);
+ bool new_codec_ GUARDED_BY(crit_sect_);
+ uint32_t timestamp_ GUARDED_BY(crit_sect_);
+ bool reset_decoder_ GUARDED_BY(crit_sect_);
+ uint8_t current_rtp_payload_type_ GUARDED_BY(crit_sect_);
+ uint8_t current_cng_rtp_payload_type_ GUARDED_BY(crit_sect_);
+ uint32_t ssrc_ GUARDED_BY(crit_sect_);
+ bool first_packet_ GUARDED_BY(crit_sect_);
+ int error_code_ GUARDED_BY(crit_sect_); // Store last error code.
+ int decoder_error_code_ GUARDED_BY(crit_sect_);
+
+ // These values are used by the NACK module to estimate the time-to-play of
+ // a missing packet. Occasionally, NetEq might decide to decode more
+ // than one packet. Therefore, these values store the sequence number and
+ // timestamp of the first packet pulled from the packet buffer. In
+ // such cases, these values do not exactly represent the sequence number
+ // or timestamp associated with the 10 ms of audio pulled from NetEq. The
+ // NACK module is designed to compensate for this.
+ int decoded_packet_sequence_number_ GUARDED_BY(crit_sect_);
+ uint32_t decoded_packet_timestamp_ GUARDED_BY(crit_sect_);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(NetEqImpl);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_NETEQ_IMPL_H_
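The declarations above lean on WebRTC's thread-safety macros, which map onto Clang's -Wthread-safety analysis: GUARDED_BY(crit_sect_) marks data that may only be touched while the lock is held, and EXCLUSIVE_LOCKS_REQUIRED(crit_sect_) marks methods that assume the caller already holds it. The following stand-alone sketch is illustrative only; it is not part of the patch and uses a plain std::mutex wrapper instead of WebRTC's CriticalSectionWrapper and thread annotation header.

// Minimal sketch of Clang thread-safety annotations; compile with
// clang++ -std=c++11 -Wthread-safety. Names here are illustrative, not
// WebRTC's actual definitions.
#include <mutex>

#define CAPABILITY(x) __attribute__((capability(x)))
#define GUARDED_BY(x) __attribute__((guarded_by(x)))
#define EXCLUSIVE_LOCKS_REQUIRED(x) __attribute__((exclusive_locks_required(x)))
#define ACQUIRE() __attribute__((acquire_capability()))
#define RELEASE() __attribute__((release_capability()))

class CAPABILITY("mutex") Lock {
 public:
  void Acquire() ACQUIRE() { mutex_.lock(); }
  void Release() RELEASE() { mutex_.unlock(); }

 private:
  std::mutex mutex_;
};

class Counter {
 public:
  int Increment() {
    lock_.Acquire();
    int result = IncrementLocked();  // OK: the analyzer sees lock_ held here.
    lock_.Release();
    return result;
  }

 private:
  // Callers must hold lock_; calling this without it produces a compile-time
  // warning under -Wthread-safety, mirroring EXCLUSIVE_LOCKS_REQUIRED above.
  int IncrementLocked() EXCLUSIVE_LOCKS_REQUIRED(lock_) { return ++value_; }

  Lock lock_;
  int value_ GUARDED_BY(lock_) = 0;  // Mirrors the GUARDED_BY members above.
};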
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
new file mode 100644
index 00000000000..2e66487fae5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
@@ -0,0 +1,498 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/accelerate.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_payload_splitter.h"
+#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
+
+using ::testing::Return;
+using ::testing::ReturnNull;
+using ::testing::_;
+using ::testing::SetArgPointee;
+using ::testing::InSequence;
+using ::testing::Invoke;
+using ::testing::WithArg;
+
+namespace webrtc {
+
+// This function is called when inserting a packet list into the mock packet
+// buffer. The purpose is to delete all inserted packets properly, to avoid
+// memory leaks in the test.
+int DeletePacketsAndReturnOk(PacketList* packet_list) {
+ PacketBuffer::DeleteAllPackets(packet_list);
+ return PacketBuffer::kOK;
+}
+
+class NetEqImplTest : public ::testing::Test {
+ protected:
+ NetEqImplTest()
+ : neteq_(NULL),
+ config_(),
+ mock_buffer_level_filter_(NULL),
+ buffer_level_filter_(NULL),
+ use_mock_buffer_level_filter_(true),
+ mock_decoder_database_(NULL),
+ decoder_database_(NULL),
+ use_mock_decoder_database_(true),
+ mock_delay_peak_detector_(NULL),
+ delay_peak_detector_(NULL),
+ use_mock_delay_peak_detector_(true),
+ mock_delay_manager_(NULL),
+ delay_manager_(NULL),
+ use_mock_delay_manager_(true),
+ mock_dtmf_buffer_(NULL),
+ dtmf_buffer_(NULL),
+ use_mock_dtmf_buffer_(true),
+ mock_dtmf_tone_generator_(NULL),
+ dtmf_tone_generator_(NULL),
+ use_mock_dtmf_tone_generator_(true),
+ mock_packet_buffer_(NULL),
+ packet_buffer_(NULL),
+ use_mock_packet_buffer_(true),
+ mock_payload_splitter_(NULL),
+ payload_splitter_(NULL),
+ use_mock_payload_splitter_(true),
+ timestamp_scaler_(NULL) {
+ config_.sample_rate_hz = 8000;
+ }
+
+ void CreateInstance() {
+ if (use_mock_buffer_level_filter_) {
+ mock_buffer_level_filter_ = new MockBufferLevelFilter;
+ buffer_level_filter_ = mock_buffer_level_filter_;
+ } else {
+ buffer_level_filter_ = new BufferLevelFilter;
+ }
+ if (use_mock_decoder_database_) {
+ mock_decoder_database_ = new MockDecoderDatabase;
+ EXPECT_CALL(*mock_decoder_database_, GetActiveCngDecoder())
+ .WillOnce(ReturnNull());
+ decoder_database_ = mock_decoder_database_;
+ } else {
+ decoder_database_ = new DecoderDatabase;
+ }
+ if (use_mock_delay_peak_detector_) {
+ mock_delay_peak_detector_ = new MockDelayPeakDetector;
+ EXPECT_CALL(*mock_delay_peak_detector_, Reset()).Times(1);
+ delay_peak_detector_ = mock_delay_peak_detector_;
+ } else {
+ delay_peak_detector_ = new DelayPeakDetector;
+ }
+ if (use_mock_delay_manager_) {
+ mock_delay_manager_ = new MockDelayManager(config_.max_packets_in_buffer,
+ delay_peak_detector_);
+ EXPECT_CALL(*mock_delay_manager_, set_streaming_mode(false)).Times(1);
+ delay_manager_ = mock_delay_manager_;
+ } else {
+ delay_manager_ =
+ new DelayManager(config_.max_packets_in_buffer, delay_peak_detector_);
+ }
+ if (use_mock_dtmf_buffer_) {
+ mock_dtmf_buffer_ = new MockDtmfBuffer(config_.sample_rate_hz);
+ dtmf_buffer_ = mock_dtmf_buffer_;
+ } else {
+ dtmf_buffer_ = new DtmfBuffer(config_.sample_rate_hz);
+ }
+ if (use_mock_dtmf_tone_generator_) {
+ mock_dtmf_tone_generator_ = new MockDtmfToneGenerator;
+ dtmf_tone_generator_ = mock_dtmf_tone_generator_;
+ } else {
+ dtmf_tone_generator_ = new DtmfToneGenerator;
+ }
+ if (use_mock_packet_buffer_) {
+ mock_packet_buffer_ = new MockPacketBuffer(config_.max_packets_in_buffer);
+ packet_buffer_ = mock_packet_buffer_;
+ } else {
+ packet_buffer_ = new PacketBuffer(config_.max_packets_in_buffer);
+ }
+ if (use_mock_payload_splitter_) {
+ mock_payload_splitter_ = new MockPayloadSplitter;
+ payload_splitter_ = mock_payload_splitter_;
+ } else {
+ payload_splitter_ = new PayloadSplitter;
+ }
+ timestamp_scaler_ = new TimestampScaler(*decoder_database_);
+ AccelerateFactory* accelerate_factory = new AccelerateFactory;
+ ExpandFactory* expand_factory = new ExpandFactory;
+ PreemptiveExpandFactory* preemptive_expand_factory =
+ new PreemptiveExpandFactory;
+
+ neteq_ = new NetEqImpl(config_.sample_rate_hz,
+ buffer_level_filter_,
+ decoder_database_,
+ delay_manager_,
+ delay_peak_detector_,
+ dtmf_buffer_,
+ dtmf_tone_generator_,
+ packet_buffer_,
+ payload_splitter_,
+ timestamp_scaler_,
+ accelerate_factory,
+ expand_factory,
+ preemptive_expand_factory);
+ ASSERT_TRUE(neteq_ != NULL);
+ }
+
+ void UseNoMocks() {
+ ASSERT_TRUE(neteq_ == NULL) << "Must call UseNoMocks before CreateInstance";
+ use_mock_buffer_level_filter_ = false;
+ use_mock_decoder_database_ = false;
+ use_mock_delay_peak_detector_ = false;
+ use_mock_delay_manager_ = false;
+ use_mock_dtmf_buffer_ = false;
+ use_mock_dtmf_tone_generator_ = false;
+ use_mock_packet_buffer_ = false;
+ use_mock_payload_splitter_ = false;
+ }
+
+ virtual ~NetEqImplTest() {
+ if (use_mock_buffer_level_filter_) {
+ EXPECT_CALL(*mock_buffer_level_filter_, Die()).Times(1);
+ }
+ if (use_mock_decoder_database_) {
+ EXPECT_CALL(*mock_decoder_database_, Die()).Times(1);
+ }
+ if (use_mock_delay_manager_) {
+ EXPECT_CALL(*mock_delay_manager_, Die()).Times(1);
+ }
+ if (use_mock_delay_peak_detector_) {
+ EXPECT_CALL(*mock_delay_peak_detector_, Die()).Times(1);
+ }
+ if (use_mock_dtmf_buffer_) {
+ EXPECT_CALL(*mock_dtmf_buffer_, Die()).Times(1);
+ }
+ if (use_mock_dtmf_tone_generator_) {
+ EXPECT_CALL(*mock_dtmf_tone_generator_, Die()).Times(1);
+ }
+ if (use_mock_packet_buffer_) {
+ EXPECT_CALL(*mock_packet_buffer_, Die()).Times(1);
+ }
+ delete neteq_;
+ }
+
+ NetEqImpl* neteq_;
+ NetEq::Config config_;
+ MockBufferLevelFilter* mock_buffer_level_filter_;
+ BufferLevelFilter* buffer_level_filter_;
+ bool use_mock_buffer_level_filter_;
+ MockDecoderDatabase* mock_decoder_database_;
+ DecoderDatabase* decoder_database_;
+ bool use_mock_decoder_database_;
+ MockDelayPeakDetector* mock_delay_peak_detector_;
+ DelayPeakDetector* delay_peak_detector_;
+ bool use_mock_delay_peak_detector_;
+ MockDelayManager* mock_delay_manager_;
+ DelayManager* delay_manager_;
+ bool use_mock_delay_manager_;
+ MockDtmfBuffer* mock_dtmf_buffer_;
+ DtmfBuffer* dtmf_buffer_;
+ bool use_mock_dtmf_buffer_;
+ MockDtmfToneGenerator* mock_dtmf_tone_generator_;
+ DtmfToneGenerator* dtmf_tone_generator_;
+ bool use_mock_dtmf_tone_generator_;
+ MockPacketBuffer* mock_packet_buffer_;
+ PacketBuffer* packet_buffer_;
+ bool use_mock_packet_buffer_;
+ MockPayloadSplitter* mock_payload_splitter_;
+ PayloadSplitter* payload_splitter_;
+ bool use_mock_payload_splitter_;
+ TimestampScaler* timestamp_scaler_;
+};
+
+
+// This tests the interface class NetEq.
+// TODO(hlundin): Move to separate file?
+TEST(NetEq, CreateAndDestroy) {
+ NetEq::Config config;
+ NetEq* neteq = NetEq::Create(config);
+ delete neteq;
+}
+
+TEST_F(NetEqImplTest, RegisterPayloadType) {
+ CreateInstance();
+ uint8_t rtp_payload_type = 0;
+ NetEqDecoder codec_type = kDecoderPCMu;
+ EXPECT_CALL(*mock_decoder_database_,
+ RegisterPayload(rtp_payload_type, codec_type));
+ neteq_->RegisterPayloadType(codec_type, rtp_payload_type);
+}
+
+TEST_F(NetEqImplTest, RemovePayloadType) {
+ CreateInstance();
+ uint8_t rtp_payload_type = 0;
+ EXPECT_CALL(*mock_decoder_database_, Remove(rtp_payload_type))
+ .WillOnce(Return(DecoderDatabase::kDecoderNotFound));
+ // Check that kFail is returned when database returns kDecoderNotFound.
+ EXPECT_EQ(NetEq::kFail, neteq_->RemovePayloadType(rtp_payload_type));
+}
+
+TEST_F(NetEqImplTest, InsertPacket) {
+ CreateInstance();
+ const int kPayloadLength = 100;
+ const uint8_t kPayloadType = 0;
+ const uint16_t kFirstSequenceNumber = 0x1234;
+ const uint32_t kFirstTimestamp = 0x12345678;
+ const uint32_t kSsrc = 0x87654321;
+ const uint32_t kFirstReceiveTime = 17;
+ uint8_t payload[kPayloadLength] = {0};
+ WebRtcRTPHeader rtp_header;
+ rtp_header.header.payloadType = kPayloadType;
+ rtp_header.header.sequenceNumber = kFirstSequenceNumber;
+ rtp_header.header.timestamp = kFirstTimestamp;
+ rtp_header.header.ssrc = kSsrc;
+
+ // Create a mock decoder object.
+ MockAudioDecoder mock_decoder;
+ // BWE update function called with first packet.
+ EXPECT_CALL(mock_decoder, IncomingPacket(_,
+ kPayloadLength,
+ kFirstSequenceNumber,
+ kFirstTimestamp,
+ kFirstReceiveTime));
+ // BWE update function called with second packet.
+ EXPECT_CALL(mock_decoder, IncomingPacket(_,
+ kPayloadLength,
+ kFirstSequenceNumber + 1,
+ kFirstTimestamp + 160,
+ kFirstReceiveTime + 155));
+ EXPECT_CALL(mock_decoder, Die()).Times(1); // Called when deleted.
+
+ // Expectations for decoder database.
+ EXPECT_CALL(*mock_decoder_database_, IsRed(kPayloadType))
+ .WillRepeatedly(Return(false)); // This is not RED.
+ EXPECT_CALL(*mock_decoder_database_, CheckPayloadTypes(_))
+ .Times(2)
+ .WillRepeatedly(Return(DecoderDatabase::kOK)); // Payload type is valid.
+ EXPECT_CALL(*mock_decoder_database_, IsDtmf(kPayloadType))
+ .WillRepeatedly(Return(false)); // This is not DTMF.
+ EXPECT_CALL(*mock_decoder_database_, GetDecoder(kPayloadType))
+ .Times(3)
+ .WillRepeatedly(Return(&mock_decoder));
+ EXPECT_CALL(*mock_decoder_database_, IsComfortNoise(kPayloadType))
+ .WillRepeatedly(Return(false)); // This is not CNG.
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderPCMu;
+ EXPECT_CALL(*mock_decoder_database_, GetDecoderInfo(kPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ // Expectations for packet buffer.
+ EXPECT_CALL(*mock_packet_buffer_, NumPacketsInBuffer())
+ .WillOnce(Return(0)) // First packet.
+ .WillOnce(Return(1)) // Second packet.
+ .WillOnce(Return(2)); // Second packet, checking after it was inserted.
+ EXPECT_CALL(*mock_packet_buffer_, Empty())
+ .WillOnce(Return(false)); // Called once after first packet is inserted.
+ EXPECT_CALL(*mock_packet_buffer_, Flush())
+ .Times(1);
+ EXPECT_CALL(*mock_packet_buffer_, InsertPacketList(_, _, _, _))
+ .Times(2)
+ .WillRepeatedly(DoAll(SetArgPointee<2>(kPayloadType),
+ WithArg<0>(Invoke(DeletePacketsAndReturnOk))));
+ // SetArgPointee<2>(kPayloadType) means that the third argument (zero-based
+ // index) is a pointer, and the variable pointed to is set to kPayloadType.
+ // Also invoke the function DeletePacketsAndReturnOk to properly delete all
+ // packets in the list (to avoid memory leaks in the test).
+ EXPECT_CALL(*mock_packet_buffer_, NextRtpHeader())
+ .Times(1)
+ .WillOnce(Return(&rtp_header.header));
+
+ // Expectations for DTMF buffer.
+ EXPECT_CALL(*mock_dtmf_buffer_, Flush())
+ .Times(1);
+
+ // Expectations for delay manager.
+ {
+ // All expectations within this block must be called in this specific order.
+ InSequence sequence; // Dummy variable.
+ // Expectations when the first packet is inserted.
+ EXPECT_CALL(*mock_delay_manager_, LastDecoderType(kDecoderPCMu))
+ .Times(1);
+ EXPECT_CALL(*mock_delay_manager_, last_pack_cng_or_dtmf())
+ .Times(2)
+ .WillRepeatedly(Return(-1));
+ EXPECT_CALL(*mock_delay_manager_, set_last_pack_cng_or_dtmf(0))
+ .Times(1);
+ EXPECT_CALL(*mock_delay_manager_, ResetPacketIatCount()).Times(1);
+ // Expectations when the second packet is inserted. Slightly different.
+ EXPECT_CALL(*mock_delay_manager_, LastDecoderType(kDecoderPCMu))
+ .Times(1);
+ EXPECT_CALL(*mock_delay_manager_, last_pack_cng_or_dtmf())
+ .WillOnce(Return(0));
+ EXPECT_CALL(*mock_delay_manager_, SetPacketAudioLength(30))
+ .WillOnce(Return(0));
+ }
+
+ // Expectations for payload splitter.
+ EXPECT_CALL(*mock_payload_splitter_, SplitAudio(_, _))
+ .Times(2)
+ .WillRepeatedly(Return(PayloadSplitter::kOK));
+
+ // Insert first packet.
+ neteq_->InsertPacket(rtp_header, payload, kPayloadLength, kFirstReceiveTime);
+
+ // Insert second packet.
+ rtp_header.header.timestamp += 160;
+ rtp_header.header.sequenceNumber += 1;
+ neteq_->InsertPacket(rtp_header, payload, kPayloadLength,
+ kFirstReceiveTime + 155);
+}
+
+TEST_F(NetEqImplTest, InsertPacketsUntilBufferIsFull) {
+ UseNoMocks();
+ CreateInstance();
+
+ const int kPayloadLengthSamples = 80;
+ const size_t kPayloadLengthBytes = 2 * kPayloadLengthSamples; // PCM 16-bit.
+ const uint8_t kPayloadType = 17; // Just an arbitrary number.
+ const uint32_t kReceiveTime = 17; // Value doesn't matter for this test.
+ uint8_t payload[kPayloadLengthBytes] = {0};
+ WebRtcRTPHeader rtp_header;
+ rtp_header.header.payloadType = kPayloadType;
+ rtp_header.header.sequenceNumber = 0x1234;
+ rtp_header.header.timestamp = 0x12345678;
+ rtp_header.header.ssrc = 0x87654321;
+
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->RegisterPayloadType(kDecoderPCM16B, kPayloadType));
+
+ // Insert packets. The buffer should not flush.
+ for (int i = 1; i <= config_.max_packets_in_buffer; ++i) {
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->InsertPacket(
+ rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ rtp_header.header.timestamp += kPayloadLengthSamples;
+ rtp_header.header.sequenceNumber += 1;
+ EXPECT_EQ(i, packet_buffer_->NumPacketsInBuffer());
+ }
+
+ // Insert one more packet and make sure the buffer got flushed. That is, it
+ // should now hold only a single packet.
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->InsertPacket(
+ rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ EXPECT_EQ(1, packet_buffer_->NumPacketsInBuffer());
+ const RTPHeader* test_header = packet_buffer_->NextRtpHeader();
+ EXPECT_EQ(rtp_header.header.timestamp, test_header->timestamp);
+ EXPECT_EQ(rtp_header.header.sequenceNumber, test_header->sequenceNumber);
+}
+
+// This test verifies that timestamps propagate from the incoming packets
+// through to the sync buffer and to the playout timestamp.
+TEST_F(NetEqImplTest, VerifyTimestampPropagation) {
+ UseNoMocks();
+ CreateInstance();
+
+ const uint8_t kPayloadType = 17; // Just an arbitrary number.
+ const uint32_t kReceiveTime = 17; // Value doesn't matter for this test.
+ const int kSampleRateHz = 8000;
+ const int kPayloadLengthSamples = 10 * kSampleRateHz / 1000; // 10 ms.
+ const size_t kPayloadLengthBytes = kPayloadLengthSamples;
+ uint8_t payload[kPayloadLengthBytes] = {0};
+ WebRtcRTPHeader rtp_header;
+ rtp_header.header.payloadType = kPayloadType;
+ rtp_header.header.sequenceNumber = 0x1234;
+ rtp_header.header.timestamp = 0x12345678;
+ rtp_header.header.ssrc = 0x87654321;
+
+ // This is a dummy decoder that produces as many output samples as the input
+ // has bytes. The output is an increasing series, starting at 1 for the first
+ // sample, and then increasing by 1 for each sample.
+ class CountingSamplesDecoder : public AudioDecoder {
+ public:
+ explicit CountingSamplesDecoder(enum NetEqDecoder type)
+ : AudioDecoder(type), next_value_(1) {}
+
+ // Produce as many samples as input bytes (|encoded_len|).
+ virtual int Decode(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) {
+ for (size_t i = 0; i < encoded_len; ++i) {
+ decoded[i] = next_value_++;
+ }
+ *speech_type = kSpeech;
+ return encoded_len;
+ }
+
+ virtual int Init() {
+ next_value_ = 1;
+ return 0;
+ }
+
+ uint16_t next_value() const { return next_value_; }
+
+ private:
+ int16_t next_value_;
+ } decoder_(kDecoderPCM16B);
+
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->RegisterExternalDecoder(
+ &decoder_, kDecoderPCM16B, kPayloadType));
+
+ // Insert one packet.
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->InsertPacket(
+ rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+
+ // Pull audio once.
+ const int kMaxOutputSize = 10 * kSampleRateHz / 1000;
+ int16_t output[kMaxOutputSize];
+ int samples_per_channel;
+ int num_channels;
+ NetEqOutputType type;
+ EXPECT_EQ(
+ NetEq::kOK,
+ neteq_->GetAudio(
+ kMaxOutputSize, output, &samples_per_channel, &num_channels, &type));
+ ASSERT_EQ(kMaxOutputSize, samples_per_channel);
+ EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(kOutputNormal, type);
+
+ // Start with a simple check that the fake decoder is behaving as expected.
+ EXPECT_EQ(kPayloadLengthSamples, decoder_.next_value() - 1);
+
+ // The value of the last output sample equals the number of samples played
+ // from the decoded packet. Thus, this number plus the RTP timestamp should
+ // match the playout timestamp.
+ uint32_t timestamp = 0;
+ EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&timestamp));
+ EXPECT_EQ(rtp_header.header.timestamp + output[samples_per_channel - 1],
+ timestamp);
+
+ // Check the timestamp for the last value in the sync buffer. This should
+ // be one full frame length ahead of the RTP timestamp.
+ const SyncBuffer* sync_buffer = neteq_->sync_buffer_for_test();
+ ASSERT_TRUE(sync_buffer != NULL);
+ EXPECT_EQ(rtp_header.header.timestamp + kPayloadLengthSamples,
+ sync_buffer->end_timestamp());
+
+ // Check that the number of samples still to play from the sync buffer adds
+ // up with what was already played out.
+ EXPECT_EQ(kPayloadLengthSamples - output[samples_per_channel - 1],
+ static_cast<int>(sync_buffer->FutureLength()));
+}
+
+} // namespace webrtc
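The InsertPacket test above relies on the gmock action composition DoAll(SetArgPointee<2>(...), WithArg<0>(Invoke(...))): the first action writes a value through a pointer argument, while the second forwards a single argument to a free function whose return value becomes the mock's return value. A self-contained sketch of the same pattern follows; the Store interface is hypothetical and used purely for illustration.

// Stand-alone gmock sketch of DoAll + SetArgPointee + WithArg(Invoke(...)).
// Only the action plumbing mirrors the InsertPacket expectations above.
#include <cstring>

#include "gmock/gmock.h"
#include "gtest/gtest.h"

using ::testing::_;
using ::testing::DoAll;
using ::testing::Invoke;
using ::testing::SetArgPointee;
using ::testing::WithArg;

class Store {
 public:
  virtual ~Store() {}
  // Writes a status code to |status| and returns the number of bytes stored.
  virtual int Put(const char* data, int* status) = 0;
};

class MockStore : public Store {
 public:
  MOCK_METHOD2(Put, int(const char* data, int* status));
};

// Invoked with argument 0 only, thanks to WithArg<0>; its return value
// becomes the return value of the whole DoAll.
int CountBytes(const char* data) { return static_cast<int>(std::strlen(data)); }

TEST(GmockActionSketch, SetPointeeAndInvoke) {
  MockStore store;
  EXPECT_CALL(store, Put(_, _))
      .WillOnce(DoAll(SetArgPointee<1>(0),               // *status = 0.
                      WithArg<0>(Invoke(CountBytes))));  // Returns 5 below.
  int status = -1;
  EXPECT_EQ(5, store.Put("hello", &status));
  EXPECT_EQ(0, status);
}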
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_statistics.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_statistics.h
deleted file mode 100644
index bba5b06b964..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_statistics.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Definitions of statistics data structures for MCU and DSP sides.
- */
-
-#include "typedefs.h"
-
-#ifndef NETEQ_STATISTICS_H
-#define NETEQ_STATISTICS_H
-
-/*
- * Statistics struct on DSP side
- */
-typedef struct
-{
-
- /* variables for in-call statistics; queried through WebRtcNetEQ_GetNetworkStatistics */
- uint32_t expandLength; /* number of samples produced through expand */
- uint32_t preemptiveLength; /* number of samples produced through pre-emptive
- expand */
- uint32_t accelerateLength; /* number of samples removed through accelerate */
- int addedSamples; /* number of samples inserted in off mode */
-
- /* variables for post-call statistics; queried through WebRtcNetEQ_GetJitterStatistics */
- uint32_t expandedVoiceSamples; /* number of voice samples produced through expand */
- uint32_t expandedNoiseSamples; /* number of noise (background) samples produced
- through expand */
-
-} DSPStats_t;
-
-typedef struct {
- int preemptive_expand_bgn_samples;
- int preemptive_expand_normal_samples;
-
- int expand_bgn_samples;
- int expand_normal_samples;
-
- int merge_expand_bgn_samples;
- int merge_expand_normal_samples;
-
- int accelerate_bgn_samples;
- int accelarate_normal_samples;
-} ActivityStats;
-
-
-#endif
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
new file mode 100644
index 00000000000..3c695c81d04
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
@@ -0,0 +1,421 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Test to verify correct stereo and multi-channel operation.
+
+#include <algorithm>
+#include <string>
+#include <list>
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+
+namespace webrtc {
+
+struct TestParameters {
+ int frame_size;
+ int sample_rate;
+ int num_channels;
+};
+
+// This is a parameterized test. The test parameters are supplied through a
+// TestParameters struct, which is obtained through the GetParam() method.
+//
+// The objective of the test is to create a mono input signal and a
+// multi-channel input signal, where each channel is identical to the mono
+// input channel. The two input signals are processed through their respective
+// NetEq instances. After that, the output signals are compared. The expected
+// result is that each channel in the multi-channel output is identical to the
+// mono output.
+class NetEqStereoTest : public ::testing::TestWithParam<TestParameters> {
+ protected:
+ static const int kTimeStepMs = 10;
+ static const int kMaxBlockSize = 480; // 10 ms @ 48 kHz.
+ static const uint8_t kPayloadTypeMono = 95;
+ static const uint8_t kPayloadTypeMulti = 96;
+
+ NetEqStereoTest()
+ : num_channels_(GetParam().num_channels),
+ sample_rate_hz_(GetParam().sample_rate),
+ samples_per_ms_(sample_rate_hz_ / 1000),
+ frame_size_ms_(GetParam().frame_size),
+ frame_size_samples_(frame_size_ms_ * samples_per_ms_),
+ output_size_samples_(10 * samples_per_ms_),
+ rtp_generator_mono_(samples_per_ms_),
+ rtp_generator_(samples_per_ms_),
+ payload_size_bytes_(0),
+ multi_payload_size_bytes_(0),
+ last_send_time_(0),
+ last_arrival_time_(0) {
+ NetEq::Config config;
+ config.sample_rate_hz = sample_rate_hz_;
+ neteq_mono_ = NetEq::Create(config);
+ neteq_ = NetEq::Create(config);
+ input_ = new int16_t[frame_size_samples_];
+ encoded_ = new uint8_t[2 * frame_size_samples_];
+ input_multi_channel_ = new int16_t[frame_size_samples_ * num_channels_];
+ encoded_multi_channel_ = new uint8_t[frame_size_samples_ * 2 *
+ num_channels_];
+ output_multi_channel_ = new int16_t[kMaxBlockSize * num_channels_];
+ }
+
+ ~NetEqStereoTest() {
+ delete neteq_mono_;
+ delete neteq_;
+ delete [] input_;
+ delete [] encoded_;
+ delete [] input_multi_channel_;
+ delete [] encoded_multi_channel_;
+ delete [] output_multi_channel_;
+ }
+
+ virtual void SetUp() {
+ const std::string file_name =
+ webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+ input_file_.reset(new test::InputAudioFile(file_name));
+ NetEqDecoder mono_decoder;
+ NetEqDecoder multi_decoder;
+ switch (sample_rate_hz_) {
+ case 8000:
+ mono_decoder = kDecoderPCM16B;
+ if (num_channels_ == 2) {
+ multi_decoder = kDecoderPCM16B_2ch;
+ } else if (num_channels_ == 5) {
+ multi_decoder = kDecoderPCM16B_5ch;
+ } else {
+ FAIL() << "Only 2 and 5 channels supported for 8000 Hz.";
+ }
+ break;
+ case 16000:
+ mono_decoder = kDecoderPCM16Bwb;
+ if (num_channels_ == 2) {
+ multi_decoder = kDecoderPCM16Bwb_2ch;
+ } else {
+ FAIL() << "More than 2 channels is not supported for 16000 Hz.";
+ }
+ break;
+ case 32000:
+ mono_decoder = kDecoderPCM16Bswb32kHz;
+ if (num_channels_ == 2) {
+ multi_decoder = kDecoderPCM16Bswb32kHz_2ch;
+ } else {
+ FAIL() << "More than 2 channels is not supported for 32000 Hz.";
+ }
+ break;
+ case 48000:
+ mono_decoder = kDecoderPCM16Bswb48kHz;
+ if (num_channels_ == 2) {
+ multi_decoder = kDecoderPCM16Bswb48kHz_2ch;
+ } else {
+ FAIL() << "More than 2 channels is not supported for 48000 Hz.";
+ }
+ break;
+ default:
+ FAIL() << "We shouldn't get here.";
+ }
+ ASSERT_EQ(NetEq::kOK,
+ neteq_mono_->RegisterPayloadType(mono_decoder,
+ kPayloadTypeMono));
+ ASSERT_EQ(NetEq::kOK,
+ neteq_->RegisterPayloadType(multi_decoder,
+ kPayloadTypeMulti));
+ }
+
+ virtual void TearDown() {}
+
+ int GetNewPackets() {
+ if (!input_file_->Read(frame_size_samples_, input_)) {
+ return -1;
+ }
+ payload_size_bytes_ = WebRtcPcm16b_Encode(input_, frame_size_samples_,
+ encoded_);
+ if (frame_size_samples_ * 2 != payload_size_bytes_) {
+ return -1;
+ }
+ int next_send_time = rtp_generator_mono_.GetRtpHeader(kPayloadTypeMono,
+ frame_size_samples_,
+ &rtp_header_mono_);
+ test::InputAudioFile::DuplicateInterleaved(input_, frame_size_samples_,
+ num_channels_,
+ input_multi_channel_);
+ multi_payload_size_bytes_ = WebRtcPcm16b_Encode(
+ input_multi_channel_, frame_size_samples_ * num_channels_,
+ encoded_multi_channel_);
+ if (frame_size_samples_ * 2 * num_channels_ != multi_payload_size_bytes_) {
+ return -1;
+ }
+ rtp_generator_.GetRtpHeader(kPayloadTypeMulti, frame_size_samples_,
+ &rtp_header_);
+ return next_send_time;
+ }
+
+ void VerifyOutput(size_t num_samples) {
+ for (size_t i = 0; i < num_samples; ++i) {
+ for (int j = 0; j < num_channels_; ++j) {
+ ASSERT_EQ(output_[i], output_multi_channel_[i * num_channels_ + j]) <<
+ "Diff in sample " << i << ", channel " << j << ".";
+ }
+ }
+ }
+
+ virtual int GetArrivalTime(int send_time) {
+ int arrival_time = last_arrival_time_ + (send_time - last_send_time_);
+ last_send_time_ = send_time;
+ last_arrival_time_ = arrival_time;
+ return arrival_time;
+ }
+
+ virtual bool Lost() { return false; }
+
+ void RunTest(int num_loops) {
+ // Get next input packets (mono and multi-channel).
+ int next_send_time;
+ int next_arrival_time;
+ do {
+ next_send_time = GetNewPackets();
+ ASSERT_NE(-1, next_send_time);
+ next_arrival_time = GetArrivalTime(next_send_time);
+ } while (Lost()); // If lost, immediately read the next packet.
+
+ int time_now = 0;
+ for (int k = 0; k < num_loops; ++k) {
+ while (time_now >= next_arrival_time) {
+ // Insert packet in mono instance.
+ ASSERT_EQ(NetEq::kOK,
+ neteq_mono_->InsertPacket(rtp_header_mono_, encoded_,
+ payload_size_bytes_,
+ next_arrival_time));
+ // Insert packet in multi-channel instance.
+ ASSERT_EQ(NetEq::kOK,
+ neteq_->InsertPacket(rtp_header_, encoded_multi_channel_,
+ multi_payload_size_bytes_,
+ next_arrival_time));
+ // Get next input packets (mono and multi-channel).
+ do {
+ next_send_time = GetNewPackets();
+ ASSERT_NE(-1, next_send_time);
+ next_arrival_time = GetArrivalTime(next_send_time);
+ } while (Lost()); // If lost, immediately read the next packet.
+ }
+ NetEqOutputType output_type;
+ // Get audio from mono instance.
+ int samples_per_channel;
+ int num_channels;
+ EXPECT_EQ(NetEq::kOK,
+ neteq_mono_->GetAudio(kMaxBlockSize, output_,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(output_size_samples_, samples_per_channel);
+ // Get audio from multi-channel instance.
+ ASSERT_EQ(NetEq::kOK,
+ neteq_->GetAudio(kMaxBlockSize * num_channels_,
+ output_multi_channel_,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ EXPECT_EQ(num_channels_, num_channels);
+ EXPECT_EQ(output_size_samples_, samples_per_channel);
+ std::ostringstream ss;
+ ss << "Lap number " << k << ".";
+ SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
+ // Compare mono and multi-channel.
+ ASSERT_NO_FATAL_FAILURE(VerifyOutput(output_size_samples_));
+
+ time_now += kTimeStepMs;
+ }
+ }
+
+ const int num_channels_;
+ const int sample_rate_hz_;
+ const int samples_per_ms_;
+ const int frame_size_ms_;
+ const int frame_size_samples_;
+ const int output_size_samples_;
+ NetEq* neteq_mono_;
+ NetEq* neteq_;
+ test::RtpGenerator rtp_generator_mono_;
+ test::RtpGenerator rtp_generator_;
+ int16_t* input_;
+ int16_t* input_multi_channel_;
+ uint8_t* encoded_;
+ uint8_t* encoded_multi_channel_;
+ int16_t output_[kMaxBlockSize];
+ int16_t* output_multi_channel_;
+ WebRtcRTPHeader rtp_header_mono_;
+ WebRtcRTPHeader rtp_header_;
+ int payload_size_bytes_;
+ int multi_payload_size_bytes_;
+ int last_send_time_;
+ int last_arrival_time_;
+ scoped_ptr<test::InputAudioFile> input_file_;
+};
+
+class NetEqStereoTestNoJitter : public NetEqStereoTest {
+ protected:
+ NetEqStereoTestNoJitter()
+ : NetEqStereoTest() {
+ // Start the sender 100 ms before the receiver to pre-fill the buffer.
+ // This is to avoid doing preemptive expand early in the test.
+ // TODO(hlundin): Mock the decision making instead to control the modes.
+ last_arrival_time_ = -100;
+ }
+};
+
+TEST_P(NetEqStereoTestNoJitter, DISABLED_ON_ANDROID(RunTest)) {
+ RunTest(8);
+}
+
+class NetEqStereoTestPositiveDrift : public NetEqStereoTest {
+ protected:
+ NetEqStereoTestPositiveDrift()
+ : NetEqStereoTest(),
+ drift_factor(0.9) {
+ // Start the sender 100 ms before the receiver to pre-fill the buffer.
+ // This is to avoid doing preemptive expand early in the test.
+ // TODO(hlundin): Mock the decision making instead to control the modes.
+ last_arrival_time_ = -100;
+ }
+ virtual int GetArrivalTime(int send_time) {
+ int arrival_time = last_arrival_time_ +
+ drift_factor * (send_time - last_send_time_);
+ last_send_time_ = send_time;
+ last_arrival_time_ = arrival_time;
+ return arrival_time;
+ }
+
+ double drift_factor;
+};
+
+TEST_P(NetEqStereoTestPositiveDrift, DISABLED_ON_ANDROID(RunTest)) {
+ RunTest(100);
+}
+
+class NetEqStereoTestNegativeDrift : public NetEqStereoTestPositiveDrift {
+ protected:
+ NetEqStereoTestNegativeDrift()
+ : NetEqStereoTestPositiveDrift() {
+ drift_factor = 1.1;
+ last_arrival_time_ = 0;
+ }
+};
+
+TEST_P(NetEqStereoTestNegativeDrift, DISABLED_ON_ANDROID(RunTest)) {
+ RunTest(100);
+}
+
+class NetEqStereoTestDelays : public NetEqStereoTest {
+ protected:
+ static const int kDelayInterval = 10;
+ static const int kDelay = 1000;
+ NetEqStereoTestDelays()
+ : NetEqStereoTest(),
+ frame_index_(0) {
+ }
+
+ virtual int GetArrivalTime(int send_time) {
+ // Deliver immediately, unless we have a back-log.
+ int arrival_time = std::min(last_arrival_time_, send_time);
+ if (++frame_index_ % kDelayInterval == 0) {
+ // Delay this packet.
+ arrival_time += kDelay;
+ }
+ last_send_time_ = send_time;
+ last_arrival_time_ = arrival_time;
+ return arrival_time;
+ }
+
+ int frame_index_;
+};
+
+TEST_P(NetEqStereoTestDelays, DISABLED_ON_ANDROID(RunTest)) {
+ RunTest(1000);
+}
+
+class NetEqStereoTestLosses : public NetEqStereoTest {
+ protected:
+ static const int kLossInterval = 10;
+ NetEqStereoTestLosses()
+ : NetEqStereoTest(),
+ frame_index_(0) {
+ }
+
+ virtual bool Lost() {
+ return (++frame_index_) % kLossInterval == 0;
+ }
+
+ int frame_index_;
+};
+
+TEST_P(NetEqStereoTestLosses, DISABLED_ON_ANDROID(RunTest)) {
+ RunTest(100);
+}
+
+
+// Creates a list of parameter sets.
+std::list<TestParameters> GetTestParameters() {
+ std::list<TestParameters> l;
+ const int sample_rates[] = {8000, 16000, 32000};
+ const int num_rates = sizeof(sample_rates) / sizeof(sample_rates[0]);
+ // Loop through sample rates.
+ for (int rate_index = 0; rate_index < num_rates; ++rate_index) {
+ int sample_rate = sample_rates[rate_index];
+ // Loop through all frame sizes between 10 and 60 ms.
+ for (int frame_size = 10; frame_size <= 60; frame_size += 10) {
+ TestParameters p;
+ p.frame_size = frame_size;
+ p.sample_rate = sample_rate;
+ p.num_channels = 2;
+ l.push_back(p);
+ if (sample_rate == 8000) {
+ // Add a five-channel test for 8000 Hz.
+ p.num_channels = 5;
+ l.push_back(p);
+ }
+ }
+ }
+ return l;
+}
+
+// Pretty-printing the test parameters in case of an error.
+void PrintTo(const TestParameters& p, ::std::ostream* os) {
+ *os << "{frame_size = " << p.frame_size <<
+ ", num_channels = " << p.num_channels <<
+ ", sample_rate = " << p.sample_rate << "}";
+}
+
+// Instantiate the tests. Each test is instantiated using the function above,
+// so that all different parameter combinations are tested.
+INSTANTIATE_TEST_CASE_P(MultiChannel,
+ NetEqStereoTestNoJitter,
+ ::testing::ValuesIn(GetTestParameters()));
+
+INSTANTIATE_TEST_CASE_P(MultiChannel,
+ NetEqStereoTestPositiveDrift,
+ ::testing::ValuesIn(GetTestParameters()));
+
+INSTANTIATE_TEST_CASE_P(MultiChannel,
+ NetEqStereoTestNegativeDrift,
+ ::testing::ValuesIn(GetTestParameters()));
+
+INSTANTIATE_TEST_CASE_P(MultiChannel,
+ NetEqStereoTestDelays,
+ ::testing::ValuesIn(GetTestParameters()));
+
+INSTANTIATE_TEST_CASE_P(MultiChannel,
+ NetEqStereoTestLosses,
+ ::testing::ValuesIn(GetTestParameters()));
+
+} // namespace webrtc
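The stereo test above builds its multi-channel input by duplicating each mono sample across all channels (via test::InputAudioFile::DuplicateInterleaved), which is what allows VerifyOutput() to demand exact equality between every output channel and the mono reference. A rough stand-in for that helper, for illustration only and not the actual tool implementation:

// Illustrative sketch of mono-to-interleaved duplication.
#include <cstddef>
#include <cstdint>

// Repeats each of the |samples| mono samples in |source| once per channel,
// producing interleaved output: m0 m0 ... m0 m1 m1 ... m1 ...
void DuplicateInterleavedSketch(const int16_t* source, size_t samples,
                                size_t channels, int16_t* destination) {
  for (size_t i = 0; i < samples; ++i) {
    for (size_t ch = 0; ch < channels; ++ch) {
      destination[i * channels + ch] = source[i];
    }
  }
}
// With channels == 2, {1, 2, 3} becomes {1, 1, 2, 2, 3, 3}; after decoding,
// VerifyOutput() checks that this per-channel equality still holds.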
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
new file mode 100644
index 00000000000..4d2ce252bdf
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
@@ -0,0 +1,248 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'neteq_rtpplay',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq',
+ 'neteq_test_tools',
+ 'neteq_unittest_tools',
+ 'PCM16B',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ ],
+ 'sources': [
+ 'tools/neteq_rtpplay.cc',
+ ],
+ 'defines': [
+ ],
+ }, # neteq_rtpplay
+
+ {
+ 'target_name': 'RTPencode',
+ 'type': 'executable',
+ 'dependencies': [
+ # TODO(hlundin): Make RTPencode use ACM to encode files.
+ 'neteq_test_tools',# Test helpers
+ 'G711',
+ 'G722',
+ 'PCM16B',
+ 'iLBC',
+ 'iSAC',
+ 'CNG',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ ],
+ 'defines': [
+ 'CODEC_ILBC',
+ 'CODEC_PCM16B',
+ 'CODEC_G711',
+ 'CODEC_G722',
+ 'CODEC_ISAC',
+ 'CODEC_PCM16B_WB',
+ 'CODEC_ISAC_SWB',
+ 'CODEC_PCM16B_32KHZ',
+ 'CODEC_PCM16B_48KHZ',
+ 'CODEC_CNGCODEC8',
+ 'CODEC_CNGCODEC16',
+ 'CODEC_CNGCODEC32',
+ 'CODEC_ATEVENT_DECODE',
+ 'CODEC_RED',
+ ],
+ 'include_dirs': [
+ 'interface',
+ 'test',
+ '<(webrtc_root)',
+ ],
+ 'sources': [
+ 'test/RTPencode.cc',
+ ],
+ # Disable warnings to enable Win64 build, issue 1323.
+ 'msvs_disabled_warnings': [
+ 4267, # size_t to int truncation.
+ ],
+ },
+
+ {
+ 'target_name': 'RTPjitter',
+ 'type': 'executable',
+ 'dependencies': [
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'test/RTPjitter.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'rtp_analyze',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq_unittest_tools',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
+ ],
+ 'sources': [
+ 'tools/rtp_analyze.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'RTPchange',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq_test_tools',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'test/RTPchange.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'RTPtimeshift',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq_test_tools',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'test/RTPtimeshift.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'RTPcat',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq_test_tools',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'test/RTPcat.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'rtp_to_text',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq_test_tools',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ ],
+ 'sources': [
+ 'test/rtp_to_text.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'audio_classifier_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq',
+ ],
+ 'sources': [
+ 'test/audio_classifier_test.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'neteq_test_support',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'neteq',
+ 'PCM16B',
+ 'neteq_unittest_tools',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ ],
+ 'sources': [
+ 'tools/neteq_performance_test.cc',
+ 'tools/neteq_performance_test.h',
+ 'tools/neteq_quality_test.cc',
+ 'tools/neteq_quality_test.h',
+ ],
+ }, # neteq_test_support
+
+ {
+ 'target_name': 'neteq_speed_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq',
+ 'neteq_test_support',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ ],
+ 'sources': [
+ 'test/neteq_speed_test.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'neteq_opus_fec_quality_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq',
+ 'neteq_test_support',
+ 'webrtc_opus',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ ],
+ 'sources': [
+ 'test/neteq_opus_fec_quality_test.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'neteq_test_tools',
+ # Collection of useful functions used in other tests.
+ 'type': 'static_library',
+ 'variables': {
+ # Expects RTP packets without payloads when enabled.
+ 'neteq_dummy_rtp%': 0,
+ },
+ 'dependencies': [
+ 'G711',
+ 'G722',
+ 'PCM16B',
+ 'iLBC',
+ 'iSAC',
+ 'CNG',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'interface',
+ 'test',
+ '<(webrtc_root)',
+ ],
+ },
+ 'defines': [
+ ],
+ 'include_dirs': [
+ 'interface',
+ 'test',
+ '<(webrtc_root)',
+ ],
+ 'sources': [
+ 'test/NETEQTEST_DummyRTPpacket.cc',
+ 'test/NETEQTEST_DummyRTPpacket.h',
+ 'test/NETEQTEST_RTPpacket.cc',
+ 'test/NETEQTEST_RTPpacket.h',
+ ],
+ # Disable warnings to enable Win64 build, issue 1323.
+ 'msvs_disabled_warnings': [
+ 4267, # size_t to int truncation.
+ ],
+ },
+ ], # targets
+}
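Several targets above disable MSVC warning 4267 for the Win64 build. That warning fires on implicit size_t-to-int narrowing, since size_t is 64-bit on x64 while int remains 32-bit. A minimal illustration, not taken from the patch:

// Sketch of what MSVC warning C4267 flags on 64-bit builds.
#include <vector>

// Implicit narrowing: size_t (64-bit on Win64) to int triggers C4267.
int UnsafeLength(const std::vector<unsigned char>& payload) {
  int length = payload.size();
  return length;
}

// Explicit cast: the usual fix when the value is known to fit in an int.
int SafeLength(const std::vector<unsigned char>& payload) {
  return static_cast<int>(payload.size());
}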
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
new file mode 100644
index 00000000000..0233e195003
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
@@ -0,0 +1,1437 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for NetEQ.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+
+#include <math.h>
+#include <stdlib.h>
+#include <string.h> // memset
+
+#include <algorithm>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "gflags/gflags.h"
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+#include "webrtc/typedefs.h"
+
+DEFINE_bool(gen_ref, false, "Generate reference files.");
+
+namespace webrtc {
+
+static bool IsAllZero(const int16_t* buf, int buf_length) {
+ bool all_zero = true;
+ for (int n = 0; n < buf_length && all_zero; ++n)
+ all_zero = buf[n] == 0;
+ return all_zero;
+}
+
+static bool IsAllNonZero(const int16_t* buf, int buf_length) {
+ bool all_non_zero = true;
+ for (int n = 0; n < buf_length && all_non_zero; ++n)
+ all_non_zero = buf[n] != 0;
+ return all_non_zero;
+}
+
+class RefFiles {
+ public:
+ RefFiles(const std::string& input_file, const std::string& output_file);
+ ~RefFiles();
+ template<class T> void ProcessReference(const T& test_results);
+ template<typename T, size_t n> void ProcessReference(
+ const T (&test_results)[n],
+ size_t length);
+ template<typename T, size_t n> void WriteToFile(
+ const T (&test_results)[n],
+ size_t length);
+ template<typename T, size_t n> void ReadFromFileAndCompare(
+ const T (&test_results)[n],
+ size_t length);
+ void WriteToFile(const NetEqNetworkStatistics& stats);
+ void ReadFromFileAndCompare(const NetEqNetworkStatistics& stats);
+ void WriteToFile(const RtcpStatistics& stats);
+ void ReadFromFileAndCompare(const RtcpStatistics& stats);
+
+ FILE* input_fp_;
+ FILE* output_fp_;
+};
+
+RefFiles::RefFiles(const std::string &input_file,
+ const std::string &output_file)
+ : input_fp_(NULL),
+ output_fp_(NULL) {
+ if (!input_file.empty()) {
+ input_fp_ = fopen(input_file.c_str(), "rb");
+ EXPECT_TRUE(input_fp_ != NULL);
+ }
+ if (!output_file.empty()) {
+ output_fp_ = fopen(output_file.c_str(), "wb");
+ EXPECT_TRUE(output_fp_ != NULL);
+ }
+}
+
+RefFiles::~RefFiles() {
+ if (input_fp_) {
+ EXPECT_EQ(EOF, fgetc(input_fp_)); // Make sure that we reached the end.
+ fclose(input_fp_);
+ }
+ if (output_fp_) fclose(output_fp_);
+}
+
+template<class T>
+void RefFiles::ProcessReference(const T& test_results) {
+ WriteToFile(test_results);
+ ReadFromFileAndCompare(test_results);
+}
+
+template<typename T, size_t n>
+void RefFiles::ProcessReference(const T (&test_results)[n], size_t length) {
+ WriteToFile(test_results, length);
+ ReadFromFileAndCompare(test_results, length);
+}
+
+template<typename T, size_t n>
+void RefFiles::WriteToFile(const T (&test_results)[n], size_t length) {
+ if (output_fp_) {
+ ASSERT_EQ(length, fwrite(&test_results, sizeof(T), length, output_fp_));
+ }
+}
+
+template<typename T, size_t n>
+void RefFiles::ReadFromFileAndCompare(const T (&test_results)[n],
+ size_t length) {
+ if (input_fp_) {
+ // Read from ref file.
+ T* ref = new T[length];
+ ASSERT_EQ(length, fread(ref, sizeof(T), length, input_fp_));
+ // Compare
+ ASSERT_EQ(0, memcmp(&test_results, ref, sizeof(T) * length));
+ delete [] ref;
+ }
+}
+
+void RefFiles::WriteToFile(const NetEqNetworkStatistics& stats) {
+ if (output_fp_) {
+ ASSERT_EQ(1u, fwrite(&stats, sizeof(NetEqNetworkStatistics), 1,
+ output_fp_));
+ }
+}
+
+void RefFiles::ReadFromFileAndCompare(
+ const NetEqNetworkStatistics& stats) {
+ if (input_fp_) {
+ // Read from ref file.
+ size_t stat_size = sizeof(NetEqNetworkStatistics);
+ NetEqNetworkStatistics ref_stats;
+ ASSERT_EQ(1u, fread(&ref_stats, stat_size, 1, input_fp_));
+ // Compare
+ ASSERT_EQ(0, memcmp(&stats, &ref_stats, stat_size));
+ }
+}
+
+void RefFiles::WriteToFile(const RtcpStatistics& stats) {
+ if (output_fp_) {
+ ASSERT_EQ(1u, fwrite(&(stats.fraction_lost), sizeof(stats.fraction_lost), 1,
+ output_fp_));
+ ASSERT_EQ(1u, fwrite(&(stats.cumulative_lost),
+ sizeof(stats.cumulative_lost), 1, output_fp_));
+ ASSERT_EQ(1u, fwrite(&(stats.extended_max_sequence_number),
+ sizeof(stats.extended_max_sequence_number), 1,
+ output_fp_));
+ ASSERT_EQ(1u, fwrite(&(stats.jitter), sizeof(stats.jitter), 1,
+ output_fp_));
+ }
+}
+
+void RefFiles::ReadFromFileAndCompare(
+ const RtcpStatistics& stats) {
+ if (input_fp_) {
+ // Read from ref file.
+ RtcpStatistics ref_stats;
+ ASSERT_EQ(1u, fread(&(ref_stats.fraction_lost),
+ sizeof(ref_stats.fraction_lost), 1, input_fp_));
+ ASSERT_EQ(1u, fread(&(ref_stats.cumulative_lost),
+ sizeof(ref_stats.cumulative_lost), 1, input_fp_));
+ ASSERT_EQ(1u, fread(&(ref_stats.extended_max_sequence_number),
+ sizeof(ref_stats.extended_max_sequence_number), 1,
+ input_fp_));
+ ASSERT_EQ(1u, fread(&(ref_stats.jitter), sizeof(ref_stats.jitter), 1,
+ input_fp_));
+ // Compare
+ ASSERT_EQ(ref_stats.fraction_lost, stats.fraction_lost);
+ ASSERT_EQ(ref_stats.cumulative_lost, stats.cumulative_lost);
+ ASSERT_EQ(ref_stats.extended_max_sequence_number,
+ stats.extended_max_sequence_number);
+ ASSERT_EQ(ref_stats.jitter, stats.jitter);
+ }
+}
+
+class NetEqDecodingTest : public ::testing::Test {
+ protected:
+ // NetEQ must be polled for data once every 10 ms. Thus, none of the
+ // constants below can be changed.
+ static const int kTimeStepMs = 10;
+ static const int kBlockSize8kHz = kTimeStepMs * 8;
+ static const int kBlockSize16kHz = kTimeStepMs * 16;
+ static const int kBlockSize32kHz = kTimeStepMs * 32;
+ static const int kMaxBlockSize = kBlockSize32kHz;
+ static const int kInitSampleRateHz = 8000;
+
+ NetEqDecodingTest();
+ virtual void SetUp();
+ virtual void TearDown();
+ void SelectDecoders(NetEqDecoder* used_codec);
+ void LoadDecoders();
+ void OpenInputFile(const std::string &rtp_file);
+ void Process(NETEQTEST_RTPpacket* rtp_ptr, int* out_len);
+ void DecodeAndCompare(const std::string &rtp_file,
+ const std::string &ref_file);
+ void DecodeAndCheckStats(const std::string &rtp_file,
+ const std::string &stat_ref_file,
+ const std::string &rtcp_ref_file);
+ static void PopulateRtpInfo(int frame_index,
+ int timestamp,
+ WebRtcRTPHeader* rtp_info);
+ static void PopulateCng(int frame_index,
+ int timestamp,
+ WebRtcRTPHeader* rtp_info,
+ uint8_t* payload,
+ int* payload_len);
+
+ void CheckBgnOff(int sampling_rate, NetEqBackgroundNoiseMode bgn_mode);
+
+ void WrapTest(uint16_t start_seq_no, uint32_t start_timestamp,
+ const std::set<uint16_t>& drop_seq_numbers,
+ bool expect_seq_no_wrap, bool expect_timestamp_wrap);
+
+ void LongCngWithClockDrift(double drift_factor,
+ double network_freeze_ms,
+ bool pull_audio_during_freeze,
+ int delay_tolerance_ms,
+ int max_time_to_speech_ms);
+
+ void DuplicateCng();
+
+ uint32_t PlayoutTimestamp();
+
+ NetEq* neteq_;
+ FILE* rtp_fp_;
+ unsigned int sim_clock_;
+ int16_t out_data_[kMaxBlockSize];
+ int output_sample_rate_;
+ int algorithmic_delay_ms_;
+};
+
+// Defining the static consts so that they can be passed by reference.
+const int NetEqDecodingTest::kTimeStepMs;
+const int NetEqDecodingTest::kBlockSize8kHz;
+const int NetEqDecodingTest::kBlockSize16kHz;
+const int NetEqDecodingTest::kBlockSize32kHz;
+const int NetEqDecodingTest::kMaxBlockSize;
+const int NetEqDecodingTest::kInitSampleRateHz;
+
+NetEqDecodingTest::NetEqDecodingTest()
+ : neteq_(NULL),
+ rtp_fp_(NULL),
+ sim_clock_(0),
+ output_sample_rate_(kInitSampleRateHz),
+ algorithmic_delay_ms_(0) {
+ memset(out_data_, 0, sizeof(out_data_));
+}
+
+void NetEqDecodingTest::SetUp() {
+ NetEq::Config config;
+ config.sample_rate_hz = kInitSampleRateHz;
+ neteq_ = NetEq::Create(config);
+ NetEqNetworkStatistics stat;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&stat));
+ algorithmic_delay_ms_ = stat.current_buffer_size_ms;
+ ASSERT_TRUE(neteq_);
+ LoadDecoders();
+}
+
+void NetEqDecodingTest::TearDown() {
+ delete neteq_;
+ if (rtp_fp_)
+ fclose(rtp_fp_);
+}
+
+void NetEqDecodingTest::LoadDecoders() {
+ // Load PCMu.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCMu, 0));
+ // Load PCMa.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCMa, 8));
+#ifndef WEBRTC_ANDROID
+ // Load iLBC.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderILBC, 102));
+#endif // WEBRTC_ANDROID
+ // Load iSAC.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISAC, 103));
+#ifndef WEBRTC_ANDROID
+ // Load iSAC SWB.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISACswb, 104));
+ // Load iSAC FB.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISACfb, 105));
+#endif // WEBRTC_ANDROID
+ // Load PCM16B nb.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCM16B, 93));
+ // Load PCM16B wb.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCM16Bwb, 94));
+ // Load PCM16B swb32.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCM16Bswb32kHz, 95));
+ // Load CNG 8 kHz.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGnb, 13));
+ // Load CNG 16 kHz.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGwb, 98));
+}
+
+void NetEqDecodingTest::OpenInputFile(const std::string &rtp_file) {
+ rtp_fp_ = fopen(rtp_file.c_str(), "rb");
+ ASSERT_TRUE(rtp_fp_ != NULL);
+ ASSERT_EQ(0, NETEQTEST_RTPpacket::skipFileHeader(rtp_fp_));
+}
+
+void NetEqDecodingTest::Process(NETEQTEST_RTPpacket* rtp, int* out_len) {
+ // Check if time to receive.
+ while ((sim_clock_ >= rtp->time()) &&
+ (rtp->dataLen() >= 0)) {
+ if (rtp->dataLen() > 0) {
+ WebRtcRTPHeader rtpInfo;
+ rtp->parseHeader(&rtpInfo);
+ ASSERT_EQ(0, neteq_->InsertPacket(
+ rtpInfo,
+ rtp->payload(),
+ rtp->payloadLen(),
+ rtp->time() * (output_sample_rate_ / 1000)));
+ }
+ // Get next packet.
+ ASSERT_NE(-1, rtp->readFromFile(rtp_fp_));
+ }
+
+ // Get audio from NetEq.
+ NetEqOutputType type;
+ int num_channels;
+ ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, out_len,
+ &num_channels, &type));
+ ASSERT_TRUE((*out_len == kBlockSize8kHz) ||
+ (*out_len == kBlockSize16kHz) ||
+ (*out_len == kBlockSize32kHz));
+ output_sample_rate_ = *out_len / 10 * 1000;
+
+ // Increase time.
+ sim_clock_ += kTimeStepMs;
+}
+
+void NetEqDecodingTest::DecodeAndCompare(const std::string &rtp_file,
+ const std::string &ref_file) {
+ OpenInputFile(rtp_file);
+
+ std::string ref_out_file = "";
+ if (ref_file.empty()) {
+ ref_out_file = webrtc::test::OutputPath() + "neteq_universal_ref.pcm";
+ }
+ RefFiles ref_files(ref_file, ref_out_file);
+
+ NETEQTEST_RTPpacket rtp;
+ ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
+ int i = 0;
+ while (rtp.dataLen() >= 0) {
+ std::ostringstream ss;
+ ss << "Lap number " << i++ << " in DecodeAndCompare while loop";
+ SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
+ int out_len = 0;
+ ASSERT_NO_FATAL_FAILURE(Process(&rtp, &out_len));
+ ASSERT_NO_FATAL_FAILURE(ref_files.ProcessReference(out_data_, out_len));
+ }
+}
+
+void NetEqDecodingTest::DecodeAndCheckStats(const std::string &rtp_file,
+ const std::string &stat_ref_file,
+ const std::string &rtcp_ref_file) {
+ OpenInputFile(rtp_file);
+ std::string stat_out_file = "";
+ if (stat_ref_file.empty()) {
+ stat_out_file = webrtc::test::OutputPath() +
+ "neteq_network_stats.dat";
+ }
+ RefFiles network_stat_files(stat_ref_file, stat_out_file);
+
+ std::string rtcp_out_file = "";
+ if (rtcp_ref_file.empty()) {
+ rtcp_out_file = webrtc::test::OutputPath() +
+ "neteq_rtcp_stats.dat";
+ }
+ RefFiles rtcp_stat_files(rtcp_ref_file, rtcp_out_file);
+
+ NETEQTEST_RTPpacket rtp;
+ ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
+ while (rtp.dataLen() >= 0) {
+ int out_len;
+ ASSERT_NO_FATAL_FAILURE(Process(&rtp, &out_len));
+
+ // Query the network statistics API once per second
+ if (sim_clock_ % 1000 == 0) {
+ // Process NetworkStatistics.
+ NetEqNetworkStatistics network_stats;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
+ ASSERT_NO_FATAL_FAILURE(
+ network_stat_files.ProcessReference(network_stats));
+
+ // Process RTCPstat.
+ RtcpStatistics rtcp_stats;
+ neteq_->GetRtcpStatistics(&rtcp_stats);
+ ASSERT_NO_FATAL_FAILURE(rtcp_stat_files.ProcessReference(rtcp_stats));
+ }
+ }
+}
+
+void NetEqDecodingTest::PopulateRtpInfo(int frame_index,
+ int timestamp,
+ WebRtcRTPHeader* rtp_info) {
+ rtp_info->header.sequenceNumber = frame_index;
+ rtp_info->header.timestamp = timestamp;
+ rtp_info->header.ssrc = 0x1234; // Just an arbitrary SSRC.
+ rtp_info->header.payloadType = 94; // PCM16b WB codec.
+ rtp_info->header.markerBit = 0;
+}
+
+void NetEqDecodingTest::PopulateCng(int frame_index,
+ int timestamp,
+ WebRtcRTPHeader* rtp_info,
+ uint8_t* payload,
+ int* payload_len) {
+ rtp_info->header.sequenceNumber = frame_index;
+ rtp_info->header.timestamp = timestamp;
+ rtp_info->header.ssrc = 0x1234; // Just an arbitrary SSRC.
+ rtp_info->header.payloadType = 98; // WB CNG.
+ rtp_info->header.markerBit = 0;
+ payload[0] = 64; // Noise level -64 dBov, quite arbitrarily chosen.
+ *payload_len = 1; // Only noise level, no spectral parameters.
+}
+
+void NetEqDecodingTest::CheckBgnOff(int sampling_rate_hz,
+ NetEqBackgroundNoiseMode bgn_mode) {
+ int expected_samples_per_channel = 0;
+ uint8_t payload_type = 0xFF; // Invalid.
+ if (sampling_rate_hz == 8000) {
+ expected_samples_per_channel = kBlockSize8kHz;
+ payload_type = 93; // PCM 16, 8 kHz.
+ } else if (sampling_rate_hz == 16000) {
+ expected_samples_per_channel = kBlockSize16kHz;
+    payload_type = 94;  // PCM 16, 16 kHz.
+ } else if (sampling_rate_hz == 32000) {
+ expected_samples_per_channel = kBlockSize32kHz;
+ payload_type = 95; // PCM 16, 32 kHz.
+ } else {
+ ASSERT_TRUE(false); // Unsupported test case.
+ }
+
+ NetEqOutputType type;
+ int16_t output[kBlockSize32kHz]; // Maximum size is chosen.
+ int16_t input[kBlockSize32kHz]; // Maximum size is chosen.
+
+ // Payload of 10 ms of PCM16 32 kHz.
+ uint8_t payload[kBlockSize32kHz * sizeof(int16_t)];
+
+ // Random payload.
+ for (int n = 0; n < expected_samples_per_channel; ++n) {
+ input[n] = (rand() & ((1 << 10) - 1)) - ((1 << 5) - 1);
+ }
+ int enc_len_bytes = WebRtcPcm16b_EncodeW16(
+ input, expected_samples_per_channel, reinterpret_cast<int16_t*>(payload));
+ ASSERT_EQ(enc_len_bytes, expected_samples_per_channel * 2);
+
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(0, 0, &rtp_info);
+ rtp_info.header.payloadType = payload_type;
+
+ int number_channels = 0;
+ int samples_per_channel = 0;
+
+ uint32_t receive_timestamp = 0;
+ for (int n = 0; n < 10; ++n) { // Insert few packets and get audio.
+ number_channels = 0;
+ samples_per_channel = 0;
+ ASSERT_EQ(0, neteq_->InsertPacket(
+ rtp_info, payload, enc_len_bytes, receive_timestamp));
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize32kHz, output, &samples_per_channel,
+ &number_channels, &type));
+ ASSERT_EQ(1, number_channels);
+ ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
+ ASSERT_EQ(kOutputNormal, type);
+
+ // Next packet.
+ rtp_info.header.timestamp += expected_samples_per_channel;
+ rtp_info.header.sequenceNumber++;
+ receive_timestamp += expected_samples_per_channel;
+ }
+
+ number_channels = 0;
+ samples_per_channel = 0;
+
+  // Get audio without inserting packets, expecting PLC and PLC-to-CNG. Pull
+  // one frame without checking the speech-type. This is the first frame pulled
+  // without inserting any packet, and might not be labeled as PLC.
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize32kHz, output, &samples_per_channel,
+ &number_channels, &type));
+ ASSERT_EQ(1, number_channels);
+ ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
+
+  // To be able to test the fading of background noise we need to pull at
+  // least 611 frames.
+ const int kFadingThreshold = 611;
+
+  // Test several PLC-to-CNG frames for the expected behavior. The number 20 is
+  // arbitrary, but sufficiently large to cover enough frames.
+ const int kNumPlcToCngTestFrames = 20;
+ bool plc_to_cng = false;
+ for (int n = 0; n < kFadingThreshold + kNumPlcToCngTestFrames; ++n) {
+ number_channels = 0;
+ samples_per_channel = 0;
+ memset(output, 1, sizeof(output)); // Set to non-zero.
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize32kHz, output, &samples_per_channel,
+ &number_channels, &type));
+ ASSERT_EQ(1, number_channels);
+ ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
+ if (type == kOutputPLCtoCNG) {
+ plc_to_cng = true;
+ double sum_squared = 0;
+ for (int k = 0; k < number_channels * samples_per_channel; ++k)
+ sum_squared += output[k] * output[k];
+ if (bgn_mode == kBgnOn) {
+ EXPECT_NE(0, sum_squared);
+ } else if (bgn_mode == kBgnOff || n > kFadingThreshold) {
+ EXPECT_EQ(0, sum_squared);
+ }
+ } else {
+ EXPECT_EQ(kOutputPLC, type);
+ }
+ }
+ EXPECT_TRUE(plc_to_cng); // Just to be sure that PLC-to-CNG has occurred.
+}
+
+TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestBitExactness)) {
+ const std::string input_rtp_file = webrtc::test::ProjectRootPath() +
+ "resources/audio_coding/neteq_universal_new.rtp";
+ // Note that neteq4_universal_ref.pcm and neteq4_universal_ref_win_32.pcm
+ // are identical. The latter could have been removed, but if clients still
+ // have a copy of the file, the test will fail.
+ const std::string input_ref_file =
+ webrtc::test::ResourcePath("audio_coding/neteq4_universal_ref", "pcm");
+
+ if (FLAGS_gen_ref) {
+ DecodeAndCompare(input_rtp_file, "");
+ } else {
+ DecodeAndCompare(input_rtp_file, input_ref_file);
+ }
+}
+
+TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestNetworkStatistics)) {
+ const std::string input_rtp_file = webrtc::test::ProjectRootPath() +
+ "resources/audio_coding/neteq_universal_new.rtp";
+#if defined(_MSC_VER) && (_MSC_VER >= 1700)
+ // For Visual Studio 2012 and later, we will have to use the generic reference
+ // file, rather than the windows-specific one.
+ const std::string network_stat_ref_file = webrtc::test::ProjectRootPath() +
+ "resources/audio_coding/neteq4_network_stats.dat";
+#else
+ const std::string network_stat_ref_file =
+ webrtc::test::ResourcePath("audio_coding/neteq4_network_stats", "dat");
+#endif
+ const std::string rtcp_stat_ref_file =
+ webrtc::test::ResourcePath("audio_coding/neteq4_rtcp_stats", "dat");
+ if (FLAGS_gen_ref) {
+ DecodeAndCheckStats(input_rtp_file, "", "");
+ } else {
+ DecodeAndCheckStats(input_rtp_file, network_stat_ref_file,
+ rtcp_stat_ref_file);
+ }
+}
+
+// TODO(hlundin): Re-enable test once the statistics interface is up again.
+TEST_F(NetEqDecodingTest, TestFrameWaitingTimeStatistics) {
+ // Use fax mode to avoid time-scaling. This is to simplify the testing of
+ // packet waiting times in the packet buffer.
+ neteq_->SetPlayoutMode(kPlayoutFax);
+ ASSERT_EQ(kPlayoutFax, neteq_->PlayoutMode());
+ // Insert 30 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
+ size_t num_frames = 30;
+ const int kSamples = 10 * 16;
+ const int kPayloadBytes = kSamples * 2;
+ for (size_t i = 0; i < num_frames; ++i) {
+ uint16_t payload[kSamples] = {0};
+ WebRtcRTPHeader rtp_info;
+ rtp_info.header.sequenceNumber = i;
+ rtp_info.header.timestamp = i * kSamples;
+ rtp_info.header.ssrc = 0x1234; // Just an arbitrary SSRC.
+ rtp_info.header.payloadType = 94; // PCM16b WB codec.
+ rtp_info.header.markerBit = 0;
+ ASSERT_EQ(0, neteq_->InsertPacket(
+ rtp_info,
+ reinterpret_cast<uint8_t*>(payload),
+ kPayloadBytes, 0));
+ }
+ // Pull out all data.
+ for (size_t i = 0; i < num_frames; ++i) {
+ int out_len;
+ int num_channels;
+ NetEqOutputType type;
+ ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
+ &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ }
+
+ std::vector<int> waiting_times;
+ neteq_->WaitingTimes(&waiting_times);
+ EXPECT_EQ(num_frames, waiting_times.size());
+ // Since all frames are dumped into NetEQ at once, but pulled out with 10 ms
+ // spacing (per definition), we expect the delay to increase with 10 ms for
+ // each packet.
+ for (size_t i = 0; i < waiting_times.size(); ++i) {
+ EXPECT_EQ(static_cast<int>(i + 1) * 10, waiting_times[i]);
+ }
+
+ // Check statistics again and make sure it's been reset.
+ neteq_->WaitingTimes(&waiting_times);
+ int len = waiting_times.size();
+ EXPECT_EQ(0, len);
+
+  // Process > 100 frames, and make sure that we get statistics
+  // only for 100 frames. Note the new SSRC, causing NetEQ to reset.
+ num_frames = 110;
+ for (size_t i = 0; i < num_frames; ++i) {
+ uint16_t payload[kSamples] = {0};
+ WebRtcRTPHeader rtp_info;
+ rtp_info.header.sequenceNumber = i;
+ rtp_info.header.timestamp = i * kSamples;
+ rtp_info.header.ssrc = 0x1235; // Just an arbitrary SSRC.
+ rtp_info.header.payloadType = 94; // PCM16b WB codec.
+ rtp_info.header.markerBit = 0;
+ ASSERT_EQ(0, neteq_->InsertPacket(
+ rtp_info,
+ reinterpret_cast<uint8_t*>(payload),
+ kPayloadBytes, 0));
+ int out_len;
+ int num_channels;
+ NetEqOutputType type;
+ ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
+ &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ }
+
+ neteq_->WaitingTimes(&waiting_times);
+ EXPECT_EQ(100u, waiting_times.size());
+}
+
+TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
+ const int kNumFrames = 3000; // Needed for convergence.
+ int frame_index = 0;
+ const int kSamples = 10 * 16;
+ const int kPayloadBytes = kSamples * 2;
+ while (frame_index < kNumFrames) {
+    // Insert one packet each time, except every 10th time when we insert two
+    // packets at once. This will create a negative clock-drift of approx. 10%.
+ int num_packets = (frame_index % 10 == 0 ? 2 : 1);
+ for (int n = 0; n < num_packets; ++n) {
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ++frame_index;
+ }
+
+ // Pull out data once.
+ int out_len;
+ int num_channels;
+ NetEqOutputType type;
+ ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
+ &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ }
+
+ NetEqNetworkStatistics network_stats;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
+ EXPECT_EQ(-103196, network_stats.clockdrift_ppm);
+}
+
+TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) {
+ const int kNumFrames = 5000; // Needed for convergence.
+ int frame_index = 0;
+ const int kSamples = 10 * 16;
+ const int kPayloadBytes = kSamples * 2;
+ for (int i = 0; i < kNumFrames; ++i) {
+    // Insert one packet each time, except every 10th time when we don't insert
+    // any packet. This will create a positive clock-drift of approx. 11%.
+ int num_packets = (i % 10 == 9 ? 0 : 1);
+ for (int n = 0; n < num_packets; ++n) {
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ++frame_index;
+ }
+
+ // Pull out data once.
+ int out_len;
+ int num_channels;
+ NetEqOutputType type;
+ ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
+ &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ }
+
+ NetEqNetworkStatistics network_stats;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
+ EXPECT_EQ(110946, network_stats.clockdrift_ppm);
+}
+
+void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
+ double network_freeze_ms,
+ bool pull_audio_during_freeze,
+ int delay_tolerance_ms,
+ int max_time_to_speech_ms) {
+ uint16_t seq_no = 0;
+ uint32_t timestamp = 0;
+ const int kFrameSizeMs = 30;
+ const int kSamples = kFrameSizeMs * 16;
+ const int kPayloadBytes = kSamples * 2;
+ double next_input_time_ms = 0.0;
+ double t_ms;
+ int out_len;
+ int num_channels;
+ NetEqOutputType type;
+
+ // Insert speech for 5 seconds.
+ const int kSpeechDurationMs = 5000;
+ for (t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) {
+ // Each turn in this for loop is 10 ms.
+ while (next_input_time_ms <= t_ms) {
+ // Insert one 30 ms speech frame.
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ++seq_no;
+ timestamp += kSamples;
+ next_input_time_ms += static_cast<double>(kFrameSizeMs) * drift_factor;
+ }
+ // Pull out data once.
+ ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
+ &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ }
+
+ EXPECT_EQ(kOutputNormal, type);
+ int32_t delay_before = timestamp - PlayoutTimestamp();
+
+ // Insert CNG for 1 minute (= 60000 ms).
+ const int kCngPeriodMs = 100;
+ const int kCngPeriodSamples = kCngPeriodMs * 16; // Period in 16 kHz samples.
+ const int kCngDurationMs = 60000;
+ for (; t_ms < kSpeechDurationMs + kCngDurationMs; t_ms += 10) {
+ // Each turn in this for loop is 10 ms.
+ while (next_input_time_ms <= t_ms) {
+ // Insert one CNG frame each 100 ms.
+ uint8_t payload[kPayloadBytes];
+ int payload_len;
+ WebRtcRTPHeader rtp_info;
+ PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+ ++seq_no;
+ timestamp += kCngPeriodSamples;
+ next_input_time_ms += static_cast<double>(kCngPeriodMs) * drift_factor;
+ }
+ // Pull out data once.
+ ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
+ &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ }
+
+ EXPECT_EQ(kOutputCNG, type);
+
+ if (network_freeze_ms > 0) {
+ // First keep pulling audio for |network_freeze_ms| without inserting
+ // any data, then insert CNG data corresponding to |network_freeze_ms|
+ // without pulling any output audio.
+ const double loop_end_time = t_ms + network_freeze_ms;
+ for (; t_ms < loop_end_time; t_ms += 10) {
+ // Pull out data once.
+ ASSERT_EQ(0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ EXPECT_EQ(kOutputCNG, type);
+ }
+ bool pull_once = pull_audio_during_freeze;
+ // If |pull_once| is true, GetAudio will be called once half-way through
+ // the network recovery period.
+ double pull_time_ms = (t_ms + next_input_time_ms) / 2;
+ while (next_input_time_ms <= t_ms) {
+ if (pull_once && next_input_time_ms >= pull_time_ms) {
+ pull_once = false;
+ // Pull out data once.
+ ASSERT_EQ(
+ 0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ EXPECT_EQ(kOutputCNG, type);
+ t_ms += 10;
+ }
+ // Insert one CNG frame each 100 ms.
+ uint8_t payload[kPayloadBytes];
+ int payload_len;
+ WebRtcRTPHeader rtp_info;
+ PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+ ++seq_no;
+ timestamp += kCngPeriodSamples;
+ next_input_time_ms += kCngPeriodMs * drift_factor;
+ }
+ }
+
+ // Insert speech again until output type is speech.
+ double speech_restart_time_ms = t_ms;
+ while (type != kOutputNormal) {
+ // Each turn in this for loop is 10 ms.
+ while (next_input_time_ms <= t_ms) {
+ // Insert one 30 ms speech frame.
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ++seq_no;
+ timestamp += kSamples;
+ next_input_time_ms += kFrameSizeMs * drift_factor;
+ }
+ // Pull out data once.
+ ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
+ &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ // Increase clock.
+ t_ms += 10;
+ }
+
+ // Check that the speech starts again within reasonable time.
+ double time_until_speech_returns_ms = t_ms - speech_restart_time_ms;
+ EXPECT_LT(time_until_speech_returns_ms, max_time_to_speech_ms);
+ int32_t delay_after = timestamp - PlayoutTimestamp();
+ // Compare delay before and after, and make sure it differs less than 20 ms.
+ EXPECT_LE(delay_after, delay_before + delay_tolerance_ms * 16);
+ EXPECT_GE(delay_after, delay_before - delay_tolerance_ms * 16);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithNegativeClockDrift) {
+ // Apply a clock drift of -25 ms / s (sender faster than receiver).
+ const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
+ const double kNetworkFreezeTimeMs = 0.0;
+ const bool kGetAudioDuringFreezeRecovery = false;
+ const int kDelayToleranceMs = 20;
+ const int kMaxTimeToSpeechMs = 100;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithPositiveClockDrift) {
+ // Apply a clock drift of +25 ms / s (sender slower than receiver).
+ const double kDriftFactor = 1000.0 / (1000.0 - 25.0);
+ const double kNetworkFreezeTimeMs = 0.0;
+ const bool kGetAudioDuringFreezeRecovery = false;
+ const int kDelayToleranceMs = 20;
+ const int kMaxTimeToSpeechMs = 100;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithNegativeClockDriftNetworkFreeze) {
+ // Apply a clock drift of -25 ms / s (sender faster than receiver).
+ const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
+ const double kNetworkFreezeTimeMs = 5000.0;
+ const bool kGetAudioDuringFreezeRecovery = false;
+ const int kDelayToleranceMs = 50;
+ const int kMaxTimeToSpeechMs = 200;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithPositiveClockDriftNetworkFreeze) {
+ // Apply a clock drift of +25 ms / s (sender slower than receiver).
+ const double kDriftFactor = 1000.0 / (1000.0 - 25.0);
+ const double kNetworkFreezeTimeMs = 5000.0;
+ const bool kGetAudioDuringFreezeRecovery = false;
+ const int kDelayToleranceMs = 20;
+ const int kMaxTimeToSpeechMs = 100;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithPositiveClockDriftNetworkFreezeExtraPull) {
+ // Apply a clock drift of +25 ms / s (sender slower than receiver).
+ const double kDriftFactor = 1000.0 / (1000.0 - 25.0);
+ const double kNetworkFreezeTimeMs = 5000.0;
+ const bool kGetAudioDuringFreezeRecovery = true;
+ const int kDelayToleranceMs = 20;
+ const int kMaxTimeToSpeechMs = 100;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithoutClockDrift) {
+ const double kDriftFactor = 1.0; // No drift.
+ const double kNetworkFreezeTimeMs = 0.0;
+ const bool kGetAudioDuringFreezeRecovery = false;
+ const int kDelayToleranceMs = 10;
+ const int kMaxTimeToSpeechMs = 50;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, UnknownPayloadType) {
+ const int kPayloadBytes = 100;
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(0, 0, &rtp_info);
+ rtp_info.header.payloadType = 1; // Not registered as a decoder.
+ EXPECT_EQ(NetEq::kFail,
+ neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ EXPECT_EQ(NetEq::kUnknownRtpPayloadType, neteq_->LastError());
+}
+
+TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(DecoderError)) {
+ const int kPayloadBytes = 100;
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(0, 0, &rtp_info);
+ rtp_info.header.payloadType = 103; // iSAC, but the payload is invalid.
+ EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ NetEqOutputType type;
+ // Set all of |out_data_| to 1, and verify that it was set to 0 by the call
+ // to GetAudio.
+ for (int i = 0; i < kMaxBlockSize; ++i) {
+ out_data_[i] = 1;
+ }
+ int num_channels;
+ int samples_per_channel;
+ EXPECT_EQ(NetEq::kFail,
+ neteq_->GetAudio(kMaxBlockSize, out_data_,
+ &samples_per_channel, &num_channels, &type));
+ // Verify that there is a decoder error to check.
+ EXPECT_EQ(NetEq::kDecoderErrorCode, neteq_->LastError());
+ // Code 6730 is an iSAC error code.
+ EXPECT_EQ(6730, neteq_->LastDecoderError());
+ // Verify that the first 160 samples are set to 0, and that the remaining
+ // samples are left unmodified.
+ static const int kExpectedOutputLength = 160; // 10 ms at 16 kHz sample rate.
+ for (int i = 0; i < kExpectedOutputLength; ++i) {
+ std::ostringstream ss;
+ ss << "i = " << i;
+ SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
+ EXPECT_EQ(0, out_data_[i]);
+ }
+ for (int i = kExpectedOutputLength; i < kMaxBlockSize; ++i) {
+ std::ostringstream ss;
+ ss << "i = " << i;
+ SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
+ EXPECT_EQ(1, out_data_[i]);
+ }
+}
+
+TEST_F(NetEqDecodingTest, GetAudioBeforeInsertPacket) {
+ NetEqOutputType type;
+ // Set all of |out_data_| to 1, and verify that it was set to 0 by the call
+ // to GetAudio.
+ for (int i = 0; i < kMaxBlockSize; ++i) {
+ out_data_[i] = 1;
+ }
+ int num_channels;
+ int samples_per_channel;
+ EXPECT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_,
+ &samples_per_channel,
+ &num_channels, &type));
+ // Verify that the first block of samples is set to 0.
+ static const int kExpectedOutputLength =
+ kInitSampleRateHz / 100; // 10 ms at initial sample rate.
+ for (int i = 0; i < kExpectedOutputLength; ++i) {
+ std::ostringstream ss;
+ ss << "i = " << i;
+ SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
+ EXPECT_EQ(0, out_data_[i]);
+ }
+}
+
+TEST_F(NetEqDecodingTest, BackgroundNoise) {
+ neteq_->SetBackgroundNoiseMode(kBgnOn);
+ CheckBgnOff(8000, kBgnOn);
+ CheckBgnOff(16000, kBgnOn);
+ CheckBgnOff(32000, kBgnOn);
+ EXPECT_EQ(kBgnOn, neteq_->BackgroundNoiseMode());
+
+ neteq_->SetBackgroundNoiseMode(kBgnOff);
+ CheckBgnOff(8000, kBgnOff);
+ CheckBgnOff(16000, kBgnOff);
+ CheckBgnOff(32000, kBgnOff);
+ EXPECT_EQ(kBgnOff, neteq_->BackgroundNoiseMode());
+
+ neteq_->SetBackgroundNoiseMode(kBgnFade);
+ CheckBgnOff(8000, kBgnFade);
+ CheckBgnOff(16000, kBgnFade);
+ CheckBgnOff(32000, kBgnFade);
+ EXPECT_EQ(kBgnFade, neteq_->BackgroundNoiseMode());
+}
+
+TEST_F(NetEqDecodingTest, SyncPacketInsert) {
+ WebRtcRTPHeader rtp_info;
+ uint32_t receive_timestamp = 0;
+  // For readability, use the following payload types instead of the defaults
+  // of this test.
+ uint8_t kPcm16WbPayloadType = 1;
+ uint8_t kCngNbPayloadType = 2;
+ uint8_t kCngWbPayloadType = 3;
+ uint8_t kCngSwb32PayloadType = 4;
+ uint8_t kCngSwb48PayloadType = 5;
+ uint8_t kAvtPayloadType = 6;
+ uint8_t kRedPayloadType = 7;
+ uint8_t kIsacPayloadType = 9; // Payload type 8 is already registered.
+
+ // Register decoders.
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCM16Bwb,
+ kPcm16WbPayloadType));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGnb, kCngNbPayloadType));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGwb, kCngWbPayloadType));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGswb32kHz,
+ kCngSwb32PayloadType));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGswb48kHz,
+ kCngSwb48PayloadType));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderAVT, kAvtPayloadType));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderRED, kRedPayloadType));
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISAC, kIsacPayloadType));
+
+ PopulateRtpInfo(0, 0, &rtp_info);
+ rtp_info.header.payloadType = kPcm16WbPayloadType;
+
+  // The first packet injected cannot be a sync-packet.
+ EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+
+ // Payload length of 10 ms PCM16 16 kHz.
+ const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
+ uint8_t payload[kPayloadBytes] = {0};
+ ASSERT_EQ(0, neteq_->InsertPacket(
+ rtp_info, payload, kPayloadBytes, receive_timestamp));
+
+ // Next packet. Last packet contained 10 ms audio.
+ rtp_info.header.sequenceNumber++;
+ rtp_info.header.timestamp += kBlockSize16kHz;
+ receive_timestamp += kBlockSize16kHz;
+
+ // Unacceptable payload types CNG, AVT (DTMF), RED.
+ rtp_info.header.payloadType = kCngNbPayloadType;
+ EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+
+ rtp_info.header.payloadType = kCngWbPayloadType;
+ EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+
+ rtp_info.header.payloadType = kCngSwb32PayloadType;
+ EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+
+ rtp_info.header.payloadType = kCngSwb48PayloadType;
+ EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+
+ rtp_info.header.payloadType = kAvtPayloadType;
+ EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+
+ rtp_info.header.payloadType = kRedPayloadType;
+ EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+
+ // Change of codec cannot be initiated with a sync packet.
+ rtp_info.header.payloadType = kIsacPayloadType;
+ EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+
+ // Change of SSRC is not allowed with a sync packet.
+ rtp_info.header.payloadType = kPcm16WbPayloadType;
+ ++rtp_info.header.ssrc;
+ EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+
+ --rtp_info.header.ssrc;
+ EXPECT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+}
+
+// First insert several noise-like packets, then sync-packets. Decoding all
+// packets should not produce errors, statistics should not show any packet
+// loss, and sync-packets should decode to zero.
+// TODO(turajs): We would have a better test if we had a reference NetEq; when
+// sync-packets are inserted into the "test" NetEq, we would insert all-zero
+// payloads into the reference NetEq and compare the outputs of the two.
+TEST_F(NetEqDecodingTest, SyncPacketDecode) {
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(0, 0, &rtp_info);
+ const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
+ uint8_t payload[kPayloadBytes];
+ int16_t decoded[kBlockSize16kHz];
+ int algorithmic_frame_delay = algorithmic_delay_ms_ / 10 + 1;
+ for (int n = 0; n < kPayloadBytes; ++n) {
+ payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence.
+ }
+ // Insert some packets which decode to noise. We are not interested in
+ // actual decoded values.
+ NetEqOutputType output_type;
+ int num_channels;
+ int samples_per_channel;
+ uint32_t receive_timestamp = 0;
+ for (int n = 0; n < 100; ++n) {
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
+ receive_timestamp));
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
+ ASSERT_EQ(1, num_channels);
+
+ rtp_info.header.sequenceNumber++;
+ rtp_info.header.timestamp += kBlockSize16kHz;
+ receive_timestamp += kBlockSize16kHz;
+ }
+ const int kNumSyncPackets = 10;
+
+  // Make sure a sufficient number of sync-packets is inserted so that we can
+  // conduct the test.
+ ASSERT_GT(kNumSyncPackets, algorithmic_frame_delay);
+ // Insert sync-packets, the decoded sequence should be all-zero.
+ for (int n = 0; n < kNumSyncPackets; ++n) {
+ ASSERT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
+ ASSERT_EQ(1, num_channels);
+ if (n > algorithmic_frame_delay) {
+ EXPECT_TRUE(IsAllZero(decoded, samples_per_channel * num_channels));
+ }
+ rtp_info.header.sequenceNumber++;
+ rtp_info.header.timestamp += kBlockSize16kHz;
+ receive_timestamp += kBlockSize16kHz;
+ }
+
+  // Insert regular packets; if sync-packets are not correctly buffered, then
+  // the network statistics would show some packet loss.
+ for (int n = 0; n <= algorithmic_frame_delay + 10; ++n) {
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
+ receive_timestamp));
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ if (n >= algorithmic_frame_delay + 1) {
+      // Expect that this frame contains samples from a regular RTP packet.
+ EXPECT_TRUE(IsAllNonZero(decoded, samples_per_channel * num_channels));
+ }
+ rtp_info.header.sequenceNumber++;
+ rtp_info.header.timestamp += kBlockSize16kHz;
+ receive_timestamp += kBlockSize16kHz;
+ }
+ NetEqNetworkStatistics network_stats;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
+ // Expecting a "clean" network.
+ EXPECT_EQ(0, network_stats.packet_loss_rate);
+ EXPECT_EQ(0, network_stats.expand_rate);
+ EXPECT_EQ(0, network_stats.accelerate_rate);
+ EXPECT_LE(network_stats.preemptive_rate, 150);
+}
+
+// Test if the size of the packet buffer is reported correctly when it contains
+// sync-packets. Also, test that network packets override sync-packets, that
+// is, decoding a network packet is preferred over a sync-packet if both have
+// the same sequence number and timestamp.
+TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(0, 0, &rtp_info);
+ const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
+ uint8_t payload[kPayloadBytes];
+ int16_t decoded[kBlockSize16kHz];
+ for (int n = 0; n < kPayloadBytes; ++n) {
+ payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence.
+ }
+ // Insert some packets which decode to noise. We are not interested in
+ // actual decoded values.
+ NetEqOutputType output_type;
+ int num_channels;
+ int samples_per_channel;
+ uint32_t receive_timestamp = 0;
+ int algorithmic_frame_delay = algorithmic_delay_ms_ / 10 + 1;
+ for (int n = 0; n < algorithmic_frame_delay; ++n) {
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
+ receive_timestamp));
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
+ ASSERT_EQ(1, num_channels);
+ rtp_info.header.sequenceNumber++;
+ rtp_info.header.timestamp += kBlockSize16kHz;
+ receive_timestamp += kBlockSize16kHz;
+ }
+ const int kNumSyncPackets = 10;
+
+ WebRtcRTPHeader first_sync_packet_rtp_info;
+ memcpy(&first_sync_packet_rtp_info, &rtp_info, sizeof(rtp_info));
+
+ // Insert sync-packets, but no decoding.
+ for (int n = 0; n < kNumSyncPackets; ++n) {
+ ASSERT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
+ rtp_info.header.sequenceNumber++;
+ rtp_info.header.timestamp += kBlockSize16kHz;
+ receive_timestamp += kBlockSize16kHz;
+ }
+ NetEqNetworkStatistics network_stats;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
+ EXPECT_EQ(kNumSyncPackets * 10 + algorithmic_delay_ms_,
+ network_stats.current_buffer_size_ms);
+
+ // Rewind |rtp_info| to that of the first sync packet.
+ memcpy(&rtp_info, &first_sync_packet_rtp_info, sizeof(rtp_info));
+
+ // Insert.
+ for (int n = 0; n < kNumSyncPackets; ++n) {
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
+ receive_timestamp));
+ rtp_info.header.sequenceNumber++;
+ rtp_info.header.timestamp += kBlockSize16kHz;
+ receive_timestamp += kBlockSize16kHz;
+ }
+
+ // Decode.
+ for (int n = 0; n < kNumSyncPackets; ++n) {
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
+ ASSERT_EQ(1, num_channels);
+ EXPECT_TRUE(IsAllNonZero(decoded, samples_per_channel * num_channels));
+ }
+}
+
+void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
+ uint32_t start_timestamp,
+ const std::set<uint16_t>& drop_seq_numbers,
+ bool expect_seq_no_wrap,
+ bool expect_timestamp_wrap) {
+ uint16_t seq_no = start_seq_no;
+ uint32_t timestamp = start_timestamp;
+ const int kBlocksPerFrame = 3; // Number of 10 ms blocks per frame.
+ const int kFrameSizeMs = kBlocksPerFrame * kTimeStepMs;
+ const int kSamples = kBlockSize16kHz * kBlocksPerFrame;
+ const int kPayloadBytes = kSamples * sizeof(int16_t);
+ double next_input_time_ms = 0.0;
+ int16_t decoded[kBlockSize16kHz];
+ int num_channels;
+ int samples_per_channel;
+ NetEqOutputType output_type;
+ uint32_t receive_timestamp = 0;
+
+ // Insert speech for 2 seconds.
+ const int kSpeechDurationMs = 2000;
+ int packets_inserted = 0;
+ uint16_t last_seq_no;
+ uint32_t last_timestamp;
+ bool timestamp_wrapped = false;
+ bool seq_no_wrapped = false;
+ for (double t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) {
+ // Each turn in this for loop is 10 ms.
+ while (next_input_time_ms <= t_ms) {
+ // Insert one 30 ms speech frame.
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+ if (drop_seq_numbers.find(seq_no) == drop_seq_numbers.end()) {
+ // This sequence number was not in the set to drop. Insert it.
+ ASSERT_EQ(0,
+ neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
+ receive_timestamp));
+ ++packets_inserted;
+ }
+ NetEqNetworkStatistics network_stats;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
+
+      // Due to internal NetEq logic, the preferred buffer size is about 4
+      // times the packet size for the first few packets. Therefore, we refrain
+      // from checking the criterion for those.
+ if (packets_inserted > 4) {
+ // Expect preferred and actual buffer size to be no more than 2 frames.
+ EXPECT_LE(network_stats.preferred_buffer_size_ms, kFrameSizeMs * 2);
+ EXPECT_LE(network_stats.current_buffer_size_ms, kFrameSizeMs * 2 +
+ algorithmic_delay_ms_);
+ }
+ last_seq_no = seq_no;
+ last_timestamp = timestamp;
+
+ ++seq_no;
+ timestamp += kSamples;
+ receive_timestamp += kSamples;
+ next_input_time_ms += static_cast<double>(kFrameSizeMs);
+
+ seq_no_wrapped |= seq_no < last_seq_no;
+ timestamp_wrapped |= timestamp < last_timestamp;
+ }
+ // Pull out data once.
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
+ ASSERT_EQ(1, num_channels);
+
+ // Expect delay (in samples) to be less than 2 packets.
+ EXPECT_LE(timestamp - PlayoutTimestamp(),
+ static_cast<uint32_t>(kSamples * 2));
+ }
+ // Make sure we have actually tested wrap-around.
+ ASSERT_EQ(expect_seq_no_wrap, seq_no_wrapped);
+ ASSERT_EQ(expect_timestamp_wrap, timestamp_wrapped);
+}
+
+TEST_F(NetEqDecodingTest, SequenceNumberWrap) {
+ // Start with a sequence number that will soon wrap.
+ std::set<uint16_t> drop_seq_numbers; // Don't drop any packets.
+ WrapTest(0xFFFF - 10, 0, drop_seq_numbers, true, false);
+}
+
+TEST_F(NetEqDecodingTest, SequenceNumberWrapAndDrop) {
+ // Start with a sequence number that will soon wrap.
+ std::set<uint16_t> drop_seq_numbers;
+ drop_seq_numbers.insert(0xFFFF);
+ drop_seq_numbers.insert(0x0);
+ WrapTest(0xFFFF - 10, 0, drop_seq_numbers, true, false);
+}
+
+TEST_F(NetEqDecodingTest, TimestampWrap) {
+ // Start with a timestamp that will soon wrap.
+ std::set<uint16_t> drop_seq_numbers;
+ WrapTest(0, 0xFFFFFFFF - 3000, drop_seq_numbers, false, true);
+}
+
+TEST_F(NetEqDecodingTest, TimestampAndSequenceNumberWrap) {
+ // Start with a timestamp and a sequence number that will wrap at the same
+ // time.
+ std::set<uint16_t> drop_seq_numbers;
+ WrapTest(0xFFFF - 10, 0xFFFFFFFF - 5000, drop_seq_numbers, true, true);
+}
+
+void NetEqDecodingTest::DuplicateCng() {
+ uint16_t seq_no = 0;
+ uint32_t timestamp = 0;
+ const int kFrameSizeMs = 10;
+ const int kSampleRateKhz = 16;
+ const int kSamples = kFrameSizeMs * kSampleRateKhz;
+ const int kPayloadBytes = kSamples * 2;
+
+ const int algorithmic_delay_samples = std::max(
+ algorithmic_delay_ms_ * kSampleRateKhz, 5 * kSampleRateKhz / 8);
+  // Insert three speech packets. Three are needed to get the frame length
+  // correct.
+ int out_len;
+ int num_channels;
+ NetEqOutputType type;
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ for (int i = 0; i < 3; ++i) {
+ PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ++seq_no;
+ timestamp += kSamples;
+
+ // Pull audio once.
+ ASSERT_EQ(0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ }
+ // Verify speech output.
+ EXPECT_EQ(kOutputNormal, type);
+
+ // Insert same CNG packet twice.
+ const int kCngPeriodMs = 100;
+ const int kCngPeriodSamples = kCngPeriodMs * kSampleRateKhz;
+ int payload_len;
+ PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
+ // This is the first time this CNG packet is inserted.
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+
+ // Pull audio once and make sure CNG is played.
+ ASSERT_EQ(0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ EXPECT_EQ(kOutputCNG, type);
+ EXPECT_EQ(timestamp - algorithmic_delay_samples, PlayoutTimestamp());
+
+ // Insert the same CNG packet again. Note that at this point it is old, since
+ // we have already decoded the first copy of it.
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+
+ // Pull audio until we have played |kCngPeriodMs| of CNG. Start at 10 ms since
+ // we have already pulled out CNG once.
+ for (int cng_time_ms = 10; cng_time_ms < kCngPeriodMs; cng_time_ms += 10) {
+ ASSERT_EQ(0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ EXPECT_EQ(kOutputCNG, type);
+ EXPECT_EQ(timestamp - algorithmic_delay_samples,
+ PlayoutTimestamp());
+ }
+
+ // Insert speech again.
+ ++seq_no;
+ timestamp += kCngPeriodSamples;
+ PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+
+ // Pull audio once and verify that the output is speech again.
+ ASSERT_EQ(0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ EXPECT_EQ(kOutputNormal, type);
+ EXPECT_EQ(timestamp + kSamples - algorithmic_delay_samples,
+ PlayoutTimestamp());
+}
+
+uint32_t NetEqDecodingTest::PlayoutTimestamp() {
+ uint32_t playout_timestamp = 0;
+ EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&playout_timestamp));
+ return playout_timestamp;
+}
+
+TEST_F(NetEqDecodingTest, DiscardDuplicateCng) { DuplicateCng(); }
+} // namespace webrtc
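
The test fixture above drives NetEq in fixed 10 ms steps: insert whatever RTP packets are due, then pull exactly one frame with GetAudio(). The sketch below isolates that driver pattern. It relies only on the InsertPacket() and GetAudio() calls visible in the tests; the include paths, the helper name, and the 48 kHz upper bound are assumptions for illustration, not part of this patch.

    // Minimal sketch of the 10 ms insert-then-pull pattern used by
    // NetEqDecodingTest. Include paths are assumed; adjust to the checkout.
    #include <stdint.h>

    #include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
    #include "webrtc/modules/interface/module_common_types.h"  // WebRtcRTPHeader.

    namespace {

    // 10 ms at 48 kHz; a safe upper bound for one mono output frame here.
    const int kMaxFrameSamples = 480;

    // Inserts one packet and pulls one 10 ms frame. Returns the number of
    // samples per channel produced, or -1 on failure. |out_data| must hold at
    // least kMaxFrameSamples elements per channel.
    int InsertAndPullOneFrame(webrtc::NetEq* neteq,
                              const WebRtcRTPHeader& rtp_header,
                              const uint8_t* payload,
                              int payload_len_bytes,
                              uint32_t receive_timestamp,
                              int16_t* out_data) {
      if (neteq->InsertPacket(rtp_header, payload, payload_len_bytes,
                              receive_timestamp) != 0) {
        return -1;  // E.g. NetEq::kFail for an unknown payload type.
      }
      int samples_per_channel = 0;
      int num_channels = 0;
      webrtc::NetEqOutputType type;
      if (neteq->GetAudio(kMaxFrameSamples, out_data, &samples_per_channel,
                          &num_channels, &type) != 0) {
        return -1;  // LastError()/LastDecoderError() give the reason.
      }
      return samples_per_channel;
    }

    }  // namespace
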
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate
deleted file mode 100644
index e8f4e482aaf..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-{
- 'conditions': [
- ['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
- 'variables': {
- 'isolate_dependency_untracked': [
- '../../../../../data/',
- '../../../../../resources/',
- ],
- },
- }],
- ['OS=="linux" or OS=="mac" or OS=="win"', {
- 'variables': {
- 'command': [
- '../../../../testing/test_env.py',
- '<(PRODUCT_DIR)/neteq_unittests<(EXECUTABLE_SUFFIX)',
- ],
- 'isolate_dependency_touched': [
- '../../../../DEPS',
- ],
- 'isolate_dependency_tracked': [
- '../../../../resources/audio_coding/neteq_network_stats.dat',
- '../../../../resources/audio_coding/neteq_rtcp_stats.dat',
- '../../../../resources/audio_coding/neteq_universal.rtp',
- '../../../../resources/audio_coding/neteq_universal_ref.pcm',
- '../../../../resources/audio_coding/testfile32kHz.pcm',
- '../../../../testing/test_env.py',
- '<(PRODUCT_DIR)/neteq_unittests<(EXECUTABLE_SUFFIX)',
- ],
- 'isolate_dependency_untracked': [
- '../../../../tools/swarming_client/',
- ],
- },
- }],
- ],
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.c
deleted file mode 100644
index 8cbda521542..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.c
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the function for handling "normal" speech operation.
- */
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_expanded 125*fs/8000 0 125*fs/8000-1
-
- func WebRtcNetEQ_Expand 40+370*fs/8000 125*fs/8000 39+495*fs/8000
-
- Total: 40+495*fs/8000
- */
-
-#define SCRATCH_PW16_EXPANDED 0
-#if (defined(NETEQ_48KHZ_WIDEBAND))
-#define SCRATCH_NETEQ_EXPAND 756
-#elif (defined(NETEQ_32KHZ_WIDEBAND))
-#define SCRATCH_NETEQ_EXPAND 504
-#elif (defined(NETEQ_WIDEBAND))
-#define SCRATCH_NETEQ_EXPAND 252
-#else /* NB */
-#define SCRATCH_NETEQ_EXPAND 126
-#endif
-
-/****************************************************************************
- * WebRtcNetEQ_Normal(...)
- *
- * This function has the possibility to modify data that is played out in Normal
- * mode, for example adjust the gain of the signal. The length of the signal
- * can not be changed.
- *
- * Input:
- * - inst : NetEq instance, i.e. the user that requests more
- * speech/audio data
- * - scratchPtr : Pointer to scratch vector
- * - decoded : Pointer to vector of new data from decoder
- * (Vector contents may be altered by the function)
- * - len : Number of input samples
- *
- * Output:
- * - inst : Updated user information
- * - outData : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Pointer to variable where the number of samples
- * produced will be written
- *
- * Return value : >=0 - Number of samples written to outData
- * -1 - Error
- */
-
-int WebRtcNetEQ_Normal(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_decoded, int16_t len,
- int16_t *pw16_outData, int16_t *pw16_len)
-{
-
- int i;
- int16_t fs_mult;
- int16_t fs_shift;
- int32_t w32_En_speech;
- int16_t enLen;
- int16_t w16_muted;
- int16_t w16_inc, w16_frac;
- int16_t w16_tmp;
- int32_t w32_tmp;
-
- /* Sanity check */
- if (len < 0)
- {
- /* Cannot have negative length of input vector */
- return (-1);
- }
-
- if (len == 0)
- {
- /* Still got some data to play => continue with the same mode */
- *pw16_len = len;
- return (len);
- }
-
- fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
- fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); /* Note that this is not "exact" for 48kHz */
-
- /*
- * Check if last RecOut call resulted in an Expand or a FadeToBGN. If so, we have to take
- * care of some cross-fading and unmuting.
- */
- if (inst->w16_mode == MODE_EXPAND || inst->w16_mode == MODE_FADE_TO_BGN)
- {
-
- /* Define memory where temporary result from Expand algorithm can be stored. */
-#ifdef SCRATCH
- int16_t *pw16_expanded = pw16_scratchPtr + SCRATCH_PW16_EXPANDED;
-#else
- int16_t pw16_expanded[FSMULT * 125];
-#endif
- int16_t expandedLen = 0;
- int16_t w16_decodedMax;
-
- /* Find largest value in new data */
- w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
-
- /* Generate interpolation data using Expand */
- /* First, set Expand parameters to appropriate values. */
- inst->ExpandInst.w16_lagsPosition = 0;
- inst->ExpandInst.w16_lagsDirection = 0;
- inst->ExpandInst.w16_stopMuting = 1; /* Do not mute signal any more */
-
- /* Call Expand */
- WebRtcNetEQ_Expand(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_expanded, &expandedLen, (int16_t) (inst->w16_mode == MODE_FADE_TO_BGN));
-
- inst->ExpandInst.w16_stopMuting = 0; /* Restore value */
- inst->ExpandInst.w16_consecExp = 0; /* Last was not Expand any more */
-
- /* Adjust muting factor (main muting factor times expand muting factor) */
- if (inst->w16_mode == MODE_FADE_TO_BGN)
- {
- /* If last mode was FadeToBGN, the mute factor should be zero. */
- inst->w16_muteFactor = 0;
- }
- else
- {
- /* w16_muteFactor * w16_expandMuteFactor */
- inst->w16_muteFactor
- = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
- inst->ExpandInst.w16_expandMuteFactor, 14);
- }
-
- /* Adjust muting factor if needed (to BGN level) */
- enLen = WEBRTC_SPL_MIN(fs_mult<<6, len); /* min( fs_mult * 64, len ) */
- w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(
- WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
- w16_tmp = WEBRTC_SPL_MAX(w16_tmp, 0);
- w32_En_speech = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, enLen, w16_tmp);
- w32_En_speech = WebRtcSpl_DivW32W16(w32_En_speech, (int16_t) (enLen >> w16_tmp));
-
- if ((w32_En_speech != 0) && (w32_En_speech > inst->BGNInst.w32_energy))
- {
- /* Normalize new frame energy to 15 bits */
- w16_tmp = WebRtcSpl_NormW32(w32_En_speech) - 16;
- /* we want inst->BGNInst.energy/En_speech in Q14 */
- w32_tmp = WEBRTC_SPL_SHIFT_W32(inst->BGNInst.w32_energy, (w16_tmp+14));
- w16_tmp = (int16_t) WEBRTC_SPL_SHIFT_W32(w32_En_speech, w16_tmp);
- w16_tmp = (int16_t) WebRtcSpl_DivW32W16(w32_tmp, w16_tmp);
- w16_muted = (int16_t) WebRtcSpl_SqrtFloor(
- WEBRTC_SPL_LSHIFT_W32((int32_t) w16_tmp,
- 14)); /* w16_muted in Q14 (sqrt(Q28)) */
- }
- else
- {
- w16_muted = 16384; /* 1.0 in Q14 */
- }
- if (w16_muted > inst->w16_muteFactor)
- {
- inst->w16_muteFactor = WEBRTC_SPL_MIN(w16_muted, 16384);
- }
-
- /* If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14) */
- w16_inc = WebRtcSpl_DivW32W16ResW16(64, fs_mult);
- for (i = 0; i < len; i++)
- {
- /* scale with mute factor */
- w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
- /* shift 14 with proper rounding */
- pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
- /* increase mute_factor towards 16384 */
- inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
- }
-
- /*
- * Interpolate the expanded data into the new vector
- * (NB/WB/SWB32/SWB40 8/16/32/32 samples)
- */
- fs_shift = WEBRTC_SPL_MIN(3, fs_shift); /* Set to 3 for >32kHz */
- w16_inc = 4 >> fs_shift;
- w16_frac = w16_inc;
- for (i = 0; i < 8 * fs_mult; i++)
- {
- pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- (WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
- WEBRTC_SPL_MUL_16_16((32 - w16_frac), pw16_expanded[i]) + 8),
- 5);
- w16_frac += w16_inc;
- }
-
-#ifdef NETEQ_CNG_CODEC
- }
- else if (inst->w16_mode==MODE_RFC3389CNG)
- { /* previous was RFC 3389 CNG...*/
- int16_t pw16_CngInterp[32];
- /* Reset mute factor and start up fresh */
- inst->w16_muteFactor = 16384;
- if (inst->CNG_Codec_inst != NULL)
- {
- /* Generate long enough for 32kHz */
- if(WebRtcCng_Generate(inst->CNG_Codec_inst,pw16_CngInterp, 32, 0)<0)
- {
- /* error returned; set return vector to all zeros */
- WebRtcSpl_MemSetW16(pw16_CngInterp, 0, 32);
- }
- }
- else
- {
- /*
- * If no CNG instance is defined, just copy from the decoded data.
- * (This will result in interpolating the decoded with itself.)
- */
- WEBRTC_SPL_MEMCPY_W16(pw16_CngInterp, pw16_decoded, fs_mult * 8);
- }
- /*
- * Interpolate the CNG into the new vector
- * (NB/WB/SWB32kHz/SWB48kHz 8/16/32/32 samples)
- */
- fs_shift = WEBRTC_SPL_MIN(3, fs_shift); /* Set to 3 for >32kHz */
- w16_inc = 4>>fs_shift;
- w16_frac = w16_inc;
- for (i = 0; i < 8 * fs_mult; i++)
- {
- pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- (WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
- WEBRTC_SPL_MUL_16_16((32-w16_frac), pw16_CngInterp[i]) + 8),
- 5);
- w16_frac += w16_inc;
- }
-#endif
-
- }
- else if (inst->w16_muteFactor < 16384)
- {
- /*
- * Previous was neither of Expand, FadeToBGN or RFC3389_CNG, but we are still
- * ramping up from previous muting.
- * If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14)
- */
- w16_inc = WebRtcSpl_DivW32W16ResW16(64, fs_mult);
- for (i = 0; i < len; i++)
- {
- /* scale with mute factor */
- w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
- /* shift 14 with proper rounding */
- pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
- /* increase mute_factor towards 16384 */
- inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
- }
- }
-
- /* Copy data to other buffer */WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, len);
-
- inst->w16_mode = MODE_NORMAL;
- *pw16_len = len;
- return (len);
-
-}
-
-#undef SCRATCH_PW16_EXPANDED
-#undef SCRATCH_NETEQ_EXPAND
-
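
Both the removed C implementation above and the C++ rewrite in the next file blend the first 8 * fs_mult samples of the newly decoded frame with interpolation data (from Expand or CNG) using a Q5 linear cross-fade: out = (frac * new + (32 - frac) * old + 8) >> 5, with frac stepping by 4 >> fs_shift. A standalone sketch of just that blend, with illustrative constants:

    // Standalone sketch of the Q5 cross-fade used by WebRtcNetEQ_Normal()
    // above and by Normal::Process() below. Values are illustrative only.
    #include <stdint.h>
    #include <stdio.h>

    int main() {
      const int fs_mult = 2;              // 16 kHz / 8000.
      const int fs_shift = 1;             // log2(fs_mult).
      const int blend_len = 8 * fs_mult;  // 16 samples are cross-faded.
      int16_t decoded[16];                // Sized for fs_mult == 2.
      int16_t expanded[16];
      for (int i = 0; i < blend_len; ++i) {
        decoded[i] = 1000;                // New frame (illustrative constant).
        expanded[i] = -1000;              // Interpolation data from Expand/CNG.
      }
      int increment = 4 >> fs_shift;      // 2 for wideband.
      int fraction = increment;
      for (int i = 0; i < blend_len; ++i) {
        // Weight ramps from mostly |expanded| to mostly |decoded| over the block.
        int32_t mixed = fraction * decoded[i] + (32 - fraction) * expanded[i] + 8;
        decoded[i] = static_cast<int16_t>(mixed >> 5);
        fraction += increment;
      }
      for (int i = 0; i < blend_len; ++i)
        printf("%d ", decoded[i]);
      printf("\n");
      return 0;
    }
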
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc
new file mode 100644
index 00000000000..bfde179bd17
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc
@@ -0,0 +1,190 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/normal.h"
+
+#include <string.h> // memset, memcpy
+
+#include <algorithm> // min
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+
+namespace webrtc {
+
+int Normal::Process(const int16_t* input,
+ size_t length,
+ Modes last_mode,
+ int16_t* external_mute_factor_array,
+ AudioMultiVector* output) {
+ if (length == 0) {
+ // Nothing to process.
+ output->Clear();
+ return static_cast<int>(length);
+ }
+
+ assert(output->Empty());
+ // Output should be empty at this point.
+ output->PushBackInterleaved(input, length);
+ int16_t* signal = &(*output)[0][0];
+
+ const unsigned fs_mult = fs_hz_ / 8000;
+ assert(fs_mult > 0);
+ // fs_shift = log2(fs_mult), rounded down.
+ // Note that |fs_shift| is not "exact" for 48 kHz.
+ // TODO(hlundin): Investigate this further.
+ const int fs_shift = 30 - WebRtcSpl_NormW32(fs_mult);
+
+ // Check if last RecOut call resulted in an Expand. If so, we have to take
+ // care of some cross-fading and unmuting.
+ if (last_mode == kModeExpand) {
+ // Generate interpolation data using Expand.
+ // First, set Expand parameters to appropriate values.
+ expand_->SetParametersForNormalAfterExpand();
+
+ // Call Expand.
+ AudioMultiVector expanded(output->Channels());
+ expand_->Process(&expanded);
+ expand_->Reset();
+
+ for (size_t channel_ix = 0; channel_ix < output->Channels(); ++channel_ix) {
+ // Adjust muting factor (main muting factor times expand muting factor).
+ external_mute_factor_array[channel_ix] = static_cast<int16_t>(
+ WEBRTC_SPL_MUL_16_16_RSFT(external_mute_factor_array[channel_ix],
+ expand_->MuteFactor(channel_ix), 14));
+
+ int16_t* signal = &(*output)[channel_ix][0];
+ size_t length_per_channel = length / output->Channels();
+ // Find largest absolute value in new data.
+ int16_t decoded_max = WebRtcSpl_MaxAbsValueW16(
+ signal, static_cast<int>(length_per_channel));
+ // Adjust muting factor if needed (to BGN level).
+ int energy_length = std::min(static_cast<int>(fs_mult * 64),
+ static_cast<int>(length_per_channel));
+ int scaling = 6 + fs_shift
+ - WebRtcSpl_NormW32(decoded_max * decoded_max);
+ scaling = std::max(scaling, 0); // |scaling| should always be >= 0.
+ int32_t energy = WebRtcSpl_DotProductWithScale(signal, signal,
+ energy_length, scaling);
+ energy = energy / (energy_length >> scaling);
+
+ int mute_factor;
+ if ((energy != 0) &&
+ (energy > background_noise_.Energy(channel_ix))) {
+ // Normalize new frame energy to 15 bits.
+ scaling = WebRtcSpl_NormW32(energy) - 16;
+ // We want background_noise_.energy() / energy in Q14.
+ int32_t bgn_energy =
+ background_noise_.Energy(channel_ix) << (scaling+14);
+ int16_t energy_scaled = energy << scaling;
+ int16_t ratio = WebRtcSpl_DivW32W16(bgn_energy, energy_scaled);
+ mute_factor = WebRtcSpl_SqrtFloor(static_cast<int32_t>(ratio) << 14);
+ } else {
+ mute_factor = 16384; // 1.0 in Q14.
+ }
+ if (mute_factor > external_mute_factor_array[channel_ix]) {
+ external_mute_factor_array[channel_ix] = std::min(mute_factor, 16384);
+ }
+
+ // If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14).
+ int16_t increment = 64 / fs_mult;
+ for (size_t i = 0; i < length_per_channel; i++) {
+ // Scale with mute factor.
+ assert(channel_ix < output->Channels());
+ assert(i < output->Size());
+ int32_t scaled_signal = (*output)[channel_ix][i] *
+ external_mute_factor_array[channel_ix];
+ // Shift 14 with proper rounding.
+ (*output)[channel_ix][i] = (scaled_signal + 8192) >> 14;
+ // Increase mute_factor towards 16384.
+ external_mute_factor_array[channel_ix] =
+ std::min(external_mute_factor_array[channel_ix] + increment, 16384);
+ }
+
+ // Interpolate the expanded data into the new vector.
+ // (NB/WB/SWB32/SWB48 8/16/32/48 samples.)
+ assert(fs_shift < 3); // Will always be 0, 1, or, 2.
+ increment = 4 >> fs_shift;
+ int fraction = increment;
+ for (size_t i = 0; i < 8 * fs_mult; i++) {
+ // TODO(hlundin): Add 16 instead of 8 for correct rounding. Keeping 8
+ // now for legacy bit-exactness.
+ assert(channel_ix < output->Channels());
+ assert(i < output->Size());
+ (*output)[channel_ix][i] =
+ (fraction * (*output)[channel_ix][i] +
+ (32 - fraction) * expanded[channel_ix][i] + 8) >> 5;
+ fraction += increment;
+ }
+ }
+ } else if (last_mode == kModeRfc3389Cng) {
+ assert(output->Channels() == 1); // Not adapted for multi-channel yet.
+ static const int kCngLength = 32;
+ int16_t cng_output[kCngLength];
+ // Reset mute factor and start up fresh.
+ external_mute_factor_array[0] = 16384;
+ AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+
+ if (cng_decoder) {
+ CNG_dec_inst* cng_inst = static_cast<CNG_dec_inst*>(cng_decoder->state());
+ // Generate long enough for 32kHz.
+ if (WebRtcCng_Generate(cng_inst, cng_output, kCngLength, 0) < 0) {
+ // Error returned; set return vector to all zeros.
+ memset(cng_output, 0, sizeof(cng_output));
+ }
+ } else {
+ // If no CNG instance is defined, just copy from the decoded data.
+ // (This will result in interpolating the decoded with itself.)
+ memcpy(cng_output, signal, fs_mult * 8 * sizeof(int16_t));
+ }
+ // Interpolate the CNG into the new vector.
+ // (NB/WB/SWB32/SWB48 8/16/32/48 samples.)
+ assert(fs_shift < 3); // Will always be 0, 1, or, 2.
+ int16_t increment = 4 >> fs_shift;
+ int16_t fraction = increment;
+ for (size_t i = 0; i < 8 * fs_mult; i++) {
+ // TODO(hlundin): Add 16 instead of 8 for correct rounding. Keeping 8 now
+ // for legacy bit-exactness.
+ signal[i] =
+ (fraction * signal[i] + (32 - fraction) * cng_output[i] + 8) >> 5;
+ fraction += increment;
+ }
+ } else if (external_mute_factor_array[0] < 16384) {
+ // Previous was neither of Expand, FadeToBGN or RFC3389_CNG, but we are
+ // still ramping up from previous muting.
+ // If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14).
+ int16_t increment = 64 / fs_mult;
+ size_t length_per_channel = length / output->Channels();
+ for (size_t i = 0; i < length_per_channel; i++) {
+ for (size_t channel_ix = 0; channel_ix < output->Channels();
+ ++channel_ix) {
+ // Scale with mute factor.
+ assert(channel_ix < output->Channels());
+ assert(i < output->Size());
+ int32_t scaled_signal = (*output)[channel_ix][i] *
+ external_mute_factor_array[channel_ix];
+ // Shift 14 with proper rounding.
+ (*output)[channel_ix][i] = (scaled_signal + 8192) >> 14;
+ // Increase mute_factor towards 16384.
+ external_mute_factor_array[channel_ix] =
+ std::min(16384, external_mute_factor_array[channel_ix] + increment);
+ }
+ }
+ }
+
+ return static_cast<int>(length);
+}
+
+} // namespace webrtc
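
The mute-factor handling in Normal::Process() is Q14 fixed-point: 16384 represents 1.0, each sample is scaled by the factor and shifted down 14 bits with rounding, and after a muted period the factor ramps back up by 64 / fs_mult per sample. The following standalone sketch reproduces only that arithmetic; the constants are illustrative and nothing here depends on WebRTC.

    // Standalone illustration of the Q14 mute-factor ramp in Normal::Process().
    #include <stdint.h>
    #include <stdio.h>
    #include <algorithm>

    int main() {
      const int fs_mult = 2;                   // 16 kHz / 8000.
      int16_t mute_factor = 8192;              // 0.5 in Q14, e.g. after expand.
      const int16_t increment = 64 / fs_mult;  // Ramp step per sample.
      int16_t samples[8] = {1000, 1000, 1000, 1000, 1000, 1000, 1000, 1000};
      for (int i = 0; i < 8; ++i) {
        // Scale with the mute factor and shift down 14 bits with rounding.
        int32_t scaled = samples[i] * mute_factor;
        samples[i] = static_cast<int16_t>((scaled + 8192) >> 14);
        // Ramp the factor towards 16384 (1.0 in Q14).
        mute_factor = std::min<int16_t>(16384, mute_factor + increment);
      }
      for (int i = 0; i < 8; ++i)
        printf("%d ", samples[i]);
      printf("\n");
      return 0;
    }
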
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.h
new file mode 100644
index 00000000000..aa24b528af4
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_NORMAL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_NORMAL_H_
+
+#include <string.h> // Access to size_t.
+
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class BackgroundNoise;
+class DecoderDatabase;
+class Expand;
+
+// This class provides the "Normal" DSP operation, which is performed when
+// there is no data loss, no need to stretch the timing of the signal, and
+// no other "special circumstances" are at hand.
+class Normal {
+ public:
+ Normal(int fs_hz, DecoderDatabase* decoder_database,
+ const BackgroundNoise& background_noise,
+ Expand* expand)
+ : fs_hz_(fs_hz),
+ decoder_database_(decoder_database),
+ background_noise_(background_noise),
+ expand_(expand) {
+ }
+
+ virtual ~Normal() {}
+
+ // Performs the "Normal" operation. The decoder data is supplied in |input|,
+ // having |length| samples in total for all channels (interleaved). The
+ // result is written to |output|. The number of channels allocated in
+ // |output| defines the number of channels that will be used when
+ // de-interleaving |input|. |last_mode| contains the mode used in the previous
+ // GetAudio call (i.e., not the current one), and |external_mute_factor| is
+ // a pointer to the mute factor in the NetEqImpl class.
+ int Process(const int16_t* input, size_t length,
+ Modes last_mode,
+ int16_t* external_mute_factor_array,
+ AudioMultiVector* output);
+
+ private:
+ int fs_hz_;
+ DecoderDatabase* decoder_database_;
+ const BackgroundNoise& background_noise_;
+ Expand* expand_;
+
+ DISALLOW_COPY_AND_ASSIGN(Normal);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_NORMAL_H_
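
The Process() contract above leaves the channel count implicit: |input| holds interleaved samples for all channels, and the number of channels allocated in |output| decides how they are split. The helper below is a sketch of that de-interleaving convention only; it is an assumption for illustration and not the actual AudioMultiVector code.

```cpp
// Illustrative de-interleaving helper mirroring the |input|/|output| contract
// documented for Normal::Process(). Not part of the WebRTC sources.
#include <cstddef>
#include <cstdint>
#include <vector>

std::vector<std::vector<int16_t> > Deinterleave(const int16_t* input,
                                                size_t length,
                                                size_t channels) {
  std::vector<std::vector<int16_t> > output(channels);
  for (size_t i = 0; i < length; ++i) {
    // Sample i of the interleaved stream belongs to channel i % channels.
    output[i % channels].push_back(input[i]);
  }
  return output;  // Each channel ends up with length / channels samples.
}
```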
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc
new file mode 100644
index 00000000000..c855865cfaa
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for Normal class.
+
+#include "webrtc/modules/audio_coding/neteq/normal.h"
+
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+
+namespace webrtc {
+
+TEST(Normal, CreateAndDestroy) {
+ MockDecoderDatabase db;
+ int fs = 8000;
+ size_t channels = 1;
+ BackgroundNoise bgn(channels);
+ SyncBuffer sync_buffer(1, 1000);
+ RandomVector random_vector;
+ Expand expand(&bgn, &sync_buffer, &random_vector, fs, channels);
+ Normal normal(fs, &db, bgn, &expand);
+ EXPECT_CALL(db, Die()); // Called when |db| goes out of scope.
+}
+
+// TODO(hlundin): Write more tests.
+
+} // namespace webrtc
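
As a sketch of what the TODO above might grow into, the following hypothetical test exercises Process() on a short all-zero frame. It is not part of the commit; it relies on the includes already present in normal_unittest.cc, and the use of kModeNormal from defines.h plus the expectation that a successful call returns the input length (mirroring the final return statement in normal.cc) are assumptions.

```cpp
// Hypothetical extra test, not in the diff. Assumes kModeNormal is a valid
// Modes value and that Process() returns |length| on success.
TEST(Normal, ProcessSmallInput) {
  MockDecoderDatabase db;
  int fs = 8000;
  size_t channels = 1;
  BackgroundNoise bgn(channels);
  SyncBuffer sync_buffer(1, 1000);
  RandomVector random_vector;
  Expand expand(&bgn, &sync_buffer, &random_vector, fs, channels);
  Normal normal(fs, &db, bgn, &expand);

  const size_t kLength = 80;      // 10 ms at 8 kHz.
  int16_t input[kLength] = {0};
  int16_t mute_factor = 16384;    // Unity gain in Q14.
  AudioMultiVector output(channels);
  EXPECT_EQ(static_cast<int>(kLength),
            normal.Process(input, kLength, kModeNormal, &mute_factor, &output));
  EXPECT_CALL(db, Die());  // Called when |db| goes out of scope.
}
```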
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h
new file mode 100644
index 00000000000..89ddda782cf
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_H_
+
+#include <list>
+
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Struct for holding RTP packets.
+struct Packet {
+ RTPHeader header;
+ uint8_t* payload; // Datagram excluding RTP header and header extension.
+ int payload_length;
+ bool primary; // Primary, i.e., not redundant payload.
+ int waiting_time;
+ bool sync_packet;
+
+ // Constructor.
+ Packet()
+ : payload(NULL),
+ payload_length(0),
+ primary(true),
+ waiting_time(0),
+ sync_packet(false) {
+ }
+
+ // Comparison operators. Establish a packet ordering based on (1) timestamp,
+ // (2) sequence number, (3) regular packet vs sync-packet and (4) redundancy.
+ // Timestamp and sequence numbers are compared taking wrap-around into
+ // account. If both timestamp and sequence numbers are identical and one of
+ // the packets is a sync-packet, the regular packet is considered earlier.
+ // For two regular packets with the same sequence number and timestamp, a
+ // primary payload is considered "smaller" than a secondary.
+ bool operator==(const Packet& rhs) const {
+ return (this->header.timestamp == rhs.header.timestamp &&
+ this->header.sequenceNumber == rhs.header.sequenceNumber &&
+ this->primary == rhs.primary &&
+ this->sync_packet == rhs.sync_packet);
+ }
+ bool operator!=(const Packet& rhs) const { return !operator==(rhs); }
+ bool operator<(const Packet& rhs) const {
+ if (this->header.timestamp == rhs.header.timestamp) {
+ if (this->header.sequenceNumber == rhs.header.sequenceNumber) {
+ // Timestamp and sequence numbers are identical. A sync packet is
+ // considered "larger" (i.e., "later") than a "network packet" (a
+ // regular packet received from the network, not a sync-packet). If
+ // neither packet is a sync-packet, the left-hand side is deemed
+ // "smaller" (i.e., "earlier") if it is primary and the right-hand
+ // side is not.
+ //
+ // Requiring sync packets to compare larger than "network packets"
+ // with the same RTP sequence number and timestamp guarantees that a
+ // "network packet" is inserted at an earlier position in
+ // |packet_buffer_| than a sync packet with the same timestamp and
+ // sequence number.
+ if (rhs.sync_packet)
+ return true;
+ if (this->sync_packet)
+ return false;
+ return (this->primary && !rhs.primary);
+ }
+ return (static_cast<uint16_t>(rhs.header.sequenceNumber
+ - this->header.sequenceNumber) < 0xFFFF / 2);
+ }
+ return (static_cast<uint32_t>(rhs.header.timestamp
+ - this->header.timestamp) < 0xFFFFFFFF / 2);
+ }
+ bool operator>(const Packet& rhs) const { return rhs.operator<(*this); }
+ bool operator<=(const Packet& rhs) const { return !operator>(rhs); }
+ bool operator>=(const Packet& rhs) const { return !operator<(rhs); }
+};
+
+// A list of packets.
+typedef std::list<Packet*> PacketList;
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_H_
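
The operator< above resolves ordering across sequence-number and timestamp wrap-around by comparing the unsigned difference against half the value range. The standalone illustration below reduces that idiom to 16-bit sequence numbers; it is not part of the diff.

```cpp
// Wrap-around-aware ordering, reduced to sequence numbers. This is the same
// test Packet::operator< applies (and analogously for 32-bit timestamps).
#include <cstdint>
#include <cstdio>

bool SeqNoLess(uint16_t lhs, uint16_t rhs) {
  // lhs precedes rhs if rhs is less than half the range "ahead" of lhs.
  return static_cast<uint16_t>(rhs - lhs) < 0xFFFF / 2;
}

int main() {
  printf("%d\n", SeqNoLess(0xFFFE, 0x0001));  // 1: 0xFFFE comes first (wrap).
  printf("%d\n", SeqNoLess(0x0001, 0xFFFE));  // 0: 0x0001 is the later packet.
  return 0;
}
```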
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.c
deleted file mode 100644
index a542333cf87..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.c
+++ /dev/null
@@ -1,851 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of the actual packet buffer data structure.
- */
-
-#include <assert.h>
-#include "packet_buffer.h"
-
-#include <string.h> /* to define NULL */
-
-#include "signal_processing_library.h"
-
-#include "mcu_dsp_common.h"
-
-#include "neteq_error_codes.h"
-
-#ifdef NETEQ_DELAY_LOGGING
-/* special code for offline delay logging */
-#include "delay_logging.h"
-#include <stdio.h>
-
-extern FILE *delay_fid2; /* file pointer to delay log file */
-extern uint32_t tot_received_packets;
-#endif /* NETEQ_DELAY_LOGGING */
-
-
-int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
- int16_t *pw16_memory, int memorySize)
-{
- int i;
- int pos = 0;
-
- /* Sanity check */
- if ((memorySize < PBUFFER_MIN_MEMORY_SIZE) || (pw16_memory == NULL)
- || (maxNoOfPackets < 2) || (maxNoOfPackets > 600))
- {
- /* Invalid parameters */
- return (PBUFFER_INIT_ERROR);
- }
-
- /* Clear the buffer instance */
- WebRtcSpl_MemSetW16((int16_t*) bufferInst, 0,
- sizeof(PacketBuf_t) / sizeof(int16_t));
-
- /* Clear the buffer memory */
- WebRtcSpl_MemSetW16((int16_t*) pw16_memory, 0, memorySize);
-
- /* Set maximum number of packets */
- bufferInst->maxInsertPositions = maxNoOfPackets;
-
- /* Initialize array pointers */
- /* After each pointer has been set, the index pos is advanced to point immediately
- * after the recently allocated vector. Note that one step for the pos index
- * corresponds to a int16_t.
- */
-
- bufferInst->timeStamp = (uint32_t*) &pw16_memory[pos];
- pos += maxNoOfPackets << 1; /* advance maxNoOfPackets * uint32_t */
-
- bufferInst->payloadLocation = (int16_t**) &pw16_memory[pos];
- pos += maxNoOfPackets * (sizeof(int16_t*) / sizeof(int16_t)); /* advance */
-
- bufferInst->seqNumber = (uint16_t*) &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * uint16_t */
-
- bufferInst->payloadType = &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * int16_t */
-
- bufferInst->payloadLengthBytes = &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * int16_t */
-
- bufferInst->rcuPlCntr = &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * int16_t */
-
- bufferInst->waitingTime = (int*) (&pw16_memory[pos]);
- /* Advance maxNoOfPackets * sizeof(waitingTime element). */
- pos += maxNoOfPackets *
- sizeof(*bufferInst->waitingTime) / sizeof(*pw16_memory);
-
- /* The payload memory starts after the slot arrays */
- bufferInst->startPayloadMemory = &pw16_memory[pos];
- bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
- bufferInst->memorySizeW16 = (memorySize - pos); /* Remaining memory */
-
- /* Initialize each payload slot as empty with infinite delay */
- for (i = 0; i < bufferInst->maxInsertPositions; i++)
- {
- bufferInst->payloadType[i] = -1;
- }
-
- /* Reset buffer parameters */
- bufferInst->numPacketsInBuffer = 0;
- bufferInst->packSizeSamples = 0;
- bufferInst->insertPosition = 0;
-
- /* Reset buffer statistics */
- bufferInst->discardedPackets = 0;
-
- return (0);
-}
-
-
-int WebRtcNetEQ_PacketBufferFlush(PacketBuf_t *bufferInst)
-{
- int i;
-
- /* Sanity check */
- if (bufferInst->startPayloadMemory == NULL)
- {
- /* Packet buffer has not been initialized */
- /* Don't do the flushing operation, since we do not
- know the state of the struct variables */
- return (0);
- }
-
- /* Set all payload lengths to zero */
- WebRtcSpl_MemSetW16(bufferInst->payloadLengthBytes, 0, bufferInst->maxInsertPositions);
-
- /* Reset buffer variables */
- bufferInst->numPacketsInBuffer = 0;
- bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
- bufferInst->insertPosition = 0;
-
- /* Clear all slots, starting with the last one */
- for (i = (bufferInst->maxInsertPositions - 1); i >= 0; i--)
- {
- bufferInst->payloadType[i] = -1;
- bufferInst->timeStamp[i] = 0;
- bufferInst->seqNumber[i] = 0;
- }
-
- return (0);
-}
-
-
-int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
- int16_t *flushed, int av_sync)
-{
- int nextPos;
- int i;
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- int temp_var;
-#endif /* NETEQ_DELAY_LOGGING */
-
- /* Initialize to "no flush" */
- *flushed = 0;
-
- /* Sanity check */
- if (bufferInst->startPayloadMemory == NULL)
- {
- /* packet buffer has not been initialized */
- return (-1);
- }
-
- /* Sanity check for payload length
- (payloadLen in bytes and memory size in int16_t) */
- if ((RTPpacket->payloadLen > (bufferInst->memorySizeW16 << 1)) || (RTPpacket->payloadLen
- <= 0))
- {
- /* faulty or too long payload length */
- return (-1);
- }
-
- /* If we are in AV-sync mode, there is a risk that we have inserted a sync
- * packet but now received the real version of it. Or because of some timing
- * we might be overwriting a true payload with sync (I'm not sure why this
- * should happen in regular case, but in some FEC enabled case happens).
- * Go through packets and delete the sync version of the packet in hand. Or
- * if this is sync packet and the regular version of it exists in the buffer
- * refrain from inserting.
- *
- * TODO(turajs): Could we get this for free if we had set the RCU-counter of
- * the sync packet to a number larger than 2?
- */
- if (av_sync) {
- for (i = 0; i < bufferInst->maxInsertPositions; ++i) {
- /* Check if sequence numbers match and the payload actually exists. */
- if (bufferInst->seqNumber[i] == RTPpacket->seqNumber &&
- bufferInst->payloadLengthBytes[i] > 0) {
- if (WebRtcNetEQ_IsSyncPayload(RTPpacket->payload,
- RTPpacket->payloadLen)) {
- return 0;
- }
-
- if (WebRtcNetEQ_IsSyncPayload(bufferInst->payloadLocation[i],
- bufferInst->payloadLengthBytes[i])) {
- /* Clear the position in the buffer. */
- bufferInst->payloadType[i] = -1;
- bufferInst->payloadLengthBytes[i] = 0;
-
- /* Reduce packet counter by one. */
- bufferInst->numPacketsInBuffer--;
- /* TODO(turajs) if this is the latest packet better we rewind
- * insertPosition and related variables. */
- break; /* There should be only one match. */
- }
- }
- }
- }
-
- /* Find a position in the buffer for this packet */
- if (bufferInst->numPacketsInBuffer != 0)
- {
- /* Get the next slot */
- bufferInst->insertPosition++;
- if (bufferInst->insertPosition >= bufferInst->maxInsertPositions)
- {
- /* "Wrap around" and start from the beginning */
- bufferInst->insertPosition = 0;
- }
-
- /* Check if there is enough space for the new packet */
- if (bufferInst->currentMemoryPos + ((RTPpacket->payloadLen + 1) >> 1)
- >= &bufferInst->startPayloadMemory[bufferInst->memorySizeW16])
- {
- int16_t *tempMemAddress;
-
- /*
- * Payload does not fit at the end of the memory, put it in the beginning
- * instead
- */
- bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
-
- /*
- * Now, we must search for the next non-empty payload,
- * finding the one with the lowest start address for the payload
- */
- tempMemAddress = &bufferInst->startPayloadMemory[bufferInst->memorySizeW16];
- nextPos = -1;
-
- /* Loop through all slots again */
- for (i = 0; i < bufferInst->maxInsertPositions; i++)
- {
- /* Look for the non-empty slot with the lowest
- payload location address */
- if (bufferInst->payloadLengthBytes[i] != 0 && bufferInst->payloadLocation[i]
- < tempMemAddress)
- {
- tempMemAddress = bufferInst->payloadLocation[i];
- nextPos = i;
- }
- }
-
- /* Check that we did find a previous payload */
- if (nextPos == -1)
- {
- /* The buffer is corrupt => flush and return error */
- WebRtcNetEQ_PacketBufferFlush(bufferInst);
- *flushed = 1;
- return (-1);
- }
- }
- else
- {
- /* Payload fits at the end of memory. */
-
- /* Find the next non-empty slot. */
- nextPos = bufferInst->insertPosition + 1;
-
- /* Increase nextPos until a non-empty slot is found or end of array is encountered*/
- while ((bufferInst->payloadLengthBytes[nextPos] == 0) && (nextPos
- < bufferInst->maxInsertPositions))
- {
- nextPos++;
- }
-
- if (nextPos == bufferInst->maxInsertPositions)
- {
- /*
- * Reached the end of the array, so there must be a packet in the first
- * position instead
- */
- nextPos = 0;
-
- /* Increase nextPos until a non-empty slot is found */
- while (bufferInst->payloadLengthBytes[nextPos] == 0)
- {
- nextPos++;
- }
- }
- } /* end if-else */
-
- /*
- * Check if the new payload will extend into a payload later in memory.
- * If so, the buffer is full.
- */
- if ((bufferInst->currentMemoryPos <= bufferInst->payloadLocation[nextPos])
- && ((&bufferInst->currentMemoryPos[(RTPpacket->payloadLen + 1) >> 1])
- > bufferInst->payloadLocation[nextPos]))
- {
- /* Buffer is full, so the buffer must be flushed */
- WebRtcNetEQ_PacketBufferFlush(bufferInst);
- *flushed = 1;
- }
-
- if (bufferInst->payloadLengthBytes[bufferInst->insertPosition] != 0)
- {
- /* All positions are already taken and entire buffer should be flushed */
- WebRtcNetEQ_PacketBufferFlush(bufferInst);
- *flushed = 1;
- }
-
- }
- else
- {
- /* Buffer is empty, just insert the packet at the beginning */
- bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
- bufferInst->insertPosition = 0;
- }
-
- /* Insert packet in the found position */
- if (RTPpacket->starts_byte1 == 0)
- {
- /* Payload is 16-bit aligned => just copy it */
-
- WEBRTC_SPL_MEMCPY_W8(bufferInst->currentMemoryPos,
- RTPpacket->payload, RTPpacket->payloadLen);
- }
- else
- {
- /* Payload is not 16-bit aligned => align it during copy operation */
- for (i = 0; i < RTPpacket->payloadLen; i++)
- {
- /* copy the (i+1)-th byte to the i-th byte */
-
- WEBRTC_SPL_SET_BYTE(bufferInst->currentMemoryPos,
- (WEBRTC_SPL_GET_BYTE(RTPpacket->payload, (i + 1))), i);
- }
- }
-
- /* Copy the packet information */
- bufferInst->payloadLocation[bufferInst->insertPosition] = bufferInst->currentMemoryPos;
- bufferInst->payloadLengthBytes[bufferInst->insertPosition] = RTPpacket->payloadLen;
- bufferInst->payloadType[bufferInst->insertPosition] = RTPpacket->payloadType;
- bufferInst->seqNumber[bufferInst->insertPosition] = RTPpacket->seqNumber;
- bufferInst->timeStamp[bufferInst->insertPosition] = RTPpacket->timeStamp;
- bufferInst->rcuPlCntr[bufferInst->insertPosition] = RTPpacket->rcuPlCntr;
- bufferInst->waitingTime[bufferInst->insertPosition] = 0;
- /* Update buffer parameters */
- bufferInst->numPacketsInBuffer++;
- bufferInst->currentMemoryPos += (RTPpacket->payloadLen + 1) >> 1;
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- if (*flushed)
- {
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_FLUSH;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- }
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_RECIN;
- if ((fwrite(&temp_var, sizeof(int),
- 1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->timeStamp, sizeof(uint32_t),
- 1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->seqNumber, sizeof(uint16_t),
- 1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->payloadType, sizeof(int),
- 1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->payloadLen, sizeof(int16_t),
- 1, delay_fid2) != 1)) {
- return -1;
- }
- tot_received_packets++;
-#endif /* NETEQ_DELAY_LOGGING */
-
- return (0);
-}
-
-
-int WebRtcNetEQ_PacketBufferExtract(PacketBuf_t *bufferInst, RTPPacket_t *RTPpacket,
- int bufferPosition, int *waitingTime)
-{
-
- /* Sanity check */
- if (bufferInst->startPayloadMemory == NULL)
- {
- /* packet buffer has not been initialized */
- return (PBUFFER_NOT_INITIALIZED);
- }
-
- if (bufferPosition < 0 || bufferPosition >= bufferInst->maxInsertPositions)
- {
- /* buffer position is outside valid range */
- return (NETEQ_OTHER_ERROR);
- }
-
- /* Check that there is a valid payload in the specified position */
- if (bufferInst->payloadLengthBytes[bufferPosition] <= 0)
- {
- /* The position does not contain a valid payload */
- RTPpacket->payloadLen = 0; /* Set zero length */
- return (PBUFFER_NONEXISTING_PACKET); /* Return error */
- }
-
- /* Payload exists => extract payload data */
-
- /* Copy the actual data payload to RTP packet struct */
-
- WEBRTC_SPL_MEMCPY_W16((int16_t*) RTPpacket->payload,
- bufferInst->payloadLocation[bufferPosition],
- (bufferInst->payloadLengthBytes[bufferPosition] + 1) >> 1); /*length in int16_t*/
-
- /* Copy payload parameters */
- RTPpacket->payloadLen = bufferInst->payloadLengthBytes[bufferPosition];
- RTPpacket->payloadType = bufferInst->payloadType[bufferPosition];
- RTPpacket->seqNumber = bufferInst->seqNumber[bufferPosition];
- RTPpacket->timeStamp = bufferInst->timeStamp[bufferPosition];
- RTPpacket->rcuPlCntr = bufferInst->rcuPlCntr[bufferPosition];
- *waitingTime = bufferInst->waitingTime[bufferPosition];
- RTPpacket->starts_byte1 = 0; /* payload is 16-bit aligned */
-
- /* Clear the position in the packet buffer */
- bufferInst->payloadType[bufferPosition] = -1;
- bufferInst->payloadLengthBytes[bufferPosition] = 0;
- bufferInst->seqNumber[bufferPosition] = 0;
- bufferInst->timeStamp[bufferPosition] = 0;
- bufferInst->waitingTime[bufferPosition] = 0;
- bufferInst->payloadLocation[bufferPosition] = bufferInst->startPayloadMemory;
-
- /* Reduce packet counter with one */
- bufferInst->numPacketsInBuffer--;
-
- return (0);
-}
-
-int WebRtcNetEQ_PacketBufferFindLowestTimestamp(PacketBuf_t* buffer_inst,
- uint32_t current_time_stamp,
- uint32_t* time_stamp,
- int* buffer_position,
- int erase_old_packets,
- int16_t* payload_type) {
- int32_t time_stamp_diff = WEBRTC_SPL_WORD32_MAX; /* Smallest diff found. */
- int32_t new_diff;
- int i;
- int16_t rcu_payload_cntr;
- if (buffer_inst->startPayloadMemory == NULL) {
- /* Packet buffer has not been initialized. */
- return PBUFFER_NOT_INITIALIZED;
- }
-
- /* Initialize all return values. */
- *time_stamp = 0;
- *payload_type = -1; /* Indicates that no packet was found. */
- *buffer_position = -1; /* Indicates that no packet was found. */
- rcu_payload_cntr = WEBRTC_SPL_WORD16_MAX; /* Indicates no packet found. */
-
- /* Check if buffer is empty. */
- if (buffer_inst->numPacketsInBuffer <= 0) {
- return 0;
- }
-
- /* Loop through all slots in buffer. */
- if (erase_old_packets) { /* If old payloads should be discarded. */
- for (i = 0; i < buffer_inst->maxInsertPositions; ++i) {
- /* Calculate difference between this slot and current_time_stamp. */
- new_diff = (int32_t)(buffer_inst->timeStamp[i] - current_time_stamp);
-
- /* Check if payload should be discarded. */
- if ((new_diff < 0) /* Payload is too old */
- && (new_diff > -30000) /* Account for TS wrap-around. */
- && (buffer_inst->payloadLengthBytes[i] > 0)) { /* Payload exists. */
- /* Throw away old packet. */
-
- /* Clear the position in the buffer. */
- buffer_inst->payloadType[i] = -1;
- buffer_inst->payloadLengthBytes[i] = 0;
-
- /* Reduce packet counter by one. */
- buffer_inst->numPacketsInBuffer--;
- /* Increase discard counter for in-call statistics. */
- buffer_inst->discardedPackets++;
- } else if (((new_diff < time_stamp_diff)
- || ((new_diff == time_stamp_diff)
- && (buffer_inst->rcuPlCntr[i] < rcu_payload_cntr)))
- && (buffer_inst->payloadLengthBytes[i] > 0)) {
- /* New diff is smaller than previous diffs or we have a candidate with a
- * time stamp as previous candidate but better RCU-counter;
- * and the payload exists.
- */
- /* Save this position as the best candidate. */
- *buffer_position = i;
- time_stamp_diff = new_diff;
- *payload_type = buffer_inst->payloadType[i];
- rcu_payload_cntr = buffer_inst->rcuPlCntr[i];
- }
- }
- } else {
- for (i = 0; i < buffer_inst->maxInsertPositions; ++i) {
- /* Calculate difference between this slot and current_time_stamp. */
- new_diff = (int32_t)(buffer_inst->timeStamp[i] - current_time_stamp);
-
- /* Check if this is the oldest packet. */
- if (((new_diff < time_stamp_diff)
- || ((new_diff == time_stamp_diff)
- && (buffer_inst->rcuPlCntr[i] < rcu_payload_cntr)))
- && (buffer_inst->payloadLengthBytes[i] > 0)) {
- /* New diff is smaller than previous diffs or we have a candidate with a
- * time_stamp as previous candidate but better RCU-counter;
- * and the payload exists.
- */
- /* Save this position as the best candidate. */
- *buffer_position = i;
- time_stamp_diff = new_diff;
- *payload_type = buffer_inst->payloadType[i];
- rcu_payload_cntr = buffer_inst->rcuPlCntr[i];
- }
- }
- }
-
- /* Check that we did find a real position. */
- if (*buffer_position >= 0) {
- /* Get the time_stamp for the best position. */
- *time_stamp = buffer_inst->timeStamp[*buffer_position];
- }
-
- return 0;
-}
-
-int WebRtcNetEQ_PacketBufferGetPacketSize(const PacketBuf_t* buffer_inst,
- int buffer_pos,
- const CodecDbInst_t* codec_database,
- int codec_pos, int last_duration,
- int av_sync) {
- if (codec_database->funcDurationEst[codec_pos] == NULL) {
- return last_duration;
- }
-
- if (av_sync != 0 &&
- WebRtcNetEQ_IsSyncPayload(buffer_inst->payloadLocation[buffer_pos],
- buffer_inst->payloadLengthBytes[buffer_pos])) {
- // In AV-sync and sync payload, report |last_duration| as current duration.
- return last_duration;
- }
-
- return (*codec_database->funcDurationEst[codec_pos])(
- codec_database->codec_state[codec_pos],
- (const uint8_t *)buffer_inst->payloadLocation[buffer_pos],
- buffer_inst->payloadLengthBytes[buffer_pos]);
-}
-
-int32_t WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t* buffer_inst,
- const CodecDbInst_t* codec_database,
- int av_sync) {
- int i, count;
- int last_duration;
- int last_codec_pos;
- int last_payload_type;
- int32_t size_samples;
-
- count = 0;
- last_duration = buffer_inst->packSizeSamples;
- last_codec_pos = -1;
- last_payload_type = -1;
- size_samples = 0;
-
- /* Loop through all slots in the buffer */
- for (i = 0; i < buffer_inst->maxInsertPositions; i++) {
- /* Only count the packets with non-zero size */
- if (buffer_inst->payloadLengthBytes[i] != 0) {
- int payload_type;
- int codec_pos;
- /* Figure out the codec database entry for this payload_type. */
- payload_type = buffer_inst->payloadType[i];
- /* Remember the last one, to avoid the database search. */
- if(payload_type == last_payload_type) {
- codec_pos = last_codec_pos;
- }
- else {
- codec_pos = WebRtcNetEQ_DbGetCodec(codec_database,
- payload_type);
- if (codec_pos >= 0) {
- codec_pos = codec_database->position[codec_pos];
- }
- }
- last_codec_pos = codec_pos;
- last_payload_type = payload_type;
- if (codec_pos >= 0) {
- /*
- * Right now WebRtcNetEQ_PacketBufferGetPacketSize either always
- * returns last_duration or always computes the real duration without
- * looking at last_duration. If an implementation really wanted to use
- * last_duration to compute a changing duration, we would have to
- * iterate through the packets in chronological order by timestamp.
- */
- /* Check for error before setting. */
- int temp_last_duration = WebRtcNetEQ_PacketBufferGetPacketSize(
- buffer_inst, i, codec_database, codec_pos,
- last_duration, av_sync);
- if (temp_last_duration >= 0)
- last_duration = temp_last_duration;
- }
- /* Add in the size of this packet. */
- size_samples += last_duration;
- count++;
- }
- }
-
- /* Sanity check; size cannot be negative */
- if (size_samples < 0) {
- size_samples = 0;
- }
- return size_samples;
-}
-
-void WebRtcNetEQ_IncrementWaitingTimes(PacketBuf_t *buffer_inst) {
- int i;
- /* Loop through all slots in the buffer. */
- for (i = 0; i < buffer_inst->maxInsertPositions; ++i) {
- /* Only increment waiting time for the packets with non-zero size. */
- if (buffer_inst->payloadLengthBytes[i] != 0) {
- buffer_inst->waitingTime[i]++;
- }
- }
-}
-
-int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
- int noOfCodecs, int *maxBytes,
- int *maxSlots,
- int* per_slot_overhead_bytes)
-{
- int i;
- int ok = 0;
- int16_t w16_tmp;
- int16_t codecBytes;
- int16_t codecBuffers;
-
- /* Initialize return variables to zero */
- *maxBytes = 0;
- *maxSlots = 0;
-
- /* Loop through all codecs supplied to function */
- for (i = 0; i < noOfCodecs; i++)
- {
- /* Find current codec and set parameters accordingly */
-
- if ((codecID[i] == kDecoderPCMu) || (codecID[i] == kDecoderPCMu_2ch))
- {
- codecBytes = 1680; /* Up to 210ms @ 64kbps */
- codecBuffers = 30; /* Down to 5ms frames */
- }
- else if ((codecID[i] == kDecoderPCMa) ||
- (codecID[i] == kDecoderPCMa_2ch))
- {
- codecBytes = 1680; /* Up to 210ms @ 64kbps */
- codecBuffers = 30; /* Down to 5ms frames */
- }
- else if (codecID[i] == kDecoderILBC)
- {
- codecBytes = 380; /* 200ms @ 15.2kbps (20ms frames) */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderISAC)
- {
- codecBytes = 960; /* 240ms @ 32kbps (60ms frames) */
- codecBuffers = 8;
- }
- else if ((codecID[i] == kDecoderISACswb) ||
- (codecID[i] == kDecoderISACfb))
- {
- codecBytes = 1560; /* 240ms @ 52kbps (30ms frames) */
- codecBuffers = 8;
- }
- else if (codecID[i] == kDecoderOpus)
- {
- codecBytes = 15300; /* 240ms @ 510kbps (60ms frames) */
- codecBuffers = 30; /* Replicating the value for PCMu/a */
- }
- else if ((codecID[i] == kDecoderPCM16B) ||
- (codecID[i] == kDecoderPCM16B_2ch))
- {
- codecBytes = 3360; /* 210ms */
- codecBuffers = 15;
- }
- else if ((codecID[i] == kDecoderPCM16Bwb) ||
- (codecID[i] == kDecoderPCM16Bwb_2ch))
- {
- codecBytes = 6720; /* 210ms */
- codecBuffers = 15;
- }
- else if ((codecID[i] == kDecoderPCM16Bswb32kHz) ||
- (codecID[i] == kDecoderPCM16Bswb32kHz_2ch))
- {
- codecBytes = 13440; /* 210ms */
- codecBuffers = 15;
- }
- else if (codecID[i] == kDecoderPCM16Bswb48kHz)
- {
- codecBytes = 20160; /* 210ms */
- codecBuffers = 15;
- }
- else if ((codecID[i] == kDecoderG722) ||
- (codecID[i] == kDecoderG722_2ch))
- {
- codecBytes = 1680; /* 210ms @ 64kbps */
- codecBuffers = 15;
- }
- else if (codecID[i] == kDecoderRED)
- {
- codecBytes = 0; /* Should not be max... */
- codecBuffers = 0;
- }
- else if (codecID[i] == kDecoderAVT)
- {
- codecBytes = 0; /* Should not be max... */
- codecBuffers = 0;
- }
- else if (codecID[i] == kDecoderCNG)
- {
- codecBytes = 0; /* Should not be max... */
- codecBuffers = 0;
- }
- else if (codecID[i] == kDecoderG729)
- {
- codecBytes = 210; /* 210ms @ 8kbps */
- codecBuffers = 20; /* max 200ms supported for 10ms frames */
- }
- else if (codecID[i] == kDecoderG729_1)
- {
- codecBytes = 840; /* 210ms @ 32kbps */
- codecBuffers = 10; /* max 200ms supported for 20ms frames */
- }
- else if (codecID[i] == kDecoderG726_16)
- {
- codecBytes = 400; /* 200ms @ 16kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG726_24)
- {
- codecBytes = 600; /* 200ms @ 24kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG726_32)
- {
- codecBytes = 800; /* 200ms @ 32kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG726_40)
- {
- codecBytes = 1000; /* 200ms @ 40kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1_16)
- {
- codecBytes = 420; /* 210ms @ 16kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1_24)
- {
- codecBytes = 630; /* 210ms @ 24kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1_32)
- {
- codecBytes = 840; /* 210ms @ 32kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1C_24)
- {
- codecBytes = 630; /* 210ms @ 24kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1C_32)
- {
- codecBytes = 840; /* 210ms @ 32kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1C_48)
- {
- codecBytes = 1260; /* 210ms @ 48kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderSPEEX_8)
- {
- codecBytes = 1250; /* 210ms @ 50kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderSPEEX_16)
- {
- codecBytes = 1250; /* 210ms @ 50kbps */
- codecBuffers = 10;
- }
- else if ((codecID[i] == kDecoderCELT_32) ||
- (codecID[i] == kDecoderCELT_32_2ch))
- {
- codecBytes = 1250; /* 210ms @ 50kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderGSMFR)
- {
- codecBytes = 340; /* 200ms */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderAMR)
- {
- codecBytes = 384; /* 240ms @ 12.2kbps+headers (60ms frames) */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderAMRWB)
- {
- codecBytes = 744;
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderArbitrary)
- {
- codecBytes = 6720; /* Assume worst case uncompressed WB 210ms */
- codecBuffers = 15;
- }
- else
- {
- /* Unknown codec */
- codecBytes = 0;
- codecBuffers = 0;
- ok = CODEC_DB_UNKNOWN_CODEC;
- }
-
- /* Update max variables */
- *maxBytes = WEBRTC_SPL_MAX((*maxBytes), codecBytes);
- *maxSlots = WEBRTC_SPL_MAX((*maxSlots), codecBuffers);
-
- } /* end of for loop */
-
- /*
- * Add size needed by the additional pointers for each slot inside struct,
- * as indicated on each line below.
- */
- w16_tmp = (sizeof(uint32_t) /* timeStamp */
- + sizeof(int16_t*) /* payloadLocation */
- + sizeof(uint16_t) /* seqNumber */
- + sizeof(int16_t) /* payloadType */
- + sizeof(int16_t) /* payloadLengthBytes */
- + sizeof(int16_t) /* rcuPlCntr */
- + sizeof(int)); /* waitingTime */
- /* Add the extra size per slot to the memory count */
- *maxBytes += w16_tmp * (*maxSlots);
-
- *per_slot_overhead_bytes = w16_tmp;
- return ok;
-}
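
The deleted C implementation selects the next packet via signed timestamp differences, discarding payloads that are slightly old while tolerating counter wrap-around (the -30000 bound in WebRtcNetEQ_PacketBufferFindLowestTimestamp). The compact illustration below is not from the sources; it only restates that age test.

```cpp
// The "too old" test from the legacy packet buffer: a packet is discarded only
// if it is between 1 and 29999 timestamp ticks behind the current timestamp;
// larger apparent ages are treated as possible wrap-around cases and kept.
#include <cstdint>
#include <cstdio>

bool IsTooOld(uint32_t packet_ts, uint32_t current_ts) {
  int32_t diff = static_cast<int32_t>(packet_ts - current_ts);
  return diff < 0 && diff > -30000;
}

int main() {
  printf("%d\n", IsTooOld(1000, 2000));        // 1: 1000 ticks old, discard.
  printf("%d\n", IsTooOld(0, 100000));         // 0: apparent age too large, kept.
  printf("%d\n", IsTooOld(100, 0xFFFFFF00u));  // 0: just after a wrap, i.e. future.
  return 0;
}
```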
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc
new file mode 100644
index 00000000000..8a81c2598bc
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc
@@ -0,0 +1,264 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This is the implementation of the PacketBuffer class. It is mostly based on
+// an STL list. The list is kept sorted at all times so that the next packet to
+// decode is at the beginning of the list.
+
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+
+#include <algorithm> // find_if()
+
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+
+namespace webrtc {
+
+// Predicate used when inserting packets in the buffer list.
+// Operator() returns true when |packet| goes before |new_packet|.
+class NewTimestampIsLarger {
+ public:
+ explicit NewTimestampIsLarger(const Packet* new_packet)
+ : new_packet_(new_packet) {
+ }
+ bool operator()(Packet* packet) {
+ return (*new_packet_ >= *packet);
+ }
+
+ private:
+ const Packet* new_packet_;
+};
+
+PacketBuffer::PacketBuffer(size_t max_number_of_packets)
+ : max_number_of_packets_(max_number_of_packets) {}
+
+// Destructor. All packets in the buffer will be destroyed.
+PacketBuffer::~PacketBuffer() {
+ Flush();
+}
+
+// Flush the buffer. All packets in the buffer will be destroyed.
+void PacketBuffer::Flush() {
+ DeleteAllPackets(&buffer_);
+}
+
+int PacketBuffer::InsertPacket(Packet* packet) {
+ if (!packet || !packet->payload) {
+ if (packet) {
+ delete packet;
+ }
+ return kInvalidPacket;
+ }
+
+ int return_val = kOK;
+
+ if (buffer_.size() >= max_number_of_packets_) {
+ // Buffer is full. Flush it.
+ Flush();
+ return_val = kFlushed;
+ }
+
+ // Get an iterator pointing to the place in the buffer where the new packet
+ // should be inserted. The list is searched from the back, since the most
+ // likely case is that the new packet should be near the end of the list.
+ PacketList::reverse_iterator rit = std::find_if(
+ buffer_.rbegin(), buffer_.rend(),
+ NewTimestampIsLarger(packet));
+ buffer_.insert(rit.base(), packet); // Insert the packet at that position.
+
+ return return_val;
+}
+
+int PacketBuffer::InsertPacketList(PacketList* packet_list,
+ const DecoderDatabase& decoder_database,
+ uint8_t* current_rtp_payload_type,
+ uint8_t* current_cng_rtp_payload_type) {
+ bool flushed = false;
+ while (!packet_list->empty()) {
+ Packet* packet = packet_list->front();
+ if (decoder_database.IsComfortNoise(packet->header.payloadType)) {
+ if (*current_cng_rtp_payload_type != 0xFF &&
+ *current_cng_rtp_payload_type != packet->header.payloadType) {
+ // New CNG payload type implies new codec type.
+ *current_rtp_payload_type = 0xFF;
+ Flush();
+ flushed = true;
+ }
+ *current_cng_rtp_payload_type = packet->header.payloadType;
+ } else if (!decoder_database.IsDtmf(packet->header.payloadType)) {
+ // This must be speech.
+ if (*current_rtp_payload_type != 0xFF &&
+ *current_rtp_payload_type != packet->header.payloadType) {
+ *current_cng_rtp_payload_type = 0xFF;
+ Flush();
+ flushed = true;
+ }
+ *current_rtp_payload_type = packet->header.payloadType;
+ }
+ int return_val = InsertPacket(packet);
+ packet_list->pop_front();
+ if (return_val == kFlushed) {
+ // The buffer flushed, but this is not an error. We can still continue.
+ flushed = true;
+ } else if (return_val != kOK) {
+ // An error occurred. Delete remaining packets in list and return.
+ DeleteAllPackets(packet_list);
+ return return_val;
+ }
+ }
+ return flushed ? kFlushed : kOK;
+}
+
+int PacketBuffer::NextTimestamp(uint32_t* next_timestamp) const {
+ if (Empty()) {
+ return kBufferEmpty;
+ }
+ if (!next_timestamp) {
+ return kInvalidPointer;
+ }
+ *next_timestamp = buffer_.front()->header.timestamp;
+ return kOK;
+}
+
+int PacketBuffer::NextHigherTimestamp(uint32_t timestamp,
+ uint32_t* next_timestamp) const {
+ if (Empty()) {
+ return kBufferEmpty;
+ }
+ if (!next_timestamp) {
+ return kInvalidPointer;
+ }
+ PacketList::const_iterator it;
+ for (it = buffer_.begin(); it != buffer_.end(); ++it) {
+ if ((*it)->header.timestamp >= timestamp) {
+ // Found a packet matching the search.
+ *next_timestamp = (*it)->header.timestamp;
+ return kOK;
+ }
+ }
+ return kNotFound;
+}
+
+const RTPHeader* PacketBuffer::NextRtpHeader() const {
+ if (Empty()) {
+ return NULL;
+ }
+ return const_cast<const RTPHeader*>(&(buffer_.front()->header));
+}
+
+Packet* PacketBuffer::GetNextPacket(int* discard_count) {
+ if (Empty()) {
+ // Buffer is empty.
+ return NULL;
+ }
+
+ Packet* packet = buffer_.front();
+ // Assert that the packet sanity checks in the InsertPacket method work.
+ assert(packet && packet->payload);
+ buffer_.pop_front();
+ // Discard other packets with the same timestamp. These are duplicates or
+ // redundant payloads that should not be used.
+ if (discard_count) {
+ *discard_count = 0;
+ }
+ while (!Empty() &&
+ buffer_.front()->header.timestamp == packet->header.timestamp) {
+ if (DiscardNextPacket() != kOK) {
+ assert(false); // Must be ok by design.
+ }
+ if (discard_count) {
+ ++(*discard_count);
+ }
+ }
+ return packet;
+}
+
+int PacketBuffer::DiscardNextPacket() {
+ if (Empty()) {
+ return kBufferEmpty;
+ }
+ // Assert that the packet sanity checks in the InsertPacket method work.
+ assert(buffer_.front());
+ assert(buffer_.front()->payload);
+ DeleteFirstPacket(&buffer_);
+ return kOK;
+}
+
+int PacketBuffer::DiscardOldPackets(uint32_t timestamp_limit) {
+ while (!Empty() &&
+ timestamp_limit != buffer_.front()->header.timestamp &&
+ static_cast<uint32_t>(timestamp_limit
+ - buffer_.front()->header.timestamp) <
+ 0xFFFFFFFF / 2) {
+ if (DiscardNextPacket() != kOK) {
+ assert(false); // Must be ok by design.
+ }
+ }
+ return 0;
+}
+
+int PacketBuffer::NumSamplesInBuffer(DecoderDatabase* decoder_database,
+ int last_decoded_length) const {
+ PacketList::const_iterator it;
+ int num_samples = 0;
+ int last_duration = last_decoded_length;
+ for (it = buffer_.begin(); it != buffer_.end(); ++it) {
+ Packet* packet = (*it);
+ AudioDecoder* decoder =
+ decoder_database->GetDecoder(packet->header.payloadType);
+ if (decoder) {
+ int duration;
+ if (packet->sync_packet) {
+ duration = last_duration;
+ } else if (packet->primary) {
+ duration =
+ decoder->PacketDuration(packet->payload, packet->payload_length);
+ } else {
+ continue;
+ }
+ if (duration >= 0) {
+ last_duration = duration; // Save the most up-to-date (valid) duration.
+ }
+ }
+ num_samples += last_duration;
+ }
+ return num_samples;
+}
+
+void PacketBuffer::IncrementWaitingTimes(int inc) {
+ PacketList::iterator it;
+ for (it = buffer_.begin(); it != buffer_.end(); ++it) {
+ (*it)->waiting_time += inc;
+ }
+}
+
+bool PacketBuffer::DeleteFirstPacket(PacketList* packet_list) {
+ if (packet_list->empty()) {
+ return false;
+ }
+ Packet* first_packet = packet_list->front();
+ delete [] first_packet->payload;
+ delete first_packet;
+ packet_list->pop_front();
+ return true;
+}
+
+void PacketBuffer::DeleteAllPackets(PacketList* packet_list) {
+ while (DeleteFirstPacket(packet_list)) {
+ // Continue while the list is not empty.
+ }
+}
+
+void PacketBuffer::BufferStat(int* num_packets, int* max_num_packets) const {
+ *num_packets = static_cast<int>(buffer_.size());
+ *max_num_packets = static_cast<int>(max_number_of_packets_);
+}
+
+} // namespace webrtc
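
InsertPacket() above keeps the list sorted by searching from the back with a reverse iterator and inserting at rit.base(). The minimal sketch below shows the same idiom with plain integers; it is not from the diff and uses a C++11 lambda instead of the NewTimestampIsLarger functor.

```cpp
// Reverse-search insert: find, from the back, the first element the new value
// compares >= to, then insert just after it via rit.base(). The list stays
// sorted, and searching from the back is cheap when new values tend to be
// near the end (the common case for in-order packets).
#include <algorithm>
#include <cstdio>
#include <list>

int main() {
  std::list<int> buffer = {10, 20, 30};
  const int new_value = 25;
  std::list<int>::reverse_iterator rit =
      std::find_if(buffer.rbegin(), buffer.rend(),
                   [new_value](int x) { return new_value >= x; });
  buffer.insert(rit.base(), new_value);   // rit.base() points one past *rit.
  for (int v : buffer) printf("%d ", v);  // Prints: 10 20 25 30
  printf("\n");
  return 0;
}
```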
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
index 61ff2b970fd..76c4ddd161d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,247 +8,128 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * Interface for the actual packet buffer data structure.
- */
-
-#ifndef PACKET_BUFFER_H
-#define PACKET_BUFFER_H
-
-#include "typedefs.h"
-
-#include "webrtc_neteq.h"
-#include "rtp.h"
-
-/* Define minimum allowed buffer memory, in 16-bit words */
-#define PBUFFER_MIN_MEMORY_SIZE 150
-
-/****************************/
-/* The packet buffer struct */
-/****************************/
-
-typedef struct
-{
-
- /* Variables common to the entire buffer */
- uint16_t packSizeSamples; /* packet size in samples of last decoded packet */
- int16_t *startPayloadMemory; /* pointer to the payload memory */
- int memorySizeW16; /* the size (in int16_t) of the payload memory */
- int16_t *currentMemoryPos; /* The memory position to insert next payload */
- int numPacketsInBuffer; /* The number of packets in the buffer */
- int insertPosition; /* The position to insert next packet */
- int maxInsertPositions; /* Maximum number of packets allowed */
-
- /* Arrays with one entry per packet slot */
- /* NOTE: If these are changed, the changes must be accounted for at the end of
- the function WebRtcNetEQ_GetDefaultCodecSettings(). */
- uint32_t *timeStamp; /* Timestamp in slot n */
- int16_t **payloadLocation; /* Memory location of payload in slot n */
- uint16_t *seqNumber; /* Sequence number in slot n */
- int16_t *payloadType; /* Payload type of packet in slot n */
- int16_t *payloadLengthBytes; /* Payload length of packet in slot n */
- int16_t *rcuPlCntr; /* zero for non-RCU payload, 1 for main payload
- 2 for redundant payload */
- int *waitingTime;
-
- /* Statistics counter */
- uint16_t discardedPackets; /* Number of discarded packets */
-
-} PacketBuf_t;
-
-/*************************/
-/* Function declarations */
-/*************************/
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferInit(...)
- *
- * This function initializes the packet buffer.
- *
- * Input:
- * - bufferInst : Buffer instance to be initialized
- * - noOfPackets : Maximum number of packets that buffer should hold
- * - memory : Pointer to the storage memory for the payloads
- * - memorySize : The size of the payload memory (in int16_t)
- *
- * Output:
- * - bufferInst : Updated buffer instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
- int16_t *pw16_memory, int memorySize);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferFlush(...)
- *
- * This function flushes all the packets in the buffer.
- *
- * Input:
- * - bufferInst : Buffer instance to be flushed
- *
- * Output:
- * - bufferInst : Flushed buffer instance
- *
- * Return value : 0 - Ok
- */
-
-int WebRtcNetEQ_PacketBufferFlush(PacketBuf_t *bufferInst);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferInsert(...)
- *
- * This function inserts an RTP packet into the packet buffer.
- *
- * Input:
- * - bufferInst : Buffer instance
- * - RTPpacket : An RTP packet struct (with payload, sequence
- * number, etc.)
- * - av_sync : 1 indicates AV-sync enabled, 0 disabled.
- *
- * Output:
- * - bufferInst : Updated buffer instance
- * - flushed : 1 if buffer was flushed, 0 otherwise
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
- int16_t *flushed, int av_sync);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferExtract(...)
- *
- * This function extracts a payload from the buffer.
- *
- * Input:
- * - bufferInst : Buffer instance
- * - bufferPosition: Position of the packet that should be extracted
- *
- * Output:
- * - RTPpacket : An RTP packet struct (with payload, sequence
- * number, etc)
- * - bufferInst : Updated buffer instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_PacketBufferExtract(PacketBuf_t *bufferInst, RTPPacket_t *RTPpacket,
- int bufferPosition, int *waitingTime);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferFindLowestTimestamp(...)
- *
- * This function finds the next packet with the lowest timestamp.
- *
- * Input:
- * - buffer_inst : Buffer instance.
- * - current_time_stamp : The timestamp to compare packet timestamps with.
- * - erase_old_packets : If non-zero, erase packets older than currentTS.
- *
- * Output:
- * - time_stamp : Lowest timestamp that was found.
- * - buffer_position : Position of this packet (-1 if there are no
- * packets in the buffer).
- * - payload_type : Payload type of the found payload.
- *
- * Return value : 0 - Ok;
- * < 0 - Error.
- */
-
-int WebRtcNetEQ_PacketBufferFindLowestTimestamp(PacketBuf_t* buffer_inst,
- uint32_t current_time_stamp,
- uint32_t* time_stamp,
- int* buffer_position,
- int erase_old_packets,
- int16_t* payload_type);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferGetPacketSize(...)
- *
- * Calculate and return an estimate of the data length (in samples) of the
- * given packet. If no estimate is available (because we do not know how to
- * compute packet durations for the associated payload type), last_duration
- * will be returned instead.
- *
- * Input:
- * - buffer_inst : Buffer instance
- * - buffer_pos : The index of the buffer of which to estimate the
- * duration
- * - codec_database : Codec database instance
- * - codec_pos : The codec database entry associated with the payload
- * type of the specified buffer.
- * - last_duration : The duration of the previous frame.
- * - av_sync : 1 indicates AV-sync enabled, 0 disabled.
- *
- * Return value : The buffer size in samples
- */
-
-int WebRtcNetEQ_PacketBufferGetPacketSize(const PacketBuf_t* buffer_inst,
- int buffer_pos,
- const CodecDbInst_t* codec_database,
- int codec_pos, int last_duration,
- int av_sync);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferGetSize(...)
- *
- * Calculate and return an estimate of the total data length (in samples)
- * currently in the buffer. The estimate is calculated as the number of
- * packets currently in the buffer (which does not have any remaining waiting
- * time), multiplied with the number of samples obtained from the last
- * decoded packet.
- *
- * Input:
- * - buffer_inst : Buffer instance
- * - codec_database : Codec database instance
- * - av_sync : 1 indicates AV-sync enabled, 0 disabled.
- *
- * Return value : The buffer size in samples
- */
-
-int32_t WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t* buffer_inst,
- const CodecDbInst_t* codec_database,
- int av_sync);
-
-/****************************************************************************
- * WebRtcNetEQ_IncrementWaitingTimes(...)
- *
- * Increment the waiting time for all packets in the buffer by one.
- *
- * Input:
- * - bufferInst : Buffer instance
- *
- * Return value : n/a
- */
-
-void WebRtcNetEQ_IncrementWaitingTimes(PacketBuf_t *buffer_inst);
-
-/****************************************************************************
- * WebRtcNetEQ_GetDefaultCodecSettings(...)
- *
- * Calculates a recommended buffer size for a specific set of codecs.
- *
- * Input:
- * - codecID : An array of codec types that will be used
- * - noOfCodecs : Number of codecs in array codecID
- *
- * Output:
- * - maxBytes : Recommended buffer memory size in bytes
- * - maxSlots : Recommended number of slots in buffer
- * - per_slot_overhead_bytes : overhead in bytes for each slot in buffer.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
- int noOfCodecs, int *maxBytes,
- int *maxSlots,
- int* per_slot_overhead_bytes);
-
-#endif /* PACKET_BUFFER_H */
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_BUFFER_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declaration.
+class DecoderDatabase;
+
+// This is the actual buffer holding the packets before decoding.
+class PacketBuffer {
+ public:
+ enum BufferReturnCodes {
+ kOK = 0,
+ kFlushed,
+ kNotFound,
+ kBufferEmpty,
+ kInvalidPacket,
+ kInvalidPointer
+ };
+
+ // Constructor creates a buffer which can hold a maximum of
+ // |max_number_of_packets| packets.
+ PacketBuffer(size_t max_number_of_packets);
+
+ // Deletes all packets in the buffer before destroying the buffer.
+ virtual ~PacketBuffer();
+
+ // Flushes the buffer and deletes all packets in it.
+ virtual void Flush();
+
+ // Returns true for an empty buffer.
+ virtual bool Empty() const { return buffer_.empty(); }
+
+ // Inserts |packet| into the buffer. The buffer will take over ownership of
+ // the packet object.
+ // Returns PacketBuffer::kOK on success, PacketBuffer::kFlushed if the buffer
+ // was flushed due to overfilling.
+ virtual int InsertPacket(Packet* packet);
+
+ // Inserts a list of packets into the buffer. The buffer will take over
+ // ownership of the packet objects.
+ // Returns PacketBuffer::kOK if all packets were inserted successfully.
+ // If the buffer was flushed due to overfilling, only a subset of the list is
+ // inserted, and PacketBuffer::kFlushed is returned.
+ // The last three parameters are included for legacy compatibility.
+ // TODO(hlundin): Redesign to not use current_*_payload_type and
+ // decoder_database.
+ virtual int InsertPacketList(PacketList* packet_list,
+ const DecoderDatabase& decoder_database,
+ uint8_t* current_rtp_payload_type,
+ uint8_t* current_cng_rtp_payload_type);
+
+ // Gets the timestamp for the first packet in the buffer and writes it to the
+ // output variable |next_timestamp|.
+ // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
+ // PacketBuffer::kOK otherwise.
+ virtual int NextTimestamp(uint32_t* next_timestamp) const;
+
+ // Gets the timestamp for the first packet in the buffer with a timestamp no
+ // lower than the input limit |timestamp|. The result is written to the output
+ // variable |next_timestamp|.
+ // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
+ // PacketBuffer::kOK otherwise.
+ virtual int NextHigherTimestamp(uint32_t timestamp,
+ uint32_t* next_timestamp) const;
+
+ // Returns a (constant) pointer to the RTP header of the first packet in the
+ // buffer. Returns NULL if the buffer is empty.
+ virtual const RTPHeader* NextRtpHeader() const;
+
+ // Extracts the first packet in the buffer and returns a pointer to it.
+ // Returns NULL if the buffer is empty. The caller is responsible for deleting
+ // the packet.
+ // Subsequent packets with the same timestamp as the one extracted will be
+ // discarded and properly deleted. The number of discarded packets will be
+ // written to the output variable |discard_count|.
+ virtual Packet* GetNextPacket(int* discard_count);
+
+ // Discards the first packet in the buffer. The packet is deleted.
+ // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
+ // PacketBuffer::kOK otherwise.
+ virtual int DiscardNextPacket();
+
+ // Discards all packets that are (strictly) older than |timestamp_limit|.
+ // Returns number of packets discarded.
+ virtual int DiscardOldPackets(uint32_t timestamp_limit);
+
+ // Returns the number of packets in the buffer, including duplicates and
+ // redundant packets.
+ virtual int NumPacketsInBuffer() const {
+ return static_cast<int>(buffer_.size());
+ }
+
+ // Returns the number of samples in the buffer, including samples carried in
+ // duplicate and redundant packets.
+ virtual int NumSamplesInBuffer(DecoderDatabase* decoder_database,
+ int last_decoded_length) const;
+
+ // Increase the waiting time counter for every packet in the buffer by |inc|.
+ // The default value for |inc| is 1.
+ virtual void IncrementWaitingTimes(int inc = 1);
+
+ virtual void BufferStat(int* num_packets, int* max_num_packets) const;
+
+ // Static method that properly deletes the first packet, and its payload
+ // array, in |packet_list|. Returns false if |packet_list| already was empty,
+ // otherwise true.
+ static bool DeleteFirstPacket(PacketList* packet_list);
+
+ // Static method that properly deletes all packets, and their payload arrays,
+ // in |packet_list|.
+ static void DeleteAllPackets(PacketList* packet_list);
+
+ private:
+ size_t max_number_of_packets_;
+ PacketList buffer_;
+ DISALLOW_COPY_AND_ASSIGN(PacketBuffer);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_BUFFER_H_
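
For orientation, a short usage sketch of the interface declared above follows. It is not part of the diff; the numeric values are arbitrary, and only calls declared in this header are used.

```cpp
// Minimal PacketBuffer usage following the ownership rules documented above:
// inserted packets are owned by the buffer, extracted ones by the caller.
#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"

void PacketBufferExample() {
  webrtc::PacketBuffer buffer(240);  // Holds at most 240 packets.

  webrtc::Packet* packet = new webrtc::Packet;
  packet->header.sequenceNumber = 17;
  packet->header.timestamp = 4711;
  packet->header.payloadType = 0;
  packet->payload_length = 10;
  packet->payload = new uint8_t[10];  // Ownership passes to the buffer.
  buffer.InsertPacket(packet);

  uint32_t next_ts = 0;
  if (buffer.NextTimestamp(&next_ts) == webrtc::PacketBuffer::kOK) {
    int discard_count = 0;
    webrtc::Packet* next = buffer.GetNextPacket(&discard_count);
    // The caller now owns |next| and its payload array.
    delete [] next->payload;
    delete next;
  }
}
```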
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
new file mode 100644
index 00000000000..5e6b89fdc4b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
@@ -0,0 +1,529 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for PacketBuffer class.
+
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+
+using ::testing::Return;
+using ::testing::_;
+
+namespace webrtc {
+
+// Helper class to generate packets. Packets must be deleted by the user.
+class PacketGenerator {
+ public:
+ PacketGenerator(uint16_t seq_no, uint32_t ts, uint8_t pt, int frame_size);
+ virtual ~PacketGenerator() {}
+ Packet* NextPacket(int payload_size_bytes);
+ void SkipPacket();
+
+ uint16_t seq_no_;
+ uint32_t ts_;
+ uint8_t pt_;
+ int frame_size_;
+};
+
+PacketGenerator::PacketGenerator(uint16_t seq_no, uint32_t ts, uint8_t pt,
+ int frame_size)
+ : seq_no_(seq_no),
+ ts_(ts),
+ pt_(pt),
+ frame_size_(frame_size) {
+}
+
+Packet* PacketGenerator::NextPacket(int payload_size_bytes) {
+ Packet* packet = new Packet;
+ packet->header.sequenceNumber = seq_no_;
+ packet->header.timestamp = ts_;
+ packet->header.payloadType = pt_;
+ packet->header.markerBit = false;
+ packet->header.ssrc = 0x12345678;
+ packet->header.numCSRCs = 0;
+ packet->header.paddingLength = 0;
+ packet->payload_length = payload_size_bytes;
+ packet->primary = true;
+ packet->payload = new uint8_t[payload_size_bytes];
+ ++seq_no_;
+ ts_ += frame_size_;
+ return packet;
+}
+
+void PacketGenerator::SkipPacket() {
+ ++seq_no_;
+ ts_ += frame_size_;
+}
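+
+// As an illustration of the generator above: PacketGenerator gen(17, 4711, 0, 10)
+// produces packets with sequence numbers 17, 18, 19, ..., timestamps 4711,
+// 4721, 4731, ..., and payload type 0, which is how the InsertPacket test
+// below sets up its input.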
+
+
+// Start of test definitions.
+
+TEST(PacketBuffer, CreateAndDestroy) {
+ PacketBuffer* buffer = new PacketBuffer(10); // 10 packets.
+ EXPECT_TRUE(buffer->Empty());
+ delete buffer;
+}
+
+TEST(PacketBuffer, InsertPacket) {
+ PacketBuffer buffer(10); // 10 packets.
+ PacketGenerator gen(17u, 4711u, 0, 10);
+
+ const int payload_len = 100;
+ Packet* packet = gen.NextPacket(payload_len);
+
+ EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
+ uint32_t next_ts;
+ EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
+ EXPECT_EQ(4711u, next_ts);
+ EXPECT_FALSE(buffer.Empty());
+ EXPECT_EQ(1, buffer.NumPacketsInBuffer());
+ const RTPHeader* hdr = buffer.NextRtpHeader();
+ EXPECT_EQ(&(packet->header), hdr); // Compare pointer addresses.
+
+ // Do not explicitly flush buffer or delete packet to test that it is deleted
+ // with the buffer. (Tested with Valgrind or similar tool.)
+}
+
+// Test to flush buffer.
+TEST(PacketBuffer, FlushBuffer) {
+ PacketBuffer buffer(10); // 10 packets.
+ PacketGenerator gen(0, 0, 0, 10);
+ const int payload_len = 10;
+
+ // Insert 10 small packets; should be ok.
+ for (int i = 0; i < 10; ++i) {
+ Packet* packet = gen.NextPacket(payload_len);
+ EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
+ }
+ EXPECT_EQ(10, buffer.NumPacketsInBuffer());
+ EXPECT_FALSE(buffer.Empty());
+
+ buffer.Flush();
+ // Buffer should delete the payloads itself.
+ EXPECT_EQ(0, buffer.NumPacketsInBuffer());
+ EXPECT_TRUE(buffer.Empty());
+}
+
+// Test to fill the buffer over the limits, and verify that it flushes.
+TEST(PacketBuffer, OverfillBuffer) {
+ PacketBuffer buffer(10); // 10 packets.
+ PacketGenerator gen(0, 0, 0, 10);
+
+ // Insert 10 small packets; should be ok.
+ const int payload_len = 10;
+ int i;
+ for (i = 0; i < 10; ++i) {
+ Packet* packet = gen.NextPacket(payload_len);
+ EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
+ }
+ EXPECT_EQ(10, buffer.NumPacketsInBuffer());
+ uint32_t next_ts;
+ EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
+ EXPECT_EQ(0u, next_ts); // Expect first inserted packet to be first in line.
+
+ // Insert 11th packet; should flush the buffer and insert it after flushing.
+ Packet* packet = gen.NextPacket(payload_len);
+ EXPECT_EQ(PacketBuffer::kFlushed, buffer.InsertPacket(packet));
+ EXPECT_EQ(1, buffer.NumPacketsInBuffer());
+ EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
+ // Expect last inserted packet to be first in line.
+ EXPECT_EQ(packet->header.timestamp, next_ts);
+
+ // Flush buffer to delete all packets.
+ buffer.Flush();
+}
+
+// Test inserting a list of packets.
+TEST(PacketBuffer, InsertPacketList) {
+ PacketBuffer buffer(10); // 10 packets.
+ PacketGenerator gen(0, 0, 0, 10);
+ PacketList list;
+ const int payload_len = 10;
+
+ // Insert 10 small packets.
+ for (int i = 0; i < 10; ++i) {
+ Packet* packet = gen.NextPacket(payload_len);
+ list.push_back(packet);
+ }
+
+ MockDecoderDatabase decoder_database;
+ EXPECT_CALL(decoder_database, IsComfortNoise(0))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(decoder_database, IsDtmf(0))
+ .WillRepeatedly(Return(false));
+ uint8_t current_pt = 0xFF;
+ uint8_t current_cng_pt = 0xFF;
+ EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacketList(&list,
+ decoder_database,
+ &current_pt,
+ &current_cng_pt));
+ EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
+ EXPECT_EQ(10, buffer.NumPacketsInBuffer());
+ EXPECT_EQ(0, current_pt); // Current payload type changed to 0.
+ EXPECT_EQ(0xFF, current_cng_pt); // CNG payload type not changed.
+
+ buffer.Flush(); // Clean up.
+
+ EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
+}
+
+// Test inserting a list of packets. Last packet is of a different payload type.
+// Expecting the buffer to flush.
+// TODO(hlundin): Remove this test when legacy operation is no longer needed.
+TEST(PacketBuffer, InsertPacketListChangePayloadType) {
+ PacketBuffer buffer(10); // 10 packets.
+ PacketGenerator gen(0, 0, 0, 10);
+ PacketList list;
+ const int payload_len = 10;
+
+ // Insert 10 small packets.
+ for (int i = 0; i < 10; ++i) {
+ Packet* packet = gen.NextPacket(payload_len);
+ list.push_back(packet);
+ }
+ // Insert 11th packet of another payload type (not CNG).
+ Packet* packet = gen.NextPacket(payload_len);
+ packet->header.payloadType = 1;
+ list.push_back(packet);
+
+
+ MockDecoderDatabase decoder_database;
+ EXPECT_CALL(decoder_database, IsComfortNoise(_))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(decoder_database, IsDtmf(_))
+ .WillRepeatedly(Return(false));
+ uint8_t current_pt = 0xFF;
+ uint8_t current_cng_pt = 0xFF;
+ EXPECT_EQ(PacketBuffer::kFlushed, buffer.InsertPacketList(&list,
+ decoder_database,
+ &current_pt,
+ &current_cng_pt));
+ EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
+ EXPECT_EQ(1, buffer.NumPacketsInBuffer()); // Only the last packet.
+ EXPECT_EQ(1, current_pt); // Current payload type changed to 1.
+ EXPECT_EQ(0xFF, current_cng_pt); // CNG payload type not changed.
+
+ buffer.Flush(); // Clean up.
+
+ EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
+}
+
+// Test inserting a number of packets, and verifying correct extraction order.
+// The packets inserted are as follows:
+// Packet no. Seq. no. Primary TS Secondary TS
+// 0 0xFFFD 0xFFFFFFD7 -
+// 1 0xFFFE 0xFFFFFFE1 0xFFFFFFD7
+// 2 0xFFFF 0xFFFFFFEB 0xFFFFFFE1
+// 3 0x0000 0xFFFFFFF5 0xFFFFFFEB
+// 4 0x0001 0xFFFFFFFF 0xFFFFFFF5
+// 5 0x0002 0x0000000A 0xFFFFFFFF
+// 6 MISSING--0x0003------0x00000014----0x0000000A--MISSING
+// 7 0x0004 0x0000001E 0x00000014
+// 8 0x0005 0x00000028 0x0000001E
+// 9 0x0006 0x00000032 0x00000028
+TEST(PacketBuffer, ExtractOrderRedundancy) {
+ PacketBuffer buffer(100); // 100 packets.
+ const uint32_t ts_increment = 10; // Samples per packet.
+ const uint16_t start_seq_no = 0xFFFF - 2; // Wraps after 3 packets.
+ const uint32_t start_ts = 0xFFFFFFFF -
+ 4 * ts_increment; // Wraps after 5 packets.
+ const uint8_t primary_pt = 0;
+ const uint8_t secondary_pt = 1;
+ PacketGenerator gen(start_seq_no, start_ts, primary_pt, ts_increment);
+ // Insert secondary payloads too. (Simulating RED.)
+ PacketGenerator red_gen(start_seq_no + 1, start_ts, secondary_pt,
+ ts_increment);
+
+ // Insert 9 small packets (skip one).
+ for (int i = 0; i < 10; ++i) {
+ const int payload_len = 10;
+ if (i == 6) {
+ // Skip this packet.
+ gen.SkipPacket();
+ red_gen.SkipPacket();
+ continue;
+ }
+ // Primary payload.
+ Packet* packet = gen.NextPacket(payload_len);
+ EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
+ if (i >= 1) {
+ // Secondary payload.
+ packet = red_gen.NextPacket(payload_len);
+ packet->primary = false;
+ EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
+ }
+ }
+ EXPECT_EQ(17, buffer.NumPacketsInBuffer()); // 9 primary + 8 secondary
+
+ uint16_t current_seq_no = start_seq_no;
+ uint32_t current_ts = start_ts;
+
+ for (int i = 0; i < 10; ++i) {
+ // Extract packets.
+ int drop_count = 0;
+ Packet* packet = buffer.GetNextPacket(&drop_count);
+ ASSERT_FALSE(packet == NULL);
+ if (i == 6) {
+ // Special case for the dropped primary payload.
+ // Expect secondary payload, and one step higher sequence number.
+ EXPECT_EQ(current_seq_no + 1, packet->header.sequenceNumber);
+ EXPECT_EQ(current_ts, packet->header.timestamp);
+ EXPECT_FALSE(packet->primary);
+ EXPECT_EQ(1, packet->header.payloadType);
+ EXPECT_EQ(0, drop_count);
+ } else {
+ EXPECT_EQ(current_seq_no, packet->header.sequenceNumber);
+ EXPECT_EQ(current_ts, packet->header.timestamp);
+ EXPECT_TRUE(packet->primary);
+ EXPECT_EQ(0, packet->header.payloadType);
+ if (i == 5 || i == 9) {
+ // No duplicate TS for dropped packet or for last primary payload.
+ EXPECT_EQ(0, drop_count);
+ } else {
+ EXPECT_EQ(1, drop_count);
+ }
+ }
+ ++current_seq_no;
+ current_ts += ts_increment;
+ delete [] packet->payload;
+ delete packet;
+ }
+}
+
+TEST(PacketBuffer, DiscardPackets) {
+ PacketBuffer buffer(100); // 100 packets.
+ const uint16_t start_seq_no = 17;
+ const uint32_t start_ts = 4711;
+ const uint32_t ts_increment = 10;
+ PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
+ PacketList list;
+ const int payload_len = 10;
+
+ // Insert 10 small packets.
+ for (int i = 0; i < 10; ++i) {
+ Packet* packet = gen.NextPacket(payload_len);
+ buffer.InsertPacket(packet);
+ }
+ EXPECT_EQ(10, buffer.NumPacketsInBuffer());
+
+ // Discard them one by one and make sure that the right packets are at the
+ // front of the buffer.
+ uint32_t current_ts = start_ts;
+ for (int i = 0; i < 10; ++i) {
+ uint32_t ts;
+ EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&ts));
+ EXPECT_EQ(current_ts, ts);
+ EXPECT_EQ(PacketBuffer::kOK, buffer.DiscardNextPacket());
+ current_ts += ts_increment;
+ }
+ EXPECT_TRUE(buffer.Empty());
+}
+
+TEST(PacketBuffer, Reordering) {
+ PacketBuffer buffer(100); // 100 packets.
+ const uint16_t start_seq_no = 17;
+ const uint32_t start_ts = 4711;
+ const uint32_t ts_increment = 10;
+ PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
+ const int payload_len = 10;
+
+ // Generate 10 small packets and insert them into a PacketList. Insert every
+ // odd packet to the front, and every even packet to the back, thus creating
+ // a (rather strange) reordering.
+ PacketList list;
+ for (int i = 0; i < 10; ++i) {
+ Packet* packet = gen.NextPacket(payload_len);
+ if (i % 2) {
+ list.push_front(packet);
+ } else {
+ list.push_back(packet);
+ }
+ }
+
+ MockDecoderDatabase decoder_database;
+ EXPECT_CALL(decoder_database, IsComfortNoise(0))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(decoder_database, IsDtmf(0))
+ .WillRepeatedly(Return(false));
+ uint8_t current_pt = 0xFF;
+ uint8_t current_cng_pt = 0xFF;
+
+ EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacketList(&list,
+ decoder_database,
+ &current_pt,
+ &current_cng_pt));
+ EXPECT_EQ(10, buffer.NumPacketsInBuffer());
+
+ // Extract them and make sure that they come out in the right order.
+ uint32_t current_ts = start_ts;
+ for (int i = 0; i < 10; ++i) {
+ Packet* packet = buffer.GetNextPacket(NULL);
+ ASSERT_FALSE(packet == NULL);
+ EXPECT_EQ(current_ts, packet->header.timestamp);
+ current_ts += ts_increment;
+ delete [] packet->payload;
+ delete packet;
+ }
+ EXPECT_TRUE(buffer.Empty());
+
+ EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
+}
+
+TEST(PacketBuffer, Failures) {
+ const uint16_t start_seq_no = 17;
+ const uint32_t start_ts = 4711;
+ const uint32_t ts_increment = 10;
+ int payload_len = 100;
+ PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
+
+ PacketBuffer* buffer = new PacketBuffer(100); // 100 packets.
+ Packet* packet = NULL;
+ EXPECT_EQ(PacketBuffer::kInvalidPacket, buffer->InsertPacket(packet));
+ packet = gen.NextPacket(payload_len);
+ delete [] packet->payload;
+ packet->payload = NULL;
+ EXPECT_EQ(PacketBuffer::kInvalidPacket, buffer->InsertPacket(packet));
+ // Packet is deleted by the PacketBuffer.
+
+ // Buffer should still be empty. Test all empty-checks.
+ uint32_t temp_ts;
+ EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer->NextTimestamp(&temp_ts));
+ EXPECT_EQ(PacketBuffer::kBufferEmpty,
+ buffer->NextHigherTimestamp(0, &temp_ts));
+ EXPECT_EQ(NULL, buffer->NextRtpHeader());
+ EXPECT_EQ(NULL, buffer->GetNextPacket(NULL));
+ EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer->DiscardNextPacket());
+ EXPECT_EQ(0, buffer->DiscardOldPackets(0)); // 0 packets discarded.
+
+ // Insert one packet to make the buffer non-empty.
+ packet = gen.NextPacket(payload_len);
+ EXPECT_EQ(PacketBuffer::kOK, buffer->InsertPacket(packet));
+ EXPECT_EQ(PacketBuffer::kInvalidPointer, buffer->NextTimestamp(NULL));
+ EXPECT_EQ(PacketBuffer::kInvalidPointer,
+ buffer->NextHigherTimestamp(0, NULL));
+ delete buffer;
+
+ // Insert packet list of three packets, where the second packet has an invalid
+ // payload. Expect first packet to be inserted, and the remaining two to be
+ // discarded.
+ buffer = new PacketBuffer(100); // 100 packets.
+ PacketList list;
+ list.push_back(gen.NextPacket(payload_len)); // Valid packet.
+ packet = gen.NextPacket(payload_len);
+ delete [] packet->payload;
+ packet->payload = NULL; // Invalid.
+ list.push_back(packet);
+ list.push_back(gen.NextPacket(payload_len)); // Valid packet.
+ MockDecoderDatabase decoder_database;
+ EXPECT_CALL(decoder_database, IsComfortNoise(0))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(decoder_database, IsDtmf(0))
+ .WillRepeatedly(Return(false));
+ uint8_t current_pt = 0xFF;
+ uint8_t current_cng_pt = 0xFF;
+ EXPECT_EQ(PacketBuffer::kInvalidPacket,
+ buffer->InsertPacketList(&list,
+ decoder_database,
+ &current_pt,
+ &current_cng_pt));
+ EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
+ EXPECT_EQ(1, buffer->NumPacketsInBuffer());
+ delete buffer;
+ EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
+}
+
+// Test packet comparison function.
+// The function should return true if the first packet "goes before" the second.
+TEST(PacketBuffer, ComparePackets) {
+ PacketGenerator gen(0, 0, 0, 10);
+ Packet* a = gen.NextPacket(10); // SN = 0, TS = 0.
+ Packet* b = gen.NextPacket(10); // SN = 1, TS = 10.
+ EXPECT_FALSE(*a == *b);
+ EXPECT_TRUE(*a != *b);
+ EXPECT_TRUE(*a < *b);
+ EXPECT_FALSE(*a > *b);
+ EXPECT_TRUE(*a <= *b);
+ EXPECT_FALSE(*a >= *b);
+
+ // Testing wrap-around case; 'a' is earlier but has a larger timestamp value.
+ a->header.timestamp = 0xFFFFFFFF - 10;
+ EXPECT_FALSE(*a == *b);
+ EXPECT_TRUE(*a != *b);
+ EXPECT_TRUE(*a < *b);
+ EXPECT_FALSE(*a > *b);
+ EXPECT_TRUE(*a <= *b);
+ EXPECT_FALSE(*a >= *b);
+
+ // Test equal packets.
+ EXPECT_TRUE(*a == *a);
+ EXPECT_FALSE(*a != *a);
+ EXPECT_FALSE(*a < *a);
+ EXPECT_FALSE(*a > *a);
+ EXPECT_TRUE(*a <= *a);
+ EXPECT_TRUE(*a >= *a);
+
+ // Test equal timestamps but different sequence numbers (0 and 1).
+ a->header.timestamp = b->header.timestamp;
+ EXPECT_FALSE(*a == *b);
+ EXPECT_TRUE(*a != *b);
+ EXPECT_TRUE(*a < *b);
+ EXPECT_FALSE(*a > *b);
+ EXPECT_TRUE(*a <= *b);
+ EXPECT_FALSE(*a >= *b);
+
+ // Test equal timestamps but different sequence numbers (0xFFFF and 1).
+ a->header.sequenceNumber = 0xFFFF;
+ EXPECT_FALSE(*a == *b);
+ EXPECT_TRUE(*a != *b);
+ EXPECT_TRUE(*a < *b);
+ EXPECT_FALSE(*a > *b);
+ EXPECT_TRUE(*a <= *b);
+ EXPECT_FALSE(*a >= *b);
+
+ // Test equal timestamps and sequence numbers, but only 'b' is primary.
+ a->header.sequenceNumber = b->header.sequenceNumber;
+ a->primary = false;
+ b->primary = true;
+ EXPECT_FALSE(*a == *b);
+ EXPECT_TRUE(*a != *b);
+ EXPECT_FALSE(*a < *b);
+ EXPECT_TRUE(*a > *b);
+ EXPECT_FALSE(*a <= *b);
+ EXPECT_TRUE(*a >= *b);
+
+ delete [] a->payload;
+ delete a;
+ delete [] b->payload;
+ delete b;
+}
+
+// Test the DeleteFirstPacket and DeleteAllPackets methods.
+TEST(PacketBuffer, DeleteAllPackets) {
+ PacketGenerator gen(0, 0, 0, 10);
+ PacketList list;
+ const int payload_len = 10;
+
+ // Insert 10 small packets.
+ for (int i = 0; i < 10; ++i) {
+ Packet* packet = gen.NextPacket(payload_len);
+ list.push_back(packet);
+ }
+ EXPECT_TRUE(PacketBuffer::DeleteFirstPacket(&list));
+ EXPECT_EQ(9u, list.size());
+ PacketBuffer::DeleteAllPackets(&list);
+ EXPECT_TRUE(list.empty());
+ EXPECT_FALSE(PacketBuffer::DeleteFirstPacket(&list));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc
new file mode 100644
index 00000000000..1d61ef0cf40
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc
@@ -0,0 +1,430 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+
+namespace webrtc {
+
+// The method loops through a list of packets {A, B, C, ...}. Each packet is
+// split into its corresponding RED payloads, {A1, A2, ...}, which are
+// temporarily held in the list |new_packets|.
+// When the first packet in |packet_list| has been processed, the original
+// packet is replaced by the new ones in |new_packets|, so that |packet_list|
+// becomes: {A1, A2, ..., B, C, ...}. The method then continues with B and C,
+// until all the original packets have been replaced by their split payloads.
+int PayloadSplitter::SplitRed(PacketList* packet_list) {
+ int ret = kOK;
+ PacketList::iterator it = packet_list->begin();
+ while (it != packet_list->end()) {
+ PacketList new_packets; // An empty list to store the split packets in.
+ Packet* red_packet = (*it);
+ assert(red_packet->payload);
+ uint8_t* payload_ptr = red_packet->payload;
+
+ // Read RED headers (according to RFC 2198):
+ //
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // |F| block PT | timestamp offset | block length |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // Last RED header:
+ // 0 1 2 3 4 5 6 7
+ // +-+-+-+-+-+-+-+-+
+ // |0| Block PT |
+ // +-+-+-+-+-+-+-+-+
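+ //
+ // As a worked example (matching the values used in the unit tests): the
+ // 4-byte header {0x80 | pt, 0x02, 0x80, 0x0A} decodes below to block payload
+ // type |pt|, timestamp offset (0x02 << 6) + ((0x80 & 0xFC) >> 2) = 160
+ // samples, and block length ((0x80 & 0x03) << 8) + 0x0A = 10 bytes, while a
+ // final single byte with the F bit cleared terminates the header list.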
+
+ bool last_block = false;
+ int sum_length = 0;
+ while (!last_block) {
+ Packet* new_packet = new Packet;
+ new_packet->header = red_packet->header;
+ // Check the F bit. If F == 0, this was the last block.
+ last_block = ((*payload_ptr & 0x80) == 0);
+ // Bits 1 through 7 are payload type.
+ new_packet->header.payloadType = payload_ptr[0] & 0x7F;
+ if (last_block) {
+ // No more header data to read.
+ ++sum_length; // Account for RED header size of 1 byte.
+ new_packet->payload_length = red_packet->payload_length - sum_length;
+ new_packet->primary = true; // Last block is always primary.
+ payload_ptr += 1; // Advance to first payload byte.
+ } else {
+ // Bits 8 through 21 are timestamp offset.
+ int timestamp_offset = (payload_ptr[1] << 6) +
+ ((payload_ptr[2] & 0xFC) >> 2);
+ new_packet->header.timestamp = red_packet->header.timestamp -
+ timestamp_offset;
+ // Bits 22 through 31 are payload length.
+ new_packet->payload_length = ((payload_ptr[2] & 0x03) << 8) +
+ payload_ptr[3];
+ new_packet->primary = false;
+ payload_ptr += 4; // Advance to next RED header.
+ }
+ sum_length += new_packet->payload_length;
+ sum_length += 4; // Account for RED header size of 4 bytes.
+ // Store in new list of packets.
+ new_packets.push_back(new_packet);
+ }
+
+ // Populate the new packets with payload data.
+ // |payload_ptr| now points at the first payload byte.
+ PacketList::iterator new_it;
+ for (new_it = new_packets.begin(); new_it != new_packets.end(); ++new_it) {
+ int payload_length = (*new_it)->payload_length;
+ if (payload_ptr + payload_length >
+ red_packet->payload + red_packet->payload_length) {
+ // The block lengths in the RED headers do not match the overall packet
+ // length. Something is corrupt. Discard this and the remaining
+ // payloads from this packet.
+ while (new_it != new_packets.end()) {
+ // Payload should not have been allocated yet.
+ assert(!(*new_it)->payload);
+ delete (*new_it);
+ new_it = new_packets.erase(new_it);
+ }
+ ret = kRedLengthMismatch;
+ break;
+ }
+ (*new_it)->payload = new uint8_t[payload_length];
+ memcpy((*new_it)->payload, payload_ptr, payload_length);
+ payload_ptr += payload_length;
+ }
+ // Reverse the order of the new packets, so that the primary payload is
+ // always first.
+ new_packets.reverse();
+ // Insert new packets into original list, before the element pointed to by
+ // iterator |it|.
+ packet_list->splice(it, new_packets, new_packets.begin(),
+ new_packets.end());
+ // Delete old packet payload.
+ delete [] (*it)->payload;
+ delete (*it);
+ // Remove |it| from the packet list. This operation effectively moves the
+ // iterator |it| to the next packet in the list. Thus, we do not have to
+ // increment it manually.
+ it = packet_list->erase(it);
+ }
+ return ret;
+}
+
+int PayloadSplitter::SplitFec(PacketList* packet_list,
+ DecoderDatabase* decoder_database) {
+ PacketList::iterator it = packet_list->begin();
+ // Iterate through all packets in |packet_list|.
+ while (it != packet_list->end()) {
+ Packet* packet = (*it); // Just to make the notation more intuitive.
+ // Get codec type for this payload.
+ uint8_t payload_type = packet->header.payloadType;
+ const DecoderDatabase::DecoderInfo* info =
+ decoder_database->GetDecoderInfo(payload_type);
+ if (!info) {
+ return kUnknownPayloadType;
+ }
+ // No splitting for a sync-packet.
+ if (packet->sync_packet) {
+ ++it;
+ continue;
+ }
+
+ // Skip the packet if the payload does not contain FEC data.
+ AudioDecoder* decoder = decoder_database->GetDecoder(payload_type);
+ // GetDecoder() should not return NULL for a known payload type.
+ assert(decoder != NULL);
+ if (!decoder ||
+ !decoder->PacketHasFec(packet->payload, packet->payload_length)) {
+ ++it;
+ continue;
+ }
+
+ switch (info->codec_type) {
+ case kDecoderOpus:
+ case kDecoderOpus_2ch: {
+ Packet* new_packet = new Packet;
+
+ new_packet->header = packet->header;
+ int duration = decoder->
+ PacketDurationRedundant(packet->payload, packet->payload_length);
+ new_packet->header.timestamp -= duration;
+ new_packet->payload = new uint8_t[packet->payload_length];
+ memcpy(new_packet->payload, packet->payload, packet->payload_length);
+ new_packet->payload_length = packet->payload_length;
+ new_packet->primary = false;
+ new_packet->waiting_time = packet->waiting_time;
+ new_packet->sync_packet = packet->sync_packet;
+
+ packet_list->insert(it, new_packet);
+ break;
+ }
+ default: {
+ return kFecSplitError;
+ }
+ }
+
+ ++it;
+ }
+ return kOK;
+}
+
+int PayloadSplitter::CheckRedPayloads(PacketList* packet_list,
+ const DecoderDatabase& decoder_database) {
+ PacketList::iterator it = packet_list->begin();
+ int main_payload_type = -1;
+ int num_deleted_packets = 0;
+ while (it != packet_list->end()) {
+ uint8_t this_payload_type = (*it)->header.payloadType;
+ if (!decoder_database.IsDtmf(this_payload_type) &&
+ !decoder_database.IsComfortNoise(this_payload_type)) {
+ if (main_payload_type == -1) {
+ // This is the first packet in the list which is non-DTMF non-CNG.
+ main_payload_type = this_payload_type;
+ } else {
+ if (this_payload_type != main_payload_type) {
+ // We do not allow redundant payloads of a different type.
+ // Discard this payload.
+ delete [] (*it)->payload;
+ delete (*it);
+ // Remove |it| from the packet list. This operation effectively
+ // moves the iterator |it| to the next packet in the list. Thus, we
+ // do not have to increment it manually.
+ it = packet_list->erase(it);
+ ++num_deleted_packets;
+ continue;
+ }
+ }
+ }
+ ++it;
+ }
+ return num_deleted_packets;
+}
+
+int PayloadSplitter::SplitAudio(PacketList* packet_list,
+ const DecoderDatabase& decoder_database) {
+ PacketList::iterator it = packet_list->begin();
+ // Iterate through all packets in |packet_list|.
+ while (it != packet_list->end()) {
+ Packet* packet = (*it); // Just to make the notation more intuitive.
+ // Get codec type for this payload.
+ const DecoderDatabase::DecoderInfo* info =
+ decoder_database.GetDecoderInfo(packet->header.payloadType);
+ if (!info) {
+ return kUnknownPayloadType;
+ }
+ // No splitting for a sync-packet.
+ if (packet->sync_packet) {
+ ++it;
+ continue;
+ }
+ PacketList new_packets;
+ switch (info->codec_type) {
+ case kDecoderPCMu:
+ case kDecoderPCMa: {
+ // 8 bytes per ms; 8 timestamps per ms.
+ SplitBySamples(packet, 8, 8, &new_packets);
+ break;
+ }
+ case kDecoderPCMu_2ch:
+ case kDecoderPCMa_2ch: {
+ // 2 * 8 bytes per ms; 8 timestamps per ms.
+ SplitBySamples(packet, 2 * 8, 8, &new_packets);
+ break;
+ }
+ case kDecoderG722: {
+ // 8 bytes per ms; 16 timestamps per ms.
+ SplitBySamples(packet, 8, 16, &new_packets);
+ break;
+ }
+ case kDecoderPCM16B: {
+ // 16 bytes per ms; 8 timestamps per ms.
+ SplitBySamples(packet, 16, 8, &new_packets);
+ break;
+ }
+ case kDecoderPCM16Bwb: {
+ // 32 bytes per ms; 16 timestamps per ms.
+ SplitBySamples(packet, 32, 16, &new_packets);
+ break;
+ }
+ case kDecoderPCM16Bswb32kHz: {
+ // 64 bytes per ms; 32 timestamps per ms.
+ SplitBySamples(packet, 64, 32, &new_packets);
+ break;
+ }
+ case kDecoderPCM16Bswb48kHz: {
+ // 96 bytes per ms; 48 timestamps per ms.
+ SplitBySamples(packet, 96, 48, &new_packets);
+ break;
+ }
+ case kDecoderPCM16B_2ch: {
+ // 2 * 16 bytes per ms; 8 timestamps per ms.
+ SplitBySamples(packet, 2 * 16, 8, &new_packets);
+ break;
+ }
+ case kDecoderPCM16Bwb_2ch: {
+ // 2 * 32 bytes per ms; 16 timestamps per ms.
+ SplitBySamples(packet, 2 * 32, 16, &new_packets);
+ break;
+ }
+ case kDecoderPCM16Bswb32kHz_2ch: {
+ // 2 * 64 bytes per ms; 32 timestamps per ms.
+ SplitBySamples(packet, 2 * 64, 32, &new_packets);
+ break;
+ }
+ case kDecoderPCM16Bswb48kHz_2ch: {
+ // 2 * 96 bytes per ms; 48 timestamps per ms.
+ SplitBySamples(packet, 2 * 96, 48, &new_packets);
+ break;
+ }
+ case kDecoderPCM16B_5ch: {
+ // 5 * 16 bytes per ms; 8 timestamps per ms.
+ SplitBySamples(packet, 5 * 16, 8, &new_packets);
+ break;
+ }
+ case kDecoderILBC: {
+ int bytes_per_frame;
+ int timestamps_per_frame;
+ if (packet->payload_length >= 950) {
+ return kTooLargePayload;
+ } else if (packet->payload_length % 38 == 0) {
+ // 20 ms frames.
+ bytes_per_frame = 38;
+ timestamps_per_frame = 160;
+ } else if (packet->payload_length % 50 == 0) {
+ // 30 ms frames.
+ bytes_per_frame = 50;
+ timestamps_per_frame = 240;
+ } else {
+ return kFrameSplitError;
+ }
+ int ret = SplitByFrames(packet, bytes_per_frame, timestamps_per_frame,
+ &new_packets);
+ if (ret < 0) {
+ return ret;
+ } else if (ret == kNoSplit) {
+ // Do not split at all. Simply advance to the next packet in the list.
+ ++it;
+ // We do not have any new packets to insert, and should not delete the
+ // old one. Skip the code after the switch case, and jump straight to
+ // the next packet in the while loop.
+ continue;
+ }
+ break;
+ }
+ default: {
+ // Do not split at all. Simply advance to the next packet in the list.
+ ++it;
+ // We do not have any new packets to insert, and should not delete the
+ // old one. Skip the code after the switch case, and jump straight to
+ // the next packet in the while loop.
+ continue;
+ }
+ }
+ // Insert new packets into original list, before the element pointed to by
+ // iterator |it|.
+ packet_list->splice(it, new_packets, new_packets.begin(),
+ new_packets.end());
+ // Delete old packet payload.
+ delete [] (*it)->payload;
+ delete (*it);
+ // Remove |it| from the packet list. This operation effectively moves the
+ // iterator |it| to the next packet in the list. Thus, we do not have to
+ // increment it manually.
+ it = packet_list->erase(it);
+ }
+ return kOK;
+}
+
+void PayloadSplitter::SplitBySamples(const Packet* packet,
+ int bytes_per_ms,
+ int timestamps_per_ms,
+ PacketList* new_packets) {
+ assert(packet);
+ assert(new_packets);
+
+ int split_size_bytes = packet->payload_length;
+
+ // Find a "chunk size" >= 20 ms and < 40 ms.
+ int min_chunk_size = bytes_per_ms * 20;
+ // Reduce the split size by half as long as |split_size_bytes| is at least
+ // twice the minimum chunk size (so that the resulting size is at least as
+ // large as the minimum chunk size).
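+ // As an example, with 8 bytes per ms (PCMu): a 60 ms payload (480 bytes) is
+ // halved once to 240-byte (30 ms) chunks, a 50 ms payload (400 bytes) to
+ // 200-byte (25 ms) chunks, and a 40 ms payload (320 bytes) to 160-byte
+ // (20 ms) chunks, while payloads of 30 ms or less are left as one chunk.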
+ while (split_size_bytes >= 2 * min_chunk_size) {
+ split_size_bytes >>= 1;
+ }
+ int timestamps_per_chunk =
+ split_size_bytes * timestamps_per_ms / bytes_per_ms;
+ uint32_t timestamp = packet->header.timestamp;
+
+ uint8_t* payload_ptr = packet->payload;
+ int len = packet->payload_length;
+ while (len >= (2 * split_size_bytes)) {
+ Packet* new_packet = new Packet;
+ new_packet->payload_length = split_size_bytes;
+ new_packet->header = packet->header;
+ new_packet->header.timestamp = timestamp;
+ timestamp += timestamps_per_chunk;
+ new_packet->primary = packet->primary;
+ new_packet->payload = new uint8_t[split_size_bytes];
+ memcpy(new_packet->payload, payload_ptr, split_size_bytes);
+ payload_ptr += split_size_bytes;
+ new_packets->push_back(new_packet);
+ len -= split_size_bytes;
+ }
+
+ if (len > 0) {
+ Packet* new_packet = new Packet;
+ new_packet->payload_length = len;
+ new_packet->header = packet->header;
+ new_packet->header.timestamp = timestamp;
+ new_packet->primary = packet->primary;
+ new_packet->payload = new uint8_t[len];
+ memcpy(new_packet->payload, payload_ptr, len);
+ new_packets->push_back(new_packet);
+ }
+}
+
+int PayloadSplitter::SplitByFrames(const Packet* packet,
+ int bytes_per_frame,
+ int timestamps_per_frame,
+ PacketList* new_packets) {
+ if (packet->payload_length % bytes_per_frame != 0) {
+ return kFrameSplitError;
+ }
+
+ int num_frames = packet->payload_length / bytes_per_frame;
+ if (num_frames == 1) {
+ // Special case. Do not split the payload.
+ return kNoSplit;
+ }
+
+ uint32_t timestamp = packet->header.timestamp;
+ uint8_t* payload_ptr = packet->payload;
+ int len = packet->payload_length;
+ while (len > 0) {
+ assert(len >= bytes_per_frame);
+ Packet* new_packet = new Packet;
+ new_packet->payload_length = bytes_per_frame;
+ new_packet->header = packet->header;
+ new_packet->header.timestamp = timestamp;
+ timestamp += timestamps_per_frame;
+ new_packet->primary = packet->primary;
+ new_packet->payload = new uint8_t[bytes_per_frame];
+ memcpy(new_packet->payload, payload_ptr, bytes_per_frame);
+ payload_ptr += bytes_per_frame;
+ new_packets->push_back(new_packet);
+ len -= bytes_per_frame;
+ }
+ return kOK;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.h
new file mode 100644
index 00000000000..a3dd77e5a53
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_PAYLOAD_SPLITTER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_PAYLOAD_SPLITTER_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class DecoderDatabase;
+
+// This class handles splitting of payloads into smaller parts.
+// The class does not have any member variables, and the methods could have
+// been made static. The reason for not making them static is testability.
+// With this design, the splitting functionality can be mocked during testing
+// of the NetEqImpl class.
+class PayloadSplitter {
+ public:
+ enum SplitterReturnCodes {
+ kOK = 0,
+ kNoSplit = 1,
+ kTooLargePayload = -1,
+ kFrameSplitError = -2,
+ kUnknownPayloadType = -3,
+ kRedLengthMismatch = -4,
+ kFecSplitError = -5,
+ };
+
+ PayloadSplitter() {}
+
+ virtual ~PayloadSplitter() {}
+
+ // Splits each packet in |packet_list| into its separate RED payloads. Each
+ // RED payload is packetized into a Packet. The original elements in
+ // |packet_list| are properly deleted, and replaced by the new packets.
+ // Note that all packets in |packet_list| must be RED payloads, i.e., have
+ // RED headers according to RFC 2198 at the very beginning of the payload.
+ // Returns kOK or an error.
+ virtual int SplitRed(PacketList* packet_list);
+
+ // Iterates through |packet_list| and duplicates each audio payload that
+ // contains FEC into a new packet, to be used for redundant decoding. The
+ // decoder database is needed to look up which codec each payload type
+ // corresponds to.
+ virtual int SplitFec(PacketList* packet_list,
+ DecoderDatabase* decoder_database);
+
+ // Checks all packets in |packet_list|. Packets that are DTMF events or
+ // comfort noise payloads are kept. Apart from those, only a single payload
+ // type is accepted; any packet with another payload type is discarded.
+ // Returns the number of discarded packets.
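+ // For example, with the payload types {CNGnb, PCMu, AVT/DTMF, iLBC} in the
+ // list, the iLBC packet is discarded, since PCMu is the first non-CNG,
+ // non-DTMF payload type encountered (see the CheckRedPayloads unit test).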
+ virtual int CheckRedPayloads(PacketList* packet_list,
+ const DecoderDatabase& decoder_database);
+
+ // Iterates through |packet_list| and, if possible, splits each audio payload
+ // into suitable size chunks. The result is written back to |packet_list| as
+ // new packets. The decoder database is needed to look up which codec each
+ // payload type corresponds to.
+ virtual int SplitAudio(PacketList* packet_list,
+ const DecoderDatabase& decoder_database);
+
+ private:
+ // Splits the payload in |packet|. The payload is assumed to be from a
+ // sample-based codec.
+ virtual void SplitBySamples(const Packet* packet,
+ int bytes_per_ms,
+ int timestamps_per_ms,
+ PacketList* new_packets);
+
+ // Splits the payload in |packet|. The payload will be split into chunks of
+ // size |bytes_per_frame|, each corresponding to |timestamps_per_frame|
+ // RTP timestamps.
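+ // For example, a 76-byte iLBC payload split with |bytes_per_frame| = 38 and
+ // |timestamps_per_frame| = 160 results in two 38-byte packets whose
+ // timestamps are 160 samples apart.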
+ virtual int SplitByFrames(const Packet* packet,
+ int bytes_per_frame,
+ int timestamps_per_frame,
+ PacketList* new_packets);
+
+ DISALLOW_COPY_AND_ASSIGN(PayloadSplitter);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_PAYLOAD_SPLITTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
new file mode 100644
index 00000000000..5cde1bda5e5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
@@ -0,0 +1,777 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for PayloadSplitter class.
+
+#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
+
+#include <assert.h>
+
+#include <utility> // pair
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+using ::testing::Return;
+using ::testing::ReturnNull;
+
+namespace webrtc {
+
+static const int kRedPayloadType = 100;
+static const int kPayloadLength = 10;
+static const int kRedHeaderLength = 4; // 4 bytes RED header.
+static const uint16_t kSequenceNumber = 0;
+static const uint32_t kBaseTimestamp = 0x12345678;
+
+// RED headers (according to RFC 2198):
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |F| block PT | timestamp offset | block length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// Last RED header:
+// 0 1 2 3 4 5 6 7
+// +-+-+-+-+-+-+-+-+
+// |0| Block PT |
+// +-+-+-+-+-+-+-+-+
+
+// Creates a RED packet, with |num_payloads| payloads, with payload types given
+// by the values in array |payload_types| (which must be of length
+// |num_payloads|). Each redundant payload is |timestamp_offset| samples
+// "behind" the the previous payload.
+Packet* CreateRedPayload(int num_payloads,
+ uint8_t* payload_types,
+ int timestamp_offset) {
+ Packet* packet = new Packet;
+ packet->header.payloadType = kRedPayloadType;
+ packet->header.timestamp = kBaseTimestamp;
+ packet->header.sequenceNumber = kSequenceNumber;
+ packet->payload_length = (kPayloadLength + 1) +
+ (num_payloads - 1) * (kPayloadLength + kRedHeaderLength);
+ uint8_t* payload = new uint8_t[packet->payload_length];
+ uint8_t* payload_ptr = payload;
+ for (int i = 0; i < num_payloads; ++i) {
+ // Write the RED headers.
+ if (i == num_payloads - 1) {
+ // Special case for last payload.
+ *payload_ptr = payload_types[i] & 0x7F; // F = 0;
+ ++payload_ptr;
+ break;
+ }
+ *payload_ptr = payload_types[i] & 0x7F;
+ // Not the last block; set F = 1.
+ *payload_ptr |= 0x80;
+ ++payload_ptr;
+ int this_offset = (num_payloads - i - 1) * timestamp_offset;
+ *payload_ptr = this_offset >> 6;
+ ++payload_ptr;
+ assert(kPayloadLength <= 1023); // Max length described by 10 bits.
+ *payload_ptr = ((this_offset & 0x3F) << 2) | (kPayloadLength >> 8);
+ ++payload_ptr;
+ *payload_ptr = kPayloadLength & 0xFF;
+ ++payload_ptr;
+ }
+ for (int i = 0; i < num_payloads; ++i) {
+ // Write |i| to all bytes in each payload.
+ memset(payload_ptr, i, kPayloadLength);
+ payload_ptr += kPayloadLength;
+ }
+ packet->payload = payload;
+ return packet;
+}
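+
+// For instance, CreateRedPayload(2, {0, 0}, 160) as used in the
+// OnePacketTwoPayloads test below produces a 25-byte payload: a 4-byte RED
+// header {0x80, 0x02, 0x80, 0x0A}, a 1-byte terminating header {0x00}, ten
+// payload bytes of value 0 (the redundant block) and ten payload bytes of
+// value 1 (the primary block).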
+
+
+// A possible Opus packet that contains FEC is the following.
+// The frame is 20 ms in duration.
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |0|0|0|0|1|0|0|0|x|1|x|x|x|x|x|x|x| |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |
+// | Compressed frame 1 (N-2 bytes)... :
+// : |
+// | |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
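+//
+// CreateOpusFecPacket() below fills in payload[0] = 0x08 and payload[1] = 0x40,
+// which matches the first two bytes of this pattern (the 0x40 bit in the
+// second byte is the bit marked '1' above).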
+Packet* CreateOpusFecPacket(uint8_t payload_type, int payload_length,
+ uint8_t payload_value) {
+ Packet* packet = new Packet;
+ packet->header.payloadType = payload_type;
+ packet->header.timestamp = kBaseTimestamp;
+ packet->header.sequenceNumber = kSequenceNumber;
+ packet->payload_length = payload_length;
+ uint8_t* payload = new uint8_t[packet->payload_length];
+ payload[0] = 0x08;
+ payload[1] = 0x40;
+ memset(&payload[2], payload_value, payload_length - 2);
+ packet->payload = payload;
+ return packet;
+}
+
+// Create a packet with all payload bytes set to |payload_value|.
+Packet* CreatePacket(uint8_t payload_type, int payload_length,
+ uint8_t payload_value) {
+ Packet* packet = new Packet;
+ packet->header.payloadType = payload_type;
+ packet->header.timestamp = kBaseTimestamp;
+ packet->header.sequenceNumber = kSequenceNumber;
+ packet->payload_length = payload_length;
+ uint8_t* payload = new uint8_t[packet->payload_length];
+ memset(payload, payload_value, payload_length);
+ packet->payload = payload;
+ return packet;
+}
+
+// Checks that |packet| has the attributes given in the remaining parameters.
+void VerifyPacket(const Packet* packet,
+ int payload_length,
+ uint8_t payload_type,
+ uint16_t sequence_number,
+ uint32_t timestamp,
+ uint8_t payload_value,
+ bool primary = true) {
+ EXPECT_EQ(payload_length, packet->payload_length);
+ EXPECT_EQ(payload_type, packet->header.payloadType);
+ EXPECT_EQ(sequence_number, packet->header.sequenceNumber);
+ EXPECT_EQ(timestamp, packet->header.timestamp);
+ EXPECT_EQ(primary, packet->primary);
+ ASSERT_FALSE(packet->payload == NULL);
+ for (int i = 0; i < packet->payload_length; ++i) {
+ EXPECT_EQ(payload_value, packet->payload[i]);
+ }
+}
+
+// Start of test definitions.
+
+TEST(PayloadSplitter, CreateAndDestroy) {
+ PayloadSplitter* splitter = new PayloadSplitter;
+ delete splitter;
+}
+
+// Packet A is split into A1 and A2.
+TEST(RedPayloadSplitter, OnePacketTwoPayloads) {
+ uint8_t payload_types[] = {0, 0};
+ const int kTimestampOffset = 160;
+ Packet* packet = CreateRedPayload(2, payload_types, kTimestampOffset);
+ PacketList packet_list;
+ packet_list.push_back(packet);
+ PayloadSplitter splitter;
+ EXPECT_EQ(PayloadSplitter::kOK, splitter.SplitRed(&packet_list));
+ ASSERT_EQ(2u, packet_list.size());
+ // Check the first packet. The first one in the list should always be the
+ // primary payload.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[1], kSequenceNumber,
+ kBaseTimestamp, 1, true);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+ // Check second packet.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber,
+ kBaseTimestamp - kTimestampOffset, 0, false);
+ delete [] packet->payload;
+ delete packet;
+}
+
+// Packets A and B are not split at all. Only the RED header in each packet is
+// removed.
+TEST(RedPayloadSplitter, TwoPacketsOnePayload) {
+ uint8_t payload_types[] = {0};
+ const int kTimestampOffset = 160;
+ // Create first packet, with a single RED payload.
+ Packet* packet = CreateRedPayload(1, payload_types, kTimestampOffset);
+ PacketList packet_list;
+ packet_list.push_back(packet);
+ // Create second packet, with a single RED payload.
+ packet = CreateRedPayload(1, payload_types, kTimestampOffset);
+ // Manually change timestamp and sequence number of second packet.
+ packet->header.timestamp += kTimestampOffset;
+ packet->header.sequenceNumber++;
+ packet_list.push_back(packet);
+ PayloadSplitter splitter;
+ EXPECT_EQ(PayloadSplitter::kOK, splitter.SplitRed(&packet_list));
+ ASSERT_EQ(2u, packet_list.size());
+ // Check first packet.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber,
+ kBaseTimestamp, 0, true);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+ // Check second packet.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber + 1,
+ kBaseTimestamp + kTimestampOffset, 0, true);
+ delete [] packet->payload;
+ delete packet;
+}
+
+// Packets A and B are split into packets A1, A2, A3, B1, B2, B3, with
+// attributes as follows:
+//
+// A1* A2 A3 B1* B2 B3
+// Payload type 0 1 2 0 1 2
+// Timestamp b b-o b-2o b+o b b-o
+// Sequence number 0 0 0 1 1 1
+//
+// b = kBaseTimestamp, o = kTimestampOffset, * = primary.
+TEST(RedPayloadSplitter, TwoPacketsThreePayloads) {
+ uint8_t payload_types[] = {2, 1, 0}; // Primary is the last one.
+ const int kTimestampOffset = 160;
+ // Create first packet, with 3 RED payloads.
+ Packet* packet = CreateRedPayload(3, payload_types, kTimestampOffset);
+ PacketList packet_list;
+ packet_list.push_back(packet);
+ // Create second packet, with 3 RED payloads.
+ packet = CreateRedPayload(3, payload_types, kTimestampOffset);
+ // Manually change timestamp and sequence number of second packet.
+ packet->header.timestamp += kTimestampOffset;
+ packet->header.sequenceNumber++;
+ packet_list.push_back(packet);
+ PayloadSplitter splitter;
+ EXPECT_EQ(PayloadSplitter::kOK, splitter.SplitRed(&packet_list));
+ ASSERT_EQ(6u, packet_list.size());
+ // Check first packet, A1.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[2], kSequenceNumber,
+ kBaseTimestamp, 2, true);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+ // Check second packet, A2.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[1], kSequenceNumber,
+ kBaseTimestamp - kTimestampOffset, 1, false);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+ // Check third packet, A3.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber,
+ kBaseTimestamp - 2 * kTimestampOffset, 0, false);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+ // Check fourth packet, B1.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[2], kSequenceNumber + 1,
+ kBaseTimestamp + kTimestampOffset, 2, true);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+ // Check fifth packet, B2.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[1], kSequenceNumber + 1,
+ kBaseTimestamp, 1, false);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+ // Check sixth packet, B3.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber + 1,
+ kBaseTimestamp - kTimestampOffset, 0, false);
+ delete [] packet->payload;
+ delete packet;
+}
+
+// Creates a list with 4 packets with these payload types:
+// 0 = CNGnb
+// 1 = PCMu
+// 2 = DTMF (AVT)
+// 3 = iLBC
+// We expect the method CheckRedPayloads to discard the iLBC packet, since it
+// is a non-CNG, non-DTMF payload of a different type than the first speech
+// payload found in the list (which is PCMu).
+TEST(RedPayloadSplitter, CheckRedPayloads) {
+ PacketList packet_list;
+ for (int i = 0; i <= 3; ++i) {
+ // Create packet with payload type |i|, payload length 10 bytes, all 0.
+ Packet* packet = CreatePacket(i, 10, 0);
+ packet_list.push_back(packet);
+ }
+
+ // Use a real DecoderDatabase object here instead of a mock, since it is
+ // easier to just register the payload types and let the actual implementation
+ // do its job.
+ DecoderDatabase decoder_database;
+ decoder_database.RegisterPayload(0, kDecoderCNGnb);
+ decoder_database.RegisterPayload(1, kDecoderPCMu);
+ decoder_database.RegisterPayload(2, kDecoderAVT);
+ decoder_database.RegisterPayload(3, kDecoderILBC);
+
+ PayloadSplitter splitter;
+ splitter.CheckRedPayloads(&packet_list, decoder_database);
+
+ ASSERT_EQ(3u, packet_list.size()); // Should have dropped the last packet.
+ // Verify packets. The loop verifies that payload types 0, 1, and 2 are in the
+ // list.
+ for (int i = 0; i <= 2; ++i) {
+ Packet* packet = packet_list.front();
+ VerifyPacket(packet, 10, i, kSequenceNumber, kBaseTimestamp, 0, true);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+ }
+ EXPECT_TRUE(packet_list.empty());
+}
+
+// Packet A is split into A1, A2 and A3. But the length parameter is off, so
+// the last payloads should be discarded.
+TEST(RedPayloadSplitter, WrongPayloadLength) {
+ uint8_t payload_types[] = {0, 0, 0};
+ const int kTimestampOffset = 160;
+ Packet* packet = CreateRedPayload(3, payload_types, kTimestampOffset);
+ // Manually tamper with the payload length of the packet.
+ // This is one byte too short for the second payload (out of three).
+ // We expect only the first payload to be returned.
+ packet->payload_length -= kPayloadLength + 1;
+ PacketList packet_list;
+ packet_list.push_back(packet);
+ PayloadSplitter splitter;
+ EXPECT_EQ(PayloadSplitter::kRedLengthMismatch,
+ splitter.SplitRed(&packet_list));
+ ASSERT_EQ(1u, packet_list.size());
+ // Check first packet.
+ packet = packet_list.front();
+ VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber,
+ kBaseTimestamp - 2 * kTimestampOffset, 0, false);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+}
+
+// Test that iSAC, iSAC-swb, RED, DTMF, CNG, and "Arbitrary" payloads do not
+// get split.
+TEST(AudioPayloadSplitter, NonSplittable) {
+ // Set up packets with different RTP payload types. The actual values do not
+ // matter, since we are mocking the decoder database anyway.
+ PacketList packet_list;
+ for (int i = 0; i < 6; ++i) {
+ // Let the payload type be |i|, and the payload value 10 * |i|.
+ packet_list.push_back(CreatePacket(i, kPayloadLength, 10 * i));
+ }
+
+ MockDecoderDatabase decoder_database;
+ // Tell the mock decoder database to return DecoderInfo structs with different
+ // codec types.
+ // Use scoped pointers to avoid having to delete them later.
+ scoped_ptr<DecoderDatabase::DecoderInfo> info0(
+ new DecoderDatabase::DecoderInfo(kDecoderISAC, 16000, NULL, false));
+ EXPECT_CALL(decoder_database, GetDecoderInfo(0))
+ .WillRepeatedly(Return(info0.get()));
+ scoped_ptr<DecoderDatabase::DecoderInfo> info1(
+ new DecoderDatabase::DecoderInfo(kDecoderISACswb, 32000, NULL, false));
+ EXPECT_CALL(decoder_database, GetDecoderInfo(1))
+ .WillRepeatedly(Return(info1.get()));
+ scoped_ptr<DecoderDatabase::DecoderInfo> info2(
+ new DecoderDatabase::DecoderInfo(kDecoderRED, 8000, NULL, false));
+ EXPECT_CALL(decoder_database, GetDecoderInfo(2))
+ .WillRepeatedly(Return(info2.get()));
+ scoped_ptr<DecoderDatabase::DecoderInfo> info3(
+ new DecoderDatabase::DecoderInfo(kDecoderAVT, 8000, NULL, false));
+ EXPECT_CALL(decoder_database, GetDecoderInfo(3))
+ .WillRepeatedly(Return(info3.get()));
+ scoped_ptr<DecoderDatabase::DecoderInfo> info4(
+ new DecoderDatabase::DecoderInfo(kDecoderCNGnb, 8000, NULL, false));
+ EXPECT_CALL(decoder_database, GetDecoderInfo(4))
+ .WillRepeatedly(Return(info4.get()));
+ scoped_ptr<DecoderDatabase::DecoderInfo> info5(
+ new DecoderDatabase::DecoderInfo(kDecoderArbitrary, 8000, NULL, false));
+ EXPECT_CALL(decoder_database, GetDecoderInfo(5))
+ .WillRepeatedly(Return(info5.get()));
+
+ PayloadSplitter splitter;
+ EXPECT_EQ(0, splitter.SplitAudio(&packet_list, decoder_database));
+ EXPECT_EQ(6u, packet_list.size());
+
+ // Check that all payloads are intact.
+ uint8_t payload_type = 0;
+ PacketList::iterator it = packet_list.begin();
+ while (it != packet_list.end()) {
+ VerifyPacket((*it), kPayloadLength, payload_type, kSequenceNumber,
+ kBaseTimestamp, 10 * payload_type);
+ ++payload_type;
+ delete [] (*it)->payload;
+ delete (*it);
+ it = packet_list.erase(it);
+ }
+
+ // The destructor is called when decoder_database goes out of scope.
+ EXPECT_CALL(decoder_database, Die());
+}
+
+// Test unknown payload type.
+TEST(AudioPayloadSplitter, UnknownPayloadType) {
+ PacketList packet_list;
+ static const uint8_t kPayloadType = 17; // Just a random number.
+ int kPayloadLengthBytes = 4711; // Random number.
+ packet_list.push_back(CreatePacket(kPayloadType, kPayloadLengthBytes, 0));
+
+ MockDecoderDatabase decoder_database;
+ // Tell the mock decoder database to return NULL when asked for decoder info.
+ // This signals that the decoder database does not recognize the payload type.
+ EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
+ .WillRepeatedly(ReturnNull());
+
+ PayloadSplitter splitter;
+ EXPECT_EQ(PayloadSplitter::kUnknownPayloadType,
+ splitter.SplitAudio(&packet_list, decoder_database));
+ EXPECT_EQ(1u, packet_list.size());
+
+
+ // Delete the packets and payloads to avoid having the test leak memory.
+ PacketList::iterator it = packet_list.begin();
+ while (it != packet_list.end()) {
+ delete [] (*it)->payload;
+ delete (*it);
+ it = packet_list.erase(it);
+ }
+
+ // The destructor is called when decoder_database goes out of scope.
+ EXPECT_CALL(decoder_database, Die());
+}
+
+class SplitBySamplesTest : public ::testing::TestWithParam<NetEqDecoder> {
+ protected:
+ virtual void SetUp() {
+ decoder_type_ = GetParam();
+ switch (decoder_type_) {
+ case kDecoderPCMu:
+ case kDecoderPCMa:
+ bytes_per_ms_ = 8;
+ samples_per_ms_ = 8;
+ break;
+ case kDecoderPCMu_2ch:
+ case kDecoderPCMa_2ch:
+ bytes_per_ms_ = 2 * 8;
+ samples_per_ms_ = 8;
+ break;
+ case kDecoderG722:
+ bytes_per_ms_ = 8;
+ samples_per_ms_ = 16;
+ break;
+ case kDecoderPCM16B:
+ bytes_per_ms_ = 16;
+ samples_per_ms_ = 8;
+ break;
+ case kDecoderPCM16Bwb:
+ bytes_per_ms_ = 32;
+ samples_per_ms_ = 16;
+ break;
+ case kDecoderPCM16Bswb32kHz:
+ bytes_per_ms_ = 64;
+ samples_per_ms_ = 32;
+ break;
+ case kDecoderPCM16Bswb48kHz:
+ bytes_per_ms_ = 96;
+ samples_per_ms_ = 48;
+ break;
+ case kDecoderPCM16B_2ch:
+ bytes_per_ms_ = 2 * 16;
+ samples_per_ms_ = 8;
+ break;
+ case kDecoderPCM16Bwb_2ch:
+ bytes_per_ms_ = 2 * 32;
+ samples_per_ms_ = 16;
+ break;
+ case kDecoderPCM16Bswb32kHz_2ch:
+ bytes_per_ms_ = 2 * 64;
+ samples_per_ms_ = 32;
+ break;
+ case kDecoderPCM16Bswb48kHz_2ch:
+ bytes_per_ms_ = 2 * 96;
+ samples_per_ms_ = 48;
+ break;
+ case kDecoderPCM16B_5ch:
+ bytes_per_ms_ = 5 * 16;
+ samples_per_ms_ = 8;
+ break;
+ default:
+ assert(false);
+ break;
+ }
+ }
+ int bytes_per_ms_;
+ int samples_per_ms_;
+ NetEqDecoder decoder_type_;
+};
+
+// Test splitting sample-based payloads.
+TEST_P(SplitBySamplesTest, PayloadSizes) {
+ PacketList packet_list;
+ static const uint8_t kPayloadType = 17; // Just a random number.
+ for (int payload_size_ms = 10; payload_size_ms <= 60; payload_size_ms += 10) {
+ // The payload values are set to be the same as the payload_size, so that
+ // one can tell which packet each split payload comes from.
+ int payload_size_bytes = payload_size_ms * bytes_per_ms_;
+ packet_list.push_back(CreatePacket(kPayloadType, payload_size_bytes,
+ payload_size_ms));
+ }
+
+ MockDecoderDatabase decoder_database;
+ // Tell the mock decoder database to return a DecoderInfo struct with the
+ // codec type under test.
+ // Use a scoped pointer to avoid having to delete it later.
+ // (Sample rate is set to 8000 Hz, but does not matter.)
+ scoped_ptr<DecoderDatabase::DecoderInfo> info(
+ new DecoderDatabase::DecoderInfo(decoder_type_, 8000, NULL, false));
+ EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
+ .WillRepeatedly(Return(info.get()));
+
+ PayloadSplitter splitter;
+ EXPECT_EQ(0, splitter.SplitAudio(&packet_list, decoder_database));
+ // The payloads are expected to be split as follows:
+ // 10 ms -> 10 ms
+ // 20 ms -> 20 ms
+ // 30 ms -> 30 ms
+ // 40 ms -> 20 + 20 ms
+ // 50 ms -> 25 + 25 ms
+ // 60 ms -> 30 + 30 ms
+ int expected_size_ms[] = {10, 20, 30, 20, 20, 25, 25, 30, 30};
+ int expected_payload_value[] = {10, 20, 30, 40, 40, 50, 50, 60, 60};
+ int expected_timestamp_offset_ms[] = {0, 0, 0, 0, 20, 0, 25, 0, 30};
+ size_t expected_num_packets =
+ sizeof(expected_size_ms) / sizeof(expected_size_ms[0]);
+ EXPECT_EQ(expected_num_packets, packet_list.size());
+
+ PacketList::iterator it = packet_list.begin();
+ int i = 0;
+ while (it != packet_list.end()) {
+ int length_bytes = expected_size_ms[i] * bytes_per_ms_;
+ uint32_t expected_timestamp = kBaseTimestamp +
+ expected_timestamp_offset_ms[i] * samples_per_ms_;
+ VerifyPacket((*it), length_bytes, kPayloadType, kSequenceNumber,
+ expected_timestamp, expected_payload_value[i]);
+ delete [] (*it)->payload;
+ delete (*it);
+ it = packet_list.erase(it);
+ ++i;
+ }
+
+ // The destructor is called when decoder_database goes out of scope.
+ EXPECT_CALL(decoder_database, Die());
+}
+
+INSTANTIATE_TEST_CASE_P(
+ PayloadSplitter, SplitBySamplesTest,
+ ::testing::Values(kDecoderPCMu, kDecoderPCMa, kDecoderPCMu_2ch,
+ kDecoderPCMa_2ch, kDecoderG722, kDecoderPCM16B,
+ kDecoderPCM16Bwb, kDecoderPCM16Bswb32kHz,
+ kDecoderPCM16Bswb48kHz, kDecoderPCM16B_2ch,
+ kDecoderPCM16Bwb_2ch, kDecoderPCM16Bswb32kHz_2ch,
+ kDecoderPCM16Bswb48kHz_2ch, kDecoderPCM16B_5ch));
+
+
+class SplitIlbcTest : public ::testing::TestWithParam<std::pair<int, int> > {
+ protected:
+ virtual void SetUp() {
+ const std::pair<int, int> parameters = GetParam();
+ num_frames_ = parameters.first;
+ frame_length_ms_ = parameters.second;
+ frame_length_bytes_ = (frame_length_ms_ == 20) ? 38 : 50;
+ }
+ size_t num_frames_;
+ int frame_length_ms_;
+ int frame_length_bytes_;
+};
+
+// Test splitting iLBC payloads into frames.
+TEST_P(SplitIlbcTest, NumFrames) {
+ PacketList packet_list;
+ static const uint8_t kPayloadType = 17; // Just a random number.
+ const int frame_length_samples = frame_length_ms_ * 8;
+ int payload_length_bytes = frame_length_bytes_ * num_frames_;
+ Packet* packet = CreatePacket(kPayloadType, payload_length_bytes, 0);
+ // Fill payload with increasing integers {0, 1, 2, ...}.
+ for (int i = 0; i < packet->payload_length; ++i) {
+ packet->payload[i] = static_cast<uint8_t>(i);
+ }
+ packet_list.push_back(packet);
+
+ MockDecoderDatabase decoder_database;
+ // Tell the mock decoder database to return a DecoderInfo struct with the
+ // iLBC codec type.
+ // Use a scoped pointer to avoid having to delete it later.
+ scoped_ptr<DecoderDatabase::DecoderInfo> info(
+ new DecoderDatabase::DecoderInfo(kDecoderILBC, 8000, NULL, false));
+ EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
+ .WillRepeatedly(Return(info.get()));
+
+ PayloadSplitter splitter;
+ EXPECT_EQ(0, splitter.SplitAudio(&packet_list, decoder_database));
+ EXPECT_EQ(num_frames_, packet_list.size());
+
+ PacketList::iterator it = packet_list.begin();
+ int frame_num = 0;
+ uint8_t payload_value = 0;
+ while (it != packet_list.end()) {
+ Packet* packet = (*it);
+ EXPECT_EQ(kBaseTimestamp + frame_length_samples * frame_num,
+ packet->header.timestamp);
+ EXPECT_EQ(frame_length_bytes_, packet->payload_length);
+ EXPECT_EQ(kPayloadType, packet->header.payloadType);
+ EXPECT_EQ(kSequenceNumber, packet->header.sequenceNumber);
+ EXPECT_EQ(true, packet->primary);
+ ASSERT_FALSE(packet->payload == NULL);
+ for (int i = 0; i < packet->payload_length; ++i) {
+ EXPECT_EQ(payload_value, packet->payload[i]);
+ ++payload_value;
+ }
+ delete [] (*it)->payload;
+ delete (*it);
+ it = packet_list.erase(it);
+ ++frame_num;
+ }
+
+ // The destructor is called when decoder_database goes out of scope.
+ EXPECT_CALL(decoder_database, Die());
+}
+
+// Test 1 through 5 frames of 20 and 30 ms size.
+// Also test the maximum number of frames in one packet for 20 and 30 ms.
+// The maximum is defined by the largest payload length that can be uniquely
+// resolved to a frame size of either 38 bytes (20 ms) or 50 bytes (30 ms).
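+// As a worked example of that limit: 38 and 50 first share a multiple at
+// 950 bytes (25 * 38 = 19 * 50 = 950), so 24 frames of 20 ms (912 bytes) and
+// 18 frames of 30 ms (900 bytes) are the largest unambiguous payloads; this
+// is consistent with the TooLargePayload test below, which uses exactly
+// 950 bytes.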
+INSTANTIATE_TEST_CASE_P(
+ PayloadSplitter, SplitIlbcTest,
+ ::testing::Values(std::pair<int, int>(1, 20), // 1 frame, 20 ms.
+ std::pair<int, int>(2, 20), // 2 frames, 20 ms.
+ std::pair<int, int>(3, 20), // And so on.
+ std::pair<int, int>(4, 20),
+ std::pair<int, int>(5, 20),
+ std::pair<int, int>(24, 20),
+ std::pair<int, int>(1, 30),
+ std::pair<int, int>(2, 30),
+ std::pair<int, int>(3, 30),
+ std::pair<int, int>(4, 30),
+ std::pair<int, int>(5, 30),
+ std::pair<int, int>(18, 30)));
+
+// Test too large payload size.
+TEST(IlbcPayloadSplitter, TooLargePayload) {
+ PacketList packet_list;
+ static const uint8_t kPayloadType = 17; // Just a random number.
+ int kPayloadLengthBytes = 950;
+ Packet* packet = CreatePacket(kPayloadType, kPayloadLengthBytes, 0);
+ packet_list.push_back(packet);
+
+ MockDecoderDatabase decoder_database;
+ scoped_ptr<DecoderDatabase::DecoderInfo> info(
+ new DecoderDatabase::DecoderInfo(kDecoderILBC, 8000, NULL, false));
+ EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
+ .WillRepeatedly(Return(info.get()));
+
+ PayloadSplitter splitter;
+ EXPECT_EQ(PayloadSplitter::kTooLargePayload,
+ splitter.SplitAudio(&packet_list, decoder_database));
+ EXPECT_EQ(1u, packet_list.size());
+
+ // Delete the packets and payloads to avoid having the test leak memory.
+ PacketList::iterator it = packet_list.begin();
+ while (it != packet_list.end()) {
+ delete [] (*it)->payload;
+ delete (*it);
+ it = packet_list.erase(it);
+ }
+
+ // The destructor is called when decoder_database goes out of scope.
+ EXPECT_CALL(decoder_database, Die());
+}
+
+// Payload not an integer number of frames.
+TEST(IlbcPayloadSplitter, UnevenPayload) {
+ PacketList packet_list;
+ static const uint8_t kPayloadType = 17; // Just a random number.
+ int kPayloadLengthBytes = 39; // Not an even number of frames.
+ Packet* packet = CreatePacket(kPayloadType, kPayloadLengthBytes, 0);
+ packet_list.push_back(packet);
+
+ MockDecoderDatabase decoder_database;
+ scoped_ptr<DecoderDatabase::DecoderInfo> info(
+ new DecoderDatabase::DecoderInfo(kDecoderILBC, 8000, NULL, false));
+ EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
+ .WillRepeatedly(Return(info.get()));
+
+ PayloadSplitter splitter;
+ EXPECT_EQ(PayloadSplitter::kFrameSplitError,
+ splitter.SplitAudio(&packet_list, decoder_database));
+ EXPECT_EQ(1u, packet_list.size());
+
+ // Delete the packets and payloads to avoid having the test leak memory.
+ PacketList::iterator it = packet_list.begin();
+ while (it != packet_list.end()) {
+ delete [] (*it)->payload;
+ delete (*it);
+ it = packet_list.erase(it);
+ }
+
+ // The destructor is called when decoder_database goes out of scope.
+ EXPECT_CALL(decoder_database, Die());
+}
+
+TEST(FecPayloadSplitter, MixedPayload) {
+ PacketList packet_list;
+ DecoderDatabase decoder_database;
+
+ decoder_database.RegisterPayload(0, kDecoderOpus);
+ decoder_database.RegisterPayload(1, kDecoderPCMu);
+
+ Packet* packet = CreateOpusFecPacket(0, 10, 0xFF);
+ packet_list.push_back(packet);
+
+ packet = CreatePacket(0, 10, 0); // Non-FEC Opus payload.
+ packet_list.push_back(packet);
+
+ packet = CreatePacket(1, 10, 0); // Non-Opus payload.
+ packet_list.push_back(packet);
+
+ PayloadSplitter splitter;
+ EXPECT_EQ(PayloadSplitter::kOK,
+ splitter.SplitFec(&packet_list, &decoder_database));
+ EXPECT_EQ(4u, packet_list.size());
+
+ // Check first packet.
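+  // This is expected to be the FEC (redundant) data: timestamp one 20 ms
+  // frame (at 32 samples per ms) earlier than the original packet, and marked
+  // as non-primary.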
+ packet = packet_list.front();
+ EXPECT_EQ(0, packet->header.payloadType);
+ EXPECT_EQ(kBaseTimestamp - 20 * 32, packet->header.timestamp);
+ EXPECT_EQ(10, packet->payload_length);
+ EXPECT_FALSE(packet->primary);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+
+ // Check second packet.
+ packet = packet_list.front();
+ EXPECT_EQ(0, packet->header.payloadType);
+ EXPECT_EQ(kBaseTimestamp, packet->header.timestamp);
+ EXPECT_EQ(10, packet->payload_length);
+ EXPECT_TRUE(packet->primary);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+
+ // Check third packet.
+ packet = packet_list.front();
+ VerifyPacket(packet, 10, 0, kSequenceNumber, kBaseTimestamp, 0, true);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+
+ // Check fourth packet.
+ packet = packet_list.front();
+ VerifyPacket(packet, 10, 1, kSequenceNumber, kBaseTimestamp, 0, true);
+ delete [] packet->payload;
+ delete packet;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/peak_detection.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/peak_detection.c
deleted file mode 100644
index 8c85d2a837e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/peak_detection.c
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of the peak detection used for finding correlation peaks.
- */
-
-#include "dsp_helpfunctions.h"
-
-#include "signal_processing_library.h"
-
-/* Table of constants used in parabolic fit function WebRtcNetEQ_PrblFit */
-const int16_t WebRtcNetEQ_kPrblCf[17][3] = { { 120, 32, 64 }, { 140, 44, 75 },
- { 150, 50, 80 }, { 160, 57, 85 },
- { 180, 72, 96 }, { 200, 89, 107 },
- { 210, 98, 112 }, { 220, 108, 117 },
- { 240, 128, 128 }, { 260, 150, 139 },
- { 270, 162, 144 }, { 280, 174, 149 },
- { 300, 200, 160 }, { 320, 228, 171 },
- { 330, 242, 176 }, { 340, 257, 181 },
- { 360, 288, 192 } };
-
-int16_t WebRtcNetEQ_PeakDetection(int16_t *pw16_data, int16_t w16_dataLen,
- int16_t w16_nmbPeaks, int16_t fs_mult,
- int16_t *pw16_winIndex,
- int16_t *pw16_winValue)
-{
- /* Local variables */
- int i;
- int16_t w16_tmp;
- int16_t w16_tmp2;
- int16_t indMin = 0;
- int16_t indMax = 0;
-
- /* Peak detection */
-
- for (i = 0; i <= (w16_nmbPeaks - 1); i++)
- {
- if (w16_nmbPeaks == 1)
- {
- /*
- * Single peak
- * The parabola fit assumes that an extra point is available; worst case it gets
- * a zero on the high end of the signal.
- */
- w16_dataLen++;
- }
-
- pw16_winIndex[i] = WebRtcSpl_MaxIndexW16(pw16_data, (int16_t) (w16_dataLen - 1));
-
- if (i != w16_nmbPeaks - 1)
- {
- w16_tmp = pw16_winIndex[i] - 2; /* *fs_mult; */
- indMin = WEBRTC_SPL_MAX(0, w16_tmp);
- w16_tmp = pw16_winIndex[i] + 2; /* *fs_mult; */
- w16_tmp2 = w16_dataLen - 1;
- indMax = WEBRTC_SPL_MIN(w16_tmp2, w16_tmp);
- }
-
- if ((pw16_winIndex[i] != 0) && (pw16_winIndex[i] != (w16_dataLen - 2)))
- {
- /* Parabola fit*/
- WebRtcNetEQ_PrblFit(&(pw16_data[pw16_winIndex[i] - 1]), &(pw16_winIndex[i]),
- &(pw16_winValue[i]), fs_mult);
- }
- else
- {
- if (pw16_winIndex[i] == (w16_dataLen - 2))
- {
- if (pw16_data[pw16_winIndex[i]] > pw16_data[pw16_winIndex[i] + 1])
- {
- WebRtcNetEQ_PrblFit(&(pw16_data[pw16_winIndex[i] - 1]),
- &(pw16_winIndex[i]), &(pw16_winValue[i]), fs_mult);
- }
- else if (pw16_data[pw16_winIndex[i]] <= pw16_data[pw16_winIndex[i] + 1])
- {
- pw16_winValue[i] = (pw16_data[pw16_winIndex[i]]
- + pw16_data[pw16_winIndex[i] + 1]) >> 1; /* lin approx */
- pw16_winIndex[i] = (pw16_winIndex[i] * 2 + 1) * fs_mult;
- }
- }
- else
- {
- pw16_winValue[i] = pw16_data[pw16_winIndex[i]];
- pw16_winIndex[i] = pw16_winIndex[i] * 2 * fs_mult;
- }
- }
-
- if (i != w16_nmbPeaks - 1)
- {
- WebRtcSpl_MemSetW16(&(pw16_data[indMin]), 0, (indMax - indMin + 1));
- /* for (j=indMin; j<=indMax; j++) pw16_data[j] = 0; */
- }
- }
-
- return 0;
-}
-
-int16_t WebRtcNetEQ_PrblFit(int16_t *pw16_3pts, int16_t *pw16_Ind,
- int16_t *pw16_outVal, int16_t fs_mult)
-{
- /* Variables */
- int32_t Num, Den;
- int32_t temp;
- int16_t flag, stp, strt, lmt;
- uint16_t PFind[13];
-
- if (fs_mult == 1)
- {
- PFind[0] = 0;
- PFind[1] = 8;
- PFind[2] = 16;
- }
- else if (fs_mult == 2)
- {
- PFind[0] = 0;
- PFind[1] = 4;
- PFind[2] = 8;
- PFind[3] = 12;
- PFind[4] = 16;
- }
- else if (fs_mult == 4)
- {
- PFind[0] = 0;
- PFind[1] = 2;
- PFind[2] = 4;
- PFind[3] = 6;
- PFind[4] = 8;
- PFind[5] = 10;
- PFind[6] = 12;
- PFind[7] = 14;
- PFind[8] = 16;
- }
- else
- {
- PFind[0] = 0;
- PFind[1] = 1;
- PFind[2] = 3;
- PFind[3] = 4;
- PFind[4] = 5;
- PFind[5] = 7;
- PFind[6] = 8;
- PFind[7] = 9;
- PFind[8] = 11;
- PFind[9] = 12;
- PFind[10] = 13;
- PFind[11] = 15;
- PFind[12] = 16;
- }
-
- /* Num = -3*pw16_3pts[0] + 4*pw16_3pts[1] - pw16_3pts[2]; */
- /* Den = pw16_3pts[0] - 2*pw16_3pts[1] + pw16_3pts[2]; */
- Num = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],-3) + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],4)
- - pw16_3pts[2];
-
- Den = pw16_3pts[0] + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],-2) + pw16_3pts[2];
-
- temp = (int32_t) WEBRTC_SPL_MUL(Num, (int32_t)120); /* need 32_16 really */
- flag = 1;
- stp = WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0] - WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0];
- strt = (WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0]
- + WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0]) >> 1;
-
- if (temp < (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)strt))
- {
- lmt = strt - stp;
- while (flag)
- {
- if ((flag == fs_mult) || (temp
- > (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)lmt)))
- {
- *pw16_outVal
- = (int16_t)
- (((int32_t) ((int32_t) WEBRTC_SPL_MUL(Den,(int32_t)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][1])
- + (int32_t) WEBRTC_SPL_MUL(Num,(int32_t)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][2])
- + WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256))) >> 8);
- *pw16_Ind = (*pw16_Ind) * (fs_mult << 1) - flag;
- flag = 0;
- }
- else
- {
- flag++;
- lmt -= stp;
- }
- }
- }
- else if (temp > (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)(strt+stp)))
- {
- lmt = strt + (stp << 1);
- while (flag)
- {
- if ((flag == fs_mult) || (temp
- < (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)lmt)))
- {
- int32_t temp_term_1, temp_term_2, temp_term_3;
-
- temp_term_1 = WEBRTC_SPL_MUL(Den,
- (int32_t) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][1]);
- temp_term_2 = WEBRTC_SPL_MUL(Num,
- (int32_t) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][2]);
- temp_term_3 = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256);
-
- *pw16_outVal
- = (int16_t) ((temp_term_1 + temp_term_2 + temp_term_3) >> 8);
-
- *pw16_Ind = (*pw16_Ind) * (fs_mult << 1) + flag;
- flag = 0;
- }
- else
- {
- flag++;
- lmt += stp;
- }
- }
-
- }
- else
- {
- *pw16_outVal = pw16_3pts[1];
- *pw16_Ind = (*pw16_Ind) * 2 * fs_mult;
- }
-
- return 0;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.cc
new file mode 100644
index 00000000000..7ae7f97abc9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.cc
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
+
+namespace webrtc {
+
+PostDecodeVad::~PostDecodeVad() {
+ if (vad_instance_)
+ WebRtcVad_Free(vad_instance_);
+}
+
+void PostDecodeVad::Enable() {
+ if (!vad_instance_) {
+ // Create the instance.
+ if (WebRtcVad_Create(&vad_instance_) != 0) {
+ // Failed to create instance.
+ Disable();
+ return;
+ }
+ }
+ Init();
+ enabled_ = true;
+}
+
+void PostDecodeVad::Disable() {
+ enabled_ = false;
+ running_ = false;
+}
+
+void PostDecodeVad::Init() {
+ running_ = false;
+ if (vad_instance_) {
+ WebRtcVad_Init(vad_instance_);
+ WebRtcVad_set_mode(vad_instance_, kVadMode);
+ running_ = true;
+ }
+}
+
+void PostDecodeVad::Update(int16_t* signal, int length,
+ AudioDecoder::SpeechType speech_type,
+ bool sid_frame,
+ int fs_hz) {
+ if (!vad_instance_ || !enabled_) {
+ return;
+ }
+
+ if (speech_type == AudioDecoder::kComfortNoise || sid_frame ||
+ fs_hz > 16000) {
+ // TODO(hlundin): Remove restriction on fs_hz.
+ running_ = false;
+ active_speech_ = true;
+ sid_interval_counter_ = 0;
+ } else if (!running_) {
+ ++sid_interval_counter_;
+ }
+
+ if (sid_interval_counter_ >= kVadAutoEnable) {
+ Init();
+ }
+
+ if (length > 0 && running_) {
+ int vad_sample_index = 0;
+ active_speech_ = false;
+ // Loop through frame sizes 30, 20, and 10 ms.
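+    // The VAD only accepts frames of 10, 20 or 30 ms, so the decoded signal
+    // is consumed greedily, largest frame size first.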
+ for (int vad_frame_size_ms = 30; vad_frame_size_ms >= 10;
+ vad_frame_size_ms -= 10) {
+ int vad_frame_size_samples = vad_frame_size_ms * fs_hz / 1000;
+ while (length - vad_sample_index >= vad_frame_size_samples) {
+ int vad_return = WebRtcVad_Process(
+ vad_instance_, fs_hz, &signal[vad_sample_index],
+ vad_frame_size_samples);
+ active_speech_ |= (vad_return == 1);
+ vad_sample_index += vad_frame_size_samples;
+ }
+ }
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.h
new file mode 100644
index 00000000000..e713009c85f
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_POST_DECODE_VAD_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_POST_DECODE_VAD_H_
+
+#include <string.h>  // size_t
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_audio/vad/include/webrtc_vad.h"
+#include "webrtc/common_types.h" // NULL
+#include "webrtc/modules/audio_coding/neteq/defines.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class PostDecodeVad {
+ public:
+ PostDecodeVad()
+ : enabled_(false),
+ running_(false),
+ active_speech_(true),
+ sid_interval_counter_(0),
+ vad_instance_(NULL) {
+ }
+
+ virtual ~PostDecodeVad();
+
+ // Enables post-decode VAD.
+ void Enable();
+
+ // Disables post-decode VAD.
+ void Disable();
+
+ // Initializes post-decode VAD.
+ void Init();
+
+  // Updates post-decode VAD with the audio data in |signal| having |length|
+  // samples. The data is of type |speech_type|, at the sample rate |fs_hz|.
+  // |sid_frame| should be true if the current frame is an SID (comfort noise
+  // update) frame.
+  void Update(int16_t* signal, int length,
+              AudioDecoder::SpeechType speech_type, bool sid_frame, int fs_hz);
+
+ // Accessors.
+ bool enabled() const { return enabled_; }
+ bool running() const { return running_; }
+ bool active_speech() const { return active_speech_; }
+
+ private:
+ static const int kVadMode = 0; // Sets aggressiveness to "Normal".
+ // Number of Update() calls without CNG/SID before re-enabling VAD.
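+  // (With, e.g., 10 ms of audio per Update() call, 3000 calls correspond to
+  // roughly 30 seconds.)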
+ static const int kVadAutoEnable = 3000;
+
+ bool enabled_;
+ bool running_;
+ bool active_speech_;
+ int sid_interval_counter_;
+ ::VadInst* vad_instance_;
+
+ DISALLOW_COPY_AND_ASSIGN(PostDecodeVad);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_POST_DECODE_VAD_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad_unittest.cc
new file mode 100644
index 00000000000..ed48db858d4
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad_unittest.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for PostDecodeVad class.
+
+#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
+
+#include "gtest/gtest.h"
+
+namespace webrtc {
+
+TEST(PostDecodeVad, CreateAndDestroy) {
+ PostDecodeVad vad;
+}
+
+// TODO(hlundin): Write more tests.
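+
+// A minimal sketch of an additional test (an assumption, not part of the
+// original change): it exercises Enable() and a single Update() call with
+// 30 ms of 8 kHz audio, assuming the AudioDecoder::kSpeech enumerator. Only
+// the enabled/running flags are checked, since the actual VAD decision
+// depends on the underlying WebRtcVad implementation.
+TEST(PostDecodeVad, EnableAndUpdateSketch) {
+  PostDecodeVad vad;
+  vad.Enable();
+  EXPECT_TRUE(vad.enabled());
+  int16_t signal[240] = {0};  // 30 ms of silence at 8000 Hz.
+  vad.Update(signal, 240, AudioDecoder::kSpeech, false, 8000);
+  EXPECT_TRUE(vad.running());
+}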
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.c
deleted file mode 100644
index e56c0628415..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.c
+++ /dev/null
@@ -1,527 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the Pre-emptive Expand algorithm that is used to increase
- * the delay by repeating a part of the audio stream.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-
-#define PREEMPTIVE_CORR_LEN 50
-#define PREEMPTIVE_MIN_LAG 10
-#define PREEMPTIVE_MAX_LAG 60
-#define PREEMPTIVE_DOWNSAMPLED_LEN (PREEMPTIVE_CORR_LEN + PREEMPTIVE_MAX_LAG)
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_downSampSpeech 110 0 109
- int32_t pw32_corr 2*50 110 209
- int16_t pw16_corr 50 0 49
-
- Total: 110+2*50
- */
-
-#define SCRATCH_PW16_DS_SPEECH 0
-#define SCRATCH_PW32_CORR PREEMPTIVE_DOWNSAMPLED_LEN
-#define SCRATCH_PW16_CORR 0
-
-/****************************************************************************
- * WebRtcNetEQ_PreEmptiveExpand(...)
- *
- * This function tries to extend the audio data by repeating one or several
- * pitch periods. The operation is only carried out if the correlation is
- * strong or if the signal energy is very low. The algorithm is the
- * reciprocal of the Accelerate algorithm.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to newly decoded speech.
- * - len : Length of decoded speech.
- * - oldDataLen : Length of the part of decoded that has already been played out.
- * - BGNonly : If non-zero, Pre-emptive Expand will only copy
- * the first DEFAULT_TIME_ADJUST seconds of the
- * input and append to the end. No signal matching is
- * done.
- *
- * Output:
- * - inst : Updated instance
- * - outData : Pointer to a memory space where the output data
- * should be stored. The vector must be at least
- * min(len + 120*fs/8000, NETEQ_MAX_OUTPUT_SIZE)
- * elements long.
- * - pw16_len : Number of samples written to outData.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- const int16_t *pw16_decoded, int len, int oldDataLen,
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly)
-{
-
-#ifdef SCRATCH
- /* Use scratch memory for internal temporary vectors */
- int16_t *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
- int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
- int16_t *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
-#else
- /* Allocate memory for temporary vectors */
- int16_t pw16_downSampSpeech[PREEMPTIVE_DOWNSAMPLED_LEN];
- int32_t pw32_corr[PREEMPTIVE_CORR_LEN];
- int16_t pw16_corr[PREEMPTIVE_CORR_LEN];
-#endif
- int16_t w16_decodedMax = 0;
- int16_t w16_tmp = 0;
- int16_t w16_tmp2;
- int32_t w32_tmp;
- int32_t w32_tmp2;
-
- const int16_t w16_startLag = PREEMPTIVE_MIN_LAG;
- const int16_t w16_endLag = PREEMPTIVE_MAX_LAG;
- const int16_t w16_corrLen = PREEMPTIVE_CORR_LEN;
- const int16_t *pw16_vec1, *pw16_vec2;
- int16_t *pw16_vectmp;
- int16_t w16_inc, w16_startfact;
- int16_t w16_bestIndex, w16_bestVal;
- int16_t w16_VAD = 1;
- int16_t fsMult;
- int16_t fsMult120;
- int32_t w32_en1, w32_en2, w32_cc;
- int16_t w16_en1, w16_en2;
- int16_t w16_en1Scale, w16_en2Scale;
- int16_t w16_sqrtEn1En2;
- int16_t w16_bestCorr = 0;
- int ok;
-
-#ifdef NETEQ_STEREO
- MasterSlaveInfo *msInfo = inst->msInfo;
-#endif
-
- fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */
-
- /* Pre-calculate common multiplication with fsMult */
- fsMult120 = (int16_t) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
-
- inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
-
- /*
- * Sanity check for len variable; must be (almost) 30 ms (120*fsMult + max(bestIndex)).
- * Also, the new part must be at least .625 ms (w16_overlap).
- */
- if (len < (int16_t) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult) || oldDataLen >= len
- - inst->ExpandInst.w16_overlap)
- {
- /* Length of decoded data too short */
- inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
- *pw16_len = len;
-
-
- /* simply move all data from decoded to outData */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return NETEQ_OTHER_ERROR;
- }
-
- /***********************************/
- /* Special operations for BGN only */
- /***********************************/
-
- /* Check if "background noise only" flag is set */
- if (BGNonly)
- {
- /* special operation for BGN only; simply insert a chunk of data */
- w16_bestIndex = DEFAULT_TIME_ADJUST * (fsMult << 3); /* X*fs/1000 */
-
- /* Sanity check for bestIndex */
- if (w16_bestIndex > len)
- { /* not good, do nothing instead */
- inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
- *pw16_len = len;
-
-
- /* simply move all data from decoded to outData */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return NETEQ_OTHER_ERROR;
- }
-
- /* set length parameter */
- *pw16_len = len + w16_bestIndex;
-
-
- /* copy to output */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, len);
- WEBRTC_SPL_MEMCPY_W16(&pw16_outData[len], pw16_decoded, w16_bestIndex);
-
- /* set mode */
- inst->w16_mode = MODE_LOWEN_PREEMPTIVE;
-
- /* update statistics */
- inst->statInst.preemptiveLength += w16_bestIndex;
- /* Short-term activity statistics. */
- inst->activity_stats.preemptive_expand_bgn_samples += w16_bestIndex;
-
- return 0;
- } /* end of special code for BGN mode */
-
-#ifdef NETEQ_STEREO
-
- /* Sanity for msInfo */
- if (msInfo == NULL)
- {
- /* this should not happen here */
- return MASTER_SLAVE_ERROR;
- }
-
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
- /* Find correlation lag only for non-slave instances */
-
-#endif
-
- /****************************************************************/
- /* Find the strongest correlation lag by downsampling to 4 kHz, */
- /* calculating correlation for downsampled signal and finding */
- /* the strongest correlation peak. */
- /****************************************************************/
-
- /* find maximum absolute value */
- w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
-
- /* downsample the decoded speech to 4 kHz */
- ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
- PREEMPTIVE_DOWNSAMPLED_LEN, 1 /* compensate delay*/);
- if (ok != 0)
- {
- /* error */
- inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
- *pw16_len = len;
-
-
- /* simply move all data from decoded to outData */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return NETEQ_OTHER_ERROR;
- }
-
- /*
- * Set scaling factor for cross correlation to protect against
- * overflow (log2(50) => 6)
- */
- w16_tmp = 6 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- /* Perform correlation from lag 10 to lag 60 in 4 kHz domain */WebRtcNetEQ_CrossCorr(
- pw32_corr, &pw16_downSampSpeech[w16_endLag],
- &pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
- (int16_t) (w16_endLag - w16_startLag), w16_tmp, -1);
-
- /* Normalize correlation to 14 bits and put in a int16_t vector */
- w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
- w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_corrLen, pw32_corr, w16_tmp);
-
- /* Find limits for peak finding, in order to avoid overful NetEQ algorithm buffer. */
- /* Calculate difference between MAX_OUTPUT_SIZE and len in 4 kHz domain. */
- w16_tmp = WebRtcSpl_DivW32W16ResW16((int32_t) (NETEQ_MAX_OUTPUT_SIZE - len),
- (int16_t) (fsMult << 1)) - w16_startLag;
- w16_tmp = WEBRTC_SPL_MIN(w16_corrLen, w16_tmp); /* no more than corrLen = 50 */
-
-#ifdef NETEQ_STEREO
- } /* end if (msInfo->msMode != NETEQ_SLAVE) */
-
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
- /* Find the strongest correlation peak by using the parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corr, w16_tmp, 1, fsMult, &w16_bestIndex, &w16_bestVal);
- /* 0 <= bestIndex <= (2*w16_tmp - 1)*fsMult <= (2*corrLen - 1)*fsMult = 99*fsMult */
-
- /* Compensate bestIndex for displaced starting position */
- w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
- /* 20*fsMult <= bestIndex <= 119*fsMult */
-
- msInfo->bestIndex = w16_bestIndex;
- }
- else if (msInfo->msMode == NETEQ_SLAVE)
- {
- if (msInfo->extraInfo == PE_EXP_FAIL)
- {
- /* Master has signaled an unsuccessful preemptive expand */
- w16_bestIndex = 0;
- }
- else
- {
- /* Get best index from master */
- w16_bestIndex = msInfo->bestIndex;
- }
- }
- else
- {
- /* Invalid mode */
- return (MASTER_SLAVE_ERROR);
- }
-
-#else /* NETEQ_STEREO */
-
- /* Find the strongest correlation peak by using the parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corr, w16_tmp, 1, fsMult, &w16_bestIndex, &w16_bestVal);
- /* 0 <= bestIndex <= (2*w16_tmp - 1)*fsMult <= (2*corrLen - 1)*fsMult = 99*fsMult */
-
- /* Compensate bestIndex for displaced starting position */
- w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
- /* 20*fsMult <= bestIndex <= 119*fsMult */
-
-#endif /* NETEQ_STEREO */
-
-#ifdef NETEQ_STEREO
-
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
- /* Calculate correlation only for non-slave instances */
-
-#endif /* NETEQ_STEREO */
-
- /*****************************************************/
- /* Calculate correlation bestCorr for the found lag. */
- /* Also do a simple VAD decision. */
- /*****************************************************/
-
- /*
- * Calculate scaling to ensure that bestIndex samples can be square-summed
- * without overflowing
- */
- w16_tmp = (31
- - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax)));
- w16_tmp += (31 - WebRtcSpl_NormW32(w16_bestIndex));
- w16_tmp -= 31;
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- /* vec1 starts at 15 ms minus one pitch period */
- pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
- /* vec2 start at 15 ms */
- pw16_vec2 = &pw16_decoded[fsMult120];
-
- /* Calculate energies for vec1 and vec2 */
- w32_en1 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1,
- (int16_t*) pw16_vec1, w16_bestIndex, w16_tmp);
- w32_en2 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec2,
- (int16_t*) pw16_vec2, w16_bestIndex, w16_tmp);
-
- /* Calculate cross-correlation at the found lag */
- w32_cc = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1, (int16_t*) pw16_vec2,
- w16_bestIndex, w16_tmp);
-
- /* Check VAD constraint
- ((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
- w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_en1 + w32_en2, 4); /* (en1+en2)/(2*8) */
- if (inst->BGNInst.w16_initialized == 1)
- {
- w32_tmp2 = inst->BGNInst.w32_energy;
- }
- else
- {
- /* if BGN parameters have not been estimated, use a fixed threshold */
- w32_tmp2 = 75000;
- }
- w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
- w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
- w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
- w16_tmp2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
- w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);
-
- /* Scale w32_tmp properly before comparing with w32_tmp2 */
- /* (w16_tmp is scaling before energy calculation, thus 2*w16_tmp) */
- if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
- {
- /* Cannot scale only w32_tmp, must scale w32_temp2 too */
- int16_t tempshift = WebRtcSpl_NormW32(w32_tmp);
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
- w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
- WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
- }
- else
- {
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp,
- WEBRTC_SPL_LSHIFT_W32(w16_tmp,1));
- }
-
- if (w32_tmp <= w32_tmp2) /*((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
- {
- /* The signal seems to be passive speech */
- w16_VAD = 0;
- w16_bestCorr = 0; /* Correlation does not matter */
-
- /* For low energy expansion, the new data can be less than 15 ms,
- but we must ensure that bestIndex is not larger than the new data. */
- w16_bestIndex = WEBRTC_SPL_MIN( w16_bestIndex, len - oldDataLen );
- }
- else
- {
- /* The signal is active speech */
- w16_VAD = 1;
-
- /* Calculate correlation (cc/sqrt(en1*en2)) */
-
- /* Start with calculating scale values */
- w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
- w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
- w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
- w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
-
- /* Make sure total scaling is even (to simplify scale factor after sqrt) */
- if ((w16_en1Scale + w16_en2Scale) & 1)
- {
- w16_en1Scale += 1;
- }
-
- /* Convert energies to int16_t */
- w16_en1 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
- w16_en2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
-
- /* Calculate energy product */
- w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
-
- /* Calculate square-root of energy product */
- w16_sqrtEn1En2 = (int16_t) WebRtcSpl_SqrtFloor(w32_tmp);
-
- /* Calculate cc/sqrt(en1*en2) in Q14 */
- w16_tmp = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
- w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
- w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
- w16_bestCorr = (int16_t) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
- w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
- }
-
-#ifdef NETEQ_STEREO
-
- } /* end if (msInfo->msMode != NETEQ_SLAVE) */
-
-#endif /* NETEQ_STEREO */
-
- /*******************************************************/
- /* Check preemptive expand criteria and insert samples */
- /*******************************************************/
-
- /* Check for strong correlation (>0.9) and at least 15 ms new data,
- or passive speech */
-#ifdef NETEQ_STEREO
- if (((((w16_bestCorr > 14746) && (oldDataLen <= fsMult120)) || (w16_VAD == 0))
- && (msInfo->msMode != NETEQ_SLAVE)) || ((msInfo->msMode == NETEQ_SLAVE)
- && (msInfo->extraInfo != PE_EXP_FAIL)))
-#else
- if (((w16_bestCorr > 14746) && (oldDataLen <= fsMult120))
- || (w16_VAD == 0))
-#endif
- {
- /* Do expand operation by overlap add */
-
- /* Set length of the first part, not to be modified */
- int16_t w16_startIndex = WEBRTC_SPL_MAX(oldDataLen, fsMult120);
-
- /*
- * Calculate cross-fading slope so that the fading factor goes from
- * 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
- */
- w16_inc = (int16_t) WebRtcSpl_DivW32W16((int32_t) 16384,
- (int16_t) (w16_bestIndex + 1)); /* in Q14 */
-
- /* Initiate fading factor */
- w16_startfact = 16384 - w16_inc;
-
- /* vec1 starts at 15 ms minus one pitch period */
- pw16_vec1 = &pw16_decoded[w16_startIndex - w16_bestIndex];
- /* vec2 start at 15 ms */
- pw16_vec2 = &pw16_decoded[w16_startIndex];
-
-
- /* Copy unmodified part [0 to 15 ms] */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, w16_startIndex);
-
- /* Generate interpolated part of length bestIndex (1 pitch period) */
- pw16_vectmp = pw16_outData + w16_startIndex;
- /* Reuse mixing function from Expand */
- WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (int16_t*) pw16_vec2,
- (int16_t*) pw16_vec1, &w16_startfact, w16_inc, w16_bestIndex);
-
- /* Move the last part (also unmodified) */
- /* Take from decoded at 15 ms */
- pw16_vec2 = &pw16_decoded[w16_startIndex];
- WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[w16_startIndex + w16_bestIndex], pw16_vec2,
- (int16_t) (len - w16_startIndex));
-
- /* Set the mode flag */
- if (w16_VAD)
- {
- inst->w16_mode = MODE_SUCCESS_PREEMPTIVE;
- }
- else
- {
- inst->w16_mode = MODE_LOWEN_PREEMPTIVE;
- }
-
- /* Calculate resulting length = original length + pitch period */
- *pw16_len = len + w16_bestIndex;
-
- /* Update in-call statistics */
- inst->statInst.preemptiveLength += w16_bestIndex;
- /* Short-term activity statistics. */
- inst->activity_stats.preemptive_expand_normal_samples += w16_bestIndex;
- return 0;
- }
- else
- {
- /* Preemptive Expand not allowed */
-
-#ifdef NETEQ_STEREO
- /* Signal to slave(s) that this was unsuccessful */
- if (msInfo->msMode == NETEQ_MASTER)
- {
- msInfo->extraInfo = PE_EXP_FAIL;
- }
-#endif
-
- /* Set mode flag to unsuccessful preemptive expand */
- inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
-
- /* Length is unmodified */
- *pw16_len = len;
-
-
- /* Simply move all data from decoded to outData */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return 0;
- }
-}
-
-#undef SCRATCH_PW16_DS_SPEECH
-#undef SCRATCH_PW32_CORR
-#undef SCRATCH_PW16_CORR
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.cc
new file mode 100644
index 00000000000..b2dc3e60cba
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.cc
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
+
+#include <algorithm> // min, max
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+
+namespace webrtc {
+
+PreemptiveExpand::ReturnCodes PreemptiveExpand::Process(
+ const int16_t* input,
+ int input_length,
+ int old_data_length,
+ AudioMultiVector* output,
+ int16_t* length_change_samples) {
+ old_data_length_per_channel_ = old_data_length;
+ // Input length must be (almost) 30 ms.
+ // Also, the new part must be at least |overlap_samples_| elements.
+ static const int k15ms = 120; // 15 ms = 120 samples at 8 kHz sample rate.
+ if (num_channels_ == 0 ||
+ input_length / num_channels_ < (2 * k15ms - 1) * fs_mult_ ||
+ old_data_length >= input_length / num_channels_ - overlap_samples_) {
+ // Length of input data too short to do preemptive expand. Simply move all
+ // data from input to output.
+ output->PushBackInterleaved(input, input_length);
+ return kError;
+ }
+ return TimeStretch::Process(input, input_length, output,
+ length_change_samples);
+}
+
+void PreemptiveExpand::SetParametersForPassiveSpeech(size_t len,
+ int16_t* best_correlation,
+ int* peak_index) const {
+ // When the signal does not contain any active speech, the correlation does
+ // not matter. Simply set it to zero.
+ *best_correlation = 0;
+
+  // For low-energy expansion, the new data can be less than 15 ms,
+  // but we must ensure that |peak_index| is not larger than the length of
+  // the new data.
+ *peak_index = std::min(*peak_index,
+ static_cast<int>(len - old_data_length_per_channel_));
+}
+
+PreemptiveExpand::ReturnCodes PreemptiveExpand::CheckCriteriaAndStretch(
+ const int16_t *input, size_t input_length, size_t peak_index,
+ int16_t best_correlation, bool active_speech,
+ AudioMultiVector* output) const {
+ // Pre-calculate common multiplication with |fs_mult_|.
+ // 120 corresponds to 15 ms.
+ int fs_mult_120 = fs_mult_ * 120;
+ assert(old_data_length_per_channel_ >= 0); // Make sure it's been set.
+ // Check for strong correlation (>0.9 in Q14) and at least 15 ms new data,
+ // or passive speech.
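+  // (In Q14, 0.9 corresponds to 0.9 * 16384 ~= 14746, the same threshold used
+  // by the legacy WebRtcNetEQ_PreEmptiveExpand code removed in this change.)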
+ if (((best_correlation > kCorrelationThreshold) &&
+ (old_data_length_per_channel_ <= fs_mult_120)) ||
+ !active_speech) {
+    // Do the preemptive expand operation by overlap-add.
+
+ // Set length of the first part, not to be modified.
+ size_t unmodified_length = std::max(old_data_length_per_channel_,
+ fs_mult_120);
+ // Copy first part, including cross-fade region.
+ output->PushBackInterleaved(
+ input, (unmodified_length + peak_index) * num_channels_);
+ // Copy the last |peak_index| samples up to 15 ms to |temp_vector|.
+ AudioMultiVector temp_vector(num_channels_);
+ temp_vector.PushBackInterleaved(
+ &input[(unmodified_length - peak_index) * num_channels_],
+ peak_index * num_channels_);
+ // Cross-fade |temp_vector| onto the end of |output|.
+ output->CrossFade(temp_vector, peak_index);
+ // Copy the last unmodified part, 15 ms + pitch period until the end.
+ output->PushBackInterleaved(
+ &input[unmodified_length * num_channels_],
+ input_length - unmodified_length * num_channels_);
+
+ if (active_speech) {
+ return kSuccess;
+ } else {
+ return kSuccessLowEnergy;
+ }
+ } else {
+    // Preemptive expand not allowed. Simply move all data from input to
+    // output.
+ output->PushBackInterleaved(input, input_length);
+ return kNoStretch;
+ }
+}
+
+PreemptiveExpand* PreemptiveExpandFactory::Create(
+ int sample_rate_hz,
+ size_t num_channels,
+ const BackgroundNoise& background_noise,
+ int overlap_samples) const {
+ return new PreemptiveExpand(
+ sample_rate_hz, num_channels, background_noise, overlap_samples);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.h
new file mode 100644
index 00000000000..1aa61330145
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_PREEMPTIVE_EXPAND_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_PREEMPTIVE_EXPAND_H_
+
+#include <assert.h>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/time_stretch.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class BackgroundNoise;
+
+// This class implements the PreemptiveExpand operation. Most of the work is
+// done in the base class TimeStretch, which is shared with the Accelerate
+// operation. In the PreemptiveExpand class, the operations that are specific to
+// PreemptiveExpand are implemented.
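+//
+// A usage sketch (illustrative only; the construction arguments are
+// assumptions based on this header and preemptive_expand.cc):
+//
+//   BackgroundNoise background_noise(1);  // One channel.
+//   PreemptiveExpandFactory factory;
+//   scoped_ptr<PreemptiveExpand> preemptive_expand(
+//       factory.Create(8000, 1, background_noise, 5));
+//   AudioMultiVector output(1);
+//   int16_t samples_added = 0;
+//   preemptive_expand->Process(decoded, decoded_length, old_data_length,
+//                              &output, &samples_added);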
+class PreemptiveExpand : public TimeStretch {
+ public:
+ PreemptiveExpand(int sample_rate_hz,
+ size_t num_channels,
+ const BackgroundNoise& background_noise,
+ int overlap_samples)
+ : TimeStretch(sample_rate_hz, num_channels, background_noise),
+ old_data_length_per_channel_(-1),
+ overlap_samples_(overlap_samples) {
+ }
+
+ virtual ~PreemptiveExpand() {}
+
+  // This method performs the actual PreemptiveExpand operation. The samples
+  // are read from |input|, of length |input_length| elements, and are written
+  // to |output|. |old_data_length| is the number of samples (per channel) in
+  // |input| that have already been played out. The number of samples added
+  // through time-stretching is provided in the output |length_change_samples|.
+  // The method returns the outcome of the operation as an enumerator value.
+  ReturnCodes Process(const int16_t* input,
+                      int input_length,
+                      int old_data_length,
+                      AudioMultiVector* output,
+                      int16_t* length_change_samples);
+
+ protected:
+ // Sets the parameters |best_correlation| and |peak_index| to suitable
+ // values when the signal contains no active speech.
+  virtual void SetParametersForPassiveSpeech(size_t len,
+                                             int16_t* best_correlation,
+                                             int* peak_index) const;
+
+ // Checks the criteria for performing the time-stretching operation and,
+ // if possible, performs the time-stretching.
+  virtual ReturnCodes CheckCriteriaAndStretch(
+      const int16_t* input, size_t input_length, size_t peak_index,
+      int16_t best_correlation, bool active_speech,
+      AudioMultiVector* output) const;
+
+ private:
+ int old_data_length_per_channel_;
+ int overlap_samples_;
+
+ DISALLOW_COPY_AND_ASSIGN(PreemptiveExpand);
+};
+
+struct PreemptiveExpandFactory {
+ PreemptiveExpandFactory() {}
+ virtual ~PreemptiveExpandFactory() {}
+
+ virtual PreemptiveExpand* Create(
+ int sample_rate_hz,
+ size_t num_channels,
+ const BackgroundNoise& background_noise,
+ int overlap_samples) const;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_PREEMPTIVE_EXPAND_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.c
deleted file mode 100644
index c168ab5437f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.c
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This function generates a pseudo-random vector.
- */
-
-#include "dsp_helpfunctions.h"
-
-/*
- * Values are normalized so that
- * sqrt(dot(pw16_NETEQFIX_RANDN_TBL,pw16_NETEQFIX_RANDN_TBL)/256)=2^13
- */
-const int16_t WebRtcNetEQ_kRandnTbl[RANDVEC_NO_OF_SAMPLES] =
-{
- 2680, 5532, 441, 5520, 16170, -5146, -1024, -8733, 3115, 9598, -10380, -4959, -1280, -21716, 7133, -1522,
- 13458, -3902, 2789, -675, 3441, 5016, -13599, -4003, -2739, 3922, -7209, 13352, -11617, -7241, 12905, -2314,
- 5426, 10121, -9702, 11207, -13542, 1373, 816, -5934, -12504, 4798, 1811, 4112, -613, 201, -10367, -2960,
- -2419, 3442, 4299, -6116, -6092, 1552, -1650, -480, -1237, 18720, -11858, -8303, -8212, 865, -2890, -16968,
- 12052, -5845, -5912, 9777, -5665, -6294, 5426, -4737, -6335, 1652, 761, 3832, 641, -8552, -9084, -5753,
- 8146, 12156, -4915, 15086, -1231, -1869, 11749, -9319, -6403, 11407, 6232, -1683, 24340, -11166, 4017, -10448,
- 3153, -2936, 6212, 2891, -866, -404, -4807, -2324, -1917, -2388, -6470, -3895, -10300, 5323, -5403, 2205,
- 4640, 7022, -21186, -6244, -882, -10031, -3395, -12885, 7155, -5339, 5079, -2645, -9515, 6622, 14651, 15852,
- 359, 122, 8246, -3502, -6696, -3679, -13535, -1409, -704, -7403, -4007, 1798, 279, -420, -12796, -14219,
- 1141, 3359, 11434, 7049, -6684, -7473, 14283, -4115, -9123, -8969, 4152, 4117, 13792, 5742, 16168, 8661,
- -1609, -6095, 1881, 14380, -5588, 6758, -6425, -22969, -7269, 7031, 1119, -1611, -5850, -11281, 3559, -8952,
- -10146, -4667, -16251, -1538, 2062, -1012, -13073, 227, -3142, -5265, 20, 5770, -7559, 4740, -4819, 992,
- -8208, -7130, -4652, 6725, 7369, -1036, 13144, -1588, -5304, -2344, -449, -5705, -8894, 5205, -17904, -11188,
- -1022, 4852, 10101, -5255, -4200, -752, 7941, -1543, 5959, 14719, 13346, 17045, -15605, -1678, -1600, -9230,
- 68, 23348, 1172, 7750, 11212, -18227, 9956, 4161, 883, 3947, 4341, 1014, -4889, -2603, 1246, -5630,
- -3596, -870, -1298, 2784, -3317, -6612, -20541, 4166, 4181, -8625, 3562, 12890, 4761, 3205, -12259, -8579
-};
-
-
-void WebRtcNetEQ_RandomVec(uint32_t *w32_seed, int16_t *pw16_randVec,
- int16_t w16_len, int16_t w16_incval)
-{
- int i;
- int16_t w16_pos;
- for (i = 0; i < w16_len; i++)
- {
- *w32_seed = (*w32_seed) + w16_incval;
- w16_pos = (int16_t) ((*w32_seed) & (RANDVEC_NO_OF_SAMPLES - 1));
- pw16_randVec[i] = WebRtcNetEQ_kRandnTbl[w16_pos];
- }
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.cc
new file mode 100644
index 00000000000..b12f2171553
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+
+namespace webrtc {
+
+const int16_t RandomVector::kRandomTable[RandomVector::kRandomTableSize] = {
+ 2680, 5532, 441, 5520, 16170, -5146, -1024, -8733, 3115, 9598, -10380,
+ -4959, -1280, -21716, 7133, -1522, 13458, -3902, 2789, -675, 3441, 5016,
+ -13599, -4003, -2739, 3922, -7209, 13352, -11617, -7241, 12905, -2314, 5426,
+ 10121, -9702, 11207, -13542, 1373, 816, -5934, -12504, 4798, 1811, 4112,
+ -613, 201, -10367, -2960, -2419, 3442, 4299, -6116, -6092, 1552, -1650,
+ -480, -1237, 18720, -11858, -8303, -8212, 865, -2890, -16968, 12052, -5845,
+ -5912, 9777, -5665, -6294, 5426, -4737, -6335, 1652, 761, 3832, 641, -8552,
+ -9084, -5753, 8146, 12156, -4915, 15086, -1231, -1869, 11749, -9319, -6403,
+ 11407, 6232, -1683, 24340, -11166, 4017, -10448, 3153, -2936, 6212, 2891,
+ -866, -404, -4807, -2324, -1917, -2388, -6470, -3895, -10300, 5323, -5403,
+ 2205, 4640, 7022, -21186, -6244, -882, -10031, -3395, -12885, 7155, -5339,
+ 5079, -2645, -9515, 6622, 14651, 15852, 359, 122, 8246, -3502, -6696, -3679,
+ -13535, -1409, -704, -7403, -4007, 1798, 279, -420, -12796, -14219, 1141,
+ 3359, 11434, 7049, -6684, -7473, 14283, -4115, -9123, -8969, 4152, 4117,
+ 13792, 5742, 16168, 8661, -1609, -6095, 1881, 14380, -5588, 6758, -6425,
+ -22969, -7269, 7031, 1119, -1611, -5850, -11281, 3559, -8952, -10146, -4667,
+ -16251, -1538, 2062, -1012, -13073, 227, -3142, -5265, 20, 5770, -7559,
+ 4740, -4819, 992, -8208, -7130, -4652, 6725, 7369, -1036, 13144, -1588,
+ -5304, -2344, -449, -5705, -8894, 5205, -17904, -11188, -1022, 4852, 10101,
+ -5255, -4200, -752, 7941, -1543, 5959, 14719, 13346, 17045, -15605, -1678,
+ -1600, -9230, 68, 23348, 1172, 7750, 11212, -18227, 9956, 4161, 883, 3947,
+ 4341, 1014, -4889, -2603, 1246, -5630, -3596, -870, -1298, 2784, -3317,
+ -6612, -20541, 4166, 4181, -8625, 3562, 12890, 4761, 3205, -12259, -8579 };
+
+void RandomVector::Reset() {
+ seed_ = 777;
+ seed_increment_ = 1;
+}
+
+void RandomVector::Generate(size_t length, int16_t* output) {
+ for (size_t i = 0; i < length; i++) {
+ seed_ += seed_increment_;
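+    // kRandomTableSize is a power of two, so masking works as a modulo.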
+ size_t position = seed_ & (kRandomTableSize - 1);
+ output[i] = kRandomTable[position];
+ }
+}
+
+void RandomVector::IncreaseSeedIncrement(int16_t increase_by) {
+  seed_increment_ += increase_by;
+ seed_increment_ &= kRandomTableSize - 1;
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.h
new file mode 100644
index 00000000000..767dc48eee3
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_RANDOM_VECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_RANDOM_VECTOR_H_
+
+#include <string.h> // size_t
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// This class generates pseudo-random samples.
+class RandomVector {
+ public:
+ static const int kRandomTableSize = 256;
+ static const int16_t kRandomTable[kRandomTableSize];
+
+ RandomVector()
+ : seed_(777),
+ seed_increment_(1) {
+ }
+
+ void Reset();
+
+ void Generate(size_t length, int16_t* output);
+
+ void IncreaseSeedIncrement(int16_t increase_by);
+
+ // Accessors and mutators.
+ int16_t seed_increment() { return seed_increment_; }
+ void set_seed_increment(int16_t value) { seed_increment_ = value; }
+
+ private:
+ uint32_t seed_;
+ int16_t seed_increment_;
+
+ DISALLOW_COPY_AND_ASSIGN(RandomVector);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_RANDOM_VECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector_unittest.cc
new file mode 100644
index 00000000000..cbdcdf7c829
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector_unittest.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for RandomVector class.
+
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+
+#include "gtest/gtest.h"
+
+namespace webrtc {
+
+TEST(RandomVector, CreateAndDestroy) {
+ RandomVector random_vector;
+}
+
+// TODO(hlundin): Write more tests.
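+
+// A minimal determinism sketch (not part of the original change): two
+// generations from the same freshly reset state should be identical, since
+// Reset() restores the constructor's seed and increment.
+TEST(RandomVector, DeterministicAfterReset) {
+  RandomVector random_vector;
+  int16_t first[32];
+  random_vector.Generate(32, first);
+  random_vector.Reset();
+  int16_t second[32];
+  random_vector.Generate(32, second);
+  for (size_t i = 0; i < 32; ++i) {
+    EXPECT_EQ(first[i], second[i]);
+  }
+}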
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/recin.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/recin.c
deleted file mode 100644
index 17bea5f5bbf..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/recin.c
+++ /dev/null
@@ -1,531 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of the RecIn function, which is the main function for inserting RTP
- * packets into NetEQ.
- */
-
-#include "mcu.h"
-
-#include <string.h>
-
-#include "automode.h"
-#include "dtmf_buffer.h"
-#include "mcu_dsp_common.h"
-#include "neteq_defines.h"
-#include "neteq_error_codes.h"
-#include "signal_processing_library.h"
-
-int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacketInput,
- uint32_t uw32_timeRec)
-{
- RTPPacket_t RTPpacket[2];
- int i_k;
- int i_ok = 0, i_No_Of_Payloads = 1;
- int16_t flushed = 0;
- int16_t codecPos;
- int curr_Codec;
- int16_t isREDPayload = 0;
- int32_t temp_bufsize;
- int is_sync_rtp = MCU_inst->av_sync && WebRtcNetEQ_IsSyncPayload(
- RTPpacketInput->payload, RTPpacketInput->payloadLen);
-#ifdef NETEQ_RED_CODEC
- RTPPacket_t* RTPpacketPtr[2]; /* Support for redundancy up to 2 payloads */
- RTPpacketPtr[0] = &RTPpacket[0];
- RTPpacketPtr[1] = &RTPpacket[1];
-#endif
-
- temp_bufsize = WebRtcNetEQ_PacketBufferGetSize(&MCU_inst->PacketBuffer_inst,
- &MCU_inst->codec_DB_inst,
- MCU_inst->av_sync);
- /*
- * Copy from input RTP packet to local copy
- * (mainly to enable multiple payloads using RED)
- */
-
- WEBRTC_SPL_MEMCPY_W8(&RTPpacket[0], RTPpacketInput, sizeof(RTPPacket_t));
-
- /* Reinitialize NetEq if it's needed (changed SSRC or first call) */
-
- if ((RTPpacket[0].ssrc != MCU_inst->ssrc) || (MCU_inst->first_packet == 1))
- {
- WebRtcNetEQ_RTCPInit(&MCU_inst->RTCP_inst, RTPpacket[0].seqNumber);
- MCU_inst->first_packet = 0;
-
- /* Flush the buffer */
- WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
-
- /* Store new SSRC */
- MCU_inst->ssrc = RTPpacket[0].ssrc;
-
- /* Update codecs */
- MCU_inst->timeStamp = RTPpacket[0].timeStamp;
- MCU_inst->current_Payload = RTPpacket[0].payloadType;
-
- /*Set MCU to update codec on next SignalMCU call */
- MCU_inst->new_codec = 1;
-
- /* Reset timestamp scaling */
- MCU_inst->TSscalingInitialized = 0;
-
- }
-
- if (!is_sync_rtp) { /* Update only if it not sync packet. */
- /* Call RTCP statistics if it is not sync packet. */
- i_ok |= WebRtcNetEQ_RTCPUpdate(&(MCU_inst->RTCP_inst),
- RTPpacket[0].seqNumber,
- RTPpacket[0].timeStamp, uw32_timeRec);
- }
-
- /* If Redundancy is supported and this is the redundancy payload, separate the payloads */
-#ifdef NETEQ_RED_CODEC
- if (RTPpacket[0].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
- kDecoderRED))
- {
- if (is_sync_rtp)
- {
- /* Sync packet should not have RED payload type. */
- return RECIN_SYNC_RTP_NOT_ACCEPTABLE;
- }
-
- /* Split the payload into a main and a redundancy payloads */
- i_ok = WebRtcNetEQ_RedundancySplit(RTPpacketPtr, 2, &i_No_Of_Payloads);
- if (i_ok < 0)
- {
- /* error returned */
- return i_ok;
- }
-
- /*
- * Only accept a few redundancies of the same type as the main data,
- * AVT events and CNG.
- */
- if ((i_No_Of_Payloads > 1) && (RTPpacket[0].payloadType != RTPpacket[1].payloadType)
- && (RTPpacket[0].payloadType != WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
- kDecoderAVT)) && (RTPpacket[1].payloadType != WebRtcNetEQ_DbGetPayload(
- &MCU_inst->codec_DB_inst, kDecoderAVT)) && (!WebRtcNetEQ_DbIsCNGPayload(
- &MCU_inst->codec_DB_inst, RTPpacket[0].payloadType))
- && (!WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst, RTPpacket[1].payloadType)))
- {
- i_No_Of_Payloads = 1;
- }
- isREDPayload = 1;
- }
-#endif
-
- /* loop over the number of payloads */
- for (i_k = 0; i_k < i_No_Of_Payloads; i_k++)
- {
-
- if (isREDPayload == 1)
- {
- RTPpacket[i_k].rcuPlCntr = i_k;
- }
- else
- {
- RTPpacket[i_k].rcuPlCntr = 0;
- }
-
- /* Force update of SplitInfo if it's iLBC because of potential change between 20/30ms */
- if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
- kDecoderILBC) && !is_sync_rtp) /* Don't update if sync RTP. */
- {
- i_ok = WebRtcNetEQ_DbGetSplitInfo(
- &MCU_inst->PayloadSplit_inst,
- (enum WebRtcNetEQDecoder) WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
- RTPpacket[i_k].payloadType), RTPpacket[i_k].payloadLen);
- if (i_ok < 0)
- {
- /* error returned */
- return i_ok;
- }
- }
-
- /* Get information about timestamp scaling for this payload type */
- i_ok = WebRtcNetEQ_GetTimestampScaling(MCU_inst, RTPpacket[i_k].payloadType);
- if (i_ok < 0)
- {
- /* error returned */
- return i_ok;
- }
-
- if (MCU_inst->TSscalingInitialized == 0 && MCU_inst->scalingFactor != kTSnoScaling)
- {
- /* Must initialize scaling with current timestamps */
- MCU_inst->externalTS = RTPpacket[i_k].timeStamp;
- MCU_inst->internalTS = RTPpacket[i_k].timeStamp;
- MCU_inst->TSscalingInitialized = 1;
- }
-
- /* Adjust timestamp if timestamp scaling is needed (e.g. SILK or G.722) */
- if (MCU_inst->TSscalingInitialized == 1)
- {
- uint32_t newTS = WebRtcNetEQ_ScaleTimestampExternalToInternal(MCU_inst,
- RTPpacket[i_k].timeStamp);
-
- /* save the incoming timestamp for next time */
- MCU_inst->externalTS = RTPpacket[i_k].timeStamp;
-
- /* add the scaled difference to last scaled timestamp and save ... */
- MCU_inst->internalTS = newTS;
-
- RTPpacket[i_k].timeStamp = newTS;
- }
-
- /* Is this a DTMF packet?*/
- if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
- kDecoderAVT))
- {
- if (is_sync_rtp)
- {
- /* Sync RTP should not have AVT payload type. */
- return RECIN_SYNC_RTP_NOT_ACCEPTABLE;
- }
-
-#ifdef NETEQ_ATEVENT_DECODE
- if (MCU_inst->AVT_PlayoutOn)
- {
- i_ok = WebRtcNetEQ_DtmfInsertEvent(&MCU_inst->DTMF_inst,
- RTPpacket[i_k].payload, RTPpacket[i_k].payloadLen,
- RTPpacket[i_k].timeStamp);
- if (i_ok != 0)
- {
- return i_ok;
- }
- }
-#endif
-#ifdef NETEQ_STEREO
- if (MCU_inst->usingStereo == 0)
- {
- /* do not set this for DTMF packets when using stereo mode */
- MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
- }
-#else
- MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
-#endif
- }
- else if (WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst,
- RTPpacket[i_k].payloadType))
- {
-            /* Is this a CNG packet? How should we handle this? */
-#ifdef NETEQ_CNG_CODEC
- /* Get CNG sample rate */
- uint16_t fsCng = WebRtcNetEQ_DbGetSampleRate(&MCU_inst->codec_DB_inst,
- RTPpacket[i_k].payloadType);
- if (is_sync_rtp)
- {
- /* Sync RTP should not have CNG payload type. */
- return RECIN_SYNC_RTP_NOT_ACCEPTABLE;
- }
-
-            /* Force the sampling frequency of 48000 Hz CNG down to 32000 Hz. */
- /* TODO(tlegrand): remove limitation once ACM has full 48 kHz
- * support. */
- if (fsCng > 32000) {
- fsCng = 32000;
- }
- if ((fsCng != MCU_inst->fs) && (fsCng > 8000))
- {
- /*
- * We have received CNG with a different sample rate from what we are using
- * now (must be > 8000, since we may use only one CNG type (default) for all
- * frequencies). Flush buffer and signal new codec.
- */
- WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
- MCU_inst->new_codec = 1;
- MCU_inst->current_Codec = -1;
- }
- i_ok = WebRtcNetEQ_PacketBufferInsert(&MCU_inst->PacketBuffer_inst,
- &RTPpacket[i_k], &flushed, MCU_inst->av_sync);
- if (i_ok < 0)
- {
- return RECIN_CNG_ERROR;
- }
- MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
-#else /* NETEQ_CNG_CODEC not defined */
- return RECIN_UNKNOWNPAYLOAD;
-#endif /* NETEQ_CNG_CODEC */
- }
- else
- {
-            /* Reinitialize the splitting if the payload type and/or the payload length has changed */
- curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
- RTPpacket[i_k].payloadType);
- if (curr_Codec != MCU_inst->current_Codec)
- {
- if (curr_Codec < 0)
- {
- return RECIN_UNKNOWNPAYLOAD;
- }
- if (is_sync_rtp)
- {
- /* Sync RTP should not cause codec change. */
- return RECIN_SYNC_RTP_CHANGED_CODEC;
- }
- MCU_inst->current_Codec = curr_Codec;
- MCU_inst->current_Payload = RTPpacket[i_k].payloadType;
- i_ok = WebRtcNetEQ_DbGetSplitInfo(&MCU_inst->PayloadSplit_inst,
- (enum WebRtcNetEQDecoder) MCU_inst->current_Codec,
- RTPpacket[i_k].payloadLen);
- if (i_ok < 0)
- { /* error returned */
- return i_ok;
- }
- WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
- MCU_inst->new_codec = 1;
- }
-
- /* Parse the payload and insert it into the buffer */
- i_ok = WebRtcNetEQ_SplitAndInsertPayload(&RTPpacket[i_k],
- &MCU_inst->PacketBuffer_inst, &MCU_inst->PayloadSplit_inst,
- &flushed, MCU_inst->av_sync);
- if (i_ok < 0)
- {
- return i_ok;
- }
- if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF != 0)
- {
- /* first normal packet after CNG or DTMF */
- MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = -1;
- }
- }
- /* Reset DSP timestamp etc. if packet buffer flushed */
- if (flushed)
- {
- MCU_inst->new_codec = 1;
- }
- }
-
- /*
- * If not sync RTP, update Bandwidth Estimate.
- * Only send the main payload to BWE.
- */
- if (!is_sync_rtp &&
- (curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
- RTPpacket[0].payloadType)) >= 0)
- {
- codecPos = MCU_inst->codec_DB_inst.position[curr_Codec];
- if (MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos] != NULL) /* codec has BWE function */
- {
- if (RTPpacket[0].starts_byte1) /* check for shifted byte alignment */
- {
- /* re-align to 16-bit alignment */
- for (i_k = 0; i_k < RTPpacket[0].payloadLen; i_k++)
- {
- WEBRTC_SPL_SET_BYTE(RTPpacket[0].payload,
- WEBRTC_SPL_GET_BYTE(RTPpacket[0].payload, i_k+1),
- i_k);
- }
- RTPpacket[0].starts_byte1 = 0;
- }
-
- MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos](
- MCU_inst->codec_DB_inst.codec_state[codecPos],
- (const uint16_t *) RTPpacket[0].payload,
- (int32_t) RTPpacket[0].payloadLen, RTPpacket[0].seqNumber,
- (uint32_t) RTPpacket[0].timeStamp, (uint32_t) uw32_timeRec);
- }
- }
-
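For reference, a minimal standalone sketch of the byte re-alignment step above, assuming the payload sits in a plain byte array (the real code operates on an int16_t buffer through the WEBRTC_SPL_GET_BYTE/SET_BYTE macros); buffer names and sizes are illustrative only:

#include <stdio.h>

/* Shift the payload one byte towards the start so it becomes 16-bit aligned.
 * The buffer must hold at least payload_len + 1 bytes. */
static void RealignPayload(unsigned char *buf, int payload_len)
{
    int i;
    for (i = 0; i < payload_len; i++)
    {
        buf[i] = buf[i + 1]; /* byte i+1 becomes byte i */
    }
}

int main(void)
{
    /* The first byte is padding; the real payload starts at offset 1. */
    unsigned char buf[5] = { 0x00, 0xAA, 0xBB, 0xCC, 0xDD };
    RealignPayload(buf, 4);
    printf("%02X %02X %02X %02X\n", buf[0], buf[1], buf[2], buf[3]); /* AA BB CC DD */
    return 0;
}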
- if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF == 0)
- {
- /* Calculate the total speech length carried in each packet */
- temp_bufsize = WebRtcNetEQ_PacketBufferGetSize(
- &MCU_inst->PacketBuffer_inst, &MCU_inst->codec_DB_inst,
- MCU_inst->av_sync) - temp_bufsize;
-
- if ((temp_bufsize > 0) && (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF
- == 0) && (temp_bufsize
- != MCU_inst->BufferStat_inst.Automode_inst.packetSpeechLenSamp))
- {
- /* Change the auto-mode parameters if packet length has changed */
- WebRtcNetEQ_SetPacketSpeechLen(&(MCU_inst->BufferStat_inst.Automode_inst),
- (int16_t) temp_bufsize, MCU_inst->fs);
- }
-
- /* update statistics */
- if ((int32_t) (RTPpacket[0].timeStamp - MCU_inst->timeStamp) >= 0
- && !MCU_inst->new_codec)
- {
- /*
- * Only update statistics if incoming packet is not older than last played out
- * packet, and if new codec flag is not set.
- */
- WebRtcNetEQ_UpdateIatStatistics(&MCU_inst->BufferStat_inst.Automode_inst,
- MCU_inst->PacketBuffer_inst.maxInsertPositions, RTPpacket[0].seqNumber,
- RTPpacket[0].timeStamp, MCU_inst->fs,
- WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) MCU_inst->current_Codec),
- (MCU_inst->NetEqPlayoutMode == kPlayoutStreaming));
- }
- }
- else if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF == -1)
- {
- /*
- * This is first "normal" packet after CNG or DTMF.
- * Reset packet time counter and measure time until next packet,
- * but don't update statistics.
- */
- MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 0;
- MCU_inst->BufferStat_inst.Automode_inst.packetIatCountSamp = 0;
- }
- return 0;
-
-}
-
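The statistics update near the end of the function above is gated on a wrap-safe "not older than the last played-out packet" test: the unsigned timestamp difference is reinterpreted as a signed 32-bit value. A standalone illustration (values are arbitrary):

#include <stdint.h>
#include <stdio.h>

static int IsNotOlder(uint32_t ts, uint32_t last_played_ts)
{
    /* Same idiom as (int32_t)(RTPpacket[0].timeStamp - MCU_inst->timeStamp) >= 0. */
    return (int32_t)(ts - last_played_ts) >= 0;
}

int main(void)
{
    printf("%d\n", IsNotOlder(1000, 500));       /* 1: plainly newer */
    printf("%d\n", IsNotOlder(10, 0xFFFFFF00u)); /* 1: newer, across the 32-bit wrap */
    printf("%d\n", IsNotOlder(0xFFFFFF00u, 10)); /* 0: older, across the 32-bit wrap */
    return 0;
}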
-int WebRtcNetEQ_GetTimestampScaling(MCUInst_t *MCU_inst, int rtpPayloadType)
-{
- enum WebRtcNetEQDecoder codec;
- int codecNumber;
-
- codecNumber = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst, rtpPayloadType);
- if (codecNumber < 0)
- {
- /* error */
- return codecNumber;
- }
-
- /* cast to enumerator */
- codec = (enum WebRtcNetEQDecoder) codecNumber;
-
- /*
- * The factor obtained below is the number with which the RTP timestamp must be
- * multiplied to get the true sample count.
- */
- switch (codec)
- {
- case kDecoderG722:
- case kDecoderG722_2ch:
- {
- /* Use timestamp scaling with factor 2 (two output samples per RTP timestamp) */
- MCU_inst->scalingFactor = kTSscalingTwo;
- break;
- }
- case kDecoderISACfb:
- case kDecoderOpus:
- {
- /* We resample Opus internally to 32 kHz, and isac-fb decodes at
- * 32 kHz, but timestamps are counted at 48 kHz. So there are two
- * output samples per three RTP timestamp ticks. */
- MCU_inst->scalingFactor = kTSscalingTwoThirds;
- break;
- }
-
- case kDecoderAVT:
- case kDecoderCNG:
- {
- /* TODO(tlegrand): remove scaling once ACM has full 48 kHz
- * support. */
- uint16_t sample_freq =
- WebRtcNetEQ_DbGetSampleRate(&MCU_inst->codec_DB_inst,
- rtpPayloadType);
- if (sample_freq == 48000) {
- MCU_inst->scalingFactor = kTSscalingTwoThirds;
- }
-
- /* For sample_freq <= 32 kHz, do not change the timestamp scaling
- * settings. */
- break;
- }
- default:
- {
- /* do not use timestamp scaling */
- MCU_inst->scalingFactor = kTSnoScaling;
- break;
- }
- }
- return 0;
-}
-
-uint32_t WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
- uint32_t externalTS)
-{
- int32_t timestampDiff;
- uint32_t internalTS;
-
- /* difference between this and last incoming timestamp */
- timestampDiff = externalTS - MCU_inst->externalTS;
-
- switch (MCU_inst->scalingFactor)
- {
- case kTSscalingTwo:
- {
- /* multiply with 2 */
- timestampDiff = WEBRTC_SPL_LSHIFT_W32(timestampDiff, 1);
- break;
- }
- case kTSscalingTwoThirds:
- {
- /* multiply with 2/3 */
- timestampDiff = WEBRTC_SPL_LSHIFT_W32(timestampDiff, 1);
- timestampDiff = WebRtcSpl_DivW32W16(timestampDiff, 3);
- break;
- }
- case kTSscalingFourThirds:
- {
- /* multiply with 4/3 */
- timestampDiff = WEBRTC_SPL_LSHIFT_W32(timestampDiff, 2);
- timestampDiff = WebRtcSpl_DivW32W16(timestampDiff, 3);
- break;
- }
- default:
- {
- /* no scaling */
- }
- }
-
- /* add the scaled difference to last scaled timestamp and save ... */
- internalTS = MCU_inst->internalTS + timestampDiff;
-
- return internalTS;
-}
-
-uint32_t WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
- uint32_t internalTS)
-{
- int32_t timestampDiff;
- uint32_t externalTS;
-
- /* difference between this and last incoming timestamp */
- timestampDiff = (int32_t) internalTS - MCU_inst->internalTS;
-
- switch (MCU_inst->scalingFactor)
- {
- case kTSscalingTwo:
- {
- /* divide by 2 */
- timestampDiff = WEBRTC_SPL_RSHIFT_W32(timestampDiff, 1);
- break;
- }
- case kTSscalingTwoThirds:
- {
- /* multiply with 3/2 */
- timestampDiff = WEBRTC_SPL_MUL_32_16(timestampDiff, 3);
- timestampDiff = WEBRTC_SPL_RSHIFT_W32(timestampDiff, 1);
- break;
- }
- case kTSscalingFourThirds:
- {
- /* multiply with 3/4 */
- timestampDiff = WEBRTC_SPL_MUL_32_16(timestampDiff, 3);
- timestampDiff = WEBRTC_SPL_RSHIFT_W32(timestampDiff, 2);
- break;
- }
- default:
- {
- /* no scaling */
- }
- }
-
- /* add the scaled difference to last scaled timestamp and save ... */
- externalTS = MCU_inst->externalTS + timestampDiff;
-
- return externalTS;
-}
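A standalone sketch of the scaling pair above for the kTSscalingTwoThirds case (Opus / iSAC-fb): deltas are scaled by 2/3 on the way in and by 3/2 on the way out, always operating on timestamp differences so that 32-bit wrap-around is handled implicitly. Plain integer arithmetic stands in for the WEBRTC_SPL shift/divide macros, and the numbers are illustrative only:

#include <stdint.h>
#include <stdio.h>

static uint32_t ExternalToInternal(uint32_t last_int, uint32_t last_ext, uint32_t ext)
{
    int32_t diff = (int32_t)(ext - last_ext);
    diff = (diff * 2) / 3; /* 48 kHz RTP ticks -> 32 kHz output samples */
    return last_int + (uint32_t)diff;
}

static uint32_t InternalToExternal(uint32_t last_ext, uint32_t last_int, uint32_t internal)
{
    int32_t diff = (int32_t)(internal - last_int);
    diff = (diff * 3) / 2; /* 32 kHz output samples -> 48 kHz RTP ticks */
    return last_ext + (uint32_t)diff;
}

int main(void)
{
    uint32_t last_ext = 960, last_int = 640;  /* previously stored pair */
    uint32_t ext = last_ext + 960;            /* one 20 ms frame at 48 kHz */
    uint32_t internal = ExternalToInternal(last_int, last_ext, ext);
    printf("internal: %u\n", (unsigned)internal);                       /* 1280 */
    printf("round trip: %u\n",
           (unsigned)InternalToExternal(last_ext, last_int, internal)); /* 1920 == ext */
    return 0;
}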
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/recout.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/recout.c
deleted file mode 100644
index 8f62007310c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/recout.c
+++ /dev/null
@@ -1,1502 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of RecOut function, which is the main function for the audio output
- * process. This function must be called (through the NetEQ API) once every 10 ms.
- */
-
-#include "dsp.h"
-
-#include <assert.h>
-#include <string.h> /* to define NULL */
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-#include "neteq_defines.h"
-#include "mcu_dsp_common.h"
-
-/* Audio types */
-#define TYPE_SPEECH 1
-#define TYPE_CNG 2
-
-#ifdef NETEQ_DELAY_LOGGING
-#include "delay_logging.h"
-#include <stdio.h>
-#pragma message("*******************************************************************")
-#pragma message("You have specified to use NETEQ_DELAY_LOGGING in the NetEQ library.")
-#pragma message("Make sure that your test application supports this.")
-#pragma message("*******************************************************************")
-#endif
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_NetEqAlgorithm_buffer 1080*fs/8000 0 1080*fs/8000-1
- struct dspInfo 6 1080*fs/8000 1085*fs/8000
-
- func WebRtcNetEQ_Normal 40+495*fs/8000 0 39+495*fs/8000
- func WebRtcNetEQ_Merge 40+496*fs/8000 0 39+496*fs/8000
-  func WebRtcNetEQ_Expand 40+370*fs/8000 126*fs/8000 39+496*fs/8000
- func WebRtcNetEQ_Accelerate 210 240*fs/8000 209+240*fs/8000
- func WebRtcNetEQ_BGNUpdate 69 480*fs/8000 68+480*fs/8000
-
- Total: 1086*fs/8000
- */
-
-#define SCRATCH_ALGORITHM_BUFFER 0
-#define SCRATCH_NETEQ_NORMAL 0
-#define SCRATCH_NETEQ_MERGE 0
-
-#if (defined(NETEQ_48KHZ_WIDEBAND))
-#define SCRATCH_DSP_INFO 6480
-#define SCRATCH_NETEQ_ACCELERATE 1440
-#define SCRATCH_NETEQ_BGN_UPDATE 2880
-#define SCRATCH_NETEQ_EXPAND 756
-#elif (defined(NETEQ_32KHZ_WIDEBAND))
-#define SCRATCH_DSP_INFO 4320
-#define SCRATCH_NETEQ_ACCELERATE 960
-#define SCRATCH_NETEQ_BGN_UPDATE 1920
-#define SCRATCH_NETEQ_EXPAND 504
-#elif (defined(NETEQ_WIDEBAND))
-#define SCRATCH_DSP_INFO 2160
-#define SCRATCH_NETEQ_ACCELERATE 480
-#define SCRATCH_NETEQ_BGN_UPDATE 960
-#define SCRATCH_NETEQ_EXPAND 252
-#else /* NB */
-#define SCRATCH_DSP_INFO 1080
-#define SCRATCH_NETEQ_ACCELERATE 240
-#define SCRATCH_NETEQ_BGN_UPDATE 480
-#define SCRATCH_NETEQ_EXPAND 126
-#endif
-
-#if (defined(NETEQ_48KHZ_WIDEBAND))
-#define SIZE_SCRATCH_BUFFER 6516
-#elif (defined(NETEQ_32KHZ_WIDEBAND))
-#define SIZE_SCRATCH_BUFFER 4344
-#elif (defined(NETEQ_WIDEBAND))
-#define SIZE_SCRATCH_BUFFER 2172
-#else /* NB */
-#define SIZE_SCRATCH_BUFFER 1086
-#endif
-
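The scratch layout above scales linearly with fs_mult = fs/8000; an illustrative standalone check that reproduces the SCRATCH_* and SIZE_SCRATCH_BUFFER values for the narrowband (fs_mult = 1), 16 kHz (2), 32 kHz (4) and 48 kHz (6) builds:

#include <stdio.h>

int main(void)
{
    static const int fs_mults[4] = { 1, 2, 4, 6 }; /* NB, WB, 32 kHz, 48 kHz */
    int i;
    for (i = 0; i < 4; i++)
    {
        int fs_mult = fs_mults[i];
        int dsp_info = 1080 * fs_mult;   /* SCRATCH_DSP_INFO */
        int accelerate = 240 * fs_mult;  /* SCRATCH_NETEQ_ACCELERATE */
        int bgn_update = 480 * fs_mult;  /* SCRATCH_NETEQ_BGN_UPDATE */
        int expand = 126 * fs_mult;      /* SCRATCH_NETEQ_EXPAND */
        int total = 1086 * fs_mult;      /* SIZE_SCRATCH_BUFFER */
        printf("fs_mult=%d: dsp_info=%d accelerate=%d bgn_update=%d expand=%d total=%d\n",
               fs_mult, dsp_info, accelerate, bgn_update, expand, total);
    }
    return 0;
}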
-#ifdef NETEQ_DELAY_LOGGING
-extern FILE *delay_fid2; /* file pointer to delay log file */
-extern uint32_t tot_received_packets;
-#endif
-
-
-int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, int16_t *pw16_outData,
- int16_t *pw16_len, int16_t BGNonly,
- int av_sync)
-{
-
- int16_t blockLen, payloadLen, len = 0, pos;
- int16_t w16_tmp1, w16_tmp2, w16_tmp3, DataEnough;
- int16_t *blockPtr;
- int16_t MD = 0;
-
- int16_t speechType = TYPE_SPEECH;
- uint16_t instr;
- uint16_t uw16_tmp;
-#ifdef SCRATCH
- char pw8_ScratchBuffer[((SIZE_SCRATCH_BUFFER + 1) * 2)];
- int16_t *pw16_scratchPtr = (int16_t*) pw8_ScratchBuffer;
- /* pad with 240*fs_mult to match the overflow guard below */
- int16_t pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE+240*6];
- int16_t *pw16_NetEqAlgorithm_buffer = pw16_scratchPtr
- + SCRATCH_ALGORITHM_BUFFER;
- DSP2MCU_info_t *dspInfo = (DSP2MCU_info_t*) (pw16_scratchPtr + SCRATCH_DSP_INFO);
-#else
- /* pad with 240*fs_mult to match the overflow guard below */
- int16_t pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE+240*6];
- int16_t pw16_NetEqAlgorithm_buffer[NETEQ_MAX_OUTPUT_SIZE+240*6];
- DSP2MCU_info_t dspInfoStruct;
- DSP2MCU_info_t *dspInfo = &dspInfoStruct;
-#endif
- int16_t fs_mult;
- int borrowedSamples;
- int oldBorrowedSamples;
- int return_value = 0;
- int16_t lastModeBGNonly = (inst->w16_mode & MODE_BGN_ONLY) != 0; /* check BGN flag */
- void *mainInstBackup = inst->main_inst;
-
-#ifdef NETEQ_DELAY_LOGGING
- int temp_var;
-#endif
- int16_t dtmfValue = -1;
- int16_t dtmfVolume = -1;
- int playDtmf = 0;
-#ifdef NETEQ_ATEVENT_DECODE
- int dtmfSwitch = 0;
-#endif
-#ifdef NETEQ_STEREO
- MasterSlaveInfo *msInfo = inst->msInfo;
-#endif
-    int16_t *sharedMem = pw16_NetEqAlgorithm_buffer; /* Reuse memory of size SHARED_MEM_SIZE */
- inst->pw16_readAddress = sharedMem;
- inst->pw16_writeAddress = sharedMem;
-
- /* Get information about if there is one descriptor left */
- if (inst->codec_ptr_inst.funcGetMDinfo != NULL)
- {
- MD = inst->codec_ptr_inst.funcGetMDinfo(inst->codec_ptr_inst.codec_state);
- if (MD > 0)
- MD = 1;
- else
- MD = 0;
- }
-
-#ifdef NETEQ_STEREO
- if ((msInfo->msMode == NETEQ_SLAVE) && (inst->codec_ptr_inst.funcDecode != NULL))
- {
- /*
- * Valid function pointers indicate that we have decoded something,
- * and that the timestamp information is correct.
- */
-
- /* Get the information from master to correct synchronization */
- uint32_t currentMasterTimestamp;
- uint32_t currentSlaveTimestamp;
-
- currentMasterTimestamp = msInfo->endTimestamp - msInfo->samplesLeftWithOverlap;
- currentSlaveTimestamp = inst->endTimestamp - (inst->endPosition - inst->curPosition);
-
- /* Partition the uint32_t space in three: [0 0.25) [0.25 0.75] (0.75 1]
- * We consider a wrap to have occurred if the timestamps are in
- * different edge partitions.
- */
- if (currentSlaveTimestamp < 0x40000000 &&
- currentMasterTimestamp > 0xc0000000) {
- // Slave has wrapped.
- currentSlaveTimestamp += (0xffffffff - currentMasterTimestamp) + 1;
- currentMasterTimestamp = 0;
- } else if (currentMasterTimestamp < 0x40000000 &&
- currentSlaveTimestamp > 0xc0000000) {
- // Master has wrapped.
- currentMasterTimestamp += (0xffffffff - currentSlaveTimestamp) + 1;
- currentSlaveTimestamp = 0;
- }
-
- if (currentSlaveTimestamp < currentMasterTimestamp)
- {
- /* brute-force discard a number of samples to catch up */
- inst->curPosition += currentMasterTimestamp - currentSlaveTimestamp;
-
- }
- else if (currentSlaveTimestamp > currentMasterTimestamp)
- {
- /* back off current position to slow down */
- inst->curPosition -= currentSlaveTimestamp - currentMasterTimestamp;
- }
-
- /* make sure we have at least "overlap" samples left */
- inst->curPosition = WEBRTC_SPL_MIN(inst->curPosition,
- inst->endPosition - inst->ExpandInst.w16_overlap);
-
- /* make sure we do not end up outside the speech history */
- inst->curPosition = WEBRTC_SPL_MAX(inst->curPosition, 0);
- }
-#endif
-
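A standalone sketch of the master/slave wrap handling above: the uint32_t timestamp space is split into [0, 0.25), [0.25, 0.75] and (0.75, 1], and a wrap is assumed when the two timestamps land in opposite edge partitions; the wrapped one is shifted so that the subsequent comparison still orders them correctly. Values are illustrative only:

#include <stdint.h>
#include <stdio.h>

static void UnwrapPair(uint32_t *master, uint32_t *slave)
{
    if (*slave < 0x40000000 && *master > 0xc0000000)
    {
        /* Slave has wrapped. */
        *slave += (0xffffffffu - *master) + 1;
        *master = 0;
    }
    else if (*master < 0x40000000 && *slave > 0xc0000000)
    {
        /* Master has wrapped. */
        *master += (0xffffffffu - *slave) + 1;
        *slave = 0;
    }
}

int main(void)
{
    uint32_t master = 0xfffffff0u; /* just before the wrap */
    uint32_t slave = 0x00000010u;  /* just after the wrap */
    UnwrapPair(&master, &slave);
    printf("master=%u slave=%u -> slave ahead by %u samples\n",
           (unsigned)master, (unsigned)slave, (unsigned)(slave - master)); /* 32 */
    return 0;
}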
- /* Write status data to shared memory */
- dspInfo->playedOutTS = inst->endTimestamp;
- dspInfo->samplesLeft = inst->endPosition - inst->curPosition
- - inst->ExpandInst.w16_overlap;
- dspInfo->MD = MD;
- dspInfo->lastMode = inst->w16_mode;
- dspInfo->frameLen = inst->w16_frameLen;
-
- /* Force update of codec if codec function is NULL */
- if (inst->codec_ptr_inst.funcDecode == NULL)
- {
- dspInfo->lastMode |= MODE_AWAITING_CODEC_PTR;
- }
-
-#ifdef NETEQ_STEREO
- if (msInfo->msMode == NETEQ_SLAVE && (msInfo->extraInfo == DTMF_OVERDUB
- || msInfo->extraInfo == DTMF_ONLY))
- {
- /* Signal that the master instance generated DTMF tones */
- dspInfo->lastMode |= MODE_MASTER_DTMF_SIGNAL;
- }
-
- if (msInfo->msMode != NETEQ_MONO)
- {
- /* We are using stereo mode; signal this to MCU side */
- dspInfo->lastMode |= MODE_USING_STEREO;
- }
-#endif
-
- WEBRTC_SPL_MEMCPY_W8(inst->pw16_writeAddress,dspInfo,sizeof(DSP2MCU_info_t));
-
- /* Signal MCU with "interrupt" call to main inst*/
-#ifdef NETEQ_STEREO
- assert(msInfo != NULL);
- if (msInfo->msMode == NETEQ_MASTER)
- {
- /* clear info to slave */
- WebRtcSpl_MemSetW16((int16_t *) msInfo, 0,
- sizeof(MasterSlaveInfo) / sizeof(int16_t));
- /* re-set mode */
- msInfo->msMode = NETEQ_MASTER;
-
- /* Store some information to slave */
- msInfo->endTimestamp = inst->endTimestamp;
- msInfo->samplesLeftWithOverlap = inst->endPosition - inst->curPosition;
- }
-#endif
-
- /*
- * This call will trigger the MCU side to make a decision based on buffer contents and
- * decision history. Instructions, encoded data and function pointers will be written
- * to the shared memory.
- */
- return_value = WebRtcNetEQ_DSP2MCUinterrupt((MainInst_t *) inst->main_inst, sharedMem);
-
- /* Read MCU data and instructions */
- instr = (uint16_t) (inst->pw16_readAddress[0] & 0xf000);
-
-#ifdef NETEQ_STEREO
- if (msInfo->msMode == NETEQ_MASTER)
- {
- msInfo->instruction = instr;
- }
- else if (msInfo->msMode == NETEQ_SLAVE)
- {
- /* Nothing to do */
- }
-#endif
-
- /* check for error returned from MCU side, if so, return error */
- if (return_value < 0)
- {
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return return_value;
- }
-
- blockPtr = &((inst->pw16_readAddress)[3]);
-
- /* Check for DTMF payload flag */
- if ((inst->pw16_readAddress[0] & DSP_DTMF_PAYLOAD) != 0)
- {
- playDtmf = 1;
- dtmfValue = blockPtr[1];
- dtmfVolume = blockPtr[2];
- blockPtr += 3;
-
-#ifdef NETEQ_STEREO
- if (msInfo->msMode == NETEQ_MASTER)
- {
- /* signal to slave that master is using DTMF */
- msInfo->extraInfo = DTMF_OVERDUB;
- }
-#endif
- }
-
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of int16_t */
- payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
- blockPtr++;
-
- /* Do we have to change our decoder? */
- if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_NEW_CODEC)
- {
- WEBRTC_SPL_MEMCPY_W16(&inst->codec_ptr_inst,blockPtr,(payloadLen+1)>>1);
- if (inst->codec_ptr_inst.codec_fs != 0)
- {
- return_value = WebRtcNetEQ_DSPInit(inst, inst->codec_ptr_inst.codec_fs);
- if (return_value != 0)
- { /* error returned */
- instr = DSP_INSTR_FADE_TO_BGN; /* emergency instruction */
- }
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS;
- if ((fwrite(&temp_var, sizeof(int),
- 1, delay_fid2) != 1) ||
- (fwrite(&inst->fs, sizeof(uint16_t),
- 1, delay_fid2) != 1)) {
- return -1;
- }
-#endif
- }
-
- /* Copy it again since the init destroys this part */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->codec_ptr_inst,blockPtr,(payloadLen+1)>>1);
- inst->endTimestamp = inst->codec_ptr_inst.timeStamp;
- inst->videoSyncTimestamp = inst->codec_ptr_inst.timeStamp;
- blockPtr += blockLen;
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1;
- payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
- blockPtr++;
- if (inst->codec_ptr_inst.funcDecodeInit != NULL)
- {
- inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
- }
-
-#ifdef NETEQ_CNG_CODEC
-
- /* Also update the CNG state as this might be uninitialized */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->CNG_Codec_inst,blockPtr,(payloadLen+1)>>1);
- blockPtr += blockLen;
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1;
- payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
- blockPtr++;
- if (inst->CNG_Codec_inst != NULL)
- {
- WebRtcCng_InitDec(inst->CNG_Codec_inst);
- }
-#endif
- }
- else if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_RESET)
- {
- /* Reset the current codec (but not DSP struct) */
- if (inst->codec_ptr_inst.funcDecodeInit != NULL)
- {
- inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
- }
-
-#ifdef NETEQ_CNG_CODEC
- /* And reset CNG */
- if (inst->CNG_Codec_inst != NULL)
- {
- WebRtcCng_InitDec(inst->CNG_Codec_inst);
- }
-#endif /*NETEQ_CNG_CODEC*/
- }
-
- fs_mult = WebRtcNetEQ_CalcFsMult(inst->fs);
-
- /* Add late packet? */
- if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_ADD_LATE_PKT)
- {
- if (inst->codec_ptr_inst.funcAddLatePkt != NULL)
- {
- /* Only do this if the codec has support for Add Late Pkt */
- inst->codec_ptr_inst.funcAddLatePkt(inst->codec_ptr_inst.codec_state, blockPtr,
- payloadLen);
- }
- blockPtr += blockLen;
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of Word16 */
- payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
- blockPtr++;
- }
-
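The shared-memory control word consumed above packs several fields: the top nibble selects the DSP instruction (DSP_INSTR_*), the next nibble selects a codec operation (DSP_CODEC_NEW_CODEC, DSP_CODEC_RESET or DSP_CODEC_ADD_LATE_PKT), and a separate mask (DSP_DTMF_PAYLOAD) flags an embedded DTMF payload. The actual constant values are defined in the NetEQ headers and are not reproduced here; the sketch below only shows the field extraction, with an arbitrary example value:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint16_t control_word = 0x3241;                   /* example value only */
    uint16_t instruction = control_word & 0xf000;     /* DSP_INSTR_* field */
    uint16_t codec_operation = control_word & 0x0f00; /* DSP_CODEC_* field */
    printf("instruction field:     0x%04x\n", instruction);     /* 0x3000 */
    printf("codec operation field: 0x%04x\n", codec_operation); /* 0x0200 */
    return 0;
}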
- /* Do we have to decode data? */
- if ((instr == DSP_INSTR_NORMAL) || (instr == DSP_INSTR_ACCELERATE) || (instr
- == DSP_INSTR_MERGE) || (instr == DSP_INSTR_PREEMPTIVE_EXPAND))
- {
- /* Do we need to update codec-internal PLC state? */
- if ((instr == DSP_INSTR_MERGE) && (inst->codec_ptr_inst.funcDecodePLC != NULL))
- {
- len = 0;
- len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
- &pw16_decoded_buffer[len], 1);
- }
- len = 0;
-
- /* Do decoding */
- while ((blockLen > 0) && (len < (240 * fs_mult))) /* Guard somewhat against overflow */
- {
- if (inst->codec_ptr_inst.funcDecode != NULL)
- {
- int16_t dec_Len;
- if (!BGNonly)
- {
- /* Check if this is a sync payload. */
- if (av_sync && WebRtcNetEQ_IsSyncPayload(blockPtr,
- payloadLen)) {
- /* Zero-stuffing with same size as the last frame. */
- dec_Len = inst->w16_frameLen;
- memset(&pw16_decoded_buffer[len], 0, dec_Len *
- sizeof(pw16_decoded_buffer[len]));
- } else {
-                        /* Do decoding as normal.
-                         *
-                         * At this point, blockPtr points to the payload. The most
-                         * significant bit of *(blockPtr - 1) is a flag; if set to 1,
-                         * it indicates that the following payload is the redundant
-                         * payload.
-                         */
- if (((*(blockPtr - 1) & DSP_CODEC_RED_FLAG) != 0)
- && (inst->codec_ptr_inst.funcDecodeRCU != NULL))
- {
- dec_Len = inst->codec_ptr_inst.funcDecodeRCU(
- inst->codec_ptr_inst.codec_state, blockPtr,
- payloadLen, &pw16_decoded_buffer[len], &speechType);
- }
- else
- {
- /* Regular decoding. */
- dec_Len = inst->codec_ptr_inst.funcDecode(
- inst->codec_ptr_inst.codec_state, blockPtr,
- payloadLen, &pw16_decoded_buffer[len], &speechType);
- }
- }
- }
- else
- {
- /*
- * Background noise mode: don't decode, just produce the same length BGN.
- * Don't call Expand for BGN here, since Expand uses the memory where the
-                     * bitstreams are stored (sharedMem).
- */
- dec_Len = inst->w16_frameLen;
- }
-
- if (dec_Len > 0)
- {
- len += dec_Len;
- /* Update frameLen */
- inst->w16_frameLen = dec_Len;
- }
- else if (dec_Len < 0)
- {
- /* Error */
- len = -1;
- break;
- }
- /*
- * Sanity check (although we might still write outside memory when this
- * happens...)
- */
- if (len > NETEQ_MAX_FRAME_SIZE)
- {
- WebRtcSpl_MemSetW16(pw16_outData, 0, inst->timestampsPerCall);
- *pw16_len = inst->timestampsPerCall;
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return RECOUT_ERROR_DECODED_TOO_MUCH;
- }
-
- /* Verify that instance was not corrupted by decoder */
- if (mainInstBackup != inst->main_inst)
- {
- /* Instance is corrupt */
- return CORRUPT_INSTANCE;
- }
-
- }
- blockPtr += blockLen;
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of Word16 */
- payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
- blockPtr++;
- }
-
- if (len < 0)
- {
- len = 0;
- inst->endTimestamp += inst->w16_frameLen; /* advance one frame */
- if (inst->codec_ptr_inst.funcGetErrorCode != NULL)
- {
- return_value = -inst->codec_ptr_inst.funcGetErrorCode(
- inst->codec_ptr_inst.codec_state);
- }
- else
- {
- return_value = RECOUT_ERROR_DECODING;
- }
- instr = DSP_INSTR_FADE_TO_BGN;
- }
- if (speechType != TYPE_CNG)
- {
- /*
- * Don't increment timestamp if codec returned CNG speech type
- * since in this case, the MCU side will increment the CNGplayedTS counter.
- */
- inst->endTimestamp += len;
- }
- }
- else if (instr == DSP_INSTR_NORMAL_ONE_DESC)
- {
- if (inst->codec_ptr_inst.funcDecode != NULL)
- {
- len = inst->codec_ptr_inst.funcDecode(inst->codec_ptr_inst.codec_state, NULL, 0,
- pw16_decoded_buffer, &speechType);
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- if (fwrite(&inst->endTimestamp, sizeof(uint32_t),
- 1, delay_fid2) != 1) {
- return -1;
- }
- if (fwrite(&dspInfo->samplesLeft, sizeof(uint16_t),
- 1, delay_fid2) != 1) {
- return -1;
- }
- tot_received_packets++;
-#endif
- }
- if (speechType != TYPE_CNG)
- {
- /*
- * Don't increment timestamp if codec returned CNG speech type
- * since in this case, the MCU side will increment the CNGplayedTS counter.
- */
- inst->endTimestamp += len;
- }
-
- /* Verify that instance was not corrupted by decoder */
- if (mainInstBackup != inst->main_inst)
- {
- /* Instance is corrupt */
- return CORRUPT_INSTANCE;
- }
-
- if (len <= 0)
- {
- len = 0;
- if (inst->codec_ptr_inst.funcGetErrorCode != NULL)
- {
- return_value = -inst->codec_ptr_inst.funcGetErrorCode(
- inst->codec_ptr_inst.codec_state);
- }
- else
- {
- return_value = RECOUT_ERROR_DECODING;
- }
- if ((inst->codec_ptr_inst.funcDecodeInit != NULL)
- && (inst->codec_ptr_inst.codec_state != NULL))
- {
- /* Reinitialize codec state as something is obviously wrong */
- inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
- }
- inst->endTimestamp += inst->w16_frameLen; /* advance one frame */
- instr = DSP_INSTR_FADE_TO_BGN;
- }
- }
-
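A standalone sketch of the decoder selection inside the decode loop above: the most significant bit of the length word preceding a payload marks it as redundant, and the RCU decoder is used only when the codec provides one. The flag value, the decoder signatures and their return values are stand-ins for illustration:

#include <stdint.h>
#include <stdio.h>

#define RED_FLAG_BIT 0x8000 /* stand-in for DSP_CODEC_RED_FLAG (the MSB of the length word) */

typedef int (*DecodeFunc)(const int16_t *payload, int payload_len, int16_t *out);

static int DecodeRegular(const int16_t *p, int len, int16_t *out)
{
    (void)p; (void)len; (void)out;
    return 160; /* pretend we decoded 160 samples */
}

static int DecodeRcu(const int16_t *p, int len, int16_t *out)
{
    (void)p; (void)len; (void)out;
    return 160;
}

static int DecodeBlock(uint16_t length_word, const int16_t *payload, int payload_len,
                       int16_t *out, DecodeFunc regular, DecodeFunc rcu)
{
    if ((length_word & RED_FLAG_BIT) != 0 && rcu != NULL)
    {
        return rcu(payload, payload_len, out); /* redundant payload */
    }
    return regular(payload, payload_len, out); /* main payload */
}

int main(void)
{
    int16_t out[160];
    printf("%d\n", DecodeBlock(0x00a0, NULL, 160, out, DecodeRegular, DecodeRcu));
    printf("%d\n", DecodeBlock(0x80a0, NULL, 160, out, DecodeRegular, DecodeRcu));
    return 0;
}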
- if (len == 0 && lastModeBGNonly) /* no new data */
- {
- BGNonly = 1; /* force BGN this time too */
- }
-
-#ifdef NETEQ_VAD
- if ((speechType == TYPE_CNG) /* decoder responded with codec-internal CNG */
- || ((instr == DSP_INSTR_DO_RFC3389CNG) && (blockLen > 0)) /* ... or, SID frame */
- || (inst->fs > 16000)) /* ... or, if not NB or WB */
- {
- /* disable post-decode VAD upon first sign of send-side DTX/VAD active, or if SWB */
- inst->VADInst.VADEnabled = 0;
- inst->VADInst.VADDecision = 1; /* set to always active, just to be on the safe side */
- inst->VADInst.SIDintervalCounter = 0; /* reset SID interval counter */
- }
- else if (!inst->VADInst.VADEnabled) /* VAD disabled and no SID/CNG data observed this time */
- {
- inst->VADInst.SIDintervalCounter++; /* increase counter */
- }
-
- /* check for re-enabling the VAD */
- if (inst->VADInst.SIDintervalCounter >= POST_DECODE_VAD_AUTO_ENABLE)
- {
- /*
- * It's been a while since the last CNG/SID frame was observed => re-enable VAD.
- * (Do not care to look for a VAD instance, since this is done inside the init
- * function)
- */
- WebRtcNetEQ_InitVAD(&inst->VADInst, inst->fs);
- }
-
- if (len > 0 /* if we decoded any data */
- && inst->VADInst.VADEnabled /* and VAD enabled */
- && inst->fs <= 16000) /* can only do VAD for NB and WB */
- {
- int VADframeSize; /* VAD frame size in ms */
- int VADSamplePtr = 0;
-
- inst->VADInst.VADDecision = 0;
-
- if (inst->VADInst.VADFunction != NULL) /* make sure that VAD function is provided */
- {
- /* divide the data into groups, as large as possible */
- for (VADframeSize = 30; VADframeSize >= 10; VADframeSize -= 10)
- {
- /* loop through 30, 20, 10 */
-
- while (inst->VADInst.VADDecision == 0
- && len - VADSamplePtr >= VADframeSize * fs_mult * 8)
- {
- /*
- * Only continue until first active speech found, and as long as there is
- * one VADframeSize left.
- */
-
- /* call VAD with new decoded data */
- inst->VADInst.VADDecision |= inst->VADInst.VADFunction(
- inst->VADInst.VADState, (int) inst->fs,
- (int16_t *) &pw16_decoded_buffer[VADSamplePtr],
- (VADframeSize * fs_mult * 8));
-
- VADSamplePtr += VADframeSize * fs_mult * 8; /* increment sample counter */
- }
- }
- }
- else
- { /* VAD function is NULL */
- inst->VADInst.VADDecision = 1; /* set decision to active */
- inst->VADInst.VADEnabled = 0; /* disable VAD since we have no VAD function */
- }
-
- }
-#endif /* NETEQ_VAD */
-
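A standalone sketch of the post-decode VAD chunking above: the decoded frame is fed to the VAD in the largest possible 30/20/10 ms pieces until active speech is found or the data runs out. The VAD itself is replaced here by a trivial amplitude test; the real code calls the VAD function pointer in inst->VADInst:

#include <stdint.h>
#include <stdio.h>

static int ToyVad(const int16_t *audio, int samples)
{
    int i;
    for (i = 0; i < samples; i++)
    {
        if (audio[i] > 500 || audio[i] < -500)
        {
            return 1; /* "active speech" */
        }
    }
    return 0;
}

static int RunVadInChunks(const int16_t *decoded, int len, int fs_mult)
{
    int decision = 0;
    int pos = 0;
    int frame_ms;
    for (frame_ms = 30; frame_ms >= 10; frame_ms -= 10) /* 30, 20, 10 ms */
    {
        int frame_samples = frame_ms * fs_mult * 8; /* fs_mult = fs/8000 */
        while (decision == 0 && len - pos >= frame_samples)
        {
            decision |= ToyVad(&decoded[pos], frame_samples);
            pos += frame_samples;
        }
    }
    return decision;
}

int main(void)
{
    int16_t decoded[320] = { 0 }; /* 40 ms of silence at 8 kHz */
    decoded[300] = 1000;          /* one loud sample near the end */
    printf("decision: %d\n", RunVadInChunks(decoded, 320, 1)); /* 1 */
    return 0;
}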
- /* Adjust timestamp if needed */
- uw16_tmp = (uint16_t) inst->pw16_readAddress[1];
- inst->endTimestamp += (((uint32_t) uw16_tmp) << 16);
- uw16_tmp = (uint16_t) inst->pw16_readAddress[2];
- inst->endTimestamp += uw16_tmp;
-
- if (BGNonly && len > 0)
- {
- /*
- * If BGN mode, we did not produce any data at decoding.
- * Do it now instead.
- */
-
- WebRtcNetEQ_GenerateBGN(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_decoded_buffer, len);
- }
-
- /* Switch on the instruction received from the MCU side. */
- switch (instr)
- {
- case DSP_INSTR_NORMAL:
-
- /* Allow for signal processing to apply gain-back etc */
- WebRtcNetEQ_Normal(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
-#endif
- pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
-
-            /* If the last packet was decoded as inband CNG, set mode to CNG instead */
- if ((speechType == TYPE_CNG) || ((inst->w16_mode == MODE_CODEC_INTERNAL_CNG)
- && (len == 0)))
- {
- inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
- }
-
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- break;
- case DSP_INSTR_NORMAL_ONE_DESC:
-
- /* Allow for signal processing to apply gain-back etc */
- WebRtcNetEQ_Normal(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
-#endif
- pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- inst->w16_mode = MODE_ONE_DESCRIPTOR;
- break;
- case DSP_INSTR_MERGE:
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = -len;
-#endif
- /* Call Merge with history*/
- return_value = WebRtcNetEQ_Merge(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_MERGE,
-#endif
- pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
-
- if (return_value < 0)
- {
- /* error */
- return return_value;
- }
-
-#ifdef NETEQ_DELAY_LOGGING
- temp_var += len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
-            /* If the last packet was decoded as inband CNG, set mode to CNG instead */
- if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- break;
-
- case DSP_INSTR_EXPAND:
- len = 0;
- pos = 0;
- while ((inst->endPosition - inst->curPosition - inst->ExpandInst.w16_overlap + pos)
- < (inst->timestampsPerCall))
- {
- return_value = WebRtcNetEQ_Expand(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_NetEqAlgorithm_buffer, &len, BGNonly);
- if (return_value < 0)
- {
- /* error */
- return return_value;
- }
-
- /*
-                 * Update the buffer, but only the end part (otherwise the expand state
-                 * is destroyed, since it reuses speechBuffer[] memory).
- */
-
- WEBRTC_SPL_MEMMOVE_W16(inst->pw16_speechHistory,
- inst->pw16_speechHistory + len,
- (inst->w16_speechHistoryLen-len));
- WEBRTC_SPL_MEMCPY_W16(&inst->pw16_speechHistory[inst->w16_speechHistoryLen-len],
- pw16_NetEqAlgorithm_buffer, len);
-
- inst->curPosition -= len;
-
- /* Update variables for VQmon */
- inst->w16_concealedTS += len;
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- len = 0; /* already written the data, so do not write it again further down. */
- }
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- break;
-
- case DSP_INSTR_ACCELERATE:
- if (len < 3 * 80 * fs_mult)
- {
- /* We need to move data from the speechBuffer[] in order to get 30 ms */
- borrowedSamples = 3 * 80 * fs_mult - len;
-
- WEBRTC_SPL_MEMMOVE_W16(&pw16_decoded_buffer[borrowedSamples],
- pw16_decoded_buffer, len);
- WEBRTC_SPL_MEMCPY_W16(pw16_decoded_buffer,
- &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
- borrowedSamples);
-
- return_value = WebRtcNetEQ_Accelerate(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
-#endif
- pw16_decoded_buffer, 3 * inst->timestampsPerCall,
- pw16_NetEqAlgorithm_buffer, &len, BGNonly);
-
- if (return_value < 0)
- {
- /* error */
- return return_value;
- }
-
- /* Copy back samples to the buffer */
- if (len < borrowedSamples)
- {
- /*
- * This destroys the beginning of the buffer, but will not cause any
- * problems
- */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-borrowedSamples],
- pw16_NetEqAlgorithm_buffer, len);
- WEBRTC_SPL_MEMMOVE_W16(&inst->speechBuffer[borrowedSamples-len],
- inst->speechBuffer,
- (inst->endPosition-(borrowedSamples-len)));
-
- inst->curPosition += (borrowedSamples - len);
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = 3 * inst->timestampsPerCall - len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- len = 0;
- }
- else
- {
- WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-borrowedSamples],
- pw16_NetEqAlgorithm_buffer, borrowedSamples);
- WEBRTC_SPL_MEMMOVE_W16(pw16_NetEqAlgorithm_buffer,
- &pw16_NetEqAlgorithm_buffer[borrowedSamples],
- (len-borrowedSamples));
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = 3 * inst->timestampsPerCall - len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- len = len - borrowedSamples;
- }
-
- }
- else
- {
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = len;
-#endif
- return_value = WebRtcNetEQ_Accelerate(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
-#endif
- pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len, BGNonly);
-
- if (return_value < 0)
- {
- /* error */
- return return_value;
- }
-
-#ifdef NETEQ_DELAY_LOGGING
- temp_var -= len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- }
-            /* If the last packet was decoded as inband CNG, set mode to CNG instead */
- if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- break;
-
- case DSP_INSTR_DO_RFC3389CNG:
-#ifdef NETEQ_CNG_CODEC
- if (blockLen > 0)
- {
- if (WebRtcCng_UpdateSid(inst->CNG_Codec_inst, (uint8_t*) blockPtr,
- payloadLen) < 0)
- {
- /* error returned from CNG function */
- return_value = -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
- len = inst->timestampsPerCall;
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
- break;
- }
- }
-
- if (BGNonly)
- {
- /* Get data from BGN function instead of CNG */
- len = WebRtcNetEQ_GenerateBGN(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_NetEqAlgorithm_buffer, inst->timestampsPerCall);
- if (len != inst->timestampsPerCall)
- {
- /* this is not good, treat this as an error */
- return_value = -1;
- }
- }
- else
- {
- return_value = WebRtcNetEQ_Cng(inst, pw16_NetEqAlgorithm_buffer,
- inst->timestampsPerCall);
- }
- len = inst->timestampsPerCall;
- inst->ExpandInst.w16_consecExp = 0;
- inst->w16_mode = MODE_RFC3389CNG;
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
-
- if (return_value < 0)
- {
- /* error returned */
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
- }
-
- break;
-#else
- return FAULTY_INSTRUCTION;
-#endif
- case DSP_INSTR_DO_CODEC_INTERNAL_CNG:
- /*
- * This represents the case when there is no transmission and the decoder should
- * do internal CNG.
- */
- len = 0;
- if (inst->codec_ptr_inst.funcDecode != NULL && !BGNonly)
- {
- len = inst->codec_ptr_inst.funcDecode(inst->codec_ptr_inst.codec_state,
- blockPtr, 0, pw16_decoded_buffer, &speechType);
- }
- else
- {
- /* get BGN data */
- len = WebRtcNetEQ_GenerateBGN(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_decoded_buffer, inst->timestampsPerCall);
- }
- WebRtcNetEQ_Normal(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
-#endif
- pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
- inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
- inst->ExpandInst.w16_consecExp = 0;
- break;
-
- case DSP_INSTR_DTMF_GENERATE:
-#ifdef NETEQ_ATEVENT_DECODE
- dtmfSwitch = 0;
- if ((inst->w16_mode != MODE_DTMF) && (inst->DTMFInst.reinit == 0))
- {
- /* Special case; see below.
- * We must catch this before calling DTMFGenerate,
- * since reinit is set to 0 in that call.
- */
- dtmfSwitch = 1;
- }
-
- len = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
- pw16_NetEqAlgorithm_buffer, inst->fs, -1);
- if (len < 0)
- {
- /* error occurred */
- return_value = len;
- len = inst->timestampsPerCall;
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
- }
-
- if (dtmfSwitch == 1)
- {
- /*
-                 * This is the special case where the previous operation was DTMF overdub,
-                 * but the current instruction is "regular" DTMF. We must make sure that the
- * DTMF does not have any discontinuities. The first DTMF sample that we
- * generate now must be played out immediately, wherefore it must be copied to
- * the speech buffer.
- */
-
- /*
- * Generate extra DTMF data to fill the space between
- * curPosition and endPosition
- */
- int16_t tempLen;
-
- tempLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
- &pw16_NetEqAlgorithm_buffer[len], inst->fs,
- inst->endPosition - inst->curPosition);
- if (tempLen < 0)
- {
- /* error occurred */
- return_value = tempLen;
- len = inst->endPosition - inst->curPosition;
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0,
- inst->endPosition - inst->curPosition);
- }
-
- /* Add to total length */
- len += tempLen;
-
- /* Overwrite the "future" part of the speech buffer with the new DTMF data */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->curPosition],
- pw16_NetEqAlgorithm_buffer,
- inst->endPosition - inst->curPosition);
-
- /* Shuffle the remaining data to the beginning of algorithm buffer */
- len -= (inst->endPosition - inst->curPosition);
- WEBRTC_SPL_MEMMOVE_W16(pw16_NetEqAlgorithm_buffer,
- &pw16_NetEqAlgorithm_buffer[inst->endPosition - inst->curPosition],
- len);
- }
-
- inst->endTimestamp += inst->timestampsPerCall;
- inst->DTMFInst.reinit = 0;
- inst->ExpandInst.w16_consecExp = 0;
- inst->w16_mode = MODE_DTMF;
- BGNonly = 0; /* override BGN only and let DTMF through */
-
- playDtmf = 0; /* set to zero because the DTMF is already in the Algorithm buffer */
- /*
- * If playDtmf is 1, an extra DTMF vector will be generated and overdubbed
- * on the output.
- */
-
-#ifdef NETEQ_STEREO
- if (msInfo->msMode == NETEQ_MASTER)
- {
- /* signal to slave that master is using DTMF only */
- msInfo->extraInfo = DTMF_ONLY;
- }
-#endif
-
- break;
-#else
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return FAULTY_INSTRUCTION;
-#endif
-
- case DSP_INSTR_DO_ALTERNATIVE_PLC:
- if (inst->codec_ptr_inst.funcDecodePLC != 0)
- {
- len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
- pw16_NetEqAlgorithm_buffer, 1);
- }
- else
- {
- len = inst->timestampsPerCall;
- /* ZeroStuffing... */
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
- /* By not advancing the timestamp, NetEq inserts samples. */
- inst->statInst.addedSamples += len;
- }
- inst->ExpandInst.w16_consecExp = 0;
- break;
- case DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS:
- if (inst->codec_ptr_inst.funcDecodePLC != 0)
- {
- len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
- pw16_NetEqAlgorithm_buffer, 1);
- }
- else
- {
- len = inst->timestampsPerCall;
- /* ZeroStuffing... */
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
- }
- inst->ExpandInst.w16_consecExp = 0;
- inst->endTimestamp += len;
- break;
- case DSP_INSTR_DO_AUDIO_REPETITION:
- len = inst->timestampsPerCall;
- /* copy->paste... */
- WEBRTC_SPL_MEMCPY_W16(pw16_NetEqAlgorithm_buffer,
- &inst->speechBuffer[inst->endPosition-len], len);
- inst->ExpandInst.w16_consecExp = 0;
- break;
- case DSP_INSTR_DO_AUDIO_REPETITION_INC_TS:
- len = inst->timestampsPerCall;
- /* copy->paste... */
- WEBRTC_SPL_MEMCPY_W16(pw16_NetEqAlgorithm_buffer,
- &inst->speechBuffer[inst->endPosition-len], len);
- inst->ExpandInst.w16_consecExp = 0;
- inst->endTimestamp += len;
- break;
-
- case DSP_INSTR_PREEMPTIVE_EXPAND:
- if (len < 3 * inst->timestampsPerCall)
- {
- /* borrow samples from sync buffer if necessary */
- borrowedSamples = 3 * inst->timestampsPerCall - len; /* borrow this many samples */
- /* calculate how many of these are already played out */
- oldBorrowedSamples = WEBRTC_SPL_MAX(0,
- borrowedSamples - (inst->endPosition - inst->curPosition));
- WEBRTC_SPL_MEMMOVE_W16(&pw16_decoded_buffer[borrowedSamples],
- pw16_decoded_buffer, len);
- WEBRTC_SPL_MEMCPY_W16(pw16_decoded_buffer,
- &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
- borrowedSamples);
- }
- else
- {
- borrowedSamples = 0;
- oldBorrowedSamples = 0;
- }
-
-#ifdef NETEQ_DELAY_LOGGING
- w16_tmp1 = len;
-#endif
- /* do the expand */
- return_value = WebRtcNetEQ_PreEmptiveExpand(inst,
-#ifdef SCRATCH
- /* use same scratch memory as Accelerate */
- pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
-#endif
- pw16_decoded_buffer, len + borrowedSamples, oldBorrowedSamples,
- pw16_NetEqAlgorithm_buffer, &len, BGNonly);
-
- if (return_value < 0)
- {
- /* error */
- return return_value;
- }
-
- if (borrowedSamples > 0)
- {
- /* return borrowed samples */
-
- /* Copy back to last part of speechBuffer from beginning of output buffer */
- WEBRTC_SPL_MEMCPY_W16( &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
- pw16_NetEqAlgorithm_buffer,
- borrowedSamples);
-
- len -= borrowedSamples; /* remove the borrowed samples from new total length */
-
- /* Move to beginning of output buffer from end of output buffer */
- WEBRTC_SPL_MEMMOVE_W16( pw16_NetEqAlgorithm_buffer,
- &pw16_NetEqAlgorithm_buffer[borrowedSamples],
- len);
- }
-
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = len - w16_tmp1; /* number of samples added */
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- /* If last packet was decoded as inband CNG, set mode to CNG instead */
- if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- break;
-
- case DSP_INSTR_FADE_TO_BGN:
- {
- int tempReturnValue;
- /* do not overwrite return_value, since it likely contains an error code */
-
- /* calculate interpolation length */
- w16_tmp3 = WEBRTC_SPL_MIN(inst->endPosition - inst->curPosition,
- inst->timestampsPerCall);
- /* check that it will fit in pw16_NetEqAlgorithm_buffer */
- if (w16_tmp3 + inst->w16_frameLen > NETEQ_MAX_OUTPUT_SIZE)
- {
- w16_tmp3 = NETEQ_MAX_OUTPUT_SIZE - inst->w16_frameLen;
- }
-
- /* call Expand */
- len = inst->timestampsPerCall + inst->ExpandInst.w16_overlap;
- pos = 0;
-
- tempReturnValue = WebRtcNetEQ_Expand(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_NetEqAlgorithm_buffer, &len, 1);
-
- if (tempReturnValue < 0)
- {
- /* error */
- /* this error value will override return_value */
- return tempReturnValue;
- }
-
- pos += len; /* got len samples from expand */
-
- /* copy to fill the demand */
- while (pos + len <= inst->w16_frameLen + w16_tmp3)
- {
- WEBRTC_SPL_MEMCPY_W16(&pw16_NetEqAlgorithm_buffer[pos],
- pw16_NetEqAlgorithm_buffer, len);
- pos += len;
- }
-
- /* fill with fraction of the expand vector if needed */
- if (pos < inst->w16_frameLen + w16_tmp3)
- {
- WEBRTC_SPL_MEMCPY_W16(&pw16_NetEqAlgorithm_buffer[pos], pw16_NetEqAlgorithm_buffer,
- inst->w16_frameLen + w16_tmp3 - pos);
- }
-
- len = inst->w16_frameLen + w16_tmp3; /* truncate any surplus samples since we don't want these */
-
- /*
-             * Mix with the contents of the sync buffer. Find the largest power of two that
-             * is no greater than the interpolation length and divide 16384 by it; the
-             * result (the ramp increment) is stored in w16_tmp2.
- */
- w16_tmp1 = 2;
- w16_tmp2 = 16384;
- while (w16_tmp1 <= w16_tmp3)
- {
-                w16_tmp2 >>= 1; /* divide by 2 */
-                w16_tmp1 <<= 1; /* multiply by 2 */
- }
-
- w16_tmp1 = 0;
- pos = 0;
- while (w16_tmp1 < 16384)
- {
- inst->speechBuffer[inst->curPosition + pos]
- =
- (int16_t) WEBRTC_SPL_RSHIFT_W32(
- WEBRTC_SPL_MUL_16_16( inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
- 16384-w16_tmp1 ) +
- WEBRTC_SPL_MUL_16_16( pw16_NetEqAlgorithm_buffer[pos], w16_tmp1 ),
- 14 );
- w16_tmp1 += w16_tmp2;
- pos++;
- }
-
- /* overwrite remainder of speech buffer */
-
- WEBRTC_SPL_MEMCPY_W16( &inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
- &pw16_NetEqAlgorithm_buffer[pos], w16_tmp3 - pos);
-
- len -= w16_tmp3;
- /* shift algorithm buffer */
-
- WEBRTC_SPL_MEMMOVE_W16( pw16_NetEqAlgorithm_buffer,
- &pw16_NetEqAlgorithm_buffer[w16_tmp3],
- len );
-
- /* Update variables for VQmon */
- inst->w16_concealedTS += len;
-
- inst->w16_mode = MODE_FADE_TO_BGN;
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
-
- break;
- }
-
- default:
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return FAULTY_INSTRUCTION;
- } /* end of grand switch */
-
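A standalone sketch of the Q14 cross-fade used in the DSP_INSTR_FADE_TO_BGN case above: the old sync-buffer samples are faded out while the expand output is faded in, with the weight ramping from 0 to 16384. The ramp increment is simplified to an exact division here, whereas the code above steps by a power-of-two-derived value:

#include <stdint.h>
#include <stdio.h>

static void CrossFadeQ14(const int16_t *old_samples, const int16_t *new_samples,
                         int16_t *out, int length)
{
    int32_t step = 16384 / length; /* ramp increment per sample, in Q14 */
    int32_t weight = 0;
    int i;
    for (i = 0; i < length; i++)
    {
        out[i] = (int16_t)(((int32_t)old_samples[i] * (16384 - weight) +
                            (int32_t)new_samples[i] * weight) >> 14);
        weight += step;
    }
}

int main(void)
{
    int16_t old_samples[4] = { 1000, 1000, 1000, 1000 };
    int16_t new_samples[4] = { -1000, -1000, -1000, -1000 };
    int16_t out[4];
    int i;
    CrossFadeQ14(old_samples, new_samples, out, 4);
    for (i = 0; i < 4; i++)
    {
        printf("%d ", out[i]); /* 1000 500 0 -500 */
    }
    printf("\n");
    return 0;
}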
- /* Copy data directly to output buffer */
-
- w16_tmp2 = 0;
- if ((inst->endPosition + len - inst->curPosition - inst->ExpandInst.w16_overlap)
- >= inst->timestampsPerCall)
- {
- w16_tmp2 = inst->endPosition - inst->curPosition;
- w16_tmp2 = WEBRTC_SPL_MAX(w16_tmp2, 0); /* Additional error protection, just in case */
- w16_tmp1 = WEBRTC_SPL_MIN(w16_tmp2, inst->timestampsPerCall);
- w16_tmp2 = inst->timestampsPerCall - w16_tmp1;
- WEBRTC_SPL_MEMCPY_W16(pw16_outData, &inst->speechBuffer[inst->curPosition], w16_tmp1);
- WEBRTC_SPL_MEMCPY_W16(&pw16_outData[w16_tmp1], pw16_NetEqAlgorithm_buffer, w16_tmp2);
- DataEnough = 1;
- }
- else
- {
- DataEnough = 0;
- }
-
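A standalone sketch of the output split above: as many samples as possible are taken from the "future" part of the speech (sync) buffer, and the remainder of the 10 ms output comes from the freshly produced algorithm buffer. The numbers are illustrative only:

#include <stdio.h>

int main(void)
{
    int timestamps_per_call = 80; /* 10 ms at 8 kHz */
    int end_position = 565;
    int cur_position = 535;       /* 30 future samples left in the sync buffer */
    int from_sync_buffer;
    int from_algorithm_buffer;

    from_sync_buffer = end_position - cur_position;
    if (from_sync_buffer < 0) from_sync_buffer = 0; /* extra protection, as above */
    if (from_sync_buffer > timestamps_per_call) from_sync_buffer = timestamps_per_call;
    from_algorithm_buffer = timestamps_per_call - from_sync_buffer;

    printf("sync buffer: %d samples, algorithm buffer: %d samples\n",
           from_sync_buffer, from_algorithm_buffer); /* 30 and 50 */
    return 0;
}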
- if (playDtmf != 0)
- {
-#ifdef NETEQ_ATEVENT_DECODE
- int16_t outDataIndex = 0;
- int16_t overdubLen = -1; /* default len */
- int16_t dtmfLen;
-
- /*
- * Overdub the output with DTMF. Note that this is not executed if the
- * DSP_INSTR_DTMF_GENERATE operation is performed above.
- */
- if (inst->DTMFInst.lastDtmfSample - inst->curPosition > 0)
- {
- /* special operation for transition from "DTMF only" to "DTMF overdub" */
- outDataIndex
- = WEBRTC_SPL_MIN(inst->DTMFInst.lastDtmfSample - inst->curPosition,
- inst->timestampsPerCall);
- overdubLen = inst->timestampsPerCall - outDataIndex;
- }
-
- dtmfLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
- &pw16_outData[outDataIndex], inst->fs, overdubLen);
- if (dtmfLen < 0)
- {
- /* error occurred */
- return_value = dtmfLen;
- }
- inst->DTMFInst.reinit = 0;
-#else
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return FAULTY_INSTRUCTION;
-#endif
- }
-
- /*
- * Shuffle speech buffer to allow more data. Move data from pw16_NetEqAlgorithm_buffer
- * to speechBuffer.
- */
- if (instr != DSP_INSTR_EXPAND)
- {
- w16_tmp1 = WEBRTC_SPL_MIN(inst->endPosition, len);
- WEBRTC_SPL_MEMMOVE_W16(inst->speechBuffer, inst->speechBuffer + w16_tmp1,
- (inst->endPosition-w16_tmp1));
- WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-w16_tmp1],
- &pw16_NetEqAlgorithm_buffer[len-w16_tmp1], w16_tmp1);
-#ifdef NETEQ_ATEVENT_DECODE
- /* Update index to end of DTMF data in speech buffer */
- if (instr == DSP_INSTR_DTMF_GENERATE)
- {
- /* We have written DTMF data to the end of speech buffer */
- inst->DTMFInst.lastDtmfSample = inst->endPosition;
- }
- else if (inst->DTMFInst.lastDtmfSample > 0)
- {
- /* The end of DTMF data in speech buffer has been shuffled */
- inst->DTMFInst.lastDtmfSample -= w16_tmp1;
- }
-#endif
- /*
- * Update the BGN history if last operation was not expand (nor Merge, Accelerate
- * or Pre-emptive expand, to save complexity).
- */
- if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_MERGE)
- && (inst->w16_mode != MODE_SUCCESS_ACCELERATE) && (inst->w16_mode
- != MODE_LOWEN_ACCELERATE) && (inst->w16_mode != MODE_SUCCESS_PREEMPTIVE)
- && (inst->w16_mode != MODE_LOWEN_PREEMPTIVE) && (inst->w16_mode
- != MODE_FADE_TO_BGN) && (inst->w16_mode != MODE_DTMF) && (!BGNonly))
- {
- WebRtcNetEQ_BGNUpdate(inst
-#ifdef SCRATCH
- , pw16_scratchPtr + SCRATCH_NETEQ_BGN_UPDATE
-#endif
- );
- }
- }
- else /* instr == DSP_INSTR_EXPAND */
- {
- /* Nothing should be done since data is already copied to output. */
- }
-
- inst->curPosition -= len;
-
- /*
- * Extra protection in case something should go totally wrong in terms of sizes...
- * If everything is ok this should NEVER happen.
- */
- if (inst->curPosition < -inst->timestampsPerCall)
- {
- inst->curPosition = -inst->timestampsPerCall;
- }
-
- if ((instr != DSP_INSTR_EXPAND) && (instr != DSP_INSTR_MERGE) && (instr
- != DSP_INSTR_FADE_TO_BGN))
- {
- /* Reset concealed TS parameter if it does not seem to have been flushed */
- if (inst->w16_concealedTS > inst->timestampsPerCall)
- {
- inst->w16_concealedTS = 0;
- }
- }
-
- /*
- * Double-check that we actually have 10 ms to play. If we haven't, there has been a
-     * serious error. The decoder might have returned far too few samples.
- */
- if (!DataEnough)
- {
- /* This should not happen. Set outdata to zeros, and return error. */
- WebRtcSpl_MemSetW16(pw16_outData, 0, inst->timestampsPerCall);
- *pw16_len = inst->timestampsPerCall;
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return RECOUT_ERROR_SAMPLEUNDERRUN;
- }
-
- /*
-     * Update the video sync timestamp (this special timestamp is needed since the
-     * endTimestamp stops during CNG and Expand periods).
- */
- if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_RFC3389CNG))
- {
- uint32_t uw32_tmpTS;
- uw32_tmpTS = inst->endTimestamp - (inst->endPosition - inst->curPosition);
- if ((int32_t) (uw32_tmpTS - inst->videoSyncTimestamp) > 0)
- {
- inst->videoSyncTimestamp = uw32_tmpTS;
- }
- }
- else
- {
- inst->videoSyncTimestamp += inst->timestampsPerCall;
- }
-
- /* After this, regardless of what has happened, deliver 10 ms of future data */
- inst->curPosition += inst->timestampsPerCall;
- *pw16_len = inst->timestampsPerCall;
-
- /* Remember if BGNonly was used */
- if (BGNonly)
- {
- inst->w16_mode |= MODE_BGN_ONLY;
- }
-
- return return_value;
-}
-
-#undef SCRATCH_ALGORITHM_BUFFER
-#undef SCRATCH_NETEQ_NORMAL
-#undef SCRATCH_NETEQ_MERGE
-#undef SCRATCH_NETEQ_BGN_UPDATE
-#undef SCRATCH_NETEQ_EXPAND
-#undef SCRATCH_DSP_INFO
-#undef SCRATCH_NETEQ_ACCELERATE
-#undef SIZE_SCRATCH_BUFFER
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.c
deleted file mode 100644
index d1ce934bcfb..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.c
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of RTCP statistics reporting.
- */
-
-#include "rtcp.h"
-
-#include <string.h>
-
-#include "signal_processing_library.h"
-
-int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo)
-{
- /*
- * Initialize everything to zero and then set the start values for the RTP packet stream.
- */
- WebRtcSpl_MemSetW16((int16_t*) RTCP_inst, 0,
- sizeof(WebRtcNetEQ_RTCP_t) / sizeof(int16_t));
- RTCP_inst->base_seq = uw16_seqNo;
- RTCP_inst->max_seq = uw16_seqNo;
- return 0;
-}
-
-int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo,
- uint32_t uw32_timeStamp, uint32_t uw32_recTime)
-{
- int16_t w16_SeqDiff;
- int32_t w32_TimeDiff;
- int32_t w32_JitterDiff;
-
- /*
- * Update number of received packets, and largest packet number received.
- */
- RTCP_inst->received++;
- w16_SeqDiff = uw16_seqNo - RTCP_inst->max_seq;
- if (w16_SeqDiff >= 0)
- {
- if (uw16_seqNo < RTCP_inst->max_seq)
- {
- /* Wrap around detected */
- RTCP_inst->cycles++;
- }
- RTCP_inst->max_seq = uw16_seqNo;
- }
-
- /* Calculate Jitter, and update previous timestamps */
- /* Note that the value in RTCP_inst->jitter is in Q4. */
- if (RTCP_inst->received > 1)
- {
- w32_TimeDiff = (uw32_recTime - (uw32_timeStamp - RTCP_inst->transit));
- w32_TimeDiff = WEBRTC_SPL_ABS_W32(w32_TimeDiff);
- w32_JitterDiff = WEBRTC_SPL_LSHIFT_W16(w32_TimeDiff, 4) - RTCP_inst->jitter;
- RTCP_inst->jitter = RTCP_inst->jitter + WEBRTC_SPL_RSHIFT_W32((w32_JitterDiff + 8), 4);
- }
- RTCP_inst->transit = (uw32_timeStamp - uw32_recTime);
- return 0;
-}
-
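A standalone restatement of the jitter recursion above (RFC 3550, section 6.4.1), keeping the estimate in Q4 exactly like RTCP_inst->jitter: J += (|D| - J) / 16, with rounding. The packet arrival times are made up for illustration:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* (RTP timestamp, arrival time in timestamp units); the third packet is 40 units late. */
    uint32_t packets[4][2] = { { 0, 100 }, { 160, 260 }, { 320, 460 }, { 480, 580 } };
    uint32_t transit = 0;  /* previous (timestamp - arrival time) */
    int32_t jitter_q4 = 0; /* jitter estimate in Q4 */
    int i;

    for (i = 0; i < 4; i++)
    {
        uint32_t timestamp = packets[i][0];
        uint32_t rec_time = packets[i][1];
        if (i > 0)
        {
            int32_t d = (int32_t)(rec_time - (timestamp - transit));
            int32_t jitter_diff;
            if (d < 0) d = -d;
            jitter_diff = (d << 4) - jitter_q4;
            jitter_q4 += (jitter_diff + 8) >> 4;
        }
        transit = timestamp - rec_time;
        printf("packet %d: jitter = %d (Q4) = %d timestamp units\n",
               i, (int)jitter_q4, (int)(jitter_q4 >> 4));
    }
    return 0;
}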
-int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
- uint16_t *puw16_fraction_lost,
- uint32_t *puw32_cum_lost, uint32_t *puw32_ext_max,
- uint32_t *puw32_jitter, int16_t doNotReset)
-{
- uint32_t uw32_exp_nr, uw32_exp_interval, uw32_rec_interval;
- int32_t w32_lost;
-
- /* Extended highest sequence number received */
- *puw32_ext_max
- = (uint32_t) WEBRTC_SPL_LSHIFT_W32((uint32_t)RTCP_inst->cycles, 16)
- + RTCP_inst->max_seq;
-
- /*
- * Calculate expected number of packets and compare it to the number of packets that
- * were actually received => the cumulative number of packets lost can be extracted.
- */
- uw32_exp_nr = *puw32_ext_max - RTCP_inst->base_seq + 1;
- if (RTCP_inst->received == 0)
- {
- /* no packets received, assume none lost */
- *puw32_cum_lost = 0;
- }
- else if (uw32_exp_nr > RTCP_inst->received)
- {
- *puw32_cum_lost = uw32_exp_nr - RTCP_inst->received;
- if (*puw32_cum_lost > (uint32_t) 0xFFFFFF)
- {
- *puw32_cum_lost = 0xFFFFFF;
- }
- }
- else
- {
- *puw32_cum_lost = 0;
- }
-
- /* Fraction lost (Since last report) */
- uw32_exp_interval = uw32_exp_nr - RTCP_inst->exp_prior;
- if (!doNotReset)
- {
- RTCP_inst->exp_prior = uw32_exp_nr;
- }
- uw32_rec_interval = RTCP_inst->received - RTCP_inst->rec_prior;
- if (!doNotReset)
- {
- RTCP_inst->rec_prior = RTCP_inst->received;
- }
- w32_lost = (int32_t) (uw32_exp_interval - uw32_rec_interval);
- if (uw32_exp_interval == 0 || w32_lost <= 0 || RTCP_inst->received == 0)
- {
- *puw16_fraction_lost = 0;
- }
- else
- {
- *puw16_fraction_lost = (uint16_t) (WEBRTC_SPL_LSHIFT_W32(w32_lost, 8)
- / uw32_exp_interval);
- }
- if (*puw16_fraction_lost > 0xFF)
- {
- *puw16_fraction_lost = 0xFF;
- }
-
- /* Inter-arrival jitter */
- *puw32_jitter = (RTCP_inst->jitter) >> 4; /* scaling from Q4 */
- return 0;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.cc
new file mode 100644
index 00000000000..cf8e0280bb2
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.cc
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/rtcp.h"
+
+#include <string.h>
+
+#include <algorithm>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/interface/module_common_types.h"
+
+namespace webrtc {
+
+void Rtcp::Init(uint16_t start_sequence_number) {
+ cycles_ = 0;
+ max_seq_no_ = start_sequence_number;
+ base_seq_no_ = start_sequence_number;
+ received_packets_ = 0;
+ received_packets_prior_ = 0;
+ expected_prior_ = 0;
+ jitter_ = 0;
+ transit_ = 0;
+}
+
+void Rtcp::Update(const RTPHeader& rtp_header, uint32_t receive_timestamp) {
+ // Update number of received packets, and largest packet number received.
+ received_packets_++;
+ int16_t sn_diff = rtp_header.sequenceNumber - max_seq_no_;
+ if (sn_diff >= 0) {
+ if (rtp_header.sequenceNumber < max_seq_no_) {
+ // Wrap-around detected.
+ cycles_++;
+ }
+ max_seq_no_ = rtp_header.sequenceNumber;
+ }
+
+ // Calculate jitter according to RFC 3550, and update previous timestamps.
+ // Note that the value in |jitter_| is in Q4.
+ if (received_packets_ > 1) {
+ int32_t ts_diff = receive_timestamp - (rtp_header.timestamp - transit_);
+ ts_diff = WEBRTC_SPL_ABS_W32(ts_diff);
+ int32_t jitter_diff = (ts_diff << 4) - jitter_;
+ // Calculate 15 * jitter_ / 16 + (ts_diff << 4) / 16 (with proper rounding).
+ jitter_ = jitter_ + ((jitter_diff + 8) >> 4);
+ }
+ transit_ = rtp_header.timestamp - receive_timestamp;
+}
+
+void Rtcp::GetStatistics(bool no_reset, RtcpStatistics* stats) {
+ // Extended highest sequence number received.
+ stats->extended_max_sequence_number =
+ (static_cast<int>(cycles_) << 16) + max_seq_no_;
+
+ // Calculate expected number of packets and compare it with the number of
+ // packets that were actually received. The cumulative number of lost packets
+ // can be extracted.
+ uint32_t expected_packets =
+ stats->extended_max_sequence_number - base_seq_no_ + 1;
+ if (received_packets_ == 0) {
+ // No packets received, assume none lost.
+ stats->cumulative_lost = 0;
+ } else if (expected_packets > received_packets_) {
+ stats->cumulative_lost = expected_packets - received_packets_;
+ if (stats->cumulative_lost > 0xFFFFFF) {
+ stats->cumulative_lost = 0xFFFFFF;
+ }
+ } else {
+ stats->cumulative_lost = 0;
+ }
+
+ // Fraction lost since last report.
+ uint32_t expected_since_last = expected_packets - expected_prior_;
+ uint32_t received_since_last = received_packets_ - received_packets_prior_;
+ if (!no_reset) {
+ expected_prior_ = expected_packets;
+ received_packets_prior_ = received_packets_;
+ }
+ int32_t lost = expected_since_last - received_since_last;
+ if (expected_since_last == 0 || lost <= 0 || received_packets_ == 0) {
+ stats->fraction_lost = 0;
+ } else {
+ stats->fraction_lost = std::min(0xFFU, (lost << 8) / expected_since_last);
+ }
+
+ stats->jitter = jitter_ >> 4; // Scaling from Q4.
+}
+
+} // namespace webrtc
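A hypothetical usage sketch of the Rtcp class added above; the RTPHeader fields and RtcpStatistics members are the ones used in this change, while the surrounding scaffolding and values are invented for illustration.

// Sketch only: feed a few packets into webrtc::Rtcp and read the statistics.
#include "webrtc/modules/audio_coding/neteq/rtcp.h"
#include "webrtc/modules/interface/module_common_types.h"

void RtcpUsageSketch() {
  webrtc::Rtcp rtcp;
  rtcp.Init(/*start_sequence_number=*/100);

  webrtc::RTPHeader header;
  header.sequenceNumber = 100;
  header.timestamp = 16000;
  rtcp.Update(header, /*receive_timestamp=*/16050);

  header.sequenceNumber = 102;  // Sequence 101 is missing (one lost packet).
  header.timestamp = 16640;
  rtcp.Update(header, /*receive_timestamp=*/16700);

  webrtc::RtcpStatistics stats;
  rtcp.GetStatistics(/*no_reset=*/true, &stats);
  // Expected here: stats.cumulative_lost == 1,
  // stats.extended_max_sequence_number == 102.
}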
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.h
index 5e066eb38f2..2a765efa588 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.h
@@ -8,95 +8,51 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * RTCP statistics reporting.
- */
-
-#ifndef RTCP_H
-#define RTCP_H
-
-#include "typedefs.h"
-
-typedef struct
-{
- uint16_t cycles; /* The number of wrap-arounds for the sequence number */
- uint16_t max_seq; /* The maximum sequence number received
- (starts from 0 again after wrap around) */
- uint16_t base_seq; /* The sequence number of the first packet that arrived */
- uint32_t received; /* The number of packets that have been received */
- uint32_t rec_prior; /* Number of packets received when last report was generated */
- uint32_t exp_prior; /* Number of packets that should have been received if no
- packets were lost. Stored value from last report. */
- uint32_t jitter; /* Jitter statistics at this instance (calculated according to RFC) */
- int32_t transit; /* Clock difference for previous packet (RTPtimestamp - LOCALtime_rec) */
-} WebRtcNetEQ_RTCP_t;
-
-/****************************************************************************
- * WebRtcNetEQ_RTCPInit(...)
- *
- * This function initializes the RTCP statistics for a new RTP packet
- * stream.
- *
- * Input:
- * - RTCP_inst : RTCP instance, that contains information about the
- * packets that have been received etc.
- * - seqNo : Packet number of the first received frame.
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo);
-
-/****************************************************************************
- * WebRtcNetEQ_RTCPUpdate(...)
- *
- * This function updates the RTCP statistics with a newly received
- * packet.
- *
- * Input:
- * - RTCP_inst : RTCP instance, that contains information about the
- * packets that have been received etc.
- * - seqNo : Sequence number of the received packet.
- * - timeStamp : Time stamp from the RTP header.
- * - recTime : Time (in RTP timestamps) when this packet was received.
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo,
- uint32_t uw32_timeStamp, uint32_t uw32_recTime);
-
-/****************************************************************************
- * WebRtcNetEQ_RTCPGetStats(...)
- *
- * This function calculates the parameters that are needed for the RTCP
- * report.
- *
- * Input:
- * - RTCP_inst : RTCP instance, that contains information about the
- * packets that have been received etc.
- * - doNotReset : If non-zero, the fraction lost statistics will not
- * be reset.
- *
- * Output:
- * - RTCP_inst : Updated RTCP information (some statistics are
- * reset when generating this report)
- * - fraction_lost : Number of lost RTP packets divided by the number of
- * expected packets, since the last RTCP Report.
- * - cum_lost : Cumulative number of lost packets during this
- * session.
- * - ext_max : Extended highest sequence number received.
- * - jitter : Inter-arrival jitter.
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
- uint16_t *puw16_fraction_lost,
- uint32_t *puw32_cum_lost, uint32_t *puw32_ext_max,
- uint32_t *puw32_jitter, int16_t doNotReset);
-
-#endif
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_RTCP_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_RTCP_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declaration.
+struct RTPHeader;
+
+class Rtcp {
+ public:
+ Rtcp() {
+ Init(0);
+ }
+
+ ~Rtcp() {}
+
+ // Resets the RTCP statistics, and sets the first received sequence number.
+ void Init(uint16_t start_sequence_number);
+
+ // Updates the RTCP statistics with a new received packet.
+ void Update(const RTPHeader& rtp_header, uint32_t receive_timestamp);
+
+ // Returns the current RTCP statistics. If |no_reset| is true, the statistics
+ // are not reset, otherwise they are.
+ void GetStatistics(bool no_reset, RtcpStatistics* stats);
+
+ private:
+ uint16_t cycles_; // The number of wrap-arounds for the sequence number.
+ uint16_t max_seq_no_; // The maximum sequence number received. Starts over
+ // from 0 after wrap-around.
+ uint16_t base_seq_no_; // The sequence number of the first received packet.
+ uint32_t received_packets_; // The number of packets that have been received.
+ uint32_t received_packets_prior_; // Number of packets received when last
+ // report was generated.
+ uint32_t expected_prior_; // Expected number of packets, at the time of the
+ // last report.
+ uint32_t jitter_; // Current jitter value.
+ int32_t transit_; // Clock difference for previous packet.
+
+ DISALLOW_COPY_AND_ASSIGN(Rtcp);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_RTCP_H_
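The fraction_lost value that GetStatistics() fills in is the 8-bit fixed-point ratio defined for RTCP receiver reports in RFC 3550. A minimal sketch of that conversion, with illustrative names not taken from the header above:

#include <algorithm>
#include <cstdint>

// Fraction of packets lost since the last report, as an 8-bit fixed-point
// value (0..255), matching the RTCP receiver-report field.
uint8_t FractionLostQ8(uint32_t expected_interval, uint32_t received_interval) {
  if (expected_interval == 0 || received_interval >= expected_interval) {
    return 0;  // Nothing expected, or duplicates made the loss non-positive.
  }
  const uint32_t lost = expected_interval - received_interval;
  return static_cast<uint8_t>(
      std::min<uint32_t>(255, (lost << 8) / expected_interval));
}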
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.c
deleted file mode 100644
index 6ab5944b5aa..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.c
+++ /dev/null
@@ -1,240 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * RTP related functions.
- */
-
-#include "rtp.h"
-
-#include "typedefs.h" /* to define endianness */
-
-#include "neteq_error_codes.h"
-
-int WebRtcNetEQ_RTPPayloadInfo(int16_t* pw16_Datagram, int i_DatagramLen,
- RTPPacket_t* RTPheader)
-{
- int i_P, i_X, i_CC, i_startPosition;
- int i_IPver;
- int i_extlength = -1; /* Default value if there is no extension */
- int i_padlength = 0; /* Default value if there is no padding */
-
- if (i_DatagramLen < 12)
- {
- return RTP_TOO_SHORT_PACKET;
- }
-
-#ifdef WEBRTC_ARCH_BIG_ENDIAN
- i_IPver = (((uint16_t) (pw16_Datagram[0] & 0xC000)) >> 14); /* Extract the version */
- i_P = (((uint16_t) (pw16_Datagram[0] & 0x2000)) >> 13); /* Extract the P bit */
- i_X = (((uint16_t) (pw16_Datagram[0] & 0x1000)) >> 12); /* Extract the X bit */
- i_CC = ((uint16_t) (pw16_Datagram[0] >> 8) & 0xF); /* Get the CC number */
- RTPheader->payloadType = pw16_Datagram[0] & 0x7F; /* Get the coder type */
- RTPheader->seqNumber = pw16_Datagram[1]; /* Get the sequence number */
- RTPheader->timeStamp = ((((uint32_t) ((uint16_t) pw16_Datagram[2])) << 16)
- | (uint16_t) (pw16_Datagram[3])); /* Get timestamp */
- RTPheader->ssrc = (((uint32_t) pw16_Datagram[4]) << 16)
- + (((uint32_t) pw16_Datagram[5])); /* Get the SSRC */
-
- if (i_X == 1)
- {
- /* Extension header exists. Find out how many int32_t it consists of. */
- i_extlength = pw16_Datagram[7 + 2 * i_CC];
- }
- if (i_P == 1)
- {
- /* Padding exists. Find out how many bytes the padding consists of. */
- if (i_DatagramLen & 0x1)
- {
- /* odd number of bytes => last byte in higher byte */
- i_padlength = (((uint16_t) pw16_Datagram[i_DatagramLen >> 1]) >> 8);
- }
- else
- {
- /* even number of bytes => last byte in lower byte */
- i_padlength = ((pw16_Datagram[(i_DatagramLen >> 1) - 1]) & 0xFF);
- }
- }
-#else /* WEBRTC_ARCH_LITTLE_ENDIAN */
- i_IPver = (((uint16_t) (pw16_Datagram[0] & 0xC0)) >> 6); /* Extract the IP version */
- i_P = (((uint16_t) (pw16_Datagram[0] & 0x20)) >> 5); /* Extract the P bit */
- i_X = (((uint16_t) (pw16_Datagram[0] & 0x10)) >> 4); /* Extract the X bit */
- i_CC = (uint16_t) (pw16_Datagram[0] & 0xF); /* Get the CC number */
- RTPheader->payloadType = (pw16_Datagram[0] >> 8) & 0x7F; /* Get the coder type */
- RTPheader->seqNumber = (((((uint16_t) pw16_Datagram[1]) >> 8) & 0xFF)
- | (((uint16_t) (pw16_Datagram[1] & 0xFF)) << 8)); /* Get the packet number */
- RTPheader->timeStamp = ((((uint16_t) pw16_Datagram[2]) & 0xFF) << 24)
- | ((((uint16_t) pw16_Datagram[2]) & 0xFF00) << 8)
- | ((((uint16_t) pw16_Datagram[3]) >> 8) & 0xFF)
- | ((((uint16_t) pw16_Datagram[3]) & 0xFF) << 8); /* Get timestamp */
- RTPheader->ssrc = ((((uint16_t) pw16_Datagram[4]) & 0xFF) << 24)
- | ((((uint16_t) pw16_Datagram[4]) & 0xFF00) << 8)
- | ((((uint16_t) pw16_Datagram[5]) >> 8) & 0xFF)
- | ((((uint16_t) pw16_Datagram[5]) & 0xFF) << 8); /* Get the SSRC */
-
- if (i_X == 1)
- {
- /* Extension header exists. Find out how many int32_t it consists of. */
- i_extlength = (((((uint16_t) pw16_Datagram[7 + 2 * i_CC]) >> 8) & 0xFF)
- | (((uint16_t) (pw16_Datagram[7 + 2 * i_CC] & 0xFF)) << 8));
- }
- if (i_P == 1)
- {
- /* Padding exists. Find out how many bytes the padding consists of. */
- if (i_DatagramLen & 0x1)
- {
- /* odd number of bytes => last byte in higher byte */
- i_padlength = (pw16_Datagram[i_DatagramLen >> 1] & 0xFF);
- }
- else
- {
- /* even number of bytes => last byte in lower byte */
- i_padlength = (((uint16_t) pw16_Datagram[(i_DatagramLen >> 1) - 1]) >> 8);
- }
- }
-#endif
-
- i_startPosition = 12 + 4 * (i_extlength + 1) + 4 * i_CC;
- RTPheader->payload = &pw16_Datagram[i_startPosition >> 1];
- RTPheader->payloadLen = i_DatagramLen - i_startPosition - i_padlength;
- RTPheader->starts_byte1 = 0;
-
- if ((i_IPver != 2) || (RTPheader->payloadLen <= 0) || (RTPheader->payloadLen >= 16000)
- || (i_startPosition < 12) || (i_startPosition > i_DatagramLen))
- {
- return RTP_CORRUPT_PACKET;
- }
-
- return 0;
-}
-
-#ifdef NETEQ_RED_CODEC
-
-int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
- int *i_No_Of_Payloads)
-{
- const int16_t *pw16_data = RTPheader[0]->payload; /* Pointer to the data */
- uint16_t uw16_offsetTimeStamp = 65535, uw16_secondPayload = 65535;
- int i_blockLength, i_k;
- int i_discardedBlockLength = 0;
- int singlePayload = 0;
-
-#ifdef WEBRTC_ARCH_BIG_ENDIAN
- if ((pw16_data[0] & 0x8000) == 0)
- {
- /* Only one payload in this packet*/
- singlePayload = 1;
- /* set the blocklength to -4 to deduce the non-existent 4-byte RED header */
- i_blockLength = -4;
- RTPheader[0]->payloadType = ((((uint16_t)pw16_data[0]) & 0x7F00) >> 8);
- }
- else
- {
- /* Discard all but the two last payloads. */
- while (((pw16_data[2] & 0x8000) != 0) &&
- (pw16_data<((RTPheader[0]->payload)+((RTPheader[0]->payloadLen+1)>>1))))
- {
- i_discardedBlockLength += (4+(((uint16_t)pw16_data[1]) & 0x3FF));
- pw16_data+=2;
- }
- if (pw16_data>=(RTPheader[0]->payload+((RTPheader[0]->payloadLen+1)>>1)))
- {
- return RED_SPLIT_ERROR2; /* Error, we are outside the packet */
- }
- singlePayload = 0; /* the packet contains more than one payload */
- uw16_secondPayload = ((((uint16_t)pw16_data[0]) & 0x7F00) >> 8);
- RTPheader[0]->payloadType = ((((uint16_t)pw16_data[2]) & 0x7F00) >> 8);
- uw16_offsetTimeStamp = ((((uint16_t)pw16_data[0]) & 0xFF) << 6) +
- ((((uint16_t)pw16_data[1]) & 0xFC00) >> 10);
- i_blockLength = (((uint16_t)pw16_data[1]) & 0x3FF);
- }
-#else /* WEBRTC_ARCH_LITTLE_ENDIAN */
- if ((pw16_data[0] & 0x80) == 0)
- {
- /* Only one payload in this packet */
- singlePayload = 1;
- /* set the blocklength to -4 to deduce the non-existent 4-byte RED header */
- i_blockLength = -4;
- RTPheader[0]->payloadType = (((uint16_t) pw16_data[0]) & 0x7F);
- }
- else
- {
- /* Discard all but the two last payloads. */
- while (((pw16_data[2] & 0x80) != 0) && (pw16_data < ((RTPheader[0]->payload)
- + ((RTPheader[0]->payloadLen + 1) >> 1))))
- {
- i_discardedBlockLength += (4 + ((((uint16_t) pw16_data[1]) & 0x3) << 8)
- + ((((uint16_t) pw16_data[1]) & 0xFF00) >> 8));
- pw16_data += 2;
- }
- if (pw16_data >= (RTPheader[0]->payload + ((RTPheader[0]->payloadLen + 1) >> 1)))
- {
- return RED_SPLIT_ERROR2; /* Error, we are outside the packet */
- }
- singlePayload = 0; /* the packet contains more than one payload */
- uw16_secondPayload = (((uint16_t) pw16_data[0]) & 0x7F);
- RTPheader[0]->payloadType = (((uint16_t) pw16_data[2]) & 0x7F);
- uw16_offsetTimeStamp = ((((uint16_t) pw16_data[0]) & 0xFF00) >> 2)
- + ((((uint16_t) pw16_data[1]) & 0xFC) >> 2);
- i_blockLength = ((((uint16_t) pw16_data[1]) & 0x3) << 8)
- + ((((uint16_t) pw16_data[1]) & 0xFF00) >> 8);
- }
-#endif
-
- if (i_MaximumPayloads < 2 || singlePayload == 1)
- {
- /* Reject the redundancy; or no redundant payload present. */
- for (i_k = 1; i_k < i_MaximumPayloads; i_k++)
- {
- RTPheader[i_k]->payloadType = -1;
- RTPheader[i_k]->payloadLen = 0;
- }
-
- /* update the pointer for the main data */
- pw16_data = &pw16_data[(5 + i_blockLength) >> 1];
- RTPheader[0]->starts_byte1 = (5 + i_blockLength) & 0x1;
- RTPheader[0]->payloadLen = RTPheader[0]->payloadLen - (i_blockLength + 5)
- - i_discardedBlockLength;
- RTPheader[0]->payload = pw16_data;
-
- *i_No_Of_Payloads = 1;
-
- }
- else
- {
- /* Redundancy accepted, put the redundancy in second RTPheader. */
- RTPheader[1]->payloadType = uw16_secondPayload;
- RTPheader[1]->payload = &pw16_data[5 >> 1];
- RTPheader[1]->starts_byte1 = 5 & 0x1;
- RTPheader[1]->seqNumber = RTPheader[0]->seqNumber;
- RTPheader[1]->timeStamp = RTPheader[0]->timeStamp - uw16_offsetTimeStamp;
- RTPheader[1]->ssrc = RTPheader[0]->ssrc;
- RTPheader[1]->payloadLen = i_blockLength;
-
- /* Modify first RTP packet, so that it contains the main data. */
- RTPheader[0]->payload = &pw16_data[(5 + i_blockLength) >> 1];
- RTPheader[0]->starts_byte1 = (5 + i_blockLength) & 0x1;
- RTPheader[0]->payloadLen = RTPheader[0]->payloadLen - (i_blockLength + 5)
- - i_discardedBlockLength;
-
- /* Clear the following payloads. */
- for (i_k = 2; i_k < i_MaximumPayloads; i_k++)
- {
- RTPheader[i_k]->payloadType = -1;
- RTPheader[i_k]->payloadLen = 0;
- }
-
- *i_No_Of_Payloads = 2;
- }
- return 0;
-}
-
-#endif
-
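The parser removed above works on a 16-bit word buffer, which is why it needs separate big- and little-endian branches. The same RTP fixed-header fields (RFC 3550, section 5.1) can also be read from a plain byte buffer without any endianness branches; the sketch below is illustrative only and is not code from this change.

#include <cstddef>
#include <cstdint>

struct RtpFixedHeader {
  uint8_t version;
  bool padding;
  bool extension;
  uint8_t cc;
  uint8_t payload_type;
  uint16_t sequence_number;
  uint32_t timestamp;
  uint32_t ssrc;
};

// Parses the 12-byte RTP fixed header from a byte buffer in network byte
// order. Returns false if the datagram is too short or not RTP version 2.
bool ParseRtpFixedHeader(const uint8_t* data, size_t len, RtpFixedHeader* h) {
  if (len < 12) return false;
  h->version = data[0] >> 6;
  h->padding = (data[0] & 0x20) != 0;
  h->extension = (data[0] & 0x10) != 0;
  h->cc = data[0] & 0x0F;
  h->payload_type = data[1] & 0x7F;
  h->sequence_number = static_cast<uint16_t>((data[2] << 8) | data[3]);
  h->timestamp = (static_cast<uint32_t>(data[4]) << 24) |
                 (static_cast<uint32_t>(data[5]) << 16) |
                 (static_cast<uint32_t>(data[6]) << 8) | data[7];
  h->ssrc = (static_cast<uint32_t>(data[8]) << 24) |
            (static_cast<uint32_t>(data[9]) << 16) |
            (static_cast<uint32_t>(data[10]) << 8) | data[11];
  return h->version == 2;
}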
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.h
deleted file mode 100644
index 4642eaef770..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * RTP data struct and related functions.
- */
-
-#ifndef RTP_H
-#define RTP_H
-
-#include "typedefs.h"
-
-#include "codec_db.h"
-
-typedef struct
-{
- uint16_t seqNumber;
- uint32_t timeStamp;
- uint32_t ssrc;
- int payloadType;
- const int16_t *payload;
- int16_t payloadLen;
- int16_t starts_byte1;
- int16_t rcuPlCntr;
-} RTPPacket_t;
-
-/****************************************************************************
- * WebRtcNetEQ_RTPPayloadInfo(...)
- *
- * Converts a datagram into an RTP header struct.
- *
- * Input:
- * - Datagram : UDP datagram from the network
- * - DatagramLen : Length in bytes of the datagram
- *
- * Output:
- * - RTPheader : Structure with the datagram info
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RTPPayloadInfo(int16_t* pw16_Datagram, int i_DatagramLen,
- RTPPacket_t* RTPheader);
-
-/****************************************************************************
- * WebRtcNetEQ_RedundancySplit(...)
- *
- * Splits a Redundancy RTP struct into two RTP structs. User has to check
- * that it's really the redundancy payload. No such check is done inside this
- * function.
- *
- * Input:
- * - RTPheader : First header holds the whole RTP packet (with the redundancy payload)
- * - MaximumPayloads:
- * The maximum number of RTP payloads that should be
- * extracted (1+maximum_no_of_Redundancies).
- *
- * Output:
- * - RTPheader : First header holds the main RTP data, while 2..N
- * holds the redundancy data.
- * - No_Of_Payloads : Number of extracted payloads (1 or 2).
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
- int *i_No_Of_Payloads);
-
-#endif
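WebRtcNetEQ_RedundancySplit(), declared above, walks RFC 2198 redundancy (RED) block headers. For reference, a byte-oriented sketch of that 4-byte header layout; the struct and function names are illustrative, not part of this change.

#include <cstdint>

// One RFC 2198 redundancy block header:
//   F (1 bit) | block PT (7) | timestamp offset (14) | block length (10).
// The final header of a RED payload is a single byte with F == 0, so the
// offset and length fields below are only meaningful when parsing a full
// 4-byte header (i.e., when more blocks follow).
struct RedBlockHeader {
  bool follows;               // F bit: another block header follows.
  uint8_t payload_type;       // Payload type of this redundant block.
  uint16_t timestamp_offset;  // Offset relative to the primary timestamp.
  uint16_t block_length;      // Length of this redundant block, in bytes.
};

RedBlockHeader ParseRedBlockHeader(const uint8_t* p) {
  RedBlockHeader h;
  h.follows = (p[0] & 0x80) != 0;
  h.payload_type = p[0] & 0x7F;
  h.timestamp_offset = static_cast<uint16_t>((p[1] << 6) | (p[2] >> 2));
  h.block_length = static_cast<uint16_t>(((p[2] & 0x03) << 8) | p[3]);
  return h;
}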
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/set_fs.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/set_fs.c
deleted file mode 100644
index ac974548e61..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/set_fs.c
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Function where the sample rate is set.
- */
-
-#include "mcu.h"
-
-#include "dtmf_buffer.h"
-#include "neteq_error_codes.h"
-
-int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, uint16_t fs)
-{
- int16_t ok = 0;
-
- switch (fs)
- {
- case 8000:
- {
-#ifdef NETEQ_ATEVENT_DECODE
- ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 8000, 560);
-#endif
- inst->timestampsPerCall = inst->millisecondsPerCall * 8;
- break;
- }
-
-#ifdef NETEQ_WIDEBAND
- case 16000:
- {
-#ifdef NETEQ_ATEVENT_DECODE
- ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 16000, 1120);
-#endif
- inst->timestampsPerCall = inst->millisecondsPerCall * 16;
- break;
- }
-#endif
-
-#ifdef NETEQ_32KHZ_WIDEBAND
- case 32000:
- {
-#ifdef NETEQ_ATEVENT_DECODE
- ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 32000, 2240);
-#endif
- inst->timestampsPerCall = inst->millisecondsPerCall * 32;
- break;
- }
-#endif
-
-#ifdef NETEQ_48KHZ_WIDEBAND
- case 48000:
- {
-#ifdef NETEQ_ATEVENT_DECODE
- ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 48000, 3360);
-#endif
- inst->timestampsPerCall = inst->millisecondsPerCall * 48;
- break;
- }
-#endif
-
- default:
- {
- /* Not supported yet */
- return CODEC_DB_UNSUPPORTED_FS;
- }
- } /* end switch */
-
- inst->fs = fs;
-
- return ok;
-}
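WebRtcNetEQ_McuSetFs() above derives timestampsPerCall as milliseconds-per-call times samples-per-millisecond, for the supported rates 8, 16, 32 and 48 kHz. The same relationship as a one-line helper, with illustrative names:

#include <cstdint>

// Number of RTP timestamp ticks consumed by one RecOut call of |ms_per_call|
// milliseconds at sample rate |fs_hz| (for audio, 1 tick == 1 sample).
uint32_t TimestampsPerCall(uint32_t fs_hz, uint32_t ms_per_call) {
  return ms_per_call * (fs_hz / 1000);
}
// Example: TimestampsPerCall(16000, 10) == 160.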
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/signal_mcu.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/signal_mcu.c
deleted file mode 100644
index b795ec30e38..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/signal_mcu.c
+++ /dev/null
@@ -1,820 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Signal the MCU that data is available and ask for a RecOut decision.
- */
-
-#include "mcu.h"
-
-#include <string.h>
-
-#include "signal_processing_library.h"
-
-#include "automode.h"
-#include "dtmf_buffer.h"
-#include "mcu_dsp_common.h"
-#include "neteq_error_codes.h"
-
-#ifdef NETEQ_DELAY_LOGGING
-#include "delay_logging.h"
-#include <stdio.h>
-
-extern FILE *delay_fid2; /* file pointer to delay log file */
-#endif
-
-
-/*
- * Update the frame size, if we can.
- */
-static int WebRtcNetEQ_UpdatePackSizeSamples(MCUInst_t* inst, int buffer_pos,
- int payload_type,
- int pack_size_samples) {
- if (buffer_pos >= 0) {
- int codec_pos;
- codec_pos = WebRtcNetEQ_DbGetCodec(&inst->codec_DB_inst, payload_type);
- if (codec_pos >= 0) {
- codec_pos = inst->codec_DB_inst.position[codec_pos];
- if (codec_pos >= 0) {
- int temp_packet_size_samples = WebRtcNetEQ_PacketBufferGetPacketSize(
- &inst->PacketBuffer_inst, buffer_pos, &inst->codec_DB_inst,
- codec_pos, pack_size_samples, inst->av_sync);
- if (temp_packet_size_samples > 0)
- return temp_packet_size_samples;
- return pack_size_samples;
- }
- }
- }
- return pack_size_samples;
-}
-
-/*
- * Signals the MCU that DSP status data is available.
- */
-int WebRtcNetEQ_SignalMcu(MCUInst_t *inst)
-{
-
- int i_bufferpos, i_res;
- uint16_t uw16_instr;
- DSP2MCU_info_t dspInfo;
- int16_t *blockPtr, blockLen;
- uint32_t uw32_availableTS;
- RTPPacket_t temp_pkt;
- int32_t w32_bufsize, w32_tmp;
- int16_t payloadType = -1;
- int16_t wantedNoOfTimeStamps;
- int32_t totalTS;
- int16_t oldPT, latePacketExist = 0;
- uint32_t oldTS, prevTS, uw32_tmp;
- uint16_t prevSeqNo;
- int16_t nextSeqNoAvail;
- int16_t fs_mult, w16_tmp;
- int16_t lastModeBGNonly = 0;
-#ifdef NETEQ_DELAY_LOGGING
- int temp_var;
-#endif
- int playDtmf = 0;
-
- fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
-
- /* Increment counter since last statistics report */
- inst->lastReportTS += inst->timestampsPerCall;
-
- /* Increment waiting time for all packets. */
- WebRtcNetEQ_IncrementWaitingTimes(&inst->PacketBuffer_inst);
-
- /* Read info from DSP so we know the current status */
-
- WEBRTC_SPL_MEMCPY_W8(&dspInfo,inst->pw16_readAddress,sizeof(DSP2MCU_info_t));
-
- /* Set blockPtr to first payload block */
- blockPtr = &inst->pw16_writeAddress[3];
-
- /* Clear instruction word and number of lost samples (2*int16_t) */
- inst->pw16_writeAddress[0] = 0;
- inst->pw16_writeAddress[1] = 0;
- inst->pw16_writeAddress[2] = 0;
-
- if ((dspInfo.lastMode & MODE_AWAITING_CODEC_PTR) != 0)
- {
- /*
- * Make sure state is adjusted so that a codec update is
- * performed when first packet arrives.
- */
- if (inst->new_codec != 1)
- {
- inst->current_Codec = -1;
- }
- dspInfo.lastMode = (dspInfo.lastMode ^ MODE_AWAITING_CODEC_PTR);
- }
-
-#ifdef NETEQ_STEREO
- if ((dspInfo.lastMode & MODE_MASTER_DTMF_SIGNAL) != 0)
- {
- playDtmf = 1; /* force DTMF decision */
- dspInfo.lastMode = (dspInfo.lastMode ^ MODE_MASTER_DTMF_SIGNAL);
- }
-
- if ((dspInfo.lastMode & MODE_USING_STEREO) != 0)
- {
- if (inst->usingStereo == 0)
- {
- /* stereo mode changed; reset automode instance to re-synchronize statistics */
- WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
- inst->PacketBuffer_inst.maxInsertPositions);
- }
- inst->usingStereo = 1;
- dspInfo.lastMode = (dspInfo.lastMode ^ MODE_USING_STEREO);
- }
- else
- {
- inst->usingStereo = 0;
- }
-#endif
-
- /* detect if BGN_ONLY flag is set in lastMode */
- if ((dspInfo.lastMode & MODE_BGN_ONLY) != 0)
- {
- lastModeBGNonly = 1; /* remember flag */
- dspInfo.lastMode ^= MODE_BGN_ONLY; /* clear the flag */
- }
-
- if ((dspInfo.lastMode == MODE_RFC3389CNG) || (dspInfo.lastMode == MODE_CODEC_INTERNAL_CNG)
- || (dspInfo.lastMode == MODE_EXPAND))
- {
- /*
- * If last mode was CNG (or Expand, since this could be covering up for a lost CNG
- * packet), increase the CNGplayedTS counter.
- */
- inst->BufferStat_inst.uw32_CNGplayedTS += inst->timestampsPerCall;
-
- if (dspInfo.lastMode == MODE_RFC3389CNG)
- {
- /* remember that RFC3389CNG is on (needed if CNG is interrupted by DTMF) */
- inst->BufferStat_inst.w16_cngOn = CNG_RFC3389_ON;
- }
- else if (dspInfo.lastMode == MODE_CODEC_INTERNAL_CNG)
- {
- /* remember that internal CNG is on (needed if CNG is interrupted by DTMF) */
- inst->BufferStat_inst.w16_cngOn = CNG_INTERNAL_ON;
- }
-
- }
-
- /* Update packet size from previously decoded packet */
- if (dspInfo.frameLen > 0)
- {
- inst->PacketBuffer_inst.packSizeSamples = dspInfo.frameLen;
- }
-
- /* Look for late packet (unless codec has changed) */
- if (inst->new_codec != 1)
- {
- if (WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) inst->current_Codec))
- {
- WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
- inst->timeStamp, &uw32_availableTS, &i_bufferpos, 1, &payloadType);
- if ((inst->new_codec != 1) && (inst->timeStamp == uw32_availableTS)
- && (inst->timeStamp < dspInfo.playedOutTS) && (i_bufferpos != -1)
- && (WebRtcNetEQ_DbGetPayload(&(inst->codec_DB_inst),
- (enum WebRtcNetEQDecoder) inst->current_Codec) == payloadType))
- {
- int waitingTime;
- temp_pkt.payload = blockPtr + 1;
- i_res = WebRtcNetEQ_PacketBufferExtract(&inst->PacketBuffer_inst, &temp_pkt,
- i_bufferpos, &waitingTime);
- if (i_res < 0)
- { /* error returned */
- return i_res;
- }
- WebRtcNetEQ_StoreWaitingTime(inst, waitingTime);
- *blockPtr = temp_pkt.payloadLen;
- /* set the flag if this is a redundant payload */
- if (temp_pkt.rcuPlCntr > 0)
- {
- *blockPtr = (*blockPtr) | (DSP_CODEC_RED_FLAG);
- }
- blockPtr += ((temp_pkt.payloadLen + 1) >> 1) + 1;
-
- /*
- * Close the data with a zero size block, in case we will not write any
- * more data.
- */
- *blockPtr = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff)
- | DSP_CODEC_ADD_LATE_PKT;
- latePacketExist = 1;
- }
- }
- }
-
- i_res = WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
- dspInfo.playedOutTS, &uw32_availableTS, &i_bufferpos, (inst->new_codec == 0),
- &payloadType);
- if (i_res < 0)
- { /* error returned */
- return i_res;
- }
-
- if (inst->BufferStat_inst.w16_cngOn == CNG_RFC3389_ON)
- {
- /*
- * Because of timestamp peculiarities, we have to "manually" disallow using a CNG
- * packet with the same timestamp as the one that was last played. This can happen
- * when using redundancy and will cause the timing to shift.
- */
- while (i_bufferpos != -1 && WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst,
- payloadType) && dspInfo.playedOutTS >= uw32_availableTS)
- {
-
- /* Don't use this packet, discard it */
- inst->PacketBuffer_inst.payloadType[i_bufferpos] = -1;
- inst->PacketBuffer_inst.payloadLengthBytes[i_bufferpos] = 0;
- inst->PacketBuffer_inst.numPacketsInBuffer--;
-
- /* Check buffer again */
- WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
- dspInfo.playedOutTS, &uw32_availableTS, &i_bufferpos, (inst->new_codec == 0),
- &payloadType);
- }
- }
-
- /* Check packet buffer */
- w32_bufsize = WebRtcNetEQ_PacketBufferGetSize(&inst->PacketBuffer_inst,
- &inst->codec_DB_inst, inst->av_sync);
-
- if (dspInfo.lastMode == MODE_SUCCESS_ACCELERATE || dspInfo.lastMode
- == MODE_LOWEN_ACCELERATE || dspInfo.lastMode == MODE_SUCCESS_PREEMPTIVE
- || dspInfo.lastMode == MODE_LOWEN_PREEMPTIVE)
- {
- /* Subtract (dspInfo.samplesLeft + inst->timestampsPerCall) from sampleMemory */
- inst->BufferStat_inst.Automode_inst.sampleMemory -= dspInfo.samplesLeft
- + inst->timestampsPerCall;
- }
-
- /* calculate total current buffer size (in ms*8), including sync buffer */
- w32_bufsize = WebRtcSpl_DivW32W16((w32_bufsize + dspInfo.samplesLeft), fs_mult);
-
-#ifdef NETEQ_ATEVENT_DECODE
- /* DTMF data will affect the decision */
- if (WebRtcNetEQ_DtmfDecode(&inst->DTMF_inst, blockPtr + 1, blockPtr + 2,
- dspInfo.playedOutTS + inst->BufferStat_inst.uw32_CNGplayedTS) > 0)
- {
- playDtmf = 1;
-
- /* Flag DTMF payload */
- inst->pw16_writeAddress[0] = inst->pw16_writeAddress[0] | DSP_DTMF_PAYLOAD;
-
- /* Block Length in bytes */
- blockPtr[0] = 4;
- /* Advance to next payload position */
- blockPtr += 3;
- }
-#endif
-
- /* Update the frame size, if we can. */
- inst->PacketBuffer_inst.packSizeSamples =
- WebRtcNetEQ_UpdatePackSizeSamples(inst, i_bufferpos, payloadType,
- inst->PacketBuffer_inst.packSizeSamples);
- /* Update statistics and make decision */
- uw16_instr = WebRtcNetEQ_BufstatsDecision(&inst->BufferStat_inst,
- inst->PacketBuffer_inst.packSizeSamples, w32_bufsize, dspInfo.playedOutTS,
- uw32_availableTS, i_bufferpos == -1,
- WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType), dspInfo.lastMode,
- inst->NetEqPlayoutMode, inst->timestampsPerCall, inst->NoOfExpandCalls, fs_mult,
- lastModeBGNonly, playDtmf);
-
- /* Check if time to reset loss counter */
- if (inst->lastReportTS > WEBRTC_SPL_UMUL(inst->fs, MAX_LOSS_REPORT_PERIOD))
- {
- /* reset loss counter */
- WebRtcNetEQ_ResetMcuInCallStats(inst);
- }
-
- /* Check sync buffer size */
- if ((dspInfo.samplesLeft >= inst->timestampsPerCall) && (uw16_instr
- != BUFSTATS_DO_ACCELERATE) && (uw16_instr != BUFSTATS_DO_MERGE) && (uw16_instr
- != BUFSTATS_DO_PREEMPTIVE_EXPAND))
- {
- *blockPtr = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff) | DSP_INSTR_NORMAL;
- return 0;
- }
-
- if (uw16_instr == BUFSTATS_DO_EXPAND)
- {
- inst->NoOfExpandCalls++;
- }
- else
- {
- /* reset counter */
- inst->NoOfExpandCalls = 0;
- }
-
- /* New codec or big change in packet number? */
- if ((inst->new_codec) || (uw16_instr == BUFSTAT_REINIT))
- {
- CodecFuncInst_t cinst;
-
- /* Clear other instructions */
- blockPtr = &inst->pw16_writeAddress[3];
- /* Clear instruction word */
- inst->pw16_writeAddress[0] = 0;
-
- inst->timeStamp = uw32_availableTS;
- dspInfo.playedOutTS = uw32_availableTS;
- if (inst->current_Codec != -1)
- {
- i_res = WebRtcNetEQ_DbGetPtrs(&inst->codec_DB_inst,
- (enum WebRtcNetEQDecoder) inst->current_Codec, &cinst);
- if (i_res < 0)
- { /* error returned */
- return i_res;
- }
- }
- else
- {
- /* The main codec has not been initialized yet (first packets are DTMF or CNG). */
- if (WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType))
- {
- /* The currently extracted packet is CNG; get CNG fs */
- uint16_t tempFs;
-
- tempFs = WebRtcNetEQ_DbGetSampleRate(&inst->codec_DB_inst, payloadType);
- /* TODO(tlegrand): Remove this limitation once ACM has full
- * 48 kHz support. */
- if (tempFs > 32000)
- {
- inst->fs = 32000;
- }
- else if (tempFs > 0)
- {
- inst->fs = tempFs;
- }
- }
- WebRtcSpl_MemSetW16((int16_t*) &cinst, 0,
- sizeof(CodecFuncInst_t) / sizeof(int16_t));
- cinst.codec_fs = inst->fs;
- }
- cinst.timeStamp = inst->timeStamp;
- blockLen = (sizeof(CodecFuncInst_t)) >> (sizeof(int16_t) - 1); /* in Word16 */
- *blockPtr = blockLen * 2;
- blockPtr++;
- WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst,sizeof(CodecFuncInst_t));
- blockPtr += blockLen;
- inst->new_codec = 0;
-
- /* Reinitialize the MCU fs */
- i_res = WebRtcNetEQ_McuSetFs(inst, cinst.codec_fs);
- if (i_res < 0)
- { /* error returned */
- return i_res;
- }
-
- /* Set the packet size by guessing */
- inst->PacketBuffer_inst.packSizeSamples =
- WebRtcNetEQ_UpdatePackSizeSamples(inst, i_bufferpos, payloadType,
- inst->timestampsPerCall * 3);
-
- WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
- inst->PacketBuffer_inst.maxInsertPositions);
-
-#ifdef NETEQ_CNG_CODEC
- /* Also insert CNG state as this might be needed by DSP */
- i_res = WebRtcNetEQ_DbGetPtrs(&inst->codec_DB_inst, kDecoderCNG, &cinst);
- if ((i_res < 0) && (i_res != CODEC_DB_NOT_EXIST1))
- {
- /* other error returned */
- /* (CODEC_DB_NOT_EXIST1 simply indicates that CNG is not used */
- return i_res;
- }
- else
- {
- /* CNG exists */
- blockLen = (sizeof(cinst.codec_state)) >> (sizeof(int16_t) - 1);
- *blockPtr = blockLen * 2;
- blockPtr++;
- WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst.codec_state,sizeof(cinst.codec_state));
- blockPtr += blockLen;
- }
-#endif
-
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff)
- | DSP_CODEC_NEW_CODEC;
-
- if (uw16_instr == BUFSTATS_DO_RFC3389CNG_NOPACKET)
- {
- /*
- * Change decision to CNG packet, since we do have a CNG packet, but it was
- * considered too early to use. Now, use it anyway.
- */
- uw16_instr = BUFSTATS_DO_RFC3389CNG_PACKET;
- }
- else if (uw16_instr != BUFSTATS_DO_RFC3389CNG_PACKET)
- {
- uw16_instr = BUFSTATS_DO_NORMAL;
- }
-
- /* reset loss counter */
- WebRtcNetEQ_ResetMcuInCallStats(inst);
- }
-
- /* Should we just reset the decoder? */
- if (uw16_instr == BUFSTAT_REINIT_DECODER)
- {
- /* Change decision to normal and flag decoder reset */
- uw16_instr = BUFSTATS_DO_NORMAL;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff) | DSP_CODEC_RESET;
- }
-
- /* Expand requires no new packet */
- if (uw16_instr == BUFSTATS_DO_EXPAND)
- {
-
- inst->timeStamp = dspInfo.playedOutTS;
-
- /* Have we got one descriptor left? */
- if (WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) inst->current_Codec)
- && (dspInfo.MD || latePacketExist))
- {
-
- if (dspInfo.lastMode != MODE_ONE_DESCRIPTOR)
- {
- /* this is the first "consecutive" one-descriptor decoding; reset counter */
- inst->one_desc = 0;
- }
- if (inst->one_desc < MAX_ONE_DESC)
- {
- /* use that one descriptor */
- inst->one_desc++; /* increase counter */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_NORMAL_ONE_DESC;
-
- /* decrease counter since we did no Expand */
- inst->NoOfExpandCalls = WEBRTC_SPL_MAX(inst->NoOfExpandCalls - 1, 0);
- return 0;
- }
- else
- {
- /* too many consecutive one-descriptor decodings; do expand instead */
- inst->one_desc = 0; /* reset counter */
- }
-
- }
-
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff) | DSP_INSTR_EXPAND;
- return 0;
- }
-
- /* Merge is not needed if we still have a descriptor */
- if ((uw16_instr == BUFSTATS_DO_MERGE) && (dspInfo.MD != 0))
- {
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_NORMAL_ONE_DESC;
- *blockPtr = 0;
- return 0;
- }
-
- /* Do CNG without trying to extract any packets from buffer */
- if (uw16_instr == BUFSTATS_DO_RFC3389CNG_NOPACKET)
- {
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_RFC3389CNG;
- *blockPtr = 0;
- return 0;
- }
-
- /* Do built-in CNG without extracting any new packets from buffer */
- if (uw16_instr == BUFSTATS_DO_INTERNAL_CNG_NOPACKET)
- {
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_CODEC_INTERNAL_CNG;
- *blockPtr = 0;
- return 0;
- }
-
- /* Do DTMF without extracting any new packets from buffer */
- if (uw16_instr == BUFSTATS_DO_DTMF_ONLY)
- {
- uint32_t timeStampJump = 0;
-
- /* Update timestamp */
- if ((inst->BufferStat_inst.uw32_CNGplayedTS > 0) && (dspInfo.lastMode != MODE_DTMF))
- {
- /* Jump in timestamps if needed */
- timeStampJump = inst->BufferStat_inst.uw32_CNGplayedTS;
- inst->pw16_writeAddress[1] = (uint16_t) (timeStampJump >> 16);
- inst->pw16_writeAddress[2] = (uint16_t) (timeStampJump & 0xFFFF);
- }
-
- inst->timeStamp = dspInfo.playedOutTS + timeStampJump;
-
- inst->BufferStat_inst.uw32_CNGplayedTS = 0;
- inst->NoOfExpandCalls = 0;
-
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DTMF_GENERATE;
- *blockPtr = 0;
- return 0;
- }
-
- if (uw16_instr == BUFSTATS_DO_ACCELERATE)
- {
- /* In order to do an Accelerate we need at least 30 ms of data */
- if (dspInfo.samplesLeft >= (3 * 80 * fs_mult))
- {
- /* Already have enough data, so we do not need to extract any more */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_ACCELERATE;
- *blockPtr = 0;
- inst->BufferStat_inst.Automode_inst.sampleMemory
- = (int32_t) dspInfo.samplesLeft;
- inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
- return 0;
- }
- else if ((dspInfo.samplesLeft >= (1 * 80 * fs_mult))
- && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
- {
- /* Avoid decoding more data as it might overflow playout buffer */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_NORMAL;
- *blockPtr = 0;
- return 0;
- }
- else if ((dspInfo.samplesLeft < (1 * 80 * fs_mult))
- && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
- {
- /* For >= 30ms allow Accelerate with a decoding to avoid overflow in playout buffer */
- wantedNoOfTimeStamps = inst->timestampsPerCall;
- }
- else if (dspInfo.samplesLeft >= (2 * 80 * fs_mult))
- {
- /* We need to decode another 10 ms in order to do an Accelerate */
- wantedNoOfTimeStamps = inst->timestampsPerCall;
- }
- else
- {
- /*
- * Build up decoded data by decoding at least 20 ms of data.
- * Do not perform Accelerate yet, but wait until we only need to do one decoding.
- */
- wantedNoOfTimeStamps = 2 * inst->timestampsPerCall;
- uw16_instr = BUFSTATS_DO_NORMAL;
- }
- }
- else if (uw16_instr == BUFSTATS_DO_PREEMPTIVE_EXPAND)
- {
- /* In order to do a Preemptive Expand we need at least 30 ms of data */
- if (dspInfo.samplesLeft >= (3 * 80 * fs_mult))
- {
- /* Already have enough data, so we do not need to extract any more */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_PREEMPTIVE_EXPAND;
- *blockPtr = 0;
- inst->BufferStat_inst.Automode_inst.sampleMemory
- = (int32_t) dspInfo.samplesLeft;
- inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
- return 0;
- }
- else if ((dspInfo.samplesLeft >= (1 * 80 * fs_mult))
- && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
- {
- /*
- * Avoid decoding more data as it might overflow playout buffer;
- * still try Preemptive Expand though.
- */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_PREEMPTIVE_EXPAND;
- *blockPtr = 0;
- inst->BufferStat_inst.Automode_inst.sampleMemory
- = (int32_t) dspInfo.samplesLeft;
- inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
- return 0;
- }
- else if ((dspInfo.samplesLeft < (1 * 80 * fs_mult))
- && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
- {
- /*
- * For >= 30ms allow Preemptive Expand with a decoding to avoid overflow in
- * playout buffer
- */
- wantedNoOfTimeStamps = inst->timestampsPerCall;
- }
- else if (dspInfo.samplesLeft >= (2 * 80 * fs_mult))
- {
- /* We need to decode another 10 ms in order to do a Preemptive Expand */
- wantedNoOfTimeStamps = inst->timestampsPerCall;
- }
- else
- {
- /*
- * Build up decoded data by decoding at least 20 ms of data,
- * Still try to perform Preemptive Expand.
- */
- wantedNoOfTimeStamps = 2 * inst->timestampsPerCall;
- }
- }
- else
- {
- wantedNoOfTimeStamps = inst->timestampsPerCall;
- }
-
- /* Otherwise get data from buffer, try to get at least 10ms */
- totalTS = 0;
- oldTS = uw32_availableTS;
- if ((i_bufferpos > -1) && (uw16_instr != BUFSTATS_DO_ALTERNATIVE_PLC) && (uw16_instr
- != BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS) && (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION)
- && (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION_INC_TS))
- {
- uw32_tmp = (uw32_availableTS - dspInfo.playedOutTS);
- inst->pw16_writeAddress[1] = (uint16_t) (uw32_tmp >> 16);
- inst->pw16_writeAddress[2] = (uint16_t) (uw32_tmp & 0xFFFF);
- if (inst->BufferStat_inst.w16_cngOn == CNG_OFF)
- {
- /*
- * Adjustment of TS only corresponds to an actual packet loss
- * if comfort noise is not played. If comfort noise was just played,
- * this adjustment of TS is only done to get back in sync with the
- * stream TS; no loss to report.
- */
- inst->lostTS += uw32_tmp;
- }
-
- if (uw16_instr != BUFSTATS_DO_RFC3389CNG_PACKET)
- {
- /* We are about to decode and use a non-CNG packet => CNG period is ended */
- inst->BufferStat_inst.w16_cngOn = CNG_OFF;
- }
-
- /*
- * Reset CNG timestamp as a new packet will be delivered.
- * (Also if CNG packet, since playedOutTS is updated.)
- */
- inst->BufferStat_inst.uw32_CNGplayedTS = 0;
-
- prevSeqNo = inst->PacketBuffer_inst.seqNumber[i_bufferpos];
- prevTS = inst->PacketBuffer_inst.timeStamp[i_bufferpos];
- oldPT = inst->PacketBuffer_inst.payloadType[i_bufferpos];
-
- /* These values are used by NACK module to estimate time-to-play of
- * a missing packet. Occasionally, NetEq might decide to decode more
- * than one packet. Therefore, these values store sequence number and
- * timestamp of the first packet pulled from the packet buffer. In
- * such cases, these values do not exactly represent the sequence number
- * or timestamp associated with a 10ms audio pulled from NetEq. NACK
- * module is designed to compensate for this.
- */
- inst->decoded_packet_sequence_number = prevSeqNo;
- inst->decoded_packet_timestamp = prevTS;
-
- /* clear flag bits */
- inst->pw16_writeAddress[0] = inst->pw16_writeAddress[0] & 0xFF3F;
- do
- {
- int waitingTime;
- inst->timeStamp = uw32_availableTS;
- /* Write directly to shared memory */
- temp_pkt.payload = blockPtr + 1;
- i_res = WebRtcNetEQ_PacketBufferExtract(&inst->PacketBuffer_inst, &temp_pkt,
- i_bufferpos, &waitingTime);
-
- if (i_res < 0)
- {
- /* error returned */
- return i_res;
- }
- WebRtcNetEQ_StoreWaitingTime(inst, waitingTime);
-
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE;
- if ((fwrite(&temp_var, sizeof(int),
- 1, delay_fid2) != 1) ||
- (fwrite(&temp_pkt.timeStamp, sizeof(uint32_t),
- 1, delay_fid2) != 1) ||
- (fwrite(&dspInfo.samplesLeft, sizeof(uint16_t),
- 1, delay_fid2) != 1)) {
- return -1;
- }
-#endif
-
- *blockPtr = temp_pkt.payloadLen;
- /* set the flag if this is a redundant payload */
- if (temp_pkt.rcuPlCntr > 0)
- {
- *blockPtr = (*blockPtr) | (DSP_CODEC_RED_FLAG);
- }
- blockPtr += ((temp_pkt.payloadLen + 1) >> 1) + 1;
-
- if (i_bufferpos > -1)
- {
- /*
- * Store number of TS extracted (last extracted is assumed to be of
- * packSizeSamples).
- */
- totalTS = uw32_availableTS - oldTS + inst->PacketBuffer_inst.packSizeSamples;
- }
- /* Check what next packet is available */
- WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
- inst->timeStamp, &uw32_availableTS, &i_bufferpos, 0, &payloadType);
-
- nextSeqNoAvail = 0;
- if ((i_bufferpos > -1) && (oldPT
- == inst->PacketBuffer_inst.payloadType[i_bufferpos]))
- {
- w16_tmp = inst->PacketBuffer_inst.seqNumber[i_bufferpos] - prevSeqNo;
- w32_tmp = inst->PacketBuffer_inst.timeStamp[i_bufferpos] - prevTS;
- if ((w16_tmp == 1) || /* Next packet */
- ((w16_tmp == 0) && (w32_tmp == inst->PacketBuffer_inst.packSizeSamples)))
- { /* or packet split into frames */
- nextSeqNoAvail = 1;
- }
- prevSeqNo = inst->PacketBuffer_inst.seqNumber[i_bufferpos];
- }
- /* Update the frame size, if we can. */
- inst->PacketBuffer_inst.packSizeSamples =
- WebRtcNetEQ_UpdatePackSizeSamples(inst, i_bufferpos,
- payloadType, inst->PacketBuffer_inst.packSizeSamples);
- }
- while ((totalTS < wantedNoOfTimeStamps) && (nextSeqNoAvail == 1));
- }
-
- if ((uw16_instr == BUFSTATS_DO_ACCELERATE)
- || (uw16_instr == BUFSTATS_DO_PREEMPTIVE_EXPAND))
- {
- /* Check that we have enough data (30 ms) to do the Accelerate */
- if ((totalTS + dspInfo.samplesLeft) < WEBRTC_SPL_MUL(3,inst->timestampsPerCall)
- && (uw16_instr == BUFSTATS_DO_ACCELERATE))
- {
- /* Not enough, do normal operation instead */
- uw16_instr = BUFSTATS_DO_NORMAL;
- }
- else
- {
- inst->BufferStat_inst.Automode_inst.sampleMemory
- = (int32_t) dspInfo.samplesLeft + totalTS;
- inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
- }
- }
-
- /* Close the data with a zero size block */
- *blockPtr = 0;
-
- /* Write data to DSP */
- switch (uw16_instr)
- {
- case BUFSTATS_DO_NORMAL:
- /* Normal with decoding included */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_NORMAL;
- break;
- case BUFSTATS_DO_ACCELERATE:
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_ACCELERATE;
- break;
- case BUFSTATS_DO_MERGE:
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_MERGE;
- break;
- case BUFSTATS_DO_RFC3389CNG_PACKET:
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_RFC3389CNG;
- break;
- case BUFSTATS_DO_ALTERNATIVE_PLC:
- inst->pw16_writeAddress[1] = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_ALTERNATIVE_PLC;
- break;
- case BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS:
- inst->pw16_writeAddress[1] = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS;
- break;
- case BUFSTATS_DO_AUDIO_REPETITION:
- inst->pw16_writeAddress[1] = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_AUDIO_REPETITION;
- break;
- case BUFSTATS_DO_AUDIO_REPETITION_INC_TS:
- inst->pw16_writeAddress[1] = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_AUDIO_REPETITION_INC_TS;
- break;
- case BUFSTATS_DO_PREEMPTIVE_EXPAND:
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_PREEMPTIVE_EXPAND;
- break;
- default:
- return UNKNOWN_BUFSTAT_DECISION;
- }
-
- inst->timeStamp = dspInfo.playedOutTS;
- return 0;
-
-}
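Several branches of WebRtcNetEQ_SignalMcu() above gate Accelerate and Preemptive Expand on having roughly 30 ms of audio available, expressed as 3 * 80 * fs_mult samples (or three 10 ms calls). The sketch below isolates that gating rule; the function and parameter names are illustrative, not from the file.

#include <cstdint>

// Returns true if enough audio is available to run time scaling (Accelerate
// or Preemptive Expand), which needs roughly 30 ms of signal to work on.
// |fs_mult| is the sample rate divided by 8000, as in the MCU code above.
bool EnoughDataForTimeScaling(int samples_left_in_sync_buffer,
                              int samples_extracted_from_buffer,
                              int fs_mult) {
  const int kThirtyMsSamples = 3 * 80 * fs_mult;  // 80 samples == 10 ms at 8 kHz.
  return samples_left_in_sync_buffer + samples_extracted_from_buffer >=
         kThirtyMsSamples;
}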
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/split_and_insert.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/split_and_insert.c
deleted file mode 100644
index d7f17fdc8d4..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/split_and_insert.c
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Split an RTP payload (if possible and suitable) and insert into packet buffer.
- */
-
-#include "mcu.h"
-
-#include <string.h>
-
-#include "mcu_dsp_common.h"
-#include "neteq_error_codes.h"
-#include "signal_processing_library.h"
-
-int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t* packet,
- PacketBuf_t* Buffer_inst,
- SplitInfo_t* split_inst,
- int16_t* flushed,
- int av_sync)
-{
-
- int i_ok;
- int len;
- int i;
- RTPPacket_t temp_packet;
- int16_t localFlushed = 0;
- const int16_t *pw16_startPayload;
- const int is_sync_rtp = av_sync &&
- WebRtcNetEQ_IsSyncPayload(packet->payload, packet->payloadLen);
- *flushed = 0;
-
- len = packet->payloadLen;
-
- /* Copy to temp packet that can be modified. */
-
- WEBRTC_SPL_MEMCPY_W8(&temp_packet,packet,sizeof(RTPPacket_t));
-
- if (split_inst->deltaBytes == NO_SPLIT ||
- is_sync_rtp) /* Don't split sync RTPs, just insert. */
- {
- /* Not splittable codec */
- i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, packet,
- &localFlushed, av_sync);
- *flushed |= localFlushed;
- if (i_ok < 0)
- {
- return PBUFFER_INSERT_ERROR5;
- }
- }
- else if (split_inst->deltaBytes < -10)
- {
- /* G711, PCM16B or G722, use "soft splitting" */
- int split_size = packet->payloadLen;
- int mult = WEBRTC_SPL_ABS_W32(split_inst->deltaBytes) - 10;
-
- /* Find "chunk size" >= 20 ms and < 40 ms
- * split_inst->deltaTime in this case contains the number of bytes per
- * timestamp unit times 2
- */
- while (split_size >= ((80 << split_inst->deltaTime) * mult))
- {
- split_size >>= 1;
- }
-
- /* Make the size an even value. */
- if (split_size > 1)
- {
- split_size >>= 1;
- split_size *= 2;
- }
-
- temp_packet.payloadLen = split_size;
- pw16_startPayload = temp_packet.payload;
- i = 0;
- while (len >= (2 * split_size))
- {
- /* insert every chunk */
- i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet,
- &localFlushed, av_sync);
- *flushed |= localFlushed;
- temp_packet.timeStamp += ((2 * split_size) >> split_inst->deltaTime);
- i++;
- temp_packet.payload = &(pw16_startPayload[(i * split_size) >> 1]);
- temp_packet.starts_byte1 = temp_packet.starts_byte1 ^ (split_size & 0x1);
-
- len -= split_size;
- if (i_ok < 0)
- {
- return PBUFFER_INSERT_ERROR1;
- }
- }
-
- /* Insert the rest */
- temp_packet.payloadLen = len;
- i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet,
- &localFlushed, av_sync);
- *flushed |= localFlushed;
- if (i_ok < 0)
- {
- return PBUFFER_INSERT_ERROR2;
- }
- }
- else
- {
- /* Frame based codec, use hard splitting. */
- i = 0;
- pw16_startPayload = temp_packet.payload;
- while (len >= split_inst->deltaBytes)
- {
-
- temp_packet.payloadLen = split_inst->deltaBytes;
- i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet,
- &localFlushed, av_sync);
- *flushed |= localFlushed;
- i++;
- temp_packet.payload = &(pw16_startPayload[(i * split_inst->deltaBytes) >> 1]);
- temp_packet.timeStamp += split_inst->deltaTime;
- temp_packet.starts_byte1 = temp_packet.starts_byte1 ^ (split_inst->deltaBytes
- & 0x1);
-
- if (i_ok < 0)
- {
- return PBUFFER_INSERT_ERROR3;
- }
- len -= split_inst->deltaBytes;
-
- }
- if (len > 0)
- {
- /* Must be either an error or a SID frame at the end of the packet. */
- temp_packet.payloadLen = len;
- i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet,
- &localFlushed, av_sync);
- *flushed |= localFlushed;
- if (i_ok < 0)
- {
- return PBUFFER_INSERT_ERROR4;
- }
- }
- }
-
- return 0;
-}
-
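The frame-based ("hard splitting") branch above cuts the payload into deltaBytes-sized pieces and advances the RTP timestamp by deltaTime for each piece, keeping any short trailing piece (typically a SID frame). A byte-oriented sketch of the same idea, with illustrative types and names:

#include <cstddef>
#include <cstdint>
#include <vector>

struct PayloadChunk {
  const uint8_t* data;
  size_t length;
  uint32_t timestamp;
};

// Splits |payload| into frames of |bytes_per_frame| bytes, advancing the RTP
// timestamp by |timestamps_per_frame| per frame. A shorter trailing piece
// (for example a SID frame) is kept as the final chunk.
std::vector<PayloadChunk> HardSplit(const uint8_t* payload, size_t length,
                                    uint32_t first_timestamp,
                                    size_t bytes_per_frame,
                                    uint32_t timestamps_per_frame) {
  std::vector<PayloadChunk> chunks;
  size_t offset = 0;
  uint32_t timestamp = first_timestamp;
  while (length - offset >= bytes_per_frame) {
    chunks.push_back({payload + offset, bytes_per_frame, timestamp});
    offset += bytes_per_frame;
    timestamp += timestamps_per_frame;
  }
  if (offset < length) {
    chunks.push_back({payload + offset, length - offset, timestamp});
  }
  return chunks;
}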
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.cc
new file mode 100644
index 00000000000..383f7055549
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.cc
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/statistics_calculator.h"
+
+#include <assert.h>
+#include <string.h> // memset
+
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+
+namespace webrtc {
+
+StatisticsCalculator::StatisticsCalculator()
+ : preemptive_samples_(0),
+ accelerate_samples_(0),
+ added_zero_samples_(0),
+ expanded_voice_samples_(0),
+ expanded_noise_samples_(0),
+ discarded_packets_(0),
+ lost_timestamps_(0),
+ last_report_timestamp_(0),
+ len_waiting_times_(0),
+ next_waiting_time_index_(0) {
+ memset(waiting_times_, 0, kLenWaitingTimes * sizeof(waiting_times_[0]));
+}
+
+void StatisticsCalculator::Reset() {
+ preemptive_samples_ = 0;
+ accelerate_samples_ = 0;
+ added_zero_samples_ = 0;
+ expanded_voice_samples_ = 0;
+ expanded_noise_samples_ = 0;
+}
+
+void StatisticsCalculator::ResetMcu() {
+ discarded_packets_ = 0;
+ lost_timestamps_ = 0;
+ last_report_timestamp_ = 0;
+}
+
+void StatisticsCalculator::ResetWaitingTimeStatistics() {
+ memset(waiting_times_, 0, kLenWaitingTimes * sizeof(waiting_times_[0]));
+ len_waiting_times_ = 0;
+ next_waiting_time_index_ = 0;
+}
+
+void StatisticsCalculator::ExpandedVoiceSamples(int num_samples) {
+ expanded_voice_samples_ += num_samples;
+}
+
+void StatisticsCalculator::ExpandedNoiseSamples(int num_samples) {
+ expanded_noise_samples_ += num_samples;
+}
+
+void StatisticsCalculator::PreemptiveExpandedSamples(int num_samples) {
+ preemptive_samples_ += num_samples;
+}
+
+void StatisticsCalculator::AcceleratedSamples(int num_samples) {
+ accelerate_samples_ += num_samples;
+}
+
+void StatisticsCalculator::AddZeros(int num_samples) {
+ added_zero_samples_ += num_samples;
+}
+
+void StatisticsCalculator::PacketsDiscarded(int num_packets) {
+ discarded_packets_ += num_packets;
+}
+
+void StatisticsCalculator::LostSamples(int num_samples) {
+ lost_timestamps_ += num_samples;
+}
+
+void StatisticsCalculator::IncreaseCounter(int num_samples, int fs_hz) {
+ last_report_timestamp_ += num_samples;
+ if (last_report_timestamp_ >
+ static_cast<uint32_t>(fs_hz * kMaxReportPeriod)) {
+ lost_timestamps_ = 0;
+ last_report_timestamp_ = 0;
+ discarded_packets_ = 0;
+ }
+}
+
+void StatisticsCalculator::StoreWaitingTime(int waiting_time_ms) {
+ assert(next_waiting_time_index_ < kLenWaitingTimes);
+ waiting_times_[next_waiting_time_index_] = waiting_time_ms;
+ next_waiting_time_index_++;
+ if (next_waiting_time_index_ >= kLenWaitingTimes) {
+ next_waiting_time_index_ = 0;
+ }
+ if (len_waiting_times_ < kLenWaitingTimes) {
+ len_waiting_times_++;
+ }
+}
+
+void StatisticsCalculator::GetNetworkStatistics(
+ int fs_hz,
+ int num_samples_in_buffers,
+ int samples_per_packet,
+ const DelayManager& delay_manager,
+ const DecisionLogic& decision_logic,
+ NetEqNetworkStatistics *stats) {
+ if (fs_hz <= 0 || !stats) {
+ assert(false);
+ return;
+ }
+
+ stats->added_zero_samples = added_zero_samples_;
+ stats->current_buffer_size_ms = num_samples_in_buffers * 1000 / fs_hz;
+ const int ms_per_packet = decision_logic.packet_length_samples() /
+ (fs_hz / 1000);
+ stats->preferred_buffer_size_ms = (delay_manager.TargetLevel() >> 8) *
+ ms_per_packet;
+ stats->jitter_peaks_found = delay_manager.PeakFound();
+ stats->clockdrift_ppm = delay_manager.AverageIAT();
+
+ stats->packet_loss_rate = CalculateQ14Ratio(lost_timestamps_,
+ last_report_timestamp_);
+
+ const unsigned discarded_samples = discarded_packets_ * samples_per_packet;
+ stats->packet_discard_rate = CalculateQ14Ratio(discarded_samples,
+ last_report_timestamp_);
+
+ stats->accelerate_rate = CalculateQ14Ratio(accelerate_samples_,
+ last_report_timestamp_);
+
+ stats->preemptive_rate = CalculateQ14Ratio(preemptive_samples_,
+ last_report_timestamp_);
+
+ stats->expand_rate = CalculateQ14Ratio(expanded_voice_samples_ +
+ expanded_noise_samples_,
+ last_report_timestamp_);
+
+ // Reset counters.
+ ResetMcu();
+ Reset();
+}
+
+void StatisticsCalculator::WaitingTimes(std::vector<int>* waiting_times) {
+ if (!waiting_times) {
+ return;
+ }
+ waiting_times->assign(waiting_times_, waiting_times_ + len_waiting_times_);
+ ResetWaitingTimeStatistics();
+}
+
+int StatisticsCalculator::CalculateQ14Ratio(uint32_t numerator,
+ uint32_t denominator) {
+ if (numerator == 0) {
+ return 0;
+ } else if (numerator < denominator) {
+ // Ratio must be smaller than 1 in Q14.
+ assert((numerator << 14) / denominator < (1 << 14));
+ return (numerator << 14) / denominator;
+ } else {
+ // Cap the ratio at 1 in Q14; a larger value most likely indicates an error.
+ return 1 << 14;
+ }
+}
+
+} // namespace webrtc
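
As a rough illustration of the Q14 arithmetic in CalculateQ14Ratio() above, here is a standalone sketch outside this patch; the sample counts are hypothetical:

#include <cstdint>
#include <cstdio>

// Same convention as StatisticsCalculator::CalculateQ14Ratio(): the result is
// numerator / denominator expressed in Q14, capped at 1.0 (1 << 14).
static int Q14Ratio(uint32_t numerator, uint32_t denominator) {
  if (numerator == 0) return 0;
  if (numerator < denominator) return (numerator << 14) / denominator;
  return 1 << 14;
}

int main() {
  // Example: 480 lost timestamps over a 48000-timestamp report interval.
  int q14 = Q14Ratio(480, 48000);               // 163 in Q14.
  printf("loss rate = %.4f\n", q14 / 16384.0);  // Prints 0.0099, i.e. about 1%.
  return 0;
}

All of the rates written into NetEqNetworkStatistics above (expand, accelerate, preemptive, discard, loss) use this same fixed-point format.
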
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.h
new file mode 100644
index 00000000000..07ef8536fa8
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_STATISTICS_CALCULATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_STATISTICS_CALCULATOR_H_
+
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class DecisionLogic;
+class DelayManager;
+
+// This class handles various network statistics in NetEq.
+class StatisticsCalculator {
+ public:
+ StatisticsCalculator();
+
+ virtual ~StatisticsCalculator() {}
+
+ // Resets most of the counters.
+ void Reset();
+
+ // Resets the counters that are not handled by Reset().
+ void ResetMcu();
+
+ // Resets the waiting time statistics.
+ void ResetWaitingTimeStatistics();
+
+ // Reports that |num_samples| samples were produced through expansion, and
+ // that the expansion produced samples other than just noise.
+ void ExpandedVoiceSamples(int num_samples);
+
+ // Reports that |num_samples| samples were produced through expansion, and
+ // that the expansion produced only noise samples.
+ void ExpandedNoiseSamples(int num_samples);
+
+ // Reports that |num_samples| samples were produced through preemptive
+ // expansion.
+ void PreemptiveExpandedSamples(int num_samples);
+
+ // Reports that |num_samples| samples were removed through accelerate.
+ void AcceleratedSamples(int num_samples);
+
+ // Reports that |num_samples| zeros were inserted into the output.
+ void AddZeros(int num_samples);
+
+ // Reports that |num_packets| packets were discarded.
+ void PacketsDiscarded(int num_packets);
+
+ // Reports that |num_samples| samples were lost.
+ void LostSamples(int num_samples);
+
+ // Increases the report interval counter by |num_samples|, at a sample rate
+ // of |fs_hz|.
+ void IncreaseCounter(int num_samples, int fs_hz);
+
+ // Stores new packet waiting time in waiting time statistics.
+ void StoreWaitingTime(int waiting_time_ms);
+
+ // Returns the current network statistics in |stats|. The current sample rate
+ // is |fs_hz|, the total number of samples in packet buffer and sync buffer
+ // yet to play out is |num_samples_in_buffers|, and the number of samples per
+ // packet is |samples_per_packet|.
+ void GetNetworkStatistics(int fs_hz,
+ int num_samples_in_buffers,
+ int samples_per_packet,
+ const DelayManager& delay_manager,
+ const DecisionLogic& decision_logic,
+ NetEqNetworkStatistics *stats);
+
+ void WaitingTimes(std::vector<int>* waiting_times);
+
+ private:
+ static const int kMaxReportPeriod = 60; // Seconds before auto-reset.
+ static const int kLenWaitingTimes = 100;
+
+ // Calculates numerator / denominator, and returns the value in Q14.
+ static int CalculateQ14Ratio(uint32_t numerator, uint32_t denominator);
+
+ uint32_t preemptive_samples_;
+ uint32_t accelerate_samples_;
+ int added_zero_samples_;
+ uint32_t expanded_voice_samples_;
+ uint32_t expanded_noise_samples_;
+ int discarded_packets_;
+ uint32_t lost_timestamps_;
+ uint32_t last_report_timestamp_;
+ int waiting_times_[kLenWaitingTimes]; // Used as a circular buffer.
+ int len_waiting_times_;
+ int next_waiting_time_index_;
+
+ DISALLOW_COPY_AND_ASSIGN(StatisticsCalculator);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_STATISTICS_CALCULATOR_H_
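
A minimal usage sketch for the reporting side of this class; the frame size, sample rate and waiting time below are hypothetical values chosen for illustration:

#include <vector>

#include "webrtc/modules/audio_coding/neteq/statistics_calculator.h"

// Hypothetical reporting sequence for one 10 ms expanded frame at 16 kHz
// (160 samples).
void ReportOneExpandedFrame(webrtc::StatisticsCalculator* stats) {
  stats->ExpandedVoiceSamples(160);    // The frame was produced by expansion.
  stats->IncreaseCounter(160, 16000);  // Advance the report-interval counter.
  stats->StoreWaitingTime(40);         // The decoded packet waited 40 ms.
}

// Fetches the per-packet waiting times collected so far.
std::vector<int> DrainWaitingTimes(webrtc::StatisticsCalculator* stats) {
  std::vector<int> waiting_times;
  stats->WaitingTimes(&waiting_times);  // Also resets the waiting-time stats.
  return waiting_times;
}
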
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.cc
new file mode 100644
index 00000000000..d1802e174fc
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.cc
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include <algorithm> // Access to min.
+
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+
+namespace webrtc {
+
+size_t SyncBuffer::FutureLength() const {
+ return Size() - next_index_;
+}
+
+void SyncBuffer::PushBack(const AudioMultiVector& append_this) {
+ size_t samples_added = append_this.Size();
+ AudioMultiVector::PushBack(append_this);
+ AudioMultiVector::PopFront(samples_added);
+ if (samples_added <= next_index_) {
+ next_index_ -= samples_added;
+ } else {
+ // This means that we are pushing out future data that was never used.
+ // assert(false);
+ // TODO(hlundin): This assert must be disabled to support 60 ms frames.
+ // This should not happen even for 60 ms frames, but it does. Investigate
+ // why.
+ next_index_ = 0;
+ }
+ dtmf_index_ -= std::min(dtmf_index_, samples_added);
+}
+
+void SyncBuffer::PushFrontZeros(size_t length) {
+ InsertZerosAtIndex(length, 0);
+}
+
+void SyncBuffer::InsertZerosAtIndex(size_t length, size_t position) {
+ position = std::min(position, Size());
+ length = std::min(length, Size() - position);
+ AudioMultiVector::PopBack(length);
+ for (size_t channel = 0; channel < Channels(); ++channel) {
+ channels_[channel]->InsertZerosAt(length, position);
+ }
+ if (next_index_ >= position) {
+ // We are moving the |next_index_| sample.
+ set_next_index(next_index_ + length); // Overflow handled by subfunction.
+ }
+ if (dtmf_index_ > 0 && dtmf_index_ >= position) {
+ // We are moving the |dtmf_index_| sample.
+ set_dtmf_index(dtmf_index_ + length); // Overflow handled by subfunction.
+ }
+}
+
+void SyncBuffer::ReplaceAtIndex(const AudioMultiVector& insert_this,
+ size_t length,
+ size_t position) {
+ position = std::min(position, Size()); // Cap |position| in the valid range.
+ length = std::min(length, Size() - position);
+ AudioMultiVector::OverwriteAt(insert_this, length, position);
+}
+
+void SyncBuffer::ReplaceAtIndex(const AudioMultiVector& insert_this,
+ size_t position) {
+ ReplaceAtIndex(insert_this, insert_this.Size(), position);
+}
+
+size_t SyncBuffer::GetNextAudioInterleaved(size_t requested_len,
+ int16_t* output) {
+ if (!output) {
+ assert(false);
+ return 0;
+ }
+ size_t samples_to_read = std::min(FutureLength(), requested_len);
+ ReadInterleavedFromIndex(next_index_, samples_to_read, output);
+ next_index_ += samples_to_read;
+ return samples_to_read;
+}
+
+void SyncBuffer::IncreaseEndTimestamp(uint32_t increment) {
+ end_timestamp_ += increment;
+}
+
+void SyncBuffer::Flush() {
+ Zeros(Size());
+ next_index_ = Size();
+ end_timestamp_ = 0;
+ dtmf_index_ = 0;
+}
+
+void SyncBuffer::set_next_index(size_t value) {
+ // Cannot set |next_index_| larger than the size of the buffer.
+ next_index_ = std::min(value, Size());
+}
+
+void SyncBuffer::set_dtmf_index(size_t value) {
+ // Cannot set |dtmf_index_| larger than the size of the buffer.
+ dtmf_index_ = std::min(value, Size());
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.h
new file mode 100644
index 00000000000..59bd4d87e26
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_SYNC_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_SYNC_BUFFER_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class SyncBuffer : public AudioMultiVector {
+ public:
+ SyncBuffer(size_t channels, size_t length)
+ : AudioMultiVector(channels, length),
+ next_index_(length),
+ end_timestamp_(0),
+ dtmf_index_(0) {}
+
+ virtual ~SyncBuffer() {}
+
+ // Returns the number of samples yet to play out from the buffer.
+ size_t FutureLength() const;
+
+ // Adds the contents of |append_this| to the back of the SyncBuffer. Removes
+ // the same number of samples from the beginning of the SyncBuffer, to
+ // maintain a constant buffer size. The |next_index_| is updated to reflect
+ // the move of the beginning of "future" data.
+ void PushBack(const AudioMultiVector& append_this);
+
+ // Adds |length| zeros to the beginning of each channel. Removes
+ // the same number of samples from the end of the SyncBuffer, to
+ // maintain a constant buffer size. The |next_index_| is updated to reflect
+ // the move of the beginning of "future" data.
+ // Note that this operation may delete future samples that are waiting to
+ // be played.
+ void PushFrontZeros(size_t length);
+
+ // Inserts |length| zeros into each channel at index |position|. The size of
+ // the SyncBuffer is kept constant, which means that the last |length|
+ // elements in each channel will be purged.
+ virtual void InsertZerosAtIndex(size_t length, size_t position);
+
+ // Overwrites each channel in this SyncBuffer with values taken from
+ // |insert_this|. The values are taken from the beginning of |insert_this| and
+ // are inserted starting at |position|. |length| values are written into each
+ // channel. The size of the SyncBuffer is kept constant. That is, if |length|
+ // and |position| are selected such that the new data would extend beyond the
+ // end of the current SyncBuffer, the buffer is not extended.
+ // The |next_index_| is not updated.
+ virtual void ReplaceAtIndex(const AudioMultiVector& insert_this,
+ size_t length,
+ size_t position);
+
+ // Same as the above method, but where all of |insert_this| is written (with
+ // the same constraints as above, that the SyncBuffer is not extended).
+ virtual void ReplaceAtIndex(const AudioMultiVector& insert_this,
+ size_t position);
+
+ // Reads |requested_len| samples from each channel and writes them interleaved
+ // into |output|. The |next_index_| is updated to point to the sample to read
+ // next time.
+ size_t GetNextAudioInterleaved(size_t requested_len, int16_t* output);
+
+ // Adds |increment| to |end_timestamp_|.
+ void IncreaseEndTimestamp(uint32_t increment);
+
+ // Flushes the buffer. The buffer will contain only zeros after the flush, and
+ // |next_index_| will point to the end, like when the buffer was first
+ // created.
+ void Flush();
+
+ const AudioVector& Channel(size_t n) const { return *channels_[n]; }
+ AudioVector& Channel(size_t n) { return *channels_[n]; }
+
+ // Accessors and mutators.
+ size_t next_index() const { return next_index_; }
+ void set_next_index(size_t value);
+ uint32_t end_timestamp() const { return end_timestamp_; }
+ void set_end_timestamp(uint32_t value) { end_timestamp_ = value; }
+ size_t dtmf_index() const { return dtmf_index_; }
+ void set_dtmf_index(size_t value);
+
+ private:
+ size_t next_index_;
+ uint32_t end_timestamp_; // The timestamp of the last sample in the buffer.
+ size_t dtmf_index_; // Index to the first non-DTMF sample in the buffer.
+
+ DISALLOW_COPY_AND_ASSIGN(SyncBuffer);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_SYNC_BUFFER_H_
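
The unit tests below exercise PushBack(), PushFrontZeros() and GetNextAudioInterleaved(); for the remaining mutators, a small hypothetical sketch (the sizes are illustrative only):

#include <cstddef>

#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"

// Overwrite the last 80 samples of the buffer with freshly decoded audio,
// then pad 20 zeros at the front.
void PatchFuture(webrtc::SyncBuffer* sync_buffer,
                 const webrtc::AudioMultiVector& decoded) {
  const size_t position = sync_buffer->Size() - 80;
  sync_buffer->ReplaceAtIndex(decoded, 80, position);  // Size is kept constant.
  sync_buffer->PushFrontZeros(20);  // Pushes out the last 20 samples instead.
}
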
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer_unittest.cc
new file mode 100644
index 00000000000..1a3d0fe781c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer_unittest.cc
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+
+#include "gtest/gtest.h"
+
+namespace webrtc {
+
+TEST(SyncBuffer, CreateAndDestroy) {
+ // Create a SyncBuffer with two channels and 10 samples each.
+ static const size_t kLen = 10;
+ static const size_t kChannels = 2;
+ SyncBuffer sync_buffer(kChannels, kLen);
+ EXPECT_EQ(kChannels, sync_buffer.Channels());
+ EXPECT_EQ(kLen, sync_buffer.Size());
+ // When the buffer is empty, the next index to play out is at the end.
+ EXPECT_EQ(kLen, sync_buffer.next_index());
+ // Verify that all elements are zero.
+ for (size_t channel = 0; channel < kChannels; ++channel) {
+ for (size_t i = 0; i < kLen; ++i) {
+ EXPECT_EQ(0, sync_buffer[channel][i]);
+ }
+ }
+}
+
+TEST(SyncBuffer, SetNextIndex) {
+ // Create a SyncBuffer with two channels and 100 samples each.
+ static const size_t kLen = 100;
+ static const size_t kChannels = 2;
+ SyncBuffer sync_buffer(kChannels, kLen);
+ sync_buffer.set_next_index(0);
+ EXPECT_EQ(0u, sync_buffer.next_index());
+ sync_buffer.set_next_index(kLen / 2);
+ EXPECT_EQ(kLen / 2, sync_buffer.next_index());
+ sync_buffer.set_next_index(kLen);
+ EXPECT_EQ(kLen, sync_buffer.next_index());
+ // Try to set larger than the buffer size; should cap at buffer size.
+ sync_buffer.set_next_index(kLen + 1);
+ EXPECT_EQ(kLen, sync_buffer.next_index());
+}
+
+TEST(SyncBuffer, PushBackAndFlush) {
+ // Create a SyncBuffer with two channels and 100 samples each.
+ static const size_t kLen = 100;
+ static const size_t kChannels = 2;
+ SyncBuffer sync_buffer(kChannels, kLen);
+ static const size_t kNewLen = 10;
+ AudioMultiVector new_data(kChannels, kNewLen);
+ // Populate |new_data|.
+ for (size_t channel = 0; channel < kChannels; ++channel) {
+ for (size_t i = 0; i < kNewLen; ++i) {
+ new_data[channel][i] = i;
+ }
+ }
+ // Push back |new_data| into |sync_buffer|. This operation should pop out
+ // data from the front of |sync_buffer|, so that the size of the buffer
+ // remains the same. The |next_index_| should also move with the same length.
+ sync_buffer.PushBack(new_data);
+ ASSERT_EQ(kLen, sync_buffer.Size());
+ // Verify that |next_index_| moved accordingly.
+ EXPECT_EQ(kLen - kNewLen, sync_buffer.next_index());
+ // Verify the new contents.
+ for (size_t channel = 0; channel < kChannels; ++channel) {
+ for (size_t i = 0; i < kNewLen; ++i) {
+ EXPECT_EQ(new_data[channel][i],
+ sync_buffer[channel][sync_buffer.next_index() + i]);
+ }
+ }
+
+ // Now flush the buffer, and verify that it is all zeros, and that next_index
+ // points to the end.
+ sync_buffer.Flush();
+ ASSERT_EQ(kLen, sync_buffer.Size());
+ EXPECT_EQ(kLen, sync_buffer.next_index());
+ for (size_t channel = 0; channel < kChannels; ++channel) {
+ for (size_t i = 0; i < kLen; ++i) {
+ EXPECT_EQ(0, sync_buffer[channel][i]);
+ }
+ }
+}
+
+TEST(SyncBuffer, PushFrontZeros) {
+ // Create a SyncBuffer with two channels and 100 samples each.
+ static const size_t kLen = 100;
+ static const size_t kChannels = 2;
+ SyncBuffer sync_buffer(kChannels, kLen);
+ static const size_t kNewLen = 10;
+ AudioMultiVector new_data(kChannels, kNewLen);
+ // Populate |new_data|.
+ for (size_t channel = 0; channel < kChannels; ++channel) {
+ for (size_t i = 0; i < kNewLen; ++i) {
+ new_data[channel][i] = 1000 + i;
+ }
+ }
+ sync_buffer.PushBack(new_data);
+ EXPECT_EQ(kLen, sync_buffer.Size());
+
+ // Push |kNewLen| - 1 zeros into each channel in the front of the SyncBuffer.
+ sync_buffer.PushFrontZeros(kNewLen - 1);
+ EXPECT_EQ(kLen, sync_buffer.Size()); // Size should remain the same.
+ // Verify that |next_index_| moved accordingly. Should be at the end - 1.
+ EXPECT_EQ(kLen - 1, sync_buffer.next_index());
+ // Verify the zeros.
+ for (size_t channel = 0; channel < kChannels; ++channel) {
+ for (size_t i = 0; i < kNewLen - 1; ++i) {
+ EXPECT_EQ(0, sync_buffer[channel][i]);
+ }
+ }
+ // Verify that the correct data is at the end of the SyncBuffer.
+ for (size_t channel = 0; channel < kChannels; ++channel) {
+ EXPECT_EQ(1000, sync_buffer[channel][sync_buffer.next_index()]);
+ }
+}
+
+TEST(SyncBuffer, GetNextAudioInterleaved) {
+ // Create a SyncBuffer with two channels and 100 samples each.
+ static const size_t kLen = 100;
+ static const size_t kChannels = 2;
+ SyncBuffer sync_buffer(kChannels, kLen);
+ static const size_t kNewLen = 10;
+ AudioMultiVector new_data(kChannels, kNewLen);
+ // Populate |new_data|.
+ for (size_t channel = 0; channel < kChannels; ++channel) {
+ for (size_t i = 0; i < kNewLen; ++i) {
+ new_data[channel][i] = i;
+ }
+ }
+ // Push back |new_data| into |sync_buffer|. This operation should pop out
+ // data from the front of |sync_buffer|, so that the size of the buffer
+ // remains the same. The |next_index_| should also move with the same length.
+ sync_buffer.PushBack(new_data);
+
+ // Read to interleaved output. Read in two batches, where each read operation
+ // should automatically update the |next_index_| in the SyncBuffer.
+ int16_t output[kChannels * kNewLen];
+ // Note that |samples_read| is the number of samples read from each channel.
+ // That is, the number of samples written to |output| is
+ // |samples_read| * |kChannels|.
+ size_t samples_read = sync_buffer.GetNextAudioInterleaved(kNewLen / 2,
+ output);
+ samples_read +=
+ sync_buffer.GetNextAudioInterleaved(kNewLen / 2,
+ &output[samples_read * kChannels]);
+ EXPECT_EQ(kNewLen, samples_read);
+
+ // Verify the data.
+ int16_t* output_ptr = output;
+ for (size_t i = 0; i < kNewLen; ++i) {
+ for (size_t channel = 0; channel < kChannels; ++channel) {
+ EXPECT_EQ(new_data[channel][i], *output_ptr);
+ ++output_ptr;
+ }
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc
new file mode 100644
index 00000000000..a9228d49889
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc
@@ -0,0 +1,215 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/time_stretch.h"
+
+#include <algorithm> // min, max
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+TimeStretch::ReturnCodes TimeStretch::Process(
+ const int16_t* input,
+ size_t input_len,
+ AudioMultiVector* output,
+ int16_t* length_change_samples) {
+
+ // Pre-calculate common multiplication with |fs_mult_|.
+ int fs_mult_120 = fs_mult_ * 120; // Corresponds to 15 ms.
+
+ const int16_t* signal;
+ scoped_ptr<int16_t[]> signal_array;
+ size_t signal_len;
+ if (num_channels_ == 1) {
+ signal = input;
+ signal_len = input_len;
+ } else {
+ // We want |signal| to be only the first channel of |input|, which is
+ // interleaved. Thus, we take the first sample, skip forward |num_channels|
+ // samples, and continue like that.
+ signal_len = input_len / num_channels_;
+ signal_array.reset(new int16_t[signal_len]);
+ signal = signal_array.get();
+ size_t j = master_channel_;
+ for (size_t i = 0; i < signal_len; ++i) {
+ signal_array[i] = input[j];
+ j += num_channels_;
+ }
+ }
+
+ // Find maximum absolute value of input signal.
+ max_input_value_ = WebRtcSpl_MaxAbsValueW16(signal,
+ static_cast<int>(signal_len));
+
+ // Downsample to 4 kHz sample rate and calculate auto-correlation.
+ DspHelper::DownsampleTo4kHz(signal, signal_len, kDownsampledLen,
+ sample_rate_hz_, true /* compensate delay*/,
+ downsampled_input_);
+ AutoCorrelation();
+
+ // Find the strongest correlation peak.
+ static const int kNumPeaks = 1;
+ int peak_index;
+ int16_t peak_value;
+ DspHelper::PeakDetection(auto_correlation_, kCorrelationLen, kNumPeaks,
+ fs_mult_, &peak_index, &peak_value);
+ // Assert that |peak_index| stays within boundaries.
+ assert(peak_index >= 0);
+ assert(peak_index <= (2 * kCorrelationLen - 1) * fs_mult_);
+
+ // Compensate peak_index for displaced starting position. The displacement
+ // happens in AutoCorrelation(). Here, |kMinLag| is in the down-sampled 4 kHz
+ // domain, while the |peak_index| is in the original sample rate; hence, the
+ // multiplication by fs_mult_ * 2.
+ peak_index += kMinLag * fs_mult_ * 2;
+ // Assert that |peak_index| stays within boundaries.
+ assert(peak_index >= 20 * fs_mult_);
+ assert(peak_index <= 20 * fs_mult_ + (2 * kCorrelationLen - 1) * fs_mult_);
+
+ // Calculate scaling to ensure that |peak_index| samples can be square-summed
+ // without overflowing.
+ int scaling = 31 - WebRtcSpl_NormW32(max_input_value_ * max_input_value_) -
+ WebRtcSpl_NormW32(peak_index);
+ scaling = std::max(0, scaling);
+
+ // |vec1| starts at 15 ms minus one pitch period.
+ const int16_t* vec1 = &signal[fs_mult_120 - peak_index];
+ // |vec2| starts at 15 ms.
+ const int16_t* vec2 = &signal[fs_mult_120];
+ // Calculate energies for |vec1| and |vec2|, assuming they both contain
+ // |peak_index| samples.
+ int32_t vec1_energy =
+ WebRtcSpl_DotProductWithScale(vec1, vec1, peak_index, scaling);
+ int32_t vec2_energy =
+ WebRtcSpl_DotProductWithScale(vec2, vec2, peak_index, scaling);
+
+ // Calculate cross-correlation between |vec1| and |vec2|.
+ int32_t cross_corr =
+ WebRtcSpl_DotProductWithScale(vec1, vec2, peak_index, scaling);
+
+ // Check if the signal seems to be active speech or not (simple VAD).
+ bool active_speech = SpeechDetection(vec1_energy, vec2_energy, peak_index,
+ scaling);
+
+ int16_t best_correlation;
+ if (!active_speech) {
+ SetParametersForPassiveSpeech(signal_len, &best_correlation, &peak_index);
+ } else {
+ // Calculate correlation:
+ // cross_corr / sqrt(vec1_energy * vec2_energy).
+
+ // Start with calculating scale values.
+ int energy1_scale = std::max(0, 16 - WebRtcSpl_NormW32(vec1_energy));
+ int energy2_scale = std::max(0, 16 - WebRtcSpl_NormW32(vec2_energy));
+
+ // Make sure total scaling is even (to simplify scale factor after sqrt).
+ if ((energy1_scale + energy2_scale) & 1) {
+ // The sum is odd.
+ energy1_scale += 1;
+ }
+
+ // Scale energies to int16_t.
+ int16_t vec1_energy_int16 =
+ static_cast<int16_t>(vec1_energy >> energy1_scale);
+ int16_t vec2_energy_int16 =
+ static_cast<int16_t>(vec2_energy >> energy2_scale);
+
+ // Calculate square-root of energy product.
+ int16_t sqrt_energy_prod = WebRtcSpl_SqrtFloor(vec1_energy_int16 *
+ vec2_energy_int16);
+
+ // Calculate cross_corr / sqrt(en1*en2) in Q14.
+ int temp_scale = 14 - (energy1_scale + energy2_scale) / 2;
+ cross_corr = WEBRTC_SPL_SHIFT_W32(cross_corr, temp_scale);
+ cross_corr = std::max(0, cross_corr); // Don't use if negative.
+ best_correlation = WebRtcSpl_DivW32W16(cross_corr, sqrt_energy_prod);
+ // Make sure |best_correlation| is no larger than 1 in Q14.
+ best_correlation = std::min(static_cast<int16_t>(16384), best_correlation);
+ }
+
+ // Check accelerate criteria and stretch the signal.
+ ReturnCodes return_value = CheckCriteriaAndStretch(
+ input, input_len, peak_index, best_correlation, active_speech, output);
+ switch (return_value) {
+ case kSuccess:
+ *length_change_samples = peak_index;
+ break;
+ case kSuccessLowEnergy:
+ *length_change_samples = peak_index;
+ break;
+ case kNoStretch:
+ case kError:
+ *length_change_samples = 0;
+ break;
+ }
+ return return_value;
+}
+
+void TimeStretch::AutoCorrelation() {
+ // Set scaling factor for cross correlation to protect against overflow.
+ int scaling = kLogCorrelationLen - WebRtcSpl_NormW32(
+ max_input_value_ * max_input_value_);
+ scaling = std::max(0, scaling);
+
+ // Calculate correlation from lag kMinLag to lag kMaxLag in 4 kHz domain.
+ int32_t auto_corr[kCorrelationLen];
+ WebRtcSpl_CrossCorrelation(auto_corr, &downsampled_input_[kMaxLag],
+ &downsampled_input_[kMaxLag - kMinLag],
+ kCorrelationLen, kMaxLag - kMinLag, scaling, -1);
+
+ // Normalize correlation to 14 bits and write to |auto_correlation_|.
+ int32_t max_corr = WebRtcSpl_MaxAbsValueW32(auto_corr, kCorrelationLen);
+ scaling = std::max(0, 17 - WebRtcSpl_NormW32(max_corr));
+ WebRtcSpl_VectorBitShiftW32ToW16(auto_correlation_, kCorrelationLen,
+ auto_corr, scaling);
+}
+
+bool TimeStretch::SpeechDetection(int32_t vec1_energy, int32_t vec2_energy,
+ int peak_index, int scaling) const {
+ // Check if the signal seems to be active speech or not (simple VAD).
+ // If (vec1_energy + vec2_energy) / (2 * peak_index) <=
+ // 8 * background_noise_energy, then we say that the signal contains no
+ // active speech.
+ // Rewrite the inequality as:
+ // (vec1_energy + vec2_energy) / 16 <= peak_index * background_noise_energy.
+ // The two sides of the inequality will be denoted |left_side| and
+ // |right_side|.
+ int32_t left_side = (vec1_energy + vec2_energy) / 16;
+ int32_t right_side;
+ if (background_noise_.initialized()) {
+ right_side = background_noise_.Energy(master_channel_);
+ } else {
+ // If noise parameters have not been estimated, use a fixed threshold.
+ right_side = 75000;
+ }
+ int right_scale = 16 - WebRtcSpl_NormW32(right_side);
+ right_scale = std::max(0, right_scale);
+ left_side = left_side >> right_scale;
+ right_side = peak_index * (right_side >> right_scale);
+
+ // Scale |left_side| properly before comparing with |right_side|.
+ // (|scaling| is the scale factor before energy calculation, thus the scale
+ // factor for the energy is 2 * scaling.)
+ if (WebRtcSpl_NormW32(left_side) < 2 * scaling) {
+ // Cannot scale only |left_side|, must scale |right_side| too.
+ int temp_scale = WebRtcSpl_NormW32(left_side);
+ left_side = left_side << temp_scale;
+ right_side = right_side >> (2 * scaling - temp_scale);
+ } else {
+ left_side = left_side << 2 * scaling;
+ }
+ return left_side > right_side;
+}
+
+} // namespace webrtc
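
A standalone sketch outside this patch, checking that the rewritten VAD inequality in SpeechDetection() agrees with the original form; the energy values are hypothetical and no extra scaling is applied:

#include <cstdint>
#include <cstdio>

int main() {
  int32_t vec1_energy = 400000, vec2_energy = 440000;  // Hypothetical.
  int32_t noise_energy = 5000;                         // Hypothetical.
  int peak_index = 80;                                 // 10 ms at 8 kHz.
  // Original form: active speech if the mean energy per sample exceeds
  // 8 times the background noise energy.
  bool original = (vec1_energy + vec2_energy) / (2 * peak_index) >
                  8 * noise_energy;
  // Rewritten form used in the code above.
  bool rewritten = (vec1_energy + vec2_energy) / 16 >
                   peak_index * noise_energy;
  printf("original=%d rewritten=%d\n", original, rewritten);  // Both agree.
  return 0;
}
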
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.h
new file mode 100644
index 00000000000..9396d8ff519
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIME_STRETCH_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIME_STRETCH_H_
+
+#include <assert.h>
+#include <string.h> // memset, size_t
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class BackgroundNoise;
+
+// This is the base class for Accelerate and PreemptiveExpand. This class
+// cannot be instantiated, but must be used through either of the derived
+// classes.
+class TimeStretch {
+ public:
+ enum ReturnCodes {
+ kSuccess = 0,
+ kSuccessLowEnergy = 1,
+ kNoStretch = 2,
+ kError = -1
+ };
+
+ TimeStretch(int sample_rate_hz, size_t num_channels,
+ const BackgroundNoise& background_noise)
+ : sample_rate_hz_(sample_rate_hz),
+ fs_mult_(sample_rate_hz / 8000),
+ num_channels_(static_cast<int>(num_channels)),
+ master_channel_(0), // First channel is master.
+ background_noise_(background_noise),
+ max_input_value_(0) {
+ assert(sample_rate_hz_ == 8000 ||
+ sample_rate_hz_ == 16000 ||
+ sample_rate_hz_ == 32000 ||
+ sample_rate_hz_ == 48000);
+ assert(num_channels_ > 0);
+ assert(static_cast<int>(master_channel_) < num_channels_);
+ memset(auto_correlation_, 0, sizeof(auto_correlation_));
+ }
+
+ virtual ~TimeStretch() {}
+
+ // This method performs the processing common to both Accelerate and
+ // PreemptiveExpand.
+ ReturnCodes Process(const int16_t* input,
+ size_t input_len,
+ AudioMultiVector* output,
+ int16_t* length_change_samples);
+
+ protected:
+ // Sets the parameters |best_correlation| and |peak_index| to suitable
+ // values when the signal contains no active speech. This method must be
+ // implemented by the sub-classes.
+ virtual void SetParametersForPassiveSpeech(size_t input_length,
+ int16_t* best_correlation,
+ int* peak_index) const = 0;
+
+ // Checks the criteria for performing the time-stretching operation and,
+ // if possible, performs the time-stretching. This method must be implemented
+ // by the sub-classes.
+ virtual ReturnCodes CheckCriteriaAndStretch(
+ const int16_t* input, size_t input_length, size_t peak_index,
+ int16_t best_correlation, bool active_speech,
+ AudioMultiVector* output) const = 0;
+
+ static const int kCorrelationLen = 50;
+ static const int kLogCorrelationLen = 6; // >= log2(kCorrelationLen).
+ static const int kMinLag = 10;
+ static const int kMaxLag = 60;
+ static const int kDownsampledLen = kCorrelationLen + kMaxLag;
+ static const int kCorrelationThreshold = 14746; // 0.9 in Q14.
+
+ const int sample_rate_hz_;
+ const int fs_mult_; // Sample rate multiplier = sample_rate_hz_ / 8000.
+ const int num_channels_;
+ const size_t master_channel_;
+ const BackgroundNoise& background_noise_;
+ int16_t max_input_value_;
+ int16_t downsampled_input_[kDownsampledLen];
+ // Adding 1 to the size of |auto_correlation_| because of how it is used
+ // by the peak-detection algorithm.
+ int16_t auto_correlation_[kCorrelationLen + 1];
+
+ private:
+ // Calculates the auto-correlation of |downsampled_input_| and writes the
+ // result to |auto_correlation_|.
+ void AutoCorrelation();
+
+ // Performs a simple voice-activity detection based on the input parameters.
+ bool SpeechDetection(int32_t vec1_energy, int32_t vec2_energy,
+ int peak_index, int scaling) const;
+
+ DISALLOW_COPY_AND_ASSIGN(TimeStretch);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIME_STRETCH_H_
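
To illustrate the contract that Accelerate and PreemptiveExpand fulfil, here is a hypothetical do-nothing subclass; it is a sketch only and not part of this patch:

#include "webrtc/modules/audio_coding/neteq/time_stretch.h"

namespace webrtc {

// Hypothetical subclass, only to show which methods a real implementation
// must provide.
class NoOpTimeStretch : public TimeStretch {
 public:
  NoOpTimeStretch(int sample_rate_hz, size_t num_channels,
                  const BackgroundNoise& background_noise)
      : TimeStretch(sample_rate_hz, num_channels, background_noise) {}

 protected:
  virtual void SetParametersForPassiveSpeech(size_t /*input_length*/,
                                             int16_t* best_correlation,
                                             int* peak_index) const {
    *best_correlation = 0;
    *peak_index = 0;
  }

  virtual ReturnCodes CheckCriteriaAndStretch(
      const int16_t* /*input*/, size_t /*input_length*/,
      size_t /*peak_index*/, int16_t /*best_correlation*/,
      bool /*active_speech*/, AudioMultiVector* /*output*/) const {
    return kNoStretch;  // Never stretch; leaves the output untouched.
  }
};

}  // namespace webrtc
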
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch_unittest.cc
new file mode 100644
index 00000000000..64789b4d436
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch_unittest.cc
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for Accelerate and PreemptiveExpand classes.
+
+#include "webrtc/modules/audio_coding/neteq/accelerate.h"
+#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+
+namespace webrtc {
+
+TEST(TimeStretch, CreateAndDestroy) {
+ const int kSampleRate = 8000;
+ const size_t kNumChannels = 1;
+ const int kOverlapSamples = 5 * kSampleRate / 8000;
+ BackgroundNoise bgn(kNumChannels);
+ Accelerate accelerate(kSampleRate, kNumChannels, bgn);
+ PreemptiveExpand preemptive_expand(
+ kSampleRate, kNumChannels, bgn, kOverlapSamples);
+}
+
+TEST(TimeStretch, CreateUsingFactory) {
+ const int kSampleRate = 8000;
+ const size_t kNumChannels = 1;
+ const int kOverlapSamples = 5 * kSampleRate / 8000;
+ BackgroundNoise bgn(kNumChannels);
+
+ AccelerateFactory accelerate_factory;
+ Accelerate* accelerate =
+ accelerate_factory.Create(kSampleRate, kNumChannels, bgn);
+ EXPECT_TRUE(accelerate != NULL);
+ delete accelerate;
+
+ PreemptiveExpandFactory preemptive_expand_factory;
+ PreemptiveExpand* preemptive_expand = preemptive_expand_factory.Create(
+ kSampleRate, kNumChannels, bgn, kOverlapSamples);
+ EXPECT_TRUE(preemptive_expand != NULL);
+ delete preemptive_expand;
+}
+
+// TODO(hlundin): Write more tests.
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc
new file mode 100644
index 00000000000..01890136a94
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
+
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+void TimestampScaler::ToInternal(Packet* packet) {
+ if (!packet) {
+ return;
+ }
+ packet->header.timestamp = ToInternal(packet->header.timestamp,
+ packet->header.payloadType);
+}
+
+void TimestampScaler::ToInternal(PacketList* packet_list) {
+ PacketList::iterator it;
+ for (it = packet_list->begin(); it != packet_list->end(); ++it) {
+ ToInternal(*it);
+ }
+}
+
+uint32_t TimestampScaler::ToInternal(uint32_t external_timestamp,
+ uint8_t rtp_payload_type) {
+ const DecoderDatabase::DecoderInfo* info =
+ decoder_database_.GetDecoderInfo(rtp_payload_type);
+ if (!info) {
+ // Payload type is unknown. Do not scale.
+ return external_timestamp;
+ }
+ switch (info->codec_type) {
+ case kDecoderG722:
+ case kDecoderG722_2ch: {
+ // Use timestamp scaling with factor 2 (two output samples per RTP
+ // timestamp).
+ numerator_ = 2;
+ denominator_ = 1;
+ break;
+ }
+ case kDecoderOpus:
+ case kDecoderOpus_2ch:
+ case kDecoderISACfb:
+ case kDecoderCNGswb48kHz: {
+ // Use timestamp scaling with factor 2/3 (32 kHz sample rate, but RTP
+ // timestamps run on 48 kHz).
+ // TODO(tlegrand): Remove scaling for kDecoderCNGswb48kHz once ACM has
+ // full 48 kHz support.
+ numerator_ = 2;
+ denominator_ = 3;
+ break;
+ }
+ case kDecoderAVT:
+ case kDecoderCNGnb:
+ case kDecoderCNGwb:
+ case kDecoderCNGswb32kHz: {
+ // Do not change the timestamp scaling settings for DTMF or CNG.
+ break;
+ }
+ default: {
+ // Do not use timestamp scaling for any other codec.
+ numerator_ = 1;
+ denominator_ = 1;
+ break;
+ }
+ }
+
+ if (!(numerator_ == 1 && denominator_ == 1)) {
+ // We have a scale factor != 1.
+ if (!first_packet_received_) {
+ external_ref_ = external_timestamp;
+ internal_ref_ = external_timestamp;
+ first_packet_received_ = true;
+ }
+ int32_t external_diff = external_timestamp - external_ref_;
+ assert(denominator_ > 0); // Should not be possible.
+ external_ref_ = external_timestamp;
+ internal_ref_ += (external_diff * numerator_) / denominator_;
+ LOG(LS_VERBOSE) << "Converting timestamp: " << external_timestamp <<
+ " -> " << internal_ref_;
+ return internal_ref_;
+ } else {
+ // No scaling.
+ return external_timestamp;
+ }
+}
+
+uint32_t TimestampScaler::ToExternal(uint32_t internal_timestamp) const {
+ if (!first_packet_received_ || (numerator_ == 1 && denominator_ == 1)) {
+ // Not initialized, or scale factor is 1.
+ return internal_timestamp;
+ } else {
+ int32_t internal_diff = internal_timestamp - internal_ref_;
+ assert(numerator_ > 0); // Should not be possible.
+ // Do not update references in this method.
+ // Switch |denominator_| and |numerator_| to convert the other way.
+ return external_ref_ + (internal_diff * denominator_) / numerator_;
+ }
+}
+
+} // namespace webrtc
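
Worked numbers for the reference update above, as a sketch outside this patch; the 20 ms packet steps (160 RTP ticks for G.722, 960 for Opus) are typical values chosen for illustration:

#include <cstdio>

// Mirrors internal_ref_ += (external_diff * numerator_) / denominator_ for
// two of the cases handled in ToInternal().
int main() {
  const int g722_diff = 160;  // G.722 uses factor 2/1.
  const int opus_diff = 960;  // Opus uses factor 2/3.
  printf("G.722: +%d external -> +%d internal\n", g722_diff, g722_diff * 2 / 1);
  printf("Opus:  +%d external -> +%d internal\n", opus_diff, opus_diff * 2 / 3);
  return 0;
}
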
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.h
new file mode 100644
index 00000000000..59b8cc7d1dc
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIMESTAMP_SCALER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIMESTAMP_SCALER_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declaration.
+class DecoderDatabase;
+
+// This class scales timestamps for codecs that need timestamp scaling.
+// This is done for codecs where one RTP timestamp does not correspond to
+// one sample.
+class TimestampScaler {
+ public:
+ explicit TimestampScaler(const DecoderDatabase& decoder_database)
+ : first_packet_received_(false),
+ numerator_(1),
+ denominator_(1),
+ external_ref_(0),
+ internal_ref_(0),
+ decoder_database_(decoder_database) {}
+
+ virtual ~TimestampScaler() {}
+
+ // Start over.
+ virtual void Reset() { first_packet_received_ = false; }
+
+ // Scale the timestamp in |packet| from external to internal.
+ virtual void ToInternal(Packet* packet);
+
+ // Scale the timestamp for all packets in |packet_list| from external to
+ // internal.
+ virtual void ToInternal(PacketList* packet_list);
+
+ // Returns the internal equivalent of |external_timestamp|, given the
+ // RTP payload type |rtp_payload_type|.
+ virtual uint32_t ToInternal(uint32_t external_timestamp,
+ uint8_t rtp_payload_type);
+
+ // Scales back to external timestamp. This is the inverse of ToInternal().
+ virtual uint32_t ToExternal(uint32_t internal_timestamp) const;
+
+ private:
+ bool first_packet_received_;
+ int numerator_;
+ int denominator_;
+ uint32_t external_ref_;
+ uint32_t internal_ref_;
+ const DecoderDatabase& decoder_database_;
+
+ DISALLOW_COPY_AND_ASSIGN(TimestampScaler);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIMESTAMP_SCALER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc
new file mode 100644
index 00000000000..8cbbfa393ac
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc
@@ -0,0 +1,327 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+
+using ::testing::Return;
+using ::testing::ReturnNull;
+using ::testing::_;
+
+namespace webrtc {
+
+TEST(TimestampScaler, TestNoScaling) {
+ MockDecoderDatabase db;
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderPCMu; // Does not use scaled timestamps.
+ static const uint8_t kRtpPayloadType = 0;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ TimestampScaler scaler(db);
+ // Test both sides of the timestamp wrap-around.
+ for (uint32_t timestamp = 0xFFFFFFFF - 5; timestamp != 5; ++timestamp) {
+ // Scale to internal timestamp.
+ EXPECT_EQ(timestamp, scaler.ToInternal(timestamp, kRtpPayloadType));
+ // Scale back.
+ EXPECT_EQ(timestamp, scaler.ToExternal(timestamp));
+ }
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+TEST(TimestampScaler, TestNoScalingLargeStep) {
+ MockDecoderDatabase db;
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderPCMu; // Does not use scaled timestamps.
+ static const uint8_t kRtpPayloadType = 0;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ TimestampScaler scaler(db);
+ // Test both sides of the timestamp wrap-around.
+ static const uint32_t kStep = 160;
+ uint32_t start_timestamp = 0;
+ // |external_timestamp| will be a large positive value.
+ start_timestamp = start_timestamp - 5 * kStep;
+ for (uint32_t timestamp = start_timestamp; timestamp != 5 * kStep;
+ timestamp += kStep) {
+ // Scale to internal timestamp.
+ EXPECT_EQ(timestamp, scaler.ToInternal(timestamp, kRtpPayloadType));
+ // Scale back.
+ EXPECT_EQ(timestamp, scaler.ToExternal(timestamp));
+ }
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+TEST(TimestampScaler, TestG722) {
+ MockDecoderDatabase db;
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderG722; // Uses a factor 2 scaling.
+ static const uint8_t kRtpPayloadType = 17;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ TimestampScaler scaler(db);
+ // Test both sides of the timestamp wrap-around.
+ uint32_t external_timestamp = 0xFFFFFFFF - 5;
+ uint32_t internal_timestamp = external_timestamp;
+ for (; external_timestamp != 5; ++external_timestamp) {
+ // Scale to internal timestamp.
+ EXPECT_EQ(internal_timestamp,
+ scaler.ToInternal(external_timestamp, kRtpPayloadType));
+ // Scale back.
+ EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
+ internal_timestamp += 2;
+ }
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+TEST(TimestampScaler, TestG722LargeStep) {
+ MockDecoderDatabase db;
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderG722; // Uses a factor 2 scaling.
+ static const uint8_t kRtpPayloadType = 17;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ TimestampScaler scaler(db);
+ // Test both sides of the timestamp wrap-around.
+ static const uint32_t kStep = 320;
+ uint32_t external_timestamp = 0;
+ // |external_timestamp| will be a large positive value.
+ external_timestamp = external_timestamp - 5 * kStep;
+ uint32_t internal_timestamp = external_timestamp;
+ for (; external_timestamp != 5 * kStep; external_timestamp += kStep) {
+ // Scale to internal timestamp.
+ EXPECT_EQ(internal_timestamp,
+ scaler.ToInternal(external_timestamp, kRtpPayloadType));
+ // Scale back.
+ EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
+ // Internal timestamp should be incremented with twice the step.
+ internal_timestamp += 2 * kStep;
+ }
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+TEST(TimestampScaler, TestG722WithCng) {
+ MockDecoderDatabase db;
+ DecoderDatabase::DecoderInfo info_g722, info_cng;
+ info_g722.codec_type = kDecoderG722; // Uses a factor 2 scaling.
+ info_cng.codec_type = kDecoderCNGwb;
+ static const uint8_t kRtpPayloadTypeG722 = 17;
+ static const uint8_t kRtpPayloadTypeCng = 13;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadTypeG722))
+ .WillRepeatedly(Return(&info_g722));
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadTypeCng))
+ .WillRepeatedly(Return(&info_cng));
+
+ TimestampScaler scaler(db);
+ // Test both sides of the timestamp wrap-around.
+ uint32_t external_timestamp = 0xFFFFFFFF - 5;
+ uint32_t internal_timestamp = external_timestamp;
+ bool next_is_cng = false;
+ for (; external_timestamp != 5; ++external_timestamp) {
+ // Alternate between G.722 and CNG every other packet.
+ if (next_is_cng) {
+ // Scale to internal timestamp.
+ EXPECT_EQ(internal_timestamp,
+ scaler.ToInternal(external_timestamp, kRtpPayloadTypeCng));
+ next_is_cng = false;
+ } else {
+ // Scale to internal timestamp.
+ EXPECT_EQ(internal_timestamp,
+ scaler.ToInternal(external_timestamp, kRtpPayloadTypeG722));
+ next_is_cng = true;
+ }
+ // Scale back.
+ EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
+ internal_timestamp += 2;
+ }
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+// Make sure that the method ToInternal(Packet* packet) is wired up correctly.
+// Since it is simply calling the other ToInternal method, we are not doing
+// as many tests here.
+TEST(TimestampScaler, TestG722Packet) {
+ MockDecoderDatabase db;
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderG722; // Uses a factor 2 scaling.
+ static const uint8_t kRtpPayloadType = 17;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ TimestampScaler scaler(db);
+ // Test both sides of the timestamp wrap-around.
+ uint32_t external_timestamp = 0xFFFFFFFF - 5;
+ uint32_t internal_timestamp = external_timestamp;
+ Packet packet;
+ packet.header.payloadType = kRtpPayloadType;
+ for (; external_timestamp != 5; ++external_timestamp) {
+ packet.header.timestamp = external_timestamp;
+ // Scale to internal timestamp.
+ scaler.ToInternal(&packet);
+ EXPECT_EQ(internal_timestamp, packet.header.timestamp);
+ internal_timestamp += 2;
+ }
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+// Make sure that the method ToInternal(PacketList* packet_list) is wired up
+// correctly. Since it is simply calling the ToInternal(Packet* packet) method,
+// we are not doing as many tests here.
+TEST(TimestampScaler, TestG722PacketList) {
+ MockDecoderDatabase db;
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderG722; // Uses a factor 2 scaling.
+ static const uint8_t kRtpPayloadType = 17;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ TimestampScaler scaler(db);
+ // Test both sides of the timestamp wrap-around.
+ uint32_t external_timestamp = 0xFFFFFFFF - 5;
+ uint32_t internal_timestamp = external_timestamp;
+ Packet packet1;
+ packet1.header.payloadType = kRtpPayloadType;
+ packet1.header.timestamp = external_timestamp;
+ Packet packet2;
+ packet2.header.payloadType = kRtpPayloadType;
+ packet2.header.timestamp = external_timestamp + 10;
+ PacketList packet_list;
+ packet_list.push_back(&packet1);
+ packet_list.push_back(&packet2);
+
+ scaler.ToInternal(&packet_list);
+ EXPECT_EQ(internal_timestamp, packet1.header.timestamp);
+ EXPECT_EQ(internal_timestamp + 20, packet2.header.timestamp);
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+TEST(TimestampScaler, TestG722Reset) {
+ MockDecoderDatabase db;
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderG722; // Uses a factor 2 scaling.
+ static const uint8_t kRtpPayloadType = 17;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ TimestampScaler scaler(db);
+ // Test both sides of the timestamp wrap-around.
+ uint32_t external_timestamp = 0xFFFFFFFF - 5;
+ uint32_t internal_timestamp = external_timestamp;
+ for (; external_timestamp != 5; ++external_timestamp) {
+ // Scale to internal timestamp.
+ EXPECT_EQ(internal_timestamp,
+ scaler.ToInternal(external_timestamp, kRtpPayloadType));
+ // Scale back.
+ EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
+ internal_timestamp += 2;
+ }
+ // Reset the scaler. After this, we expect the internal and external to start
+ // over at the same value again.
+ scaler.Reset();
+ internal_timestamp = external_timestamp;
+ for (; external_timestamp != 15; ++external_timestamp) {
+ // Scale to internal timestamp.
+ EXPECT_EQ(internal_timestamp,
+ scaler.ToInternal(external_timestamp, kRtpPayloadType));
+ // Scale back.
+ EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
+ internal_timestamp += 2;
+ }
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+TEST(TimestampScaler, TestOpusLargeStep) {
+ MockDecoderDatabase db;
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderOpus; // Uses a factor 2/3 scaling.
+ static const uint8_t kRtpPayloadType = 17;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ TimestampScaler scaler(db);
+ // Test both sides of the timestamp wrap-around.
+ static const uint32_t kStep = 960;
+ uint32_t external_timestamp = 0;
+ // |external_timestamp| will be a large positive value.
+ external_timestamp = external_timestamp - 5 * kStep;
+ uint32_t internal_timestamp = external_timestamp;
+ for (; external_timestamp != 5 * kStep; external_timestamp += kStep) {
+ // Scale to internal timestamp.
+ EXPECT_EQ(internal_timestamp,
+ scaler.ToInternal(external_timestamp, kRtpPayloadType));
+ // Scale back.
+ EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
+ // Internal timestamp should be incremented with 2/3 of the step.
+ internal_timestamp += 2 * kStep / 3;
+ }
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+TEST(TimestampScaler, TestIsacFbLargeStep) {
+ MockDecoderDatabase db;
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderISACfb; // Uses a factor 2/3 scaling.
+ static const uint8_t kRtpPayloadType = 17;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ TimestampScaler scaler(db);
+ // Test both sides of the timestamp wrap-around.
+ static const uint32_t kStep = 960;
+ uint32_t external_timestamp = 0;
+ // |external_timestamp| will be a large positive value.
+ external_timestamp = external_timestamp - 5 * kStep;
+ uint32_t internal_timestamp = external_timestamp;
+ for (; external_timestamp != 5 * kStep; external_timestamp += kStep) {
+ // Scale to internal timestamp.
+ EXPECT_EQ(internal_timestamp,
+ scaler.ToInternal(external_timestamp, kRtpPayloadType));
+ // Scale back.
+ EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
+ // Internal timestamp should be incremented with 2/3 of the step.
+ internal_timestamp += 2 * kStep / 3;
+ }
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+TEST(TimestampScaler, Failures) {
+ static const uint8_t kRtpPayloadType = 17;
+ MockDecoderDatabase db;
+ EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
+ .WillOnce(ReturnNull()); // Return NULL to indicate unknown payload type.
+
+ TimestampScaler scaler(db);
+ uint32_t timestamp = 4711; // Some number.
+ EXPECT_EQ(timestamp, scaler.ToInternal(timestamp, kRtpPayloadType));
+
+ Packet* packet = NULL;
+ scaler.ToInternal(packet); // Should not crash. That's all we can test.
+
+ EXPECT_CALL(db, Die()); // Called when database object is deleted.
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_checksum.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_checksum.h
new file mode 100644
index 00000000000..ac5682651b5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_checksum.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_CHECKSUM_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_CHECKSUM_H_
+
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/md5digest.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_sink.h"
+#include "webrtc/system_wrappers/interface/compile_assert.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+class AudioChecksum : public AudioSink {
+ public:
+ AudioChecksum() : finished_(false) {}
+
+ virtual bool WriteArray(const int16_t* audio, size_t num_samples) OVERRIDE {
+ if (finished_)
+ return false;
+
+#ifndef WEBRTC_ARCH_LITTLE_ENDIAN
+#error "Big-endian gives a different checksum"
+#endif
+ checksum_.Update(audio, num_samples * sizeof(*audio));
+ return true;
+ }
+
+ // Finalizes the computations, and returns the checksum.
+ std::string Finish() {
+ if (!finished_) {
+ finished_ = true;
+ checksum_.Finish(checksum_result_, rtc::Md5Digest::kSize);
+ }
+ return rtc::hex_encode(checksum_result_, rtc::Md5Digest::kSize);
+ }
+
+ private:
+ rtc::Md5Digest checksum_;
+ char checksum_result_[rtc::Md5Digest::kSize];
+ bool finished_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioChecksum);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_CHECKSUM_H_
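
A hypothetical usage sketch (the block size and sample rate are illustrative only):

#include <cstdint>
#include <cstdio>
#include <string>

#include "webrtc/modules/audio_coding/neteq/tools/audio_checksum.h"

int main() {
  webrtc::test::AudioChecksum checksum;
  int16_t block[160] = {0};         // 10 ms of silence at 16 kHz.
  checksum.WriteArray(block, 160);  // Further writes fail after Finish().
  std::string digest = checksum.Finish();  // Hex-encoded MD5 of the audio.
  printf("%s\n", digest.c_str());
  return 0;
}
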
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc
new file mode 100644
index 00000000000..2d2a7e3dd4a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+namespace webrtc {
+namespace test {
+
+bool AudioLoop::Init(const std::string file_name,
+ size_t max_loop_length_samples,
+ size_t block_length_samples) {
+ FILE* fp = fopen(file_name.c_str(), "rb");
+ if (!fp) return false;
+
+ audio_array_.reset(new int16_t[max_loop_length_samples +
+ block_length_samples]);
+ size_t samples_read = fread(audio_array_.get(), sizeof(int16_t),
+ max_loop_length_samples, fp);
+ fclose(fp);
+
+ // Block length must be shorter than the loop length.
+ if (block_length_samples > samples_read) return false;
+
+ // Add an extra block length of samples to the end of the array, starting
+ // over again from the beginning of the array. This is done to simplify
+ // the reading process when reading over the end of the loop.
+ memcpy(&audio_array_[samples_read], audio_array_.get(),
+ block_length_samples * sizeof(int16_t));
+
+ loop_length_samples_ = samples_read;
+ block_length_samples_ = block_length_samples;
+ return true;
+}
+
+const int16_t* AudioLoop::GetNextBlock() {
+ // Check that the AudioLoop is initialized.
+ if (block_length_samples_ == 0) return NULL;
+
+ const int16_t* output_ptr = &audio_array_[next_index_];
+ next_index_ = (next_index_ + block_length_samples_) % loop_length_samples_;
+ return output_ptr;
+}
+
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.h
new file mode 100644
index 00000000000..9647d827ac9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_LOOP_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_LOOP_H_
+
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+// Class serving as an infinite source of audio, realized by looping an audio
+// clip.
+class AudioLoop {
+ public:
+ AudioLoop()
+ : next_index_(0),
+ loop_length_samples_(0),
+ block_length_samples_(0) {
+ }
+
+ virtual ~AudioLoop() {}
+
+ // Initializes the AudioLoop by reading from |file_name|. The loop will be no
+ // longer than |max_loop_length_samples|, if the length of the file is
+ // greater. Otherwise, the loop length is the same as the file length.
+ // The audio will be delivered in blocks of |block_length_samples|.
+ // Returns false if the initialization failed, otherwise true.
+ bool Init(const std::string file_name, size_t max_loop_length_samples,
+ size_t block_length_samples);
+
+ // Returns a pointer to the next block of audio. The number given as
+ // |block_length_samples| to the Init() function determines how many samples
+ // that can be safely read from the pointer.
+ const int16_t* GetNextBlock();
+
+ private:
+ size_t next_index_;
+ size_t loop_length_samples_;
+ size_t block_length_samples_;
+ scoped_ptr<int16_t[]> audio_array_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioLoop);
+};
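+
+// Usage sketch (illustrative only; the file name and sizes are assumptions):
+//
+//   AudioLoop loop;
+//   // Loop at most 10 s of 32 kHz audio, delivered in 60 ms blocks.
+//   if (!loop.Init("speech_32kHz.pcm", 32000 * 10, 32000 * 60 / 1000))
+//     return;  // Initialization failed.
+//   const int16_t* block = loop.GetNextBlock();  // 1920 valid samples.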
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_LOOP_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_sink.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_sink.h
new file mode 100644
index 00000000000..5743c3641de
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_sink.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_SINK_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_SINK_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+// Interface class for an object receiving raw output audio from test
+// applications.
+class AudioSink {
+ public:
+ AudioSink() {}
+ virtual ~AudioSink() {}
+
+ // Writes |num_samples| from |audio| to the AudioSink. Returns true if
+ // successful, otherwise false.
+ virtual bool WriteArray(const int16_t* audio, size_t num_samples) = 0;
+
+ // Writes |audio_frame| to the AudioSink. Returns true if successful,
+ // otherwise false.
+ bool WriteAudioFrame(const AudioFrame& audio_frame) {
+ return WriteArray(
+ audio_frame.data_,
+ audio_frame.samples_per_channel_ * audio_frame.num_channels_);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioSink);
+};
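+
+// Implementation sketch (illustrative only): a hypothetical sink that just
+// counts the samples written to it.
+//
+//   class SampleCounter : public AudioSink {
+//    public:
+//     SampleCounter() : total_samples_(0) {}
+//     virtual bool WriteArray(const int16_t* audio,
+//                             size_t num_samples) OVERRIDE {
+//       total_samples_ += num_samples;
+//       return true;
+//     }
+//     size_t total_samples() const { return total_samples_; }
+//    private:
+//     size_t total_samples_;
+//   };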
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_SINK_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.cc
new file mode 100644
index 00000000000..806317320f8
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
+
+namespace webrtc {
+namespace test {
+
+InputAudioFile::InputAudioFile(const std::string file_name) {
+ fp_ = fopen(file_name.c_str(), "rb");
+}
+
+InputAudioFile::~InputAudioFile() {
+  if (fp_)
+    fclose(fp_);
+}
+
+bool InputAudioFile::Read(size_t samples, int16_t* destination) {
+ if (!fp_) {
+ return false;
+ }
+ size_t samples_read = fread(destination, sizeof(int16_t), samples, fp_);
+ if (samples_read < samples) {
+ // Rewind and read the missing samples.
+ rewind(fp_);
+ size_t missing_samples = samples - samples_read;
+ if (fread(destination, sizeof(int16_t), missing_samples, fp_) <
+ missing_samples) {
+ // Could not read enough even after rewinding the file.
+ return false;
+ }
+ }
+ return true;
+}
+
+void InputAudioFile::DuplicateInterleaved(const int16_t* source, size_t samples,
+ size_t channels,
+ int16_t* destination) {
+ for (size_t i = 0; i < samples; ++i) {
+ for (size_t j = 0; j < channels; ++j) {
+ destination[i * channels + j] = source[i];
+ }
+ }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.h
new file mode 100644
index 00000000000..274f8ea07e5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_INPUT_AUDIO_FILE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_INPUT_AUDIO_FILE_H_
+
+#include <stdio.h>
+
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+// Class for handling a looping input audio file.
+class InputAudioFile {
+ public:
+ explicit InputAudioFile(const std::string file_name);
+
+ virtual ~InputAudioFile();
+
+ // Reads |samples| elements from source file to |destination|. Returns true
+ // if the read was successful, otherwise false. If the file end is reached,
+ // the file is rewound and reading continues from the beginning.
+ // The output |destination| must have the capacity to hold |samples| elements.
+ bool Read(size_t samples, int16_t* destination);
+
+ // Creates a multi-channel signal from a mono signal. Each sample is repeated
+ // |channels| times to create an interleaved multi-channel signal where all
+ // channels are identical. The output |destination| must have the capacity to
+  // hold |samples| * |channels| elements.
+ static void DuplicateInterleaved(const int16_t* source, size_t samples,
+ size_t channels, int16_t* destination);
+
+ private:
+ FILE* fp_;
+ DISALLOW_COPY_AND_ASSIGN(InputAudioFile);
+};
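+
+// Usage sketch (illustrative only; the file name and sizes are assumptions):
+//
+//   InputAudioFile file("speech_8kHz.pcm");
+//   int16_t mono[80];                     // 10 ms at 8 kHz.
+//   if (file.Read(80, mono)) {
+//     int16_t stereo[2 * 80];
+//     InputAudioFile::DuplicateInterleaved(mono, 80, 2, stereo);
+//   }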
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_INPUT_AUDIO_FILE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
new file mode 100644
index 00000000000..433546fbcbd
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h"
+
+#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/typedefs.h"
+
+using webrtc::NetEq;
+using webrtc::test::AudioLoop;
+using webrtc::test::RtpGenerator;
+using webrtc::WebRtcRTPHeader;
+
+namespace webrtc {
+namespace test {
+
+int64_t NetEqPerformanceTest::Run(int runtime_ms,
+ int lossrate,
+ double drift_factor) {
+ const std::string kInputFileName =
+ webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+ const int kSampRateHz = 32000;
+ const webrtc::NetEqDecoder kDecoderType = webrtc::kDecoderPCM16Bswb32kHz;
+ const int kPayloadType = 95;
+
+ // Initialize NetEq instance.
+ NetEq::Config config;
+ config.sample_rate_hz = kSampRateHz;
+ NetEq* neteq = NetEq::Create(config);
+ // Register decoder in |neteq|.
+ if (neteq->RegisterPayloadType(kDecoderType, kPayloadType) != 0)
+ return -1;
+
+ // Set up AudioLoop object.
+ AudioLoop audio_loop;
+ const size_t kMaxLoopLengthSamples = kSampRateHz * 10; // 10 second loop.
+ const size_t kInputBlockSizeSamples = 60 * kSampRateHz / 1000; // 60 ms.
+ if (!audio_loop.Init(kInputFileName, kMaxLoopLengthSamples,
+ kInputBlockSizeSamples))
+ return -1;
+
+ int32_t time_now_ms = 0;
+
+ // Get first input packet.
+ WebRtcRTPHeader rtp_header;
+ RtpGenerator rtp_gen(kSampRateHz / 1000);
+ // Start with positive drift first half of simulation.
+ rtp_gen.set_drift_factor(drift_factor);
+ bool drift_flipped = false;
+ int32_t packet_input_time_ms =
+ rtp_gen.GetRtpHeader(kPayloadType, kInputBlockSizeSamples, &rtp_header);
+ const int16_t* input_samples = audio_loop.GetNextBlock();
+ if (!input_samples) exit(1);
+ uint8_t input_payload[kInputBlockSizeSamples * sizeof(int16_t)];
+ int payload_len = WebRtcPcm16b_Encode(const_cast<int16_t*>(input_samples),
+ kInputBlockSizeSamples,
+ input_payload);
+ assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t));
+
+ // Main loop.
+ webrtc::Clock* clock = webrtc::Clock::GetRealTimeClock();
+ int64_t start_time_ms = clock->TimeInMilliseconds();
+ while (time_now_ms < runtime_ms) {
+ while (packet_input_time_ms <= time_now_ms) {
+      // Drop one out of every |lossrate| packets.
+ bool lost = false;
+ if (lossrate > 0) {
+ lost = ((rtp_header.header.sequenceNumber - 1) % lossrate) == 0;
+ }
+ if (!lost) {
+ // Insert packet.
+ int error = neteq->InsertPacket(
+ rtp_header, input_payload, payload_len,
+ packet_input_time_ms * kSampRateHz / 1000);
+ if (error != NetEq::kOK)
+ return -1;
+ }
+
+ // Get next packet.
+ packet_input_time_ms = rtp_gen.GetRtpHeader(kPayloadType,
+ kInputBlockSizeSamples,
+ &rtp_header);
+ input_samples = audio_loop.GetNextBlock();
+ if (!input_samples) return -1;
+ payload_len = WebRtcPcm16b_Encode(const_cast<int16_t*>(input_samples),
+ kInputBlockSizeSamples,
+ input_payload);
+ assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t));
+ }
+
+ // Get output audio, but don't do anything with it.
+ static const int kMaxChannels = 1;
+ static const int kMaxSamplesPerMs = 48000 / 1000;
+ static const int kOutputBlockSizeMs = 10;
+ static const int kOutDataLen = kOutputBlockSizeMs * kMaxSamplesPerMs *
+ kMaxChannels;
+ int16_t out_data[kOutDataLen];
+ int num_channels;
+ int samples_per_channel;
+ int error = neteq->GetAudio(kOutDataLen, out_data, &samples_per_channel,
+ &num_channels, NULL);
+ if (error != NetEq::kOK)
+ return -1;
+
+ assert(samples_per_channel == kSampRateHz * 10 / 1000);
+
+ time_now_ms += kOutputBlockSizeMs;
+ if (time_now_ms >= runtime_ms / 2 && !drift_flipped) {
+ // Apply negative drift second half of simulation.
+ rtp_gen.set_drift_factor(-drift_factor);
+ drift_flipped = true;
+ }
+ }
+ int64_t end_time_ms = clock->TimeInMilliseconds();
+ delete neteq;
+ return end_time_ms - start_time_ms;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h
new file mode 100644
index 00000000000..d094db0f9b3
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_PERFORMANCE_TEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_PERFORMANCE_TEST_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+class NetEqPerformanceTest {
+ public:
+ // Runs a performance test with parameters as follows:
+ // |runtime_ms|: the simulation time, i.e., the duration of the audio data.
+ // |lossrate|: drop one out of |lossrate| packets, e.g., one out of 10.
+ // |drift_factor|: clock drift in [0, 1].
+  // Returns the wall-clock runtime of the simulation in ms, or -1 on error.
+ static int64_t Run(int runtime_ms, int lossrate, double drift_factor);
+};
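+
+// Call sketch (illustrative only; the parameter values are assumptions):
+//
+//   // Simulate 10 s of audio, drop one packet out of 10, 10% clock drift.
+//   int64_t runtime_ms = NetEqPerformanceTest::Run(10000, 10, 0.1);
+//   if (runtime_ms < 0) {
+//     // The test failed to set up or run.
+//   }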
+
+} // namespace test
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_PERFORMANCE_TEST_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
new file mode 100644
index 00000000000..fc5d8abaf82
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include "webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h"
+
+namespace webrtc {
+namespace test {
+
+const uint8_t kPayloadType = 95;
+const int kOutputSizeMs = 10;
+
+NetEqQualityTest::NetEqQualityTest(int block_duration_ms,
+ int in_sampling_khz,
+ int out_sampling_khz,
+ enum NetEqDecoder decoder_type,
+ int channels,
+ double drift_factor,
+ std::string in_filename,
+ std::string out_filename)
+ : decoded_time_ms_(0),
+ decodable_time_ms_(0),
+ drift_factor_(drift_factor),
+ block_duration_ms_(block_duration_ms),
+ in_sampling_khz_(in_sampling_khz),
+ out_sampling_khz_(out_sampling_khz),
+ decoder_type_(decoder_type),
+ channels_(channels),
+ in_filename_(in_filename),
+ out_filename_(out_filename),
+ in_size_samples_(in_sampling_khz_ * block_duration_ms_),
+ out_size_samples_(out_sampling_khz_ * kOutputSizeMs),
+ payload_size_bytes_(0),
+ max_payload_bytes_(0),
+ in_file_(new InputAudioFile(in_filename_)),
+ out_file_(NULL),
+ rtp_generator_(new RtpGenerator(in_sampling_khz_, 0, 0,
+ decodable_time_ms_)) {
+ NetEq::Config config;
+ config.sample_rate_hz = out_sampling_khz_ * 1000;
+ neteq_.reset(NetEq::Create(config));
+ max_payload_bytes_ = in_size_samples_ * channels_ * sizeof(int16_t);
+ in_data_.reset(new int16_t[in_size_samples_ * channels_]);
+ payload_.reset(new uint8_t[max_payload_bytes_]);
+ out_data_.reset(new int16_t[out_size_samples_ * channels_]);
+}
+
+void NetEqQualityTest::SetUp() {
+ out_file_ = fopen(out_filename_.c_str(), "wb");
+ ASSERT_TRUE(out_file_ != NULL);
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(decoder_type_, kPayloadType));
+ rtp_generator_->set_drift_factor(drift_factor_);
+}
+
+void NetEqQualityTest::TearDown() {
+ fclose(out_file_);
+}
+
+int NetEqQualityTest::Transmit() {
+ int packet_input_time_ms =
+ rtp_generator_->GetRtpHeader(kPayloadType, in_size_samples_,
+ &rtp_header_);
+ if (!PacketLost(packet_input_time_ms) && payload_size_bytes_ > 0) {
+ int ret = neteq_->InsertPacket(rtp_header_, &payload_[0],
+ payload_size_bytes_,
+ packet_input_time_ms * in_sampling_khz_);
+ if (ret != NetEq::kOK)
+ return -1;
+ }
+ return packet_input_time_ms;
+}
+
+int NetEqQualityTest::DecodeBlock() {
+ int channels;
+ int samples;
+ int ret = neteq_->GetAudio(out_size_samples_ * channels_, &out_data_[0],
+ &samples, &channels, NULL);
+
+ if (ret != NetEq::kOK) {
+ return -1;
+ } else {
+ assert(channels == channels_);
+ assert(samples == kOutputSizeMs * out_sampling_khz_);
+ fwrite(&out_data_[0], sizeof(int16_t), samples * channels, out_file_);
+ return samples;
+ }
+}
+
+void NetEqQualityTest::Simulate(int end_time_ms) {
+ int audio_size_samples;
+
+ while (decoded_time_ms_ < end_time_ms) {
+ while (decodable_time_ms_ - kOutputSizeMs < decoded_time_ms_) {
+ ASSERT_TRUE(in_file_->Read(in_size_samples_ * channels_, &in_data_[0]));
+ payload_size_bytes_ = EncodeBlock(&in_data_[0],
+ in_size_samples_, &payload_[0],
+ max_payload_bytes_);
+ decodable_time_ms_ = Transmit() + block_duration_ms_;
+ }
+ audio_size_samples = DecodeBlock();
+ if (audio_size_samples > 0) {
+ decoded_time_ms_ += audio_size_samples / out_sampling_khz_;
+ }
+ }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h
new file mode 100644
index 00000000000..87fc50794ca
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_QUALITY_TEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_QUALITY_TEST_H_
+
+#include <string>
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+class NetEqQualityTest : public ::testing::Test {
+ protected:
+ NetEqQualityTest(int block_duration_ms,
+ int in_sampling_khz,
+ int out_sampling_khz,
+ enum NetEqDecoder decoder_type,
+ int channels,
+ double drift_factor,
+ std::string in_filename,
+ std::string out_filename);
+ virtual void SetUp() OVERRIDE;
+ virtual void TearDown() OVERRIDE;
+
+  // EncodeBlock(...) does the following:
+  // 1. encodes a block of audio stored in |in_data|, with a length of
+  //    |block_size_samples| (samples per channel),
+  // 2. saves the bit stream to |payload|, which can hold at most |max_bytes|
+  //    bytes,
+  // 3. returns the length of the payload (in bytes).
+ virtual int EncodeBlock(int16_t* in_data, int block_size_samples,
+ uint8_t* payload, int max_bytes) = 0;
+
+  // PacketLost(...) determines whether a packet sent at the indicated time
+  // gets lost or not.
+ virtual bool PacketLost(int packet_input_time_ms) { return false; }
+
+ // DecodeBlock() decodes a block of audio using the payload stored in
+ // |payload_| with the length of |payload_size_bytes_| (bytes). The decoded
+ // audio is to be stored in |out_data_|.
+ int DecodeBlock();
+
+ // Transmit() uses |rtp_generator_| to generate a packet and passes it to
+ // |neteq_|.
+ int Transmit();
+
+  // Simulate(...) runs the encoding / transmitting / decoding loop up to
+  // |end_time_ms| (milliseconds). The resulting audio is stored in the file
+  // named |out_filename_|.
+ void Simulate(int end_time_ms);
+
+ private:
+ int decoded_time_ms_;
+ int decodable_time_ms_;
+ double drift_factor_;
+ const int block_duration_ms_;
+ const int in_sampling_khz_;
+ const int out_sampling_khz_;
+ const enum NetEqDecoder decoder_type_;
+ const int channels_;
+ const std::string in_filename_;
+ const std::string out_filename_;
+
+ // Number of samples per channel in a frame.
+ const int in_size_samples_;
+
+ // Expected output number of samples per channel in a frame.
+ const int out_size_samples_;
+
+ int payload_size_bytes_;
+ int max_payload_bytes_;
+
+ scoped_ptr<InputAudioFile> in_file_;
+ FILE* out_file_;
+
+ scoped_ptr<RtpGenerator> rtp_generator_;
+ scoped_ptr<NetEq> neteq_;
+
+ scoped_ptr<int16_t[]> in_data_;
+ scoped_ptr<uint8_t[]> payload_;
+ scoped_ptr<int16_t[]> out_data_;
+ WebRtcRTPHeader rtp_header_;
+};
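+
+// Subclassing sketch (illustrative only): a hypothetical PCM16b quality test
+// at 32 kHz. The file names are assumptions, and WebRtcPcm16b_Encode would
+// have to be made available by including the pcm16b codec header.
+//
+//   class Pcm16bQualityTest : public NetEqQualityTest {
+//    protected:
+//     Pcm16bQualityTest()
+//         : NetEqQualityTest(10, 32, 32, kDecoderPCM16Bswb32kHz, 1, 0.0,
+//                            "in_32kHz.pcm", "out_32kHz.pcm") {}
+//     virtual int EncodeBlock(int16_t* in_data, int block_size_samples,
+//                             uint8_t* payload, int max_bytes) OVERRIDE {
+//       return WebRtcPcm16b_Encode(in_data, block_size_samples, payload);
+//     }
+//   };
+//
+//   TEST_F(Pcm16bQualityTest, Simulates10Seconds) { Simulate(10000); }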
+
+} // namespace test
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_QUALITY_TEST_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
new file mode 100644
index 00000000000..3c5f6b0bbaa
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
@@ -0,0 +1,628 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(hlundin): The functionality in this file should be moved into one or
+// several classes.
+
+#include <assert.h>
+#include <stdio.h>
+
+#include <algorithm>
+#include <iostream>
+#include <string>
+
+#include "google/gflags.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/typedefs.h"
+
+using webrtc::NetEq;
+using webrtc::WebRtcRTPHeader;
+
+// Flag validators.
+static bool ValidatePayloadType(const char* flagname, int32_t value) {
+ if (value >= 0 && value <= 127) // Value is ok.
+ return true;
+ printf("Invalid value for --%s: %d\n", flagname, static_cast<int>(value));
+ return false;
+}
+
+// Define command line flags.
+DEFINE_int32(pcmu, 0, "RTP payload type for PCM-u");
+static const bool pcmu_dummy =
+ google::RegisterFlagValidator(&FLAGS_pcmu, &ValidatePayloadType);
+DEFINE_int32(pcma, 8, "RTP payload type for PCM-a");
+static const bool pcma_dummy =
+ google::RegisterFlagValidator(&FLAGS_pcma, &ValidatePayloadType);
+DEFINE_int32(ilbc, 102, "RTP payload type for iLBC");
+static const bool ilbc_dummy =
+ google::RegisterFlagValidator(&FLAGS_ilbc, &ValidatePayloadType);
+DEFINE_int32(isac, 103, "RTP payload type for iSAC");
+static const bool isac_dummy =
+ google::RegisterFlagValidator(&FLAGS_isac, &ValidatePayloadType);
+DEFINE_int32(isac_swb, 104, "RTP payload type for iSAC-swb (32 kHz)");
+static const bool isac_swb_dummy =
+ google::RegisterFlagValidator(&FLAGS_isac_swb, &ValidatePayloadType);
+DEFINE_int32(pcm16b, 93, "RTP payload type for PCM16b-nb (8 kHz)");
+static const bool pcm16b_dummy =
+ google::RegisterFlagValidator(&FLAGS_pcm16b, &ValidatePayloadType);
+DEFINE_int32(pcm16b_wb, 94, "RTP payload type for PCM16b-wb (16 kHz)");
+static const bool pcm16b_wb_dummy =
+ google::RegisterFlagValidator(&FLAGS_pcm16b_wb, &ValidatePayloadType);
+DEFINE_int32(pcm16b_swb32, 95, "RTP payload type for PCM16b-swb32 (32 kHz)");
+static const bool pcm16b_swb32_dummy =
+ google::RegisterFlagValidator(&FLAGS_pcm16b_swb32, &ValidatePayloadType);
+DEFINE_int32(pcm16b_swb48, 96, "RTP payload type for PCM16b-swb48 (48 kHz)");
+static const bool pcm16b_swb48_dummy =
+ google::RegisterFlagValidator(&FLAGS_pcm16b_swb48, &ValidatePayloadType);
+DEFINE_int32(g722, 9, "RTP payload type for G.722");
+static const bool g722_dummy =
+ google::RegisterFlagValidator(&FLAGS_g722, &ValidatePayloadType);
+DEFINE_int32(avt, 106, "RTP payload type for AVT/DTMF");
+static const bool avt_dummy =
+ google::RegisterFlagValidator(&FLAGS_avt, &ValidatePayloadType);
+DEFINE_int32(red, 117, "RTP payload type for redundant audio (RED)");
+static const bool red_dummy =
+ google::RegisterFlagValidator(&FLAGS_red, &ValidatePayloadType);
+DEFINE_int32(cn_nb, 13, "RTP payload type for comfort noise (8 kHz)");
+static const bool cn_nb_dummy =
+ google::RegisterFlagValidator(&FLAGS_cn_nb, &ValidatePayloadType);
+DEFINE_int32(cn_wb, 98, "RTP payload type for comfort noise (16 kHz)");
+static const bool cn_wb_dummy =
+ google::RegisterFlagValidator(&FLAGS_cn_wb, &ValidatePayloadType);
+DEFINE_int32(cn_swb32, 99, "RTP payload type for comfort noise (32 kHz)");
+static const bool cn_swb32_dummy =
+ google::RegisterFlagValidator(&FLAGS_cn_swb32, &ValidatePayloadType);
+DEFINE_int32(cn_swb48, 100, "RTP payload type for comfort noise (48 kHz)");
+static const bool cn_swb48_dummy =
+ google::RegisterFlagValidator(&FLAGS_cn_swb48, &ValidatePayloadType);
+DEFINE_bool(codec_map, false, "Prints the mapping between RTP payload type and "
+ "codec");
+DEFINE_bool(dummy_rtp, false, "The input file contains \"dummy\" RTP data, "
+ "i.e., only headers");
+DEFINE_string(replacement_audio_file, "",
+ "A PCM file that will be used to populate ""dummy"" RTP packets");
+
+// Declaring helper functions (defined further down in this file).
+std::string CodecName(webrtc::NetEqDecoder codec);
+void RegisterPayloadTypes(NetEq* neteq);
+void PrintCodecMapping();
+size_t ReplacePayload(webrtc::test::InputAudioFile* replacement_audio_file,
+ webrtc::scoped_ptr<int16_t[]>* replacement_audio,
+ webrtc::scoped_ptr<uint8_t[]>* payload,
+ size_t* payload_mem_size_bytes,
+ size_t* frame_size_samples,
+ WebRtcRTPHeader* rtp_header,
+ NETEQTEST_RTPpacket* next_rtp);
+int CodecSampleRate(uint8_t payload_type);
+int CodecTimestampRate(uint8_t payload_type);
+bool IsComfortNoise(uint8_t payload_type);
+
+int main(int argc, char* argv[]) {
+ static const int kMaxChannels = 5;
+ static const int kMaxSamplesPerMs = 48000 / 1000;
+ static const int kOutputBlockSizeMs = 10;
+
+ std::string program_name = argv[0];
+ std::string usage = "Tool for decoding an RTP dump file using NetEq.\n"
+ "Run " + program_name + " --helpshort for usage.\n"
+ "Example usage:\n" + program_name +
+ " input.rtp output.pcm\n";
+ google::SetUsageMessage(usage);
+ google::ParseCommandLineFlags(&argc, &argv, true);
+
+ if (FLAGS_codec_map) {
+ PrintCodecMapping();
+ }
+
+ if (argc != 3) {
+ if (FLAGS_codec_map) {
+ // We have already printed the codec map. Just end the program.
+ return 0;
+ }
+ // Print usage information.
+ std::cout << google::ProgramUsage();
+ return 0;
+ }
+
+ FILE* in_file = fopen(argv[1], "rb");
+ if (!in_file) {
+ std::cerr << "Cannot open input file " << argv[1] << std::endl;
+ exit(1);
+ }
+ std::cout << "Input file: " << argv[1] << std::endl;
+
+ FILE* out_file = fopen(argv[2], "wb");
+  if (!out_file) {
+ std::cerr << "Cannot open output file " << argv[2] << std::endl;
+ exit(1);
+ }
+ std::cout << "Output file: " << argv[2] << std::endl;
+
+ // Check if a replacement audio file was provided, and if so, open it.
+ bool replace_payload = false;
+ webrtc::scoped_ptr<webrtc::test::InputAudioFile> replacement_audio_file;
+ if (!FLAGS_replacement_audio_file.empty()) {
+ replacement_audio_file.reset(
+ new webrtc::test::InputAudioFile(FLAGS_replacement_audio_file));
+ replace_payload = true;
+ }
+
+ // Read RTP file header.
+ if (NETEQTEST_RTPpacket::skipFileHeader(in_file) != 0) {
+ std::cerr << "Wrong format in RTP file" << std::endl;
+ exit(1);
+ }
+
+ // Enable tracing.
+ webrtc::Trace::CreateTrace();
+ webrtc::Trace::SetTraceFile((webrtc::test::OutputPath() +
+ "neteq_trace.txt").c_str());
+ webrtc::Trace::set_level_filter(webrtc::kTraceAll);
+
+ // Initialize NetEq instance.
+ int sample_rate_hz = 16000;
+ NetEq::Config config;
+ config.sample_rate_hz = sample_rate_hz;
+ NetEq* neteq = NetEq::Create(config);
+ RegisterPayloadTypes(neteq);
+
+ // Read first packet.
+ NETEQTEST_RTPpacket* rtp;
+ NETEQTEST_RTPpacket* next_rtp = NULL;
+ if (!FLAGS_dummy_rtp) {
+ rtp = new NETEQTEST_RTPpacket();
+ if (replace_payload) {
+ next_rtp = new NETEQTEST_RTPpacket();
+ }
+ } else {
+ rtp = new NETEQTEST_DummyRTPpacket();
+ if (replace_payload) {
+ next_rtp = new NETEQTEST_DummyRTPpacket();
+ }
+ }
+ rtp->readFromFile(in_file);
+ if (rtp->dataLen() < 0) {
+ std::cout << "Warning: RTP file is empty" << std::endl;
+ }
+
+ // Set up variables for audio replacement if needed.
+ size_t input_frame_size_timestamps = 0;
+ webrtc::scoped_ptr<int16_t[]> replacement_audio;
+ webrtc::scoped_ptr<uint8_t[]> payload;
+ size_t payload_mem_size_bytes = 0;
+ if (replace_payload) {
+ // Initially assume that the frame size is 30 ms at the initial sample rate.
+ // This value will be replaced with the correct one as soon as two
+ // consecutive packets are found.
+ input_frame_size_timestamps = 30 * sample_rate_hz / 1000;
+ replacement_audio.reset(new int16_t[input_frame_size_timestamps]);
+ payload_mem_size_bytes = 2 * input_frame_size_timestamps;
+ payload.reset(new uint8_t[payload_mem_size_bytes]);
+ assert(next_rtp);
+ next_rtp->readFromFile(in_file);
+ }
+
+ // This is the main simulation loop.
+ int time_now_ms = rtp->time(); // Start immediately with the first packet.
+ int next_input_time_ms = rtp->time();
+ int next_output_time_ms = time_now_ms;
+ if (time_now_ms % kOutputBlockSizeMs != 0) {
+ // Make sure that next_output_time_ms is rounded up to the next multiple
+ // of kOutputBlockSizeMs. (Legacy bit-exactness.)
+ next_output_time_ms +=
+ kOutputBlockSizeMs - time_now_ms % kOutputBlockSizeMs;
+ }
+ while (rtp->dataLen() >= 0) {
+ // Check if it is time to insert packet.
+ while (time_now_ms >= next_input_time_ms && rtp->dataLen() >= 0) {
+ if (rtp->dataLen() > 0) {
+ // Parse RTP header.
+ WebRtcRTPHeader rtp_header;
+ rtp->parseHeader(&rtp_header);
+ uint8_t* payload_ptr = rtp->payload();
+ size_t payload_len = rtp->payloadLen();
+ if (replace_payload) {
+ payload_len = ReplacePayload(replacement_audio_file.get(),
+ &replacement_audio,
+ &payload,
+ &payload_mem_size_bytes,
+ &input_frame_size_timestamps,
+ &rtp_header,
+ next_rtp);
+ payload_ptr = payload.get();
+ }
+ int error = neteq->InsertPacket(rtp_header, payload_ptr,
+ static_cast<int>(payload_len),
+ rtp->time() * sample_rate_hz / 1000);
+ if (error != NetEq::kOK) {
+ std::cerr << "InsertPacket returned error code " <<
+ neteq->LastError() << std::endl;
+ }
+ }
+ // Get next packet from file.
+ rtp->readFromFile(in_file);
+ if (replace_payload) {
+ // At this point |rtp| contains the packet *after* |next_rtp|.
+ // Swap RTP packet objects between |rtp| and |next_rtp|.
+ NETEQTEST_RTPpacket* temp_rtp = rtp;
+ rtp = next_rtp;
+ next_rtp = temp_rtp;
+ }
+ next_input_time_ms = rtp->time();
+ }
+
+ // Check if it is time to get output audio.
+ if (time_now_ms >= next_output_time_ms) {
+ static const int kOutDataLen = kOutputBlockSizeMs * kMaxSamplesPerMs *
+ kMaxChannels;
+ int16_t out_data[kOutDataLen];
+ int num_channels;
+ int samples_per_channel;
+ int error = neteq->GetAudio(kOutDataLen, out_data, &samples_per_channel,
+ &num_channels, NULL);
+ if (error != NetEq::kOK) {
+ std::cerr << "GetAudio returned error code " <<
+ neteq->LastError() << std::endl;
+ } else {
+ // Calculate sample rate from output size.
+ sample_rate_hz = 1000 * samples_per_channel / kOutputBlockSizeMs;
+ }
+
+ // Write to file.
+ // TODO(hlundin): Make writing to file optional.
+ size_t write_len = samples_per_channel * num_channels;
+ if (fwrite(out_data, sizeof(out_data[0]), write_len, out_file) !=
+ write_len) {
+ std::cerr << "Error while writing to file" << std::endl;
+ webrtc::Trace::ReturnTrace();
+ exit(1);
+ }
+ next_output_time_ms += kOutputBlockSizeMs;
+ }
+ // Advance time to next event.
+ time_now_ms = std::min(next_input_time_ms, next_output_time_ms);
+ }
+
+ std::cout << "Simulation done" << std::endl;
+
+ fclose(in_file);
+ fclose(out_file);
+ delete rtp;
+ delete next_rtp;
+ delete neteq;
+ webrtc::Trace::ReturnTrace();
+ return 0;
+}
+
+
+// Help functions.
+
+// Maps a codec type to a printable name string.
+std::string CodecName(webrtc::NetEqDecoder codec) {
+ switch (codec) {
+ case webrtc::kDecoderPCMu:
+ return "PCM-u";
+ case webrtc::kDecoderPCMa:
+ return "PCM-a";
+ case webrtc::kDecoderILBC:
+ return "iLBC";
+ case webrtc::kDecoderISAC:
+ return "iSAC";
+ case webrtc::kDecoderISACswb:
+ return "iSAC-swb (32 kHz)";
+ case webrtc::kDecoderPCM16B:
+ return "PCM16b-nb (8 kHz)";
+ case webrtc::kDecoderPCM16Bwb:
+ return "PCM16b-wb (16 kHz)";
+ case webrtc::kDecoderPCM16Bswb32kHz:
+ return "PCM16b-swb32 (32 kHz)";
+ case webrtc::kDecoderPCM16Bswb48kHz:
+ return "PCM16b-swb48 (48 kHz)";
+ case webrtc::kDecoderG722:
+ return "G.722";
+ case webrtc::kDecoderRED:
+ return "redundant audio (RED)";
+ case webrtc::kDecoderAVT:
+ return "AVT/DTMF";
+ case webrtc::kDecoderCNGnb:
+ return "comfort noise (8 kHz)";
+ case webrtc::kDecoderCNGwb:
+ return "comfort noise (16 kHz)";
+ case webrtc::kDecoderCNGswb32kHz:
+ return "comfort noise (32 kHz)";
+ case webrtc::kDecoderCNGswb48kHz:
+ return "comfort noise (48 kHz)";
+ default:
+ assert(false);
+ return "undefined";
+ }
+}
+
+// Registers all decoders in |neteq|.
+void RegisterPayloadTypes(NetEq* neteq) {
+ assert(neteq);
+ int error;
+ error = neteq->RegisterPayloadType(webrtc::kDecoderPCMu, FLAGS_pcmu);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_pcmu <<
+ " as " << CodecName(webrtc::kDecoderPCMu).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderPCMa, FLAGS_pcma);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_pcma <<
+ " as " << CodecName(webrtc::kDecoderPCMa).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderILBC, FLAGS_ilbc);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_ilbc <<
+ " as " << CodecName(webrtc::kDecoderILBC).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderISAC, FLAGS_isac);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_isac <<
+ " as " << CodecName(webrtc::kDecoderISAC).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderISACswb, FLAGS_isac_swb);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_isac_swb <<
+ " as " << CodecName(webrtc::kDecoderISACswb).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderPCM16B, FLAGS_pcm16b);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_pcm16b <<
+ " as " << CodecName(webrtc::kDecoderPCM16B).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderPCM16Bwb,
+ FLAGS_pcm16b_wb);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_pcm16b_wb <<
+ " as " << CodecName(webrtc::kDecoderPCM16Bwb).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderPCM16Bswb32kHz,
+ FLAGS_pcm16b_swb32);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_pcm16b_swb32 <<
+ " as " << CodecName(webrtc::kDecoderPCM16Bswb32kHz).c_str() <<
+ std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderPCM16Bswb48kHz,
+ FLAGS_pcm16b_swb48);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_pcm16b_swb48 <<
+ " as " << CodecName(webrtc::kDecoderPCM16Bswb48kHz).c_str() <<
+ std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderG722, FLAGS_g722);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_g722 <<
+ " as " << CodecName(webrtc::kDecoderG722).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderAVT, FLAGS_avt);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_avt <<
+ " as " << CodecName(webrtc::kDecoderAVT).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderRED, FLAGS_red);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_red <<
+ " as " << CodecName(webrtc::kDecoderRED).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderCNGnb, FLAGS_cn_nb);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_cn_nb <<
+ " as " << CodecName(webrtc::kDecoderCNGnb).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderCNGwb, FLAGS_cn_wb);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_cn_wb <<
+ " as " << CodecName(webrtc::kDecoderCNGwb).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderCNGswb32kHz,
+ FLAGS_cn_swb32);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_cn_swb32 <<
+ " as " << CodecName(webrtc::kDecoderCNGswb32kHz).c_str() << std::endl;
+ exit(1);
+ }
+ error = neteq->RegisterPayloadType(webrtc::kDecoderCNGswb48kHz,
+ FLAGS_cn_swb48);
+ if (error) {
+ std::cerr << "Cannot register payload type " << FLAGS_cn_swb48 <<
+ " as " << CodecName(webrtc::kDecoderCNGswb48kHz).c_str() << std::endl;
+ exit(1);
+ }
+}
+
+void PrintCodecMapping() {
+ std::cout << CodecName(webrtc::kDecoderPCMu).c_str() << ": " << FLAGS_pcmu <<
+ std::endl;
+ std::cout << CodecName(webrtc::kDecoderPCMa).c_str() << ": " << FLAGS_pcma <<
+ std::endl;
+ std::cout << CodecName(webrtc::kDecoderILBC).c_str() << ": " << FLAGS_ilbc <<
+ std::endl;
+ std::cout << CodecName(webrtc::kDecoderISAC).c_str() << ": " << FLAGS_isac <<
+ std::endl;
+ std::cout << CodecName(webrtc::kDecoderISACswb).c_str() << ": " <<
+ FLAGS_isac_swb << std::endl;
+ std::cout << CodecName(webrtc::kDecoderPCM16B).c_str() << ": " <<
+ FLAGS_pcm16b << std::endl;
+ std::cout << CodecName(webrtc::kDecoderPCM16Bwb).c_str() << ": " <<
+ FLAGS_pcm16b_wb << std::endl;
+ std::cout << CodecName(webrtc::kDecoderPCM16Bswb32kHz).c_str() << ": " <<
+ FLAGS_pcm16b_swb32 << std::endl;
+ std::cout << CodecName(webrtc::kDecoderPCM16Bswb48kHz).c_str() << ": " <<
+ FLAGS_pcm16b_swb48 << std::endl;
+ std::cout << CodecName(webrtc::kDecoderG722).c_str() << ": " << FLAGS_g722 <<
+ std::endl;
+ std::cout << CodecName(webrtc::kDecoderAVT).c_str() << ": " << FLAGS_avt <<
+ std::endl;
+ std::cout << CodecName(webrtc::kDecoderRED).c_str() << ": " << FLAGS_red <<
+ std::endl;
+ std::cout << CodecName(webrtc::kDecoderCNGnb).c_str() << ": " <<
+ FLAGS_cn_nb << std::endl;
+ std::cout << CodecName(webrtc::kDecoderCNGwb).c_str() << ": " <<
+ FLAGS_cn_wb << std::endl;
+ std::cout << CodecName(webrtc::kDecoderCNGswb32kHz).c_str() << ": " <<
+ FLAGS_cn_swb32 << std::endl;
+ std::cout << CodecName(webrtc::kDecoderCNGswb48kHz).c_str() << ": " <<
+ FLAGS_cn_swb48 << std::endl;
+}
+
+size_t ReplacePayload(webrtc::test::InputAudioFile* replacement_audio_file,
+ webrtc::scoped_ptr<int16_t[]>* replacement_audio,
+ webrtc::scoped_ptr<uint8_t[]>* payload,
+ size_t* payload_mem_size_bytes,
+ size_t* frame_size_samples,
+ WebRtcRTPHeader* rtp_header,
+ NETEQTEST_RTPpacket* next_rtp) {
+ size_t payload_len = 0;
+ // Check for CNG.
+  if (IsComfortNoise(rtp_header->header.payloadType)) {
+ // If CNG, simply insert a zero-energy one-byte payload.
+ if (*payload_mem_size_bytes < 1) {
+ (*payload).reset(new uint8_t[1]);
+ *payload_mem_size_bytes = 1;
+ }
+ (*payload)[0] = 127; // Max attenuation of CNG.
+ payload_len = 1;
+ } else {
+ if (next_rtp->payloadLen() > 0) {
+ // Check if payload length has changed.
+ if (next_rtp->sequenceNumber() == rtp_header->header.sequenceNumber + 1) {
+ if (*frame_size_samples !=
+ next_rtp->timeStamp() - rtp_header->header.timestamp) {
+ *frame_size_samples =
+ next_rtp->timeStamp() - rtp_header->header.timestamp;
+ (*replacement_audio).reset(
+ new int16_t[*frame_size_samples]);
+ *payload_mem_size_bytes = 2 * *frame_size_samples;
+ (*payload).reset(new uint8_t[*payload_mem_size_bytes]);
+ }
+ }
+ }
+ // Get new speech.
+ assert((*replacement_audio).get());
+ if (CodecTimestampRate(rtp_header->header.payloadType) !=
+ CodecSampleRate(rtp_header->header.payloadType) ||
+ rtp_header->header.payloadType == FLAGS_red ||
+ rtp_header->header.payloadType == FLAGS_avt) {
+      // Some codecs have different sample and timestamp rates, and neither
+      // RED nor DTMF is supported for replacement.
+ std::cerr << "Codec not supported for audio replacement." <<
+ std::endl;
+ webrtc::Trace::ReturnTrace();
+ exit(1);
+ }
+ assert(*frame_size_samples > 0);
+ if (!replacement_audio_file->Read(*frame_size_samples,
+ (*replacement_audio).get())) {
+ std::cerr << "Could no read replacement audio file." << std::endl;
+ webrtc::Trace::ReturnTrace();
+ exit(1);
+ }
+ // Encode it as PCM16.
+ assert((*payload).get());
+ payload_len = WebRtcPcm16b_Encode((*replacement_audio).get(),
+ static_cast<int16_t>(*frame_size_samples),
+ (*payload).get());
+ assert(payload_len == 2 * *frame_size_samples);
+ // Change payload type to PCM16.
+ switch (CodecSampleRate(rtp_header->header.payloadType)) {
+ case 8000:
+ rtp_header->header.payloadType = FLAGS_pcm16b;
+ break;
+ case 16000:
+ rtp_header->header.payloadType = FLAGS_pcm16b_wb;
+ break;
+ case 32000:
+ rtp_header->header.payloadType = FLAGS_pcm16b_swb32;
+ break;
+ case 48000:
+ rtp_header->header.payloadType = FLAGS_pcm16b_swb48;
+ break;
+ default:
+ std::cerr << "Payload type " <<
+ static_cast<int>(rtp_header->header.payloadType) <<
+ " not supported or unknown." << std::endl;
+ webrtc::Trace::ReturnTrace();
+ exit(1);
+ assert(false);
+ }
+ }
+ return payload_len;
+}
+
+int CodecSampleRate(uint8_t payload_type) {
+ if (payload_type == FLAGS_pcmu ||
+ payload_type == FLAGS_pcma ||
+ payload_type == FLAGS_ilbc ||
+ payload_type == FLAGS_pcm16b ||
+ payload_type == FLAGS_cn_nb) {
+ return 8000;
+ } else if (payload_type == FLAGS_isac ||
+ payload_type == FLAGS_pcm16b_wb ||
+ payload_type == FLAGS_g722 ||
+ payload_type == FLAGS_cn_wb) {
+ return 16000;
+ } else if (payload_type == FLAGS_isac_swb ||
+ payload_type == FLAGS_pcm16b_swb32 ||
+ payload_type == FLAGS_cn_swb32) {
+ return 32000;
+ } else if (payload_type == FLAGS_pcm16b_swb48 ||
+ payload_type == FLAGS_cn_swb48) {
+ return 48000;
+ } else if (payload_type == FLAGS_avt ||
+ payload_type == FLAGS_red) {
+ return 0;
+ } else {
+ return -1;
+ }
+}
+
+int CodecTimestampRate(uint8_t payload_type) {
+ if (payload_type == FLAGS_g722) {
+ return 8000;
+ } else {
+ return CodecSampleRate(payload_type);
+ }
+}
+
+bool IsComfortNoise(uint8_t payload_type) {
+ if (payload_type == FLAGS_cn_nb ||
+ payload_type == FLAGS_cn_wb ||
+ payload_type == FLAGS_cn_swb32 ||
+ payload_type == FLAGS_cn_swb48) {
+ return true;
+ } else {
+ return false;
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/output_audio_file.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/output_audio_file.h
new file mode 100644
index 00000000000..1d6128076e1
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/output_audio_file.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_OUTPUT_AUDIO_FILE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_OUTPUT_AUDIO_FILE_H_
+
+#include <assert.h>
+#include <stdio.h>
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_sink.h"
+
+namespace webrtc {
+namespace test {
+
+class OutputAudioFile : public AudioSink {
+ public:
+ // Creates an OutputAudioFile, opening a file named |file_name| for writing.
+ // The file format is 16-bit signed host-endian PCM.
+ explicit OutputAudioFile(const std::string& file_name) {
+ out_file_ = fopen(file_name.c_str(), "wb");
+ }
+
+ virtual ~OutputAudioFile() {
+ if (out_file_)
+ fclose(out_file_);
+ }
+
+ virtual bool WriteArray(const int16_t* audio, size_t num_samples) OVERRIDE {
+ assert(out_file_);
+ return fwrite(audio, sizeof(*audio), num_samples, out_file_) == num_samples;
+ }
+
+ private:
+ FILE* out_file_;
+
+ DISALLOW_COPY_AND_ASSIGN(OutputAudioFile);
+};
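+
+// Usage sketch (illustrative only; the file name is an assumption, and
+// |audio| / |num_samples| would come from, e.g., NetEq::GetAudio()):
+//
+//   OutputAudioFile out_file("decoded_output.pcm");
+//   if (!out_file.WriteArray(audio, num_samples)) {
+//     // Handle the write error.
+//   }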
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_OUTPUT_AUDIO_FILE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.cc
new file mode 100644
index 00000000000..d8fb7134f10
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.cc
@@ -0,0 +1,155 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+
+namespace webrtc {
+namespace test {
+
+Packet::Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ double time_ms,
+ const RtpHeaderParser& parser)
+ : payload_memory_(packet_memory),
+ payload_(NULL),
+ packet_length_bytes_(allocated_bytes),
+ payload_length_bytes_(0),
+ virtual_packet_length_bytes_(allocated_bytes),
+ virtual_payload_length_bytes_(0),
+ time_ms_(time_ms) {
+ valid_header_ = ParseHeader(parser);
+}
+
+Packet::Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ size_t virtual_packet_length_bytes,
+ double time_ms,
+ const RtpHeaderParser& parser)
+ : payload_memory_(packet_memory),
+ payload_(NULL),
+ packet_length_bytes_(allocated_bytes),
+ payload_length_bytes_(0),
+ virtual_packet_length_bytes_(virtual_packet_length_bytes),
+ virtual_payload_length_bytes_(0),
+ time_ms_(time_ms) {
+ valid_header_ = ParseHeader(parser);
+}
+
+Packet::Packet(uint8_t* packet_memory, size_t allocated_bytes, double time_ms)
+ : payload_memory_(packet_memory),
+ payload_(NULL),
+ packet_length_bytes_(allocated_bytes),
+ payload_length_bytes_(0),
+ virtual_packet_length_bytes_(allocated_bytes),
+ virtual_payload_length_bytes_(0),
+ time_ms_(time_ms) {
+ scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ valid_header_ = ParseHeader(*parser);
+}
+
+Packet::Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ size_t virtual_packet_length_bytes,
+ double time_ms)
+ : payload_memory_(packet_memory),
+ payload_(NULL),
+ packet_length_bytes_(allocated_bytes),
+ payload_length_bytes_(0),
+ virtual_packet_length_bytes_(virtual_packet_length_bytes),
+ virtual_payload_length_bytes_(0),
+ time_ms_(time_ms) {
+ scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ valid_header_ = ParseHeader(*parser);
+}
+
+bool Packet::ExtractRedHeaders(std::list<RTPHeader*>* headers) const {
+ //
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // |1| block PT | timestamp offset | block length |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // |1| ... |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // |0| block PT |
+ // +-+-+-+-+-+-+-+-+
+ //
+
+ assert(payload_);
+ const uint8_t* payload_ptr = payload_;
+ const uint8_t* payload_end_ptr = payload_ptr + payload_length_bytes_;
+
+ // Find all RED headers with the extension bit set to 1. That is, all headers
+ // but the last one.
+ while ((payload_ptr < payload_end_ptr) && (*payload_ptr & 0x80)) {
+ RTPHeader* header = new RTPHeader;
+ CopyToHeader(header);
+ header->payloadType = payload_ptr[0] & 0x7F;
+ uint32_t offset = (payload_ptr[1] << 6) + ((payload_ptr[2] & 0xFC) >> 2);
+ header->timestamp -= offset;
+ headers->push_front(header);
+ payload_ptr += 4;
+ }
+ // Last header.
+ assert(payload_ptr < payload_end_ptr);
+ if (payload_ptr >= payload_end_ptr) {
+ return false; // Payload too short.
+ }
+ RTPHeader* header = new RTPHeader;
+ CopyToHeader(header);
+ header->payloadType = payload_ptr[0] & 0x7F;
+ headers->push_front(header);
+ return true;
+}
+
+void Packet::DeleteRedHeaders(std::list<RTPHeader*>* headers) {
+ while (!headers->empty()) {
+ delete headers->front();
+ headers->pop_front();
+ }
+}
+
+bool Packet::ParseHeader(const RtpHeaderParser& parser) {
+ bool valid_header = parser.Parse(
+ payload_memory_.get(), static_cast<int>(packet_length_bytes_), &header_);
+ assert(valid_header);
+ if (!valid_header) {
+ return false;
+ }
+ assert(header_.headerLength <= packet_length_bytes_);
+ payload_ = &payload_memory_[header_.headerLength];
+ assert(packet_length_bytes_ >= header_.headerLength);
+ payload_length_bytes_ = packet_length_bytes_ - header_.headerLength;
+ assert(virtual_packet_length_bytes_ >= header_.headerLength);
+ virtual_payload_length_bytes_ =
+ virtual_packet_length_bytes_ - header_.headerLength;
+ return true;
+}
+
+void Packet::CopyToHeader(RTPHeader* destination) const {
+ destination->markerBit = header_.markerBit;
+ destination->payloadType = header_.payloadType;
+ destination->sequenceNumber = header_.sequenceNumber;
+ destination->timestamp = header_.timestamp;
+ destination->ssrc = header_.ssrc;
+ destination->numCSRCs = header_.numCSRCs;
+ destination->paddingLength = header_.paddingLength;
+ destination->headerLength = header_.headerLength;
+ destination->payload_type_frequency = header_.payload_type_frequency;
+ memcpy(&destination->arrOfCSRCs,
+ &header_.arrOfCSRCs,
+ sizeof(header_.arrOfCSRCs));
+ memcpy(
+ &destination->extension, &header_.extension, sizeof(header_.extension));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.h
new file mode 100644
index 00000000000..eb8ce28a227
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_H_
+
+#include <list>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_types.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class RtpHeaderParser;
+
+namespace test {
+
+// Class for handling RTP packets in test applications.
+class Packet {
+ public:
+ // Creates a packet, with the packet payload (including header bytes) in
+ // |packet_memory|. The length of |packet_memory| is |allocated_bytes|.
+ // The new object assumes ownership of |packet_memory| and will delete it
+ // when the Packet object is deleted. The |time_ms| is an extra time
+ // associated with this packet, typically used to denote arrival time.
+ // The first bytes in |packet_memory| will be parsed using |parser|.
+ Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ double time_ms,
+ const RtpHeaderParser& parser);
+
+ // Same as above, but with the extra argument |virtual_packet_length_bytes|.
+ // This is typically used when reading RTP dump files that only contain the
+ // RTP headers, and no payload (a.k.a RTP dummy files or RTP light). The
+ // |virtual_packet_length_bytes| tells what size the packet had on wire,
+ // including the now discarded payload, whereas |allocated_bytes| is the
+ // length of the remaining payload (typically only the RTP header).
+ Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ size_t virtual_packet_length_bytes,
+ double time_ms,
+ const RtpHeaderParser& parser);
+
+ // The following two constructors are the same as above, but without a
+ // parser. Note that when the object is constructed using any of these
+ // methods, the header will be parsed using a default RtpHeaderParser object.
+ // In particular, RTP header extensions won't be parsed.
+ Packet(uint8_t* packet_memory, size_t allocated_bytes, double time_ms);
+
+ Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ size_t virtual_packet_length_bytes,
+ double time_ms);
+
+ virtual ~Packet() {}
+
+ // Parses the first bytes of the RTP payload, interpreting them as RED headers
+ // according to RFC 2198. The headers will be inserted into |headers|. The
+ // caller of the method assumes ownership of the objects in the list, and
+ // must delete them properly.
+ bool ExtractRedHeaders(std::list<RTPHeader*>* headers) const;
+
+ // Deletes all RTPHeader objects in |headers|, but does not delete |headers|
+ // itself.
+ static void DeleteRedHeaders(std::list<RTPHeader*>* headers);
+
+ const uint8_t* payload() const { return payload_; }
+
+ size_t packet_length_bytes() const { return packet_length_bytes_; }
+
+ size_t payload_length_bytes() const { return payload_length_bytes_; }
+
+ size_t virtual_packet_length_bytes() const {
+ return virtual_packet_length_bytes_;
+ }
+
+ size_t virtual_payload_length_bytes() const {
+ return virtual_payload_length_bytes_;
+ }
+
+ const RTPHeader& header() const { return header_; }
+
+ void set_time_ms(double time) { time_ms_ = time; }
+ double time_ms() const { return time_ms_; }
+ bool valid_header() const { return valid_header_; }
+
+ private:
+ bool ParseHeader(const RtpHeaderParser& parser);
+ void CopyToHeader(RTPHeader* destination) const;
+
+ RTPHeader header_;
+ scoped_ptr<uint8_t[]> payload_memory_;
+ const uint8_t* payload_; // First byte after header.
+ const size_t packet_length_bytes_; // Total length of packet.
+ size_t payload_length_bytes_; // Length of the payload, after RTP header.
+ // Zero for dummy RTP packets.
+ // Virtual lengths are used when parsing RTP header files (dummy RTP files).
+ const size_t virtual_packet_length_bytes_;
+ size_t virtual_payload_length_bytes_;
+ double time_ms_; // Used to denote a packet's arrival time.
+ bool valid_header_; // Set by the RtpHeaderParser.
+
+ DISALLOW_COPY_AND_ASSIGN(Packet);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_H_
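Illustration only, not part of this diff: a minimal sketch of driving the Packet API declared above from a test, assuming the caller has one complete RTP packet of |len| bytes allocated with new[] and an arrival time |arrival_ms|.

// Hypothetical usage sketch; |len| and |arrival_ms| are assumed inputs.
// Needs "webrtc/modules/audio_coding/neteq/tools/packet.h" and <list>.
uint8_t* data = new uint8_t[len];  // Filled with one complete RTP packet.
webrtc::test::Packet packet(data, len, arrival_ms);  // Takes ownership of |data|.
if (packet.valid_header()) {
  const uint8_t* payload = packet.payload();  // First byte after the RTP header.
  size_t payload_len = packet.payload_length_bytes();
  // If the payload is RED (RFC 2198), the per-block headers can be pulled out.
  std::list<webrtc::RTPHeader*> red_headers;
  if (packet.ExtractRedHeaders(&red_headers)) {
    // ... inspect |red_headers| ...
    webrtc::test::Packet::DeleteRedHeaders(&red_headers);  // Caller owns them.
  }
}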
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_source.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_source.h
new file mode 100644
index 00000000000..669bc14e461
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_source.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_SOURCE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_SOURCE_H_
+
+#include "webrtc/base/constructormagic.h"
+
+namespace webrtc {
+namespace test {
+
+class Packet;
+
+// Interface class for an object delivering RTP packets to test applications.
+class PacketSource {
+ public:
+ PacketSource() {}
+ virtual ~PacketSource() {}
+
+ // Returns a pointer to the next packet. Returns NULL if the source is
+ // depleted, or if an error occurred.
+ virtual Packet* NextPacket() = 0;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(PacketSource);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_SOURCE_H_
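Illustration only, not part of this diff: PacketSource leaves everything but NextPacket() to the subclass. The class below is a made-up example, not an existing WebRTC type; it hands out pre-built Packet objects from a vector (needs <vector> and packet.h in addition to this header).

// Hypothetical PacketSource returning pre-built packets; NULL when depleted.
class VectorPacketSource : public webrtc::test::PacketSource {
 public:
  explicit VectorPacketSource(const std::vector<webrtc::test::Packet*>& packets)
      : packets_(packets), next_(0) {}
  virtual ~VectorPacketSource() {
    // Delete any packets that were never handed out to the caller.
    for (size_t i = next_; i < packets_.size(); ++i) delete packets_[i];
  }
  virtual webrtc::test::Packet* NextPacket() {
    return next_ < packets_.size() ? packets_[next_++] : NULL;
  }
 private:
  std::vector<webrtc::test::Packet*> packets_;
  size_t next_;
};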
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_unittest.cc
new file mode 100644
index 00000000000..df844ee84f7
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_unittest.cc
@@ -0,0 +1,202 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for the test::Packet class.
+
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+
+#include "gtest/gtest.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+const int kHeaderLengthBytes = 12;
+
+void MakeRtpHeader(int payload_type,
+ int seq_number,
+ uint32_t timestamp,
+ uint32_t ssrc,
+ uint8_t* rtp_data) {
+ rtp_data[0] = 0x80;
+ rtp_data[1] = payload_type & 0xFF;
+ rtp_data[2] = (seq_number >> 8) & 0xFF;
+ rtp_data[3] = (seq_number) & 0xFF;
+ rtp_data[4] = (timestamp >> 24) & 0xFF;
+ rtp_data[5] = (timestamp >> 16) & 0xFF;
+ rtp_data[6] = (timestamp >> 8) & 0xFF;
+ rtp_data[7] = timestamp & 0xFF;
+ rtp_data[8] = (ssrc >> 24) & 0xFF;
+ rtp_data[9] = (ssrc >> 16) & 0xFF;
+ rtp_data[10] = (ssrc >> 8) & 0xFF;
+ rtp_data[11] = ssrc & 0xFF;
+}
+} // namespace
+
+TEST(TestPacket, RegularPacket) {
+ const size_t kPacketLengthBytes = 100;
+ uint8_t* packet_memory = new uint8_t[kPacketLengthBytes];
+ const uint8_t kPayloadType = 17;
+ const uint16_t kSequenceNumber = 4711;
+ const uint32_t kTimestamp = 47114711;
+ const uint32_t kSsrc = 0x12345678;
+ MakeRtpHeader(
+ kPayloadType, kSequenceNumber, kTimestamp, kSsrc, packet_memory);
+ const double kPacketTime = 1.0;
+ // Hand over ownership of |packet_memory| to |packet|.
+ Packet packet(packet_memory, kPacketLengthBytes, kPacketTime);
+ ASSERT_TRUE(packet.valid_header());
+ EXPECT_EQ(kPayloadType, packet.header().payloadType);
+ EXPECT_EQ(kSequenceNumber, packet.header().sequenceNumber);
+ EXPECT_EQ(kTimestamp, packet.header().timestamp);
+ EXPECT_EQ(kSsrc, packet.header().ssrc);
+ EXPECT_EQ(0, packet.header().numCSRCs);
+ EXPECT_EQ(kPacketLengthBytes, packet.packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.payload_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes, packet.virtual_packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.virtual_payload_length_bytes());
+ EXPECT_EQ(kPacketTime, packet.time_ms());
+}
+
+TEST(TestPacket, DummyPacket) {
+ const size_t kPacketLengthBytes = kHeaderLengthBytes; // Only RTP header.
+ const size_t kVirtualPacketLengthBytes = 100;
+ uint8_t* packet_memory = new uint8_t[kPacketLengthBytes];
+ const uint8_t kPayloadType = 17;
+ const uint16_t kSequenceNumber = 4711;
+ const uint32_t kTimestamp = 47114711;
+ const uint32_t kSsrc = 0x12345678;
+ MakeRtpHeader(
+ kPayloadType, kSequenceNumber, kTimestamp, kSsrc, packet_memory);
+ const double kPacketTime = 1.0;
+ // Hand over ownership of |packet_memory| to |packet|.
+ Packet packet(packet_memory,
+ kPacketLengthBytes,
+ kVirtualPacketLengthBytes,
+ kPacketTime);
+ ASSERT_TRUE(packet.valid_header());
+ EXPECT_EQ(kPayloadType, packet.header().payloadType);
+ EXPECT_EQ(kSequenceNumber, packet.header().sequenceNumber);
+ EXPECT_EQ(kTimestamp, packet.header().timestamp);
+ EXPECT_EQ(kSsrc, packet.header().ssrc);
+ EXPECT_EQ(0, packet.header().numCSRCs);
+ EXPECT_EQ(kPacketLengthBytes, packet.packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.payload_length_bytes());
+ EXPECT_EQ(kVirtualPacketLengthBytes, packet.virtual_packet_length_bytes());
+ EXPECT_EQ(kVirtualPacketLengthBytes - kHeaderLengthBytes,
+ packet.virtual_payload_length_bytes());
+ EXPECT_EQ(kPacketTime, packet.time_ms());
+}
+
+namespace {
+// Writes one RED block header starting at |rtp_data|, according to RFC 2198.
+// Returns the number of bytes written (1 or 4).
+//
+// Format if |last_payload| is false:
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |1| block PT | timestamp offset | block length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// Format if |last_payload| is true:
+// 0 1 2 3 4 5 6 7
+// +-+-+-+-+-+-+-+-+
+// |0| Block PT |
+// +-+-+-+-+-+-+-+-+
+
+int MakeRedHeader(int payload_type,
+ uint32_t timestamp_offset,
+ int block_length,
+ bool last_payload,
+ uint8_t* rtp_data) {
+ rtp_data[0] = 0x80 | (payload_type & 0x7F); // Set the first bit to 1.
+ if (last_payload) {
+ rtp_data[0] &= 0x7F; // Reset the first bit to 0 to indicate the last block.
+ return 1;
+ }
+ rtp_data[1] = timestamp_offset >> 6;
+ rtp_data[2] = (timestamp_offset & 0x3F) << 2;
+ rtp_data[2] |= block_length >> 8;
+ rtp_data[3] = block_length & 0xFF;
+ return 4;
+}
+} // namespace
+
+TEST(TestPacket, RED) {
+ const size_t kPacketLengthBytes = 100;
+ uint8_t* packet_memory = new uint8_t[kPacketLengthBytes];
+ const uint8_t kRedPayloadType = 17;
+ const uint16_t kSequenceNumber = 4711;
+ const uint32_t kTimestamp = 47114711;
+ const uint32_t kSsrc = 0x12345678;
+ MakeRtpHeader(
+ kRedPayloadType, kSequenceNumber, kTimestamp, kSsrc, packet_memory);
+ // Create four RED headers.
+ // Payload types are the same as the block index; the timestamp offset is 100
+ // times the block index.
+ const int kRedBlocks = 4;
+ uint8_t* payload_ptr =
+ &packet_memory[kHeaderLengthBytes]; // First byte after header.
+ for (int i = 0; i < kRedBlocks; ++i) {
+ int payload_type = i;
+ // Offset value is not used for the last block.
+ uint32_t timestamp_offset = 100 * i;
+ int block_length = 10 * i;
+ bool last_block = (i == kRedBlocks - 1);
+ payload_ptr += MakeRedHeader(
+ payload_type, timestamp_offset, block_length, last_block, payload_ptr);
+ }
+ const double kPacketTime = 1.0;
+ // Hand over ownership of |packet_memory| to |packet|.
+ Packet packet(packet_memory, kPacketLengthBytes, kPacketTime);
+ ASSERT_TRUE(packet.valid_header());
+ EXPECT_EQ(kRedPayloadType, packet.header().payloadType);
+ EXPECT_EQ(kSequenceNumber, packet.header().sequenceNumber);
+ EXPECT_EQ(kTimestamp, packet.header().timestamp);
+ EXPECT_EQ(kSsrc, packet.header().ssrc);
+ EXPECT_EQ(0, packet.header().numCSRCs);
+ EXPECT_EQ(kPacketLengthBytes, packet.packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.payload_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes, packet.virtual_packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.virtual_payload_length_bytes());
+ EXPECT_EQ(kPacketTime, packet.time_ms());
+ std::list<RTPHeader*> red_headers;
+ EXPECT_TRUE(packet.ExtractRedHeaders(&red_headers));
+ EXPECT_EQ(kRedBlocks, static_cast<int>(red_headers.size()));
+ int block_index = 0;
+ for (std::list<RTPHeader*>::reverse_iterator it = red_headers.rbegin();
+ it != red_headers.rend();
+ ++it) {
+ // Reading list from the back, since the extraction puts the main payload
+ // (which is the last one on wire) first.
+ RTPHeader* red_block = *it;
+ EXPECT_EQ(block_index, red_block->payloadType);
+ EXPECT_EQ(kSequenceNumber, red_block->sequenceNumber);
+ if (block_index == kRedBlocks - 1) {
+ // Last block has zero offset per definition.
+ EXPECT_EQ(kTimestamp, red_block->timestamp);
+ } else {
+ EXPECT_EQ(kTimestamp - 100 * block_index, red_block->timestamp);
+ }
+ EXPECT_EQ(kSsrc, red_block->ssrc);
+ EXPECT_EQ(0, red_block->numCSRCs);
+ ++block_index;
+ }
+ Packet::DeleteRedHeaders(&red_headers);
+}
+
+} // namespace test
+} // namespace webrtc
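Illustration only, not part of this diff: decoding the 12 bytes that MakeRtpHeader() above produces is just the reverse bit packing. The helper name is hypothetical.

// Hypothetical inverse of MakeRtpHeader(): reads the fixed 12-byte RTP header.
void ParseFixedRtpHeader(const uint8_t* rtp_data,
                         int* payload_type,
                         uint16_t* seq_number,
                         uint32_t* timestamp,
                         uint32_t* ssrc) {
  *payload_type = rtp_data[1] & 0x7F;  // PT is 7 bits; bit 7 is the marker bit.
  *seq_number = static_cast<uint16_t>((rtp_data[2] << 8) | rtp_data[3]);
  *timestamp = (static_cast<uint32_t>(rtp_data[4]) << 24) |
               (static_cast<uint32_t>(rtp_data[5]) << 16) |
               (static_cast<uint32_t>(rtp_data[6]) << 8) | rtp_data[7];
  *ssrc = (static_cast<uint32_t>(rtp_data[8]) << 24) |
          (static_cast<uint32_t>(rtp_data[9]) << 16) |
          (static_cast<uint32_t>(rtp_data[10]) << 8) | rtp_data[11];
}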
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_analyze.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_analyze.cc
new file mode 100644
index 00000000000..773cc2c8962
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_analyze.cc
@@ -0,0 +1,147 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <stdio.h>
+#include <vector>
+
+#include "google/gflags.h"
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+// Flag validator.
+static bool ValidatePayloadType(const char* flagname, int32_t value) {
+ if (value >= 0 && value <= 127) // Value is ok.
+ return true;
+ printf("Invalid value for --%s: %d\n", flagname, static_cast<int>(value));
+ return false;
+}
+static bool ValidateExtensionId(const char* flagname, int32_t value) {
+ if (value > 0 && value <= 255) // Value is ok.
+ return true;
+ printf("Invalid value for --%s: %d\n", flagname, static_cast<int>(value));
+ return false;
+}
+
+// Define command line flags.
+DEFINE_int32(red, 117, "RTP payload type for RED");
+static const bool red_dummy =
+ google::RegisterFlagValidator(&FLAGS_red, &ValidatePayloadType);
+DEFINE_int32(audio_level, 1, "Extension ID for audio level (RFC 6464)");
+static const bool audio_level_dummy =
+ google::RegisterFlagValidator(&FLAGS_audio_level, &ValidateExtensionId);
+
+int main(int argc, char* argv[]) {
+ std::string program_name = argv[0];
+ std::string usage =
+ "Tool for parsing an RTP dump file to text output.\n"
+ "Run " +
+ program_name +
+ " --helpshort for usage.\n"
+ "Example usage:\n" +
+ program_name + " input.rtp output.txt\n\n" +
+ "Output is sent to stdout if no output file is given." +
+ "Note that this tool can read files with our without payloads.";
+ google::SetUsageMessage(usage);
+ google::ParseCommandLineFlags(&argc, &argv, true);
+
+ if (argc != 2 && argc != 3) {
+ // Print usage information.
+ printf("%s", google::ProgramUsage());
+ return 0;
+ }
+
+ FILE* in_file = fopen(argv[1], "rb");
+ if (!in_file) {
+ printf("Cannot open input file %s\n", argv[1]);
+ return -1;
+ }
+ printf("Input file: %s\n", argv[1]);
+ webrtc::scoped_ptr<webrtc::test::RtpFileSource> file_source(
+ webrtc::test::RtpFileSource::Create(argv[1]));
+ assert(file_source.get());
+ // Set RTP extension ID.
+ bool print_audio_level = false;
+ if (!google::GetCommandLineFlagInfoOrDie("audio_level").is_default) {
+ print_audio_level = true;
+ file_source->RegisterRtpHeaderExtension(webrtc::kRtpExtensionAudioLevel,
+ FLAGS_audio_level);
+ }
+
+ FILE* out_file;
+ if (argc == 3) {
+ out_file = fopen(argv[2], "wt");
+ if (!out_file) {
+ printf("Cannot open output file %s\n", argv[2]);
+ return -1;
+ }
+ printf("Output file: %s\n\n", argv[2]);
+ } else {
+ out_file = stdout;
+ }
+
+ // Print file header.
+ fprintf(out_file, "SeqNo TimeStamp SendTime Size PT M SSRC");
+ if (print_audio_level) {
+ fprintf(out_file, " AuLvl (V)");
+ }
+ fprintf(out_file, "\n");
+
+ webrtc::scoped_ptr<webrtc::test::Packet> packet;
+ while (!file_source->EndOfFile()) {
+ packet.reset(file_source->NextPacket());
+ if (!packet.get()) {
+ // This is probably an RTCP packet. Move on to the next one.
+ continue;
+ }
+ assert(packet.get());
+ // Write packet data to file.
+ fprintf(out_file,
+ "%5u %10u %10u %5i %5i %2i %#08X",
+ packet->header().sequenceNumber,
+ packet->header().timestamp,
+ static_cast<unsigned int>(packet->time_ms()),
+ static_cast<int>(packet->packet_length_bytes()),
+ packet->header().payloadType,
+ packet->header().markerBit,
+ packet->header().ssrc);
+ if (print_audio_level && packet->header().extension.hasAudioLevel) {
+ // |audioLevel| consists of one bit for "V" followed by a 7-bit level.
+ fprintf(out_file,
+ " %5u (%1i)",
+ packet->header().extension.audioLevel & 0x7F,
+ (packet->header().extension.audioLevel & 0x80) == 0 ? 0 : 1);
+ }
+ fprintf(out_file, "\n");
+
+ if (packet->header().payloadType == FLAGS_red) {
+ std::list<webrtc::RTPHeader*> red_headers;
+ packet->ExtractRedHeaders(&red_headers);
+ while (!red_headers.empty()) {
+ webrtc::RTPHeader* red = red_headers.front();
+ assert(red);
+ fprintf(out_file,
+ "* %5u %10u %10u %5i\n",
+ red->sequenceNumber,
+ red->timestamp,
+ static_cast<unsigned int>(packet->time_ms()),
+ red->payloadType);
+ red_headers.pop_front();
+ delete red;
+ }
+ }
+ }
+
+ fclose(in_file);
+ fclose(out_file);
+
+ return 0;
+}
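Illustration only, not part of this diff: the audio-level byte printed above packs a voice-activity flag and a 7-bit level (in -dBov) into one octet, per RFC 6464. A minimal decode, with a made-up helper name:

// Hypothetical helper splitting an RFC 6464 audio-level byte into its fields.
void DecodeAudioLevel(uint8_t audio_level_byte, bool* voice, int* level_dbov) {
  *voice = (audio_level_byte & 0x80) != 0;  // "V" bit in the most significant bit.
  *level_dbov = audio_level_byte & 0x7F;    // Level 0..127, in -dBov.
}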
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc
new file mode 100644
index 00000000000..6490d46857c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc
@@ -0,0 +1,147 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h"
+
+#include <assert.h>
+#include <string.h>
+#ifdef WIN32
+#include <winsock2.h>
+#else
+#include <netinet/in.h>
+#endif
+
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+
+namespace webrtc {
+namespace test {
+
+RtpFileSource* RtpFileSource::Create(const std::string& file_name) {
+ RtpFileSource* source = new RtpFileSource;
+ assert(source);
+ if (!source->OpenFile(file_name) || !source->SkipFileHeader()) {
+ assert(false);
+ delete source;
+ return NULL;
+ }
+ return source;
+}
+
+RtpFileSource::~RtpFileSource() {
+ if (in_file_)
+ fclose(in_file_);
+}
+
+bool RtpFileSource::RegisterRtpHeaderExtension(RTPExtensionType type,
+ uint8_t id) {
+ assert(parser_.get());
+ return parser_->RegisterRtpHeaderExtension(type, id);
+}
+
+Packet* RtpFileSource::NextPacket() {
+ while (!EndOfFile()) {
+ uint16_t length;
+ if (fread(&length, sizeof(length), 1, in_file_) == 0) {
+ assert(false);
+ return NULL;
+ }
+ length = ntohs(length);
+
+ uint16_t plen;
+ if (fread(&plen, sizeof(plen), 1, in_file_) == 0) {
+ assert(false);
+ return NULL;
+ }
+ plen = ntohs(plen);
+
+ uint32_t offset;
+ if (fread(&offset, sizeof(offset), 1, in_file_) == 0) {
+ assert(false);
+ return NULL;
+ }
+ offset = ntohl(offset);
+
+ // Use length here because a plen of 0 specifies RTCP.
+ assert(length >= kPacketHeaderSize);
+ size_t packet_size_bytes = length - kPacketHeaderSize;
+ if (packet_size_bytes == 0) {
+ // May be an RTCP packet.
+ // Read the next one.
+ continue;
+ }
+ scoped_ptr<uint8_t[]> packet_memory(new uint8_t[packet_size_bytes]);
+ if (fread(packet_memory.get(), 1, packet_size_bytes, in_file_) !=
+ packet_size_bytes) {
+ assert(false);
+ return NULL;
+ }
+ scoped_ptr<Packet> packet(new Packet(packet_memory.release(),
+ packet_size_bytes,
+ plen,
+ offset,
+ *parser_.get()));
+ if (!packet->valid_header()) {
+ assert(false);
+ return NULL;
+ }
+ return packet.release();
+ }
+ return NULL;
+}
+
+bool RtpFileSource::EndOfFile() const {
+ assert(in_file_);
+ return ftell(in_file_) >= file_end_;
+}
+
+RtpFileSource::RtpFileSource()
+ : PacketSource(),
+ in_file_(NULL),
+ file_end_(-1),
+ parser_(RtpHeaderParser::Create()) {}
+
+bool RtpFileSource::OpenFile(const std::string& file_name) {
+ in_file_ = fopen(file_name.c_str(), "rb");
+ assert(in_file_);
+ if (in_file_ == NULL) {
+ return false;
+ }
+
+ // Find out how long the file is.
+ fseek(in_file_, 0, SEEK_END);
+ file_end_ = ftell(in_file_);
+ rewind(in_file_);
+ return true;
+}
+
+bool RtpFileSource::SkipFileHeader() {
+ char firstline[kFirstLineLength];
+ assert(in_file_);
+ if (fgets(firstline, kFirstLineLength, in_file_) == NULL) {
+ assert(false);
+ return false;
+ }
+ // Check that the first line is ok.
+ if ((strncmp(firstline, "#!rtpplay1.0", 12) != 0) &&
+ (strncmp(firstline, "#!RTPencode1.0", 14) != 0)) {
+ assert(false);
+ return false;
+ }
+ // Skip the file header.
+ if (fseek(in_file_, kRtpFileHeaderSize, SEEK_CUR) != 0) {
+ assert(false);
+ return false;
+ }
+ return true;
+}
+
+} // namespace test
+} // namespace webrtc
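Illustration only, not part of this diff: NextPacket() above walks rtpplay-style per-packet records. A sketch of the on-disk layout it assumes (all fields big-endian; kPacketHeaderSize covers the first 8 bytes):

// Hypothetical view of one rtpdump record as consumed by NextPacket().
struct RtpDumpRecord {
  uint16_t length;   // Record length, including this 8-byte record header.
  uint16_t plen;     // RTP packet length on the wire; 0 indicates RTCP.
  uint32_t offset;   // Arrival time in ms from the start of the recording.
  // Followed by (length - 8) bytes of RTP data; header-only in dummy dumps.
};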
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h
new file mode 100644
index 00000000000..6b92a88698f
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_FILE_SOURCE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_FILE_SOURCE_H_
+
+#include <stdio.h>
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/neteq/tools/packet_source.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class RtpHeaderParser;
+
+namespace test {
+
+class RtpFileSource : public PacketSource {
+ public:
+ // Creates an RtpFileSource reading from |file_name|. If the file cannot be
+ // opened, or has the wrong format, NULL will be returned.
+ static RtpFileSource* Create(const std::string& file_name);
+
+ virtual ~RtpFileSource();
+
+ // Registers an RTP header extension and binds it to |id|.
+ virtual bool RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id);
+
+ // Returns a pointer to the next packet. Returns NULL if end of file was
+ // reached, or if the data was corrupt.
+ virtual Packet* NextPacket();
+
+ // Returns true if the end of file has been reached.
+ virtual bool EndOfFile() const;
+
+ private:
+ static const int kFirstLineLength = 40;
+ static const int kRtpFileHeaderSize = 4 + 4 + 4 + 2 + 2;
+ static const size_t kPacketHeaderSize = 8;
+
+ RtpFileSource();
+
+ bool OpenFile(const std::string& file_name);
+
+ bool SkipFileHeader();
+
+ FILE* in_file_;
+ int64_t file_end_;
+ scoped_ptr<RtpHeaderParser> parser_;
+
+ DISALLOW_COPY_AND_ASSIGN(RtpFileSource);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_FILE_SOURCE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.cc
new file mode 100644
index 00000000000..17ac209f1d9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.cc
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
+
+namespace webrtc {
+namespace test {
+
+uint32_t RtpGenerator::GetRtpHeader(uint8_t payload_type,
+ size_t payload_length_samples,
+ WebRtcRTPHeader* rtp_header) {
+ assert(rtp_header);
+ if (!rtp_header) {
+ return 0;
+ }
+ rtp_header->header.sequenceNumber = seq_number_++;
+ rtp_header->header.timestamp = timestamp_;
+ timestamp_ += static_cast<uint32_t>(payload_length_samples);
+ rtp_header->header.payloadType = payload_type;
+ rtp_header->header.markerBit = false;
+ rtp_header->header.ssrc = ssrc_;
+ rtp_header->header.numCSRCs = 0;
+ rtp_header->frameType = kAudioFrameSpeech;
+
+ uint32_t this_send_time = next_send_time_ms_;
+ assert(samples_per_ms_ > 0);
+ next_send_time_ms_ += ((1.0 + drift_factor_) * payload_length_samples) /
+ samples_per_ms_;
+ return this_send_time;
+}
+
+void RtpGenerator::set_drift_factor(double factor) {
+ if (factor > -1.0) {
+ drift_factor_ = factor;
+ }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h
new file mode 100644
index 00000000000..d3824c8d22d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_GENERATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_GENERATOR_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+// Class for generating RTP headers.
+class RtpGenerator {
+ public:
+ RtpGenerator(int samples_per_ms,
+ uint16_t start_seq_number = 0,
+ uint32_t start_timestamp = 0,
+ uint32_t start_send_time_ms = 0,
+ uint32_t ssrc = 0x12345678)
+ : seq_number_(start_seq_number),
+ timestamp_(start_timestamp),
+ next_send_time_ms_(start_send_time_ms),
+ ssrc_(ssrc),
+ samples_per_ms_(samples_per_ms),
+ drift_factor_(0.0) {
+ }
+
+ // Writes the next RTP header to |rtp_header|, which will be of type
+ // |payload_type|. Returns the send time for this packet (in ms). The value of
+ // |payload_length_samples| determines the send time for the next packet.
+ uint32_t GetRtpHeader(uint8_t payload_type, size_t payload_length_samples,
+ WebRtcRTPHeader* rtp_header);
+
+ void set_drift_factor(double factor);
+
+ private:
+ uint16_t seq_number_;
+ uint32_t timestamp_;
+ uint32_t next_send_time_ms_;
+ const uint32_t ssrc_;
+ const int samples_per_ms_;
+ double drift_factor_;
+ DISALLOW_COPY_AND_ASSIGN(RtpGenerator);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_GENERATOR_H_
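Illustration only, not part of this diff: to make the send-time arithmetic concrete, at 48 samples per ms a 960-sample payload advances the send time by 960 / 48 = 20 ms, and a drift factor of 0.05 stretches that to 21 ms. A hypothetical usage sketch, with payload type 96 and the 48 kHz rate as assumptions:

// Hypothetical driver for RtpGenerator; constants are assumed values.
webrtc::test::RtpGenerator generator(48);  // 48 samples per ms (48 kHz audio).
generator.set_drift_factor(0.05);          // Stretch inter-packet time by 5%.
webrtc::WebRtcRTPHeader rtp_header;
for (int i = 0; i < 10; ++i) {
  // Each call fills |rtp_header|, returns the send time for this packet, and
  // advances the sequence number, the timestamp (by 960) and the next send
  // time (by 21 ms with the drift factor above).
  uint32_t send_time_ms = generator.GetRtpHeader(96, 960, &rtp_header);
  // |send_time_ms| would drive a simulated clock or an RTP dump writer here.
}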
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/unmute_signal.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/unmute_signal.c
deleted file mode 100644
index 3128f21f4a0..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/unmute_signal.c
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This function "unmutes" a vector on a sample by sample basis.
- */
-
-#include "dsp_helpfunctions.h"
-
-#include "signal_processing_library.h"
-
-
-void WebRtcNetEQ_UnmuteSignal(int16_t *pw16_inVec, int16_t *startMuteFact,
- int16_t *pw16_outVec, int16_t unmuteFact,
- int16_t N)
-{
- int i;
- uint16_t w16_tmp;
- int32_t w32_tmp;
-
- w16_tmp = (uint16_t) *startMuteFact;
- w32_tmp = WEBRTC_SPL_LSHIFT_W32((int32_t)w16_tmp,6) + 32;
- for (i = 0; i < N; i++)
- {
- pw16_outVec[i]
- = (int16_t) ((WEBRTC_SPL_MUL_16_16(w16_tmp, pw16_inVec[i]) + 8192) >> 14);
- w32_tmp += unmuteFact;
- w32_tmp = WEBRTC_SPL_MAX(0, w32_tmp);
- w16_tmp = (uint16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 6); /* 20 - 14 = 6 */
- w16_tmp = WEBRTC_SPL_MIN(16384, w16_tmp);
- }
- *startMuteFact = (int16_t) w16_tmp;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq.c
deleted file mode 100644
index fad690d0810..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq.c
+++ /dev/null
@@ -1,1769 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of main NetEQ API.
- */
-
-#include "webrtc_neteq.h"
-#include "webrtc_neteq_internal.h"
-
-#include <assert.h>
-#include <string.h>
-
-#include "typedefs.h"
-#include "signal_processing_library.h"
-
-#include "neteq_error_codes.h"
-#include "mcu_dsp_common.h"
-#include "rtcp.h"
-
-#define RETURN_ON_ERROR( macroExpr, macroInstPtr ) { \
- if ((macroExpr) != 0) { \
- if ((macroExpr) == -1) { \
- (macroInstPtr)->ErrorCode = - (NETEQ_OTHER_ERROR); \
- } else { \
- (macroInstPtr)->ErrorCode = -((int16_t) (macroExpr)); \
- } \
- return(-1); \
- } }
-
-int WebRtcNetEQ_strncpy(char *strDest, int numberOfElements,
- const char *strSource, int count)
-{
- /* check vector lengths */
- if (count > numberOfElements)
- {
- strDest[0] = '\0';
- return (-1);
- }
- else
- {
- strncpy(strDest, strSource, count);
- return (0);
- }
-}
-
-/**********************************************************
- * NETEQ Functions
- */
-
-/*****************************************
- * Error functions
- */
-
-int WebRtcNetEQ_GetErrorCode(void *inst)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- return (NetEqMainInst->ErrorCode);
-}
-
-int WebRtcNetEQ_GetErrorName(int errorCode, char *errorName, int maxStrLen)
-{
- if ((errorName == NULL) || (maxStrLen <= 0))
- {
- return (-1);
- }
-
- if (errorCode < 0)
- {
- errorCode = -errorCode; // absolute value
- }
-
- switch (errorCode)
- {
- case 1: // could be -1
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "OTHER_ERROR", maxStrLen);
- break;
- }
- case 1001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_INSTRUCTION", maxStrLen);
- break;
- }
- case 1002:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_NETWORK_TYPE", maxStrLen);
- break;
- }
- case 1003:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_DELAYVALUE", maxStrLen);
- break;
- }
- case 1004:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_PLAYOUTMODE", maxStrLen);
- break;
- }
- case 1005:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CORRUPT_INSTANCE", maxStrLen);
- break;
- }
- case 1006:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "ILLEGAL_MASTER_SLAVE_SWITCH", maxStrLen);
- break;
- }
- case 1007:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "MASTER_SLAVE_ERROR", maxStrLen);
- break;
- }
- case 2001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "UNKNOWN_BUFSTAT_DECISION", maxStrLen);
- break;
- }
- case 2002:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECOUT_ERROR_DECODING", maxStrLen);
- break;
- }
- case 2003:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECOUT_ERROR_SAMPLEUNDERRUN", maxStrLen);
- break;
- }
- case 2004:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECOUT_ERROR_DECODED_TOO_MUCH",
- maxStrLen);
- break;
- }
- case 3001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECIN_CNG_ERROR", maxStrLen);
- break;
- }
- case 3002:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECIN_UNKNOWNPAYLOAD", maxStrLen);
- break;
- }
- case 3003:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECIN_BUFFERINSERT_ERROR", maxStrLen);
- break;
- }
- case 4001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_INIT_ERROR", maxStrLen);
- break;
- }
- case 4002:
- case 4003:
- case 4004:
- case 4005:
- case 4006:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_INSERT_ERROR1", maxStrLen);
- break;
- }
- case 4007:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "UNKNOWN_G723_HEADER", maxStrLen);
- break;
- }
- case 4008:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_NONEXISTING_PACKET", maxStrLen);
- break;
- }
- case 4009:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_NOT_INITIALIZED", maxStrLen);
- break;
- }
- case 4010:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "AMBIGUOUS_ILBC_FRAME_SIZE", maxStrLen);
- break;
- }
- case 5001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_FULL", maxStrLen);
- break;
- }
- case 5002:
- case 5003:
- case 5004:
- case 5005:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_NOT_EXIST", maxStrLen);
- break;
- }
- case 5006:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_UNKNOWN_CODEC", maxStrLen);
- break;
- }
- case 5007:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_PAYLOAD_TAKEN", maxStrLen);
- break;
- }
- case 5008:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_UNSUPPORTED_CODEC", maxStrLen);
- break;
- }
- case 5009:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_UNSUPPORTED_FS", maxStrLen);
- break;
- }
- case 6001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_DEC_PARAMETER_ERROR", maxStrLen);
- break;
- }
- case 6002:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_INSERT_ERROR", maxStrLen);
- break;
- }
- case 6003:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_GEN_UNKNOWN_SAMP_FREQ", maxStrLen);
- break;
- }
- case 6004:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_NOT_SUPPORTED", maxStrLen);
- break;
- }
- case 7001:
- case 7002:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RED_SPLIT_ERROR", maxStrLen);
- break;
- }
- case 7003:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RTP_TOO_SHORT_PACKET", maxStrLen);
- break;
- }
- case 7004:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RTP_CORRUPT_PACKET", maxStrLen);
- break;
- }
- default:
- {
- /* check for decoder error ranges */
- if (errorCode >= 6010 && errorCode <= 6810)
- {
- /* iSAC error code */
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "iSAC ERROR", maxStrLen);
- break;
- }
-
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "UNKNOWN_ERROR", maxStrLen);
- return (-1);
- }
- }
-
- return (0);
-}
-
-/* Assign functions (create not allowed in order to avoid malloc in lib) */
-int WebRtcNetEQ_AssignSize(int *sizeinbytes)
-{
- *sizeinbytes = (sizeof(MainInst_t) * 2) / sizeof(int16_t);
- return (0);
-}
-
-int WebRtcNetEQ_Assign(void **inst, void *NETEQ_inst_Addr)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) NETEQ_inst_Addr;
- *inst = NETEQ_inst_Addr;
- if (*inst == NULL) return (-1);
-
- WebRtcSpl_Init();
-
- /* Clear memory */
- WebRtcSpl_MemSetW16((int16_t*) NetEqMainInst, 0,
- (sizeof(MainInst_t) / sizeof(int16_t)));
- ok = WebRtcNetEQ_McuReset(&NetEqMainInst->MCUinst);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (0);
-}
-
-int WebRtcNetEQ_GetRecommendedBufferSize(void *inst, const enum WebRtcNetEQDecoder *codec,
- int noOfCodecs, enum WebRtcNetEQNetworkType nwType,
- int *MaxNoOfPackets, int *sizeinbytes,
- int* per_packet_overhead_bytes)
-{
- int ok = 0;
- int multiplier;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- *MaxNoOfPackets = 0;
- *sizeinbytes = 0;
- ok = WebRtcNetEQ_GetDefaultCodecSettings(codec, noOfCodecs, sizeinbytes,
- MaxNoOfPackets,
- per_packet_overhead_bytes);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- if (nwType == kUDPNormal)
- {
- multiplier = 1;
- }
- else if (nwType == kUDPVideoSync)
- {
- multiplier = 4;
- }
- else if (nwType == kTCPNormal)
- {
- multiplier = 4;
- }
- else if (nwType == kTCPLargeJitter)
- {
- multiplier = 8;
- }
- else if (nwType == kTCPXLargeJitter)
- {
- multiplier = 12;
- }
- else
- {
- NetEqMainInst->ErrorCode = -FAULTY_NETWORK_TYPE;
- return (-1);
- }
- *MaxNoOfPackets = (*MaxNoOfPackets) * multiplier;
- *sizeinbytes = (*sizeinbytes) * multiplier;
- return 0;
-}
-
-int WebRtcNetEQ_AssignBuffer(void *inst, int MaxNoOfPackets, void *NETEQ_Buffer_Addr,
- int sizeinbytes)
-{
- int ok;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- ok = WebRtcNetEQ_PacketBufferInit(&NetEqMainInst->MCUinst.PacketBuffer_inst,
- MaxNoOfPackets, (int16_t*) NETEQ_Buffer_Addr, (sizeinbytes >> 1));
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-/************************************************
- * Init functions
- */
-
-/****************************************************************************
- * WebRtcNetEQ_Init(...)
- *
- * Initialize NetEQ.
- *
- * Input:
- * - inst : NetEQ instance
- * - fs : Initial sample rate in Hz (may change with payload)
- *
- * Output:
- * - inst : Initialized NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_Init(void *inst, uint16_t fs)
-{
- int ok = 0;
-
- /* Typecast inst to internal instance format */
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
-#ifdef NETEQ_VAD
- /* Start out with no PostDecode VAD instance */
- NetEqMainInst->DSPinst.VADInst.VADState = NULL;
- /* Also set all VAD function pointers to NULL */
- NetEqMainInst->DSPinst.VADInst.initFunction = NULL;
- NetEqMainInst->DSPinst.VADInst.setmodeFunction = NULL;
- NetEqMainInst->DSPinst.VADInst.VADFunction = NULL;
-#endif /* NETEQ_VAD */
-
- ok = WebRtcNetEQ_DSPinit(NetEqMainInst); /* Init addresses between MCU and DSP */
- RETURN_ON_ERROR(ok, NetEqMainInst);
-
- ok = WebRtcNetEQ_DSPInit(&NetEqMainInst->DSPinst, fs); /* Init dsp side */
- RETURN_ON_ERROR(ok, NetEqMainInst);
- /* set BGN mode to default, since it is not cleared by DSP init function */
- NetEqMainInst->DSPinst.BGNInst.bgnMode = BGN_ON;
-
- /* init statistics functions and counters */
- ok = WebRtcNetEQ_ClearInCallStats(&NetEqMainInst->DSPinst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
- ok = WebRtcNetEQ_ClearPostCallStats(&NetEqMainInst->DSPinst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
- ok = WebRtcNetEQ_ResetMcuJitterStat(&NetEqMainInst->MCUinst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
-
- /* flush packet buffer */
- ok = WebRtcNetEQ_PacketBufferFlush(&NetEqMainInst->MCUinst.PacketBuffer_inst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
-
- /* set some variables to initial values */
- NetEqMainInst->MCUinst.current_Codec = -1;
- NetEqMainInst->MCUinst.current_Payload = -1;
- NetEqMainInst->MCUinst.first_packet = 1;
- NetEqMainInst->MCUinst.one_desc = 0;
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs = 0;
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.minimum_delay_ms = 0;
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.maximum_delay_ms =
- 10000;
- NetEqMainInst->MCUinst.NoOfExpandCalls = 0;
- NetEqMainInst->MCUinst.fs = fs;
-
- /* Not in AV-sync by default. */
- NetEqMainInst->MCUinst.av_sync = 0;
-
-#ifdef NETEQ_ATEVENT_DECODE
- /* init DTMF decoder */
- ok = WebRtcNetEQ_DtmfDecoderInit(&(NetEqMainInst->MCUinst.DTMF_inst),fs,560);
- RETURN_ON_ERROR(ok, NetEqMainInst);
-#endif
-
- /* init RTCP statistics */
- WebRtcNetEQ_RTCPInit(&(NetEqMainInst->MCUinst.RTCP_inst), 0);
-
- /* set BufferStat struct to zero */
- WebRtcSpl_MemSetW16((int16_t*) &(NetEqMainInst->MCUinst.BufferStat_inst), 0,
- sizeof(BufstatsInst_t) / sizeof(int16_t));
-
- /* reset automode */
- WebRtcNetEQ_ResetAutomode(&(NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst),
- NetEqMainInst->MCUinst.PacketBuffer_inst.maxInsertPositions);
-
- NetEqMainInst->ErrorCode = 0;
-
-#ifdef NETEQ_STEREO
- /* set master/slave info to undecided */
- NetEqMainInst->masterSlave = 0;
-#endif
-
- /* Set to an invalid value. */
- NetEqMainInst->MCUinst.decoded_packet_sequence_number = -1;
- NetEqMainInst->MCUinst.decoded_packet_timestamp = 0;
-
- return (ok);
-}
-
-int WebRtcNetEQ_FlushBuffers(void *inst)
-{
- int ok = 0;
-
- /* Typecast inst to internal instance format */
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
- /* Flush packet buffer */
- ok = WebRtcNetEQ_PacketBufferFlush(&NetEqMainInst->MCUinst.PacketBuffer_inst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
-
- /* Set MCU to wait for new codec */
- NetEqMainInst->MCUinst.first_packet = 1;
-
- /* Flush speech buffer */
- ok = WebRtcNetEQ_FlushSpeechBuffer(&NetEqMainInst->DSPinst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
-
- return 0;
-}
-
-int WebRtcNetEQ_SetAVTPlayout(void *inst, int PlayoutAVTon)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
-#ifdef NETEQ_ATEVENT_DECODE
- NetEqMainInst->MCUinst.AVT_PlayoutOn = PlayoutAVTon;
- return(0);
-#else
- if (PlayoutAVTon != 0)
- {
- NetEqMainInst->ErrorCode = -DTMF_NOT_SUPPORTED;
- return (-1);
- }
- else
- {
- return (0);
- }
-#endif
-}
-
-int WebRtcNetEQ_SetExtraDelay(void *inst, int DelayInMs) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- if ((DelayInMs < 0) || (DelayInMs > 10000)) {
- NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
- return (-1);
- }
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs = DelayInMs;
- return (0);
-}
-
-int WebRtcNetEQ_SetMinimumDelay(void *inst, int minimum_delay_ms) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return -1;
- if (minimum_delay_ms < 0 || minimum_delay_ms > 10000) {
- NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
- return -1;
- }
- if ((NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.maximum_delay_ms >
- 0) && (minimum_delay_ms >
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.maximum_delay_ms)) {
- NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
- return -1;
- }
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.minimum_delay_ms =
- minimum_delay_ms;
- return 0;
-}
-
-int WebRtcNetEQ_SetMaximumDelay(void *inst, int maximum_delay_ms) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return -1;
- if (maximum_delay_ms < 0 || maximum_delay_ms > 10000) {
- NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
- return -1;
- }
- if (maximum_delay_ms <
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.minimum_delay_ms) {
- NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
- return -1;
- }
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.maximum_delay_ms =
- maximum_delay_ms;
- return 0;
-}
-
-int WebRtcNetEQ_SetPlayoutMode(void *inst, enum WebRtcNetEQPlayoutMode playoutMode)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- if ((playoutMode != kPlayoutOn) && (playoutMode != kPlayoutOff) && (playoutMode
- != kPlayoutFax) && (playoutMode != kPlayoutStreaming))
- {
- NetEqMainInst->ErrorCode = -FAULTY_PLAYOUTMODE;
- return (-1);
- }
- else
- {
- NetEqMainInst->MCUinst.NetEqPlayoutMode = playoutMode;
- return (0);
- }
-}
-
-int WebRtcNetEQ_SetBGNMode(void *inst, enum WebRtcNetEQBGNMode bgnMode)
-{
-
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- /* Instance sanity */
- if (NetEqMainInst == NULL) return (-1);
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->MCUinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
- NetEqMainInst->DSPinst.BGNInst.bgnMode = (enum BGNMode) bgnMode;
-
- return (0);
-}
-
-int WebRtcNetEQ_GetBGNMode(const void *inst, enum WebRtcNetEQBGNMode *bgnMode)
-{
-
- const MainInst_t *NetEqMainInst = (const MainInst_t*) inst;
-
- /* Instance sanity */
- if (NetEqMainInst == NULL) return (-1);
-
- *bgnMode = (enum WebRtcNetEQBGNMode) NetEqMainInst->DSPinst.BGNInst.bgnMode;
-
- return (0);
-}
-
-/************************************************
- * CodecDB functions
- */
-
-int WebRtcNetEQ_CodecDbReset(void *inst)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- ok = WebRtcNetEQ_DbReset(&NetEqMainInst->MCUinst.codec_DB_inst);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
-
- /* set function pointers to NULL to prevent RecOut from using the codec */
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeRCU = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcAddLatePkt = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeInit = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodePLC = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcGetMDinfo = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcUpdBWEst = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcGetErrorCode = NULL;
-
- return (0);
-}
-
-int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, int16_t *UsedEntries,
- int16_t *MaxEntries)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- *MaxEntries = NUM_CODECS;
- *UsedEntries = NetEqMainInst->MCUinst.codec_DB_inst.nrOfCodecs;
- return (0);
-}
-
-int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, int16_t Entry,
- enum WebRtcNetEQDecoder *codec)
-{
- int i;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- *codec = (enum WebRtcNetEQDecoder) 0;
- if ((Entry >= 0) && (Entry < NetEqMainInst->MCUinst.codec_DB_inst.nrOfCodecs))
- {
- for (i = 0; i < NUM_TOTAL_CODECS; i++)
- {
- if (NetEqMainInst->MCUinst.codec_DB_inst.position[i] == Entry)
- {
- *codec = (enum WebRtcNetEQDecoder) i;
- }
- }
- }
- else
- {
- NetEqMainInst->ErrorCode = -(CODEC_DB_NOT_EXIST1);
- return (-1);
- }
- return (0);
-}
-
-int WebRtcNetEQ_CodecDbAdd(void *inst, WebRtcNetEQ_CodecDef *codecInst)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- ok = WebRtcNetEQ_DbAdd(&NetEqMainInst->MCUinst.codec_DB_inst, codecInst->codec,
- codecInst->payloadType, codecInst->funcDecode, codecInst->funcDecodeRCU,
- codecInst->funcDecodePLC, codecInst->funcDecodeInit, codecInst->funcAddLatePkt,
- codecInst->funcGetMDinfo, codecInst->funcGetPitch, codecInst->funcUpdBWEst,
- codecInst->funcDurationEst, codecInst->funcGetErrorCode,
- codecInst->codec_state, codecInst->codec_fs);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-int WebRtcNetEQ_CodecDbRemove(void *inst, enum WebRtcNetEQDecoder codec)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
-
- /* check if currently used codec is being removed */
- if (NetEqMainInst->MCUinst.current_Codec == (int16_t) codec)
- {
- /* set function pointers to NULL to prevent RecOut from using the codec */
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeRCU = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcAddLatePkt = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeInit = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodePLC = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcGetMDinfo = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcUpdBWEst = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcGetErrorCode = NULL;
- }
-
- ok = WebRtcNetEQ_DbRemove(&NetEqMainInst->MCUinst.codec_DB_inst, codec);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-/*********************************
- * Real-time functions
- */
-
-int WebRtcNetEQ_RecIn(void *inst, int16_t *p_w16datagramstart, int16_t w16_RTPlen,
- uint32_t uw32_timeRec)
-{
- int ok = 0;
- RTPPacket_t RTPpacket;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->MCUinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
- /* Parse RTP header */
- ok = WebRtcNetEQ_RTPPayloadInfo(p_w16datagramstart, w16_RTPlen, &RTPpacket);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
-
- ok = WebRtcNetEQ_RecInInternal(&NetEqMainInst->MCUinst, &RTPpacket, uw32_timeRec);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_RecInRTPStruct(...)
- *
- * Alternative RecIn function, used when the RTP data has already been
- * parsed into an RTP info struct (WebRtcNetEQ_RTPInfo).
- *
- * Input:
- * - inst : NetEQ instance
- * - rtpInfo : Pointer to RTP info
- * - payloadPtr : Pointer to the RTP payload (first byte after header)
- * - payloadLenBytes : Length (in bytes) of the payload in payloadPtr
- * - timeRec : Receive time (in timestamps of the used codec)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
- const uint8_t *payloadPtr, int16_t payloadLenBytes,
- uint32_t uw32_timeRec)
-{
- int ok = 0;
- RTPPacket_t RTPpacket;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->MCUinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
- /* Load NetEQ's RTP struct from Module RTP struct */
- RTPpacket.payloadType = rtpInfo->payloadType;
- RTPpacket.seqNumber = rtpInfo->sequenceNumber;
- RTPpacket.timeStamp = rtpInfo->timeStamp;
- RTPpacket.ssrc = rtpInfo->SSRC;
- RTPpacket.payload = (const int16_t*) payloadPtr;
- RTPpacket.payloadLen = payloadLenBytes;
- RTPpacket.starts_byte1 = 0;
-
- ok = WebRtcNetEQ_RecInInternal(&NetEqMainInst->MCUinst, &RTPpacket, uw32_timeRec);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-int WebRtcNetEQ_RecOut(void *inst, int16_t *pw16_outData, int16_t *pw16_len)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-#ifdef NETEQ_STEREO
- MasterSlaveInfo msInfo;
- msInfo.msMode = NETEQ_MONO;
-#endif
-
- if (NetEqMainInst == NULL) return (-1);
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
-#ifdef NETEQ_STEREO
- NetEqMainInst->DSPinst.msInfo = &msInfo;
-#endif
-
- ok = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
- pw16_len, 0 /* not BGN only */, NetEqMainInst->MCUinst.av_sync);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_RecOutMasterSlave(...)
- *
- * RecOut function for running several NetEQ instances in master/slave mode.
- * One master can be used to control several slaves.
- *
- * Input:
- * - inst : NetEQ instance
- * - isMaster : Non-zero indicates that this is the master channel
- * - msInfo : (slave only) Information from master
- *
- * Output:
- * - inst : Updated NetEQ instance
- * - pw16_outData : Pointer to vector where output should be written
- * - pw16_len : Pointer to variable where output length is returned
- * - msInfo : (master only) Information to slave(s)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RecOutMasterSlave(void *inst, int16_t *pw16_outData,
- int16_t *pw16_len, void *msInfo,
- int16_t isMaster)
-{
-#ifndef NETEQ_STEREO
- /* Stereo not supported */
- return(-1);
-#else
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- if (NetEqMainInst == NULL) return (-1);
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
- if (msInfo == NULL)
- {
- /* msInfo not provided */
- NetEqMainInst->ErrorCode = NETEQ_OTHER_ERROR;
- return (-1);
- }
-
- /* translate from external to internal Master/Slave information */
- NetEqMainInst->DSPinst.msInfo = (MasterSlaveInfo *) msInfo;
-
- /* check that we have not done a master/slave switch without first re-initializing */
- if ((NetEqMainInst->masterSlave == 1 && !isMaster) || /* switch from master to slave */
- (NetEqMainInst->masterSlave == 2 && isMaster)) /* switch from slave to master */
- {
- NetEqMainInst->ErrorCode = ILLEGAL_MASTER_SLAVE_SWITCH;
- return (-1);
- }
-
- if (!isMaster)
- {
- /* this is the slave */
- NetEqMainInst->masterSlave = 2;
- NetEqMainInst->DSPinst.msInfo->msMode = NETEQ_SLAVE;
- }
- else
- {
- NetEqMainInst->DSPinst.msInfo->msMode = NETEQ_MASTER;
- }
-
- ok = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
- pw16_len, 0 /* not BGN only */, NetEqMainInst->MCUinst.av_sync);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
-
- if (isMaster)
- {
- /* this is the master */
- NetEqMainInst->masterSlave = 1;
- }
-
- return (ok);
-#endif
-}
-
-int WebRtcNetEQ_GetMasterSlaveInfoSize()
-{
-#ifdef NETEQ_STEREO
- return (sizeof(MasterSlaveInfo));
-#else
- return(-1);
-#endif
-}
-
-/* Special RecOut that does not do any decoding. */
-int WebRtcNetEQ_RecOutNoDecode(void *inst, int16_t *pw16_outData,
- int16_t *pw16_len)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-#ifdef NETEQ_STEREO
- MasterSlaveInfo msInfo;
-#endif
-
- if (NetEqMainInst == NULL) return (-1);
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
-#ifdef NETEQ_STEREO
- /* keep same mode as before */
- switch (NetEqMainInst->masterSlave)
- {
- case 1:
- {
- msInfo.msMode = NETEQ_MASTER;
- break;
- }
- case 2:
- {
- msInfo.msMode = NETEQ_SLAVE;
- break;
- }
- default:
- {
- msInfo.msMode = NETEQ_MONO;
- break;
- }
- }
-
- NetEqMainInst->DSPinst.msInfo = &msInfo;
-#endif
-
- ok = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
- pw16_len, 1 /* BGN only */, NetEqMainInst->MCUinst.av_sync);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-int WebRtcNetEQ_GetRTCPStats(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- ok = WebRtcNetEQ_RTCPGetStats(&NetEqMainInst->MCUinst.RTCP_inst,
- &RTCP_inst->fraction_lost, &RTCP_inst->cum_lost, &RTCP_inst->ext_max,
- &RTCP_inst->jitter, 0);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-int WebRtcNetEQ_GetRTCPStatsNoReset(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- ok = WebRtcNetEQ_RTCPGetStats(&NetEqMainInst->MCUinst.RTCP_inst,
- &RTCP_inst->fraction_lost, &RTCP_inst->cum_lost, &RTCP_inst->ext_max,
- &RTCP_inst->jitter, 1);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, uint32_t *timestamp)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
-
- if (NetEqMainInst->MCUinst.TSscalingInitialized)
- {
- *timestamp = WebRtcNetEQ_ScaleTimestampInternalToExternal(&NetEqMainInst->MCUinst,
- NetEqMainInst->DSPinst.videoSyncTimestamp);
- }
- else
- {
- *timestamp = NetEqMainInst->DSPinst.videoSyncTimestamp;
- }
-
- return (0);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_GetSpeechOutputType(...)
- *
- * Get the output type for the audio provided by the latest call to
- * WebRtcNetEQ_RecOut().
- *
- * kOutputNormal = normal audio (possibly processed)
- * kOutputPLC = loss concealment through stretching audio
- * kOutputCNG = comfort noise (codec-internal or RFC3389)
- * kOutputPLCtoCNG = background noise only due to long expand or error
- * kOutputVADPassive = PostDecode VAD signalling passive speaker
- *
- * Input:
- * - inst : NetEQ instance
- *
- * Output:
- * - outputType : Output type from enum list WebRtcNetEQOutputType
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outputType)
-{
- /* Typecast to internal instance type */
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
- if ((NetEqMainInst->DSPinst.w16_mode & MODE_BGN_ONLY) != 0)
- {
- /* If last mode was background noise only */
- *outputType = kOutputPLCtoCNG;
-
- }
- else if ((NetEqMainInst->DSPinst.w16_mode == MODE_CODEC_INTERNAL_CNG)
- || (NetEqMainInst->DSPinst.w16_mode == MODE_RFC3389CNG))
- {
- /* If CN or internal CNG */
- *outputType = kOutputCNG;
-
- }
- else if ((NetEqMainInst->DSPinst.w16_mode == MODE_EXPAND)
- && (NetEqMainInst->DSPinst.ExpandInst.w16_expandMuteFactor == 0))
- {
- /* Expand mode has faded down to background noise only (very long expand) */
- *outputType = kOutputPLCtoCNG;
-
- }
- else if (NetEqMainInst->DSPinst.w16_mode == MODE_EXPAND)
- {
- /* PLC mode */
- *outputType = kOutputPLC;
-
-#ifdef NETEQ_VAD
- }
- else if ( NetEqMainInst->DSPinst.VADInst.VADDecision == 0 )
- {
- /* post-decode VAD says passive speaker */
- *outputType = kOutputVADPassive;
-#endif /* NETEQ_VAD */
-
- }
- else
- {
- /* Normal speech output type (can still be manipulated, e.g., accelerated) */
- *outputType = kOutputNormal;
- }
-
- return (0);
-}
-
-/**********************************
- * Functions related to VQmon
- */
-
-#define WEBRTC_NETEQ_CONCEALMENTFLAG_LOST 0x01
-#define WEBRTC_NETEQ_CONCEALMENTFLAG_DISCARDED 0x02
-#define WEBRTC_NETEQ_CONCEALMENTFLAG_SUPRESS 0x04
-#define WEBRTC_NETEQ_CONCEALMENTFLAG_CNGACTIVE 0x80
-
-int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, uint16_t *validVoiceDurationMs,
- uint16_t *concealedVoiceDurationMs,
- uint8_t *concealedVoiceFlags)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- int16_t fs_mult;
- int16_t ms_lost;
- if (NetEqMainInst == NULL) return (-1);
- fs_mult = WebRtcSpl_DivW32W16ResW16(NetEqMainInst->MCUinst.fs, 8000);
-
- ms_lost = WebRtcSpl_DivW32W16ResW16(
- (int32_t) NetEqMainInst->DSPinst.w16_concealedTS, (int16_t) (8 * fs_mult));
- if (ms_lost > NetEqMainInst->DSPinst.millisecondsPerCall) ms_lost
- = NetEqMainInst->DSPinst.millisecondsPerCall;
-
- *validVoiceDurationMs = NetEqMainInst->DSPinst.millisecondsPerCall - ms_lost;
- *concealedVoiceDurationMs = ms_lost;
- if (ms_lost > 0)
- {
- *concealedVoiceFlags = WEBRTC_NETEQ_CONCEALMENTFLAG_LOST;
- }
- else
- {
- *concealedVoiceFlags = 0;
- }
- NetEqMainInst->DSPinst.w16_concealedTS -= ms_lost * (8 * fs_mult);
-
- return (0);
-}
-
-int WebRtcNetEQ_VQmonGetConfiguration(void *inst, uint16_t *absMaxDelayMs,
- uint8_t *adaptationRate)
-{
- /* Dummy check the inst, just to avoid compiler warnings. */
- if (inst == NULL)
- {
- /* Do nothing. */
- }
-
- /* Hardcoded variables that are used for VQmon as jitter buffer parameters */
- *absMaxDelayMs = 240;
- *adaptationRate = 1;
- return (0);
-}
-
-int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, uint16_t *avgDelayMs,
- uint16_t *maxDelayMs)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- *avgDelayMs = (uint16_t) (NetEqMainInst->MCUinst.BufferStat_inst.avgDelayMsQ8 >> 8);
- *maxDelayMs = (uint16_t) NetEqMainInst->MCUinst.BufferStat_inst.maxDelayMs;
- return (0);
-}
-
-/*************************************
- * Statistics functions
- */
-
-/* Get the "in-call" statistics from NetEQ.
- * The statistics are reset after the query. */
-int WebRtcNetEQ_GetNetworkStatistics(void *inst, WebRtcNetEQ_NetworkStatistics *stats)
-
-{
-
- uint16_t tempU16;
- uint32_t tempU32, tempU32_2;
- int numShift;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- /* Instance sanity */
- if (NetEqMainInst == NULL) return (-1);
-
- stats->addedSamples = NetEqMainInst->DSPinst.statInst.addedSamples;
-
- /*******************/
- /* Get buffer size */
- /*******************/
-
- if (NetEqMainInst->MCUinst.fs != 0)
- {
- int32_t temp32;
- /* Query packet buffer for number of samples. */
- temp32 = WebRtcNetEQ_PacketBufferGetSize(
- &NetEqMainInst->MCUinst.PacketBuffer_inst,
- &NetEqMainInst->MCUinst.codec_DB_inst,
- NetEqMainInst->MCUinst.av_sync);
-
- /* Divide by sample rate.
- * Calculate temp32 * 1000 / fs to get result in ms. */
- stats->currentBufferSize = (uint16_t)
- WebRtcSpl_DivU32U16(temp32 * 1000, NetEqMainInst->MCUinst.fs);
-
- /* Add number of samples yet to play in sync buffer. */
- temp32 = (int32_t) (NetEqMainInst->DSPinst.endPosition -
- NetEqMainInst->DSPinst.curPosition);
- stats->currentBufferSize += (uint16_t)
- WebRtcSpl_DivU32U16(temp32 * 1000, NetEqMainInst->MCUinst.fs);
- }
- else
- {
- /* Sample rate not initialized. */
- stats->currentBufferSize = 0;
- }
-
- /***************************/
- /* Get optimal buffer size */
- /***************************/
-
- if (NetEqMainInst->MCUinst.fs != 0)
- {
- /* preferredBufferSize = Bopt * packSizeSamples / (fs/1000) */
- stats->preferredBufferSize
- = (uint16_t) WEBRTC_SPL_MUL_16_16(
- (int16_t) ((NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.optBufLevel) >> 8), /* optimal buffer level in packets shifted to Q0 */
- WebRtcSpl_DivW32W16ResW16(
- (int32_t) NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.packetSpeechLenSamp, /* samples per packet */
- WebRtcSpl_DivW32W16ResW16( (int32_t) NetEqMainInst->MCUinst.fs, (int16_t) 1000 ) /* samples per ms */
- ) );
-
- /* add extra delay */
- if (NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs > 0)
- {
- stats->preferredBufferSize
- += NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs;
- }
- }
- else
- {
- /* sample rate not initialized */
- stats->preferredBufferSize = 0;
- }
-
- /***********************************/
- /* Check if jitter peaks are found */
- /***********************************/
-
- stats->jitterPeaksFound =
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.peakFound;
-
- /***********************/
- /* Calculate loss rate */
- /***********************/
-
- /* timestamps elapsed since last report */
- tempU32 = NetEqMainInst->MCUinst.lastReportTS;
-
- if (NetEqMainInst->MCUinst.lostTS == 0)
- {
- /* no losses */
- stats->currentPacketLossRate = 0;
- }
- else if (NetEqMainInst->MCUinst.lostTS < tempU32)
- {
- /* calculate shifts; we want the result in Q14 */
- numShift = WebRtcSpl_NormU32(NetEqMainInst->MCUinst.lostTS); /* numerator shift for normalize */
-
- if (numShift < 14)
- {
- /* cannot shift numerator 14 steps; shift denominator too */
- tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
- }
- else
- {
- /* shift no more than 14 steps */
- numShift = 14;
- }
-
- if (tempU32 == 0)
- {
- /* check for zero denominator; result should be zero in this case */
- stats->currentPacketLossRate = 0;
- }
- else
- {
- /* check that denominator fits in signed 16-bit */
- while (tempU32 > WEBRTC_SPL_WORD16_MAX)
- {
- tempU32 >>= 1; /* right-shift 1 step */
- numShift--; /* compensate in numerator */
- }
- tempU16 = (uint16_t) tempU32;
-
- /* do the shift of numerator */
- tempU32
- = WEBRTC_SPL_SHIFT_W32( (uint32_t) NetEqMainInst->MCUinst.lostTS, numShift);
-
- stats->currentPacketLossRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32,
- tempU16);
- }
- }
- else
- {
- /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else is wrong */
- /* set loss rate = 1 */
- stats->currentPacketLossRate = 1 << 14; /* 1 in Q14 */
- }
-
- /**************************/
- /* Calculate discard rate */
- /**************************/
-
- /* timestamps elapsed since last report */
- tempU32 = NetEqMainInst->MCUinst.lastReportTS;
-
- /* number of discarded samples */
- tempU32_2
- = WEBRTC_SPL_MUL_16_U16( (int16_t) NetEqMainInst->MCUinst.PacketBuffer_inst.packSizeSamples,
- NetEqMainInst->MCUinst.PacketBuffer_inst.discardedPackets);
-
- if (tempU32_2 == 0)
- {
- /* no discarded samples */
- stats->currentDiscardRate = 0;
- }
- else if (tempU32_2 < tempU32)
- {
- /* calculate shifts; we want the result in Q14 */
- numShift = WebRtcSpl_NormU32(tempU32_2); /* numerator shift for normalize */
-
- if (numShift < 14)
- {
- /* cannot shift numerator 14 steps; shift denominator too */
- tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
- }
- else
- {
- /* shift no more than 14 steps */
- numShift = 14;
- }
-
- if (tempU32 == 0)
- {
- /* check for zero denominator; result should be zero in this case */
- stats->currentDiscardRate = 0;
- }
- else
- {
- /* check that denominator fits in signed 16-bit */
- while (tempU32 > WEBRTC_SPL_WORD16_MAX)
- {
- tempU32 >>= 1; /* right-shift 1 step */
- numShift--; /* compensate in numerator */
- }
- tempU16 = (uint16_t) tempU32;
-
- /* do the shift of numerator */
- tempU32 = WEBRTC_SPL_SHIFT_W32( tempU32_2, numShift);
-
- stats->currentDiscardRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32, tempU16);
- }
- }
- else
- {
- /* discarded count is larger than elapsed time count; probably timestamp wrap-around or something else is wrong */
- /* set discard rate = 1 */
- stats->currentDiscardRate = 1 << 14; /* 1 in Q14 */
- }
-
- /*************************************************************/
- /* Calculate Accelerate, Expand and Pre-emptive Expand rates */
- /*************************************************************/
-
- /* timestamps elapsed since last report */
- tempU32 = NetEqMainInst->MCUinst.lastReportTS;
-
- if (NetEqMainInst->DSPinst.statInst.accelerateLength == 0)
- {
- /* no accelerate */
- stats->currentAccelerateRate = 0;
- }
- else if (NetEqMainInst->DSPinst.statInst.accelerateLength < tempU32)
- {
- /* calculate shifts; we want the result in Q14 */
- numShift = WebRtcSpl_NormU32(NetEqMainInst->DSPinst.statInst.accelerateLength); /* numerator shift for normalize */
-
- if (numShift < 14)
- {
- /* cannot shift numerator 14 steps; shift denominator too */
- tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
- }
- else
- {
- /* shift no more than 14 steps */
- numShift = 14;
- }
-
- if (tempU32 == 0)
- {
- /* check for zero denominator; result should be zero in this case */
- stats->currentAccelerateRate = 0;
- }
- else
- {
- /* check that denominator fits in signed 16-bit */
- while (tempU32 > WEBRTC_SPL_WORD16_MAX)
- {
- tempU32 >>= 1; /* right-shift 1 step */
- numShift--; /* compensate in numerator */
- }
- tempU16 = (uint16_t) tempU32;
-
- /* do the shift of numerator */
- tempU32
- = WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.accelerateLength, numShift);
-
- stats->currentAccelerateRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32,
- tempU16);
- }
- }
- else
- {
- /* accelerated sample count is larger than elapsed time count; probably timestamp wrap-around or something else is wrong */
- /* set accelerate rate = 1 */
- stats->currentAccelerateRate = 1 << 14; /* 1 in Q14 */
- }
-
- /* timestamps elapsed since last report */
- tempU32 = NetEqMainInst->MCUinst.lastReportTS;
-
- if (NetEqMainInst->DSPinst.statInst.expandLength == 0)
- {
- /* no expand */
- stats->currentExpandRate = 0;
- }
- else if (NetEqMainInst->DSPinst.statInst.expandLength < tempU32)
- {
- /* calculate shifts; we want the result in Q14 */
- numShift = WebRtcSpl_NormU32(NetEqMainInst->DSPinst.statInst.expandLength); /* numerator shift for normalize */
-
- if (numShift < 14)
- {
- /* cannot shift numerator 14 steps; shift denominator too */
- tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
- }
- else
- {
- /* shift no more than 14 steps */
- numShift = 14;
- }
-
- if (tempU32 == 0)
- {
- /* check for zero denominator; result should be zero in this case */
- stats->currentExpandRate = 0;
- }
- else
- {
- /* check that denominator fits in signed 16-bit */
- while (tempU32 > WEBRTC_SPL_WORD16_MAX)
- {
- tempU32 >>= 1; /* right-shift 1 step */
- numShift--; /* compensate in numerator */
- }
- tempU16 = (uint16_t) tempU32;
-
- /* do the shift of numerator */
- tempU32
- = WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.expandLength, numShift);
-
- stats->currentExpandRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32, tempU16);
- }
- }
- else
- {
- /* expanded sample count is larger than elapsed time count; probably timestamp wrap-around or something else is wrong */
- /* set expand rate = 1 */
- stats->currentExpandRate = 1 << 14; /* 1 in Q14 */
- }
-
- /* timestamps elapsed since last report */
- tempU32 = NetEqMainInst->MCUinst.lastReportTS;
-
- if (NetEqMainInst->DSPinst.statInst.preemptiveLength == 0)
- {
- /* no pre-emptive expand */
- stats->currentPreemptiveRate = 0;
- }
- else if (NetEqMainInst->DSPinst.statInst.preemptiveLength < tempU32)
- {
- /* calculate shifts; we want the result in Q14 */
- numShift = WebRtcSpl_NormU32(NetEqMainInst->DSPinst.statInst.preemptiveLength); /* numerator shift for normalize */
-
- if (numShift < 14)
- {
- /* cannot shift numerator 14 steps; shift denominator too */
- tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
- }
- else
- {
- /* shift no more than 14 steps */
- numShift = 14;
- }
-
- if (tempU32 == 0)
- {
- /* check for zero denominator; result should be zero in this case */
- stats->currentPreemptiveRate = 0;
- }
- else
- {
- /* check that denominator fits in signed 16-bit */
- while (tempU32 > WEBRTC_SPL_WORD16_MAX)
- {
- tempU32 >>= 1; /* right-shift 1 step */
- numShift--; /* compensate in numerator */
- }
- tempU16 = (uint16_t) tempU32;
-
- /* do the shift of numerator */
- tempU32
- = WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.preemptiveLength, numShift);
-
- stats->currentPreemptiveRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32,
- tempU16);
- }
- }
- else
- {
- /* pre-emptive expand count is larger than elapsed time count; probably timestamp wrap-around or something else is wrong */
- /* set pre-emptive expand rate = 1 */
- stats->currentPreemptiveRate = 1 << 14; /* 1 in Q14 */
- }
-
- stats->clockDriftPPM = WebRtcNetEQ_AverageIAT(
- &NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst);
-
- /* reset counters */
- WebRtcNetEQ_ResetMcuInCallStats(&(NetEqMainInst->MCUinst));
- WebRtcNetEQ_ClearInCallStats(&(NetEqMainInst->DSPinst));
-
- return (0);
-}
-
-int WebRtcNetEQ_GetRawFrameWaitingTimes(void *inst,
- int max_length,
- int* waiting_times_ms) {
- int i = 0;
- MainInst_t *main_inst = (MainInst_t*) inst;
- if (main_inst == NULL) return -1;
-
- while ((i < max_length) && (i < main_inst->MCUinst.len_waiting_times)) {
- waiting_times_ms[i] = main_inst->MCUinst.waiting_times[i] *
- main_inst->DSPinst.millisecondsPerCall;
- ++i;
- }
- assert(i <= kLenWaitingTimes);
- WebRtcNetEQ_ResetWaitingTimeStats(&main_inst->MCUinst);
- return i;
-}
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADInstance(...)
- *
- * Provide a pointer to an allocated VAD instance. If this function is never
- * called, or if it is called with a NULL pointer as VAD_inst, the post-decode
- * VAD functionality is disabled. Also provide pointers to init, setmode
- * and VAD functions. These are typically pointers to WebRtcVad_Init,
- * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in the
- * interface file webrtc_vad.h.
- *
- * Input:
- * - NetEQ_inst : NetEQ instance
- * - VADinst : VAD instance
- * - initFunction : Pointer to VAD init function
- * - setmodeFunction : Pointer to VAD setmode function
- * - VADfunction : Pointer to VAD function
- *
- * Output:
- * - NetEQ_inst : Updated NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADInstance(void *NetEQ_inst, void *VAD_inst,
- WebRtcNetEQ_VADInitFunction initFunction,
- WebRtcNetEQ_VADSetmodeFunction setmodeFunction,
- WebRtcNetEQ_VADFunction VADFunction)
-{
-
- /* Typecast to internal instance type */
- MainInst_t *NetEqMainInst = (MainInst_t*) NetEQ_inst;
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
-#ifdef NETEQ_VAD
-
- /* Store pointer in PostDecode VAD struct */
- NetEqMainInst->DSPinst.VADInst.VADState = VAD_inst;
-
- /* Store function pointers */
- NetEqMainInst->DSPinst.VADInst.initFunction = initFunction;
- NetEqMainInst->DSPinst.VADInst.setmodeFunction = setmodeFunction;
- NetEqMainInst->DSPinst.VADInst.VADFunction = VADFunction;
-
- /* Call init function and return the result (ok or fail) */
- return(WebRtcNetEQ_InitVAD(&NetEqMainInst->DSPinst.VADInst, NetEqMainInst->DSPinst.fs));
-
-#else /* NETEQ_VAD not defined */
- return (-1);
-#endif /* NETEQ_VAD */
-
-}
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADMode(...)
- *
- * Pass an aggressiveness mode parameter to the post-decode VAD instance.
- * If this function is never called, mode 0 (quality mode) is used as default.
- *
- * Input:
- * - inst : NetEQ instance
- * - mode : mode parameter (same range as WebRtc VAD mode)
- *
- * Output:
- * - inst : Updated NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADMode(void *inst, int mode)
-{
-
- /* Typecast to internal instance type */
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
-#ifdef NETEQ_VAD
-
- /* Set mode and return result */
- return(WebRtcNetEQ_SetVADModeInternal(&NetEqMainInst->DSPinst.VADInst, mode));
-
-#else /* NETEQ_VAD not defined */
- return (-1);
-#endif /* NETEQ_VAD */
-
-}
-
-void WebRtcNetEQ_GetProcessingActivity(void *inst,
- WebRtcNetEQ_ProcessingActivity *stats) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- stats->accelerate_bgn_samples =
- NetEqMainInst->DSPinst.activity_stats.accelerate_bgn_samples;
- stats->accelerate_normal_samples =
- NetEqMainInst->DSPinst.activity_stats.accelarate_normal_samples;
-
- stats->expand_bgn_sampels =
- NetEqMainInst->DSPinst.activity_stats.expand_bgn_samples;
- stats->expand_normal_samples =
- NetEqMainInst->DSPinst.activity_stats.expand_normal_samples;
-
- stats->preemptive_expand_bgn_samples =
- NetEqMainInst->DSPinst.activity_stats.preemptive_expand_bgn_samples;
- stats->preemptive_expand_normal_samples =
- NetEqMainInst->DSPinst.activity_stats.preemptive_expand_normal_samples;
-
- stats->merge_expand_bgn_samples =
- NetEqMainInst->DSPinst.activity_stats.merge_expand_bgn_samples;
- stats->merge_expand_normal_samples =
- NetEqMainInst->DSPinst.activity_stats.merge_expand_normal_samples;
-
- WebRtcNetEQ_ClearActivityStats(&NetEqMainInst->DSPinst);
-}
-
-void WebRtcNetEQ_EnableAVSync(void* inst, int enable) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- NetEqMainInst->MCUinst.av_sync = (enable != 0) ? 1 : 0;
-}
-
-int WebRtcNetEQ_RecInSyncRTP(void* inst, WebRtcNetEQ_RTPInfo* rtp_info,
- uint32_t receive_timestamp) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst->MCUinst.av_sync == 0)
- return -1;
- if (WebRtcNetEQ_RecInRTPStruct(inst, rtp_info, kSyncPayload,
- SYNC_PAYLOAD_LEN_BYTES,
- receive_timestamp) < 0) {
- return -1;
- }
- return SYNC_PAYLOAD_LEN_BYTES;
-}
-
-int WebRtcNetEQ_GetRequiredDelayMs(const void* inst) {
- const MainInst_t* NetEqMainInst = (MainInst_t*)inst;
- const AutomodeInst_t* auto_mode = (NetEqMainInst == NULL) ? NULL :
- &NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst;
-
- /* Instance sanity */
- if (NetEqMainInst == NULL || auto_mode == NULL)
- return 0;
-
- if (NetEqMainInst->MCUinst.fs == 0)
- return 0; // Sampling rate not initialized.
-
- /* |required_delay_q8| has the unit of packets in Q8 domain, therefore,
- * the corresponding delay is
- * required_delay_ms = (1000 * required_delay_q8 * samples_per_packet /
- * sample_rate_hz) / 256;
- */
- return (auto_mode->required_delay_q8 *
- ((auto_mode->packetSpeechLenSamp * 1000) / NetEqMainInst->MCUinst.fs) +
- 128) >> 8;
-}
-
-int WebRtcNetEQ_DecodedRtpInfo(const void* inst,
- int* sequence_number,
- uint32_t* timestamp) {
- const MainInst_t *NetEqMainInst = (inst == NULL) ? NULL :
- (const MainInst_t*) inst;
- if (NetEqMainInst->MCUinst.decoded_packet_sequence_number < 0)
- return -1;
- *sequence_number = NetEqMainInst->MCUinst.decoded_packet_sequence_number;
- *timestamp = NetEqMainInst->MCUinst.decoded_packet_timestamp;
- return 0;
-}
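The statistics code removed above reports every rate field in Q14, where 1 << 14 equals 100%, and a successful call to WebRtcNetEQ_GetNetworkStatistics() also resets the in-call counters. A minimal sketch of how a caller could poll and convert those fields is shown below; the helper name is hypothetical, the NetEQ instance is assumed to be created elsewhere, and the header paths follow the includes used in the unit test later in this diff.

// Sketch only: reads the statistics produced by the code above. The helper
// name and the printf formatting are illustrative; the instance comes from
// whatever created the NetEQ instance elsewhere.
#include <cstdio>
#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h"

void PrintNetEqStats(void* neteq_inst) {
  WebRtcNetEQ_NetworkStatistics stats;
  if (WebRtcNetEQ_GetNetworkStatistics(neteq_inst, &stats) != 0) {
    return;  // Instance was NULL or not initialized.
  }
  // All rates are reported in Q14, so 1 << 14 corresponds to 100%.
  const double q14_to_percent = 100.0 / (1 << 14);
  std::printf("buffer %d ms (preferred %d ms), loss %.1f%%, expand %.1f%%\n",
              static_cast<int>(stats.currentBufferSize),
              static_cast<int>(stats.preferredBufferSize),
              stats.currentPacketLossRate * q14_to_percent,
              stats.currentExpandRate * q14_to_percent);
  // Note: the successful call has already reset the in-call counters.
}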
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc
deleted file mode 100644
index c37f8990a8b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc
+++ /dev/null
@@ -1,778 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file includes unit tests for NetEQ.
- */
-
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-
-#include <stdlib.h>
-#include <string.h> // memset
-
-#include <set>
-#include <sstream>
-#include <string>
-#include <vector>
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h"
-#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h"
-#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h"
-#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class RefFiles {
- public:
- RefFiles(const std::string& input_file, const std::string& output_file);
- ~RefFiles();
- template<class T> void ProcessReference(const T& test_results);
- template<typename T, size_t n> void ProcessReference(
- const T (&test_results)[n],
- size_t length);
- template<typename T, size_t n> void WriteToFile(
- const T (&test_results)[n],
- size_t length);
- template<typename T, size_t n> void ReadFromFileAndCompare(
- const T (&test_results)[n],
- size_t length);
- void WriteToFile(const WebRtcNetEQ_NetworkStatistics& stats);
- void ReadFromFileAndCompare(const WebRtcNetEQ_NetworkStatistics& stats);
- void WriteToFile(const WebRtcNetEQ_RTCPStat& stats);
- void ReadFromFileAndCompare(const WebRtcNetEQ_RTCPStat& stats);
-
- FILE* input_fp_;
- FILE* output_fp_;
-};
-
-RefFiles::RefFiles(const std::string &input_file,
- const std::string &output_file)
- : input_fp_(NULL),
- output_fp_(NULL) {
- if (!input_file.empty()) {
- input_fp_ = fopen(input_file.c_str(), "rb");
- EXPECT_TRUE(input_fp_ != NULL);
- }
- if (!output_file.empty()) {
- output_fp_ = fopen(output_file.c_str(), "wb");
- EXPECT_TRUE(output_fp_ != NULL);
- }
-}
-
-RefFiles::~RefFiles() {
- if (input_fp_) {
- EXPECT_EQ(EOF, fgetc(input_fp_)); // Make sure that we reached the end.
- fclose(input_fp_);
- }
- if (output_fp_) fclose(output_fp_);
-}
-
-template<class T>
-void RefFiles::ProcessReference(const T& test_results) {
- WriteToFile(test_results);
- ReadFromFileAndCompare(test_results);
-}
-
-template<typename T, size_t n>
-void RefFiles::ProcessReference(const T (&test_results)[n], size_t length) {
- WriteToFile(test_results, length);
- ReadFromFileAndCompare(test_results, length);
-}
-
-template<typename T, size_t n>
-void RefFiles::WriteToFile(const T (&test_results)[n], size_t length) {
- if (output_fp_) {
- ASSERT_EQ(length, fwrite(&test_results, sizeof(T), length, output_fp_));
- }
-}
-
-template<typename T, size_t n>
-void RefFiles::ReadFromFileAndCompare(const T (&test_results)[n],
- size_t length) {
- if (input_fp_) {
- // Read from ref file.
- T* ref = new T[length];
- ASSERT_EQ(length, fread(ref, sizeof(T), length, input_fp_));
- // Compare
- ASSERT_EQ(0, memcmp(&test_results, ref, sizeof(T) * length));
- delete [] ref;
- }
-}
-
-void RefFiles::WriteToFile(const WebRtcNetEQ_NetworkStatistics& stats) {
- if (output_fp_) {
- ASSERT_EQ(1u, fwrite(&stats, sizeof(WebRtcNetEQ_NetworkStatistics), 1,
- output_fp_));
- }
-}
-
-void RefFiles::ReadFromFileAndCompare(
- const WebRtcNetEQ_NetworkStatistics& stats) {
- if (input_fp_) {
- // Read from ref file.
- size_t stat_size = sizeof(WebRtcNetEQ_NetworkStatistics);
- WebRtcNetEQ_NetworkStatistics ref_stats;
- ASSERT_EQ(1u, fread(&ref_stats, stat_size, 1, input_fp_));
- // Compare
- EXPECT_EQ(0, memcmp(&stats, &ref_stats, stat_size));
- }
-}
-
-void RefFiles::WriteToFile(const WebRtcNetEQ_RTCPStat& stats) {
- if (output_fp_) {
- ASSERT_EQ(1u, fwrite(&(stats.fraction_lost), sizeof(stats.fraction_lost), 1,
- output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.cum_lost), sizeof(stats.cum_lost), 1,
- output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.ext_max), sizeof(stats.ext_max), 1,
- output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.jitter), sizeof(stats.jitter), 1,
- output_fp_));
- }
-}
-
-void RefFiles::ReadFromFileAndCompare(
- const WebRtcNetEQ_RTCPStat& stats) {
- if (input_fp_) {
- // Read from ref file.
- WebRtcNetEQ_RTCPStat ref_stats;
- ASSERT_EQ(1u, fread(&(ref_stats.fraction_lost),
- sizeof(ref_stats.fraction_lost), 1, input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.cum_lost), sizeof(ref_stats.cum_lost), 1,
- input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.ext_max), sizeof(ref_stats.ext_max), 1,
- input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.jitter), sizeof(ref_stats.jitter), 1,
- input_fp_));
- // Compare
- EXPECT_EQ(ref_stats.fraction_lost, stats.fraction_lost);
- EXPECT_EQ(ref_stats.cum_lost, stats.cum_lost);
- EXPECT_EQ(ref_stats.ext_max, stats.ext_max);
- EXPECT_EQ(ref_stats.jitter, stats.jitter);
- }
-}
-
-class NetEqDecodingTest : public ::testing::Test {
- protected:
- // NetEQ must be polled for data once every 10 ms. Thus, none of the
- // constants below can be changed.
- static const int kTimeStepMs = 10;
- static const int kBlockSize8kHz = kTimeStepMs * 8;
- static const int kBlockSize16kHz = kTimeStepMs * 16;
- static const int kBlockSize32kHz = kTimeStepMs * 32;
- static const int kMaxBlockSize = kBlockSize32kHz;
-
- NetEqDecodingTest();
- virtual void SetUp();
- virtual void TearDown();
- void SelectDecoders(WebRtcNetEQDecoder* used_codec);
- void LoadDecoders();
- void OpenInputFile(const std::string &rtp_file);
- void Process(NETEQTEST_RTPpacket* rtp_ptr, int16_t* out_len);
- void DecodeAndCompare(const std::string &rtp_file,
- const std::string &ref_file);
- void DecodeAndCheckStats(const std::string &rtp_file,
- const std::string &stat_ref_file,
- const std::string &rtcp_ref_file);
- static void PopulateRtpInfo(int frame_index,
- int timestamp,
- WebRtcNetEQ_RTPInfo* rtp_info);
- static void PopulateCng(int frame_index,
- int timestamp,
- WebRtcNetEQ_RTPInfo* rtp_info,
- uint8_t* payload,
- int* payload_len);
- void WrapTest(uint16_t start_seq_no, uint32_t start_timestamp,
- const std::set<uint16_t>& drop_seq_numbers);
-
- NETEQTEST_NetEQClass* neteq_inst_;
- std::vector<NETEQTEST_Decoder*> dec_;
- FILE* rtp_fp_;
- unsigned int sim_clock_;
- int16_t out_data_[kMaxBlockSize];
-};
-
-NetEqDecodingTest::NetEqDecodingTest()
- : neteq_inst_(NULL),
- rtp_fp_(NULL),
- sim_clock_(0) {
- memset(out_data_, 0, sizeof(out_data_));
-}
-
-void NetEqDecodingTest::SetUp() {
- WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd - 1];
-
- SelectDecoders(usedCodec);
- neteq_inst_ = new NETEQTEST_NetEQClass(usedCodec, dec_.size(), 8000,
- kTCPLargeJitter);
- ASSERT_TRUE(neteq_inst_);
- LoadDecoders();
-}
-
-void NetEqDecodingTest::TearDown() {
- if (neteq_inst_)
- delete neteq_inst_;
- for (size_t i = 0; i < dec_.size(); ++i) {
- if (dec_[i])
- delete dec_[i];
- }
- if (rtp_fp_)
- fclose(rtp_fp_);
-}
-
-void NetEqDecodingTest::SelectDecoders(WebRtcNetEQDecoder* used_codec) {
- *used_codec++ = kDecoderPCMu;
- dec_.push_back(new decoder_PCMU(0));
- *used_codec++ = kDecoderPCMa;
- dec_.push_back(new decoder_PCMA(8));
- *used_codec++ = kDecoderILBC;
- dec_.push_back(new decoder_ILBC(102));
- *used_codec++ = kDecoderISAC;
- dec_.push_back(new decoder_iSAC(103));
- *used_codec++ = kDecoderISACswb;
- dec_.push_back(new decoder_iSACSWB(104));
- *used_codec++ = kDecoderISACfb;
- dec_.push_back(new decoder_iSACFB(105));
- *used_codec++ = kDecoderPCM16B;
- dec_.push_back(new decoder_PCM16B_NB(93));
- *used_codec++ = kDecoderPCM16Bwb;
- dec_.push_back(new decoder_PCM16B_WB(94));
- *used_codec++ = kDecoderPCM16Bswb32kHz;
- dec_.push_back(new decoder_PCM16B_SWB32(95));
- *used_codec++ = kDecoderCNG;
- dec_.push_back(new decoder_CNG(13, 8000));
- *used_codec++ = kDecoderCNG;
- dec_.push_back(new decoder_CNG(98, 16000));
-}
-
-void NetEqDecodingTest::LoadDecoders() {
- for (size_t i = 0; i < dec_.size(); ++i) {
- ASSERT_EQ(0, dec_[i]->loadToNetEQ(*neteq_inst_));
- }
-}
-
-void NetEqDecodingTest::OpenInputFile(const std::string &rtp_file) {
- rtp_fp_ = fopen(rtp_file.c_str(), "rb");
- ASSERT_TRUE(rtp_fp_ != NULL);
- ASSERT_EQ(0, NETEQTEST_RTPpacket::skipFileHeader(rtp_fp_));
-}
-
-void NetEqDecodingTest::Process(NETEQTEST_RTPpacket* rtp, int16_t* out_len) {
- // Check if time to receive.
- while ((sim_clock_ >= rtp->time()) &&
- (rtp->dataLen() >= 0)) {
- if (rtp->dataLen() > 0) {
- ASSERT_EQ(0, neteq_inst_->recIn(*rtp));
- }
- // Get next packet.
- ASSERT_NE(-1, rtp->readFromFile(rtp_fp_));
- }
-
- // RecOut
- *out_len = neteq_inst_->recOut(out_data_);
- ASSERT_TRUE((*out_len == kBlockSize8kHz) ||
- (*out_len == kBlockSize16kHz) ||
- (*out_len == kBlockSize32kHz));
-
- // Increase time.
- sim_clock_ += kTimeStepMs;
-}
-
-void NetEqDecodingTest::DecodeAndCompare(const std::string &rtp_file,
- const std::string &ref_file) {
- OpenInputFile(rtp_file);
-
- std::string ref_out_file = "";
- if (ref_file.empty()) {
- ref_out_file = webrtc::test::OutputPath() + "neteq_out.pcm";
- }
- RefFiles ref_files(ref_file, ref_out_file);
-
- NETEQTEST_RTPpacket rtp;
- ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
- int i = 0;
- while (rtp.dataLen() >= 0) {
- std::ostringstream ss;
- ss << "Lap number " << i++ << " in DecodeAndCompare while loop";
- SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
- int16_t out_len;
- ASSERT_NO_FATAL_FAILURE(Process(&rtp, &out_len));
- ASSERT_NO_FATAL_FAILURE(ref_files.ProcessReference(out_data_, out_len));
- }
-}
-
-void NetEqDecodingTest::DecodeAndCheckStats(const std::string &rtp_file,
- const std::string &stat_ref_file,
- const std::string &rtcp_ref_file) {
- OpenInputFile(rtp_file);
- std::string stat_out_file = "";
- if (stat_ref_file.empty()) {
- stat_out_file = webrtc::test::OutputPath() +
- "neteq_network_stats.dat";
- }
- RefFiles network_stat_files(stat_ref_file, stat_out_file);
-
- std::string rtcp_out_file = "";
- if (rtcp_ref_file.empty()) {
- rtcp_out_file = webrtc::test::OutputPath() +
- "neteq_rtcp_stats.dat";
- }
- RefFiles rtcp_stat_files(rtcp_ref_file, rtcp_out_file);
-
- NETEQTEST_RTPpacket rtp;
- ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
- while (rtp.dataLen() >= 0) {
- int16_t out_len;
- Process(&rtp, &out_len);
-
- // Query the network statistics API once per second
- if (sim_clock_ % 1000 == 0) {
- // Process NetworkStatistics.
- WebRtcNetEQ_NetworkStatistics network_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
- &network_stats));
- network_stat_files.ProcessReference(network_stats);
-
- // Process RTCPstat.
- WebRtcNetEQ_RTCPStat rtcp_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetRTCPStats(neteq_inst_->instance(),
- &rtcp_stats));
- rtcp_stat_files.ProcessReference(rtcp_stats);
- }
- }
-}
-
-void NetEqDecodingTest::PopulateRtpInfo(int frame_index,
- int timestamp,
- WebRtcNetEQ_RTPInfo* rtp_info) {
- rtp_info->sequenceNumber = frame_index;
- rtp_info->timeStamp = timestamp;
- rtp_info->SSRC = 0x1234; // Just an arbitrary SSRC.
- rtp_info->payloadType = 94; // PCM16b WB codec.
- rtp_info->markerBit = 0;
-}
-
-void NetEqDecodingTest::PopulateCng(int frame_index,
- int timestamp,
- WebRtcNetEQ_RTPInfo* rtp_info,
- uint8_t* payload,
- int* payload_len) {
- rtp_info->sequenceNumber = frame_index;
- rtp_info->timeStamp = timestamp;
- rtp_info->SSRC = 0x1234; // Just an arbitrary SSRC.
- rtp_info->payloadType = 98; // WB CNG.
- rtp_info->markerBit = 0;
- payload[0] = 64; // Noise level -64 dBov, quite arbitrarily chosen.
- *payload_len = 1; // Only noise level, no spectral parameters.
-}
-
-#if (defined(_WIN32) && defined(WEBRTC_ARCH_64_BITS)) || defined(WEBRTC_ANDROID)
- // Disabled for Windows 64-bit (until webrtc:1460 is fixed) and for Android.
-#define MAYBE_TestBitExactness DISABLED_TestBitExactness
-#else
-#define MAYBE_TestBitExactness TestBitExactness
-#endif
-
-TEST_F(NetEqDecodingTest, MAYBE_TestBitExactness) {
- const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_universal.rtp";
-#if defined(_MSC_VER) && (_MSC_VER >= 1700)
- // For Visual Studio 2012 and later, we will have to use the generic reference
- // file, rather than the windows-specific one.
- const std::string kInputRefFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_universal_ref.pcm";
-#else
- const std::string kInputRefFile =
- webrtc::test::ResourcePath("audio_coding/neteq_universal_ref", "pcm");
-#endif
- DecodeAndCompare(kInputRtpFile, kInputRefFile);
-}
-
-TEST_F(NetEqDecodingTest, TestNetworkStatistics) {
- const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_universal.rtp";
-#if defined(_MSC_VER) && (_MSC_VER >= 1700)
- // For Visual Studio 2012 and later, we will have to use the generic reference
- // file, rather than the windows-specific one.
- const std::string kNetworkStatRefFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_network_stats.dat";
-#else
- const std::string kNetworkStatRefFile =
- webrtc::test::ResourcePath("audio_coding/neteq_network_stats", "dat");
-#endif
- const std::string kRtcpStatRefFile =
- webrtc::test::ResourcePath("audio_coding/neteq_rtcp_stats", "dat");
- DecodeAndCheckStats(kInputRtpFile, kNetworkStatRefFile, kRtcpStatRefFile);
-}
-
-TEST_F(NetEqDecodingTest, TestFrameWaitingTimeStatistics) {
- // Use fax mode to avoid time-scaling. This is to simplify the testing of
- // packet waiting times in the packet buffer.
- ASSERT_EQ(0,
- WebRtcNetEQ_SetPlayoutMode(neteq_inst_->instance(), kPlayoutFax));
- // Insert 30 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
- int num_frames = 30;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- for (int i = 0; i < num_frames; ++i) {
- uint16_t payload[kSamples] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- rtp_info.sequenceNumber = i;
- rtp_info.timeStamp = i * kSamples;
- rtp_info.SSRC = 0x1234; // Just an arbitrary SSRC.
- rtp_info.payloadType = 94; // PCM16b WB codec.
- rtp_info.markerBit = 0;
- ASSERT_EQ(0, WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(), &rtp_info,
- reinterpret_cast<uint8_t*>(payload),
- kPayloadBytes, 0));
- }
- // Pull out all data.
- for (int i = 0; i < num_frames; ++i) {
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
- const int kVecLen = 110; // More than kLenWaitingTimes in mcu.h.
- int waiting_times[kVecLen];
- int len = WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
- kVecLen, waiting_times);
- EXPECT_EQ(num_frames, len);
- // Since all frames are dumped into NetEQ at once, but pulled out with 10 ms
- // spacing (by definition), we expect the delay to increase by 10 ms for
- // each packet.
- for (int i = 0; i < len; ++i) {
- EXPECT_EQ((i + 1) * 10, waiting_times[i]);
- }
-
- // Check statistics again and make sure it's been reset.
- EXPECT_EQ(0, WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
- kVecLen, waiting_times));
-
- // Process > 100 frames, and make sure that we get statistics
- // only for 100 frames. Note the new SSRC, causing NetEQ to reset.
- num_frames = 110;
- for (int i = 0; i < num_frames; ++i) {
- uint16_t payload[kSamples] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- rtp_info.sequenceNumber = i;
- rtp_info.timeStamp = i * kSamples;
- rtp_info.SSRC = 0x1235; // Just an arbitrary SSRC.
- rtp_info.payloadType = 94; // PCM16b WB codec.
- rtp_info.markerBit = 0;
- ASSERT_EQ(0, WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(), &rtp_info,
- reinterpret_cast<uint8_t*>(payload),
- kPayloadBytes, 0));
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
-
- len = WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
- kVecLen, waiting_times);
- EXPECT_EQ(100, len);
-}
-
-TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
- const int kNumFrames = 3000; // Needed for convergence.
- int frame_index = 0;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- while (frame_index < kNumFrames) {
- // Insert one packet each time, except every 10th time where we insert two
- // packets at once. This will create a negative clock-drift of approx. 10%.
- int num_packets = (frame_index % 10 == 0 ? 2 : 1);
- for (int n = 0; n < num_packets; ++n) {
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- ++frame_index;
- }
-
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
-
- WebRtcNetEQ_NetworkStatistics network_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
- &network_stats));
- EXPECT_EQ(-103196, network_stats.clockDriftPPM);
-}
-
-TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) {
- const int kNumFrames = 5000; // Needed for convergence.
- int frame_index = 0;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- for (int i = 0; i < kNumFrames; ++i) {
- // Insert one packet each time, except every 10th time where we don't insert
- // any packet. This will create a positive clock-drift of approx. 11%.
- int num_packets = (i % 10 == 9 ? 0 : 1);
- for (int n = 0; n < num_packets; ++n) {
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- ++frame_index;
- }
-
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
-
- WebRtcNetEQ_NetworkStatistics network_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
- &network_stats));
- EXPECT_EQ(110946, network_stats.clockDriftPPM);
-}
-
-TEST_F(NetEqDecodingTest, LongCngWithClockDrift) {
- uint16_t seq_no = 0;
- uint32_t timestamp = 0;
- const int kFrameSizeMs = 30;
- const int kSamples = kFrameSizeMs * 16;
- const int kPayloadBytes = kSamples * 2;
- // Apply a clock drift of -25 ms / s (sender faster than receiver).
- const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
- double next_input_time_ms = 0.0;
- double t_ms;
-
- // Insert speech for 5 seconds.
- const int kSpeechDurationMs = 5000;
- for (t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one 30 ms speech frame.
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- ++seq_no;
- timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
- }
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
-
- EXPECT_EQ(kOutputNormal, neteq_inst_->getOutputType());
- int32_t delay_before = timestamp - neteq_inst_->getSpeechTimeStamp();
-
- // Insert CNG for 1 minute (= 60000 ms).
- const int kCngPeriodMs = 100;
- const int kCngPeriodSamples = kCngPeriodMs * 16; // Period in 16 kHz samples.
- const int kCngDurationMs = 60000;
- for (; t_ms < kSpeechDurationMs + kCngDurationMs; t_ms += 10) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one CNG frame each 100 ms.
- uint8_t payload[kPayloadBytes];
- int payload_len;
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- payload_len, 0));
- ++seq_no;
- timestamp += kCngPeriodSamples;
- next_input_time_ms += static_cast<double>(kCngPeriodMs) * kDriftFactor;
- }
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
-
- EXPECT_EQ(kOutputCNG, neteq_inst_->getOutputType());
-
- // Insert speech again until output type is speech.
- while (neteq_inst_->getOutputType() != kOutputNormal) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one 30 ms speech frame.
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- ++seq_no;
- timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
- }
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- // Increase clock.
- t_ms += 10;
- }
-
- int32_t delay_after = timestamp - neteq_inst_->getSpeechTimeStamp();
- // Compare delay before and after, and make sure it differs less than 20 ms.
- EXPECT_LE(delay_after, delay_before + 20 * 16);
- EXPECT_GE(delay_after, delay_before - 20 * 16);
-}
-
-TEST_F(NetEqDecodingTest, NoInputDataStereo) {
- void *ms_info;
- ms_info = malloc(WebRtcNetEQ_GetMasterSlaveInfoSize());
- neteq_inst_->setMaster();
-
- // Slave instance without decoders (because it is easier).
- WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd - 1];
- usedCodec[0] = kDecoderPCMu;
- NETEQTEST_NetEQClass* slave_inst =
- new NETEQTEST_NetEQClass(usedCodec, 1, 8000, kTCPLargeJitter);
- ASSERT_TRUE(slave_inst);
- NETEQTEST_Decoder* dec = new decoder_PCMU(0);
- ASSERT_TRUE(dec != NULL);
- dec->loadToNetEQ(*slave_inst);
- slave_inst->setSlave();
-
- // Pull out data.
- const int kNumFrames = 100;
- for (int i = 0; i < kNumFrames; ++i) {
- ASSERT_TRUE(kBlockSize8kHz == neteq_inst_->recOut(out_data_, ms_info));
- ASSERT_TRUE(kBlockSize8kHz == slave_inst->recOut(out_data_, ms_info));
- }
-
- delete dec;
- delete slave_inst;
- free(ms_info);
-}
-
-TEST_F(NetEqDecodingTest, TestExtraDelay) {
- static const int kNumFrames = 120000; // Needed for convergence.
- int frame_index = 0;
- static const int kFrameSizeSamples = 30 * 16;
- static const int kPayloadBytes = kFrameSizeSamples * 2;
- test::InputAudioFile input_file(
- webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"));
- int16_t input[kFrameSizeSamples];
- // Buffers of NetEq cannot accommodate larger delays for PCM16.
- static const int kExtraDelayMs = 3200;
- ASSERT_EQ(0, WebRtcNetEQ_SetExtraDelay(neteq_inst_->instance(),
- kExtraDelayMs));
- for (int i = 0; i < kNumFrames; ++i) {
- ASSERT_TRUE(input_file.Read(kFrameSizeSamples, input));
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(frame_index, frame_index * kFrameSizeSamples, &rtp_info);
- uint8_t* payload = reinterpret_cast<uint8_t*>(input);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- ++frame_index;
- // Pull out data.
- for (int j = 0; j < 3; ++j) {
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
- if (i % 100 == 0) {
- WebRtcNetEQ_NetworkStatistics network_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
- &network_stats));
- const int expected_lower_limit =
- std::min(i * 0.083 - 210, 0.9 * network_stats.preferredBufferSize);
- EXPECT_GE(network_stats.currentBufferSize, expected_lower_limit);
- const int expected_upper_limit =
- std::min(i * 0.083 + 255, 1.2 * network_stats.preferredBufferSize);
- EXPECT_LE(network_stats.currentBufferSize, expected_upper_limit);
- }
- }
-}
-
-void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
- uint32_t start_timestamp,
- const std::set<uint16_t>& drop_seq_numbers) {
- uint16_t seq_no = start_seq_no;
- uint32_t timestamp = start_timestamp;
- const int kFrameSizeMs = 30;
- const int kSamples = kFrameSizeMs * 16;
- const int kPayloadBytes = kSamples * 2;
- double next_input_time_ms = 0.0;
-
- // Insert speech for 1 second.
- const int kSpeechDurationMs = 1000;
- for (double t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one 30 ms speech frame.
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- if (drop_seq_numbers.find(seq_no) == drop_seq_numbers.end()) {
- // This sequence number was not in the set to drop. Insert it.
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- }
- ++seq_no;
- timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs);
- WebRtcNetEQ_NetworkStatistics network_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
- &network_stats));
- // Expect preferred and actual buffer size to be no more than 2 frames.
- EXPECT_LE(network_stats.preferredBufferSize, kFrameSizeMs * 2);
- EXPECT_LE(network_stats.currentBufferSize, kFrameSizeMs * 2);
- }
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- // Expect delay (in samples) to be less than 2 packets.
- EXPECT_LE(timestamp - neteq_inst_->getSpeechTimeStamp(),
- static_cast<uint32_t>(kSamples * 2));
- }
-}
-
-TEST_F(NetEqDecodingTest, SequenceNumberWrap) {
- // Start with a sequence number that will soon wrap.
- std::set<uint16_t> drop_seq_numbers; // Don't drop any packets.
- WrapTest(0xFFFF - 5, 0, drop_seq_numbers);
-}
-
-TEST_F(NetEqDecodingTest, SequenceNumberWrapAndDrop) {
- // Start with a sequence number that will soon wrap.
- std::set<uint16_t> drop_seq_numbers;
- drop_seq_numbers.insert(0xFFFF);
- drop_seq_numbers.insert(0x0);
- WrapTest(0xFFFF - 5, 0, drop_seq_numbers);
-}
-
-TEST_F(NetEqDecodingTest, TimestampWrap) {
- // Start with a timestamp that will soon wrap.
- std::set<uint16_t> drop_seq_numbers;
- WrapTest(0, 0xFFFFFFFF - 1000, drop_seq_numbers);
-}
-
-TEST_F(NetEqDecodingTest, TimestampAndSequenceNumberWrap) {
- // Start with a timestamp and a sequence number that will wrap at the same
- // time.
- std::set<uint16_t> drop_seq_numbers;
- WrapTest(0xFFFF - 2, 0xFFFFFFFF - 1000, drop_seq_numbers);
-}
-
-} // namespace webrtc
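The Process() helper above captures the timing contract that all of these tests rely on: packets are inserted with WebRtcNetEQ_RecInRTPStruct() once their arrival time has passed, and exactly one 10 ms block is pulled per simulated tick. The stand-alone sketch below restates that loop against the raw C API; the helper is hypothetical, the payload is plain silence, and the WebRtcNetEQ_RecOut() signature (instance, output buffer, returned length in samples) is assumed from webrtc_neteq.h rather than taken from this diff.

// Hedged sketch of the 10 ms drive loop used throughout the tests above.
// Assumes a configured NetEQ instance, 16 kHz PCM16b payloads (payload type
// 94, as in the tests), and the WebRtcNetEQ_RecOut() signature from
// webrtc_neteq.h.
#include <cstdint>
#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h"

void DriveTenMsLoop(void* inst, int duration_ms) {
  const int kSamplesPerFrame = 30 * 16;      // One 30 ms frame at 16 kHz.
  const int kPayloadBytes = kSamplesPerFrame * 2;
  uint8_t payload[kPayloadBytes] = {0};      // Silence is enough for a sketch.
  int16_t out[kSamplesPerFrame];             // Large enough for one RecOut.
  uint16_t seq_no = 0;
  uint32_t timestamp = 0;
  int next_packet_ms = 0;
  for (int t_ms = 0; t_ms < duration_ms; t_ms += 10) {
    while (next_packet_ms <= t_ms) {         // The next packet has "arrived".
      WebRtcNetEQ_RTPInfo rtp_info;
      rtp_info.sequenceNumber = seq_no++;
      rtp_info.timeStamp = timestamp;
      rtp_info.SSRC = 0x1234;                // Arbitrary SSRC, as in the tests.
      rtp_info.payloadType = 94;             // PCM16b wideband.
      rtp_info.markerBit = 0;
      WebRtcNetEQ_RecInRTPStruct(inst, &rtp_info, payload, kPayloadBytes, 0);
      timestamp += kSamplesPerFrame;
      next_packet_ms += 30;
    }
    int16_t out_len = 0;
    WebRtcNetEQ_RecOut(inst, out, &out_len); // Always pull 10 ms of audio.
  }
}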
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/neteq4/OWNERS
deleted file mode 100644
index d54559c4ad8..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-henrik.lundin@webrtc.org
-tina.legrand@webrtc.org
-turaj@webrtc.org
-minyue@webrtc.org
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.cc
deleted file mode 100644
index 88cfa4dad9b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.cc
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/accelerate.h"
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-
-namespace webrtc {
-
-Accelerate::ReturnCodes Accelerate::Process(
- const int16_t* input,
- size_t input_length,
- AudioMultiVector* output,
- int16_t* length_change_samples) {
- // Input length must be (almost) 30 ms.
- static const int k15ms = 120; // 15 ms = 120 samples at 8 kHz sample rate.
- if (num_channels_ == 0 || static_cast<int>(input_length) / num_channels_ <
- (2 * k15ms - 1) * fs_mult_) {
- // Length of input data too short to do accelerate. Simply move all data
- // from input to output.
- output->PushBackInterleaved(input, input_length);
- return kError;
- }
- return TimeStretch::Process(input, input_length, output,
- length_change_samples);
-}
-
-void Accelerate::SetParametersForPassiveSpeech(size_t /*len*/,
- int16_t* best_correlation,
- int* /*peak_index*/) const {
- // When the signal does not contain any active speech, the correlation does
- // not matter. Simply set it to zero.
- *best_correlation = 0;
-}
-
-Accelerate::ReturnCodes Accelerate::CheckCriteriaAndStretch(
- const int16_t* input, size_t input_length, size_t peak_index,
- int16_t best_correlation, bool active_speech,
- AudioMultiVector* output) const {
- // Check for strong correlation or passive speech.
- if ((best_correlation > kCorrelationThreshold) || !active_speech) {
- // Do accelerate operation by overlap add.
-
- // Pre-calculate common multiplication with |fs_mult_|.
- // 120 corresponds to 15 ms.
- size_t fs_mult_120 = fs_mult_ * 120;
-
- assert(fs_mult_120 >= peak_index); // Should be handled in Process().
- // Copy first part; 0 to 15 ms.
- output->PushBackInterleaved(input, fs_mult_120 * num_channels_);
- // Copy the |peak_index| starting at 15 ms to |temp_vector|.
- AudioMultiVector temp_vector(num_channels_);
- temp_vector.PushBackInterleaved(&input[fs_mult_120 * num_channels_],
- peak_index * num_channels_);
- // Cross-fade |temp_vector| onto the end of |output|.
- output->CrossFade(temp_vector, peak_index);
- // Copy the last unmodified part, 15 ms + pitch period until the end.
- output->PushBackInterleaved(
- &input[(fs_mult_120 + peak_index) * num_channels_],
- input_length - (fs_mult_120 + peak_index) * num_channels_);
-
- if (active_speech) {
- return kSuccess;
- } else {
- return kSuccessLowEnergy;
- }
- } else {
- // Accelerate not allowed. Simply move all data from input to output.
- output->PushBackInterleaved(input, input_length);
- return kNoStretch;
- }
-}
-
-} // namespace webrtc
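CheckCriteriaAndStretch() above removes exactly one pitch period (|peak_index| samples per channel) by overlap-adding the audio around the 15 ms anchor. The mono sketch below shows only that index arithmetic with a simple linear cross-fade; the function name and the linear weights are illustrative, while the real class operates on an AudioMultiVector and reuses the correlation machinery in TimeStretch.

// Illustrative mono restatement of the overlap-add step in Accelerate:
// remove one pitch period (|period| samples) at the 15 ms anchor. The linear
// cross-fade weights are an assumption; the real code uses
// AudioMultiVector::CrossFade().
#include <cstddef>
#include <cstdint>

size_t AccelerateMonoSketch(const int16_t* in, size_t in_len,
                            size_t anchor,   // 15 ms in samples (120 * fs_mult).
                            size_t period,   // Pitch period, <= anchor.
                            int16_t* out) {  // Must hold in_len - period samples.
  if (period == 0 || anchor < period || in_len < anchor + period) return 0;
  // 1) Copy the first 15 ms unchanged.
  for (size_t i = 0; i < anchor; ++i) out[i] = in[i];
  // 2) Cross-fade the |period| samples before the anchor with the |period|
  //    samples after it; this is where one pitch period disappears.
  for (size_t i = 0; i < period; ++i) {
    const int32_t a = in[anchor - period + i];
    const int32_t b = in[anchor + i];
    out[anchor - period + i] = static_cast<int16_t>(
        (a * static_cast<int32_t>(period - i) + b * static_cast<int32_t>(i)) /
        static_cast<int32_t>(period));
  }
  // 3) Copy the unmodified tail, from 15 ms + one period to the end.
  for (size_t i = anchor + period; i < in_len; ++i) out[i - period] = in[i];
  return in_len - period;  // Number of output samples.
}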
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.h
deleted file mode 100644
index 83e3e384543..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_ACCELERATE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_ACCELERATE_H_
-
-#include <assert.h>
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/time_stretch.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class BackgroundNoise;
-
-// This class implements the Accelerate operation. Most of the work is done
-// in the base class TimeStretch, which is shared with the PreemptiveExpand
-// operation. In the Accelerate class, the operations that are specific to
-// Accelerate are implemented.
-class Accelerate : public TimeStretch {
- public:
- Accelerate(int sample_rate_hz, size_t num_channels,
- const BackgroundNoise& background_noise)
- : TimeStretch(sample_rate_hz, num_channels, background_noise) {
- }
-
- virtual ~Accelerate() {}
-
- // This method performs the actual Accelerate operation. The samples are
- // read from |input|, of length |input_length| elements, and are written to
- // |output|. The number of samples removed through time-stretching is
- // provided in the output |length_change_samples|. The method returns
- // the outcome of the operation as an enumerator value.
- ReturnCodes Process(const int16_t* input,
- size_t input_length,
- AudioMultiVector* output,
- int16_t* length_change_samples);
-
- protected:
- // Sets the parameters |best_correlation| and |peak_index| to suitable
- // values when the signal contains no active speech.
- virtual void SetParametersForPassiveSpeech(size_t len,
- int16_t* best_correlation,
- int* peak_index) const OVERRIDE;
-
- // Checks the criteria for performing the time-stretching operation and,
- // if possible, performs the time-stretching.
- virtual ReturnCodes CheckCriteriaAndStretch(
- const int16_t* input, size_t input_length, size_t peak_index,
- int16_t best_correlation, bool active_speech,
- AudioMultiVector* output) const OVERRIDE;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(Accelerate);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_ACCELERATE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder.cc
deleted file mode 100644
index 35422e3f9f5..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder.cc
+++ /dev/null
@@ -1,254 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-
-#include <assert.h>
-
-#include "webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h"
-
-namespace webrtc {
-
-int AudioDecoder::DecodeRedundant(const uint8_t* encoded,
- size_t encoded_len,
- int16_t* decoded,
- SpeechType* speech_type) {
- return Decode(encoded, encoded_len, decoded, speech_type);
-}
-
-bool AudioDecoder::HasDecodePlc() const { return false; }
-
-int AudioDecoder::DecodePlc(int num_frames, int16_t* decoded) { return -1; }
-
-int AudioDecoder::IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp) {
- return 0;
-}
-
-int AudioDecoder::ErrorCode() { return 0; }
-
-int AudioDecoder::PacketDuration(const uint8_t* encoded, size_t encoded_len) {
- return kNotImplemented;
-}
-
-NetEqDecoder AudioDecoder::codec_type() const { return codec_type_; }
-
-bool AudioDecoder::CodecSupported(NetEqDecoder codec_type) {
- switch (codec_type) {
- case kDecoderPCMu:
- case kDecoderPCMa:
- case kDecoderPCMu_2ch:
- case kDecoderPCMa_2ch:
-#ifdef WEBRTC_CODEC_ILBC
- case kDecoderILBC:
-#endif
-#if defined(WEBRTC_CODEC_ISACFX) || defined(WEBRTC_CODEC_ISAC)
- case kDecoderISAC:
-#endif
-#ifdef WEBRTC_CODEC_ISAC
- case kDecoderISACswb:
- case kDecoderISACfb:
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- case kDecoderPCM16B:
- case kDecoderPCM16Bwb:
- case kDecoderPCM16Bswb32kHz:
- case kDecoderPCM16Bswb48kHz:
- case kDecoderPCM16B_2ch:
- case kDecoderPCM16Bwb_2ch:
- case kDecoderPCM16Bswb32kHz_2ch:
- case kDecoderPCM16Bswb48kHz_2ch:
- case kDecoderPCM16B_5ch:
-#endif
-#ifdef WEBRTC_CODEC_G722
- case kDecoderG722:
- case kDecoderG722_2ch:
-#endif
-#ifdef WEBRTC_CODEC_CELT
- case kDecoderCELT_32:
- case kDecoderCELT_32_2ch:
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- case kDecoderOpus:
- case kDecoderOpus_2ch:
-#endif
- case kDecoderRED:
- case kDecoderAVT:
- case kDecoderCNGnb:
- case kDecoderCNGwb:
- case kDecoderCNGswb32kHz:
- case kDecoderCNGswb48kHz:
- case kDecoderArbitrary: {
- return true;
- }
- default: {
- return false;
- }
- }
-}
-
-int AudioDecoder::CodecSampleRateHz(NetEqDecoder codec_type) {
- switch (codec_type) {
- case kDecoderPCMu:
- case kDecoderPCMa:
- case kDecoderPCMu_2ch:
- case kDecoderPCMa_2ch:
-#ifdef WEBRTC_CODEC_ILBC
- case kDecoderILBC:
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- case kDecoderPCM16B:
- case kDecoderPCM16B_2ch:
- case kDecoderPCM16B_5ch:
-#endif
- case kDecoderCNGnb: {
- return 8000;
- }
-#if defined(WEBRTC_CODEC_ISACFX) || defined(WEBRTC_CODEC_ISAC)
- case kDecoderISAC:
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- case kDecoderPCM16Bwb:
- case kDecoderPCM16Bwb_2ch:
-#endif
-#ifdef WEBRTC_CODEC_G722
- case kDecoderG722:
- case kDecoderG722_2ch:
-#endif
- case kDecoderCNGwb: {
- return 16000;
- }
-#ifdef WEBRTC_CODEC_ISAC
- case kDecoderISACswb:
- case kDecoderISACfb:
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- case kDecoderPCM16Bswb32kHz:
- case kDecoderPCM16Bswb32kHz_2ch:
-#endif
-#ifdef WEBRTC_CODEC_CELT
- case kDecoderCELT_32:
- case kDecoderCELT_32_2ch:
-#endif
- case kDecoderCNGswb32kHz: {
- return 32000;
- }
-#ifdef WEBRTC_CODEC_PCM16
- case kDecoderPCM16Bswb48kHz:
- case kDecoderPCM16Bswb48kHz_2ch: {
- return 48000;
- }
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- case kDecoderOpus:
- case kDecoderOpus_2ch: {
- return 32000;
- }
-#endif
- case kDecoderCNGswb48kHz: {
- // TODO(tlegrand): Remove limitation once ACM has full 48 kHz support.
- return 32000;
- }
- default: {
- return -1; // Undefined sample rate.
- }
- }
-}
-
-AudioDecoder* AudioDecoder::CreateAudioDecoder(NetEqDecoder codec_type) {
- if (!CodecSupported(codec_type)) {
- return NULL;
- }
- switch (codec_type) {
- case kDecoderPCMu:
- return new AudioDecoderPcmU;
- case kDecoderPCMa:
- return new AudioDecoderPcmA;
- case kDecoderPCMu_2ch:
- return new AudioDecoderPcmUMultiCh(2);
- case kDecoderPCMa_2ch:
- return new AudioDecoderPcmAMultiCh(2);
-#ifdef WEBRTC_CODEC_ILBC
- case kDecoderILBC:
- return new AudioDecoderIlbc;
-#endif
-#if defined(WEBRTC_CODEC_ISACFX)
- case kDecoderISAC:
- return new AudioDecoderIsacFix;
-#elif defined(WEBRTC_CODEC_ISAC)
- case kDecoderISAC:
- return new AudioDecoderIsac;
-#endif
-#ifdef WEBRTC_CODEC_ISAC
- case kDecoderISACswb:
- return new AudioDecoderIsacSwb;
- case kDecoderISACfb:
- return new AudioDecoderIsacFb;
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- case kDecoderPCM16B:
- case kDecoderPCM16Bwb:
- case kDecoderPCM16Bswb32kHz:
- case kDecoderPCM16Bswb48kHz:
- return new AudioDecoderPcm16B(codec_type);
- case kDecoderPCM16B_2ch:
- case kDecoderPCM16Bwb_2ch:
- case kDecoderPCM16Bswb32kHz_2ch:
- case kDecoderPCM16Bswb48kHz_2ch:
- case kDecoderPCM16B_5ch:
- return new AudioDecoderPcm16BMultiCh(codec_type);
-#endif
-#ifdef WEBRTC_CODEC_G722
- case kDecoderG722:
- return new AudioDecoderG722;
- case kDecoderG722_2ch:
- return new AudioDecoderG722Stereo;
-#endif
-#ifdef WEBRTC_CODEC_CELT
- case kDecoderCELT_32:
- case kDecoderCELT_32_2ch:
- return new AudioDecoderCelt(codec_type);
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- case kDecoderOpus:
- case kDecoderOpus_2ch:
- return new AudioDecoderOpus(codec_type);
-#endif
- case kDecoderCNGnb:
- case kDecoderCNGwb:
- case kDecoderCNGswb32kHz:
- case kDecoderCNGswb48kHz:
- return new AudioDecoderCng(codec_type);
- case kDecoderRED:
- case kDecoderAVT:
- case kDecoderArbitrary:
- default: {
- return NULL;
- }
- }
-}
-
-AudioDecoder::SpeechType AudioDecoder::ConvertSpeechType(int16_t type) {
- switch (type) {
- case 0: // TODO(hlundin): Both iSAC and Opus return 0 for speech.
- case 1:
- return kSpeech;
- case 2:
- return kComfortNoise;
- default:
- assert(false);
- return kSpeech;
- }
-}
-
-} // namespace webrtc
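Not part of the diff above — purely an illustrative sketch: the deleted block removes NetEq4's codec capability queries and the CreateAudioDecoder() factory, so the following shows how a caller would combine CodecSupported(), CodecSampleRateHz() and CreateAudioDecoder(). The helper name CreateDecoderExample and the assumption that the NetEq4 interface header is on the include path are ours.

#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"

// Illustrative only; returns a decoder owned by the caller, or NULL if the
// codec is compiled out or has no stand-alone decoder (RED, AVT, arbitrary).
webrtc::AudioDecoder* CreateDecoderExample(webrtc::NetEqDecoder codec) {
  if (!webrtc::AudioDecoder::CodecSupported(codec)) {
    return NULL;  // E.g. kDecoderCELT_32 without WEBRTC_CODEC_CELT.
  }
  // 8000, 16000, 32000 or 48000 depending on |codec|; -1 for RED, AVT and
  // arbitrary payloads, which have no sample rate of their own.
  const int fs_hz = webrtc::AudioDecoder::CodecSampleRateHz(codec);
  (void)fs_hz;
  webrtc::AudioDecoder* decoder =
      webrtc::AudioDecoder::CreateAudioDecoder(codec);
  if (decoder) {
    decoder->Init();
  }
  return decoder;
}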
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc
deleted file mode 100644
index 5296a1bd0f9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc
+++ /dev/null
@@ -1,491 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h"
-
-#include <assert.h>
-#include <string.h> // memmove
-
-#ifdef WEBRTC_CODEC_CELT
-#include "webrtc/modules/audio_coding/codecs/celt/include/celt_interface.h"
-#endif
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
-#ifdef WEBRTC_CODEC_G722
-#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
-#endif
-#ifdef WEBRTC_CODEC_ILBC
-#include "webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h"
-#endif
-#ifdef WEBRTC_CODEC_ISACFX
-#include "webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h"
-#endif
-#ifdef WEBRTC_CODEC_ISAC
-#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
-#endif
-#ifdef WEBRTC_CODEC_OPUS
-#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
-#endif
-#ifdef WEBRTC_CODEC_PCM16
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#endif
-
-namespace webrtc {
-
-// PCMu
-int AudioDecoderPcmU::Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = WebRtcG711_DecodeU(
- state_, reinterpret_cast<int16_t*>(const_cast<uint8_t*>(encoded)),
- static_cast<int16_t>(encoded_len), decoded, &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
-}
-
-int AudioDecoderPcmU::PacketDuration(const uint8_t* encoded,
- size_t encoded_len) {
- // One encoded byte per sample per channel.
- return static_cast<int>(encoded_len / channels_);
-}
-
-// PCMa
-int AudioDecoderPcmA::Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = WebRtcG711_DecodeA(
- state_, reinterpret_cast<int16_t*>(const_cast<uint8_t*>(encoded)),
- static_cast<int16_t>(encoded_len), decoded, &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
-}
-
-int AudioDecoderPcmA::PacketDuration(const uint8_t* encoded,
- size_t encoded_len) {
- // One encoded byte per sample per channel.
- return static_cast<int>(encoded_len / channels_);
-}
-
-// PCM16B
-#ifdef WEBRTC_CODEC_PCM16
-AudioDecoderPcm16B::AudioDecoderPcm16B(enum NetEqDecoder type)
- : AudioDecoder(type) {
- assert(type == kDecoderPCM16B ||
- type == kDecoderPCM16Bwb ||
- type == kDecoderPCM16Bswb32kHz ||
- type == kDecoderPCM16Bswb48kHz);
-}
-
-int AudioDecoderPcm16B::Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = WebRtcPcm16b_DecodeW16(
- state_, reinterpret_cast<int16_t*>(const_cast<uint8_t*>(encoded)),
- static_cast<int16_t>(encoded_len), decoded, &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
-}
-
-int AudioDecoderPcm16B::PacketDuration(const uint8_t* encoded,
- size_t encoded_len) {
- // Two encoded bytes per sample per channel.
- return static_cast<int>(encoded_len / (2 * channels_));
-}
-
-AudioDecoderPcm16BMultiCh::AudioDecoderPcm16BMultiCh(
- enum NetEqDecoder type)
- : AudioDecoderPcm16B(kDecoderPCM16B) { // This will be changed below.
- codec_type_ = type; // Changing to actual type here.
- switch (codec_type_) {
- case kDecoderPCM16B_2ch:
- case kDecoderPCM16Bwb_2ch:
- case kDecoderPCM16Bswb32kHz_2ch:
- case kDecoderPCM16Bswb48kHz_2ch:
- channels_ = 2;
- break;
- case kDecoderPCM16B_5ch:
- channels_ = 5;
- break;
- default:
- assert(false);
- }
-}
-#endif
-
-// iLBC
-#ifdef WEBRTC_CODEC_ILBC
-AudioDecoderIlbc::AudioDecoderIlbc() : AudioDecoder(kDecoderILBC) {
- WebRtcIlbcfix_DecoderCreate(reinterpret_cast<iLBC_decinst_t**>(&state_));
-}
-
-AudioDecoderIlbc::~AudioDecoderIlbc() {
- WebRtcIlbcfix_DecoderFree(static_cast<iLBC_decinst_t*>(state_));
-}
-
-int AudioDecoderIlbc::Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = WebRtcIlbcfix_Decode(static_cast<iLBC_decinst_t*>(state_),
- reinterpret_cast<const int16_t*>(encoded),
- static_cast<int16_t>(encoded_len), decoded,
- &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
-}
-
-int AudioDecoderIlbc::DecodePlc(int num_frames, int16_t* decoded) {
- return WebRtcIlbcfix_NetEqPlc(static_cast<iLBC_decinst_t*>(state_),
- decoded, num_frames);
-}
-
-int AudioDecoderIlbc::Init() {
- return WebRtcIlbcfix_Decoderinit30Ms(static_cast<iLBC_decinst_t*>(state_));
-}
-#endif
-
-// iSAC float
-#ifdef WEBRTC_CODEC_ISAC
-AudioDecoderIsac::AudioDecoderIsac() : AudioDecoder(kDecoderISAC) {
- WebRtcIsac_Create(reinterpret_cast<ISACStruct**>(&state_));
- WebRtcIsac_SetDecSampRate(static_cast<ISACStruct*>(state_), 16000);
-}
-
-AudioDecoderIsac::~AudioDecoderIsac() {
- WebRtcIsac_Free(static_cast<ISACStruct*>(state_));
-}
-
-int AudioDecoderIsac::Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = WebRtcIsac_Decode(static_cast<ISACStruct*>(state_),
- reinterpret_cast<const uint16_t*>(encoded),
- static_cast<int16_t>(encoded_len), decoded,
- &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
-}
-
-int AudioDecoderIsac::DecodeRedundant(const uint8_t* encoded,
- size_t encoded_len, int16_t* decoded,
- SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = WebRtcIsac_DecodeRcu(static_cast<ISACStruct*>(state_),
- reinterpret_cast<const uint16_t*>(encoded),
- static_cast<int16_t>(encoded_len), decoded,
- &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
-}
-
-int AudioDecoderIsac::DecodePlc(int num_frames, int16_t* decoded) {
- return WebRtcIsac_DecodePlc(static_cast<ISACStruct*>(state_),
- decoded, num_frames);
-}
-
-int AudioDecoderIsac::Init() {
- return WebRtcIsac_DecoderInit(static_cast<ISACStruct*>(state_));
-}
-
-int AudioDecoderIsac::IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp) {
- return WebRtcIsac_UpdateBwEstimate(static_cast<ISACStruct*>(state_),
- reinterpret_cast<const uint16_t*>(payload),
- static_cast<int32_t>(payload_len),
- rtp_sequence_number,
- rtp_timestamp,
- arrival_timestamp);
-}
-
-int AudioDecoderIsac::ErrorCode() {
- return WebRtcIsac_GetErrorCode(static_cast<ISACStruct*>(state_));
-}
-
-// iSAC SWB
-AudioDecoderIsacSwb::AudioDecoderIsacSwb() : AudioDecoderIsac() {
- codec_type_ = kDecoderISACswb;
- WebRtcIsac_SetDecSampRate(static_cast<ISACStruct*>(state_), 32000);
-}
-
-// iSAC FB
-AudioDecoderIsacFb::AudioDecoderIsacFb() : AudioDecoderIsacSwb() {
- codec_type_ = kDecoderISACfb;
-}
-#endif
-
-// iSAC fix
-#ifdef WEBRTC_CODEC_ISACFX
-AudioDecoderIsacFix::AudioDecoderIsacFix() : AudioDecoder(kDecoderISAC) {
- WebRtcIsacfix_Create(reinterpret_cast<ISACFIX_MainStruct**>(&state_));
-}
-
-AudioDecoderIsacFix::~AudioDecoderIsacFix() {
- WebRtcIsacfix_Free(static_cast<ISACFIX_MainStruct*>(state_));
-}
-
-int AudioDecoderIsacFix::Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = WebRtcIsacfix_Decode(static_cast<ISACFIX_MainStruct*>(state_),
- reinterpret_cast<const uint16_t*>(encoded),
- static_cast<int16_t>(encoded_len), decoded,
- &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
-}
-
-int AudioDecoderIsacFix::Init() {
- return WebRtcIsacfix_DecoderInit(static_cast<ISACFIX_MainStruct*>(state_));
-}
-
-int AudioDecoderIsacFix::IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp) {
- return WebRtcIsacfix_UpdateBwEstimate(
- static_cast<ISACFIX_MainStruct*>(state_),
- reinterpret_cast<const uint16_t*>(payload),
- static_cast<int32_t>(payload_len),
- rtp_sequence_number, rtp_timestamp, arrival_timestamp);
-}
-
-int AudioDecoderIsacFix::ErrorCode() {
- return WebRtcIsacfix_GetErrorCode(static_cast<ISACFIX_MainStruct*>(state_));
-}
-#endif
-
-// G.722
-#ifdef WEBRTC_CODEC_G722
-AudioDecoderG722::AudioDecoderG722() : AudioDecoder(kDecoderG722) {
- WebRtcG722_CreateDecoder(reinterpret_cast<G722DecInst**>(&state_));
-}
-
-AudioDecoderG722::~AudioDecoderG722() {
- WebRtcG722_FreeDecoder(static_cast<G722DecInst*>(state_));
-}
-
-int AudioDecoderG722::Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = WebRtcG722_Decode(
- static_cast<G722DecInst*>(state_),
- const_cast<int16_t*>(reinterpret_cast<const int16_t*>(encoded)),
- static_cast<int16_t>(encoded_len), decoded, &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
-}
-
-int AudioDecoderG722::Init() {
- return WebRtcG722_DecoderInit(static_cast<G722DecInst*>(state_));
-}
-
-int AudioDecoderG722::PacketDuration(const uint8_t* encoded,
- size_t encoded_len) {
- // 1/2 encoded byte per sample per channel.
- return static_cast<int>(2 * encoded_len / channels_);
-}
-
-AudioDecoderG722Stereo::AudioDecoderG722Stereo()
- : AudioDecoderG722(),
- state_left_(state_), // Base member |state_| is used for left channel.
- state_right_(NULL) {
- channels_ = 2;
- // |state_left_| already created by the base class AudioDecoderG722.
- WebRtcG722_CreateDecoder(reinterpret_cast<G722DecInst**>(&state_right_));
-}
-
-AudioDecoderG722Stereo::~AudioDecoderG722Stereo() {
- // |state_left_| will be freed by the base class AudioDecoderG722.
- WebRtcG722_FreeDecoder(static_cast<G722DecInst*>(state_right_));
-}
-
-int AudioDecoderG722Stereo::Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- // De-interleave the bit-stream into two separate payloads.
- uint8_t* encoded_deinterleaved = new uint8_t[encoded_len];
- SplitStereoPacket(encoded, encoded_len, encoded_deinterleaved);
- // Decode left and right.
- int16_t ret = WebRtcG722_Decode(
- static_cast<G722DecInst*>(state_left_),
- reinterpret_cast<int16_t*>(encoded_deinterleaved),
- static_cast<int16_t>(encoded_len / 2), decoded, &temp_type);
- if (ret >= 0) {
- int decoded_len = ret;
- ret = WebRtcG722_Decode(
- static_cast<G722DecInst*>(state_right_),
- reinterpret_cast<int16_t*>(&encoded_deinterleaved[encoded_len / 2]),
- static_cast<int16_t>(encoded_len / 2), &decoded[decoded_len], &temp_type);
- if (ret == decoded_len) {
- decoded_len += ret;
- // Interleave output.
- for (int k = decoded_len / 2; k < decoded_len; k++) {
- int16_t temp = decoded[k];
- memmove(&decoded[2 * k - decoded_len + 2],
- &decoded[2 * k - decoded_len + 1],
- (decoded_len - k - 1) * sizeof(int16_t));
- decoded[2 * k - decoded_len + 1] = temp;
- }
- ret = decoded_len; // Return total number of samples.
- }
- }
- *speech_type = ConvertSpeechType(temp_type);
- delete [] encoded_deinterleaved;
- return ret;
-}
-
-int AudioDecoderG722Stereo::Init() {
- int ret = WebRtcG722_DecoderInit(static_cast<G722DecInst*>(state_right_));
- if (ret != 0) {
- return ret;
- }
- return AudioDecoderG722::Init();
-}
-
-// Split the stereo packet and place the left and right channels after each
-// other in the output array.
-void AudioDecoderG722Stereo::SplitStereoPacket(const uint8_t* encoded,
- size_t encoded_len,
- uint8_t* encoded_deinterleaved) {
- assert(encoded);
- // Regroup the 4 bits/sample so |l1 l2| |r1 r2| |l3 l4| |r3 r4| ...,
- // where "lx" is 4 bits representing left sample number x, and "rx" right
- // sample. Two samples fit in one byte, represented with |...|.
- for (size_t i = 0; i + 1 < encoded_len; i += 2) {
- uint8_t right_byte = ((encoded[i] & 0x0F) << 4) + (encoded[i + 1] & 0x0F);
- encoded_deinterleaved[i] = (encoded[i] & 0xF0) + (encoded[i + 1] >> 4);
- encoded_deinterleaved[i + 1] = right_byte;
- }
-
- // Move one byte representing right channel each loop, and place it at the
- // end of the bytestream vector. After looping the data is reordered to:
- // |l1 l2| |l3 l4| ... |l(N-1) lN| |r1 r2| |r3 r4| ... |r(N-1) r(N)|,
- // where N is the total number of samples.
- for (size_t i = 0; i < encoded_len / 2; i++) {
- uint8_t right_byte = encoded_deinterleaved[i + 1];
- memmove(&encoded_deinterleaved[i + 1], &encoded_deinterleaved[i + 2],
- encoded_len - i - 2);
- encoded_deinterleaved[encoded_len - 1] = right_byte;
- }
-}
-#endif
-
-// CELT
-#ifdef WEBRTC_CODEC_CELT
-AudioDecoderCelt::AudioDecoderCelt(enum NetEqDecoder type)
- : AudioDecoder(type) {
- assert(type == kDecoderCELT_32 || type == kDecoderCELT_32_2ch);
- if (type == kDecoderCELT_32) {
- channels_ = 1;
- } else {
- channels_ = 2;
- }
- WebRtcCelt_CreateDec(reinterpret_cast<CELT_decinst_t**>(&state_),
- static_cast<int>(channels_));
-}
-
-AudioDecoderCelt::~AudioDecoderCelt() {
- WebRtcCelt_FreeDec(static_cast<CELT_decinst_t*>(state_));
-}
-
-int AudioDecoderCelt::Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type = 1; // Default to speech.
- int ret = WebRtcCelt_DecodeUniversal(static_cast<CELT_decinst_t*>(state_),
- encoded, static_cast<int>(encoded_len),
- decoded, &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- if (ret < 0) {
- return -1;
- }
- // Return the total number of samples.
- return ret * static_cast<int>(channels_);
-}
-
-int AudioDecoderCelt::Init() {
- return WebRtcCelt_DecoderInit(static_cast<CELT_decinst_t*>(state_));
-}
-
-bool AudioDecoderCelt::HasDecodePlc() const { return true; }
-
-int AudioDecoderCelt::DecodePlc(int num_frames, int16_t* decoded) {
- int ret = WebRtcCelt_DecodePlc(static_cast<CELT_decinst_t*>(state_),
- decoded, num_frames);
- if (ret < 0) {
- return -1;
- }
- // Return the total number of samples.
- return ret * static_cast<int>(channels_);
-}
-#endif
-
-// Opus
-#ifdef WEBRTC_CODEC_OPUS
-AudioDecoderOpus::AudioDecoderOpus(enum NetEqDecoder type)
- : AudioDecoder(type) {
- if (type == kDecoderOpus_2ch) {
- channels_ = 2;
- } else {
- channels_ = 1;
- }
- WebRtcOpus_DecoderCreate(reinterpret_cast<OpusDecInst**>(&state_),
- static_cast<int>(channels_));
-}
-
-AudioDecoderOpus::~AudioDecoderOpus() {
- WebRtcOpus_DecoderFree(static_cast<OpusDecInst*>(state_));
-}
-
-int AudioDecoderOpus::Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = WebRtcOpus_DecodeNew(static_cast<OpusDecInst*>(state_), encoded,
- static_cast<int16_t>(encoded_len), decoded,
- &temp_type);
- if (ret > 0)
- ret *= static_cast<int16_t>(channels_); // Return total number of samples.
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
-}
-
-int AudioDecoderOpus::Init() {
- return WebRtcOpus_DecoderInitNew(static_cast<OpusDecInst*>(state_));
-}
-
-int AudioDecoderOpus::PacketDuration(const uint8_t* encoded,
- size_t encoded_len) {
- return WebRtcOpus_DurationEst(static_cast<OpusDecInst*>(state_),
- encoded, static_cast<int>(encoded_len));
-}
-#endif
-
-AudioDecoderCng::AudioDecoderCng(enum NetEqDecoder type)
- : AudioDecoder(type) {
- assert(type == kDecoderCNGnb || type == kDecoderCNGwb ||
- type == kDecoderCNGswb32kHz || type == kDecoderCNGswb48kHz);
- WebRtcCng_CreateDec(reinterpret_cast<CNG_dec_inst**>(&state_));
- assert(state_);
-}
-
-AudioDecoderCng::~AudioDecoderCng() {
- if (state_) {
- WebRtcCng_FreeDec(static_cast<CNG_dec_inst*>(state_));
- }
-}
-
-int AudioDecoderCng::Init() {
- assert(state_);
- return WebRtcCng_InitDec(static_cast<CNG_dec_inst*>(state_));
-}
-
-} // namespace webrtc
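Not part of the diff above — an illustration only: the comments in SplitStereoPacket() describe the 4-bit regrouping in words, and the standalone sketch below performs the same reordering on a two-byte payload. The function name SplitG722StereoSketch and the sample values are invented for the example.

#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

// Reorders |l r| |l r| ... into |l l| ... |r r| ..., mirroring the two loops
// of AudioDecoderG722Stereo::SplitStereoPacket.
static void SplitG722StereoSketch(const uint8_t* encoded, size_t encoded_len,
                                  uint8_t* out) {
  // Step 1: regroup nibbles so even bytes hold left samples, odd bytes right.
  for (size_t i = 0; i + 1 < encoded_len; i += 2) {
    uint8_t right_byte = ((encoded[i] & 0x0F) << 4) + (encoded[i + 1] & 0x0F);
    out[i] = (encoded[i] & 0xF0) + (encoded[i + 1] >> 4);
    out[i + 1] = right_byte;
  }
  // Step 2: move each right-channel byte to the end of the buffer.
  for (size_t i = 0; i < encoded_len / 2; i++) {
    uint8_t right_byte = out[i + 1];
    memmove(&out[i + 1], &out[i + 2], encoded_len - i - 2);
    out[encoded_len - 1] = right_byte;
  }
}

int main() {
  // Two bytes = four 4-bit samples: left A, right B, left C, right D.
  const uint8_t encoded[2] = {0xAB, 0xCD};
  uint8_t out[2];
  SplitG722StereoSketch(encoded, sizeof(encoded), out);
  assert(out[0] == 0xAC);  // Left channel: samples A and C.
  assert(out[1] == 0xBD);  // Right channel: samples B and D.
  return 0;
}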
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h
deleted file mode 100644
index aa35db78082..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_DECODER_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_DECODER_IMPL_H_
-
-#include <assert.h>
-
-#ifndef AUDIO_DECODER_UNITTEST
-// If this is compiled as part of the audio_decoder_unittest, the codec
-// selection is made in the gypi file instead of in engine_configurations.h.
-#include "webrtc/engine_configurations.h"
-#endif
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class AudioDecoderPcmU : public AudioDecoder {
- public:
- AudioDecoderPcmU() : AudioDecoder(kDecoderPCMu) {}
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual int Init() { return 0; }
- virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmU);
-};
-
-class AudioDecoderPcmA : public AudioDecoder {
- public:
- AudioDecoderPcmA() : AudioDecoder(kDecoderPCMa) {}
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual int Init() { return 0; }
- virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmA);
-};
-
-class AudioDecoderPcmUMultiCh : public AudioDecoderPcmU {
- public:
- explicit AudioDecoderPcmUMultiCh(size_t channels) : AudioDecoderPcmU() {
- assert(channels > 0);
- channels_ = channels;
- }
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmUMultiCh);
-};
-
-class AudioDecoderPcmAMultiCh : public AudioDecoderPcmA {
- public:
- explicit AudioDecoderPcmAMultiCh(size_t channels) : AudioDecoderPcmA() {
- assert(channels > 0);
- channels_ = channels;
- }
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmAMultiCh);
-};
-
-#ifdef WEBRTC_CODEC_PCM16
-// This class handles all four types (i.e., sample rates) of PCM16B codecs.
-// The type is specified in the constructor parameter |type|.
-class AudioDecoderPcm16B : public AudioDecoder {
- public:
- explicit AudioDecoderPcm16B(enum NetEqDecoder type);
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual int Init() { return 0; }
- virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcm16B);
-};
-
-// This class handles all four types (i.e., sample rates) of PCM16B codecs.
-// The type is specified in the constructor parameter |type|, and the number
-// of channels is derived from the type.
-class AudioDecoderPcm16BMultiCh : public AudioDecoderPcm16B {
- public:
- explicit AudioDecoderPcm16BMultiCh(enum NetEqDecoder type);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcm16BMultiCh);
-};
-#endif
-
-#ifdef WEBRTC_CODEC_ILBC
-class AudioDecoderIlbc : public AudioDecoder {
- public:
- AudioDecoderIlbc();
- virtual ~AudioDecoderIlbc();
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual bool HasDecodePlc() const { return true; }
- virtual int DecodePlc(int num_frames, int16_t* decoded);
- virtual int Init();
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderIlbc);
-};
-#endif
-
-#ifdef WEBRTC_CODEC_ISAC
-class AudioDecoderIsac : public AudioDecoder {
- public:
- AudioDecoderIsac();
- virtual ~AudioDecoderIsac();
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual int DecodeRedundant(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual bool HasDecodePlc() const { return true; }
- virtual int DecodePlc(int num_frames, int16_t* decoded);
- virtual int Init();
- virtual int IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp);
- virtual int ErrorCode();
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderIsac);
-};
-
-class AudioDecoderIsacSwb : public AudioDecoderIsac {
- public:
- AudioDecoderIsacSwb();
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderIsacSwb);
-};
-
-class AudioDecoderIsacFb : public AudioDecoderIsacSwb {
- public:
- AudioDecoderIsacFb();
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderIsacFb);
-};
-#endif
-
-#ifdef WEBRTC_CODEC_ISACFX
-class AudioDecoderIsacFix : public AudioDecoder {
- public:
- AudioDecoderIsacFix();
- virtual ~AudioDecoderIsacFix();
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual int Init();
- virtual int IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp);
- virtual int ErrorCode();
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderIsacFix);
-};
-#endif
-
-#ifdef WEBRTC_CODEC_G722
-class AudioDecoderG722 : public AudioDecoder {
- public:
- AudioDecoderG722();
- virtual ~AudioDecoderG722();
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual bool HasDecodePlc() const { return false; }
- virtual int Init();
- virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722);
-};
-
-class AudioDecoderG722Stereo : public AudioDecoderG722 {
- public:
- AudioDecoderG722Stereo();
- virtual ~AudioDecoderG722Stereo();
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual int Init();
-
- private:
- // Splits the stereo-interleaved payload in |encoded| into separate payloads
- // for left and right channels. The separated payloads are written to
- // |encoded_deinterleaved|, which must hold at least |encoded_len| bytes.
- // The left channel starts at offset 0, while the right channel starts at
- // offset encoded_len / 2 into |encoded_deinterleaved|.
- void SplitStereoPacket(const uint8_t* encoded, size_t encoded_len,
- uint8_t* encoded_deinterleaved);
-
- void* const state_left_;
- void* state_right_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722Stereo);
-};
-#endif
-
-#ifdef WEBRTC_CODEC_CELT
-class AudioDecoderCelt : public AudioDecoder {
- public:
- explicit AudioDecoderCelt(enum NetEqDecoder type);
- virtual ~AudioDecoderCelt();
-
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual int Init();
- virtual bool HasDecodePlc() const;
- virtual int DecodePlc(int num_frames, int16_t* decoded);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderCelt);
-};
-#endif
-
-#ifdef WEBRTC_CODEC_OPUS
-class AudioDecoderOpus : public AudioDecoder {
- public:
- explicit AudioDecoderOpus(enum NetEqDecoder type);
- virtual ~AudioDecoderOpus();
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
- virtual int Init();
- virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderOpus);
-};
-#endif
-
-// AudioDecoderCng is a special type of AudioDecoder. It inherits from
-// AudioDecoder just to fit in the DecoderDatabase. None of the class methods
-// should be used, except constructor, destructor, and accessors.
-// TODO(hlundin): Consider the possibility to create a super-class to
-// AudioDecoder that is stored in DecoderDatabase. Then AudioDecoder and a
-// specific CngDecoder class could both inherit from that class.
-class AudioDecoderCng : public AudioDecoder {
- public:
- explicit AudioDecoderCng(enum NetEqDecoder type);
- virtual ~AudioDecoderCng();
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) { return -1; }
- virtual int Init();
- virtual int IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp) { return -1; }
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderCng);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_DECODER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittest.cc
deleted file mode 100644
index dbd9d121f4a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittest.cc
+++ /dev/null
@@ -1,931 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h"
-
-#include <assert.h>
-#include <stdlib.h>
-
-#include <string>
-
-#include "gtest/gtest.h"
-#include "webrtc/common_audio/resampler/include/resampler.h"
-#ifdef WEBRTC_CODEC_CELT
-#include "webrtc/modules/audio_coding/codecs/celt/include/celt_interface.h"
-#endif
-#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
-#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
-#include "webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h"
-#include "webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h"
-#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
-#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/system_wrappers/interface/data_log.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-class AudioDecoderTest : public ::testing::Test {
- protected:
- AudioDecoderTest()
- : input_fp_(NULL),
- input_(NULL),
- encoded_(NULL),
- decoded_(NULL),
- frame_size_(0),
- data_length_(0),
- encoded_bytes_(0),
- channels_(1),
- decoder_(NULL) {
- input_file_ = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/testfile32kHz.pcm";
- }
-
- virtual ~AudioDecoderTest() {}
-
- virtual void SetUp() {
- // Create arrays.
- ASSERT_GT(data_length_, 0u) << "The test must set data_length_ > 0";
- input_ = new int16_t[data_length_];
- encoded_ = new uint8_t[data_length_ * 2];
- decoded_ = new int16_t[data_length_ * channels_];
- // Open input file.
- input_fp_ = fopen(input_file_.c_str(), "rb");
- ASSERT_TRUE(input_fp_ != NULL) << "Failed to open file " << input_file_;
- // Read data to |input_|.
- ASSERT_EQ(data_length_,
- fread(input_, sizeof(int16_t), data_length_, input_fp_)) <<
- "Could not read enough data from file";
- // Logging to view input and output in Matlab.
- // Use 'gyp -Denable_data_logging=1' to enable logging.
- DataLog::CreateLog();
- DataLog::AddTable("CodecTest");
- DataLog::AddColumn("CodecTest", "input", 1);
- DataLog::AddColumn("CodecTest", "output", 1);
- }
-
- virtual void TearDown() {
- delete decoder_;
- decoder_ = NULL;
- // Close input file.
- fclose(input_fp_);
- // Delete arrays.
- delete [] input_;
- input_ = NULL;
- delete [] encoded_;
- encoded_ = NULL;
- delete [] decoded_;
- decoded_ = NULL;
- // Close log.
- DataLog::ReturnLog();
- }
-
- virtual void InitEncoder() { }
-
- // This method must be implemented for all tests derived from this class.
- virtual int EncodeFrame(const int16_t* input, size_t input_len,
- uint8_t* output) = 0;
-
- // Encodes and decodes audio. The absolute difference between the input and
- // output is compared vs |tolerance|, and the mean-squared error is compared
- // with |mse|. The encoded stream should contain |expected_bytes|. For stereo
- // audio, the absolute difference between the two channels is compared vs
- // |channel_diff_tolerance|.
- void EncodeDecodeTest(size_t expected_bytes, int tolerance, double mse,
- int delay = 0, int channel_diff_tolerance = 0) {
- ASSERT_GE(tolerance, 0) << "Test must define a tolerance >= 0";
- ASSERT_GE(channel_diff_tolerance, 0) <<
- "Test must define a channel_diff_tolerance >= 0";
- size_t processed_samples = 0u;
- encoded_bytes_ = 0u;
- InitEncoder();
- EXPECT_EQ(0, decoder_->Init());
- while (processed_samples + frame_size_ <= data_length_) {
- size_t enc_len = EncodeFrame(&input_[processed_samples], frame_size_,
- &encoded_[encoded_bytes_]);
- AudioDecoder::SpeechType speech_type;
- size_t dec_len = decoder_->Decode(&encoded_[encoded_bytes_], enc_len,
- &decoded_[processed_samples *
- channels_],
- &speech_type);
- EXPECT_EQ(frame_size_ * channels_, dec_len);
- encoded_bytes_ += enc_len;
- processed_samples += frame_size_;
- }
- // For some codecs it doesn't make sense to check expected number of bytes,
- // since the number can vary for different platforms. Opus and iSAC are
- // such codecs. In this case expected_bytes is set to 0.
- if (expected_bytes) {
- EXPECT_EQ(expected_bytes, encoded_bytes_);
- }
- CompareInputOutput(processed_samples, tolerance, delay);
- if (channels_ == 2)
- CompareTwoChannels(processed_samples, channel_diff_tolerance);
- EXPECT_LE(MseInputOutput(processed_samples, delay), mse);
- }
-
- // The absolute difference between the input and output (the first channel) is
- // compared vs |tolerance|. The parameter |delay| is used to correct for codec
- // delays.
- virtual void CompareInputOutput(size_t num_samples, int tolerance,
- int delay) const {
- assert(num_samples <= data_length_);
- for (unsigned int n = 0; n < num_samples - delay; ++n) {
- ASSERT_NEAR(input_[n], decoded_[channels_ * n + delay], tolerance) <<
- "Exit test on first diff; n = " << n;
- DataLog::InsertCell("CodecTest", "input", input_[n]);
- DataLog::InsertCell("CodecTest", "output", decoded_[channels_ * n]);
- DataLog::NextRow("CodecTest");
- }
- }
-
- // The absolute difference between the two channels of a stereo signal is
- // compared vs |tolerance|.
- virtual void CompareTwoChannels(size_t samples_per_channel,
- int tolerance) const {
- assert(samples_per_channel <= data_length_);
- for (unsigned int n = 0; n < samples_per_channel; ++n)
- ASSERT_NEAR(decoded_[channels_ * n], decoded_[channels_ * n + 1],
- tolerance) << "Stereo samples differ.";
- }
-
- // Calculates mean-squared error between input and output (the first channel).
- // The parameter |delay| is used to correct for codec delays.
- virtual double MseInputOutput(size_t num_samples, int delay) const {
- assert(num_samples <= data_length_);
- if (num_samples == 0) return 0.0;
- double squared_sum = 0.0;
- for (unsigned int n = 0; n < num_samples - delay; ++n) {
- squared_sum += (input_[n] - decoded_[channels_ * n + delay]) *
- (input_[n] - decoded_[channels_ * n + delay]);
- }
- return squared_sum / (num_samples - delay);
- }
-
- // Encodes a payload and decodes it twice with decoder re-init before each
- // decode. Verifies that the decoded result is the same.
- void ReInitTest() {
- uint8_t* encoded = encoded_;
- uint8_t* encoded_copy = encoded_ + 2 * frame_size_;
- int16_t* output1 = decoded_;
- int16_t* output2 = decoded_ + frame_size_;
- InitEncoder();
- size_t enc_len = EncodeFrame(input_, frame_size_, encoded);
- size_t dec_len;
- // Copy payload since iSAC fix destroys it during decode.
- // Issue: http://code.google.com/p/webrtc/issues/detail?id=845.
- // TODO(hlundin): Remove if the iSAC bug gets fixed.
- memcpy(encoded_copy, encoded, enc_len);
- AudioDecoder::SpeechType speech_type1, speech_type2;
- EXPECT_EQ(0, decoder_->Init());
- dec_len = decoder_->Decode(encoded, enc_len, output1, &speech_type1);
- EXPECT_EQ(frame_size_ * channels_, dec_len);
- // Re-init decoder and decode again.
- EXPECT_EQ(0, decoder_->Init());
- dec_len = decoder_->Decode(encoded_copy, enc_len, output2, &speech_type2);
- EXPECT_EQ(frame_size_ * channels_, dec_len);
- for (unsigned int n = 0; n < frame_size_; ++n) {
- ASSERT_EQ(output1[n], output2[n]) << "Exit test on first diff; n = " << n;
- }
- EXPECT_EQ(speech_type1, speech_type2);
- }
-
- // Call DecodePlc and verify that the correct number of samples is produced.
- void DecodePlcTest() {
- InitEncoder();
- size_t enc_len = EncodeFrame(input_, frame_size_, encoded_);
- AudioDecoder::SpeechType speech_type;
- EXPECT_EQ(0, decoder_->Init());
- size_t dec_len =
- decoder_->Decode(encoded_, enc_len, decoded_, &speech_type);
- EXPECT_EQ(frame_size_ * channels_, dec_len);
- // Call DecodePlc and verify that we get one frame of data.
- // (Overwrite the output from the above Decode call, but that does not
- // matter.)
- dec_len = decoder_->DecodePlc(1, decoded_);
- EXPECT_EQ(frame_size_ * channels_, dec_len);
- }
-
- std::string input_file_;
- FILE* input_fp_;
- int16_t* input_;
- uint8_t* encoded_;
- int16_t* decoded_;
- size_t frame_size_;
- size_t data_length_;
- size_t encoded_bytes_;
- size_t channels_;
- AudioDecoder* decoder_;
-};
-
-class AudioDecoderPcmUTest : public AudioDecoderTest {
- protected:
- AudioDecoderPcmUTest() : AudioDecoderTest() {
- frame_size_ = 160;
- data_length_ = 10 * frame_size_;
- decoder_ = new AudioDecoderPcmU;
- assert(decoder_);
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- int enc_len_bytes =
- WebRtcG711_EncodeU(NULL, const_cast<int16_t*>(input),
- static_cast<int>(input_len_samples),
- reinterpret_cast<int16_t*>(output));
- EXPECT_EQ(input_len_samples, static_cast<size_t>(enc_len_bytes));
- return enc_len_bytes;
- }
-};
-
-class AudioDecoderPcmATest : public AudioDecoderTest {
- protected:
- AudioDecoderPcmATest() : AudioDecoderTest() {
- frame_size_ = 160;
- data_length_ = 10 * frame_size_;
- decoder_ = new AudioDecoderPcmA;
- assert(decoder_);
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- int enc_len_bytes =
- WebRtcG711_EncodeA(NULL, const_cast<int16_t*>(input),
- static_cast<int>(input_len_samples),
- reinterpret_cast<int16_t*>(output));
- EXPECT_EQ(input_len_samples, static_cast<size_t>(enc_len_bytes));
- return enc_len_bytes;
- }
-};
-
-class AudioDecoderPcm16BTest : public AudioDecoderTest {
- protected:
- AudioDecoderPcm16BTest() : AudioDecoderTest() {
- frame_size_ = 160;
- data_length_ = 10 * frame_size_;
- decoder_ = new AudioDecoderPcm16B(kDecoderPCM16B);
- assert(decoder_);
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- int enc_len_bytes = WebRtcPcm16b_EncodeW16(
- const_cast<int16_t*>(input), static_cast<int>(input_len_samples),
- reinterpret_cast<int16_t*>(output));
- EXPECT_EQ(2 * input_len_samples, static_cast<size_t>(enc_len_bytes));
- return enc_len_bytes;
- }
-};
-
-class AudioDecoderIlbcTest : public AudioDecoderTest {
- protected:
- AudioDecoderIlbcTest() : AudioDecoderTest() {
- frame_size_ = 240;
- data_length_ = 10 * frame_size_;
- decoder_ = new AudioDecoderIlbc;
- assert(decoder_);
- WebRtcIlbcfix_EncoderCreate(&encoder_);
- }
-
- ~AudioDecoderIlbcTest() {
- WebRtcIlbcfix_EncoderFree(encoder_);
- }
-
- virtual void InitEncoder() {
- ASSERT_EQ(0, WebRtcIlbcfix_EncoderInit(encoder_, 30)); // 30 ms.
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- int enc_len_bytes =
- WebRtcIlbcfix_Encode(encoder_, input,
- static_cast<int>(input_len_samples),
- reinterpret_cast<int16_t*>(output));
- EXPECT_EQ(50, enc_len_bytes);
- return enc_len_bytes;
- }
-
- // Overload the default test since iLBC's function WebRtcIlbcfix_NetEqPlc does
- // not return any data. It simply resets a few states and returns 0.
- void DecodePlcTest() {
- InitEncoder();
- size_t enc_len = EncodeFrame(input_, frame_size_, encoded_);
- AudioDecoder::SpeechType speech_type;
- EXPECT_EQ(0, decoder_->Init());
- size_t dec_len =
- decoder_->Decode(encoded_, enc_len, decoded_, &speech_type);
- EXPECT_EQ(frame_size_, dec_len);
- // Simply call DecodePlc and verify that we get 0 as return value.
- EXPECT_EQ(0, decoder_->DecodePlc(1, decoded_));
- }
-
- iLBC_encinst_t* encoder_;
-};
-
-class AudioDecoderIsacFloatTest : public AudioDecoderTest {
- protected:
- AudioDecoderIsacFloatTest() : AudioDecoderTest() {
- input_size_ = 160;
- frame_size_ = 480;
- data_length_ = 10 * frame_size_;
- decoder_ = new AudioDecoderIsac;
- assert(decoder_);
- WebRtcIsac_Create(&encoder_);
- WebRtcIsac_SetEncSampRate(encoder_, 16000);
- }
-
- ~AudioDecoderIsacFloatTest() {
- WebRtcIsac_Free(encoder_);
- }
-
- virtual void InitEncoder() {
- ASSERT_EQ(0, WebRtcIsac_EncoderInit(encoder_, 1)); // Fixed mode.
- ASSERT_EQ(0, WebRtcIsac_Control(encoder_, 32000, 30)); // 32 kbps, 30 ms.
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- // Insert 3 * 10 ms. Expect non-zero output on third call.
- EXPECT_EQ(0, WebRtcIsac_Encode(encoder_, input,
- reinterpret_cast<int16_t*>(output)));
- input += input_size_;
- EXPECT_EQ(0, WebRtcIsac_Encode(encoder_, input,
- reinterpret_cast<int16_t*>(output)));
- input += input_size_;
- int enc_len_bytes =
- WebRtcIsac_Encode(encoder_, input, reinterpret_cast<int16_t*>(output));
- EXPECT_GT(enc_len_bytes, 0);
- return enc_len_bytes;
- }
-
- ISACStruct* encoder_;
- int input_size_;
-};
-
-class AudioDecoderIsacSwbTest : public AudioDecoderTest {
- protected:
- AudioDecoderIsacSwbTest() : AudioDecoderTest() {
- input_size_ = 320;
- frame_size_ = 960;
- data_length_ = 10 * frame_size_;
- decoder_ = new AudioDecoderIsacSwb;
- assert(decoder_);
- WebRtcIsac_Create(&encoder_);
- WebRtcIsac_SetEncSampRate(encoder_, 32000);
- }
-
- ~AudioDecoderIsacSwbTest() {
- WebRtcIsac_Free(encoder_);
- }
-
- virtual void InitEncoder() {
- ASSERT_EQ(0, WebRtcIsac_EncoderInit(encoder_, 1)); // Fixed mode.
- ASSERT_EQ(0, WebRtcIsac_Control(encoder_, 32000, 30)); // 32 kbps, 30 ms.
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- // Insert 3 * 10 ms. Expect non-zero output on third call.
- EXPECT_EQ(0, WebRtcIsac_Encode(encoder_, input,
- reinterpret_cast<int16_t*>(output)));
- input += input_size_;
- EXPECT_EQ(0, WebRtcIsac_Encode(encoder_, input,
- reinterpret_cast<int16_t*>(output)));
- input += input_size_;
- int enc_len_bytes =
- WebRtcIsac_Encode(encoder_, input, reinterpret_cast<int16_t*>(output));
- EXPECT_GT(enc_len_bytes, 0);
- return enc_len_bytes;
- }
-
- ISACStruct* encoder_;
- int input_size_;
-};
-
-// This test is identical to AudioDecoderIsacSwbTest, except that it creates
-// an AudioDecoderIsacFb decoder object.
-class AudioDecoderIsacFbTest : public AudioDecoderIsacSwbTest {
- protected:
- AudioDecoderIsacFbTest() : AudioDecoderIsacSwbTest() {
- // Delete the |decoder_| that was created by AudioDecoderIsacSwbTest and
- // create an AudioDecoderIsacFb object instead.
- delete decoder_;
- decoder_ = new AudioDecoderIsacFb;
- assert(decoder_);
- }
-};
-
-class AudioDecoderIsacFixTest : public AudioDecoderTest {
- protected:
- AudioDecoderIsacFixTest() : AudioDecoderTest() {
- input_size_ = 160;
- frame_size_ = 480;
- data_length_ = 10 * frame_size_;
- decoder_ = new AudioDecoderIsacFix;
- assert(decoder_);
- WebRtcIsacfix_Create(&encoder_);
- }
-
- ~AudioDecoderIsacFixTest() {
- WebRtcIsacfix_Free(encoder_);
- }
-
- virtual void InitEncoder() {
- ASSERT_EQ(0, WebRtcIsacfix_EncoderInit(encoder_, 1)); // Fixed mode.
- ASSERT_EQ(0,
- WebRtcIsacfix_Control(encoder_, 32000, 30)); // 32 kbps, 30 ms.
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- // Insert 3 * 10 ms. Expect non-zero output on third call.
- EXPECT_EQ(0, WebRtcIsacfix_Encode(encoder_, input,
- reinterpret_cast<int16_t*>(output)));
- input += input_size_;
- EXPECT_EQ(0, WebRtcIsacfix_Encode(encoder_, input,
- reinterpret_cast<int16_t*>(output)));
- input += input_size_;
- int enc_len_bytes = WebRtcIsacfix_Encode(
- encoder_, input, reinterpret_cast<int16_t*>(output));
- EXPECT_GT(enc_len_bytes, 0);
- return enc_len_bytes;
- }
-
- ISACFIX_MainStruct* encoder_;
- int input_size_;
-};
-
-class AudioDecoderG722Test : public AudioDecoderTest {
- protected:
- AudioDecoderG722Test() : AudioDecoderTest() {
- frame_size_ = 160;
- data_length_ = 10 * frame_size_;
- decoder_ = new AudioDecoderG722;
- assert(decoder_);
- WebRtcG722_CreateEncoder(&encoder_);
- }
-
- ~AudioDecoderG722Test() {
- WebRtcG722_FreeEncoder(encoder_);
- }
-
- virtual void InitEncoder() {
- ASSERT_EQ(0, WebRtcG722_EncoderInit(encoder_));
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- int enc_len_bytes =
- WebRtcG722_Encode(encoder_, const_cast<int16_t*>(input),
- static_cast<int>(input_len_samples),
- reinterpret_cast<int16_t*>(output));
- EXPECT_EQ(80, enc_len_bytes);
- return enc_len_bytes;
- }
-
- G722EncInst* encoder_;
-};
-
-class AudioDecoderG722StereoTest : public AudioDecoderG722Test {
- protected:
- AudioDecoderG722StereoTest() : AudioDecoderG722Test() {
- channels_ = 2;
- // Delete the |decoder_| that was created by AudioDecoderG722Test and
- // create an AudioDecoderG722Stereo object instead.
- delete decoder_;
- decoder_ = new AudioDecoderG722Stereo;
- assert(decoder_);
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- uint8_t* temp_output = new uint8_t[data_length_ * 2];
- // Encode a mono payload using the base test class.
- int mono_enc_len_bytes =
- AudioDecoderG722Test::EncodeFrame(input, input_len_samples,
- temp_output);
- // The bit-stream consists of 4-bit samples:
- // +--------+--------+--------+
- // | s0 s1 | s2 s3 | s4 s5 |
- // +--------+--------+--------+
- //
- // Duplicate them to the |output| such that the stereo stream becomes:
- // +--------+--------+--------+
- // | s0 s0 | s1 s1 | s2 s2 |
- // +--------+--------+--------+
- EXPECT_LE(mono_enc_len_bytes * 2, static_cast<int>(data_length_ * 2));
- uint8_t* output_ptr = output;
- for (int i = 0; i < mono_enc_len_bytes; ++i) {
- *output_ptr = (temp_output[i] & 0xF0) + (temp_output[i] >> 4);
- ++output_ptr;
- *output_ptr = (temp_output[i] << 4) + (temp_output[i] & 0x0F);
- ++output_ptr;
- }
- delete [] temp_output;
- return mono_enc_len_bytes * 2;
- }
-};
-
-#ifdef WEBRTC_CODEC_CELT
-class AudioDecoderCeltTest : public AudioDecoderTest {
- protected:
- static const int kEncodingRateBitsPerSecond = 64000;
- AudioDecoderCeltTest() : AudioDecoderTest(), encoder_(NULL) {
- frame_size_ = 640;
- data_length_ = 10 * frame_size_;
- decoder_ = AudioDecoder::CreateAudioDecoder(kDecoderCELT_32);
- assert(decoder_);
- WebRtcCelt_CreateEnc(&encoder_, static_cast<int>(channels_));
- }
-
- ~AudioDecoderCeltTest() {
- WebRtcCelt_FreeEnc(encoder_);
- }
-
- virtual void InitEncoder() {
- assert(encoder_);
- ASSERT_EQ(0, WebRtcCelt_EncoderInit(
- encoder_, static_cast<int>(channels_), kEncodingRateBitsPerSecond));
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- assert(encoder_);
- return WebRtcCelt_Encode(encoder_, input, output);
- }
-
- CELT_encinst_t* encoder_;
-};
-
-class AudioDecoderCeltStereoTest : public AudioDecoderTest {
- protected:
- static const int kEncodingRateBitsPerSecond = 64000;
- AudioDecoderCeltStereoTest() : AudioDecoderTest(), encoder_(NULL) {
- channels_ = 2;
- frame_size_ = 640;
- data_length_ = 10 * frame_size_;
- decoder_ = AudioDecoder::CreateAudioDecoder(kDecoderCELT_32_2ch);
- assert(decoder_);
- stereo_input_ = new int16_t[frame_size_ * channels_];
- WebRtcCelt_CreateEnc(&encoder_, static_cast<int>(channels_));
- }
-
- ~AudioDecoderCeltStereoTest() {
- delete [] stereo_input_;
- WebRtcCelt_FreeEnc(encoder_);
- }
-
- virtual void InitEncoder() {
- assert(encoder_);
- ASSERT_EQ(0, WebRtcCelt_EncoderInit(
- encoder_, static_cast<int>(channels_), kEncodingRateBitsPerSecond));
- }
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- assert(encoder_);
- assert(stereo_input_);
- for (size_t n = 0; n < frame_size_; ++n) {
- stereo_input_[n * 2] = stereo_input_[n * 2 + 1] = input[n];
- }
- return WebRtcCelt_Encode(encoder_, stereo_input_, output);
- }
-
- int16_t* stereo_input_;
- CELT_encinst_t* encoder_;
-};
-
-#endif
-
-class AudioDecoderOpusTest : public AudioDecoderTest {
- protected:
- AudioDecoderOpusTest() : AudioDecoderTest() {
- frame_size_ = 320;
- data_length_ = 10 * frame_size_;
- decoder_ = new AudioDecoderOpus(kDecoderOpus);
- assert(decoder_);
- WebRtcOpus_EncoderCreate(&encoder_, 1);
- }
-
- ~AudioDecoderOpusTest() {
- WebRtcOpus_EncoderFree(encoder_);
- }
-
- virtual void InitEncoder() {}
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- // Upsample from 32 to 48 kHz.
- Resampler rs;
- rs.Reset(32000, 48000, kResamplerSynchronous);
- const int max_resamp_len_samples = static_cast<int>(input_len_samples) *
- 3 / 2;
- int16_t* resamp_input = new int16_t[max_resamp_len_samples];
- int resamp_len_samples;
- EXPECT_EQ(0, rs.Push(input, static_cast<int>(input_len_samples),
- resamp_input, max_resamp_len_samples,
- resamp_len_samples));
- EXPECT_EQ(max_resamp_len_samples, resamp_len_samples);
- int enc_len_bytes =
- WebRtcOpus_Encode(encoder_, resamp_input, resamp_len_samples,
- static_cast<int>(data_length_), output);
- EXPECT_GT(enc_len_bytes, 0);
- delete [] resamp_input;
- return enc_len_bytes;
- }
-
- OpusEncInst* encoder_;
-};
-
-class AudioDecoderOpusStereoTest : public AudioDecoderTest {
- protected:
- AudioDecoderOpusStereoTest() : AudioDecoderTest() {
- channels_ = 2;
- frame_size_ = 320;
- data_length_ = 10 * frame_size_;
- decoder_ = new AudioDecoderOpus(kDecoderOpus_2ch);
- assert(decoder_);
- WebRtcOpus_EncoderCreate(&encoder_, 2);
- }
-
- ~AudioDecoderOpusStereoTest() {
- WebRtcOpus_EncoderFree(encoder_);
- }
-
- virtual void InitEncoder() {}
-
- virtual int EncodeFrame(const int16_t* input, size_t input_len_samples,
- uint8_t* output) {
- // Create stereo by duplicating each sample in |input|.
- const int input_stereo_samples = static_cast<int>(input_len_samples) * 2;
- int16_t* input_stereo = new int16_t[input_stereo_samples];
- for (size_t i = 0; i < input_len_samples; i++)
- input_stereo[i * 2] = input_stereo[i * 2 + 1] = input[i];
- // Upsample from 32 to 48 kHz.
- Resampler rs;
- rs.Reset(32000, 48000, kResamplerSynchronousStereo);
- const int max_resamp_len_samples = input_stereo_samples * 3 / 2;
- int16_t* resamp_input = new int16_t[max_resamp_len_samples];
- int resamp_len_samples;
- EXPECT_EQ(0, rs.Push(input_stereo, input_stereo_samples, resamp_input,
- max_resamp_len_samples, resamp_len_samples));
- EXPECT_EQ(max_resamp_len_samples, resamp_len_samples);
- int enc_len_bytes =
- WebRtcOpus_Encode(encoder_, resamp_input, resamp_len_samples / 2,
- static_cast<int16_t>(data_length_), output);
- EXPECT_GT(enc_len_bytes, 0);
- delete [] resamp_input;
- delete [] input_stereo;
- return enc_len_bytes;
- }
-
- OpusEncInst* encoder_;
-};
-
-TEST_F(AudioDecoderPcmUTest, EncodeDecode) {
- int tolerance = 251;
- double mse = 1734.0;
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMu));
- EncodeDecodeTest(data_length_, tolerance, mse);
- ReInitTest();
- EXPECT_FALSE(decoder_->HasDecodePlc());
-}
-
-TEST_F(AudioDecoderPcmATest, EncodeDecode) {
- int tolerance = 308;
- double mse = 1931.0;
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMa));
- EncodeDecodeTest(data_length_, tolerance, mse);
- ReInitTest();
- EXPECT_FALSE(decoder_->HasDecodePlc());
-}
-
-TEST_F(AudioDecoderPcm16BTest, EncodeDecode) {
- int tolerance = 0;
- double mse = 0.0;
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16B));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bwb));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb32kHz));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb48kHz));
- EncodeDecodeTest(2 * data_length_, tolerance, mse);
- ReInitTest();
- EXPECT_FALSE(decoder_->HasDecodePlc());
-}
-
-TEST_F(AudioDecoderIlbcTest, EncodeDecode) {
- int tolerance = 6808;
- double mse = 2.13e6;
- int delay = 80; // Delay from input to output.
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderILBC));
- EncodeDecodeTest(500, tolerance, mse, delay);
- ReInitTest();
- EXPECT_TRUE(decoder_->HasDecodePlc());
- DecodePlcTest();
-}
-
-TEST_F(AudioDecoderIsacFloatTest, EncodeDecode) {
- int tolerance = 3399;
- double mse = 434951.0;
- int delay = 48; // Delay from input to output.
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISAC));
- EncodeDecodeTest(0, tolerance, mse, delay);
- ReInitTest();
- EXPECT_TRUE(decoder_->HasDecodePlc());
- DecodePlcTest();
-}
-
-TEST_F(AudioDecoderIsacSwbTest, EncodeDecode) {
- int tolerance = 19757;
- double mse = 8.18e6;
- int delay = 160; // Delay from input to output.
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISACswb));
- EncodeDecodeTest(0, tolerance, mse, delay);
- ReInitTest();
- EXPECT_TRUE(decoder_->HasDecodePlc());
- DecodePlcTest();
-}
-
-TEST_F(AudioDecoderIsacFbTest, EncodeDecode) {
- int tolerance = 19757;
- double mse = 8.18e6;
- int delay = 160; // Delay from input to output.
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISACswb));
- EncodeDecodeTest(0, tolerance, mse, delay);
- ReInitTest();
- EXPECT_TRUE(decoder_->HasDecodePlc());
- DecodePlcTest();
-}
-
-TEST_F(AudioDecoderIsacFixTest, DISABLED_EncodeDecode) {
- int tolerance = 11034;
- double mse = 3.46e6;
- int delay = 54; // Delay from input to output.
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISAC));
- EncodeDecodeTest(735, tolerance, mse, delay);
- ReInitTest();
- EXPECT_FALSE(decoder_->HasDecodePlc());
-}
-
-TEST_F(AudioDecoderG722Test, EncodeDecode) {
- int tolerance = 6176;
- double mse = 238630.0;
- int delay = 22; // Delay from input to output.
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderG722));
- EncodeDecodeTest(data_length_ / 2, tolerance, mse, delay);
- ReInitTest();
- EXPECT_FALSE(decoder_->HasDecodePlc());
-}
-
-TEST_F(AudioDecoderG722StereoTest, CreateAndDestroy) {
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderG722_2ch));
-}
-
-TEST_F(AudioDecoderG722StereoTest, EncodeDecode) {
- int tolerance = 6176;
- int channel_diff_tolerance = 0;
- double mse = 238630.0;
- int delay = 22; // Delay from input to output.
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderG722_2ch));
- EncodeDecodeTest(data_length_, tolerance, mse, delay, channel_diff_tolerance);
- ReInitTest();
- EXPECT_FALSE(decoder_->HasDecodePlc());
-}
-
-TEST_F(AudioDecoderOpusTest, EncodeDecode) {
- int tolerance = 6176;
- double mse = 238630.0;
- int delay = 22; // Delay from input to output.
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus));
- EncodeDecodeTest(0, tolerance, mse, delay);
- ReInitTest();
- EXPECT_FALSE(decoder_->HasDecodePlc());
-}
-
-TEST_F(AudioDecoderOpusStereoTest, EncodeDecode) {
- int tolerance = 6176;
- int channel_diff_tolerance = 0;
- double mse = 238630.0;
- int delay = 22; // Delay from input to output.
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus_2ch));
- EncodeDecodeTest(0, tolerance, mse, delay, channel_diff_tolerance);
- ReInitTest();
- EXPECT_FALSE(decoder_->HasDecodePlc());
-}
-
-#ifdef WEBRTC_CODEC_CELT
-// In the two following CELT tests, the low amplitude of the test signal allows
-// us to use such low error thresholds, i.e. |tolerance| and |mse|. Furthermore,
-// in general, stereo signals with identical channels do not result in identical
-// encoded channels.
-TEST_F(AudioDecoderCeltTest, EncodeDecode) {
- int tolerance = 20;
- double mse = 17.0;
- int delay = 80; // Delay from input to output in samples.
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32));
- EncodeDecodeTest(1600, tolerance, mse, delay);
- ReInitTest();
- EXPECT_TRUE(decoder_->HasDecodePlc());
- DecodePlcTest();
-}
-
-TEST_F(AudioDecoderCeltStereoTest, EncodeDecode) {
- int tolerance = 20;
- // Even if both channels are identical, CELT does not necessarily decode
- // identical channels. However, for this particular input it does.
- int channel_diff_tolerance = 0;
- double mse = 20.0;
- // Delay from input to output in samples, accounting for stereo.
- int delay = 160;
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32_2ch));
- EncodeDecodeTest(1600, tolerance, mse, delay, channel_diff_tolerance);
- ReInitTest();
- EXPECT_TRUE(decoder_->HasDecodePlc());
- DecodePlcTest();
-}
-#endif
-
-TEST(AudioDecoder, CodecSampleRateHz) {
- EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCMu));
- EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCMa));
- EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCMu_2ch));
- EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCMa_2ch));
- EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderILBC));
- EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderISAC));
- EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderISACswb));
- EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderISACfb));
- EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16B));
- EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bwb));
- EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bswb32kHz));
- EXPECT_EQ(48000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bswb48kHz));
- EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16B_2ch));
- EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bwb_2ch));
- EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bswb32kHz_2ch));
- EXPECT_EQ(48000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16Bswb48kHz_2ch));
- EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderPCM16B_5ch));
- EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderG722));
- EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderG722_2ch));
- EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderRED));
- EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderAVT));
- EXPECT_EQ(8000, AudioDecoder::CodecSampleRateHz(kDecoderCNGnb));
- EXPECT_EQ(16000, AudioDecoder::CodecSampleRateHz(kDecoderCNGwb));
- EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderCNGswb32kHz));
- // TODO(tlegrand): Change 32000 to 48000 below once ACM has 48 kHz support.
- EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderCNGswb48kHz));
- EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderArbitrary));
- EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderOpus));
- EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderOpus_2ch));
-#ifdef WEBRTC_CODEC_CELT
- EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32));
- EXPECT_EQ(32000, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32_2ch));
-#else
- EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32));
- EXPECT_EQ(-1, AudioDecoder::CodecSampleRateHz(kDecoderCELT_32_2ch));
-#endif
-}
-
-TEST(AudioDecoder, CodecSupported) {
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMu));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMa));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMu_2ch));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCMa_2ch));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderILBC));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISAC));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISACswb));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderISACfb));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16B));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bwb));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb32kHz));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb48kHz));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16B_2ch));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bwb_2ch));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb32kHz_2ch));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16Bswb48kHz_2ch));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderPCM16B_5ch));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderG722));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderG722_2ch));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderRED));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderAVT));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCNGnb));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCNGwb));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCNGswb32kHz));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCNGswb48kHz));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderArbitrary));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderOpus_2ch));
-#ifdef WEBRTC_CODEC_CELT
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32));
- EXPECT_TRUE(AudioDecoder::CodecSupported(kDecoderCELT_32_2ch));
-#else
- EXPECT_FALSE(AudioDecoder::CodecSupported(kDecoderCELT_32));
- EXPECT_FALSE(AudioDecoder::CodecSupported(kDecoderCELT_32_2ch));
-#endif
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc
deleted file mode 100644
index baa912c860e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc
+++ /dev/null
@@ -1,214 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-
-#include <assert.h>
-
-#include <algorithm>
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-AudioMultiVector::AudioMultiVector(size_t N) {
- assert(N > 0);
- if (N < 1) N = 1;
- for (size_t n = 0; n < N; ++n) {
- channels_.push_back(new AudioVector);
- }
- num_channels_ = N;
-}
-
-AudioMultiVector::AudioMultiVector(size_t N, size_t initial_size) {
- assert(N > 0);
- if (N < 1) N = 1;
- for (size_t n = 0; n < N; ++n) {
- channels_.push_back(new AudioVector(initial_size));
- }
- num_channels_ = N;
-}
-
-AudioMultiVector::~AudioMultiVector() {
- std::vector<AudioVector*>::iterator it = channels_.begin();
- while (it != channels_.end()) {
- delete (*it);
- ++it;
- }
-}
-
-void AudioMultiVector::Clear() {
- for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->Clear();
- }
-}
-
-void AudioMultiVector::Zeros(size_t length) {
- for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->Clear();
- channels_[i]->Extend(length);
- }
-}
-
-void AudioMultiVector::CopyFrom(AudioMultiVector* copy_to) const {
- if (copy_to) {
- for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->CopyFrom(&(*copy_to)[i]);
- }
- }
-}
-
-void AudioMultiVector::PushBackInterleaved(const int16_t* append_this,
- size_t length) {
- assert(length % num_channels_ == 0);
- if (num_channels_ == 1) {
- // Special case to avoid extra allocation and data shuffling.
- channels_[0]->PushBack(append_this, length);
- return;
- }
- size_t length_per_channel = length / num_channels_;
- int16_t* temp_array =
- new int16_t[length_per_channel]; // Intermediate storage.
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- // Copy elements to |temp_array|.
- // Set |source_ptr| to first element of this channel.
- const int16_t* source_ptr = &append_this[channel];
- for (size_t i = 0; i < length_per_channel; ++i) {
- temp_array[i] = *source_ptr;
- source_ptr += num_channels_; // Jump to next element of this channel.
- }
- channels_[channel]->PushBack(temp_array, length_per_channel);
- }
- delete [] temp_array;
-}
-
-void AudioMultiVector::PushBack(const AudioMultiVector& append_this) {
- assert(num_channels_ == append_this.num_channels_);
- if (num_channels_ == append_this.num_channels_) {
- for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->PushBack(append_this[i]);
- }
- }
-}
-
-void AudioMultiVector::PushBackFromIndex(const AudioMultiVector& append_this,
- size_t index) {
- assert(index < append_this.Size());
- index = std::min(index, append_this.Size() - 1);
- size_t length = append_this.Size() - index;
- assert(num_channels_ == append_this.num_channels_);
- if (num_channels_ == append_this.num_channels_) {
- for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->PushBack(&append_this[i][index], length);
- }
- }
-}
-
-void AudioMultiVector::PopFront(size_t length) {
- for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->PopFront(length);
- }
-}
-
-void AudioMultiVector::PopBack(size_t length) {
- for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->PopBack(length);
- }
-}
-
-size_t AudioMultiVector::ReadInterleaved(size_t length,
- int16_t* destination) const {
- return ReadInterleavedFromIndex(0, length, destination);
-}
-
-size_t AudioMultiVector::ReadInterleavedFromIndex(size_t start_index,
- size_t length,
- int16_t* destination) const {
- if (!destination) {
- return 0;
- }
- size_t index = 0; // Number of elements written to |destination| so far.
- assert(start_index <= Size());
- start_index = std::min(start_index, Size());
- if (length + start_index > Size()) {
- length = Size() - start_index;
- }
- if (num_channels_ == 1) {
- // Special case to avoid the nested for loop below.
- memcpy(destination, &(*this)[0][start_index], length * sizeof(int16_t));
- return length;
- }
- for (size_t i = 0; i < length; ++i) {
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- destination[index] = (*this)[channel][i + start_index];
- ++index;
- }
- }
- return index;
-}
-
-size_t AudioMultiVector::ReadInterleavedFromEnd(size_t length,
- int16_t* destination) const {
- length = std::min(length, Size()); // Cannot read more than Size() elements.
- return ReadInterleavedFromIndex(Size() - length, length, destination);
-}
-
-void AudioMultiVector::OverwriteAt(const AudioMultiVector& insert_this,
- size_t length,
- size_t position) {
- assert(num_channels_ == insert_this.num_channels_);
- // Cap |length| at the length of |insert_this|.
- assert(length <= insert_this.Size());
- length = std::min(length, insert_this.Size());
- if (num_channels_ == insert_this.num_channels_) {
- for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->OverwriteAt(&insert_this[i][0], length, position);
- }
- }
-}
-
-void AudioMultiVector::CrossFade(const AudioMultiVector& append_this,
- size_t fade_length) {
- assert(num_channels_ == append_this.num_channels_);
- if (num_channels_ == append_this.num_channels_) {
- for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->CrossFade(append_this[i], fade_length);
- }
- }
-}
-
-size_t AudioMultiVector::Size() const {
- assert(channels_[0]);
- return channels_[0]->Size();
-}
-
-void AudioMultiVector::AssertSize(size_t required_size) {
- if (Size() < required_size) {
- size_t extend_length = required_size - Size();
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- channels_[channel]->Extend(extend_length);
- }
- }
-}
-
-bool AudioMultiVector::Empty() const {
- assert(channels_[0]);
- return channels_[0]->Empty();
-}
-
-const AudioVector& AudioMultiVector::operator[](size_t index) const {
- return *(channels_[index]);
-}
-
-AudioVector& AudioMultiVector::operator[](size_t index) {
- return *(channels_[index]);
-}
-
-} // namespace webrtc
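The interleaved push/read paths in the file above boil down to splitting a channel-interleaved buffer into per-channel storage and merging it back. A minimal standalone sketch of the split step, for illustration only; the helper name Deinterleave and the use of std::vector in place of AudioVector are assumptions, not code from the tree:

#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative sketch only: splits an interleaved buffer
// [ch0 ch1 ch0 ch1 ...] into per-channel vectors, which is essentially what
// AudioMultiVector::PushBackInterleaved does before appending to each channel.
std::vector<std::vector<int16_t> > Deinterleave(const int16_t* input,
                                                size_t length,
                                                size_t num_channels) {
  std::vector<std::vector<int16_t> > channels(num_channels);
  const size_t samples_per_channel = length / num_channels;
  for (size_t ch = 0; ch < num_channels; ++ch) {
    channels[ch].reserve(samples_per_channel);
    for (size_t i = 0; i < samples_per_channel; ++i) {
      channels[ch].push_back(input[i * num_channels + ch]);
    }
  }
  return channels;
}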
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h
deleted file mode 100644
index 2d0a7494912..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_MULTI_VECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_MULTI_VECTOR_H_
-
-#include <string.h> // Access to size_t.
-
-#include <vector>
-
-#include "webrtc/modules/audio_coding/neteq4/audio_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class AudioMultiVector {
- public:
- // Creates an empty AudioMultiVector with |N| audio channels. |N| must be
- // larger than 0.
- explicit AudioMultiVector(size_t N);
-
-  // Creates an AudioMultiVector with |N| audio channels, each channel of
-  // length |initial_size|. |N| must be larger than 0.
- AudioMultiVector(size_t N, size_t initial_size);
-
- virtual ~AudioMultiVector();
-
-  // Deletes all values and makes the vector empty.
- virtual void Clear();
-
- // Clears the vector and inserts |length| zeros into each channel.
- virtual void Zeros(size_t length);
-
- // Copies all values from this vector to |copy_to|. Any contents in |copy_to|
- // are deleted. After the operation is done, |copy_to| will be an exact
- // replica of this object. The source and the destination must have the same
- // number of channels.
- virtual void CopyFrom(AudioMultiVector* copy_to) const;
-
- // Appends the contents of array |append_this| to the end of this
- // object. The array is assumed to be channel-interleaved. |length| must be
- // an even multiple of this object's number of channels.
-  // The length of this object is increased by |length| divided by the
- // number of channels.
- virtual void PushBackInterleaved(const int16_t* append_this, size_t length);
-
- // Appends the contents of AudioMultiVector |append_this| to this object. The
-  // length of this object is increased by the length of |append_this|.
- virtual void PushBack(const AudioMultiVector& append_this);
-
- // Appends the contents of AudioMultiVector |append_this| to this object,
- // taken from |index| up until the end of |append_this|. The length of this
- // object is increased.
- virtual void PushBackFromIndex(const AudioMultiVector& append_this,
- size_t index);
-
- // Removes |length| elements from the beginning of this object, from each
- // channel.
- virtual void PopFront(size_t length);
-
- // Removes |length| elements from the end of this object, from each
- // channel.
- virtual void PopBack(size_t length);
-
- // Reads |length| samples from each channel and writes them interleaved to
- // |destination|. The total number of elements written to |destination| is
- // returned, i.e., |length| * number of channels. If the AudioMultiVector
-  // contains fewer than |length| samples per channel, this is reflected in the
- // return value.
- virtual size_t ReadInterleaved(size_t length, int16_t* destination) const;
-
- // Like ReadInterleaved() above, but reads from |start_index| instead of from
- // the beginning.
- virtual size_t ReadInterleavedFromIndex(size_t start_index,
- size_t length,
- int16_t* destination) const;
-
- // Like ReadInterleaved() above, but reads from the end instead of from
- // the beginning.
- virtual size_t ReadInterleavedFromEnd(size_t length,
- int16_t* destination) const;
-
- // Overwrites each channel in this AudioMultiVector with values taken from
- // |insert_this|. The values are taken from the beginning of |insert_this| and
- // are inserted starting at |position|. |length| values are written into each
- // channel. If |length| and |position| are selected such that the new data
- // extends beyond the end of the current AudioVector, the vector is extended
- // to accommodate the new data. |length| is limited to the length of
- // |insert_this|.
- virtual void OverwriteAt(const AudioMultiVector& insert_this,
- size_t length,
- size_t position);
-
- // Appends |append_this| to the end of the current vector. Lets the two
- // vectors overlap by |fade_length| samples (per channel), and cross-fade
- // linearly in this region.
- virtual void CrossFade(const AudioMultiVector& append_this,
- size_t fade_length);
-
- // Returns the number of channels.
- virtual size_t Channels() const { return num_channels_; }
-
- // Returns the number of elements per channel in this AudioMultiVector.
- virtual size_t Size() const;
-
- // Verify that each channel can hold at least |required_size| elements. If
- // not, extend accordingly.
- virtual void AssertSize(size_t required_size);
-
- virtual bool Empty() const;
-
- // Accesses and modifies a channel (i.e., an AudioVector object) of this
- // AudioMultiVector.
- const AudioVector& operator[](size_t index) const;
- AudioVector& operator[](size_t index);
-
- protected:
- std::vector<AudioVector*> channels_;
- size_t num_channels_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioMultiVector);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_MULTI_VECTOR_H_
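For orientation, a hedged usage sketch of the interface declared above. The call sequence and the function name AudioMultiVectorExample are illustrative assumptions, not taken from the tree; the sketch only uses methods declared in the header:

#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"

void AudioMultiVectorExample() {
  // Two channels, pushed as interleaved left/right samples.
  webrtc::AudioMultiVector vec(2);
  const int16_t interleaved[] = {100, 200, 101, 201, 102, 202, 103, 203};
  vec.PushBackInterleaved(interleaved, 8);  // 4 samples per channel.
  // vec.Channels() == 2, vec.Size() == 4, vec[1][0] == 200.
  int16_t out[8];
  size_t written = vec.ReadInterleaved(4, out);  // written == 8, out == input.
  vec.PopFront(2);                               // 2 samples left per channel.
  (void)written;
}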
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc
deleted file mode 100644
index be05a8260f9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc
+++ /dev/null
@@ -1,308 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-
-#include <assert.h>
-#include <stdlib.h>
-
-#include <string>
-
-#include "gtest/gtest.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// This is a value-parameterized test. The test cases are instantiated with
-// different values for the test parameter, which is used to determine the
-// number of channels in the AudioMultiVector. Note that it is not possible
-// to combine typed testing with value-parameterized testing, and since the
-// tests for AudioVector already cover a number of different type parameters,
-// this test focuses on testing different numbers of channels, keeping the
-// value type constant.
-
-class AudioMultiVectorTest : public ::testing::TestWithParam<size_t> {
- protected:
- AudioMultiVectorTest()
- : num_channels_(GetParam()), // Get the test parameter.
- interleaved_length_(num_channels_ * array_length()) {
- array_interleaved_ = new int16_t[num_channels_ * array_length()];
- }
-
- ~AudioMultiVectorTest() {
- delete [] array_interleaved_;
- }
-
- virtual void SetUp() {
- // Populate test arrays.
- for (size_t i = 0; i < array_length(); ++i) {
- array_[i] = static_cast<int16_t>(i);
- }
- int16_t* ptr = array_interleaved_;
- // Write 100, 101, 102, ... for first channel.
- // Write 200, 201, 202, ... for second channel.
- // And so on.
- for (size_t i = 0; i < array_length(); ++i) {
- for (size_t j = 1; j <= num_channels_; ++j) {
- *ptr = j * 100 + i;
- ++ptr;
- }
- }
- }
-
- size_t array_length() const {
- return sizeof(array_) / sizeof(array_[0]);
- }
-
- const size_t num_channels_;
- size_t interleaved_length_;
- int16_t array_[10];
- int16_t* array_interleaved_;
-};
-
-// Create and destroy AudioMultiVector objects, both empty and with a predefined
-// length.
-TEST_P(AudioMultiVectorTest, CreateAndDestroy) {
- AudioMultiVector vec1(num_channels_);
- EXPECT_TRUE(vec1.Empty());
- EXPECT_EQ(num_channels_, vec1.Channels());
- EXPECT_EQ(0u, vec1.Size());
-
- size_t initial_size = 17;
- AudioMultiVector vec2(num_channels_, initial_size);
- EXPECT_FALSE(vec2.Empty());
- EXPECT_EQ(num_channels_, vec2.Channels());
- EXPECT_EQ(initial_size, vec2.Size());
-}
-
-// Test the subscript operator [] for getting and setting.
-TEST_P(AudioMultiVectorTest, SubscriptOperator) {
- AudioMultiVector vec(num_channels_, array_length());
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- for (size_t i = 0; i < array_length(); ++i) {
- vec[channel][i] = static_cast<int16_t>(i);
- // Make sure to use the const version.
- const AudioVector& audio_vec = vec[channel];
- EXPECT_EQ(static_cast<int16_t>(i), audio_vec[i]);
- }
- }
-}
-
-// Test the PushBackInterleaved method and the CopyFrom method. The Clear
-// method is also invoked.
-TEST_P(AudioMultiVectorTest, PushBackInterleavedAndCopy) {
- AudioMultiVector vec(num_channels_);
- vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
- AudioMultiVector vec_copy(num_channels_);
- vec.CopyFrom(&vec_copy); // Copy from |vec| to |vec_copy|.
- ASSERT_EQ(num_channels_, vec.Channels());
- ASSERT_EQ(array_length(), vec.Size());
- ASSERT_EQ(num_channels_, vec_copy.Channels());
- ASSERT_EQ(array_length(), vec_copy.Size());
- for (size_t channel = 0; channel < vec.Channels(); ++channel) {
- for (size_t i = 0; i < array_length(); ++i) {
- EXPECT_EQ(static_cast<int16_t>((channel + 1) * 100 + i), vec[channel][i]);
- EXPECT_EQ(vec[channel][i], vec_copy[channel][i]);
- }
- }
-
- // Clear |vec| and verify that it is empty.
- vec.Clear();
- EXPECT_TRUE(vec.Empty());
-
- // Now copy the empty vector and verify that the copy becomes empty too.
- vec.CopyFrom(&vec_copy);
- EXPECT_TRUE(vec_copy.Empty());
-}
-
-// Try to copy to a NULL pointer. Nothing should happen.
-TEST_P(AudioMultiVectorTest, CopyToNull) {
- AudioMultiVector vec(num_channels_);
- AudioMultiVector* vec_copy = NULL;
- vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
- vec.CopyFrom(vec_copy);
-}
-
-// Test the PushBack method with another AudioMultiVector as input argument.
-TEST_P(AudioMultiVectorTest, PushBackVector) {
- AudioMultiVector vec1(num_channels_, array_length());
- AudioMultiVector vec2(num_channels_, array_length());
- // Set the first vector to [0, 1, ..., array_length() - 1] +
- // 100 * channel_number.
- // Set the second vector to [array_length(), array_length() + 1, ...,
- // 2 * array_length() - 1] + 100 * channel_number.
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- for (size_t i = 0; i < array_length(); ++i) {
- vec1[channel][i] = static_cast<int16_t>(i + 100 * channel);
- vec2[channel][i] =
- static_cast<int16_t>(i + 100 * channel + array_length());
- }
- }
- // Append vec2 to the back of vec1.
- vec1.PushBack(vec2);
- ASSERT_EQ(2u * array_length(), vec1.Size());
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- for (size_t i = 0; i < 2 * array_length(); ++i) {
- EXPECT_EQ(static_cast<int16_t>(i + 100 * channel), vec1[channel][i]);
- }
- }
-}
-
-// Test the PushBackFromIndex method.
-TEST_P(AudioMultiVectorTest, PushBackFromIndex) {
- AudioMultiVector vec1(num_channels_);
- vec1.PushBackInterleaved(array_interleaved_, interleaved_length_);
- AudioMultiVector vec2(num_channels_);
-
-  // Append vec1 to the back of vec2 (which is empty). Read vec1 from the
-  // second-to-last element.
- vec2.PushBackFromIndex(vec1, array_length() - 2);
- ASSERT_EQ(2u, vec2.Size());
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- for (size_t i = 0; i < 2; ++i) {
- EXPECT_EQ(array_interleaved_[channel + num_channels_ *
- (array_length() - 2 + i)], vec2[channel][i]);
- }
- }
-}
-
-// Starts by pushing some values to the vector, then tests the Zeros method.
-TEST_P(AudioMultiVectorTest, Zeros) {
- AudioMultiVector vec(num_channels_);
- vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
- vec.Zeros(2 * array_length());
- ASSERT_EQ(num_channels_, vec.Channels());
- ASSERT_EQ(2u * array_length(), vec.Size());
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- for (size_t i = 0; i < 2 * array_length(); ++i) {
- EXPECT_EQ(0, vec[channel][i]);
- }
- }
-}
-
-// Test the ReadInterleaved method.
-TEST_P(AudioMultiVectorTest, ReadInterleaved) {
- AudioMultiVector vec(num_channels_);
- vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
- int16_t* output = new int16_t[interleaved_length_];
- // Read 5 samples.
- size_t read_samples = 5;
- EXPECT_EQ(num_channels_ * read_samples,
- vec.ReadInterleaved(read_samples, output));
- EXPECT_EQ(0,
- memcmp(array_interleaved_, output, read_samples * sizeof(int16_t)));
-
- // Read too many samples. Expect to get all samples from the vector.
- EXPECT_EQ(interleaved_length_,
- vec.ReadInterleaved(array_length() + 1, output));
- EXPECT_EQ(0,
- memcmp(array_interleaved_, output, read_samples * sizeof(int16_t)));
-
- delete [] output;
-}
-
-// Try to read to a NULL pointer. Expected to return 0.
-TEST_P(AudioMultiVectorTest, ReadInterleavedToNull) {
- AudioMultiVector vec(num_channels_);
- vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
- int16_t* output = NULL;
- // Read 5 samples.
- size_t read_samples = 5;
- EXPECT_EQ(0u, vec.ReadInterleaved(read_samples, output));
-}
-
-// Test the PopFront method.
-TEST_P(AudioMultiVectorTest, PopFront) {
- AudioMultiVector vec(num_channels_);
- vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
- vec.PopFront(1); // Remove one element from each channel.
- ASSERT_EQ(array_length() - 1u, vec.Size());
- // Let |ptr| point to the second element of the first channel in the
- // interleaved array.
- int16_t* ptr = &array_interleaved_[num_channels_];
- for (size_t i = 0; i < array_length() - 1; ++i) {
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- EXPECT_EQ(*ptr, vec[channel][i]);
- ++ptr;
- }
- }
- vec.PopFront(array_length()); // Remove more elements than vector size.
- EXPECT_EQ(0u, vec.Size());
-}
-
-// Test the PopBack method.
-TEST_P(AudioMultiVectorTest, PopBack) {
- AudioMultiVector vec(num_channels_);
- vec.PushBackInterleaved(array_interleaved_, interleaved_length_);
- vec.PopBack(1); // Remove one element from each channel.
- ASSERT_EQ(array_length() - 1u, vec.Size());
- // Let |ptr| point to the first element of the first channel in the
- // interleaved array.
- int16_t* ptr = array_interleaved_;
- for (size_t i = 0; i < array_length() - 1; ++i) {
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- EXPECT_EQ(*ptr, vec[channel][i]);
- ++ptr;
- }
- }
- vec.PopBack(array_length()); // Remove more elements than vector size.
- EXPECT_EQ(0u, vec.Size());
-}
-
-// Test the AssertSize method.
-TEST_P(AudioMultiVectorTest, AssertSize) {
- AudioMultiVector vec(num_channels_, array_length());
- EXPECT_EQ(array_length(), vec.Size());
- // Start with asserting with smaller sizes than already allocated.
- vec.AssertSize(0);
- vec.AssertSize(array_length() - 1);
- // Nothing should have changed.
- EXPECT_EQ(array_length(), vec.Size());
- // Assert with one element longer than already allocated.
- vec.AssertSize(array_length() + 1);
- // Expect vector to have grown.
- EXPECT_EQ(array_length() + 1, vec.Size());
- // Also check the individual AudioVectors.
- for (size_t channel = 0; channel < vec.Channels(); ++channel) {
- EXPECT_EQ(array_length() + 1u, vec[channel].Size());
- }
-}
-
-// Test the OverwriteAt method with another AudioMultiVector as input argument.
-TEST_P(AudioMultiVectorTest, OverwriteAt) {
- AudioMultiVector vec1(num_channels_);
- vec1.PushBackInterleaved(array_interleaved_, interleaved_length_);
- AudioMultiVector vec2(num_channels_);
- vec2.Zeros(3); // 3 zeros in each channel.
-  // Overwrite vec1 with vec2, starting at position 5.
- vec1.OverwriteAt(vec2, 3, 5);
- // Verify result.
- // Length remains the same.
- ASSERT_EQ(array_length(), vec1.Size());
- int16_t* ptr = array_interleaved_;
- for (size_t i = 0; i < array_length() - 1; ++i) {
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- if (i >= 5 && i <= 7) {
- // Elements 5, 6, 7 should have been replaced with zeros.
- EXPECT_EQ(0, vec1[channel][i]);
- } else {
- EXPECT_EQ(*ptr, vec1[channel][i]);
- }
- ++ptr;
- }
- }
-}
-
-INSTANTIATE_TEST_CASE_P(TestNumChannels,
- AudioMultiVectorTest,
- ::testing::Values(static_cast<size_t>(1),
- static_cast<size_t>(2),
- static_cast<size_t>(5)));
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.cc
deleted file mode 100644
index cbd46163068..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.cc
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/audio_vector.h"
-
-#include <assert.h>
-
-#include <algorithm>
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-void AudioVector::Clear() {
- first_free_ix_ = 0;
-}
-
-void AudioVector::CopyFrom(AudioVector* copy_to) const {
- if (copy_to) {
- copy_to->Reserve(Size());
- assert(copy_to->capacity_ >= Size());
- memcpy(copy_to->array_.get(), array_.get(), Size() * sizeof(int16_t));
- copy_to->first_free_ix_ = first_free_ix_;
- }
-}
-
-void AudioVector::PushFront(const AudioVector& prepend_this) {
- size_t insert_length = prepend_this.Size();
- Reserve(Size() + insert_length);
- memmove(&array_[insert_length], &array_[0], Size() * sizeof(int16_t));
- memcpy(&array_[0], &prepend_this.array_[0], insert_length * sizeof(int16_t));
- first_free_ix_ += insert_length;
-}
-
-void AudioVector::PushFront(const int16_t* prepend_this, size_t length) {
- // Same operation as InsertAt beginning.
- InsertAt(prepend_this, length, 0);
-}
-
-void AudioVector::PushBack(const AudioVector& append_this) {
- PushBack(append_this.array_.get(), append_this.Size());
-}
-
-void AudioVector::PushBack(const int16_t* append_this, size_t length) {
- Reserve(Size() + length);
- memcpy(&array_[first_free_ix_], append_this, length * sizeof(int16_t));
- first_free_ix_ += length;
-}
-
-void AudioVector::PopFront(size_t length) {
- if (length >= Size()) {
- // Remove all elements.
- Clear();
- } else {
- size_t remaining_samples = Size() - length;
- memmove(&array_[0], &array_[length], remaining_samples * sizeof(int16_t));
- first_free_ix_ -= length;
- }
-}
-
-void AudioVector::PopBack(size_t length) {
- // Never remove more than what is in the array.
- length = std::min(length, Size());
- first_free_ix_ -= length;
-}
-
-void AudioVector::Extend(size_t extra_length) {
- Reserve(Size() + extra_length);
- memset(&array_[first_free_ix_], 0, extra_length * sizeof(int16_t));
- first_free_ix_ += extra_length;
-}
-
-void AudioVector::InsertAt(const int16_t* insert_this,
- size_t length,
- size_t position) {
- Reserve(Size() + length);
- // Cap the position at the current vector length, to be sure the iterator
- // does not extend beyond the end of the vector.
- position = std::min(Size(), position);
- int16_t* insert_position_ptr = &array_[position];
- size_t samples_to_move = Size() - position;
- memmove(insert_position_ptr + length, insert_position_ptr,
- samples_to_move * sizeof(int16_t));
- memcpy(insert_position_ptr, insert_this, length * sizeof(int16_t));
- first_free_ix_ += length;
-}
-
-void AudioVector::InsertZerosAt(size_t length,
- size_t position) {
- Reserve(Size() + length);
- // Cap the position at the current vector length, to be sure the iterator
- // does not extend beyond the end of the vector.
- position = std::min(capacity_, position);
- int16_t* insert_position_ptr = &array_[position];
- size_t samples_to_move = Size() - position;
- memmove(insert_position_ptr + length, insert_position_ptr,
- samples_to_move * sizeof(int16_t));
- memset(insert_position_ptr, 0, length * sizeof(int16_t));
- first_free_ix_ += length;
-}
-
-void AudioVector::OverwriteAt(const int16_t* insert_this,
- size_t length,
- size_t position) {
- // Cap the insert position at the current array length.
- position = std::min(Size(), position);
- Reserve(position + length);
- memcpy(&array_[position], insert_this, length * sizeof(int16_t));
- if (position + length > Size()) {
- // Array was expanded.
- first_free_ix_ += position + length - Size();
- }
-}
-
-void AudioVector::CrossFade(const AudioVector& append_this,
- size_t fade_length) {
- // Fade length cannot be longer than the current vector or |append_this|.
- assert(fade_length <= Size());
- assert(fade_length <= append_this.Size());
- fade_length = std::min(fade_length, Size());
- fade_length = std::min(fade_length, append_this.Size());
- size_t position = Size() - fade_length;
- // Cross fade the overlapping regions.
- // |alpha| is the mixing factor in Q14.
- // TODO(hlundin): Consider skipping +1 in the denominator to produce a
- // smoother cross-fade, in particular at the end of the fade.
- int alpha_step = 16384 / (static_cast<int>(fade_length) + 1);
- int alpha = 16384;
- for (size_t i = 0; i < fade_length; ++i) {
- alpha -= alpha_step;
- array_[position + i] = (alpha * array_[position + i] +
- (16384 - alpha) * append_this[i] + 8192) >> 14;
- }
- assert(alpha >= 0); // Verify that the slope was correct.
- // Append what is left of |append_this|.
- size_t samples_to_push_back = append_this.Size() - fade_length;
- if (samples_to_push_back > 0)
- PushBack(&append_this[fade_length], samples_to_push_back);
-}
-
-const int16_t& AudioVector::operator[](size_t index) const {
- return array_[index];
-}
-
-int16_t& AudioVector::operator[](size_t index) {
- return array_[index];
-}
-
-void AudioVector::Reserve(size_t n) {
- if (capacity_ < n) {
- scoped_ptr<int16_t[]> temp_array(new int16_t[n]);
- memcpy(temp_array.get(), array_.get(), Size() * sizeof(int16_t));
- array_.swap(temp_array);
- capacity_ = n;
- }
-}
-
-} // namespace webrtc
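The Q14 arithmetic in CrossFade() above implements a linear ramp: sample i of the overlap is weighted by roughly 1 - (i + 1) / (fade_length + 1) for the old signal and by the complement for the new one. A floating-point reference of that weighting, equivalent up to integer rounding; the function name CrossFadedSample is an assumption and the code is a sketch, not part of the tree:

#include <cstddef>
#include <cstdint>

// Floating-point counterpart of the Q14 cross-fade above; illustrative only.
int16_t CrossFadedSample(int16_t old_sample, int16_t new_sample,
                         size_t i, size_t fade_length) {
  const double alpha = 1.0 - static_cast<double>(i + 1) / (fade_length + 1);
  const double mixed = alpha * old_sample + (1.0 - alpha) * new_sample;
  return static_cast<int16_t>(mixed + 0.5);  // Round to nearest (non-negative case).
}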
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.h
deleted file mode 100644
index 66bd518a806..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.h
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_VECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_VECTOR_H_
-
-#include <string.h> // Access to size_t.
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class AudioVector {
- public:
- // Creates an empty AudioVector.
- AudioVector()
- : array_(new int16_t[kDefaultInitialSize]),
- first_free_ix_(0),
- capacity_(kDefaultInitialSize) {}
-
-  // Creates an AudioVector of length |initial_size|, initialized to zero.
- explicit AudioVector(size_t initial_size)
- : array_(new int16_t[initial_size]),
- first_free_ix_(initial_size),
- capacity_(initial_size) {
- memset(array_.get(), 0, initial_size * sizeof(int16_t));
- }
-
- virtual ~AudioVector() {}
-
-  // Deletes all values and makes the vector empty.
- virtual void Clear();
-
- // Copies all values from this vector to |copy_to|. Any contents in |copy_to|
- // are deleted before the copy operation. After the operation is done,
- // |copy_to| will be an exact replica of this object.
- virtual void CopyFrom(AudioVector* copy_to) const;
-
- // Prepends the contents of AudioVector |prepend_this| to this object. The
-  // length of this object is increased by the length of |prepend_this|.
- virtual void PushFront(const AudioVector& prepend_this);
-
- // Same as above, but with an array |prepend_this| with |length| elements as
- // source.
- virtual void PushFront(const int16_t* prepend_this, size_t length);
-
- // Same as PushFront but will append to the end of this object.
- virtual void PushBack(const AudioVector& append_this);
-
- // Same as PushFront but will append to the end of this object.
- virtual void PushBack(const int16_t* append_this, size_t length);
-
- // Removes |length| elements from the beginning of this object.
- virtual void PopFront(size_t length);
-
- // Removes |length| elements from the end of this object.
- virtual void PopBack(size_t length);
-
- // Extends this object with |extra_length| elements at the end. The new
- // elements are initialized to zero.
- virtual void Extend(size_t extra_length);
-
-  // Inserts |length| elements taken from the array |insert_this| and inserts
- // them at |position|. The length of the AudioVector is increased by |length|.
- // |position| = 0 means that the new values are prepended to the vector.
- // |position| = Size() means that the new values are appended to the vector.
- virtual void InsertAt(const int16_t* insert_this, size_t length,
- size_t position);
-
- // Like InsertAt, but inserts |length| zero elements at |position|.
- virtual void InsertZerosAt(size_t length, size_t position);
-
- // Overwrites |length| elements of this AudioVector with values taken from the
- // array |insert_this|, starting at |position|. The definition of |position|
- // is the same as for InsertAt(). If |length| and |position| are selected
- // such that the new data extends beyond the end of the current AudioVector,
- // the vector is extended to accommodate the new data.
- virtual void OverwriteAt(const int16_t* insert_this,
- size_t length,
- size_t position);
-
- // Appends |append_this| to the end of the current vector. Lets the two
- // vectors overlap by |fade_length| samples, and cross-fade linearly in this
- // region.
- virtual void CrossFade(const AudioVector& append_this, size_t fade_length);
-
- // Returns the number of elements in this AudioVector.
- virtual size_t Size() const { return first_free_ix_; }
-
- // Returns true if this AudioVector is empty.
- virtual bool Empty() const { return (first_free_ix_ == 0); }
-
- // Accesses and modifies an element of AudioVector.
- const int16_t& operator[](size_t index) const;
- int16_t& operator[](size_t index);
-
- private:
- static const size_t kDefaultInitialSize = 10;
-
- void Reserve(size_t n);
-
- scoped_ptr<int16_t[]> array_;
- size_t first_free_ix_; // The first index after the last sample in array_.
- // Note that this index may point outside of array_.
- size_t capacity_; // Allocated number of samples in the array.
-
- DISALLOW_COPY_AND_ASSIGN(AudioVector);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_VECTOR_H_
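As with AudioMultiVector, a short hedged usage sketch of this interface; the call sequence and the function name AudioVectorExample are illustrative assumptions, restricted to methods declared in the header above:

#include "webrtc/modules/audio_coding/neteq4/audio_vector.h"

void AudioVectorExample() {
  webrtc::AudioVector vec;
  const int16_t samples[] = {1, 2, 3, 4};
  vec.PushBack(samples, 4);    // vec: {1, 2, 3, 4}
  const int16_t insert[] = {9, 9};
  vec.InsertAt(insert, 2, 1);  // vec: {1, 9, 9, 2, 3, 4}
  vec.PopFront(2);             // vec: {9, 2, 3, 4}
  // vec.Size() == 4, vec[0] == 9, vec.Empty() == false.
}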
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc
deleted file mode 100644
index de5aac2d955..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc
+++ /dev/null
@@ -1,394 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/audio_vector.h"
-
-#include <assert.h>
-#include <stdlib.h>
-
-#include <string>
-
-#include "gtest/gtest.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class AudioVectorTest : public ::testing::Test {
- protected:
- virtual void SetUp() {
- // Populate test array.
- for (size_t i = 0; i < array_length(); ++i) {
- array_[i] = i;
- }
- }
-
- size_t array_length() const {
- return sizeof(array_) / sizeof(array_[0]);
- }
-
- int16_t array_[10];
-};
-
-// Create and destroy AudioVector objects, both empty and with a predefined
-// length.
-TEST_F(AudioVectorTest, CreateAndDestroy) {
- AudioVector vec1;
- EXPECT_TRUE(vec1.Empty());
- EXPECT_EQ(0u, vec1.Size());
-
- size_t initial_size = 17;
- AudioVector vec2(initial_size);
- EXPECT_FALSE(vec2.Empty());
- EXPECT_EQ(initial_size, vec2.Size());
-}
-
-// Test the subscript operator [] for getting and setting.
-TEST_F(AudioVectorTest, SubscriptOperator) {
- AudioVector vec(array_length());
- for (size_t i = 0; i < array_length(); ++i) {
- vec[i] = static_cast<int16_t>(i);
- const int16_t& value = vec[i]; // Make sure to use the const version.
- EXPECT_EQ(static_cast<int16_t>(i), value);
- }
-}
-
-// Test the PushBack method and the CopyFrom method. The Clear method is also
-// invoked.
-TEST_F(AudioVectorTest, PushBackAndCopy) {
- AudioVector vec;
- AudioVector vec_copy;
- vec.PushBack(array_, array_length());
- vec.CopyFrom(&vec_copy); // Copy from |vec| to |vec_copy|.
- ASSERT_EQ(array_length(), vec.Size());
- ASSERT_EQ(array_length(), vec_copy.Size());
- for (size_t i = 0; i < array_length(); ++i) {
- EXPECT_EQ(array_[i], vec[i]);
- EXPECT_EQ(array_[i], vec_copy[i]);
- }
-
- // Clear |vec| and verify that it is empty.
- vec.Clear();
- EXPECT_TRUE(vec.Empty());
-
- // Now copy the empty vector and verify that the copy becomes empty too.
- vec.CopyFrom(&vec_copy);
- EXPECT_TRUE(vec_copy.Empty());
-}
-
-// Try to copy to a NULL pointer. Nothing should happen.
-TEST_F(AudioVectorTest, CopyToNull) {
- AudioVector vec;
- AudioVector* vec_copy = NULL;
- vec.PushBack(array_, array_length());
- vec.CopyFrom(vec_copy);
-}
-
-// Test the PushBack method with another AudioVector as input argument.
-TEST_F(AudioVectorTest, PushBackVector) {
- static const size_t kLength = 10;
- AudioVector vec1(kLength);
- AudioVector vec2(kLength);
- // Set the first vector to [0, 1, ..., kLength - 1].
- // Set the second vector to [kLength, kLength + 1, ..., 2 * kLength - 1].
- for (size_t i = 0; i < kLength; ++i) {
- vec1[i] = static_cast<int16_t>(i);
- vec2[i] = static_cast<int16_t>(i + kLength);
- }
- // Append vec2 to the back of vec1.
- vec1.PushBack(vec2);
- ASSERT_EQ(2 * kLength, vec1.Size());
- for (size_t i = 0; i < 2 * kLength; ++i) {
- EXPECT_EQ(static_cast<int16_t>(i), vec1[i]);
- }
-}
-
-// Test the PushFront method.
-TEST_F(AudioVectorTest, PushFront) {
- AudioVector vec;
- vec.PushFront(array_, array_length());
- ASSERT_EQ(array_length(), vec.Size());
- for (size_t i = 0; i < array_length(); ++i) {
- EXPECT_EQ(array_[i], vec[i]);
- }
-}
-
-// Test the PushFront method with another AudioVector as input argument.
-TEST_F(AudioVectorTest, PushFrontVector) {
- static const size_t kLength = 10;
- AudioVector vec1(kLength);
- AudioVector vec2(kLength);
- // Set the first vector to [0, 1, ..., kLength - 1].
- // Set the second vector to [kLength, kLength + 1, ..., 2 * kLength - 1].
- for (size_t i = 0; i < kLength; ++i) {
- vec1[i] = static_cast<int16_t>(i);
- vec2[i] = static_cast<int16_t>(i + kLength);
- }
- // Prepend vec1 to the front of vec2.
- vec2.PushFront(vec1);
- ASSERT_EQ(2 * kLength, vec2.Size());
- for (size_t i = 0; i < 2 * kLength; ++i) {
- EXPECT_EQ(static_cast<int16_t>(i), vec2[i]);
- }
-}
-
-// Test the PopFront method.
-TEST_F(AudioVectorTest, PopFront) {
- AudioVector vec;
- vec.PushBack(array_, array_length());
- vec.PopFront(1); // Remove one element.
- EXPECT_EQ(array_length() - 1u, vec.Size());
- for (size_t i = 0; i < array_length() - 1; ++i) {
- EXPECT_EQ(static_cast<int16_t>(i + 1), vec[i]);
- }
- vec.PopFront(array_length()); // Remove more elements than vector size.
- EXPECT_EQ(0u, vec.Size());
-}
-
-// Test the PopBack method.
-TEST_F(AudioVectorTest, PopBack) {
- AudioVector vec;
- vec.PushBack(array_, array_length());
- vec.PopBack(1); // Remove one element.
- EXPECT_EQ(array_length() - 1u, vec.Size());
- for (size_t i = 0; i < array_length() - 1; ++i) {
- EXPECT_EQ(static_cast<int16_t>(i), vec[i]);
- }
- vec.PopBack(array_length()); // Remove more elements than vector size.
- EXPECT_EQ(0u, vec.Size());
-}
-
-// Test the Extend method.
-TEST_F(AudioVectorTest, Extend) {
- AudioVector vec;
- vec.PushBack(array_, array_length());
- vec.Extend(5); // Extend with 5 elements, which should all be zeros.
- ASSERT_EQ(array_length() + 5u, vec.Size());
- // Verify that all are zero.
- for (size_t i = array_length(); i < array_length() + 5; ++i) {
- EXPECT_EQ(0, vec[i]);
- }
-}
-
-// Test the InsertAt method with an insert position in the middle of the vector.
-TEST_F(AudioVectorTest, InsertAt) {
- AudioVector vec;
- vec.PushBack(array_, array_length());
- static const int kNewLength = 5;
- int16_t new_array[kNewLength];
- // Set array elements to {100, 101, 102, ... }.
- for (int i = 0; i < kNewLength; ++i) {
- new_array[i] = 100 + i;
- }
- int insert_position = 5;
- vec.InsertAt(new_array, kNewLength, insert_position);
- // Verify that the vector looks as follows:
- // {0, 1, ..., |insert_position| - 1, 100, 101, ..., 100 + kNewLength - 1,
- // |insert_position|, |insert_position| + 1, ..., kLength - 1}.
- size_t pos = 0;
- for (int i = 0; i < insert_position; ++i) {
- EXPECT_EQ(array_[i], vec[pos]);
- ++pos;
- }
- for (int i = 0; i < kNewLength; ++i) {
- EXPECT_EQ(new_array[i], vec[pos]);
- ++pos;
- }
- for (size_t i = insert_position; i < array_length(); ++i) {
- EXPECT_EQ(array_[i], vec[pos]);
- ++pos;
- }
-}
-
-// Test the InsertZerosAt method with an insert position in the middle of the
-// vector. Use the InsertAt method as reference.
-TEST_F(AudioVectorTest, InsertZerosAt) {
- AudioVector vec;
- AudioVector vec_ref;
- vec.PushBack(array_, array_length());
- vec_ref.PushBack(array_, array_length());
- static const int kNewLength = 5;
- int insert_position = 5;
- vec.InsertZerosAt(kNewLength, insert_position);
- int16_t new_array[kNewLength] = {0}; // All zero elements.
- vec_ref.InsertAt(new_array, kNewLength, insert_position);
- // Verify that the vectors are identical.
- ASSERT_EQ(vec_ref.Size(), vec.Size());
- for (size_t i = 0; i < vec.Size(); ++i) {
- EXPECT_EQ(vec_ref[i], vec[i]);
- }
-}
-
-// Test the InsertAt method with an insert position at the start of the vector.
-TEST_F(AudioVectorTest, InsertAtBeginning) {
- AudioVector vec;
- vec.PushBack(array_, array_length());
- static const int kNewLength = 5;
- int16_t new_array[kNewLength];
- // Set array elements to {100, 101, 102, ... }.
- for (int i = 0; i < kNewLength; ++i) {
- new_array[i] = 100 + i;
- }
- int insert_position = 0;
- vec.InsertAt(new_array, kNewLength, insert_position);
- // Verify that the vector looks as follows:
- // {100, 101, ..., 100 + kNewLength - 1,
- // 0, 1, ..., kLength - 1}.
- size_t pos = 0;
- for (int i = 0; i < kNewLength; ++i) {
- EXPECT_EQ(new_array[i], vec[pos]);
- ++pos;
- }
- for (size_t i = insert_position; i < array_length(); ++i) {
- EXPECT_EQ(array_[i], vec[pos]);
- ++pos;
- }
-}
-
-// Test the InsertAt method with an insert position at the end of the vector.
-TEST_F(AudioVectorTest, InsertAtEnd) {
- AudioVector vec;
- vec.PushBack(array_, array_length());
- static const int kNewLength = 5;
- int16_t new_array[kNewLength];
- // Set array elements to {100, 101, 102, ... }.
- for (int i = 0; i < kNewLength; ++i) {
- new_array[i] = 100 + i;
- }
- int insert_position = array_length();
- vec.InsertAt(new_array, kNewLength, insert_position);
- // Verify that the vector looks as follows:
- // {0, 1, ..., kLength - 1, 100, 101, ..., 100 + kNewLength - 1 }.
- size_t pos = 0;
- for (size_t i = 0; i < array_length(); ++i) {
- EXPECT_EQ(array_[i], vec[pos]);
- ++pos;
- }
- for (int i = 0; i < kNewLength; ++i) {
- EXPECT_EQ(new_array[i], vec[pos]);
- ++pos;
- }
-}
-
-// Test the InsertAt method with an insert position beyond the end of the
-// vector. Verify that a position beyond the end of the vector does not lead to
-// an error. The expected outcome is the same as if the vector end was used as
-// input position. That is, the input position should be capped at the maximum
-// allowed value.
-TEST_F(AudioVectorTest, InsertBeyondEnd) {
- AudioVector vec;
- vec.PushBack(array_, array_length());
- static const int kNewLength = 5;
- int16_t new_array[kNewLength];
- // Set array elements to {100, 101, 102, ... }.
- for (int i = 0; i < kNewLength; ++i) {
- new_array[i] = 100 + i;
- }
- int insert_position = array_length() + 10; // Too large.
- vec.InsertAt(new_array, kNewLength, insert_position);
- // Verify that the vector looks as follows:
- // {0, 1, ..., kLength - 1, 100, 101, ..., 100 + kNewLength - 1 }.
- size_t pos = 0;
- for (size_t i = 0; i < array_length(); ++i) {
- EXPECT_EQ(array_[i], vec[pos]);
- ++pos;
- }
- for (int i = 0; i < kNewLength; ++i) {
- EXPECT_EQ(new_array[i], vec[pos]);
- ++pos;
- }
-}
-
-// Test the OverwriteAt method with a position such that all of the new values
-// fit within the old vector.
-TEST_F(AudioVectorTest, OverwriteAt) {
- AudioVector vec;
- vec.PushBack(array_, array_length());
- static const int kNewLength = 5;
- int16_t new_array[kNewLength];
- // Set array elements to {100, 101, 102, ... }.
- for (int i = 0; i < kNewLength; ++i) {
- new_array[i] = 100 + i;
- }
- size_t insert_position = 2;
- vec.OverwriteAt(new_array, kNewLength, insert_position);
- // Verify that the vector looks as follows:
-  // {0, ..., |insert_position| - 1, 100, 101, ..., 100 + kNewLength - 1,
-  //  |insert_position| + kNewLength, ..., kLength - 1}.
- size_t pos = 0;
- for (pos = 0; pos < insert_position; ++pos) {
- EXPECT_EQ(array_[pos], vec[pos]);
- }
- for (int i = 0; i < kNewLength; ++i) {
- EXPECT_EQ(new_array[i], vec[pos]);
- ++pos;
- }
- for (; pos < array_length(); ++pos) {
- EXPECT_EQ(array_[pos], vec[pos]);
- }
-}
-
-// Test the OverwriteAt method with a position such that some of the new values
-// extend beyond the end of the current vector. This is valid, and the vector is
-// expected to expand to accommodate the new values.
-TEST_F(AudioVectorTest, OverwriteBeyondEnd) {
- AudioVector vec;
- vec.PushBack(array_, array_length());
- static const int kNewLength = 5;
- int16_t new_array[kNewLength];
- // Set array elements to {100, 101, 102, ... }.
- for (int i = 0; i < kNewLength; ++i) {
- new_array[i] = 100 + i;
- }
- int insert_position = array_length() - 2;
- vec.OverwriteAt(new_array, kNewLength, insert_position);
- ASSERT_EQ(array_length() - 2u + kNewLength, vec.Size());
- // Verify that the vector looks as follows:
-  // {0, ..., |insert_position| - 1, 100, 101, ..., 100 + kNewLength - 1}.
- int pos = 0;
- for (pos = 0; pos < insert_position; ++pos) {
- EXPECT_EQ(array_[pos], vec[pos]);
- }
- for (int i = 0; i < kNewLength; ++i) {
- EXPECT_EQ(new_array[i], vec[pos]);
- ++pos;
- }
- // Verify that we checked to the end of |vec|.
- EXPECT_EQ(vec.Size(), static_cast<size_t>(pos));
-}
-
-TEST_F(AudioVectorTest, CrossFade) {
- static const size_t kLength = 100;
- static const size_t kFadeLength = 10;
- AudioVector vec1(kLength);
- AudioVector vec2(kLength);
- // Set all vector elements to 0 in |vec1| and 100 in |vec2|.
- for (size_t i = 0; i < kLength; ++i) {
- vec1[i] = 0;
- vec2[i] = 100;
- }
- vec1.CrossFade(vec2, kFadeLength);
- ASSERT_EQ(2 * kLength - kFadeLength, vec1.Size());
- // First part untouched.
- for (size_t i = 0; i < kLength - kFadeLength; ++i) {
- EXPECT_EQ(0, vec1[i]);
- }
- // Check mixing zone.
- for (size_t i = 0 ; i < kFadeLength; ++i) {
- EXPECT_NEAR((i + 1) * 100 / (kFadeLength + 1),
- vec1[kLength - kFadeLength + i], 1);
- }
- // Second part untouched.
- for (size_t i = kLength; i < vec1.Size(); ++i) {
- EXPECT_EQ(100, vec1[i]);
- }
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.cc
deleted file mode 100644
index 2dfb3c1f392..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.cc
+++ /dev/null
@@ -1,260 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-
-#include <assert.h>
-#include <string.h> // memcpy
-
-#include <algorithm> // min, max
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/post_decode_vad.h"
-
-namespace webrtc {
-
-BackgroundNoise::BackgroundNoise(size_t num_channels)
- : num_channels_(num_channels),
- channel_parameters_(new ChannelParameters[num_channels_]),
- mode_(kBgnOn) {
- Reset();
-}
-
-BackgroundNoise::~BackgroundNoise() {}
-
-void BackgroundNoise::Reset() {
- initialized_ = false;
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- channel_parameters_[channel].Reset();
- }
-  // Keep mode_ as it is.
-}
-
-void BackgroundNoise::Update(const AudioMultiVector& input,
- const PostDecodeVad& vad) {
- if (vad.running() && vad.active_speech()) {
- // Do not update the background noise parameters if we know that the signal
- // is active speech.
- return;
- }
-
- int32_t auto_correlation[kMaxLpcOrder + 1];
-  int16_t filter_output[kMaxLpcOrder + kResidualLength];
- int16_t reflection_coefficients[kMaxLpcOrder];
- int16_t lpc_coefficients[kMaxLpcOrder + 1];
-
- for (size_t channel_ix = 0; channel_ix < num_channels_; ++channel_ix) {
- ChannelParameters& parameters = channel_parameters_[channel_ix];
- int16_t temp_signal_array[kVecLen + kMaxLpcOrder] = {0};
- int16_t* temp_signal = &temp_signal_array[kMaxLpcOrder];
- memcpy(temp_signal,
- &input[channel_ix][input.Size() - kVecLen],
- sizeof(int16_t) * kVecLen);
-
- int32_t sample_energy = CalculateAutoCorrelation(temp_signal, kVecLen,
- auto_correlation);
-
- if ((!vad.running() &&
- sample_energy < parameters.energy_update_threshold) ||
- (vad.running() && !vad.active_speech())) {
- // Generate LPC coefficients.
- if (auto_correlation[0] > 0) {
- // Regardless of whether the filter is actually updated or not,
- // update energy threshold levels, since we have in fact observed
- // a low energy signal.
- if (sample_energy < parameters.energy_update_threshold) {
- // Never go under 1.0 in average sample energy.
- parameters.energy_update_threshold = std::max(sample_energy, 1);
- parameters.low_energy_update_threshold = 0;
- }
-
- // Only update BGN if filter is stable, i.e., if return value from
- // Levinson-Durbin function is 1.
- if (WebRtcSpl_LevinsonDurbin(auto_correlation, lpc_coefficients,
- reflection_coefficients,
- kMaxLpcOrder) != 1) {
- return;
- }
- } else {
- // Center value in auto-correlation is not positive. Do not update.
- return;
- }
-
- // Generate the CNG gain factor by looking at the energy of the residual.
-      WebRtcSpl_FilterMAFastQ12(temp_signal + kVecLen - kResidualLength,
-                                filter_output, lpc_coefficients,
-                                kMaxLpcOrder + 1, kResidualLength);
-      int32_t residual_energy = WebRtcSpl_DotProductWithScale(filter_output,
-                                                              filter_output,
-                                                              kResidualLength,
-                                                              0);
-
- // Check spectral flatness.
- // Comparing the residual variance with the input signal variance tells
- // if the spectrum is flat or not.
- // If 20 * residual_energy >= sample_energy << 6, the spectrum is flat
- // enough. Also ensure that the energy is non-zero.
- if ((residual_energy * 20 >= (sample_energy << 6)) &&
- (sample_energy > 0)) {
- // Spectrum is flat enough; save filter parameters.
- // |temp_signal| + |kVecLen| - |kMaxLpcOrder| points at the first of the
- // |kMaxLpcOrder| samples in the residual signal, which will form the
- // filter state for the next noise generation.
- SaveParameters(channel_ix, lpc_coefficients,
- temp_signal + kVecLen - kMaxLpcOrder, sample_energy,
- residual_energy);
- }
- } else {
- // Will only happen if post-decode VAD is disabled and |sample_energy| is
- // not low enough. Increase the threshold for update so that it increases
- // by a factor 4 in 4 seconds.
- IncrementEnergyThreshold(channel_ix, sample_energy);
- }
- }
- return;
-}
-
-int32_t BackgroundNoise::Energy(size_t channel) const {
- assert(channel < num_channels_);
- return channel_parameters_[channel].energy;
-}
-
-void BackgroundNoise::SetMuteFactor(size_t channel, int16_t value) {
- assert(channel < num_channels_);
- channel_parameters_[channel].mute_factor = value;
-}
-
-int16_t BackgroundNoise::MuteFactor(size_t channel) const {
- assert(channel < num_channels_);
- return channel_parameters_[channel].mute_factor;
-}
-
-const int16_t* BackgroundNoise::Filter(size_t channel) const {
- assert(channel < num_channels_);
- return channel_parameters_[channel].filter;
-}
-
-const int16_t* BackgroundNoise::FilterState(size_t channel) const {
- assert(channel < num_channels_);
- return channel_parameters_[channel].filter_state;
-}
-
-void BackgroundNoise::SetFilterState(size_t channel, const int16_t* input,
- size_t length) {
- assert(channel < num_channels_);
- length = std::min(length, static_cast<size_t>(kMaxLpcOrder));
- memcpy(channel_parameters_[channel].filter_state, input,
- length * sizeof(int16_t));
-}
-
-int16_t BackgroundNoise::Scale(size_t channel) const {
- assert(channel < num_channels_);
- return channel_parameters_[channel].scale;
-}
-int16_t BackgroundNoise::ScaleShift(size_t channel) const {
- assert(channel < num_channels_);
- return channel_parameters_[channel].scale_shift;
-}
-
-int32_t BackgroundNoise::CalculateAutoCorrelation(
- const int16_t* signal, int length, int32_t* auto_correlation) const {
- int16_t signal_max = WebRtcSpl_MaxAbsValueW16(signal, length);
- int correlation_scale = kLogVecLen -
- WebRtcSpl_NormW32(signal_max * signal_max);
- correlation_scale = std::max(0, correlation_scale);
-
- static const int kCorrelationStep = -1;
- WebRtcSpl_CrossCorrelation(auto_correlation, signal, signal, length,
- kMaxLpcOrder + 1, correlation_scale,
- kCorrelationStep);
-
- // Number of shifts to normalize energy to energy/sample.
- int energy_sample_shift = kLogVecLen - correlation_scale;
- return auto_correlation[0] >> energy_sample_shift;
-}
-
-void BackgroundNoise::IncrementEnergyThreshold(size_t channel,
- int32_t sample_energy) {
- // TODO(hlundin): Simplify the below threshold update. What this code
- // does is simply "threshold += (increment * threshold) >> 16", but due
- // to the limited-width operations, it is not exactly the same. The
- // difference should be inaudible, but bit-exactness would not be
- // maintained.
- assert(channel < num_channels_);
- ChannelParameters& parameters = channel_parameters_[channel];
- int32_t temp_energy =
- WEBRTC_SPL_MUL_16_16_RSFT(kThresholdIncrement,
- parameters.low_energy_update_threshold, 16);
- temp_energy += kThresholdIncrement *
- (parameters.energy_update_threshold & 0xFF);
- temp_energy += (kThresholdIncrement *
- ((parameters.energy_update_threshold>>8) & 0xFF)) << 8;
- parameters.low_energy_update_threshold += temp_energy;
-
- parameters.energy_update_threshold += kThresholdIncrement *
- (parameters.energy_update_threshold>>16);
- parameters.energy_update_threshold +=
- parameters.low_energy_update_threshold >> 16;
- parameters.low_energy_update_threshold =
- parameters.low_energy_update_threshold & 0x0FFFF;
-
- // Update maximum energy.
- // Decrease by a factor 1/1024 each time.
- parameters.max_energy = parameters.max_energy -
- (parameters.max_energy >> 10);
- if (sample_energy > parameters.max_energy) {
- parameters.max_energy = sample_energy;
- }
-
- // Set |energy_update_threshold| to no less than 60 dB lower than
- // |max_energy_|. Adding 524288 assures proper rounding.
- int32_t energy_update_threshold = (parameters.max_energy + 524288) >> 20;
- if (energy_update_threshold > parameters.energy_update_threshold) {
- parameters.energy_update_threshold = energy_update_threshold;
- }
-}
-
-void BackgroundNoise::SaveParameters(size_t channel,
- const int16_t* lpc_coefficients,
- const int16_t* filter_state,
- int32_t sample_energy,
- int32_t residual_energy) {
- assert(channel < num_channels_);
- ChannelParameters& parameters = channel_parameters_[channel];
- memcpy(parameters.filter, lpc_coefficients,
- (kMaxLpcOrder+1) * sizeof(int16_t));
- memcpy(parameters.filter_state, filter_state,
- kMaxLpcOrder * sizeof(int16_t));
- // Save energy level and update energy threshold levels.
- // Never get under 1.0 in average sample energy.
- parameters.energy = std::max(sample_energy, 1);
- parameters.energy_update_threshold = parameters.energy;
- parameters.low_energy_update_threshold = 0;
-
- // Normalize residual_energy to 29 or 30 bits before sqrt.
- int norm_shift = WebRtcSpl_NormW32(residual_energy) - 1;
- if (norm_shift & 0x1) {
- norm_shift -= 1; // Even number of shifts required.
- }
- assert(norm_shift >= 0); // Should always be non-negative.
- residual_energy = residual_energy << norm_shift;
-
- // Calculate scale and shift factor.
- parameters.scale = WebRtcSpl_SqrtFloor(residual_energy);
- // Add 13 to the |scale_shift_|, since the random numbers table is in
- // Q13.
- // TODO(hlundin): Move the "13" to where the |scale_shift_| is used?
- parameters.scale_shift = 13 + ((kLogResidualLength + norm_shift) / 2);
-
- initialized_ = true;
-}
-
-} // namespace webrtc
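
A minimal standalone sketch of the simplified update that the TODO in IncrementEnergyThreshold() above describes ("threshold += (increment * threshold) >> 16"). It is not bit-exact with the fixed-point code in the deleted file, and it assumes one update per 10 ms output block, which is where the "factor 4 in 4 seconds" figure comes from.

    #include <cstdint>
    #include <cstdio>

    // Simplified threshold update, as described by the TODO above.
    int32_t SimplifiedThresholdUpdate(int32_t threshold, int32_t increment_q16) {
      return threshold + static_cast<int32_t>(
          (static_cast<int64_t>(increment_q16) * threshold) >> 16);
    }

    int main() {
      const int32_t kThresholdIncrement = 229;  // 0.0035 in Q16.
      int32_t threshold = 500000;               // Reset value of the threshold.
      // With one update per 10 ms block, 4 seconds is 400 updates, and
      // (1 + 0.0035)^400 is roughly 4.
      for (int i = 0; i < 400; ++i) {
        threshold = SimplifiedThresholdUpdate(threshold, kThresholdIncrement);
      }
      std::printf("threshold after 4 s: %d\n", threshold);  // Roughly 2 million.
      return 0;
    }
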
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.h
deleted file mode 100644
index ac5446bf7f6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.h
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BACKGROUND_NOISE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BACKGROUND_NOISE_H_
-
-#include <string.h> // size_t
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class PostDecodeVad;
-
-// This class handles estimation of background noise parameters.
-class BackgroundNoise {
- public:
- // TODO(hlundin): For 48 kHz support, increase kMaxLpcOrder to 10.
- // Will work anyway, but probably sound a little worse.
- static const int kMaxLpcOrder = 8; // 32000 / 8000 + 4.
-
- explicit BackgroundNoise(size_t num_channels);
- virtual ~BackgroundNoise();
-
- void Reset();
-
- // Updates the parameter estimates based on the signal currently in the
- // |sync_buffer|, and on the latest decision in |vad| if it is running.
- void Update(const AudioMultiVector& sync_buffer,
- const PostDecodeVad& vad);
-
- // Returns |energy_| for |channel|.
- int32_t Energy(size_t channel) const;
-
- // Sets the value of |mute_factor_| for |channel| to |value|.
- void SetMuteFactor(size_t channel, int16_t value);
-
- // Returns |mute_factor_| for |channel|.
- int16_t MuteFactor(size_t channel) const;
-
- // Returns a pointer to |filter_| for |channel|.
- const int16_t* Filter(size_t channel) const;
-
- // Returns a pointer to |filter_state_| for |channel|.
- const int16_t* FilterState(size_t channel) const;
-
- // Copies |length| elements from |input| to the filter state. Will not copy
- // more than |kMaxLpcOrder| elements.
- void SetFilterState(size_t channel, const int16_t* input, size_t length);
-
- // Returns |scale_| for |channel|.
- int16_t Scale(size_t channel) const;
-
- // Returns |scale_shift_| for |channel|.
- int16_t ScaleShift(size_t channel) const;
-
- // Accessors.
- bool initialized() const { return initialized_; }
- NetEqBackgroundNoiseMode mode() const { return mode_; }
-
- // Sets the mode of the background noise playout for cases when there is long
- // duration of packet loss.
- void set_mode(NetEqBackgroundNoiseMode mode) { mode_ = mode; }
-
- private:
- static const int kThresholdIncrement = 229; // 0.0035 in Q16.
- static const int kVecLen = 256;
- static const int kLogVecLen = 8; // log2(kVecLen).
- static const int kResidualLength = 64;
- static const int kLogResidualLength = 6; // log2(kResidualLength)
-
- struct ChannelParameters {
- // Constructor.
- ChannelParameters() {
- Reset();
- }
-
- void Reset() {
- energy = 2500;
- max_energy = 0;
- energy_update_threshold = 500000;
- low_energy_update_threshold = 0;
- memset(filter_state, 0, sizeof(filter_state));
- memset(filter, 0, sizeof(filter));
- filter[0] = 4096;
- mute_factor = 0;
- scale = 20000;
- scale_shift = 24;
- }
-
- int32_t energy;
- int32_t max_energy;
- int32_t energy_update_threshold;
- int32_t low_energy_update_threshold;
- int16_t filter_state[kMaxLpcOrder];
- int16_t filter[kMaxLpcOrder + 1];
- int16_t mute_factor;
- int16_t scale;
- int16_t scale_shift;
- };
-
- int32_t CalculateAutoCorrelation(const int16_t* signal,
- int length,
- int32_t* auto_correlation) const;
-
- // Increments the energy threshold by a factor 1 + |kThresholdIncrement|.
- void IncrementEnergyThreshold(size_t channel, int32_t sample_energy);
-
- // Updates the filter parameters.
- void SaveParameters(size_t channel,
- const int16_t* lpc_coefficients,
- const int16_t* filter_state,
- int32_t sample_energy,
- int32_t residual_energy);
-
- size_t num_channels_;
- scoped_array<ChannelParameters> channel_parameters_;
- bool initialized_;
- NetEqBackgroundNoiseMode mode_;
-
- DISALLOW_COPY_AND_ASSIGN(BackgroundNoise);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BACKGROUND_NOISE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise_unittest.cc
deleted file mode 100644
index eb7b9fa1edd..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise_unittest.cc
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for BackgroundNoise class.
-
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-
-#include "gtest/gtest.h"
-
-namespace webrtc {
-
-TEST(BackgroundNoise, CreateAndDestroy) {
- size_t channels = 1;
- BackgroundNoise bgn(channels);
-}
-
-// TODO(hlundin): Write more tests.
-
-} // namespace webrtc
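
One possible additional test in the spirit of the TODO above (hypothetical, not taken from the WebRTC tree; it would live in the same file and use the same includes): exercise the per-channel mute factor accessors and check that an untouched channel keeps its reset value of 0.

    TEST(BackgroundNoise, SetAndGetMuteFactor) {
      const size_t kChannels = 2;
      BackgroundNoise bgn(kChannels);
      bgn.SetMuteFactor(0, 16384);
      EXPECT_EQ(16384, bgn.MuteFactor(0));
      EXPECT_EQ(0, bgn.MuteFactor(1));  // Reset value from ChannelParameters.
    }
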
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.cc
deleted file mode 100644
index 70b49310660..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.cc
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-
-#include <algorithm> // Provide access to std::max.
-
-namespace webrtc {
-
-BufferLevelFilter::BufferLevelFilter() {
- Reset();
-}
-
-void BufferLevelFilter::Reset() {
- filtered_current_level_ = 0;
- level_factor_ = 253;
-}
-
-void BufferLevelFilter::Update(int buffer_size_packets,
- int time_stretched_samples,
- int packet_len_samples) {
- // Filter:
- // |filtered_current_level_| = |level_factor_| * |filtered_current_level_| +
- // (1 - |level_factor_|) * |buffer_size_packets|
- // |level_factor_| and |filtered_current_level_| are in Q8.
- // |buffer_size_packets| is in Q0.
- filtered_current_level_ = ((level_factor_ * filtered_current_level_) >> 8) +
- ((256 - level_factor_) * buffer_size_packets);
-
- // Account for time-scale operations (accelerate and pre-emptive expand).
- if (time_stretched_samples && packet_len_samples > 0) {
- // Time-scaling has been performed since last filter update. Subtract the
- // value of |time_stretched_samples| from |filtered_current_level_| after
- // converting |time_stretched_samples| from samples to packets in Q8.
- // Make sure that the filtered value remains non-negative.
- filtered_current_level_ = std::max(0,
- filtered_current_level_ -
- (time_stretched_samples << 8) / packet_len_samples);
- }
-}
-
-void BufferLevelFilter::SetTargetBufferLevel(int target_buffer_level) {
- if (target_buffer_level <= 1) {
- level_factor_ = 251;
- } else if (target_buffer_level <= 3) {
- level_factor_ = 252;
- } else if (target_buffer_level <= 7) {
- level_factor_ = 253;
- } else {
- level_factor_ = 254;
- }
-}
-} // namespace webrtc
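
A minimal standalone sketch of the Q8 smoothing step implemented in Update() above, with the time-stretch compensation omitted, showing how a constant input level is approached:

    #include <cstdio>

    int main() {
      const int kLevelFactorQ8 = 253;     // Default coefficient, as in Reset().
      int filtered_level_q8 = 0;          // Filtered buffer level, Q8.
      const int kBufferSizePackets = 10;  // Constant measured level, Q0.
      for (int i = 0; i < 500; ++i) {
        filtered_level_q8 = ((kLevelFactorQ8 * filtered_level_q8) >> 8) +
                            (256 - kLevelFactorQ8) * kBufferSizePackets;
      }
      // The filter converges towards kBufferSizePackets << 8; the >> 8 in the
      // update truncates, so the steady state sits marginally below 10 packets.
      std::printf("steady state: %d in Q8, i.e. about %d packets\n",
                  filtered_level_q8, filtered_level_q8 >> 8);
      return 0;
    }
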
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.h
deleted file mode 100644
index 282ab7a2221..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BUFFER_LEVEL_FILTER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BUFFER_LEVEL_FILTER_H_
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-
-namespace webrtc {
-
-class BufferLevelFilter {
- public:
- BufferLevelFilter();
- virtual ~BufferLevelFilter() {}
- virtual void Reset();
-
- // Updates the filter. Current buffer size is |buffer_size_packets| (Q0).
- // If |time_stretched_samples| is non-zero, the value is converted to the
- // corresponding number of packets, and is subtracted from the filtered
- // value (thus bypassing the filter operation). |packet_len_samples| is the
- // number of audio samples carried in each incoming packet.
- virtual void Update(int buffer_size_packets, int time_stretched_samples,
- int packet_len_samples);
-
- // Set the current target buffer level (obtained from
- // DelayManager::base_target_level()). Used to select the appropriate
- // filter coefficient.
- virtual void SetTargetBufferLevel(int target_buffer_level);
-
- virtual int filtered_current_level() const { return filtered_current_level_; }
-
- private:
- int level_factor_; // Filter factor for the buffer level filter in Q8.
- int filtered_current_level_; // Filtered current buffer level in Q8.
-
- DISALLOW_COPY_AND_ASSIGN(BufferLevelFilter);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BUFFER_LEVEL_FILTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter_unittest.cc
deleted file mode 100644
index ddaf08d1d38..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter_unittest.cc
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for BufferLevelFilter class.
-
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-
-#include <math.h> // Access to pow function.
-
-#include "gtest/gtest.h"
-
-namespace webrtc {
-
-TEST(BufferLevelFilter, CreateAndDestroy) {
- BufferLevelFilter* filter = new BufferLevelFilter();
- EXPECT_EQ(0, filter->filtered_current_level());
- delete filter;
-}
-
-TEST(BufferLevelFilter, ConvergenceTest) {
- BufferLevelFilter filter;
- for (int times = 10; times <= 50; times += 10) {
- for (int value = 100; value <= 200; value += 10) {
- filter.Reset();
- filter.SetTargetBufferLevel(1); // Makes filter coefficient 251/256.
- std::ostringstream ss;
- ss << "times = " << times << ", value = " << value;
- SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
- for (int i = 0; i < times; ++i) {
- filter.Update(value, 0 /* time_stretched_samples */,
- 160 /* packet_len_samples */);
- }
- // Expect the filtered value to be (theoretically)
- // (1 - (251/256) ^ |times|) * |value|.
- double expected_value_double =
- (1 - pow(251.0 / 256.0, times)) * value;
- int expected_value = static_cast<int>(expected_value_double);
- // filtered_current_level() returns the value in Q8.
- // The actual value may differ slightly from the expected value due to
- // intermediate-stage rounding errors in the filter implementation.
- // This is why we have to use EXPECT_NEAR with a tolerance of +/-1.
- EXPECT_NEAR(expected_value, filter.filtered_current_level() >> 8, 1);
- }
- }
-}
-
-// Verify that the target buffer level impacts the filter convergence.
-TEST(BufferLevelFilter, FilterFactor) {
- BufferLevelFilter filter;
- // Update 10 times with value 100.
- const int kTimes = 10;
- const int kValue = 100;
-
- filter.SetTargetBufferLevel(3); // Makes filter coefficient 252/256.
- for (int i = 0; i < kTimes; ++i) {
- filter.Update(kValue, 0 /* time_stretched_samples */,
- 160 /* packet_len_samples */);
- }
- // Expect the filtered value to be
- // (1 - (252/256) ^ |kTimes|) * |kValue|.
- int expected_value = 14;
- // filtered_current_level() returns the value in Q8.
- EXPECT_EQ(expected_value, filter.filtered_current_level() >> 8);
-
- filter.Reset();
- filter.SetTargetBufferLevel(7); // Makes filter coefficient 253/256.
- for (int i = 0; i < kTimes; ++i) {
- filter.Update(kValue, 0 /* time_stretched_samples */,
- 160 /* packet_len_samples */);
- }
- // Expect the filtered value to be
- // (1 - (253/256) ^ |kTimes|) * |kValue|.
- expected_value = 11;
- // filtered_current_level() returns the value in Q8.
- EXPECT_EQ(expected_value, filter.filtered_current_level() >> 8);
-
- filter.Reset();
- filter.SetTargetBufferLevel(8); // Makes filter coefficient 254/256.
- for (int i = 0; i < kTimes; ++i) {
- filter.Update(kValue, 0 /* time_stretched_samples */,
- 160 /* packet_len_samples */);
- }
- // Expect the filtered value to be
- // (1 - (254/256) ^ |kTimes|) * |kValue|.
- expected_value = 7;
- // filtered_current_level() returns the value in Q8.
- EXPECT_EQ(expected_value, filter.filtered_current_level() >> 8);
-}
-
-
-TEST(BufferLevelFilter, TimeStretchedSamples) {
- BufferLevelFilter filter;
- filter.SetTargetBufferLevel(1); // Makes filter coefficient 251/256.
- // Update 10 times with value 100.
- const int kTimes = 10;
- const int kValue = 100;
- const int kPacketSizeSamples = 160;
- const int kNumPacketsStretched = 2;
- const int kTimeStretchedSamples = kNumPacketsStretched * kPacketSizeSamples;
- for (int i = 0; i < kTimes; ++i) {
- // Packet size set to 0. Do not expect the parameter
- // |kTimeStretchedSamples| to have any effect.
- filter.Update(kValue, kTimeStretchedSamples, 0 /* packet_len_samples */);
- }
- // Expect the filtered value to be
- // (1 - (251/256) ^ |kTimes|) * |kValue|.
- const int kExpectedValue = 17;
- // filtered_current_level() returns the value in Q8.
- EXPECT_EQ(kExpectedValue, filter.filtered_current_level() >> 8);
-
- // Update filter again, now with non-zero value for packet length.
- // Set the current filtered value to be the input, in order to isolate the
- // impact of |kTimeStretchedSamples|.
- filter.Update(filter.filtered_current_level() >> 8, kTimeStretchedSamples,
- kPacketSizeSamples);
- EXPECT_EQ(kExpectedValue - kNumPacketsStretched,
- filter.filtered_current_level() >> 8);
- // Try a negative value and verify that we come back to the previous result.
- filter.Update(filter.filtered_current_level() >> 8, -kTimeStretchedSamples,
- kPacketSizeSamples);
- EXPECT_EQ(kExpectedValue, filter.filtered_current_level() >> 8);
-}
-
-TEST(BufferLevelFilter, TimeStretchedSamplesNegativeUnevenFrames) {
- BufferLevelFilter filter;
- filter.SetTargetBufferLevel(1); // Makes filter coefficient 251/256.
- // Update 10 times with value 100.
- const int kTimes = 10;
- const int kValue = 100;
- const int kPacketSizeSamples = 160;
- const int kTimeStretchedSamples = -3.1415 * kPacketSizeSamples;
- for (int i = 0; i < kTimes; ++i) {
- // Packet size set to 0. Do not expect the parameter
- // |kTimeStretchedSamples| to have any effect.
- filter.Update(kValue, kTimeStretchedSamples, 0 /* packet_len_samples */);
- }
- // Expect the filtered value to be
- // (1 - (251/256) ^ |kTimes|) * |kValue|.
- const int kExpectedValue = 17;
- // filtered_current_level() returns the value in Q8.
- EXPECT_EQ(kExpectedValue, filter.filtered_current_level() >> 8);
-
- // Update filter again, now with non-zero value for packet length.
- // Set the current filtered value to be the input, in order to isolate the
- // impact of |kTimeStretchedSamples|.
- filter.Update(filter.filtered_current_level() >> 8, kTimeStretchedSamples,
- kPacketSizeSamples);
- EXPECT_EQ(21, filter.filtered_current_level() >> 8);
- // Try a negative value and verify that we come back to the previous result.
- filter.Update(filter.filtered_current_level() >> 8, -kTimeStretchedSamples,
- kPacketSizeSamples);
- EXPECT_EQ(kExpectedValue, filter.filtered_current_level() >> 8);
-}
-
-} // namespace webrtc
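
A side calculation (not part of the test file) showing where the hard-coded expectations 17, 14, 11 and 7 used above come from; the integer expectations are simply the truncated theoretical values:

    #include <cmath>
    #include <cstdio>

    int main() {
      const int kTimes = 10;   // Number of filter updates in the tests.
      const int kValue = 100;  // Constant input level in the tests.
      const int kCoefficients[] = {251, 252, 253, 254};
      for (int c : kCoefficients) {
        const double expected = (1.0 - std::pow(c / 256.0, kTimes)) * kValue;
        std::printf("factor %d/256: expected level %d (%.2f)\n",
                    c, static_cast<int>(expected), expected);
      }
      return 0;
    }
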
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.cc
deleted file mode 100644
index 360767af193..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.cc
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/comfort_noise.h"
-
-#include <assert.h>
-
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-
-namespace webrtc {
-
-void ComfortNoise::Reset() {
- first_call_ = true;
- internal_error_code_ = 0;
-}
-
-int ComfortNoise::UpdateParameters(Packet* packet) {
- assert(packet); // Existence is verified by caller.
- // Get comfort noise decoder.
- AudioDecoder* cng_decoder = decoder_database_->GetDecoder(
- packet->header.payloadType);
- if (!cng_decoder) {
- delete [] packet->payload;
- delete packet;
- return kUnknownPayloadType;
- }
- decoder_database_->SetActiveCngDecoder(packet->header.payloadType);
- CNG_dec_inst* cng_inst = static_cast<CNG_dec_inst*>(cng_decoder->state());
- int16_t ret = WebRtcCng_UpdateSid(cng_inst,
- packet->payload,
- packet->payload_length);
- delete [] packet->payload;
- delete packet;
- if (ret < 0) {
- internal_error_code_ = WebRtcCng_GetErrorCodeDec(cng_inst);
- return kInternalError;
- }
- return kOK;
-}
-
-int ComfortNoise::Generate(size_t requested_length,
- AudioMultiVector* output) {
- // TODO(hlundin): Change to an enumerator and skip assert.
- assert(fs_hz_ == 8000 || fs_hz_ == 16000 || fs_hz_ == 32000 ||
- fs_hz_ == 48000);
- // Not adapted for multi-channel yet.
- if (output->Channels() != 1) {
- return kMultiChannelNotSupported;
- }
-
- size_t number_of_samples = requested_length;
- int16_t new_period = 0;
- if (first_call_) {
- // Generate noise and overlap slightly with old data.
- number_of_samples = requested_length + overlap_length_;
- new_period = 1;
- }
- output->AssertSize(number_of_samples);
- // Get the decoder from the database.
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
- if (!cng_decoder) {
- return kUnknownPayloadType;
- }
- CNG_dec_inst* cng_inst = static_cast<CNG_dec_inst*>(cng_decoder->state());
- // The expression &(*output)[0][0] is a pointer to the first element in
- // the first channel.
- if (WebRtcCng_Generate(cng_inst, &(*output)[0][0],
- static_cast<int16_t>(number_of_samples),
- new_period) < 0) {
- // Error returned.
- output->Zeros(requested_length);
- internal_error_code_ = WebRtcCng_GetErrorCodeDec(cng_inst);
- return kInternalError;
- }
-
- if (first_call_) {
- // Set tapering window parameters. Values are in Q15.
- int16_t muting_window; // Mixing factor for overlap data.
- int16_t muting_window_increment; // Mixing factor increment (negative).
- int16_t unmuting_window; // Mixing factor for comfort noise.
- int16_t unmuting_window_increment; // Mixing factor increment.
- if (fs_hz_ == 8000) {
- muting_window = DspHelper::kMuteFactorStart8kHz;
- muting_window_increment = DspHelper::kMuteFactorIncrement8kHz;
- unmuting_window = DspHelper::kUnmuteFactorStart8kHz;
- unmuting_window_increment = DspHelper::kUnmuteFactorIncrement8kHz;
- } else if (fs_hz_ == 16000) {
- muting_window = DspHelper::kMuteFactorStart16kHz;
- muting_window_increment = DspHelper::kMuteFactorIncrement16kHz;
- unmuting_window = DspHelper::kUnmuteFactorStart16kHz;
- unmuting_window_increment = DspHelper::kUnmuteFactorIncrement16kHz;
- } else if (fs_hz_ == 32000) {
- muting_window = DspHelper::kMuteFactorStart32kHz;
- muting_window_increment = DspHelper::kMuteFactorIncrement32kHz;
- unmuting_window = DspHelper::kUnmuteFactorStart32kHz;
- unmuting_window_increment = DspHelper::kUnmuteFactorIncrement32kHz;
- } else { // fs_hz_ == 48000
- muting_window = DspHelper::kMuteFactorStart48kHz;
- muting_window_increment = DspHelper::kMuteFactorIncrement48kHz;
- unmuting_window = DspHelper::kUnmuteFactorStart48kHz;
- unmuting_window_increment = DspHelper::kUnmuteFactorIncrement48kHz;
- }
-
- // Do overlap-add between new vector and overlap.
- size_t start_ix = sync_buffer_->Size() - overlap_length_;
- for (size_t i = 0; i < overlap_length_; i++) {
- /* overlapVec[i] = WinMute * overlapVec[i] + WinUnMute * outData[i] */
- // The expression (*output)[0][i] is the i-th element in the first
- // channel.
- (*sync_buffer_)[0][start_ix + i] =
- (((*sync_buffer_)[0][start_ix + i] * muting_window) +
- ((*output)[0][i] * unmuting_window) + 16384) >> 15;
- muting_window += muting_window_increment;
- unmuting_window += unmuting_window_increment;
- }
- // Remove |overlap_length_| samples from the front of |output| since they
- // were mixed into |sync_buffer_| above.
- output->PopFront(overlap_length_);
- }
- first_call_ = false;
- return kOK;
-}
-
-} // namespace webrtc
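
A standalone sketch of the Q15 crossfade used in the overlap-add loop above: each output sample is a weighted mix of the old sync-buffer sample and the newly generated noise sample, with rounding before the shift. The window start values and the step below are made up for illustration; the real ones come from DspHelper and depend on the sample rate.

    #include <cstdint>
    #include <cstdio>

    int main() {
      int16_t muting_window = 32767;   // Q15 weight for the old sync-buffer data.
      int16_t unmuting_window = 0;     // Q15 weight for the new comfort noise.
      const int16_t kStep = 6553;      // About 1/5 of full scale per sample.
      const int16_t old_samples[5] = {1000, 1000, 1000, 1000, 1000};
      const int16_t noise_samples[5] = {500, 500, 500, 500, 500};
      for (int i = 0; i < 5; ++i) {
        const int16_t mixed = static_cast<int16_t>(
            (old_samples[i] * muting_window +
             noise_samples[i] * unmuting_window + 16384) >> 15);
        std::printf("sample %d: %d\n", i, mixed);  // Fades from 1000 towards 500.
        muting_window -= kStep;    // Old signal fades out...
        unmuting_window += kStep;  // ...while the noise fades in.
      }
      return 0;
    }
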
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.h
deleted file mode 100644
index 7e7c294ff0e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_COMFORT_NOISE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_COMFORT_NOISE_H_
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class DecoderDatabase;
-class SyncBuffer;
-struct Packet;
-
-// This class acts as an interface to the CNG generator.
-class ComfortNoise {
- public:
- enum ReturnCodes {
- kOK = 0,
- kUnknownPayloadType,
- kInternalError,
- kMultiChannelNotSupported
- };
-
- ComfortNoise(int fs_hz, DecoderDatabase* decoder_database,
- SyncBuffer* sync_buffer)
- : fs_hz_(fs_hz),
- first_call_(true),
- overlap_length_(5 * fs_hz_ / 8000),
- decoder_database_(decoder_database),
- sync_buffer_(sync_buffer),
- internal_error_code_(0) {
- }
-
- // Resets the state. Should be called before each new comfort noise period.
- void Reset();
-
- // Update the comfort noise generator with the parameters in |packet|.
- // Will delete the packet.
- int UpdateParameters(Packet* packet);
-
- // Generates |requested_length| samples of comfort noise and writes to
- // |output|. If this is the first call after Reset (or the first after creating
- // the object), it will also mix in comfort noise at the end of the
- // SyncBuffer object provided in the constructor.
- int Generate(size_t requested_length, AudioMultiVector* output);
-
- // Returns the last error code that was produced by the comfort noise
- // decoder. Returns 0 if no error has been encountered since the last reset.
- int internal_error_code() { return internal_error_code_; }
-
- private:
- int fs_hz_;
- bool first_call_;
- size_t overlap_length_;
- DecoderDatabase* decoder_database_;
- SyncBuffer* sync_buffer_;
- int internal_error_code_;
- DISALLOW_COPY_AND_ASSIGN(ComfortNoise);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_COMFORT_NOISE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise_unittest.cc
deleted file mode 100644
index 0e849717125..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise_unittest.cc
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for ComfortNoise class.
-
-#include "webrtc/modules/audio_coding/neteq4/comfort_noise.h"
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-
-namespace webrtc {
-
-TEST(ComfortNoise, CreateAndDestroy) {
- int fs = 8000;
- MockDecoderDatabase db;
- SyncBuffer sync_buffer(1, 1000);
- ComfortNoise cn(fs, &db, &sync_buffer);
- EXPECT_CALL(db, Die()); // Called when |db| goes out of scope.
-}
-
-// TODO(hlundin): Write more tests.
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.cc
deleted file mode 100644
index 04b886a2e2e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.cc
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-
-#include <algorithm>
-
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/decision_logic_fax.h"
-#include "webrtc/modules/audio_coding/neteq4/decision_logic_normal.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-
-namespace webrtc {
-
-DecisionLogic* DecisionLogic::Create(int fs_hz,
- int output_size_samples,
- NetEqPlayoutMode playout_mode,
- DecoderDatabase* decoder_database,
- const PacketBuffer& packet_buffer,
- DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter) {
- switch (playout_mode) {
- case kPlayoutOn:
- case kPlayoutStreaming:
- return new DecisionLogicNormal(fs_hz,
- output_size_samples,
- playout_mode,
- decoder_database,
- packet_buffer,
- delay_manager,
- buffer_level_filter);
- case kPlayoutFax:
- case kPlayoutOff:
- return new DecisionLogicFax(fs_hz,
- output_size_samples,
- playout_mode,
- decoder_database,
- packet_buffer,
- delay_manager,
- buffer_level_filter);
- }
- // This line cannot be reached, but must be here to avoid compiler errors.
- assert(false);
- return NULL;
-}
-
-DecisionLogic::DecisionLogic(int fs_hz,
- int output_size_samples,
- NetEqPlayoutMode playout_mode,
- DecoderDatabase* decoder_database,
- const PacketBuffer& packet_buffer,
- DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter)
- : decoder_database_(decoder_database),
- packet_buffer_(packet_buffer),
- delay_manager_(delay_manager),
- buffer_level_filter_(buffer_level_filter),
- cng_state_(kCngOff),
- generated_noise_samples_(0),
- packet_length_samples_(0),
- sample_memory_(0),
- prev_time_scale_(false),
- timescale_hold_off_(kMinTimescaleInterval),
- num_consecutive_expands_(0),
- playout_mode_(playout_mode) {
- delay_manager_->set_streaming_mode(playout_mode_ == kPlayoutStreaming);
- SetSampleRate(fs_hz, output_size_samples);
-}
-
-void DecisionLogic::Reset() {
- cng_state_ = kCngOff;
- generated_noise_samples_ = 0;
- packet_length_samples_ = 0;
- sample_memory_ = 0;
- prev_time_scale_ = false;
- timescale_hold_off_ = 0;
- num_consecutive_expands_ = 0;
-}
-
-void DecisionLogic::SoftReset() {
- packet_length_samples_ = 0;
- sample_memory_ = 0;
- prev_time_scale_ = false;
- timescale_hold_off_ = kMinTimescaleInterval;
-}
-
-void DecisionLogic::SetSampleRate(int fs_hz, int output_size_samples) {
- // TODO(hlundin): Change to an enumerator and skip assert.
- assert(fs_hz == 8000 || fs_hz == 16000 || fs_hz == 32000 || fs_hz == 48000);
- fs_mult_ = fs_hz / 8000;
- output_size_samples_ = output_size_samples;
-}
-
-Operations DecisionLogic::GetDecision(const SyncBuffer& sync_buffer,
- const Expand& expand,
- int decoder_frame_length,
- const RTPHeader* packet_header,
- Modes prev_mode,
- bool play_dtmf, bool* reset_decoder) {
- if (prev_mode == kModeRfc3389Cng ||
- prev_mode == kModeCodecInternalCng ||
- prev_mode == kModeExpand) {
- // If last mode was CNG (or Expand, since this could be covering up for
- // a lost CNG packet), increase the |generated_noise_samples_| counter.
- generated_noise_samples_ += output_size_samples_;
- // Remember that CNG is on. This is needed if comfort noise is interrupted
- // by DTMF.
- if (prev_mode == kModeRfc3389Cng) {
- cng_state_ = kCngRfc3389On;
- } else if (prev_mode == kModeCodecInternalCng) {
- cng_state_ = kCngInternalOn;
- }
- }
-
- const int samples_left = static_cast<int>(
- sync_buffer.FutureLength() - expand.overlap_length());
- const int cur_size_samples =
- samples_left + packet_buffer_.NumSamplesInBuffer(decoder_database_,
- decoder_frame_length);
- LOG(LS_VERBOSE) << "Buffers: " << packet_buffer_.NumPacketsInBuffer() <<
- " packets * " << decoder_frame_length << " samples/packet + " <<
- samples_left << " samples in sync buffer = " << cur_size_samples;
-
- prev_time_scale_ = prev_time_scale_ &&
- (prev_mode == kModeAccelerateSuccess ||
- prev_mode == kModeAccelerateLowEnergy ||
- prev_mode == kModePreemptiveExpandSuccess ||
- prev_mode == kModePreemptiveExpandLowEnergy);
-
- FilterBufferLevel(cur_size_samples, prev_mode);
-
- return GetDecisionSpecialized(sync_buffer, expand, decoder_frame_length,
- packet_header, prev_mode, play_dtmf,
- reset_decoder);
-}
-
-void DecisionLogic::ExpandDecision(bool is_expand_decision) {
- if (is_expand_decision) {
- num_consecutive_expands_++;
- } else {
- num_consecutive_expands_ = 0;
- }
-}
-
-void DecisionLogic::FilterBufferLevel(int buffer_size_samples,
- Modes prev_mode) {
- const int elapsed_time_ms = output_size_samples_ / (8 * fs_mult_);
- delay_manager_->UpdateCounters(elapsed_time_ms);
-
- // Do not update buffer history if currently playing CNG since it will bias
- // the filtered buffer level.
- if ((prev_mode != kModeRfc3389Cng) && (prev_mode != kModeCodecInternalCng)) {
- buffer_level_filter_->SetTargetBufferLevel(
- delay_manager_->base_target_level());
-
- int buffer_size_packets = 0;
- if (packet_length_samples_ > 0) {
- // Calculate size in packets.
- buffer_size_packets = buffer_size_samples / packet_length_samples_;
- }
- int sample_memory_local = 0;
- if (prev_time_scale_) {
- sample_memory_local = sample_memory_;
- timescale_hold_off_ = kMinTimescaleInterval;
- }
- buffer_level_filter_->Update(buffer_size_packets, sample_memory_local,
- packet_length_samples_);
- prev_time_scale_ = false;
- }
-
- timescale_hold_off_ = std::max(timescale_hold_off_ - 1, 0);
-}
-
-} // namespace webrtc
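
A quick numeric illustration of the conversion used in FilterBufferLevel() above: fs_mult_ = fs_hz / 8000, so 8 * fs_mult_ is the number of samples per millisecond, and dividing the output block size by it gives the elapsed time in ms. The 10 ms block size below is an assumption for the example; in the real code it is whatever output_size_samples_ was set to.

    #include <cstdio>

    int main() {
      const int kSampleRates[] = {8000, 16000, 32000, 48000};
      for (int fs_hz : kSampleRates) {
        const int fs_mult = fs_hz / 8000;
        const int output_size_samples = 10 * 8 * fs_mult;  // Assume 10 ms blocks.
        const int elapsed_time_ms = output_size_samples / (8 * fs_mult);
        std::printf("fs = %5d Hz: %4d samples/block -> elapsed_time_ms = %d\n",
                    fs_hz, output_size_samples, elapsed_time_ms);
      }
      return 0;
    }
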
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.h
deleted file mode 100644
index aca5ca40559..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.h
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_H_
-
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class BufferLevelFilter;
-class DecoderDatabase;
-class DelayManager;
-class Expand;
-class PacketBuffer;
-class SyncBuffer;
-struct RTPHeader;
-
-// This is the base class for the decision tree implementations. Derived classes
-// must implement the method GetDecisionSpecialized().
-class DecisionLogic {
- public:
- // Static factory function which creates different types of objects depending
- // on the |playout_mode|.
- static DecisionLogic* Create(int fs_hz,
- int output_size_samples,
- NetEqPlayoutMode playout_mode,
- DecoderDatabase* decoder_database,
- const PacketBuffer& packet_buffer,
- DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter);
-
- // Constructor.
- DecisionLogic(int fs_hz,
- int output_size_samples,
- NetEqPlayoutMode playout_mode,
- DecoderDatabase* decoder_database,
- const PacketBuffer& packet_buffer,
- DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter);
-
- // Destructor.
- virtual ~DecisionLogic() {}
-
- // Resets object to a clean state.
- void Reset();
-
- // Resets parts of the state. Typically done when switching codecs.
- void SoftReset();
-
- // Sets the sample rate and the output block size.
- void SetSampleRate(int fs_hz, int output_size_samples);
-
- // Returns the operation that should be done next. |sync_buffer| and |expand|
- // are provided for reference. |decoder_frame_length| is the number of samples
- // obtained from the last decoded frame. If there is a packet available, the
- // packet header should be supplied in |packet_header|; otherwise it should
- // be NULL. The mode resulting from the last call to NetEqImpl::GetAudio is
- // supplied in |prev_mode|. If there is a DTMF event to play, |play_dtmf|
- // should be set to true. The output variable |reset_decoder| will be set to
- // true if a reset is required; otherwise it is left unchanged (i.e., it can
- // remain true if it was true before the call).
- // This method ends by calling GetDecisionSpecialized to get the actual
- // return value.
- Operations GetDecision(const SyncBuffer& sync_buffer,
- const Expand& expand,
- int decoder_frame_length,
- const RTPHeader* packet_header,
- Modes prev_mode,
- bool play_dtmf,
- bool* reset_decoder);
-
- // These methods test the |cng_state_| for different conditions.
- bool CngRfc3389On() const { return cng_state_ == kCngRfc3389On; }
- bool CngOff() const { return cng_state_ == kCngOff; }
-
- // Resets the |cng_state_| to kCngOff.
- void SetCngOff() { cng_state_ = kCngOff; }
-
- // Reports back to DecisionLogic whether the decision to do expand remains or
- // not. Note that this is necessary, since an expand decision can be changed
- // to kNormal in NetEqImpl::GetDecision if there is still enough data in the
- // sync buffer.
- void ExpandDecision(bool is_expand_decision);
-
- // Adds |value| to |sample_memory_|.
- void AddSampleMemory(int32_t value) {
- sample_memory_ += value;
- }
-
- // Accessors and mutators.
- void set_sample_memory(int32_t value) { sample_memory_ = value; }
- int generated_noise_samples() const { return generated_noise_samples_; }
- void set_generated_noise_samples(int value) {
- generated_noise_samples_ = value;
- }
- int packet_length_samples() const { return packet_length_samples_; }
- void set_packet_length_samples(int value) {
- packet_length_samples_ = value;
- }
- void set_prev_time_scale(bool value) { prev_time_scale_ = value; }
- NetEqPlayoutMode playout_mode() const { return playout_mode_; }
-
- protected:
- // The value 6 sets maximum time-stretch rate to about 100 ms/s.
- static const int kMinTimescaleInterval = 6;
-
- enum CngState {
- kCngOff,
- kCngRfc3389On,
- kCngInternalOn
- };
-
- // Returns the operation that should be done next. |sync_buffer| and |expand|
- // are provided for reference. |decoder_frame_length| is the number of samples
- // obtained from the last decoded frame. If there is a packet available, the
- // packet header should be supplied in |packet_header|; otherwise it should
- // be NULL. The mode resulting from the last call to NetEqImpl::GetAudio is
- // supplied in |prev_mode|. If there is a DTMF event to play, |play_dtmf|
- // should be set to true. The output variable |reset_decoder| will be set to
- // true if a reset is required; otherwise it is left unchanged (i.e., it can
- // remain true if it was true before the call).
- // Should be implemented by derived classes.
- virtual Operations GetDecisionSpecialized(const SyncBuffer& sync_buffer,
- const Expand& expand,
- int decoder_frame_length,
- const RTPHeader* packet_header,
- Modes prev_mode,
- bool play_dtmf,
- bool* reset_decoder) = 0;
-
- // Updates the |buffer_level_filter_| with the current buffer level
- // |buffer_size_packets|.
- void FilterBufferLevel(int buffer_size_packets, Modes prev_mode);
-
- DecoderDatabase* decoder_database_;
- const PacketBuffer& packet_buffer_;
- DelayManager* delay_manager_;
- BufferLevelFilter* buffer_level_filter_;
- int fs_mult_;
- int output_size_samples_;
- CngState cng_state_; // Remember if comfort noise is interrupted by another
- // event (e.g., DTMF).
- int generated_noise_samples_;
- int packet_length_samples_;
- int sample_memory_;
- bool prev_time_scale_;
- int timescale_hold_off_;
- int num_consecutive_expands_;
- const NetEqPlayoutMode playout_mode_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DecisionLogic);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.cc
deleted file mode 100644
index 00c8bcf4a2b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.cc
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/decision_logic_fax.h"
-
-#include <assert.h>
-
-#include <algorithm>
-
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-
-namespace webrtc {
-
-Operations DecisionLogicFax::GetDecisionSpecialized(
- const SyncBuffer& sync_buffer,
- const Expand& expand,
- int decoder_frame_length,
- const RTPHeader* packet_header,
- Modes prev_mode,
- bool play_dtmf,
- bool* reset_decoder) {
- assert(playout_mode_ == kPlayoutFax || playout_mode_ == kPlayoutOff);
- uint32_t target_timestamp = sync_buffer.end_timestamp();
- uint32_t available_timestamp = 0;
- int is_cng_packet = 0;
- if (packet_header) {
- available_timestamp = packet_header->timestamp;
- is_cng_packet =
- decoder_database_->IsComfortNoise(packet_header->payloadType);
- }
- if (is_cng_packet) {
- if (static_cast<int32_t>((generated_noise_samples_ + target_timestamp)
- - available_timestamp) >= 0) {
- // Time to play this packet now.
- return kRfc3389Cng;
- } else {
- // Wait before playing this packet.
- return kRfc3389CngNoPacket;
- }
- }
- if (!packet_header) {
- // No packet. If in CNG mode, play as usual. Otherwise, use another method to
- // generate data.
- if (cng_state_ == kCngRfc3389On) {
- // Continue playing comfort noise.
- return kRfc3389CngNoPacket;
- } else if (cng_state_ == kCngInternalOn) {
- // Continue playing codec-internal comfort noise.
- return kCodecInternalCng;
- } else {
- // Nothing to play. Generate some data to play out.
- switch (playout_mode_) {
- case kPlayoutOff:
- return kAlternativePlc;
- case kPlayoutFax:
- return kAudioRepetition;
- default:
- assert(false);
- return kUndefined;
- }
- }
- } else if (target_timestamp == available_timestamp) {
- return kNormal;
- } else {
- if (static_cast<int32_t>((generated_noise_samples_ + target_timestamp)
- - available_timestamp) >= 0) {
- return kNormal;
- } else {
- // If currently playing comfort noise, continue with that. Do not
- // increase the timestamp counter since generated_noise_samples_ will
- // be increased.
- if (cng_state_ == kCngRfc3389On) {
- return kRfc3389CngNoPacket;
- } else if (cng_state_ == kCngInternalOn) {
- return kCodecInternalCng;
- } else {
- // Otherwise, do packet-loss concealment and increase the
- // timestamp while waiting for the time to play this packet.
- switch (playout_mode_) {
- case kPlayoutOff:
- return kAlternativePlcIncreaseTimestamp;
- case kPlayoutFax:
- return kAudioRepetitionIncreaseTimestamp;
- default:
- assert(0);
- return kUndefined;
- }
- }
- }
- }
-}
-
-
-} // namespace webrtc
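
The repeated test above, casting the difference between the noise-advanced target timestamp and the packet's timestamp to int32_t, is a wrap-safe way of asking whether the noise clock has caught up with the packet. A standalone sketch with made-up values near the 32-bit wrap point:

    #include <cstdint>
    #include <cstdio>

    bool TimeToPlay(uint32_t generated_noise_samples,
                    uint32_t target_timestamp,
                    uint32_t available_timestamp) {
      // Unsigned arithmetic wraps mod 2^32; reinterpreting the difference as
      // int32_t recovers the signed distance even across the wrap point.
      return static_cast<int32_t>((generated_noise_samples + target_timestamp) -
                                  available_timestamp) >= 0;
    }

    int main() {
      const uint32_t target = 0xFFFFFF00u;       // Just below the wrap point.
      const uint32_t available = target + 384u;  // Wraps around to 0x00000080.
      std::printf("no noise yet:      %d\n", TimeToPlay(0, target, available));
      std::printf("after 512 samples: %d\n", TimeToPlay(512, target, available));
      return 0;
    }
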
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.h
deleted file mode 100644
index 1ccd3524473..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_FAX_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_FAX_H_
-
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Implementation of the DecisionLogic class for playout modes kPlayoutFax and
-// kPlayoutOff.
-class DecisionLogicFax : public DecisionLogic {
- public:
- // Constructor.
- DecisionLogicFax(int fs_hz,
- int output_size_samples,
- NetEqPlayoutMode playout_mode,
- DecoderDatabase* decoder_database,
- const PacketBuffer& packet_buffer,
- DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter)
- : DecisionLogic(fs_hz, output_size_samples, playout_mode,
- decoder_database, packet_buffer, delay_manager,
- buffer_level_filter) {
- }
-
- // Destructor.
- virtual ~DecisionLogicFax() {}
-
- protected:
- // Returns the operation that should be done next. |sync_buffer| and |expand|
- // are provided for reference. |decoder_frame_length| is the number of samples
- // obtained from the last decoded frame. If there is a packet available, the
- // packet header should be supplied in |packet_header|; otherwise it should
- // be NULL. The mode resulting from the last call to NetEqImpl::GetAudio is
- // supplied in |prev_mode|. If there is a DTMF event to play, |play_dtmf|
- // should be set to true. The output variable |reset_decoder| will be set to
- // true if a reset is required; otherwise it is left unchanged (i.e., it can
- // remain true if it was true before the call).
- virtual Operations GetDecisionSpecialized(const SyncBuffer& sync_buffer,
- const Expand& expand,
- int decoder_frame_length,
- const RTPHeader* packet_header,
- Modes prev_mode,
- bool play_dtmf,
- bool* reset_decoder) OVERRIDE;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DecisionLogicFax);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_FAX_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc
deleted file mode 100644
index 8688737c3ed..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc
+++ /dev/null
@@ -1,235 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/decision_logic_normal.h"
-
-#include <assert.h>
-
-#include <algorithm>
-
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-#include "webrtc/modules/interface/module_common_types.h"
-
-namespace webrtc {
-
-Operations DecisionLogicNormal::GetDecisionSpecialized(
- const SyncBuffer& sync_buffer,
- const Expand& expand,
- int decoder_frame_length,
- const RTPHeader* packet_header,
- Modes prev_mode,
- bool play_dtmf,
- bool* reset_decoder) {
- assert(playout_mode_ == kPlayoutOn || playout_mode_ == kPlayoutStreaming);
- // Guard for errors, to avoid getting stuck in error mode.
- if (prev_mode == kModeError) {
- if (!packet_header) {
- return kExpand;
- } else {
- return kUndefined; // Use kUndefined to flag for a reset.
- }
- }
-
- uint32_t target_timestamp = sync_buffer.end_timestamp();
- uint32_t available_timestamp = 0;
- int is_cng_packet = 0;
- if (packet_header) {
- available_timestamp = packet_header->timestamp;
- is_cng_packet =
- decoder_database_->IsComfortNoise(packet_header->payloadType);
- }
-
- if (is_cng_packet) {
- return CngOperation(prev_mode, target_timestamp, available_timestamp);
- }
-
- // Handle the case with no packet at all available (except maybe DTMF).
- if (!packet_header) {
- return NoPacket(play_dtmf);
- }
-
- // If the expand period was very long, reset NetEQ since it is likely that the
- // sender was restarted.
- if (num_consecutive_expands_ > kReinitAfterExpands) {
- *reset_decoder = true;
- return kNormal;
- }
-
- // Check if the required packet is available.
- if (target_timestamp == available_timestamp) {
- return ExpectedPacketAvailable(prev_mode, play_dtmf);
- } else if (IsNewerTimestamp(available_timestamp, target_timestamp)) {
- return FuturePacketAvailable(sync_buffer, expand, decoder_frame_length,
- prev_mode, target_timestamp,
- available_timestamp, play_dtmf);
- } else {
- // This implies that available_timestamp < target_timestamp, which can
- // happen when a new stream or codec is received. Signal for a reset.
- return kUndefined;
- }
-}
-
-Operations DecisionLogicNormal::CngOperation(Modes prev_mode,
- uint32_t target_timestamp,
- uint32_t available_timestamp) {
- // Signed difference between target and available timestamp.
- int32_t timestamp_diff = (generated_noise_samples_ + target_timestamp) -
- available_timestamp;
- int32_t optimal_level_samp =
- (delay_manager_->TargetLevel() * packet_length_samples_) >> 8;
- int32_t excess_waiting_time_samp = -timestamp_diff - optimal_level_samp;
-
- if (excess_waiting_time_samp > optimal_level_samp / 2) {
- // The waiting time for this packet will be longer than 1.5
- // times the wanted buffer delay. Advance the clock to cut
- // waiting time down to the optimal.
- generated_noise_samples_ += excess_waiting_time_samp;
- timestamp_diff += excess_waiting_time_samp;
- }
-
- if (timestamp_diff < 0 && prev_mode == kModeRfc3389Cng) {
- // Not time to play this packet yet. Wait another round before using this
- // packet. Keep on playing CNG from previous CNG parameters.
- return kRfc3389CngNoPacket;
- } else {
- // Otherwise, go for the CNG packet now.
- return kRfc3389Cng;
- }
-}
-
-Operations DecisionLogicNormal::NoPacket(bool play_dtmf) {
- if (cng_state_ == kCngRfc3389On) {
- // Keep on playing comfort noise.
- return kRfc3389CngNoPacket;
- } else if (cng_state_ == kCngInternalOn) {
- // Keep on playing codec internal comfort noise.
- return kCodecInternalCng;
- } else if (play_dtmf) {
- return kDtmf;
- } else {
- // Nothing to play, do expand.
- return kExpand;
- }
-}
-
-Operations DecisionLogicNormal::ExpectedPacketAvailable(Modes prev_mode,
- bool play_dtmf) {
- if (prev_mode != kModeExpand && !play_dtmf) {
- // Check criterion for time-stretching.
- int low_limit, high_limit;
- delay_manager_->BufferLimits(&low_limit, &high_limit);
- if ((buffer_level_filter_->filtered_current_level() >= high_limit &&
- TimescaleAllowed()) ||
- buffer_level_filter_->filtered_current_level() >= high_limit << 2) {
- // Buffer level higher than limit and time-scaling allowed,
- // or buffer level really high.
- return kAccelerate;
- } else if ((buffer_level_filter_->filtered_current_level() < low_limit)
- && TimescaleAllowed()) {
- return kPreemptiveExpand;
- }
- }
- return kNormal;
-}
-
-Operations DecisionLogicNormal::FuturePacketAvailable(
- const SyncBuffer& sync_buffer,
- const Expand& expand,
- int decoder_frame_length,
- Modes prev_mode,
- uint32_t target_timestamp,
- uint32_t available_timestamp,
- bool play_dtmf) {
- // Required packet is not available, but a future packet is.
- // Check if we should continue with an ongoing expand because the new packet
- // is too far into the future.
- uint32_t timestamp_leap = available_timestamp - target_timestamp;
- if ((prev_mode == kModeExpand) &&
- !ReinitAfterExpands(timestamp_leap) &&
- !MaxWaitForPacket() &&
- PacketTooEarly(timestamp_leap) &&
- UnderTargetLevel()) {
- if (play_dtmf) {
- // Still have DTMF to play, so do not do expand.
- return kDtmf;
- } else {
- // Nothing to play.
- return kExpand;
- }
- }
-
- const int samples_left = static_cast<int>(sync_buffer.FutureLength() -
- expand.overlap_length());
- const int cur_size_samples = samples_left +
- packet_buffer_.NumPacketsInBuffer() * decoder_frame_length;
-
- // If previous was comfort noise, then no merge is needed.
- if (prev_mode == kModeRfc3389Cng ||
- prev_mode == kModeCodecInternalCng) {
- // Keep the same delay as before the CNG (or at most 70 ms in the buffer as a
- // safety precaution), but make sure that the number of samples in the buffer
- // is no higher than 4 times the optimal level. (Note that TargetLevel()
- // is in Q8.)
- int32_t timestamp_diff = (generated_noise_samples_ + target_timestamp) -
- available_timestamp;
- if (timestamp_diff >= 0 ||
- cur_size_samples >
- 4 * ((delay_manager_->TargetLevel() * packet_length_samples_) >> 8)) {
- // Time to play this new packet.
- return kNormal;
- } else {
- // Too early to play this new packet; keep on playing comfort noise.
- if (prev_mode == kModeRfc3389Cng) {
- return kRfc3389CngNoPacket;
- } else { // prev_mode == kModeCodecInternalCng.
- return kCodecInternalCng;
- }
- }
- }
- // Do not merge unless we have done an expand before.
- // (Convert kAllowMergeWithoutExpand from ms to samples by multiplying with
- // fs_mult_ * 8 = fs / 1000.)
- if (prev_mode == kModeExpand ||
- (decoder_frame_length < output_size_samples_ &&
- cur_size_samples > kAllowMergeWithoutExpandMs * fs_mult_ * 8)) {
- return kMerge;
- } else if (play_dtmf) {
- // Play DTMF instead of expand.
- return kDtmf;
- } else {
- return kExpand;
- }
-}
-
-bool DecisionLogicNormal::UnderTargetLevel() const {
- return buffer_level_filter_->filtered_current_level() <=
- delay_manager_->TargetLevel();
-}
-
-bool DecisionLogicNormal::ReinitAfterExpands(uint32_t timestamp_leap) const {
- return timestamp_leap >=
- static_cast<uint32_t>(output_size_samples_ * kReinitAfterExpands);
-}
-
-bool DecisionLogicNormal::PacketTooEarly(uint32_t timestamp_leap) const {
- return timestamp_leap >
- static_cast<uint32_t>(output_size_samples_ * num_consecutive_expands_);
-}
-
-bool DecisionLogicNormal::MaxWaitForPacket() const {
- return num_consecutive_expands_ >= kMaxWaitForPacket;
-}
-
-} // namespace webrtc
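
A numeric walk-through, with made-up values, of the clock advance in CngOperation() above. TargetLevel() is in Q8, so a target of 4 packets is represented as 4 << 8.

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int kPacketLengthSamples = 160;  // E.g. 20 ms packets at 8 kHz.
      const int kTargetLevelQ8 = 4 << 8;     // Target buffer level: 4 packets.
      int32_t timestamp_diff = -2000;        // CNG packet is 2000 samples early.
      int32_t generated_noise_samples = 0;

      const int32_t optimal_level_samp =
          (kTargetLevelQ8 * kPacketLengthSamples) >> 8;  // 640 samples.
      const int32_t excess_waiting_time_samp =
          -timestamp_diff - optimal_level_samp;          // 1360 samples.
      if (excess_waiting_time_samp > optimal_level_samp / 2) {
        // Waiting more than 1.5x the wanted delay: advance the noise clock so
        // that the remaining wait equals the optimal level.
        generated_noise_samples += excess_waiting_time_samp;
        timestamp_diff += excess_waiting_time_samp;
      }
      std::printf("advanced clock by %d samples, remaining diff %d\n",
                  generated_noise_samples, timestamp_diff);  // 1360 and -640.
      return 0;
    }
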
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.h
deleted file mode 100644
index 783b001fc3e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_NORMAL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_NORMAL_H_
-
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Implementation of the DecisionLogic class for playout modes kPlayoutOn and
-// kPlayoutStreaming.
-class DecisionLogicNormal : public DecisionLogic {
- public:
- // Constructor.
- DecisionLogicNormal(int fs_hz,
- int output_size_samples,
- NetEqPlayoutMode playout_mode,
- DecoderDatabase* decoder_database,
- const PacketBuffer& packet_buffer,
- DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter)
- : DecisionLogic(fs_hz, output_size_samples, playout_mode,
- decoder_database, packet_buffer, delay_manager,
- buffer_level_filter) {
- }
-
- // Destructor.
- virtual ~DecisionLogicNormal() {}
-
- protected:
- // Returns the operation that should be done next. |sync_buffer| and |expand|
- // are provided for reference. |decoder_frame_length| is the number of samples
- // obtained from the last decoded frame. If there is a packet available, the
- // packet header should be supplied in |packet_header|; otherwise it should
-  // be NULL. The mode resulting from the last call to NetEqImpl::GetAudio is
- // supplied in |prev_mode|. If there is a DTMF event to play, |play_dtmf|
- // should be set to true. The output variable |reset_decoder| will be set to
- // true if a reset is required; otherwise it is left unchanged (i.e., it can
- // remain true if it was true before the call).
- virtual Operations GetDecisionSpecialized(const SyncBuffer& sync_buffer,
- const Expand& expand,
- int decoder_frame_length,
- const RTPHeader* packet_header,
- Modes prev_mode, bool play_dtmf,
- bool* reset_decoder);
-
- private:
- static const int kAllowMergeWithoutExpandMs = 20; // 20 ms.
- static const int kReinitAfterExpands = 100;
- static const int kMaxWaitForPacket = 10;
-
- // Returns the operation given that the next available packet is a comfort
- // noise payload (RFC 3389 only, not codec-internal).
- Operations CngOperation(Modes prev_mode, uint32_t target_timestamp,
- uint32_t available_timestamp);
-
- // Returns the operation given that no packets are available (except maybe
- // a DTMF event, flagged by setting |play_dtmf| true).
- Operations NoPacket(bool play_dtmf);
-
- // Returns the operation to do given that the expected packet is available.
- Operations ExpectedPacketAvailable(Modes prev_mode, bool play_dtmf);
-
- // Returns the operation to do given that the expected packet is not
- // available, but a packet further into the future is at hand.
- Operations FuturePacketAvailable(const SyncBuffer& sync_buffer,
- const Expand& expand,
- int decoder_frame_length, Modes prev_mode,
- uint32_t target_timestamp,
- uint32_t available_timestamp,
- bool play_dtmf);
-
- // Checks if enough time has elapsed since the last successful timescale
- // operation was done (i.e., accelerate or preemptive expand).
- bool TimescaleAllowed() const { return timescale_hold_off_ == 0; }
-
- // Checks if the current (filtered) buffer level is under the target level.
- bool UnderTargetLevel() const;
-
- // Checks if |timestamp_leap| is so long into the future that a reset due
- // to exceeding kReinitAfterExpands will be done.
- bool ReinitAfterExpands(uint32_t timestamp_leap) const;
-
- // Checks if we still have not done enough expands to cover the distance from
-  // the last decoded packet to the next available packet, the distance being
- // conveyed in |timestamp_leap|.
- bool PacketTooEarly(uint32_t timestamp_leap) const;
-
- // Checks if num_consecutive_expands_ >= kMaxWaitForPacket.
- bool MaxWaitForPacket() const;
-
- DISALLOW_COPY_AND_ASSIGN(DecisionLogicNormal);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_NORMAL_H_
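
The kAllowMergeWithoutExpandMs constant above is expressed in milliseconds and is converted to samples in the .cc file by multiplying with fs_mult_ * 8 (= fs / 1000). A tiny helper, hypothetical and only for illustration, makes that conversion explicit:

// Sketch only; fs_mult is fs_hz / 8000, so fs_mult * 8 == fs_hz / 1000,
// i.e. the number of samples per millisecond.
int MsToSamples(int ms, int fs_hz) {
  const int fs_mult = fs_hz / 8000;  // 1, 2, 4 or 6 for 8/16/32/48 kHz.
  return ms * fs_mult * 8;           // Same as ms * fs_hz / 1000.
}
// Example: MsToSamples(20, 16000) == 320, i.e. 20 ms of audio at 16 kHz.
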
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_unittest.cc
deleted file mode 100644
index d596c0519a3..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_unittest.cc
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for DecisionLogic class and derived classes.
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-
-namespace webrtc {
-
-TEST(DecisionLogic, CreateAndDestroy) {
- int fs_hz = 8000;
- int output_size_samples = fs_hz / 100; // Samples per 10 ms.
- DecoderDatabase decoder_database;
- PacketBuffer packet_buffer(10, 1000);
- DelayPeakDetector delay_peak_detector;
- DelayManager delay_manager(240, &delay_peak_detector);
- BufferLevelFilter buffer_level_filter;
- DecisionLogic* logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutOn, &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
- delete logic;
- logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutStreaming,
- &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
- delete logic;
- logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutFax,
- &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
- delete logic;
- logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutOff,
- &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
- delete logic;
-}
-
-// TODO(hlundin): Write more tests.
-
-} // namespace webrtc
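
The test above exercises DecisionLogic::Create() once per playout mode with identical arguments. A more compact variant, sketched here as a hypothetical test that is not part of the tree, loops over the same four modes:

// Sketch only; covers the same playout modes as the test above.
TEST(DecisionLogic, CreateAllPlayoutModes) {
  int fs_hz = 8000;
  int output_size_samples = fs_hz / 100;  // Samples per 10 ms.
  DecoderDatabase decoder_database;
  PacketBuffer packet_buffer(10, 1000);
  DelayPeakDetector delay_peak_detector;
  DelayManager delay_manager(240, &delay_peak_detector);
  BufferLevelFilter buffer_level_filter;
  const NetEqPlayoutMode kModes[] = {
      kPlayoutOn, kPlayoutStreaming, kPlayoutFax, kPlayoutOff };
  for (size_t i = 0; i < sizeof(kModes) / sizeof(kModes[0]); ++i) {
    DecisionLogic* logic = DecisionLogic::Create(
        fs_hz, output_size_samples, kModes[i], &decoder_database,
        packet_buffer, &delay_manager, &buffer_level_filter);
    ASSERT_TRUE(logic != NULL);
    delete logic;
  }
}
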
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.cc
deleted file mode 100644
index 8d87519b25a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.cc
+++ /dev/null
@@ -1,260 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-
-#include <assert.h>
-#include <utility> // pair
-
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-
-namespace webrtc {
-
-DecoderDatabase::DecoderDatabase()
- : active_decoder_(-1), active_cng_decoder_(-1) {}
-
-DecoderDatabase::~DecoderDatabase() {}
-
-DecoderDatabase::DecoderInfo::~DecoderInfo() {
- if (!external) delete decoder;
-}
-
-bool DecoderDatabase::Empty() const { return decoders_.empty(); }
-
-int DecoderDatabase::Size() const { return static_cast<int>(decoders_.size()); }
-
-void DecoderDatabase::Reset() {
- decoders_.clear();
- active_decoder_ = -1;
- active_cng_decoder_ = -1;
-}
-
-int DecoderDatabase::RegisterPayload(uint8_t rtp_payload_type,
- NetEqDecoder codec_type) {
- if (rtp_payload_type > kMaxRtpPayloadType) {
- return kInvalidRtpPayloadType;
- }
- if (!AudioDecoder::CodecSupported(codec_type)) {
- return kCodecNotSupported;
- }
- int fs_hz = AudioDecoder::CodecSampleRateHz(codec_type);
- std::pair<DecoderMap::iterator, bool> ret;
- DecoderInfo info(codec_type, fs_hz, NULL, false);
- ret = decoders_.insert(std::make_pair(rtp_payload_type, info));
- if (ret.second == false) {
- // Database already contains a decoder with type |rtp_payload_type|.
- return kDecoderExists;
- }
- return kOK;
-}
-
-int DecoderDatabase::InsertExternal(uint8_t rtp_payload_type,
- NetEqDecoder codec_type,
- int fs_hz,
- AudioDecoder* decoder) {
- if (rtp_payload_type > 0x7F) {
- return kInvalidRtpPayloadType;
- }
- if (!AudioDecoder::CodecSupported(codec_type)) {
- return kCodecNotSupported;
- }
- if (fs_hz != 8000 && fs_hz != 16000 && fs_hz != 32000 && fs_hz != 48000) {
- return kInvalidSampleRate;
- }
- if (!decoder) {
- return kInvalidPointer;
- }
- decoder->Init();
- std::pair<DecoderMap::iterator, bool> ret;
- DecoderInfo info(codec_type, fs_hz, decoder, true);
- ret = decoders_.insert(
- std::pair<uint8_t, DecoderInfo>(rtp_payload_type, info));
- if (ret.second == false) {
- // Database already contains a decoder with type |rtp_payload_type|.
- return kDecoderExists;
- }
- return kOK;
-}
-
-int DecoderDatabase::Remove(uint8_t rtp_payload_type) {
- if (decoders_.erase(rtp_payload_type) == 0) {
- // No decoder with that |rtp_payload_type|.
- return kDecoderNotFound;
- }
- if (active_decoder_ == rtp_payload_type) {
- active_decoder_ = -1; // No active decoder.
- }
- if (active_cng_decoder_ == rtp_payload_type) {
- active_cng_decoder_ = -1; // No active CNG decoder.
- }
- return kOK;
-}
-
-const DecoderDatabase::DecoderInfo* DecoderDatabase::GetDecoderInfo(
- uint8_t rtp_payload_type) const {
- DecoderMap::const_iterator it = decoders_.find(rtp_payload_type);
- if (it == decoders_.end()) {
- // Decoder not found.
- return NULL;
- }
- return &(*it).second;
-}
-
-uint8_t DecoderDatabase::GetRtpPayloadType(
- NetEqDecoder codec_type) const {
- DecoderMap::const_iterator it;
- for (it = decoders_.begin(); it != decoders_.end(); ++it) {
- if ((*it).second.codec_type == codec_type) {
- // Match found.
- return (*it).first;
- }
- }
- // No match.
- return kRtpPayloadTypeError;
-}
-
-AudioDecoder* DecoderDatabase::GetDecoder(uint8_t rtp_payload_type) {
- if (IsDtmf(rtp_payload_type) || IsRed(rtp_payload_type)) {
- // These are not real decoders.
- return NULL;
- }
- DecoderMap::iterator it = decoders_.find(rtp_payload_type);
- if (it == decoders_.end()) {
- // Decoder not found.
- return NULL;
- }
- DecoderInfo* info = &(*it).second;
- if (!info->decoder) {
- // Create the decoder object.
- AudioDecoder* decoder = AudioDecoder::CreateAudioDecoder(info->codec_type);
- assert(decoder); // Should not be able to have an unsupported codec here.
- info->decoder = decoder;
- info->decoder->Init();
- }
- return info->decoder;
-}
-
-bool DecoderDatabase::IsType(uint8_t rtp_payload_type,
- NetEqDecoder codec_type) const {
- DecoderMap::const_iterator it = decoders_.find(rtp_payload_type);
- if (it == decoders_.end()) {
- // Decoder not found.
- return false;
- }
- return ((*it).second.codec_type == codec_type);
-}
-
-bool DecoderDatabase::IsComfortNoise(uint8_t rtp_payload_type) const {
- if (IsType(rtp_payload_type, kDecoderCNGnb) ||
- IsType(rtp_payload_type, kDecoderCNGwb) ||
- IsType(rtp_payload_type, kDecoderCNGswb32kHz) ||
- IsType(rtp_payload_type, kDecoderCNGswb48kHz)) {
- return true;
- } else {
- return false;
- }
-}
-
-bool DecoderDatabase::IsDtmf(uint8_t rtp_payload_type) const {
- return IsType(rtp_payload_type, kDecoderAVT);
-}
-
-bool DecoderDatabase::IsRed(uint8_t rtp_payload_type) const {
- return IsType(rtp_payload_type, kDecoderRED);
-}
-
-int DecoderDatabase::SetActiveDecoder(uint8_t rtp_payload_type,
- bool* new_decoder) {
- // Check that |rtp_payload_type| exists in the database.
- DecoderMap::const_iterator it = decoders_.find(rtp_payload_type);
- if (it == decoders_.end()) {
- // Decoder not found.
- return kDecoderNotFound;
- }
- assert(new_decoder);
- *new_decoder = false;
- if (active_decoder_ < 0) {
- // This is the first active decoder.
- *new_decoder = true;
- } else if (active_decoder_ != rtp_payload_type) {
- // Moving from one active decoder to another. Delete the first one.
- DecoderMap::iterator it = decoders_.find(active_decoder_);
- if (it == decoders_.end()) {
- // Decoder not found. This should not be possible.
- assert(false);
- return kDecoderNotFound;
- }
- if (!(*it).second.external) {
- // Delete the AudioDecoder object, unless it is an externally created
- // decoder.
- delete (*it).second.decoder;
- (*it).second.decoder = NULL;
- }
- *new_decoder = true;
- }
- active_decoder_ = rtp_payload_type;
- return kOK;
-}
-
-AudioDecoder* DecoderDatabase::GetActiveDecoder() {
- if (active_decoder_ < 0) {
- // No active decoder.
- return NULL;
- }
- return GetDecoder(active_decoder_);
-}
-
-int DecoderDatabase::SetActiveCngDecoder(uint8_t rtp_payload_type) {
- // Check that |rtp_payload_type| exists in the database.
- DecoderMap::const_iterator it = decoders_.find(rtp_payload_type);
- if (it == decoders_.end()) {
- // Decoder not found.
- return kDecoderNotFound;
- }
- if (active_cng_decoder_ >= 0 && active_cng_decoder_ != rtp_payload_type) {
- // Moving from one active CNG decoder to another. Delete the first one.
- DecoderMap::iterator it = decoders_.find(active_cng_decoder_);
- if (it == decoders_.end()) {
- // Decoder not found. This should not be possible.
- assert(false);
- return kDecoderNotFound;
- }
- if (!(*it).second.external) {
- // Delete the AudioDecoder object, unless it is an externally created
- // decoder.
- delete (*it).second.decoder;
- (*it).second.decoder = NULL;
- }
- }
- active_cng_decoder_ = rtp_payload_type;
- return kOK;
-}
-
-AudioDecoder* DecoderDatabase::GetActiveCngDecoder() {
- if (active_cng_decoder_ < 0) {
- // No active CNG decoder.
- return NULL;
- }
- return GetDecoder(active_cng_decoder_);
-}
-
-int DecoderDatabase::CheckPayloadTypes(const PacketList& packet_list) const {
- PacketList::const_iterator it;
- for (it = packet_list.begin(); it != packet_list.end(); ++it) {
- if (decoders_.find((*it)->header.payloadType) == decoders_.end()) {
- // Payload type is not found.
- return kDecoderNotFound;
- }
- }
- return kOK;
-}
-
-
-} // namespace webrtc
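
A minimal usage sketch of the database above: registration is keyed on the RTP payload type, GetDecoder() creates the AudioDecoder object lazily, and SetActiveDecoder() reports whether the active decoder changed. The payload-type value and function name below are arbitrary illustrations, not from the tree.

// Sketch only.
#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"

void DecoderDatabaseExample() {
  webrtc::DecoderDatabase db;
  // Register PCMu on payload type 0. A second registration on the same
  // payload type fails with kDecoderExists, since the map is keyed on the
  // payload type.
  int ret = db.RegisterPayload(0, webrtc::kDecoderPCMu);  // Returns kOK.
  ret = db.RegisterPayload(0, webrtc::kDecoderPCMu);      // kDecoderExists.
  // The AudioDecoder object is created lazily on the first GetDecoder() call.
  webrtc::AudioDecoder* decoder = db.GetDecoder(0);
  // |new_decoder| reports whether the active decoder changed (true here,
  // since no decoder was active before).
  bool new_decoder = false;
  db.SetActiveDecoder(0, &new_decoder);
  static_cast<void>(ret);
  static_cast<void>(decoder);
}
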
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.h
deleted file mode 100644
index 9effd525ded..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.h
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECODER_DATABASE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECODER_DATABASE_H_
-
-#include <map>
-
-#include "webrtc/common_types.h" // NULL
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declaration.
-class AudioDecoder;
-
-class DecoderDatabase {
- public:
- enum DatabaseReturnCodes {
- kOK = 0,
- kInvalidRtpPayloadType = -1,
- kCodecNotSupported = -2,
- kInvalidSampleRate = -3,
- kDecoderExists = -4,
- kDecoderNotFound = -5,
- kInvalidPointer = -6
- };
-
- // Struct used to store decoder info in the database.
- struct DecoderInfo {
- // Constructors.
- DecoderInfo()
- : codec_type(kDecoderArbitrary),
- fs_hz(8000),
- decoder(NULL),
- external(false) {
- }
- DecoderInfo(NetEqDecoder ct, int fs, AudioDecoder* dec, bool ext)
- : codec_type(ct),
- fs_hz(fs),
- decoder(dec),
- external(ext) {
- }
- // Destructor. (Defined in decoder_database.cc.)
- ~DecoderInfo();
-
- NetEqDecoder codec_type;
- int fs_hz;
- AudioDecoder* decoder;
- bool external;
- };
-
- static const uint8_t kMaxRtpPayloadType = 0x7F; // Max for a 7-bit number.
- // Maximum value for 8 bits, and an invalid RTP payload type (since it is
- // only 7 bits).
- static const uint8_t kRtpPayloadTypeError = 0xFF;
-
- DecoderDatabase();
-
- virtual ~DecoderDatabase();
-
- // Returns true if the database is empty.
- virtual bool Empty() const;
-
- // Returns the number of decoders registered in the database.
- virtual int Size() const;
-
- // Resets the database, erasing all registered payload types, and deleting
- // any AudioDecoder objects that were not externally created and inserted
- // using InsertExternal().
- virtual void Reset();
-
- // Registers |rtp_payload_type| as a decoder of type |codec_type|. Returns
- // kOK on success; otherwise an error code.
- virtual int RegisterPayload(uint8_t rtp_payload_type,
- NetEqDecoder codec_type);
-
- // Registers an externally created AudioDecoder object, and associates it
- // as a decoder of type |codec_type| with |rtp_payload_type|.
- virtual int InsertExternal(uint8_t rtp_payload_type,
- NetEqDecoder codec_type,
- int fs_hz, AudioDecoder* decoder);
-
- // Removes the entry for |rtp_payload_type| from the database.
- // Returns kDecoderNotFound or kOK depending on the outcome of the operation.
- virtual int Remove(uint8_t rtp_payload_type);
-
- // Returns a pointer to the DecoderInfo struct for |rtp_payload_type|. If
- // no decoder is registered with that |rtp_payload_type|, NULL is returned.
- virtual const DecoderInfo* GetDecoderInfo(uint8_t rtp_payload_type) const;
-
- // Returns one RTP payload type associated with |codec_type|, or
- // kDecoderNotFound if no entry exists for that value. Note that one
- // |codec_type| may be registered with several RTP payload types, and the
- // method may return any of them.
- virtual uint8_t GetRtpPayloadType(NetEqDecoder codec_type) const;
-
- // Returns a pointer to the AudioDecoder object associated with
- // |rtp_payload_type|, or NULL if none is registered. If the AudioDecoder
- // object does not exist for that decoder, the object is created.
- virtual AudioDecoder* GetDecoder(uint8_t rtp_payload_type);
-
- // Returns true if |rtp_payload_type| is registered as a |codec_type|.
- virtual bool IsType(uint8_t rtp_payload_type,
- NetEqDecoder codec_type) const;
-
- // Returns true if |rtp_payload_type| is registered as comfort noise.
- virtual bool IsComfortNoise(uint8_t rtp_payload_type) const;
-
- // Returns true if |rtp_payload_type| is registered as DTMF.
- virtual bool IsDtmf(uint8_t rtp_payload_type) const;
-
- // Returns true if |rtp_payload_type| is registered as RED.
- virtual bool IsRed(uint8_t rtp_payload_type) const;
-
- // Sets the active decoder to be |rtp_payload_type|. If this call results in a
- // change of active decoder, |new_decoder| is set to true. The previous active
- // decoder's AudioDecoder object is deleted.
- virtual int SetActiveDecoder(uint8_t rtp_payload_type, bool* new_decoder);
-
- // Returns the current active decoder, or NULL if no active decoder exists.
- virtual AudioDecoder* GetActiveDecoder();
-
- // Sets the active comfort noise decoder to be |rtp_payload_type|. If this
- // call results in a change of active comfort noise decoder, the previous
- // active decoder's AudioDecoder object is deleted.
- virtual int SetActiveCngDecoder(uint8_t rtp_payload_type);
-
- // Returns the current active comfort noise decoder, or NULL if no active
- // comfort noise decoder exists.
- virtual AudioDecoder* GetActiveCngDecoder();
-
- // Returns kOK if all packets in |packet_list| carry payload types that are
- // registered in the database. Otherwise, returns kDecoderNotFound.
- virtual int CheckPayloadTypes(const PacketList& packet_list) const;
-
- private:
- typedef std::map<uint8_t, DecoderInfo> DecoderMap;
-
- DecoderMap decoders_;
- int active_decoder_;
- int active_cng_decoder_;
-
- DISALLOW_COPY_AND_ASSIGN(DecoderDatabase);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECODER_DATABASE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database_unittest.cc
deleted file mode 100644
index 76f5a099ecb..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database_unittest.cc
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-
-#include <assert.h>
-#include <stdlib.h>
-
-#include <string>
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-namespace webrtc {
-
-TEST(DecoderDatabase, CreateAndDestroy) {
- DecoderDatabase db;
- EXPECT_EQ(0, db.Size());
- EXPECT_TRUE(db.Empty());
-}
-
-TEST(DecoderDatabase, InsertAndRemove) {
- DecoderDatabase db;
- const uint8_t kPayloadType = 0;
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadType, kDecoderPCMu));
- EXPECT_EQ(1, db.Size());
- EXPECT_FALSE(db.Empty());
- EXPECT_EQ(DecoderDatabase::kOK, db.Remove(kPayloadType));
- EXPECT_EQ(0, db.Size());
- EXPECT_TRUE(db.Empty());
-}
-
-TEST(DecoderDatabase, GetDecoderInfo) {
- DecoderDatabase db;
- const uint8_t kPayloadType = 0;
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadType, kDecoderPCMu));
- const DecoderDatabase::DecoderInfo* info;
- info = db.GetDecoderInfo(kPayloadType);
- ASSERT_TRUE(info != NULL);
- EXPECT_EQ(kDecoderPCMu, info->codec_type);
- EXPECT_EQ(NULL, info->decoder);
- EXPECT_EQ(8000, info->fs_hz);
- EXPECT_FALSE(info->external);
- info = db.GetDecoderInfo(kPayloadType + 1); // Other payload type.
- EXPECT_TRUE(info == NULL); // Should not be found.
-}
-
-TEST(DecoderDatabase, GetRtpPayloadType) {
- DecoderDatabase db;
- const uint8_t kPayloadType = 0;
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadType, kDecoderPCMu));
- EXPECT_EQ(kPayloadType, db.GetRtpPayloadType(kDecoderPCMu));
- const uint8_t expected_value = DecoderDatabase::kRtpPayloadTypeError;
- EXPECT_EQ(expected_value,
- db.GetRtpPayloadType(kDecoderISAC)); // iSAC is not registered.
-}
-
-TEST(DecoderDatabase, DISABLED_ON_ANDROID(GetDecoder)) {
- DecoderDatabase db;
- const uint8_t kPayloadType = 0;
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadType, kDecoderILBC));
- AudioDecoder* dec = db.GetDecoder(kPayloadType);
- ASSERT_TRUE(dec != NULL);
-}
-
-TEST(DecoderDatabase, TypeTests) {
- DecoderDatabase db;
- const uint8_t kPayloadTypePcmU = 0;
- const uint8_t kPayloadTypeCng = 13;
- const uint8_t kPayloadTypeDtmf = 100;
- const uint8_t kPayloadTypeRed = 101;
- const uint8_t kPayloadNotUsed = 102;
- // Load into database.
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadTypePcmU, kDecoderPCMu));
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadTypeCng, kDecoderCNGnb));
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadTypeDtmf, kDecoderAVT));
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadTypeRed, kDecoderRED));
- EXPECT_EQ(4, db.Size());
- // Test.
- EXPECT_FALSE(db.IsComfortNoise(kPayloadNotUsed));
- EXPECT_FALSE(db.IsDtmf(kPayloadNotUsed));
- EXPECT_FALSE(db.IsRed(kPayloadNotUsed));
- EXPECT_FALSE(db.IsComfortNoise(kPayloadTypePcmU));
- EXPECT_FALSE(db.IsDtmf(kPayloadTypePcmU));
- EXPECT_FALSE(db.IsRed(kPayloadTypePcmU));
- EXPECT_FALSE(db.IsType(kPayloadTypePcmU, kDecoderISAC));
- EXPECT_TRUE(db.IsType(kPayloadTypePcmU, kDecoderPCMu));
- EXPECT_TRUE(db.IsComfortNoise(kPayloadTypeCng));
- EXPECT_TRUE(db.IsDtmf(kPayloadTypeDtmf));
- EXPECT_TRUE(db.IsRed(kPayloadTypeRed));
-}
-
-TEST(DecoderDatabase, ExternalDecoder) {
- DecoderDatabase db;
- const uint8_t kPayloadType = 0;
- MockAudioDecoder decoder;
- // Load into database.
- EXPECT_EQ(DecoderDatabase::kOK,
- db.InsertExternal(kPayloadType, kDecoderPCMu, 8000,
- &decoder));
- EXPECT_EQ(1, db.Size());
- // Get decoder and make sure we get the external one.
- EXPECT_EQ(&decoder, db.GetDecoder(kPayloadType));
- // Get the decoder info struct and check it too.
- const DecoderDatabase::DecoderInfo* info;
- info = db.GetDecoderInfo(kPayloadType);
- ASSERT_TRUE(info != NULL);
- EXPECT_EQ(kDecoderPCMu, info->codec_type);
- EXPECT_EQ(&decoder, info->decoder);
- EXPECT_EQ(8000, info->fs_hz);
- EXPECT_TRUE(info->external);
- // Expect not to delete the decoder when removing it from the database, since
- // it was declared externally.
- EXPECT_CALL(decoder, Die()).Times(0);
- EXPECT_EQ(DecoderDatabase::kOK, db.Remove(kPayloadType));
- EXPECT_TRUE(db.Empty());
-
- EXPECT_CALL(decoder, Die()).Times(1); // Will be called when |db| is deleted.
-}
-
-TEST(DecoderDatabase, CheckPayloadTypes) {
- DecoderDatabase db;
- // Load a number of payloads into the database. Payload types are 0, 1, ...,
- // while the decoder type is the same for all payload types (this does not
- // matter for the test).
- const int kNumPayloads = 10;
- for (uint8_t payload_type = 0; payload_type < kNumPayloads; ++payload_type) {
- EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(payload_type, kDecoderArbitrary));
- }
- PacketList packet_list;
- for (int i = 0; i < kNumPayloads + 1; ++i) {
- // Create packet with payload type |i|. The last packet will have a payload
- // type that is not registered in the decoder database.
- Packet* packet = new Packet;
- packet->header.payloadType = i;
- packet_list.push_back(packet);
- }
-
- // Expect to return false, since the last packet is of an unknown type.
- EXPECT_EQ(DecoderDatabase::kDecoderNotFound,
- db.CheckPayloadTypes(packet_list));
-
- delete packet_list.back();
- packet_list.pop_back(); // Remove the unknown one.
-
- EXPECT_EQ(DecoderDatabase::kOK, db.CheckPayloadTypes(packet_list));
-
- // Delete all packets.
- PacketList::iterator it = packet_list.begin();
- while (it != packet_list.end()) {
- delete packet_list.front();
- it = packet_list.erase(it);
- }
-}
-
-// Test the methods for setting and getting active speech and CNG decoders.
-TEST(DecoderDatabase, ActiveDecoders) {
- DecoderDatabase db;
- // Load payload types.
- ASSERT_EQ(DecoderDatabase::kOK, db.RegisterPayload(0, kDecoderPCMu));
- ASSERT_EQ(DecoderDatabase::kOK, db.RegisterPayload(103, kDecoderISAC));
- ASSERT_EQ(DecoderDatabase::kOK, db.RegisterPayload(13, kDecoderCNGnb));
- // Verify that no decoders are active from the start.
- EXPECT_EQ(NULL, db.GetActiveDecoder());
- EXPECT_EQ(NULL, db.GetActiveCngDecoder());
-
- // Set active speech codec.
- bool changed; // Should be true when the active decoder changed.
- EXPECT_EQ(DecoderDatabase::kOK, db.SetActiveDecoder(0, &changed));
- EXPECT_TRUE(changed);
- AudioDecoder* decoder = db.GetActiveDecoder();
- ASSERT_FALSE(decoder == NULL); // Should get a decoder here.
- EXPECT_EQ(kDecoderPCMu, decoder->codec_type());
-
- // Set the same again. Expect no change.
- EXPECT_EQ(DecoderDatabase::kOK, db.SetActiveDecoder(0, &changed));
- EXPECT_FALSE(changed);
- decoder = db.GetActiveDecoder();
- ASSERT_FALSE(decoder == NULL); // Should get a decoder here.
- EXPECT_EQ(kDecoderPCMu, decoder->codec_type());
-
- // Change active decoder.
- EXPECT_EQ(DecoderDatabase::kOK, db.SetActiveDecoder(103, &changed));
- EXPECT_TRUE(changed);
- decoder = db.GetActiveDecoder();
- ASSERT_FALSE(decoder == NULL); // Should get a decoder here.
- EXPECT_EQ(kDecoderISAC, decoder->codec_type());
-
- // Remove the active decoder, and verify that the active becomes NULL.
- EXPECT_EQ(DecoderDatabase::kOK, db.Remove(103));
- EXPECT_EQ(NULL, db.GetActiveDecoder());
-
- // Set active CNG codec.
- EXPECT_EQ(DecoderDatabase::kOK, db.SetActiveCngDecoder(13));
- decoder = db.GetActiveCngDecoder();
- ASSERT_FALSE(decoder == NULL); // Should get a decoder here.
- EXPECT_EQ(kDecoderCNGnb, decoder->codec_type());
-
- // Remove the active CNG decoder, and verify that the active becomes NULL.
- EXPECT_EQ(DecoderDatabase::kOK, db.Remove(13));
- EXPECT_EQ(NULL, db.GetActiveCngDecoder());
-
- // Try to set non-existing codecs as active.
- EXPECT_EQ(DecoderDatabase::kDecoderNotFound,
- db.SetActiveDecoder(17, &changed));
- EXPECT_EQ(DecoderDatabase::kDecoderNotFound,
- db.SetActiveCngDecoder(17));
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/defines.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/defines.h
deleted file mode 100644
index b6f9eb2bc14..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/defines.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DEFINES_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DEFINES_H_
-
-namespace webrtc {
-
-enum Operations {
- kNormal = 0,
- kMerge,
- kExpand,
- kAccelerate,
- kPreemptiveExpand,
- kRfc3389Cng,
- kRfc3389CngNoPacket,
- kCodecInternalCng,
- kDtmf,
- kAlternativePlc,
- kAlternativePlcIncreaseTimestamp,
- kAudioRepetition,
- kAudioRepetitionIncreaseTimestamp,
- kUndefined = -1
-};
-
-enum Modes {
- kModeNormal = 0,
- kModeExpand,
- kModeMerge,
- kModeAccelerateSuccess,
- kModeAccelerateLowEnergy,
- kModeAccelerateFail,
- kModePreemptiveExpandSuccess,
- kModePreemptiveExpandLowEnergy,
- kModePreemptiveExpandFail,
- kModeRfc3389Cng,
- kModeCodecInternalCng,
- kModeDtmf,
- kModeError,
- kModeUndefined = -1
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DEFINES_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.cc
deleted file mode 100644
index e80b9de5142..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.cc
+++ /dev/null
@@ -1,425 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-
-#include <assert.h>
-#include <math.h>
-
-#include <algorithm> // max, min
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-
-namespace webrtc {
-
-DelayManager::DelayManager(int max_packets_in_buffer,
- DelayPeakDetector* peak_detector)
- : first_packet_received_(false),
- max_packets_in_buffer_(max_packets_in_buffer),
- iat_vector_(kMaxIat + 1, 0),
- iat_factor_(0),
- packet_iat_count_ms_(0),
- base_target_level_(4), // In Q0 domain.
- target_level_(base_target_level_ << 8), // In Q8 domain.
- packet_len_ms_(0),
- streaming_mode_(false),
- last_seq_no_(0),
- last_timestamp_(0),
- minimum_delay_ms_(0),
- least_required_delay_ms_(target_level_),
- maximum_delay_ms_(target_level_),
- iat_cumulative_sum_(0),
- max_iat_cumulative_sum_(0),
- max_timer_ms_(0),
- peak_detector_(*peak_detector),
- last_pack_cng_or_dtmf_(1) {
- assert(peak_detector); // Should never be NULL.
- Reset();
-}
-
-DelayManager::~DelayManager() {}
-
-const DelayManager::IATVector& DelayManager::iat_vector() const {
- return iat_vector_;
-}
-
-// Set the histogram vector to an exponentially decaying distribution
-// iat_vector_[i] = 0.5^(i+1), i = 0, 1, 2, ...
-// iat_vector_ is in Q30.
-void DelayManager::ResetHistogram() {
- // Set temp_prob to (slightly more than) 1 in Q14. This ensures that the sum
- // of iat_vector_ is 1.
- uint16_t temp_prob = 0x4002; // 16384 + 2 = 100000000000010 binary.
- IATVector::iterator it = iat_vector_.begin();
- for (; it < iat_vector_.end(); it++) {
- temp_prob >>= 1;
- (*it) = temp_prob << 16;
- }
- base_target_level_ = 4;
- target_level_ = base_target_level_ << 8;
-}
-
-int DelayManager::Update(uint16_t sequence_number,
- uint32_t timestamp,
- int sample_rate_hz) {
- if (sample_rate_hz <= 0) {
- return -1;
- }
-
- if (!first_packet_received_) {
- // Prepare for next packet arrival.
- packet_iat_count_ms_ = 0;
- last_seq_no_ = sequence_number;
- last_timestamp_ = timestamp;
- first_packet_received_ = true;
- return 0;
- }
-
- // Try calculating packet length from current and previous timestamps.
- int packet_len_ms;
- if (!IsNewerTimestamp(timestamp, last_timestamp_) ||
- !IsNewerSequenceNumber(sequence_number, last_seq_no_)) {
- // Wrong timestamp or sequence order; use stored value.
- packet_len_ms = packet_len_ms_;
- } else {
- // Calculate timestamps per packet and derive packet length in ms.
- int packet_len_samp =
- static_cast<uint32_t>(timestamp - last_timestamp_) /
- static_cast<uint16_t>(sequence_number - last_seq_no_);
- packet_len_ms = (1000 * packet_len_samp) / sample_rate_hz;
- }
-
- if (packet_len_ms > 0) {
- // Cannot update statistics unless |packet_len_ms| is valid.
- // Calculate inter-arrival time (IAT) in integer "packet times"
- // (rounding down). This is the value used as index to the histogram
- // vector |iat_vector_|.
- int iat_packets = packet_iat_count_ms_ / packet_len_ms;
-
- if (streaming_mode_) {
- UpdateCumulativeSums(packet_len_ms, sequence_number);
- }
-
- // Check for discontinuous packet sequence and re-ordering.
- if (IsNewerSequenceNumber(sequence_number, last_seq_no_ + 1)) {
- // Compensate for gap in the sequence numbers. Reduce IAT with the
- // expected extra time due to lost packets, but ensure that the IAT is
- // not negative.
- iat_packets -= static_cast<uint16_t>(sequence_number - last_seq_no_ - 1);
- iat_packets = std::max(iat_packets, 0);
- } else if (!IsNewerSequenceNumber(sequence_number, last_seq_no_)) {
- iat_packets += static_cast<uint16_t>(last_seq_no_ + 1 - sequence_number);
- }
-
- // Saturate IAT at maximum value.
- const int max_iat = kMaxIat;
- iat_packets = std::min(iat_packets, max_iat);
- UpdateHistogram(iat_packets);
- // Calculate new |target_level_| based on updated statistics.
- target_level_ = CalculateTargetLevel(iat_packets);
- if (streaming_mode_) {
- target_level_ = std::max(target_level_, max_iat_cumulative_sum_);
- }
-
- LimitTargetLevel();
- } // End if (packet_len_ms > 0).
-
- // Prepare for next packet arrival.
- packet_iat_count_ms_ = 0;
- last_seq_no_ = sequence_number;
- last_timestamp_ = timestamp;
- return 0;
-}
-
-void DelayManager::UpdateCumulativeSums(int packet_len_ms,
- uint16_t sequence_number) {
- // Calculate IAT in Q8, including fractions of a packet (i.e., more
-  // accurate than |iat_packets|).
- int iat_packets_q8 = (packet_iat_count_ms_ << 8) / packet_len_ms;
- // Calculate cumulative sum IAT with sequence number compensation. The sum
- // is zero if there is no clock-drift.
- iat_cumulative_sum_ += (iat_packets_q8 -
- (static_cast<int>(sequence_number - last_seq_no_) << 8));
- // Subtract drift term.
- iat_cumulative_sum_ -= kCumulativeSumDrift;
- // Ensure not negative.
- iat_cumulative_sum_ = std::max(iat_cumulative_sum_, 0);
- if (iat_cumulative_sum_ > max_iat_cumulative_sum_) {
- // Found a new maximum.
- max_iat_cumulative_sum_ = iat_cumulative_sum_;
- max_timer_ms_ = 0;
- }
- if (max_timer_ms_ > kMaxStreamingPeakPeriodMs) {
- // Too long since the last maximum was observed; decrease max value.
- max_iat_cumulative_sum_ -= kCumulativeSumDrift;
- }
-}
-
-// Each element in the vector is first multiplied by the forgetting factor
-// |iat_factor_|. The vector element indicated by |iat_packets| is then
-// increased (additive) by 1 - |iat_factor_|. This way, the probability of
-// |iat_packets| is slightly increased, while the sum of the histogram remains
-// constant (=1).
-// Due to inaccuracies in the fixed-point arithmetic, the histogram may no
-// longer sum up to 1 (in Q30) after the update. To correct this, a correction
-// term is added or subtracted from the first element (or elements) of the
-// vector.
-// The forgetting factor |iat_factor_| is also updated. When the DelayManager
-// is reset, the factor is set to 0 to facilitate rapid convergence in the
-// beginning. With each update of the histogram, the factor is increased towards
-// the steady-state value |kIatFactor_|.
-void DelayManager::UpdateHistogram(size_t iat_packets) {
- assert(iat_packets < iat_vector_.size());
- int vector_sum = 0; // Sum up the vector elements as they are processed.
- // Multiply each element in |iat_vector_| with |iat_factor_|.
- for (IATVector::iterator it = iat_vector_.begin();
- it != iat_vector_.end(); ++it) {
- *it = (static_cast<int64_t>(*it) * iat_factor_) >> 15;
- vector_sum += *it;
- }
-
- // Increase the probability for the currently observed inter-arrival time
- // by 1 - |iat_factor_|. The factor is in Q15, |iat_vector_| in Q30.
- // Thus, left-shift 15 steps to obtain result in Q30.
- iat_vector_[iat_packets] += (32768 - iat_factor_) << 15;
- vector_sum += (32768 - iat_factor_) << 15; // Add to vector sum.
-
- // |iat_vector_| should sum up to 1 (in Q30), but it may not due to
- // fixed-point rounding errors.
- vector_sum -= 1 << 30; // Should be zero. Compensate if not.
- if (vector_sum != 0) {
- // Modify a few values early in |iat_vector_|.
- int flip_sign = vector_sum > 0 ? -1 : 1;
- IATVector::iterator it = iat_vector_.begin();
- while (it != iat_vector_.end() && abs(vector_sum) > 0) {
- // Add/subtract 1/16 of the element, but not more than |vector_sum|.
- int correction = flip_sign * std::min(abs(vector_sum), (*it) >> 4);
- *it += correction;
- vector_sum += correction;
- ++it;
- }
- }
- assert(vector_sum == 0); // Verify that the above is correct.
-
- // Update |iat_factor_| (changes only during the first seconds after a reset).
- // The factor converges to |kIatFactor_|.
- iat_factor_ += (kIatFactor_ - iat_factor_ + 3) >> 2;
-}
-
-// Enforces upper and lower limits for |target_level_|. The upper limit is
-// chosen to be the minimum of i) 75% of |max_packets_in_buffer_|, to leave
-// some headroom for natural fluctuations around the target, and ii) the
-// equivalent of |maximum_delay_ms_| in packets. Note that in practice, if no
-// |maximum_delay_ms_| is specified, this does not have any impact, since the
-// target level is far below the buffer capacity in all reasonable cases.
-// The lower limit is the equivalent of |minimum_delay_ms_| in packets. We
-// update |least_required_delay_ms_| while the above limits are applied.
-// TODO(hlundin): Move this check to the buffer logistics class.
-void DelayManager::LimitTargetLevel() {
- least_required_delay_ms_ = (target_level_ * packet_len_ms_) >> 8;
-
- if (packet_len_ms_ > 0 && minimum_delay_ms_ > 0) {
- int minimum_delay_packet_q8 = (minimum_delay_ms_ << 8) / packet_len_ms_;
- target_level_ = std::max(target_level_, minimum_delay_packet_q8);
- }
-
- if (maximum_delay_ms_ > 0 && packet_len_ms_ > 0) {
- int maximum_delay_packet_q8 = (maximum_delay_ms_ << 8) / packet_len_ms_;
- target_level_ = std::min(target_level_, maximum_delay_packet_q8);
- }
-
-  // Shift to Q8, then take 75%.
- int max_buffer_packets_q8 = (3 * (max_packets_in_buffer_ << 8)) / 4;
- target_level_ = std::min(target_level_, max_buffer_packets_q8);
-
- // Sanity check, at least 1 packet (in Q8).
- target_level_ = std::max(target_level_, 1 << 8);
-}
-
-int DelayManager::CalculateTargetLevel(int iat_packets) {
- int limit_probability = kLimitProbability;
- if (streaming_mode_) {
- limit_probability = kLimitProbabilityStreaming;
- }
-
- // Calculate target buffer level from inter-arrival time histogram.
- // Find the |iat_index| for which the probability of observing an
- // inter-arrival time larger than or equal to |iat_index| is less than or
-  // equal to |limit_probability|. The sought probability is estimated using
-  // the histogram as a reverse cumulative distribution, i.e., the sum of
-  // elements from the end up to |iat_index|. Now, since the sum of all
-  // elements is 1 (in Q30) by definition, and since the solution is often a
-  // low value for |iat_index|, it is more efficient to start with |sum| = 1
-  // and subtract elements from the start of the histogram.
- size_t index = 0; // Start from the beginning of |iat_vector_|.
- int sum = 1 << 30; // Assign to 1 in Q30.
- sum -= iat_vector_[index]; // Ensure that target level is >= 1.
-
- do {
- // Subtract the probabilities one by one until the sum is no longer greater
- // than limit_probability.
- ++index;
- sum -= iat_vector_[index];
- } while ((sum > limit_probability) && (index < iat_vector_.size() - 1));
-
- // This is the base value for the target buffer level.
- int target_level = static_cast<int>(index);
- base_target_level_ = static_cast<int>(index);
-
- // Update detector for delay peaks.
- bool delay_peak_found = peak_detector_.Update(iat_packets, target_level);
- if (delay_peak_found) {
- target_level = std::max(target_level, peak_detector_.MaxPeakHeight());
- }
-
- // Sanity check. |target_level| must be strictly positive.
- target_level = std::max(target_level, 1);
- // Scale to Q8 and assign to member variable.
- target_level_ = target_level << 8;
- return target_level_;
-}
-
-int DelayManager::SetPacketAudioLength(int length_ms) {
- if (length_ms <= 0) {
- LOG_F(LS_ERROR) << "length_ms = " << length_ms;
- return -1;
- }
- packet_len_ms_ = length_ms;
- peak_detector_.SetPacketAudioLength(packet_len_ms_);
- packet_iat_count_ms_ = 0;
- last_pack_cng_or_dtmf_ = 1; // TODO(hlundin): Legacy. Remove?
- return 0;
-}
-
-
-void DelayManager::Reset() {
- packet_len_ms_ = 0; // Packet size unknown.
- streaming_mode_ = false;
- peak_detector_.Reset();
- ResetHistogram(); // Resets target levels too.
- iat_factor_ = 0; // Adapt the histogram faster for the first few packets.
- packet_iat_count_ms_ = 0;
- max_timer_ms_ = 0;
- iat_cumulative_sum_ = 0;
- max_iat_cumulative_sum_ = 0;
- last_pack_cng_or_dtmf_ = 1;
-}
-
-int DelayManager::AverageIAT() const {
- int32_t sum_q24 = 0;
- // Using an int for the upper limit of the following for-loop so the
- // loop-counter can be int. Otherwise we need a cast where |sum_q24| is
- // updated.
- const int iat_vec_size = static_cast<int>(iat_vector_.size());
- assert(iat_vector_.size() == 65); // Algorithm is hard-coded for this size.
- for (int i = 0; i < iat_vec_size; ++i) {
- // Shift 6 to fit worst case: 2^30 * 64.
- sum_q24 += (iat_vector_[i] >> 6) * i;
- }
- // Subtract the nominal inter-arrival time 1 = 2^24 in Q24.
- sum_q24 -= (1 << 24);
- // Multiply with 1000000 / 2^24 = 15625 / 2^18 to get in parts-per-million.
- // Shift 7 to Q17 first, then multiply with 15625 and shift another 11.
- return ((sum_q24 >> 7) * 15625) >> 11;
-}
-
-bool DelayManager::PeakFound() const {
- return peak_detector_.peak_found();
-}
-
-void DelayManager::UpdateCounters(int elapsed_time_ms) {
- packet_iat_count_ms_ += elapsed_time_ms;
- peak_detector_.IncrementCounter(elapsed_time_ms);
- max_timer_ms_ += elapsed_time_ms;
-}
-
-void DelayManager::ResetPacketIatCount() { packet_iat_count_ms_ = 0; }
-
-// Note that |lower_limit| and |higher_limit| are not assigned to
-// |minimum_delay_ms_| and |maximum_delay_ms_| defined by the client of this
-// class. They are computed from |target_level_| and used for decision making.
-void DelayManager::BufferLimits(int* lower_limit, int* higher_limit) const {
- if (!lower_limit || !higher_limit) {
- LOG_F(LS_ERROR) << "NULL pointers supplied as input";
- assert(false);
- return;
- }
-
- int window_20ms = 0x7FFF; // Default large value for legacy bit-exactness.
- if (packet_len_ms_ > 0) {
- window_20ms = (20 << 8) / packet_len_ms_;
- }
-
- // |target_level_| is in Q8 already.
- *lower_limit = (target_level_ * 3) / 4;
- // |higher_limit| is equal to |target_level_|, but should at
-  // least be 20 ms higher than |lower_limit|.
- *higher_limit = std::max(target_level_, *lower_limit + window_20ms);
-}
-
-int DelayManager::TargetLevel() const {
- return target_level_;
-}
-
-void DelayManager::LastDecoderType(NetEqDecoder decoder_type) {
- if (decoder_type == kDecoderAVT ||
- decoder_type == kDecoderCNGnb ||
- decoder_type == kDecoderCNGwb ||
- decoder_type == kDecoderCNGswb32kHz ||
- decoder_type == kDecoderCNGswb48kHz) {
- last_pack_cng_or_dtmf_ = 1;
- } else if (last_pack_cng_or_dtmf_ != 0) {
- last_pack_cng_or_dtmf_ = -1;
- }
-}
-
-bool DelayManager::SetMinimumDelay(int delay_ms) {
- // Minimum delay shouldn't be more than maximum delay, if any maximum is set.
-  // Also, if the packet length is known, check that |delay_ms| is no more
-  // than 75% of the buffer capacity (|max_packets_in_buffer_| packets).
- if ((maximum_delay_ms_ > 0 && delay_ms > maximum_delay_ms_) ||
- (packet_len_ms_ > 0 &&
- delay_ms > 3 * max_packets_in_buffer_ * packet_len_ms_ / 4)) {
- return false;
- }
- minimum_delay_ms_ = delay_ms;
- return true;
-}
-
-bool DelayManager::SetMaximumDelay(int delay_ms) {
- if (delay_ms == 0) {
- // Zero input unsets the maximum delay.
- maximum_delay_ms_ = 0;
- return true;
- } else if (delay_ms < minimum_delay_ms_ || delay_ms < packet_len_ms_) {
- // Maximum delay shouldn't be less than minimum delay or less than a packet.
- return false;
- }
- maximum_delay_ms_ = delay_ms;
- return true;
-}
-
-int DelayManager::least_required_delay_ms() const {
- return least_required_delay_ms_;
-}
-
-int DelayManager::base_target_level() const { return base_target_level_; }
-void DelayManager::set_streaming_mode(bool value) { streaming_mode_ = value; }
-int DelayManager::last_pack_cng_or_dtmf() const {
- return last_pack_cng_or_dtmf_;
-}
-
-void DelayManager::set_last_pack_cng_or_dtmf(int value) {
- last_pack_cng_or_dtmf_ = value;
-}
-} // namespace webrtc
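
The fixed-point bookkeeping above (Q15 forgetting factor, Q30 histogram probabilities, Q8 target level) is easier to follow against a floating-point reference. The sketch below restates UpdateHistogram() and the core of CalculateTargetLevel() in doubles; the names are hypothetical, and the peak-detector and streaming-mode terms are left out.

// Sketch only; a floating-point reference for the Q30 arithmetic above.
#include <cstddef>
#include <vector>

// One histogram update: scale every bin by the forgetting factor, then add
// the complementary mass to the observed inter-arrival-time bin. The bins
// keep summing to 1 by construction (the Q30 code needs an explicit
// correction pass for rounding errors; floating point does not).
void UpdateHistogramRef(std::vector<double>* hist,
                        size_t iat_packets,
                        double forget /* ~0.9993, i.e. kIatFactor_ / 32768 */) {
  for (size_t i = 0; i < hist->size(); ++i) {
    (*hist)[i] *= forget;
  }
  (*hist)[iat_packets] += 1.0 - forget;
}

// Base target level: the smallest index such that the probability of an
// inter-arrival time of at least that many packets has dropped to
// |limit_probability| (1/20, or 1/2000 in streaming mode). As in the code
// above, the reverse cumulative sum is formed by starting at 1 and
// subtracting bins from the front of the histogram.
int CalculateTargetLevelRef(const std::vector<double>& hist,
                            double limit_probability /* e.g. 0.05 */) {
  double sum = 1.0;
  size_t index = 0;
  sum -= hist[index];  // Guarantees a result of at least 1 packet.
  do {
    ++index;
    sum -= hist[index];
  } while (sum > limit_probability && index < hist.size() - 1);
  return static_cast<int>(index);
}
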
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.h
deleted file mode 100644
index ed1e87b190d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.h
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_MANAGER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_MANAGER_H_
-
-#include <string.h> // Provide access to size_t.
-
-#include <vector>
-
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declaration.
-class DelayPeakDetector;
-
-class DelayManager {
- public:
- typedef std::vector<int> IATVector;
-
- // Create a DelayManager object. Notify the delay manager that the packet
- // buffer can hold no more than |max_packets_in_buffer| packets (i.e., this
- // is the number of packet slots in the buffer). Supply a PeakDetector
- // object to the DelayManager.
- DelayManager(int max_packets_in_buffer, DelayPeakDetector* peak_detector);
-
- virtual ~DelayManager();
-
- // Read the inter-arrival time histogram. Mainly for testing purposes.
- virtual const IATVector& iat_vector() const;
-
- // Updates the delay manager with a new incoming packet, with
- // |sequence_number| and |timestamp| from the RTP header. This updates the
- // inter-arrival time histogram and other statistics, as well as the
- // associated DelayPeakDetector. A new target buffer level is calculated.
- // Returns 0 on success, -1 on failure (invalid sample rate).
- virtual int Update(uint16_t sequence_number,
- uint32_t timestamp,
- int sample_rate_hz);
-
- // Calculates a new target buffer level. Called from the Update() method.
- // Sets target_level_ (in Q8) and returns the same value. Also calculates
- // and updates base_target_level_, which is the target buffer level before
- // taking delay peaks into account.
- virtual int CalculateTargetLevel(int iat_packets);
-
- // Notifies the DelayManager of how much audio data is carried in each packet.
- // The method updates the DelayPeakDetector too, and resets the inter-arrival
- // time counter. Returns 0 on success, -1 on failure.
- virtual int SetPacketAudioLength(int length_ms);
-
- // Resets the DelayManager and the associated DelayPeakDetector.
- virtual void Reset();
-
- // Calculates the average inter-arrival time deviation from the histogram.
- // The result is returned as parts-per-million deviation from the nominal
- // inter-arrival time. That is, if the average inter-arrival time is equal to
- // the nominal frame time, the return value is zero. A positive value
- // corresponds to packet spacing being too large, while a negative value means
- // that the packets arrive with less spacing than expected.
- virtual int AverageIAT() const;
-
- // Returns true if peak-mode is active. That is, delay peaks were observed
- // recently. This method simply asks for the same information from the
- // DelayPeakDetector object.
- virtual bool PeakFound() const;
-
- // Notifies the counters in DelayManager and DelayPeakDetector that
- // |elapsed_time_ms| have elapsed.
- virtual void UpdateCounters(int elapsed_time_ms);
-
- // Reset the inter-arrival time counter to 0.
- virtual void ResetPacketIatCount();
-
- // Writes the lower and higher limits which the buffer level should stay
- // within to the corresponding pointers. The values are in (fractions of)
- // packets in Q8.
- virtual void BufferLimits(int* lower_limit, int* higher_limit) const;
-
- // Gets the target buffer level, in (fractions of) packets in Q8. This value
- // includes any extra delay set through the set_extra_delay_ms() method.
- virtual int TargetLevel() const;
-
- virtual void LastDecoderType(NetEqDecoder decoder_type);
-
- // Accessors and mutators.
-  // Assuming |delay_ms| is in the valid range.
- virtual bool SetMinimumDelay(int delay_ms);
- virtual bool SetMaximumDelay(int delay_ms);
- virtual int least_required_delay_ms() const;
- virtual int base_target_level() const;
- virtual void set_streaming_mode(bool value);
- virtual int last_pack_cng_or_dtmf() const;
- virtual void set_last_pack_cng_or_dtmf(int value);
-
- private:
- static const int kLimitProbability = 53687091; // 1/20 in Q30.
- static const int kLimitProbabilityStreaming = 536871; // 1/2000 in Q30.
- static const int kMaxStreamingPeakPeriodMs = 600000; // 10 minutes in ms.
- static const int kCumulativeSumDrift = 2; // Drift term for cumulative sum
- // |iat_cumulative_sum_|.
- // Steady-state forgetting factor for |iat_vector_|, 0.9993 in Q15.
- static const int kIatFactor_ = 32745;
- static const int kMaxIat = 64; // Max inter-arrival time to register.
-
- // Sets |iat_vector_| to the default start distribution and sets the
- // |base_target_level_| and |target_level_| to the corresponding values.
- void ResetHistogram();
-
- // Updates |iat_cumulative_sum_| and |max_iat_cumulative_sum_|. (These are
- // used by the streaming mode.) This method is called by Update().
- void UpdateCumulativeSums(int packet_len_ms, uint16_t sequence_number);
-
- // Updates the histogram |iat_vector_|. The probability for inter-arrival time
- // equal to |iat_packets| (in integer packets) is increased slightly, while
- // all other entries are decreased. This method is called by Update().
- void UpdateHistogram(size_t iat_packets);
-
- // Makes sure that |target_level_| is not too large, taking
- // |max_packets_in_buffer_| and |extra_delay_ms_| into account. This method is
- // called by Update().
- void LimitTargetLevel();
-
- bool first_packet_received_;
- const int max_packets_in_buffer_; // Capacity of the packet buffer.
- IATVector iat_vector_; // Histogram of inter-arrival times.
- int iat_factor_; // Forgetting factor for updating the IAT histogram (Q15).
- int packet_iat_count_ms_; // Milliseconds elapsed since last packet.
- int base_target_level_; // Currently preferred buffer level before peak
- // detection and streaming mode (Q0).
- // TODO(turajs) change the comment according to the implementation of
- // minimum-delay.
- int target_level_; // Currently preferred buffer level in (fractions)
- // of packets (Q8), before adding any extra delay.
- int packet_len_ms_; // Length of audio in each incoming packet [ms].
- bool streaming_mode_;
- uint16_t last_seq_no_; // Sequence number for last received packet.
- uint32_t last_timestamp_; // Timestamp for the last received packet.
- int minimum_delay_ms_; // Externally set minimum delay.
- int least_required_delay_ms_; // Smallest preferred buffer level (same unit
- // as |target_level_|), before applying
- // |minimum_delay_ms_| and/or |maximum_delay_ms_|.
- int maximum_delay_ms_; // Externally set maximum allowed delay.
- int iat_cumulative_sum_; // Cumulative sum of delta inter-arrival times.
- int max_iat_cumulative_sum_; // Max of |iat_cumulative_sum_|.
- int max_timer_ms_; // Time elapsed since maximum was observed.
- DelayPeakDetector& peak_detector_;
- int last_pack_cng_or_dtmf_;
-
- DISALLOW_COPY_AND_ASSIGN(DelayManager);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_MANAGER_H_
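
TargetLevel(), BufferLimits() and least_required_delay_ms() all work with packet counts in Q8. The conversion to milliseconds follows the (x * packet_len_ms) >> 8 pattern used in LimitTargetLevel(); a hypothetical helper, for illustration only:

// Sketch only; converts a Q8 packet count into milliseconds of audio.
int Q8PacketsToMs(int level_q8, int packet_len_ms) {
  // |level_q8| holds level_q8 / 256 packets.
  return (level_q8 * packet_len_ms) >> 8;
}
// Example: a target level of 4 packets (4 << 8 in Q8) of 20 ms each
// corresponds to Q8PacketsToMs(4 << 8, 20) == 80 ms.
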
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager_unittest.cc
deleted file mode 100644
index 482a65c9a48..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager_unittest.cc
+++ /dev/null
@@ -1,297 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for DelayManager class.
-
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-
-#include <math.h>
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h"
-
-namespace webrtc {
-
-using ::testing::Return;
-using ::testing::_;
-
-class DelayManagerTest : public ::testing::Test {
- protected:
- static const int kMaxNumberOfPackets = 240;
- static const int kTimeStepMs = 10;
- static const int kFs = 8000;
- static const int kFrameSizeMs = 20;
- static const int kTsIncrement = kFrameSizeMs * kFs / 1000;
-
- DelayManagerTest();
- virtual void SetUp();
- virtual void TearDown();
-  void SetPacketAudioLength(int length_ms);
- void InsertNextPacket();
- void IncreaseTime(int inc_ms);
-
- DelayManager* dm_;
- MockDelayPeakDetector detector_;
- uint16_t seq_no_;
- uint32_t ts_;
-};
-
-DelayManagerTest::DelayManagerTest()
- : dm_(NULL),
- seq_no_(0x1234),
- ts_(0x12345678) {
-}
-
-void DelayManagerTest::SetUp() {
- EXPECT_CALL(detector_, Reset())
- .Times(1);
- dm_ = new DelayManager(kMaxNumberOfPackets, &detector_);
-}
-
-void DelayManagerTest::SetPacketAudioLength(int length_ms) {
- EXPECT_CALL(detector_, SetPacketAudioLength(length_ms));
- dm_->SetPacketAudioLength(length_ms);
-}
-
-void DelayManagerTest::InsertNextPacket() {
- EXPECT_EQ(0, dm_->Update(seq_no_, ts_, kFs));
- seq_no_ += 1;
- ts_ += kTsIncrement;
-}
-
-void DelayManagerTest::IncreaseTime(int inc_ms) {
- for (int t = 0; t < inc_ms; t += kTimeStepMs) {
- EXPECT_CALL(detector_, IncrementCounter(kTimeStepMs))
- .Times(1);
- dm_->UpdateCounters(kTimeStepMs);
- }
-}
-void DelayManagerTest::TearDown() {
- EXPECT_CALL(detector_, Die());
- delete dm_;
-}
-
-TEST_F(DelayManagerTest, CreateAndDestroy) {
- // Nothing to do here. The test fixture creates and destroys the DelayManager
- // object.
-}
-
-TEST_F(DelayManagerTest, VectorInitialization) {
- const DelayManager::IATVector& vec = dm_->iat_vector();
- double sum = 0.0;
- for (size_t i = 0; i < vec.size(); i++) {
- EXPECT_NEAR(ldexp(pow(0.5, static_cast<int>(i + 1)), 30), vec[i], 65536);
- // Tolerance 65536 in Q30 corresponds to a delta of approximately 0.00006.
- sum += vec[i];
- }
- EXPECT_EQ(1 << 30, static_cast<int>(sum)); // Should be 1 in Q30.
-}
-
-TEST_F(DelayManagerTest, SetPacketAudioLength) {
- const int kLengthMs = 30;
- // Expect DelayManager to pass on the new length to the detector object.
- EXPECT_CALL(detector_, SetPacketAudioLength(kLengthMs))
- .Times(1);
- EXPECT_EQ(0, dm_->SetPacketAudioLength(kLengthMs));
- EXPECT_EQ(-1, dm_->SetPacketAudioLength(-1)); // Illegal parameter value.
-}
-
-TEST_F(DelayManagerTest, PeakFound) {
- // Expect DelayManager to pass on the question to the detector.
- // Call twice, and let the detector return true the first time and false the
- // second time.
- EXPECT_CALL(detector_, peak_found())
- .WillOnce(Return(true))
- .WillOnce(Return(false));
- EXPECT_TRUE(dm_->PeakFound());
- EXPECT_FALSE(dm_->PeakFound());
-}
-
-TEST_F(DelayManagerTest, UpdateCounters) {
- // Expect DelayManager to pass on the counter update to the detector.
- EXPECT_CALL(detector_, IncrementCounter(kTimeStepMs))
- .Times(1);
- dm_->UpdateCounters(kTimeStepMs);
-}
-
-TEST_F(DelayManagerTest, UpdateNormal) {
- SetPacketAudioLength(kFrameSizeMs);
- // First packet arrival.
- InsertNextPacket();
- // Advance time by one frame size.
- IncreaseTime(kFrameSizeMs);
- // Second packet arrival.
- // Expect detector update method to be called once with inter-arrival time
- // equal to 1 packet, and (base) target level equal to 1 as well.
- // Return false to indicate no peaks found.
- EXPECT_CALL(detector_, Update(1, 1))
- .WillOnce(Return(false));
- InsertNextPacket();
- EXPECT_EQ(1 << 8, dm_->TargetLevel()); // In Q8.
- EXPECT_EQ(1, dm_->base_target_level());
- int lower, higher;
- dm_->BufferLimits(&lower, &higher);
- // Expect |lower| to be 75% of target level, and |higher| to be target level,
- // but also at least 20 ms higher than |lower|, which is the limiting case
- // here.
- EXPECT_EQ((1 << 8) * 3 / 4, lower);
- EXPECT_EQ(lower + (20 << 8) / kFrameSizeMs, higher);
-}
-
-TEST_F(DelayManagerTest, UpdateLongInterArrivalTime) {
- SetPacketAudioLength(kFrameSizeMs);
- // First packet arrival.
- InsertNextPacket();
- // Advance time by two frame sizes.
- IncreaseTime(2 * kFrameSizeMs);
- // Second packet arrival.
- // Expect detector update method to be called once with inter-arrival time
- // equal to 2 packets, and (base) target level equal to 2 as well.
- // Return false to indicate no peaks found.
- EXPECT_CALL(detector_, Update(2, 2))
- .WillOnce(Return(false));
- InsertNextPacket();
- EXPECT_EQ(2 << 8, dm_->TargetLevel()); // In Q8.
- EXPECT_EQ(2, dm_->base_target_level());
- int lower, higher;
- dm_->BufferLimits(&lower, &higher);
- // Expect |lower| to be 75% of target level, and |higher| to be target level,
- // but also at least 20 ms higher than |lower|, which is the limiting case
- // here.
- EXPECT_EQ((2 << 8) * 3 / 4, lower);
- EXPECT_EQ(lower + (20 << 8) / kFrameSizeMs, higher);
-}
-
-TEST_F(DelayManagerTest, UpdatePeakFound) {
- SetPacketAudioLength(kFrameSizeMs);
- // First packet arrival.
- InsertNextPacket();
- // Advance time by one frame size.
- IncreaseTime(kFrameSizeMs);
- // Second packet arrival.
- // Expect detector update method to be called once with inter-arrival time
- // equal to 1 packet, and (base) target level equal to 1 as well.
- // Return true to indicate that peaks are found. Let the peak height be 5.
- EXPECT_CALL(detector_, Update(1, 1))
- .WillOnce(Return(true));
- EXPECT_CALL(detector_, MaxPeakHeight())
- .WillOnce(Return(5));
- InsertNextPacket();
- EXPECT_EQ(5 << 8, dm_->TargetLevel());
- EXPECT_EQ(1, dm_->base_target_level()); // Base target level is w/o peaks.
- int lower, higher;
- dm_->BufferLimits(&lower, &higher);
- // Expect |lower| to be 75% of target level, and |higher| to be target level.
- EXPECT_EQ((5 << 8) * 3 / 4, lower);
- EXPECT_EQ(5 << 8, higher);
-}
-
-TEST_F(DelayManagerTest, TargetDelay) {
- SetPacketAudioLength(kFrameSizeMs);
- // First packet arrival.
- InsertNextPacket();
- // Advance time by one frame size.
- IncreaseTime(kFrameSizeMs);
- // Second packet arrival.
- // Expect detector update method to be called once with inter-arrival time
- // equal to 1 packet, and (base) target level equal to 1 as well.
- // Return false to indicate no peaks found.
- EXPECT_CALL(detector_, Update(1, 1))
- .WillOnce(Return(false));
- InsertNextPacket();
- const int kExpectedTarget = 1;
- EXPECT_EQ(kExpectedTarget << 8, dm_->TargetLevel()); // In Q8.
- EXPECT_EQ(1, dm_->base_target_level());
- int lower, higher;
- dm_->BufferLimits(&lower, &higher);
- // Expect |lower| to be 75% of base target level, and |higher| to be
- // lower + 20 ms headroom.
- EXPECT_EQ((1 << 8) * 3 / 4, lower);
- EXPECT_EQ(lower + (20 << 8) / kFrameSizeMs, higher);
-}
-
-TEST_F(DelayManagerTest, MaxAndRequiredDelay) {
- const int kExpectedTarget = 5;
- const int kTimeIncrement = kExpectedTarget * kFrameSizeMs;
- SetPacketAudioLength(kFrameSizeMs);
- // First packet arrival.
- InsertNextPacket();
- // Second packet arrival.
- // Expect detector update method to be called with inter-arrival time
- // equal to |kExpectedTarget| packets. Return true to indicate peaks found.
- EXPECT_CALL(detector_, Update(kExpectedTarget, _))
- .WillRepeatedly(Return(true));
- EXPECT_CALL(detector_, MaxPeakHeight())
- .WillRepeatedly(Return(kExpectedTarget));
- IncreaseTime(kTimeIncrement);
- InsertNextPacket();
-
- // No limit is set.
- EXPECT_EQ(kExpectedTarget << 8, dm_->TargetLevel());
-
- int kMaxDelayPackets = kExpectedTarget - 2;
- int kMaxDelayMs = kMaxDelayPackets * kFrameSizeMs;
- EXPECT_TRUE(dm_->SetMaximumDelay(kMaxDelayMs));
- IncreaseTime(kTimeIncrement);
- InsertNextPacket();
- EXPECT_EQ(kExpectedTarget * kFrameSizeMs, dm_->least_required_delay_ms());
- EXPECT_EQ(kMaxDelayPackets << 8, dm_->TargetLevel());
-
- // Target level should be at least one packet.
- EXPECT_FALSE(dm_->SetMaximumDelay(kFrameSizeMs - 1));
-}
-
-TEST_F(DelayManagerTest, MinAndRequiredDelay) {
- const int kExpectedTarget = 5;
- const int kTimeIncrement = kExpectedTarget * kFrameSizeMs;
- SetPacketAudioLength(kFrameSizeMs);
- // First packet arrival.
- InsertNextPacket();
- // Second packet arrival.
- // Expect detector update method to be called with inter-arrival time
- // equal to |kExpectedTarget| packets. Return true to indicate peaks found.
- EXPECT_CALL(detector_, Update(kExpectedTarget, _))
- .WillRepeatedly(Return(true));
- EXPECT_CALL(detector_, MaxPeakHeight())
- .WillRepeatedly(Return(kExpectedTarget));
- IncreaseTime(kTimeIncrement);
- InsertNextPacket();
-
- // No limit is applied.
- EXPECT_EQ(kExpectedTarget << 8, dm_->TargetLevel());
-
- int kMinDelayPackets = kExpectedTarget + 2;
- int kMinDelayMs = kMinDelayPackets * kFrameSizeMs;
- dm_->SetMinimumDelay(kMinDelayMs);
- IncreaseTime(kTimeIncrement);
- InsertNextPacket();
- EXPECT_EQ(kExpectedTarget * kFrameSizeMs, dm_->least_required_delay_ms());
- EXPECT_EQ(kMinDelayPackets << 8, dm_->TargetLevel());
-}
-
-TEST_F(DelayManagerTest, Failures) {
- // Wrong sample rate.
- EXPECT_EQ(-1, dm_->Update(0, 0, -1));
- // Wrong packet size.
- EXPECT_EQ(-1, dm_->SetPacketAudioLength(0));
- EXPECT_EQ(-1, dm_->SetPacketAudioLength(-1));
-
- // Minimum delay higher than a maximum delay is not accepted.
- EXPECT_TRUE(dm_->SetMaximumDelay(10));
- EXPECT_FALSE(dm_->SetMinimumDelay(20));
-
- // Maximum delay less than minimum delay is not accepted.
- EXPECT_TRUE(dm_->SetMaximumDelay(100));
- EXPECT_TRUE(dm_->SetMinimumDelay(80));
- EXPECT_FALSE(dm_->SetMaximumDelay(60));
-}
-
-} // namespace webrtc
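For readers following the Q8 arithmetic asserted in the tests above, the standalone sketch below reproduces the buffer-limit expectations from UpdateNormal and UpdatePeakFound. It is illustrative only: the real DelayManager::BufferLimits() lives in delay_manager.cc, which is not part of this hunk, and ExampleBufferLimits is an invented name.

#include <algorithm>
#include <cstdio>

// Q8 buffer limits as implied by the unit test expectations: |lower| is 75%
// of the target level, |higher| is the target level but at least 20 ms above
// |lower|. All levels are in Q8 fractions of packets.
void ExampleBufferLimits(int target_level_q8, int packet_len_ms,
                         int* lower, int* higher) {
  *lower = target_level_q8 * 3 / 4;
  int window_q8 = (20 << 8) / packet_len_ms;  // 20 ms expressed in Q8 packets.
  *higher = std::max(target_level_q8, *lower + window_q8);
}

int main() {
  int lower, higher;
  ExampleBufferLimits(1 << 8, 20, &lower, &higher);  // UpdateNormal case.
  std::printf("%d %d\n", lower, higher);             // Prints "192 448".
  ExampleBufferLimits(5 << 8, 20, &lower, &higher);  // UpdatePeakFound case.
  std::printf("%d %d\n", lower, higher);             // Prints "960 1280".
  return 0;
}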
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.cc
deleted file mode 100644
index fd5b9c08f0e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.cc
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
-
-#include <algorithm> // max
-
-namespace webrtc {
-
-// The DelayPeakDetector keeps track of severe inter-arrival times, called
-// delay peaks. When a peak is observed, the "height" (the time elapsed since
-// the previous packet arrival) and the peak "period" (the time since the last
-// observed peak) are recorded in a vector. When enough peaks have been observed,
-// peak-mode is engaged and the DelayManager asks the DelayPeakDetector for
-// the worst peak height.
-
-DelayPeakDetector::DelayPeakDetector()
- : peak_found_(false),
- peak_detection_threshold_(0),
- peak_period_counter_ms_(-1) {
-}
-
-void DelayPeakDetector::Reset() {
- peak_period_counter_ms_ = -1; // Indicate that next peak is the first.
- peak_found_ = false;
- peak_history_.clear();
-}
-
-// Calculates the threshold in number of packets.
-void DelayPeakDetector::SetPacketAudioLength(int length_ms) {
- if (length_ms > 0) {
- peak_detection_threshold_ = kPeakHeightMs / length_ms;
- }
-}
-
-int DelayPeakDetector::MaxPeakHeight() const {
- int max_height = -1; // Returns -1 for an empty history.
- std::list<Peak>::const_iterator it;
- for (it = peak_history_.begin(); it != peak_history_.end(); ++it) {
- max_height = std::max(max_height, it->peak_height_packets);
- }
- return max_height;
-}
-
-int DelayPeakDetector::MaxPeakPeriod() const {
- int max_period = -1; // Returns -1 for an empty history.
- std::list<Peak>::const_iterator it;
- for (it = peak_history_.begin(); it != peak_history_.end(); ++it) {
- max_period = std::max(max_period, it->period_ms);
- }
- return max_period;
-}
-
-bool DelayPeakDetector::Update(int inter_arrival_time, int target_level) {
- if (inter_arrival_time > target_level + peak_detection_threshold_ ||
- inter_arrival_time > 2 * target_level) {
- // A delay peak is observed.
- if (peak_period_counter_ms_ == -1) {
- // This is the first peak. Reset the period counter.
- peak_period_counter_ms_ = 0;
- } else if (peak_period_counter_ms_ <= kMaxPeakPeriodMs) {
- // This is not the first peak, and the period is valid.
- // Store peak data in the vector.
- Peak peak_data;
- peak_data.period_ms = peak_period_counter_ms_;
- peak_data.peak_height_packets = inter_arrival_time;
- peak_history_.push_back(peak_data);
- while (peak_history_.size() > kMaxNumPeaks) {
- // Delete the oldest data point.
- peak_history_.pop_front();
- }
- peak_period_counter_ms_ = 0;
- } else if (peak_period_counter_ms_ <= 2 * kMaxPeakPeriodMs) {
- // Invalid peak due to too long period. Reset period counter and start
- // looking for next peak.
- peak_period_counter_ms_ = 0;
- } else {
- // More than 2 times the maximum period has elapsed since the last peak
- // was registered. It seems that the network conditions have changed.
- // Reset the peak statistics.
- Reset();
- }
- }
- return CheckPeakConditions();
-}
-
-void DelayPeakDetector::IncrementCounter(int inc_ms) {
- if (peak_period_counter_ms_ >= 0) {
- peak_period_counter_ms_ += inc_ms;
- }
-}
-
-bool DelayPeakDetector::CheckPeakConditions() {
- size_t s = peak_history_.size();
- if (s >= kMinPeaksToTrigger &&
- peak_period_counter_ms_ <= 2 * MaxPeakPeriod()) {
- peak_found_ = true;
- } else {
- peak_found_ = false;
- }
- return peak_found_;
-}
-} // namespace webrtc
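A minimal usage sketch of the DelayPeakDetector removed above (its declaration follows in delay_peak_detector.h). With 30 ms packets the detection threshold becomes 78 / 30 = 2 packets, so an inter-arrival time of 4 packets against a target level of 1 registers as a peak, and two recorded peaks put the detector in peak mode. The program assumes the deleted files are still available to build against; it is not part of the original tree.

#include <cstdio>

#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"

int main() {
  webrtc::DelayPeakDetector detector;
  detector.SetPacketAudioLength(30);  // Threshold: 78 / 30 = 2 packets.

  const int kTargetLevel = 1;
  detector.Update(4, kTargetLevel);   // First peak only starts the counter.
  detector.IncrementCounter(3000);    // 3 s pass, below kMaxPeakPeriodMs.
  detector.Update(4, kTargetLevel);   // Second peak: one history entry.
  detector.IncrementCounter(3000);
  detector.Update(4, kTargetLevel);   // Third peak: two entries, peak mode on.

  std::printf("peak mode=%d height=%d packets period=%d ms\n",
              detector.peak_found(), detector.MaxPeakHeight(),
              detector.MaxPeakPeriod());  // Expected: 1, 4, 3000.
  return 0;
}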
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.h
deleted file mode 100644
index dfdd2537d7d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_PEAK_DETECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_PEAK_DETECTOR_H_
-
-#include <string.h> // size_t
-
-#include <list>
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-
-namespace webrtc {
-
-class DelayPeakDetector {
- public:
- DelayPeakDetector();
- virtual ~DelayPeakDetector() {}
- virtual void Reset();
-
- // Notifies the DelayPeakDetector of how much audio data is carried in each
- // packet.
- virtual void SetPacketAudioLength(int length_ms);
-
- // Returns true if peak-mode is active. That is, delay peaks were observed
- // recently.
- virtual bool peak_found() { return peak_found_; }
-
- // Calculates and returns the maximum delay peak height. Returns -1 if no
- // delay peaks have been observed recently. The unit is number of packets.
- virtual int MaxPeakHeight() const;
-
- // Calculates and returns the maximum delay peak distance in ms.
- // Returns -1 if no delay peaks have been observed recently.
- virtual int MaxPeakPeriod() const;
-
- // Updates the DelayPeakDetector with a new inter-arrival time (in packets)
- // and the current target buffer level (needed to decide if a peak is observed
- // or not). Returns true if peak-mode is active, false if not.
- virtual bool Update(int inter_arrival_time, int target_level);
-
- // Increments the |peak_period_counter_ms_| with |inc_ms|. Only increments
- // the counter if it is non-negative. A negative denotes that no peak has
- // been observed.
- virtual void IncrementCounter(int inc_ms);
-
- private:
- static const size_t kMaxNumPeaks = 8;
- static const size_t kMinPeaksToTrigger = 2;
- static const int kPeakHeightMs = 78;
- static const int kMaxPeakPeriodMs = 10000;
-
- typedef struct {
- int period_ms;
- int peak_height_packets;
- } Peak;
-
- bool CheckPeakConditions();
-
- std::list<Peak> peak_history_;
- bool peak_found_;
- int peak_detection_threshold_;
- int peak_period_counter_ms_;
-
- DISALLOW_COPY_AND_ASSIGN(DelayPeakDetector);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_PEAK_DETECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector_unittest.cc
deleted file mode 100644
index a3b48209cff..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector_unittest.cc
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for DelayPeakDetector class.
-
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
-
-#include "gtest/gtest.h"
-
-namespace webrtc {
-
-TEST(DelayPeakDetector, CreateAndDestroy) {
- DelayPeakDetector* detector = new DelayPeakDetector();
- EXPECT_FALSE(detector->peak_found());
- delete detector;
-}
-
-TEST(DelayPeakDetector, EmptyHistory) {
- DelayPeakDetector detector;
- EXPECT_EQ(-1, detector.MaxPeakHeight());
- EXPECT_EQ(-1, detector.MaxPeakPeriod());
-}
-
-// Inject a series of packet arrivals into the detector. Three of the packets
-// have suffered delays. After the third delay peak, peak-mode is expected to
-// start. This should then continue until it is disengaged due to lack of peaks.
-TEST(DelayPeakDetector, TriggerPeakMode) {
- DelayPeakDetector detector;
- const int kPacketSizeMs = 30;
- detector.SetPacketAudioLength(kPacketSizeMs);
-
- // Load up normal arrival times; 0 ms, 30 ms, 60 ms, 90 ms, ...
- const int kNumPackets = 1000;
- int arrival_times_ms[kNumPackets];
- for (int i = 0; i < kNumPackets; ++i) {
- arrival_times_ms[i] = i * kPacketSizeMs;
- }
-
- // Delay three packets.
- const int kPeakDelayMs = 100;
- // First delay peak.
- arrival_times_ms[100] += kPeakDelayMs;
- // Second delay peak.
- arrival_times_ms[200] += kPeakDelayMs;
- // Third delay peak. Trigger peak-mode after this packet.
- arrival_times_ms[400] += kPeakDelayMs;
- // The second peak period is the longest, 200 packets.
- const int kWorstPeakPeriod = 200 * kPacketSizeMs;
- int peak_mode_start_ms = arrival_times_ms[400];
- // Expect to disengage after no peaks are observed for two peak periods.
- int peak_mode_end_ms = peak_mode_start_ms + 2 * kWorstPeakPeriod;
-
- // Load into detector.
- int time = 0;
- int next = 1; // Start with the second packet to get a proper IAT.
- while (next < kNumPackets) {
- while (next < kNumPackets && arrival_times_ms[next] <= time) {
- int iat_packets = (arrival_times_ms[next] - arrival_times_ms[next - 1]) /
- kPacketSizeMs;
- const int kTargetBufferLevel = 1; // Define peaks to be iat > 2.
- if (time < peak_mode_start_ms || time > peak_mode_end_ms) {
- EXPECT_FALSE(detector.Update(iat_packets, kTargetBufferLevel));
- } else {
- EXPECT_TRUE(detector.Update(iat_packets, kTargetBufferLevel));
- EXPECT_EQ(kWorstPeakPeriod, detector.MaxPeakPeriod());
- EXPECT_EQ(kPeakDelayMs / kPacketSizeMs + 1, detector.MaxPeakHeight());
- }
- ++next;
- }
- detector.IncrementCounter(10);
- time += 10; // Increase time 10 ms.
- }
-}
-
-// Same test as TriggerPeakMode, but with base target buffer level increased to
-// 2, in order to raise the bar for delay peaks to inter-arrival times > 4.
-// The delay pattern gives peak inter-arrival times of only 4, so it should not trigger.
-TEST(DelayPeakDetector, DoNotTriggerPeakMode) {
- DelayPeakDetector detector;
- const int kPacketSizeMs = 30;
- detector.SetPacketAudioLength(kPacketSizeMs);
-
- // Load up normal arrival times; 0 ms, 30 ms, 60 ms, 90 ms, ...
- const int kNumPackets = 1000;
- int arrival_times_ms[kNumPackets];
- for (int i = 0; i < kNumPackets; ++i) {
- arrival_times_ms[i] = i * kPacketSizeMs;
- }
-
- // Delay three packets.
- const int kPeakDelayMs = 100;
- // First delay peak.
- arrival_times_ms[100] += kPeakDelayMs;
- // Second delay peak.
- arrival_times_ms[200] += kPeakDelayMs;
- // Third delay peak.
- arrival_times_ms[400] += kPeakDelayMs;
-
- // Load into detector.
- int time = 0;
- int next = 1; // Start with the second packet to get a proper IAT.
- while (next < kNumPackets) {
- while (next < kNumPackets && arrival_times_ms[next] <= time) {
- int iat_packets = (arrival_times_ms[next] - arrival_times_ms[next - 1]) /
- kPacketSizeMs;
- const int kTargetBufferLevel = 2; // Define peaks to be iat > 4.
- EXPECT_FALSE(detector.Update(iat_packets, kTargetBufferLevel));
- ++next;
- }
- detector.IncrementCounter(10);
- time += 10; // Increase time 10 ms.
- }
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.cc
deleted file mode 100644
index e1aa0e53de7..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.cc
+++ /dev/null
@@ -1,353 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
-
-#include <assert.h>
-#include <string.h> // Access to memset.
-
-#include <algorithm> // Access to min, max.
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-
-namespace webrtc {
-
-// Table of constants used in method DspHelper::ParabolicFit().
-const int16_t DspHelper::kParabolaCoefficients[17][3] = {
- { 120, 32, 64 },
- { 140, 44, 75 },
- { 150, 50, 80 },
- { 160, 57, 85 },
- { 180, 72, 96 },
- { 200, 89, 107 },
- { 210, 98, 112 },
- { 220, 108, 117 },
- { 240, 128, 128 },
- { 260, 150, 139 },
- { 270, 162, 144 },
- { 280, 174, 149 },
- { 300, 200, 160 },
- { 320, 228, 171 },
- { 330, 242, 176 },
- { 340, 257, 181 },
- { 360, 288, 192 } };
-
-// Filter coefficients used when downsampling from the indicated sample rates
-// (8, 16, 32, 48 kHz) to 4 kHz. Coefficients are in Q12. The corresponding Q0
-// values are provided in the comments before each array.
-
-// Q0 values: {0.3, 0.4, 0.3}.
-const int16_t DspHelper::kDownsample8kHzTbl[3] = { 1229, 1638, 1229 };
-
-// Q0 values: {0.15, 0.2, 0.3, 0.2, 0.15}.
-const int16_t DspHelper::kDownsample16kHzTbl[5] = { 614, 819, 1229, 819, 614 };
-
-// Q0 values: {0.1425, 0.1251, 0.1525, 0.1628, 0.1525, 0.1251, 0.1425}.
-const int16_t DspHelper::kDownsample32kHzTbl[7] = {
- 584, 512, 625, 667, 625, 512, 584 };
-
-// Q0 values: {0.2487, 0.0952, 0.1042, 0.1074, 0.1042, 0.0952, 0.2487}.
-const int16_t DspHelper::kDownsample48kHzTbl[7] = {
- 1019, 390, 427, 440, 427, 390, 1019 };
-
-int DspHelper::RampSignal(const int16_t* input,
- size_t length,
- int factor,
- int increment,
- int16_t* output) {
- int factor_q20 = (factor << 6) + 32;
- // TODO(hlundin): Add 32 to factor_q20 when converting back to Q14?
- for (size_t i = 0; i < length; ++i) {
- output[i] = (factor * input[i] + 8192) >> 14;
- factor_q20 += increment;
- factor_q20 = std::max(factor_q20, 0); // Never go negative.
- factor = std::min(factor_q20 >> 6, 16384);
- }
- return factor;
-}
-
-int DspHelper::RampSignal(int16_t* signal,
- size_t length,
- int factor,
- int increment) {
- return RampSignal(signal, length, factor, increment, signal);
-}
-
-int DspHelper::RampSignal(AudioMultiVector* signal,
- size_t start_index,
- size_t length,
- int factor,
- int increment) {
- assert(start_index + length <= signal->Size());
- if (start_index + length > signal->Size()) {
- // Wrong parameters. Do nothing and return the scale factor unaltered.
- return factor;
- }
- int end_factor = 0;
- // Loop over the channels, starting at the same |factor| each time.
- for (size_t channel = 0; channel < signal->Channels(); ++channel) {
- end_factor =
- RampSignal(&(*signal)[channel][start_index], length, factor, increment);
- }
- return end_factor;
-}
-
-void DspHelper::PeakDetection(int16_t* data, int data_length,
- int num_peaks, int fs_mult,
- int* peak_index, int16_t* peak_value) {
- int16_t min_index = 0;
- int16_t max_index = 0;
-
- for (int i = 0; i <= num_peaks - 1; i++) {
- if (num_peaks == 1) {
- // Single peak. The parabola fit assumes that an extra point is
- // available; worst case it gets a zero on the high end of the signal.
- // TODO(hlundin): This can potentially get much worse. It breaks the
- // API contract, that the length of |data| is |data_length|.
- data_length++;
- }
-
- peak_index[i] = WebRtcSpl_MaxIndexW16(data, data_length - 1);
-
- if (i != num_peaks - 1) {
- min_index = std::max(0, peak_index[i] - 2);
- max_index = std::min(data_length - 1, peak_index[i] + 2);
- }
-
- if ((peak_index[i] != 0) && (peak_index[i] != (data_length - 2))) {
- ParabolicFit(&data[peak_index[i] - 1], fs_mult, &peak_index[i],
- &peak_value[i]);
- } else {
- if (peak_index[i] == data_length - 2) {
- if (data[peak_index[i]] > data[peak_index[i] + 1]) {
- ParabolicFit(&data[peak_index[i] - 1], fs_mult, &peak_index[i],
- &peak_value[i]);
- } else if (data[peak_index[i]] <= data[peak_index[i] + 1]) {
- // Linear approximation.
- peak_value[i] = (data[peak_index[i]] + data[peak_index[i] + 1]) >> 1;
- peak_index[i] = (peak_index[i] * 2 + 1) * fs_mult;
- }
- } else {
- peak_value[i] = data[peak_index[i]];
- peak_index[i] = peak_index[i] * 2 * fs_mult;
- }
- }
-
- if (i != num_peaks - 1) {
- memset(&data[min_index], 0,
- sizeof(data[0]) * (max_index - min_index + 1));
- }
- }
-}
-
-void DspHelper::ParabolicFit(int16_t* signal_points, int fs_mult,
- int* peak_index, int16_t* peak_value) {
- uint16_t fit_index[13];
- if (fs_mult == 1) {
- fit_index[0] = 0;
- fit_index[1] = 8;
- fit_index[2] = 16;
- } else if (fs_mult == 2) {
- fit_index[0] = 0;
- fit_index[1] = 4;
- fit_index[2] = 8;
- fit_index[3] = 12;
- fit_index[4] = 16;
- } else if (fs_mult == 4) {
- fit_index[0] = 0;
- fit_index[1] = 2;
- fit_index[2] = 4;
- fit_index[3] = 6;
- fit_index[4] = 8;
- fit_index[5] = 10;
- fit_index[6] = 12;
- fit_index[7] = 14;
- fit_index[8] = 16;
- } else {
- fit_index[0] = 0;
- fit_index[1] = 1;
- fit_index[2] = 3;
- fit_index[3] = 4;
- fit_index[4] = 5;
- fit_index[5] = 7;
- fit_index[6] = 8;
- fit_index[7] = 9;
- fit_index[8] = 11;
- fit_index[9] = 12;
- fit_index[10] = 13;
- fit_index[11] = 15;
- fit_index[12] = 16;
- }
-
- // num = -3 * signal_points[0] + 4 * signal_points[1] - signal_points[2];
- // den = signal_points[0] - 2 * signal_points[1] + signal_points[2];
- int32_t num = (signal_points[0] * -3) + (signal_points[1] * 4)
- - signal_points[2];
- int32_t den = signal_points[0] + (signal_points[1] * -2) + signal_points[2];
- int32_t temp = num * 120;
- int flag = 1;
- int16_t stp = kParabolaCoefficients[fit_index[fs_mult]][0]
- - kParabolaCoefficients[fit_index[fs_mult - 1]][0];
- int16_t strt = (kParabolaCoefficients[fit_index[fs_mult]][0]
- + kParabolaCoefficients[fit_index[fs_mult - 1]][0]) / 2;
- int16_t lmt;
- if (temp < -den * strt) {
- lmt = strt - stp;
- while (flag) {
- if ((flag == fs_mult) || (temp > -den * lmt)) {
- *peak_value = (den * kParabolaCoefficients[fit_index[fs_mult - flag]][1]
- + num * kParabolaCoefficients[fit_index[fs_mult - flag]][2]
- + signal_points[0] * 256) / 256;
- *peak_index = *peak_index * 2 * fs_mult - flag;
- flag = 0;
- } else {
- flag++;
- lmt -= stp;
- }
- }
- } else if (temp > -den * (strt + stp)) {
- lmt = strt + 2 * stp;
- while (flag) {
- if ((flag == fs_mult) || (temp < -den * lmt)) {
- int32_t temp_term_1 =
- den * kParabolaCoefficients[fit_index[fs_mult+flag]][1];
- int32_t temp_term_2 =
- num * kParabolaCoefficients[fit_index[fs_mult+flag]][2];
- int32_t temp_term_3 = signal_points[0] * 256;
- *peak_value = (temp_term_1 + temp_term_2 + temp_term_3) / 256;
- *peak_index = *peak_index * 2 * fs_mult + flag;
- flag = 0;
- } else {
- flag++;
- lmt += stp;
- }
- }
- } else {
- *peak_value = signal_points[1];
- *peak_index = *peak_index * 2 * fs_mult;
- }
-}
-
-int DspHelper::MinDistortion(const int16_t* signal, int min_lag,
- int max_lag, int length,
- int32_t* distortion_value) {
- int best_index = -1;
- int32_t min_distortion = WEBRTC_SPL_WORD32_MAX;
- for (int i = min_lag; i <= max_lag; i++) {
- int32_t sum_diff = 0;
- const int16_t* data1 = signal;
- const int16_t* data2 = signal - i;
- for (int j = 0; j < length; j++) {
- sum_diff += WEBRTC_SPL_ABS_W32(data1[j] - data2[j]);
- }
- // Compare with previous minimum.
- if (sum_diff < min_distortion) {
- min_distortion = sum_diff;
- best_index = i;
- }
- }
- *distortion_value = min_distortion;
- return best_index;
-}
-
-void DspHelper::CrossFade(const int16_t* input1, const int16_t* input2,
- size_t length, int16_t* mix_factor,
- int16_t factor_decrement, int16_t* output) {
- int16_t factor = *mix_factor;
- int16_t complement_factor = 16384 - factor;
- for (size_t i = 0; i < length; i++) {
- output[i] =
- (factor * input1[i] + complement_factor * input2[i] + 8192) >> 14;
- factor -= factor_decrement;
- complement_factor += factor_decrement;
- }
- *mix_factor = factor;
-}
-
-void DspHelper::UnmuteSignal(const int16_t* input, size_t length,
- int16_t* factor, int16_t increment,
- int16_t* output) {
- uint16_t factor_16b = *factor;
- int32_t factor_32b = (static_cast<int32_t>(factor_16b) << 6) + 32;
- for (size_t i = 0; i < length; i++) {
- output[i] = (factor_16b * input[i] + 8192) >> 14;
- factor_32b = std::max(factor_32b + increment, 0);
- factor_16b = std::min(16384, factor_32b >> 6);
- }
- *factor = factor_16b;
-}
-
-void DspHelper::MuteSignal(int16_t* signal, int16_t mute_slope, size_t length) {
- int32_t factor = (16384 << 6) + 32;
- for (size_t i = 0; i < length; i++) {
- signal[i] = ((factor >> 6) * signal[i] + 8192) >> 14;
- factor -= mute_slope;
- }
-}
-
-int DspHelper::DownsampleTo4kHz(const int16_t* input, size_t input_length,
- int output_length, int input_rate_hz,
- bool compensate_delay, int16_t* output) {
- // Set filter parameters depending on input frequency.
- // NOTE: The phase delay values are wrong compared to the true phase delay
- // of the filters. However, the error is preserved (through the +1 term) for
- // consistency.
- const int16_t* filter_coefficients; // Filter coefficients.
- int16_t filter_length; // Number of coefficients.
- int16_t filter_delay; // Phase delay in samples.
- int16_t factor; // Conversion rate (input_rate_hz / 4000).
- switch (input_rate_hz) {
- case 8000: {
- filter_length = 3;
- factor = 2;
- filter_coefficients = kDownsample8kHzTbl;
- filter_delay = 1 + 1;
- break;
- }
- case 16000: {
- filter_length = 5;
- factor = 4;
- filter_coefficients = kDownsample16kHzTbl;
- filter_delay = 2 + 1;
- break;
- }
- case 32000: {
- filter_length = 7;
- factor = 8;
- filter_coefficients = kDownsample32kHzTbl;
- filter_delay = 3 + 1;
- break;
- }
- case 48000: {
- filter_length = 7;
- factor = 12;
- filter_coefficients = kDownsample48kHzTbl;
- filter_delay = 3 + 1;
- break;
- }
- default: {
- assert(false);
- return -1;
- }
- }
-
- if (!compensate_delay) {
- // Disregard delay compensation.
- filter_delay = 0;
- }
-
- // Returns -1 if input signal is too short; 0 otherwise.
- return WebRtcSpl_DownsampleFast(
- &input[filter_length - 1], static_cast<int>(input_length) -
- (filter_length - 1), output, output_length, filter_coefficients,
- filter_length, factor, filter_delay);
-}
-
-} // namespace webrtc
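The Q-format juggling in RampSignal above is easier to follow on a single sample. The standalone sketch below mirrors the same arithmetic (factor in Q14, increment in Q20, promotion by a shift of 6); the numbers are made up for illustration.

#include <algorithm>
#include <cstdint>
#include <cstdio>

int main() {
  int16_t sample = 1000;
  int factor = 8192;                    // 0.5 in Q14.
  int increment = 1 << 20;              // +1.0 per sample, in Q20.

  int factor_q20 = (factor << 6) + 32;  // Promote Q14 to Q20, with rounding.
  int16_t out = (factor * sample + 8192) >> 14;  // 0.5 * 1000 = 500.
  factor_q20 += increment;
  factor_q20 = std::max(factor_q20, 0);          // Never go negative.
  factor = std::min(factor_q20 >> 6, 16384);     // Clamp at 1.0 in Q14.

  std::printf("out=%d next_factor=%d\n", out, factor);  // 500 and 16384.
  return 0;
}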
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.h
deleted file mode 100644
index 60cd995d840..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.h
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DSP_HELPER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DSP_HELPER_H_
-
-#include <string.h> // Access to size_t.
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// This class contains various signal processing functions, all implemented as
-// static methods.
-class DspHelper {
- public:
- // Filter coefficients used when downsampling from the indicated sample rates
- // (8, 16, 32, 48 kHz) to 4 kHz. Coefficients are in Q12.
- static const int16_t kDownsample8kHzTbl[3];
- static const int16_t kDownsample16kHzTbl[5];
- static const int16_t kDownsample32kHzTbl[7];
- static const int16_t kDownsample48kHzTbl[7];
-
- // Constants used to mute and unmute over 5 samples. The coefficients are
- // in Q15.
- static const int kMuteFactorStart8kHz = 27307;
- static const int kMuteFactorIncrement8kHz = -5461;
- static const int kUnmuteFactorStart8kHz = 5461;
- static const int kUnmuteFactorIncrement8kHz = 5461;
- static const int kMuteFactorStart16kHz = 29789;
- static const int kMuteFactorIncrement16kHz = -2979;
- static const int kUnmuteFactorStart16kHz = 2979;
- static const int kUnmuteFactorIncrement16kHz = 2979;
- static const int kMuteFactorStart32kHz = 31208;
- static const int kMuteFactorIncrement32kHz = -1560;
- static const int kUnmuteFactorStart32kHz = 1560;
- static const int kUnmuteFactorIncrement32kHz = 1560;
- static const int kMuteFactorStart48kHz = 31711;
- static const int kMuteFactorIncrement48kHz = -1057;
- static const int kUnmuteFactorStart48kHz = 1057;
- static const int kUnmuteFactorIncrement48kHz = 1057;
-
- // Multiplies the signal with a gradually changing factor.
- // The first sample is multiplied with |factor| (in Q14). For each sample,
- // |factor| is increased (additive) by the |increment| (in Q20), which can
- // be negative. Returns the scale factor after the last increment.
- static int RampSignal(const int16_t* input,
- size_t length,
- int factor,
- int increment,
- int16_t* output);
-
- // Same as above, but with the samples of |signal| being modified in-place.
- static int RampSignal(int16_t* signal,
- size_t length,
- int factor,
- int increment);
-
- // Same as above, but processes |length| samples from |signal|, starting at
- // |start_index|.
- static int RampSignal(AudioMultiVector* signal,
- size_t start_index,
- size_t length,
- int factor,
- int increment);
-
- // Peak detection with parabolic fit. Looks for |num_peaks| maxima in |data|,
- // having length |data_length| and sample rate multiplier |fs_mult|. The peak
- // locations and values are written to the arrays |peak_index| and
- // |peak_value|, respectively. Both arrays must hold at least |num_peaks|
- // elements.
- static void PeakDetection(int16_t* data, int data_length,
- int num_peaks, int fs_mult,
- int* peak_index, int16_t* peak_value);
-
- // Estimates the height and location of a maximum. The three values in the
- // array |signal_points| are used as basis for a parabolic fit, which is then
- // used to find the maximum in an interpolated signal. The |signal_points| are
- // assumed to be from a 4 kHz signal, while the maximum, written to
- // |peak_index| and |peak_value| is given in the full sample rate, as
- // indicated by the sample rate multiplier |fs_mult|.
- static void ParabolicFit(int16_t* signal_points, int fs_mult,
- int* peak_index, int16_t* peak_value);
-
- // Calculates the sum-abs-diff for |signal| when compared to a displaced
- // version of itself. Returns the displacement lag that results in the minimum
- // distortion. The resulting distortion is written to |distortion_value|.
- // The values of |min_lag| and |max_lag| are boundaries for the search.
- static int MinDistortion(const int16_t* signal, int min_lag,
- int max_lag, int length, int32_t* distortion_value);
-
- // Mixes |length| samples from |input1| and |input2| together and writes the
- // result to |output|. The gain for |input1| starts at |mix_factor| (Q14) and
- // is decreased by |factor_decrement| (Q14) for each sample. The gain for
- // |input2| is the complement 16384 - mix_factor.
- static void CrossFade(const int16_t* input1, const int16_t* input2,
- size_t length, int16_t* mix_factor,
- int16_t factor_decrement, int16_t* output);
-
- // Scales |input| with an increasing gain. Applies |factor| (Q14) to the first
- // sample and increases the gain by |increment| (Q20) for each sample. The
- // result is written to |output|. |length| samples are processed.
- static void UnmuteSignal(const int16_t* input, size_t length, int16_t* factor,
- int16_t increment, int16_t* output);
-
- // Starts at unity gain and gradually fades out |signal|. For each sample,
- // the gain is reduced by |mute_slope| (Q20). |length| samples are processed.
- static void MuteSignal(int16_t* signal, int16_t mute_slope, size_t length);
-
- // Downsamples |input| from |input_rate_hz| to 4 kHz sample rate. The input
- // has |input_length| samples, and the method will write |output_length|
- // samples to |output|. Compensates for the phase delay of the downsampling
- // filters if |compensate_delay| is true. Returns -1 if the input is too short
- // to produce |output_length| samples, otherwise 0.
- static int DownsampleTo4kHz(const int16_t* input, size_t input_length,
- int output_length, int input_rate_hz,
- bool compensate_delay, int16_t* output);
-
- private:
- // Table of constants used in method DspHelper::ParabolicFit().
- static const int16_t kParabolaCoefficients[17][3];
-
- DISALLOW_COPY_AND_ASSIGN(DspHelper);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DSP_HELPER_H_
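CrossFade is a helper that the dsp_helper unit tests in this hunk do not exercise, so a small usage sketch may help. It fades from |a| to |b| over four samples with a Q14 mix factor; the buffers and constants are invented for the example, and the include path assumes the deleted header is still present.

#include <cstdint>
#include <cstdio>

#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"

int main() {
  const int16_t a[4] = {1000, 1000, 1000, 1000};
  const int16_t b[4] = {0, 0, 0, 0};
  int16_t out[4];
  int16_t mix_factor = 16384;       // Start fully on |a| (1.0 in Q14).
  const int16_t kDecrement = 4096;  // Hand over 0.25 per sample to |b|.
  webrtc::DspHelper::CrossFade(a, b, 4, &mix_factor, kDecrement, out);
  for (int i = 0; i < 4; ++i) {
    std::printf("%d ", out[i]);     // Prints "1000 750 500 250".
  }
  std::printf("(final mix_factor=%d)\n", mix_factor);  // 0.
  return 0;
}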
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc
deleted file mode 100644
index 852c2ec927c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-TEST(DspHelper, RampSignalArray) {
- static const int kLen = 100;
- int16_t input[kLen];
- int16_t output[kLen];
- // Fill input with 1000.
- for (int i = 0; i < kLen; ++i) {
- input[i] = 1000;
- }
- int start_factor = 0;
- // Ramp from 0 to 1 (in Q14) over the array. Note that |increment| is in Q20,
- // while the factor is in Q14, hence the shift by 6.
- int increment = (16384 << 6) / kLen;
-
- // Test first method.
- int stop_factor = DspHelper::RampSignal(input, kLen, start_factor, increment,
- output);
- EXPECT_EQ(16383, stop_factor); // Almost reach 1 in Q14.
- for (int i = 0; i < kLen; ++i) {
- EXPECT_EQ(1000 * i / kLen, output[i]);
- }
-
- // Test second method. (Note that this modifies |input|.)
- stop_factor = DspHelper::RampSignal(input, kLen, start_factor, increment);
- EXPECT_EQ(16383, stop_factor); // Almost reach 1 in Q14.
- for (int i = 0; i < kLen; ++i) {
- EXPECT_EQ(1000 * i / kLen, input[i]);
- }
-}
-
-TEST(DspHelper, RampSignalAudioMultiVector) {
- static const int kLen = 100;
- static const int kChannels = 5;
- AudioMultiVector input(kChannels, kLen * 3);
- // Fill input with 1000.
- for (int i = 0; i < kLen * 3; ++i) {
- for (int channel = 0; channel < kChannels; ++channel) {
- input[channel][i] = 1000;
- }
- }
- // We want to start ramping at |start_index| and keep ramping for |kLen|
- // samples.
- int start_index = kLen;
- int start_factor = 0;
- // Ramp from 0 to 1 (in Q14) in |kLen| samples. Note that |increment| is in
- // Q20, while the factor is in Q14, hence the shift by 6.
- int increment = (16384 << 6) / kLen;
-
- int stop_factor = DspHelper::RampSignal(&input, start_index, kLen,
- start_factor, increment);
- EXPECT_EQ(16383, stop_factor); // Almost reach 1 in Q14.
- // Verify that the first |kLen| samples are left untouched.
- int i;
- for (i = 0; i < kLen; ++i) {
- for (int channel = 0; channel < kChannels; ++channel) {
- EXPECT_EQ(1000, input[channel][i]);
- }
- }
- // Verify that the next block of |kLen| samples are ramped.
- for (; i < 2 * kLen; ++i) {
- for (int channel = 0; channel < kChannels; ++channel) {
- EXPECT_EQ(1000 * (i - kLen) / kLen, input[channel][i]);
- }
- }
- // Verify the last |kLen| samples are left untouched.
- for (; i < 3 * kLen; ++i) {
- for (int channel = 0; channel < kChannels; ++channel) {
- EXPECT_EQ(1000, input[channel][i]);
- }
- }
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.cc
deleted file mode 100644
index 1c81ad940c4..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.cc
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"
-
-#include <assert.h>
-#include <algorithm> // max
-
-// Modify the code to obtain backwards bit-exactness. Once bit-exactness is no
-// longer required, this #define should be removed (and the code that it
-// enables).
-#define LEGACY_BITEXACT
-
-namespace webrtc {
-
-// The ParseEvent method parses 4 bytes from |payload| according to this format
-// from RFC 4733:
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | event |E|R| volume | duration |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//
-// Legend (adapted from RFC 4733)
-// - event: The event field is a number between 0 and 255 identifying a
-// specific telephony event. The buffer will not accept any event
-// numbers larger than 15.
-// - E: If set to a value of one, the "end" bit indicates that this
-// packet contains the end of the event. For long-lasting events
-// that have to be split into segments, only the final packet for
-// the final segment will have the E bit set.
-// - R: Reserved.
-// - volume: For DTMF digits and other events representable as tones, this
-// field describes the power level of the tone, expressed in dBm0
-// after dropping the sign. Power levels range from 0 to -63 dBm0.
-// Thus, larger values denote lower volume. The buffer discards
-// values larger than 36 (i.e., lower than -36 dBm0).
-// - duration: The duration field indicates the duration of the event or segment
-// being reported, in timestamp units, expressed as an unsigned
-// integer in network byte order. For a non-zero value, the event
-// or segment began at the instant identified by the RTP timestamp
-// and has so far lasted as long as indicated by this parameter.
-// The event may or may not have ended. If the event duration
-// exceeds the maximum representable by the duration field, the
-// event is split into several contiguous segments. The buffer will
-// discard zero-duration events.
-//
-int DtmfBuffer::ParseEvent(uint32_t rtp_timestamp,
- const uint8_t* payload,
- int payload_length_bytes,
- DtmfEvent* event) {
- if (!payload || !event) {
- return kInvalidPointer;
- }
- if (payload_length_bytes < 4) {
- return kPayloadTooShort;
- }
-
- event->event_no = payload[0];
- event->end_bit = ((payload[1] & 0x80) != 0);
- event->volume = (payload[1] & 0x3F);
- event->duration = payload[2] << 8 | payload[3];
- event->timestamp = rtp_timestamp;
- return kOK;
-}
-
-// Inserts a DTMF event into the buffer. The event should be parsed from the
-// bit stream using the ParseEvent method above before inserting it in the
-// buffer.
-// DTMF events can be quite long, and in most cases the duration of the event
-// is not known when the first packet describing it is sent. To deal with that,
-// the RFC 4733 specifies that multiple packets are sent for one and the same
-// event as it is being created (typically, as the user is pressing the key).
-// These packets will all share the same start timestamp and event number,
-// while the duration will be the cumulative duration from the start. When
-// inserting a new event, the InsertEvent method tries to find a matching event
-// already in the buffer. If so, the new event is simply merged with the
-// existing one.
-int DtmfBuffer::InsertEvent(const DtmfEvent& event) {
- if (event.event_no < 0 || event.event_no > 15 ||
- event.volume < 0 || event.volume > 36 ||
- event.duration <= 0 || event.duration > 65535) {
- return kInvalidEventParameters;
- }
- DtmfList::iterator it = buffer_.begin();
- while (it != buffer_.end()) {
- if (MergeEvents(it, event)) {
- // A matching event was found and the new event was merged.
- return kOK;
- }
- ++it;
- }
- buffer_.push_back(event);
- // Sort the buffer using CompareEvents to rank the events.
- buffer_.sort(CompareEvents);
- return kOK;
-}
-
-bool DtmfBuffer::GetEvent(uint32_t current_timestamp, DtmfEvent* event) {
- DtmfList::iterator it = buffer_.begin();
- while (it != buffer_.end()) {
- // |event_end| is an estimate of where the current event ends. If the end
- // bit is set, we know that the event ends at |timestamp| + |duration|.
- uint32_t event_end = it->timestamp + it->duration;
-#ifdef LEGACY_BITEXACT
- bool next_available = false;
-#endif
- if (!it->end_bit) {
- // If the end bit is not set, we allow extrapolation of the event for
- // some time.
- event_end += max_extrapolation_samples_;
- DtmfList::iterator next = it;
- ++next;
- if (next != buffer_.end()) {
- // If there is a next event in the buffer, we will not extrapolate over
- // the start of that new event.
- event_end = std::min(event_end, next->timestamp);
-#ifdef LEGACY_BITEXACT
- next_available = true;
-#endif
- }
- }
- if (current_timestamp >= it->timestamp
- && current_timestamp <= event_end) { // TODO(hlundin): Change to <.
- // Found a matching event.
- if (event) {
- event->event_no = it->event_no;
- event->end_bit = it->end_bit;
- event->volume = it->volume;
- event->duration = it->duration;
- event->timestamp = it->timestamp;
- }
-#ifdef LEGACY_BITEXACT
- if (it->end_bit &&
- current_timestamp + frame_len_samples_ >= event_end) {
- // We are done playing this. Erase the event.
- buffer_.erase(it);
- }
-#endif
- return true;
- } else if (current_timestamp > event_end) { // TODO(hlundin): Change to >=.
- // Erase old event. Operation returns a valid pointer to the next element
- // in the list.
-#ifdef LEGACY_BITEXACT
- if (!next_available) {
- if (event) {
- event->event_no = it->event_no;
- event->end_bit = it->end_bit;
- event->volume = it->volume;
- event->duration = it->duration;
- event->timestamp = it->timestamp;
- }
- it = buffer_.erase(it);
- return true;
- } else {
- it = buffer_.erase(it);
- }
-#else
- it = buffer_.erase(it);
-#endif
- } else {
- ++it;
- }
- }
- return false;
-}
-
-int DtmfBuffer::SetSampleRate(int fs_hz) {
- if (fs_hz != 8000 &&
- fs_hz != 16000 &&
- fs_hz != 32000 &&
- fs_hz != 48000) {
- return kInvalidSampleRate;
- }
- max_extrapolation_samples_ = 7 * fs_hz / 100;
- frame_len_samples_ = fs_hz / 100;
- return kOK;
-}
-
-// The method returns true if the two events are considered to be the same.
-// They are defined as equal if they share the same timestamp and event number.
-// The special case with long-lasting events that have to be split into segments
-// is not handled in this method. These will be treated as separate events in
-// the buffer.
-bool DtmfBuffer::SameEvent(const DtmfEvent& a, const DtmfEvent& b) {
- return (a.event_no == b.event_no) && (a.timestamp == b.timestamp);
-}
-
-bool DtmfBuffer::MergeEvents(DtmfList::iterator it, const DtmfEvent& event) {
- if (SameEvent(*it, event)) {
- if (!it->end_bit) {
- // Do not extend the duration of an event for which the end bit was
- // already received.
- it->duration = std::max(event.duration, it->duration);
- }
- if (event.end_bit) {
- it->end_bit = true;
- }
- return true;
- } else {
- return false;
- }
-}
-
-// Returns true if |a| goes before |b| in the sorting order ("|a| < |b|").
-// The events are ranked using their start timestamp (taking wrap-around into
-// account). In the unlikely situation that two events share the same start
-// timestamp, the event number is used to rank the two. Note that packets
-// that belong to the same event, and therefore share the same start
-// timestamp, have already been merged before the sort method is called.
-bool DtmfBuffer::CompareEvents(const DtmfEvent& a, const DtmfEvent& b) {
- if (a.timestamp == b.timestamp) {
- return a.event_no < b.event_no;
- }
- // Take wrap-around into account.
- return (static_cast<uint32_t>(b.timestamp - a.timestamp) < 0xFFFFFFFF / 2);
-}
-} // namespace webrtc
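The RFC 4733 bit layout documented ahead of ParseEvent above unpacks as shown below. The standalone sketch repeats the same shifts and masks on an invented payload so the field boundaries are easy to check by hand.

#include <cstdint>
#include <cstdio>

int main() {
  // event = 7, E = 1, volume = 10 (-10 dBm0), duration = 0x0320 = 800.
  const uint8_t payload[4] = {0x07, 0x8A, 0x03, 0x20};

  int event_no = payload[0];
  bool end_bit = (payload[1] & 0x80) != 0;
  int volume = payload[1] & 0x3F;
  int duration = (payload[2] << 8) | payload[3];

  std::printf("event=%d end=%d volume=%d duration=%d\n",
              event_no, end_bit, volume, duration);
  // Prints "event=7 end=1 volume=10 duration=800".
  return 0;
}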
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.h
deleted file mode 100644
index d08b64f4921..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.h
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_BUFFER_H_
-
-#include <list>
-#include <string> // size_t
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct DtmfEvent {
- uint32_t timestamp;
- int event_no;
- int volume;
- int duration;
- bool end_bit;
-
- // Constructors
- DtmfEvent()
- : timestamp(0),
- event_no(0),
- volume(0),
- duration(0),
- end_bit(false) {
- }
- DtmfEvent(uint32_t ts, int ev, int vol, int dur, bool end)
- : timestamp(ts),
- event_no(ev),
- volume(vol),
- duration(dur),
- end_bit(end) {
- }
-};
-
-// This is the buffer holding DTMF events while waiting for them to be played.
-class DtmfBuffer {
- public:
- enum BufferReturnCodes {
- kOK = 0,
- kInvalidPointer,
- kPayloadTooShort,
- kInvalidEventParameters,
- kInvalidSampleRate
- };
-
- // Set up the buffer for use at sample rate |fs_hz|.
- explicit DtmfBuffer(int fs_hz) {
- SetSampleRate(fs_hz);
- }
-
- virtual ~DtmfBuffer() {}
-
- // Flushes the buffer.
- virtual void Flush() { buffer_.clear(); }
-
- // Static method to parse 4 bytes from |payload| as a DTMF event (RFC 4733)
- // and write the parsed information into the struct |event|. Input variable
- // |rtp_timestamp| is simply copied into the struct.
- static int ParseEvent(uint32_t rtp_timestamp,
- const uint8_t* payload,
- int payload_length_bytes,
- DtmfEvent* event);
-
- // Inserts |event| into the buffer. The method looks for a matching event and
- // merges the two if a match is found.
- virtual int InsertEvent(const DtmfEvent& event);
-
- // Checks if a DTMF event should be played at time |current_timestamp|. If so,
- // the method returns true; otherwise false. The parameters of the event to
- // play will be written to |event|.
- virtual bool GetEvent(uint32_t current_timestamp, DtmfEvent* event);
-
- // Number of events in the buffer.
- virtual size_t Length() const { return buffer_.size(); }
-
- virtual bool Empty() const { return buffer_.empty(); }
-
- // Set a new sample rate.
- virtual int SetSampleRate(int fs_hz);
-
- private:
- typedef std::list<DtmfEvent> DtmfList;
-
- int max_extrapolation_samples_;
- int frame_len_samples_; // TODO(hlundin): Remove this later.
-
- // Compares two events and returns true if they are the same.
- static bool SameEvent(const DtmfEvent& a, const DtmfEvent& b);
-
- // Merges |event| to the event pointed out by |it|. The method checks that
- // the two events are the same (using the SameEvent method), and merges them
- // if that was the case, returning true. If the events are not the same, false
- // is returned.
- bool MergeEvents(DtmfList::iterator it, const DtmfEvent& event);
-
- // Method used by the sort algorithm to rank events in the buffer.
- static bool CompareEvents(const DtmfEvent& a, const DtmfEvent& b);
-
- DtmfList buffer_;
-
- DISALLOW_COPY_AND_ASSIGN(DtmfBuffer);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_BUFFER_H_
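A minimal end-to-end sketch of the DtmfBuffer API declared above: parse a payload, insert it, and poll for playback. The payload bytes and timestamps are invented, and the include path assumes the deleted header is still present; the unit tests that follow exercise the same flow, including the LEGACY_BITEXACT corner cases, in much more detail.

#include <cstdint>
#include <cstdio>

#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"

int main() {
  webrtc::DtmfBuffer buffer(8000);                      // 8 kHz sample rate.
  const uint8_t payload[4] = {0x05, 0x8A, 0x03, 0x20};  // Event 5, end bit set.
  webrtc::DtmfEvent event;
  if (webrtc::DtmfBuffer::ParseEvent(0x1000, payload, 4, &event) !=
      webrtc::DtmfBuffer::kOK) {
    return 1;
  }
  buffer.InsertEvent(event);
  webrtc::DtmfEvent to_play;
  if (buffer.GetEvent(0x1000 + 40, &to_play)) {         // 40 samples in.
    std::printf("play event %d at -%d dBm0 for %d samples\n",
                to_play.event_no, to_play.volume, to_play.duration);
  }
  return 0;
}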
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer_unittest.cc
deleted file mode 100644
index 0b5ed65b8e0..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer_unittest.cc
+++ /dev/null
@@ -1,307 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"
-
-#ifdef WIN32
-#include <winsock2.h> // ntohl()
-#else
-#include <arpa/inet.h> // ntohl()
-#endif
-
-#include <iostream>
-
-#include "gtest/gtest.h"
-
-// Modify the tests so that they pass with the modifications done to DtmfBuffer
-// for backwards bit-exactness. Once bit-exactness is no longer required, this
-// #define should be removed (and the code that it enables).
-#define LEGACY_BITEXACT
-
-namespace webrtc {
-
-static int sample_rate_hz = 8000;
-
-static uint32_t MakeDtmfPayload(int event, bool end, int volume, int duration) {
- uint32_t payload = 0;
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | event |E|R| volume | duration |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- payload |= (event & 0x00FF) << 24;
- payload |= (end ? 0x00800000 : 0x00000000);
- payload |= (volume & 0x003F) << 16;
- payload |= (duration & 0xFFFF);
- payload = ntohl(payload);
- return payload;
-}
-
-static bool EqualEvents(const DtmfEvent& a,
- const DtmfEvent& b) {
- return (a.duration == b.duration
- && a.end_bit == b.end_bit
- && a.event_no == b.event_no
- && a.timestamp == b.timestamp
- && a.volume == b.volume);
-}
-
-TEST(DtmfBuffer, CreateAndDestroy) {
- DtmfBuffer* buffer = new DtmfBuffer(sample_rate_hz);
- delete buffer;
-}
-
-// Test the event parser.
-TEST(DtmfBuffer, ParseEvent) {
- int event_no = 7;
- bool end_bit = true;
- int volume = 17;
- int duration = 4711;
- uint32_t timestamp = 0x12345678;
- uint32_t payload = MakeDtmfPayload(event_no, end_bit, volume, duration);
- uint8_t* payload_ptr = reinterpret_cast<uint8_t*>(&payload);
- DtmfEvent event;
- EXPECT_EQ(DtmfBuffer::kOK,
- DtmfBuffer::ParseEvent(timestamp, payload_ptr, sizeof(payload),
- &event));
- EXPECT_EQ(duration, event.duration);
- EXPECT_EQ(end_bit, event.end_bit);
- EXPECT_EQ(event_no, event.event_no);
- EXPECT_EQ(timestamp, event.timestamp);
- EXPECT_EQ(volume, event.volume);
-
- EXPECT_EQ(DtmfBuffer::kInvalidPointer,
- DtmfBuffer::ParseEvent(timestamp, NULL, 4, &event));
-
- EXPECT_EQ(DtmfBuffer::kInvalidPointer,
- DtmfBuffer::ParseEvent(timestamp, payload_ptr, 4, NULL));
-
- EXPECT_EQ(DtmfBuffer::kPayloadTooShort,
- DtmfBuffer::ParseEvent(timestamp, payload_ptr, 3, &event));
-}
-
-TEST(DtmfBuffer, SimpleInsertAndGet) {
- int event_no = 7;
- bool end_bit = true;
- int volume = 17;
- int duration = 4711;
- uint32_t timestamp = 0x12345678;
- DtmfEvent event(timestamp, event_no, volume, duration, end_bit);
- DtmfBuffer buffer(sample_rate_hz);
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event));
- EXPECT_EQ(1u, buffer.Length());
- EXPECT_FALSE(buffer.Empty());
- DtmfEvent out_event;
- // Too early to get event.
- EXPECT_FALSE(buffer.GetEvent(timestamp - 10, &out_event));
- EXPECT_EQ(1u, buffer.Length());
- EXPECT_FALSE(buffer.Empty());
- // Get the event at its starting timestamp.
- EXPECT_TRUE(buffer.GetEvent(timestamp, &out_event));
- EXPECT_TRUE(EqualEvents(event, out_event));
- EXPECT_EQ(1u, buffer.Length());
- EXPECT_FALSE(buffer.Empty());
- // Get the event some time into the event.
- EXPECT_TRUE(buffer.GetEvent(timestamp + duration / 2, &out_event));
- EXPECT_TRUE(EqualEvents(event, out_event));
- EXPECT_EQ(1u, buffer.Length());
- EXPECT_FALSE(buffer.Empty());
- // Give a "current" timestamp after the event has ended.
-#ifdef LEGACY_BITEXACT
- EXPECT_TRUE(buffer.GetEvent(timestamp + duration + 10, &out_event));
-#endif
- EXPECT_FALSE(buffer.GetEvent(timestamp + duration + 10, &out_event));
- EXPECT_EQ(0u, buffer.Length());
- EXPECT_TRUE(buffer.Empty());
-}
-
-TEST(DtmfBuffer, MergingPackets) {
- int event_no = 0;
- bool end_bit = false;
- int volume = 17;
- int duration = 80;
- uint32_t timestamp = 0x12345678;
- DtmfEvent event(timestamp, event_no, volume, duration, end_bit);
- DtmfBuffer buffer(sample_rate_hz);
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event));
-
- event.duration += 80;
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event));
-
- event.duration += 80;
- event.end_bit = true;
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event));
-
- EXPECT_EQ(1u, buffer.Length());
-
- DtmfEvent out_event;
- EXPECT_TRUE(buffer.GetEvent(timestamp, &out_event));
- EXPECT_TRUE(EqualEvents(event, out_event));
-}
-
-// This test case inserts one shorter event completely overlapped by one longer
-// event. The expected outcome is that only the longer event is played.
-TEST(DtmfBuffer, OverlappingEvents) {
- int event_no = 0;
- bool end_bit = true;
- int volume = 1;
- int duration = 80;
- uint32_t timestamp = 0x12345678 + 80;
- DtmfEvent short_event(timestamp, event_no, volume, duration, end_bit);
- DtmfBuffer buffer(sample_rate_hz);
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(short_event));
-
- event_no = 10;
- end_bit = false;
- timestamp = 0x12345678;
- DtmfEvent long_event(timestamp, event_no, volume, duration, end_bit);
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(long_event));
-
- long_event.duration += 80;
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(long_event));
-
- long_event.duration += 80;
- long_event.end_bit = true;
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(long_event));
-
- EXPECT_EQ(2u, buffer.Length());
-
- DtmfEvent out_event;
- // Expect to get the long event.
- EXPECT_TRUE(buffer.GetEvent(timestamp, &out_event));
- EXPECT_TRUE(EqualEvents(long_event, out_event));
- // Expect no more events.
-#ifdef LEGACY_BITEXACT
- EXPECT_TRUE(buffer.GetEvent(timestamp + long_event.duration + 10,
- &out_event));
- EXPECT_TRUE(EqualEvents(long_event, out_event));
- EXPECT_TRUE(buffer.GetEvent(timestamp + long_event.duration + 10,
- &out_event));
- EXPECT_TRUE(EqualEvents(short_event, out_event));
-#else
- EXPECT_FALSE(buffer.GetEvent(timestamp + long_event.duration + 10,
- &out_event));
-#endif
- EXPECT_TRUE(buffer.Empty());
-}
-
-TEST(DtmfBuffer, ExtrapolationTime) {
- int event_no = 0;
- bool end_bit = false;
- int volume = 1;
- int duration = 80;
- uint32_t timestamp = 0x12345678;
- DtmfEvent event1(timestamp, event_no, volume, duration, end_bit);
- DtmfBuffer buffer(sample_rate_hz);
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event1));
- EXPECT_EQ(1u, buffer.Length());
-
- DtmfEvent out_event;
- // Get the event at the start.
- EXPECT_TRUE(buffer.GetEvent(timestamp, &out_event));
- EXPECT_TRUE(EqualEvents(event1, out_event));
- // Also get the event 100 samples after the end of the event (since we're
- // missing the end bit).
- uint32_t timestamp_now = timestamp + duration + 100;
- EXPECT_TRUE(buffer.GetEvent(timestamp_now, &out_event));
- EXPECT_TRUE(EqualEvents(event1, out_event));
- // Insert another event starting back-to-back with the previous event.
- timestamp += duration;
- event_no = 1;
- DtmfEvent event2(timestamp, event_no, volume, duration, end_bit);
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event2));
- EXPECT_EQ(2u, buffer.Length());
- // Now we expect to get the new event when supplying |timestamp_now|.
- EXPECT_TRUE(buffer.GetEvent(timestamp_now, &out_event));
- EXPECT_TRUE(EqualEvents(event2, out_event));
-  // Expect the first event to be erased now.
- EXPECT_EQ(1u, buffer.Length());
- // Move |timestamp_now| to more than 560 samples after the end of the second
- // event. Expect that event to be erased.
- timestamp_now = timestamp + duration + 600;
-#ifdef LEGACY_BITEXACT
- EXPECT_TRUE(buffer.GetEvent(timestamp_now, &out_event));
-#endif
- EXPECT_FALSE(buffer.GetEvent(timestamp_now, &out_event));
- EXPECT_TRUE(buffer.Empty());
-}
-
-TEST(DtmfBuffer, TimestampWraparound) {
- int event_no = 0;
- bool end_bit = true;
- int volume = 1;
- int duration = 80;
- uint32_t timestamp1 = 0xFFFFFFFF - duration;
- DtmfEvent event1(timestamp1, event_no, volume, duration, end_bit);
- uint32_t timestamp2 = 0;
- DtmfEvent event2(timestamp2, event_no, volume, duration, end_bit);
- DtmfBuffer buffer(sample_rate_hz);
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event1));
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event2));
- EXPECT_EQ(2u, buffer.Length());
- DtmfEvent out_event;
- EXPECT_TRUE(buffer.GetEvent(timestamp1, &out_event));
- EXPECT_TRUE(EqualEvents(event1, out_event));
-#ifdef LEGACY_BITEXACT
- EXPECT_EQ(1u, buffer.Length());
-#else
- EXPECT_EQ(2u, buffer.Length());
-#endif
-
- buffer.Flush();
- // Reverse the insert order. Expect same results.
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event2));
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event1));
- EXPECT_EQ(2u, buffer.Length());
- EXPECT_TRUE(buffer.GetEvent(timestamp1, &out_event));
- EXPECT_TRUE(EqualEvents(event1, out_event));
-#ifdef LEGACY_BITEXACT
- EXPECT_EQ(1u, buffer.Length());
-#else
- EXPECT_EQ(2u, buffer.Length());
-#endif
-}
-
-TEST(DtmfBuffer, InvalidEvents) {
- int event_no = 0;
- bool end_bit = true;
- int volume = 1;
- int duration = 80;
- uint32_t timestamp = 0x12345678;
- DtmfEvent event(timestamp, event_no, volume, duration, end_bit);
- DtmfBuffer buffer(sample_rate_hz);
-
- // Invalid event number.
- event.event_no = -1;
- EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
- event.event_no = 16;
- EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
- event.event_no = 0; // Valid value;
-
- // Invalid volume.
- event.volume = -1;
- EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
- event.volume = 37;
- EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
- event.volume = 0; // Valid value;
-
- // Invalid duration.
- event.duration = -1;
- EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
- event.duration = 0;
- EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
- event.duration = 0xFFFF + 1;
- EXPECT_EQ(DtmfBuffer::kInvalidEventParameters, buffer.InsertEvent(event));
- event.duration = 1; // Valid value;
-
- // Finish with a valid event, just to verify that all is ok.
- EXPECT_EQ(DtmfBuffer::kOK, buffer.InsertEvent(event));
-}
-} // namespace webrtc
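
Side note on the payload layout exercised above: MakeDtmfPayload() packs an RFC 2833/4733-style telephone-event block (8-bit event, E and R bits, 6-bit volume, 16-bit duration) and converts it to network byte order. The standalone sketch below is not part of the deleted file; it simply unpacks such a 4-byte payload directly from its bytes, and the ParsedDtmf/ParsePayload names are invented for illustration only.

#include <stdint.h>
#include <stdio.h>

struct ParsedDtmf {
  int event;
  bool end_bit;
  int volume;
  int duration;
};

// Unpack the network-order payload per the bit diagram in the test above:
// byte 0 = event, byte 1 = E | R | volume, bytes 2-3 = duration (big endian).
static ParsedDtmf ParsePayload(const uint8_t* p) {
  ParsedDtmf out;
  out.event = p[0];
  out.end_bit = (p[1] & 0x80) != 0;
  out.volume = p[1] & 0x3F;
  out.duration = (p[2] << 8) | p[3];
  return out;
}

int main() {
  // Payload for event 7, end bit set, volume 17, duration 4711 (0x1267).
  const uint8_t payload[4] = {0x07, 0x80 | 17, 0x12, 0x67};
  ParsedDtmf d = ParsePayload(payload);
  printf("event=%d end=%d volume=%d duration=%d\n",
         d.event, d.end_bit, d.volume, d.duration);  // Prints 7 1 17 4711.
  return 0;
}
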
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc
deleted file mode 100644
index c85534e9b7f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This class provides a generator for DTMF tones. The tone generation is based
-// on a sinusoid recursion. Each sinusoid is generated using a recursion
-// formula; x[n] = a * x[n-1] - x[n-2], where the coefficient
-// a = 2*cos(2*pi*f/fs). The recursion is started with x[-1] = 0 and
-// x[-2] = sin(2*pi*f/fs). (Note that with this initialization, the resulting
-// sinusoid gets a "negative" rotation; x[n] = sin(-2*pi*f/fs * n + phi), but
-// kept this way due to historical reasons.)
-// TODO(hlundin): Change to positive rotation?
-//
-// Each key on the telephone keypad corresponds to an "event", 0-15. Each event
-// is mapped to a tone pair, with a low and a high frequency. There are four
-// low and four high frequencies, each corresponding to a row and column,
-// respectively, on the keypad as illustrated below.
-//
-// 1209 Hz 1336 Hz 1477 Hz 1633 Hz
-// 697 Hz 1 2 3 12
-// 770 Hz 4 5 6 13
-// 852 Hz 7 8 9 14
-// 941 Hz 10 0 11 15
-
-#include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h"
-
-#include <assert.h>
-
-namespace webrtc {
-
-// The filter coefficient a = 2*cos(2*pi*f/fs) for the low frequency tone, for
-// sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0 through 15.
-// Values are in Q14.
-const int DtmfToneGenerator::kCoeff1[4][16] = {
- { 24219, 27980, 27980, 27980, 26956, 26956, 26956, 25701, 25701, 25701,
- 24219, 24219, 27980, 26956, 25701, 24219 },
- { 30556, 31548, 31548, 31548, 31281, 31281, 31281, 30951, 30951, 30951,
- 30556, 30556, 31548, 31281, 30951, 30556 },
- { 32210, 32462, 32462, 32462, 32394, 32394, 32394, 32311, 32311, 32311,
- 32210, 32210, 32462, 32394, 32311, 32210 },
- { 32520, 32632, 32632, 32632, 32602, 32602, 32602, 32564, 32564, 32564,
- 32520, 32520, 32632, 32602, 32564, 32520 } };
-
-// The filter coefficient a = 2*cos(2*pi*f/fs) for the high frequency tone, for
-// sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0 through 15.
-// Values are in Q14.
-const int DtmfToneGenerator::kCoeff2[4][16] = {
- { 16325, 19073, 16325, 13085, 19073, 16325, 13085, 19073, 16325, 13085,
- 19073, 13085, 9315, 9315, 9315, 9315},
- { 28361, 29144, 28361, 27409, 29144, 28361, 27409, 29144, 28361, 27409,
- 29144, 27409, 26258, 26258, 26258, 26258},
- { 31647, 31849, 31647, 31400, 31849, 31647, 31400, 31849, 31647, 31400,
- 31849, 31400, 31098, 31098, 31098, 31098},
- { 32268, 32359, 32268, 32157, 32359, 32268, 32157, 32359, 32268, 32157,
- 32359, 32157, 32022, 32022, 32022, 32022} };
-
-// The initialization value x[-2] = sin(2*pi*f/fs) for the low frequency tone,
-// for sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0-15.
-// Values are in Q14.
-const int DtmfToneGenerator::kInitValue1[4][16] = {
- { 11036, 8528, 8528, 8528, 9315, 9315, 9315, 10163, 10163, 10163, 11036,
- 11036, 8528, 9315, 10163, 11036},
- { 5918, 4429, 4429, 4429, 4879, 4879, 4879, 5380, 5380, 5380, 5918, 5918,
- 4429, 4879, 5380, 5918},
- { 3010, 2235, 2235, 2235, 2468, 2468, 2468, 2728, 2728, 2728, 3010, 3010,
- 2235, 2468, 2728, 3010},
- { 2013, 1493, 1493, 1493, 1649, 1649, 1649, 1823, 1823, 1823, 2013, 2013,
- 1493, 1649, 1823, 2013 } };
-
-// The initialization value x[-2] = sin(2*pi*f/fs) for the high frequency tone,
-// for sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0-15.
-// Values are in Q14.
-const int DtmfToneGenerator::kInitValue2[4][16] = {
- { 14206, 13323, 14206, 15021, 13323, 14206, 15021, 13323, 14206, 15021,
- 13323, 15021, 15708, 15708, 15708, 15708},
- { 8207, 7490, 8207, 8979, 7490, 8207, 8979, 7490, 8207, 8979, 7490, 8979,
- 9801, 9801, 9801, 9801},
- { 4249, 3853, 4249, 4685, 3853, 4249, 4685, 3853, 4249, 4685, 3853, 4685,
- 5164, 5164, 5164, 5164},
- { 2851, 2582, 2851, 3148, 2582, 2851, 3148, 2582, 2851, 3148, 2582, 3148,
- 3476, 3476, 3476, 3476} };
-
-// Amplitude multipliers for volume values 0 through 36, corresponding to
-// 0 dBm0 through -36 dBm0. Values are in Q14.
-const int DtmfToneGenerator::kAmplitude[37] = {
- 16141, 14386, 12821, 11427, 10184, 9077, 8090, 7210, 6426, 5727, 5104, 4549,
- 4054, 3614, 3221, 2870, 2558, 2280, 2032, 1811, 1614, 1439, 1282, 1143,
- 1018, 908, 809, 721, 643, 573, 510, 455, 405, 361, 322, 287, 256 };
-
-// Constructor.
-DtmfToneGenerator::DtmfToneGenerator()
- : initialized_(false),
- coeff1_(0),
- coeff2_(0),
- amplitude_(0) {
-}
-
-// Initialize the DTMF generator with sample rate fs Hz (8000, 16000, 32000,
-// 48000), event (0-15) and attenuation (0-36 dB).
-// Returns 0 on success, otherwise an error code.
-int DtmfToneGenerator::Init(int fs, int event, int attenuation) {
- initialized_ = false;
- int fs_index;
- if (fs == 8000) {
- fs_index = 0;
- } else if (fs == 16000) {
- fs_index = 1;
- } else if (fs == 32000) {
- fs_index = 2;
- } else if (fs == 48000) {
- fs_index = 3;
- } else {
- assert(false);
-    fs_index = 1;  // Default to 16000 Hz.
- }
-
- if (event < 0 || event > 15) {
- return kParameterError; // Invalid event number.
- }
-
- if (attenuation < 0 || attenuation > 36) {
- return kParameterError; // Invalid attenuation.
- }
-
- // Look up oscillator coefficient for low and high frequencies.
- coeff1_ = kCoeff1[fs_index][event];
- coeff2_ = kCoeff2[fs_index][event];
- // Look up amplitude multiplier.
- amplitude_ = kAmplitude[attenuation];
- // Initialize sample history.
- sample_history1_[0] = kInitValue1[fs_index][event];
- sample_history1_[1] = 0;
- sample_history2_[0] = kInitValue2[fs_index][event];
- sample_history2_[1] = 0;
-
- initialized_ = true;
- return 0;
-}
-
-// Reset tone generator to uninitialized state.
-void DtmfToneGenerator::Reset() {
- initialized_ = false;
-}
-
-// Generate num_samples of DTMF signal and write to |output|.
-int DtmfToneGenerator::Generate(int num_samples,
- AudioMultiVector* output) {
- if (!initialized_) {
- return kNotInitialized;
- }
-
- if (num_samples < 0 || !output) {
- return kParameterError;
- }
- assert(output->Channels() == 1); // Not adapted for multi-channel yet.
- if (output->Channels() != 1) {
- return kStereoNotSupported;
- }
-
- output->AssertSize(num_samples);
- for (int i = 0; i < num_samples; ++i) {
- // Use recursion formula y[n] = a * y[n - 1] - y[n - 2].
- int16_t temp_val_low = ((coeff1_ * sample_history1_[1] + 8192) >> 14)
- - sample_history1_[0];
- int16_t temp_val_high = ((coeff2_ * sample_history2_[1] + 8192) >> 14)
- - sample_history2_[0];
-
- // Update recursion memory.
- sample_history1_[0] = sample_history1_[1];
- sample_history1_[1] = temp_val_low;
- sample_history2_[0] = sample_history2_[1];
- sample_history2_[1] = temp_val_high;
-
- // Attenuate the low frequency tone 3 dB.
- int32_t temp_val = kAmpMultiplier * temp_val_low + (temp_val_high << 15);
- // Normalize the signal to Q14 with proper rounding.
- temp_val = (temp_val + 16384) >> 15;
- // Scale the signal to correct volume.
- (*output)[0][i] =
- static_cast<int16_t>((temp_val * amplitude_ + 8192) >> 14);
- }
-
- return num_samples;
-}
-
-} // namespace webrtc
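
Reviewer note: the comment block at the top of the deleted dtmf_tone_generator.cc describes the oscillator as x[n] = a*x[n-1] - x[n-2] with a = 2*cos(2*pi*f/fs), started from x[-1] = 0 and x[-2] = sin(2*pi*f/fs), which yields x[n] = sin(-2*pi*f/fs*(n+1)). The standalone sketch below (not WebRTC code; plain double precision and standard library only) reproduces two of the Q14 table entries from those formulas and checks the recursion against the closed form.

#include <math.h>
#include <stdio.h>

int main() {
  const double kPi = 3.14159265358979323846;
  const double f = 697.0;    // Low tone of event 1 (see the keypad map above).
  const double fs = 8000.0;  // Row 0 of the coefficient tables.
  const double w = 2.0 * kPi * f / fs;

  // Reproduce the Q14 table entries for event 1 at 8 kHz.
  printf("coeff (Q14) = %d (kCoeff1[0][1] is 27980)\n",
         (int)lround(2.0 * cos(w) * 16384.0));
  printf("init  (Q14) = %d (kInitValue1[0][1] is 8528)\n",
         (int)lround(sin(w) * 16384.0));

  // Run the recursion in double precision and compare with the closed form.
  double x_minus2 = sin(w);  // x[-2]
  double x_minus1 = 0.0;     // x[-1]
  const double a = 2.0 * cos(w);
  for (int n = 0; n < 5; ++n) {
    double x = a * x_minus1 - x_minus2;
    printf("x[%d] = %8.5f  closed form = %8.5f\n", n, x, sin(-w * (n + 1)));
    x_minus2 = x_minus1;
    x_minus1 = x;
  }
  return 0;
}
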
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h
deleted file mode 100644
index e93f0b883f5..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_TONE_GENERATOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_TONE_GENERATOR_H_
-
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// This class provides a generator for DTMF tones.
-class DtmfToneGenerator {
- public:
- enum ReturnCodes {
- kNotInitialized = -1,
- kParameterError = -2,
- kStereoNotSupported = -3,
- };
-
- DtmfToneGenerator();
- virtual ~DtmfToneGenerator() {}
- virtual int Init(int fs, int event, int attenuation);
- virtual void Reset();
- virtual int Generate(int num_samples, AudioMultiVector* output);
- virtual bool initialized() const { return initialized_; }
-
- private:
- static const int kCoeff1[4][16]; // 1st oscillator model coefficient table.
- static const int kCoeff2[4][16]; // 2nd oscillator model coefficient table.
- static const int kInitValue1[4][16]; // Initialization for 1st oscillator.
- static const int kInitValue2[4][16]; // Initialization for 2nd oscillator.
- static const int kAmplitude[37]; // Amplitude for 0 through -36 dBm0.
- static const int16_t kAmpMultiplier = 23171; // 3 dB attenuation (in Q15).
-
- bool initialized_; // True if generator is initialized properly.
- int coeff1_; // 1st oscillator coefficient for this event.
- int coeff2_; // 2nd oscillator coefficient for this event.
- int amplitude_; // Amplitude for this event.
- int16_t sample_history1_[2]; // Last 2 samples for the 1st oscillator.
- int16_t sample_history2_[2]; // Last 2 samples for the 2nd oscillator.
-
- DISALLOW_COPY_AND_ASSIGN(DtmfToneGenerator);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_TONE_GENERATOR_H_
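
One constant declared above is easy to sanity-check numerically: kAmpMultiplier = 23171 is roughly 32768/sqrt(2), i.e. about -3 dB in Q15, matching its "3 dB attenuation" comment. A minimal standalone sketch, not part of the deleted header:

#include <math.h>
#include <stdio.h>

int main() {
  // 23171 in Q15 is ~0.7071, i.e. roughly 1/sqrt(2), or about -3.01 dB.
  const double gain = 23171.0 / 32768.0;
  printf("kAmpMultiplier as linear gain: %.4f\n", gain);
  printf("kAmpMultiplier in dB: %.2f\n", 20.0 * log10(gain));
  return 0;
}
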
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc
deleted file mode 100644
index 37e8bbda96c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for DtmfToneGenerator class.
-
-#include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h"
-
-#include <math.h>
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-
-namespace webrtc {
-
-TEST(DtmfToneGenerator, CreateAndDestroy) {
- DtmfToneGenerator* tone_gen = new DtmfToneGenerator();
- delete tone_gen;
-}
-
-TEST(DtmfToneGenerator, TestErrors) {
- DtmfToneGenerator tone_gen;
- const int kNumSamples = 10;
- AudioMultiVector signal(1); // One channel.
-
- // Try to generate tones without initializing.
- EXPECT_EQ(DtmfToneGenerator::kNotInitialized,
- tone_gen.Generate(kNumSamples, &signal));
-
- const int fs = 16000; // Valid sample rate.
- const int event = 7; // Valid event.
- const int attenuation = 0; // Valid attenuation.
- // Initialize with invalid event -1.
- EXPECT_EQ(DtmfToneGenerator::kParameterError,
- tone_gen.Init(fs, -1, attenuation));
- // Initialize with invalid event 16.
- EXPECT_EQ(DtmfToneGenerator::kParameterError,
- tone_gen.Init(fs, 16, attenuation));
- // Initialize with invalid attenuation -1.
- EXPECT_EQ(DtmfToneGenerator::kParameterError, tone_gen.Init(fs, event, -1));
- // Initialize with invalid attenuation 37.
- EXPECT_EQ(DtmfToneGenerator::kParameterError, tone_gen.Init(fs, event, 37));
- EXPECT_FALSE(tone_gen.initialized()); // Should still be uninitialized.
-
- // Initialize with valid parameters.
- ASSERT_EQ(0, tone_gen.Init(fs, event, attenuation));
- EXPECT_TRUE(tone_gen.initialized());
- // Negative number of samples.
- EXPECT_EQ(DtmfToneGenerator::kParameterError, tone_gen.Generate(-1, &signal));
- // NULL pointer to destination.
- EXPECT_EQ(DtmfToneGenerator::kParameterError,
- tone_gen.Generate(kNumSamples, NULL));
-}
-
-TEST(DtmfToneGenerator, TestTones) {
- DtmfToneGenerator tone_gen;
- const int kAttenuation = 0;
- const int kNumSamples = 10;
- AudioMultiVector signal(1); // One channel.
-
- // Low and high frequencies for events 0 through 15.
- const double low_freq_hz[] = { 941.0, 697.0, 697.0, 697.0, 770.0, 770.0,
- 770.0, 852.0, 852.0, 852.0, 941.0, 941.0, 697.0, 770.0, 852.0, 941.0 };
- const double hi_freq_hz[] = { 1336.0, 1209.0, 1336.0, 1477.0, 1209.0, 1336.0,
- 1477.0, 1209.0, 1336.0, 1477.0, 1209.0, 1477.0, 1633.0, 1633.0, 1633.0,
- 1633.0 };
- const double attenuate_3dB = 23171.0 / 32768; // 3 dB attenuation.
- const double base_attenuation = 16141.0 / 16384.0; // This is the attenuation
- // applied to all cases.
- const int fs_vec[] = { 8000, 16000, 32000, 48000 };
- for (int f = 0; f < 4; ++f) {
- int fs = fs_vec[f];
- for (int event = 0; event <= 15; ++event) {
- std::ostringstream ss;
- ss << "Checking event " << event << " at sample rate " << fs;
- SCOPED_TRACE(ss.str());
- ASSERT_EQ(0, tone_gen.Init(fs, event, kAttenuation));
- EXPECT_TRUE(tone_gen.initialized());
- EXPECT_EQ(kNumSamples, tone_gen.Generate(kNumSamples, &signal));
-
- double f1 = low_freq_hz[event];
- double f2 = hi_freq_hz[event];
- const double pi = 3.14159265358979323846;
-
- for (int n = 0; n < kNumSamples; ++n) {
- double x = attenuate_3dB * sin(2.0 * pi * f1 / fs * (-n - 1))
- + sin(2.0 * pi * f2 / fs * (-n - 1));
- x *= base_attenuation;
- x = ldexp(x, 14); // Scale to Q14.
- static const int kChannel = 0;
- EXPECT_NEAR(x, static_cast<double>(signal[kChannel][n]), 25);
- }
-
- tone_gen.Reset();
- EXPECT_FALSE(tone_gen.initialized());
- }
- }
-}
-
-TEST(DtmfToneGenerator, TestAmplitudes) {
- DtmfToneGenerator tone_gen;
- const int kNumSamples = 10;
- AudioMultiVector signal(1); // One channel.
- AudioMultiVector ref_signal(1); // One channel.
-
- const int fs_vec[] = { 8000, 16000, 32000, 48000 };
- const int event_vec[] = { 0, 4, 9, 13 }; // Test a few events.
- for (int f = 0; f < 4; ++f) {
- int fs = fs_vec[f];
- int event = event_vec[f];
- // Create full-scale reference.
- ASSERT_EQ(0, tone_gen.Init(fs, event, 0)); // 0 attenuation.
- EXPECT_EQ(kNumSamples, tone_gen.Generate(kNumSamples, &ref_signal));
- // Test every 5 steps (to save time).
- for (int attenuation = 1; attenuation <= 36; attenuation += 5) {
- std::ostringstream ss;
- ss << "Checking event " << event << " at sample rate " << fs;
- ss << "; attenuation " << attenuation;
- SCOPED_TRACE(ss.str());
- ASSERT_EQ(0, tone_gen.Init(fs, event, attenuation));
- EXPECT_EQ(kNumSamples, tone_gen.Generate(kNumSamples, &signal));
- for (int n = 0; n < kNumSamples; ++n) {
- double attenuation_factor =
- pow(10, -static_cast<double>(attenuation)/20);
- // Verify that the attenuation is correct.
- static const int kChannel = 0;
- EXPECT_NEAR(attenuation_factor * ref_signal[kChannel][n],
- signal[kChannel][n], 2);
- }
-
- tone_gen.Reset();
- }
- }
-}
-
-} // namespace webrtc
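
The TestAmplitudes case above relies on each attenuation step of k dB corresponding to a linear gain of 10^(-k/20). The same relation reproduces the kAmplitude[] table defined in the deleted dtmf_tone_generator.cc (entry k is roughly 16141 * 10^(-k/20); for example entry 6 is 8090 and entry 20 is 1614). A standalone sketch, not part of the deleted test:

#include <math.h>
#include <stdio.h>

int main() {
  // Print a few attenuation steps and the corresponding Q14 amplitude,
  // scaled by the full-scale table value 16141.
  for (int k = 0; k <= 36; k += 6) {
    double gain = pow(10.0, -k / 20.0);
    printf("k=%2d dB  gain=%.4f  16141*gain=%6.0f\n", k, gain, 16141.0 * gain);
  }
  return 0;
}
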
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.cc
deleted file mode 100644
index 73f2ef85a56..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.cc
+++ /dev/null
@@ -1,867 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-
-#include <assert.h>
-#include <string.h> // memset
-
-#include <algorithm> // min, max
-#include <limits> // numeric_limits<T>
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-
-namespace webrtc {
-
-void Expand::Reset() {
- first_expand_ = true;
- consecutive_expands_ = 0;
- max_lag_ = 0;
- for (size_t ix = 0; ix < num_channels_; ++ix) {
- channel_parameters_[ix].expand_vector0.Clear();
- channel_parameters_[ix].expand_vector1.Clear();
- }
-}
-
-int Expand::Process(AudioMultiVector* output) {
- int16_t random_vector[kMaxSampleRate / 8000 * 120 + 30];
- int16_t scaled_random_vector[kMaxSampleRate / 8000 * 125];
- static const int kTempDataSize = 3600;
- int16_t temp_data[kTempDataSize]; // TODO(hlundin) Remove this.
- int16_t* voiced_vector_storage = temp_data;
- int16_t* voiced_vector = &voiced_vector_storage[overlap_length_];
- static const int kNoiseLpcOrder = BackgroundNoise::kMaxLpcOrder;
- int16_t unvoiced_array_memory[kNoiseLpcOrder + kMaxSampleRate / 8000 * 125];
- int16_t* unvoiced_vector = unvoiced_array_memory + kUnvoicedLpcOrder;
- int16_t* noise_vector = unvoiced_array_memory + kNoiseLpcOrder;
-
- int fs_mult = fs_hz_ / 8000;
-
- if (first_expand_) {
- // Perform initial setup if this is the first expansion since last reset.
- AnalyzeSignal(random_vector);
- first_expand_ = false;
- } else {
- // This is not the first expansion, parameters are already estimated.
- // Extract a noise segment.
- int16_t rand_length = max_lag_;
- // TODO(hlundin): This if-statement should not be needed. Should be just
- // as good to generate all of the vector in one call in either case.
- if (rand_length <= RandomVector::kRandomTableSize) {
- random_vector_->IncreaseSeedIncrement(2);
- random_vector_->Generate(rand_length, random_vector);
- } else {
- // This only applies to SWB where length could be larger than 256.
- assert(rand_length <= kMaxSampleRate / 8000 * 120 + 30);
- random_vector_->IncreaseSeedIncrement(2);
- random_vector_->Generate(RandomVector::kRandomTableSize, random_vector);
- random_vector_->IncreaseSeedIncrement(2);
- random_vector_->Generate(rand_length - RandomVector::kRandomTableSize,
- &random_vector[RandomVector::kRandomTableSize]);
- }
- }
-
-
- // Generate signal.
- UpdateLagIndex();
-
- // Voiced part.
- // Generate a weighted vector with the current lag.
- size_t expansion_vector_length = max_lag_ + overlap_length_;
- size_t current_lag = expand_lags_[current_lag_index_];
- // Copy lag+overlap data.
- size_t expansion_vector_position = expansion_vector_length - current_lag -
- overlap_length_;
- size_t temp_length = current_lag + overlap_length_;
- for (size_t channel_ix = 0; channel_ix < num_channels_; ++channel_ix) {
- ChannelParameters& parameters = channel_parameters_[channel_ix];
- if (current_lag_index_ == 0) {
- // Use only expand_vector0.
- assert(expansion_vector_position + temp_length <=
- parameters.expand_vector0.Size());
- memcpy(voiced_vector_storage,
- &parameters.expand_vector0[expansion_vector_position],
- sizeof(int16_t) * temp_length);
- } else if (current_lag_index_ == 1) {
- // Mix 3/4 of expand_vector0 with 1/4 of expand_vector1.
- WebRtcSpl_ScaleAndAddVectorsWithRound(
- &parameters.expand_vector0[expansion_vector_position], 3,
- &parameters.expand_vector1[expansion_vector_position], 1, 2,
- voiced_vector_storage, static_cast<int>(temp_length));
- } else if (current_lag_index_ == 2) {
- // Mix 1/2 of expand_vector0 with 1/2 of expand_vector1.
- assert(expansion_vector_position + temp_length <=
- parameters.expand_vector0.Size());
- assert(expansion_vector_position + temp_length <=
- parameters.expand_vector1.Size());
- WebRtcSpl_ScaleAndAddVectorsWithRound(
- &parameters.expand_vector0[expansion_vector_position], 1,
- &parameters.expand_vector1[expansion_vector_position], 1, 1,
- voiced_vector_storage, static_cast<int>(temp_length));
- }
-
- // Get tapering window parameters. Values are in Q15.
- int16_t muting_window, muting_window_increment;
- int16_t unmuting_window, unmuting_window_increment;
- if (fs_hz_ == 8000) {
- muting_window = DspHelper::kMuteFactorStart8kHz;
- muting_window_increment = DspHelper::kMuteFactorIncrement8kHz;
- unmuting_window = DspHelper::kUnmuteFactorStart8kHz;
- unmuting_window_increment = DspHelper::kUnmuteFactorIncrement8kHz;
- } else if (fs_hz_ == 16000) {
- muting_window = DspHelper::kMuteFactorStart16kHz;
- muting_window_increment = DspHelper::kMuteFactorIncrement16kHz;
- unmuting_window = DspHelper::kUnmuteFactorStart16kHz;
- unmuting_window_increment = DspHelper::kUnmuteFactorIncrement16kHz;
- } else if (fs_hz_ == 32000) {
- muting_window = DspHelper::kMuteFactorStart32kHz;
- muting_window_increment = DspHelper::kMuteFactorIncrement32kHz;
- unmuting_window = DspHelper::kUnmuteFactorStart32kHz;
- unmuting_window_increment = DspHelper::kUnmuteFactorIncrement32kHz;
- } else { // fs_ == 48000
- muting_window = DspHelper::kMuteFactorStart48kHz;
- muting_window_increment = DspHelper::kMuteFactorIncrement48kHz;
- unmuting_window = DspHelper::kUnmuteFactorStart48kHz;
- unmuting_window_increment = DspHelper::kUnmuteFactorIncrement48kHz;
- }
-
-    // Smooth the expanded signal if it has not been muted to a low amplitude
-    // and |current_voice_mix_factor| is larger than 0.5.
- if ((parameters.mute_factor > 819) &&
- (parameters.current_voice_mix_factor > 8192)) {
- size_t start_ix = sync_buffer_->Size() - overlap_length_;
- for (size_t i = 0; i < overlap_length_; i++) {
- // Do overlap add between new vector and overlap.
- (*sync_buffer_)[channel_ix][start_ix + i] =
- (((*sync_buffer_)[channel_ix][start_ix + i] * muting_window) +
- (((parameters.mute_factor * voiced_vector_storage[i]) >> 14) *
- unmuting_window) + 16384) >> 15;
- muting_window += muting_window_increment;
- unmuting_window += unmuting_window_increment;
- }
- } else if (parameters.mute_factor == 0) {
- // The expanded signal will consist of only comfort noise if
- // mute_factor = 0. Set the output length to 15 ms for best noise
- // production.
- // TODO(hlundin): This has been disabled since the length of
- // parameters.expand_vector0 and parameters.expand_vector1 no longer
- // match with expand_lags_, causing invalid reads and writes. Is it a good
- // idea to enable this again, and solve the vector size problem?
-// max_lag_ = fs_mult * 120;
-// expand_lags_[0] = fs_mult * 120;
-// expand_lags_[1] = fs_mult * 120;
-// expand_lags_[2] = fs_mult * 120;
- }
-
- // Unvoiced part.
- // Filter |scaled_random_vector| through |ar_filter_|.
- memcpy(unvoiced_vector - kUnvoicedLpcOrder, parameters.ar_filter_state,
- sizeof(int16_t) * kUnvoicedLpcOrder);
- int32_t add_constant = 0;
- if (parameters.ar_gain_scale > 0) {
- add_constant = 1 << (parameters.ar_gain_scale - 1);
- }
- WebRtcSpl_AffineTransformVector(scaled_random_vector, random_vector,
- parameters.ar_gain, add_constant,
- parameters.ar_gain_scale,
- static_cast<int>(current_lag));
- WebRtcSpl_FilterARFastQ12(scaled_random_vector, unvoiced_vector,
- parameters.ar_filter, kUnvoicedLpcOrder + 1,
- static_cast<int>(current_lag));
- memcpy(parameters.ar_filter_state,
- &(unvoiced_vector[current_lag - kUnvoicedLpcOrder]),
- sizeof(int16_t) * kUnvoicedLpcOrder);
-
- // Combine voiced and unvoiced contributions.
-
- // Set a suitable cross-fading slope.
- // For lag =
- // <= 31 * fs_mult => go from 1 to 0 in about 8 ms;
- // (>= 31 .. <= 63) * fs_mult => go from 1 to 0 in about 16 ms;
- // >= 64 * fs_mult => go from 1 to 0 in about 32 ms.
- // temp_shift = getbits(max_lag_) - 5.
- int temp_shift = (31 - WebRtcSpl_NormW32(max_lag_)) - 5;
- int16_t mix_factor_increment = 256 >> temp_shift;
- if (stop_muting_) {
- mix_factor_increment = 0;
- }
-
- // Create combined signal by shifting in more and more of unvoiced part.
- temp_shift = 8 - temp_shift; // = getbits(mix_factor_increment).
- size_t temp_lenght = (parameters.current_voice_mix_factor -
- parameters.voice_mix_factor) >> temp_shift;
- temp_lenght = std::min(temp_lenght, current_lag);
- DspHelper::CrossFade(voiced_vector, unvoiced_vector, temp_lenght,
- &parameters.current_voice_mix_factor,
- mix_factor_increment, temp_data);
-
-    // If the end of the cross-fading period was reached before the end of the
-    // expanded signal path, mix the rest with a fixed mixing factor.
- if (temp_lenght < current_lag) {
- if (mix_factor_increment != 0) {
- parameters.current_voice_mix_factor = parameters.voice_mix_factor;
- }
- int temp_scale = 16384 - parameters.current_voice_mix_factor;
- WebRtcSpl_ScaleAndAddVectorsWithRound(
- voiced_vector + temp_lenght, parameters.current_voice_mix_factor,
- unvoiced_vector + temp_lenght, temp_scale, 14,
- temp_data + temp_lenght, static_cast<int>(current_lag - temp_lenght));
- }
-
- // Select muting slope depending on how many consecutive expands we have
- // done.
- if (consecutive_expands_ == 3) {
- // Let the mute factor decrease from 1.0 to 0.95 in 6.25 ms.
- // mute_slope = 0.0010 / fs_mult in Q20.
- parameters.mute_slope = std::max(parameters.mute_slope,
- static_cast<int16_t>(1049 / fs_mult));
- }
- if (consecutive_expands_ == 7) {
- // Let the mute factor decrease from 1.0 to 0.90 in 6.25 ms.
- // mute_slope = 0.0020 / fs_mult in Q20.
- parameters.mute_slope = std::max(parameters.mute_slope,
- static_cast<int16_t>(2097 / fs_mult));
- }
-
- // Mute segment according to slope value.
- if ((consecutive_expands_ != 0) || !parameters.onset) {
- // Mute to the previous level, then continue with the muting.
- WebRtcSpl_AffineTransformVector(temp_data, temp_data,
- parameters.mute_factor, 8192,
- 14, static_cast<int>(current_lag));
-
- if (!stop_muting_) {
- DspHelper::MuteSignal(temp_data, parameters.mute_slope, current_lag);
-
- // Shift by 6 to go from Q20 to Q14.
- // TODO(hlundin): Adding 8192 before shifting 6 steps seems wrong.
- // Legacy.
- int16_t gain = static_cast<int16_t>(16384 -
- (((current_lag * parameters.mute_slope) + 8192) >> 6));
- gain = ((gain * parameters.mute_factor) + 8192) >> 14;
-
- // Guard against getting stuck with very small (but sometimes audible)
- // gain.
- if ((consecutive_expands_ > 3) && (gain >= parameters.mute_factor)) {
- parameters.mute_factor = 0;
- } else {
- parameters.mute_factor = gain;
- }
- }
- }
-
- // Background noise part.
- // TODO(hlundin): Move to separate method? In BackgroundNoise class?
- if (background_noise_->initialized()) {
- // Use background noise parameters.
- memcpy(noise_vector - kNoiseLpcOrder,
- background_noise_->FilterState(channel_ix),
- sizeof(int16_t) * kNoiseLpcOrder);
-
- if (background_noise_->ScaleShift(channel_ix) > 1) {
- add_constant = 1 << (background_noise_->ScaleShift(channel_ix) - 1);
- } else {
- add_constant = 0;
- }
-
- // Scale random vector to correct energy level.
- WebRtcSpl_AffineTransformVector(
- scaled_random_vector, random_vector,
- background_noise_->Scale(channel_ix), add_constant,
- background_noise_->ScaleShift(channel_ix),
- static_cast<int>(current_lag));
-
- WebRtcSpl_FilterARFastQ12(scaled_random_vector, noise_vector,
- background_noise_->Filter(channel_ix),
- kNoiseLpcOrder + 1,
- static_cast<int>(current_lag));
-
- background_noise_->SetFilterState(
- channel_ix,
- &(noise_vector[current_lag - kNoiseLpcOrder]),
- kNoiseLpcOrder);
-
- // Unmute the background noise.
- int16_t bgn_mute_factor = background_noise_->MuteFactor(channel_ix);
- NetEqBackgroundNoiseMode bgn_mode = background_noise_->mode();
- if (bgn_mode == kBgnFade &&
- consecutive_expands_ >= kMaxConsecutiveExpands &&
- bgn_mute_factor > 0) {
- // Fade BGN to zero.
- // Calculate muting slope, approximately -2^18 / fs_hz.
- int16_t mute_slope;
- if (fs_hz_ == 8000) {
- mute_slope = -32;
- } else if (fs_hz_ == 16000) {
- mute_slope = -16;
- } else if (fs_hz_ == 32000) {
- mute_slope = -8;
- } else {
- mute_slope = -5;
- }
- // Use UnmuteSignal function with negative slope.
- // |bgn_mute_factor| is in Q14. |mute_slope| is in Q20.
- DspHelper::UnmuteSignal(noise_vector, current_lag, &bgn_mute_factor,
- mute_slope, noise_vector);
- } else if (bgn_mute_factor < 16384) {
- // If mode is kBgnOff, or if kBgnFade has started fading,
- // Use regular |mute_slope|.
- if (!stop_muting_ && bgn_mode != kBgnOff &&
- !(bgn_mode == kBgnFade &&
- consecutive_expands_ >= kMaxConsecutiveExpands)) {
- DspHelper::UnmuteSignal(noise_vector, static_cast<int>(current_lag),
- &bgn_mute_factor, parameters.mute_slope,
- noise_vector);
- } else {
- // kBgnOn and stop muting, or
- // kBgnOff (mute factor is always 0), or
- // kBgnFade has reached 0.
- WebRtcSpl_AffineTransformVector(noise_vector, noise_vector,
- bgn_mute_factor, 8192, 14,
- static_cast<int>(current_lag));
- }
- }
- // Update mute_factor in BackgroundNoise class.
- background_noise_->SetMuteFactor(channel_ix, bgn_mute_factor);
- } else {
- // BGN parameters have not been initialized; use zero noise.
- memset(noise_vector, 0, sizeof(int16_t) * current_lag);
- }
-
- // Add background noise to the combined voiced-unvoiced signal.
- for (size_t i = 0; i < current_lag; i++) {
- temp_data[i] = temp_data[i] + noise_vector[i];
- }
- if (channel_ix == 0) {
- output->AssertSize(current_lag);
- } else {
- assert(output->Size() == current_lag);
- }
- memcpy(&(*output)[channel_ix][0], temp_data,
- sizeof(temp_data[0]) * current_lag);
- }
-
- // Increase call number and cap it.
- ++consecutive_expands_;
- if (consecutive_expands_ > kMaxConsecutiveExpands) {
- consecutive_expands_ = kMaxConsecutiveExpands;
- }
-
- return 0;
-}
-
-void Expand::SetParametersForNormalAfterExpand() {
- current_lag_index_ = 0;
- lag_index_direction_ = 0;
- stop_muting_ = true; // Do not mute signal any more.
-}
-
-void Expand::SetParametersForMergeAfterExpand() {
- current_lag_index_ = -1; /* out of the 3 possible ones */
- lag_index_direction_ = 1; /* make sure we get the "optimal" lag */
- stop_muting_ = true;
-}
-
-void Expand::AnalyzeSignal(int16_t* random_vector) {
- int32_t auto_correlation[kUnvoicedLpcOrder + 1];
- int16_t reflection_coeff[kUnvoicedLpcOrder];
- int16_t correlation_vector[kMaxSampleRate / 8000 * 102];
- int best_correlation_index[kNumCorrelationCandidates];
- int16_t best_correlation[kNumCorrelationCandidates];
- int16_t best_distortion_index[kNumCorrelationCandidates];
- int16_t best_distortion[kNumCorrelationCandidates];
- int32_t correlation_vector2[(99 * kMaxSampleRate / 8000) + 1];
- int32_t best_distortion_w32[kNumCorrelationCandidates];
- static const int kNoiseLpcOrder = BackgroundNoise::kMaxLpcOrder;
- int16_t unvoiced_array_memory[kNoiseLpcOrder + kMaxSampleRate / 8000 * 125];
- int16_t* unvoiced_vector = unvoiced_array_memory + kUnvoicedLpcOrder;
-
- int fs_mult = fs_hz_ / 8000;
-
- // Pre-calculate common multiplications with fs_mult.
- int fs_mult_4 = fs_mult * 4;
- int fs_mult_20 = fs_mult * 20;
- int fs_mult_120 = fs_mult * 120;
- int fs_mult_dist_len = fs_mult * kDistortionLength;
- int fs_mult_lpc_analysis_len = fs_mult * kLpcAnalysisLength;
-
- const size_t signal_length = 256 * fs_mult;
- const int16_t* audio_history =
- &(*sync_buffer_)[0][sync_buffer_->Size() - signal_length];
-
- // Initialize some member variables.
- lag_index_direction_ = 1;
- current_lag_index_ = -1;
- stop_muting_ = false;
- random_vector_->set_seed_increment(1);
- consecutive_expands_ = 0;
- for (size_t ix = 0; ix < num_channels_; ++ix) {
- channel_parameters_[ix].current_voice_mix_factor = 16384; // 1.0 in Q14.
- channel_parameters_[ix].mute_factor = 16384; // 1.0 in Q14.
- // Start with 0 gain for background noise.
- background_noise_->SetMuteFactor(ix, 0);
- }
-
- // Calculate correlation in downsampled domain (4 kHz sample rate).
- int16_t correlation_scale;
- int correlation_length = 51; // TODO(hlundin): Legacy bit-exactness.
- // If it is decided to break bit-exactness |correlation_length| should be
- // initialized to the return value of Correlation().
- Correlation(audio_history, signal_length, correlation_vector,
- &correlation_scale);
-
- // Find peaks in correlation vector.
- DspHelper::PeakDetection(correlation_vector, correlation_length,
- kNumCorrelationCandidates, fs_mult,
- best_correlation_index, best_correlation);
-
- // Adjust peak locations; cross-correlation lags start at 2.5 ms
- // (20 * fs_mult samples).
- best_correlation_index[0] += fs_mult_20;
- best_correlation_index[1] += fs_mult_20;
- best_correlation_index[2] += fs_mult_20;
-
- // Calculate distortion around the |kNumCorrelationCandidates| best lags.
- int distortion_scale = 0;
- for (int i = 0; i < kNumCorrelationCandidates; i++) {
- int16_t min_index = std::max(fs_mult_20,
- best_correlation_index[i] - fs_mult_4);
- int16_t max_index = std::min(fs_mult_120 - 1,
- best_correlation_index[i] + fs_mult_4);
- best_distortion_index[i] = DspHelper::MinDistortion(
- &(audio_history[signal_length - fs_mult_dist_len]), min_index,
- max_index, fs_mult_dist_len, &best_distortion_w32[i]);
- distortion_scale = std::max(16 - WebRtcSpl_NormW32(best_distortion_w32[i]),
- distortion_scale);
- }
- // Shift the distortion values to fit in 16 bits.
- WebRtcSpl_VectorBitShiftW32ToW16(best_distortion, kNumCorrelationCandidates,
- best_distortion_w32, distortion_scale);
-
- // Find the maximizing index |i| of the cost function
- // f[i] = best_correlation[i] / best_distortion[i].
- int32_t best_ratio = std::numeric_limits<int32_t>::min();
- int best_index = -1;
- for (int i = 0; i < kNumCorrelationCandidates; ++i) {
- int32_t ratio;
- if (best_distortion[i] > 0) {
- ratio = (best_correlation[i] << 16) / best_distortion[i];
- } else if (best_correlation[i] == 0) {
-      ratio = 0;  // No correlation; set result to zero.
- } else {
- ratio = std::numeric_limits<int32_t>::max(); // Denominator is zero.
- }
- if (ratio > best_ratio) {
- best_index = i;
- best_ratio = ratio;
- }
- }
-
- int distortion_lag = best_distortion_index[best_index];
- int correlation_lag = best_correlation_index[best_index];
- max_lag_ = std::max(distortion_lag, correlation_lag);
-
- // Calculate the exact best correlation in the range between
- // |correlation_lag| and |distortion_lag|.
- correlation_length = distortion_lag + 10;
- correlation_length = std::min(correlation_length, fs_mult_120);
- correlation_length = std::max(correlation_length, 60 * fs_mult);
-
- int start_index = std::min(distortion_lag, correlation_lag);
- int correlation_lags = WEBRTC_SPL_ABS_W16((distortion_lag-correlation_lag))
- + 1;
- assert(correlation_lags <= 99 * fs_mult + 1); // Cannot be larger.
-
- for (size_t channel_ix = 0; channel_ix < num_channels_; ++channel_ix) {
- ChannelParameters& parameters = channel_parameters_[channel_ix];
- // Calculate suitable scaling.
- int16_t signal_max = WebRtcSpl_MaxAbsValueW16(
- &audio_history[signal_length - correlation_length - start_index
- - correlation_lags],
- correlation_length + start_index + correlation_lags - 1);
- correlation_scale = ((31 - WebRtcSpl_NormW32(signal_max * signal_max))
- + (31 - WebRtcSpl_NormW32(correlation_length))) - 31;
- correlation_scale = std::max(static_cast<int16_t>(0), correlation_scale);
-
- // Calculate the correlation, store in |correlation_vector2|.
- WebRtcSpl_CrossCorrelation(
- correlation_vector2,
- &(audio_history[signal_length - correlation_length]),
- &(audio_history[signal_length - correlation_length - start_index]),
- correlation_length, correlation_lags, correlation_scale, -1);
-
- // Find maximizing index.
- best_index = WebRtcSpl_MaxIndexW32(correlation_vector2, correlation_lags);
- int32_t max_correlation = correlation_vector2[best_index];
- // Compensate index with start offset.
- best_index = best_index + start_index;
-
- // Calculate energies.
- int32_t energy1 = WebRtcSpl_DotProductWithScale(
- &(audio_history[signal_length - correlation_length]),
- &(audio_history[signal_length - correlation_length]),
- correlation_length, correlation_scale);
- int32_t energy2 = WebRtcSpl_DotProductWithScale(
- &(audio_history[signal_length - correlation_length - best_index]),
- &(audio_history[signal_length - correlation_length - best_index]),
- correlation_length, correlation_scale);
-
- // Calculate the correlation coefficient between the two portions of the
- // signal.
- int16_t corr_coefficient;
- if ((energy1 > 0) && (energy2 > 0)) {
- int energy1_scale = std::max(16 - WebRtcSpl_NormW32(energy1), 0);
- int energy2_scale = std::max(16 - WebRtcSpl_NormW32(energy2), 0);
- // Make sure total scaling is even (to simplify scale factor after sqrt).
- if ((energy1_scale + energy2_scale) & 1) {
- // If sum is odd, add 1 to make it even.
- energy1_scale += 1;
- }
- int16_t scaled_energy1 = energy1 >> energy1_scale;
- int16_t scaled_energy2 = energy2 >> energy2_scale;
- int16_t sqrt_energy_product = WebRtcSpl_SqrtFloor(
- scaled_energy1 * scaled_energy2);
- // Calculate max_correlation / sqrt(energy1 * energy2) in Q14.
- int cc_shift = 14 - (energy1_scale + energy2_scale) / 2;
- max_correlation = WEBRTC_SPL_SHIFT_W32(max_correlation, cc_shift);
- corr_coefficient = WebRtcSpl_DivW32W16(max_correlation,
- sqrt_energy_product);
- corr_coefficient = std::min(static_cast<int16_t>(16384),
- corr_coefficient); // Cap at 1.0 in Q14.
- } else {
- corr_coefficient = 0;
- }
-
- // Extract the two vectors expand_vector0 and expand_vector1 from
- // |audio_history|.
- int16_t expansion_length = static_cast<int16_t>(max_lag_ + overlap_length_);
- const int16_t* vector1 = &(audio_history[signal_length - expansion_length]);
- const int16_t* vector2 = vector1 - distortion_lag;
- // Normalize the second vector to the same energy as the first.
- energy1 = WebRtcSpl_DotProductWithScale(vector1, vector1, expansion_length,
- correlation_scale);
- energy2 = WebRtcSpl_DotProductWithScale(vector2, vector2, expansion_length,
- correlation_scale);
- // Confirm that amplitude ratio sqrt(energy1 / energy2) is within 0.5 - 2.0,
-    // i.e., energy1 / energy2 is within 0.25 - 4.
- int16_t amplitude_ratio;
- if ((energy1 / 4 < energy2) && (energy1 > energy2 / 4)) {
- // Energy constraint fulfilled. Use both vectors and scale them
- // accordingly.
- int16_t scaled_energy2 = std::max(16 - WebRtcSpl_NormW32(energy2), 0);
- int16_t scaled_energy1 = scaled_energy2 - 13;
- // Calculate scaled_energy1 / scaled_energy2 in Q13.
- int32_t energy_ratio = WebRtcSpl_DivW32W16(
- WEBRTC_SPL_SHIFT_W32(energy1, -scaled_energy1),
- WEBRTC_SPL_RSHIFT_W32(energy2, scaled_energy2));
- // Calculate sqrt ratio in Q13 (sqrt of en1/en2 in Q26).
- amplitude_ratio = WebRtcSpl_SqrtFloor(energy_ratio << 13);
- // Copy the two vectors and give them the same energy.
- parameters.expand_vector0.Clear();
- parameters.expand_vector0.PushBack(vector1, expansion_length);
- parameters.expand_vector1.Clear();
- if (parameters.expand_vector1.Size() <
- static_cast<size_t>(expansion_length)) {
- parameters.expand_vector1.Extend(
- expansion_length - parameters.expand_vector1.Size());
- }
- WebRtcSpl_AffineTransformVector(&parameters.expand_vector1[0],
- const_cast<int16_t*>(vector2),
- amplitude_ratio,
- 4096,
- 13,
- expansion_length);
- } else {
- // Energy change constraint not fulfilled. Only use last vector.
- parameters.expand_vector0.Clear();
- parameters.expand_vector0.PushBack(vector1, expansion_length);
- // Copy from expand_vector0 to expand_vector1.
- parameters.expand_vector0.CopyFrom(&parameters.expand_vector1);
-      // Set amplitude_ratio since it is used by the muting slope.
- if ((energy1 / 4 < energy2) || (energy2 == 0)) {
- amplitude_ratio = 4096; // 0.5 in Q13.
- } else {
- amplitude_ratio = 16384; // 2.0 in Q13.
- }
- }
-
- // Set the 3 lag values.
- int lag_difference = distortion_lag - correlation_lag;
- if (lag_difference == 0) {
- // |distortion_lag| and |correlation_lag| are equal.
- expand_lags_[0] = distortion_lag;
- expand_lags_[1] = distortion_lag;
- expand_lags_[2] = distortion_lag;
- } else {
- // |distortion_lag| and |correlation_lag| are not equal; use different
- // combinations of the two.
- // First lag is |distortion_lag| only.
- expand_lags_[0] = distortion_lag;
- // Second lag is the average of the two.
- expand_lags_[1] = (distortion_lag + correlation_lag) / 2;
- // Third lag is the average again, but rounding towards |correlation_lag|.
- if (lag_difference > 0) {
- expand_lags_[2] = (distortion_lag + correlation_lag - 1) / 2;
- } else {
- expand_lags_[2] = (distortion_lag + correlation_lag + 1) / 2;
- }
- }
-
- // Calculate the LPC and the gain of the filters.
- // Calculate scale value needed for auto-correlation.
- correlation_scale = WebRtcSpl_MaxAbsValueW16(
- &(audio_history[signal_length - fs_mult_lpc_analysis_len]),
- fs_mult_lpc_analysis_len);
-
- correlation_scale = std::min(16 - WebRtcSpl_NormW32(correlation_scale), 0);
- correlation_scale = std::max(correlation_scale * 2 + 7, 0);
-
- // Calculate kUnvoicedLpcOrder + 1 lags of the auto-correlation function.
- size_t temp_index = signal_length - fs_mult_lpc_analysis_len -
- kUnvoicedLpcOrder;
- // Copy signal to temporary vector to be able to pad with leading zeros.
- int16_t* temp_signal = new int16_t[fs_mult_lpc_analysis_len
- + kUnvoicedLpcOrder];
- memset(temp_signal, 0,
- sizeof(int16_t) * (fs_mult_lpc_analysis_len + kUnvoicedLpcOrder));
- memcpy(&temp_signal[kUnvoicedLpcOrder],
- &audio_history[temp_index + kUnvoicedLpcOrder],
- sizeof(int16_t) * fs_mult_lpc_analysis_len);
- WebRtcSpl_CrossCorrelation(auto_correlation,
- &temp_signal[kUnvoicedLpcOrder],
- &temp_signal[kUnvoicedLpcOrder],
- fs_mult_lpc_analysis_len, kUnvoicedLpcOrder + 1,
- correlation_scale, -1);
- delete [] temp_signal;
-
- // Verify that variance is positive.
- if (auto_correlation[0] > 0) {
- // Estimate AR filter parameters using Levinson-Durbin algorithm;
- // kUnvoicedLpcOrder + 1 filter coefficients.
- int16_t stability = WebRtcSpl_LevinsonDurbin(auto_correlation,
- parameters.ar_filter,
- reflection_coeff,
- kUnvoicedLpcOrder);
-
- // Keep filter parameters only if filter is stable.
- if (stability != 1) {
- // Set first coefficient to 4096 (1.0 in Q12).
- parameters.ar_filter[0] = 4096;
- // Set remaining |kUnvoicedLpcOrder| coefficients to zero.
- WebRtcSpl_MemSetW16(parameters.ar_filter + 1, 0, kUnvoicedLpcOrder);
- }
- }
-
- if (channel_ix == 0) {
- // Extract a noise segment.
- int16_t noise_length;
- if (distortion_lag < 40) {
- noise_length = 2 * distortion_lag + 30;
- } else {
- noise_length = distortion_lag + 30;
- }
- if (noise_length <= RandomVector::kRandomTableSize) {
- memcpy(random_vector, RandomVector::kRandomTable,
- sizeof(int16_t) * noise_length);
- } else {
- // Only applies to SWB where length could be larger than
- // |kRandomTableSize|.
- memcpy(random_vector, RandomVector::kRandomTable,
- sizeof(int16_t) * RandomVector::kRandomTableSize);
- assert(noise_length <= kMaxSampleRate / 8000 * 120 + 30);
- random_vector_->IncreaseSeedIncrement(2);
- random_vector_->Generate(
- noise_length - RandomVector::kRandomTableSize,
- &random_vector[RandomVector::kRandomTableSize]);
- }
- }
-
- // Set up state vector and calculate scale factor for unvoiced filtering.
- memcpy(parameters.ar_filter_state,
- &(audio_history[signal_length - kUnvoicedLpcOrder]),
- sizeof(int16_t) * kUnvoicedLpcOrder);
- memcpy(unvoiced_vector - kUnvoicedLpcOrder,
- &(audio_history[signal_length - 128 - kUnvoicedLpcOrder]),
- sizeof(int16_t) * kUnvoicedLpcOrder);
- WebRtcSpl_FilterMAFastQ12(
- const_cast<int16_t*>(&audio_history[signal_length - 128]),
- unvoiced_vector, parameters.ar_filter, kUnvoicedLpcOrder + 1, 128);
- int16_t unvoiced_prescale;
- if (WebRtcSpl_MaxAbsValueW16(unvoiced_vector, 128) > 4000) {
- unvoiced_prescale = 4;
- } else {
- unvoiced_prescale = 0;
- }
- int32_t unvoiced_energy = WebRtcSpl_DotProductWithScale(unvoiced_vector,
- unvoiced_vector,
- 128,
- unvoiced_prescale);
-
- // Normalize |unvoiced_energy| to 28 or 29 bits to preserve sqrt() accuracy.
- int16_t unvoiced_scale = WebRtcSpl_NormW32(unvoiced_energy) - 3;
- // Make sure we do an odd number of shifts since we already have 7 shifts
- // from dividing with 128 earlier. This will make the total scale factor
- // even, which is suitable for the sqrt.
- unvoiced_scale += ((unvoiced_scale & 0x1) ^ 0x1);
- unvoiced_energy = WEBRTC_SPL_SHIFT_W32(unvoiced_energy, unvoiced_scale);
- int32_t unvoiced_gain = WebRtcSpl_SqrtFloor(unvoiced_energy);
- parameters.ar_gain_scale = 13
- + (unvoiced_scale + 7 - unvoiced_prescale) / 2;
- parameters.ar_gain = unvoiced_gain;
-
- // Calculate voice_mix_factor from corr_coefficient.
- // Let x = corr_coefficient. Then, we compute:
- // if (x > 0.48)
- // voice_mix_factor = (-5179 + 19931x - 16422x^2 + 5776x^3) / 4096;
- // else
- // voice_mix_factor = 0;
- if (corr_coefficient > 7875) {
- int16_t x1, x2, x3;
- x1 = corr_coefficient; // |corr_coefficient| is in Q14.
- x2 = (x1 * x1) >> 14; // Shift 14 to keep result in Q14.
- x3 = (x1 * x2) >> 14;
- static const int kCoefficients[4] = { -5179, 19931, -16422, 5776 };
- int32_t temp_sum = kCoefficients[0] << 14;
- temp_sum += kCoefficients[1] * x1;
- temp_sum += kCoefficients[2] * x2;
- temp_sum += kCoefficients[3] * x3;
- parameters.voice_mix_factor = temp_sum / 4096;
- parameters.voice_mix_factor = std::min(parameters.voice_mix_factor,
- static_cast<int16_t>(16384));
- parameters.voice_mix_factor = std::max(parameters.voice_mix_factor,
- static_cast<int16_t>(0));
- } else {
- parameters.voice_mix_factor = 0;
- }
-
- // Calculate muting slope. Reuse value from earlier scaling of
- // |expand_vector0| and |expand_vector1|.
- int16_t slope = amplitude_ratio;
- if (slope > 12288) {
- // slope > 1.5.
- // Calculate (1 - (1 / slope)) / distortion_lag =
- // (slope - 1) / (distortion_lag * slope).
- // |slope| is in Q13, so 1 corresponds to 8192. Shift up to Q25 before
- // the division.
- // Shift the denominator from Q13 to Q5 before the division. The result of
- // the division will then be in Q20.
- int16_t temp_ratio = WebRtcSpl_DivW32W16((slope - 8192) << 12,
- (distortion_lag * slope) >> 8);
- if (slope > 14746) {
- // slope > 1.8.
- // Divide by 2, with proper rounding.
- parameters.mute_slope = (temp_ratio + 1) / 2;
- } else {
- // Divide by 8, with proper rounding.
- parameters.mute_slope = (temp_ratio + 4) / 8;
- }
- parameters.onset = true;
- } else {
- // Calculate (1 - slope) / distortion_lag.
- // Shift |slope| by 7 to Q20 before the division. The result is in Q20.
- parameters.mute_slope = WebRtcSpl_DivW32W16((8192 - slope) << 7,
- distortion_lag);
- if (parameters.voice_mix_factor <= 13107) {
- // Make sure the mute factor decreases from 1.0 to 0.9 in no more than
- // 6.25 ms.
- // mute_slope >= 0.005 / fs_mult in Q20.
- parameters.mute_slope = std::max(static_cast<int16_t>(5243 / fs_mult),
- parameters.mute_slope);
- } else if (slope > 8028) {
- parameters.mute_slope = 0;
- }
- parameters.onset = false;
- }
- }
-}
-
-int16_t Expand::Correlation(const int16_t* input, size_t input_length,
- int16_t* output, int16_t* output_scale) const {
- // Set parameters depending on sample rate.
- const int16_t* filter_coefficients;
- int16_t num_coefficients;
- int16_t downsampling_factor;
- if (fs_hz_ == 8000) {
- num_coefficients = 3;
- downsampling_factor = 2;
- filter_coefficients = DspHelper::kDownsample8kHzTbl;
- } else if (fs_hz_ == 16000) {
- num_coefficients = 5;
- downsampling_factor = 4;
- filter_coefficients = DspHelper::kDownsample16kHzTbl;
- } else if (fs_hz_ == 32000) {
- num_coefficients = 7;
- downsampling_factor = 8;
- filter_coefficients = DspHelper::kDownsample32kHzTbl;
- } else { // fs_hz_ == 48000.
- num_coefficients = 7;
- downsampling_factor = 12;
- filter_coefficients = DspHelper::kDownsample48kHzTbl;
- }
-
- // Correlate from lag 10 to lag 60 in downsampled domain.
- // (Corresponds to 20-120 for narrow-band, 40-240 for wide-band, and so on.)
- static const int kCorrelationStartLag = 10;
- static const int kNumCorrelationLags = 54;
- static const int kCorrelationLength = 60;
- // Downsample to 4 kHz sample rate.
- static const int kDownsampledLength = kCorrelationStartLag
- + kNumCorrelationLags + kCorrelationLength;
- int16_t downsampled_input[kDownsampledLength];
- static const int kFilterDelay = 0;
- WebRtcSpl_DownsampleFast(
- input + input_length - kDownsampledLength * downsampling_factor,
- kDownsampledLength * downsampling_factor, downsampled_input,
- kDownsampledLength, filter_coefficients, num_coefficients,
- downsampling_factor, kFilterDelay);
-
-  // Normalize |downsampled_input| to use all 16 bits.
- int16_t max_value = WebRtcSpl_MaxAbsValueW16(downsampled_input,
- kDownsampledLength);
- int16_t norm_shift = 16 - WebRtcSpl_NormW32(max_value);
- WebRtcSpl_VectorBitShiftW16(downsampled_input, kDownsampledLength,
- downsampled_input, norm_shift);
-
- int32_t correlation[kNumCorrelationLags];
- static const int kCorrelationShift = 6;
- WebRtcSpl_CrossCorrelation(
- correlation,
- &downsampled_input[kDownsampledLength - kCorrelationLength],
- &downsampled_input[kDownsampledLength - kCorrelationLength
- - kCorrelationStartLag],
- kCorrelationLength, kNumCorrelationLags, kCorrelationShift, -1);
-
- // Normalize and move data from 32-bit to 16-bit vector.
- int32_t max_correlation = WebRtcSpl_MaxAbsValueW32(correlation,
- kNumCorrelationLags);
- int16_t norm_shift2 = std::max(18 - WebRtcSpl_NormW32(max_correlation), 0);
- WebRtcSpl_VectorBitShiftW32ToW16(output, kNumCorrelationLags, correlation,
- norm_shift2);
- // Total scale factor (right shifts) of correlation value.
- *output_scale = 2 * norm_shift + kCorrelationShift + norm_shift2;
- return kNumCorrelationLags;
-}
-
-void Expand::UpdateLagIndex() {
- current_lag_index_ = current_lag_index_ + lag_index_direction_;
- // Change direction if needed.
- if (current_lag_index_ <= 0) {
- lag_index_direction_ = 1;
- }
- if (current_lag_index_ >= kNumLags - 1) {
- lag_index_direction_ = -1;
- }
-}
-
-} // namespace webrtc
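
The mute-slope arithmetic deleted above packs several Q-format conversions into two lines. As a reading aid only, here is a minimal stand-alone sketch (not part of the WebRTC tree) that restates the onset branch (slope > 1.5 in Q13) with a plain integer division instead of WebRtcSpl_DivW32W16; the function name is made up for illustration.

#include <cstdint>

// Hypothetical helper mirroring the onset case in the deleted code above.
int32_t MuteSlopeQ20(int16_t slope_q13, int distortion_lag) {
  // (slope - 1) in Q13, shifted up by 12 to Q25 before the division.
  int32_t numerator_q25 = (static_cast<int32_t>(slope_q13) - 8192) << 12;
  // distortion_lag * slope is in Q13; shifting right by 8 leaves Q5.
  int32_t denominator_q5 =
      (static_cast<int32_t>(distortion_lag) * slope_q13) >> 8;
  // Q25 / Q5 gives the ratio in Q20, as noted in the original comments.
  int32_t ratio_q20 = numerator_q25 / denominator_q5;
  // Divide by 2 (slope > 1.8) or by 8, with rounding, exactly as above.
  return (slope_q13 > 14746) ? (ratio_q20 + 1) / 2 : (ratio_q20 + 4) / 8;
}
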
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.h
deleted file mode 100644
index 25ae61903a6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.h
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_EXPAND_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_EXPAND_H_
-
-#include <assert.h>
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class BackgroundNoise;
-class RandomVector;
-class SyncBuffer;
-
-// This class handles extrapolation of audio data from the sync_buffer to
-// produce packet-loss concealment.
-// TODO(hlundin): Refactor this class to divide the long methods into shorter
-// ones.
-class Expand {
- public:
- Expand(BackgroundNoise* background_noise,
- SyncBuffer* sync_buffer,
- RandomVector* random_vector,
- int fs,
- size_t num_channels)
- : background_noise_(background_noise),
- sync_buffer_(sync_buffer),
- random_vector_(random_vector),
- first_expand_(true),
- fs_hz_(fs),
- num_channels_(num_channels),
- overlap_length_(5 * fs / 8000),
- lag_index_direction_(0),
- current_lag_index_(0),
- stop_muting_(false),
- channel_parameters_(new ChannelParameters[num_channels_]) {
- assert(fs == 8000 || fs == 16000 || fs == 32000 || fs == 48000);
- assert(fs <= kMaxSampleRate); // Should not be possible.
- assert(num_channels_ > 0);
- memset(expand_lags_, 0, sizeof(expand_lags_));
- Reset();
- }
-
- virtual ~Expand() {}
-
- // Resets the object.
- void Reset();
-
- // The main method to produce concealment data. The data is appended to the
- // end of |output|.
- int Process(AudioMultiVector* output);
-
- // Prepare the object to do extra expansion during normal operation following
- // a period of expands.
- void SetParametersForNormalAfterExpand();
-
- // Prepare the object to do extra expansion during merge operation following
- // a period of expands.
- void SetParametersForMergeAfterExpand();
-
- // Sets the mute factor for |channel| to |value|.
- void SetMuteFactor(int16_t value, size_t channel) {
- assert(channel < num_channels_);
- channel_parameters_[channel].mute_factor = value;
- }
-
- // Returns the mute factor for |channel|.
- int16_t MuteFactor(size_t channel) {
- assert(channel < num_channels_);
- return channel_parameters_[channel].mute_factor;
- }
-
- // Accessors and mutators.
- size_t overlap_length() const { return overlap_length_; }
- int16_t max_lag() const { return max_lag_; }
-
- private:
- static const int kUnvoicedLpcOrder = 6;
- static const int kNumCorrelationCandidates = 3;
- static const int kDistortionLength = 20;
- static const int kLpcAnalysisLength = 160;
- static const int kMaxSampleRate = 48000;
- static const int kNumLags = 3;
- static const int kMaxConsecutiveExpands = 200;
-
- struct ChannelParameters {
- // Constructor.
- ChannelParameters()
- : mute_factor(16384),
- ar_gain(0),
- ar_gain_scale(0),
- voice_mix_factor(0),
- current_voice_mix_factor(0),
- onset(false),
- mute_slope(0) {
- memset(ar_filter, 0, sizeof(ar_filter));
- memset(ar_filter_state, 0, sizeof(ar_filter_state));
- }
- int16_t mute_factor;
- int16_t ar_filter[kUnvoicedLpcOrder + 1];
- int16_t ar_filter_state[kUnvoicedLpcOrder];
- int16_t ar_gain;
- int16_t ar_gain_scale;
- int16_t voice_mix_factor; /* Q14 */
- int16_t current_voice_mix_factor; /* Q14 */
- AudioVector expand_vector0;
- AudioVector expand_vector1;
- bool onset;
- int16_t mute_slope; /* Q20 */
- };
-
- // Analyze the signal history in |sync_buffer_|, and set up all parameters
- // necessary to produce concealment data.
- void AnalyzeSignal(int16_t* random_vector);
-
- // Calculate the auto-correlation of |input|, with length |input_length|
- // samples. The correlation is calculated from a downsampled version of
- // |input|, and is written to |output|. The scale factor is written to
- // |output_scale|. Returns the length of the correlation vector.
- int16_t Correlation(const int16_t* input, size_t input_length,
- int16_t* output, int16_t* output_scale) const;
-
- void UpdateLagIndex();
-
- BackgroundNoise* background_noise_;
- SyncBuffer* sync_buffer_;
- RandomVector* random_vector_;
- bool first_expand_;
- int fs_hz_;
- size_t num_channels_;
- size_t overlap_length_;
- int consecutive_expands_;
- int16_t max_lag_;
- size_t expand_lags_[kNumLags];
- int lag_index_direction_;
- int current_lag_index_;
- bool stop_muting_;
- scoped_array<ChannelParameters> channel_parameters_;
-
- DISALLOW_COPY_AND_ASSIGN(Expand);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_EXPAND_H_
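
The ChannelParameters struct above keeps mute_factor and voice_mix_factor in Q14, where 16384 represents 1.0. As a small illustration (not from the tree; the helper name is invented), applying such a gain to one sample is a multiply followed by a 14-bit right shift:

#include <cstdint>

// Hypothetical helper: scales one sample by a Q14 gain (16384 == 1.0).
inline int16_t ApplyQ14Gain(int16_t sample, int16_t gain_q14) {
  return static_cast<int16_t>(
      (static_cast<int32_t>(sample) * gain_q14) >> 14);
}
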
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand_unittest.cc
deleted file mode 100644
index a63ed142f03..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand_unittest.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for Expand class.
-
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-
-namespace webrtc {
-
-TEST(Expand, CreateAndDestroy) {
- int fs = 8000;
- size_t channels = 1;
- BackgroundNoise bgn(channels);
- SyncBuffer sync_buffer(1, 1000);
- RandomVector random_vector;
- Expand expand(&bgn, &sync_buffer, &random_vector, fs, channels);
-}
-
-// TODO(hlundin): Write more tests.
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h
deleted file mode 100644
index f3bcc711f36..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_AUDIO_DECODER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_AUDIO_DECODER_H_
-
-#include <stdlib.h> // NULL
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-enum NetEqDecoder {
- kDecoderPCMu,
- kDecoderPCMa,
- kDecoderPCMu_2ch,
- kDecoderPCMa_2ch,
- kDecoderILBC,
- kDecoderISAC,
- kDecoderISACswb,
- kDecoderISACfb,
- kDecoderPCM16B,
- kDecoderPCM16Bwb,
- kDecoderPCM16Bswb32kHz,
- kDecoderPCM16Bswb48kHz,
- kDecoderPCM16B_2ch,
- kDecoderPCM16Bwb_2ch,
- kDecoderPCM16Bswb32kHz_2ch,
- kDecoderPCM16Bswb48kHz_2ch,
- kDecoderPCM16B_5ch,
- kDecoderG722,
- kDecoderG722_2ch,
- kDecoderRED,
- kDecoderAVT,
- kDecoderCNGnb,
- kDecoderCNGwb,
- kDecoderCNGswb32kHz,
- kDecoderCNGswb48kHz,
- kDecoderArbitrary,
- kDecoderOpus,
- kDecoderOpus_2ch,
- kDecoderCELT_32,
- kDecoderCELT_32_2ch,
-};
-
-// This is the interface class for decoders in NetEQ. Each codec type will have
-// an implementation of this class.
-class AudioDecoder {
- public:
- enum SpeechType {
- kSpeech = 1,
- kComfortNoise = 2
- };
-
-  // Used by PacketDuration below. The value -1 is reserved for errors.
- enum { kNotImplemented = -2 };
-
- explicit AudioDecoder(enum NetEqDecoder type)
- : codec_type_(type),
- channels_(1),
- state_(NULL) {
- }
-
- virtual ~AudioDecoder() {}
-
-  // Decodes |encoded_len| bytes from |encoded| and writes the result in
- // |decoded|. The number of samples from all channels produced is in
- // the return value. If the decoder produced comfort noise, |speech_type|
- // is set to kComfortNoise, otherwise it is kSpeech.
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) = 0;
-
-  // Same as Decode(), but interfaces to the decoder's redundant decode function.
- // The default implementation simply calls the regular Decode() method.
- virtual int DecodeRedundant(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type);
-
- // Indicates if the decoder implements the DecodePlc method.
- virtual bool HasDecodePlc() const;
-
- // Calls the packet-loss concealment of the decoder to update the state after
- // one or several lost packets.
- virtual int DecodePlc(int num_frames, int16_t* decoded);
-
- // Initializes the decoder.
- virtual int Init() = 0;
-
- // Notifies the decoder of an incoming packet to NetEQ.
- virtual int IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp);
-
- // Returns the last error code from the decoder.
- virtual int ErrorCode();
-
- // Returns the duration in samples of the payload in |encoded| which is
- // |encoded_len| bytes long. Returns kNotImplemented if no duration estimate
- // is available, or -1 in case of an error.
- virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
-
- virtual NetEqDecoder codec_type() const;
-
- // Returns the underlying decoder state.
- void* state() { return state_; }
-
- // Returns true if |codec_type| is supported.
- static bool CodecSupported(NetEqDecoder codec_type);
-
- // Returns the sample rate for |codec_type|.
- static int CodecSampleRateHz(NetEqDecoder codec_type);
-
-  // Creates an AudioDecoder object of type |codec_type|. Returns NULL for
-  // unsupported codecs, and when creating an AudioDecoder is not applicable
-  // (e.g., for RED and DTMF/AVT types).
- static AudioDecoder* CreateAudioDecoder(NetEqDecoder codec_type);
-
- size_t channels() const { return channels_; }
-
- protected:
- static SpeechType ConvertSpeechType(int16_t type);
-
- enum NetEqDecoder codec_type_;
- size_t channels_;
- void* state_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoder);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_AUDIO_DECODER_H_
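
For orientation, a caller of the interface above might combine the static factory helpers with Decode() roughly as follows. This is only a sketch against the deleted header: the assumptions that Init() returns 0 on success and that Decode() returns a negative value on error follow the surrounding comments, and the function name and buffer handling are illustrative.

#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"

// Hypothetical usage sketch; returns the number of decoded samples or -1.
int DecodeOnePayload(const uint8_t* payload, size_t payload_len,
                     int16_t* pcm_out) {
  if (!webrtc::AudioDecoder::CodecSupported(webrtc::kDecoderPCMu))
    return -1;
  webrtc::AudioDecoder* decoder =
      webrtc::AudioDecoder::CreateAudioDecoder(webrtc::kDecoderPCMu);
  if (!decoder || decoder->Init() != 0)  // Assumed: 0 means success.
    return -1;
  webrtc::AudioDecoder::SpeechType speech_type;
  int samples = decoder->Decode(payload, payload_len, pcm_out, &speech_type);
  delete decoder;
  return samples;  // Total samples across all channels, per the comment above.
}
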
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/neteq.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/neteq.h
deleted file mode 100644
index 617393093f3..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/neteq.h
+++ /dev/null
@@ -1,267 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_NETEQ_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_NETEQ_H_
-
-#include <string.h> // Provide access to size_t.
-
-#include <vector>
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-struct WebRtcRTPHeader;
-
-struct NetEqNetworkStatistics {
- uint16_t current_buffer_size_ms; // Current jitter buffer size in ms.
- uint16_t preferred_buffer_size_ms; // Target buffer size in ms.
- uint16_t jitter_peaks_found; // 1 if adding extra delay due to peaky
- // jitter; 0 otherwise.
- uint16_t packet_loss_rate; // Loss rate (network + late) in Q14.
- uint16_t packet_discard_rate; // Late loss rate in Q14.
- uint16_t expand_rate; // Fraction (of original stream) of synthesized
- // speech inserted through expansion (in Q14).
- uint16_t preemptive_rate; // Fraction of data inserted through pre-emptive
- // expansion (in Q14).
- uint16_t accelerate_rate; // Fraction of data removed through acceleration
- // (in Q14).
- int32_t clockdrift_ppm; // Average clock-drift in parts-per-million
- // (positive or negative).
- int added_zero_samples; // Number of zero samples added in "off" mode.
-};
-
-enum NetEqOutputType {
- kOutputNormal,
- kOutputPLC,
- kOutputCNG,
- kOutputPLCtoCNG,
- kOutputVADPassive
-};
-
-enum NetEqPlayoutMode {
- kPlayoutOn,
- kPlayoutOff,
- kPlayoutFax,
- kPlayoutStreaming
-};
-
-enum NetEqBackgroundNoiseMode {
- kBgnOn, // Default behavior with eternal noise.
- kBgnFade, // Noise fades to zero after some time.
- kBgnOff // Background noise is always zero.
-};
-
-// This is the interface class for NetEq.
-class NetEq {
- public:
- enum ReturnCodes {
- kOK = 0,
- kFail = -1,
- kNotImplemented = -2
- };
-
- enum ErrorCodes {
- kNoError = 0,
- kOtherError,
- kInvalidRtpPayloadType,
- kUnknownRtpPayloadType,
- kCodecNotSupported,
- kDecoderExists,
- kDecoderNotFound,
- kInvalidSampleRate,
- kInvalidPointer,
- kAccelerateError,
- kPreemptiveExpandError,
- kComfortNoiseErrorCode,
- kDecoderErrorCode,
- kOtherDecoderError,
- kInvalidOperation,
- kDtmfParameterError,
- kDtmfParsingError,
- kDtmfInsertError,
- kStereoNotSupported,
- kSampleUnderrun,
- kDecodedTooMuch,
- kFrameSplitError,
- kRedundancySplitError,
- kPacketBufferCorruption,
- kOversizePacket,
- kSyncPacketNotAccepted
- };
-
- static const int kMaxNumPacketsInBuffer = 240; // TODO(hlundin): Remove.
- static const int kMaxBytesInBuffer = 113280; // TODO(hlundin): Remove.
-
- // Creates a new NetEq object, starting at the sample rate |sample_rate_hz|.
- // (Note that it will still change the sample rate depending on what payloads
- // are being inserted; |sample_rate_hz| is just for startup configuration.)
- static NetEq* Create(int sample_rate_hz);
-
- virtual ~NetEq() {}
-
- // Inserts a new packet into NetEq. The |receive_timestamp| is an indication
- // of the time when the packet was received, and should be measured with
- // the same tick rate as the RTP timestamp of the current payload.
- // Returns 0 on success, -1 on failure.
- virtual int InsertPacket(const WebRtcRTPHeader& rtp_header,
- const uint8_t* payload,
- int length_bytes,
- uint32_t receive_timestamp) = 0;
-
-  // Inserts a sync-packet into the packet queue. Sync-packets are decoded to
-  // silence and are intended to keep AV-sync intact in the event of long packet
-  // losses when Video NACK is enabled but Audio NACK is not. Clients of NetEq
-  // might insert a sync-packet when they observe that the buffer level of NetEq
-  // is dropping below a certain threshold, defined by the application.
-  // Sync-packets should have the same payload type as the last audio payload
-  // type, i.e., they cannot have a DTMF or CNG payload type, nor can a codec
-  // change be implied by inserting a sync-packet.
-  // Returns kOK on success, kFail on failure.
- virtual int InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
- uint32_t receive_timestamp) = 0;
-
- // Instructs NetEq to deliver 10 ms of audio data. The data is written to
- // |output_audio|, which can hold (at least) |max_length| elements.
- // The number of channels that were written to the output is provided in
- // the output variable |num_channels|, and each channel contains
- // |samples_per_channel| elements. If more than one channel is written,
- // the samples are interleaved.
- // The speech type is written to |type|, if |type| is not NULL.
- // Returns kOK on success, or kFail in case of an error.
- virtual int GetAudio(size_t max_length, int16_t* output_audio,
- int* samples_per_channel, int* num_channels,
- NetEqOutputType* type) = 0;
-
- // Associates |rtp_payload_type| with |codec| and stores the information in
- // the codec database. Returns 0 on success, -1 on failure.
- virtual int RegisterPayloadType(enum NetEqDecoder codec,
- uint8_t rtp_payload_type) = 0;
-
- // Provides an externally created decoder object |decoder| to insert in the
- // decoder database. The decoder implements a decoder of type |codec| and
- // associates it with |rtp_payload_type|. The decoder operates at the
- // frequency |sample_rate_hz|. Returns kOK on success, kFail on failure.
- virtual int RegisterExternalDecoder(AudioDecoder* decoder,
- enum NetEqDecoder codec,
- int sample_rate_hz,
- uint8_t rtp_payload_type) = 0;
-
- // Removes |rtp_payload_type| from the codec database. Returns 0 on success,
- // -1 on failure.
- virtual int RemovePayloadType(uint8_t rtp_payload_type) = 0;
-
-  // Sets a minimum delay in milliseconds for the packet buffer. The minimum is
-  // maintained unless a higher latency is dictated by channel conditions.
- // Returns true if the minimum is successfully applied, otherwise false is
- // returned.
- virtual bool SetMinimumDelay(int delay_ms) = 0;
-
-  // Sets a maximum delay in milliseconds for the packet buffer. The latency
-  // will not exceed the given value, even if the delay required by the channel
-  // conditions is higher.
- virtual bool SetMaximumDelay(int delay_ms) = 0;
-
-  // The smallest latency required. This is computed based on inter-arrival
- // time and internal NetEq logic. Note that in computing this latency none of
-  // the user-defined limits (applied by calling SetMinimumDelay() and/or
- // SetMaximumDelay()) are applied.
- virtual int LeastRequiredDelayMs() const = 0;
-
- // Not implemented.
- virtual int SetTargetDelay() = 0;
-
- // Not implemented.
- virtual int TargetDelay() = 0;
-
- // Not implemented.
- virtual int CurrentDelay() = 0;
-
- // Sets the playout mode to |mode|.
- virtual void SetPlayoutMode(NetEqPlayoutMode mode) = 0;
-
- // Returns the current playout mode.
- virtual NetEqPlayoutMode PlayoutMode() const = 0;
-
- // Writes the current network statistics to |stats|. The statistics are reset
- // after the call.
- virtual int NetworkStatistics(NetEqNetworkStatistics* stats) = 0;
-
- // Writes the last packet waiting times (in ms) to |waiting_times|. The number
- // of values written is no more than 100, but may be smaller if the interface
-  // is polled again before 100 packets have arrived.
- virtual void WaitingTimes(std::vector<int>* waiting_times) = 0;
-
- // Writes the current RTCP statistics to |stats|. The statistics are reset
- // and a new report period is started with the call.
- virtual void GetRtcpStatistics(RtcpStatistics* stats) = 0;
-
-  // Same as GetRtcpStatistics(), but does not reset anything.
- virtual void GetRtcpStatisticsNoReset(RtcpStatistics* stats) = 0;
-
- // Enables post-decode VAD. When enabled, GetAudio() will return
- // kOutputVADPassive when the signal contains no speech.
- virtual void EnableVad() = 0;
-
- // Disables post-decode VAD.
- virtual void DisableVad() = 0;
-
- // Returns the RTP timestamp for the last sample delivered by GetAudio().
- virtual uint32_t PlayoutTimestamp() = 0;
-
- // Not implemented.
- virtual int SetTargetNumberOfChannels() = 0;
-
- // Not implemented.
- virtual int SetTargetSampleRate() = 0;
-
-  // Returns the error code for the most recent error. If no error has
- // occurred, 0 is returned.
- virtual int LastError() = 0;
-
- // Returns the error code last returned by a decoder (audio or comfort noise).
- // When LastError() returns kDecoderErrorCode or kComfortNoiseErrorCode, check
- // this method to get the decoder's error code.
- virtual int LastDecoderError() = 0;
-
- // Flushes both the packet buffer and the sync buffer.
- virtual void FlushBuffers() = 0;
-
-  // Current usage of the packet buffer and its limits.
- virtual void PacketBufferStatistics(int* current_num_packets,
- int* max_num_packets,
- int* current_memory_size_bytes,
- int* max_memory_size_bytes) const = 0;
-
-  // Gets the sequence number and timestamp of the latest decoded RTP packet.
-  // This method is intended to facilitate NACK.
- virtual int DecodedRtpInfo(int* sequence_number,
- uint32_t* timestamp) const = 0;
-
- // Sets the background noise mode.
- virtual void SetBackgroundNoiseMode(NetEqBackgroundNoiseMode mode) = 0;
-
- // Gets the background noise mode.
- virtual NetEqBackgroundNoiseMode BackgroundNoiseMode() const = 0;
-
- protected:
- NetEq() {}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(NetEq);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_NETEQ_H_
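
The pull-based shape of the interface above is easiest to see in a short sketch: the application inserts packets as they arrive and calls GetAudio() every 10 ms. The sketch below is illustrative only; the buffer size (10 ms of 48 kHz stereo) and the error handling are assumptions, not code from the tree.

#include <vector>

#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"

// Hypothetical sketch: pull one 10 ms frame from an existing NetEq instance.
void PullTenMilliseconds(webrtc::NetEq* neteq) {
  std::vector<int16_t> audio(2 * 480);  // Assumed worst case: 48 kHz stereo.
  int samples_per_channel = 0;
  int num_channels = 0;
  webrtc::NetEqOutputType type;
  int ret = neteq->GetAudio(audio.size(), &audio[0], &samples_per_channel,
                            &num_channels, &type);
  if (ret != webrtc::NetEq::kOK) {
    // LastError() maps to the ErrorCodes enum declared above.
    int error = neteq->LastError();
    (void)error;  // Placeholder: a real caller would log or handle this.
  }
}
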
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.cc
deleted file mode 100644
index 463b2ca784c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.cc
+++ /dev/null
@@ -1,360 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/merge.h"
-
-#include <assert.h>
-#include <string.h> // memmove, memcpy, memset, size_t
-
-#include <algorithm> // min, max
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-
-namespace webrtc {
-
-int Merge::Process(int16_t* input, size_t input_length,
- int16_t* external_mute_factor_array,
- AudioMultiVector* output) {
- // TODO(hlundin): Change to an enumerator and skip assert.
- assert(fs_hz_ == 8000 || fs_hz_ == 16000 || fs_hz_ == 32000 ||
- fs_hz_ == 48000);
- assert(fs_hz_ <= kMaxSampleRate); // Should not be possible.
-
- int old_length;
- int expand_period;
- // Get expansion data to overlap and mix with.
- int expanded_length = GetExpandedSignal(&old_length, &expand_period);
-
- // Transfer input signal to an AudioMultiVector.
- AudioMultiVector input_vector(num_channels_);
- input_vector.PushBackInterleaved(input, input_length);
- size_t input_length_per_channel = input_vector.Size();
- assert(input_length_per_channel == input_length / num_channels_);
-
- int16_t best_correlation_index = 0;
- size_t output_length = 0;
-
- for (size_t channel = 0; channel < num_channels_; ++channel) {
- int16_t* input_channel = &input_vector[channel][0];
- int16_t* expanded_channel = &expanded_[channel][0];
- int16_t expanded_max, input_max;
- int16_t new_mute_factor = SignalScaling(
- input_channel, static_cast<int>(input_length_per_channel),
- expanded_channel, &expanded_max, &input_max);
-
- // Adjust muting factor (product of "main" muting factor and expand muting
- // factor).
- int16_t* external_mute_factor = &external_mute_factor_array[channel];
- *external_mute_factor =
- (*external_mute_factor * expand_->MuteFactor(channel)) >> 14;
-
- // Update |external_mute_factor| if it is lower than |new_mute_factor|.
- if (new_mute_factor > *external_mute_factor) {
- *external_mute_factor = std::min(new_mute_factor,
- static_cast<int16_t>(16384));
- }
-
- if (channel == 0) {
- // Downsample, correlate, and find strongest correlation period for the
- // master (i.e., first) channel only.
- // Downsample to 4kHz sample rate.
- Downsample(input_channel, static_cast<int>(input_length_per_channel),
- expanded_channel, expanded_length);
-
- // Calculate the lag of the strongest correlation period.
- best_correlation_index = CorrelateAndPeakSearch(
- expanded_max, input_max, old_length,
- static_cast<int>(input_length_per_channel), expand_period);
- }
-
- static const int kTempDataSize = 3600;
- int16_t temp_data[kTempDataSize]; // TODO(hlundin) Remove this.
- int16_t* decoded_output = temp_data + best_correlation_index;
-
- // Mute the new decoded data if needed (and unmute it linearly).
- // This is the overlapping part of expanded_signal.
- int interpolation_length = std::min(
- kMaxCorrelationLength * fs_mult_,
- expanded_length - best_correlation_index);
- interpolation_length = std::min(interpolation_length,
- static_cast<int>(input_length_per_channel));
- if (*external_mute_factor < 16384) {
- // Set a suitable muting slope (Q20). 0.004 for NB, 0.002 for WB,
- // and so on.
- int increment = 4194 / fs_mult_;
- *external_mute_factor = DspHelper::RampSignal(input_channel,
- interpolation_length,
- *external_mute_factor,
- increment);
- DspHelper::UnmuteSignal(&input_channel[interpolation_length],
- input_length_per_channel - interpolation_length,
- external_mute_factor, increment,
- &decoded_output[interpolation_length]);
- } else {
- // No muting needed.
- memmove(
- &decoded_output[interpolation_length],
- &input_channel[interpolation_length],
- sizeof(int16_t) * (input_length_per_channel - interpolation_length));
- }
-
- // Do overlap and mix linearly.
- int increment = 16384 / (interpolation_length + 1); // In Q14.
- int16_t mute_factor = 16384 - increment;
- memmove(temp_data, expanded_channel,
- sizeof(int16_t) * best_correlation_index);
- DspHelper::CrossFade(&expanded_channel[best_correlation_index],
- input_channel, interpolation_length,
- &mute_factor, increment, decoded_output);
-
- output_length = best_correlation_index + input_length_per_channel;
- if (channel == 0) {
- assert(output->Empty()); // Output should be empty at this point.
- output->AssertSize(output_length);
- } else {
- assert(output->Size() == output_length);
- }
- memcpy(&(*output)[channel][0], temp_data,
- sizeof(temp_data[0]) * output_length);
- }
-
- // Copy back the first part of the data to |sync_buffer_| and remove it from
- // |output|.
- sync_buffer_->ReplaceAtIndex(*output, old_length, sync_buffer_->next_index());
- output->PopFront(old_length);
-
-  // Return the newly added length. |old_length| samples were borrowed from
- // |sync_buffer_|.
- return static_cast<int>(output_length) - old_length;
-}
-
-int Merge::GetExpandedSignal(int* old_length, int* expand_period) {
-  // Check how much data is left from earlier.
- *old_length = static_cast<int>(sync_buffer_->FutureLength());
- // Should never be less than overlap_length.
- assert(*old_length >= static_cast<int>(expand_->overlap_length()));
- // Generate data to merge the overlap with using expand.
- expand_->SetParametersForMergeAfterExpand();
-
- if (*old_length >= 210 * kMaxSampleRate / 8000) {
- // TODO(hlundin): Write test case for this.
- // The number of samples available in the sync buffer is more than what fits
- // in expanded_signal. Keep the first 210 * kMaxSampleRate / 8000 samples,
- // but shift them towards the end of the buffer. This is ok, since all of
- // the buffer will be expand data anyway, so as long as the beginning is
- // left untouched, we're fine.
- int16_t length_diff = *old_length - 210 * kMaxSampleRate / 8000;
- sync_buffer_->InsertZerosAtIndex(length_diff, sync_buffer_->next_index());
- *old_length = 210 * kMaxSampleRate / 8000;
- // This is the truncated length.
- }
- // This assert should always be true thanks to the if statement above.
- assert(210 * kMaxSampleRate / 8000 - *old_length >= 0);
-
- AudioMultiVector expanded_temp(num_channels_);
- expand_->Process(&expanded_temp);
- *expand_period = static_cast<int>(expanded_temp.Size()); // Samples per
- // channel.
-
- expanded_.Clear();
-  // Copy what is left from earlier into the expanded vector.
- expanded_.PushBackFromIndex(*sync_buffer_, sync_buffer_->next_index());
- assert(expanded_.Size() == static_cast<size_t>(*old_length));
- assert(expanded_temp.Size() > 0);
- // Do "ugly" copy and paste from the expanded in order to generate more data
- // to correlate (but not interpolate) with.
- const int required_length = (120 + 80 + 2) * fs_mult_;
- if (expanded_.Size() < static_cast<size_t>(required_length)) {
- while (expanded_.Size() < static_cast<size_t>(required_length)) {
- // Append one more pitch period each time.
- expanded_.PushBack(expanded_temp);
- }
- // Trim the length to exactly |required_length|.
- expanded_.PopBack(expanded_.Size() - required_length);
- }
- assert(expanded_.Size() >= static_cast<size_t>(required_length));
- return required_length;
-}
-
-int16_t Merge::SignalScaling(const int16_t* input, int input_length,
- const int16_t* expanded_signal,
- int16_t* expanded_max, int16_t* input_max) const {
-  // Adjust the muting factor if the new vector has more or less energy than
-  // the BGN.
- const int mod_input_length = std::min(64 * fs_mult_, input_length);
- *expanded_max = WebRtcSpl_MaxAbsValueW16(expanded_signal, mod_input_length);
- *input_max = WebRtcSpl_MaxAbsValueW16(input, mod_input_length);
-
- // Calculate energy of expanded signal.
- // |log_fs_mult| is log2(fs_mult_), but is not exact for 48000 Hz.
- int log_fs_mult = 30 - WebRtcSpl_NormW32(fs_mult_);
- int expanded_shift = 6 + log_fs_mult
- - WebRtcSpl_NormW32(*expanded_max * *expanded_max);
- expanded_shift = std::max(expanded_shift, 0);
- int32_t energy_expanded = WebRtcSpl_DotProductWithScale(expanded_signal,
- expanded_signal,
- mod_input_length,
- expanded_shift);
-
- // Calculate energy of input signal.
- int input_shift = 6 + log_fs_mult -
- WebRtcSpl_NormW32(*input_max * *input_max);
- input_shift = std::max(input_shift, 0);
- int32_t energy_input = WebRtcSpl_DotProductWithScale(input, input,
- mod_input_length,
- input_shift);
-
- // Align to the same Q-domain.
- if (input_shift > expanded_shift) {
- energy_expanded = energy_expanded >> (input_shift - expanded_shift);
- } else {
- energy_input = energy_input >> (expanded_shift - input_shift);
- }
-
- // Calculate muting factor to use for new frame.
- int16_t mute_factor;
- if (energy_input > energy_expanded) {
- // Normalize |energy_input| to 14 bits.
- int16_t temp_shift = WebRtcSpl_NormW32(energy_input) - 17;
- energy_input = WEBRTC_SPL_SHIFT_W32(energy_input, temp_shift);
- // Put |energy_expanded| in a domain 14 higher, so that
- // energy_expanded / energy_input is in Q14.
- energy_expanded = WEBRTC_SPL_SHIFT_W32(energy_expanded, temp_shift + 14);
- // Calculate sqrt(energy_expanded / energy_input) in Q14.
- mute_factor = WebRtcSpl_SqrtFloor((energy_expanded / energy_input) << 14);
- } else {
- // Set to 1 (in Q14) when |expanded| has higher energy than |input|.
- mute_factor = 16384;
- }
-
- return mute_factor;
-}
-
-// TODO(hlundin): There are some parameter values in this method that seem
-// strange. Compare with Expand::Correlation.
-void Merge::Downsample(const int16_t* input, int input_length,
- const int16_t* expanded_signal, int expanded_length) {
- const int16_t* filter_coefficients;
- int num_coefficients;
- int decimation_factor = fs_hz_ / 4000;
- static const int kCompensateDelay = 0;
- int length_limit = fs_hz_ / 100;
- if (fs_hz_ == 8000) {
- filter_coefficients = DspHelper::kDownsample8kHzTbl;
- num_coefficients = 3;
- } else if (fs_hz_ == 16000) {
- filter_coefficients = DspHelper::kDownsample16kHzTbl;
- num_coefficients = 5;
- } else if (fs_hz_ == 32000) {
- filter_coefficients = DspHelper::kDownsample32kHzTbl;
- num_coefficients = 7;
- } else { // fs_hz_ == 48000
- filter_coefficients = DspHelper::kDownsample48kHzTbl;
- num_coefficients = 7;
- // TODO(hlundin) Why is |length_limit| not 480 (legacy)?
- length_limit = 320;
- }
- int signal_offset = num_coefficients - 1;
- WebRtcSpl_DownsampleFast(&expanded_signal[signal_offset],
- expanded_length - signal_offset,
- expanded_downsampled_, kExpandDownsampLength,
- filter_coefficients, num_coefficients,
- decimation_factor, kCompensateDelay);
- if (input_length <= length_limit) {
- // Not quite long enough, so we have to cheat a bit.
- int16_t temp_len = input_length - signal_offset;
- // TODO(hlundin): Should |downsamp_temp_len| be corrected for round-off
- // errors? I.e., (temp_len + decimation_factor - 1) / decimation_factor?
- int16_t downsamp_temp_len = temp_len / decimation_factor;
- WebRtcSpl_DownsampleFast(&input[signal_offset], temp_len,
- input_downsampled_, downsamp_temp_len,
- filter_coefficients, num_coefficients,
- decimation_factor, kCompensateDelay);
- memset(&input_downsampled_[downsamp_temp_len], 0,
- sizeof(int16_t) * (kInputDownsampLength - downsamp_temp_len));
- } else {
- WebRtcSpl_DownsampleFast(&input[signal_offset],
- input_length - signal_offset, input_downsampled_,
- kInputDownsampLength, filter_coefficients,
- num_coefficients, decimation_factor,
- kCompensateDelay);
- }
-}
-
-int16_t Merge::CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
- int start_position, int input_length,
- int expand_period) const {
- // Calculate correlation without any normalization.
- const int max_corr_length = kMaxCorrelationLength;
- int stop_position_downsamp = std::min(
- max_corr_length, expand_->max_lag() / (fs_mult_ * 2) + 1);
- int16_t correlation_shift = 0;
- if (expanded_max * input_max > 26843546) {
- correlation_shift = 3;
- }
-
- int32_t correlation[kMaxCorrelationLength];
- WebRtcSpl_CrossCorrelation(correlation, input_downsampled_,
- expanded_downsampled_, kInputDownsampLength,
- stop_position_downsamp, correlation_shift, 1);
-
- // Normalize correlation to 14 bits and copy to a 16-bit array.
- static const int kPadLength = 4;
- int16_t correlation16[kPadLength + kMaxCorrelationLength + kPadLength] = {0};
- int16_t* correlation_ptr = &correlation16[kPadLength];
- int32_t max_correlation = WebRtcSpl_MaxAbsValueW32(correlation,
- stop_position_downsamp);
- int16_t norm_shift = std::max(0, 17 - WebRtcSpl_NormW32(max_correlation));
- WebRtcSpl_VectorBitShiftW32ToW16(correlation_ptr, stop_position_downsamp,
- correlation, norm_shift);
-
-  // Calculate the allowed starting point for peak finding.
-  // The peak location best_correlation_index must fulfill two criteria:
-  // (1) best_correlation_index + input_length <
-  //     timestamps_per_call_ + expand_->overlap_length();
-  // (2) best_correlation_index + input_length < start_position.
- int start_index = timestamps_per_call_ +
- static_cast<int>(expand_->overlap_length());
- start_index = std::max(start_position, start_index);
- start_index = std::max(start_index - input_length, 0);
- // Downscale starting index to 4kHz domain. (fs_mult_ * 2 = fs_hz_ / 4000.)
- int start_index_downsamp = start_index / (fs_mult_ * 2);
-
- // Calculate a modified |stop_position_downsamp| to account for the increased
- // start index |start_index_downsamp| and the effective array length.
- int modified_stop_pos =
- std::min(stop_position_downsamp,
- kMaxCorrelationLength + kPadLength - start_index_downsamp);
- int best_correlation_index;
- int16_t best_correlation;
- static const int kNumCorrelationCandidates = 1;
- DspHelper::PeakDetection(&correlation_ptr[start_index_downsamp],
- modified_stop_pos, kNumCorrelationCandidates,
- fs_mult_, &best_correlation_index,
- &best_correlation);
- // Compensate for modified start index.
- best_correlation_index += start_index;
-
-  // Ensure that underrun does not occur for the 10 ms case, i.e., we must get
-  // at least 10 ms + overlap. (This should never happen thanks to the above
-  // modification of the peak-finding starting point.)
- while ((best_correlation_index + input_length) <
- static_cast<int>(timestamps_per_call_ + expand_->overlap_length()) ||
- best_correlation_index + input_length < start_position) {
- assert(false); // Should never happen.
- best_correlation_index += expand_period; // Jump one lag ahead.
- }
- return best_correlation_index;
-}
-
-} // namespace webrtc
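
The overlap-add in Merge::Process relies on a Q14 linear cross-fade with increment 16384 / (interpolation_length + 1). The plain-loop restatement below is only a reading aid: the rounding constant and the function name are assumptions, and the real code delegates this work to DspHelper::CrossFade and DspHelper::UnmuteSignal.

#include <cstddef>
#include <cstdint>

// Hypothetical sketch of a Q14 linear cross-fade (16384 == 1.0).
void LinearCrossFadeQ14(const int16_t* fade_out, const int16_t* fade_in,
                        size_t length, int16_t* output) {
  int32_t increment = 16384 / (static_cast<int32_t>(length) + 1);  // Q14 step.
  int32_t mix = 16384 - increment;  // Start close to the fade-out signal.
  for (size_t i = 0; i < length; ++i) {
    // Weighted sum in Q14, with rounding before the final shift (assumed).
    output[i] = static_cast<int16_t>(
        (fade_out[i] * mix + fade_in[i] * (16384 - mix) + 8192) >> 14);
    mix -= increment;
  }
}
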
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.h
deleted file mode 100644
index f1f64e6c538..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.h
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MERGE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MERGE_H_
-
-#include <assert.h>
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class Expand;
-class SyncBuffer;
-
-// This class handles the transition from expansion to normal operation.
-// When a packet is not available for decoding when needed, the expand operation
-// is called to generate extrapolation data. If the missing packet arrives,
-// i.e., it was just delayed, it can be decoded and appended directly to the
-// end of the expanded data (thanks to how the Expand class operates). However,
-// if a later packet arrives instead, the loss is a fact, and the new data must
-// be stitched together with the end of the expanded data. This stitching is
-// what the Merge class does.
-class Merge {
- public:
- Merge(int fs_hz, size_t num_channels, Expand* expand, SyncBuffer* sync_buffer)
- : fs_hz_(fs_hz),
- fs_mult_(fs_hz_ / 8000),
- num_channels_(num_channels),
- timestamps_per_call_(fs_hz_ / 100),
- expand_(expand),
- sync_buffer_(sync_buffer),
- expanded_(num_channels_) {
- assert(num_channels_ > 0);
- }
-
- // The main method to produce the audio data. The decoded data is supplied in
- // |input|, having |input_length| samples in total for all channels
- // (interleaved). The result is written to |output|. The number of channels
- // allocated in |output| defines the number of channels that will be used when
- // de-interleaving |input|. The values in |external_mute_factor_array| (Q14)
- // will be used to scale the audio, and is updated in the process. The array
- // must have |num_channels_| elements.
- int Process(int16_t* input, size_t input_length,
- int16_t* external_mute_factor_array,
- AudioMultiVector* output);
-
- private:
- static const int kMaxSampleRate = 48000;
- static const int kExpandDownsampLength = 100;
- static const int kInputDownsampLength = 40;
- static const int kMaxCorrelationLength = 60;
-
- // Calls |expand_| to get more expansion data to merge with. The data is
-  // written to |expanded_|. Returns the length of the expanded data,
- // while |expand_period| will be the number of samples in one expansion period
- // (typically one pitch period). The value of |old_length| will be the number
- // of samples that were taken from the |sync_buffer_|.
- int GetExpandedSignal(int* old_length, int* expand_period);
-
- // Analyzes |input| and |expanded_signal| to find maximum values. Returns
- // a muting factor (Q14) to be used on the new data.
- int16_t SignalScaling(const int16_t* input, int input_length,
- const int16_t* expanded_signal,
- int16_t* expanded_max, int16_t* input_max) const;
-
- // Downsamples |input| (|input_length| samples) and |expanded_signal| to
- // 4 kHz sample rate. The downsampled signals are written to
- // |input_downsampled_| and |expanded_downsampled_|, respectively.
- void Downsample(const int16_t* input, int input_length,
- const int16_t* expanded_signal, int expanded_length);
-
- // Calculates cross-correlation between |input_downsampled_| and
- // |expanded_downsampled_|, and finds the correlation maximum. The maximizing
- // lag is returned.
- int16_t CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
- int start_position, int input_length,
- int expand_period) const;
-
- const int fs_hz_;
- const int fs_mult_; // fs_hz_ / 8000.
- const size_t num_channels_;
- const int timestamps_per_call_;
- Expand* expand_;
- SyncBuffer* sync_buffer_;
- int16_t expanded_downsampled_[kExpandDownsampLength];
- int16_t input_downsampled_[kInputDownsampLength];
- AudioMultiVector expanded_;
-
- DISALLOW_COPY_AND_ASSIGN(Merge);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MERGE_H_
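
SignalScaling(), declared above and implemented in merge.cc earlier in this patch, returns sqrt(energy_expanded / energy_input) in Q14, never exceeding 1.0, using fixed-point shifts. A floating-point reference (not from the tree; the name and types are illustrative) makes the intent clearer:

#include <algorithm>
#include <cmath>
#include <cstdint>

// Hypothetical floating-point reference for the Q14 muting factor.
int16_t MuteFactorQ14(double energy_expanded, double energy_input) {
  if (energy_input <= energy_expanded)
    return 16384;  // 1.0 in Q14: the expanded signal already dominates.
  double ratio = std::sqrt(energy_expanded / energy_input);
  return static_cast<int16_t>(std::min(16384.0, ratio * 16384.0));
}
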
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge_unittest.cc
deleted file mode 100644
index 1d7b1f1fed1..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge_unittest.cc
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for Merge class.
-
-#include "webrtc/modules/audio_coding/neteq4/merge.h"
-
-#include <vector>
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-
-namespace webrtc {
-
-TEST(Merge, CreateAndDestroy) {
- int fs = 8000;
- size_t channels = 1;
- BackgroundNoise bgn(channels);
- SyncBuffer sync_buffer(1, 1000);
- RandomVector random_vector;
- Expand expand(&bgn, &sync_buffer, &random_vector, fs, channels);
- Merge merge(fs, channels, &expand, &sync_buffer);
-}
-
-// TODO(hlundin): Write more tests.
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h
deleted file mode 100644
index a6d587447d6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_DECODER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_DECODER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-
-#include "gmock/gmock.h"
-
-namespace webrtc {
-
-class MockAudioDecoder : public AudioDecoder {
- public:
- MockAudioDecoder() : AudioDecoder(kDecoderArbitrary) {}
- virtual ~MockAudioDecoder() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD4(Decode, int(const uint8_t*, size_t, int16_t*,
- AudioDecoder::SpeechType*));
- MOCK_CONST_METHOD0(HasDecodePlc, bool());
- MOCK_METHOD2(DecodePlc, int(int, int16_t*));
- MOCK_METHOD0(Init, int());
- MOCK_METHOD5(IncomingPacket, int(const uint8_t*, size_t, uint16_t, uint32_t,
- uint32_t));
- MOCK_METHOD0(ErrorCode, int());
- MOCK_CONST_METHOD0(codec_type, NetEqDecoder());
- MOCK_METHOD1(CodecSupported, bool(NetEqDecoder));
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_DECODER_H_
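
The mock above is meant to be driven with standard gmock expectations. The snippet below is a hypothetical usage example, not one of the deleted tests; the expected return value is a placeholder.

#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h"

using ::testing::Return;

// Hypothetical test showing how the mock's methods are stubbed.
TEST(MockAudioDecoderUsage, InitIsStubbed) {
  webrtc::MockAudioDecoder decoder;
  EXPECT_CALL(decoder, Die());  // Called from the mock's destructor.
  EXPECT_CALL(decoder, Init()).WillOnce(Return(0));
  EXPECT_EQ(0, decoder.Init());
}
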
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_vector.h
deleted file mode 100644
index 7a4747b0d31..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_vector.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_VECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_VECTOR_H_
-
-#include "webrtc/modules/audio_coding/neteq4/audio_vector.h"
-
-#include "gmock/gmock.h"
-
-namespace webrtc {
-
-class MockAudioVector : public AudioVector {
- public:
- MOCK_METHOD0(Clear,
- void());
- MOCK_CONST_METHOD1(CopyFrom,
- void(AudioVector<T>* copy_to));
- MOCK_METHOD1(PushFront,
- void(const AudioVector<T>& prepend_this));
- MOCK_METHOD2(PushFront,
- void(const T* prepend_this, size_t length));
- MOCK_METHOD1(PushBack,
- void(const AudioVector<T>& append_this));
- MOCK_METHOD2(PushBack,
- void(const T* append_this, size_t length));
- MOCK_METHOD1(PopFront,
- void(size_t length));
- MOCK_METHOD1(PopBack,
- void(size_t length));
- MOCK_METHOD1(Extend,
- void(size_t extra_length));
- MOCK_METHOD3(InsertAt,
- void(const T* insert_this, size_t length, size_t position));
- MOCK_METHOD3(OverwriteAt,
- void(const T* insert_this, size_t length, size_t position));
- MOCK_CONST_METHOD0(Size,
- size_t());
- MOCK_CONST_METHOD0(Empty,
- bool());
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_VECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_buffer_level_filter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_buffer_level_filter.h
deleted file mode 100644
index 87265517290..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_buffer_level_filter.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-
-#include "gmock/gmock.h"
-
-namespace webrtc {
-
-class MockBufferLevelFilter : public BufferLevelFilter {
- public:
- virtual ~MockBufferLevelFilter() { Die(); }
- MOCK_METHOD0(Die,
- void());
- MOCK_METHOD0(Reset,
- void());
- MOCK_METHOD3(Update,
- void(int buffer_size_packets, int time_stretched_samples,
- int packet_len_samples));
- MOCK_METHOD1(SetTargetBufferLevel,
- void(int target_buffer_level));
- MOCK_CONST_METHOD0(filtered_current_level,
- int());
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h
deleted file mode 100644
index c4ca25a5274..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DECODER_DATABASE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DECODER_DATABASE_H_
-
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-
-#include "gmock/gmock.h"
-
-namespace webrtc {
-
-class MockDecoderDatabase : public DecoderDatabase {
- public:
- virtual ~MockDecoderDatabase() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_CONST_METHOD0(Empty,
- bool());
- MOCK_CONST_METHOD0(Size,
- int());
- MOCK_METHOD0(Reset,
- void());
- MOCK_METHOD2(RegisterPayload,
- int(uint8_t rtp_payload_type, NetEqDecoder codec_type));
- MOCK_METHOD4(InsertExternal,
- int(uint8_t rtp_payload_type, NetEqDecoder codec_type, int fs_hz,
- AudioDecoder* decoder));
- MOCK_METHOD1(Remove,
- int(uint8_t rtp_payload_type));
- MOCK_CONST_METHOD1(GetDecoderInfo,
- const DecoderInfo*(uint8_t rtp_payload_type));
- MOCK_CONST_METHOD1(GetRtpPayloadType,
- uint8_t(NetEqDecoder codec_type));
- MOCK_METHOD1(GetDecoder,
- AudioDecoder*(uint8_t rtp_payload_type));
- MOCK_CONST_METHOD2(IsType,
- bool(uint8_t rtp_payload_type, NetEqDecoder codec_type));
- MOCK_CONST_METHOD1(IsComfortNoise,
- bool(uint8_t rtp_payload_type));
- MOCK_CONST_METHOD1(IsDtmf,
- bool(uint8_t rtp_payload_type));
- MOCK_CONST_METHOD1(IsRed,
- bool(uint8_t rtp_payload_type));
- MOCK_METHOD2(SetActiveDecoder,
- int(uint8_t rtp_payload_type, bool* new_decoder));
- MOCK_METHOD0(GetActiveDecoder,
- AudioDecoder*());
- MOCK_METHOD1(SetActiveCngDecoder,
- int(uint8_t rtp_payload_type));
- MOCK_METHOD0(GetActiveCngDecoder,
- AudioDecoder*());
- MOCK_CONST_METHOD1(CheckPayloadTypes,
- int(const PacketList& packet_list));
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DECODER_DATABASE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_manager.h
deleted file mode 100644
index 1edfb873716..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_manager.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_MANAGER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_MANAGER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-
-#include "gmock/gmock.h"
-
-namespace webrtc {
-
-class MockDelayManager : public DelayManager {
- public:
- MockDelayManager(int max_packets_in_buffer, DelayPeakDetector* peak_detector)
- : DelayManager(max_packets_in_buffer, peak_detector) {}
- virtual ~MockDelayManager() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_CONST_METHOD0(iat_vector,
- const IATVector&());
- MOCK_METHOD3(Update,
- int(uint16_t sequence_number, uint32_t timestamp, int sample_rate_hz));
- MOCK_METHOD1(CalculateTargetLevel,
- int(int iat_packets));
- MOCK_METHOD1(SetPacketAudioLength,
- int(int length_ms));
- MOCK_METHOD0(Reset,
- void());
- MOCK_CONST_METHOD0(AverageIAT,
- int());
- MOCK_CONST_METHOD0(PeakFound,
- bool());
- MOCK_METHOD1(UpdateCounters,
- void(int elapsed_time_ms));
- MOCK_METHOD0(ResetPacketIatCount,
- void());
- MOCK_CONST_METHOD2(BufferLimits,
- void(int* lower_limit, int* higher_limit));
- MOCK_CONST_METHOD0(TargetLevel,
- int());
- MOCK_METHOD1(LastDecoderType,
- void(NetEqDecoder decoder_type));
- MOCK_METHOD1(set_extra_delay_ms,
- void(int16_t delay));
- MOCK_CONST_METHOD0(base_target_level,
- int());
- MOCK_METHOD1(set_streaming_mode,
- void(bool value));
- MOCK_CONST_METHOD0(last_pack_cng_or_dtmf,
- int());
- MOCK_METHOD1(set_last_pack_cng_or_dtmf,
- void(int value));
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_MANAGER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h
deleted file mode 100644
index 211b2b91e12..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
-
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
-
-#include "gmock/gmock.h"
-
-namespace webrtc {
-
-class MockDelayPeakDetector : public DelayPeakDetector {
- public:
- virtual ~MockDelayPeakDetector() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD1(SetPacketAudioLength, void(int length_ms));
- MOCK_METHOD0(peak_found, bool());
- MOCK_CONST_METHOD0(MaxPeakHeight, int());
- MOCK_CONST_METHOD0(MaxPeakPeriod, int());
- MOCK_METHOD2(Update, bool(int inter_arrival_time, int target_level));
- MOCK_METHOD1(IncrementCounter, void(int inc_ms));
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_buffer.h
deleted file mode 100644
index 5a89db46f68..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_buffer.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_BUFFER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"
-
-#include "gmock/gmock.h"
-
-namespace webrtc {
-
-class MockDtmfBuffer : public DtmfBuffer {
- public:
- MockDtmfBuffer(int fs) : DtmfBuffer(fs) {}
- virtual ~MockDtmfBuffer() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Flush,
- void());
- MOCK_METHOD1(InsertEvent,
- int(const DtmfEvent& event));
- MOCK_METHOD2(GetEvent,
- bool(uint32_t current_timestamp, DtmfEvent* event));
- MOCK_CONST_METHOD0(Length,
- size_t());
- MOCK_CONST_METHOD0(Empty,
- bool());
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_BUFFER_H_
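MockDtmfBuffer above, like most of these mocks, uses the Die() idiom: the destructor calls a mocked Die() method, so a test can assert whether, and how many times, the object gets destroyed. The external-decoder test later in this change relies on exactly this to verify that NetEq never deletes an externally owned decoder. A minimal sketch of the idiom with a hypothetical MockWidget (the ownership comments describe the sketch, not the real code):

    #include "gmock/gmock.h"

    class MockWidget {
     public:
      virtual ~MockWidget() { Die(); }
      MOCK_METHOD0(Die, void());
    };

    // Typical use in a test body:
    //   MockWidget* widget = new MockWidget;
    //   EXPECT_CALL(*widget, Die()).Times(0);  // Component under test must not delete it.
    //   component.Use(widget);
    //   ...
    //   EXPECT_CALL(*widget, Die()).Times(1);  // Now we delete it ourselves.
    //   delete widget;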
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h
deleted file mode 100644
index d34f7470ef2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
-
-#include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h"
-
-#include "gmock/gmock.h"
-
-namespace webrtc {
-
-class MockDtmfToneGenerator : public DtmfToneGenerator {
- public:
- virtual ~MockDtmfToneGenerator() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD3(Init,
- int(int fs, int event, int attenuation));
- MOCK_METHOD0(Reset,
- void());
- MOCK_METHOD2(Generate,
- int(int num_samples, AudioMultiVector* output));
- MOCK_CONST_METHOD0(initialized,
- bool());
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h
deleted file mode 100644
index efc0c715837..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
-
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-
-#include "gmock/gmock.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-using ::testing::_;
-using ::testing::Invoke;
-
-// Implement an external version of the PCM16b decoder. This is a copy from
-// audio_decoder_impl.{cc, h}.
-class ExternalPcm16B : public AudioDecoder {
- public:
- explicit ExternalPcm16B(enum NetEqDecoder type)
- : AudioDecoder(type) {
- }
-
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type;
- int16_t ret = WebRtcPcm16b_DecodeW16(
- state_, reinterpret_cast<int16_t*>(const_cast<uint8_t*>(encoded)),
- static_cast<int16_t>(encoded_len), decoded, &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
- }
-
- virtual int Init() { return 0; }
-
- private:
- DISALLOW_COPY_AND_ASSIGN(ExternalPcm16B);
-};
-
-// Create a mock of ExternalPcm16B which delegates all calls to the real object.
-// This lets us verify that the correct calls are made, while keeping the real
-// decoding behavior.
-class MockExternalPcm16B : public ExternalPcm16B {
- public:
- explicit MockExternalPcm16B(enum NetEqDecoder type)
- : ExternalPcm16B(type),
- real_(type) {
- // By default, all calls are delegated to the real object.
- ON_CALL(*this, Decode(_, _, _, _))
- .WillByDefault(Invoke(&real_, &ExternalPcm16B::Decode));
- ON_CALL(*this, HasDecodePlc())
- .WillByDefault(Invoke(&real_, &ExternalPcm16B::HasDecodePlc));
- ON_CALL(*this, DecodePlc(_, _))
- .WillByDefault(Invoke(&real_, &ExternalPcm16B::DecodePlc));
- ON_CALL(*this, Init())
- .WillByDefault(Invoke(&real_, &ExternalPcm16B::Init));
- ON_CALL(*this, IncomingPacket(_, _, _, _, _))
- .WillByDefault(Invoke(&real_, &ExternalPcm16B::IncomingPacket));
- ON_CALL(*this, ErrorCode())
- .WillByDefault(Invoke(&real_, &ExternalPcm16B::ErrorCode));
- ON_CALL(*this, codec_type())
- .WillByDefault(Invoke(&real_, &ExternalPcm16B::codec_type));
- }
- virtual ~MockExternalPcm16B() { Die(); }
-
- MOCK_METHOD0(Die, void());
- MOCK_METHOD4(Decode,
- int(const uint8_t* encoded, size_t encoded_len, int16_t* decoded,
- SpeechType* speech_type));
- MOCK_CONST_METHOD0(HasDecodePlc,
- bool());
- MOCK_METHOD2(DecodePlc,
- int(int num_frames, int16_t* decoded));
- MOCK_METHOD0(Init,
- int());
- MOCK_METHOD5(IncomingPacket,
- int(const uint8_t* payload, size_t payload_len,
- uint16_t rtp_sequence_number, uint32_t rtp_timestamp,
- uint32_t arrival_timestamp));
- MOCK_METHOD0(ErrorCode,
- int());
- MOCK_CONST_METHOD0(codec_type,
- NetEqDecoder());
-
- private:
- ExternalPcm16B real_;
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
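MockExternalPcm16B above is a delegating mock: each mocked method forwards to a real ExternalPcm16B by default via ON_CALL + Invoke, so a test can place EXPECT_CALL expectations on it while still getting real decoding behavior. A stripped-down sketch of the same pattern with a hypothetical Codec interface (not one of the NetEq types):

    #include <stddef.h>
    #include <stdint.h>
    #include "gmock/gmock.h"

    using ::testing::_;
    using ::testing::Invoke;

    class Codec {
     public:
      virtual ~Codec() {}
      virtual int Decode(const uint8_t* encoded, size_t encoded_len) = 0;
    };

    class RealCodec : public Codec {
     public:
      virtual int Decode(const uint8_t* encoded, size_t encoded_len) {
        return static_cast<int>(encoded_len);  // Stand-in for real decoding work.
      }
    };

    class MockCodec : public Codec {
     public:
      MockCodec() {
        // By default, delegate every call to the wrapped real implementation.
        ON_CALL(*this, Decode(_, _))
            .WillByDefault(Invoke(&real_, &RealCodec::Decode));
      }
      MOCK_METHOD2(Decode, int(const uint8_t* encoded, size_t encoded_len));

     private:
      RealCodec real_;
    };

    // EXPECT_CALL(mock, Decode(_, _)).Times(1) then verifies the call count,
    // while the default action still runs RealCodec::Decode.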
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_packet_buffer.h
deleted file mode 100644
index 37fa90de737..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_packet_buffer.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PACKET_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PACKET_BUFFER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-
-#include "gmock/gmock.h"
-
-namespace webrtc {
-
-class MockPacketBuffer : public PacketBuffer {
- public:
- MockPacketBuffer(size_t max_number_of_packets, size_t max_payload_memory)
- : PacketBuffer(max_number_of_packets, max_payload_memory) {}
- virtual ~MockPacketBuffer() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Flush,
- void());
- MOCK_CONST_METHOD0(Empty,
- bool());
- MOCK_METHOD1(InsertPacket,
- int(Packet* packet));
- MOCK_METHOD4(InsertPacketList,
- int(PacketList* packet_list,
- const DecoderDatabase& decoder_database,
- uint8_t* current_rtp_payload_type,
- uint8_t* current_cng_rtp_payload_type));
- MOCK_CONST_METHOD1(NextTimestamp,
- int(uint32_t* next_timestamp));
- MOCK_CONST_METHOD2(NextHigherTimestamp,
- int(uint32_t timestamp, uint32_t* next_timestamp));
- MOCK_CONST_METHOD0(NextRtpHeader,
- const RTPHeader*());
- MOCK_METHOD1(GetNextPacket,
- Packet*(int* discard_count));
- MOCK_METHOD0(DiscardNextPacket,
- int());
- MOCK_METHOD1(DiscardOldPackets,
- int(uint32_t timestamp_limit));
- MOCK_CONST_METHOD0(NumPacketsInBuffer,
- int());
- MOCK_METHOD1(IncrementWaitingTimes,
- void(int));
- MOCK_CONST_METHOD0(current_memory_bytes,
- int());
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PACKET_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_payload_splitter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_payload_splitter.h
deleted file mode 100644
index f3d8c9b048d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_payload_splitter.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PAYLOAD_SPLITTER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PAYLOAD_SPLITTER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"
-
-#include "gmock/gmock.h"
-
-namespace webrtc {
-
-class MockPayloadSplitter : public PayloadSplitter {
- public:
- MOCK_METHOD1(SplitRed,
- int(PacketList* packet_list));
- MOCK_METHOD2(CheckRedPayloads,
- int(PacketList* packet_list, const DecoderDatabase& decoder_database));
- MOCK_METHOD2(SplitAudio,
- int(PacketList* packet_list, const DecoderDatabase& decoder_database));
- MOCK_METHOD4(SplitBySamples,
- void(const Packet* packet, int bytes_per_ms, int timestamps_per_ms,
- PacketList* new_packets));
- MOCK_METHOD4(SplitByFrames,
- int(const Packet* packet, int bytes_per_frame, int timestamps_per_frame,
- PacketList* new_packets));
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PAYLOAD_SPLITTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.cc
deleted file mode 100644
index 1ec71a2a6fb..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.cc
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
-#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h"
-#include "webrtc/modules/audio_coding/neteq4/neteq_impl.h"
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"
-#include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h"
-
-namespace webrtc {
-
-// Creates all classes needed and injects them into a new NetEqImpl object.
-// Returns the new object.
-NetEq* NetEq::Create(int sample_rate_hz) {
- BufferLevelFilter* buffer_level_filter = new BufferLevelFilter;
- DecoderDatabase* decoder_database = new DecoderDatabase;
- DelayPeakDetector* delay_peak_detector = new DelayPeakDetector;
- DelayManager* delay_manager = new DelayManager(kMaxNumPacketsInBuffer,
- delay_peak_detector);
- DtmfBuffer* dtmf_buffer = new DtmfBuffer(sample_rate_hz);
- DtmfToneGenerator* dtmf_tone_generator = new DtmfToneGenerator;
- PacketBuffer* packet_buffer = new PacketBuffer(kMaxNumPacketsInBuffer,
- kMaxBytesInBuffer);
- PayloadSplitter* payload_splitter = new PayloadSplitter;
- TimestampScaler* timestamp_scaler = new TimestampScaler(*decoder_database);
- return new NetEqImpl(sample_rate_hz,
- buffer_level_filter,
- decoder_database,
- delay_manager,
- delay_peak_detector,
- dtmf_buffer,
- dtmf_tone_generator,
- packet_buffer,
- payload_splitter,
- timestamp_scaler);
-}
-
-} // namespace webrtc
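NetEq::Create() above is a plain factory: it news up every collaborator and injects them into NetEqImpl, and the returned object is owned by the caller, which must delete it (as the unit tests in this change do). A rough usage sketch of the public API as it appears in this tree, with illustrative constants and no error handling:

    #include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
    #include "webrtc/modules/interface/module_common_types.h"

    namespace {
    const int kSampleRateHz = 32000;  // Illustrative choice.
    const uint8_t kPayloadType = 95;  // Illustrative choice.
    const int kMaxBlockSize = 480;    // 10 ms @ 48 kHz, as in the tests below.
    }  // namespace

    void SketchedPlayout(const WebRtcRTPHeader& rtp_header,
                         const uint8_t* payload, int payload_len,
                         uint32_t arrival_time) {
      webrtc::NetEq* neteq = webrtc::NetEq::Create(kSampleRateHz);
      neteq->RegisterPayloadType(webrtc::kDecoderPCM16Bswb32kHz, kPayloadType);

      // Feed one RTP packet...
      neteq->InsertPacket(rtp_header, payload, payload_len, arrival_time);

      // ...and pull one 10 ms block of decoded audio.
      int16_t output[kMaxBlockSize];
      int samples_per_channel = 0;
      int num_channels = 0;
      webrtc::NetEqOutputType type;
      neteq->GetAudio(kMaxBlockSize, output, &samples_per_channel,
                      &num_channels, &type);

      delete neteq;  // The caller owns the object returned by Create().
    }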
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.gypi
deleted file mode 100644
index 41fdb31ea6b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.gypi
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'variables': {
- 'neteq_dependencies': [
- 'G711',
- 'G722',
- 'PCM16B',
- 'iLBC',
- 'iSAC',
- 'iSACFix',
- 'CNG',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
- '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
- ],
- 'neteq_defines': [],
- 'conditions': [
- ['include_opus==1', {
- 'neteq_dependencies': ['webrtc_opus',],
- 'neteq_defines': ['WEBRTC_CODEC_OPUS',],
- }],
- ],
- },
- 'targets': [
- {
- 'target_name': 'NetEq4',
- 'type': 'static_library',
- 'dependencies': [
- '<@(neteq_dependencies)',
- ],
- 'defines': [
- '<@(neteq_defines)',
- ],
- 'include_dirs': [
- 'interface',
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'interface',
- '<(webrtc_root)',
- ],
- },
- 'sources': [
- 'interface/audio_decoder.h',
- 'interface/neteq.h',
- 'accelerate.cc',
- 'accelerate.h',
- 'audio_decoder_impl.cc',
- 'audio_decoder_impl.h',
- 'audio_decoder.cc',
- 'audio_multi_vector.cc',
- 'audio_multi_vector.h',
- 'audio_vector.cc',
- 'audio_vector.h',
- 'background_noise.cc',
- 'background_noise.h',
- 'buffer_level_filter.cc',
- 'buffer_level_filter.h',
- 'comfort_noise.cc',
- 'comfort_noise.h',
- 'decision_logic.cc',
- 'decision_logic.h',
- 'decision_logic_fax.cc',
- 'decision_logic_fax.h',
- 'decision_logic_normal.cc',
- 'decision_logic_normal.h',
- 'decoder_database.cc',
- 'decoder_database.h',
- 'defines.h',
- 'delay_manager.cc',
- 'delay_manager.h',
- 'delay_peak_detector.cc',
- 'delay_peak_detector.h',
- 'dsp_helper.cc',
- 'dsp_helper.h',
- 'dtmf_buffer.cc',
- 'dtmf_buffer.h',
- 'dtmf_tone_generator.cc',
- 'dtmf_tone_generator.h',
- 'expand.cc',
- 'expand.h',
- 'merge.cc',
- 'merge.h',
- 'neteq_impl.cc',
- 'neteq_impl.h',
- 'neteq.cc',
- 'statistics_calculator.cc',
- 'statistics_calculator.h',
- 'normal.cc',
- 'normal.h',
- 'packet_buffer.cc',
- 'packet_buffer.h',
- 'payload_splitter.cc',
- 'payload_splitter.h',
- 'post_decode_vad.cc',
- 'post_decode_vad.h',
- 'preemptive_expand.cc',
- 'preemptive_expand.h',
- 'random_vector.cc',
- 'random_vector.h',
- 'rtcp.cc',
- 'rtcp.h',
- 'sync_buffer.cc',
- 'sync_buffer.h',
- 'timestamp_scaler.cc',
- 'timestamp_scaler.h',
- 'time_stretch.cc',
- 'time_stretch.h',
- ],
- },
- ], # targets
- 'conditions': [
- ['include_tests==1', {
- 'includes': ['neteq_tests.gypi',],
- 'targets': [
- {
- 'target_name': 'audio_decoder_unittests',
- 'type': '<(gtest_target_type)',
- 'dependencies': [
- '<@(neteq_dependencies)',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
- '<(webrtc_root)/test/test.gyp:test_support_main',
- ],
- 'defines': [
- 'AUDIO_DECODER_UNITTEST',
- 'WEBRTC_CODEC_G722',
- 'WEBRTC_CODEC_ILBC',
- 'WEBRTC_CODEC_ISACFX',
- 'WEBRTC_CODEC_ISAC',
- 'WEBRTC_CODEC_PCM16',
- '<@(neteq_defines)',
- ],
- 'sources': [
- 'audio_decoder_impl.cc',
- 'audio_decoder_impl.h',
- 'audio_decoder_unittest.cc',
- 'audio_decoder.cc',
- 'interface/audio_decoder.h',
- ],
- 'conditions': [
- # TODO(henrike): remove build_with_chromium==1 when the bots are
- # using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
- 'dependencies': [
- '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
- ],
- }],
- ],
- }, # audio_decoder_unittests
-
- {
- 'target_name': 'neteq_unittest_tools',
- 'type': 'static_library',
- 'dependencies': [
- '<(DEPTH)/testing/gmock.gyp:gmock',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/test/test.gyp:test_support_main',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'tools',
- ],
- },
- 'include_dirs': [
- 'tools',
- ],
- 'sources': [
- 'tools/audio_loop.cc',
- 'tools/audio_loop.h',
- 'tools/input_audio_file.cc',
- 'tools/input_audio_file.h',
- 'tools/rtp_generator.cc',
- 'tools/rtp_generator.h',
- ],
- }, # neteq_unittest_tools
- ], # targets
- 'conditions': [
- # TODO(henrike): remove build_with_chromium==1 when the bots are using
- # Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
- 'targets': [
- {
- 'target_name': 'audio_decoder_unittests_apk_target',
- 'type': 'none',
- 'dependencies': [
- '<(apk_tests_path):audio_decoder_unittests_apk',
- ],
- },
- ],
- }],
- ['test_isolation_mode != "noop"', {
- 'targets': [
- {
- 'target_name': 'audio_decoder_unittests_run',
- 'type': 'none',
- 'dependencies': [
- 'audio_decoder_unittests',
- ],
- 'includes': [
- '../../../build/isolate.gypi',
- 'audio_decoder_unittests.isolate',
- ],
- 'sources': [
- 'audio_decoder_unittests.isolate',
- ],
- },
- ],
- }],
- ],
- }], # include_tests
- ], # conditions
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_external_decoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_external_decoder_unittest.cc
deleted file mode 100644
index fec25e985e1..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_external_decoder_unittest.cc
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Test to verify correct operation for externally created decoders.
-
-#include <string>
-#include <list>
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-namespace webrtc {
-
-using ::testing::_;
-
-// This test encodes a few packets of PCM16b 32 kHz data and inserts them into two
-// different NetEq instances. The first instance uses the internal version of
-// the decoder object, while the second one uses an externally created decoder
-// object (ExternalPcm16B wrapped in MockExternalPcm16B, both defined above).
-// The test verifies that the output from both instances match.
-class NetEqExternalDecoderTest : public ::testing::Test {
- protected:
- static const int kTimeStepMs = 10;
- static const int kMaxBlockSize = 480; // 10 ms @ 48 kHz.
- static const uint8_t kPayloadType = 95;
- static const int kSampleRateHz = 32000;
-
- NetEqExternalDecoderTest()
- : sample_rate_hz_(kSampleRateHz),
- samples_per_ms_(sample_rate_hz_ / 1000),
- frame_size_ms_(10),
- frame_size_samples_(frame_size_ms_ * samples_per_ms_),
- output_size_samples_(frame_size_ms_ * samples_per_ms_),
- neteq_external_(NetEq::Create(sample_rate_hz_)),
- neteq_(NetEq::Create(sample_rate_hz_)),
- external_decoder_(new MockExternalPcm16B(kDecoderPCM16Bswb32kHz)),
- rtp_generator_(samples_per_ms_),
- payload_size_bytes_(0),
- last_send_time_(0),
- last_arrival_time_(0) {
- input_ = new int16_t[frame_size_samples_];
- encoded_ = new uint8_t[2 * frame_size_samples_];
- }
-
- ~NetEqExternalDecoderTest() {
- delete neteq_external_;
- delete neteq_;
- // We will now delete the decoder ourselves, so we expect Die() to be called.
- EXPECT_CALL(*external_decoder_, Die()).Times(1);
- delete external_decoder_;
- delete [] input_;
- delete [] encoded_;
- }
-
- virtual void SetUp() {
- const std::string file_name =
- webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
- input_file_.reset(new test::InputAudioFile(file_name));
- assert(sample_rate_hz_ == 32000);
- NetEqDecoder decoder = kDecoderPCM16Bswb32kHz;
- EXPECT_CALL(*external_decoder_, Init());
- // NetEq is not allowed to delete the external decoder (hence Times(0)).
- EXPECT_CALL(*external_decoder_, Die()).Times(0);
- ASSERT_EQ(NetEq::kOK,
- neteq_external_->RegisterExternalDecoder(external_decoder_,
- decoder,
- sample_rate_hz_,
- kPayloadType));
- ASSERT_EQ(NetEq::kOK,
- neteq_->RegisterPayloadType(decoder, kPayloadType));
- }
-
- virtual void TearDown() {}
-
- int GetNewPackets() {
- if (!input_file_->Read(frame_size_samples_, input_)) {
- return -1;
- }
- payload_size_bytes_ = WebRtcPcm16b_Encode(input_, frame_size_samples_,
- encoded_);
- if (frame_size_samples_ * 2 != payload_size_bytes_) {
- return -1;
- }
- int next_send_time = rtp_generator_.GetRtpHeader(kPayloadType,
- frame_size_samples_,
- &rtp_header_);
- return next_send_time;
- }
-
- void VerifyOutput(size_t num_samples) {
- for (size_t i = 0; i < num_samples; ++i) {
- ASSERT_EQ(output_[i], output_external_[i]) <<
- "Diff in sample " << i << ".";
- }
- }
-
- virtual int GetArrivalTime(int send_time) {
- int arrival_time = last_arrival_time_ + (send_time - last_send_time_);
- last_send_time_ = send_time;
- last_arrival_time_ = arrival_time;
- return arrival_time;
- }
-
- virtual bool Lost() { return false; }
-
- void RunTest(int num_loops) {
- // Get next input packets (mono and multi-channel).
- int next_send_time;
- int next_arrival_time;
- do {
- next_send_time = GetNewPackets();
- ASSERT_NE(-1, next_send_time);
- next_arrival_time = GetArrivalTime(next_send_time);
- } while (Lost()); // If lost, immediately read the next packet.
-
- EXPECT_CALL(*external_decoder_, Decode(_, payload_size_bytes_, _, _))
- .Times(num_loops);
-
- int time_now = 0;
- for (int k = 0; k < num_loops; ++k) {
- while (time_now >= next_arrival_time) {
- // Insert packet in regular instance.
- ASSERT_EQ(NetEq::kOK,
- neteq_->InsertPacket(rtp_header_, encoded_,
- payload_size_bytes_,
- next_arrival_time));
- // Insert packet in external decoder instance.
- EXPECT_CALL(*external_decoder_,
- IncomingPacket(_, payload_size_bytes_,
- rtp_header_.header.sequenceNumber,
- rtp_header_.header.timestamp,
- next_arrival_time));
- ASSERT_EQ(NetEq::kOK,
- neteq_external_->InsertPacket(rtp_header_, encoded_,
- payload_size_bytes_,
- next_arrival_time));
- // Get next input packet.
- do {
- next_send_time = GetNewPackets();
- ASSERT_NE(-1, next_send_time);
- next_arrival_time = GetArrivalTime(next_send_time);
- } while (Lost()); // If lost, immediately read the next packet.
- }
- NetEqOutputType output_type;
- // Get audio from regular instance.
- int samples_per_channel;
- int num_channels;
- EXPECT_EQ(NetEq::kOK,
- neteq_->GetAudio(kMaxBlockSize, output_,
- &samples_per_channel, &num_channels,
- &output_type));
- EXPECT_EQ(1, num_channels);
- EXPECT_EQ(output_size_samples_, samples_per_channel);
- // Get audio from external decoder instance.
- ASSERT_EQ(NetEq::kOK,
- neteq_external_->GetAudio(kMaxBlockSize, output_external_,
- &samples_per_channel, &num_channels,
- &output_type));
- EXPECT_EQ(1, num_channels);
- EXPECT_EQ(output_size_samples_, samples_per_channel);
- std::ostringstream ss;
- ss << "Lap number " << k << ".";
- SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
- // Compare mono and multi-channel.
- ASSERT_NO_FATAL_FAILURE(VerifyOutput(output_size_samples_));
-
- time_now += kTimeStepMs;
- }
- }
-
- const int sample_rate_hz_;
- const int samples_per_ms_;
- const int frame_size_ms_;
- const int frame_size_samples_;
- const int output_size_samples_;
- NetEq* neteq_external_;
- NetEq* neteq_;
- MockExternalPcm16B* external_decoder_;
- test::RtpGenerator rtp_generator_;
- int16_t* input_;
- uint8_t* encoded_;
- int16_t output_[kMaxBlockSize];
- int16_t output_external_[kMaxBlockSize];
- WebRtcRTPHeader rtp_header_;
- int payload_size_bytes_;
- int last_send_time_;
- int last_arrival_time_;
- scoped_ptr<test::InputAudioFile> input_file_;
-};
-
-TEST_F(NetEqExternalDecoderTest, DISABLED_ON_ANDROID(RunTest)) {
- RunTest(100); // Run 100 laps @ 10 ms each in the test loop.
-}
-
-} // namespace webrtc
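Lost() and GetArrivalTime() are virtual in the fixture above precisely so derived fixtures can shape the simulated network: RunTest() re-reads a packet whenever Lost() returns true, and GetArrivalTime() maps send times to arrival times. A hypothetical derived fixture that drops every tenth generated packet and adds a fixed 20 ms transport delay could look like the sketch below (no such fixture exists in this change):

    class NetEqLossyExternalDecoderTest : public NetEqExternalDecoderTest {
     protected:
      NetEqLossyExternalDecoderTest() : packets_generated_(0) {}

      // Drop every tenth generated packet; RunTest() then immediately reads the
      // next one, as described by the comments in its insert loop.
      virtual bool Lost() { return (++packets_generated_ % 10) == 0; }

      // Report arrivals 20 ms later than the base class would.
      virtual int GetArrivalTime(int send_time) {
        return NetEqExternalDecoderTest::GetArrivalTime(send_time) + 20;
      }

     private:
      int packets_generated_;
    };

Note that RunTest()'s fixed Decode() call-count expectation would likely need loosening once packets are actually dropped.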
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.cc
deleted file mode 100644
index fb27af2cff8..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.cc
+++ /dev/null
@@ -1,1902 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/neteq_impl.h"
-
-#include <assert.h>
-#include <memory.h> // memset
-
-#include <algorithm>
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/accelerate.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/comfort_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
-#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/merge.h"
-#include "webrtc/modules/audio_coding/neteq4/normal.h"
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"
-#include "webrtc/modules/audio_coding/neteq4/post_decode_vad.h"
-#include "webrtc/modules/audio_coding/neteq4/preemptive_expand.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-
-// Modify the code to obtain backwards bit-exactness. Once bit-exactness is no
-// longer required, this #define should be removed (and the code that it
-// enables).
-#define LEGACY_BITEXACT
-
-namespace webrtc {
-
-NetEqImpl::NetEqImpl(int fs,
- BufferLevelFilter* buffer_level_filter,
- DecoderDatabase* decoder_database,
- DelayManager* delay_manager,
- DelayPeakDetector* delay_peak_detector,
- DtmfBuffer* dtmf_buffer,
- DtmfToneGenerator* dtmf_tone_generator,
- PacketBuffer* packet_buffer,
- PayloadSplitter* payload_splitter,
- TimestampScaler* timestamp_scaler)
- : buffer_level_filter_(buffer_level_filter),
- decoder_database_(decoder_database),
- delay_manager_(delay_manager),
- delay_peak_detector_(delay_peak_detector),
- dtmf_buffer_(dtmf_buffer),
- dtmf_tone_generator_(dtmf_tone_generator),
- packet_buffer_(packet_buffer),
- payload_splitter_(payload_splitter),
- timestamp_scaler_(timestamp_scaler),
- vad_(new PostDecodeVad()),
- last_mode_(kModeNormal),
- mute_factor_array_(NULL),
- decoded_buffer_length_(kMaxFrameSize),
- decoded_buffer_(new int16_t[decoded_buffer_length_]),
- playout_timestamp_(0),
- new_codec_(false),
- timestamp_(0),
- reset_decoder_(false),
- current_rtp_payload_type_(0xFF), // Invalid RTP payload type.
- current_cng_rtp_payload_type_(0xFF), // Invalid RTP payload type.
- ssrc_(0),
- first_packet_(true),
- error_code_(0),
- decoder_error_code_(0),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- decoded_packet_sequence_number_(-1),
- decoded_packet_timestamp_(0) {
- if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000) {
- LOG(LS_ERROR) << "Sample rate " << fs << " Hz not supported. " <<
- "Changing to 8000 Hz.";
- fs = 8000;
- }
- LOG(LS_INFO) << "Create NetEqImpl object with fs = " << fs << ".";
- fs_hz_ = fs;
- fs_mult_ = fs / 8000;
- output_size_samples_ = kOutputSizeMs * 8 * fs_mult_;
- decoder_frame_length_ = 3 * output_size_samples_;
- WebRtcSpl_Init();
- decision_logic_.reset(DecisionLogic::Create(fs_hz_, output_size_samples_,
- kPlayoutOn,
- decoder_database_.get(),
- *packet_buffer_.get(),
- delay_manager_.get(),
- buffer_level_filter_.get()));
- SetSampleRateAndChannels(fs, 1); // Default is 1 channel.
-}
-
-NetEqImpl::~NetEqImpl() {
- LOG(LS_INFO) << "Deleting NetEqImpl object.";
-}
-
-int NetEqImpl::InsertPacket(const WebRtcRTPHeader& rtp_header,
- const uint8_t* payload,
- int length_bytes,
- uint32_t receive_timestamp) {
- CriticalSectionScoped lock(crit_sect_.get());
- LOG(LS_VERBOSE) << "InsertPacket: ts=" << rtp_header.header.timestamp <<
- ", sn=" << rtp_header.header.sequenceNumber <<
- ", pt=" << static_cast<int>(rtp_header.header.payloadType) <<
- ", ssrc=" << rtp_header.header.ssrc <<
- ", len=" << length_bytes;
- int error = InsertPacketInternal(rtp_header, payload, length_bytes,
- receive_timestamp, false);
- if (error != 0) {
- LOG_FERR1(LS_WARNING, InsertPacketInternal, error);
- error_code_ = error;
- return kFail;
- }
- return kOK;
-}
-
-int NetEqImpl::InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
- uint32_t receive_timestamp) {
- CriticalSectionScoped lock(crit_sect_.get());
- LOG(LS_VERBOSE) << "InsertPacket-Sync: ts="
- << rtp_header.header.timestamp <<
- ", sn=" << rtp_header.header.sequenceNumber <<
- ", pt=" << static_cast<int>(rtp_header.header.payloadType) <<
- ", ssrc=" << rtp_header.header.ssrc;
-
- const uint8_t kSyncPayload[] = { 's', 'y', 'n', 'c' };
- int error = InsertPacketInternal(
- rtp_header, kSyncPayload, sizeof(kSyncPayload), receive_timestamp, true);
-
- if (error != 0) {
- LOG_FERR1(LS_WARNING, InsertPacketInternal, error);
- error_code_ = error;
- return kFail;
- }
- return kOK;
-}
-
-int NetEqImpl::GetAudio(size_t max_length, int16_t* output_audio,
- int* samples_per_channel, int* num_channels,
- NetEqOutputType* type) {
- CriticalSectionScoped lock(crit_sect_.get());
- LOG(LS_VERBOSE) << "GetAudio";
- int error = GetAudioInternal(max_length, output_audio, samples_per_channel,
- num_channels);
- LOG(LS_VERBOSE) << "Produced " << *samples_per_channel <<
- " samples/channel for " << *num_channels << " channel(s)";
- if (error != 0) {
- LOG_FERR1(LS_WARNING, GetAudioInternal, error);
- error_code_ = error;
- return kFail;
- }
- if (type) {
- *type = LastOutputType();
- }
- return kOK;
-}
-
-int NetEqImpl::RegisterPayloadType(enum NetEqDecoder codec,
- uint8_t rtp_payload_type) {
- CriticalSectionScoped lock(crit_sect_.get());
- LOG_API2(static_cast<int>(rtp_payload_type), codec);
- int ret = decoder_database_->RegisterPayload(rtp_payload_type, codec);
- if (ret != DecoderDatabase::kOK) {
- LOG_FERR2(LS_WARNING, RegisterPayload, rtp_payload_type, codec);
- switch (ret) {
- case DecoderDatabase::kInvalidRtpPayloadType:
- error_code_ = kInvalidRtpPayloadType;
- break;
- case DecoderDatabase::kCodecNotSupported:
- error_code_ = kCodecNotSupported;
- break;
- case DecoderDatabase::kDecoderExists:
- error_code_ = kDecoderExists;
- break;
- default:
- error_code_ = kOtherError;
- }
- return kFail;
- }
- return kOK;
-}
-
-int NetEqImpl::RegisterExternalDecoder(AudioDecoder* decoder,
- enum NetEqDecoder codec,
- int sample_rate_hz,
- uint8_t rtp_payload_type) {
- CriticalSectionScoped lock(crit_sect_.get());
- LOG_API2(static_cast<int>(rtp_payload_type), codec);
- if (!decoder) {
- LOG(LS_ERROR) << "Cannot register external decoder with NULL pointer";
- assert(false);
- return kFail;
- }
- int ret = decoder_database_->InsertExternal(rtp_payload_type, codec,
- sample_rate_hz, decoder);
- if (ret != DecoderDatabase::kOK) {
- LOG_FERR2(LS_WARNING, InsertExternal, rtp_payload_type, codec);
- switch (ret) {
- case DecoderDatabase::kInvalidRtpPayloadType:
- error_code_ = kInvalidRtpPayloadType;
- break;
- case DecoderDatabase::kCodecNotSupported:
- error_code_ = kCodecNotSupported;
- break;
- case DecoderDatabase::kDecoderExists:
- error_code_ = kDecoderExists;
- break;
- case DecoderDatabase::kInvalidSampleRate:
- error_code_ = kInvalidSampleRate;
- break;
- case DecoderDatabase::kInvalidPointer:
- error_code_ = kInvalidPointer;
- break;
- default:
- error_code_ = kOtherError;
- }
- return kFail;
- }
- return kOK;
-}
-
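// The registration entry points above share one error convention: on failure
// they return kFail (NetEq::kFail to callers) and store a more specific code
// that can be read back later with LastError(). A hedged usage sketch, not
// taken from the real code:
//
//   if (neteq->RegisterPayloadType(webrtc::kDecoderPCM16Bswb32kHz, 96) ==
//       webrtc::NetEq::kFail) {
//     int reason = neteq->LastError();  // E.g. kDecoderExists or
//                                       // kCodecNotSupported.
//     // Handle or log |reason|.
//   }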
-int NetEqImpl::RemovePayloadType(uint8_t rtp_payload_type) {
- CriticalSectionScoped lock(crit_sect_.get());
- LOG_API1(static_cast<int>(rtp_payload_type));
- int ret = decoder_database_->Remove(rtp_payload_type);
- if (ret == DecoderDatabase::kOK) {
- return kOK;
- } else if (ret == DecoderDatabase::kDecoderNotFound) {
- error_code_ = kDecoderNotFound;
- } else {
- error_code_ = kOtherError;
- }
- LOG_FERR1(LS_WARNING, Remove, rtp_payload_type);
- return kFail;
-}
-
-bool NetEqImpl::SetMinimumDelay(int delay_ms) {
- CriticalSectionScoped lock(crit_sect_.get());
- if (delay_ms >= 0 && delay_ms < 10000) {
- assert(delay_manager_.get());
- return delay_manager_->SetMinimumDelay(delay_ms);
- }
- return false;
-}
-
-bool NetEqImpl::SetMaximumDelay(int delay_ms) {
- CriticalSectionScoped lock(crit_sect_.get());
- if (delay_ms >= 0 && delay_ms < 10000) {
- assert(delay_manager_.get());
- return delay_manager_->SetMaximumDelay(delay_ms);
- }
- return false;
-}
-
-int NetEqImpl::LeastRequiredDelayMs() const {
- CriticalSectionScoped lock(crit_sect_.get());
- assert(delay_manager_.get());
- return delay_manager_->least_required_delay_ms();
-}
-
-void NetEqImpl::SetPlayoutMode(NetEqPlayoutMode mode) {
- CriticalSectionScoped lock(crit_sect_.get());
- if (!decision_logic_.get() || mode != decision_logic_->playout_mode()) {
- // The reset() method calls delete for the old object.
- decision_logic_.reset(DecisionLogic::Create(fs_hz_, output_size_samples_,
- mode,
- decoder_database_.get(),
- *packet_buffer_.get(),
- delay_manager_.get(),
- buffer_level_filter_.get()));
- }
-}
-
-NetEqPlayoutMode NetEqImpl::PlayoutMode() const {
- CriticalSectionScoped lock(crit_sect_.get());
- assert(decision_logic_.get());
- return decision_logic_->playout_mode();
-}
-
-int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) {
- CriticalSectionScoped lock(crit_sect_.get());
- assert(decoder_database_.get());
- const int total_samples_in_buffers = packet_buffer_->NumSamplesInBuffer(
- decoder_database_.get(), decoder_frame_length_) +
- static_cast<int>(sync_buffer_->FutureLength());
- assert(delay_manager_.get());
- assert(decision_logic_.get());
- stats_.GetNetworkStatistics(fs_hz_, total_samples_in_buffers,
- decoder_frame_length_, *delay_manager_.get(),
- *decision_logic_.get(), stats);
- return 0;
-}
-
-void NetEqImpl::WaitingTimes(std::vector<int>* waiting_times) {
- CriticalSectionScoped lock(crit_sect_.get());
- stats_.WaitingTimes(waiting_times);
-}
-
-void NetEqImpl::GetRtcpStatistics(RtcpStatistics* stats) {
- CriticalSectionScoped lock(crit_sect_.get());
- if (stats) {
- rtcp_.GetStatistics(false, stats);
- }
-}
-
-void NetEqImpl::GetRtcpStatisticsNoReset(RtcpStatistics* stats) {
- CriticalSectionScoped lock(crit_sect_.get());
- if (stats) {
- rtcp_.GetStatistics(true, stats);
- }
-}
-
-void NetEqImpl::EnableVad() {
- CriticalSectionScoped lock(crit_sect_.get());
- assert(vad_.get());
- vad_->Enable();
-}
-
-void NetEqImpl::DisableVad() {
- CriticalSectionScoped lock(crit_sect_.get());
- assert(vad_.get());
- vad_->Disable();
-}
-
-uint32_t NetEqImpl::PlayoutTimestamp() {
- CriticalSectionScoped lock(crit_sect_.get());
- return timestamp_scaler_->ToExternal(playout_timestamp_);
-}
-
-int NetEqImpl::LastError() {
- CriticalSectionScoped lock(crit_sect_.get());
- return error_code_;
-}
-
-int NetEqImpl::LastDecoderError() {
- CriticalSectionScoped lock(crit_sect_.get());
- return decoder_error_code_;
-}
-
-void NetEqImpl::FlushBuffers() {
- CriticalSectionScoped lock(crit_sect_.get());
- LOG_API0();
- packet_buffer_->Flush();
- assert(sync_buffer_.get());
- assert(expand_.get());
- sync_buffer_->Flush();
- sync_buffer_->set_next_index(sync_buffer_->next_index() -
- expand_->overlap_length());
- // Set to wait for new codec.
- first_packet_ = true;
-}
-
-void NetEqImpl::PacketBufferStatistics(int* current_num_packets,
- int* max_num_packets,
- int* current_memory_size_bytes,
- int* max_memory_size_bytes) const {
- CriticalSectionScoped lock(crit_sect_.get());
- packet_buffer_->BufferStat(current_num_packets, max_num_packets,
- current_memory_size_bytes, max_memory_size_bytes);
-}
-
-int NetEqImpl::DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const {
- CriticalSectionScoped lock(crit_sect_.get());
- if (decoded_packet_sequence_number_ < 0)
- return -1;
- *sequence_number = decoded_packet_sequence_number_;
- *timestamp = decoded_packet_timestamp_;
- return 0;
-}
-
-void NetEqImpl::SetBackgroundNoiseMode(NetEqBackgroundNoiseMode mode) {
- CriticalSectionScoped lock(crit_sect_.get());
- assert(background_noise_.get());
- background_noise_->set_mode(mode);
-}
-
-NetEqBackgroundNoiseMode NetEqImpl::BackgroundNoiseMode() const {
- CriticalSectionScoped lock(crit_sect_.get());
- assert(background_noise_.get());
- return background_noise_->mode();
-}
-
-// Methods below this line are private.
-
-int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
- const uint8_t* payload,
- int length_bytes,
- uint32_t receive_timestamp,
- bool is_sync_packet) {
- if (!payload) {
- LOG_F(LS_ERROR) << "payload == NULL";
- return kInvalidPointer;
- }
- // Sanity checks for sync-packets.
- if (is_sync_packet) {
- if (decoder_database_->IsDtmf(rtp_header.header.payloadType) ||
- decoder_database_->IsRed(rtp_header.header.payloadType) ||
- decoder_database_->IsComfortNoise(rtp_header.header.payloadType)) {
- LOG_F(LS_ERROR) << "Sync-packet with an unacceptable payload type "
- << rtp_header.header.payloadType;
- return kSyncPacketNotAccepted;
- }
- if (first_packet_ ||
- rtp_header.header.payloadType != current_rtp_payload_type_ ||
- rtp_header.header.ssrc != ssrc_) {
- // Even if |current_rtp_payload_type_| is 0xFF, sync-packet isn't
- // accepted.
- LOG_F(LS_ERROR) << "Changing codec, SSRC or first packet "
- "with sync-packet.";
- return kSyncPacketNotAccepted;
- }
- }
- PacketList packet_list;
- RTPHeader main_header;
- {
- // Convert to Packet.
- // Create |packet| within this separate scope, since it should not be used
- // directly once it's been inserted in the packet list. This way, |packet|
- // is not defined outside of this block.
- Packet* packet = new Packet;
- packet->header.markerBit = false;
- packet->header.payloadType = rtp_header.header.payloadType;
- packet->header.sequenceNumber = rtp_header.header.sequenceNumber;
- packet->header.timestamp = rtp_header.header.timestamp;
- packet->header.ssrc = rtp_header.header.ssrc;
- packet->header.numCSRCs = 0;
- packet->payload_length = length_bytes;
- packet->primary = true;
- packet->waiting_time = 0;
- packet->payload = new uint8_t[packet->payload_length];
- packet->sync_packet = is_sync_packet;
- if (!packet->payload) {
- LOG_F(LS_ERROR) << "Payload pointer is NULL.";
- }
- assert(payload); // Already checked above.
- memcpy(packet->payload, payload, packet->payload_length);
- // Insert packet in a packet list.
- packet_list.push_back(packet);
- // Save main payloads header for later.
- memcpy(&main_header, &packet->header, sizeof(main_header));
- }
-
- bool update_sample_rate_and_channels = false;
- // Reinitialize NetEq if it's needed (changed SSRC or first call).
- if ((main_header.ssrc != ssrc_) || first_packet_) {
- rtcp_.Init(main_header.sequenceNumber);
- first_packet_ = false;
-
- // Flush the packet buffer and DTMF buffer.
- packet_buffer_->Flush();
- dtmf_buffer_->Flush();
-
- // Store new SSRC.
- ssrc_ = main_header.ssrc;
-
- // Update audio buffer timestamp.
- sync_buffer_->IncreaseEndTimestamp(main_header.timestamp - timestamp_);
-
- // Update codecs.
- timestamp_ = main_header.timestamp;
- current_rtp_payload_type_ = main_header.payloadType;
-
- // Set MCU to update codec on next SignalMCU call.
- new_codec_ = true;
-
- // Reset timestamp scaling.
- timestamp_scaler_->Reset();
-
- // Trigger an update of the sampling rate and the number of channels.
- update_sample_rate_and_channels = true;
- }
-
- // Update RTCP statistics, only for regular packets.
- if (!is_sync_packet)
- rtcp_.Update(main_header, receive_timestamp);
-
- // Check for RED payload type, and separate payloads into several packets.
- if (decoder_database_->IsRed(main_header.payloadType)) {
- assert(!is_sync_packet); // We had a sanity check for this.
- if (payload_splitter_->SplitRed(&packet_list) != PayloadSplitter::kOK) {
- LOG_FERR1(LS_WARNING, SplitRed, packet_list.size());
- PacketBuffer::DeleteAllPackets(&packet_list);
- return kRedundancySplitError;
- }
- // Only accept a few RED payloads of the same type as the main data,
- // DTMF events and CNG.
- payload_splitter_->CheckRedPayloads(&packet_list, *decoder_database_);
- // Update the stored main payload header since the main payload has now
- // changed.
- memcpy(&main_header, &packet_list.front()->header, sizeof(main_header));
- }
-
- // Check payload types.
- if (decoder_database_->CheckPayloadTypes(packet_list) ==
- DecoderDatabase::kDecoderNotFound) {
- LOG_FERR1(LS_WARNING, CheckPayloadTypes, packet_list.size());
- PacketBuffer::DeleteAllPackets(&packet_list);
- return kUnknownRtpPayloadType;
- }
-
- // Scale timestamp to internal domain (only for some codecs).
- timestamp_scaler_->ToInternal(&packet_list);
-
- // Process DTMF payloads. Cycle through the list of packets, and pick out any
- // DTMF payloads found.
- PacketList::iterator it = packet_list.begin();
- while (it != packet_list.end()) {
- Packet* current_packet = (*it);
- assert(current_packet);
- assert(current_packet->payload);
- if (decoder_database_->IsDtmf(current_packet->header.payloadType)) {
- assert(!current_packet->sync_packet); // We had a sanity check for this.
- DtmfEvent event;
- int ret = DtmfBuffer::ParseEvent(
- current_packet->header.timestamp,
- current_packet->payload,
- current_packet->payload_length,
- &event);
- if (ret != DtmfBuffer::kOK) {
- LOG_FERR2(LS_WARNING, ParseEvent, ret,
- current_packet->payload_length);
- PacketBuffer::DeleteAllPackets(&packet_list);
- return kDtmfParsingError;
- }
- if (dtmf_buffer_->InsertEvent(event) != DtmfBuffer::kOK) {
- LOG_FERR0(LS_WARNING, InsertEvent);
- PacketBuffer::DeleteAllPackets(&packet_list);
- return kDtmfInsertError;
- }
- // TODO(hlundin): Let the destructor of Packet handle the payload.
- delete [] current_packet->payload;
- delete current_packet;
- it = packet_list.erase(it);
- } else {
- ++it;
- }
- }
-
- // Split payloads into smaller chunks. This also verifies that all payloads
- // are of a known payload type. SplitAudio() method is protected against
- // sync-packets.
- int ret = payload_splitter_->SplitAudio(&packet_list, *decoder_database_);
- if (ret != PayloadSplitter::kOK) {
- LOG_FERR1(LS_WARNING, SplitAudio, packet_list.size());
- PacketBuffer::DeleteAllPackets(&packet_list);
- switch (ret) {
- case PayloadSplitter::kUnknownPayloadType:
- return kUnknownRtpPayloadType;
- case PayloadSplitter::kFrameSplitError:
- return kFrameSplitError;
- default:
- return kOtherError;
- }
- }
-
- // Update bandwidth estimate, if the packet is not sync-packet.
- if (!packet_list.empty() && !packet_list.front()->sync_packet) {
- // The list can be empty here if we got nothing but DTMF payloads.
- AudioDecoder* decoder =
- decoder_database_->GetDecoder(main_header.payloadType);
- assert(decoder); // Should always get a valid object, since we have
- // already checked that the payload types are known.
- decoder->IncomingPacket(packet_list.front()->payload,
- packet_list.front()->payload_length,
- packet_list.front()->header.sequenceNumber,
- packet_list.front()->header.timestamp,
- receive_timestamp);
- }
-
- // Insert packets in buffer.
- int temp_bufsize = packet_buffer_->NumPacketsInBuffer();
- ret = packet_buffer_->InsertPacketList(
- &packet_list,
- *decoder_database_,
- &current_rtp_payload_type_,
- &current_cng_rtp_payload_type_);
- if (ret == PacketBuffer::kFlushed) {
- // Reset DSP timestamp etc. if packet buffer flushed.
- new_codec_ = true;
- update_sample_rate_and_channels = true;
- LOG_F(LS_WARNING) << "Packet buffer flushed";
- } else if (ret == PacketBuffer::kOversizePacket) {
- LOG_F(LS_WARNING) << "Packet larger than packet buffer";
- return kOversizePacket;
- } else if (ret != PacketBuffer::kOK) {
- LOG_FERR1(LS_WARNING, InsertPacketList, packet_list.size());
- PacketBuffer::DeleteAllPackets(&packet_list);
- return kOtherError;
- }
- if (current_rtp_payload_type_ != 0xFF) {
- const DecoderDatabase::DecoderInfo* dec_info =
- decoder_database_->GetDecoderInfo(current_rtp_payload_type_);
- if (!dec_info) {
- assert(false); // Already checked that the payload type is known.
- }
- }
-
- if (update_sample_rate_and_channels && !packet_buffer_->Empty()) {
- // We do not use |current_rtp_payload_type_| to set |payload_type|, but
- // get the next RTP header from |packet_buffer_| to obtain the payload type.
- // The reason for it is the following corner case. If NetEq receives a
- // CNG packet with a sample rate different than the current CNG then it
- // flushes its buffer, assuming send codec must have been changed. However,
- // payload type of the hypothetically new send codec is not known.
- const RTPHeader* rtp_header = packet_buffer_->NextRtpHeader();
- assert(rtp_header);
- int payload_type = rtp_header->payloadType;
- AudioDecoder* decoder = decoder_database_->GetDecoder(payload_type);
- assert(decoder); // Payloads are already checked to be valid.
- const DecoderDatabase::DecoderInfo* decoder_info =
- decoder_database_->GetDecoderInfo(payload_type);
- assert(decoder_info);
- if (decoder_info->fs_hz != fs_hz_ ||
- decoder->channels() != algorithm_buffer_->Channels())
- SetSampleRateAndChannels(decoder_info->fs_hz, decoder->channels());
- }
-
- // TODO(hlundin): Move this code to DelayManager class.
- const DecoderDatabase::DecoderInfo* dec_info =
- decoder_database_->GetDecoderInfo(main_header.payloadType);
- assert(dec_info); // Already checked that the payload type is known.
- delay_manager_->LastDecoderType(dec_info->codec_type);
- if (delay_manager_->last_pack_cng_or_dtmf() == 0) {
- // Calculate the total speech length carried in each packet.
- temp_bufsize = packet_buffer_->NumPacketsInBuffer() - temp_bufsize;
- temp_bufsize *= decoder_frame_length_;
-
- if ((temp_bufsize > 0) &&
- (temp_bufsize != decision_logic_->packet_length_samples())) {
- decision_logic_->set_packet_length_samples(temp_bufsize);
- delay_manager_->SetPacketAudioLength((1000 * temp_bufsize) / fs_hz_);
- }
-
- // Update statistics.
- if ((int32_t) (main_header.timestamp - timestamp_) >= 0 &&
- !new_codec_) {
- // Only update statistics if incoming packet is not older than last played
- // out packet, and if new codec flag is not set.
- delay_manager_->Update(main_header.sequenceNumber, main_header.timestamp,
- fs_hz_);
- }
- } else if (delay_manager_->last_pack_cng_or_dtmf() == -1) {
- // This is first "normal" packet after CNG or DTMF.
- // Reset packet time counter and measure time until next packet,
- // but don't update statistics.
- delay_manager_->set_last_pack_cng_or_dtmf(0);
- delay_manager_->ResetPacketIatCount();
- }
- return 0;
-}
-
-int NetEqImpl::GetAudioInternal(size_t max_length, int16_t* output,
- int* samples_per_channel, int* num_channels) {
- PacketList packet_list;
- DtmfEvent dtmf_event;
- Operations operation;
- bool play_dtmf;
- int return_value = GetDecision(&operation, &packet_list, &dtmf_event,
- &play_dtmf);
- if (return_value != 0) {
- LOG_FERR1(LS_WARNING, GetDecision, return_value);
- assert(false);
- last_mode_ = kModeError;
- return return_value;
- }
- LOG(LS_VERBOSE) << "GetDecision returned operation=" << operation <<
- " and " << packet_list.size() << " packet(s)";
-
- AudioDecoder::SpeechType speech_type;
- int length = 0;
- int decode_return_value = Decode(&packet_list, &operation,
- &length, &speech_type);
-
- assert(vad_.get());
- bool sid_frame_available =
- (operation == kRfc3389Cng && !packet_list.empty());
- vad_->Update(decoded_buffer_.get(), length, speech_type,
- sid_frame_available, fs_hz_);
-
- algorithm_buffer_->Clear();
- switch (operation) {
- case kNormal: {
- DoNormal(decoded_buffer_.get(), length, speech_type, play_dtmf);
- break;
- }
- case kMerge: {
- DoMerge(decoded_buffer_.get(), length, speech_type, play_dtmf);
- break;
- }
- case kExpand: {
- return_value = DoExpand(play_dtmf);
- break;
- }
- case kAccelerate: {
- return_value = DoAccelerate(decoded_buffer_.get(), length, speech_type,
- play_dtmf);
- break;
- }
- case kPreemptiveExpand: {
- return_value = DoPreemptiveExpand(decoded_buffer_.get(), length,
- speech_type, play_dtmf);
- break;
- }
- case kRfc3389Cng:
- case kRfc3389CngNoPacket: {
- return_value = DoRfc3389Cng(&packet_list, play_dtmf);
- break;
- }
- case kCodecInternalCng: {
- // This handles the case when there is no transmission and the decoder
- // should produce internal comfort noise.
- // TODO(hlundin): Write test for codec-internal CNG.
- DoCodecInternalCng();
- break;
- }
- case kDtmf: {
- // TODO(hlundin): Write test for this.
- return_value = DoDtmf(dtmf_event, &play_dtmf);
- break;
- }
- case kAlternativePlc: {
- // TODO(hlundin): Write test for this.
- DoAlternativePlc(false);
- break;
- }
- case kAlternativePlcIncreaseTimestamp: {
- // TODO(hlundin): Write test for this.
- DoAlternativePlc(true);
- break;
- }
- case kAudioRepetitionIncreaseTimestamp: {
- // TODO(hlundin): Write test for this.
- sync_buffer_->IncreaseEndTimestamp(output_size_samples_);
- // Skipping break on purpose. Execution should move on into the
- // next case.
- }
- case kAudioRepetition: {
- // TODO(hlundin): Write test for this.
- // Copy last |output_size_samples_| from |sync_buffer_| to
- // |algorithm_buffer|.
- algorithm_buffer_->PushBackFromIndex(
- *sync_buffer_, sync_buffer_->Size() - output_size_samples_);
- expand_->Reset();
- break;
- }
- case kUndefined: {
- LOG_F(LS_ERROR) << "Invalid operation kUndefined.";
- assert(false); // This should not happen.
- last_mode_ = kModeError;
- return kInvalidOperation;
- }
- } // End of switch.
- if (return_value < 0) {
- return return_value;
- }
-
- if (last_mode_ != kModeRfc3389Cng) {
- comfort_noise_->Reset();
- }
-
- // Copy from |algorithm_buffer| to |sync_buffer_|.
- sync_buffer_->PushBack(*algorithm_buffer_);
-
- // Extract data from |sync_buffer_| to |output|.
- size_t num_output_samples_per_channel = output_size_samples_;
- size_t num_output_samples = output_size_samples_ * sync_buffer_->Channels();
- if (num_output_samples > max_length) {
- LOG(LS_WARNING) << "Output array is too short. " << max_length << " < " <<
- output_size_samples_ << " * " << sync_buffer_->Channels();
- num_output_samples = max_length;
- num_output_samples_per_channel = static_cast<int>(
- max_length / sync_buffer_->Channels());
- }
- int samples_from_sync = static_cast<int>(
- sync_buffer_->GetNextAudioInterleaved(num_output_samples_per_channel,
- output));
- *num_channels = static_cast<int>(sync_buffer_->Channels());
- LOG(LS_VERBOSE) << "Sync buffer (" << *num_channels << " channel(s)):" <<
- " insert " << algorithm_buffer_->Size() << " samples, extract " <<
- samples_from_sync << " samples";
- if (samples_from_sync != output_size_samples_) {
- LOG_F(LS_ERROR) << "samples_from_sync != output_size_samples_";
- // TODO(minyue): treatment of under-run, filling zeros
- memset(output, 0, num_output_samples * sizeof(int16_t));
- *samples_per_channel = output_size_samples_;
- return kSampleUnderrun;
- }
- *samples_per_channel = output_size_samples_;
-
- // Should always have overlap samples left in the |sync_buffer_|.
- assert(sync_buffer_->FutureLength() >= expand_->overlap_length());
-
- if (play_dtmf) {
- return_value = DtmfOverdub(dtmf_event, sync_buffer_->Channels(), output);
- }
-
- // Update the background noise parameters if last operation wrote data
- // straight from the decoder to the |sync_buffer_|. That is, none of the
- // operations that modify the signal can be followed by a parameter update.
- if ((last_mode_ == kModeNormal) ||
- (last_mode_ == kModeAccelerateFail) ||
- (last_mode_ == kModePreemptiveExpandFail) ||
- (last_mode_ == kModeRfc3389Cng) ||
- (last_mode_ == kModeCodecInternalCng)) {
- background_noise_->Update(*sync_buffer_, *vad_.get());
- }
-
- if (operation == kDtmf) {
- // DTMF data was written to the end of |sync_buffer_|.
- // Update index to end of DTMF data in |sync_buffer_|.
- sync_buffer_->set_dtmf_index(sync_buffer_->Size());
- }
-
- if ((last_mode_ != kModeExpand) && (last_mode_ != kModeRfc3389Cng)) {
- // If last operation was neither expand, nor comfort noise, calculate the
- // |playout_timestamp_| from the |sync_buffer_|. However, do not update the
- // |playout_timestamp_| if it would be moved "backwards".
- uint32_t temp_timestamp = sync_buffer_->end_timestamp() -
- static_cast<uint32_t>(sync_buffer_->FutureLength());
- if (static_cast<int32_t>(temp_timestamp - playout_timestamp_) > 0) {
- playout_timestamp_ = temp_timestamp;
- }
- } else {
- // Use dead reckoning to estimate the |playout_timestamp_|.
- playout_timestamp_ += output_size_samples_;
- }
-
- if (decode_return_value) return decode_return_value;
- return return_value;
-}
-
-int NetEqImpl::GetDecision(Operations* operation,
- PacketList* packet_list,
- DtmfEvent* dtmf_event,
- bool* play_dtmf) {
- // Initialize output variables.
- *play_dtmf = false;
- *operation = kUndefined;
-
- // Increment time counters.
- packet_buffer_->IncrementWaitingTimes();
- stats_.IncreaseCounter(output_size_samples_, fs_hz_);
-
- assert(sync_buffer_.get());
- uint32_t end_timestamp = sync_buffer_->end_timestamp();
- if (!new_codec_) {
- packet_buffer_->DiscardOldPackets(end_timestamp);
- }
- const RTPHeader* header = packet_buffer_->NextRtpHeader();
-
- if (decision_logic_->CngRfc3389On()) {
- // Because of timestamp peculiarities, we have to "manually" disallow using
- // a CNG packet with the same timestamp as the one that was last played.
- // This can happen when using redundancy and will cause the timing to shift.
- while (header &&
- decoder_database_->IsComfortNoise(header->payloadType) &&
- end_timestamp >= header->timestamp) {
- // Don't use this packet, discard it.
- // TODO(hlundin): Write test for this case.
- if (packet_buffer_->DiscardNextPacket() != PacketBuffer::kOK) {
- assert(false); // Must be ok by design.
- }
- // Check buffer again.
- if (!new_codec_) {
- packet_buffer_->DiscardOldPackets(end_timestamp);
- }
- header = packet_buffer_->NextRtpHeader();
- }
- }
-
- assert(expand_.get());
- const int samples_left = static_cast<int>(sync_buffer_->FutureLength() -
- expand_->overlap_length());
- if (last_mode_ == kModeAccelerateSuccess ||
- last_mode_ == kModeAccelerateLowEnergy ||
- last_mode_ == kModePreemptiveExpandSuccess ||
- last_mode_ == kModePreemptiveExpandLowEnergy) {
- // Subtract (samples_left + output_size_samples_) from sampleMemory.
- decision_logic_->AddSampleMemory(-(samples_left + output_size_samples_));
- }
-
- // Check if it is time to play a DTMF event.
- if (dtmf_buffer_->GetEvent(end_timestamp +
- decision_logic_->generated_noise_samples(),
- dtmf_event)) {
- *play_dtmf = true;
- }
-
- // Get instruction.
- assert(sync_buffer_.get());
- assert(expand_.get());
- *operation = decision_logic_->GetDecision(*sync_buffer_,
- *expand_,
- decoder_frame_length_,
- header,
- last_mode_,
- *play_dtmf,
- &reset_decoder_);
-
- // Check if we already have enough samples in the |sync_buffer_|. If so,
- // change decision to normal, unless the decision was merge, accelerate, or
- // preemptive expand.
- if (samples_left >= output_size_samples_ &&
- *operation != kMerge &&
- *operation != kAccelerate &&
- *operation != kPreemptiveExpand) {
- *operation = kNormal;
- return 0;
- }
-
- decision_logic_->ExpandDecision(*operation == kExpand);
-
- // Check conditions for reset.
- if (new_codec_ || *operation == kUndefined) {
- // The only valid reason to get kUndefined is that new_codec_ is set.
- assert(new_codec_);
- if (*play_dtmf && !header) {
- timestamp_ = dtmf_event->timestamp;
- } else {
- assert(header);
- if (!header) {
- LOG_F(LS_ERROR) << "Packet missing where it shouldn't.";
- return -1;
- }
- timestamp_ = header->timestamp;
- if (*operation == kRfc3389CngNoPacket
-#ifndef LEGACY_BITEXACT
- // Without this check, it can happen that a non-CNG packet is sent to
- // the CNG decoder as if it was a SID frame. This is clearly a bug,
- // but is kept for now to maintain bit-exactness with the test
- // vectors.
- && decoder_database_->IsComfortNoise(header->payloadType)
-#endif
- ) {
- // Change decision to CNG packet, since we do have a CNG packet, but it
- // was considered too early to use. Now, use it anyway.
- *operation = kRfc3389Cng;
- } else if (*operation != kRfc3389Cng) {
- *operation = kNormal;
- }
- }
- // Adjust |sync_buffer_| timestamp before setting |end_timestamp| to the
- // new value.
- sync_buffer_->IncreaseEndTimestamp(timestamp_ - end_timestamp);
- end_timestamp = timestamp_;
- new_codec_ = false;
- decision_logic_->SoftReset();
- buffer_level_filter_->Reset();
- delay_manager_->Reset();
- stats_.ResetMcu();
- }
-
- int required_samples = output_size_samples_;
- const int samples_10_ms = 80 * fs_mult_;
- const int samples_20_ms = 2 * samples_10_ms;
- const int samples_30_ms = 3 * samples_10_ms;
-
- switch (*operation) {
- case kExpand: {
- timestamp_ = end_timestamp;
- return 0;
- }
- case kRfc3389CngNoPacket:
- case kCodecInternalCng: {
- return 0;
- }
- case kDtmf: {
- // TODO(hlundin): Write test for this.
- // Update timestamp.
- timestamp_ = end_timestamp;
- if (decision_logic_->generated_noise_samples() > 0 &&
- last_mode_ != kModeDtmf) {
- // Make a jump in timestamp due to the recently played comfort noise.
- uint32_t timestamp_jump = decision_logic_->generated_noise_samples();
- sync_buffer_->IncreaseEndTimestamp(timestamp_jump);
- timestamp_ += timestamp_jump;
- }
- decision_logic_->set_generated_noise_samples(0);
- return 0;
- }
- case kAccelerate: {
- // In order to do an accelerate we need at least 30 ms of audio data.
- if (samples_left >= samples_30_ms) {
- // Already have enough data, so we do not need to extract any more.
- decision_logic_->set_sample_memory(samples_left);
- decision_logic_->set_prev_time_scale(true);
- return 0;
- } else if (samples_left >= samples_10_ms &&
- decoder_frame_length_ >= samples_30_ms) {
- // Avoid decoding more data as it might overflow the playout buffer.
- *operation = kNormal;
- return 0;
- } else if (samples_left < samples_20_ms &&
- decoder_frame_length_ < samples_30_ms) {
- // Build up decoded data by decoding at least 20 ms of audio data. Do
- // not perform accelerate yet, but wait until we only need to do one
- // decoding.
- required_samples = 2 * output_size_samples_;
- *operation = kNormal;
- }
- // If none of the above is true, we have one of two possible situations:
- // (1) 20 ms <= samples_left < 30 ms and decoder_frame_length_ < 30 ms; or
- // (2) samples_left < 10 ms and decoder_frame_length_ >= 30 ms.
- // In either case, we move on with the accelerate decision, and decode one
- // frame now.
- break;
- }
- case kPreemptiveExpand: {
- // In order to do a preemptive expand we need at least 30 ms of decoded
- // audio data.
- if ((samples_left >= samples_30_ms) ||
- (samples_left >= samples_10_ms &&
- decoder_frame_length_ >= samples_30_ms)) {
- // Already have enough data, so we do not need to extract any more.
- // Or, avoid decoding more data as it might overflow the playout buffer.
- // Still try preemptive expand, though.
- decision_logic_->set_sample_memory(samples_left);
- decision_logic_->set_prev_time_scale(true);
- return 0;
- }
- if (samples_left < samples_20_ms &&
- decoder_frame_length_ < samples_30_ms) {
- // Build up decoded data by decoding at least 20 ms of audio data.
- // Still try to perform preemptive expand.
- required_samples = 2 * output_size_samples_;
- }
- // Move on with the preemptive expand decision.
- break;
- }
- default: {
- // Do nothing.
- }
- }
-
- // Get packets from buffer.
- int extracted_samples = 0;
- if (header &&
- *operation != kAlternativePlc &&
- *operation != kAlternativePlcIncreaseTimestamp &&
- *operation != kAudioRepetition &&
- *operation != kAudioRepetitionIncreaseTimestamp) {
- sync_buffer_->IncreaseEndTimestamp(header->timestamp - end_timestamp);
- if (decision_logic_->CngOff()) {
- // Adjustment of timestamp only corresponds to an actual packet loss
- // if comfort noise is not played. If comfort noise was just played,
- // this adjustment of timestamp is only done to get back in sync with the
- // stream timestamp; no loss to report.
- stats_.LostSamples(header->timestamp - end_timestamp);
- }
-
- if (*operation != kRfc3389Cng) {
- // We are about to decode and use a non-CNG packet.
- decision_logic_->SetCngOff();
- }
- // Reset CNG timestamp as a new packet will be delivered.
- // (Also if this is a CNG packet, since playedOutTS is updated.)
- decision_logic_->set_generated_noise_samples(0);
-
- extracted_samples = ExtractPackets(required_samples, packet_list);
- if (extracted_samples < 0) {
- LOG_F(LS_WARNING) << "Failed to extract packets from buffer.";
- return kPacketBufferCorruption;
- }
- }
-
- if (*operation == kAccelerate ||
- *operation == kPreemptiveExpand) {
- decision_logic_->set_sample_memory(samples_left + extracted_samples);
- decision_logic_->set_prev_time_scale(true);
- }
-
- if (*operation == kAccelerate) {
- // Check that we have enough data (30ms) to do accelerate.
- if (extracted_samples + samples_left < samples_30_ms) {
- // TODO(hlundin): Write test for this.
- // Not enough, do normal operation instead.
- *operation = kNormal;
- }
- }
-
- timestamp_ = end_timestamp;
- return 0;
-}
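To make the branching above easier to follow, here is a minimal standalone sketch (not taken from this diff) of how the 10/20/30 ms sample thresholds used by GetDecision() scale with the sampling rate; it assumes only the fs_mult_ = fs_hz / 8000 convention used elsewhere in this file.

#include <cstdio>

int main() {
  const int rates_hz[] = {8000, 16000, 32000, 48000};
  for (int fs_hz : rates_hz) {
    const int fs_mult = fs_hz / 8000;         // 1, 2, 4 or 6.
    const int samples_10_ms = 80 * fs_mult;   // One 10 ms output block per channel.
    const int samples_20_ms = 2 * samples_10_ms;
    const int samples_30_ms = 3 * samples_10_ms;
    std::printf("%d Hz: 10 ms = %d, 20 ms = %d, 30 ms = %d samples/channel\n",
                fs_hz, samples_10_ms, samples_20_ms, samples_30_ms);
  }
  return 0;
}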
-
-int NetEqImpl::Decode(PacketList* packet_list, Operations* operation,
- int* decoded_length,
- AudioDecoder::SpeechType* speech_type) {
- *speech_type = AudioDecoder::kSpeech;
- AudioDecoder* decoder = NULL;
- if (!packet_list->empty()) {
- const Packet* packet = packet_list->front();
- int payload_type = packet->header.payloadType;
- if (!decoder_database_->IsComfortNoise(payload_type)) {
- decoder = decoder_database_->GetDecoder(payload_type);
- assert(decoder);
- if (!decoder) {
- LOG_FERR1(LS_WARNING, GetDecoder, payload_type);
- PacketBuffer::DeleteAllPackets(packet_list);
- return kDecoderNotFound;
- }
- bool decoder_changed;
- decoder_database_->SetActiveDecoder(payload_type, &decoder_changed);
- if (decoder_changed) {
- // We have a new decoder. Re-init some values.
- const DecoderDatabase::DecoderInfo* decoder_info = decoder_database_
- ->GetDecoderInfo(payload_type);
- assert(decoder_info);
- if (!decoder_info) {
- LOG_FERR1(LS_WARNING, GetDecoderInfo, payload_type);
- PacketBuffer::DeleteAllPackets(packet_list);
- return kDecoderNotFound;
- }
- // We should have correct sampling rate and number of channels. They
- // are set when packets are inserted.
- if (decoder_info->fs_hz != fs_hz_ ||
- decoder->channels() != algorithm_buffer_->Channels()) {
- LOG_F(LS_ERROR) << "Sampling rate or number of channels mismatch.";
- assert(false);
- SetSampleRateAndChannels(decoder_info->fs_hz, decoder->channels());
- }
- sync_buffer_->set_end_timestamp(timestamp_);
- playout_timestamp_ = timestamp_;
- }
- }
- }
-
- if (reset_decoder_) {
- // TODO(hlundin): Write test for this.
- // Reset decoder.
- if (decoder) {
- decoder->Init();
- }
- // Reset comfort noise decoder.
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
- if (cng_decoder) {
- cng_decoder->Init();
- }
- reset_decoder_ = false;
- }
-
-#ifdef LEGACY_BITEXACT
- // Due to a bug in old SignalMCU, it could happen that CNG operation was
- // decided, but a speech packet was provided. The speech packet will be used
- // to update the comfort noise decoder, as if it was a SID frame, which is
- // clearly wrong.
- if (*operation == kRfc3389Cng) {
- return 0;
- }
-#endif
-
- *decoded_length = 0;
- // Update codec-internal PLC state.
- if ((*operation == kMerge) && decoder && decoder->HasDecodePlc()) {
- decoder->DecodePlc(1, &decoded_buffer_[*decoded_length]);
- }
-
- int return_value = DecodeLoop(packet_list, operation, decoder,
- decoded_length, speech_type);
-
- if (*decoded_length < 0) {
- // Error returned from the decoder.
- *decoded_length = 0;
- sync_buffer_->IncreaseEndTimestamp(decoder_frame_length_);
- int error_code = 0;
- if (decoder)
- error_code = decoder->ErrorCode();
- if (error_code != 0) {
- // Got some error code from the decoder.
- decoder_error_code_ = error_code;
- return_value = kDecoderErrorCode;
- } else {
- // Decoder does not implement error codes. Return generic error.
- return_value = kOtherDecoderError;
- }
- LOG_FERR2(LS_WARNING, DecodeLoop, error_code, packet_list->size());
- *operation = kExpand; // Do expansion to get data instead.
- }
- if (*speech_type != AudioDecoder::kComfortNoise) {
- // Don't increment the timestamp if the codec returned CNG speech type,
- // since in this case we will increment the CNGplayedTS counter.
- // Increase with the number of samples per channel.
- assert(*decoded_length == 0 ||
- (decoder && decoder->channels() == sync_buffer_->Channels()));
- sync_buffer_->IncreaseEndTimestamp(
- *decoded_length / static_cast<int>(sync_buffer_->Channels()));
- }
- return return_value;
-}
-
-int NetEqImpl::DecodeLoop(PacketList* packet_list, Operations* operation,
- AudioDecoder* decoder, int* decoded_length,
- AudioDecoder::SpeechType* speech_type) {
- Packet* packet = NULL;
- if (!packet_list->empty()) {
- packet = packet_list->front();
- }
- // Do decoding.
- while (packet &&
- !decoder_database_->IsComfortNoise(packet->header.payloadType)) {
- assert(decoder); // At this point, we must have a decoder object.
- // The number of channels in the |sync_buffer_| should be the same as the
- // number of decoder channels.
- assert(sync_buffer_->Channels() == decoder->channels());
- assert(decoded_buffer_length_ >= kMaxFrameSize * decoder->channels());
- assert(*operation == kNormal || *operation == kAccelerate ||
- *operation == kMerge || *operation == kPreemptiveExpand);
- packet_list->pop_front();
- int payload_length = packet->payload_length;
- int16_t decode_length;
- if (packet->sync_packet) {
- // Decode to silence with the same frame size as the last decode.
- LOG(LS_VERBOSE) << "Decoding sync-packet: " <<
- " ts=" << packet->header.timestamp <<
- ", sn=" << packet->header.sequenceNumber <<
- ", pt=" << static_cast<int>(packet->header.payloadType) <<
- ", ssrc=" << packet->header.ssrc <<
- ", len=" << packet->payload_length;
- memset(&decoded_buffer_[*decoded_length], 0, decoder_frame_length_ *
- decoder->channels() * sizeof(decoded_buffer_[0]));
- decode_length = decoder_frame_length_;
- } else if (!packet->primary) {
- // This is a redundant payload; call the special decoder method.
- LOG(LS_VERBOSE) << "Decoding packet (redundant):" <<
- " ts=" << packet->header.timestamp <<
- ", sn=" << packet->header.sequenceNumber <<
- ", pt=" << static_cast<int>(packet->header.payloadType) <<
- ", ssrc=" << packet->header.ssrc <<
- ", len=" << packet->payload_length;
- decode_length = decoder->DecodeRedundant(
- packet->payload, packet->payload_length,
- &decoded_buffer_[*decoded_length], speech_type);
- } else {
- LOG(LS_VERBOSE) << "Decoding packet: ts=" << packet->header.timestamp <<
- ", sn=" << packet->header.sequenceNumber <<
- ", pt=" << static_cast<int>(packet->header.payloadType) <<
- ", ssrc=" << packet->header.ssrc <<
- ", len=" << packet->payload_length;
- decode_length = decoder->Decode(packet->payload,
- packet->payload_length,
- &decoded_buffer_[*decoded_length],
- speech_type);
- }
-
- delete[] packet->payload;
- delete packet;
- packet = NULL;
- if (decode_length > 0) {
- *decoded_length += decode_length;
- // Update |decoder_frame_length_| with number of samples per channel.
- decoder_frame_length_ = decode_length /
- static_cast<int>(decoder->channels());
- LOG(LS_VERBOSE) << "Decoded " << decode_length << " samples (" <<
- decoder->channels() << " channel(s) -> " << decoder_frame_length_ <<
- " samples per channel)";
- } else if (decode_length < 0) {
- // Error.
- LOG_FERR2(LS_WARNING, Decode, decode_length, payload_length);
- *decoded_length = -1;
- PacketBuffer::DeleteAllPackets(packet_list);
- break;
- }
- if (*decoded_length > static_cast<int>(decoded_buffer_length_)) {
- // Guard against overflow.
- LOG_F(LS_WARNING) << "Decoded too much.";
- PacketBuffer::DeleteAllPackets(packet_list);
- return kDecodedTooMuch;
- }
- if (!packet_list->empty()) {
- packet = packet_list->front();
- } else {
- packet = NULL;
- }
- } // End of decode loop.
-
- // If the list is not empty at this point, either a decoding error terminated
- // the while-loop, or the list must hold exactly one CNG packet.
- assert(packet_list->empty() || *decoded_length < 0 ||
- (packet_list->size() == 1 && packet &&
- decoder_database_->IsComfortNoise(packet->header.payloadType)));
- return 0;
-}
-
-void NetEqImpl::DoNormal(const int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type, bool play_dtmf) {
- assert(normal_.get());
- assert(mute_factor_array_.get());
- normal_->Process(decoded_buffer, decoded_length, last_mode_,
- mute_factor_array_.get(), algorithm_buffer_.get());
- if (decoded_length != 0) {
- last_mode_ = kModeNormal;
- }
-
- // If last packet was decoded as an inband CNG, set mode to CNG instead.
- if ((speech_type == AudioDecoder::kComfortNoise)
- || ((last_mode_ == kModeCodecInternalCng)
- && (decoded_length == 0))) {
- // TODO(hlundin): Remove second part of || statement above.
- last_mode_ = kModeCodecInternalCng;
- }
-
- if (!play_dtmf) {
- dtmf_tone_generator_->Reset();
- }
-}
-
-void NetEqImpl::DoMerge(int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type, bool play_dtmf) {
- assert(mute_factor_array_.get());
- assert(merge_.get());
- int new_length = merge_->Process(decoded_buffer, decoded_length,
- mute_factor_array_.get(),
- algorithm_buffer_.get());
-
- // Update in-call and post-call statistics.
- if (expand_->MuteFactor(0) == 0) {
- // Expand generates only noise.
- stats_.ExpandedNoiseSamples(new_length - static_cast<int>(decoded_length));
- } else {
- // Expansion generates more than only noise.
- stats_.ExpandedVoiceSamples(new_length - static_cast<int>(decoded_length));
- }
-
- last_mode_ = kModeMerge;
- // If last packet was decoded as an inband CNG, set mode to CNG instead.
- if (speech_type == AudioDecoder::kComfortNoise) {
- last_mode_ = kModeCodecInternalCng;
- }
- expand_->Reset();
- if (!play_dtmf) {
- dtmf_tone_generator_->Reset();
- }
-}
-
-int NetEqImpl::DoExpand(bool play_dtmf) {
- while ((sync_buffer_->FutureLength() - expand_->overlap_length()) <
- static_cast<size_t>(output_size_samples_)) {
- algorithm_buffer_->Clear();
- int return_value = expand_->Process(algorithm_buffer_.get());
- int length = static_cast<int>(algorithm_buffer_->Size());
-
- // Update in-call and post-call statistics.
- if (expand_->MuteFactor(0) == 0) {
- // Expand operation generates only noise.
- stats_.ExpandedNoiseSamples(length);
- } else {
- // Expand operation generates more than only noise.
- stats_.ExpandedVoiceSamples(length);
- }
-
- last_mode_ = kModeExpand;
-
- if (return_value < 0) {
- return return_value;
- }
-
- sync_buffer_->PushBack(*algorithm_buffer_);
- algorithm_buffer_->Clear();
- }
- if (!play_dtmf) {
- dtmf_tone_generator_->Reset();
- }
- return 0;
-}
-
-int NetEqImpl::DoAccelerate(int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type,
- bool play_dtmf) {
- const size_t required_samples = 240 * fs_mult_; // Must have 30 ms.
- size_t borrowed_samples_per_channel = 0;
- size_t num_channels = algorithm_buffer_->Channels();
- size_t decoded_length_per_channel = decoded_length / num_channels;
- if (decoded_length_per_channel < required_samples) {
- // Must move data from the |sync_buffer_| in order to get 30 ms.
- borrowed_samples_per_channel = static_cast<int>(required_samples -
- decoded_length_per_channel);
- memmove(&decoded_buffer[borrowed_samples_per_channel * num_channels],
- decoded_buffer,
- sizeof(int16_t) * decoded_length);
- sync_buffer_->ReadInterleavedFromEnd(borrowed_samples_per_channel,
- decoded_buffer);
- decoded_length = required_samples * num_channels;
- }
-
- int16_t samples_removed;
- Accelerate::ReturnCodes return_code = accelerate_->Process(
- decoded_buffer, decoded_length, algorithm_buffer_.get(),
- &samples_removed);
- stats_.AcceleratedSamples(samples_removed);
- switch (return_code) {
- case Accelerate::kSuccess:
- last_mode_ = kModeAccelerateSuccess;
- break;
- case Accelerate::kSuccessLowEnergy:
- last_mode_ = kModeAccelerateLowEnergy;
- break;
- case Accelerate::kNoStretch:
- last_mode_ = kModeAccelerateFail;
- break;
- case Accelerate::kError:
- // TODO(hlundin): Map to kModeError instead?
- last_mode_ = kModeAccelerateFail;
- return kAccelerateError;
- }
-
- if (borrowed_samples_per_channel > 0) {
- // Copy borrowed samples back to the |sync_buffer_|.
- size_t length = algorithm_buffer_->Size();
- if (length < borrowed_samples_per_channel) {
- // This destroys the beginning of the buffer, but will not cause any
- // problems.
- sync_buffer_->ReplaceAtIndex(*algorithm_buffer_,
- sync_buffer_->Size() -
- borrowed_samples_per_channel);
- sync_buffer_->PushFrontZeros(borrowed_samples_per_channel - length);
- algorithm_buffer_->PopFront(length);
- assert(algorithm_buffer_->Empty());
- } else {
- sync_buffer_->ReplaceAtIndex(*algorithm_buffer_,
- borrowed_samples_per_channel,
- sync_buffer_->Size() -
- borrowed_samples_per_channel);
- algorithm_buffer_->PopFront(borrowed_samples_per_channel);
- }
- }
-
- // If last packet was decoded as an inband CNG, set mode to CNG instead.
- if (speech_type == AudioDecoder::kComfortNoise) {
- last_mode_ = kModeCodecInternalCng;
- }
- if (!play_dtmf) {
- dtmf_tone_generator_->Reset();
- }
- expand_->Reset();
- return 0;
-}
-
-int NetEqImpl::DoPreemptiveExpand(int16_t* decoded_buffer,
- size_t decoded_length,
- AudioDecoder::SpeechType speech_type,
- bool play_dtmf) {
- const size_t required_samples = 240 * fs_mult_; // Must have 30 ms.
- size_t num_channels = algorithm_buffer_->Channels();
- int borrowed_samples_per_channel = 0;
- int old_borrowed_samples_per_channel = 0;
- size_t decoded_length_per_channel = decoded_length / num_channels;
- if (decoded_length_per_channel < required_samples) {
- // Must move data from the |sync_buffer_| in order to get 30 ms.
- borrowed_samples_per_channel = static_cast<int>(required_samples -
- decoded_length_per_channel);
- // Calculate how many of these were already played out.
- old_borrowed_samples_per_channel = static_cast<int>(
- borrowed_samples_per_channel - sync_buffer_->FutureLength());
- old_borrowed_samples_per_channel = std::max(
- 0, old_borrowed_samples_per_channel);
- memmove(&decoded_buffer[borrowed_samples_per_channel * num_channels],
- decoded_buffer,
- sizeof(int16_t) * decoded_length);
- sync_buffer_->ReadInterleavedFromEnd(borrowed_samples_per_channel,
- decoded_buffer);
- decoded_length = required_samples * num_channels;
- }
-
- int16_t samples_added;
- PreemptiveExpand::ReturnCodes return_code = preemptive_expand_->Process(
- decoded_buffer, static_cast<int>(decoded_length),
- old_borrowed_samples_per_channel,
- algorithm_buffer_.get(), &samples_added);
- stats_.PreemptiveExpandedSamples(samples_added);
- switch (return_code) {
- case PreemptiveExpand::kSuccess:
- last_mode_ = kModePreemptiveExpandSuccess;
- break;
- case PreemptiveExpand::kSuccessLowEnergy:
- last_mode_ = kModePreemptiveExpandLowEnergy;
- break;
- case PreemptiveExpand::kNoStretch:
- last_mode_ = kModePreemptiveExpandFail;
- break;
- case PreemptiveExpand::kError:
- // TODO(hlundin): Map to kModeError instead?
- last_mode_ = kModePreemptiveExpandFail;
- return kPreemptiveExpandError;
- }
-
- if (borrowed_samples_per_channel > 0) {
- // Copy borrowed samples back to the |sync_buffer_|.
- sync_buffer_->ReplaceAtIndex(
- *algorithm_buffer_, borrowed_samples_per_channel,
- sync_buffer_->Size() - borrowed_samples_per_channel);
- algorithm_buffer_->PopFront(borrowed_samples_per_channel);
- }
-
- // If last packet was decoded as an inband CNG, set mode to CNG instead.
- if (speech_type == AudioDecoder::kComfortNoise) {
- last_mode_ = kModeCodecInternalCng;
- }
- if (!play_dtmf) {
- dtmf_tone_generator_->Reset();
- }
- expand_->Reset();
- return 0;
-}
-
-int NetEqImpl::DoRfc3389Cng(PacketList* packet_list, bool play_dtmf) {
- if (!packet_list->empty()) {
- // Must have exactly one SID frame at this point.
- assert(packet_list->size() == 1);
- Packet* packet = packet_list->front();
- packet_list->pop_front();
- if (!decoder_database_->IsComfortNoise(packet->header.payloadType)) {
-#ifdef LEGACY_BITEXACT
- // This can happen due to a bug in GetDecision. Change the payload type
- // to a CNG type, and move on. Note that this means that we are in fact
- // sending a non-CNG payload to the comfort noise decoder for decoding.
- // Clearly wrong, but will maintain bit-exactness with legacy.
- if (fs_hz_ == 8000) {
- packet->header.payloadType =
- decoder_database_->GetRtpPayloadType(kDecoderCNGnb);
- } else if (fs_hz_ == 16000) {
- packet->header.payloadType =
- decoder_database_->GetRtpPayloadType(kDecoderCNGwb);
- } else if (fs_hz_ == 32000) {
- packet->header.payloadType =
- decoder_database_->GetRtpPayloadType(kDecoderCNGswb32kHz);
- } else if (fs_hz_ == 48000) {
- packet->header.payloadType =
- decoder_database_->GetRtpPayloadType(kDecoderCNGswb48kHz);
- }
- assert(decoder_database_->IsComfortNoise(packet->header.payloadType));
-#else
- LOG(LS_ERROR) << "Trying to decode non-CNG payload as CNG.";
- return kOtherError;
-#endif
- }
- // UpdateParameters() deletes |packet|.
- if (comfort_noise_->UpdateParameters(packet) ==
- ComfortNoise::kInternalError) {
- LOG_FERR0(LS_WARNING, UpdateParameters);
- algorithm_buffer_->Zeros(output_size_samples_);
- return -comfort_noise_->internal_error_code();
- }
- }
- int cn_return = comfort_noise_->Generate(output_size_samples_,
- algorithm_buffer_.get());
- expand_->Reset();
- last_mode_ = kModeRfc3389Cng;
- if (!play_dtmf) {
- dtmf_tone_generator_->Reset();
- }
- if (cn_return == ComfortNoise::kInternalError) {
- LOG_FERR1(LS_WARNING, comfort_noise_->Generate, cn_return);
- decoder_error_code_ = comfort_noise_->internal_error_code();
- return kComfortNoiseErrorCode;
- } else if (cn_return == ComfortNoise::kUnknownPayloadType) {
- LOG_FERR1(LS_WARNING, comfort_noise_->Generate, cn_return);
- return kUnknownRtpPayloadType;
- }
- return 0;
-}
-
-void NetEqImpl::DoCodecInternalCng() {
- int length = 0;
- // TODO(hlundin): Will probably need a longer buffer for multi-channel.
- int16_t decoded_buffer[kMaxFrameSize];
- AudioDecoder* decoder = decoder_database_->GetActiveDecoder();
- if (decoder) {
- const uint8_t* dummy_payload = NULL;
- AudioDecoder::SpeechType speech_type;
- length = decoder->Decode(dummy_payload, 0, decoded_buffer, &speech_type);
- }
- assert(mute_factor_array_.get());
- normal_->Process(decoded_buffer, length, last_mode_, mute_factor_array_.get(),
- algorithm_buffer_.get());
- last_mode_ = kModeCodecInternalCng;
- expand_->Reset();
-}
-
-int NetEqImpl::DoDtmf(const DtmfEvent& dtmf_event, bool* play_dtmf) {
- // This block of code, and the block further down handling |dtmf_switch|,
- // are commented out. Otherwise, playing out-of-band DTMF would fail in the
- // VoE test DtmfTest.ManualSuccessfullySendsOutOfBandTelephoneEvents. This is
- // equivalent to |dtmf_switch| always being false.
- //
- // See http://webrtc-codereview.appspot.com/1195004/ for discussion
- // on this issue. This change might cause some glitches at the point of
- // switching from audio to DTMF. Issue 1545 is filed to track this.
- //
- // bool dtmf_switch = false;
- // if ((last_mode_ != kModeDtmf) && dtmf_tone_generator_->initialized()) {
- // // Special case; see below.
- // // We must catch this before calling Generate, since |initialized| is
- // // modified in that call.
- // dtmf_switch = true;
- // }
-
- int dtmf_return_value = 0;
- if (!dtmf_tone_generator_->initialized()) {
- // Initialize if not already done.
- dtmf_return_value = dtmf_tone_generator_->Init(fs_hz_, dtmf_event.event_no,
- dtmf_event.volume);
- }
-
- if (dtmf_return_value == 0) {
- // Generate DTMF signal.
- dtmf_return_value = dtmf_tone_generator_->Generate(output_size_samples_,
- algorithm_buffer_.get());
- }
-
- if (dtmf_return_value < 0) {
- algorithm_buffer_->Zeros(output_size_samples_);
- return dtmf_return_value;
- }
-
- // if (dtmf_switch) {
- // // This is the special case where the previous operation was DTMF
- // // overdub, but the current instruction is "regular" DTMF. We must make
- // // sure that the DTMF does not have any discontinuities. The first DTMF
- // // sample that we generate now must be played out immediately, therefore
- // // it must be copied to the speech buffer.
- // // TODO(hlundin): This code seems incorrect. (Legacy.) Write test and
- // // verify correct operation.
- // assert(false);
- // // Must generate enough data to replace all of the |sync_buffer_|
- // // "future".
- // int required_length = sync_buffer_->FutureLength();
- // assert(dtmf_tone_generator_->initialized());
- // dtmf_return_value = dtmf_tone_generator_->Generate(required_length,
- // algorithm_buffer_);
- // assert((size_t) required_length == algorithm_buffer_->Size());
- // if (dtmf_return_value < 0) {
- // algorithm_buffer_->Zeros(output_size_samples_);
- // return dtmf_return_value;
- // }
- //
- // // Overwrite the "future" part of the speech buffer with the new DTMF
- // // data.
- // // TODO(hlundin): It seems that this overwriting has been lost.
- // // Not adapted for multi-channel yet.
- // assert(algorithm_buffer_->Channels() == 1);
- // if (algorithm_buffer_->Channels() != 1) {
- // LOG(LS_WARNING) << "DTMF not supported for more than one channel";
- // return kStereoNotSupported;
- // }
- // // Shuffle the remaining data to the beginning of algorithm buffer.
- // algorithm_buffer_->PopFront(sync_buffer_->FutureLength());
- // }
-
- sync_buffer_->IncreaseEndTimestamp(output_size_samples_);
- expand_->Reset();
- last_mode_ = kModeDtmf;
-
- // Set to false because the DTMF is already in the algorithm buffer.
- *play_dtmf = false;
- return 0;
-}
-
-void NetEqImpl::DoAlternativePlc(bool increase_timestamp) {
- AudioDecoder* decoder = decoder_database_->GetActiveDecoder();
- int length;
- if (decoder && decoder->HasDecodePlc()) {
- // Use the decoder's packet-loss concealment.
- // TODO(hlundin): Will probably need a longer buffer for multi-channel.
- int16_t decoded_buffer[kMaxFrameSize];
- length = decoder->DecodePlc(1, decoded_buffer);
- if (length > 0) {
- algorithm_buffer_->PushBackInterleaved(decoded_buffer, length);
- } else {
- length = 0;
- }
- } else {
- // Do simple zero-stuffing.
- length = output_size_samples_;
- algorithm_buffer_->Zeros(length);
- // By not advancing the timestamp, NetEq inserts samples.
- stats_.AddZeros(length);
- }
- if (increase_timestamp) {
- sync_buffer_->IncreaseEndTimestamp(length);
- }
- expand_->Reset();
-}
-
-int NetEqImpl::DtmfOverdub(const DtmfEvent& dtmf_event, size_t num_channels,
- int16_t* output) const {
- size_t out_index = 0;
- int overdub_length = output_size_samples_; // Default value.
-
- if (sync_buffer_->dtmf_index() > sync_buffer_->next_index()) {
- // Special operation for transition from "DTMF only" to "DTMF overdub".
- out_index = std::min(
- sync_buffer_->dtmf_index() - sync_buffer_->next_index(),
- static_cast<size_t>(output_size_samples_));
- overdub_length = output_size_samples_ - static_cast<int>(out_index);
- }
-
- AudioMultiVector dtmf_output(num_channels);
- int dtmf_return_value = 0;
- if (!dtmf_tone_generator_->initialized()) {
- dtmf_return_value = dtmf_tone_generator_->Init(fs_hz_, dtmf_event.event_no,
- dtmf_event.volume);
- }
- if (dtmf_return_value == 0) {
- dtmf_return_value = dtmf_tone_generator_->Generate(overdub_length,
- &dtmf_output);
- assert((size_t) overdub_length == dtmf_output.Size());
- }
- dtmf_output.ReadInterleaved(overdub_length, &output[out_index]);
- return dtmf_return_value < 0 ? dtmf_return_value : 0;
-}
-
-int NetEqImpl::ExtractPackets(int required_samples, PacketList* packet_list) {
- bool first_packet = true;
- uint8_t prev_payload_type = 0;
- uint32_t prev_timestamp = 0;
- uint16_t prev_sequence_number = 0;
- bool next_packet_available = false;
-
- const RTPHeader* header = packet_buffer_->NextRtpHeader();
- assert(header);
- if (!header) {
- return -1;
- }
- uint32_t first_timestamp = header->timestamp;
- int extracted_samples = 0;
-
- // Packet extraction loop.
- do {
- timestamp_ = header->timestamp;
- int discard_count = 0;
- Packet* packet = packet_buffer_->GetNextPacket(&discard_count);
- // |header| may be invalid after the |packet_buffer_| operation.
- header = NULL;
- if (!packet) {
- LOG_FERR1(LS_ERROR, GetNextPacket, discard_count) <<
- "Should always be able to extract a packet here";
- assert(false); // Should always be able to extract a packet here.
- return -1;
- }
- stats_.PacketsDiscarded(discard_count);
- // Store waiting time in ms; packet->waiting_time is in "output blocks".
- stats_.StoreWaitingTime(packet->waiting_time * kOutputSizeMs);
- assert(packet->payload_length > 0);
- packet_list->push_back(packet); // Store packet in list.
-
- if (first_packet) {
- first_packet = false;
- decoded_packet_sequence_number_ = prev_sequence_number =
- packet->header.sequenceNumber;
- decoded_packet_timestamp_ = prev_timestamp = packet->header.timestamp;
- prev_payload_type = packet->header.payloadType;
- }
-
- // Store number of extracted samples.
- int packet_duration = 0;
- AudioDecoder* decoder = decoder_database_->GetDecoder(
- packet->header.payloadType);
- if (decoder) {
- packet_duration = packet->sync_packet ? decoder_frame_length_ :
- decoder->PacketDuration(packet->payload, packet->payload_length);
- } else {
- LOG_FERR1(LS_WARNING, GetDecoder, packet->header.payloadType) <<
- "Could not find a decoder for a packet about to be extracted.";
- assert(false);
- }
- if (packet_duration <= 0) {
- // Decoder did not return a packet duration. Assume that the packet
- // contains the same number of samples as the previous one.
- packet_duration = decoder_frame_length_;
- }
- extracted_samples = packet->header.timestamp - first_timestamp +
- packet_duration;
-
- // Check what packet is available next.
- header = packet_buffer_->NextRtpHeader();
- next_packet_available = false;
- if (header && prev_payload_type == header->payloadType) {
- int16_t seq_no_diff = header->sequenceNumber - prev_sequence_number;
- int32_t ts_diff = header->timestamp - prev_timestamp;
- if (seq_no_diff == 1 ||
- (seq_no_diff == 0 && ts_diff == decoder_frame_length_)) {
- // The next sequence number is available, or the next part of a packet
- // that was split into pieces upon insertion.
- next_packet_available = true;
- }
- prev_sequence_number = header->sequenceNumber;
- }
- } while (extracted_samples < required_samples && next_packet_available);
-
- return extracted_samples;
-}
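The continuity check near the end of the extraction loop above accepts either the next sequence number or another piece of a packet that was split on insertion. Below is a minimal standalone restatement of that predicate, with made-up header values.

#include <assert.h>
#include <stdint.h>

// Mirrors the check in ExtractPackets(): the next packet is usable if it is
// the next sequence number, or the next split piece of the same packet (same
// sequence number, timestamp advanced by one decoded frame).
static bool NextPacketIsContiguous(uint16_t prev_seq, uint32_t prev_ts,
                                   uint16_t next_seq, uint32_t next_ts,
                                   int decoder_frame_length) {
  const int16_t seq_no_diff = next_seq - prev_seq;
  const int32_t ts_diff = next_ts - prev_ts;
  return seq_no_diff == 1 ||
         (seq_no_diff == 0 && ts_diff == decoder_frame_length);
}

int main() {
  // A packet split into 20 ms pieces at 8 kHz (160 samples each) keeps its
  // sequence number while the timestamp advances by one decoded frame.
  assert(NextPacketIsContiguous(100, 8000, 100, 8160, 160));
  // An ordinary stream advances the sequence number by one.
  assert(NextPacketIsContiguous(100, 8000, 101, 8160, 160));
  // A two-packet gap is not contiguous.
  assert(!NextPacketIsContiguous(100, 8000, 102, 8320, 160));
  return 0;
}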
-
-void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) {
- LOG_API2(fs_hz, channels);
- // TODO(hlundin): Change to an enumerator and skip assert.
- assert(fs_hz == 8000 || fs_hz == 16000 || fs_hz == 32000 || fs_hz == 48000);
- assert(channels > 0);
-
- fs_hz_ = fs_hz;
- fs_mult_ = fs_hz / 8000;
- output_size_samples_ = kOutputSizeMs * 8 * fs_mult_;
- decoder_frame_length_ = 3 * output_size_samples_; // Initialize to 30ms.
-
- last_mode_ = kModeNormal;
-
- // Create a new array of mute factors and set all to 1.
- mute_factor_array_.reset(new int16_t[channels]);
- for (size_t i = 0; i < channels; ++i) {
- mute_factor_array_[i] = 16384; // 1.0 in Q14.
- }
-
- // Reset comfort noise decoder, if there is one active.
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
- if (cng_decoder) {
- cng_decoder->Init();
- }
-
- // Reinit post-decode VAD with new sample rate.
- assert(vad_.get()); // Cannot be NULL here.
- vad_->Init();
-
- // Delete algorithm buffer and create a new one.
- algorithm_buffer_.reset(new AudioMultiVector(channels));
-
- // Delete sync buffer and create a new one.
- sync_buffer_.reset(new SyncBuffer(channels, kSyncBufferSize * fs_mult_));
-
-
- // Delete BackgroundNoise object and create a new one, while preserving its
- // mode.
- NetEqBackgroundNoiseMode current_mode = kBgnOn;
- if (background_noise_.get())
- current_mode = background_noise_->mode();
- background_noise_.reset(new BackgroundNoise(channels));
- background_noise_->set_mode(current_mode);
-
- // Reset random vector.
- random_vector_.Reset();
-
- // Delete Expand object and create a new one.
- expand_.reset(new Expand(background_noise_.get(), sync_buffer_.get(),
- &random_vector_, fs_hz, channels));
- // Move index so that we create a small set of future samples (all 0).
- sync_buffer_->set_next_index(sync_buffer_->next_index() -
- expand_->overlap_length());
-
- normal_.reset(new Normal(fs_hz, decoder_database_.get(), *background_noise_,
- expand_.get()));
- merge_.reset(new Merge(fs_hz, channels, expand_.get(), sync_buffer_.get()));
- accelerate_.reset(new Accelerate(fs_hz, channels, *background_noise_));
- preemptive_expand_.reset(new PreemptiveExpand(fs_hz, channels,
- *background_noise_));
-
- // Delete ComfortNoise object and create a new one.
- comfort_noise_.reset(new ComfortNoise(fs_hz, decoder_database_.get(),
- sync_buffer_.get()));
-
- // Verify that |decoded_buffer_| is long enough.
- if (decoded_buffer_length_ < kMaxFrameSize * channels) {
- // Reallocate to larger size.
- decoded_buffer_length_ = kMaxFrameSize * channels;
- decoded_buffer_.reset(new int16_t[decoded_buffer_length_]);
- }
-
- // Communicate new sample rate and output size to DecisionLogic object.
- assert(decision_logic_.get());
- decision_logic_->SetSampleRate(fs_hz_, output_size_samples_);
-}
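As a quick sanity check on the values set above, the standalone sketch below (not from this file) works out the derived sizes at 48 kHz and what the Q14 mute factor of 16384 represents.

#include <assert.h>
#include <stdint.h>

int main() {
  // At 48 kHz: fs_mult_ = 48000 / 8000 = 6.
  const int fs_mult = 6;
  const int output_size_samples = 10 * 8 * fs_mult;           // kOutputSizeMs * 8 * fs_mult_.
  const int decoder_frame_length = 3 * output_size_samples;   // Initial value: 30 ms.
  assert(output_size_samples == 480);
  assert(decoder_frame_length == 1440);

  // A Q14 value x represents x / 16384, so the initial mute factor of 16384
  // corresponds to exactly 1.0 (i.e., no muting).
  const int16_t mute_factor_q14 = 16384;
  const double mute_factor = static_cast<double>(mute_factor_q14) / (1 << 14);
  assert(mute_factor == 1.0);
  return 0;
}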
-
-NetEqOutputType NetEqImpl::LastOutputType() {
- assert(vad_.get());
- assert(expand_.get());
- if (last_mode_ == kModeCodecInternalCng || last_mode_ == kModeRfc3389Cng) {
- return kOutputCNG;
- } else if (last_mode_ == kModeExpand && expand_->MuteFactor(0) == 0) {
- // Expand mode has faded down to background noise only (very long expand).
- return kOutputPLCtoCNG;
- } else if (last_mode_ == kModeExpand) {
- return kOutputPLC;
- } else if (vad_->running() && !vad_->active_speech()) {
- return kOutputVADPassive;
- } else {
- return kOutputNormal;
- }
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.h
deleted file mode 100644
index 83dd58b5acb..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.h
+++ /dev/null
@@ -1,360 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NETEQ_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NETEQ_IMPL_H_
-
-#include <vector>
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h" // Declare PacketList.
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/rtcp.h"
-#include "webrtc/modules/audio_coding/neteq4/statistics_calculator.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class Accelerate;
-class BackgroundNoise;
-class BufferLevelFilter;
-class ComfortNoise;
-class CriticalSectionWrapper;
-class DecisionLogic;
-class DecoderDatabase;
-class DelayManager;
-class DelayPeakDetector;
-class DtmfBuffer;
-class DtmfToneGenerator;
-class Expand;
-class Merge;
-class Normal;
-class PacketBuffer;
-class PayloadSplitter;
-class PostDecodeVad;
-class PreemptiveExpand;
-class RandomVector;
-class SyncBuffer;
-class TimestampScaler;
-struct DtmfEvent;
-
-class NetEqImpl : public webrtc::NetEq {
- public:
- // Creates a new NetEqImpl object. The object will assume ownership of all
- // injected dependencies, and will delete them when done.
- NetEqImpl(int fs,
- BufferLevelFilter* buffer_level_filter,
- DecoderDatabase* decoder_database,
- DelayManager* delay_manager,
- DelayPeakDetector* delay_peak_detector,
- DtmfBuffer* dtmf_buffer,
- DtmfToneGenerator* dtmf_tone_generator,
- PacketBuffer* packet_buffer,
- PayloadSplitter* payload_splitter,
- TimestampScaler* timestamp_scaler);
-
- virtual ~NetEqImpl();
-
- // Inserts a new packet into NetEq. The |receive_timestamp| is an indication
- // of the time when the packet was received, and should be measured with
- // the same tick rate as the RTP timestamp of the current payload.
- // Returns 0 on success, -1 on failure.
- virtual int InsertPacket(const WebRtcRTPHeader& rtp_header,
- const uint8_t* payload,
- int length_bytes,
- uint32_t receive_timestamp);
-
- // Inserts a sync-packet into the packet queue. Sync-packets are decoded to
- // silence and are intended to keep AV-sync intact in the event of long packet
- // losses when Video NACK is enabled but Audio NACK is not. Clients of NetEq
- // might insert a sync-packet when they observe that the buffer level of NetEq
- // is decreasing below a certain threshold, defined by the application.
- // Sync-packets should have the same payload type as the last audio payload
- // type, i.e., they cannot have a DTMF or CNG payload type, nor can a codec
- // change be implied by inserting a sync-packet.
- // Returns kOk on success, kFail on failure.
- virtual int InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
- uint32_t receive_timestamp);
-
- // Instructs NetEq to deliver 10 ms of audio data. The data is written to
- // |output_audio|, which can hold (at least) |max_length| elements.
- // The number of channels that were written to the output is provided in
- // the output variable |num_channels|, and each channel contains
- // |samples_per_channel| elements. If more than one channel is written,
- // the samples are interleaved.
- // The speech type is written to |type|, if |type| is not NULL.
- // Returns kOK on success, or kFail in case of an error.
- virtual int GetAudio(size_t max_length, int16_t* output_audio,
- int* samples_per_channel, int* num_channels,
- NetEqOutputType* type);
-
- // Associates |rtp_payload_type| with |codec| and stores the information in
- // the codec database. Returns kOK on success, kFail on failure.
- virtual int RegisterPayloadType(enum NetEqDecoder codec,
- uint8_t rtp_payload_type);
-
- // Provides an externally created decoder object |decoder| to insert in the
- // decoder database. The decoder implements a decoder of type |codec| and
- // associates it with |rtp_payload_type|. The decoder operates at the
- // frequency |sample_rate_hz|. Returns kOK on success, kFail on failure.
- virtual int RegisterExternalDecoder(AudioDecoder* decoder,
- enum NetEqDecoder codec,
- int sample_rate_hz,
- uint8_t rtp_payload_type);
-
- // Removes |rtp_payload_type| from the codec database. Returns 0 on success,
- // -1 on failure.
- virtual int RemovePayloadType(uint8_t rtp_payload_type);
-
- virtual bool SetMinimumDelay(int delay_ms);
-
- virtual bool SetMaximumDelay(int delay_ms);
-
- virtual int LeastRequiredDelayMs() const;
-
- virtual int SetTargetDelay() { return kNotImplemented; }
-
- virtual int TargetDelay() { return kNotImplemented; }
-
- virtual int CurrentDelay() { return kNotImplemented; }
-
- // Sets the playout mode to |mode|.
- virtual void SetPlayoutMode(NetEqPlayoutMode mode);
-
- // Returns the current playout mode.
- virtual NetEqPlayoutMode PlayoutMode() const;
-
- // Writes the current network statistics to |stats|. The statistics are reset
- // after the call.
- virtual int NetworkStatistics(NetEqNetworkStatistics* stats);
-
- // Writes the last packet waiting times (in ms) to |waiting_times|. The number
- // of values written is no more than 100, but may be smaller if the interface
- // is polled again before 100 packets have arrived.
- virtual void WaitingTimes(std::vector<int>* waiting_times);
-
- // Writes the current RTCP statistics to |stats|. The statistics are reset
- // and a new report period is started with the call.
- virtual void GetRtcpStatistics(RtcpStatistics* stats);
-
- // Same as GetRtcpStatistics(), but does not reset anything.
- virtual void GetRtcpStatisticsNoReset(RtcpStatistics* stats);
-
- // Enables post-decode VAD. When enabled, GetAudio() will return
- // kOutputVADPassive when the signal contains no speech.
- virtual void EnableVad();
-
- // Disables post-decode VAD.
- virtual void DisableVad();
-
- // Returns the RTP timestamp for the last sample delivered by GetAudio().
- virtual uint32_t PlayoutTimestamp();
-
- virtual int SetTargetNumberOfChannels() { return kNotImplemented; }
-
- virtual int SetTargetSampleRate() { return kNotImplemented; }
-
- // Returns the error code for the last occurred error. If no error has
- // occurred, 0 is returned.
- virtual int LastError();
-
- // Returns the error code last returned by a decoder (audio or comfort noise).
- // When LastError() returns kDecoderErrorCode or kComfortNoiseErrorCode, check
- // this method to get the decoder's error code.
- virtual int LastDecoderError();
-
- // Flushes both the packet buffer and the sync buffer.
- virtual void FlushBuffers();
-
- virtual void PacketBufferStatistics(int* current_num_packets,
- int* max_num_packets,
- int* current_memory_size_bytes,
- int* max_memory_size_bytes) const;
-
- // Get sequence number and timestamp of the latest RTP.
- // This method is to facilitate NACK.
- virtual int DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const;
-
- // Sets background noise mode.
- virtual void SetBackgroundNoiseMode(NetEqBackgroundNoiseMode mode);
-
- // Gets background noise mode.
- virtual NetEqBackgroundNoiseMode BackgroundNoiseMode() const;
-
- private:
- static const int kOutputSizeMs = 10;
- static const int kMaxFrameSize = 2880; // 60 ms @ 48 kHz.
- // TODO(hlundin): Provide a better value for kSyncBufferSize.
- static const int kSyncBufferSize = 2 * kMaxFrameSize;
-
- // Inserts a new packet into NetEq. This is used by the InsertPacket method
- // above. Returns 0 on success, otherwise an error code.
- // TODO(hlundin): Merge this with InsertPacket above?
- int InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
- const uint8_t* payload,
- int length_bytes,
- uint32_t receive_timestamp,
- bool is_sync_packet);
-
-
- // Delivers 10 ms of audio data. The data is written to |output|, which can
- // hold (at least) |max_length| elements. The number of channels that were
- // written to the output is provided in the output variable |num_channels|,
- // and each channel contains |samples_per_channel| elements. If more than one
- // channel is written, the samples are interleaved.
- // Returns 0 on success, otherwise an error code.
- int GetAudioInternal(size_t max_length, int16_t* output,
- int* samples_per_channel, int* num_channels);
-
-
- // Provides a decision to the GetAudioInternal method. The decision what to
- // do is written to |operation|. Packets to decode are written to
- // |packet_list|, and a DTMF event to play is written to |dtmf_event|. When
- // DTMF should be played, |play_dtmf| is set to true by the method.
- // Returns 0 on success, otherwise an error code.
- int GetDecision(Operations* operation,
- PacketList* packet_list,
- DtmfEvent* dtmf_event,
- bool* play_dtmf);
-
- // Decodes the speech packets in |packet_list|, and writes the results to
- // |decoded_buffer|, which is allocated to hold |decoded_buffer_length|
- // elements. The length of the decoded data is written to |decoded_length|.
- // The speech type -- speech or (codec-internal) comfort noise -- is written
- // to |speech_type|. If |packet_list| contains any SID frames for RFC 3389
- // comfort noise, those are not decoded.
- int Decode(PacketList* packet_list, Operations* operation,
- int* decoded_length, AudioDecoder::SpeechType* speech_type);
-
- // Sub-method to Decode(). Performs the actual decoding.
- int DecodeLoop(PacketList* packet_list, Operations* operation,
- AudioDecoder* decoder, int* decoded_length,
- AudioDecoder::SpeechType* speech_type);
-
- // Sub-method which calls the Normal class to perform the normal operation.
- void DoNormal(const int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type, bool play_dtmf);
-
- // Sub-method which calls the Merge class to perform the merge operation.
- void DoMerge(int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type, bool play_dtmf);
-
- // Sub-method which calls the Expand class to perform the expand operation.
- int DoExpand(bool play_dtmf);
-
- // Sub-method which calls the Accelerate class to perform the accelerate
- // operation.
- int DoAccelerate(int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type, bool play_dtmf);
-
- // Sub-method which calls the PreemptiveExpand class to perform the
- // preemptive expand operation.
- int DoPreemptiveExpand(int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type, bool play_dtmf);
-
- // Sub-method which calls the ComfortNoise class to generate RFC 3389 comfort
- // noise. |packet_list| can either contain one SID frame to update the
- // noise parameters, or no payload at all, in which case the previously
- // received parameters are used.
- int DoRfc3389Cng(PacketList* packet_list, bool play_dtmf);
-
- // Calls the audio decoder to generate codec-internal comfort noise when
- // no packet was received.
- void DoCodecInternalCng();
-
- // Calls the DtmfToneGenerator class to generate DTMF tones.
- int DoDtmf(const DtmfEvent& dtmf_event, bool* play_dtmf);
-
- // Produces packet-loss concealment using alternative methods. If the codec
- // has an internal PLC, it is called to generate samples. Otherwise, the
- // method performs zero-stuffing.
- void DoAlternativePlc(bool increase_timestamp);
-
- // Overdub DTMF on top of |output|.
- int DtmfOverdub(const DtmfEvent& dtmf_event, size_t num_channels,
- int16_t* output) const;
-
- // Extracts packets from |packet_buffer_| to produce at least
- // |required_samples| samples. The packets are inserted into |packet_list|.
- // Returns the number of samples that the packets in the list will produce, or
- // -1 in case of an error.
- int ExtractPackets(int required_samples, PacketList* packet_list);
-
- // Resets various variables and objects to new values based on the sample rate
- // |fs_hz| and the number of audio channels, |channels|.
- void SetSampleRateAndChannels(int fs_hz, size_t channels);
-
- // Returns the output type for the audio produced by the latest call to
- // GetAudio().
- NetEqOutputType LastOutputType();
-
- scoped_ptr<BackgroundNoise> background_noise_;
- scoped_ptr<BufferLevelFilter> buffer_level_filter_;
- scoped_ptr<DecoderDatabase> decoder_database_;
- scoped_ptr<DelayManager> delay_manager_;
- scoped_ptr<DelayPeakDetector> delay_peak_detector_;
- scoped_ptr<DtmfBuffer> dtmf_buffer_;
- scoped_ptr<DtmfToneGenerator> dtmf_tone_generator_;
- scoped_ptr<PacketBuffer> packet_buffer_;
- scoped_ptr<PayloadSplitter> payload_splitter_;
- scoped_ptr<TimestampScaler> timestamp_scaler_;
- scoped_ptr<DecisionLogic> decision_logic_;
- scoped_ptr<PostDecodeVad> vad_;
- scoped_ptr<AudioMultiVector> algorithm_buffer_;
- scoped_ptr<SyncBuffer> sync_buffer_;
- scoped_ptr<Expand> expand_;
- scoped_ptr<Normal> normal_;
- scoped_ptr<Merge> merge_;
- scoped_ptr<Accelerate> accelerate_;
- scoped_ptr<PreemptiveExpand> preemptive_expand_;
- RandomVector random_vector_;
- scoped_ptr<ComfortNoise> comfort_noise_;
- Rtcp rtcp_;
- StatisticsCalculator stats_;
- int fs_hz_;
- int fs_mult_;
- int output_size_samples_;
- int decoder_frame_length_;
- Modes last_mode_;
- scoped_array<int16_t> mute_factor_array_;
- size_t decoded_buffer_length_;
- scoped_array<int16_t> decoded_buffer_;
- uint32_t playout_timestamp_;
- bool new_codec_;
- uint32_t timestamp_;
- bool reset_decoder_;
- uint8_t current_rtp_payload_type_;
- uint8_t current_cng_rtp_payload_type_;
- uint32_t ssrc_;
- bool first_packet_;
- int error_code_; // Store last error code.
- int decoder_error_code_;
- scoped_ptr<CriticalSectionWrapper> crit_sect_;
-
- // These values are used by the NACK module to estimate the time-to-play of
- // a missing packet. Occasionally, NetEq might decide to decode more
- // than one packet. Therefore, these values store the sequence number and
- // timestamp of the first packet pulled from the packet buffer. In
- // such cases, these values do not exactly represent the sequence number
- // or timestamp associated with 10 ms of audio pulled from NetEq. The NACK
- // module is designed to compensate for this.
- int decoded_packet_sequence_number_;
- uint32_t decoded_packet_timestamp_;
-
- DISALLOW_COPY_AND_ASSIGN(NetEqImpl);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NETEQ_IMPL_H_
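For orientation, here is a minimal usage sketch of the public interface declared above. The include paths, payload type, and header field values follow the unit test further down; the function name, packet contents, timestamps, and buffer sizes are illustrative, and error handling is omitted.

#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
#include "webrtc/typedefs.h"

namespace webrtc {

void MinimalNetEqLoop() {
  NetEq* neteq = NetEq::Create(8000);           // 8 kHz initial sample rate.
  neteq->RegisterPayloadType(kDecoderPCMu, 0);  // Map RTP payload type 0 to PCMu.

  // Insert one hypothetical 20 ms PCMu packet (160 bytes at 8 kHz).
  WebRtcRTPHeader rtp_header;
  rtp_header.header.payloadType = 0;
  rtp_header.header.sequenceNumber = 0x1234;
  rtp_header.header.timestamp = 0x12345678;
  rtp_header.header.ssrc = 0x87654321;
  uint8_t payload[160] = {0};
  neteq->InsertPacket(rtp_header, payload, static_cast<int>(sizeof(payload)),
                      /*receive_timestamp=*/0);
  // A sync-packet (decoded to silence) could be inserted the same way through
  // InsertSyncPacket(rtp_header, receive_timestamp).

  // Pull audio in 10 ms blocks; each call fills at most |max_length| samples.
  int16_t output[960];  // Enough for 10 ms of 48 kHz stereo, interleaved.
  int samples_per_channel = 0;
  int num_channels = 0;
  NetEqOutputType type;
  for (int i = 0; i < 2; ++i) {
    neteq->GetAudio(sizeof(output) / sizeof(output[0]), output,
                    &samples_per_channel, &num_channels, &type);
  }
  delete neteq;
}

}  // namespace webrtc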
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc
deleted file mode 100644
index 7a82053918c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/neteq_impl.h"
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_payload_splitter.h"
-#include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h"
-
-using ::testing::Return;
-using ::testing::ReturnNull;
-using ::testing::_;
-using ::testing::SetArgPointee;
-using ::testing::InSequence;
-using ::testing::Invoke;
-using ::testing::WithArg;
-
-namespace webrtc {
-
-// This function is called when inserting a packet list into the mock packet
-// buffer. The purpose is to delete all inserted packets properly, to avoid
-// memory leaks in the test.
-int DeletePacketsAndReturnOk(PacketList* packet_list) {
- PacketBuffer::DeleteAllPackets(packet_list);
- return PacketBuffer::kOK;
-}
-
-class NetEqImplTest : public ::testing::Test {
- protected:
- static const int kInitSampleRateHz = 8000;
- NetEqImplTest() {
- buffer_level_filter_ = new MockBufferLevelFilter;
- decoder_database_ = new MockDecoderDatabase;
- delay_peak_detector_ = new MockDelayPeakDetector;
- EXPECT_CALL(*delay_peak_detector_, Reset()).Times(1);
- delay_manager_ = new MockDelayManager(NetEq::kMaxNumPacketsInBuffer,
- delay_peak_detector_);
- dtmf_buffer_ = new MockDtmfBuffer(kInitSampleRateHz);
- dtmf_tone_generator_ = new MockDtmfToneGenerator;
- packet_buffer_ = new MockPacketBuffer(NetEq::kMaxNumPacketsInBuffer,
- NetEq::kMaxBytesInBuffer);
- payload_splitter_ = new MockPayloadSplitter;
- timestamp_scaler_ = new TimestampScaler(*decoder_database_);
- EXPECT_CALL(*decoder_database_, GetActiveCngDecoder())
- .WillOnce(ReturnNull());
- neteq_ = new NetEqImpl(kInitSampleRateHz,
- buffer_level_filter_,
- decoder_database_,
- delay_manager_,
- delay_peak_detector_,
- dtmf_buffer_,
- dtmf_tone_generator_,
- packet_buffer_,
- payload_splitter_,
- timestamp_scaler_);
- }
-
- virtual ~NetEqImplTest() {
- EXPECT_CALL(*buffer_level_filter_, Die()).Times(1);
- EXPECT_CALL(*decoder_database_, Die()).Times(1);
- EXPECT_CALL(*delay_manager_, Die()).Times(1);
- EXPECT_CALL(*delay_peak_detector_, Die()).Times(1);
- EXPECT_CALL(*dtmf_buffer_, Die()).Times(1);
- EXPECT_CALL(*dtmf_tone_generator_, Die()).Times(1);
- EXPECT_CALL(*packet_buffer_, Die()).Times(1);
- delete neteq_;
- }
-
- NetEqImpl* neteq_;
- MockBufferLevelFilter* buffer_level_filter_;
- MockDecoderDatabase* decoder_database_;
- MockDelayPeakDetector* delay_peak_detector_;
- MockDelayManager* delay_manager_;
- MockDtmfBuffer* dtmf_buffer_;
- MockDtmfToneGenerator* dtmf_tone_generator_;
- MockPacketBuffer* packet_buffer_;
- MockPayloadSplitter* payload_splitter_;
- TimestampScaler* timestamp_scaler_;
-};
-
-
-// This tests the interface class NetEq.
-// TODO(hlundin): Move to separate file?
-TEST(NetEq, CreateAndDestroy) {
- NetEq* neteq = NetEq::Create(8000);
- delete neteq;
-}
-
-TEST_F(NetEqImplTest, RegisterPayloadType) {
- uint8_t rtp_payload_type = 0;
- NetEqDecoder codec_type = kDecoderPCMu;
- EXPECT_CALL(*decoder_database_,
- RegisterPayload(rtp_payload_type, codec_type));
- neteq_->RegisterPayloadType(codec_type, rtp_payload_type);
-}
-
-TEST_F(NetEqImplTest, RemovePayloadType) {
- uint8_t rtp_payload_type = 0;
- EXPECT_CALL(*decoder_database_,
- Remove(rtp_payload_type))
- .WillOnce(Return(DecoderDatabase::kDecoderNotFound));
- // Check that kFail is returned when database returns kDecoderNotFound.
- EXPECT_EQ(NetEq::kFail, neteq_->RemovePayloadType(rtp_payload_type));
-}
-
-TEST_F(NetEqImplTest, InsertPacket) {
- const int kPayloadLength = 100;
- const uint8_t kPayloadType = 0;
- const uint16_t kFirstSequenceNumber = 0x1234;
- const uint32_t kFirstTimestamp = 0x12345678;
- const uint32_t kSsrc = 0x87654321;
- const uint32_t kFirstReceiveTime = 17;
- uint8_t payload[kPayloadLength] = {0};
- WebRtcRTPHeader rtp_header;
- rtp_header.header.payloadType = kPayloadType;
- rtp_header.header.sequenceNumber = kFirstSequenceNumber;
- rtp_header.header.timestamp = kFirstTimestamp;
- rtp_header.header.ssrc = kSsrc;
-
- // Create a mock decoder object.
- MockAudioDecoder mock_decoder;
- // BWE update function called with first packet.
- EXPECT_CALL(mock_decoder, IncomingPacket(_,
- kPayloadLength,
- kFirstSequenceNumber,
- kFirstTimestamp,
- kFirstReceiveTime));
- // BWE update function called with second packet.
- EXPECT_CALL(mock_decoder, IncomingPacket(_,
- kPayloadLength,
- kFirstSequenceNumber + 1,
- kFirstTimestamp + 160,
- kFirstReceiveTime + 155));
- EXPECT_CALL(mock_decoder, Die()).Times(1); // Called when deleted.
-
- // Expectations for decoder database.
- EXPECT_CALL(*decoder_database_, IsRed(kPayloadType))
- .WillRepeatedly(Return(false)); // This is not RED.
- EXPECT_CALL(*decoder_database_, CheckPayloadTypes(_))
- .Times(2)
- .WillRepeatedly(Return(DecoderDatabase::kOK)); // Payload type is valid.
- EXPECT_CALL(*decoder_database_, IsDtmf(kPayloadType))
- .WillRepeatedly(Return(false)); // This is not DTMF.
- EXPECT_CALL(*decoder_database_, GetDecoder(kPayloadType))
- .Times(3)
- .WillRepeatedly(Return(&mock_decoder));
- EXPECT_CALL(*decoder_database_, IsComfortNoise(kPayloadType))
- .WillRepeatedly(Return(false)); // This is not CNG.
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderPCMu;
- EXPECT_CALL(*decoder_database_, GetDecoderInfo(kPayloadType))
- .WillRepeatedly(Return(&info));
-
- // Expectations for packet buffer.
- EXPECT_CALL(*packet_buffer_, NumPacketsInBuffer())
- .WillOnce(Return(0)) // First packet.
- .WillOnce(Return(1)) // Second packet.
- .WillOnce(Return(2)); // Second packet, checking after it was inserted.
- EXPECT_CALL(*packet_buffer_, Flush())
- .Times(1);
- EXPECT_CALL(*packet_buffer_, InsertPacketList(_, _, _, _))
- .Times(2)
- .WillRepeatedly(DoAll(SetArgPointee<2>(kPayloadType),
- WithArg<0>(Invoke(DeletePacketsAndReturnOk))));
- // SetArgPointee<2>(kPayloadType) means that the third argument (zero-based
- // index) is a pointer, and the variable pointed to is set to kPayloadType.
- // Also invoke the function DeletePacketsAndReturnOk to properly delete all
- // packets in the list (to avoid memory leaks in the test).
- EXPECT_CALL(*packet_buffer_, NextRtpHeader())
- .Times(1)
- .WillOnce(Return(&rtp_header.header));
-
- // Expectations for DTMF buffer.
- EXPECT_CALL(*dtmf_buffer_, Flush())
- .Times(1);
-
- // Expectations for delay manager.
- {
- // All expectations within this block must be matched in this specific order.
- InSequence sequence; // Dummy variable.
- // Expectations when the first packet is inserted.
- EXPECT_CALL(*delay_manager_, LastDecoderType(kDecoderPCMu))
- .Times(1);
- EXPECT_CALL(*delay_manager_, last_pack_cng_or_dtmf())
- .Times(2)
- .WillRepeatedly(Return(-1));
- EXPECT_CALL(*delay_manager_, set_last_pack_cng_or_dtmf(0))
- .Times(1);
- EXPECT_CALL(*delay_manager_, ResetPacketIatCount()).Times(1);
- // Expectations when the second packet is inserted. Slightly different.
- EXPECT_CALL(*delay_manager_, LastDecoderType(kDecoderPCMu))
- .Times(1);
- EXPECT_CALL(*delay_manager_, last_pack_cng_or_dtmf())
- .WillOnce(Return(0));
- }
-
- // Expectations for payload splitter.
- EXPECT_CALL(*payload_splitter_, SplitAudio(_, _))
- .Times(2)
- .WillRepeatedly(Return(PayloadSplitter::kOK));
-
- // Insert first packet.
- neteq_->InsertPacket(rtp_header, payload, kPayloadLength, kFirstReceiveTime);
-
- // Insert second packet.
- rtp_header.header.timestamp += 160;
- rtp_header.header.sequenceNumber += 1;
- neteq_->InsertPacket(rtp_header, payload, kPayloadLength,
- kFirstReceiveTime + 155);
-}
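For readers less familiar with the gmock action composition used in the test above, here is a minimal, self-contained sketch of how DoAll, SetArgPointee and WithArg(Invoke(...)) combine. MockSink, Consume and ClearListAndReturnOk are hypothetical names, and the sketch assumes the same gtest/gmock setup as the unit tests in this tree.

#include <list>

#include "gmock/gmock.h"
#include "gtest/gtest.h"

using ::testing::_;
using ::testing::DoAll;
using ::testing::Invoke;
using ::testing::SetArgPointee;
using ::testing::WithArg;

class MockSink {
 public:
  // The third argument (zero-based index 2) is an output parameter.
  MOCK_METHOD3(Consume, int(std::list<int>* items, int flags, int* out_type));
};

// Mirrors the role of DeletePacketsAndReturnOk above: clean up the container
// passed as argument 0, then report success.
int ClearListAndReturnOk(std::list<int>* items) {
  items->clear();
  return 0;  // DoAll returns the value of its last action.
}

TEST(ActionCompositionSketch, SetsOutputAndInvokesHelper) {
  MockSink sink;
  EXPECT_CALL(sink, Consume(_, _, _))
      .WillOnce(DoAll(SetArgPointee<2>(42),  // Write 42 through the out pointer.
                      WithArg<0>(Invoke(ClearListAndReturnOk))));
  std::list<int> items;
  items.push_back(1);
  int out_type = 0;
  EXPECT_EQ(0, sink.Consume(&items, 0, &out_type));
  EXPECT_EQ(42, out_type);
  EXPECT_TRUE(items.empty());
}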
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_stereo_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_stereo_unittest.cc
deleted file mode 100644
index d6c4150ec62..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_stereo_unittest.cc
+++ /dev/null
@@ -1,418 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Test to verify correct stereo and multi-channel operation.
-
-#include <string>
-#include <list>
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-
-namespace webrtc {
-
-struct TestParameters {
- int frame_size;
- int sample_rate;
- int num_channels;
-};
-
-// This is a parameterized test. The test parameters are supplied through a
-// TestParameters struct, which is obtained through the GetParam() method.
-//
-// The objective of the test is to create a mono input signal and a
-// multi-channel input signal, where each channel is identical to the mono
-// input channel. The two input signals are processed through their respective
-// NetEq instances. After that, the output signals are compared. The expected
-// result is that each channel in the multi-channel output is identical to the
-// mono output.
-class NetEqStereoTest : public ::testing::TestWithParam<TestParameters> {
- protected:
- static const int kTimeStepMs = 10;
- static const int kMaxBlockSize = 480; // 10 ms @ 48 kHz.
- static const uint8_t kPayloadTypeMono = 95;
- static const uint8_t kPayloadTypeMulti = 96;
-
- NetEqStereoTest()
- : num_channels_(GetParam().num_channels),
- sample_rate_hz_(GetParam().sample_rate),
- samples_per_ms_(sample_rate_hz_ / 1000),
- frame_size_ms_(GetParam().frame_size),
- frame_size_samples_(frame_size_ms_ * samples_per_ms_),
- output_size_samples_(10 * samples_per_ms_),
- neteq_mono_(NetEq::Create(sample_rate_hz_)),
- neteq_(NetEq::Create(sample_rate_hz_)),
- rtp_generator_mono_(samples_per_ms_),
- rtp_generator_(samples_per_ms_),
- payload_size_bytes_(0),
- multi_payload_size_bytes_(0),
- last_send_time_(0),
- last_arrival_time_(0) {
- input_ = new int16_t[frame_size_samples_];
- encoded_ = new uint8_t[2 * frame_size_samples_];
- input_multi_channel_ = new int16_t[frame_size_samples_ * num_channels_];
- encoded_multi_channel_ = new uint8_t[frame_size_samples_ * 2 *
- num_channels_];
- output_multi_channel_ = new int16_t[kMaxBlockSize * num_channels_];
- }
-
- ~NetEqStereoTest() {
- delete neteq_mono_;
- delete neteq_;
- delete [] input_;
- delete [] encoded_;
- delete [] input_multi_channel_;
- delete [] encoded_multi_channel_;
- delete [] output_multi_channel_;
- }
-
- virtual void SetUp() {
- const std::string file_name =
- webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
- input_file_.reset(new test::InputAudioFile(file_name));
- NetEqDecoder mono_decoder;
- NetEqDecoder multi_decoder;
- switch (sample_rate_hz_) {
- case 8000:
- mono_decoder = kDecoderPCM16B;
- if (num_channels_ == 2) {
- multi_decoder = kDecoderPCM16B_2ch;
- } else if (num_channels_ == 5) {
- multi_decoder = kDecoderPCM16B_5ch;
- } else {
- FAIL() << "Only 2 and 5 channels supported for 8000 Hz.";
- }
- break;
- case 16000:
- mono_decoder = kDecoderPCM16Bwb;
- if (num_channels_ == 2) {
- multi_decoder = kDecoderPCM16Bwb_2ch;
- } else {
- FAIL() << "More than 2 channels is not supported for 16000 Hz.";
- }
- break;
- case 32000:
- mono_decoder = kDecoderPCM16Bswb32kHz;
- if (num_channels_ == 2) {
- multi_decoder = kDecoderPCM16Bswb32kHz_2ch;
- } else {
- FAIL() << "More than 2 channels is not supported for 32000 Hz.";
- }
- break;
- case 48000:
- mono_decoder = kDecoderPCM16Bswb48kHz;
- if (num_channels_ == 2) {
- multi_decoder = kDecoderPCM16Bswb48kHz_2ch;
- } else {
- FAIL() << "More than 2 channels is not supported for 48000 Hz.";
- }
- break;
- default:
- FAIL() << "We shouldn't get here.";
- }
- ASSERT_EQ(NetEq::kOK,
- neteq_mono_->RegisterPayloadType(mono_decoder,
- kPayloadTypeMono));
- ASSERT_EQ(NetEq::kOK,
- neteq_->RegisterPayloadType(multi_decoder,
- kPayloadTypeMulti));
- }
-
- virtual void TearDown() {}
-
- int GetNewPackets() {
- if (!input_file_->Read(frame_size_samples_, input_)) {
- return -1;
- }
- payload_size_bytes_ = WebRtcPcm16b_Encode(input_, frame_size_samples_,
- encoded_);
- if (frame_size_samples_ * 2 != payload_size_bytes_) {
- return -1;
- }
- int next_send_time = rtp_generator_mono_.GetRtpHeader(kPayloadTypeMono,
- frame_size_samples_,
- &rtp_header_mono_);
- test::InputAudioFile::DuplicateInterleaved(input_, frame_size_samples_,
- num_channels_,
- input_multi_channel_);
- multi_payload_size_bytes_ = WebRtcPcm16b_Encode(
- input_multi_channel_, frame_size_samples_ * num_channels_,
- encoded_multi_channel_);
- if (frame_size_samples_ * 2 * num_channels_ != multi_payload_size_bytes_) {
- return -1;
- }
- rtp_generator_.GetRtpHeader(kPayloadTypeMulti, frame_size_samples_,
- &rtp_header_);
- return next_send_time;
- }
-
- void VerifyOutput(size_t num_samples) {
- for (size_t i = 0; i < num_samples; ++i) {
- for (int j = 0; j < num_channels_; ++j) {
- ASSERT_EQ(output_[i], output_multi_channel_[i * num_channels_ + j]) <<
- "Diff in sample " << i << ", channel " << j << ".";
- }
- }
- }
-
- virtual int GetArrivalTime(int send_time) {
- int arrival_time = last_arrival_time_ + (send_time - last_send_time_);
- last_send_time_ = send_time;
- last_arrival_time_ = arrival_time;
- return arrival_time;
- }
-
- virtual bool Lost() { return false; }
-
- void RunTest(int num_loops) {
- // Get next input packets (mono and multi-channel).
- int next_send_time;
- int next_arrival_time;
- do {
- next_send_time = GetNewPackets();
- ASSERT_NE(-1, next_send_time);
- next_arrival_time = GetArrivalTime(next_send_time);
- } while (Lost()); // If lost, immediately read the next packet.
-
- int time_now = 0;
- for (int k = 0; k < num_loops; ++k) {
- while (time_now >= next_arrival_time) {
- // Insert packet in mono instance.
- ASSERT_EQ(NetEq::kOK,
- neteq_mono_->InsertPacket(rtp_header_mono_, encoded_,
- payload_size_bytes_,
- next_arrival_time));
- // Insert packet in multi-channel instance.
- ASSERT_EQ(NetEq::kOK,
- neteq_->InsertPacket(rtp_header_, encoded_multi_channel_,
- multi_payload_size_bytes_,
- next_arrival_time));
- // Get next input packets (mono and multi-channel).
- do {
- next_send_time = GetNewPackets();
- ASSERT_NE(-1, next_send_time);
- next_arrival_time = GetArrivalTime(next_send_time);
- } while (Lost()); // If lost, immediately read the next packet.
- }
- NetEqOutputType output_type;
- // Get audio from mono instance.
- int samples_per_channel;
- int num_channels;
- EXPECT_EQ(NetEq::kOK,
- neteq_mono_->GetAudio(kMaxBlockSize, output_,
- &samples_per_channel, &num_channels,
- &output_type));
- EXPECT_EQ(1, num_channels);
- EXPECT_EQ(output_size_samples_, samples_per_channel);
- // Get audio from multi-channel instance.
- ASSERT_EQ(NetEq::kOK,
- neteq_->GetAudio(kMaxBlockSize * num_channels_,
- output_multi_channel_,
- &samples_per_channel, &num_channels,
- &output_type));
- EXPECT_EQ(num_channels_, num_channels);
- EXPECT_EQ(output_size_samples_, samples_per_channel);
- std::ostringstream ss;
- ss << "Lap number " << k << ".";
- SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
- // Compare mono and multi-channel.
- ASSERT_NO_FATAL_FAILURE(VerifyOutput(output_size_samples_));
-
- time_now += kTimeStepMs;
- }
- }
-
- const int num_channels_;
- const int sample_rate_hz_;
- const int samples_per_ms_;
- const int frame_size_ms_;
- const int frame_size_samples_;
- const int output_size_samples_;
- NetEq* neteq_mono_;
- NetEq* neteq_;
- test::RtpGenerator rtp_generator_mono_;
- test::RtpGenerator rtp_generator_;
- int16_t* input_;
- int16_t* input_multi_channel_;
- uint8_t* encoded_;
- uint8_t* encoded_multi_channel_;
- int16_t output_[kMaxBlockSize];
- int16_t* output_multi_channel_;
- WebRtcRTPHeader rtp_header_mono_;
- WebRtcRTPHeader rtp_header_;
- int payload_size_bytes_;
- int multi_payload_size_bytes_;
- int last_send_time_;
- int last_arrival_time_;
- scoped_ptr<test::InputAudioFile> input_file_;
-};
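As a side note, the mono-to-multi-channel duplication that GetNewPackets() relies on (via test::InputAudioFile::DuplicateInterleaved) can be pictured with a small standalone sketch. The helper below is hypothetical and only illustrates the assumed interleaved layout, where every frame carries identical samples in all channels.

#include <stddef.h>
#include <stdint.h>

#include <vector>

// Copy each mono sample into all channels of an interleaved multi-channel buffer.
std::vector<int16_t> DuplicateMonoToInterleaved(const std::vector<int16_t>& mono,
                                                int num_channels) {
  std::vector<int16_t> interleaved(mono.size() * num_channels);
  for (size_t i = 0; i < mono.size(); ++i) {
    for (int ch = 0; ch < num_channels; ++ch) {
      interleaved[i * num_channels + ch] = mono[i];  // Channels are identical.
    }
  }
  return interleaved;
}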
-
-class NetEqStereoTestNoJitter : public NetEqStereoTest {
- protected:
- NetEqStereoTestNoJitter()
- : NetEqStereoTest() {
- // Start the sender 100 ms before the receiver to pre-fill the buffer.
- // This is to avoid doing preemptive expand early in the test.
- // TODO(hlundin): Mock the decision making instead to control the modes.
- last_arrival_time_ = -100;
- }
-};
-
-TEST_P(NetEqStereoTestNoJitter, DISABLED_ON_ANDROID(RunTest)) {
- RunTest(8);
-}
-
-class NetEqStereoTestPositiveDrift : public NetEqStereoTest {
- protected:
- NetEqStereoTestPositiveDrift()
- : NetEqStereoTest(),
- drift_factor(0.9) {
- // Start the sender 100 ms before the receiver to pre-fill the buffer.
- // This is to avoid doing preemptive expand early in the test.
- // TODO(hlundin): Mock the decision making instead to control the modes.
- last_arrival_time_ = -100;
- }
- virtual int GetArrivalTime(int send_time) {
- int arrival_time = last_arrival_time_ +
- drift_factor * (send_time - last_send_time_);
- last_send_time_ = send_time;
- last_arrival_time_ = arrival_time;
- return arrival_time;
- }
-
- double drift_factor;
-};
-
-TEST_P(NetEqStereoTestPositiveDrift, DISABLED_ON_ANDROID(RunTest)) {
- RunTest(100);
-}
-
-class NetEqStereoTestNegativeDrift : public NetEqStereoTestPositiveDrift {
- protected:
- NetEqStereoTestNegativeDrift()
- : NetEqStereoTestPositiveDrift() {
- drift_factor = 1.1;
- last_arrival_time_ = 0;
- }
-};
-
-TEST_P(NetEqStereoTestNegativeDrift, DISABLED_ON_ANDROID(RunTest)) {
- RunTest(100);
-}
-
-class NetEqStereoTestDelays : public NetEqStereoTest {
- protected:
- static const int kDelayInterval = 10;
- static const int kDelay = 1000;
- NetEqStereoTestDelays()
- : NetEqStereoTest(),
- frame_index_(0) {
- }
-
- virtual int GetArrivalTime(int send_time) {
- // Deliver immediately, unless we have a back-log.
- int arrival_time = std::min(last_arrival_time_, send_time);
- if (++frame_index_ % kDelayInterval == 0) {
- // Delay this packet.
- arrival_time += kDelay;
- }
- last_send_time_ = send_time;
- last_arrival_time_ = arrival_time;
- return arrival_time;
- }
-
- int frame_index_;
-};
-
-TEST_P(NetEqStereoTestDelays, DISABLED_ON_ANDROID(RunTest)) {
- RunTest(1000);
-}
-
-class NetEqStereoTestLosses : public NetEqStereoTest {
- protected:
- static const int kLossInterval = 10;
- NetEqStereoTestLosses()
- : NetEqStereoTest(),
- frame_index_(0) {
- }
-
- virtual bool Lost() {
- return (++frame_index_) % kLossInterval == 0;
- }
-
- int frame_index_;
-};
-
-TEST_P(NetEqStereoTestLosses, DISABLED_ON_ANDROID(RunTest)) {
- RunTest(100);
-}
-
-
-// Creates a list of parameter sets.
-std::list<TestParameters> GetTestParameters() {
- std::list<TestParameters> l;
- const int sample_rates[] = {8000, 16000, 32000};
- const int num_rates = sizeof(sample_rates) / sizeof(sample_rates[0]);
- // Loop through sample rates.
- for (int rate_index = 0; rate_index < num_rates; ++rate_index) {
- int sample_rate = sample_rates[rate_index];
- // Loop through all frame sizes between 10 and 60 ms.
- for (int frame_size = 10; frame_size <= 60; frame_size += 10) {
- TestParameters p;
- p.frame_size = frame_size;
- p.sample_rate = sample_rate;
- p.num_channels = 2;
- l.push_back(p);
- if (sample_rate == 8000) {
- // Add a five-channel test for 8000 Hz.
- p.num_channels = 5;
- l.push_back(p);
- }
- }
- }
- return l;
-}
-
-// Pretty-printing the test parameters in case of an error.
-void PrintTo(const TestParameters& p, ::std::ostream* os) {
- *os << "{frame_size = " << p.frame_size <<
- ", num_channels = " << p.num_channels <<
- ", sample_rate = " << p.sample_rate << "}";
-}
-
-// Instantiate the tests. Each test is instantiated using the function above,
-// so that all different parameter combinations are tested.
-INSTANTIATE_TEST_CASE_P(MultiChannel,
- NetEqStereoTestNoJitter,
- ::testing::ValuesIn(GetTestParameters()));
-
-INSTANTIATE_TEST_CASE_P(MultiChannel,
- NetEqStereoTestPositiveDrift,
- ::testing::ValuesIn(GetTestParameters()));
-
-INSTANTIATE_TEST_CASE_P(MultiChannel,
- NetEqStereoTestNegativeDrift,
- ::testing::ValuesIn(GetTestParameters()));
-
-INSTANTIATE_TEST_CASE_P(MultiChannel,
- NetEqStereoTestDelays,
- ::testing::ValuesIn(GetTestParameters()));
-
-INSTANTIATE_TEST_CASE_P(MultiChannel,
- NetEqStereoTestLosses,
- ::testing::ValuesIn(GetTestParameters()));
-
-} // namespace webrtc
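The parameterization above follows the standard gtest value-parameterized pattern: a fixture derived from ::testing::TestWithParam, TEST_P bodies that read GetParam(), and INSTANTIATE_TEST_CASE_P fed by ValuesIn over a generated container. A minimal standalone sketch of that pattern, using hypothetical Doubler names:

#include <list>

#include "gtest/gtest.h"

class DoublerTest : public ::testing::TestWithParam<int> {};

TEST_P(DoublerTest, TwiceTheInput) {
  const int x = GetParam();  // One test instance per parameter value.
  EXPECT_EQ(2 * x, x + x);
}

// Generate the parameter values, mirroring GetTestParameters() above.
std::list<int> GetValues() {
  std::list<int> values;
  for (int i = 1; i <= 3; ++i)
    values.push_back(i);
  return values;
}

INSTANTIATE_TEST_CASE_P(Small, DoublerTest, ::testing::ValuesIn(GetValues()));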
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi
deleted file mode 100644
index a2b9265613f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi
+++ /dev/null
@@ -1,198 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'targets': [
- {
- 'target_name': 'neteq_rtpplay',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq4',
- 'NetEq4TestTools',
- '<(webrtc_root)/test/test.gyp:test_support_main',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- ],
- 'sources': [
- 'tools/neteq_rtpplay.cc',
- ],
- 'defines': [
- ],
- }, # neteq_rtpplay
-
- {
- 'target_name': 'RTPencode',
- 'type': 'executable',
- 'dependencies': [
- # TODO(hlundin): Make RTPencode use ACM to encode files.
- 'NetEq4TestTools',  # Test helpers
- 'G711',
- 'G722',
- 'PCM16B',
- 'iLBC',
- 'iSAC',
- 'CNG',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
- ],
- 'defines': [
- 'CODEC_ILBC',
- 'CODEC_PCM16B',
- 'CODEC_G711',
- 'CODEC_G722',
- 'CODEC_ISAC',
- 'CODEC_PCM16B_WB',
- 'CODEC_ISAC_SWB',
- 'CODEC_PCM16B_32KHZ',
- 'CODEC_CNGCODEC8',
- 'CODEC_CNGCODEC16',
- 'CODEC_CNGCODEC32',
- 'CODEC_ATEVENT_DECODE',
- 'CODEC_RED',
- ],
- 'include_dirs': [
- 'interface',
- 'test',
- '<(webrtc_root)',
- ],
- 'sources': [
- 'test/RTPencode.cc',
- ],
- # Disable warnings to enable Win64 build, issue 1323.
- 'msvs_disabled_warnings': [
- 4267, # size_t to int truncation.
- ],
- },
-
- {
- 'target_name': 'RTPjitter',
- 'type': 'executable',
- 'dependencies': [
- '<(DEPTH)/testing/gtest.gyp:gtest',
- ],
- 'sources': [
- 'test/RTPjitter.cc',
- ],
- },
-
- {
- 'target_name': 'RTPanalyze',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq4TestTools',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- ],
- 'sources': [
- 'test/RTPanalyze.cc',
- ],
- },
-
- {
- 'target_name': 'RTPchange',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq4TestTools',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- ],
- 'sources': [
- 'test/RTPchange.cc',
- ],
- },
-
- {
- 'target_name': 'RTPtimeshift',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq4TestTools',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- ],
- 'sources': [
- 'test/RTPtimeshift.cc',
- ],
- },
-
- {
- 'target_name': 'RTPcat',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq4TestTools',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- ],
- 'sources': [
- 'test/RTPcat.cc',
- ],
- },
-
- {
- 'target_name': 'rtp_to_text',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq4TestTools',
- '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
- ],
- 'sources': [
- 'test/rtp_to_text.cc',
- ],
- },
-
- {
- 'target_name': 'neteq4_speed_test',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq4',
- 'neteq_unittest_tools',
- 'PCM16B',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- ],
- 'sources': [
- 'test/neteq_speed_test.cc',
- ],
- },
-
- {
- 'target_name': 'NetEq4TestTools',
- # Collection of useful functions used in other tests.
- 'type': 'static_library',
- 'variables': {
- # Expects RTP packets without payloads when enabled.
- 'neteq_dummy_rtp%': 0,
- },
- 'dependencies': [
- 'G711',
- 'G722',
- 'PCM16B',
- 'iLBC',
- 'iSAC',
- 'CNG',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'interface',
- 'test',
- '<(webrtc_root)',
- ],
- },
- 'defines': [
- ],
- 'include_dirs': [
- 'interface',
- 'test',
- '<(webrtc_root)',
- ],
- 'sources': [
- 'test/NETEQTEST_DummyRTPpacket.cc',
- 'test/NETEQTEST_DummyRTPpacket.h',
- 'test/NETEQTEST_RTPpacket.cc',
- 'test/NETEQTEST_RTPpacket.h',
- ],
- # Disable warnings to enable Win64 build, issue 1323.
- 'msvs_disabled_warnings': [
- 4267, # size_t to int truncation.
- ],
- },
- ], # targets
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc
deleted file mode 100644
index 965f75f2cdd..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc
+++ /dev/null
@@ -1,1219 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file includes unit tests for NetEQ.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-
-#include <stdlib.h>
-#include <string.h> // memset
-
-#include <cmath>
-#include <set>
-#include <string>
-#include <vector>
-
-#include "gflags/gflags.h"
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-#include "webrtc/typedefs.h"
-
-DEFINE_bool(gen_ref, false, "Generate reference files.");
-
-namespace webrtc {
-
-static bool IsAllZero(const int16_t* buf, int buf_length) {
- bool all_zero = true;
- for (int n = 0; n < buf_length && all_zero; ++n)
- all_zero = buf[n] == 0;
- return all_zero;
-}
-
-static bool IsAllNonZero(const int16_t* buf, int buf_length) {
- bool all_non_zero = true;
- for (int n = 0; n < buf_length && all_non_zero; ++n)
- all_non_zero = buf[n] != 0;
- return all_non_zero;
-}
-
-class RefFiles {
- public:
- RefFiles(const std::string& input_file, const std::string& output_file);
- ~RefFiles();
- template<class T> void ProcessReference(const T& test_results);
- template<typename T, size_t n> void ProcessReference(
- const T (&test_results)[n],
- size_t length);
- template<typename T, size_t n> void WriteToFile(
- const T (&test_results)[n],
- size_t length);
- template<typename T, size_t n> void ReadFromFileAndCompare(
- const T (&test_results)[n],
- size_t length);
- void WriteToFile(const NetEqNetworkStatistics& stats);
- void ReadFromFileAndCompare(const NetEqNetworkStatistics& stats);
- void WriteToFile(const RtcpStatistics& stats);
- void ReadFromFileAndCompare(const RtcpStatistics& stats);
-
- FILE* input_fp_;
- FILE* output_fp_;
-};
-
-RefFiles::RefFiles(const std::string &input_file,
- const std::string &output_file)
- : input_fp_(NULL),
- output_fp_(NULL) {
- if (!input_file.empty()) {
- input_fp_ = fopen(input_file.c_str(), "rb");
- EXPECT_TRUE(input_fp_ != NULL);
- }
- if (!output_file.empty()) {
- output_fp_ = fopen(output_file.c_str(), "wb");
- EXPECT_TRUE(output_fp_ != NULL);
- }
-}
-
-RefFiles::~RefFiles() {
- if (input_fp_) {
- EXPECT_EQ(EOF, fgetc(input_fp_)); // Make sure that we reached the end.
- fclose(input_fp_);
- }
- if (output_fp_) fclose(output_fp_);
-}
-
-template<class T>
-void RefFiles::ProcessReference(const T& test_results) {
- WriteToFile(test_results);
- ReadFromFileAndCompare(test_results);
-}
-
-template<typename T, size_t n>
-void RefFiles::ProcessReference(const T (&test_results)[n], size_t length) {
- WriteToFile(test_results, length);
- ReadFromFileAndCompare(test_results, length);
-}
-
-template<typename T, size_t n>
-void RefFiles::WriteToFile(const T (&test_results)[n], size_t length) {
- if (output_fp_) {
- ASSERT_EQ(length, fwrite(&test_results, sizeof(T), length, output_fp_));
- }
-}
-
-template<typename T, size_t n>
-void RefFiles::ReadFromFileAndCompare(const T (&test_results)[n],
- size_t length) {
- if (input_fp_) {
- // Read from ref file.
- T* ref = new T[length];
- ASSERT_EQ(length, fread(ref, sizeof(T), length, input_fp_));
- // Compare
- ASSERT_EQ(0, memcmp(&test_results, ref, sizeof(T) * length));
- delete [] ref;
- }
-}
-
-void RefFiles::WriteToFile(const NetEqNetworkStatistics& stats) {
- if (output_fp_) {
- ASSERT_EQ(1u, fwrite(&stats, sizeof(NetEqNetworkStatistics), 1,
- output_fp_));
- }
-}
-
-void RefFiles::ReadFromFileAndCompare(
- const NetEqNetworkStatistics& stats) {
- if (input_fp_) {
- // Read from ref file.
- size_t stat_size = sizeof(NetEqNetworkStatistics);
- NetEqNetworkStatistics ref_stats;
- ASSERT_EQ(1u, fread(&ref_stats, stat_size, 1, input_fp_));
- // Compare
- EXPECT_EQ(0, memcmp(&stats, &ref_stats, stat_size));
- }
-}
-
-void RefFiles::WriteToFile(const RtcpStatistics& stats) {
- if (output_fp_) {
- ASSERT_EQ(1u, fwrite(&(stats.fraction_lost), sizeof(stats.fraction_lost), 1,
- output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.cumulative_lost),
- sizeof(stats.cumulative_lost), 1, output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.extended_max_sequence_number),
- sizeof(stats.extended_max_sequence_number), 1,
- output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.jitter), sizeof(stats.jitter), 1,
- output_fp_));
- }
-}
-
-void RefFiles::ReadFromFileAndCompare(
- const RtcpStatistics& stats) {
- if (input_fp_) {
- // Read from ref file.
- RtcpStatistics ref_stats;
- ASSERT_EQ(1u, fread(&(ref_stats.fraction_lost),
- sizeof(ref_stats.fraction_lost), 1, input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.cumulative_lost),
- sizeof(ref_stats.cumulative_lost), 1, input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.extended_max_sequence_number),
- sizeof(ref_stats.extended_max_sequence_number), 1,
- input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.jitter), sizeof(ref_stats.jitter), 1,
- input_fp_));
- // Compare
- EXPECT_EQ(ref_stats.fraction_lost, stats.fraction_lost);
- EXPECT_EQ(ref_stats.cumulative_lost, stats.cumulative_lost);
- EXPECT_EQ(ref_stats.extended_max_sequence_number,
- stats.extended_max_sequence_number);
- EXPECT_EQ(ref_stats.jitter, stats.jitter);
- }
-}
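The RefFiles helpers above all follow the same write-then-read-and-compare round trip over raw POD records. A minimal standalone sketch of that round trip, using a hypothetical Stats struct and file name and plain C file I/O:

#include <assert.h>
#include <stdio.h>
#include <string.h>

struct Stats { int frames; int losses; };  // Hypothetical POD record.

int main() {
  const Stats written = {100, 3};
  FILE* fp = fopen("ref.dat", "wb");
  assert(fp != NULL);
  size_t items = fwrite(&written, sizeof(written), 1, fp);  // Write the reference.
  fclose(fp);
  assert(items == 1u);

  Stats read_back;
  fp = fopen("ref.dat", "rb");
  assert(fp != NULL);
  items = fread(&read_back, sizeof(read_back), 1, fp);  // Read it back.
  fclose(fp);
  assert(items == 1u);
  assert(memcmp(&written, &read_back, sizeof(written)) == 0);  // Bit-exact compare.
  return 0;
}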
-
-class NetEqDecodingTest : public ::testing::Test {
- protected:
- // NetEQ must be polled for data once every 10 ms. Thus, neither of the
- // constants below can be changed.
- static const int kTimeStepMs = 10;
- static const int kBlockSize8kHz = kTimeStepMs * 8;
- static const int kBlockSize16kHz = kTimeStepMs * 16;
- static const int kBlockSize32kHz = kTimeStepMs * 32;
- static const int kMaxBlockSize = kBlockSize32kHz;
- static const int kInitSampleRateHz = 8000;
-
- NetEqDecodingTest();
- virtual void SetUp();
- virtual void TearDown();
- void SelectDecoders(NetEqDecoder* used_codec);
- void LoadDecoders();
- void OpenInputFile(const std::string &rtp_file);
- void Process(NETEQTEST_RTPpacket* rtp_ptr, int* out_len);
- void DecodeAndCompare(const std::string &rtp_file,
- const std::string &ref_file);
- void DecodeAndCheckStats(const std::string &rtp_file,
- const std::string &stat_ref_file,
- const std::string &rtcp_ref_file);
- static void PopulateRtpInfo(int frame_index,
- int timestamp,
- WebRtcRTPHeader* rtp_info);
- static void PopulateCng(int frame_index,
- int timestamp,
- WebRtcRTPHeader* rtp_info,
- uint8_t* payload,
- int* payload_len);
-
- void CheckBgnOff(int sampling_rate, NetEqBackgroundNoiseMode bgn_mode);
-
- void WrapTest(uint16_t start_seq_no, uint32_t start_timestamp,
- const std::set<uint16_t>& drop_seq_numbers,
- bool expect_seq_no_wrap, bool expect_timestamp_wrap);
-
- NetEq* neteq_;
- FILE* rtp_fp_;
- unsigned int sim_clock_;
- int16_t out_data_[kMaxBlockSize];
- int output_sample_rate_;
-};
-
-// Defining the static consts so that they can be passed by reference.
-const int NetEqDecodingTest::kTimeStepMs;
-const int NetEqDecodingTest::kBlockSize8kHz;
-const int NetEqDecodingTest::kBlockSize16kHz;
-const int NetEqDecodingTest::kBlockSize32kHz;
-const int NetEqDecodingTest::kMaxBlockSize;
-const int NetEqDecodingTest::kInitSampleRateHz;
-
-NetEqDecodingTest::NetEqDecodingTest()
- : neteq_(NULL),
- rtp_fp_(NULL),
- sim_clock_(0),
- output_sample_rate_(kInitSampleRateHz) {
- memset(out_data_, 0, sizeof(out_data_));
-}
-
-void NetEqDecodingTest::SetUp() {
- neteq_ = NetEq::Create(kInitSampleRateHz);
- ASSERT_TRUE(neteq_);
- LoadDecoders();
-}
-
-void NetEqDecodingTest::TearDown() {
- delete neteq_;
- if (rtp_fp_)
- fclose(rtp_fp_);
-}
-
-void NetEqDecodingTest::LoadDecoders() {
- // Load PCMu.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCMu, 0));
- // Load PCMa.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCMa, 8));
-#ifndef WEBRTC_ANDROID
- // Load iLBC.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderILBC, 102));
-#endif // WEBRTC_ANDROID
- // Load iSAC.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISAC, 103));
-#ifndef WEBRTC_ANDROID
- // Load iSAC SWB.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISACswb, 104));
- // Load iSAC FB.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISACfb, 105));
-#endif // WEBRTC_ANDROID
- // Load PCM16B nb.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCM16B, 93));
- // Load PCM16B wb.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCM16Bwb, 94));
- // Load PCM16B swb32.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCM16Bswb32kHz, 95));
- // Load CNG 8 kHz.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGnb, 13));
- // Load CNG 16 kHz.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGwb, 98));
-}
-
-void NetEqDecodingTest::OpenInputFile(const std::string &rtp_file) {
- rtp_fp_ = fopen(rtp_file.c_str(), "rb");
- ASSERT_TRUE(rtp_fp_ != NULL);
- ASSERT_EQ(0, NETEQTEST_RTPpacket::skipFileHeader(rtp_fp_));
-}
-
-void NetEqDecodingTest::Process(NETEQTEST_RTPpacket* rtp, int* out_len) {
- // Check if time to receive.
- while ((sim_clock_ >= rtp->time()) &&
- (rtp->dataLen() >= 0)) {
- if (rtp->dataLen() > 0) {
- WebRtcRTPHeader rtpInfo;
- rtp->parseHeader(&rtpInfo);
- ASSERT_EQ(0, neteq_->InsertPacket(
- rtpInfo,
- rtp->payload(),
- rtp->payloadLen(),
- rtp->time() * (output_sample_rate_ / 1000)));
- }
- // Get next packet.
- ASSERT_NE(-1, rtp->readFromFile(rtp_fp_));
- }
-
- // Get audio from NetEq.
- NetEqOutputType type;
- int num_channels;
- ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, out_len,
- &num_channels, &type));
- ASSERT_TRUE((*out_len == kBlockSize8kHz) ||
- (*out_len == kBlockSize16kHz) ||
- (*out_len == kBlockSize32kHz));
- output_sample_rate_ = *out_len / 10 * 1000;
-
- // Increase time.
- sim_clock_ += kTimeStepMs;
-}
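Process() derives output_sample_rate_ from the block length, relying on GetAudio() always returning exactly 10 ms of audio per channel. A quick standalone check of that arithmetic:

#include <assert.h>

int main() {
  // 10 ms of audio at 8/16/32 kHz is 80/160/320 samples per channel, so
  // samples / 10 * 1000 recovers the rate in Hz.
  const int block_sizes[] = {80, 160, 320};
  const int rates_hz[] = {8000, 16000, 32000};
  for (int i = 0; i < 3; ++i) {
    assert(block_sizes[i] / 10 * 1000 == rates_hz[i]);
  }
  return 0;
}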
-
-void NetEqDecodingTest::DecodeAndCompare(const std::string &rtp_file,
- const std::string &ref_file) {
- OpenInputFile(rtp_file);
-
- std::string ref_out_file = "";
- if (ref_file.empty()) {
- ref_out_file = webrtc::test::OutputPath() + "neteq_universal_ref.pcm";
- }
- RefFiles ref_files(ref_file, ref_out_file);
-
- NETEQTEST_RTPpacket rtp;
- ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
- int i = 0;
- while (rtp.dataLen() >= 0) {
- std::ostringstream ss;
- ss << "Lap number " << i++ << " in DecodeAndCompare while loop";
- SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
- int out_len = 0;
- ASSERT_NO_FATAL_FAILURE(Process(&rtp, &out_len));
- ASSERT_NO_FATAL_FAILURE(ref_files.ProcessReference(out_data_, out_len));
- }
-}
-
-void NetEqDecodingTest::DecodeAndCheckStats(const std::string &rtp_file,
- const std::string &stat_ref_file,
- const std::string &rtcp_ref_file) {
- OpenInputFile(rtp_file);
- std::string stat_out_file = "";
- if (stat_ref_file.empty()) {
- stat_out_file = webrtc::test::OutputPath() +
- "neteq_network_stats.dat";
- }
- RefFiles network_stat_files(stat_ref_file, stat_out_file);
-
- std::string rtcp_out_file = "";
- if (rtcp_ref_file.empty()) {
- rtcp_out_file = webrtc::test::OutputPath() +
- "neteq_rtcp_stats.dat";
- }
- RefFiles rtcp_stat_files(rtcp_ref_file, rtcp_out_file);
-
- NETEQTEST_RTPpacket rtp;
- ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
- while (rtp.dataLen() >= 0) {
- int out_len;
- Process(&rtp, &out_len);
-
- // Query the network statistics API once per second
- if (sim_clock_ % 1000 == 0) {
- // Process NetworkStatistics.
- NetEqNetworkStatistics network_stats;
- ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
- network_stat_files.ProcessReference(network_stats);
-
- // Process RTCPstat.
- RtcpStatistics rtcp_stats;
- neteq_->GetRtcpStatistics(&rtcp_stats);
- rtcp_stat_files.ProcessReference(rtcp_stats);
- }
- }
-}
-
-void NetEqDecodingTest::PopulateRtpInfo(int frame_index,
- int timestamp,
- WebRtcRTPHeader* rtp_info) {
- rtp_info->header.sequenceNumber = frame_index;
- rtp_info->header.timestamp = timestamp;
- rtp_info->header.ssrc = 0x1234; // Just an arbitrary SSRC.
- rtp_info->header.payloadType = 94; // PCM16b WB codec.
- rtp_info->header.markerBit = 0;
-}
-
-void NetEqDecodingTest::PopulateCng(int frame_index,
- int timestamp,
- WebRtcRTPHeader* rtp_info,
- uint8_t* payload,
- int* payload_len) {
- rtp_info->header.sequenceNumber = frame_index;
- rtp_info->header.timestamp = timestamp;
- rtp_info->header.ssrc = 0x1234; // Just an arbitrary SSRC.
- rtp_info->header.payloadType = 98; // WB CNG.
- rtp_info->header.markerBit = 0;
- payload[0] = 64; // Noise level -64 dBov, quite arbitrarily chosen.
- *payload_len = 1; // Only noise level, no spectral parameters.
-}
-
-void NetEqDecodingTest::CheckBgnOff(int sampling_rate_hz,
- NetEqBackgroundNoiseMode bgn_mode) {
- int expected_samples_per_channel = 0;
- uint8_t payload_type = 0xFF; // Invalid.
- if (sampling_rate_hz == 8000) {
- expected_samples_per_channel = kBlockSize8kHz;
- payload_type = 93; // PCM 16, 8 kHz.
- } else if (sampling_rate_hz == 16000) {
- expected_samples_per_channel = kBlockSize16kHz;
- payload_type = 94; // PCM 16, 16 kHz.
- } else if (sampling_rate_hz == 32000) {
- expected_samples_per_channel = kBlockSize32kHz;
- payload_type = 95; // PCM 16, 32 kHz.
- } else {
- ASSERT_TRUE(false); // Unsupported test case.
- }
-
- NetEqOutputType type;
- int16_t output[kBlockSize32kHz]; // Maximum size is chosen.
- int16_t input[kBlockSize32kHz]; // Maximum size is chosen.
-
- // Payload of 10 ms of PCM16 32 kHz.
- uint8_t payload[kBlockSize32kHz * sizeof(int16_t)];
-
- // Random payload.
- for (int n = 0; n < expected_samples_per_channel; ++n) {
- input[n] = (rand() & ((1 << 10) - 1)) - ((1 << 5) - 1);
- }
- int enc_len_bytes = WebRtcPcm16b_EncodeW16(
- input, expected_samples_per_channel, reinterpret_cast<int16_t*>(payload));
- ASSERT_EQ(enc_len_bytes, expected_samples_per_channel * 2);
-
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(0, 0, &rtp_info);
- rtp_info.header.payloadType = payload_type;
-
- int number_channels = 0;
- int samples_per_channel = 0;
-
- uint32_t receive_timestamp = 0;
- for (int n = 0; n < 10; ++n) { // Insert few packets and get audio.
- number_channels = 0;
- samples_per_channel = 0;
- ASSERT_EQ(0, neteq_->InsertPacket(
- rtp_info, payload, enc_len_bytes, receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(kBlockSize32kHz, output, &samples_per_channel,
- &number_channels, &type));
- ASSERT_EQ(1, number_channels);
- ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
- ASSERT_EQ(kOutputNormal, type);
-
- // Next packet.
- rtp_info.header.timestamp += expected_samples_per_channel;
- rtp_info.header.sequenceNumber++;
- receive_timestamp += expected_samples_per_channel;
- }
-
- number_channels = 0;
- samples_per_channel = 0;
-
- // Get audio without inserting packets, expecting PLC and PLC-to-CNG. Pull one
- // frame without checking the speech type. This is the first frame pulled
- // without inserting any packet, and it might not be labeled as PLC.
- ASSERT_EQ(0, neteq_->GetAudio(kBlockSize32kHz, output, &samples_per_channel,
- &number_channels, &type));
- ASSERT_EQ(1, number_channels);
- ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
-
- // To be able to test the fading of background noise, we need to pull at
- // least 610 frames.
- const int kFadingThreshold = 610;
-
- // Test several PLC-to-CNG frames for the expected behavior. The number 20 is
- // arbitrary, but large enough to cover a sufficient number of frames.
- const int kNumPlcToCngTestFrames = 20;
- bool plc_to_cng = false;
- for (int n = 0; n < kFadingThreshold + kNumPlcToCngTestFrames; ++n) {
- number_channels = 0;
- samples_per_channel = 0;
- memset(output, 1, sizeof(output)); // Set to non-zero.
- ASSERT_EQ(0, neteq_->GetAudio(kBlockSize32kHz, output, &samples_per_channel,
- &number_channels, &type));
- ASSERT_EQ(1, number_channels);
- ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
- if (type == kOutputPLCtoCNG) {
- plc_to_cng = true;
- double sum_squared = 0;
- for (int k = 0; k < number_channels * samples_per_channel; ++k)
- sum_squared += output[k] * output[k];
- if (bgn_mode == kBgnOn) {
- EXPECT_NE(0, sum_squared);
- } else if (bgn_mode == kBgnOff || n > kFadingThreshold) {
- EXPECT_EQ(0, sum_squared);
- }
- } else {
- EXPECT_EQ(kOutputPLC, type);
- }
- }
- EXPECT_TRUE(plc_to_cng); // Just to be sure that PLC-to-CNG has occurred.
-}
-
-#if defined(_WIN32) && defined(WEBRTC_ARCH_64_BITS)
-// Disabled for Windows 64-bit until webrtc:1458 is fixed.
-#define MAYBE_TestBitExactness DISABLED_TestBitExactness
-#else
-#define MAYBE_TestBitExactness TestBitExactness
-#endif
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(MAYBE_TestBitExactness)) {
- const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_universal_new.rtp";
-#if defined(_MSC_VER) && (_MSC_VER >= 1700)
- // For Visual Studio 2012 and later, we will have to use the generic reference
- // file, rather than the Windows-specific one.
- const std::string kInputRefFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq4_universal_ref.pcm";
-#else
- const std::string kInputRefFile =
- webrtc::test::ResourcePath("audio_coding/neteq4_universal_ref", "pcm");
-#endif
-
- if (FLAGS_gen_ref) {
- DecodeAndCompare(kInputRtpFile, "");
- } else {
- DecodeAndCompare(kInputRtpFile, kInputRefFile);
- }
-}
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestNetworkStatistics)) {
- const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_universal_new.rtp";
-#if defined(_MSC_VER) && (_MSC_VER >= 1700)
- // For Visual Studio 2012 and later, we will have to use the generic reference
- // file, rather than the Windows-specific one.
- const std::string kNetworkStatRefFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq4_network_stats.dat";
-#else
- const std::string kNetworkStatRefFile =
- webrtc::test::ResourcePath("audio_coding/neteq4_network_stats", "dat");
-#endif
- const std::string kRtcpStatRefFile =
- webrtc::test::ResourcePath("audio_coding/neteq4_rtcp_stats", "dat");
- if (FLAGS_gen_ref) {
- DecodeAndCheckStats(kInputRtpFile, "", "");
- } else {
- DecodeAndCheckStats(kInputRtpFile, kNetworkStatRefFile, kRtcpStatRefFile);
- }
-}
-
-// TODO(hlundin): Re-enable test once the statistics interface is up and running again.
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestFrameWaitingTimeStatistics)) {
- // Use fax mode to avoid time-scaling. This is to simplify the testing of
- // packet waiting times in the packet buffer.
- neteq_->SetPlayoutMode(kPlayoutFax);
- ASSERT_EQ(kPlayoutFax, neteq_->PlayoutMode());
- // Insert 30 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
- size_t num_frames = 30;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- for (size_t i = 0; i < num_frames; ++i) {
- uint16_t payload[kSamples] = {0};
- WebRtcRTPHeader rtp_info;
- rtp_info.header.sequenceNumber = i;
- rtp_info.header.timestamp = i * kSamples;
- rtp_info.header.ssrc = 0x1234; // Just an arbitrary SSRC.
- rtp_info.header.payloadType = 94; // PCM16b WB codec.
- rtp_info.header.markerBit = 0;
- ASSERT_EQ(0, neteq_->InsertPacket(
- rtp_info,
- reinterpret_cast<uint8_t*>(payload),
- kPayloadBytes, 0));
- }
- // Pull out all data.
- for (size_t i = 0; i < num_frames; ++i) {
- int out_len;
- int num_channels;
- NetEqOutputType type;
- ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
- &num_channels, &type));
- ASSERT_EQ(kBlockSize16kHz, out_len);
- }
-
- std::vector<int> waiting_times;
- neteq_->WaitingTimes(&waiting_times);
- EXPECT_EQ(num_frames, waiting_times.size());
- // Since all frames are dumped into NetEQ at once, but pulled out with 10 ms
- // spacing (per definition), we expect the delay to increase by 10 ms for
- // each packet.
- for (size_t i = 0; i < waiting_times.size(); ++i) {
- EXPECT_EQ(static_cast<int>(i + 1) * 10, waiting_times[i]);
- }
-
- // Check the statistics again and make sure they have been reset.
- neteq_->WaitingTimes(&waiting_times);
- int len = waiting_times.size();
- EXPECT_EQ(0, len);
-
- // Process > 100 frames, and make sure that we get statistics
- // only for 100 frames. Note the new SSRC, causing NetEQ to reset.
- num_frames = 110;
- for (size_t i = 0; i < num_frames; ++i) {
- uint16_t payload[kSamples] = {0};
- WebRtcRTPHeader rtp_info;
- rtp_info.header.sequenceNumber = i;
- rtp_info.header.timestamp = i * kSamples;
- rtp_info.header.ssrc = 0x1235; // Just an arbitrary SSRC.
- rtp_info.header.payloadType = 94; // PCM16b WB codec.
- rtp_info.header.markerBit = 0;
- ASSERT_EQ(0, neteq_->InsertPacket(
- rtp_info,
- reinterpret_cast<uint8_t*>(payload),
- kPayloadBytes, 0));
- int out_len;
- int num_channels;
- NetEqOutputType type;
- ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
- &num_channels, &type));
- ASSERT_EQ(kBlockSize16kHz, out_len);
- }
-
- neteq_->WaitingTimes(&waiting_times);
- EXPECT_EQ(100u, waiting_times.size());
-}
-
-TEST_F(NetEqDecodingTest,
- DISABLED_ON_ANDROID(TestAverageInterArrivalTimeNegative)) {
- const int kNumFrames = 3000; // Needed for convergence.
- int frame_index = 0;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- while (frame_index < kNumFrames) {
- // Insert one packet each time, except every 10th time, when we insert two
- // packets at once. This will create a negative clock-drift of approx. 10%.
- int num_packets = (frame_index % 10 == 0 ? 2 : 1);
- for (int n = 0; n < num_packets; ++n) {
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
- ++frame_index;
- }
-
- // Pull out data once.
- int out_len;
- int num_channels;
- NetEqOutputType type;
- ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
- &num_channels, &type));
- ASSERT_EQ(kBlockSize16kHz, out_len);
- }
-
- NetEqNetworkStatistics network_stats;
- ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
- EXPECT_EQ(-103196, network_stats.clockdrift_ppm);
-}
-
-TEST_F(NetEqDecodingTest,
- DISABLED_ON_ANDROID(TestAverageInterArrivalTimePositive)) {
- const int kNumFrames = 5000; // Needed for convergence.
- int frame_index = 0;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- for (int i = 0; i < kNumFrames; ++i) {
- // Insert one packet each time, except every 10th time, when we don't insert
- // any packet. This will create a positive clock-drift of approx. 11%.
- int num_packets = (i % 10 == 9 ? 0 : 1);
- for (int n = 0; n < num_packets; ++n) {
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
- ++frame_index;
- }
-
- // Pull out data once.
- int out_len;
- int num_channels;
- NetEqOutputType type;
- ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
- &num_channels, &type));
- ASSERT_EQ(kBlockSize16kHz, out_len);
- }
-
- NetEqNetworkStatistics network_stats;
- ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
- EXPECT_EQ(110946, network_stats.clockdrift_ppm);
-}
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) {
- uint16_t seq_no = 0;
- uint32_t timestamp = 0;
- const int kFrameSizeMs = 30;
- const int kSamples = kFrameSizeMs * 16;
- const int kPayloadBytes = kSamples * 2;
- // Apply a clock drift of -25 ms / s (sender faster than receiver).
- const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
- double next_input_time_ms = 0.0;
- double t_ms;
- NetEqOutputType type;
-
- // Insert speech for 5 seconds.
- const int kSpeechDurationMs = 5000;
- for (t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one 30 ms speech frame.
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
- ++seq_no;
- timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
- }
- // Pull out data once.
- int out_len;
- int num_channels;
- ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
- &num_channels, &type));
- ASSERT_EQ(kBlockSize16kHz, out_len);
- }
-
- EXPECT_EQ(kOutputNormal, type);
- int32_t delay_before = timestamp - neteq_->PlayoutTimestamp();
-
- // Insert CNG for 1 minute (= 60000 ms).
- const int kCngPeriodMs = 100;
- const int kCngPeriodSamples = kCngPeriodMs * 16; // Period in 16 kHz samples.
- const int kCngDurationMs = 60000;
- for (; t_ms < kSpeechDurationMs + kCngDurationMs; t_ms += 10) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one CNG frame each 100 ms.
- uint8_t payload[kPayloadBytes];
- int payload_len;
- WebRtcRTPHeader rtp_info;
- PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
- ++seq_no;
- timestamp += kCngPeriodSamples;
- next_input_time_ms += static_cast<double>(kCngPeriodMs) * kDriftFactor;
- }
- // Pull out data once.
- int out_len;
- int num_channels;
- ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
- &num_channels, &type));
- ASSERT_EQ(kBlockSize16kHz, out_len);
- }
-
- EXPECT_EQ(kOutputCNG, type);
-
- // Insert speech again until output type is speech.
- while (type != kOutputNormal) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one 30 ms speech frame.
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
- ++seq_no;
- timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
- }
- // Pull out data once.
- int out_len;
- int num_channels;
- ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
- &num_channels, &type));
- ASSERT_EQ(kBlockSize16kHz, out_len);
- // Increase clock.
- t_ms += 10;
- }
-
- int32_t delay_after = timestamp - neteq_->PlayoutTimestamp();
- // Compare delay before and after, and make sure it differs by less than 20 ms.
- EXPECT_LE(delay_after, delay_before + 20 * 16);
- EXPECT_GE(delay_after, delay_before - 20 * 16);
-}
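As a quick sanity check of the drift arithmetic in the test above: a drift factor of 1000 / (1000 + 25) means each 30 ms sender frame is inserted after roughly 29.27 ms of receiver time, which is what makes the sender run faster than the receiver. A standalone sketch of that calculation:

#include <stdio.h>

int main() {
  const double kDriftFactor = 1000.0 / (1000.0 + 25.0);  // ~0.9756, as in the test.
  const int kFrameSizeMs = 30;
  printf("receiver-side frame spacing: %.2f ms\n",
         kFrameSizeMs * kDriftFactor);  // Prints approximately 29.27.
  return 0;
}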
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(UnknownPayloadType)) {
- const int kPayloadBytes = 100;
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(0, 0, &rtp_info);
- rtp_info.header.payloadType = 1; // Not registered as a decoder.
- EXPECT_EQ(NetEq::kFail,
- neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
- EXPECT_EQ(NetEq::kUnknownRtpPayloadType, neteq_->LastError());
-}
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(OversizePacket)) {
- // The payload size is greater than the packet buffer size.
- const int kPayloadBytes = NetEq::kMaxBytesInBuffer + 1;
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(0, 0, &rtp_info);
- rtp_info.header.payloadType = 103; // iSAC, no packet splitting.
- EXPECT_EQ(NetEq::kFail,
- neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
- EXPECT_EQ(NetEq::kOversizePacket, neteq_->LastError());
-}
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(DecoderError)) {
- const int kPayloadBytes = 100;
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(0, 0, &rtp_info);
- rtp_info.header.payloadType = 103; // iSAC, but the payload is invalid.
- EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
- NetEqOutputType type;
- // Set all of |out_data_| to 1, and verify that it was set to 0 by the call
- // to GetAudio.
- for (int i = 0; i < kMaxBlockSize; ++i) {
- out_data_[i] = 1;
- }
- int num_channels;
- int samples_per_channel;
- EXPECT_EQ(NetEq::kFail,
- neteq_->GetAudio(kMaxBlockSize, out_data_,
- &samples_per_channel, &num_channels, &type));
- // Verify that there is a decoder error to check.
- EXPECT_EQ(NetEq::kDecoderErrorCode, neteq_->LastError());
- // Code 6730 is an iSAC error code.
- EXPECT_EQ(6730, neteq_->LastDecoderError());
- // Verify that the first 160 samples are set to 0, and that the remaining
- // samples are left unmodified.
- static const int kExpectedOutputLength = 160; // 10 ms at 16 kHz sample rate.
- for (int i = 0; i < kExpectedOutputLength; ++i) {
- std::ostringstream ss;
- ss << "i = " << i;
- SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
- EXPECT_EQ(0, out_data_[i]);
- }
- for (int i = kExpectedOutputLength; i < kMaxBlockSize; ++i) {
- std::ostringstream ss;
- ss << "i = " << i;
- SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
- EXPECT_EQ(1, out_data_[i]);
- }
-}
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(GetAudioBeforeInsertPacket)) {
- NetEqOutputType type;
- // Set all of |out_data_| to 1, and verify that it was set to 0 by the call
- // to GetAudio.
- for (int i = 0; i < kMaxBlockSize; ++i) {
- out_data_[i] = 1;
- }
- int num_channels;
- int samples_per_channel;
- EXPECT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_,
- &samples_per_channel,
- &num_channels, &type));
- // Verify that the first block of samples is set to 0.
- static const int kExpectedOutputLength =
- kInitSampleRateHz / 100; // 10 ms at initial sample rate.
- for (int i = 0; i < kExpectedOutputLength; ++i) {
- std::ostringstream ss;
- ss << "i = " << i;
- SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
- EXPECT_EQ(0, out_data_[i]);
- }
-}
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(BackgroundNoise)) {
- neteq_->SetBackgroundNoiseMode(kBgnOn);
- CheckBgnOff(8000, kBgnOn);
- CheckBgnOff(16000, kBgnOn);
- CheckBgnOff(32000, kBgnOn);
- EXPECT_EQ(kBgnOn, neteq_->BackgroundNoiseMode());
-
- neteq_->SetBackgroundNoiseMode(kBgnOff);
- CheckBgnOff(8000, kBgnOff);
- CheckBgnOff(16000, kBgnOff);
- CheckBgnOff(32000, kBgnOff);
- EXPECT_EQ(kBgnOff, neteq_->BackgroundNoiseMode());
-
- neteq_->SetBackgroundNoiseMode(kBgnFade);
- CheckBgnOff(8000, kBgnFade);
- CheckBgnOff(16000, kBgnFade);
- CheckBgnOff(32000, kBgnFade);
- EXPECT_EQ(kBgnFade, neteq_->BackgroundNoiseMode());
-}
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(SyncPacketInsert)) {
- WebRtcRTPHeader rtp_info;
- uint32_t receive_timestamp = 0;
- // For readability, use the following payload types instead of the defaults
- // of this test.
- uint8_t kPcm16WbPayloadType = 1;
- uint8_t kCngNbPayloadType = 2;
- uint8_t kCngWbPayloadType = 3;
- uint8_t kCngSwb32PayloadType = 4;
- uint8_t kCngSwb48PayloadType = 5;
- uint8_t kAvtPayloadType = 6;
- uint8_t kRedPayloadType = 7;
- uint8_t kIsacPayloadType = 9; // Payload type 8 is already registered.
-
- // Register decoders.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderPCM16Bwb,
- kPcm16WbPayloadType));
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGnb, kCngNbPayloadType));
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGwb, kCngWbPayloadType));
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGswb32kHz,
- kCngSwb32PayloadType));
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderCNGswb48kHz,
- kCngSwb48PayloadType));
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderAVT, kAvtPayloadType));
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderRED, kRedPayloadType));
- ASSERT_EQ(0, neteq_->RegisterPayloadType(kDecoderISAC, kIsacPayloadType));
-
- PopulateRtpInfo(0, 0, &rtp_info);
- rtp_info.header.payloadType = kPcm16WbPayloadType;
-
- // The first packet injected cannot be a sync packet.
- EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
-
- // Payload length of 10 ms PCM16 16 kHz.
- const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
- uint8_t payload[kPayloadBytes] = {0};
- ASSERT_EQ(0, neteq_->InsertPacket(
- rtp_info, payload, kPayloadBytes, receive_timestamp));
-
- // Next packet. Last packet contained 10 ms audio.
- rtp_info.header.sequenceNumber++;
- rtp_info.header.timestamp += kBlockSize16kHz;
- receive_timestamp += kBlockSize16kHz;
-
- // Unacceptable payload types CNG, AVT (DTMF), RED.
- rtp_info.header.payloadType = kCngNbPayloadType;
- EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
-
- rtp_info.header.payloadType = kCngWbPayloadType;
- EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
-
- rtp_info.header.payloadType = kCngSwb32PayloadType;
- EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
-
- rtp_info.header.payloadType = kCngSwb48PayloadType;
- EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
-
- rtp_info.header.payloadType = kAvtPayloadType;
- EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
-
- rtp_info.header.payloadType = kRedPayloadType;
- EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
-
- // Change of codec cannot be initiated with a sync packet.
- rtp_info.header.payloadType = kIsacPayloadType;
- EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
-
- // Change of SSRC is not allowed with a sync packet.
- rtp_info.header.payloadType = kPcm16WbPayloadType;
- ++rtp_info.header.ssrc;
- EXPECT_EQ(-1, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
-
- --rtp_info.header.ssrc;
- EXPECT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
-}
-
-// First insert several noise-like packets, then sync packets. Decoding all
-// packets should not produce errors, statistics should not show any packet
-// loss, and sync packets should decode to zero.
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(SyncPacketDecode)) {
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(0, 0, &rtp_info);
- const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
- uint8_t payload[kPayloadBytes];
- int16_t decoded[kBlockSize16kHz];
- for (int n = 0; n < kPayloadBytes; ++n) {
- payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence.
- }
- // Insert some packets which decode to noise. We are not interested in
- // actual decoded values.
- NetEqOutputType output_type;
- int num_channels;
- int samples_per_channel;
- uint32_t receive_timestamp = 0;
- int delay_samples = 0;
- for (int n = 0; n < 100; ++n) {
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
- &samples_per_channel, &num_channels,
- &output_type));
- ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
- ASSERT_EQ(1, num_channels);
-
- // Even if there is an RTP packet in NetEq's buffer, the first frame pulled
- // from NetEq starts with a few zero samples. Here we measure this delay.
- if (n == 0) {
- while (decoded[delay_samples] == 0) delay_samples++;
- }
- rtp_info.header.sequenceNumber++;
- rtp_info.header.timestamp += kBlockSize16kHz;
- receive_timestamp += kBlockSize16kHz;
- }
- const int kNumSyncPackets = 10;
- // Insert sync packets; the decoded sequence should be all-zero.
- for (int n = 0; n < kNumSyncPackets; ++n) {
- ASSERT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
- &samples_per_channel, &num_channels,
- &output_type));
- ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
- ASSERT_EQ(1, num_channels);
- EXPECT_TRUE(IsAllZero(&decoded[delay_samples],
- samples_per_channel * num_channels - delay_samples));
- delay_samples = 0; // Delay only matters in the first frame.
- rtp_info.header.sequenceNumber++;
- rtp_info.header.timestamp += kBlockSize16kHz;
- receive_timestamp += kBlockSize16kHz;
- }
- // We insert a regular packet; if sync packets are not correctly buffered,
- // the network statistics would show some packet loss.
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
- &samples_per_channel, &num_channels,
- &output_type));
- // Make sure the last inserted packet is decoded and there are non-zero
- // samples.
- EXPECT_FALSE(IsAllZero(decoded, samples_per_channel * num_channels));
- NetEqNetworkStatistics network_stats;
- ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
- // Expecting a "clean" network.
- EXPECT_EQ(0, network_stats.packet_loss_rate);
- EXPECT_EQ(0, network_stats.expand_rate);
- EXPECT_EQ(0, network_stats.accelerate_rate);
- EXPECT_EQ(0, network_stats.preemptive_rate);
-}
-
-// Test that the size of the packet buffer is reported correctly when it
-// contains sync packets. Also test that network packets override sync
-// packets, that is, that a network packet is preferred over a sync packet
-// for decoding if both have the same sequence number and timestamp.
-TEST_F(NetEqDecodingTest,
- DISABLED_ON_ANDROID(SyncPacketBufferSizeAndOverridenByNetworkPackets)) {
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(0, 0, &rtp_info);
- const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
- uint8_t payload[kPayloadBytes];
- int16_t decoded[kBlockSize16kHz];
- for (int n = 0; n < kPayloadBytes; ++n) {
- payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence.
- }
- // Insert one packet which decodes to noise. We are not interested in the
- // actual decoded values.
- NetEqOutputType output_type;
- int num_channels;
- int samples_per_channel;
- uint32_t receive_timestamp = 0;
- for (int n = 0; n < 1; ++n) {
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
- &samples_per_channel, &num_channels,
- &output_type));
- ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
- ASSERT_EQ(1, num_channels);
- rtp_info.header.sequenceNumber++;
- rtp_info.header.timestamp += kBlockSize16kHz;
- receive_timestamp += kBlockSize16kHz;
- }
- const int kNumSyncPackets = 10;
-
- WebRtcRTPHeader first_sync_packet_rtp_info;
- memcpy(&first_sync_packet_rtp_info, &rtp_info, sizeof(rtp_info));
-
- // Insert sync-packets, but no decoding.
- for (int n = 0; n < kNumSyncPackets; ++n) {
- ASSERT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
- rtp_info.header.sequenceNumber++;
- rtp_info.header.timestamp += kBlockSize16kHz;
- receive_timestamp += kBlockSize16kHz;
- }
- NetEqNetworkStatistics network_stats;
- ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
- EXPECT_EQ(kNumSyncPackets * 10, network_stats.current_buffer_size_ms);
-
- // Rewind |rtp_info| to that of the first sync packet.
- memcpy(&rtp_info, &first_sync_packet_rtp_info, sizeof(rtp_info));
-
- // Insert regular packets with the same headers as the sync packets above.
- for (int n = 0; n < kNumSyncPackets; ++n) {
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
- rtp_info.header.sequenceNumber++;
- rtp_info.header.timestamp += kBlockSize16kHz;
- receive_timestamp += kBlockSize16kHz;
- }
-
- // Decode.
- for (int n = 0; n < kNumSyncPackets; ++n) {
- ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
- &samples_per_channel, &num_channels,
- &output_type));
- ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
- ASSERT_EQ(1, num_channels);
- EXPECT_TRUE(IsAllNonZero(decoded, samples_per_channel * num_channels));
- }
-}
-
-void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
- uint32_t start_timestamp,
- const std::set<uint16_t>& drop_seq_numbers,
- bool expect_seq_no_wrap,
- bool expect_timestamp_wrap) {
- uint16_t seq_no = start_seq_no;
- uint32_t timestamp = start_timestamp;
- const int kBlocksPerFrame = 3; // Number of 10 ms blocks per frame.
- const int kFrameSizeMs = kBlocksPerFrame * kTimeStepMs;
- const int kSamples = kBlockSize16kHz * kBlocksPerFrame;
- const int kPayloadBytes = kSamples * sizeof(int16_t);
- double next_input_time_ms = 0.0;
- int16_t decoded[kBlockSize16kHz];
- int num_channels;
- int samples_per_channel;
- NetEqOutputType output_type;
- uint32_t receive_timestamp = 0;
-
- // Insert speech for 2 seconds.
- const int kSpeechDurationMs = 2000;
- int packets_inserted = 0;
- uint16_t last_seq_no;
- uint32_t last_timestamp;
- bool timestamp_wrapped = false;
- bool seq_no_wrapped = false;
- for (double t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one 30 ms speech frame.
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- if (drop_seq_numbers.find(seq_no) == drop_seq_numbers.end()) {
- // This sequence number was not in the set to drop. Insert it.
- ASSERT_EQ(0,
- neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
- ++packets_inserted;
- }
- NetEqNetworkStatistics network_stats;
- ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
-
- // Due to internal NetEq logic, the preferred buffer size is about 4 times
- // the packet size for the first few packets. Therefore, we refrain from
- // checking the criteria until then.
- if (packets_inserted > 4) {
- // Expect preferred and actual buffer size to be no more than 2 frames.
- EXPECT_LE(network_stats.preferred_buffer_size_ms, kFrameSizeMs * 2);
- EXPECT_LE(network_stats.current_buffer_size_ms, kFrameSizeMs * 2);
- }
- last_seq_no = seq_no;
- last_timestamp = timestamp;
-
- ++seq_no;
- timestamp += kSamples;
- receive_timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs);
-
- seq_no_wrapped |= seq_no < last_seq_no;
- timestamp_wrapped |= timestamp < last_timestamp;
- }
- // Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
- &samples_per_channel, &num_channels,
- &output_type));
- ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
- ASSERT_EQ(1, num_channels);
-
- // Expect delay (in samples) to be less than 2 packets.
- EXPECT_LE(timestamp - neteq_->PlayoutTimestamp(),
- static_cast<uint32_t>(kSamples * 2));
-
- }
- // Make sure we have actually tested wrap-around.
- ASSERT_EQ(expect_seq_no_wrap, seq_no_wrapped);
- ASSERT_EQ(expect_timestamp_wrap, timestamp_wrapped);
-}
-
-TEST_F(NetEqDecodingTest, SequenceNumberWrap) {
- // Start with a sequence number that will soon wrap.
- std::set<uint16_t> drop_seq_numbers; // Don't drop any packets.
- WrapTest(0xFFFF - 10, 0, drop_seq_numbers, true, false);
-}
-
-TEST_F(NetEqDecodingTest, SequenceNumberWrapAndDrop) {
- // Start with a sequence number that will soon wrap.
- std::set<uint16_t> drop_seq_numbers;
- drop_seq_numbers.insert(0xFFFF);
- drop_seq_numbers.insert(0x0);
- WrapTest(0xFFFF - 10, 0, drop_seq_numbers, true, false);
-}
-
-TEST_F(NetEqDecodingTest, TimestampWrap) {
- // Start with a timestamp that will soon wrap.
- std::set<uint16_t> drop_seq_numbers;
- WrapTest(0, 0xFFFFFFFF - 3000, drop_seq_numbers, false, true);
-}
-
-TEST_F(NetEqDecodingTest, TimestampAndSequenceNumberWrap) {
- // Start with a timestamp and a sequence number that will wrap at the same
- // time.
- std::set<uint16_t> drop_seq_numbers;
- WrapTest(0xFFFF - 10, 0xFFFFFFFF - 5000, drop_seq_numbers, true, true);
-}
-
-} // namespace
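
The wrap tests above rely on plain unsigned modular arithmetic: once a 16-bit sequence number or a 32-bit timestamp wraps, the incremented value compares as smaller than its predecessor, and "is newer" checks use the half-range rule that also appears in NetEq's packet ordering later in this change. A minimal standalone sketch of both ideas, plain C++ for illustration and not part of the WebRTC sources:

#include <cassert>
#include <cstdint>

// Returns true if |a| is "newer" than |b|, treating the 16-bit sequence
// number space as circular (the same half-range rule that the packet
// comparison operators use).
bool SeqNoIsNewer(uint16_t a, uint16_t b) {
  return a != b && static_cast<uint16_t>(a - b) < 0xFFFF / 2;
}

int main() {
  uint16_t seq_no = 0xFFFE;             // About to wrap.
  uint16_t last_seq_no = seq_no;
  ++seq_no;                             // 0xFFFF.
  ++seq_no;                             // Wraps around to 0x0000.
  bool wrapped = seq_no < last_seq_no;  // Same detection as in WrapTest().
  assert(wrapped);
  assert(SeqNoIsNewer(seq_no, last_seq_no));  // 0x0000 is newer than 0xFFFE.
  return 0;
}
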
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.cc
deleted file mode 100644
index 8d9c020f96d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.cc
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/normal.h"
-
-#include <string.h> // memset, memcpy
-
-#include <algorithm> // min
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-
-namespace webrtc {
-
-int Normal::Process(const int16_t* input,
- size_t length,
- Modes last_mode,
- int16_t* external_mute_factor_array,
- AudioMultiVector* output) {
- if (length == 0) {
- // Nothing to process.
- output->Clear();
- return static_cast<int>(length);
- }
-
- // Output should be empty at this point.
- assert(output->Empty());
- output->PushBackInterleaved(input, length);
- int16_t* signal = &(*output)[0][0];
-
- const unsigned fs_mult = fs_hz_ / 8000;
- assert(fs_mult > 0);
- // fs_shift = log2(fs_mult), rounded down.
- // Note that |fs_shift| is not "exact" for 48 kHz.
- // TODO(hlundin): Investigate this further.
- const int fs_shift = 30 - WebRtcSpl_NormW32(fs_mult);
-
- // Check if last RecOut call resulted in an Expand. If so, we have to take
- // care of some cross-fading and unmuting.
- if (last_mode == kModeExpand) {
- // Generate interpolation data using Expand.
- // First, set Expand parameters to appropriate values.
- expand_->SetParametersForNormalAfterExpand();
-
- // Call Expand.
- AudioMultiVector expanded(output->Channels());
- expand_->Process(&expanded);
- expand_->Reset();
-
- for (size_t channel_ix = 0; channel_ix < output->Channels(); ++channel_ix) {
- // Adjust muting factor (main muting factor times expand muting factor).
- external_mute_factor_array[channel_ix] = static_cast<int16_t>(
- WEBRTC_SPL_MUL_16_16_RSFT(external_mute_factor_array[channel_ix],
- expand_->MuteFactor(channel_ix), 14));
-
- int16_t* signal = &(*output)[channel_ix][0];
- size_t length_per_channel = length / output->Channels();
- // Find largest absolute value in new data.
- int16_t decoded_max = WebRtcSpl_MaxAbsValueW16(
- signal, static_cast<int>(length_per_channel));
- // Adjust muting factor if needed (to BGN level).
- int energy_length = std::min(static_cast<int>(fs_mult * 64),
- static_cast<int>(length_per_channel));
- int scaling = 6 + fs_shift
- - WebRtcSpl_NormW32(decoded_max * decoded_max);
- scaling = std::max(scaling, 0); // |scaling| should always be >= 0.
- int32_t energy = WebRtcSpl_DotProductWithScale(signal, signal,
- energy_length, scaling);
- energy = energy / (energy_length >> scaling);
-
- int mute_factor;
- if ((energy != 0) &&
- (energy > background_noise_.Energy(channel_ix))) {
- // Normalize new frame energy to 15 bits.
- scaling = WebRtcSpl_NormW32(energy) - 16;
- // We want background_noise_.energy() / energy in Q14.
- int32_t bgn_energy =
- background_noise_.Energy(channel_ix) << (scaling+14);
- int16_t energy_scaled = energy << scaling;
- int16_t ratio = WebRtcSpl_DivW32W16(bgn_energy, energy_scaled);
- mute_factor = WebRtcSpl_SqrtFloor(static_cast<int32_t>(ratio) << 14);
- } else {
- mute_factor = 16384; // 1.0 in Q14.
- }
- if (mute_factor > external_mute_factor_array[channel_ix]) {
- external_mute_factor_array[channel_ix] = std::min(mute_factor, 16384);
- }
-
- // If muted, increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14).
- int16_t increment = 64 / fs_mult;
- for (size_t i = 0; i < length_per_channel; i++) {
- // Scale with mute factor.
- assert(channel_ix < output->Channels());
- assert(i < output->Size());
- int32_t scaled_signal = (*output)[channel_ix][i] *
- external_mute_factor_array[channel_ix];
- // Shift 14 with proper rounding.
- (*output)[channel_ix][i] = (scaled_signal + 8192) >> 14;
- // Increase mute_factor towards 16384.
- external_mute_factor_array[channel_ix] =
- std::min(external_mute_factor_array[channel_ix] + increment, 16384);
- }
-
- // Interpolate the expanded data into the new vector.
- // (NB/WB/SWB32/SWB48 8/16/32/48 samples.)
- assert(fs_shift < 3); // Will always be 0, 1, or 2.
- increment = 4 >> fs_shift;
- int fraction = increment;
- for (size_t i = 0; i < 8 * fs_mult; i++) {
- // TODO(hlundin): Add 16 instead of 8 for correct rounding. Keeping 8
- // now for legacy bit-exactness.
- assert(channel_ix < output->Channels());
- assert(i < output->Size());
- (*output)[channel_ix][i] =
- (fraction * (*output)[channel_ix][i] +
- (32 - fraction) * expanded[channel_ix][i] + 8) >> 5;
- fraction += increment;
- }
- }
- } else if (last_mode == kModeRfc3389Cng) {
- assert(output->Channels() == 1); // Not adapted for multi-channel yet.
- static const int kCngLength = 32;
- int16_t cng_output[kCngLength];
- // Reset mute factor and start up fresh.
- external_mute_factor_array[0] = 16384;
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
-
- if (cng_decoder) {
- CNG_dec_inst* cng_inst = static_cast<CNG_dec_inst*>(cng_decoder->state());
- // Generate long enough for 32kHz.
- if (WebRtcCng_Generate(cng_inst, cng_output, kCngLength, 0) < 0) {
- // Error returned; set return vector to all zeros.
- memset(cng_output, 0, sizeof(cng_output));
- }
- } else {
- // If no CNG instance is defined, just copy from the decoded data.
- // (This will result in interpolating the decoded with itself.)
- memcpy(cng_output, signal, fs_mult * 8 * sizeof(int16_t));
- }
- // Interpolate the CNG into the new vector.
- // (NB/WB/SWB32/SWB48 8/16/32/48 samples.)
- assert(fs_shift < 3); // Will always be 0, 1, or 2.
- int16_t increment = 4 >> fs_shift;
- int16_t fraction = increment;
- for (size_t i = 0; i < 8 * fs_mult; i++) {
- // TODO(hlundin): Add 16 instead of 8 for correct rounding. Keeping 8 now
- // for legacy bit-exactness.
- signal[i] =
- (fraction * signal[i] + (32 - fraction) * cng_output[i] + 8) >> 5;
- fraction += increment;
- }
- } else if (external_mute_factor_array[0] < 16384) {
- // The previous mode was neither Expand, FadeToBGN, nor RFC3389_CNG, but we
- // are still ramping up from previous muting.
- // If muted, increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14).
- int16_t increment = 64 / fs_mult;
- size_t length_per_channel = length / output->Channels();
- for (size_t i = 0; i < length_per_channel; i++) {
- for (size_t channel_ix = 0; channel_ix < output->Channels();
- ++channel_ix) {
- // Scale with mute factor.
- assert(channel_ix < output->Channels());
- assert(i < output->Size());
- int32_t scaled_signal = (*output)[channel_ix][i] *
- external_mute_factor_array[channel_ix];
- // Shift 14 with proper rounding.
- (*output)[channel_ix][i] = (scaled_signal + 8192) >> 14;
- // Increase mute_factor towards 16384.
- external_mute_factor_array[channel_ix] =
- std::min(16384, external_mute_factor_array[channel_ix] + increment);
- }
- }
- }
-
- return static_cast<int>(length);
-}
-
-} // namespace webrtc
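
The Q14 fixed-point idiom used throughout Normal::Process() above is easy to verify with concrete numbers: a sample is multiplied by a Q14 mute factor, 8192 is added for rounding, and the product is shifted right by 14, while a per-sample increment of 64/fs_mult ramps the factor back towards unity (16384). A small self-contained sketch, not part of the WebRTC sources:

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <cstdio>

int main() {
  // Q14 fixed point: 16384 represents 1.0.
  int16_t mute_factor = 8192;  // 0.5 in Q14.
  int16_t sample = 1000;

  // Scale the sample by the mute factor with rounding; this is the
  // "(scaled_signal + 8192) >> 14" idiom from Normal::Process().
  int32_t scaled = sample * mute_factor;
  int16_t out = static_cast<int16_t>((scaled + 8192) >> 14);
  assert(out == 500);  // 1000 * 0.5.

  // Ramp the mute factor towards unity. For 8 kHz (fs_mult == 1) the
  // per-sample increment is 64, i.e. 64/16384, roughly 0.0040 per sample.
  const int fs_mult = 1;
  int16_t increment = 64 / fs_mult;
  mute_factor = static_cast<int16_t>(std::min(mute_factor + increment, 16384));
  std::printf("out=%d new mute_factor=%d\n", out, mute_factor);
  return 0;
}
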
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.h
deleted file mode 100644
index fa14685f9bb..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NORMAL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NORMAL_H_
-
-#include <string.h> // Access to size_t.
-
-#include <vector>
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class BackgroundNoise;
-class DecoderDatabase;
-class Expand;
-
-// This class provides the "Normal" DSP operation, that is performed when
-// there is no data loss, no need to stretch the timing of the signal, and
-// no other "special circumstances" are at hand.
-class Normal {
- public:
- Normal(int fs_hz, DecoderDatabase* decoder_database,
- const BackgroundNoise& background_noise,
- Expand* expand)
- : fs_hz_(fs_hz),
- decoder_database_(decoder_database),
- background_noise_(background_noise),
- expand_(expand) {
- }
-
- virtual ~Normal() {}
-
- // Performs the "Normal" operation. The decoder data is supplied in |input|,
- // having |length| samples in total for all channels (interleaved). The
- // result is written to |output|. The number of channels allocated in
- // |output| defines the number of channels that will be used when
- // de-interleaving |input|. |last_mode| contains the mode used in the previous
- // GetAudio call (i.e., not the current one), and |external_mute_factor| is
- // a pointer to the mute factor in the NetEqImpl class.
- int Process(const int16_t* input, size_t length,
- Modes last_mode,
- int16_t* external_mute_factor_array,
- AudioMultiVector* output);
-
- private:
- int fs_hz_;
- DecoderDatabase* decoder_database_;
- const BackgroundNoise& background_noise_;
- Expand* expand_;
-
- DISALLOW_COPY_AND_ASSIGN(Normal);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NORMAL_H_
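
The comment above spells out the Process() contract: interleaved input of |length| samples across all channels, an output vector whose channel count drives the de-interleaving, the mode of the previous GetAudio() call, and a per-channel mute factor in Q14. A rough usage sketch follows; it mirrors the object construction in normal_unittest.cc (the next file in this change), but the default-constructed DecoderDatabase and the kModeNormal constant are assumptions here rather than something shown in this diff:

#include "webrtc/modules/audio_coding/neteq4/normal.h"

#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
#include "webrtc/modules/audio_coding/neteq4/expand.h"
#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"

void RunNormalOnce(const int16_t* decoded, size_t length) {
  const int kFs = 8000;
  const size_t kChannels = 1;
  webrtc::DecoderDatabase db;  // Assumed to be default-constructible.
  webrtc::BackgroundNoise bgn(kChannels);
  webrtc::SyncBuffer sync_buffer(kChannels, 1000);
  webrtc::RandomVector random_vector;
  webrtc::Expand expand(&bgn, &sync_buffer, &random_vector, kFs, kChannels);
  webrtc::Normal normal(kFs, &db, bgn, &expand);

  webrtc::AudioMultiVector output(kChannels);
  int16_t mute_factor[kChannels] = {16384};  // 1.0 in Q14, i.e. no muting.
  // kModeNormal is assumed to be the enum value for an ordinary previous
  // GetAudio() call.
  normal.Process(decoded, length, webrtc::kModeNormal, mute_factor, &output);
}
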
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal_unittest.cc
deleted file mode 100644
index 2bd7b894f42..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal_unittest.cc
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for Normal class.
-
-#include "webrtc/modules/audio_coding/neteq4/normal.h"
-
-#include <vector>
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-
-namespace webrtc {
-
-TEST(Normal, CreateAndDestroy) {
- MockDecoderDatabase db;
- int fs = 8000;
- size_t channels = 1;
- BackgroundNoise bgn(channels);
- SyncBuffer sync_buffer(1, 1000);
- RandomVector random_vector;
- Expand expand(&bgn, &sync_buffer, &random_vector, fs, channels);
- Normal normal(fs, &db, bgn, &expand);
- EXPECT_CALL(db, Die()); // Called when |db| goes out of scope.
-}
-
-// TODO(hlundin): Write more tests.
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet.h
deleted file mode 100644
index 4518f91381a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_H_
-
-#include <list>
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Struct for holding RTP packets.
-struct Packet {
- RTPHeader header;
- uint8_t* payload; // Datagram excluding RTP header and header extension.
- int payload_length;
- bool primary; // Primary, i.e., not redundant payload.
- int waiting_time;
- bool sync_packet;
-
- // Constructor.
- Packet()
- : payload(NULL),
- payload_length(0),
- primary(true),
- waiting_time(0),
- sync_packet(false) {
- }
-
- // Comparison operators. Establish a packet ordering based on (1) timestamp,
- // (2) sequence number, (3) regular packet vs sync-packet, and (4) redundancy.
- // Timestamps and sequence numbers are compared taking wrap-around into
- // account. If both timestamp and sequence number are identical and one of
- // the packets is a sync-packet, the regular packet is considered earlier. For
- // two regular packets with the same sequence number and timestamp, a primary
- // payload is considered "smaller" than a secondary.
- bool operator==(const Packet& rhs) const {
- return (this->header.timestamp == rhs.header.timestamp &&
- this->header.sequenceNumber == rhs.header.sequenceNumber &&
- this->primary == rhs.primary &&
- this->sync_packet == rhs.sync_packet);
- }
- bool operator!=(const Packet& rhs) const { return !operator==(rhs); }
- bool operator<(const Packet& rhs) const {
- if (this->header.timestamp == rhs.header.timestamp) {
- if (this->header.sequenceNumber == rhs.header.sequenceNumber) {
- // Timestamp and sequence number are identical. A sync packet should be
- // considered "larger" (i.e., "later") than a "network packet" (a regular
- // packet from the network, not a sync-packet). If neither of the packets
- // is a sync-packet, then deem the left-hand side to be "smaller"
- // (i.e., "earlier") if it is primary and the right-hand side is not.
- //
- // Requiring sync packets to be larger than "network packets," given the
- // same RTP sequence number and timestamp, guarantees that a "network
- // packet" is inserted at an earlier position in |packet_buffer_| than a
- // sync packet with the same timestamp and sequence number.
- if (rhs.sync_packet)
- return true;
- if (this->sync_packet)
- return false;
- return (this->primary && !rhs.primary);
- }
- return (static_cast<uint16_t>(rhs.header.sequenceNumber
- - this->header.sequenceNumber) < 0xFFFF / 2);
- }
- return (static_cast<uint32_t>(rhs.header.timestamp
- - this->header.timestamp) < 0xFFFFFFFF / 2);
- }
- bool operator>(const Packet& rhs) const { return rhs.operator<(*this); }
- bool operator<=(const Packet& rhs) const { return !operator>(rhs); }
- bool operator>=(const Packet& rhs) const { return !operator<(rhs); }
-};
-
-// A list of packets.
-typedef std::list<Packet*> PacketList;
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_H_
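
The ordering rules documented above can be exercised directly with a couple of stack-allocated packets: timestamps are compared with wrap-around awareness, and a sync packet with identical header fields sorts after the corresponding network packet. A minimal sketch, not part of the WebRTC sources:

#include <cassert>

#include "webrtc/modules/audio_coding/neteq4/packet.h"

int main() {
  webrtc::Packet regular;
  regular.header.timestamp = 0xFFFFFFF0u;  // Close to wrapping.
  regular.header.sequenceNumber = 0xFFFF;

  webrtc::Packet wrapped;
  wrapped.header.timestamp = 0x00000010u;  // Wrapped, but logically later.
  wrapped.header.sequenceNumber = 0x0000;

  // Wrap-around aware ordering: the wrapped packet is "larger" (later).
  assert(regular < wrapped);

  // Same timestamp and sequence number: the sync packet sorts after the
  // regular (network) packet.
  webrtc::Packet sync = regular;
  sync.sync_packet = true;
  assert(regular < sync);
  assert(!(sync < regular));
  return 0;
}
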
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.cc
deleted file mode 100644
index d19abbaa84d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.cc
+++ /dev/null
@@ -1,288 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is the implementation of the PacketBuffer class. It is mostly based on
-// an STL list. The list is kept sorted at all times so that the next packet to
-// decode is at the beginning of the list.
-
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-
-#include <algorithm> // find_if()
-
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-
-namespace webrtc {
-
-// Predicate used when inserting packets in the buffer list.
-// Operator() returns true when |packet| goes before |new_packet|.
-class NewTimestampIsLarger {
- public:
- explicit NewTimestampIsLarger(const Packet* new_packet)
- : new_packet_(new_packet) {
- }
- bool operator()(Packet* packet) {
- return (*new_packet_ >= *packet);
- }
-
- private:
- const Packet* new_packet_;
-};
-
-// Constructor. The arguments define the maximum number of slots and maximum
-// payload memory (excluding RTP headers) that the buffer will accept.
-PacketBuffer::PacketBuffer(size_t max_number_of_packets,
- size_t max_memory_bytes)
- : max_number_of_packets_(max_number_of_packets),
- max_memory_bytes_(max_memory_bytes),
- current_memory_bytes_(0) {
-}
-
-// Destructor. All packets in the buffer will be destroyed.
-PacketBuffer::~PacketBuffer() {
- Flush();
-}
-
-// Flush the buffer. All packets in the buffer will be destroyed.
-void PacketBuffer::Flush() {
- DeleteAllPackets(&buffer_);
- current_memory_bytes_ = 0;
-}
-
-int PacketBuffer::InsertPacket(Packet* packet) {
- if (!packet || !packet->payload) {
- if (packet) {
- delete packet;
- }
- return kInvalidPacket;
- }
-
- int return_val = kOK;
-
- if ((buffer_.size() >= max_number_of_packets_) ||
- (current_memory_bytes_ + packet->payload_length
- > static_cast<int>(max_memory_bytes_))) {
- // Buffer is full. Flush it.
- Flush();
- return_val = kFlushed;
- if ((buffer_.size() >= max_number_of_packets_) ||
- (current_memory_bytes_ + packet->payload_length
- > static_cast<int>(max_memory_bytes_))) {
- // Buffer is still too small for the packet. Either the buffer limits are
- // really small, or the packet is really large. Delete the packet and
- // return an error.
- delete [] packet->payload;
- delete packet;
- return kOversizePacket;
- }
- }
-
- // Get an iterator pointing to the place in the buffer where the new packet
- // should be inserted. The list is searched from the back, since the most
- // likely case is that the new packet should be near the end of the list.
- PacketList::reverse_iterator rit = std::find_if(
- buffer_.rbegin(), buffer_.rend(),
- NewTimestampIsLarger(packet));
- buffer_.insert(rit.base(), packet); // Insert the packet at that position.
- current_memory_bytes_ += packet->payload_length;
-
- return return_val;
-}
-
-int PacketBuffer::InsertPacketList(PacketList* packet_list,
- const DecoderDatabase& decoder_database,
- uint8_t* current_rtp_payload_type,
- uint8_t* current_cng_rtp_payload_type) {
- bool flushed = false;
- while (!packet_list->empty()) {
- Packet* packet = packet_list->front();
- if (decoder_database.IsComfortNoise(packet->header.payloadType)) {
- if (*current_cng_rtp_payload_type != 0xFF &&
- *current_cng_rtp_payload_type != packet->header.payloadType) {
- // New CNG payload type implies new codec type.
- *current_rtp_payload_type = 0xFF;
- Flush();
- flushed = true;
- }
- *current_cng_rtp_payload_type = packet->header.payloadType;
- } else if (!decoder_database.IsDtmf(packet->header.payloadType)) {
- // This must be speech.
- if (*current_rtp_payload_type != 0xFF &&
- *current_rtp_payload_type != packet->header.payloadType) {
- *current_cng_rtp_payload_type = 0xFF;
- Flush();
- flushed = true;
- }
- *current_rtp_payload_type = packet->header.payloadType;
- }
- int return_val = InsertPacket(packet);
- packet_list->pop_front();
- if (return_val == kFlushed) {
- // The buffer flushed, but this is not an error. We can still continue.
- flushed = true;
- } else if (return_val != kOK) {
- // An error occurred. Delete remaining packets in list and return.
- DeleteAllPackets(packet_list);
- return return_val;
- }
- }
- return flushed ? kFlushed : kOK;
-}
-
-int PacketBuffer::NextTimestamp(uint32_t* next_timestamp) const {
- if (Empty()) {
- return kBufferEmpty;
- }
- if (!next_timestamp) {
- return kInvalidPointer;
- }
- *next_timestamp = buffer_.front()->header.timestamp;
- return kOK;
-}
-
-int PacketBuffer::NextHigherTimestamp(uint32_t timestamp,
- uint32_t* next_timestamp) const {
- if (Empty()) {
- return kBufferEmpty;
- }
- if (!next_timestamp) {
- return kInvalidPointer;
- }
- PacketList::const_iterator it;
- for (it = buffer_.begin(); it != buffer_.end(); ++it) {
- if ((*it)->header.timestamp >= timestamp) {
- // Found a packet matching the search.
- *next_timestamp = (*it)->header.timestamp;
- return kOK;
- }
- }
- return kNotFound;
-}
-
-const RTPHeader* PacketBuffer::NextRtpHeader() const {
- if (Empty()) {
- return NULL;
- }
- return const_cast<const RTPHeader*>(&(buffer_.front()->header));
-}
-
-Packet* PacketBuffer::GetNextPacket(int* discard_count) {
- if (Empty()) {
- // Buffer is empty.
- return NULL;
- }
-
- Packet* packet = buffer_.front();
- // Assert that the packet sanity checks in the InsertPacket method work.
- assert(packet && packet->payload);
- buffer_.pop_front();
- current_memory_bytes_ -= packet->payload_length;
- assert(current_memory_bytes_ >= 0); // Assert bookkeeping is correct.
- // Discard other packets with the same timestamp. These are duplicates or
- // redundant payloads that should not be used.
- if (discard_count) {
- *discard_count = 0;
- }
- while (!Empty() &&
- buffer_.front()->header.timestamp == packet->header.timestamp) {
- if (DiscardNextPacket() != kOK) {
- assert(false); // Must be ok by design.
- }
- if (discard_count) {
- ++(*discard_count);
- }
- }
- return packet;
-}
-
-int PacketBuffer::DiscardNextPacket() {
- if (Empty()) {
- return kBufferEmpty;
- }
- Packet* temp_packet = buffer_.front();
- // Assert that the packet sanity checks in the InsertPacket method work.
- assert(temp_packet && temp_packet->payload);
- current_memory_bytes_ -= temp_packet->payload_length;
- assert(current_memory_bytes_ >= 0); // Assert bookkeeping is correct.
- DeleteFirstPacket(&buffer_);
- return kOK;
-}
-
-int PacketBuffer::DiscardOldPackets(uint32_t timestamp_limit) {
- int discard_count = 0;
- while (!Empty() &&
- timestamp_limit != buffer_.front()->header.timestamp &&
- static_cast<uint32_t>(timestamp_limit
- - buffer_.front()->header.timestamp) <
- 0xFFFFFFFF / 2) {
- if (DiscardNextPacket() != kOK) {
- assert(false); // Must be ok by design.
- }
- ++discard_count;
- }
- return discard_count;
-}
-
-int PacketBuffer::NumSamplesInBuffer(DecoderDatabase* decoder_database,
- int last_decoded_length) const {
- PacketList::const_iterator it;
- int num_samples = 0;
- int last_duration = last_decoded_length;
- for (it = buffer_.begin(); it != buffer_.end(); ++it) {
- Packet* packet = (*it);
- AudioDecoder* decoder =
- decoder_database->GetDecoder(packet->header.payloadType);
- if (decoder) {
- int duration = packet->sync_packet ? last_duration :
- decoder->PacketDuration(packet->payload, packet->payload_length);
- if (duration >= 0) {
- last_duration = duration; // Save the most up-to-date (valid) duration.
- }
- }
- num_samples += last_duration;
- }
- return num_samples;
-}
-
-void PacketBuffer::IncrementWaitingTimes(int inc) {
- PacketList::iterator it;
- for (it = buffer_.begin(); it != buffer_.end(); ++it) {
- (*it)->waiting_time += inc;
- }
-}
-
-bool PacketBuffer::DeleteFirstPacket(PacketList* packet_list) {
- if (packet_list->empty()) {
- return false;
- }
- Packet* first_packet = packet_list->front();
- delete [] first_packet->payload;
- delete first_packet;
- packet_list->pop_front();
- return true;
-}
-
-void PacketBuffer::DeleteAllPackets(PacketList* packet_list) {
- while (DeleteFirstPacket(packet_list)) {
- // Continue while the list is not empty.
- }
-}
-
-void PacketBuffer::BufferStat(int* num_packets,
- int* max_num_packets,
- int* current_memory_bytes,
- int* max_memory_bytes) const {
- *num_packets = static_cast<int>(buffer_.size());
- *max_num_packets = static_cast<int>(max_number_of_packets_);
- *current_memory_bytes = current_memory_bytes_;
- *max_memory_bytes = static_cast<int>(max_memory_bytes_);
-}
-
-} // namespace webrtc
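
InsertPacket() above keeps the list sorted by searching from the back with a reverse iterator and inserting at rit.base(), which is the cheap path when packets arrive roughly in order. The same idiom, reduced to a plain std::list<int> and written with C++11 conveniences for brevity; a sketch, not part of the WebRTC sources:

#include <algorithm>
#include <cassert>
#include <list>

// Inserts |value| into the sorted list |l|, searching from the back, just
// like PacketBuffer::InsertPacket() does with NewTimestampIsLarger.
void SortedInsertFromBack(std::list<int>* l, int value) {
  std::list<int>::reverse_iterator rit = std::find_if(
      l->rbegin(), l->rend(),
      [value](int existing) { return value >= existing; });
  // rit.base() points just after the found element, i.e. the insert position.
  l->insert(rit.base(), value);
}

int main() {
  std::list<int> l = {10, 20, 30};
  SortedInsertFromBack(&l, 25);  // Common case: lands near the end.
  SortedInsertFromBack(&l, 5);   // Worst case: scans the whole list.
  assert((l == std::list<int>{5, 10, 20, 25, 30}));
  return 0;
}
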
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.h
deleted file mode 100644
index e964c28f207..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.h
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_BUFFER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declaration.
-class DecoderDatabase;
-
-// This is the actual buffer holding the packets before decoding.
-class PacketBuffer {
- public:
- enum BufferReturnCodes {
- kOK = 0,
- kFlushed,
- kNotFound,
- kBufferEmpty,
- kInvalidPacket,
- kInvalidPointer,
- kOversizePacket
- };
-
- // Constructor creates a buffer which can hold a maximum of
- // |max_number_of_packets| packets and |max_payload_memory| bytes of payload,
- // excluding RTP headers.
- PacketBuffer(size_t max_number_of_packets, size_t max_payload_memory);
-
- // Deletes all packets in the buffer before destroying the buffer.
- virtual ~PacketBuffer();
-
- // Flushes the buffer and deletes all packets in it.
- virtual void Flush();
-
- // Returns true for an empty buffer.
- virtual bool Empty() const { return buffer_.empty(); }
-
- // Inserts |packet| into the buffer. The buffer will take over ownership of
- // the packet object.
- // Returns PacketBuffer::kOK on success, PacketBuffer::kFlushed if the buffer
- // was flushed due to overfilling.
- virtual int InsertPacket(Packet* packet);
-
- // Inserts a list of packets into the buffer. The buffer will take over
- // ownership of the packet objects.
- // Returns PacketBuffer::kOK if all packets were inserted successfully.
- // If the buffer was flushed due to overfilling, only a subset of the list is
- // inserted, and PacketBuffer::kFlushed is returned.
- // The last three parameters are included for legacy compatibility.
- // TODO(hlundin): Redesign to not use current_*_payload_type and
- // decoder_database.
- virtual int InsertPacketList(PacketList* packet_list,
- const DecoderDatabase& decoder_database,
- uint8_t* current_rtp_payload_type,
- uint8_t* current_cng_rtp_payload_type);
-
- // Gets the timestamp for the first packet in the buffer and writes it to the
- // output variable |next_timestamp|.
- // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
- // PacketBuffer::kOK otherwise.
- virtual int NextTimestamp(uint32_t* next_timestamp) const;
-
- // Gets the timestamp for the first packet in the buffer with a timestamp no
- // lower than the input limit |timestamp|. The result is written to the output
- // variable |next_timestamp|.
- // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
- // PacketBuffer::kOK otherwise.
- virtual int NextHigherTimestamp(uint32_t timestamp,
- uint32_t* next_timestamp) const;
-
- // Returns a (constant) pointer to the RTP header of the first packet in the
- // buffer. Returns NULL if the buffer is empty.
- virtual const RTPHeader* NextRtpHeader() const;
-
- // Extracts the first packet in the buffer and returns a pointer to it.
- // Returns NULL if the buffer is empty. The caller is responsible for deleting
- // the packet.
- // Subsequent packets with the same timestamp as the one extracted will be
- // discarded and properly deleted. The number of discarded packets will be
- // written to the output variable |discard_count|.
- virtual Packet* GetNextPacket(int* discard_count);
-
- // Discards the first packet in the buffer. The packet is deleted.
- // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
- // PacketBuffer::kOK otherwise.
- virtual int DiscardNextPacket();
-
- // Discards all packets that are (strictly) older than |timestamp_limit|.
- // Returns number of packets discarded.
- virtual int DiscardOldPackets(uint32_t timestamp_limit);
-
- // Returns the number of packets in the buffer, including duplicates and
- // redundant packets.
- virtual int NumPacketsInBuffer() const {
- return static_cast<int>(buffer_.size());
- }
-
- // Returns the number of samples in the buffer, including samples carried in
- // duplicate and redundant packets.
- virtual int NumSamplesInBuffer(DecoderDatabase* decoder_database,
- int last_decoded_length) const;
-
- // Increase the waiting time counter for every packet in the buffer by |inc|.
- // The default value for |inc| is 1.
- virtual void IncrementWaitingTimes(int inc = 1);
-
- virtual void BufferStat(int* num_packets,
- int* max_num_packets,
- int* current_memory_bytes,
- int* max_memory_bytes) const;
-
- virtual int current_memory_bytes() const { return current_memory_bytes_; }
-
- // Static method that properly deletes the first packet, and its payload
- // array, in |packet_list|. Returns false if |packet_list| already was empty,
- // otherwise true.
- static bool DeleteFirstPacket(PacketList* packet_list);
-
- // Static method that properly deletes all packets, and their payload arrays,
- // in |packet_list|.
- static void DeleteAllPackets(PacketList* packet_list);
-
- private:
- size_t max_number_of_packets_;
- size_t max_memory_bytes_;
- int current_memory_bytes_;
- PacketList buffer_;
- DISALLOW_COPY_AND_ASSIGN(PacketBuffer);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_BUFFER_H_
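
As documented above, GetNextPacket() hands ownership of the returned packet and its payload array to the caller. A minimal drain loop honoring that contract might look as follows; a sketch, not part of the WebRTC sources:

#include "webrtc/modules/audio_coding/neteq4/packet.h"
#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"

// Empties |buffer|, deleting each extracted packet and its payload.
void DrainBuffer(webrtc::PacketBuffer* buffer) {
  while (!buffer->Empty()) {
    int discard_count = 0;
    webrtc::Packet* packet = buffer->GetNextPacket(&discard_count);
    if (!packet)
      break;
    // ... use packet->payload and packet->payload_length here ...
    delete [] packet->payload;
    delete packet;
  }
}
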
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer_unittest.cc
deleted file mode 100644
index c8109dc6dff..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer_unittest.cc
+++ /dev/null
@@ -1,560 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for PacketBuffer class.
-
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-
-using ::testing::Return;
-using ::testing::_;
-
-namespace webrtc {
-
-// Helper class to generate packets. Packets must be deleted by the user.
-class PacketGenerator {
- public:
- PacketGenerator(uint16_t seq_no, uint32_t ts, uint8_t pt, int frame_size);
- virtual ~PacketGenerator() {}
- Packet* NextPacket(int payload_size_bytes);
- void SkipPacket();
-
- uint16_t seq_no_;
- uint32_t ts_;
- uint8_t pt_;
- int frame_size_;
-};
-
-PacketGenerator::PacketGenerator(uint16_t seq_no, uint32_t ts, uint8_t pt,
- int frame_size)
- : seq_no_(seq_no),
- ts_(ts),
- pt_(pt),
- frame_size_(frame_size) {
-}
-
-Packet* PacketGenerator::NextPacket(int payload_size_bytes) {
- Packet* packet = new Packet;
- packet->header.sequenceNumber = seq_no_;
- packet->header.timestamp = ts_;
- packet->header.payloadType = pt_;
- packet->header.markerBit = false;
- packet->header.ssrc = 0x12345678;
- packet->header.numCSRCs = 0;
- packet->header.paddingLength = 0;
- packet->payload_length = payload_size_bytes;
- packet->primary = true;
- packet->payload = new uint8_t[payload_size_bytes];
- ++seq_no_;
- ts_ += frame_size_;
- return packet;
-}
-
-void PacketGenerator::SkipPacket() {
- ++seq_no_;
- ts_ += frame_size_;
-}
-
-
-// Start of test definitions.
-
-TEST(PacketBuffer, CreateAndDestroy) {
- PacketBuffer* buffer = new PacketBuffer(10, 1000); // 10 packets, 1000 bytes.
- EXPECT_TRUE(buffer->Empty());
- delete buffer;
-}
-
-TEST(PacketBuffer, InsertPacket) {
- PacketBuffer buffer(10, 1000); // 10 packets, 1000 bytes.
- PacketGenerator gen(17u, 4711u, 0, 10);
-
- const int payload_len = 100;
- Packet* packet = gen.NextPacket(payload_len);
-
- EXPECT_EQ(0, buffer.InsertPacket(packet));
- uint32_t next_ts;
- EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
- EXPECT_EQ(4711u, next_ts);
- EXPECT_FALSE(buffer.Empty());
- EXPECT_EQ(1, buffer.NumPacketsInBuffer());
- EXPECT_EQ(payload_len, buffer.current_memory_bytes());
- const RTPHeader* hdr = buffer.NextRtpHeader();
- EXPECT_EQ(&(packet->header), hdr); // Compare pointer addresses.
-
- // Do not explicitly flush buffer or delete packet to test that it is deleted
- // with the buffer. (Tested with Valgrind or similar tool.)
-}
-
-// Test to flush buffer.
-TEST(PacketBuffer, FlushBuffer) {
- PacketBuffer buffer(10, 1000); // 10 packets, 1000 bytes.
- PacketGenerator gen(0, 0, 0, 10);
- const int payload_len = 10;
-
- // Insert 10 small packets; should be ok.
- for (int i = 0; i < 10; ++i) {
- Packet* packet = gen.NextPacket(payload_len);
- EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
- }
- EXPECT_EQ(10, buffer.NumPacketsInBuffer());
- EXPECT_FALSE(buffer.Empty());
- EXPECT_EQ(10 * payload_len, buffer.current_memory_bytes());
-
- buffer.Flush();
- // Buffer should delete the payloads itself.
- EXPECT_EQ(0, buffer.NumPacketsInBuffer());
- EXPECT_TRUE(buffer.Empty());
- EXPECT_EQ(0, buffer.current_memory_bytes());
-}
-
-// Test to fill the buffer over the limits, and verify that it flushes.
-TEST(PacketBuffer, OverfillBuffer) {
- PacketBuffer buffer(10, 1000); // 10 packets, 1000 bytes.
- PacketGenerator gen(0, 0, 0, 10);
-
- // Insert 10 small packets; should be ok.
- const int payload_len = 10;
- int i;
- for (i = 0; i < 10; ++i) {
- Packet* packet = gen.NextPacket(payload_len);
- EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
- }
- EXPECT_EQ(10, buffer.NumPacketsInBuffer());
- EXPECT_EQ(10 * payload_len, buffer.current_memory_bytes());
- uint32_t next_ts;
- EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
- EXPECT_EQ(0u, next_ts); // Expect first inserted packet to be first in line.
-
- // Insert 11th packet; should flush the buffer and insert it after flushing.
- Packet* packet = gen.NextPacket(payload_len);
- EXPECT_EQ(PacketBuffer::kFlushed, buffer.InsertPacket(packet));
- EXPECT_EQ(1, buffer.NumPacketsInBuffer());
- EXPECT_EQ(payload_len, buffer.current_memory_bytes());
- EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
- // Expect last inserted packet to be first in line.
- EXPECT_EQ(packet->header.timestamp, next_ts);
-
- // Insert 2 large packets; expect to flush when inserting the second one.
- const int large_payload_len = 500;
- packet = gen.NextPacket(large_payload_len);
- EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
- EXPECT_EQ(2, buffer.NumPacketsInBuffer());
- EXPECT_EQ(payload_len + large_payload_len, buffer.current_memory_bytes());
-
- packet = gen.NextPacket(large_payload_len);
- EXPECT_EQ(PacketBuffer::kFlushed, buffer.InsertPacket(packet));
- EXPECT_EQ(1, buffer.NumPacketsInBuffer());
- EXPECT_EQ(large_payload_len, buffer.current_memory_bytes());
-
- // Flush buffer to delete remaining packets.
- buffer.Flush();
-}
-
-// Test inserting a list of packets.
-TEST(PacketBuffer, InsertPacketList) {
- PacketBuffer buffer(10, 1000); // 10 packets, 1000 bytes.
- PacketGenerator gen(0, 0, 0, 10);
- PacketList list;
- const int payload_len = 10;
-
- // Insert 10 small packets.
- for (int i = 0; i < 10; ++i) {
- Packet* packet = gen.NextPacket(payload_len);
- list.push_back(packet);
- }
-
- MockDecoderDatabase decoder_database;
- EXPECT_CALL(decoder_database, IsComfortNoise(0))
- .WillRepeatedly(Return(false));
- EXPECT_CALL(decoder_database, IsDtmf(0))
- .WillRepeatedly(Return(false));
- uint8_t current_pt = 0xFF;
- uint8_t current_cng_pt = 0xFF;
- EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacketList(&list,
- decoder_database,
- &current_pt,
- &current_cng_pt));
- EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
- EXPECT_EQ(10, buffer.NumPacketsInBuffer());
- EXPECT_EQ(10 * payload_len, buffer.current_memory_bytes());
- EXPECT_EQ(0, current_pt); // Current payload type changed to 0.
- EXPECT_EQ(0xFF, current_cng_pt); // CNG payload type not changed.
-
- buffer.Flush(); // Clean up.
-
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
-}
-
-// Test inserting a list of packets. Last packet is of a different payload type.
-// Expecting the buffer to flush.
-// TODO(hlundin): Remove this test when legacy operation is no longer needed.
-TEST(PacketBuffer, InsertPacketListChangePayloadType) {
- PacketBuffer buffer(10, 1000); // 10 packets, 1000 bytes.
- PacketGenerator gen(0, 0, 0, 10);
- PacketList list;
- const int payload_len = 10;
-
- // Insert 10 small packets.
- for (int i = 0; i < 10; ++i) {
- Packet* packet = gen.NextPacket(payload_len);
- list.push_back(packet);
- }
- // Insert 11th packet of another payload type (not CNG).
- Packet* packet = gen.NextPacket(payload_len);
- packet->header.payloadType = 1;
- list.push_back(packet);
-
-
- MockDecoderDatabase decoder_database;
- EXPECT_CALL(decoder_database, IsComfortNoise(_))
- .WillRepeatedly(Return(false));
- EXPECT_CALL(decoder_database, IsDtmf(_))
- .WillRepeatedly(Return(false));
- uint8_t current_pt = 0xFF;
- uint8_t current_cng_pt = 0xFF;
- EXPECT_EQ(PacketBuffer::kFlushed, buffer.InsertPacketList(&list,
- decoder_database,
- &current_pt,
- &current_cng_pt));
- EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
- EXPECT_EQ(1, buffer.NumPacketsInBuffer()); // Only the last packet.
- EXPECT_EQ(1 * payload_len, buffer.current_memory_bytes());
- EXPECT_EQ(1, current_pt); // Current payload type changed to 1.
- EXPECT_EQ(0xFF, current_cng_pt); // CNG payload type not changed.
-
- buffer.Flush(); // Clean up.
-
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
-}
-
-// Test inserting a number of packets, and verifying correct extraction order.
-// The packets inserted are as follows:
-// Packet no. Seq. no. Primary TS Secondary TS
-// 0 0xFFFD 0xFFFFFFD7 -
-// 1 0xFFFE 0xFFFFFFE1 0xFFFFFFD7
-// 2 0xFFFF 0xFFFFFFEB 0xFFFFFFE1
-// 3 0x0000 0xFFFFFFF5 0xFFFFFFEB
-// 4 0x0001 0xFFFFFFFF 0xFFFFFFF5
-// 5 0x0002 0x0000000A 0xFFFFFFFF
-// 6 MISSING--0x0003------0x00000014----0x0000000A--MISSING
-// 7 0x0004 0x0000001E 0x00000014
-// 8 0x0005 0x00000028 0x0000001E
-// 9 0x0006 0x00000032 0x00000028
-TEST(PacketBuffer, ExtractOrderRedundancy) {
- PacketBuffer buffer(100, 1000); // 100 packets, 1000 bytes.
- const uint32_t ts_increment = 10; // Samples per packet.
- const uint16_t start_seq_no = 0xFFFF - 2; // Wraps after 3 packets.
- const uint32_t start_ts = 0xFFFFFFFF -
- 4 * ts_increment; // Wraps after 5 packets.
- const uint8_t primary_pt = 0;
- const uint8_t secondary_pt = 1;
- PacketGenerator gen(start_seq_no, start_ts, primary_pt, ts_increment);
- // Insert secondary payloads too. (Simulating RED.)
- PacketGenerator red_gen(start_seq_no + 1, start_ts, secondary_pt,
- ts_increment);
-
- // Insert 9 small packets (skip one).
- for (int i = 0; i < 10; ++i) {
- const int payload_len = 10;
- if (i == 6) {
- // Skip this packet.
- gen.SkipPacket();
- red_gen.SkipPacket();
- continue;
- }
- // Primary payload.
- Packet* packet = gen.NextPacket(payload_len);
- EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
- if (i >= 1) {
- // Secondary payload.
- packet = red_gen.NextPacket(payload_len);
- packet->primary = false;
- EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
- }
- }
- EXPECT_EQ(17, buffer.NumPacketsInBuffer()); // 9 primary + 8 secondary
-
- uint16_t current_seq_no = start_seq_no;
- uint32_t current_ts = start_ts;
-
- for (int i = 0; i < 10; ++i) {
- // Extract packets.
- int drop_count = 0;
- Packet* packet = buffer.GetNextPacket(&drop_count);
- ASSERT_FALSE(packet == NULL);
- if (i == 6) {
- // Special case for the dropped primary payload.
- // Expect secondary payload, and one step higher sequence number.
- EXPECT_EQ(current_seq_no + 1, packet->header.sequenceNumber);
- EXPECT_EQ(current_ts, packet->header.timestamp);
- EXPECT_FALSE(packet->primary);
- EXPECT_EQ(1, packet->header.payloadType);
- EXPECT_EQ(0, drop_count);
- } else {
- EXPECT_EQ(current_seq_no, packet->header.sequenceNumber);
- EXPECT_EQ(current_ts, packet->header.timestamp);
- EXPECT_TRUE(packet->primary);
- EXPECT_EQ(0, packet->header.payloadType);
- if (i == 5 || i == 9) {
- // No duplicate TS for dropped packet or for last primary payload.
- EXPECT_EQ(0, drop_count);
- } else {
- EXPECT_EQ(1, drop_count);
- }
- }
- ++current_seq_no;
- current_ts += ts_increment;
- delete [] packet->payload;
- delete packet;
- }
-}
-
-TEST(PacketBuffer, DiscardPackets) {
- PacketBuffer buffer(100, 1000); // 100 packets, 1000 bytes.
- const uint16_t start_seq_no = 17;
- const uint32_t start_ts = 4711;
- const uint32_t ts_increment = 10;
- PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
- PacketList list;
- const int payload_len = 10;
-
- // Insert 10 small packets.
- for (int i = 0; i < 10; ++i) {
- Packet* packet = gen.NextPacket(payload_len);
- buffer.InsertPacket(packet);
- }
- EXPECT_EQ(10, buffer.NumPacketsInBuffer());
- EXPECT_EQ(10 * payload_len, buffer.current_memory_bytes());
-
- // Discard them one by one and make sure that the right packets are at the
- // front of the buffer.
- uint32_t current_ts = start_ts;
- for (int i = 0; i < 10; ++i) {
- uint32_t ts;
- EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&ts));
- EXPECT_EQ(current_ts, ts);
- EXPECT_EQ(PacketBuffer::kOK, buffer.DiscardNextPacket());
- current_ts += ts_increment;
- }
- EXPECT_TRUE(buffer.Empty());
-}
-
-TEST(PacketBuffer, Reordering) {
- PacketBuffer buffer(100, 1000); // 100 packets, 1000 bytes.
- const uint16_t start_seq_no = 17;
- const uint32_t start_ts = 4711;
- const uint32_t ts_increment = 10;
- PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
- const int payload_len = 10;
-
- // Generate 10 small packets and insert them into a PacketList. Insert every
- // odd packet to the front, and every even packet to the back, thus creating
- // a (rather strange) reordering.
- PacketList list;
- for (int i = 0; i < 10; ++i) {
- Packet* packet = gen.NextPacket(payload_len);
- if (i % 2) {
- list.push_front(packet);
- } else {
- list.push_back(packet);
- }
- }
-
- MockDecoderDatabase decoder_database;
- EXPECT_CALL(decoder_database, IsComfortNoise(0))
- .WillRepeatedly(Return(false));
- EXPECT_CALL(decoder_database, IsDtmf(0))
- .WillRepeatedly(Return(false));
- uint8_t current_pt = 0xFF;
- uint8_t current_cng_pt = 0xFF;
-
- EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacketList(&list,
- decoder_database,
- &current_pt,
- &current_cng_pt));
- EXPECT_EQ(10, buffer.NumPacketsInBuffer());
- EXPECT_EQ(10 * payload_len, buffer.current_memory_bytes());
-
- // Extract them and make sure that they come out in the right order.
- uint32_t current_ts = start_ts;
- for (int i = 0; i < 10; ++i) {
- Packet* packet = buffer.GetNextPacket(NULL);
- ASSERT_FALSE(packet == NULL);
- EXPECT_EQ(current_ts, packet->header.timestamp);
- current_ts += ts_increment;
- delete [] packet->payload;
- delete packet;
- }
- EXPECT_TRUE(buffer.Empty());
-
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
-}
-
-TEST(PacketBuffer, Failures) {
- const uint16_t start_seq_no = 17;
- const uint32_t start_ts = 4711;
- const uint32_t ts_increment = 10;
- int payload_len = 100;
- PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
-
- PacketBuffer* buffer = new PacketBuffer(0, 1000); // 0 packets, 1000 bytes.
- Packet* packet = gen.NextPacket(payload_len);
- EXPECT_EQ(PacketBuffer::kOversizePacket, buffer->InsertPacket(packet));
- delete buffer;
-
- buffer = new PacketBuffer(100, 10); // 100 packets, 10 bytes.
- packet = gen.NextPacket(payload_len);
- EXPECT_EQ(PacketBuffer::kOversizePacket, buffer->InsertPacket(packet));
- delete buffer;
-
- buffer = new PacketBuffer(100, 10000); // 100 packets, 10000 bytes.
- packet = NULL;
- EXPECT_EQ(PacketBuffer::kInvalidPacket, buffer->InsertPacket(packet));
- packet = gen.NextPacket(payload_len);
- delete [] packet->payload;
- packet->payload = NULL;
- EXPECT_EQ(PacketBuffer::kInvalidPacket, buffer->InsertPacket(packet));
- // Packet is deleted by the PacketBuffer.
-
- // Buffer should still be empty. Test all empty-checks.
- uint32_t temp_ts;
- EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer->NextTimestamp(&temp_ts));
- EXPECT_EQ(PacketBuffer::kBufferEmpty,
- buffer->NextHigherTimestamp(0, &temp_ts));
- EXPECT_EQ(NULL, buffer->NextRtpHeader());
- EXPECT_EQ(NULL, buffer->GetNextPacket(NULL));
- EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer->DiscardNextPacket());
- EXPECT_EQ(0, buffer->DiscardOldPackets(0)); // 0 packets discarded.
-
- // Insert one packet to make the buffer non-empty.
- packet = gen.NextPacket(payload_len);
- EXPECT_EQ(PacketBuffer::kOK, buffer->InsertPacket(packet));
- EXPECT_EQ(PacketBuffer::kInvalidPointer, buffer->NextTimestamp(NULL));
- EXPECT_EQ(PacketBuffer::kInvalidPointer,
- buffer->NextHigherTimestamp(0, NULL));
- delete buffer;
-
- // Insert packet list of three packets, where the second packet has an invalid
- // payload. Expect first packet to be inserted, and the remaining two to be
- // discarded.
- buffer = new PacketBuffer(100, 1000); // 100 packets, 1000 bytes.
- PacketList list;
- list.push_back(gen.NextPacket(payload_len)); // Valid packet.
- packet = gen.NextPacket(payload_len);
- delete [] packet->payload;
- packet->payload = NULL; // Invalid.
- list.push_back(packet);
- list.push_back(gen.NextPacket(payload_len)); // Valid packet.
- MockDecoderDatabase decoder_database;
- EXPECT_CALL(decoder_database, IsComfortNoise(0))
- .WillRepeatedly(Return(false));
- EXPECT_CALL(decoder_database, IsDtmf(0))
- .WillRepeatedly(Return(false));
- uint8_t current_pt = 0xFF;
- uint8_t current_cng_pt = 0xFF;
- EXPECT_EQ(PacketBuffer::kInvalidPacket,
- buffer->InsertPacketList(&list,
- decoder_database,
- &current_pt,
- &current_cng_pt));
- EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
- EXPECT_EQ(1, buffer->NumPacketsInBuffer());
- delete buffer;
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
-}
-
-// Test the packet comparison operators.
-// operator< should return true if the first packet "goes before" the second.
-TEST(PacketBuffer, ComparePackets) {
- PacketGenerator gen(0, 0, 0, 10);
- Packet* a = gen.NextPacket(10); // SN = 0, TS = 0.
- Packet* b = gen.NextPacket(10); // SN = 1, TS = 10.
- EXPECT_FALSE(*a == *b);
- EXPECT_TRUE(*a != *b);
- EXPECT_TRUE(*a < *b);
- EXPECT_FALSE(*a > *b);
- EXPECT_TRUE(*a <= *b);
- EXPECT_FALSE(*a >= *b);
-
- // Testing wrap-around case; 'a' is earlier but has a larger timestamp value.
- a->header.timestamp = 0xFFFFFFFF - 10;
- EXPECT_FALSE(*a == *b);
- EXPECT_TRUE(*a != *b);
- EXPECT_TRUE(*a < *b);
- EXPECT_FALSE(*a > *b);
- EXPECT_TRUE(*a <= *b);
- EXPECT_FALSE(*a >= *b);
-
- // Test equal packets.
- EXPECT_TRUE(*a == *a);
- EXPECT_FALSE(*a != *a);
- EXPECT_FALSE(*a < *a);
- EXPECT_FALSE(*a > *a);
- EXPECT_TRUE(*a <= *a);
- EXPECT_TRUE(*a >= *a);
-
- // Test equal timestamps but different sequence numbers (0 and 1).
- a->header.timestamp = b->header.timestamp;
- EXPECT_FALSE(*a == *b);
- EXPECT_TRUE(*a != *b);
- EXPECT_TRUE(*a < *b);
- EXPECT_FALSE(*a > *b);
- EXPECT_TRUE(*a <= *b);
- EXPECT_FALSE(*a >= *b);
-
- // Test equal timestamps but different sequence numbers (0xFFFF and 1).
- a->header.sequenceNumber = 0xFFFF;
- EXPECT_FALSE(*a == *b);
- EXPECT_TRUE(*a != *b);
- EXPECT_TRUE(*a < *b);
- EXPECT_FALSE(*a > *b);
- EXPECT_TRUE(*a <= *b);
- EXPECT_FALSE(*a >= *b);
-
- // Test equal timestamps and sequence numbers, but only 'b' is primary.
- a->header.sequenceNumber = b->header.sequenceNumber;
- a->primary = false;
- b->primary = true;
- EXPECT_FALSE(*a == *b);
- EXPECT_TRUE(*a != *b);
- EXPECT_FALSE(*a < *b);
- EXPECT_TRUE(*a > *b);
- EXPECT_FALSE(*a <= *b);
- EXPECT_TRUE(*a >= *b);
-
- delete [] a->payload;
- delete a;
- delete [] b->payload;
- delete b;
-}
-
-// Test the DeleteFirstPacket and DeleteAllPackets methods.
-TEST(PacketBuffer, DeleteAllPackets) {
- PacketGenerator gen(0, 0, 0, 10);
- PacketList list;
- const int payload_len = 10;
-
- // Insert 10 small packets.
- for (int i = 0; i < 10; ++i) {
- Packet* packet = gen.NextPacket(payload_len);
- list.push_back(packet);
- }
- EXPECT_TRUE(PacketBuffer::DeleteFirstPacket(&list));
- EXPECT_EQ(9u, list.size());
- PacketBuffer::DeleteAllPackets(&list);
- EXPECT_TRUE(list.empty());
- EXPECT_FALSE(PacketBuffer::DeleteFirstPacket(&list));
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.cc
deleted file mode 100644
index 56039a57ec6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.cc
+++ /dev/null
@@ -1,372 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"
-
-#include <assert.h>
-
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-
-namespace webrtc {
-
-// The method loops through a list of packets {A, B, C, ...}. Each packet is
-// split into its corresponding RED payloads, {A1, A2, ...}, which are
-// temporarily held in the list |new_packets|.
-// When the first packet in |packet_list| has been processed, the original
-// packet is replaced by the new ones in |new_packets|, so that |packet_list|
-// becomes: {A1, A2, ..., B, C, ...}. The method then continues with B and C,
-// until all the original packets have been replaced by their split payloads.
-int PayloadSplitter::SplitRed(PacketList* packet_list) {
- int ret = kOK;
- PacketList::iterator it = packet_list->begin();
- while (it != packet_list->end()) {
- PacketList new_packets; // An empty list to store the split packets in.
- Packet* red_packet = (*it);
- assert(red_packet->payload);
- uint8_t* payload_ptr = red_packet->payload;
-
- // Read RED headers (according to RFC 2198):
- //
- // 0 1 2 3
- // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- // |F| block PT | timestamp offset | block length |
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- // Last RED header:
- // 0 1 2 3 4 5 6 7
- // +-+-+-+-+-+-+-+-+
- // |0| Block PT |
- // +-+-+-+-+-+-+-+-+
-
- bool last_block = false;
- int sum_length = 0;
- while (!last_block) {
- Packet* new_packet = new Packet;
- new_packet->header = red_packet->header;
- // Check the F bit. If F == 0, this was the last block.
- last_block = ((*payload_ptr & 0x80) == 0);
- // Bits 1 through 7 are payload type.
- new_packet->header.payloadType = payload_ptr[0] & 0x7F;
- if (last_block) {
- // No more header data to read.
- ++sum_length; // Account for RED header size of 1 byte.
- new_packet->payload_length = red_packet->payload_length - sum_length;
- new_packet->primary = true; // Last block is always primary.
- payload_ptr += 1; // Advance to first payload byte.
- } else {
- // Bits 8 through 21 are timestamp offset.
- int timestamp_offset = (payload_ptr[1] << 6) +
- ((payload_ptr[2] & 0xFC) >> 2);
- new_packet->header.timestamp = red_packet->header.timestamp -
- timestamp_offset;
- // Bits 22 through 31 are payload length.
- new_packet->payload_length = ((payload_ptr[2] & 0x03) << 8) +
- payload_ptr[3];
- new_packet->primary = false;
- payload_ptr += 4; // Advance to next RED header.
- }
- sum_length += new_packet->payload_length;
- sum_length += 4; // Account for RED header size of 4 bytes.
- // Store in new list of packets.
- new_packets.push_back(new_packet);
- }
-
- // Populate the new packets with payload data.
- // |payload_ptr| now points at the first payload byte.
- PacketList::iterator new_it;
- for (new_it = new_packets.begin(); new_it != new_packets.end(); ++new_it) {
- int payload_length = (*new_it)->payload_length;
- if (payload_ptr + payload_length >
- red_packet->payload + red_packet->payload_length) {
- // The block lengths in the RED headers do not match the overall packet
- // length. Something is corrupt. Discard this and the remaining
- // payloads from this packet.
- while (new_it != new_packets.end()) {
- // Payload should not have been allocated yet.
- assert(!(*new_it)->payload);
- delete (*new_it);
- new_it = new_packets.erase(new_it);
- }
- ret = kRedLengthMismatch;
- break;
- }
- (*new_it)->payload = new uint8_t[payload_length];
- memcpy((*new_it)->payload, payload_ptr, payload_length);
- payload_ptr += payload_length;
- }
- // Reverse the order of the new packets, so that the primary payload is
- // always first.
- new_packets.reverse();
- // Insert new packets into original list, before the element pointed to by
- // iterator |it|.
- packet_list->splice(it, new_packets, new_packets.begin(),
- new_packets.end());
- // Delete old packet payload.
- delete [] (*it)->payload;
- delete (*it);
- // Remove |it| from the packet list. This operation effectively moves the
- // iterator |it| to the next packet in the list. Thus, we do not have to
- // increment it manually.
- it = packet_list->erase(it);
- }
- return ret;
-}
-
-int PayloadSplitter::CheckRedPayloads(PacketList* packet_list,
- const DecoderDatabase& decoder_database) {
- PacketList::iterator it = packet_list->begin();
- int main_payload_type = -1;
- int num_deleted_packets = 0;
- while (it != packet_list->end()) {
- uint8_t this_payload_type = (*it)->header.payloadType;
- if (!decoder_database.IsDtmf(this_payload_type) &&
- !decoder_database.IsComfortNoise(this_payload_type)) {
- if (main_payload_type == -1) {
- // This is the first packet in the list which is non-DTMF non-CNG.
- main_payload_type = this_payload_type;
- } else {
- if (this_payload_type != main_payload_type) {
- // We do not allow redundant payloads of a different type.
- // Discard this payload.
- delete [] (*it)->payload;
- delete (*it);
- // Remove |it| from the packet list. This operation effectively
- // moves the iterator |it| to the next packet in the list. Thus, we
- // do not have to increment it manually.
- it = packet_list->erase(it);
- ++num_deleted_packets;
- continue;
- }
- }
- }
- ++it;
- }
- return num_deleted_packets;
-}
-
-int PayloadSplitter::SplitAudio(PacketList* packet_list,
- const DecoderDatabase& decoder_database) {
- PacketList::iterator it = packet_list->begin();
- // Iterate through all packets in |packet_list|.
- while (it != packet_list->end()) {
- Packet* packet = (*it); // Just to make the notation more intuitive.
- // Get codec type for this payload.
- const DecoderDatabase::DecoderInfo* info =
- decoder_database.GetDecoderInfo(packet->header.payloadType);
- if (!info) {
- return kUnknownPayloadType;
- }
- // No splitting for a sync-packet.
- if (packet->sync_packet) {
- ++it;
- continue;
- }
- PacketList new_packets;
- switch (info->codec_type) {
- case kDecoderPCMu:
- case kDecoderPCMa: {
- // 8 bytes per ms; 8 timestamps per ms.
- SplitBySamples(packet, 8, 8, &new_packets);
- break;
- }
- case kDecoderPCMu_2ch:
- case kDecoderPCMa_2ch: {
- // 2 * 8 bytes per ms; 8 timestamps per ms.
- SplitBySamples(packet, 2 * 8, 8, &new_packets);
- break;
- }
- case kDecoderG722: {
- // 8 bytes per ms; 16 timestamps per ms.
- SplitBySamples(packet, 8, 16, &new_packets);
- break;
- }
- case kDecoderPCM16B: {
- // 16 bytes per ms; 8 timestamps per ms.
- SplitBySamples(packet, 16, 8, &new_packets);
- break;
- }
- case kDecoderPCM16Bwb: {
- // 32 bytes per ms; 16 timestamps per ms.
- SplitBySamples(packet, 32, 16, &new_packets);
- break;
- }
- case kDecoderPCM16Bswb32kHz: {
- // 64 bytes per ms; 32 timestamps per ms.
- SplitBySamples(packet, 64, 32, &new_packets);
- break;
- }
- case kDecoderPCM16Bswb48kHz: {
- // 96 bytes per ms; 48 timestamps per ms.
- SplitBySamples(packet, 96, 48, &new_packets);
- break;
- }
- case kDecoderPCM16B_2ch: {
- // 2 * 16 bytes per ms; 8 timestamps per ms.
- SplitBySamples(packet, 2 * 16, 8, &new_packets);
- break;
- }
- case kDecoderPCM16Bwb_2ch: {
- // 2 * 32 bytes per ms; 16 timestamps per ms.
- SplitBySamples(packet, 2 * 32, 16, &new_packets);
- break;
- }
- case kDecoderPCM16Bswb32kHz_2ch: {
- // 2 * 64 bytes per ms; 32 timestamps per ms.
- SplitBySamples(packet, 2 * 64, 32, &new_packets);
- break;
- }
- case kDecoderPCM16Bswb48kHz_2ch: {
- // 2 * 96 bytes per ms; 48 timestamps per ms.
- SplitBySamples(packet, 2 * 96, 48, &new_packets);
- break;
- }
- case kDecoderPCM16B_5ch: {
- // 5 * 16 bytes per ms; 8 timestamps per ms.
- SplitBySamples(packet, 5 * 16, 8, &new_packets);
- break;
- }
- case kDecoderILBC: {
- int bytes_per_frame;
- int timestamps_per_frame;
- if (packet->payload_length >= 950) {
- return kTooLargePayload;
- } else if (packet->payload_length % 38 == 0) {
- // 20 ms frames.
- bytes_per_frame = 38;
- timestamps_per_frame = 160;
- } else if (packet->payload_length % 50 == 0) {
- // 30 ms frames.
- bytes_per_frame = 50;
- timestamps_per_frame = 240;
- } else {
- return kFrameSplitError;
- }
- int ret = SplitByFrames(packet, bytes_per_frame, timestamps_per_frame,
- &new_packets);
- if (ret < 0) {
- return ret;
- } else if (ret == kNoSplit) {
- // Do not split at all. Simply advance to the next packet in the list.
- ++it;
- // We do not have any new packets to insert, and should not delete the
- // old one. Skip the code after the switch case, and jump straight to
- // the next packet in the while loop.
- continue;
- }
- break;
- }
- default: {
- // Do not split at all. Simply advance to the next packet in the list.
- ++it;
- // We do not have any new packets to insert, and should not delete the
- // old one. Skip the code after the switch case, and jump straight to
- // the next packet in the while loop.
- continue;
- }
- }
- // Insert new packets into original list, before the element pointed to by
- // iterator |it|.
- packet_list->splice(it, new_packets, new_packets.begin(),
- new_packets.end());
- // Delete old packet payload.
- delete [] (*it)->payload;
- delete (*it);
- // Remove |it| from the packet list. This operation effectively moves the
- // iterator |it| to the next packet in the list. Thus, we do not have to
- // increment it manually.
- it = packet_list->erase(it);
- }
- return 0;
-}
-
-void PayloadSplitter::SplitBySamples(const Packet* packet,
- int bytes_per_ms,
- int timestamps_per_ms,
- PacketList* new_packets) {
- assert(packet);
- assert(new_packets);
-
- int split_size_bytes = packet->payload_length;
-
- // Find a "chunk size" >= 20 ms and < 40 ms.
- int min_chunk_size = bytes_per_ms * 20;
- // Reduce the split size by half as long as |split_size_bytes| is at least
- // twice the minimum chunk size (so that the resulting size is at least as
- // large as the minimum chunk size).
- while (split_size_bytes >= 2 * min_chunk_size) {
- split_size_bytes >>= 1;
- }
- int timestamps_per_chunk =
- split_size_bytes * timestamps_per_ms / bytes_per_ms;
- uint32_t timestamp = packet->header.timestamp;
-
- uint8_t* payload_ptr = packet->payload;
- int len = packet->payload_length;
- while (len >= (2 * split_size_bytes)) {
- Packet* new_packet = new Packet;
- new_packet->payload_length = split_size_bytes;
- new_packet->header = packet->header;
- new_packet->header.timestamp = timestamp;
- timestamp += timestamps_per_chunk;
- new_packet->primary = packet->primary;
- new_packet->payload = new uint8_t[split_size_bytes];
- memcpy(new_packet->payload, payload_ptr, split_size_bytes);
- payload_ptr += split_size_bytes;
- new_packets->push_back(new_packet);
- len -= split_size_bytes;
- }
-
- if (len > 0) {
- Packet* new_packet = new Packet;
- new_packet->payload_length = len;
- new_packet->header = packet->header;
- new_packet->header.timestamp = timestamp;
- new_packet->primary = packet->primary;
- new_packet->payload = new uint8_t[len];
- memcpy(new_packet->payload, payload_ptr, len);
- new_packets->push_back(new_packet);
- }
-}
-
-int PayloadSplitter::SplitByFrames(const Packet* packet,
- int bytes_per_frame,
- int timestamps_per_frame,
- PacketList* new_packets) {
- if (packet->payload_length % bytes_per_frame != 0) {
- return kFrameSplitError;
- }
-
- int num_frames = packet->payload_length / bytes_per_frame;
- if (num_frames == 1) {
- // Special case. Do not split the payload.
- return kNoSplit;
- }
-
- uint32_t timestamp = packet->header.timestamp;
- uint8_t* payload_ptr = packet->payload;
- int len = packet->payload_length;
- while (len > 0) {
- assert(len >= bytes_per_frame);
- Packet* new_packet = new Packet;
- new_packet->payload_length = bytes_per_frame;
- new_packet->header = packet->header;
- new_packet->header.timestamp = timestamp;
- timestamp += timestamps_per_frame;
- new_packet->primary = packet->primary;
- new_packet->payload = new uint8_t[bytes_per_frame];
- memcpy(new_packet->payload, payload_ptr, bytes_per_frame);
- payload_ptr += bytes_per_frame;
- new_packets->push_back(new_packet);
- len -= bytes_per_frame;
- }
- return kOK;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.h
deleted file mode 100644
index 3768c2f2b1c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PAYLOAD_SPLITTER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PAYLOAD_SPLITTER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class DecoderDatabase;
-
-// This class handles splitting of payloads into smaller parts.
-// The class does not have any member variables, and the methods could have
-// been made static. The reason for not making them static is testability.
-// With this design, the splitting functionality can be mocked during testing
-// of the NetEqImpl class.
-class PayloadSplitter {
- public:
- enum SplitterReturnCodes {
- kOK = 0,
- kNoSplit = 1,
- kTooLargePayload = -1,
- kFrameSplitError = -2,
- kUnknownPayloadType = -3,
- kRedLengthMismatch = -4
- };
-
- PayloadSplitter() {}
-
- virtual ~PayloadSplitter() {}
-
- // Splits each packet in |packet_list| into its separate RED payloads. Each
- // RED payload is packetized into a Packet. The original elements in
- // |packet_list| are properly deleted, and replaced by the new packets.
- // Note that all packets in |packet_list| must be RED payloads, i.e., have
- // RED headers according to RFC 2198 at the very beginning of the payload.
- // Returns kOK or an error.
- virtual int SplitRed(PacketList* packet_list);
-
- // Checks all packets in |packet_list|. Packets that are DTMF events or
- // comfort noise payloads are always kept. Apart from those, only a single
- // payload type is accepted; any packet with a different payload type is
- // discarded. Returns the number of discarded packets.
- virtual int CheckRedPayloads(PacketList* packet_list,
- const DecoderDatabase& decoder_database);
-
- // Iterates through |packet_list| and, if possible, splits each audio payload
- // into suitable size chunks. The result is written back to |packet_list| as
- // new packets. The decoder database is needed to get information about which
- // payload type each packet contains.
- virtual int SplitAudio(PacketList* packet_list,
- const DecoderDatabase& decoder_database);
-
- private:
- // Splits the payload in |packet|. The payload is assumed to be from a
- // sample-based codec.
- virtual void SplitBySamples(const Packet* packet,
- int bytes_per_ms,
- int timestamps_per_ms,
- PacketList* new_packets);
-
- // Splits the payload in |packet|. The payload will be split into chunks of
- // size |bytes_per_frame|, each corresponding to |timestamps_per_frame|
- // RTP timestamps.
- virtual int SplitByFrames(const Packet* packet,
- int bytes_per_frame,
- int timestamps_per_frame,
- PacketList* new_packets);
-
- DISALLOW_COPY_AND_ASSIGN(PayloadSplitter);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PAYLOAD_SPLITTER_H_
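
A minimal usage sketch of the PayloadSplitter API declared in the header above,
assuming a populated DecoderDatabase and an incoming PacketList (the glue
function and its name are hypothetical; only the splitter calls come from this
header):

#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
#include "webrtc/modules/audio_coding/neteq4/packet.h"
#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"

namespace webrtc {

// One possible ordering of the three splitter stages on an incoming list.
int SplitIncomingPackets(PacketList* packet_list,
                         const DecoderDatabase& decoder_database) {
  PayloadSplitter splitter;
  // Expand each RED packet into one Packet per redundant block.
  int ret = splitter.SplitRed(packet_list);
  if (ret != PayloadSplitter::kOK) {
    return ret;
  }
  // Drop redundant blocks whose payload type differs from the primary speech
  // payload; DTMF and comfort noise packets are kept.
  splitter.CheckRedPayloads(packet_list, decoder_database);
  // Finally, split the remaining audio payloads into decoder-sized chunks.
  return splitter.SplitAudio(packet_list, decoder_database);
}

}  // namespace webrtc
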
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter_unittest.cc
deleted file mode 100644
index 5a7a6ca3e4e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter_unittest.cc
+++ /dev/null
@@ -1,694 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for PayloadSplitter class.
-
-#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"
-
-#include <assert.h>
-
-#include <utility> // pair
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-
-using ::testing::Return;
-using ::testing::ReturnNull;
-
-namespace webrtc {
-
-static const int kRedPayloadType = 100;
-static const int kPayloadLength = 10;
-static const int kRedHeaderLength = 4; // 4 bytes RED header.
-static const uint16_t kSequenceNumber = 0;
-static const uint32_t kBaseTimestamp = 0x12345678;
-
-// RED headers (according to RFC 2198):
-//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// |F| block PT | timestamp offset | block length |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//
-// Last RED header:
-// 0 1 2 3 4 5 6 7
-// +-+-+-+-+-+-+-+-+
-// |0| Block PT |
-// +-+-+-+-+-+-+-+-+
-
-// Creates a RED packet with |num_payloads| payloads, with payload types given
-// by the values in the array |payload_types| (which must be of length
-// |num_payloads|). Each redundant payload is |timestamp_offset| samples
-// "behind" the previous payload.
-Packet* CreateRedPayload(int num_payloads,
- uint8_t* payload_types,
- int timestamp_offset) {
- Packet* packet = new Packet;
- packet->header.payloadType = kRedPayloadType;
- packet->header.timestamp = kBaseTimestamp;
- packet->header.sequenceNumber = kSequenceNumber;
- packet->payload_length = (kPayloadLength + 1) +
- (num_payloads - 1) * (kPayloadLength + kRedHeaderLength);
- uint8_t* payload = new uint8_t[packet->payload_length];
- uint8_t* payload_ptr = payload;
- for (int i = 0; i < num_payloads; ++i) {
- // Write the RED headers.
- if (i == num_payloads - 1) {
- // Special case for last payload.
- *payload_ptr = payload_types[i] & 0x7F; // F = 0;
- ++payload_ptr;
- break;
- }
- *payload_ptr = payload_types[i] & 0x7F;
- // Not the last block; set F = 1.
- *payload_ptr |= 0x80;
- ++payload_ptr;
- int this_offset = (num_payloads - i - 1) * timestamp_offset;
- *payload_ptr = this_offset >> 6;
- ++payload_ptr;
- assert(kPayloadLength <= 1023); // Max length described by 10 bits.
- *payload_ptr = ((this_offset & 0x3F) << 2) | (kPayloadLength >> 8);
- ++payload_ptr;
- *payload_ptr = kPayloadLength & 0xFF;
- ++payload_ptr;
- }
- for (int i = 0; i < num_payloads; ++i) {
- // Write |i| to all bytes in each payload.
- memset(payload_ptr, i, kPayloadLength);
- payload_ptr += kPayloadLength;
- }
- packet->payload = payload;
- return packet;
-}
-
-// Create a packet with all payload bytes set to |payload_value|.
-Packet* CreatePacket(uint8_t payload_type, int payload_length,
- uint8_t payload_value) {
- Packet* packet = new Packet;
- packet->header.payloadType = payload_type;
- packet->header.timestamp = kBaseTimestamp;
- packet->header.sequenceNumber = kSequenceNumber;
- packet->payload_length = payload_length;
- uint8_t* payload = new uint8_t[packet->payload_length];
- memset(payload, payload_value, payload_length);
- packet->payload = payload;
- return packet;
-}
-
-// Checks that |packet| has the attributes given in the remaining parameters.
-void VerifyPacket(const Packet* packet,
- int payload_length,
- uint8_t payload_type,
- uint16_t sequence_number,
- uint32_t timestamp,
- uint8_t payload_value,
- bool primary = true) {
- EXPECT_EQ(payload_length, packet->payload_length);
- EXPECT_EQ(payload_type, packet->header.payloadType);
- EXPECT_EQ(sequence_number, packet->header.sequenceNumber);
- EXPECT_EQ(timestamp, packet->header.timestamp);
- EXPECT_EQ(primary, packet->primary);
- ASSERT_FALSE(packet->payload == NULL);
- for (int i = 0; i < packet->payload_length; ++i) {
- EXPECT_EQ(payload_value, packet->payload[i]);
- }
-}
-
-// Start of test definitions.
-
-TEST(PayloadSplitter, CreateAndDestroy) {
- PayloadSplitter* splitter = new PayloadSplitter;
- delete splitter;
-}
-
-// Packet A is split into A1 and A2.
-TEST(RedPayloadSplitter, OnePacketTwoPayloads) {
- uint8_t payload_types[] = {0, 0};
- const int kTimestampOffset = 160;
- Packet* packet = CreateRedPayload(2, payload_types, kTimestampOffset);
- PacketList packet_list;
- packet_list.push_back(packet);
- PayloadSplitter splitter;
- EXPECT_EQ(PayloadSplitter::kOK, splitter.SplitRed(&packet_list));
- ASSERT_EQ(2u, packet_list.size());
- // Check the first packet. The first packet in the list should always be the
- // primary payload.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[1], kSequenceNumber,
- kBaseTimestamp, 1, true);
- delete [] packet->payload;
- delete packet;
- packet_list.pop_front();
- // Check second packet.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber,
- kBaseTimestamp - kTimestampOffset, 0, false);
- delete [] packet->payload;
- delete packet;
-}
-
-// Packets A and B are not split at all. Only the RED header in each packet is
-// removed.
-TEST(RedPayloadSplitter, TwoPacketsOnePayload) {
- uint8_t payload_types[] = {0};
- const int kTimestampOffset = 160;
- // Create first packet, with a single RED payload.
- Packet* packet = CreateRedPayload(1, payload_types, kTimestampOffset);
- PacketList packet_list;
- packet_list.push_back(packet);
- // Create second packet, with a single RED payload.
- packet = CreateRedPayload(1, payload_types, kTimestampOffset);
- // Manually change timestamp and sequence number of second packet.
- packet->header.timestamp += kTimestampOffset;
- packet->header.sequenceNumber++;
- packet_list.push_back(packet);
- PayloadSplitter splitter;
- EXPECT_EQ(PayloadSplitter::kOK, splitter.SplitRed(&packet_list));
- ASSERT_EQ(2u, packet_list.size());
- // Check first packet.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber,
- kBaseTimestamp, 0, true);
- delete [] packet->payload;
- delete packet;
- packet_list.pop_front();
- // Check second packet.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber + 1,
- kBaseTimestamp + kTimestampOffset, 0, true);
- delete [] packet->payload;
- delete packet;
-}
-
-// Packets A and B are split into packets A1, A2, A3, B1, B2, B3, with
-// attributes as follows:
-//
-// A1* A2 A3 B1* B2 B3
-// Payload type 0 1 2 0 1 2
-// Timestamp b b-o b-2o b+o b b-o
-// Sequence number 0 0 0 1 1 1
-//
-// b = kBaseTimestamp, o = kTimestampOffset, * = primary.
-TEST(RedPayloadSplitter, TwoPacketsThreePayloads) {
- uint8_t payload_types[] = {2, 1, 0}; // Primary is the last one.
- const int kTimestampOffset = 160;
- // Create first packet, with 3 RED payloads.
- Packet* packet = CreateRedPayload(3, payload_types, kTimestampOffset);
- PacketList packet_list;
- packet_list.push_back(packet);
- // Create the second packet, also with 3 RED payloads.
- packet = CreateRedPayload(3, payload_types, kTimestampOffset);
- // Manually change timestamp and sequence number of second packet.
- packet->header.timestamp += kTimestampOffset;
- packet->header.sequenceNumber++;
- packet_list.push_back(packet);
- PayloadSplitter splitter;
- EXPECT_EQ(PayloadSplitter::kOK, splitter.SplitRed(&packet_list));
- ASSERT_EQ(6u, packet_list.size());
- // Check first packet, A1.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[2], kSequenceNumber,
- kBaseTimestamp, 2, true);
- delete [] packet->payload;
- delete packet;
- packet_list.pop_front();
- // Check second packet, A2.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[1], kSequenceNumber,
- kBaseTimestamp - kTimestampOffset, 1, false);
- delete [] packet->payload;
- delete packet;
- packet_list.pop_front();
- // Check third packet, A3.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber,
- kBaseTimestamp - 2 * kTimestampOffset, 0, false);
- delete [] packet->payload;
- delete packet;
- packet_list.pop_front();
- // Check fourth packet, B1.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[2], kSequenceNumber + 1,
- kBaseTimestamp + kTimestampOffset, 2, true);
- delete [] packet->payload;
- delete packet;
- packet_list.pop_front();
- // Check fifth packet, B2.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[1], kSequenceNumber + 1,
- kBaseTimestamp, 1, false);
- delete [] packet->payload;
- delete packet;
- packet_list.pop_front();
- // Check sixth packet, B3.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber + 1,
- kBaseTimestamp - kTimestampOffset, 0, false);
- delete [] packet->payload;
- delete packet;
-}
-
-// Creates a list with 4 packets with these payload types:
-// 0 = CNGnb
-// 1 = PCMu
-// 2 = DTMF (AVT)
-// 3 = iLBC
-// We expect the method CheckRedPayloads to discard the iLBC packet, since it
-// is a non-CNG, non-DTMF payload of a different type than the first speech
-// payload found in the list (which is PCMu).
-TEST(RedPayloadSplitter, CheckRedPayloads) {
- PacketList packet_list;
- for (int i = 0; i <= 3; ++i) {
- // Create packet with payload type |i|, payload length 10 bytes, all 0.
- Packet* packet = CreatePacket(i, 10, 0);
- packet_list.push_back(packet);
- }
-
- // Use a real DecoderDatabase object here instead of a mock, since it is
- // easier to just register the payload types and let the actual implementation
- // do its job.
- DecoderDatabase decoder_database;
- decoder_database.RegisterPayload(0, kDecoderCNGnb);
- decoder_database.RegisterPayload(1, kDecoderPCMu);
- decoder_database.RegisterPayload(2, kDecoderAVT);
- decoder_database.RegisterPayload(3, kDecoderILBC);
-
- PayloadSplitter splitter;
- splitter.CheckRedPayloads(&packet_list, decoder_database);
-
- ASSERT_EQ(3u, packet_list.size()); // Should have dropped the last packet.
- // Verify packets. The loop verifies that payload types 0, 1, and 2 are in the
- // list.
- for (int i = 0; i <= 2; ++i) {
- Packet* packet = packet_list.front();
- VerifyPacket(packet, 10, i, kSequenceNumber, kBaseTimestamp, 0, true);
- delete [] packet->payload;
- delete packet;
- packet_list.pop_front();
- }
- EXPECT_TRUE(packet_list.empty());
-}
-
-// Packet A is split into A1, A2 and A3. But the length parameter is off, so
-// the last payloads should be discarded.
-TEST(RedPayloadSplitter, WrongPayloadLength) {
- uint8_t payload_types[] = {0, 0, 0};
- const int kTimestampOffset = 160;
- Packet* packet = CreateRedPayload(3, payload_types, kTimestampOffset);
- // Manually tamper with the payload length of the packet.
- // This is one byte too short for the second payload (out of three).
- // We expect only the first payload to be returned.
- packet->payload_length -= kPayloadLength + 1;
- PacketList packet_list;
- packet_list.push_back(packet);
- PayloadSplitter splitter;
- EXPECT_EQ(PayloadSplitter::kRedLengthMismatch,
- splitter.SplitRed(&packet_list));
- ASSERT_EQ(1u, packet_list.size());
- // Check first packet.
- packet = packet_list.front();
- VerifyPacket(packet, kPayloadLength, payload_types[0], kSequenceNumber,
- kBaseTimestamp - 2 * kTimestampOffset, 0, false);
- delete [] packet->payload;
- delete packet;
- packet_list.pop_front();
-}
-
-// Test that iSAC, iSAC-swb, RED, DTMF, CNG, and "Arbitrary" payloads do not
-// get split.
-TEST(AudioPayloadSplitter, NonSplittable) {
- // Set up packets with different RTP payload types. The actual values do not
- // matter, since we are mocking the decoder database anyway.
- PacketList packet_list;
- for (int i = 0; i < 6; ++i) {
- // Let the payload type be |i|, and the payload value 10 * |i|.
- packet_list.push_back(CreatePacket(i, kPayloadLength, 10 * i));
- }
-
- MockDecoderDatabase decoder_database;
- // Tell the mock decoder database to return DecoderInfo structs with different
- // codec types.
- // Use scoped pointers to avoid having to delete them later.
- scoped_ptr<DecoderDatabase::DecoderInfo> info0(
- new DecoderDatabase::DecoderInfo(kDecoderISAC, 16000, NULL, false));
- EXPECT_CALL(decoder_database, GetDecoderInfo(0))
- .WillRepeatedly(Return(info0.get()));
- scoped_ptr<DecoderDatabase::DecoderInfo> info1(
- new DecoderDatabase::DecoderInfo(kDecoderISACswb, 32000, NULL, false));
- EXPECT_CALL(decoder_database, GetDecoderInfo(1))
- .WillRepeatedly(Return(info1.get()));
- scoped_ptr<DecoderDatabase::DecoderInfo> info2(
- new DecoderDatabase::DecoderInfo(kDecoderRED, 8000, NULL, false));
- EXPECT_CALL(decoder_database, GetDecoderInfo(2))
- .WillRepeatedly(Return(info2.get()));
- scoped_ptr<DecoderDatabase::DecoderInfo> info3(
- new DecoderDatabase::DecoderInfo(kDecoderAVT, 8000, NULL, false));
- EXPECT_CALL(decoder_database, GetDecoderInfo(3))
- .WillRepeatedly(Return(info3.get()));
- scoped_ptr<DecoderDatabase::DecoderInfo> info4(
- new DecoderDatabase::DecoderInfo(kDecoderCNGnb, 8000, NULL, false));
- EXPECT_CALL(decoder_database, GetDecoderInfo(4))
- .WillRepeatedly(Return(info4.get()));
- scoped_ptr<DecoderDatabase::DecoderInfo> info5(
- new DecoderDatabase::DecoderInfo(kDecoderArbitrary, 8000, NULL, false));
- EXPECT_CALL(decoder_database, GetDecoderInfo(5))
- .WillRepeatedly(Return(info5.get()));
-
- PayloadSplitter splitter;
- EXPECT_EQ(0, splitter.SplitAudio(&packet_list, decoder_database));
- EXPECT_EQ(6u, packet_list.size());
-
- // Check that all payloads are intact.
- uint8_t payload_type = 0;
- PacketList::iterator it = packet_list.begin();
- while (it != packet_list.end()) {
- VerifyPacket((*it), kPayloadLength, payload_type, kSequenceNumber,
- kBaseTimestamp, 10 * payload_type);
- ++payload_type;
- delete [] (*it)->payload;
- delete (*it);
- it = packet_list.erase(it);
- }
-
- // The destructor is called when decoder_database goes out of scope.
- EXPECT_CALL(decoder_database, Die());
-}
-
-// Test unknown payload type.
-TEST(AudioPayloadSplitter, UnknownPayloadType) {
- PacketList packet_list;
- static const uint8_t kPayloadType = 17; // Just a random number.
- int kPayloadLengthBytes = 4711; // Random number.
- packet_list.push_back(CreatePacket(kPayloadType, kPayloadLengthBytes, 0));
-
- MockDecoderDatabase decoder_database;
- // Tell the mock decoder database to return NULL when asked for decoder info.
- // This signals that the decoder database does not recognize the payload type.
- EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
- .WillRepeatedly(ReturnNull());
-
- PayloadSplitter splitter;
- EXPECT_EQ(PayloadSplitter::kUnknownPayloadType,
- splitter.SplitAudio(&packet_list, decoder_database));
- EXPECT_EQ(1u, packet_list.size());
-
- // Delete the packets and payloads to avoid having the test leak memory.
- PacketList::iterator it = packet_list.begin();
- while (it != packet_list.end()) {
- delete [] (*it)->payload;
- delete (*it);
- it = packet_list.erase(it);
- }
-
- // The destructor is called when decoder_database goes out of scope.
- EXPECT_CALL(decoder_database, Die());
-}
-
-class SplitBySamplesTest : public ::testing::TestWithParam<NetEqDecoder> {
- protected:
- virtual void SetUp() {
- decoder_type_ = GetParam();
- switch (decoder_type_) {
- case kDecoderPCMu:
- case kDecoderPCMa:
- bytes_per_ms_ = 8;
- samples_per_ms_ = 8;
- break;
- case kDecoderPCMu_2ch:
- case kDecoderPCMa_2ch:
- bytes_per_ms_ = 2 * 8;
- samples_per_ms_ = 8;
- break;
- case kDecoderG722:
- bytes_per_ms_ = 8;
- samples_per_ms_ = 16;
- break;
- case kDecoderPCM16B:
- bytes_per_ms_ = 16;
- samples_per_ms_ = 8;
- break;
- case kDecoderPCM16Bwb:
- bytes_per_ms_ = 32;
- samples_per_ms_ = 16;
- break;
- case kDecoderPCM16Bswb32kHz:
- bytes_per_ms_ = 64;
- samples_per_ms_ = 32;
- break;
- case kDecoderPCM16Bswb48kHz:
- bytes_per_ms_ = 96;
- samples_per_ms_ = 48;
- break;
- case kDecoderPCM16B_2ch:
- bytes_per_ms_ = 2 * 16;
- samples_per_ms_ = 8;
- break;
- case kDecoderPCM16Bwb_2ch:
- bytes_per_ms_ = 2 * 32;
- samples_per_ms_ = 16;
- break;
- case kDecoderPCM16Bswb32kHz_2ch:
- bytes_per_ms_ = 2 * 64;
- samples_per_ms_ = 32;
- break;
- case kDecoderPCM16Bswb48kHz_2ch:
- bytes_per_ms_ = 2 * 96;
- samples_per_ms_ = 48;
- break;
- case kDecoderPCM16B_5ch:
- bytes_per_ms_ = 5 * 16;
- samples_per_ms_ = 8;
- break;
- default:
- assert(false);
- break;
- }
- }
- int bytes_per_ms_;
- int samples_per_ms_;
- NetEqDecoder decoder_type_;
-};
-
-// Test splitting sample-based payloads.
-TEST_P(SplitBySamplesTest, PayloadSizes) {
- PacketList packet_list;
- static const uint8_t kPayloadType = 17; // Just a random number.
- for (int payload_size_ms = 10; payload_size_ms <= 60; payload_size_ms += 10) {
- // The payload values are set to be the same as the payload size in ms, so
- // that one can tell from which packet each split payload originated.
- int payload_size_bytes = payload_size_ms * bytes_per_ms_;
- packet_list.push_back(CreatePacket(kPayloadType, payload_size_bytes,
- payload_size_ms));
- }
-
- MockDecoderDatabase decoder_database;
- // Tell the mock decoder database to return a DecoderInfo struct with the
- // codec type under test.
- // A scoped pointer is used to avoid having to delete it manually later.
- // (Sample rate is set to 8000 Hz, but does not matter.)
- scoped_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(decoder_type_, 8000, NULL, false));
- EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
- .WillRepeatedly(Return(info.get()));
-
- PayloadSplitter splitter;
- EXPECT_EQ(0, splitter.SplitAudio(&packet_list, decoder_database));
- // The payloads are expected to be split as follows:
- // 10 ms -> 10 ms
- // 20 ms -> 20 ms
- // 30 ms -> 30 ms
- // 40 ms -> 20 + 20 ms
- // 50 ms -> 25 + 25 ms
- // 60 ms -> 30 + 30 ms
- int expected_size_ms[] = {10, 20, 30, 20, 20, 25, 25, 30, 30};
- int expected_payload_value[] = {10, 20, 30, 40, 40, 50, 50, 60, 60};
- int expected_timestamp_offset_ms[] = {0, 0, 0, 0, 20, 0, 25, 0, 30};
- size_t expected_num_packets =
- sizeof(expected_size_ms) / sizeof(expected_size_ms[0]);
- EXPECT_EQ(expected_num_packets, packet_list.size());
-
- PacketList::iterator it = packet_list.begin();
- int i = 0;
- while (it != packet_list.end()) {
- int length_bytes = expected_size_ms[i] * bytes_per_ms_;
- uint32_t expected_timestamp = kBaseTimestamp +
- expected_timestamp_offset_ms[i] * samples_per_ms_;
- VerifyPacket((*it), length_bytes, kPayloadType, kSequenceNumber,
- expected_timestamp, expected_payload_value[i]);
- delete [] (*it)->payload;
- delete (*it);
- it = packet_list.erase(it);
- ++i;
- }
-
- // The destructor is called when decoder_database goes out of scope.
- EXPECT_CALL(decoder_database, Die());
-}
-
-INSTANTIATE_TEST_CASE_P(
- PayloadSplitter, SplitBySamplesTest,
- ::testing::Values(kDecoderPCMu, kDecoderPCMa, kDecoderPCMu_2ch,
- kDecoderPCMa_2ch, kDecoderG722, kDecoderPCM16B,
- kDecoderPCM16Bwb, kDecoderPCM16Bswb32kHz,
- kDecoderPCM16Bswb48kHz, kDecoderPCM16B_2ch,
- kDecoderPCM16Bwb_2ch, kDecoderPCM16Bswb32kHz_2ch,
- kDecoderPCM16Bswb48kHz_2ch, kDecoderPCM16B_5ch));
-
-class SplitIlbcTest : public ::testing::TestWithParam<std::pair<int, int> > {
- protected:
- virtual void SetUp() {
- const std::pair<int, int> parameters = GetParam();
- num_frames_ = parameters.first;
- frame_length_ms_ = parameters.second;
- frame_length_bytes_ = (frame_length_ms_ == 20) ? 38 : 50;
- }
- size_t num_frames_;
- int frame_length_ms_;
- int frame_length_bytes_;
-};
-
-// Test splitting iLBC payloads into separate frames.
-TEST_P(SplitIlbcTest, NumFrames) {
- PacketList packet_list;
- static const uint8_t kPayloadType = 17; // Just a random number.
- const int frame_length_samples = frame_length_ms_ * 8;
- int payload_length_bytes = frame_length_bytes_ * num_frames_;
- Packet* packet = CreatePacket(kPayloadType, payload_length_bytes, 0);
- // Fill payload with increasing integers {0, 1, 2, ...}.
- for (int i = 0; i < packet->payload_length; ++i) {
- packet->payload[i] = static_cast<uint8_t>(i);
- }
- packet_list.push_back(packet);
-
- MockDecoderDatabase decoder_database;
- // Tell the mock decoder database to return a DecoderInfo struct with the
- // iLBC codec type.
- // A scoped pointer is used to avoid having to delete it manually later.
- scoped_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(kDecoderILBC, 8000, NULL, false));
- EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
- .WillRepeatedly(Return(info.get()));
-
- PayloadSplitter splitter;
- EXPECT_EQ(0, splitter.SplitAudio(&packet_list, decoder_database));
- EXPECT_EQ(num_frames_, packet_list.size());
-
- PacketList::iterator it = packet_list.begin();
- int frame_num = 0;
- uint8_t payload_value = 0;
- while (it != packet_list.end()) {
- Packet* packet = (*it);
- EXPECT_EQ(kBaseTimestamp + frame_length_samples * frame_num,
- packet->header.timestamp);
- EXPECT_EQ(frame_length_bytes_, packet->payload_length);
- EXPECT_EQ(kPayloadType, packet->header.payloadType);
- EXPECT_EQ(kSequenceNumber, packet->header.sequenceNumber);
- EXPECT_EQ(true, packet->primary);
- ASSERT_FALSE(packet->payload == NULL);
- for (int i = 0; i < packet->payload_length; ++i) {
- EXPECT_EQ(payload_value, packet->payload[i]);
- ++payload_value;
- }
- delete [] (*it)->payload;
- delete (*it);
- it = packet_list.erase(it);
- ++frame_num;
- }
-
- // The destructor is called when decoder_database goes out of scope.
- EXPECT_CALL(decoder_database, Die());
-}
-
-// Test 1 through 5 frames of 20 and 30 ms size.
-// Also test the maximum number of frames in one packet for 20 and 30 ms.
-// The maximum is defined by the largest payload length that can be uniquely
-// resolved to a frame size of either 38 bytes (20 ms) or 50 bytes (30 ms).
-INSTANTIATE_TEST_CASE_P(
- PayloadSplitter, SplitIlbcTest,
- ::testing::Values(std::pair<int, int>(1, 20), // 1 frame, 20 ms.
- std::pair<int, int>(2, 20), // 2 frames, 20 ms.
- std::pair<int, int>(3, 20), // And so on.
- std::pair<int, int>(4, 20),
- std::pair<int, int>(5, 20),
- std::pair<int, int>(24, 20),
- std::pair<int, int>(1, 30),
- std::pair<int, int>(2, 30),
- std::pair<int, int>(3, 30),
- std::pair<int, int>(4, 30),
- std::pair<int, int>(5, 30),
- std::pair<int, int>(18, 30)));
-
-// Test too large payload size.
-TEST(IlbcPayloadSplitter, TooLargePayload) {
- PacketList packet_list;
- static const uint8_t kPayloadType = 17; // Just a random number.
- int kPayloadLengthBytes = 950;
- Packet* packet = CreatePacket(kPayloadType, kPayloadLengthBytes, 0);
- packet_list.push_back(packet);
-
- MockDecoderDatabase decoder_database;
- scoped_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(kDecoderILBC, 8000, NULL, false));
- EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
- .WillRepeatedly(Return(info.get()));
-
- PayloadSplitter splitter;
- EXPECT_EQ(PayloadSplitter::kTooLargePayload,
- splitter.SplitAudio(&packet_list, decoder_database));
- EXPECT_EQ(1u, packet_list.size());
-
- // Delete the packets and payloads to avoid having the test leak memory.
- PacketList::iterator it = packet_list.begin();
- while (it != packet_list.end()) {
- delete [] (*it)->payload;
- delete (*it);
- it = packet_list.erase(it);
- }
-
- // The destructor is called when decoder_database goes out of scope.
- EXPECT_CALL(decoder_database, Die());
-}
-
-// Payload not an integer number of frames.
-TEST(IlbcPayloadSplitter, UnevenPayload) {
- PacketList packet_list;
- static const uint8_t kPayloadType = 17; // Just a random number.
- int kPayloadLengthBytes = 39; // Not an even number of frames.
- Packet* packet = CreatePacket(kPayloadType, kPayloadLengthBytes, 0);
- packet_list.push_back(packet);
-
- MockDecoderDatabase decoder_database;
- scoped_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(kDecoderILBC, 8000, NULL, false));
- EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
- .WillRepeatedly(Return(info.get()));
-
- PayloadSplitter splitter;
- EXPECT_EQ(PayloadSplitter::kFrameSplitError,
- splitter.SplitAudio(&packet_list, decoder_database));
- EXPECT_EQ(1u, packet_list.size());
-
- // Delete the packets and payloads to avoid having the test leak memory.
- PacketList::iterator it = packet_list.begin();
- while (it != packet_list.end()) {
- delete [] (*it)->payload;
- delete (*it);
- it = packet_list.erase(it);
- }
-
- // The destructor is called when decoder_database goes out of scope.
- EXPECT_CALL(decoder_database, Die());
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.cc
deleted file mode 100644
index c3d5c7edd71..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.cc
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/post_decode_vad.h"
-
-namespace webrtc {
-
-PostDecodeVad::~PostDecodeVad() {
- if (vad_instance_)
- WebRtcVad_Free(vad_instance_);
-}
-
-void PostDecodeVad::Enable() {
- if (!vad_instance_) {
- // Create the instance.
- if (WebRtcVad_Create(&vad_instance_) != 0) {
- // Failed to create instance.
- Disable();
- return;
- }
- }
- Init();
- enabled_ = true;
-}
-
-void PostDecodeVad::Disable() {
- enabled_ = false;
- running_ = false;
-}
-
-void PostDecodeVad::Init() {
- running_ = false;
- if (vad_instance_) {
- WebRtcVad_Init(vad_instance_);
- WebRtcVad_set_mode(vad_instance_, kVadMode);
- running_ = true;
- }
-}
-
-void PostDecodeVad::Update(int16_t* signal, int length,
- AudioDecoder::SpeechType speech_type,
- bool sid_frame,
- int fs_hz) {
- if (!vad_instance_ || !enabled_) {
- return;
- }
-
- if (speech_type == AudioDecoder::kComfortNoise || sid_frame ||
- fs_hz > 16000) {
- // TODO(hlundin): Remove restriction on fs_hz.
- running_ = false;
- active_speech_ = true;
- sid_interval_counter_ = 0;
- } else if (!running_) {
- ++sid_interval_counter_;
- }
-
- if (sid_interval_counter_ >= kVadAutoEnable) {
- Init();
- }
-
- if (length > 0 && running_) {
- int vad_sample_index = 0;
- active_speech_ = false;
- // Loop through frame sizes 30, 20, and 10 ms.
- for (int vad_frame_size_ms = 30; vad_frame_size_ms >= 10;
- vad_frame_size_ms -= 10) {
- int vad_frame_size_samples = vad_frame_size_ms * fs_hz / 1000;
- while (length - vad_sample_index >= vad_frame_size_samples) {
- int vad_return = WebRtcVad_Process(
- vad_instance_, fs_hz, &signal[vad_sample_index],
- vad_frame_size_samples);
- active_speech_ |= (vad_return == 1);
- vad_sample_index += vad_frame_size_samples;
- }
- }
- }
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.h
deleted file mode 100644
index eb197d9ef24..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_POST_DECODE_VAD_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_POST_DECODE_VAD_H_
-
-#include <string> // size_t
-
-#include "webrtc/common_audio/vad/include/webrtc_vad.h"
-#include "webrtc/common_types.h" // NULL
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class PostDecodeVad {
- public:
- PostDecodeVad()
- : enabled_(false),
- running_(false),
- active_speech_(true),
- sid_interval_counter_(0),
- vad_instance_(NULL) {
- }
-
- virtual ~PostDecodeVad();
-
- // Enables post-decode VAD.
- void Enable();
-
- // Disables post-decode VAD.
- void Disable();
-
- // Initializes post-decode VAD.
- void Init();
-
- // Updates post-decode VAD with the audio data in |signal| having |length|
- // samples. The data is of type |speech_type|, at the sample rate |fs_hz|.
- void Update(int16_t* signal, int length,
- AudioDecoder::SpeechType speech_type, bool sid_frame, int fs_hz);
-
- // Accessors.
- bool enabled() const { return enabled_; }
- bool running() const { return running_; }
- bool active_speech() const { return active_speech_; }
-
- private:
- static const int kVadMode = 0; // Sets aggressiveness to "Normal".
- // Number of Update() calls without CNG/SID before re-enabling VAD.
- static const int kVadAutoEnable = 3000;
-
- bool enabled_;
- bool running_;
- bool active_speech_;
- int sid_interval_counter_;
- ::VadInst* vad_instance_;
-
- DISALLOW_COPY_AND_ASSIGN(PostDecodeVad);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_POST_DECODE_VAD_H_
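
A minimal sketch of driving the PostDecodeVad API declared in the header above
on a block of decoded audio. The buffer, its length, and the sample rate are
hypothetical, and AudioDecoder::kSpeech is assumed to be the non-CNG
SpeechType value:

#include "webrtc/modules/audio_coding/neteq4/post_decode_vad.h"

namespace webrtc {

bool HasActiveSpeech(int16_t* decoded, int length_samples, int fs_hz) {
  PostDecodeVad vad;
  vad.Enable();  // Creates and initializes the underlying WebRtcVad instance.
  // Feed one block of decoded speech; not an SID frame in this sketch.
  vad.Update(decoded, length_samples, AudioDecoder::kSpeech,
             false /* sid_frame */, fs_hz);
  return vad.active_speech();
}

}  // namespace webrtc
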
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad_unittest.cc
deleted file mode 100644
index a4d9da8e166..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad_unittest.cc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for PostDecodeVad class.
-
-#include "webrtc/modules/audio_coding/neteq4/post_decode_vad.h"
-
-#include "gtest/gtest.h"
-
-namespace webrtc {
-
-TEST(PostDecodeVad, CreateAndDestroy) {
- PostDecodeVad vad;
-}
-
-// TODO(hlundin): Write more tests.
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc
deleted file mode 100644
index ac787eb1d66..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/preemptive_expand.h"
-
-#include <algorithm> // min, max
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-
-namespace webrtc {
-
-PreemptiveExpand::ReturnCodes PreemptiveExpand::Process(
- const int16_t* input,
- int input_length,
- int old_data_length,
- AudioMultiVector* output,
- int16_t* length_change_samples) {
- old_data_length_per_channel_ = old_data_length;
- // Input length must be (almost) 30 ms.
- // Also, the new part must be at least |overlap_samples_| elements.
- static const int k15ms = 120; // 15 ms = 120 samples at 8 kHz sample rate.
- if (num_channels_ == 0 ||
- input_length / num_channels_ < (2 * k15ms - 1) * fs_mult_ ||
- old_data_length >= input_length / num_channels_ - overlap_samples_) {
- // Length of input data too short to do preemptive expand. Simply move all
- // data from input to output.
- output->PushBackInterleaved(input, input_length);
- return kError;
- }
- return TimeStretch::Process(input, input_length, output,
- length_change_samples);
-}
-
-void PreemptiveExpand::SetParametersForPassiveSpeech(size_t len,
- int16_t* best_correlation,
- int* peak_index) const {
- // When the signal does not contain any active speech, the correlation does
- // not matter. Simply set it to zero.
- *best_correlation = 0;
-
- // For low energy expansion, the new data can be less than 15 ms, but we must
- // ensure that |peak_index| does not exceed the length of the new data.
- *peak_index = std::min(*peak_index,
- static_cast<int>(len - old_data_length_per_channel_));
-}
-
-PreemptiveExpand::ReturnCodes PreemptiveExpand::CheckCriteriaAndStretch(
- const int16_t *input, size_t input_length, size_t peak_index,
- int16_t best_correlation, bool active_speech,
- AudioMultiVector* output) const {
- // Pre-calculate common multiplication with |fs_mult_|.
- // 120 corresponds to 15 ms.
- int fs_mult_120 = fs_mult_ * 120;
- assert(old_data_length_per_channel_ >= 0); // Make sure it's been set.
- // Check for strong correlation (>0.9 in Q14) and at least 15 ms new data,
- // or passive speech.
- if (((best_correlation > kCorrelationThreshold) &&
- (old_data_length_per_channel_ <= fs_mult_120)) ||
- !active_speech) {
- // Do the preemptive expand operation by overlap-add.
-
- // Set length of the first part, not to be modified.
- size_t unmodified_length = std::max(old_data_length_per_channel_,
- fs_mult_120);
- // Copy first part, including cross-fade region.
- output->PushBackInterleaved(
- input, (unmodified_length + peak_index) * num_channels_);
- // Copy the last |peak_index| samples up to 15 ms to |temp_vector|.
- AudioMultiVector temp_vector(num_channels_);
- temp_vector.PushBackInterleaved(
- &input[(unmodified_length - peak_index) * num_channels_],
- peak_index * num_channels_);
- // Cross-fade |temp_vector| onto the end of |output|.
- output->CrossFade(temp_vector, peak_index);
- // Copy the last unmodified part, 15 ms + pitch period until the end.
- output->PushBackInterleaved(
- &input[unmodified_length * num_channels_],
- input_length - unmodified_length * num_channels_);
-
- if (active_speech) {
- return kSuccess;
- } else {
- return kSuccessLowEnergy;
- }
- } else {
- // Preemptive expand not allowed. Simply move all data from input to output.
- output->PushBackInterleaved(input, input_length);
- return kNoStretch;
- }
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.h
deleted file mode 100644
index 4cd92cc0bb6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.h
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PREEMPTIVE_EXPAND_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PREEMPTIVE_EXPAND_H_
-
-#include <assert.h>
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/time_stretch.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class BackgroundNoise;
-
-// This class implements the PreemptiveExpand operation. Most of the work is
-// done in the base class TimeStretch, which is shared with the Accelerate
-// operation. In the PreemptiveExpand class, the operations that are specific to
-// PreemptiveExpand are implemented.
-class PreemptiveExpand : public TimeStretch {
- public:
- PreemptiveExpand(int sample_rate_hz, size_t num_channels,
- const BackgroundNoise& background_noise)
- : TimeStretch(sample_rate_hz, num_channels, background_noise),
- old_data_length_per_channel_(-1),
- overlap_samples_(5 * sample_rate_hz / 8000) {
- }
-
- virtual ~PreemptiveExpand() {}
-
- // This method performs the actual PreemptiveExpand operation. The samples are
- // read from |input|, of length |input_length| elements, and are written to
- // |output|. The number of samples added through time-stretching is provided
- // in the output |length_change_samples|. The method returns
- // the outcome of the operation as an enumerator value.
- ReturnCodes Process(const int16_t *pw16_decoded,
- int len,
- int old_data_len,
- AudioMultiVector* output,
- int16_t* length_change_samples);
-
- protected:
- // Sets the parameters |best_correlation| and |peak_index| to suitable
- // values when the signal contains no active speech.
- virtual void SetParametersForPassiveSpeech(size_t len,
- int16_t* w16_bestCorr,
- int* w16_bestIndex) const;
-
- // Checks the criteria for performing the time-stretching operation and,
- // if possible, performs the time-stretching.
- virtual ReturnCodes CheckCriteriaAndStretch(
- const int16_t *pw16_decoded, size_t len, size_t w16_bestIndex,
- int16_t w16_bestCorr, bool w16_VAD,
- AudioMultiVector* output) const;
-
- private:
- int old_data_length_per_channel_;
- int overlap_samples_;
-
- DISALLOW_COPY_AND_ASSIGN(PreemptiveExpand);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PREEMPTIVE_EXPAND_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.cc
deleted file mode 100644
index 823909f1352..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.cc
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-
-namespace webrtc {
-
-const int16_t RandomVector::kRandomTable[RandomVector::kRandomTableSize] = {
- 2680, 5532, 441, 5520, 16170, -5146, -1024, -8733, 3115, 9598, -10380,
- -4959, -1280, -21716, 7133, -1522, 13458, -3902, 2789, -675, 3441, 5016,
- -13599, -4003, -2739, 3922, -7209, 13352, -11617, -7241, 12905, -2314, 5426,
- 10121, -9702, 11207, -13542, 1373, 816, -5934, -12504, 4798, 1811, 4112,
- -613, 201, -10367, -2960, -2419, 3442, 4299, -6116, -6092, 1552, -1650,
- -480, -1237, 18720, -11858, -8303, -8212, 865, -2890, -16968, 12052, -5845,
- -5912, 9777, -5665, -6294, 5426, -4737, -6335, 1652, 761, 3832, 641, -8552,
- -9084, -5753, 8146, 12156, -4915, 15086, -1231, -1869, 11749, -9319, -6403,
- 11407, 6232, -1683, 24340, -11166, 4017, -10448, 3153, -2936, 6212, 2891,
- -866, -404, -4807, -2324, -1917, -2388, -6470, -3895, -10300, 5323, -5403,
- 2205, 4640, 7022, -21186, -6244, -882, -10031, -3395, -12885, 7155, -5339,
- 5079, -2645, -9515, 6622, 14651, 15852, 359, 122, 8246, -3502, -6696, -3679,
- -13535, -1409, -704, -7403, -4007, 1798, 279, -420, -12796, -14219, 1141,
- 3359, 11434, 7049, -6684, -7473, 14283, -4115, -9123, -8969, 4152, 4117,
- 13792, 5742, 16168, 8661, -1609, -6095, 1881, 14380, -5588, 6758, -6425,
- -22969, -7269, 7031, 1119, -1611, -5850, -11281, 3559, -8952, -10146, -4667,
- -16251, -1538, 2062, -1012, -13073, 227, -3142, -5265, 20, 5770, -7559,
- 4740, -4819, 992, -8208, -7130, -4652, 6725, 7369, -1036, 13144, -1588,
- -5304, -2344, -449, -5705, -8894, 5205, -17904, -11188, -1022, 4852, 10101,
- -5255, -4200, -752, 7941, -1543, 5959, 14719, 13346, 17045, -15605, -1678,
- -1600, -9230, 68, 23348, 1172, 7750, 11212, -18227, 9956, 4161, 883, 3947,
- 4341, 1014, -4889, -2603, 1246, -5630, -3596, -870, -1298, 2784, -3317,
- -6612, -20541, 4166, 4181, -8625, 3562, 12890, 4761, 3205, -12259, -8579 };
-
-void RandomVector::Reset() {
- seed_ = 777;
- seed_increment_ = 1;
-}
-
-void RandomVector::Generate(size_t length, int16_t* output) {
- for (size_t i = 0; i < length; i++) {
- seed_ += seed_increment_;
- size_t position = seed_ & (kRandomTableSize - 1);
- output[i] = kRandomTable[position];
- }
-}
-
-void RandomVector::IncreaseSeedIncrement(int16_t increase_by) {
- seed_increment_+= increase_by;
- seed_increment_ &= kRandomTableSize - 1;
-}
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.h
deleted file mode 100644
index 64cfe0d9dd0..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RANDOM_VECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RANDOM_VECTOR_H_
-
-#include <string.h> // size_t
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// This class generates pseudo-random samples.
-class RandomVector {
- public:
- static const int kRandomTableSize = 256;
- static const int16_t kRandomTable[kRandomTableSize];
-
- RandomVector()
- : seed_(777),
- seed_increment_(1) {
- }
-
- void Reset();
-
- void Generate(size_t length, int16_t* output);
-
- void IncreaseSeedIncrement(int16_t increase_by);
-
- // Accessors and mutators.
- int16_t seed_increment() { return seed_increment_; }
- void set_seed_increment(int16_t value) { seed_increment_ = value; }
-
- private:
- uint32_t seed_;
- int16_t seed_increment_;
-
- DISALLOW_COPY_AND_ASSIGN(RandomVector);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RANDOM_VECTOR_H_
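
For reference, the deleted RandomVector above is a table-lookup noise generator: a 256-entry table of pre-computed samples is walked with a variable stride, and because the table size is a power of two the index wraps with a bitmask. A minimal standalone sketch of that stepping logic, assuming a shortened illustrative table (the real kRandomTable holds 256 entries):

    #include <cstddef>
    #include <cstdint>

    // Illustrative stand-in for RandomVector::kRandomTable; the real table
    // holds 256 pre-computed int16_t samples.
    static const int16_t kTable[8] = {2680, 5532, 441, 5520,
                                      16170, -5146, -1024, -8733};
    static const size_t kTableSize = sizeof(kTable) / sizeof(kTable[0]);

    int main() {
      uint32_t seed = 777;         // Same starting seed as RandomVector.
      int16_t seed_increment = 1;  // Stride; grows via IncreaseSeedIncrement().
      int16_t output[16];
      for (size_t i = 0; i < sizeof(output) / sizeof(output[0]); ++i) {
        seed += seed_increment;
        // Power-of-two table size lets the index wrap with a cheap bitmask.
        output[i] = kTable[seed & (kTableSize - 1)];
      }
      return 0;
    }
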
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector_unittest.cc
deleted file mode 100644
index 83193e2a7bb..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector_unittest.cc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for RandomVector class.
-
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-
-#include "gtest/gtest.h"
-
-namespace webrtc {
-
-TEST(RandomVector, CreateAndDestroy) {
- RandomVector random_vector;
-}
-
-// TODO(hlundin): Write more tests.
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.cc
deleted file mode 100644
index f9dcf449198..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.cc
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/rtcp.h"
-
-#include <algorithm>
-#include <string.h>
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/interface/module_common_types.h"
-
-namespace webrtc {
-
-void Rtcp::Init(uint16_t start_sequence_number) {
- cycles_ = 0;
- max_seq_no_ = start_sequence_number;
- base_seq_no_ = start_sequence_number;
- received_packets_ = 0;
- received_packets_prior_ = 0;
- expected_prior_ = 0;
- jitter_ = 0;
- transit_ = 0;
-}
-
-void Rtcp::Update(const RTPHeader& rtp_header, uint32_t receive_timestamp) {
- // Update number of received packets, and largest packet number received.
- received_packets_++;
- int16_t sn_diff = rtp_header.sequenceNumber - max_seq_no_;
- if (sn_diff >= 0) {
- if (rtp_header.sequenceNumber < max_seq_no_) {
- // Wrap-around detected.
- cycles_++;
- }
- max_seq_no_ = rtp_header.sequenceNumber;
- }
-
- // Calculate jitter according to RFC 3550, and update previous timestamps.
- // Note that the value in |jitter_| is in Q4.
- if (received_packets_ > 1) {
- int32_t ts_diff = receive_timestamp - (rtp_header.timestamp - transit_);
- ts_diff = WEBRTC_SPL_ABS_W32(ts_diff);
- int32_t jitter_diff = (ts_diff << 4) - jitter_;
- // Calculate 15 * jitter_ / 16 + jitter_diff / 16 (with proper rounding).
- jitter_ = jitter_ + ((jitter_diff + 8) >> 4);
- }
- transit_ = rtp_header.timestamp - receive_timestamp;
-}
-
-void Rtcp::GetStatistics(bool no_reset, RtcpStatistics* stats) {
- // Extended highest sequence number received.
- stats->extended_max_sequence_number =
- (static_cast<int>(cycles_) << 16) + max_seq_no_;
-
- // Calculate expected number of packets and compare it with the number of
- // packets that were actually received. The cumulative number of lost packets
- // can be extracted.
- uint32_t expected_packets =
- stats->extended_max_sequence_number - base_seq_no_ + 1;
- if (received_packets_ == 0) {
- // No packets received, assume none lost.
- stats->cumulative_lost = 0;
- } else if (expected_packets > received_packets_) {
- stats->cumulative_lost = expected_packets - received_packets_;
- if (stats->cumulative_lost > 0xFFFFFF) {
- stats->cumulative_lost = 0xFFFFFF;
- }
- } else {
- stats->cumulative_lost = 0;
- }
-
- // Fraction lost since last report.
- uint32_t expected_since_last = expected_packets - expected_prior_;
- uint32_t received_since_last = received_packets_ - received_packets_prior_;
- if (!no_reset) {
- expected_prior_ = expected_packets;
- received_packets_prior_ = received_packets_;
- }
- int32_t lost = expected_since_last - received_since_last;
- if (expected_since_last == 0 || lost <= 0 || received_packets_ == 0) {
- stats->fraction_lost = 0;
- } else {
- stats->fraction_lost = std::min(0xFFU, (lost << 8) / expected_since_last);
- }
-
- stats->jitter = jitter_ >> 4; // Scaling from Q4.
-}
-
-} // namespace webrtc
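
The jitter update in Rtcp::Update() above is the RFC 3550 interarrival-jitter estimator, J := J + (|D| - J) / 16, kept in Q4 fixed point so that the division by 16 becomes a rounded right-shift. A floating-point sketch of the same recursion (UpdateJitter is a hypothetical helper, not part of the deleted file):

    #include <cmath>
    #include <cstdint>

    // RFC 3550 interarrival jitter, floating-point counterpart of the Q4
    // update in Rtcp::Update(). |transit| is the per-packet difference between
    // RTP timestamp and arrival time; D is the change in transit between
    // consecutive packets.
    double UpdateJitter(double jitter, int32_t prev_transit, int32_t curr_transit) {
      const double d = std::abs(static_cast<double>(curr_transit) - prev_transit);
      return jitter + (d - jitter) / 16.0;
    }
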
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.h
deleted file mode 100644
index 00cbbd15836..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RTCP_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RTCP_H_
-
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declaration.
-struct RTPHeader;
-
-class Rtcp {
- public:
- Rtcp() {
- Init(0);
- }
-
- ~Rtcp() {}
-
- // Resets the RTCP statistics, and sets the first received sequence number.
- void Init(uint16_t start_sequence_number);
-
- // Updates the RTCP statistics with a new received packet.
- void Update(const RTPHeader& rtp_header, uint32_t receive_timestamp);
-
- // Returns the current RTCP statistics. If |no_reset| is true, the statistics
- // are not reset, otherwise they are.
- void GetStatistics(bool no_reset, RtcpStatistics* stats);
-
- private:
- uint16_t cycles_; // The number of wrap-arounds for the sequence number.
- uint16_t max_seq_no_; // The maximum sequence number received. Starts over
- // from 0 after wrap-around.
- uint16_t base_seq_no_; // The sequence number of the first received packet.
- uint32_t received_packets_; // The number of packets that have been received.
- uint32_t received_packets_prior_; // Number of packets received when last
- // report was generated.
- uint32_t expected_prior_; // Expected number of packets, at the time of the
- // last report.
- uint32_t jitter_; // Current jitter value.
- int32_t transit_; // Clock difference for previous packet.
-
- DISALLOW_COPY_AND_ASSIGN(Rtcp);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RTCP_H_
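
GetStatistics() above reports fraction_lost in the usual RTCP 8-bit fixed-point form: the fraction of packets lost since the previous report, scaled by 256 and capped at 255. A minimal sketch of that conversion (FractionLostQ8 is a hypothetical helper for illustration):

    #include <algorithm>
    #include <cstdint>

    // Fraction of packets lost since the last report as an 8-bit fixed-point
    // value (0 = no loss, 255 ~ 100% loss), mirroring the capping done in
    // Rtcp::GetStatistics().
    uint8_t FractionLostQ8(uint32_t expected_since_last,
                           uint32_t received_since_last) {
      if (expected_since_last == 0 || received_since_last >= expected_since_last)
        return 0;
      const uint32_t lost = expected_since_last - received_since_last;
      return static_cast<uint8_t>(
          std::min<uint32_t>(255, (lost << 8) / expected_since_last));
    }
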
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.cc
deleted file mode 100644
index b6e9222d475..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.cc
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/statistics_calculator.h"
-
-#include <assert.h>
-#include <string.h> // memset
-
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-
-namespace webrtc {
-
-StatisticsCalculator::StatisticsCalculator()
- : preemptive_samples_(0),
- accelerate_samples_(0),
- added_zero_samples_(0),
- expanded_voice_samples_(0),
- expanded_noise_samples_(0),
- discarded_packets_(0),
- lost_timestamps_(0),
- last_report_timestamp_(0),
- len_waiting_times_(0),
- next_waiting_time_index_(0) {
- memset(waiting_times_, 0, kLenWaitingTimes * sizeof(waiting_times_[0]));
-}
-
-void StatisticsCalculator::Reset() {
- preemptive_samples_ = 0;
- accelerate_samples_ = 0;
- added_zero_samples_ = 0;
- expanded_voice_samples_ = 0;
- expanded_noise_samples_ = 0;
-}
-
-void StatisticsCalculator::ResetMcu() {
- discarded_packets_ = 0;
- lost_timestamps_ = 0;
- last_report_timestamp_ = 0;
-}
-
-void StatisticsCalculator::ResetWaitingTimeStatistics() {
- memset(waiting_times_, 0, kLenWaitingTimes * sizeof(waiting_times_[0]));
- len_waiting_times_ = 0;
- next_waiting_time_index_ = 0;
-}
-
-void StatisticsCalculator::ExpandedVoiceSamples(int num_samples) {
- expanded_voice_samples_ += num_samples;
-}
-
-void StatisticsCalculator::ExpandedNoiseSamples(int num_samples) {
- expanded_noise_samples_ += num_samples;
-}
-
-void StatisticsCalculator::PreemptiveExpandedSamples(int num_samples) {
- preemptive_samples_ += num_samples;
-}
-
-void StatisticsCalculator::AcceleratedSamples(int num_samples) {
- accelerate_samples_ += num_samples;
-}
-
-void StatisticsCalculator::AddZeros(int num_samples) {
- added_zero_samples_ += num_samples;
-}
-
-void StatisticsCalculator::PacketsDiscarded(int num_packets) {
- discarded_packets_ += num_packets;
-}
-
-void StatisticsCalculator::LostSamples(int num_samples) {
- lost_timestamps_ += num_samples;
-}
-
-void StatisticsCalculator::IncreaseCounter(int num_samples, int fs_hz) {
- last_report_timestamp_ += num_samples;
- if (last_report_timestamp_ >
- static_cast<uint32_t>(fs_hz * kMaxReportPeriod)) {
- lost_timestamps_ = 0;
- last_report_timestamp_ = 0;
- discarded_packets_ = 0;
- }
-}
-
-void StatisticsCalculator::StoreWaitingTime(int waiting_time_ms) {
- assert(next_waiting_time_index_ < kLenWaitingTimes);
- waiting_times_[next_waiting_time_index_] = waiting_time_ms;
- next_waiting_time_index_++;
- if (next_waiting_time_index_ >= kLenWaitingTimes) {
- next_waiting_time_index_ = 0;
- }
- if (len_waiting_times_ < kLenWaitingTimes) {
- len_waiting_times_++;
- }
-}
-
-void StatisticsCalculator::GetNetworkStatistics(
- int fs_hz,
- int num_samples_in_buffers,
- int samples_per_packet,
- const DelayManager& delay_manager,
- const DecisionLogic& decision_logic,
- NetEqNetworkStatistics *stats) {
- if (fs_hz <= 0 || !stats) {
- assert(false);
- return;
- }
-
- stats->added_zero_samples = added_zero_samples_;
- stats->current_buffer_size_ms = num_samples_in_buffers * 1000 / fs_hz;
- const int ms_per_packet = decision_logic.packet_length_samples() /
- (fs_hz / 1000);
- stats->preferred_buffer_size_ms = (delay_manager.TargetLevel() >> 8) *
- ms_per_packet;
- stats->jitter_peaks_found = delay_manager.PeakFound();
- stats->clockdrift_ppm = delay_manager.AverageIAT();
-
- stats->packet_loss_rate = CalculateQ14Ratio(lost_timestamps_,
- last_report_timestamp_);
-
- const unsigned discarded_samples = discarded_packets_ * samples_per_packet;
- stats->packet_discard_rate = CalculateQ14Ratio(discarded_samples,
- last_report_timestamp_);
-
- stats->accelerate_rate = CalculateQ14Ratio(accelerate_samples_,
- last_report_timestamp_);
-
- stats->preemptive_rate = CalculateQ14Ratio(preemptive_samples_,
- last_report_timestamp_);
-
- stats->expand_rate = CalculateQ14Ratio(expanded_voice_samples_ +
- expanded_noise_samples_,
- last_report_timestamp_);
-
- // Reset counters.
- ResetMcu();
- Reset();
-}
-
-void StatisticsCalculator::WaitingTimes(std::vector<int>* waiting_times) {
- if (!waiting_times) {
- return;
- }
- waiting_times->assign(waiting_times_, waiting_times_ + len_waiting_times_);
- ResetWaitingTimeStatistics();
-}
-
-int StatisticsCalculator::CalculateQ14Ratio(uint32_t numerator,
- uint32_t denominator) {
- if (numerator == 0) {
- return 0;
- } else if (numerator < denominator) {
- // Ratio must be smaller than 1 in Q14.
- assert((numerator << 14) / denominator < (1 << 14));
- return (numerator << 14) / denominator;
- } else {
-    // Cap the ratio at 1; numerator >= denominator probably indicates an error.
- return 1 << 14;
- }
-}
-
-} // namespace webrtc
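
All per-report rates produced above (packet_loss_rate, packet_discard_rate, accelerate_rate, preemptive_rate, expand_rate) share the Q14 convention of CalculateQ14Ratio(): a ratio in [0, 1] is represented as an integer in [0, 16384]. A small sketch restating that convention, with an illustrative call (RatioQ14 is a hypothetical name):

    #include <cstdint>

    // numerator / denominator in Q14, saturated at 1.0 (1 << 14), following
    // StatisticsCalculator::CalculateQ14Ratio().
    int RatioQ14(uint32_t numerator, uint32_t denominator) {
      if (numerator == 0)
        return 0;
      if (numerator < denominator)
        return static_cast<int>((numerator << 14) / denominator);
      return 1 << 14;  // Saturate; numerator >= denominator is not expected here.
    }

    // Example: 480 expanded samples out of 48000 played-out timestamps gives
    // RatioQ14(480, 48000) == 163, i.e. roughly 0.01 in Q14.
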
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.h
deleted file mode 100644
index 25f8a14bb9a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_STATISTICS_CALCULATOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_STATISTICS_CALCULATOR_H_
-
-#include <vector>
-
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class DecisionLogic;
-class DelayManager;
-
-// This class handles various network statistics in NetEq.
-class StatisticsCalculator {
- public:
- StatisticsCalculator();
-
- virtual ~StatisticsCalculator() {}
-
- // Resets most of the counters.
- void Reset();
-
- // Resets the counters that are not handled by Reset().
- void ResetMcu();
-
- // Resets the waiting time statistics.
- void ResetWaitingTimeStatistics();
-
- // Reports that |num_samples| samples were produced through expansion, and
-  // that the expansion produced something other than just noise samples.
- void ExpandedVoiceSamples(int num_samples);
-
- // Reports that |num_samples| samples were produced through expansion, and
- // that the expansion produced only noise samples.
- void ExpandedNoiseSamples(int num_samples);
-
- // Reports that |num_samples| samples were produced through preemptive
- // expansion.
- void PreemptiveExpandedSamples(int num_samples);
-
- // Reports that |num_samples| samples were removed through accelerate.
- void AcceleratedSamples(int num_samples);
-
- // Reports that |num_samples| zeros were inserted into the output.
- void AddZeros(int num_samples);
-
- // Reports that |num_packets| packets were discarded.
- void PacketsDiscarded(int num_packets);
-
- // Reports that |num_samples| were lost.
- void LostSamples(int num_samples);
-
-  // Increases the report interval counter by |num_samples| at a sample rate
- // of |fs_hz|.
- void IncreaseCounter(int num_samples, int fs_hz);
-
- // Stores new packet waiting time in waiting time statistics.
- void StoreWaitingTime(int waiting_time_ms);
-
- // Returns the current network statistics in |stats|. The current sample rate
- // is |fs_hz|, the total number of samples in packet buffer and sync buffer
- // yet to play out is |num_samples_in_buffers|, and the number of samples per
- // packet is |samples_per_packet|.
- void GetNetworkStatistics(int fs_hz,
- int num_samples_in_buffers,
- int samples_per_packet,
- const DelayManager& delay_manager,
- const DecisionLogic& decision_logic,
- NetEqNetworkStatistics *stats);
-
- void WaitingTimes(std::vector<int>* waiting_times);
-
- private:
- static const int kMaxReportPeriod = 60; // Seconds before auto-reset.
- static const int kLenWaitingTimes = 100;
-
- // Calculates numerator / denominator, and returns the value in Q14.
- static int CalculateQ14Ratio(uint32_t numerator, uint32_t denominator);
-
- uint32_t preemptive_samples_;
- uint32_t accelerate_samples_;
- int added_zero_samples_;
- uint32_t expanded_voice_samples_;
- uint32_t expanded_noise_samples_;
- int discarded_packets_;
- uint32_t lost_timestamps_;
- uint32_t last_report_timestamp_;
- int waiting_times_[kLenWaitingTimes]; // Used as a circular buffer.
- int len_waiting_times_;
- int next_waiting_time_index_;
-
- DISALLOW_COPY_AND_ASSIGN(StatisticsCalculator);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_STATISTICS_CALCULATOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.cc
deleted file mode 100644
index 75ee6ece082..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.cc
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-
-#include <algorithm> // Access to min.
-
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-
-namespace webrtc {
-
-size_t SyncBuffer::FutureLength() const {
- return Size() - next_index_;
-}
-
-void SyncBuffer::PushBack(const AudioMultiVector& append_this) {
- size_t samples_added = append_this.Size();
- AudioMultiVector::PushBack(append_this);
- AudioMultiVector::PopFront(samples_added);
- if (samples_added <= next_index_) {
- next_index_ -= samples_added;
- } else {
- // This means that we are pushing out future data that was never used.
-// assert(false);
- // TODO(hlundin): This assert must be disabled to support 60 ms frames.
- // This should not happen even for 60 ms frames, but it does. Investigate
- // why.
- next_index_ = 0;
- }
- dtmf_index_ -= std::min(dtmf_index_, samples_added);
-}
-
-void SyncBuffer::PushFrontZeros(size_t length) {
- InsertZerosAtIndex(length, 0);
-}
-
-void SyncBuffer::InsertZerosAtIndex(size_t length, size_t position) {
- position = std::min(position, Size());
- length = std::min(length, Size() - position);
- AudioMultiVector::PopBack(length);
- for (size_t channel = 0; channel < Channels(); ++channel) {
- channels_[channel]->InsertZerosAt(length, position);
- }
- if (next_index_ >= position) {
- // We are moving the |next_index_| sample.
- set_next_index(next_index_ + length); // Overflow handled by subfunction.
- }
- if (dtmf_index_ > 0 && dtmf_index_ >= position) {
- // We are moving the |dtmf_index_| sample.
- set_dtmf_index(dtmf_index_ + length); // Overflow handled by subfunction.
- }
-}
-
-void SyncBuffer::ReplaceAtIndex(const AudioMultiVector& insert_this,
- size_t length,
- size_t position) {
- position = std::min(position, Size()); // Cap |position| in the valid range.
- length = std::min(length, Size() - position);
- AudioMultiVector::OverwriteAt(insert_this, length, position);
-}
-
-void SyncBuffer::ReplaceAtIndex(const AudioMultiVector& insert_this,
- size_t position) {
- ReplaceAtIndex(insert_this, insert_this.Size(), position);
-}
-
-size_t SyncBuffer::GetNextAudioInterleaved(size_t requested_len,
- int16_t* output) {
- if (!output) {
- assert(false);
- return 0;
- }
- size_t samples_to_read = std::min(FutureLength(), requested_len);
- ReadInterleavedFromIndex(next_index_, samples_to_read, output);
- next_index_ += samples_to_read;
- return samples_to_read;
-}
-
-void SyncBuffer::IncreaseEndTimestamp(uint32_t increment) {
- end_timestamp_ += increment;
-}
-
-void SyncBuffer::Flush() {
- Zeros(Size());
- next_index_ = Size();
- end_timestamp_ = 0;
- dtmf_index_ = 0;
-}
-
-void SyncBuffer::set_next_index(size_t value) {
- // Cannot set |next_index_| larger than the size of the buffer.
- next_index_ = std::min(value, Size());
-}
-
-void SyncBuffer::set_dtmf_index(size_t value) {
- // Cannot set |dtmf_index_| larger than the size of the buffer.
- dtmf_index_ = std::min(value, Size());
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.h
deleted file mode 100644
index e1e5daf1b78..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.h
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_SYNC_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_SYNC_BUFFER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class SyncBuffer : public AudioMultiVector {
- public:
- SyncBuffer(size_t channels, size_t length)
- : AudioMultiVector(channels, length),
- next_index_(length),
- end_timestamp_(0),
- dtmf_index_(0) {}
-
- virtual ~SyncBuffer() {}
-
-  // Returns the number of samples yet to play out from the buffer.
- size_t FutureLength() const;
-
- // Adds the contents of |append_this| to the back of the SyncBuffer. Removes
- // the same number of samples from the beginning of the SyncBuffer, to
- // maintain a constant buffer size. The |next_index_| is updated to reflect
- // the move of the beginning of "future" data.
- void PushBack(const AudioMultiVector& append_this);
-
- // Adds |length| zeros to the beginning of each channel. Removes
- // the same number of samples from the end of the SyncBuffer, to
- // maintain a constant buffer size. The |next_index_| is updated to reflect
- // the move of the beginning of "future" data.
- // Note that this operation may delete future samples that are waiting to
- // be played.
- void PushFrontZeros(size_t length);
-
- // Inserts |length| zeros into each channel at index |position|. The size of
- // the SyncBuffer is kept constant, which means that the last |length|
- // elements in each channel will be purged.
- virtual void InsertZerosAtIndex(size_t length, size_t position);
-
- // Overwrites each channel in this SyncBuffer with values taken from
- // |insert_this|. The values are taken from the beginning of |insert_this| and
- // are inserted starting at |position|. |length| values are written into each
- // channel. The size of the SyncBuffer is kept constant. That is, if |length|
- // and |position| are selected such that the new data would extend beyond the
- // end of the current SyncBuffer, the buffer is not extended.
- // The |next_index_| is not updated.
- virtual void ReplaceAtIndex(const AudioMultiVector& insert_this,
- size_t length,
- size_t position);
-
- // Same as the above method, but where all of |insert_this| is written (with
- // the same constraints as above, that the SyncBuffer is not extended).
- virtual void ReplaceAtIndex(const AudioMultiVector& insert_this,
- size_t position);
-
- // Reads |requested_len| samples from each channel and writes them interleaved
- // into |output|. The |next_index_| is updated to point to the sample to read
- // next time.
- size_t GetNextAudioInterleaved(size_t requested_len, int16_t* output);
-
- // Adds |increment| to |end_timestamp_|.
- void IncreaseEndTimestamp(uint32_t increment);
-
- // Flushes the buffer. The buffer will contain only zeros after the flush, and
- // |next_index_| will point to the end, like when the buffer was first
- // created.
- void Flush();
-
- const AudioVector& Channel(size_t n) { return *channels_[n]; }
-
- // Accessors and mutators.
- size_t next_index() const { return next_index_; }
- void set_next_index(size_t value);
- uint32_t end_timestamp() const { return end_timestamp_; }
- void set_end_timestamp(uint32_t value) { end_timestamp_ = value; }
- size_t dtmf_index() const { return dtmf_index_; }
- void set_dtmf_index(size_t value);
-
- private:
- size_t next_index_;
- uint32_t end_timestamp_; // The timestamp of the last sample in the buffer.
- size_t dtmf_index_; // Index to the first non-DTMF sample in the buffer.
-
- DISALLOW_COPY_AND_ASSIGN(SyncBuffer);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_SYNC_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc
deleted file mode 100644
index 1aafa22ab88..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-
-#include "gtest/gtest.h"
-
-namespace webrtc {
-
-TEST(SyncBuffer, CreateAndDestroy) {
- // Create a SyncBuffer with two channels and 10 samples each.
- static const size_t kLen = 10;
- static const size_t kChannels = 2;
- SyncBuffer sync_buffer(kChannels, kLen);
- EXPECT_EQ(kChannels, sync_buffer.Channels());
- EXPECT_EQ(kLen, sync_buffer.Size());
- // When the buffer is empty, the next index to play out is at the end.
- EXPECT_EQ(kLen, sync_buffer.next_index());
- // Verify that all elements are zero.
- for (size_t channel = 0; channel < kChannels; ++channel) {
- for (size_t i = 0; i < kLen; ++i) {
- EXPECT_EQ(0, sync_buffer[channel][i]);
- }
- }
-}
-
-TEST(SyncBuffer, SetNextIndex) {
- // Create a SyncBuffer with two channels and 100 samples each.
- static const size_t kLen = 100;
- static const size_t kChannels = 2;
- SyncBuffer sync_buffer(kChannels, kLen);
- sync_buffer.set_next_index(0);
- EXPECT_EQ(0u, sync_buffer.next_index());
- sync_buffer.set_next_index(kLen / 2);
- EXPECT_EQ(kLen / 2, sync_buffer.next_index());
- sync_buffer.set_next_index(kLen);
- EXPECT_EQ(kLen, sync_buffer.next_index());
- // Try to set larger than the buffer size; should cap at buffer size.
- sync_buffer.set_next_index(kLen + 1);
- EXPECT_EQ(kLen, sync_buffer.next_index());
-}
-
-TEST(SyncBuffer, PushBackAndFlush) {
- // Create a SyncBuffer with two channels and 100 samples each.
- static const size_t kLen = 100;
- static const size_t kChannels = 2;
- SyncBuffer sync_buffer(kChannels, kLen);
- static const size_t kNewLen = 10;
- AudioMultiVector new_data(kChannels, kNewLen);
- // Populate |new_data|.
- for (size_t channel = 0; channel < kChannels; ++channel) {
- for (size_t i = 0; i < kNewLen; ++i) {
- new_data[channel][i] = i;
- }
- }
- // Push back |new_data| into |sync_buffer|. This operation should pop out
- // data from the front of |sync_buffer|, so that the size of the buffer
-  // remains the same. The |next_index_| should also move by the same amount.
- sync_buffer.PushBack(new_data);
- ASSERT_EQ(kLen, sync_buffer.Size());
- // Verify that |next_index_| moved accordingly.
- EXPECT_EQ(kLen - kNewLen, sync_buffer.next_index());
- // Verify the new contents.
- for (size_t channel = 0; channel < kChannels; ++channel) {
- for (size_t i = 0; i < kNewLen; ++i) {
- EXPECT_EQ(new_data[channel][i],
- sync_buffer[channel][sync_buffer.next_index() + i]);
- }
- }
-
- // Now flush the buffer, and verify that it is all zeros, and that next_index
- // points to the end.
- sync_buffer.Flush();
- ASSERT_EQ(kLen, sync_buffer.Size());
- EXPECT_EQ(kLen, sync_buffer.next_index());
- for (size_t channel = 0; channel < kChannels; ++channel) {
- for (size_t i = 0; i < kLen; ++i) {
- EXPECT_EQ(0, sync_buffer[channel][i]);
- }
- }
-}
-
-TEST(SyncBuffer, PushFrontZeros) {
- // Create a SyncBuffer with two channels and 100 samples each.
- static const size_t kLen = 100;
- static const size_t kChannels = 2;
- SyncBuffer sync_buffer(kChannels, kLen);
- static const size_t kNewLen = 10;
- AudioMultiVector new_data(kChannels, kNewLen);
- // Populate |new_data|.
- for (size_t channel = 0; channel < kChannels; ++channel) {
- for (size_t i = 0; i < kNewLen; ++i) {
- new_data[channel][i] = 1000 + i;
- }
- }
- sync_buffer.PushBack(new_data);
- EXPECT_EQ(kLen, sync_buffer.Size());
-
- // Push |kNewLen| - 1 zeros into each channel in the front of the SyncBuffer.
- sync_buffer.PushFrontZeros(kNewLen - 1);
- EXPECT_EQ(kLen, sync_buffer.Size()); // Size should remain the same.
- // Verify that |next_index_| moved accordingly. Should be at the end - 1.
- EXPECT_EQ(kLen - 1, sync_buffer.next_index());
- // Verify the zeros.
- for (size_t channel = 0; channel < kChannels; ++channel) {
- for (size_t i = 0; i < kNewLen - 1; ++i) {
- EXPECT_EQ(0, sync_buffer[channel][i]);
- }
- }
- // Verify that the correct data is at the end of the SyncBuffer.
- for (size_t channel = 0; channel < kChannels; ++channel) {
- EXPECT_EQ(1000, sync_buffer[channel][sync_buffer.next_index()]);
- }
-}
-
-TEST(SyncBuffer, GetNextAudioInterleaved) {
- // Create a SyncBuffer with two channels and 100 samples each.
- static const size_t kLen = 100;
- static const size_t kChannels = 2;
- SyncBuffer sync_buffer(kChannels, kLen);
- static const size_t kNewLen = 10;
- AudioMultiVector new_data(kChannels, kNewLen);
- // Populate |new_data|.
- for (size_t channel = 0; channel < kChannels; ++channel) {
- for (size_t i = 0; i < kNewLen; ++i) {
- new_data[channel][i] = i;
- }
- }
- // Push back |new_data| into |sync_buffer|. This operation should pop out
- // data from the front of |sync_buffer|, so that the size of the buffer
-  // remains the same. The |next_index_| should also move by the same amount.
- sync_buffer.PushBack(new_data);
-
- // Read to interleaved output. Read in two batches, where each read operation
-  // should automatically update the |next_index_| in the SyncBuffer.
- int16_t output[kChannels * kNewLen];
- // Note that |samples_read| is the number of samples read from each channel.
- // That is, the number of samples written to |output| is
- // |samples_read| * |kChannels|.
- size_t samples_read = sync_buffer.GetNextAudioInterleaved(kNewLen / 2,
- output);
- samples_read +=
- sync_buffer.GetNextAudioInterleaved(kNewLen / 2,
- &output[samples_read * kChannels]);
- EXPECT_EQ(kNewLen, samples_read);
-
- // Verify the data.
- int16_t* output_ptr = output;
- for (size_t i = 0; i < kNewLen; ++i) {
- for (size_t channel = 0; channel < kChannels; ++channel) {
- EXPECT_EQ(new_data[channel][i], *output_ptr);
- ++output_ptr;
- }
- }
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.cc
deleted file mode 100644
index 5b6b3ba9666..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.cc
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/time_stretch.h"
-
-#include <algorithm> // min, max
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-
-namespace webrtc {
-
-TimeStretch::ReturnCodes TimeStretch::Process(
- const int16_t* input,
- size_t input_len,
- AudioMultiVector* output,
- int16_t* length_change_samples) {
-
- // Pre-calculate common multiplication with |fs_mult_|.
- int fs_mult_120 = fs_mult_ * 120; // Corresponds to 15 ms.
-
- const int16_t* signal;
- scoped_array<int16_t> signal_array;
- size_t signal_len;
- if (num_channels_ == 1) {
- signal = input;
- signal_len = input_len;
- } else {
- // We want |signal| to be only the first channel of |input|, which is
- // interleaved. Thus, we take the first sample, skip forward |num_channels|
- // samples, and continue like that.
- signal_len = input_len / num_channels_;
- signal_array.reset(new int16_t[signal_len]);
- signal = signal_array.get();
- size_t j = master_channel_;
- for (size_t i = 0; i < signal_len; ++i) {
- signal_array[i] = input[j];
- j += num_channels_;
- }
- }
-
- // Find maximum absolute value of input signal.
- max_input_value_ = WebRtcSpl_MaxAbsValueW16(signal,
- static_cast<int>(signal_len));
-
- // Downsample to 4 kHz sample rate and calculate auto-correlation.
- DspHelper::DownsampleTo4kHz(signal, signal_len, kDownsampledLen,
- sample_rate_hz_, true /* compensate delay*/,
- downsampled_input_);
- AutoCorrelation();
-
- // Find the strongest correlation peak.
- static const int kNumPeaks = 1;
- int peak_index;
- int16_t peak_value;
- DspHelper::PeakDetection(auto_correlation_, kCorrelationLen, kNumPeaks,
- fs_mult_, &peak_index, &peak_value);
- // Assert that |peak_index| stays within boundaries.
- assert(peak_index >= 0);
- assert(peak_index <= (2 * kCorrelationLen - 1) * fs_mult_);
-
- // Compensate peak_index for displaced starting position. The displacement
- // happens in AutoCorrelation(). Here, |kMinLag| is in the down-sampled 4 kHz
- // domain, while the |peak_index| is in the original sample rate; hence, the
- // multiplication by fs_mult_ * 2.
- peak_index += kMinLag * fs_mult_ * 2;
- // Assert that |peak_index| stays within boundaries.
- assert(peak_index >= 20 * fs_mult_);
- assert(peak_index <= 20 * fs_mult_ + (2 * kCorrelationLen - 1) * fs_mult_);
-
- // Calculate scaling to ensure that |peak_index| samples can be square-summed
- // without overflowing.
- int scaling = 31 - WebRtcSpl_NormW32(max_input_value_ * max_input_value_) -
- WebRtcSpl_NormW32(peak_index);
- scaling = std::max(0, scaling);
-
- // |vec1| starts at 15 ms minus one pitch period.
- const int16_t* vec1 = &signal[fs_mult_120 - peak_index];
-  // |vec2| starts at 15 ms.
- const int16_t* vec2 = &signal[fs_mult_120];
- // Calculate energies for |vec1| and |vec2|, assuming they both contain
- // |peak_index| samples.
- int32_t vec1_energy =
- WebRtcSpl_DotProductWithScale(vec1, vec1, peak_index, scaling);
- int32_t vec2_energy =
- WebRtcSpl_DotProductWithScale(vec2, vec2, peak_index, scaling);
-
- // Calculate cross-correlation between |vec1| and |vec2|.
- int32_t cross_corr =
- WebRtcSpl_DotProductWithScale(vec1, vec2, peak_index, scaling);
-
- // Check if the signal seems to be active speech or not (simple VAD).
- bool active_speech = SpeechDetection(vec1_energy, vec2_energy, peak_index,
- scaling);
-
- int16_t best_correlation;
- if (!active_speech) {
- SetParametersForPassiveSpeech(signal_len, &best_correlation, &peak_index);
- } else {
- // Calculate correlation:
- // cross_corr / sqrt(vec1_energy * vec2_energy).
-
- // Start with calculating scale values.
- int energy1_scale = std::max(0, 16 - WebRtcSpl_NormW32(vec1_energy));
- int energy2_scale = std::max(0, 16 - WebRtcSpl_NormW32(vec2_energy));
-
- // Make sure total scaling is even (to simplify scale factor after sqrt).
- if ((energy1_scale + energy2_scale) & 1) {
- // The sum is odd.
- energy1_scale += 1;
- }
-
- // Scale energies to int16_t.
- int16_t vec1_energy_int16 =
- static_cast<int16_t>(vec1_energy >> energy1_scale);
- int16_t vec2_energy_int16 =
- static_cast<int16_t>(vec2_energy >> energy2_scale);
-
- // Calculate square-root of energy product.
- int16_t sqrt_energy_prod = WebRtcSpl_SqrtFloor(vec1_energy_int16 *
- vec2_energy_int16);
-
- // Calculate cross_corr / sqrt(en1*en2) in Q14.
- int temp_scale = 14 - (energy1_scale + energy2_scale) / 2;
- cross_corr = WEBRTC_SPL_SHIFT_W32(cross_corr, temp_scale);
- cross_corr = std::max(0, cross_corr); // Don't use if negative.
- best_correlation = WebRtcSpl_DivW32W16(cross_corr, sqrt_energy_prod);
- // Make sure |best_correlation| is no larger than 1 in Q14.
- best_correlation = std::min(static_cast<int16_t>(16384), best_correlation);
- }
-
-
- // Check accelerate criteria and stretch the signal.
- ReturnCodes return_value = CheckCriteriaAndStretch(
- input, input_len, peak_index, best_correlation, active_speech, output);
- switch (return_value) {
- case kSuccess:
- *length_change_samples = peak_index;
- break;
- case kSuccessLowEnergy:
- *length_change_samples = peak_index;
- break;
- case kNoStretch:
- case kError:
- *length_change_samples = 0;
- break;
- }
- return return_value;
-}
-
-void TimeStretch::AutoCorrelation() {
- // Set scaling factor for cross correlation to protect against overflow.
- int scaling = kLogCorrelationLen - WebRtcSpl_NormW32(
- max_input_value_ * max_input_value_);
- scaling = std::max(0, scaling);
-
- // Calculate correlation from lag kMinLag to lag kMaxLag in 4 kHz domain.
- int32_t auto_corr[kCorrelationLen];
- WebRtcSpl_CrossCorrelation(auto_corr, &downsampled_input_[kMaxLag],
- &downsampled_input_[kMaxLag - kMinLag],
- kCorrelationLen, kMaxLag - kMinLag, scaling, -1);
-
- // Normalize correlation to 14 bits and write to |auto_correlation_|.
- int32_t max_corr = WebRtcSpl_MaxAbsValueW32(auto_corr, kCorrelationLen);
- scaling = std::max(0, 17 - WebRtcSpl_NormW32(max_corr));
- WebRtcSpl_VectorBitShiftW32ToW16(auto_correlation_, kCorrelationLen,
- auto_corr, scaling);
-}
-
-bool TimeStretch::SpeechDetection(int32_t vec1_energy, int32_t vec2_energy,
- int peak_index, int scaling) const {
- // Check if the signal seems to be active speech or not (simple VAD).
- // If (vec1_energy + vec2_energy) / (2 * peak_index) <=
- // 8 * background_noise_energy, then we say that the signal contains no
- // active speech.
- // Rewrite the inequality as:
- // (vec1_energy + vec2_energy) / 16 <= peak_index * background_noise_energy.
- // The two sides of the inequality will be denoted |left_side| and
- // |right_side|.
- int32_t left_side = (vec1_energy + vec2_energy) / 16;
- int32_t right_side;
- if (background_noise_.initialized()) {
- right_side = background_noise_.Energy(master_channel_);
- } else {
- // If noise parameters have not been estimated, use a fixed threshold.
- right_side = 75000;
- }
- int right_scale = 16 - WebRtcSpl_NormW32(right_side);
- right_scale = std::max(0, right_scale);
- left_side = left_side >> right_scale;
- right_side = peak_index * (right_side >> right_scale);
-
- // Scale |left_side| properly before comparing with |right_side|.
- // (|scaling| is the scale factor before energy calculation, thus the scale
- // factor for the energy is 2 * scaling.)
- if (WebRtcSpl_NormW32(left_side) < 2 * scaling) {
- // Cannot scale only |left_side|, must scale |right_side| too.
- int temp_scale = WebRtcSpl_NormW32(left_side);
- left_side = left_side << temp_scale;
- right_side = right_side >> (2 * scaling - temp_scale);
- } else {
- left_side = left_side << 2 * scaling;
- }
- return left_side > right_side;
-}
-
-} // namespace webrtc
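
The fixed-point section above computes best_correlation = cross_corr / sqrt(vec1_energy * vec2_energy), clamped to [0, 1] and expressed in Q14, using paired energy scalings so the intermediate products fit in 16/32 bits. A floating-point reference for the same quantity (NormalizedCorrelation is a hypothetical helper, for illustration only):

    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    // Normalized correlation between two equally long int16_t segments,
    // clamped to [0, 1]; the floating-point counterpart of the Q14 value
    // computed above.
    double NormalizedCorrelation(const int16_t* vec1, const int16_t* vec2,
                                 int length) {
      double cross = 0.0, energy1 = 0.0, energy2 = 0.0;
      for (int i = 0; i < length; ++i) {
        cross += static_cast<double>(vec1[i]) * vec2[i];
        energy1 += static_cast<double>(vec1[i]) * vec1[i];
        energy2 += static_cast<double>(vec2[i]) * vec2[i];
      }
      const double denom = std::sqrt(energy1 * energy2);
      if (denom == 0.0)
        return 0.0;
      return std::max(0.0, std::min(1.0, cross / denom));
    }
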
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.h
deleted file mode 100644
index f0f58b83ad9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.h
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIME_STRETCH_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIME_STRETCH_H_
-
-#include <assert.h>
-#include <string.h> // memset, size_t
-
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declarations.
-class BackgroundNoise;
-
-// This is the base class for Accelerate and PreemptiveExpand. This class
-// cannot be instantiated, but must be used through either of the derived
-// classes.
-class TimeStretch {
- public:
- enum ReturnCodes {
- kSuccess = 0,
- kSuccessLowEnergy = 1,
- kNoStretch = 2,
- kError = -1
- };
-
- TimeStretch(int sample_rate_hz, size_t num_channels,
- const BackgroundNoise& background_noise)
- : sample_rate_hz_(sample_rate_hz),
- fs_mult_(sample_rate_hz / 8000),
- num_channels_(static_cast<int>(num_channels)),
- master_channel_(0), // First channel is master.
- background_noise_(background_noise),
- max_input_value_(0) {
- assert(sample_rate_hz_ == 8000 ||
- sample_rate_hz_ == 16000 ||
- sample_rate_hz_ == 32000 ||
- sample_rate_hz_ == 48000);
- assert(num_channels_ > 0);
- assert(static_cast<int>(master_channel_) < num_channels_);
- memset(auto_correlation_, 0, sizeof(auto_correlation_));
- }
-
- virtual ~TimeStretch() {}
-
- // This method performs the processing common to both Accelerate and
- // PreemptiveExpand.
- ReturnCodes Process(const int16_t* input,
- size_t input_len,
- AudioMultiVector* output,
- int16_t* length_change_samples);
-
- protected:
- // Sets the parameters |best_correlation| and |peak_index| to suitable
- // values when the signal contains no active speech. This method must be
- // implemented by the sub-classes.
- virtual void SetParametersForPassiveSpeech(size_t input_length,
- int16_t* best_correlation,
- int* peak_index) const = 0;
-
- // Checks the criteria for performing the time-stretching operation and,
- // if possible, performs the time-stretching. This method must be implemented
- // by the sub-classes.
- virtual ReturnCodes CheckCriteriaAndStretch(
- const int16_t* input, size_t input_length, size_t peak_index,
- int16_t best_correlation, bool active_speech,
- AudioMultiVector* output) const = 0;
-
- static const int kCorrelationLen = 50;
- static const int kLogCorrelationLen = 6; // >= log2(kCorrelationLen).
- static const int kMinLag = 10;
- static const int kMaxLag = 60;
- static const int kDownsampledLen = kCorrelationLen + kMaxLag;
- static const int kCorrelationThreshold = 14746; // 0.9 in Q14.
-
- const int sample_rate_hz_;
- const int fs_mult_; // Sample rate multiplier = sample_rate_hz_ / 8000.
- const int num_channels_;
- const size_t master_channel_;
- const BackgroundNoise& background_noise_;
- int16_t max_input_value_;
- int16_t downsampled_input_[kDownsampledLen];
- // Adding 1 to the size of |auto_correlation_| because of how it is used
- // by the peak-detection algorithm.
- int16_t auto_correlation_[kCorrelationLen + 1];
-
- private:
- // Calculates the auto-correlation of |downsampled_input_| and writes the
- // result to |auto_correlation_|.
- void AutoCorrelation();
-
- // Performs a simple voice-activity detection based on the input parameters.
- bool SpeechDetection(int32_t vec1_energy, int32_t vec2_energy,
- int peak_index, int scaling) const;
-
- DISALLOW_COPY_AND_ASSIGN(TimeStretch);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIME_STRETCH_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc
deleted file mode 100644
index cf8131f3a06..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for Accelerate and PreemptiveExpand classes.
-
-#include "webrtc/modules/audio_coding/neteq4/accelerate.h"
-#include "webrtc/modules/audio_coding/neteq4/preemptive_expand.h"
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-
-namespace webrtc {
-
-TEST(TimeStretch, CreateAndDestroy) {
- int sample_rate = 8000;
- size_t num_channels = 1;
- BackgroundNoise bgn(num_channels);
- Accelerate accelerate(sample_rate, num_channels, bgn);
- PreemptiveExpand preemptive_expand(sample_rate, num_channels, bgn);
-}
-
-// TODO(hlundin): Write more tests.
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc
deleted file mode 100644
index b2b5b40a3a6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h"
-
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-
-namespace webrtc {
-
-void TimestampScaler::ToInternal(Packet* packet) {
- if (!packet) {
- return;
- }
- packet->header.timestamp = ToInternal(packet->header.timestamp,
- packet->header.payloadType);
-}
-
-void TimestampScaler::ToInternal(PacketList* packet_list) {
- PacketList::iterator it;
- for (it = packet_list->begin(); it != packet_list->end(); ++it) {
- ToInternal(*it);
- }
-}
-
-uint32_t TimestampScaler::ToInternal(uint32_t external_timestamp,
- uint8_t rtp_payload_type) {
- const DecoderDatabase::DecoderInfo* info =
- decoder_database_.GetDecoderInfo(rtp_payload_type);
- if (!info) {
- // Payload type is unknown. Do not scale.
- return external_timestamp;
- }
- switch (info->codec_type) {
- case kDecoderG722:
- case kDecoderG722_2ch: {
- // Use timestamp scaling with factor 2 (two output samples per RTP
- // timestamp).
- numerator_ = 2;
- denominator_ = 1;
- break;
- }
- case kDecoderOpus:
- case kDecoderOpus_2ch:
- case kDecoderISACfb:
- case kDecoderCNGswb48kHz: {
- // Use timestamp scaling with factor 2/3 (32 kHz sample rate, but RTP
- // timestamps run on 48 kHz).
- // TODO(tlegrand): Remove scaling for kDecoderCNGswb48kHz once ACM has
- // full 48 kHz support.
- numerator_ = 2;
- denominator_ = 3;
- }
- case kDecoderAVT:
- case kDecoderCNGnb:
- case kDecoderCNGwb:
- case kDecoderCNGswb32kHz: {
- // Do not change the timestamp scaling settings for DTMF or CNG.
- break;
- }
- default: {
- // Do not use timestamp scaling for any other codec.
- numerator_ = 1;
- denominator_ = 1;
- break;
- }
- }
-
- if (!(numerator_ == 1 && denominator_ == 1)) {
- // We have a scale factor != 1.
- if (!first_packet_received_) {
- external_ref_ = external_timestamp;
- internal_ref_ = external_timestamp;
- first_packet_received_ = true;
- }
- int32_t external_diff = external_timestamp - external_ref_;
- assert(denominator_ > 0); // Should not be possible.
- external_ref_ = external_timestamp;
- internal_ref_ += (external_diff * numerator_) / denominator_;
- LOG(LS_VERBOSE) << "Converting timestamp: " << external_timestamp <<
- " -> " << internal_ref_;
- return internal_ref_;
- } else {
- // No scaling.
- return external_timestamp;
- }
-}
-
-
-uint32_t TimestampScaler::ToExternal(uint32_t internal_timestamp) const {
- if (!first_packet_received_ || (numerator_ == 1 && denominator_ == 1)) {
- // Not initialized, or scale factor is 1.
- return internal_timestamp;
- } else {
- int32_t internal_diff = internal_timestamp - internal_ref_;
- assert(numerator_ > 0); // Should not be possible.
- // Do not update references in this method.
- // Switch |denominator_| and |numerator_| to convert the other way.
- return external_ref_ + (internal_diff * denominator_) / numerator_;
- }
-}
-
-} // namespace webrtc
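
The conversion in ToInternal() above is an incremental affine map anchored at the first scaled packet: the internal reference advances by the external timestamp difference times numerator / denominator, and both references are refreshed on every call. The arithmetic for the G.722 case (factor 2, i.e. numerator = 2, denominator = 1) looks like the sketch below, with made-up timestamp values:

    #include <cstdint>

    // External -> internal timestamp step used by TimestampScaler, shown for
    // G.722. G.722 RTP timestamps run at 8 kHz while the decoder produces
    // 16 kHz samples, so one 20 ms packet advances 160 ticks externally but
    // 320 samples internally.
    int main() {
      const int numerator = 2;
      const int denominator = 1;
      uint32_t external_ref = 16000;  // First scaled packet seen.
      uint32_t internal_ref = 16000;
      const uint32_t external_timestamp = 16160;  // Next packet, 160 ticks later.

      const int32_t external_diff =
          static_cast<int32_t>(external_timestamp - external_ref);
      external_ref = external_timestamp;
      internal_ref += (external_diff * numerator) / denominator;
      // internal_ref is now 16320.
      return 0;
    }
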
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.h
deleted file mode 100644
index e165076a5e5..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIMESTAMP_SCALER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIMESTAMP_SCALER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declaration.
-class DecoderDatabase;
-
-// This class scales timestamps for codecs that need timestamp scaling.
-// This is done for codecs where one RTP timestamp does not correspond to
-// one sample.
-class TimestampScaler {
- public:
- explicit TimestampScaler(const DecoderDatabase& decoder_database)
- : first_packet_received_(false),
- numerator_(1),
- denominator_(1),
- external_ref_(0),
- internal_ref_(0),
- decoder_database_(decoder_database) {}
-
- virtual ~TimestampScaler() {}
-
- // Start over.
- virtual void Reset() { first_packet_received_ = false; }
-
- // Scale the timestamp in |packet| from external to internal.
- virtual void ToInternal(Packet* packet);
-
- // Scale the timestamp for all packets in |packet_list| from external to
- // internal.
- virtual void ToInternal(PacketList* packet_list);
-
- // Returns the internal equivalent of |external_timestamp|, given the
- // RTP payload type |rtp_payload_type|.
- virtual uint32_t ToInternal(uint32_t external_timestamp,
- uint8_t rtp_payload_type);
-
- // Scales back to external timestamp. This is the inverse of ToInternal().
- virtual uint32_t ToExternal(uint32_t internal_timestamp) const;
-
- private:
- bool first_packet_received_;
- int numerator_;
- int denominator_;
- uint32_t external_ref_;
- uint32_t internal_ref_;
- const DecoderDatabase& decoder_database_;
-
- DISALLOW_COPY_AND_ASSIGN(TimestampScaler);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIMESTAMP_SCALER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler_unittest.cc
deleted file mode 100644
index c676094672f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler_unittest.cc
+++ /dev/null
@@ -1,327 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h"
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-
-using ::testing::Return;
-using ::testing::ReturnNull;
-using ::testing::_;
-
-namespace webrtc {
-
-TEST(TimestampScaler, TestNoScaling) {
- MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderPCMu; // Does not use scaled timestamps.
- static const uint8_t kRtpPayloadType = 0;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
- .WillRepeatedly(Return(&info));
-
- TimestampScaler scaler(db);
- // Test both sides of the timestamp wrap-around.
- for (uint32_t timestamp = 0xFFFFFFFF - 5; timestamp != 5; ++timestamp) {
- // Scale to internal timestamp.
- EXPECT_EQ(timestamp, scaler.ToInternal(timestamp, kRtpPayloadType));
- // Scale back.
- EXPECT_EQ(timestamp, scaler.ToExternal(timestamp));
- }
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-TEST(TimestampScaler, TestNoScalingLargeStep) {
- MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderPCMu; // Does not use scaled timestamps.
- static const uint8_t kRtpPayloadType = 0;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
- .WillRepeatedly(Return(&info));
-
- TimestampScaler scaler(db);
- // Test both sides of the timestamp wrap-around.
- static const uint32_t kStep = 160;
- uint32_t start_timestamp = 0;
-  // |start_timestamp| will be a large positive value.
- start_timestamp = start_timestamp - 5 * kStep;
- for (uint32_t timestamp = start_timestamp; timestamp != 5 * kStep;
- timestamp += kStep) {
- // Scale to internal timestamp.
- EXPECT_EQ(timestamp, scaler.ToInternal(timestamp, kRtpPayloadType));
- // Scale back.
- EXPECT_EQ(timestamp, scaler.ToExternal(timestamp));
- }
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-TEST(TimestampScaler, TestG722) {
- MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderG722; // Uses a factor 2 scaling.
- static const uint8_t kRtpPayloadType = 17;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
- .WillRepeatedly(Return(&info));
-
- TimestampScaler scaler(db);
- // Test both sides of the timestamp wrap-around.
- uint32_t external_timestamp = 0xFFFFFFFF - 5;
- uint32_t internal_timestamp = external_timestamp;
- for (; external_timestamp != 5; ++external_timestamp) {
- // Scale to internal timestamp.
- EXPECT_EQ(internal_timestamp,
- scaler.ToInternal(external_timestamp, kRtpPayloadType));
- // Scale back.
- EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
- internal_timestamp += 2;
- }
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-TEST(TimestampScaler, TestG722LargeStep) {
- MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderG722; // Uses a factor 2 scaling.
- static const uint8_t kRtpPayloadType = 17;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
- .WillRepeatedly(Return(&info));
-
- TimestampScaler scaler(db);
- // Test both sides of the timestamp wrap-around.
- static const uint32_t kStep = 320;
- uint32_t external_timestamp = 0;
- // |external_timestamp| will be a large positive value.
- external_timestamp = external_timestamp - 5 * kStep;
- uint32_t internal_timestamp = external_timestamp;
- for (; external_timestamp != 5 * kStep; external_timestamp += kStep) {
- // Scale to internal timestamp.
- EXPECT_EQ(internal_timestamp,
- scaler.ToInternal(external_timestamp, kRtpPayloadType));
- // Scale back.
- EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
- // Internal timestamp should be incremented with twice the step.
- internal_timestamp += 2 * kStep;
- }
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-TEST(TimestampScaler, TestG722WithCng) {
- MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info_g722, info_cng;
- info_g722.codec_type = kDecoderG722; // Uses a factor 2 scaling.
- info_cng.codec_type = kDecoderCNGwb;
- static const uint8_t kRtpPayloadTypeG722 = 17;
- static const uint8_t kRtpPayloadTypeCng = 13;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadTypeG722))
- .WillRepeatedly(Return(&info_g722));
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadTypeCng))
- .WillRepeatedly(Return(&info_cng));
-
- TimestampScaler scaler(db);
- // Test both sides of the timestamp wrap-around.
- uint32_t external_timestamp = 0xFFFFFFFF - 5;
- uint32_t internal_timestamp = external_timestamp;
- bool next_is_cng = false;
- for (; external_timestamp != 5; ++external_timestamp) {
- // Alternate between G.722 and CNG every other packet.
- if (next_is_cng) {
- // Scale to internal timestamp.
- EXPECT_EQ(internal_timestamp,
- scaler.ToInternal(external_timestamp, kRtpPayloadTypeCng));
- next_is_cng = false;
- } else {
- // Scale to internal timestamp.
- EXPECT_EQ(internal_timestamp,
- scaler.ToInternal(external_timestamp, kRtpPayloadTypeG722));
- next_is_cng = true;
- }
- // Scale back.
- EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
- internal_timestamp += 2;
- }
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-// Make sure that the method ToInternal(Packet* packet) is wired up correctly.
-// Since it is simply calling the other ToInternal method, we are not doing
-// as many tests here.
-TEST(TimestampScaler, TestG722Packet) {
- MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderG722; // Uses a factor 2 scaling.
- static const uint8_t kRtpPayloadType = 17;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
- .WillRepeatedly(Return(&info));
-
- TimestampScaler scaler(db);
- // Test both sides of the timestamp wrap-around.
- uint32_t external_timestamp = 0xFFFFFFFF - 5;
- uint32_t internal_timestamp = external_timestamp;
- Packet packet;
- packet.header.payloadType = kRtpPayloadType;
- for (; external_timestamp != 5; ++external_timestamp) {
- packet.header.timestamp = external_timestamp;
- // Scale to internal timestamp.
- scaler.ToInternal(&packet);
- EXPECT_EQ(internal_timestamp, packet.header.timestamp);
- internal_timestamp += 2;
- }
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-// Make sure that the method ToInternal(PacketList* packet_list) is wired up
-// correctly. Since it is simply calling the ToInternal(Packet* packet) method,
-// we are not doing as many tests here.
-TEST(TimestampScaler, TestG722PacketList) {
- MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderG722; // Uses a factor 2 scaling.
- static const uint8_t kRtpPayloadType = 17;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
- .WillRepeatedly(Return(&info));
-
- TimestampScaler scaler(db);
- // Test both sides of the timestamp wrap-around.
- uint32_t external_timestamp = 0xFFFFFFFF - 5;
- uint32_t internal_timestamp = external_timestamp;
- Packet packet1;
- packet1.header.payloadType = kRtpPayloadType;
- packet1.header.timestamp = external_timestamp;
- Packet packet2;
- packet2.header.payloadType = kRtpPayloadType;
- packet2.header.timestamp = external_timestamp + 10;
- PacketList packet_list;
- packet_list.push_back(&packet1);
- packet_list.push_back(&packet2);
-
- scaler.ToInternal(&packet_list);
- EXPECT_EQ(internal_timestamp, packet1.header.timestamp);
- EXPECT_EQ(internal_timestamp + 20, packet2.header.timestamp);
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-TEST(TimestampScaler, TestG722Reset) {
- MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderG722; // Uses a factor 2 scaling.
- static const uint8_t kRtpPayloadType = 17;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
- .WillRepeatedly(Return(&info));
-
- TimestampScaler scaler(db);
- // Test both sides of the timestamp wrap-around.
- uint32_t external_timestamp = 0xFFFFFFFF - 5;
- uint32_t internal_timestamp = external_timestamp;
- for (; external_timestamp != 5; ++external_timestamp) {
- // Scale to internal timestamp.
- EXPECT_EQ(internal_timestamp,
- scaler.ToInternal(external_timestamp, kRtpPayloadType));
- // Scale back.
- EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
- internal_timestamp += 2;
- }
- // Reset the scaler. After this, we expect the internal and external to start
- // over at the same value again.
- scaler.Reset();
- internal_timestamp = external_timestamp;
- for (; external_timestamp != 15; ++external_timestamp) {
- // Scale to internal timestamp.
- EXPECT_EQ(internal_timestamp,
- scaler.ToInternal(external_timestamp, kRtpPayloadType));
- // Scale back.
- EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
- internal_timestamp += 2;
- }
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-TEST(TimestampScaler, TestOpusLargeStep) {
- MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderOpus; // Uses a factor 2/3 scaling.
- static const uint8_t kRtpPayloadType = 17;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
- .WillRepeatedly(Return(&info));
-
- TimestampScaler scaler(db);
- // Test both sides of the timestamp wrap-around.
- static const uint32_t kStep = 960;
- uint32_t external_timestamp = 0;
- // |external_timestamp| will be a large positive value.
- external_timestamp = external_timestamp - 5 * kStep;
- uint32_t internal_timestamp = external_timestamp;
- for (; external_timestamp != 5 * kStep; external_timestamp += kStep) {
- // Scale to internal timestamp.
- EXPECT_EQ(internal_timestamp,
- scaler.ToInternal(external_timestamp, kRtpPayloadType));
- // Scale back.
- EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
- // Internal timestamp should be incremented with two-thirds of the step.
- internal_timestamp += 2 * kStep / 3;
- }
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-TEST(TimestampScaler, TestIsacFbLargeStep) {
- MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderISACfb; // Uses a factor 2/3 scaling.
- static const uint8_t kRtpPayloadType = 17;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
- .WillRepeatedly(Return(&info));
-
- TimestampScaler scaler(db);
- // Test both sides of the timestamp wrap-around.
- static const uint32_t kStep = 960;
- uint32_t external_timestamp = 0;
- // |external_timestamp| will be a large positive value.
- external_timestamp = external_timestamp - 5 * kStep;
- uint32_t internal_timestamp = external_timestamp;
- for (; external_timestamp != 5 * kStep; external_timestamp += kStep) {
- // Scale to internal timestamp.
- EXPECT_EQ(internal_timestamp,
- scaler.ToInternal(external_timestamp, kRtpPayloadType));
- // Scale back.
- EXPECT_EQ(external_timestamp, scaler.ToExternal(internal_timestamp));
- // Internal timestamp should be incremented with two-thirds of the step.
- internal_timestamp += 2 * kStep / 3;
- }
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-TEST(TimestampScaler, Failures) {
- static const uint8_t kRtpPayloadType = 17;
- MockDecoderDatabase db;
- EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
- .WillOnce(ReturnNull()); // Return NULL to indicate unknown payload type.
-
- TimestampScaler scaler(db);
- uint32_t timestamp = 4711; // Some number.
- EXPECT_EQ(timestamp, scaler.ToInternal(timestamp, kRtpPayloadType));
-
- Packet* packet = NULL;
- scaler.ToInternal(packet); // Should not crash. That's all we can test.
-
- EXPECT_CALL(db, Die()); // Called when database object is deleted.
-}
-
-} // namespace webrtc
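
For reference, the scaling relation these deleted tests exercise can be sketched as a small stand-alone helper. This is a minimal approximation, not the TimestampScaler implementation itself; the parameter names mirror the member variables declared in the deleted header above (external_ref_, internal_ref_, numerator_, denominator_).

#include <stdint.h>

// Minimal sketch of the mapping verified by the tests: the internal timestamp
// advances numerator/denominator times as fast as the external one, measured
// from a pair of reference points. Unsigned arithmetic handles RTP wrap-around.
uint32_t ScaleToInternal(uint32_t external, uint32_t external_ref,
                         uint32_t internal_ref, int numerator,
                         int denominator) {
  uint32_t diff = external - external_ref;  // Wraps modulo 2^32 by design.
  return internal_ref + diff * numerator / denominator;
}

With numerator = 2, denominator = 1 this reproduces the G.722 expectations above; with 2 and 3 it matches the Opus and iSAC-fb cases.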
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.cc
deleted file mode 100644
index 94ea5bef015..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.cc
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/tools/audio_loop.h"
-
-#include <assert.h>
-#include <stdio.h>
-#include <string.h>
-
-namespace webrtc {
-namespace test {
-
-bool AudioLoop::Init(const std::string file_name,
- size_t max_loop_length_samples,
- size_t block_length_samples) {
- FILE* fp = fopen(file_name.c_str(), "rb");
- if (!fp) return false;
-
- audio_array_.reset(new int16_t[max_loop_length_samples +
- block_length_samples]);
- size_t samples_read = fread(audio_array_.get(), sizeof(int16_t),
- max_loop_length_samples, fp);
- fclose(fp);
-
- // Block length must be shorter than the loop length.
- if (block_length_samples > samples_read) return false;
-
- // Add an extra block length of samples to the end of the array, starting
- // over again from the beginning of the array. This is done to simplify
- // the reading process when reading over the end of the loop.
- memcpy(&audio_array_[samples_read], audio_array_.get(),
- block_length_samples * sizeof(int16_t));
-
- loop_length_samples_ = samples_read;
- block_length_samples_ = block_length_samples;
- return true;
-}
-
-const int16_t* AudioLoop::GetNextBlock() {
- // Check that the AudioLoop is initialized.
- if (block_length_samples_ == 0) return NULL;
-
- const int16_t* output_ptr = &audio_array_[next_index_];
- next_index_ = (next_index_ + block_length_samples_) % loop_length_samples_;
- return output_ptr;
-}
-
-
-} // namespace test
-} // namespace webrtc
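
The padding trick described in the Init() comment above (copying one extra block to the end of the buffer so a read never straddles the loop boundary) can be illustrated in isolation. The helper below is a hypothetical stand-alone sketch, not part of the deleted file.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#include <vector>

// Builds a buffer of loop_len + block_len samples: the loop itself followed by
// a copy of its first block, so any block starting in [0, loop_len) can be
// read contiguously without wrapping.
std::vector<int16_t> MakePaddedLoop(const int16_t* samples, size_t loop_len,
                                    size_t block_len) {
  std::vector<int16_t> buf(loop_len + block_len);
  memcpy(buf.data(), samples, loop_len * sizeof(int16_t));
  memcpy(buf.data() + loop_len, samples, block_len * sizeof(int16_t));
  return buf;
}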
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.h
deleted file mode 100644
index 038ca370e72..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_AUDIO_LOOP_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_AUDIO_LOOP_H_
-
-#include <string>
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-namespace test {
-
-// Class serving as an infinite source of audio, realized by looping an audio
-// clip.
-class AudioLoop {
- public:
- AudioLoop()
- : next_index_(0),
- loop_length_samples_(0),
- block_length_samples_(0),
- audio_array_(NULL) {
- }
-
- virtual ~AudioLoop() {}
-
- // Initializes the AudioLoop by reading from |file_name|. If the file is
- // longer than |max_loop_length_samples|, the loop is capped at that length;
- // otherwise, the loop length equals the file length. The audio is
- // delivered in blocks of |block_length_samples|.
- // Returns false if the initialization failed, otherwise true.
- bool Init(const std::string file_name, size_t max_loop_length_samples,
- size_t block_length_samples);
-
- // Returns a pointer to the next block of audio. The number given as
- // |block_length_samples| to the Init() function determines how many samples
- // can be safely read from the pointer.
- const int16_t* GetNextBlock();
-
- private:
- size_t next_index_;
- size_t loop_length_samples_;
- size_t block_length_samples_;
- scoped_array<int16_t> audio_array_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioLoop);
-};
-
-} // namespace test
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_AUDIO_LOOP_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.cc
deleted file mode 100644
index 62692e27dc4..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.cc
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h"
-
-namespace webrtc {
-namespace test {
-
-InputAudioFile::InputAudioFile(const std::string file_name) {
- fp_ = fopen(file_name.c_str(), "rb");
-}
-
-InputAudioFile::~InputAudioFile() { fclose(fp_); }
-
-bool InputAudioFile::Read(size_t samples, int16_t* destination) {
- if (!fp_) {
- return false;
- }
- size_t samples_read = fread(destination, sizeof(int16_t), samples, fp_);
- if (samples_read < samples) {
- // Rewind and read the missing samples.
- rewind(fp_);
- size_t missing_samples = samples - samples_read;
- if (fread(destination, sizeof(int16_t), missing_samples, fp_) <
- missing_samples) {
- // Could not read enough even after rewinding the file.
- return false;
- }
- }
- return true;
-}
-
-void InputAudioFile::DuplicateInterleaved(const int16_t* source, size_t samples,
- size_t channels,
- int16_t* destination) {
- for (size_t i = 0; i < samples; ++i) {
- for (size_t j = 0; j < channels; ++j) {
- destination[i * channels + j] = source[i];
- }
- }
-}
-
-} // namespace test
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h
deleted file mode 100644
index de51ff88b8c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_INPUT_AUDIO_FILE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_INPUT_AUDIO_FILE_H_
-
-#include <stdio.h>
-
-#include <string>
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-namespace test {
-
-// Class for handling a looping input audio file.
-class InputAudioFile {
- public:
- explicit InputAudioFile(const std::string file_name);
-
- virtual ~InputAudioFile();
-
- // Reads |samples| elements from source file to |destination|. Returns true
- // if the read was successful, otherwise false. If the file end is reached,
- // the file is rewound and reading continues from the beginning.
- // The output |destination| must have the capacity to hold |samples| elements.
- bool Read(size_t samples, int16_t* destination);
-
- // Creates a multi-channel signal from a mono signal. Each sample is repeated
- // |channels| times to create an interleaved multi-channel signal where all
- // channels are identical. The output |destination| must have the capacity to
- // hold |samples| * |channels| elements.
- static void DuplicateInterleaved(const int16_t* source, size_t samples,
- size_t channels, int16_t* destination);
-
- private:
- FILE* fp_;
- DISALLOW_COPY_AND_ASSIGN(InputAudioFile);
-};
-
-} // namespace test
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_INPUT_AUDIO_FILE_H_
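
A hypothetical usage sketch of the interface declared above: read one block of mono audio and expand it to interleaved stereo. The 10 ms / 16 kHz block size and the channel count are illustrative choices, not prescribed by the class.

#include <stddef.h>
#include <stdint.h>

#include "webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h"

// Reads one 10 ms block of 16 kHz mono audio and duplicates it into an
// interleaved stereo buffer. |stereo_out| must hold 2 * 160 samples.
bool ReadStereoBlock(webrtc::test::InputAudioFile* file, int16_t* stereo_out) {
  const size_t kSamples = 160;  // 10 ms at 16 kHz.
  const size_t kChannels = 2;
  int16_t mono[kSamples];
  if (!file->Read(kSamples, mono))
    return false;
  webrtc::test::InputAudioFile::DuplicateInterleaved(mono, kSamples, kChannels,
                                                     stereo_out);
  return true;
}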
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc
deleted file mode 100644
index f0ca51f2f70..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc
+++ /dev/null
@@ -1,423 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-#include <stdio.h>
-
-#include <algorithm>
-#include <iostream>
-#include <string>
-
-#include "google/gflags.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h"
-#include "webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/typedefs.h"
-
-using webrtc::NetEq;
-using webrtc::WebRtcRTPHeader;
-
-// Flag validators.
-static bool ValidatePayloadType(const char* flagname, int32_t value) {
- if (value >= 0 && value <= 127) // Value is ok.
- return true;
- printf("Invalid value for --%s: %d\n", flagname, static_cast<int>(value));
- return false;
-}
-
-// Define command line flags.
-DEFINE_int32(pcmu, 0, "RTP payload type for PCM-u");
-static const bool pcmu_dummy =
- google::RegisterFlagValidator(&FLAGS_pcmu, &ValidatePayloadType);
-DEFINE_int32(pcma, 8, "RTP payload type for PCM-a");
-static const bool pcma_dummy =
- google::RegisterFlagValidator(&FLAGS_pcma, &ValidatePayloadType);
-DEFINE_int32(ilbc, 102, "RTP payload type for iLBC");
-static const bool ilbc_dummy =
- google::RegisterFlagValidator(&FLAGS_ilbc, &ValidatePayloadType);
-DEFINE_int32(isac, 103, "RTP payload type for iSAC");
-static const bool isac_dummy =
- google::RegisterFlagValidator(&FLAGS_isac, &ValidatePayloadType);
-DEFINE_int32(isac_swb, 104, "RTP payload type for iSAC-swb (32 kHz)");
-static const bool isac_swb_dummy =
- google::RegisterFlagValidator(&FLAGS_isac_swb, &ValidatePayloadType);
-DEFINE_int32(pcm16b, 93, "RTP payload type for PCM16b-nb (8 kHz)");
-static const bool pcm16b_dummy =
- google::RegisterFlagValidator(&FLAGS_pcm16b, &ValidatePayloadType);
-DEFINE_int32(pcm16b_wb, 94, "RTP payload type for PCM16b-wb (16 kHz)");
-static const bool pcm16b_wb_dummy =
- google::RegisterFlagValidator(&FLAGS_pcm16b_wb, &ValidatePayloadType);
-DEFINE_int32(pcm16b_swb32, 95, "RTP payload type for PCM16b-swb32 (32 kHz)");
-static const bool pcm16b_swb32_dummy =
- google::RegisterFlagValidator(&FLAGS_pcm16b_swb32, &ValidatePayloadType);
-DEFINE_int32(pcm16b_swb48, 96, "RTP payload type for PCM16b-swb48 (48 kHz)");
-static const bool pcm16b_swb48_dummy =
- google::RegisterFlagValidator(&FLAGS_pcm16b_swb48, &ValidatePayloadType);
-DEFINE_int32(g722, 9, "RTP payload type for G.722");
-static const bool g722_dummy =
- google::RegisterFlagValidator(&FLAGS_g722, &ValidatePayloadType);
-DEFINE_int32(avt, 106, "RTP payload type for AVT/DTMF");
-static const bool avt_dummy =
- google::RegisterFlagValidator(&FLAGS_avt, &ValidatePayloadType);
-DEFINE_int32(red, 117, "RTP payload type for redundant audio (RED)");
-static const bool red_dummy =
- google::RegisterFlagValidator(&FLAGS_red, &ValidatePayloadType);
-DEFINE_int32(cn_nb, 13, "RTP payload type for comfort noise (8 kHz)");
-static const bool cn_nb_dummy =
- google::RegisterFlagValidator(&FLAGS_cn_nb, &ValidatePayloadType);
-DEFINE_int32(cn_wb, 98, "RTP payload type for comfort noise (16 kHz)");
-static const bool cn_wb_dummy =
- google::RegisterFlagValidator(&FLAGS_cn_wb, &ValidatePayloadType);
-DEFINE_int32(cn_swb32, 99, "RTP payload type for comfort noise (32 kHz)");
-static const bool cn_swb32_dummy =
- google::RegisterFlagValidator(&FLAGS_cn_swb32, &ValidatePayloadType);
-DEFINE_int32(cn_swb48, 100, "RTP payload type for comfort noise (48 kHz)");
-static const bool cn_swb48_dummy =
- google::RegisterFlagValidator(&FLAGS_cn_swb48, &ValidatePayloadType);
-DEFINE_bool(codec_map, false, "Prints the mapping between RTP payload type and "
- "codec");
-DEFINE_bool(dummy_rtp, false, "The input file contains \"dummy\" RTP data, "
- "i.e., only headers");
-
-// Declaring helper functions (defined further down in this file).
-std::string CodecName(webrtc::NetEqDecoder codec);
-void RegisterPayloadTypes(NetEq* neteq);
-void PrintCodecMapping();
-
-int main(int argc, char* argv[]) {
- static const int kMaxChannels = 5;
- static const int kMaxSamplesPerMs = 48000 / 1000;
- static const int kOutputBlockSizeMs = 10;
-
- std::string program_name = argv[0];
- std::string usage = "Tool for decoding an RTP dump file using NetEq.\n"
- "Run " + program_name + " --helpshort for usage.\n"
- "Example usage:\n" + program_name +
- " input.rtp output.pcm\n";
- google::SetUsageMessage(usage);
- google::ParseCommandLineFlags(&argc, &argv, true);
-
- if (FLAGS_codec_map) {
- PrintCodecMapping();
- }
-
- if (argc != 3) {
- if (FLAGS_codec_map) {
- // We have already printed the codec map. Just end the program.
- return 0;
- }
- // Print usage information.
- std::cout << google::ProgramUsage();
- return 0;
- }
-
- FILE* in_file = fopen(argv[1], "rb");
- if (!in_file) {
- std::cerr << "Cannot open input file " << argv[1] << std::endl;
- exit(1);
- }
- std::cout << "Input file: " << argv[1] << std::endl;
-
- FILE* out_file = fopen(argv[2], "wb");
- if (!out_file) {
- std::cerr << "Cannot open output file " << argv[2] << std::endl;
- exit(1);
- }
- std::cout << "Output file: " << argv[2] << std::endl;
-
- // Read RTP file header.
- if (NETEQTEST_RTPpacket::skipFileHeader(in_file) != 0) {
- std::cerr << "Wrong format in RTP file" << std::endl;
- exit(1);
- }
-
- // Enable tracing.
- webrtc::Trace::CreateTrace();
- webrtc::Trace::SetTraceFile((webrtc::test::OutputPath() +
- "neteq_trace.txt").c_str());
- webrtc::Trace::set_level_filter(webrtc::kTraceAll);
-
- // Initialize NetEq instance.
- int sample_rate_hz = 16000;
- NetEq* neteq = NetEq::Create(sample_rate_hz);
- RegisterPayloadTypes(neteq);
-
- // Read first packet.
- NETEQTEST_RTPpacket *rtp;
- if (!FLAGS_dummy_rtp) {
- rtp = new NETEQTEST_RTPpacket();
- } else {
- rtp = new NETEQTEST_DummyRTPpacket();
- }
- if (rtp->readFromFile(in_file) < 0) {
- std::cout << "Warning: RTP file is empty" << std::endl;
- }
-
- // This is the main simulation loop.
- int time_now_ms = rtp->time(); // Start immediately with the first packet.
- int next_input_time_ms = rtp->time();
- int next_output_time_ms = time_now_ms;
- if (time_now_ms % kOutputBlockSizeMs != 0) {
- // Make sure that next_output_time_ms is rounded up to the next multiple
- // of kOutputBlockSizeMs. (Legacy bit-exactness.)
- next_output_time_ms +=
- kOutputBlockSizeMs - time_now_ms % kOutputBlockSizeMs;
- }
- while (rtp->dataLen() >= 0) {
- // Check if it is time to insert packet.
- while (time_now_ms >= next_input_time_ms && rtp->dataLen() >= 0) {
- if (rtp->dataLen() > 0) {
- // Parse RTP header.
- WebRtcRTPHeader rtp_header;
- rtp->parseHeader(&rtp_header);
- int error = neteq->InsertPacket(rtp_header, rtp->payload(),
- rtp->payloadLen(),
- rtp->time() * sample_rate_hz / 1000);
- if (error != NetEq::kOK) {
- std::cerr << "InsertPacket returned error code " <<
- neteq->LastError() << std::endl;
- }
- }
- // Get next packet from file.
- rtp->readFromFile(in_file);
- next_input_time_ms = rtp->time();
- }
-
- // Check if it is time to get output audio.
- if (time_now_ms >= next_output_time_ms) {
- static const int kOutDataLen = kOutputBlockSizeMs * kMaxSamplesPerMs *
- kMaxChannels;
- int16_t out_data[kOutDataLen];
- int num_channels;
- int samples_per_channel;
- int error = neteq->GetAudio(kOutDataLen, out_data, &samples_per_channel,
- &num_channels, NULL);
- if (error != NetEq::kOK) {
- std::cerr << "GetAudio returned error code " <<
- neteq->LastError() << std::endl;
- } else {
- // Calculate sample rate from output size.
- sample_rate_hz = 1000 * samples_per_channel / kOutputBlockSizeMs;
- }
-
- // Write to file.
- size_t write_len = samples_per_channel * num_channels;
- if (fwrite(out_data, sizeof(out_data[0]), write_len, out_file) !=
- write_len) {
- std::cerr << "Error while writing to file" << std::endl;
- webrtc::Trace::ReturnTrace();
- exit(1);
- }
- next_output_time_ms += kOutputBlockSizeMs;
- }
- // Advance time to next event.
- time_now_ms = std::min(next_input_time_ms, next_output_time_ms);
- }
-
- std::cout << "Simulation done" << std::endl;
-
- fclose(in_file);
- fclose(out_file);
- delete neteq;
- webrtc::Trace::ReturnTrace();
- return 0;
-}
-
-
-// Helper functions.
-
-// Maps a codec type to a printable name string.
-std::string CodecName(webrtc::NetEqDecoder codec) {
- switch (codec) {
- case webrtc::kDecoderPCMu:
- return "PCM-u";
- case webrtc::kDecoderPCMa:
- return "PCM-a";
- case webrtc::kDecoderILBC:
- return "iLBC";
- case webrtc::kDecoderISAC:
- return "iSAC";
- case webrtc::kDecoderISACswb:
- return "iSAC-swb (32 kHz)";
- case webrtc::kDecoderPCM16B:
- return "PCM16b-nb (8 kHz)";
- case webrtc::kDecoderPCM16Bwb:
- return "PCM16b-wb (16 kHz)";
- case webrtc::kDecoderPCM16Bswb32kHz:
- return "PCM16b-swb32 (32 kHz)";
- case webrtc::kDecoderPCM16Bswb48kHz:
- return "PCM16b-swb48 (48 kHz)";
- case webrtc::kDecoderG722:
- return "G.722";
- case webrtc::kDecoderRED:
- return "redundant audio (RED)";
- case webrtc::kDecoderAVT:
- return "AVT/DTMF";
- case webrtc::kDecoderCNGnb:
- return "comfort noise (8 kHz)";
- case webrtc::kDecoderCNGwb:
- return "comfort noise (16 kHz)";
- case webrtc::kDecoderCNGswb32kHz:
- return "comfort noise (32 kHz)";
- case webrtc::kDecoderCNGswb48kHz:
- return "comfort noise (48 kHz)";
- default:
- assert(false);
- return "undefined";
- }
-}
-
-// Registers all decoders in |neteq|.
-void RegisterPayloadTypes(NetEq* neteq) {
- assert(neteq);
- int error;
- error = neteq->RegisterPayloadType(webrtc::kDecoderPCMu, FLAGS_pcmu);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_pcmu <<
- " as " << CodecName(webrtc::kDecoderPCMu).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderPCMa, FLAGS_pcma);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_pcma <<
- " as " << CodecName(webrtc::kDecoderPCMa).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderILBC, FLAGS_ilbc);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_ilbc <<
- " as " << CodecName(webrtc::kDecoderILBC).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderISAC, FLAGS_isac);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_isac <<
- " as " << CodecName(webrtc::kDecoderISAC).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderISACswb, FLAGS_isac_swb);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_isac_swb <<
- " as " << CodecName(webrtc::kDecoderISACswb).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderPCM16B, FLAGS_pcm16b);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_pcm16b <<
- " as " << CodecName(webrtc::kDecoderPCM16B).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderPCM16Bwb,
- FLAGS_pcm16b_wb);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_pcm16b_wb <<
- " as " << CodecName(webrtc::kDecoderPCM16Bwb).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderPCM16Bswb32kHz,
- FLAGS_pcm16b_swb32);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_pcm16b_swb32 <<
- " as " << CodecName(webrtc::kDecoderPCM16Bswb32kHz).c_str() <<
- std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderPCM16Bswb48kHz,
- FLAGS_pcm16b_swb48);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_pcm16b_swb48 <<
- " as " << CodecName(webrtc::kDecoderPCM16Bswb48kHz).c_str() <<
- std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderG722, FLAGS_g722);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_g722 <<
- " as " << CodecName(webrtc::kDecoderG722).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderAVT, FLAGS_avt);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_avt <<
- " as " << CodecName(webrtc::kDecoderAVT).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderRED, FLAGS_red);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_red <<
- " as " << CodecName(webrtc::kDecoderRED).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderCNGnb, FLAGS_cn_nb);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_cn_nb <<
- " as " << CodecName(webrtc::kDecoderCNGnb).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderCNGwb, FLAGS_cn_wb);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_cn_wb <<
- " as " << CodecName(webrtc::kDecoderCNGwb).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderCNGswb32kHz,
- FLAGS_cn_swb32);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_cn_swb32 <<
- " as " << CodecName(webrtc::kDecoderCNGswb32kHz).c_str() << std::endl;
- exit(1);
- }
- error = neteq->RegisterPayloadType(webrtc::kDecoderCNGswb48kHz,
- FLAGS_cn_swb48);
- if (error) {
- std::cerr << "Cannot register payload type " << FLAGS_cn_swb48 <<
- " as " << CodecName(webrtc::kDecoderCNGswb48kHz).c_str() << std::endl;
- exit(1);
- }
-}
-
-void PrintCodecMapping() {
- std::cout << CodecName(webrtc::kDecoderPCMu).c_str() << ": " << FLAGS_pcmu <<
- std::endl;
- std::cout << CodecName(webrtc::kDecoderPCMa).c_str() << ": " << FLAGS_pcma <<
- std::endl;
- std::cout << CodecName(webrtc::kDecoderILBC).c_str() << ": " << FLAGS_ilbc <<
- std::endl;
- std::cout << CodecName(webrtc::kDecoderISAC).c_str() << ": " << FLAGS_isac <<
- std::endl;
- std::cout << CodecName(webrtc::kDecoderISACswb).c_str() << ": " <<
- FLAGS_isac_swb << std::endl;
- std::cout << CodecName(webrtc::kDecoderPCM16B).c_str() << ": " <<
- FLAGS_pcm16b << std::endl;
- std::cout << CodecName(webrtc::kDecoderPCM16Bwb).c_str() << ": " <<
- FLAGS_pcm16b_wb << std::endl;
- std::cout << CodecName(webrtc::kDecoderPCM16Bswb32kHz).c_str() << ": " <<
- FLAGS_pcm16b_swb32 << std::endl;
- std::cout << CodecName(webrtc::kDecoderPCM16Bswb48kHz).c_str() << ": " <<
- FLAGS_pcm16b_swb48 << std::endl;
- std::cout << CodecName(webrtc::kDecoderG722).c_str() << ": " << FLAGS_g722 <<
- std::endl;
- std::cout << CodecName(webrtc::kDecoderAVT).c_str() << ": " << FLAGS_avt <<
- std::endl;
- std::cout << CodecName(webrtc::kDecoderRED).c_str() << ": " << FLAGS_red <<
- std::endl;
- std::cout << CodecName(webrtc::kDecoderCNGnb).c_str() << ": " <<
- FLAGS_cn_nb << std::endl;
- std::cout << CodecName(webrtc::kDecoderCNGwb).c_str() << ": " <<
- FLAGS_cn_wb << std::endl;
- std::cout << CodecName(webrtc::kDecoderCNGswb32kHz).c_str() << ": " <<
- FLAGS_cn_swb32 << std::endl;
- std::cout << CodecName(webrtc::kDecoderCNGswb48kHz).c_str() << ": " <<
- FLAGS_cn_swb48 << std::endl;
-}
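
The main loop of the deleted tool above interleaves two event streams, packet arrivals and 10 ms output pulls, on a single simulated clock. A simplified skeleton of just that time-stepping logic, with the NetEq calls replaced by placeholders and an assumed 20 ms packet spacing, looks like this.

#include <algorithm>

// Advances a simulated clock to the earliest pending event: a packet insertion
// when its RTP arrival time is reached, or an output pull on a fixed 10 ms grid.
void RunSimulation(int first_packet_time_ms, int last_packet_time_ms) {
  const int kOutputBlockSizeMs = 10;
  int time_now_ms = first_packet_time_ms;
  int next_input_time_ms = first_packet_time_ms;
  int next_output_time_ms =
      ((time_now_ms + kOutputBlockSizeMs - 1) / kOutputBlockSizeMs) *
      kOutputBlockSizeMs;  // Round up to the 10 ms grid.
  while (next_input_time_ms <= last_packet_time_ms) {
    if (time_now_ms >= next_input_time_ms) {
      // InsertPacket(...) would go here, followed by reading the next packet.
      next_input_time_ms += 20;  // Placeholder: fixed 20 ms packet spacing.
    }
    if (time_now_ms >= next_output_time_ms) {
      // GetAudio(...) would go here.
      next_output_time_ms += kOutputBlockSizeMs;
    }
    time_now_ms = std::min(next_input_time_ms, next_output_time_ms);
  }
}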
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.cc
deleted file mode 100644
index 8d9a89d5450..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.cc
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-
-#include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h"
-
-namespace webrtc {
-namespace test {
-
-uint32_t RtpGenerator::GetRtpHeader(uint8_t payload_type,
- size_t payload_length_samples,
- WebRtcRTPHeader* rtp_header) {
- assert(rtp_header);
- if (!rtp_header) {
- return 0;
- }
- rtp_header->header.sequenceNumber = seq_number_++;
- rtp_header->header.timestamp = timestamp_;
- timestamp_ += static_cast<uint32_t>(payload_length_samples);
- rtp_header->header.payloadType = payload_type;
- rtp_header->header.markerBit = false;
- rtp_header->header.ssrc = ssrc_;
- rtp_header->header.numCSRCs = 0;
- rtp_header->frameType = kAudioFrameSpeech;
-
- uint32_t this_send_time = next_send_time_ms_;
- assert(samples_per_ms_ > 0);
- next_send_time_ms_ += ((1.0 + drift_factor_) * payload_length_samples) /
- samples_per_ms_;
- return this_send_time;
-}
-
-void RtpGenerator::set_drift_factor(double factor) {
- if (factor > -1.0) {
- drift_factor_ = factor;
- }
-}
-
-} // namespace test
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h
deleted file mode 100644
index ece7ef29808..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_RTP_GENERATOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_RTP_GENERATOR_H_
-
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-namespace test {
-
-// Class for generating RTP headers.
-class RtpGenerator {
- public:
- RtpGenerator(int samples_per_ms,
- uint16_t start_seq_number = 0,
- uint32_t start_timestamp = 0,
- uint32_t start_send_time_ms = 0,
- uint32_t ssrc = 0x12345678)
- : seq_number_(start_seq_number),
- timestamp_(start_timestamp),
- next_send_time_ms_(start_send_time_ms),
- ssrc_(ssrc),
- samples_per_ms_(samples_per_ms),
- drift_factor_(0.0) {
- }
-
- // Writes the next RTP header to |rtp_header|, which will be of type
- // |payload_type|. Returns the send time for this packet (in ms). The value of
- // |payload_length_samples| determines the send time for the next packet.
- uint32_t GetRtpHeader(uint8_t payload_type, size_t payload_length_samples,
- WebRtcRTPHeader* rtp_header);
-
- void set_drift_factor(double factor);
-
- private:
- uint16_t seq_number_;
- uint32_t timestamp_;
- uint32_t next_send_time_ms_;
- const uint32_t ssrc_;
- const int samples_per_ms_;
- double drift_factor_;
- DISALLOW_COPY_AND_ASSIGN(RtpGenerator);
-};
-
-} // namespace test
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_RTP_GENERATOR_H_
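
As a worked example of the send-time computation in GetRtpHeader() above, with values assumed purely for illustration: at 16 samples per ms, 320-sample payloads and a drift factor of 0.05, packets are spaced 21 ms apart instead of 20 ms, while the RTP timestamp still advances by exactly 320 per packet.

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

// Mirrors the update of next_send_time_ms_ in RtpGenerator::GetRtpHeader.
uint32_t NextSendTimeMs(uint32_t current_send_time_ms,
                        size_t payload_length_samples, int samples_per_ms,
                        double drift_factor) {
  assert(samples_per_ms > 0);
  return current_send_time_ms +
         static_cast<uint32_t>(((1.0 + drift_factor) *
                                payload_length_samples) / samples_per_ms);
}
// NextSendTimeMs(0, 320, 16, 0.05) == 21.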
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
index 352537d6ef7..2969ecebe66 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
@@ -57,7 +57,7 @@ public:
// Add/remove participants as candidates for mixing.
virtual int32_t SetMixabilityStatus(MixerParticipant& participant,
- const bool mixable) = 0;
+ bool mixable) = 0;
// mixable is set to true if a participant is a candidate for mixing.
virtual int32_t MixabilityStatus(MixerParticipant& participant,
bool& mixable) = 0;
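
A hypothetical usage sketch of the interface after the signature change above: |mixable| is now passed by value when setting the status and returned through the reference parameter when querying it.

#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h"

// Queries the current mixing state of |participant| and flips it.
void ToggleMixing(webrtc::AudioConferenceMixer* mixer,
                  webrtc::MixerParticipant& participant) {
  bool mixable = false;
  mixer->MixabilityStatus(participant, mixable);      // Read current state.
  mixer->SetMixabilityStatus(participant, !mixable);  // Apply the new state.
}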
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/OWNERS b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
index c5cf137f53c..26ef3e881f2 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
@@ -19,6 +19,13 @@
namespace webrtc {
namespace {
+struct ParticipantFramePair {
+ MixerParticipant* participant;
+ AudioFrame* audioFrame;
+};
+
+typedef std::list<ParticipantFramePair*> ParticipantFramePairList;
+
// Mix |frame| into |mixed_frame|, with saturation protection and upmixing.
// These effects are applied to |frame| itself prior to mixing. Assumes that
// |mixed_frame| always has at least as many channels as |frame|. Supports
@@ -40,20 +47,18 @@ void MixFrames(AudioFrame* mixed_frame, AudioFrame* frame) {
}
// Return the max number of channels from a |list| composed of AudioFrames.
-int MaxNumChannels(const ListWrapper& list) {
- ListItem* item = list.First();
+int MaxNumChannels(const AudioFrameList* list) {
int max_num_channels = 1;
- while (item) {
- AudioFrame* frame = static_cast<AudioFrame*>(item->GetItem());
- max_num_channels = std::max(max_num_channels, frame->num_channels_);
- item = list.Next(item);
+ for (AudioFrameList::const_iterator iter = list->begin();
+ iter != list->end();
+ ++iter) {
+ max_num_channels = std::max(max_num_channels, (*iter)->num_channels_);
}
return max_num_channels;
}
void SetParticipantStatistics(ParticipantStatistics* stats,
- const AudioFrame& frame)
-{
+ const AudioFrame& frame) {
stats->participant = frame.id_;
stats->level = 0; // TODO(andrew): to what should this be set?
}
@@ -61,58 +66,47 @@ void SetParticipantStatistics(ParticipantStatistics* stats,
} // namespace
MixerParticipant::MixerParticipant()
- : _mixHistory(new MixHistory())
-{
+ : _mixHistory(new MixHistory()) {
}
-MixerParticipant::~MixerParticipant()
-{
+MixerParticipant::~MixerParticipant() {
delete _mixHistory;
}
-int32_t MixerParticipant::IsMixed(bool& mixed) const
-{
+int32_t MixerParticipant::IsMixed(bool& mixed) const {
return _mixHistory->IsMixed(mixed);
}
MixHistory::MixHistory()
- : _isMixed(0)
-{
+ : _isMixed(0) {
}
-MixHistory::~MixHistory()
-{
+MixHistory::~MixHistory() {
}
-int32_t MixHistory::IsMixed(bool& mixed) const
-{
+int32_t MixHistory::IsMixed(bool& mixed) const {
mixed = _isMixed;
return 0;
}
-int32_t MixHistory::WasMixed(bool& wasMixed) const
-{
+int32_t MixHistory::WasMixed(bool& wasMixed) const {
// Was mixed is the same as is mixed depending on perspective. This function
// is for the perspective of AudioConferenceMixerImpl.
return IsMixed(wasMixed);
}
-int32_t MixHistory::SetIsMixed(const bool mixed)
-{
+int32_t MixHistory::SetIsMixed(const bool mixed) {
_isMixed = mixed;
return 0;
}
-void MixHistory::ResetMixedStatus()
-{
+void MixHistory::ResetMixedStatus() {
_isMixed = false;
}
-AudioConferenceMixer* AudioConferenceMixer::Create(int id)
-{
+AudioConferenceMixer* AudioConferenceMixer::Create(int id) {
AudioConferenceMixerImpl* mixer = new AudioConferenceMixerImpl(id);
- if(!mixer->Init())
- {
+ if(!mixer->Init()) {
delete mixer;
return NULL;
}
@@ -140,11 +134,9 @@ AudioConferenceMixerImpl::AudioConferenceMixerImpl(int id)
_timeStamp(0),
_timeScheduler(kProcessPeriodicityInMs),
_mixedAudioLevel(),
- _processCalls(0)
-{}
+ _processCalls(0) {}
-bool AudioConferenceMixerImpl::Init()
-{
+bool AudioConferenceMixerImpl::Init() {
_crit.reset(CriticalSectionWrapper::CreateCriticalSection());
if (_crit.get() == NULL)
return false;
@@ -153,8 +145,10 @@ bool AudioConferenceMixerImpl::Init()
if(_cbCrit.get() == NULL)
return false;
- _limiter.reset(AudioProcessing::Create(_id));
- if(_limiter.get() == NULL)
+ Config config;
+ config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
+ _limiter.reset(AudioProcessing::Create(config));
+ if(!_limiter.get())
return false;
MemoryPool<AudioFrame>::CreateMemoryPool(_audioFramePool,
@@ -165,10 +159,6 @@ bool AudioConferenceMixerImpl::Init()
if(SetOutputFrequency(kDefaultFrequency) == -1)
return false;
- // Assume mono.
- if (!SetNumLimiterChannels(1))
- return false;
-
if(_limiter->gain_control()->set_mode(GainControl::kFixedDigital) !=
_limiter->kNoError)
return false;
@@ -192,25 +182,21 @@ bool AudioConferenceMixerImpl::Init()
return true;
}
-AudioConferenceMixerImpl::~AudioConferenceMixerImpl()
-{
+AudioConferenceMixerImpl::~AudioConferenceMixerImpl() {
MemoryPool<AudioFrame>::DeleteMemoryPool(_audioFramePool);
assert(_audioFramePool == NULL);
}
-int32_t AudioConferenceMixerImpl::ChangeUniqueId(const int32_t id)
-{
+int32_t AudioConferenceMixerImpl::ChangeUniqueId(const int32_t id) {
_id = id;
return 0;
}
// Process should be called every kProcessPeriodicityInMs ms
-int32_t AudioConferenceMixerImpl::TimeUntilNextProcess()
-{
+int32_t AudioConferenceMixerImpl::TimeUntilNextProcess() {
int32_t timeUntilNextProcess = 0;
CriticalSectionScoped cs(_crit.get());
- if(_timeScheduler.TimeToNextUpdate(timeUntilNextProcess) != 0)
- {
+ if(_timeScheduler.TimeToNextUpdate(timeUntilNextProcess) != 0) {
WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
"failed in TimeToNextUpdate() call");
// Sanity check
@@ -220,9 +206,8 @@ int32_t AudioConferenceMixerImpl::TimeUntilNextProcess()
return timeUntilNextProcess;
}
-int32_t AudioConferenceMixerImpl::Process()
-{
- uint32_t remainingParticipantsAllowedToMix =
+int32_t AudioConferenceMixerImpl::Process() {
+ size_t remainingParticipantsAllowedToMix =
kMaximumAmountOfMixedParticipants;
{
CriticalSectionScoped cs(_crit.get());
@@ -233,9 +218,9 @@ int32_t AudioConferenceMixerImpl::Process()
_timeScheduler.UpdateScheduler();
}
- ListWrapper mixList;
- ListWrapper rampOutList;
- ListWrapper additionalFramesList;
+ AudioFrameList mixList;
+ AudioFrameList rampOutList;
+ AudioFrameList additionalFramesList;
std::map<int, MixerParticipant*> mixedParticipantsMap;
{
CriticalSectionScoped cs(_cbCrit.get());
@@ -246,41 +231,34 @@ int32_t AudioConferenceMixerImpl::Process()
// information.
// TODO(henrike): this is probably more appropriate to do in
// GetLowestMixingFrequency().
- if (lowFreq == 12000)
- {
+ if (lowFreq == 12000) {
lowFreq = 16000;
} else if (lowFreq == 24000) {
lowFreq = 32000;
}
- if(lowFreq <= 0)
- {
+ if(lowFreq <= 0) {
CriticalSectionScoped cs(_crit.get());
_processCalls--;
return 0;
- } else {
- switch(lowFreq)
- {
+ } else {
+ switch(lowFreq) {
case 8000:
- if(OutputFrequency() != kNbInHz)
- {
+ if(OutputFrequency() != kNbInHz) {
SetOutputFrequency(kNbInHz);
}
break;
case 16000:
- if(OutputFrequency() != kWbInHz)
- {
+ if(OutputFrequency() != kWbInHz) {
SetOutputFrequency(kWbInHz);
}
break;
case 32000:
- if(OutputFrequency() != kSwbInHz)
- {
+ if(OutputFrequency() != kSwbInHz) {
SetOutputFrequency(kSwbInHz);
}
break;
case 48000:
- if(OutputFrequency() != kFbInHz)
- {
+ if(OutputFrequency() != kFbInHz) {
SetOutputFrequency(kFbInHz);
}
break;
@@ -293,19 +271,17 @@ int32_t AudioConferenceMixerImpl::Process()
}
}
- UpdateToMix(mixList, rampOutList, &mixedParticipantsMap,
+ UpdateToMix(&mixList, &rampOutList, &mixedParticipantsMap,
remainingParticipantsAllowedToMix);
- GetAdditionalAudio(additionalFramesList);
+ GetAdditionalAudio(&additionalFramesList);
UpdateMixedStatus(mixedParticipantsMap);
- _scratchParticipantsToMixAmount =
- static_cast<uint32_t>(mixedParticipantsMap.size());
+ _scratchParticipantsToMixAmount = mixedParticipantsMap.size();
}
// Get an AudioFrame for mixing from the memory pool.
AudioFrame* mixedAudio = NULL;
- if(_audioFramePool->PopMemory(mixedAudio) == -1)
- {
+ if(_audioFramePool->PopMemory(mixedAudio) == -1) {
WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
"failed PopMemory() call");
assert(false);
@@ -322,12 +298,9 @@ int32_t AudioConferenceMixerImpl::Process()
// with an API instead of dynamically.
// Find the max channels over all mixing lists.
- const int num_mixed_channels = std::max(MaxNumChannels(mixList),
- std::max(MaxNumChannels(additionalFramesList),
- MaxNumChannels(rampOutList)));
-
- if (!SetNumLimiterChannels(num_mixed_channels))
- retval = -1;
+ const int num_mixed_channels = std::max(MaxNumChannels(&mixList),
+ std::max(MaxNumChannels(&additionalFramesList),
+ MaxNumChannels(&rampOutList)));
mixedAudio->UpdateFrame(-1, _timeStamp, NULL, 0, _outputFrequency,
AudioFrame::kNormalSpeech,
@@ -335,18 +308,15 @@ int32_t AudioConferenceMixerImpl::Process()
_timeStamp += _sampleSize;
- MixFromList(*mixedAudio, mixList);
- MixAnonomouslyFromList(*mixedAudio, additionalFramesList);
- MixAnonomouslyFromList(*mixedAudio, rampOutList);
+ MixFromList(*mixedAudio, &mixList);
+ MixAnonomouslyFromList(*mixedAudio, &additionalFramesList);
+ MixAnonomouslyFromList(*mixedAudio, &rampOutList);
- if(mixedAudio->samples_per_channel_ == 0)
- {
+ if(mixedAudio->samples_per_channel_ == 0) {
// Nothing was mixed, set the audio samples to silence.
mixedAudio->samples_per_channel_ = _sampleSize;
mixedAudio->Mute();
- }
- else
- {
+ } else {
// Only call the limiter if we have something to mix.
if(!LimitMixedAudio(*mixedAudio))
retval = -1;
@@ -355,12 +325,10 @@ int32_t AudioConferenceMixerImpl::Process()
_mixedAudioLevel.ComputeLevel(mixedAudio->data_,_sampleSize);
audioLevel = _mixedAudioLevel.GetLevel();
- if(_mixerStatusCb)
- {
+ if(_mixerStatusCb) {
_scratchVadPositiveParticipantsAmount = 0;
- UpdateVADPositiveParticipants(mixList);
- if(_amountOf10MsUntilNextCallback-- == 0)
- {
+ UpdateVADPositiveParticipants(&mixList);
+ if(_amountOf10MsUntilNextCallback-- == 0) {
_amountOf10MsUntilNextCallback = _amountOf10MsBetweenCallbacks;
timeForMixerCallback = true;
}
@@ -369,8 +337,7 @@ int32_t AudioConferenceMixerImpl::Process()
{
CriticalSectionScoped cs(_cbCrit.get());
- if(_mixReceiver != NULL)
- {
+ if(_mixReceiver != NULL) {
const AudioFrame** dummy = NULL;
_mixReceiver->NewMixedAudio(
_id,
@@ -380,12 +347,11 @@ int32_t AudioConferenceMixerImpl::Process()
}
if((_mixerStatusCallback != NULL) &&
- timeForMixerCallback)
- {
+ timeForMixerCallback) {
_mixerStatusCallback->MixedParticipants(
_id,
_scratchMixedParticipants,
- _scratchParticipantsToMixAmount);
+ static_cast<uint32_t>(_scratchParticipantsToMixAmount));
_mixerStatusCallback->VADPositiveParticipants(
_id,
@@ -397,9 +363,9 @@ int32_t AudioConferenceMixerImpl::Process()
// Reclaim all outstanding memory.
_audioFramePool->PushMemory(mixedAudio);
- ClearAudioFrameList(mixList);
- ClearAudioFrameList(rampOutList);
- ClearAudioFrameList(additionalFramesList);
+ ClearAudioFrameList(&mixList);
+ ClearAudioFrameList(&rampOutList);
+ ClearAudioFrameList(&additionalFramesList);
{
CriticalSectionScoped cs(_crit.get());
_processCalls--;
@@ -408,22 +374,18 @@ int32_t AudioConferenceMixerImpl::Process()
}
int32_t AudioConferenceMixerImpl::RegisterMixedStreamCallback(
- AudioMixerOutputReceiver& mixReceiver)
-{
+ AudioMixerOutputReceiver& mixReceiver) {
CriticalSectionScoped cs(_cbCrit.get());
- if(_mixReceiver != NULL)
- {
+ if(_mixReceiver != NULL) {
return -1;
}
_mixReceiver = &mixReceiver;
return 0;
}
-int32_t AudioConferenceMixerImpl::UnRegisterMixedStreamCallback()
-{
+int32_t AudioConferenceMixerImpl::UnRegisterMixedStreamCallback() {
CriticalSectionScoped cs(_cbCrit.get());
- if(_mixReceiver == NULL)
- {
+ if(_mixReceiver == NULL) {
return -1;
}
_mixReceiver = NULL;
@@ -431,16 +393,8 @@ int32_t AudioConferenceMixerImpl::UnRegisterMixedStreamCallback()
}
int32_t AudioConferenceMixerImpl::SetOutputFrequency(
- const Frequency frequency)
-{
+ const Frequency frequency) {
CriticalSectionScoped cs(_crit.get());
- const int error = _limiter->set_sample_rate_hz(frequency);
- if(error != _limiter->kNoError)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
- "Error from AudioProcessing: %d", error);
- return -1;
- }
_outputFrequency = frequency;
_sampleSize = (_outputFrequency*kProcessPeriodicityInMs) / 1000;
@@ -449,36 +403,15 @@ int32_t AudioConferenceMixerImpl::SetOutputFrequency(
}
AudioConferenceMixer::Frequency
-AudioConferenceMixerImpl::OutputFrequency() const
-{
+AudioConferenceMixerImpl::OutputFrequency() const {
CriticalSectionScoped cs(_crit.get());
return _outputFrequency;
}
-bool AudioConferenceMixerImpl::SetNumLimiterChannels(int numChannels)
-{
- if(_limiter->num_input_channels() != numChannels)
- {
- const int error = _limiter->set_num_channels(numChannels,
- numChannels);
- if(error != _limiter->kNoError)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
- "Error from AudioProcessing: %d", error);
- assert(false);
- return false;
- }
- }
-
- return true;
-}
-
int32_t AudioConferenceMixerImpl::RegisterMixerStatusCallback(
AudioMixerStatusReceiver& mixerStatusCallback,
- const uint32_t amountOf10MsBetweenCallbacks)
-{
- if(amountOf10MsBetweenCallbacks == 0)
- {
+ const uint32_t amountOf10MsBetweenCallbacks) {
+ if(amountOf10MsBetweenCallbacks == 0) {
WEBRTC_TRACE(
kTraceWarning,
kTraceAudioMixerServer,
@@ -488,8 +421,7 @@ int32_t AudioConferenceMixerImpl::RegisterMixerStatusCallback(
}
{
CriticalSectionScoped cs(_cbCrit.get());
- if(_mixerStatusCallback != NULL)
- {
+ if(_mixerStatusCallback != NULL) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"Mixer status callback already registered");
return -1;
@@ -505,8 +437,7 @@ int32_t AudioConferenceMixerImpl::RegisterMixerStatusCallback(
return 0;
}
-int32_t AudioConferenceMixerImpl::UnRegisterMixerStatusCallback()
-{
+int32_t AudioConferenceMixerImpl::UnRegisterMixerStatusCallback() {
{
CriticalSectionScoped cs(_crit.get());
if(!_mixerStatusCb)
@@ -526,38 +457,31 @@ int32_t AudioConferenceMixerImpl::UnRegisterMixerStatusCallback()
int32_t AudioConferenceMixerImpl::SetMixabilityStatus(
MixerParticipant& participant,
- const bool mixable)
-{
- if (!mixable)
- {
+ bool mixable) {
+ if (!mixable) {
// Anonymous participants are in a separate list. Make sure that the
// participant is in the _participantList if it is being mixed.
SetAnonymousMixabilityStatus(participant, false);
}
- uint32_t numMixedParticipants;
+ size_t numMixedParticipants;
{
CriticalSectionScoped cs(_cbCrit.get());
const bool isMixed =
- IsParticipantInList(participant,_participantList);
+ IsParticipantInList(participant, &_participantList);
// API must be called with a new state.
- if(!(mixable ^ isMixed))
- {
+ if(!(mixable ^ isMixed)) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"Mixable is aready %s",
isMixed ? "ON" : "off");
return -1;
}
bool success = false;
- if(mixable)
- {
- success = AddParticipantToList(participant,_participantList);
- }
- else
- {
- success = RemoveParticipantFromList(participant,_participantList);
+ if(mixable) {
+ success = AddParticipantToList(participant, &_participantList);
+ } else {
+ success = RemoveParticipantFromList(participant, &_participantList);
}
- if(!success)
- {
+ if(!success) {
WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
"failed to %s participant",
mixable ? "add" : "remove");
@@ -565,13 +489,12 @@ int32_t AudioConferenceMixerImpl::SetMixabilityStatus(
return -1;
}
- int numMixedNonAnonymous = _participantList.GetSize();
- if (numMixedNonAnonymous > kMaximumAmountOfMixedParticipants)
- {
+ size_t numMixedNonAnonymous = _participantList.size();
+ if (numMixedNonAnonymous > kMaximumAmountOfMixedParticipants) {
numMixedNonAnonymous = kMaximumAmountOfMixedParticipants;
}
- numMixedParticipants = numMixedNonAnonymous +
- _additionalParticipantList.GetSize();
+ numMixedParticipants =
+ numMixedNonAnonymous + _additionalParticipantList.size();
}
// A MixerParticipant was added or removed. Make sure the scratch
// buffer is updated if necessary.
@@ -583,40 +506,34 @@ int32_t AudioConferenceMixerImpl::SetMixabilityStatus(
int32_t AudioConferenceMixerImpl::MixabilityStatus(
MixerParticipant& participant,
- bool& mixable)
-{
+ bool& mixable) {
CriticalSectionScoped cs(_cbCrit.get());
- mixable = IsParticipantInList(participant, _participantList);
+ mixable = IsParticipantInList(participant, &_participantList);
return 0;
}
int32_t AudioConferenceMixerImpl::SetAnonymousMixabilityStatus(
- MixerParticipant& participant, const bool anonymous)
-{
+ MixerParticipant& participant, const bool anonymous) {
CriticalSectionScoped cs(_cbCrit.get());
- if(IsParticipantInList(participant, _additionalParticipantList))
- {
- if(anonymous)
- {
+ if(IsParticipantInList(participant, &_additionalParticipantList)) {
+ if(anonymous) {
return 0;
}
- if(!RemoveParticipantFromList(participant, _additionalParticipantList))
- {
+ if(!RemoveParticipantFromList(participant,
+ &_additionalParticipantList)) {
WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
"unable to remove participant from anonymous list");
assert(false);
return -1;
}
- return AddParticipantToList(participant, _participantList) ? 0 : -1;
+ return AddParticipantToList(participant, &_participantList) ? 0 : -1;
}
- if(!anonymous)
- {
+ if(!anonymous) {
return 0;
}
const bool mixable = RemoveParticipantFromList(participant,
- _participantList);
- if(!mixable)
- {
+ &_participantList);
+ if(!mixable) {
WEBRTC_TRACE(
kTraceWarning,
kTraceAudioMixerServer,
@@ -626,39 +543,33 @@ int32_t AudioConferenceMixerImpl::SetAnonymousMixabilityStatus(
// already registered.
return -1;
}
- return AddParticipantToList(participant, _additionalParticipantList) ?
+ return AddParticipantToList(participant, &_additionalParticipantList) ?
0 : -1;
}
int32_t AudioConferenceMixerImpl::AnonymousMixabilityStatus(
- MixerParticipant& participant, bool& mixable)
-{
+ MixerParticipant& participant, bool& mixable) {
CriticalSectionScoped cs(_cbCrit.get());
mixable = IsParticipantInList(participant,
- _additionalParticipantList);
+ &_additionalParticipantList);
return 0;
}
int32_t AudioConferenceMixerImpl::SetMinimumMixingFrequency(
- Frequency freq)
-{
+ Frequency freq) {
// Make sure that only allowed sampling frequencies are used. Use closest
// higher sampling frequency to avoid losing information.
- if (static_cast<int>(freq) == 12000)
- {
+ if (static_cast<int>(freq) == 12000) {
freq = kWbInHz;
} else if (static_cast<int>(freq) == 24000) {
freq = kSwbInHz;
}
if((freq == kNbInHz) || (freq == kWbInHz) || (freq == kSwbInHz) ||
- (freq == kLowestPossible))
- {
+ (freq == kLowestPossible)) {
_minimumMixingFreq=freq;
return 0;
- }
- else
- {
+ } else {
WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
"SetMinimumMixingFrequency incorrect frequency: %i",freq);
assert(false);
@@ -668,20 +579,17 @@ int32_t AudioConferenceMixerImpl::SetMinimumMixingFrequency(
// Check all AudioFrames that are to be mixed. The highest sampling frequency
// found is the lowest that can be used without losing information.
-int32_t AudioConferenceMixerImpl::GetLowestMixingFrequency()
-{
+int32_t AudioConferenceMixerImpl::GetLowestMixingFrequency() {
const int participantListFrequency =
- GetLowestMixingFrequencyFromList(_participantList);
+ GetLowestMixingFrequencyFromList(&_participantList);
const int anonymousListFrequency =
- GetLowestMixingFrequencyFromList(_additionalParticipantList);
+ GetLowestMixingFrequencyFromList(&_additionalParticipantList);
const int highestFreq =
(participantListFrequency > anonymousListFrequency) ?
participantListFrequency : anonymousListFrequency;
// Check if the user specified a lowest mixing frequency.
- if(_minimumMixingFreq != kLowestPossible)
- {
- if(_minimumMixingFreq > highestFreq)
- {
+ if(_minimumMixingFreq != kLowestPossible) {
+ if(_minimumMixingFreq > highestFreq) {
return _minimumMixingFreq;
}
}
@@ -689,60 +597,47 @@ int32_t AudioConferenceMixerImpl::GetLowestMixingFrequency()
}
int32_t AudioConferenceMixerImpl::GetLowestMixingFrequencyFromList(
- ListWrapper& mixList)
-{
+ MixerParticipantList* mixList) {
int32_t highestFreq = 8000;
- ListItem* item = mixList.First();
- while(item)
- {
- MixerParticipant* participant =
- static_cast<MixerParticipant*>(item->GetItem());
- const int32_t neededFrequency = participant->NeededFrequency(_id);
- if(neededFrequency > highestFreq)
- {
+ for (MixerParticipantList::iterator iter = mixList->begin();
+ iter != mixList->end();
+ ++iter) {
+ const int32_t neededFrequency = (*iter)->NeededFrequency(_id);
+ if(neededFrequency > highestFreq) {
highestFreq = neededFrequency;
}
- item = mixList.Next(item);
}
return highestFreq;
}
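
The loop above reduces a MixerParticipantList to the highest frequency any participant needs. The same reduction over a plain std::list<int> looks like this (a standalone sketch with illustrative names, not code from this module):

    #include <algorithm>
    #include <list>

    // Highest needed frequency wins; 8000 Hz is the floor, as in the mixer.
    int LowestUsableFrequency(const std::list<int>& needed_frequencies) {
      int highest = 8000;
      for (std::list<int>::const_iterator it = needed_frequencies.begin();
           it != needed_frequencies.end(); ++it) {
        highest = std::max(highest, *it);
      }
      return highest;
    }
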
void AudioConferenceMixerImpl::UpdateToMix(
- ListWrapper& mixList,
- ListWrapper& rampOutList,
+ AudioFrameList* mixList,
+ AudioFrameList* rampOutList,
std::map<int, MixerParticipant*>* mixParticipantList,
- uint32_t& maxAudioFrameCounter) {
+ size_t& maxAudioFrameCounter) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"UpdateToMix(mixList,rampOutList,mixParticipantList,%d)",
maxAudioFrameCounter);
- const uint32_t mixListStartSize = mixList.GetSize();
- ListWrapper activeList; // Elements are AudioFrames
+ const size_t mixListStartSize = mixList->size();
+ AudioFrameList activeList;
// Struct needed by the passive lists to keep track of which AudioFrame
// belongs to which MixerParticipant.
- struct ParticipantFramePair
- {
- MixerParticipant* participant;
- AudioFrame* audioFrame;
- };
- ListWrapper passiveWasNotMixedList; // Elements are MixerParticipant
- ListWrapper passiveWasMixedList; // Elements are MixerParticipant
- ListItem* item = _participantList.First();
- while(item)
- {
+ ParticipantFramePairList passiveWasNotMixedList;
+ ParticipantFramePairList passiveWasMixedList;
+ for (MixerParticipantList::iterator participant = _participantList.begin();
+ participant != _participantList.end();
+ ++participant) {
// Stop keeping track of passive participants if there are already
        // enough participants available (they won't be mixed anyway).
bool mustAddToPassiveList = (maxAudioFrameCounter >
- (activeList.GetSize() +
- passiveWasMixedList.GetSize() +
- passiveWasNotMixedList.GetSize()));
+ (activeList.size() +
+ passiveWasMixedList.size() +
+ passiveWasNotMixedList.size()));
- MixerParticipant* participant = static_cast<MixerParticipant*>(
- item->GetItem());
bool wasMixed = false;
- participant->_mixHistory->WasMixed(wasMixed);
+ (*participant)->_mixHistory->WasMixed(wasMixed);
AudioFrame* audioFrame = NULL;
- if(_audioFramePool->PopMemory(audioFrame) == -1)
- {
+ if(_audioFramePool->PopMemory(audioFrame) == -1) {
WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
"failed PopMemory() call");
assert(false);
@@ -750,56 +645,51 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
audioFrame->sample_rate_hz_ = _outputFrequency;
- if(participant->GetAudioFrame(_id,*audioFrame) != 0)
- {
+ if((*participant)->GetAudioFrame(_id,*audioFrame) != 0) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"failed to GetAudioFrame() from participant");
_audioFramePool->PushMemory(audioFrame);
- item = _participantList.Next(item);
continue;
}
+ if (_participantList.size() != 1) {
+ // TODO(wu): Issue 3390, add support for multiple participants case.
+ audioFrame->ntp_time_ms_ = -1;
+ }
+
// TODO(henrike): this assert triggers in some test cases where SRTP is
// used which prevents NetEQ from making a VAD. Temporarily disable this
// assert until the problem is fixed on a higher level.
// assert(audioFrame->vad_activity_ != AudioFrame::kVadUnknown);
- if (audioFrame->vad_activity_ == AudioFrame::kVadUnknown)
- {
+ if (audioFrame->vad_activity_ == AudioFrame::kVadUnknown) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"invalid VAD state from participant");
}
- if(audioFrame->vad_activity_ == AudioFrame::kVadActive)
- {
- if(!wasMixed)
- {
+ if(audioFrame->vad_activity_ == AudioFrame::kVadActive) {
+ if(!wasMixed) {
RampIn(*audioFrame);
}
- if(activeList.GetSize() >= maxAudioFrameCounter)
- {
+ if(activeList.size() >= maxAudioFrameCounter) {
// There are already more active participants than should be
// mixed. Only keep the ones with the highest energy.
- ListItem* replaceItem = NULL;
+ AudioFrameList::iterator replaceItem;
CalculateEnergy(*audioFrame);
uint32_t lowestEnergy = audioFrame->energy_;
- ListItem* activeItem = activeList.First();
- while(activeItem)
- {
- AudioFrame* replaceFrame = static_cast<AudioFrame*>(
- activeItem->GetItem());
- CalculateEnergy(*replaceFrame);
- if(replaceFrame->energy_ < lowestEnergy)
- {
- replaceItem = activeItem;
- lowestEnergy = replaceFrame->energy_;
+ bool found_replace_item = false;
+ for (AudioFrameList::iterator iter = activeList.begin();
+ iter != activeList.end();
+ ++iter) {
+ CalculateEnergy(**iter);
+ if((*iter)->energy_ < lowestEnergy) {
+ replaceItem = iter;
+ lowestEnergy = (*iter)->energy_;
+ found_replace_item = true;
}
- activeItem = activeList.Next(activeItem);
}
- if(replaceItem != NULL)
- {
- AudioFrame* replaceFrame = static_cast<AudioFrame*>(
- replaceItem->GetItem());
+ if(found_replace_item) {
+ AudioFrame* replaceFrame = *replaceItem;
bool replaceWasMixed = false;
std::map<int, MixerParticipant*>::iterator it =
@@ -812,255 +702,219 @@ void AudioConferenceMixerImpl::UpdateToMix(
it->second->_mixHistory->WasMixed(replaceWasMixed);
mixParticipantList->erase(replaceFrame->id_);
- activeList.Erase(replaceItem);
+ activeList.erase(replaceItem);
- activeList.PushFront(static_cast<void*>(audioFrame));
- (*mixParticipantList)[audioFrame->id_] = participant;
+ activeList.push_front(audioFrame);
+ (*mixParticipantList)[audioFrame->id_] = *participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
if (replaceWasMixed) {
RampOut(*replaceFrame);
- rampOutList.PushBack(static_cast<void*>(replaceFrame));
- assert(rampOutList.GetSize() <=
+ rampOutList->push_back(replaceFrame);
+ assert(rampOutList->size() <=
kMaximumAmountOfMixedParticipants);
} else {
_audioFramePool->PushMemory(replaceFrame);
}
} else {
- if(wasMixed)
- {
+ if(wasMixed) {
RampOut(*audioFrame);
- rampOutList.PushBack(static_cast<void*>(audioFrame));
- assert(rampOutList.GetSize() <=
+ rampOutList->push_back(audioFrame);
+ assert(rampOutList->size() <=
kMaximumAmountOfMixedParticipants);
} else {
_audioFramePool->PushMemory(audioFrame);
}
}
} else {
- activeList.PushFront(static_cast<void*>(audioFrame));
- (*mixParticipantList)[audioFrame->id_] = participant;
+ activeList.push_front(audioFrame);
+ (*mixParticipantList)[audioFrame->id_] = *participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
}
} else {
- if(wasMixed)
- {
+ if(wasMixed) {
ParticipantFramePair* pair = new ParticipantFramePair;
pair->audioFrame = audioFrame;
- pair->participant = participant;
- passiveWasMixedList.PushBack(static_cast<void*>(pair));
+ pair->participant = *participant;
+ passiveWasMixedList.push_back(pair);
} else if(mustAddToPassiveList) {
RampIn(*audioFrame);
ParticipantFramePair* pair = new ParticipantFramePair;
pair->audioFrame = audioFrame;
- pair->participant = participant;
- passiveWasNotMixedList.PushBack(static_cast<void*>(pair));
+ pair->participant = *participant;
+ passiveWasNotMixedList.push_back(pair);
} else {
_audioFramePool->PushMemory(audioFrame);
}
}
- item = _participantList.Next(item);
}
- assert(activeList.GetSize() <= maxAudioFrameCounter);
+ assert(activeList.size() <= maxAudioFrameCounter);
// At this point it is known which participants should be mixed. Transfer
    // this information to this function's output parameters.
- while(!activeList.Empty())
- {
- ListItem* mixItem = activeList.First();
- mixList.PushBack(mixItem->GetItem());
- activeList.Erase(mixItem);
+ for (AudioFrameList::iterator iter = activeList.begin();
+ iter != activeList.end();
+ ++iter) {
+ mixList->push_back(*iter);
}
+ activeList.clear();
// Always mix a constant number of AudioFrames. If there aren't enough
    // active participants, mix passive ones, starting with those that were
    // mixed last iteration.
- while(!passiveWasMixedList.Empty())
- {
- ListItem* mixItem = passiveWasMixedList.First();
- ParticipantFramePair* pair = static_cast<ParticipantFramePair*>(
- mixItem->GetItem());
- if(mixList.GetSize() < maxAudioFrameCounter + mixListStartSize)
- {
- mixList.PushBack(pair->audioFrame);
- (*mixParticipantList)[pair->audioFrame->id_] =
- pair->participant;
+ for (ParticipantFramePairList::iterator iter = passiveWasMixedList.begin();
+ iter != passiveWasMixedList.end();
+ ++iter) {
+ if(mixList->size() < maxAudioFrameCounter + mixListStartSize) {
+ mixList->push_back((*iter)->audioFrame);
+ (*mixParticipantList)[(*iter)->audioFrame->id_] =
+ (*iter)->participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
+ } else {
+ _audioFramePool->PushMemory((*iter)->audioFrame);
}
- else
- {
- _audioFramePool->PushMemory(pair->audioFrame);
- }
- delete pair;
- passiveWasMixedList.Erase(mixItem);
+ delete *iter;
}
// And finally the ones that have not been mixed for a while.
- while(!passiveWasNotMixedList.Empty())
- {
- ListItem* mixItem = passiveWasNotMixedList.First();
- ParticipantFramePair* pair = static_cast<ParticipantFramePair*>(
- mixItem->GetItem());
- if(mixList.GetSize() < maxAudioFrameCounter + mixListStartSize)
- {
- mixList.PushBack(pair->audioFrame);
- (*mixParticipantList)[pair->audioFrame->id_] = pair->participant;
+ for (ParticipantFramePairList::iterator iter =
+ passiveWasNotMixedList.begin();
+ iter != passiveWasNotMixedList.end();
+ ++iter) {
+ if(mixList->size() < maxAudioFrameCounter + mixListStartSize) {
+ mixList->push_back((*iter)->audioFrame);
+ (*mixParticipantList)[(*iter)->audioFrame->id_] =
+ (*iter)->participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
+ } else {
+ _audioFramePool->PushMemory((*iter)->audioFrame);
}
- else
- {
- _audioFramePool->PushMemory(pair->audioFrame);
- }
- delete pair;
- passiveWasNotMixedList.Erase(mixItem);
+ delete *iter;
}
- assert(maxAudioFrameCounter + mixListStartSize >= mixList.GetSize());
- maxAudioFrameCounter += mixListStartSize - mixList.GetSize();
+ assert(maxAudioFrameCounter + mixListStartSize >= mixList->size());
+ maxAudioFrameCounter += mixListStartSize - mixList->size();
}
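
The heart of UpdateToMix is the eviction rule above: once activeList is full, a new active frame only enters the mix by replacing the lowest-energy frame already in it. A standalone sketch of that rule (Frame and AddKeepingLoudest are illustrative names; the real code also ramps out and recycles the evicted frame):

    #include <cstddef>
    #include <list>

    struct Frame { unsigned energy; };

    void AddKeepingLoudest(std::list<Frame*>* active, Frame* candidate,
                           std::size_t max_frames) {
      if (active->size() < max_frames) {
        active->push_front(candidate);
        return;
      }
      // Find the quietest frame currently selected.
      std::list<Frame*>::iterator lowest = active->begin();
      for (std::list<Frame*>::iterator it = active->begin();
           it != active->end(); ++it) {
        if ((*it)->energy < (*lowest)->energy) lowest = it;
      }
      // Replace it only if the candidate is louder.
      if (candidate->energy > (*lowest)->energy) *lowest = candidate;
    }
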
void AudioConferenceMixerImpl::GetAdditionalAudio(
- ListWrapper& additionalFramesList)
-{
+ AudioFrameList* additionalFramesList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"GetAdditionalAudio(additionalFramesList)");
- ListItem* item = _additionalParticipantList.First();
- while(item)
- {
- // The GetAudioFrame() callback may remove the current item. Store the
- // next item just in case that happens.
- ListItem* nextItem = _additionalParticipantList.Next(item);
-
- MixerParticipant* participant = static_cast<MixerParticipant*>(
- item->GetItem());
+ // The GetAudioFrame() callback may result in the participant being removed
+    // from _additionalParticipantList. If that happens, it will invalidate any
+ // iterators. Create a copy of the participants list such that the list of
+ // participants can be traversed safely.
+ MixerParticipantList additionalParticipantList;
+ additionalParticipantList.insert(additionalParticipantList.begin(),
+ _additionalParticipantList.begin(),
+ _additionalParticipantList.end());
+
+ for (MixerParticipantList::iterator participant =
+ additionalParticipantList.begin();
+ participant != additionalParticipantList.end();
+ ++participant) {
AudioFrame* audioFrame = NULL;
- if(_audioFramePool->PopMemory(audioFrame) == -1)
- {
+ if(_audioFramePool->PopMemory(audioFrame) == -1) {
WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
"failed PopMemory() call");
assert(false);
return;
}
audioFrame->sample_rate_hz_ = _outputFrequency;
- if(participant->GetAudioFrame(_id, *audioFrame) != 0)
- {
+ if((*participant)->GetAudioFrame(_id, *audioFrame) != 0) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"failed to GetAudioFrame() from participant");
_audioFramePool->PushMemory(audioFrame);
- item = nextItem;
continue;
}
- if(audioFrame->samples_per_channel_ == 0)
- {
+ if(audioFrame->samples_per_channel_ == 0) {
// Empty frame. Don't use it.
_audioFramePool->PushMemory(audioFrame);
- item = nextItem;
continue;
}
- additionalFramesList.PushBack(static_cast<void*>(audioFrame));
- item = nextItem;
+ additionalFramesList->push_back(audioFrame);
}
}
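
The comment above is the key detail: GetAudioFrame() can re-enter the mixer and unregister the participant, so iteration happens over a snapshot rather than over the member list itself. The same pattern in isolation (illustrative names):

    #include <list>

    typedef std::list<int*> HandleList;

    void VisitAll(HandleList* original, void (*callback)(int*)) {
      // Copy first; the callback may erase entries from |original|, which
      // would invalidate iterators into it but not into |snapshot|.
      HandleList snapshot(original->begin(), original->end());
      for (HandleList::iterator it = snapshot.begin(); it != snapshot.end();
           ++it) {
        callback(*it);
      }
    }
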
void AudioConferenceMixerImpl::UpdateMixedStatus(
- std::map<int, MixerParticipant*>& mixedParticipantsMap)
-{
+ std::map<int, MixerParticipant*>& mixedParticipantsMap) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"UpdateMixedStatus(mixedParticipantsMap)");
assert(mixedParticipantsMap.size() <= kMaximumAmountOfMixedParticipants);
// Loop through all participants. If they are in the mix map they
// were mixed.
- ListItem* participantItem = _participantList.First();
- while(participantItem != NULL)
- {
+ for (MixerParticipantList::iterator participant = _participantList.begin();
+ participant != _participantList.end();
+ ++participant) {
bool isMixed = false;
- MixerParticipant* participant =
- static_cast<MixerParticipant*>(participantItem->GetItem());
-
for (std::map<int, MixerParticipant*>::iterator it =
mixedParticipantsMap.begin();
it != mixedParticipantsMap.end();
++it) {
- if (it->second == participant) {
+ if (it->second == *participant) {
isMixed = true;
break;
}
}
- participant->_mixHistory->SetIsMixed(isMixed);
- participantItem = _participantList.Next(participantItem);
+ (*participant)->_mixHistory->SetIsMixed(isMixed);
}
}
-void AudioConferenceMixerImpl::ClearAudioFrameList(ListWrapper& audioFrameList)
-{
+void AudioConferenceMixerImpl::ClearAudioFrameList(
+ AudioFrameList* audioFrameList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"ClearAudioFrameList(audioFrameList)");
- ListItem* item = audioFrameList.First();
- while(item)
- {
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
- _audioFramePool->PushMemory(audioFrame);
- audioFrameList.Erase(item);
- item = audioFrameList.First();
+ for (AudioFrameList::iterator iter = audioFrameList->begin();
+ iter != audioFrameList->end();
+ ++iter) {
+ _audioFramePool->PushMemory(*iter);
}
+ audioFrameList->clear();
}
void AudioConferenceMixerImpl::UpdateVADPositiveParticipants(
- ListWrapper& mixList)
-{
+ AudioFrameList* mixList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"UpdateVADPositiveParticipants(mixList)");
- ListItem* item = mixList.First();
- while(item != NULL)
- {
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
- CalculateEnergy(*audioFrame);
- if(audioFrame->vad_activity_ == AudioFrame::kVadActive)
- {
+ for (AudioFrameList::iterator iter = mixList->begin();
+ iter != mixList->end();
+ ++iter) {
+ CalculateEnergy(**iter);
+ if((*iter)->vad_activity_ == AudioFrame::kVadActive) {
_scratchVadPositiveParticipants[
_scratchVadPositiveParticipantsAmount].participant =
- audioFrame->id_;
+ (*iter)->id_;
// TODO(andrew): to what should this be set?
_scratchVadPositiveParticipants[
_scratchVadPositiveParticipantsAmount].level = 0;
_scratchVadPositiveParticipantsAmount++;
}
- item = mixList.Next(item);
}
}
bool AudioConferenceMixerImpl::IsParticipantInList(
MixerParticipant& participant,
- ListWrapper& participantList)
-{
+ MixerParticipantList* participantList) const {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"IsParticipantInList(participant,participantList)");
- ListItem* item = participantList.First();
- while(item != NULL)
- {
- MixerParticipant* rhsParticipant =
- static_cast<MixerParticipant*>(item->GetItem());
- if(&participant == rhsParticipant)
- {
+ for (MixerParticipantList::const_iterator iter = participantList->begin();
+ iter != participantList->end();
+ ++iter) {
+ if(&participant == *iter) {
return true;
}
- item = participantList.Next(item);
}
return false;
}
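
The membership test above is equivalent to a std::find over the list; a minimal sketch with a generic element type:

    #include <algorithm>
    #include <list>

    bool Contains(const std::list<const void*>& items, const void* item) {
      return std::find(items.begin(), items.end(), item) != items.end();
    }
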
bool AudioConferenceMixerImpl::AddParticipantToList(
MixerParticipant& participant,
- ListWrapper& participantList)
-{
+ MixerParticipantList* participantList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"AddParticipantToList(participant, participantList)");
- if(participantList.PushBack(static_cast<void*>(&participant)) == -1)
- {
- return false;
- }
+ participantList->push_back(&participant);
// Make sure that the mixed status is correct for new MixerParticipant.
participant._mixHistory->ResetMixedStatus();
return true;
@@ -1068,52 +922,53 @@ bool AudioConferenceMixerImpl::AddParticipantToList(
bool AudioConferenceMixerImpl::RemoveParticipantFromList(
MixerParticipant& participant,
- ListWrapper& participantList)
-{
+ MixerParticipantList* participantList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"RemoveParticipantFromList(participant, participantList)");
- ListItem* item = participantList.First();
- while(item)
- {
- if(item->GetItem() == &participant)
- {
- participantList.Erase(item);
+ for (MixerParticipantList::iterator iter = participantList->begin();
+ iter != participantList->end();
+ ++iter) {
+ if(*iter == &participant) {
+ participantList->erase(iter);
// Participant is no longer mixed, reset to default.
participant._mixHistory->ResetMixedStatus();
return true;
}
- item = participantList.Next(item);
}
return false;
}
int32_t AudioConferenceMixerImpl::MixFromList(
AudioFrame& mixedAudio,
- const ListWrapper& audioFrameList)
-{
+ const AudioFrameList* audioFrameList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"MixFromList(mixedAudio, audioFrameList)");
- uint32_t position = 0;
- ListItem* item = audioFrameList.First();
- if(item == NULL)
- {
- return 0;
- }
+ if(audioFrameList->empty()) return 0;
- if(_numMixedParticipants == 1)
- {
+ uint32_t position = 0;
+ if(_numMixedParticipants == 1) {
// No mixing required here; skip the saturation protection.
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+ AudioFrame* audioFrame = audioFrameList->front();
mixedAudio.CopyFrom(*audioFrame);
SetParticipantStatistics(&_scratchMixedParticipants[position],
*audioFrame);
return 0;
}
- while(item != NULL)
- {
- if(position >= kMaximumAmountOfMixedParticipants)
- {
+ if (audioFrameList->size() == 1) {
+ mixedAudio.timestamp_ = audioFrameList->front()->timestamp_;
+ mixedAudio.elapsed_time_ms_ = audioFrameList->front()->elapsed_time_ms_;
+ } else {
+ // TODO(wu): Issue 3390.
+    // Audio frame timestamp is only supported in the one-channel case.
+ mixedAudio.timestamp_ = 0;
+ mixedAudio.elapsed_time_ms_ = -1;
+ }
+
+ for (AudioFrameList::const_iterator iter = audioFrameList->begin();
+ iter != audioFrameList->end();
+ ++iter) {
+ if(position >= kMaximumAmountOfMixedParticipants) {
WEBRTC_TRACE(
kTraceMemory,
kTraceAudioMixerServer,
@@ -1124,14 +979,12 @@ int32_t AudioConferenceMixerImpl::MixFromList(
assert(false);
position = 0;
}
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
- MixFrames(&mixedAudio, audioFrame);
+ MixFrames(&mixedAudio, (*iter));
SetParticipantStatistics(&_scratchMixedParticipants[position],
- *audioFrame);
+ **iter);
position++;
- item = audioFrameList.Next(item);
}
return 0;
@@ -1140,35 +993,29 @@ int32_t AudioConferenceMixerImpl::MixFromList(
// TODO(andrew): consolidate this function with MixFromList.
int32_t AudioConferenceMixerImpl::MixAnonomouslyFromList(
AudioFrame& mixedAudio,
- const ListWrapper& audioFrameList)
-{
+ const AudioFrameList* audioFrameList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"MixAnonomouslyFromList(mixedAudio, audioFrameList)");
- ListItem* item = audioFrameList.First();
- if(item == NULL)
- return 0;
- if(_numMixedParticipants == 1)
- {
+ if(audioFrameList->empty()) return 0;
+
+ if(_numMixedParticipants == 1) {
// No mixing required here; skip the saturation protection.
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+ AudioFrame* audioFrame = audioFrameList->front();
mixedAudio.CopyFrom(*audioFrame);
return 0;
}
- while(item != NULL)
- {
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
- MixFrames(&mixedAudio, audioFrame);
- item = audioFrameList.Next(item);
+ for (AudioFrameList::const_iterator iter = audioFrameList->begin();
+ iter != audioFrameList->end();
+ ++iter) {
+ MixFrames(&mixedAudio, *iter);
}
return 0;
}
-bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio)
-{
- if(_numMixedParticipants == 1)
- {
+bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio) {
+ if(_numMixedParticipants == 1) {
return true;
}
@@ -1187,8 +1034,7 @@ bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio)
// negative value is undefined).
mixedAudio += mixedAudio;
- if(error != _limiter->kNoError)
- {
+ if(error != _limiter->kNoError) {
WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
"Error from AudioProcessing: %d", error);
assert(false);
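
The mixedAudio += mixedAudio above restores the headroom that was taken out before mixing, and addition is used because left-shifting a negative value is undefined. A sketch of the overall halve -> limit -> double idea, with the AudioProcessing limiter replaced by a plain clamp (illustrative only; not the mixer's actual frame handling):

    #include <stdint.h>

    namespace {
    int16_t ClampToInt16(int32_t v) {
      if (v > 32767) return 32767;
      if (v < -32768) return -32768;
      return static_cast<int16_t>(v);
    }
    }  // namespace

    void MixTwoWithHeadroom(const int16_t* a, const int16_t* b, int16_t* out,
                            int samples) {
      for (int i = 0; i < samples; ++i) {
        const int32_t mixed = a[i] / 2 + b[i] / 2;  // Halving keeps headroom.
        // Restore the level by addition; the clamp stands in for the limiter.
        out[i] = ClampToInt16(mixed + mixed);
      }
    }
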
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
index 737acbb0ba4..31dc71e5dce 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
+#include <list>
#include <map>
#include "webrtc/engine_configurations.h"
@@ -19,13 +20,15 @@
#include "webrtc/modules/audio_conference_mixer/source/memory_pool.h"
#include "webrtc/modules/audio_conference_mixer/source/time_scheduler.h"
#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
class AudioProcessing;
class CriticalSectionWrapper;
+typedef std::list<AudioFrame*> AudioFrameList;
+typedef std::list<MixerParticipant*> MixerParticipantList;
+
// Cheshire cat implementation of MixerParticipant's non virtual functions.
class MixHistory
{
@@ -74,7 +77,7 @@ public:
const uint32_t amountOf10MsBetweenCallbacks);
virtual int32_t UnRegisterMixerStatusCallback();
virtual int32_t SetMixabilityStatus(MixerParticipant& participant,
- const bool mixable);
+ bool mixable);
virtual int32_t MixabilityStatus(MixerParticipant& participant,
bool& mixable);
virtual int32_t SetMinimumMixingFrequency(Frequency freq);
@@ -89,10 +92,6 @@ private:
int32_t SetOutputFrequency(const Frequency frequency);
Frequency OutputFrequency() const;
- // Must be called whenever an audio frame indicates the number of channels
- // has changed.
- bool SetNumLimiterChannels(int numChannels);
-
    // Fills mixList with the AudioFrame pointers that should be used when
    // mixing. Fills mixParticipantList with ParticipantStatistics for the
    // participants whose AudioFrames are inside mixList.
@@ -102,18 +101,18 @@ private:
// used to be mixed but shouldn't be mixed any longer. These AudioFrames
// should be ramped out over this AudioFrame to avoid audio discontinuities.
void UpdateToMix(
- ListWrapper& mixList,
- ListWrapper& rampOutList,
+ AudioFrameList* mixList,
+ AudioFrameList* rampOutList,
std::map<int, MixerParticipant*>* mixParticipantList,
- uint32_t& maxAudioFrameCounter);
+ size_t& maxAudioFrameCounter);
// Return the lowest mixing frequency that can be used without having to
// downsample any audio.
int32_t GetLowestMixingFrequency();
- int32_t GetLowestMixingFrequencyFromList(ListWrapper& mixList);
+ int32_t GetLowestMixingFrequencyFromList(MixerParticipantList* mixList);
// Return the AudioFrames that should be mixed anonymously.
- void GetAdditionalAudio(ListWrapper& additionalFramesList);
+ void GetAdditionalAudio(AudioFrameList* additionalFramesList);
// Update the MixHistory of all MixerParticipants. mixedParticipantsList
// should contain a map of MixerParticipants that have been mixed.
@@ -121,44 +120,44 @@ private:
std::map<int, MixerParticipant*>& mixedParticipantsList);
// Clears audioFrameList and reclaims all memory associated with it.
- void ClearAudioFrameList(ListWrapper& audioFrameList);
+ void ClearAudioFrameList(AudioFrameList* audioFrameList);
// Update the list of MixerParticipants who have a positive VAD. mixList
// should be a list of AudioFrames
void UpdateVADPositiveParticipants(
- ListWrapper& mixList);
+ AudioFrameList* mixList);
// This function returns true if it finds the MixerParticipant in the
// specified list of MixerParticipants.
bool IsParticipantInList(
MixerParticipant& participant,
- ListWrapper& participantList);
+ MixerParticipantList* participantList) const;
// Add/remove the MixerParticipant to the specified
// MixerParticipant list.
bool AddParticipantToList(
MixerParticipant& participant,
- ListWrapper& participantList);
+ MixerParticipantList* participantList);
bool RemoveParticipantFromList(
MixerParticipant& removeParticipant,
- ListWrapper& participantList);
+ MixerParticipantList* participantList);
// Mix the AudioFrames stored in audioFrameList into mixedAudio.
int32_t MixFromList(
AudioFrame& mixedAudio,
- const ListWrapper& audioFrameList);
+ const AudioFrameList* audioFrameList);
// Mix the AudioFrames stored in audioFrameList into mixedAudio. No
// record will be kept of this mix (e.g. the corresponding MixerParticipants
    // will not be marked as IsMixed()).
int32_t MixAnonomouslyFromList(AudioFrame& mixedAudio,
- const ListWrapper& audioFrameList);
+ const AudioFrameList* audioFrameList);
bool LimitMixedAudio(AudioFrame& mixedAudio);
// Scratch memory
// Note that the scratch memory may only be touched in the scope of
// Process().
- uint32_t _scratchParticipantsToMixAmount;
+ size_t _scratchParticipantsToMixAmount;
ParticipantStatistics _scratchMixedParticipants[
kMaximumAmountOfMixedParticipants];
uint32_t _scratchVadPositiveParticipantsAmount;
@@ -176,9 +175,9 @@ private:
AudioMixerOutputReceiver* _mixReceiver;
AudioMixerStatusReceiver* _mixerStatusCallback;
- uint32_t _amountOf10MsBetweenCallbacks;
- uint32_t _amountOf10MsUntilNextCallback;
- bool _mixerStatusCb;
+ uint32_t _amountOf10MsBetweenCallbacks;
+ uint32_t _amountOf10MsUntilNextCallback;
+ bool _mixerStatusCb;
// The current sample frequency and sample size when mixing.
Frequency _outputFrequency;
@@ -188,10 +187,11 @@ private:
MemoryPool<AudioFrame>* _audioFramePool;
    // List of all participants. Note that all lists are disjoint.
- ListWrapper _participantList; // May be mixed.
- ListWrapper _additionalParticipantList; // Always mixed, anonomously.
+ MixerParticipantList _participantList; // May be mixed.
+    // Always mixed, anonymously.
+ MixerParticipantList _additionalParticipantList;
- uint32_t _numMixedParticipants;
+ size_t _numMixedParticipants;
uint32_t _timeStamp;
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
index 679d608f50f..3dce5c8bea6 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
@@ -41,10 +41,6 @@ const int rampSize = sizeof(rampArray)/sizeof(rampArray[0]);
namespace webrtc {
void CalculateEnergy(AudioFrame& audioFrame)
{
- if(audioFrame.energy_ != 0xffffffff)
- {
- return;
- }
audioFrame.energy_ = 0;
for(int position = 0; position < audioFrame.samples_per_channel_;
position++)
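
Dropping the early return means energy_ is recomputed on every call instead of reusing the cached value, which matches how the mixer now calls CalculateEnergy() repeatedly in UpdateToMix and UpdateVADPositiveParticipants. The accumulation itself is not shown in this hunk; a typical sum-of-squares energy over the samples would look like this (a sketch, not necessarily the exact formula used):

    #include <stdint.h>

    uint32_t SumOfSquares(const int16_t* samples, int samples_per_channel) {
      uint32_t energy = 0;
      for (int i = 0; i < samples_per_channel; ++i) {
        const int32_t s = samples[i];
        energy += static_cast<uint32_t>(s * s);
      }
      return energy;
    }
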
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h
index 6d4dccf8ae3..04e7cd52254 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h
@@ -12,9 +12,9 @@
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_GENERIC_H_
#include <assert.h>
+#include <list>
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -40,7 +40,7 @@ private:
bool _terminate;
- ListWrapper _memoryPool;
+ std::list<MemoryType*> _memoryPool;
uint32_t _initialPoolSize;
uint32_t _createdMemory;
@@ -51,7 +51,6 @@ template<class MemoryType>
MemoryPoolImpl<MemoryType>::MemoryPoolImpl(int32_t initialPoolSize)
: _crit(CriticalSectionWrapper::CreateCriticalSection()),
_terminate(false),
- _memoryPool(),
_initialPoolSize(initialPoolSize),
_createdMemory(0),
_outstandingMemory(0)
@@ -76,20 +75,17 @@ int32_t MemoryPoolImpl<MemoryType>::PopMemory(MemoryType*& memory)
memory = NULL;
return -1;
}
- ListItem* item = _memoryPool.First();
- if(item == NULL)
- {
+ if (_memoryPool.empty()) {
// _memoryPool empty create new memory.
CreateMemory(_initialPoolSize);
- item = _memoryPool.First();
- if(item == NULL)
+ if(_memoryPool.empty())
{
memory = NULL;
return -1;
}
}
- memory = static_cast<MemoryType*>(item->GetItem());
- _memoryPool.Erase(item);
+ memory = _memoryPool.front();
+ _memoryPool.pop_front();
_outstandingMemory++;
return 0;
}
@@ -103,7 +99,7 @@ int32_t MemoryPoolImpl<MemoryType>::PushMemory(MemoryType*& memory)
}
CriticalSectionScoped cs(_crit);
_outstandingMemory--;
- if(_memoryPool.GetSize() > (_initialPoolSize << 1))
+ if(_memoryPool.size() > (_initialPoolSize << 1))
{
// Reclaim memory if less than half of the pool is unused.
_createdMemory--;
@@ -111,7 +107,7 @@ int32_t MemoryPoolImpl<MemoryType>::PushMemory(MemoryType*& memory)
memory = NULL;
return 0;
}
- _memoryPool.PushBack(static_cast<void*>(memory));
+ _memoryPool.push_back(memory);
memory = NULL;
return 0;
}
@@ -127,21 +123,15 @@ template<class MemoryType>
int32_t MemoryPoolImpl<MemoryType>::Terminate()
{
CriticalSectionScoped cs(_crit);
- assert(_createdMemory == _outstandingMemory + _memoryPool.GetSize());
+ assert(_createdMemory == _outstandingMemory + _memoryPool.size());
_terminate = true;
// Reclaim all memory.
while(_createdMemory > 0)
{
- ListItem* item = _memoryPool.First();
- if(item == NULL)
- {
- // There is memory that hasn't been returned yet.
- return -1;
- }
- MemoryType* memory = static_cast<MemoryType*>(item->GetItem());
+ MemoryType* memory = _memoryPool.front();
+ _memoryPool.pop_front();
delete memory;
- _memoryPool.Erase(item);
_createdMemory--;
}
return 0;
@@ -158,7 +148,7 @@ int32_t MemoryPoolImpl<MemoryType>::CreateMemory(
{
return -1;
}
- _memoryPool.PushBack(static_cast<void*>(memory));
+ _memoryPool.push_back(memory);
_createdMemory++;
}
return 0;
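
With ListWrapper gone, the pool is a plain std::list of free blocks: PopMemory takes from the front (growing the pool when it is empty), and PushMemory returns the block to the back or frees it outright when the pool is oversized. A minimal sketch of the same flow without the locking or the pool-size policy (TinyPool is an illustrative name):

    #include <list>

    template <class T>
    class TinyPool {
     public:
      ~TinyPool() {
        while (!free_.empty()) {
          delete free_.front();
          free_.pop_front();
        }
      }
      T* Pop() {
        if (free_.empty()) free_.push_back(new T());
        T* memory = free_.front();
        free_.pop_front();
        return memory;
      }
      void Push(T* memory) { free_.push_back(memory); }
     private:
      std::list<T*> free_;
    };
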
diff --git a/chromium/third_party/webrtc/modules/audio_device/Android.mk b/chromium/third_party/webrtc/modules/audio_device/Android.mk
index affa5e1c2d4..4b3b9124d7b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/Android.mk
+++ b/chromium/third_party/webrtc/modules/audio_device/Android.mk
@@ -25,7 +25,8 @@ LOCAL_SRC_FILES := \
android/audio_device_android_opensles.cc \
android/audio_device_utility_android.cc \
dummy/audio_device_utility_dummy.cc \
- dummy/audio_device_dummy.cc
+ dummy/audio_device_dummy.cc \
+ dummy/file_audio_device.cc
# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
diff --git a/chromium/third_party/webrtc/modules/audio_device/OWNERS b/chromium/third_party/webrtc/modules/audio_device/OWNERS
index a07ced37b30..7bb3cd52378 100644
--- a/chromium/third_party/webrtc/modules/audio_device/OWNERS
+++ b/chromium/third_party/webrtc/modules/audio_device/OWNERS
@@ -2,3 +2,10 @@ henrikg@webrtc.org
henrika@webrtc.org
niklas.enbom@webrtc.org
xians@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're making
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/audio_device_template.h b/chromium/third_party/webrtc/modules/audio_device/android/audio_device_template.h
index 28112698259..f851f4703d2 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/audio_device_template.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/audio_device_template.h
@@ -30,6 +30,12 @@ class AudioDeviceTemplate : public AudioDeviceGeneric {
}
return InputType::SetAndroidAudioDeviceObjects(javaVM, env, context);
}
+
+ static void ClearAndroidAudioDeviceObjects() {
+ OutputType::ClearAndroidAudioDeviceObjects();
+ InputType::ClearAndroidAudioDeviceObjects();
+ }
+
explicit AudioDeviceTemplate(const int32_t id)
: output_(id),
input_(id, &output_) {
@@ -169,11 +175,6 @@ class AudioDeviceTemplate : public AudioDeviceGeneric {
return -1;
}
- int32_t SpeakerIsAvailable(
- bool& available) { // NOLINT
- return output_.SpeakerIsAvailable(available);
- }
-
int32_t InitSpeaker() {
return output_.InitSpeaker();
}
@@ -182,11 +183,6 @@ class AudioDeviceTemplate : public AudioDeviceGeneric {
return output_.SpeakerIsInitialized();
}
- int32_t MicrophoneIsAvailable(
- bool& available) { // NOLINT
- return input_.MicrophoneIsAvailable(available);
- }
-
int32_t InitMicrophone() {
return input_.InitMicrophone();
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer.h b/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer.h
index 597b8aaa389..e577b72fd4b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer.h
@@ -56,7 +56,7 @@ class FineAudioBuffer {
int bytes_per_10_ms_;
// Storage for samples that are not yet asked for.
- scoped_array<int8_t> cache_buffer_;
+ scoped_ptr<int8_t[]> cache_buffer_;
int cached_buffer_start_; // Location of first unread sample.
int cached_bytes_; // Number of bytes stored in cache.
};
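
scoped_array<T> is replaced by scoped_ptr<T[]> throughout this change; the T[] specialization is what selects delete[] on reset and destruction. A sketch using std::unique_ptr<T[]> as the standard-library analogue (an analogy only, not the webrtc::scoped_ptr type itself):

    #include <cstdint>
    #include <memory>

    void Example(int required_bytes) {
      std::unique_ptr<int8_t[]> buffer(new int8_t[required_bytes]);
      buffer[0] = 0;  // operator[] is provided by the array form.
      // reset() releases the old array with delete[], then owns the new one.
      buffer.reset(new int8_t[required_bytes * 2]);
    }
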
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer_unittest.cc
index 69ba741d18a..e1f03f8f3c3 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer_unittest.cc
@@ -80,7 +80,7 @@ void RunFineBufferTest(int sample_rate, int frame_size_in_samples) {
FineAudioBuffer fine_buffer(&audio_device_buffer, kFrameSizeBytes,
sample_rate);
- scoped_array<int8_t> out_buffer;
+ scoped_ptr<int8_t[]> out_buffer;
out_buffer.reset(
new int8_t[fine_buffer.RequiredBufferSizeBytes()]);
for (int i = 0; i < kNumberOfFrames; ++i) {
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.cc b/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.cc
index e276fcc5230..f22d8bf7ef8 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.cc
@@ -75,6 +75,9 @@ int32_t OpenSlesInput::SetAndroidAudioDeviceObjects(void* javaVM,
return 0;
}
+void OpenSlesInput::ClearAndroidAudioDeviceObjects() {
+}
+
int32_t OpenSlesInput::Init() {
assert(!initialized_);
@@ -177,11 +180,6 @@ int32_t OpenSlesInput::SetAGC(bool enable) {
return 0;
}
-int32_t OpenSlesInput::MicrophoneIsAvailable(bool& available) { // NOLINT
- available = true;
- return 0;
-}
-
int32_t OpenSlesInput::InitMicrophone() {
assert(initialized_);
assert(!recording_);
@@ -291,7 +289,7 @@ void OpenSlesInput::AllocateBuffers() {
fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));
// Allocate the memory area to be used.
- rec_buf_.reset(new scoped_array<int8_t>[TotalBuffersUsed()]);
+ rec_buf_.reset(new scoped_ptr<int8_t[]>[TotalBuffersUsed()]);
for (int i = 0; i < TotalBuffersUsed(); ++i) {
rec_buf_[i].reset(new int8_t[buffer_size_bytes()]);
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.h b/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.h
index ea5144b70fe..d27d82435db 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.h
@@ -41,6 +41,7 @@ class OpenSlesInput {
static int32_t SetAndroidAudioDeviceObjects(void* javaVM,
void* env,
void* context);
+ static void ClearAndroidAudioDeviceObjects();
  // Main initialization and termination
int32_t Init();
@@ -76,7 +77,6 @@ class OpenSlesInput {
bool AGC() const { return agc_enabled_; }
// Audio mixer initialization
- int32_t MicrophoneIsAvailable(bool& available); // NOLINT
int32_t InitMicrophone();
bool MicrophoneIsInitialized() const { return mic_initialized_; }
@@ -205,7 +205,7 @@ class OpenSlesInput {
// Audio buffers
AudioDeviceBuffer* audio_buffer_;
// Holds all allocated memory such that it is deallocated properly.
- scoped_array<scoped_array<int8_t> > rec_buf_;
+ scoped_ptr<scoped_ptr<int8_t[]>[]> rec_buf_;
// Index in |rec_buf_| pointing to the audio buffer that will be ready the
// next time RecorderSimpleBufferQueueCallbackHandler is invoked.
// Ready means buffer contains audio data from the device.
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.cc b/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.cc
index 3823305532d..377789b2371 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.cc
@@ -76,6 +76,10 @@ int32_t OpenSlesOutput::SetAndroidAudioDeviceObjects(void* javaVM,
return 0;
}
+void OpenSlesOutput::ClearAndroidAudioDeviceObjects() {
+ AudioManagerJni::ClearAndroidAudioDeviceObjects();
+}
+
int32_t OpenSlesOutput::Init() {
assert(!initialized_);
@@ -184,11 +188,6 @@ int32_t OpenSlesOutput::StopPlayout() {
return 0;
}
-int32_t OpenSlesOutput::SpeakerIsAvailable(bool& available) { // NOLINT
- available = true;
- return 0;
-}
-
int32_t OpenSlesOutput::InitSpeaker() {
assert(!playing_);
speaker_initialized_ = true;
@@ -341,7 +340,7 @@ void OpenSlesOutput::AllocateBuffers() {
fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));
// Allocate the memory area to be used.
- play_buf_.reset(new scoped_array<int8_t>[TotalBuffersUsed()]);
+ play_buf_.reset(new scoped_ptr<int8_t[]>[TotalBuffersUsed()]);
int required_buffer_size = fine_buffer_->RequiredBufferSizeBytes();
for (int i = 0; i < TotalBuffersUsed(); ++i) {
play_buf_[i].reset(new int8_t[required_buffer_size]);
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.h b/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.h
index 38ca969a019..aa9b5bf1213 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.h
@@ -41,6 +41,7 @@ class OpenSlesOutput : public PlayoutDelayProvider {
static int32_t SetAndroidAudioDeviceObjects(void* javaVM,
void* env,
void* context);
+ static void ClearAndroidAudioDeviceObjects();
  // Main initialization and termination
int32_t Init();
@@ -73,7 +74,6 @@ class OpenSlesOutput : public PlayoutDelayProvider {
bool Playing() const { return playing_; }
// Audio mixer initialization
- int32_t SpeakerIsAvailable(bool& available); // NOLINT
int32_t InitSpeaker();
bool SpeakerIsInitialized() const { return speaker_initialized_; }
@@ -223,7 +223,7 @@ class OpenSlesOutput : public PlayoutDelayProvider {
// Audio buffers
AudioDeviceBuffer* audio_buffer_;
scoped_ptr<FineAudioBuffer> fine_buffer_;
- scoped_array<scoped_array<int8_t> > play_buf_;
+ scoped_ptr<scoped_ptr<int8_t[]>[]> play_buf_;
  // Index in |play_buf_| pointing to the audio buffer that will be ready the
// next time PlayerSimpleBufferQueueCallbackHandler is invoked.
// Ready means buffer is ready to be played out to device.
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.cc b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.cc
index d65ab9fbb63..73d4d61dd3f 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/audio_device/android/single_rw_fifo.h"
+#include <assert.h>
+
static int UpdatePos(int pos, int capacity) {
return (pos + 1) % capacity;
}
@@ -18,14 +20,16 @@ namespace webrtc {
namespace subtle {
-#if defined(__ARMEL__)
+#if defined(__aarch64__)
+// From http://src.chromium.org/viewvc/chrome/trunk/src/base/atomicops_internals_arm64_gcc.h
+inline void MemoryBarrier() {
+ __asm__ __volatile__ ("dmb ish" ::: "memory");
+}
+
+#elif defined(__ARMEL__)
// From http://src.chromium.org/viewvc/chrome/trunk/src/base/atomicops_internals_arm_gcc.h
-// Note that it is only the MemoryBarrier function that makes this class arm
-// specific. Borrowing other MemoryBarrier implementations, this class could
-// be extended to more platforms.
inline void MemoryBarrier() {
- // Note: This is a function call, which is also an implicit compiler
- // barrier.
+ // Note: This is a function call, which is also an implicit compiler barrier.
typedef void (*KernelMemoryBarrierFunc)();
((KernelMemoryBarrierFunc)0xffff0fa0)();
}
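
The dmb ish above is a data memory barrier over the inner shareable domain; in this single-reader/single-writer FIFO it keeps the payload store visible before the position/size update that hands the slot to the reading thread. An analogous single-writer publish written with C++11 atomics, where the release store plays the barrier's role (an analogy, not the class's actual code):

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    struct SpscRing {
      int8_t* slots[64];
      std::atomic<std::size_t> write_pos{0};

      void Push(int8_t* item) {
        const std::size_t pos = write_pos.load(std::memory_order_relaxed);
        slots[pos % 64] = item;  // 1. Store the payload.
        // 2. Publish: the release store orders the payload store before it.
        write_pos.store(pos + 1, std::memory_order_release);
      }
    };
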
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.h b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.h
index a1fcfaab417..092b1d5e090 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.h
@@ -35,7 +35,7 @@ class SingleRwFifo {
int capacity() const { return capacity_; }
private:
- scoped_array<int8_t*> queue_;
+ scoped_ptr<int8_t*[]> queue_;
int capacity_;
Atomic32 size_;
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo_unittest.cc b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo_unittest.cc
index c722c2756cd..9925baaa88f 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo_unittest.cc
@@ -90,7 +90,7 @@ class SingleRwFifoTest : public testing::Test {
protected:
SingleRwFifo fifo_;
// Memory area for proper de-allocation.
- scoped_array<int8_t> buffer_[kCapacity];
+ scoped_ptr<int8_t[]> buffer_[kCapacity];
std::list<int8_t*> memory_queue_;
int pushed_;
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi b/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
index 1e14747f3df..a64856b5da5 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
@@ -20,7 +20,7 @@
'.',
'../interface',
'include',
- 'dummy', # dummy audio device
+ 'dummy', # Contains dummy audio device implementations.
],
'direct_dependent_settings': {
'include_dirs': [
@@ -45,6 +45,8 @@
'dummy/audio_device_dummy.h',
'dummy/audio_device_utility_dummy.cc',
'dummy/audio_device_utility_dummy.h',
+ 'dummy/file_audio_device.cc',
+ 'dummy/file_audio_device.h',
],
'conditions': [
['OS=="linux"', {
@@ -77,6 +79,13 @@
'WEBRTC_DUMMY_AUDIO_BUILD',
],
}],
+ ['build_with_chromium==0', {
+ 'sources': [
+ # Don't link these into Chrome since they contain static data.
+ 'dummy/file_audio_device_factory.cc',
+ 'dummy/file_audio_device_factory.h',
+ ],
+ }],
['include_internal_audio_device==1', {
'sources': [
'linux/alsasymboltable_linux.cc',
@@ -246,66 +255,6 @@
},
],
}],
- ['OS=="android"', {
- 'targets': [
- {
- 'target_name': 'libopensl-demo-jni',
- 'type': 'loadable_module',
- 'dependencies': [
- 'audio_device',
- ],
- 'sources': [
- 'android/test/jni/opensl_runner.cc',
- 'android/test/fake_audio_device_buffer.cc',
- ],
- 'link_settings': {
- 'libraries': [
- '-llog',
- '-lOpenSLES',
- ],
- },
- },
- {
- 'target_name': 'OpenSlDemo',
- 'type': 'none',
- 'dependencies': [
- 'libopensl-demo-jni',
- '<(modules_java_gyp_path):*',
- ],
- 'actions': [
- {
- # TODO(henrik): Convert building of the demo to a proper GYP
- # target so this action is not needed once chromium's
- # apk-building machinery can be used. (crbug.com/225101)
- 'action_name': 'build_opensldemo_apk',
- 'variables': {
- 'android_opensl_demo_root': '<(webrtc_root)/modules/audio_device/android/test',
- },
- 'inputs' : [
- '<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
- '<(PRODUCT_DIR)/libopensl-demo-jni.so',
- '<!@(find <(android_opensl_demo_root)/src -name "*.java")',
- '<!@(find <(android_opensl_demo_root)/res -name "*.xml")',
- '<!@(find <(android_opensl_demo_root)/res -name "*.png")',
- '<(android_opensl_demo_root)/AndroidManifest.xml',
- '<(android_opensl_demo_root)/build.xml',
- '<(android_opensl_demo_root)/project.properties',
- ],
- 'outputs': ['<(PRODUCT_DIR)/OpenSlDemo-debug.apk'],
- 'action': ['bash', '-ec',
- 'rm -f <(_outputs) && '
- 'mkdir -p <(android_opensl_demo_root)/libs/<(android_app_abi) && '
- '<(android_strip) -o <(android_opensl_demo_root)/libs/<(android_app_abi)/libopensl-demo-jni.so <(PRODUCT_DIR)/libopensl-demo-jni.so && '
- 'cp <(PRODUCT_DIR)/lib.java/audio_device_module_java.jar <(android_opensl_demo_root)/libs/ &&'
- 'cd <(android_opensl_demo_root) && '
- 'ant debug && '
- 'cd - && '
- 'cp <(android_opensl_demo_root)/bin/OpenSlDemo-debug.apk <(_outputs)'
- ],
- },
- ],
- }],
- }],
['OS=="android" and enable_android_opensl==1', {
'targets': [
{
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_buffer.cc b/chromium/third_party/webrtc/modules/audio_device/audio_device_buffer.cc
index db5cc322f98..42fdaad22cb 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_buffer.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_buffer.cc
@@ -548,13 +548,16 @@ int32_t AudioDeviceBuffer::RequestPlayoutData(uint32_t nSamples)
if (_ptrCbAudioTransport)
{
uint32_t res(0);
-
+ int64_t elapsed_time_ms = -1;
+ int64_t ntp_time_ms = -1;
res = _ptrCbAudioTransport->NeedMorePlayData(_playSamples,
playBytesPerSample,
playChannels,
playSampleRate,
&_playBuffer[0],
- nSamplesOut);
+ nSamplesOut,
+ &elapsed_time_ms,
+ &ntp_time_ms);
if (res != 0)
{
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "NeedMorePlayData() failed");
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_generic.h b/chromium/third_party/webrtc/modules/audio_device/audio_device_generic.h
index 8038028029b..a4c320eaf75 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_generic.h
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_generic.h
@@ -76,10 +76,8 @@ class AudioDeviceGeneric
uint16_t& volumeRight) const = 0;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available) = 0;
virtual int32_t InitSpeaker() = 0;
virtual bool SpeakerIsInitialized() const = 0;
- virtual int32_t MicrophoneIsAvailable(bool& available) = 0;
virtual int32_t InitMicrophone() = 0;
virtual bool MicrophoneIsInitialized() const = 0;
@@ -137,7 +135,7 @@ class AudioDeviceGeneric
// CPU load
virtual int32_t CPULoad(uint16_t& load) const = 0;
-
+
// Native sample rate controls (samples/sec)
virtual int32_t SetRecordingSampleRate(
const uint32_t samplesPerSec);
@@ -147,7 +145,7 @@ class AudioDeviceGeneric
// Speaker audio routing (for mobile devices)
virtual int32_t SetLoudspeakerStatus(bool enable);
virtual int32_t GetLoudspeakerStatus(bool& enable) const;
-
+
// Reset Audio Device (for mobile devices)
virtual int32_t ResetAudioDevice();
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
index 05201c0e2a5..58411e3b94d 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
@@ -45,8 +45,14 @@
#include "audio_device_utility_mac.h"
#include "audio_device_mac.h"
#endif
+
+#if defined(WEBRTC_DUMMY_FILE_DEVICES)
+#include "webrtc/modules/audio_device/dummy/file_audio_device_factory.h"
+#endif
+
#include "webrtc/modules/audio_device/dummy/audio_device_dummy.h"
#include "webrtc/modules/audio_device/dummy/audio_device_utility_dummy.h"
+#include "webrtc/modules/audio_device/dummy/file_audio_device.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
@@ -203,6 +209,14 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects()
{
ptrAudioDeviceUtility = new AudioDeviceUtilityDummy(Id());
}
+#elif defined(WEBRTC_DUMMY_FILE_DEVICES)
+ ptrAudioDevice = FileAudioDeviceFactory::CreateFileAudioDevice(Id());
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "Will use file-playing dummy device.");
+ if (ptrAudioDevice != NULL)
+ {
+ ptrAudioDeviceUtility = new AudioDeviceUtilityDummy(Id());
+ }
#else
const AudioLayer audioLayer(PlatformAudioLayer());
@@ -641,27 +655,6 @@ bool AudioDeviceModuleImpl::Initialized() const
}
// ----------------------------------------------------------------------------
-// SpeakerIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceModuleImpl::SpeakerIsAvailable(bool* available)
-{
- CHECK_INITIALIZED();
-
- bool isAvailable(0);
-
- if (_ptrAudioDevice->SpeakerIsAvailable(isAvailable) == -1)
- {
- return -1;
- }
-
- *available = isAvailable;
-
- WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", available);
- return (0);
-}
-
-// ----------------------------------------------------------------------------
// InitSpeaker
// ----------------------------------------------------------------------------
@@ -672,27 +665,6 @@ int32_t AudioDeviceModuleImpl::InitSpeaker()
}
// ----------------------------------------------------------------------------
-// MicrophoneIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceModuleImpl::MicrophoneIsAvailable(bool* available)
-{
- CHECK_INITIALIZED();
-
- bool isAvailable(0);
-
- if (_ptrAudioDevice->MicrophoneIsAvailable(isAvailable) == -1)
- {
- return -1;
- }
-
- *available = isAvailable;
-
- WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
- return (0);
-}
-
-// ----------------------------------------------------------------------------
// InitMicrophone
// ----------------------------------------------------------------------------
@@ -1750,8 +1722,6 @@ int32_t AudioDeviceModuleImpl::StopRawOutputFileRecording()
CHECK_INITIALIZED();
return (_audioDeviceBuffer.StopOutputFileRecording());
-
- return 0;
}
// ----------------------------------------------------------------------------
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
index 26bd54399b2..a545d580f4b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
@@ -115,10 +115,8 @@ public:
uint16_t* volumeRight) const OVERRIDE;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool* available) OVERRIDE;
virtual int32_t InitSpeaker() OVERRIDE;
virtual bool SpeakerIsInitialized() const OVERRIDE;
- virtual int32_t MicrophoneIsAvailable(bool* available) OVERRIDE;
virtual int32_t InitMicrophone() OVERRIDE;
virtual bool MicrophoneIsInitialized() const OVERRIDE;
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_tests.isolate b/chromium/third_party/webrtc/modules/audio_device/audio_device_tests.isolate
index 69e877c14f1..ebe8bfb40b9 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_tests.isolate
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_tests.isolate
@@ -8,27 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../../data/',
- '../../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/audio_device_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/audio_device_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.cc b/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.cc
index 379b25766d8..92199f6d295 100644
--- a/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.cc
@@ -91,14 +91,10 @@ int32_t AudioDeviceDummy::WaveOutVolume(uint16_t& volumeLeft,
return -1;
}
-int32_t AudioDeviceDummy::SpeakerIsAvailable(bool& available) { return -1; }
-
int32_t AudioDeviceDummy::InitSpeaker() { return -1; }
bool AudioDeviceDummy::SpeakerIsInitialized() const { return false; }
-int32_t AudioDeviceDummy::MicrophoneIsAvailable(bool& available) { return -1; }
-
int32_t AudioDeviceDummy::InitMicrophone() { return -1; }
bool AudioDeviceDummy::MicrophoneIsInitialized() const { return false; }
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.h b/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.h
index 47d7aab79e4..41e52e95443 100644
--- a/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.h
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.h
@@ -76,10 +76,8 @@ class AudioDeviceDummy : public AudioDeviceGeneric {
uint16_t& volumeRight) const OVERRIDE;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitSpeaker() OVERRIDE;
virtual bool SpeakerIsInitialized() const OVERRIDE;
- virtual int32_t MicrophoneIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitMicrophone() OVERRIDE;
virtual bool MicrophoneIsInitialized() const OVERRIDE;
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.cc b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.cc
new file mode 100644
index 00000000000..e7771c66de8
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.cc
@@ -0,0 +1,586 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <iostream>
+#include "webrtc/modules/audio_device/dummy/file_audio_device.h"
+#include "webrtc/system_wrappers/interface/sleep.h"
+#include "webrtc/system_wrappers/interface/thread_wrapper.h"
+
+namespace webrtc {
+
+int kRecordingFixedSampleRate = 48000;
+int kRecordingNumChannels = 2;
+int kPlayoutFixedSampleRate = 48000;
+int kPlayoutNumChannels = 2;
+int kPlayoutBufferSize = kPlayoutFixedSampleRate / 100
+ * kPlayoutNumChannels * 2;
+int kRecordingBufferSize = kRecordingFixedSampleRate / 100
+ * kRecordingNumChannels * 2;
+
+FileAudioDevice::FileAudioDevice(const int32_t id,
+ const char* inputFilename,
+ const char* outputFile):
+ _ptrAudioBuffer(NULL),
+ _recordingBuffer(NULL),
+ _playoutBuffer(NULL),
+ _recordingFramesLeft(0),
+ _playoutFramesLeft(0),
+ _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+ _recordingBufferSizeIn10MS(0),
+ _recordingFramesIn10MS(0),
+ _playoutFramesIn10MS(0),
+ _ptrThreadRec(NULL),
+ _ptrThreadPlay(NULL),
+ _recThreadID(0),
+ _playThreadID(0),
+ _playing(false),
+ _recording(false),
+ _lastCallPlayoutMillis(0),
+ _lastCallRecordMillis(0),
+ _outputFile(*FileWrapper::Create()),
+ _inputFile(*FileWrapper::Create()),
+ _outputFilename(outputFile),
+ _inputFilename(inputFilename),
+ _clock(Clock::GetRealTimeClock()) {
+}
+
+FileAudioDevice::~FileAudioDevice() {
+ _outputFile.Flush();
+ _outputFile.CloseFile();
+ delete &_outputFile;
+ _inputFile.Flush();
+ _inputFile.CloseFile();
+ delete &_inputFile;
+}
+
+int32_t FileAudioDevice::ActiveAudioLayer(
+ AudioDeviceModule::AudioLayer& audioLayer) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::Init() { return 0; }
+
+int32_t FileAudioDevice::Terminate() { return 0; }
+
+bool FileAudioDevice::Initialized() const { return true; }
+
+int16_t FileAudioDevice::PlayoutDevices() {
+ return 1;
+}
+
+int16_t FileAudioDevice::RecordingDevices() {
+ return 1;
+}
+
+int32_t FileAudioDevice::PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ const char* kName = "dummy_device";
+ const char* kGuid = "dummy_device_unique_id";
+ if (index < 1) {
+ memset(name, 0, kAdmMaxDeviceNameSize);
+ memset(guid, 0, kAdmMaxGuidSize);
+ memcpy(name, kName, strlen(kName));
+ memcpy(guid, kGuid, strlen(kGuid));
+ return 0;
+ }
+ return -1;
+}
+
+int32_t FileAudioDevice::RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ const char* kName = "dummy_device";
+ const char* kGuid = "dummy_device_unique_id";
+ if (index < 1) {
+ memset(name, 0, kAdmMaxDeviceNameSize);
+ memset(guid, 0, kAdmMaxGuidSize);
+ memcpy(name, kName, strlen(kName));
+ memcpy(guid, kGuid, strlen(kGuid));
+ return 0;
+ }
+ return -1;
+}
+
+int32_t FileAudioDevice::SetPlayoutDevice(uint16_t index) {
+ if (index == 0) {
+ _playout_index = index;
+ return 0;
+ }
+ return -1;
+}
+
+int32_t FileAudioDevice::SetPlayoutDevice(
+ AudioDeviceModule::WindowsDeviceType device) {
+ return -1;
+}
+
+int32_t FileAudioDevice::SetRecordingDevice(uint16_t index) {
+ if (index == 0) {
+ _record_index = index;
+ return _record_index;
+ }
+ return -1;
+}
+
+int32_t FileAudioDevice::SetRecordingDevice(
+ AudioDeviceModule::WindowsDeviceType device) {
+ return -1;
+}
+
+int32_t FileAudioDevice::PlayoutIsAvailable(bool& available) {
+ if (_playout_index == 0) {
+ available = true;
+ return _playout_index;
+ }
+ available = false;
+ return -1;
+}
+
+int32_t FileAudioDevice::InitPlayout() {
+ if (_ptrAudioBuffer)
+ {
+ // Update webrtc audio buffer with the selected parameters
+ _ptrAudioBuffer->SetPlayoutSampleRate(kPlayoutFixedSampleRate);
+ _ptrAudioBuffer->SetPlayoutChannels(kPlayoutNumChannels);
+ }
+ return 0;
+}
+
+bool FileAudioDevice::PlayoutIsInitialized() const {
+ return true;
+}
+
+int32_t FileAudioDevice::RecordingIsAvailable(bool& available) {
+ if (_record_index == 0) {
+ available = true;
+ return _record_index;
+ }
+ available = false;
+ return -1;
+}
+
+int32_t FileAudioDevice::InitRecording() {
+ CriticalSectionScoped lock(&_critSect);
+
+ if (_recording) {
+ return -1;
+ }
+
+ _recordingFramesIn10MS = kRecordingFixedSampleRate/100;
+
+ if (_ptrAudioBuffer) {
+ _ptrAudioBuffer->SetRecordingSampleRate(kRecordingFixedSampleRate);
+ _ptrAudioBuffer->SetRecordingChannels(kRecordingNumChannels);
+ }
+ return 0;
+}
+
+bool FileAudioDevice::RecordingIsInitialized() const {
+ return true;
+}
+
+int32_t FileAudioDevice::StartPlayout() {
+ if (_playing)
+ {
+ return 0;
+ }
+
+ _playing = true;
+ _playoutFramesLeft = 0;
+
+ if (!_playoutBuffer)
+ _playoutBuffer = new int8_t[2 *
+ kPlayoutNumChannels *
+ kPlayoutFixedSampleRate/100];
+ if (!_playoutBuffer)
+ {
+ _playing = false;
+ return -1;
+ }
+
+ // PLAYOUT
+ const char* threadName = "webrtc_audio_module_play_thread";
+ _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc,
+ this,
+ kRealtimePriority,
+ threadName);
+ if (_ptrThreadPlay == NULL)
+ {
+ _playing = false;
+ delete [] _playoutBuffer;
+ _playoutBuffer = NULL;
+ return -1;
+ }
+
+ if (_outputFile.OpenFile(_outputFilename.c_str(),
+ false, false, false) == -1) {
+ printf("Failed to open playout file %s!", _outputFilename.c_str());
+ _playing = false;
+ delete [] _playoutBuffer;
+ _playoutBuffer = NULL;
+ return -1;
+ }
+
+ unsigned int threadID(0);
+ if (!_ptrThreadPlay->Start(threadID))
+ {
+ _playing = false;
+ delete _ptrThreadPlay;
+ _ptrThreadPlay = NULL;
+ delete [] _playoutBuffer;
+ _playoutBuffer = NULL;
+ return -1;
+ }
+ _playThreadID = threadID;
+
+ return 0;
+}
+
+int32_t FileAudioDevice::StopPlayout() {
+ {
+ CriticalSectionScoped lock(&_critSect);
+ _playing = false;
+ }
+
+ // stop playout thread first
+ if (_ptrThreadPlay && !_ptrThreadPlay->Stop())
+ {
+ return -1;
+ }
+ else {
+ delete _ptrThreadPlay;
+ _ptrThreadPlay = NULL;
+ }
+
+ CriticalSectionScoped lock(&_critSect);
+
+ _playoutFramesLeft = 0;
+ delete [] _playoutBuffer;
+ _playoutBuffer = NULL;
+ _outputFile.Flush();
+ _outputFile.CloseFile();
+ return 0;
+}
+
+bool FileAudioDevice::Playing() const {
+ return true;
+}
+
+int32_t FileAudioDevice::StartRecording() {
+ _recording = true;
+
+ // Make sure we only create the buffer once.
+ _recordingBufferSizeIn10MS = _recordingFramesIn10MS *
+ kRecordingNumChannels *
+ 2;
+ if (!_recordingBuffer) {
+ _recordingBuffer = new int8_t[_recordingBufferSizeIn10MS];
+ }
+
+ if (_inputFile.OpenFile(_inputFilename.c_str(), true,
+ true, false) == -1) {
+ printf("Failed to open audio input file %s!\n",
+ _inputFilename.c_str());
+ _recording = false;
+ delete[] _recordingBuffer;
+ _recordingBuffer = NULL;
+ return -1;
+ }
+
+ const char* threadName = "webrtc_audio_module_capture_thread";
+ _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc,
+ this,
+ kRealtimePriority,
+ threadName);
+ if (_ptrThreadRec == NULL)
+ {
+ _recording = false;
+ delete [] _recordingBuffer;
+ _recordingBuffer = NULL;
+ return -1;
+ }
+
+ unsigned int threadID(0);
+ if (!_ptrThreadRec->Start(threadID))
+ {
+ _recording = false;
+ delete _ptrThreadRec;
+ _ptrThreadRec = NULL;
+ delete [] _recordingBuffer;
+ _recordingBuffer = NULL;
+ return -1;
+ }
+ _recThreadID = threadID;
+
+ return 0;
+}
+
+
+int32_t FileAudioDevice::StopRecording() {
+ {
+ CriticalSectionScoped lock(&_critSect);
+ _recording = false;
+ }
+
+ if (_ptrThreadRec && !_ptrThreadRec->Stop())
+ {
+ return -1;
+ }
+ else {
+ delete _ptrThreadRec;
+ _ptrThreadRec = NULL;
+ }
+
+ CriticalSectionScoped lock(&_critSect);
+ _recordingFramesLeft = 0;
+ if (_recordingBuffer)
+ {
+ delete [] _recordingBuffer;
+ _recordingBuffer = NULL;
+ }
+ return 0;
+}
+
+bool FileAudioDevice::Recording() const {
+ return _recording;
+}
+
+int32_t FileAudioDevice::SetAGC(bool enable) { return -1; }
+
+bool FileAudioDevice::AGC() const { return false; }
+
+int32_t FileAudioDevice::SetWaveOutVolume(uint16_t volumeLeft,
+ uint16_t volumeRight) {
+ return -1;
+}
+
+int32_t FileAudioDevice::WaveOutVolume(uint16_t& volumeLeft,
+ uint16_t& volumeRight) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::InitSpeaker() { return -1; }
+
+bool FileAudioDevice::SpeakerIsInitialized() const { return false; }
+
+int32_t FileAudioDevice::InitMicrophone() { return 0; }
+
+bool FileAudioDevice::MicrophoneIsInitialized() const { return true; }
+
+int32_t FileAudioDevice::SpeakerVolumeIsAvailable(bool& available) {
+ return -1;
+}
+
+int32_t FileAudioDevice::SetSpeakerVolume(uint32_t volume) { return -1; }
+
+int32_t FileAudioDevice::SpeakerVolume(uint32_t& volume) const { return -1; }
+
+int32_t FileAudioDevice::MaxSpeakerVolume(uint32_t& maxVolume) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::MinSpeakerVolume(uint32_t& minVolume) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::SpeakerVolumeStepSize(uint16_t& stepSize) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::MicrophoneVolumeIsAvailable(bool& available) {
+ return -1;
+}
+
+int32_t FileAudioDevice::SetMicrophoneVolume(uint32_t volume) { return -1; }
+
+int32_t FileAudioDevice::MicrophoneVolume(uint32_t& volume) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::MaxMicrophoneVolume(uint32_t& maxVolume) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::MinMicrophoneVolume(uint32_t& minVolume) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::MicrophoneVolumeStepSize(uint16_t& stepSize) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::SpeakerMuteIsAvailable(bool& available) { return -1; }
+
+int32_t FileAudioDevice::SetSpeakerMute(bool enable) { return -1; }
+
+int32_t FileAudioDevice::SpeakerMute(bool& enabled) const { return -1; }
+
+int32_t FileAudioDevice::MicrophoneMuteIsAvailable(bool& available) {
+ return -1;
+}
+
+int32_t FileAudioDevice::SetMicrophoneMute(bool enable) { return -1; }
+
+int32_t FileAudioDevice::MicrophoneMute(bool& enabled) const { return -1; }
+
+int32_t FileAudioDevice::MicrophoneBoostIsAvailable(bool& available) {
+ return -1;
+}
+
+int32_t FileAudioDevice::SetMicrophoneBoost(bool enable) { return -1; }
+
+int32_t FileAudioDevice::MicrophoneBoost(bool& enabled) const { return -1; }
+
+int32_t FileAudioDevice::StereoPlayoutIsAvailable(bool& available) {
+ available = true;
+ return 0;
+}
+int32_t FileAudioDevice::SetStereoPlayout(bool enable) {
+ return 0;
+}
+
+int32_t FileAudioDevice::StereoPlayout(bool& enabled) const {
+ enabled = true;
+ return 0;
+}
+
+int32_t FileAudioDevice::StereoRecordingIsAvailable(bool& available) {
+ available = true;
+ return 0;
+}
+
+int32_t FileAudioDevice::SetStereoRecording(bool enable) {
+ return 0;
+}
+
+int32_t FileAudioDevice::StereoRecording(bool& enabled) const {
+ enabled = true;
+ return 0;
+}
+
+int32_t FileAudioDevice::SetPlayoutBuffer(
+ const AudioDeviceModule::BufferType type,
+ uint16_t sizeMS) {
+ return 0;
+}
+
+int32_t FileAudioDevice::PlayoutBuffer(AudioDeviceModule::BufferType& type,
+ uint16_t& sizeMS) const {
+ type = _playBufType;
+ return 0;
+}
+
+int32_t FileAudioDevice::PlayoutDelay(uint16_t& delayMS) const {
+ return 0;
+}
+
+int32_t FileAudioDevice::RecordingDelay(uint16_t& delayMS) const { return -1; }
+
+int32_t FileAudioDevice::CPULoad(uint16_t& load) const { return -1; }
+
+bool FileAudioDevice::PlayoutWarning() const { return false; }
+
+bool FileAudioDevice::PlayoutError() const { return false; }
+
+bool FileAudioDevice::RecordingWarning() const { return false; }
+
+bool FileAudioDevice::RecordingError() const { return false; }
+
+void FileAudioDevice::ClearPlayoutWarning() {}
+
+void FileAudioDevice::ClearPlayoutError() {}
+
+void FileAudioDevice::ClearRecordingWarning() {}
+
+void FileAudioDevice::ClearRecordingError() {}
+
+void FileAudioDevice::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ CriticalSectionScoped lock(&_critSect);
+
+ _ptrAudioBuffer = audioBuffer;
+
+ // Inform the AudioBuffer about default settings for this implementation.
+ // Set all values to zero here since the actual settings will be done by
+ // InitPlayout and InitRecording later.
+ _ptrAudioBuffer->SetRecordingSampleRate(0);
+ _ptrAudioBuffer->SetPlayoutSampleRate(0);
+ _ptrAudioBuffer->SetRecordingChannels(0);
+ _ptrAudioBuffer->SetPlayoutChannels(0);
+}
+
+bool FileAudioDevice::PlayThreadFunc(void* pThis)
+{
+ return (static_cast<FileAudioDevice*>(pThis)->PlayThreadProcess());
+}
+
+bool FileAudioDevice::RecThreadFunc(void* pThis)
+{
+ return (static_cast<FileAudioDevice*>(pThis)->RecThreadProcess());
+}
+
+bool FileAudioDevice::PlayThreadProcess()
+{
+ if(!_playing)
+ return false;
+
+ uint64_t currentTime = _clock->CurrentNtpInMilliseconds();
+ _critSect.Enter();
+
+ if (_lastCallPlayoutMillis == 0 ||
+ currentTime - _lastCallPlayoutMillis >= 10)
+ {
+ _critSect.Leave();
+ _ptrAudioBuffer->RequestPlayoutData(_playoutFramesIn10MS);
+ _critSect.Enter();
+
+ _playoutFramesLeft = _ptrAudioBuffer->GetPlayoutData(_playoutBuffer);
+ assert(_playoutFramesLeft == _playoutFramesIn10MS);
+ if (_outputFile.Open()) {
+ _outputFile.Write(_playoutBuffer, kPlayoutBufferSize);
+ _outputFile.Flush();
+ }
+ _lastCallPlayoutMillis = currentTime;
+ }
+ _playoutFramesLeft = 0;
+ _critSect.Leave();
+ SleepMs(10 - (_clock->CurrentNtpInMilliseconds() - currentTime));
+ return true;
+}
+
+bool FileAudioDevice::RecThreadProcess()
+{
+ if (!_recording)
+ return false;
+
+ uint64_t currentTime = _clock->CurrentNtpInMilliseconds();
+ _critSect.Enter();
+
+ if (_lastCallRecordMillis == 0 ||
+ currentTime - _lastCallRecordMillis >= 10) {
+ if (_inputFile.Open()) {
+ if (_inputFile.Read(_recordingBuffer, kRecordingBufferSize) > 0) {
+ _ptrAudioBuffer->SetRecordedBuffer(_recordingBuffer,
+ _recordingFramesIn10MS);
+ } else {
+ _inputFile.Rewind();
+ }
+ _lastCallRecordMillis = currentTime;
+ _critSect.Leave();
+ _ptrAudioBuffer->DeliverRecordedData();
+ _critSect.Enter();
+ }
+ }
+
+ _critSect.Leave();
+ SleepMs(10 - (_clock->CurrentNtpInMilliseconds() - currentTime));
+ return true;
+}
+
+} // namespace webrtc
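
PlayThreadProcess() and RecThreadProcess() above share one pacing idiom: do at most one 10 ms block of work per pass, then sleep away whatever is left of the interval. A minimal standalone sketch of that idiom, using std::chrono and std::thread in place of WebRTC's Clock/SleepMs wrappers (an illustration only, not the module's actual helper):

    #include <atomic>
    #include <chrono>
    #include <thread>

    // Calls |do_work| roughly once every 10 ms until |running| is cleared.
    template <typename WorkFn>
    void PacedLoop(std::atomic<bool>& running, WorkFn do_work) {
      using Clock = std::chrono::steady_clock;
      while (running.load()) {
        const Clock::time_point start = Clock::now();
        do_work();  // e.g. hand one 10 ms audio block to the buffer.
        const auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(
            Clock::now() - start);
        if (elapsed < std::chrono::milliseconds(10))
          std::this_thread::sleep_for(std::chrono::milliseconds(10) - elapsed);
      }
    }
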
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.h b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.h
new file mode 100644
index 00000000000..6f417eb2e0b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.h
@@ -0,0 +1,202 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_H
+#define WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_H
+
+#include <stdio.h>
+
+#include <string>
+
+#include "webrtc/modules/audio_device/audio_device_generic.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/file_wrapper.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+
+namespace webrtc {
+class EventWrapper;
+class ThreadWrapper;
+
+// This is a fake audio device which plays audio from a file as its microphone
+// and plays out into a file.
+class FileAudioDevice : public AudioDeviceGeneric {
+ public:
+ // Constructs a file audio device with |id|. It will read audio from
+ // |inputFilename| and record output audio to |outputFilename|.
+ //
+ // The input file should be a readable 48k stereo raw file, and the output
+ // file should point to a writable location. The output format will also be
+ // 48k stereo raw audio.
+ FileAudioDevice(const int32_t id,
+ const char* inputFilename,
+ const char* outputFilename);
+ virtual ~FileAudioDevice();
+
+ // Retrieve the currently utilized audio layer
+ virtual int32_t ActiveAudioLayer(
+ AudioDeviceModule::AudioLayer& audioLayer) const OVERRIDE;
+
+ // Main initialization and termination
+ virtual int32_t Init() OVERRIDE;
+ virtual int32_t Terminate() OVERRIDE;
+ virtual bool Initialized() const OVERRIDE;
+
+ // Device enumeration
+ virtual int16_t PlayoutDevices() OVERRIDE;
+ virtual int16_t RecordingDevices() OVERRIDE;
+ virtual int32_t PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) OVERRIDE;
+ virtual int32_t RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) OVERRIDE;
+
+ // Device selection
+ virtual int32_t SetPlayoutDevice(uint16_t index) OVERRIDE;
+ virtual int32_t SetPlayoutDevice(
+ AudioDeviceModule::WindowsDeviceType device) OVERRIDE;
+ virtual int32_t SetRecordingDevice(uint16_t index) OVERRIDE;
+ virtual int32_t SetRecordingDevice(
+ AudioDeviceModule::WindowsDeviceType device) OVERRIDE;
+
+ // Audio transport initialization
+ virtual int32_t PlayoutIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t InitPlayout() OVERRIDE;
+ virtual bool PlayoutIsInitialized() const OVERRIDE;
+ virtual int32_t RecordingIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t InitRecording() OVERRIDE;
+ virtual bool RecordingIsInitialized() const OVERRIDE;
+
+ // Audio transport control
+ virtual int32_t StartPlayout() OVERRIDE;
+ virtual int32_t StopPlayout() OVERRIDE;
+ virtual bool Playing() const OVERRIDE;
+ virtual int32_t StartRecording() OVERRIDE;
+ virtual int32_t StopRecording() OVERRIDE;
+ virtual bool Recording() const OVERRIDE;
+
+ // Microphone Automatic Gain Control (AGC)
+ virtual int32_t SetAGC(bool enable) OVERRIDE;
+ virtual bool AGC() const OVERRIDE;
+
+ // Volume control based on the Windows Wave API (Windows only)
+ virtual int32_t SetWaveOutVolume(uint16_t volumeLeft,
+ uint16_t volumeRight) OVERRIDE;
+ virtual int32_t WaveOutVolume(uint16_t& volumeLeft,
+ uint16_t& volumeRight) const OVERRIDE;
+
+ // Audio mixer initialization
+ virtual int32_t InitSpeaker() OVERRIDE;
+ virtual bool SpeakerIsInitialized() const OVERRIDE;
+ virtual int32_t InitMicrophone() OVERRIDE;
+ virtual bool MicrophoneIsInitialized() const OVERRIDE;
+
+ // Speaker volume controls
+ virtual int32_t SpeakerVolumeIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetSpeakerVolume(uint32_t volume) OVERRIDE;
+ virtual int32_t SpeakerVolume(uint32_t& volume) const OVERRIDE;
+ virtual int32_t MaxSpeakerVolume(uint32_t& maxVolume) const OVERRIDE;
+ virtual int32_t MinSpeakerVolume(uint32_t& minVolume) const OVERRIDE;
+ virtual int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const OVERRIDE;
+
+ // Microphone volume controls
+ virtual int32_t MicrophoneVolumeIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetMicrophoneVolume(uint32_t volume) OVERRIDE;
+ virtual int32_t MicrophoneVolume(uint32_t& volume) const OVERRIDE;
+ virtual int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const OVERRIDE;
+ virtual int32_t MinMicrophoneVolume(uint32_t& minVolume) const OVERRIDE;
+ virtual int32_t MicrophoneVolumeStepSize(uint16_t& stepSize) const OVERRIDE;
+
+ // Speaker mute control
+ virtual int32_t SpeakerMuteIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetSpeakerMute(bool enable) OVERRIDE;
+ virtual int32_t SpeakerMute(bool& enabled) const OVERRIDE;
+
+ // Microphone mute control
+ virtual int32_t MicrophoneMuteIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetMicrophoneMute(bool enable) OVERRIDE;
+ virtual int32_t MicrophoneMute(bool& enabled) const OVERRIDE;
+
+ // Microphone boost control
+ virtual int32_t MicrophoneBoostIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetMicrophoneBoost(bool enable) OVERRIDE;
+ virtual int32_t MicrophoneBoost(bool& enabled) const OVERRIDE;
+
+ // Stereo support
+ virtual int32_t StereoPlayoutIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetStereoPlayout(bool enable) OVERRIDE;
+ virtual int32_t StereoPlayout(bool& enabled) const OVERRIDE;
+ virtual int32_t StereoRecordingIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetStereoRecording(bool enable) OVERRIDE;
+ virtual int32_t StereoRecording(bool& enabled) const OVERRIDE;
+
+ // Delay information and control
+ virtual int32_t SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
+ uint16_t sizeMS) OVERRIDE;
+ virtual int32_t PlayoutBuffer(AudioDeviceModule::BufferType& type,
+ uint16_t& sizeMS) const OVERRIDE;
+ virtual int32_t PlayoutDelay(uint16_t& delayMS) const OVERRIDE;
+ virtual int32_t RecordingDelay(uint16_t& delayMS) const OVERRIDE;
+
+ // CPU load
+ virtual int32_t CPULoad(uint16_t& load) const OVERRIDE;
+
+ virtual bool PlayoutWarning() const OVERRIDE;
+ virtual bool PlayoutError() const OVERRIDE;
+ virtual bool RecordingWarning() const OVERRIDE;
+ virtual bool RecordingError() const OVERRIDE;
+ virtual void ClearPlayoutWarning() OVERRIDE;
+ virtual void ClearPlayoutError() OVERRIDE;
+ virtual void ClearRecordingWarning() OVERRIDE;
+ virtual void ClearRecordingError() OVERRIDE;
+
+ virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) OVERRIDE;
+
+ private:
+ static bool RecThreadFunc(void*);
+ static bool PlayThreadFunc(void*);
+ bool RecThreadProcess();
+ bool PlayThreadProcess();
+
+ int32_t _playout_index;
+ int32_t _record_index;
+ AudioDeviceModule::BufferType _playBufType;
+ AudioDeviceBuffer* _ptrAudioBuffer;
+ int8_t* _recordingBuffer; // In bytes.
+ int8_t* _playoutBuffer; // In bytes.
+ uint32_t _recordingFramesLeft;
+ uint32_t _playoutFramesLeft;
+ CriticalSectionWrapper& _critSect;
+
+ uint32_t _recordingBufferSizeIn10MS;
+ uint32_t _recordingFramesIn10MS;
+ uint32_t _playoutFramesIn10MS;
+
+ ThreadWrapper* _ptrThreadRec;
+ ThreadWrapper* _ptrThreadPlay;
+ uint32_t _recThreadID;
+ uint32_t _playThreadID;
+
+ bool _playing;
+ bool _recording;
+ uint64_t _lastCallPlayoutMillis;
+ uint64_t _lastCallRecordMillis;
+
+ FileWrapper& _outputFile;
+ FileWrapper& _inputFile;
+ std::string _outputFilename;
+ std::string _inputFilename;
+
+ Clock* _clock;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_H
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc
new file mode 100644
index 00000000000..db35bf111b1
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_device/dummy/file_audio_device_factory.h"
+
+#include <cstring>
+
+#include "webrtc/modules/audio_device/dummy/file_audio_device.h"
+
+namespace webrtc {
+
+char FileAudioDeviceFactory::_inputAudioFilename[MAX_FILENAME_LEN] = "";
+char FileAudioDeviceFactory::_outputAudioFilename[MAX_FILENAME_LEN] = "";
+
+FileAudioDevice* FileAudioDeviceFactory::CreateFileAudioDevice(
+ const int32_t id) {
+ // Bail out here if the files aren't set.
+ if (strlen(_inputAudioFilename) == 0 || strlen(_outputAudioFilename) == 0) {
+ printf("Was compiled with WEBRTC_DUMMY_AUDIO_PLAY_STATIC_FILE "
+ "but did not set input/output files to use. Bailing out.\n");
+ exit(1);
+ }
+ return new FileAudioDevice(id, _inputAudioFilename, _outputAudioFilename);
+}
+
+void FileAudioDeviceFactory::SetFilenamesToUse(
+ const char* inputAudioFilename, const char* outputAudioFilename) {
+ assert(strlen(inputAudioFilename) < MAX_FILENAME_LEN &&
+ strlen(outputAudioFilename) < MAX_FILENAME_LEN);
+
+ // Copy the strings since we don't know the lifetime of the input pointers.
+ strncpy(_inputAudioFilename, inputAudioFilename, MAX_FILENAME_LEN);
+ strncpy(_outputAudioFilename, outputAudioFilename, MAX_FILENAME_LEN);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.h b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.h
new file mode 100644
index 00000000000..9975d7b90e0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_FACTORY_H
+#define WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_FACTORY_H
+
+#include "webrtc/common_types.h"
+
+namespace webrtc {
+
+class FileAudioDevice;
+
+// This class is used by audio_device_impl.cc when WebRTC is compiled with
+// WEBRTC_DUMMY_FILE_DEVICES. The application must include this file and set the
+// filenames to use before the audio device module is initialized. This is
+// intended for test tools which use the audio device module.
+class FileAudioDeviceFactory {
+ public:
+ static FileAudioDevice* CreateFileAudioDevice(const int32_t id);
+
+ // The input file must be a readable 48k stereo raw file. The output
+ // file must be writable. The strings will be copied.
+ static void SetFilenamesToUse(const char* inputAudioFilename,
+ const char* outputAudioFilename);
+
+ private:
+ static const uint32_t MAX_FILENAME_LEN = 256;
+ static char _inputAudioFilename[MAX_FILENAME_LEN];
+ static char _outputAudioFilename[MAX_FILENAME_LEN];
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_FACTORY_H
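
Going only by the two declarations above, a test tool would drive this factory roughly as follows; the file paths and the wrapper function are placeholders, and in the WEBRTC_DUMMY_FILE_DEVICES build it is the audio device module itself, not the test, that later calls CreateFileAudioDevice():

    #include "webrtc/modules/audio_device/dummy/file_audio_device_factory.h"

    // Hypothetical test setup: both files are raw 48 kHz stereo PCM, per the
    // contract documented in the header above.
    void ConfigureFileAudioDeviceForTest() {
      webrtc::FileAudioDeviceFactory::SetFilenamesToUse(
          "/tmp/capture_48k_stereo.pcm",   // read as the fake microphone
          "/tmp/playout_48k_stereo.pcm");  // written with the rendered output
      // The strings are copied, so the literals need not outlive this call.
    }
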
diff --git a/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h b/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
index eb403aa9b98..ec40274de49 100644
--- a/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
+++ b/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
@@ -107,10 +107,8 @@ class AudioDeviceModule : public RefCountedModule {
uint16_t* volumeRight) const = 0;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool* available) = 0;
virtual int32_t InitSpeaker() = 0;
virtual bool SpeakerIsInitialized() const = 0;
- virtual int32_t MicrophoneIsAvailable(bool* available) = 0;
virtual int32_t InitMicrophone() = 0;
virtual bool MicrophoneIsInitialized() const = 0;
diff --git a/chromium/third_party/webrtc/modules/audio_device/include/audio_device_defines.h b/chromium/third_party/webrtc/modules/audio_device/include/audio_device_defines.h
index c37c4b13955..56a584ef9ea 100644
--- a/chromium/third_party/webrtc/modules/audio_device/include/audio_device_defines.h
+++ b/chromium/third_party/webrtc/modules/audio_device/include/audio_device_defines.h
@@ -63,14 +63,16 @@ public:
const int32_t clockDrift,
const uint32_t currentMicLevel,
const bool keyPressed,
- uint32_t& newMicLevel) = 0;
+ uint32_t& newMicLevel) = 0;
virtual int32_t NeedMorePlayData(const uint32_t nSamples,
const uint8_t nBytesPerSample,
const uint8_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
- uint32_t& nSamplesOut) = 0;
+ uint32_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) = 0;
// Method to pass captured data directly and unmixed to network channels.
// |channel_ids| contains a list of VoE channels which are the
@@ -85,8 +87,8 @@ public:
// will be ignored.
// The return value is the new microphone volume, in the range of |0, 255].
// When the volume does not need to be updated, it returns 0.
- // TODO(xians): Make the interface pure virtual after libjingle has its
- // implementation.
+ // TODO(xians): Remove this interface after Chrome and Libjingle switch
+ // to OnData().
virtual int OnDataAvailable(const int voe_channels[],
int number_of_voe_channels,
const int16_t* audio_data,
@@ -98,6 +100,37 @@ public:
bool key_pressed,
bool need_audio_processing) { return 0; }
+ // Method to pass the captured audio data to the specific VoE channel.
+ // |voe_channel| is the id of the VoE channel which is the sink to the
+ // capture data.
+ // TODO(xians): Remove this interface after Libjingle switches to
+ // PushCaptureData().
+ virtual void OnData(int voe_channel, const void* audio_data,
+ int bits_per_sample, int sample_rate,
+ int number_of_channels,
+ int number_of_frames) {}
+
+ // Method to push the captured audio data to the specific VoE channel.
+ // The data will not undergo audio processing.
+ // |voe_channel| is the id of the VoE channel which is the sink to the
+ // capture data.
+ // TODO(xians): Make the interface pure virtual after Libjingle
+ // has its implementation.
+ virtual void PushCaptureData(int voe_channel, const void* audio_data,
+ int bits_per_sample, int sample_rate,
+ int number_of_channels,
+ int number_of_frames) {}
+
+ // Method to pull mixed render audio data from all active VoE channels.
+ // The data will not be passed as reference for audio processing internally.
+ // TODO(xians): Support getting the unmixed render data from specific VoE
+ // channel.
+ virtual void PullRenderData(int bits_per_sample, int sample_rate,
+ int number_of_channels, int number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) {}
+
protected:
virtual ~AudioTransport() {}
};
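
For a concrete sense of the widened NeedMorePlayData() contract, here is a sketch of a playout-only AudioTransport that renders silence. It assumes only the signatures shown above, leaves the capture-side pure virtuals unimplemented (so the class stays abstract in this sketch), and treats -1 in the new out-parameters as "no timing information", which is an assumption rather than documented behaviour:

    #include <string.h>
    #include "webrtc/modules/audio_device/include/audio_device_defines.h"

    class SilentPlayoutTransport : public webrtc::AudioTransport {
     public:
      virtual int32_t NeedMorePlayData(const uint32_t nSamples,
                                       const uint8_t nBytesPerSample,
                                       const uint8_t nChannels,
                                       const uint32_t samplesPerSec,
                                       void* audioSamples,
                                       uint32_t& nSamplesOut,
                                       int64_t* elapsed_time_ms,
                                       int64_t* ntp_time_ms) {
        memset(audioSamples, 0, nSamples * nBytesPerSample * nChannels);
        nSamplesOut = nSamples;
        if (elapsed_time_ms) *elapsed_time_ms = -1;  // assumed "unknown" marker
        if (ntp_time_ms) *ntp_time_ms = -1;          // assumed "unknown" marker
        return 0;
      }
      // RecordedDataIsAvailable() is omitted here, so this class stays abstract.
    };
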
diff --git a/chromium/third_party/webrtc/modules/audio_device/include/fake_audio_device.h b/chromium/third_party/webrtc/modules/audio_device/include/fake_audio_device.h
index 0248317550d..5cdf54fe8cd 100644
--- a/chromium/third_party/webrtc/modules/audio_device/include/fake_audio_device.h
+++ b/chromium/third_party/webrtc/modules/audio_device/include/fake_audio_device.h
@@ -25,19 +25,11 @@ class FakeAudioDeviceModule : public AudioDeviceModule {
return 0;
}
virtual int32_t Init() { return 0; }
- virtual int32_t SpeakerIsAvailable(bool* available) {
- *available = true;
- return 0;
- }
virtual int32_t InitSpeaker() { return 0; }
virtual int32_t SetPlayoutDevice(uint16_t index) { return 0; }
virtual int32_t SetPlayoutDevice(WindowsDeviceType device) { return 0; }
virtual int32_t SetStereoPlayout(bool enable) { return 0; }
virtual int32_t StopPlayout() { return 0; }
- virtual int32_t MicrophoneIsAvailable(bool* available) {
- *available = true;
- return 0;
- }
virtual int32_t InitMicrophone() { return 0; }
virtual int32_t SetRecordingDevice(uint16_t index) { return 0; }
virtual int32_t SetRecordingDevice(WindowsDeviceType device) { return 0; }
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.cc b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.cc
index bad3915f14f..7a7189a2b7f 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.cc
@@ -175,15 +175,6 @@ bool AudioDeviceIPhone::Initialized() const {
return (_initialized);
}
-int32_t AudioDeviceIPhone::SpeakerIsAvailable(bool& available) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,
- "%s", __FUNCTION__);
-
- // speaker is always available in IOS
- available = true;
- return 0;
-}
-
int32_t AudioDeviceIPhone::InitSpeaker() {
WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,
"%s", __FUNCTION__);
@@ -214,30 +205,6 @@ int32_t AudioDeviceIPhone::InitSpeaker() {
return 0;
}
-int32_t AudioDeviceIPhone::MicrophoneIsAvailable(bool& available) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,
- "%s", __FUNCTION__);
-
- available = false;
-
- OSStatus result = -1;
- UInt32 channel = 0;
- UInt32 size = sizeof(channel);
- result = AudioSessionGetProperty(kAudioSessionProperty_AudioInputAvailable,
- &size, &channel);
- if (channel != 0) {
- // API is not supported on this platform, we return available = true
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
- _id, " API call not supported on this version");
- available = true;
- return 0;
- }
-
- available = (channel == 0) ? false : true;
-
- return 0;
-}
-
int32_t AudioDeviceIPhone::InitMicrophone() {
WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,
"%s", __FUNCTION__);
@@ -1332,7 +1299,7 @@ int32_t AudioDeviceIPhone::InitPlayOrRecord() {
// todo: Add 48 kHz (increase buffer sizes). Other fs?
if ((playoutDesc.mSampleRate > 44090.0)
&& (playoutDesc.mSampleRate < 44110.0)) {
- _adbSampFreq = 44000;
+ _adbSampFreq = 44100;
} else if ((playoutDesc.mSampleRate > 15990.0)
&& (playoutDesc.mSampleRate < 16010.0)) {
_adbSampFreq = 16000;
@@ -1716,7 +1683,10 @@ void AudioDeviceIPhone::UpdatePlayoutDelay() {
if (_playoutDelayMeasurementCounter >= 100) {
// Update HW and OS delay every second, unlikely to change
- _playoutDelay = 0;
+ // Since this is eventually rounded to integral ms, add 0.5ms
+ // here to get round-to-nearest-int behavior instead of
+ // truncation.
+ float totalDelaySeconds = 0.0005;
// HW output latency
Float32 f32(0);
@@ -1727,7 +1697,8 @@ void AudioDeviceIPhone::UpdatePlayoutDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error HW latency (result=%d)", result);
}
- _playoutDelay += static_cast<int>(f32 * 1000000);
+ assert(f32 >= 0);
+ totalDelaySeconds += f32;
// HW buffer duration
f32 = 0;
@@ -1737,7 +1708,8 @@ void AudioDeviceIPhone::UpdatePlayoutDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error HW buffer duration (result=%d)", result);
}
- _playoutDelay += static_cast<int>(f32 * 1000000);
+ assert(f32 >= 0);
+ totalDelaySeconds += f32;
// AU latency
Float64 f64(0);
@@ -1748,16 +1720,17 @@ void AudioDeviceIPhone::UpdatePlayoutDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error AU latency (result=%d)", result);
}
- _playoutDelay += static_cast<int>(f64 * 1000000);
+ assert(f64 >= 0);
+ totalDelaySeconds += f64;
// To ms
- _playoutDelay = (_playoutDelay - 500) / 1000;
+ _playoutDelay = static_cast<uint32_t>(totalDelaySeconds * 1000);
// Reset counter
_playoutDelayMeasurementCounter = 0;
}
- // todo: Add playout buffer? (Only used for 44.1 kHz)
+ // todo: Add playout buffer?
}
void AudioDeviceIPhone::UpdateRecordingDelay() {
@@ -1766,7 +1739,10 @@ void AudioDeviceIPhone::UpdateRecordingDelay() {
if (_recordingDelayMeasurementCounter >= 100) {
// Update HW and OS delay every second, unlikely to change
- _recordingDelayHWAndOS = 0;
+ // Since this is eventually rounded to integral ms, add 0.5ms
+ // here to get round-to-nearest-int behavior instead of
+ // truncation.
+ float totalDelaySeconds = 0.0005;
// HW input latency
Float32 f32(0);
@@ -1777,7 +1753,8 @@ void AudioDeviceIPhone::UpdateRecordingDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error HW latency (result=%d)", result);
}
- _recordingDelayHWAndOS += static_cast<int>(f32 * 1000000);
+ assert(f32 >= 0);
+ totalDelaySeconds += f32;
// HW buffer duration
f32 = 0;
@@ -1787,7 +1764,8 @@ void AudioDeviceIPhone::UpdateRecordingDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error HW buffer duration (result=%d)", result);
}
- _recordingDelayHWAndOS += static_cast<int>(f32 * 1000000);
+ assert(f32 >= 0);
+ totalDelaySeconds += f32;
// AU latency
Float64 f64(0);
@@ -1799,10 +1777,12 @@ void AudioDeviceIPhone::UpdateRecordingDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error AU latency (result=%d)", result);
}
- _recordingDelayHWAndOS += static_cast<int>(f64 * 1000000);
+ assert(f64 >= 0);
+ totalDelaySeconds += f64;
// To ms
- _recordingDelayHWAndOS = (_recordingDelayHWAndOS - 500) / 1000;
+ _recordingDelayHWAndOS =
+ static_cast<uint32_t>(totalDelaySeconds * 1000);
// Reset counter
_recordingDelayMeasurementCounter = 0;
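
As a worked example of the delay bookkeeping above (latency figures are illustrative, not measured on any device): the 0.5 ms seed and the three queried latencies are summed in seconds, and only the final cast yields the millisecond value that gets reported.

    // Illustrative figures, all in seconds as returned by the AudioSession/
    // AudioUnit queries above:
    float totalDelaySeconds = 0.0005f   // round-to-nearest seed (0.5 ms)
                            + 0.0045f   // HW output latency
                            + 0.0160f   // HW IO buffer duration
                            + 0.0012f;  // AudioUnit latency
    // 0.0222 s -> 22.2 ms -> reported as 22 ms (true delay 21.7 ms, rounded).
    uint32_t delayMs = static_cast<uint32_t>(totalDelaySeconds * 1000);
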
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
index fdaf94d6a60..011b6acf481 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
@@ -90,10 +90,8 @@ public:
uint16_t& volumeRight) const;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available);
virtual int32_t InitSpeaker();
virtual bool SpeakerIsInitialized() const;
- virtual int32_t MicrophoneIsAvailable(bool& available);
virtual int32_t InitMicrophone();
virtual bool MicrophoneIsInitialized() const;
diff --git a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
index caa1efed8ff..67a845c845b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
@@ -180,7 +180,7 @@ int32_t AudioDeviceLinuxALSA::Init()
{
return 0;
}
-
+#if defined(USE_X11)
//Get X display handle for typing detection
_XDisplay = XOpenDisplay(NULL);
if (!_XDisplay)
@@ -188,7 +188,7 @@ int32_t AudioDeviceLinuxALSA::Init()
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
" failed to open X display, typing detection will not work");
}
-
+#endif
_playWarning = 0;
_playError = 0;
_recWarning = 0;
@@ -254,13 +254,13 @@ int32_t AudioDeviceLinuxALSA::Terminate()
_critSect.Enter();
}
-
+#if defined(USE_X11)
if (_XDisplay)
{
XCloseDisplay(_XDisplay);
_XDisplay = NULL;
}
-
+#endif
_initialized = false;
_outputDeviceIsSpecified = false;
_inputDeviceIsSpecified = false;
@@ -273,34 +273,6 @@ bool AudioDeviceLinuxALSA::Initialized() const
return (_initialized);
}
-int32_t AudioDeviceLinuxALSA::SpeakerIsAvailable(bool& available)
-{
-
- bool wasInitialized = _mixerManager.SpeakerIsInitialized();
-
- // Make an attempt to open up the
- // output mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitSpeaker() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitSpeaker was successful, we know that a valid speaker
- // exists
- available = true;
-
- // Close the initialized output mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseSpeaker();
- }
-
- return 0;
-}
-
int32_t AudioDeviceLinuxALSA::InitSpeaker()
{
@@ -316,34 +288,6 @@ int32_t AudioDeviceLinuxALSA::InitSpeaker()
return _mixerManager.OpenSpeaker(devName);
}
-int32_t AudioDeviceLinuxALSA::MicrophoneIsAvailable(bool& available)
-{
-
- bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
-
- // Make an attempt to open up the
- // input mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitMicrophone() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitMicrophone was successful, we know that a valid
- // microphone exists
- available = true;
-
- // Close the initialized input mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseMicrophone();
- }
-
- return 0;
-}
-
int32_t AudioDeviceLinuxALSA::InitMicrophone()
{
@@ -2342,7 +2286,7 @@ bool AudioDeviceLinuxALSA::RecThreadProcess()
bool AudioDeviceLinuxALSA::KeyPressed() const{
-
+#if defined(USE_X11)
char szKey[32];
unsigned int i = 0;
char state = 0;
@@ -2360,5 +2304,8 @@ bool AudioDeviceLinuxALSA::KeyPressed() const{
// Save old state
memcpy((char*)_oldKeyState, (char*)szKey, sizeof(_oldKeyState));
return (state != 0);
+#else
+ return false;
+#endif
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
index 35abc152fc8..1d75c8e94a0 100644
--- a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
+++ b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
@@ -15,8 +15,9 @@
#include "webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-
+#if defined(USE_X11)
#include <X11/Xlib.h>
+#endif
#include <alsa/asoundlib.h>
#include <sys/ioctl.h>
#include <sys/soundcard.h>
@@ -89,10 +90,8 @@ public:
uint16_t& volumeRight) const OVERRIDE;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitSpeaker() OVERRIDE;
virtual bool SpeakerIsInitialized() const OVERRIDE;
- virtual int32_t MicrophoneIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitMicrophone() OVERRIDE;
virtual bool MicrophoneIsInitialized() const OVERRIDE;
@@ -135,7 +134,7 @@ public:
virtual int32_t StereoRecordingIsAvailable(bool& available) OVERRIDE;
virtual int32_t SetStereoRecording(bool enable) OVERRIDE;
virtual int32_t StereoRecording(bool& enabled) const OVERRIDE;
-
+
// Delay information and control
virtual int32_t SetPlayoutBuffer(
const AudioDeviceModule::BufferType type,
@@ -174,8 +173,8 @@ private:
bool KeyPressed() const;
private:
- void Lock() { _critSect.Enter(); };
- void UnLock() { _critSect.Leave(); };
+ void Lock() EXCLUSIVE_LOCK_FUNCTION(_critSect) { _critSect.Enter(); };
+ void UnLock() UNLOCK_FUNCTION(_critSect) { _critSect.Leave(); };
private:
inline int32_t InputSanityCheckAfterUnlockedPeriod() const;
inline int32_t OutputSanityCheckAfterUnlockedPeriod() const;
@@ -188,7 +187,7 @@ private:
private:
AudioDeviceBuffer* _ptrAudioBuffer;
-
+
CriticalSectionWrapper& _critSect;
ThreadWrapper* _ptrThreadRec;
@@ -250,7 +249,9 @@ private:
uint16_t _playBufDelayFixed; // fixed playback delay
char _oldKeyState[32];
+#if defined(USE_X11)
Display* _XDisplay;
+#endif
};
}
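
The EXCLUSIVE_LOCK_FUNCTION/UNLOCK_FUNCTION annotations added to Lock() and UnLock() above, and the EXCLUSIVE_LOCKS_REQUIRED seen later on the PulseAudio implementation, feed Clang's -Wthread-safety analysis. A small sketch of the pattern, assuming the macros come from the system_wrappers thread-annotation header as elsewhere in the module:

    #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
    #include "webrtc/system_wrappers/interface/thread_annotations.h"  // assumed location

    class GuardedCounter {
     public:
      GuardedCounter()
          : crit_(*webrtc::CriticalSectionWrapper::CreateCriticalSection()),
            value_(0) {}
      ~GuardedCounter() { delete &crit_; }

      void Lock() EXCLUSIVE_LOCK_FUNCTION(crit_) { crit_.Enter(); }
      void UnLock() UNLOCK_FUNCTION(crit_) { crit_.Leave(); }

      // Clang warns at compile time if a caller reaches this without Lock().
      void Increment() EXCLUSIVE_LOCKS_REQUIRED(crit_) { ++value_; }

     private:
      webrtc::CriticalSectionWrapper& crit_;
      int value_;
    };
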
diff --git a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc
index e095eed0d5b..00d4afed892 100644
--- a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc
@@ -341,34 +341,6 @@ bool AudioDeviceLinuxPulse::Initialized() const
return (_initialized);
}
-int32_t AudioDeviceLinuxPulse::SpeakerIsAvailable(bool& available)
-{
-
- bool wasInitialized = _mixerManager.SpeakerIsInitialized();
-
- // Make an attempt to open up the
- // output mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitSpeaker() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitSpeaker was successful, we know that a valid speaker exists
- //
- available = true;
-
- // Close the initialized output mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseSpeaker();
- }
-
- return 0;
-}
-
int32_t AudioDeviceLinuxPulse::InitSpeaker()
{
@@ -414,34 +386,6 @@ int32_t AudioDeviceLinuxPulse::InitSpeaker()
return 0;
}
-int32_t AudioDeviceLinuxPulse::MicrophoneIsAvailable(bool& available)
-{
-
- bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
-
- // Make an attempt to open up the
- // input mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitMicrophone() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitMicrophone was successful, we know that a valid microphone
- // exists
- available = true;
-
- // Close the initialized input mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseMicrophone();
- }
-
- return 0;
-}
-
int32_t AudioDeviceLinuxPulse::InitMicrophone()
{
@@ -2613,7 +2557,7 @@ int32_t AudioDeviceLinuxPulse::ReadRecordedData(const void* bufferData,
int32_t AudioDeviceLinuxPulse::ProcessRecordedData(
int8_t *bufferData,
uint32_t bufferSizeInSamples,
- uint32_t recDelay)
+ uint32_t recDelay) EXCLUSIVE_LOCKS_REQUIRED(_critSect)
{
uint32_t currentMicLevel(0);
uint32_t newMicLevel(0);
diff --git a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h
index 43228a1babf..cde7e464390 100644
--- a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h
+++ b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h
@@ -151,10 +151,8 @@ public:
uint16_t& volumeRight) const OVERRIDE;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitSpeaker() OVERRIDE;
virtual bool SpeakerIsInitialized() const OVERRIDE;
- virtual int32_t MicrophoneIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitMicrophone() OVERRIDE;
virtual bool MicrophoneIsInitialized() const OVERRIDE;
@@ -224,16 +222,12 @@ public:
virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) OVERRIDE;
private:
- void Lock()
- {
+ void Lock() EXCLUSIVE_LOCK_FUNCTION(_critSect) {
_critSect.Enter();
}
- ;
- void UnLock()
- {
+ void UnLock() UNLOCK_FUNCTION(_critSect) {
_critSect.Leave();
}
- ;
void WaitForOperationCompletion(pa_operation* paOperation) const;
void WaitForSuccess(pa_operation* paOperation) const;
diff --git a/chromium/third_party/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h b/chromium/third_party/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h
index b5186fa7bf2..052390a658e 100644
--- a/chromium/third_party/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h
+++ b/chromium/third_party/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h
@@ -32,7 +32,7 @@
#include <stddef.h> // for NULL
#include <string.h>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/trace.h"
// This file provides macros for creating "symbol table" classes to simplify the
diff --git a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc
index b07c94dd11c..2c875796f18 100644
--- a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc
@@ -571,7 +571,6 @@ int32_t AudioDeviceMac::MicrophoneIsAvailable(bool& available)
return 0;
}
-
int32_t AudioDeviceMac::InitMicrophone()
{
diff --git a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h
index a2662239286..fae4041133b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h
+++ b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h
@@ -108,10 +108,8 @@ public:
uint16_t& volumeRight) const;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available);
virtual int32_t InitSpeaker();
virtual bool SpeakerIsInitialized() const;
- virtual int32_t MicrophoneIsAvailable(bool& available);
virtual int32_t InitMicrophone();
virtual bool MicrophoneIsInitialized() const;
@@ -167,7 +165,6 @@ public:
// CPU load
virtual int32_t CPULoad(uint16_t& load) const;
-public:
virtual bool PlayoutWarning() const;
virtual bool PlayoutError() const;
virtual bool RecordingWarning() const;
@@ -177,10 +174,12 @@ public:
virtual void ClearRecordingWarning();
virtual void ClearRecordingError();
-public:
virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
private:
+ virtual int32_t MicrophoneIsAvailable(bool& available);
+ virtual int32_t SpeakerIsAvailable(bool& available);
+
void Lock()
{
_critSect.Enter();
@@ -236,7 +235,6 @@ private:
int32_t
HandleProcessorOverload(AudioObjectPropertyAddress propertyAddress);
-private:
static OSStatus deviceIOProc(AudioDeviceID device,
const AudioTimeStamp *now,
const AudioBufferList *inputData,
@@ -284,10 +282,8 @@ private:
bool CaptureWorkerThread();
bool RenderWorkerThread();
-private:
bool KeyPressed();
-private:
AudioDeviceBuffer* _ptrAudioBuffer;
CriticalSectionWrapper& _critSect;
@@ -325,7 +321,6 @@ private:
AudioDeviceModule::BufferType _playBufType;
-private:
bool _initialized;
bool _isShutDown;
bool _recording;
@@ -361,7 +356,6 @@ private:
int32_t _renderDelayOffsetSamples;
-private:
uint16_t _playBufDelayFixed; // fixed playback delay
uint16_t _playWarning;
@@ -378,7 +372,6 @@ private:
int _captureBufSizeSamples;
int _renderBufSizeSamples;
-private:
// Typing detection
// 0x5c is key "9", after that comes function keys.
bool prev_key_state_[0x5d];
diff --git a/chromium/third_party/webrtc/modules/audio_device/main/source/OWNERS b/chromium/third_party/webrtc/modules/audio_device/main/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_device/main/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.cc b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.cc
index a71b821189a..32b5e49af1f 100644
--- a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.cc
@@ -751,25 +751,6 @@ bool AudioDeviceWindowsCore::Initialized() const
}
// ----------------------------------------------------------------------------
-// SpeakerIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceWindowsCore::SpeakerIsAvailable(bool& available)
-{
-
- CriticalSectionScoped lock(&_critSect);
-
- if (_ptrDeviceOut == NULL)
- {
- return -1;
- }
-
- available = true;
-
- return 0;
-}
-
-// ----------------------------------------------------------------------------
// InitSpeaker
// ----------------------------------------------------------------------------
@@ -852,25 +833,6 @@ int32_t AudioDeviceWindowsCore::InitSpeaker()
}
// ----------------------------------------------------------------------------
-// MicrophoneIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceWindowsCore::MicrophoneIsAvailable(bool& available)
-{
-
- CriticalSectionScoped lock(&_critSect);
-
- if (_ptrDeviceIn == NULL)
- {
- return -1;
- }
-
- available = true;
-
- return 0;
-}
-
-// ----------------------------------------------------------------------------
// InitMicrophone
// ----------------------------------------------------------------------------
diff --git a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.h b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.h
index 7a9a5245ca9..4d30928c5ed 100644
--- a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.h
+++ b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.h
@@ -138,10 +138,8 @@ public:
virtual int32_t WaveOutVolume(uint16_t& volumeLeft, uint16_t& volumeRight) const;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available);
virtual int32_t InitSpeaker();
virtual bool SpeakerIsInitialized() const;
- virtual int32_t MicrophoneIsAvailable(bool& available);
virtual int32_t InitMicrophone();
virtual bool MicrophoneIsInitialized() const;
diff --git a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
index c05d0a29a7a..e2e515b1004 100644
--- a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
@@ -490,33 +490,6 @@ bool AudioDeviceWindowsWave::Initialized() const
}
// ----------------------------------------------------------------------------
-// SpeakerIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceWindowsWave::SpeakerIsAvailable(bool& available)
-{
-
- // Enumerate all avaliable speakers and make an attempt to open up the
- // output mixer corresponding to the currently selected output device.
- //
- if (InitSpeaker() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitSpeaker was successful, we know that a valid speaker exists
- //
- available = true;
-
- // Close the initialized output mixer
- //
- _mixerManager.CloseSpeaker();
-
- return 0;
-}
-
-// ----------------------------------------------------------------------------
// InitSpeaker
// ----------------------------------------------------------------------------
@@ -555,33 +528,6 @@ int32_t AudioDeviceWindowsWave::InitSpeaker()
}
// ----------------------------------------------------------------------------
-// MicrophoneIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceWindowsWave::MicrophoneIsAvailable(bool& available)
-{
-
- // Enumerate all avaliable microphones and make an attempt to open up the
- // input mixer corresponding to the currently selected output device.
- //
- if (InitMicrophone() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitMicrophone was successful, we know that a valid microphone exists
- //
- available = true;
-
- // Close the initialized input mixer
- //
- _mixerManager.CloseMicrophone();
-
- return 0;
-}
-
-// ----------------------------------------------------------------------------
// InitMicrophone
// ----------------------------------------------------------------------------
diff --git a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.h b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.h
index 480bbcbc778..1767b90a858 100644
--- a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.h
+++ b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.h
@@ -94,10 +94,8 @@ public:
virtual int32_t WaveOutVolume(uint16_t& volumeLeft, uint16_t& volumeRight) const;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available);
virtual int32_t InitSpeaker();
virtual bool SpeakerIsInitialized() const;
- virtual int32_t MicrophoneIsAvailable(bool& available);
virtual int32_t InitMicrophone();
virtual bool MicrophoneIsInitialized() const;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/OWNERS b/chromium/third_party/webrtc/modules/audio_processing/OWNERS
index 5a2563444b6..7c1f7881c2c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/OWNERS
+++ b/chromium/third_party/webrtc/modules/audio_processing/OWNERS
@@ -1,2 +1,8 @@
+aluebs@webrtc.org
andrew@webrtc.org
bjornv@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/Android.mk b/chromium/third_party/webrtc/modules/audio_processing/aec/Android.mk
index 3ad52b96625..181e87d9a76 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/Android.mk
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/Android.mk
@@ -47,3 +47,12 @@ ifndef NDK_ROOT
include external/stlport/libstlport.mk
endif
include $(BUILD_STATIC_LIBRARY)
+
+#########################
+# Build the neon library.
+ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+
+LOCAL_SRC_FILES += \
+ aec_core_neon.c
+
+endif # ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.c
index bfa087c23de..207c6dc3bfc 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.c
@@ -67,7 +67,7 @@ static const float sqrtHanning[65] = {
// Matlab code to produce table:
// weightCurve = [0 ; 0.3 * sqrt(linspace(0,1,64))' + 0.1];
// fprintf(1, '\t%.4f, %.4f, %.4f, %.4f, %.4f, %.4f,\n', weightCurve);
-const float WebRtcAec_weightCurve[65] = {
+ALIGN16_BEG const float ALIGN16_END WebRtcAec_weightCurve[65] = {
0.0000f, 0.1000f, 0.1378f, 0.1535f, 0.1655f, 0.1756f, 0.1845f, 0.1926f,
0.2000f, 0.2069f, 0.2134f, 0.2195f, 0.2254f, 0.2309f, 0.2363f, 0.2414f,
0.2464f, 0.2512f, 0.2558f, 0.2604f, 0.2648f, 0.2690f, 0.2732f, 0.2773f,
@@ -81,7 +81,7 @@ const float WebRtcAec_weightCurve[65] = {
// Matlab code to produce table:
// overDriveCurve = [sqrt(linspace(0,1,65))' + 1];
// fprintf(1, '\t%.4f, %.4f, %.4f, %.4f, %.4f, %.4f,\n', overDriveCurve);
-const float WebRtcAec_overDriveCurve[65] = {
+ALIGN16_BEG const float ALIGN16_END WebRtcAec_overDriveCurve[65] = {
1.0000f, 1.1250f, 1.1768f, 1.2165f, 1.2500f, 1.2795f, 1.3062f, 1.3307f,
1.3536f, 1.3750f, 1.3953f, 1.4146f, 1.4330f, 1.4507f, 1.4677f, 1.4841f,
1.5000f, 1.5154f, 1.5303f, 1.5449f, 1.5590f, 1.5728f, 1.5863f, 1.5995f,
@@ -116,7 +116,7 @@ extern int webrtc_aec_instance_count;
// "Private" function prototypes.
static void ProcessBlock(AecCore* aec);
-static void NonLinearProcessing(AecCore* aec, short* output, short* outputH);
+static void NonLinearProcessing(AecCore* aec, float* output, float* outputH);
static void GetHighbandGain(const float* lambda, float* nlpGainHband);
@@ -160,28 +160,28 @@ int WebRtcAec_CreateAec(AecCore** aecInst) {
return -1;
}
- aec->nearFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
+ aec->nearFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
if (!aec->nearFrBuf) {
WebRtcAec_FreeAec(aec);
aec = NULL;
return -1;
}
- aec->outFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
+ aec->outFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
if (!aec->outFrBuf) {
WebRtcAec_FreeAec(aec);
aec = NULL;
return -1;
}
- aec->nearFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
+ aec->nearFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
if (!aec->nearFrBufH) {
WebRtcAec_FreeAec(aec);
aec = NULL;
return -1;
}
- aec->outFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
+ aec->outFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
if (!aec->outFrBufH) {
WebRtcAec_FreeAec(aec);
aec = NULL;
@@ -419,6 +419,7 @@ WebRtcAec_FilterFar_t WebRtcAec_FilterFar;
WebRtcAec_ScaleErrorSignal_t WebRtcAec_ScaleErrorSignal;
WebRtcAec_FilterAdaptation_t WebRtcAec_FilterAdaptation;
WebRtcAec_OverdriveAndSuppress_t WebRtcAec_OverdriveAndSuppress;
+WebRtcAec_ComfortNoise_t WebRtcAec_ComfortNoise;
int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
int i;
@@ -472,9 +473,21 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
aec->delay_logging_enabled = 0;
memset(aec->delay_histogram, 0, sizeof(aec->delay_histogram));
+ aec->reported_delay_enabled = 1;
aec->extended_filter_enabled = 0;
aec->num_partitions = kNormalNumPartitions;
+ // Update the delay estimator with filter length. We use half the
+ // |num_partitions| to take the echo path into account. In practice we say
+ // that the echo has a duration of maximum half |num_partitions|, which is not
+ // true, but serves as a crude measure.
+ WebRtc_set_allowed_offset(aec->delay_estimator, aec->num_partitions / 2);
+ // TODO(bjornv): I currently hard coded the enable. Once we've established
+ // that AECM has no performance regression, robust_validation will be enabled
+ // all the time and the APIs to turn it on/off will be removed. Hence, remove
+ // this line then.
+ WebRtc_enable_robust_validation(aec->delay_estimator, 1);
+
// Default target suppression mode.
aec->nlp_mode = 1;
@@ -557,6 +570,7 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
WebRtcAec_ScaleErrorSignal = ScaleErrorSignal;
WebRtcAec_FilterAdaptation = FilterAdaptation;
WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppress;
+ WebRtcAec_ComfortNoise = ComfortNoise;
#if defined(WEBRTC_ARCH_X86_FAMILY)
if (WebRtc_GetCPUInfo(kSSE2)) {
@@ -564,6 +578,14 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
}
#endif
+#if defined(MIPS_FPU_LE)
+ WebRtcAec_InitAec_mips();
+#endif
+
+#if defined(WEBRTC_DETECT_ARM_NEON) || defined(WEBRTC_ARCH_ARM_NEON)
+ WebRtcAec_InitAec_neon();
+#endif
+
aec_rdft_init();
return 0;
@@ -599,11 +621,11 @@ int WebRtcAec_MoveFarReadPtr(AecCore* aec, int elements) {
}
void WebRtcAec_ProcessFrame(AecCore* aec,
- const short* nearend,
- const short* nearendH,
+ const float* nearend,
+ const float* nearendH,
int knownDelay,
- int16_t* out,
- int16_t* outH) {
+ float* out,
+ float* outH) {
int out_elements = 0;
// For each frame the process is as follows:
@@ -724,7 +746,7 @@ int WebRtcAec_GetDelayMetricsCore(AecCore* self, int* median, int* std) {
// Calculate the L1 norm, with median value as central moment.
for (i = 0; i < kHistorySizeBlocks; i++) {
- l1_norm += (float)(fabs(i - my_median) * self->delay_histogram[i]);
+ l1_norm += (float)abs(i - my_median) * self->delay_histogram[i];
}
*std = (int)(l1_norm / (float)num_delay_values + 0.5f) * kMsPerBlock;
@@ -768,9 +790,19 @@ void WebRtcAec_SetConfigCore(AecCore* self,
}
}
+void WebRtcAec_enable_reported_delay(AecCore* self, int enable) {
+ self->reported_delay_enabled = enable;
+}
+
+int WebRtcAec_reported_delay_enabled(AecCore* self) {
+ return self->reported_delay_enabled;
+}
+
void WebRtcAec_enable_delay_correction(AecCore* self, int enable) {
self->extended_filter_enabled = enable;
self->num_partitions = enable ? kExtendedNumPartitions : kNormalNumPartitions;
+ // Update the delay estimator with filter length. See InitAEC() for details.
+ WebRtc_set_allowed_offset(self->delay_estimator, self->num_partitions / 2);
}
int WebRtcAec_delay_correction_enabled(AecCore* self) {
@@ -786,7 +818,7 @@ void WebRtcAec_SetSystemDelay(AecCore* self, int delay) {
static void ProcessBlock(AecCore* aec) {
int i;
- float d[PART_LEN], y[PART_LEN], e[PART_LEN], dH[PART_LEN];
+ float y[PART_LEN], e[PART_LEN];
float scale;
float fft[PART_LEN2];
@@ -805,30 +837,22 @@ static void ProcessBlock(AecCore* aec) {
const float ramp = 1.0002f;
const float gInitNoise[2] = {0.999f, 0.001f};
- int16_t nearend[PART_LEN];
- int16_t* nearend_ptr = NULL;
- int16_t output[PART_LEN];
- int16_t outputH[PART_LEN];
+ float nearend[PART_LEN];
+ float* nearend_ptr = NULL;
+ float output[PART_LEN];
+ float outputH[PART_LEN];
float* xf_ptr = NULL;
- memset(dH, 0, sizeof(dH));
+ // Concatenate old and new nearend blocks.
if (aec->sampFreq == 32000) {
- // Get the upper band first so we can reuse |nearend|.
WebRtc_ReadBuffer(aec->nearFrBufH, (void**)&nearend_ptr, nearend, PART_LEN);
- for (i = 0; i < PART_LEN; i++) {
- dH[i] = (float)(nearend_ptr[i]);
- }
- memcpy(aec->dBufH + PART_LEN, dH, sizeof(float) * PART_LEN);
+ memcpy(aec->dBufH + PART_LEN, nearend_ptr, sizeof(nearend));
}
WebRtc_ReadBuffer(aec->nearFrBuf, (void**)&nearend_ptr, nearend, PART_LEN);
+ memcpy(aec->dBuf + PART_LEN, nearend_ptr, sizeof(nearend));
// ---------- Ooura fft ----------
- // Concatenate old and new nearend blocks.
- for (i = 0; i < PART_LEN; i++) {
- d[i] = (float)(nearend_ptr[i]);
- }
- memcpy(aec->dBuf + PART_LEN, d, sizeof(float) * PART_LEN);
#ifdef WEBRTC_AEC_DEBUG_DUMP
{
@@ -940,7 +964,7 @@ static void ProcessBlock(AecCore* aec) {
}
for (i = 0; i < PART_LEN; i++) {
- e[i] = d[i] - y[i];
+ e[i] = nearend_ptr[i] - y[i];
}
// Error fft
@@ -999,7 +1023,7 @@ static void ProcessBlock(AecCore* aec) {
#endif
}
-static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
+static void NonLinearProcessing(AecCore* aec, float* output, float* outputH) {
float efw[2][PART_LEN1], dfw[2][PART_LEN1], xfw[2][PART_LEN1];
complex_t comfortNoiseHband[PART_LEN1];
float fft[PART_LEN2];
@@ -1266,7 +1290,7 @@ static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
WebRtcAec_OverdriveAndSuppress(aec, hNl, hNlFb, efw);
// Add comfort noise.
- ComfortNoise(aec, efw, comfortNoiseHband, aec->noisePow, hNl);
+ WebRtcAec_ComfortNoise(aec, efw, comfortNoiseHband, aec->noisePow, hNl);
// TODO(bjornv): Investigate how to take the windowing below into account if
// needed.
@@ -1293,12 +1317,12 @@ static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
fft[i] *= scale; // fft scaling
fft[i] = fft[i] * sqrtHanning[i] + aec->outBuf[i];
- // Saturation protection
- output[i] = (short)WEBRTC_SPL_SAT(
- WEBRTC_SPL_WORD16_MAX, fft[i], WEBRTC_SPL_WORD16_MIN);
-
fft[PART_LEN + i] *= scale; // fft scaling
aec->outBuf[i] = fft[PART_LEN + i] * sqrtHanning[PART_LEN - i];
+
+ // Saturate output to keep it in the allowed range.
+ output[i] = WEBRTC_SPL_SAT(
+ WEBRTC_SPL_WORD16_MAX, fft[i], WEBRTC_SPL_WORD16_MIN);
}
// For H band
@@ -1323,8 +1347,8 @@ static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
// compute gain factor
for (i = 0; i < PART_LEN; i++) {
- dtmp = (float)aec->dBufH[i];
- dtmp = (float)dtmp * nlpGainHband; // for variable gain
+ dtmp = aec->dBufH[i];
+ dtmp = dtmp * nlpGainHband; // for variable gain
// add some comfort noise where Hband is attenuated
if (flagHbandCn == 1) {
@@ -1332,8 +1356,8 @@ static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
dtmp += cnScaleHband * fft[i];
}
- // Saturation protection
- outputH[i] = (short)WEBRTC_SPL_SAT(
+ // Saturate output to keep it in the allowed range.
+ outputH[i] = WEBRTC_SPL_SAT(
WEBRTC_SPL_WORD16_MAX, dtmp, WEBRTC_SPL_WORD16_MIN);
}
}
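
The delay-metrics hunk above switches from fabs() to integer abs() when accumulating an L1 norm around the histogram median. As a standalone sketch of that metric (a mean absolute deviation in blocks, converted to milliseconds), the following mirrors the arithmetic shown in the hunk; the function and parameter names are illustrative, and the median bin is assumed to have been found beforehand as WebRtcAec_GetDelayMetricsCore does.

#include <stdlib.h>

// Sketch only: L1-norm spread of a delay histogram around its median.
static int DelaySpreadMs(const int* histogram, int num_bins,
                         int median_bin, int ms_per_block) {
  float l1_norm = 0.0f;
  int num_values = 0;
  int i;
  for (i = 0; i < num_bins; i++) {
    l1_norm += (float)abs(i - median_bin) * histogram[i];
    num_values += histogram[i];
  }
  if (num_values == 0) {
    return 0;
  }
  // Round to the nearest block, then convert blocks to milliseconds.
  return (int)(l1_norm / (float)num_values + 0.5f) * ms_per_block;
}
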
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
index d3c6d7e2b2e..93bfed46688 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
@@ -22,17 +22,6 @@
#define PART_LEN1 (PART_LEN + 1) // Unique fft coefficients
#define PART_LEN2 (PART_LEN * 2) // Length of partition * 2
-// Delay estimator constants, used for logging.
-enum {
- kMaxDelayBlocks = 60
-};
-enum {
- kLookaheadBlocks = 15
-};
-enum {
- kHistorySizeBlocks = kMaxDelayBlocks + kLookaheadBlocks
-};
-
typedef float complex_t[2];
// For performance reasons, some arrays of complex numbers are replaced by twice
// as long arrays of float, all the real parts followed by all the imaginary
@@ -65,14 +54,20 @@ int WebRtcAec_CreateAec(AecCore** aec);
int WebRtcAec_FreeAec(AecCore* aec);
int WebRtcAec_InitAec(AecCore* aec, int sampFreq);
void WebRtcAec_InitAec_SSE2(void);
+#if defined(MIPS_FPU_LE)
+void WebRtcAec_InitAec_mips(void);
+#endif
+#if defined(WEBRTC_DETECT_ARM_NEON) || defined(WEBRTC_ARCH_ARM_NEON)
+void WebRtcAec_InitAec_neon(void);
+#endif
void WebRtcAec_BufferFarendPartition(AecCore* aec, const float* farend);
void WebRtcAec_ProcessFrame(AecCore* aec,
- const short* nearend,
- const short* nearendH,
+ const float* nearend,
+ const float* nearendH,
int knownDelay,
- int16_t* out,
- int16_t* outH);
+ float* out,
+ float* outH);
// A helper function to call WebRtc_MoveReadPtr() for all far-end buffers.
// Returns the number of elements moved, and adjusts |system_delay| by the
@@ -101,6 +96,12 @@ void WebRtcAec_SetConfigCore(AecCore* self,
int metrics_mode,
int delay_logging);
+// Non-zero enables, zero disables.
+void WebRtcAec_enable_reported_delay(AecCore* self, int enable);
+
+// Returns non-zero if reported delay is enabled and zero if disabled.
+int WebRtcAec_reported_delay_enabled(AecCore* self);
+
// We now interpret delay correction to mean an extended filter length feature.
// We reuse the delay correction infrastructure to avoid changes through to
// libjingle. See details along with |DelayCorrection| in
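
A minimal caller-side sketch of the API declared above, assuming far-end audio has already been buffered (for example via WebRtcAec_BufferFarendPartition) and that each pointer refers to one FRAME_LEN-sized frame. Creation and initialization would normally happen once rather than per frame; they appear inline here only to keep the sketch self-contained.

#include "webrtc/modules/audio_processing/aec/aec_core.h"

static int RunAecFrameSketch(const float* nearend, const float* nearendH,
                             int known_delay, float* out, float* outH) {
  AecCore* aec = NULL;
  if (WebRtcAec_CreateAec(&aec) == -1) {
    return -1;
  }
  if (WebRtcAec_InitAec(aec, 32000) == -1) {
    WebRtcAec_FreeAec(aec);
    return -1;
  }
  // New in this change: the reported system delay can be ignored at runtime.
  WebRtcAec_enable_reported_delay(aec, 0);
  // Near-end input and AEC output are now float instead of int16_t.
  WebRtcAec_ProcessFrame(aec, nearend, nearendH, known_delay, out, outH);
  return WebRtcAec_FreeAec(aec);
}
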
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
index 193369382ca..1c560f91c9c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
@@ -26,6 +26,17 @@ enum {
};
static const int kNormalNumPartitions = 12;
+// Delay estimator constants, used for logging.
+enum {
+ kMaxDelayBlocks = 60
+};
+enum {
+ kLookaheadBlocks = 15
+};
+enum {
+ kHistorySizeBlocks = kMaxDelayBlocks + kLookaheadBlocks
+};
+
// Extended filter adaptation parameters.
// TODO(ajm): No narrowband tuning yet.
static const float kExtendedMu = 0.4f;
@@ -122,6 +133,7 @@ struct AecCore {
void* delay_estimator_farend;
void* delay_estimator;
+ int reported_delay_enabled; // 0 = disabled, otherwise enabled.
// 1 = extended filter mode enabled, 0 = disabled.
int extended_filter_enabled;
// Runtime selection of number of filter partitions.
@@ -151,4 +163,11 @@ typedef void (*WebRtcAec_OverdriveAndSuppress_t)(AecCore* aec,
float efw[2][PART_LEN1]);
extern WebRtcAec_OverdriveAndSuppress_t WebRtcAec_OverdriveAndSuppress;
+typedef void (*WebRtcAec_ComfortNoise_t)(AecCore* aec,
+ float efw[2][PART_LEN1],
+ complex_t* comfortNoiseHband,
+ const float* noisePow,
+ const float* lambda);
+extern WebRtcAec_ComfortNoise_t WebRtcAec_ComfortNoise;
+
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_INTERNAL_H_
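
The WebRtcAec_ComfortNoise_t typedef added above follows the same runtime-dispatch pattern as the other WebRtcAec_* function pointers: a generic C routine is installed first and platform-specific init functions overwrite the slot. The fragment below is not WebRTC code, only a compact illustration of that pattern with made-up names.

#include <stddef.h>

typedef void (*ProcessBins_t)(float* bins, int len);

static void ProcessBins_C(float* bins, int len) {
  int i;
  for (i = 0; i < len; i++) {
    bins[i] *= 0.5f;  // Placeholder for the portable reference computation.
  }
}

ProcessBins_t ProcessBins = NULL;

void InitProcessBins(void) {
  ProcessBins = ProcessBins_C;  // Always start with the generic version.
#if defined(MIPS_FPU_LE)
  // A MIPS build would overwrite the slot here, exactly as
  // WebRtcAec_InitAec_mips() does for WebRtcAec_ComfortNoise and friends.
#endif
}
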
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.c
new file mode 100644
index 00000000000..d861e10f908
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.c
@@ -0,0 +1,774 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * The core AEC algorithm, which is presented with time-aligned signals.
+ */
+
+#include "webrtc/modules/audio_processing/aec/aec_core.h"
+
+#include <math.h>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
+#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
+
+static const int flagHbandCn = 1; // flag for adding comfort noise in H band
+extern const float WebRtcAec_weightCurve[65];
+extern const float WebRtcAec_overDriveCurve[65];
+
+void WebRtcAec_ComfortNoise_mips(AecCore* aec,
+ float efw[2][PART_LEN1],
+ complex_t* comfortNoiseHband,
+ const float* noisePow,
+ const float* lambda) {
+ int i, num;
+ float rand[PART_LEN];
+ float noise, noiseAvg, tmp, tmpAvg;
+ int16_t randW16[PART_LEN];
+ complex_t u[PART_LEN1];
+
+ const float pi2 = 6.28318530717959f;
+ const float pi2t = pi2 / 32768;
+
+ // Generate a uniform random array on [0 1]
+ WebRtcSpl_RandUArray(randW16, PART_LEN, &aec->seed);
+
+ int16_t *randWptr = randW16;
+ float randTemp, randTemp2, randTemp3, randTemp4;
+ short tmp1s, tmp2s, tmp3s, tmp4s;
+
+ for (i = 0; i < PART_LEN; i+=4) {
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lh %[tmp1s], 0(%[randWptr]) \n\t"
+ "lh %[tmp2s], 2(%[randWptr]) \n\t"
+ "lh %[tmp3s], 4(%[randWptr]) \n\t"
+ "lh %[tmp4s], 6(%[randWptr]) \n\t"
+ "mtc1 %[tmp1s], %[randTemp] \n\t"
+ "mtc1 %[tmp2s], %[randTemp2] \n\t"
+ "mtc1 %[tmp3s], %[randTemp3] \n\t"
+ "mtc1 %[tmp4s], %[randTemp4] \n\t"
+ "cvt.s.w %[randTemp], %[randTemp] \n\t"
+ "cvt.s.w %[randTemp2], %[randTemp2] \n\t"
+ "cvt.s.w %[randTemp3], %[randTemp3] \n\t"
+ "cvt.s.w %[randTemp4], %[randTemp4] \n\t"
+ "addiu %[randWptr], %[randWptr], 8 \n\t"
+ "mul.s %[randTemp], %[randTemp], %[pi2t] \n\t"
+ "mul.s %[randTemp2], %[randTemp2], %[pi2t] \n\t"
+ "mul.s %[randTemp3], %[randTemp3], %[pi2t] \n\t"
+ "mul.s %[randTemp4], %[randTemp4], %[pi2t] \n\t"
+ ".set pop \n\t"
+ : [randWptr] "+r" (randWptr), [randTemp] "=&f" (randTemp),
+ [randTemp2] "=&f" (randTemp2), [randTemp3] "=&f" (randTemp3),
+ [randTemp4] "=&f" (randTemp4), [tmp1s] "=&r" (tmp1s),
+ [tmp2s] "=&r" (tmp2s), [tmp3s] "=&r" (tmp3s),
+ [tmp4s] "=&r" (tmp4s)
+ : [pi2t] "f" (pi2t)
+ : "memory"
+ );
+
+ u[i+1][0] = (float)cos(randTemp);
+ u[i+1][1] = (float)sin(randTemp);
+ u[i+2][0] = (float)cos(randTemp2);
+ u[i+2][1] = (float)sin(randTemp2);
+ u[i+3][0] = (float)cos(randTemp3);
+ u[i+3][1] = (float)sin(randTemp3);
+ u[i+4][0] = (float)cos(randTemp4);
+ u[i+4][1] = (float)sin(randTemp4);
+ }
+
+ // Reject LF noise
+ float *u_ptr = &u[1][0];
+ float noise2, noise3, noise4;
+ float tmp1f, tmp2f, tmp3f, tmp4f, tmp5f, tmp6f, tmp7f, tmp8f;
+
+ u[0][0] = 0;
+ u[0][1] = 0;
+ for (i = 1; i < PART_LEN1; i+=4) {
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lwc1 %[noise], 4(%[noisePow]) \n\t"
+ "lwc1 %[noise2], 8(%[noisePow]) \n\t"
+ "lwc1 %[noise3], 12(%[noisePow]) \n\t"
+ "lwc1 %[noise4], 16(%[noisePow]) \n\t"
+ "sqrt.s %[noise], %[noise] \n\t"
+ "sqrt.s %[noise2], %[noise2] \n\t"
+ "sqrt.s %[noise3], %[noise3] \n\t"
+ "sqrt.s %[noise4], %[noise4] \n\t"
+ "lwc1 %[tmp1f], 0(%[u_ptr]) \n\t"
+ "lwc1 %[tmp2f], 4(%[u_ptr]) \n\t"
+ "lwc1 %[tmp3f], 8(%[u_ptr]) \n\t"
+ "lwc1 %[tmp4f], 12(%[u_ptr]) \n\t"
+ "lwc1 %[tmp5f], 16(%[u_ptr]) \n\t"
+ "lwc1 %[tmp6f], 20(%[u_ptr]) \n\t"
+ "lwc1 %[tmp7f], 24(%[u_ptr]) \n\t"
+ "lwc1 %[tmp8f], 28(%[u_ptr]) \n\t"
+ "addiu %[noisePow], %[noisePow], 16 \n\t"
+ "mul.s %[tmp1f], %[tmp1f], %[noise] \n\t"
+ "mul.s %[tmp2f], %[tmp2f], %[noise] \n\t"
+ "mul.s %[tmp3f], %[tmp3f], %[noise2] \n\t"
+ "mul.s %[tmp4f], %[tmp4f], %[noise2] \n\t"
+ "mul.s %[tmp5f], %[tmp5f], %[noise3] \n\t"
+ "mul.s %[tmp6f], %[tmp6f], %[noise3] \n\t"
+ "swc1 %[tmp1f], 0(%[u_ptr]) \n\t"
+ "swc1 %[tmp3f], 8(%[u_ptr]) \n\t"
+ "mul.s %[tmp8f], %[tmp8f], %[noise4] \n\t"
+ "mul.s %[tmp7f], %[tmp7f], %[noise4] \n\t"
+ "neg.s %[tmp2f] \n\t"
+ "neg.s %[tmp4f] \n\t"
+ "neg.s %[tmp6f] \n\t"
+ "neg.s %[tmp8f] \n\t"
+ "swc1 %[tmp5f], 16(%[u_ptr]) \n\t"
+ "swc1 %[tmp7f], 24(%[u_ptr]) \n\t"
+ "swc1 %[tmp2f], 4(%[u_ptr]) \n\t"
+ "swc1 %[tmp4f], 12(%[u_ptr]) \n\t"
+ "swc1 %[tmp6f], 20(%[u_ptr]) \n\t"
+ "swc1 %[tmp8f], 28(%[u_ptr]) \n\t"
+ "addiu %[u_ptr], %[u_ptr], 32 \n\t"
+ ".set pop \n\t"
+ : [u_ptr] "+r" (u_ptr), [noisePow] "+r" (noisePow),
+ [noise] "=&f" (noise), [noise2] "=&f" (noise2),
+ [noise3] "=&f" (noise3), [noise4] "=&f" (noise4),
+ [tmp1f] "=&f" (tmp1f), [tmp2f] "=&f" (tmp2f),
+ [tmp3f] "=&f" (tmp3f), [tmp4f] "=&f" (tmp4f),
+ [tmp5f] "=&f" (tmp5f), [tmp6f] "=&f" (tmp6f),
+ [tmp7f] "=&f" (tmp7f), [tmp8f] "=&f" (tmp8f)
+ :
+ : "memory"
+ );
+ }
+ u[PART_LEN][1] = 0;
+ noisePow -= PART_LEN;
+
+ u_ptr = &u[0][0];
+ float *u_ptr_end = &u[PART_LEN][0];
+ float *efw_ptr_0 = &efw[0][0];
+ float *efw_ptr_1 = &efw[1][0];
+ float tmp9f, tmp10f;
+ const float tmp1c = 1.0;
+ const float tmp2c = 0.0;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "lwc1 %[tmp1f], 0(%[lambda]) \n\t"
+ "lwc1 %[tmp6f], 4(%[lambda]) \n\t"
+ "addiu %[lambda], %[lambda], 8 \n\t"
+ "c.lt.s %[tmp1f], %[tmp1c] \n\t"
+ "bc1f 4f \n\t"
+ " nop \n\t"
+ "c.lt.s %[tmp6f], %[tmp1c] \n\t"
+ "bc1f 3f \n\t"
+ " nop \n\t"
+ "2: \n\t"
+ "mul.s %[tmp1f], %[tmp1f], %[tmp1f] \n\t"
+ "mul.s %[tmp6f], %[tmp6f], %[tmp6f] \n\t"
+ "sub.s %[tmp1f], %[tmp1c], %[tmp1f] \n\t"
+ "sub.s %[tmp6f], %[tmp1c], %[tmp6f] \n\t"
+ "sqrt.s %[tmp1f], %[tmp1f] \n\t"
+ "sqrt.s %[tmp6f], %[tmp6f] \n\t"
+ "lwc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
+ "lwc1 %[tmp3f], 0(%[u_ptr]) \n\t"
+ "lwc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
+ "lwc1 %[tmp8f], 8(%[u_ptr]) \n\t"
+ "lwc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
+ "lwc1 %[tmp5f], 4(%[u_ptr]) \n\t"
+ "lwc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
+ "lwc1 %[tmp10f], 12(%[u_ptr]) \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[tmp3f], %[tmp1f], %[tmp3f] \n\t"
+ "add.s %[tmp2f], %[tmp2f], %[tmp3f] \n\t"
+ "mul.s %[tmp3f], %[tmp1f], %[tmp5f] \n\t"
+ "add.s %[tmp4f], %[tmp4f], %[tmp3f] \n\t"
+ "mul.s %[tmp3f], %[tmp6f], %[tmp8f] \n\t"
+ "add.s %[tmp7f], %[tmp7f], %[tmp3f] \n\t"
+ "mul.s %[tmp3f], %[tmp6f], %[tmp10f] \n\t"
+ "add.s %[tmp9f], %[tmp9f], %[tmp3f] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[tmp2f], %[tmp2f], %[tmp1f], %[tmp3f] \n\t"
+ "madd.s %[tmp4f], %[tmp4f], %[tmp1f], %[tmp5f] \n\t"
+ "madd.s %[tmp7f], %[tmp7f], %[tmp6f], %[tmp8f] \n\t"
+ "madd.s %[tmp9f], %[tmp9f], %[tmp6f], %[tmp10f] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
+ "swc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
+ "swc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
+ "b 5f \n\t"
+ " swc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
+ "3: \n\t"
+ "mul.s %[tmp1f], %[tmp1f], %[tmp1f] \n\t"
+ "sub.s %[tmp1f], %[tmp1c], %[tmp1f] \n\t"
+ "sqrt.s %[tmp1f], %[tmp1f] \n\t"
+ "lwc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
+ "lwc1 %[tmp3f], 0(%[u_ptr]) \n\t"
+ "lwc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
+ "lwc1 %[tmp5f], 4(%[u_ptr]) \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[tmp3f], %[tmp1f], %[tmp3f] \n\t"
+ "add.s %[tmp2f], %[tmp2f], %[tmp3f] \n\t"
+ "mul.s %[tmp3f], %[tmp1f], %[tmp5f] \n\t"
+ "add.s %[tmp4f], %[tmp4f], %[tmp3f] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[tmp2f], %[tmp2f], %[tmp1f], %[tmp3f] \n\t"
+ "madd.s %[tmp4f], %[tmp4f], %[tmp1f], %[tmp5f] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
+ "b 5f \n\t"
+ " swc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
+ "4: \n\t"
+ "c.lt.s %[tmp6f], %[tmp1c] \n\t"
+ "bc1f 5f \n\t"
+ " nop \n\t"
+ "mul.s %[tmp6f], %[tmp6f], %[tmp6f] \n\t"
+ "sub.s %[tmp6f], %[tmp1c], %[tmp6f] \n\t"
+ "sqrt.s %[tmp6f], %[tmp6f] \n\t"
+ "lwc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
+ "lwc1 %[tmp8f], 8(%[u_ptr]) \n\t"
+ "lwc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
+ "lwc1 %[tmp10f], 12(%[u_ptr]) \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[tmp3f], %[tmp6f], %[tmp8f] \n\t"
+ "add.s %[tmp7f], %[tmp7f], %[tmp3f] \n\t"
+ "mul.s %[tmp3f], %[tmp6f], %[tmp10f] \n\t"
+ "add.s %[tmp9f], %[tmp9f], %[tmp3f] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[tmp7f], %[tmp7f], %[tmp6f], %[tmp8f] \n\t"
+ "madd.s %[tmp9f], %[tmp9f], %[tmp6f], %[tmp10f] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
+ "swc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
+ "5: \n\t"
+ "addiu %[u_ptr], %[u_ptr], 16 \n\t"
+ "addiu %[efw_ptr_0], %[efw_ptr_0], 8 \n\t"
+ "bne %[u_ptr], %[u_ptr_end], 1b \n\t"
+ " addiu %[efw_ptr_1], %[efw_ptr_1], 8 \n\t"
+ ".set pop \n\t"
+ : [lambda] "+r" (lambda), [u_ptr] "+r" (u_ptr),
+ [efw_ptr_0] "+r" (efw_ptr_0), [efw_ptr_1] "+r" (efw_ptr_1),
+ [tmp1f] "=&f" (tmp1f), [tmp2f] "=&f" (tmp2f), [tmp3f] "=&f" (tmp3f),
+ [tmp4f] "=&f" (tmp4f), [tmp5f] "=&f" (tmp5f),
+ [tmp6f] "=&f" (tmp6f), [tmp7f] "=&f" (tmp7f), [tmp8f] "=&f" (tmp8f),
+ [tmp9f] "=&f" (tmp9f), [tmp10f] "=&f" (tmp10f)
+ : [tmp1c] "f" (tmp1c), [tmp2c] "f" (tmp2c), [u_ptr_end] "r" (u_ptr_end)
+ : "memory"
+ );
+
+ lambda -= PART_LEN;
+ tmp = sqrtf(WEBRTC_SPL_MAX(1 - lambda[PART_LEN] * lambda[PART_LEN], 0));
+ //tmp = 1 - lambda[i];
+ efw[0][PART_LEN] += tmp * u[PART_LEN][0];
+ efw[1][PART_LEN] += tmp * u[PART_LEN][1];
+
+ // For H band comfort noise
+ // TODO: don't compute noise and "tmp" twice. Use the previous results.
+ noiseAvg = 0.0;
+ tmpAvg = 0.0;
+ num = 0;
+ if (aec->sampFreq == 32000 && flagHbandCn == 1) {
+ for (i = 0; i < PART_LEN; i++) {
+ rand[i] = ((float)randW16[i]) / 32768;
+ }
+
+ // average noise scale
+ // average over second half of freq spectrum (i.e., 4->8khz)
+ // TODO: we shouldn't need num. We know how many elements we're summing.
+ for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
+ num++;
+ noiseAvg += sqrtf(noisePow[i]);
+ }
+ noiseAvg /= (float)num;
+
+ // average nlp scale
+ // average over second half of freq spectrum (i.e., 4->8khz)
+ // TODO: we shouldn't need num. We know how many elements we're summing.
+ num = 0;
+ for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
+ num++;
+ tmpAvg += sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
+ }
+ tmpAvg /= (float)num;
+
+ // Use average noise for H band
+ // TODO: we should probably have a new random vector here.
+ // Reject LF noise
+ u[0][0] = 0;
+ u[0][1] = 0;
+ for (i = 1; i < PART_LEN1; i++) {
+ tmp = pi2 * rand[i - 1];
+
+ // Use average noise for H band
+ u[i][0] = noiseAvg * (float)cos(tmp);
+ u[i][1] = -noiseAvg * (float)sin(tmp);
+ }
+ u[PART_LEN][1] = 0;
+
+ for (i = 0; i < PART_LEN1; i++) {
+ // Use average NLP weight for H band
+ comfortNoiseHband[i][0] = tmpAvg * u[i][0];
+ comfortNoiseHband[i][1] = tmpAvg * u[i][1];
+ }
+ }
+}
+
+void WebRtcAec_FilterFar_mips(AecCore *aec, float yf[2][PART_LEN1]) {
+ int i;
+ for (i = 0; i < aec->num_partitions; i++) {
+ int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+ int pos = i * PART_LEN1;
+ // Check for wrap
+ if (i + aec->xfBufBlockPos >= aec->num_partitions) {
+ xPos -= aec->num_partitions * (PART_LEN1);
+ }
+ float *yf0 = yf[0];
+ float *yf1 = yf[1];
+ float *aRe = aec->xfBuf[0] + xPos;
+ float *aIm = aec->xfBuf[1] + xPos;
+ float *bRe = aec->wfBuf[0] + pos;
+ float *bIm = aec->wfBuf[1] + pos;
+ float f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13;
+ int len = PART_LEN1 >> 1;
+ int len1 = PART_LEN1 & 1;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[bRe]) \n\t"
+ "lwc1 %[f2], 0(%[bIm]) \n\t"
+ "lwc1 %[f3], 0(%[aIm]) \n\t"
+ "lwc1 %[f4], 4(%[aRe]) \n\t"
+ "lwc1 %[f5], 4(%[bRe]) \n\t"
+ "lwc1 %[f6], 4(%[bIm]) \n\t"
+ "mul.s %[f8], %[f0], %[f1] \n\t"
+ "mul.s %[f0], %[f0], %[f2] \n\t"
+ "mul.s %[f9], %[f4], %[f5] \n\t"
+ "mul.s %[f4], %[f4], %[f6] \n\t"
+ "lwc1 %[f7], 4(%[aIm]) \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f12], %[f2], %[f3] \n\t"
+ "mul.s %[f1], %[f3], %[f1] \n\t"
+ "mul.s %[f11], %[f6], %[f7] \n\t"
+ "addiu %[aRe], %[aRe], 8 \n\t"
+ "addiu %[aIm], %[aIm], 8 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "sub.s %[f8], %[f8], %[f12] \n\t"
+ "mul.s %[f12], %[f7], %[f5] \n\t"
+ "lwc1 %[f2], 0(%[yf0]) \n\t"
+ "add.s %[f1], %[f0], %[f1] \n\t"
+ "lwc1 %[f3], 0(%[yf1]) \n\t"
+ "sub.s %[f9], %[f9], %[f11] \n\t"
+ "lwc1 %[f6], 4(%[yf0]) \n\t"
+ "add.s %[f4], %[f4], %[f12] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "addiu %[aRe], %[aRe], 8 \n\t"
+ "addiu %[aIm], %[aIm], 8 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "nmsub.s %[f8], %[f8], %[f2], %[f3] \n\t"
+ "lwc1 %[f2], 0(%[yf0]) \n\t"
+ "madd.s %[f1], %[f0], %[f3], %[f1] \n\t"
+ "lwc1 %[f3], 0(%[yf1]) \n\t"
+ "nmsub.s %[f9], %[f9], %[f6], %[f7] \n\t"
+ "lwc1 %[f6], 4(%[yf0]) \n\t"
+ "madd.s %[f4], %[f4], %[f7], %[f5] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "lwc1 %[f5], 4(%[yf1]) \n\t"
+ "add.s %[f2], %[f2], %[f8] \n\t"
+ "addiu %[bRe], %[bRe], 8 \n\t"
+ "addiu %[bIm], %[bIm], 8 \n\t"
+ "add.s %[f3], %[f3], %[f1] \n\t"
+ "add.s %[f6], %[f6], %[f9] \n\t"
+ "add.s %[f5], %[f5], %[f4] \n\t"
+ "swc1 %[f2], 0(%[yf0]) \n\t"
+ "swc1 %[f3], 0(%[yf1]) \n\t"
+ "swc1 %[f6], 4(%[yf0]) \n\t"
+ "swc1 %[f5], 4(%[yf1]) \n\t"
+ "addiu %[yf0], %[yf0], 8 \n\t"
+ "bgtz %[len], 1b \n\t"
+ " addiu %[yf1], %[yf1], 8 \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[bRe]) \n\t"
+ "lwc1 %[f2], 0(%[bIm]) \n\t"
+ "lwc1 %[f3], 0(%[aIm]) \n\t"
+ "mul.s %[f8], %[f0], %[f1] \n\t"
+ "mul.s %[f0], %[f0], %[f2] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f12], %[f2], %[f3] \n\t"
+ "mul.s %[f1], %[f3], %[f1] \n\t"
+ "sub.s %[f8], %[f8], %[f12] \n\t"
+ "lwc1 %[f2], 0(%[yf0]) \n\t"
+ "add.s %[f1], %[f0], %[f1] \n\t"
+ "lwc1 %[f3], 0(%[yf1]) \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmsub.s %[f8], %[f8], %[f2], %[f3] \n\t"
+ "lwc1 %[f2], 0(%[yf0]) \n\t"
+ "madd.s %[f1], %[f0], %[f3], %[f1] \n\t"
+ "lwc1 %[f3], 0(%[yf1]) \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "add.s %[f2], %[f2], %[f8] \n\t"
+ "add.s %[f3], %[f3], %[f1] \n\t"
+ "swc1 %[f2], 0(%[yf0]) \n\t"
+ "swc1 %[f3], 0(%[yf1]) \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2),
+ [f3] "=&f" (f3), [f4] "=&f" (f4), [f5] "=&f" (f5),
+ [f6] "=&f" (f6), [f7] "=&f" (f7), [f8] "=&f" (f8),
+ [f9] "=&f" (f9), [f10] "=&f" (f10), [f11] "=&f" (f11),
+ [f12] "=&f" (f12), [f13] "=&f" (f13), [aRe] "+r" (aRe),
+ [aIm] "+r" (aIm), [bRe] "+r" (bRe), [bIm] "+r" (bIm),
+ [yf0] "+r" (yf0), [yf1] "+r" (yf1), [len] "+r" (len)
+ : [len1] "r" (len1)
+ : "memory"
+ );
+ }
+}
+
+void WebRtcAec_FilterAdaptation_mips(AecCore *aec,
+ float *fft,
+ float ef[2][PART_LEN1]) {
+ int i;
+ for (i = 0; i < aec->num_partitions; i++) {
+ int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1);
+ int pos;
+ // Check for wrap
+ if (i + aec->xfBufBlockPos >= aec->num_partitions) {
+ xPos -= aec->num_partitions * PART_LEN1;
+ }
+
+ pos = i * PART_LEN1;
+ float *aRe = aec->xfBuf[0] + xPos;
+ float *aIm = aec->xfBuf[1] + xPos;
+ float *bRe = ef[0];
+ float *bIm = ef[1];
+ float *fft_tmp = fft;
+
+ float f0, f1, f2, f3, f4, f5, f6 ,f7, f8, f9, f10, f11, f12;
+ int len = PART_LEN >> 1;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[bRe]) \n\t"
+ "lwc1 %[f2], 0(%[bIm]) \n\t"
+ "lwc1 %[f4], 4(%[aRe]) \n\t"
+ "lwc1 %[f5], 4(%[bRe]) \n\t"
+ "lwc1 %[f6], 4(%[bIm]) \n\t"
+ "addiu %[aRe], %[aRe], 8 \n\t"
+ "addiu %[bRe], %[bRe], 8 \n\t"
+ "mul.s %[f8], %[f0], %[f1] \n\t"
+ "mul.s %[f0], %[f0], %[f2] \n\t"
+ "lwc1 %[f3], 0(%[aIm]) \n\t"
+ "mul.s %[f9], %[f4], %[f5] \n\t"
+ "lwc1 %[f7], 4(%[aIm]) \n\t"
+ "mul.s %[f4], %[f4], %[f6] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f10], %[f3], %[f2] \n\t"
+ "mul.s %[f1], %[f3], %[f1] \n\t"
+ "mul.s %[f11], %[f7], %[f6] \n\t"
+ "mul.s %[f5], %[f7], %[f5] \n\t"
+ "addiu %[aIm], %[aIm], 8 \n\t"
+ "addiu %[bIm], %[bIm], 8 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "add.s %[f8], %[f8], %[f10] \n\t"
+ "sub.s %[f1], %[f0], %[f1] \n\t"
+ "add.s %[f9], %[f9], %[f11] \n\t"
+ "sub.s %[f5], %[f4], %[f5] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "addiu %[aIm], %[aIm], 8 \n\t"
+ "addiu %[bIm], %[bIm], 8 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "madd.s %[f8], %[f8], %[f3], %[f2] \n\t"
+ "nmsub.s %[f1], %[f0], %[f3], %[f1] \n\t"
+ "madd.s %[f9], %[f9], %[f7], %[f6] \n\t"
+ "nmsub.s %[f5], %[f4], %[f7], %[f5] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[f8], 0(%[fft_tmp]) \n\t"
+ "swc1 %[f1], 4(%[fft_tmp]) \n\t"
+ "swc1 %[f9], 8(%[fft_tmp]) \n\t"
+ "swc1 %[f5], 12(%[fft_tmp]) \n\t"
+ "bgtz %[len], 1b \n\t"
+ " addiu %[fft_tmp], %[fft_tmp], 16 \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[bRe]) \n\t"
+ "lwc1 %[f2], 0(%[bIm]) \n\t"
+ "lwc1 %[f3], 0(%[aIm]) \n\t"
+ "mul.s %[f8], %[f0], %[f1] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f10], %[f3], %[f2] \n\t"
+ "add.s %[f8], %[f8], %[f10] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[f8], %[f8], %[f3], %[f2] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[f8], 4(%[fft]) \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2),
+ [f3] "=&f" (f3), [f4] "=&f" (f4), [f5] "=&f" (f5),
+ [f6] "=&f" (f6), [f7] "=&f" (f7), [f8] "=&f" (f8),
+ [f9] "=&f" (f9), [f10] "=&f" (f10), [f11] "=&f" (f11),
+ [f12] "=&f" (f12), [aRe] "+r" (aRe), [aIm] "+r" (aIm),
+ [bRe] "+r" (bRe), [bIm] "+r" (bIm), [fft_tmp] "+r" (fft_tmp),
+ [len] "+r" (len), [fft] "=&r" (fft)
+ :
+ : "memory"
+ );
+
+ aec_rdft_inverse_128(fft);
+ memset(fft + PART_LEN, 0, sizeof(float) * PART_LEN);
+
+ // fft scaling
+ {
+ float scale = 2.0f / PART_LEN2;
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[fft_tmp], %[fft], 0 \n\t"
+ "addiu %[len], $zero, 8 \n\t"
+ "1: \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "lwc1 %[f0], 0(%[fft_tmp]) \n\t"
+ "lwc1 %[f1], 4(%[fft_tmp]) \n\t"
+ "lwc1 %[f2], 8(%[fft_tmp]) \n\t"
+ "lwc1 %[f3], 12(%[fft_tmp]) \n\t"
+ "mul.s %[f0], %[f0], %[scale] \n\t"
+ "mul.s %[f1], %[f1], %[scale] \n\t"
+ "mul.s %[f2], %[f2], %[scale] \n\t"
+ "mul.s %[f3], %[f3], %[scale] \n\t"
+ "lwc1 %[f4], 16(%[fft_tmp]) \n\t"
+ "lwc1 %[f5], 20(%[fft_tmp]) \n\t"
+ "lwc1 %[f6], 24(%[fft_tmp]) \n\t"
+ "lwc1 %[f7], 28(%[fft_tmp]) \n\t"
+ "mul.s %[f4], %[f4], %[scale] \n\t"
+ "mul.s %[f5], %[f5], %[scale] \n\t"
+ "mul.s %[f6], %[f6], %[scale] \n\t"
+ "mul.s %[f7], %[f7], %[scale] \n\t"
+ "swc1 %[f0], 0(%[fft_tmp]) \n\t"
+ "swc1 %[f1], 4(%[fft_tmp]) \n\t"
+ "swc1 %[f2], 8(%[fft_tmp]) \n\t"
+ "swc1 %[f3], 12(%[fft_tmp]) \n\t"
+ "swc1 %[f4], 16(%[fft_tmp]) \n\t"
+ "swc1 %[f5], 20(%[fft_tmp]) \n\t"
+ "swc1 %[f6], 24(%[fft_tmp]) \n\t"
+ "swc1 %[f7], 28(%[fft_tmp]) \n\t"
+ "bgtz %[len], 1b \n\t"
+ " addiu %[fft_tmp], %[fft_tmp], 32 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2),
+ [f3] "=&f" (f3), [f4] "=&f" (f4), [f5] "=&f" (f5),
+ [f6] "=&f" (f6), [f7] "=&f" (f7), [len] "=&r" (len),
+ [fft_tmp] "=&r" (fft_tmp)
+ : [scale] "f" (scale), [fft] "r" (fft)
+ : "memory"
+ );
+ }
+ aec_rdft_forward_128(fft);
+ aRe = aec->wfBuf[0] + pos;
+ aIm = aec->wfBuf[1] + pos;
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[fft_tmp], %[fft], 0 \n\t"
+ "addiu %[len], $zero, 31 \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[fft_tmp]) \n\t"
+ "lwc1 %[f2], 256(%[aRe]) \n\t"
+ "lwc1 %[f3], 4(%[fft_tmp]) \n\t"
+ "lwc1 %[f4], 4(%[aRe]) \n\t"
+ "lwc1 %[f5], 8(%[fft_tmp]) \n\t"
+ "lwc1 %[f6], 4(%[aIm]) \n\t"
+ "lwc1 %[f7], 12(%[fft_tmp]) \n\t"
+ "add.s %[f0], %[f0], %[f1] \n\t"
+ "add.s %[f2], %[f2], %[f3] \n\t"
+ "add.s %[f4], %[f4], %[f5] \n\t"
+ "add.s %[f6], %[f6], %[f7] \n\t"
+ "addiu %[fft_tmp], %[fft_tmp], 16 \n\t"
+ "swc1 %[f0], 0(%[aRe]) \n\t"
+ "swc1 %[f2], 256(%[aRe]) \n\t"
+ "swc1 %[f4], 4(%[aRe]) \n\t"
+ "addiu %[aRe], %[aRe], 8 \n\t"
+ "swc1 %[f6], 4(%[aIm]) \n\t"
+ "addiu %[aIm], %[aIm], 8 \n\t"
+ "1: \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[fft_tmp]) \n\t"
+ "lwc1 %[f2], 0(%[aIm]) \n\t"
+ "lwc1 %[f3], 4(%[fft_tmp]) \n\t"
+ "lwc1 %[f4], 4(%[aRe]) \n\t"
+ "lwc1 %[f5], 8(%[fft_tmp]) \n\t"
+ "lwc1 %[f6], 4(%[aIm]) \n\t"
+ "lwc1 %[f7], 12(%[fft_tmp]) \n\t"
+ "add.s %[f0], %[f0], %[f1] \n\t"
+ "add.s %[f2], %[f2], %[f3] \n\t"
+ "add.s %[f4], %[f4], %[f5] \n\t"
+ "add.s %[f6], %[f6], %[f7] \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "addiu %[fft_tmp], %[fft_tmp], 16 \n\t"
+ "swc1 %[f0], 0(%[aRe]) \n\t"
+ "swc1 %[f2], 0(%[aIm]) \n\t"
+ "swc1 %[f4], 4(%[aRe]) \n\t"
+ "addiu %[aRe], %[aRe], 8 \n\t"
+ "swc1 %[f6], 4(%[aIm]) \n\t"
+ "bgtz %[len], 1b \n\t"
+ " addiu %[aIm], %[aIm], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2),
+ [f3] "=&f" (f3), [f4] "=&f" (f4), [f5] "=&f" (f5),
+ [f6] "=&f" (f6), [f7] "=&f" (f7), [len] "=&r" (len),
+ [fft_tmp] "=&r" (fft_tmp)
+ : [aRe] "r" (aRe), [aIm] "r" (aIm), [fft] "r" (fft)
+ : "memory"
+ );
+ }
+}
+
+void WebRtcAec_OverdriveAndSuppress_mips(AecCore *aec,
+ float hNl[PART_LEN1],
+ const float hNlFb,
+ float efw[2][PART_LEN1]) {
+ int i;
+ const float one = 1.0;
+ float *p_hNl, *p_efw0, *p_efw1;
+ float *p_WebRtcAec_wC;
+ float temp1, temp2, temp3, temp4;
+
+ p_hNl = &hNl[0];
+ p_efw0 = &efw[0][0];
+ p_efw1 = &efw[1][0];
+ p_WebRtcAec_wC = (float*)&WebRtcAec_weightCurve[0];
+
+ for (i = 0; i < PART_LEN1; i++) {
+ // Weight subbands
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lwc1 %[temp1], 0(%[p_hNl]) \n\t"
+ "lwc1 %[temp2], 0(%[p_wC]) \n\t"
+ "c.lt.s %[hNlFb], %[temp1] \n\t"
+ "bc1f 1f \n\t"
+ " mul.s %[temp3], %[temp2], %[hNlFb] \n\t"
+ "sub.s %[temp4], %[one], %[temp2] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[temp1], %[temp1], %[temp4] \n\t"
+ "add.s %[temp1], %[temp3], %[temp1] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[temp1], %[temp3], %[temp1], %[temp4] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[temp1], 0(%[p_hNl]) \n\t"
+ "1: \n\t"
+ "addiu %[p_wC], %[p_wC], 4 \n\t"
+ ".set pop \n\t"
+ : [temp1] "=&f" (temp1), [temp2] "=&f" (temp2), [temp3] "=&f" (temp3),
+ [temp4] "=&f" (temp4), [p_wC] "+r" (p_WebRtcAec_wC)
+ : [hNlFb] "f" (hNlFb), [one] "f" (one), [p_hNl] "r" (p_hNl)
+ : "memory"
+ );
+
+ hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+
+ __asm __volatile (
+ "lwc1 %[temp1], 0(%[p_hNl]) \n\t"
+ "lwc1 %[temp3], 0(%[p_efw1]) \n\t"
+ "lwc1 %[temp2], 0(%[p_efw0]) \n\t"
+ "addiu %[p_hNl], %[p_hNl], 4 \n\t"
+ "mul.s %[temp3], %[temp3], %[temp1] \n\t"
+ "mul.s %[temp2], %[temp2], %[temp1] \n\t"
+ "addiu %[p_efw0], %[p_efw0], 4 \n\t"
+ "addiu %[p_efw1], %[p_efw1], 4 \n\t"
+ "neg.s %[temp4], %[temp3] \n\t"
+ "swc1 %[temp2], -4(%[p_efw0]) \n\t"
+ "swc1 %[temp4], -4(%[p_efw1]) \n\t"
+ : [temp1] "=&f" (temp1), [temp2] "=&f" (temp2), [temp3] "=&f" (temp3),
+ [temp4] "=&f" (temp4), [p_efw0] "+r" (p_efw0), [p_efw1] "+r" (p_efw1),
+ [p_hNl] "+r" (p_hNl)
+ :
+ : "memory"
+ );
+ }
+}
+
+void WebRtcAec_ScaleErrorSignal_mips(AecCore *aec, float ef[2][PART_LEN1]) {
+ const float mu = aec->extended_filter_enabled ? kExtendedMu : aec->normal_mu;
+ const float error_threshold = aec->extended_filter_enabled
+ ? kExtendedErrorThreshold
+ : aec->normal_error_threshold;
+ int len = (PART_LEN1);
+ float *ef0 = ef[0];
+ float *ef1 = ef[1];
+ float *xPow = aec->xPow;
+ float fac1 = 1e-10f;
+ float err_th2 = error_threshold * error_threshold;
+ float f0, f1, f2;
+#if !defined(MIPS32_R2_LE)
+ float f3;
+#endif
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "lwc1 %[f0], 0(%[xPow]) \n\t"
+ "lwc1 %[f1], 0(%[ef0]) \n\t"
+ "lwc1 %[f2], 0(%[ef1]) \n\t"
+ "add.s %[f0], %[f0], %[fac1] \n\t"
+ "div.s %[f1], %[f1], %[f0] \n\t"
+ "div.s %[f2], %[f2], %[f0] \n\t"
+ "mul.s %[f0], %[f1], %[f1] \n\t"
+#if defined(MIPS32_R2_LE)
+ "madd.s %[f0], %[f0], %[f2], %[f2] \n\t"
+#else
+ "mul.s %[f3], %[f2], %[f2] \n\t"
+ "add.s %[f0], %[f0], %[f3] \n\t"
+#endif
+ "c.le.s %[f0], %[err_th2] \n\t"
+ "nop \n\t"
+ "bc1t 2f \n\t"
+ " nop \n\t"
+ "sqrt.s %[f0], %[f0] \n\t"
+ "add.s %[f0], %[f0], %[fac1] \n\t"
+ "div.s %[f0], %[err_th], %[f0] \n\t"
+ "mul.s %[f1], %[f1], %[f0] \n\t"
+ "mul.s %[f2], %[f2], %[f0] \n\t"
+ "2: \n\t"
+ "mul.s %[f1], %[f1], %[mu] \n\t"
+ "mul.s %[f2], %[f2], %[mu] \n\t"
+ "swc1 %[f1], 0(%[ef0]) \n\t"
+ "swc1 %[f2], 0(%[ef1]) \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "addiu %[xPow], %[xPow], 4 \n\t"
+ "addiu %[ef0], %[ef0], 4 \n\t"
+ "bgtz %[len], 1b \n\t"
+ " addiu %[ef1], %[ef1], 4 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2),
+#if !defined(MIPS32_R2_LE)
+ [f3] "=&f" (f3),
+#endif
+ [xPow] "+r" (xPow), [ef0] "+r" (ef0), [ef1] "+r" (ef1),
+ [len] "+r" (len)
+ : [fac1] "f" (fac1), [err_th2] "f" (err_th2), [mu] "f" (mu),
+ [err_th] "f" (error_threshold)
+ : "memory"
+ );
+}
+
+void WebRtcAec_InitAec_mips(void)
+{
+ WebRtcAec_FilterFar = WebRtcAec_FilterFar_mips;
+ WebRtcAec_FilterAdaptation = WebRtcAec_FilterAdaptation_mips;
+ WebRtcAec_ScaleErrorSignal = WebRtcAec_ScaleErrorSignal_mips;
+ WebRtcAec_ComfortNoise = WebRtcAec_ComfortNoise_mips;
+ WebRtcAec_OverdriveAndSuppress = WebRtcAec_OverdriveAndSuppress_mips;
+}
+
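
For readers who do not want to trace the MIPS assembly, this is a plain-C reference for what WebRtcAec_ScaleErrorSignal_mips computes per frequency bin, reconstructed from the code above (the generic version lives in aec_core.c; parameter names here are illustrative). The assembly skips the square root whenever the squared magnitude is already at or below the threshold, but the result is the same.

#include <math.h>

static void ScaleErrorSignalScalar(float mu, float error_threshold, int len,
                                   const float* x_pow,
                                   float* ef_re, float* ef_im) {
  int i;
  for (i = 0; i < len; i++) {
    float abs_ef;
    ef_re[i] /= (x_pow[i] + 1e-10f);  // Normalize by far-end power.
    ef_im[i] /= (x_pow[i] + 1e-10f);
    abs_ef = sqrtf(ef_re[i] * ef_re[i] + ef_im[i] * ef_im[i]);
    if (abs_ef > error_threshold) {   // Clamp the error magnitude.
      abs_ef = error_threshold / (abs_ef + 1e-10f);
      ef_re[i] *= abs_ef;
      ef_im[i] *= abs_ef;
    }
    ef_re[i] *= mu;                   // Apply the adaptation step size.
    ef_im[i] *= mu;
  }
}
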
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.c
new file mode 100644
index 00000000000..cec0a7e3379
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.c
@@ -0,0 +1,304 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * The core AEC algorithm, neon version of speed-critical functions.
+ *
+ * Based on aec_core_sse2.c.
+ */
+
+#include "webrtc/modules/audio_processing/aec/aec_core.h"
+
+#include <arm_neon.h>
+#include <math.h>
+#include <string.h> // memset
+
+#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
+#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
+
+enum { kShiftExponentIntoTopMantissa = 8 };
+enum { kFloatExponentShift = 23 };
+
+__inline static float MulRe(float aRe, float aIm, float bRe, float bIm) {
+ return aRe * bRe - aIm * bIm;
+}
+
+static void FilterAdaptationNEON(AecCore* aec,
+ float* fft,
+ float ef[2][PART_LEN1]) {
+ int i;
+ const int num_partitions = aec->num_partitions;
+ for (i = 0; i < num_partitions; i++) {
+ int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+ int pos = i * PART_LEN1;
+ int j;
+ // Check for wrap
+ if (i + aec->xfBufBlockPos >= num_partitions) {
+ xPos -= num_partitions * PART_LEN1;
+ }
+
+ // Process the whole array...
+ for (j = 0; j < PART_LEN; j += 4) {
+ // Load xfBuf and ef.
+ const float32x4_t xfBuf_re = vld1q_f32(&aec->xfBuf[0][xPos + j]);
+ const float32x4_t xfBuf_im = vld1q_f32(&aec->xfBuf[1][xPos + j]);
+ const float32x4_t ef_re = vld1q_f32(&ef[0][j]);
+ const float32x4_t ef_im = vld1q_f32(&ef[1][j]);
+ // Calculate the product of conjugate(xfBuf) by ef.
+ // re(conjugate(a) * b) = aRe * bRe + aIm * bIm
+ // im(conjugate(a) * b)= aRe * bIm - aIm * bRe
+ const float32x4_t a = vmulq_f32(xfBuf_re, ef_re);
+ const float32x4_t e = vmlaq_f32(a, xfBuf_im, ef_im);
+ const float32x4_t c = vmulq_f32(xfBuf_re, ef_im);
+ const float32x4_t f = vmlsq_f32(c, xfBuf_im, ef_re);
+ // Interleave real and imaginary parts.
+ const float32x4x2_t g_n_h = vzipq_f32(e, f);
+ // Store
+ vst1q_f32(&fft[2 * j + 0], g_n_h.val[0]);
+ vst1q_f32(&fft[2 * j + 4], g_n_h.val[1]);
+ }
+ // ... and fixup the first imaginary entry.
+ fft[1] = MulRe(aec->xfBuf[0][xPos + PART_LEN],
+ -aec->xfBuf[1][xPos + PART_LEN],
+ ef[0][PART_LEN],
+ ef[1][PART_LEN]);
+
+ aec_rdft_inverse_128(fft);
+ memset(fft + PART_LEN, 0, sizeof(float) * PART_LEN);
+
+ // fft scaling
+ {
+ const float scale = 2.0f / PART_LEN2;
+ const float32x4_t scale_ps = vmovq_n_f32(scale);
+ for (j = 0; j < PART_LEN; j += 4) {
+ const float32x4_t fft_ps = vld1q_f32(&fft[j]);
+ const float32x4_t fft_scale = vmulq_f32(fft_ps, scale_ps);
+ vst1q_f32(&fft[j], fft_scale);
+ }
+ }
+ aec_rdft_forward_128(fft);
+
+ {
+ const float wt1 = aec->wfBuf[1][pos];
+ aec->wfBuf[0][pos + PART_LEN] += fft[1];
+ for (j = 0; j < PART_LEN; j += 4) {
+ float32x4_t wtBuf_re = vld1q_f32(&aec->wfBuf[0][pos + j]);
+ float32x4_t wtBuf_im = vld1q_f32(&aec->wfBuf[1][pos + j]);
+ const float32x4_t fft0 = vld1q_f32(&fft[2 * j + 0]);
+ const float32x4_t fft4 = vld1q_f32(&fft[2 * j + 4]);
+ const float32x4x2_t fft_re_im = vuzpq_f32(fft0, fft4);
+ wtBuf_re = vaddq_f32(wtBuf_re, fft_re_im.val[0]);
+ wtBuf_im = vaddq_f32(wtBuf_im, fft_re_im.val[1]);
+
+ vst1q_f32(&aec->wfBuf[0][pos + j], wtBuf_re);
+ vst1q_f32(&aec->wfBuf[1][pos + j], wtBuf_im);
+ }
+ aec->wfBuf[1][pos] = wt1;
+ }
+ }
+}
+
+extern const float WebRtcAec_weightCurve[65];
+extern const float WebRtcAec_overDriveCurve[65];
+
+static float32x4_t vpowq_f32(float32x4_t a, float32x4_t b) {
+ // a^b = exp2(b * log2(a))
+ // exp2(x) and log2(x) are calculated using polynomial approximations.
+ float32x4_t log2_a, b_log2_a, a_exp_b;
+
+ // Calculate log2(x), x = a.
+ {
+ // To calculate log2(x), we decompose x like this:
+ // x = y * 2^n
+ // n is an integer
+ // y is in the [1.0, 2.0) range
+ //
+ // log2(x) = log2(y) + n
+ // n can be evaluated by playing with float representation.
+ // log2(y) in a small range can be approximated, this code uses an order
+ // five polynomial approximation. The coefficients have been
+ // estimated with the Remez algorithm and the resulting
+ // polynomial has a maximum relative error of 0.00086%.
+
+ // Compute n.
+ // This is done by masking the exponent, shifting it into the top bit of
+ // the mantissa, putting eight into the biased exponent (to shift/compensate
+ // for the fact that the exponent has been shifted into the top/fractional
+ // part), and finally getting rid of the implicit leading one from the
+ // mantissa by subtracting it out.
+ const uint32x4_t vec_float_exponent_mask = vdupq_n_u32(0x7F800000);
+ const uint32x4_t vec_eight_biased_exponent = vdupq_n_u32(0x43800000);
+ const uint32x4_t vec_implicit_leading_one = vdupq_n_u32(0x43BF8000);
+ const uint32x4_t two_n = vandq_u32(vreinterpretq_u32_f32(a),
+ vec_float_exponent_mask);
+ const uint32x4_t n_1 = vshrq_n_u32(two_n, kShiftExponentIntoTopMantissa);
+ const uint32x4_t n_0 = vorrq_u32(n_1, vec_eight_biased_exponent);
+ const float32x4_t n =
+ vsubq_f32(vreinterpretq_f32_u32(n_0),
+ vreinterpretq_f32_u32(vec_implicit_leading_one));
+ // Compute y.
+ const uint32x4_t vec_mantissa_mask = vdupq_n_u32(0x007FFFFF);
+ const uint32x4_t vec_zero_biased_exponent_is_one = vdupq_n_u32(0x3F800000);
+ const uint32x4_t mantissa = vandq_u32(vreinterpretq_u32_f32(a),
+ vec_mantissa_mask);
+ const float32x4_t y =
+ vreinterpretq_f32_u32(vorrq_u32(mantissa,
+ vec_zero_biased_exponent_is_one));
+ // Approximate log2(y) ~= (y - 1) * pol5(y).
+ // pol5(y) = C5 * y^5 + C4 * y^4 + C3 * y^3 + C2 * y^2 + C1 * y + C0
+ const float32x4_t C5 = vdupq_n_f32(-3.4436006e-2f);
+ const float32x4_t C4 = vdupq_n_f32(3.1821337e-1f);
+ const float32x4_t C3 = vdupq_n_f32(-1.2315303f);
+ const float32x4_t C2 = vdupq_n_f32(2.5988452f);
+ const float32x4_t C1 = vdupq_n_f32(-3.3241990f);
+ const float32x4_t C0 = vdupq_n_f32(3.1157899f);
+ float32x4_t pol5_y = C5;
+ pol5_y = vmlaq_f32(C4, y, pol5_y);
+ pol5_y = vmlaq_f32(C3, y, pol5_y);
+ pol5_y = vmlaq_f32(C2, y, pol5_y);
+ pol5_y = vmlaq_f32(C1, y, pol5_y);
+ pol5_y = vmlaq_f32(C0, y, pol5_y);
+ const float32x4_t y_minus_one =
+ vsubq_f32(y, vreinterpretq_f32_u32(vec_zero_biased_exponent_is_one));
+ const float32x4_t log2_y = vmulq_f32(y_minus_one, pol5_y);
+
+ // Combine parts.
+ log2_a = vaddq_f32(n, log2_y);
+ }
+
+ // b * log2(a)
+ b_log2_a = vmulq_f32(b, log2_a);
+
+ // Calculate exp2(x), x = b * log2(a).
+ {
+ // To calculate 2^x, we decompose x like this:
+ // x = n + y
+ // n is an integer, the value of x - 0.5 rounded down, therefore
+ // y is in the [0.5, 1.5) range
+ //
+ // 2^x = 2^n * 2^y
+ // 2^n can be evaluated by playing with float representation.
+ // 2^y in a small range can be approximated, this code uses an order two
+ // polynomial approximation. The coefficients have been estimated
+ // with the Remez algorithm and the resulting polynomial has a
+ // maximum relative error of 0.17%.
+ // To avoid over/underflow, we reduce the range of input to ]-127, 129].
+ const float32x4_t max_input = vdupq_n_f32(129.f);
+ const float32x4_t min_input = vdupq_n_f32(-126.99999f);
+ const float32x4_t x_min = vminq_f32(b_log2_a, max_input);
+ const float32x4_t x_max = vmaxq_f32(x_min, min_input);
+ // Compute n.
+ const float32x4_t half = vdupq_n_f32(0.5f);
+ const float32x4_t x_minus_half = vsubq_f32(x_max, half);
+ const int32x4_t x_minus_half_floor = vcvtq_s32_f32(x_minus_half);
+
+ // Compute 2^n.
+ const int32x4_t float_exponent_bias = vdupq_n_s32(127);
+ const int32x4_t two_n_exponent =
+ vaddq_s32(x_minus_half_floor, float_exponent_bias);
+ const float32x4_t two_n =
+ vreinterpretq_f32_s32(vshlq_n_s32(two_n_exponent, kFloatExponentShift));
+ // Compute y.
+ const float32x4_t y = vsubq_f32(x_max, vcvtq_f32_s32(x_minus_half_floor));
+
+ // Approximate 2^y ~= C2 * y^2 + C1 * y + C0.
+ const float32x4_t C2 = vdupq_n_f32(3.3718944e-1f);
+ const float32x4_t C1 = vdupq_n_f32(6.5763628e-1f);
+ const float32x4_t C0 = vdupq_n_f32(1.0017247f);
+ float32x4_t exp2_y = C2;
+ exp2_y = vmlaq_f32(C1, y, exp2_y);
+ exp2_y = vmlaq_f32(C0, y, exp2_y);
+
+ // Combine parts.
+ a_exp_b = vmulq_f32(exp2_y, two_n);
+ }
+
+ return a_exp_b;
+}
+
+static void OverdriveAndSuppressNEON(AecCore* aec,
+ float hNl[PART_LEN1],
+ const float hNlFb,
+ float efw[2][PART_LEN1]) {
+ int i;
+ const float32x4_t vec_hNlFb = vmovq_n_f32(hNlFb);
+ const float32x4_t vec_one = vdupq_n_f32(1.0f);
+ const float32x4_t vec_minus_one = vdupq_n_f32(-1.0f);
+ const float32x4_t vec_overDriveSm = vmovq_n_f32(aec->overDriveSm);
+
+ // vectorized code (four at once)
+ for (i = 0; i + 3 < PART_LEN1; i += 4) {
+ // Weight subbands
+ float32x4_t vec_hNl = vld1q_f32(&hNl[i]);
+ const float32x4_t vec_weightCurve = vld1q_f32(&WebRtcAec_weightCurve[i]);
+ const uint32x4_t bigger = vcgtq_f32(vec_hNl, vec_hNlFb);
+ const float32x4_t vec_weightCurve_hNlFb = vmulq_f32(vec_weightCurve,
+ vec_hNlFb);
+ const float32x4_t vec_one_weightCurve = vsubq_f32(vec_one, vec_weightCurve);
+ const float32x4_t vec_one_weightCurve_hNl = vmulq_f32(vec_one_weightCurve,
+ vec_hNl);
+ const uint32x4_t vec_if0 = vandq_u32(vmvnq_u32(bigger),
+ vreinterpretq_u32_f32(vec_hNl));
+ const float32x4_t vec_one_weightCurve_add =
+ vaddq_f32(vec_weightCurve_hNlFb, vec_one_weightCurve_hNl);
+ const uint32x4_t vec_if1 =
+ vandq_u32(bigger, vreinterpretq_u32_f32(vec_one_weightCurve_add));
+
+ vec_hNl = vreinterpretq_f32_u32(vorrq_u32(vec_if0, vec_if1));
+
+ {
+ const float32x4_t vec_overDriveCurve =
+ vld1q_f32(&WebRtcAec_overDriveCurve[i]);
+ const float32x4_t vec_overDriveSm_overDriveCurve =
+ vmulq_f32(vec_overDriveSm, vec_overDriveCurve);
+ vec_hNl = vpowq_f32(vec_hNl, vec_overDriveSm_overDriveCurve);
+ vst1q_f32(&hNl[i], vec_hNl);
+ }
+
+ // Suppress error signal
+ {
+ float32x4_t vec_efw_re = vld1q_f32(&efw[0][i]);
+ float32x4_t vec_efw_im = vld1q_f32(&efw[1][i]);
+ vec_efw_re = vmulq_f32(vec_efw_re, vec_hNl);
+ vec_efw_im = vmulq_f32(vec_efw_im, vec_hNl);
+
+ // Ooura fft returns incorrect sign on imaginary component. It matters
+ // here because we are making an additive change with comfort noise.
+ vec_efw_im = vmulq_f32(vec_efw_im, vec_minus_one);
+ vst1q_f32(&efw[0][i], vec_efw_re);
+ vst1q_f32(&efw[1][i], vec_efw_im);
+ }
+ }
+
+ // scalar code for the remaining items.
+ for (; i < PART_LEN1; i++) {
+ // Weight subbands
+ if (hNl[i] > hNlFb) {
+ hNl[i] = WebRtcAec_weightCurve[i] * hNlFb +
+ (1 - WebRtcAec_weightCurve[i]) * hNl[i];
+ }
+
+ hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+
+ // Suppress error signal
+ efw[0][i] *= hNl[i];
+ efw[1][i] *= hNl[i];
+
+ // Ooura fft returns incorrect sign on imaginary component. It matters
+ // here because we are making an additive change with comfort noise.
+ efw[1][i] *= -1;
+ }
+}
+
+void WebRtcAec_InitAec_neon(void) {
+ WebRtcAec_FilterAdaptation = FilterAdaptationNEON;
+ WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressNEON;
+}
+
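
The comment block in vpowq_f32() above describes computing a^b as exp2(b * log2(a)) after splitting the float into exponent and mantissa. A scalar illustration of that split, valid for positive normal floats only, is sketched below; log2f() stands in for the fifth-order polynomial that the NEON code applies to y - 1.

#include <math.h>
#include <stdint.h>
#include <string.h>

static float Log2ViaExponentSplit(float a) {
  uint32_t bits;
  float y;
  int n;
  memcpy(&bits, &a, sizeof(bits));
  n = (int)((bits & 0x7F800000u) >> 23) - 127;  // Unbiased exponent: a = y * 2^n.
  bits = (bits & 0x007FFFFFu) | 0x3F800000u;    // Keep mantissa, force exponent to 0.
  memcpy(&y, &bits, sizeof(y));                 // y is now in [1, 2).
  return (float)n + log2f(y);                   // log2(a) = n + log2(y).
}
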
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c
index a19e8877bbc..7731b37b224 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c
@@ -116,7 +116,7 @@ static void bitrv2_32(int* ip, float* a) {
}
}
-static void bitrv2_128(float* a) {
+static void bitrv2_128_C(float* a) {
/*
Following things have been attempted but are no faster:
(a) Storing the swap indexes in a LUT (index calculations are done
@@ -512,7 +512,7 @@ static void cftmdl_128_C(float* a) {
}
}
-static void cftfsub_128(float* a) {
+static void cftfsub_128_C(float* a) {
int j, j1, j2, j3, l;
float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
@@ -542,7 +542,7 @@ static void cftfsub_128(float* a) {
}
}
-static void cftbsub_128(float* a) {
+static void cftbsub_128_C(float* a) {
int j, j1, j2, j3, l;
float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
@@ -640,17 +640,26 @@ rft_sub_128_t cft1st_128;
rft_sub_128_t cftmdl_128;
rft_sub_128_t rftfsub_128;
rft_sub_128_t rftbsub_128;
+rft_sub_128_t cftfsub_128;
+rft_sub_128_t cftbsub_128;
+rft_sub_128_t bitrv2_128;
void aec_rdft_init(void) {
cft1st_128 = cft1st_128_C;
cftmdl_128 = cftmdl_128_C;
rftfsub_128 = rftfsub_128_C;
rftbsub_128 = rftbsub_128_C;
+ cftfsub_128 = cftfsub_128_C;
+ cftbsub_128 = cftbsub_128_C;
+ bitrv2_128 = bitrv2_128_C;
#if defined(WEBRTC_ARCH_X86_FAMILY)
if (WebRtc_GetCPUInfo(kSSE2)) {
aec_rdft_init_sse2();
}
#endif
+#if defined(MIPS_FPU_LE)
+ aec_rdft_init_mips();
+#endif
// init library constants.
makewt_32();
makect_32();
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
index 3380633ce6c..795c57d44c2 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
@@ -33,13 +33,13 @@ extern float rdft_w[64];
extern float rdft_wk3ri_first[32];
extern float rdft_wk3ri_second[32];
// constants used by SSE2 but initialized in C path.
-extern float rdft_wk1r[32];
-extern float rdft_wk2r[32];
-extern float rdft_wk3r[32];
-extern float rdft_wk1i[32];
-extern float rdft_wk2i[32];
-extern float rdft_wk3i[32];
-extern float cftmdl_wk1r[4];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk1r[32];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk2r[32];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk3r[32];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk1i[32];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk2i[32];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk3i[32];
+extern ALIGN16_BEG float ALIGN16_END cftmdl_wk1r[4];
// code path selection function pointers
typedef void (*rft_sub_128_t)(float* a);
@@ -47,6 +47,9 @@ extern rft_sub_128_t rftfsub_128;
extern rft_sub_128_t rftbsub_128;
extern rft_sub_128_t cft1st_128;
extern rft_sub_128_t cftmdl_128;
+extern rft_sub_128_t cftfsub_128;
+extern rft_sub_128_t cftbsub_128;
+extern rft_sub_128_t bitrv2_128;
// entry points
void aec_rdft_init(void);
@@ -54,4 +57,8 @@ void aec_rdft_init_sse2(void);
void aec_rdft_forward_128(float* a);
void aec_rdft_inverse_128(float* a);
+#if defined(MIPS_FPU_LE)
+void aec_rdft_init_mips(void);
+#endif
+
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_RDFT_H_
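
ALIGN16_BEG/ALIGN16_END come from webrtc/typedefs.h and their exact definitions are not part of this diff. As an assumption-labeled illustration only, a comparable pair of macros could look like the following; 16-byte alignment is what lets the SSE2 path issue aligned 128-bit loads on these tables.

#if defined(_MSC_VER)
#define MY_ALIGN16_BEG __declspec(align(16))
#define MY_ALIGN16_END
#else
#define MY_ALIGN16_BEG
#define MY_ALIGN16_END __attribute__((aligned(16)))
#endif

// Hypothetical table, aligned so a single 128-bit load can fetch it.
MY_ALIGN16_BEG static const float kExampleTable[4] MY_ALIGN16_END = {
  0.0f, 1.0f, 2.0f, 3.0f
};
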
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c
new file mode 100644
index 00000000000..a0dac5f135c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c
@@ -0,0 +1,1213 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
+#include "webrtc/typedefs.h"
+
+static void bitrv2_128_mips(float *a) {
+ // n is 128
+ float xr, xi, yr, yi;
+
+ xr = a[8];
+ xi = a[9];
+ yr = a[16];
+ yi = a[17];
+ a[8] = yr;
+ a[9] = yi;
+ a[16] = xr;
+ a[17] = xi;
+
+ xr = a[64];
+ xi = a[65];
+ yr = a[2];
+ yi = a[3];
+ a[64] = yr;
+ a[65] = yi;
+ a[2] = xr;
+ a[3] = xi;
+
+ xr = a[72];
+ xi = a[73];
+ yr = a[18];
+ yi = a[19];
+ a[72] = yr;
+ a[73] = yi;
+ a[18] = xr;
+ a[19] = xi;
+
+ xr = a[80];
+ xi = a[81];
+ yr = a[10];
+ yi = a[11];
+ a[80] = yr;
+ a[81] = yi;
+ a[10] = xr;
+ a[11] = xi;
+
+ xr = a[88];
+ xi = a[89];
+ yr = a[26];
+ yi = a[27];
+ a[88] = yr;
+ a[89] = yi;
+ a[26] = xr;
+ a[27] = xi;
+
+ xr = a[74];
+ xi = a[75];
+ yr = a[82];
+ yi = a[83];
+ a[74] = yr;
+ a[75] = yi;
+ a[82] = xr;
+ a[83] = xi;
+
+ xr = a[32];
+ xi = a[33];
+ yr = a[4];
+ yi = a[5];
+ a[32] = yr;
+ a[33] = yi;
+ a[4] = xr;
+ a[5] = xi;
+
+ xr = a[40];
+ xi = a[41];
+ yr = a[20];
+ yi = a[21];
+ a[40] = yr;
+ a[41] = yi;
+ a[20] = xr;
+ a[21] = xi;
+
+ xr = a[48];
+ xi = a[49];
+ yr = a[12];
+ yi = a[13];
+ a[48] = yr;
+ a[49] = yi;
+ a[12] = xr;
+ a[13] = xi;
+
+ xr = a[56];
+ xi = a[57];
+ yr = a[28];
+ yi = a[29];
+ a[56] = yr;
+ a[57] = yi;
+ a[28] = xr;
+ a[29] = xi;
+
+ xr = a[34];
+ xi = a[35];
+ yr = a[68];
+ yi = a[69];
+ a[34] = yr;
+ a[35] = yi;
+ a[68] = xr;
+ a[69] = xi;
+
+ xr = a[42];
+ xi = a[43];
+ yr = a[84];
+ yi = a[85];
+ a[42] = yr;
+ a[43] = yi;
+ a[84] = xr;
+ a[85] = xi;
+
+ xr = a[50];
+ xi = a[51];
+ yr = a[76];
+ yi = a[77];
+ a[50] = yr;
+ a[51] = yi;
+ a[76] = xr;
+ a[77] = xi;
+
+ xr = a[58];
+ xi = a[59];
+ yr = a[92];
+ yi = a[93];
+ a[58] = yr;
+ a[59] = yi;
+ a[92] = xr;
+ a[93] = xi;
+
+ xr = a[44];
+ xi = a[45];
+ yr = a[52];
+ yi = a[53];
+ a[44] = yr;
+ a[45] = yi;
+ a[52] = xr;
+ a[53] = xi;
+
+ xr = a[96];
+ xi = a[97];
+ yr = a[6];
+ yi = a[7];
+ a[96] = yr;
+ a[97] = yi;
+ a[6] = xr;
+ a[7] = xi;
+
+ xr = a[104];
+ xi = a[105];
+ yr = a[22];
+ yi = a[23];
+ a[104] = yr;
+ a[105] = yi;
+ a[22] = xr;
+ a[23] = xi;
+
+ xr = a[112];
+ xi = a[113];
+ yr = a[14];
+ yi = a[15];
+ a[112] = yr;
+ a[113] = yi;
+ a[14] = xr;
+ a[15] = xi;
+
+ xr = a[120];
+ xi = a[121];
+ yr = a[30];
+ yi = a[31];
+ a[120] = yr;
+ a[121] = yi;
+ a[30] = xr;
+ a[31] = xi;
+
+ xr = a[98];
+ xi = a[99];
+ yr = a[70];
+ yi = a[71];
+ a[98] = yr;
+ a[99] = yi;
+ a[70] = xr;
+ a[71] = xi;
+
+ xr = a[106];
+ xi = a[107];
+ yr = a[86];
+ yi = a[87];
+ a[106] = yr;
+ a[107] = yi;
+ a[86] = xr;
+ a[87] = xi;
+
+ xr = a[114];
+ xi = a[115];
+ yr = a[78];
+ yi = a[79];
+ a[114] = yr;
+ a[115] = yi;
+ a[78] = xr;
+ a[79] = xi;
+
+ xr = a[122];
+ xi = a[123];
+ yr = a[94];
+ yi = a[95];
+ a[122] = yr;
+ a[123] = yi;
+ a[94] = xr;
+ a[95] = xi;
+
+ xr = a[100];
+ xi = a[101];
+ yr = a[38];
+ yi = a[39];
+ a[100] = yr;
+ a[101] = yi;
+ a[38] = xr;
+ a[39] = xi;
+
+ xr = a[108];
+ xi = a[109];
+ yr = a[54];
+ yi = a[55];
+ a[108] = yr;
+ a[109] = yi;
+ a[54] = xr;
+ a[55] = xi;
+
+ xr = a[116];
+ xi = a[117];
+ yr = a[46];
+ yi = a[47];
+ a[116] = yr;
+ a[117] = yi;
+ a[46] = xr;
+ a[47] = xi;
+
+ xr = a[124];
+ xi = a[125];
+ yr = a[62];
+ yi = a[63];
+ a[124] = yr;
+ a[125] = yi;
+ a[62] = xr;
+ a[63] = xi;
+
+ xr = a[110];
+ xi = a[111];
+ yr = a[118];
+ yi = a[119];
+ a[110] = yr;
+ a[111] = yi;
+ a[118] = xr;
+ a[119] = xi;
+}
+
+static void cft1st_128_mips(float *a) {
+ float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;
+ float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+
+ float f0, f1, f2, f3, f4, f5, f6, f7;
+ int a_ptr, p1_rdft, p2_rdft, count;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ // first 16
+ "lwc1 %[f0], 0(%[a]) \n\t"
+ "lwc1 %[f1], 4(%[a]) \n\t"
+ "lwc1 %[f2], 8(%[a]) \n\t"
+ "lwc1 %[f3], 12(%[a]) \n\t"
+ "lwc1 %[f4], 16(%[a]) \n\t"
+ "lwc1 %[f5], 20(%[a]) \n\t"
+ "lwc1 %[f6], 24(%[a]) \n\t"
+ "lwc1 %[f7], 28(%[a]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "sub.s %[f4], %[x0r], %[x2r] \n\t"
+ "sub.s %[f5], %[x0i], %[x2i] \n\t"
+ "sub.s %[f2], %[x1r], %[x3i] \n\t"
+ "add.s %[f3], %[x1i], %[x3r] \n\t"
+ "add.s %[f6], %[x1r], %[x3i] \n\t"
+ "sub.s %[f7], %[x1i], %[x3r] \n\t"
+ "swc1 %[f0], 0(%[a]) \n\t"
+ "swc1 %[f1], 4(%[a]) \n\t"
+ "swc1 %[f2], 8(%[a]) \n\t"
+ "swc1 %[f3], 12(%[a]) \n\t"
+ "swc1 %[f4], 16(%[a]) \n\t"
+ "swc1 %[f5], 20(%[a]) \n\t"
+ "swc1 %[f6], 24(%[a]) \n\t"
+ "swc1 %[f7], 28(%[a]) \n\t"
+ "lwc1 %[f0], 32(%[a]) \n\t"
+ "lwc1 %[f1], 36(%[a]) \n\t"
+ "lwc1 %[f2], 40(%[a]) \n\t"
+ "lwc1 %[f3], 44(%[a]) \n\t"
+ "lwc1 %[f4], 48(%[a]) \n\t"
+ "lwc1 %[f5], 52(%[a]) \n\t"
+ "lwc1 %[f6], 56(%[a]) \n\t"
+ "lwc1 %[f7], 60(%[a]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "lwc1 %[wk2r], 8(%[rdft_w]) \n\t"
+ "add.s %[f3], %[x1i], %[x3r] \n\t"
+ "sub.s %[f2], %[x1r], %[x3i] \n\t"
+ "add.s %[f6], %[x3i], %[x1r] \n\t"
+ "sub.s %[f7], %[x3r], %[x1i] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "sub.s %[x1r], %[f2], %[f3] \n\t"
+ "add.s %[x1i], %[f3], %[f2] \n\t"
+ "sub.s %[x3r], %[f7], %[f6] \n\t"
+ "add.s %[x3i], %[f7], %[f6] \n\t"
+ "sub.s %[f4], %[x0r], %[x2r] \n\t"
+ "mul.s %[f2], %[wk2r], %[x1r] \n\t"
+ "mul.s %[f3], %[wk2r], %[x1i] \n\t"
+ "mul.s %[f6], %[wk2r], %[x3r] \n\t"
+ "mul.s %[f7], %[wk2r], %[x3i] \n\t"
+ "sub.s %[f5], %[x2i], %[x0i] \n\t"
+ "swc1 %[f0], 32(%[a]) \n\t"
+ "swc1 %[f1], 36(%[a]) \n\t"
+ "swc1 %[f2], 40(%[a]) \n\t"
+ "swc1 %[f3], 44(%[a]) \n\t"
+ "swc1 %[f5], 48(%[a]) \n\t"
+ "swc1 %[f4], 52(%[a]) \n\t"
+ "swc1 %[f6], 56(%[a]) \n\t"
+ "swc1 %[f7], 60(%[a]) \n\t"
+ // prepare for loop
+ "addiu %[a_ptr], %[a], 64 \n\t"
+ "addiu %[p1_rdft], %[rdft_w], 8 \n\t"
+ "addiu %[p2_rdft], %[rdft_w], 16 \n\t"
+ "addiu %[count], $zero, 7 \n\t"
+ // loop
+ "1: \n\t"
+ "lwc1 %[f0], 0(%[a_ptr]) \n\t"
+ "lwc1 %[f1], 4(%[a_ptr]) \n\t"
+ "lwc1 %[f2], 8(%[a_ptr]) \n\t"
+ "lwc1 %[f3], 12(%[a_ptr]) \n\t"
+ "lwc1 %[f4], 16(%[a_ptr]) \n\t"
+ "lwc1 %[f5], 20(%[a_ptr]) \n\t"
+ "lwc1 %[f6], 24(%[a_ptr]) \n\t"
+ "lwc1 %[f7], 28(%[a_ptr]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "lwc1 %[wk2i], 4(%[p1_rdft]) \n\t"
+ "sub.s %[f0], %[x0r], %[x2r] \n\t"
+ "sub.s %[f1], %[x0i], %[x2i] \n\t"
+ "add.s %[f2], %[x1i], %[x3r] \n\t"
+ "sub.s %[f3], %[x1r], %[x3i] \n\t"
+ "lwc1 %[wk1r], 0(%[p2_rdft]) \n\t"
+ "add.s %[f4], %[x1r], %[x3i] \n\t"
+ "sub.s %[f5], %[x1i], %[x3r] \n\t"
+ "lwc1 %[wk3r], 8(%[first]) \n\t"
+ "mul.s %[x3r], %[wk2r], %[f0] \n\t"
+ "mul.s %[x3i], %[wk2r], %[f1] \n\t"
+ "mul.s %[x1r], %[wk1r], %[f3] \n\t"
+ "mul.s %[x1i], %[wk1r], %[f2] \n\t"
+ "lwc1 %[wk1i], 4(%[p2_rdft]) \n\t"
+ "mul.s %[f6], %[wk3r], %[f4] \n\t"
+ "mul.s %[f7], %[wk3r], %[f5] \n\t"
+ "lwc1 %[wk3i], 12(%[first]) \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[wk1r], %[wk2i], %[f1] \n\t"
+ "mul.s %[f0], %[wk2i], %[f0] \n\t"
+ "sub.s %[x3r], %[x3r], %[wk1r] \n\t"
+ "add.s %[x3i], %[x3i], %[f0] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "mul.s %[x0r], %[wk1i], %[f2] \n\t"
+ "mul.s %[f3], %[wk1i], %[f3] \n\t"
+ "mul.s %[x2r], %[wk3i], %[f5] \n\t"
+ "mul.s %[f4], %[wk3i], %[f4] \n\t"
+ "sub.s %[x1r], %[x1r], %[x0r] \n\t"
+ "add.s %[x1i], %[x1i], %[f3] \n\t"
+ "sub.s %[f6], %[f6], %[x2r] \n\t"
+ "add.s %[f7], %[f7], %[f4] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmsub.s %[x3r], %[x3r], %[wk2i], %[f1] \n\t"
+ "madd.s %[x3i], %[x3i], %[wk2i], %[f0] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "nmsub.s %[x1r], %[x1r], %[wk1i], %[f2] \n\t"
+ "madd.s %[x1i], %[x1i], %[wk1i], %[f3] \n\t"
+ "nmsub.s %[f6], %[f6], %[wk3i], %[f5] \n\t"
+ "madd.s %[f7], %[f7], %[wk3i], %[f4] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[f0], 0(%[a_ptr]) \n\t"
+ "swc1 %[f1], 4(%[a_ptr]) \n\t"
+ "swc1 %[x1r], 8(%[a_ptr]) \n\t"
+ "swc1 %[x1i], 12(%[a_ptr]) \n\t"
+ "swc1 %[x3r], 16(%[a_ptr]) \n\t"
+ "swc1 %[x3i], 20(%[a_ptr]) \n\t"
+ "swc1 %[f6], 24(%[a_ptr]) \n\t"
+ "swc1 %[f7], 28(%[a_ptr]) \n\t"
+ "lwc1 %[f0], 32(%[a_ptr]) \n\t"
+ "lwc1 %[f1], 36(%[a_ptr]) \n\t"
+ "lwc1 %[f2], 40(%[a_ptr]) \n\t"
+ "lwc1 %[f3], 44(%[a_ptr]) \n\t"
+ "lwc1 %[f4], 48(%[a_ptr]) \n\t"
+ "lwc1 %[f5], 52(%[a_ptr]) \n\t"
+ "lwc1 %[f6], 56(%[a_ptr]) \n\t"
+ "lwc1 %[f7], 60(%[a_ptr]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "lwc1 %[wk1r], 8(%[p2_rdft]) \n\t"
+ "sub.s %[f0], %[x0r], %[x2r] \n\t"
+ "sub.s %[f1], %[x0i], %[x2i] \n\t"
+ "add.s %[f2], %[x1i], %[x3r] \n\t"
+ "sub.s %[f3], %[x1r], %[x3i] \n\t"
+ "add.s %[f4], %[x1r], %[x3i] \n\t"
+ "sub.s %[f5], %[x1i], %[x3r] \n\t"
+ "lwc1 %[wk3r], 8(%[second]) \n\t"
+ "mul.s %[x3r], %[wk2i], %[f0] \n\t"
+ "mul.s %[x3i], %[wk2i], %[f1] \n\t"
+ "mul.s %[x1r], %[wk1r], %[f3] \n\t"
+ "mul.s %[x1i], %[wk1r], %[f2] \n\t"
+ "mul.s %[f6], %[wk3r], %[f4] \n\t"
+ "mul.s %[f7], %[wk3r], %[f5] \n\t"
+ "lwc1 %[wk1i], 12(%[p2_rdft]) \n\t"
+ "lwc1 %[wk3i], 12(%[second]) \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[wk1r], %[wk2r], %[f1] \n\t"
+ "mul.s %[f0], %[wk2r], %[f0] \n\t"
+ "add.s %[x3r], %[x3r], %[wk1r] \n\t"
+ "neg.s %[x3r], %[x3r] \n\t"
+ "sub.s %[x3i], %[f0], %[x3i] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "mul.s %[x0r], %[wk1i], %[f2] \n\t"
+ "mul.s %[f3], %[wk1i], %[f3] \n\t"
+ "mul.s %[x2r], %[wk3i], %[f5] \n\t"
+ "mul.s %[f4], %[wk3i], %[f4] \n\t"
+ "sub.s %[x1r], %[x1r], %[x0r] \n\t"
+ "add.s %[x1i], %[x1i], %[f3] \n\t"
+ "sub.s %[f6], %[f6], %[x2r] \n\t"
+ "add.s %[f7], %[f7], %[f4] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmadd.s %[x3r], %[x3r], %[wk2r], %[f1] \n\t"
+ "msub.s %[x3i], %[x3i], %[wk2r], %[f0] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "nmsub.s %[x1r], %[x1r], %[wk1i], %[f2] \n\t"
+ "madd.s %[x1i], %[x1i], %[wk1i], %[f3] \n\t"
+ "nmsub.s %[f6], %[f6], %[wk3i], %[f5] \n\t"
+ "madd.s %[f7], %[f7], %[wk3i], %[f4] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[wk2r], 8(%[p1_rdft]) \n\t"
+ "addiu %[a_ptr], %[a_ptr], 64 \n\t"
+ "addiu %[p1_rdft], %[p1_rdft], 8 \n\t"
+ "addiu %[p2_rdft], %[p2_rdft], 16 \n\t"
+ "addiu %[first], %[first], 8 \n\t"
+ "swc1 %[f0], -32(%[a_ptr]) \n\t"
+ "swc1 %[f1], -28(%[a_ptr]) \n\t"
+ "swc1 %[x1r], -24(%[a_ptr]) \n\t"
+ "swc1 %[x1i], -20(%[a_ptr]) \n\t"
+ "swc1 %[x3r], -16(%[a_ptr]) \n\t"
+ "swc1 %[x3i], -12(%[a_ptr]) \n\t"
+ "swc1 %[f6], -8(%[a_ptr]) \n\t"
+ "swc1 %[f7], -4(%[a_ptr]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[second], %[second], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [wk1r] "=&f" (wk1r),
+ [wk1i] "=&f" (wk1i), [wk2r] "=&f" (wk2r), [wk2i] "=&f" (wk2i),
+ [wk3r] "=&f" (wk3r), [wk3i] "=&f" (wk3i), [a_ptr] "=&r" (a_ptr),
+ [p1_rdft] "=&r" (p1_rdft), [p2_rdft] "=&r" (p2_rdft),
+ [count] "=&r" (count)
+ : [a] "r" (a), [rdft_w] "r" (rdft_w), [first] "r" (rdft_wk3ri_first),
+ [second] "r" (rdft_wk3ri_second)
+ : "memory"
+ );
+}
+
+static void cftmdl_128_mips(float *a) {
+ float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;
+ float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+ float f0, f1, f2, f3, f4, f5, f6, f7;
+ int tmp_a, count;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 0 \n\t"
+ "addiu %[count], $zero, 4 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 32(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 36(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 64(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 68(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 96(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 100(%[tmp_a]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "sub.s %[f4], %[x0r], %[x2r] \n\t"
+ "sub.s %[f5], %[x0i], %[x2i] \n\t"
+ "sub.s %[f2], %[x1r], %[x3i] \n\t"
+ "add.s %[f3], %[x1i], %[x3r] \n\t"
+ "add.s %[f6], %[x1r], %[x3i] \n\t"
+ "sub.s %[f7], %[x1i], %[x3r] \n\t"
+ "swc1 %[f0], 0(%[tmp_a]) \n\t"
+ "swc1 %[f1], 4(%[tmp_a]) \n\t"
+ "swc1 %[f2], 32(%[tmp_a]) \n\t"
+ "swc1 %[f3], 36(%[tmp_a]) \n\t"
+ "swc1 %[f4], 64(%[tmp_a]) \n\t"
+ "swc1 %[f5], 68(%[tmp_a]) \n\t"
+ "swc1 %[f6], 96(%[tmp_a]) \n\t"
+ "swc1 %[f7], 100(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [tmp_a] "=&r" (tmp_a),
+ [count] "=&r" (count)
+ : [a] "r" (a)
+ : "memory"
+ );
+ wk2r = rdft_w[2];
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 128 \n\t"
+ "addiu %[count], $zero, 4 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 32(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 36(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 64(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 68(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 96(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 100(%[tmp_a]) \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[f0], %[x1r], %[x3i] \n\t"
+ "add.s %[f1], %[x1i], %[x3r] \n\t"
+ "sub.s %[f2], %[x3r], %[x1i] \n\t"
+ "add.s %[f3], %[x3i], %[x1r] \n\t"
+ "add.s %[f4], %[x0r], %[x2r] \n\t"
+ "add.s %[f5], %[x0i], %[x2i] \n\t"
+ "sub.s %[f6], %[f0], %[f1] \n\t"
+ "add.s %[f0], %[f0], %[f1] \n\t"
+ "sub.s %[f7], %[f2], %[f3] \n\t"
+ "add.s %[f2], %[f2], %[f3] \n\t"
+ "sub.s %[f1], %[x2i], %[x0i] \n\t"
+ "mul.s %[f6], %[f6], %[wk2r] \n\t"
+ "mul.s %[f0], %[f0], %[wk2r] \n\t"
+ "sub.s %[f3], %[x0r], %[x2r] \n\t"
+ "mul.s %[f7], %[f7], %[wk2r] \n\t"
+ "mul.s %[f2], %[f2], %[wk2r] \n\t"
+ "swc1 %[f4], 0(%[tmp_a]) \n\t"
+ "swc1 %[f5], 4(%[tmp_a]) \n\t"
+ "swc1 %[f6], 32(%[tmp_a]) \n\t"
+ "swc1 %[f0], 36(%[tmp_a]) \n\t"
+ "swc1 %[f1], 64(%[tmp_a]) \n\t"
+ "swc1 %[f3], 68(%[tmp_a]) \n\t"
+ "swc1 %[f7], 96(%[tmp_a]) \n\t"
+ "swc1 %[f2], 100(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [tmp_a] "=&r" (tmp_a),
+ [count] "=&r" (count)
+ : [a] "r" (a), [wk2r] "f" (wk2r)
+ : "memory"
+ );
+ wk2i = rdft_w[3];
+ wk1r = rdft_w[4];
+ wk1i = rdft_w[5];
+ wk3r = rdft_wk3ri_first[2];
+ wk3i = rdft_wk3ri_first[3];
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 256 \n\t"
+ "addiu %[count], $zero, 4 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 32(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 36(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 64(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 68(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 96(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 100(%[tmp_a]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "sub.s %[f0], %[x0r], %[x2r] \n\t"
+ "sub.s %[f1], %[x0i], %[x2i] \n\t"
+ "add.s %[f2], %[x1i], %[x3r] \n\t"
+ "sub.s %[f3], %[x1r], %[x3i] \n\t"
+ "add.s %[f4], %[x1r], %[x3i] \n\t"
+ "sub.s %[f5], %[x1i], %[x3r] \n\t"
+ "mul.s %[x3r], %[wk2r], %[f0] \n\t"
+ "mul.s %[x3i], %[wk2r], %[f1] \n\t"
+ "mul.s %[x1r], %[wk1r], %[f3] \n\t"
+ "mul.s %[x1i], %[wk1r], %[f2] \n\t"
+ "mul.s %[f6], %[wk3r], %[f4] \n\t"
+ "mul.s %[f7], %[wk3r], %[f5] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f1], %[wk2i], %[f1] \n\t"
+ "mul.s %[f0], %[wk2i], %[f0] \n\t"
+ "sub.s %[x3r], %[x3r], %[f1] \n\t"
+ "add.s %[x3i], %[x3i], %[f0] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "mul.s %[f2], %[wk1i], %[f2] \n\t"
+ "mul.s %[f3], %[wk1i], %[f3] \n\t"
+ "mul.s %[f5], %[wk3i], %[f5] \n\t"
+ "mul.s %[f4], %[wk3i], %[f4] \n\t"
+ "sub.s %[x1r], %[x1r], %[f2] \n\t"
+ "add.s %[x1i], %[x1i], %[f3] \n\t"
+ "sub.s %[f6], %[f6], %[f5] \n\t"
+ "add.s %[f7], %[f7], %[f4] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmsub.s %[x3r], %[x3r], %[wk2i], %[f1] \n\t"
+ "madd.s %[x3i], %[x3i], %[wk2i], %[f0] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "nmsub.s %[x1r], %[x1r], %[wk1i], %[f2] \n\t"
+ "madd.s %[x1i], %[x1i], %[wk1i], %[f3] \n\t"
+ "nmsub.s %[f6], %[f6], %[wk3i], %[f5] \n\t"
+ "madd.s %[f7], %[f7], %[wk3i], %[f4] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[f0], 0(%[tmp_a]) \n\t"
+ "swc1 %[f1], 4(%[tmp_a]) \n\t"
+ "swc1 %[x1r], 32(%[tmp_a]) \n\t"
+ "swc1 %[x1i], 36(%[tmp_a]) \n\t"
+ "swc1 %[x3r], 64(%[tmp_a]) \n\t"
+ "swc1 %[x3i], 68(%[tmp_a]) \n\t"
+ "swc1 %[f6], 96(%[tmp_a]) \n\t"
+ "swc1 %[f7], 100(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [tmp_a] "=&r" (tmp_a),
+ [count] "=&r" (count)
+ : [a] "r" (a), [wk1r] "f" (wk1r), [wk1i] "f" (wk1i), [wk2r] "f" (wk2r),
+ [wk2i] "f" (wk2i), [wk3r] "f" (wk3r), [wk3i] "f" (wk3i)
+ : "memory"
+ );
+
+ wk1r = rdft_w[6];
+ wk1i = rdft_w[7];
+ wk3r = rdft_wk3ri_second[2];
+ wk3i = rdft_wk3ri_second[3];
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 384 \n\t"
+ "addiu %[count], $zero, 4 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 32(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 36(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 64(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 68(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 96(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 100(%[tmp_a]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "sub.s %[f0], %[x0r], %[x2r] \n\t"
+ "sub.s %[f1], %[x0i], %[x2i] \n\t"
+ "add.s %[f2], %[x1i], %[x3r] \n\t"
+ "sub.s %[f3], %[x1r], %[x3i] \n\t"
+ "add.s %[f4], %[x1r], %[x3i] \n\t"
+ "sub.s %[f5], %[x1i], %[x3r] \n\t"
+ "mul.s %[x3r], %[wk2i], %[f0] \n\t"
+ "mul.s %[x3i], %[wk2i], %[f1] \n\t"
+ "mul.s %[x1r], %[wk1r], %[f3] \n\t"
+ "mul.s %[x1i], %[wk1r], %[f2] \n\t"
+ "mul.s %[f6], %[wk3r], %[f4] \n\t"
+ "mul.s %[f7], %[wk3r], %[f5] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f1], %[wk2r], %[f1] \n\t"
+ "mul.s %[f0], %[wk2r], %[f0] \n\t"
+ "add.s %[x3r], %[x3r], %[f1] \n\t"
+ "neg.s %[x3r], %[x3r] \n\t"
+ "sub.s %[x3i], %[f0], %[x3i] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "mul.s %[f2], %[wk1i], %[f2] \n\t"
+ "mul.s %[f3], %[wk1i], %[f3] \n\t"
+ "mul.s %[f5], %[wk3i], %[f5] \n\t"
+ "mul.s %[f4], %[wk3i], %[f4] \n\t"
+ "sub.s %[x1r], %[x1r], %[f2] \n\t"
+ "add.s %[x1i], %[x1i], %[f3] \n\t"
+ "sub.s %[f6], %[f6], %[f5] \n\t"
+ "add.s %[f7], %[f7], %[f4] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmadd.s %[x3r], %[x3r], %[wk2r], %[f1] \n\t"
+ "msub.s %[x3i], %[x3i], %[wk2r], %[f0] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "nmsub.s %[x1r], %[x1r], %[wk1i], %[f2] \n\t"
+ "madd.s %[x1i], %[x1i], %[wk1i], %[f3] \n\t"
+ "nmsub.s %[f6], %[f6], %[wk3i], %[f5] \n\t"
+ "madd.s %[f7], %[f7], %[wk3i], %[f4] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[f0], 0(%[tmp_a]) \n\t"
+ "swc1 %[f1], 4(%[tmp_a]) \n\t"
+ "swc1 %[x1r], 32(%[tmp_a]) \n\t"
+ "swc1 %[x1i], 36(%[tmp_a]) \n\t"
+ "swc1 %[x3r], 64(%[tmp_a]) \n\t"
+ "swc1 %[x3i], 68(%[tmp_a]) \n\t"
+ "swc1 %[f6], 96(%[tmp_a]) \n\t"
+ "swc1 %[f7], 100(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [tmp_a] "=&r" (tmp_a),
+ [count] "=&r" (count)
+ : [a] "r" (a), [wk1r] "f" (wk1r), [wk1i] "f" (wk1i), [wk2r] "f" (wk2r),
+ [wk2i] "f" (wk2i), [wk3r] "f" (wk3r), [wk3i] "f" (wk3i)
+ : "memory"
+ );
+}
+
+static void cftfsub_128_mips(float *a) {
+ float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+ float f0, f1, f2, f3, f4, f5, f6, f7;
+ int tmp_a, count;
+
+ cft1st_128(a);
+ cftmdl_128(a);
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 0 \n\t"
+ "addiu %[count], $zero, 16 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 128(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 132(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 256(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 260(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 384(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 388(%[tmp_a]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "sub.s %[f4], %[x0r], %[x2r] \n\t"
+ "sub.s %[f5], %[x0i], %[x2i] \n\t"
+ "sub.s %[f2], %[x1r], %[x3i] \n\t"
+ "add.s %[f3], %[x1i], %[x3r] \n\t"
+ "add.s %[f6], %[x1r], %[x3i] \n\t"
+ "sub.s %[f7], %[x1i], %[x3r] \n\t"
+ "swc1 %[f0], 0(%[tmp_a]) \n\t"
+ "swc1 %[f1], 4(%[tmp_a]) \n\t"
+ "swc1 %[f2], 128(%[tmp_a]) \n\t"
+ "swc1 %[f3], 132(%[tmp_a]) \n\t"
+ "swc1 %[f4], 256(%[tmp_a]) \n\t"
+ "swc1 %[f5], 260(%[tmp_a]) \n\t"
+ "swc1 %[f6], 384(%[tmp_a]) \n\t"
+ "swc1 %[f7], 388(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [tmp_a] "=&r" (tmp_a),
+ [count] "=&r" (count)
+ : [a] "r" (a)
+ : "memory"
+ );
+}
+
+static void cftbsub_128_mips(float *a) {
+ float f0, f1, f2, f3, f4, f5, f6, f7;
+ float f8, f9, f10, f11, f12, f13, f14, f15;
+ float f16, f17, f18, f19, f20, f21, f22, f23;
+ int tmp_a, count;
+
+ cft1st_128(a);
+ cftmdl_128(a);
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 0 \n\t"
+ "addiu %[count], $zero, 8 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 128(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 132(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 256(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 260(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 384(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 388(%[tmp_a]) \n\t"
+ "lwc1 %[f8], 8(%[tmp_a]) \n\t"
+ "lwc1 %[f9], 12(%[tmp_a]) \n\t"
+ "lwc1 %[f10], 136(%[tmp_a]) \n\t"
+ "lwc1 %[f11], 140(%[tmp_a]) \n\t"
+ "lwc1 %[f12], 264(%[tmp_a]) \n\t"
+ "lwc1 %[f13], 268(%[tmp_a]) \n\t"
+ "lwc1 %[f14], 392(%[tmp_a]) \n\t"
+ "lwc1 %[f15], 396(%[tmp_a]) \n\t"
+ "add.s %[f16], %[f0], %[f2] \n\t"
+ "add.s %[f17], %[f1], %[f3] \n\t"
+ "add.s %[f18], %[f4], %[f6] \n\t"
+ "add.s %[f19], %[f5], %[f7] \n\t"
+ "sub.s %[f20], %[f0], %[f2] \n\t"
+ "sub.s %[f21], %[f3], %[f1] \n\t"
+ "sub.s %[f22], %[f4], %[f6] \n\t"
+ "sub.s %[f23], %[f5], %[f7] \n\t"
+ "add.s %[f0], %[f8], %[f10] \n\t"
+ "add.s %[f1], %[f9], %[f11] \n\t"
+ "add.s %[f2], %[f12], %[f14] \n\t"
+ "add.s %[f3], %[f13], %[f15] \n\t"
+ "sub.s %[f4], %[f8], %[f10] \n\t"
+ "sub.s %[f5], %[f11], %[f9] \n\t"
+ "sub.s %[f6], %[f12], %[f14] \n\t"
+ "sub.s %[f7], %[f13], %[f15] \n\t"
+ "add.s %[f8], %[f16], %[f18] \n\t"
+ "add.s %[f9], %[f17], %[f19] \n\t"
+ "sub.s %[f12], %[f16], %[f18] \n\t"
+ "sub.s %[f13], %[f19], %[f17] \n\t"
+ "sub.s %[f10], %[f20], %[f23] \n\t"
+ "sub.s %[f11], %[f21], %[f22] \n\t"
+ "add.s %[f14], %[f20], %[f23] \n\t"
+ "add.s %[f15], %[f21], %[f22] \n\t"
+ "neg.s %[f9], %[f9] \n\t"
+ "add.s %[f16], %[f0], %[f2] \n\t"
+ "add.s %[f17], %[f1], %[f3] \n\t"
+ "sub.s %[f20], %[f0], %[f2] \n\t"
+ "sub.s %[f21], %[f3], %[f1] \n\t"
+ "sub.s %[f18], %[f4], %[f7] \n\t"
+ "sub.s %[f19], %[f5], %[f6] \n\t"
+ "add.s %[f22], %[f4], %[f7] \n\t"
+ "add.s %[f23], %[f5], %[f6] \n\t"
+ "neg.s %[f17], %[f17] \n\t"
+ "swc1 %[f8], 0(%[tmp_a]) \n\t"
+ "swc1 %[f10], 128(%[tmp_a]) \n\t"
+ "swc1 %[f11], 132(%[tmp_a]) \n\t"
+ "swc1 %[f12], 256(%[tmp_a]) \n\t"
+ "swc1 %[f13], 260(%[tmp_a]) \n\t"
+ "swc1 %[f14], 384(%[tmp_a]) \n\t"
+ "swc1 %[f15], 388(%[tmp_a]) \n\t"
+ "swc1 %[f9], 4(%[tmp_a]) \n\t"
+ "swc1 %[f16], 8(%[tmp_a]) \n\t"
+ "swc1 %[f18], 136(%[tmp_a]) \n\t"
+ "swc1 %[f19], 140(%[tmp_a]) \n\t"
+ "swc1 %[f20], 264(%[tmp_a]) \n\t"
+ "swc1 %[f21], 268(%[tmp_a]) \n\t"
+ "swc1 %[f22], 392(%[tmp_a]) \n\t"
+ "swc1 %[f23], 396(%[tmp_a]) \n\t"
+ "swc1 %[f17], 12(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 16 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [f8] "=&f" (f8), [f9] "=&f" (f9), [f10] "=&f" (f10), [f11] "=&f" (f11),
+ [f12] "=&f" (f12), [f13] "=&f" (f13), [f14] "=&f" (f14),
+ [f15] "=&f" (f15), [f16] "=&f" (f16), [f17] "=&f" (f17),
+ [f18] "=&f" (f18), [f19] "=&f" (f19), [f20] "=&f" (f20),
+ [f21] "=&f" (f21), [f22] "=&f" (f22), [f23] "=&f" (f23),
+ [tmp_a] "=&r" (tmp_a), [count] "=&r" (count)
+ : [a] "r" (a)
+ : "memory"
+ );
+}
+
+static void rftfsub_128_mips(float *a) {
+ const float *c = rdft_w + 32;
+ float wkr, wki, xr, xi, yr, yi;
+ const float temp = 0.5f;
+ float aj20=0, aj21=0, ak20=0, ak21=0, ck1=0;
+ float *a1 = a;
+ float *a2 = a;
+ float *c1 = rdft_w + 33;
+ float *c2 = c1 + 30;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lwc1 %[aj20], 8(%[a2]) \n\t"
+ "lwc1 %[ak20], 504(%[a1]) \n\t"
+ "lwc1 %[ck1], 0(%[c2]) \n\t"
+ "lwc1 %[aj21], 12(%[a2]) \n\t"
+ "lwc1 %[ak21], 508(%[a1]) \n\t"
+ "sub.s %[wkr], %[temp], %[ck1] \n\t"
+ "sub.s %[xr], %[aj20], %[ak20] \n\t"
+ "add.s %[xi], %[aj21], %[ak21] \n\t"
+ "lwc1 %[wki], 0(%[c1]) \n\t"
+ "addiu %[c2], %[c2],-4 \n\t"
+ "mul.s %[yr], %[wkr], %[xr] \n\t"
+ "mul.s %[yi], %[wkr], %[xi] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[xi], %[wki], %[xi] \n\t"
+ "mul.s %[xr], %[wki], %[xr] \n\t"
+ "sub.s %[yr], %[yr], %[xi] \n\t"
+ "add.s %[yi], %[yi], %[xr] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmsub.s %[yr], %[yr], %[wki], %[xi] \n\t"
+ "madd.s %[yi], %[yi], %[wki], %[xr] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "addiu %[c1], %[c1], 4 \n\t"
+ "sub.s %[aj20], %[aj20], %[yr] \n\t"
+ "sub.s %[aj21], %[aj21], %[yi] \n\t"
+ "add.s %[ak20], %[ak20], %[yr] \n\t"
+ "sub.s %[ak21], %[ak21], %[yi] \n\t"
+ "addiu %[a2], %[a2], 8 \n\t"
+ "swc1 %[aj20], 0(%[a2]) \n\t"
+ "swc1 %[aj21], 4(%[a2]) \n\t"
+ "swc1 %[ak20], 504(%[a1]) \n\t"
+ "swc1 %[ak21], 508(%[a1]) \n\t"
+ "addiu %[a1], %[a1], -8 \n\t"
+ //15x2 passes:
+ "1: \n\t"
+ "lwc1 %[ck1], 0(%[c2]) \n\t"
+ "lwc1 %[aj20], 8(%[a2]) \n\t"
+ "lwc1 %[aj21], 12(%[a2]) \n\t"
+ "lwc1 %[ak20], 504(%[a1]) \n\t"
+ "lwc1 %[ak21], 508(%[a1]) \n\t"
+ "lwc1 $f0, -4(%[c2]) \n\t"
+ "lwc1 $f2, 16(%[a2]) \n\t"
+ "lwc1 $f3, 20(%[a2]) \n\t"
+ "lwc1 $f8, 496(%[a1]) \n\t"
+ "lwc1 $f7, 500(%[a1]) \n\t"
+ "sub.s %[wkr], %[temp], %[ck1] \n\t"
+ "sub.s %[xr], %[aj20], %[ak20] \n\t"
+ "add.s %[xi], %[aj21], %[ak21] \n\t"
+ "lwc1 %[wki], 0(%[c1]) \n\t"
+ "sub.s $f0, %[temp], $f0 \n\t"
+ "sub.s $f6, $f2, $f8 \n\t"
+ "add.s $f4, $f3, $f7 \n\t"
+ "lwc1 $f5, 4(%[c1]) \n\t"
+ "mul.s %[yr], %[wkr], %[xr] \n\t"
+ "mul.s %[yi], %[wkr], %[xi] \n\t"
+ "mul.s $f1, $f0, $f6 \n\t"
+ "mul.s $f0, $f0, $f4 \n\t"
+ "addiu %[c2], %[c2], -8 \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[xi], %[wki], %[xi] \n\t"
+ "mul.s %[xr], %[wki], %[xr] \n\t"
+ "mul.s $f4, $f5, $f4 \n\t"
+ "mul.s $f6, $f5, $f6 \n\t"
+ "sub.s %[yr], %[yr], %[xi] \n\t"
+ "add.s %[yi], %[yi], %[xr] \n\t"
+ "sub.s $f1, $f1, $f4 \n\t"
+ "add.s $f0, $f0, $f6 \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmsub.s %[yr], %[yr], %[wki], %[xi] \n\t"
+ "madd.s %[yi], %[yi], %[wki], %[xr] \n\t"
+ "nmsub.s $f1, $f1, $f5, $f4 \n\t"
+ "madd.s $f0, $f0, $f5, $f6 \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "addiu %[c1], %[c1], 8 \n\t"
+ "sub.s %[aj20], %[aj20], %[yr] \n\t"
+ "sub.s %[aj21], %[aj21], %[yi] \n\t"
+ "add.s %[ak20], %[ak20], %[yr] \n\t"
+ "sub.s %[ak21], %[ak21], %[yi] \n\t"
+ "sub.s $f2, $f2, $f1 \n\t"
+ "sub.s $f3, $f3, $f0 \n\t"
+ "add.s $f1, $f8, $f1 \n\t"
+ "sub.s $f0, $f7, $f0 \n\t"
+ "swc1 %[aj20], 8(%[a2]) \n\t"
+ "swc1 %[aj21], 12(%[a2]) \n\t"
+ "swc1 %[ak20], 504(%[a1]) \n\t"
+ "swc1 %[ak21], 508(%[a1]) \n\t"
+ "swc1 $f2, 16(%[a2]) \n\t"
+ "swc1 $f3, 20(%[a2]) \n\t"
+ "swc1 $f1, 496(%[a1]) \n\t"
+ "swc1 $f0, 500(%[a1]) \n\t"
+ "addiu %[a2], %[a2], 16 \n\t"
+ "bne %[c2], %[c], 1b \n\t"
+ " addiu %[a1], %[a1], -16 \n\t"
+ ".set pop \n\t"
+ : [a] "+r" (a), [c] "+r" (c), [a1] "+r" (a1), [a2] "+r" (a2),
+ [c1] "+r" (c1), [c2] "+r" (c2), [wkr] "=&f" (wkr), [wki] "=&f" (wki),
+ [xr] "=&f" (xr), [xi] "=&f" (xi), [yr] "=&f" (yr), [yi] "=&f" (yi),
+ [aj20] "=&f" (aj20), [aj21] "=&f" (aj21), [ak20] "=&f" (ak20),
+ [ak21] "=&f" (ak21), [ck1] "=&f" (ck1)
+ : [temp] "f" (temp)
+ : "memory", "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8"
+ );
+}
+
+static void rftbsub_128_mips(float *a) {
+ const float *c = rdft_w + 32;
+ float wkr, wki, xr, xi, yr, yi;
+ a[1] = -a[1];
+ a[65] = -a[65];
+ const float temp = 0.5f;
+ float aj20=0, aj21=0, ak20=0, ak21=0, ck1=0;
+ float *a1 = a;
+ float *a2 = a;
+ float *c1 = rdft_w + 33;
+ float *c2 = c1 + 30;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lwc1 %[aj20], 8(%[a2]) \n\t"
+ "lwc1 %[ak20], 504(%[a1]) \n\t"
+ "lwc1 %[ck1], 0(%[c2]) \n\t"
+ "lwc1 %[aj21], 12(%[a2]) \n\t"
+ "lwc1 %[ak21], 508(%[a1]) \n\t"
+ "sub.s %[wkr], %[temp], %[ck1] \n\t"
+ "sub.s %[xr], %[aj20], %[ak20] \n\t"
+ "add.s %[xi], %[aj21], %[ak21] \n\t"
+ "lwc1 %[wki], 0(%[c1]) \n\t"
+ "addiu %[c2], %[c2], -4 \n\t"
+ "mul.s %[yr], %[wkr], %[xr] \n\t"
+ "mul.s %[yi], %[wkr], %[xi] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[xi], %[wki], %[xi] \n\t"
+ "mul.s %[xr], %[wki], %[xr] \n\t"
+ "add.s %[yr], %[yr], %[xi] \n\t"
+ "sub.s %[yi], %[yi], %[xr] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[yr], %[yr], %[wki], %[xi] \n\t"
+ "nmsub.s %[yi], %[yi], %[wki], %[xr] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "addiu %[c1], %[c1],4 \n\t"
+ "sub.s %[aj20], %[aj20], %[yr] \n\t"
+ "sub.s %[aj21], %[yi], %[aj21] \n\t"
+ "add.s %[ak20], %[ak20], %[yr] \n\t"
+ "sub.s %[ak21], %[yi], %[ak21] \n\t"
+ "addiu %[a2], %[a2], 8 \n\t"
+ "swc1 %[aj20], 0(%[a2]) \n\t"
+ "swc1 %[aj21], 4(%[a2]) \n\t"
+ "swc1 %[ak20], 504(%[a1]) \n\t"
+ "swc1 %[ak21], 508(%[a1]) \n\t"
+ "addiu %[a1], %[a1], -8 \n\t"
+ //15x2 passes:
+ "1: \n\t"
+ "lwc1 %[ck1], 0(%[c2]) \n\t"
+ "lwc1 %[aj20], 8(%[a2]) \n\t"
+ "lwc1 %[aj21], 12(%[a2]) \n\t"
+ "lwc1 %[ak20], 504(%[a1]) \n\t"
+ "lwc1 %[ak21], 508(%[a1]) \n\t"
+ "lwc1 $f0, -4(%[c2]) \n\t"
+ "lwc1 $f2, 16(%[a2]) \n\t"
+ "lwc1 $f3, 20(%[a2]) \n\t"
+ "lwc1 $f8, 496(%[a1]) \n\t"
+ "lwc1 $f7, 500(%[a1]) \n\t"
+ "sub.s %[wkr], %[temp], %[ck1] \n\t"
+ "sub.s %[xr], %[aj20], %[ak20] \n\t"
+ "add.s %[xi], %[aj21], %[ak21] \n\t"
+ "lwc1 %[wki], 0(%[c1]) \n\t"
+ "sub.s $f0, %[temp], $f0 \n\t"
+ "sub.s $f6, $f2, $f8 \n\t"
+ "add.s $f4, $f3, $f7 \n\t"
+ "lwc1 $f5, 4(%[c1]) \n\t"
+ "mul.s %[yr], %[wkr], %[xr] \n\t"
+ "mul.s %[yi], %[wkr], %[xi] \n\t"
+ "mul.s $f1, $f0, $f6 \n\t"
+ "mul.s $f0, $f0, $f4 \n\t"
+ "addiu %[c2], %[c2], -8 \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[xi], %[wki], %[xi] \n\t"
+ "mul.s %[xr], %[wki], %[xr] \n\t"
+ "mul.s $f4, $f5, $f4 \n\t"
+ "mul.s $f6, $f5, $f6 \n\t"
+ "add.s %[yr], %[yr], %[xi] \n\t"
+ "sub.s %[yi], %[yi], %[xr] \n\t"
+ "add.s $f1, $f1, $f4 \n\t"
+ "sub.s $f0, $f0, $f6 \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[yr], %[yr], %[wki], %[xi] \n\t"
+ "nmsub.s %[yi], %[yi], %[wki], %[xr] \n\t"
+ "madd.s $f1, $f1, $f5, $f4 \n\t"
+ "nmsub.s $f0, $f0, $f5, $f6 \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "addiu %[c1], %[c1], 8 \n\t"
+ "sub.s %[aj20], %[aj20], %[yr] \n\t"
+ "sub.s %[aj21], %[yi], %[aj21] \n\t"
+ "add.s %[ak20], %[ak20], %[yr] \n\t"
+ "sub.s %[ak21], %[yi], %[ak21] \n\t"
+ "sub.s $f2, $f2, $f1 \n\t"
+ "sub.s $f3, $f0, $f3 \n\t"
+ "add.s $f1, $f8, $f1 \n\t"
+ "sub.s $f0, $f0, $f7 \n\t"
+ "swc1 %[aj20], 8(%[a2]) \n\t"
+ "swc1 %[aj21], 12(%[a2]) \n\t"
+ "swc1 %[ak20], 504(%[a1]) \n\t"
+ "swc1 %[ak21], 508(%[a1]) \n\t"
+ "swc1 $f2, 16(%[a2]) \n\t"
+ "swc1 $f3, 20(%[a2]) \n\t"
+ "swc1 $f1, 496(%[a1]) \n\t"
+ "swc1 $f0, 500(%[a1]) \n\t"
+ "addiu %[a2], %[a2], 16 \n\t"
+ "bne %[c2], %[c], 1b \n\t"
+ " addiu %[a1], %[a1], -16 \n\t"
+ ".set pop \n\t"
+ : [a] "+r" (a), [c] "+r" (c), [a1] "+r" (a1), [a2] "+r" (a2),
+ [c1] "+r" (c1), [c2] "+r" (c2), [wkr] "=&f" (wkr), [wki] "=&f" (wki),
+ [xr] "=&f" (xr), [xi] "=&f" (xi), [yr] "=&f" (yr), [yi] "=&f" (yi),
+ [aj20] "=&f" (aj20), [aj21] "=&f" (aj21), [ak20] "=&f" (ak20),
+ [ak21] "=&f" (ak21), [ck1] "=&f" (ck1)
+ : [temp] "f" (temp)
+ : "memory", "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8"
+ );
+}
+
+void aec_rdft_init_mips(void) {
+ cft1st_128 = cft1st_128_mips;
+ cftmdl_128 = cftmdl_128_mips;
+ rftfsub_128 = rftfsub_128_mips;
+ rftbsub_128 = rftbsub_128_mips;
+ cftfsub_128 = cftfsub_128_mips;
+ cftbsub_128 = cftbsub_128_mips;
+ bitrv2_128 = bitrv2_128_mips;
+}
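
Editor's note (not part of the patch): the new file above installs MIPS-optimized FFT kernels by assigning the function pointers declared in aec_rdft.h (cft1st_128, cftmdl_128, rftfsub_128, rftbsub_128, cftfsub_128, cftbsub_128, bitrv2_128). The following stand-alone C sketch illustrates only that dispatch pattern under stated assumptions; all names with a _sketch suffix are hypothetical and not part of WebRTC.

/* Minimal sketch of the function-pointer dispatch used by aec_rdft_init_mips():
 * a portable C kernel is the default, and an architecture-specific init
 * routine swaps in the optimized version. Illustrative names only. */
#include <stdio.h>

typedef void (*rdft_128_fn)(float* a);  /* all 128-point kernels share this shape */

static void cftfsub_128_c_sketch(float* a)    { (void)a; printf("portable C kernel\n"); }
static void cftfsub_128_mips_sketch(float* a) { (void)a; printf("MIPS-optimized kernel\n"); }

/* Module-level pointer, analogous to cftfsub_128 in aec_rdft.h. */
static rdft_128_fn cftfsub_128_sketch = cftfsub_128_c_sketch;

static void aec_rdft_init_mips_sketch(void) {
  /* Mirrors the assignments at the end of aec_rdft_mips.c above. */
  cftfsub_128_sketch = cftfsub_128_mips_sketch;
}

int main(void) {
  float block[256] = {0};
  cftfsub_128_sketch(block);    /* default kernel */
  aec_rdft_init_mips_sketch();  /* in the real module, presumably gated by the MIPS build/arch check */
  cftfsub_128_sketch(block);    /* optimized kernel from here on */
  return 0;
}
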
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.c b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.c
index bbdd5f628b2..ba3b9243e19 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.c
@@ -104,18 +104,18 @@ int webrtc_aec_instance_count = 0;
static void EstBufDelayNormal(aecpc_t* aecInst);
static void EstBufDelayExtended(aecpc_t* aecInst);
static int ProcessNormal(aecpc_t* self,
- const int16_t* near,
- const int16_t* near_high,
- int16_t* out,
- int16_t* out_high,
+ const float* near,
+ const float* near_high,
+ float* out,
+ float* out_high,
int16_t num_samples,
int16_t reported_delay_ms,
int32_t skew);
static void ProcessExtended(aecpc_t* self,
- const int16_t* near,
- const int16_t* near_high,
- int16_t* out,
- int16_t* out_high,
+ const float* near,
+ const float* near_high,
+ float* out,
+ float* out_high,
int16_t num_samples,
int16_t reported_delay_ms,
int32_t skew);
@@ -254,7 +254,7 @@ int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) {
aecpc->checkBuffSize = 1;
aecpc->firstVal = 0;
- aecpc->startup_phase = 1;
+ aecpc->startup_phase = WebRtcAec_reported_delay_enabled(aecpc->aec);
aecpc->bufSizeStart = 0;
aecpc->checkBufSizeCtr = 0;
aecpc->msInSndCardBuf = 0;
@@ -372,10 +372,10 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
}
int32_t WebRtcAec_Process(void* aecInst,
- const int16_t* nearend,
- const int16_t* nearendH,
- int16_t* out,
- int16_t* outH,
+ const float* nearend,
+ const float* nearendH,
+ float* out,
+ float* outH,
int16_t nrOfSamples,
int16_t msInSndCardBuf,
int32_t skew) {
@@ -632,10 +632,10 @@ AecCore* WebRtcAec_aec_core(void* handle) {
}
static int ProcessNormal(aecpc_t* aecpc,
- const int16_t* nearend,
- const int16_t* nearendH,
- int16_t* out,
- int16_t* outH,
+ const float* nearend,
+ const float* nearendH,
+ float* out,
+ float* outH,
int16_t nrOfSamples,
int16_t msInSndCardBuf,
int32_t skew) {
@@ -689,10 +689,10 @@ static int ProcessNormal(aecpc_t* aecpc,
if (aecpc->startup_phase) {
// Only needed if they don't already point to the same place.
if (nearend != out) {
- memcpy(out, nearend, sizeof(short) * nrOfSamples);
+ memcpy(out, nearend, sizeof(*out) * nrOfSamples);
}
if (nearendH != outH) {
- memcpy(outH, nearendH, sizeof(short) * nrOfSamples);
+ memcpy(outH, nearendH, sizeof(*outH) * nrOfSamples);
}
// The AEC is in the start up mode
@@ -766,7 +766,9 @@ static int ProcessNormal(aecpc_t* aecpc,
}
} else {
// AEC is enabled.
- EstBufDelayNormal(aecpc);
+ if (WebRtcAec_reported_delay_enabled(aecpc->aec)) {
+ EstBufDelayNormal(aecpc);
+ }
// Note that 1 frame is supported for NB and 2 frames for WB.
for (i = 0; i < nFrames; i++) {
@@ -787,10 +789,10 @@ static int ProcessNormal(aecpc_t* aecpc,
}
static void ProcessExtended(aecpc_t* self,
- const int16_t* near,
- const int16_t* near_high,
- int16_t* out,
- int16_t* out_high,
+ const float* near,
+ const float* near_high,
+ float* out,
+ float* out_high,
int16_t num_samples,
int16_t reported_delay_ms,
int32_t skew) {
@@ -821,10 +823,10 @@ static void ProcessExtended(aecpc_t* self,
if (!self->farend_started) {
// Only needed if they don't already point to the same place.
if (near != out) {
- memcpy(out, near, sizeof(short) * num_samples);
+ memcpy(out, near, sizeof(*out) * num_samples);
}
if (near_high != out_high) {
- memcpy(out_high, near_high, sizeof(short) * num_samples);
+ memcpy(out_high, near_high, sizeof(*out_high) * num_samples);
}
return;
}
@@ -842,7 +844,9 @@ static void ProcessExtended(aecpc_t* self,
self->startup_phase = 0;
}
- EstBufDelayExtended(self);
+ if (WebRtcAec_reported_delay_enabled(self->aec)) {
+ EstBufDelayExtended(self);
+ }
{
// |delay_diff_offset| gives us the option to manually rewind the delay on
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/include/echo_cancellation.h b/chromium/third_party/webrtc/modules/audio_processing/aec/include/echo_cancellation.h
index 4c852cf64bf..dc64a345c3e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/include/echo_cancellation.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/include/echo_cancellation.h
@@ -68,7 +68,7 @@ extern "C" {
*
* Inputs Description
* -------------------------------------------------------------------
- * void **aecInst Pointer to the AEC instance to be created
+ * void** aecInst Pointer to the AEC instance to be created
* and initialized
*
* Outputs Description
@@ -83,7 +83,7 @@ int32_t WebRtcAec_Create(void** aecInst);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecInst Pointer to the AEC instance
+ * void* aecInst Pointer to the AEC instance
*
* Outputs Description
* -------------------------------------------------------------------
@@ -97,7 +97,7 @@ int32_t WebRtcAec_Free(void* aecInst);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecInst Pointer to the AEC instance
+ * void* aecInst Pointer to the AEC instance
* int32_t sampFreq Sampling frequency of data
* int32_t scSampFreq Soundcard sampling frequency
*
@@ -113,8 +113,8 @@ int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecInst Pointer to the AEC instance
- * int16_t *farend In buffer containing one frame of
+ * void* aecInst Pointer to the AEC instance
+ * int16_t* farend In buffer containing one frame of
* farend signal for L band
* int16_t nrOfSamples Number of samples in farend buffer
*
@@ -132,10 +132,10 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecInst Pointer to the AEC instance
- * int16_t *nearend In buffer containing one frame of
+ * void* aecInst Pointer to the AEC instance
+ * float* nearend In buffer containing one frame of
* nearend+echo signal for L band
- * int16_t *nearendH In buffer containing one frame of
+ * float* nearendH In buffer containing one frame of
* nearend+echo signal for H band
* int16_t nrOfSamples Number of samples in nearend buffer
* int16_t msInSndCardBuf Delay estimate for sound card and
@@ -146,18 +146,18 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
*
* Outputs Description
* -------------------------------------------------------------------
- * int16_t *out Out buffer, one frame of processed nearend
+ * float* out Out buffer, one frame of processed nearend
* for L band
- * int16_t *outH Out buffer, one frame of processed nearend
+ * float* outH Out buffer, one frame of processed nearend
* for H band
* int32_t return 0: OK
* -1: error
*/
int32_t WebRtcAec_Process(void* aecInst,
- const int16_t* nearend,
- const int16_t* nearendH,
- int16_t* out,
- int16_t* outH,
+ const float* nearend,
+ const float* nearendH,
+ float* out,
+ float* outH,
int16_t nrOfSamples,
int16_t msInSndCardBuf,
int32_t skew);
@@ -167,7 +167,7 @@ int32_t WebRtcAec_Process(void* aecInst,
*
* Inputs Description
* -------------------------------------------------------------------
- * void *handle Pointer to the AEC instance
+ * void* handle Pointer to the AEC instance
* AecConfig config Config instance that contains all
* properties to be set
*
@@ -183,11 +183,11 @@ int WebRtcAec_set_config(void* handle, AecConfig config);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *handle Pointer to the AEC instance
+ * void* handle Pointer to the AEC instance
*
* Outputs Description
* -------------------------------------------------------------------
- * int *status 0: Almost certainly nearend single-talk
+ * int* status 0: Almost certainly nearend single-talk
* 1: Might not be nearend single-talk
* int return 0: OK
* -1: error
@@ -199,11 +199,11 @@ int WebRtcAec_get_echo_status(void* handle, int* status);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *handle Pointer to the AEC instance
+ * void* handle Pointer to the AEC instance
*
* Outputs Description
* -------------------------------------------------------------------
- * AecMetrics *metrics Struct which will be filled out with the
+ * AecMetrics* metrics Struct which will be filled out with the
* current echo metrics.
* int return 0: OK
* -1: error
@@ -232,7 +232,7 @@ int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecInst Pointer to the AEC instance
+ * void* aecInst Pointer to the AEC instance
*
* Outputs Description
* -------------------------------------------------------------------
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
index a19030ae350..a13d47622c9 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
@@ -9,12 +9,12 @@
*/
#include "testing/gtest/include/gtest/gtest.h"
-
extern "C" {
#include "webrtc/modules/audio_processing/aec/aec_core.h"
}
#include "webrtc/modules/audio_processing/aec/echo_cancellation_internal.h"
#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/typedefs.h"
namespace {
@@ -46,16 +46,18 @@ class SystemDelayTest : public ::testing::Test {
aecpc_t* self_;
int samples_per_frame_;
// Dummy input/output speech data.
- int16_t far_[160];
- int16_t near_[160];
- int16_t out_[160];
+ static const int kSamplesPerChunk = 160;
+ int16_t far_[kSamplesPerChunk];
+ float near_[kSamplesPerChunk];
+ float out_[kSamplesPerChunk];
};
SystemDelayTest::SystemDelayTest()
: handle_(NULL), self_(NULL), samples_per_frame_(0) {
// Dummy input data are set with more or less arbitrary non-zero values.
memset(far_, 1, sizeof(far_));
- memset(near_, 2, sizeof(near_));
+ for (int i = 0; i < kSamplesPerChunk; i++)
+ near_[i] = 514.0;
memset(out_, 0, sizeof(out_));
}
@@ -251,6 +253,9 @@ TEST_F(SystemDelayTest, CorrectDelayAfterStableBufferBuildUp) {
// conditions, but with an empty internal far-end buffer. Once that is done we
// verify that the system delay is increased correctly until we have reached an
// internal buffer size of 75% of what's been reported.
+
+ // This test assumes the reported delays are used.
+ WebRtcAec_enable_reported_delay(WebRtcAec_aec_core(handle_), 1);
for (size_t i = 0; i < kNumSampleRates; i++) {
Init(kSampleRateHz[i]);
@@ -332,6 +337,9 @@ TEST_F(SystemDelayTest, CorrectDelayDuringDrift) {
// device buffer. The drift is simulated by decreasing the reported device
// buffer size by 1 ms every 100 ms. If the device buffer size goes below 30
// ms we jump (add) 10 ms to give a repeated pattern.
+
+ // This test assumes the reported delays are used.
+ WebRtcAec_enable_reported_delay(WebRtcAec_aec_core(handle_), 1);
for (size_t i = 0; i < kNumSampleRates; i++) {
Init(kSampleRateHz[i]);
RunStableStartup();
@@ -365,6 +373,9 @@ TEST_F(SystemDelayTest, ShouldRecoverAfterGlitch) {
// the device.
// The system is said to be in a non-causal state if the difference between
// the device buffer and system delay is less than a block (64 samples).
+
+ // This test assumes the reported delays are used.
+ WebRtcAec_enable_reported_delay(WebRtcAec_aec_core(handle_), 1);
for (size_t i = 0; i < kNumSampleRates; i++) {
Init(kSampleRateHz[i]);
RunStableStartup();
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c
index fc94f1b889e..0f34874612d 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c
@@ -266,6 +266,9 @@ int WebRtcAecm_CreateCore(AecmCore_t **aecmInst)
aecm = NULL;
return -1;
}
+ // TODO(bjornv): Explicitly disable robust delay validation until no
+ // performance regression has been established. Then remove the line.
+ WebRtc_enable_robust_validation(aecm->delay_estimator, 0);
aecm->real_fft = WebRtcSpl_CreateRealFFT(PART_LEN_SHIFT);
if (aecm->real_fft == NULL) {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c
index 63d4ac90280..f8491e97378 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c
@@ -260,7 +260,7 @@ static int TimeToFrequencyDomain(AecmCore_t* aecm,
__asm __volatile(
"smulbb %[tmp32no1], %[real], %[real]\n\t"
"smlabb %[tmp32no2], %[imag], %[imag], %[tmp32no1]\n\t"
- :[tmp32no1]"+r"(tmp32no1),
+ :[tmp32no1]"+&r"(tmp32no1),
[tmp32no2]"=r"(tmp32no2)
:[real]"r"(freq_signal[i].real),
[imag]"r"(freq_signal[i].imag)
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
index b896de0a214..088bbf03f74 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
@@ -443,27 +443,14 @@ int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
// Call the AECM
/*WebRtcAecm_ProcessFrame(aecm->aecmCore, farend, &nearend[FRAME_LEN * i],
&out[FRAME_LEN * i], aecm->knownDelay);*/
- if (nearendClean == NULL)
- {
- if (WebRtcAecm_ProcessFrame(aecm->aecmCore,
- farend_ptr,
- &nearendNoisy[FRAME_LEN * i],
- NULL,
- &out[FRAME_LEN * i]) == -1)
- {
- return -1;
- }
- } else
- {
- if (WebRtcAecm_ProcessFrame(aecm->aecmCore,
- farend_ptr,
- &nearendNoisy[FRAME_LEN * i],
- &nearendClean[FRAME_LEN * i],
- &out[FRAME_LEN * i]) == -1)
- {
- return -1;
- }
- }
+ if (WebRtcAecm_ProcessFrame(aecm->aecmCore,
+ farend_ptr,
+ &nearendNoisy[FRAME_LEN * i],
+ (nearendClean
+ ? &nearendClean[FRAME_LEN * i]
+ : NULL),
+ &out[FRAME_LEN * i]) == -1)
+ return -1;
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h b/chromium/third_party/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h
index 8ea2e87e2ff..ac43576dd26 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h
@@ -45,7 +45,7 @@ extern "C" {
*
* Inputs Description
* -------------------------------------------------------------------
- * void **aecmInst Pointer to the AECM instance to be
+ * void** aecmInst Pointer to the AECM instance to be
* created and initialized
*
* Outputs Description
@@ -60,11 +60,11 @@ int32_t WebRtcAecm_Create(void **aecmInst);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
+ * void* aecmInst Pointer to the AECM instance
*
* Outputs Description
* -------------------------------------------------------------------
- * int32_t return 0: OK
+ * int32_t return 0: OK
* -1: error
*/
int32_t WebRtcAecm_Free(void *aecmInst);
@@ -74,7 +74,7 @@ int32_t WebRtcAecm_Free(void *aecmInst);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
+ * void* aecmInst Pointer to the AECM instance
* int32_t sampFreq Sampling frequency of data
*
* Outputs Description
@@ -89,8 +89,8 @@ int32_t WebRtcAecm_Init(void* aecmInst, int32_t sampFreq);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
- * int16_t *farend In buffer containing one frame of
+ * void* aecmInst Pointer to the AECM instance
+ * int16_t* farend In buffer containing one frame of
* farend signal
* int16_t nrOfSamples Number of samples in farend buffer
*
@@ -106,14 +106,14 @@ int32_t WebRtcAecm_BufferFarend(void* aecmInst,
/*
* Runs the AECM on 80 or 160 sample blocks of data.
*
- * Inputs Description
+ * Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
- * int16_t *nearendNoisy In buffer containing one frame of
+ * void* aecmInst Pointer to the AECM instance
+ * int16_t* nearendNoisy In buffer containing one frame of
* reference nearend+echo signal. If
* noise reduction is active, provide
* the noisy signal here.
- * int16_t *nearendClean In buffer containing one frame of
+ * int16_t* nearendClean In buffer containing one frame of
* nearend+echo signal. If noise
* reduction is active, provide the
* clean signal here. Otherwise pass a
@@ -122,11 +122,11 @@ int32_t WebRtcAecm_BufferFarend(void* aecmInst,
* int16_t msInSndCardBuf Delay estimate for sound card and
* system buffers
*
- * Outputs Description
+ * Outputs Description
* -------------------------------------------------------------------
- * int16_t *out Out buffer, one frame of processed nearend
- * int32_t return 0: OK
- * -1: error
+ * int16_t* out Out buffer, one frame of processed nearend
+ * int32_t return 0: OK
+ * -1: error
*/
int32_t WebRtcAecm_Process(void* aecmInst,
const int16_t* nearendNoisy,
@@ -140,8 +140,8 @@ int32_t WebRtcAecm_Process(void* aecmInst,
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
- * AecmConfig config Config instance that contains all
+ * void* aecmInst Pointer to the AECM instance
+ * AecmConfig config Config instance that contains all
* properties to be set
*
* Outputs Description
@@ -156,11 +156,11 @@ int32_t WebRtcAecm_set_config(void* aecmInst, AecmConfig config);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
+ * void* aecmInst Pointer to the AECM instance
*
* Outputs Description
* -------------------------------------------------------------------
- * AecmConfig *config Pointer to the config instance that
+ * AecmConfig* config Pointer to the config instance that
* all properties will be written to
* int32_t return 0: OK
* -1: error
@@ -178,7 +178,7 @@ int32_t WebRtcAecm_get_config(void *aecmInst, AecmConfig *config);
*
* Outputs Description
* -------------------------------------------------------------------
- * int32_t return 0: OK
+ * int32_t return 0: OK
* -1: error
*/
int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
@@ -197,7 +197,7 @@ int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
*
* Outputs Description
* -------------------------------------------------------------------
- * int32_t return 0: OK
+ * int32_t return 0: OK
* -1: error
*/
int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
@@ -209,7 +209,7 @@ int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
*
* Outputs Description
* -------------------------------------------------------------------
- * size_t return : size in bytes
+ * size_t return Size in bytes
*/
size_t WebRtcAecm_echo_path_size_bytes();
@@ -218,7 +218,7 @@ size_t WebRtcAecm_echo_path_size_bytes();
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
+ * void* aecmInst Pointer to the AECM instance
*
* Outputs Description
* -------------------------------------------------------------------
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.c b/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.c
index 1e8e3d86b2d..4f110cc2092 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.c
@@ -822,10 +822,16 @@ int32_t WebRtcAgc_ProcessAnalog(void *state, int32_t inMicLevel,
if (inMicLevelTmp != stt->micVol)
{
- // Incoming level mismatch; update our level.
- // This could be the case if the volume is changed manually, or if the
- // sound device has a low volume resolution.
- stt->micVol = inMicLevelTmp;
+ if (inMicLevel == stt->lastInMicLevel) {
+ // We requested a volume adjustment, but it didn't occur. This is
+ // probably due to a coarse quantization of the volume slider.
+ // Restore the requested value to prevent getting stuck.
+ inMicLevelTmp = stt->micVol;
+ }
+ else {
+ // As long as the value changed, update to match.
+ stt->micVol = inMicLevelTmp;
+ }
}
if (inMicLevelTmp > stt->maxLevel)
@@ -835,6 +841,7 @@ int32_t WebRtcAgc_ProcessAnalog(void *state, int32_t inMicLevel,
}
// Store last value here, after we've taken care of manual updates etc.
+ stt->lastInMicLevel = inMicLevel;
lastMicVol = stt->micVol;
/* Checks if the signal is saturated. Also a check if individual samples
@@ -1597,6 +1604,7 @@ int WebRtcAgc_Init(void *agcInst, int32_t minLevel, int32_t maxLevel,
stt->maxInit = stt->maxLevel;
stt->zeroCtrlMax = stt->maxAnalog;
+ stt->lastInMicLevel = 0;
/* Initialize micVol parameter */
stt->micVol = stt->maxAnalog;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.h b/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.h
index 16ea29c4961..b036f449c70 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.h
@@ -111,6 +111,7 @@ typedef struct
int32_t minLevel; // Minimum possible volume level
int32_t minOutput; // Minimum output volume level
int32_t zeroCtrlMax; // Remember max gain => don't amp low input
+ int32_t lastInMicLevel;
int16_t scale; // Scale factor for internal volume levels
#ifdef MIC_LEVEL_FEEDBACK
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc/digital_agc.c b/chromium/third_party/webrtc/modules/audio_processing/agc/digital_agc.c
index 00565dd7230..4b169c180eb 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc/digital_agc.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc/digital_agc.c
@@ -118,7 +118,7 @@ int32_t WebRtcAgc_CalculateGainTable(int32_t *gainTable, // Q16
limiterLvlX = analogTarget - limiterOffset;
limiterIdx = 2
+ WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((int32_t)limiterLvlX, 13),
- WEBRTC_SPL_RSHIFT_U16(kLog10_2, 1));
+ (kLog10_2 / 2));
tmp16no1 = WebRtcSpl_DivW32W16ResW16(limiterOffset + (kCompRatio >> 1), kCompRatio);
limiterLvl = targetLevelDbfs + tmp16no1;
@@ -288,12 +288,7 @@ int32_t WebRtcAgc_InitDigital(DigitalAgc_t *stt, int16_t agcMode)
int32_t WebRtcAgc_AddFarendToDigital(DigitalAgc_t *stt, const int16_t *in_far,
int16_t nrSamples)
{
- // Check for valid pointer
- if (&stt->vadFarend == NULL)
- {
- return -1;
- }
-
+ assert(stt != NULL);
// VAD for far end
WebRtcAgc_ProcessVad(&stt->vadFarend, in_far, nrSamples);
@@ -778,7 +773,7 @@ int16_t WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state
tmp16 = WEBRTC_SPL_LSHIFT_W16(3, 12);
tmp32 = WEBRTC_SPL_MUL_16_16(tmp16, (dB - state->meanLongTerm));
tmp32 = WebRtcSpl_DivW32W16(tmp32, state->stdLongTerm);
- tmpU16 = WEBRTC_SPL_LSHIFT_U16((uint16_t)13, 12);
+ tmpU16 = (13 << 12);
tmp32b = WEBRTC_SPL_MUL_16_U16(state->logRatio, tmpU16);
tmp32 += WEBRTC_SPL_RSHIFT_W32(tmp32b, 10);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.cc
index 048d048723e..b0f1eb6c1db 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/audio_processing/audio_buffer.h"
+#include "webrtc/common_audio/include/audio_util.h"
+#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
namespace webrtc {
@@ -21,145 +23,343 @@ enum {
kSamplesPer32kHzChannel = 320
};
-void StereoToMono(const int16_t* left, const int16_t* right,
- int16_t* out, int samples_per_channel) {
- assert(left != NULL && right != NULL && out != NULL);
- for (int i = 0; i < samples_per_channel; i++) {
- int32_t data32 = (static_cast<int32_t>(left[i]) +
- static_cast<int32_t>(right[i])) >> 1;
+bool HasKeyboardChannel(AudioProcessing::ChannelLayout layout) {
+ switch (layout) {
+ case AudioProcessing::kMono:
+ case AudioProcessing::kStereo:
+ return false;
+ case AudioProcessing::kMonoAndKeyboard:
+ case AudioProcessing::kStereoAndKeyboard:
+ return true;
+ }
+ assert(false);
+ return false;
+}
+
+int KeyboardChannelIndex(AudioProcessing::ChannelLayout layout) {
+ switch (layout) {
+ case AudioProcessing::kMono:
+ case AudioProcessing::kStereo:
+ assert(false);
+ return -1;
+ case AudioProcessing::kMonoAndKeyboard:
+ return 1;
+ case AudioProcessing::kStereoAndKeyboard:
+ return 2;
+ }
+ assert(false);
+ return -1;
+}
+
- out[i] = WebRtcSpl_SatW32ToW16(data32);
+void StereoToMono(const float* left, const float* right, float* out,
+ int samples_per_channel) {
+ for (int i = 0; i < samples_per_channel; ++i) {
+ out[i] = (left[i] + right[i]) / 2;
}
}
+
+void StereoToMono(const int16_t* left, const int16_t* right, int16_t* out,
+ int samples_per_channel) {
+ for (int i = 0; i < samples_per_channel; ++i) {
+ out[i] = (left[i] + right[i]) >> 1;
+ }
+}
+
} // namespace
-struct AudioChannel {
- AudioChannel() {
- memset(data, 0, sizeof(data));
+// One int16_t and one float ChannelBuffer that are kept in sync. The sync is
+// broken when someone requests write access to either ChannelBuffer, and
+// reestablished when someone requests the outdated ChannelBuffer. It is
+// therefore safe to use the return value of ibuf() and fbuf() until the next
+// call to the other method.
+class IFChannelBuffer {
+ public:
+ IFChannelBuffer(int samples_per_channel, int num_channels)
+ : ivalid_(true),
+ ibuf_(samples_per_channel, num_channels),
+ fvalid_(true),
+ fbuf_(samples_per_channel, num_channels) {}
+
+ ChannelBuffer<int16_t>* ibuf() {
+ RefreshI();
+ fvalid_ = false;
+ return &ibuf_;
+ }
+
+ ChannelBuffer<float>* fbuf() {
+ RefreshF();
+ ivalid_ = false;
+ return &fbuf_;
}
- int16_t data[kSamplesPer32kHzChannel];
+ private:
+ void RefreshF() {
+ if (!fvalid_) {
+ assert(ivalid_);
+ const int16_t* const int_data = ibuf_.data();
+ float* const float_data = fbuf_.data();
+ const int length = fbuf_.length();
+ for (int i = 0; i < length; ++i)
+ float_data[i] = int_data[i];
+ fvalid_ = true;
+ }
+ }
+
+ void RefreshI() {
+ if (!ivalid_) {
+ assert(fvalid_);
+ const float* const float_data = fbuf_.data();
+ int16_t* const int_data = ibuf_.data();
+ const int length = ibuf_.length();
+ for (int i = 0; i < length; ++i)
+ int_data[i] = WEBRTC_SPL_SAT(std::numeric_limits<int16_t>::max(),
+ float_data[i],
+ std::numeric_limits<int16_t>::min());
+ ivalid_ = true;
+ }
+ }
+
+ bool ivalid_;
+ ChannelBuffer<int16_t> ibuf_;
+ bool fvalid_;
+ ChannelBuffer<float> fbuf_;
};
-struct SplitAudioChannel {
- SplitAudioChannel() {
- memset(low_pass_data, 0, sizeof(low_pass_data));
- memset(high_pass_data, 0, sizeof(high_pass_data));
- memset(analysis_filter_state1, 0, sizeof(analysis_filter_state1));
- memset(analysis_filter_state2, 0, sizeof(analysis_filter_state2));
- memset(synthesis_filter_state1, 0, sizeof(synthesis_filter_state1));
- memset(synthesis_filter_state2, 0, sizeof(synthesis_filter_state2));
+class SplitChannelBuffer {
+ public:
+ SplitChannelBuffer(int samples_per_split_channel, int num_channels)
+ : low_(samples_per_split_channel, num_channels),
+ high_(samples_per_split_channel, num_channels) {
}
+ ~SplitChannelBuffer() {}
- int16_t low_pass_data[kSamplesPer16kHzChannel];
- int16_t high_pass_data[kSamplesPer16kHzChannel];
+ int16_t* low_channel(int i) { return low_.ibuf()->channel(i); }
+ int16_t* high_channel(int i) { return high_.ibuf()->channel(i); }
+ float* low_channel_f(int i) { return low_.fbuf()->channel(i); }
+ float* high_channel_f(int i) { return high_.fbuf()->channel(i); }
- int32_t analysis_filter_state1[6];
- int32_t analysis_filter_state2[6];
- int32_t synthesis_filter_state1[6];
- int32_t synthesis_filter_state2[6];
+ private:
+ IFChannelBuffer low_;
+ IFChannelBuffer high_;
};
-// TODO(andrew): check range of input parameters?
-AudioBuffer::AudioBuffer(int max_num_channels,
- int samples_per_channel)
- : max_num_channels_(max_num_channels),
- num_channels_(0),
+AudioBuffer::AudioBuffer(int input_samples_per_channel,
+ int num_input_channels,
+ int process_samples_per_channel,
+ int num_process_channels,
+ int output_samples_per_channel)
+ : input_samples_per_channel_(input_samples_per_channel),
+ num_input_channels_(num_input_channels),
+ proc_samples_per_channel_(process_samples_per_channel),
+ num_proc_channels_(num_process_channels),
+ output_samples_per_channel_(output_samples_per_channel),
+ samples_per_split_channel_(proc_samples_per_channel_),
num_mixed_channels_(0),
num_mixed_low_pass_channels_(0),
- data_was_mixed_(false),
- samples_per_channel_(samples_per_channel),
- samples_per_split_channel_(samples_per_channel),
reference_copied_(false),
activity_(AudioFrame::kVadUnknown),
- is_muted_(false),
- data_(NULL),
- channels_(NULL),
- split_channels_(NULL),
- mixed_channels_(NULL),
- mixed_low_pass_channels_(NULL),
- low_pass_reference_channels_(NULL) {
- if (max_num_channels_ > 1) {
- channels_.reset(new AudioChannel[max_num_channels_]);
- mixed_channels_.reset(new AudioChannel[max_num_channels_]);
- mixed_low_pass_channels_.reset(new AudioChannel[max_num_channels_]);
+ keyboard_data_(NULL),
+ channels_(new IFChannelBuffer(proc_samples_per_channel_,
+ num_proc_channels_)) {
+ assert(input_samples_per_channel_ > 0);
+ assert(proc_samples_per_channel_ > 0);
+ assert(output_samples_per_channel_ > 0);
+ assert(num_input_channels_ > 0 && num_input_channels_ <= 2);
+ assert(num_proc_channels_ <= num_input_channels);
+
+ if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
+ input_buffer_.reset(new ChannelBuffer<float>(input_samples_per_channel_,
+ num_proc_channels_));
+ }
+
+ if (input_samples_per_channel_ != proc_samples_per_channel_ ||
+ output_samples_per_channel_ != proc_samples_per_channel_) {
+ // Create an intermediate buffer for resampling.
+ process_buffer_.reset(new ChannelBuffer<float>(proc_samples_per_channel_,
+ num_proc_channels_));
+ }
+
+ if (input_samples_per_channel_ != proc_samples_per_channel_) {
+ input_resamplers_.reserve(num_proc_channels_);
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ input_resamplers_.push_back(
+ new PushSincResampler(input_samples_per_channel_,
+ proc_samples_per_channel_));
+ }
}
- low_pass_reference_channels_.reset(new AudioChannel[max_num_channels_]);
- if (samples_per_channel_ == kSamplesPer32kHzChannel) {
- split_channels_.reset(new SplitAudioChannel[max_num_channels_]);
+ if (output_samples_per_channel_ != proc_samples_per_channel_) {
+ output_resamplers_.reserve(num_proc_channels_);
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ output_resamplers_.push_back(
+ new PushSincResampler(proc_samples_per_channel_,
+ output_samples_per_channel_));
+ }
+ }
+
+ if (proc_samples_per_channel_ == kSamplesPer32kHzChannel) {
samples_per_split_channel_ = kSamplesPer16kHzChannel;
+ split_channels_.reset(new SplitChannelBuffer(samples_per_split_channel_,
+ num_proc_channels_));
+ filter_states_.reset(new SplitFilterStates[num_proc_channels_]);
}
}
AudioBuffer::~AudioBuffer() {}
-int16_t* AudioBuffer::data(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- if (data_ != NULL) {
- return data_;
+void AudioBuffer::CopyFrom(const float* const* data,
+ int samples_per_channel,
+ AudioProcessing::ChannelLayout layout) {
+ assert(samples_per_channel == input_samples_per_channel_);
+ assert(ChannelsFromLayout(layout) == num_input_channels_);
+ InitForNewData();
+
+ if (HasKeyboardChannel(layout)) {
+ keyboard_data_ = data[KeyboardChannelIndex(layout)];
}
- return channels_[channel].data;
-}
+ // Downmix.
+ const float* const* data_ptr = data;
+ if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
+ StereoToMono(data[0],
+ data[1],
+ input_buffer_->channel(0),
+ input_samples_per_channel_);
+ data_ptr = input_buffer_->channels();
+ }
-int16_t* AudioBuffer::low_pass_split_data(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- if (split_channels_.get() == NULL) {
- return data(channel);
+ // Resample.
+ if (input_samples_per_channel_ != proc_samples_per_channel_) {
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ input_resamplers_[i]->Resample(data_ptr[i],
+ input_samples_per_channel_,
+ process_buffer_->channel(i),
+ proc_samples_per_channel_);
+ }
+ data_ptr = process_buffer_->channels();
}
- return split_channels_[channel].low_pass_data;
+ // Convert to int16.
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ ScaleAndRoundToInt16(data_ptr[i], proc_samples_per_channel_,
+ channels_->ibuf()->channel(i));
+ }
}
-int16_t* AudioBuffer::high_pass_split_data(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- if (split_channels_.get() == NULL) {
- return NULL;
+void AudioBuffer::CopyTo(int samples_per_channel,
+ AudioProcessing::ChannelLayout layout,
+ float* const* data) {
+ assert(samples_per_channel == output_samples_per_channel_);
+ assert(ChannelsFromLayout(layout) == num_proc_channels_);
+
+ // Convert to float.
+ float* const* data_ptr = data;
+ if (output_samples_per_channel_ != proc_samples_per_channel_) {
+ // Convert to an intermediate buffer for subsequent resampling.
+ data_ptr = process_buffer_->channels();
+ }
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ ScaleToFloat(channels_->ibuf()->channel(i),
+ proc_samples_per_channel_,
+ data_ptr[i]);
+ }
+
+ // Resample.
+ if (output_samples_per_channel_ != proc_samples_per_channel_) {
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ output_resamplers_[i]->Resample(data_ptr[i],
+ proc_samples_per_channel_,
+ data[i],
+ output_samples_per_channel_);
+ }
}
+}
- return split_channels_[channel].high_pass_data;
+void AudioBuffer::InitForNewData() {
+ keyboard_data_ = NULL;
+ num_mixed_channels_ = 0;
+ num_mixed_low_pass_channels_ = 0;
+ reference_copied_ = false;
+ activity_ = AudioFrame::kVadUnknown;
}
-int16_t* AudioBuffer::mixed_data(int channel) const {
+const int16_t* AudioBuffer::data(int channel) const {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return channels_->ibuf()->channel(channel);
+}
+
+int16_t* AudioBuffer::data(int channel) {
+ const AudioBuffer* t = this;
+ return const_cast<int16_t*>(t->data(channel));
+}
+
+float* AudioBuffer::data_f(int channel) {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return channels_->fbuf()->channel(channel);
+}
+
+const int16_t* AudioBuffer::low_pass_split_data(int channel) const {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return split_channels_.get() ? split_channels_->low_channel(channel)
+ : data(channel);
+}
+
+int16_t* AudioBuffer::low_pass_split_data(int channel) {
+ const AudioBuffer* t = this;
+ return const_cast<int16_t*>(t->low_pass_split_data(channel));
+}
+
+float* AudioBuffer::low_pass_split_data_f(int channel) {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return split_channels_.get() ? split_channels_->low_channel_f(channel)
+ : data_f(channel);
+}
+
+const int16_t* AudioBuffer::high_pass_split_data(int channel) const {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return split_channels_.get() ? split_channels_->high_channel(channel) : NULL;
+}
+
+int16_t* AudioBuffer::high_pass_split_data(int channel) {
+ const AudioBuffer* t = this;
+ return const_cast<int16_t*>(t->high_pass_split_data(channel));
+}
+
+float* AudioBuffer::high_pass_split_data_f(int channel) {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return split_channels_.get() ? split_channels_->high_channel_f(channel)
+ : NULL;
+}
+
+const int16_t* AudioBuffer::mixed_data(int channel) const {
assert(channel >= 0 && channel < num_mixed_channels_);
- return mixed_channels_[channel].data;
+ return mixed_channels_->channel(channel);
}
-int16_t* AudioBuffer::mixed_low_pass_data(int channel) const {
+const int16_t* AudioBuffer::mixed_low_pass_data(int channel) const {
assert(channel >= 0 && channel < num_mixed_low_pass_channels_);
- return mixed_low_pass_channels_[channel].data;
+ return mixed_low_pass_channels_->channel(channel);
}
-int16_t* AudioBuffer::low_pass_reference(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
+const int16_t* AudioBuffer::low_pass_reference(int channel) const {
+ assert(channel >= 0 && channel < num_proc_channels_);
if (!reference_copied_) {
return NULL;
}
- return low_pass_reference_channels_[channel].data;
-}
-
-int32_t* AudioBuffer::analysis_filter_state1(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- return split_channels_[channel].analysis_filter_state1;
-}
-
-int32_t* AudioBuffer::analysis_filter_state2(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- return split_channels_[channel].analysis_filter_state2;
+ return low_pass_reference_channels_->channel(channel);
}
-int32_t* AudioBuffer::synthesis_filter_state1(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- return split_channels_[channel].synthesis_filter_state1;
+const float* AudioBuffer::keyboard_data() const {
+ return keyboard_data_;
}
-int32_t* AudioBuffer::synthesis_filter_state2(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- return split_channels_[channel].synthesis_filter_state2;
+SplitFilterStates* AudioBuffer::filter_states(int channel) {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return &filter_states_[channel];
}
void AudioBuffer::set_activity(AudioFrame::VADActivity activity) {
@@ -170,126 +370,96 @@ AudioFrame::VADActivity AudioBuffer::activity() const {
return activity_;
}
-bool AudioBuffer::is_muted() const {
- return is_muted_;
-}
-
int AudioBuffer::num_channels() const {
- return num_channels_;
+ return num_proc_channels_;
}
int AudioBuffer::samples_per_channel() const {
- return samples_per_channel_;
+ return proc_samples_per_channel_;
}
int AudioBuffer::samples_per_split_channel() const {
return samples_per_split_channel_;
}
+int AudioBuffer::samples_per_keyboard_channel() const {
+ // We don't resample the keyboard channel.
+ return input_samples_per_channel_;
+}
+
// TODO(andrew): Do deinterleaving and mixing in one step?
void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {
- assert(frame->num_channels_ <= max_num_channels_);
- assert(frame->samples_per_channel_ == samples_per_channel_);
-
- num_channels_ = frame->num_channels_;
- data_was_mixed_ = false;
- num_mixed_channels_ = 0;
- num_mixed_low_pass_channels_ = 0;
- reference_copied_ = false;
+ assert(proc_samples_per_channel_ == input_samples_per_channel_);
+ assert(num_proc_channels_ == num_input_channels_);
+ assert(frame->num_channels_ == num_proc_channels_);
+ assert(frame->samples_per_channel_ == proc_samples_per_channel_);
+ InitForNewData();
activity_ = frame->vad_activity_;
- is_muted_ = false;
- if (frame->energy_ == 0) {
- is_muted_ = true;
- }
-
- if (num_channels_ == 1) {
- // We can get away with a pointer assignment in this case.
- data_ = frame->data_;
- return;
- }
int16_t* interleaved = frame->data_;
- for (int i = 0; i < num_channels_; i++) {
- int16_t* deinterleaved = channels_[i].data;
+ for (int i = 0; i < num_proc_channels_; i++) {
+ int16_t* deinterleaved = channels_->ibuf()->channel(i);
int interleaved_idx = i;
- for (int j = 0; j < samples_per_channel_; j++) {
+ for (int j = 0; j < proc_samples_per_channel_; j++) {
deinterleaved[j] = interleaved[interleaved_idx];
- interleaved_idx += num_channels_;
+ interleaved_idx += num_proc_channels_;
}
}
}
void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) const {
- assert(frame->num_channels_ == num_channels_);
- assert(frame->samples_per_channel_ == samples_per_channel_);
+ assert(proc_samples_per_channel_ == output_samples_per_channel_);
+ assert(num_proc_channels_ == num_input_channels_);
+ assert(frame->num_channels_ == num_proc_channels_);
+ assert(frame->samples_per_channel_ == proc_samples_per_channel_);
frame->vad_activity_ = activity_;
if (!data_changed) {
return;
}
- if (num_channels_ == 1) {
- if (data_was_mixed_) {
- memcpy(frame->data_,
- channels_[0].data,
- sizeof(int16_t) * samples_per_channel_);
- } else {
- // These should point to the same buffer in this case.
- assert(data_ == frame->data_);
- }
-
- return;
- }
-
int16_t* interleaved = frame->data_;
- for (int i = 0; i < num_channels_; i++) {
- int16_t* deinterleaved = channels_[i].data;
+ for (int i = 0; i < num_proc_channels_; i++) {
+ int16_t* deinterleaved = channels_->ibuf()->channel(i);
int interleaved_idx = i;
- for (int j = 0; j < samples_per_channel_; j++) {
+ for (int j = 0; j < proc_samples_per_channel_; j++) {
interleaved[interleaved_idx] = deinterleaved[j];
- interleaved_idx += num_channels_;
+ interleaved_idx += num_proc_channels_;
}
}
}
-// TODO(andrew): would be good to support the no-mix case with pointer
-// assignment.
-// TODO(andrew): handle mixing to multiple channels?
-void AudioBuffer::Mix(int num_mixed_channels) {
- // We currently only support the stereo to mono case.
- assert(num_channels_ == 2);
- assert(num_mixed_channels == 1);
-
- StereoToMono(channels_[0].data,
- channels_[1].data,
- channels_[0].data,
- samples_per_channel_);
-
- num_channels_ = num_mixed_channels;
- data_was_mixed_ = true;
-}
-
void AudioBuffer::CopyAndMix(int num_mixed_channels) {
// We currently only support the stereo to mono case.
- assert(num_channels_ == 2);
+ assert(num_proc_channels_ == 2);
assert(num_mixed_channels == 1);
+ if (!mixed_channels_.get()) {
+ mixed_channels_.reset(
+ new ChannelBuffer<int16_t>(proc_samples_per_channel_,
+ num_mixed_channels));
+ }
- StereoToMono(channels_[0].data,
- channels_[1].data,
- mixed_channels_[0].data,
- samples_per_channel_);
+ StereoToMono(channels_->ibuf()->channel(0),
+ channels_->ibuf()->channel(1),
+ mixed_channels_->channel(0),
+ proc_samples_per_channel_);
num_mixed_channels_ = num_mixed_channels;
}
void AudioBuffer::CopyAndMixLowPass(int num_mixed_channels) {
// We currently only support the stereo to mono case.
- assert(num_channels_ == 2);
+ assert(num_proc_channels_ == 2);
assert(num_mixed_channels == 1);
+ if (!mixed_low_pass_channels_.get()) {
+ mixed_low_pass_channels_.reset(
+ new ChannelBuffer<int16_t>(samples_per_split_channel_,
+ num_mixed_channels));
+ }
StereoToMono(low_pass_split_data(0),
low_pass_split_data(1),
- mixed_low_pass_channels_[0].data,
+ mixed_low_pass_channels_->channel(0),
samples_per_split_channel_);
num_mixed_low_pass_channels_ = num_mixed_channels;
@@ -297,10 +467,14 @@ void AudioBuffer::CopyAndMixLowPass(int num_mixed_channels) {
void AudioBuffer::CopyLowPassToReference() {
reference_copied_ = true;
- for (int i = 0; i < num_channels_; i++) {
- memcpy(low_pass_reference_channels_[i].data,
- low_pass_split_data(i),
- sizeof(int16_t) * samples_per_split_channel_);
+ if (!low_pass_reference_channels_.get()) {
+ low_pass_reference_channels_.reset(
+ new ChannelBuffer<int16_t>(samples_per_split_channel_,
+ num_proc_channels_));
+ }
+ for (int i = 0; i < num_proc_channels_; i++) {
+ low_pass_reference_channels_->CopyFrom(low_pass_split_data(i), i);
}
}
+
} // namespace webrtc
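
IFChannelBuffer above keeps an int16_t and a float view of the same samples and converts lazily: requesting one view marks the other stale, and the stale view is rebuilt on its next access. A small sketch of that validity-flag pattern on a fixed mono buffer, assuming 160 samples (LazySyncBuffer is illustrative, not part of the patch):

    #include <algorithm>
    #include <cstdint>

    class LazySyncBuffer {
     public:
      // Asking for one view invalidates the other, as in IFChannelBuffer.
      int16_t* ibuf() { RefreshI(); fvalid_ = false; return i_; }
      float* fbuf() { RefreshF(); ivalid_ = false; return f_; }

     private:
      static const int kSamples = 160;

      void RefreshF() {
        if (!fvalid_) {
          for (int n = 0; n < kSamples; ++n) f_[n] = i_[n];
          fvalid_ = true;
        }
      }

      void RefreshI() {
        if (!ivalid_) {
          for (int n = 0; n < kSamples; ++n) {
            // Saturate to the int16_t range, as the real buffer does.
            float s = std::min(32767.0f, std::max(-32768.0f, f_[n]));
            i_[n] = static_cast<int16_t>(s);
          }
          ivalid_ = true;
        }
      }

      bool ivalid_ = true;
      bool fvalid_ = true;
      int16_t i_[kSamples] = {};
      float f_[kSamples] = {};
    };

Writing through fbuf() and then reading ibuf() performs exactly one conversion, which is why the comment above says either pointer is only safe to use until the next call to the other accessor.
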
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.h b/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.h
index 2638bef6058..67e4f485043 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.h
@@ -8,75 +8,124 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_BUFFER_H_
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AUDIO_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AUDIO_BUFFER_H_
+#include <vector>
+
+#include "webrtc/modules/audio_processing/common.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/scoped_vector.h"
#include "webrtc/typedefs.h"
namespace webrtc {
-struct AudioChannel;
-struct SplitAudioChannel;
+class PushSincResampler;
+class SplitChannelBuffer;
+class IFChannelBuffer;
+
+struct SplitFilterStates {
+ SplitFilterStates() {
+ memset(analysis_filter_state1, 0, sizeof(analysis_filter_state1));
+ memset(analysis_filter_state2, 0, sizeof(analysis_filter_state2));
+ memset(synthesis_filter_state1, 0, sizeof(synthesis_filter_state1));
+ memset(synthesis_filter_state2, 0, sizeof(synthesis_filter_state2));
+ }
+
+ static const int kStateSize = 6;
+ int analysis_filter_state1[kStateSize];
+ int analysis_filter_state2[kStateSize];
+ int synthesis_filter_state1[kStateSize];
+ int synthesis_filter_state2[kStateSize];
+};
class AudioBuffer {
public:
- AudioBuffer(int max_num_channels, int samples_per_channel);
+ // TODO(ajm): Switch to take ChannelLayouts.
+ AudioBuffer(int input_samples_per_channel,
+ int num_input_channels,
+ int process_samples_per_channel,
+ int num_process_channels,
+ int output_samples_per_channel);
virtual ~AudioBuffer();
int num_channels() const;
int samples_per_channel() const;
int samples_per_split_channel() const;
+ int samples_per_keyboard_channel() const;
- int16_t* data(int channel) const;
- int16_t* low_pass_split_data(int channel) const;
- int16_t* high_pass_split_data(int channel) const;
- int16_t* mixed_data(int channel) const;
- int16_t* mixed_low_pass_data(int channel) const;
- int16_t* low_pass_reference(int channel) const;
+ int16_t* data(int channel);
+ const int16_t* data(int channel) const;
+ int16_t* low_pass_split_data(int channel);
+ const int16_t* low_pass_split_data(int channel) const;
+ int16_t* high_pass_split_data(int channel);
+ const int16_t* high_pass_split_data(int channel) const;
+ const int16_t* mixed_data(int channel) const;
+ const int16_t* mixed_low_pass_data(int channel) const;
+ const int16_t* low_pass_reference(int channel) const;
- int32_t* analysis_filter_state1(int channel) const;
- int32_t* analysis_filter_state2(int channel) const;
- int32_t* synthesis_filter_state1(int channel) const;
- int32_t* synthesis_filter_state2(int channel) const;
+ // Float versions of the accessors, with automatic conversion back and forth
+  // as necessary. The range of the numbers is the same as for int16_t.
+ float* data_f(int channel);
+ float* low_pass_split_data_f(int channel);
+ float* high_pass_split_data_f(int channel);
+
+ const float* keyboard_data() const;
+
+ SplitFilterStates* filter_states(int channel);
void set_activity(AudioFrame::VADActivity activity);
AudioFrame::VADActivity activity() const;
- bool is_muted() const;
-
+ // Use for int16 interleaved data.
void DeinterleaveFrom(AudioFrame* audioFrame);
void InterleaveTo(AudioFrame* audioFrame) const;
// If |data_changed| is false, only the non-audio data members will be copied
// to |frame|.
void InterleaveTo(AudioFrame* frame, bool data_changed) const;
- void Mix(int num_mixed_channels);
+
+ // Use for float deinterleaved data.
+ void CopyFrom(const float* const* data,
+ int samples_per_channel,
+ AudioProcessing::ChannelLayout layout);
+ void CopyTo(int samples_per_channel,
+ AudioProcessing::ChannelLayout layout,
+ float* const* data);
+
void CopyAndMix(int num_mixed_channels);
void CopyAndMixLowPass(int num_mixed_channels);
void CopyLowPassToReference();
private:
- const int max_num_channels_;
- int num_channels_;
+ // Called from DeinterleaveFrom() and CopyFrom().
+ void InitForNewData();
+
+ const int input_samples_per_channel_;
+ const int num_input_channels_;
+ const int proc_samples_per_channel_;
+ const int num_proc_channels_;
+ const int output_samples_per_channel_;
+ int samples_per_split_channel_;
int num_mixed_channels_;
int num_mixed_low_pass_channels_;
- // Whether the original data was replaced with mixed data.
- bool data_was_mixed_;
- const int samples_per_channel_;
- int samples_per_split_channel_;
bool reference_copied_;
AudioFrame::VADActivity activity_;
- bool is_muted_;
-
- int16_t* data_;
- scoped_array<AudioChannel> channels_;
- scoped_array<SplitAudioChannel> split_channels_;
- scoped_array<AudioChannel> mixed_channels_;
- // TODO(andrew): improve this, we don't need the full 32 kHz space here.
- scoped_array<AudioChannel> mixed_low_pass_channels_;
- scoped_array<AudioChannel> low_pass_reference_channels_;
+
+ const float* keyboard_data_;
+ scoped_ptr<IFChannelBuffer> channels_;
+ scoped_ptr<SplitChannelBuffer> split_channels_;
+ scoped_ptr<SplitFilterStates[]> filter_states_;
+ scoped_ptr<ChannelBuffer<int16_t> > mixed_channels_;
+ scoped_ptr<ChannelBuffer<int16_t> > mixed_low_pass_channels_;
+ scoped_ptr<ChannelBuffer<int16_t> > low_pass_reference_channels_;
+ scoped_ptr<ChannelBuffer<float> > input_buffer_;
+ scoped_ptr<ChannelBuffer<float> > process_buffer_;
+ ScopedVector<PushSincResampler> input_resamplers_;
+ ScopedVector<PushSincResampler> output_resamplers_;
};
+
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_BUFFER_H_
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AUDIO_BUFFER_H_
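
The reworked AudioBuffer is constructed with independent input, processing and output lengths and exposes both int16 and float channel views. A usage sketch for a mono capture path, assuming 10 ms chunks at 48 kHz in/out resampled to 32 kHz for processing; the rates and sample counts are illustrative, only names declared in the header above are used, and this builds only inside the webrtc tree:

    #include "webrtc/modules/audio_processing/audio_buffer.h"

    namespace webrtc {

    // capture_48k and out_48k each hold one channel of 480 samples (10 ms).
    void SketchMonoCapturePath(const float* const* capture_48k,
                               float* const* out_48k) {
      AudioBuffer buffer(480,   // input_samples_per_channel (48 kHz * 10 ms)
                         1,     // num_input_channels
                         320,   // process_samples_per_channel (32 kHz * 10 ms)
                         1,     // num_process_channels
                         480);  // output_samples_per_channel

      // Float deinterleaved input; resampling and int16 conversion happen inside.
      buffer.CopyFrom(capture_48k, 480, AudioProcessing::kMono);

      // Components may use either the int16 or the float accessors.
      int16_t* low_band = buffer.low_pass_split_data(0);
      float* full_band = buffer.data_f(0);
      (void)low_band;
      (void)full_band;

      // Convert back to float at the output rate.
      buffer.CopyTo(480, AudioProcessing::kMono, out_48k);
    }

    }  // namespace webrtc
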
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi b/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
index 336b4eee75f..b1d18c5b06a 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
@@ -12,6 +12,7 @@
'<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
+ 'shared_generated_dir': '<(SHARED_INTERMEDIATE_DIR)/audio_processing/asm_offsets',
},
'targets': [
{
@@ -53,9 +54,9 @@
'audio_buffer.h',
'audio_processing_impl.cc',
'audio_processing_impl.h',
+ 'common.h',
'echo_cancellation_impl.cc',
'echo_cancellation_impl.h',
- 'echo_cancellation_impl_wrapper.h',
'echo_control_mobile_impl.cc',
'echo_control_mobile_impl.h',
'gain_control_impl.cc',
@@ -67,10 +68,12 @@
'level_estimator_impl.h',
'noise_suppression_impl.cc',
'noise_suppression_impl.h',
- 'splitting_filter.cc',
- 'splitting_filter.h',
'processing_component.cc',
'processing_component.h',
+ 'rms_level.cc',
+ 'rms_level.h',
+ 'typing_detection.cc',
+ 'typing_detection.h',
'utility/delay_estimator.c',
'utility/delay_estimator.h',
'utility/delay_estimator_internal.h',
@@ -103,6 +106,17 @@
'ns/nsx_core.h',
'ns/nsx_defines.h',
],
+ 'conditions': [
+ ['target_arch=="mipsel"', {
+ 'sources': [
+ 'ns/nsx_core_mips.c',
+ ],
+ }, {
+ 'sources': [
+ 'ns/nsx_core_c.c',
+ ],
+ }],
+ ],
}, {
'defines': ['WEBRTC_NS_FLOAT'],
'sources': [
@@ -124,6 +138,14 @@
'sources': [
'aecm/aecm_core_mips.c',
],
+ 'conditions': [
+ ['mips_fpu==1', {
+ 'sources': [
+ 'aec/aec_core_mips.c',
+ 'aec/aec_rdft_mips.c',
+ ],
+ }],
+ ],
}, {
'sources': [
'aecm/aecm_core_c.c',
@@ -177,18 +199,22 @@
'<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
],
'sources': [
+ 'aec/aec_core_neon.c',
'aecm/aecm_core_neon.c',
'ns/nsx_core_neon.c',
],
'conditions': [
['OS=="android" or OS=="ios"', {
'dependencies': [
- 'audio_processing_offsets',
+ '<(gen_core_neon_offsets_gyp):*',
],
'sources': [
'aecm/aecm_core_neon.S',
'ns/nsx_core_neon.S',
],
+ 'include_dirs': [
+ '<(shared_generated_dir)',
+ ],
'sources!': [
'aecm/aecm_core_neon.c',
'ns/nsx_core_neon.c',
@@ -197,22 +223,6 @@
}],
],
}],
- 'conditions': [
- ['OS=="android" or OS=="ios"', {
- 'targets': [{
- 'target_name': 'audio_processing_offsets',
- 'type': 'none',
- 'sources': [
- 'aecm/aecm_core_neon_offsets.c',
- 'ns/nsx_core_neon_offsets.c',
- ],
- 'variables': {
- 'asm_header_dir': 'asm_offsets',
- },
- 'includes': ['../../build/generate_asm_header.gypi',],
- }],
- }],
- ],
}],
],
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
index 4d36ff7e7b4..de387edb2f5 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
@@ -12,17 +12,20 @@
#include <assert.h>
+#include "webrtc/common_audio/include/audio_util.h"
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h"
+#include "webrtc/modules/audio_processing/common.h"
+#include "webrtc/modules/audio_processing/echo_cancellation_impl.h"
#include "webrtc/modules/audio_processing/echo_control_mobile_impl.h"
#include "webrtc/modules/audio_processing/gain_control_impl.h"
#include "webrtc/modules/audio_processing/high_pass_filter_impl.h"
#include "webrtc/modules/audio_processing/level_estimator_impl.h"
#include "webrtc/modules/audio_processing/noise_suppression_impl.h"
#include "webrtc/modules/audio_processing/processing_component.h"
-#include "webrtc/modules/audio_processing/splitting_filter.h"
#include "webrtc/modules/audio_processing/voice_detection_impl.h"
#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/compile_assert.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/file_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
@@ -36,9 +39,30 @@
#endif
#endif // WEBRTC_AUDIOPROC_DEBUG_DUMP
+#define RETURN_ON_ERR(expr) \
+ do { \
+ int err = expr; \
+ if (err != kNoError) { \
+ return err; \
+ } \
+ } while (0)
+
namespace webrtc {
+
+// Throughout webrtc, it's assumed that success is represented by zero.
+COMPILE_ASSERT(AudioProcessing::kNoError == 0, no_error_must_be_zero);
+
AudioProcessing* AudioProcessing::Create(int id) {
- AudioProcessingImpl* apm = new AudioProcessingImpl(id);
+ return Create();
+}
+
+AudioProcessing* AudioProcessing::Create() {
+ Config config;
+ return Create(config);
+}
+
+AudioProcessing* AudioProcessing::Create(const Config& config) {
+ AudioProcessingImpl* apm = new AudioProcessingImpl(config);
if (apm->Initialize() != kNoError) {
delete apm;
apm = NULL;
@@ -47,12 +71,8 @@ AudioProcessing* AudioProcessing::Create(int id) {
return apm;
}
-int32_t AudioProcessing::TimeUntilNextProcess() { return -1; }
-int32_t AudioProcessing::Process() { return -1; }
-
-AudioProcessingImpl::AudioProcessingImpl(int id)
- : id_(id),
- echo_cancellation_(NULL),
+AudioProcessingImpl::AudioProcessingImpl(const Config& config)
+ : echo_cancellation_(NULL),
echo_control_mobile_(NULL),
gain_control_(NULL),
high_pass_filter_(NULL),
@@ -60,41 +80,43 @@ AudioProcessingImpl::AudioProcessingImpl(int id)
noise_suppression_(NULL),
voice_detection_(NULL),
crit_(CriticalSectionWrapper::CreateCriticalSection()),
- render_audio_(NULL),
- capture_audio_(NULL),
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
debug_file_(FileWrapper::Create()),
event_msg_(new audioproc::Event()),
#endif
- sample_rate_hz_(kSampleRate16kHz),
- split_sample_rate_hz_(kSampleRate16kHz),
- samples_per_channel_(sample_rate_hz_ / 100),
+ fwd_in_format_(kSampleRate16kHz, 1),
+ fwd_proc_format_(kSampleRate16kHz, 1),
+ fwd_out_format_(kSampleRate16kHz),
+ rev_in_format_(kSampleRate16kHz, 1),
+ rev_proc_format_(kSampleRate16kHz, 1),
+ split_rate_(kSampleRate16kHz),
stream_delay_ms_(0),
delay_offset_ms_(0),
was_stream_delay_set_(false),
- num_reverse_channels_(1),
- num_input_channels_(1),
- num_output_channels_(1) {
- echo_cancellation_ = EchoCancellationImplWrapper::Create(this);
+ output_will_be_muted_(false),
+ key_pressed_(false) {
+ echo_cancellation_ = new EchoCancellationImpl(this, crit_);
component_list_.push_back(echo_cancellation_);
- echo_control_mobile_ = new EchoControlMobileImpl(this);
+ echo_control_mobile_ = new EchoControlMobileImpl(this, crit_);
component_list_.push_back(echo_control_mobile_);
- gain_control_ = new GainControlImpl(this);
+ gain_control_ = new GainControlImpl(this, crit_);
component_list_.push_back(gain_control_);
- high_pass_filter_ = new HighPassFilterImpl(this);
+ high_pass_filter_ = new HighPassFilterImpl(this, crit_);
component_list_.push_back(high_pass_filter_);
- level_estimator_ = new LevelEstimatorImpl(this);
+ level_estimator_ = new LevelEstimatorImpl(this, crit_);
component_list_.push_back(level_estimator_);
- noise_suppression_ = new NoiseSuppressionImpl(this);
+ noise_suppression_ = new NoiseSuppressionImpl(this, crit_);
component_list_.push_back(noise_suppression_);
- voice_detection_ = new VoiceDetectionImpl(this);
+ voice_detection_ = new VoiceDetectionImpl(this, crit_);
component_list_.push_back(voice_detection_);
+
+ SetExtraOptions(config);
}
AudioProcessingImpl::~AudioProcessingImpl() {
@@ -112,52 +134,52 @@ AudioProcessingImpl::~AudioProcessingImpl() {
debug_file_->CloseFile();
}
#endif
-
- if (render_audio_) {
- delete render_audio_;
- render_audio_ = NULL;
- }
-
- if (capture_audio_) {
- delete capture_audio_;
- capture_audio_ = NULL;
- }
}
-
delete crit_;
crit_ = NULL;
}
-CriticalSectionWrapper* AudioProcessingImpl::crit() const {
- return crit_;
-}
-
-int AudioProcessingImpl::split_sample_rate_hz() const {
- return split_sample_rate_hz_;
-}
-
int AudioProcessingImpl::Initialize() {
CriticalSectionScoped crit_scoped(crit_);
return InitializeLocked();
}
-int AudioProcessingImpl::InitializeLocked() {
- if (render_audio_ != NULL) {
- delete render_audio_;
- render_audio_ = NULL;
- }
-
- if (capture_audio_ != NULL) {
- delete capture_audio_;
- capture_audio_ = NULL;
- }
-
- render_audio_ = new AudioBuffer(num_reverse_channels_,
- samples_per_channel_);
- capture_audio_ = new AudioBuffer(num_input_channels_,
- samples_per_channel_);
+int AudioProcessingImpl::set_sample_rate_hz(int rate) {
+ CriticalSectionScoped crit_scoped(crit_);
+ return InitializeLocked(rate,
+ rate,
+ rev_in_format_.rate(),
+ fwd_in_format_.num_channels(),
+ fwd_proc_format_.num_channels(),
+ rev_in_format_.num_channels());
+}
+
+int AudioProcessingImpl::Initialize(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ ChannelLayout input_layout,
+ ChannelLayout output_layout,
+ ChannelLayout reverse_layout) {
+ CriticalSectionScoped crit_scoped(crit_);
+ return InitializeLocked(input_sample_rate_hz,
+ output_sample_rate_hz,
+ reverse_sample_rate_hz,
+ ChannelsFromLayout(input_layout),
+ ChannelsFromLayout(output_layout),
+ ChannelsFromLayout(reverse_layout));
+}
- was_stream_delay_set_ = false;
+int AudioProcessingImpl::InitializeLocked() {
+ render_audio_.reset(new AudioBuffer(rev_in_format_.samples_per_channel(),
+ rev_in_format_.num_channels(),
+ rev_proc_format_.samples_per_channel(),
+ rev_proc_format_.num_channels(),
+ rev_proc_format_.samples_per_channel()));
+ capture_audio_.reset(new AudioBuffer(fwd_in_format_.samples_per_channel(),
+ fwd_in_format_.num_channels(),
+ fwd_proc_format_.samples_per_channel(),
+ fwd_proc_format_.num_channels(),
+ fwd_out_format_.samples_per_channel()));
// Initialize all components.
std::list<ProcessingComponent*>::iterator it;
@@ -180,115 +202,228 @@ int AudioProcessingImpl::InitializeLocked() {
return kNoError;
}
-void AudioProcessingImpl::SetExtraOptions(const Config& config) {
- std::list<ProcessingComponent*>::iterator it;
- for (it = component_list_.begin(); it != component_list_.end(); ++it)
- (*it)->SetExtraOptions(config);
-}
+int AudioProcessingImpl::InitializeLocked(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ int num_input_channels,
+ int num_output_channels,
+ int num_reverse_channels) {
+ if (input_sample_rate_hz <= 0 ||
+ output_sample_rate_hz <= 0 ||
+ reverse_sample_rate_hz <= 0) {
+ return kBadSampleRateError;
+ }
+ if (num_output_channels > num_input_channels) {
+ return kBadNumberChannelsError;
+ }
+ // Only mono and stereo supported currently.
+ if (num_input_channels > 2 || num_input_channels < 1 ||
+ num_output_channels > 2 || num_output_channels < 1 ||
+ num_reverse_channels > 2 || num_reverse_channels < 1) {
+ return kBadNumberChannelsError;
+ }
-int AudioProcessingImpl::EnableExperimentalNs(bool enable) {
- return kNoError;
-}
+ fwd_in_format_.set(input_sample_rate_hz, num_input_channels);
+ fwd_out_format_.set(output_sample_rate_hz);
+ rev_in_format_.set(reverse_sample_rate_hz, num_reverse_channels);
-int AudioProcessingImpl::set_sample_rate_hz(int rate) {
- CriticalSectionScoped crit_scoped(crit_);
- if (rate == sample_rate_hz_) {
- return kNoError;
+ // We process at the closest native rate >= min(input rate, output rate)...
+ int min_proc_rate = std::min(fwd_in_format_.rate(), fwd_out_format_.rate());
+ int fwd_proc_rate;
+ if (min_proc_rate > kSampleRate16kHz) {
+ fwd_proc_rate = kSampleRate32kHz;
+ } else if (min_proc_rate > kSampleRate8kHz) {
+ fwd_proc_rate = kSampleRate16kHz;
+ } else {
+ fwd_proc_rate = kSampleRate8kHz;
}
- if (rate != kSampleRate8kHz &&
- rate != kSampleRate16kHz &&
- rate != kSampleRate32kHz) {
- return kBadParameterError;
+ // ...with one exception.
+ if (echo_control_mobile_->is_enabled() && min_proc_rate > kSampleRate16kHz) {
+ fwd_proc_rate = kSampleRate16kHz;
}
- if (echo_control_mobile_->is_enabled() && rate > kSampleRate16kHz) {
- LOG(LS_ERROR) << "AECM only supports 16 kHz or lower sample rates";
- return kUnsupportedComponentError;
+
+ fwd_proc_format_.set(fwd_proc_rate, num_output_channels);
+
+ // We normally process the reverse stream at 16 kHz. Unless...
+ int rev_proc_rate = kSampleRate16kHz;
+ if (fwd_proc_format_.rate() == kSampleRate8kHz) {
+ // ...the forward stream is at 8 kHz.
+ rev_proc_rate = kSampleRate8kHz;
+ } else {
+ if (rev_in_format_.rate() == kSampleRate32kHz) {
+ // ...or the input is at 32 kHz, in which case we use the splitting
+ // filter rather than the resampler.
+ rev_proc_rate = kSampleRate32kHz;
+ }
}
- sample_rate_hz_ = rate;
- samples_per_channel_ = rate / 100;
+ // TODO(ajm): Enable this.
+ // Always downmix the reverse stream to mono for analysis.
+ //rev_proc_format_.set(rev_proc_rate, 1);
+ rev_proc_format_.set(rev_proc_rate, rev_in_format_.num_channels());
- if (sample_rate_hz_ == kSampleRate32kHz) {
- split_sample_rate_hz_ = kSampleRate16kHz;
+ if (fwd_proc_format_.rate() == kSampleRate32kHz) {
+ split_rate_ = kSampleRate16kHz;
} else {
- split_sample_rate_hz_ = sample_rate_hz_;
+ split_rate_ = fwd_proc_format_.rate();
}
return InitializeLocked();
}
-int AudioProcessingImpl::sample_rate_hz() const {
+// Calls InitializeLocked() if any of the audio parameters have changed from
+// their current values.
+int AudioProcessingImpl::MaybeInitializeLocked(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ int num_input_channels,
+ int num_output_channels,
+ int num_reverse_channels) {
+ if (input_sample_rate_hz == fwd_in_format_.rate() &&
+ output_sample_rate_hz == fwd_out_format_.rate() &&
+ reverse_sample_rate_hz == rev_in_format_.rate() &&
+ num_input_channels == fwd_in_format_.num_channels() &&
+ num_output_channels == fwd_proc_format_.num_channels() &&
+ num_reverse_channels == rev_in_format_.num_channels()) {
+ return kNoError;
+ }
+
+ return InitializeLocked(input_sample_rate_hz,
+ output_sample_rate_hz,
+ reverse_sample_rate_hz,
+ num_input_channels,
+ num_output_channels,
+ num_reverse_channels);
+}
+
+void AudioProcessingImpl::SetExtraOptions(const Config& config) {
CriticalSectionScoped crit_scoped(crit_);
- return sample_rate_hz_;
+ std::list<ProcessingComponent*>::iterator it;
+ for (it = component_list_.begin(); it != component_list_.end(); ++it)
+ (*it)->SetExtraOptions(config);
+}
+
+int AudioProcessingImpl::EnableExperimentalNs(bool enable) {
+ return kNoError;
}
-int AudioProcessingImpl::set_num_reverse_channels(int channels) {
+int AudioProcessingImpl::input_sample_rate_hz() const {
CriticalSectionScoped crit_scoped(crit_);
- if (channels == num_reverse_channels_) {
- return kNoError;
- }
- // Only stereo supported currently.
- if (channels > 2 || channels < 1) {
- return kBadParameterError;
- }
+ return fwd_in_format_.rate();
+}
- num_reverse_channels_ = channels;
+int AudioProcessingImpl::sample_rate_hz() const {
+ CriticalSectionScoped crit_scoped(crit_);
+ return fwd_in_format_.rate();
+}
- return InitializeLocked();
+int AudioProcessingImpl::proc_sample_rate_hz() const {
+ return fwd_proc_format_.rate();
+}
+
+int AudioProcessingImpl::proc_split_sample_rate_hz() const {
+ return split_rate_;
}
int AudioProcessingImpl::num_reverse_channels() const {
- return num_reverse_channels_;
+ return rev_proc_format_.num_channels();
+}
+
+int AudioProcessingImpl::num_input_channels() const {
+ return fwd_in_format_.num_channels();
+}
+
+int AudioProcessingImpl::num_output_channels() const {
+ return fwd_proc_format_.num_channels();
+}
+
+void AudioProcessingImpl::set_output_will_be_muted(bool muted) {
+ output_will_be_muted_ = muted;
}
-int AudioProcessingImpl::set_num_channels(
- int input_channels,
- int output_channels) {
+bool AudioProcessingImpl::output_will_be_muted() const {
+ return output_will_be_muted_;
+}
+
+int AudioProcessingImpl::ProcessStream(const float* const* src,
+ int samples_per_channel,
+ int input_sample_rate_hz,
+ ChannelLayout input_layout,
+ int output_sample_rate_hz,
+ ChannelLayout output_layout,
+ float* const* dest) {
CriticalSectionScoped crit_scoped(crit_);
- if (input_channels == num_input_channels_ &&
- output_channels == num_output_channels_) {
- return kNoError;
- }
- if (output_channels > input_channels) {
- return kBadParameterError;
+ if (!src || !dest) {
+ return kNullPointerError;
}
- // Only stereo supported currently.
- if (input_channels > 2 || input_channels < 1 ||
- output_channels > 2 || output_channels < 1) {
- return kBadParameterError;
+
+ RETURN_ON_ERR(MaybeInitializeLocked(input_sample_rate_hz,
+ output_sample_rate_hz,
+ rev_in_format_.rate(),
+ ChannelsFromLayout(input_layout),
+ ChannelsFromLayout(output_layout),
+ rev_in_format_.num_channels()));
+ if (samples_per_channel != fwd_in_format_.samples_per_channel()) {
+ return kBadDataLengthError;
}
- num_input_channels_ = input_channels;
- num_output_channels_ = output_channels;
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+ if (debug_file_->Open()) {
+ event_msg_->set_type(audioproc::Event::STREAM);
+ audioproc::Stream* msg = event_msg_->mutable_stream();
+ const size_t channel_size = sizeof(float) * samples_per_channel;
+ for (int i = 0; i < fwd_in_format_.num_channels(); ++i)
+ msg->add_input_channel(src[i], channel_size);
+ }
+#endif
- return InitializeLocked();
-}
+ capture_audio_->CopyFrom(src, samples_per_channel, input_layout);
+ RETURN_ON_ERR(ProcessStreamLocked());
+ if (output_copy_needed(is_data_processed())) {
+ capture_audio_->CopyTo(fwd_out_format_.samples_per_channel(),
+ output_layout,
+ dest);
+ }
-int AudioProcessingImpl::num_input_channels() const {
- return num_input_channels_;
-}
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+ if (debug_file_->Open()) {
+ audioproc::Stream* msg = event_msg_->mutable_stream();
+ const size_t channel_size = sizeof(float) * samples_per_channel;
+ for (int i = 0; i < fwd_proc_format_.num_channels(); ++i)
+ msg->add_output_channel(dest[i], channel_size);
+ RETURN_ON_ERR(WriteMessageToDebugFile());
+ }
+#endif
-int AudioProcessingImpl::num_output_channels() const {
- return num_output_channels_;
+ return kNoError;
}
int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
CriticalSectionScoped crit_scoped(crit_);
- int err = kNoError;
-
- if (frame == NULL) {
+ if (!frame) {
return kNullPointerError;
}
-
- if (frame->sample_rate_hz_ != sample_rate_hz_) {
+ // Must be a native rate.
+ if (frame->sample_rate_hz_ != kSampleRate8kHz &&
+ frame->sample_rate_hz_ != kSampleRate16kHz &&
+ frame->sample_rate_hz_ != kSampleRate32kHz) {
return kBadSampleRateError;
}
-
- if (frame->num_channels_ != num_input_channels_) {
- return kBadNumberChannelsError;
+ if (echo_control_mobile_->is_enabled() &&
+ frame->sample_rate_hz_ > kSampleRate16kHz) {
+ LOG(LS_ERROR) << "AECM only supports 16 or 8 kHz sample rates";
+ return kUnsupportedComponentError;
}
- if (frame->samples_per_channel_ != samples_per_channel_) {
+ // TODO(ajm): The input and output rates and channels are currently
+ // constrained to be identical in the int16 interface.
+ RETURN_ON_ERR(MaybeInitializeLocked(frame->sample_rate_hz_,
+ frame->sample_rate_hz_,
+ rev_in_format_.rate(),
+ frame->num_channels_,
+ frame->num_channels_,
+ rev_in_format_.num_channels()));
+ if (frame->samples_per_channel_ != fwd_in_format_.samples_per_channel()) {
return kBadDataLengthError;
}
@@ -300,126 +435,142 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
frame->samples_per_channel_ *
frame->num_channels_;
msg->set_input_data(frame->data_, data_size);
- msg->set_delay(stream_delay_ms_);
- msg->set_drift(echo_cancellation_->stream_drift_samples());
- msg->set_level(gain_control_->stream_analog_level());
}
#endif
capture_audio_->DeinterleaveFrom(frame);
+ RETURN_ON_ERR(ProcessStreamLocked());
+ capture_audio_->InterleaveTo(frame, output_copy_needed(is_data_processed()));
- // TODO(ajm): experiment with mixing and AEC placement.
- if (num_output_channels_ < num_input_channels_) {
- capture_audio_->Mix(num_output_channels_);
- frame->num_channels_ = num_output_channels_;
- }
-
- bool data_processed = is_data_processed();
- if (analysis_needed(data_processed)) {
- for (int i = 0; i < num_output_channels_; i++) {
- // Split into a low and high band.
- SplittingFilterAnalysis(capture_audio_->data(i),
- capture_audio_->low_pass_split_data(i),
- capture_audio_->high_pass_split_data(i),
- capture_audio_->analysis_filter_state1(i),
- capture_audio_->analysis_filter_state2(i));
- }
- }
-
- err = high_pass_filter_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
- }
-
- err = gain_control_->AnalyzeCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+ if (debug_file_->Open()) {
+ audioproc::Stream* msg = event_msg_->mutable_stream();
+ const size_t data_size = sizeof(int16_t) *
+ frame->samples_per_channel_ *
+ frame->num_channels_;
+ msg->set_output_data(frame->data_, data_size);
+ RETURN_ON_ERR(WriteMessageToDebugFile());
}
+#endif
- err = echo_cancellation_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
- }
+ return kNoError;
+}
- if (echo_control_mobile_->is_enabled() &&
- noise_suppression_->is_enabled()) {
- capture_audio_->CopyLowPassToReference();
- }
- err = noise_suppression_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
+int AudioProcessingImpl::ProcessStreamLocked() {
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+ if (debug_file_->Open()) {
+ audioproc::Stream* msg = event_msg_->mutable_stream();
+ msg->set_delay(stream_delay_ms_);
+ msg->set_drift(echo_cancellation_->stream_drift_samples());
+ msg->set_level(gain_control_->stream_analog_level());
+ msg->set_keypress(key_pressed_);
}
+#endif
- err = echo_control_mobile_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
+ AudioBuffer* ca = capture_audio_.get(); // For brevity.
+ bool data_processed = is_data_processed();
+ if (analysis_needed(data_processed)) {
+ for (int i = 0; i < fwd_proc_format_.num_channels(); i++) {
+ // Split into a low and high band.
+ WebRtcSpl_AnalysisQMF(ca->data(i),
+ ca->samples_per_channel(),
+ ca->low_pass_split_data(i),
+ ca->high_pass_split_data(i),
+ ca->filter_states(i)->analysis_filter_state1,
+ ca->filter_states(i)->analysis_filter_state2);
+ }
}
- err = voice_detection_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
- }
+ RETURN_ON_ERR(high_pass_filter_->ProcessCaptureAudio(ca));
+ RETURN_ON_ERR(gain_control_->AnalyzeCaptureAudio(ca));
+ RETURN_ON_ERR(echo_cancellation_->ProcessCaptureAudio(ca));
- err = gain_control_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
+ if (echo_control_mobile_->is_enabled() && noise_suppression_->is_enabled()) {
+ ca->CopyLowPassToReference();
}
+ RETURN_ON_ERR(noise_suppression_->ProcessCaptureAudio(ca));
+ RETURN_ON_ERR(echo_control_mobile_->ProcessCaptureAudio(ca));
+ RETURN_ON_ERR(voice_detection_->ProcessCaptureAudio(ca));
+ RETURN_ON_ERR(gain_control_->ProcessCaptureAudio(ca));
if (synthesis_needed(data_processed)) {
- for (int i = 0; i < num_output_channels_; i++) {
+ for (int i = 0; i < fwd_proc_format_.num_channels(); i++) {
// Recombine low and high bands.
- SplittingFilterSynthesis(capture_audio_->low_pass_split_data(i),
- capture_audio_->high_pass_split_data(i),
- capture_audio_->data(i),
- capture_audio_->synthesis_filter_state1(i),
- capture_audio_->synthesis_filter_state2(i));
+ WebRtcSpl_SynthesisQMF(ca->low_pass_split_data(i),
+ ca->high_pass_split_data(i),
+ ca->samples_per_split_channel(),
+ ca->data(i),
+ ca->filter_states(i)->synthesis_filter_state1,
+ ca->filter_states(i)->synthesis_filter_state2);
}
}
// The level estimator operates on the recombined data.
- err = level_estimator_->ProcessStream(capture_audio_);
- if (err != kNoError) {
- return err;
+ RETURN_ON_ERR(level_estimator_->ProcessStream(ca));
+
+ was_stream_delay_set_ = false;
+ return kNoError;
+}
+
+int AudioProcessingImpl::AnalyzeReverseStream(const float* const* data,
+ int samples_per_channel,
+ int sample_rate_hz,
+ ChannelLayout layout) {
+ CriticalSectionScoped crit_scoped(crit_);
+ if (data == NULL) {
+ return kNullPointerError;
}
- capture_audio_->InterleaveTo(frame, interleave_needed(data_processed));
+ const int num_channels = ChannelsFromLayout(layout);
+ RETURN_ON_ERR(MaybeInitializeLocked(fwd_in_format_.rate(),
+ fwd_out_format_.rate(),
+ sample_rate_hz,
+ fwd_in_format_.num_channels(),
+ fwd_proc_format_.num_channels(),
+ num_channels));
+ if (samples_per_channel != rev_in_format_.samples_per_channel()) {
+ return kBadDataLengthError;
+ }
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
if (debug_file_->Open()) {
- audioproc::Stream* msg = event_msg_->mutable_stream();
- const size_t data_size = sizeof(int16_t) *
- frame->samples_per_channel_ *
- frame->num_channels_;
- msg->set_output_data(frame->data_, data_size);
- err = WriteMessageToDebugFile();
- if (err != kNoError) {
- return err;
- }
+ event_msg_->set_type(audioproc::Event::REVERSE_STREAM);
+ audioproc::ReverseStream* msg = event_msg_->mutable_reverse_stream();
+ const size_t channel_size = sizeof(float) * samples_per_channel;
+ for (int i = 0; i < num_channels; ++i)
+ msg->add_channel(data[i], channel_size);
+ RETURN_ON_ERR(WriteMessageToDebugFile());
}
#endif
- was_stream_delay_set_ = false;
- return kNoError;
+ render_audio_->CopyFrom(data, samples_per_channel, layout);
+ return AnalyzeReverseStreamLocked();
}
int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
CriticalSectionScoped crit_scoped(crit_);
- int err = kNoError;
-
if (frame == NULL) {
return kNullPointerError;
}
-
- if (frame->sample_rate_hz_ != sample_rate_hz_) {
+ // Must be a native rate.
+ if (frame->sample_rate_hz_ != kSampleRate8kHz &&
+ frame->sample_rate_hz_ != kSampleRate16kHz &&
+ frame->sample_rate_hz_ != kSampleRate32kHz) {
return kBadSampleRateError;
}
-
- if (frame->num_channels_ != num_reverse_channels_) {
- return kBadNumberChannelsError;
+ // This interface does not tolerate different forward and reverse rates.
+ if (frame->sample_rate_hz_ != fwd_in_format_.rate()) {
+ return kBadSampleRateError;
}
- if (frame->samples_per_channel_ != samples_per_channel_) {
+ RETURN_ON_ERR(MaybeInitializeLocked(fwd_in_format_.rate(),
+ fwd_out_format_.rate(),
+ frame->sample_rate_hz_,
+ fwd_in_format_.num_channels(),
+ fwd_in_format_.num_channels(),
+ frame->num_channels_));
+ if (frame->samples_per_channel_ != rev_in_format_.samples_per_channel()) {
return kBadDataLengthError;
}
@@ -431,44 +582,33 @@ int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
frame->samples_per_channel_ *
frame->num_channels_;
msg->set_data(frame->data_, data_size);
- err = WriteMessageToDebugFile();
- if (err != kNoError) {
- return err;
- }
+ RETURN_ON_ERR(WriteMessageToDebugFile());
}
#endif
render_audio_->DeinterleaveFrom(frame);
+ return AnalyzeReverseStreamLocked();
+}
- // TODO(ajm): turn the splitting filter into a component?
- if (sample_rate_hz_ == kSampleRate32kHz) {
- for (int i = 0; i < num_reverse_channels_; i++) {
+int AudioProcessingImpl::AnalyzeReverseStreamLocked() {
+ AudioBuffer* ra = render_audio_.get(); // For brevity.
+ if (rev_proc_format_.rate() == kSampleRate32kHz) {
+ for (int i = 0; i < rev_proc_format_.num_channels(); i++) {
// Split into low and high band.
- SplittingFilterAnalysis(render_audio_->data(i),
- render_audio_->low_pass_split_data(i),
- render_audio_->high_pass_split_data(i),
- render_audio_->analysis_filter_state1(i),
- render_audio_->analysis_filter_state2(i));
+ WebRtcSpl_AnalysisQMF(ra->data(i),
+ ra->samples_per_channel(),
+ ra->low_pass_split_data(i),
+ ra->high_pass_split_data(i),
+ ra->filter_states(i)->analysis_filter_state1,
+ ra->filter_states(i)->analysis_filter_state2);
}
}
- // TODO(ajm): warnings possible from components?
- err = echo_cancellation_->ProcessRenderAudio(render_audio_);
- if (err != kNoError) {
- return err;
- }
-
- err = echo_control_mobile_->ProcessRenderAudio(render_audio_);
- if (err != kNoError) {
- return err;
- }
+ RETURN_ON_ERR(echo_cancellation_->ProcessRenderAudio(ra));
+ RETURN_ON_ERR(echo_control_mobile_->ProcessRenderAudio(ra));
+ RETURN_ON_ERR(gain_control_->ProcessRenderAudio(ra));
- err = gain_control_->ProcessRenderAudio(render_audio_);
- if (err != kNoError) {
- return err;
- }
-
- return err; // TODO(ajm): this is for returning warnings; necessary?
+ return kNoError;
}
int AudioProcessingImpl::set_stream_delay_ms(int delay) {
@@ -499,6 +639,14 @@ bool AudioProcessingImpl::was_stream_delay_set() const {
return was_stream_delay_set_;
}
+void AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) {
+ key_pressed_ = key_pressed;
+}
+
+bool AudioProcessingImpl::stream_key_pressed() const {
+ return key_pressed_;
+}
+
void AudioProcessingImpl::set_delay_offset_ms(int offset) {
CriticalSectionScoped crit_scoped(crit_);
delay_offset_ms_ = offset;
@@ -613,13 +761,6 @@ VoiceDetection* AudioProcessingImpl::voice_detection() const {
return voice_detection_;
}
-int32_t AudioProcessingImpl::ChangeUniqueId(const int32_t id) {
- CriticalSectionScoped crit_scoped(crit_);
- id_ = id;
-
- return kNoError;
-}
-
bool AudioProcessingImpl::is_data_processed() const {
int enabled_count = 0;
std::list<ProcessingComponent*>::const_iterator it;
@@ -645,20 +786,21 @@ bool AudioProcessingImpl::is_data_processed() const {
return true;
}
-bool AudioProcessingImpl::interleave_needed(bool is_data_processed) const {
+bool AudioProcessingImpl::output_copy_needed(bool is_data_processed) const {
// Check if we've upmixed or downmixed the audio.
- return (num_output_channels_ != num_input_channels_ || is_data_processed);
+ return ((fwd_proc_format_.num_channels() != fwd_in_format_.num_channels()) ||
+ is_data_processed);
}
bool AudioProcessingImpl::synthesis_needed(bool is_data_processed) const {
- return (is_data_processed && sample_rate_hz_ == kSampleRate32kHz);
+ return (is_data_processed && fwd_proc_format_.rate() == kSampleRate32kHz);
}
bool AudioProcessingImpl::analysis_needed(bool is_data_processed) const {
if (!is_data_processed && !voice_detection_->is_enabled()) {
// Only level_estimator_ is enabled.
return false;
- } else if (sample_rate_hz_ == kSampleRate32kHz) {
+ } else if (fwd_proc_format_.rate() == kSampleRate32kHz) {
// Something besides level_estimator_ is enabled, and we have super-wb.
return true;
}
@@ -690,17 +832,18 @@ int AudioProcessingImpl::WriteMessageToDebugFile() {
event_msg_->Clear();
- return 0;
+ return kNoError;
}
int AudioProcessingImpl::WriteInitMessage() {
event_msg_->set_type(audioproc::Event::INIT);
audioproc::Init* msg = event_msg_->mutable_init();
- msg->set_sample_rate(sample_rate_hz_);
- msg->set_device_sample_rate(echo_cancellation_->device_sample_rate_hz());
- msg->set_num_input_channels(num_input_channels_);
- msg->set_num_output_channels(num_output_channels_);
- msg->set_num_reverse_channels(num_reverse_channels_);
+ msg->set_sample_rate(fwd_in_format_.rate());
+ msg->set_num_input_channels(fwd_in_format_.num_channels());
+ msg->set_num_output_channels(fwd_proc_format_.num_channels());
+ msg->set_num_reverse_channels(rev_in_format_.num_channels());
+ msg->set_reverse_sample_rate(rev_in_format_.rate());
+ msg->set_output_sample_rate(fwd_out_format_.rate());
int err = WriteMessageToDebugFile();
if (err != kNoError) {
@@ -710,4 +853,5 @@ int AudioProcessingImpl::WriteInitMessage() {
return kNoError;
}
#endif // WEBRTC_AUDIOPROC_DEBUG_DUMP
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
index e48a2c18a4f..d34f305a96b 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
@@ -19,9 +19,10 @@
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
+
class AudioBuffer;
class CriticalSectionWrapper;
-class EchoCancellationImplWrapper;
+class EchoCancellationImpl;
class EchoControlMobileImpl;
class FileWrapper;
class GainControlImpl;
@@ -39,44 +40,92 @@ class Event;
} // namespace audioproc
#endif
-class AudioProcessingImpl : public AudioProcessing {
+class AudioRate {
public:
- enum {
- kSampleRate8kHz = 8000,
- kSampleRate16kHz = 16000,
- kSampleRate32kHz = 32000
- };
+ explicit AudioRate(int sample_rate_hz)
+ : rate_(sample_rate_hz),
+ samples_per_channel_(AudioProcessing::kChunkSizeMs * rate_ / 1000) {}
+ virtual ~AudioRate() {}
+
+ void set(int rate) {
+ rate_ = rate;
+ samples_per_channel_ = AudioProcessing::kChunkSizeMs * rate_ / 1000;
+ }
- explicit AudioProcessingImpl(int id);
- virtual ~AudioProcessingImpl();
+ int rate() const { return rate_; }
+ int samples_per_channel() const { return samples_per_channel_; }
- CriticalSectionWrapper* crit() const;
+ private:
+ int rate_;
+ int samples_per_channel_;
+};
- int split_sample_rate_hz() const;
- bool was_stream_delay_set() const;
+class AudioFormat : public AudioRate {
+ public:
+ AudioFormat(int sample_rate_hz, int num_channels)
+ : AudioRate(sample_rate_hz),
+ num_channels_(num_channels) {}
+ virtual ~AudioFormat() {}
+
+ void set(int rate, int num_channels) {
+ AudioRate::set(rate);
+ num_channels_ = num_channels;
+ }
+
+ int num_channels() const { return num_channels_; }
+
+ private:
+ int num_channels_;
+};
+
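These two small helpers replace the loose sample_rate_hz_/split_sample_rate_hz_/samples_per_channel_ members removed further down in this diff. A quick illustrative sketch of the values they track (not part of the patch; the numbers follow from the constructors above with AudioProcessing::kChunkSizeMs == 10):

AudioFormat fwd_in(32000, 2);  // super-wideband, stereo
// fwd_in.rate() == 32000
// fwd_in.samples_per_channel() == 10 * 32000 / 1000 == 320
// fwd_in.num_channels() == 2
fwd_in.set(16000, 1);          // re-initialize for wideband, mono
// fwd_in.samples_per_channel() == 160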
+class AudioProcessingImpl : public AudioProcessing {
+ public:
+ explicit AudioProcessingImpl(const Config& config);
+ virtual ~AudioProcessingImpl();
// AudioProcessing methods.
virtual int Initialize() OVERRIDE;
- virtual int InitializeLocked();
+ virtual int Initialize(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ ChannelLayout input_layout,
+ ChannelLayout output_layout,
+ ChannelLayout reverse_layout) OVERRIDE;
virtual void SetExtraOptions(const Config& config) OVERRIDE;
virtual int EnableExperimentalNs(bool enable) OVERRIDE;
virtual bool experimental_ns_enabled() const OVERRIDE {
return false;
}
virtual int set_sample_rate_hz(int rate) OVERRIDE;
+ virtual int input_sample_rate_hz() const OVERRIDE;
virtual int sample_rate_hz() const OVERRIDE;
- virtual int set_num_channels(int input_channels,
- int output_channels) OVERRIDE;
+ virtual int proc_sample_rate_hz() const OVERRIDE;
+ virtual int proc_split_sample_rate_hz() const OVERRIDE;
virtual int num_input_channels() const OVERRIDE;
virtual int num_output_channels() const OVERRIDE;
- virtual int set_num_reverse_channels(int channels) OVERRIDE;
virtual int num_reverse_channels() const OVERRIDE;
+ virtual void set_output_will_be_muted(bool muted) OVERRIDE;
+ virtual bool output_will_be_muted() const OVERRIDE;
virtual int ProcessStream(AudioFrame* frame) OVERRIDE;
+ virtual int ProcessStream(const float* const* src,
+ int samples_per_channel,
+ int input_sample_rate_hz,
+ ChannelLayout input_layout,
+ int output_sample_rate_hz,
+ ChannelLayout output_layout,
+ float* const* dest) OVERRIDE;
virtual int AnalyzeReverseStream(AudioFrame* frame) OVERRIDE;
+ virtual int AnalyzeReverseStream(const float* const* data,
+ int samples_per_channel,
+ int sample_rate_hz,
+ ChannelLayout layout) OVERRIDE;
virtual int set_stream_delay_ms(int delay) OVERRIDE;
virtual int stream_delay_ms() const OVERRIDE;
+ virtual bool was_stream_delay_set() const OVERRIDE;
virtual void set_delay_offset_ms(int offset) OVERRIDE;
virtual int delay_offset_ms() const OVERRIDE;
+ virtual void set_stream_key_pressed(bool key_pressed) OVERRIDE;
+ virtual bool stream_key_pressed() const OVERRIDE;
virtual int StartDebugRecording(
const char filename[kMaxFilenameSize]) OVERRIDE;
virtual int StartDebugRecording(FILE* handle) OVERRIDE;
@@ -89,18 +138,32 @@ class AudioProcessingImpl : public AudioProcessing {
virtual NoiseSuppression* noise_suppression() const OVERRIDE;
virtual VoiceDetection* voice_detection() const OVERRIDE;
- // Module methods.
- virtual int32_t ChangeUniqueId(const int32_t id) OVERRIDE;
+ protected:
+ // Overridden in a mock.
+ virtual int InitializeLocked();
private:
+ int InitializeLocked(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ int num_input_channels,
+ int num_output_channels,
+ int num_reverse_channels);
+ int MaybeInitializeLocked(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ int num_input_channels,
+ int num_output_channels,
+ int num_reverse_channels);
+ int ProcessStreamLocked();
+ int AnalyzeReverseStreamLocked();
+
bool is_data_processed() const;
- bool interleave_needed(bool is_data_processed) const;
+ bool output_copy_needed(bool is_data_processed) const;
bool synthesis_needed(bool is_data_processed) const;
bool analysis_needed(bool is_data_processed) const;
- int id_;
-
- EchoCancellationImplWrapper* echo_cancellation_;
+ EchoCancellationImpl* echo_cancellation_;
EchoControlMobileImpl* echo_control_mobile_;
GainControlImpl* gain_control_;
HighPassFilterImpl* high_pass_filter_;
@@ -110,29 +173,34 @@ class AudioProcessingImpl : public AudioProcessing {
std::list<ProcessingComponent*> component_list_;
CriticalSectionWrapper* crit_;
- AudioBuffer* render_audio_;
- AudioBuffer* capture_audio_;
+ scoped_ptr<AudioBuffer> render_audio_;
+ scoped_ptr<AudioBuffer> capture_audio_;
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
// TODO(andrew): make this more graceful. Ideally we would split this stuff
// out into a separate class with an "enabled" and "disabled" implementation.
int WriteMessageToDebugFile();
int WriteInitMessage();
scoped_ptr<FileWrapper> debug_file_;
- scoped_ptr<audioproc::Event> event_msg_; // Protobuf message.
- std::string event_str_; // Memory for protobuf serialization.
+ scoped_ptr<audioproc::Event> event_msg_; // Protobuf message.
+ std::string event_str_; // Memory for protobuf serialization.
#endif
- int sample_rate_hz_;
- int split_sample_rate_hz_;
- int samples_per_channel_;
+ AudioFormat fwd_in_format_;
+ AudioFormat fwd_proc_format_;
+ AudioRate fwd_out_format_;
+ AudioFormat rev_in_format_;
+ AudioFormat rev_proc_format_;
+ int split_rate_;
+
int stream_delay_ms_;
int delay_offset_ms_;
bool was_stream_delay_set_;
- int num_reverse_channels_;
- int num_input_channels_;
- int num_output_channels_;
+ bool output_will_be_muted_;
+
+ bool key_pressed_;
};
+
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_PROCESSING_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc
new file mode 100644
index 00000000000..09576175756
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/audio_processing_impl.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/config.h"
+#include "webrtc/modules/audio_processing/test/test_utils.h"
+#include "webrtc/modules/interface/module_common_types.h"
+
+using ::testing::Invoke;
+using ::testing::Return;
+
+namespace webrtc {
+
+class MockInitialize : public AudioProcessingImpl {
+ public:
+ explicit MockInitialize(const Config& config) : AudioProcessingImpl(config) {
+ }
+
+ MOCK_METHOD0(InitializeLocked, int());
+ int RealInitializeLocked() { return AudioProcessingImpl::InitializeLocked(); }
+};
+
+TEST(AudioProcessingImplTest, AudioParameterChangeTriggersInit) {
+ Config config;
+ MockInitialize mock(config);
+ ON_CALL(mock, InitializeLocked())
+ .WillByDefault(Invoke(&mock, &MockInitialize::RealInitializeLocked));
+
+ EXPECT_CALL(mock, InitializeLocked()).Times(1);
+ mock.Initialize();
+
+ AudioFrame frame;
+ // Call with the default parameters; there should be no init.
+ frame.num_channels_ = 1;
+ SetFrameSampleRate(&frame, 16000);
+ EXPECT_CALL(mock, InitializeLocked())
+ .Times(0);
+ EXPECT_NOERR(mock.ProcessStream(&frame));
+ EXPECT_NOERR(mock.AnalyzeReverseStream(&frame));
+
+ // New sample rate. (Only impacts ProcessStream).
+ SetFrameSampleRate(&frame, 32000);
+ EXPECT_CALL(mock, InitializeLocked())
+ .Times(1);
+ EXPECT_NOERR(mock.ProcessStream(&frame));
+
+ // New number of channels.
+ frame.num_channels_ = 2;
+ EXPECT_CALL(mock, InitializeLocked())
+ .Times(2);
+ EXPECT_NOERR(mock.ProcessStream(&frame));
+ // ProcessStream sets num_channels_ == num_output_channels.
+ frame.num_channels_ = 2;
+ EXPECT_NOERR(mock.AnalyzeReverseStream(&frame));
+
+ // A new sample rate passed to AnalyzeReverseStream should be an error and
+ // not cause an init.
+ SetFrameSampleRate(&frame, 16000);
+ EXPECT_CALL(mock, InitializeLocked())
+ .Times(0);
+ EXPECT_EQ(mock.kBadSampleRateError, mock.AnalyzeReverseStream(&frame));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_tests.gypi b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_tests.gypi
index 05d7514bded..82aa7fd14fd 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_tests.gypi
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_tests.gypi
@@ -7,25 +7,23 @@
# be found in the AUTHORS file in the root of the source tree.
{
- 'targets': [
- {
- 'target_name': 'audioproc_unittest_proto',
- 'type': 'static_library',
- 'sources': [ 'test/unittest.proto', ],
- 'variables': {
- 'proto_in_dir': 'test',
- # Workaround to protect against gyp's pathname relativization when this
- # file is included by modules.gyp.
- 'proto_out_protected': 'webrtc/audio_processing',
- 'proto_out_dir': '<(proto_out_protected)',
- },
- 'includes': [ '../../build/protoc.gypi', ],
- },
- ],
'conditions': [
['enable_protobuf==1', {
'targets': [
{
+ 'target_name': 'audioproc_unittest_proto',
+ 'type': 'static_library',
+ 'sources': [ 'test/unittest.proto', ],
+ 'variables': {
+ 'proto_in_dir': 'test',
+ # Workaround to protect against gyp's pathname relativization when
+ # this file is included by modules.gyp.
+ 'proto_out_protected': 'webrtc/audio_processing',
+ 'proto_out_dir': '<(proto_out_protected)',
+ },
+ 'includes': [ '../../build/protoc.gypi', ],
+ },
+ {
'target_name': 'audioproc',
'type': 'executable',
'dependencies': [
diff --git a/chromium/third_party/webrtc/modules/audio_processing/common.h b/chromium/third_party/webrtc/modules/audio_processing/common.h
new file mode 100644
index 00000000000..42454df299f
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/common.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_COMMON_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_COMMON_H_
+
+#include <assert.h>
+#include <string.h>
+
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+static inline int ChannelsFromLayout(AudioProcessing::ChannelLayout layout) {
+ switch (layout) {
+ case AudioProcessing::kMono:
+ case AudioProcessing::kMonoAndKeyboard:
+ return 1;
+ case AudioProcessing::kStereo:
+ case AudioProcessing::kStereoAndKeyboard:
+ return 2;
+ }
+ assert(false);
+ return -1;
+}
+
+// Helper to encapsulate a contiguous data buffer with access to a pointer
+// array of the deinterleaved channels.
+template <typename T>
+class ChannelBuffer {
+ public:
+ ChannelBuffer(int samples_per_channel, int num_channels)
+ : data_(new T[samples_per_channel * num_channels]),
+ channels_(new T*[num_channels]),
+ samples_per_channel_(samples_per_channel),
+ num_channels_(num_channels) {
+ memset(data_.get(), 0, sizeof(T) * samples_per_channel * num_channels);
+ for (int i = 0; i < num_channels; ++i)
+ channels_[i] = &data_[i * samples_per_channel];
+ }
+ ~ChannelBuffer() {}
+
+ void CopyFrom(const void* channel_ptr, int i) {
+ assert(i < num_channels_);
+ memcpy(channels_[i], channel_ptr, samples_per_channel_ * sizeof(T));
+ }
+
+ T* data() { return data_.get(); }
+ T* channel(int i) {
+ assert(i < num_channels_);
+ return channels_[i];
+ }
+ T** channels() { return channels_.get(); }
+
+ int samples_per_channel() { return samples_per_channel_; }
+ int num_channels() { return num_channels_; }
+ int length() { return samples_per_channel_ * num_channels_; }
+
+ private:
+ scoped_ptr<T[]> data_;
+ scoped_ptr<T*[]> channels_;
+ int samples_per_channel_;
+ int num_channels_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_COMMON_H_
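A minimal sketch of how these two helpers fit together (illustrative only; the 48 kHz stereo figures and the |left| pointer are assumptions):

// 10 ms of deinterleaved float audio at 48 kHz.
const int samples_per_channel = 480;
const int num_channels = ChannelsFromLayout(AudioProcessing::kStereo);  // == 2
ChannelBuffer<float> buf(samples_per_channel, num_channels);
buf.CopyFrom(left, 0);  // |left| is a hypothetical float[480] channel buffer.
// buf.channels() yields a float** suitable for the float ProcessStream() API;
// buf.channel(1) points into the same contiguous, zero-initialized block.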
diff --git a/chromium/third_party/webrtc/modules/audio_processing/debug.proto b/chromium/third_party/webrtc/modules/audio_processing/debug.proto
index 4b3a1638941..dce2f792093 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/debug.proto
+++ b/chromium/third_party/webrtc/modules/audio_processing/debug.proto
@@ -4,22 +4,39 @@ package webrtc.audioproc;
message Init {
optional int32 sample_rate = 1;
- optional int32 device_sample_rate = 2;
+ optional int32 device_sample_rate = 2 [deprecated=true];
optional int32 num_input_channels = 3;
optional int32 num_output_channels = 4;
optional int32 num_reverse_channels = 5;
+ optional int32 reverse_sample_rate = 6;
+ optional int32 output_sample_rate = 7;
}
+// May contain interleaved or deinterleaved data, but don't store both formats.
message ReverseStream {
+ // int16 interleaved data.
optional bytes data = 1;
+
+ // float deinterleaved data, where each repeated element points to a single
+ // channel buffer of data.
+ repeated bytes channel = 2;
}
+// May contain interleaved or deinterleaved data, but don't store both formats.
message Stream {
+ // int16 interleaved data.
optional bytes input_data = 1;
optional bytes output_data = 2;
+
optional int32 delay = 3;
optional sint32 drift = 4;
optional int32 level = 5;
+ optional bool keypress = 6;
+
+ // float deinterleaved data, where each repeated element points to a single
+ // channel buffer of data.
+ repeated bytes input_channel = 7;
+ repeated bytes output_channel = 8;
}
message Event {
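A hedged sketch of how the new repeated |channel| field of ReverseStream might be filled for one deinterleaved float chunk (only the field names come from the .proto above; the generated header path and the Event plumbing around it are assumptions):

#include "webrtc/audio_processing/debug.pb.h"  // generated header; path is an assumption

void StoreReverseChunk(const float* left, const float* right, int samples) {
  webrtc::audioproc::ReverseStream msg;
  // One bytes entry per channel; the int16 |data| field is left unset.
  msg.add_channel(left, samples * sizeof(float));
  msg.add_channel(right, samples * sizeof(float));
  // The message would then be wrapped in an Event and written to the debug file.
}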
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.cc
index cd12363ec5e..e770f9fe377 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.cc
@@ -18,7 +18,6 @@ extern "C" {
}
#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
namespace webrtc {
@@ -56,23 +55,20 @@ AudioProcessing::Error MapError(int err) {
}
} // namespace
-EchoCancellationImplWrapper* EchoCancellationImplWrapper::Create(
- const AudioProcessingImpl* audioproc) {
- return new EchoCancellationImpl(audioproc);
-}
-
-EchoCancellationImpl::EchoCancellationImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
+EchoCancellationImpl::EchoCancellationImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
apm_(apm),
+ crit_(crit),
drift_compensation_enabled_(false),
metrics_enabled_(false),
suppression_level_(kModerateSuppression),
- device_sample_rate_hz_(48000),
stream_drift_samples_(0),
was_stream_drift_set_(false),
stream_has_echo_(false),
delay_logging_enabled_(false),
- delay_correction_enabled_(false) {}
+ delay_correction_enabled_(false),
+ reported_delay_enabled_(true) {}
EchoCancellationImpl::~EchoCancellationImpl() {}
@@ -133,10 +129,10 @@ int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
Handle* my_handle = handle(handle_index);
err = WebRtcAec_Process(
my_handle,
- audio->low_pass_split_data(i),
- audio->high_pass_split_data(i),
- audio->low_pass_split_data(i),
- audio->high_pass_split_data(i),
+ audio->low_pass_split_data_f(i),
+ audio->high_pass_split_data_f(i),
+ audio->low_pass_split_data_f(i),
+ audio->high_pass_split_data_f(i),
static_cast<int16_t>(audio->samples_per_split_channel()),
apm_->stream_delay_ms(),
stream_drift_samples_);
@@ -168,7 +164,7 @@ int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
int EchoCancellationImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
// Ensure AEC and AECM are not both enabled.
if (enable && apm_->echo_control_mobile()->is_enabled()) {
return apm_->kBadParameterError;
@@ -182,7 +178,7 @@ bool EchoCancellationImpl::is_enabled() const {
}
int EchoCancellationImpl::set_suppression_level(SuppressionLevel level) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(level) == -1) {
return apm_->kBadParameterError;
}
@@ -197,7 +193,7 @@ EchoCancellation::SuppressionLevel EchoCancellationImpl::suppression_level()
}
int EchoCancellationImpl::enable_drift_compensation(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
drift_compensation_enabled_ = enable;
return Configure();
}
@@ -206,20 +202,6 @@ bool EchoCancellationImpl::is_drift_compensation_enabled() const {
return drift_compensation_enabled_;
}
-int EchoCancellationImpl::set_device_sample_rate_hz(int rate) {
- CriticalSectionScoped crit_scoped(apm_->crit());
- if (rate < 8000 || rate > 96000) {
- return apm_->kBadParameterError;
- }
-
- device_sample_rate_hz_ = rate;
- return Initialize();
-}
-
-int EchoCancellationImpl::device_sample_rate_hz() const {
- return device_sample_rate_hz_;
-}
-
void EchoCancellationImpl::set_stream_drift_samples(int drift) {
was_stream_drift_set_ = true;
stream_drift_samples_ = drift;
@@ -230,7 +212,7 @@ int EchoCancellationImpl::stream_drift_samples() const {
}
int EchoCancellationImpl::enable_metrics(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
metrics_enabled_ = enable;
return Configure();
}
@@ -242,7 +224,7 @@ bool EchoCancellationImpl::are_metrics_enabled() const {
// TODO(ajm): we currently just use the metrics from the first AEC. Think more
about the best way to extend this to multi-channel.
int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (metrics == NULL) {
return apm_->kNullPointerError;
}
@@ -289,7 +271,7 @@ bool EchoCancellationImpl::stream_has_echo() const {
}
int EchoCancellationImpl::enable_delay_logging(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
delay_logging_enabled_ = enable;
return Configure();
}
@@ -300,7 +282,7 @@ bool EchoCancellationImpl::is_delay_logging_enabled() const {
// TODO(bjornv): How should we handle the multi-channel case?
int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (median == NULL) {
return apm_->kNullPointerError;
}
@@ -322,7 +304,7 @@ int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
}
struct AecCore* EchoCancellationImpl::aec_core() const {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (!is_component_enabled()) {
return NULL;
}
@@ -336,13 +318,12 @@ int EchoCancellationImpl::Initialize() {
return err;
}
- was_stream_drift_set_ = false;
-
return apm_->kNoError;
}
void EchoCancellationImpl::SetExtraOptions(const Config& config) {
delay_correction_enabled_ = config.Get<DelayCorrection>().enabled;
+ reported_delay_enabled_ = config.Get<ReportedDelay>().enabled;
Configure();
}
@@ -357,16 +338,19 @@ void* EchoCancellationImpl::CreateHandle() const {
return handle;
}
-int EchoCancellationImpl::DestroyHandle(void* handle) const {
+void EchoCancellationImpl::DestroyHandle(void* handle) const {
assert(handle != NULL);
- return WebRtcAec_Free(static_cast<Handle*>(handle));
+ WebRtcAec_Free(static_cast<Handle*>(handle));
}
int EchoCancellationImpl::InitializeHandle(void* handle) const {
assert(handle != NULL);
+ // TODO(ajm): Drift compensation is disabled in practice. If restored, it
+ // should be managed internally and not depend on the hardware sample rate.
+ // For now, just hardcode a 48 kHz value.
return WebRtcAec_Init(static_cast<Handle*>(handle),
- apm_->sample_rate_hz(),
- device_sample_rate_hz_);
+ apm_->proc_sample_rate_hz(),
+ 48000);
}
int EchoCancellationImpl::ConfigureHandle(void* handle) const {
@@ -379,6 +363,8 @@ int EchoCancellationImpl::ConfigureHandle(void* handle) const {
WebRtcAec_enable_delay_correction(WebRtcAec_aec_core(
static_cast<Handle*>(handle)), delay_correction_enabled_ ? 1 : 0);
+ WebRtcAec_enable_reported_delay(WebRtcAec_aec_core(
+ static_cast<Handle*>(handle)), reported_delay_enabled_ ? 1 : 0);
return WebRtcAec_set_config(static_cast<Handle*>(handle), config);
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.h b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.h
index 3ab0ce26689..b9c116a0650 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.h
@@ -11,25 +11,26 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
-#include "webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
class AudioBuffer;
+class CriticalSectionWrapper;
-class EchoCancellationImpl : public EchoCancellationImplWrapper {
+class EchoCancellationImpl : public EchoCancellation,
+ public ProcessingComponent {
public:
- explicit EchoCancellationImpl(const AudioProcessingImpl* apm);
+ EchoCancellationImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit);
virtual ~EchoCancellationImpl();
- // EchoCancellationImplWrapper implementation.
- virtual int ProcessRenderAudio(const AudioBuffer* audio) OVERRIDE;
- virtual int ProcessCaptureAudio(AudioBuffer* audio) OVERRIDE;
+ int ProcessRenderAudio(const AudioBuffer* audio);
+ int ProcessCaptureAudio(AudioBuffer* audio);
// EchoCancellation implementation.
virtual bool is_enabled() const OVERRIDE;
- virtual int device_sample_rate_hz() const OVERRIDE;
virtual int stream_drift_samples() const OVERRIDE;
// ProcessingComponent implementation.
@@ -41,7 +42,6 @@ class EchoCancellationImpl : public EchoCancellationImplWrapper {
virtual int Enable(bool enable) OVERRIDE;
virtual int enable_drift_compensation(bool enable) OVERRIDE;
virtual bool is_drift_compensation_enabled() const OVERRIDE;
- virtual int set_device_sample_rate_hz(int rate) OVERRIDE;
virtual void set_stream_drift_samples(int drift) OVERRIDE;
virtual int set_suppression_level(SuppressionLevel level) OVERRIDE;
virtual SuppressionLevel suppression_level() const OVERRIDE;
@@ -58,20 +58,21 @@ class EchoCancellationImpl : public EchoCancellationImplWrapper {
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
bool drift_compensation_enabled_;
bool metrics_enabled_;
SuppressionLevel suppression_level_;
- int device_sample_rate_hz_;
int stream_drift_samples_;
bool was_stream_drift_set_;
bool stream_has_echo_;
bool delay_logging_enabled_;
bool delay_correction_enabled_;
+ bool reported_delay_enabled_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc
index f9bc3213ff1..49bcf9459b0 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc
@@ -14,6 +14,7 @@ extern "C" {
}
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
@@ -47,4 +48,34 @@ TEST(EchoCancellationInternalTest, DelayCorrection) {
EXPECT_EQ(0, WebRtcAec_delay_correction_enabled(aec_core));
}
+TEST(EchoCancellationInternalTest, ReportedDelay) {
+ scoped_ptr<AudioProcessing> ap(AudioProcessing::Create(0));
+ EXPECT_TRUE(ap->echo_cancellation()->aec_core() == NULL);
+
+ EXPECT_EQ(ap->kNoError, ap->echo_cancellation()->Enable(true));
+ EXPECT_TRUE(ap->echo_cancellation()->is_enabled());
+
+ AecCore* aec_core = ap->echo_cancellation()->aec_core();
+ ASSERT_TRUE(aec_core != NULL);
+ // Enabled by default.
+ EXPECT_EQ(1, WebRtcAec_reported_delay_enabled(aec_core));
+
+ Config config;
+ config.Set<ReportedDelay>(new ReportedDelay(false));
+ ap->SetExtraOptions(config);
+ EXPECT_EQ(0, WebRtcAec_reported_delay_enabled(aec_core));
+
+ // Retains setting after initialization.
+ EXPECT_EQ(ap->kNoError, ap->Initialize());
+ EXPECT_EQ(0, WebRtcAec_reported_delay_enabled(aec_core));
+
+ config.Set<ReportedDelay>(new ReportedDelay(true));
+ ap->SetExtraOptions(config);
+ EXPECT_EQ(1, WebRtcAec_reported_delay_enabled(aec_core));
+
+ // Retains setting after initialization.
+ EXPECT_EQ(ap->kNoError, ap->Initialize());
+ EXPECT_EQ(1, WebRtcAec_reported_delay_enabled(aec_core));
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h
deleted file mode 100644
index f1c03f32d6f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_WRAPPER_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_WRAPPER_H_
-
-#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/audio_processing/processing_component.h"
-
-namespace webrtc {
-
-class AudioProcessingImpl;
-class AudioBuffer;
-
-class EchoCancellationImplWrapper : public virtual EchoCancellation,
- public virtual ProcessingComponent {
- public:
- static EchoCancellationImplWrapper* Create(
- const AudioProcessingImpl* audioproc);
- virtual ~EchoCancellationImplWrapper() {}
-
- virtual int ProcessRenderAudio(const AudioBuffer* audio) = 0;
- virtual int ProcessCaptureAudio(AudioBuffer* audio) = 0;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_WRAPPER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
index f7853814966..a03adc5300e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
@@ -15,7 +15,6 @@
#include "webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
@@ -63,9 +62,11 @@ size_t EchoControlMobile::echo_path_size_bytes() {
return WebRtcAecm_echo_path_size_bytes();
}
-EchoControlMobileImpl::EchoControlMobileImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
+EchoControlMobileImpl::EchoControlMobileImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
apm_(apm),
+ crit_(crit),
routing_mode_(kSpeakerphone),
comfort_noise_enabled_(true),
external_echo_path_(NULL) {}
@@ -127,7 +128,7 @@ int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) {
for (int i = 0; i < audio->num_channels(); i++) {
// TODO(ajm): improve how this works, possibly inside AECM.
// This is kind of hacked up.
- int16_t* noisy = audio->low_pass_reference(i);
+ const int16_t* noisy = audio->low_pass_reference(i);
int16_t* clean = audio->low_pass_split_data(i);
if (noisy == NULL) {
noisy = clean;
@@ -155,7 +156,7 @@ int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
int EchoControlMobileImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
// Ensure AEC and AECM are not both enabled.
if (enable && apm_->echo_cancellation()->is_enabled()) {
return apm_->kBadParameterError;
@@ -169,7 +170,7 @@ bool EchoControlMobileImpl::is_enabled() const {
}
int EchoControlMobileImpl::set_routing_mode(RoutingMode mode) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(mode) == -1) {
return apm_->kBadParameterError;
}
@@ -184,7 +185,7 @@ EchoControlMobile::RoutingMode EchoControlMobileImpl::routing_mode()
}
int EchoControlMobileImpl::enable_comfort_noise(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
comfort_noise_enabled_ = enable;
return Configure();
}
@@ -195,7 +196,7 @@ bool EchoControlMobileImpl::is_comfort_noise_enabled() const {
int EchoControlMobileImpl::SetEchoPath(const void* echo_path,
size_t size_bytes) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (echo_path == NULL) {
return apm_->kNullPointerError;
}
@@ -214,7 +215,7 @@ int EchoControlMobileImpl::SetEchoPath(const void* echo_path,
int EchoControlMobileImpl::GetEchoPath(void* echo_path,
size_t size_bytes) const {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (echo_path == NULL) {
return apm_->kNullPointerError;
}
@@ -240,7 +241,7 @@ int EchoControlMobileImpl::Initialize() {
return apm_->kNoError;
}
- if (apm_->sample_rate_hz() == apm_->kSampleRate32kHz) {
+ if (apm_->proc_sample_rate_hz() > apm_->kSampleRate16kHz) {
LOG(LS_ERROR) << "AECM only supports 16 kHz or lower sample rates";
return apm_->kBadSampleRateError;
}
@@ -259,14 +260,14 @@ void* EchoControlMobileImpl::CreateHandle() const {
return handle;
}
-int EchoControlMobileImpl::DestroyHandle(void* handle) const {
- return WebRtcAecm_Free(static_cast<Handle*>(handle));
+void EchoControlMobileImpl::DestroyHandle(void* handle) const {
+ WebRtcAecm_Free(static_cast<Handle*>(handle));
}
int EchoControlMobileImpl::InitializeHandle(void* handle) const {
assert(handle != NULL);
Handle* my_handle = static_cast<Handle*>(handle);
- if (WebRtcAecm_Init(my_handle, apm_->sample_rate_hz()) != 0) {
+ if (WebRtcAecm_Init(my_handle, apm_->proc_sample_rate_hz()) != 0) {
return GetHandleError(my_handle);
}
if (external_echo_path_ != NULL) {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.h b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.h
index 5eefab0a3c2..4f5b5931a1c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.h
@@ -15,13 +15,15 @@
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class EchoControlMobileImpl : public EchoControlMobile,
public ProcessingComponent {
public:
- explicit EchoControlMobileImpl(const AudioProcessingImpl* apm);
+ EchoControlMobileImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit);
virtual ~EchoControlMobileImpl();
int ProcessRenderAudio(const AudioBuffer* audio);
@@ -47,11 +49,12 @@ class EchoControlMobileImpl : public EchoControlMobile,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
RoutingMode routing_mode_;
bool comfort_noise_enabled_;
unsigned char* external_echo_path_;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
index 35547031e30..a67b67ecb16 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
@@ -12,12 +12,10 @@
#include <assert.h>
+#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/modules/audio_processing/agc/include/gain_control.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
-
namespace webrtc {
typedef void Handle;
@@ -37,9 +35,11 @@ int16_t MapSetting(GainControl::Mode mode) {
}
} // namespace
-GainControlImpl::GainControlImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
+GainControlImpl::GainControlImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
apm_(apm),
+ crit_(crit),
mode_(kAdaptiveAnalog),
minimum_capture_level_(0),
maximum_capture_level_(255),
@@ -59,7 +59,7 @@ int GainControlImpl::ProcessRenderAudio(AudioBuffer* audio) {
assert(audio->samples_per_split_channel() <= 160);
- int16_t* mixed_data = audio->low_pass_split_data(0);
+ const int16_t* mixed_data = audio->low_pass_split_data(0);
if (audio->num_channels() > 1) {
audio->CopyAndMixLowPass(1);
mixed_data = audio->mixed_low_pass_data(0);
@@ -91,6 +91,7 @@ int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
int err = apm_->kNoError;
if (mode_ == kAdaptiveAnalog) {
+ capture_levels_.assign(num_handles(), analog_capture_level_);
for (int i = 0; i < num_handles(); i++) {
Handle* my_handle = static_cast<Handle*>(handle(i));
err = WebRtcAgc_AddMic(
@@ -114,7 +115,6 @@ int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
audio->low_pass_split_data(i),
audio->high_pass_split_data(i),
static_cast<int16_t>(audio->samples_per_split_channel()),
- //capture_levels_[i],
analog_capture_level_,
&capture_level_out);
@@ -190,13 +190,6 @@ int GainControlImpl::set_stream_analog_level(int level) {
if (level < minimum_capture_level_ || level > maximum_capture_level_) {
return apm_->kBadParameterError;
}
-
- if (mode_ == kAdaptiveAnalog) {
- if (level != analog_capture_level_) {
- // The analog level has been changed; update our internal levels.
- capture_levels_.assign(num_handles(), level);
- }
- }
analog_capture_level_ = level;
return apm_->kNoError;
@@ -210,7 +203,7 @@ int GainControlImpl::stream_analog_level() {
}
int GainControlImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
return EnableComponent(enable);
}
@@ -219,7 +212,7 @@ bool GainControlImpl::is_enabled() const {
}
int GainControlImpl::set_mode(Mode mode) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(mode) == -1) {
return apm_->kBadParameterError;
}
@@ -234,7 +227,7 @@ GainControl::Mode GainControlImpl::mode() const {
int GainControlImpl::set_analog_level_limits(int minimum,
int maximum) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (minimum < 0) {
return apm_->kBadParameterError;
}
@@ -266,7 +259,7 @@ bool GainControlImpl::stream_is_saturated() const {
}
int GainControlImpl::set_target_level_dbfs(int level) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (level > 31 || level < 0) {
return apm_->kBadParameterError;
}
@@ -280,7 +273,7 @@ int GainControlImpl::target_level_dbfs() const {
}
int GainControlImpl::set_compression_gain_db(int gain) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (gain < 0 || gain > 90) {
return apm_->kBadParameterError;
}
@@ -294,7 +287,7 @@ int GainControlImpl::compression_gain_db() const {
}
int GainControlImpl::enable_limiter(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
limiter_enabled_ = enable;
return Configure();
}
@@ -309,11 +302,7 @@ int GainControlImpl::Initialize() {
return err;
}
- analog_capture_level_ =
- (maximum_capture_level_ - minimum_capture_level_) >> 1;
capture_levels_.assign(num_handles(), analog_capture_level_);
- was_analog_level_set_ = false;
-
return apm_->kNoError;
}
@@ -328,8 +317,8 @@ void* GainControlImpl::CreateHandle() const {
return handle;
}
-int GainControlImpl::DestroyHandle(void* handle) const {
- return WebRtcAgc_Free(static_cast<Handle*>(handle));
+void GainControlImpl::DestroyHandle(void* handle) const {
+ WebRtcAgc_Free(static_cast<Handle*>(handle));
}
int GainControlImpl::InitializeHandle(void* handle) const {
@@ -337,7 +326,7 @@ int GainControlImpl::InitializeHandle(void* handle) const {
minimum_capture_level_,
maximum_capture_level_,
MapSetting(mode_),
- apm_->sample_rate_hz());
+ apm_->proc_sample_rate_hz());
}
int GainControlImpl::ConfigureHandle(void* handle) const {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
index 2de02f6e6b7..81159870009 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
@@ -17,13 +17,15 @@
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class GainControlImpl : public GainControl,
public ProcessingComponent {
public:
- explicit GainControlImpl(const AudioProcessingImpl* apm);
+ GainControlImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit);
virtual ~GainControlImpl();
int ProcessRenderAudio(AudioBuffer* audio);
@@ -58,11 +60,12 @@ class GainControlImpl : public GainControl,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
Mode mode_;
int minimum_capture_level_;
int maximum_capture_level_;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp b/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp
new file mode 100644
index 00000000000..55c79689f7e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp
@@ -0,0 +1,45 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': ['lib_core_neon_offsets.gypi'],
+ 'targets' : [
+ {
+ 'target_name': 'gen_nsx_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'sources': ['<(shared_generated_dir)/nsx_core_neon_offsets.o',],
+ 'variables' : {
+ 'unpack_lib_name':'nsx_core_neon_offsets.o',
+ },
+ 'includes': [
+ '../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ {
+ 'target_name': 'gen_aecm_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'variables': {
+ 'unpack_lib_name':'aecm_core_neon_offsets.o',
+ },
+ 'sources': ['<(shared_generated_dir)/aecm_core_neon_offsets.o',],
+ 'includes': [
+ '../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ ],
+}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp b/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp
new file mode 100644
index 00000000000..f4a9134fb21
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp
@@ -0,0 +1,45 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': ['lib_core_neon_offsets.gypi'],
+ 'targets' : [
+ {
+ 'target_name': 'gen_nsx_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'sources': ['<(shared_generated_dir)/nsx_core_neon_offsets.o',],
+ 'variables' : {
+ 'unpack_lib_name':'nsx_core_neon_offsets.o',
+ },
+ 'includes': [
+ '../../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ {
+ 'target_name': 'gen_aecm_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'variables': {
+ 'unpack_lib_name':'aecm_core_neon_offsets.o',
+ },
+ 'sources': ['<(shared_generated_dir)/aecm_core_neon_offsets.o',],
+ 'includes': [
+ '../../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ ],
+}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.cc
index da2170373b0..0a23ff23555 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.cc
@@ -13,11 +13,10 @@
#include <assert.h>
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
-#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
namespace webrtc {
namespace {
@@ -36,7 +35,7 @@ struct FilterState {
int InitializeFilter(FilterState* hpf, int sample_rate_hz) {
assert(hpf != NULL);
- if (sample_rate_hz == AudioProcessingImpl::kSampleRate8kHz) {
+ if (sample_rate_hz == AudioProcessing::kSampleRate8kHz) {
hpf->ba = kFilterCoefficients8kHz;
} else {
hpf->ba = kFilterCoefficients;
@@ -105,9 +104,11 @@ int Filter(FilterState* hpf, int16_t* data, int length) {
typedef FilterState Handle;
-HighPassFilterImpl::HighPassFilterImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
- apm_(apm) {}
+HighPassFilterImpl::HighPassFilterImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
+ apm_(apm),
+ crit_(crit) {}
HighPassFilterImpl::~HighPassFilterImpl() {}
@@ -135,7 +136,7 @@ int HighPassFilterImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
int HighPassFilterImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
return EnableComponent(enable);
}
@@ -147,14 +148,13 @@ void* HighPassFilterImpl::CreateHandle() const {
return new FilterState;
}
-int HighPassFilterImpl::DestroyHandle(void* handle) const {
+void HighPassFilterImpl::DestroyHandle(void* handle) const {
delete static_cast<Handle*>(handle);
- return apm_->kNoError;
}
int HighPassFilterImpl::InitializeHandle(void* handle) const {
return InitializeFilter(static_cast<Handle*>(handle),
- apm_->sample_rate_hz());
+ apm_->proc_sample_rate_hz());
}
int HighPassFilterImpl::ConfigureHandle(void* /*handle*/) const {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.h b/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.h
index 7e11ea9ceee..6f91f3bc049 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.h
@@ -15,13 +15,14 @@
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class HighPassFilterImpl : public HighPassFilter,
public ProcessingComponent {
public:
- explicit HighPassFilterImpl(const AudioProcessingImpl* apm);
+ HighPassFilterImpl(const AudioProcessing* apm, CriticalSectionWrapper* crit);
virtual ~HighPassFilterImpl();
int ProcessCaptureAudio(AudioBuffer* audio);
@@ -37,11 +38,12 @@ class HighPassFilterImpl : public HighPassFilter,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h b/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
index b5c856de273..77c3f3add22 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
@@ -15,7 +15,6 @@
#include <stdio.h> // FILE
#include "webrtc/common.h"
-#include "webrtc/modules/interface/module.h"
#include "webrtc/typedefs.h"
struct AecCore;
@@ -50,11 +49,32 @@ class VoiceDetection;
// except when really necessary.
struct DelayCorrection {
DelayCorrection() : enabled(false) {}
- DelayCorrection(bool enabled) : enabled(enabled) {}
+ explicit DelayCorrection(bool enabled) : enabled(enabled) {}
+ bool enabled;
+};
+
+// Use to disable the reported system delays. By disabling the reported system
+// delays the echo cancellation algorithm assumes the process and reverse
+// streams to be aligned. This configuration only applies to EchoCancellation,
+// not EchoControlMobile, and is set with AudioProcessing::SetExtraOptions().
+// Note that by disabling reported system delays the EchoCancellation may
+// regress in performance.
+struct ReportedDelay {
+ ReportedDelay() : enabled(true) {}
+ explicit ReportedDelay(bool enabled) : enabled(enabled) {}
+ bool enabled;
+};
+// Must be provided through AudioProcessing::Create(Config&). It will have no
+// impact if used with AudioProcessing::SetExtraOptions().
+struct ExperimentalAgc {
+ ExperimentalAgc() : enabled(true) {}
+ explicit ExperimentalAgc(bool enabled) : enabled(enabled) {}
bool enabled;
};
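As exercised elsewhere in this patch (see echo_cancellation_impl_unittest.cc), these options travel in a Config object; a minimal sketch:

Config config;
config.Set<ExperimentalAgc>(new ExperimentalAgc(false));  // only honored at create-time
AudioProcessing* apm = AudioProcessing::Create(config);

config.Set<ReportedDelay>(new ReportedDelay(false));      // runtime option
apm->SetExtraOptions(config);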
+static const int kAudioProcMaxNativeSampleRateHz = 32000;
+
// The Audio Processing Module (APM) provides a collection of voice processing
// components designed for real-time communications software.
//
@@ -84,16 +104,12 @@ struct DelayCorrection {
// 2. Parameter getters are never called concurrently with the corresponding
// setter.
//
-// APM accepts only 16-bit linear PCM audio data in frames of 10 ms. Multiple
-// channels should be interleaved.
+// APM accepts only linear PCM audio data in chunks of 10 ms. The int16
+// interfaces use interleaved data, while the float interfaces use deinterleaved
+// data.
//
// Usage example, omitting error checking:
// AudioProcessing* apm = AudioProcessing::Create(0);
-// apm->set_sample_rate_hz(32000); // Super-wideband processing.
-//
-// // Mono capture and stereo render.
-// apm->set_num_channels(1, 1);
-// apm->set_num_reverse_channels(2);
//
// apm->high_pass_filter()->Enable(true);
//
@@ -132,13 +148,27 @@ struct DelayCorrection {
// // Close the application...
// delete apm;
//
-class AudioProcessing : public Module {
+class AudioProcessing {
public:
- // Creates a APM instance, with identifier |id|. Use one instance for every
- // primary audio stream requiring processing. On the client-side, this would
- // typically be one instance for the near-end stream, and additional instances
- // for each far-end stream which requires processing. On the server-side,
- // this would typically be one instance for every incoming stream.
+ enum ChannelLayout {
+ kMono,
+ // Left, right.
+ kStereo,
+ // Mono, keyboard mic.
+ kMonoAndKeyboard,
+ // Left, right, keyboard mic.
+ kStereoAndKeyboard
+ };
+
+ // Creates an APM instance. Use one instance for every primary audio stream
+ // requiring processing. On the client-side, this would typically be one
+ // instance for the near-end stream, and additional instances for each far-end
+ // stream which requires processing. On the server-side, this would typically
+ // be one instance for every incoming stream.
+ static AudioProcessing* Create();
+ // Allows passing in an optional configuration at create-time.
+ static AudioProcessing* Create(const Config& config);
+ // TODO(ajm): Deprecated; remove all calls to it.
static AudioProcessing* Create(int id);
virtual ~AudioProcessing() {}
@@ -147,11 +177,26 @@ class AudioProcessing : public Module {
// it is not necessary to call before processing the first stream after
// creation.
//
- // set_sample_rate_hz(), set_num_channels() and set_num_reverse_channels()
- // will trigger a full initialization if the settings are changed from their
- // existing values. Otherwise they are no-ops.
+ // It is also not necessary to call if the audio parameters (sample
+ // rate and number of channels) have changed. Passing updated parameters
+ // directly to |ProcessStream()| and |AnalyzeReverseStream()| is permissible.
+ // If the parameters are known at init-time though, they may be provided.
virtual int Initialize() = 0;
+ // The int16 interfaces require:
+ // - only |NativeRate|s be used
+ // - that the input, output and reverse rates must match
+ // - that |output_layout| matches |input_layout|
+ //
+ // The float interfaces accept arbitrary rates and support differing input
+ // and output layouts, but the output may only remove channels, not add.
+ virtual int Initialize(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ ChannelLayout input_layout,
+ ChannelLayout output_layout,
+ ChannelLayout reverse_layout) = 0;
+
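A sketch of an init-time call through the new overload (the concrete rates and layouts are arbitrary examples, not requirements):

apm->Initialize(48000,                       // capture input rate
                32000,                       // capture output rate
                48000,                       // reverse (render) rate
                AudioProcessing::kStereo,    // input layout
                AudioProcessing::kMono,      // output layout (may only drop channels)
                AudioProcessing::kStereo);   // reverse layout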
// Pass down additional options which don't have explicit setters. This
// ensures the options are applied immediately.
virtual void SetExtraOptions(const Config& config) = 0;
@@ -159,23 +204,30 @@ class AudioProcessing : public Module {
virtual int EnableExperimentalNs(bool enable) = 0;
virtual bool experimental_ns_enabled() const = 0;
- // Sets the sample |rate| in Hz for both the primary and reverse audio
- // streams. 8000, 16000 or 32000 Hz are permitted.
+ // DEPRECATED.
+ // TODO(ajm): Remove after Chromium has upgraded to using Initialize().
virtual int set_sample_rate_hz(int rate) = 0;
+ // TODO(ajm): Remove after voice engine no longer requires it to resample
+ // the reverse stream to the forward rate.
+ virtual int input_sample_rate_hz() const = 0;
+ // TODO(ajm): Remove after Chromium no longer depends on it.
virtual int sample_rate_hz() const = 0;
- // Sets the number of channels for the primary audio stream. Input frames must
- // contain a number of channels given by |input_channels|, while output frames
- // will be returned with number of channels given by |output_channels|.
- virtual int set_num_channels(int input_channels, int output_channels) = 0;
+ // TODO(ajm): Only intended for internal use. Make private and friend the
+ // necessary classes?
+ virtual int proc_sample_rate_hz() const = 0;
+ virtual int proc_split_sample_rate_hz() const = 0;
virtual int num_input_channels() const = 0;
virtual int num_output_channels() const = 0;
-
- // Sets the number of channels for the reverse audio stream. Input frames must
- // contain a number of channels given by |channels|.
- virtual int set_num_reverse_channels(int channels) = 0;
virtual int num_reverse_channels() const = 0;
+ // Set to true when the output of AudioProcessing will be muted or in some
+ // other way not used. Ideally, the captured audio would still be processed,
+ // but some components may change behavior based on this information.
+ // Default false.
+ virtual void set_output_will_be_muted(bool muted) = 0;
+ virtual bool output_will_be_muted() const = 0;
+
// Processes a 10 ms |frame| of the primary audio stream. On the client-side,
// this is the near-end (or captured) audio.
//
@@ -184,10 +236,25 @@ class AudioProcessing : public Module {
// with the stream_ tag which is needed should be called after processing.
//
// The |sample_rate_hz_|, |num_channels_|, and |samples_per_channel_|
- // members of |frame| must be valid, and correspond to settings supplied
- // to APM.
+ // members of |frame| must be valid. If they have changed since the previous
+ // call to this method, a new initialization will be triggered.
virtual int ProcessStream(AudioFrame* frame) = 0;
+ // Accepts deinterleaved float audio with the range [-1, 1]. Each element
+ // of |src| points to a channel buffer, arranged according to
+ // |input_layout|. At output, the channels will be arranged according to
+ // |output_layout| at |output_sample_rate_hz| in |dest|.
+ //
+ // The output layout may only remove channels, not add them. |src| and
+ // |dest| may use the same memory, if desired.
+ virtual int ProcessStream(const float* const* src,
+ int samples_per_channel,
+ int input_sample_rate_hz,
+ ChannelLayout input_layout,
+ int output_sample_rate_hz,
+ ChannelLayout output_layout,
+ float* const* dest) = 0;
+
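A similarly hedged sketch for the float overload, under the same assumptions as above (the buffers and rate are illustrative; 48 kHz is legal here because the float interfaces accept arbitrary rates):

// Hypothetical usage sketch only: process one 10 ms chunk in place.
const int kRateHz = 48000;
const int kSamplesPerChannel = kRateHz / 100;  // 10 ms chunk
float left[kSamplesPerChannel] = {0};          // samples in [-1, 1]
float right[kSamplesPerChannel] = {0};
float* const capture[] = {left, right};
// ... fill |left| and |right| from the capture device ...
int err = apm->ProcessStream(capture, kSamplesPerChannel,
                             kRateHz, kStereo,  // input rate and layout
                             kRateHz, kStereo,  // output rate and layout
                             capture);          // same memory is permitted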
// Analyzes a 10 ms |frame| of the reverse direction audio stream. The frame
// will not be modified. On the client-side, this is the far-end (or to be
// rendered) audio.
@@ -199,11 +266,19 @@ class AudioProcessing : public Module {
// chances are you don't need to use it.
//
// The |sample_rate_hz_|, |num_channels_|, and |samples_per_channel_|
- // members of |frame| must be valid.
+ // members of |frame| must be valid. |sample_rate_hz_| must correspond to
+ // |input_sample_rate_hz()|.
//
// TODO(ajm): add const to input; requires an implementation fix.
virtual int AnalyzeReverseStream(AudioFrame* frame) = 0;
+ // Accepts deinterleaved float audio with the range [-1, 1]. Each element
+ // of |data| points to a channel buffer, arranged according to |layout|.
+ virtual int AnalyzeReverseStream(const float* const* data,
+ int samples_per_channel,
+ int sample_rate_hz,
+ ChannelLayout layout) = 0;
+
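And the matching far-end call, continuing the hypothetical sketch above:

// Hypothetical usage sketch only: hand the render (far-end) audio to APM.
float render_left[kSamplesPerChannel] = {0};   // filled with far-end samples
float render_right[kSamplesPerChannel] = {0};
const float* const render[] = {render_left, render_right};
apm->AnalyzeReverseStream(render, kSamplesPerChannel, kRateHz, kStereo);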
// This must be called if and only if echo processing is enabled.
//
// Sets the |delay| in ms between AnalyzeReverseStream() receiving a far-end
@@ -219,6 +294,12 @@ class AudioProcessing : public Module {
// ProcessStream().
virtual int set_stream_delay_ms(int delay) = 0;
virtual int stream_delay_ms() const = 0;
+ virtual bool was_stream_delay_set() const = 0;
+
+ // Call to signal that a key press occurred (true) or did not occur (false)
+ // with this chunk of audio.
+ virtual void set_stream_key_pressed(bool key_pressed) = 0;
+ virtual bool stream_key_pressed() const = 0;
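The per-chunk call order these comments imply can be summarized in a short hedged sketch (render_frame, capture_frame, delay_ms and key_was_pressed are placeholder variables, not defined by this change):

// Hypothetical usage sketch only; the stream_* setters apply to the
// ProcessStream() call that follows them.
apm->AnalyzeReverseStream(render_frame);       // far-end AudioFrame* first
apm->set_stream_delay_ms(delay_ms);            // required iff AEC is enabled
apm->set_stream_key_pressed(key_was_pressed);  // typing hint for this chunk
apm->ProcessStream(capture_frame);             // near-end AudioFrame*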
// Sets a delay |offset| in ms to add to the values passed in through
// set_stream_delay_ms(). May be positive or negative.
@@ -283,9 +364,13 @@ class AudioProcessing : public Module {
kBadStreamParameterWarning = -13
};
- // Inherited from Module.
- virtual int32_t TimeUntilNextProcess() OVERRIDE;
- virtual int32_t Process() OVERRIDE;
+ enum NativeRate {
+ kSampleRate8kHz = 8000,
+ kSampleRate16kHz = 16000,
+ kSampleRate32kHz = 32000
+ };
+
+ static const int kChunkSizeMs = 10;
};
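For reference, the per-channel frame size implied by kChunkSizeMs at each NativeRate works out as follows (editorial note; the helper name is hypothetical):

// 8000 Hz  * 10 / 1000 =  80 samples per channel per chunk
// 16000 Hz * 10 / 1000 = 160 samples per channel per chunk
// 32000 Hz * 10 / 1000 = 320 samples per channel per chunk
static int ChunkSamples(int sample_rate_hz) {
  return sample_rate_hz * AudioProcessing::kChunkSizeMs / 1000;
}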
// The acoustic echo cancellation (AEC) component provides better performance
@@ -306,16 +391,10 @@ class EchoCancellation {
// render and capture devices are used, particularly with webcams.
//
// This enables a compensation mechanism, and requires that
- // |set_device_sample_rate_hz()| and |set_stream_drift_samples()| be called.
+ // set_stream_drift_samples() be called.
virtual int enable_drift_compensation(bool enable) = 0;
virtual bool is_drift_compensation_enabled() const = 0;
- // Provides the sampling rate of the audio devices. It is assumed the render
- // and capture devices use the same nominal sample rate. Required if and only
- // if drift compensation is enabled.
- virtual int set_device_sample_rate_hz(int rate) = 0;
- virtual int device_sample_rate_hz() const = 0;
-
// Sets the difference between the number of samples rendered and captured by
// the audio devices since the last call to |ProcessStream()|. Must be called
// if drift compensation is enabled, prior to |ProcessStream()|.
@@ -555,8 +634,7 @@ class LevelEstimator {
// frames since the last call to RMS(). The returned value is positive but
// should be interpreted as negative. It is constrained to [0, 127].
//
- // The computation follows:
- // http://tools.ietf.org/html/draft-ietf-avtext-client-to-mixer-audio-level-05
+ // The computation follows: https://tools.ietf.org/html/rfc6465
// with the intent that it can provide the RTP audio level indication.
//
// Frames passed to ProcessStream() with an |_energy| of zero are considered
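For clarity, the computation referenced here, as performed by the Level class removed further down and by the RMSLevel class replacing it, amounts to the following (a hedged sketch; the function name is hypothetical and <cmath>/<algorithm> are assumed):

// Mean power is normalized by full scale (32768^2) and reported as a
// positive dB-below-full-scale magnitude constrained to [0, 127]. A zero
// sample_count is assumed to be handled by the caller.
static int RmsDbBelowFullScale(double sum_square, int sample_count) {
  const double mean = sum_square / (sample_count * 32768.0 * 32768.0);
  double db = 10.0 * std::log10(mean);        // <= 0 for in-range input
  db = std::min(0.0, std::max(db, -127.0));   // constrain to [-127, 0]
  return static_cast<int>(-db + 0.5);         // report positive magnitude
}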
diff --git a/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h b/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h
index 46520ab494e..c1ac23adf76 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h
@@ -26,10 +26,6 @@ class MockEchoCancellation : public EchoCancellation {
int(bool enable));
MOCK_CONST_METHOD0(is_drift_compensation_enabled,
bool());
- MOCK_METHOD1(set_device_sample_rate_hz,
- int(int rate));
- MOCK_CONST_METHOD0(device_sample_rate_hz,
- int());
MOCK_METHOD1(set_stream_drift_samples,
void(int drift));
MOCK_CONST_METHOD0(stream_drift_samples,
@@ -181,6 +177,13 @@ class MockAudioProcessing : public AudioProcessing {
MOCK_METHOD0(Initialize,
int());
+ MOCK_METHOD6(Initialize,
+ int(int sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ ChannelLayout input_layout,
+ ChannelLayout output_layout,
+ ChannelLayout reverse_layout));
MOCK_METHOD1(SetExtraOptions,
void(const Config& config));
MOCK_METHOD1(EnableExperimentalNs,
@@ -189,26 +192,49 @@ class MockAudioProcessing : public AudioProcessing {
bool());
MOCK_METHOD1(set_sample_rate_hz,
int(int rate));
+ MOCK_CONST_METHOD0(input_sample_rate_hz,
+ int());
MOCK_CONST_METHOD0(sample_rate_hz,
int());
- MOCK_METHOD2(set_num_channels,
- int(int input_channels, int output_channels));
+ MOCK_CONST_METHOD0(proc_sample_rate_hz,
+ int());
+ MOCK_CONST_METHOD0(proc_split_sample_rate_hz,
+ int());
MOCK_CONST_METHOD0(num_input_channels,
int());
MOCK_CONST_METHOD0(num_output_channels,
int());
- MOCK_METHOD1(set_num_reverse_channels,
- int(int channels));
MOCK_CONST_METHOD0(num_reverse_channels,
int());
+ MOCK_METHOD1(set_output_will_be_muted,
+ void(bool muted));
+ MOCK_CONST_METHOD0(output_will_be_muted,
+ bool());
MOCK_METHOD1(ProcessStream,
int(AudioFrame* frame));
+ MOCK_METHOD7(ProcessStream,
+ int(const float* const* src,
+ int samples_per_channel,
+ int input_sample_rate_hz,
+ ChannelLayout input_layout,
+ int output_sample_rate_hz,
+ ChannelLayout output_layout,
+ float* const* dest));
MOCK_METHOD1(AnalyzeReverseStream,
int(AudioFrame* frame));
+ MOCK_METHOD4(AnalyzeReverseStream,
+ int(const float* const* data, int frames, int sample_rate_hz,
+ ChannelLayout input_layout));
MOCK_METHOD1(set_stream_delay_ms,
int(int delay));
MOCK_CONST_METHOD0(stream_delay_ms,
int());
+ MOCK_CONST_METHOD0(was_stream_delay_set,
+ bool());
+ MOCK_METHOD1(set_stream_key_pressed,
+ void(bool key_pressed));
+ MOCK_CONST_METHOD0(stream_key_pressed,
+ bool());
MOCK_METHOD1(set_delay_offset_ms,
void(int offset));
MOCK_CONST_METHOD0(delay_offset_ms,
@@ -230,20 +256,16 @@ class MockAudioProcessing : public AudioProcessing {
}
virtual MockHighPassFilter* high_pass_filter() const {
return high_pass_filter_.get();
- };
+ }
virtual MockLevelEstimator* level_estimator() const {
return level_estimator_.get();
- };
+ }
virtual MockNoiseSuppression* noise_suppression() const {
return noise_suppression_.get();
- };
+ }
virtual MockVoiceDetection* voice_detection() const {
return voice_detection_.get();
- };
- MOCK_METHOD0(TimeUntilNextProcess,
- int32_t());
- MOCK_METHOD0(Process,
- int32_t());
+ }
private:
scoped_ptr<MockEchoCancellation> echo_cancellation_;
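As a hedged aside, the widened mock entries above can be exercised in tests in the usual gmock fashion; for instance (hypothetical snippet, not part of this change):

// Expect exactly one call to the six-argument Initialize() overload.
MockAudioProcessing apm;
EXPECT_CALL(apm, Initialize(48000, 48000, 48000, kStereo, kMono, kStereo))
    .WillOnce(testing::Return(AudioProcessing::kNoError));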
diff --git a/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.cc
index 29dbdfc78e0..cfe295a6a0b 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.cc
@@ -10,110 +10,35 @@
#include "webrtc/modules/audio_processing/level_estimator_impl.h"
-#include <assert.h>
-#include <math.h>
-#include <string.h>
-
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/modules/audio_processing/rms_level.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
namespace webrtc {
-namespace {
-
-const double kMaxSquaredLevel = 32768.0 * 32768.0;
-
-class Level {
- public:
- static const int kMinLevel = 127;
-
- Level()
- : sum_square_(0.0),
- sample_count_(0) {}
- ~Level() {}
-
- void Init() {
- sum_square_ = 0.0;
- sample_count_ = 0;
- }
-
- void Process(int16_t* data, int length) {
- assert(data != NULL);
- assert(length > 0);
- sum_square_ += SumSquare(data, length);
- sample_count_ += length;
- }
-
- void ProcessMuted(int length) {
- assert(length > 0);
- sample_count_ += length;
- }
- int RMS() {
- if (sample_count_ == 0 || sum_square_ == 0.0) {
- Init();
- return kMinLevel;
- }
-
- // Normalize by the max level.
- double rms = sum_square_ / (sample_count_ * kMaxSquaredLevel);
- // 20log_10(x^0.5) = 10log_10(x)
- rms = 10 * log10(rms);
- if (rms > 0)
- rms = 0;
- else if (rms < -kMinLevel)
- rms = -kMinLevel;
-
- rms = -rms;
- Init();
- return static_cast<int>(rms + 0.5);
- }
-
- private:
- static double SumSquare(int16_t* data, int length) {
- double sum_square = 0.0;
- for (int i = 0; i < length; ++i) {
- double data_d = static_cast<double>(data[i]);
- sum_square += data_d * data_d;
- }
- return sum_square;
- }
-
- double sum_square_;
- int sample_count_;
-};
-} // namespace
-
-LevelEstimatorImpl::LevelEstimatorImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
- apm_(apm) {}
+LevelEstimatorImpl::LevelEstimatorImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
+ crit_(crit) {}
LevelEstimatorImpl::~LevelEstimatorImpl() {}
int LevelEstimatorImpl::ProcessStream(AudioBuffer* audio) {
if (!is_component_enabled()) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
- Level* level = static_cast<Level*>(handle(0));
- if (audio->is_muted()) {
- level->ProcessMuted(audio->samples_per_channel());
- return apm_->kNoError;
+ RMSLevel* rms_level = static_cast<RMSLevel*>(handle(0));
+ for (int i = 0; i < audio->num_channels(); ++i) {
+ rms_level->Process(audio->data(i), audio->samples_per_channel());
}
- int16_t* mixed_data = audio->data(0);
- if (audio->num_channels() > 1) {
- audio->CopyAndMix(1);
- mixed_data = audio->mixed_data(0);
- }
-
- level->Process(mixed_data, audio->samples_per_channel());
-
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int LevelEstimatorImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
return EnableComponent(enable);
}
@@ -123,43 +48,38 @@ bool LevelEstimatorImpl::is_enabled() const {
int LevelEstimatorImpl::RMS() {
if (!is_component_enabled()) {
- return apm_->kNotEnabledError;
+ return AudioProcessing::kNotEnabledError;
}
- Level* level = static_cast<Level*>(handle(0));
- return level->RMS();
+ RMSLevel* rms_level = static_cast<RMSLevel*>(handle(0));
+ return rms_level->RMS();
}
+// The ProcessingComponent implementation is pretty weird in this class since
+// we have only a single instance of the trivial underlying component.
void* LevelEstimatorImpl::CreateHandle() const {
- return new Level;
+ return new RMSLevel;
}
-int LevelEstimatorImpl::DestroyHandle(void* handle) const {
- assert(handle != NULL);
- Level* level = static_cast<Level*>(handle);
- delete level;
- return apm_->kNoError;
+void LevelEstimatorImpl::DestroyHandle(void* handle) const {
+ delete static_cast<RMSLevel*>(handle);
}
int LevelEstimatorImpl::InitializeHandle(void* handle) const {
- assert(handle != NULL);
- Level* level = static_cast<Level*>(handle);
- level->Init();
-
- return apm_->kNoError;
+ static_cast<RMSLevel*>(handle)->Reset();
+ return AudioProcessing::kNoError;
}
int LevelEstimatorImpl::ConfigureHandle(void* /*handle*/) const {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int LevelEstimatorImpl::num_handles_required() const {
return 1;
}
-int LevelEstimatorImpl::GetHandleError(void* handle) const {
- // The component has no detailed errors.
- assert(handle != NULL);
- return apm_->kUnspecifiedError;
+int LevelEstimatorImpl::GetHandleError(void* /*handle*/) const {
+ return AudioProcessing::kUnspecifiedError;
}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.h b/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.h
index 20dc18dc425..b38337d4d41 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.h
@@ -13,15 +13,18 @@
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/processing_component.h"
+#include "webrtc/modules/audio_processing/rms_level.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class LevelEstimatorImpl : public LevelEstimator,
public ProcessingComponent {
public:
- explicit LevelEstimatorImpl(const AudioProcessingImpl* apm);
+ LevelEstimatorImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit);
virtual ~LevelEstimatorImpl();
int ProcessStream(AudioBuffer* audio);
@@ -38,12 +41,13 @@ class LevelEstimatorImpl : public LevelEstimator,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ CriticalSectionWrapper* crit_;
};
+
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi b/chromium/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi
new file mode 100644
index 00000000000..f32ddd47f78
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi
@@ -0,0 +1,51 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file has common information for gen_core_neon_offsets.gyp
+# and gen_core_neon_offsets_chromium.gyp
+{
+ 'variables': {
+ 'variables' : {
+ 'lib_intermediate_name': '',
+ 'conditions' : [
+ ['android_webview_build==1', {
+ 'lib_intermediate_name' : '$(abspath $(call intermediates-dir-for,STATIC_LIBRARIES,lib_core_neon_offsets,,,$(gyp_var_prefix)))/lib_core_neon_offsets.a',
+ }],
+ ],
+ },
+ 'shared_generated_dir': '<(SHARED_INTERMEDIATE_DIR)/audio_processing/asm_offsets',
+ 'output_dir': '<(shared_generated_dir)',
+ 'output_format': 'cheader',
+ 'unpack_lib_search_path_list': [
+ '-a', '<(PRODUCT_DIR)/lib_core_neon_offsets.a',
+ '-a', '<(LIB_DIR)/webrtc/modules/audio_processing/lib_core_neon_offsets.a',
+ '-a', '<(LIB_DIR)/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.a',
+ '-a', '<(lib_intermediate_name)',
+ ],
+ 'unpack_lib_output_dir':'<(shared_generated_dir)',
+ },
+ 'includes': [
+ '../../build/common.gypi',
+ ],
+ 'conditions': [
+ ['((target_arch=="arm" and arm_version==7) or target_arch=="armv7") and (OS=="android" or OS=="ios")', {
+ 'targets' : [
+ {
+ 'target_name': 'lib_core_neon_offsets',
+ 'type': 'static_library',
+ 'android_unmangled_name': 1,
+ 'hard_dependency': 1,
+ 'sources': [
+ 'ns/nsx_core_neon_offsets.c',
+ 'aecm/aecm_core_neon_offsets.c',
+ ],
+ },
+ ],
+ }],
+ ],
+}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
index 41c11b1cd72..eea0a04a2a6 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
@@ -12,15 +12,14 @@
#include <assert.h>
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/modules/audio_processing/audio_buffer.h"
#if defined(WEBRTC_NS_FLOAT)
#include "webrtc/modules/audio_processing/ns/include/noise_suppression.h"
#elif defined(WEBRTC_NS_FIXED)
#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
#endif
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
namespace webrtc {
@@ -47,9 +46,11 @@ int MapSetting(NoiseSuppression::Level level) {
}
} // namespace
-NoiseSuppressionImpl::NoiseSuppressionImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
+NoiseSuppressionImpl::NoiseSuppressionImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
apm_(apm),
+ crit_(crit),
level_(kModerate) {}
NoiseSuppressionImpl::~NoiseSuppressionImpl() {}
@@ -67,10 +68,10 @@ int NoiseSuppressionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
Handle* my_handle = static_cast<Handle*>(handle(i));
#if defined(WEBRTC_NS_FLOAT)
err = WebRtcNs_Process(static_cast<Handle*>(handle(i)),
- audio->low_pass_split_data(i),
- audio->high_pass_split_data(i),
- audio->low_pass_split_data(i),
- audio->high_pass_split_data(i));
+ audio->low_pass_split_data_f(i),
+ audio->high_pass_split_data_f(i),
+ audio->low_pass_split_data_f(i),
+ audio->high_pass_split_data_f(i));
#elif defined(WEBRTC_NS_FIXED)
err = WebRtcNsx_Process(static_cast<Handle*>(handle(i)),
audio->low_pass_split_data(i),
@@ -88,7 +89,7 @@ int NoiseSuppressionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
int NoiseSuppressionImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
return EnableComponent(enable);
}
@@ -97,7 +98,7 @@ bool NoiseSuppressionImpl::is_enabled() const {
}
int NoiseSuppressionImpl::set_level(Level level) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(level) == -1) {
return apm_->kBadParameterError;
}
@@ -140,19 +141,21 @@ void* NoiseSuppressionImpl::CreateHandle() const {
return handle;
}
-int NoiseSuppressionImpl::DestroyHandle(void* handle) const {
+void NoiseSuppressionImpl::DestroyHandle(void* handle) const {
#if defined(WEBRTC_NS_FLOAT)
- return WebRtcNs_Free(static_cast<Handle*>(handle));
+ WebRtcNs_Free(static_cast<Handle*>(handle));
#elif defined(WEBRTC_NS_FIXED)
- return WebRtcNsx_Free(static_cast<Handle*>(handle));
+ WebRtcNsx_Free(static_cast<Handle*>(handle));
#endif
}
int NoiseSuppressionImpl::InitializeHandle(void* handle) const {
#if defined(WEBRTC_NS_FLOAT)
- return WebRtcNs_Init(static_cast<Handle*>(handle), apm_->sample_rate_hz());
+ return WebRtcNs_Init(static_cast<Handle*>(handle),
+ apm_->proc_sample_rate_hz());
#elif defined(WEBRTC_NS_FIXED)
- return WebRtcNsx_Init(static_cast<Handle*>(handle), apm_->sample_rate_hz());
+ return WebRtcNsx_Init(static_cast<Handle*>(handle),
+ apm_->proc_sample_rate_hz());
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.h b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.h
index f6dd8cbd78a..cadbbd9cd4c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.h
@@ -15,13 +15,15 @@
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class NoiseSuppressionImpl : public NoiseSuppression,
public ProcessingComponent {
public:
- explicit NoiseSuppressionImpl(const AudioProcessingImpl* apm);
+ NoiseSuppressionImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit);
virtual ~NoiseSuppressionImpl();
int ProcessCaptureAudio(AudioBuffer* audio);
@@ -40,13 +42,15 @@ class NoiseSuppressionImpl : public NoiseSuppression,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
Level level_;
};
+
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/include/noise_suppression.h b/chromium/third_party/webrtc/modules/audio_processing/ns/include/noise_suppression.h
index 32b18038089..3cf889e2d07 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/include/noise_suppression.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/include/noise_suppression.h
@@ -99,10 +99,10 @@ int WebRtcNs_set_policy(NsHandle* NS_inst, int mode);
* -1 - Error
*/
int WebRtcNs_Process(NsHandle* NS_inst,
- short* spframe,
- short* spframe_H,
- short* outframe,
- short* outframe_H);
+ float* spframe,
+ float* spframe_H,
+ float* outframe,
+ float* outframe_H);
/* Returns the internally used prior speech probability of the current frame.
* There is a frequency bin based one as well, with which this should not be
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/noise_suppression.c b/chromium/third_party/webrtc/modules/audio_processing/ns/noise_suppression.c
index 848467f080c..075ab88c1c6 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/noise_suppression.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/noise_suppression.c
@@ -43,8 +43,8 @@ int WebRtcNs_set_policy(NsHandle* NS_inst, int mode) {
}
-int WebRtcNs_Process(NsHandle* NS_inst, short* spframe, short* spframe_H,
- short* outframe, short* outframe_H) {
+int WebRtcNs_Process(NsHandle* NS_inst, float* spframe, float* spframe_H,
+ float* outframe, float* outframe_H) {
return WebRtcNs_ProcessCore(
(NSinst_t*) NS_inst, spframe, spframe_H, outframe, outframe_H);
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.c b/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.c
index 124a66d8df5..ec267ae0f69 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.c
@@ -715,10 +715,10 @@ void WebRtcNs_SpeechNoiseProb(NSinst_t* inst, float* probSpeechFinal, float* snr
}
int WebRtcNs_ProcessCore(NSinst_t* inst,
- short* speechFrame,
- short* speechFrameHB,
- short* outFrame,
- short* outFrameHB) {
+ float* speechFrame,
+ float* speechFrameHB,
+ float* outFrame,
+ float* outFrameHB) {
// main routine for noise reduction
int flagHB = 0;
@@ -731,8 +731,8 @@ int WebRtcNs_ProcessCore(NSinst_t* inst,
float snrPrior, currentEstimateStsa;
float tmpFloat1, tmpFloat2, tmpFloat3, probSpeech, probNonSpeech;
float gammaNoiseTmp, gammaNoiseOld;
- float noiseUpdateTmp, fTmp, dTmp;
- float fin[BLOCKL_MAX], fout[BLOCKL_MAX];
+ float noiseUpdateTmp, fTmp;
+ float fout[BLOCKL_MAX];
float winData[ANAL_BLOCKL_MAX];
float magn[HALF_ANAL_BLOCKL], noise[HALF_ANAL_BLOCKL];
float theFilter[HALF_ANAL_BLOCKL], theFilterTmp[HALF_ANAL_BLOCKL];
@@ -775,26 +775,17 @@ int WebRtcNs_ProcessCore(NSinst_t* inst,
updateParsFlag = inst->modelUpdatePars[0];
//
- //for LB do all processing
- // convert to float
- for (i = 0; i < inst->blockLen10ms; i++) {
- fin[i] = (float)speechFrame[i];
- }
// update analysis buffer for L band
memcpy(inst->dataBuf, inst->dataBuf + inst->blockLen10ms,
sizeof(float) * (inst->anaLen - inst->blockLen10ms));
- memcpy(inst->dataBuf + inst->anaLen - inst->blockLen10ms, fin,
+ memcpy(inst->dataBuf + inst->anaLen - inst->blockLen10ms, speechFrame,
sizeof(float) * inst->blockLen10ms);
if (flagHB == 1) {
- // convert to float
- for (i = 0; i < inst->blockLen10ms; i++) {
- fin[i] = (float)speechFrameHB[i];
- }
// update analysis buffer for H band
memcpy(inst->dataBufHB, inst->dataBufHB + inst->blockLen10ms,
sizeof(float) * (inst->anaLen - inst->blockLen10ms));
- memcpy(inst->dataBufHB + inst->anaLen - inst->blockLen10ms, fin,
+ memcpy(inst->dataBufHB + inst->anaLen - inst->blockLen10ms, speechFrameHB,
sizeof(float) * inst->blockLen10ms);
}
@@ -833,30 +824,16 @@ int WebRtcNs_ProcessCore(NSinst_t* inst,
inst->outBuf[i] = fout[i + inst->blockLen10ms];
}
}
- // convert to short
- for (i = 0; i < inst->blockLen10ms; i++) {
- dTmp = fout[i];
- if (dTmp < WEBRTC_SPL_WORD16_MIN) {
- dTmp = WEBRTC_SPL_WORD16_MIN;
- } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
- dTmp = WEBRTC_SPL_WORD16_MAX;
- }
- outFrame[i] = (short)dTmp;
- }
+ for (i = 0; i < inst->blockLen10ms; ++i)
+ outFrame[i] = WEBRTC_SPL_SAT(
+ WEBRTC_SPL_WORD16_MAX, fout[i], WEBRTC_SPL_WORD16_MIN);
// for time-domain gain of HB
- if (flagHB == 1) {
- for (i = 0; i < inst->blockLen10ms; i++) {
- dTmp = inst->dataBufHB[i];
- if (dTmp < WEBRTC_SPL_WORD16_MIN) {
- dTmp = WEBRTC_SPL_WORD16_MIN;
- } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
- dTmp = WEBRTC_SPL_WORD16_MAX;
- }
- outFrameHB[i] = (short)dTmp;
- }
- } // end of H band gain computation
- //
+ if (flagHB == 1)
+ for (i = 0; i < inst->blockLen10ms; ++i)
+ outFrameHB[i] = WEBRTC_SPL_SAT(
+ WEBRTC_SPL_WORD16_MAX, inst->dataBufHB[i], WEBRTC_SPL_WORD16_MIN);
+
return 0;
}
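The saturation used in the rewritten loops relies on WEBRTC_SPL_SAT from the signal processing library, which clamps its middle argument into [min, max]; as a hedged editorial note, the float-to-int16 conversion above is roughly equivalent to:

// Hypothetical helper illustrating the clamp-then-truncate behaviour.
static int16_t SaturateToInt16(float value) {
  if (value > 32767.0f)
    return 32767;
  if (value < -32768.0f)
    return -32768;
  return (int16_t)value;
}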
@@ -1239,16 +1216,9 @@ int WebRtcNs_ProcessCore(NSinst_t* inst,
inst->outLen -= inst->blockLen10ms;
}
- // convert to short
- for (i = 0; i < inst->blockLen10ms; i++) {
- dTmp = fout[i];
- if (dTmp < WEBRTC_SPL_WORD16_MIN) {
- dTmp = WEBRTC_SPL_WORD16_MIN;
- } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
- dTmp = WEBRTC_SPL_WORD16_MAX;
- }
- outFrame[i] = (short)dTmp;
- }
+ for (i = 0; i < inst->blockLen10ms; ++i)
+ outFrame[i] = WEBRTC_SPL_SAT(
+ WEBRTC_SPL_WORD16_MAX, fout[i], WEBRTC_SPL_WORD16_MIN);
// for time-domain gain of HB
if (flagHB == 1) {
@@ -1289,13 +1259,9 @@ int WebRtcNs_ProcessCore(NSinst_t* inst,
}
//apply gain
for (i = 0; i < inst->blockLen10ms; i++) {
- dTmp = gainTimeDomainHB * inst->dataBufHB[i];
- if (dTmp < WEBRTC_SPL_WORD16_MIN) {
- dTmp = WEBRTC_SPL_WORD16_MIN;
- } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
- dTmp = WEBRTC_SPL_WORD16_MAX;
- }
- outFrameHB[i] = (short)dTmp;
+ float o = gainTimeDomainHB * inst->dataBufHB[i];
+ outFrameHB[i] = WEBRTC_SPL_SAT(
+ WEBRTC_SPL_WORD16_MAX, o, WEBRTC_SPL_WORD16_MIN);
}
} // end of H band gain computation
//
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.h b/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.h
index 50daa137cf8..785239ebdac 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.h
@@ -167,10 +167,10 @@ int WebRtcNs_set_policy_core(NSinst_t* inst, int mode);
int WebRtcNs_ProcessCore(NSinst_t* inst,
- short* inFrameLow,
- short* inFrameHigh,
- short* outFrameLow,
- short* outFrameHigh);
+ float* inFrameLow,
+ float* inFrameHigh,
+ float* outFrameLow,
+ float* outFrameHigh);
#ifdef __cplusplus
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
index c7229579f4c..2c8270f568c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
@@ -70,11 +70,6 @@ static const int16_t WebRtcNsx_kLogTableFrac[256] = {
// Skip first frequency bins during estimation. (0 <= value < 64)
static const int kStartBand = 5;
-static const int16_t kIndicatorTable[17] = {
- 0, 2017, 3809, 5227, 6258, 6963, 7424, 7718,
- 7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187
-};
-
// hybrid Hanning & flat window
static const int16_t kBlocks80w128x[128] = {
0, 536, 1072, 1606, 2139, 2669, 3196, 3720, 4240, 4756, 5266,
@@ -481,7 +476,7 @@ static void PrepareSpectrumC(NsxInst_t* inst, int16_t* freq_buf) {
}
// Denormalize the real-valued signal |in|, the output from inverse FFT.
-static __inline void Denormalize(NsxInst_t* inst, int16_t* in, int factor) {
+static void DenormalizeC(NsxInst_t* inst, int16_t* in, int factor) {
int i = 0;
int32_t tmp32 = 0;
for (i = 0; i < inst->anaLen; i += 1) {
@@ -546,9 +541,9 @@ static void AnalysisUpdateC(NsxInst_t* inst,
}
// Normalize the real-valued signal |in|, the input to forward FFT.
-static __inline void NormalizeRealBuffer(NsxInst_t* inst,
- const int16_t* in,
- int16_t* out) {
+static void NormalizeRealBufferC(NsxInst_t* inst,
+ const int16_t* in,
+ int16_t* out) {
int i = 0;
for (i = 0; i < inst->anaLen; ++i) {
out[i] = WEBRTC_SPL_LSHIFT_W16(in[i], inst->normData); // Q(normData)
@@ -560,6 +555,8 @@ NoiseEstimation WebRtcNsx_NoiseEstimation;
PrepareSpectrum WebRtcNsx_PrepareSpectrum;
SynthesisUpdate WebRtcNsx_SynthesisUpdate;
AnalysisUpdate WebRtcNsx_AnalysisUpdate;
+Denormalize WebRtcNsx_Denormalize;
+NormalizeRealBuffer WebRtcNsx_NormalizeRealBuffer;
#if (defined WEBRTC_DETECT_ARM_NEON || defined WEBRTC_ARCH_ARM_NEON)
// Initialize function pointers for ARM Neon platform.
@@ -571,6 +568,19 @@ static void WebRtcNsx_InitNeon(void) {
}
#endif
+#if defined(MIPS32_LE)
+// Initialize function pointers for MIPS platform.
+static void WebRtcNsx_InitMips(void) {
+ WebRtcNsx_PrepareSpectrum = WebRtcNsx_PrepareSpectrum_mips;
+ WebRtcNsx_SynthesisUpdate = WebRtcNsx_SynthesisUpdate_mips;
+ WebRtcNsx_AnalysisUpdate = WebRtcNsx_AnalysisUpdate_mips;
+ WebRtcNsx_NormalizeRealBuffer = WebRtcNsx_NormalizeRealBuffer_mips;
+#if defined(MIPS_DSP_R1_LE)
+ WebRtcNsx_Denormalize = WebRtcNsx_Denormalize_mips;
+#endif
+}
+#endif
+
void WebRtcNsx_CalcParametricNoiseEstimate(NsxInst_t* inst,
int16_t pink_noise_exp_avg,
int32_t pink_noise_num_avg,
@@ -758,6 +768,8 @@ int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs) {
WebRtcNsx_PrepareSpectrum = PrepareSpectrumC;
WebRtcNsx_SynthesisUpdate = SynthesisUpdateC;
WebRtcNsx_AnalysisUpdate = AnalysisUpdateC;
+ WebRtcNsx_Denormalize = DenormalizeC;
+ WebRtcNsx_NormalizeRealBuffer = NormalizeRealBufferC;
#ifdef WEBRTC_DETECT_ARM_NEON
uint64_t features = WebRtc_GetCPUFeaturesARM();
@@ -768,6 +780,10 @@ int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs) {
WebRtcNsx_InitNeon();
#endif
+#if defined(MIPS32_LE)
+ WebRtcNsx_InitMips();
+#endif
+
inst->initFlag = 1;
return 0;
@@ -1169,239 +1185,6 @@ void WebRtcNsx_ComputeSpectralDifference(NsxInst_t* inst, uint16_t* magnIn) {
}
}
-// Compute speech/noise probability
-// speech/noise probability is returned in: probSpeechFinal
-//snrLocPrior is the prior SNR for each frequency (in Q11)
-//snrLocPost is the post SNR for each frequency (in Q11)
-void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst, uint16_t* nonSpeechProbFinal,
- uint32_t* priorLocSnr, uint32_t* postLocSnr) {
- uint32_t zeros, num, den, tmpU32no1, tmpU32no2, tmpU32no3;
-
- int32_t invLrtFX, indPriorFX, tmp32, tmp32no1, tmp32no2, besselTmpFX32;
- int32_t frac32, logTmp;
- int32_t logLrtTimeAvgKsumFX;
-
- int16_t indPriorFX16;
- int16_t tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac, intPart;
-
- int i, normTmp, normTmp2, nShifts;
-
- // compute feature based on average LR factor
- // this is the average over all frequencies of the smooth log LRT
- logLrtTimeAvgKsumFX = 0;
- for (i = 0; i < inst->magnLen; i++) {
- besselTmpFX32 = (int32_t)postLocSnr[i]; // Q11
- normTmp = WebRtcSpl_NormU32(postLocSnr[i]);
- num = WEBRTC_SPL_LSHIFT_U32(postLocSnr[i], normTmp); // Q(11+normTmp)
- if (normTmp > 10) {
- den = WEBRTC_SPL_LSHIFT_U32(priorLocSnr[i], normTmp - 11); // Q(normTmp)
- } else {
- den = WEBRTC_SPL_RSHIFT_U32(priorLocSnr[i], 11 - normTmp); // Q(normTmp)
- }
- if (den > 0) {
- besselTmpFX32 -= WEBRTC_SPL_UDIV(num, den); // Q11
- } else {
- besselTmpFX32 -= num; // Q11
- }
-
- // inst->logLrtTimeAvg[i] += LRT_TAVG * (besselTmp - log(snrLocPrior) - inst->logLrtTimeAvg[i]);
- // Here, LRT_TAVG = 0.5
- zeros = WebRtcSpl_NormU32(priorLocSnr[i]);
- frac32 = (int32_t)(((priorLocSnr[i] << zeros) & 0x7FFFFFFF) >> 19);
- tmp32 = WEBRTC_SPL_MUL(frac32, frac32);
- tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(tmp32, -43), 19);
- tmp32 += WEBRTC_SPL_MUL_16_16_RSFT((int16_t)frac32, 5412, 12);
- frac32 = tmp32 + 37;
- // tmp32 = log2(priorLocSnr[i])
- tmp32 = (int32_t)(((31 - zeros) << 12) + frac32) - (11 << 12); // Q12
- logTmp = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32, 178), 8); // log2(priorLocSnr[i])*log(2)
- tmp32no1 = WEBRTC_SPL_RSHIFT_W32(logTmp + inst->logLrtTimeAvgW32[i], 1); // Q12
- inst->logLrtTimeAvgW32[i] += (besselTmpFX32 - tmp32no1); // Q12
-
- logLrtTimeAvgKsumFX += inst->logLrtTimeAvgW32[i]; // Q12
- }
- inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5, inst->stages + 10); // 5 = BIN_SIZE_LRT / 2
- // done with computation of LR factor
-
- //
- //compute the indicator functions
- //
-
- // average LRT feature
- // FLOAT code
- // indicator0 = 0.5 * (tanh(widthPrior * (logLrtTimeAvgKsum - threshPrior0)) + 1.0);
- tmpIndFX = 16384; // Q14(1.0)
- tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12
- nShifts = 7 - inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5;
- //use larger width in tanh map for pause regions
- if (tmp32no1 < 0) {
- tmpIndFX = 0;
- tmp32no1 = -tmp32no1;
- //widthPrior = widthPrior * 2.0;
- nShifts++;
- }
- tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
- // compute indicator function: sigmoid map
- tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14);
- if ((tableIndex < 16) && (tableIndex >= 0)) {
- tmp16no2 = kIndicatorTable[tableIndex];
- tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
- frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14
- tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
- if (tmpIndFX == 0) {
- tmpIndFX = 8192 - tmp16no2; // Q14
- } else {
- tmpIndFX = 8192 + tmp16no2; // Q14
- }
- }
- indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14
-
- //spectral flatness feature
- if (inst->weightSpecFlat) {
- tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10
- tmpIndFX = 16384; // Q14(1.0)
- //use larger width in tanh map for pause regions
- tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10
- nShifts = 4;
- if (inst->thresholdSpecFlat < tmpU32no1) {
- tmpIndFX = 0;
- tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat;
- //widthPrior = widthPrior * 2.0;
- nShifts++;
- }
- tmp32no1 = (int32_t)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2,
- nShifts), 25); //Q14
- tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts), 25); //Q14
- // compute indicator function: sigmoid map
- // FLOAT code
- // indicator1 = 0.5 * (tanh(sgnMap * widthPrior * (threshPrior1 - tmpFloat1)) + 1.0);
- tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
- if (tableIndex < 16) {
- tmp16no2 = kIndicatorTable[tableIndex];
- tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
- frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
- tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
- if (tmpIndFX) {
- tmpIndFX = 8192 + tmp16no2; // Q14
- } else {
- tmpIndFX = 8192 - tmp16no2; // Q14
- }
- }
- indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14
- }
-
- //for template spectral-difference
- if (inst->weightSpecDiff) {
- tmpU32no1 = 0;
- if (inst->featureSpecDiff) {
- normTmp = WEBRTC_SPL_MIN(20 - inst->stages,
- WebRtcSpl_NormU32(inst->featureSpecDiff));
- tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp); // Q(normTmp-2*stages)
- tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy, 20 - inst->stages
- - normTmp);
- if (tmpU32no2 > 0) {
- // Q(20 - inst->stages)
- tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2);
- } else {
- tmpU32no1 = (uint32_t)(0x7fffffff);
- }
- }
- tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff, 17), 25);
- tmpU32no2 = tmpU32no1 - tmpU32no3;
- nShifts = 1;
- tmpIndFX = 16384; // Q14(1.0)
- //use larger width in tanh map for pause regions
- if (tmpU32no2 & 0x80000000) {
- tmpIndFX = 0;
- tmpU32no2 = tmpU32no3 - tmpU32no1;
- //widthPrior = widthPrior * 2.0;
- nShifts--;
- }
- tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts);
- // compute indicator function: sigmoid map
- /* FLOAT code
- indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
- */
- tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
- if (tableIndex < 16) {
- tmp16no2 = kIndicatorTable[tableIndex];
- tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
- frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
- tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
- tmp16no1, frac, 14);
- if (tmpIndFX) {
- tmpIndFX = 8192 + tmp16no2;
- } else {
- tmpIndFX = 8192 - tmp16no2;
- }
- }
- indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14
- }
-
- //combine the indicator function with the feature weights
- // FLOAT code
- // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 * indicator1 + weightIndPrior2 * indicator2);
- indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14
- // done with computing indicator function
-
- //compute the prior probability
- // FLOAT code
- // inst->priorNonSpeechProb += PRIOR_UPDATE * (indPriorNonSpeech - inst->priorNonSpeechProb);
- tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14
- inst->priorNonSpeechProb += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
- PRIOR_UPDATE_Q14, tmp16, 14); // Q14
-
- //final speech probability: combine prior model with LR factor:
-
- memset(nonSpeechProbFinal, 0, sizeof(uint16_t) * inst->magnLen);
-
- if (inst->priorNonSpeechProb > 0) {
- for (i = 0; i < inst->magnLen; i++) {
- // FLOAT code
- // invLrt = exp(inst->logLrtTimeAvg[i]);
- // invLrt = inst->priorSpeechProb * invLrt;
- // nonSpeechProbFinal[i] = (1.0 - inst->priorSpeechProb) / (1.0 - inst->priorSpeechProb + invLrt);
- // invLrt = (1.0 - inst->priorNonSpeechProb) * invLrt;
- // nonSpeechProbFinal[i] = inst->priorNonSpeechProb / (inst->priorNonSpeechProb + invLrt);
- if (inst->logLrtTimeAvgW32[i] < 65300) {
- tmp32no1 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(inst->logLrtTimeAvgW32[i], 23637),
- 14); // Q12
- intPart = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 12);
- if (intPart < -8) {
- intPart = -8;
- }
- frac = (int16_t)(tmp32no1 & 0x00000fff); // Q12
-
- // Quadratic approximation of 2^frac
- tmp32no2 = WEBRTC_SPL_RSHIFT_W32(frac * frac * 44, 19); // Q12
- tmp32no2 += WEBRTC_SPL_MUL_16_16_RSFT(frac, 84, 7); // Q12
- invLrtFX = WEBRTC_SPL_LSHIFT_W32(1, 8 + intPart)
- + WEBRTC_SPL_SHIFT_W32(tmp32no2, intPart - 4); // Q8
-
- normTmp = WebRtcSpl_NormW32(invLrtFX);
- normTmp2 = WebRtcSpl_NormW16((16384 - inst->priorNonSpeechProb));
- if (normTmp + normTmp2 >= 7) {
- if (normTmp + normTmp2 < 15) {
- invLrtFX = WEBRTC_SPL_RSHIFT_W32(invLrtFX, 15 - normTmp2 - normTmp);
- // Q(normTmp+normTmp2-7)
- tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, (16384 - inst->priorNonSpeechProb));
- // Q(normTmp+normTmp2+7)
- invLrtFX = WEBRTC_SPL_SHIFT_W32(tmp32no1, 7 - normTmp - normTmp2); // Q14
- } else {
- tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, (16384 - inst->priorNonSpeechProb)); // Q22
- invLrtFX = WEBRTC_SPL_RSHIFT_W32(tmp32no1, 8); // Q14
- }
-
- tmp32no1 = WEBRTC_SPL_LSHIFT_W32((int32_t)inst->priorNonSpeechProb, 8); // Q22
-
- nonSpeechProbFinal[i] = (uint16_t)WEBRTC_SPL_DIV(tmp32no1,
- (int32_t)inst->priorNonSpeechProb + invLrtFX); // Q8
- }
- }
- }
- }
-}
-
// Transform input (speechFrame) to frequency domain magnitude (magnU16)
void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, uint16_t* magnU16) {
@@ -1461,7 +1244,7 @@ void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, uint16_t* magnU
right_shifts_in_magnU16 = WEBRTC_SPL_MAX(right_shifts_in_magnU16, 0);
// create realImag as winData interleaved with zeros (= imag. part), normalize it
- NormalizeRealBuffer(inst, winData, realImag);
+ WebRtcNsx_NormalizeRealBuffer(inst, winData, realImag);
// FFT output will be in winData[].
WebRtcSpl_RealForwardFFT(inst->real_fft, realImag, winData);
@@ -1624,9 +1407,9 @@ void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, uint16_t* magnU
tmpU32no1 = WEBRTC_SPL_RSHIFT_U32((uint32_t)sum_log_i_log_magn, 12); // Q5
// Shift the largest value of sum_log_i and tmp32no3 before multiplication
- tmp_u16 = WEBRTC_SPL_LSHIFT_U16((uint16_t)sum_log_i, 1); // Q6
+ tmp_u16 = ((uint16_t)sum_log_i << 1); // Q6
if ((uint32_t)sum_log_i > tmpU32no1) {
- tmp_u16 = WEBRTC_SPL_RSHIFT_U16(tmp_u16, zeros);
+ tmp_u16 >>= zeros;
} else {
tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, zeros);
}
@@ -1693,7 +1476,7 @@ void WebRtcNsx_DataSynthesis(NsxInst_t* inst, short* outFrame) {
// Inverse FFT output will be in rfft_out[].
outCIFFT = WebRtcSpl_RealInverseFFT(inst->real_fft, realImag, rfft_out);
- Denormalize(inst, rfft_out, outCIFFT);
+ WebRtcNsx_Denormalize(inst, rfft_out, outCIFFT);
//scale factor: only do it after END_STARTUP_LONG time
gainFactor = 8192; // 8192 = Q13(1.0)
@@ -2288,8 +2071,8 @@ int WebRtcNsx_ProcessCore(NsxInst_t* inst, short* speechFrame, short* speechFram
tmpU16no1 += nonSpeechProbFinal[i]; // Q8
tmpU32no1 += (uint32_t)(inst->noiseSupFilter[i]); // Q14
}
- avgProbSpeechHB = (int16_t)(4096
- - WEBRTC_SPL_RSHIFT_U16(tmpU16no1, inst->stages - 7)); // Q12
+ assert(inst->stages >= 7);
+ avgProbSpeechHB = (4096 - (tmpU16no1 >> (inst->stages - 7))); // Q12
avgFilterGainHB = (int16_t)WEBRTC_SPL_RSHIFT_U32(
tmpU32no1, inst->stages - 3); // Q14
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
index 1ad369ffbeb..5b3c5e78f4e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
@@ -201,6 +201,23 @@ typedef void (*AnalysisUpdate)(NsxInst_t* inst,
int16_t* new_speech);
extern AnalysisUpdate WebRtcNsx_AnalysisUpdate;
+// Denormalize the real-valued signal |in|, the output from inverse FFT.
+typedef void (*Denormalize) (NsxInst_t* inst, int16_t* in, int factor);
+extern Denormalize WebRtcNsx_Denormalize;
+
+// Normalize the real-valued signal |in|, the input to forward FFT.
+typedef void (*NormalizeRealBuffer) (NsxInst_t* inst,
+ const int16_t* in,
+ int16_t* out);
+extern NormalizeRealBuffer WebRtcNsx_NormalizeRealBuffer;
+
+// Compute speech/noise probability.
+// Intended to be private.
+void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
+ uint16_t* nonSpeechProbFinal,
+ uint32_t* priorLocSnr,
+ uint32_t* postLocSnr);
+
#if (defined WEBRTC_DETECT_ARM_NEON) || defined (WEBRTC_ARCH_ARM_NEON)
// For the above function pointers, functions for generic platforms are declared
// and defined as static in file nsx_core.c, while those for ARM Neon platforms
@@ -218,6 +235,26 @@ void WebRtcNsx_AnalysisUpdateNeon(NsxInst_t* inst,
void WebRtcNsx_PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buff);
#endif
+#if defined(MIPS32_LE)
+// For the above function pointers, functions for generic platforms are declared
+// and defined as static in file nsx_core.c, while those for MIPS platforms
+// are declared below and defined in file nsx_core_mips.c.
+void WebRtcNsx_SynthesisUpdate_mips(NsxInst_t* inst,
+ int16_t* out_frame,
+ int16_t gain_factor);
+void WebRtcNsx_AnalysisUpdate_mips(NsxInst_t* inst,
+ int16_t* out,
+ int16_t* new_speech);
+void WebRtcNsx_PrepareSpectrum_mips(NsxInst_t* inst, int16_t* freq_buff);
+void WebRtcNsx_NormalizeRealBuffer_mips(NsxInst_t* inst,
+ const int16_t* in,
+ int16_t* out);
+#if defined(MIPS_DSP_R1_LE)
+void WebRtcNsx_Denormalize_mips(NsxInst_t* inst, int16_t* in, int factor);
+#endif
+
+#endif
+
#ifdef __cplusplus
}
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c
new file mode 100644
index 00000000000..452b96e77b0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c
@@ -0,0 +1,273 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
+#include "webrtc/modules/audio_processing/ns/nsx_core.h"
+
+static const int16_t kIndicatorTable[17] = {
+ 0, 2017, 3809, 5227, 6258, 6963, 7424, 7718,
+ 7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187
+};
+
+// Compute speech/noise probability
+// speech/noise probability is returned in: probSpeechFinal
+//snrLocPrior is the prior SNR for each frequency (in Q11)
+//snrLocPost is the post SNR for each frequency (in Q11)
+void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
+ uint16_t* nonSpeechProbFinal,
+ uint32_t* priorLocSnr,
+ uint32_t* postLocSnr) {
+
+ uint32_t zeros, num, den, tmpU32no1, tmpU32no2, tmpU32no3;
+ int32_t invLrtFX, indPriorFX, tmp32, tmp32no1, tmp32no2, besselTmpFX32;
+ int32_t frac32, logTmp;
+ int32_t logLrtTimeAvgKsumFX;
+ int16_t indPriorFX16;
+ int16_t tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac, intPart;
+ int i, normTmp, normTmp2, nShifts;
+
+ // compute feature based on average LR factor
+ // this is the average over all frequencies of the smooth log LRT
+ logLrtTimeAvgKsumFX = 0;
+ for (i = 0; i < inst->magnLen; i++) {
+ besselTmpFX32 = (int32_t)postLocSnr[i]; // Q11
+ normTmp = WebRtcSpl_NormU32(postLocSnr[i]);
+ num = WEBRTC_SPL_LSHIFT_U32(postLocSnr[i], normTmp); // Q(11+normTmp)
+ if (normTmp > 10) {
+ den = WEBRTC_SPL_LSHIFT_U32(priorLocSnr[i], normTmp - 11); // Q(normTmp)
+ } else {
+ den = WEBRTC_SPL_RSHIFT_U32(priorLocSnr[i], 11 - normTmp); // Q(normTmp)
+ }
+ if (den > 0) {
+ besselTmpFX32 -= WEBRTC_SPL_UDIV(num, den); // Q11
+ } else {
+ besselTmpFX32 -= num; // Q11
+ }
+
+ // inst->logLrtTimeAvg[i] += LRT_TAVG * (besselTmp - log(snrLocPrior)
+ // - inst->logLrtTimeAvg[i]);
+ // Here, LRT_TAVG = 0.5
+ zeros = WebRtcSpl_NormU32(priorLocSnr[i]);
+ frac32 = (int32_t)(((priorLocSnr[i] << zeros) & 0x7FFFFFFF) >> 19);
+ tmp32 = WEBRTC_SPL_MUL(frac32, frac32);
+ tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(tmp32, -43), 19);
+ tmp32 += WEBRTC_SPL_MUL_16_16_RSFT((int16_t)frac32, 5412, 12);
+ frac32 = tmp32 + 37;
+ // tmp32 = log2(priorLocSnr[i])
+ tmp32 = (int32_t)(((31 - zeros) << 12) + frac32) - (11 << 12); // Q12
+ logTmp = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32, 178), 8);
+ // log2(priorLocSnr[i])*log(2)
+ tmp32no1 = WEBRTC_SPL_RSHIFT_W32(logTmp + inst->logLrtTimeAvgW32[i], 1);
+ // Q12
+ inst->logLrtTimeAvgW32[i] += (besselTmpFX32 - tmp32no1); // Q12
+
+ logLrtTimeAvgKsumFX += inst->logLrtTimeAvgW32[i]; // Q12
+ }
+ inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5,
+ inst->stages + 10);
+ // 5 = BIN_SIZE_LRT / 2
+ // done with computation of LR factor
+
+ //
+ //compute the indicator functions
+ //
+
+ // average LRT feature
+ // FLOAT code
+ // indicator0 = 0.5 * (tanh(widthPrior *
+ // (logLrtTimeAvgKsum - threshPrior0)) + 1.0);
+ tmpIndFX = 16384; // Q14(1.0)
+ tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12
+ nShifts = 7 - inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5;
+ //use larger width in tanh map for pause regions
+ if (tmp32no1 < 0) {
+ tmpIndFX = 0;
+ tmp32no1 = -tmp32no1;
+ //widthPrior = widthPrior * 2.0;
+ nShifts++;
+ }
+ tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
+ // compute indicator function: sigmoid map
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14);
+ if ((tableIndex < 16) && (tableIndex >= 0)) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
+ if (tmpIndFX == 0) {
+ tmpIndFX = 8192 - tmp16no2; // Q14
+ } else {
+ tmpIndFX = 8192 + tmp16no2; // Q14
+ }
+ }
+ indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14
+
+ //spectral flatness feature
+ if (inst->weightSpecFlat) {
+ tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10
+ tmpIndFX = 16384; // Q14(1.0)
+ //use larger width in tanh map for pause regions
+ tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10
+ nShifts = 4;
+ if (inst->thresholdSpecFlat < tmpU32no1) {
+ tmpIndFX = 0;
+ tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat;
+ //widthPrior = widthPrior * 2.0;
+ nShifts++;
+ }
+ tmp32no1 = (int32_t)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2,
+ nShifts), 25);
+ //Q14
+ tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts),
+ 25); //Q14
+ // compute indicator function: sigmoid map
+ // FLOAT code
+ // indicator1 = 0.5 * (tanh(sgnMap * widthPrior *
+ // (threshPrior1 - tmpFloat1)) + 1.0);
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
+ if (tableIndex < 16) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
+ if (tmpIndFX) {
+ tmpIndFX = 8192 + tmp16no2; // Q14
+ } else {
+ tmpIndFX = 8192 - tmp16no2; // Q14
+ }
+ }
+ indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14
+ }
+
+ //for template spectral-difference
+ if (inst->weightSpecDiff) {
+ tmpU32no1 = 0;
+ if (inst->featureSpecDiff) {
+ normTmp = WEBRTC_SPL_MIN(20 - inst->stages,
+ WebRtcSpl_NormU32(inst->featureSpecDiff));
+ tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp);
+ // Q(normTmp-2*stages)
+ tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy,
+ 20 - inst->stages - normTmp);
+ if (tmpU32no2 > 0) {
+ // Q(20 - inst->stages)
+ tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2);
+ } else {
+ tmpU32no1 = (uint32_t)(0x7fffffff);
+ }
+ }
+ tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff,
+ 17),
+ 25);
+ tmpU32no2 = tmpU32no1 - tmpU32no3;
+ nShifts = 1;
+ tmpIndFX = 16384; // Q14(1.0)
+ //use larger width in tanh map for pause regions
+ if (tmpU32no2 & 0x80000000) {
+ tmpIndFX = 0;
+ tmpU32no2 = tmpU32no3 - tmpU32no1;
+ //widthPrior = widthPrior * 2.0;
+ nShifts--;
+ }
+ tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts);
+ // compute indicator function: sigmoid map
+ /* FLOAT code
+ indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
+ */
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
+ if (tableIndex < 16) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+ tmp16no1, frac, 14);
+ if (tmpIndFX) {
+ tmpIndFX = 8192 + tmp16no2;
+ } else {
+ tmpIndFX = 8192 - tmp16no2;
+ }
+ }
+ indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14
+ }
+
+ //combine the indicator function with the feature weights
+ // FLOAT code
+ // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 *
+ // indicator1 + weightIndPrior2 * indicator2);
+ indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14
+ // done with computing indicator function
+
+ //compute the prior probability
+ // FLOAT code
+ // inst->priorNonSpeechProb += PRIOR_UPDATE *
+ // (indPriorNonSpeech - inst->priorNonSpeechProb);
+ tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14
+ inst->priorNonSpeechProb += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
+ PRIOR_UPDATE_Q14, tmp16, 14); // Q14
+
+ //final speech probability: combine prior model with LR factor:
+
+ memset(nonSpeechProbFinal, 0, sizeof(uint16_t) * inst->magnLen);
+
+ if (inst->priorNonSpeechProb > 0) {
+ for (i = 0; i < inst->magnLen; i++) {
+ // FLOAT code
+ // invLrt = exp(inst->logLrtTimeAvg[i]);
+ // invLrt = inst->priorSpeechProb * invLrt;
+ // nonSpeechProbFinal[i] = (1.0 - inst->priorSpeechProb) /
+ // (1.0 - inst->priorSpeechProb + invLrt);
+ // invLrt = (1.0 - inst->priorNonSpeechProb) * invLrt;
+ // nonSpeechProbFinal[i] = inst->priorNonSpeechProb /
+ // (inst->priorNonSpeechProb + invLrt);
+ if (inst->logLrtTimeAvgW32[i] < 65300) {
+ tmp32no1 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(
+ inst->logLrtTimeAvgW32[i], 23637),
+ 14); // Q12
+ intPart = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 12);
+ if (intPart < -8) {
+ intPart = -8;
+ }
+ frac = (int16_t)(tmp32no1 & 0x00000fff); // Q12
+
+ // Quadratic approximation of 2^frac
+ tmp32no2 = WEBRTC_SPL_RSHIFT_W32(frac * frac * 44, 19); // Q12
+ tmp32no2 += WEBRTC_SPL_MUL_16_16_RSFT(frac, 84, 7); // Q12
+ invLrtFX = WEBRTC_SPL_LSHIFT_W32(1, 8 + intPart)
+ + WEBRTC_SPL_SHIFT_W32(tmp32no2, intPart - 4); // Q8
+
+ normTmp = WebRtcSpl_NormW32(invLrtFX);
+ normTmp2 = WebRtcSpl_NormW16((16384 - inst->priorNonSpeechProb));
+ if (normTmp + normTmp2 >= 7) {
+ if (normTmp + normTmp2 < 15) {
+ invLrtFX = WEBRTC_SPL_RSHIFT_W32(invLrtFX, 15 - normTmp2 - normTmp);
+ // Q(normTmp+normTmp2-7)
+ tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX,
+ (16384 - inst->priorNonSpeechProb));
+ // Q(normTmp+normTmp2+7)
+ invLrtFX = WEBRTC_SPL_SHIFT_W32(tmp32no1, 7 - normTmp - normTmp2);
+ // Q14
+ } else {
+ tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX,
+ (16384 - inst->priorNonSpeechProb));
+ // Q22
+ invLrtFX = WEBRTC_SPL_RSHIFT_W32(tmp32no1, 8); // Q14
+ }
+
+ tmp32no1 = WEBRTC_SPL_LSHIFT_W32((int32_t)inst->priorNonSpeechProb,
+ 8); // Q22
+
+ nonSpeechProbFinal[i] = (uint16_t)WEBRTC_SPL_DIV(tmp32no1,
+ (int32_t)inst->priorNonSpeechProb + invLrtFX); // Q8
+ }
+ }
+ }
+ }
+}
+
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c
new file mode 100644
index 00000000000..ccb0c376324
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c
@@ -0,0 +1,1008 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
+#include "webrtc/modules/audio_processing/ns/nsx_core.h"
+
+static const int16_t kIndicatorTable[17] = {
+ 0, 2017, 3809, 5227, 6258, 6963, 7424, 7718,
+ 7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187
+};
+
+// Compute speech/noise probability
+// speech/noise probability is returned in: probSpeechFinal
+//snrLocPrior is the prior SNR for each frequency (in Q11)
+//snrLocPost is the post SNR for each frequency (in Q11)
+void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
+ uint16_t* nonSpeechProbFinal,
+ uint32_t* priorLocSnr,
+ uint32_t* postLocSnr) {
+
+ uint32_t tmpU32no1, tmpU32no2, tmpU32no3;
+ int32_t indPriorFX, tmp32no1;
+ int32_t logLrtTimeAvgKsumFX;
+ int16_t indPriorFX16;
+ int16_t tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac;
+ int i, normTmp, nShifts;
+
+ int32_t r0, r1, r2, r3, r4, r5, r6, r7, r8, r9;
+ int32_t const_max = 0x7fffffff;
+ int32_t const_neg43 = -43;
+ int32_t const_5412 = 5412;
+ int32_t const_11rsh12 = (11 << 12);
+ int32_t const_178 = 178;
+
+
+ // compute feature based on average LR factor
+ // this is the average over all frequencies of the smooth log LRT
+ logLrtTimeAvgKsumFX = 0;
+ for (i = 0; i < inst->magnLen; i++) {
+ r0 = postLocSnr[i]; // Q11
+ r1 = priorLocSnr[i];
+ r2 = inst->logLrtTimeAvgW32[i];
+
+ __asm __volatile(
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "clz %[r3], %[r0] \n\t"
+ "clz %[r5], %[r1] \n\t"
+ "slti %[r4], %[r3], 32 \n\t"
+ "slti %[r6], %[r5], 32 \n\t"
+ "movz %[r3], $0, %[r4] \n\t"
+ "movz %[r5], $0, %[r6] \n\t"
+ "slti %[r4], %[r3], 11 \n\t"
+ "addiu %[r6], %[r3], -11 \n\t"
+ "neg %[r7], %[r6] \n\t"
+ "sllv %[r6], %[r1], %[r6] \n\t"
+ "srav %[r7], %[r1], %[r7] \n\t"
+ "movn %[r6], %[r7], %[r4] \n\t"
+ "sllv %[r1], %[r1], %[r5] \n\t"
+ "and %[r1], %[r1], %[const_max] \n\t"
+ "sra %[r1], %[r1], 19 \n\t"
+ "mul %[r7], %[r1], %[r1] \n\t"
+ "sllv %[r3], %[r0], %[r3] \n\t"
+ "divu %[r8], %[r3], %[r6] \n\t"
+ "slti %[r6], %[r6], 1 \n\t"
+ "mul %[r7], %[r7], %[const_neg43] \n\t"
+ "sra %[r7], %[r7], 19 \n\t"
+ "movz %[r3], %[r8], %[r6] \n\t"
+ "subu %[r0], %[r0], %[r3] \n\t"
+ "mul %[r1], %[r1], %[const_5412] \n\t"
+ "sra %[r1], %[r1], 12 \n\t"
+ "addu %[r7], %[r7], %[r1] \n\t"
+ "addiu %[r1], %[r7], 37 \n\t"
+ "addiu %[r5], %[r5], -31 \n\t"
+ "neg %[r5], %[r5] \n\t"
+ "sll %[r5], %[r5], 12 \n\t"
+ "addu %[r5], %[r5], %[r1] \n\t"
+ "subu %[r7], %[r5], %[const_11rsh12] \n\t"
+ "mul %[r7], %[r7], %[const_178] \n\t"
+ "sra %[r7], %[r7], 8 \n\t"
+ "addu %[r7], %[r7], %[r2] \n\t"
+ "sra %[r7], %[r7], 1 \n\t"
+ "subu %[r2], %[r2], %[r7] \n\t"
+ "addu %[r2], %[r2], %[r0] \n\t"
+ ".set pop \n\t"
+ : [r0] "+r" (r0), [r1] "+r" (r1), [r2] "+r" (r2),
+ [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5),
+ [r6] "=&r" (r6), [r7] "=&r" (r7), [r8] "=&r" (r8)
+ : [const_max] "r" (const_max), [const_neg43] "r" (const_neg43),
+ [const_5412] "r" (const_5412), [const_11rsh12] "r" (const_11rsh12),
+ [const_178] "r" (const_178)
+ : "hi", "lo"
+ );
+ inst->logLrtTimeAvgW32[i] = r2;
+ logLrtTimeAvgKsumFX += r2;
+ }
+
+ inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5,
+ inst->stages + 10);
+ // 5 = BIN_SIZE_LRT / 2
+ // done with computation of LR factor
+
+ //
+ // compute the indicator functions
+ //
+
+ // average LRT feature
+ // FLOAT code
+ // indicator0 = 0.5 * (tanh(widthPrior *
+ // (logLrtTimeAvgKsum - threshPrior0)) + 1.0);
+ tmpIndFX = 16384; // Q14(1.0)
+ tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12
+ nShifts = 7 - inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5;
+ //use larger width in tanh map for pause regions
+ if (tmp32no1 < 0) {
+ tmpIndFX = 0;
+ tmp32no1 = -tmp32no1;
+ //widthPrior = widthPrior * 2.0;
+ nShifts++;
+ }
+ tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
+ // compute indicator function: sigmoid map
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14);
+ if ((tableIndex < 16) && (tableIndex >= 0)) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
+ if (tmpIndFX == 0) {
+ tmpIndFX = 8192 - tmp16no2; // Q14
+ } else {
+ tmpIndFX = 8192 + tmp16no2; // Q14
+ }
+ }
+ indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14
+
+ //spectral flatness feature
+ if (inst->weightSpecFlat) {
+ tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10
+ tmpIndFX = 16384; // Q14(1.0)
+ //use larger width in tanh map for pause regions
+ tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10
+ nShifts = 4;
+ if (inst->thresholdSpecFlat < tmpU32no1) {
+ tmpIndFX = 0;
+ tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat;
+ //widthPrior = widthPrior * 2.0;
+ nShifts++;
+ }
+ tmp32no1 = (int32_t)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2,
+ nShifts), 25);
+ //Q14
+ tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts),
+ 25); //Q14
+ // compute indicator function: sigmoid map
+ // FLOAT code
+ // indicator1 = 0.5 * (tanh(sgnMap * widthPrior *
+ // (threshPrior1 - tmpFloat1)) + 1.0);
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
+ if (tableIndex < 16) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
+ if (tmpIndFX) {
+ tmpIndFX = 8192 + tmp16no2; // Q14
+ } else {
+ tmpIndFX = 8192 - tmp16no2; // Q14
+ }
+ }
+ indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14
+ }
+
+ //for template spectral-difference
+ if (inst->weightSpecDiff) {
+ tmpU32no1 = 0;
+ if (inst->featureSpecDiff) {
+ normTmp = WEBRTC_SPL_MIN(20 - inst->stages,
+ WebRtcSpl_NormU32(inst->featureSpecDiff));
+ tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp);
+ // Q(normTmp-2*stages)
+ tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy,
+ 20 - inst->stages - normTmp);
+ if (tmpU32no2 > 0) {
+ // Q(20 - inst->stages)
+ tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2);
+ } else {
+ tmpU32no1 = (uint32_t)(0x7fffffff);
+ }
+ }
+ tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff,
+ 17),
+ 25);
+ tmpU32no2 = tmpU32no1 - tmpU32no3;
+ nShifts = 1;
+ tmpIndFX = 16384; // Q14(1.0)
+ //use larger width in tanh map for pause regions
+ if (tmpU32no2 & 0x80000000) {
+ tmpIndFX = 0;
+ tmpU32no2 = tmpU32no3 - tmpU32no1;
+ //widthPrior = widthPrior * 2.0;
+ nShifts--;
+ }
+ tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts);
+ // compute indicator function: sigmoid map
+ /* FLOAT code
+ indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
+ */
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
+ if (tableIndex < 16) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+ tmp16no1, frac, 14);
+ if (tmpIndFX) {
+ tmpIndFX = 8192 + tmp16no2;
+ } else {
+ tmpIndFX = 8192 - tmp16no2;
+ }
+ }
+ indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14
+ }
+
+ //combine the indicator function with the feature weights
+ // FLOAT code
+ // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 *
+ // indicator1 + weightIndPrior2 * indicator2);
+ indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14
+ // done with computing indicator function
+
+ //compute the prior probability
+ // FLOAT code
+ // inst->priorNonSpeechProb += PRIOR_UPDATE *
+ // (indPriorNonSpeech - inst->priorNonSpeechProb);
+ tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14
+ inst->priorNonSpeechProb += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
+ PRIOR_UPDATE_Q14, tmp16, 14); // Q14
+
+ //final speech probability: combine prior model with LR factor:
+
+ memset(nonSpeechProbFinal, 0, sizeof(uint16_t) * inst->magnLen);
+
+ if (inst->priorNonSpeechProb > 0) {
+ r0 = inst->priorNonSpeechProb;
+ r1 = 16384 - r0;
+ int32_t const_23637 = 23637;
+ int32_t const_44 = 44;
+ int32_t const_84 = 84;
+ int32_t const_1 = 1;
+ int32_t const_neg8 = -8;
+ for (i = 0; i < inst->magnLen; i++) {
+ r2 = inst->logLrtTimeAvgW32[i];
+ if (r2 < 65300) {
+ __asm __volatile(
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "mul %[r2], %[r2], %[const_23637] \n\t"
+ "sll %[r6], %[r1], 16 \n\t"
+ "clz %[r7], %[r6] \n\t"
+ "clo %[r8], %[r6] \n\t"
+ "slt %[r9], %[r6], $0 \n\t"
+ "movn %[r7], %[r8], %[r9] \n\t"
+ "sra %[r2], %[r2], 14 \n\t"
+ "andi %[r3], %[r2], 0xfff \n\t"
+ "mul %[r4], %[r3], %[r3] \n\t"
+ "mul %[r3], %[r3], %[const_84] \n\t"
+ "sra %[r2], %[r2], 12 \n\t"
+ "slt %[r5], %[r2], %[const_neg8] \n\t"
+ "movn %[r2], %[const_neg8], %[r5] \n\t"
+ "mul %[r4], %[r4], %[const_44] \n\t"
+ "sra %[r3], %[r3], 7 \n\t"
+ "addiu %[r7], %[r7], -1 \n\t"
+ "slti %[r9], %[r7], 31 \n\t"
+ "movz %[r7], $0, %[r9] \n\t"
+ "sra %[r4], %[r4], 19 \n\t"
+ "addu %[r4], %[r4], %[r3] \n\t"
+ "addiu %[r3], %[r2], 8 \n\t"
+ "addiu %[r2], %[r2], -4 \n\t"
+ "neg %[r5], %[r2] \n\t"
+ "sllv %[r6], %[r4], %[r2] \n\t"
+ "srav %[r5], %[r4], %[r5] \n\t"
+ "slt %[r2], %[r2], $0 \n\t"
+ "movn %[r6], %[r5], %[r2] \n\t"
+ "sllv %[r3], %[const_1], %[r3] \n\t"
+ "addu %[r2], %[r3], %[r6] \n\t"
+ "clz %[r4], %[r2] \n\t"
+ "clo %[r5], %[r2] \n\t"
+ "slt %[r8], %[r2], $0 \n\t"
+ "movn %[r4], %[r5], %[r8] \n\t"
+ "addiu %[r4], %[r4], -1 \n\t"
+ "slt %[r5], $0, %[r2] \n\t"
+ "or %[r5], %[r5], %[r7] \n\t"
+ "movz %[r4], $0, %[r5] \n\t"
+ "addiu %[r6], %[r7], -7 \n\t"
+ "addu %[r6], %[r6], %[r4] \n\t"
+ "bltz %[r6], 1f \n\t"
+ " nop \n\t"
+ "addiu %[r4], %[r6], -8 \n\t"
+ "neg %[r3], %[r4] \n\t"
+ "srav %[r5], %[r2], %[r3] \n\t"
+ "mul %[r5], %[r5], %[r1] \n\t"
+ "mul %[r2], %[r2], %[r1] \n\t"
+ "slt %[r4], %[r4], $0 \n\t"
+ "srav %[r5], %[r5], %[r6] \n\t"
+ "sra %[r2], %[r2], 8 \n\t"
+ "movn %[r2], %[r5], %[r4] \n\t"
+ "sll %[r3], %[r0], 8 \n\t"
+ "addu %[r2], %[r0], %[r2] \n\t"
+ "divu %[r3], %[r3], %[r2] \n\t"
+ "1: \n\t"
+ ".set pop \n\t"
+ : [r2] "+r" (r2), [r3] "=&r" (r3), [r4] "=&r" (r4),
+ [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7),
+ [r8] "=&r" (r8), [r9] "=&r" (r9)
+ : [r0] "r" (r0), [r1] "r" (r1), [const_23637] "r" (const_23637),
+ [const_neg8] "r" (const_neg8), [const_84] "r" (const_84),
+ [const_1] "r" (const_1), [const_44] "r" (const_44)
+ : "hi", "lo"
+ );
+ nonSpeechProbFinal[i] = r3;
+ }
+ }
+ }
+}
+
+// Update analysis buffer for lower band, and window data before FFT.
+void WebRtcNsx_AnalysisUpdate_mips(NsxInst_t* inst,
+ int16_t* out,
+ int16_t* new_speech) {
+
+ int iters, after;
+ int anaLen = inst->anaLen;
+ int *window = (int*)inst->window;
+ int *anaBuf = (int*)inst->analysisBuffer;
+ int *outBuf = (int*)out;
+ int r0, r1, r2, r3, r4, r5, r6, r7;
+#if defined(MIPS_DSP_R1_LE)
+ int r8;
+#endif
+
+ // For lower band update analysis buffer.
+ WEBRTC_SPL_MEMCPY_W16(inst->analysisBuffer,
+ inst->analysisBuffer + inst->blockLen10ms,
+ inst->anaLen - inst->blockLen10ms);
+ WEBRTC_SPL_MEMCPY_W16(inst->analysisBuffer
+ + inst->anaLen - inst->blockLen10ms, new_speech, inst->blockLen10ms);
+
+ // Window data before FFT.
+#if defined(MIPS_DSP_R1_LE)
+ __asm __volatile(
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "sra %[iters], %[anaLen], 3 \n\t"
+ "1: \n\t"
+ "blez %[iters], 2f \n\t"
+ " nop \n\t"
+ "lw %[r0], 0(%[window]) \n\t"
+ "lw %[r1], 0(%[anaBuf]) \n\t"
+ "lw %[r2], 4(%[window]) \n\t"
+ "lw %[r3], 4(%[anaBuf]) \n\t"
+ "lw %[r4], 8(%[window]) \n\t"
+ "lw %[r5], 8(%[anaBuf]) \n\t"
+ "lw %[r6], 12(%[window]) \n\t"
+ "lw %[r7], 12(%[anaBuf]) \n\t"
+ "muleq_s.w.phl %[r8], %[r0], %[r1] \n\t"
+ "muleq_s.w.phr %[r0], %[r0], %[r1] \n\t"
+ "muleq_s.w.phl %[r1], %[r2], %[r3] \n\t"
+ "muleq_s.w.phr %[r2], %[r2], %[r3] \n\t"
+ "muleq_s.w.phl %[r3], %[r4], %[r5] \n\t"
+ "muleq_s.w.phr %[r4], %[r4], %[r5] \n\t"
+ "muleq_s.w.phl %[r5], %[r6], %[r7] \n\t"
+ "muleq_s.w.phr %[r6], %[r6], %[r7] \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "precr_sra_r.ph.w %[r8], %[r0], 15 \n\t"
+ "precr_sra_r.ph.w %[r1], %[r2], 15 \n\t"
+ "precr_sra_r.ph.w %[r3], %[r4], 15 \n\t"
+ "precr_sra_r.ph.w %[r5], %[r6], 15 \n\t"
+ "sw %[r8], 0(%[outBuf]) \n\t"
+ "sw %[r1], 4(%[outBuf]) \n\t"
+ "sw %[r3], 8(%[outBuf]) \n\t"
+ "sw %[r5], 12(%[outBuf]) \n\t"
+#else
+ "shra_r.w %[r8], %[r8], 15 \n\t"
+ "shra_r.w %[r0], %[r0], 15 \n\t"
+ "shra_r.w %[r1], %[r1], 15 \n\t"
+ "shra_r.w %[r2], %[r2], 15 \n\t"
+ "shra_r.w %[r3], %[r3], 15 \n\t"
+ "shra_r.w %[r4], %[r4], 15 \n\t"
+ "shra_r.w %[r5], %[r5], 15 \n\t"
+ "shra_r.w %[r6], %[r6], 15 \n\t"
+ "sll %[r0], %[r0], 16 \n\t"
+ "sll %[r2], %[r2], 16 \n\t"
+ "sll %[r4], %[r4], 16 \n\t"
+ "sll %[r6], %[r6], 16 \n\t"
+ "packrl.ph %[r0], %[r8], %[r0] \n\t"
+ "packrl.ph %[r2], %[r1], %[r2] \n\t"
+ "packrl.ph %[r4], %[r3], %[r4] \n\t"
+ "packrl.ph %[r6], %[r5], %[r6] \n\t"
+ "sw %[r0], 0(%[outBuf]) \n\t"
+ "sw %[r2], 4(%[outBuf]) \n\t"
+ "sw %[r4], 8(%[outBuf]) \n\t"
+ "sw %[r6], 12(%[outBuf]) \n\t"
+#endif
+ "addiu %[window], %[window], 16 \n\t"
+ "addiu %[anaBuf], %[anaBuf], 16 \n\t"
+ "addiu %[outBuf], %[outBuf], 16 \n\t"
+ "b 1b \n\t"
+ " addiu %[iters], %[iters], -1 \n\t"
+ "2: \n\t"
+ "andi %[after], %[anaLen], 7 \n\t"
+ "3: \n\t"
+ "blez %[after], 4f \n\t"
+ " nop \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[anaBuf]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "addiu %[window], %[window], 2 \n\t"
+ "addiu %[anaBuf], %[anaBuf], 2 \n\t"
+ "addiu %[outBuf], %[outBuf], 2 \n\t"
+ "shra_r.w %[r0], %[r0], 14 \n\t"
+ "sh %[r0], -2(%[outBuf]) \n\t"
+ "b 3b \n\t"
+ " addiu %[after], %[after], -1 \n\t"
+ "4: \n\t"
+ ".set pop \n\t"
+ : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2),
+ [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5),
+ [r6] "=&r" (r6), [r7] "=&r" (r7), [r8] "=&r" (r8),
+ [iters] "=&r" (iters), [after] "=&r" (after),
+ [window] "+r" (window),[anaBuf] "+r" (anaBuf),
+ [outBuf] "+r" (outBuf)
+ : [anaLen] "r" (anaLen)
+ : "memory", "hi", "lo"
+ );
+#else
+ __asm __volatile(
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "sra %[iters], %[anaLen], 2 \n\t"
+ "1: \n\t"
+ "blez %[iters], 2f \n\t"
+ " nop \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[anaBuf]) \n\t"
+ "lh %[r2], 2(%[window]) \n\t"
+ "lh %[r3], 2(%[anaBuf]) \n\t"
+ "lh %[r4], 4(%[window]) \n\t"
+ "lh %[r5], 4(%[anaBuf]) \n\t"
+ "lh %[r6], 6(%[window]) \n\t"
+ "lh %[r7], 6(%[anaBuf]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "mul %[r2], %[r2], %[r3] \n\t"
+ "mul %[r4], %[r4], %[r5] \n\t"
+ "mul %[r6], %[r6], %[r7] \n\t"
+ "addiu %[window], %[window], 8 \n\t"
+ "addiu %[anaBuf], %[anaBuf], 8 \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "addiu %[r2], %[r2], 0x2000 \n\t"
+ "addiu %[r4], %[r4], 0x2000 \n\t"
+ "addiu %[r6], %[r6], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "sra %[r2], %[r2], 14 \n\t"
+ "sra %[r4], %[r4], 14 \n\t"
+ "sra %[r6], %[r6], 14 \n\t"
+ "sh %[r0], 0(%[outBuf]) \n\t"
+ "sh %[r2], 2(%[outBuf]) \n\t"
+ "sh %[r4], 4(%[outBuf]) \n\t"
+ "sh %[r6], 6(%[outBuf]) \n\t"
+ "addiu %[outBuf], %[outBuf], 8 \n\t"
+ "b 1b \n\t"
+ " addiu %[iters], %[iters], -1 \n\t"
+ "2: \n\t"
+ "andi %[after], %[anaLen], 3 \n\t"
+ "3: \n\t"
+ "blez %[after], 4f \n\t"
+ " nop \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[anaBuf]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "addiu %[window], %[window], 2 \n\t"
+ "addiu %[anaBuf], %[anaBuf], 2 \n\t"
+ "addiu %[outBuf], %[outBuf], 2 \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "sh %[r0], -2(%[outBuf]) \n\t"
+ "b 3b \n\t"
+ " addiu %[after], %[after], -1 \n\t"
+ "4: \n\t"
+ ".set pop \n\t"
+ : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2),
+ [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5),
+ [r6] "=&r" (r6), [r7] "=&r" (r7), [iters] "=&r" (iters),
+ [after] "=&r" (after), [window] "+r" (window),
+ [anaBuf] "+r" (anaBuf), [outBuf] "+r" (outBuf)
+ : [anaLen] "r" (anaLen)
+ : "memory", "hi", "lo"
+ );
+#endif
+}
+
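Both the DSP and the plain-MIPS paths above unroll the same rounded Q14 window multiply; a minimal scalar sketch of that loop (the function name is illustrative, not part of the patch):

#include <cstdint>

void AnalysisWindowReference(const int16_t* window, const int16_t* ana_buf,
                             int16_t* out, int ana_len) {
  for (int i = 0; i < ana_len; ++i) {
    // window is Q14; multiply, round, and shift back to the input's Q domain.
    out[i] = static_cast<int16_t>((window[i] * ana_buf[i] + (1 << 13)) >> 14);
  }
}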
+// For the noise suppression process: synthesize, read out the fully processed
+// segment, and update the synthesis buffer.
+void WebRtcNsx_SynthesisUpdate_mips(NsxInst_t* inst,
+ int16_t* out_frame,
+ int16_t gain_factor) {
+
+ int iters = inst->blockLen10ms >> 2;
+ int after = inst->blockLen10ms & 3;
+ int r0, r1, r2, r3, r4, r5, r6, r7;
+ int16_t *window = (int16_t*)inst->window;
+ int16_t *real = inst->real;
+ int16_t *synthBuf = inst->synthesisBuffer;
+ int16_t *out = out_frame;
+ int sat_pos = 0x7fff;
+ int sat_neg = 0xffff8000;
+ int block10 = (int)inst->blockLen10ms;
+ int anaLen = (int)inst->anaLen;
+
+ __asm __volatile(
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "blez %[iters], 2f \n\t"
+ " nop \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[real]) \n\t"
+ "lh %[r2], 2(%[window]) \n\t"
+ "lh %[r3], 2(%[real]) \n\t"
+ "lh %[r4], 4(%[window]) \n\t"
+ "lh %[r5], 4(%[real]) \n\t"
+ "lh %[r6], 6(%[window]) \n\t"
+ "lh %[r7], 6(%[real]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "mul %[r2], %[r2], %[r3] \n\t"
+ "mul %[r4], %[r4], %[r5] \n\t"
+ "mul %[r6], %[r6], %[r7] \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "addiu %[r2], %[r2], 0x2000 \n\t"
+ "addiu %[r4], %[r4], 0x2000 \n\t"
+ "addiu %[r6], %[r6], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "sra %[r2], %[r2], 14 \n\t"
+ "sra %[r4], %[r4], 14 \n\t"
+ "sra %[r6], %[r6], 14 \n\t"
+ "mul %[r0], %[r0], %[gain_factor] \n\t"
+ "mul %[r2], %[r2], %[gain_factor] \n\t"
+ "mul %[r4], %[r4], %[gain_factor] \n\t"
+ "mul %[r6], %[r6], %[gain_factor] \n\t"
+ "addiu %[r0], %[r0], 0x1000 \n\t"
+ "addiu %[r2], %[r2], 0x1000 \n\t"
+ "addiu %[r4], %[r4], 0x1000 \n\t"
+ "addiu %[r6], %[r6], 0x1000 \n\t"
+ "sra %[r0], %[r0], 13 \n\t"
+ "sra %[r2], %[r2], 13 \n\t"
+ "sra %[r4], %[r4], 13 \n\t"
+ "sra %[r6], %[r6], 13 \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "slt %[r3], %[r2], %[sat_pos] \n\t"
+ "slt %[r5], %[r4], %[sat_pos] \n\t"
+ "slt %[r7], %[r6], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "movz %[r2], %[sat_pos], %[r3] \n\t"
+ "movz %[r4], %[sat_pos], %[r5] \n\t"
+ "movz %[r6], %[sat_pos], %[r7] \n\t"
+ "lh %[r1], 0(%[synthBuf]) \n\t"
+ "lh %[r3], 2(%[synthBuf]) \n\t"
+ "lh %[r5], 4(%[synthBuf]) \n\t"
+ "lh %[r7], 6(%[synthBuf]) \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+ "addu %[r2], %[r2], %[r3] \n\t"
+ "addu %[r4], %[r4], %[r5] \n\t"
+ "addu %[r6], %[r6], %[r7] \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "slt %[r3], %[r2], %[sat_pos] \n\t"
+ "slt %[r5], %[r4], %[sat_pos] \n\t"
+ "slt %[r7], %[r6], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "movz %[r2], %[sat_pos], %[r3] \n\t"
+ "movz %[r4], %[sat_pos], %[r5] \n\t"
+ "movz %[r6], %[sat_pos], %[r7] \n\t"
+ "slt %[r1], %[r0], %[sat_neg] \n\t"
+ "slt %[r3], %[r2], %[sat_neg] \n\t"
+ "slt %[r5], %[r4], %[sat_neg] \n\t"
+ "slt %[r7], %[r6], %[sat_neg] \n\t"
+ "movn %[r0], %[sat_neg], %[r1] \n\t"
+ "movn %[r2], %[sat_neg], %[r3] \n\t"
+ "movn %[r4], %[sat_neg], %[r5] \n\t"
+ "movn %[r6], %[sat_neg], %[r7] \n\t"
+ "sh %[r0], 0(%[synthBuf]) \n\t"
+ "sh %[r2], 2(%[synthBuf]) \n\t"
+ "sh %[r4], 4(%[synthBuf]) \n\t"
+ "sh %[r6], 6(%[synthBuf]) \n\t"
+ "sh %[r0], 0(%[out]) \n\t"
+ "sh %[r2], 2(%[out]) \n\t"
+ "sh %[r4], 4(%[out]) \n\t"
+ "sh %[r6], 6(%[out]) \n\t"
+ "addiu %[window], %[window], 8 \n\t"
+ "addiu %[real], %[real], 8 \n\t"
+ "addiu %[synthBuf],%[synthBuf], 8 \n\t"
+ "addiu %[out], %[out], 8 \n\t"
+ "b 1b \n\t"
+ " addiu %[iters], %[iters], -1 \n\t"
+ "2: \n\t"
+ "blez %[after], 3f \n\t"
+ " subu %[block10], %[anaLen], %[block10] \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[real]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "addiu %[window], %[window], 2 \n\t"
+ "addiu %[real], %[real], 2 \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "mul %[r0], %[r0], %[gain_factor] \n\t"
+ "addiu %[r0], %[r0], 0x1000 \n\t"
+ "sra %[r0], %[r0], 13 \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "lh %[r1], 0(%[synthBuf]) \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "slt %[r1], %[r0], %[sat_neg] \n\t"
+ "movn %[r0], %[sat_neg], %[r1] \n\t"
+ "sh %[r0], 0(%[synthBuf]) \n\t"
+ "sh %[r0], 0(%[out]) \n\t"
+ "addiu %[synthBuf],%[synthBuf], 2 \n\t"
+ "addiu %[out], %[out], 2 \n\t"
+ "b 2b \n\t"
+ " addiu %[after], %[after], -1 \n\t"
+ "3: \n\t"
+ "sra %[iters], %[block10], 2 \n\t"
+ "4: \n\t"
+ "blez %[iters], 5f \n\t"
+ " andi %[after], %[block10], 3 \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[real]) \n\t"
+ "lh %[r2], 2(%[window]) \n\t"
+ "lh %[r3], 2(%[real]) \n\t"
+ "lh %[r4], 4(%[window]) \n\t"
+ "lh %[r5], 4(%[real]) \n\t"
+ "lh %[r6], 6(%[window]) \n\t"
+ "lh %[r7], 6(%[real]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "mul %[r2], %[r2], %[r3] \n\t"
+ "mul %[r4], %[r4], %[r5] \n\t"
+ "mul %[r6], %[r6], %[r7] \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "addiu %[r2], %[r2], 0x2000 \n\t"
+ "addiu %[r4], %[r4], 0x2000 \n\t"
+ "addiu %[r6], %[r6], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "sra %[r2], %[r2], 14 \n\t"
+ "sra %[r4], %[r4], 14 \n\t"
+ "sra %[r6], %[r6], 14 \n\t"
+ "mul %[r0], %[r0], %[gain_factor] \n\t"
+ "mul %[r2], %[r2], %[gain_factor] \n\t"
+ "mul %[r4], %[r4], %[gain_factor] \n\t"
+ "mul %[r6], %[r6], %[gain_factor] \n\t"
+ "addiu %[r0], %[r0], 0x1000 \n\t"
+ "addiu %[r2], %[r2], 0x1000 \n\t"
+ "addiu %[r4], %[r4], 0x1000 \n\t"
+ "addiu %[r6], %[r6], 0x1000 \n\t"
+ "sra %[r0], %[r0], 13 \n\t"
+ "sra %[r2], %[r2], 13 \n\t"
+ "sra %[r4], %[r4], 13 \n\t"
+ "sra %[r6], %[r6], 13 \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "slt %[r3], %[r2], %[sat_pos] \n\t"
+ "slt %[r5], %[r4], %[sat_pos] \n\t"
+ "slt %[r7], %[r6], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "movz %[r2], %[sat_pos], %[r3] \n\t"
+ "movz %[r4], %[sat_pos], %[r5] \n\t"
+ "movz %[r6], %[sat_pos], %[r7] \n\t"
+ "lh %[r1], 0(%[synthBuf]) \n\t"
+ "lh %[r3], 2(%[synthBuf]) \n\t"
+ "lh %[r5], 4(%[synthBuf]) \n\t"
+ "lh %[r7], 6(%[synthBuf]) \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+ "addu %[r2], %[r2], %[r3] \n\t"
+ "addu %[r4], %[r4], %[r5] \n\t"
+ "addu %[r6], %[r6], %[r7] \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "slt %[r3], %[r2], %[sat_pos] \n\t"
+ "slt %[r5], %[r4], %[sat_pos] \n\t"
+ "slt %[r7], %[r6], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "movz %[r2], %[sat_pos], %[r3] \n\t"
+ "movz %[r4], %[sat_pos], %[r5] \n\t"
+ "movz %[r6], %[sat_pos], %[r7] \n\t"
+ "slt %[r1], %[r0], %[sat_neg] \n\t"
+ "slt %[r3], %[r2], %[sat_neg] \n\t"
+ "slt %[r5], %[r4], %[sat_neg] \n\t"
+ "slt %[r7], %[r6], %[sat_neg] \n\t"
+ "movn %[r0], %[sat_neg], %[r1] \n\t"
+ "movn %[r2], %[sat_neg], %[r3] \n\t"
+ "movn %[r4], %[sat_neg], %[r5] \n\t"
+ "movn %[r6], %[sat_neg], %[r7] \n\t"
+ "sh %[r0], 0(%[synthBuf]) \n\t"
+ "sh %[r2], 2(%[synthBuf]) \n\t"
+ "sh %[r4], 4(%[synthBuf]) \n\t"
+ "sh %[r6], 6(%[synthBuf]) \n\t"
+ "addiu %[window], %[window], 8 \n\t"
+ "addiu %[real], %[real], 8 \n\t"
+ "addiu %[synthBuf],%[synthBuf], 8 \n\t"
+ "b 4b \n\t"
+ " addiu %[iters], %[iters], -1 \n\t"
+ "5: \n\t"
+ "blez %[after], 6f \n\t"
+ " nop \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[real]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "addiu %[window], %[window], 2 \n\t"
+ "addiu %[real], %[real], 2 \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "mul %[r0], %[r0], %[gain_factor] \n\t"
+ "addiu %[r0], %[r0], 0x1000 \n\t"
+ "sra %[r0], %[r0], 13 \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "lh %[r1], 0(%[synthBuf]) \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "slt %[r1], %[r0], %[sat_neg] \n\t"
+ "movn %[r0], %[sat_neg], %[r1] \n\t"
+ "sh %[r0], 0(%[synthBuf]) \n\t"
+ "addiu %[synthBuf],%[synthBuf], 2 \n\t"
+ "b 2b \n\t"
+ " addiu %[after], %[after], -1 \n\t"
+ "6: \n\t"
+ ".set pop \n\t"
+ : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2),
+ [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5),
+ [r6] "=&r" (r6), [r7] "=&r" (r7), [iters] "+r" (iters),
+ [after] "+r" (after), [block10] "+r" (block10),
+ [window] "+r" (window), [real] "+r" (real),
+ [synthBuf] "+r" (synthBuf), [out] "+r" (out)
+ : [gain_factor] "r" (gain_factor), [sat_pos] "r" (sat_pos),
+ [sat_neg] "r" (sat_neg), [anaLen] "r" (anaLen)
+ : "memory", "hi", "lo"
+ );
+
+ // update synthesis buffer
+ WEBRTC_SPL_MEMCPY_W16(inst->synthesisBuffer,
+ inst->synthesisBuffer + inst->blockLen10ms,
+ inst->anaLen - inst->blockLen10ms);
+ WebRtcSpl_ZerosArrayW16(inst->synthesisBuffer
+ + inst->anaLen - inst->blockLen10ms, inst->blockLen10ms);
+}
+
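The per-sample work in the synthesis assembly above is: window the time-domain signal (rounded Q14 multiply), apply the Q13 gain_factor, clamp, and overlap-add into the synthesis buffer with saturation; the first blockLen10ms samples are also copied to out_frame. A scalar sketch of one sample (names are illustrative, not part of the patch):

#include <algorithm>
#include <cstdint>

static int16_t SaturateToInt16(int32_t v) {
  return static_cast<int16_t>(std::min(32767, std::max(-32768, v)));
}

void SynthesisSampleReference(int16_t window, int16_t real, int16_t gain_q13,
                              int16_t* synth_sample, int16_t* out_sample) {
  int32_t windowed = (window * real + (1 << 13)) >> 14;      // Q0
  int32_t scaled = (windowed * gain_q13 + (1 << 12)) >> 13;  // Q0
  scaled = std::min<int32_t>(scaled, 32767);                 // clamp high
  *synth_sample = SaturateToInt16(scaled + *synth_sample);   // overlap-add
  *out_sample = *synth_sample;
}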
+// Filter the data in the frequency domain, and create spectrum.
+void WebRtcNsx_PrepareSpectrum_mips(NsxInst_t* inst, int16_t* freq_buf) {
+
+ uint16_t *noiseSupFilter = inst->noiseSupFilter;
+ int16_t *real = inst->real;
+ int16_t *imag = inst->imag;
+ int32_t loop_count = 2;
+ int16_t tmp_1, tmp_2, tmp_3, tmp_4, tmp_5, tmp_6;
+ int16_t tmp16 = (inst->anaLen << 1) - 4;
+ int16_t* freq_buf_f = freq_buf;
+ int16_t* freq_buf_s = &freq_buf[tmp16];
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ //first sample
+ "lh %[tmp_1], 0(%[noiseSupFilter]) \n\t"
+ "lh %[tmp_2], 0(%[real]) \n\t"
+ "lh %[tmp_3], 0(%[imag]) \n\t"
+ "mul %[tmp_2], %[tmp_2], %[tmp_1] \n\t"
+ "mul %[tmp_3], %[tmp_3], %[tmp_1] \n\t"
+ "sra %[tmp_2], %[tmp_2], 14 \n\t"
+ "sra %[tmp_3], %[tmp_3], 14 \n\t"
+ "sh %[tmp_2], 0(%[real]) \n\t"
+ "sh %[tmp_3], 0(%[imag]) \n\t"
+ "negu %[tmp_3], %[tmp_3] \n\t"
+ "sh %[tmp_2], 0(%[freq_buf_f]) \n\t"
+ "sh %[tmp_3], 2(%[freq_buf_f]) \n\t"
+ "addiu %[real], %[real], 2 \n\t"
+ "addiu %[imag], %[imag], 2 \n\t"
+ "addiu %[noiseSupFilter], %[noiseSupFilter], 2 \n\t"
+ "addiu %[freq_buf_f], %[freq_buf_f], 4 \n\t"
+ "1: \n\t"
+ "lh %[tmp_1], 0(%[noiseSupFilter]) \n\t"
+ "lh %[tmp_2], 0(%[real]) \n\t"
+ "lh %[tmp_3], 0(%[imag]) \n\t"
+ "lh %[tmp_4], 2(%[noiseSupFilter]) \n\t"
+ "lh %[tmp_5], 2(%[real]) \n\t"
+ "lh %[tmp_6], 2(%[imag]) \n\t"
+ "mul %[tmp_2], %[tmp_2], %[tmp_1] \n\t"
+ "mul %[tmp_3], %[tmp_3], %[tmp_1] \n\t"
+ "mul %[tmp_5], %[tmp_5], %[tmp_4] \n\t"
+ "mul %[tmp_6], %[tmp_6], %[tmp_4] \n\t"
+ "addiu %[loop_count], %[loop_count], 2 \n\t"
+ "sra %[tmp_2], %[tmp_2], 14 \n\t"
+ "sra %[tmp_3], %[tmp_3], 14 \n\t"
+ "sra %[tmp_5], %[tmp_5], 14 \n\t"
+ "sra %[tmp_6], %[tmp_6], 14 \n\t"
+ "addiu %[noiseSupFilter], %[noiseSupFilter], 4 \n\t"
+ "sh %[tmp_2], 0(%[real]) \n\t"
+ "sh %[tmp_2], 4(%[freq_buf_s]) \n\t"
+ "sh %[tmp_3], 0(%[imag]) \n\t"
+ "sh %[tmp_3], 6(%[freq_buf_s]) \n\t"
+ "negu %[tmp_3], %[tmp_3] \n\t"
+ "sh %[tmp_5], 2(%[real]) \n\t"
+ "sh %[tmp_5], 0(%[freq_buf_s]) \n\t"
+ "sh %[tmp_6], 2(%[imag]) \n\t"
+ "sh %[tmp_6], 2(%[freq_buf_s]) \n\t"
+ "negu %[tmp_6], %[tmp_6] \n\t"
+ "addiu %[freq_buf_s], %[freq_buf_s], -8 \n\t"
+ "addiu %[real], %[real], 4 \n\t"
+ "addiu %[imag], %[imag], 4 \n\t"
+ "sh %[tmp_2], 0(%[freq_buf_f]) \n\t"
+ "sh %[tmp_3], 2(%[freq_buf_f]) \n\t"
+ "sh %[tmp_5], 4(%[freq_buf_f]) \n\t"
+ "sh %[tmp_6], 6(%[freq_buf_f]) \n\t"
+ "blt %[loop_count], %[loop_size], 1b \n\t"
+ " addiu %[freq_buf_f], %[freq_buf_f], 8 \n\t"
+ //last two samples:
+ "lh %[tmp_1], 0(%[noiseSupFilter]) \n\t"
+ "lh %[tmp_2], 0(%[real]) \n\t"
+ "lh %[tmp_3], 0(%[imag]) \n\t"
+ "lh %[tmp_4], 2(%[noiseSupFilter]) \n\t"
+ "lh %[tmp_5], 2(%[real]) \n\t"
+ "lh %[tmp_6], 2(%[imag]) \n\t"
+ "mul %[tmp_2], %[tmp_2], %[tmp_1] \n\t"
+ "mul %[tmp_3], %[tmp_3], %[tmp_1] \n\t"
+ "mul %[tmp_5], %[tmp_5], %[tmp_4] \n\t"
+ "mul %[tmp_6], %[tmp_6], %[tmp_4] \n\t"
+ "sra %[tmp_2], %[tmp_2], 14 \n\t"
+ "sra %[tmp_3], %[tmp_3], 14 \n\t"
+ "sra %[tmp_5], %[tmp_5], 14 \n\t"
+ "sra %[tmp_6], %[tmp_6], 14 \n\t"
+ "sh %[tmp_2], 0(%[real]) \n\t"
+ "sh %[tmp_2], 4(%[freq_buf_s]) \n\t"
+ "sh %[tmp_3], 0(%[imag]) \n\t"
+ "sh %[tmp_3], 6(%[freq_buf_s]) \n\t"
+ "negu %[tmp_3], %[tmp_3] \n\t"
+ "sh %[tmp_2], 0(%[freq_buf_f]) \n\t"
+ "sh %[tmp_3], 2(%[freq_buf_f]) \n\t"
+ "sh %[tmp_5], 4(%[freq_buf_f]) \n\t"
+ "sh %[tmp_6], 6(%[freq_buf_f]) \n\t"
+ "sh %[tmp_5], 2(%[real]) \n\t"
+ "sh %[tmp_6], 2(%[imag]) \n\t"
+ ".set pop \n\t"
+ : [real] "+r" (real), [imag] "+r" (imag),
+ [freq_buf_f] "+r" (freq_buf_f), [freq_buf_s] "+r" (freq_buf_s),
+ [loop_count] "+r" (loop_count), [noiseSupFilter] "+r" (noiseSupFilter),
+ [tmp_1] "=&r" (tmp_1), [tmp_2] "=&r" (tmp_2), [tmp_3] "=&r" (tmp_3),
+ [tmp_4] "=&r" (tmp_4), [tmp_5] "=&r" (tmp_5), [tmp_6] "=&r" (tmp_6)
+ : [loop_size] "r" (inst->anaLen2)
+ : "memory", "hi", "lo"
+ );
+}
+
+#if defined(MIPS_DSP_R1_LE)
+// Denormalize the real-valued signal |in|, the output from inverse FFT.
+void WebRtcNsx_Denormalize_mips(NsxInst_t* inst, int16_t* in, int factor) {
+ int32_t r0, r1, r2, r3, t0;
+ int len = inst->anaLen;
+ int16_t *out = &inst->real[0];
+ int shift = factor - inst->normData;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "beqz %[len], 8f \n\t"
+ " nop \n\t"
+ "bltz %[shift], 4f \n\t"
+ " sra %[t0], %[len], 2 \n\t"
+ "beqz %[t0], 2f \n\t"
+ " andi %[len], %[len], 3 \n\t"
+ "1: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 2(%[in]) \n\t"
+ "lh %[r2], 4(%[in]) \n\t"
+ "lh %[r3], 6(%[in]) \n\t"
+ "shllv_s.ph %[r0], %[r0], %[shift] \n\t"
+ "shllv_s.ph %[r1], %[r1], %[shift] \n\t"
+ "shllv_s.ph %[r2], %[r2], %[shift] \n\t"
+ "shllv_s.ph %[r3], %[r3], %[shift] \n\t"
+ "addiu %[in], %[in], 8 \n\t"
+ "addiu %[t0], %[t0], -1 \n\t"
+ "sh %[r0], 0(%[out]) \n\t"
+ "sh %[r1], 2(%[out]) \n\t"
+ "sh %[r2], 4(%[out]) \n\t"
+ "sh %[r3], 6(%[out]) \n\t"
+ "bgtz %[t0], 1b \n\t"
+ " addiu %[out], %[out], 8 \n\t"
+ "2: \n\t"
+ "beqz %[len], 8f \n\t"
+ " nop \n\t"
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "shllv_s.ph %[r0], %[r0], %[shift] \n\t"
+ "addiu %[out], %[out], 2 \n\t"
+ "bgtz %[len], 3b \n\t"
+ " sh %[r0], -2(%[out]) \n\t"
+ "b 8f \n\t"
+ "4: \n\t"
+ "negu %[shift], %[shift] \n\t"
+ "beqz %[t0], 6f \n\t"
+ " andi %[len], %[len], 3 \n\t"
+ "5: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 2(%[in]) \n\t"
+ "lh %[r2], 4(%[in]) \n\t"
+ "lh %[r3], 6(%[in]) \n\t"
+ "srav %[r0], %[r0], %[shift] \n\t"
+ "srav %[r1], %[r1], %[shift] \n\t"
+ "srav %[r2], %[r2], %[shift] \n\t"
+ "srav %[r3], %[r3], %[shift] \n\t"
+ "addiu %[in], %[in], 8 \n\t"
+ "addiu %[t0], %[t0], -1 \n\t"
+ "sh %[r0], 0(%[out]) \n\t"
+ "sh %[r1], 2(%[out]) \n\t"
+ "sh %[r2], 4(%[out]) \n\t"
+ "sh %[r3], 6(%[out]) \n\t"
+ "bgtz %[t0], 5b \n\t"
+ " addiu %[out], %[out], 8 \n\t"
+ "6: \n\t"
+ "beqz %[len], 8f \n\t"
+ " nop \n\t"
+ "7: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "srav %[r0], %[r0], %[shift] \n\t"
+ "addiu %[out], %[out], 2 \n\t"
+ "bgtz %[len], 7b \n\t"
+ " sh %[r0], -2(%[out]) \n\t"
+ "8: \n\t"
+ ".set pop \n\t"
+ : [t0] "=&r" (t0), [r0] "=&r" (r0), [r1] "=&r" (r1),
+ [r2] "=&r" (r2), [r3] "=&r" (r3)
+ : [len] "r" (len), [shift] "r" (shift), [in] "r" (in),
+ [out] "r" (out)
+ : "memory"
+ );
+}
+#endif
+
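A scalar sketch of what the denormalization above does per sample: shift left with 16-bit saturation when the shift is non-negative (the behavior shllv_s.ph provides), otherwise shift right arithmetically (names are illustrative, not part of the patch):

#include <algorithm>
#include <cstdint>

void DenormalizeReference(const int16_t* in, int16_t* out, int len, int shift) {
  for (int i = 0; i < len; ++i) {
    if (shift >= 0) {
      int32_t v = static_cast<int32_t>(in[i]) << shift;
      out[i] = static_cast<int16_t>(std::min(32767, std::max(-32768, v)));
    } else {
      out[i] = static_cast<int16_t>(in[i] >> -shift);
    }
  }
}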
+// Normalize the real-valued signal |in|, the input to forward FFT.
+void WebRtcNsx_NormalizeRealBuffer_mips(NsxInst_t* inst,
+ const int16_t* in,
+ int16_t* out) {
+ int32_t r0, r1, r2, r3, t0;
+ int len = inst->anaLen;
+ int shift = inst->normData;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "beqz %[len], 4f \n\t"
+ " sra %[t0], %[len], 2 \n\t"
+ "beqz %[t0], 2f \n\t"
+ " andi %[len], %[len], 3 \n\t"
+ "1: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 2(%[in]) \n\t"
+ "lh %[r2], 4(%[in]) \n\t"
+ "lh %[r3], 6(%[in]) \n\t"
+ "sllv %[r0], %[r0], %[shift] \n\t"
+ "sllv %[r1], %[r1], %[shift] \n\t"
+ "sllv %[r2], %[r2], %[shift] \n\t"
+ "sllv %[r3], %[r3], %[shift] \n\t"
+ "addiu %[in], %[in], 8 \n\t"
+ "addiu %[t0], %[t0], -1 \n\t"
+ "sh %[r0], 0(%[out]) \n\t"
+ "sh %[r1], 2(%[out]) \n\t"
+ "sh %[r2], 4(%[out]) \n\t"
+ "sh %[r3], 6(%[out]) \n\t"
+ "bgtz %[t0], 1b \n\t"
+ " addiu %[out], %[out], 8 \n\t"
+ "2: \n\t"
+ "beqz %[len], 4f \n\t"
+ " nop \n\t"
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "sllv %[r0], %[r0], %[shift] \n\t"
+ "addiu %[out], %[out], 2 \n\t"
+ "bgtz %[len], 3b \n\t"
+ " sh %[r0], -2(%[out]) \n\t"
+ "4: \n\t"
+ ".set pop \n\t"
+ : [t0] "=&r" (t0), [r0] "=&r" (r0), [r1] "=&r" (r1),
+ [r2] "=&r" (r2), [r3] "=&r" (r3)
+ : [len] "r" (len), [shift] "r" (shift), [in] "r" (in),
+ [out] "r" (out)
+ : "memory"
+ );
+}
+
diff --git a/chromium/third_party/webrtc/modules/audio_processing/processing_component.cc b/chromium/third_party/webrtc/modules/audio_processing/processing_component.cc
index 23bf22570b1..9e16d7c4eea 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/processing_component.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/processing_component.cc
@@ -12,15 +12,12 @@
#include <assert.h>
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
namespace webrtc {
-ProcessingComponent::ProcessingComponent() {}
-
-ProcessingComponent::ProcessingComponent(const AudioProcessingImpl* apm)
- : apm_(apm),
- initialized_(false),
+ProcessingComponent::ProcessingComponent()
+ : initialized_(false),
enabled_(false),
num_handles_(0) {}
@@ -35,7 +32,7 @@ int ProcessingComponent::Destroy() {
}
initialized_ = false;
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int ProcessingComponent::EnableComponent(bool enable) {
@@ -43,7 +40,7 @@ int ProcessingComponent::EnableComponent(bool enable) {
enabled_ = enable; // Must be set before Initialize() is called.
int err = Initialize();
- if (err != apm_->kNoError) {
+ if (err != AudioProcessing::kNoError) {
enabled_ = false;
return err;
}
@@ -51,7 +48,7 @@ int ProcessingComponent::EnableComponent(bool enable) {
enabled_ = enable;
}
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
bool ProcessingComponent::is_component_enabled() const {
@@ -69,7 +66,7 @@ int ProcessingComponent::num_handles() const {
int ProcessingComponent::Initialize() {
if (!enabled_) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
num_handles_ = num_handles_required();
@@ -82,12 +79,12 @@ int ProcessingComponent::Initialize() {
if (handles_[i] == NULL) {
handles_[i] = CreateHandle();
if (handles_[i] == NULL) {
- return apm_->kCreationFailedError;
+ return AudioProcessing::kCreationFailedError;
}
}
int err = InitializeHandle(handles_[i]);
- if (err != apm_->kNoError) {
+ if (err != AudioProcessing::kNoError) {
return GetHandleError(handles_[i]);
}
}
@@ -98,17 +95,17 @@ int ProcessingComponent::Initialize() {
int ProcessingComponent::Configure() {
if (!initialized_) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
assert(static_cast<int>(handles_.size()) >= num_handles_);
for (int i = 0; i < num_handles_; i++) {
int err = ConfigureHandle(handles_[i]);
- if (err != apm_->kNoError) {
+ if (err != AudioProcessing::kNoError) {
return GetHandleError(handles_[i]);
}
}
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/processing_component.h b/chromium/third_party/webrtc/modules/audio_processing/processing_component.h
index c090d222456..8ee3ac6c7db 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/processing_component.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/processing_component.h
@@ -13,16 +13,13 @@
#include <vector>
-#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/common.h"
namespace webrtc {
-class AudioProcessingImpl;
-
class ProcessingComponent {
public:
ProcessingComponent();
- explicit ProcessingComponent(const AudioProcessingImpl* apm);
virtual ~ProcessingComponent();
virtual int Initialize();
@@ -41,11 +38,10 @@ class ProcessingComponent {
virtual void* CreateHandle() const = 0;
virtual int InitializeHandle(void* handle) const = 0;
virtual int ConfigureHandle(void* handle) const = 0;
- virtual int DestroyHandle(void* handle) const = 0;
+ virtual void DestroyHandle(void* handle) const = 0;
virtual int num_handles_required() const = 0;
virtual int GetHandleError(void* handle) const = 0;
- const AudioProcessingImpl* apm_;
std::vector<void*> handles_;
bool initialized_;
bool enabled_;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/rms_level.cc b/chromium/third_party/webrtc/modules/audio_processing/rms_level.cc
new file mode 100644
index 00000000000..14136bf3049
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/rms_level.cc
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/rms_level.h"
+
+#include <assert.h>
+#include <math.h>
+
+namespace webrtc {
+
+static const float kMaxSquaredLevel = 32768 * 32768;
+
+RMSLevel::RMSLevel()
+ : sum_square_(0),
+ sample_count_(0) {}
+
+RMSLevel::~RMSLevel() {}
+
+void RMSLevel::Reset() {
+ sum_square_ = 0;
+ sample_count_ = 0;
+}
+
+void RMSLevel::Process(const int16_t* data, int length) {
+ for (int i = 0; i < length; ++i) {
+ sum_square_ += data[i] * data[i];
+ }
+ sample_count_ += length;
+}
+
+void RMSLevel::ProcessMuted(int length) {
+ sample_count_ += length;
+}
+
+int RMSLevel::RMS() {
+ if (sample_count_ == 0 || sum_square_ == 0) {
+ Reset();
+ return kMinLevel;
+ }
+
+ // Normalize by the max level.
+ float rms = sum_square_ / (sample_count_ * kMaxSquaredLevel);
+ // 20log_10(x^0.5) = 10log_10(x)
+ rms = 10 * log10(rms);
+ assert(rms <= 0);
+ if (rms < -kMinLevel)
+ rms = -kMinLevel;
+
+ rms = -rms;
+ Reset();
+ return static_cast<int>(rms + 0.5);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/rms_level.h b/chromium/third_party/webrtc/modules/audio_processing/rms_level.h
new file mode 100644
index 00000000000..055d271bb19
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/rms_level.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_RMS_LEVEL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_RMS_LEVEL_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Computes the root mean square (RMS) level in dBFs (decibels from digital
+// full-scale) of audio data. The computation follows RFC 6465:
+// https://tools.ietf.org/html/rfc6465
+// with the intent that it can provide the RTP audio level indication.
+//
+// The expected approach is to provide constant-sized chunks of audio to
+// Process(). When enough chunks have been accumulated to form a packet, call
+// RMS() to get the audio level indicator for the RTP header.
+class RMSLevel {
+ public:
+ static const int kMinLevel = 127;
+
+ RMSLevel();
+ ~RMSLevel();
+
+ // Can be called to reset internal states, but is not required during normal
+ // operation.
+ void Reset();
+
+ // Pass each chunk of audio to Process() to accumulate the level.
+ void Process(const int16_t* data, int length);
+
+ // If all samples with the given |length| have a magnitude of zero, this is
+ // a shortcut to avoid some computation.
+ void ProcessMuted(int length);
+
+ // Computes the RMS level over all data passed to Process() since the last
+ // call to RMS(). The returned value is positive but should be interpreted as
+ // negative as per the RFC. It is constrained to [0, 127].
+ int RMS();
+
+ private:
+ float sum_square_;
+ int sample_count_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_RMS_LEVEL_H_
+
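The class comment above spells out the intended calling pattern; a minimal usage sketch (the 10 ms chunking and the sine input are illustrative assumptions, not part of the patch):

#include <cmath>
#include <cstdint>
#include <cstdio>
#include <vector>

#include "webrtc/modules/audio_processing/rms_level.h"

int main() {
  webrtc::RMSLevel level;
  const int kSampleRate = 16000;
  const int kChunk = kSampleRate / 100;  // 10 ms chunks fed to Process().
  const double kPi = 3.14159265358979323846;
  std::vector<int16_t> chunk(kChunk);
  for (int c = 0; c < 10; ++c) {  // 100 ms of a full-scale 1 kHz tone.
    for (int i = 0; i < kChunk; ++i) {
      double t = (c * kChunk + i) / static_cast<double>(kSampleRate);
      chunk[i] = static_cast<int16_t>(32767 * std::sin(2 * kPi * 1000 * t));
    }
    level.Process(chunk.data(), kChunk);
  }
  // A full-scale sine sits 3 dB below full scale, so this prints roughly 3,
  // to be read as -3 dBFS per RFC 6465.
  std::printf("level = %d\n", level.RMS());
  return 0;
}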
diff --git a/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.cc b/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.cc
deleted file mode 100644
index 372c8dc426e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_processing/splitting_filter.h"
-
-namespace webrtc {
-
-void SplittingFilterAnalysis(const int16_t* in_data,
- int16_t* low_band,
- int16_t* high_band,
- int32_t* filter_state1,
- int32_t* filter_state2)
-{
- WebRtcSpl_AnalysisQMF(in_data, low_band, high_band, filter_state1, filter_state2);
-}
-
-void SplittingFilterSynthesis(const int16_t* low_band,
- const int16_t* high_band,
- int16_t* out_data,
- int32_t* filt_state1,
- int32_t* filt_state2)
-{
- WebRtcSpl_SynthesisQMF(low_band, high_band, out_data, filt_state1, filt_state2);
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.h b/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.h
deleted file mode 100644
index b6c851273a6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-/*
- * SplittingFilterbank_analysisQMF(...)
- *
- * Splits a super-wb signal into two subbands: 0-8 kHz and 8-16 kHz.
- *
- * Input:
- * - in_data : super-wb audio signal
- *
- * Input & Output:
- * - filt_state1: Filter state for first all-pass filter
- * - filt_state2: Filter state for second all-pass filter
- *
- * Output:
- * - low_band : The signal from the 0-4 kHz band
- * - high_band : The signal from the 4-8 kHz band
- */
-void SplittingFilterAnalysis(const int16_t* in_data,
- int16_t* low_band,
- int16_t* high_band,
- int32_t* filt_state1,
- int32_t* filt_state2);
-
-/*
- * SplittingFilterbank_synthesisQMF(...)
- *
- * Combines the two subbands (0-8 and 8-16 kHz) into a super-wb signal.
- *
- * Input:
- * - low_band : The signal with the 0-8 kHz band
- * - high_band : The signal with the 8-16 kHz band
- *
- * Input & Output:
- * - filt_state1: Filter state for first all-pass filter
- * - filt_state2: Filter state for second all-pass filter
- *
- * Output:
- * - out_data : super-wb speech signal
- */
-void SplittingFilterSynthesis(const int16_t* low_band,
- const int16_t* high_band,
- int16_t* out_data,
- int32_t* filt_state1,
- int32_t* filt_state2);
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/typing_detection.cc b/chromium/third_party/webrtc/modules/audio_processing/typing_detection.cc
new file mode 100644
index 00000000000..5f5ce0abafd
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/typing_detection.cc
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/typing_detection.h"
+
+namespace webrtc {
+
+TypingDetection::TypingDetection()
+ : time_active_(0),
+ time_since_last_typing_(0),
+ penalty_counter_(0),
+ counter_since_last_detection_update_(0),
+ detection_to_report_(false),
+ new_detection_to_report_(false),
+ time_window_(10),
+ cost_per_typing_(100),
+ reporting_threshold_(300),
+ penalty_decay_(1),
+ type_event_delay_(2),
+ report_detection_update_period_(1) {
+}
+
+TypingDetection::~TypingDetection() {}
+
+bool TypingDetection::Process(bool key_pressed, bool vad_activity) {
+ if (vad_activity)
+ time_active_++;
+ else
+ time_active_ = 0;
+
+  // Keep track of the time since the last typing event.
+ if (key_pressed)
+ time_since_last_typing_ = 0;
+ else
+ ++time_since_last_typing_;
+
+ if (time_since_last_typing_ < type_event_delay_ &&
+ vad_activity &&
+ time_active_ < time_window_) {
+ penalty_counter_ += cost_per_typing_;
+ if (penalty_counter_ > reporting_threshold_)
+ new_detection_to_report_ = true;
+ }
+
+ if (penalty_counter_ > 0)
+ penalty_counter_ -= penalty_decay_;
+
+ if (++counter_since_last_detection_update_ ==
+ report_detection_update_period_) {
+ detection_to_report_ = new_detection_to_report_;
+ new_detection_to_report_ = false;
+ counter_since_last_detection_update_ = 0;
+ }
+
+ return detection_to_report_;
+}
+
+int TypingDetection::TimeSinceLastDetectionInSeconds() {
+ // Round to whole seconds.
+ return (time_since_last_typing_ + 50) / 100;
+}
+
+void TypingDetection::SetParameters(int time_window,
+ int cost_per_typing,
+ int reporting_threshold,
+ int penalty_decay,
+ int type_event_delay,
+ int report_detection_update_period) {
+ if (time_window) time_window_ = time_window;
+
+ if (cost_per_typing) cost_per_typing_ = cost_per_typing;
+
+ if (reporting_threshold) reporting_threshold_ = reporting_threshold;
+
+ if (penalty_decay) penalty_decay_ = penalty_decay;
+
+ if (type_event_delay) type_event_delay_ = type_event_delay;
+
+ if (report_detection_update_period)
+ report_detection_update_period_ = report_detection_update_period;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/typing_detection.h b/chromium/third_party/webrtc/modules/audio_processing/typing_detection.h
new file mode 100644
index 00000000000..5fa6456e9e9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/typing_detection.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
+
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class TypingDetection {
+ public:
+ TypingDetection();
+ virtual ~TypingDetection();
+
+  // Runs the detection algorithm. Shall be called every 10 ms. Returns true if
+  // typing is detected, or false if not, based on the update period set with
+  // SetParameters(). See the |report_detection_update_period_| description below.
+ bool Process(bool key_pressed, bool vad_activity);
+
+ // Gets the time in seconds since the last detection.
+ int TimeSinceLastDetectionInSeconds();
+
+ // Sets the algorithm parameters. A parameter value of 0 leaves it unchanged.
+  // See the corresponding member variables below for descriptions.
+ void SetParameters(int time_window,
+ int cost_per_typing,
+ int reporting_threshold,
+ int penalty_decay,
+ int type_event_delay,
+ int report_detection_update_period);
+
+ private:
+ int time_active_;
+ int time_since_last_typing_;
+ int penalty_counter_;
+
+ // Counter since last time the detection status reported by Process() was
+ // updated. See also |report_detection_update_period_|.
+ int counter_since_last_detection_update_;
+
+ // The detection status to report. Updated every
+ // |report_detection_update_period_| call to Process().
+ bool detection_to_report_;
+
+ // What |detection_to_report_| should be set to next time it is updated.
+ bool new_detection_to_report_;
+
+ // Settable threshold values.
+
+ // Number of 10 ms slots accepted to count as a hit.
+ int time_window_;
+
+  // Penalty added when a typing event coincides with voice activity.
+ int cost_per_typing_;
+
+ // Threshold for |penalty_counter_|.
+ int reporting_threshold_;
+
+ // How much we reduce |penalty_counter_| every 10 ms.
+ int penalty_decay_;
+
+  // How old a typing event may be, in 10 ms slots, and still be counted.
+ int type_event_delay_;
+
+ // Settable update period.
+
+  // Number of 10 ms slots between each update of the detection status
+  // returned by Process(). This inertia is usually desirable and is provided
+  // so that consumers of the class do not have to implement it themselves.
+  // If set to 1, each call to Process() returns the detection status for
+  // that 10 ms slot.
+  // If set to N (where N > 1), the detection status returned by Process()
+  // remains the same until Process() has been called N times. At that point,
+  // Process() returns true if at least one of the last N calls detected
+  // typing in its 10 ms slot, and false otherwise. The returned status then
+  // remains the same until the next N calls have been made.
+ int report_detection_update_period_;
+};
+
+} // namespace webrtc
+
+#endif // #ifndef WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
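To make the update-period semantics above concrete, a minimal usage sketch (the key-press and VAD sources are placeholders, not part of the patch):

#include <cstdio>

#include "webrtc/modules/audio_processing/typing_detection.h"

int main() {
  webrtc::TypingDetection detector;
  // Refresh the reported status once per second instead of every 10 ms slot;
  // a value of 0 leaves the remaining parameters at their defaults.
  detector.SetParameters(0, 0, 0, 0, 0, 100);

  for (int slot = 0; slot < 500; ++slot) {  // 5 seconds of 10 ms slots.
    bool key_pressed = (slot % 37 == 0);    // placeholder key-event source
    bool vad_activity = (slot % 5 != 0);    // placeholder VAD decision
    if (detector.Process(key_pressed, vad_activity)) {
      std::printf("typing detected, last key event %d s ago\n",
                  detector.TimeSinceLastDetectionInSeconds());
    }
  }
  return 0;
}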
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c
index 6d6e9bc97bb..3b2043267ff 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c
@@ -30,10 +30,6 @@ static const float kMinHistogramThreshold = 1.5f;
static const int kMinRequiredHits = 10;
static const int kMaxHitsWhenPossiblyNonCausal = 10;
static const int kMaxHitsWhenPossiblyCausal = 1000;
-// TODO(bjornv): Make kMaxDelayDifference a configurable parameter, since it
-// corresponds to the filter length if the delay estimation is used in echo
-// control.
-static const int kMaxDelayDifference = 32;
static const float kQ14Scaling = 1.f / (1 << 14); // Scaling by 2^14 to get Q0.
static const float kFractionSlope = 0.05f;
static const float kMinFractionWhenPossiblyCausal = 0.5f;
@@ -195,8 +191,8 @@ static int HistogramBasedValidation(const BinaryDelayEstimator* self,
// depending on the distance between the |candidate_delay| and |last_delay|.
// TODO(bjornv): How much can we gain by turning the fraction calculation
// into tables?
- if (delay_difference >= kMaxDelayDifference) {
- fraction = 1.f - kFractionSlope * (delay_difference - kMaxDelayDifference);
+ if (delay_difference > self->allowed_offset) {
+ fraction = 1.f - kFractionSlope * (delay_difference - self->allowed_offset);
fraction = (fraction > kMinFractionWhenPossiblyCausal ? fraction :
kMinFractionWhenPossiblyCausal);
} else if (delay_difference < 0) {
@@ -308,6 +304,39 @@ void WebRtc_InitBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self) {
memset(self->far_bit_counts, 0, sizeof(int) * self->history_size);
}
+void WebRtc_SoftResetBinaryDelayEstimatorFarend(
+ BinaryDelayEstimatorFarend* self, int delay_shift) {
+ int abs_shift = abs(delay_shift);
+ int shift_size = 0;
+ int dest_index = 0;
+ int src_index = 0;
+ int padding_index = 0;
+
+ assert(self != NULL);
+ shift_size = self->history_size - abs_shift;
+ assert(shift_size > 0);
+ if (delay_shift == 0) {
+ return;
+ } else if (delay_shift > 0) {
+ dest_index = abs_shift;
+ } else if (delay_shift < 0) {
+ src_index = abs_shift;
+ padding_index = shift_size;
+ }
+
+ // Shift and zero pad buffers.
+ memmove(&self->binary_far_history[dest_index],
+ &self->binary_far_history[src_index],
+ sizeof(*self->binary_far_history) * shift_size);
+ memset(&self->binary_far_history[padding_index], 0,
+ sizeof(*self->binary_far_history) * abs_shift);
+ memmove(&self->far_bit_counts[dest_index],
+ &self->far_bit_counts[src_index],
+ sizeof(*self->far_bit_counts) * shift_size);
+ memset(&self->far_bit_counts[padding_index], 0,
+ sizeof(*self->far_bit_counts) * abs_shift);
+}
+
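The soft reset above shifts the far-end history instead of clearing it: a positive delay_shift slides the history toward higher indices and zero-pads the front, a negative one drops the leading entries and zero-pads the tail. A self-contained sketch of the same memmove/memset pattern on a plain buffer (names are illustrative, not part of the patch):

#include <cstdint>
#include <cstdlib>
#include <cstring>

void SoftShiftHistory(uint32_t* history, int size, int delay_shift) {
  if (delay_shift == 0) return;
  const int abs_shift = std::abs(delay_shift);
  const int shift_size = size - abs_shift;  // assumed > 0, as asserted above
  const int dest = delay_shift > 0 ? abs_shift : 0;
  const int src = delay_shift > 0 ? 0 : abs_shift;
  const int pad = delay_shift > 0 ? 0 : shift_size;
  std::memmove(&history[dest], &history[src], sizeof(*history) * shift_size);
  std::memset(&history[pad], 0, sizeof(*history) * abs_shift);
}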
void WebRtc_AddBinaryFarSpectrum(BinaryDelayEstimatorFarend* handle,
uint32_t binary_far_spectrum) {
assert(handle != NULL);
@@ -349,10 +378,10 @@ void WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* self) {
}
BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
- BinaryDelayEstimatorFarend* farend, int lookahead) {
+ BinaryDelayEstimatorFarend* farend, int max_lookahead) {
BinaryDelayEstimator* self = NULL;
- if ((farend != NULL) && (lookahead >= 0)) {
+ if ((farend != NULL) && (max_lookahead >= 0)) {
// Sanity conditions fulfilled.
self = malloc(sizeof(BinaryDelayEstimator));
}
@@ -361,7 +390,11 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
int malloc_fail = 0;
self->farend = farend;
- self->near_history_size = lookahead + 1;
+ self->near_history_size = max_lookahead + 1;
+ self->robust_validation_enabled = 0; // Disabled by default.
+ self->allowed_offset = 0;
+
+ self->lookahead = max_lookahead;
// Allocate memory for spectrum buffers. The extra array element in
// |mean_bit_counts| and |histogram| is a dummy element only used while
@@ -374,7 +407,7 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
malloc_fail |= (self->bit_counts == NULL);
// Allocate memory for history buffers.
- self->binary_near_history = malloc((lookahead + 1) * sizeof(uint32_t));
+ self->binary_near_history = malloc((max_lookahead + 1) * sizeof(uint32_t));
malloc_fail |= (self->binary_near_history == NULL);
self->histogram = malloc((farend->history_size + 1) * sizeof(float));
@@ -400,26 +433,40 @@ void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* self) {
self->mean_bit_counts[i] = (20 << 9); // 20 in Q9.
self->histogram[i] = 0.f;
}
- self->minimum_probability = (32 << 9); // 32 in Q9.
- self->last_delay_probability = (32 << 9); // 32 in Q9.
+ self->minimum_probability = kMaxBitCountsQ9; // 32 in Q9.
+ self->last_delay_probability = (int) kMaxBitCountsQ9; // 32 in Q9.
// Default return value if we're unable to estimate. -1 is used for errors.
self->last_delay = -2;
- self->robust_validation_enabled = 0; // Disabled by default.
self->last_candidate_delay = -2;
self->compare_delay = self->farend->history_size;
self->candidate_hits = 0;
self->last_delay_histogram = 0.f;
}
+int WebRtc_SoftResetBinaryDelayEstimator(BinaryDelayEstimator* self,
+ int delay_shift) {
+ int lookahead = 0;
+ assert(self != NULL);
+ lookahead = self->lookahead;
+ self->lookahead -= delay_shift;
+ if (self->lookahead < 0) {
+ self->lookahead = 0;
+ }
+ if (self->lookahead > self->near_history_size - 1) {
+ self->lookahead = self->near_history_size - 1;
+ }
+ return lookahead - self->lookahead;
+}
+
int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self,
uint32_t binary_near_spectrum) {
int i = 0;
int candidate_delay = -1;
int valid_candidate = 0;
- int32_t value_best_candidate = 32 << 9; // 32 in Q9, (max |mean_bit_counts|).
+ int32_t value_best_candidate = kMaxBitCountsQ9;
int32_t value_worst_candidate = 0;
int32_t valley_depth = 0;
@@ -430,8 +477,7 @@ int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self,
memmove(&(self->binary_near_history[1]), &(self->binary_near_history[0]),
(self->near_history_size - 1) * sizeof(uint32_t));
self->binary_near_history[0] = binary_near_spectrum;
- binary_near_spectrum =
- self->binary_near_history[self->near_history_size - 1];
+ binary_near_spectrum = self->binary_near_history[self->lookahead];
}
// Compare with delayed spectra and store the |bit_counts| for each delay.
@@ -547,21 +593,23 @@ int WebRtc_binary_last_delay(BinaryDelayEstimator* self) {
return self->last_delay;
}
-int WebRtc_binary_last_delay_quality(BinaryDelayEstimator* self) {
- int delay_quality = 0;
+float WebRtc_binary_last_delay_quality(BinaryDelayEstimator* self) {
+ float quality = 0;
assert(self != NULL);
- // |last_delay_probability| is the opposite of quality and states how deep the
- // minimum of the cost function is. The value states how many non-matching
- // bits we have between the binary spectra for the corresponding delay
- // estimate. The range is thus from 0 to 32, since we use 32 bits in the
- // binary spectra.
-
- // Return the |delay_quality| = 1 - |last_delay_probability| / 32 (in Q14).
- delay_quality = (32 << 9) - self->last_delay_probability;
- if (delay_quality < 0) {
- delay_quality = 0;
+
+ if (self->robust_validation_enabled) {
+ // Simply a linear function of the histogram height at delay estimate.
+ quality = self->histogram[self->compare_delay] / kHistogramMax;
+ } else {
+ // Note that |last_delay_probability| states how deep the minimum of the
+ // cost function is, so it is rather an error probability.
+ quality = (float) (kMaxBitCountsQ9 - self->last_delay_probability) /
+ kMaxBitCountsQ9;
+ if (quality < 0) {
+ quality = 0;
+ }
}
- return delay_quality;
+ return quality;
}
void WebRtc_MeanEstimatorFix(int32_t new_value,
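The rewritten quality metric above maps the Q9 bit-count probability onto [0, 1] in the non-robust case; a quick worked check (the probability value is illustrative):

#include <cstdio>

int main() {
  const int kMaxBitCountsQ9 = 32 << 9;
  const int last_delay_probability = 4 << 9;  // 4 mismatching bits out of 32.
  float quality = static_cast<float>(kMaxBitCountsQ9 - last_delay_probability) /
                  kMaxBitCountsQ9;
  std::printf("quality = %.3f\n", quality);  // prints 0.875
  return 0;
}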
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.h b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.h
index 7ffb81b8b18..3d5ffce20e9 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.h
@@ -16,6 +16,8 @@
#include "webrtc/typedefs.h"
+static const int32_t kMaxBitCountsQ9 = (32 << 9); // 32 matching bits in Q9.
+
typedef struct {
// Pointer to bit counts.
int* far_bit_counts;
@@ -44,12 +46,16 @@ typedef struct {
// Robust validation
int robust_validation_enabled;
+ int allowed_offset;
int last_candidate_delay;
int compare_delay;
int candidate_hits;
float* histogram;
float last_delay_histogram;
+ // For dynamically changing the lookahead when using SoftReset...().
+ int lookahead;
+
// Far-end binary spectrum history buffer etc.
BinaryDelayEstimatorFarend* farend;
} BinaryDelayEstimator;
@@ -90,6 +96,15 @@ BinaryDelayEstimatorFarend* WebRtc_CreateBinaryDelayEstimatorFarend(
//
void WebRtc_InitBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self);
+// Soft resets the delay estimation far-end instance created with
+// WebRtc_CreateBinaryDelayEstimatorFarend(...).
+//
+// Input:
+// - delay_shift : The amount of blocks to shift history buffers.
+//
+void WebRtc_SoftResetBinaryDelayEstimatorFarend(
+ BinaryDelayEstimatorFarend* self, int delay_shift);
+
// Adds the binary far-end spectrum to the internal far-end history buffer. This
// spectrum is used as reference when calculating the delay using
// WebRtc_ProcessBinarySpectrum().
@@ -121,38 +136,10 @@ void WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* self);
// Allocates the memory needed by the binary delay estimation. The memory needs
// to be initialized separately through WebRtc_InitBinaryDelayEstimator(...).
//
-// Inputs:
-// - farend : Pointer to the far-end part of the Binary Delay
-// Estimator. This memory has to be created separately
-// prior to this call using
-// WebRtc_CreateBinaryDelayEstimatorFarend().
-//
-// Note that BinaryDelayEstimator does not take
-// ownership of |farend|.
-//
-// - lookahead : Amount of non-causal lookahead to use. This can
-// detect cases in which a near-end signal occurs before
-// the corresponding far-end signal. It will delay the
-// estimate for the current block by an equal amount,
-// and the returned values will be offset by it.
-//
-// A value of zero is the typical no-lookahead case.
-// This also represents the minimum delay which can be
-// estimated.
-//
-// Note that the effective range of delay estimates is
-// [-|lookahead|,... ,|history_size|-|lookahead|)
-// where |history_size| was set upon creating the far-end
-// history buffer size.
-//
-// Return value:
-// - BinaryDelayEstimator*
-// : Created |handle|. If the memory can't be allocated
-// or if any of the input parameters are invalid NULL
-// is returned.
-//
+// See WebRtc_CreateDelayEstimator(..) in delay_estimator_wrapper.c for detailed
+// description.
BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
- BinaryDelayEstimatorFarend* farend, int lookahead);
+ BinaryDelayEstimatorFarend* farend, int max_lookahead);
// Initializes the delay estimation instance created with
// WebRtc_CreateBinaryDelayEstimator(...).
@@ -165,6 +152,18 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
//
void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* self);
+// Soft resets the delay estimation instance created with
+// WebRtc_CreateBinaryDelayEstimator(...).
+//
+// Input:
+// - delay_shift : The amount of blocks to shift history buffers.
+//
+// Return value:
+// - actual_shifts : The actual number of shifts performed.
+//
+int WebRtc_SoftResetBinaryDelayEstimator(BinaryDelayEstimator* self,
+ int delay_shift);
+
// Estimates and returns the delay between the binary far-end and binary near-
// end spectra. It is assumed the binary far-end spectrum has been added using
// WebRtc_AddBinaryFarSpectrum() prior to this call. The value will be offset by
@@ -199,17 +198,12 @@ int WebRtc_binary_last_delay(BinaryDelayEstimator* self);
// Returns the estimation quality of the last calculated delay updated by the
// function WebRtc_ProcessBinarySpectrum(...). The estimation quality is a value
-// in the interval [0, 1] in Q14. The higher the value, the better quality.
-//
-// Input:
-// - self : Pointer to the delay estimation instance.
+// in the interval [0, 1]. The higher the value, the better the quality.
//
// Return value:
-// - delay_quality : >= 0 - Estimation quality (in Q14) of last
-// calculated delay value.
-// -2 - Insufficient data for estimation.
-//
-int WebRtc_binary_last_delay_quality(BinaryDelayEstimator* self);
+// - delay_quality : >= 0 - Estimation quality of last calculated
+// delay value.
+float WebRtc_binary_last_delay_quality(BinaryDelayEstimator* self);
// Updates the |mean_value| recursively with a step size of 2^-|factor|. This
// function is used internally in the Binary Delay Estimator as well as the
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
index bdc199cafbb..ca0901d6db2 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
@@ -26,6 +26,9 @@ enum { kLookahead = 10 };
// Length of binary spectrum sequence.
enum { kSequenceLength = 400 };
+const int kEnable[] = { 0, 1 };
+const size_t kSizeEnable = sizeof(kEnable) / sizeof(*kEnable);
+
class DelayEstimatorTest : public ::testing::Test {
protected:
DelayEstimatorTest();
@@ -38,7 +41,8 @@ class DelayEstimatorTest : public ::testing::Test {
void RunBinarySpectra(BinaryDelayEstimator* binary1,
BinaryDelayEstimator* binary2,
int near_offset, int lookahead_offset, int far_offset);
- void RunBinarySpectraTest(int near_offset, int lookahead_offset);
+ void RunBinarySpectraTest(int near_offset, int lookahead_offset,
+ int ref_robust_validation, int robust_validation);
void* handle_;
DelayEstimator* self_;
@@ -113,7 +117,7 @@ void DelayEstimatorTest::Init() {
EXPECT_EQ(0, farend_self_->far_spectrum_initialized);
EXPECT_EQ(0, self_->near_spectrum_initialized);
EXPECT_EQ(-2, WebRtc_last_delay(handle_)); // Delay in initial state.
- EXPECT_EQ(0, WebRtc_last_delay_quality(handle_)); // Zero quality.
+ EXPECT_FLOAT_EQ(0, WebRtc_last_delay_quality(handle_)); // Zero quality.
}
void DelayEstimatorTest::InitBinary() {
@@ -143,6 +147,8 @@ void DelayEstimatorTest::RunBinarySpectra(BinaryDelayEstimator* binary1,
int near_offset,
int lookahead_offset,
int far_offset) {
+ int different_validations = binary1->robust_validation_enabled ^
+ binary2->robust_validation_enabled;
WebRtc_InitBinaryDelayEstimatorFarend(binary_farend_);
WebRtc_InitBinaryDelayEstimator(binary1);
WebRtc_InitBinaryDelayEstimator(binary2);
@@ -167,19 +173,32 @@ void DelayEstimatorTest::RunBinarySpectra(BinaryDelayEstimator* binary1,
if ((delay_1 != -2) && (delay_2 != -2)) {
EXPECT_EQ(delay_1, delay_2 - lookahead_offset - near_offset);
}
+ // For the case of identical signals, |delay_1| and |delay_2| should match
+ // all the time, unless one of them has robust validation turned on. In
+ // that case the robust validation leaves the initial state faster.
if ((near_offset == 0) && (lookahead_offset == 0)) {
- EXPECT_EQ(delay_1, delay_2);
+ if (!different_validations) {
+ EXPECT_EQ(delay_1, delay_2);
+ } else {
+ if (binary1->robust_validation_enabled) {
+ EXPECT_GE(delay_1, delay_2);
+ } else {
+ EXPECT_GE(delay_2, delay_1);
+ }
+ }
}
}
// Verify that we have left the initialized state.
EXPECT_NE(-2, WebRtc_binary_last_delay(binary1));
- EXPECT_NE(0, WebRtc_binary_last_delay_quality(binary1));
+ EXPECT_LT(0, WebRtc_binary_last_delay_quality(binary1));
EXPECT_NE(-2, WebRtc_binary_last_delay(binary2));
- EXPECT_NE(0, WebRtc_binary_last_delay_quality(binary2));
+ EXPECT_LT(0, WebRtc_binary_last_delay_quality(binary2));
}
void DelayEstimatorTest::RunBinarySpectraTest(int near_offset,
- int lookahead_offset) {
+ int lookahead_offset,
+ int ref_robust_validation,
+ int robust_validation) {
BinaryDelayEstimator* binary2 =
WebRtc_CreateBinaryDelayEstimator(binary_farend_,
kLookahead + lookahead_offset);
@@ -187,6 +206,8 @@ void DelayEstimatorTest::RunBinarySpectraTest(int near_offset,
// the delay is equivalent with a positive |offset| of the far-end sequence.
// For non-causal systems the delay is equivalent with a negative |offset| of
// the far-end sequence.
+ binary_->robust_validation_enabled = ref_robust_validation;
+ binary2->robust_validation_enabled = robust_validation;
for (int offset = -kLookahead;
offset < kMaxDelay - lookahead_offset - near_offset;
offset++) {
@@ -194,6 +215,7 @@ void DelayEstimatorTest::RunBinarySpectraTest(int near_offset,
}
WebRtc_FreeBinaryDelayEstimator(binary2);
binary2 = NULL;
+ binary_->robust_validation_enabled = 0; // Reset reference.
}
TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
@@ -206,14 +228,12 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
void* handle = farend_handle_;
handle = WebRtc_CreateDelayEstimatorFarend(33, kMaxDelay + kLookahead);
EXPECT_TRUE(handle == NULL);
- handle = farend_handle_;
handle = WebRtc_CreateDelayEstimatorFarend(kSpectrumSize, 1);
EXPECT_TRUE(handle == NULL);
handle = handle_;
handle = WebRtc_CreateDelayEstimator(NULL, kLookahead);
EXPECT_TRUE(handle == NULL);
- handle = handle_;
handle = WebRtc_CreateDelayEstimator(farend_handle_, -1);
EXPECT_TRUE(handle == NULL);
@@ -246,10 +266,18 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
EXPECT_EQ(-1, WebRtc_AddFarSpectrumFix(farend_handle_, far_u16_,
spectrum_size_, 16));
+ // WebRtc_set_allowed_offset() should return -1 if we have:
+ // 1) NULL pointer as |handle|.
+ // 2) |allowed_offset| < 0.
+ EXPECT_EQ(-1, WebRtc_set_allowed_offset(NULL, 0));
+ EXPECT_EQ(-1, WebRtc_set_allowed_offset(handle_, -1));
+
+ EXPECT_EQ(-1, WebRtc_get_allowed_offset(NULL));
+
// WebRtc_enable_robust_validation() should return -1 if we have:
// 1) NULL pointer as |handle|.
// 2) Incorrect |enable| value (not 0 or 1).
- EXPECT_EQ(-1, WebRtc_enable_robust_validation(NULL, 0));
+ EXPECT_EQ(-1, WebRtc_enable_robust_validation(NULL, kEnable[0]));
EXPECT_EQ(-1, WebRtc_enable_robust_validation(handle_, -1));
EXPECT_EQ(-1, WebRtc_enable_robust_validation(handle_, 2));
@@ -286,21 +314,31 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
// WebRtc_last_delay() should return -1 if we have a NULL pointer as |handle|.
EXPECT_EQ(-1, WebRtc_last_delay(NULL));
- // WebRtc_last_delay_quality() should return -1 if we have a NULL pointer as
- // |handle|.
- EXPECT_EQ(-1, WebRtc_last_delay_quality(NULL));
-
// Free any local memory if needed.
WebRtc_FreeDelayEstimator(handle);
}
+TEST_F(DelayEstimatorTest, VerifyAllowedOffset) {
+ // Is set to zero by default.
+ EXPECT_EQ(0, WebRtc_get_allowed_offset(handle_));
+ for (int i = 1; i >= 0; i--) {
+ EXPECT_EQ(0, WebRtc_set_allowed_offset(handle_, i));
+ EXPECT_EQ(i, WebRtc_get_allowed_offset(handle_));
+ Init();
+ // Unaffected over a reset.
+ EXPECT_EQ(i, WebRtc_get_allowed_offset(handle_));
+ }
+}
+
TEST_F(DelayEstimatorTest, VerifyEnableRobustValidation) {
- Init();
// Disabled by default.
EXPECT_EQ(0, WebRtc_is_robust_validation_enabled(handle_));
- for (int i = 1; i >= 0; i--) {
- EXPECT_EQ(0, WebRtc_enable_robust_validation(handle_, i));
- EXPECT_EQ(i, WebRtc_is_robust_validation_enabled(handle_));
+ for (size_t i = 0; i < kSizeEnable; ++i) {
+ EXPECT_EQ(0, WebRtc_enable_robust_validation(handle_, kEnable[i]));
+ EXPECT_EQ(kEnable[i], WebRtc_is_robust_validation_enabled(handle_));
+ Init();
+ // Unaffected over a reset.
+ EXPECT_EQ(kEnable[i], WebRtc_is_robust_validation_enabled(handle_));
}
}
@@ -335,6 +373,7 @@ TEST_F(DelayEstimatorTest, CorrectLastDelay) {
// (|last_delay| = -2). Then we compare the Process() output with the
// last_delay() call.
+ // TODO(bjornv): Update quality values for robust validation.
int last_delay = 0;
// Floating point operations.
Init();
@@ -345,13 +384,16 @@ TEST_F(DelayEstimatorTest, CorrectLastDelay) {
spectrum_size_);
if (last_delay != -2) {
EXPECT_EQ(last_delay, WebRtc_last_delay(handle_));
- EXPECT_EQ(7203, WebRtc_last_delay_quality(handle_));
+ if (!WebRtc_is_robust_validation_enabled(handle_)) {
+ EXPECT_FLOAT_EQ(7203.f / kMaxBitCountsQ9,
+ WebRtc_last_delay_quality(handle_));
+ }
break;
}
}
// Verify that we have left the initialized state.
EXPECT_NE(-2, WebRtc_last_delay(handle_));
- EXPECT_NE(0, WebRtc_last_delay_quality(handle_));
+ EXPECT_LT(0, WebRtc_last_delay_quality(handle_));
// Fixed point operations.
Init();
@@ -362,13 +404,16 @@ TEST_F(DelayEstimatorTest, CorrectLastDelay) {
spectrum_size_, 0);
if (last_delay != -2) {
EXPECT_EQ(last_delay, WebRtc_last_delay(handle_));
- EXPECT_EQ(7203, WebRtc_last_delay_quality(handle_));
+ if (!WebRtc_is_robust_validation_enabled(handle_)) {
+ EXPECT_FLOAT_EQ(7203.f / kMaxBitCountsQ9,
+ WebRtc_last_delay_quality(handle_));
+ }
break;
}
}
// Verify that we have left the initialized state.
EXPECT_NE(-2, WebRtc_last_delay(handle_));
- EXPECT_NE(0, WebRtc_last_delay_quality(handle_));
+ EXPECT_LT(0, WebRtc_last_delay_quality(handle_));
}
TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfBinaryEstimatorFarend) {
@@ -391,18 +436,14 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfBinaryEstimator) {
BinaryDelayEstimator* binary_handle = binary_;
// WebRtc_CreateBinaryDelayEstimator() should return -1 if we have a NULL
- // pointer as |binary_handle| or invalid input values. Upon failure, the
+ // pointer as |binary_farend| or invalid input values. Upon failure, the
// |binary_handle| should be NULL.
// Make sure we have a non-NULL value at start, so we can detect NULL after
// create failure.
binary_handle = WebRtc_CreateBinaryDelayEstimator(NULL, kLookahead);
EXPECT_TRUE(binary_handle == NULL);
- binary_handle = binary_;
binary_handle = WebRtc_CreateBinaryDelayEstimator(binary_farend_, -1);
EXPECT_TRUE(binary_handle == NULL);
- binary_handle = binary_;
- binary_handle = WebRtc_CreateBinaryDelayEstimator(0, 0);
- EXPECT_TRUE(binary_handle == NULL);
}
TEST_F(DelayEstimatorTest, MeanEstimatorFix) {
@@ -431,26 +472,70 @@ TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearSameSpectrum) {
// the signal accordingly. We create two Binary Delay Estimators and feed them
// with the same signals, so they should output the same results.
// We verify both causal and non-causal delays.
+ // For these noise free signals, the robust validation should not have an
+ // impact, hence we turn robust validation on/off for both reference and
+ // delayed near end.
- RunBinarySpectraTest(0, 0);
+ for (size_t i = 0; i < kSizeEnable; ++i) {
+ for (size_t j = 0; j < kSizeEnable; ++j) {
+ RunBinarySpectraTest(0, 0, kEnable[i], kEnable[j]);
+ }
+ }
}
TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearDifferentSpectrum) {
// In this test we use the same setup as above, but we now feed the two Binary
// Delay Estimators with different signals, so they should output different
// results.
+ // For these noise free signals, the robust validation should not have an
+ // impact, hence we turn robust validation on/off for both reference and
+ // delayed near end.
const int kNearOffset = 1;
- RunBinarySpectraTest(kNearOffset, 0);
+ for (size_t i = 0; i < kSizeEnable; ++i) {
+ for (size_t j = 0; j < kSizeEnable; ++j) {
+ RunBinarySpectraTest(kNearOffset, 0, kEnable[i], kEnable[j]);
+ }
+ }
}
TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearDifferentLookahead) {
// In this test we use the same setup as above, feeding the two Binary
// Delay Estimators with the same signals. The difference is that we create
// them with different lookahead.
+ // For these noise free signals, the robust validation should not have an
+ // impact, hence we turn robust validation on/off for both reference and
+ // delayed near end.
const int kLookaheadOffset = 1;
- RunBinarySpectraTest(0, kLookaheadOffset);
+ for (size_t i = 0; i < kSizeEnable; ++i) {
+ for (size_t j = 0; j < kSizeEnable; ++j) {
+ RunBinarySpectraTest(0, kLookaheadOffset, kEnable[i], kEnable[j]);
+ }
+ }
}
+TEST_F(DelayEstimatorTest, AllowedOffsetNoImpactWhenRobustValidationDisabled) {
+ // The same setup as in ExactDelayEstimateMultipleNearSameSpectrum with the
+ // difference that |allowed_offset| is set for the reference binary delay
+ // estimator.
+
+ binary_->allowed_offset = 10;
+ RunBinarySpectraTest(0, 0, 0, 0);
+ binary_->allowed_offset = 0; // Reset reference.
+}
+
+TEST_F(DelayEstimatorTest, VerifyLookaheadAtCreate) {
+ void* farend_handle = WebRtc_CreateDelayEstimatorFarend(kSpectrumSize,
+ kMaxDelay);
+ ASSERT_TRUE(farend_handle != NULL);
+ void* handle = WebRtc_CreateDelayEstimator(farend_handle, kLookahead);
+ ASSERT_TRUE(handle != NULL);
+ EXPECT_EQ(kLookahead, WebRtc_lookahead(handle));
+ WebRtc_FreeDelayEstimator(handle);
+ WebRtc_FreeDelayEstimatorFarend(farend_handle);
+}
+
+// TODO(bjornv): Add tests for SoftReset...(...).
+
} // namespace
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c
index ce4431844ee..6ec894e65e9 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c
@@ -191,6 +191,12 @@ int WebRtc_InitDelayEstimatorFarend(void* handle) {
return 0;
}
+void WebRtc_SoftResetDelayEstimatorFarend(void* handle, int delay_shift) {
+ DelayEstimatorFarend* self = (DelayEstimatorFarend*) handle;
+ assert(self != NULL);
+ WebRtc_SoftResetBinaryDelayEstimatorFarend(self->binary_farend, delay_shift);
+}
+
int WebRtc_AddFarSpectrumFix(void* handle, uint16_t* far_spectrum,
int spectrum_size, int far_q) {
DelayEstimatorFarend* self = (DelayEstimatorFarend*) handle;
@@ -261,7 +267,7 @@ void WebRtc_FreeDelayEstimator(void* handle) {
free(self);
}
-void* WebRtc_CreateDelayEstimator(void* farend_handle, int lookahead) {
+void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead) {
DelayEstimator* self = NULL;
DelayEstimatorFarend* farend = (DelayEstimatorFarend*) farend_handle;
@@ -274,7 +280,7 @@ void* WebRtc_CreateDelayEstimator(void* farend_handle, int lookahead) {
// Allocate memory for the farend spectrum handling.
self->binary_handle =
- WebRtc_CreateBinaryDelayEstimator(farend->binary_farend, lookahead);
+ WebRtc_CreateBinaryDelayEstimator(farend->binary_farend, max_lookahead);
memory_fail |= (self->binary_handle == NULL);
// Allocate memory for spectrum buffers.
@@ -312,6 +318,50 @@ int WebRtc_InitDelayEstimator(void* handle) {
return 0;
}
+int WebRtc_SoftResetDelayEstimator(void* handle, int delay_shift) {
+ DelayEstimator* self = (DelayEstimator*) handle;
+ assert(self != NULL);
+ return WebRtc_SoftResetBinaryDelayEstimator(self->binary_handle, delay_shift);
+}
+
+int WebRtc_set_lookahead(void* handle, int lookahead) {
+ DelayEstimator* self = (DelayEstimator*) handle;
+ assert(self != NULL);
+ assert(self->binary_handle != NULL);
+ if ((lookahead > self->binary_handle->near_history_size - 1) ||
+ (lookahead < 0)) {
+ return -1;
+ }
+ self->binary_handle->lookahead = lookahead;
+ return self->binary_handle->lookahead;
+}
+
+int WebRtc_lookahead(void* handle) {
+ DelayEstimator* self = (DelayEstimator*) handle;
+ assert(self != NULL);
+ assert(self->binary_handle != NULL);
+ return self->binary_handle->lookahead;
+}
+
+int WebRtc_set_allowed_offset(void* handle, int allowed_offset) {
+ DelayEstimator* self = (DelayEstimator*) handle;
+
+ if ((self == NULL) || (allowed_offset < 0)) {
+ return -1;
+ }
+ self->binary_handle->allowed_offset = allowed_offset;
+ return 0;
+}
+
+int WebRtc_get_allowed_offset(const void* handle) {
+ const DelayEstimator* self = (const DelayEstimator*) handle;
+
+ if (self == NULL) {
+ return -1;
+ }
+ return self->binary_handle->allowed_offset;
+}
+
int WebRtc_enable_robust_validation(void* handle, int enable) {
DelayEstimator* self = (DelayEstimator*) handle;
@@ -326,13 +376,12 @@ int WebRtc_enable_robust_validation(void* handle, int enable) {
return 0;
}
-int WebRtc_is_robust_validation_enabled(void* handle) {
- DelayEstimator* self = (DelayEstimator*) handle;
+int WebRtc_is_robust_validation_enabled(const void* handle) {
+ const DelayEstimator* self = (const DelayEstimator*) handle;
if (self == NULL) {
return -1;
}
- assert(self->binary_handle != NULL);
return self->binary_handle->robust_validation_enabled;
}
@@ -403,12 +452,8 @@ int WebRtc_last_delay(void* handle) {
return WebRtc_binary_last_delay(self->binary_handle);
}
-int WebRtc_last_delay_quality(void* handle) {
+float WebRtc_last_delay_quality(void* handle) {
DelayEstimator* self = (DelayEstimator*) handle;
-
- if (self == NULL) {
- return -1;
- }
-
+ assert(self != NULL);
return WebRtc_binary_last_delay_quality(self->binary_handle);
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h
index 50bcddeddc2..13e86bdd438 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h
@@ -52,6 +52,13 @@ void* WebRtc_CreateDelayEstimatorFarend(int spectrum_size, int history_size);
//
int WebRtc_InitDelayEstimatorFarend(void* handle);
+// Soft resets the far-end part of the delay estimation instance returned by
+// WebRtc_CreateDelayEstimatorFarend(...).
+// Input:
+// - delay_shift : The amount of blocks to shift history buffers.
+//
+void WebRtc_SoftResetDelayEstimatorFarend(void* handle, int delay_shift);
+
// Adds the far-end spectrum to the far-end history buffer. This spectrum is
// used as reference when calculating the delay using
// WebRtc_ProcessSpectrum().
@@ -91,11 +98,18 @@ void WebRtc_FreeDelayEstimator(void* handle);
// ownership of |farend_handle|, which has to be torn
// down properly after this instance.
//
-// - lookahead : Amount of non-causal lookahead to use. This can
-// detect cases in which a near-end signal occurs before
-// the corresponding far-end signal. It will delay the
-// estimate for the current block by an equal amount,
-// and the returned values will be offset by it.
+// - max_lookahead : Maximum amount of non-causal lookahead allowed. The
+// actual amount of lookahead used can be controlled by
+// WebRtc_set_lookahead(...). The default |lookahead| is
+// set to |max_lookahead| at create time. Use
+// WebRtc_set_lookahead(...) before start if a different
+// value is desired.
+//
+// Using lookahead can detect cases in which a near-end
+// signal occurs before the corresponding far-end signal.
+// It will delay the estimate for the current block by an
+// equal amount, and the returned values will be offset
+// by it.
//
// A value of zero is the typical no-lookahead case.
// This also represents the minimum delay which can be
@@ -111,7 +125,7 @@ void WebRtc_FreeDelayEstimator(void* handle);
// if any of the input parameters are invalid NULL is
// returned.
//
-void* WebRtc_CreateDelayEstimator(void* farend_handle, int lookahead);
+void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead);
// Initializes the delay estimation instance returned by
// WebRtc_CreateDelayEstimator(...)
@@ -123,17 +137,59 @@ void* WebRtc_CreateDelayEstimator(void* farend_handle, int lookahead);
//
int WebRtc_InitDelayEstimator(void* handle);
+// Soft resets the delay estimation instance returned by
+// WebRtc_CreateDelayEstimator(...)
+// Input:
+// - delay_shift : The amount of blocks to shift history buffers.
+//
+// Return value:
+// - actual_shifts : The actual number of shifts performed.
+//
+int WebRtc_SoftResetDelayEstimator(void* handle, int delay_shift);
+
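A hedged usage sketch (|farend_handle| and |handle| stand for instances created earlier; the shift of 3 blocks is arbitrary). The far-end and near-end instances would normally be soft reset by the same amount:

WebRtc_SoftResetDelayEstimatorFarend(farend_handle, 3);
int applied_shifts = WebRtc_SoftResetDelayEstimator(handle, 3);
// |applied_shifts| can be smaller than 3 if the lookahead hits its bounds.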
+// Sets the amount of |lookahead| to use. Valid values are [0, max_lookahead]
+// where |max_lookahead| was set at create time through
+// WebRtc_CreateDelayEstimator(...).
+//
+// Input:
+// - lookahead : The amount of lookahead, in blocks, to use from now on.
+//
+// Return value:
+// - new_lookahead : The applied |lookahead|, or -1 if out of range.
+//
+int WebRtc_set_lookahead(void* handle, int lookahead);
+
+// Returns the amount of lookahead we currently use.
+int WebRtc_lookahead(void* handle);
+
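A minimal sketch of the create/set/get lookahead flow, assuming the chosen sizes are accepted by the create calls (65 bins, 100 blocks of history and a max lookahead of 10 are placeholders):

#include <assert.h>
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"

void LookaheadExample() {
  void* farend = WebRtc_CreateDelayEstimatorFarend(65, 100);
  void* handle = WebRtc_CreateDelayEstimator(farend, 10);  // max_lookahead.
  assert(handle != NULL);
  assert(WebRtc_lookahead(handle) == 10);          // Defaults to max_lookahead.
  assert(WebRtc_set_lookahead(handle, 4) == 4);    // Returns the new lookahead.
  assert(WebRtc_set_lookahead(handle, 11) == -1);  // Out of range.
  WebRtc_FreeDelayEstimator(handle);
  WebRtc_FreeDelayEstimatorFarend(farend);
}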
+// Sets the |allowed_offset| used in the robust validation scheme. If the
+// delay estimator is used in an echo control component, this parameter is
+// related to the filter length. In principle |allowed_offset| should be set to
+// the echo control filter length minus the expected echo duration, i.e., the
+// delay offset the echo control can handle without quality regression. The
+// default value, used if not set manually, is zero. Note that |allowed_offset|
+// has to be non-negative.
+// Inputs:
+// - handle : Pointer to the delay estimation instance.
+// - allowed_offset : The amount of delay offset, measured in partitions,
+// the echo control filter can handle.
+int WebRtc_set_allowed_offset(void* handle, int allowed_offset);
+
+// Returns the |allowed_offset| in number of partitions.
+int WebRtc_get_allowed_offset(const void* handle);
+
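A hedged sketch of how an echo controller might pick |allowed_offset|; the numbers are illustrative (a 12-partition filter expecting roughly 8 partitions of echo leaves 4 partitions of slack):

if (WebRtc_set_allowed_offset(handle, 4) == 0) {
  assert(WebRtc_get_allowed_offset(handle) == 4);  // Preserved over a reset.
}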
// TODO(bjornv): Implement this functionality. Currently, enabling it has no
// impact, hence this is an empty API.
// Enables/Disables a robust validation functionality in the delay estimation.
-// This is by default disabled upon initialization.
+// This is by default set to disabled at create time. The state is preserved
+// over a reset.
// Inputs:
// - handle : Pointer to the delay estimation instance.
// - enable : Enable (1) or disable (0) this feature.
int WebRtc_enable_robust_validation(void* handle, int enable);
// Returns 1 if robust validation is enabled and 0 if disabled.
-int WebRtc_is_robust_validation_enabled(void* handle);
+int WebRtc_is_robust_validation_enabled(const void* handle);
// Estimates and returns the delay between the far-end and near-end blocks. The
// value will be offset by the lookahead (i.e. the lookahead should be
@@ -179,18 +235,11 @@ int WebRtc_last_delay(void* handle);
// Returns the estimation quality/probability of the last calculated delay
// updated by the function WebRtc_DelayEstimatorProcess(...). The estimation
-// quality is a value in the interval [0, 1] in Q9. The higher the value, the
-// better quality.
-//
-// Input:
-// - handle : Pointer to the delay estimation instance.
+// quality is a value in the interval [0, 1]. The higher the value, the better
+// the quality.
//
// Return value:
-// - delay_quality : >= 0 - Estimation quality (in Q9) of last calculated
-// delay value.
-// -1 - Error.
-// -2 - Insufficient data for estimation.
-//
-int WebRtc_last_delay_quality(void* handle);
+// - delay_quality : >= 0 - Estimation quality of last calculated delay.
+float WebRtc_last_delay_quality(void* handle);
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_WRAPPER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/ring_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/utility/ring_buffer_unittest.cc
index 2b7634dd073..5dacf0b804c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/ring_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/ring_buffer_unittest.cc
@@ -22,7 +22,12 @@ extern "C" {
namespace webrtc {
-typedef scoped_ptr_malloc<RingBuffer, WebRtc_FreeBuffer> scoped_ring_buffer;
+struct FreeBufferDeleter {
+ inline void operator()(void* ptr) const {
+ WebRtc_FreeBuffer(ptr);
+ }
+};
+typedef scoped_ptr<RingBuffer, FreeBufferDeleter> scoped_ring_buffer;
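The replacement above swaps scoped_ptr_malloc for scoped_ptr with a custom deleter. The same pattern, sketched for an arbitrary C-style create/free pair (Foo, Foo_Create and Foo_Free are placeholders):

struct FooDeleter {
  void operator()(Foo* ptr) const { Foo_Free(ptr); }
};
typedef webrtc::scoped_ptr<Foo, FooDeleter> scoped_foo;
// scoped_foo foo(Foo_Create());  // Foo_Free() runs when |foo| goes out of scope.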
static void AssertElementEq(int expected, int actual) {
ASSERT_EQ(expected, actual);
@@ -56,8 +61,8 @@ static void RandomStressTest(int** data_ptr) {
srand(seed);
for (int i = 0; i < kNumTests; i++) {
const int buffer_size = std::max(rand() % kMaxBufferSize, 1);
- scoped_array<int> write_data(new int[buffer_size]);
- scoped_array<int> read_data(new int[buffer_size]);
+ scoped_ptr<int[]> write_data(new int[buffer_size]);
+ scoped_ptr<int[]> read_data(new int[buffer_size]);
scoped_ring_buffer buffer(WebRtc_CreateBuffer(buffer_size, sizeof(int)));
ASSERT_TRUE(buffer.get() != NULL);
ASSERT_EQ(0, WebRtc_InitBuffer(buffer.get()));
diff --git a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
index d41547c8022..c6e497ffa3e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
@@ -13,10 +13,8 @@
#include <assert.h>
#include "webrtc/common_audio/vad/include/webrtc_vad.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
namespace webrtc {
@@ -39,9 +37,11 @@ int MapSetting(VoiceDetection::Likelihood likelihood) {
}
} // namespace
-VoiceDetectionImpl::VoiceDetectionImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
+VoiceDetectionImpl::VoiceDetectionImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
apm_(apm),
+ crit_(crit),
stream_has_voice_(false),
using_external_vad_(false),
likelihood_(kLowLikelihood),
@@ -61,7 +61,7 @@ int VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
assert(audio->samples_per_split_channel() <= 160);
- int16_t* mixed_data = audio->low_pass_split_data(0);
+ const int16_t* mixed_data = audio->low_pass_split_data(0);
if (audio->num_channels() > 1) {
audio->CopyAndMixLowPass(1);
mixed_data = audio->mixed_low_pass_data(0);
@@ -70,7 +70,7 @@ int VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
// TODO(ajm): concatenate data in frame buffer here.
int vad_ret = WebRtcVad_Process(static_cast<Handle*>(handle(0)),
- apm_->split_sample_rate_hz(),
+ apm_->proc_split_sample_rate_hz(),
mixed_data,
frame_size_samples_);
if (vad_ret == 0) {
@@ -87,7 +87,7 @@ int VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
int VoiceDetectionImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
return EnableComponent(enable);
}
@@ -108,7 +108,7 @@ bool VoiceDetectionImpl::stream_has_voice() const {
}
int VoiceDetectionImpl::set_likelihood(VoiceDetection::Likelihood likelihood) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(likelihood) == -1) {
return apm_->kBadParameterError;
}
@@ -122,7 +122,7 @@ VoiceDetection::Likelihood VoiceDetectionImpl::likelihood() const {
}
int VoiceDetectionImpl::set_frame_size_ms(int size) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
assert(size == 10); // TODO(ajm): remove when supported.
if (size != 10 &&
size != 20 &&
@@ -146,7 +146,8 @@ int VoiceDetectionImpl::Initialize() {
}
using_external_vad_ = false;
- frame_size_samples_ = frame_size_ms_ * (apm_->split_sample_rate_hz() / 1000);
+ frame_size_samples_ = frame_size_ms_ *
+ apm_->proc_split_sample_rate_hz() / 1000;
// TODO(ajm): initialize frame buffer here.
return apm_->kNoError;
@@ -163,8 +164,8 @@ void* VoiceDetectionImpl::CreateHandle() const {
return handle;
}
-int VoiceDetectionImpl::DestroyHandle(void* handle) const {
- return WebRtcVad_Free(static_cast<Handle*>(handle));
+void VoiceDetectionImpl::DestroyHandle(void* handle) const {
+ WebRtcVad_Free(static_cast<Handle*>(handle));
}
int VoiceDetectionImpl::InitializeHandle(void* handle) const {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.h b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.h
index f8f50e8493c..1dfdf20ae92 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.h
@@ -15,13 +15,14 @@
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class VoiceDetectionImpl : public VoiceDetection,
public ProcessingComponent {
public:
- explicit VoiceDetectionImpl(const AudioProcessingImpl* apm);
+ VoiceDetectionImpl(const AudioProcessing* apm, CriticalSectionWrapper* crit);
virtual ~VoiceDetectionImpl();
int ProcessCaptureAudio(AudioBuffer* audio);
@@ -46,11 +47,12 @@ class VoiceDetectionImpl : public VoiceDetection,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
bool stream_has_voice_;
bool using_external_vad_;
Likelihood likelihood_;
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/OWNERS b/chromium/third_party/webrtc/modules/bitrate_controller/OWNERS
index 6c7028550b6..cce3a26bc5e 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/OWNERS
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/OWNERS
@@ -3,3 +3,8 @@ stefan@webrtc.org
henrik.lundin@webrtc.org
mflodman@webrtc.org
asapersson@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
index c2c8616559b..cff5dd18545 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
@@ -18,7 +18,8 @@
namespace webrtc {
-class RtcpBandwidthObserverImpl : public RtcpBandwidthObserver {
+class BitrateControllerImpl::RtcpBandwidthObserverImpl
+ : public RtcpBandwidthObserver {
public:
explicit RtcpBandwidthObserverImpl(BitrateControllerImpl* owner)
: owner_(owner) {
@@ -76,94 +77,29 @@ class RtcpBandwidthObserverImpl : public RtcpBandwidthObserver {
BitrateControllerImpl* owner_;
};
-class LowRateStrategy {
- public:
- LowRateStrategy(
- SendSideBandwidthEstimation* bandwidth_estimation,
- BitrateControllerImpl::BitrateObserverConfList* bitrate_observers)
- : bandwidth_estimation_(bandwidth_estimation),
- bitrate_observers_(bitrate_observers) {}
-
- virtual ~LowRateStrategy() {}
-
- virtual void LowRateAllocation(uint32_t bitrate,
- uint8_t fraction_loss,
- uint32_t rtt,
- uint32_t sum_min_bitrates) = 0;
-
- protected:
- SendSideBandwidthEstimation* bandwidth_estimation_;
- BitrateControllerImpl::BitrateObserverConfList* bitrate_observers_;
-};
-
-class EnforceMinRateStrategy : public LowRateStrategy {
- public:
- EnforceMinRateStrategy(
- SendSideBandwidthEstimation* bandwidth_estimation,
- BitrateControllerImpl::BitrateObserverConfList* bitrate_observers)
- : LowRateStrategy(bandwidth_estimation, bitrate_observers) {}
-
- void LowRateAllocation(uint32_t bitrate,
- uint8_t fraction_loss,
- uint32_t rtt,
- uint32_t sum_min_bitrates) {
- // Min bitrate to all observers.
- BitrateControllerImpl::BitrateObserverConfList::iterator it;
- for (it = bitrate_observers_->begin(); it != bitrate_observers_->end();
- ++it) {
- it->first->OnNetworkChanged(it->second->min_bitrate_, fraction_loss,
- rtt);
- }
- // Set sum of min to current send bitrate.
- bandwidth_estimation_->SetSendBitrate(sum_min_bitrates);
- }
-};
-
-class NoEnforceMinRateStrategy : public LowRateStrategy {
- public:
- NoEnforceMinRateStrategy(
- SendSideBandwidthEstimation* bandwidth_estimation,
- BitrateControllerImpl::BitrateObserverConfList* bitrate_observers)
- : LowRateStrategy(bandwidth_estimation, bitrate_observers) {}
-
- void LowRateAllocation(uint32_t bitrate,
- uint8_t fraction_loss,
- uint32_t rtt,
- uint32_t sum_min_bitrates) {
- // Allocate up to |min_bitrate_| to one observer at a time, until
- // |bitrate| is depleted.
- uint32_t remainder = bitrate;
- BitrateControllerImpl::BitrateObserverConfList::iterator it;
- for (it = bitrate_observers_->begin(); it != bitrate_observers_->end();
- ++it) {
- uint32_t allocation = std::min(remainder, it->second->min_bitrate_);
- it->first->OnNetworkChanged(allocation, fraction_loss, rtt);
- remainder -= allocation;
- }
- // Set |bitrate| to current send bitrate.
- bandwidth_estimation_->SetSendBitrate(bitrate);
- }
-};
-
BitrateController* BitrateController::CreateBitrateController(
+ Clock* clock,
bool enforce_min_bitrate) {
- return new BitrateControllerImpl(enforce_min_bitrate);
+ return new BitrateControllerImpl(clock, enforce_min_bitrate);
}
-BitrateControllerImpl::BitrateControllerImpl(bool enforce_min_bitrate)
- : critsect_(CriticalSectionWrapper::CreateCriticalSection()) {
- if (enforce_min_bitrate) {
- low_rate_strategy_.reset(new EnforceMinRateStrategy(
- &bandwidth_estimation_, &bitrate_observers_));
- } else {
- low_rate_strategy_.reset(new NoEnforceMinRateStrategy(
- &bandwidth_estimation_, &bitrate_observers_));
- }
-}
+BitrateControllerImpl::BitrateControllerImpl(Clock* clock, bool enforce_min_bitrate)
+ : clock_(clock),
+ last_bitrate_update_ms_(clock_->TimeInMilliseconds()),
+ critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+ bandwidth_estimation_(),
+ bitrate_observers_(),
+ enforce_min_bitrate_(enforce_min_bitrate),
+ reserved_bitrate_bps_(0),
+ last_bitrate_bps_(0),
+ last_fraction_loss_(0),
+ last_rtt_ms_(0),
+ last_enforce_min_bitrate_(!enforce_min_bitrate_),
+ bitrate_observers_modified_(false),
+ last_reserved_bitrate_bps_(0) {}
BitrateControllerImpl::~BitrateControllerImpl() {
- BitrateObserverConfList::iterator it =
- bitrate_observers_.begin();
+ BitrateObserverConfList::iterator it = bitrate_observers_.begin();
while (it != bitrate_observers_.end()) {
delete it->second;
bitrate_observers_.erase(it);
@@ -203,26 +139,56 @@ void BitrateControllerImpl::SetBitrateObserver(
it->second->start_bitrate_ = start_bitrate;
it->second->min_bitrate_ = min_bitrate;
it->second->max_bitrate_ = max_bitrate;
+ // Set the send-side bandwidth to the max of the sum of start bitrates and
+ // the current estimate, so that if the user wants to immediately use more
+ // bandwidth, that can be enforced.
+ uint32_t sum_start_bitrate = 0;
+ BitrateObserverConfList::iterator it;
+ for (it = bitrate_observers_.begin(); it != bitrate_observers_.end();
+ ++it) {
+ sum_start_bitrate += it->second->start_bitrate_;
+ }
+ uint32_t current_estimate;
+ uint8_t loss;
+ uint32_t rtt;
+ bandwidth_estimation_.CurrentEstimate(&current_estimate, &loss, &rtt);
+ bandwidth_estimation_.SetSendBitrate(std::max(sum_start_bitrate,
+ current_estimate));
} else {
// Add new settings.
bitrate_observers_.push_back(BitrateObserverConfiguration(observer,
new BitrateConfiguration(start_bitrate, min_bitrate, max_bitrate)));
+ bitrate_observers_modified_ = true;
+
+ // TODO(andresp): This is an ugly way to set start bitrate.
+ //
+ // Only change start bitrate if we have exactly one observer. By definition
+ // you can only have one start bitrate, once we have our first estimate we
+ // will adapt from there.
+ if (bitrate_observers_.size() == 1) {
+ bandwidth_estimation_.SetSendBitrate(start_bitrate);
+ }
}
- uint32_t sum_start_bitrate = 0;
+
+ UpdateMinMaxBitrate();
+}
+
+void BitrateControllerImpl::UpdateMinMaxBitrate() {
uint32_t sum_min_bitrate = 0;
uint32_t sum_max_bitrate = 0;
-
- // Summarize all configurations.
+ BitrateObserverConfList::iterator it;
for (it = bitrate_observers_.begin(); it != bitrate_observers_.end(); ++it) {
- sum_start_bitrate += it->second->start_bitrate_;
sum_min_bitrate += it->second->min_bitrate_;
sum_max_bitrate += it->second->max_bitrate_;
}
- // Only change start bitrate if we have exactly one observer. By definition
- // you can only have one start bitrate, once we have our first estimate we
- // will adapt from there.
- if (bitrate_observers_.size() == 1) {
- bandwidth_estimation_.SetSendBitrate(sum_start_bitrate);
+ if (sum_max_bitrate == 0) {
+ // No max configured; use 1 Gbit/s.
+ sum_max_bitrate = 1000000000;
+ }
+ if (enforce_min_bitrate_ == false) {
+ // If not enforcing min bitrate, allow the bandwidth estimation to
+ // go as low as 10 kbps.
+ sum_min_bitrate = std::min(sum_min_bitrate, 10000u);
}
bandwidth_estimation_.SetMinMaxBitrate(sum_min_bitrate,
sum_max_bitrate);
@@ -235,31 +201,46 @@ void BitrateControllerImpl::RemoveBitrateObserver(BitrateObserver* observer) {
if (it != bitrate_observers_.end()) {
delete it->second;
bitrate_observers_.erase(it);
+ bitrate_observers_modified_ = true;
}
}
void BitrateControllerImpl::EnforceMinBitrate(bool enforce_min_bitrate) {
CriticalSectionScoped cs(critsect_);
- if (enforce_min_bitrate) {
- low_rate_strategy_.reset(new EnforceMinRateStrategy(
- &bandwidth_estimation_, &bitrate_observers_));
- } else {
- low_rate_strategy_.reset(new NoEnforceMinRateStrategy(
- &bandwidth_estimation_, &bitrate_observers_));
- }
+ enforce_min_bitrate_ = enforce_min_bitrate;
+ UpdateMinMaxBitrate();
+}
+
+void BitrateControllerImpl::SetReservedBitrate(uint32_t reserved_bitrate_bps) {
+ CriticalSectionScoped cs(critsect_);
+ reserved_bitrate_bps_ = reserved_bitrate_bps;
+ MaybeTriggerOnNetworkChanged();
}
void BitrateControllerImpl::OnReceivedEstimatedBitrate(const uint32_t bitrate) {
- uint32_t new_bitrate = 0;
- uint8_t fraction_lost = 0;
- uint16_t rtt = 0;
CriticalSectionScoped cs(critsect_);
- if (bandwidth_estimation_.UpdateBandwidthEstimate(bitrate,
- &new_bitrate,
- &fraction_lost,
- &rtt)) {
- OnNetworkChanged(new_bitrate, fraction_lost, rtt);
+ bandwidth_estimation_.UpdateReceiverEstimate(bitrate);
+ MaybeTriggerOnNetworkChanged();
+}
+
+int32_t BitrateControllerImpl::TimeUntilNextProcess() {
+ enum { kBitrateControllerUpdateIntervalMs = 25 };
+ CriticalSectionScoped cs(critsect_);
+ int time_since_update_ms =
+ clock_->TimeInMilliseconds() - last_bitrate_update_ms_;
+ return std::max(0, kBitrateControllerUpdateIntervalMs - time_since_update_ms);
+}
+
+int32_t BitrateControllerImpl::Process() {
+ if (TimeUntilNextProcess() > 0)
+ return 0;
+ {
+ CriticalSectionScoped cs(critsect_);
+ bandwidth_estimation_.UpdateEstimate(clock_->TimeInMilliseconds());
+ MaybeTriggerOnNetworkChanged();
}
+ last_bitrate_update_ms_ = clock_->TimeInMilliseconds();
+ return 0;
}
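A minimal sketch of how a process thread might drive the two new methods (the polling helper name is made up; in practice the registered ProcessThread does this):

void PollBitrateController(webrtc::BitrateController* controller) {
  if (controller->TimeUntilNextProcess() <= 0) {
    // Re-runs the bandwidth estimation and notifies observers if anything
    // changed since the last update.
    controller->Process();
  }
}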
void BitrateControllerImpl::OnReceivedRtcpReceiverReport(
@@ -267,37 +248,63 @@ void BitrateControllerImpl::OnReceivedRtcpReceiverReport(
const uint32_t rtt,
const int number_of_packets,
const uint32_t now_ms) {
- uint32_t new_bitrate = 0;
- uint8_t loss = fraction_loss;
CriticalSectionScoped cs(critsect_);
- if (bandwidth_estimation_.UpdatePacketLoss(number_of_packets, rtt, now_ms,
- &loss, &new_bitrate)) {
- OnNetworkChanged(new_bitrate, loss, rtt);
+ bandwidth_estimation_.UpdateReceiverBlock(
+ fraction_loss, rtt, number_of_packets, now_ms);
+ MaybeTriggerOnNetworkChanged();
+}
+
+void BitrateControllerImpl::MaybeTriggerOnNetworkChanged() {
+ uint32_t bitrate;
+ uint8_t fraction_loss;
+ uint32_t rtt;
+ bandwidth_estimation_.CurrentEstimate(&bitrate, &fraction_loss, &rtt);
+ bitrate -= std::min(bitrate, reserved_bitrate_bps_);
+
+ if (bitrate_observers_modified_ ||
+ bitrate != last_bitrate_bps_ ||
+ fraction_loss != last_fraction_loss_ ||
+ rtt != last_rtt_ms_ ||
+ last_enforce_min_bitrate_ != enforce_min_bitrate_ ||
+ last_reserved_bitrate_bps_ != reserved_bitrate_bps_) {
+ last_bitrate_bps_ = bitrate;
+ last_fraction_loss_ = fraction_loss;
+ last_rtt_ms_ = rtt;
+ last_enforce_min_bitrate_ = enforce_min_bitrate_;
+ last_reserved_bitrate_bps_ = reserved_bitrate_bps_;
+ bitrate_observers_modified_ = false;
+ OnNetworkChanged(bitrate, fraction_loss, rtt);
}
}
-// We have the lock here.
void BitrateControllerImpl::OnNetworkChanged(const uint32_t bitrate,
const uint8_t fraction_loss,
const uint32_t rtt) {
// Sanity check.
- uint32_t number_of_observers = bitrate_observers_.size();
- if (number_of_observers == 0) {
+ if (bitrate_observers_.empty())
return;
- }
+
uint32_t sum_min_bitrates = 0;
BitrateObserverConfList::iterator it;
for (it = bitrate_observers_.begin(); it != bitrate_observers_.end(); ++it) {
sum_min_bitrates += it->second->min_bitrate_;
}
- if (bitrate <= sum_min_bitrates) {
- return low_rate_strategy_->LowRateAllocation(bitrate, fraction_loss, rtt,
- sum_min_bitrates);
- }
+ if (bitrate <= sum_min_bitrates)
+ return LowRateAllocation(bitrate, fraction_loss, rtt, sum_min_bitrates);
+ else
+ return NormalRateAllocation(bitrate, fraction_loss, rtt, sum_min_bitrates);
+}
+
+void BitrateControllerImpl::NormalRateAllocation(uint32_t bitrate,
+ uint8_t fraction_loss,
+ uint32_t rtt,
+ uint32_t sum_min_bitrates) {
+ uint32_t number_of_observers = bitrate_observers_.size();
uint32_t bitrate_per_observer = (bitrate - sum_min_bitrates) /
number_of_observers;
// Use map to sort list based on max bitrate.
ObserverSortingMap list_max_bitrates;
+ BitrateObserverConfList::iterator it;
for (it = bitrate_observers_.begin(); it != bitrate_observers_.end(); ++it) {
list_max_bitrates.insert(std::pair<uint32_t, ObserverConfiguration*>(
it->second->max_bitrate_,
@@ -328,8 +335,46 @@ void BitrateControllerImpl::OnNetworkChanged(const uint32_t bitrate,
}
}
+void BitrateControllerImpl::LowRateAllocation(uint32_t bitrate,
+ uint8_t fraction_loss,
+ uint32_t rtt,
+ uint32_t sum_min_bitrates) {
+ if (enforce_min_bitrate_) {
+ // Min bitrate to all observers.
+ BitrateControllerImpl::BitrateObserverConfList::iterator it;
+ for (it = bitrate_observers_.begin(); it != bitrate_observers_.end();
+ ++it) {
+ it->first->OnNetworkChanged(it->second->min_bitrate_, fraction_loss, rtt);
+ }
+ // Set sum of min to current send bitrate.
+ bandwidth_estimation_.SetSendBitrate(sum_min_bitrates);
+ } else {
+ // Allocate up to |min_bitrate_| to one observer at a time, until
+ // |bitrate| is depleted.
+ uint32_t remainder = bitrate;
+ BitrateControllerImpl::BitrateObserverConfList::iterator it;
+ for (it = bitrate_observers_.begin(); it != bitrate_observers_.end();
+ ++it) {
+ uint32_t allocation = std::min(remainder, it->second->min_bitrate_);
+ it->first->OnNetworkChanged(allocation, fraction_loss, rtt);
+ remainder -= allocation;
+ }
+ // Set |bitrate| to current send bitrate.
+ bandwidth_estimation_.SetSendBitrate(bitrate);
+ }
+}
+
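A worked example of the two branches, with hypothetical numbers: two observers with min bitrates of 100 kbps and 60 kbps, and an estimate of 120 kbps, which is below the 160 kbps sum of mins.

// enforce_min_bitrate_ == true : each observer gets its min (100k, 60k) and
//                                the send bitrate is pinned to 160 kbps.
// enforce_min_bitrate_ == false: the first observer gets 100 kbps, the second
//                                the remaining 20 kbps, and the send bitrate
//                                stays at 120 kbps.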
bool BitrateControllerImpl::AvailableBandwidth(uint32_t* bandwidth) const {
- return bandwidth_estimation_.AvailableBandwidth(bandwidth);
+ CriticalSectionScoped cs(critsect_);
+ uint32_t bitrate;
+ uint8_t fraction_loss;
+ uint32_t rtt;
+ bandwidth_estimation_.CurrentEstimate(&bitrate, &fraction_loss, &rtt);
+ if (bitrate) {
+ *bandwidth = bitrate - std::min(bitrate, reserved_bitrate_bps_);
+ return true;
+ }
+ return false;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
index 62ed6fda5ab..aff127bd142 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
@@ -27,12 +27,30 @@
namespace webrtc {
-class RtcpBandwidthObserverImpl;
-class LowRateStrategy;
-
class BitrateControllerImpl : public BitrateController {
public:
- friend class RtcpBandwidthObserverImpl;
+ BitrateControllerImpl(Clock* clock, bool enforce_min_bitrate);
+ virtual ~BitrateControllerImpl();
+
+ virtual bool AvailableBandwidth(uint32_t* bandwidth) const OVERRIDE;
+
+ virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() OVERRIDE;
+
+ virtual void SetBitrateObserver(BitrateObserver* observer,
+ const uint32_t start_bitrate,
+ const uint32_t min_bitrate,
+ const uint32_t max_bitrate) OVERRIDE;
+
+ virtual void RemoveBitrateObserver(BitrateObserver* observer) OVERRIDE;
+
+ virtual void EnforceMinBitrate(bool enforce_min_bitrate) OVERRIDE;
+ virtual void SetReservedBitrate(uint32_t reserved_bitrate_bps) OVERRIDE;
+
+ virtual int32_t TimeUntilNextProcess() OVERRIDE;
+ virtual int32_t Process() OVERRIDE;
+
+ private:
+ class RtcpBandwidthObserverImpl;
struct BitrateConfiguration {
BitrateConfiguration(uint32_t start_bitrate,
@@ -59,23 +77,8 @@ class BitrateControllerImpl : public BitrateController {
BitrateObserverConfiguration;
typedef std::list<BitrateObserverConfiguration> BitrateObserverConfList;
- explicit BitrateControllerImpl(bool enforce_min_bitrate);
- virtual ~BitrateControllerImpl();
-
- virtual bool AvailableBandwidth(uint32_t* bandwidth) const OVERRIDE;
-
- virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() OVERRIDE;
-
- virtual void SetBitrateObserver(BitrateObserver* observer,
- const uint32_t start_bitrate,
- const uint32_t min_bitrate,
- const uint32_t max_bitrate) OVERRIDE;
-
- virtual void RemoveBitrateObserver(BitrateObserver* observer) OVERRIDE;
+ void UpdateMinMaxBitrate() EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
- virtual void EnforceMinBitrate(bool enforce_min_bitrate) OVERRIDE;
-
- private:
// Called by BitrateObserver's direct from the RTCP module.
void OnReceivedEstimatedBitrate(const uint32_t bitrate);
@@ -84,18 +87,48 @@ class BitrateControllerImpl : public BitrateController {
const int number_of_packets,
const uint32_t now_ms);
- typedef std::multimap<uint32_t, ObserverConfiguration*> ObserverSortingMap;
+ void MaybeTriggerOnNetworkChanged() EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
- BitrateObserverConfList::iterator
- FindObserverConfigurationPair(const BitrateObserver* observer);
void OnNetworkChanged(const uint32_t bitrate,
const uint8_t fraction_loss, // 0 - 255.
- const uint32_t rtt);
+ const uint32_t rtt)
+ EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+
+ void NormalRateAllocation(uint32_t bitrate,
+ uint8_t fraction_loss,
+ uint32_t rtt,
+ uint32_t sum_min_bitrates)
+ EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+
+ void LowRateAllocation(uint32_t bitrate,
+ uint8_t fraction_loss,
+ uint32_t rtt,
+ uint32_t sum_min_bitrates)
+ EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+
+ typedef std::multimap<uint32_t, ObserverConfiguration*> ObserverSortingMap;
+
+ BitrateObserverConfList::iterator FindObserverConfigurationPair(
+ const BitrateObserver* observer) EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+
+ // Used by process thread.
+ Clock* clock_;
+ uint32_t last_bitrate_update_ms_;
CriticalSectionWrapper* critsect_;
- SendSideBandwidthEstimation bandwidth_estimation_;
- BitrateObserverConfList bitrate_observers_;
- scoped_ptr<LowRateStrategy> low_rate_strategy_;
+ SendSideBandwidthEstimation bandwidth_estimation_ GUARDED_BY(*critsect_);
+ BitrateObserverConfList bitrate_observers_ GUARDED_BY(*critsect_);
+ bool enforce_min_bitrate_ GUARDED_BY(*critsect_);
+ uint32_t reserved_bitrate_bps_ GUARDED_BY(*critsect_);
+
+ uint32_t last_bitrate_bps_ GUARDED_BY(*critsect_);
+ uint8_t last_fraction_loss_ GUARDED_BY(*critsect_);
+ uint32_t last_rtt_ms_ GUARDED_BY(*critsect_);
+ bool last_enforce_min_bitrate_ GUARDED_BY(*critsect_);
+ bool bitrate_observers_modified_ GUARDED_BY(*critsect_);
+ uint32_t last_reserved_bitrate_bps_ GUARDED_BY(*critsect_);
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(BitrateControllerImpl);
};
} // namespace webrtc
#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_BITRATE_CONTROLLER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
index 30f85a81cb9..8523d505b24 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
@@ -57,12 +57,12 @@ class TestBitrateObserver: public BitrateObserver {
class BitrateControllerTest : public ::testing::Test {
protected:
- BitrateControllerTest() : enforce_min_bitrate_(true) {}
+ BitrateControllerTest() : clock_(0), enforce_min_bitrate_(true) {}
~BitrateControllerTest() {}
virtual void SetUp() {
- controller_ =
- BitrateController::CreateBitrateController(enforce_min_bitrate_);
+ controller_ = BitrateController::CreateBitrateController(
+ &clock_, enforce_min_bitrate_);
bandwidth_observer_ = controller_->CreateRtcpBandwidthObserver();
}
@@ -70,6 +70,8 @@ class BitrateControllerTest : public ::testing::Test {
delete bandwidth_observer_;
delete controller_;
}
+
+ webrtc::SimulatedClock clock_;
bool enforce_min_bitrate_;
BitrateController* controller_;
RtcpBandwidthObserver* bandwidth_observer_;
@@ -81,58 +83,74 @@ TEST_F(BitrateControllerTest, Basic) {
controller_->RemoveBitrateObserver(&bitrate_observer);
}
+TEST_F(BitrateControllerTest, UpdatingBitrateObserver) {
+ TestBitrateObserver bitrate_observer;
+ controller_->SetBitrateObserver(&bitrate_observer, 200000, 100000, 1500000);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetBitrateObserver(&bitrate_observer, 1500000, 100000, 1500000);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+ EXPECT_EQ(1500000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetBitrateObserver(&bitrate_observer, 500000, 100000, 1500000);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+ EXPECT_EQ(1500000u, bitrate_observer.last_bitrate_);
+}
+
TEST_F(BitrateControllerTest, OneBitrateObserverOneRtcpObserver) {
TestBitrateObserver bitrate_observer;
controller_->SetBitrateObserver(&bitrate_observer, 200000, 100000, 300000);
// Receive a high remb, test bitrate inc.
bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
+ EXPECT_EQ(0u, bitrate_observer.last_rtt_);
- // Test start bitrate.
+ // Test bitrate increase 8% per second.
webrtc::ReportBlockList report_blocks;
report_blocks.push_back(CreateReportBlock(1, 2, 0, 1));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 1);
- EXPECT_EQ(0u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
- EXPECT_EQ(0u, bitrate_observer.last_rtt_);
+ EXPECT_EQ(50u, bitrate_observer.last_rtt_);
- // Test bitrate increase 8% per second.
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 21));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 1001);
- EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
EXPECT_EQ(50u, bitrate_observer.last_rtt_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 41));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 2001);
- EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 61));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 3001);
- EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(276604u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 801));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 4001);
- EXPECT_EQ(276604u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(299732u, bitrate_observer.last_bitrate_);
+ // Reach max cap.
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 101));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 5001);
- EXPECT_EQ(299732u, bitrate_observer.last_bitrate_);
-
- report_blocks.clear();
- report_blocks.push_back(CreateReportBlock(1, 2, 0, 121));
- bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 6001);
- EXPECT_EQ(300000u, bitrate_observer.last_bitrate_); // Max cap.
+ EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 141));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 7001);
- EXPECT_EQ(300000u, bitrate_observer.last_bitrate_); // Max cap.
+ EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);
// Test that a low REMB triggers immediately.
bandwidth_observer_->OnReceivedEstimatedBitrate(250000);
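The new expected values in this test follow the updated increase rule in send_side_bandwidth_estimation.cc later in this patch: on a low-loss receiver report the rate becomes roughly 1.08 times the minimum bitrate of the last second, plus 1 kbps. A standalone sketch (plain C++, not part of the patch) that reproduces the sequence:

// Standalone sketch: reproduces 217000, 235360, 255189, 276604, 299732 from
// the expectations above, assuming the 8% + 1 kbps increase rule.
#include <cstdint>
#include <cstdio>

int main() {
  uint32_t bitrate = 200000;  // Start bitrate passed to SetBitrateObserver().
  for (int i = 0; i < 5; ++i) {
    bitrate = static_cast<uint32_t>(bitrate * 1.08 + 0.5) + 1000;
    std::printf("%u\n", static_cast<unsigned>(bitrate));
  }
  return 0;
}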
@@ -154,6 +172,9 @@ TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
// Receive a high remb, test bitrate inc.
bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
+ EXPECT_EQ(0u, bitrate_observer.last_rtt_);
// Test start bitrate.
webrtc::ReportBlockList report_blocks;
@@ -161,9 +182,9 @@ TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 1);
second_bandwidth_observer->OnReceivedRtcpReceiverReport(
report_blocks, 100, 1);
- EXPECT_EQ(0u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
- EXPECT_EQ(0u, bitrate_observer.last_rtt_);
+ EXPECT_EQ(100u, bitrate_observer.last_rtt_);
// Test bitrate increase 8% per second.
report_blocks.clear();
@@ -171,7 +192,7 @@ TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 501);
second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 100,
1001);
- EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
EXPECT_EQ(100u, bitrate_observer.last_rtt_);
@@ -180,50 +201,45 @@ TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
report_blocks.push_back(CreateReportBlock(1, 2, 0, 31));
second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 100,
1501);
- EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 41));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 2001);
- EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
// Second report should not change estimate.
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 41));
second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 100,
2001);
- EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
// Reports from only one bandwidth observer are ok.
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 61));
second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 50,
3001);
- EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(276604u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 81));
second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 50,
4001);
- EXPECT_EQ(276604u, bitrate_observer.last_bitrate_);
-
- report_blocks.clear();
- report_blocks.push_back(CreateReportBlock(1, 2, 0, 101));
- second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 50,
- 5001);
EXPECT_EQ(299732u, bitrate_observer.last_bitrate_);
+ // Reach max cap.
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 121));
- second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 50,
- 6001);
- EXPECT_EQ(300000u, bitrate_observer.last_bitrate_); // Max cap.
+ second_bandwidth_observer->OnReceivedRtcpReceiverReport(
+ report_blocks, 50, 5001);
+ EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 141));
- second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 50,
- 7001);
- EXPECT_EQ(300000u, bitrate_observer.last_bitrate_); // Max cap.
+ second_bandwidth_observer->OnReceivedRtcpReceiverReport(
+ report_blocks, 50, 6001);
+ EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);
// Test that a low REMB triggers immediately.
// We don't care which bandwidth observer delivers the REMB.
@@ -232,8 +248,9 @@ TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
EXPECT_EQ(50u, bitrate_observer.last_rtt_);
+ // Min cap.
bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
- EXPECT_EQ(100000u, bitrate_observer.last_bitrate_); // Min cap.
+ EXPECT_EQ(100000u, bitrate_observer.last_bitrate_);
controller_->RemoveBitrateObserver(&bitrate_observer);
delete second_bandwidth_observer;
}
@@ -317,40 +334,33 @@ TEST_F(BitrateControllerTest, TwoBitrateObserversOneRtcpObserver) {
controller_->SetBitrateObserver(&bitrate_observer_1, 200000, 100000, 300000);
// Receive a high remb, test bitrate inc.
+  // Test a too-low start bitrate, i.e. lower than the sum of min bitrates.
bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+ EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(0, bitrate_observer_1.last_fraction_loss_);
+ EXPECT_EQ(0u, bitrate_observer_1.last_rtt_);
- // Test too low start bitrate, hence lower than sum of min.
+ // Test bitrate increase 8% per second, distributed equally.
webrtc::ReportBlockList report_blocks;
report_blocks.push_back(CreateReportBlock(1, 2, 0, 1));
- bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 1);
-
- // Test bitrate increase 8% per second, distributed equally.
- report_blocks.clear();
- report_blocks.push_back(CreateReportBlock(1, 2, 0, 21));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 1001);
- EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(112500u, bitrate_observer_1.last_bitrate_);
EXPECT_EQ(0, bitrate_observer_1.last_fraction_loss_);
EXPECT_EQ(50u, bitrate_observer_1.last_rtt_);
- EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);
+ EXPECT_EQ(212500u, bitrate_observer_2.last_bitrate_);
EXPECT_EQ(0, bitrate_observer_2.last_fraction_loss_);
EXPECT_EQ(50u, bitrate_observer_2.last_rtt_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 41));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 2001);
- EXPECT_EQ(112500u, bitrate_observer_1.last_bitrate_);
- EXPECT_EQ(212500u, bitrate_observer_2.last_bitrate_);
-
- report_blocks.clear();
- report_blocks.push_back(CreateReportBlock(1, 2, 0, 61));
- bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 3001);
EXPECT_EQ(126000u, bitrate_observer_1.last_bitrate_);
EXPECT_EQ(226000u, bitrate_observer_2.last_bitrate_);
report_blocks.clear();
- report_blocks.push_back(CreateReportBlock(1, 2, 0, 81));
- bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 4001);
+ report_blocks.push_back(CreateReportBlock(1, 2, 0, 61));
+ bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 3001);
EXPECT_EQ(140580u, bitrate_observer_1.last_bitrate_);
EXPECT_EQ(240580u, bitrate_observer_2.last_bitrate_);
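The two-observer expectations are consistent with the same 8% + 1 kbps rule applied to the total estimate, with the surplus above the summed minimum bitrates split equally; the second observer's minimum (200000) is inferred from the removed expectations and is not shown in this hunk. A standalone sketch under those assumptions:

// Sketch: assumes observer mins of 100000 and 200000 bps and an equal split
// of the surplus above the summed mins (the controller itself is not shown).
#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t kMin1 = 100000, kMin2 = 200000;
  uint32_t total = kMin1 + kMin2;  // Start bitrate was below the sum of mins.
  for (int i = 0; i < 3; ++i) {
    total = static_cast<uint32_t>(total * 1.08 + 0.5) + 1000;
    const uint32_t extra = (total - kMin1 - kMin2) / 2;
    // Prints (112500, 212500), (126000, 226000), (140580, 240580).
    std::printf("%u %u\n",
                static_cast<unsigned>(kMin1 + extra),
                static_cast<unsigned>(kMin2 + extra));
  }
  return 0;
}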
@@ -416,6 +426,61 @@ TEST_F(BitrateControllerTest, TwoBitrateObserversOneRtcpObserver) {
controller_->RemoveBitrateObserver(&bitrate_observer_2);
}
+TEST_F(BitrateControllerTest, SetReservedBitrate) {
+ TestBitrateObserver bitrate_observer;
+ controller_->SetBitrateObserver(&bitrate_observer, 200000, 100000, 300000);
+
+ // Receive successively lower REMBs, verify the reserved bitrate is deducted.
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(50000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+ EXPECT_EQ(150000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(250000);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(50000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(250000);
+ EXPECT_EQ(150000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(200000);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(30000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(200000);
+ EXPECT_EQ(170000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(160000);
+ EXPECT_EQ(160000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(30000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(160000);
+ EXPECT_EQ(130000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(120000);
+ EXPECT_EQ(120000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(10000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(120000);
+ EXPECT_EQ(110000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(120000);
+ EXPECT_EQ(120000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(50000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(120000);
+ EXPECT_EQ(100000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(10000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(0);
+ EXPECT_EQ(100000u, bitrate_observer.last_bitrate_);
+
+ controller_->RemoveBitrateObserver(&bitrate_observer);
+}
+
class BitrateControllerTestNoEnforceMin : public BitrateControllerTest {
protected:
BitrateControllerTestNoEnforceMin() : BitrateControllerTest() {
@@ -434,8 +499,32 @@ TEST_F(BitrateControllerTestNoEnforceMin, OneBitrateObserver) {
EXPECT_EQ(150000u, bitrate_observer_1.last_bitrate_);
// Low REMB.
- bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
- EXPECT_EQ(1000u, bitrate_observer_1.last_bitrate_);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(10000);
+ EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
+
+ // Keeps at least 10 kbps.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(9000);
+ EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
+
+ controller_->RemoveBitrateObserver(&bitrate_observer_1);
+}
+
+TEST_F(BitrateControllerTestNoEnforceMin, SetReservedBitrate) {
+ TestBitrateObserver bitrate_observer_1;
+ controller_->SetBitrateObserver(&bitrate_observer_1, 200000, 100000, 400000);
+ controller_->SetReservedBitrate(10000);
+
+ // High REMB.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(150000);
+ EXPECT_EQ(140000u, bitrate_observer_1.last_bitrate_);
+
+ // Low REMB.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(15000);
+ EXPECT_EQ(5000u, bitrate_observer_1.last_bitrate_);
+
+ // Keeps at least 10 kbps.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(9000);
+ EXPECT_EQ(0u, bitrate_observer_1.last_bitrate_);
controller_->RemoveBitrateObserver(&bitrate_observer_1);
}
@@ -469,9 +558,15 @@ TEST_F(BitrateControllerTestNoEnforceMin, ThreeBitrateObservers) {
EXPECT_EQ(200000u, bitrate_observer_3.last_bitrate_); // Remainder.
// Low REMB.
- bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(10000);
// Verify that the first observer gets all the rate, and the rest get zero.
- EXPECT_EQ(1000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
+ EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_);
+
+ // Verify it keeps an estimate of at least 10kbps.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(9000);
+ EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_);
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h b/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
index 0f743676581..46d7830300b 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
@@ -15,6 +15,7 @@
#ifndef WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
#define WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
+#include "webrtc/modules/interface/module.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
namespace webrtc {
@@ -35,7 +36,7 @@ class BitrateObserver {
virtual ~BitrateObserver() {}
};
-class BitrateController {
+class BitrateController : public Module {
/*
* This class collects feedback from all streams sent to a peer (via
* RTCPBandwidthObservers). It does one aggregated send side bandwidth
@@ -48,7 +49,8 @@ class BitrateController {
// When true, the bitrate will never be set lower than the minimum bitrate(s).
// When false, the bitrate observers will be allocated rates up to their
// respective minimum bitrate, satisfying one observer after the other.
- static BitrateController* CreateBitrateController(bool enforce_min_bitrate);
+ static BitrateController* CreateBitrateController(Clock* clock,
+ bool enforce_min_bitrate);
virtual ~BitrateController() {}
virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() = 0;
@@ -73,6 +75,8 @@ class BitrateController {
// Changes the mode that was set in the constructor.
virtual void EnforceMinBitrate(bool enforce_min_bitrate) = 0;
+
+ virtual void SetReservedBitrate(uint32_t reserved_bitrate_bps) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
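With BitrateController now a Module created from a Clock, a typical call sequence looks like the unit tests earlier in this patch. A sketch (SimulatedClock comes from system_wrappers; the observer is any BitrateObserver implementation):

#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
#include "webrtc/system_wrappers/interface/clock.h"

// Sketch of driving the controller; mirrors the unit tests above.
void BitrateControllerSketch(webrtc::BitrateObserver* observer) {
  webrtc::SimulatedClock clock(0);
  webrtc::BitrateController* controller =
      webrtc::BitrateController::CreateBitrateController(&clock, true);
  controller->SetBitrateObserver(observer, 200000, 100000, 1500000);
  controller->SetReservedBitrate(50000);  // New in this patch; deducted from
                                          // the estimate handed to observers.
  clock.AdvanceTimeMilliseconds(25);
  controller->Process();  // Module interface; pushes the current estimate.
  controller->RemoveBitrateObserver(observer);
  delete controller;
}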
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
index ce385db54c1..5da23f06624 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
@@ -10,216 +10,204 @@
#include "webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h"
-#include <math.h> // sqrt()
+#include <cmath>
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
+namespace {
+enum { kBweIncreaseIntervalMs = 1000 };
+enum { kBweDecreaseIntervalMs = 300 };
+enum { kLimitNumPackets = 20 };
+enum { kAvgPacketSizeBytes = 1000 };
+
+// Calculate the rate that TCP-Friendly Rate Control (TFRC) would apply.
+// The formula in RFC 3448, Section 3.1, is used.
+uint32_t CalcTfrcBps(uint16_t rtt, uint8_t loss) {
+ if (rtt == 0 || loss == 0) {
+ // Input variables out of range.
+ return 0;
+ }
+ double R = static_cast<double>(rtt) / 1000; // RTT in seconds.
+ int b = 1; // Number of packets acknowledged by a single TCP acknowledgement:
+ // recommended = 1.
+ double t_RTO = 4.0 * R; // TCP retransmission timeout value in seconds
+ // recommended = 4*R.
+ double p = static_cast<double>(loss) / 255; // Packet loss rate in [0, 1).
+ double s = static_cast<double>(kAvgPacketSizeBytes);
+
+ // Calculate send rate in bytes/second.
+ double X =
+ s / (R * std::sqrt(2 * b * p / 3) +
+ (t_RTO * (3 * std::sqrt(3 * b * p / 8) * p * (1 + 32 * p * p))));
+
+ // Convert to bits/second.
+ return (static_cast<uint32_t>(X * 8));
+}
+}
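For reference, the file-local CalcTfrcBps() above evaluates the RFC 3448, Section 3.1 expression X = s / (R*sqrt(2bp/3) + t_RTO*(3*sqrt(3bp/8)*p*(1 + 32p^2))). A standalone sketch that re-evaluates the same expression for example inputs (the function itself sits in an anonymous namespace and is not callable from outside this file):

// Sketch: same TFRC expression, evaluated for an example RTT and Q8 loss.
#include <cmath>
#include <cstdio>

int main() {
  const double s = 1000.0;   // kAvgPacketSizeBytes.
  const int b = 1;           // Packets per TCP acknowledgement.
  const int rtt_ms = 100;    // Example RTT.
  const int loss_q8 = 26;    // ~10% loss in Q8.
  const double R = rtt_ms / 1000.0;
  const double t_RTO = 4.0 * R;
  const double p = loss_q8 / 255.0;
  const double X = s / (R * std::sqrt(2 * b * p / 3) +
                        t_RTO * (3 * std::sqrt(3 * b * p / 8) * p *
                                 (1 + 32 * p * p)));
  std::printf("TFRC rate: %.0f bps\n", X * 8);  // Bytes/s converted to bits/s.
  return 0;
}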
SendSideBandwidthEstimation::SendSideBandwidthEstimation()
- : critsect_(CriticalSectionWrapper::CreateCriticalSection()),
- accumulate_lost_packets_Q8_(0),
+ : accumulate_lost_packets_Q8_(0),
accumulate_expected_packets_(0),
bitrate_(0),
min_bitrate_configured_(0),
max_bitrate_configured_(0),
+ time_last_receiver_block_ms_(0),
last_fraction_loss_(0),
- last_round_trip_time_(0),
+ last_round_trip_time_ms_(0),
bwe_incoming_(0),
- time_last_increase_(0),
- time_last_decrease_(0) {
-}
+ time_last_decrease_ms_(0) {}
-SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {
- delete critsect_;
-}
+SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {}
-void SendSideBandwidthEstimation::SetSendBitrate(const uint32_t bitrate) {
- CriticalSectionScoped cs(critsect_);
+void SendSideBandwidthEstimation::SetSendBitrate(uint32_t bitrate) {
bitrate_ = bitrate;
+
+ // Clear last sent bitrate history so the new value can be used directly
+ // and not capped.
+ min_bitrate_history_.clear();
}
-void SendSideBandwidthEstimation::SetMinMaxBitrate(const uint32_t min_bitrate,
- const uint32_t max_bitrate) {
- CriticalSectionScoped cs(critsect_);
+void SendSideBandwidthEstimation::SetMinMaxBitrate(uint32_t min_bitrate,
+ uint32_t max_bitrate) {
min_bitrate_configured_ = min_bitrate;
- if (max_bitrate == 0) {
- // no max configured use 1Gbit/s
- max_bitrate_configured_ = 1000000000;
- } else {
- max_bitrate_configured_ = max_bitrate;
- }
+ max_bitrate_configured_ = max_bitrate;
}
-bool SendSideBandwidthEstimation::UpdateBandwidthEstimate(
- const uint32_t bandwidth,
- uint32_t* new_bitrate,
- uint8_t* fraction_lost,
- uint16_t* rtt) {
- *new_bitrate = 0;
- CriticalSectionScoped cs(critsect_);
+void SendSideBandwidthEstimation::SetMinBitrate(uint32_t min_bitrate) {
+ min_bitrate_configured_ = min_bitrate;
+}
- bwe_incoming_ = bandwidth;
+void SendSideBandwidthEstimation::CurrentEstimate(uint32_t* bitrate,
+ uint8_t* loss,
+ uint32_t* rtt) const {
+ *bitrate = bitrate_;
+ *loss = last_fraction_loss_;
+ *rtt = last_round_trip_time_ms_;
+}
- if (bitrate_ == 0) {
- // SendSideBandwidthEstimation off
- return false;
- }
- if (bwe_incoming_ > 0 && bitrate_ > bwe_incoming_) {
- bitrate_ = bwe_incoming_;
- *new_bitrate = bitrate_;
- *fraction_lost = last_fraction_loss_;
- *rtt = last_round_trip_time_;
- return true;
- }
- return false;
+void SendSideBandwidthEstimation::UpdateReceiverEstimate(uint32_t bandwidth) {
+ bwe_incoming_ = bandwidth;
+ CapBitrateToThresholds();
}
-bool SendSideBandwidthEstimation::UpdatePacketLoss(
- const int number_of_packets,
- const uint32_t rtt,
- const uint32_t now_ms,
- uint8_t* loss,
- uint32_t* new_bitrate) {
- CriticalSectionScoped cs(critsect_);
-
- if (bitrate_ == 0) {
- // SendSideBandwidthEstimation off
- return false;
- }
+void SendSideBandwidthEstimation::UpdateReceiverBlock(uint8_t fraction_loss,
+ uint32_t rtt,
+ int number_of_packets,
+ uint32_t now_ms) {
// Update RTT.
- last_round_trip_time_ = rtt;
+ last_round_trip_time_ms_ = rtt;
// Check sequence number diff and weight the loss report.
if (number_of_packets > 0) {
// Calculate number of lost packets.
- const int num_lost_packets_Q8 = *loss * number_of_packets;
+ const int num_lost_packets_Q8 = fraction_loss * number_of_packets;
// Accumulate reports.
accumulate_lost_packets_Q8_ += num_lost_packets_Q8;
accumulate_expected_packets_ += number_of_packets;
// Report loss if the total report is based on sufficiently many packets.
if (accumulate_expected_packets_ >= kLimitNumPackets) {
- *loss = accumulate_lost_packets_Q8_ / accumulate_expected_packets_;
+ last_fraction_loss_ =
+ accumulate_lost_packets_Q8_ / accumulate_expected_packets_;
- // Reset accumulators
+ // Reset accumulators.
accumulate_lost_packets_Q8_ = 0;
accumulate_expected_packets_ = 0;
} else {
- // Report zero loss until we have enough data to estimate
- // the loss rate.
- return false;
+ // Early return without updating estimate.
+ return;
}
}
- // Keep for next time.
- last_fraction_loss_ = *loss;
- uint32_t bitrate = 0;
- if (!ShapeSimple(*loss, rtt, now_ms, &bitrate)) {
- // No change.
- return false;
- }
- bitrate_ = bitrate;
- *new_bitrate = bitrate;
- return true;
+ time_last_receiver_block_ms_ = now_ms;
+ UpdateEstimate(now_ms);
}
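To make the Q8 bookkeeping above concrete (fraction_loss is in Q8, so 5 is roughly 2% and 26 roughly 10%), a worked example with two receiver blocks of 10 packets each, reporting fraction_loss 0 and then 26:

  accumulate_lost_packets_Q8_  = 0*10 + 26*10 = 260
  accumulate_expected_packets_ = 10 + 10      = 20    (>= kLimitNumPackets)
  last_fraction_loss_          = 260 / 20     = 13    (about 5% loss)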
-bool SendSideBandwidthEstimation::AvailableBandwidth(
- uint32_t* bandwidth) const {
- CriticalSectionScoped cs(critsect_);
- if (bitrate_ == 0) {
- return false;
+void SendSideBandwidthEstimation::UpdateEstimate(uint32_t now_ms) {
+ UpdateMinHistory(now_ms);
+
+ // Only start updating bitrate when receiving receiver blocks.
+ if (time_last_receiver_block_ms_ != 0) {
+ if (last_fraction_loss_ <= 5) {
+ // Loss < 2%: Increase rate by 8% of the min bitrate in the last
+ // kBweIncreaseIntervalMs.
+ // Note that by remembering the bitrate over the last second one can
+      // ramp up one second faster than if only allowed to start ramping
+      // at an 8% per second rate now. E.g.:
+      //   If sending a constant 100kbps it can ramp up immediately to 108kbps
+      //   whenever a receiver report is received with lower packet loss.
+      //   If instead one would do: bitrate_ *= 1.08^(delta time), it would
+      //   take over one second after the packet loss drops to reach 108kbps.
+ bitrate_ = static_cast<uint32_t>(
+ min_bitrate_history_.front().second * 1.08 + 0.5);
+
+ // Add 1 kbps extra, just to make sure that we do not get stuck
+ // (gives a little extra increase at low rates, negligible at higher
+ // rates).
+ bitrate_ += 1000;
+
+ } else if (last_fraction_loss_ <= 26) {
+ // Loss between 2% - 10%: Do nothing.
+
+ } else {
+      // Loss > 10%: Limit the rate decreases to once per
+      // kBweDecreaseIntervalMs + rtt.
+ if ((now_ms - time_last_decrease_ms_) >=
+ static_cast<uint32_t>(kBweDecreaseIntervalMs +
+ last_round_trip_time_ms_)) {
+ time_last_decrease_ms_ = now_ms;
+
+ // Reduce rate:
+ // newRate = rate * (1 - 0.5*lossRate);
+ // where packetLoss = 256*lossRate;
+ bitrate_ = static_cast<uint32_t>(
+ (bitrate_ * static_cast<double>(512 - last_fraction_loss_)) /
+ 512.0);
+
+        // Calculate what rate TFRC would apply in this situation and do not
+        // reduce further than that.
+ bitrate_ = std::max(
+ bitrate_,
+ CalcTfrcBps(last_round_trip_time_ms_, last_fraction_loss_));
+ }
+ }
}
- *bandwidth = bitrate_;
- return true;
+ CapBitrateToThresholds();
}
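Worked numbers for the three branches of UpdateEstimate() above, assuming the minimum bitrate over the last kBweIncreaseIntervalMs is 100000 bps:

  last_fraction_loss_ <= 5  : bitrate_ = 100000 * 1.08 + 1000         = 109000 bps
  6 .. 26                   : bitrate_ unchanged
  last_fraction_loss_ = 128 : bitrate_ = bitrate_ * (512 - 128) / 512 = 0.75 * bitrate_,
                              floored at CalcTfrcBps(rtt, 128) and applied at most
                              once per kBweDecreaseIntervalMs + rtt.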
-/*
- * Calculate the rate that TCP-Friendly Rate Control (TFRC) would apply.
- * The formula in RFC 3448, Section 3.1, is used.
- */
-uint32_t SendSideBandwidthEstimation::CalcTFRCbps(uint16_t rtt, uint8_t loss) {
- if (rtt == 0 || loss == 0) {
- // input variables out of range
- return 0;
+void SendSideBandwidthEstimation::UpdateMinHistory(uint32_t now_ms) {
+ // Remove old data points from history.
+ // Since history precision is in ms, add one so it is able to increase
+ // bitrate if it is off by as little as 0.5ms.
+ while (!min_bitrate_history_.empty() &&
+ now_ms - min_bitrate_history_.front().first + 1 >
+ kBweIncreaseIntervalMs) {
+ min_bitrate_history_.pop_front();
}
- double R = static_cast<double>(rtt) / 1000; // RTT in seconds
- int b = 1; // number of packets acknowledged by a single TCP acknowledgement;
- // recommended = 1
- double t_RTO = 4.0 * R; // TCP retransmission timeout value in seconds
- // recommended = 4*R
- double p = static_cast<double>(loss) / 255; // packet loss rate in [0, 1)
- double s = static_cast<double>(kAvgPacketSizeBytes);
- // calculate send rate in bytes/second
- double X = s / (R * sqrt(2 * b * p / 3) +
- (t_RTO * (3 * sqrt(3 * b * p / 8) * p * (1 + 32 * p * p))));
+ // Typical minimum sliding-window algorithm: Pop values higher than current
+ // bitrate before pushing it.
+ while (!min_bitrate_history_.empty() &&
+ bitrate_ <= min_bitrate_history_.back().second) {
+ min_bitrate_history_.pop_back();
+ }
- return (static_cast<uint32_t>(X * 8)); // bits/second
+ min_bitrate_history_.push_back(std::make_pair(now_ms, bitrate_));
}
-bool SendSideBandwidthEstimation::ShapeSimple(const uint8_t loss,
- const uint32_t rtt,
- const uint32_t now_ms,
- uint32_t* bitrate) {
- uint32_t new_bitrate = 0;
- bool reducing = false;
-
- // Limit the rate increases to once a kBWEIncreaseIntervalMs.
- if (loss <= 5) {
- if ((now_ms - time_last_increase_) < kBWEIncreaseIntervalMs) {
- return false;
- }
- time_last_increase_ = now_ms;
- }
- // Limit the rate decreases to once a kBWEDecreaseIntervalMs + rtt.
- if (loss > 26) {
- if ((now_ms - time_last_decrease_) < kBWEDecreaseIntervalMs + rtt) {
- return false;
- }
- time_last_decrease_ = now_ms;
- }
-
- if (loss > 5 && loss <= 26) {
- // 2% - 10%
- new_bitrate = bitrate_;
- } else if (loss > 26) {
- // 26/256 ~= 10%
- // reduce rate: newRate = rate * (1 - 0.5*lossRate)
- // packetLoss = 256*lossRate
- new_bitrate = static_cast<uint32_t>((bitrate_ *
- static_cast<double>(512 - loss)) / 512.0);
- reducing = true;
- } else {
- // increase rate by 8%
- new_bitrate = static_cast<uint32_t>(bitrate_ * 1.08 + 0.5);
-
- // add 1 kbps extra, just to make sure that we do not get stuck
- // (gives a little extra increase at low rates, negligible at higher rates)
- new_bitrate += 1000;
- }
- if (reducing) {
- // Calculate what rate TFRC would apply in this situation
- // scale loss to Q0 (back to [0, 255])
- uint32_t tfrc_bitrate = CalcTFRCbps(rtt, loss);
- if (tfrc_bitrate > new_bitrate) {
- // do not reduce further if rate is below TFRC rate
- new_bitrate = tfrc_bitrate;
- }
- }
- if (bwe_incoming_ > 0 && new_bitrate > bwe_incoming_) {
- new_bitrate = bwe_incoming_;
+void SendSideBandwidthEstimation::CapBitrateToThresholds() {
+ if (bwe_incoming_ > 0 && bitrate_ > bwe_incoming_) {
+ bitrate_ = bwe_incoming_;
}
- if (new_bitrate > max_bitrate_configured_) {
- new_bitrate = max_bitrate_configured_;
+ if (bitrate_ > max_bitrate_configured_) {
+ bitrate_ = max_bitrate_configured_;
}
- if (new_bitrate < min_bitrate_configured_) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "The configured min bitrate (%u kbps) is greater than the "
- "estimated available bandwidth (%u kbps).\n",
- min_bitrate_configured_ / 1000, new_bitrate / 1000);
- new_bitrate = min_bitrate_configured_;
+ if (bitrate_ < min_bitrate_configured_) {
+ LOG(LS_WARNING) << "Estimated available bandwidth " << bitrate_ / 1000
+ << " kbps is below configured min bitrate "
+ << min_bitrate_configured_ / 1000 << " kbps.";
+ bitrate_ = min_bitrate_configured_;
}
- *bitrate = new_bitrate;
- return true;
}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
index 0c1fa94517d..eb675d1ca68 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
@@ -13,6 +13,8 @@
#ifndef WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_SIDE_BANDWIDTH_ESTIMATION_H_
#define WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_SIDE_BANDWIDTH_ESTIMATION_H_
+#include <deque>
+
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -22,38 +24,33 @@ class SendSideBandwidthEstimation {
SendSideBandwidthEstimation();
virtual ~SendSideBandwidthEstimation();
- // Call when we receive a RTCP message with TMMBR or REMB
- // Return true if new_bitrate is valid.
- bool UpdateBandwidthEstimate(const uint32_t bandwidth,
- uint32_t* new_bitrate,
- uint8_t* fraction_lost,
- uint16_t* rtt);
-
- // Call when we receive a RTCP message with a ReceiveBlock
- // Return true if new_bitrate is valid.
- bool UpdatePacketLoss(const int number_of_packets,
- const uint32_t rtt,
- const uint32_t now_ms,
- uint8_t* loss,
- uint32_t* new_bitrate);
-
- // Return false if no bandwidth estimate is available
- bool AvailableBandwidth(uint32_t* bandwidth) const;
- void SetSendBitrate(const uint32_t bitrate);
- void SetMinMaxBitrate(const uint32_t min_bitrate, const uint32_t max_bitrate);
+ void CurrentEstimate(uint32_t* bitrate, uint8_t* loss, uint32_t* rtt) const;
- private:
- bool ShapeSimple(const uint8_t loss, const uint32_t rtt,
- const uint32_t now_ms, uint32_t* bitrate);
+ // Call periodically to update estimate.
+ void UpdateEstimate(uint32_t now_ms);
+
+  // Call when we receive an RTCP message with TMMBR or REMB.
+ void UpdateReceiverEstimate(uint32_t bandwidth);
- uint32_t CalcTFRCbps(uint16_t rtt, uint8_t loss);
+  // Call when we receive an RTCP message with a ReceiveBlock.
+ void UpdateReceiverBlock(uint8_t fraction_loss,
+ uint32_t rtt,
+ int number_of_packets,
+ uint32_t now_ms);
+
+ void SetSendBitrate(uint32_t bitrate);
+ void SetMinMaxBitrate(uint32_t min_bitrate, uint32_t max_bitrate);
+ void SetMinBitrate(uint32_t min_bitrate);
+
+ private:
+ void CapBitrateToThresholds();
- enum { kBWEIncreaseIntervalMs = 1000 };
- enum { kBWEDecreaseIntervalMs = 300 };
- enum { kLimitNumPackets = 20 };
- enum { kAvgPacketSizeBytes = 1000 };
+ // Updates history of min bitrates.
+ // After this method returns min_bitrate_history_.front().second contains the
+  // min bitrate used during the last kBweIncreaseIntervalMs.
+ void UpdateMinHistory(uint32_t now_ms);
- CriticalSectionWrapper* critsect_;
+ std::deque<std::pair<uint32_t, uint32_t> > min_bitrate_history_;
// incoming filters
int accumulate_lost_packets_Q8_;
@@ -63,12 +60,12 @@ class SendSideBandwidthEstimation {
uint32_t min_bitrate_configured_;
uint32_t max_bitrate_configured_;
+ uint32_t time_last_receiver_block_ms_;
uint8_t last_fraction_loss_;
- uint16_t last_round_trip_time_;
+ uint16_t last_round_trip_time_ms_;
uint32_t bwe_incoming_;
- uint32_t time_last_increase_;
- uint32_t time_last_decrease_;
+ uint32_t time_last_decrease_ms_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_SIDE_BANDWIDTH_ESTIMATION_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/OWNERS b/chromium/third_party/webrtc/modules/desktop_capture/OWNERS
index 3276530e93d..e85861b8d3e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/OWNERS
+++ b/chromium/third_party/webrtc/modules/desktop_capture/OWNERS
@@ -1,3 +1,8 @@
alexeypa@chromium.org
sergeyu@chromium.org
wez@chromium.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
index 6ed3ae881f2..2547ba37aa8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
@@ -81,7 +81,7 @@ DesktopFrameWithCursor::DesktopFrameWithCursor(DesktopFrame* frame,
mutable_updated_region()->Swap(frame->mutable_updated_region());
DesktopVector image_pos = position.subtract(cursor.hotspot());
- DesktopRect target_rect = DesktopRect::MakeSize(cursor.image().size());
+ DesktopRect target_rect = DesktopRect::MakeSize(cursor.image()->size());
target_rect.Translate(image_pos);
DesktopVector target_origin = target_rect.top_left();
target_rect.IntersectWith(DesktopRect::MakeSize(size()));
@@ -101,10 +101,10 @@ DesktopFrameWithCursor::DesktopFrameWithCursor(DesktopFrame* frame,
target_rect.left() * DesktopFrame::kBytesPerPixel;
DesktopVector origin_shift = target_rect.top_left().subtract(target_origin);
AlphaBlend(target_rect_data, stride(),
- cursor.image().data() +
- origin_shift.y() * cursor.image().stride() +
+ cursor.image()->data() +
+ origin_shift.y() * cursor.image()->stride() +
origin_shift.x() * DesktopFrame::kBytesPerPixel,
- cursor.image().stride(),
+ cursor.image()->stride(),
target_rect.size());
}
@@ -142,12 +142,16 @@ void DesktopAndCursorComposer::Capture(const DesktopRegion& region) {
desktop_capturer_->Capture(region);
}
+void DesktopAndCursorComposer::SetExcludedWindow(WindowId window) {
+ desktop_capturer_->SetExcludedWindow(window);
+}
+
SharedMemory* DesktopAndCursorComposer::CreateSharedMemory(size_t size) {
return callback_->CreateSharedMemory(size);
}
void DesktopAndCursorComposer::OnCaptureCompleted(DesktopFrame* frame) {
- if (cursor_.get() && cursor_state_ == MouseCursorMonitor::INSIDE) {
+ if (frame && cursor_.get() && cursor_state_ == MouseCursorMonitor::INSIDE) {
DesktopFrameWithCursor* frame_with_cursor =
new DesktopFrameWithCursor(frame, *cursor_, cursor_position_);
frame = frame_with_cursor;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
index 4f7c85bde3e..3fac0212d49 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
@@ -34,6 +34,7 @@ class DesktopAndCursorComposer : public DesktopCapturer,
// DesktopCapturer interface.
virtual void Start(DesktopCapturer::Callback* callback) OVERRIDE;
virtual void Capture(const DesktopRegion& region) OVERRIDE;
+ virtual void SetExcludedWindow(WindowId window) OVERRIDE;
private:
// DesktopCapturer::Callback interface.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc
index 15d6f546118..b482a29605e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc
@@ -58,6 +58,18 @@ uint32_t BlendPixels(uint32_t dest, uint32_t src) {
return b + (g << 8) + (r << 16) + 0xff000000;
}
+DesktopFrame* CreateTestFrame() {
+ DesktopFrame* frame =
+ new BasicDesktopFrame(DesktopSize(kScreenWidth, kScreenHeight));
+ uint32_t* data = reinterpret_cast<uint32_t*>(frame->data());
+ for (int y = 0; y < kScreenHeight; ++y) {
+ for (int x = 0; x < kScreenWidth; ++x) {
+ *(data++) = GetFakeFramePixelValue(DesktopVector(x, y));
+ }
+ }
+ return frame;
+}
+
class FakeScreenCapturer : public DesktopCapturer {
public:
FakeScreenCapturer() {}
@@ -67,27 +79,17 @@ class FakeScreenCapturer : public DesktopCapturer {
}
virtual void Capture(const DesktopRegion& region) OVERRIDE {
- DesktopFrame* frame =
- new BasicDesktopFrame(DesktopSize(kScreenWidth, kScreenHeight));
- uint32_t* data = reinterpret_cast<uint32_t*>(frame->data());
- for (int y = 0; y < kScreenHeight; ++y) {
- for (int x = 0; x < kScreenWidth; ++x) {
- *(data++) = GetFakeFramePixelValue(DesktopVector(x, y));
- }
- }
-
- last_frame_.reset(SharedDesktopFrame::Wrap(frame));
-
- callback_->OnCaptureCompleted(last_frame_->Share());
+ callback_->OnCaptureCompleted(next_frame_.release());
}
- // Returns last fake captured frame.
- SharedDesktopFrame* last_frame() { return last_frame_.get(); }
+ void SetNextFrame(DesktopFrame* next_frame) {
+ next_frame_.reset(next_frame);
+ }
private:
Callback* callback_;
- scoped_ptr<SharedDesktopFrame> last_frame_;
+ scoped_ptr<DesktopFrame> next_frame_;
};
class FakeMouseMonitor : public MouseCursorMonitor {
@@ -187,6 +189,20 @@ class DesktopAndCursorComposerTest : public testing::Test,
scoped_ptr<DesktopFrame> frame_;
};
+// Verify DesktopAndCursorComposer can handle the case when the screen capturer
+// fails.
+TEST_F(DesktopAndCursorComposerTest, Error) {
+ blender_.Start(this);
+
+ fake_cursor_->SetHotspot(DesktopVector());
+ fake_cursor_->SetState(MouseCursorMonitor::INSIDE, DesktopVector());
+ fake_screen_->SetNextFrame(NULL);
+
+ blender_.Capture(DesktopRegion());
+
+ EXPECT_EQ(frame_, static_cast<DesktopFrame*>(NULL));
+}
+
TEST_F(DesktopAndCursorComposerTest, Blend) {
struct {
int x, y;
@@ -222,6 +238,10 @@ TEST_F(DesktopAndCursorComposerTest, Blend) {
DesktopVector pos(tests[i].x, tests[i].y);
fake_cursor_->SetState(state, pos);
+ scoped_ptr<SharedDesktopFrame> frame(
+ SharedDesktopFrame::Wrap(CreateTestFrame()));
+ fake_screen_->SetNextFrame(frame->Share());
+
blender_.Capture(DesktopRegion());
VerifyFrame(*frame_, state, pos);
@@ -229,9 +249,7 @@ TEST_F(DesktopAndCursorComposerTest, Blend) {
// Verify that the cursor is erased before the frame buffer is returned to
// the screen capturer.
frame_.reset();
- VerifyFrame(*fake_screen_->last_frame(),
- MouseCursorMonitor::OUTSIDE,
- DesktopVector());
+ VerifyFrame(*frame, MouseCursorMonitor::OUTSIDE, DesktopVector());
}
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
index eb3bc9a29d1..6f4a083015a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
@@ -36,6 +36,10 @@
"differ_block.h",
"mac/desktop_configuration.h",
"mac/desktop_configuration.mm",
+ "mac/desktop_configuration_monitor.h",
+ "mac/desktop_configuration_monitor.cc",
+ "mac/osx_version.h",
+ "mac/osx_version.cc",
"mac/scoped_pixel_buffer_object.cc",
"mac/scoped_pixel_buffer_object.h",
"mouse_cursor.cc",
@@ -65,9 +69,17 @@
"win/scoped_gdi_object.h",
"win/scoped_thread_desktop.cc",
"win/scoped_thread_desktop.h",
+ "win/screen_capturer_win_gdi.cc",
+ "win/screen_capturer_win_gdi.h",
+ "win/screen_capturer_win_magnifier.cc",
+ "win/screen_capturer_win_magnifier.h",
+ "win/screen_capture_utils.cc",
+ "win/screen_capture_utils.h",
+ "win/window_capture_utils.cc",
+ "win/window_capture_utils.h",
"window_capturer.cc",
"window_capturer.h",
- "window_capturer_mac.cc",
+ "window_capturer_mac.mm",
"window_capturer_win.cc",
"window_capturer_x11.cc",
"x11/shared_x_display.h",
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.cc
index a4fa02547bd..105853bf94b 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.cc
@@ -19,6 +19,10 @@ DesktopCaptureOptions::DesktopCaptureOptions()
// XDamage is often broken, so don't use it by default.
use_update_notifications_ = false;
#endif
+
+#if defined(WEBRTC_WIN)
+ allow_use_magnification_api_ = false;
+#endif
}
DesktopCaptureOptions::~DesktopCaptureOptions() {}
@@ -29,6 +33,9 @@ DesktopCaptureOptions DesktopCaptureOptions::CreateDefault() {
#if defined(USE_X11)
result.set_x_display(SharedXDisplay::CreateDefault());
#endif
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ result.set_configuration_monitor(new DesktopConfigurationMonitor());
+#endif
return result;
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.h
index f0c76b1733f..c6aabd4529d 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.h
@@ -10,12 +10,17 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/system_wrappers/interface/scoped_refptr.h"
#if defined(USE_X11)
#include "webrtc/modules/desktop_capture/x11/shared_x_display.h"
#endif
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+#endif
+
namespace webrtc {
// An object that stores initialization parameters for screen and window
@@ -38,6 +43,15 @@ class DesktopCaptureOptions {
}
#endif
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ DesktopConfigurationMonitor* configuration_monitor() const {
+ return configuration_monitor_;
+ }
+ void set_configuration_monitor(scoped_refptr<DesktopConfigurationMonitor> m) {
+ configuration_monitor_ = m;
+ }
+#endif
+
// Flag indicating that the capturer should use screen change notifications.
// Enables/disables use of XDAMAGE in the X11 capturer.
bool use_update_notifications() const { return use_update_notifications_; }
@@ -52,10 +66,27 @@ class DesktopCaptureOptions {
disable_effects_ = disable_effects;
}
+#if defined(WEBRTC_WIN)
+ bool allow_use_magnification_api() const {
+ return allow_use_magnification_api_;
+ }
+ void set_allow_use_magnification_api(bool allow) {
+ allow_use_magnification_api_ = allow;
+ }
+#endif
+
private:
#if defined(USE_X11)
scoped_refptr<SharedXDisplay> x_display_;
#endif
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ scoped_refptr<DesktopConfigurationMonitor> configuration_monitor_;
+#endif
+
+#if defined(WEBRTC_WIN)
+ bool allow_use_magnification_api_;
+#endif
bool use_update_notifications_;
bool disable_effects_;
};
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_types.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_types.h
index d43ec499191..3e417965531 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_types.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_types.h
@@ -26,6 +26,17 @@ typedef intptr_t WindowId;
const WindowId kNullWindowId = 0;
+// Type used to identify screens on the desktop. Values are platform-specific:
+// - On Windows: integer display device index.
+// - On OSX: CGDirectDisplayID cast to intptr_t.
+// - On Linux (with X11): TBD.
+typedef intptr_t ScreenId;
+
+// The screen id corresponds to all screen combined together.
+const ScreenId kFullDesktopScreenId = -1;
+
+const ScreenId kInvalidScreenId = -2;
+
} // namespace webrtc
#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
index bcb664ef859..7ad16364977 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
@@ -13,6 +13,8 @@
#include <stddef.h>
+#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
+
namespace webrtc {
class DesktopFrame;
@@ -52,6 +54,11 @@ class DesktopCapturer {
// the top left corner of the capture target. Pending capture operations are
// canceled when DesktopCapturer is deleted.
virtual void Capture(const DesktopRegion& region) = 0;
+
+  // Sets the window to be excluded from the captured image in future
+ // Capture calls. Used to exclude the screenshare notification window for
+ // screen capturing.
+ virtual void SetExcludedWindow(WindowId window) {}
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_geometry.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_geometry.h
index e51273d8d21..047eeec3d9c 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_geometry.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_geometry.h
@@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_GEOMETRY_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_GEOMETRY_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
namespace webrtc {
@@ -58,7 +58,7 @@ class DesktopSize {
int32_t width() const { return width_; }
int32_t height() const { return height_; }
- bool is_empty() const { return width_ <= 0 && height_ <= 0; }
+ bool is_empty() const { return width_ <= 0 || height_ <= 0; }
bool equals(const DesktopSize& other) const {
return width_ == other.width_ && height_ == other.height_;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
index fc7c6ed9e2d..c4528ae3496 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
@@ -14,8 +14,8 @@
#include <map>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/differ.h b/chromium/third_party/webrtc/modules/desktop_capture/differ.h
index 8edce80b4ef..0b419d2dded 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/differ.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/differ.h
@@ -76,7 +76,7 @@ class Differ {
int bytes_per_row_;
// Diff information for each block in the image.
- scoped_array<DiffInfo> diff_info_;
+ scoped_ptr<DiffInfo[]> diff_info_;
// Dimensions and total size of diff info array.
int diff_info_width_;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
index 40fde4dbc4e..da1a21461dc 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
@@ -200,8 +200,8 @@ class DifferTest : public testing::Test {
int buffer_size_;
// Previous and current screen buffers.
- scoped_array<uint8_t> prev_;
- scoped_array<uint8_t> curr_;
+ scoped_ptr<uint8_t[]> prev_;
+ scoped_ptr<uint8_t[]> curr_;
private:
DISALLOW_COPY_AND_ASSIGN(DifferTest);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.h
index 433040a04ee..bb2339bb0f1 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.h
@@ -52,10 +52,18 @@ struct MacDesktopConfiguration {
// increase as you move up the screen) or Carbon-style "top-down" coordinates.
static MacDesktopConfiguration GetCurrent(Origin origin);
- // Bounds of the desktop in Density-Independent Pixels (DIPs).
+ // Returns true if the given desktop configuration equals this one.
+ bool Equals(const MacDesktopConfiguration& other);
+
+ // Returns the pointer to the display configuration with the specified id.
+ const MacDisplayConfiguration* FindDisplayConfigurationById(
+ CGDirectDisplayID id);
+
+ // Bounds of the desktop excluding monitors with DPI settings different from
+ // the main monitor. In Density-Independent Pixels (DIPs).
DesktopRect bounds;
- // Bounds of the desktop in physical pixels.
+ // Same as bounds, but expressed in physical pixels.
DesktopRect pixel_bounds;
// Scale factor from DIPs to physical pixels.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.mm b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.mm
index a917b5dc052..35fa65be2d2 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.mm
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.mm
@@ -110,15 +110,8 @@ MacDesktopConfiguration MacDesktopConfiguration::GetCurrent(Origin origin) {
MacDisplayConfiguration display_config =
GetConfigurationForScreen([screens objectAtIndex: i]);
- // Handling mixed-DPI is hard, so we only return displays that match the
- // "primary" display's DPI. The primary display is always the first in the
- // list returned by [NSScreen screens].
- if (i == 0) {
+ if (i == 0)
desktop_config.dip_to_pixel_scale = display_config.dip_to_pixel_scale;
- } else if (desktop_config.dip_to_pixel_scale !=
- display_config.dip_to_pixel_scale) {
- continue;
- }
// Cocoa uses bottom-up coordinates, so if the caller wants top-down then
// we need to invert the positions of secondary monitors relative to the
@@ -126,21 +119,62 @@ MacDesktopConfiguration MacDesktopConfiguration::GetCurrent(Origin origin) {
if (i > 0 && origin == TopLeftOrigin) {
InvertRectYOrigin(desktop_config.displays[0].bounds,
&display_config.bounds);
- InvertRectYOrigin(desktop_config.displays[0].pixel_bounds,
- &display_config.pixel_bounds);
+ // |display_bounds| is density dependent, so we need to convert the
+      // primary monitor's position into the secondary monitor's density context.
+ float scaling_factor = display_config.dip_to_pixel_scale /
+ desktop_config.displays[0].dip_to_pixel_scale;
+ DesktopRect primary_bounds = DesktopRect::MakeLTRB(
+ desktop_config.displays[0].pixel_bounds.left() * scaling_factor,
+ desktop_config.displays[0].pixel_bounds.top() * scaling_factor,
+ desktop_config.displays[0].pixel_bounds.right() * scaling_factor,
+ desktop_config.displays[0].pixel_bounds.bottom() * scaling_factor);
+ InvertRectYOrigin(primary_bounds, &display_config.pixel_bounds);
}
// Add the display to the configuration.
desktop_config.displays.push_back(display_config);
- // Update the desktop bounds to account for this display.
- desktop_config.bounds =
- JoinRects(desktop_config.bounds, display_config.bounds);
- desktop_config.pixel_bounds =
- JoinRects(desktop_config.pixel_bounds, display_config.pixel_bounds);
+ // Update the desktop bounds to account for this display, unless the current
+ // display uses different DPI settings.
+ if (display_config.dip_to_pixel_scale ==
+ desktop_config.dip_to_pixel_scale) {
+ desktop_config.bounds =
+ JoinRects(desktop_config.bounds, display_config.bounds);
+ desktop_config.pixel_bounds =
+ JoinRects(desktop_config.pixel_bounds, display_config.pixel_bounds);
+ }
}
return desktop_config;
}
+// For convenience of comparing MacDisplayConfigurations in
+// MacDesktopConfiguration::Equals.
+bool operator==(const MacDisplayConfiguration& left,
+ const MacDisplayConfiguration& right) {
+ return left.id == right.id &&
+ left.bounds.equals(right.bounds) &&
+ left.pixel_bounds.equals(right.pixel_bounds) &&
+ left.dip_to_pixel_scale == right.dip_to_pixel_scale;
+}
+
+bool MacDesktopConfiguration::Equals(const MacDesktopConfiguration& other) {
+ return bounds.equals(other.bounds) &&
+ pixel_bounds.equals(other.pixel_bounds) &&
+ dip_to_pixel_scale == other.dip_to_pixel_scale &&
+ displays == other.displays;
+}
+
+// Finds the display configuration with the specified id.
+const MacDisplayConfiguration*
+MacDesktopConfiguration::FindDisplayConfigurationById(
+ CGDirectDisplayID id) {
+ for (MacDisplayConfigurations::const_iterator it = displays.begin();
+ it != displays.end(); ++it) {
+ if (it->id == id)
+ return &(*it);
+ }
+ return NULL;
+}
+
} // namespace webrtc
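A worked example of the new scaling step in GetCurrent(), under assumed values: primary display with dip_to_pixel_scale 1.0 and pixel_bounds (0, 0, 1440, 900), secondary display with dip_to_pixel_scale 2.0:

  scaling_factor = 2.0 / 1.0 = 2.0
  primary_bounds = (0*2, 0*2, 1440*2, 900*2) = (0, 0, 2880, 1800)
  // This scaled rectangle, rather than the raw primary pixel_bounds, is what
  // InvertRectYOrigin() uses to flip the secondary display's pixel_bounds into
  // top-down coordinates.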
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc
new file mode 100644
index 00000000000..f0d5c34be65
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+// The amount of time allowed for displays to reconfigure.
+static const int64_t kDisplayConfigurationEventTimeoutMs = 10 * 1000;
+
+DesktopConfigurationMonitor::DesktopConfigurationMonitor()
+ : ref_count_(0),
+ display_configuration_capture_event_(EventWrapper::Create()) {
+ CGError err = CGDisplayRegisterReconfigurationCallback(
+ DesktopConfigurationMonitor::DisplaysReconfiguredCallback, this);
+ if (err != kCGErrorSuccess) {
+ LOG(LS_ERROR) << "CGDisplayRegisterReconfigurationCallback " << err;
+ abort();
+ }
+ display_configuration_capture_event_->Set();
+
+ desktop_configuration_ = MacDesktopConfiguration::GetCurrent(
+ MacDesktopConfiguration::TopLeftOrigin);
+}
+
+DesktopConfigurationMonitor::~DesktopConfigurationMonitor() {
+ CGError err = CGDisplayRemoveReconfigurationCallback(
+ DesktopConfigurationMonitor::DisplaysReconfiguredCallback, this);
+ if (err != kCGErrorSuccess)
+ LOG(LS_ERROR) << "CGDisplayRemoveReconfigurationCallback " << err;
+}
+
+void DesktopConfigurationMonitor::Lock() {
+ if (!display_configuration_capture_event_->Wait(
+ kDisplayConfigurationEventTimeoutMs)) {
+ LOG_F(LS_ERROR) << "Event wait timed out.";
+ abort();
+ }
+}
+
+void DesktopConfigurationMonitor::Unlock() {
+ display_configuration_capture_event_->Set();
+}
+
+// static
+void DesktopConfigurationMonitor::DisplaysReconfiguredCallback(
+ CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags,
+ void *user_parameter) {
+ DesktopConfigurationMonitor* monitor =
+ reinterpret_cast<DesktopConfigurationMonitor*>(user_parameter);
+ monitor->DisplaysReconfigured(display, flags);
+}
+
+void DesktopConfigurationMonitor::DisplaysReconfigured(
+ CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags) {
+ if (flags & kCGDisplayBeginConfigurationFlag) {
+ if (reconfiguring_displays_.empty()) {
+ // If this is the first display to start reconfiguring then wait on
+ // |display_configuration_capture_event_| to block the capture thread
+ // from accessing display memory until the reconfiguration completes.
+ if (!display_configuration_capture_event_->Wait(
+ kDisplayConfigurationEventTimeoutMs)) {
+ LOG_F(LS_ERROR) << "Event wait timed out.";
+ abort();
+ }
+ }
+ reconfiguring_displays_.insert(display);
+ } else {
+ reconfiguring_displays_.erase(display);
+ if (reconfiguring_displays_.empty()) {
+ desktop_configuration_ = MacDesktopConfiguration::GetCurrent(
+ MacDesktopConfiguration::TopLeftOrigin);
+ display_configuration_capture_event_->Set();
+ }
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
new file mode 100644
index 00000000000..27143a84e1e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_DESKTOP_CONFIGURATION_MONITOR_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_DESKTOP_CONFIGURATION_MONITOR_H_
+
+#include <ApplicationServices/ApplicationServices.h>
+
+#include <set>
+
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/system_wrappers/interface/atomic32.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class EventWrapper;
+
+// This class provides functions to synchronize capturing and display
+// reconfiguration across threads, and exposes the up-to-date
+// MacDesktopConfiguration.
+class DesktopConfigurationMonitor {
+ public:
+ DesktopConfigurationMonitor();
+ // Acquires a lock on the current configuration.
+ void Lock();
+ // Releases the lock previously acquired.
+ void Unlock();
+ // Returns the current desktop configuration. Should only be called when the
+ // lock has been acquired.
+ const MacDesktopConfiguration& desktop_configuration() {
+ return desktop_configuration_;
+ }
+
+ void AddRef() { ++ref_count_; }
+ void Release() {
+ if (--ref_count_ == 0)
+ delete this;
+ }
+
+ private:
+ static void DisplaysReconfiguredCallback(CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags,
+ void *user_parameter);
+ ~DesktopConfigurationMonitor();
+
+ void DisplaysReconfigured(CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags);
+
+ Atomic32 ref_count_;
+ std::set<CGDirectDisplayID> reconfiguring_displays_;
+ MacDesktopConfiguration desktop_configuration_;
+ scoped_ptr<EventWrapper> display_configuration_capture_event_;
+
+ DISALLOW_COPY_AND_ASSIGN(DesktopConfigurationMonitor);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_DESKTOP_CONFIGURATION_MONITOR_H_
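A usage sketch for the new monitor; the scoped_refptr handling mirrors DesktopCaptureOptions::CreateDefault() earlier in this patch, and the capture step is a placeholder:

#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
#include "webrtc/system_wrappers/interface/scoped_refptr.h"

// Sketch: read the desktop configuration safely from a capture thread.
void ReadConfigurationSketch(
    const webrtc::scoped_refptr<webrtc::DesktopConfigurationMonitor>& monitor) {
  monitor->Lock();  // Blocks while a display reconfiguration is in progress.
  webrtc::MacDesktopConfiguration config = monitor->desktop_configuration();
  // ... capture using config.pixel_bounds, config.displays, etc. ...
  monitor->Unlock();
}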
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.cc b/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.cc
new file mode 100644
index 00000000000..7466f20342b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <sys/utsname.h>
+
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+namespace {
+
+int GetDarwinVersion() {
+ struct utsname uname_info;
+ if (uname(&uname_info) != 0) {
+ LOG(LS_ERROR) << "uname failed";
+ return 0;
+ }
+
+ if (strcmp(uname_info.sysname, "Darwin") != 0)
+ return 0;
+
+ char* dot;
+ int result = strtol(uname_info.release, &dot, 10);
+ if (*dot != '.') {
+ LOG(LS_ERROR) << "Failed to parse version";
+ return 0;
+ }
+
+ return result;
+}
+
+} // namespace
+
+bool IsOSLionOrLater() {
+ static int darwin_version = GetDarwinVersion();
+
+ // Verify that the version has been parsed correctly.
+ if (darwin_version < 6) {
+ LOG_F(LS_ERROR) << "Invalid Darwin version: " << darwin_version;
+ abort();
+ }
+
+ // Darwin major version 11 corresponds to OSX 10.7.
+ return darwin_version >= 11;
+}
+
+} // namespace webrtc
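For reference, the check above relies on the usual Darwin-to-OS X mapping for these releases (Darwin major N corresponds to OS X 10.(N-4), so 10 is Snow Leopard and 11 is Lion). The sketch below shows how the rest of this patch degrades on older systems; the helper is hypothetical, and ScreenId/kFullDesktopScreenId are taken from desktop_capture_types.h as the screen_capturer.h hunk further down suggests.

// Hypothetical helper, for illustration only.
#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
#include "webrtc/modules/desktop_capture/mac/osx_version.h"

webrtc::ScreenId EffectiveScreenId(webrtc::ScreenId requested) {
  // Pre-Lion APIs cannot capture a single display, so fall back to the full
  // desktop, matching MouseCursorMonitorMac and ScreenCapturerMac below.
  return webrtc::IsOSLionOrLater() ? requested : webrtc::kFullDesktopScreenId;
}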
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.h
new file mode 100644
index 00000000000..0ba49a4e69e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.h
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+namespace webrtc {
+
+// Returns true if the OS version >= OSX 10.7.
+bool IsOSLionOrLater();
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h
index 73d425aea23..4d1dd1ffd6a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h
@@ -14,7 +14,7 @@
#include <OpenGL/CGLMacro.h>
#include <OpenGL/OpenGL.h>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.cc
index 3f1ab3ddf50..22a9c0ee8c8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.cc
@@ -10,10 +10,14 @@
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include <assert.h>
+
#include "webrtc/modules/desktop_capture/desktop_frame.h"
namespace webrtc {
+MouseCursor::MouseCursor() {}
+
MouseCursor::MouseCursor(DesktopFrame* image, const DesktopVector& hotspot)
: image_(image),
hotspot_(hotspot) {
@@ -25,8 +29,10 @@ MouseCursor::~MouseCursor() {}
// static
MouseCursor* MouseCursor::CopyOf(const MouseCursor& cursor) {
- return new MouseCursor(BasicDesktopFrame::CopyOf(cursor.image()),
- cursor.hotspot());
+ return cursor.image()
+ ? new MouseCursor(BasicDesktopFrame::CopyOf(*cursor.image()),
+ cursor.hotspot())
+ : new MouseCursor();
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.h b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.h
index 4cf770830ce..22887f9ae47 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.h
@@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
@@ -21,13 +21,19 @@ class DesktopFrame;
class MouseCursor {
public:
+ MouseCursor();
+
// Takes ownership of |image|. |hotspot| must be within |image| boundaries.
MouseCursor(DesktopFrame* image, const DesktopVector& hotspot);
+
~MouseCursor();
static MouseCursor* CopyOf(const MouseCursor& cursor);
- const DesktopFrame& image() const { return *image_; }
+ void set_image(DesktopFrame* image) { image_.reset(image); }
+ const DesktopFrame* image() const { return image_.get(); }
+
+ void set_hotspot(const DesktopVector& hotspot) { hotspot_ = hotspot; }
const DesktopVector& hotspot() const { return hotspot_; }
private:
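Since image() can now return NULL (a default-constructed MouseCursor has no frame), callers must null-check before dereferencing, as CopyOf above and the capture code below do. A hypothetical comparison helper, for illustration only:

#include <string.h>

#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"

bool SameShape(const webrtc::MouseCursor& a, const webrtc::MouseCursor& b) {
  // Two imageless cursors compare equal; an imageless cursor never matches
  // one that carries an image.
  if (!a.image() || !b.image())
    return a.image() == b.image();
  return a.image()->size().equals(b.image()->size()) &&
         a.hotspot().equals(b.hotspot()) &&
         memcmp(a.image()->data(), b.image()->data(),
                a.image()->stride() * a.image()->size().height()) == 0;
}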
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h
index 9785b736b2a..24dfe72dfa7 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h
@@ -69,7 +69,8 @@ class MouseCursorMonitor {
//
// TODO(sergeyu): Provide a way to select a specific screen.
static MouseCursorMonitor* CreateForScreen(
- const DesktopCaptureOptions& options);
+ const DesktopCaptureOptions& options,
+ ScreenId screen);
// Initializes the monitor with the |callback|, which must remain valid until
// capturer is destroyed.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm
index d3c02896f9e..e8806338197 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm
@@ -15,35 +15,59 @@
#include <Cocoa/Cocoa.h>
#include <CoreFoundation/CoreFoundation.h>
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+#include "webrtc/modules/desktop_capture/mac/osx_version.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/scoped_refptr.h"
namespace webrtc {
class MouseCursorMonitorMac : public MouseCursorMonitor {
public:
- MouseCursorMonitorMac(CGWindowID window_id);
+ MouseCursorMonitorMac(const DesktopCaptureOptions& options,
+ CGWindowID window_id,
+ ScreenId screen_id);
virtual ~MouseCursorMonitorMac();
virtual void Init(Callback* callback, Mode mode) OVERRIDE;
virtual void Capture() OVERRIDE;
private:
+ static void DisplaysReconfiguredCallback(CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags,
+ void *user_parameter);
+ void DisplaysReconfigured(CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags);
+
void CaptureImage();
+ scoped_refptr<DesktopConfigurationMonitor> configuration_monitor_;
CGWindowID window_id_;
-
+ ScreenId screen_id_;
Callback* callback_;
Mode mode_;
-
scoped_ptr<MouseCursor> last_cursor_;
};
-MouseCursorMonitorMac::MouseCursorMonitorMac(CGWindowID window_id)
- : window_id_(window_id),
+MouseCursorMonitorMac::MouseCursorMonitorMac(
+ const DesktopCaptureOptions& options,
+ CGWindowID window_id,
+ ScreenId screen_id)
+ : configuration_monitor_(options.configuration_monitor()),
+ window_id_(window_id),
+ screen_id_(screen_id),
callback_(NULL),
mode_(SHAPE_AND_POSITION) {
+ assert(window_id == kCGNullWindowID || screen_id == kInvalidScreenId);
+ if (screen_id != kInvalidScreenId && !IsOSLionOrLater()) {
+ // Single screen capture is not supported on pre OS X 10.7.
+ screen_id_ = kFullDesktopScreenId;
+ }
}
MouseCursorMonitorMac::~MouseCursorMonitorMac() {}
@@ -72,6 +96,21 @@ void MouseCursorMonitorMac::Capture() {
DesktopVector position(gc_position.x, gc_position.y);
+ configuration_monitor_->Lock();
+ MacDesktopConfiguration configuration =
+ configuration_monitor_->desktop_configuration();
+ configuration_monitor_->Unlock();
+ float scale = 1.0f;
+
+ // Find the DIP to physical pixel scale for the screen where the mouse
+ // cursor is.
+ for (MacDisplayConfigurations::iterator it = configuration.displays.begin();
+ it != configuration.displays.end(); ++it) {
+ if (it->bounds.Contains(position)) {
+ scale = it->dip_to_pixel_scale;
+ break;
+ }
+ }
// If we are capturing cursor for a specific window then we need to figure out
// if the current mouse position is covered by another window and also adjust
// |position| to make it relative to the window origin.
@@ -134,10 +173,8 @@ void MouseCursorMonitorMac::Capture() {
}
}
}
-
CFRelease(window_array);
}
-
if (!found_window) {
// If we failed to get list of windows or the window wasn't in the list
// pretend that the cursor is outside the window. This can happen, e.g. if
@@ -145,8 +182,32 @@ void MouseCursorMonitorMac::Capture() {
state = OUTSIDE;
position.set(-1, -1);
}
+ } else {
+ assert(screen_id_ >= kFullDesktopScreenId);
+ if (screen_id_ != kFullDesktopScreenId) {
+ // For single screen capturing, convert the position to relative to the
+ // target screen.
+ const MacDisplayConfiguration* config =
+ configuration.FindDisplayConfigurationById(
+ static_cast<CGDirectDisplayID>(screen_id_));
+ if (config) {
+ if (!config->pixel_bounds.Contains(position))
+ state = OUTSIDE;
+ position = position.subtract(config->bounds.top_left());
+ } else {
+ // The target screen is no longer valid.
+ state = OUTSIDE;
+ position.set(-1, -1);
+ }
+ } else {
+ position.subtract(configuration.bounds.top_left());
+ }
+ }
+ if (state == INSIDE) {
+ // Convert Density Independent Pixel to physical pixel.
+ position = DesktopVector(round(position.x() * scale),
+ round(position.y() * scale));
}
-
callback_->OnMouseCursorPosition(state, position);
}
@@ -182,10 +243,10 @@ void MouseCursorMonitorMac::CaptureImage() {
// Compare the cursor with the previous one.
if (last_cursor_.get() &&
- last_cursor_->image().size().equals(size) &&
+ last_cursor_->image()->size().equals(size) &&
last_cursor_->hotspot().equals(hotspot) &&
- memcmp(last_cursor_->image().data(), src_data,
- last_cursor_->image().stride() * size.height()) == 0) {
+ memcmp(last_cursor_->image()->data(), src_data,
+ last_cursor_->image()->stride() * size.height()) == 0) {
return;
}
@@ -204,15 +265,15 @@ void MouseCursorMonitorMac::CaptureImage() {
callback_->OnMouseCursor(cursor.release());
}
-
MouseCursorMonitor* MouseCursorMonitor::CreateForWindow(
const DesktopCaptureOptions& options, WindowId window) {
- return new MouseCursorMonitorMac(window);
+ return new MouseCursorMonitorMac(options, window, kInvalidScreenId);
}
MouseCursorMonitor* MouseCursorMonitor::CreateForScreen(
- const DesktopCaptureOptions& options) {
- return new MouseCursorMonitorMac(kCGNullWindowID);
+ const DesktopCaptureOptions& options,
+ ScreenId screen) {
+ return new MouseCursorMonitorMac(options, kCGNullWindowID, screen);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc
index 7aa1b7141d5..3a632cc0d9c 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc
@@ -10,7 +10,7 @@
#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
-#include <cstddef>
+#include <stddef.h>
namespace webrtc {
@@ -21,7 +21,8 @@ MouseCursorMonitor* MouseCursorMonitor::CreateForWindow(
}
MouseCursorMonitor* MouseCursorMonitor::CreateForScreen(
- const DesktopCaptureOptions& options) {
+ const DesktopCaptureOptions& options,
+ ScreenId screen) {
return NULL;
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc
index 18bf1ca40e0..c6af2b700e4 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc
@@ -50,7 +50,12 @@ class MouseCursorMonitorTest : public testing::Test,
// tests. Figure out how to do that without breaking other tests in
// modules_unittests and enable these tests on Mac.
// https://code.google.com/p/webrtc/issues/detail?id=2532
-#if !defined(WEBRTC_MAC)
+//
+// Disabled on Windows due to flake, see:
+// https://code.google.com/p/webrtc/issues/detail?id=3408
+// Disabled on Linux due to flake, see:
+// https://code.google.com/p/webrtc/issues/detail?id=3245
+#if !defined(WEBRTC_MAC) && !defined(WEBRTC_WIN) && !defined(WEBRTC_LINUX)
#define MAYBE(x) x
#else
#define MAYBE(x) DISABLED_##x
@@ -58,7 +63,7 @@ class MouseCursorMonitorTest : public testing::Test,
TEST_F(MouseCursorMonitorTest, MAYBE(FromScreen)) {
scoped_ptr<MouseCursorMonitor> capturer(MouseCursorMonitor::CreateForScreen(
- DesktopCaptureOptions::CreateDefault()));
+ DesktopCaptureOptions::CreateDefault(), webrtc::kFullDesktopScreenId));
assert(capturer.get());
capturer->Init(this, MouseCursorMonitor::SHAPE_AND_POSITION);
capturer->Capture();
@@ -66,10 +71,10 @@ TEST_F(MouseCursorMonitorTest, MAYBE(FromScreen)) {
EXPECT_TRUE(cursor_image_.get());
EXPECT_GE(cursor_image_->hotspot().x(), 0);
EXPECT_LE(cursor_image_->hotspot().x(),
- cursor_image_->image().size().width());
+ cursor_image_->image()->size().width());
EXPECT_GE(cursor_image_->hotspot().y(), 0);
EXPECT_LE(cursor_image_->hotspot().y(),
- cursor_image_->image().size().height());
+ cursor_image_->image()->size().height());
EXPECT_TRUE(position_received_);
EXPECT_EQ(MouseCursorMonitor::INSIDE, state_);
@@ -109,7 +114,7 @@ TEST_F(MouseCursorMonitorTest, MAYBE(FromWindow)) {
// Make sure that OnMouseCursorPosition() is not called in the SHAPE_ONLY mode.
TEST_F(MouseCursorMonitorTest, MAYBE(ShapeOnly)) {
scoped_ptr<MouseCursorMonitor> capturer(MouseCursorMonitor::CreateForScreen(
- DesktopCaptureOptions::CreateDefault()));
+ DesktopCaptureOptions::CreateDefault(), webrtc::kFullDesktopScreenId));
assert(capturer.get());
capturer->Init(this, MouseCursorMonitor::SHAPE_ONLY);
capturer->Capture();
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
index 82f7d2447fb..fd0b222a3a6 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
@@ -10,9 +10,12 @@
#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
+#include <assert.h>
+
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
#include "webrtc/modules/desktop_capture/win/cursor.h"
+#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"
#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -20,13 +23,20 @@ namespace webrtc {
class MouseCursorMonitorWin : public MouseCursorMonitor {
public:
explicit MouseCursorMonitorWin(HWND window);
+ explicit MouseCursorMonitorWin(ScreenId screen);
virtual ~MouseCursorMonitorWin();
virtual void Init(Callback* callback, Mode mode) OVERRIDE;
virtual void Capture() OVERRIDE;
private:
+ // Get the rect of the currently selected screen, relative to the primary
+ // display's top-left. If the screen is disabled or disconnected, or if any
+ // error occurs, an empty rect is returned.
+ DesktopRect GetScreenRect();
+
HWND window_;
+ ScreenId screen_;
Callback* callback_;
Mode mode_;
@@ -38,12 +48,23 @@ class MouseCursorMonitorWin : public MouseCursorMonitor {
MouseCursorMonitorWin::MouseCursorMonitorWin(HWND window)
: window_(window),
+ screen_(kInvalidScreenId),
callback_(NULL),
mode_(SHAPE_AND_POSITION),
desktop_dc_(NULL),
last_cursor_(NULL) {
}
+MouseCursorMonitorWin::MouseCursorMonitorWin(ScreenId screen)
+ : window_(NULL),
+ screen_(screen),
+ callback_(NULL),
+ mode_(SHAPE_AND_POSITION),
+ desktop_dc_(NULL),
+ last_cursor_(NULL) {
+ assert(screen >= kFullDesktopScreenId);
+}
+
MouseCursorMonitorWin::~MouseCursorMonitorWin() {
if (desktop_dc_)
ReleaseDC(NULL, desktop_dc_);
@@ -85,28 +106,68 @@ void MouseCursorMonitorWin::Capture() {
bool inside = cursor_info.flags == CURSOR_SHOWING;
if (window_) {
- RECT rect;
- if (!GetWindowRect(window_, &rect)) {
+ DesktopRect original_rect;
+ DesktopRect cropped_rect;
+ if (!GetCroppedWindowRect(window_, &cropped_rect, &original_rect)) {
position.set(0, 0);
inside = false;
} else {
- position = position.subtract(DesktopVector(rect.left, rect.top));
- if (inside)
- inside = (window_ == WindowFromPoint(cursor_info.ptScreenPos));
+ if (inside) {
+ HWND windowUnderCursor = WindowFromPoint(cursor_info.ptScreenPos);
+ inside = windowUnderCursor ?
+ (window_ == GetAncestor(windowUnderCursor, GA_ROOT)) : false;
+ }
+ position = position.subtract(cropped_rect.top_left());
}
+ } else {
+ assert(screen_ != kInvalidScreenId);
+ DesktopRect rect = GetScreenRect();
+ if (inside)
+ inside = rect.Contains(position);
+ position = position.subtract(rect.top_left());
}
callback_->OnMouseCursorPosition(inside ? INSIDE : OUTSIDE, position);
}
+DesktopRect MouseCursorMonitorWin::GetScreenRect() {
+ assert(screen_ != kInvalidScreenId);
+ if (screen_ == kFullDesktopScreenId) {
+ return DesktopRect::MakeXYWH(
+ GetSystemMetrics(SM_XVIRTUALSCREEN),
+ GetSystemMetrics(SM_YVIRTUALSCREEN),
+ GetSystemMetrics(SM_CXVIRTUALSCREEN),
+ GetSystemMetrics(SM_CYVIRTUALSCREEN));
+ }
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ BOOL result = EnumDisplayDevices(NULL, screen_, &device, 0);
+ if (!result)
+ return DesktopRect();
+
+ DEVMODE device_mode;
+ device_mode.dmSize = sizeof(device_mode);
+ device_mode.dmDriverExtra = 0;
+ result = EnumDisplaySettingsEx(
+ device.DeviceName, ENUM_CURRENT_SETTINGS, &device_mode, 0);
+ if (!result)
+ return DesktopRect();
+
+ return DesktopRect::MakeXYWH(device_mode.dmPosition.x,
+ device_mode.dmPosition.y,
+ device_mode.dmPelsWidth,
+ device_mode.dmPelsHeight);
+}
+
MouseCursorMonitor* MouseCursorMonitor::CreateForWindow(
const DesktopCaptureOptions& options, WindowId window) {
return new MouseCursorMonitorWin(reinterpret_cast<HWND>(window));
}
MouseCursorMonitor* MouseCursorMonitor::CreateForScreen(
- const DesktopCaptureOptions& options) {
- return new MouseCursorMonitorWin(NULL);
+ const DesktopCaptureOptions& options,
+ ScreenId screen) {
+ return new MouseCursorMonitorWin(screen);
}
} // namespace webrtc
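A worked example of the screen-relative reporting above, using hypothetical numbers:

// Suppose EnumDisplaySettingsEx reports a secondary display at
// dmPosition = (1920, 0) with 1280x1024 pixels, so
//   GetScreenRect() == DesktopRect::MakeXYWH(1920, 0, 1280, 1024).
// A cursor at virtual-screen position (2000, 100) is then reported as
//   INSIDE, position = (2000 - 1920, 100 - 0) = (80, 100),
// while a cursor at (100, 100) lies outside that rect and is reported
// OUTSIDE (its translated position is still passed to the callback).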
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc
index 9114b95f3b4..f09593db9d6 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc
@@ -17,6 +17,7 @@
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include "webrtc/modules/desktop_capture/x11/x_error_trap.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@@ -145,10 +146,12 @@ void MouseCursorMonitorX11::Capture() {
Window root_window;
Window child_window;
unsigned int mask;
+
+ XErrorTrap error_trap(display());
Bool result = XQueryPointer(display(), window_, &root_window, &child_window,
&root_x, &root_y, &win_x, &win_y, &mask);
CursorState state;
- if (!result) {
+ if (!result || error_trap.GetLastErrorAndDisable() != 0) {
state = OUTSIDE;
} else {
// In screen mode (window_ == root_window) the mouse is always inside.
@@ -214,7 +217,8 @@ MouseCursorMonitor* MouseCursorMonitor::CreateForWindow(
}
MouseCursorMonitor* MouseCursorMonitor::CreateForScreen(
- const DesktopCaptureOptions& options) {
+ const DesktopCaptureOptions& options,
+ ScreenId screen) {
if (!options.x_display())
return NULL;
return new MouseCursorMonitorX11(
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc
index b045f05267c..45a3507b923 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
+#include <assert.h>
#include <algorithm>
#include "webrtc/modules/desktop_capture/desktop_frame.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
index 9dd3a1050a4..a8d40a72ef8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
@@ -11,6 +11,9 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_H_
+#include <vector>
+
+#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
@@ -40,6 +43,13 @@ struct MouseCursorShape;
// Since data can be read while another capture action is happening.
class ScreenCapturer : public DesktopCapturer {
public:
+ // Use a struct to represent a screen although it has only an id for now,
+ // because we may want to add more fields (e.g. description) in the future.
+ struct Screen {
+ ScreenId id;
+ };
+ typedef std::vector<Screen> ScreenList;
+
// Provides callbacks used by the capturer to pass captured video frames and
// mouse cursor shapes to the processing pipeline.
//
@@ -78,6 +88,15 @@ class ScreenCapturer : public DesktopCapturer {
// remain valid until the capturer is destroyed.
virtual void SetMouseShapeObserver(
MouseShapeObserver* mouse_shape_observer) = 0;
+
+ // Get the list of screens (not containing kFullDesktopScreenId). Returns
+ // false in case of a failure.
+ virtual bool GetScreenList(ScreenList* screens) = 0;
+
+ // Select the screen to be captured. Returns false in case of a failure (e.g.
+ // if there is no screen with the specified id). If this is never called, the
+ // full desktop is captured.
+ virtual bool SelectScreen(ScreenId id) = 0;
};
} // namespace webrtc
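A minimal usage sketch of the new enumeration/selection API, not part of the patch; it mirrors the GetScreenListAndSelectScreen test added further down, and the helper name is hypothetical.

#include "webrtc/modules/desktop_capture/screen_capturer.h"

void SelectFirstScreen(webrtc::ScreenCapturer* capturer) {
  webrtc::ScreenCapturer::ScreenList screens;
  if (capturer->GetScreenList(&screens) && !screens.empty()) {
    // Without a SelectScreen() call the full desktop is captured; selecting
    // the first enumerated screen restricts capture to that display.
    capturer->SelectScreen(screens[0].id);
  }
}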
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.cc
index 75af043c84d..86761c170f0 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include <assert.h>
#include <algorithm>
#include "webrtc/system_wrappers/interface/logging.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
index 00639c73918..2d5733906f0 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
@@ -19,18 +19,18 @@
#include <IOKit/pwr_mgt/IOPMLib.h>
#include <OpenGL/CGLMacro.h>
#include <OpenGL/OpenGL.h>
-#include <sys/utsname.h>
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+#include "webrtc/modules/desktop_capture/mac/osx_version.h"
#include "webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h"
#include "webrtc/modules/desktop_capture/mouse_cursor_shape.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
-#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
@@ -63,7 +63,8 @@ DesktopRect ScaleAndRoundCGRect(const CGRect& rect, float scale) {
static_cast<int>(ceil((rect.origin.y + rect.size.height) * scale)));
}
-// Copy pixels in the |rect| from |src_place| to |dest_plane|.
+// Copy pixels in the |rect| from |src_plane| to |dest_plane|. |rect| should be
+// relative to the origin of |src_plane| and |dest_plane|.
void CopyRect(const uint8_t* src_plane,
int src_plane_stride,
uint8_t* dest_plane,
@@ -87,46 +88,110 @@ void CopyRect(const uint8_t* src_plane,
}
}
-int GetDarwinVersion() {
- struct utsname uname_info;
- if (uname(&uname_info) != 0) {
- LOG(LS_ERROR) << "uname failed";
- return 0;
+// Returns an array of CGWindowID for all the on-screen windows except
+// |window_to_exclude|, or NULL if the window is not found or the window list
+// cannot be obtained. The caller should release the returned CFArrayRef.
+CFArrayRef CreateWindowListWithExclusion(CGWindowID window_to_exclude) {
+ if (!window_to_exclude)
+ return NULL;
+
+ CFArrayRef all_windows = CGWindowListCopyWindowInfo(
+ kCGWindowListOptionOnScreenOnly, kCGNullWindowID);
+ if (!all_windows)
+ return NULL;
+
+ CFMutableArrayRef returned_array = CFArrayCreateMutable(
+ NULL, CFArrayGetCount(all_windows), NULL);
+
+ bool found = false;
+ for (CFIndex i = 0; i < CFArrayGetCount(all_windows); ++i) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(all_windows, i));
+
+ CFNumberRef id_ref = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowNumber));
+
+ CGWindowID id;
+ CFNumberGetValue(id_ref, kCFNumberIntType, &id);
+ if (id == window_to_exclude) {
+ found = true;
+ continue;
+ }
+ CFArrayAppendValue(returned_array, reinterpret_cast<void *>(id));
}
+ CFRelease(all_windows);
- if (strcmp(uname_info.sysname, "Darwin") != 0)
- return 0;
-
- char* dot;
- int result = strtol(uname_info.release, &dot, 10);
- if (*dot != '.') {
- LOG(LS_ERROR) << "Failed to parse version";
- return 0;
+ if (!found) {
+ CFRelease(returned_array);
+ returned_array = NULL;
}
-
- return result;
+ return returned_array;
}
-bool IsOSLionOrLater() {
- static int darwin_version = GetDarwinVersion();
-
- // Verify that the version has been parsed correctly.
- if (darwin_version < 6) {
- LOG_F(LS_ERROR) << "Invalid Darwin version: " << darwin_version;
- abort();
+// Returns the bounds of |window| in physical pixels, enlarged by a small amount
+// on four edges to take account of the border/shadow effects.
+DesktopRect GetExcludedWindowPixelBounds(CGWindowID window,
+ float dip_to_pixel_scale) {
+ // The number of pixels to add to the actual window bounds to account for
+ // the border/shadow effects.
+ static const int kBorderEffectSize = 20;
+ CGRect rect;
+ CGWindowID ids[1];
+ ids[0] = window;
+
+ CFArrayRef window_id_array =
+ CFArrayCreate(NULL, reinterpret_cast<const void **>(&ids), 1, NULL);
+ CFArrayRef window_array =
+ CGWindowListCreateDescriptionFromArray(window_id_array);
+
+ if (CFArrayGetCount(window_array) > 0) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, 0));
+ CFDictionaryRef bounds_ref = reinterpret_cast<CFDictionaryRef>(
+ CFDictionaryGetValue(window, kCGWindowBounds));
+ CGRectMakeWithDictionaryRepresentation(bounds_ref, &rect);
}
- // Darwin major version 11 corresponds to OSX 10.7.
- return darwin_version >= 11;
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+
+ rect.origin.x -= kBorderEffectSize;
+ rect.origin.y -= kBorderEffectSize;
+ rect.size.width += kBorderEffectSize * 2;
+ rect.size.height += kBorderEffectSize * 2;
+ // |rect| is in DIP, so convert to physical pixels.
+ return ScaleAndRoundCGRect(rect, dip_to_pixel_scale);
}
-// The amount of time allowed for displays to reconfigure.
-const int64_t kDisplayConfigurationEventTimeoutMs = 10 * 1000;
+// Create an image of the given region using the given |window_list|.
+// |pixel_bounds| should be in the primary display's coordinates, in physical
+// pixels. The caller should release the returned CGImageRef and CFDataRef.
+CGImageRef CreateExcludedWindowRegionImage(const DesktopRect& pixel_bounds,
+ float dip_to_pixel_scale,
+ CFArrayRef window_list,
+ CFDataRef* data_ref) {
+ CGRect window_bounds;
+ // The origin is in DIP while the size is in physical pixels. That's what
+ // CGWindowListCreateImageFromArray expects.
+ window_bounds.origin.x = pixel_bounds.left() / dip_to_pixel_scale;
+ window_bounds.origin.y = pixel_bounds.top() / dip_to_pixel_scale;
+ window_bounds.size.width = pixel_bounds.width();
+ window_bounds.size.height = pixel_bounds.height();
+
+ CGImageRef excluded_image = CGWindowListCreateImageFromArray(
+ window_bounds, window_list, kCGWindowImageDefault);
+
+ CGDataProviderRef provider = CGImageGetDataProvider(excluded_image);
+ *data_ref = CGDataProviderCopyData(provider);
+ assert(*data_ref);
+ return excluded_image;
+}
// A class to perform video frame capturing for mac.
class ScreenCapturerMac : public ScreenCapturer {
public:
- ScreenCapturerMac();
+ explicit ScreenCapturerMac(
+ scoped_refptr<DesktopConfigurationMonitor> desktop_config_monitor);
virtual ~ScreenCapturerMac();
bool Init();
@@ -134,8 +199,11 @@ class ScreenCapturerMac : public ScreenCapturer {
// Overridden from ScreenCapturer:
virtual void Start(Callback* callback) OVERRIDE;
virtual void Capture(const DesktopRegion& region) OVERRIDE;
+ virtual void SetExcludedWindow(WindowId window) OVERRIDE;
virtual void SetMouseShapeObserver(
MouseShapeObserver* mouse_shape_observer) OVERRIDE;
+ virtual bool GetScreenList(ScreenList* screens) OVERRIDE;
+ virtual bool SelectScreen(ScreenId id) OVERRIDE;
private:
void CaptureCursor();
@@ -145,7 +213,8 @@ class ScreenCapturerMac : public ScreenCapturer {
void GlBlitSlow(const DesktopFrame& frame);
void CgBlitPreLion(const DesktopFrame& frame,
const DesktopRegion& region);
- void CgBlitPostLion(const DesktopFrame& frame,
+ // Returns false if the selected screen is no longer valid.
+ bool CgBlitPostLion(const DesktopFrame& frame,
const DesktopRegion& region);
// Called when the screen configuration is changed.
@@ -158,8 +227,6 @@ class ScreenCapturerMac : public ScreenCapturer {
void ScreenUpdateMove(CGScreenUpdateMoveDelta delta,
size_t count,
const CGRect *rect_array);
- void DisplaysReconfigured(CGDirectDisplayID display,
- CGDisplayChangeSummaryFlags flags);
static void ScreenRefreshCallback(CGRectCount count,
const CGRect *rect_array,
void *user_parameter);
@@ -167,12 +234,10 @@ class ScreenCapturerMac : public ScreenCapturer {
size_t count,
const CGRect *rect_array,
void *user_parameter);
- static void DisplaysReconfiguredCallback(CGDirectDisplayID display,
- CGDisplayChangeSummaryFlags flags,
- void *user_parameter);
-
void ReleaseBuffers();
+ DesktopFrame* CreateFrame();
+
Callback* callback_;
MouseShapeObserver* mouse_shape_observer_;
@@ -185,6 +250,16 @@ class ScreenCapturerMac : public ScreenCapturer {
// Current display configuration.
MacDesktopConfiguration desktop_config_;
+ // Currently selected display, or 0 if the full desktop is selected. On OS X
+ // 10.6 and before, this is always 0.
+ CGDirectDisplayID current_display_;
+
+ // The physical pixel bounds of the current screen.
+ DesktopRect screen_pixel_bounds_;
+
+ // The dip to physical pixel scale of the current screen.
+ float dip_to_pixel_scale_;
+
// A thread-safe list of invalid rectangles, and the size of the most
// recently captured screen.
ScreenCapturerHelper helper_;
@@ -195,13 +270,8 @@ class ScreenCapturerMac : public ScreenCapturer {
// Contains an invalid region from the previous capture.
DesktopRegion last_invalid_region_;
- // Used to ensure that frame captures do not take place while displays
- // are being reconfigured.
- scoped_ptr<EventWrapper> display_configuration_capture_event_;
-
- // Records the Ids of attached displays which are being reconfigured.
- // Accessed on the thread on which we are notified of display events.
- std::set<CGDirectDisplayID> reconfiguring_displays_;
+ // Monitoring display reconfiguration.
+ scoped_refptr<DesktopConfigurationMonitor> desktop_config_monitor_;
// Power management assertion to prevent the screen from sleeping.
IOPMAssertionID power_assertion_id_display_;
@@ -217,6 +287,8 @@ class ScreenCapturerMac : public ScreenCapturer {
void* opengl_library_;
CGLSetFullScreenFunc cgl_set_full_screen_;
+ CGWindowID excluded_window_;
+
DISALLOW_COPY_AND_ASSIGN(ScreenCapturerMac);
};
@@ -243,24 +315,14 @@ class InvertedDesktopFrame : public DesktopFrame {
DISALLOW_COPY_AND_ASSIGN(InvertedDesktopFrame);
};
-DesktopFrame* CreateFrame(
- const MacDesktopConfiguration& desktop_config) {
-
- DesktopSize size(desktop_config.pixel_bounds.width(),
- desktop_config.pixel_bounds.height());
- scoped_ptr<DesktopFrame> frame(new BasicDesktopFrame(size));
-
- frame->set_dpi(DesktopVector(
- kStandardDPI * desktop_config.dip_to_pixel_scale,
- kStandardDPI * desktop_config.dip_to_pixel_scale));
- return frame.release();
-}
-
-ScreenCapturerMac::ScreenCapturerMac()
+ScreenCapturerMac::ScreenCapturerMac(
+ scoped_refptr<DesktopConfigurationMonitor> desktop_config_monitor)
: callback_(NULL),
mouse_shape_observer_(NULL),
cgl_context_(NULL),
- display_configuration_capture_event_(EventWrapper::Create()),
+ current_display_(0),
+ dip_to_pixel_scale_(1.0f),
+ desktop_config_monitor_(desktop_config_monitor),
power_assertion_id_display_(kIOPMNullAssertionID),
power_assertion_id_user_(kIOPMNullAssertionID),
app_services_library_(NULL),
@@ -268,8 +330,8 @@ ScreenCapturerMac::ScreenCapturerMac()
cg_display_bytes_per_row_(NULL),
cg_display_bits_per_pixel_(NULL),
opengl_library_(NULL),
- cgl_set_full_screen_(NULL) {
- display_configuration_capture_event_->Set();
+ cgl_set_full_screen_(NULL),
+ excluded_window_(0) {
}
ScreenCapturerMac::~ScreenCapturerMac() {
@@ -284,11 +346,6 @@ ScreenCapturerMac::~ScreenCapturerMac() {
ReleaseBuffers();
UnregisterRefreshAndMoveHandlers();
- CGError err = CGDisplayRemoveReconfigurationCallback(
- ScreenCapturerMac::DisplaysReconfiguredCallback, this);
- if (err != kCGErrorSuccess)
- LOG(LS_ERROR) << "CGDisplayRemoveReconfigurationCallback " << err;
-
dlclose(app_services_library_);
dlclose(opengl_library_);
}
@@ -297,14 +354,9 @@ bool ScreenCapturerMac::Init() {
if (!RegisterRefreshAndMoveHandlers()) {
return false;
}
-
- CGError err = CGDisplayRegisterReconfigurationCallback(
- ScreenCapturerMac::DisplaysReconfiguredCallback, this);
- if (err != kCGErrorSuccess) {
- LOG(LS_ERROR) << "CGDisplayRegisterReconfigurationCallback " << err;
- return false;
- }
-
+ desktop_config_monitor_->Lock();
+ desktop_config_ = desktop_config_monitor_->desktop_configuration();
+ desktop_config_monitor_->Unlock();
ScreenConfigurationChanged();
return true;
}
@@ -343,20 +395,22 @@ void ScreenCapturerMac::Start(Callback* callback) {
&power_assertion_id_user_);
}
-void ScreenCapturerMac::Capture(
- const DesktopRegion& region_to_capture) {
+void ScreenCapturerMac::Capture(const DesktopRegion& region_to_capture) {
TickTime capture_start_time = TickTime::Now();
queue_.MoveToNextFrame();
- // Wait until the display configuration is stable. If one or more displays
- // are reconfiguring then |display_configuration_capture_event_| will not be
- // set until the reconfiguration completes.
- // TODO(wez): Replace this with an early-exit (See crbug.com/104542).
- if (!display_configuration_capture_event_->Wait(
- kDisplayConfigurationEventTimeoutMs)) {
- LOG_F(LS_ERROR) << "Event wait timed out.";
- abort();
+ desktop_config_monitor_->Lock();
+ MacDesktopConfiguration new_config =
+ desktop_config_monitor_->desktop_configuration();
+ if (!desktop_config_.Equals(new_config)) {
+ desktop_config_ = new_config;
+ // If the display configuration has changed then refresh capturer data
+ // structures. Occasionally, the refresh and move handlers are lost when
+ // the screen mode changes, so re-register them here.
+ UnregisterRefreshAndMoveHandlers();
+ RegisterRefreshAndMoveHandlers();
+ ScreenConfigurationChanged();
}
DesktopRegion region;
@@ -366,7 +420,7 @@ void ScreenCapturerMac::Capture(
// Note that we can't reallocate other buffers at this point, since the caller
// may still be reading from them.
if (!queue_.current_frame())
- queue_.ReplaceCurrentFrame(CreateFrame(desktop_config_));
+ queue_.ReplaceCurrentFrame(CreateFrame());
DesktopFrame* current_frame = queue_.current_frame();
@@ -374,7 +428,10 @@ void ScreenCapturerMac::Capture(
if (IsOSLionOrLater()) {
// Lion requires us to use their new APIs for doing screen capture. These
// APIS currently crash on 10.6.8 if there is no monitor attached.
- CgBlitPostLion(*current_frame, region);
+ if (!CgBlitPostLion(*current_frame, region)) {
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
} else if (cgl_context_) {
flip = true;
if (pixel_buffer_object_.get() != 0) {
@@ -398,7 +455,7 @@ void ScreenCapturerMac::Capture(
// Signal that we are done capturing data from the display framebuffer,
// and accessing display structures.
- display_configuration_capture_event_->Set();
+ desktop_config_monitor_->Unlock();
// Capture the current cursor shape and notify |callback_| if it has changed.
CaptureCursor();
@@ -408,6 +465,10 @@ void ScreenCapturerMac::Capture(
callback_->OnCaptureCompleted(new_frame);
}
+void ScreenCapturerMac::SetExcludedWindow(WindowId window) {
+ excluded_window_ = window;
+}
+
void ScreenCapturerMac::SetMouseShapeObserver(
MouseShapeObserver* mouse_shape_observer) {
assert(!mouse_shape_observer_);
@@ -415,6 +476,47 @@ void ScreenCapturerMac::SetMouseShapeObserver(
mouse_shape_observer_ = mouse_shape_observer;
}
+bool ScreenCapturerMac::GetScreenList(ScreenList* screens) {
+ assert(screens->size() == 0);
+ if (!IsOSLionOrLater()) {
+ // Single monitor capture is not supported on pre OS X 10.7.
+ Screen screen;
+ screen.id = kFullDesktopScreenId;
+ screens->push_back(screen);
+ return true;
+ }
+
+ for (MacDisplayConfigurations::iterator it = desktop_config_.displays.begin();
+ it != desktop_config_.displays.end(); ++it) {
+ Screen screen;
+ screen.id = static_cast<ScreenId>(it->id);
+ screens->push_back(screen);
+ }
+ return true;
+}
+
+bool ScreenCapturerMac::SelectScreen(ScreenId id) {
+ if (!IsOSLionOrLater()) {
+ // Ignore the screen selection on unsupported OS.
+ assert(!current_display_);
+ return id == kFullDesktopScreenId;
+ }
+
+ if (id == kFullDesktopScreenId) {
+ current_display_ = 0;
+ } else {
+ const MacDisplayConfiguration* config =
+ desktop_config_.FindDisplayConfigurationById(
+ static_cast<CGDirectDisplayID>(id));
+ if (!config)
+ return false;
+ current_display_ = config->id;
+ }
+
+ ScreenConfigurationChanged();
+ return true;
+}
+
void ScreenCapturerMac::CaptureCursor() {
if (!mouse_shape_observer_)
return;
@@ -608,7 +710,7 @@ void ScreenCapturerMac::CgBlitPreLion(const DesktopFrame& frame,
}
}
-void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame,
+bool ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame,
const DesktopRegion& region) {
// Copy the entire contents of the previous capture buffer, to capture over.
// TODO(wez): Get rid of this as per crbug.com/145064, or implement
@@ -619,13 +721,40 @@ void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame,
frame.stride() * frame.size().height());
}
- for (size_t i = 0; i < desktop_config_.displays.size(); ++i) {
- const MacDisplayConfiguration& display_config = desktop_config_.displays[i];
+ MacDisplayConfigurations displays_to_capture;
+ if (current_display_) {
+ // Capturing a single screen. Note that the screen id may change when
+ // screens are added or removed.
+ const MacDisplayConfiguration* config =
+ desktop_config_.FindDisplayConfigurationById(current_display_);
+ if (config) {
+ displays_to_capture.push_back(*config);
+ } else {
+ LOG(LS_ERROR) << "The selected screen cannot be found for capturing.";
+ return false;
+ }
+ } else {
+ // Capturing the whole desktop.
+ displays_to_capture = desktop_config_.displays;
+ }
+ // Create the window list once for all displays.
+ CFArrayRef window_list = CreateWindowListWithExclusion(excluded_window_);
+
+ for (size_t i = 0; i < displays_to_capture.size(); ++i) {
+ const MacDisplayConfiguration& display_config = displays_to_capture[i];
+
+ // Capturing mixed-DPI on one surface is hard, so we only return displays
+ // that match the "primary" display's DPI. The primary display is always
+ // the first in the list.
+ if (i > 0 && display_config.dip_to_pixel_scale !=
+ displays_to_capture[0].dip_to_pixel_scale) {
+ continue;
+ }
// Determine the display's position relative to the desktop, in pixels.
DesktopRect display_bounds = display_config.pixel_bounds;
- display_bounds.Translate(-desktop_config_.pixel_bounds.left(),
- -desktop_config_.pixel_bounds.top());
+ display_bounds.Translate(-screen_pixel_bounds_.left(),
+ -screen_pixel_bounds_.top());
// Determine which parts of the blit region, if any, lay within the monitor.
DesktopRegion copy_region = region;
@@ -636,6 +765,26 @@ void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame,
// Translate the region to be copied into display-relative coordinates.
copy_region.Translate(-display_bounds.left(), -display_bounds.top());
+ DesktopRect excluded_window_bounds;
+ CGImageRef excluded_image = NULL;
+ CFDataRef excluded_window_region_data = NULL;
+ if (excluded_window_ && window_list) {
+ // Get the region of the excluded window relative to the primary display.
+ excluded_window_bounds = GetExcludedWindowPixelBounds(
+ excluded_window_, display_config.dip_to_pixel_scale);
+ excluded_window_bounds.IntersectWith(display_config.pixel_bounds);
+
+ // Create the image under the excluded window first, because it's faster
+ // than capturing the whole display.
+ if (!excluded_window_bounds.is_empty()) {
+ excluded_image = CreateExcludedWindowRegionImage(
+ excluded_window_bounds,
+ display_config.dip_to_pixel_scale,
+ window_list,
+ &excluded_window_region_data);
+ }
+ }
+
// Create an image containing a snapshot of the display.
CGImageRef image = CGDisplayCreateImage(display_config.id);
if (image == NULL)
@@ -665,26 +814,58 @@ void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame,
i.rect());
}
+ // Copy the region of the excluded window to the frame.
+ if (excluded_image) {
+ assert(excluded_window_region_data);
+ display_base_address = CFDataGetBytePtr(excluded_window_region_data);
+ src_bytes_per_row = CGImageGetBytesPerRow(excluded_image);
+
+ // Translate the bounds relative to the desktop, because |frame| data
+ // starts from the desktop top-left corner.
+ DesktopRect window_bounds_relative_to_desktop(excluded_window_bounds);
+ window_bounds_relative_to_desktop.Translate(
+ -screen_pixel_bounds_.left(), -screen_pixel_bounds_.top());
+ out_ptr = frame.data() +
+ (window_bounds_relative_to_desktop.left() * src_bytes_per_pixel) +
+ (window_bounds_relative_to_desktop.top() * frame.stride());
+
+ CopyRect(display_base_address,
+ src_bytes_per_row,
+ out_ptr,
+ frame.stride(),
+ src_bytes_per_pixel,
+ DesktopRect::MakeSize(excluded_window_bounds.size()));
+ CFRelease(excluded_window_region_data);
+ CFRelease(excluded_image);
+ }
+
CFRelease(data);
CFRelease(image);
}
+ if (window_list)
+ CFRelease(window_list);
+ return true;
}
void ScreenCapturerMac::ScreenConfigurationChanged() {
+ if (current_display_) {
+ const MacDisplayConfiguration* config =
+ desktop_config_.FindDisplayConfigurationById(current_display_);
+ screen_pixel_bounds_ = config ? config->pixel_bounds : DesktopRect();
+ dip_to_pixel_scale_ = config ? config->dip_to_pixel_scale : 1.0f;
+ } else {
+ screen_pixel_bounds_ = desktop_config_.pixel_bounds;
+ dip_to_pixel_scale_ = desktop_config_.dip_to_pixel_scale;
+ }
+
// Release existing buffers, which will be of the wrong size.
ReleaseBuffers();
// Clear the dirty region, in case the display is down-sizing.
helper_.ClearInvalidRegion();
- // Refresh the cached desktop configuration.
- desktop_config_ = MacDesktopConfiguration::GetCurrent(
- MacDesktopConfiguration::TopLeftOrigin);
-
// Re-mark the entire desktop as dirty.
- helper_.InvalidateScreen(
- DesktopSize(desktop_config_.pixel_bounds.width(),
- desktop_config_.pixel_bounds.height()));
+ helper_.InvalidateScreen(screen_pixel_bounds_.size());
// Make sure the frame buffers will be reallocated.
queue_.Reset();
@@ -765,8 +946,8 @@ void ScreenCapturerMac::ScreenConfigurationChanged() {
(*cgl_set_full_screen_)(cgl_context_);
CGLSetCurrentContext(cgl_context_);
- size_t buffer_size = desktop_config_.pixel_bounds.width() *
- desktop_config_.pixel_bounds.height() *
+ size_t buffer_size = screen_pixel_bounds_.width() *
+ screen_pixel_bounds_.height() *
sizeof(uint32_t);
pixel_buffer_object_.Init(cgl_context_, buffer_size);
}
@@ -798,20 +979,17 @@ void ScreenCapturerMac::UnregisterRefreshAndMoveHandlers() {
void ScreenCapturerMac::ScreenRefresh(CGRectCount count,
const CGRect* rect_array) {
- if (desktop_config_.pixel_bounds.is_empty())
+ if (screen_pixel_bounds_.is_empty())
return;
DesktopRegion region;
-
+ DesktopVector translate_vector =
+ DesktopVector().subtract(screen_pixel_bounds_.top_left());
for (CGRectCount i = 0; i < count; ++i) {
// Convert from Density-Independent Pixel to physical pixel coordinates.
- DesktopRect rect =
- ScaleAndRoundCGRect(rect_array[i], desktop_config_.dip_to_pixel_scale);
-
+ DesktopRect rect = ScaleAndRoundCGRect(rect_array[i], dip_to_pixel_scale_);
// Translate from local desktop to capturer framebuffer coordinates.
- rect.Translate(-desktop_config_.pixel_bounds.left(),
- -desktop_config_.pixel_bounds.top());
-
+ rect.Translate(translate_vector);
region.AddRect(rect);
}
@@ -831,45 +1009,12 @@ void ScreenCapturerMac::ScreenUpdateMove(CGScreenUpdateMoveDelta delta,
ScreenRefresh(count, refresh_rects);
}
-void ScreenCapturerMac::DisplaysReconfigured(
- CGDirectDisplayID display,
- CGDisplayChangeSummaryFlags flags) {
- if (flags & kCGDisplayBeginConfigurationFlag) {
- if (reconfiguring_displays_.empty()) {
- // If this is the first display to start reconfiguring then wait on
- // |display_configuration_capture_event_| to block the capture thread
- // from accessing display memory until the reconfiguration completes.
- if (!display_configuration_capture_event_->Wait(
- kDisplayConfigurationEventTimeoutMs)) {
- LOG_F(LS_ERROR) << "Event wait timed out.";
- abort();
- }
- }
-
- reconfiguring_displays_.insert(display);
- } else {
- reconfiguring_displays_.erase(display);
-
- if (reconfiguring_displays_.empty()) {
- // If no other displays are reconfiguring then refresh capturer data
- // structures and un-block the capturer thread. Occasionally, the
- // refresh and move handlers are lost when the screen mode changes,
- // so re-register them here (the same does not appear to be true for
- // the reconfiguration handler itself).
- UnregisterRefreshAndMoveHandlers();
- RegisterRefreshAndMoveHandlers();
- ScreenConfigurationChanged();
- display_configuration_capture_event_->Set();
- }
- }
-}
-
void ScreenCapturerMac::ScreenRefreshCallback(CGRectCount count,
const CGRect* rect_array,
void* user_parameter) {
ScreenCapturerMac* capturer =
reinterpret_cast<ScreenCapturerMac*>(user_parameter);
- if (capturer->desktop_config_.pixel_bounds.is_empty())
+ if (capturer->screen_pixel_bounds_.is_empty())
capturer->ScreenConfigurationChanged();
capturer->ScreenRefresh(count, rect_array);
}
@@ -884,20 +1029,24 @@ void ScreenCapturerMac::ScreenUpdateMoveCallback(
capturer->ScreenUpdateMove(delta, count, rect_array);
}
-void ScreenCapturerMac::DisplaysReconfiguredCallback(
- CGDirectDisplayID display,
- CGDisplayChangeSummaryFlags flags,
- void* user_parameter) {
- ScreenCapturerMac* capturer =
- reinterpret_cast<ScreenCapturerMac*>(user_parameter);
- capturer->DisplaysReconfigured(display, flags);
+DesktopFrame* ScreenCapturerMac::CreateFrame() {
+ scoped_ptr<DesktopFrame> frame(
+ new BasicDesktopFrame(screen_pixel_bounds_.size()));
+
+ frame->set_dpi(DesktopVector(kStandardDPI * dip_to_pixel_scale_,
+ kStandardDPI * dip_to_pixel_scale_));
+ return frame.release();
}
} // namespace
// static
ScreenCapturer* ScreenCapturer::Create(const DesktopCaptureOptions& options) {
- scoped_ptr<ScreenCapturerMac> capturer(new ScreenCapturerMac());
+ if (!options.configuration_monitor())
+ return NULL;
+
+ scoped_ptr<ScreenCapturerMac> capturer(
+ new ScreenCapturerMac(options.configuration_monitor()));
if (!capturer->Init())
capturer.reset();
return capturer.release();
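Because ScreenCapturerMac now takes its configuration from the shared monitor, Create() returns NULL when the options carry none. A sketch of the calling side, for illustration only; it assumes DesktopCaptureOptions::CreateDefault() wires up a DesktopConfigurationMonitor on OS X, which is not shown in this excerpt.

#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/screen_capturer.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

void CreateMacCapturer() {
  webrtc::DesktopCaptureOptions options =
      webrtc::DesktopCaptureOptions::CreateDefault();
  webrtc::scoped_ptr<webrtc::ScreenCapturer> capturer(
      webrtc::ScreenCapturer::Create(options));
  if (!capturer.get()) {
    // Either Init() failed or no configuration monitor was supplied.
  }
}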
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
index 17673b5cc0e..aa0e808eb69 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
@@ -26,6 +26,8 @@ class MockScreenCapturer : public ScreenCapturer {
MOCK_METHOD1(Capture, void(const DesktopRegion& region));
MOCK_METHOD1(SetMouseShapeObserver, void(
MouseShapeObserver* mouse_shape_observer));
+ MOCK_METHOD1(GetScreenList, bool(ScreenList* screens));
+ MOCK_METHOD1(SelectScreen, bool(ScreenId id));
private:
DISALLOW_COPY_AND_ASSIGN(MockScreenCapturer);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
index b4ae128085a..50ff7a2853a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
@@ -59,6 +59,15 @@ SharedMemory* ScreenCapturerTest::CreateSharedMemory(size_t size) {
return new FakeSharedMemory(new char[size], size);
}
+TEST_F(ScreenCapturerTest, GetScreenListAndSelectScreen) {
+ webrtc::ScreenCapturer::ScreenList screens;
+ EXPECT_TRUE(capturer_->GetScreenList(&screens));
+ for (webrtc::ScreenCapturer::ScreenList::iterator it = screens.begin();
+ it != screens.end(); ++it) {
+ EXPECT_TRUE(capturer_->SelectScreen(it->id));
+ }
+}
+
TEST_F(ScreenCapturerTest, StartCapturer) {
capturer_->SetMouseShapeObserver(&mouse_observer_);
capturer_->Start(&callback_);
@@ -97,7 +106,7 @@ TEST_F(ScreenCapturerTest, Capture) {
delete frame;
}
-#if defined(OS_WIN)
+#if defined(WEBRTC_WIN)
TEST_F(ScreenCapturerTest, UseSharedBuffers) {
DesktopFrame* frame = NULL;
@@ -120,6 +129,20 @@ TEST_F(ScreenCapturerTest, UseSharedBuffers) {
delete frame;
}
-#endif // defined(OS_WIN)
+TEST_F(ScreenCapturerTest, UseMagnifier) {
+ DesktopCaptureOptions options(DesktopCaptureOptions::CreateDefault());
+ options.set_allow_use_magnification_api(true);
+ capturer_.reset(ScreenCapturer::Create(options));
+
+ DesktopFrame* frame = NULL;
+ EXPECT_CALL(callback_, OnCaptureCompleted(_)).WillOnce(SaveArg<0>(&frame));
+
+ capturer_->Start(&callback_);
+ capturer_->Capture(DesktopRegion());
+ ASSERT_TRUE(frame);
+ delete frame;
+}
+
+#endif // defined(WEBRTC_WIN)
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_win.cc
index a9bcd48f9c1..5950795d470 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_win.cc
@@ -10,364 +10,20 @@
#include "webrtc/modules/desktop_capture/screen_capturer.h"
-#include <windows.h>
-
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
-#include "webrtc/modules/desktop_capture/desktop_frame.h"
-#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
-#include "webrtc/modules/desktop_capture/desktop_region.h"
-#include "webrtc/modules/desktop_capture/differ.h"
-#include "webrtc/modules/desktop_capture/mouse_cursor.h"
-#include "webrtc/modules/desktop_capture/mouse_cursor_shape.h"
-#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
-#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
-#include "webrtc/modules/desktop_capture/win/cursor.h"
-#include "webrtc/modules/desktop_capture/win/desktop.h"
-#include "webrtc/modules/desktop_capture/win/scoped_thread_desktop.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/system_wrappers/interface/tick_util.h"
+#include "webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h"
+#include "webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h"
namespace webrtc {
-namespace {
-
-// Constants from dwmapi.h.
-const UINT DWM_EC_DISABLECOMPOSITION = 0;
-const UINT DWM_EC_ENABLECOMPOSITION = 1;
-
-typedef HRESULT (WINAPI * DwmEnableCompositionFunc)(UINT);
-
-const wchar_t kDwmapiLibraryName[] = L"dwmapi.dll";
-
-// ScreenCapturerWin captures 32bit RGB using GDI.
-//
-// ScreenCapturerWin is double-buffered as required by ScreenCapturer.
-class ScreenCapturerWin : public ScreenCapturer {
- public:
- ScreenCapturerWin(const DesktopCaptureOptions& options);
- virtual ~ScreenCapturerWin();
-
- // Overridden from ScreenCapturer:
- virtual void Start(Callback* callback) OVERRIDE;
- virtual void Capture(const DesktopRegion& region) OVERRIDE;
- virtual void SetMouseShapeObserver(
- MouseShapeObserver* mouse_shape_observer) OVERRIDE;
-
- private:
- // Make sure that the device contexts match the screen configuration.
- void PrepareCaptureResources();
-
- // Captures the current screen contents into the current buffer.
- void CaptureImage();
-
- // Capture the current cursor shape.
- void CaptureCursor();
-
- Callback* callback_;
- MouseShapeObserver* mouse_shape_observer_;
-
- // A thread-safe list of invalid rectangles, and the size of the most
- // recently captured screen.
- ScreenCapturerHelper helper_;
-
- // Snapshot of the last cursor bitmap we sent to the client. This is used
- // to diff against the current cursor so we only send a cursor-change
- // message when the shape has changed.
- MouseCursorShape last_cursor_;
-
- ScopedThreadDesktop desktop_;
-
- // GDI resources used for screen capture.
- HDC desktop_dc_;
- HDC memory_dc_;
-
- // Queue of the frames buffers.
- ScreenCaptureFrameQueue queue_;
-
- // Rectangle describing the bounds of the desktop device context.
- DesktopRect desktop_dc_rect_;
-
- // Class to calculate the difference between two screen bitmaps.
- scoped_ptr<Differ> differ_;
-
- HMODULE dwmapi_library_;
- DwmEnableCompositionFunc composition_func_;
-
- // Used to suppress duplicate logging of SetThreadExecutionState errors.
- bool set_thread_execution_state_failed_;
-
- DISALLOW_COPY_AND_ASSIGN(ScreenCapturerWin);
-};
-
-ScreenCapturerWin::ScreenCapturerWin(const DesktopCaptureOptions& options)
- : callback_(NULL),
- mouse_shape_observer_(NULL),
- desktop_dc_(NULL),
- memory_dc_(NULL),
- dwmapi_library_(NULL),
- composition_func_(NULL),
- set_thread_execution_state_failed_(false) {
- if (options.disable_effects()) {
- // Load dwmapi.dll dynamically since it is not available on XP.
- if (!dwmapi_library_)
- dwmapi_library_ = LoadLibrary(kDwmapiLibraryName);
-
- if (dwmapi_library_) {
- composition_func_ = reinterpret_cast<DwmEnableCompositionFunc>(
- GetProcAddress(dwmapi_library_, "DwmEnableComposition"));
- }
- }
-}
-
-ScreenCapturerWin::~ScreenCapturerWin() {
- if (desktop_dc_)
- ReleaseDC(NULL, desktop_dc_);
- if (memory_dc_)
- DeleteDC(memory_dc_);
-
- // Restore Aero.
- if (composition_func_)
- (*composition_func_)(DWM_EC_ENABLECOMPOSITION);
-
- if (dwmapi_library_)
- FreeLibrary(dwmapi_library_);
-}
-
-void ScreenCapturerWin::Capture(const DesktopRegion& region) {
- TickTime capture_start_time = TickTime::Now();
-
- queue_.MoveToNextFrame();
-
- // Request that the system not power-down the system, or the display hardware.
- if (!SetThreadExecutionState(ES_DISPLAY_REQUIRED | ES_SYSTEM_REQUIRED)) {
- if (!set_thread_execution_state_failed_) {
- set_thread_execution_state_failed_ = true;
- LOG_F(LS_WARNING) << "Failed to make system & display power assertion: "
- << GetLastError();
- }
- }
-
- // Make sure the GDI capture resources are up-to-date.
- PrepareCaptureResources();
-
- // Copy screen bits to the current buffer.
- CaptureImage();
-
- const DesktopFrame* current_frame = queue_.current_frame();
- const DesktopFrame* last_frame = queue_.previous_frame();
- if (last_frame) {
- // Make sure the differencer is set up correctly for these previous and
- // current screens.
- if (!differ_.get() ||
- (differ_->width() != current_frame->size().width()) ||
- (differ_->height() != current_frame->size().height()) ||
- (differ_->bytes_per_row() != current_frame->stride())) {
- differ_.reset(new Differ(current_frame->size().width(),
- current_frame->size().height(),
- DesktopFrame::kBytesPerPixel,
- current_frame->stride()));
- }
-
- // Calculate difference between the two last captured frames.
- DesktopRegion region;
- differ_->CalcDirtyRegion(last_frame->data(), current_frame->data(),
- &region);
- helper_.InvalidateRegion(region);
- } else {
- // No previous frame is available. Invalidate the whole screen.
- helper_.InvalidateScreen(current_frame->size());
- }
-
- helper_.set_size_most_recent(current_frame->size());
-
- // Emit the current frame.
- DesktopFrame* frame = queue_.current_frame()->Share();
- frame->set_dpi(DesktopVector(
- GetDeviceCaps(desktop_dc_, LOGPIXELSX),
- GetDeviceCaps(desktop_dc_, LOGPIXELSY)));
- frame->mutable_updated_region()->Clear();
- helper_.TakeInvalidRegion(frame->mutable_updated_region());
- frame->set_capture_time_ms(
- (TickTime::Now() - capture_start_time).Milliseconds());
- callback_->OnCaptureCompleted(frame);
-
- // Check for cursor shape update.
- CaptureCursor();
-}
-
-void ScreenCapturerWin::SetMouseShapeObserver(
- MouseShapeObserver* mouse_shape_observer) {
- assert(!mouse_shape_observer_);
- assert(mouse_shape_observer);
-
- mouse_shape_observer_ = mouse_shape_observer;
-}
-
-void ScreenCapturerWin::Start(Callback* callback) {
- assert(!callback_);
- assert(callback);
-
- callback_ = callback;
-
- // Vote to disable Aero composited desktop effects while capturing. Windows
- // will restore Aero automatically if the process exits. This has no effect
- // under Windows 8 or higher. See crbug.com/124018.
- if (composition_func_)
- (*composition_func_)(DWM_EC_DISABLECOMPOSITION);
-}
-
-void ScreenCapturerWin::PrepareCaptureResources() {
- // Switch to the desktop receiving user input if different from the current
- // one.
- scoped_ptr<Desktop> input_desktop(Desktop::GetInputDesktop());
- if (input_desktop.get() != NULL && !desktop_.IsSame(*input_desktop)) {
- // Release GDI resources otherwise SetThreadDesktop will fail.
- if (desktop_dc_) {
- ReleaseDC(NULL, desktop_dc_);
- desktop_dc_ = NULL;
- }
-
- if (memory_dc_) {
- DeleteDC(memory_dc_);
- memory_dc_ = NULL;
- }
-
- // If SetThreadDesktop() fails, the thread is still assigned a desktop.
- // So we can continue capture screen bits, just from the wrong desktop.
- desktop_.SetThreadDesktop(input_desktop.release());
-
- // Re-assert our vote to disable Aero.
- // See crbug.com/124018 and crbug.com/129906.
- if (composition_func_ != NULL) {
- (*composition_func_)(DWM_EC_DISABLECOMPOSITION);
- }
- }
-
- // If the display bounds have changed then recreate GDI resources.
- // TODO(wez): Also check for pixel format changes.
- DesktopRect screen_rect(DesktopRect::MakeXYWH(
- GetSystemMetrics(SM_XVIRTUALSCREEN),
- GetSystemMetrics(SM_YVIRTUALSCREEN),
- GetSystemMetrics(SM_CXVIRTUALSCREEN),
- GetSystemMetrics(SM_CYVIRTUALSCREEN)));
- if (!screen_rect.equals(desktop_dc_rect_)) {
- if (desktop_dc_) {
- ReleaseDC(NULL, desktop_dc_);
- desktop_dc_ = NULL;
- }
- if (memory_dc_) {
- DeleteDC(memory_dc_);
- memory_dc_ = NULL;
- }
- desktop_dc_rect_ = DesktopRect();
- }
-
- if (desktop_dc_ == NULL) {
- assert(memory_dc_ == NULL);
-
- // Create GDI device contexts to capture from the desktop into memory.
- desktop_dc_ = GetDC(NULL);
- if (!desktop_dc_)
- abort();
- memory_dc_ = CreateCompatibleDC(desktop_dc_);
- if (!memory_dc_)
- abort();
- desktop_dc_rect_ = screen_rect;
-
- // Make sure the frame buffers will be reallocated.
- queue_.Reset();
-
- helper_.ClearInvalidRegion();
- }
-}
-
-void ScreenCapturerWin::CaptureImage() {
- // If the current buffer is from an older generation then allocate a new one.
- // Note that we can't reallocate other buffers at this point, since the caller
- // may still be reading from them.
- if (!queue_.current_frame()) {
- assert(desktop_dc_ != NULL);
- assert(memory_dc_ != NULL);
-
- DesktopSize size = DesktopSize(
- desktop_dc_rect_.width(), desktop_dc_rect_.height());
-
- size_t buffer_size = size.width() * size.height() *
- DesktopFrame::kBytesPerPixel;
- SharedMemory* shared_memory =
- callback_->CreateSharedMemory(buffer_size);
- scoped_ptr<DesktopFrameWin> buffer(
- DesktopFrameWin::Create(size, shared_memory, desktop_dc_));
- queue_.ReplaceCurrentFrame(buffer.release());
- }
-
- // Select the target bitmap into the memory dc and copy the rect from desktop
- // to memory.
- DesktopFrameWin* current = static_cast<DesktopFrameWin*>(
- queue_.current_frame()->GetUnderlyingFrame());
- HGDIOBJ previous_object = SelectObject(memory_dc_, current->bitmap());
- if (previous_object != NULL) {
- BitBlt(memory_dc_,
- 0, 0, desktop_dc_rect_.width(), desktop_dc_rect_.height(),
- desktop_dc_,
- desktop_dc_rect_.left(), desktop_dc_rect_.top(),
- SRCCOPY | CAPTUREBLT);
-
- // Select back the previously selected object to that the device contect
- // could be destroyed independently of the bitmap if needed.
- SelectObject(memory_dc_, previous_object);
- }
-}
-
-void ScreenCapturerWin::CaptureCursor() {
- CURSORINFO cursor_info;
- cursor_info.cbSize = sizeof(CURSORINFO);
- if (!GetCursorInfo(&cursor_info)) {
- LOG_F(LS_ERROR) << "Unable to get cursor info. Error = " << GetLastError();
- return;
- }
-
- // Note that |cursor_info.hCursor| does not need to be freed.
- scoped_ptr<MouseCursor> cursor_image(
- CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor));
- if (!cursor_image.get())
- return;
-
- scoped_ptr<MouseCursorShape> cursor(new MouseCursorShape);
- cursor->hotspot = cursor_image->hotspot();
- cursor->size = cursor_image->image().size();
- uint8_t* current_row = cursor_image->image().data();
- for (int y = 0; y < cursor_image->image().size().height(); ++y) {
- cursor->data.append(current_row,
- current_row + cursor_image->image().size().width() *
- DesktopFrame::kBytesPerPixel);
- current_row += cursor_image->image().stride();
- }
-
- // Compare the current cursor with the last one we sent to the client. If
- // they're the same, then don't bother sending the cursor again.
- if (last_cursor_.size.equals(cursor->size) &&
- last_cursor_.hotspot.equals(cursor->hotspot) &&
- last_cursor_.data == cursor->data) {
- return;
- }
-
- LOG(LS_VERBOSE) << "Sending updated cursor: " << cursor->size.width() << "x"
- << cursor->size.height();
-
- // Record the last cursor image that we sent to the client.
- last_cursor_ = *cursor;
-
- if (mouse_shape_observer_)
- mouse_shape_observer_->OnCursorShapeChanged(cursor.release());
-}
-
-} // namespace
-
// static
ScreenCapturer* ScreenCapturer::Create(const DesktopCaptureOptions& options) {
- return new ScreenCapturerWin(options);
+ scoped_ptr<ScreenCapturer> gdi_capturer(new ScreenCapturerWinGdi(options));
+
+ if (options.allow_use_magnification_api())
+ return new ScreenCapturerWinMagnifier(gdi_capturer.Pass());
+
+ return gdi_capturer.release();
}
} // namespace webrtc
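For orientation, here is a minimal, hedged sketch of how a client might drive the capturer returned by this factory. The Callback methods, Start(), and Capture() follow the code in this patch; DesktopCaptureOptions::CreateDefault() and the set_allow_use_magnification_api() setter are assumed counterparts of the getter used above and may differ in the real API.

// Hedged sketch, not part of the patch: exercise ScreenCapturer::Create()
// with the magnifier path enabled. CreateDefault() and
// set_allow_use_magnification_api() are assumed names.
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/screen_capturer.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

class NullCallback : public webrtc::ScreenCapturer::Callback {
 public:
  virtual webrtc::SharedMemory* CreateSharedMemory(size_t size) {
    return NULL;  // NULL makes the capturer allocate plain heap frames.
  }
  virtual void OnCaptureCompleted(webrtc::DesktopFrame* frame) {
    delete frame;  // Ownership of the frame is passed to the callback.
  }
};

void CaptureOneFrame() {
  webrtc::DesktopCaptureOptions options =
      webrtc::DesktopCaptureOptions::CreateDefault();
  options.set_allow_use_magnification_api(true);  // Assumed setter name.

  webrtc::scoped_ptr<webrtc::ScreenCapturer> capturer(
      webrtc::ScreenCapturer::Create(options));
  NullCallback callback;
  capturer->Start(&callback);
  capturer->Capture(webrtc::DesktopRegion());
}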
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
index c5a4c8cb17a..4d07d98c628 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
@@ -57,6 +57,8 @@ class ScreenCapturerLinux : public ScreenCapturer,
// ScreenCapturer interface.
virtual void SetMouseShapeObserver(
MouseShapeObserver* mouse_shape_observer) OVERRIDE;
+ virtual bool GetScreenList(ScreenList* screens) OVERRIDE;
+ virtual bool SelectScreen(ScreenId id) OVERRIDE;
private:
Display* display() { return options_.x_display()->display(); }
@@ -310,6 +312,20 @@ void ScreenCapturerLinux::SetMouseShapeObserver(
mouse_shape_observer_ = mouse_shape_observer;
}
+bool ScreenCapturerLinux::GetScreenList(ScreenList* screens) {
+ DCHECK(screens->size() == 0);
+ // TODO(jiayl): implement screen enumeration.
+ Screen default_screen;
+ default_screen.id = 0;
+ screens->push_back(default_screen);
+ return true;
+}
+
+bool ScreenCapturerLinux::SelectScreen(ScreenId id) {
+ // TODO(jiayl): implement screen selection.
+ return true;
+}
+
bool ScreenCapturerLinux::HandleXEvent(const XEvent& event) {
if (use_damage_ && (event.type == damage_event_base_ + XDamageNotify)) {
const XDamageNotifyEvent* damage_event =
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h b/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
index bb43b28b1c7..7870d833f15 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
@@ -17,8 +17,8 @@
#include <windows.h>
#endif
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/cursor.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/cursor.cc
index 11bb2dbb6d0..00055c44ad8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/cursor.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/cursor.cc
@@ -137,7 +137,7 @@ MouseCursor* CreateMouseCursorFromHCursor(HDC dc, HCURSOR cursor) {
int width = bitmap_info.bmWidth;
int height = bitmap_info.bmHeight;
- scoped_array<uint32_t> mask_data(new uint32_t[width * height]);
+ scoped_ptr<uint32_t[]> mask_data(new uint32_t[width * height]);
// Get pixel data from |scoped_mask| converting it to 32bpp along the way.
// GetDIBits() sets the alpha component of every pixel to 0.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/cursor_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/cursor_unittest.cc
index f590bd255ce..b046ace315a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/cursor_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/cursor_unittest.cc
@@ -58,15 +58,15 @@ bool ConvertToMouseShapeAndCompare(unsigned left, unsigned right) {
int width = bitmap_info.bmWidth;
int height = bitmap_info.bmHeight;
- EXPECT_TRUE(DesktopSize(width, height).equals(mouse_shape->image().size()));
+ EXPECT_TRUE(DesktopSize(width, height).equals(mouse_shape->image()->size()));
// Get the pixels from |scoped_color|.
int size = width * height;
- scoped_array<uint32_t> data(new uint32_t[size]);
+ scoped_ptr<uint32_t[]> data(new uint32_t[size]);
EXPECT_TRUE(GetBitmapBits(scoped_color, size * sizeof(uint32_t), data.get()));
// Compare the 32bpp image in |mouse_shape| with the one loaded from |right|.
- return memcmp(data.get(), mouse_shape->image().data(),
+ return memcmp(data.get(), mouse_shape->image()->data(),
size * sizeof(uint32_t)) == 0;
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/desktop.h b/chromium/third_party/webrtc/modules/desktop_capture/win/desktop.h
index bdc490c7286..fda56ca8d81 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/desktop.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/desktop.h
@@ -11,10 +11,10 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_DESKTOP_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_DESKTOP_H_
-#include <string>
#include <windows.h>
+#include <string>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_gdi_object.h b/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_gdi_object.h
index 0ca35c526ab..366df6d4ff0 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_gdi_object.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_gdi_object.h
@@ -13,7 +13,7 @@
#include <windows.h>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_thread_desktop.h b/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_thread_desktop.h
index 39514237ea7..f12731d975e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_thread_desktop.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_thread_desktop.h
@@ -13,7 +13,7 @@
#include <windows.h>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.cc
new file mode 100644
index 00000000000..1b335452779
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.cc
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
+
+#include <assert.h>
+#include <windows.h>
+
+namespace webrtc {
+
+bool GetScreenList(ScreenCapturer::ScreenList* screens) {
+ assert(screens->size() == 0);
+
+ BOOL enum_result = TRUE;
+ for (int device_index = 0;; ++device_index) {
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ enum_result = EnumDisplayDevices(NULL, device_index, &device, 0);
+
+ // |enum_result| is 0 if we have enumerated all devices.
+ if (!enum_result)
+ break;
+
+ // We only care about active displays.
+ if (!(device.StateFlags & DISPLAY_DEVICE_ACTIVE))
+ continue;
+
+ ScreenCapturer::Screen screen;
+ screen.id = device_index;
+ screens->push_back(screen);
+ }
+ return true;
+}
+
+bool IsScreenValid(ScreenId screen, std::wstring* device_key) {
+ if (screen == kFullDesktopScreenId) {
+ *device_key = L"";
+ return true;
+ }
+
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ BOOL enum_result = EnumDisplayDevices(NULL, screen, &device, 0);
+ if (enum_result)
+ *device_key = device.DeviceKey;
+
+ return !!enum_result;
+}
+
+DesktopRect GetScreenRect(ScreenId screen, const std::wstring& device_key) {
+ if (screen == kFullDesktopScreenId) {
+ return DesktopRect::MakeXYWH(GetSystemMetrics(SM_XVIRTUALSCREEN),
+ GetSystemMetrics(SM_YVIRTUALSCREEN),
+ GetSystemMetrics(SM_CXVIRTUALSCREEN),
+ GetSystemMetrics(SM_CYVIRTUALSCREEN));
+ }
+
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ BOOL result = EnumDisplayDevices(NULL, screen, &device, 0);
+ if (!result)
+ return DesktopRect();
+
+ // Verifies the device index still maps to the same display device, to make
+ // sure we are capturing the same device when devices are added or removed.
+ // DeviceKey is documented as reserved, but it actually contains the registry
+ // key for the device and is unique for each monitor, while DeviceID is not.
+ if (device_key != device.DeviceKey)
+ return DesktopRect();
+
+ DEVMODE device_mode;
+ device_mode.dmSize = sizeof(device_mode);
+ device_mode.dmDriverExtra = 0;
+ result = EnumDisplaySettingsEx(
+ device.DeviceName, ENUM_CURRENT_SETTINGS, &device_mode, 0);
+ if (!result)
+ return DesktopRect();
+
+ return DesktopRect::MakeXYWH(device_mode.dmPosition.x,
+ device_mode.dmPosition.y,
+ device_mode.dmPelsWidth,
+ device_mode.dmPelsHeight);
+}
+
+} // namespace webrtc
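A small, hedged sketch of how these three helpers are meant to compose, mirroring the way the GDI and magnifier capturers below use them: enumerate screens, validate the chosen id to obtain its device key, then resolve the current bounds. Only the functions declared in screen_capture_utils.h are relied on; the wrapper name is illustrative.

// Hedged sketch, not part of the patch: resolve the bounds of the first
// enumerated screen. Returns false if enumeration or validation fails.
#include <string>
#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"

bool GetFirstScreenBounds(webrtc::DesktopRect* bounds) {
  webrtc::ScreenCapturer::ScreenList screens;
  if (!webrtc::GetScreenList(&screens) || screens.empty())
    return false;

  std::wstring device_key;
  webrtc::ScreenId id = screens[0].id;
  if (!webrtc::IsScreenValid(id, &device_key))
    return false;  // The display may have been removed since enumeration.

  *bounds = webrtc::GetScreenRect(id, device_key);
  return !bounds->is_empty();  // Empty means the device changed or an error.
}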
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.h b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.h
new file mode 100644
index 00000000000..42473e047b3
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURE_UTILS_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURE_UTILS_H_
+
+#include "webrtc/modules/desktop_capture/screen_capturer.h"
+
+namespace webrtc {
+
+// Output the list of active screens into |screens|. Returns true if succeeded,
+// or false if it fails to enumerate the display devices.
+bool GetScreenList(ScreenCapturer::ScreenList* screens);
+
+// Returns true if |screen| is a valid screen. The screen device key is
+// returned through |device_key| if the screen is valid. The device key can be
+// used in GetScreenRect to verify the screen matches the previously obtained
+// id.
+bool IsScreenValid(ScreenId screen, std::wstring* device_key);
+
+// Get the rect of the screen identified by |screen|, relative to the primary
+// display's top-left. If the screen device key does not match |device_key|, or
+// the screen does not exist, or any error happens, an empty rect is returned.
+DesktopRect GetScreenRect(ScreenId screen, const std::wstring& device_key);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURE_UTILS_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
new file mode 100644
index 00000000000..9cb3681fd2c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
@@ -0,0 +1,324 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
+#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
+#include "webrtc/modules/desktop_capture/desktop_region.h"
+#include "webrtc/modules/desktop_capture/differ.h"
+#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include "webrtc/modules/desktop_capture/win/cursor.h"
+#include "webrtc/modules/desktop_capture/win/desktop.h"
+#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+#include "webrtc/system_wrappers/interface/tick_util.h"
+
+namespace webrtc {
+
+namespace {
+
+// Constants from dwmapi.h.
+const UINT DWM_EC_DISABLECOMPOSITION = 0;
+const UINT DWM_EC_ENABLECOMPOSITION = 1;
+
+const wchar_t kDwmapiLibraryName[] = L"dwmapi.dll";
+
+} // namespace
+
+ScreenCapturerWinGdi::ScreenCapturerWinGdi(const DesktopCaptureOptions& options)
+ : callback_(NULL),
+ mouse_shape_observer_(NULL),
+ current_screen_id_(kFullDesktopScreenId),
+ desktop_dc_(NULL),
+ memory_dc_(NULL),
+ dwmapi_library_(NULL),
+ composition_func_(NULL),
+ set_thread_execution_state_failed_(false) {
+ if (options.disable_effects()) {
+ // Load dwmapi.dll dynamically since it is not available on XP.
+ if (!dwmapi_library_)
+ dwmapi_library_ = LoadLibrary(kDwmapiLibraryName);
+
+ if (dwmapi_library_) {
+ composition_func_ = reinterpret_cast<DwmEnableCompositionFunc>(
+ GetProcAddress(dwmapi_library_, "DwmEnableComposition"));
+ }
+ }
+}
+
+ScreenCapturerWinGdi::~ScreenCapturerWinGdi() {
+ if (desktop_dc_)
+ ReleaseDC(NULL, desktop_dc_);
+ if (memory_dc_)
+ DeleteDC(memory_dc_);
+
+ // Restore Aero.
+ if (composition_func_)
+ (*composition_func_)(DWM_EC_ENABLECOMPOSITION);
+
+ if (dwmapi_library_)
+ FreeLibrary(dwmapi_library_);
+}
+
+void ScreenCapturerWinGdi::Capture(const DesktopRegion& region) {
+ TickTime capture_start_time = TickTime::Now();
+
+ queue_.MoveToNextFrame();
+
+ // Request that the system not power down the machine or the display hardware.
+ if (!SetThreadExecutionState(ES_DISPLAY_REQUIRED | ES_SYSTEM_REQUIRED)) {
+ if (!set_thread_execution_state_failed_) {
+ set_thread_execution_state_failed_ = true;
+ LOG_F(LS_WARNING) << "Failed to make system & display power assertion: "
+ << GetLastError();
+ }
+ }
+
+ // Make sure the GDI capture resources are up-to-date.
+ PrepareCaptureResources();
+
+ if (!CaptureImage()) {
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
+
+ const DesktopFrame* current_frame = queue_.current_frame();
+ const DesktopFrame* last_frame = queue_.previous_frame();
+ if (last_frame && last_frame->size().equals(current_frame->size())) {
+ // Make sure the differencer is set up correctly for these previous and
+ // current screens.
+ if (!differ_.get() ||
+ (differ_->width() != current_frame->size().width()) ||
+ (differ_->height() != current_frame->size().height()) ||
+ (differ_->bytes_per_row() != current_frame->stride())) {
+ differ_.reset(new Differ(current_frame->size().width(),
+ current_frame->size().height(),
+ DesktopFrame::kBytesPerPixel,
+ current_frame->stride()));
+ }
+
+ // Calculate difference between the two last captured frames.
+ DesktopRegion region;
+ differ_->CalcDirtyRegion(last_frame->data(), current_frame->data(),
+ &region);
+ helper_.InvalidateRegion(region);
+ } else {
+ // No previous frame is available, or the screen is resized. Invalidate the
+ // whole screen.
+ helper_.InvalidateScreen(current_frame->size());
+ }
+
+ helper_.set_size_most_recent(current_frame->size());
+
+ // Emit the current frame.
+ DesktopFrame* frame = queue_.current_frame()->Share();
+ frame->set_dpi(DesktopVector(
+ GetDeviceCaps(desktop_dc_, LOGPIXELSX),
+ GetDeviceCaps(desktop_dc_, LOGPIXELSY)));
+ frame->mutable_updated_region()->Clear();
+ helper_.TakeInvalidRegion(frame->mutable_updated_region());
+ frame->set_capture_time_ms(
+ (TickTime::Now() - capture_start_time).Milliseconds());
+ callback_->OnCaptureCompleted(frame);
+
+ // Check for cursor shape update.
+ CaptureCursor();
+}
+
+void ScreenCapturerWinGdi::SetMouseShapeObserver(
+ MouseShapeObserver* mouse_shape_observer) {
+ assert(!mouse_shape_observer_);
+ assert(mouse_shape_observer);
+
+ mouse_shape_observer_ = mouse_shape_observer;
+}
+
+bool ScreenCapturerWinGdi::GetScreenList(ScreenList* screens) {
+ return webrtc::GetScreenList(screens);
+}
+
+bool ScreenCapturerWinGdi::SelectScreen(ScreenId id) {
+ bool valid = IsScreenValid(id, &current_device_key_);
+ if (valid)
+ current_screen_id_ = id;
+ return valid;
+}
+
+void ScreenCapturerWinGdi::Start(Callback* callback) {
+ assert(!callback_);
+ assert(callback);
+
+ callback_ = callback;
+
+ // Vote to disable Aero composited desktop effects while capturing. Windows
+ // will restore Aero automatically if the process exits. This has no effect
+ // under Windows 8 or higher. See crbug.com/124018.
+ if (composition_func_)
+ (*composition_func_)(DWM_EC_DISABLECOMPOSITION);
+}
+
+void ScreenCapturerWinGdi::PrepareCaptureResources() {
+ // Switch to the desktop receiving user input if different from the current
+ // one.
+ scoped_ptr<Desktop> input_desktop(Desktop::GetInputDesktop());
+ if (input_desktop.get() != NULL && !desktop_.IsSame(*input_desktop)) {
+ // Release GDI resources otherwise SetThreadDesktop will fail.
+ if (desktop_dc_) {
+ ReleaseDC(NULL, desktop_dc_);
+ desktop_dc_ = NULL;
+ }
+
+ if (memory_dc_) {
+ DeleteDC(memory_dc_);
+ memory_dc_ = NULL;
+ }
+
+ // If SetThreadDesktop() fails, the thread is still assigned a desktop.
+ // So we can continue capture screen bits, just from the wrong desktop.
+ desktop_.SetThreadDesktop(input_desktop.release());
+
+ // Re-assert our vote to disable Aero.
+ // See crbug.com/124018 and crbug.com/129906.
+ if (composition_func_ != NULL) {
+ (*composition_func_)(DWM_EC_DISABLECOMPOSITION);
+ }
+ }
+
+ // If the display bounds have changed then recreate GDI resources.
+ // TODO(wez): Also check for pixel format changes.
+ DesktopRect screen_rect(DesktopRect::MakeXYWH(
+ GetSystemMetrics(SM_XVIRTUALSCREEN),
+ GetSystemMetrics(SM_YVIRTUALSCREEN),
+ GetSystemMetrics(SM_CXVIRTUALSCREEN),
+ GetSystemMetrics(SM_CYVIRTUALSCREEN)));
+ if (!screen_rect.equals(desktop_dc_rect_)) {
+ if (desktop_dc_) {
+ ReleaseDC(NULL, desktop_dc_);
+ desktop_dc_ = NULL;
+ }
+ if (memory_dc_) {
+ DeleteDC(memory_dc_);
+ memory_dc_ = NULL;
+ }
+ desktop_dc_rect_ = DesktopRect();
+ }
+
+ if (desktop_dc_ == NULL) {
+ assert(memory_dc_ == NULL);
+
+ // Create GDI device contexts to capture from the desktop into memory.
+ desktop_dc_ = GetDC(NULL);
+ if (!desktop_dc_)
+ abort();
+ memory_dc_ = CreateCompatibleDC(desktop_dc_);
+ if (!memory_dc_)
+ abort();
+
+ desktop_dc_rect_ = screen_rect;
+
+ // Make sure the frame buffers will be reallocated.
+ queue_.Reset();
+
+ helper_.ClearInvalidRegion();
+ }
+}
+
+bool ScreenCapturerWinGdi::CaptureImage() {
+ DesktopRect screen_rect =
+ GetScreenRect(current_screen_id_, current_device_key_);
+ if (screen_rect.is_empty())
+ return false;
+
+ DesktopSize size = screen_rect.size();
+ // If the current buffer is from an older generation then allocate a new one.
+ // Note that we can't reallocate other buffers at this point, since the caller
+ // may still be reading from them.
+ if (!queue_.current_frame() ||
+ !queue_.current_frame()->size().equals(screen_rect.size())) {
+ assert(desktop_dc_ != NULL);
+ assert(memory_dc_ != NULL);
+
+ size_t buffer_size = size.width() * size.height() *
+ DesktopFrame::kBytesPerPixel;
+ SharedMemory* shared_memory = callback_->CreateSharedMemory(buffer_size);
+
+ scoped_ptr<DesktopFrame> buffer;
+ buffer.reset(
+ DesktopFrameWin::Create(size, shared_memory, desktop_dc_));
+ queue_.ReplaceCurrentFrame(buffer.release());
+ }
+
+ // Select the target bitmap into the memory dc and copy the rect from desktop
+ // to memory.
+ DesktopFrameWin* current = static_cast<DesktopFrameWin*>(
+ queue_.current_frame()->GetUnderlyingFrame());
+ HGDIOBJ previous_object = SelectObject(memory_dc_, current->bitmap());
+ if (previous_object != NULL) {
+ BitBlt(memory_dc_,
+ 0, 0, screen_rect.width(), screen_rect.height(),
+ desktop_dc_,
+ screen_rect.left(), screen_rect.top(),
+ SRCCOPY | CAPTUREBLT);
+
+ // Select back the previously selected object so that the device context
+ // can be destroyed independently of the bitmap if needed.
+ SelectObject(memory_dc_, previous_object);
+ }
+ return true;
+}
+
+void ScreenCapturerWinGdi::CaptureCursor() {
+ CURSORINFO cursor_info;
+ cursor_info.cbSize = sizeof(CURSORINFO);
+ if (!GetCursorInfo(&cursor_info)) {
+ LOG_F(LS_ERROR) << "Unable to get cursor info. Error = " << GetLastError();
+ return;
+ }
+
+ // Note that |cursor_info.hCursor| does not need to be freed.
+ scoped_ptr<MouseCursor> cursor_image(
+ CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor));
+ if (!cursor_image.get())
+ return;
+
+ scoped_ptr<MouseCursorShape> cursor(new MouseCursorShape);
+ cursor->hotspot = cursor_image->hotspot();
+ cursor->size = cursor_image->image()->size();
+ uint8_t* current_row = cursor_image->image()->data();
+ for (int y = 0; y < cursor_image->image()->size().height(); ++y) {
+ cursor->data.append(current_row,
+ current_row + cursor_image->image()->size().width() *
+ DesktopFrame::kBytesPerPixel);
+ current_row += cursor_image->image()->stride();
+ }
+
+ // Compare the current cursor with the last one we sent to the client. If
+ // they're the same, then don't bother sending the cursor again.
+ if (last_cursor_.size.equals(cursor->size) &&
+ last_cursor_.hotspot.equals(cursor->hotspot) &&
+ last_cursor_.data == cursor->data) {
+ return;
+ }
+
+ LOG(LS_VERBOSE) << "Sending updated cursor: " << cursor->size.width() << "x"
+ << cursor->size.height();
+
+ // Record the last cursor image that we sent to the client.
+ last_cursor_ = *cursor;
+
+ if (mouse_shape_observer_)
+ mouse_shape_observer_->OnCursorShapeChanged(cursor.release());
+}
+
+} // namespace webrtc
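For reference, a hedged sketch of how a caller might use the screen-selection overrides added in this file; only GetScreenList(), SelectScreen(), and the kFullDesktopScreenId constant visible in this patch are assumed, and the helper name is illustrative.

// Hedged sketch, not part of the patch: select a preferred screen, falling
// back to the full desktop if the id is unknown or rejected.
bool SelectScreenOrFullDesktop(webrtc::ScreenCapturer* capturer,
                               webrtc::ScreenId preferred_id) {
  webrtc::ScreenCapturer::ScreenList screens;
  if (capturer->GetScreenList(&screens)) {
    for (size_t i = 0; i < screens.size(); ++i) {
      if (screens[i].id == preferred_id)
        return capturer->SelectScreen(preferred_id);
    }
  }
  // kFullDesktopScreenId is always accepted by IsScreenValid() above.
  return capturer->SelectScreen(webrtc::kFullDesktopScreenId);
}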
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
new file mode 100644
index 00000000000..2db87d097ca
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_GDI_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_GDI_H_
+
+#include "webrtc/modules/desktop_capture/screen_capturer.h"
+
+#include <windows.h>
+
+#include "webrtc/modules/desktop_capture/mouse_cursor_shape.h"
+#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
+#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/win/scoped_thread_desktop.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class Differ;
+class MouseShapeObserver;
+
+// ScreenCapturerWinGdi captures 32bit RGB using GDI.
+//
+// ScreenCapturerWinGdi is double-buffered as required by ScreenCapturer.
+class ScreenCapturerWinGdi : public ScreenCapturer {
+ public:
+ explicit ScreenCapturerWinGdi(const DesktopCaptureOptions& options);
+ virtual ~ScreenCapturerWinGdi();
+
+ // Overridden from ScreenCapturer:
+ virtual void Start(Callback* callback) OVERRIDE;
+ virtual void Capture(const DesktopRegion& region) OVERRIDE;
+ virtual void SetMouseShapeObserver(
+ MouseShapeObserver* mouse_shape_observer) OVERRIDE;
+ virtual bool GetScreenList(ScreenList* screens) OVERRIDE;
+ virtual bool SelectScreen(ScreenId id) OVERRIDE;
+
+ private:
+ typedef HRESULT (WINAPI * DwmEnableCompositionFunc)(UINT);
+
+ // Make sure that the device contexts match the screen configuration.
+ void PrepareCaptureResources();
+
+ // Captures the current screen contents into the current buffer. Returns true
+ // if succeeded.
+ bool CaptureImage();
+
+ // Capture the current cursor shape.
+ void CaptureCursor();
+
+ Callback* callback_;
+ MouseShapeObserver* mouse_shape_observer_;
+ ScreenId current_screen_id_;
+ std::wstring current_device_key_;
+
+ // A thread-safe list of invalid rectangles, and the size of the most
+ // recently captured screen.
+ ScreenCapturerHelper helper_;
+
+ // Snapshot of the last cursor bitmap we sent to the client. This is used
+ // to diff against the current cursor so we only send a cursor-change
+ // message when the shape has changed.
+ MouseCursorShape last_cursor_;
+
+ ScopedThreadDesktop desktop_;
+
+ // GDI resources used for screen capture.
+ HDC desktop_dc_;
+ HDC memory_dc_;
+
+ // Queue of the frames buffers.
+ ScreenCaptureFrameQueue queue_;
+
+ // Rectangle describing the bounds of the desktop device context, relative to
+ // the primary display's top-left.
+ DesktopRect desktop_dc_rect_;
+
+ // Class to calculate the difference between two screen bitmaps.
+ scoped_ptr<Differ> differ_;
+
+ HMODULE dwmapi_library_;
+ DwmEnableCompositionFunc composition_func_;
+
+ // Used to suppress duplicate logging of SetThreadExecutionState errors.
+ bool set_thread_execution_state_failed_;
+
+ DISALLOW_COPY_AND_ASSIGN(ScreenCapturerWinGdi);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_GDI_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
new file mode 100644
index 00000000000..042cb937acc
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
@@ -0,0 +1,461 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
+#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
+#include "webrtc/modules/desktop_capture/desktop_region.h"
+#include "webrtc/modules/desktop_capture/differ.h"
+#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include "webrtc/modules/desktop_capture/win/cursor.h"
+#include "webrtc/modules/desktop_capture/win/desktop.h"
+#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+#include "webrtc/system_wrappers/interface/tick_util.h"
+
+namespace webrtc {
+
+// kMagnifierWindowClass has to be "Magnifier" according to the Magnification
+// API. The other strings can be anything.
+static LPCTSTR kMagnifierHostClass = L"ScreenCapturerWinMagnifierHost";
+static LPCTSTR kHostWindowName = L"MagnifierHost";
+static LPCTSTR kMagnifierWindowClass = L"Magnifier";
+static LPCTSTR kMagnifierWindowName = L"MagnifierWindow";
+
+Atomic32 ScreenCapturerWinMagnifier::tls_index_(TLS_OUT_OF_INDEXES);
+
+ScreenCapturerWinMagnifier::ScreenCapturerWinMagnifier(
+ scoped_ptr<ScreenCapturer> fallback_capturer)
+ : fallback_capturer_(fallback_capturer.Pass()),
+ fallback_capturer_started_(false),
+ callback_(NULL),
+ current_screen_id_(kFullDesktopScreenId),
+ excluded_window_(NULL),
+ set_thread_execution_state_failed_(false),
+ desktop_dc_(NULL),
+ mag_lib_handle_(NULL),
+ mag_initialize_func_(NULL),
+ mag_uninitialize_func_(NULL),
+ set_window_source_func_(NULL),
+ set_window_filter_list_func_(NULL),
+ set_image_scaling_callback_func_(NULL),
+ host_window_(NULL),
+ magnifier_window_(NULL),
+ magnifier_initialized_(false),
+ magnifier_capture_succeeded_(true) {
+}
+
+ScreenCapturerWinMagnifier::~ScreenCapturerWinMagnifier() {
+ // DestroyWindow must be called before MagUninitialize. magnifier_window_ is
+ // destroyed automatically when host_window_ is destroyed.
+ if (host_window_)
+ DestroyWindow(host_window_);
+
+ if (magnifier_initialized_)
+ mag_uninitialize_func_();
+
+ if (mag_lib_handle_)
+ FreeLibrary(mag_lib_handle_);
+
+ if (desktop_dc_)
+ ReleaseDC(NULL, desktop_dc_);
+}
+
+void ScreenCapturerWinMagnifier::Start(Callback* callback) {
+ assert(!callback_);
+ assert(callback);
+ callback_ = callback;
+
+ InitializeMagnifier();
+}
+
+void ScreenCapturerWinMagnifier::Capture(const DesktopRegion& region) {
+ TickTime capture_start_time = TickTime::Now();
+
+ queue_.MoveToNextFrame();
+
+ // Request that the system not power down the machine or the display hardware.
+ if (!SetThreadExecutionState(ES_DISPLAY_REQUIRED | ES_SYSTEM_REQUIRED)) {
+ if (!set_thread_execution_state_failed_) {
+ set_thread_execution_state_failed_ = true;
+ LOG_F(LS_WARNING) << "Failed to make system & display power assertion: "
+ << GetLastError();
+ }
+ }
+ // Switch to the desktop receiving user input if different from the current
+ // one.
+ scoped_ptr<Desktop> input_desktop(Desktop::GetInputDesktop());
+ if (input_desktop.get() != NULL && !desktop_.IsSame(*input_desktop)) {
+ // Release GDI resources otherwise SetThreadDesktop will fail.
+ if (desktop_dc_) {
+ ReleaseDC(NULL, desktop_dc_);
+ desktop_dc_ = NULL;
+ }
+ // If SetThreadDesktop() fails, the thread is still assigned a desktop.
+ // So we can continue capturing screen bits, just from the wrong desktop.
+ desktop_.SetThreadDesktop(input_desktop.release());
+ }
+
+ bool succeeded = false;
+
+ // Do not try to use the magnifier if it is capturing a non-primary screen
+ // or if it has failed before.
+ if (magnifier_initialized_ && IsCapturingPrimaryScreenOnly() &&
+ magnifier_capture_succeeded_) {
+ DesktopRect rect = GetScreenRect(current_screen_id_, current_device_key_);
+ CreateCurrentFrameIfNecessary(rect.size());
+
+ // CaptureImage may fail in some situations, e.g. Windows 8 Metro mode.
+ succeeded = CaptureImage(rect);
+ }
+
+ // Defer to the fallback capturer if magnifier capturer did not work.
+ if (!succeeded) {
+ LOG_F(LS_WARNING) << "Switching to the fallback screen capturer.";
+ StartFallbackCapturer();
+ fallback_capturer_->Capture(region);
+ return;
+ }
+
+ const DesktopFrame* current_frame = queue_.current_frame();
+ const DesktopFrame* last_frame = queue_.previous_frame();
+ if (last_frame && last_frame->size().equals(current_frame->size())) {
+ // Make sure the differencer is set up correctly for these previous and
+ // current screens.
+ if (!differ_.get() || (differ_->width() != current_frame->size().width()) ||
+ (differ_->height() != current_frame->size().height()) ||
+ (differ_->bytes_per_row() != current_frame->stride())) {
+ differ_.reset(new Differ(current_frame->size().width(),
+ current_frame->size().height(),
+ DesktopFrame::kBytesPerPixel,
+ current_frame->stride()));
+ }
+
+ // Calculate difference between the two last captured frames.
+ DesktopRegion region;
+ differ_->CalcDirtyRegion(
+ last_frame->data(), current_frame->data(), &region);
+ helper_.InvalidateRegion(region);
+ } else {
+ // No previous frame is available, or the screen is resized. Invalidate the
+ // whole screen.
+ helper_.InvalidateScreen(current_frame->size());
+ }
+
+ helper_.set_size_most_recent(current_frame->size());
+
+ // Emit the current frame.
+ DesktopFrame* frame = queue_.current_frame()->Share();
+ frame->set_dpi(DesktopVector(GetDeviceCaps(desktop_dc_, LOGPIXELSX),
+ GetDeviceCaps(desktop_dc_, LOGPIXELSY)));
+ frame->mutable_updated_region()->Clear();
+ helper_.TakeInvalidRegion(frame->mutable_updated_region());
+ frame->set_capture_time_ms(
+ (TickTime::Now() - capture_start_time).Milliseconds());
+ callback_->OnCaptureCompleted(frame);
+}
+
+void ScreenCapturerWinMagnifier::SetMouseShapeObserver(
+ MouseShapeObserver* mouse_shape_observer) {
+ assert(false); // NOTREACHED();
+}
+
+bool ScreenCapturerWinMagnifier::GetScreenList(ScreenList* screens) {
+ return webrtc::GetScreenList(screens);
+}
+
+bool ScreenCapturerWinMagnifier::SelectScreen(ScreenId id) {
+ bool valid = IsScreenValid(id, &current_device_key_);
+
+ // Set current_screen_id_ even if the fallback capturer is being used, so we
+ // can switch back to the magnifier when possible.
+ if (valid)
+ current_screen_id_ = id;
+
+ if (fallback_capturer_started_)
+ fallback_capturer_->SelectScreen(id);
+
+ return valid;
+}
+
+void ScreenCapturerWinMagnifier::SetExcludedWindow(WindowId excluded_window) {
+ excluded_window_ = (HWND)excluded_window;
+ if (excluded_window_ && magnifier_initialized_) {
+ set_window_filter_list_func_(
+ magnifier_window_, MW_FILTERMODE_EXCLUDE, 1, &excluded_window_);
+ }
+}
+
+bool ScreenCapturerWinMagnifier::CaptureImage(const DesktopRect& rect) {
+ assert(magnifier_initialized_);
+
+ // Set the magnifier control to cover the captured rect. The content of the
+ // magnifier control will be the captured image.
+ BOOL result = SetWindowPos(magnifier_window_,
+ NULL,
+ rect.left(), rect.top(),
+ rect.width(), rect.height(),
+ 0);
+ if (!result) {
+ LOG_F(LS_WARNING) << "Failed to call SetWindowPos: " << GetLastError()
+ << ". Rect = {" << rect.left() << ", " << rect.top()
+ << ", " << rect.right() << ", " << rect.bottom() << "}";
+ return false;
+ }
+
+ magnifier_capture_succeeded_ = false;
+
+ RECT native_rect = {rect.left(), rect.top(), rect.right(), rect.bottom()};
+
+ // OnCaptured will be called via OnMagImageScalingCallback and fill in the
+ // frame before set_window_source_func_ returns.
+ result = set_window_source_func_(magnifier_window_, native_rect);
+
+ if (!result) {
+ LOG_F(LS_WARNING) << "Failed to call MagSetWindowSource: " << GetLastError()
+ << ". Rect = {" << rect.left() << ", " << rect.top()
+ << ", " << rect.right() << ", " << rect.bottom() << "}";
+ return false;
+ }
+
+ return magnifier_capture_succeeded_;
+}
+
+BOOL ScreenCapturerWinMagnifier::OnMagImageScalingCallback(
+ HWND hwnd,
+ void* srcdata,
+ MAGIMAGEHEADER srcheader,
+ void* destdata,
+ MAGIMAGEHEADER destheader,
+ RECT unclipped,
+ RECT clipped,
+ HRGN dirty) {
+ assert(tls_index_.Value() != TLS_OUT_OF_INDEXES);
+
+ ScreenCapturerWinMagnifier* owner =
+ reinterpret_cast<ScreenCapturerWinMagnifier*>(
+ TlsGetValue(tls_index_.Value()));
+
+ owner->OnCaptured(srcdata, srcheader);
+
+ return TRUE;
+}
+
+bool ScreenCapturerWinMagnifier::InitializeMagnifier() {
+ assert(!magnifier_initialized_);
+
+ desktop_dc_ = GetDC(NULL);
+
+ mag_lib_handle_ = LoadLibrary(L"Magnification.dll");
+ if (!mag_lib_handle_)
+ return false;
+
+ // Initialize Magnification API function pointers.
+ mag_initialize_func_ = reinterpret_cast<MagInitializeFunc>(
+ GetProcAddress(mag_lib_handle_, "MagInitialize"));
+ mag_uninitialize_func_ = reinterpret_cast<MagUninitializeFunc>(
+ GetProcAddress(mag_lib_handle_, "MagUninitialize"));
+ set_window_source_func_ = reinterpret_cast<MagSetWindowSourceFunc>(
+ GetProcAddress(mag_lib_handle_, "MagSetWindowSource"));
+ set_window_filter_list_func_ = reinterpret_cast<MagSetWindowFilterListFunc>(
+ GetProcAddress(mag_lib_handle_, "MagSetWindowFilterList"));
+ set_image_scaling_callback_func_ =
+ reinterpret_cast<MagSetImageScalingCallbackFunc>(
+ GetProcAddress(mag_lib_handle_, "MagSetImageScalingCallback"));
+
+ if (!mag_initialize_func_ || !mag_uninitialize_func_ ||
+ !set_window_source_func_ || !set_window_filter_list_func_ ||
+ !set_image_scaling_callback_func_) {
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "library functions missing.";
+ return false;
+ }
+
+ BOOL result = mag_initialize_func_();
+ if (!result) {
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from MagInitialize " << GetLastError();
+ return false;
+ }
+
+ HMODULE hInstance = NULL;
+ result = GetModuleHandleExA(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS |
+ GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT,
+ reinterpret_cast<char*>(&DefWindowProc),
+ &hInstance);
+ if (!result) {
+ mag_uninitialize_func_();
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from GetModulehandleExA " << GetLastError();
+ return false;
+ }
+
+ // Register the host window class. See the MSDN documentation of the
+ // Magnification API for more information.
+ WNDCLASSEX wcex = {};
+ wcex.cbSize = sizeof(WNDCLASSEX);
+ wcex.lpfnWndProc = &DefWindowProc;
+ wcex.hInstance = hInstance;
+ wcex.hCursor = LoadCursor(NULL, IDC_ARROW);
+ wcex.lpszClassName = kMagnifierHostClass;
+
+ // Ignore the error which may happen when the class is already registered.
+ RegisterClassEx(&wcex);
+
+ // Create the host window.
+ host_window_ = CreateWindowEx(WS_EX_LAYERED,
+ kMagnifierHostClass,
+ kHostWindowName,
+ 0,
+ 0, 0, 0, 0,
+ NULL,
+ NULL,
+ hInstance,
+ NULL);
+ if (!host_window_) {
+ mag_uninitialize_func_();
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from creating host window " << GetLastError();
+ return false;
+ }
+
+ // Create the magnifier control.
+ magnifier_window_ = CreateWindow(kMagnifierWindowClass,
+ kMagnifierWindowName,
+ WS_CHILD | WS_VISIBLE,
+ 0, 0, 0, 0,
+ host_window_,
+ NULL,
+ hInstance,
+ NULL);
+ if (!magnifier_window_) {
+ mag_uninitialize_func_();
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from creating magnifier window "
+ << GetLastError();
+ return false;
+ }
+
+ // Hide the host window.
+ ShowWindow(host_window_, SW_HIDE);
+
+ // Set the scaling callback to receive captured image.
+ result = set_image_scaling_callback_func_(
+ magnifier_window_,
+ &ScreenCapturerWinMagnifier::OnMagImageScalingCallback);
+ if (!result) {
+ mag_uninitialize_func_();
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from MagSetImageScalingCallback "
+ << GetLastError();
+ return false;
+ }
+
+ if (excluded_window_) {
+ result = set_window_filter_list_func_(
+ magnifier_window_, MW_FILTERMODE_EXCLUDE, 1, &excluded_window_);
+ if (!result) {
+ mag_uninitialize_func_();
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from MagSetWindowFilterList "
+ << GetLastError();
+ return false;
+ }
+ }
+
+ if (tls_index_.Value() == TLS_OUT_OF_INDEXES) {
+ // More than one thread may get here at the same time, but only one will
+ // write to tls_index_ using CompareExchange.
+ DWORD new_tls_index = TlsAlloc();
+ if (!tls_index_.CompareExchange(new_tls_index, TLS_OUT_OF_INDEXES))
+ TlsFree(new_tls_index);
+ }
+
+ assert(tls_index_.Value() != TLS_OUT_OF_INDEXES);
+ TlsSetValue(tls_index_.Value(), this);
+
+ magnifier_initialized_ = true;
+ return true;
+}
+
+void ScreenCapturerWinMagnifier::OnCaptured(void* data,
+ const MAGIMAGEHEADER& header) {
+ DesktopFrame* current_frame = queue_.current_frame();
+
+ // Verify the format.
+ // TODO(jiayl): support capturing sources with pixel formats other than RGBA.
+ int captured_bytes_per_pixel = header.cbSize / header.width / header.height;
+ if (header.format != GUID_WICPixelFormat32bppRGBA ||
+ header.width != static_cast<UINT>(current_frame->size().width()) ||
+ header.height != static_cast<UINT>(current_frame->size().height()) ||
+ header.stride != static_cast<UINT>(current_frame->stride()) ||
+ captured_bytes_per_pixel != DesktopFrame::kBytesPerPixel) {
+ LOG_F(LS_WARNING) << "Output format does not match the captured format: "
+ << "width = " << header.width << ", "
+ << "height = " << header.height << ", "
+ << "stride = " << header.stride << ", "
+ << "bpp = " << captured_bytes_per_pixel << ", "
+ << "pixel format RGBA ? "
+ << (header.format == GUID_WICPixelFormat32bppRGBA) << ".";
+ return;
+ }
+
+ // Copy the data into the frame.
+ current_frame->CopyPixelsFrom(
+ reinterpret_cast<uint8_t*>(data),
+ header.stride,
+ DesktopRect::MakeXYWH(0, 0, header.width, header.height));
+
+ magnifier_capture_succeeded_ = true;
+}
+
+void ScreenCapturerWinMagnifier::CreateCurrentFrameIfNecessary(
+ const DesktopSize& size) {
+ // If the current buffer is from an older generation then allocate a new one.
+ // Note that we can't reallocate other buffers at this point, since the caller
+ // may still be reading from them.
+ if (!queue_.current_frame() || !queue_.current_frame()->size().equals(size)) {
+ size_t buffer_size =
+ size.width() * size.height() * DesktopFrame::kBytesPerPixel;
+ SharedMemory* shared_memory = callback_->CreateSharedMemory(buffer_size);
+
+ scoped_ptr<DesktopFrame> buffer;
+ if (shared_memory) {
+ buffer.reset(new SharedMemoryDesktopFrame(
+ size, size.width() * DesktopFrame::kBytesPerPixel, shared_memory));
+ } else {
+ buffer.reset(new BasicDesktopFrame(size));
+ }
+ queue_.ReplaceCurrentFrame(buffer.release());
+ }
+}
+
+bool ScreenCapturerWinMagnifier::IsCapturingPrimaryScreenOnly() const {
+ if (current_screen_id_ != kFullDesktopScreenId)
+ return current_screen_id_ == 0; // The primary screen always has id 0.
+
+ return GetSystemMetrics(SM_CMONITORS) == 1;
+}
+
+void ScreenCapturerWinMagnifier::StartFallbackCapturer() {
+ assert(fallback_capturer_);
+ if (!fallback_capturer_started_) {
+ fallback_capturer_started_ = true;
+
+ fallback_capturer_->Start(callback_);
+ fallback_capturer_->SelectScreen(current_screen_id_);
+ }
+}
+
+} // namespace webrtc
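The code above funnels the Magnification API's C-style image-scaling callback back to the owning object through thread-local storage, which is why each capturer must live on its own thread. Below is a generic, hedged sketch of that trampoline pattern with illustrative names; it is not the capturer's actual code, and the real implementation guards TlsAlloc() with the Atomic32 compare-exchange seen above.

// Hedged sketch, not part of the patch: a static callback recovers the
// per-thread instance from TLS because the library gives it no |this|.
#include <windows.h>
#include <assert.h>

class PerThreadHandler {
 public:
  PerThreadHandler() {
    if (tls_index_ == TLS_OUT_OF_INDEXES)
      tls_index_ = TlsAlloc();  // The real code guards this with a CAS.
    assert(tls_index_ != TLS_OUT_OF_INDEXES);
    TlsSetValue(tls_index_, this);  // One handler instance per thread.
  }

  // The C-style entry point registered with the library.
  static void CALLBACK StaticCallback(void* data) {
    PerThreadHandler* self =
        reinterpret_cast<PerThreadHandler*>(TlsGetValue(tls_index_));
    if (self)
      self->OnCallback(data);
  }

 private:
  void OnCallback(void* data) {}  // Per-instance handling goes here.
  static DWORD tls_index_;
};

DWORD PerThreadHandler::tls_index_ = TLS_OUT_OF_INDEXES;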
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
new file mode 100644
index 00000000000..b6d559083eb
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
@@ -0,0 +1,159 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_MAGNIFIER_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_MAGNIFIER_H_
+
+#include <windows.h>
+#include <magnification.h>
+#include <wincodec.h>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
+#include "webrtc/modules/desktop_capture/screen_capturer.h"
+#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/win/scoped_thread_desktop.h"
+#include "webrtc/system_wrappers/interface/atomic32.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class DesktopFrame;
+class DesktopRect;
+class Differ;
+class MouseShapeObserver;
+
+// Captures the screen using the Magnification API to support window exclusion.
+// Each capturer must run on a dedicated thread because it uses thread local
+// storage for redirecting the library callback. Also the thread must have a UI
+// message loop to handle the window messages for the magnifier window.
+class ScreenCapturerWinMagnifier : public ScreenCapturer {
+ public:
+ // |fallback_capturer| will be used to capture the screen if a non-primary
+ // screen is being captured, or the OS does not support the Magnification
+ // API, or the magnifier capturer fails (e.g. in Windows 8 Metro mode).
+ explicit ScreenCapturerWinMagnifier(
+ scoped_ptr<ScreenCapturer> fallback_capturer);
+ virtual ~ScreenCapturerWinMagnifier();
+
+ // Overridden from ScreenCapturer:
+ virtual void Start(Callback* callback) OVERRIDE;
+ virtual void Capture(const DesktopRegion& region) OVERRIDE;
+ virtual void SetMouseShapeObserver(
+ MouseShapeObserver* mouse_shape_observer) OVERRIDE;
+ virtual bool GetScreenList(ScreenList* screens) OVERRIDE;
+ virtual bool SelectScreen(ScreenId id) OVERRIDE;
+ virtual void SetExcludedWindow(WindowId window) OVERRIDE;
+
+ private:
+ typedef BOOL(WINAPI* MagImageScalingCallback)(HWND hwnd,
+ void* srcdata,
+ MAGIMAGEHEADER srcheader,
+ void* destdata,
+ MAGIMAGEHEADER destheader,
+ RECT unclipped,
+ RECT clipped,
+ HRGN dirty);
+ typedef BOOL(WINAPI* MagInitializeFunc)(void);
+ typedef BOOL(WINAPI* MagUninitializeFunc)(void);
+ typedef BOOL(WINAPI* MagSetWindowSourceFunc)(HWND hwnd, RECT rect);
+ typedef BOOL(WINAPI* MagSetWindowFilterListFunc)(HWND hwnd,
+ DWORD dwFilterMode,
+ int count,
+ HWND* pHWND);
+ typedef BOOL(WINAPI* MagSetImageScalingCallbackFunc)(
+ HWND hwnd,
+ MagImageScalingCallback callback);
+
+ static BOOL WINAPI OnMagImageScalingCallback(HWND hwnd,
+ void* srcdata,
+ MAGIMAGEHEADER srcheader,
+ void* destdata,
+ MAGIMAGEHEADER destheader,
+ RECT unclipped,
+ RECT clipped,
+ HRGN dirty);
+
+ // Captures the screen within |rect| in the desktop coordinates. Returns true
+ // if succeeded.
+ // It can only capture the primary screen for now. The magnification library
+ // crashes under some screen configurations (e.g. secondary screen on top of
+ // primary screen) if it tries to capture a non-primary screen. The caller
+ // must make sure not to call it on non-primary screens.
+ bool CaptureImage(const DesktopRect& rect);
+
+ // Helper method for setting up the magnifier control. Returns true if
+ // succeeded.
+ bool InitializeMagnifier();
+
+ // Called by OnMagImageScalingCallback to output captured data.
+ void OnCaptured(void* data, const MAGIMAGEHEADER& header);
+
+ // Makes sure the current frame exists and matches |size|.
+ void CreateCurrentFrameIfNecessary(const DesktopSize& size);
+
+ // Returns true if we are capturing the primary screen only.
+ bool IsCapturingPrimaryScreenOnly() const;
+
+ // Start the fallback capturer and select the screen.
+ void StartFallbackCapturer();
+
+ static Atomic32 tls_index_;
+
+ scoped_ptr<ScreenCapturer> fallback_capturer_;
+ bool fallback_capturer_started_;
+ Callback* callback_;
+ ScreenId current_screen_id_;
+ std::wstring current_device_key_;
+ HWND excluded_window_;
+
+ // A thread-safe list of invalid rectangles, and the size of the most
+ // recently captured screen.
+ ScreenCapturerHelper helper_;
+
+ // Queue of the frames buffers.
+ ScreenCaptureFrameQueue queue_;
+
+ // Class to calculate the difference between two screen bitmaps.
+ scoped_ptr<Differ> differ_;
+
+ // Used to suppress duplicate logging of SetThreadExecutionState errors.
+ bool set_thread_execution_state_failed_;
+
+ ScopedThreadDesktop desktop_;
+
+ // Used for getting the screen dpi.
+ HDC desktop_dc_;
+
+ HMODULE mag_lib_handle_;
+ MagInitializeFunc mag_initialize_func_;
+ MagUninitializeFunc mag_uninitialize_func_;
+ MagSetWindowSourceFunc set_window_source_func_;
+ MagSetWindowFilterListFunc set_window_filter_list_func_;
+ MagSetImageScalingCallbackFunc set_image_scaling_callback_func_;
+
+ // The hidden window hosting the magnifier control.
+ HWND host_window_;
+ // The magnifier control that captures the screen.
+ HWND magnifier_window_;
+
+ // True if the magnifier control has been successfully initialized.
+ bool magnifier_initialized_;
+
+ // True if the last OnMagImageScalingCallback was called and handled
+ // successfully. Reset at the beginning of each CaptureImage call.
+ bool magnifier_capture_succeeded_;
+
+ DISALLOW_COPY_AND_ASSIGN(ScreenCapturerWinMagnifier);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_MAGNIFIER_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.cc
new file mode 100644
index 00000000000..03e021954b1
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"
+
+namespace webrtc {
+
+bool
+GetCroppedWindowRect(HWND window,
+ DesktopRect* cropped_rect,
+ DesktopRect* original_rect) {
+ RECT rect;
+ if (!GetWindowRect(window, &rect)) {
+ return false;
+ }
+ WINDOWPLACEMENT window_placement;
+ window_placement.length = sizeof(window_placement);
+ if (!GetWindowPlacement(window, &window_placement)) {
+ return false;
+ }
+
+ *original_rect = DesktopRect::MakeLTRB(
+ rect.left, rect.top, rect.right, rect.bottom);
+
+ if (window_placement.showCmd == SW_SHOWMAXIMIZED) {
+ DesktopSize border = DesktopSize(GetSystemMetrics(SM_CXSIZEFRAME),
+ GetSystemMetrics(SM_CYSIZEFRAME));
+ *cropped_rect = DesktopRect::MakeLTRB(
+ rect.left + border.width(),
+ rect.top,
+ rect.right - border.width(),
+ rect.bottom - border.height());
+ } else {
+ *cropped_rect = *original_rect;
+ }
+ return true;
+}
+
+} // namespace webrtc
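A brief, hedged sketch of calling GetCroppedWindowRect(); only the signature declared in window_capture_utils.h is relied on, and the wrapper name is illustrative.

// Hedged sketch, not part of the patch: get the visible bounds of a window,
// with the off-screen resize border trimmed when it is maximized.
#include <windows.h>
#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"

bool GetVisibleWindowBounds(HWND window, webrtc::DesktopRect* bounds) {
  webrtc::DesktopRect cropped, original;
  if (!webrtc::GetCroppedWindowRect(window, &cropped, &original))
    return false;  // GetWindowRect or GetWindowPlacement failed.
  // |cropped| equals |original| unless the window is maximized.
  *bounds = cropped;
  return true;
}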
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h
new file mode 100644
index 00000000000..2a3a470c59e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <windows.h>
+
+#include "webrtc/modules/desktop_capture/desktop_geometry.h"
+
+namespace webrtc {
+
+// Output the window rect, with the left/right/bottom frame border cropped if
+// the window is maximized. |cropped_rect| is the cropped rect relative to the
+// desktop. |original_rect| is the original rect returned from GetWindowRect.
+// Returns true if all API calls succeeded.
+bool GetCroppedWindowRect(HWND window,
+ DesktopRect* cropped_rect,
+ DesktopRect* original_rect);
+
+} // namespace webrtc
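Note: a hedged usage sketch for GetCroppedWindowRect(). The caller compares the cropped and original rectangles to derive the blit source offset, which is how the Windows window capturer later in this patch uses it; the function and variable names around the call are illustrative.

    #include <windows.h>

    #include "webrtc/modules/desktop_capture/win/window_capture_utils.h"

    // Sketch only: derive the capture offset and frame size for |hwnd|.
    void ComputeCaptureOffset(HWND hwnd) {
      webrtc::DesktopRect cropped_rect;
      webrtc::DesktopRect original_rect;
      if (!webrtc::GetCroppedWindowRect(hwnd, &cropped_rect, &original_rect))
        return;
      // For a maximized window the off-screen frame border is cropped away,
      // so the source origin inside the window DC shifts by the border size.
      int src_x = cropped_rect.left() - original_rect.left();
      int src_y = cropped_rect.top() - original_rect.top();
      // cropped_rect.size() is the size the destination frame should have.
      (void)src_x;
      (void)src_y;
    }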
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
index 478c8ee99c3..ad75c88d5d8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
@@ -11,12 +11,12 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WINDOW_CAPTURER_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_WINDOW_CAPTURER_H_
-#include <vector>
#include <string>
+#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -49,6 +49,14 @@ class WindowCapturer : public DesktopCapturer {
// Select window to be captured. Returns false in case of a failure (e.g. if
// there is no window with the specified id).
virtual bool SelectWindow(WindowId id) = 0;
+
+ // Bring the selected window to the front. Returns false in case of a
+  // failure or if no window is selected.
+ // TODO(jiayl): remove the default impl when FakeWindowCapturer is updated in
+ // Chromium.
+ virtual bool BringSelectedWindowToFront() {
+ return true;
+ }
};
} // namespace webrtc
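Note: BringSelectedWindowToFront() ships with a permissive default implementation so existing subclasses (including Chromium's FakeWindowCapturer, per the TODO) keep compiling until they override it. A sketch of a caller is below; the callback wiring and helper name are assumptions, not part of this patch.

    #include "webrtc/modules/desktop_capture/desktop_capture_options.h"
    #include "webrtc/modules/desktop_capture/window_capturer.h"
    #include "webrtc/system_wrappers/interface/scoped_ptr.h"

    // Sketch only: raise the captured window before a capture pass.
    // |callback| and |window_id| are supplied by the embedder.
    void CaptureOnce(webrtc::DesktopCapturer::Callback* callback,
                     webrtc::WindowId window_id) {
      webrtc::scoped_ptr<webrtc::WindowCapturer> capturer(
          webrtc::WindowCapturer::Create(
              webrtc::DesktopCaptureOptions::CreateDefault()));
      if (!capturer.get() || !capturer->SelectWindow(window_id))
        return;
      capturer->Start(callback);
      // Best effort: failure is non-fatal, the window just stays occluded.
      capturer->BringSelectedWindowToFront();
      capturer->Capture(webrtc::DesktopRegion());
    }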
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.cc
deleted file mode 100755
index 6268fc01156..00000000000
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.cc
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/desktop_capture/window_capturer.h"
-
-#include <assert.h>
-#include <ApplicationServices/ApplicationServices.h>
-#include <CoreFoundation/CoreFoundation.h>
-
-#include "webrtc/modules/desktop_capture/desktop_frame.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-
-namespace webrtc {
-
-namespace {
-
-bool CFStringRefToUtf8(const CFStringRef string, std::string* str_utf8) {
- assert(string);
- assert(str_utf8);
- CFIndex length = CFStringGetLength(string);
- size_t max_length_utf8 =
- CFStringGetMaximumSizeForEncoding(length, kCFStringEncodingUTF8);
- str_utf8->resize(max_length_utf8);
- CFIndex used_bytes;
- int result = CFStringGetBytes(
- string, CFRangeMake(0, length), kCFStringEncodingUTF8, 0, false,
- reinterpret_cast<UInt8*>(&*str_utf8->begin()), max_length_utf8,
- &used_bytes);
- if (result != length) {
- str_utf8->clear();
- return false;
- }
- str_utf8->resize(used_bytes);
- return true;
-}
-
-class WindowCapturerMac : public WindowCapturer {
- public:
- WindowCapturerMac();
- virtual ~WindowCapturerMac();
-
- // WindowCapturer interface.
- virtual bool GetWindowList(WindowList* windows) OVERRIDE;
- virtual bool SelectWindow(WindowId id) OVERRIDE;
-
- // DesktopCapturer interface.
- virtual void Start(Callback* callback) OVERRIDE;
- virtual void Capture(const DesktopRegion& region) OVERRIDE;
-
- private:
- Callback* callback_;
- CGWindowID window_id_;
-
- DISALLOW_COPY_AND_ASSIGN(WindowCapturerMac);
-};
-
-WindowCapturerMac::WindowCapturerMac()
- : callback_(NULL),
- window_id_(0) {
-}
-
-WindowCapturerMac::~WindowCapturerMac() {
-}
-
-bool WindowCapturerMac::GetWindowList(WindowList* windows) {
- // Only get on screen, non-desktop windows.
- CFArrayRef window_array = CGWindowListCopyWindowInfo(
- kCGWindowListOptionOnScreenOnly | kCGWindowListExcludeDesktopElements,
- kCGNullWindowID);
- if (!window_array)
- return false;
-
- // Check windows to make sure they have an id, title, and use window layer
- // other than 0.
- CFIndex count = CFArrayGetCount(window_array);
- for (CFIndex i = 0; i < count; ++i) {
- CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
- CFArrayGetValueAtIndex(window_array, i));
- CFStringRef window_title = reinterpret_cast<CFStringRef>(
- CFDictionaryGetValue(window, kCGWindowName));
- CFNumberRef window_id = reinterpret_cast<CFNumberRef>(
- CFDictionaryGetValue(window, kCGWindowNumber));
- CFNumberRef window_layer = reinterpret_cast<CFNumberRef>(
- CFDictionaryGetValue(window, kCGWindowLayer));
- if (window_title && window_id && window_layer) {
- // Skip windows with layer=0 (menu, dock).
- int layer;
- CFNumberGetValue(window_layer, kCFNumberIntType, &layer);
- if (layer != 0)
- continue;
-
- int id;
- CFNumberGetValue(window_id, kCFNumberIntType, &id);
- WindowCapturer::Window window;
- window.id = id;
- if (!CFStringRefToUtf8(window_title, &(window.title)) ||
- window.title.empty()) {
- continue;
- }
- windows->push_back(window);
- }
- }
-
- CFRelease(window_array);
- return true;
-}
-
-bool WindowCapturerMac::SelectWindow(WindowId id) {
- // Request description for the specified window to make sure |id| is valid.
- CGWindowID ids[1];
- ids[0] = id;
- CFArrayRef window_id_array =
- CFArrayCreate(NULL, reinterpret_cast<const void **>(&ids), 1, NULL);
- CFArrayRef window_array =
- CGWindowListCreateDescriptionFromArray(window_id_array);
- int results_count = window_array ? CFArrayGetCount(window_array) : 0;
- CFRelease(window_id_array);
- CFRelease(window_array);
-
- if (results_count == 0) {
- // Could not find the window. It might have been closed.
- return false;
- }
-
- window_id_ = id;
- return true;
-}
-
-void WindowCapturerMac::Start(Callback* callback) {
- assert(!callback_);
- assert(callback);
-
- callback_ = callback;
-}
-
-void WindowCapturerMac::Capture(const DesktopRegion& region) {
- CGImageRef window_image = CGWindowListCreateImage(
- CGRectNull, kCGWindowListOptionIncludingWindow,
- window_id_, kCGWindowImageBoundsIgnoreFraming);
-
- if (!window_image) {
- CFRelease(window_image);
- callback_->OnCaptureCompleted(NULL);
- return;
- }
-
- int bits_per_pixel = CGImageGetBitsPerPixel(window_image);
- if (bits_per_pixel != 32) {
- LOG(LS_ERROR) << "Unsupported window image depth: " << bits_per_pixel;
- CFRelease(window_image);
- callback_->OnCaptureCompleted(NULL);
- return;
- }
-
- int width = CGImageGetWidth(window_image);
- int height = CGImageGetHeight(window_image);
- CGDataProviderRef provider = CGImageGetDataProvider(window_image);
- CFDataRef cf_data = CGDataProviderCopyData(provider);
- DesktopFrame* frame = new BasicDesktopFrame(
- DesktopSize(width, height));
-
- int src_stride = CGImageGetBytesPerRow(window_image);
- const uint8_t* src_data = CFDataGetBytePtr(cf_data);
- for (int y = 0; y < height; ++y) {
- memcpy(frame->data() + frame->stride() * y, src_data + src_stride * y,
- DesktopFrame::kBytesPerPixel * width);
- }
-
- CFRelease(cf_data);
- CFRelease(window_image);
-
- callback_->OnCaptureCompleted(frame);
-}
-
-} // namespace
-
-// static
-WindowCapturer* WindowCapturer::Create(const DesktopCaptureOptions& options) {
- return new WindowCapturerMac();
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm
new file mode 100644
index 00000000000..d177fc40c7d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm
@@ -0,0 +1,230 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/window_capturer.h"
+
+#include <assert.h>
+#include <ApplicationServices/ApplicationServices.h>
+#include <Cocoa/Cocoa.h>
+#include <CoreFoundation/CoreFoundation.h>
+
+#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+namespace {
+
+bool CFStringRefToUtf8(const CFStringRef string, std::string* str_utf8) {
+ assert(string);
+ assert(str_utf8);
+ CFIndex length = CFStringGetLength(string);
+ size_t max_length_utf8 =
+ CFStringGetMaximumSizeForEncoding(length, kCFStringEncodingUTF8);
+ str_utf8->resize(max_length_utf8);
+ CFIndex used_bytes;
+ int result = CFStringGetBytes(
+ string, CFRangeMake(0, length), kCFStringEncodingUTF8, 0, false,
+ reinterpret_cast<UInt8*>(&*str_utf8->begin()), max_length_utf8,
+ &used_bytes);
+ if (result != length) {
+ str_utf8->clear();
+ return false;
+ }
+ str_utf8->resize(used_bytes);
+ return true;
+}
+
+bool IsWindowValid(CGWindowID id) {
+ CFArrayRef window_id_array =
+ CFArrayCreate(NULL, reinterpret_cast<const void **>(&id), 1, NULL);
+ CFArrayRef window_array =
+ CGWindowListCreateDescriptionFromArray(window_id_array);
+ bool valid = window_array && CFArrayGetCount(window_array);
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+
+ return valid;
+}
+
+class WindowCapturerMac : public WindowCapturer {
+ public:
+ WindowCapturerMac();
+ virtual ~WindowCapturerMac();
+
+ // WindowCapturer interface.
+ virtual bool GetWindowList(WindowList* windows) OVERRIDE;
+ virtual bool SelectWindow(WindowId id) OVERRIDE;
+ virtual bool BringSelectedWindowToFront() OVERRIDE;
+
+ // DesktopCapturer interface.
+ virtual void Start(Callback* callback) OVERRIDE;
+ virtual void Capture(const DesktopRegion& region) OVERRIDE;
+
+ private:
+ Callback* callback_;
+ CGWindowID window_id_;
+
+ DISALLOW_COPY_AND_ASSIGN(WindowCapturerMac);
+};
+
+WindowCapturerMac::WindowCapturerMac()
+ : callback_(NULL),
+ window_id_(0) {
+}
+
+WindowCapturerMac::~WindowCapturerMac() {
+}
+
+bool WindowCapturerMac::GetWindowList(WindowList* windows) {
+ // Only get on screen, non-desktop windows.
+ CFArrayRef window_array = CGWindowListCopyWindowInfo(
+ kCGWindowListOptionOnScreenOnly | kCGWindowListExcludeDesktopElements,
+ kCGNullWindowID);
+ if (!window_array)
+ return false;
+
+  // Check windows to make sure they have an id and title, and are on the
+  // normal window layer (0).
+ CFIndex count = CFArrayGetCount(window_array);
+ for (CFIndex i = 0; i < count; ++i) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, i));
+ CFStringRef window_title = reinterpret_cast<CFStringRef>(
+ CFDictionaryGetValue(window, kCGWindowName));
+ CFNumberRef window_id = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowNumber));
+ CFNumberRef window_layer = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowLayer));
+ if (window_title && window_id && window_layer) {
+      // Skip windows that are not on layer 0 (e.g., menu bar, dock).
+ int layer;
+ CFNumberGetValue(window_layer, kCFNumberIntType, &layer);
+ if (layer != 0)
+ continue;
+
+ int id;
+ CFNumberGetValue(window_id, kCFNumberIntType, &id);
+ WindowCapturer::Window window;
+ window.id = id;
+ if (!CFStringRefToUtf8(window_title, &(window.title)) ||
+ window.title.empty()) {
+ continue;
+ }
+ windows->push_back(window);
+ }
+ }
+
+ CFRelease(window_array);
+ return true;
+}
+
+bool WindowCapturerMac::SelectWindow(WindowId id) {
+ if (!IsWindowValid(id))
+ return false;
+ window_id_ = id;
+ return true;
+}
+
+bool WindowCapturerMac::BringSelectedWindowToFront() {
+ if (!window_id_)
+ return false;
+
+ CGWindowID ids[1];
+ ids[0] = window_id_;
+ CFArrayRef window_id_array =
+ CFArrayCreate(NULL, reinterpret_cast<const void **>(&ids), 1, NULL);
+
+ CFArrayRef window_array =
+ CGWindowListCreateDescriptionFromArray(window_id_array);
+ if (window_array == NULL || 0 == CFArrayGetCount(window_array)) {
+ // Could not find the window. It might have been closed.
+ LOG(LS_INFO) << "Window not found";
+ CFRelease(window_id_array);
+ return false;
+ }
+
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, 0));
+ CFNumberRef pid_ref = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowOwnerPID));
+
+ int pid;
+ CFNumberGetValue(pid_ref, kCFNumberIntType, &pid);
+
+ // TODO(jiayl): this will bring the process main window to the front. We
+ // should find a way to bring only the window to the front.
+ bool result =
+ [[NSRunningApplication runningApplicationWithProcessIdentifier: pid]
+ activateWithOptions: NSApplicationActivateIgnoringOtherApps];
+
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+ return result;
+}
+
+void WindowCapturerMac::Start(Callback* callback) {
+ assert(!callback_);
+ assert(callback);
+
+ callback_ = callback;
+}
+
+void WindowCapturerMac::Capture(const DesktopRegion& region) {
+ if (!IsWindowValid(window_id_)) {
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
+
+ CGImageRef window_image = CGWindowListCreateImage(
+ CGRectNull, kCGWindowListOptionIncludingWindow,
+ window_id_, kCGWindowImageBoundsIgnoreFraming);
+
+ if (!window_image) {
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
+
+ int bits_per_pixel = CGImageGetBitsPerPixel(window_image);
+ if (bits_per_pixel != 32) {
+ LOG(LS_ERROR) << "Unsupported window image depth: " << bits_per_pixel;
+ CFRelease(window_image);
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
+
+ int width = CGImageGetWidth(window_image);
+ int height = CGImageGetHeight(window_image);
+ CGDataProviderRef provider = CGImageGetDataProvider(window_image);
+ CFDataRef cf_data = CGDataProviderCopyData(provider);
+ DesktopFrame* frame = new BasicDesktopFrame(
+ DesktopSize(width, height));
+
+ int src_stride = CGImageGetBytesPerRow(window_image);
+ const uint8_t* src_data = CFDataGetBytePtr(cf_data);
+ for (int y = 0; y < height; ++y) {
+ memcpy(frame->data() + frame->stride() * y, src_data + src_stride * y,
+ DesktopFrame::kBytesPerPixel * width);
+ }
+
+ CFRelease(cf_data);
+ CFRelease(window_image);
+
+ callback_->OnCaptureCompleted(frame);
+}
+
+} // namespace
+
+// static
+WindowCapturer* WindowCapturer::Create(const DesktopCaptureOptions& options) {
+ return new WindowCapturerMac();
+}
+
+} // namespace webrtc
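Note on the row-by-row copy above: CGImage rows may be padded, so the source stride (bytes per row) can exceed width * 4, while BasicDesktopFrame expects tightly packed rows. Copying the whole stride in one memcpy would smear the padding into the frame, hence the per-row loop. A generic, CoreGraphics-free sketch of the same idea:

    #include <stdint.h>
    #include <string.h>

    // Sketch only: copy a padded 32-bit image into a tightly packed buffer.
    void CopyPixelRows(const uint8_t* src_data, int src_stride,
                       uint8_t* dst_data, int dst_stride,
                       int width, int height) {
      const int kBytesPerPixel = 4;  // Matches DesktopFrame::kBytesPerPixel.
      for (int y = 0; y < height; ++y) {
        // Only width * 4 bytes of each source row are pixel data; any extra
        // stride is row padding and must not be copied.
        memcpy(dst_data + dst_stride * y, src_data + src_stride * y,
               width * kBytesPerPixel);
      }
    }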
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
index 7bb1247ea9d..5f9010d2fd3 100755
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
@@ -26,6 +26,7 @@ class WindowCapturerNull : public WindowCapturer {
// WindowCapturer interface.
virtual bool GetWindowList(WindowList* windows) OVERRIDE;
virtual bool SelectWindow(WindowId id) OVERRIDE;
+ virtual bool BringSelectedWindowToFront() OVERRIDE;
// DesktopCapturer interface.
virtual void Start(Callback* callback) OVERRIDE;
@@ -54,6 +55,11 @@ bool WindowCapturerNull::SelectWindow(WindowId id) {
return false;
}
+bool WindowCapturerNull::BringSelectedWindowToFront() {
+ // Not implemented yet.
+ return false;
+}
+
void WindowCapturerNull::Start(Callback* callback) {
assert(!callback_);
assert(callback);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
index 95f41db73b2..a0021856203 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
@@ -11,9 +11,9 @@
#include "webrtc/modules/desktop_capture/window_capturer.h"
#include <assert.h>
-#include <windows.h>
#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
+#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@@ -89,6 +89,7 @@ class WindowCapturerWin : public WindowCapturer {
// WindowCapturer interface.
virtual bool GetWindowList(WindowList* windows) OVERRIDE;
virtual bool SelectWindow(WindowId id) OVERRIDE;
+ virtual bool BringSelectedWindowToFront() OVERRIDE;
// DesktopCapturer interface.
virtual void Start(Callback* callback) OVERRIDE;
@@ -157,6 +158,16 @@ bool WindowCapturerWin::SelectWindow(WindowId id) {
return true;
}
+bool WindowCapturerWin::BringSelectedWindowToFront() {
+ if (!window_)
+ return false;
+
+ if (!IsWindow(window_) || !IsWindowVisible(window_) || IsIconic(window_))
+ return false;
+
+ return SetForegroundWindow(window_) != 0;
+}
+
void WindowCapturerWin::Start(Callback* callback) {
assert(!callback_);
assert(callback);
@@ -171,15 +182,16 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
return;
}
- // Stop capturing if the window has been minimized or hidden.
- if (IsIconic(window_) || !IsWindowVisible(window_)) {
+ // Stop capturing if the window has been closed or hidden.
+ if (!IsWindow(window_) || !IsWindowVisible(window_)) {
callback_->OnCaptureCompleted(NULL);
return;
}
- RECT rect;
- if (!GetWindowRect(window_, &rect)) {
- LOG(LS_WARNING) << "Failed to get window size: " << GetLastError();
+ DesktopRect original_rect;
+ DesktopRect cropped_rect;
+ if (!GetCroppedWindowRect(window_, &cropped_rect, &original_rect)) {
+ LOG(LS_WARNING) << "Failed to get window info: " << GetLastError();
callback_->OnCaptureCompleted(NULL);
return;
}
@@ -192,8 +204,7 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
}
scoped_ptr<DesktopFrameWin> frame(DesktopFrameWin::Create(
- DesktopSize(rect.right - rect.left, rect.bottom - rect.top),
- NULL, window_dc));
+ cropped_rect.size(), NULL, window_dc));
if (!frame.get()) {
ReleaseDC(window_, window_dc);
callback_->OnCaptureCompleted(NULL);
@@ -201,7 +212,7 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
}
HDC mem_dc = CreateCompatibleDC(window_dc);
- SelectObject(mem_dc, frame->bitmap());
+ HGDIOBJ previous_object = SelectObject(mem_dc, frame->bitmap());
BOOL result = FALSE;
// When desktop composition (Aero) is enabled each window is rendered to a
@@ -217,21 +228,24 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
// When composition is enabled the DC returned by GetWindowDC() doesn't always
// have window frame rendered correctly. Windows renders it only once and then
// caches the result between captures. We hack it around by calling
- // PrintWindow() whenever window size changes - it somehow affects what we
- // get from BitBlt() on the subsequent captures.
+  // PrintWindow() whenever the window size changes, including on the first
+  // capture - it somehow affects what we get from BitBlt() on the subsequent
+  // captures.
- if (!IsAeroEnabled() ||
- (!previous_size_.is_empty() && !previous_size_.equals(frame->size()))) {
+ if (!IsAeroEnabled() || !previous_size_.equals(frame->size())) {
result = PrintWindow(window_, mem_dc, 0);
}
// Aero is enabled or PrintWindow() failed, use BitBlt.
if (!result) {
result = BitBlt(mem_dc, 0, 0, frame->size().width(), frame->size().height(),
- window_dc, 0, 0, SRCCOPY);
+ window_dc,
+ cropped_rect.left() - original_rect.left(),
+ cropped_rect.top() - original_rect.top(),
+ SRCCOPY);
}
- SelectObject(mem_dc, NULL);
+ SelectObject(mem_dc, previous_object);
DeleteDC(mem_dc);
ReleaseDC(window_, window_dc);
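Note: the SelectObject change above follows the usual GDI rule that the object previously selected into a memory DC should be restored before the DC is deleted; selecting NULL is not a valid way to deselect a bitmap. A minimal sketch of the save/restore pattern, with illustrative parameters:

    #include <windows.h>

    // Sketch only: canonical GDI save/restore around a memory-DC blit.
    // |hwnd|, |bitmap|, |width|, |height|, |src_x|, |src_y| are illustrative.
    void BlitWindow(HWND hwnd, HBITMAP bitmap, int width, int height,
                    int src_x, int src_y) {
      HDC window_dc = GetWindowDC(hwnd);
      HDC mem_dc = CreateCompatibleDC(window_dc);
      // Save whatever was selected in the memory DC (a stock 1x1 bitmap).
      HGDIOBJ previous_object = SelectObject(mem_dc, bitmap);
      BitBlt(mem_dc, 0, 0, width, height, window_dc, src_x, src_y, SRCCOPY);
      // Restore the previous object before deleting the DC.
      SelectObject(mem_dc, previous_object);
      DeleteDC(mem_dc);
      ReleaseDC(hwnd, window_dc);
    }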
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
index 5a14356fd16..b641c932180 100755
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
@@ -10,13 +10,14 @@
#include "webrtc/modules/desktop_capture/window_capturer.h"
+#include <assert.h>
#include <string.h>
#include <X11/Xatom.h>
#include <X11/extensions/Xcomposite.h>
#include <X11/extensions/Xrender.h>
#include <X11/Xutil.h>
+
#include <algorithm>
-#include <cassert>
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
@@ -84,7 +85,8 @@ class XWindowProperty {
DISALLOW_COPY_AND_ASSIGN(XWindowProperty);
};
-class WindowCapturerLinux : public WindowCapturer {
+class WindowCapturerLinux : public WindowCapturer,
+ public SharedXDisplay::XEventHandler {
public:
WindowCapturerLinux(const DesktopCaptureOptions& options);
virtual ~WindowCapturerLinux();
@@ -92,11 +94,15 @@ class WindowCapturerLinux : public WindowCapturer {
// WindowCapturer interface.
virtual bool GetWindowList(WindowList* windows) OVERRIDE;
virtual bool SelectWindow(WindowId id) OVERRIDE;
+ virtual bool BringSelectedWindowToFront() OVERRIDE;
// DesktopCapturer interface.
virtual void Start(Callback* callback) OVERRIDE;
virtual void Capture(const DesktopRegion& region) OVERRIDE;
+ // SharedXDisplay::XEventHandler interface.
+ virtual bool HandleXEvent(const XEvent& event) OVERRIDE;
+
private:
Display* display() { return x_display_->display(); }
@@ -146,9 +152,13 @@ WindowCapturerLinux::WindowCapturerLinux(const DesktopCaptureOptions& options)
} else {
LOG(LS_INFO) << "Xcomposite extension not available or too old.";
}
+
+ x_display_->AddEventHandler(ConfigureNotify, this);
}
-WindowCapturerLinux::~WindowCapturerLinux() {}
+WindowCapturerLinux::~WindowCapturerLinux() {
+ x_display_->RemoveEventHandler(ConfigureNotify, this);
+}
bool WindowCapturerLinux::GetWindowList(WindowList* windows) {
WindowList result;
@@ -194,6 +204,9 @@ bool WindowCapturerLinux::SelectWindow(WindowId id) {
if (!x_server_pixel_buffer_.Init(display(), id))
return false;
+ // Tell the X server to send us window resizing events.
+ XSelectInput(display(), id, StructureNotifyMask);
+
selected_window_ = id;
// In addition to needing X11 server-side support for Xcomposite, it actually
@@ -208,6 +221,55 @@ bool WindowCapturerLinux::SelectWindow(WindowId id) {
return true;
}
+bool WindowCapturerLinux::BringSelectedWindowToFront() {
+ if (!selected_window_)
+ return false;
+
+ unsigned int num_children;
+ ::Window* children;
+ ::Window parent;
+ ::Window root;
+ // Find the root window to pass event to.
+ int status = XQueryTree(
+ display(), selected_window_, &root, &parent, &children, &num_children);
+ if (status == 0) {
+ LOG(LS_ERROR) << "Failed to query for the root window.";
+ return false;
+ }
+
+ if (children)
+ XFree(children);
+
+ XRaiseWindow(display(), selected_window_);
+
+ // Some window managers (e.g., metacity in GNOME) consider it illegal to
+ // raise a window without also giving it input focus with
+ // _NET_ACTIVE_WINDOW, so XRaiseWindow() on its own isn't enough.
+ Atom atom = XInternAtom(display(), "_NET_ACTIVE_WINDOW", True);
+ if (atom != None) {
+ XEvent xev;
+ xev.xclient.type = ClientMessage;
+ xev.xclient.serial = 0;
+ xev.xclient.send_event = True;
+ xev.xclient.window = selected_window_;
+ xev.xclient.message_type = atom;
+
+ // The format member is set to 8, 16, or 32 and specifies whether the
+ // data should be viewed as a list of bytes, shorts, or longs.
+ xev.xclient.format = 32;
+
+ memset(xev.xclient.data.l, 0, sizeof(xev.xclient.data.l));
+
+ XSendEvent(display(),
+ root,
+ False,
+ SubstructureRedirectMask | SubstructureNotifyMask,
+ &xev);
+ }
+ XFlush(display());
+ return true;
+}
+
void WindowCapturerLinux::Start(Callback* callback) {
assert(!callback_);
assert(callback);
@@ -216,6 +278,14 @@ void WindowCapturerLinux::Start(Callback* callback) {
}
void WindowCapturerLinux::Capture(const DesktopRegion& region) {
+ if (!x_server_pixel_buffer_.IsWindowValid()) {
+ LOG(LS_INFO) << "The window is no longer valid.";
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
+
+ x_display_->ProcessPendingXEvents();
+
if (!has_composite_extension_) {
// Without the Xcomposite extension we capture when the whole window is
// visible on screen and not covered by any other window. This is not
@@ -235,6 +305,20 @@ void WindowCapturerLinux::Capture(const DesktopRegion& region) {
callback_->OnCaptureCompleted(frame);
}
+bool WindowCapturerLinux::HandleXEvent(const XEvent& event) {
+ if (event.type == ConfigureNotify) {
+ XConfigureEvent xce = event.xconfigure;
+ if (!DesktopSize(xce.width, xce.height).equals(
+ x_server_pixel_buffer_.window_size())) {
+ if (!x_server_pixel_buffer_.Init(display(), selected_window_)) {
+ LOG(LS_ERROR) << "Failed to initialize pixel buffer after resizing.";
+ }
+ return true;
+ }
+ }
+ return false;
+}
+
::Window WindowCapturerLinux::GetApplicationWindow(::Window window) {
// Get WM_STATE property of the window.
XWindowProperty<uint32_t> window_state(display(), window, wm_state_atom_);
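Note: the resize handling added above rests on two pieces: XSelectInput(..., StructureNotifyMask) when the window is selected, and a ConfigureNotify handler that reinitializes the pixel buffer when the reported size differs from the cached one. A stripped-down Xlib sketch of the same idea, outside the SharedXDisplay plumbing (which is assumed here):

    #include <X11/Xlib.h>

    // Sketch only: ask the X server for structure events (resize, move,
    // destroy) on |window|.
    void WatchForResize(Display* display, Window window) {
      XSelectInput(display, window, StructureNotifyMask);
    }

    // Returns true if |event| reports a size change relative to the cached
    // size, i.e. the capture buffer needs to be reinitialized.
    bool IsResizeEvent(const XEvent& event, Window window,
                       int cached_width, int cached_height) {
      if (event.type != ConfigureNotify || event.xconfigure.window != window)
        return false;
      return event.xconfigure.width != cached_width ||
             event.xconfigure.height != cached_height;
    }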
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_error_trap.h b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_error_trap.h
index fd8346928c9..aa771145d59 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_error_trap.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_error_trap.h
@@ -13,7 +13,7 @@
#include <X11/Xlib.h>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.cc b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.cc
index 6983a6dcced..be00fa7697e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.cc
@@ -213,6 +213,18 @@ bool XServerPixelBuffer::InitPixmaps(int depth) {
return true;
}
+bool XServerPixelBuffer::IsWindowValid() const {
+ XWindowAttributes attributes;
+ {
+ XErrorTrap error_trap(display_);
+ if (!XGetWindowAttributes(display_, window_, &attributes) ||
+ error_trap.GetLastErrorAndDisable() != 0) {
+ return false;
+ }
+ }
+ return true;
+}
+
void XServerPixelBuffer::Synchronize() {
if (shm_segment_info_ && !shm_pixmap_) {
// XShmGetImage can fail if the display is being reconfigured.
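Note: IsWindowValid() wraps XGetWindowAttributes() in an XErrorTrap because the window can be destroyed asynchronously; without a trap the resulting BadWindow error would reach the default Xlib handler, which terminates the process. A generic sketch of the scoped-handler idea follows; it is an assumption of how such a trap works, not the actual XErrorTrap implementation in this tree, and the global-handler approach here is not thread-safe.

    #include <X11/Xlib.h>

    namespace {
    int g_last_error_code = 0;
    int IgnoreXError(Display*, XErrorEvent* error) {
      g_last_error_code = error->error_code;
      return 0;
    }
    }  // namespace

    // Sketch only: query window attributes without crashing on BadWindow.
    bool SafeGetAttributes(Display* display, Window window,
                           XWindowAttributes* attributes) {
      g_last_error_code = 0;
      XErrorHandler previous = XSetErrorHandler(IgnoreXError);
      Status ok = XGetWindowAttributes(display, window, attributes);
      XSync(display, False);  // Flush so any pending error arrives now.
      XSetErrorHandler(previous);
      return ok != 0 && g_last_error_code == 0;
    }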
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
index b81096c8110..98f263f3a88 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
@@ -40,6 +40,9 @@ class XServerPixelBuffer {
// Returns the size of the window the buffer was initialized for.
const DesktopSize& window_size() { return window_size_; }
+ // Returns true if the window can be found.
+ bool IsWindowValid() const;
+
// If shared memory is being used without pixmaps, synchronize this pixel
// buffer with the root window contents (otherwise, this is a no-op).
// This is to avoid doing a full-screen capture for each individual
diff --git a/chromium/third_party/webrtc/modules/interface/module_common_types.h b/chromium/third_party/webrtc/modules/interface/module_common_types.h
index 2494d68b9ff..2c947071045 100644
--- a/chromium/third_party/webrtc/modules/interface/module_common_types.h
+++ b/chromium/third_party/webrtc/modules/interface/module_common_types.h
@@ -16,8 +16,8 @@
#include <algorithm>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_types.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
#ifdef _WIN32
@@ -27,27 +27,6 @@
namespace webrtc {
-struct RTPHeaderExtension {
- bool hasTransmissionTimeOffset;
- int32_t transmissionTimeOffset;
- bool hasAbsoluteSendTime;
- uint32_t absoluteSendTime;
-};
-
-struct RTPHeader {
- bool markerBit;
- uint8_t payloadType;
- uint16_t sequenceNumber;
- uint32_t timestamp;
- uint32_t ssrc;
- uint8_t numCSRCs;
- uint32_t arrOfCSRCs[kRtpCsrcSize];
- uint8_t paddingLength;
- uint16_t headerLength;
- int payload_type_frequency;
- RTPHeaderExtension extension;
-};
-
struct RTPAudioHeader {
uint8_t numEnergy; // number of valid entries in arrOfEnergy
uint8_t arrOfEnergy[kRtpCsrcSize]; // one energy byte (0-9) per channel
@@ -124,6 +103,8 @@ struct WebRtcRTPHeader {
RTPHeader header;
FrameType frameType;
RTPTypeHeader type;
+ // NTP time of the capture time in local timebase in milliseconds.
+ int64_t ntp_time_ms;
};
class RTPFragmentationHeader {
@@ -686,6 +667,10 @@ class AudioFrame {
AudioFrame();
virtual ~AudioFrame() {}
+  // Resets all members to their default state, but does not modify the
+  // contents of |data_|.
+ void Reset();
+
// |interleaved_| is not changed by this method.
void UpdateFrame(int id, uint32_t timestamp, const int16_t* data,
int samples_per_channel, int sample_rate_hz,
@@ -703,13 +688,24 @@ class AudioFrame {
AudioFrame& operator-=(const AudioFrame& rhs);
int id_;
+ // RTP timestamp of the first sample in the AudioFrame.
uint32_t timestamp_;
+ // Time since the first frame in milliseconds.
+ // -1 represents an uninitialized value.
+ int64_t elapsed_time_ms_;
+ // NTP time of the estimated capture time in local timebase in milliseconds.
+ // -1 represents an uninitialized value.
+ int64_t ntp_time_ms_;
int16_t data_[kMaxDataSizeSamples];
int samples_per_channel_;
int sample_rate_hz_;
int num_channels_;
SpeechType speech_type_;
VADActivity vad_activity_;
+ // Note that there is no guarantee that |energy_| is correct. Any user of this
+ // member must verify that the value is correct.
+ // TODO(henrike) Remove |energy_|.
+ // See https://code.google.com/p/webrtc/issues/detail?id=3315.
uint32_t energy_;
bool interleaved_;
@@ -718,16 +714,25 @@ class AudioFrame {
};
inline AudioFrame::AudioFrame()
- : id_(-1),
- timestamp_(0),
- data_(),
- samples_per_channel_(0),
- sample_rate_hz_(0),
- num_channels_(1),
- speech_type_(kUndefined),
- vad_activity_(kVadUnknown),
- energy_(0xffffffff),
- interleaved_(true) {}
+ : data_() {
+ Reset();
+}
+
+inline void AudioFrame::Reset() {
+ id_ = -1;
+ // TODO(wu): Zero is a valid value for |timestamp_|. We should initialize
+ // to an invalid value, or add a new member to indicate invalidity.
+ timestamp_ = 0;
+ elapsed_time_ms_ = -1;
+ ntp_time_ms_ = -1;
+ samples_per_channel_ = 0;
+ sample_rate_hz_ = 0;
+ num_channels_ = 0;
+ speech_type_ = kUndefined;
+ vad_activity_ = kVadUnknown;
+ energy_ = 0xffffffff;
+ interleaved_ = true;
+}
inline void AudioFrame::UpdateFrame(int id, uint32_t timestamp,
const int16_t* data,
@@ -758,6 +763,8 @@ inline void AudioFrame::CopyFrom(const AudioFrame& src) {
id_ = src.id_;
timestamp_ = src.timestamp_;
+ elapsed_time_ms_ = src.elapsed_time_ms_;
+ ntp_time_ms_ = src.ntp_time_ms_;
samples_per_channel_ = src.samples_per_channel_;
sample_rate_hz_ = src.sample_rate_hz_;
speech_type_ = src.speech_type_;
diff --git a/chromium/third_party/webrtc/modules/media_file/source/OWNERS b/chromium/third_party/webrtc/modules/media_file/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/media_file/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/media_file/source/avi_file.cc b/chromium/third_party/webrtc/modules/media_file/source/avi_file.cc
index 92c51acce60..19baaa3b218 100644
--- a/chromium/third_party/webrtc/modules/media_file/source/avi_file.cc
+++ b/chromium/third_party/webrtc/modules/media_file/source/avi_file.cc
@@ -23,7 +23,6 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/file_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
// http://msdn2.microsoft.com/en-us/library/ms779636.aspx
@@ -178,8 +177,7 @@ AviFile::AviFile()
_videoCodecConfigParamsLength(0),
_videoStreamDataChunkPrefix(0),
_audioStreamDataChunkPrefix(0),
- _created(false),
- _indexList(new ListWrapper())
+ _created(false)
{
ResetComplexMembers();
}
@@ -188,7 +186,6 @@ AviFile::~AviFile()
{
Close();
- delete _indexList;
delete[] _videoCodecConfigParams;
delete _crit;
}
@@ -1712,21 +1709,11 @@ uint32_t AviFile::StreamAndTwoCharCodeToTag(int32_t streamNum,
void AviFile::ClearIndexList()
{
- while (!_indexList->Empty())
- {
- ListItem* listItem = _indexList->First();
- if (listItem == 0)
- {
- break;
- }
-
- AVIINDEXENTRY* item = static_cast<AVIINDEXENTRY*>(listItem->GetItem());
- if (item != NULL)
- {
- delete item;
- }
- _indexList->PopFront();
- }
+ for (IndexList::iterator iter = _indexList.begin();
+ iter != _indexList.end(); ++iter) {
+ delete *iter;
+ }
+ _indexList.clear();
}
void AviFile::AddChunkToIndexList(uint32_t inChunkId,
@@ -1734,7 +1721,7 @@ void AviFile::AddChunkToIndexList(uint32_t inChunkId,
uint32_t inOffset,
uint32_t inSize)
{
- _indexList->PushBack(new AVIINDEXENTRY(inChunkId, inFlags, inOffset,
+ _indexList.push_back(new AVIINDEXENTRY(inChunkId, inFlags, inOffset,
inSize));
}
@@ -1747,19 +1734,13 @@ void AviFile::WriteIndex()
_bytesWritten += PutLE32(0);
const size_t idxChunkSize = _bytesWritten;
- for (ListItem* listItem = _indexList->First();
- listItem != NULL;
- listItem = _indexList->Next(listItem))
- {
- const AVIINDEXENTRY* item =
- static_cast<AVIINDEXENTRY*>(listItem->GetItem());
- if (item != NULL)
- {
- _bytesWritten += PutLE32(item->ckid);
- _bytesWritten += PutLE32(item->dwFlags);
- _bytesWritten += PutLE32(item->dwChunkOffset);
- _bytesWritten += PutLE32(item->dwChunkLength);
- }
+ for (IndexList::iterator iter = _indexList.begin();
+ iter != _indexList.end(); ++iter) {
+ const AVIINDEXENTRY* item = *iter;
+ _bytesWritten += PutLE32(item->ckid);
+ _bytesWritten += PutLE32(item->dwFlags);
+ _bytesWritten += PutLE32(item->dwChunkOffset);
+ _bytesWritten += PutLE32(item->dwChunkLength);
}
PutLE32LengthFromCurrent(static_cast<long>(idxChunkSize));
}
diff --git a/chromium/third_party/webrtc/modules/media_file/source/avi_file.h b/chromium/third_party/webrtc/modules/media_file/source/avi_file.h
index a55fc187481..d8b10626dff 100644
--- a/chromium/third_party/webrtc/modules/media_file/source/avi_file.h
+++ b/chromium/third_party/webrtc/modules/media_file/source/avi_file.h
@@ -14,12 +14,12 @@
#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
#include <stdio.h>
+#include <list>
#include "webrtc/typedefs.h"
namespace webrtc {
class CriticalSectionWrapper;
-class ListWrapper;
struct AVISTREAMHEADER
{
@@ -194,6 +194,7 @@ private:
void WriteIndex();
private:
+ typedef std::list<AVIINDEXENTRY*> IndexList;
struct AVIMAINHEADER
{
AVIMAINHEADER();
@@ -269,7 +270,7 @@ private:
uint32_t _audioStreamDataChunkPrefix;
bool _created;
- ListWrapper* _indexList; // Elements are of type AVIINDEXENTRY.
+ IndexList _indexList;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/media_file/source/media_file_utility.cc b/chromium/third_party/webrtc/modules/media_file/source/media_file_utility.cc
index 85df0b3a40a..e8883c9a4ba 100644
--- a/chromium/third_party/webrtc/modules/media_file/source/media_file_utility.cc
+++ b/chromium/third_party/webrtc/modules/media_file/source/media_file_utility.cc
@@ -2521,6 +2521,7 @@ int32_t ModuleFileUtility::FileDurationMs(const char* fileName,
break;
}
#endif
+ break;
}
case kFileFormatPreencodedFile:
{
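Note: the one-line break added above matters because the preceding case's statements sit inside a preprocessor guard; in builds where the guard is off, the case body compiles to nothing and control would fall through into kFileFormatPreencodedFile. A reduced illustration of that hazard, with a hypothetical enum and guard name:

    // Sketch only: why the unconditional break is needed (hypothetical names).
    enum FileFormat { kFormatAvi, kFormatPreencoded };

    int FileDurationMs(FileFormat format) {
      switch (format) {
        case kFormatAvi: {
    #ifdef HYPOTHETICAL_VIDEO_SUPPORT
          return 1000;  // Real code computes the AVI duration here.
    #endif
          break;  // Without this, builds without the guard fall through
                  // into the pre-encoded-file case below.
        }
        case kFormatPreencoded: {
          return 0;  // Placeholder for the pre-encoded-file path.
        }
      }
      return -1;
    }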
diff --git a/chromium/third_party/webrtc/modules/modules.gyp b/chromium/third_party/webrtc/modules/modules.gyp
index ef354abc0fe..8dec125b093 100644
--- a/chromium/third_party/webrtc/modules/modules.gyp
+++ b/chromium/third_party/webrtc/modules/modules.gyp
@@ -16,9 +16,8 @@
'audio_coding/codecs/isac/main/source/isac.gypi',
'audio_coding/codecs/isac/fix/source/isacfix.gypi',
'audio_coding/codecs/pcm16b/pcm16b.gypi',
- 'audio_coding/main/source/audio_coding_module.gypi',
+ 'audio_coding/main/acm2/audio_coding_module.gypi',
'audio_coding/neteq/neteq.gypi',
- 'audio_coding/neteq4/neteq.gypi',
'audio_conference_mixer/source/audio_conference_mixer.gypi',
'audio_device/audio_device.gypi',
'audio_processing/audio_processing.gypi',
@@ -43,6 +42,7 @@
'includes': [
'audio_coding/codecs/isac/isac_test.gypi',
'audio_coding/codecs/isac/isacfix_test.gypi',
+ 'audio_coding/codecs/tools/audio_codec_speed_tests.gypi',
'audio_processing/audio_processing_tests.gypi',
'rtp_rtcp/test/testFec/test_fec.gypi',
'video_coding/main/source/video_coding_test.gypi',
@@ -70,15 +70,13 @@
'dependencies': [
'audio_coding_module',
'audio_processing',
- 'audioproc_unittest_proto',
'bitrate_controller',
'CNG',
'desktop_capture',
'iSACFix',
'media_file',
- 'NetEq',
- 'NetEq4',
- 'NetEq4TestTools',
+ 'neteq',
+ 'neteq_test_tools',
'neteq_unittest_tools',
'paced_sender',
'PCM16B', # Needed by NetEq tests.
@@ -99,13 +97,14 @@
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/test/test.gyp:test_support_main',
'<(webrtc_root)/test/test.gyp:frame_generator',
+ '<(webrtc_root)/test/test.gyp:rtcp_packet_parser',
],
'sources': [
'audio_coding/main/acm2/acm_receiver_unittest.cc',
+ 'audio_coding/main/acm2/audio_coding_module_unittest.cc',
'audio_coding/main/acm2/call_statistics_unittest.cc',
'audio_coding/main/acm2/initial_delay_manager_unittest.cc',
'audio_coding/main/acm2/nack_unittest.cc',
- 'audio_coding/main/source/acm_neteq_unittest.cc',
'audio_coding/codecs/cng/cng_unittest.cc',
'audio_coding/codecs/isac/fix/source/filters_unittest.cc',
'audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc',
@@ -113,47 +112,48 @@
'audio_coding/codecs/isac/fix/source/transform_unittest.cc',
'audio_coding/codecs/isac/main/source/isac_unittest.cc',
'audio_coding/codecs/opus/opus_unittest.cc',
- 'audio_coding/neteq4/audio_multi_vector_unittest.cc',
- 'audio_coding/neteq4/audio_vector_unittest.cc',
- 'audio_coding/neteq4/background_noise_unittest.cc',
- 'audio_coding/neteq4/buffer_level_filter_unittest.cc',
- 'audio_coding/neteq4/comfort_noise_unittest.cc',
- 'audio_coding/neteq4/decision_logic_unittest.cc',
- 'audio_coding/neteq4/decoder_database_unittest.cc',
- 'audio_coding/neteq4/delay_manager_unittest.cc',
- 'audio_coding/neteq4/delay_peak_detector_unittest.cc',
- 'audio_coding/neteq4/dsp_helper_unittest.cc',
- 'audio_coding/neteq4/dtmf_buffer_unittest.cc',
- 'audio_coding/neteq4/dtmf_tone_generator_unittest.cc',
- 'audio_coding/neteq4/expand_unittest.cc',
- 'audio_coding/neteq4/merge_unittest.cc',
- 'audio_coding/neteq4/neteq_external_decoder_unittest.cc',
- 'audio_coding/neteq4/neteq_impl_unittest.cc',
- 'audio_coding/neteq4/neteq_stereo_unittest.cc',
- 'audio_coding/neteq4/neteq_unittest.cc',
- 'audio_coding/neteq4/normal_unittest.cc',
- 'audio_coding/neteq4/packet_buffer_unittest.cc',
- 'audio_coding/neteq4/payload_splitter_unittest.cc',
- 'audio_coding/neteq4/post_decode_vad_unittest.cc',
- 'audio_coding/neteq4/random_vector_unittest.cc',
- 'audio_coding/neteq4/sync_buffer_unittest.cc',
- 'audio_coding/neteq4/timestamp_scaler_unittest.cc',
- 'audio_coding/neteq4/time_stretch_unittest.cc',
- 'audio_coding/neteq4/mock/mock_audio_decoder.h',
- 'audio_coding/neteq4/mock/mock_audio_vector.h',
- 'audio_coding/neteq4/mock/mock_buffer_level_filter.h',
- 'audio_coding/neteq4/mock/mock_decoder_database.h',
- 'audio_coding/neteq4/mock/mock_delay_manager.h',
- 'audio_coding/neteq4/mock/mock_delay_peak_detector.h',
- 'audio_coding/neteq4/mock/mock_dtmf_buffer.h',
- 'audio_coding/neteq4/mock/mock_dtmf_tone_generator.h',
- 'audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h',
- 'audio_coding/neteq4/mock/mock_packet_buffer.h',
- 'audio_coding/neteq4/mock/mock_payload_splitter.h',
+ 'audio_coding/neteq/audio_classifier_unittest.cc',
+ 'audio_coding/neteq/audio_multi_vector_unittest.cc',
+ 'audio_coding/neteq/audio_vector_unittest.cc',
+ 'audio_coding/neteq/background_noise_unittest.cc',
+ 'audio_coding/neteq/buffer_level_filter_unittest.cc',
+ 'audio_coding/neteq/comfort_noise_unittest.cc',
+ 'audio_coding/neteq/decision_logic_unittest.cc',
+ 'audio_coding/neteq/decoder_database_unittest.cc',
+ 'audio_coding/neteq/delay_manager_unittest.cc',
+ 'audio_coding/neteq/delay_peak_detector_unittest.cc',
+ 'audio_coding/neteq/dsp_helper_unittest.cc',
+ 'audio_coding/neteq/dtmf_buffer_unittest.cc',
+ 'audio_coding/neteq/dtmf_tone_generator_unittest.cc',
+ 'audio_coding/neteq/expand_unittest.cc',
+ 'audio_coding/neteq/merge_unittest.cc',
+ 'audio_coding/neteq/neteq_external_decoder_unittest.cc',
+ 'audio_coding/neteq/neteq_impl_unittest.cc',
+ 'audio_coding/neteq/neteq_stereo_unittest.cc',
+ 'audio_coding/neteq/neteq_unittest.cc',
+ 'audio_coding/neteq/normal_unittest.cc',
+ 'audio_coding/neteq/packet_buffer_unittest.cc',
+ 'audio_coding/neteq/payload_splitter_unittest.cc',
+ 'audio_coding/neteq/post_decode_vad_unittest.cc',
+ 'audio_coding/neteq/random_vector_unittest.cc',
+ 'audio_coding/neteq/sync_buffer_unittest.cc',
+ 'audio_coding/neteq/timestamp_scaler_unittest.cc',
+ 'audio_coding/neteq/time_stretch_unittest.cc',
+ 'audio_coding/neteq/mock/mock_audio_decoder.h',
+ 'audio_coding/neteq/mock/mock_audio_vector.h',
+ 'audio_coding/neteq/mock/mock_buffer_level_filter.h',
+ 'audio_coding/neteq/mock/mock_decoder_database.h',
+ 'audio_coding/neteq/mock/mock_delay_manager.h',
+ 'audio_coding/neteq/mock/mock_delay_peak_detector.h',
+ 'audio_coding/neteq/mock/mock_dtmf_buffer.h',
+ 'audio_coding/neteq/mock/mock_dtmf_tone_generator.h',
+ 'audio_coding/neteq/mock/mock_external_decoder_pcm16b.h',
+ 'audio_coding/neteq/mock/mock_packet_buffer.h',
+ 'audio_coding/neteq/mock/mock_payload_splitter.h',
+ 'audio_coding/neteq/tools/packet_unittest.cc',
'audio_processing/aec/system_delay_unittest.cc',
'audio_processing/aec/echo_cancellation_unittest.cc',
'audio_processing/echo_cancellation_impl_unittest.cc',
- 'audio_processing/test/audio_processing_unittest.cc',
'audio_processing/utility/delay_estimator_unittest.cc',
'audio_processing/utility/ring_buffer_unittest.cc',
'bitrate_controller/bitrate_controller_unittest.cc',
@@ -167,19 +167,19 @@
'desktop_capture/screen_capturer_mock_objects.h',
'desktop_capture/screen_capturer_unittest.cc',
'desktop_capture/window_capturer_unittest.cc',
- "desktop_capture/win/cursor_unittest.cc",
- "desktop_capture/win/cursor_unittest_resources.h",
- "desktop_capture/win/cursor_unittest_resources.rc",
+ 'desktop_capture/win/cursor_unittest.cc',
+ 'desktop_capture/win/cursor_unittest_resources.h',
+ 'desktop_capture/win/cursor_unittest_resources.rc',
'media_file/source/media_file_unittest.cc',
'module_common_types_unittest.cc',
'pacing/paced_sender_unittest.cc',
+ 'remote_bitrate_estimator/bwe_simulations.cc',
'remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h',
'remote_bitrate_estimator/rate_statistics_unittest.cc',
'remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc',
'remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc',
'remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h',
'remote_bitrate_estimator/remote_bitrate_estimators_test.cc',
- 'remote_bitrate_estimator/rtp_to_ntp_unittest.cc',
'remote_bitrate_estimator/test/bwe_test_baselinefile.cc',
'remote_bitrate_estimator/test/bwe_test_baselinefile.h',
'remote_bitrate_estimator/test/bwe_test_fileutils.cc',
@@ -199,9 +199,11 @@
'rtp_rtcp/source/nack_rtx_unittest.cc',
'rtp_rtcp/source/producer_fec_unittest.cc',
'rtp_rtcp/source/receive_statistics_unittest.cc',
+ 'rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc',
'rtp_rtcp/source/rtcp_format_remb_unittest.cc',
- 'rtp_rtcp/source/rtcp_sender_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet_unittest.cc',
'rtp_rtcp/source/rtcp_receiver_unittest.cc',
+ 'rtp_rtcp/source/rtcp_sender_unittest.cc',
'rtp_rtcp/source/rtp_fec_unittest.cc',
'rtp_rtcp/source/rtp_format_vp8_unittest.cc',
'rtp_rtcp/source/rtp_format_vp8_test_helper.cc',
@@ -219,6 +221,7 @@
'rtp_rtcp/test/testAPI/test_api_rtcp.cc',
'rtp_rtcp/test/testAPI/test_api_video.cc',
'utility/source/audio_frame_operations_unittest.cc',
+ 'utility/source/file_player_unittests.cc',
'video_coding/codecs/test/packet_manipulator_unittest.cc',
'video_coding/codecs/test/stats_unittest.cc',
'video_coding/codecs/test/videoprocessor_unittest.cc',
@@ -278,6 +281,14 @@
}],
['enable_protobuf==1', {
'defines': [ 'WEBRTC_AUDIOPROC_DEBUG_DUMP' ],
+ 'dependencies': [
+ 'audioproc_unittest_proto',
+ ],
+ 'sources': [
+ 'audio_processing/audio_processing_impl_unittest.cc',
+ 'audio_processing/test/audio_processing_unittest.cc',
+ 'audio_processing/test/test_utils.h',
+ ],
}],
['build_libvpx==1', {
'dependencies': [
@@ -286,7 +297,7 @@
}],
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -326,13 +337,14 @@
'audio_coding/main/test/EncodeDecodeTest.cc',
'audio_coding/main/test/iSACTest.cc',
'audio_coding/main/test/opus_test.cc',
+ 'audio_coding/main/test/PacketLossTest.cc',
'audio_coding/main/test/PCMFile.cc',
'audio_coding/main/test/RTPFile.cc',
'audio_coding/main/test/SpatialAudio.cc',
'audio_coding/main/test/TestAllCodecs.cc',
'audio_coding/main/test/target_delay_unittest.cc',
'audio_coding/main/test/Tester.cc',
- 'audio_coding/main/test/TestFEC.cc',
+ 'audio_coding/main/test/TestRedFec.cc',
'audio_coding/main/test/TestStereo.cc',
'audio_coding/main/test/TestVADDTX.cc',
'audio_coding/main/test/TimedTrace.cc',
@@ -346,7 +358,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -357,7 +369,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'targets': [
{
'target_name': 'modules_unittests_apk_target',
diff --git a/chromium/third_party/webrtc/modules/modules_tests.isolate b/chromium/third_party/webrtc/modules/modules_tests.isolate
index 7a051f66eb6..e5055f0d91e 100644
--- a/chromium/third_party/webrtc/modules/modules_tests.isolate
+++ b/chromium/third_party/webrtc/modules/modules_tests.isolate
@@ -8,32 +8,30 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/modules_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../DEPS',
- '../../resources/audio_coding/testfile32kHz.pcm',
- '../../resources/audio_coding/teststereo32kHz.pcm',
- '../../resources/foreman_cif.yuv',
- '../../resources/paris_qcif.yuv',
- '../../testing/test_env.py',
+ '<(DEPTH)/DEPS',
+ '<(DEPTH)/resources/audio_coding/testfile32kHz.pcm',
+ '<(DEPTH)/resources/audio_coding/teststereo32kHz.pcm',
+ '<(DEPTH)/resources/foreman_cif.yuv',
+ '<(DEPTH)/resources/paris_qcif.yuv',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/modules_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/modules/modules_unittests.isolate b/chromium/third_party/webrtc/modules/modules_unittests.isolate
index aa91238caaf..09ace1c1816 100644
--- a/chromium/third_party/webrtc/modules/modules_unittests.isolate
+++ b/chromium/third_party/webrtc/modules/modules_unittests.isolate
@@ -8,77 +8,105 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
+ ],
+ 'isolate_dependency_tracked': [
+ '<(DEPTH)/resources/short_mixed_mono_48.dat',
+ '<(DEPTH)/resources/short_mixed_mono_48.pcm',
+ '<(DEPTH)/resources/short_mixed_stereo_48.dat',
+ '<(DEPTH)/resources/short_mixed_stereo_48.pcm',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/modules_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../DEPS',
- '../../data/audio_processing/output_data_float.pb',
- '../../data/voice_engine/audio_tiny48.wav',
- '../../resources/audio_coding/neteq4_network_stats.dat',
- '../../resources/audio_coding/neteq4_rtcp_stats.dat',
- '../../resources/audio_coding/neteq4_universal_ref.pcm',
- '../../resources/audio_coding/neteq_network_stats.dat',
- '../../resources/audio_coding/neteq_rtcp_stats.dat',
- '../../resources/audio_coding/neteq_universal_new.rtp',
- '../../resources/audio_coding/neteq_universal_ref.pcm',
- '../../resources/audio_coding/testfile32kHz.pcm',
- '../../resources/deflicker_before_cif_short.yuv',
- '../../resources/far16_stereo.pcm',
- '../../resources/far32_stereo.pcm',
- '../../resources/far8_stereo.pcm',
- '../../resources/foremanColorEnhanced_cif_short.yuv',
- '../../resources/foreman_cif.yuv',
- '../../resources/foreman_cif_short.yuv',
- '../../resources/near16_stereo.pcm',
- '../../resources/near32_stereo.pcm',
- '../../resources/near8_stereo.pcm',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_TOF.bin',
- '../../resources/video_coding/frame-ethernet-ii.pcap',
- '../../resources/video_coding/frame-loopback.pcap',
- '../../resources/video_coding/pltype103.rtp',
- '../../resources/video_coding/ssrcs-2.pcap',
- '../../resources/video_coding/ssrcs-3.pcap',
- '../../testing/test_env.py',
+ '<(DEPTH)/DEPS',
+ '<(DEPTH)/data/audio_processing/output_data_float.pb',
+ '<(DEPTH)/data/voice_engine/audio_tiny48.wav',
+ '<(DEPTH)/resources/att-downlink.rx',
+ '<(DEPTH)/resources/att-uplink.rx',
+ '<(DEPTH)/resources/audio_coding/neteq4_network_stats.dat',
+ '<(DEPTH)/resources/audio_coding/neteq4_rtcp_stats.dat',
+ '<(DEPTH)/resources/audio_coding/neteq4_universal_ref.pcm',
+ '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_32.pcm',
+ '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_64.pcm',
+ '<(DEPTH)/resources/audio_coding/neteq_network_stats.dat',
+ '<(DEPTH)/resources/audio_coding/neteq_rtcp_stats.dat',
+ '<(DEPTH)/resources/audio_coding/neteq_universal_new.rtp',
+ '<(DEPTH)/resources/audio_coding/neteq_universal_ref.pcm',
+ '<(DEPTH)/resources/audio_coding/testfile32kHz.pcm',
+ '<(DEPTH)/resources/deflicker_before_cif_short.yuv',
+ '<(DEPTH)/resources/far16_stereo.pcm',
+ '<(DEPTH)/resources/far32_stereo.pcm',
+ '<(DEPTH)/resources/far44_stereo.pcm',
+ '<(DEPTH)/resources/far48_stereo.pcm',
+ '<(DEPTH)/resources/far8_stereo.pcm',
+ '<(DEPTH)/resources/foremanColorEnhanced_cif_short.yuv',
+ '<(DEPTH)/resources/foreman_cif.yuv',
+ '<(DEPTH)/resources/foreman_cif_short.yuv',
+ '<(DEPTH)/resources/near16_stereo.pcm',
+ '<(DEPTH)/resources/near32_stereo.pcm',
+ '<(DEPTH)/resources/near44_stereo.pcm',
+ '<(DEPTH)/resources/near48_stereo.pcm',
+ '<(DEPTH)/resources/near8_stereo.pcm',
+ '<(DEPTH)/resources/ref03.aecdump',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_TOF.bin',
+ '<(DEPTH)/resources/short_mixed_mono_48.dat',
+ '<(DEPTH)/resources/short_mixed_mono_48.pcm',
+ '<(DEPTH)/resources/short_mixed_stereo_48.dat',
+ '<(DEPTH)/resources/short_mixed_stereo_48.pcm',
+ '<(DEPTH)/resources/sprint-downlink.rx',
+ '<(DEPTH)/resources/sprint-uplink.rx',
+ '<(DEPTH)/resources/synthetic-trace.rx',
+ '<(DEPTH)/resources/tmobile-downlink.rx',
+ '<(DEPTH)/resources/tmobile-uplink.rx',
+ '<(DEPTH)/resources/utility/encapsulated_pcm16b_8khz.wav',
+ '<(DEPTH)/resources/utility/encapsulated_pcmu_8khz.wav',
+ '<(DEPTH)/resources/verizon3g-downlink.rx',
+ '<(DEPTH)/resources/verizon3g-uplink.rx',
+ '<(DEPTH)/resources/verizon4g-downlink.rx',
+ '<(DEPTH)/resources/verizon4g-uplink.rx',
+ '<(DEPTH)/resources/video_coding/frame-ethernet-ii.pcap',
+ '<(DEPTH)/resources/video_coding/frame-loopback.pcap',
+ '<(DEPTH)/resources/video_coding/pltype103.rtp',
+ '<(DEPTH)/resources/video_coding/ssrcs-2.pcap',
+ '<(DEPTH)/resources/video_coding/ssrcs-3.pcap',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/modules_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/modules/pacing/OWNERS b/chromium/third_party/webrtc/modules/pacing/OWNERS
index 933a045009b..1426abc6215 100644
--- a/chromium/third_party/webrtc/modules/pacing/OWNERS
+++ b/chromium/third_party/webrtc/modules/pacing/OWNERS
@@ -2,3 +2,8 @@ pwestin@webrtc.org
stefan@webrtc.org
mflodman@webrtc.org
asapersson@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/pacing/include/paced_sender.h b/chromium/third_party/webrtc/modules/pacing/include/paced_sender.h
index 045469009f0..95f1a86e8f6 100644
--- a/chromium/third_party/webrtc/modules/pacing/include/paced_sender.h
+++ b/chromium/third_party/webrtc/modules/pacing/include/paced_sender.h
@@ -49,14 +49,14 @@ class PacedSender : public Module {
bool retransmission) = 0;
// Called when it's a good time to send padding data.
virtual int TimeToSendPadding(int bytes) = 0;
+
protected:
virtual ~Callback() {}
};
static const int kDefaultMaxQueueLengthMs = 2000;
- PacedSender(Callback* callback, int target_bitrate_kbps,
- float pace_multiplier);
+ PacedSender(Callback* callback, int max_bitrate_kbps, int min_bitrate_kbps);
virtual ~PacedSender();
@@ -71,13 +71,9 @@ class PacedSender : public Module {
// Resume sending packets.
void Resume();
- // Set the pacing target bitrate and the bitrate up to which we are allowed to
- // pad. We will send padding packets to increase the total bitrate until we
- // reach |pad_up_to_bitrate_kbps|. If the media bitrate is above
- // |pad_up_to_bitrate_kbps| no padding will be sent.
- void UpdateBitrate(int target_bitrate_kbps,
- int max_padding_bitrate_kbps,
- int pad_up_to_bitrate_kbps);
+ // Set target bitrates for the pacer. Padding packets will be utilized to
+ // reach |min_bitrate| unless enough media packets are available.
+ void UpdateBitrate(int max_bitrate_kbps, int min_bitrate_kbps);
// Returns true if we send the packet now, else it will add the packet
// information to the queue and call TimeToSendPacket when it's time to send.
@@ -119,7 +115,6 @@ class PacedSender : public Module {
void UpdateMediaBytesSent(int num_bytes);
Callback* callback_;
- const float pace_multiplier_;
bool enabled_;
bool paused_;
int max_queue_length_ms_;
@@ -128,12 +123,9 @@ class PacedSender : public Module {
// we can pace out during the current interval.
scoped_ptr<paced_sender::IntervalBudget> media_budget_;
// This is the padding budget, keeping track of how many bits of padding we're
- // allowed to send out during the current interval.
+ // allowed to send out during the current interval. This budget will be
+ // utilized when there's no media to send.
scoped_ptr<paced_sender::IntervalBudget> padding_budget_;
- // Media and padding share this budget, therefore no padding will be sent if
- // media uses all of this budget. This is used to avoid padding above a given
- // bitrate.
- scoped_ptr<paced_sender::IntervalBudget> pad_up_to_bitrate_budget_;
TickTime time_last_update_;
TickTime time_last_send_;
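
The header change above collapses the old (target, max-padding, pad-up-to) bitrate triple into a single max/min pair and drops the internal pace multiplier. A rough usage sketch of the new constructor and UpdateBitrate(), based only on the signatures in this hunk and on how the updated unit tests later in this patch apply the multiplier on the caller side; it assumes the patched WebRTC headers and is not buildable on its own:

#include "webrtc/modules/pacing/include/paced_sender.h"

// Configure a pacer with the two-argument API from this patch. The pace
// multiplier is applied by the caller rather than inside PacedSender.
void ConfigurePacer(webrtc::PacedSender::Callback* callback,
                    int target_bitrate_kbps,
                    float pace_multiplier) {
  webrtc::PacedSender pacer(
      callback,
      static_cast<int>(pace_multiplier * target_bitrate_kbps),  // max_bitrate_kbps
      0);                                                       // min_bitrate_kbps
  // Padding packets are generated up to min_bitrate_kbps whenever no media
  // is queued, so raising the second argument enables padding.
  pacer.UpdateBitrate(static_cast<int>(pace_multiplier * target_bitrate_kbps),
                      target_bitrate_kbps);
}
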
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender.cc b/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
index c46bd04ed73..e9f9bddced1 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
@@ -120,19 +120,16 @@ class IntervalBudget {
};
} // namespace paced_sender
-PacedSender::PacedSender(Callback* callback, int target_bitrate_kbps,
- float pace_multiplier)
+PacedSender::PacedSender(Callback* callback,
+ int max_bitrate_kbps,
+ int min_bitrate_kbps)
: callback_(callback),
- pace_multiplier_(pace_multiplier),
- enabled_(false),
+ enabled_(true),
paused_(false),
max_queue_length_ms_(kDefaultMaxQueueLengthMs),
critsect_(CriticalSectionWrapper::CreateCriticalSection()),
- media_budget_(new paced_sender::IntervalBudget(
- pace_multiplier_ * target_bitrate_kbps)),
- padding_budget_(new paced_sender::IntervalBudget(0)),
- // No padding until UpdateBitrate is called.
- pad_up_to_bitrate_budget_(new paced_sender::IntervalBudget(0)),
+ media_budget_(new paced_sender::IntervalBudget(max_bitrate_kbps)),
+ padding_budget_(new paced_sender::IntervalBudget(min_bitrate_kbps)),
time_last_update_(TickTime::Now()),
capture_time_ms_last_queued_(0),
capture_time_ms_last_sent_(0),
@@ -165,13 +162,11 @@ bool PacedSender::Enabled() const {
return enabled_;
}
-void PacedSender::UpdateBitrate(int target_bitrate_kbps,
- int max_padding_bitrate_kbps,
- int pad_up_to_bitrate_kbps) {
+void PacedSender::UpdateBitrate(int max_bitrate_kbps,
+ int min_bitrate_kbps) {
CriticalSectionScoped cs(critsect_.get());
- media_budget_->set_target_rate_kbps(pace_multiplier_ * target_bitrate_kbps);
- padding_budget_->set_target_rate_kbps(max_padding_bitrate_kbps);
- pad_up_to_bitrate_budget_->set_target_rate_kbps(pad_up_to_bitrate_kbps);
+ media_budget_->set_target_rate_kbps(max_bitrate_kbps);
+ padding_budget_->set_target_rate_kbps(min_bitrate_kbps);
}
bool PacedSender::SendPacket(Priority priority, uint32_t ssrc,
@@ -273,24 +268,21 @@ int32_t PacedSender::Process() {
if (high_priority_packets_->empty() &&
normal_priority_packets_->empty() &&
low_priority_packets_->empty() &&
- padding_budget_->bytes_remaining() > 0 &&
- pad_up_to_bitrate_budget_->bytes_remaining() > 0) {
- int padding_needed = std::min(
- padding_budget_->bytes_remaining(),
- pad_up_to_bitrate_budget_->bytes_remaining());
+ padding_budget_->bytes_remaining() > 0) {
+ int padding_needed = padding_budget_->bytes_remaining();
critsect_->Leave();
int bytes_sent = callback_->TimeToSendPadding(padding_needed);
critsect_->Enter();
media_budget_->UseBudget(bytes_sent);
padding_budget_->UseBudget(bytes_sent);
- pad_up_to_bitrate_budget_->UseBudget(bytes_sent);
}
}
return 0;
}
// MUST have critsect_ when calling.
-bool PacedSender::SendPacketFromList(paced_sender::PacketList* packet_list) {
+bool PacedSender::SendPacketFromList(paced_sender::PacketList* packet_list)
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_.get()) {
paced_sender::Packet packet = GetNextPacketFromList(packet_list);
critsect_->Leave();
@@ -323,7 +315,6 @@ bool PacedSender::SendPacketFromList(paced_sender::PacketList* packet_list) {
void PacedSender::UpdateBytesPerInterval(uint32_t delta_time_ms) {
media_budget_->IncreaseBudget(delta_time_ms);
padding_budget_->IncreaseBudget(delta_time_ms);
- pad_up_to_bitrate_budget_->IncreaseBudget(delta_time_ms);
}
// MUST have critsect_ when calling.
@@ -387,7 +378,7 @@ paced_sender::Packet PacedSender::GetNextPacketFromList(
void PacedSender::UpdateMediaBytesSent(int num_bytes) {
time_last_send_ = TickTime::Now();
media_budget_->UseBudget(num_bytes);
- pad_up_to_bitrate_budget_->UseBudget(num_bytes);
+ padding_budget_->UseBudget(num_bytes);
}
} // namespace webrtc
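
In Process() above, padding is now requested only when all three priority queues are empty and the single padding budget still has bytes left, and both budgets are charged for whatever gets sent. Below is a minimal standalone C++ sketch of that two-budget bookkeeping; SimpleIntervalBudget and every number in it are illustrative stand-ins, not webrtc::paced_sender::IntervalBudget:

#include <cstdio>

class SimpleIntervalBudget {
 public:
  explicit SimpleIntervalBudget(int target_rate_kbps)
      : target_rate_kbps_(target_rate_kbps), bytes_remaining_(0) {}
  void IncreaseBudget(int delta_time_ms) {
    // target_rate_kbps over delta_time_ms milliseconds, expressed in bytes.
    bytes_remaining_ += target_rate_kbps_ * delta_time_ms / 8;
  }
  void UseBudget(int bytes) { bytes_remaining_ -= bytes; }
  int bytes_remaining() const {
    return bytes_remaining_ > 0 ? bytes_remaining_ : 0;
  }

 private:
  int target_rate_kbps_;
  int bytes_remaining_;
};

int main() {
  SimpleIntervalBudget media_budget(2000);   // plays the role of max_bitrate_kbps
  SimpleIntervalBudget padding_budget(300);  // plays the role of min_bitrate_kbps
  const bool queues_empty = true;            // pretend no media is waiting
  for (int i = 0; i < 3; ++i) {
    media_budget.IncreaseBudget(5);          // 5 ms process interval
    padding_budget.IncreaseBudget(5);
    if (queues_empty && padding_budget.bytes_remaining() > 0) {
      const int padding_sent = padding_budget.bytes_remaining();
      media_budget.UseBudget(padding_sent);  // both budgets are charged
      padding_budget.UseBudget(padding_sent);
      std::printf("interval %d: %d padding bytes\n", i, padding_sent);
    }
  }
  return 0;
}
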
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc b/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
index f8dcdfc6985..9763c80070d 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
@@ -59,9 +59,8 @@ class PacedSenderTest : public ::testing::Test {
srand(0);
TickTime::UseFakeClock(123456);
// Need to initialize PacedSender after we initialize clock.
- send_bucket_.reset(new PacedSender(&callback_, kTargetBitrate,
- kPaceMultiplier));
- send_bucket_->SetStatus(true);
+ send_bucket_.reset(
+ new PacedSender(&callback_, kPaceMultiplier * kTargetBitrate, 0));
}
void SendAndExpectPacket(PacedSender::Priority priority,
@@ -209,7 +208,7 @@ TEST_F(PacedSenderTest, Padding) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
- send_bucket_->UpdateBitrate(kTargetBitrate, kTargetBitrate, kTargetBitrate);
+ send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
// Due to the multiplicative factor we can send 3 packets not 2 packets.
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
TickTime::MillisecondTimestamp(), 250, false);
@@ -235,7 +234,7 @@ TEST_F(PacedSenderTest, Padding) {
TEST_F(PacedSenderTest, NoPaddingWhenDisabled) {
send_bucket_->SetStatus(false);
- send_bucket_->UpdateBitrate(kTargetBitrate, kTargetBitrate, kTargetBitrate);
+ send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
// No padding is expected since the pacer is disabled.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
@@ -255,7 +254,7 @@ TEST_F(PacedSenderTest, VerifyPaddingUpToBitrate) {
int64_t capture_time_ms = 56789;
const int kTimeStep = 5;
const int64_t kBitrateWindow = 100;
- send_bucket_->UpdateBitrate(kTargetBitrate, kTargetBitrate, kTargetBitrate);
+ send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
int64_t start_time = TickTime::MillisecondTimestamp();
while (TickTime::MillisecondTimestamp() - start_time < kBitrateWindow) {
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
@@ -267,27 +266,6 @@ TEST_F(PacedSenderTest, VerifyPaddingUpToBitrate) {
}
}
-TEST_F(PacedSenderTest, VerifyMaxPaddingBitrate) {
- uint32_t ssrc = 12345;
- uint16_t sequence_number = 1234;
- int64_t capture_time_ms = 56789;
- const int kTimeStep = 5;
- const int64_t kBitrateWindow = 100;
- const int kTargetBitrate = 1500;
- const int kMaxPaddingBitrate = 800;
- send_bucket_->UpdateBitrate(kTargetBitrate, kMaxPaddingBitrate,
- kTargetBitrate);
- int64_t start_time = TickTime::MillisecondTimestamp();
- while (TickTime::MillisecondTimestamp() - start_time < kBitrateWindow) {
- SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- capture_time_ms, 250, false);
- TickTime::AdvanceFakeClock(kTimeStep);
- EXPECT_CALL(callback_, TimeToSendPadding(500)).Times(1).
- WillOnce(Return(250));
- send_bucket_->Process();
- }
-}
-
TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
@@ -295,10 +273,9 @@ TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
const int kTimeStep = 5;
const int64_t kBitrateWindow = 10000;
PacedSenderPadding callback;
- send_bucket_.reset(new PacedSender(&callback, kTargetBitrate,
- kPaceMultiplier));
- send_bucket_->SetStatus(true);
- send_bucket_->UpdateBitrate(kTargetBitrate, kTargetBitrate, kTargetBitrate);
+ send_bucket_.reset(
+ new PacedSender(&callback, kPaceMultiplier * kTargetBitrate, 0));
+ send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
int64_t start_time = TickTime::MillisecondTimestamp();
int media_bytes = 0;
while (TickTime::MillisecondTimestamp() - start_time < kBitrateWindow) {
@@ -421,7 +398,8 @@ TEST_F(PacedSenderTest, Pause) {
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
- EXPECT_CALL(callback_, TimeToSendPacket(_, _, second_capture_time_ms, false))
+ EXPECT_CALL(
+ callback_, TimeToSendPacket(_, _, second_capture_time_ms, false))
.Times(1)
.WillRepeatedly(Return(true));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
@@ -496,7 +474,7 @@ TEST_F(PacedSenderTest, MaxQueueLength) {
uint16_t sequence_number = 1234;
EXPECT_EQ(0, send_bucket_->QueueInMs());
- send_bucket_->UpdateBitrate(30, 0, 0);
+ send_bucket_->UpdateBitrate(kPaceMultiplier * 30, 0);
for (int i = 0; i < 30; ++i) {
SendAndExpectPacket(PacedSender::kNormalPriority,
ssrc,
@@ -525,7 +503,7 @@ TEST_F(PacedSenderTest, QueueTimeGrowsOverTime) {
uint16_t sequence_number = 1234;
EXPECT_EQ(0, send_bucket_->QueueInMs());
- send_bucket_->UpdateBitrate(30, 0, 0);
+ send_bucket_->UpdateBitrate(kPaceMultiplier * 30, 0);
SendAndExpectPacket(PacedSender::kNormalPriority,
ssrc,
sequence_number,
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/OWNERS b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/OWNERS
index b705ede2a00..eab2b8e0041 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/OWNERS
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/OWNERS
@@ -2,4 +2,8 @@ pwestin@webrtc.org
stefan@webrtc.org
henrik.lundin@webrtc.org
mflodman@webrtc.org
-asapersson@webrtc.org
\ No newline at end of file
+asapersson@webrtc.org
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
new file mode 100644
index 00000000000..6b208e4999d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+using std::string;
+
+namespace webrtc {
+namespace testing {
+namespace bwe {
+#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+BweTestConfig::EstimatorConfig CreateEstimatorConfig(
+ int flow_id, bool plot_delay, bool plot_estimate) {
+ static const AbsoluteSendTimeRemoteBitrateEstimatorFactory factory =
+ AbsoluteSendTimeRemoteBitrateEstimatorFactory();
+
+ return BweTestConfig::EstimatorConfig("AST", flow_id, &factory, kAimdControl,
+ plot_delay, plot_estimate);
+}
+
+BweTestConfig MakeAdaptiveBweTestConfig() {
+ BweTestConfig result;
+ result.estimator_configs.push_back(CreateEstimatorConfig(0, true, true));
+ return result;
+}
+
+BweTestConfig MakeMultiFlowBweTestConfig(int flow_count) {
+ BweTestConfig result;
+ for (int i = 0; i < flow_count; ++i) {
+ result.estimator_configs.push_back(CreateEstimatorConfig(i, false, true));
+ }
+ return result;
+}
+
+// This test fixture is used to instantiate tests running with adaptive video
+// senders.
+class BweSimulation : public BweTest,
+ public ::testing::TestWithParam<BweTestConfig> {
+ public:
+ BweSimulation() : BweTest() {}
+ virtual ~BweSimulation() {}
+
+ virtual void SetUp() {
+ const BweTestConfig& config = GetParam();
+ SetupTestFromConfig(config);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(BweSimulation);
+};
+
+INSTANTIATE_TEST_CASE_P(VideoSendersTest, BweSimulation,
+ ::testing::Values(MakeAdaptiveBweTestConfig()));
+
+TEST_P(BweSimulation, SprintUplinkTest) {
+ VerboseLogging(true);
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ RateCounterFilter counter1(this, "sender_output");
+ TraceBasedDeliveryFilter filter(this, "link_capacity");
+ RateCounterFilter counter2(this, "receiver_input");
+ ASSERT_TRUE(filter.Init(test::ResourcePath("sprint-uplink", "rx")));
+ RunFor(60 * 1000);
+}
+
+TEST_P(BweSimulation, Verizon4gDownlinkTest) {
+ VerboseLogging(true);
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ RateCounterFilter counter1(this, "sender_output");
+ TraceBasedDeliveryFilter filter(this, "link_capacity");
+ RateCounterFilter counter2(this, "receiver_input");
+ ASSERT_TRUE(filter.Init(test::ResourcePath("verizon4g-downlink", "rx")));
+ RunFor(22 * 60 * 1000);
+}
+
+TEST_P(BweSimulation, Choke1000kbps500kbps1000kbps) {
+ VerboseLogging(true);
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ ChokeFilter filter(this);
+ RateCounterFilter counter(this, "receiver_input");
+ filter.SetCapacity(1000);
+ filter.SetMaxDelay(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(1000);
+ RunFor(60 * 1000);
+}
+
+TEST_P(BweSimulation, Choke200kbps30kbps200kbps) {
+ VerboseLogging(true);
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ ChokeFilter filter(this);
+ RateCounterFilter counter(this, "receiver_input");
+ filter.SetCapacity(200);
+ filter.SetMaxDelay(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(30);
+ RunFor(60 * 1000);
+ filter.SetCapacity(200);
+ RunFor(60 * 1000);
+}
+
+TEST_P(BweSimulation, GoogleWifiTrace3Mbps) {
+ VerboseLogging(true);
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ RateCounterFilter counter1(this, "sender_output");
+ TraceBasedDeliveryFilter filter(this, "link_capacity");
+ filter.SetMaxDelay(500);
+ RateCounterFilter counter2(this, "receiver_input");
+ ASSERT_TRUE(filter.Init(test::ResourcePath("google-wifi-3mbps", "rx")));
+ RunFor(300 * 1000);
+}
+
+class MultiFlowBweSimulation : public BweSimulation {
+ public:
+ MultiFlowBweSimulation() : BweSimulation() {}
+ virtual ~MultiFlowBweSimulation() {}
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MultiFlowBweSimulation);
+};
+
+INSTANTIATE_TEST_CASE_P(VideoSendersTest, MultiFlowBweSimulation,
+ ::testing::Values(MakeMultiFlowBweTestConfig(3)));
+
+TEST_P(MultiFlowBweSimulation, SelfFairnessTest) {
+ VerboseLogging(true);
+ const int kAllFlowIds[] = {0, 1, 2};
+ const size_t kNumFlows = sizeof(kAllFlowIds) / sizeof(kAllFlowIds[0]);
+ scoped_ptr<AdaptiveVideoSender> senders[kNumFlows];
+ for (size_t i = 0; i < kNumFlows; ++i) {
+ senders[i].reset(new AdaptiveVideoSender(kAllFlowIds[i], this, 30, 300, 0,
+ 0));
+ }
+ // Second and third flow.
+ ChokeFilter choke(this, CreateFlowIds(&kAllFlowIds[1], 2));
+ choke.SetCapacity(1500);
+ // First flow.
+ ChokeFilter choke2(this, CreateFlowIds(&kAllFlowIds[0], 1));
+ choke2.SetCapacity(1000);
+
+ scoped_ptr<RateCounterFilter> rate_counters[kNumFlows];
+ for (size_t i = 0; i < kNumFlows; ++i) {
+ rate_counters[i].reset(new RateCounterFilter(
+ this, CreateFlowIds(&kAllFlowIds[i], 1), "receiver_input"));
+ }
+ RunFor(30 * 60 * 1000);
+}
+#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+} // namespace bwe
+} // namespace testing
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
index 170ec765c1e..e61e903558f 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
@@ -25,6 +25,11 @@ namespace webrtc {
class Clock;
+enum RateControlType {
+ kMimdControl,
+ kAimdControl
+};
+
// RemoteBitrateObserver is used to signal changes in bitrate estimates for
// the incoming streams.
class RemoteBitrateObserver {
@@ -37,6 +42,27 @@ class RemoteBitrateObserver {
virtual ~RemoteBitrateObserver() {}
};
+struct ReceiveBandwidthEstimatorStats {
+ ReceiveBandwidthEstimatorStats() : total_propagation_time_delta_ms(0) {}
+
+  // The "propagation_time_delta" of a frame is defined as (d_arrival - d_sent),
+  // where d_arrival is the delta of the arrival times of the frame and the
+  // previous frame, and d_sent is the delta of the sent times of the frame and
+  // the previous frame. The sent time is calculated from the RTP timestamp.
+
+  // |total_propagation_time_delta_ms| is the sum of the propagation_time_deltas
+  // of all received frames, except that it is adjusted to 0 when it becomes
+  // negative.
+ int total_propagation_time_delta_ms;
+ // The propagation_time_deltas for the frames arrived in the last
+ // kProcessIntervalMs using the clock passed to
+ // RemoteBitrateEstimatorFactory::Create.
+ std::vector<int> recent_propagation_time_delta_ms;
+ // The arrival times for the frames arrived in the last kProcessIntervalMs
+ // using the clock passed to RemoteBitrateEstimatorFactory::Create.
+ std::vector<int64_t> recent_arrival_time_ms;
+};
+
class RemoteBitrateEstimator : public CallStatsObserver, public Module {
public:
virtual ~RemoteBitrateEstimator() {}
@@ -45,6 +71,7 @@ class RemoteBitrateEstimator : public CallStatsObserver, public Module {
// estimate and the over-use detector. If an over-use is detected the
// remote bitrate estimate will be updated. Note that |payload_size| is the
// packet size excluding headers.
+ // Note that |arrival_time_ms| can be of an arbitrary time base.
virtual void IncomingPacket(int64_t arrival_time_ms,
int payload_size,
const RTPHeader& header) = 0;
@@ -58,6 +85,9 @@ class RemoteBitrateEstimator : public CallStatsObserver, public Module {
virtual bool LatestEstimate(std::vector<unsigned int>* ssrcs,
unsigned int* bitrate_bps) const = 0;
+ // Returns true if the statistics are available.
+ virtual bool GetStats(ReceiveBandwidthEstimatorStats* output) const = 0;
+
protected:
static const int kProcessIntervalMs = 1000;
static const int kStreamTimeOutMs = 2000;
@@ -70,6 +100,7 @@ struct RemoteBitrateEstimatorFactory {
virtual RemoteBitrateEstimator* Create(
RemoteBitrateObserver* observer,
Clock* clock,
+ RateControlType control_type,
uint32_t min_bitrate_bps) const;
};
@@ -81,6 +112,7 @@ struct AbsoluteSendTimeRemoteBitrateEstimatorFactory
virtual RemoteBitrateEstimator* Create(
RemoteBitrateObserver* observer,
Clock* clock,
+ RateControlType control_type,
uint32_t min_bitrate_bps) const;
};
} // namespace webrtc
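
The stats struct above defines propagation_time_delta purely from consecutive frames' arrival and sent deltas, with the running total clamped at zero. A small standalone worked example of that bookkeeping; the frame times are invented for illustration:

#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  // Arrival and sent times (ms) of four consecutive frames.
  const std::vector<int64_t> arrival_ms = {0, 35, 66, 105};
  const std::vector<int64_t> sent_ms = {0, 33, 66, 99};
  int total_propagation_time_delta_ms = 0;
  for (size_t i = 1; i < arrival_ms.size(); ++i) {
    const int d_arrival = static_cast<int>(arrival_ms[i] - arrival_ms[i - 1]);
    const int d_sent = static_cast<int>(sent_ms[i] - sent_ms[i - 1]);
    total_propagation_time_delta_ms += d_arrival - d_sent;  // may go negative...
    if (total_propagation_time_delta_ms < 0)
      total_propagation_time_delta_ms = 0;                  // ...then it is adjusted to 0.
    std::printf("frame %zu: d_arrival=%d d_sent=%d total=%d\n",
                i, d_arrival, d_sent, total_propagation_time_delta_ms);
  }
  return 0;
}
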
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h
deleted file mode 100644
index 7928abfacbc..00000000000
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_RTP_TO_NTP_H_
-#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_RTP_TO_NTP_H_
-
-#include <list>
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-namespace synchronization {
-
-struct RtcpMeasurement {
- RtcpMeasurement();
- RtcpMeasurement(uint32_t ntp_secs, uint32_t ntp_frac, uint32_t timestamp);
- uint32_t ntp_secs;
- uint32_t ntp_frac;
- uint32_t rtp_timestamp;
-};
-
-typedef std::list<RtcpMeasurement> RtcpList;
-
-// Converts an RTP timestamp to the NTP domain in milliseconds using two
-// (RTP timestamp, NTP timestamp) pairs.
-bool RtpToNtpMs(int64_t rtp_timestamp, const RtcpList& rtcp,
- int64_t* timestamp_in_ms);
-
-// Returns 1 there has been a forward wrap around, 0 if there has been no wrap
-// around and -1 if there has been a backwards wrap around (i.e. reordering).
-int CheckForWrapArounds(uint32_t rtp_timestamp, uint32_t rtcp_rtp_timestamp);
-} // namespace synchronization
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_RTP_TO_NTP_H_
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc
index 86f6cb8ee3c..9baaa9c9134 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc
@@ -10,19 +10,12 @@
#include <math.h>
#include <stdlib.h> // fabsf
-#if _WIN32
-#include <windows.h>
-#endif
#include "webrtc/modules/remote_bitrate_estimator/overuse_detector.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_rate_control.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/trace.h"
-#ifdef WEBRTC_BWE_MATLAB
-extern MatlabEngine eng; // global variable defined elsewhere
-#endif
-
enum { kOverUsingTimeThreshold = 100 };
enum { kMinFramePeriodHistoryLength = 60 };
@@ -43,74 +36,20 @@ OveruseDetector::OveruseDetector(const OverUseDetectorOptions& options)
prev_offset_(0.0),
time_over_using_(-1),
over_use_counter_(0),
- hypothesis_(kBwNormal),
- time_of_last_received_packet_(-1)
-#ifdef WEBRTC_BWE_MATLAB
- , plots_()
-#endif
- {
+ hypothesis_(kBwNormal) {
memcpy(E_, options_.initial_e, sizeof(E_));
memcpy(process_noise_, options_.initial_process_noise,
sizeof(process_noise_));
}
OveruseDetector::~OveruseDetector() {
-#ifdef WEBRTC_BWE_MATLAB
- if (plots_.plot1_) {
- eng.DeletePlot(plots_.plot1_);
- plots_.plot1_ = NULL;
- }
- if (plots_.plot2_) {
- eng.DeletePlot(plots_.plot2_);
- plots_.plot2_ = NULL;
- }
- if (plots_.plot3_) {
- eng.DeletePlot(plots_.plot3_);
- plots_.plot3_ = NULL;
- }
- if (plots_.plot4_) {
- eng.DeletePlot(plots_.plot4_);
- plots_.plot4_ = NULL;
- }
-#endif
-
ts_delta_hist_.clear();
}
void OveruseDetector::Update(uint16_t packet_size,
int64_t timestamp_ms,
uint32_t timestamp,
- const int64_t now_ms) {
- time_of_last_received_packet_ = now_ms;
-#ifdef WEBRTC_BWE_MATLAB
- // Create plots
- const int64_t startTimeMs = nowMS;
- if (plots_.plot1_ == NULL) {
- plots_.plot1_ = eng.NewPlot(new MatlabPlot());
- plots_.plot1_->AddLine(1000, "b.", "scatter");
- }
- if (plots_.plot2_ == NULL) {
- plots_.plot2_ = eng.NewPlot(new MatlabPlot());
- plots_.plot2_->AddTimeLine(30, "b", "offset", startTimeMs);
- plots_.plot2_->AddTimeLine(30, "r--", "limitPos", startTimeMs);
- plots_.plot2_->AddTimeLine(30, "k.", "trigger", startTimeMs);
- plots_.plot2_->AddTimeLine(30, "ko", "detection", startTimeMs);
- // plots_.plot2_->AddTimeLine(30, "g", "slowMean", startTimeMs);
- }
- if (plots_.plot3_ == NULL) {
- plots_.plot3_ = eng.NewPlot(new MatlabPlot());
- plots_.plot3_->AddTimeLine(30, "b", "noiseVar", startTimeMs);
- }
- if (plots_.plot4_ == NULL) {
- plots_.plot4_ = eng.NewPlot(new MatlabPlot());
- // plots_.plot4_->AddTimeLine(60, "b", "p11", startTimeMs);
- // plots_.plot4_->AddTimeLine(60, "r", "p12", startTimeMs);
- plots_.plot4_->AddTimeLine(60, "g", "p22", startTimeMs);
- // plots_.plot4_->AddTimeLine(60, "g--", "p22_hat", startTimeMs);
- // plots_.plot4_->AddTimeLine(30, "b.-", "deltaFs", startTimeMs);
- }
-
-#endif
+ const int64_t arrival_time_ms) {
bool new_timestamp = (timestamp != current_frame_.timestamp);
if (timestamp_ms >= 0) {
if (prev_frame_.timestamp_ms == -1 && current_frame_.timestamp_ms == -1) {
@@ -127,8 +66,6 @@ void OveruseDetector::Update(uint16_t packet_size,
return;
} else if (new_timestamp) {
// First packet of a later frame, the previous frame sample is ready.
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "Frame complete at %I64i",
- current_frame_.complete_time_ms);
if (prev_frame_.complete_time_ms >= 0) { // This is our second frame.
int64_t t_delta = 0;
double ts_delta = 0;
@@ -143,7 +80,7 @@ void OveruseDetector::Update(uint16_t packet_size,
}
// Accumulate the frame size
current_frame_.size += packet_size;
- current_frame_.complete_time_ms = now_ms;
+ current_frame_.complete_time_ms = arrival_time_ms;
}
BandwidthUsage OveruseDetector::State() const {
@@ -168,10 +105,6 @@ void OveruseDetector::SetRateControlRegion(RateControlRegion region) {
}
}
-int64_t OveruseDetector::time_of_last_received_packet() const {
- return time_of_last_received_packet_;
-}
-
void OveruseDetector::SwitchTimeBase() {
current_frame_.size = 0;
current_frame_.complete_time_ms = -1;
@@ -249,10 +182,10 @@ void OveruseDetector::UpdateKalman(int64_t t_delta,
const double residual = t_ts_delta - slope_*h[0] - offset_;
const bool stable_state =
- (BWE_MIN(num_of_deltas_, 60) * fabsf(offset_) < threshold_);
+ (BWE_MIN(num_of_deltas_, 60) * fabs(offset_) < threshold_);
// We try to filter out very late frames. For instance, periodic key
// frames don't fit the Gaussian model well.
- if (fabsf(residual) < 3 * sqrt(var_noise_)) {
+ if (fabs(residual) < 3 * sqrt(var_noise_)) {
UpdateNoiseEstimate(residual, min_frame_period, stable_state);
} else {
UpdateNoiseEstimate(3 * sqrt(var_noise_), min_frame_period, stable_state);
@@ -279,39 +212,11 @@ void OveruseDetector::UpdateKalman(int64_t t_delta,
E_[0][0] * E_[1][1] - E_[0][1] * E_[1][0] >= 0 &&
E_[0][0] >= 0);
-#ifdef WEBRTC_BWE_MATLAB
- // plots_.plot4_->Append("p11",E_[0][0]);
- // plots_.plot4_->Append("p12",E_[0][1]);
- plots_.plot4_->Append("p22", E_[1][1]);
- // plots_.plot4_->Append("p22_hat", 0.5*(process_noise_[1] +
- // sqrt(process_noise_[1]*(process_noise_[1] + 4*var_noise_))));
- // plots_.plot4_->Append("deltaFs", fsDelta);
- plots_.plot4_->Plot();
-#endif
slope_ = slope_ + K[0] * residual;
prev_offset_ = offset_;
offset_ = offset_ + K[1] * residual;
Detect(ts_delta);
-
-#ifdef WEBRTC_BWE_MATLAB
- plots_.plot1_->Append("scatter",
- static_cast<double>(current_frame_.size) - prev_frame_.size,
- static_cast<double>(t_delta - ts_delta));
- plots_.plot1_->MakeTrend("scatter", "slope", slope_, offset_, "k-");
- plots_.plot1_->MakeTrend("scatter", "thresholdPos",
- slope_, offset_ + 2 * sqrt(var_noise_), "r-");
- plots_.plot1_->MakeTrend("scatter", "thresholdNeg",
- slope_, offset_ - 2 * sqrt(var_noise_), "r-");
- plots_.plot1_->Plot();
-
- plots_.plot2_->Append("offset", offset_);
- plots_.plot2_->Append("limitPos", threshold_/BWE_MIN(num_of_deltas_, 60));
- plots_.plot2_->Plot();
-
- plots_.plot3_->Append("noiseVar", var_noise_);
- plots_.plot3_->Plot();
-#endif
}
double OveruseDetector::UpdateMinFramePeriod(double ts_delta) {
@@ -358,7 +263,7 @@ BandwidthUsage OveruseDetector::Detect(double ts_delta) {
return kBwNormal;
}
const double T = BWE_MIN(num_of_deltas_, 60) * offset_;
- if (fabsf(T) > threshold_) {
+ if (fabs(T) > threshold_) {
if (offset_ > 0) {
if (time_over_using_ == -1) {
// Initialize the timer. Assume that we've been
@@ -373,38 +278,17 @@ BandwidthUsage OveruseDetector::Detect(double ts_delta) {
if (time_over_using_ > kOverUsingTimeThreshold
&& over_use_counter_ > 1) {
if (offset_ >= prev_offset_) {
-#ifdef _DEBUG
- if (hypothesis_ != kBwOverusing) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwOverusing");
- }
-#endif
time_over_using_ = 0;
over_use_counter_ = 0;
hypothesis_ = kBwOverusing;
-#ifdef WEBRTC_BWE_MATLAB
- plots_.plot2_->Append("detection", offset_); // plot it later
-#endif
}
}
-#ifdef WEBRTC_BWE_MATLAB
- plots_.plot2_->Append("trigger", offset_); // plot it later
-#endif
} else {
-#ifdef _DEBUG
- if (hypothesis_ != kBwUnderusing) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwUnderUsing");
- }
-#endif
time_over_using_ = -1;
over_use_counter_ = 0;
hypothesis_ = kBwUnderusing;
}
} else {
-#ifdef _DEBUG
- if (hypothesis_ != kBwNormal) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwNormal");
- }
-#endif
time_over_using_ = -1;
over_use_counter_ = 0;
hypothesis_ = kBwNormal;
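
Besides stripping the MATLAB plotting and trace macros, this hunk quietly fixes a precision issue: offset_, residual and T are doubles, and the old fabsf calls narrowed them to float before taking the absolute value, whereas fabs keeps full double precision. A standalone illustration of the difference:

#include <math.h>
#include <stdio.h>

int main() {
  const double offset = 10000000.001;  // carries more precision than a float holds
  printf("fabsf: %.3f\n", static_cast<double>(fabsf(offset)));  // prints 10000000.000
  printf("fabs:  %.3f\n", fabs(offset));                        // prints 10000000.001
  return 0;
}
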
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.h
index 57f4ddf04f7..9c565e45f18 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.h
@@ -16,10 +16,6 @@
#include "webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h"
#include "webrtc/typedefs.h"
-#ifdef WEBRTC_BWE_MATLAB
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h"
-#endif
-
namespace webrtc {
enum RateControlRegion;
@@ -32,11 +28,10 @@ class OveruseDetector {
void Update(uint16_t packet_size,
int64_t timestamp_ms,
uint32_t rtp_timestamp,
- int64_t now_ms);
+ int64_t arrival_time_ms);
BandwidthUsage State() const;
double NoiseVar() const;
void SetRateControlRegion(RateControlRegion region);
- int64_t time_of_last_received_packet() const;
private:
struct FrameSample {
@@ -52,16 +47,6 @@ class OveruseDetector {
int64_t timestamp_ms;
};
- struct DebugPlots {
-#ifdef WEBRTC_BWE_MATLAB
- DebugPlots() : plot1(NULL), plot2(NULL), plot3(NULL), plot4(NULL) {}
- MatlabPlot* plot1;
- MatlabPlot* plot2;
- MatlabPlot* plot3;
- MatlabPlot* plot4;
-#endif
- };
-
// Returns true if |timestamp| represent a time which is later than
// |prev_timestamp|.
static bool InOrderTimestamp(uint32_t timestamp, uint32_t prev_timestamp);
@@ -103,10 +88,6 @@ class OveruseDetector {
double time_over_using_;
uint16_t over_use_counter_;
BandwidthUsage hypothesis_;
- int64_t time_of_last_received_packet_;
-#ifdef WEBRTC_BWE_MATLAB
- DebugPlots plots_;
-#endif
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc
index 4a9b4488108..48485ffb551 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h"
+#include <assert.h>
+
namespace webrtc {
RateStatistics::RateStatistics(uint32_t window_size_ms, float scale)
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.h
index 429669059a2..f97371bd621 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.h
@@ -34,7 +34,7 @@ class RateStatistics {
// Counters are kept in buckets (circular buffer), with one bucket
// per millisecond.
const int num_buckets_;
- scoped_array<uint32_t> buckets_;
+ scoped_ptr<uint32_t[]> buckets_;
// Total count recorded in buckets.
uint32_t accumulated_count_;
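
scoped_array is retired here in favour of the array form of scoped_ptr. As a rough standard-library analogue (std::unique_ptr<uint32_t[]> standing in for webrtc's scoped_ptr, purely for illustration), the bucket storage amounts to:

#include <cstdint>
#include <memory>

int main() {
  const int num_buckets = 500;  // e.g. one bucket per millisecond of the window
  std::unique_ptr<uint32_t[]> buckets(new uint32_t[num_buckets]());  // zero-initialised
  buckets[0] += 1200;           // operator[] access; delete[] runs on destruction
  return buckets[0] == 1200 ? 0 : 1;
}
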
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
index bbd353fcdd4..c2f1b3da475 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
@@ -21,10 +21,66 @@
'sources': [
'include/bwe_defines.h',
'include/remote_bitrate_estimator.h',
- 'include/rtp_to_ntp.h',
'rate_statistics.cc',
'rate_statistics.h',
- 'rtp_to_ntp.cc',
+ ], # source
+ },
+ {
+ 'target_name': 'bwe_tools_util',
+ 'type': 'static_library',
+ 'dependencies': [
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ 'rtp_rtcp',
+ ],
+ 'sources': [
+ 'tools/bwe_rtp.cc',
+ 'tools/bwe_rtp.h',
+ ],
+ },
+ {
+ 'target_name': 'bwe_rtp_to_text',
+ 'type': 'executable',
+ 'includes': [
+ '../rtp_rtcp/source/rtp_rtcp.gypi',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
+ 'bwe_tools_util',
+ 'rtp_rtcp',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'include',
+ ],
+ },
+ 'sources': [
+ 'tools/rtp_to_text.cc',
+ '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.cc',
+ '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.h',
+ ], # source
+ },
+ {
+ 'target_name': 'bwe_rtp_play',
+ 'type': 'executable',
+ 'includes': [
+ '../rtp_rtcp/source/rtp_rtcp.gypi',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
+ 'bwe_tools_util',
+ 'rtp_rtcp',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'include',
+ ],
+ },
+ 'sources': [
+ 'tools/bwe_rtp_play.cc',
+ '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.cc',
+ '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.h',
], # source
},
], # targets
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
index a544ee5d034..08422d28b23 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
@@ -16,8 +16,8 @@
#include "webrtc/modules/remote_bitrate_estimator/remote_rate_control.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -55,11 +55,31 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator {
virtual bool LatestEstimate(std::vector<unsigned int>* ssrcs,
unsigned int* bitrate_bps) const OVERRIDE;
+ virtual bool GetStats(
+ ReceiveBandwidthEstimatorStats* output) const OVERRIDE;
+
private:
- typedef std::map<unsigned int, OveruseDetector> SsrcOveruseDetectorMap;
+ // Map from SSRC to over-use detector and last incoming packet time in
+ // milliseconds, taken from clock_.
+ typedef std::map<unsigned int, std::pair<OveruseDetector, int64_t> >
+ SsrcOveruseDetectorMap;
+
+ static OveruseDetector* GetDetector(
+ const SsrcOveruseDetectorMap::iterator it) {
+ return &it->second.first;
+ }
+
+ static int64_t GetPacketTimeMs(const SsrcOveruseDetectorMap::iterator it) {
+ return it->second.second;
+ }
+
+ static void SetPacketTimeMs(SsrcOveruseDetectorMap::iterator it,
+ int64_t time_ms) {
+ it->second.second = time_ms;
+ }
// Triggers a new estimate calculation.
- void UpdateEstimate(int64_t time_now);
+ void UpdateEstimate(int64_t now_ms);
void GetSsrcs(std::vector<unsigned int>* ssrcs) const;
@@ -92,6 +112,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket(
uint32_t ssrc = header.ssrc;
uint32_t rtp_timestamp = header.timestamp +
header.extension.transmissionTimeOffset;
+ int64_t now_ms = clock_->TimeInMilliseconds();
CriticalSectionScoped cs(crit_sect_.get());
SsrcOveruseDetectorMap::iterator it = overuse_detectors_.find(ssrc);
if (it == overuse_detectors_.end()) {
@@ -102,22 +123,23 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket(
// automatically cleaned up when we have one RemoteBitrateEstimator per REMB
// group.
std::pair<SsrcOveruseDetectorMap::iterator, bool> insert_result =
- overuse_detectors_.insert(std::make_pair(ssrc, OveruseDetector(
- OverUseDetectorOptions())));
+ overuse_detectors_.insert(std::make_pair(ssrc,
+ std::make_pair(OveruseDetector(OverUseDetectorOptions()), now_ms)));
it = insert_result.first;
}
- OveruseDetector* overuse_detector = &it->second;
- incoming_bitrate_.Update(payload_size, arrival_time_ms);
+ SetPacketTimeMs(it, now_ms);
+ OveruseDetector* overuse_detector = GetDetector(it);
+ incoming_bitrate_.Update(payload_size, now_ms);
const BandwidthUsage prior_state = overuse_detector->State();
overuse_detector->Update(payload_size, -1, rtp_timestamp, arrival_time_ms);
if (overuse_detector->State() == kBwOverusing) {
- unsigned int incoming_bitrate = incoming_bitrate_.Rate(arrival_time_ms);
+ unsigned int incoming_bitrate = incoming_bitrate_.Rate(now_ms);
if (prior_state != kBwOverusing ||
- remote_rate_.TimeToReduceFurther(arrival_time_ms, incoming_bitrate)) {
+ remote_rate_.TimeToReduceFurther(now_ms, incoming_bitrate)) {
// The first overuse should immediately trigger a new estimate.
// We also have to update the estimate immediately if we are overusing
// and the target bitrate is too high compared to what we are receiving.
- UpdateEstimate(arrival_time_ms);
+ UpdateEstimate(now_ms);
}
}
}
@@ -126,8 +148,9 @@ int32_t RemoteBitrateEstimatorSingleStream::Process() {
if (TimeUntilNextProcess() > 0) {
return 0;
}
- UpdateEstimate(clock_->TimeInMilliseconds());
- last_process_time_ = clock_->TimeInMilliseconds();
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ UpdateEstimate(now_ms);
+ last_process_time_ = now_ms;
return 0;
}
@@ -138,25 +161,24 @@ int32_t RemoteBitrateEstimatorSingleStream::TimeUntilNextProcess() {
return last_process_time_ + kProcessIntervalMs - clock_->TimeInMilliseconds();
}
-void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t time_now) {
+void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) {
CriticalSectionScoped cs(crit_sect_.get());
BandwidthUsage bw_state = kBwNormal;
double sum_noise_var = 0.0;
SsrcOveruseDetectorMap::iterator it = overuse_detectors_.begin();
while (it != overuse_detectors_.end()) {
- const int64_t time_of_last_received_packet =
- it->second.time_of_last_received_packet();
- if (time_of_last_received_packet >= 0 &&
- time_now - time_of_last_received_packet > kStreamTimeOutMs) {
+ if (GetPacketTimeMs(it) >= 0 &&
+ now_ms - GetPacketTimeMs(it) > kStreamTimeOutMs) {
// This over-use detector hasn't received packets for |kStreamTimeOutMs|
// milliseconds and is considered stale.
overuse_detectors_.erase(it++);
} else {
- sum_noise_var += it->second.NoiseVar();
+ OveruseDetector* overuse_detector = GetDetector(it);
+ sum_noise_var += overuse_detector->NoiseVar();
// Make sure that we trigger an over-use if any of the over-use detectors
// is detecting over-use.
- if (it->second.State() > bw_state) {
- bw_state = it->second.State();
+ if (overuse_detector->State() > bw_state) {
+ bw_state = overuse_detector->State();
}
++it;
}
@@ -169,17 +191,17 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t time_now) {
double mean_noise_var = sum_noise_var /
static_cast<double>(overuse_detectors_.size());
const RateControlInput input(bw_state,
- incoming_bitrate_.Rate(time_now),
+ incoming_bitrate_.Rate(now_ms),
mean_noise_var);
- const RateControlRegion region = remote_rate_.Update(&input, time_now);
- unsigned int target_bitrate = remote_rate_.UpdateBandwidthEstimate(time_now);
+ const RateControlRegion region = remote_rate_.Update(&input, now_ms);
+ unsigned int target_bitrate = remote_rate_.UpdateBandwidthEstimate(now_ms);
if (remote_rate_.ValidEstimate()) {
std::vector<unsigned int> ssrcs;
GetSsrcs(&ssrcs);
observer_->OnReceiveBitrateChanged(ssrcs, target_bitrate);
}
for (it = overuse_detectors_.begin(); it != overuse_detectors_.end(); ++it) {
- it->second.SetRateControlRegion(region);
+ GetDetector(it)->SetRateControlRegion(region);
}
}
@@ -210,6 +232,12 @@ bool RemoteBitrateEstimatorSingleStream::LatestEstimate(
return true;
}
+bool RemoteBitrateEstimatorSingleStream::GetStats(
+ ReceiveBandwidthEstimatorStats* output) const {
+ // Not implemented.
+ return false;
+}
+
void RemoteBitrateEstimatorSingleStream::GetSsrcs(
std::vector<unsigned int>* ssrcs) const {
assert(ssrcs);
@@ -225,9 +253,9 @@ void RemoteBitrateEstimatorSingleStream::GetSsrcs(
RemoteBitrateEstimator* RemoteBitrateEstimatorFactory::Create(
RemoteBitrateObserver* observer,
Clock* clock,
+ RateControlType control_type,
uint32_t min_bitrate_bps) const {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRemoteBitrateEstimator, -1,
- "RemoteBitrateEstimatorFactory: Instantiating.");
+ LOG(LS_INFO) << "RemoteBitrateEstimatorFactory: Instantiating.";
return new RemoteBitrateEstimatorSingleStream(observer, clock,
min_bitrate_bps);
}
@@ -235,9 +263,10 @@ RemoteBitrateEstimator* RemoteBitrateEstimatorFactory::Create(
RemoteBitrateEstimator* AbsoluteSendTimeRemoteBitrateEstimatorFactory::Create(
RemoteBitrateObserver* observer,
Clock* clock,
+ RateControlType control_type,
uint32_t min_bitrate_bps) const {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRemoteBitrateEstimator, -1,
- "AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
+ LOG(LS_INFO) << "AbsoluteSendTimeRemoteBitrateEstimatorFactory: "
+ "Instantiating.";
return new RemoteBitrateEstimatorSingleStream(observer, clock,
min_bitrate_bps);
}
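
The single-stream estimator now keys each SSRC to a (detector, last-packet-time) pair and, in UpdateEstimate(), erases entries that have been silent for longer than kStreamTimeOutMs. A standalone sketch of that timeout sweep; DummyDetector stands in for OveruseDetector, and the SSRCs and times are made up:

#include <cstdint>
#include <cstdio>
#include <map>
#include <utility>

struct DummyDetector { int state; DummyDetector() : state(0) {} };

int main() {
  const int64_t kStreamTimeOutMs = 2000;
  std::map<unsigned int, std::pair<DummyDetector, int64_t> > detectors;
  detectors[0x1234] = std::make_pair(DummyDetector(), static_cast<int64_t>(1000));
  detectors[0x5678] = std::make_pair(DummyDetector(), static_cast<int64_t>(4500));
  const int64_t now_ms = 5000;
  for (std::map<unsigned int, std::pair<DummyDetector, int64_t> >::iterator it =
           detectors.begin(); it != detectors.end();) {
    if (it->second.second >= 0 && now_ms - it->second.second > kStreamTimeOutMs) {
      detectors.erase(it++);  // stream 0x1234 has timed out and is dropped
    } else {
      ++it;
    }
  }
  std::printf("streams left: %d\n", static_cast<int>(detectors.size()));
  return 0;
}
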
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
index a3e44d87ef5..f67c7f34fc0 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
@@ -10,8 +10,8 @@
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
namespace webrtc {
@@ -24,6 +24,7 @@ class RemoteBitrateEstimatorSingleTest : public RemoteBitrateEstimatorTest {
bitrate_estimator_.reset(RemoteBitrateEstimatorFactory().Create(
bitrate_observer_.get(),
&clock_,
+ kMimdControl,
kRemoteBitrateEstimatorMinBitrateBps));
}
protected:
@@ -35,7 +36,7 @@ TEST_F(RemoteBitrateEstimatorSingleTest, InitialBehavior) {
}
TEST_F(RemoteBitrateEstimatorSingleTest, RateIncreaseReordering) {
- RateIncreaseReorderingTestHelper();
+ RateIncreaseReorderingTestHelper(498136);
}
TEST_F(RemoteBitrateEstimatorSingleTest, RateIncreaseRtpTimestamps) {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
index 88ffe061b99..1b38a1ea306 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
@@ -226,7 +226,8 @@ void RemoteBitrateEstimatorTest::IncomingPacket(uint32_t ssrc,
header.ssrc = ssrc;
header.timestamp = rtp_timestamp;
header.extension.absoluteSendTime = absolute_send_time;
- bitrate_estimator_->IncomingPacket(arrival_time, payload_size, header);
+ bitrate_estimator_->IncomingPacket(arrival_time + kArrivalTimeClockOffsetMs,
+ payload_size, header);
}
// Generates a frame of packets belonging to a stream at a given bitrate and
@@ -245,6 +246,10 @@ bool RemoteBitrateEstimatorTest::GenerateAndProcessFrame(unsigned int ssrc,
while (!packets.empty()) {
testing::RtpStream::RtpPacket* packet = packets.front();
bitrate_observer_->Reset();
+ // The simulated clock should match the time of packet->arrival_time
+ // since both are used in IncomingPacket().
+ clock_.AdvanceTimeMicroseconds(packet->arrival_time -
+ clock_.TimeInMicroseconds());
IncomingPacket(packet->ssrc,
packet->size,
(packet->arrival_time + 500) / 1000,
@@ -256,8 +261,6 @@ bool RemoteBitrateEstimatorTest::GenerateAndProcessFrame(unsigned int ssrc,
overuse = true;
EXPECT_LE(bitrate_observer_->latest_bitrate(), bitrate_bps);
}
- clock_.AdvanceTimeMicroseconds(packet->arrival_time -
- clock_.TimeInMicroseconds());
delete packet;
packets.pop_front();
}
@@ -341,9 +344,14 @@ void RemoteBitrateEstimatorTest::InitialBehaviorTestHelper(
EXPECT_TRUE(bitrate_observer_->updated());
bitrate_observer_->Reset();
EXPECT_EQ(bitrate_observer_->latest_bitrate(), bitrate_bps);
+ bitrate_estimator_->RemoveStream(kDefaultSsrc);
+ EXPECT_TRUE(bitrate_estimator_->LatestEstimate(&ssrcs, &bitrate_bps));
+ ASSERT_EQ(0u, ssrcs.size());
+ EXPECT_EQ(0u, bitrate_bps);
}
-void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper() {
+void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper(
+ uint32_t expected_bitrate_bps) {
const int kFramerate = 50; // 50 fps to avoid rounding errors.
const int kFrameIntervalMs = 1000 / kFramerate;
const uint32_t kFrameIntervalAbsSendTime = AbsSendTime(1, kFramerate);
@@ -364,7 +372,7 @@ void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper() {
}
bitrate_estimator_->Process();
EXPECT_TRUE(bitrate_observer_->updated());
- EXPECT_EQ(498136u, bitrate_observer_->latest_bitrate());
+ EXPECT_EQ(expected_bitrate_bps, bitrate_observer_->latest_bitrate());
for (int i = 0; i < 10; ++i) {
clock_.AdvanceTimeMilliseconds(2 * kFrameIntervalMs);
timestamp += 2 * 90 * kFrameIntervalMs;
@@ -379,7 +387,7 @@ void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper() {
}
bitrate_estimator_->Process();
EXPECT_TRUE(bitrate_observer_->updated());
- EXPECT_EQ(498136u, bitrate_observer_->latest_bitrate());
+ EXPECT_EQ(expected_bitrate_bps, bitrate_observer_->latest_bitrate());
}
// Make sure we initially increase the bitrate as expected.
@@ -486,5 +494,21 @@ void RemoteBitrateEstimatorTest::CapacityDropTestHelper(
EXPECT_EQ(expected_bitrate_drop_delta,
bitrate_drop_time - overuse_start_time);
+
+ // Remove stream one by one.
+ unsigned int latest_bps = 0;
+ std::vector<unsigned int> ssrcs;
+ for (int i = 0; i < number_of_streams; i++) {
+ EXPECT_TRUE(bitrate_estimator_->LatestEstimate(&ssrcs, &latest_bps));
+ EXPECT_EQ(number_of_streams - i, static_cast<int>(ssrcs.size()));
+ EXPECT_EQ(bitrate_bps, latest_bps);
+ for (int j = i; j < number_of_streams; j++) {
+ EXPECT_EQ(kDefaultSsrc + j, ssrcs[j - i]);
+ }
+ bitrate_estimator_->RemoveStream(kDefaultSsrc + i);
+ }
+ EXPECT_TRUE(bitrate_estimator_->LatestEstimate(&ssrcs, &latest_bps));
+ EXPECT_EQ(0u, ssrcs.size());
+ EXPECT_EQ(0u, latest_bps);
}
} // namespace webrtc
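
The helper above now shifts every arrival time by kArrivalTimeClockOffsetMs (60 s) before handing it to IncomingPacket(), exercising the "arbitrary time base" note added to the estimator header earlier in this patch: the over-use detector consumes inter-arrival deltas, and a constant offset cancels out of those. A standalone check of that arithmetic:

#include <cstdint>
#include <cstdio>

int main() {
  const int64_t kArrivalTimeClockOffsetMs = 60000;
  const int64_t arrival_ms[] = {10, 43, 77};
  for (int i = 1; i < 3; ++i) {
    const int64_t delta = arrival_ms[i] - arrival_ms[i - 1];
    const int64_t shifted_delta =
        (arrival_ms[i] + kArrivalTimeClockOffsetMs) -
        (arrival_ms[i - 1] + kArrivalTimeClockOffsetMs);
    std::printf("delta=%d shifted delta=%d\n",
                static_cast<int>(delta), static_cast<int>(shifted_delta));
  }
  return 0;
}
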
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
index 64830dab70f..1d748c57b9f 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
@@ -16,9 +16,9 @@
#include <utility>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
@@ -190,7 +190,7 @@ class RemoteBitrateEstimatorTest : public ::testing::Test {
unsigned int target_bitrate);
void InitialBehaviorTestHelper(unsigned int expected_converge_bitrate);
- void RateIncreaseReorderingTestHelper();
+ void RateIncreaseReorderingTestHelper(unsigned int expected_bitrate);
void RateIncreaseRtpTimestampsTestHelper();
void CapacityDropTestHelper(int number_of_streams,
bool wrap_time_stamp,
@@ -198,6 +198,7 @@ class RemoteBitrateEstimatorTest : public ::testing::Test {
unsigned int expected_bitrate_drop_delta);
static const unsigned int kDefaultSsrc;
+ static const int kArrivalTimeClockOffsetMs = 60000;
SimulatedClock clock_; // Time at the receiver.
scoped_ptr<testing::TestBitrateObserver> bitrate_observer_;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
index ed8e5c555ee..67b60848148 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
@@ -8,90 +8,95 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <sstream>
+
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/perf_test.h"
+
+using std::string;
namespace webrtc {
namespace testing {
namespace bwe {
+enum Estimator { kAbsSendTime, kTransmissionOffset };
-std::vector<const PacketSenderFactory*> VideoSenderFactories(uint32_t count) {
- class VideoPacketSenderFactory : public PacketSenderFactory {
- public:
- VideoPacketSenderFactory(float fps, uint32_t kbps, uint32_t ssrc,
- float frame_offset)
- : fps_(fps),
- kbps_(kbps),
- ssrc_(ssrc),
- frame_offset_(frame_offset) {
- }
- virtual ~VideoPacketSenderFactory() {}
- virtual PacketSender* Create() const {
- return new VideoSender(NULL, fps_, kbps_, ssrc_, frame_offset_);
- }
- private:
- float fps_;
- uint32_t kbps_;
- uint32_t ssrc_;
- float frame_offset_;
- };
-
- static const VideoPacketSenderFactory factories[] = {
- VideoPacketSenderFactory(30.00f, 150, 0x1234, 0.13f),
- VideoPacketSenderFactory(15.00f, 500, 0x2345, 0.16f),
- VideoPacketSenderFactory(30.00f, 1200, 0x3456, 0.26f),
- VideoPacketSenderFactory(7.49f, 150, 0x4567, 0.05f),
- VideoPacketSenderFactory(7.50f, 150, 0x5678, 0.15f),
- VideoPacketSenderFactory(7.51f, 150, 0x6789, 0.25f),
- VideoPacketSenderFactory(15.02f, 150, 0x7890, 0.27f),
- VideoPacketSenderFactory(15.03f, 150, 0x8901, 0.38f),
- VideoPacketSenderFactory(30.02f, 150, 0x9012, 0.39f),
- VideoPacketSenderFactory(30.03f, 150, 0x0123, 0.52f)
- };
- assert(count <= sizeof(factories) / sizeof(factories[0]));
-
- std::vector<const PacketSenderFactory*> result;
- for (uint32_t i = 0; i < count; ++i) {
- result.push_back(&factories[i]);
- }
- return result;
-}
-
-std::vector<BweTestConfig::EstimatorConfig> EstimatorConfigs() {
+BweTestConfig::EstimatorConfig EstimatorConfigs(Estimator estimator,
+ int flow_id) {
static const RemoteBitrateEstimatorFactory factories[] = {
RemoteBitrateEstimatorFactory(),
AbsoluteSendTimeRemoteBitrateEstimatorFactory()
};
-
- std::vector<BweTestConfig::EstimatorConfig> result;
- result.push_back(BweTestConfig::EstimatorConfig("TOF", &factories[0]));
- result.push_back(BweTestConfig::EstimatorConfig("AST", &factories[1]));
- return result;
+ switch (estimator) {
+ case kTransmissionOffset:
+ return BweTestConfig::EstimatorConfig("TOF", flow_id, &factories[0],
+ kMimdControl, false, false);
+ case kAbsSendTime:
+ return BweTestConfig::EstimatorConfig("AST", flow_id, &factories[1],
+ kMimdControl, false, false);
+ }
+ assert(false);
+ return BweTestConfig::EstimatorConfig();
}
-BweTestConfig MakeBweTestConfig(uint32_t sender_count) {
- BweTestConfig result = {
- VideoSenderFactories(sender_count), EstimatorConfigs()
- };
+struct DefaultBweTestConfig {
+ BweTestConfig bwe_test_config;
+ size_t number_of_senders;
+};
+
+DefaultBweTestConfig MakeBweTestConfig(uint32_t sender_count,
+ Estimator estimator) {
+ DefaultBweTestConfig result;
+ result.bwe_test_config.estimator_configs.push_back(
+ EstimatorConfigs(estimator, 0));
+ result.number_of_senders = sender_count;
return result;
}
-INSTANTIATE_TEST_CASE_P(VideoSendersTest, BweTest,
- ::testing::Values(MakeBweTestConfig(1),
- MakeBweTestConfig(3)));
+class DefaultBweTest : public BweTest,
+ public ::testing::TestWithParam<DefaultBweTestConfig> {
+ public:
+ DefaultBweTest() : packet_senders_() {}
+ virtual ~DefaultBweTest() {}
+
+ virtual void SetUp() {
+ const DefaultBweTestConfig& config = GetParam();
+ SetupTestFromConfig(config.bwe_test_config);
+ for (size_t i = 0; i < config.number_of_senders; ++i) {
+ packet_senders_.push_back(new VideoSender(0, this, 30, 300, 0, 0));
+ }
+ }
+
+ virtual void TearDown() {
+ while (!packet_senders_.empty()) {
+ delete packet_senders_.front();
+ packet_senders_.pop_front();
+ }
+ }
+
+ protected:
+ std::list<PacketSender*> packet_senders_;
+};
+
+INSTANTIATE_TEST_CASE_P(VideoSendersTest, DefaultBweTest,
+ ::testing::Values(MakeBweTestConfig(1, kAbsSendTime),
+ MakeBweTestConfig(3, kAbsSendTime),
+ MakeBweTestConfig(1, kTransmissionOffset),
+ MakeBweTestConfig(3, kTransmissionOffset)));
-TEST_P(BweTest, UnlimitedSpeed) {
+TEST_P(DefaultBweTest, UnlimitedSpeed) {
VerboseLogging(false);
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, SteadyLoss) {
+TEST_P(DefaultBweTest, DISABLED_SteadyLoss) {
LossFilter loss(this);
loss.SetLoss(20.0);
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingLoss1) {
+TEST_P(DefaultBweTest, IncreasingLoss1) {
LossFilter loss(this);
for (int i = 0; i < 76; ++i) {
loss.SetLoss(i);
@@ -99,13 +104,13 @@ TEST_P(BweTest, IncreasingLoss1) {
}
}
-TEST_P(BweTest, SteadyDelay) {
+TEST_P(DefaultBweTest, SteadyDelay) {
DelayFilter delay(this);
delay.SetDelay(1000);
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingDelay1) {
+TEST_P(DefaultBweTest, DISABLED_IncreasingDelay1) {
DelayFilter delay(this);
RunFor(10 * 60 * 1000);
for (int i = 0; i < 30 * 2; ++i) {
@@ -115,7 +120,7 @@ TEST_P(BweTest, IncreasingDelay1) {
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingDelay2) {
+TEST_P(DefaultBweTest, IncreasingDelay2) {
DelayFilter delay(this);
RateCounterFilter counter(this);
RunFor(1 * 60 * 1000);
@@ -127,7 +132,7 @@ TEST_P(BweTest, IncreasingDelay2) {
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, JumpyDelay1) {
+TEST_P(DefaultBweTest, JumpyDelay1) {
DelayFilter delay(this);
RunFor(10 * 60 * 1000);
for (int i = 1; i < 200; ++i) {
@@ -140,14 +145,14 @@ TEST_P(BweTest, JumpyDelay1) {
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, SteadyJitter) {
+TEST_P(DefaultBweTest, SteadyJitter) {
JitterFilter jitter(this);
RateCounterFilter counter(this);
jitter.SetJitter(20);
RunFor(2 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingJitter1) {
+TEST_P(DefaultBweTest, IncreasingJitter1) {
JitterFilter jitter(this);
for (int i = 0; i < 2 * 60 * 2; ++i) {
jitter.SetJitter(i);
@@ -156,7 +161,7 @@ TEST_P(BweTest, IncreasingJitter1) {
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingJitter2) {
+TEST_P(DefaultBweTest, IncreasingJitter2) {
JitterFilter jitter(this);
RunFor(30 * 1000);
for (int i = 1; i < 51; ++i) {
@@ -167,13 +172,13 @@ TEST_P(BweTest, IncreasingJitter2) {
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, SteadyReorder) {
+TEST_P(DefaultBweTest, SteadyReorder) {
ReorderFilter reorder(this);
reorder.SetReorder(20.0);
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingReorder1) {
+TEST_P(DefaultBweTest, IncreasingReorder1) {
ReorderFilter reorder(this);
for (int i = 0; i < 76; ++i) {
reorder.SetReorder(i);
@@ -181,13 +186,13 @@ TEST_P(BweTest, IncreasingReorder1) {
}
}
-TEST_P(BweTest, SteadyChoke) {
+TEST_P(DefaultBweTest, DISABLED_SteadyChoke) {
ChokeFilter choke(this);
choke.SetCapacity(140);
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingChoke1) {
+TEST_P(DefaultBweTest, DISABLED_IncreasingChoke1) {
ChokeFilter choke(this);
for (int i = 1200; i >= 100; i -= 100) {
choke.SetCapacity(i);
@@ -195,7 +200,7 @@ TEST_P(BweTest, IncreasingChoke1) {
}
}
-TEST_P(BweTest, IncreasingChoke2) {
+TEST_P(DefaultBweTest, DISABLED_IncreasingChoke2) {
ChokeFilter choke(this);
RunFor(60 * 1000);
for (int i = 1200; i >= 100; i -= 20) {
@@ -204,7 +209,7 @@ TEST_P(BweTest, IncreasingChoke2) {
}
}
-TEST_P(BweTest, Multi1) {
+TEST_P(DefaultBweTest, DISABLED_Multi1) {
DelayFilter delay(this);
ChokeFilter choke(this);
RateCounterFilter counter(this);
@@ -219,7 +224,7 @@ TEST_P(BweTest, Multi1) {
RunFor(5 * 60 * 1000);
}
-TEST_P(BweTest, Multi2) {
+TEST_P(DefaultBweTest, Multi2) {
ChokeFilter choke(this);
JitterFilter jitter(this);
RateCounterFilter counter(this);
@@ -227,6 +232,108 @@ TEST_P(BweTest, Multi2) {
jitter.SetJitter(120);
RunFor(5 * 60 * 1000);
}
+
+// This test fixture is used to instantiate tests running with adaptive video
+// senders.
+class BweFeedbackTest : public BweTest,
+ public ::testing::TestWithParam<BweTestConfig> {
+ public:
+ BweFeedbackTest() : BweTest() {}
+ virtual ~BweFeedbackTest() {}
+
+ virtual void SetUp() {
+ BweTestConfig config;
+ config.estimator_configs.push_back(EstimatorConfigs(kAbsSendTime, 0));
+ SetupTestFromConfig(config);
+ }
+
+ void PrintResults(double max_throughput_kbps, Stats<double> throughput_kbps,
+ Stats<double> delay_ms) {
+ double utilization = throughput_kbps.GetMean() / max_throughput_kbps;
+ webrtc::test::PrintResult("BwePerformance",
+ GetTestName(),
+ "Utilization",
+ utilization * 100.0,
+ "%",
+ false);
+ std::stringstream ss;
+ ss << throughput_kbps.GetStdDev() / throughput_kbps.GetMean();
+ webrtc::test::PrintResult("BwePerformance",
+ GetTestName(),
+ "Utilization var coeff",
+ ss.str(),
+ "",
+ false);
+ webrtc::test::PrintResult("BwePerformance",
+ GetTestName(),
+ "Average delay",
+ delay_ms.AsString(),
+ "ms",
+ false);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(BweFeedbackTest);
+};
+
+TEST_F(BweFeedbackTest, Choke1000kbps500kbps1000kbps) {
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ ChokeFilter filter(this);
+ RateCounterFilter counter(this, "receiver_input");
+ const int kHighCapacityKbps = 1000;
+ const int kLowCapacityKbps = 500;
+ filter.SetCapacity(kHighCapacityKbps);
+ filter.SetMaxDelay(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(kLowCapacityKbps);
+ RunFor(60 * 1000);
+ filter.SetCapacity(kHighCapacityKbps);
+ RunFor(60 * 1000);
+ PrintResults((2 * kHighCapacityKbps + kLowCapacityKbps) / 3.0,
+ counter.GetBitrateStats(), filter.GetDelayStats());
+}
+
+TEST_F(BweFeedbackTest, Choke200kbps30kbps200kbps) {
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ ChokeFilter filter(this);
+ RateCounterFilter counter(this, "receiver_input");
+ const int kHighCapacityKbps = 200;
+ const int kLowCapacityKbps = 30;
+ filter.SetCapacity(kHighCapacityKbps);
+ filter.SetMaxDelay(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(kLowCapacityKbps);
+ RunFor(60 * 1000);
+ filter.SetCapacity(kHighCapacityKbps);
+ RunFor(60 * 1000);
+
+ PrintResults((2 * kHighCapacityKbps + kLowCapacityKbps) / 3.0,
+ counter.GetBitrateStats(), filter.GetDelayStats());
+}
+
+TEST_F(BweFeedbackTest, Verizon4gDownlinkTest) {
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ RateCounterFilter counter1(this, "sender_output");
+ TraceBasedDeliveryFilter filter(this, "link_capacity");
+ RateCounterFilter counter2(this, "receiver_input");
+ ASSERT_TRUE(filter.Init(test::ResourcePath("verizon4g-downlink", "rx")));
+ RunFor(22 * 60 * 1000);
+ PrintResults(filter.GetBitrateStats().GetMean(), counter2.GetBitrateStats(),
+ filter.GetDelayStats());
+}
+
+// webrtc:3277
+TEST_F(BweFeedbackTest, DISABLED_GoogleWifiTrace3Mbps) {
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ RateCounterFilter counter1(this, "sender_output");
+ TraceBasedDeliveryFilter filter(this, "link_capacity");
+ filter.SetMaxDelay(500);
+ RateCounterFilter counter2(this, "receiver_input");
+ ASSERT_TRUE(filter.Init(test::ResourcePath("google-wifi-3mbps", "rx")));
+ RunFor(300 * 1000);
+ PrintResults(filter.GetBitrateStats().GetMean(), counter2.GetBitrateStats(),
+ filter.GetDelayStats());
+}
} // namespace bwe
} // namespace testing
} // namespace webrtc
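
The rewritten test file above splits the old parameterization into DefaultBweTest (a fixture deriving from both BweTest and ::testing::TestWithParam<DefaultBweTestConfig>) plus the non-parameterized BweFeedbackTest. The standalone sketch below shows the same gtest pattern in isolation — a value-parameterized fixture whose SetUp() reads GetParam(); the Config struct, MakeConfig() helper, and test body are illustrative assumptions, not WebRTC code.

#include <cstddef>

#include "testing/gtest/include/gtest/gtest.h"

struct Config {
  size_t number_of_senders;
};

Config MakeConfig(size_t n) {
  Config c;
  c.number_of_senders = n;
  return c;
}

class ParamFixture : public ::testing::TestWithParam<Config> {
 protected:
  virtual void SetUp() {
    // Each instantiated test case sees its own Config through GetParam().
    senders_ = GetParam().number_of_senders;
  }
  size_t senders_;
};

TEST_P(ParamFixture, HasAtLeastOneSender) {
  EXPECT_GE(senders_, 1u);
}

INSTANTIATE_TEST_CASE_P(SenderCounts, ParamFixture,
                        ::testing::Values(MakeConfig(1), MakeConfig(3)));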
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc
index 994abdbee89..dda36a765e9 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc
@@ -122,8 +122,6 @@ RateControlRegion RemoteRateControl::Update(const RateControlInput* input,
}
updated_ = true;
current_input_ = *input;
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: Incoming rate = %u kbps",
- input->_incomingBitRate/1000);
return rate_control_region_;
}
@@ -158,18 +156,11 @@ uint32_t RemoteRateControl::ChangeBitRate(uint32_t current_bit_rate,
ChangeRegion(kRcAboveMax);
}
}
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "BWE: Response time: %f + %i + 10*33\n",
- avg_change_period_, rtt_);
const uint32_t response_time = static_cast<uint32_t>(avg_change_period_ +
0.5f) + rtt_ + 300;
double alpha = RateIncreaseFactor(now_ms, last_bit_rate_change_,
response_time, noise_var);
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "BWE: avg_change_period_ = %f ms; RTT = %u ms", avg_change_period_,
- rtt_);
-
current_bit_rate = static_cast<uint32_t>(current_bit_rate * alpha) + 1000;
if (max_hold_rate_ > 0 && beta_ * max_hold_rate_ > current_bit_rate) {
current_bit_rate = static_cast<uint32_t>(beta_ * max_hold_rate_);
@@ -178,9 +169,6 @@ uint32_t RemoteRateControl::ChangeBitRate(uint32_t current_bit_rate,
recovery = true;
}
max_hold_rate_ = 0;
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "BWE: Increase rate to current_bit_rate = %u kbps",
- current_bit_rate / 1000);
last_bit_rate_change_ = now_ms;
break;
}
@@ -207,10 +195,6 @@ uint32_t RemoteRateControl::ChangeBitRate(uint32_t current_bit_rate,
}
UpdateMaxBitRateEstimate(incoming_bit_rate_kbps);
-
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "BWE: Decrease rate to current_bit_rate = %u kbps",
- current_bit_rate / 1000);
}
// Stay on hold until the pipes are cleared.
ChangeState(kRcHold);
@@ -251,8 +235,6 @@ double RemoteRateControl::RateIncreaseFactor(int64_t now_ms,
alpha = 1.3;
}
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: alpha = %f", alpha);
-
if (last_ms > -1) {
alpha = pow(alpha, (now_ms - last_ms) / 1000.0);
}
@@ -341,45 +323,5 @@ void RemoteRateControl::ChangeRegion(RateControlRegion region) {
void RemoteRateControl::ChangeState(RateControlState new_state) {
came_from_state_ = rate_control_state_;
rate_control_state_ = new_state;
- char state1[15];
- char state2[15];
- char state3[15];
- StateStr(came_from_state_, state1);
- StateStr(rate_control_state_, state2);
- StateStr(current_input_._bwState, state3);
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "\t%s => %s due to %s\n", state1, state2, state3);
-}
-
-void RemoteRateControl::StateStr(RateControlState state, char* str) {
- switch (state) {
- case kRcDecrease:
- strncpy(str, "DECREASE", 9);
- break;
- case kRcHold:
- strncpy(str, "HOLD", 5);
- break;
- case kRcIncrease:
- strncpy(str, "INCREASE", 9);
- break;
- default:
- assert(false);
- }
-}
-
-void RemoteRateControl::StateStr(BandwidthUsage state, char* str) {
- switch (state) {
- case kBwNormal:
- strncpy(str, "NORMAL", 7);
- break;
- case kBwOverusing:
- strncpy(str, "OVER USING", 11);
- break;
- case kBwUnderusing:
- strncpy(str, "UNDER USING", 12);
- break;
- default:
- assert(false);
- }
}
} // namespace webrtc
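
The surviving ChangeBitRate() path above computes a response time (avg_change_period_ + rtt_ + 300 ms) and scales the bitrate by a factor alpha that RateIncreaseFactor() raises to the power of the elapsed time in seconds. The following is a minimal standalone sketch of that time-scaled multiplicative increase; the base factor of 1.08 per second and the starting bitrate are assumptions for the example, not values from the implementation.

#include <cmath>
#include <cstdint>
#include <cstdio>

int main() {
  double base_alpha = 1.08;       // assumed per-second increase factor
  uint32_t bitrate_bps = 300000;  // assumed starting bitrate
  int64_t last_ms = 0;
  for (int64_t now_ms = 500; now_ms <= 2000; now_ms += 500) {
    // Scale the per-second factor by the elapsed time, as in
    // RateIncreaseFactor(): alpha = pow(alpha, (now_ms - last_ms) / 1000.0).
    double alpha = std::pow(base_alpha, (now_ms - last_ms) / 1000.0);
    bitrate_bps = static_cast<uint32_t>(bitrate_bps * alpha) + 1000;
    std::printf("t=%lld ms  alpha=%.3f  bitrate=%u bps\n",
                static_cast<long long>(now_ms), alpha, bitrate_bps);
    last_ms = now_ms;
  }
  return 0;
}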
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h
index b525834eca1..d02c6d56b50 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h
@@ -53,8 +53,6 @@ class RemoteRateControl {
void ChangeState(const RateControlInput& input, int64_t now_ms);
void ChangeState(RateControlState new_state);
void ChangeRegion(RateControlRegion region);
- static void StateStr(RateControlState state, char* str);
- static void StateStr(BandwidthUsage state, char* str);
uint32_t min_configured_bit_rate_;
uint32_t max_configured_bit_rate_;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc
deleted file mode 100644
index 109edae7cc5..00000000000
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h"
-
-#include "webrtc/system_wrappers/interface/clock.h"
-
-#include <assert.h>
-
-namespace webrtc {
-
-namespace synchronization {
-
-RtcpMeasurement::RtcpMeasurement()
- : ntp_secs(0), ntp_frac(0), rtp_timestamp(0) {}
-
-RtcpMeasurement::RtcpMeasurement(uint32_t ntp_secs, uint32_t ntp_frac,
- uint32_t timestamp)
- : ntp_secs(ntp_secs), ntp_frac(ntp_frac), rtp_timestamp(timestamp) {}
-
-// Calculates the RTP timestamp frequency from two pairs of NTP and RTP
-// timestamps.
-bool CalculateFrequency(
- int64_t rtcp_ntp_ms1,
- uint32_t rtp_timestamp1,
- int64_t rtcp_ntp_ms2,
- uint32_t rtp_timestamp2,
- double* frequency_khz) {
- if (rtcp_ntp_ms1 <= rtcp_ntp_ms2) {
- return false;
- }
- *frequency_khz = static_cast<double>(rtp_timestamp1 - rtp_timestamp2) /
- static_cast<double>(rtcp_ntp_ms1 - rtcp_ntp_ms2);
- return true;
-}
-
-// Detects if there has been a wraparound between |old_timestamp| and
-// |new_timestamp|, and compensates by adding 2^32 if that is the case.
-bool CompensateForWrapAround(uint32_t new_timestamp,
- uint32_t old_timestamp,
- int64_t* compensated_timestamp) {
- assert(compensated_timestamp);
- int64_t wraps = synchronization::CheckForWrapArounds(new_timestamp,
- old_timestamp);
- if (wraps < 0) {
- // Reordering, don't use this packet.
- return false;
- }
- *compensated_timestamp = new_timestamp + (wraps << 32);
- return true;
-}
-
-// Converts |rtp_timestamp| to the NTP time base using the NTP and RTP timestamp
-// pairs in |rtcp|. The converted timestamp is returned in
-// |rtp_timestamp_in_ms|. This function compensates for wrap arounds in RTP
-// timestamps and returns false if it can't do the conversion due to reordering.
-bool RtpToNtpMs(int64_t rtp_timestamp,
- const synchronization::RtcpList& rtcp,
- int64_t* rtp_timestamp_in_ms) {
- assert(rtcp.size() == 2);
- int64_t rtcp_ntp_ms_new = Clock::NtpToMs(rtcp.front().ntp_secs,
- rtcp.front().ntp_frac);
- int64_t rtcp_ntp_ms_old = Clock::NtpToMs(rtcp.back().ntp_secs,
- rtcp.back().ntp_frac);
- int64_t rtcp_timestamp_new = rtcp.front().rtp_timestamp;
- int64_t rtcp_timestamp_old = rtcp.back().rtp_timestamp;
- if (!CompensateForWrapAround(rtcp_timestamp_new,
- rtcp_timestamp_old,
- &rtcp_timestamp_new)) {
- return false;
- }
- double freq_khz;
- if (!CalculateFrequency(rtcp_ntp_ms_new,
- rtcp_timestamp_new,
- rtcp_ntp_ms_old,
- rtcp_timestamp_old,
- &freq_khz)) {
- return false;
- }
- double offset = rtcp_timestamp_new - freq_khz * rtcp_ntp_ms_new;
- int64_t rtp_timestamp_unwrapped;
- if (!CompensateForWrapAround(rtp_timestamp, rtcp_timestamp_old,
- &rtp_timestamp_unwrapped)) {
- return false;
- }
- double rtp_timestamp_ntp_ms = (static_cast<double>(rtp_timestamp_unwrapped) -
- offset) / freq_khz + 0.5f;
- if (rtp_timestamp_ntp_ms < 0) {
- return false;
- }
- *rtp_timestamp_in_ms = rtp_timestamp_ntp_ms;
- return true;
-}
-
-int CheckForWrapArounds(uint32_t new_timestamp, uint32_t old_timestamp) {
- if (new_timestamp < old_timestamp) {
- // This difference should be less than -2^31 if we have had a wrap around
- // (e.g. |new_timestamp| = 1, |rtcp_rtp_timestamp| = 2^32 - 1). Since it is
- // cast to a int32_t, it should be positive.
- if (static_cast<int32_t>(new_timestamp - old_timestamp) > 0) {
- // Forward wrap around.
- return 1;
- }
- } else if (static_cast<int32_t>(old_timestamp - new_timestamp) > 0) {
- // This difference should be less than -2^31 if we have had a backward wrap
- // around. Since it is cast to a int32_t, it should be positive.
- return -1;
- }
- return 0;
-}
-} // namespace synchronization
-} // namespace webrtc
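
The deleted rtp_to_ntp.cc (its replacement appears to live under system_wrappers, as the include in the new remote_ntp_time_estimator.h further below suggests) maps RTP timestamps onto the NTP timeline by fitting a line through two RTCP (NTP, RTP) pairs: estimate the clock frequency in kHz, derive an offset, then invert the mapping for an arbitrary RTP timestamp. A self-contained numeric sketch of that arithmetic, assuming a 90 kHz video clock and two sender reports 1000 ms apart:

#include <cstdint>
#include <cstdio>

int main() {
  // Two RTCP SR measurements, 1000 ms apart, 90 ticks per millisecond.
  int64_t ntp_ms_old = 0, ntp_ms_new = 1000;
  int64_t rtp_old = 10000, rtp_new = 10000 + 90 * 1000;

  // Frequency in kHz and offset of the linear RTP(t) = freq * t + offset fit.
  double freq_khz = static_cast<double>(rtp_new - rtp_old) /
                    static_cast<double>(ntp_ms_new - ntp_ms_old);
  double offset = rtp_new - freq_khz * ntp_ms_new;

  // Map an RTP timestamp halfway between the two SRs back to NTP milliseconds.
  int64_t rtp_timestamp = 10000 + 90 * 500;
  double rtp_in_ntp_ms = (rtp_timestamp - offset) / freq_khz + 0.5;
  std::printf("freq = %.1f kHz, RTP %lld maps to %.0f ms NTP\n",
              freq_khz, static_cast<long long>(rtp_timestamp), rtp_in_ntp_ms);
  return 0;
}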
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp_unittest.cc
deleted file mode 100644
index aff314aaa53..00000000000
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp_unittest.cc
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h"
-
-namespace webrtc {
-
-TEST(WrapAroundTests, NoWrap) {
- EXPECT_EQ(0, synchronization::CheckForWrapArounds(0xFFFFFFFF, 0xFFFFFFFE));
- EXPECT_EQ(0, synchronization::CheckForWrapArounds(1, 0));
- EXPECT_EQ(0, synchronization::CheckForWrapArounds(0x00010000, 0x0000FFFF));
-}
-
-TEST(WrapAroundTests, ForwardWrap) {
- EXPECT_EQ(1, synchronization::CheckForWrapArounds(0, 0xFFFFFFFF));
- EXPECT_EQ(1, synchronization::CheckForWrapArounds(0, 0xFFFF0000));
- EXPECT_EQ(1, synchronization::CheckForWrapArounds(0x0000FFFF, 0xFFFFFFFF));
- EXPECT_EQ(1, synchronization::CheckForWrapArounds(0x0000FFFF, 0xFFFF0000));
-}
-
-TEST(WrapAroundTests, BackwardWrap) {
- EXPECT_EQ(-1, synchronization::CheckForWrapArounds(0xFFFFFFFF, 0));
- EXPECT_EQ(-1, synchronization::CheckForWrapArounds(0xFFFF0000, 0));
- EXPECT_EQ(-1, synchronization::CheckForWrapArounds(0xFFFFFFFF, 0x0000FFFF));
- EXPECT_EQ(-1, synchronization::CheckForWrapArounds(0xFFFF0000, 0x0000FFFF));
-}
-
-TEST(WrapAroundTests, OldRtcpWrapped) {
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0;
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp -= kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp -= kTimestampTicksPerMs;
- int64_t timestamp_in_ms = -1;
- // This expected to fail since it's highly unlikely that the older RTCP
- // has a much smaller RTP timestamp than the newer.
- EXPECT_FALSE(synchronization::RtpToNtpMs(timestamp, rtcp, &timestamp_in_ms));
-}
-
-TEST(WrapAroundTests, NewRtcpWrapped) {
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0xFFFFFFFF;
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- int64_t timestamp_in_ms = -1;
- EXPECT_TRUE(synchronization::RtpToNtpMs(rtcp.back().rtp_timestamp, rtcp,
- &timestamp_in_ms));
- // Since this RTP packet has the same timestamp as the RTCP packet constructed
- // at time 0 it should be mapped to 0 as well.
- EXPECT_EQ(0, timestamp_in_ms);
-}
-
-TEST(WrapAroundTests, RtpWrapped) {
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0xFFFFFFFF - 2 * kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += kTimestampTicksPerMs;
- int64_t timestamp_in_ms = -1;
- EXPECT_TRUE(synchronization::RtpToNtpMs(timestamp, rtcp,
- &timestamp_in_ms));
- // Since this RTP packet has the same timestamp as the RTCP packet constructed
- // at time 0 it should be mapped to 0 as well.
- EXPECT_EQ(2, timestamp_in_ms);
-}
-
-TEST(WrapAroundTests, OldRtp_RtcpsWrapped) {
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp -= 2*kTimestampTicksPerMs;
- int64_t timestamp_in_ms = -1;
- EXPECT_FALSE(synchronization::RtpToNtpMs(timestamp, rtcp,
- &timestamp_in_ms));
-}
-
-TEST(WrapAroundTests, OldRtp_NewRtcpWrapped) {
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0xFFFFFFFF;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp -= kTimestampTicksPerMs;
- int64_t timestamp_in_ms = -1;
- EXPECT_TRUE(synchronization::RtpToNtpMs(timestamp, rtcp,
- &timestamp_in_ms));
- // Constructed at the same time as the first RTCP and should therefore be
- // mapped to zero.
- EXPECT_EQ(0, timestamp_in_ms);
-}
-
-TEST(WrapAroundTests, OldRtp_OldRtcpWrapped) {
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp -= kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += 2*kTimestampTicksPerMs;
- int64_t timestamp_in_ms = -1;
- EXPECT_FALSE(synchronization::RtpToNtpMs(timestamp, rtcp,
- &timestamp_in_ms));
-}
-}; // namespace webrtc
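
The removed unit tests above exercise CheckForWrapArounds(), which detects 32-bit RTP timestamp wrap by reinterpreting the unsigned difference as a signed value: the difference is positive after a cast to int32_t exactly when the newer timestamp wrapped past 2^32. A self-contained restatement of that logic with a few sample calls (expected results in comments):

#include <cstdint>
#include <cstdio>

static int CheckForWrapArounds(uint32_t new_ts, uint32_t old_ts) {
  if (new_ts < old_ts) {
    if (static_cast<int32_t>(new_ts - old_ts) > 0)
      return 1;   // Forward wrap around.
  } else if (static_cast<int32_t>(old_ts - new_ts) > 0) {
    return -1;    // Backward wrap around (reordering).
  }
  return 0;
}

int main() {
  std::printf("%d\n", CheckForWrapArounds(0x00000001u, 0xFFFFFFFFu));  // 1
  std::printf("%d\n", CheckForWrapArounds(0xFFFFFFFFu, 0x00000001u));  // -1
  std::printf("%d\n", CheckForWrapArounds(0x00010000u, 0x0000FFFFu));  // 0
  return 0;
}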
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
new file mode 100644
index 00000000000..40fa6df8ffb
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h"
+
+#include <stdio.h>
+#include <string>
+
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/video_coding/main/test/rtp_file_reader.h"
+#include "webrtc/modules/video_coding/main/test/rtp_player.h"
+
+using webrtc::rtpplayer::RtpPacketSourceInterface;
+
+const int kMinBitrateBps = 30000;
+
+bool ParseArgsAndSetupEstimator(int argc,
+ char** argv,
+ webrtc::Clock* clock,
+ webrtc::RemoteBitrateObserver* observer,
+ RtpPacketSourceInterface** rtp_reader,
+ webrtc::RtpHeaderParser** parser,
+ webrtc::RemoteBitrateEstimator** estimator,
+ std::string* estimator_used) {
+ *rtp_reader = webrtc::rtpplayer::CreateRtpFileReader(argv[3]);
+ if (!*rtp_reader) {
+ fprintf(stderr, "Cannot open input file %s\n", argv[3]);
+ return false;
+ }
+ fprintf(stderr, "Input file: %s\n\n", argv[3]);
+ webrtc::RTPExtensionType extension = webrtc::kRtpExtensionAbsoluteSendTime;
+
+ if (strncmp("tsoffset", argv[1], 8) == 0) {
+ extension = webrtc::kRtpExtensionTransmissionTimeOffset;
+ fprintf(stderr, "Extension: toffset\n");
+ } else {
+ fprintf(stderr, "Extension: abs\n");
+ }
+ int id = atoi(argv[2]);
+
+ // Setup the RTP header parser and the bitrate estimator.
+ *parser = webrtc::RtpHeaderParser::Create();
+ (*parser)->RegisterRtpHeaderExtension(extension, id);
+ if (estimator) {
+ switch (extension) {
+ case webrtc::kRtpExtensionAbsoluteSendTime: {
+ webrtc::AbsoluteSendTimeRemoteBitrateEstimatorFactory factory;
+ *estimator = factory.Create(observer, clock, webrtc::kAimdControl,
+ kMinBitrateBps);
+ *estimator_used = "AbsoluteSendTimeRemoteBitrateEstimator";
+ break;
+ }
+ case webrtc::kRtpExtensionTransmissionTimeOffset: {
+ webrtc::RemoteBitrateEstimatorFactory factory;
+ *estimator = factory.Create(observer, clock, webrtc::kAimdControl,
+ kMinBitrateBps);
+ *estimator_used = "RemoteBitrateEstimator";
+ break;
+ }
+ default:
+ assert(false);
+ }
+ }
+ return true;
+}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h
new file mode 100644
index 00000000000..714457d5668
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_
+#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_
+
+#include <string>
+
+namespace webrtc {
+class Clock;
+class RemoteBitrateEstimator;
+class RemoteBitrateObserver;
+class RtpHeaderParser;
+namespace rtpplayer {
+class RtpPacketSourceInterface;
+}
+}
+
+bool ParseArgsAndSetupEstimator(
+ int argc,
+ char** argv,
+ webrtc::Clock* clock,
+ webrtc::RemoteBitrateObserver* observer,
+ webrtc::rtpplayer::RtpPacketSourceInterface** rtp_reader,
+ webrtc::RtpHeaderParser** parser,
+ webrtc::RemoteBitrateEstimator** estimator,
+ std::string* estimator_used);
+
+#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
new file mode 100644
index 00000000000..9ea3f08eab5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/video_coding/main/test/rtp_file_reader.h"
+#include "webrtc/modules/video_coding/main/test/rtp_player.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+using webrtc::rtpplayer::RtpPacketSourceInterface;
+
+class Observer : public webrtc::RemoteBitrateObserver {
+ public:
+ explicit Observer(webrtc::Clock* clock) : clock_(clock) {}
+
+ // Called when a receive channel group has a new bitrate estimate for the
+ // incoming streams.
+ virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate) {
+ printf("[%u] Num SSRCs: %d, bitrate: %u\n",
+ static_cast<uint32_t>(clock_->TimeInMilliseconds()),
+ static_cast<int>(ssrcs.size()), bitrate);
+ }
+
+ virtual ~Observer() {}
+
+ private:
+ webrtc::Clock* clock_;
+};
+
+int main(int argc, char** argv) {
+ if (argc < 4) {
+ printf("Usage: bwe_rtp_play <extension type> <extension id> "
+ "<input_file.rtp>\n");
+ printf("<extension type> can either be:\n"
+ " abs for absolute send time or\n"
+ " tsoffset for timestamp offset.\n"
+ "<extension id> is the id associated with the extension.\n");
+ return -1;
+ }
+ RtpPacketSourceInterface* reader;
+ webrtc::RemoteBitrateEstimator* estimator;
+ webrtc::RtpHeaderParser* parser;
+ std::string estimator_used;
+ webrtc::SimulatedClock clock(0);
+ Observer observer(&clock);
+ if (!ParseArgsAndSetupEstimator(argc, argv, &clock, &observer, &reader,
+ &parser, &estimator, &estimator_used)) {
+ return -1;
+ }
+ webrtc::scoped_ptr<RtpPacketSourceInterface> rtp_reader(reader);
+ webrtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(parser);
+ webrtc::scoped_ptr<webrtc::RemoteBitrateEstimator> rbe(estimator);
+
+ // Process the file.
+ int packet_counter = 0;
+ int64_t next_process_time_ms = 0;
+ int64_t next_rtp_time_ms = 0;
+ int64_t first_rtp_time_ms = -1;
+ const uint32_t kMaxPacketSize = 1500;
+ uint8_t packet_buffer[kMaxPacketSize];
+ uint8_t* packet = packet_buffer;
+ int non_zero_abs_send_time = 0;
+ int non_zero_ts_offsets = 0;
+ while (true) {
+ uint32_t next_rtp_time;
+ if (next_rtp_time_ms <= clock.TimeInMilliseconds()) {
+ uint32_t packet_length = kMaxPacketSize;
+ if (rtp_reader->NextPacket(packet, &packet_length,
+ &next_rtp_time) == -1) {
+ break;
+ }
+ if (first_rtp_time_ms == -1)
+ first_rtp_time_ms = next_rtp_time;
+ next_rtp_time_ms = next_rtp_time - first_rtp_time_ms;
+ webrtc::RTPHeader header;
+ parser->Parse(packet, packet_length, &header);
+ if (header.extension.absoluteSendTime != 0)
+ ++non_zero_abs_send_time;
+ if (header.extension.transmissionTimeOffset != 0)
+ ++non_zero_ts_offsets;
+ rbe->IncomingPacket(clock.TimeInMilliseconds(),
+ packet_length - header.headerLength,
+ header);
+ ++packet_counter;
+ }
+ next_process_time_ms = rbe->TimeUntilNextProcess() +
+ clock.TimeInMilliseconds();
+ if (next_process_time_ms <= clock.TimeInMilliseconds()) {
+ rbe->Process();
+ }
+ int time_until_next_event =
+ std::min(next_process_time_ms, next_rtp_time_ms) -
+ clock.TimeInMilliseconds();
+ clock.AdvanceTimeMilliseconds(std::max(time_until_next_event, 0));
+ }
+ printf("Parsed %d packets\nTime passed: %u ms\n", packet_counter,
+ static_cast<uint32_t>(clock.TimeInMilliseconds()));
+ printf("Estimator used: %s\n", estimator_used.c_str());
+ printf("Packets with non-zero absolute send time: %d\n",
+ non_zero_abs_send_time);
+ printf("Packets with non-zero timestamp offset: %d\n",
+ non_zero_ts_offsets);
+ return 0;
+}
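
bwe_rtp_play above drives the estimator offline from an RTP dump with a SimulatedClock: instead of sleeping, it advances the clock straight to the earlier of the next packet arrival and the next Process() deadline. A stripped-down sketch of that event loop follows; the event intervals are hard-coded assumptions standing in for the file reader and the estimator.

#include <algorithm>
#include <cstdint>
#include <cstdio>

int main() {
  int64_t now_ms = 0;
  int64_t next_packet_ms = 20;    // assumed packet arrival cadence
  int64_t next_process_ms = 100;  // assumed estimator process interval
  for (int i = 0; i < 10; ++i) {
    // "Handle" whichever events are due, then schedule their successors.
    if (next_packet_ms <= now_ms) next_packet_ms += 20;
    if (next_process_ms <= now_ms) next_process_ms += 100;
    // Jump the simulated clock to the next event, never backwards.
    int64_t until_next = std::min(next_packet_ms, next_process_ms) - now_ms;
    now_ms += std::max<int64_t>(until_next, 0);
    std::printf("clock advanced to %lld ms\n", static_cast<long long>(now_ms));
  }
  return 0;
}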
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc
new file mode 100644
index 00000000000..af4a4d4ee7b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <sstream>
+
+#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/video_coding/main/test/rtp_file_reader.h"
+#include "webrtc/modules/video_coding/main/test/rtp_player.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+using webrtc::rtpplayer::RtpPacketSourceInterface;
+
+int main(int argc, char** argv) {
+ if (argc < 4) {
+ fprintf(stderr, "Usage: rtp_to_text <extension type> <extension id>"
+ " <input_file.rtp> [-t]\n");
+ fprintf(stderr, "<extension type> can either be:\n"
+ " abs for absolute send time or\n"
+ " tsoffset for timestamp offset.\n"
+ "<extension id> is the id associated with the extension.\n"
+ " -t is an optional flag, if set only packet arrival time will be"
+ " output.\n");
+ return -1;
+ }
+ RtpPacketSourceInterface* reader;
+ webrtc::RtpHeaderParser* parser;
+ if (!ParseArgsAndSetupEstimator(argc, argv, NULL, NULL, &reader, &parser,
+ NULL, NULL)) {
+ return -1;
+ }
+ bool arrival_time_only = (argc >= 5 && strncmp(argv[4], "-t", 2) == 0);
+ webrtc::scoped_ptr<RtpPacketSourceInterface> rtp_reader(reader);
+ webrtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(parser);
+ fprintf(stdout, "seqnum timestamp ts_offset abs_sendtime recvtime "
+ "markerbit ssrc size\n");
+ int packet_counter = 0;
+ static const uint32_t kMaxPacketSize = 1500;
+ uint8_t packet_buffer[kMaxPacketSize];
+ uint8_t* packet = packet_buffer;
+ uint32_t packet_length = kMaxPacketSize;
+ uint32_t time_ms = 0;
+ int non_zero_abs_send_time = 0;
+ int non_zero_ts_offsets = 0;
+ while (rtp_reader->NextPacket(packet, &packet_length, &time_ms) == 0) {
+ webrtc::RTPHeader header;
+ parser->Parse(packet, packet_length, &header);
+ if (header.extension.absoluteSendTime != 0)
+ ++non_zero_abs_send_time;
+ if (header.extension.transmissionTimeOffset != 0)
+ ++non_zero_ts_offsets;
+ if (arrival_time_only) {
+ std::stringstream ss;
+ ss << static_cast<int64_t>(time_ms) * 1000000;
+ fprintf(stdout, "%s\n", ss.str().c_str());
+ } else {
+ fprintf(stdout, "%u %u %d %u %u %d %u %u\n", header.sequenceNumber,
+ header.timestamp, header.extension.transmissionTimeOffset,
+ header.extension.absoluteSendTime, time_ms, header.markerBit,
+ header.ssrc, packet_length);
+ }
+ packet_length = kMaxPacketSize;
+ ++packet_counter;
+ }
+ fprintf(stderr, "Parsed %d packets\n", packet_counter);
+ fprintf(stderr, "Packets with non-zero absolute send time: %d\n",
+ non_zero_abs_send_time);
+ fprintf(stderr, "Packets with non-zero timestamp offset: %d\n",
+ non_zero_ts_offsets);
+ return 0;
+}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/fec_receiver.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/fec_receiver.h
index 97b200f0777..e2ef4b1e972 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/fec_receiver.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/fec_receiver.h
@@ -18,7 +18,7 @@ namespace webrtc {
class FecReceiver {
public:
- static FecReceiver* Create(int32_t id, RtpData* callback);
+ static FecReceiver* Create(RtpData* callback);
virtual ~FecReceiver() {}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/receive_statistics.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/receive_statistics.h
index 707adaa0cd3..6f2ea4fb3e6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/receive_statistics.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/receive_statistics.h
@@ -23,24 +23,9 @@ class Clock;
class StreamStatistician {
public:
- struct Statistics {
- Statistics()
- : fraction_lost(0),
- cumulative_lost(0),
- extended_max_sequence_number(0),
- jitter(0),
- max_jitter(0) {}
-
- uint8_t fraction_lost;
- uint32_t cumulative_lost;
- uint32_t extended_max_sequence_number;
- uint32_t jitter;
- uint32_t max_jitter;
- };
-
virtual ~StreamStatistician();
- virtual bool GetStatistics(Statistics* statistics, bool reset) = 0;
+ virtual bool GetStatistics(RtcpStatistics* statistics, bool reset) = 0;
virtual void GetDataCounters(uint32_t* bytes_received,
uint32_t* packets_received) const = 0;
virtual uint32_t BitrateReceived() const = 0;
@@ -66,9 +51,13 @@ class ReceiveStatistics : public Module {
static ReceiveStatistics* Create(Clock* clock);
// Updates the receive statistics with this packet.
- virtual void IncomingPacket(const RTPHeader& rtp_header, size_t bytes,
+ virtual void IncomingPacket(const RTPHeader& rtp_header,
+ size_t bytes,
bool retransmitted) = 0;
+ // Increment counter for number of FEC packets received.
+ virtual void FecPacketReceived(uint32_t ssrc) = 0;
+
// Returns a map of all statisticians which have seen an incoming packet
// during the last two seconds.
virtual StatisticianMap GetActiveStatisticians() const = 0;
@@ -78,17 +67,31 @@ class ReceiveStatistics : public Module {
// Sets the max reordering threshold in number of packets.
virtual void SetMaxReorderingThreshold(int max_reordering_threshold) = 0;
+
+ // Called on new RTCP stats creation.
+ virtual void RegisterRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) = 0;
+
+ // Called on new RTP stats creation.
+ virtual void RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) = 0;
};
class NullReceiveStatistics : public ReceiveStatistics {
public:
- virtual void IncomingPacket(const RTPHeader& rtp_header, size_t bytes,
+ virtual void IncomingPacket(const RTPHeader& rtp_header,
+ size_t bytes,
bool retransmitted) OVERRIDE;
+ virtual void FecPacketReceived(uint32_t ssrc) OVERRIDE;
virtual StatisticianMap GetActiveStatisticians() const OVERRIDE;
virtual StreamStatistician* GetStatistician(uint32_t ssrc) const OVERRIDE;
virtual int32_t TimeUntilNextProcess() OVERRIDE;
virtual int32_t Process() OVERRIDE;
virtual void SetMaxReorderingThreshold(int max_reordering_threshold) OVERRIDE;
+ virtual void RegisterRtcpStatisticsCallback(RtcpStatisticsCallback* callback)
+ OVERRIDE;
+ virtual void RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) OVERRIDE;
};
} // namespace webrtc
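
With the interface changes above, StreamStatistician::GetStatistics() now fills the shared RtcpStatistics type, and ReceiveStatistics additionally exposes FecPacketReceived() plus the RTCP/RTP statistics callbacks. The fragment below is a hedged usage sketch of the revised interface; the function name, the origin of the RTPHeader, and the call sequence are assumptions for illustration, not code from the tree.

#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

namespace {

void UpdateAndReport(webrtc::Clock* clock, const webrtc::RTPHeader& header,
                     size_t packet_size_bytes) {
  webrtc::scoped_ptr<webrtc::ReceiveStatistics> stats(
      webrtc::ReceiveStatistics::Create(clock));
  // Feed each parsed packet; the bool marks retransmissions.
  stats->IncomingPacket(header, packet_size_bytes, false);
  webrtc::StreamStatistician* statistician =
      stats->GetStatistician(header.ssrc);
  if (statistician) {
    webrtc::RtcpStatistics rtcp_stats;
    statistician->GetStatistics(&rtcp_stats, true /* reset */);
  }
}

}  // namespace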
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h
new file mode 100644
index 00000000000..25f0f2ecf98
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_REMOTE_NTP_TIME_ESTIMATOR_H_
+#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_REMOTE_NTP_TIME_ESTIMATOR_H_
+
+#include "webrtc/system_wrappers/interface/rtp_to_ntp.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class Clock;
+class RtpRtcp;
+class TimestampExtrapolator;
+
+// RemoteNtpTimeEstimator can be used to estimate a given RTP timestamp's NTP
+// time in local timebase.
+// Note that it needs to be trained with at least 2 RTCP SR (by calling
+// |UpdateRtcpTimestamp|) before it can be used.
+class RemoteNtpTimeEstimator {
+ public:
+ explicit RemoteNtpTimeEstimator(Clock* clock);
+
+ ~RemoteNtpTimeEstimator();
+
+ // Updates the estimator with the timestamp from newly received RTCP SR for
+ // |ssrc|. The RTCP SR is read from |rtp_rtcp|.
+ bool UpdateRtcpTimestamp(uint32_t ssrc, RtpRtcp* rtp_rtcp);
+
+ // Estimates the NTP timestamp in local timebase from |rtp_timestamp|.
+ // Returns the NTP timestamp in ms when success. -1 if failed.
+ int64_t Estimate(uint32_t rtp_timestamp);
+
+ private:
+ Clock* clock_;
+ scoped_ptr<TimestampExtrapolator> ts_extrapolator_;
+ RtcpList rtcp_list_;
+ DISALLOW_COPY_AND_ASSIGN(RemoteNtpTimeEstimator);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_REMOTE_NTP_TIME_ESTIMATOR_H_
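
The new RemoteNtpTimeEstimator declared above estimates a remote RTP timestamp's NTP time in the local timebase and needs at least two RTCP sender reports before Estimate() can succeed. The following is a hedged usage sketch built only from the calls declared in this header; the wrapper function, its parameters, and the call pattern are assumptions, and a real caller would keep one long-lived estimator and update it as sender reports arrive.

#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/system_wrappers/interface/clock.h"

namespace {

int64_t RtpToLocalNtpMs(webrtc::Clock* clock,
                        webrtc::RtpRtcp* rtp_rtcp,
                        uint32_t remote_ssrc,
                        uint32_t rtp_timestamp) {
  webrtc::RemoteNtpTimeEstimator estimator(clock);
  // Pull the latest RTCP SR for |remote_ssrc| from the RTP/RTCP module.
  if (!estimator.UpdateRtcpTimestamp(remote_ssrc, rtp_rtcp))
    return -1;
  // Returns the NTP time in ms on success, -1 on failure.
  return estimator.Estimate(rtp_timestamp);
}

}  // namespace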
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h
index 3ea4dcd1beb..965f4b02421 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h
@@ -54,8 +54,7 @@ class RTPPayloadStrategy {
class RTPPayloadRegistry {
public:
// The registry takes ownership of the strategy.
- RTPPayloadRegistry(const int32_t id,
- RTPPayloadStrategy* rtp_payload_strategy);
+ RTPPayloadRegistry(RTPPayloadStrategy* rtp_payload_strategy);
~RTPPayloadRegistry();
int32_t RegisterReceivePayload(
@@ -76,10 +75,10 @@ class RTPPayloadRegistry {
const uint32_t rate,
int8_t* payload_type) const;
- void SetRtxStatus(bool enable, uint32_t ssrc);
-
bool RtxEnabled() const;
+ void SetRtxSsrc(uint32_t ssrc);
+
void SetRtxPayloadType(int payload_type);
bool IsRtx(const RTPHeader& header) const;
@@ -153,7 +152,6 @@ class RTPPayloadRegistry {
scoped_ptr<CriticalSectionWrapper> crit_sect_;
ModuleRTPUtility::PayloadTypeMap payload_type_map_;
- int32_t id_;
scoped_ptr<RTPPayloadStrategy> rtp_payload_strategy_;
int8_t red_payload_type_;
int8_t ulpfec_payload_type_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
index 67dad0d977b..95c565f01bf 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
@@ -213,7 +213,7 @@ class RtpRtcp : public Module {
*
* return -1 on failure else 0
*/
- virtual int32_t SetSSRC(const uint32_t ssrc) = 0;
+ virtual void SetSSRC(const uint32_t ssrc) = 0;
/*
* Get CSRC
@@ -249,10 +249,14 @@ class RtpRtcp : public Module {
virtual int32_t SetCSRCStatus(const bool include) = 0;
/*
- * Turn on/off sending RTX (RFC 4588) on a specific SSRC.
+ * Turn on/off sending RTX (RFC 4588). The modes can be set as a combination
+ * of values of the enumerator RtxMode.
*/
- virtual int32_t SetRTXSendStatus(int modes, bool set_ssrc,
- uint32_t ssrc) = 0;
+ virtual void SetRTXSendStatus(int modes) = 0;
+
+ // Sets the SSRC to use when sending RTX packets. This doesn't enable RTX,
+ // only the SSRC is set.
+ virtual void SetRtxSsrc(uint32_t ssrc) = 0;
// Sets the payload type to use when sending RTX packets. Note that this
// doesn't enable RTX, only the payload type is set.
@@ -261,8 +265,8 @@ class RtpRtcp : public Module {
/*
* Get status of sending RTX (RFC 4588) on a specific SSRC.
*/
- virtual int32_t RTXSendStatus(int* modes, uint32_t* ssrc,
- int* payloadType) const = 0;
+ virtual void RTXSendStatus(int* modes, uint32_t* ssrc,
+ int* payloadType) const = 0;
/*
* sends kRtcpByeCode when going from true to false
@@ -508,6 +512,13 @@ class RtpRtcp : public Module {
virtual int32_t RemoveRTCPReportBlock(const uint32_t SSRC) = 0;
/*
+ * Get number of sent and received RTCP packet types.
+ */
+ virtual void GetRtcpPacketTypeCounters(
+ RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const = 0;
+
+ /*
* (APP) Application specific data
*
* return -1 on failure else 0
@@ -664,25 +675,6 @@ class RtpRtcp : public Module {
int8_t& payloadType) const = 0;
/*
- * Set status and ID for header-extension-for-audio-level-indication.
- * See http://tools.ietf.org/html/rfc6464 for more details.
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetRTPAudioLevelIndicationStatus(
- const bool enable,
- const uint8_t ID) = 0;
-
- /*
- * Get status and ID for header-extension-for-audio-level-indication.
- *
- * return -1 on failure else 0
- */
- virtual int32_t GetRTPAudioLevelIndicationStatus(
- bool& enable,
- uint8_t& ID) const = 0;
-
- /*
* Store the audio level in dBov for header-extension-for-audio-level-
* indication.
* This API shall be called before transmision of an RTP packet to ensure
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
index b66e927ae59..6f99f938de6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
@@ -27,6 +27,9 @@ namespace webrtc {
const int kVideoPayloadTypeFrequency = 90000;
+// Minimum RTP header size in bytes.
+const uint8_t kRtpHeaderSize = 12;
+
struct AudioPayload
{
uint32_t frequency;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index 42c7b4eab91..03156c79df4 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -77,17 +77,19 @@ class MockRtpRtcp : public RtpRtcp {
MOCK_CONST_METHOD0(SSRC,
uint32_t());
MOCK_METHOD1(SetSSRC,
- int32_t(const uint32_t ssrc));
+ void(const uint32_t ssrc));
MOCK_CONST_METHOD1(CSRCs,
int32_t(uint32_t arrOfCSRC[kRtpCsrcSize]));
MOCK_METHOD2(SetCSRCs,
int32_t(const uint32_t arrOfCSRC[kRtpCsrcSize], const uint8_t arrLength));
MOCK_METHOD1(SetCSRCStatus,
int32_t(const bool include));
- MOCK_METHOD3(SetRTXSendStatus,
- int32_t(int modes, bool setSSRC, uint32_t ssrc));
+ MOCK_METHOD1(SetRTXSendStatus,
+ void(int modes));
MOCK_CONST_METHOD3(RTXSendStatus,
- int32_t(int* modes, uint32_t* ssrc, int* payload_type));
+ void(int* modes, uint32_t* ssrc, int* payload_type));
+ MOCK_METHOD1(SetRtxSsrc,
+ void(uint32_t));
MOCK_METHOD1(SetRtxSendPayloadType,
void(int));
MOCK_METHOD1(SetSendingStatus,
@@ -168,6 +170,8 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(const uint32_t SSRC, const RTCPReportBlock* receiveBlock));
MOCK_METHOD1(RemoveRTCPReportBlock,
int32_t(const uint32_t SSRC));
+ MOCK_CONST_METHOD2(GetRtcpPacketTypeCounters,
+ void(RtcpPacketTypeCounter*, RtcpPacketTypeCounter*));
MOCK_METHOD4(SetRTCPApplicationSpecificData,
int32_t(const uint8_t subType, const uint32_t name, const uint8_t* data, const uint16_t length));
MOCK_METHOD1(SetRTCPVoIPMetrics,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/OWNERS b/chromium/third_party/webrtc/modules/rtp_rtcp/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
index 0dc142f867c..0d6c174a3b9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
@@ -16,20 +16,19 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
// RFC 5109
namespace webrtc {
-FecReceiver* FecReceiver::Create(int32_t id, RtpData* callback) {
- return new FecReceiverImpl(id, callback);
+FecReceiver* FecReceiver::Create(RtpData* callback) {
+ return new FecReceiverImpl(callback);
}
-FecReceiverImpl::FecReceiverImpl(const int32_t id, RtpData* callback)
- : id_(id),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+FecReceiverImpl::FecReceiverImpl(RtpData* callback)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
recovered_packet_callback_(callback),
- fec_(new ForwardErrorCorrection(id)) {}
+ fec_(new ForwardErrorCorrection()) {}
FecReceiverImpl::~FecReceiverImpl() {
while (!received_packet_list_.empty()) {
@@ -103,8 +102,7 @@ int32_t FecReceiverImpl::AddReceivedRedPacket(
if (timestamp_offset != 0) {
// |timestampOffset| should be 0. However, it's possible this is the first
// location a corrupt payload can be caught, so don't assert.
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Corrupt payload found in %s", __FUNCTION__);
+ LOG(LS_WARNING) << "Corrupt payload found.";
delete received_packet;
return -1;
}
@@ -223,6 +221,7 @@ int32_t FecReceiverImpl::ProcessReceivedFec() {
crit_sect_->Enter();
}
if (fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_) != 0) {
+ crit_sect_->Leave();
return -1;
}
assert(received_packet_list_.empty());
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
index 03421235c3d..b876bedc9c3 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
@@ -25,7 +25,7 @@ class CriticalSectionWrapper;
class FecReceiverImpl : public FecReceiver {
public:
- FecReceiverImpl(const int32_t id, RtpData* callback);
+ FecReceiverImpl(RtpData* callback);
virtual ~FecReceiverImpl();
virtual int32_t AddReceivedRedPacket(const RTPHeader& rtp_header,
@@ -36,7 +36,6 @@ class FecReceiverImpl : public FecReceiver {
virtual int32_t ProcessReceivedFec() OVERRIDE;
private:
- int id_;
scoped_ptr<CriticalSectionWrapper> crit_sect_;
RtpData* recovered_packet_callback_;
ForwardErrorCorrection* fec_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
index 2e8846c3b29..0b1244941c3 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
@@ -39,8 +39,8 @@ class MockRtpData : public RtpData {
class ReceiverFecTest : public ::testing::Test {
protected:
virtual void SetUp() {
- fec_ = new ForwardErrorCorrection(0);
- receiver_fec_ = FecReceiver::Create(0, &rtp_data_callback_);
+ fec_ = new ForwardErrorCorrection();
+ receiver_fec_ = FecReceiver::Create(&rtp_data_callback_);
generator_ = new FrameGenerator();
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_test_helper.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
index e3c3581be73..e6426ea7eea 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
@@ -17,9 +17,6 @@
namespace webrtc {
enum {
- kRtpHeaderSize = 12
-};
-enum {
kFecPayloadType = 96
};
enum {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
index 189e1b052f9..31303c8ad19 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -17,15 +17,13 @@
#include <algorithm>
#include <iterator>
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
-// Minimum RTP header size in bytes.
-const uint8_t kRtpHeaderSize = 12;
-
// FEC header size in bytes.
const uint8_t kFecHeaderSize = 10;
@@ -84,9 +82,8 @@ ForwardErrorCorrection::ReceivedPacket::~ReceivedPacket() {}
ForwardErrorCorrection::RecoveredPacket::RecoveredPacket() {}
ForwardErrorCorrection::RecoveredPacket::~RecoveredPacket() {}
-ForwardErrorCorrection::ForwardErrorCorrection(int32_t id)
- : id_(id),
- generated_fec_packets_(kMaxMediaPackets),
+ForwardErrorCorrection::ForwardErrorCorrection()
+ : generated_fec_packets_(kMaxMediaPackets),
fec_packet_received_(false) {}
ForwardErrorCorrection::~ForwardErrorCorrection() {}
@@ -114,43 +111,23 @@ int32_t ForwardErrorCorrection::GenerateFEC(const PacketList& media_packet_list,
bool use_unequal_protection,
FecMaskType fec_mask_type,
PacketList* fec_packet_list) {
- if (media_packet_list.empty()) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s media packet list is empty", __FUNCTION__);
- return -1;
- }
- if (!fec_packet_list->empty()) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s FEC packet list is not empty", __FUNCTION__);
- return -1;
- }
const uint16_t num_media_packets = media_packet_list.size();
- bool l_bit = (num_media_packets > 8 * kMaskSizeLBitClear);
- int num_maskBytes = l_bit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
+ // Sanity check arguments.
+ assert(num_media_packets > 0);
+ assert(num_important_packets >= 0 &&
+ num_important_packets <= num_media_packets);
+ assert(fec_packet_list->empty());
if (num_media_packets > kMaxMediaPackets) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s can only protect %d media packets per frame; %d requested",
- __FUNCTION__, kMaxMediaPackets, num_media_packets);
+ LOG(LS_WARNING) << "Can't protect " << num_media_packets
+ << " media packets per frame. Max is " << kMaxMediaPackets;
return -1;
}
- // Error checking on the number of important packets.
- // Can't have more important packets than media packets.
- if (num_important_packets > num_media_packets) {
- WEBRTC_TRACE(
- kTraceError, kTraceRtpRtcp, id_,
- "Number of important packets (%d) greater than number of media "
- "packets (%d)",
- num_important_packets, num_media_packets);
- return -1;
- }
- if (num_important_packets < 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "Number of important packets (%d) less than zero",
- num_important_packets);
- return -1;
- }
+ bool l_bit = (num_media_packets > 8 * kMaskSizeLBitClear);
+ int num_maskBytes = l_bit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
// Do some error checking on the media packets.
PacketList::const_iterator media_list_it = media_packet_list.begin();
while (media_list_it != media_packet_list.end()) {
@@ -158,20 +135,16 @@ int32_t ForwardErrorCorrection::GenerateFEC(const PacketList& media_packet_list,
assert(media_packet);
if (media_packet->length < kRtpHeaderSize) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s media packet (%d bytes) is smaller than RTP header",
- __FUNCTION__, media_packet->length);
+ LOG(LS_WARNING) << "Media packet " << media_packet->length << " bytes "
+ << "is smaller than RTP header.";
return -1;
}
// Ensure our FEC packets will fit in a typical MTU.
if (media_packet->length + PacketOverhead() + kTransportOverhead >
IP_PACKET_SIZE) {
- WEBRTC_TRACE(
- kTraceError, kTraceRtpRtcp, id_,
- "%s media packet (%d bytes) with overhead is larger than MTU(%d)",
- __FUNCTION__, media_packet->length, IP_PACKET_SIZE);
- return -1;
+ LOG(LS_WARNING) << "Media packet " << media_packet->length << " bytes "
+ << "with overhead is larger than " << IP_PACKET_SIZE;
}
media_list_it++;
}
@@ -584,9 +557,7 @@ void ForwardErrorCorrection::InsertFECPacket(
}
if (fec_packet->protected_pkt_list.empty()) {
// All-zero packet mask; we can discard this FEC packet.
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "FEC packet %u has an all-zero packet mask.",
- fec_packet->seq_num, __FUNCTION__);
+ LOG(LS_WARNING) << "FEC packet has an all-zero packet mask.";
delete fec_packet;
} else {
AssignRecoveredPackets(fec_packet, recovered_packet_list);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.h
index 8910fe477d3..bb790f356f2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.h
@@ -117,8 +117,7 @@ class ForwardErrorCorrection {
typedef std::list<ReceivedPacket*> ReceivedPacketList;
typedef std::list<RecoveredPacket*> RecoveredPacketList;
- // \param[in] id Module ID
- ForwardErrorCorrection(int32_t id);
+ ForwardErrorCorrection();
virtual ~ForwardErrorCorrection();
@@ -304,7 +303,6 @@ class ForwardErrorCorrection {
static void DiscardOldPackets(RecoveredPacketList* recovered_packet_list);
static uint16_t ParseSequenceNumber(uint8_t* packet);
- int32_t id_;
std::vector<Packet> generated_fec_packets_;
FecPacketList fec_packet_list_;
bool fec_packet_received_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h
index ddac3cd113a..f82e46d5723 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h
@@ -8,6 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_INTERNAL_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_INTERNAL_H_
+
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
#include "webrtc/typedefs.h"
@@ -64,3 +67,4 @@ void GeneratePacketMasks(int num_media_packets, int num_fec_packets,
} // namespace internal
} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_INTERNAL_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
index 8c6cc5434cd..209af40bc43 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
@@ -164,7 +164,7 @@ class RtxLoopBackTransport : public webrtc::Transport {
class RtpRtcpRtxNackTest : public ::testing::Test {
protected:
RtpRtcpRtxNackTest()
- : rtp_payload_registry_(0, RTPPayloadStrategy::CreateStrategy(false)),
+ : rtp_payload_registry_(RTPPayloadStrategy::CreateStrategy(false)),
rtp_rtcp_module_(NULL),
transport_(kTestSsrc + 1),
receiver_(),
@@ -188,7 +188,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
kTestId, &fake_clock, &receiver_, rtp_feedback_.get(),
&rtp_payload_registry_));
- EXPECT_EQ(0, rtp_rtcp_module_->SetSSRC(kTestSsrc));
+ rtp_rtcp_module_->SetSSRC(kTestSsrc);
EXPECT_EQ(0, rtp_rtcp_module_->SetRTCPStatus(kRtcpCompound));
rtp_receiver_->SetNACKStatus(kNackRtcp);
EXPECT_EQ(0, rtp_rtcp_module_->SetStorePacketsStatus(true, 600));
@@ -253,9 +253,9 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
}
void RunRtxTest(RtxMode rtx_method, int loss) {
- rtp_payload_registry_.SetRtxStatus(true, kTestSsrc + 1);
- EXPECT_EQ(0, rtp_rtcp_module_->SetRTXSendStatus(rtx_method, true,
- kTestSsrc + 1));
+ rtp_payload_registry_.SetRtxSsrc(kTestSsrc + 1);
+ rtp_rtcp_module_->SetRTXSendStatus(rtx_method);
+ rtp_rtcp_module_->SetRtxSsrc(kTestSsrc + 1);
transport_.DropEveryNthPacket(loss);
uint32_t timestamp = 3000;
uint16_t nack_list[kVideoNackListSize];
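The RTX API change driving this test update is easiest to see side by side: the combined enable/SSRC call is split into separate mode and SSRC setters, and the test no longer checks return codes. A before/after sketch using the test's own names (kTestSsrc and rtx_method come from the fixture above):

  // Old API: status, enable flag and RTX SSRC set in one call.
  rtp_payload_registry_.SetRtxStatus(true, kTestSsrc + 1);
  EXPECT_EQ(0, rtp_rtcp_module_->SetRTXSendStatus(rtx_method, true,
                                                  kTestSsrc + 1));

  // New API: RTX mode and RTX SSRC configured separately.
  rtp_payload_registry_.SetRtxSsrc(kTestSsrc + 1);
  rtp_rtcp_module_->SetRTXSendStatus(rtx_method);
  rtp_rtcp_module_->SetRtxSsrc(kTestSsrc + 1);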
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
index ada7d70266b..baa3827949c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
@@ -39,7 +39,7 @@ void VerifyHeader(uint16_t seq_num,
class ProducerFecTest : public ::testing::Test {
protected:
virtual void SetUp() {
- fec_ = new ForwardErrorCorrection(0);
+ fec_ = new ForwardErrorCorrection();
producer_ = new ProducerFec(fec_);
generator_ = new FrameGenerator;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
index 3ed44b83701..aa7c9c57156 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
@@ -24,14 +24,16 @@ const int kStatisticsProcessIntervalMs = 1000;
StreamStatistician::~StreamStatistician() {}
-StreamStatisticianImpl::StreamStatisticianImpl(Clock* clock)
+StreamStatisticianImpl::StreamStatisticianImpl(
+ Clock* clock,
+ RtcpStatisticsCallback* rtcp_callback,
+ StreamDataCountersCallback* rtp_callback)
: clock_(clock),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ stream_lock_(CriticalSectionWrapper::CreateCriticalSection()),
incoming_bitrate_(clock, NULL),
ssrc_(0),
max_reordering_threshold_(kDefaultMaxReorderingThreshold),
jitter_q4_(0),
- jitter_max_q4_(0),
cumulative_loss_(0),
jitter_q4_transmission_time_offset_(0),
last_receive_time_ms_(0),
@@ -42,53 +44,53 @@ StreamStatisticianImpl::StreamStatisticianImpl(Clock* clock)
received_seq_first_(0),
received_seq_max_(0),
received_seq_wraps_(0),
- first_packet_(true),
received_packet_overhead_(12),
- received_byte_count_(0),
- received_retransmitted_packets_(0),
- received_inorder_packet_count_(0),
last_report_inorder_packets_(0),
last_report_old_packets_(0),
last_report_seq_max_(0),
- last_reported_statistics_() {}
+ rtcp_callback_(rtcp_callback),
+ rtp_callback_(rtp_callback) {}
void StreamStatisticianImpl::ResetStatistics() {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
last_report_inorder_packets_ = 0;
last_report_old_packets_ = 0;
last_report_seq_max_ = 0;
- memset(&last_reported_statistics_, 0, sizeof(last_reported_statistics_));
+ last_reported_statistics_ = RtcpStatistics();
jitter_q4_ = 0;
- jitter_max_q4_ = 0;
cumulative_loss_ = 0;
jitter_q4_transmission_time_offset_ = 0;
received_seq_wraps_ = 0;
received_seq_max_ = 0;
received_seq_first_ = 0;
- received_byte_count_ = 0;
- received_retransmitted_packets_ = 0;
- received_inorder_packet_count_ = 0;
- first_packet_ = true;
+ receive_counters_ = StreamDataCounters();
}
void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header,
size_t bytes,
bool retransmitted) {
- CriticalSectionScoped cs(crit_sect_.get());
+ UpdateCounters(header, bytes, retransmitted);
+ NotifyRtpCallback();
+}
+
+void StreamStatisticianImpl::UpdateCounters(const RTPHeader& header,
+ size_t bytes,
+ bool retransmitted) {
+ CriticalSectionScoped cs(stream_lock_.get());
bool in_order = InOrderPacketInternal(header.sequenceNumber);
ssrc_ = header.ssrc;
incoming_bitrate_.Update(bytes);
- received_byte_count_ += bytes;
+ receive_counters_.bytes +=
+ bytes - (header.paddingLength + header.headerLength);
+ receive_counters_.header_bytes += header.headerLength;
+ receive_counters_.padding_bytes += header.paddingLength;
+ ++receive_counters_.packets;
+ if (!in_order && retransmitted) {
+ ++receive_counters_.retransmitted_packets;
+ }
- if (first_packet_) {
- first_packet_ = false;
- // This is the first received report.
+ if (receive_counters_.packets == 1) {
received_seq_first_ = header.sequenceNumber;
- received_seq_max_ = header.sequenceNumber;
- received_inorder_packet_count_ = 1;
- clock_->CurrentNtp(last_receive_time_secs_, last_receive_time_frac_);
- last_receive_time_ms_ = clock_->TimeInMilliseconds();
- return;
}
// Count only the new packets received. That is, if packets 1, 2, 3, 5, 4, 6
@@ -98,66 +100,27 @@ void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header,
uint32_t receive_time_secs;
uint32_t receive_time_frac;
clock_->CurrentNtp(receive_time_secs, receive_time_frac);
- received_inorder_packet_count_++;
// Wrong if we use RetransmitOfOldPacket.
- int32_t seq_diff = header.sequenceNumber - received_seq_max_;
- if (seq_diff < 0) {
+ if (receive_counters_.packets > 1 &&
+ received_seq_max_ > header.sequenceNumber) {
// Wrap around detected.
received_seq_wraps_++;
}
// New max.
received_seq_max_ = header.sequenceNumber;
+ // If the timestamp is new and more than one in-order packet has been
+ // received, calculate new jitter statistics.
if (header.timestamp != last_received_timestamp_ &&
- received_inorder_packet_count_ > 1) {
- uint32_t receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP(
- receive_time_secs, receive_time_frac, header.payload_type_frequency);
- uint32_t last_receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP(
- last_receive_time_secs_, last_receive_time_frac_,
- header.payload_type_frequency);
- int32_t time_diff_samples = (receive_time_rtp - last_receive_time_rtp) -
- (header.timestamp - last_received_timestamp_);
-
- time_diff_samples = abs(time_diff_samples);
-
- // lib_jingle sometimes deliver crazy jumps in TS for the same stream.
- // If this happens, don't update jitter value. Use 5 secs video frequency
- // as the threshold.
- if (time_diff_samples < 450000) {
- // Note we calculate in Q4 to avoid using float.
- int32_t jitter_diff_q4 = (time_diff_samples << 4) - jitter_q4_;
- jitter_q4_ += ((jitter_diff_q4 + 8) >> 4);
- }
-
- // Extended jitter report, RFC 5450.
- // Actual network jitter, excluding the source-introduced jitter.
- int32_t time_diff_samples_ext =
- (receive_time_rtp - last_receive_time_rtp) -
- ((header.timestamp +
- header.extension.transmissionTimeOffset) -
- (last_received_timestamp_ +
- last_received_transmission_time_offset_));
-
- time_diff_samples_ext = abs(time_diff_samples_ext);
-
- if (time_diff_samples_ext < 450000) {
- int32_t jitter_diffQ4TransmissionTimeOffset =
- (time_diff_samples_ext << 4) - jitter_q4_transmission_time_offset_;
- jitter_q4_transmission_time_offset_ +=
- ((jitter_diffQ4TransmissionTimeOffset + 8) >> 4);
- }
+ (receive_counters_.packets - receive_counters_.retransmitted_packets) >
+ 1) {
+ UpdateJitter(header, receive_time_secs, receive_time_frac);
}
last_received_timestamp_ = header.timestamp;
last_receive_time_secs_ = receive_time_secs;
last_receive_time_frac_ = receive_time_frac;
last_receive_time_ms_ = clock_->TimeInMilliseconds();
- } else {
- if (retransmitted) {
- received_retransmitted_packets_++;
- } else {
- received_inorder_packet_count_++;
- }
}
uint16_t packet_oh = header.headerLength + header.paddingLength;
@@ -167,29 +130,113 @@ void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header,
received_packet_overhead_ = (15 * received_packet_overhead_ + packet_oh) >> 4;
}
+void StreamStatisticianImpl::UpdateJitter(const RTPHeader& header,
+ uint32_t receive_time_secs,
+ uint32_t receive_time_frac) {
+ uint32_t receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP(
+ receive_time_secs, receive_time_frac, header.payload_type_frequency);
+ uint32_t last_receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP(
+ last_receive_time_secs_, last_receive_time_frac_,
+ header.payload_type_frequency);
+ int32_t time_diff_samples = (receive_time_rtp - last_receive_time_rtp) -
+ (header.timestamp - last_received_timestamp_);
+
+ time_diff_samples = abs(time_diff_samples);
+
+ // lib_jingle sometimes delivers crazy jumps in TS for the same stream.
+ // If this happens, don't update the jitter value. Use 5 seconds of video
+ // frequency (450000 samples at 90 kHz) as the threshold.
+ if (time_diff_samples < 450000) {
+ // Note we calculate in Q4 to avoid using float.
+ int32_t jitter_diff_q4 = (time_diff_samples << 4) - jitter_q4_;
+ jitter_q4_ += ((jitter_diff_q4 + 8) >> 4);
+ }
+
+ // Extended jitter report, RFC 5450.
+ // Actual network jitter, excluding the source-introduced jitter.
+ int32_t time_diff_samples_ext =
+ (receive_time_rtp - last_receive_time_rtp) -
+ ((header.timestamp +
+ header.extension.transmissionTimeOffset) -
+ (last_received_timestamp_ +
+ last_received_transmission_time_offset_));
+
+ time_diff_samples_ext = abs(time_diff_samples_ext);
+
+ if (time_diff_samples_ext < 450000) {
+ int32_t jitter_diffQ4TransmissionTimeOffset =
+ (time_diff_samples_ext << 4) - jitter_q4_transmission_time_offset_;
+ jitter_q4_transmission_time_offset_ +=
+ ((jitter_diffQ4TransmissionTimeOffset + 8) >> 4);
+ }
+}
+
+void StreamStatisticianImpl::NotifyRtpCallback() {
+ StreamDataCounters data;
+ uint32_t ssrc;
+ {
+ CriticalSectionScoped cs(stream_lock_.get());
+ data = receive_counters_;
+ ssrc = ssrc_;
+ }
+ rtp_callback_->DataCountersUpdated(data, ssrc);
+}
+
+void StreamStatisticianImpl::NotifyRtcpCallback() {
+ RtcpStatistics data;
+ uint32_t ssrc;
+ {
+ CriticalSectionScoped cs(stream_lock_.get());
+ data = last_reported_statistics_;
+ ssrc = ssrc_;
+ }
+ rtcp_callback_->StatisticsUpdated(data, ssrc);
+}
+
+void StreamStatisticianImpl::FecPacketReceived() {
+ {
+ CriticalSectionScoped cs(stream_lock_.get());
+ ++receive_counters_.fec_packets;
+ }
+ NotifyRtpCallback();
+}
+
void StreamStatisticianImpl::SetMaxReorderingThreshold(
int max_reordering_threshold) {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
max_reordering_threshold_ = max_reordering_threshold;
}
-bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) {
- CriticalSectionScoped cs(crit_sect_.get());
- if (received_seq_first_ == 0 && received_byte_count_ == 0) {
- // We have not received anything.
- return false;
- }
-
- if (!reset) {
- if (last_report_inorder_packets_ == 0) {
- // No report.
+bool StreamStatisticianImpl::GetStatistics(RtcpStatistics* statistics,
+ bool reset) {
+ {
+ CriticalSectionScoped cs(stream_lock_.get());
+ if (received_seq_first_ == 0 && receive_counters_.bytes == 0) {
+ // We have not received anything.
return false;
}
- // Just get last report.
- *statistics = last_reported_statistics_;
- return true;
+
+ if (!reset) {
+ if (last_report_inorder_packets_ == 0) {
+ // No report.
+ return false;
+ }
+ // Just get last report.
+ *statistics = last_reported_statistics_;
+ return true;
+ }
+
+ *statistics = CalculateRtcpStatistics();
}
+ NotifyRtcpCallback();
+
+ return true;
+}
+
+RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() {
+ RtcpStatistics stats;
+
if (last_report_inorder_packets_ == 0) {
// First time we send a report.
last_report_seq_max_ = received_seq_first_ - 1;
@@ -206,7 +253,8 @@ bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) {
// Number of received RTP packets since last report, counts all packets but
// not re-transmissions.
uint32_t rec_since_last =
- received_inorder_packet_count_ - last_report_inorder_packets_;
+ (receive_counters_.packets - receive_counters_.retransmitted_packets) -
+ last_report_inorder_packets_;
// With NACK we don't know the expected retransmissions during the last
// second. We know how many "old" packets we have received. We just count
@@ -218,7 +266,7 @@ bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) {
// re-transmitted. We use RTT to decide if a packet is re-ordered or
// re-transmitted.
uint32_t retransmitted_packets =
- received_retransmitted_packets_ - last_report_old_packets_;
+ receive_counters_.retransmitted_packets - last_report_old_packets_;
rec_since_last += retransmitted_packets;
int32_t missing = 0;
@@ -231,64 +279,60 @@ bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) {
local_fraction_lost =
static_cast<uint8_t>(255 * missing / exp_since_last);
}
- statistics->fraction_lost = local_fraction_lost;
+ stats.fraction_lost = local_fraction_lost;
// We need a counter for cumulative loss too.
cumulative_loss_ += missing;
-
- if (jitter_q4_ > jitter_max_q4_) {
- jitter_max_q4_ = jitter_q4_;
- }
- statistics->cumulative_lost = cumulative_loss_;
- statistics->extended_max_sequence_number = (received_seq_wraps_ << 16) +
- received_seq_max_;
+ stats.cumulative_lost = cumulative_loss_;
+ stats.extended_max_sequence_number =
+ (received_seq_wraps_ << 16) + received_seq_max_;
// Note: internal jitter value is in Q4 and needs to be scaled by 1/16.
- statistics->jitter = jitter_q4_ >> 4;
- statistics->max_jitter = jitter_max_q4_ >> 4;
- if (reset) {
- // Store this report.
- last_reported_statistics_ = *statistics;
-
- // Only for report blocks in RTCP SR and RR.
- last_report_inorder_packets_ = received_inorder_packet_count_;
- last_report_old_packets_ = received_retransmitted_packets_;
- last_report_seq_max_ = received_seq_max_;
- }
- return true;
+ stats.jitter = jitter_q4_ >> 4;
+
+ // Store this report.
+ last_reported_statistics_ = stats;
+
+ // Only for report blocks in RTCP SR and RR.
+ last_report_inorder_packets_ =
+ receive_counters_.packets - receive_counters_.retransmitted_packets;
+ last_report_old_packets_ = receive_counters_.retransmitted_packets;
+ last_report_seq_max_ = received_seq_max_;
+
+ return stats;
}
void StreamStatisticianImpl::GetDataCounters(
uint32_t* bytes_received, uint32_t* packets_received) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
if (bytes_received) {
- *bytes_received = received_byte_count_;
+ *bytes_received = receive_counters_.bytes + receive_counters_.header_bytes +
+ receive_counters_.padding_bytes;
}
if (packets_received) {
- *packets_received =
- received_retransmitted_packets_ + received_inorder_packet_count_;
+ *packets_received = receive_counters_.packets;
}
}
uint32_t StreamStatisticianImpl::BitrateReceived() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
return incoming_bitrate_.BitrateNow();
}
void StreamStatisticianImpl::ProcessBitrate() {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
incoming_bitrate_.Process();
}
void StreamStatisticianImpl::LastReceiveTimeNtp(uint32_t* secs,
uint32_t* frac) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
*secs = last_receive_time_secs_;
*frac = last_receive_time_frac_;
}
bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
const RTPHeader& header, int min_rtt) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
if (InOrderPacketInternal(header.sequenceNumber)) {
return false;
}
@@ -323,7 +367,7 @@ bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
}
bool StreamStatisticianImpl::IsPacketInOrder(uint16_t sequence_number) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
return InOrderPacketInternal(sequence_number);
}
@@ -348,8 +392,10 @@ ReceiveStatistics* ReceiveStatistics::Create(Clock* clock) {
ReceiveStatisticsImpl::ReceiveStatisticsImpl(Clock* clock)
: clock_(clock),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- last_rate_update_ms_(0) {}
+ receive_statistics_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+ last_rate_update_ms_(0),
+ rtcp_stats_callback_(NULL),
+ rtp_stats_callback_(NULL) {}
ReceiveStatisticsImpl::~ReceiveStatisticsImpl() {
while (!statisticians_.empty()) {
@@ -359,20 +405,31 @@ ReceiveStatisticsImpl::~ReceiveStatisticsImpl() {
}
void ReceiveStatisticsImpl::IncomingPacket(const RTPHeader& header,
- size_t bytes, bool old_packet) {
- CriticalSectionScoped cs(crit_sect_.get());
- StatisticianImplMap::iterator it = statisticians_.find(header.ssrc);
- if (it == statisticians_.end()) {
- std::pair<StatisticianImplMap::iterator, uint32_t> insert_result =
- statisticians_.insert(std::make_pair(
- header.ssrc, new StreamStatisticianImpl(clock_)));
- it = insert_result.first;
+ size_t bytes,
+ bool retransmitted) {
+ StatisticianImplMap::iterator it;
+ {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ it = statisticians_.find(header.ssrc);
+ if (it == statisticians_.end()) {
+ std::pair<StatisticianImplMap::iterator, uint32_t> insert_result =
+ statisticians_.insert(std::make_pair(
+ header.ssrc, new StreamStatisticianImpl(clock_, this, this)));
+ it = insert_result.first;
+ }
}
- statisticians_[header.ssrc]->IncomingPacket(header, bytes, old_packet);
+ it->second->IncomingPacket(header, bytes, retransmitted);
+}
+
+void ReceiveStatisticsImpl::FecPacketReceived(uint32_t ssrc) {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ StatisticianImplMap::iterator it = statisticians_.find(ssrc);
+ assert(it != statisticians_.end());
+ it->second->FecPacketReceived();
}
void ReceiveStatisticsImpl::ChangeSsrc(uint32_t from_ssrc, uint32_t to_ssrc) {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
StatisticianImplMap::iterator from_it = statisticians_.find(from_ssrc);
if (from_it == statisticians_.end())
return;
@@ -383,7 +440,7 @@ void ReceiveStatisticsImpl::ChangeSsrc(uint32_t from_ssrc, uint32_t to_ssrc) {
}
StatisticianMap ReceiveStatisticsImpl::GetActiveStatisticians() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
StatisticianMap active_statisticians;
for (StatisticianImplMap::const_iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
@@ -400,7 +457,7 @@ StatisticianMap ReceiveStatisticsImpl::GetActiveStatisticians() const {
StreamStatistician* ReceiveStatisticsImpl::GetStatistician(
uint32_t ssrc) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
StatisticianImplMap::const_iterator it = statisticians_.find(ssrc);
if (it == statisticians_.end())
return NULL;
@@ -409,7 +466,7 @@ StreamStatistician* ReceiveStatisticsImpl::GetStatistician(
void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
int max_reordering_threshold) {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
for (StatisticianImplMap::iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
it->second->SetMaxReorderingThreshold(max_reordering_threshold);
@@ -417,7 +474,7 @@ void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
}
int32_t ReceiveStatisticsImpl::Process() {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
for (StatisticianImplMap::iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
it->second->ProcessBitrate();
@@ -427,17 +484,50 @@ int32_t ReceiveStatisticsImpl::Process() {
}
int32_t ReceiveStatisticsImpl::TimeUntilNextProcess() {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
int time_since_last_update = clock_->TimeInMilliseconds() -
last_rate_update_ms_;
return std::max(kStatisticsProcessIntervalMs - time_since_last_update, 0);
}
+void ReceiveStatisticsImpl::RegisterRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ if (callback != NULL)
+ assert(rtcp_stats_callback_ == NULL);
+ rtcp_stats_callback_ = callback;
+}
+
+void ReceiveStatisticsImpl::StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ if (rtcp_stats_callback_) {
+ rtcp_stats_callback_->StatisticsUpdated(statistics, ssrc);
+ }
+}
+
+void ReceiveStatisticsImpl::RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ if (callback != NULL)
+ assert(rtp_stats_callback_ == NULL);
+ rtp_stats_callback_ = callback;
+}
+
+void ReceiveStatisticsImpl::DataCountersUpdated(const StreamDataCounters& stats,
+ uint32_t ssrc) {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ if (rtp_stats_callback_) {
+ rtp_stats_callback_->DataCountersUpdated(stats, ssrc);
+ }
+}
void NullReceiveStatistics::IncomingPacket(const RTPHeader& rtp_header,
size_t bytes,
bool retransmitted) {}
+void NullReceiveStatistics::FecPacketReceived(uint32_t ssrc) {}
+
StatisticianMap NullReceiveStatistics::GetActiveStatisticians() const {
return StatisticianMap();
}
@@ -454,4 +544,10 @@ int32_t NullReceiveStatistics::TimeUntilNextProcess() { return 0; }
int32_t NullReceiveStatistics::Process() { return 0; }
+void NullReceiveStatistics::RegisterRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {}
+
+void NullReceiveStatistics::RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {}
+
} // namespace webrtc
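The arithmetic that moved into StreamStatisticianImpl::UpdateJitter() above is the RFC 3550 interarrival-jitter estimator, J += (|D| - J) / 16, kept in Q4 fixed point to avoid floating point. A self-contained sketch of one update step (the RFC 5450 transmission-time-offset variant follows the same pattern):

  #include <cstdint>
  #include <cstdlib>

  // |time_diff_samples| is D: the receive-time delta minus the RTP-timestamp
  // delta of two consecutive packets, in RTP timestamp units.
  void UpdateJitterQ4(int32_t time_diff_samples, uint32_t* jitter_q4) {
    time_diff_samples = abs(time_diff_samples);
    // Skip wild timestamp jumps (more than 5 s at 90 kHz).
    if (time_diff_samples < 450000) {
      int32_t jitter_diff_q4 = (time_diff_samples << 4) - *jitter_q4;
      *jitter_q4 += (jitter_diff_q4 + 8) >> 4;  // +8 rounds to the nearest.
    }
  }

  // The value reported in RTCP is the Q4 estimate scaled down: jitter_q4 >> 4.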
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
index 0af074c4a13..4aa41f349e5 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
@@ -25,37 +25,48 @@ class CriticalSectionWrapper;
class StreamStatisticianImpl : public StreamStatistician {
public:
- explicit StreamStatisticianImpl(Clock* clock);
-
+ StreamStatisticianImpl(Clock* clock,
+ RtcpStatisticsCallback* rtcp_callback,
+ StreamDataCountersCallback* rtp_callback);
virtual ~StreamStatisticianImpl() {}
- virtual bool GetStatistics(Statistics* statistics, bool reset) OVERRIDE;
+ virtual bool GetStatistics(RtcpStatistics* statistics, bool reset) OVERRIDE;
virtual void GetDataCounters(uint32_t* bytes_received,
uint32_t* packets_received) const OVERRIDE;
virtual uint32_t BitrateReceived() const OVERRIDE;
virtual void ResetStatistics() OVERRIDE;
virtual bool IsRetransmitOfOldPacket(const RTPHeader& header,
- int min_rtt) const OVERRIDE;
+ int min_rtt) const OVERRIDE;
virtual bool IsPacketInOrder(uint16_t sequence_number) const OVERRIDE;
- void IncomingPacket(const RTPHeader& rtp_header, size_t bytes,
+ void IncomingPacket(const RTPHeader& rtp_header,
+ size_t bytes,
bool retransmitted);
+ void FecPacketReceived();
void SetMaxReorderingThreshold(int max_reordering_threshold);
void ProcessBitrate();
virtual void LastReceiveTimeNtp(uint32_t* secs, uint32_t* frac) const;
private:
bool InOrderPacketInternal(uint16_t sequence_number) const;
+ RtcpStatistics CalculateRtcpStatistics();
+ void UpdateJitter(const RTPHeader& header,
+ uint32_t receive_time_secs,
+ uint32_t receive_time_frac);
+ void UpdateCounters(const RTPHeader& rtp_header,
+ size_t bytes,
+ bool retransmitted);
+ void NotifyRtpCallback() LOCKS_EXCLUDED(stream_lock_.get());
+ void NotifyRtcpCallback() LOCKS_EXCLUDED(stream_lock_.get());
Clock* clock_;
- scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ scoped_ptr<CriticalSectionWrapper> stream_lock_;
Bitrate incoming_bitrate_;
uint32_t ssrc_;
int max_reordering_threshold_; // In number of packets or sequence numbers.
// Stats on received RTP packets.
uint32_t jitter_q4_;
- uint32_t jitter_max_q4_;
uint32_t cumulative_loss_;
uint32_t jitter_q4_transmission_time_offset_;
@@ -67,30 +78,34 @@ class StreamStatisticianImpl : public StreamStatistician {
uint16_t received_seq_first_;
uint16_t received_seq_max_;
uint16_t received_seq_wraps_;
- bool first_packet_;
// Current counter values.
uint16_t received_packet_overhead_;
- uint32_t received_byte_count_;
- uint32_t received_retransmitted_packets_;
- uint32_t received_inorder_packet_count_;
+ StreamDataCounters receive_counters_;
// Counter values when we sent the last report.
uint32_t last_report_inorder_packets_;
uint32_t last_report_old_packets_;
uint16_t last_report_seq_max_;
- Statistics last_reported_statistics_;
+ RtcpStatistics last_reported_statistics_;
+
+ RtcpStatisticsCallback* const rtcp_callback_;
+ StreamDataCountersCallback* const rtp_callback_;
};
-class ReceiveStatisticsImpl : public ReceiveStatistics {
+class ReceiveStatisticsImpl : public ReceiveStatistics,
+ public RtcpStatisticsCallback,
+ public StreamDataCountersCallback {
public:
explicit ReceiveStatisticsImpl(Clock* clock);
~ReceiveStatisticsImpl();
// Implement ReceiveStatistics.
- virtual void IncomingPacket(const RTPHeader& header, size_t bytes,
- bool old_packet) OVERRIDE;
+ virtual void IncomingPacket(const RTPHeader& header,
+ size_t bytes,
+ bool retransmitted) OVERRIDE;
+ virtual void FecPacketReceived(uint32_t ssrc) OVERRIDE;
virtual StatisticianMap GetActiveStatisticians() const OVERRIDE;
virtual StreamStatistician* GetStatistician(uint32_t ssrc) const OVERRIDE;
virtual void SetMaxReorderingThreshold(int max_reordering_threshold) OVERRIDE;
@@ -101,13 +116,27 @@ class ReceiveStatisticsImpl : public ReceiveStatistics {
void ChangeSsrc(uint32_t from_ssrc, uint32_t to_ssrc);
+ virtual void RegisterRtcpStatisticsCallback(RtcpStatisticsCallback* callback)
+ OVERRIDE;
+
+ virtual void RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) OVERRIDE;
+
private:
+ virtual void StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) OVERRIDE;
+ virtual void DataCountersUpdated(const StreamDataCounters& counters,
+ uint32_t ssrc) OVERRIDE;
+
typedef std::map<uint32_t, StreamStatisticianImpl*> StatisticianImplMap;
Clock* clock_;
- scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ scoped_ptr<CriticalSectionWrapper> receive_statistics_lock_;
int64_t last_rate_update_ms_;
StatisticianImplMap statisticians_;
+
+ RtcpStatisticsCallback* rtcp_stats_callback_;
+ StreamDataCountersCallback* rtp_stats_callback_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_
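The new callback plumbing in this header is the piece most callers will interact with: implement RtcpStatisticsCallback and/or StreamDataCountersCallback and register it on the ReceiveStatistics instance. The unit tests in the next file exercise exactly this path; a minimal sketch of the RTP-counter side (names other than the registered interfaces are illustrative):

  class LoggingRtpObserver : public webrtc::StreamDataCountersCallback {
   public:
    virtual void DataCountersUpdated(
        const webrtc::StreamDataCounters& counters, uint32_t ssrc) OVERRIDE {
      LOG(LS_INFO) << "SSRC " << ssrc << ": " << counters.packets
                   << " packets, " << counters.bytes << " payload bytes.";
    }
  };

  // receive_statistics is a webrtc::ReceiveStatistics*.
  LoggingRtpObserver observer;
  receive_statistics->RegisterRtpStatisticsCallback(&observer);
  // ... every IncomingPacket() now triggers DataCountersUpdated() ...
  receive_statistics->RegisterRtpStatisticsCallback(NULL);  // Deregister.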
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
index be8f2fcdcc9..f0b9dedde57 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
@@ -28,10 +28,10 @@ class ReceiveStatisticsTest : public ::testing::Test {
receive_statistics_(ReceiveStatistics::Create(&clock_)) {
memset(&header1_, 0, sizeof(header1_));
header1_.ssrc = kSsrc1;
- header1_.sequenceNumber = 0;
+ header1_.sequenceNumber = 100;
memset(&header2_, 0, sizeof(header2_));
header2_.ssrc = kSsrc2;
- header2_.sequenceNumber = 0;
+ header2_.sequenceNumber = 100;
}
protected:
@@ -131,4 +131,173 @@ TEST_F(ReceiveStatisticsTest, ActiveStatisticians) {
EXPECT_EQ(200u, bytes_received);
EXPECT_EQ(2u, packets_received);
}
+
+TEST_F(ReceiveStatisticsTest, RtcpCallbacks) {
+ class TestCallback : public RtcpStatisticsCallback {
+ public:
+ TestCallback()
+ : RtcpStatisticsCallback(), num_calls_(0), ssrc_(0), stats_() {}
+ virtual ~TestCallback() {}
+
+ virtual void StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) {
+ ssrc_ = ssrc;
+ stats_ = statistics;
+ ++num_calls_;
+ }
+
+ uint32_t num_calls_;
+ uint32_t ssrc_;
+ RtcpStatistics stats_;
+ } callback;
+
+ receive_statistics_->RegisterRtcpStatisticsCallback(&callback);
+
+ // Add some arbitrary data, with loss and jitter.
+ header1_.sequenceNumber = 1;
+ clock_.AdvanceTimeMilliseconds(7);
+ header1_.timestamp += 3;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ header1_.sequenceNumber += 2;
+ clock_.AdvanceTimeMilliseconds(9);
+ header1_.timestamp += 9;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ --header1_.sequenceNumber;
+ clock_.AdvanceTimeMilliseconds(13);
+ header1_.timestamp += 47;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, true);
+ header1_.sequenceNumber += 3;
+ clock_.AdvanceTimeMilliseconds(11);
+ header1_.timestamp += 17;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ ++header1_.sequenceNumber;
+
+ EXPECT_EQ(0u, callback.num_calls_);
+
+ // Call GetStatistics, simulating a timed rtcp sender thread.
+ RtcpStatistics statistics;
+ receive_statistics_->GetStatistician(kSsrc1)
+ ->GetStatistics(&statistics, true);
+
+ EXPECT_EQ(1u, callback.num_calls_);
+ EXPECT_EQ(callback.ssrc_, kSsrc1);
+ EXPECT_EQ(statistics.cumulative_lost, callback.stats_.cumulative_lost);
+ EXPECT_EQ(statistics.extended_max_sequence_number,
+ callback.stats_.extended_max_sequence_number);
+ EXPECT_EQ(statistics.fraction_lost, callback.stats_.fraction_lost);
+ EXPECT_EQ(statistics.jitter, callback.stats_.jitter);
+ EXPECT_EQ(51, statistics.fraction_lost);
+ EXPECT_EQ(1u, statistics.cumulative_lost);
+ EXPECT_EQ(5u, statistics.extended_max_sequence_number);
+ EXPECT_EQ(4u, statistics.jitter);
+
+ receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
+
+ // Add some more data.
+ header1_.sequenceNumber = 1;
+ clock_.AdvanceTimeMilliseconds(7);
+ header1_.timestamp += 3;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ header1_.sequenceNumber += 2;
+ clock_.AdvanceTimeMilliseconds(9);
+ header1_.timestamp += 9;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ --header1_.sequenceNumber;
+ clock_.AdvanceTimeMilliseconds(13);
+ header1_.timestamp += 47;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, true);
+ header1_.sequenceNumber += 3;
+ clock_.AdvanceTimeMilliseconds(11);
+ header1_.timestamp += 17;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ ++header1_.sequenceNumber;
+
+ receive_statistics_->GetStatistician(kSsrc1)
+ ->GetStatistics(&statistics, true);
+
+ // Should not have been called after deregister.
+ EXPECT_EQ(1u, callback.num_calls_);
+}
+
+TEST_F(ReceiveStatisticsTest, RtpCallbacks) {
+ class TestCallback : public StreamDataCountersCallback {
+ public:
+ TestCallback()
+ : StreamDataCountersCallback(), num_calls_(0), ssrc_(0), stats_() {}
+ virtual ~TestCallback() {}
+
+ virtual void DataCountersUpdated(const StreamDataCounters& counters,
+ uint32_t ssrc) {
+ ssrc_ = ssrc;
+ stats_ = counters;
+ ++num_calls_;
+ }
+
+ void ExpectMatches(uint32_t num_calls,
+ uint32_t ssrc,
+ uint32_t bytes,
+ uint32_t padding,
+ uint32_t packets,
+ uint32_t retransmits,
+ uint32_t fec) {
+ EXPECT_EQ(num_calls, num_calls_);
+ EXPECT_EQ(ssrc, ssrc_);
+ EXPECT_EQ(bytes, stats_.bytes);
+ EXPECT_EQ(padding, stats_.padding_bytes);
+ EXPECT_EQ(packets, stats_.packets);
+ EXPECT_EQ(retransmits, stats_.retransmitted_packets);
+ EXPECT_EQ(fec, stats_.fec_packets);
+ }
+
+ uint32_t num_calls_;
+ uint32_t ssrc_;
+ StreamDataCounters stats_;
+ } callback;
+
+ receive_statistics_->RegisterRtpStatisticsCallback(&callback);
+
+ const uint32_t kHeaderLength = 20;
+ const uint32_t kPaddingLength = 9;
+
+ // One packet of size kPacketSize1.
+ header1_.headerLength = kHeaderLength;
+ receive_statistics_->IncomingPacket(
+ header1_, kPacketSize1 + kHeaderLength, false);
+ callback.ExpectMatches(1, kSsrc1, kPacketSize1, 0, 1, 0, 0);
+
+ ++header1_.sequenceNumber;
+ clock_.AdvanceTimeMilliseconds(5);
+ header1_.paddingLength = 9;
+ // Another packet of size kPacketSize1 with 9 bytes padding.
+ receive_statistics_->IncomingPacket(
+ header1_, kPacketSize1 + kHeaderLength + kPaddingLength, false);
+ callback.ExpectMatches(2, kSsrc1, 2 * kPacketSize1, kPaddingLength, 2, 0, 0);
+
+ clock_.AdvanceTimeMilliseconds(5);
+ // Retransmit last packet.
+ receive_statistics_->IncomingPacket(
+ header1_, kPacketSize1 + kHeaderLength + kPaddingLength, true);
+ callback.ExpectMatches(
+ 3, kSsrc1, 3 * kPacketSize1, kPaddingLength * 2, 3, 1, 0);
+
+ header1_.paddingLength = 0;
+ ++header1_.sequenceNumber;
+ clock_.AdvanceTimeMilliseconds(5);
+ // One recovered packet.
+ receive_statistics_->IncomingPacket(
+ header1_, kPacketSize1 + kHeaderLength, false);
+ receive_statistics_->FecPacketReceived(kSsrc1);
+ callback.ExpectMatches(
+ 5, kSsrc1, 4 * kPacketSize1, kPaddingLength * 2, 4, 1, 1);
+
+ receive_statistics_->RegisterRtpStatisticsCallback(NULL);
+
+ // New stats, but callback should not be called.
+ ++header1_.sequenceNumber;
+ clock_.AdvanceTimeMilliseconds(5);
+ receive_statistics_->IncomingPacket(
+ header1_, kPacketSize1 + kHeaderLength, true);
+ callback.ExpectMatches(
+ 5, kSsrc1, 4 * kPacketSize1, kPaddingLength * 2, 4, 1, 1);
+}
} // namespace webrtc
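As a quick cross-check of the RtpCallbacks numbers above against the UpdateCounters() bookkeeping introduced earlier in this patch (kPacketSize1 is the fixture's payload size, defined elsewhere in the test):

  bytes passed to IncomingPacket() = kPacketSize1 + kHeaderLength + kPaddingLength
  counters.bytes          += bytes - (kHeaderLength + kPaddingLength) = kPacketSize1
  counters.header_bytes   += kHeaderLength   (20)
  counters.padding_bytes  += kPaddingLength  (9)

so the callback sees only payload bytes in StreamDataCounters::bytes, with header and padding accounted for separately.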
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc
new file mode 100644
index 00000000000..0d71c26b63c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
+
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/timestamp_extrapolator.h"
+
+namespace webrtc {
+
+// TODO(wu): Refactor this class so that it can be shared with
+// vie_sync_module.cc.
+RemoteNtpTimeEstimator::RemoteNtpTimeEstimator(Clock* clock)
+ : clock_(clock),
+ ts_extrapolator_(
+ new TimestampExtrapolator(clock_->TimeInMilliseconds())) {
+}
+
+RemoteNtpTimeEstimator::~RemoteNtpTimeEstimator() {}
+
+bool RemoteNtpTimeEstimator::UpdateRtcpTimestamp(uint32_t ssrc,
+ RtpRtcp* rtp_rtcp) {
+ assert(rtp_rtcp);
+ uint16_t rtt = 0;
+ rtp_rtcp->RTT(ssrc, &rtt, NULL, NULL, NULL);
+ if (rtt == 0) {
+ // Waiting for valid rtt.
+ return true;
+ }
+ // Update RTCP list
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ if (0 != rtp_rtcp->RemoteNTP(&ntp_secs,
+ &ntp_frac,
+ NULL,
+ NULL,
+ &rtp_timestamp)) {
+ // Waiting for RTCP.
+ return true;
+ }
+ bool new_rtcp_sr = false;
+ if (!UpdateRtcpList(
+ ntp_secs, ntp_frac, rtp_timestamp, &rtcp_list_, &new_rtcp_sr)) {
+ return false;
+ }
+ if (!new_rtcp_sr) {
+ // No new RTCP SR since last time this function was called.
+ return true;
+ }
+ // Update extrapolator with the new arrival time.
+ // The extrapolator works in the clock's TimeInMilliseconds() domain.
+ int64_t receiver_arrival_time_ms = clock_->TimeInMilliseconds();
+ int64_t sender_send_time_ms = Clock::NtpToMs(ntp_secs, ntp_frac);
+ int64_t sender_arrival_time_90k = (sender_send_time_ms + rtt / 2) * 90;
+ ts_extrapolator_->Update(receiver_arrival_time_ms, sender_arrival_time_90k);
+ return true;
+}
+
+int64_t RemoteNtpTimeEstimator::Estimate(uint32_t rtp_timestamp) {
+ if (rtcp_list_.size() < 2) {
+ // We need two RTCP SR reports to calculate NTP.
+ return -1;
+ }
+ int64_t sender_capture_ntp_ms = 0;
+ if (!RtpToNtpMs(rtp_timestamp, rtcp_list_, &sender_capture_ntp_ms)) {
+ return -1;
+ }
+ uint32_t timestamp = sender_capture_ntp_ms * 90;
+ int64_t receiver_capture_ms =
+ ts_extrapolator_->ExtrapolateLocalTime(timestamp);
+ int64_t ntp_offset =
+ clock_->CurrentNtpInMilliseconds() - clock_->TimeInMilliseconds();
+ return receiver_capture_ms + ntp_offset;
+}
+
+} // namespace webrtc
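Read together with the unit test that follows, the intended call pattern for the new estimator looks roughly like this (a sketch only; Clock::GetRealTimeClock() and the surrounding receive path are assumptions, and rtp_rtcp is the RtpRtcp module that receives RTCP for |ssrc|):

  webrtc::RemoteNtpTimeEstimator estimator(webrtc::Clock::GetRealTimeClock());

  // Feed the latest RTCP SR into the estimator, e.g. once per received RTCP
  // packet. Returns false if the internal RTCP list could not be updated.
  if (!estimator.UpdateRtcpTimestamp(ssrc, rtp_rtcp)) {
    // Treat as an error; otherwise the estimator simply waits for more SRs.
  }

  // Map an RTP timestamp to the sender's capture time on the local NTP clock.
  int64_t capture_ntp_ms = estimator.Estimate(rtp_timestamp);
  if (capture_ntp_ms == -1) {
    // Not enough RTCP SRs yet; at least two are needed.
  }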
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc
new file mode 100644
index 00000000000..63cedf03afe
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc
@@ -0,0 +1,112 @@
+/*
+* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+*
+* Use of this source code is governed by a BSD-style license
+* that can be found in the LICENSE file in the root of the source
+* tree. An additional intellectual property rights grant can be found
+* in the file PATENTS. All contributing project authors may
+* be found in the AUTHORS file in the root of the source tree.
+*/
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
+#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Return;
+using ::testing::SetArgPointee;
+
+namespace webrtc {
+
+static const int kTestRtt = 10;
+static const int64_t kLocalClockInitialTimeMs = 123;
+static const int64_t kRemoteClockInitialTimeMs = 345;
+static const uint32_t kTimestampOffset = 567;
+static const int kTestSsrc = 789;
+
+class RemoteNtpTimeEstimatorTest : public ::testing::Test {
+ protected:
+ RemoteNtpTimeEstimatorTest()
+ : local_clock_(kLocalClockInitialTimeMs * 1000),
+ remote_clock_(kRemoteClockInitialTimeMs * 1000),
+ estimator_(&local_clock_) {}
+ ~RemoteNtpTimeEstimatorTest() {}
+
+ void AdvanceTimeMilliseconds(int64_t ms) {
+ local_clock_.AdvanceTimeMilliseconds(ms);
+ remote_clock_.AdvanceTimeMilliseconds(ms);
+ }
+
+ uint32_t GetRemoteTimestamp() {
+ return static_cast<uint32_t>(remote_clock_.TimeInMilliseconds()) * 90 +
+ kTimestampOffset;
+ }
+
+ void SendRtcpSr() {
+ uint32_t rtcp_timestamp = GetRemoteTimestamp();
+ uint32_t ntp_seconds;
+ uint32_t ntp_fractions;
+ remote_clock_.CurrentNtp(ntp_seconds, ntp_fractions);
+
+ AdvanceTimeMilliseconds(kTestRtt / 2);
+ ReceiveRtcpSr(rtcp_timestamp, ntp_seconds, ntp_fractions);
+ }
+
+ void UpdateRtcpTimestamp(MockRtpRtcp* rtp_rtcp, bool expected_result) {
+ if (rtp_rtcp) {
+ EXPECT_CALL(*rtp_rtcp, RTT(_, _, _, _, _))
+ .WillOnce(DoAll(SetArgPointee<1>(kTestRtt),
+ Return(0)));
+ }
+ EXPECT_EQ(expected_result,
+ estimator_.UpdateRtcpTimestamp(kTestSsrc, rtp_rtcp));
+ }
+
+ void ReceiveRtcpSr(uint32_t rtcp_timestamp,
+ uint32_t ntp_seconds,
+ uint32_t ntp_fractions) {
+ EXPECT_CALL(rtp_rtcp_, RemoteNTP(_, _, _, _, _))
+ .WillOnce(DoAll(SetArgPointee<0>(ntp_seconds),
+ SetArgPointee<1>(ntp_fractions),
+ SetArgPointee<4>(rtcp_timestamp),
+ Return(0)));
+
+ UpdateRtcpTimestamp(&rtp_rtcp_, true);
+ }
+
+ SimulatedClock local_clock_;
+ SimulatedClock remote_clock_;
+ MockRtpRtcp rtp_rtcp_;
+ RemoteNtpTimeEstimator estimator_;
+};
+
+TEST_F(RemoteNtpTimeEstimatorTest, Estimate) {
+ // The update fails without any RTCP SR: RemoteNTP() returns no valid NTP time.
+ EXPECT_CALL(rtp_rtcp_, RemoteNTP(_, _, _, _, _)).WillOnce(Return(0));
+ UpdateRtcpTimestamp(&rtp_rtcp_, false);
+
+ AdvanceTimeMilliseconds(1000);
+ // Remote peer sends first RTCP SR.
+ SendRtcpSr();
+
+ // Remote sends an RTP packet.
+ AdvanceTimeMilliseconds(15);
+ uint32_t rtp_timestamp = GetRemoteTimestamp();
+ int64_t capture_ntp_time_ms = local_clock_.CurrentNtpInMilliseconds();
+
+ // The local peer needs at least 2 RTCP SRs to calculate the capture time.
+ const int64_t kNotEnoughRtcpSr = -1;
+ EXPECT_EQ(kNotEnoughRtcpSr, estimator_.Estimate(rtp_timestamp));
+
+ AdvanceTimeMilliseconds(800);
+ // Remote sends second RTCP SR.
+ SendRtcpSr();
+
+ // The local peer now has enough RTCP SRs to calculate the capture time.
+ EXPECT_EQ(capture_ntp_time_ms, estimator_.Estimate(rtp_timestamp));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
index 68f61371395..88463e4718f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
@@ -69,6 +69,7 @@ class RtcpFormatRembTest : public ::testing::Test {
RemoteBitrateEstimatorFactory().Create(
&remote_bitrate_observer_,
system_clock_,
+ kMimdControl,
kRemoteBitrateEstimatorMinBitrateBps)) {}
virtual void SetUp();
virtual void TearDown();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc
new file mode 100644
index 00000000000..a4cdfd95dee
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc
@@ -0,0 +1,695 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+
+#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+using webrtc::RTCPUtility::PT_APP;
+using webrtc::RTCPUtility::PT_BYE;
+using webrtc::RTCPUtility::PT_IJ;
+using webrtc::RTCPUtility::PT_PSFB;
+using webrtc::RTCPUtility::PT_RR;
+using webrtc::RTCPUtility::PT_RTPFB;
+using webrtc::RTCPUtility::PT_SDES;
+using webrtc::RTCPUtility::PT_SR;
+using webrtc::RTCPUtility::PT_XR;
+
+using webrtc::RTCPUtility::RTCPPacketAPP;
+using webrtc::RTCPUtility::RTCPPacketBYE;
+using webrtc::RTCPUtility::RTCPPacketPSFBAPP;
+using webrtc::RTCPUtility::RTCPPacketPSFBFIR;
+using webrtc::RTCPUtility::RTCPPacketPSFBFIRItem;
+using webrtc::RTCPUtility::RTCPPacketPSFBPLI;
+using webrtc::RTCPUtility::RTCPPacketPSFBREMBItem;
+using webrtc::RTCPUtility::RTCPPacketPSFBRPSI;
+using webrtc::RTCPUtility::RTCPPacketPSFBSLI;
+using webrtc::RTCPUtility::RTCPPacketPSFBSLIItem;
+using webrtc::RTCPUtility::RTCPPacketReportBlockItem;
+using webrtc::RTCPUtility::RTCPPacketRR;
+using webrtc::RTCPUtility::RTCPPacketRTPFBNACK;
+using webrtc::RTCPUtility::RTCPPacketRTPFBNACKItem;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBN;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBNItem;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBR;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBRItem;
+using webrtc::RTCPUtility::RTCPPacketSR;
+using webrtc::RTCPUtility::RTCPPacketXRDLRRReportBlockItem;
+using webrtc::RTCPUtility::RTCPPacketXRReceiverReferenceTimeItem;
+using webrtc::RTCPUtility::RTCPPacketXR;
+using webrtc::RTCPUtility::RTCPPacketXRVOIPMetricItem;
+
+namespace webrtc {
+namespace rtcp {
+namespace {
+// Unused SSRC of media source, set to 0.
+const uint32_t kUnusedMediaSourceSsrc0 = 0;
+
+void AssignUWord8(uint8_t* buffer, size_t* offset, uint8_t value) {
+ buffer[(*offset)++] = value;
+}
+void AssignUWord16(uint8_t* buffer, size_t* offset, uint16_t value) {
+ ModuleRTPUtility::AssignUWord16ToBuffer(buffer + *offset, value);
+ *offset += 2;
+}
+void AssignUWord24(uint8_t* buffer, size_t* offset, uint32_t value) {
+ ModuleRTPUtility::AssignUWord24ToBuffer(buffer + *offset, value);
+ *offset += 3;
+}
+void AssignUWord32(uint8_t* buffer, size_t* offset, uint32_t value) {
+ ModuleRTPUtility::AssignUWord32ToBuffer(buffer + *offset, value);
+ *offset += 4;
+}
+
+size_t BlockToHeaderLength(size_t length_in_bytes) {
+ // Length in 32-bit words minus 1.
+ assert(length_in_bytes > 0);
+ assert(length_in_bytes % 4 == 0);
+ return (length_in_bytes / 4) - 1;
+}
+
+// From RFC 3550, RTP: A Transport Protocol for Real-Time Applications.
+//
+// RTP header format.
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC/FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateHeader(uint8_t count_or_format, // Depends on packet type.
+ uint8_t packet_type,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ assert(length <= 0xffff);
+ const uint8_t kVersion = 2;
+ AssignUWord8(buffer, pos, (kVersion << 6) + count_or_format);
+ AssignUWord8(buffer, pos, packet_type);
+ AssignUWord16(buffer, pos, length);
+}
+
+// Sender report (SR) (RFC 3550).
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC | PT=SR=200 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of sender |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | NTP timestamp, most significant word |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | NTP timestamp, least significant word |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | RTP timestamp |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | sender's packet count |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | sender's octet count |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+
+void CreateSenderReport(const RTCPPacketSR& sr,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ CreateHeader(sr.NumberOfReportBlocks, PT_SR, length, buffer, pos);
+ AssignUWord32(buffer, pos, sr.SenderSSRC);
+ AssignUWord32(buffer, pos, sr.NTPMostSignificant);
+ AssignUWord32(buffer, pos, sr.NTPLeastSignificant);
+ AssignUWord32(buffer, pos, sr.RTPTimestamp);
+ AssignUWord32(buffer, pos, sr.SenderPacketCount);
+ AssignUWord32(buffer, pos, sr.SenderOctetCount);
+}
+
+// Receiver report (RR), header (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC | PT=RR=201 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+
+void CreateReceiverReport(const RTCPPacketRR& rr,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ CreateHeader(rr.NumberOfReportBlocks, PT_RR, length, buffer, pos);
+ AssignUWord32(buffer, pos, rr.SenderSSRC);
+}
+
+// Report block (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | SSRC_1 (SSRC of first source) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | fraction lost | cumulative number of packets lost |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | extended highest sequence number received |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | interarrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | last SR (LSR) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | delay since last SR (DLSR) |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+
+void CreateReportBlocks(const std::vector<RTCPPacketReportBlockItem>& blocks,
+ uint8_t* buffer,
+ size_t* pos) {
+ for (std::vector<RTCPPacketReportBlockItem>::const_iterator
+ it = blocks.begin(); it != blocks.end(); ++it) {
+ AssignUWord32(buffer, pos, (*it).SSRC);
+ AssignUWord8(buffer, pos, (*it).FractionLost);
+ AssignUWord24(buffer, pos, (*it).CumulativeNumOfPacketsLost);
+ AssignUWord32(buffer, pos, (*it).ExtendedHighestSequenceNumber);
+ AssignUWord32(buffer, pos, (*it).Jitter);
+ AssignUWord32(buffer, pos, (*it).LastSR);
+ AssignUWord32(buffer, pos, (*it).DelayLastSR);
+ }
+}
+
+// Transmission Time Offsets in RTP Streams (RFC 5450).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// hdr |V=2|P| RC | PT=IJ=195 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | inter-arrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// . .
+// . .
+// . .
+// | inter-arrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateIj(const std::vector<uint32_t>& ij_items,
+ uint8_t* buffer,
+ size_t* pos) {
+ size_t length = ij_items.size();
+ CreateHeader(length, PT_IJ, length, buffer, pos);
+ for (std::vector<uint32_t>::const_iterator it = ij_items.begin();
+ it != ij_items.end(); ++it) {
+ AssignUWord32(buffer, pos, *it);
+ }
+}
+
+// Source Description (SDES) (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// header |V=2|P| SC | PT=SDES=202 | length |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// chunk | SSRC/CSRC_1 |
+// 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SDES items |
+// | ... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// chunk | SSRC/CSRC_2 |
+// 2 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SDES items |
+// | ... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+//
+// Canonical End-Point Identifier SDES Item (CNAME)
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | CNAME=1 | length | user and domain name ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateSdes(const std::vector<Sdes::Chunk>& chunks,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ CreateHeader(chunks.size(), PT_SDES, length, buffer, pos);
+ const uint8_t kSdesItemType = 1;
+ for (std::vector<Sdes::Chunk>::const_iterator it = chunks.begin();
+ it != chunks.end(); ++it) {
+ AssignUWord32(buffer, pos, (*it).ssrc);
+ AssignUWord8(buffer, pos, kSdesItemType);
+ AssignUWord8(buffer, pos, (*it).name.length());
+ memcpy(buffer + *pos, (*it).name.data(), (*it).name.length());
+ *pos += (*it).name.length();
+ memset(buffer + *pos, 0, (*it).null_octets);
+ *pos += (*it).null_octets;
+ }
+}
+
+// Bye packet (BYE) (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| SC | PT=BYE=203 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC/CSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : ... :
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// (opt) | length | reason for leaving ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateBye(const RTCPPacketBYE& bye,
+ const std::vector<uint32_t>& csrcs,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ CreateHeader(length, PT_BYE, length, buffer, pos);
+ AssignUWord32(buffer, pos, bye.SenderSSRC);
+ for (std::vector<uint32_t>::const_iterator it = csrcs.begin();
+ it != csrcs.end(); ++it) {
+ AssignUWord32(buffer, pos, *it);
+ }
+}
+
+// Application-Defined packet (APP) (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| subtype | PT=APP=204 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC/CSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | name (ASCII) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | application-dependent data ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateApp(const RTCPPacketAPP& app,
+ uint32_t ssrc,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ CreateHeader(app.SubType, PT_APP, length, buffer, pos);
+ AssignUWord32(buffer, pos, ssrc);
+ AssignUWord32(buffer, pos, app.Name);
+ memcpy(buffer + *pos, app.Data, app.Size);
+ *pos += app.Size;
+}
+
+// RFC 4585: Feedback format.
+//
+// Common packet format:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of media source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : Feedback Control Information (FCI) :
+// :
+//
+
+// Picture loss indication (PLI) (RFC 4585).
+//
+// FCI: no feedback control information.
+
+void CreatePli(const RTCPPacketPSFBPLI& pli,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ const uint8_t kFmt = 1;
+ CreateHeader(kFmt, PT_PSFB, length, buffer, pos);
+ AssignUWord32(buffer, pos, pli.SenderSSRC);
+ AssignUWord32(buffer, pos, pli.MediaSSRC);
+}
+
+// Slice loss indication (SLI) (RFC 4585).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | First | Number | PictureID |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateSli(const RTCPPacketPSFBSLI& sli,
+ const RTCPPacketPSFBSLIItem& sli_item,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ const uint8_t kFmt = 2;
+ CreateHeader(kFmt, PT_PSFB, length, buffer, pos);
+ AssignUWord32(buffer, pos, sli.SenderSSRC);
+ AssignUWord32(buffer, pos, sli.MediaSSRC);
+
+ AssignUWord8(buffer, pos, sli_item.FirstMB >> 5);
+ AssignUWord8(buffer, pos, (sli_item.FirstMB << 3) +
+ ((sli_item.NumberOfMB >> 10) & 0x07));
+ AssignUWord8(buffer, pos, sli_item.NumberOfMB >> 2);
+ AssignUWord8(buffer, pos, (sli_item.NumberOfMB << 6) + sli_item.PictureId);
+}
+
+// Generic NACK (RFC 4585).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | PID | BLP |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateNack(const RTCPPacketRTPFBNACK& nack,
+ const std::vector<RTCPPacketRTPFBNACKItem>& nack_fields,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ const uint8_t kFmt = 1;
+ CreateHeader(kFmt, PT_RTPFB, length, buffer, pos);
+ AssignUWord32(buffer, pos, nack.SenderSSRC);
+ AssignUWord32(buffer, pos, nack.MediaSSRC);
+ for (std::vector<RTCPPacketRTPFBNACKItem>::const_iterator
+ it = nack_fields.begin(); it != nack_fields.end(); ++it) {
+ AssignUWord16(buffer, pos, (*it).PacketID);
+ AssignUWord16(buffer, pos, (*it).BitMask);
+ }
+}
+
+// Reference picture selection indication (RPSI) (RFC 4585).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | PB |0| Payload Type| Native RPSI bit string |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | defined per codec ... | Padding (0) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateRpsi(const RTCPPacketPSFBRPSI& rpsi,
+ uint8_t padding_bytes,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ // Native bit string should be a multiple of 8 bits.
+ assert(rpsi.NumberOfValidBits % 8 == 0);
+ const uint8_t kFmt = 3;
+ CreateHeader(kFmt, PT_PSFB, length, buffer, pos);
+ AssignUWord32(buffer, pos, rpsi.SenderSSRC);
+ AssignUWord32(buffer, pos, rpsi.MediaSSRC);
+ AssignUWord8(buffer, pos, padding_bytes * 8);
+ AssignUWord8(buffer, pos, rpsi.PayloadType);
+ memcpy(buffer + *pos, rpsi.NativeBitString, rpsi.NumberOfValidBits / 8);
+ *pos += rpsi.NumberOfValidBits / 8;
+ memset(buffer + *pos, 0, padding_bytes);
+ *pos += padding_bytes;
+}
+
+// Full intra request (FIR) (RFC 5104).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | Seq nr. | Reserved |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateFir(const RTCPPacketPSFBFIR& fir,
+ const RTCPPacketPSFBFIRItem& fir_item,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ const uint8_t kFmt = 4;
+ CreateHeader(kFmt, PT_PSFB, length, buffer, pos);
+ AssignUWord32(buffer, pos, fir.SenderSSRC);
+ AssignUWord32(buffer, pos, kUnusedMediaSourceSsrc0);
+ AssignUWord32(buffer, pos, fir_item.SSRC);
+ AssignUWord8(buffer, pos, fir_item.CommandSequenceNumber);
+ AssignUWord24(buffer, pos, 0);
+}
+} // namespace
+
+void RtcpPacket::Append(RtcpPacket* packet) {
+ assert(packet);
+ appended_packets_.push_back(packet);
+}
+
+RawPacket RtcpPacket::Build() const {
+ size_t length = 0;
+ uint8_t packet[IP_PACKET_SIZE];
+ CreateAndAddAppended(packet, &length, IP_PACKET_SIZE);
+ return RawPacket(packet, length);
+}
+
+void RtcpPacket::Build(uint8_t* packet,
+ size_t* length,
+ size_t max_length) const {
+ *length = 0;
+ CreateAndAddAppended(packet, length, max_length);
+}
+
+void RtcpPacket::CreateAndAddAppended(uint8_t* packet,
+ size_t* length,
+ size_t max_length) const {
+ Create(packet, length, max_length);
+ for (std::vector<RtcpPacket*>::const_iterator it = appended_packets_.begin();
+ it != appended_packets_.end(); ++it) {
+ (*it)->CreateAndAddAppended(packet, length, max_length);
+ }
+}
+
+void Empty::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+}
+
+void SenderReport::Create(uint8_t* packet,
+ size_t* length,
+ size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateSenderReport(sr_, BlockToHeaderLength(BlockLength()), packet, length);
+ CreateReportBlocks(report_blocks_, packet, length);
+}
+
+void SenderReport::WithReportBlock(ReportBlock* block) {
+ assert(block);
+ if (report_blocks_.size() >= kMaxNumberOfReportBlocks) {
+ LOG(LS_WARNING) << "Max report blocks reached.";
+ return;
+ }
+ report_blocks_.push_back(block->report_block_);
+ sr_.NumberOfReportBlocks = report_blocks_.size();
+}
+
+void ReceiverReport::Create(uint8_t* packet,
+ size_t* length,
+ size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateReceiverReport(rr_, BlockToHeaderLength(BlockLength()), packet, length);
+ CreateReportBlocks(report_blocks_, packet, length);
+}
+
+void ReceiverReport::WithReportBlock(ReportBlock* block) {
+ assert(block);
+ if (report_blocks_.size() >= kMaxNumberOfReportBlocks) {
+ LOG(LS_WARNING) << "Max report blocks reached.";
+ return;
+ }
+ report_blocks_.push_back(block->report_block_);
+ rr_.NumberOfReportBlocks = report_blocks_.size();
+}
+
+void Ij::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateIj(ij_items_, packet, length);
+}
+
+void Ij::WithJitterItem(uint32_t jitter) {
+ if (ij_items_.size() >= kMaxNumberOfIjItems) {
+ LOG(LS_WARNING) << "Max inter-arrival jitter items reached.";
+ return;
+ }
+ ij_items_.push_back(jitter);
+}
+
+void Sdes::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ assert(!chunks_.empty());
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateSdes(chunks_, BlockToHeaderLength(BlockLength()), packet, length);
+}
+
+void Sdes::WithCName(uint32_t ssrc, std::string cname) {
+ assert(cname.length() <= 0xff);
+ if (chunks_.size() >= kMaxNumberOfChunks) {
+ LOG(LS_WARNING) << "Max SDES chunks reached.";
+ return;
+ }
+ // In each chunk, the list of items must be terminated by one or more null
+ // octets. The next chunk must start on a 32-bit boundary.
+ // CNAME (1 byte) | length (1 byte) | name | padding.
+ int null_octets = 4 - ((2 + cname.length()) % 4);
+ Chunk chunk;
+ chunk.ssrc = ssrc;
+ chunk.name = cname;
+ chunk.null_octets = null_octets;
+ chunks_.push_back(chunk);
+}
+
+size_t Sdes::BlockLength() const {
+ // Header (4 bytes).
+ // Chunk:
+ // SSRC/CSRC (4 bytes) | CNAME (1 byte) | length (1 byte) | name | padding.
+ size_t length = kHeaderLength;
+ for (std::vector<Chunk>::const_iterator it = chunks_.begin();
+ it != chunks_.end(); ++it) {
+ length += 6 + (*it).name.length() + (*it).null_octets;
+ }
+ assert(length % 4 == 0);
+ return length;
+}
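(Editorial worked example, not part of the patch: for a single chunk with CNAME "alice@host", the string used by the SDES unit test added later in this diff, the CNAME item takes 2 + 10 = 12 bytes, so WithCName() above adds 4 - (12 % 4) = 4 null octets; BlockLength() then returns 4 + 6 + 10 + 4 = 24 bytes, which satisfies the 32-bit alignment assert.)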
+
+void Bye::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateBye(bye_, csrcs_, BlockToHeaderLength(BlockLength()), packet, length);
+}
+
+void Bye::WithCsrc(uint32_t csrc) {
+ if (csrcs_.size() >= kMaxNumberOfCsrcs) {
+ LOG(LS_WARNING) << "Max CSRC size reached.";
+ return;
+ }
+ csrcs_.push_back(csrc);
+}
+
+void App::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateApp(app_, ssrc_, BlockToHeaderLength(BlockLength()), packet, length);
+}
+
+void Pli::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreatePli(pli_, BlockToHeaderLength(BlockLength()), packet, length);
+}
+
+void Sli::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateSli(sli_, sli_item_, BlockToHeaderLength(BlockLength()), packet,
+ length);
+}
+
+void Nack::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ assert(!nack_fields_.empty());
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateNack(nack_, nack_fields_, BlockToHeaderLength(BlockLength()), packet,
+ length);
+}
+
+void Nack::WithList(const uint16_t* nack_list, int length) {
+ assert(nack_list);
+ assert(nack_fields_.empty());
+ int i = 0;
+ while (i < length) {
+ uint16_t pid = nack_list[i++];
+ // Bitmask specifies losses in any of the 16 packets following the pid.
+ uint16_t bitmask = 0;
+ while (i < length) {
+ int shift = static_cast<uint16_t>(nack_list[i] - pid) - 1;
+ if (shift >= 0 && shift <= 15) {
+ bitmask |= (1 << shift);
+ ++i;
+ } else {
+ break;
+ }
+ }
+ RTCPUtility::RTCPPacketRTPFBNACKItem item;
+ item.PacketID = pid;
+ item.BitMask = bitmask;
+ nack_fields_.push_back(item);
+ }
+}
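Editorial sketch, not part of the patch: a minimal use of the packing above, restricted to the webrtc::rtcp::Nack API introduced in this diff. With the list below (the same one the Nack unit test uses), 1, 3, 8 and 16 all lie within 16 packets of the first PID, so WithList() folds everything into a single item with PID 0 and BLP 0x8085.

  #include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"

  webrtc::rtcp::RawPacket BuildExampleNack() {
    webrtc::rtcp::Nack nack;
    nack.From(0x12345678);  // SSRC of packet sender.
    nack.To(0x23456789);    // SSRC of media source.
    const uint16_t kList[] = {0, 1, 3, 8, 16};
    nack.WithList(kList, 5);
    // 12-byte common feedback header + one 4-byte (PID, BLP) item = 16 bytes.
    return nack.Build();
  }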
+
+void Rpsi::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ assert(rpsi_.NumberOfValidBits > 0);
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateRpsi(rpsi_, padding_bytes_, BlockToHeaderLength(BlockLength()), packet,
+ length);
+}
+
+void Rpsi::WithPictureId(uint64_t picture_id) {
+ const uint32_t kPidBits = 7;
+ const uint64_t k7MsbZeroMask = 0x1ffffffffffffff;
+ uint8_t required_bytes = 0;
+ uint64_t shifted_pid = picture_id;
+ do {
+ ++required_bytes;
+ shifted_pid = (shifted_pid >> kPidBits) & k7MsbZeroMask;
+ } while (shifted_pid > 0);
+
+ // Convert picture id to native bit string (natively defined by the video
+ // codec).
+ int pos = 0;
+ for (int i = required_bytes - 1; i > 0; i--) {
+ rpsi_.NativeBitString[pos++] =
+ 0x80 | static_cast<uint8_t>(picture_id >> (i * kPidBits));
+ }
+ rpsi_.NativeBitString[pos++] = static_cast<uint8_t>(picture_id & 0x7f);
+ rpsi_.NumberOfValidBits = pos * 8;
+
+ // Calculate padding bytes (to reach next 32-bit boundary, 1, 2 or 3 bytes).
+ padding_bytes_ = 4 - ((2 + required_bytes) % 4);
+ if (padding_bytes_ == 4) {
+ padding_bytes_ = 0;
+ }
+}
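(Editorial worked example, not part of the patch: for picture_id 0x81, the value exercised by the RpsiWithTwoByteNativeString unit test below, two 7-bit groups are needed; the loop above emits 0x80 | 0x01 = 0x81 followed by 0x01, NumberOfValidBits becomes 16, and the padding computation yields 4 - ((2 + 2) % 4) = 4, which the final check resets to 0 padding bytes.)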
+
+void Fir::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateFir(fir_, fir_item_, BlockToHeaderLength(BlockLength()), packet,
+ length);
+}
+} // namespace rtcp
+} // namespace webrtc
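Editorial sketch, not part of the patch: the size check shared by the Create() implementations above simply skips a block, with a warning, when it would overflow max_length, so building into a too-small caller buffer leaves the reported length at zero. The BuildWithTooSmallBuffer unit test later in this diff exercises the same path; a minimal stand-alone version:

  #include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"

  void BuildIntoTooSmallBuffer() {
    webrtc::rtcp::ReceiverReport rr;
    rr.From(0x12345678);
    uint8_t buffer[7];  // An RR without report blocks needs 8 bytes.
    size_t length = 0;
    rr.Build(buffer, &length, sizeof(buffer));
    // Create() logged "Max packet size reached." and wrote nothing: length == 0.
  }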
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.h
new file mode 100644
index 00000000000..f60e848b50b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.h
@@ -0,0 +1,726 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_RTCP_PACKET_H_
+#define WEBRTC_MODULES_RTP_RTCP_RTCP_PACKET_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace rtcp {
+
+enum { kCommonFbFmtLength = 12 };
+enum { kReportBlockLength = 24 };
+
+class RawPacket;
+
+// Class for building RTCP packets.
+//
+// Example:
+// ReportBlock report_block;
+//  report_block.To(234);
+//  report_block.WithFractionLost(10);
+//
+// ReceiverReport rr;
+// rr.From(123);
+//  rr.WithReportBlock(&report_block);
+//
+// Fir fir;
+// fir.From(123);
+//  fir.To(234);
+// fir.WithCommandSeqNum(123);
+//
+// size_t length = 0; // Builds an intra frame request
+// uint8_t packet[kPacketSize]; // with sequence number 123.
+// fir.Build(packet, &length, kPacketSize);
+//
+// RawPacket packet = fir.Build(); // Returns a RawPacket holding
+// // the built rtcp packet.
+//
+//  rr.Append(&fir);                   // Builds a compound RTCP packet with
+// RawPacket packet = rr.Build(); // a receiver report, report block
+// // and fir message.
+
+class RtcpPacket {
+ public:
+ virtual ~RtcpPacket() {}
+
+ void Append(RtcpPacket* packet);
+
+ RawPacket Build() const;
+
+ void Build(uint8_t* packet, size_t* length, size_t max_length) const;
+
+ protected:
+ RtcpPacket() : kHeaderLength(4) {}
+
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const = 0;
+
+ const size_t kHeaderLength;
+
+ private:
+ void CreateAndAddAppended(
+ uint8_t* packet, size_t* length, size_t max_length) const;
+
+ std::vector<RtcpPacket*> appended_packets_;
+};
+
+class Empty : public RtcpPacket {
+ public:
+ Empty() {}
+
+ virtual ~Empty() {}
+
+ protected:
+ virtual void Create(uint8_t* packet, size_t* length, size_t max_length) const;
+};
+
+// From RFC 3550, RTP: A Transport Protocol for Real-Time Applications.
+//
+// RTCP report block (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | SSRC_1 (SSRC of first source) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | fraction lost | cumulative number of packets lost |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | extended highest sequence number received |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | interarrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | last SR (LSR) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | delay since last SR (DLSR) |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+
+class ReportBlock {
+ public:
+ ReportBlock() {
+ // TODO(asapersson): Consider adding a constructor to struct.
+ memset(&report_block_, 0, sizeof(report_block_));
+ }
+
+ ~ReportBlock() {}
+
+ void To(uint32_t ssrc) {
+ report_block_.SSRC = ssrc;
+ }
+ void WithFractionLost(uint8_t fraction_lost) {
+ report_block_.FractionLost = fraction_lost;
+ }
+ void WithCumulativeLost(uint32_t cumulative_lost) {
+ report_block_.CumulativeNumOfPacketsLost = cumulative_lost;
+ }
+ void WithExtHighestSeqNum(uint32_t ext_highest_seq_num) {
+ report_block_.ExtendedHighestSequenceNumber = ext_highest_seq_num;
+ }
+ void WithJitter(uint32_t jitter) {
+ report_block_.Jitter = jitter;
+ }
+ void WithLastSr(uint32_t last_sr) {
+ report_block_.LastSR = last_sr;
+ }
+ void WithDelayLastSr(uint32_t delay_last_sr) {
+ report_block_.DelayLastSR = delay_last_sr;
+ }
+
+ private:
+ friend class SenderReport;
+ friend class ReceiverReport;
+ RTCPUtility::RTCPPacketReportBlockItem report_block_;
+};
+
+// RTCP sender report (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC | PT=SR=200 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of sender |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | NTP timestamp, most significant word |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | NTP timestamp, least significant word |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | RTP timestamp |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | sender's packet count |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | sender's octet count |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | report block(s) |
+// | .... |
+
+class SenderReport : public RtcpPacket {
+ public:
+ SenderReport() : RtcpPacket() {
+ memset(&sr_, 0, sizeof(sr_));
+ }
+
+ virtual ~SenderReport() {}
+
+ void From(uint32_t ssrc) {
+ sr_.SenderSSRC = ssrc;
+ }
+ void WithNtpSec(uint32_t sec) {
+ sr_.NTPMostSignificant = sec;
+ }
+ void WithNtpFrac(uint32_t frac) {
+ sr_.NTPLeastSignificant = frac;
+ }
+ void WithRtpTimestamp(uint32_t rtp_timestamp) {
+ sr_.RTPTimestamp = rtp_timestamp;
+ }
+ void WithPacketCount(uint32_t packet_count) {
+ sr_.SenderPacketCount = packet_count;
+ }
+ void WithOctetCount(uint32_t octet_count) {
+ sr_.SenderOctetCount = octet_count;
+ }
+ void WithReportBlock(ReportBlock* block);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ enum { kMaxNumberOfReportBlocks = 0x1f };
+
+ size_t BlockLength() const {
+ const size_t kSrHeaderLength = 8;
+ const size_t kSenderInfoLength = 20;
+ return kSrHeaderLength + kSenderInfoLength +
+ report_blocks_.size() * kReportBlockLength;
+ }
+
+ RTCPUtility::RTCPPacketSR sr_;
+ std::vector<RTCPUtility::RTCPPacketReportBlockItem> report_blocks_;
+};
+
+//
+// RTCP receiver report (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC | PT=RR=201 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | report block(s) |
+// | .... |
+
+class ReceiverReport : public RtcpPacket {
+ public:
+ ReceiverReport() : RtcpPacket() {
+ memset(&rr_, 0, sizeof(rr_));
+ }
+
+ virtual ~ReceiverReport() {}
+
+ void From(uint32_t ssrc) {
+ rr_.SenderSSRC = ssrc;
+ }
+ void WithReportBlock(ReportBlock* block);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ enum { kMaxNumberOfReportBlocks = 0x1f };
+
+ size_t BlockLength() const {
+ const size_t kRrHeaderLength = 8;
+ return kRrHeaderLength + report_blocks_.size() * kReportBlockLength;
+ }
+
+ RTCPUtility::RTCPPacketRR rr_;
+ std::vector<RTCPUtility::RTCPPacketReportBlockItem> report_blocks_;
+};
+
+// Transmission Time Offsets in RTP Streams (RFC 5450).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// hdr |V=2|P| RC | PT=IJ=195 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | inter-arrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// . .
+// . .
+// . .
+// | inter-arrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// If present, this RTCP packet must be placed after a receiver report
+// (inside a compound RTCP packet), and MUST have the same value for RC
+// (reception report count) as the receiver report.
+
+class Ij : public RtcpPacket {
+ public:
+ Ij() : RtcpPacket() {}
+
+ virtual ~Ij() {}
+
+ void WithJitterItem(uint32_t jitter);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ enum { kMaxNumberOfIjItems = 0x1f };
+
+ size_t BlockLength() const {
+ return kHeaderLength + 4 * ij_items_.size();
+ }
+
+ std::vector<uint32_t> ij_items_;
+
+ DISALLOW_COPY_AND_ASSIGN(Ij);
+};
+
+// Source Description (SDES) (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// header |V=2|P| SC | PT=SDES=202 | length |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// chunk | SSRC/CSRC_1 |
+// 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SDES items |
+// | ... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// chunk | SSRC/CSRC_2 |
+// 2 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SDES items |
+// | ... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+//
+// Canonical End-Point Identifier SDES Item (CNAME)
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | CNAME=1 | length | user and domain name ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Sdes : public RtcpPacket {
+ public:
+ Sdes() : RtcpPacket() {}
+
+ virtual ~Sdes() {}
+
+ void WithCName(uint32_t ssrc, std::string cname);
+
+ struct Chunk {
+ uint32_t ssrc;
+ std::string name;
+ int null_octets;
+ };
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ enum { kMaxNumberOfChunks = 0x1f };
+
+ size_t BlockLength() const;
+
+ std::vector<Chunk> chunks_;
+
+ DISALLOW_COPY_AND_ASSIGN(Sdes);
+};
+
+//
+// Bye packet (BYE) (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| SC | PT=BYE=203 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC/CSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : ... :
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// (opt) | length | reason for leaving ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Bye : public RtcpPacket {
+ public:
+ Bye() : RtcpPacket() {
+ memset(&bye_, 0, sizeof(bye_));
+ }
+
+ virtual ~Bye() {}
+
+ void From(uint32_t ssrc) {
+ bye_.SenderSSRC = ssrc;
+ }
+ void WithCsrc(uint32_t csrc);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ enum { kMaxNumberOfCsrcs = 0x1f - 1 };
+
+ size_t BlockLength() const {
+ size_t source_count = 1 + csrcs_.size();
+ return kHeaderLength + 4 * source_count;
+ }
+
+ RTCPUtility::RTCPPacketBYE bye_;
+ std::vector<uint32_t> csrcs_;
+};
+
+// Application-Defined packet (APP) (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| subtype | PT=APP=204 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC/CSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | name (ASCII) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | application-dependent data ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class App : public RtcpPacket {
+ public:
+ App()
+ : RtcpPacket(),
+ ssrc_(0) {
+ memset(&app_, 0, sizeof(app_));
+ }
+
+ virtual ~App() {}
+
+ void From(uint32_t ssrc) {
+ ssrc_ = ssrc;
+ }
+ void WithSubType(uint8_t subtype) {
+ assert(subtype <= 0x1f);
+ app_.SubType = subtype;
+ }
+ void WithName(uint32_t name) {
+ app_.Name = name;
+ }
+ void WithData(const uint8_t* data, uint16_t data_length) {
+ assert(data);
+ assert(data_length <= kRtcpAppCode_DATA_SIZE);
+ assert(data_length % 4 == 0);
+ memcpy(app_.Data, data, data_length);
+ app_.Size = data_length;
+ }
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ return 12 + app_.Size;
+ }
+
+ uint32_t ssrc_;
+ RTCPUtility::RTCPPacketAPP app_;
+
+ DISALLOW_COPY_AND_ASSIGN(App);
+};
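Editorial sketch, not part of the patch: minimal use of the App builder declared above. The four ASCII characters of the name are packed big-endian into the 32-bit Name field and the payload must be a multiple of four bytes, as the WithData() assert requires; the App unit tests later in this diff do the same.

  #include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"

  webrtc::rtcp::RawPacket BuildExampleApp() {
    webrtc::rtcp::App app;
    app.From(0x12345678);
    app.WithSubType(30);
    app.WithName(('n' << 24) | ('a' << 16) | ('m' << 8) | 'e');
    const uint8_t kData[] = {'t', 'e', 's', 't', 'd', 'a', 't', 'a'};
    app.WithData(kData, sizeof(kData));  // 8 bytes, a multiple of 4.
    return app.Build();
  }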
+
+// RFC 4585: Feedback format.
+//
+// Common packet format:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of media source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : Feedback Control Information (FCI) :
+// :
+
+// Picture loss indication (PLI) (RFC 4585).
+//
+// FCI: no feedback control information.
+
+class Pli : public RtcpPacket {
+ public:
+ Pli() : RtcpPacket() {
+ memset(&pli_, 0, sizeof(pli_));
+ }
+
+ virtual ~Pli() {}
+
+ void From(uint32_t ssrc) {
+ pli_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ pli_.MediaSSRC = ssrc;
+ }
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ return kCommonFbFmtLength;
+ }
+
+ RTCPUtility::RTCPPacketPSFBPLI pli_;
+
+ DISALLOW_COPY_AND_ASSIGN(Pli);
+};
+
+// Slice loss indication (SLI) (RFC 4585).
+//
+// FCI:
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | First | Number | PictureID |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Sli : public RtcpPacket {
+ public:
+ Sli() : RtcpPacket() {
+ memset(&sli_, 0, sizeof(sli_));
+ memset(&sli_item_, 0, sizeof(sli_item_));
+ }
+
+ virtual ~Sli() {}
+
+ void From(uint32_t ssrc) {
+ sli_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ sli_.MediaSSRC = ssrc;
+ }
+ void WithFirstMb(uint16_t first_mb) {
+ assert(first_mb <= 0x1fff);
+ sli_item_.FirstMB = first_mb;
+ }
+ void WithNumberOfMb(uint16_t number_mb) {
+ assert(number_mb <= 0x1fff);
+ sli_item_.NumberOfMB = number_mb;
+ }
+ void WithPictureId(uint8_t picture_id) {
+ assert(picture_id <= 0x3f);
+ sli_item_.PictureId = picture_id;
+ }
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ const size_t kFciLength = 4;
+ return kCommonFbFmtLength + kFciLength;
+ }
+
+ RTCPUtility::RTCPPacketPSFBSLI sli_;
+ RTCPUtility::RTCPPacketPSFBSLIItem sli_item_;
+
+ DISALLOW_COPY_AND_ASSIGN(Sli);
+};
+
+// Generic NACK (RFC 4585).
+//
+// FCI:
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | PID | BLP |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Nack : public RtcpPacket {
+ public:
+ Nack() : RtcpPacket() {
+ memset(&nack_, 0, sizeof(nack_));
+ }
+
+ virtual ~Nack() {}
+
+ void From(uint32_t ssrc) {
+ nack_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ nack_.MediaSSRC = ssrc;
+ }
+ void WithList(const uint16_t* nack_list, int length);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ size_t fci_length = 4 * nack_fields_.size();
+ return kCommonFbFmtLength + fci_length;
+ }
+
+ RTCPUtility::RTCPPacketRTPFBNACK nack_;
+ std::vector<RTCPUtility::RTCPPacketRTPFBNACKItem> nack_fields_;
+
+ DISALLOW_COPY_AND_ASSIGN(Nack);
+};
+
+// Reference picture selection indication (RPSI) (RFC 4585).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | PB |0| Payload Type| Native RPSI bit string |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | defined per codec ... | Padding (0) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Rpsi : public RtcpPacket {
+ public:
+ Rpsi()
+ : RtcpPacket(),
+ padding_bytes_(0) {
+ memset(&rpsi_, 0, sizeof(rpsi_));
+ }
+
+ virtual ~Rpsi() {}
+
+ void From(uint32_t ssrc) {
+ rpsi_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ rpsi_.MediaSSRC = ssrc;
+ }
+ void WithPayloadType(uint8_t payload) {
+ assert(payload <= 0x7f);
+ rpsi_.PayloadType = payload;
+ }
+ void WithPictureId(uint64_t picture_id);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ size_t fci_length = 2 + (rpsi_.NumberOfValidBits / 8) + padding_bytes_;
+ return kCommonFbFmtLength + fci_length;
+ }
+
+ uint8_t padding_bytes_;
+ RTCPUtility::RTCPPacketPSFBRPSI rpsi_;
+
+ DISALLOW_COPY_AND_ASSIGN(Rpsi);
+};
+
+// Full intra request (FIR) (RFC 5104).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | Seq nr. | Reserved |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Fir : public RtcpPacket {
+ public:
+ Fir()
+ : RtcpPacket() {
+ memset(&fir_, 0, sizeof(fir_));
+ memset(&fir_item_, 0, sizeof(fir_item_));
+ }
+
+ virtual ~Fir() {}
+
+ void From(uint32_t ssrc) {
+ fir_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ fir_item_.SSRC = ssrc;
+ }
+ void WithCommandSeqNum(uint8_t seq_num) {
+ fir_item_.CommandSequenceNumber = seq_num;
+ }
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ const size_t kFciLength = 8;
+ return kCommonFbFmtLength + kFciLength;
+ }
+
+ RTCPUtility::RTCPPacketPSFBFIR fir_;
+ RTCPUtility::RTCPPacketPSFBFIRItem fir_item_;
+};
+
+// Class holding an RTCP packet.
+//
+// Takes a built RTCP packet.
+// RawPacket raw_packet(buffer, length);
+//
+// To access the raw packet:
+// raw_packet.buffer(); - pointer to the raw packet
+// raw_packet.buffer_length(); - the length of the raw packet
+
+class RawPacket {
+ public:
+ RawPacket(const uint8_t* packet, size_t length) {
+ assert(length <= IP_PACKET_SIZE);
+ memcpy(buffer_, packet, length);
+ buffer_length_ = length;
+ }
+
+ const uint8_t* buffer() {
+ return buffer_;
+ }
+ size_t buffer_length() const {
+ return buffer_length_;
+ }
+
+ private:
+ size_t buffer_length_;
+ uint8_t buffer_[IP_PACKET_SIZE];
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_RTCP_PACKET_H_
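Editorial sketch, not part of the patch: the builders declared in this header compose into a compound packet via Append(), mirroring the class-level comment near the top of the file. A minimal sketch (the SSRC values are arbitrary):

  #include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"

  webrtc::rtcp::RawPacket BuildCompoundPacket() {
    webrtc::rtcp::ReportBlock rb;
    rb.To(0x23456789);
    rb.WithFractionLost(55);

    webrtc::rtcp::ReceiverReport rr;
    rr.From(0x12345678);
    rr.WithReportBlock(&rb);

    webrtc::rtcp::Fir fir;
    fir.From(0x12345678);
    fir.To(0x23456789);
    fir.WithCommandSeqNum(1);

    rr.Append(&fir);    // RR + report block + FIR in one compound packet.
    return rr.Build();  // Copies the built bytes into a RawPacket.
  }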
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
new file mode 100644
index 00000000000..aa25c2e5f9d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
@@ -0,0 +1,592 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ * This file includes unit tests for the RtcpPacket.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/test/rtcp_packet_parser.h"
+
+using webrtc::rtcp::App;
+using webrtc::rtcp::Bye;
+using webrtc::rtcp::Empty;
+using webrtc::rtcp::Fir;
+using webrtc::rtcp::Ij;
+using webrtc::rtcp::Nack;
+using webrtc::rtcp::Pli;
+using webrtc::rtcp::Sdes;
+using webrtc::rtcp::SenderReport;
+using webrtc::rtcp::Sli;
+using webrtc::rtcp::RawPacket;
+using webrtc::rtcp::ReceiverReport;
+using webrtc::rtcp::ReportBlock;
+using webrtc::rtcp::Rpsi;
+using webrtc::test::RtcpPacketParser;
+
+namespace webrtc {
+
+const uint32_t kSenderSsrc = 0x12345678;
+const uint32_t kRemoteSsrc = 0x23456789;
+
+TEST(RtcpPacketTest, Rr) {
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+
+ RawPacket packet = rr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
+ EXPECT_EQ(0, parser.report_block()->num_packets());
+}
+
+TEST(RtcpPacketTest, RrWithOneReportBlock) {
+ ReportBlock rb;
+ rb.To(kRemoteSsrc);
+ rb.WithFractionLost(55);
+ rb.WithCumulativeLost(0x111111);
+ rb.WithExtHighestSeqNum(0x22222222);
+ rb.WithJitter(0x33333333);
+ rb.WithLastSr(0x44444444);
+ rb.WithDelayLastSr(0x55555555);
+
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb);
+
+ RawPacket packet = rr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(kRemoteSsrc, parser.report_block()->Ssrc());
+ EXPECT_EQ(55U, parser.report_block()->FractionLost());
+ EXPECT_EQ(0x111111U, parser.report_block()->CumPacketLost());
+ EXPECT_EQ(0x22222222U, parser.report_block()->ExtHighestSeqNum());
+ EXPECT_EQ(0x33333333U, parser.report_block()->Jitter());
+ EXPECT_EQ(0x44444444U, parser.report_block()->LastSr());
+ EXPECT_EQ(0x55555555U, parser.report_block()->DelayLastSr());
+}
+
+TEST(RtcpPacketTest, RrWithTwoReportBlocks) {
+ ReportBlock rb1;
+ rb1.To(kRemoteSsrc);
+ ReportBlock rb2;
+ rb2.To(kRemoteSsrc + 1);
+
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb1);
+ rr.WithReportBlock(&rb2);
+
+ RawPacket packet = rr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
+ EXPECT_EQ(2, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.report_blocks_per_ssrc(kRemoteSsrc));
+ EXPECT_EQ(1, parser.report_blocks_per_ssrc(kRemoteSsrc + 1));
+}
+
+TEST(RtcpPacketTest, Sr) {
+ SenderReport sr;
+ sr.From(kSenderSsrc);
+ sr.WithNtpSec(0x11111111);
+ sr.WithNtpFrac(0x22222222);
+ sr.WithRtpTimestamp(0x33333333);
+ sr.WithPacketCount(0x44444444);
+ sr.WithOctetCount(0x55555555);
+
+ RawPacket packet = sr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+
+ EXPECT_EQ(1, parser.sender_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sender_report()->Ssrc());
+ EXPECT_EQ(0x11111111U, parser.sender_report()->NtpSec());
+ EXPECT_EQ(0x22222222U, parser.sender_report()->NtpFrac());
+ EXPECT_EQ(0x33333333U, parser.sender_report()->RtpTimestamp());
+ EXPECT_EQ(0x44444444U, parser.sender_report()->PacketCount());
+ EXPECT_EQ(0x55555555U, parser.sender_report()->OctetCount());
+ EXPECT_EQ(0, parser.report_block()->num_packets());
+}
+
+TEST(RtcpPacketTest, SrWithOneReportBlock) {
+ ReportBlock rb;
+ rb.To(kRemoteSsrc);
+
+ SenderReport sr;
+ sr.From(kSenderSsrc);
+ sr.WithReportBlock(&rb);
+
+ RawPacket packet = sr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sender_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sender_report()->Ssrc());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(kRemoteSsrc, parser.report_block()->Ssrc());
+}
+
+TEST(RtcpPacketTest, SrWithTwoReportBlocks) {
+ ReportBlock rb1;
+ rb1.To(kRemoteSsrc);
+ ReportBlock rb2;
+ rb2.To(kRemoteSsrc + 1);
+
+ SenderReport sr;
+ sr.From(kSenderSsrc);
+ sr.WithReportBlock(&rb1);
+ sr.WithReportBlock(&rb2);
+
+ RawPacket packet = sr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sender_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sender_report()->Ssrc());
+ EXPECT_EQ(2, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.report_blocks_per_ssrc(kRemoteSsrc));
+ EXPECT_EQ(1, parser.report_blocks_per_ssrc(kRemoteSsrc + 1));
+}
+
+TEST(RtcpPacketTest, IjNoItem) {
+ Ij ij;
+
+ RawPacket packet = ij.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.ij()->num_packets());
+ EXPECT_EQ(0, parser.ij_item()->num_packets());
+}
+
+TEST(RtcpPacketTest, IjOneItem) {
+ Ij ij;
+ ij.WithJitterItem(0x11111111);
+
+ RawPacket packet = ij.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.ij()->num_packets());
+ EXPECT_EQ(1, parser.ij_item()->num_packets());
+ EXPECT_EQ(0x11111111U, parser.ij_item()->Jitter());
+}
+
+TEST(RtcpPacketTest, IjTwoItems) {
+ Ij ij;
+ ij.WithJitterItem(0x11111111);
+ ij.WithJitterItem(0x22222222);
+
+ RawPacket packet = ij.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.ij()->num_packets());
+ EXPECT_EQ(2, parser.ij_item()->num_packets());
+ EXPECT_EQ(0x22222222U, parser.ij_item()->Jitter());
+}
+
+TEST(RtcpPacketTest, AppWithNoData) {
+ App app;
+ app.WithSubType(30);
+ uint32_t name = 'n' << 24;
+ name += 'a' << 16;
+ name += 'm' << 8;
+ name += 'e';
+ app.WithName(name);
+
+ RawPacket packet = app.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.app()->num_packets());
+ EXPECT_EQ(30U, parser.app()->SubType());
+ EXPECT_EQ(name, parser.app()->Name());
+ EXPECT_EQ(0, parser.app_item()->num_packets());
+}
+
+TEST(RtcpPacketTest, App) {
+ App app;
+ app.From(kSenderSsrc);
+ app.WithSubType(30);
+ uint32_t name = 'n' << 24;
+ name += 'a' << 16;
+ name += 'm' << 8;
+ name += 'e';
+ app.WithName(name);
+ const char kData[] = {'t', 'e', 's', 't', 'd', 'a', 't', 'a'};
+ const size_t kDataLength = sizeof(kData) / sizeof(kData[0]);
+ app.WithData((const uint8_t*)kData, kDataLength);
+
+ RawPacket packet = app.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.app()->num_packets());
+ EXPECT_EQ(30U, parser.app()->SubType());
+ EXPECT_EQ(name, parser.app()->Name());
+ EXPECT_EQ(1, parser.app_item()->num_packets());
+ EXPECT_EQ(kDataLength, parser.app_item()->DataLength());
+ EXPECT_EQ(0, strncmp(kData, (const char*)parser.app_item()->Data(),
+ parser.app_item()->DataLength()));
+}
+
+TEST(RtcpPacketTest, SdesWithOneChunk) {
+ Sdes sdes;
+ sdes.WithCName(kSenderSsrc, "alice@host");
+
+ RawPacket packet = sdes.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sdes()->num_packets());
+ EXPECT_EQ(1, parser.sdes_chunk()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sdes_chunk()->Ssrc());
+ EXPECT_EQ("alice@host", parser.sdes_chunk()->Cname());
+}
+
+TEST(RtcpPacketTest, SdesWithMultipleChunks) {
+ Sdes sdes;
+ sdes.WithCName(kSenderSsrc, "a");
+ sdes.WithCName(kSenderSsrc + 1, "ab");
+ sdes.WithCName(kSenderSsrc + 2, "abc");
+ sdes.WithCName(kSenderSsrc + 3, "abcd");
+ sdes.WithCName(kSenderSsrc + 4, "abcde");
+ sdes.WithCName(kSenderSsrc + 5, "abcdef");
+
+ RawPacket packet = sdes.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sdes()->num_packets());
+ EXPECT_EQ(6, parser.sdes_chunk()->num_packets());
+ EXPECT_EQ(kSenderSsrc + 5, parser.sdes_chunk()->Ssrc());
+ EXPECT_EQ("abcdef", parser.sdes_chunk()->Cname());
+}
+
+TEST(RtcpPacketTest, CnameItemWithEmptyString) {
+ Sdes sdes;
+ sdes.WithCName(kSenderSsrc, "");
+
+ RawPacket packet = sdes.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sdes()->num_packets());
+ EXPECT_EQ(1, parser.sdes_chunk()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sdes_chunk()->Ssrc());
+ EXPECT_EQ("", parser.sdes_chunk()->Cname());
+}
+
+TEST(RtcpPacketTest, Pli) {
+ Pli pli;
+ pli.From(kSenderSsrc);
+ pli.To(kRemoteSsrc);
+
+ RawPacket packet = pli.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.pli()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.pli()->Ssrc());
+ EXPECT_EQ(kRemoteSsrc, parser.pli()->MediaSsrc());
+}
+
+TEST(RtcpPacketTest, Sli) {
+ const uint16_t kFirstMb = 7777;
+ const uint16_t kNumberOfMb = 6666;
+ const uint8_t kPictureId = 60;
+ Sli sli;
+ sli.From(kSenderSsrc);
+ sli.To(kRemoteSsrc);
+ sli.WithFirstMb(kFirstMb);
+ sli.WithNumberOfMb(kNumberOfMb);
+ sli.WithPictureId(kPictureId);
+
+ RawPacket packet = sli.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sli()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sli()->Ssrc());
+ EXPECT_EQ(kRemoteSsrc, parser.sli()->MediaSsrc());
+ EXPECT_EQ(1, parser.sli_item()->num_packets());
+ EXPECT_EQ(kFirstMb, parser.sli_item()->FirstMb());
+ EXPECT_EQ(kNumberOfMb, parser.sli_item()->NumberOfMb());
+ EXPECT_EQ(kPictureId, parser.sli_item()->PictureId());
+}
+
+TEST(RtcpPacketTest, Nack) {
+ Nack nack;
+ const uint16_t kList[] = {0, 1, 3, 8, 16};
+ const uint16_t kListLength = sizeof(kList) / sizeof(kList[0]);
+ nack.From(kSenderSsrc);
+ nack.To(kRemoteSsrc);
+ nack.WithList(kList, kListLength);
+ RawPacket packet = nack.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.nack()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.nack()->Ssrc());
+ EXPECT_EQ(kRemoteSsrc, parser.nack()->MediaSsrc());
+ EXPECT_EQ(1, parser.nack_item()->num_packets());
+ std::vector<uint16_t> seqs = parser.nack_item()->last_nack_list();
+ EXPECT_EQ(kListLength, seqs.size());
+ for (size_t i = 0; i < kListLength; ++i) {
+ EXPECT_EQ(kList[i], seqs[i]);
+ }
+}
+
+TEST(RtcpPacketTest, NackWithWrap) {
+ Nack nack;
+ const uint16_t kList[] = {65500, 65516, 65534, 65535, 0, 1, 3, 20, 100};
+ const uint16_t kListLength = sizeof(kList) / sizeof(kList[0]);
+ nack.From(kSenderSsrc);
+ nack.To(kRemoteSsrc);
+ nack.WithList(kList, kListLength);
+ RawPacket packet = nack.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.nack()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.nack()->Ssrc());
+ EXPECT_EQ(kRemoteSsrc, parser.nack()->MediaSsrc());
+ EXPECT_EQ(4, parser.nack_item()->num_packets());
+ std::vector<uint16_t> seqs = parser.nack_item()->last_nack_list();
+ EXPECT_EQ(kListLength, seqs.size());
+ for (size_t i = 0; i < kListLength; ++i) {
+ EXPECT_EQ(kList[i], seqs[i]);
+ }
+}
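(Editorial note, not part of the patch: the four expected items follow from Nack::WithList(). 65500 covers 65516 via bit 15 of its bitmask; 65534 covers 65535, 0, 1 and 3 because the shift is computed on the uint16_t difference and therefore wraps; 20 and 100 are each more than 16 packets past the previous PID and so start items of their own.)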
+
+TEST(RtcpPacketTest, Rpsi) {
+ Rpsi rpsi;
+ // 1000001 (7 bits = 1 byte in native string).
+ const uint64_t kPictureId = 0x41;
+ const uint16_t kNumberOfValidBytes = 1;
+ rpsi.WithPayloadType(100);
+ rpsi.WithPictureId(kPictureId);
+
+ RawPacket packet = rpsi.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(100, parser.rpsi()->PayloadType());
+ EXPECT_EQ(kNumberOfValidBytes * 8, parser.rpsi()->NumberOfValidBits());
+ EXPECT_EQ(kPictureId, parser.rpsi()->PictureId());
+}
+
+TEST(RtcpPacketTest, RpsiWithTwoByteNativeString) {
+ Rpsi rpsi;
+ // |1 0000001 (7 bits = 1 byte in native string).
+ const uint64_t kPictureId = 0x81;
+ const uint16_t kNumberOfValidBytes = 2;
+ rpsi.WithPictureId(kPictureId);
+
+ RawPacket packet = rpsi.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(kNumberOfValidBytes * 8, parser.rpsi()->NumberOfValidBits());
+ EXPECT_EQ(kPictureId, parser.rpsi()->PictureId());
+}
+
+TEST(RtcpPacketTest, RpsiWithThreeByteNativeString) {
+ Rpsi rpsi;
+ // 10000|00 100000|0 1000000 (7 bits = 1 byte in native string).
+ const uint64_t kPictureId = 0x102040;
+ const uint16_t kNumberOfValidBytes = 3;
+ rpsi.WithPictureId(kPictureId);
+
+ RawPacket packet = rpsi.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(kNumberOfValidBytes * 8, parser.rpsi()->NumberOfValidBits());
+ EXPECT_EQ(kPictureId, parser.rpsi()->PictureId());
+}
+
+TEST(RtcpPacketTest, RpsiWithFourByteNativeString) {
+ Rpsi rpsi;
+ // 1000|001 00001|01 100001|1 1000010 (7 bits = 1 byte in native string).
+ const uint64_t kPictureId = 0x84161C2;
+ const uint16_t kNumberOfValidBytes = 4;
+ rpsi.WithPictureId(kPictureId);
+
+ RawPacket packet = rpsi.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(kNumberOfValidBytes * 8, parser.rpsi()->NumberOfValidBits());
+ EXPECT_EQ(kPictureId, parser.rpsi()->PictureId());
+}
+
+TEST(RtcpPacketTest, RpsiWithMaxPictureId) {
+ Rpsi rpsi;
+ // 1 1111111| 1111111 1|111111 11|11111 111|1111 1111|111 11111|
+ // 11 111111|1 1111111 (7 bits = 1 byte in native string).
+ const uint64_t kPictureId = 0xffffffffffffffff;
+ const uint16_t kNumberOfValidBytes = 10;
+ rpsi.WithPictureId(kPictureId);
+
+ RawPacket packet = rpsi.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(kNumberOfValidBytes * 8, parser.rpsi()->NumberOfValidBits());
+ EXPECT_EQ(kPictureId, parser.rpsi()->PictureId());
+}
+
+TEST(RtcpPacketTest, Fir) {
+ Fir fir;
+ fir.From(kSenderSsrc);
+ fir.To(kRemoteSsrc);
+ fir.WithCommandSeqNum(123);
+
+ RawPacket packet = fir.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.fir()->Ssrc());
+ EXPECT_EQ(1, parser.fir_item()->num_packets());
+ EXPECT_EQ(kRemoteSsrc, parser.fir_item()->Ssrc());
+ EXPECT_EQ(123U, parser.fir_item()->SeqNum());
+}
+
+TEST(RtcpPacketTest, AppendPacket) {
+ Fir fir;
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb);
+ rr.Append(&fir);
+
+ RawPacket packet = rr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+
+TEST(RtcpPacketTest, AppendPacketOnEmpty) {
+ Empty empty;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ empty.Append(&rr);
+
+ RawPacket packet = empty.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(0, parser.report_block()->num_packets());
+}
+
+TEST(RtcpPacketTest, AppendPacketWithOwnAppendedPacket) {
+ Fir fir;
+ Bye bye;
+ ReportBlock rb;
+
+ ReceiverReport rr;
+ rr.WithReportBlock(&rb);
+ rr.Append(&fir);
+
+ SenderReport sr;
+ sr.Append(&bye);
+ sr.Append(&rr);
+
+ RawPacket packet = sr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sender_report()->num_packets());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.bye()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+
+TEST(RtcpPacketTest, Bye) {
+ Bye bye;
+ bye.From(kSenderSsrc);
+
+ RawPacket packet = bye.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.bye()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.bye()->Ssrc());
+}
+
+TEST(RtcpPacketTest, ByeWithCsrcs) {
+ Fir fir;
+ Bye bye;
+ bye.From(kSenderSsrc);
+ bye.WithCsrc(0x22222222);
+ bye.WithCsrc(0x33333333);
+ bye.Append(&fir);
+
+ RawPacket packet = bye.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.bye()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.bye()->Ssrc());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+
+TEST(RtcpPacketTest, BuildWithInputBuffer) {
+ Fir fir;
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb);
+ rr.Append(&fir);
+
+ const size_t kRrLength = 8;
+ const size_t kReportBlockLength = 24;
+ const size_t kFirLength = 20;
+
+ size_t len = 0;
+ uint8_t packet[kRrLength + kReportBlockLength + kFirLength];
+ rr.Build(packet, &len, kRrLength + kReportBlockLength + kFirLength);
+
+ RtcpPacketParser parser;
+ parser.Parse(packet, len);
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+
+TEST(RtcpPacketTest, BuildWithTooSmallBuffer) {
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb);
+
+ const size_t kRrLength = 8;
+ const size_t kReportBlockLength = 24;
+
+ // No packet.
+ size_t len = 0;
+ uint8_t packet[kRrLength + kReportBlockLength - 1];
+ rr.Build(packet, &len, kRrLength + kReportBlockLength - 1);
+ RtcpPacketParser parser;
+ parser.Parse(packet, len);
+ EXPECT_EQ(0U, len);
+}
+
+TEST(RtcpPacketTest, BuildWithTooSmallBuffer_LastBlockFits) {
+ Fir fir;
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb);
+ rr.Append(&fir);
+
+ const size_t kRrLength = 8;
+ const size_t kReportBlockLength = 24;
+
+ size_t len = 0;
+ uint8_t packet[kRrLength + kReportBlockLength - 1];
+ rr.Build(packet, &len, kRrLength + kReportBlockLength - 1);
+ RtcpPacketParser parser;
+ parser.Parse(packet, len);
+ EXPECT_EQ(0, parser.receiver_report()->num_packets());
+ EXPECT_EQ(0, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
index a95fddede25..896bd5f4d3a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -18,7 +18,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -57,7 +57,6 @@ RTCPReceiver::RTCPReceiver(const int32_t id, Clock* clock,
_lastIncreasedSequenceNumberMs(0),
stats_callback_(NULL) {
memset(&_remoteSenderInfo, 0, sizeof(_remoteSenderInfo));
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
}
RTCPReceiver::~RTCPReceiver() {
@@ -82,8 +81,6 @@ RTCPReceiver::~RTCPReceiver() {
delete first->second;
_receivedCnameMap.erase(first);
}
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id,
- "%s deleted", __FUNCTION__);
}
void
@@ -178,8 +175,7 @@ int32_t RTCPReceiver::ResetRTT(const uint32_t remoteSSRC) {
RTCPReportBlockInformation* reportBlock =
GetReportBlockInformation(remoteSSRC);
if (reportBlock == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "\tfailed to GetReportBlockInformation(%u)", remoteSSRC);
+ LOG(LS_WARNING) << "Failed to reset rtt for ssrc " << remoteSSRC;
return -1;
}
reportBlock->RTT = 0;
@@ -282,22 +278,14 @@ bool RTCPReceiver::LastReceivedXrReferenceTimeInfo(
return true;
}
-int32_t
-RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const
-{
- if(senderInfo == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
- return -1;
- }
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
- if(_lastReceivedSRNTPsecs == 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s No received SR", __FUNCTION__);
- return -1;
- }
- memcpy(senderInfo, &(_remoteSenderInfo), sizeof(RTCPSenderInfo));
- return 0;
+int32_t RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const {
+ assert(senderInfo);
+ CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ if (_lastReceivedSRNTPsecs == 0) {
+ return -1;
+ }
+ memcpy(senderInfo, &(_remoteSenderInfo), sizeof(RTCPSenderInfo));
+ return 0;
}
// statistics
@@ -317,6 +305,12 @@ int32_t RTCPReceiver::StatisticsReceived(
return 0;
}
+void RTCPReceiver::GetPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const {
+ CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ *packet_counter = packet_type_counter_;
+}
+
int32_t
RTCPReceiver::IncomingRTCPPacket(RTCPPacketInformation& rtcpPacketInformation,
RTCPUtility::RTCPParserV2* rtcpParser)
@@ -480,11 +474,12 @@ RTCPReceiver::HandleSenderReceiverReport(RTCPUtility::RTCPParserV2& rtcpParser,
}
// no need for critsect we have _criticalSectionRTCPReceiver
-void
-RTCPReceiver::HandleReportBlock(const RTCPUtility::RTCPPacket& rtcpPacket,
- RTCPPacketInformation& rtcpPacketInformation,
- const uint32_t remoteSSRC,
- const uint8_t numberOfReportBlocks) {
+void RTCPReceiver::HandleReportBlock(
+ const RTCPUtility::RTCPPacket& rtcpPacket,
+ RTCPPacketInformation& rtcpPacketInformation,
+ const uint32_t remoteSSRC,
+ const uint8_t numberOfReportBlocks)
+ EXCLUSIVE_LOCKS_REQUIRED(_criticalSectionRTCPReceiver) {
// This will be called once per report block in the RTCP packet.
// We filter out all report blocks that are not for us.
// Each packet has max 31 RR blocks.
@@ -511,8 +506,8 @@ RTCPReceiver::HandleReportBlock(const RTCPUtility::RTCPPacket& rtcpPacket,
RTCPReportBlockInformation* reportBlock =
CreateReportBlockInformation(remoteSSRC);
if (reportBlock == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "\tfailed to CreateReportBlockInformation(%u)", remoteSSRC);
+ LOG(LS_WARNING) << "Failed to CreateReportBlockInformation("
+ << remoteSSRC << ")";
return;
}
@@ -772,9 +767,6 @@ int32_t RTCPReceiver::BoundingSet(bool &tmmbrOwner, TMMBRSet* boundingSetRec) {
}
RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
if (receiveInfo == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s failed to get RTCPReceiveInformation",
- __FUNCTION__);
return -1;
}
if (receiveInfo->TmmbnBoundingSet.lengthOfSet() > 0) {
@@ -838,6 +830,10 @@ RTCPReceiver::HandleNACK(RTCPUtility::RTCPParserV2& rtcpParser,
HandleNACKItem(rtcpPacket, rtcpPacketInformation);
pktType = rtcpParser.Iterate();
}
+
+ if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpNack) {
+ ++packet_type_counter_.nack_packets;
+ }
}
// no need for critsect we have _criticalSectionRTCPReceiver
@@ -940,7 +936,8 @@ void RTCPReceiver::HandleXrDlrrReportBlock(
void RTCPReceiver::HandleXrDlrrReportBlockItem(
const RTCPUtility::RTCPPacket& packet,
- RTCPPacketInformation& rtcpPacketInformation) {
+ RTCPPacketInformation& rtcpPacketInformation)
+ EXCLUSIVE_LOCKS_REQUIRED(_criticalSectionRTCPReceiver) {
if (registered_ssrcs_.find(packet.XRDLRRReportBlockItem.SSRC) ==
registered_ssrcs_.end()) {
// Not to us.
@@ -1026,6 +1023,7 @@ void RTCPReceiver::HandlePLI(RTCPUtility::RTCPParserV2& rtcpParser,
if (main_ssrc_ == rtcpPacket.PLI.MediaSSRC) {
TRACE_EVENT_INSTANT0("webrtc_rtp", "PLI");
+ ++packet_type_counter_.pli_packets;
// Received a signal that we need to send a new key frame.
rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpPli;
}
@@ -1268,6 +1266,9 @@ void RTCPReceiver::HandleFIRItem(RTCPReceiveInformation* receiveInfo,
if (main_ssrc_ != rtcpPacket.FIRItem.SSRC) {
return;
}
+
+ ++packet_type_counter_.fir_packets;
+
// rtcpPacket.FIR.MediaSSRC SHOULD be 0 but we ignore to check it
// we don't know who this originate from
if (receiveInfo) {
@@ -1332,8 +1333,7 @@ int32_t RTCPReceiver::UpdateTMMBR() {
TMMBRSet* boundingSet = NULL;
numBoundingSet = FindTMMBRBoundingSet(boundingSet);
if (numBoundingSet == -1) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
- "Failed to find TMMBR bounding set.");
+ LOG(LS_WARNING) << "Failed to find TMMBR bounding set.";
return -1;
}
// Set bounding set
@@ -1353,8 +1353,6 @@ int32_t RTCPReceiver::UpdateTMMBR() {
CriticalSectionScoped lock(_criticalSectionFeedbacks);
if (_cbRtcpBandwidthObserver) {
_cbRtcpBandwidthObserver->OnReceivedEstimatedBitrate(bitrate * 1000);
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
- "Set TMMBR request:%d kbps", bitrate);
}
}
return 0;
@@ -1379,9 +1377,6 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
// Process TMMBR and REMB first to avoid multiple callbacks
// to OnNetworkChanged.
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpTmmbr) {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
- "SIG [RTCP] Incoming TMMBR to id:%d", _id);
-
// Might trigger a OnReceivedBandwidthEstimateUpdate.
UpdateTMMBR();
}
@@ -1396,9 +1391,8 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpNack) {
if (rtcpPacketInformation.nackSequenceNumbers.size() > 0) {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
- "SIG [RTCP] Incoming NACK length:%d",
- rtcpPacketInformation.nackSequenceNumbers.size());
+ LOG(LS_VERBOSE) << "Incoming NACK length: "
+ << rtcpPacketInformation.nackSequenceNumbers.size();
_rtpRtcp.OnReceivedNACK(rtcpPacketInformation.nackSequenceNumbers);
}
}
@@ -1413,13 +1407,11 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
if ((rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli) ||
(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpFir)) {
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli) {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
- "SIG [RTCP] Incoming PLI from SSRC:0x%x",
- rtcpPacketInformation.remoteSSRC);
+ LOG(LS_VERBOSE) << "Incoming PLI from SSRC "
+ << rtcpPacketInformation.remoteSSRC;
} else {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
- "SIG [RTCP] Incoming FIR from SSRC:0x%x",
- rtcpPacketInformation.remoteSSRC);
+ LOG(LS_VERBOSE) << "Incoming FIR from SSRC "
+ << rtcpPacketInformation.remoteSSRC;
}
_cbRtcpIntraFrameObserver->OnReceivedIntraFrameRequest(local_ssrc);
}
@@ -1434,9 +1426,8 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
}
if (_cbRtcpBandwidthObserver) {
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb) {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
- "SIG [RTCP] Incoming REMB:%d",
- rtcpPacketInformation.receiverEstimatedMaxBitrate);
+ LOG(LS_VERBOSE) << "Incoming REMB: "
+ << rtcpPacketInformation.receiverEstimatedMaxBitrate;
_cbRtcpBandwidthObserver->OnReceivedEstimatedBitrate(
rtcpPacketInformation.receiverEstimatedMaxBitrate);
}
@@ -1532,9 +1523,6 @@ int32_t RTCPReceiver::TMMBRReceived(const uint32_t size,
while (receiveInfoIt != _receivedInfoMap.end()) {
RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
if(receiveInfo == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s failed to get RTCPReceiveInformation",
- __FUNCTION__);
return -1;
}
num += receiveInfo->TmmbrSet.lengthOfSet();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
index 637773dc744..ebffb7cfc9b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
@@ -88,6 +88,8 @@ public:
int32_t StatisticsReceived(
std::vector<RTCPReportBlock>* receiveBlocks) const;
+ void GetPacketTypeCounter(RtcpPacketTypeCounter* packet_counter) const;
+
// Returns true if we haven't received an RTCP RR for several RTCP
// intervals, but only triggers true once.
bool RtcpRrTimeout(int64_t rtcp_interval_ms);
@@ -266,6 +268,8 @@ protected:
int64_t _lastIncreasedSequenceNumberMs;
RtcpStatisticsCallback* stats_callback_;
+
+ RtcpPacketTypeCounter packet_type_counter_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_
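The hunks above fold the receiver's NACK/PLI/FIR bookkeeping into a single packet_type_counter_ member exposed through GetPacketTypeCounter(); the sender gets the same treatment below. RtcpPacketTypeCounter itself is defined in rtp_rtcp_defines.h and is not part of this diff, so the sketch here only illustrates the shape such a type needs for the memset() initialisation and the per-field increments seen in these hunks; the three field names come from the diff, everything else is assumed.

    #include <cstdint>
    #include <cstring>

    // Illustrative stand-in for webrtc::RtcpPacketTypeCounter: a plain
    // aggregate, so memset()-zeroing and field-wise increments are valid.
    struct RtcpPacketTypeCounter {
      uint32_t nack_packets;  // incremented per NACK handled or sent
      uint32_t pli_packets;   // incremented per PLI handled or sent
      uint32_t fir_packets;   // incremented per FIR handled or sent
    };

    // Hypothetical owner mirroring how RTCPSender/RTCPReceiver use it.
    class CounterOwner {
     public:
      CounterOwner() { std::memset(&counter_, 0, sizeof(counter_)); }
      void OnPli() { ++counter_.pli_packets; }
      void GetPacketTypeCounter(RtcpPacketTypeCounter* out) const {
        *out = counter_;
      }
     private:
      RtcpPacketTypeCounter counter_;
    };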
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
index 949beb9c7f0..0ca43fa53eb 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
@@ -12,10 +12,10 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h" // RTCPReportBlock
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/tmmbr_help.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
index b3f15bb9662..399c133cd10 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
@@ -261,6 +261,7 @@ class RtcpReceiverTest : public ::testing::Test {
RemoteBitrateEstimatorFactory().Create(
&remote_bitrate_observer_,
&system_clock_,
+ kMimdControl,
kRemoteBitrateEstimatorMinBitrateBps)) {
test_transport_ = new TestTransport();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
index a407a42c913..d73de9c4243 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -19,7 +19,7 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -156,16 +156,11 @@ RTCPSender::RTCPSender(const int32_t id,
xrSendReceiverReferenceTimeEnabled_(false),
_xrSendVoIPMetric(false),
- _xrVoIPMetric(),
- _nackCount(0),
- _pliCount(0),
- _fullIntraRequestCount(0)
+ _xrVoIPMetric()
{
memset(_CNAME, 0, sizeof(_CNAME));
memset(_lastSendReport, 0, sizeof(_lastSendReport));
memset(_lastRTCPTime, 0, sizeof(_lastRTCPTime));
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
}
RTCPSender::~RTCPSender() {
@@ -190,8 +185,6 @@ RTCPSender::~RTCPSender() {
}
delete _criticalSectionTransport;
delete _criticalSectionRTCPSender;
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
}
int32_t
@@ -239,10 +232,7 @@ RTCPSender::Init()
memset(_lastRTCPTime, 0, sizeof(_lastRTCPTime));
last_xr_rr_.clear();
- _nackCount = 0;
- _pliCount = 0;
- _fullIntraRequestCount = 0;
-
+ memset(&packet_type_counter_, 0, sizeof(packet_type_counter_));
return 0;
}
@@ -354,6 +344,9 @@ RTCPSender::SetREMBData(const uint32_t bitrate,
_rembSSRC[i] = SSRC[i];
}
_sendREMB = true;
+ // Send a REMB immediately if we have a new REMB. The frequency of REMBs is
+ // throttled by the caller.
+ _nextTimeToSendRTCP = _clock->TimeInMilliseconds();
return 0;
}
@@ -430,7 +423,8 @@ RTCPSender::SetCameraDelay(const int32_t delayMS)
CriticalSectionScoped lock(_criticalSectionRTCPSender);
if(delayMS > 1000 || delayMS < -1000)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument, delay can't be larger than 1 sec", __FUNCTION__);
+ LOG(LS_WARNING) << "Delay can't be larger than 1 second: "
+ << delayMS << " ms";
return -1;
}
_cameraDelayMS = delayMS;
@@ -489,14 +483,15 @@ RTCPSender::TimeToSendRTCPReport(const bool sendKeyframeBeforeRTP) const
For audio we use a fix 5 sec interval
For video we use 1 sec interval fo a BW smaller than 360 kbit/s,
- technicaly we break the max 5% RTCP BW for video below 10 kbit/s but that should be extreamly rare
+ technically we break the max 5% RTCP BW for video below 10 kbit/s but
+ that should be extremely rare
From RFC 3550
MAX RTCP BW is 5% if the session BW
A send report is approximately 65 bytes inc CNAME
- A report report is approximately 28 bytes
+ A receiver report is approximately 28 bytes
The RECOMMENDED value for the reduced minimum in seconds is 360
divided by the session bandwidth in kilobits/second. This minimum
@@ -558,7 +553,7 @@ From RFC 3550
now += RTCP_SEND_BEFORE_KEY_FRAME_MS;
}
- if(now > _nextTimeToSendRTCP)
+ if(now >= _nextTimeToSendRTCP)
{
return true;
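The block comment above leans on two figures from RFC 3550: RTCP may consume at most 5% of the session bandwidth, and the reduced minimum report interval is 360 divided by the session bandwidth in kbit/s. A self-contained sketch of that arithmetic, with illustrative numbers rather than anything computed by RTCPSender:

    #include <algorithm>
    #include <cstdio>

    int main() {
      const double session_bw_kbps = 360.0;                        // assumed session bandwidth
      const double rtcp_bw_bps = 0.05 * session_bw_kbps * 1000.0;  // the 5% rule
      const double avg_rtcp_packet_bits = 65.0 * 8.0;              // SR incl. CNAME, ~65 bytes
      // Interval at which one average RTCP packet uses exactly the RTCP share.
      double interval_s = avg_rtcp_packet_bits / rtcp_bw_bps;
      // Reduced minimum from RFC 3550: 360 / session bandwidth in kbit/s.
      const double reduced_min_s = 360.0 / session_bw_kbps;
      interval_s = std::max(interval_s, reduced_min_s);
      std::printf("RTCP interval: %.3f s (reduced minimum %.3f s)\n",
                  interval_s, reduced_min_s);
      return 0;
    }

At 360 kbit/s this lands on the 1-second video interval the comment mentions; the code itself just uses a fixed 1 s for video, which, as the comment notes, only exceeds the 5% cap below roughly 10 kbit/s.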
@@ -616,6 +611,12 @@ bool RTCPSender::SendTimeOfXrRrReport(uint32_t mid_ntp,
return true;
}
+void RTCPSender::GetPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const {
+ CriticalSectionScoped lock(_criticalSectionRTCPSender);
+ *packet_counter = packet_type_counter_;
+}
+
int32_t RTCPSender::AddExternalReportBlock(
uint32_t SSRC,
const RTCPReportBlock* reportBlock) {
@@ -627,15 +628,10 @@ int32_t RTCPSender::AddReportBlock(
uint32_t SSRC,
std::map<uint32_t, RTCPReportBlock*>* report_blocks,
const RTCPReportBlock* reportBlock) {
- if (reportBlock == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s invalid argument", __FUNCTION__);
- return -1;
- }
+ assert(reportBlock);
if (report_blocks->size() >= RTCP_MAX_REPORT_BLOCKS) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Too many report blocks.";
return -1;
}
std::map<uint32_t, RTCPReportBlock*>::iterator it =
@@ -673,7 +669,7 @@ int32_t RTCPSender::BuildSR(const FeedbackState& feedback_state,
// sanity
if(pos + 52 >= IP_PACKET_SIZE)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build Sender Report.";
return -2;
}
uint32_t RTPtime;
@@ -756,8 +752,7 @@ int32_t RTCPSender::BuildSDEC(uint8_t* rtcpbuffer, int& pos) {
// sanity
if(pos + 12 + lengthCname >= IP_PACKET_SIZE) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build SDEC.";
return -2;
}
// SDEC Source Description
@@ -909,7 +904,9 @@ RTCPSender::BuildExtendedJitterReport(
{
if (external_report_blocks_.size() > 0)
{
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "Not implemented.");
+ // TODO(andresp): Remove external report blocks since they are not
+ // supported.
+ LOG(LS_ERROR) << "Handling of external report blocks not implemented.";
return 0;
}
@@ -1313,7 +1310,7 @@ RTCPSender::BuildTMMBN(uint8_t* rtcpbuffer, int& pos)
// sanity
if(pos + 12 + boundingSet->lengthOfSet()*8 >= IP_PACKET_SIZE)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build TMMBN.";
return -2;
}
uint8_t FMT = 4;
@@ -1380,12 +1377,12 @@ RTCPSender::BuildAPP(uint8_t* rtcpbuffer, int& pos)
// sanity
if(_appData == NULL)
{
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s invalid state", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build app specific.";
return -1;
}
if(pos + 12 + _appLength >= IP_PACKET_SIZE)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build app specific.";
return -2;
}
rtcpbuffer[pos++]=(uint8_t)0x80 + _appSubType;
@@ -1421,7 +1418,7 @@ RTCPSender::BuildNACK(uint8_t* rtcpbuffer,
// sanity
if(pos + 16 >= IP_PACKET_SIZE)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build NACK.";
return -2;
}
@@ -1474,8 +1471,7 @@ RTCPSender::BuildNACK(uint8_t* rtcpbuffer,
numOfNackFields++;
}
if (i != nackSize) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
- "Nack list to large for one packet.");
+ LOG(LS_WARNING) << "Nack list to large for one packet.";
}
rtcpbuffer[nackSizePos] = static_cast<uint8_t>(2 + numOfNackFields);
*nackString = stringBuilder.GetResult();
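BuildNACK above emits RFC 4585 Generic NACK items, each carrying one 16-bit packet ID (PID) plus a 16-bit bitmask (BLP) covering the 16 sequence numbers that follow it; the "Nack list too large" warning fires when the requested list needs more items than fit in the packet. A standalone sketch of that packing; the function and container types here are illustrative, not from rtcp_sender.cc:

    #include <cstddef>
    #include <cstdint>
    #include <utility>
    #include <vector>

    // Pack a sorted list of missing RTP sequence numbers into Generic NACK
    // items: (PID, bitmask-of-the-following-16-losses) pairs.
    std::vector<std::pair<uint16_t, uint16_t>> PackNack(
        const std::vector<uint16_t>& missing) {
      std::vector<std::pair<uint16_t, uint16_t>> items;
      std::size_t i = 0;
      while (i < missing.size()) {
        const uint16_t pid = missing[i++];
        uint16_t blp = 0;
        while (i < missing.size()) {
          const uint16_t d = static_cast<uint16_t>(missing[i] - pid);
          if (d == 0) { ++i; continue; }  // skip duplicates
          if (d > 16) break;              // too far ahead; starts the next item
          blp |= static_cast<uint16_t>(1u << (d - 1));
          ++i;
        }
        items.emplace_back(pid, blp);
      }
      return items;
    }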
@@ -1711,8 +1707,7 @@ int32_t RTCPSender::SendRTCP(const FeedbackState& feedback_state,
CriticalSectionScoped lock(_criticalSectionRTCPSender);
if(_method == kRtcpOff)
{
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
- "%s invalid state", __FUNCTION__);
+ LOG(LS_WARNING) << "Can't send rtcp if it is disabled.";
return -1;
}
}
@@ -1778,10 +1773,9 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
rtcpPacketTypeFlags |= kRtcpTmmbn;
_sendTMMBN = false;
}
- if (xrSendReceiverReferenceTimeEnabled_ &&
- (rtcpPacketTypeFlags & kRtcpReport))
+ if (rtcpPacketTypeFlags & kRtcpReport)
{
- if (!_sending)
+ if (xrSendReceiverReferenceTimeEnabled_ && !_sending)
{
rtcpPacketTypeFlags |= kRtcpXrReceiverReferenceTime;
}
@@ -1920,8 +1914,9 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
return position;
}
TRACE_EVENT_INSTANT0("webrtc_rtp", "RTCPSender::PLI");
- _pliCount++;
- TRACE_COUNTER_ID1("webrtc_rtp", "RTCP_PLICount", _SSRC, _pliCount);
+ ++packet_type_counter_.pli_packets;
+ TRACE_COUNTER_ID1("webrtc_rtp", "RTCP_PLICount", _SSRC,
+ packet_type_counter_.pli_packets);
}
if(rtcpPacketTypeFlags & kRtcpFir)
{
@@ -1932,9 +1927,9 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
return position;
}
TRACE_EVENT_INSTANT0("webrtc_rtp", "RTCPSender::FIR");
- _fullIntraRequestCount++;
+ ++packet_type_counter_.fir_packets;
TRACE_COUNTER_ID1("webrtc_rtp", "RTCP_FIRCount", _SSRC,
- _fullIntraRequestCount);
+ packet_type_counter_.fir_packets);
}
if(rtcpPacketTypeFlags & kRtcpSli)
{
@@ -2017,8 +2012,9 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
}
TRACE_EVENT_INSTANT1("webrtc_rtp", "RTCPSender::NACK",
"nacks", TRACE_STR_COPY(nackString.c_str()));
- _nackCount++;
- TRACE_COUNTER_ID1("webrtc_rtp", "RTCP_NACKCount", _SSRC, _nackCount);
+ ++packet_type_counter_.nack_packets;
+ TRACE_COUNTER_ID1("webrtc_rtp", "RTCP_NACKCount", _SSRC,
+ packet_type_counter_.nack_packets);
}
if(rtcpPacketTypeFlags & kRtcpXrVoipMetric)
{
@@ -2065,7 +2061,7 @@ bool RTCPSender::PrepareReport(const FeedbackState& feedback_state,
RTCPReportBlock* report_block,
uint32_t* ntp_secs, uint32_t* ntp_frac) {
// Do we have receive statistics to send?
- StreamStatistician::Statistics stats;
+ RtcpStatistics stats;
if (!statistician->GetStatistics(&stats, true))
return false;
report_block->fractionLost = stats.fraction_lost;
@@ -2123,13 +2119,7 @@ int32_t
RTCPSender::SetCSRCs(const uint32_t arrOfCSRC[kRtpCsrcSize],
const uint8_t arrLength)
{
- if(arrLength > kRtpCsrcSize)
- {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
- assert(false);
- return -1;
- }
-
+ assert(arrLength <= kRtpCsrcSize);
CriticalSectionScoped lock(_criticalSectionRTCPSender);
for(int i = 0; i < arrLength;i++)
@@ -2148,7 +2138,7 @@ RTCPSender::SetApplicationSpecificData(const uint8_t subType,
{
if(length %4 != 0)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+ LOG(LS_ERROR) << "Failed to SetApplicationSpecificData.";
return -1;
}
CriticalSectionScoped lock(_criticalSectionRTCPSender);
@@ -2194,17 +2184,10 @@ int32_t RTCPSender::WriteAllReportBlocksToBuffer(
uint8_t& numberOfReportBlocks,
const uint32_t NTPsec,
const uint32_t NTPfrac) {
- // sanity one block
- if(pos + 24 >= IP_PACKET_SIZE) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s invalid argument", __FUNCTION__);
- return -1;
- }
numberOfReportBlocks = external_report_blocks_.size();
numberOfReportBlocks += internal_report_blocks_.size();
if ((pos + numberOfReportBlocks * 24) >= IP_PACKET_SIZE) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Can't fit all report blocks.";
return -1;
}
pos = WriteReportBlocksToBuffer(rtcpbuffer, pos, internal_report_blocks_);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
index 9ed58244438..cbbc32aac6a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
@@ -180,6 +180,8 @@ public:
void SetTargetBitrate(unsigned int target_bitrate);
+ void GetPacketTypeCounter(RtcpPacketTypeCounter* packet_counter) const;
+
private:
int32_t SendToNetwork(const uint8_t* dataBuffer, const uint16_t length);
@@ -342,10 +344,7 @@ private:
bool _xrSendVoIPMetric;
RTCPVoIPMetric _xrVoIPMetric;
- // Counters
- uint32_t _nackCount;
- uint32_t _pliCount;
- uint32_t _fullIntraRequestCount;
+ RtcpPacketTypeCounter packet_type_counter_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
index a8b5275fa20..dfb655c5167 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
@@ -278,12 +278,13 @@ class RtcpSenderTest : public ::testing::Test {
: over_use_detector_options_(),
clock_(1335900000),
rtp_payload_registry_(new RTPPayloadRegistry(
- 0, RTPPayloadStrategy::CreateStrategy(false))),
+ RTPPayloadStrategy::CreateStrategy(false))),
remote_bitrate_observer_(),
remote_bitrate_estimator_(
RemoteBitrateEstimatorFactory().Create(
&remote_bitrate_observer_,
&clock_,
+ kMimdControl,
kRemoteBitrateEstimatorMinBitrateBps)),
receive_statistics_(ReceiveStatistics::Create(&clock_)) {
test_transport_ = new TestTransport();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
index 705a38b0161..9acab735e50 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
@@ -1266,31 +1266,27 @@ RTCPUtility::RTCPParserV2::ParseFBCommon(const RTCPCommonHeader& header)
}
}
-bool
-RTCPUtility::RTCPParserV2::ParseRPSIItem()
-{
- // RFC 4585 6.3.3. Reference Picture Selection Indication (RPSI)
- /*
- 0 1 2 3
- 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- | PB |0| Payload Type| Native RPSI bit string |
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- | defined per codec ... | Padding (0) |
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
+bool RTCPUtility::RTCPParserV2::ParseRPSIItem() {
+
+ // RFC 4585 6.3.3. Reference Picture Selection Indication (RPSI).
+ //
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | PB |0| Payload Type| Native RPSI bit string |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | defined per codec ... | Padding (0) |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
- if (length < 4)
- {
+ if (length < 4) {
_state = State_TopLevel;
EndCurrentBlock();
return false;
}
- if(length > 2+RTCP_RPSI_DATA_SIZE)
- {
+ if (length > 2 + RTCP_RPSI_DATA_SIZE) {
_state = State_TopLevel;
EndCurrentBlock();
@@ -1299,12 +1295,14 @@ RTCPUtility::RTCPParserV2::ParseRPSIItem()
_packetType = kRtcpPsfbRpsiCode;
- uint8_t paddingBits = *_ptrRTCPData++;
+ uint8_t padding_bits = *_ptrRTCPData++;
_packet.RPSI.PayloadType = *_ptrRTCPData++;
- memcpy(_packet.RPSI.NativeBitString, _ptrRTCPData, length-2);
+ memcpy(_packet.RPSI.NativeBitString, _ptrRTCPData, length - 2);
+ _ptrRTCPData += length - 2;
- _packet.RPSI.NumberOfValidBits = uint16_t(length-2)*8 - paddingBits;
+ _packet.RPSI.NumberOfValidBits =
+ static_cast<uint16_t>(length - 2) * 8 - padding_bits;
return true;
}
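The reworked ParseRPSIItem keeps the RFC 4585 layout shown above (one byte of padding count, one byte of payload type, then length - 2 bytes of native bit string) and adds the previously missing advance of _ptrRTCPData past the bit string. A minimal check of the NumberOfValidBits arithmetic, with illustrative values:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    int main() {
      // An 8-byte RPSI item: padding count, payload type, 6 bytes of bit string.
      const std::ptrdiff_t length = 8;
      const uint8_t padding_bits = 5;  // example: the last 5 bits are padding
      const uint16_t valid_bits =
          static_cast<uint16_t>(length - 2) * 8 - padding_bits;
      assert(valid_bits == 43);  // 6 bytes * 8 bits - 5 padding bits
      (void)valid_bits;
      return 0;
    }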
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc
index 904156e9445..fa847625ead 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc
@@ -41,7 +41,7 @@ template <typename T> void ClearList(std::list<T*>* my_list) {
class RtpFecTest : public ::testing::Test {
protected:
RtpFecTest()
- : fec_(new ForwardErrorCorrection(0)), ssrc_(rand()), fec_seq_num_(0) {}
+ : fec_(new ForwardErrorCorrection()), ssrc_(rand()), fec_seq_num_(0) {}
ForwardErrorCorrection* fec_;
int ssrc_;
@@ -86,43 +86,6 @@ class RtpFecTest : public ::testing::Test {
void TearDown();
};
-// TODO(marpan): Consider adding table for input/output to simplify tests.
-
-TEST_F(RtpFecTest, HandleIncorrectInputs) {
- int kNumImportantPackets = 0;
- bool kUseUnequalProtection = false;
- uint8_t kProtectionFactor = 60;
-
- // Media packet list is empty.
- EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
- kNumImportantPackets, kUseUnequalProtection,
- webrtc::kFecMaskBursty, &fec_packet_list_));
-
- int num_media_packets = 10;
- ConstructMediaPackets(num_media_packets);
-
- kNumImportantPackets = -1;
- // Number of important packets below 0.
- EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
- kNumImportantPackets, kUseUnequalProtection,
- webrtc::kFecMaskBursty, &fec_packet_list_));
-
- kNumImportantPackets = 12;
- // Number of important packets greater than number of media packets.
- EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
- kNumImportantPackets, kUseUnequalProtection,
- webrtc::kFecMaskBursty, &fec_packet_list_));
-
- num_media_packets = kMaxNumberMediaPackets + 1;
- ConstructMediaPackets(num_media_packets);
-
- kNumImportantPackets = 0;
- // Number of media packet is above maximum allowed (kMaxNumberMediaPackets).
- EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
- kNumImportantPackets, kUseUnequalProtection,
- webrtc::kFecMaskBursty, &fec_packet_list_));
-}
-
TEST_F(RtpFecTest, FecRecoveryNoLoss) {
const int kNumImportantPackets = 0;
const bool kUseUnequalProtection = false;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h
index 650c0fad5e5..e4d3dc08362 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h
@@ -28,8 +28,8 @@
#include <queue>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
index 13eb0e101c7..e146492c72f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
@@ -18,9 +18,9 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
index fcc7587c1fc..edffe8aecb6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
@@ -22,7 +22,7 @@ const uint16_t kRtpOneByteHeaderExtensionId = 0xBEDE;
const size_t kRtpOneByteHeaderLength = 4;
const size_t kTransmissionTimeOffsetLength = 4;
-const size_t kAudioLevelLength = 2;
+const size_t kAudioLevelLength = 4;
const size_t kAbsoluteSendTimeLength = 4;
struct HeaderExtension {
@@ -37,11 +37,7 @@ struct HeaderExtension {
length = kTransmissionTimeOffsetLength;
break;
case kRtpExtensionAudioLevel:
- // TODO(solenberg): Because of how the audio level extension is handled
- // in RTPSenderAudio::SendAudio(), we cannot set the actual length here
- // but must leave it at zero. The consequence is that any other header
- // extensions registered for an audio channel are effectively ignored.
- // length = kAudioLevelLength;
+ length = kAudioLevelLength;
break;
case kRtpExtensionAbsoluteSendTime:
length = kAbsoluteSendTimeLength;
@@ -66,6 +62,8 @@ class RtpHeaderExtensionMap {
int32_t Deregister(const RTPExtensionType type);
+ bool IsRegistered(RTPExtensionType type) const;
+
int32_t GetType(const uint8_t id, RTPExtensionType* type) const;
int32_t GetId(const RTPExtensionType type, uint8_t* id) const;
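These constants describe elements of the RFC 5285 "one-byte header" extension block introduced by the 0xBEDE marker: each element is a single ID/length byte followed by its data bytes, and the block as a whole is padded to a 32-bit boundary. The sketch below writes such a block. The value 4 that this diff assigns to kAudioLevelLength is WebRTC's own bookkeeping; reading it as the single audio-level data byte plus the header byte and padding is an inference, not something the diff states.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Append one RFC 5285 one-byte-header element: an ID/len byte (the stored
    // length field is len - 1, so len must be 1..16) followed by the data.
    void AppendOneByteExtension(std::vector<uint8_t>* block, uint8_t id,
                                const uint8_t* data, std::size_t len) {
      block->push_back(static_cast<uint8_t>((id << 4) | ((len - 1) & 0x0F)));
      block->insert(block->end(), data, data + len);
    }

    // Close the block: pad to a multiple of 4 bytes, then prepend the 0xBEDE
    // profile marker and the payload length counted in 32-bit words.
    std::vector<uint8_t> FinishOneByteBlock(std::vector<uint8_t> elements) {
      while (elements.size() % 4 != 0)
        elements.push_back(0);
      const std::size_t words = elements.size() / 4;
      std::vector<uint8_t> block = {0xBE, 0xDE,
                                    static_cast<uint8_t>(words >> 8),
                                    static_cast<uint8_t>(words & 0xFF)};
      block.insert(block.end(), elements.begin(), elements.end());
      return block;
    }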
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
index d04872582b5..bb24d4dbfb7 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
@@ -13,7 +13,6 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -60,8 +59,6 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet, int length,
const bool valid_rtpheader = rtp_parser.Parse(*header, &map);
if (!valid_rtpheader) {
- WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, -1,
- "IncomingPacket invalid RTP header");
return false;
}
return true;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
index 14ca821f7e5..e3515f44543 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
@@ -18,7 +18,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -33,13 +33,21 @@ RTPPacketHistory::RTPPacketHistory(Clock* clock)
}
RTPPacketHistory::~RTPPacketHistory() {
- Free();
+ {
+ CriticalSectionScoped cs(critsect_);
+ Free();
+ }
delete critsect_;
}
-void RTPPacketHistory::SetStorePacketsStatus(bool enable,
+void RTPPacketHistory::SetStorePacketsStatus(bool enable,
uint16_t number_to_store) {
+ CriticalSectionScoped cs(critsect_);
if (enable) {
+ if (store_) {
+ LOG(LS_WARNING) << "Purging packet history in order to re-set status.";
+ Free();
+ }
Allocate(number_to_store);
} else {
Free();
@@ -48,13 +56,7 @@ void RTPPacketHistory::SetStorePacketsStatus(bool enable,
void RTPPacketHistory::Allocate(uint16_t number_to_store) {
assert(number_to_store > 0);
- CriticalSectionScoped cs(critsect_);
- if (store_) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "SetStorePacketsStatus already set, number: %d", number_to_store);
- return;
- }
-
+ assert(!store_);
store_ = true;
stored_packets_.resize(number_to_store);
stored_seq_nums_.resize(number_to_store);
@@ -65,13 +67,12 @@ void RTPPacketHistory::Allocate(uint16_t number_to_store) {
}
void RTPPacketHistory::Free() {
- CriticalSectionScoped cs(critsect_);
if (!store_) {
return;
}
std::vector<std::vector<uint8_t> >::iterator it;
- for (it = stored_packets_.begin(); it != stored_packets_.end(); ++it) {
+ for (it = stored_packets_.begin(); it != stored_packets_.end(); ++it) {
it->clear();
}
@@ -130,8 +131,8 @@ int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
VerifyAndAllocatePacketLength(max_packet_length);
if (packet_length > max_packet_length_) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, -1,
- "Failed to store RTP packet, length: %d", packet_length);
+ LOG(LS_WARNING) << "Failed to store RTP packet with length: "
+ << packet_length;
return -1;
}
@@ -156,46 +157,6 @@ int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
return 0;
}
-int32_t RTPPacketHistory::ReplaceRTPHeader(const uint8_t* packet,
- uint16_t sequence_number,
- uint16_t rtp_header_length) {
- CriticalSectionScoped cs(critsect_);
- if (!store_) {
- return 0;
- }
-
- assert(packet);
- assert(rtp_header_length > 3);
-
- if (rtp_header_length > max_packet_length_) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "Failed to replace RTP packet, length: %d", rtp_header_length);
- return -1;
- }
-
- int32_t index = 0;
- bool found = FindSeqNum(sequence_number, &index);
- if (!found) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "No match for getting seqNum %u", sequence_number);
- return -1;
- }
-
- uint16_t length = stored_lengths_.at(index);
- if (length == 0 || length > max_packet_length_) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "No match for getting seqNum %u, len %d", sequence_number, length);
- return -1;
- }
- assert(stored_seq_nums_[index] == sequence_number);
-
- // Update RTP header.
- std::vector<std::vector<uint8_t> >::iterator it =
- stored_packets_.begin() + index;
- std::copy(packet, packet + rtp_header_length, it->begin());
- return 0;
-}
-
bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const {
CriticalSectionScoped cs(critsect_);
if (!store_) {
@@ -207,7 +168,7 @@ bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const {
if (!found) {
return false;
}
-
+
uint16_t length = stored_lengths_.at(index);
if (length == 0 || length > max_packet_length_) {
// Invalid length.
@@ -222,6 +183,7 @@ bool RTPPacketHistory::GetPacketAndSetSendTime(uint16_t sequence_number,
uint8_t* packet,
uint16_t* packet_length,
int64_t* stored_time_ms) {
+ assert(*packet_length >= max_packet_length_);
CriticalSectionScoped cs(critsect_);
if (!store_) {
return false;
@@ -230,30 +192,22 @@ bool RTPPacketHistory::GetPacketAndSetSendTime(uint16_t sequence_number,
int32_t index = 0;
bool found = FindSeqNum(sequence_number, &index);
if (!found) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "No match for getting seqNum %u", sequence_number);
+ LOG(LS_WARNING) << "No match for getting seqNum " << sequence_number;
return false;
}
uint16_t length = stored_lengths_.at(index);
- if (length == 0 || length > max_packet_length_) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "No match for getting seqNum %u, len %d", sequence_number, length);
- return false;
- }
-
- if (length > *packet_length) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "Input buffer too short for packet %u", sequence_number);
+ assert(length <= max_packet_length_);
+ if (length == 0) {
+ LOG(LS_WARNING) << "No match for getting seqNum " << sequence_number
+ << ", len " << length;
return false;
}
- // Verify elapsed time since last retrieve.
+ // Verify elapsed time since last retrieve.
int64_t now = clock_->TimeInMilliseconds();
if (min_elapsed_time_ms > 0 &&
((now - stored_send_times_.at(index)) < min_elapsed_time_ms)) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "Skip getting packet %u, packet recently resent.", sequence_number);
return false;
}
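With this change GetPacketAndSetSendTime asserts that the caller's buffer length is at least max_packet_length_, so the old "Input buffer too short" branch is gone (the GetRtpPacket_TooSmallBuffer test is removed further down for the same reason) and callers are expected to always pass a full-size buffer. A sketch of the resulting calling pattern against a stand-in interface rather than the real class; the parameter order follows the unit tests:

    #include <cstdint>

    // Stand-in for the slice of RTPPacketHistory exercised here; the real
    // class is declared in rtp_packet_history.h.
    struct PacketHistoryLike {
      virtual bool GetPacketAndSetSendTime(uint16_t sequence_number,
                                           int64_t min_elapsed_time_ms,
                                           bool retransmit, uint8_t* packet,
                                           uint16_t* packet_length,
                                           int64_t* stored_time_ms) = 0;
      virtual ~PacketHistoryLike() {}
    };

    const uint16_t kMaxRtpPacketLength = 1500;  // assumed configured maximum

    bool Resend(PacketHistoryLike* history, uint16_t seq) {
      uint8_t buffer[kMaxRtpPacketLength];
      // packet_length is in/out and must start at the full buffer size, which
      // in turn must cover the history's max packet length, or the new assert
      // fires.
      uint16_t packet_length = kMaxRtpPacketLength;
      int64_t stored_time_ms = 0;
      return history->GetPacketAndSetSendTime(seq, 0, false, buffer,
                                              &packet_length, &stored_time_ms);
    }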
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
index 785e4992b0a..190e5057bc9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
@@ -18,6 +18,7 @@
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/typedefs.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
namespace webrtc {
@@ -40,14 +41,6 @@ class RTPPacketHistory {
int64_t capture_time_ms,
StorageType type);
- // Replaces the stored RTP packet with matching sequence number with the
- // RTP header of the provided packet.
- // Note: Calling this function assumes that the RTP header length should not
- // have changed since the packet was stored.
- int32_t ReplaceRTPHeader(const uint8_t* packet,
- uint16_t sequence_number,
- uint16_t rtp_header_length);
-
// Gets stored RTP packet corresponding to the input sequence number.
// The packet is copied to the buffer pointed to by ptr_rtp_packet.
// The rtp_packet_length should show the available buffer size.
@@ -74,8 +67,8 @@ class RTPPacketHistory {
private:
void GetPacket(int index, uint8_t* packet, uint16_t* packet_length,
int64_t* stored_time_ms) const;
- void Allocate(uint16_t number_to_store);
- void Free();
+ void Allocate(uint16_t number_to_store) EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+ void Free() EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
void VerifyAndAllocatePacketLength(uint16_t packet_length);
bool FindSeqNum(uint16_t sequence_number, int32_t* index) const;
int FindBestFittingPacket(uint16_t size) const;
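Allocate() and Free() are now annotated EXCLUSIVE_LOCKS_REQUIRED(*critsect_) and no longer take the lock themselves; the public entry points lock once and Clang's thread-safety analysis can then check the contract. A compact, self-contained version of the same pattern, written against the underlying Clang attributes rather than WebRTC's thread_annotations.h macros (the macro names below are local to this sketch):

    // Expand to Clang thread-safety attributes when available, else to nothing.
    #if defined(__clang__)
    #define TS_ANNOTATE(x) __attribute__((x))
    #else
    #define TS_ANNOTATE(x)
    #endif
    #define TS_CAPABILITY(x) TS_ANNOTATE(capability(x))
    #define TS_REQUIRES(x) TS_ANNOTATE(requires_capability(x))
    #define TS_ACQUIRE() TS_ANNOTATE(acquire_capability())
    #define TS_RELEASE() TS_ANNOTATE(release_capability())

    #include <mutex>

    class TS_CAPABILITY("mutex") Lock {
     public:
      void Acquire() TS_ACQUIRE() { m_.lock(); }
      void Release() TS_RELEASE() { m_.unlock(); }
     private:
      std::mutex m_;
    };

    class PacketStore {
     public:
      void SetStoreStatus(bool enable) {
        lock_.Acquire();
        if (enable)
          Allocate();  // Fine: lock_ is held at this point.
        else
          Free();
        lock_.Release();
      }
     private:
      void Allocate() TS_REQUIRES(lock_) { /* resize storage */ }
      void Free() TS_REQUIRES(lock_) { /* release storage */ }
      Lock lock_;
    };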
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
index 1682b7c3387..7eb22ff69db 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
@@ -103,19 +103,6 @@ TEST_F(RtpPacketHistoryTest, PutRtpPacket_TooLargePacketLength) {
kAllowRetransmission));
}
-TEST_F(RtpPacketHistoryTest, GetRtpPacket_TooSmallBuffer) {
- hist_->SetStorePacketsStatus(true, 10);
- uint16_t len = 0;
- int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
- CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
- EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
- capture_time_ms, kAllowRetransmission));
- uint16_t len_out = len - 1;
- int64_t time;
- EXPECT_FALSE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_,
- &len_out, &time));
-}
-
TEST_F(RtpPacketHistoryTest, GetRtpPacket_NotStored) {
hist_->SetStorePacketsStatus(true, 10);
uint16_t len = kMaxPacketLength;
@@ -155,42 +142,6 @@ TEST_F(RtpPacketHistoryTest, GetRtpPacket) {
}
}
-TEST_F(RtpPacketHistoryTest, ReplaceRtpHeader) {
- hist_->SetStorePacketsStatus(true, 10);
-
- uint16_t len = 0;
- int64_t capture_time_ms = 1;
- CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
- // Replace should fail, packet is not stored.
- EXPECT_EQ(-1, hist_->ReplaceRTPHeader(packet_, kSeqNum, len));
- EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
- capture_time_ms, kAllowRetransmission));
-
- // Create modified packet and replace.
- len = 0;
- CreateRtpPacket(kSeqNum, kSsrc + 1, kPayload + 2, kTimestamp, packet_, &len);
- EXPECT_EQ(0, hist_->ReplaceRTPHeader(packet_, kSeqNum, len));
-
- uint16_t len_out = kMaxPacketLength;
- int64_t time;
- EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_out_,
- &len_out, &time));
- EXPECT_EQ(len, len_out);
- EXPECT_EQ(capture_time_ms, time);
- for (int i = 0; i < len; i++) {
- EXPECT_EQ(packet_[i], packet_out_[i]);
- }
-
- // Replace should fail, too large length.
- EXPECT_EQ(-1, hist_->ReplaceRTPHeader(packet_, kSeqNum,
- kMaxPacketLength + 1));
-
- // Replace should fail, packet is not stored.
- len = 0;
- CreateRtpPacket(kSeqNum + 1, kSsrc, kPayload, kTimestamp, packet_, &len);
- EXPECT_EQ(-1, hist_->ReplaceRTPHeader(packet_, kSeqNum + 1, len));
-}
-
TEST_F(RtpPacketHistoryTest, NoCaptureTime) {
hist_->SetStorePacketsStatus(true, 10);
uint16_t len = 0;
@@ -236,10 +187,10 @@ TEST_F(RtpPacketHistoryTest, MinResendTime) {
capture_time_ms, kAllowRetransmission));
int64_t time;
+ len = kMaxPacketLength;
EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 100, false, packet_, &len,
&time));
fake_clock_.AdvanceTimeMilliseconds(100);
-
// Time has elapsed.
len = kMaxPacketLength;
EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 100, false, packet_, &len,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
index 1c3b990c5de..db2e4cd31da 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
@@ -10,15 +10,13 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
RTPPayloadRegistry::RTPPayloadRegistry(
- const int32_t id,
RTPPayloadStrategy* rtp_payload_strategy)
: crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- id_(id),
rtp_payload_strategy_(rtp_payload_strategy),
red_payload_type_(-1),
ulpfec_payload_type_(-1),
@@ -60,9 +58,8 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
case 77: // 205 Transport layer FB message.
case 78: // 206 Payload-specific FB message.
case 79: // 207 Extended report.
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s invalid payloadtype:%d",
- __FUNCTION__, payload_type);
+ LOG(LS_ERROR) << "Can't register invalid receiver payload type: "
+ << payload_type;
return -1;
default:
break;
@@ -94,9 +91,7 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
return 0;
}
}
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s invalid argument payload_type:%d already registered",
- __FUNCTION__, payload_type);
+ LOG(LS_ERROR) << "Payload type already registered: " << payload_type;
return -1;
}
@@ -138,14 +133,8 @@ int32_t RTPPayloadRegistry::DeRegisterReceivePayload(
const int8_t payload_type) {
CriticalSectionScoped cs(crit_sect_.get());
ModuleRTPUtility::PayloadTypeMap::iterator it =
- payload_type_map_.find(payload_type);
-
- if (it == payload_type_map_.end()) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s failed to find payload_type:%d",
- __FUNCTION__, payload_type);
- return -1;
- }
+ payload_type_map_.find(payload_type);
+ assert(it != payload_type_map_.end());
delete it->second;
payload_type_map_.erase(it);
return 0;
@@ -194,11 +183,7 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
const uint8_t channels,
const uint32_t rate,
int8_t* payload_type) const {
- if (payload_type == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s invalid argument", __FUNCTION__);
- return -1;
- }
+ assert(payload_type);
size_t payload_name_length = strlen(payload_name);
CriticalSectionScoped cs(crit_sect_.get());
@@ -243,12 +228,6 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
return -1;
}
-void RTPPayloadRegistry::SetRtxStatus(bool enable, uint32_t ssrc) {
- CriticalSectionScoped cs(crit_sect_.get());
- rtx_ = enable;
- ssrc_rtx_ = ssrc;
-}
-
bool RTPPayloadRegistry::RtxEnabled() const {
CriticalSectionScoped cs(crit_sect_.get());
return rtx_;
@@ -296,17 +275,24 @@ bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t** restored_packet,
(*restored_packet)[1] |= kRtpMarkerBitMask; // Marker bit is set.
}
} else {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Incorrect RTX configuration, dropping packet.");
+ LOG(LS_WARNING) << "Incorrect RTX configuration, dropping packet.";
return false;
}
}
return true;
}
+void RTPPayloadRegistry::SetRtxSsrc(uint32_t ssrc) {
+ CriticalSectionScoped cs(crit_sect_.get());
+ ssrc_rtx_ = ssrc;
+ rtx_ = true;
+}
+
void RTPPayloadRegistry::SetRtxPayloadType(int payload_type) {
CriticalSectionScoped cs(crit_sect_.get());
+ assert(payload_type >= 0);
payload_type_rtx_ = payload_type;
+ rtx_ = true;
}
bool RTPPayloadRegistry::IsRed(const RTPHeader& header) const {
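The old SetRtxStatus(enable, ssrc) entry point is gone; the registry (and ModuleRtpRtcpImpl further down) now turns RTX on implicitly when either the RTX SSRC or the RTX payload type is configured. A small stand-in showing the resulting behaviour; the class here is a placeholder, the real entry points are the SetRtxSsrc/SetRtxPayloadType methods in the hunks above:

    #include <cstdint>

    // Placeholder mirroring the RTX state touched by this diff.
    class RtxConfig {
     public:
      RtxConfig() : rtx_(false), ssrc_rtx_(0), payload_type_rtx_(-1) {}
      void SetRtxSsrc(uint32_t ssrc) {  // enabling RTX is now a side effect
        ssrc_rtx_ = ssrc;
        rtx_ = true;
      }
      void SetRtxPayloadType(int payload_type) {  // likewise
        payload_type_rtx_ = payload_type;
        rtx_ = true;
      }
      bool RtxEnabled() const { return rtx_; }
     private:
      bool rtx_;
      uint32_t ssrc_rtx_;
      int payload_type_rtx_;
    };

    // Usage: where callers previously did SetRtxStatus(true, rtx_ssrc), they
    // now call SetRtxSsrc(rtx_ssrc) and SetRtxPayloadType(rtx_payload_type).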
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
index 96fa80ad842..c03ffcd1f3f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
@@ -32,8 +32,7 @@ class RtpPayloadRegistryTest : public ::testing::Test {
void SetUp() {
// Note: the payload registry takes ownership of the strategy.
mock_payload_strategy_ = new testing::NiceMock<MockRTPPayloadStrategy>();
- rtp_payload_registry_.reset(
- new RTPPayloadRegistry(123, mock_payload_strategy_));
+ rtp_payload_registry_.reset(new RTPPayloadRegistry(mock_payload_strategy_));
}
protected:
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
index 134548518ea..c8104cc3731 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
@@ -15,7 +15,7 @@
#include <string.h> // memcpy()
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -277,11 +277,8 @@ int32_t RTPReceiverAudio::InvokeOnInitializeDecoder(
specific_payload.Audio.frequency,
specific_payload.Audio.channels,
specific_payload.Audio.rate)) {
- WEBRTC_TRACE(kTraceError,
- kTraceRtpRtcp,
- id,
- "Failed to create video decoder for payload type:%d",
- payload_type);
+ LOG(LS_ERROR) << "Failed to create decoder for payload type: "
+ << payload_name << "/" << payload_type;
return -1;
}
return 0;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
index 9a276819ab8..d92618f2d57 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
@@ -18,7 +18,7 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -39,7 +39,7 @@ RtpReceiver* RtpReceiver::CreateVideoReceiver(
return new RtpReceiverImpl(
id, clock, NullObjectRtpAudioFeedback(), incoming_messages_callback,
rtp_payload_registry,
- RTPReceiverStrategy::CreateVideoStrategy(id, incoming_payload_callback));
+ RTPReceiverStrategy::CreateVideoStrategy(incoming_payload_callback));
}
RtpReceiver* RtpReceiver::CreateAudioReceiver(
@@ -87,8 +87,6 @@ RtpReceiverImpl::RtpReceiverImpl(int32_t id,
assert(incoming_messages_callback);
memset(current_remote_csrc_, 0, sizeof(current_remote_csrc_));
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
}
RtpReceiverImpl::~RtpReceiverImpl() {
@@ -96,7 +94,6 @@ RtpReceiverImpl::~RtpReceiverImpl() {
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, current_remote_csrc_[i],
false);
}
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id_, "%s deleted", __FUNCTION__);
}
RTPReceiverStrategy* RtpReceiverImpl::GetMediaReceiver() const {
@@ -127,9 +124,8 @@ int32_t RtpReceiverImpl::RegisterReceivePayload(
if (created_new_payload) {
if (rtp_media_receiver_->OnNewPayloadTypeCreated(payload_name, payload_type,
frequency) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s failed to register payload",
- __FUNCTION__);
+ LOG(LS_ERROR) << "Failed to register payload: " << payload_name << "/"
+ << payload_type;
return -1;
}
}
@@ -182,19 +178,12 @@ bool RtpReceiverImpl::IncomingRtpPacket(
PayloadUnion payload_specific,
bool in_order) {
// Sanity check.
- if (payload_length < 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s invalid argument",
- __FUNCTION__);
- return false;
- }
- int8_t first_payload_byte = 0;
- if (payload_length > 0) {
- first_payload_byte = payload[0];
- }
+ assert(payload_length >= 0);
+
// Trigger our callbacks.
CheckSSRCChanged(rtp_header);
+ int8_t first_payload_byte = payload_length > 0 ? payload[0] : 0;
bool is_red = false;
bool should_reset_statistics = false;
@@ -205,14 +194,9 @@ bool RtpReceiverImpl::IncomingRtpPacket(
&should_reset_statistics) == -1) {
if (payload_length == 0) {
// OK, keep-alive packet.
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "%s received keepalive",
- __FUNCTION__);
return true;
}
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "%s received invalid payloadtype",
- __FUNCTION__);
+ LOG(LS_WARNING) << "Receiving invalid payload type.";
return false;
}
@@ -347,9 +331,8 @@ void RtpReceiverImpl::CheckSSRCChanged(const RTPHeader& rtp_header) {
id_, rtp_header.payloadType, payload_name,
rtp_header.payload_type_frequency, channels, rate)) {
// New stream, same codec.
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "Failed to create decoder for payload type:%d",
- rtp_header.payloadType);
+ LOG(LS_ERROR) << "Failed to create decoder for payload type: "
+ << rtp_header.payloadType;
}
}
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
index d8a22579621..09c9b6fc300 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
@@ -26,8 +26,7 @@ class TelephoneEventHandler;
// This class is not thread-safe and must be protected by its caller.
class RTPReceiverStrategy {
public:
- static RTPReceiverStrategy* CreateVideoStrategy(int32_t id,
- RtpData* data_callback);
+ static RTPReceiverStrategy* CreateVideoStrategy(RtpData* data_callback);
static RTPReceiverStrategy* CreateAudioStrategy(
int32_t id, RtpData* data_callback,
RtpAudioFeedback* incoming_messages_callback);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
index b733cdb4b05..5bb519f6220 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -17,19 +17,18 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
RTPReceiverStrategy* RTPReceiverStrategy::CreateVideoStrategy(
- int32_t id, RtpData* data_callback) {
- return new RTPReceiverVideo(id, data_callback);
+ RtpData* data_callback) {
+ return new RTPReceiverVideo(data_callback);
}
-RTPReceiverVideo::RTPReceiverVideo(int32_t id, RtpData* data_callback)
- : RTPReceiverStrategy(data_callback),
- id_(id) {}
+RTPReceiverVideo::RTPReceiverVideo(RtpData* data_callback)
+ : RTPReceiverStrategy(data_callback) {}
RTPReceiverVideo::~RTPReceiverVideo() {
}
@@ -93,11 +92,8 @@ int32_t RTPReceiverVideo::InvokeOnInitializeDecoder(
// For video we just go with default values.
if (-1 == callback->OnInitializeDecoder(
id, payload_type, payload_name, kVideoPayloadTypeFrequency, 1, 0)) {
- WEBRTC_TRACE(kTraceError,
- kTraceRtpRtcp,
- id,
- "Failed to create video decoder for payload type:%d",
- payload_type);
+ LOG(LS_ERROR) << "Failed to created decoder for payload type: "
+ << payload_type;
return -1;
}
return 0;
@@ -111,13 +107,6 @@ int32_t RTPReceiverVideo::ParseVideoCodecSpecific(
RtpVideoCodecTypes video_type,
int64_t now_ms,
bool is_first_packet) {
- WEBRTC_TRACE(kTraceStream,
- kTraceRtpRtcp,
- id_,
- "%s(timestamp:%u)",
- __FUNCTION__,
- rtp_header->header.timestamp);
-
switch (rtp_header->type.Video.codec) {
case kRtpVideoGeneric:
rtp_header->type.Video.isFirstPacket = is_first_packet;
@@ -170,13 +159,8 @@ int32_t RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length) {
ModuleRTPUtility::RTPPayload parsed_packet;
- uint32_t id;
- {
- CriticalSectionScoped cs(crit_sect_.get());
- id = id_;
- }
ModuleRTPUtility::RTPPayloadParser rtp_payload_parser(
- kRtpVideoVp8, payload_data, payload_data_length, id);
+ kRtpVideoVp8, payload_data, payload_data_length);
if (!rtp_payload_parser.Parse(parsed_packet))
return -1;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
index ab69b40ee6f..4d81cb3972e 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
@@ -22,7 +22,7 @@ namespace webrtc {
class RTPReceiverVideo : public RTPReceiverStrategy {
public:
- RTPReceiverVideo(const int32_t id, RtpData* data_callback);
+ RTPReceiverVideo(RtpData* data_callback);
virtual ~RTPReceiverVideo();
@@ -80,8 +80,6 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
RtpVideoCodecTypes video_type,
int64_t now_ms,
bool is_first_packet);
-
- int32_t id_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi
index 070845bc795..dcd65988058 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi
@@ -20,6 +20,7 @@
# Common
'../interface/fec_receiver.h',
'../interface/receive_statistics.h',
+ '../interface/remote_ntp_time_estimator.h',
'../interface/rtp_header_parser.h',
'../interface/rtp_payload_registry.h',
'../interface/rtp_receiver.h',
@@ -32,10 +33,13 @@
'fec_receiver_impl.h',
'receive_statistics_impl.cc',
'receive_statistics_impl.h',
+ 'remote_ntp_time_estimator.cc',
'rtp_header_parser.cc',
'rtp_rtcp_config.h',
'rtp_rtcp_impl.cc',
'rtp_rtcp_impl.h',
+ 'rtcp_packet.cc',
+ 'rtcp_packet.h',
'rtcp_receiver.cc',
'rtcp_receiver.h',
'rtcp_receiver_help.cc',
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index 89e9eb294f8..469a41e1d36 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -17,11 +17,6 @@
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
-#ifdef MATLAB
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h"
-extern MatlabEngine eng; // Global variable defined elsewhere.
-#endif
-
#ifdef _WIN32
// Disable warning C4355: 'this' : used in base member initializer list.
#pragma warning(disable : 4355)
@@ -66,7 +61,9 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
configuration.outgoing_transport,
configuration.audio_messages,
configuration.paced_sender),
- rtcp_sender_(configuration.id, configuration.audio, configuration.clock,
+ rtcp_sender_(configuration.id,
+ configuration.audio,
+ configuration.clock,
configuration.receive_statistics),
rtcp_receiver_(configuration.id, configuration.clock, this),
clock_(configuration.clock),
@@ -83,15 +80,13 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
CriticalSectionWrapper::CreateCriticalSection()),
default_module_(
static_cast<ModuleRtpRtcpImpl*>(configuration.default_module)),
+ padding_index_(-1), // Start padding at the first child module.
nack_method_(kNackOff),
nack_last_time_sent_full_(0),
nack_last_seq_number_sent_(0),
simulcast_(false),
key_frame_req_method_(kKeyFrameReqFirRtp),
remote_bitrate_(configuration.remote_bitrate_estimator),
-#ifdef MATLAB
- , plot1_(NULL),
-#endif
rtt_stats_(configuration.rtt_stats),
critical_section_rtt_(CriticalSectionWrapper::CreateCriticalSection()),
rtt_ms_(0) {
@@ -110,13 +105,9 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
uint32_t SSRC = rtp_sender_.SSRC();
rtcp_sender_.SetSSRC(SSRC);
SetRtcpReceiverSsrcs(SSRC);
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id_, "%s created", __FUNCTION__);
}
ModuleRtpRtcpImpl::~ModuleRtpRtcpImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id_, "%s deleted", __FUNCTION__);
-
// All child modules MUST be deleted before deleting the default.
assert(child_modules_.empty());
@@ -125,21 +116,9 @@ ModuleRtpRtcpImpl::~ModuleRtpRtcpImpl() {
if (default_module_) {
default_module_->DeRegisterChildModule(this);
}
-#ifdef MATLAB
- if (plot1_) {
- eng.DeletePlot(plot1_);
- plot1_ = NULL;
- }
-#endif
}
void ModuleRtpRtcpImpl::RegisterChildModule(RtpRtcp* module) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "RegisterChildModule(module:0x%x)",
- module);
-
CriticalSectionScoped lock(
critical_section_module_ptrs_.get());
CriticalSectionScoped double_lock(
@@ -153,17 +132,12 @@ void ModuleRtpRtcpImpl::RegisterChildModule(RtpRtcp* module) {
}
void ModuleRtpRtcpImpl::DeRegisterChildModule(RtpRtcp* remove_module) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "DeRegisterChildModule(module:0x%x)", remove_module);
-
CriticalSectionScoped lock(
critical_section_module_ptrs_.get());
CriticalSectionScoped double_lock(
critical_section_module_ptrs_feedback_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module == remove_module) {
@@ -191,13 +165,7 @@ int32_t ModuleRtpRtcpImpl::Process() {
last_bitrate_process_time_ = now;
}
- bool default_instance = false;
- {
- CriticalSectionScoped cs(critical_section_module_ptrs_.get());
- if (!child_modules_.empty())
- default_instance = true;
- }
- if (!default_instance) {
+ if (!IsDefaultModule()) {
bool process_rtt = now >= last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs;
if (rtcp_sender_.Sending()) {
// Process RTT if we have received a receiver report and we haven't
@@ -269,16 +237,18 @@ int32_t ModuleRtpRtcpImpl::Process() {
return 0;
}
-int32_t ModuleRtpRtcpImpl::SetRTXSendStatus(int mode, bool set_ssrc,
- uint32_t ssrc) {
- rtp_sender_.SetRTXStatus(mode, set_ssrc, ssrc);
- return 0;
+void ModuleRtpRtcpImpl::SetRTXSendStatus(int mode) {
+ rtp_sender_.SetRTXStatus(mode);
}
-int32_t ModuleRtpRtcpImpl::RTXSendStatus(int* mode, uint32_t* ssrc,
- int* payload_type) const {
+void ModuleRtpRtcpImpl::RTXSendStatus(int* mode,
+ uint32_t* ssrc,
+ int* payload_type) const {
rtp_sender_.RTXStatus(mode, ssrc, payload_type);
- return 0;
+}
+
+void ModuleRtpRtcpImpl::SetRtxSsrc(uint32_t ssrc) {
+ rtp_sender_.SetRtxSsrc(ssrc);
}
void ModuleRtpRtcpImpl::SetRtxSendPayloadType(int payload_type) {
@@ -288,29 +258,12 @@ void ModuleRtpRtcpImpl::SetRtxSendPayloadType(int payload_type) {
int32_t ModuleRtpRtcpImpl::IncomingRtcpPacket(
const uint8_t* rtcp_packet,
const uint16_t length) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "IncomingRtcpPacket(packet_length:%u)", length);
- // Minimum RTP is 12 bytes.
- // Minimum RTCP is 8 bytes (RTCP BYE).
- if (length == 8) {
- WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, -1,
- "IncomingRtcpPacket invalid length");
- return false;
- }
- // Check RTP version.
- const uint8_t version = rtcp_packet[0] >> 6;
- if (version != 2) {
- WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, -1,
- "IncomingRtcpPacket invalid RTP version");
- return false;
- }
// Allow receive of non-compound RTCP packets.
RTCPUtility::RTCPParserV2 rtcp_parser(rtcp_packet, length, true);
const bool valid_rtcpheader = rtcp_parser.IsValid();
if (!valid_rtcpheader) {
- WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, id_,
- "IncomingRtcpPacket invalid RTCP packet");
+ LOG(LS_WARNING) << "Incoming invalid RTCP packet";
return -1;
}
RTCPHelp::RTCPPacketInformation rtcp_packet_information;
@@ -324,14 +277,6 @@ int32_t ModuleRtpRtcpImpl::IncomingRtcpPacket(
int32_t ModuleRtpRtcpImpl::RegisterSendPayload(
const CodecInst& voice_codec) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "RegisterSendPayload(pl_name:%s pl_type:%d frequency:%u)",
- voice_codec.plname,
- voice_codec.pltype,
- voice_codec.plfreq);
-
return rtp_sender_.RegisterPayload(
voice_codec.plname,
voice_codec.pltype,
@@ -342,13 +287,6 @@ int32_t ModuleRtpRtcpImpl::RegisterSendPayload(
int32_t ModuleRtpRtcpImpl::RegisterSendPayload(
const VideoCodec& video_codec) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "RegisterSendPayload(pl_name:%s pl_type:%d)",
- video_codec.plName,
- video_codec.plType);
-
send_video_codec_ = video_codec;
{
// simulcast_ is accessed when accessing child_modules_, so this write needs
@@ -365,11 +303,6 @@ int32_t ModuleRtpRtcpImpl::RegisterSendPayload(
int32_t ModuleRtpRtcpImpl::DeRegisterSendPayload(
const int8_t payload_type) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "DeRegisterSendPayload(%d)", payload_type);
-
return rtp_sender_.DeRegisterSendPayload(payload_type);
}
@@ -378,58 +311,37 @@ int8_t ModuleRtpRtcpImpl::SendPayloadType() const {
}
uint32_t ModuleRtpRtcpImpl::StartTimestamp() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "StartTimestamp()");
-
return rtp_sender_.StartTimestamp();
}
// Configure start timestamp, default is a random number.
int32_t ModuleRtpRtcpImpl::SetStartTimestamp(
const uint32_t timestamp) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetStartTimestamp(%d)",
- timestamp);
rtcp_sender_.SetStartTimestamp(timestamp);
rtp_sender_.SetStartTimestamp(timestamp, true);
return 0; // TODO(pwestin): change to void.
}
uint16_t ModuleRtpRtcpImpl::SequenceNumber() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SequenceNumber()");
-
return rtp_sender_.SequenceNumber();
}
// Set SequenceNumber, default is a random number.
int32_t ModuleRtpRtcpImpl::SetSequenceNumber(
const uint16_t seq_num) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetSequenceNumber(%d)",
- seq_num);
-
rtp_sender_.SetSequenceNumber(seq_num);
return 0; // TODO(pwestin): change to void.
}
uint32_t ModuleRtpRtcpImpl::SSRC() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SSRC()");
-
return rtp_sender_.SSRC();
}
// Configure SSRC, default is a random number.
-int32_t ModuleRtpRtcpImpl::SetSSRC(const uint32_t ssrc) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetSSRC(%d)", ssrc);
-
+void ModuleRtpRtcpImpl::SetSSRC(const uint32_t ssrc) {
rtp_sender_.SetSSRC(ssrc);
rtcp_sender_.SetSSRC(ssrc);
SetRtcpReceiverSsrcs(ssrc);
-
- return 0; // TODO(pwestin): change to void.
}
int32_t ModuleRtpRtcpImpl::SetCSRCStatus(const bool include) {
@@ -440,27 +352,17 @@ int32_t ModuleRtpRtcpImpl::SetCSRCStatus(const bool include) {
int32_t ModuleRtpRtcpImpl::CSRCs(
uint32_t arr_of_csrc[kRtpCsrcSize]) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "CSRCs()");
-
return rtp_sender_.CSRCs(arr_of_csrc);
}
int32_t ModuleRtpRtcpImpl::SetCSRCs(
const uint32_t arr_of_csrc[kRtpCsrcSize],
const uint8_t arr_length) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetCSRCs(arr_length:%d)",
- arr_length);
-
- const bool default_instance(child_modules_.empty() ? false : true);
-
- if (default_instance) {
+ if (IsDefaultModule()) {
// For default we need to update all child modules too.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -469,10 +371,6 @@ int32_t ModuleRtpRtcpImpl::SetCSRCs(
it++;
}
} else {
- for (int i = 0; i < arr_length; ++i) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "\tidx:%d CSRC:%u", i,
- arr_of_csrc[i]);
- }
rtcp_sender_.SetCSRCs(arr_of_csrc, arr_length);
rtp_sender_.SetCSRCs(arr_of_csrc, arr_length);
}
@@ -480,38 +378,23 @@ int32_t ModuleRtpRtcpImpl::SetCSRCs(
}
uint32_t ModuleRtpRtcpImpl::PacketCountSent() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "PacketCountSent()");
-
return rtp_sender_.Packets();
}
uint32_t ModuleRtpRtcpImpl::ByteCountSent() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ByteCountSent()");
-
return rtp_sender_.Bytes();
}
int ModuleRtpRtcpImpl::CurrentSendFrequencyHz() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "CurrentSendFrequencyHz()");
-
return rtp_sender_.SendPayloadFrequency();
}
int32_t ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) {
- if (sending) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetSendingStatus(sending)");
- } else {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetSendingStatus(stopped)");
- }
if (rtcp_sender_.Sending() != sending) {
// Sends RTCP BYE when going from true to false
RTCPSender::FeedbackState feedback_state(this);
if (rtcp_sender_.SetSendingStatus(feedback_state, sending) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Failed to send RTCP BYE");
+ LOG(LS_WARNING) << "Failed to send RTCP BYE";
}
collision_detected_ = false;
@@ -536,33 +419,21 @@ int32_t ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) {
}
bool ModuleRtpRtcpImpl::Sending() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "Sending()");
-
return rtcp_sender_.Sending();
}
int32_t ModuleRtpRtcpImpl::SetSendingMediaStatus(const bool sending) {
- if (sending) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetSendingMediaStatus(sending)");
- } else {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetSendingMediaStatus(stopped)");
- }
rtp_sender_.SetSendingMediaStatus(sending);
return 0;
}
bool ModuleRtpRtcpImpl::SendingMedia() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "Sending()");
-
- const bool have_child_modules(child_modules_.empty() ? false : true);
- if (!have_child_modules) {
+ if (!IsDefaultModule()) {
return rtp_sender_.SendingMedia();
}
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::const_iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::const_iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RTPSender& rtp_sender = (*it)->rtp_sender_;
if (rtp_sender.SendingMedia()) {
@@ -582,17 +453,9 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
uint32_t payload_size,
const RTPFragmentationHeader* fragmentation,
const RTPVideoHeader* rtp_video_hdr) {
- WEBRTC_TRACE(
- kTraceStream,
- kTraceRtpRtcp,
- id_,
- "SendOutgoingData(frame_type:%d payload_type:%d time_stamp:%u size:%u)",
- frame_type, payload_type, time_stamp, payload_size);
-
rtcp_sender_.SetLastRtpTime(time_stamp, capture_time_ms);
- const bool have_child_modules(child_modules_.empty() ? false : true);
- if (!have_child_modules) {
+ if (!IsDefaultModule()) {
// Don't send RTCP from default module.
if (rtcp_sender_.TimeToSendRTCPReport(kVideoFrameKey == frame_type)) {
RTCPSender::FeedbackState feedback_state(this);
@@ -615,7 +478,7 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
return -1;
}
int idx = 0;
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
for (; idx < rtp_video_hdr->simulcastIdx; ++it) {
if (it == child_modules_.end()) {
return -1;
@@ -633,11 +496,6 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
if (it == child_modules_.end()) {
return -1;
}
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SendOutgoingData(SimulcastIdx:%u size:%u, ssrc:0x%x)",
- idx, payload_size, (*it)->rtp_sender_.SSRC());
return (*it)->SendOutgoingData(frame_type,
payload_type,
time_stamp,
@@ -647,7 +505,7 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
fragmentation,
rtp_video_hdr);
} else {
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
// Send to all "child" modules
while (it != child_modules_.end()) {
if ((*it)->SendingMedia()) {
@@ -670,19 +528,7 @@ bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
bool retransmission) {
- WEBRTC_TRACE(
- kTraceStream,
- kTraceRtpRtcp,
- id_,
- "TimeToSendPacket(ssrc:0x%x sequence_number:%u capture_time_ms:%ll)",
- ssrc, sequence_number, capture_time_ms);
-
- bool no_child_modules = false;
- {
- CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- no_child_modules = child_modules_.empty();
- }
- if (no_child_modules) {
+ if (!IsDefaultModule()) {
// Don't send from default module.
if (SendingMedia() && ssrc == rtp_sender_.SSRC()) {
return rtp_sender_.TimeToSendPacket(sequence_number, capture_time_ms,
@@ -690,7 +536,7 @@ bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,
}
} else {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
if ((*it)->SendingMedia() && ssrc == (*it)->rtp_sender_.SSRC()) {
return (*it)->rtp_sender_.TimeToSendPacket(sequence_number,
@@ -705,28 +551,18 @@ bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,
}
int ModuleRtpRtcpImpl::TimeToSendPadding(int bytes) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_, "TimeToSendPadding(bytes: %d)",
- bytes);
-
- bool no_child_modules = false;
- {
- CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- no_child_modules = child_modules_.empty();
- }
- if (no_child_modules) {
+ if (!IsDefaultModule()) {
// Don't send from default module.
if (SendingMedia()) {
return rtp_sender_.TimeToSendPadding(bytes);
}
} else {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
- while (it != child_modules_.end()) {
+ for (size_t i = 0; i < child_modules_.size(); ++i) {
// Send padding on one of the modules sending media.
- if ((*it)->SendingMedia()) {
- return (*it)->rtp_sender_.TimeToSendPadding(bytes);
+ if (child_modules_[i]->SendingMedia()) {
+ return child_modules_[i]->rtp_sender_.TimeToSendPadding(bytes);
}
- ++it;
}
}
return 0;
@@ -737,7 +573,7 @@ bool ModuleRtpRtcpImpl::GetSendSideDelay(int* avg_send_delay_ms,
assert(avg_send_delay_ms);
assert(max_send_delay_ms);
- if (!child_modules_.empty()) {
+ if (IsDefaultModule()) {
// This API is only supported for child modules.
return false;
}
@@ -745,26 +581,17 @@ bool ModuleRtpRtcpImpl::GetSendSideDelay(int* avg_send_delay_ms,
}
uint16_t ModuleRtpRtcpImpl::MaxPayloadLength() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "MaxPayloadLength()");
-
return rtp_sender_.MaxPayloadLength();
}
uint16_t ModuleRtpRtcpImpl::MaxDataPayloadLength() const {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "MaxDataPayloadLength()");
-
// Assuming IP/UDP.
uint16_t min_data_payload_length = IP_PACKET_SIZE - 28;
- const bool default_instance(child_modules_.empty() ? false : true);
- if (default_instance) {
+ if (IsDefaultModule()) {
// For default we need to update all child modules too.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::const_iterator it =
- child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::const_iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -789,13 +616,6 @@ int32_t ModuleRtpRtcpImpl::SetTransportOverhead(
const bool tcp,
const bool ipv6,
const uint8_t authentication_overhead) {
- WEBRTC_TRACE(
- kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetTransportOverhead(TCP:%d, IPV6:%d authentication_overhead:%u)",
- tcp, ipv6, authentication_overhead);
-
uint16_t packet_overhead = 0;
if (ipv6) {
packet_overhead = 40;
@@ -827,12 +647,8 @@ int32_t ModuleRtpRtcpImpl::SetTransportOverhead(
}
int32_t ModuleRtpRtcpImpl::SetMaxTransferUnit(const uint16_t mtu) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetMaxTransferUnit(%u)",
- mtu);
-
if (mtu > IP_PACKET_SIZE) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Invalid in argument to SetMaxTransferUnit(%u)", mtu);
+ LOG(LS_ERROR) << "Invalid mtu: " << mtu;
return -1;
}
return rtp_sender_.SetMaxPayloadLength(mtu - packet_overhead_,
@@ -840,8 +656,6 @@ int32_t ModuleRtpRtcpImpl::SetMaxTransferUnit(const uint16_t mtu) {
}
RTCPMethod ModuleRtpRtcpImpl::RTCP() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RTCP()");
-
if (rtcp_sender_.Status() != kRtcpOff) {
return rtcp_receiver_.Status();
}
@@ -850,9 +664,6 @@ RTCPMethod ModuleRtpRtcpImpl::RTCP() const {
// Configure RTCP status i.e on/off.
int32_t ModuleRtpRtcpImpl::SetRTCPStatus(const RTCPMethod method) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetRTCPStatus(%d)",
- method);
-
if (rtcp_sender_.SetRTCPStatus(method) == 0) {
return rtcp_receiver_.SetRTCPStatus(method);
}
@@ -866,36 +677,26 @@ uint32_t ModuleRtpRtcpImpl::LastSendReport(
}
int32_t ModuleRtpRtcpImpl::SetCNAME(const char c_name[RTCP_CNAME_SIZE]) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetCNAME(%s)", c_name);
return rtcp_sender_.SetCNAME(c_name);
}
int32_t ModuleRtpRtcpImpl::CNAME(char c_name[RTCP_CNAME_SIZE]) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "CNAME()");
return rtcp_sender_.CNAME(c_name);
}
int32_t ModuleRtpRtcpImpl::AddMixedCNAME(
const uint32_t ssrc,
const char c_name[RTCP_CNAME_SIZE]) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "AddMixedCNAME(SSRC:%u)", ssrc);
-
return rtcp_sender_.AddMixedCNAME(ssrc, c_name);
}
int32_t ModuleRtpRtcpImpl::RemoveMixedCNAME(const uint32_t ssrc) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "RemoveMixedCNAME(SSRC:%u)", ssrc);
return rtcp_sender_.RemoveMixedCNAME(ssrc);
}
int32_t ModuleRtpRtcpImpl::RemoteCNAME(
const uint32_t remote_ssrc,
char c_name[RTCP_CNAME_SIZE]) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "RemoteCNAME(SSRC:%u)", remote_ssrc);
-
return rtcp_receiver_.CNAME(remote_ssrc, c_name);
}
@@ -905,8 +706,6 @@ int32_t ModuleRtpRtcpImpl::RemoteNTP(
uint32_t* rtcp_arrival_time_secs,
uint32_t* rtcp_arrival_time_frac,
uint32_t* rtcp_timestamp) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteNTP()");
-
return rtcp_receiver_.NTP(received_ntpsecs,
received_ntpfrac,
rtcp_arrival_time_secs,
@@ -920,24 +719,21 @@ int32_t ModuleRtpRtcpImpl::RTT(const uint32_t remote_ssrc,
uint16_t* avg_rtt,
uint16_t* min_rtt,
uint16_t* max_rtt) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RTT()");
-
- return rtcp_receiver_.RTT(remote_ssrc, rtt, avg_rtt, min_rtt, max_rtt);
+ int32_t ret = rtcp_receiver_.RTT(remote_ssrc, rtt, avg_rtt, min_rtt, max_rtt);
+ if (rtt && *rtt == 0) {
+ // Try to get RTT from RtcpRttStats class.
+ *rtt = static_cast<uint16_t>(rtt_ms());
+ }
+ return ret;
}
// Reset RoundTripTime statistics.
int32_t ModuleRtpRtcpImpl::ResetRTT(const uint32_t remote_ssrc) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ResetRTT(SSRC:%u)",
- remote_ssrc);
-
return rtcp_receiver_.ResetRTT(remote_ssrc);
}
// Reset RTP data counters for the sending side.
int32_t ModuleRtpRtcpImpl::ResetSendDataCountersRTP() {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "ResetSendDataCountersRTP()");
-
rtp_sender_.ResetDataCounters();
return 0; // TODO(pwestin): change to void.
}
@@ -945,8 +741,6 @@ int32_t ModuleRtpRtcpImpl::ResetSendDataCountersRTP() {
// Force a send of an RTCP packet.
// Normal SR and RR are triggered via the process function.
int32_t ModuleRtpRtcpImpl::SendRTCP(uint32_t rtcp_packet_type) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SendRTCP(0x%x)",
- rtcp_packet_type);
RTCPSender::FeedbackState feedback_state(this);
return rtcp_sender_.SendRTCP(feedback_state, rtcp_packet_type);
}
@@ -956,24 +750,16 @@ int32_t ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData(
const uint32_t name,
const uint8_t* data,
const uint16_t length) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetRTCPApplicationSpecificData(sub_type:%d name:0x%x)",
- sub_type, name);
-
return rtcp_sender_.SetApplicationSpecificData(sub_type, name, data, length);
}
// (XR) VOIP metric.
int32_t ModuleRtpRtcpImpl::SetRTCPVoIPMetrics(
const RTCPVoIPMetric* voip_metric) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetRTCPVoIPMetrics()");
-
return rtcp_sender_.SetRTCPVoIPMetrics(voip_metric);
}
void ModuleRtpRtcpImpl::SetRtcpXrRrtrStatus(bool enable) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetRtcpXrRrtrStatus(%s)", enable ? "true" : "false");
return rtcp_sender_.SendRtcpXrReceiverReferenceTime(enable);
}
@@ -984,8 +770,6 @@ bool ModuleRtpRtcpImpl::RtcpXrRrtrStatus() const {
int32_t ModuleRtpRtcpImpl::DataCountersRTP(
uint32_t* bytes_sent,
uint32_t* packets_sent) const {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_, "DataCountersRTP()");
-
if (bytes_sent) {
*bytes_sent = rtp_sender_.Bytes();
}
@@ -996,77 +780,54 @@ int32_t ModuleRtpRtcpImpl::DataCountersRTP(
}
int32_t ModuleRtpRtcpImpl::RemoteRTCPStat(RTCPSenderInfo* sender_info) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteRTCPStat()");
-
return rtcp_receiver_.SenderInfoReceived(sender_info);
}
// Received RTCP report.
int32_t ModuleRtpRtcpImpl::RemoteRTCPStat(
std::vector<RTCPReportBlock>* receive_blocks) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteRTCPStat()");
-
return rtcp_receiver_.StatisticsReceived(receive_blocks);
}
int32_t ModuleRtpRtcpImpl::AddRTCPReportBlock(
const uint32_t ssrc,
const RTCPReportBlock* report_block) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "AddRTCPReportBlock()");
-
return rtcp_sender_.AddExternalReportBlock(ssrc, report_block);
}
int32_t ModuleRtpRtcpImpl::RemoveRTCPReportBlock(
const uint32_t ssrc) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoveRTCPReportBlock()");
-
return rtcp_sender_.RemoveExternalReportBlock(ssrc);
}
+void ModuleRtpRtcpImpl::GetRtcpPacketTypeCounters(
+ RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const {
+ rtcp_sender_.GetPacketTypeCounter(packets_sent);
+ rtcp_receiver_.GetPacketTypeCounter(packets_received);
+}
+
// (REMB) Receiver Estimated Max Bitrate.
bool ModuleRtpRtcpImpl::REMB() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "REMB()");
-
return rtcp_sender_.REMB();
}
int32_t ModuleRtpRtcpImpl::SetREMBStatus(const bool enable) {
- if (enable) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetREMBStatus(enable)");
- } else {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetREMBStatus(disable)");
- }
return rtcp_sender_.SetREMBStatus(enable);
}
int32_t ModuleRtpRtcpImpl::SetREMBData(const uint32_t bitrate,
const uint8_t number_of_ssrc,
const uint32_t* ssrc) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetREMBData(bitrate:%d,?,?)", bitrate);
return rtcp_sender_.SetREMBData(bitrate, number_of_ssrc, ssrc);
}
// (IJ) Extended jitter report.
bool ModuleRtpRtcpImpl::IJ() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "IJ()");
-
return rtcp_sender_.IJ();
}
int32_t ModuleRtpRtcpImpl::SetIJStatus(const bool enable) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetIJStatus(%s)", enable ? "true" : "false");
-
return rtcp_sender_.SetIJStatus(enable);
}
@@ -1083,25 +844,14 @@ int32_t ModuleRtpRtcpImpl::DeregisterSendRtpHeaderExtension(
// (TMMBR) Temporary Max Media Bit Rate.
bool ModuleRtpRtcpImpl::TMMBR() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "TMMBR()");
-
return rtcp_sender_.TMMBR();
}
int32_t ModuleRtpRtcpImpl::SetTMMBRStatus(const bool enable) {
- if (enable) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetTMMBRStatus(enable)");
- } else {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetTMMBRStatus(disable)");
- }
return rtcp_sender_.SetTMMBRStatus(enable);
}
int32_t ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* bounding_set) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetTMMBN()");
-
uint32_t max_bitrate_kbit =
rtp_sender_.MaxConfiguredBitrateVideo() / 1000;
return rtcp_sender_.SetTMMBN(bounding_set, max_bitrate_kbit);
@@ -1109,32 +859,18 @@ int32_t ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* bounding_set) {
// Returns the currently configured retransmission mode.
int ModuleRtpRtcpImpl::SelectiveRetransmissions() const {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SelectiveRetransmissions()");
return rtp_sender_.SelectiveRetransmissions();
}
// Enable or disable a retransmission mode, which decides which packets will
// be retransmitted if NACKed.
int ModuleRtpRtcpImpl::SetSelectiveRetransmissions(uint8_t settings) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetSelectiveRetransmissions(%u)",
- settings);
return rtp_sender_.SetSelectiveRetransmissions(settings);
}
// Send a Negative acknowledgment packet.
int32_t ModuleRtpRtcpImpl::SendNACK(const uint16_t* nack_list,
const uint16_t size) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SendNACK(size:%u)", size);
-
// Use RTT from RtcpRttStats class if provided.
uint16_t rtt = rtt_ms();
if (rtt == 0) {
@@ -1188,14 +924,6 @@ int32_t ModuleRtpRtcpImpl::SendNACK(const uint16_t* nack_list,
int32_t ModuleRtpRtcpImpl::SetStorePacketsStatus(
const bool enable,
const uint16_t number_to_store) {
- if (enable) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetStorePacketsStatus(enable, number_to_store:%d)",
- number_to_store);
- } else {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetStorePacketsStatus(disable)");
- }
rtp_sender_.SetStorePacketsStatus(enable, number_to_store);
return 0; // TODO(pwestin): change to void.
}
@@ -1219,21 +947,11 @@ int32_t ModuleRtpRtcpImpl::SendTelephoneEventOutband(
const uint8_t key,
const uint16_t time_ms,
const uint8_t level) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SendTelephoneEventOutband(key:%u, time_ms:%u, level:%u)", key,
- time_ms, level);
-
return rtp_sender_.SendTelephoneEvent(key, time_ms, level);
}
bool ModuleRtpRtcpImpl::SendTelephoneEventActive(
int8_t& telephone_event) const {
-
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SendTelephoneEventActive()");
-
return rtp_sender_.SendTelephoneEventActive(&telephone_event);
}
@@ -1241,68 +959,23 @@ bool ModuleRtpRtcpImpl::SendTelephoneEventActive(
// packet in silence (CNG).
int32_t ModuleRtpRtcpImpl::SetAudioPacketSize(
const uint16_t packet_size_samples) {
-
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetAudioPacketSize(%u)",
- packet_size_samples);
-
return rtp_sender_.SetAudioPacketSize(packet_size_samples);
}
-int32_t ModuleRtpRtcpImpl::SetRTPAudioLevelIndicationStatus(
- const bool enable,
- const uint8_t id) {
-
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetRTPAudioLevelIndicationStatus(enable=%d, ID=%u)",
- enable,
- id);
-
- return rtp_sender_.SetAudioLevelIndicationStatus(enable, id);
-}
-
-int32_t ModuleRtpRtcpImpl::GetRTPAudioLevelIndicationStatus(
- bool& enable,
- uint8_t& id) const {
-
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "GetRTPAudioLevelIndicationStatus()");
- return rtp_sender_.AudioLevelIndicationStatus(&enable, &id);
-}
-
int32_t ModuleRtpRtcpImpl::SetAudioLevel(
const uint8_t level_d_bov) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetAudioLevel(level_d_bov:%u)",
- level_d_bov);
return rtp_sender_.SetAudioLevel(level_d_bov);
}
// Set payload type for Redundant Audio Data RFC 2198.
int32_t ModuleRtpRtcpImpl::SetSendREDPayloadType(
const int8_t payload_type) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetSendREDPayloadType(%d)",
- payload_type);
-
return rtp_sender_.SetRED(payload_type);
}
// Get payload type for Redundant Audio Data RFC 2198.
int32_t ModuleRtpRtcpImpl::SendREDPayloadType(
int8_t& payload_type) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SendREDPayloadType()");
-
return rtp_sender_.RED(&payload_type);
}
@@ -1312,54 +985,41 @@ RtpVideoCodecTypes ModuleRtpRtcpImpl::SendVideoCodec() const {
void ModuleRtpRtcpImpl::SetTargetSendBitrate(
const std::vector<uint32_t>& stream_bitrates) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetTargetSendBitrate: %ld streams", stream_bitrates.size());
-
- const bool have_child_modules(child_modules_.empty() ? false : true);
- if (have_child_modules) {
+ if (IsDefaultModule()) {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
if (simulcast_) {
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
for (size_t i = 0;
it != child_modules_.end() && i < stream_bitrates.size(); ++it) {
if ((*it)->SendingMedia()) {
RTPSender& rtp_sender = (*it)->rtp_sender_;
- rtp_sender.SetTargetSendBitrate(stream_bitrates[i]);
+ rtp_sender.SetTargetBitrate(stream_bitrates[i]);
++i;
}
}
} else {
- assert(stream_bitrates.size() == 1);
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ if (stream_bitrates.size() > 1)
+ return;
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
for (; it != child_modules_.end(); ++it) {
RTPSender& rtp_sender = (*it)->rtp_sender_;
- rtp_sender.SetTargetSendBitrate(stream_bitrates[0]);
+ rtp_sender.SetTargetBitrate(stream_bitrates[0]);
}
}
} else {
- assert(stream_bitrates.size() == 1);
- rtp_sender_.SetTargetSendBitrate(stream_bitrates[0]);
+ if (stream_bitrates.size() > 1)
+ return;
+ rtp_sender_.SetTargetBitrate(stream_bitrates[0]);
}
}
int32_t ModuleRtpRtcpImpl::SetKeyFrameRequestMethod(
const KeyFrameRequestMethod method) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetKeyFrameRequestMethod(method:%u)",
- method);
-
key_frame_req_method_ = method;
return 0;
}
int32_t ModuleRtpRtcpImpl::RequestKeyFrame() {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "RequestKeyFrame");
-
switch (key_frame_req_method_) {
case kKeyFrameReqFirRtp:
return rtp_sender_.SendRTPIntraRequest();
@@ -1373,29 +1033,15 @@ int32_t ModuleRtpRtcpImpl::RequestKeyFrame() {
int32_t ModuleRtpRtcpImpl::SendRTCPSliceLossIndication(
const uint8_t picture_id) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SendRTCPSliceLossIndication (picture_id:%d)",
- picture_id);
-
RTCPSender::FeedbackState feedback_state(this);
return rtcp_sender_.SendRTCP(
feedback_state, kRtcpSli, 0, 0, false, picture_id);
}
int32_t ModuleRtpRtcpImpl::SetCameraDelay(const int32_t delay_ms) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetCameraDelay(%d)",
- delay_ms);
- const bool default_instance(child_modules_.empty() ? false : true);
-
- if (default_instance) {
+ if (IsDefaultModule()) {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
-
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -1412,18 +1058,6 @@ int32_t ModuleRtpRtcpImpl::SetGenericFECStatus(
const bool enable,
const uint8_t payload_type_red,
const uint8_t payload_type_fec) {
- if (enable) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetGenericFECStatus(enable, %u)",
- payload_type_red);
- } else {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetGenericFECStatus(disable)");
- }
return rtp_sender_.SetGenericFECStatus(enable,
payload_type_red,
payload_type_fec);
@@ -1433,15 +1067,11 @@ int32_t ModuleRtpRtcpImpl::GenericFECStatus(
bool& enable,
uint8_t& payload_type_red,
uint8_t& payload_type_fec) {
-
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "GenericFECStatus()");
-
bool child_enabled = false;
- const bool default_instance(child_modules_.empty() ? false : true);
- if (default_instance) {
+ if (IsDefaultModule()) {
// For default we need to check all child modules too.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -1471,12 +1101,11 @@ int32_t ModuleRtpRtcpImpl::GenericFECStatus(
int32_t ModuleRtpRtcpImpl::SetFecParameters(
const FecProtectionParams* delta_params,
const FecProtectionParams* key_params) {
- const bool default_instance(child_modules_.empty() ? false : true);
- if (default_instance) {
+ if (IsDefaultModule()) {
// For default we need to update all child modules too.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -1517,9 +1146,7 @@ void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate,
uint32_t* video_rate,
uint32_t* fec_rate,
uint32_t* nack_rate) const {
- const bool default_instance(child_modules_.empty() ? false : true);
-
- if (default_instance) {
+ if (IsDefaultModule()) {
// For default we need to update the send bitrate.
CriticalSectionScoped lock(critical_section_module_ptrs_feedback_.get());
@@ -1532,8 +1159,7 @@ void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate,
if (nack_rate != NULL)
*nack_rate = 0;
- std::list<ModuleRtpRtcpImpl*>::const_iterator it =
- child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::const_iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -1570,21 +1196,7 @@ void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate,
void ModuleRtpRtcpImpl::RegisterVideoBitrateObserver(
BitrateStatisticsObserver* observer) {
- {
- CriticalSectionScoped cs(critical_section_module_ptrs_.get());
- if (!child_modules_.empty()) {
- for (std::list<ModuleRtpRtcpImpl*>::const_iterator it =
- child_modules_.begin();
- it != child_modules_.end();
- ++it) {
- RtpRtcp* module = *it;
- if (module)
- module->RegisterVideoBitrateObserver(observer);
- }
- return;
- }
- }
-
+ assert(!IsDefaultModule());
rtp_sender_.RegisterBitrateObserver(observer);
}
@@ -1592,7 +1204,6 @@ BitrateStatisticsObserver* ModuleRtpRtcpImpl::GetVideoBitrateObserver() const {
return rtp_sender_.GetBitrateObserver();
}
-// Bad state of RTP receiver request a keyframe.
void ModuleRtpRtcpImpl::OnRequestIntraFrame() {
RequestKeyFrame();
}
@@ -1716,4 +1327,9 @@ FrameCountObserver* ModuleRtpRtcpImpl::GetSendFrameCountObserver() const {
return rtp_sender_.GetFrameCountObserver();
}
+bool ModuleRtpRtcpImpl::IsDefaultModule() const {
+ CriticalSectionScoped cs(critical_section_module_ptrs_.get());
+ return !child_modules_.empty();
+}
+
} // namespace webrtc
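
The hunks above fold the repeated child_modules_.empty() checks into the new IsDefaultModule() helper, which takes the module-pointer lock and reports whether this instance aggregates child modules; callers then either fan a call out to every child or act on their own state. A minimal self-contained sketch of that pattern, using simplified stand-in types and std::mutex instead of CriticalSectionWrapper (not the actual WebRTC classes):

#include <mutex>
#include <utility>
#include <vector>

// Stand-in for a simulcast child module.
class ChildModule {
 public:
  void SetCameraDelay(int delay_ms) { delay_ms_ = delay_ms; }

 private:
  int delay_ms_ = 0;
};

class Module {
 public:
  explicit Module(std::vector<ChildModule*> children)
      : child_modules_(std::move(children)) {}

  void SetCameraDelay(int delay_ms) {
    if (IsDefaultModule()) {
      // Default (aggregating) instance: fan the call out to every child.
      std::lock_guard<std::mutex> lock(module_ptrs_lock_);
      for (ChildModule* module : child_modules_) {
        if (module)
          module->SetCameraDelay(delay_ms);
      }
      return;
    }
    delay_ms_ = delay_ms;  // Plain instance acts on its own state.
  }

 private:
  // Mirrors ModuleRtpRtcpImpl::IsDefaultModule(): the instance is the
  // default module exactly when it owns child modules.
  bool IsDefaultModule() const {
    std::lock_guard<std::mutex> lock(module_ptrs_lock_);
    return !child_modules_.empty();
  }

  mutable std::mutex module_ptrs_lock_;
  std::vector<ChildModule*> child_modules_;
  int delay_ms_ = 0;
};

IsDefaultModule() releases the lock before the caller re-acquires it for the fan-out loop, which is the same locking order the patched SetCameraDelay(), SetFecParameters() and BitrateSent() follow.
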
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index 075770dd235..55826b6fe8b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -21,10 +21,6 @@
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/testsupport/gtest_prod_util.h"
-#ifdef MATLAB
-class MatlabPlot;
-#endif
-
namespace webrtc {
class ModuleRtpRtcpImpl : public RtpRtcp {
@@ -80,7 +76,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
virtual uint32_t SSRC() const OVERRIDE;
// Configure SSRC, default is a random number.
- virtual int32_t SetSSRC(const uint32_t ssrc) OVERRIDE;
+ virtual void SetSSRC(const uint32_t ssrc) OVERRIDE;
virtual int32_t CSRCs(uint32_t arr_of_csrc[kRtpCsrcSize]) const OVERRIDE;
@@ -95,13 +91,12 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
virtual uint32_t ByteCountSent() const;
- virtual int32_t SetRTXSendStatus(const int mode,
- const bool set_ssrc,
- const uint32_t ssrc) OVERRIDE;
+ virtual void SetRTXSendStatus(const int mode) OVERRIDE;
- virtual int32_t RTXSendStatus(int* mode, uint32_t* ssrc,
- int* payloadType) const OVERRIDE;
+ virtual void RTXSendStatus(int* mode, uint32_t* ssrc,
+ int* payloadType) const OVERRIDE;
+ virtual void SetRtxSsrc(uint32_t ssrc) OVERRIDE;
virtual void SetRtxSendPayloadType(int payload_type) OVERRIDE;
@@ -197,10 +192,14 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Set received RTCP report block.
virtual int32_t AddRTCPReportBlock(
- const uint32_t ssrc, const RTCPReportBlock* receive_block) OVERRIDE;
+ const uint32_t ssrc, const RTCPReportBlock* receive_block) OVERRIDE;
virtual int32_t RemoveRTCPReportBlock(const uint32_t ssrc) OVERRIDE;
+ virtual void GetRtcpPacketTypeCounters(
+ RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const OVERRIDE;
+
// (REMB) Receiver Estimated Max Bitrate.
virtual bool REMB() const OVERRIDE;
@@ -291,14 +290,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Get payload type for Redundant Audio Data RFC 2198.
virtual int32_t SendREDPayloadType(int8_t& payload_type) const OVERRIDE;
- // Set status and id for header-extension-for-audio-level-indication.
- virtual int32_t SetRTPAudioLevelIndicationStatus(
- const bool enable, const uint8_t id) OVERRIDE;
-
- // Get status and id for header-extension-for-audio-level-indication.
- virtual int32_t GetRTPAudioLevelIndicationStatus(
- bool& enable, uint8_t& id) const OVERRIDE;
-
// Store the audio level in d_bov for header-extension-for-audio-level-
// indication.
virtual int32_t SetAudioLevel(const uint8_t level_d_bov) OVERRIDE;
@@ -418,6 +409,8 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
void set_rtt_ms(uint32_t rtt_ms);
uint32_t rtt_ms() const;
+ bool IsDefaultModule() const;
+
int32_t id_;
const bool audio_;
bool collision_detected_;
@@ -429,7 +422,8 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
scoped_ptr<CriticalSectionWrapper> critical_section_module_ptrs_;
scoped_ptr<CriticalSectionWrapper> critical_section_module_ptrs_feedback_;
ModuleRtpRtcpImpl* default_module_;
- std::list<ModuleRtpRtcpImpl*> child_modules_;
+ std::vector<ModuleRtpRtcpImpl*> child_modules_;
+ size_t padding_index_;
// Send side
NACKMethod nack_method_;
@@ -442,10 +436,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
RemoteBitrateEstimator* remote_bitrate_;
-#ifdef MATLAB
- MatlabPlot* plot1_;
-#endif
-
RtcpRttStats* rtt_stats_;
// The processed RTT from RtcpRttStats.
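
The GetRtcpPacketTypeCounters() accessor declared above is exercised by the RtcpPacketTypeCounter tests in the unit-test diff below. A short usage sketch that mirrors how those tests poll the counters; the nack_packets, fir_packets and pli_packets fields come from the tests, while the helper function itself and the assumption that RtcpPacketTypeCounter is declared in rtp_rtcp_defines.h are illustrative, not part of this patch:

#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"

// Returns true if the module has received any RTCP NACK, FIR or PLI
// feedback so far. Hypothetical helper for illustration only.
bool HasReceivedRtcpFeedback(const webrtc::ModuleRtpRtcpImpl& module) {
  webrtc::RtcpPacketTypeCounter sent;
  webrtc::RtcpPacketTypeCounter received;
  module.GetRtcpPacketTypeCounters(&sent, &received);
  return received.nack_packets > 0 || received.fir_packets > 0 ||
         received.pli_packets > 0;
}
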
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index 50f7f2e1cb3..eba4e010b59 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -12,11 +12,23 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
+#include "webrtc/modules/pacing/include/mock/mock_paced_sender.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
+#include "webrtc/system_wrappers/interface/scoped_vector.h"
+
+using ::testing::_;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::SaveArg;
namespace webrtc {
namespace {
+const uint32_t kSenderSsrc = 0x12345;
+const uint32_t kReceiverSsrc = 0x23456;
+const uint32_t kSenderRtxSsrc = 0x32345;
+const uint32_t kOneWayNetworkDelayMs = 100;
class RtcpRttStatsTestImpl : public RtcpRttStats {
public:
@@ -35,12 +47,12 @@ class RtcpRttStatsTestImpl : public RtcpRttStats {
class SendTransport : public Transport,
public NullRtpData {
public:
- SendTransport() : rtp_rtcp_impl_(NULL), clock_(NULL), delay_ms_(0) {}
+ SendTransport() : receiver_(NULL), clock_(NULL), delay_ms_(0) {}
- void SetRtpRtcpModule(ModuleRtpRtcpImpl* rtp_rtcp_impl) {
- rtp_rtcp_impl_ = rtp_rtcp_impl;
+ void SetRtpRtcpModule(ModuleRtpRtcpImpl* receiver) {
+ receiver_ = receiver;
}
- void SimulateNetworkDelay(int delay_ms, SimulatedClock* clock) {
+ void SimulateNetworkDelay(uint32_t delay_ms, SimulatedClock* clock) {
clock_ = clock;
delay_ms_ = delay_ms;
}
@@ -51,14 +63,48 @@ class SendTransport : public Transport,
if (clock_) {
clock_->AdvanceTimeMilliseconds(delay_ms_);
}
- EXPECT_TRUE(rtp_rtcp_impl_ != NULL);
- EXPECT_EQ(0, rtp_rtcp_impl_->IncomingRtcpPacket(
+ EXPECT_TRUE(receiver_ != NULL);
+ EXPECT_EQ(0, receiver_->IncomingRtcpPacket(
static_cast<const uint8_t*>(data), len));
return len;
}
- ModuleRtpRtcpImpl* rtp_rtcp_impl_;
+ ModuleRtpRtcpImpl* receiver_;
SimulatedClock* clock_;
- int delay_ms_;
+ uint32_t delay_ms_;
+};
+
+class RtpRtcpModule {
+ public:
+ RtpRtcpModule(SimulatedClock* clock)
+ : receive_statistics_(ReceiveStatistics::Create(clock)) {
+ RtpRtcp::Configuration config;
+ config.audio = false;
+ config.clock = clock;
+ config.outgoing_transport = &transport_;
+ config.receive_statistics = receive_statistics_.get();
+ config.rtt_stats = &rtt_stats_;
+
+ impl_.reset(new ModuleRtpRtcpImpl(config));
+ EXPECT_EQ(0, impl_->SetRTCPStatus(kRtcpCompound));
+
+ transport_.SimulateNetworkDelay(kOneWayNetworkDelayMs, clock);
+ }
+
+ RtcpPacketTypeCounter packets_sent_;
+ RtcpPacketTypeCounter packets_received_;
+ scoped_ptr<ReceiveStatistics> receive_statistics_;
+ SendTransport transport_;
+ RtcpRttStatsTestImpl rtt_stats_;
+ scoped_ptr<ModuleRtpRtcpImpl> impl_;
+
+ RtcpPacketTypeCounter RtcpSent() {
+ impl_->GetRtcpPacketTypeCounters(&packets_sent_, &packets_received_);
+ return packets_sent_;
+ }
+ RtcpPacketTypeCounter RtcpReceived() {
+ impl_->GetRtcpPacketTypeCounters(&packets_sent_, &packets_received_);
+ return packets_received_;
+ }
};
} // namespace
@@ -66,97 +112,357 @@ class RtpRtcpImplTest : public ::testing::Test {
protected:
RtpRtcpImplTest()
: clock_(1335900000),
- receive_statistics_(ReceiveStatistics::Create(&clock_)) {
- RtpRtcp::Configuration configuration;
- configuration.id = 0;
- configuration.audio = false;
- configuration.clock = &clock_;
- configuration.outgoing_transport = &transport_;
- configuration.receive_statistics = receive_statistics_.get();
- configuration.rtt_stats = &rtt_stats_;
-
- rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl(configuration));
- transport_.SetRtpRtcpModule(rtp_rtcp_impl_.get());
+ sender_(&clock_),
+ receiver_(&clock_) {
+ // Send module.
+ EXPECT_EQ(0, sender_.impl_->SetSendingStatus(true));
+ sender_.impl_->SetSSRC(kSenderSsrc);
+ sender_.impl_->SetRemoteSSRC(kReceiverSsrc);
+ // Receive module.
+ EXPECT_EQ(0, receiver_.impl_->SetSendingStatus(false));
+ receiver_.impl_->SetSSRC(kReceiverSsrc);
+ receiver_.impl_->SetRemoteSSRC(kSenderSsrc);
+ // Transport settings.
+ sender_.transport_.SetRtpRtcpModule(receiver_.impl_.get());
+ receiver_.transport_.SetRtpRtcpModule(sender_.impl_.get());
}
-
SimulatedClock clock_;
- scoped_ptr<ReceiveStatistics> receive_statistics_;
- scoped_ptr<ModuleRtpRtcpImpl> rtp_rtcp_impl_;
- SendTransport transport_;
- RtcpRttStatsTestImpl rtt_stats_;
+ RtpRtcpModule sender_;
+ RtpRtcpModule receiver_;
};
TEST_F(RtpRtcpImplTest, Rtt) {
- const uint32_t kSsrc = 0x12345;
- RTPHeader header = {};
+ RTPHeader header;
header.timestamp = 1;
header.sequenceNumber = 123;
- header.ssrc = kSsrc;
+ header.ssrc = kSenderSsrc;
header.headerLength = 12;
- receive_statistics_->IncomingPacket(header, 100, false);
-
- rtp_rtcp_impl_->SetRemoteSSRC(kSsrc);
- EXPECT_EQ(0, rtp_rtcp_impl_->SetSendingStatus(true));
- EXPECT_EQ(0, rtp_rtcp_impl_->SetRTCPStatus(kRtcpCompound));
- EXPECT_EQ(0, rtp_rtcp_impl_->SetSSRC(kSsrc));
+ receiver_.receive_statistics_->IncomingPacket(header, 100, false);
- // A SR should have been sent and received.
- EXPECT_EQ(0, rtp_rtcp_impl_->SendRTCP(kRtcpReport));
+ // Sender module should send a SR.
+ EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport));
- // Send new SR. A response to the last SR should be sent.
+ // Receiver module should send a RR with a response to the last received SR.
clock_.AdvanceTimeMilliseconds(1000);
- transport_.SimulateNetworkDelay(100, &clock_);
- EXPECT_EQ(0, rtp_rtcp_impl_->SendRTCP(kRtcpReport));
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport));
// Verify RTT.
uint16_t rtt;
uint16_t avg_rtt;
uint16_t min_rtt;
uint16_t max_rtt;
- EXPECT_EQ(0, rtp_rtcp_impl_->RTT(kSsrc, &rtt, &avg_rtt, &min_rtt, &max_rtt));
- EXPECT_EQ(100, rtt);
- EXPECT_EQ(100, avg_rtt);
- EXPECT_EQ(100, min_rtt);
- EXPECT_EQ(100, max_rtt);
+ EXPECT_EQ(0,
+ sender_.impl_->RTT(kReceiverSsrc, &rtt, &avg_rtt, &min_rtt, &max_rtt));
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, rtt);
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, avg_rtt);
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, min_rtt);
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, max_rtt);
// No RTT from other ssrc.
EXPECT_EQ(-1,
- rtp_rtcp_impl_->RTT(kSsrc + 1, &rtt, &avg_rtt, &min_rtt, &max_rtt));
+ sender_.impl_->RTT(kReceiverSsrc+1, &rtt, &avg_rtt, &min_rtt, &max_rtt));
// Verify RTT from rtt_stats config.
- EXPECT_EQ(0U, rtt_stats_.LastProcessedRtt());
- EXPECT_EQ(0U, rtp_rtcp_impl_->rtt_ms());
- rtp_rtcp_impl_->Process();
- EXPECT_EQ(100U, rtt_stats_.LastProcessedRtt());
- EXPECT_EQ(100U, rtp_rtcp_impl_->rtt_ms());
+ EXPECT_EQ(0U, sender_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(0U, sender_.impl_->rtt_ms());
+ sender_.impl_->Process();
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, sender_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, sender_.impl_->rtt_ms());
}
TEST_F(RtpRtcpImplTest, SetRtcpXrRrtrStatus) {
- EXPECT_FALSE(rtp_rtcp_impl_->RtcpXrRrtrStatus());
- rtp_rtcp_impl_->SetRtcpXrRrtrStatus(true);
- EXPECT_TRUE(rtp_rtcp_impl_->RtcpXrRrtrStatus());
+ EXPECT_FALSE(receiver_.impl_->RtcpXrRrtrStatus());
+ receiver_.impl_->SetRtcpXrRrtrStatus(true);
+ EXPECT_TRUE(receiver_.impl_->RtcpXrRrtrStatus());
}
TEST_F(RtpRtcpImplTest, RttForReceiverOnly) {
- rtp_rtcp_impl_->SetRtcpXrRrtrStatus(true);
- EXPECT_EQ(0, rtp_rtcp_impl_->SetSendingStatus(false));
- EXPECT_EQ(0, rtp_rtcp_impl_->SetRTCPStatus(kRtcpCompound));
- EXPECT_EQ(0, rtp_rtcp_impl_->SetSSRC(0x12345));
+ receiver_.impl_->SetRtcpXrRrtrStatus(true);
- // A Receiver time reference report (RTRR) should be sent and received.
- EXPECT_EQ(0, rtp_rtcp_impl_->SendRTCP(kRtcpReport));
+ // Receiver module should send a Receiver time reference report (RTRR).
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport));
- // Send new RTRR. A response to the last RTRR should be sent.
+ // Sender module should send a response to the last received RTRR (DLRR).
clock_.AdvanceTimeMilliseconds(1000);
- transport_.SimulateNetworkDelay(100, &clock_);
- EXPECT_EQ(0, rtp_rtcp_impl_->SendRTCP(kRtcpReport));
+ EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport));
// Verify RTT.
- EXPECT_EQ(0U, rtt_stats_.LastProcessedRtt());
- EXPECT_EQ(0U, rtp_rtcp_impl_->rtt_ms());
- rtp_rtcp_impl_->Process();
- EXPECT_EQ(100U, rtt_stats_.LastProcessedRtt());
- EXPECT_EQ(100U, rtp_rtcp_impl_->rtt_ms());
+ EXPECT_EQ(0U, receiver_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(0U, receiver_.impl_->rtt_ms());
+ receiver_.impl_->Process();
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, receiver_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, receiver_.impl_->rtt_ms());
+}
+
+TEST_F(RtpRtcpImplTest, RtcpPacketTypeCounter_Nack) {
+ EXPECT_EQ(0U, sender_.RtcpReceived().nack_packets);
+ EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets);
+ // Receive module sends a NACK.
+ const uint16_t kNackLength = 1;
+ uint16_t nack_list[kNackLength] = {123};
+ EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, receiver_.RtcpSent().nack_packets);
+
+ // Send module receives the NACK.
+ EXPECT_EQ(1U, sender_.RtcpReceived().nack_packets);
+}
+
+TEST_F(RtpRtcpImplTest, RtcpPacketTypeCounter_FirAndPli) {
+ EXPECT_EQ(0U, sender_.RtcpReceived().fir_packets);
+ EXPECT_EQ(0U, receiver_.RtcpSent().fir_packets);
+ // Receive module sends a FIR.
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpFir));
+ EXPECT_EQ(1U, receiver_.RtcpSent().fir_packets);
+ // Send module receives the FIR.
+ EXPECT_EQ(1U, sender_.RtcpReceived().fir_packets);
+
+ // Receive module sends a FIR and PLI.
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpFir | kRtcpPli));
+ EXPECT_EQ(2U, receiver_.RtcpSent().fir_packets);
+ EXPECT_EQ(1U, receiver_.RtcpSent().pli_packets);
+ // Send module receives the FIR and PLI.
+ EXPECT_EQ(2U, sender_.RtcpReceived().fir_packets);
+ EXPECT_EQ(1U, sender_.RtcpReceived().pli_packets);
+}
+
+class RtpSendingTestTransport : public Transport {
+ public:
+ void ResetCounters() { bytes_received_.clear(); }
+
+ virtual int SendPacket(int channel, const void* data, int length) {
+ RTPHeader header;
+ scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ EXPECT_TRUE(
+ parser->Parse(static_cast<const uint8_t*>(data), length, &header));
+ bytes_received_[header.ssrc] += length;
+ ++packets_received_[header.ssrc];
+ return length;
+ }
+
+ virtual int SendRTCPPacket(int channel, const void* data, int length) {
+ return length;
+ }
+
+ int GetPacketsReceived(uint32_t ssrc) const {
+ std::map<uint32_t, int>::const_iterator it = packets_received_.find(ssrc);
+ if (it == packets_received_.end())
+ return 0;
+ return it->second;
+ }
+
+ int GetBytesReceived(uint32_t ssrc) const {
+ std::map<uint32_t, int>::const_iterator it = bytes_received_.find(ssrc);
+ if (it == bytes_received_.end())
+ return 0;
+ return it->second;
+ }
+
+ int GetTotalBytesReceived() const {
+ int sum = 0;
+ for (std::map<uint32_t, int>::const_iterator it = bytes_received_.begin();
+ it != bytes_received_.end();
+ ++it) {
+ sum += it->second;
+ }
+ return sum;
+ }
+
+ private:
+ std::map<uint32_t, int> bytes_received_;
+ std::map<uint32_t, int> packets_received_;
+};
+
+class RtpSendingTest : public ::testing::Test {
+ protected:
+ // Map from SSRC to number of received packets and bytes.
+ typedef std::map<uint32_t, std::pair<int, int> > PaddingMap;
+
+ RtpSendingTest() {
+ // Send module.
+ RtpRtcp::Configuration config;
+ config.audio = false;
+ config.clock = Clock::GetRealTimeClock();
+ config.outgoing_transport = &transport_;
+ config.receive_statistics = receive_statistics_.get();
+ config.rtt_stats = &rtt_stats_;
+ config.paced_sender = &pacer_;
+ memset(&codec_, 0, sizeof(VideoCodec));
+ codec_.plType = 100;
+ strncpy(codec_.plName, "VP8", 3);
+ codec_.numberOfSimulcastStreams = 3;
+ codec_.simulcastStream[0].width = 320;
+ codec_.simulcastStream[0].height = 180;
+ codec_.simulcastStream[0].maxBitrate = 300;
+ codec_.simulcastStream[1].width = 640;
+ codec_.simulcastStream[1].height = 360;
+ codec_.simulcastStream[1].maxBitrate = 600;
+ codec_.simulcastStream[2].width = 1280;
+ codec_.simulcastStream[2].height = 720;
+ codec_.simulcastStream[2].maxBitrate = 1200;
+ // We need numberOfSimulcastStreams + 1 RTP modules since we need one
+ // default module.
+ for (int i = 0; i < codec_.numberOfSimulcastStreams + 1; ++i) {
+ RtpRtcp* sender = RtpRtcp::CreateRtpRtcp(config);
+ EXPECT_EQ(0, sender->RegisterSendPayload(codec_));
+ EXPECT_EQ(0, sender->SetSendingStatus(true));
+ EXPECT_EQ(0, sender->SetSendingMediaStatus(true));
+ sender->SetSSRC(kSenderSsrc + i);
+ sender->SetRemoteSSRC(kReceiverSsrc + i);
+ senders_.push_back(sender);
+ config.default_module = senders_[0];
+ }
+ std::vector<uint32_t> bitrates;
+ bitrates.push_back(codec_.simulcastStream[0].maxBitrate);
+ bitrates.push_back(codec_.simulcastStream[1].maxBitrate);
+ bitrates.push_back(codec_.simulcastStream[2].maxBitrate);
+ senders_[0]->SetTargetSendBitrate(bitrates);
+ }
+
+ ~RtpSendingTest() {
+ for (int i = senders_.size() - 1; i >= 0; --i) {
+ delete senders_[i];
+ }
+ }
+
+ void SendFrameOnSender(int sender_index,
+ const uint8_t* payload,
+ size_t length) {
+ RTPVideoHeader rtp_video_header = {
+ codec_.simulcastStream[sender_index].width,
+ codec_.simulcastStream[sender_index].height,
+ true,
+ 0,
+ kRtpVideoVp8,
+ {}};
+ uint32_t seq_num = 0;
+ uint32_t ssrc = 0;
+ int64_t capture_time_ms = 0;
+ bool retransmission = false;
+ EXPECT_CALL(pacer_, SendPacket(_, _, _, _, _, _))
+ .WillRepeatedly(DoAll(SaveArg<1>(&ssrc),
+ SaveArg<2>(&seq_num),
+ SaveArg<3>(&capture_time_ms),
+ SaveArg<5>(&retransmission),
+ Return(true)));
+ EXPECT_EQ(0,
+ senders_[sender_index]->SendOutgoingData(kVideoFrameKey,
+ codec_.plType,
+ 0,
+ 0,
+ payload,
+ length,
+ NULL,
+ &rtp_video_header));
+ EXPECT_TRUE(senders_[sender_index]->TimeToSendPacket(
+ ssrc, seq_num, capture_time_ms, retransmission));
+ }
+
+ void ExpectPadding(const PaddingMap& expected_padding) {
+ int expected_total_bytes = 0;
+ for (PaddingMap::const_iterator it = expected_padding.begin();
+ it != expected_padding.end();
+ ++it) {
+ int packets_received = transport_.GetPacketsReceived(it->first);
+ if (it->second.first > 0) {
+ EXPECT_GE(packets_received, it->second.first)
+ << "On SSRC: " << it->first;
+ }
+ int bytes_received = transport_.GetBytesReceived(it->first);
+ expected_total_bytes += bytes_received;
+ if (it->second.second > 0) {
+ EXPECT_GE(bytes_received, it->second.second)
+ << "On SSRC: " << it->first;
+ } else {
+ EXPECT_EQ(0, bytes_received) << "On SSRC: " << it->first;
+ }
+ }
+ EXPECT_EQ(expected_total_bytes, transport_.GetTotalBytesReceived());
+ }
+
+ scoped_ptr<ReceiveStatistics> receive_statistics_;
+ RtcpRttStatsTestImpl rtt_stats_;
+ std::vector<RtpRtcp*> senders_;
+ RtpSendingTestTransport transport_;
+ NiceMock<MockPacedSender> pacer_;
+ VideoCodec codec_;
+};
+
+TEST_F(RtpSendingTest, DISABLED_RoundRobinPadding) {
+ // We have to send on an SSRC to be allowed to pad, since a marker bit must
+ // be sent prior to padding packets.
+ const uint8_t payload[200] = {0};
+ for (int i = 0; i < codec_.numberOfSimulcastStreams; ++i) {
+ SendFrameOnSender(i + 1, payload, sizeof(payload));
+ }
+ transport_.ResetCounters();
+ senders_[0]->TimeToSendPadding(500);
+ PaddingMap expected_padding;
+ expected_padding[kSenderSsrc + 1] = std::make_pair(2, 500);
+ expected_padding[kSenderSsrc + 2] = std::make_pair(0, 0);
+ expected_padding[kSenderSsrc + 3] = std::make_pair(0, 0);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1000);
+ expected_padding[kSenderSsrc + 2] = std::make_pair(4, 1000);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1500);
+ expected_padding[kSenderSsrc + 3] = std::make_pair(6, 1500);
+ ExpectPadding(expected_padding);
+}
+
+TEST_F(RtpSendingTest, DISABLED_RoundRobinPaddingRtx) {
+ // Enable RTX to allow padding to be sent prior to media.
+ for (int i = 1; i < codec_.numberOfSimulcastStreams + 1; ++i) {
+ senders_[i]->SetRtxSendPayloadType(96);
+ senders_[i]->SetRtxSsrc(kSenderRtxSsrc + i);
+ senders_[i]->SetRTXSendStatus(kRtxRetransmitted);
+ }
+ transport_.ResetCounters();
+ senders_[0]->TimeToSendPadding(500);
+ PaddingMap expected_padding;
+ expected_padding[kSenderSsrc + 1] = std::make_pair(0, 0);
+ expected_padding[kSenderSsrc + 2] = std::make_pair(0, 0);
+ expected_padding[kSenderSsrc + 3] = std::make_pair(0, 0);
+ expected_padding[kSenderRtxSsrc + 1] = std::make_pair(2, 500);
+ expected_padding[kSenderRtxSsrc + 2] = std::make_pair(0, 0);
+ expected_padding[kSenderRtxSsrc + 3] = std::make_pair(0, 0);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1000);
+ expected_padding[kSenderRtxSsrc + 2] = std::make_pair(4, 500);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1500);
+
+ expected_padding[kSenderRtxSsrc + 3] = std::make_pair(6, 500);
+ ExpectPadding(expected_padding);
}
+TEST_F(RtpSendingTest, DISABLED_RoundRobinPaddingRtxRedundantPayloads) {
+ for (int i = 1; i < codec_.numberOfSimulcastStreams + 1; ++i) {
+ senders_[i]->SetRtxSendPayloadType(96);
+ senders_[i]->SetRtxSsrc(kSenderRtxSsrc + i);
+ senders_[i]->SetRTXSendStatus(kRtxRetransmitted | kRtxRedundantPayloads);
+ senders_[i]->SetStorePacketsStatus(true, 100);
+ }
+ // First send payloads so that we have something to retransmit.
+ const size_t kPayloadSize = 500;
+ const uint8_t payload[kPayloadSize] = {0};
+ for (int i = 0; i < codec_.numberOfSimulcastStreams; ++i) {
+ SendFrameOnSender(i + 1, payload, sizeof(payload));
+ }
+ transport_.ResetCounters();
+ senders_[0]->TimeToSendPadding(500);
+ PaddingMap expected_padding;
+ expected_padding[kSenderSsrc + 1] = std::make_pair<int, int>(0, 0);
+ expected_padding[kSenderSsrc + 2] = std::make_pair<int, int>(0, 0);
+ expected_padding[kSenderSsrc + 3] = std::make_pair<int, int>(0, 0);
+ expected_padding[kSenderRtxSsrc + 1] = std::make_pair<int, int>(1, 500);
+ expected_padding[kSenderRtxSsrc + 2] = std::make_pair<int, int>(0, 0);
+ expected_padding[kSenderRtxSsrc + 3] = std::make_pair<int, int>(0, 0);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1000);
+ expected_padding[kSenderRtxSsrc + 2] = std::make_pair<int, int>(2, 1000);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1500);
+ expected_padding[kSenderRtxSsrc + 3] = std::make_pair<int, int>(3, 1500);
+ ExpectPadding(expected_padding);
+}
} // namespace webrtc
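
The rewritten fixture above replaces the single in-process module with a sender/receiver pair whose SendTransport loops each RTCP packet back into the peer module while advancing the shared SimulatedClock by kOneWayNetworkDelayMs, so every measured RTT comes out as exactly twice the one-way delay. A toy, self-contained illustration of that timing trick, with stand-in clock and link classes rather than the WebRTC ones:

#include <cassert>
#include <cstdint>

// Fake clock that only moves when told to, like SimulatedClock in the tests.
class FakeClock {
 public:
  explicit FakeClock(int64_t now_ms) : now_ms_(now_ms) {}
  int64_t TimeInMilliseconds() const { return now_ms_; }
  void AdvanceTimeMilliseconds(int64_t ms) { now_ms_ += ms; }

 private:
  int64_t now_ms_;
};

// One direction of the loopback: delivering a packet advances the clock by
// the configured one-way delay, as the tests' SendTransport does before
// handing the packet to the peer module.
class DelayedLink {
 public:
  DelayedLink(FakeClock* clock, int64_t delay_ms)
      : clock_(clock), delay_ms_(delay_ms) {}
  void Deliver() { clock_->AdvanceTimeMilliseconds(delay_ms_); }

 private:
  FakeClock* clock_;
  int64_t delay_ms_;
};

int main() {
  const int64_t kOneWayDelayMs = 100;
  FakeClock clock(1335900000);  // Same epoch the fixture seeds its clock with.
  DelayedLink sender_to_receiver(&clock, kOneWayDelayMs);
  DelayedLink receiver_to_sender(&clock, kOneWayDelayMs);

  const int64_t send_time_ms = clock.TimeInMilliseconds();
  sender_to_receiver.Deliver();  // SR reaches the receiver.
  receiver_to_sender.Deliver();  // The RR referencing that SR comes back.
  const int64_t rtt_ms = clock.TimeInMilliseconds() - send_time_ms;
  assert(rtt_ms == 2 * kOneWayDelayMs);  // Matches EXPECT_EQ(2 * kOneWayNetworkDelayMs, rtt).
  return 0;
}
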
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
index fd320324b8a..7cfcd7222c8 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
@@ -15,7 +15,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -56,7 +56,6 @@ RTPSender::RTPSender(const int32_t id,
transport_(transport),
sending_media_(true), // Default to sending media.
max_payload_length_(IP_PACKET_SIZE - 28), // Default is IP-v4/UDP.
- target_send_bitrate_(0),
packet_over_head_(28),
payload_type_(-1),
payload_type_map_(),
@@ -88,7 +87,9 @@ RTPSender::RTPSender(const int32_t id,
csrcs_(),
include_csrcs_(true),
rtx_(kRtxOff),
- payload_type_rtx_(-1) {
+ payload_type_rtx_(-1),
+ target_bitrate_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+ target_bitrate_(0) {
memset(nack_byte_count_times_, 0, sizeof(nack_byte_count_times_));
memset(nack_byte_count_, 0, sizeof(nack_byte_count_));
memset(csrcs_, 0, sizeof(csrcs_));
@@ -104,9 +105,8 @@ RTPSender::RTPSender(const int32_t id,
audio_ = new RTPSenderAudio(id, clock_, this);
audio_->RegisterAudioCallback(audio_feedback);
} else {
- video_ = new RTPSenderVideo(id, clock_, this);
+ video_ = new RTPSenderVideo(clock_, this);
}
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
}
RTPSender::~RTPSender() {
@@ -125,12 +125,16 @@ RTPSender::~RTPSender() {
}
delete audio_;
delete video_;
+}
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id_, "%s deleted", __FUNCTION__);
+void RTPSender::SetTargetBitrate(uint32_t bitrate) {
+ CriticalSectionScoped cs(target_bitrate_critsect_.get());
+ target_bitrate_ = bitrate;
}
-void RTPSender::SetTargetSendBitrate(const uint32_t bits) {
- target_send_bitrate_ = static_cast<uint16_t>(bits / 1000);
+uint32_t RTPSender::GetTargetBitrate() {
+ CriticalSectionScoped cs(target_bitrate_critsect_.get());
+ return target_bitrate_;
}
uint16_t RTPSender::ActualSendBitrateKbit() const {
@@ -157,10 +161,12 @@ uint32_t RTPSender::NackOverheadRate() const {
bool RTPSender::GetSendSideDelay(int* avg_send_delay_ms,
int* max_send_delay_ms) const {
+ if (!SendingMedia())
+ return false;
CriticalSectionScoped cs(statistics_crit_.get());
SendDelayMap::const_iterator it = send_delays_.upper_bound(
clock_->TimeInMilliseconds() - kSendSideDelayWindowMs);
- if (!sending_media_ || it == send_delays_.end())
+ if (it == send_delays_.end())
return false;
int num_delays = 0;
for (; it != send_delays_.end(); ++it) {
@@ -273,7 +279,10 @@ int32_t RTPSender::DeRegisterSendPayload(
return 0;
}
-int8_t RTPSender::SendPayloadType() const { return payload_type_; }
+int8_t RTPSender::SendPayloadType() const {
+ CriticalSectionScoped cs(send_critsect_);
+ return payload_type_;
+}
int RTPSender::SendPayloadFrequency() const {
return audio_ != NULL ? audio_->AudioFrequency() : kVideoPayloadTypeFrequency;
@@ -284,16 +293,12 @@ int32_t RTPSender::SetMaxPayloadLength(
const uint16_t packet_over_head) {
// Sanity check.
if (max_payload_length < 100 || max_payload_length > IP_PACKET_SIZE) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_, "%s invalid argument",
- __FUNCTION__);
+ LOG(LS_ERROR) << "Invalid max payload length: " << max_payload_length;
return -1;
}
CriticalSectionScoped cs(send_critsect_);
max_payload_length_ = max_payload_length;
packet_over_head_ = packet_over_head;
-
- WEBRTC_TRACE(kTraceInfo, kTraceRtpRtcp, id_, "SetMaxPayloadLength to %d.",
- max_payload_length);
return 0;
}
@@ -301,9 +306,9 @@ uint16_t RTPSender::MaxDataPayloadLength() const {
if (audio_configured_) {
return max_payload_length_ - RTPHeaderLength();
} else {
- return max_payload_length_ - RTPHeaderLength() -
- video_->FECPacketOverhead() - ((rtx_) ? 2 : 0);
- // Include the FEC/ULP/RED overhead.
+ return max_payload_length_ - RTPHeaderLength() // RTP overhead.
+ - video_->FECPacketOverhead() // FEC/ULP/RED overhead.
+ - ((rtx_) ? 2 : 0); // RTX overhead.
}
}
@@ -313,16 +318,14 @@ uint16_t RTPSender::MaxPayloadLength() const {
uint16_t RTPSender::PacketOverHead() const { return packet_over_head_; }
-void RTPSender::SetRTXStatus(int mode, bool set_ssrc, uint32_t ssrc) {
+void RTPSender::SetRTXStatus(int mode) {
CriticalSectionScoped cs(send_critsect_);
rtx_ = mode;
- if (rtx_ != kRtxOff) {
- if (set_ssrc) {
- ssrc_rtx_ = ssrc;
- } else {
- ssrc_rtx_ = ssrc_db_.CreateSSRC(); // Can't be 0.
- }
- }
+}
+
+void RTPSender::SetRtxSsrc(uint32_t ssrc) {
+ CriticalSectionScoped cs(send_critsect_);
+ ssrc_rtx_ = ssrc;
}
void RTPSender::RTXStatus(int* mode, uint32_t* ssrc,
@@ -333,7 +336,6 @@ void RTPSender::RTXStatus(int* mode, uint32_t* ssrc,
*payload_type = payload_type_rtx_;
}
-
void RTPSender::SetRtxPayloadType(int payload_type) {
CriticalSectionScoped cs(send_critsect_);
payload_type_rtx_ = payload_type;
@@ -344,8 +346,7 @@ int32_t RTPSender::CheckPayloadType(const int8_t payload_type,
CriticalSectionScoped cs(send_critsect_);
if (payload_type < 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_, "\tinvalid payload_type (%d)",
- payload_type);
+ LOG(LS_ERROR) << "Invalid payload_type " << payload_type;
return -1;
}
if (audio_configured_) {
@@ -367,8 +368,7 @@ int32_t RTPSender::CheckPayloadType(const int8_t payload_type,
std::map<int8_t, ModuleRTPUtility::Payload *>::iterator it =
payload_type_map_.find(payload_type);
if (it == payload_type_map_.end()) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "\tpayloadType:%d not registered", payload_type);
+ LOG(LS_WARNING) << "Payload type " << payload_type << " not registered.";
return -1;
}
payload_type_ = payload_type;
@@ -397,9 +397,7 @@ int32_t RTPSender::SendOutgoingData(
}
RtpVideoCodecTypes video_type = kRtpVideoGeneric;
if (CheckPayloadType(payload_type, &video_type) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s invalid argument failed to find payload_type:%d",
- __FUNCTION__, payload_type);
+ LOG(LS_ERROR) << "Don't send data with unknown payload type.";
return -1;
}
@@ -456,7 +454,7 @@ int RTPSender::SendRedundantPayloads(int payload_type, int bytes_to_send) {
&capture_time_ms)) {
break;
}
- if (!PrepareAndSendPacket(buffer, length, capture_time_ms, true))
+ if (!PrepareAndSendPacket(buffer, length, capture_time_ms, true, false))
return -1;
ModuleRTPUtility::RTPHeaderParser rtp_parser(buffer, length);
RTPHeader rtp_header;
@@ -472,7 +470,8 @@ bool RTPSender::SendPaddingAccordingToBitrate(
// Current bitrate since last estimate(1 second) averaged with the
// estimate since then, to get the most up to date bitrate.
uint32_t current_bitrate = bitrate_sent_.BitrateNow();
- int bitrate_diff = target_send_bitrate_ * 1000 - current_bitrate;
+ uint32_t target_bitrate = GetTargetBitrate();
+ int bitrate_diff = target_bitrate - current_bitrate;
if (bitrate_diff <= 0) {
return true;
}
@@ -483,7 +482,7 @@ bool RTPSender::SendPaddingAccordingToBitrate(
} else {
bytes = (bitrate_diff / 8);
// Cap at 200 ms of target send data.
- int bytes_cap = target_send_bitrate_ * 25; // 1000 / 8 / 5.
+ int bytes_cap = target_bitrate / 1000 * 25; // 1000 / 8 / 5.
if (bytes > bytes_cap) {
bytes = bytes_cap;
}
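For reference, the 200 ms cap above follows from converting the target bitrate (bits per second) into bytes per 200 ms: bits/s divided by 8 and by 5, which equals (bits/s / 1000) * 25. A minimal sketch of that arithmetic, with a 300 kbps figure chosen purely for illustration:

#include <stdint.h>

// Illustrative only: bytes of padding allowed per 200 ms at a given target
// bitrate, mirroring the "target_bitrate / 1000 * 25" cap above.
uint32_t PaddingCapBytesPer200Ms(uint32_t target_bitrate_bps) {
  return target_bitrate_bps / 1000 * 25;  // Same as bits/s / 8 / 5.
}
// Example: PaddingCapBytesPer200Ms(300000) == 7500 bytes.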
@@ -528,7 +527,7 @@ int RTPSender::SendPadData(int payload_type, uint32_t timestamp,
StorageType store, bool force_full_size_packets,
bool only_pad_after_markerbit) {
// Drop this packet if we're not sending media packets.
- if (!sending_media_) {
+ if (!SendingMedia()) {
return bytes;
}
int padding_bytes_in_packet = 0;
@@ -596,7 +595,6 @@ bool RTPSender::StorePackets() const {
int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) {
uint16_t length = IP_PACKET_SIZE;
uint8_t data_buffer[IP_PACKET_SIZE];
- uint8_t *buffer_to_send_ptr = data_buffer;
int64_t capture_time_ms;
if (!packet_history_.GetPacketAndSetSendTime(packet_id, min_resend_time, true,
data_buffer, &length,
@@ -605,19 +603,13 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) {
return 0;
}
- ModuleRTPUtility::RTPHeaderParser rtp_parser(data_buffer, length);
- RTPHeader header;
- if (!rtp_parser.Parse(header)) {
- assert(false);
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "Failed to parse RTP header of packet to be retransmitted.");
- return -1;
- }
- TRACE_EVENT_INSTANT2("webrtc_rtp", "RTPSender::ReSendPacket",
- "timestamp", header.timestamp,
- "seqnum", header.sequenceNumber);
-
if (paced_sender_) {
+ ModuleRTPUtility::RTPHeaderParser rtp_parser(data_buffer, length);
+ RTPHeader header;
+ if (!rtp_parser.Parse(header)) {
+ assert(false);
+ return -1;
+ }
if (!paced_sender_->SendPacket(PacedSender::kHighPriority,
header.ssrc,
header.sequenceNumber,
@@ -630,17 +622,9 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) {
}
}
- uint8_t data_buffer_rtx[IP_PACKET_SIZE];
- if ((rtx_ & kRtxRetransmitted) > 0) {
- BuildRtxPacket(data_buffer, &length, data_buffer_rtx);
- buffer_to_send_ptr = data_buffer_rtx;
- }
-
- if (SendPacketToNetwork(buffer_to_send_ptr, length)) {
- UpdateRtpStats(buffer_to_send_ptr, length, header, rtx_ != kRtxOff, true);
- return length;
- }
- return -1;
+ return PrepareAndSendPacket(data_buffer, length, capture_time_ms,
+ (rtx_ & kRtxRetransmitted) > 0, true) ?
+ length : -1;
}
bool RTPSender::SendPacketToNetwork(const uint8_t *packet, uint32_t size) {
@@ -650,10 +634,9 @@ bool RTPSender::SendPacketToNetwork(const uint8_t *packet, uint32_t size) {
}
TRACE_EVENT_INSTANT2("webrtc_rtp", "RTPSender::SendPacketToNetwork",
"size", size, "sent", bytes_sent);
- // TODO(pwesin): Add a separate bitrate for sent bitrate after pacer.
+ // TODO(pwestin): Add a separate bitrate for sent bitrate after pacer.
if (bytes_sent <= 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Transport failed to send packet");
+ LOG(LS_WARNING) << "Transport failed to send packet";
return false;
}
return true;
@@ -678,12 +661,12 @@ void RTPSender::OnReceivedNACK(
"num_seqnum", nack_sequence_numbers.size(), "avg_rtt", avg_rtt);
const int64_t now = clock_->TimeInMilliseconds();
uint32_t bytes_re_sent = 0;
+ uint32_t target_bitrate = GetTargetBitrate();
// Enough bandwidth to send NACK?
if (!ProcessNACKBitRate(now)) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "NACK bitrate reached. Skip sending NACK response. Target %d",
- target_send_bitrate_);
+ LOG(LS_INFO) << "NACK bitrate reached. Skip sending NACK response. Target "
+ << target_bitrate;
return;
}
@@ -698,16 +681,15 @@ void RTPSender::OnReceivedNACK(
continue;
} else if (bytes_sent < 0) {
// Failed to send one Sequence number. Give up the rest in this nack.
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Failed resending RTP packet %d, Discard rest of packets",
- *it);
+ LOG(LS_WARNING) << "Failed resending RTP packet " << *it
+ << ", Discard rest of packets";
break;
}
// Delay bandwidth estimate (RTT * BW).
- if (target_send_bitrate_ != 0 && avg_rtt) {
+ if (target_bitrate != 0 && avg_rtt) {
// kbits/s * ms = bits => bits/8 = bytes
uint32_t target_bytes =
- (static_cast<uint32_t>(target_send_bitrate_) * avg_rtt) >> 3;
+ (static_cast<uint32_t>(target_bitrate / 1000) * avg_rtt) >> 3;
if (bytes_re_sent > target_bytes) {
break; // Ignore the rest of the packets in the list.
}
@@ -722,32 +704,34 @@ void RTPSender::OnReceivedNACK(
bool RTPSender::ProcessNACKBitRate(const uint32_t now) {
uint32_t num = 0;
- int32_t byte_count = 0;
- const uint32_t avg_interval = 1000;
+ int byte_count = 0;
+ const int kAvgIntervalMs = 1000;
+ uint32_t target_bitrate = GetTargetBitrate();
CriticalSectionScoped cs(send_critsect_);
- if (target_send_bitrate_ == 0) {
+ if (target_bitrate == 0) {
return true;
}
for (num = 0; num < NACK_BYTECOUNT_SIZE; ++num) {
- if ((now - nack_byte_count_times_[num]) > avg_interval) {
+ if ((now - nack_byte_count_times_[num]) > kAvgIntervalMs) {
// Don't use data older than 1sec.
break;
} else {
byte_count += nack_byte_count_[num];
}
}
- int32_t time_interval = avg_interval;
+ int time_interval = kAvgIntervalMs;
if (num == NACK_BYTECOUNT_SIZE) {
// More than NACK_BYTECOUNT_SIZE nack messages has been received
// during the last msg_interval.
time_interval = now - nack_byte_count_times_[num - 1];
if (time_interval < 0) {
- time_interval = avg_interval;
+ time_interval = kAvgIntervalMs;
}
}
- return (byte_count * 8) < (target_send_bitrate_ * time_interval);
+ return (byte_count * 8) <
+ static_cast<int>(target_bitrate / 1000 * time_interval);
}
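The gate above compares the bits resent during the window against the target bitrate (bits/s, divided by 1000 to get bits per millisecond) times the window length in milliseconds. A hedged, standalone sketch of the same check; the function name and the 300 kbps / 1000 ms figures are illustrative only:

#include <stdint.h>

// Returns true if resending 'resent_bytes' within 'interval_ms' stays below
// the target bitrate, mirroring the comparison in ProcessNACKBitRate above.
bool WithinNackBudget(int resent_bytes, int interval_ms,
                      uint32_t target_bitrate_bps) {
  if (target_bitrate_bps == 0)
    return true;  // No limit configured.
  // bits/s / 1000 = bits per ms; times the interval gives the bit budget.
  return (resent_bytes * 8) <
         static_cast<int>(target_bitrate_bps / 1000 * interval_ms);
}
// Example: WithinNackBudget(30000, 1000, 300000) is true (240000 < 300000).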
void RTPSender::UpdateNACKBitRate(const uint32_t bytes,
@@ -796,19 +780,21 @@ bool RTPSender::TimeToSendPacket(uint16_t sequence_number,
UpdateDelayStatistics(capture_time_ms, clock_->TimeInMilliseconds());
}
return PrepareAndSendPacket(data_buffer, length, capture_time_ms,
- retransmission && (rtx_ & kRtxRetransmitted) > 0);
+ retransmission && (rtx_ & kRtxRetransmitted) > 0,
+ retransmission);
}
bool RTPSender::PrepareAndSendPacket(uint8_t* buffer,
uint16_t length,
int64_t capture_time_ms,
- bool send_over_rtx) {
+ bool send_over_rtx,
+ bool is_retransmit) {
uint8_t *buffer_to_send_ptr = buffer;
ModuleRTPUtility::RTPHeaderParser rtp_parser(buffer, length);
RTPHeader rtp_header;
rtp_parser.Parse(rtp_header);
- TRACE_EVENT_INSTANT2("webrtc_rtp", "RTPSender::TimeToSendPacket",
+ TRACE_EVENT_INSTANT2("webrtc_rtp", "PrepareAndSendPacket",
"timestamp", rtp_header.timestamp,
"seqnum", rtp_header.sequenceNumber);
@@ -820,20 +806,12 @@ bool RTPSender::PrepareAndSendPacket(uint8_t* buffer,
int64_t now_ms = clock_->TimeInMilliseconds();
int64_t diff_ms = now_ms - capture_time_ms;
- bool updated_transmission_time_offset =
- UpdateTransmissionTimeOffset(buffer_to_send_ptr, length, rtp_header,
- diff_ms);
- bool updated_abs_send_time =
- UpdateAbsoluteSendTime(buffer_to_send_ptr, length, rtp_header, now_ms);
- if (updated_transmission_time_offset || updated_abs_send_time) {
- // Update stored packet in case of receiving a re-transmission request.
- packet_history_.ReplaceRTPHeader(buffer_to_send_ptr,
- rtp_header.sequenceNumber,
- rtp_header.headerLength);
- }
-
+ UpdateTransmissionTimeOffset(buffer_to_send_ptr, length, rtp_header,
+ diff_ms);
+ UpdateAbsoluteSendTime(buffer_to_send_ptr, length, rtp_header, now_ms);
bool ret = SendPacketToNetwork(buffer_to_send_ptr, length);
- UpdateRtpStats(buffer_to_send_ptr, length, rtp_header, false, false);
+ UpdateRtpStats(buffer_to_send_ptr, length, rtp_header, send_over_rtx,
+ is_retransmit);
return ret;
}
@@ -842,15 +820,16 @@ void RTPSender::UpdateRtpStats(const uint8_t* buffer,
const RTPHeader& header,
bool is_rtx,
bool is_retransmit) {
- CriticalSectionScoped lock(statistics_crit_.get());
StreamDataCounters* counters;
- uint32_t ssrc;
+ // Get ssrc before taking statistics_crit_ to avoid possible deadlock.
+ uint32_t ssrc = SSRC();
+
+ CriticalSectionScoped lock(statistics_crit_.get());
if (is_rtx) {
counters = &rtx_rtp_stats_;
ssrc = ssrc_rtx_;
} else {
counters = &rtp_stats_;
- ssrc = ssrc_;
}
bitrate_sent_.Update(size);
@@ -887,14 +866,14 @@ bool RTPSender::IsFecPacket(const uint8_t* buffer,
}
int RTPSender::TimeToSendPadding(int bytes) {
- if (!sending_media_) {
- return 0;
- }
int payload_type;
int64_t capture_time_ms;
uint32_t timestamp;
{
CriticalSectionScoped cs(send_critsect_);
+ if (!sending_media_) {
+ return 0;
+ }
payload_type = ((rtx_ & kRtxRedundantPayloads) > 0) ? payload_type_rtx_ :
payload_type_;
timestamp = timestamp_;
@@ -909,8 +888,13 @@ int RTPSender::TimeToSendPadding(int bytes) {
int bytes_sent = SendRedundantPayloads(payload_type, bytes);
bytes -= bytes_sent;
if (bytes > 0) {
- int padding_sent = SendPadData(payload_type, timestamp, capture_time_ms,
- bytes, kDontStore, true, true);
+ int padding_sent = SendPadData(payload_type,
+ timestamp,
+ capture_time_ms,
+ bytes,
+ kDontStore,
+ true,
+ rtx_ == kRtxOff);
bytes_sent += padding_sent;
}
return bytes_sent;
@@ -1116,9 +1100,8 @@ uint16_t RTPSender::BuildRTPHeaderExtension(uint8_t* data_buffer) const {
data_buffer + kHeaderLength + total_block_length);
break;
case kRtpExtensionAudioLevel:
- // Because AudioLevel is handled specially by RTPSenderAudio, we pretend
- // we don't have to care about it here, which is true until we wan't to
- // use it together with any of the other extensions we support.
+ block_length = BuildAudioLevelExtension(
+ data_buffer + kHeaderLength + total_block_length);
break;
case kRtpExtensionAbsoluteSendTime:
block_length = BuildAbsoluteSendTimeExtension(
@@ -1178,8 +1161,42 @@ uint8_t RTPSender::BuildTransmissionTimeOffsetExtension(
return kTransmissionTimeOffsetLength;
}
-uint8_t RTPSender::BuildAbsoluteSendTimeExtension(
- uint8_t* data_buffer) const {
+uint8_t RTPSender::BuildAudioLevelExtension(uint8_t* data_buffer) const {
+ // An RTP Header Extension for Client-to-Mixer Audio Level Indication
+ //
+ // https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
+ //
+ // The form of the audio level extension block:
+ //
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | len=0 |V| level | 0x00 | 0x00 |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ //
+ // Note that we always include 2 pad bytes, which results in legal and
+ // correctly parsed RTP, but may be a bit wasteful if more short extensions
+ // are implemented. Right now the pad bytes would be required at the end of
+ // the extension block anyway, so it makes no difference.
+
+ // Get id defined by user.
+ uint8_t id;
+ if (rtp_header_extension_map_.GetId(kRtpExtensionAudioLevel, &id) != 0) {
+ // Not registered.
+ return 0;
+ }
+ size_t pos = 0;
+ const uint8_t len = 0;
+ data_buffer[pos++] = (id << 4) + len;
+ data_buffer[pos++] = (1 << 7) + 0; // Voice, 0 dBov.
+ data_buffer[pos++] = 0; // Padding.
+ data_buffer[pos++] = 0; // Padding.
+ // kAudioLevelLength includes the pad bytes.
+ assert(pos == kAudioLevelLength);
+ return kAudioLevelLength;
+}
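As a usage note, the single data byte written above packs the voice-activity flag in the most significant bit and the level (in -dBov) in the low 7 bits. A minimal receiver-side decoding sketch for that byte, for illustration only:

#include <stdint.h>

// Decodes the one data byte of the audio level extension element:
// MSB = voice-activity flag, low 7 bits = level in -dBov (0..127).
void DecodeAudioLevelByte(uint8_t byte, bool* is_voiced, uint8_t* level_dbov) {
  *is_voiced = (byte & 0x80) != 0;
  *level_dbov = byte & 0x7f;
}
// For the byte written above, (1 << 7) + 0, this yields is_voiced == true
// and level_dbov == 0 (i.e. 0 dBov, full scale).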
+
+uint8_t RTPSender::BuildAbsoluteSendTimeExtension(uint8_t* data_buffer) const {
// Absolute send time in RTP streams.
//
// The absolute send time is signaled to the receiver in-band using the
@@ -1213,106 +1230,138 @@ uint8_t RTPSender::BuildAbsoluteSendTimeExtension(
return kAbsoluteSendTimeLength;
}
-bool RTPSender::UpdateTransmissionTimeOffset(
+void RTPSender::UpdateTransmissionTimeOffset(
uint8_t *rtp_packet, const uint16_t rtp_packet_length,
const RTPHeader &rtp_header, const int64_t time_diff_ms) const {
CriticalSectionScoped cs(send_critsect_);
-
+ // Get id.
+ uint8_t id = 0;
+ if (rtp_header_extension_map_.GetId(kRtpExtensionTransmissionTimeOffset,
+ &id) != 0) {
+ // Not registered.
+ return;
+ }
// Get length until start of header extension block.
int extension_block_pos =
rtp_header_extension_map_.GetLengthUntilBlockStartInBytes(
kRtpExtensionTransmissionTimeOffset);
if (extension_block_pos < 0) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update transmission time offset, not registered.");
- return false;
+ LOG(LS_WARNING)
+ << "Failed to update transmission time offset, not registered.";
+ return;
}
int block_pos = 12 + rtp_header.numCSRCs + extension_block_pos;
if (rtp_packet_length < block_pos + kTransmissionTimeOffsetLength ||
rtp_header.headerLength <
block_pos + kTransmissionTimeOffsetLength) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update transmission time offset, invalid length.");
- return false;
+ LOG(LS_WARNING)
+ << "Failed to update transmission time offset, invalid length.";
+ return;
}
// Verify that header contains extension.
if (!((rtp_packet[12 + rtp_header.numCSRCs] == 0xBE) &&
(rtp_packet[12 + rtp_header.numCSRCs + 1] == 0xDE))) {
- WEBRTC_TRACE(
- kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update transmission time offset, hdr extension not found.");
- return false;
- }
- // Get id.
- uint8_t id = 0;
- if (rtp_header_extension_map_.GetId(kRtpExtensionTransmissionTimeOffset,
- &id) != 0) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update transmission time offset, no id.");
- return false;
+ LOG(LS_WARNING) << "Failed to update transmission time offset, hdr "
+ "extension not found.";
+ return;
}
// Verify first byte in block.
const uint8_t first_block_byte = (id << 4) + 2;
if (rtp_packet[block_pos] != first_block_byte) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update transmission time offset.");
- return false;
+ LOG(LS_WARNING) << "Failed to update transmission time offset.";
+ return;
}
// Update transmission offset field (converting to a 90 kHz timestamp).
ModuleRTPUtility::AssignUWord24ToBuffer(rtp_packet + block_pos + 1,
time_diff_ms * 90); // RTP timestamp.
- return true;
}
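The conversion above multiplies the delay in milliseconds by 90 because the transmission offset is expressed in 90 kHz RTP ticks (90 ticks per millisecond for video). A tiny illustrative helper showing just that step:

#include <stdint.h>

// Milliseconds -> 90 kHz RTP ticks, as used for the transmission offset above.
int32_t MsToRtp90kHzTicks(int64_t time_diff_ms) {
  return static_cast<int32_t>(time_diff_ms * 90);
}
// Example: a 10 ms pacer delay becomes 900 RTP ticks.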
-bool RTPSender::UpdateAbsoluteSendTime(
- uint8_t *rtp_packet, const uint16_t rtp_packet_length,
- const RTPHeader &rtp_header, const int64_t now_ms) const {
+bool RTPSender::UpdateAudioLevel(uint8_t *rtp_packet,
+ const uint16_t rtp_packet_length,
+ const RTPHeader &rtp_header,
+ const bool is_voiced,
+ const uint8_t dBov) const {
CriticalSectionScoped cs(send_critsect_);
+ // Get id.
+ uint8_t id = 0;
+ if (rtp_header_extension_map_.GetId(kRtpExtensionAudioLevel, &id) != 0) {
+ // Not registered.
+ return false;
+ }
// Get length until start of header extension block.
int extension_block_pos =
rtp_header_extension_map_.GetLengthUntilBlockStartInBytes(
- kRtpExtensionAbsoluteSendTime);
+ kRtpExtensionAudioLevel);
if (extension_block_pos < 0) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update absolute send time, not registered.");
+ // The feature is not enabled.
return false;
}
int block_pos = 12 + rtp_header.numCSRCs + extension_block_pos;
- if (rtp_packet_length < block_pos + kAbsoluteSendTimeLength ||
- rtp_header.headerLength < block_pos + kAbsoluteSendTimeLength) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update absolute send time, invalid length.");
+ if (rtp_packet_length < block_pos + kAudioLevelLength ||
+ rtp_header.headerLength < block_pos + kAudioLevelLength) {
+ LOG(LS_WARNING) << "Failed to update audio level, invalid length.";
return false;
}
// Verify that header contains extension.
if (!((rtp_packet[12 + rtp_header.numCSRCs] == 0xBE) &&
(rtp_packet[12 + rtp_header.numCSRCs + 1] == 0xDE))) {
- WEBRTC_TRACE(
- kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update absolute send time, hdr extension not found.");
+ LOG(LS_WARNING) << "Failed to update audio level, hdr extension not found.";
+ return false;
+ }
+ // Verify first byte in block.
+ const uint8_t first_block_byte = (id << 4) + 0;
+ if (rtp_packet[block_pos] != first_block_byte) {
+ LOG(LS_WARNING) << "Failed to update audio level.";
return false;
}
+ rtp_packet[block_pos + 1] = (is_voiced ? 0x80 : 0x00) + (dBov & 0x7f);
+ return true;
+}
+
+void RTPSender::UpdateAbsoluteSendTime(
+ uint8_t *rtp_packet, const uint16_t rtp_packet_length,
+ const RTPHeader &rtp_header, const int64_t now_ms) const {
+ CriticalSectionScoped cs(send_critsect_);
+
// Get id.
uint8_t id = 0;
if (rtp_header_extension_map_.GetId(kRtpExtensionAbsoluteSendTime,
&id) != 0) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update absolute send time, no id.");
- return false;
+ // Not registered.
+ return;
+ }
+ // Get length until start of header extension block.
+ int extension_block_pos =
+ rtp_header_extension_map_.GetLengthUntilBlockStartInBytes(
+ kRtpExtensionAbsoluteSendTime);
+ if (extension_block_pos < 0) {
+ // The feature is not enabled.
+ return;
+ }
+ int block_pos = 12 + rtp_header.numCSRCs + extension_block_pos;
+ if (rtp_packet_length < block_pos + kAbsoluteSendTimeLength ||
+ rtp_header.headerLength < block_pos + kAbsoluteSendTimeLength) {
+ LOG(LS_WARNING) << "Failed to update absolute send time, invalid length.";
+ return;
+ }
+ // Verify that header contains extension.
+ if (!((rtp_packet[12 + rtp_header.numCSRCs] == 0xBE) &&
+ (rtp_packet[12 + rtp_header.numCSRCs + 1] == 0xDE))) {
+ LOG(LS_WARNING)
+ << "Failed to update absolute send time, hdr extension not found.";
+ return;
}
// Verify first byte in block.
const uint8_t first_block_byte = (id << 4) + 2;
if (rtp_packet[block_pos] != first_block_byte) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update absolute send time.");
- return false;
+ LOG(LS_WARNING) << "Failed to update absolute send time.";
+ return;
}
// Update absolute send time field (convert ms to 24-bit unsigned with 18 bit
// fractional part).
ModuleRTPUtility::AssignUWord24ToBuffer(rtp_packet + block_pos + 1,
((now_ms << 18) / 1000) & 0x00ffffff);
- return true;
}
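The ((now_ms << 18) / 1000) expression above encodes seconds in 6.18 fixed point: 18 fractional bits, truncated to 24 bits, so the value wraps every 64 seconds. A small round-trip sketch of the conversion, illustrative only:

#include <stdint.h>

// Milliseconds -> 24-bit 6.18 fixed-point seconds (wraps every 64 s).
uint32_t MsToAbsSendTime(int64_t now_ms) {
  return static_cast<uint32_t>(((now_ms << 18) / 1000) & 0x00ffffff);
}

// Approximate inverse, for checking: 6.18 fixed-point seconds -> milliseconds.
int64_t AbsSendTimeToMs(uint32_t abs_send_time) {
  return (static_cast<int64_t>(abs_send_time) * 1000) >> 18;
}
// Example: MsToAbsSendTime(1000) == 262144 (1.0 s == 1 << 18), and
// AbsSendTimeToMs(262144) == 1000.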
void RTPSender::SetSendingStatus(bool enabled) {
@@ -1462,19 +1511,6 @@ int32_t RTPSender::SetAudioPacketSize(
return audio_->SetAudioPacketSize(packet_size_samples);
}
-int32_t RTPSender::SetAudioLevelIndicationStatus(const bool enable,
- const uint8_t ID) {
- if (!audio_configured_) {
- return -1;
- }
- return audio_->SetAudioLevelIndicationStatus(enable, ID);
-}
-
-int32_t RTPSender::AudioLevelIndicationStatus(bool *enable,
- uint8_t* id) const {
- return audio_->AudioLevelIndicationStatus(*enable, *id);
-}
-
int32_t RTPSender::SetAudioLevel(const uint8_t level_d_bov) {
return audio_->SetAudioLevel(level_d_bov);
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
index e1cc3a182a7..291e619b43a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
@@ -25,6 +25,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/modules/rtp_rtcp/source/ssrc_database.h"
#include "webrtc/modules/rtp_rtcp/source/video_codec_information.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#define MAX_INIT_RTP_SEQ_NUMBER 32767 // 2^15 -1.
@@ -82,7 +83,8 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
// was sent within the statistics window.
bool GetSendSideDelay(int* avg_send_delay_ms, int* max_send_delay_ms) const;
- void SetTargetSendBitrate(const uint32_t bits);
+ void SetTargetBitrate(uint32_t bitrate);
+ uint32_t GetTargetBitrate();
virtual uint16_t MaxDataPayloadLength() const
OVERRIDE; // with RTP and FEC headers.
@@ -153,19 +155,15 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
uint16_t BuildRTPHeaderExtension(uint8_t* data_buffer) const;
- uint8_t BuildTransmissionTimeOffsetExtension(
- uint8_t *data_buffer) const;
- uint8_t BuildAbsoluteSendTimeExtension(
- uint8_t* data_buffer) const;
+ uint8_t BuildTransmissionTimeOffsetExtension(uint8_t *data_buffer) const;
+ uint8_t BuildAudioLevelExtension(uint8_t* data_buffer) const;
+ uint8_t BuildAbsoluteSendTimeExtension(uint8_t* data_buffer) const;
- bool UpdateTransmissionTimeOffset(uint8_t *rtp_packet,
- const uint16_t rtp_packet_length,
- const RTPHeader &rtp_header,
- const int64_t time_diff_ms) const;
- bool UpdateAbsoluteSendTime(uint8_t *rtp_packet,
- const uint16_t rtp_packet_length,
- const RTPHeader &rtp_header,
- const int64_t now_ms) const;
+ bool UpdateAudioLevel(uint8_t *rtp_packet,
+ const uint16_t rtp_packet_length,
+ const RTPHeader &rtp_header,
+ const bool is_voiced,
+ const uint8_t dBov) const;
bool TimeToSendPacket(uint16_t sequence_number, int64_t capture_time_ms,
bool retransmission);
@@ -187,10 +185,12 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
bool ProcessNACKBitRate(const uint32_t now);
// RTX.
- void SetRTXStatus(int mode, bool set_ssrc, uint32_t ssrc);
+ void SetRTXStatus(int mode);
void RTXStatus(int* mode, uint32_t* ssrc, int* payload_type) const;
+ void SetRtxSsrc(uint32_t ssrc);
+
void SetRtxPayloadType(int payloadType);
// Functions wrapping RTPSenderInterface.
@@ -228,12 +228,6 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
// packet in silence (CNG).
int32_t SetAudioPacketSize(const uint16_t packet_size_samples);
- // Set status and ID for header-extension-for-audio-level-indication.
- int32_t SetAudioLevelIndicationStatus(const bool enable, const uint8_t ID);
-
- // Get status and ID for header-extension-for-audio-level-indication.
- int32_t AudioLevelIndicationStatus(bool *enable, uint8_t *id) const;
-
// Store the audio level in d_bov for
// header-extension-for-audio-level-indication.
int32_t SetAudioLevel(const uint8_t level_d_bov);
@@ -303,7 +297,8 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
bool PrepareAndSendPacket(uint8_t* buffer,
uint16_t length,
int64_t capture_time_ms,
- bool send_over_rtx);
+ bool send_over_rtx,
+ bool is_retransmit);
int SendRedundantPayloads(int payload_type, int bytes);
@@ -319,6 +314,15 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
void UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms);
+ void UpdateTransmissionTimeOffset(uint8_t *rtp_packet,
+ const uint16_t rtp_packet_length,
+ const RTPHeader &rtp_header,
+ const int64_t time_diff_ms) const;
+ void UpdateAbsoluteSendTime(uint8_t *rtp_packet,
+ const uint16_t rtp_packet_length,
+ const RTPHeader &rtp_header,
+ const int64_t now_ms) const;
+
void UpdateRtpStats(const uint8_t* buffer,
uint32_t size,
const RTPHeader& header,
@@ -338,13 +342,12 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
CriticalSectionWrapper *send_critsect_;
Transport *transport_;
- bool sending_media_;
+ bool sending_media_ GUARDED_BY(send_critsect_);
uint16_t max_payload_length_;
- uint16_t target_send_bitrate_;
uint16_t packet_over_head_;
- int8_t payload_type_;
+ int8_t payload_type_ GUARDED_BY(send_critsect_);
std::map<int8_t, ModuleRTPUtility::Payload *> payload_type_map_;
RtpHeaderExtensionMap rtp_header_extension_map_;
@@ -388,6 +391,13 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
int rtx_;
uint32_t ssrc_rtx_;
int payload_type_rtx_;
+
+ // Note: Don't access this variable directly; always go through
+ // SetTargetBitrate or GetTargetBitrate. Also remember that by the time
+ // either function returns, there is no guarantee that the target bitrate
+ // is still valid.
+ scoped_ptr<CriticalSectionWrapper> target_bitrate_critsect_;
+ uint32_t target_bitrate_ GUARDED_BY(target_bitrate_critsect_);
};
} // namespace webrtc
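The definitions of SetTargetBitrate and GetTargetBitrate are not part of this hunk; the sketch below shows the assumed shape of such critsect-guarded accessors in this code base, not the actual implementation:

// Assumed shape of the accessors declared above (illustrative only).
void RTPSender::SetTargetBitrate(uint32_t bitrate) {
  CriticalSectionScoped cs(target_bitrate_critsect_.get());
  target_bitrate_ = bitrate;
}

uint32_t RTPSender::GetTargetBitrate() {
  CriticalSectionScoped cs(target_bitrate_critsect_.get());
  return target_bitrate_;  // A snapshot; it may change right after we return.
}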
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
index f800142886d..6b3e2276ee6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -17,7 +17,7 @@
namespace webrtc {
RTPSenderAudio::RTPSenderAudio(const int32_t id, Clock* clock,
- RTPSenderInterface* rtpSender) :
+ RTPSender* rtpSender) :
_id(id),
_clock(clock),
_rtpSender(rtpSender),
@@ -42,8 +42,6 @@ RTPSenderAudio::RTPSenderAudio(const int32_t id, Clock* clock,
_cngSWBPayloadType(-1),
_cngFBPayloadType(-1),
_lastPayloadType(-1),
- _includeAudioLevelIndication(false), // @TODO - reset at Init()?
- _audioLevelIndicationID(0),
_audioLevel_dBov(0) {
};
@@ -365,52 +363,12 @@ int32_t RTPSenderAudio::SendAudio(
if (rtpHeaderLength <= 0) {
return -1;
}
+ if (maxPayloadLength < (rtpHeaderLength + payloadSize)) {
+ // Too large payload buffer.
+ return -1;
+ }
{
CriticalSectionScoped cs(_sendAudioCritsect);
-
- // https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
- if (_includeAudioLevelIndication) {
- dataBuffer[0] |= 0x10; // set eXtension bit
- /*
- 0 1 2 3
- 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- | 0xBE | 0xDE | length=1 |
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- | ID | len=0 |V| level | 0x00 | 0x00 |
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- // add our ID (0xBEDE)
- ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength,
- RTP_AUDIO_LEVEL_UNIQUE_ID);
- rtpHeaderLength += 2;
-
- // add the length (length=1) in number of word32
- const uint8_t length = 1;
- ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength,
- length);
- rtpHeaderLength += 2;
-
- // add ID (defined by the user) and len(=0) byte
- const uint8_t id = _audioLevelIndicationID;
- const uint8_t len = 0;
- dataBuffer[rtpHeaderLength++] = (id << 4) + len;
-
- // add voice-activity flag (V) bit and the audio level (in dBov)
- const uint8_t V = (frameType == kAudioFrameSpeech);
- uint8_t level = _audioLevel_dBov;
- dataBuffer[rtpHeaderLength++] = (V << 7) + level;
-
- // add two bytes zero padding
- ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength, 0);
- rtpHeaderLength += 2;
- }
-
- if(maxPayloadLength < rtpHeaderLength + payloadSize ) {
- // too large payload buffer
- return -1;
- }
-
if (_REDPayloadType >= 0 && // Have we configured RED?
fragmentation &&
fragmentation->fragmentationVectorSize > 1 &&
@@ -474,6 +432,17 @@ int32_t RTPSenderAudio::SendAudio(
}
}
_lastPayloadType = payloadType;
+
+ // Update audio level extension, if included.
+ {
+ uint16_t packetSize = payloadSize + rtpHeaderLength;
+ ModuleRTPUtility::RTPHeaderParser rtp_parser(dataBuffer, packetSize);
+ RTPHeader rtp_header;
+ rtp_parser.Parse(rtp_header);
+ _rtpSender->UpdateAudioLevel(dataBuffer, packetSize, rtp_header,
+ (frameType == kAudioFrameSpeech),
+ _audioLevel_dBov);
+ }
} // end critical section
TRACE_EVENT_ASYNC_END2("webrtc", "Audio", captureTimeStamp,
"timestamp", _rtpSender->Timestamp(),
@@ -486,32 +455,6 @@ int32_t RTPSenderAudio::SendAudio(
PacedSender::kHighPriority);
}
-int32_t
-RTPSenderAudio::SetAudioLevelIndicationStatus(const bool enable,
- const uint8_t ID)
-{
- if(enable && (ID < 1 || ID > 14))
- {
- return -1;
- }
- CriticalSectionScoped cs(_sendAudioCritsect);
-
- _includeAudioLevelIndication = enable;
- _audioLevelIndicationID = ID;
-
- return 0;
-}
-
-int32_t
-RTPSenderAudio::AudioLevelIndicationStatus(bool& enable,
- uint8_t& ID) const
-{
- CriticalSectionScoped cs(_sendAudioCritsect);
- enable = _includeAudioLevelIndication;
- ID = _audioLevelIndicationID;
- return 0;
-}
-
// Audio level magnitude and voice activity flag are set for each RTP packet
int32_t
RTPSenderAudio::SetAudioLevel(const uint8_t level_dBov)
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
index 7074e7b29ad..732199c17a1 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -23,7 +23,7 @@ class RTPSenderAudio: public DTMFqueue
{
public:
RTPSenderAudio(const int32_t id, Clock* clock,
- RTPSenderInterface* rtpSender);
+ RTPSender* rtpSender);
virtual ~RTPSenderAudio();
int32_t RegisterAudioPayload(
@@ -44,13 +44,6 @@ public:
// set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
int32_t SetAudioPacketSize(const uint16_t packetSizeSamples);
- // Set status and ID for header-extension-for-audio-level-indication.
- // Valid ID range is [1,14].
- int32_t SetAudioLevelIndicationStatus(const bool enable, const uint8_t ID);
-
- // Get status and ID for header-extension-for-audio-level-indication.
- int32_t AudioLevelIndicationStatus(bool& enable, uint8_t& ID) const;
-
// Store the audio level in dBov for header-extension-for-audio-level-indication.
// Valid range is [0,100]. Actual value is negative.
int32_t SetAudioLevel(const uint8_t level_dBov);
@@ -86,7 +79,7 @@ protected:
private:
int32_t _id;
Clock* _clock;
- RTPSenderInterface* _rtpSender;
+ RTPSender* _rtpSender;
CriticalSectionWrapper* _audioFeedbackCritsect;
RtpAudioFeedback* _audioFeedback;
@@ -117,8 +110,6 @@ private:
int8_t _lastPayloadType;
// Audio level indication (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/)
- bool _includeAudioLevelIndication;
- uint8_t _audioLevelIndicationID;
uint8_t _audioLevel_dBov;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index ce615be0494..18482890f7a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -160,11 +160,8 @@ TEST_F(RtpSenderTest, RegisterRtpAudioLevelHeaderExtension) {
EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAudioLevel, kAudioLevelExtensionId));
- // Accounted size for audio level is zero because it is currently specially
- // treated by RTPSenderAudio.
- EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
- // EXPECT_EQ(kRtpOneByteHeaderLength + kAudioLevelLength,
- // rtp_sender_->RtpHeaderExtensionTotalLength());
+ EXPECT_EQ(kRtpOneByteHeaderLength + kAudioLevelLength,
+ rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
kRtpExtensionAudioLevel));
EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
@@ -183,14 +180,16 @@ TEST_F(RtpSenderTest, RegisterRtpHeaderExtensions) {
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAudioLevel, kAudioLevelExtensionId));
EXPECT_EQ(kRtpOneByteHeaderLength + kTransmissionTimeOffsetLength +
- kAbsoluteSendTimeLength, rtp_sender_->RtpHeaderExtensionTotalLength());
+ kAbsoluteSendTimeLength + kAudioLevelLength,
+ rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset));
- EXPECT_EQ(kRtpOneByteHeaderLength + kAbsoluteSendTimeLength,
- rtp_sender_->RtpHeaderExtensionTotalLength());
+ EXPECT_EQ(kRtpOneByteHeaderLength + kAbsoluteSendTimeLength +
+ kAudioLevelLength, rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime));
- EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
+ EXPECT_EQ(kRtpOneByteHeaderLength + kAudioLevelLength,
+ rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
kRtpExtensionAudioLevel));
EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
@@ -202,23 +201,24 @@ TEST_F(RtpSenderTest, BuildRTPPacket) {
kMarkerBit,
kTimestamp,
0);
- EXPECT_EQ(12, length);
+ EXPECT_EQ(kRtpHeaderSize, length);
// Verify
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
- RtpHeaderExtensionMap map;
- map.Register(kRtpExtensionTransmissionTimeOffset,
- kTransmissionTimeOffsetExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+ const bool valid_rtp_header = rtp_parser.Parse(rtp_header, NULL);
ASSERT_TRUE(valid_rtp_header);
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_FALSE(rtp_header.extension.hasTransmissionTimeOffset);
+ EXPECT_FALSE(rtp_header.extension.hasAbsoluteSendTime);
+ EXPECT_FALSE(rtp_header.extension.hasAudioLevel);
EXPECT_EQ(0, rtp_header.extension.transmissionTimeOffset);
EXPECT_EQ(0u, rtp_header.extension.absoluteSendTime);
+ EXPECT_EQ(0u, rtp_header.extension.audioLevel);
}
TEST_F(RtpSenderTest, BuildRTPPacketWithTransmissionOffsetExtension) {
@@ -231,7 +231,8 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithTransmissionOffsetExtension) {
kMarkerBit,
kTimestamp,
0);
- EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+ EXPECT_EQ(kRtpHeaderSize + rtp_sender_->RtpHeaderExtensionTotalLength(),
+ length);
// Verify
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
@@ -246,6 +247,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithTransmissionOffsetExtension) {
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset);
EXPECT_EQ(kTimeOffset, rtp_header.extension.transmissionTimeOffset);
// Parse without map extension
@@ -255,6 +257,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithTransmissionOffsetExtension) {
ASSERT_TRUE(valid_rtp_header2);
VerifyRTPHeaderCommon(rtp_header2);
EXPECT_EQ(length, rtp_header2.headerLength);
+ EXPECT_FALSE(rtp_header2.extension.hasTransmissionTimeOffset);
EXPECT_EQ(0, rtp_header2.extension.transmissionTimeOffset);
}
@@ -269,7 +272,8 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithNegativeTransmissionOffsetExtension) {
kMarkerBit,
kTimestamp,
0);
- EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+ EXPECT_EQ(kRtpHeaderSize + rtp_sender_->RtpHeaderExtensionTotalLength(),
+ length);
// Verify
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
@@ -284,6 +288,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithNegativeTransmissionOffsetExtension) {
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset);
EXPECT_EQ(kNegTimeOffset, rtp_header.extension.transmissionTimeOffset);
}
@@ -297,7 +302,8 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithAbsoluteSendTimeExtension) {
kMarkerBit,
kTimestamp,
0);
- EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+ EXPECT_EQ(kRtpHeaderSize + rtp_sender_->RtpHeaderExtensionTotalLength(),
+ length);
// Verify
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
@@ -311,6 +317,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithAbsoluteSendTimeExtension) {
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime);
EXPECT_EQ(kAbsoluteSendTime, rtp_header.extension.absoluteSendTime);
// Parse without map extension
@@ -320,9 +327,54 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithAbsoluteSendTimeExtension) {
ASSERT_TRUE(valid_rtp_header2);
VerifyRTPHeaderCommon(rtp_header2);
EXPECT_EQ(length, rtp_header2.headerLength);
+ EXPECT_FALSE(rtp_header2.extension.hasAbsoluteSendTime);
EXPECT_EQ(0u, rtp_header2.extension.absoluteSendTime);
}
+TEST_F(RtpSenderTest, BuildRTPPacketWithAudioLevelExtension) {
+ EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
+ kRtpExtensionAudioLevel, kAudioLevelExtensionId));
+
+ int32_t length = rtp_sender_->BuildRTPheader(packet_,
+ kPayload,
+ kMarkerBit,
+ kTimestamp,
+ 0);
+ EXPECT_EQ(kRtpHeaderSize + rtp_sender_->RtpHeaderExtensionTotalLength(),
+ length);
+
+ // Verify
+ webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+ webrtc::RTPHeader rtp_header;
+
+ // Updating audio level is done in RTPSenderAudio, so simulate it here.
+ rtp_parser.Parse(rtp_header);
+ rtp_sender_->UpdateAudioLevel(packet_, length, rtp_header, true, kAudioLevel);
+
+ RtpHeaderExtensionMap map;
+ map.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
+ const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+
+ ASSERT_TRUE(valid_rtp_header);
+ ASSERT_FALSE(rtp_parser.RTCP());
+ VerifyRTPHeaderCommon(rtp_header);
+ EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_TRUE(rtp_header.extension.hasAudioLevel);
+ // Expect kAudioLevel + 0x80 because we set "voiced" to true in the call to
+ // UpdateAudioLevel(), above.
+ EXPECT_EQ(kAudioLevel + 0x80u, rtp_header.extension.audioLevel);
+
+ // Parse without map extension
+ webrtc::RTPHeader rtp_header2;
+ const bool valid_rtp_header2 = rtp_parser.Parse(rtp_header2, NULL);
+
+ ASSERT_TRUE(valid_rtp_header2);
+ VerifyRTPHeaderCommon(rtp_header2);
+ EXPECT_EQ(length, rtp_header2.headerLength);
+ EXPECT_FALSE(rtp_header2.extension.hasAudioLevel);
+ EXPECT_EQ(0u, rtp_header2.extension.audioLevel);
+}
+
TEST_F(RtpSenderTest, BuildRTPPacketWithHeaderExtensions) {
EXPECT_EQ(0, rtp_sender_->SetTransmissionTimeOffset(kTimeOffset));
EXPECT_EQ(0, rtp_sender_->SetAbsoluteSendTime(kAbsoluteSendTime));
@@ -330,30 +382,42 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithHeaderExtensions) {
kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
+ EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
+ kRtpExtensionAudioLevel, kAudioLevelExtensionId));
int32_t length = rtp_sender_->BuildRTPheader(packet_,
kPayload,
kMarkerBit,
kTimestamp,
0);
- EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+ EXPECT_EQ(kRtpHeaderSize + rtp_sender_->RtpHeaderExtensionTotalLength(),
+ length);
// Verify
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
+ // Updating audio level is done in RTPSenderAudio, so simulate it here.
+ rtp_parser.Parse(rtp_header);
+ rtp_sender_->UpdateAudioLevel(packet_, length, rtp_header, true, kAudioLevel);
+
RtpHeaderExtensionMap map;
map.Register(kRtpExtensionTransmissionTimeOffset,
kTransmissionTimeOffsetExtensionId);
map.Register(kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId);
+ map.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
ASSERT_TRUE(valid_rtp_header);
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset);
+ EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime);
+ EXPECT_TRUE(rtp_header.extension.hasAudioLevel);
EXPECT_EQ(kTimeOffset, rtp_header.extension.transmissionTimeOffset);
EXPECT_EQ(kAbsoluteSendTime, rtp_header.extension.absoluteSendTime);
+ EXPECT_EQ(kAudioLevel + 0x80u, rtp_header.extension.audioLevel);
// Parse without map extension
webrtc::RTPHeader rtp_header2;
@@ -362,8 +426,12 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithHeaderExtensions) {
ASSERT_TRUE(valid_rtp_header2);
VerifyRTPHeaderCommon(rtp_header2);
EXPECT_EQ(length, rtp_header2.headerLength);
+ EXPECT_FALSE(rtp_header2.extension.hasTransmissionTimeOffset);
+ EXPECT_FALSE(rtp_header2.extension.hasAbsoluteSendTime);
+ EXPECT_FALSE(rtp_header2.extension.hasAudioLevel);
EXPECT_EQ(0, rtp_header2.extension.transmissionTimeOffset);
EXPECT_EQ(0u, rtp_header2.extension.absoluteSendTime);
+ EXPECT_EQ(0u, rtp_header2.extension.audioLevel);
}
TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) {
@@ -376,7 +444,7 @@ TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) {
kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
- rtp_sender_->SetTargetSendBitrate(300000);
+ rtp_sender_->SetTargetBitrate(300000);
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
int32_t rtp_length = rtp_sender_->BuildRTPheader(packet_,
kPayload,
@@ -430,7 +498,7 @@ TEST_F(RtpSenderTest, TrafficSmoothingRetransmits) {
kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
- rtp_sender_->SetTargetSendBitrate(300000);
+ rtp_sender_->SetTargetBitrate(300000);
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
int32_t rtp_length = rtp_sender_->BuildRTPheader(packet_,
kPayload,
@@ -493,7 +561,7 @@ TEST_F(RtpSenderTest, SendPadding) {
uint16_t seq_num = kSeqNum;
uint32_t timestamp = kTimestamp;
rtp_sender_->SetStorePacketsStatus(true, 10);
- int rtp_header_len = 12;
+ int32_t rtp_header_len = kRtpHeaderSize;
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
rtp_header_len += 4; // 4 bytes extension.
@@ -512,7 +580,7 @@ TEST_F(RtpSenderTest, SendPadding) {
kAbsoluteSendTimeExtensionId);
webrtc::RTPHeader rtp_header;
- rtp_sender_->SetTargetSendBitrate(300000);
+ rtp_sender_->SetTargetBitrate(300000);
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
int32_t rtp_length = rtp_sender_->BuildRTPheader(packet_,
kPayload,
@@ -613,14 +681,14 @@ TEST_F(RtpSenderTest, SendRedundantPayloads) {
uint16_t seq_num = kSeqNum;
rtp_sender_->SetStorePacketsStatus(true, 10);
- int rtp_header_len = 12;
+ int32_t rtp_header_len = kRtpHeaderSize;
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
rtp_header_len += 4; // 4 bytes extension.
rtp_header_len += 4; // 4 extra bytes common to all extension headers.
- rtp_sender_->SetRTXStatus(kRtxRetransmitted | kRtxRedundantPayloads, true,
- 1234);
+ rtp_sender_->SetRTXStatus(kRtxRetransmitted | kRtxRedundantPayloads);
+ rtp_sender_->SetRtxSsrc(1234);
// Create and set up parser.
scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(
@@ -630,7 +698,7 @@ TEST_F(RtpSenderTest, SendRedundantPayloads) {
kTransmissionTimeOffsetExtensionId);
rtp_parser->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
kAbsoluteSendTimeExtensionId);
- rtp_sender_->SetTargetSendBitrate(300000);
+ rtp_sender_->SetTargetBitrate(300000);
const size_t kNumPayloadSizes = 10;
const int kPayloadSizes[kNumPayloadSizes] = {500, 550, 600, 650, 700, 750,
800, 850, 900, 950};
@@ -939,48 +1007,6 @@ TEST_F(RtpSenderTest, StreamDataCountersCallbacks) {
rtp_sender_->RegisterRtpStatisticsCallback(NULL);
}
-TEST_F(RtpSenderAudioTest, BuildRTPPacketWithAudioLevelExtension) {
- EXPECT_EQ(0, rtp_sender_->SetAudioLevelIndicationStatus(true,
- kAudioLevelExtensionId));
- EXPECT_EQ(0, rtp_sender_->SetAudioLevel(kAudioLevel));
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAudioLevel, kAudioLevelExtensionId));
-
- int32_t length = rtp_sender_->BuildRTPheader(packet_,
- kAudioPayload,
- kMarkerBit,
- kTimestamp,
- 0);
- EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
-
- // Currently, no space is added by for header extension by BuildRTPHeader().
- EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
-
- // Verify
- webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
- webrtc::RTPHeader rtp_header;
-
- RtpHeaderExtensionMap map;
- map.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
-
- ASSERT_TRUE(valid_rtp_header);
- ASSERT_FALSE(rtp_parser.RTCP());
- VerifyRTPHeaderCommon(rtp_header);
- EXPECT_EQ(length, rtp_header.headerLength);
- // TODO(solenberg): Should verify that we got audio level in header extension.
-
- // Parse without map extension
- webrtc::RTPHeader rtp_header2;
- const bool valid_rtp_header2 = rtp_parser.Parse(rtp_header2, NULL);
-
- ASSERT_TRUE(valid_rtp_header2);
- VerifyRTPHeaderCommon(rtp_header2);
- EXPECT_EQ(length, rtp_header2.headerLength);
- // TODO(solenberg): Should verify that we didn't get audio level.
- EXPECT_EQ(0, rtp_sender_->SetAudioLevelIndicationStatus(false, 0));
-}
-
TEST_F(RtpSenderAudioTest, SendAudio) {
char payload_name[RTP_PAYLOAD_NAME_SIZE] = "PAYLOAD_NAME";
const uint8_t payload_type = 127;
@@ -1007,8 +1033,6 @@ TEST_F(RtpSenderAudioTest, SendAudio) {
}
TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) {
- EXPECT_EQ(0, rtp_sender_->SetAudioLevelIndicationStatus(true,
- kAudioLevelExtensionId));
EXPECT_EQ(0, rtp_sender_->SetAudioLevel(kAudioLevel));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAudioLevel, kAudioLevelExtensionId));
@@ -1044,7 +1068,6 @@ TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) {
EXPECT_EQ(0, memcmp(extension, payload_data - sizeof(extension),
sizeof(extension)));
- EXPECT_EQ(0, rtp_sender_->SetAudioLevelIndicationStatus(false, 0));
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
index 7b36f7cced8..5d8ae166565 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -14,12 +14,13 @@
#include <stdlib.h>
#include <string.h>
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/producer_fec.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -30,11 +31,9 @@ struct RtpPacket {
ForwardErrorCorrection::Packet* pkt;
};
-RTPSenderVideo::RTPSenderVideo(const int32_t id,
- Clock* clock,
+RTPSenderVideo::RTPSenderVideo(Clock* clock,
RTPSenderInterface* rtpSender)
- : _id(id),
- _rtpSender(*rtpSender),
+ : _rtpSender(*rtpSender),
_sendVideoCritsect(CriticalSectionWrapper::CreateCriticalSection()),
_videoType(kRtpVideoGeneric),
_videoCodecInformation(NULL),
@@ -42,7 +41,7 @@ RTPSenderVideo::RTPSenderVideo(const int32_t id,
_retransmissionSettings(kRetransmitBaseLayer),
// Generic FEC
- _fec(id),
+ _fec(),
_fecEnabled(false),
_payloadTypeRED(-1),
_payloadTypeFEC(-1),
@@ -253,8 +252,13 @@ RTPSenderVideo::FECPacketOverhead() const
{
if (_fecEnabled)
{
- return ForwardErrorCorrection::PacketOverhead() +
- REDForFECHeaderLength;
+ // Overhead is FEC headers plus RED for FEC header plus anything in the RTP
+ // header beyond the 12-byte base header (CSRC list, extensions...).
+ // The reason for the header extensions to be included here is that, from
+ // an FEC viewpoint, they are part of the payload to be protected.
+ // (The base RTP header is already protected by the FEC header.)
+ return ForwardErrorCorrection::PacketOverhead() + REDForFECHeaderLength +
+ (_rtpSender.RTPHeaderLength() - kRtpHeaderSize);
}
return 0;
}
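A worked example of the computation above, under assumed constants (the real values come from ForwardErrorCorrection::PacketOverhead(), REDForFECHeaderLength and the sender's current header length):

// Illustrative only: the shape of the overhead computed above.
const int kAssumedFecPacketOverhead = 10;  // ForwardErrorCorrection::PacketOverhead(), assumed.
const int kAssumedRedForFecHeaderLength = 1;  // 1-byte RED header, assumed.
const int kBaseRtpHeaderSize = 12;            // Base RTP header.

int ExampleFecOverhead(int rtp_header_length) {
  // CSRCs and extensions beyond the base header count as protected payload.
  return kAssumedFecPacketOverhead + kAssumedRedForFecHeaderLength +
         (rtp_header_length - kBaseRtpHeaderSize);
}
// Example: ExampleFecOverhead(20) == 19 when 8 bytes of extensions are present.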
@@ -323,8 +327,6 @@ RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
{
return retVal;
}
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "%s(timestamp:%u)",
- __FUNCTION__, captureTimeStamp);
return 0;
}
@@ -470,9 +472,9 @@ RTPSenderVideo::SendVP8(const FrameType frameType,
rtpHeaderLength, captureTimeStamp,
capture_time_ms, storage, protect))
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "RTPSenderVideo::SendVP8 failed to send packet number"
- " %d", _rtpSender.SequenceNumber());
+ LOG(LS_WARNING)
+ << "RTPSenderVideo::SendVP8 failed to send packet number "
+ << _rtpSender.SequenceNumber();
}
}
TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
index 4c406d75014..daa730e8c24 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -31,7 +31,7 @@ struct RtpPacket;
class RTPSenderVideo
{
public:
- RTPSenderVideo(const int32_t id, Clock* clock,
+ RTPSenderVideo(Clock* clock,
RTPSenderInterface* rtpSender);
virtual ~RTPSenderVideo();
@@ -112,7 +112,6 @@ private:
const RTPVideoTypeHeader* rtpTypeHdr);
private:
- int32_t _id;
RTPSenderInterface& _rtpSender;
CriticalSectionWrapper* _sendVideoCritsect;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
index 102ebecb02e..c1f3c642749 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -30,7 +30,7 @@
#endif
#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
#define DEBUG_PRINT(...) \
@@ -398,6 +398,10 @@ bool RTPHeaderParser::Parse(RTPHeader& header,
header.extension.hasAbsoluteSendTime = false;
header.extension.absoluteSendTime = 0;
+ // May not be present in packet.
+ header.extension.hasAudioLevel = false;
+ header.extension.audioLevel = 0;
+
if (X) {
/* RTP header extension, RFC 3550.
0 1 2 3
@@ -453,89 +457,96 @@ void RTPHeaderParser::ParseOneByteExtensionHeader(
// | ID | len |
// +-+-+-+-+-+-+-+-+
+ // Note that 'len' is the header extension element length, i.e. the number
+ // of data bytes minus one.
const uint8_t id = (*ptr & 0xf0) >> 4;
const uint8_t len = (*ptr & 0x0f);
ptr++;
if (id == 15) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "Ext id: 15 encountered, parsing terminated.");
+ LOG(LS_WARNING)
+ << "RTP extension header 15 encountered. Terminate parsing.";
return;
}
RTPExtensionType type;
if (ptrExtensionMap->GetType(id, &type) != 0) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "Failed to find extension id: %d", id);
- return;
- }
-
- switch (type) {
- case kRtpExtensionTransmissionTimeOffset: {
- if (len != 2) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "Incorrect transmission time offset len: %d", len);
- return;
+ // If we encounter an unknown extension, just skip over it.
+ LOG(LS_WARNING) << "Failed to find extension id: "
+ << static_cast<int>(id);
+ } else {
+ switch (type) {
+ case kRtpExtensionTransmissionTimeOffset: {
+ if (len != 2) {
+ LOG(LS_WARNING) << "Incorrect transmission time offset len: "
+ << len;
+ return;
+ }
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | len=2 | transmission offset |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ int32_t transmissionTimeOffset = ptr[0] << 16;
+ transmissionTimeOffset += ptr[1] << 8;
+ transmissionTimeOffset += ptr[2];
+ header.extension.transmissionTimeOffset =
+ transmissionTimeOffset;
+ if (transmissionTimeOffset & 0x800000) {
+ // Negative offset, correct sign for Word24 to Word32.
+ header.extension.transmissionTimeOffset |= 0xFF000000;
+ }
+ header.extension.hasTransmissionTimeOffset = true;
+ break;
}
- // 0 1 2 3
- // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- // | ID | len=2 | transmission offset |
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
- int32_t transmissionTimeOffset = *ptr++ << 16;
- transmissionTimeOffset += *ptr++ << 8;
- transmissionTimeOffset += *ptr++;
- header.extension.transmissionTimeOffset =
- transmissionTimeOffset;
- if (transmissionTimeOffset & 0x800000) {
- // Negative offset, correct sign for Word24 to Word32.
- header.extension.transmissionTimeOffset |= 0xFF000000;
+ case kRtpExtensionAudioLevel: {
+ if (len != 0) {
+ LOG(LS_WARNING) << "Incorrect audio level len: " << len;
+ return;
+ }
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | len=0 |V| level | 0x00 | 0x00 |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ //
+
+ // Parse out the fields but only use it for debugging for now.
+ // const uint8_t V = (*ptr & 0x80) >> 7;
+ // const uint8_t level = (*ptr & 0x7f);
+ // DEBUG_PRINT("RTP_AUDIO_LEVEL_UNIQUE_ID: ID=%u, len=%u, V=%u,
+ // level=%u", ID, len, V, level);
+
+ header.extension.audioLevel = ptr[0];
+ header.extension.hasAudioLevel = true;
+ break;
}
- header.extension.hasTransmissionTimeOffset = true;
- break;
- }
- case kRtpExtensionAudioLevel: {
- // --- Only used for debugging ---
- // 0 1 2 3
- // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- // | ID | len=0 |V| level | 0x00 | 0x00 |
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- //
-
- // Parse out the fields but only use it for debugging for now.
- // const uint8_t V = (*ptr & 0x80) >> 7;
- // const uint8_t level = (*ptr & 0x7f);
- // DEBUG_PRINT("RTP_AUDIO_LEVEL_UNIQUE_ID: ID=%u, len=%u, V=%u,
- // level=%u", ID, len, V, level);
- break;
- }
- case kRtpExtensionAbsoluteSendTime: {
- if (len != 2) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "Incorrect absolute send time len: %d", len);
+ case kRtpExtensionAbsoluteSendTime: {
+ if (len != 2) {
+ LOG(LS_WARNING) << "Incorrect absolute send time len: " << len;
+ return;
+ }
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | len=2 | absolute send time |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ uint32_t absoluteSendTime = ptr[0] << 16;
+ absoluteSendTime += ptr[1] << 8;
+ absoluteSendTime += ptr[2];
+ header.extension.absoluteSendTime = absoluteSendTime;
+ header.extension.hasAbsoluteSendTime = true;
+ break;
+ }
+ default: {
+ LOG(LS_WARNING) << "Extension type not implemented: " << type;
return;
}
- // 0 1 2 3
- // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- // | ID | len=2 | absolute send time |
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
- uint32_t absoluteSendTime = *ptr++ << 16;
- absoluteSendTime += *ptr++ << 8;
- absoluteSendTime += *ptr++;
- header.extension.absoluteSendTime = absoluteSendTime;
- header.extension.hasAbsoluteSendTime = true;
- break;
- }
- default: {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "Extension type not implemented.");
- return;
}
}
+ ptr += (len + 1);
uint8_t num_bytes = ParsePaddingBytes(ptrRTPDataExtensionEnd, ptr);
ptr += num_bytes;
}
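To make the len semantics above concrete (a value of n means n + 1 data bytes), here is a minimal sketch of stepping over RFC 5285 one-byte extension elements, independent of the parser class and for illustration only:

#include <stddef.h>
#include <stdint.h>

// Walks one-byte extension elements in [ptr, end) and returns the number of
// elements seen. 'len' in each element is the data length minus one.
size_t CountOneByteExtensionElements(const uint8_t* ptr, const uint8_t* end) {
  size_t count = 0;
  while (ptr < end) {
    if (*ptr == 0) {  // Padding byte between/after elements.
      ++ptr;
      continue;
    }
    const uint8_t id = (*ptr & 0xf0) >> 4;
    const uint8_t len = (*ptr & 0x0f);
    if (id == 15)  // Reserved ID: terminate parsing, as above.
      break;
    ptr += 1 + len + 1;  // One id/len byte, then len + 1 data bytes.
    ++count;
  }
  return count;
}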
@@ -556,17 +567,12 @@ uint8_t RTPHeaderParser::ParsePaddingBytes(
return num_zero_bytes;
}
-// RTP payload parser
RTPPayloadParser::RTPPayloadParser(const RtpVideoCodecTypes videoType,
const uint8_t* payloadData,
- uint16_t payloadDataLength,
- int32_t id)
- :
- _id(id),
- _dataPtr(payloadData),
- _dataLength(payloadDataLength),
- _videoType(videoType) {
-}
+ uint16_t payloadDataLength)
+ : _dataPtr(payloadData),
+ _dataLength(payloadDataLength),
+ _videoType(videoType) {}
RTPPayloadParser::~RTPPayloadParser() {
}
@@ -641,8 +647,7 @@ bool RTPPayloadParser::ParseVP8(RTPPayload& parsedPacket) const {
}
if (dataLength <= 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "Error parsing VP8 payload descriptor; payload too short");
+ LOG(LS_ERROR) << "Error parsing VP8 payload descriptor!";
return false;
}
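For reference, the two one-byte-header extension elements handled in the hunk above can be decoded by a pair of small helpers. This is an illustrative sketch, not code from the patch; it assumes |data| points at the first data byte of the element (just past the ID/len byte), matching the bit layouts in the comments above.

    #include <stdint.h>

    // Audio level element: one data byte, V flag in the top bit and the
    // level (-dBov) in the low 7 bits.
    inline void ParseAudioLevel(const uint8_t* data,
                                bool* voice_activity, uint8_t* level) {
      *voice_activity = (data[0] & 0x80) != 0;
      *level = data[0] & 0x7f;
    }

    // Absolute send time element: three data bytes forming a 24-bit
    // big-endian value.
    inline uint32_t ParseAbsoluteSendTime(const uint8_t* data) {
      return (static_cast<uint32_t>(data[0]) << 16) |
             (static_cast<uint32_t>(data[1]) << 8) |
             static_cast<uint32_t>(data[2]);
    }

Note that the parser above stores the raw audio-level byte, V bit included, in header.extension.audioLevel; splitting out the flag and level only happens in the commented-out debug code.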
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
index 8002273c374..732301f6fbe 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
@@ -166,8 +166,8 @@ namespace ModuleRTPUtility
public:
RTPPayloadParser(const RtpVideoCodecTypes payloadType,
const uint8_t* payloadData,
- const uint16_t payloadDataLength, // Length w/o padding.
- const int32_t id);
+ // Length w/o padding.
+ const uint16_t payloadDataLength);
~RTPPayloadParser();
@@ -202,7 +202,6 @@ namespace ModuleRTPUtility
int dataLength) const;
private:
- int32_t _id;
const uint8_t* _dataPtr;
const uint16_t _dataLength;
const RtpVideoCodecTypes _videoType;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility_unittest.cc
index 02a89fc4fd0..d33eaf4c849 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility_unittest.cc
@@ -76,7 +76,7 @@ TEST(ParseVP8Test, BasicHeader) {
payload[0] = 0x14; // Binary 0001 0100; S = 1, PartID = 4.
payload[1] = 0x01; // P frame.
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -97,7 +97,7 @@ TEST(ParseVP8Test, PictureID) {
payload[1] = 0x80;
payload[2] = 17;
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -117,7 +117,7 @@ TEST(ParseVP8Test, PictureID) {
// Re-use payload, but change to long PictureID.
payload[2] = 0x80 | 17;
payload[3] = 17;
- RTPPayloadParser rtpPayloadParser2(kRtpVideoVp8, payload, 10, 0);
+ RTPPayloadParser rtpPayloadParser2(kRtpVideoVp8, payload, 10);
ASSERT_TRUE(rtpPayloadParser2.Parse(parsedPacket));
@@ -136,7 +136,7 @@ TEST(ParseVP8Test, Tl0PicIdx) {
payload[1] = 0x40;
payload[2] = 17;
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 13, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 13);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -159,7 +159,7 @@ TEST(ParseVP8Test, TIDAndLayerSync) {
payload[1] = 0x20;
payload[2] = 0x80; // TID(2) + LayerSync(false)
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -183,7 +183,7 @@ TEST(ParseVP8Test, KeyIdx) {
payload[1] = 0x10; // K = 1.
payload[2] = 0x11; // KEYIDX = 17 decimal.
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -209,7 +209,7 @@ TEST(ParseVP8Test, MultipleExtensions) {
payload[4] = 42; // Tl0PicIdx.
payload[5] = 0x40 | 0x20 | 0x11; // TID(1) + LayerSync(true) + KEYIDX(17).
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -236,7 +236,7 @@ TEST(ParseVP8Test, TooShortHeader) {
payload[2] = 0x80 | 17; // ... but only 2 bytes PictureID is provided.
payload[3] = 17; // PictureID, low 8 bits.
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4);
RTPPayload parsedPacket;
EXPECT_FALSE(rtpPayloadParser.Parse(parsedPacket));
@@ -258,7 +258,7 @@ TEST(ParseVP8Test, TestWithPacketizer) {
ASSERT_EQ(0, packetizer.NextPacket(packet, &send_bytes, &last));
ASSERT_TRUE(last);
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, packet, send_bytes, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, packet, send_bytes);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
index 1e57970c0d0..df09b01bdf2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
@@ -14,7 +14,6 @@
#include <stdlib.h>
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#ifdef _WIN32
#include <windows.h>
@@ -185,8 +184,6 @@ SSRCDatabase::SSRCDatabase()
_ssrcVector = new uint32_t[10];
#endif
_critSect = CriticalSectionWrapper::CreateCriticalSection();
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, -1, "%s created", __FUNCTION__);
}
SSRCDatabase::~SSRCDatabase()
@@ -197,8 +194,6 @@ SSRCDatabase::~SSRCDatabase()
_ssrcMap.clear();
#endif
delete _critSect;
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, -1, "%s deleted", __FUNCTION__);
}
uint32_t SSRCDatabase::GenerateRandom()
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h
index 2ca5f287afa..cbb1207b8ec 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h
@@ -13,8 +13,8 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/utility/interface/mock/mock_process_thread.h b/chromium/third_party/webrtc/modules/utility/interface/mock/mock_process_thread.h
new file mode 100644
index 00000000000..fc0c1fb1ce1
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/utility/interface/mock/mock_process_thread.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
+
+#include "webrtc/modules/utility/interface/process_thread.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockProcessThread : public ProcessThread {
+ public:
+ MOCK_METHOD0(Start, int32_t());
+ MOCK_METHOD0(Stop, int32_t());
+ MOCK_METHOD1(RegisterModule, int32_t(Module* module));
+ MOCK_METHOD1(DeRegisterModule, int32_t(const Module* module));
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
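The new mock is a plain GoogleMock stub for the ProcessThread interface, so a test can stand it in wherever a real process thread would be injected. A hypothetical usage sketch (test name invented for illustration; only calls declared in the mock above are used):

    #include "testing/gmock/include/gmock/gmock.h"
    #include "testing/gtest/include/gtest/gtest.h"
    #include "webrtc/modules/utility/interface/mock/mock_process_thread.h"

    using ::testing::_;
    using ::testing::Return;

    TEST(MockProcessThreadTest, CanBeDrivenThroughTheInterface) {
      webrtc::MockProcessThread process_thread;
      EXPECT_CALL(process_thread, Start()).WillOnce(Return(0));
      EXPECT_CALL(process_thread, RegisterModule(_)).WillOnce(Return(0));
      EXPECT_CALL(process_thread, DeRegisterModule(_)).WillOnce(Return(0));
      EXPECT_CALL(process_thread, Stop()).WillOnce(Return(0));

      webrtc::ProcessThread& thread = process_thread;  // Use via the interface.
      EXPECT_EQ(0, thread.Start());
      EXPECT_EQ(0, thread.RegisterModule(NULL));
      EXPECT_EQ(0, thread.DeRegisterModule(NULL));
      EXPECT_EQ(0, thread.Stop());
    }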
diff --git a/chromium/third_party/webrtc/modules/utility/interface/process_thread.h b/chromium/third_party/webrtc/modules/utility/interface/process_thread.h
index cdbb4d39127..4db92a308a8 100644
--- a/chromium/third_party/webrtc/modules/utility/interface/process_thread.h
+++ b/chromium/third_party/webrtc/modules/utility/interface/process_thread.h
@@ -25,7 +25,7 @@ public:
virtual int32_t Start() = 0;
virtual int32_t Stop() = 0;
- virtual int32_t RegisterModule(const Module* module) = 0;
+ virtual int32_t RegisterModule(Module* module) = 0;
virtual int32_t DeRegisterModule(const Module* module) = 0;
protected:
virtual ~ProcessThread();
diff --git a/chromium/third_party/webrtc/modules/utility/source/OWNERS b/chromium/third_party/webrtc/modules/utility/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/utility/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations.cc b/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations.cc
index 18dba52b651..e3b00104761 100644
--- a/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations.cc
@@ -72,7 +72,6 @@ void AudioFrameOperations::SwapStereoChannels(AudioFrame* frame) {
void AudioFrameOperations::Mute(AudioFrame& frame) {
memset(frame.data_, 0, sizeof(int16_t) *
frame.samples_per_channel_ * frame.num_channels_);
- frame.energy_ = 0;
}
int AudioFrameOperations::Scale(float left, float right, AudioFrame& frame) {
diff --git a/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations_unittest.cc b/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations_unittest.cc
index 34c08a89e6b..f4d881cf871 100644
--- a/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations_unittest.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations_unittest.cc
@@ -142,17 +142,13 @@ TEST_F(AudioFrameOperationsTest, SwapStereoChannelsFailsOnMono) {
TEST_F(AudioFrameOperationsTest, MuteSucceeds) {
SetFrameData(&frame_, 1000, 1000);
- frame_.energy_ = 1000 * 1000 * frame_.samples_per_channel_ *
- frame_.num_channels_;
AudioFrameOperations::Mute(frame_);
AudioFrame muted_frame;
muted_frame.samples_per_channel_ = 320;
muted_frame.num_channels_ = 2;
SetFrameData(&muted_frame, 0, 0);
- muted_frame.energy_ = 0;
VerifyFramesAreEqual(muted_frame, frame_);
- EXPECT_EQ(muted_frame.energy_, frame_.energy_);
}
// TODO(andrew): should not allow negative scales.
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_player_impl.cc b/chromium/third_party/webrtc/modules/utility/source/file_player_impl.cc
index 9240e64691e..8049245fb06 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_player_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/file_player_impl.cc
@@ -9,7 +9,7 @@
*/
#include "webrtc/modules/utility/source/file_player_impl.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "frame_scaler.h"
@@ -35,8 +35,6 @@ FilePlayer* FilePlayer::CreateFilePlayer(uint32_t instanceID,
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
return new VideoFilePlayerImpl(instanceID, fileFormat);
#else
- WEBRTC_TRACE(kTraceError, kTraceFile, -1,
- "Invalid file format: %d", kFileFormatAviFile);
assert(false);
return NULL;
#endif
@@ -114,10 +112,9 @@ int32_t FilePlayerImpl::Get10msAudioFromFile(
{
if(_codec.plfreq == 0)
{
- WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
- "FilePlayerImpl::Get10msAudioFromFile() playing not started!\
- codecFreq = %d, wantedFreq = %d",
- _codec.plfreq, frequencyInHz);
+ LOG(LS_WARNING) << "Get10msAudioFromFile() playing not started!"
+ << " codec freq = " << _codec.plfreq
+ << ", wanted freq = " << frequencyInHz;
return -1;
}
@@ -175,8 +172,7 @@ int32_t FilePlayerImpl::Get10msAudioFromFile(
if(_resampler.ResetIfNeeded(unresampledAudioFrame.sample_rate_hz_,
frequencyInHz, kResamplerSynchronous))
{
- WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
- "FilePlayerImpl::Get10msAudioFromFile() unexpected codec");
+ LOG(LS_WARNING) << "Get10msAudioFromFile() unexpected codec.";
// New sampling frequency. Update state.
outLen = frequencyInHz / 100;
@@ -214,8 +210,7 @@ int32_t FilePlayerImpl::SetAudioScaling(float scaleFactor)
_scaling = scaleFactor;
return 0;
}
- WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
- "FilePlayerImpl::SetAudioScaling() not allowed scale factor");
+ LOG(LS_WARNING) << "SetAudioScaling() non-allowed scale factor.";
return -1;
}
@@ -255,9 +250,8 @@ int32_t FilePlayerImpl::StartPlayingFile(const char* fileName,
codecInstL16.pacsize = 160;
} else
{
- WEBRTC_TRACE(kTraceError, kTraceVoice, _instanceID,
- "FilePlayerImpl::StartPlayingFile() sample frequency\
- specifed not supported for PCM format.");
+ LOG(LS_ERROR) << "StartPlayingFile() sample frequency not "
+ << "supported for PCM format.";
return -1;
}
@@ -266,12 +260,8 @@ int32_t FilePlayerImpl::StartPlayingFile(const char* fileName,
startPosition,
stopPosition) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to initialize file\
- %s playout.", fileName);
+ LOG(LS_WARNING) << "StartPlayingFile() failed to initialize "
+ << "pcm file " << fileName;
return -1;
}
SetAudioScaling(volumeScaling);
@@ -280,13 +270,8 @@ int32_t FilePlayerImpl::StartPlayingFile(const char* fileName,
if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
_fileFormat, codecInst) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingPreEncodedFile() failed to\
- initialize pre-encoded file %s playout.",
- fileName);
+ LOG(LS_WARNING) << "StartPlayingFile() failed to initialize "
+ << "pre-encoded file " << fileName;
return -1;
}
} else
@@ -297,12 +282,8 @@ int32_t FilePlayerImpl::StartPlayingFile(const char* fileName,
startPosition,
stopPosition) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to initialize file\
- %s playout.", fileName);
+ LOG(LS_WARNING) << "StartPlayingFile() failed to initialize file "
+ << fileName;
return -1;
}
SetAudioScaling(volumeScaling);
@@ -350,12 +331,8 @@ int32_t FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
codecInstL16.pacsize = 160;
}else
{
- WEBRTC_TRACE(
- kTraceError,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() sample frequency specifed\
- not supported for PCM format.");
+ LOG(LS_ERROR) << "StartPlayingFile() sample frequency not "
+ << "supported for PCM format.";
return -1;
}
if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
@@ -363,12 +340,8 @@ int32_t FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
startPosition,
stopPosition) == -1)
{
- WEBRTC_TRACE(
- kTraceError,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to initialize stream\
- playout.");
+ LOG(LS_ERROR) << "StartPlayingFile() failed to initialize stream "
+ << "playout.";
return -1;
}
@@ -377,12 +350,8 @@ int32_t FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
_fileFormat, codecInst) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to initialize stream\
- playout.");
+ LOG(LS_ERROR) << "StartPlayingFile() failed to initialize stream "
+ << "playout.";
return -1;
}
} else {
@@ -392,9 +361,8 @@ int32_t FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
startPosition,
stopPosition) == -1)
{
- WEBRTC_TRACE(kTraceError, kTraceVoice, _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to initialize\
- stream playout.");
+ LOG(LS_ERROR) << "StartPlayingFile() failed to initialize stream "
+ << "playout.";
return -1;
}
}
@@ -430,23 +398,14 @@ int32_t FilePlayerImpl::SetUpAudioDecoder()
{
if ((_fileModule.codec_info(_codec) == -1))
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to retrieve Codec info\
- of file data.");
+ LOG(LS_WARNING) << "Failed to retrieve codec info of file data.";
return -1;
}
if( STR_CASE_CMP(_codec.plname, "L16") != 0 &&
_audioDecoder.SetDecodeCodec(_codec,AMRFileStorage) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() codec %s not supported",
- _codec.plname);
+ LOG(LS_WARNING) << "SetUpAudioDecoder() codec " << _codec.plname
+ << " not supported.";
return -1;
}
_numberOf10MsPerFrame = _codec.pacsize / (_codec.plfreq / 100);
@@ -458,7 +417,7 @@ int32_t FilePlayerImpl::SetUpAudioDecoder()
VideoFilePlayerImpl::VideoFilePlayerImpl(uint32_t instanceID,
FileFormats fileFormat)
: FilePlayerImpl(instanceID, fileFormat),
- video_decoder_(new VideoCoder(instanceID)),
+ video_decoder_(new VideoCoder()),
video_codec_info_(),
_decodedVideoFrames(0),
_encodedData(*new EncodedVideoData()),
@@ -522,7 +481,7 @@ int32_t VideoFilePlayerImpl::StopPlayingFile()
CriticalSectionScoped lock( _critSec);
_decodedVideoFrames = 0;
- video_decoder_.reset(new VideoCoder(_instanceID));
+ video_decoder_.reset(new VideoCoder());
return FilePlayerImpl::StopPlayingFile();
}
@@ -627,12 +586,7 @@ int32_t VideoFilePlayerImpl::TimeUntilNextVideoFrame()
reinterpret_cast< int8_t*>(_encodedData.payloadData),
encodedBufferLengthInBytes) != 0)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVideo,
- _instanceID,
- "FilePlayerImpl::TimeUntilNextVideoFrame() error reading\
- video data");
+ LOG(LS_WARNING) << "Error reading video data.";
return -1;
}
_encodedData.payloadSize = encodedBufferLengthInBytes;
@@ -685,23 +639,16 @@ int32_t VideoFilePlayerImpl::SetUpVideoDecoder()
{
if (_fileModule.VideoCodecInst(video_codec_info_) != 0)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVideo,
- _instanceID,
- "FilePlayerImpl::SetVideoDecoder() failed to retrieve Codec info of\
- file data.");
+ LOG(LS_WARNING) << "SetVideoDecoder() failed to retrieve codec info of "
+ << "file data.";
return -1;
}
int32_t useNumberOfCores = 1;
if (video_decoder_->SetDecodeCodec(video_codec_info_, useNumberOfCores) !=
0) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideo,
- _instanceID,
- "FilePlayerImpl::SetUpVideoDecoder() codec %s not supported",
- video_codec_info_.plName);
+ LOG(LS_WARNING) << "SetUpVideoDecoder() codec "
+ << video_codec_info_.plName << " not supported.";
return -1;
}
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_player_unittests.cc b/chromium/third_party/webrtc/modules/utility/source/file_player_unittests.cc
new file mode 100644
index 00000000000..d430d9f59ad
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/utility/source/file_player_unittests.cc
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for FilePlayer.
+
+#include "webrtc/modules/utility/interface/file_player.h"
+
+#include <stdio.h>
+#include <string>
+
+#include "gflags/gflags.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/md5digest.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+DEFINE_bool(file_player_output, false, "Generate reference files.");
+
+namespace webrtc {
+
+class FilePlayerTest : public ::testing::Test {
+ protected:
+ static const uint32_t kId = 0;
+ static const FileFormats kFileFormat = kFileFormatWavFile;
+ static const int kSampleRateHz = 8000;
+
+ FilePlayerTest()
+ : player_(FilePlayer::CreateFilePlayer(kId, kFileFormat)),
+ output_file_(NULL) {}
+
+ virtual void SetUp() OVERRIDE {
+ if (FLAGS_file_player_output) {
+ std::string output_file =
+ webrtc::test::OutputPath() + "file_player_unittest_out.pcm";
+ output_file_ = fopen(output_file.c_str(), "wb");
+ ASSERT_TRUE(output_file_ != NULL);
+ }
+ }
+
+ virtual void TearDown() OVERRIDE {
+ if (output_file_)
+ fclose(output_file_);
+ }
+
+ ~FilePlayerTest() { FilePlayer::DestroyFilePlayer(player_); }
+
+ void PlayFileAndCheck(const std::string& input_file,
+ const std::string& ref_checksum,
+ int output_length_ms) {
+ const float kScaling = 1;
+ ASSERT_EQ(0,
+ player_->StartPlayingFile(
+ input_file.c_str(), false, 0, kScaling, 0, 0, NULL));
+ rtc::Md5Digest checksum;
+ for (int i = 0; i < output_length_ms / 10; ++i) {
+ int16_t out[10 * kSampleRateHz / 1000] = {0};
+ int num_samples;
+ EXPECT_EQ(0,
+ player_->Get10msAudioFromFile(out, num_samples, kSampleRateHz));
+ checksum.Update(out, num_samples * sizeof(out[0]));
+ if (FLAGS_file_player_output) {
+ ASSERT_EQ(static_cast<size_t>(num_samples),
+ fwrite(out, sizeof(out[0]), num_samples, output_file_));
+ }
+ }
+ char checksum_result[rtc::Md5Digest::kSize];
+ EXPECT_EQ(rtc::Md5Digest::kSize,
+ checksum.Finish(checksum_result, rtc::Md5Digest::kSize));
+ EXPECT_EQ(ref_checksum,
+ rtc::hex_encode(checksum_result, sizeof(checksum_result)));
+ }
+
+ FilePlayer* player_;
+ FILE* output_file_;
+};
+
+TEST_F(FilePlayerTest, PlayWavPcmuFile) {
+ const std::string kFileName =
+ test::ResourcePath("utility/encapsulated_pcmu_8khz", "wav");
+ // The file is longer than this, but keeping the output shorter limits the
+ // runtime for the test.
+ const int kOutputLengthMs = 10000;
+ const std::string kRefChecksum = "c74e7fd432d439b1311e1c16815b3e9a";
+
+ PlayFileAndCheck(kFileName, kRefChecksum, kOutputLengthMs);
+}
+
+TEST_F(FilePlayerTest, PlayWavPcm16File) {
+ const std::string kFileName =
+ test::ResourcePath("utility/encapsulated_pcm16b_8khz", "wav");
+ // The file is longer than this, but keeping the output shorter limits the
+ // runtime for the test.
+ const int kOutputLengthMs = 10000;
+ const std::string kRefChecksum = "e41d7e1dac8aeae9f21e8e03cd7ecd71";
+
+ PlayFileAndCheck(kFileName, kRefChecksum, kOutputLengthMs);
+}
+
+} // namespace webrtc
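The checksum pattern used above (rtc::Md5Digest plus rtc::hex_encode) generalizes into a small helper; a sketch built only from the calls the test already exercises:

    #include <stddef.h>
    #include <string>

    #include "webrtc/base/md5digest.h"
    #include "webrtc/base/stringencode.h"

    // Returns the hex-encoded MD5 of |length| bytes at |data|.
    std::string Md5Hex(const void* data, size_t length) {
      rtc::Md5Digest digest;
      digest.Update(data, length);
      char raw[rtc::Md5Digest::kSize];
      digest.Finish(raw, sizeof(raw));
      return rtc::hex_encode(raw, sizeof(raw));
    }

Running the test with --file_player_output additionally writes the decoded audio to file_player_unittest_out.pcm under the test output path, which is useful when a resource file (and hence its reference checksum) changes.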
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
index 16faa58d3fc..264b867a259 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
@@ -12,7 +12,7 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/media_file/interface/media_file.h"
#include "webrtc/modules/utility/source/file_recorder_impl.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "critical_section_wrapper.h"
@@ -38,8 +38,6 @@ FileRecorder* FileRecorder::CreateFileRecorder(uint32_t instanceID,
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
return new AviRecorder(instanceID, fileFormat);
#else
- WEBRTC_TRACE(kTraceError, kTraceFile, -1,
- "Invalid file format: %d", kFileFormatAviFile);
assert(false);
return NULL;
#endif
@@ -115,13 +113,8 @@ int32_t FileRecorderImpl::StartRecordingAudioFile(
}
if( retVal != 0)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FileRecorder::StartRecording() failed to initialize file %s for\
- recording.",
- fileName);
+ LOG(LS_WARNING) << "Failed to initialize file " << fileName
+ << " for recording.";
if(IsRecording())
{
@@ -152,12 +145,7 @@ int32_t FileRecorderImpl::StartRecordingAudioFile(
}
if( retVal != 0)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FileRecorder::StartRecording() failed to initialize outStream for\
- recording.");
+ LOG(LS_WARNING) << "Failed to initialize outStream for recording.";
if(IsRecording())
{
@@ -184,12 +172,8 @@ int32_t FileRecorderImpl::RecordAudioToFile(
{
if (codec_info_.plfreq == 0)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FileRecorder::RecordAudioToFile() recording audio is not turned\
- on");
+ LOG(LS_WARNING) << "RecordAudioToFile() recording audio is not "
+ << "turned on.";
return -1;
}
AudioFrame tempAudioFrame;
@@ -250,13 +234,9 @@ int32_t FileRecorderImpl::RecordAudioToFile(
if (_audioEncoder.Encode(*ptrAudioFrame, _audioBuffer,
encodedLenInBytes) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FileRecorder::RecordAudioToFile() codec %s not supported or\
- failed to encode stream",
- codec_info_.plname);
+ LOG(LS_WARNING) << "RecordAudioToFile() codec "
+ << codec_info_.plname
+ << " not supported or failed to encode stream.";
return -1;
}
} else {
@@ -309,12 +289,8 @@ int32_t FileRecorderImpl::SetUpAudioEncoder()
{
if(_audioEncoder.SetEncodeCodec(codec_info_,_amrFormat) == -1)
{
- WEBRTC_TRACE(
- kTraceError,
- kTraceVoice,
- _instanceID,
- "FileRecorder::StartRecording() codec %s not supported",
- codec_info_.plname);
+ LOG(LS_ERROR) << "SetUpAudioEncoder() codec "
+ << codec_info_.plname << " not supported.";
return -1;
}
}
@@ -342,31 +318,6 @@ int32_t FileRecorderImpl::WriteEncodedAudioData(
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-class AudioFrameFileInfo
-{
- public:
- AudioFrameFileInfo(const int8_t* audioData,
- const uint16_t audioSize,
- const uint16_t audioMS,
- const TickTime& playoutTS)
- : _audioData(), _audioSize(audioSize), _audioMS(audioMS),
- _playoutTS(playoutTS)
- {
- if(audioSize > MAX_AUDIO_BUFFER_IN_BYTES)
- {
- assert(false);
- _audioSize = 0;
- return;
- }
- memcpy(_audioData, audioData, audioSize);
- };
- // TODO (hellner): either turn into a struct or provide get/set functions.
- int8_t _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
- uint16_t _audioSize;
- uint16_t _audioMS;
- TickTime _playoutTS;
-};
-
AviRecorder::AviRecorder(uint32_t instanceID, FileFormats fileFormat)
: FileRecorderImpl(instanceID, fileFormat),
_videoOnly(false),
@@ -377,7 +328,7 @@ AviRecorder::AviRecorder(uint32_t instanceID, FileFormats fileFormat)
_writtenAudioMS(0),
_writtenVideoMS(0)
{
- _videoEncoder = new VideoCoder(instanceID);
+ _videoEncoder = new VideoCoder();
_frameScaler = new FrameScaler();
_videoFramesQueue = new VideoFramesQueue();
_thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
@@ -545,49 +496,39 @@ int32_t AviRecorder::ProcessAudio()
{
// Syncronize audio to the current frame to process by throwing away
// audio samples with older timestamp than the video frame.
- uint32_t numberOfAudioElements =
- _audioFramesToWrite.GetSize();
- for (uint32_t i = 0; i < numberOfAudioElements; ++i)
+ size_t numberOfAudioElements =
+ _audioFramesToWrite.size();
+ for (size_t i = 0; i < numberOfAudioElements; ++i)
{
- AudioFrameFileInfo* frameInfo =
- (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
- if(frameInfo)
+ AudioFrameFileInfo* frameInfo = _audioFramesToWrite.front();
+ if(TickTime::TicksToMilliseconds(
+ frameInfo->_playoutTS.Ticks()) <
+ frameToProcess->render_time_ms())
+ {
+ delete frameInfo;
+ _audioFramesToWrite.pop_front();
+ } else
{
- if(TickTime::TicksToMilliseconds(
- frameInfo->_playoutTS.Ticks()) <
- frameToProcess->render_time_ms())
- {
- delete frameInfo;
- _audioFramesToWrite.PopFront();
- } else
- {
- break;
- }
+ break;
}
}
}
}
// Write all audio up to current timestamp.
int32_t error = 0;
- uint32_t numberOfAudioElements = _audioFramesToWrite.GetSize();
- for (uint32_t i = 0; i < numberOfAudioElements; ++i)
+ size_t numberOfAudioElements = _audioFramesToWrite.size();
+ for (size_t i = 0; i < numberOfAudioElements; ++i)
{
- AudioFrameFileInfo* frameInfo =
- (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
- if(frameInfo)
+ AudioFrameFileInfo* frameInfo = _audioFramesToWrite.front();
+ if((TickTime::Now() - frameInfo->_playoutTS).Milliseconds() > 0)
{
- if((TickTime::Now() - frameInfo->_playoutTS).Milliseconds() > 0)
- {
- _moduleFile->IncomingAudioData(frameInfo->_audioData,
- frameInfo->_audioSize);
- _writtenAudioMS += frameInfo->_audioMS;
- delete frameInfo;
- _audioFramesToWrite.PopFront();
- } else {
- break;
- }
+ _moduleFile->IncomingAudioData(frameInfo->_audioData,
+ frameInfo->_audioSize);
+ _writtenAudioMS += frameInfo->_audioMS;
+ delete frameInfo;
+ _audioFramesToWrite.pop_front();
} else {
- _audioFramesToWrite.PopFront();
+ break;
}
}
return error;
@@ -635,8 +576,8 @@ bool AviRecorder::Process()
error = EncodeAndWriteVideoToFile( *frameToProcess);
if( error != 0)
{
- WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
- "AviRecorder::Process() error writing to file.");
+ LOG(LS_ERROR) << "AviRecorder::Process() error writing to "
+ << "file.";
break;
} else {
uint32_t frameLengthMS = 1000 /
@@ -675,8 +616,7 @@ bool AviRecorder::Process()
error = EncodeAndWriteVideoToFile( *frameToProcess);
if(error != 0)
{
- WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
- "AviRecorder::Process() error writing to file.");
+ LOG(LS_ERROR) << "AviRecorder::Process() error writing to file.";
} else {
_writtenVideoMS += frameLengthMS;
}
@@ -727,17 +667,12 @@ int32_t AviRecorder::EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame)
(int8_t*)(_videoEncodedData.payloadData),
_videoEncodedData.payloadSize))
{
- WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
- "Error writing AVI file");
+ LOG(LS_ERROR) << "Error writing AVI file.";
return -1;
}
} else {
- WEBRTC_TRACE(
- kTraceError,
- kTraceVideo,
- _instanceID,
- "FileRecorder::RecordVideoToFile() frame dropped by encoder bitrate\
- likely to low.");
+ LOG(LS_ERROR) << "FileRecorder::RecordVideoToFile() frame dropped by "
+ << "encoder, bitrate likely too low.";
}
return 0;
}
@@ -750,6 +685,8 @@ int32_t AviRecorder::WriteEncodedAudioData(
uint16_t millisecondsOfData,
const TickTime* playoutTS)
{
+ CriticalSectionScoped lock(_critSec);
+
if (!IsRecording())
{
return -1;
@@ -762,7 +699,7 @@ int32_t AviRecorder::WriteEncodedAudioData(
{
return -1;
}
- if (_audioFramesToWrite.GetSize() > kMaxAudioBufferQueueLength)
+ if (_audioFramesToWrite.size() > kMaxAudioBufferQueueLength)
{
StopRecording();
return -1;
@@ -771,15 +708,15 @@ int32_t AviRecorder::WriteEncodedAudioData(
if(playoutTS)
{
- _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer,
- bufferLength,
- millisecondsOfData,
- *playoutTS));
+ _audioFramesToWrite.push_back(new AudioFrameFileInfo(audioBuffer,
+ bufferLength,
+ millisecondsOfData,
+ *playoutTS));
} else {
- _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer,
- bufferLength,
- millisecondsOfData,
- TickTime::Now()));
+ _audioFramesToWrite.push_back(new AudioFrameFileInfo(audioBuffer,
+ bufferLength,
+ millisecondsOfData,
+ TickTime::Now()));
}
_timeEvent.Set();
return 0;
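With ListWrapper gone, _audioFramesToWrite is a std::list of owned AudioFrameFileInfo pointers, so every pop_front() has to be paired with a delete. The drain in ProcessAudio() reduces to the following shape (a simplified sketch with the media-file write stubbed out; Frame stands in for AudioFrameFileInfo):

    #include <stdint.h>
    #include <list>

    struct Frame { int64_t playout_ms; };  // Stand-in for AudioFrameFileInfo.

    // Writes (and frees) every queued frame whose playout time has passed,
    // stopping at the first frame that is still in the future (the list is
    // ordered oldest first, so everything after it is even newer).
    void DrainDueFrames(std::list<Frame*>* queue, int64_t now_ms) {
      while (!queue->empty()) {
        Frame* front = queue->front();
        if (front->playout_ms >= now_ms)
          break;
        // ... hand front's audio to the file module here ...
        delete front;
        queue->pop_front();
      }
    }

The real code keeps the original for-loop over size() rather than a while-loop, but the ownership rule is the same: delete the front element before popping it.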
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
index 0b7290eddce..53fd26bc25d 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
+++ b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
@@ -15,6 +15,8 @@
#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
#define WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
+#include <list>
+
#include "webrtc/common_audio/resampler/include/resampler.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
@@ -40,6 +42,8 @@ enum { MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
enum { MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2};
enum { kMaxAudioBufferQueueLength = 100 };
+class CriticalSectionWrapper;
+
class FileRecorderImpl : public FileRecorder
{
public:
@@ -103,6 +107,31 @@ private:
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+class AudioFrameFileInfo
+{
+ public:
+ AudioFrameFileInfo(const int8_t* audioData,
+ const uint16_t audioSize,
+ const uint16_t audioMS,
+ const TickTime& playoutTS)
+ : _audioData(), _audioSize(audioSize), _audioMS(audioMS),
+ _playoutTS(playoutTS)
+ {
+ if(audioSize > MAX_AUDIO_BUFFER_IN_BYTES)
+ {
+ assert(false);
+ _audioSize = 0;
+ return;
+ }
+ memcpy(_audioData, audioData, audioSize);
+ };
+ // TODO (hellner): either turn into a struct or provide get/set functions.
+ int8_t _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
+ uint16_t _audioSize;
+ uint16_t _audioMS;
+ TickTime _playoutTS;
+};
+
class AviRecorder : public FileRecorderImpl
{
public:
@@ -126,6 +155,7 @@ protected:
uint16_t millisecondsOfData,
const TickTime* playoutTS);
private:
+ typedef std::list<AudioFrameFileInfo*> AudioInfoList;
static bool Run(ThreadObj threadObj);
bool Process();
@@ -141,7 +171,7 @@ private:
VideoCodec _videoCodecInst;
bool _videoOnly;
- ListWrapper _audioFramesToWrite;
+ AudioInfoList _audioFramesToWrite;
bool _firstAudioFrameReceived;
VideoFramesQueue* _videoFramesQueue;
diff --git a/chromium/third_party/webrtc/modules/utility/source/frame_scaler.cc b/chromium/third_party/webrtc/modules/utility/source/frame_scaler.cc
index ed127a6715a..50ccf8adc67 100644
--- a/chromium/third_party/webrtc/modules/utility/source/frame_scaler.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/frame_scaler.cc
@@ -13,7 +13,6 @@
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "webrtc/common_video/libyuv/include/scaler.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
index 08979d24935..bf7db3bc846 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
@@ -10,7 +10,7 @@
#include "webrtc/modules/interface/module.h"
#include "webrtc/modules/utility/source/process_thread_impl.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+
namespace webrtc {
ProcessThread::~ProcessThread()
@@ -32,14 +32,12 @@ ProcessThreadImpl::ProcessThreadImpl()
_critSectModules(CriticalSectionWrapper::CreateCriticalSection()),
_thread(NULL)
{
- WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s created", __FUNCTION__);
}
ProcessThreadImpl::~ProcessThreadImpl()
{
delete _critSectModules;
delete &_timeEvent;
- WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
}
int32_t ProcessThreadImpl::Start()
@@ -87,25 +85,21 @@ int32_t ProcessThreadImpl::Stop()
return 0;
}
-int32_t ProcessThreadImpl::RegisterModule(const Module* module)
+int32_t ProcessThreadImpl::RegisterModule(Module* module)
{
CriticalSectionScoped lock(_critSectModules);
// Only allow module to be registered once.
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- if(module == item->GetItem())
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ if(module == *iter)
{
return -1;
}
- item = _modules.Next(item);
}
- _modules.PushFront(module);
- WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
- "number of registered modules has increased to %d",
- _modules.GetSize());
+ _modules.push_front(module);
+
// Wake the thread calling ProcessThreadImpl::Process() to update the
// waiting time. The waiting time for the just registered module may be
// shorter than all other registered modules.
@@ -116,19 +110,13 @@ int32_t ProcessThreadImpl::RegisterModule(const Module* module)
int32_t ProcessThreadImpl::DeRegisterModule(const Module* module)
{
CriticalSectionScoped lock(_critSectModules);
-
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- if(module == item->GetItem())
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ if(module == *iter)
{
- int res = _modules.Erase(item);
- WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
- "number of registered modules has decreased to %d",
- _modules.GetSize());
- return res;
+ _modules.erase(iter);
+ return 0;
}
- item = _modules.Next(item);
}
return -1;
}
@@ -145,16 +133,13 @@ bool ProcessThreadImpl::Process()
int32_t minTimeToNext = 100;
{
CriticalSectionScoped lock(_critSectModules);
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- int32_t timeToNext =
- static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ int32_t timeToNext = (*iter)->TimeUntilNextProcess();
if(minTimeToNext > timeToNext)
{
minTimeToNext = timeToNext;
}
- item = _modules.Next(item);
}
}
@@ -172,16 +157,13 @@ bool ProcessThreadImpl::Process()
}
{
CriticalSectionScoped lock(_critSectModules);
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- int32_t timeToNext =
- static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ int32_t timeToNext = (*iter)->TimeUntilNextProcess();
if(timeToNext < 1)
{
- static_cast<Module*>(item->GetItem())->Process();
+ (*iter)->Process();
}
- item = _modules.Next(item);
}
}
return true;
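With the module list now a std::list<Module*>, the two passes in Process() are plain iterator loops: the first computes how long to sleep (capped at 100 ms), the second runs every module that is due. A condensed sketch of that scheduling shape (Module is reduced to the two methods the loop needs, and the event wait is abstracted into a callback):

    #include <stdint.h>
    #include <algorithm>
    #include <list>

    class Module {
     public:
      virtual ~Module() {}
      virtual int32_t TimeUntilNextProcess() = 0;
      virtual int32_t Process() = 0;
    };

    // One iteration of the process-thread loop body.
    void ProcessOnce(std::list<Module*>* modules, void (*wait_ms)(int32_t)) {
      int32_t min_time_to_next = 100;  // Upper bound on the wait.
      for (std::list<Module*>::iterator it = modules->begin();
           it != modules->end(); ++it) {
        min_time_to_next =
            std::min(min_time_to_next, (*it)->TimeUntilNextProcess());
      }
      if (min_time_to_next > 0)
        wait_ms(min_time_to_next);
      for (std::list<Module*>::iterator it = modules->begin();
           it != modules->end(); ++it) {
        if ((*it)->TimeUntilNextProcess() < 1)
          (*it)->Process();
      }
    }

The implementation above additionally takes _critSectModules around each pass and blocks on _timeEvent for the wait, which is why RegisterModule() sets the event to wake the thread when a newly added module may need servicing sooner.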
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
index d1913c47c49..14fbc18a2a5 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
@@ -11,10 +11,11 @@
#ifndef WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
#define WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
+#include <list>
+
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/typedefs.h"
@@ -28,7 +29,7 @@ public:
virtual int32_t Start();
virtual int32_t Stop();
- virtual int32_t RegisterModule(const Module* module);
+ virtual int32_t RegisterModule(Module* module);
virtual int32_t DeRegisterModule(const Module* module);
protected:
@@ -37,9 +38,10 @@ protected:
bool Process();
private:
+ typedef std::list<Module*> ModuleList;
EventWrapper& _timeEvent;
CriticalSectionWrapper* _critSectModules;
- ListWrapper _modules;
+ ModuleList _modules;
ThreadWrapper* _thread;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/utility/source/rtp_dump_impl.cc b/chromium/third_party/webrtc/modules/utility/source/rtp_dump_impl.cc
index 39316f47858..547df332f9f 100644
--- a/chromium/third_party/webrtc/modules/utility/source/rtp_dump_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/rtp_dump_impl.cc
@@ -14,7 +14,7 @@
#include <stdio.h>
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#if defined(_WIN32)
#include <Windows.h>
@@ -71,7 +71,6 @@ RtpDumpImpl::RtpDumpImpl()
_file(*FileWrapper::Create()),
_startTime(0)
{
- WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s created", __FUNCTION__);
}
RtpDump::~RtpDump()
@@ -84,7 +83,6 @@ RtpDumpImpl::~RtpDumpImpl()
_file.CloseFile();
delete &_file;
delete _critSect;
- WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
}
int32_t RtpDumpImpl::Start(const char* fileNameUTF8)
@@ -100,8 +98,7 @@ int32_t RtpDumpImpl::Start(const char* fileNameUTF8)
_file.CloseFile();
if (_file.OpenFile(fileNameUTF8, false, false, false) == -1)
{
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "failed to open the specified file");
+ LOG(LS_ERROR) << "Failed to open file.";
return -1;
}
@@ -113,8 +110,7 @@ int32_t RtpDumpImpl::Start(const char* fileNameUTF8)
sprintf(magic, "#!rtpplay%s \n", RTPFILE_VERSION);
if (_file.WriteText(magic) == -1)
{
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "error writing to file");
+ LOG(LS_ERROR) << "Error writing to file.";
return -1;
}
@@ -129,8 +125,7 @@ int32_t RtpDumpImpl::Start(const char* fileNameUTF8)
memset(dummyHdr, 0, 16);
if (!_file.Write(dummyHdr, sizeof(dummyHdr)))
{
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "error writing to file");
+ LOG(LS_ERROR) << "Error writing to file.";
return -1;
}
return 0;
@@ -198,14 +193,12 @@ int32_t RtpDumpImpl::DumpPacket(const uint8_t* packet, uint16_t packetLength)
if (!_file.Write(&hdr, sizeof(hdr)))
{
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "error writing to file");
+ LOG(LS_ERROR) << "Error writing to file.";
return -1;
}
if (!_file.Write(packet, packetLength))
{
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "error writing to file");
+ LOG(LS_ERROR) << "Error writing to file.";
return -1;
}
diff --git a/chromium/third_party/webrtc/modules/utility/source/video_coder.cc b/chromium/third_party/webrtc/modules/utility/source/video_coder.cc
index 267ed810489..5096acecd25 100644
--- a/chromium/third_party/webrtc/modules/utility/source/video_coder.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/video_coder.cc
@@ -13,10 +13,7 @@
#include "webrtc/modules/utility/source/video_coder.h"
namespace webrtc {
-VideoCoder::VideoCoder(uint32_t instanceID)
- : _vcm(VideoCodingModule::Create(instanceID)),
- _decodedVideo(0)
-{
+VideoCoder::VideoCoder() : _vcm(VideoCodingModule::Create()), _decodedVideo(0) {
_vcm->InitializeSender();
_vcm->InitializeReceiver();
diff --git a/chromium/third_party/webrtc/modules/utility/source/video_coder.h b/chromium/third_party/webrtc/modules/utility/source/video_coder.h
index cb8bfa5a182..8e4344be16b 100644
--- a/chromium/third_party/webrtc/modules/utility/source/video_coder.h
+++ b/chromium/third_party/webrtc/modules/utility/source/video_coder.h
@@ -20,7 +20,7 @@ namespace webrtc {
class VideoCoder : public VCMPacketizationCallback, public VCMReceiveCallback
{
public:
- VideoCoder(uint32_t instanceID);
+ VideoCoder();
~VideoCoder();
int32_t SetEncodeCodec(VideoCodec& videoCodecInst,
diff --git a/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.cc b/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.cc
index d3d37bec2d0..9ade8b51a49 100644
--- a/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.cc
@@ -16,80 +16,48 @@
#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
VideoFramesQueue::VideoFramesQueue()
- : _incomingFrames(),
- _renderDelayMs(10)
+ : _renderDelayMs(10)
{
}
VideoFramesQueue::~VideoFramesQueue() {
- while (!_incomingFrames.Empty()) {
- ListItem* item = _incomingFrames.First();
- if (item) {
- I420VideoFrame* ptrFrame = static_cast<I420VideoFrame*>(item->GetItem());
- assert(ptrFrame != NULL);
- delete ptrFrame;
- }
- _incomingFrames.Erase(item);
+ for (FrameList::iterator iter = _incomingFrames.begin();
+ iter != _incomingFrames.end(); ++iter) {
+ delete *iter;
}
- while (!_emptyFrames.Empty()) {
- ListItem* item = _emptyFrames.First();
- if (item) {
- I420VideoFrame* ptrFrame =
- static_cast<I420VideoFrame*>(item->GetItem());
- assert(ptrFrame != NULL);
- delete ptrFrame;
- }
- _emptyFrames.Erase(item);
+ for (FrameList::iterator iter = _emptyFrames.begin();
+ iter != _emptyFrames.end(); ++iter) {
+ delete *iter;
}
}
int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
if (newFrame.native_handle() != NULL) {
- _incomingFrames.PushBack(new TextureVideoFrame(
- static_cast<NativeHandle*>(newFrame.native_handle()),
- newFrame.width(),
- newFrame.height(),
- newFrame.timestamp(),
- newFrame.render_time_ms()));
+ _incomingFrames.push_back(newFrame.CloneFrame());
return 0;
}
I420VideoFrame* ptrFrameToAdd = NULL;
// Try to re-use a VideoFrame. Only allocate new memory if it is necessary.
- if (!_emptyFrames.Empty()) {
- ListItem* item = _emptyFrames.First();
- if (item) {
- ptrFrameToAdd = static_cast<I420VideoFrame*>(item->GetItem());
- _emptyFrames.Erase(item);
- }
+ if (!_emptyFrames.empty()) {
+ ptrFrameToAdd = _emptyFrames.front();
+ _emptyFrames.pop_front();
}
if (!ptrFrameToAdd) {
- if (_emptyFrames.GetSize() + _incomingFrames.GetSize() >
+ if (_emptyFrames.size() + _incomingFrames.size() >
KMaxNumberOfFrames) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
- "%s: too many frames, limit: %d", __FUNCTION__,
- KMaxNumberOfFrames);
+ LOG(LS_WARNING) << "Too many frames, limit: " << KMaxNumberOfFrames;
return -1;
}
-
- WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
- "%s: allocating buffer %d", __FUNCTION__,
- _emptyFrames.GetSize() + _incomingFrames.GetSize());
-
ptrFrameToAdd = new I420VideoFrame();
- if (!ptrFrameToAdd) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "%s: could not create new frame for", __FUNCTION__);
- return -1;
- }
}
ptrFrameToAdd->CopyFrame(newFrame);
- _incomingFrames.PushBack(ptrFrameToAdd);
+ _incomingFrames.push_back(ptrFrameToAdd);
return 0;
}
@@ -99,20 +67,18 @@ int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
// Recycle all frames that are older than the most recent frame.
I420VideoFrame* VideoFramesQueue::FrameToRecord() {
I420VideoFrame* ptrRenderFrame = NULL;
- ListItem* item = _incomingFrames.First();
- while(item) {
- I420VideoFrame* ptrOldestFrameInList =
- static_cast<I420VideoFrame*>(item->GetItem());
+ for (FrameList::iterator iter = _incomingFrames.begin();
+ iter != _incomingFrames.end(); ++iter) {
+ I420VideoFrame* ptrOldestFrameInList = *iter;
if (ptrOldestFrameInList->render_time_ms() <=
TickTime::MillisecondTimestamp() + _renderDelayMs) {
+ // List is traversed beginning to end. If ptrRenderFrame is not
+ // NULL it must be the first, and thus oldest, VideoFrame in the
+ // queue. It can be recycled.
if (ptrRenderFrame) {
- // List is traversed beginning to end. If ptrRenderFrame is not
- // NULL it must be the first, and thus oldest, VideoFrame in the
- // queue. It can be recycled.
ReturnFrame(ptrRenderFrame);
- _incomingFrames.PopFront();
+ _incomingFrames.pop_front();
}
- item = _incomingFrames.Next(item);
ptrRenderFrame = ptrOldestFrameInList;
} else {
// All VideoFrames following this one will be even newer. No match
@@ -131,7 +97,7 @@ int32_t VideoFramesQueue::ReturnFrame(I420VideoFrame* ptrOldFrame) {
ptrOldFrame->set_height(0);
ptrOldFrame->set_render_time_ms(0);
ptrOldFrame->ResetSize();
- _emptyFrames.PushBack(ptrOldFrame);
+ _emptyFrames.push_back(ptrOldFrame);
} else {
delete ptrOldFrame;
}
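The recycling scheme survives the container switch: frames are taken from _emptyFrames when available, a new I420VideoFrame is only allocated while the combined size stays under KMaxNumberOfFrames, and ReturnFrame() parks used frames back on the free list. Reduced to its essentials (an illustrative template, not the class itself):

    #include <stddef.h>
    #include <list>

    template <typename FrameT>
    class RecyclingPool {
     public:
      ~RecyclingPool() {
        for (typename std::list<FrameT*>::iterator it = free_.begin();
             it != free_.end(); ++it) {
          delete *it;
        }
      }
      // Returns a parked frame if one exists, a new frame while the total
      // stays under |max_frames|, and NULL otherwise.
      FrameT* Take(size_t frames_in_use, size_t max_frames) {
        if (!free_.empty()) {
          FrameT* frame = free_.front();
          free_.pop_front();
          return frame;
        }
        if (free_.size() + frames_in_use > max_frames)
          return NULL;
        return new FrameT();
      }
      void Return(FrameT* frame) { free_.push_back(frame); }

     private:
      std::list<FrameT*> free_;
    };

One behavioral change in the hunk above is worth noting separately: texture frames are now added via newFrame.CloneFrame() instead of constructing a TextureVideoFrame by hand.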
diff --git a/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.h b/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.h
index 4316bf7c047..afc64d9b71e 100644
--- a/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.h
+++ b/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.h
@@ -13,9 +13,10 @@
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+#include <list>
+
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/engine_configurations.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -42,6 +43,7 @@ class VideoFramesQueue {
int32_t ReturnFrame(I420VideoFrame* ptrOldFrame);
private:
+ typedef std::list<I420VideoFrame*> FrameList;
// Don't allow the buffer to expand beyond KMaxNumberOfFrames VideoFrames.
// 300 frames correspond to 10 seconds worth of frames at 30 fps.
enum {KMaxNumberOfFrames = 300};
@@ -49,9 +51,9 @@ class VideoFramesQueue {
// List of VideoFrame pointers. The list is sorted in the order of when the
// VideoFrame was inserted into the list. The first VideoFrame in the list
// was inserted first.
- ListWrapper _incomingFrames;
+ FrameList _incomingFrames;
// A list of frames that are free to be re-used.
- ListWrapper _emptyFrames;
+ FrameList _emptyFrames;
// Estimated render delay.
uint32_t _renderDelayMs;
diff --git a/chromium/third_party/webrtc/modules/video_capture/OWNERS b/chromium/third_party/webrtc/modules/video_capture/OWNERS
index 3b02126feaa..fdc2a3ff702 100644
--- a/chromium/third_party/webrtc/modules/video_capture/OWNERS
+++ b/chromium/third_party/webrtc/modules/video_capture/OWNERS
@@ -1,5 +1,13 @@
fischman@webrtc.org
+glaznev@webrtc.org
mallinath@webrtc.org
mflodman@webrtc.org
perkj@webrtc.org
wu@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.cc b/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.cc
index 10c277eeb7a..4a80fe27229 100644
--- a/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.cc
@@ -21,36 +21,40 @@
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h"
-namespace webrtc
-{
+namespace webrtc {
-namespace videocapturemodule
-{
+namespace videocapturemodule {
-static std::string ResolutionsToString(
- const std::vector<std::pair<int, int> >& pairs) {
+// Helper for storing lists of pairs of ints. Used e.g. for resolutions & FPS
+// ranges.
+typedef std::pair<int, int> IntPair;
+typedef std::vector<IntPair> IntPairs;
+
+static std::string IntPairsToString(const IntPairs& pairs, char separator) {
std::stringstream stream;
for (size_t i = 0; i < pairs.size(); ++i) {
if (i > 0)
stream << ", ";
- stream << "(" << pairs[i].first << "x" << pairs[i].second << ")";
+ stream << "(" << pairs[i].first << separator << pairs[i].second << ")";
}
return stream.str();
}
struct AndroidCameraInfo {
std::string name;
- int min_mfps, max_mfps; // FPS*1000.
bool front_facing;
int orientation;
- std::vector<std::pair<int, int> > resolutions; // Pairs are: (width,height).
+ IntPairs resolutions; // Pairs are: (width,height).
+ // Pairs are (min,max) in units of FPS*1000 ("milli-frame-per-second").
+ IntPairs mfpsRanges;
std::string ToString() {
std::stringstream stream;
- stream << "Name: [" << name << "], mfps: [" << min_mfps << ":" << max_mfps
+ stream << "Name: [" << name << "], MFPS ranges: ["
+ << IntPairsToString(mfpsRanges, ':')
<< "], front_facing: " << front_facing
<< ", orientation: " << orientation << ", resolutions: ["
- << ResolutionsToString(resolutions) << "]";
+ << IntPairsToString(resolutions, 'x') << "]";
return stream.str();
}
};
@@ -120,8 +124,6 @@ void DeviceInfoAndroid::Initialize(JNIEnv* jni) {
const Json::Value& camera = cameras[i];
AndroidCameraInfo info;
info.name = camera["name"].asString();
- info.min_mfps = camera["min_mfps"].asInt();
- info.max_mfps = camera["max_mfps"].asInt();
info.front_facing = camera["front_facing"].asBool();
info.orientation = camera["orientation"].asInt();
Json::Value sizes = camera["sizes"];
@@ -130,10 +132,23 @@ void DeviceInfoAndroid::Initialize(JNIEnv* jni) {
info.resolutions.push_back(std::make_pair(
size["width"].asInt(), size["height"].asInt()));
}
+ Json::Value mfpsRanges = camera["mfpsRanges"];
+ for (Json::ArrayIndex j = 0; j < mfpsRanges.size(); ++j) {
+ const Json::Value& mfpsRange = mfpsRanges[j];
+ info.mfpsRanges.push_back(std::make_pair(mfpsRange["min_mfps"].asInt(),
+ mfpsRange["max_mfps"].asInt()));
+ }
g_camera_info->push_back(info);
}
}
+void DeviceInfoAndroid::DeInitialize() {
+ if (g_camera_info) {
+ delete g_camera_info;
+ g_camera_info = NULL;
+ }
+}
+
VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
const int32_t id) {
return new videocapturemodule::DeviceInfoAndroid(id);
@@ -187,14 +202,17 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
return -1;
for (size_t i = 0; i < info->resolutions.size(); ++i) {
- const std::pair<int, int>& size = info->resolutions[i];
- VideoCaptureCapability cap;
- cap.width = size.first;
- cap.height = size.second;
- cap.maxFPS = info->max_mfps / 1000;
- cap.expectedCaptureDelay = kExpectedCaptureDelay;
- cap.rawType = kVideoNV21;
- _captureCapabilities.push_back(cap);
+ for (size_t j = 0; j < info->mfpsRanges.size(); ++j) {
+ const IntPair& size = info->resolutions[i];
+ const IntPair& mfpsRange = info->mfpsRanges[j];
+ VideoCaptureCapability cap;
+ cap.width = size.first;
+ cap.height = size.second;
+ cap.maxFPS = mfpsRange.second / 1000;
+ cap.expectedCaptureDelay = kExpectedCaptureDelay;
+ cap.rawType = kVideoNV21;
+ _captureCapabilities.push_back(cap);
+ }
}
return _captureCapabilities.size();
}
@@ -210,13 +228,22 @@ int32_t DeviceInfoAndroid::GetOrientation(
return 0;
}
-void DeviceInfoAndroid::GetFpsRange(const char* deviceUniqueIdUTF8,
- int* min_mfps, int* max_mfps) {
+void DeviceInfoAndroid::GetMFpsRange(const char* deviceUniqueIdUTF8,
+ int max_fps_to_match,
+ int* min_mfps, int* max_mfps) {
const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
if (info == NULL)
return;
- *min_mfps = info->min_mfps;
- *max_mfps = info->max_mfps;
+ // Rely on CameraParameters.getSupportedPreviewFpsRange() to sort its return
+ // value (per its documentation) and return the first (most flexible) range
+ // whose high end is at least as high as that requested.
+ for (size_t i = 0; i < info->mfpsRanges.size(); ++i) {
+ if (info->mfpsRanges[i].second / 1000 >= max_fps_to_match) {
+ *min_mfps = info->mfpsRanges[i].first;
+ *max_mfps = info->mfpsRanges[i].second;
+ return;
+ }
+ }
}
} // namespace videocapturemodule
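GetMFpsRange() now selects a whole (min, max) range instead of a single device-wide pair: the ranges come back from CameraParameters.getSupportedPreviewFpsRange() already sorted, and the first range whose upper bound reaches the requested frame rate wins. A standalone sketch of that selection (IntPair as defined above; units are FPS*1000):

    #include <stddef.h>
    #include <utility>
    #include <vector>

    typedef std::pair<int, int> IntPair;  // (min_mfps, max_mfps).

    // Returns true and fills |out| with the first (most flexible) range able
    // to reach |max_fps_to_match|; returns false and leaves |out| untouched
    // when no range qualifies, mirroring the early returns above.
    bool PickMfpsRange(const std::vector<IntPair>& ranges,
                       int max_fps_to_match,
                       IntPair* out) {
      for (size_t i = 0; i < ranges.size(); ++i) {
        if (ranges[i].second / 1000 >= max_fps_to_match) {
          *out = ranges[i];
          return true;
        }
      }
      return false;
    }

CreateCapabilityMap() changes accordingly: it now reports one capability per (resolution, fps-range) pair rather than one per resolution, with maxFPS taken from each range's upper bound.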
diff --git a/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.h b/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.h
index d277113e5e1..542cbba0881 100644
--- a/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.h
+++ b/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.h
@@ -24,6 +24,7 @@ namespace videocapturemodule
class DeviceInfoAndroid : public DeviceInfoImpl {
public:
static void Initialize(JNIEnv* env);
+ static void DeInitialize();
DeviceInfoAndroid(int32_t id);
virtual ~DeviceInfoAndroid();
@@ -53,10 +54,12 @@ class DeviceInfoAndroid : public DeviceInfoImpl {
virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
VideoCaptureRotation& orientation);
- // Populate |min_mfps| and |max_mfps| with the supported range of the device.
- void GetFpsRange(const char* deviceUniqueIdUTF8,
- int* min_mfps,
- int* max_mfps);
+ // Populate |min_mfps| and |max_mfps| with the closest supported range of the
+ // device to |max_fps_to_match|.
+ void GetMFpsRange(const char* deviceUniqueIdUTF8,
+ int max_fps_to_match,
+ int* min_mfps,
+ int* max_mfps);
private:
enum { kExpectedCaptureDelay = 190};
diff --git a/chromium/third_party/webrtc/modules/video_capture/android/video_capture_android.cc b/chromium/third_party/webrtc/modules/video_capture/android/video_capture_android.cc
index 2b6d60644f5..c9aa52ce7f1 100644
--- a/chromium/third_party/webrtc/modules/video_capture/android/video_capture_android.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/android/video_capture_android.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/video_capture/android/video_capture_android.h"
+#include "webrtc/base/common.h"
#include "webrtc/modules/utility/interface/helpers_android.h"
#include "webrtc/modules/video_capture/android/device_info_android.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -19,9 +20,16 @@
static JavaVM* g_jvm = NULL;
static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class.
+static jobject g_context = NULL; // Owned android.content.Context.
namespace webrtc {
+// Called by Java to get the global application context.
+jobject JNICALL GetContext(JNIEnv* env, jclass) {
+ assert(g_context);
+ return g_context;
+}
+
// Called by Java when the camera has a new frame to deliver.
void JNICALL ProvideCameraFrame(
JNIEnv* env,
@@ -38,25 +46,67 @@ void JNICALL ProvideCameraFrame(
env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
}
-int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
- g_jvm = javaVM;
- AttachThreadScoped ats(g_jvm);
-
- videocapturemodule::DeviceInfoAndroid::Initialize(ats.env());
-
- jclass j_capture_class =
- ats.env()->FindClass("org/webrtc/videoengine/VideoCaptureAndroid");
- assert(j_capture_class);
- g_java_capturer_class =
- reinterpret_cast<jclass>(ats.env()->NewGlobalRef(j_capture_class));
- assert(g_java_capturer_class);
+// Called by Java when the device orientation has changed.
+void JNICALL OnOrientationChanged(
+ JNIEnv* env, jobject, jlong context, jint degrees) {
+ webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
+ reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
+ context);
+ degrees = (360 + degrees) % 360;
+ assert(degrees >= 0 && degrees < 360);
+ VideoCaptureRotation rotation =
+ (degrees <= 45 || degrees > 315) ? kCameraRotate0 :
+ (degrees > 45 && degrees <= 135) ? kCameraRotate90 :
+ (degrees > 135 && degrees <= 225) ? kCameraRotate180 :
+ (degrees > 225 && degrees <= 315) ? kCameraRotate270 :
+ kCameraRotate0; // Impossible.
+ int32_t status =
+ captureModule->VideoCaptureImpl::SetCaptureRotation(rotation);
+ RTC_UNUSED(status);
+ assert(status == 0);
+}
- JNINativeMethod native_method = {
- "ProvideCameraFrame", "([BIJ)V",
- reinterpret_cast<void*>(&ProvideCameraFrame)
- };
- if (ats.env()->RegisterNatives(g_java_capturer_class, &native_method, 1) != 0)
- assert(false);
+int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context) {
+ if (javaVM) {
+ assert(!g_jvm);
+ g_jvm = javaVM;
+ AttachThreadScoped ats(g_jvm);
+ g_context = ats.env()->NewGlobalRef(context);
+
+ videocapturemodule::DeviceInfoAndroid::Initialize(ats.env());
+
+ jclass j_capture_class =
+ ats.env()->FindClass("org/webrtc/videoengine/VideoCaptureAndroid");
+ assert(j_capture_class);
+ g_java_capturer_class =
+ reinterpret_cast<jclass>(ats.env()->NewGlobalRef(j_capture_class));
+ assert(g_java_capturer_class);
+
+ JNINativeMethod native_methods[] = {
+ {"GetContext",
+ "()Landroid/content/Context;",
+ reinterpret_cast<void*>(&GetContext)},
+ {"OnOrientationChanged",
+ "(JI)V",
+ reinterpret_cast<void*>(&OnOrientationChanged)},
+ {"ProvideCameraFrame",
+ "([BIJ)V",
+ reinterpret_cast<void*>(&ProvideCameraFrame)}};
+ if (ats.env()->RegisterNatives(g_java_capturer_class,
+ native_methods, 3) != 0)
+ assert(false);
+ } else {
+ if (g_jvm) {
+ AttachThreadScoped ats(g_jvm);
+ ats.env()->UnregisterNatives(g_java_capturer_class);
+ ats.env()->DeleteGlobalRef(g_java_capturer_class);
+ g_java_capturer_class = NULL;
+ ats.env()->DeleteGlobalRef(g_context);
+ g_context = NULL;
+ videocapturemodule::DeviceInfoAndroid::DeInitialize();
+ g_jvm = NULL;
+ }
+ }
return 0;
}
@@ -143,7 +193,8 @@ int32_t VideoCaptureAndroid::StartCapture(
assert(j_start);
int min_mfps = 0;
int max_mfps = 0;
- _deviceInfo.GetFpsRange(_deviceUniqueId, &min_mfps, &max_mfps);
+ _deviceInfo.GetMFpsRange(_deviceUniqueId, _captureCapability.maxFPS,
+ &min_mfps, &max_mfps);
bool started = env->CallBooleanMethod(_jCapturer, j_start,
_captureCapability.width,
_captureCapability.height,
@@ -184,8 +235,9 @@ int32_t VideoCaptureAndroid::CaptureSettings(
int32_t VideoCaptureAndroid::SetCaptureRotation(
VideoCaptureRotation rotation) {
CriticalSectionScoped cs(&_apiCs);
- if (VideoCaptureImpl::SetCaptureRotation(rotation) != 0)
- return 0;
+ int32_t status = VideoCaptureImpl::SetCaptureRotation(rotation);
+ if (status != 0)
+ return status;
AttachThreadScoped ats(g_jvm);
JNIEnv* env = ats.env();
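
With this change SetCaptureAndroidVM() doubles as the teardown hook: a non-NULL JavaVM registers the native methods and caches a global reference to the application context, while a NULL JavaVM unregisters the natives and drops the cached references. A hedged usage sketch of the expected call pattern (the surrounding JNI glue in the embedding application is assumed):

#include <jni.h>

namespace webrtc {
int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context);
}  // namespace webrtc

// Called once when the embedding application has a VM and a Context.
void StartAndroidVideoCapture(JavaVM* jvm, jobject application_context) {
  webrtc::SetCaptureAndroidVM(jvm, application_context);
}

// Called when video capture is no longer needed; this releases the global
// references and unregisters the Java-visible natives.
void StopAndroidVideoCapture() {
  webrtc::SetCaptureAndroidVM(NULL, NULL);
}
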
diff --git a/chromium/third_party/webrtc/modules/video_capture/device_info_impl.cc b/chromium/third_party/webrtc/modules/video_capture/device_info_impl.cc
index 2d2bc7fb547..7db6103fecb 100644
--- a/chromium/third_party/webrtc/modules/video_capture/device_info_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/device_info_impl.cc
@@ -8,11 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <assert.h>
#include <stdlib.h>
#include "webrtc/modules/video_capture/device_info_impl.h"
#include "webrtc/modules/video_capture/video_capture_config.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#ifndef abs
#define abs(a) (a>=0?a:-a)
@@ -75,13 +76,8 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
const uint32_t deviceCapabilityNumber,
VideoCaptureCapability& capability)
{
+ assert(deviceUniqueIdUTF8 != NULL);
- if (!deviceUniqueIdUTF8)
- {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "deviceUniqueIdUTF8 parameter not set in call to GetCapability");
- return -1;
- }
ReadLockScoped cs(_apiLock);
if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
@@ -111,9 +107,9 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
// Make sure the number is valid
if (deviceCapabilityNumber >= (unsigned int) _captureCapabilities.size())
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "deviceCapabilityNumber %d is invalid in call to GetCapability",
- deviceCapabilityNumber);
+ LOG(LS_ERROR) << "Invalid deviceCapabilityNumber "
+ << deviceCapabilityNumber << ">= number of capabilities ("
+ << _captureCapabilities.size() << ").";
return -1;
}
@@ -266,9 +262,9 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
}// else height not good
}//end for
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
- "Best camera format: Width %d, Height %d, Frame rate %d, Color format %d",
- bestWidth, bestHeight, bestFrameRate, bestRawType);
+ LOG(LS_VERBOSE) << "Best camera format: " << bestWidth << "x" << bestHeight
+ << "@" << bestFrameRate
+ << "fps, color format: " << bestRawType;
// Copy the capability
if (bestformatIndex < 0)
@@ -343,11 +339,10 @@ int32_t DeviceInfoImpl::GetExpectedCaptureDelay(
}
if (bestDelay > kMaxCaptureDelay)
{
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
- "Expected capture delay too high. %dms, will use %d", bestDelay,
- kMaxCaptureDelay);
+ LOG(LS_WARNING) << "Expected capture delay (" << bestDelay
+ << " ms) too high, using " << kMaxCaptureDelay
+ << " ms.";
bestDelay = kMaxCaptureDelay;
-
}
return bestDelay;
diff --git a/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.cc b/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.cc
new file mode 100644
index 00000000000..65c9a8dbe7a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.cc
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Platform-specific initialization bits, if any, go here.
+
+#if !defined(ANDROID) || !defined(WEBRTC_CHROMIUM_BUILD)
+
+namespace webrtc {
+namespace videocapturemodule {
+void EnsureInitialized() {}
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#else // !defined(ANDROID) || !defined(WEBRTC_CHROMIUM_BUILD)
+
+#include <assert.h>
+#include <pthread.h>
+
+#include "base/android/jni_android.h"
+
+// Handy alternative to assert() which suppresses unused-variable warnings when
+// assert() is a no-op (i.e. in Release builds).
+#ifdef NDEBUG
+#define ASSERT(x) if (false && (x)); else
+#else
+#define ASSERT(x) assert(x)
+#endif
+
+namespace webrtc {
+
+// Declared in webrtc/modules/video_capture/include/video_capture.h.
+int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject g_context);
+
+namespace videocapturemodule {
+
+static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT;
+
+void EnsureInitializedOnce() {
+ JNIEnv* jni = ::base::android::AttachCurrentThread();
+ jobject context = ::base::android::GetApplicationContext();
+ JavaVM* jvm = NULL;
+ int status = jni->GetJavaVM(&jvm);
+ ASSERT(status == 0);
+ status = webrtc::SetCaptureAndroidVM(jvm, context) == 0;
+ ASSERT(status);
+}
+
+void EnsureInitialized() {
+ int ret = pthread_once(&g_initialize_once, &EnsureInitializedOnce);
+ ASSERT(ret == 0);
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // ANDROID & WEBRTC_CHROMIUM_BUILD
diff --git a/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.h b/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.h
new file mode 100644
index 00000000000..429879537cd
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.h
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+namespace webrtc {
+namespace videocapturemodule {
+
+// Ensure any necessary initialization of webrtc::videocapturemodule has
+// completed.
+void EnsureInitialized();
+
+} // namespace videocapturemodule.
+} // namespace webrtc.
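
ensure_initialized.cc funnels the Android JNI setup through pthread_once, so tests may call EnsureInitialized() any number of times from any thread; on other platforms it is a no-op. A minimal sketch of how a test could use it (the factory call that would follow is omitted and assumed to come from the existing public API):

#include "webrtc/modules/video_capture/ensure_initialized.h"

// Call this before touching any capture devices in a test; repeated and
// concurrent calls are safe because the body runs at most once.
void SetUpCaptureTest() {
  webrtc::videocapturemodule::EnsureInitialized();
  // ... create DeviceInfo / VideoCaptureModule via the usual factory here ...
}
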
diff --git a/chromium/third_party/webrtc/modules/video_capture/include/mock/mock_video_capture.h b/chromium/third_party/webrtc/modules/video_capture/include/mock/mock_video_capture.h
new file mode 100644
index 00000000000..8ad74a23886
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_capture/include/mock/mock_video_capture.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
+
+#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockVideoCaptureModule : public VideoCaptureModule {
+ public:
+ // from Module
+ MOCK_METHOD0(TimeUntilNextProcess, int32_t());
+ MOCK_METHOD0(Process, int32_t());
+
+ // from RefCountedModule
+ MOCK_METHOD0(AddRef, int32_t());
+ MOCK_METHOD0(Release, int32_t());
+
+ // from VideoCaptureModule
+ MOCK_METHOD1(RegisterCaptureDataCallback,
+ void(VideoCaptureDataCallback& dataCallback));
+ MOCK_METHOD0(DeRegisterCaptureDataCallback, void());
+ MOCK_METHOD1(RegisterCaptureCallback, void(VideoCaptureFeedBack& callBack));
+ MOCK_METHOD0(DeRegisterCaptureCallback, void());
+ MOCK_METHOD1(StartCapture, int32_t(const VideoCaptureCapability& capability));
+ MOCK_METHOD0(StopCapture, int32_t());
+ MOCK_CONST_METHOD0(CurrentDeviceName, const char*());
+ MOCK_METHOD0(CaptureStarted, bool());
+ MOCK_METHOD1(CaptureSettings, int32_t(VideoCaptureCapability& settings));
+ MOCK_METHOD1(SetCaptureDelay, void(int32_t delayMS));
+ MOCK_METHOD0(CaptureDelay, int32_t());
+ MOCK_METHOD1(SetCaptureRotation, int32_t(VideoCaptureRotation rotation));
+ MOCK_METHOD1(GetEncodeInterface,
+ VideoCaptureEncodeInterface*(const VideoCodec& codec));
+ MOCK_METHOD1(EnableFrameRateCallback, void(const bool enable));
+ MOCK_METHOD1(EnableNoPictureAlarm, void(const bool enable));
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
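
A hedged example of how the new mock could be used with gmock/gtest; the expectations shown are illustrative only and assume the mock is directly instantiable against the surrounding interfaces, which is not demonstrated in the patch itself.

#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/video_capture/include/mock/mock_video_capture.h"

using ::testing::_;
using ::testing::Return;

TEST(MockVideoCaptureExample, StartAndStop) {
  webrtc::MockVideoCaptureModule capture;
  webrtc::VideoCaptureCapability capability;  // defaults suffice for a mock
  EXPECT_CALL(capture, StartCapture(_)).WillOnce(Return(0));
  EXPECT_CALL(capture, StopCapture()).WillOnce(Return(0));
  EXPECT_EQ(0, capture.StartCapture(capability));
  EXPECT_EQ(0, capture.StopCapture());
}
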
diff --git a/chromium/third_party/webrtc/modules/video_capture/include/video_capture.h b/chromium/third_party/webrtc/modules/video_capture/include/video_capture.h
index 6b6247c2777..7398af60448 100644
--- a/chromium/third_party/webrtc/modules/video_capture/include/video_capture.h
+++ b/chromium/third_party/webrtc/modules/video_capture/include/video_capture.h
@@ -20,8 +20,8 @@
namespace webrtc {
-#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
-int32_t SetCaptureAndroidVM(JavaVM* javaVM);
+#if defined(ANDROID)
+int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context);
#endif
class VideoCaptureModule: public RefCountedModule {
@@ -105,18 +105,17 @@ class VideoCaptureModule: public RefCountedModule {
};
// Register capture data callback
- virtual int32_t RegisterCaptureDataCallback(
+ virtual void RegisterCaptureDataCallback(
VideoCaptureDataCallback& dataCallback) = 0;
// Remove capture data callback
- virtual int32_t DeRegisterCaptureDataCallback() = 0;
+ virtual void DeRegisterCaptureDataCallback() = 0;
// Register capture callback.
- virtual int32_t RegisterCaptureCallback(
- VideoCaptureFeedBack& callBack) = 0;
+ virtual void RegisterCaptureCallback(VideoCaptureFeedBack& callBack) = 0;
// Remove capture callback.
- virtual int32_t DeRegisterCaptureCallback() = 0;
+ virtual void DeRegisterCaptureCallback() = 0;
// Start capture device
virtual int32_t StartCapture(
@@ -133,7 +132,7 @@ class VideoCaptureModule: public RefCountedModule {
// Gets the current configuration.
virtual int32_t CaptureSettings(VideoCaptureCapability& settings) = 0;
- virtual int32_t SetCaptureDelay(int32_t delayMS) = 0;
+ virtual void SetCaptureDelay(int32_t delayMS) = 0;
// Returns the current CaptureDelay. Only valid when the camera is running.
virtual int32_t CaptureDelay() = 0;
@@ -149,8 +148,8 @@ class VideoCaptureModule: public RefCountedModule {
virtual VideoCaptureEncodeInterface* GetEncodeInterface(
const VideoCodec& codec) = 0;
- virtual int32_t EnableFrameRateCallback(const bool enable) = 0;
- virtual int32_t EnableNoPictureAlarm(const bool enable) = 0;
+ virtual void EnableFrameRateCallback(const bool enable) = 0;
+ virtual void EnableNoPictureAlarm(const bool enable) = 0;
protected:
virtual ~VideoCaptureModule() {};
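
The registration, delay and alarm setters above now return void instead of an always-zero int32_t, so call sites simply drop the status check. A hedged sketch of a call site after the change (the module pointer and callback object are assumed to come from elsewhere; the types are from the existing headers):

#include "webrtc/modules/video_capture/include/video_capture.h"

void AttachDataCallback(webrtc::VideoCaptureModule* module,
                        webrtc::VideoCaptureDataCallback* callback) {
  module->RegisterCaptureDataCallback(*callback);  // no status to check anymore
  module->SetCaptureDelay(120);                    // also void now
}

void DetachDataCallback(webrtc::VideoCaptureModule* module) {
  module->DeRegisterCaptureDataCallback();
}
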
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios.mm b/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios.mm
index c51a53a3372..dea9fc34a41 100644
--- a/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios.mm
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_capture/ios/device_info_ios.h"
#include "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios_objc.mm b/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios_objc.mm
index 2d11a2043fb..d06d3361f45 100644
--- a/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios_objc.mm
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios_objc.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#import <AVFoundation/AVFoundation.h>
#import "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h b/chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h
new file mode 100644
index 00000000000..7d4147b430d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#include "webrtc/modules/video_capture/ios/video_capture_ios.h"
+
+// The following class listens to a notification with name:
+// 'StatusBarOrientationDidChange'.
+// This notification must be posted in order for the capturer to reflect the
+// orientation change in video w.r.t. the application orientation.
+@interface RTCVideoCaptureIosObjC
+ : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
+
+@property webrtc::VideoCaptureRotation frameRotation;
+
+// custom initializer. Instance of VideoCaptureIos is needed
+// for callback purposes.
+// default init methods have been overridden to return nil.
+- (id)initWithOwner:(webrtc::videocapturemodule::VideoCaptureIos*)owner
+ captureId:(int)captureId;
+- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId;
+- (BOOL)startCaptureWithCapability:
+ (const webrtc::VideoCaptureCapability&)capability;
+- (BOOL)stopCapture;
+
+@end
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm b/chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm
new file mode 100644
index 00000000000..641ca2416b2
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm
@@ -0,0 +1,377 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import <UIKit/UIKit.h>
+
+#import "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
+#import "webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h"
+
+#include "webrtc/system_wrappers/interface/trace.h"
+
+using namespace webrtc;
+using namespace webrtc::videocapturemodule;
+
+@interface RTCVideoCaptureIosObjC (hidden)
+- (int)changeCaptureInputWithName:(NSString*)captureDeviceName;
+@end
+
+@implementation RTCVideoCaptureIosObjC {
+ webrtc::videocapturemodule::VideoCaptureIos* _owner;
+ webrtc::VideoCaptureCapability _capability;
+ AVCaptureSession* _captureSession;
+ int _captureId;
+ AVCaptureConnection* _connection;
+ BOOL _captureChanging; // Guarded by _captureChangingCondition.
+ NSCondition* _captureChangingCondition;
+}
+
+@synthesize frameRotation = _framRotation;
+
+- (id)initWithOwner:(VideoCaptureIos*)owner captureId:(int)captureId {
+ if (self == [super init]) {
+ _owner = owner;
+ _captureId = captureId;
+ _captureSession = [[AVCaptureSession alloc] init];
+ _captureChanging = NO;
+ _captureChangingCondition = [[NSCondition alloc] init];
+
+ if (!_captureSession || !_captureChangingCondition) {
+ return nil;
+ }
+
+ // create and configure a new output (using callbacks)
+ AVCaptureVideoDataOutput* captureOutput =
+ [[AVCaptureVideoDataOutput alloc] init];
+ NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
+
+ NSNumber* val = [NSNumber
+ numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
+ NSDictionary* videoSettings =
+ [NSDictionary dictionaryWithObject:val forKey:key];
+ captureOutput.videoSettings = videoSettings;
+
+ // add new output
+ if ([_captureSession canAddOutput:captureOutput]) {
+ [_captureSession addOutput:captureOutput];
+ } else {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoCapture,
+ _captureId,
+ "%s:%s:%d Could not add output to AVCaptureSession ",
+ __FILE__,
+ __FUNCTION__,
+ __LINE__);
+ }
+
+ NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
+ [notify addObserver:self
+ selector:@selector(onVideoError:)
+ name:AVCaptureSessionRuntimeErrorNotification
+ object:_captureSession];
+ [notify addObserver:self
+ selector:@selector(statusBarOrientationDidChange:)
+ name:@"StatusBarOrientationDidChange"
+ object:nil];
+ }
+
+ return self;
+}
+
+- (void)directOutputToSelf {
+ [[self currentOutput]
+ setSampleBufferDelegate:self
+ queue:dispatch_get_global_queue(
+ DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
+}
+
+- (void)directOutputToNil {
+ [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
+}
+
+- (void)statusBarOrientationDidChange:(NSNotification*)notification {
+ [self setRelativeVideoOrientation];
+}
+
+- (void)dealloc {
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+}
+
+- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
+ [self waitForCaptureChangeToFinish];
+ // check to see if the camera is already set
+ if (_captureSession) {
+ NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
+ if ([currentInputs count] > 0) {
+ AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
+ if ([uniqueId isEqualToString:[currentInput.device localizedName]]) {
+ return YES;
+ }
+ }
+ }
+
+ return [self changeCaptureInputByUniqueId:uniqueId];
+}
+
+- (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
+ [self waitForCaptureChangeToFinish];
+ if (!_captureSession) {
+ return NO;
+ }
+
+ // check limits of the resolution
+ if (capability.maxFPS < 0 || capability.maxFPS > 60) {
+ return NO;
+ }
+
+ if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
+ if (capability.width > 1920 || capability.height > 1080) {
+ return NO;
+ }
+ } else if ([_captureSession
+ canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
+ if (capability.width > 1280 || capability.height > 720) {
+ return NO;
+ }
+ } else if ([_captureSession
+ canSetSessionPreset:AVCaptureSessionPreset640x480]) {
+ if (capability.width > 640 || capability.height > 480) {
+ return NO;
+ }
+ } else if ([_captureSession
+ canSetSessionPreset:AVCaptureSessionPreset352x288]) {
+ if (capability.width > 352 || capability.height > 288) {
+ return NO;
+ }
+ } else if (capability.width < 0 || capability.height < 0) {
+ return NO;
+ }
+
+ _capability = capability;
+
+ AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
+ if (!currentOutput)
+ return NO;
+
+ [self directOutputToSelf];
+
+ _captureChanging = YES;
+ dispatch_async(
+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
+ ^(void) { [self startCaptureInBackgroundWithOutput:currentOutput]; });
+ return YES;
+}
+
+- (AVCaptureVideoDataOutput*)currentOutput {
+ return [[_captureSession outputs] firstObject];
+}
+
+- (void)startCaptureInBackgroundWithOutput:
+ (AVCaptureVideoDataOutput*)currentOutput {
+ NSString* captureQuality =
+ [NSString stringWithString:AVCaptureSessionPresetLow];
+ if (_capability.width >= 1920 || _capability.height >= 1080) {
+ captureQuality =
+ [NSString stringWithString:AVCaptureSessionPreset1920x1080];
+ } else if (_capability.width >= 1280 || _capability.height >= 720) {
+ captureQuality = [NSString stringWithString:AVCaptureSessionPreset1280x720];
+ } else if (_capability.width >= 640 || _capability.height >= 480) {
+ captureQuality = [NSString stringWithString:AVCaptureSessionPreset640x480];
+ } else if (_capability.width >= 352 || _capability.height >= 288) {
+ captureQuality = [NSString stringWithString:AVCaptureSessionPreset352x288];
+ }
+
+ // begin configuration for the AVCaptureSession
+ [_captureSession beginConfiguration];
+
+ // picture resolution
+ [_captureSession setSessionPreset:captureQuality];
+
+ // take care of capture framerate now
+ _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
+ [self setRelativeVideoOrientation];
+ CMTime cm_time = {1, _capability.maxFPS, kCMTimeFlags_Valid, 0};
+
+ [_connection setVideoMinFrameDuration:cm_time];
+ [_connection setVideoMaxFrameDuration:cm_time];
+
+ // finished configuring, commit settings to AVCaptureSession.
+ [_captureSession commitConfiguration];
+
+ [_captureSession startRunning];
+ [self signalCaptureChangeEnd];
+}
+
+- (void)setRelativeVideoOrientation {
+ if (!_connection.supportsVideoOrientation)
+ return;
+ switch ([UIApplication sharedApplication].statusBarOrientation) {
+ case UIInterfaceOrientationPortrait:
+ _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
+ break;
+ case UIInterfaceOrientationPortraitUpsideDown:
+ _connection.videoOrientation =
+ AVCaptureVideoOrientationPortraitUpsideDown;
+ break;
+ case UIInterfaceOrientationLandscapeLeft:
+ _connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
+ break;
+ case UIInterfaceOrientationLandscapeRight:
+ _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
+ break;
+ }
+}
+
+- (void)onVideoError:(NSNotification*)notification {
+ NSLog(@"onVideoError: %@", notification);
+ // TODO(sjlee): make the specific error handling with this notification.
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoCapture,
+ _captureId,
+ "%s:%s:%d [AVCaptureSession startRunning] error.",
+ __FILE__,
+ __FUNCTION__,
+ __LINE__);
+}
+
+- (BOOL)stopCapture {
+ [self waitForCaptureChangeToFinish];
+ [self directOutputToNil];
+
+ if (!_captureSession) {
+ return NO;
+ }
+
+ _captureChanging = YES;
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
+ ^(void) { [self stopCaptureInBackground]; });
+ return YES;
+}
+
+- (void)stopCaptureInBackground {
+ [_captureSession stopRunning];
+ [self signalCaptureChangeEnd];
+}
+
+- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
+ [self waitForCaptureChangeToFinish];
+ NSArray* currentInputs = [_captureSession inputs];
+ // remove current input
+ if ([currentInputs count] > 0) {
+ AVCaptureInput* currentInput =
+ (AVCaptureInput*)[currentInputs objectAtIndex:0];
+
+ [_captureSession removeInput:currentInput];
+ }
+
+ // Look for input device with the name requested (as our input param)
+ // get list of available capture devices
+ int captureDeviceCount = [DeviceInfoIosObjC captureDeviceCount];
+ if (captureDeviceCount <= 0) {
+ return NO;
+ }
+
+ AVCaptureDevice* captureDevice =
+ [DeviceInfoIosObjC captureDeviceForUniqueId:uniqueId];
+
+ if (!captureDevice) {
+ return NO;
+ }
+
+ // now create capture session input out of AVCaptureDevice
+ NSError* deviceError = nil;
+ AVCaptureDeviceInput* newCaptureInput =
+ [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
+ error:&deviceError];
+
+ if (!newCaptureInput) {
+ const char* errorMessage = [[deviceError localizedDescription] UTF8String];
+
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoCapture,
+ _captureId,
+ "%s:%s:%d deviceInputWithDevice error:%s",
+ __FILE__,
+ __FUNCTION__,
+ __LINE__,
+ errorMessage);
+
+ return NO;
+ }
+
+ // try to add our new capture device to the capture session
+ [_captureSession beginConfiguration];
+
+ BOOL addedCaptureInput = NO;
+ if ([_captureSession canAddInput:newCaptureInput]) {
+ [_captureSession addInput:newCaptureInput];
+ addedCaptureInput = YES;
+ } else {
+ addedCaptureInput = NO;
+ }
+
+ [_captureSession commitConfiguration];
+
+ return addedCaptureInput;
+}
+
+- (void)captureOutput:(AVCaptureOutput*)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection*)connection {
+ const int kFlags = 0;
+ CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
+
+ if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
+ return;
+ }
+
+ const int kYPlaneIndex = 0;
+ const int kUVPlaneIndex = 1;
+
+ uint8_t* baseAddress =
+ (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(videoFrame, kYPlaneIndex);
+ int yPlaneBytesPerRow =
+ CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kYPlaneIndex);
+ int yPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kYPlaneIndex);
+ int uvPlaneBytesPerRow =
+ CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kUVPlaneIndex);
+ int uvPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kUVPlaneIndex);
+ int frameSize =
+ yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
+
+ VideoCaptureCapability tempCaptureCapability;
+ tempCaptureCapability.width = CVPixelBufferGetWidth(videoFrame);
+ tempCaptureCapability.height = CVPixelBufferGetHeight(videoFrame);
+ tempCaptureCapability.maxFPS = _capability.maxFPS;
+ tempCaptureCapability.rawType = kVideoNV12;
+
+ _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);
+
+ CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
+}
+
+- (void)signalCaptureChangeEnd {
+ [_captureChangingCondition lock];
+ _captureChanging = NO;
+ [_captureChangingCondition signal];
+ [_captureChangingCondition unlock];
+}
+
+- (void)waitForCaptureChangeToFinish {
+ [_captureChangingCondition lock];
+ while (_captureChanging) {
+ [_captureChangingCondition wait];
+ }
+ [_captureChangingCondition unlock];
+}
+@end
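
The delegate above hands VideoCaptureImpl a single contiguous NV12 buffer whose size is derived from the per-plane strides and heights that CoreVideo reports; the same arithmetic in isolation, purely as an illustration:

// Size in bytes of an NV12 frame laid out as a Y plane followed by an
// interleaved UV plane, using the reported bytes-per-row of each plane.
int Nv12FrameSizeBytes(int y_bytes_per_row, int y_height,
                       int uv_bytes_per_row, int uv_height) {
  return y_bytes_per_row * y_height + uv_bytes_per_row * uv_height;
}
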
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.h b/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.h
index 5d7e4b35725..ff8345f26ee 100644
--- a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.h
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.h
@@ -13,7 +13,7 @@
#include "webrtc/modules/video_capture/video_capture_impl.h"
-@class VideoCaptureIosObjC;
+@class RTCVideoCaptureIosObjC;
namespace webrtc {
namespace videocapturemodule {
@@ -33,7 +33,7 @@ class VideoCaptureIos : public VideoCaptureImpl {
virtual int32_t CaptureSettings(VideoCaptureCapability& settings) OVERRIDE;
private:
- VideoCaptureIosObjC* capture_device_;
+ RTCVideoCaptureIosObjC* capture_device_;
bool is_capturing_;
int32_t id_;
VideoCaptureCapability capability_;
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.mm b/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.mm
index bb576c3fcdf..2010f03080c 100644
--- a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.mm
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.mm
@@ -8,8 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
-#include "webrtc/modules/video_capture/ios/video_capture_ios_objc.h"
+#include "webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/scoped_refptr.h"
#include "webrtc/system_wrappers/interface/trace.h"
@@ -30,7 +34,7 @@ VideoCaptureIos::VideoCaptureIos(const int32_t capture_id)
}
VideoCaptureIos::~VideoCaptureIos() {
- if (capture_device_) {
+ if (is_capturing_) {
[capture_device_ stopCapture];
}
}
@@ -53,8 +57,8 @@ VideoCaptureModule* VideoCaptureIos::Create(const int32_t capture_id,
capture_module->_deviceUniqueId[name_length] = '\0';
capture_module->capture_device_ =
- [[VideoCaptureIosObjC alloc] initWithOwner:capture_module
- captureId:capture_module->id_];
+ [[RTCVideoCaptureIosObjC alloc] initWithOwner:capture_module
+ captureId:capture_module->id_];
if (!capture_module->capture_device_) {
return NULL;
}
@@ -86,7 +90,6 @@ int32_t VideoCaptureIos::StopCapture() {
}
is_capturing_ = false;
-
return 0;
}
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.h b/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.h
deleted file mode 100644
index 8e50facba19..00000000000
--- a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
-#define WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
-
-#import <UIKit/UIKit.h>
-
-#include "webrtc/modules/video_capture/ios/video_capture_ios.h"
-
-@interface VideoCaptureIosObjC
- : UIViewController<AVCaptureVideoDataOutputSampleBufferDelegate> {
- @private
- webrtc::videocapturemodule::VideoCaptureIos* _owner;
- webrtc::VideoCaptureCapability _capability;
- AVCaptureSession* _captureSession;
- int _captureId;
-}
-
-@property webrtc::VideoCaptureRotation frameRotation;
-
-// custom initializer. Instance of VideoCaptureIos is needed
-// for callback purposes.
-// default init methods have been overridden to return nil.
-- (id)initWithOwner:(webrtc::videocapturemodule::VideoCaptureIos*)owner
- captureId:(int)captureId;
-- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniequeId;
-- (BOOL)startCaptureWithCapability:
- (const webrtc::VideoCaptureCapability&)capability;
-- (BOOL)stopCapture;
-
-@end
-#endif // WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.mm b/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.mm
deleted file mode 100644
index 5b8d69786e8..00000000000
--- a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.mm
+++ /dev/null
@@ -1,287 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
-#import "webrtc/modules/video_capture/ios/video_capture_ios_objc.h"
-
-#include "webrtc/system_wrappers/interface/trace.h"
-
-using namespace webrtc;
-using namespace webrtc::videocapturemodule;
-
-@interface VideoCaptureIosObjC (hidden)
-- (int)changeCaptureInputWithName:(NSString*)captureDeviceName;
-
-@end
-
-@implementation VideoCaptureIosObjC
-
-@synthesize frameRotation = _framRotation;
-
-- (id)initWithOwner:(VideoCaptureIos*)owner captureId:(int)captureId {
- if (self == [super init]) {
- _owner = owner;
- _captureId = captureId;
- _captureSession = [[AVCaptureSession alloc] init];
-
- if (!_captureSession) {
- return nil;
- }
-
- // create and configure a new output (using callbacks)
- AVCaptureVideoDataOutput* captureOutput =
- [[AVCaptureVideoDataOutput alloc] init];
- [captureOutput setSampleBufferDelegate:self
- queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
- NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
-
- NSNumber* val = [NSNumber
- numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
- NSDictionary* videoSettings =
- [NSDictionary dictionaryWithObject:val forKey:key];
- captureOutput.videoSettings = videoSettings;
-
- // add new output
- if ([_captureSession canAddOutput:captureOutput]) {
- [_captureSession addOutput:captureOutput];
- } else {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoCapture,
- _captureId,
- "%s:%s:%d Could not add output to AVCaptureSession ",
- __FILE__,
- __FUNCTION__,
- __LINE__);
- }
-
- NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
- [notify addObserver:self
- selector:@selector(onVideoError:)
- name:AVCaptureSessionRuntimeErrorNotification
- object:_captureSession];
- }
-
- return self;
-}
-
-- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
- // check to see if the camera is already set
- if (_captureSession) {
- NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
- if ([currentInputs count] > 0) {
- AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
- if ([uniqueId isEqualToString:[currentInput.device localizedName]]) {
- return YES;
- }
- }
- }
-
- return [self changeCaptureInputByUniqueId:uniqueId];
-}
-
-- (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
- if (!_captureSession) {
- return NO;
- }
-
- // check limits of the resolution
- if (capability.maxFPS < 0 || capability.maxFPS > 60) {
- return NO;
- }
-
- if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
- if (capability.width > 1920 || capability.height > 1080) {
- return NO;
- }
- } else if ([_captureSession
- canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
- if (capability.width > 1280 || capability.height > 720) {
- return NO;
- }
- } else if ([_captureSession
- canSetSessionPreset:AVCaptureSessionPreset640x480]) {
- if (capability.width > 640 || capability.height > 480) {
- return NO;
- }
- } else if ([_captureSession
- canSetSessionPreset:AVCaptureSessionPreset352x288]) {
- if (capability.width > 352 || capability.height > 288) {
- return NO;
- }
- } else if (capability.width < 0 || capability.height < 0) {
- return NO;
- }
-
- _capability = capability;
-
- NSArray* currentOutputs = [_captureSession outputs];
- if ([currentOutputs count] == 0) {
- return NO;
- }
-
- NSString* captureQuality =
- [NSString stringWithString:AVCaptureSessionPresetLow];
- if (_capability.width >= 1920 || _capability.height >= 1080) {
- captureQuality =
- [NSString stringWithString:AVCaptureSessionPreset1920x1080];
- } else if (_capability.width >= 1280 || _capability.height >= 720) {
- captureQuality = [NSString stringWithString:AVCaptureSessionPreset1280x720];
- } else if (_capability.width >= 640 || _capability.height >= 480) {
- captureQuality = [NSString stringWithString:AVCaptureSessionPreset640x480];
- } else if (_capability.width >= 352 || _capability.height >= 288) {
- captureQuality = [NSString stringWithString:AVCaptureSessionPreset352x288];
- }
-
- AVCaptureVideoDataOutput* currentOutput =
- (AVCaptureVideoDataOutput*)[currentOutputs objectAtIndex:0];
-
- // begin configuration for the AVCaptureSession
- [_captureSession beginConfiguration];
-
- // picture resolution
- [_captureSession setSessionPreset:captureQuality];
-
- // take care of capture framerate now
- AVCaptureConnection* connection =
- [currentOutput connectionWithMediaType:AVMediaTypeVideo];
-
- CMTime cm_time = {1, _capability.maxFPS, kCMTimeFlags_Valid, 0};
-
- [connection setVideoMinFrameDuration:cm_time];
- [connection setVideoMaxFrameDuration:cm_time];
-
- // finished configuring, commit settings to AVCaptureSession.
- [_captureSession commitConfiguration];
-
- [_captureSession startRunning];
-
- [captureQuality release];
-
- return YES;
-}
-
-- (void)onVideoError {
- // TODO(sjlee): make the specific error handling with this notification.
- WEBRTC_TRACE(kTraceError,
- kTraceVideoCapture,
- _captureId,
- "%s:%s:%d [AVCaptureSession startRunning] error.",
- __FILE__,
- __FUNCTION__,
- __LINE__);
-}
-
-- (BOOL)stopCapture {
- if (!_captureSession) {
- return NO;
- }
-
- [_captureSession stopRunning];
-
- return YES;
-}
-
-- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
- NSArray* currentInputs = [_captureSession inputs];
- // remove current input
- if ([currentInputs count] > 0) {
- AVCaptureInput* currentInput =
- (AVCaptureInput*)[currentInputs objectAtIndex:0];
-
- [_captureSession removeInput:currentInput];
- }
-
- // Look for input device with the name requested (as our input param)
- // get list of available capture devices
- int captureDeviceCount = [DeviceInfoIosObjC captureDeviceCount];
- if (captureDeviceCount <= 0) {
- return NO;
- }
-
- AVCaptureDevice* captureDevice =
- [DeviceInfoIosObjC captureDeviceForUniqueId:uniqueId];
-
- if (!captureDevice) {
- return NO;
- }
-
- // now create capture session input out of AVCaptureDevice
- NSError* deviceError = nil;
- AVCaptureDeviceInput* newCaptureInput =
- [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
- error:&deviceError];
-
- if (!newCaptureInput) {
- const char* errorMessage = [[deviceError localizedDescription] UTF8String];
-
- WEBRTC_TRACE(kTraceError,
- kTraceVideoCapture,
- _captureId,
- "%s:%s:%d deviceInputWithDevice error:%s",
- __FILE__,
- __FUNCTION__,
- __LINE__,
- errorMessage);
-
- return NO;
- }
-
- // try to add our new capture device to the capture session
- [_captureSession beginConfiguration];
-
- BOOL addedCaptureInput = NO;
- if ([_captureSession canAddInput:newCaptureInput]) {
- [_captureSession addInput:newCaptureInput];
- addedCaptureInput = YES;
- } else {
- addedCaptureInput = NO;
- }
-
- [_captureSession commitConfiguration];
-
- return addedCaptureInput;
-}
-
-- (void)captureOutput:(AVCaptureOutput*)captureOutput
- didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
- fromConnection:(AVCaptureConnection*)connection {
- const int kFlags = 0;
- CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
-
- if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
- return;
- }
-
- const int kYPlaneIndex = 0;
- const int kUVPlaneIndex = 1;
-
- uint8_t* baseAddress =
- (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(videoFrame, kYPlaneIndex);
- int yPlaneBytesPerRow =
- CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kYPlaneIndex);
- int yPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kYPlaneIndex);
- int uvPlaneBytesPerRow =
- CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kUVPlaneIndex);
- int uvPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kUVPlaneIndex);
- int frameSize =
- yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
-
- VideoCaptureCapability tempCaptureCapability;
- tempCaptureCapability.width = CVPixelBufferGetWidth(videoFrame);
- tempCaptureCapability.height = CVPixelBufferGetHeight(videoFrame);
- tempCaptureCapability.maxFPS = _capability.maxFPS;
- tempCaptureCapability.rawType = kVideoNV12;
-
- _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);
-
- CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
-}
-
-@end
diff --git a/chromium/third_party/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm b/chromium/third_party/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
index 7b46aec1925..2b18e1eca17 100644
--- a/chromium/third_party/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
+++ b/chromium/third_party/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
@@ -155,11 +155,14 @@ using namespace webrtc;
- (void)checkOSSupported
{
Class osSupportedTest = NSClassFromString(@"QTCaptureSession");
- _OSSupportedInfo = NO;
if(nil == osSupportedTest)
{
+ _OSSupportedInfo = NO;
+ }
+ else
+ {
+ _OSSupportedInfo = YES;
}
- _OSSupportedInfo = YES;
}
/// ***** Retrieves the number of capture devices currently available
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi b/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
index b8a63c9c4a6..dced22ce08d 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
@@ -60,6 +60,7 @@
'link_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
+ '-framework CoreVideo',
'-framework QTKit',
],
},
@@ -108,11 +109,14 @@
'ios/device_info_ios.mm',
'ios/device_info_ios_objc.h',
'ios/device_info_ios_objc.mm',
+ 'ios/rtc_video_capture_ios_objc.h',
+ 'ios/rtc_video_capture_ios_objc.mm',
'ios/video_capture_ios.h',
'ios/video_capture_ios.mm',
- 'ios/video_capture_ios_objc.h',
- 'ios/video_capture_ios_objc.mm',
],
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ },
'all_dependent_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
@@ -130,11 +134,20 @@
},
],
'conditions': [
+ ['include_tests==1 and build_with_chromium==1 and OS=="android"', {
+ # Use WebRTC capture code for Android APK tests that are built from a
+ # Chromium checkout. Normally when built as a part of Chromium the
+ # Chromium video capture code is used. This overrides the default in
+ # webrtc/build/common.gypi.
+ 'variables': {
+ 'include_internal_video_capture': 1,
+ },
+ }],
['include_tests==1', {
'targets': [
{
'target_name': 'video_capture_tests',
- 'type': 'executable',
+ 'type': '<(gtest_target_type)',
'dependencies': [
'video_capture_module',
'webrtc_utility',
@@ -142,6 +155,8 @@
'<(DEPTH)/testing/gtest.gyp:gtest',
],
'sources': [
+ 'ensure_initialized.cc',
+ 'ensure_initialized.h',
'test/video_capture_unittest.cc',
'test/video_capture_main_mac.mm',
],
@@ -161,6 +176,13 @@
'-lX11',
],
}],
+ # TODO(henrike): remove build_with_chromium==1 when the bots are
+ # using Chromium's buildbots.
+ ['build_with_chromium==1 and OS=="android"', {
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ ],
+ }],
['OS=="mac"', {
'dependencies': [
# Link with a special main for mac so we can use the webcam.
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
index 6689fd132c7..6f179e2da66 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
@@ -17,9 +17,9 @@
#include "webrtc/modules/video_capture/video_capture_config.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc
@@ -187,45 +187,33 @@ VideoCaptureImpl::~VideoCaptureImpl()
delete[] _deviceUniqueId;
}
-int32_t VideoCaptureImpl::RegisterCaptureDataCallback(
- VideoCaptureDataCallback& dataCallBack)
-{
+void VideoCaptureImpl::RegisterCaptureDataCallback(
+ VideoCaptureDataCallback& dataCallBack) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_dataCallBack = &dataCallBack;
-
- return 0;
}
-int32_t VideoCaptureImpl::DeRegisterCaptureDataCallback()
-{
+void VideoCaptureImpl::DeRegisterCaptureDataCallback() {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_dataCallBack = NULL;
- return 0;
}
-int32_t VideoCaptureImpl::RegisterCaptureCallback(VideoCaptureFeedBack& callBack)
-{
+void VideoCaptureImpl::RegisterCaptureCallback(VideoCaptureFeedBack& callBack) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_captureCallBack = &callBack;
- return 0;
}
-int32_t VideoCaptureImpl::DeRegisterCaptureCallback()
-{
+void VideoCaptureImpl::DeRegisterCaptureCallback() {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_captureCallBack = NULL;
- return 0;
-
}
-int32_t VideoCaptureImpl::SetCaptureDelay(int32_t delayMS)
-{
+void VideoCaptureImpl::SetCaptureDelay(int32_t delayMS) {
CriticalSectionScoped cs(&_apiCs);
_captureDelay = delayMS;
- return 0;
}
int32_t VideoCaptureImpl::CaptureDelay()
{
@@ -272,13 +260,8 @@ int32_t VideoCaptureImpl::IncomingFrame(
const VideoCaptureCapability& frameInfo,
int64_t captureTime/*=0*/)
{
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideoCapture, _id,
- "IncomingFrame width %d, height %d", (int) frameInfo.width,
- (int) frameInfo.height);
-
- TickTime startProcessTime = TickTime::Now();
-
- CriticalSectionScoped cs(&_callBackCs);
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
const int32_t width = frameInfo.width;
const int32_t height = frameInfo.height;
@@ -295,8 +278,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
CalcBufferSize(commonVideoType, width,
abs(height)) != videoFrameLength)
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "Wrong incoming frame length.");
+ LOG(LS_ERROR) << "Wrong incoming frame length.";
return -1;
}
@@ -320,8 +302,8 @@ int32_t VideoCaptureImpl::IncomingFrame(
stride_uv, stride_uv);
if (ret < 0)
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "Failed to allocate I420 frame.");
+ LOG(LS_ERROR) << "Failed to create empty frame, this should only "
+ "happen due to bad parameters.";
return -1;
}
const int conversionResult = ConvertToI420(commonVideoType,
@@ -333,9 +315,8 @@ int32_t VideoCaptureImpl::IncomingFrame(
&_captureFrame);
if (conversionResult < 0)
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "Failed to convert capture frame from type %d to I420",
- frameInfo.rawType);
+ LOG(LS_ERROR) << "Failed to convert capture frame from type "
+ << frameInfo.rawType << "to I420.";
return -1;
}
DeliverCapturedFrame(_captureFrame, captureTime);
@@ -346,22 +327,14 @@ int32_t VideoCaptureImpl::IncomingFrame(
return -1;
}
- const uint32_t processTime =
- (uint32_t)(TickTime::Now() - startProcessTime).Milliseconds();
- if (processTime > 10) // If the process time is too long MJPG will not work well.
- {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
- "Too long processing time of Incoming frame: %ums",
- (unsigned int) processTime);
- }
-
return 0;
}
int32_t VideoCaptureImpl::IncomingI420VideoFrame(I420VideoFrame* video_frame,
int64_t captureTime) {
- CriticalSectionScoped cs(&_callBackCs);
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
DeliverCapturedFrame(*video_frame, captureTime);
return 0;
@@ -389,8 +362,7 @@ int32_t VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation) {
return 0;
}
-int32_t VideoCaptureImpl::EnableFrameRateCallback(const bool enable)
-{
+void VideoCaptureImpl::EnableFrameRateCallback(const bool enable) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_frameRateCallBack = enable;
@@ -398,15 +370,12 @@ int32_t VideoCaptureImpl::EnableFrameRateCallback(const bool enable)
{
_lastFrameRateCallbackTime = TickTime::Now();
}
- return 0;
}
-int32_t VideoCaptureImpl::EnableNoPictureAlarm(const bool enable)
-{
+void VideoCaptureImpl::EnableNoPictureAlarm(const bool enable) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_noPictureAlarmCallBack = enable;
- return 0;
}
void VideoCaptureImpl::UpdateFrameCount()
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
index 80d5e67862a..f3a4c64cbd6 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
@@ -62,17 +62,18 @@ public:
virtual int32_t ChangeUniqueId(const int32_t id);
//Call backs
- virtual int32_t RegisterCaptureDataCallback(VideoCaptureDataCallback& dataCallback);
- virtual int32_t DeRegisterCaptureDataCallback();
- virtual int32_t RegisterCaptureCallback(VideoCaptureFeedBack& callBack);
- virtual int32_t DeRegisterCaptureCallback();
+ virtual void RegisterCaptureDataCallback(
+ VideoCaptureDataCallback& dataCallback);
+ virtual void DeRegisterCaptureDataCallback();
+ virtual void RegisterCaptureCallback(VideoCaptureFeedBack& callBack);
+ virtual void DeRegisterCaptureCallback();
- virtual int32_t SetCaptureDelay(int32_t delayMS);
+ virtual void SetCaptureDelay(int32_t delayMS);
virtual int32_t CaptureDelay();
virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation);
- virtual int32_t EnableFrameRateCallback(const bool enable);
- virtual int32_t EnableNoPictureAlarm(const bool enable);
+ virtual void EnableFrameRateCallback(const bool enable);
+ virtual void EnableNoPictureAlarm(const bool enable);
virtual const char* CurrentDeviceName() const;
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture_tests.isolate b/chromium/third_party/webrtc/modules/video_capture/video_capture_tests.isolate
index 30374ce4e4a..57dd66739fd 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture_tests.isolate
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture_tests.isolate
@@ -8,27 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../../data/',
- '../../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/video_capture_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/video_capture_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/modules/video_capture/windows/sink_filter_ds.cc b/chromium/third_party/webrtc/modules/video_capture/windows/sink_filter_ds.cc
index 144f8833b75..2edbe59c4aa 100644
--- a/chromium/third_party/webrtc/modules/video_capture/windows/sink_filter_ds.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/windows/sink_filter_ds.cc
@@ -437,6 +437,7 @@ CaptureSinkFilter::GetPin(IN int Index)
STDMETHODIMP CaptureSinkFilter::Pause()
{
+ LockReceive();
LockFilter();
if (m_State == State_Stopped)
{
@@ -456,6 +457,7 @@ STDMETHODIMP CaptureSinkFilter::Pause()
m_State = State_Paused;
}
UnlockFilter();
+ UnlockReceive();
return S_OK;
}
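
Taking the receive lock before the filter lock in Pause() keeps the two locks in one consistent order with the sample-delivery path, which is the usual way to rule out an AB/BA deadlock; the rationale is inferred here, not stated in the patch. A generic illustration of the idea, not DirectShow code:

#include <mutex>

std::mutex receive_lock;  // stands in for LockReceive()/UnlockReceive()
std::mutex filter_lock;   // stands in for LockFilter()/UnlockFilter()

void PauseLikeOperation() {
  // Every path that needs both locks acquires them in this same order.
  std::lock_guard<std::mutex> r(receive_lock);
  std::lock_guard<std::mutex> f(filter_lock);
  // ... perform the state transition ...
}
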
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/i420/main/source/OWNERS b/chromium/third_party/webrtc/modules/video_coding/codecs/i420/main/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/i420/main/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/OWNERS b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc
index 9eda36e8439..dcd74790c29 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc
@@ -26,14 +26,12 @@ using namespace webrtc;
NormalAsyncTest::NormalAsyncTest()
:
-NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
- _testNo),
+NormalTest("Async Normal Test 1", "A test of normal execution of the codec", 1),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
-_testNo(1),
_appendNext(false),
_missingFrames(false),
_rttFrames(0),
@@ -47,13 +45,13 @@ _waitForKey(false)
NormalAsyncTest::NormalAsyncTest(uint32_t bitRate)
:
NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
- bitRate, _testNo),
+ bitRate,
+ 1),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
-_testNo(1),
_appendNext(false),
_missingFrames(false),
_rttFrames(0),
@@ -67,13 +65,12 @@ _waitForKey(false)
NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
unsigned int testNo)
:
-NormalTest(name, description, _testNo),
+NormalTest(name, description, testNo),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
-_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
@@ -88,13 +85,12 @@ _waitForKey(false)
NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
uint32_t bitRate, unsigned int testNo)
:
-NormalTest(name, description, bitRate, _testNo),
+NormalTest(name, description, bitRate, testNo),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
-_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
@@ -110,13 +106,12 @@ NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
uint32_t bitRate, unsigned int testNo,
unsigned int rttFrames)
:
-NormalTest(name, description, bitRate, _testNo),
+NormalTest(name, description, bitRate, testNo),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
-_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
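
The constructor changes above replace `_testNo` with the literal or the `testNo` parameter in the base-class initializer: a member read inside a base-class initializer is used before it has been initialized, because base constructors run before member initializers. A minimal illustration of the pitfall, using types unrelated to the test framework:

#include <iostream>

struct Base {
  explicit Base(int n) : value(n) {}
  int value;
};

struct Derived : Base {
  // Broken form: Base(_n) would read _n before it is initialized.
  // Derived(int n) : Base(_n), _n(n) {}
  // Fixed form: pass the constructor parameter itself.
  explicit Derived(int n) : Base(n), _n(n) {}
  int _n;
};

int main() {
  Derived d(7);
  std::cout << d.value << "\n";  // prints 7
  return 0;
}
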
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h
index d2d17eebc7f..1e62534acab 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h
@@ -118,7 +118,6 @@ protected:
int _encFrameCnt;
int _decFrameCnt;
bool _requestKeyFrame;
- unsigned int _testNo;
unsigned int _lengthEncFrame;
FrameQueueTuple* _frameToDecode;
bool _appendNext;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/test.h b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/test.h
index 890d0cb50cb..7558abe6bbf 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/test.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/test.h
@@ -41,7 +41,7 @@ protected:
virtual void Teardown();
double ActualBitRate(int nFrames);
virtual bool PacketLoss(double lossRate, int /*thrown*/);
- static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
+ static double RandUniform() { return (rand() + 1.0)/(RAND_MAX + 1.0); }
static void VideoEncodedBufferToEncodedImage(
webrtc::VideoFrame& videoBuffer,
webrtc::EncodedImage &image);
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
index 3b034e01c60..ec12a51693c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
@@ -565,7 +565,7 @@ UnitTest::Perform()
frameLength = WaitForDecodedFrame();
}
unsigned int length = CalcBufferSize(kI420, width, height);
- scoped_array<uint8_t> decoded_buffer(new uint8_t[length]);
+ scoped_ptr<uint8_t[]> decoded_buffer(new uint8_t[length]);
ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame,
decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength, _refDecFrame,
@@ -645,7 +645,7 @@ UnitTest::Perform()
// check that decoded frame matches with reference
unsigned int length = CalcBufferSize(kI420, width, height);
- scoped_array<uint8_t> decoded_buffer(new uint8_t[length]);
+ scoped_ptr<uint8_t[]> decoded_buffer(new uint8_t[length]);
ExtractBuffer(_decodedVideoBuffer, length, decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), length,
_refDecFrame, _lengthSourceFrame) == true);
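
These two unit_test.cc hunks (and the vp8_sequence_coder.cc hunk further down) migrate buffer ownership from webrtc::scoped_array<T> to the array form webrtc::scoped_ptr<T[]>. A minimal sketch of the replacement pattern, assuming only the scoped_ptr.h header that this patch references elsewhere; the helper function itself is illustrative:

    // Sketch: scoped ownership of a decode buffer with the array specialization.
    #include <stdint.h>
    #include <string.h>  // memset
    #include "webrtc/system_wrappers/interface/scoped_ptr.h"

    void FillZeroedBuffer(size_t length) {
      // Before this patch: webrtc::scoped_array<uint8_t> buf(new uint8_t[length]);
      webrtc::scoped_ptr<uint8_t[]> buf(new uint8_t[length]);
      memset(buf.get(), 0, length);  // .get() still exposes the raw pointer
    }  // delete[] runs automatically when buf leaves scope
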
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/tools/OWNERS b/chromium/third_party/webrtc/modules/video_coding/codecs/tools/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/tools/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi b/chromium/third_party/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi
index cdae0afeaad..8f15b28504d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi
@@ -17,6 +17,7 @@
'video_codecs_test_framework',
'webrtc_video_coding',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'<(webrtc_root)/test/metrics.gyp:metrics',
'<(webrtc_vp8_dir)/vp8.gyp:webrtc_vp8',
],
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/OWNERS b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8.gyp b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
index b3859a5978d..621c244cdfe 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
@@ -31,6 +31,7 @@
'reference_picture_selection.cc',
'include/vp8.h',
'include/vp8_common_types.h',
+ 'vp8_factory.cc',
'vp8_impl.cc',
'default_temporal_layers.cc',
'default_temporal_layers.h',
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_factory.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_factory.cc
new file mode 100644
index 00000000000..995191e3c04
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_factory.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include "webrtc/modules/video_coding/codecs/vp8/vp8_impl.h"
+
+namespace webrtc {
+
+VP8Encoder* VP8Encoder::Create() {
+ return new VP8EncoderImpl();
+}
+
+VP8Decoder* VP8Decoder::Create() {
+ return new VP8DecoderImpl();
+}
+
+} // namespace webrtc
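
The new vp8_factory.cc holds the VP8Encoder::Create() and VP8Decoder::Create() factories that the following vp8_impl.cc hunk removes, so construction of the codecs no longer lives in the implementation file. A minimal call-site sketch, assuming the public header path used elsewhere in this patch and caller-owned lifetime (the ownership comment is an assumption, not something this patch changes):

    // Sketch: constructing a VP8 encoder/decoder pair through the factories.
    #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"

    void CreateVp8Pair() {
      webrtc::VP8Encoder* encoder = webrtc::VP8Encoder::Create();
      webrtc::VP8Decoder* decoder = webrtc::VP8Decoder::Create();
      // ... InitEncode()/InitDecode() and frame processing would go here ...
      delete decoder;  // assumed caller-owned
      delete encoder;
    }
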
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 910a5f3f8bb..4901edff3d5 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -35,10 +35,6 @@ enum { kVp8ErrorPropagationTh = 30 };
namespace webrtc {
-VP8Encoder* VP8Encoder::Create() {
- return new VP8EncoderImpl();
-}
-
VP8EncoderImpl::VP8EncoderImpl()
: encoded_image_(),
encoded_complete_callback_(NULL),
@@ -218,7 +214,10 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
}
config_->g_lag_in_frames = 0; // 0- no frame lagging
- if (codec_.width * codec_.height > 1280 * 960 && number_of_cores >= 6) {
+ if (codec_.width * codec_.height >= 1920 * 1080 && number_of_cores > 8) {
+ config_->g_threads = 8; // 8 threads for 1080p on high perf machines.
+ } else if (codec_.width * codec_.height > 1280 * 960 &&
+ number_of_cores >= 6) {
config_->g_threads = 3; // 3 threads for 1080p.
} else if (codec_.width * codec_.height > 640 * 480 && number_of_cores >= 3) {
config_->g_threads = 2; // 2 threads for qHD/HD.
@@ -477,8 +476,8 @@ int VP8EncoderImpl::GetEncodedPartitions(const I420VideoFrame& input_image) {
TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length);
encoded_image_._timeStamp = input_image.timestamp();
encoded_image_.capture_time_ms_ = input_image.render_time_ms();
- encoded_image_._encodedHeight = raw_->h;
- encoded_image_._encodedWidth = raw_->w;
+ encoded_image_._encodedHeight = codec_.height;
+ encoded_image_._encodedWidth = codec_.width;
encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
&frag_info);
}
@@ -496,10 +495,6 @@ int VP8EncoderImpl::RegisterEncodeCompleteCallback(
return WEBRTC_VIDEO_CODEC_OK;
}
-VP8Decoder* VP8Decoder::Create() {
- return new VP8DecoderImpl();
-}
-
VP8DecoderImpl::VP8DecoderImpl()
: decode_complete_callback_(NULL),
inited_(false),
@@ -718,7 +713,7 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
}
img = vpx_codec_get_frame(decoder_, &iter);
- ret = ReturnFrame(img, input_image._timeStamp);
+ ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_);
if (ret != 0) {
// Reset to avoid requesting key frames too often.
if (ret < 0 && propagation_cnt_ > 0)
@@ -798,7 +793,9 @@ int VP8DecoderImpl::DecodePartitions(
return WEBRTC_VIDEO_CODEC_OK;
}
-int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
+int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
+ uint32_t timestamp,
+ int64_t ntp_time_ms) {
if (img == NULL) {
// Decoder OK and NULL image => No show frame
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -816,6 +813,7 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
img->stride[VPX_PLANE_U],
img->stride[VPX_PLANE_V]);
decoded_image_.set_timestamp(timestamp);
+ decoded_image_.set_ntp_time_ms(ntp_time_ms);
int ret = decode_complete_callback_->Decoded(decoded_image_);
if (ret != 0)
return ret;
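
Two behavioural changes land in vp8_impl.cc. InitEncode gains a new top tier in the thread selection: input of 1080p or larger on machines with more than eight cores now gets eight encoder threads, ahead of the existing 3-, 2- and 1-thread tiers. ReturnFrame additionally carries the NTP capture time through to the decoded frame. The helper below is only an illustrative restatement of the thread tiers; the single-thread fallback is assumed from the unchanged tail of that if/else chain:

    // Illustrative restatement of the thread tiers in VP8EncoderImpl::InitEncode.
    static int Vp8EncoderThreads(int width, int height, int number_of_cores) {
      const int pixels = width * height;
      if (pixels >= 1920 * 1080 && number_of_cores > 8)
        return 8;  // new tier: 1080p and up on high-perf machines
      if (pixels > 1280 * 960 && number_of_cores >= 6)
        return 3;  // 3 threads for 1080p
      if (pixels > 640 * 480 && number_of_cores >= 3)
        return 2;  // 2 threads for qHD/HD
      return 1;    // assumed fallback for smaller resolutions
    }
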
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
index 26dd52e6a93..56f7219fc1a 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
@@ -214,7 +214,9 @@ class VP8DecoderImpl : public VP8Decoder {
int DecodePartitions(const EncodedImage& input_image,
const RTPFragmentationHeader* fragmentation);
- int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
+ int ReturnFrame(const vpx_image_t* img,
+ uint32_t timeStamp,
+ int64_t ntp_time_ms);
I420VideoFrame decoded_image_;
DecodedImageCallback* decode_complete_callback_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index 1bd3e1a6238..ffa0bcc681f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -142,7 +142,7 @@ int SequenceCoder(webrtc::test::CommandLineParser& parser) {
EXPECT_EQ(0, decoder->InitDecode(&inst, 1));
webrtc::I420VideoFrame input_frame;
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420, width, height);
- webrtc::scoped_array<uint8_t> frame_buffer(new uint8_t[length]);
+ webrtc::scoped_ptr<uint8_t[]> frame_buffer(new uint8_t[length]);
int half_width = (width + 1) / 2;
// Set and register callbacks.
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/interface/video_coding.h b/chromium/third_party/webrtc/modules/video_coding/main/interface/video_coding.h
index c0166761e83..cad0e5ab879 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/interface/video_coding.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/interface/video_coding.h
@@ -72,11 +72,9 @@ public:
kReferenceSelection
};
- static VideoCodingModule* Create(const int32_t id);
+ static VideoCodingModule* Create();
- static VideoCodingModule* Create(const int32_t id,
- Clock* clock,
- EventFactory* event_factory);
+ static VideoCodingModule* Create(Clock* clock, EventFactory* event_factory);
static void Destroy(VideoCodingModule* module);
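
With the module id gone from the interface, callers create the VCM through either the zero-argument factory or the clock/event-factory overload. A minimal call-site sketch against the new signatures:

    // Sketch: id-less creation and teardown of a VideoCodingModule.
    #include "webrtc/modules/video_coding/main/interface/video_coding.h"

    void UseVcm() {
      webrtc::VideoCodingModule* vcm = webrtc::VideoCodingModule::Create();
      // ... register codecs, feed frames and packets ...
      webrtc::VideoCodingModule::Destroy(vcm);
    }
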
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/Android.mk b/chromium/third_party/webrtc/modules/video_coding/main/source/Android.mk
index 9ebdbed9a66..a8cf2d0e64c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/Android.mk
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/Android.mk
@@ -37,7 +37,6 @@ LOCAL_SRC_FILES := \
receiver.cc \
rtt_filter.cc \
session_info.cc \
- timestamp_extrapolator.cc \
timestamp_map.cc \
timing.cc \
video_coding_impl.cc
@@ -56,7 +55,7 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/../../../../common_video/vplib/main/interface \
$(LOCAL_PATH)/../../../../common_video/interface \
$(LOCAL_PATH)/../../utility/include \
- $(LOCAL_PATH)/../../../../system_wrappers/interface
+ $(LOCAL_PATH)/../../../../system_wrappers/interface
LOCAL_SHARED_LIBRARIES := \
libcutils \
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/OWNERS b/chromium/third_party/webrtc/modules/video_coding/main/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.cc
index e41c6b42534..e7a9d91b138 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.cc
@@ -20,7 +20,7 @@
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#endif
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -42,9 +42,8 @@ VCMExtDecoderMapItem::VCMExtDecoderMapItem(
internal_render_timing(internal_render_timing) {
}
-VCMCodecDataBase::VCMCodecDataBase(int id)
- : id_(id),
- number_of_cores_(0),
+VCMCodecDataBase::VCMCodecDataBase()
+ : number_of_cores_(0),
max_payload_size_(kDefaultPayloadSize),
periodic_key_frames_(false),
pending_encoder_reset_(true),
@@ -58,8 +57,7 @@ VCMCodecDataBase::VCMCodecDataBase(int id)
ptr_decoder_(NULL),
current_dec_is_external_(false),
dec_map_(),
- dec_external_map_() {
-}
+ dec_external_map_() {}
VCMCodecDataBase::~VCMCodecDataBase() {
ResetSender();
@@ -160,7 +158,7 @@ bool VCMCodecDataBase::SetSendCodec(
if (max_payload_size <= 0) {
max_payload_size = kDefaultPayloadSize;
}
- if (number_of_cores <= 0 || number_of_cores > 32) {
+ if (number_of_cores <= 0) {
return false;
}
if (send_codec->plType <= 0) {
@@ -221,24 +219,14 @@ bool VCMCodecDataBase::SetSendCodec(
} else {
ptr_encoder_ = CreateEncoder(send_codec->codecType);
current_enc_is_external_ = false;
+ if (!ptr_encoder_) {
+ return false;
+ }
}
encoded_frame_callback->SetPayloadType(send_codec->plType);
- if (!ptr_encoder_) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(id_),
- "Failed to create encoder: %s.",
- send_codec->plName);
- return false;
- }
if (ptr_encoder_->InitEncode(send_codec,
number_of_cores_,
max_payload_size_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(id_),
- "Failed to initialize encoder: %s.",
- send_codec->plName);
DeleteEncoder();
return false;
} else if (ptr_encoder_->RegisterEncodeCallback(encoded_frame_callback) < 0) {
@@ -257,8 +245,6 @@ bool VCMCodecDataBase::SetSendCodec(
}
bool VCMCodecDataBase::SendCodec(VideoCodec* current_send_codec) const {
- WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCoding, VCMId(id_),
- "SendCodec");
if (!ptr_encoder_) {
return false;
}
@@ -267,8 +253,6 @@ bool VCMCodecDataBase::SendCodec(VideoCodec* current_send_codec) const {
}
VideoCodecType VCMCodecDataBase::SendCodec() const {
- WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCoding, VCMId(id_),
- "SendCodec type");
if (!ptr_encoder_) {
return kVideoCodecUnknown;
}
@@ -338,12 +322,6 @@ bool VCMCodecDataBase::RequiresEncoderReset(const VideoCodec& new_send_codec) {
}
break;
case kVideoCodecGeneric:
- if (memcmp(&new_send_codec.codecSpecific.Generic,
- &send_codec_.codecSpecific.Generic,
- sizeof(new_send_codec.codecSpecific.Generic)) !=
- 0) {
- return true;
- }
break;
// Known codecs without payload-specifics
case kVideoCodecI420:
@@ -404,7 +382,11 @@ bool VCMCodecDataBase::DeregisterExternalDecoder(uint8_t payload_type) {
// Not found
return false;
}
- if (receive_codec_.plType == payload_type) {
+ // We can't use payload_type to check if the decoder is currently in use,
+ // because payload type may be out of date (e.g. before we decode the first
+ // frame after RegisterReceiveCodec)
+ if (ptr_decoder_ != NULL &&
+ &ptr_decoder_->_decoder == (*it).second->external_decoder_instance) {
// Release it if it was registered and in use.
ReleaseDecoder(ptr_decoder_);
ptr_decoder_ = NULL;
@@ -443,12 +425,6 @@ bool VCMCodecDataBase::RegisterReceiveCodec(
if (number_of_cores < 0) {
return false;
}
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCoding, VCMId(id_),
- "Codec: %s, Payload type %d, Height %d, Width %d, Bitrate %d,"
- "Framerate %d.",
- receive_codec->plName, receive_codec->plType,
- receive_codec->height, receive_codec->width,
- receive_codec->startBitrate, receive_codec->maxFramerate);
// Check if payload value already exists, if so - erase old and insert new.
DeregisterReceiveCodec(receive_codec->plType);
if (receive_codec->codecType == kVideoCodecUnknown) {
@@ -530,7 +506,7 @@ VCMGenericDecoder* VCMCodecDataBase::CreateDecoderCopy() const {
if (!decoder_copy) {
return NULL;
}
- return new VCMGenericDecoder(*decoder_copy, id_, ptr_decoder_->External());
+ return new VCMGenericDecoder(*decoder_copy, ptr_decoder_->External());
}
void VCMCodecDataBase::ReleaseDecoder(VCMGenericDecoder* decoder) const {
@@ -549,8 +525,7 @@ void VCMCodecDataBase::CopyDecoder(const VCMGenericDecoder& decoder) {
if (decoder_copy) {
VCMDecodedFrameCallback* cb = ptr_decoder_->_callback;
ReleaseDecoder(ptr_decoder_);
- ptr_decoder_ = new VCMGenericDecoder(*decoder_copy, id_,
- decoder.External());
+ ptr_decoder_ = new VCMGenericDecoder(*decoder_copy, decoder.External());
if (cb && ptr_decoder_->RegisterDecodeCompleteCallback(cb)) {
assert(false);
}
@@ -575,8 +550,8 @@ VCMGenericDecoder* VCMCodecDataBase::CreateAndInitDecoder(
assert(new_codec);
const VCMDecoderMapItem* decoder_item = FindDecoderItem(payload_type);
if (!decoder_item) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(id_),
- "Unknown payload type: %u", payload_type);
+ LOG(LS_ERROR) << "Can't find a decoder associated with payload type: "
+ << payload_type;
return NULL;
}
VCMGenericDecoder* ptr_decoder = NULL;
@@ -585,7 +560,7 @@ VCMGenericDecoder* VCMCodecDataBase::CreateAndInitDecoder(
if (external_dec_item) {
// External codec.
ptr_decoder = new VCMGenericDecoder(
- *external_dec_item->external_decoder_instance, id_, true);
+ *external_dec_item->external_decoder_instance, true);
*external = true;
} else {
// Create decoder.
@@ -617,6 +592,7 @@ VCMGenericEncoder* VCMCodecDataBase::CreateEncoder(
return new VCMGenericEncoder(*(new I420Encoder));
#endif
default:
+ LOG(LS_WARNING) << "No internal encoder of this type exists.";
return NULL;
}
}
@@ -636,11 +612,11 @@ VCMGenericDecoder* VCMCodecDataBase::CreateDecoder(VideoCodecType type) const {
switch (type) {
#ifdef VIDEOCODEC_VP8
case kVideoCodecVP8:
- return new VCMGenericDecoder(*(VP8Decoder::Create()), id_);
+ return new VCMGenericDecoder(*(VP8Decoder::Create()));
#endif
#ifdef VIDEOCODEC_I420
case kVideoCodecI420:
- return new VCMGenericDecoder(*(new I420Decoder), id_);
+ return new VCMGenericDecoder(*(new I420Decoder));
#endif
default:
return NULL;
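
Most of the churn in this file is the migration from the id-keyed, printf-style WEBRTC_TRACE macro to the stream-based LOG/LOG_F macros from system_wrappers/interface/logging.h, which is what lets id_ be dropped from VCMCodecDataBase. A condensed before/after of that pattern, mirroring the hunks above:

    // Condensed before/after of the logging migration applied throughout this file.
    #include "webrtc/system_wrappers/interface/logging.h"

    namespace webrtc {

    void ReportUnknownPayload(int payload_type) {
      // Before: WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
      //                      VCMId(id_), "Unknown payload type: %u", payload_type);
      LOG(LS_ERROR) << "Can't find a decoder associated with payload type: "
                    << payload_type;
    }

    }  // namespace webrtc
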
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.h b/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.h
index 2a28ed4fb87..f27218f61cf 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.h
@@ -50,7 +50,7 @@ struct VCMExtDecoderMapItem {
class VCMCodecDataBase {
public:
- explicit VCMCodecDataBase(int id);
+ VCMCodecDataBase();
~VCMCodecDataBase();
// Sender Side
@@ -174,7 +174,6 @@ class VCMCodecDataBase {
const VCMExtDecoderMapItem* FindExternalDecoderItem(
uint8_t payload_type) const;
- int id_;
int number_of_cores_;
int max_payload_size_;
bool periodic_key_frames_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.cc
index 6760762c9a5..3ccf0b0fd99 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.cc
@@ -149,17 +149,12 @@ const RTPFragmentationHeader* VCMEncodedFrame::FragmentationHeader() const {
return &_fragmentation;
}
-int32_t
-VCMEncodedFrame::VerifyAndAllocate(const uint32_t minimumSize)
+void VCMEncodedFrame::VerifyAndAllocate(const uint32_t minimumSize)
{
if(minimumSize > _size)
{
// create buffer of sufficient size
uint8_t* newBuffer = new uint8_t[minimumSize];
- if (newBuffer == NULL)
- {
- return -1;
- }
if(_buffer)
{
// copy old data
@@ -169,7 +164,6 @@ VCMEncodedFrame::VerifyAndAllocate(const uint32_t minimumSize)
_buffer = newBuffer;
_size = minimumSize;
}
- return 0;
}
webrtc::FrameType VCMEncodedFrame::ConvertFrameType(VideoFrameType frameType)
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.h b/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.h
index 3e73be51803..dd0f843d267 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.h
@@ -104,7 +104,7 @@ protected:
* is copied to the new buffer.
* Buffer size is updated to minimumSize.
*/
- int32_t VerifyAndAllocate(const uint32_t minimumSize);
+ void VerifyAndAllocate(const uint32_t minimumSize);
void Reset();
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/frame_buffer.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/frame_buffer.cc
index 531c7ac112d..fce68fb32d5 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/frame_buffer.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/frame_buffer.cc
@@ -14,6 +14,7 @@
#include <string.h>
#include "webrtc/modules/video_coding/main/source/packet.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -86,20 +87,7 @@ VCMFrameBuffer::InsertPacket(const VCMPacket& packet,
int64_t timeInMs,
VCMDecodeErrorMode decode_error_mode,
const FrameData& frame_data) {
- // Is this packet part of this frame?
- if (TimeStamp() && (TimeStamp() != packet.timestamp)) {
- return kTimeStampError;
- }
-
- // sanity checks
- if (_size + packet.sizeBytes +
- (packet.insertStartCode ? kH264StartCodeLengthBytes : 0 )
- > kMaxJBFrameSizeBytes) {
- return kSizeError;
- }
- if (NULL == packet.dataPtr && packet.sizeBytes > 0) {
- return kSizeError;
- }
+ assert(!(NULL == packet.dataPtr && packet.sizeBytes > 0));
if (packet.dataPtr != NULL) {
_payloadType = packet.payloadType;
}
@@ -108,6 +96,8 @@ VCMFrameBuffer::InsertPacket(const VCMPacket& packet,
// First packet (empty and/or media) inserted into this frame.
// store some info and set some initial values.
_timeStamp = packet.timestamp;
+ // We only take the ntp timestamp of the first packet of a frame.
+ ntp_time_ms_ = packet.ntp_time_ms_;
_codec = packet.codec;
if (packet.frameType != kFrameEmpty) {
// first media packet
@@ -126,11 +116,11 @@ VCMFrameBuffer::InsertPacket(const VCMPacket& packet,
const uint32_t newSize = _size +
increments * kBufferIncStepSizeBytes;
if (newSize > kMaxJBFrameSizeBytes) {
+ LOG(LS_ERROR) << "Failed to insert packet due to frame being too "
+ "big.";
return kSizeError;
}
- if (VerifyAndAllocate(newSize) == -1) {
- return kSizeError;
- }
+ VerifyAndAllocate(newSize);
_sessionInfo.UpdateDataPointers(prevBuffer, _buffer);
}
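
The VerifyAndAllocate change above (declared in encoded_frame.h and called here) drops the int32_t return because the failure path was unreachable: a plain new[] expression throws std::bad_alloc rather than returning NULL, so the -1 branch and the caller's matching kSizeError check were dead code. A tiny standalone illustration of that allocation contract:

    #include <stdint.h>

    // Plain new[] never yields NULL; it throws std::bad_alloc on failure, which is
    // why the old "if (newBuffer == NULL) return -1;" branch could never be taken.
    // (A NULL return would require the nothrow form: new (std::nothrow) uint8_t[n].)
    uint8_t* AllocateFrameBuffer(uint32_t minimum_size) {
      return new uint8_t[minimum_size];
    }
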
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.cc
index 50b1eda70fc..cb0faf9a901 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.cc
@@ -12,8 +12,7 @@
#include "webrtc/modules/video_coding/main/source/generic_decoder.h"
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/system_wrappers/interface/trace_event.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -59,10 +58,11 @@ int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage)
_timestampMap.Pop(decodedImage.timestamp()));
callback = _receiveCallback;
}
- if (frameInfo == NULL)
- {
- // The map should never be empty or full if this callback is called.
- return WEBRTC_VIDEO_CODEC_ERROR;
+
+ if (frameInfo == NULL) {
+ LOG(LS_WARNING) << "Too many frames backed up in the decoder, dropping "
+ "this one.";
+ return WEBRTC_VIDEO_CODEC_OK;
}
_timing.StopDecodeTimer(
@@ -73,14 +73,7 @@ int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage)
if (callback != NULL)
{
decodedImage.set_render_time_ms(frameInfo->renderTimeMs);
- int32_t callbackReturn = callback->FrameToRender(decodedImage);
- if (callbackReturn < 0)
- {
- WEBRTC_TRACE(webrtc::kTraceDebug,
- webrtc::kTraceVideoCoding,
- -1,
- "Render callback returned error: %d", callbackReturn);
- }
+ callback->FrameToRender(decodedImage);
}
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -125,15 +118,15 @@ int32_t VCMDecodedFrameCallback::Pop(uint32_t timestamp)
return VCM_OK;
}
-VCMGenericDecoder::VCMGenericDecoder(VideoDecoder& decoder, int32_t id, bool isExternal)
+VCMGenericDecoder::VCMGenericDecoder(VideoDecoder& decoder, bool isExternal)
:
-_id(id),
_callback(NULL),
_frameInfos(),
_nextFrameInfoIdx(0),
_decoder(decoder),
_codecType(kVideoCodecUnknown),
-_isExternal(isExternal)
+_isExternal(isExternal),
+_keyFrameDecoded(false)
{
}
@@ -156,11 +149,6 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame,
_frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
_callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);
- WEBRTC_TRACE(webrtc::kTraceDebug,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Decoding timestamp %u", frame.TimeStamp());
-
_nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;
int32_t ret = _decoder.Decode(frame.EncodedImage(),
frame.MissingFrame(),
@@ -170,7 +158,8 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame,
if (ret < WEBRTC_VIDEO_CODEC_OK)
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_id), "Decoder error: %d\n", ret);
+ LOG(LS_WARNING) << "Failed to decode frame with timestamp "
+ << frame.TimeStamp() << ", error code: " << ret;
_callback->Pop(frame.TimeStamp());
return ret;
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.h b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.h
index e1993fbb906..846d4d3e111 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.h
@@ -63,7 +63,7 @@ class VCMGenericDecoder
{
friend class VCMCodecDataBase;
public:
- VCMGenericDecoder(VideoDecoder& decoder, int32_t id = 0, bool isExternal = false);
+ VCMGenericDecoder(VideoDecoder& decoder, bool isExternal = false);
~VCMGenericDecoder();
/**
@@ -105,17 +105,14 @@ public:
bool External() const;
-protected:
-
- int32_t _id;
+private:
VCMDecodedFrameCallback* _callback;
VCMFrameInformation _frameInfos[kDecoderFrameMemoryLength];
- uint32_t _nextFrameInfoIdx;
+ uint32_t _nextFrameInfoIdx;
VideoDecoder& _decoder;
VideoCodecType _codecType;
bool _isExternal;
bool _keyFrameDecoded;
-
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.cc
index 064470b1667..6fb2c9f81b7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.cc
@@ -13,8 +13,42 @@
#include "webrtc/modules/video_coding/main/source/generic_encoder.h"
#include "webrtc/modules/video_coding/main/source/media_optimization.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
+namespace {
+// Map information from info into rtp. If no relevant information is found
+// in info, rtp is set to NULL.
+void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader** rtp) {
+ if (!info) {
+ *rtp = NULL;
+ return;
+ }
+ switch (info->codecType) {
+ case kVideoCodecVP8: {
+ (*rtp)->codec = kRtpVideoVp8;
+ (*rtp)->codecHeader.VP8.InitRTPVideoHeaderVP8();
+ (*rtp)->codecHeader.VP8.pictureId = info->codecSpecific.VP8.pictureId;
+ (*rtp)->codecHeader.VP8.nonReference =
+ info->codecSpecific.VP8.nonReference;
+ (*rtp)->codecHeader.VP8.temporalIdx = info->codecSpecific.VP8.temporalIdx;
+ (*rtp)->codecHeader.VP8.layerSync = info->codecSpecific.VP8.layerSync;
+ (*rtp)->codecHeader.VP8.tl0PicIdx = info->codecSpecific.VP8.tl0PicIdx;
+ (*rtp)->codecHeader.VP8.keyIdx = info->codecSpecific.VP8.keyIdx;
+ (*rtp)->simulcastIdx = info->codecSpecific.VP8.simulcastIdx;
+ return;
+ }
+ case kVideoCodecGeneric:
+ (*rtp)->codec = kRtpVideoGeneric;
+ (*rtp)->simulcastIdx = info->codecSpecific.generic.simulcast_idx;
+ return;
+ default:
+ // No codec specific info. Change RTP header pointer to NULL.
+ *rtp = NULL;
+ return;
+ }
+}
+} // namespace
//#define DEBUG_ENCODER_BIT_STREAM
@@ -50,11 +84,12 @@ VCMGenericEncoder::InitEncode(const VideoCodec* settings,
_bitRate = settings->startBitrate * 1000;
_frameRate = settings->maxFramerate;
_codecType = settings->codecType;
- if (_VCMencodedFrameCallback != NULL)
- {
- _VCMencodedFrameCallback->SetCodecType(_codecType);
+ if (_encoder.InitEncode(settings, numberOfCores, maxPayloadSize) != 0) {
+ LOG(LS_ERROR) << "Failed to initialize the encoder associated with "
+ "payload name: " << settings->plName;
+ return -1;
}
- return _encoder.InitEncode(settings, numberOfCores, maxPayloadSize);
+ return 0;
}
int32_t
@@ -127,8 +162,6 @@ int32_t
VCMGenericEncoder::RegisterEncodeCallback(VCMEncodedFrameCallback* VCMencodedFrameCallback)
{
_VCMencodedFrameCallback = VCMencodedFrameCallback;
-
- _VCMencodedFrameCallback->SetCodecType(_codecType);
_VCMencodedFrameCallback->SetInternalSource(_internalSource);
return _encoder.RegisterEncodeCompleteCallback(_VCMencodedFrameCallback);
}
@@ -142,15 +175,13 @@ VCMGenericEncoder::InternalSource() const
/***************************
* Callback Implementation
***************************/
-VCMEncodedFrameCallback::VCMEncodedFrameCallback():
+VCMEncodedFrameCallback::VCMEncodedFrameCallback(
+ EncodedImageCallback* post_encode_callback):
_sendCallback(),
_mediaOpt(NULL),
-_encodedBytes(0),
_payloadType(0),
-_codecType(kVideoCodecUnknown),
_internalSource(false),
-post_encode_callback_lock_(CriticalSectionWrapper::CreateCriticalSection()),
-post_encode_callback_(NULL)
+post_encode_callback_(post_encode_callback)
#ifdef DEBUG_ENCODER_BIT_STREAM
, _bitStreamAfterEncoder(NULL)
#endif
@@ -180,12 +211,8 @@ VCMEncodedFrameCallback::Encoded(
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentationHeader)
{
- {
- CriticalSectionScoped cs(post_encode_callback_lock_.get());
- if (post_encode_callback_) {
- post_encode_callback_->Encoded(encodedImage);
- }
- }
+ post_encode_callback_->Encoded(encodedImage);
+
FrameType frameType = VCMEncodedFrame::ConvertFrameType(encodedImage._frameType);
uint32_t encodedBytes = 0;
@@ -202,14 +229,7 @@ VCMEncodedFrameCallback::Encoded(
RTPVideoHeader rtpVideoHeader;
RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
- if (codecSpecificInfo)
- {
- CopyCodecSpecific(*codecSpecificInfo, &rtpVideoHeaderPtr);
- }
- else
- {
- rtpVideoHeaderPtr = NULL;
- }
+ CopyCodecSpecific(codecSpecificInfo, &rtpVideoHeaderPtr);
int32_t callbackReturn = _sendCallback->SendData(
frameType,
@@ -229,9 +249,8 @@ VCMEncodedFrameCallback::Encoded(
{
return VCM_UNINITIALIZED;
}
- _encodedBytes = encodedBytes;
if (_mediaOpt != NULL) {
- _mediaOpt->UpdateWithEncodedData(_encodedBytes, encodedImage._timeStamp,
+ _mediaOpt->UpdateWithEncodedData(encodedBytes, encodedImage._timeStamp,
frameType);
if (_internalSource)
{
@@ -241,12 +260,6 @@ VCMEncodedFrameCallback::Encoded(
return VCM_OK;
}
-uint32_t
-VCMEncodedFrameCallback::EncodedBytes()
-{
- return _encodedBytes;
-}
-
void
VCMEncodedFrameCallback::SetMediaOpt(
media_optimization::MediaOptimization *mediaOpt)
@@ -254,36 +267,4 @@ VCMEncodedFrameCallback::SetMediaOpt(
_mediaOpt = mediaOpt;
}
-void VCMEncodedFrameCallback::CopyCodecSpecific(const CodecSpecificInfo& info,
- RTPVideoHeader** rtp) {
- switch (info.codecType) {
- case kVideoCodecVP8: {
- (*rtp)->codec = kRtpVideoVp8;
- (*rtp)->codecHeader.VP8.InitRTPVideoHeaderVP8();
- (*rtp)->codecHeader.VP8.pictureId = info.codecSpecific.VP8.pictureId;
- (*rtp)->codecHeader.VP8.nonReference =
- info.codecSpecific.VP8.nonReference;
- (*rtp)->codecHeader.VP8.temporalIdx = info.codecSpecific.VP8.temporalIdx;
- (*rtp)->codecHeader.VP8.layerSync = info.codecSpecific.VP8.layerSync;
- (*rtp)->codecHeader.VP8.tl0PicIdx = info.codecSpecific.VP8.tl0PicIdx;
- (*rtp)->codecHeader.VP8.keyIdx = info.codecSpecific.VP8.keyIdx;
- (*rtp)->simulcastIdx = info.codecSpecific.VP8.simulcastIdx;
- return;
- }
- case kVideoCodecGeneric:
- (*rtp)->codec = kRtpVideoGeneric;
- (*rtp)->simulcastIdx = info.codecSpecific.generic.simulcast_idx;
- return;
- default:
- // No codec specific info. Change RTP header pointer to NULL.
- *rtp = NULL;
- return;
- }
-}
-
-void VCMEncodedFrameCallback::RegisterPostEncodeImageCallback(
- EncodedImageCallback* callback) {
- CriticalSectionScoped cs(post_encode_callback_lock_.get());
- post_encode_callback_ = callback;
-}
} // namespace webrtc
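
VCMEncodedFrameCallback now receives its post-encode EncodedImageCallback through the constructor instead of a RegisterPostEncodeImageCallback setter, so the critical section and the NULL check around post_encode_callback_ disappear. A minimal sketch of that constructor-injection shape; the class below is illustrative and not part of the WebRTC API:

    // Illustrative: inject the callback once at construction; the pointer is then
    // fixed for the object's lifetime, so no lock or NULL check is needed on use.
    class EncodedImageCallback;  // real type comes from the WebRTC codec headers

    class PostEncodeForwarder {
     public:
      explicit PostEncodeForwarder(EncodedImageCallback* post_encode_callback)
          : post_encode_callback_(post_encode_callback) {}

      EncodedImageCallback* callback() const { return post_encode_callback_; }

     private:
      EncodedImageCallback* const post_encode_callback_;
    };
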
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.h b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.h
index c5cfeabc26f..9277260af34 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.h
@@ -17,9 +17,7 @@
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-namespace webrtc
-{
-
+namespace webrtc {
class CriticalSectionWrapper;
namespace media_optimization {
@@ -32,7 +30,7 @@ class MediaOptimization;
class VCMEncodedFrameCallback : public EncodedImageCallback
{
public:
- VCMEncodedFrameCallback();
+ VCMEncodedFrameCallback(EncodedImageCallback* post_encode_callback);
virtual ~VCMEncodedFrameCallback();
/*
@@ -43,10 +41,6 @@ public:
const CodecSpecificInfo* codecSpecificInfo = NULL,
const RTPFragmentationHeader* fragmentationHeader = NULL);
/*
- * Get number of encoded bytes
- */
- uint32_t EncodedBytes();
- /*
* Callback implementation - generic encoder encode complete
*/
int32_t SetTransportCallback(VCMPacketizationCallback* transport);
@@ -56,27 +50,14 @@ public:
void SetMediaOpt (media_optimization::MediaOptimization* mediaOpt);
void SetPayloadType(uint8_t payloadType) { _payloadType = payloadType; };
- void SetCodecType(VideoCodecType codecType) {_codecType = codecType;};
void SetInternalSource(bool internalSource) { _internalSource = internalSource; };
- void RegisterPostEncodeImageCallback(EncodedImageCallback* callback);
-
private:
- /*
- * Map information from info into rtp. If no relevant information is found
- * in info, rtp is set to NULL.
- */
- static void CopyCodecSpecific(const CodecSpecificInfo& info,
- RTPVideoHeader** rtp);
-
VCMPacketizationCallback* _sendCallback;
media_optimization::MediaOptimization* _mediaOpt;
- uint32_t _encodedBytes;
uint8_t _payloadType;
- VideoCodecType _codecType;
bool _internalSource;
- scoped_ptr<CriticalSectionWrapper> post_encode_callback_lock_;
EncodedImageCallback* post_encode_callback_;
#ifdef DEBUG_ENCODER_BIT_STREAM
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.cc
index f11f81b46f2..d8792f21afb 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.cc
@@ -25,7 +25,6 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -77,10 +76,6 @@ int FrameList::RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
FrameList::iterator it = begin();
while (!empty()) {
// Throw at least one frame.
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, -1,
- "Recycling: type=%s, low seqnum=%u",
- it->second->FrameType() == kVideoFrameKey ?
- "key" : "delta", it->second->GetLowSeqNum());
it->second->Reset();
free_frames->push_back(it->second);
erase(it++);
@@ -128,16 +123,10 @@ void FrameList::Reset(UnorderedFrameList* free_frames) {
}
VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
- EventFactory* event_factory,
- int vcm_id,
- int receiver_id,
- bool master)
- : vcm_id_(vcm_id),
- receiver_id_(receiver_id),
- clock_(clock),
+ EventFactory* event_factory)
+ : clock_(clock),
running_(false),
crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- master_(master),
frame_event_(event_factory->CreateEvent()),
packet_event_(event_factory->CreateEvent()),
max_number_of_frames_(kStartNumberOfFrames),
@@ -156,7 +145,7 @@ VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
num_consecutive_old_frames_(0),
num_consecutive_old_packets_(0),
num_discarded_packets_(0),
- jitter_estimate_(vcm_id, receiver_id),
+ jitter_estimate_(),
inter_frame_delay_(clock_->TimeInMilliseconds()),
rtt_ms_(kDefaultRtt),
nack_mode_(kNoNack),
@@ -192,10 +181,7 @@ void VCMJitterBuffer::CopyFrom(const VCMJitterBuffer& rhs) {
if (this != &rhs) {
crit_sect_->Enter();
rhs.crit_sect_->Enter();
- vcm_id_ = rhs.vcm_id_;
- receiver_id_ = rhs.receiver_id_;
running_ = rhs.running_;
- master_ = !rhs.master_;
max_number_of_frames_ = rhs.max_number_of_frames_;
incoming_frame_rate_ = rhs.incoming_frame_rate_;
incoming_frame_count_ = rhs.incoming_frame_count_;
@@ -276,10 +262,6 @@ void VCMJitterBuffer::Start() {
first_packet_since_reset_ = true;
rtt_ms_ = kDefaultRtt;
last_decoded_state_.Reset();
-
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "JB(0x%x): Jitter buffer: start",
- this);
}
void VCMJitterBuffer::Stop() {
@@ -300,9 +282,6 @@ void VCMJitterBuffer::Stop() {
// Make sure we wake up any threads waiting on these events.
frame_event_->Set();
packet_event_->Set();
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "JB(0x%x): Jitter buffer: stop",
- this);
}
bool VCMJitterBuffer::Running() const {
@@ -327,9 +306,6 @@ void VCMJitterBuffer::Flush() {
waiting_for_completion_.latest_packet_time = -1;
first_packet_since_reset_ = true;
missing_sequence_numbers_.clear();
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "JB(0x%x): Jitter buffer: flush",
- this);
}
// Get received key and delta frames
@@ -583,6 +559,8 @@ VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet,
DropPacketsFromNackList(last_decoded_state_.sequence_num());
if (num_consecutive_old_packets_ > kMaxConsecutiveOldPackets) {
+ LOG(LS_WARNING) << num_consecutive_old_packets_ << " consecutive old "
+ "packets received. Flushing the jitter buffer.";
Flush();
return kFlushIndicator;
}
@@ -602,13 +580,13 @@ VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet,
VCMFrameBufferEnum ret = kNoError;
if (!*frame) {
// No free frame! Try to reclaim some...
- LOG_F(LS_INFO) << "Unable to get empty frame; Recycling.";
+ LOG(LS_WARNING) << "Unable to get empty frame; Recycling.";
bool found_key_frame = RecycleFramesUntilKeyFrame();
*frame = GetEmptyFrame();
- if (!*frame)
- return kGeneralError;
- else if (!found_key_frame)
+ assert(*frame);
+ if (!found_key_frame) {
ret = kFlushIndicator;
+ }
}
(*frame)->Reset();
return ret;
@@ -650,6 +628,8 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
// Flush if this happens consistently.
num_consecutive_old_frames_++;
if (num_consecutive_old_frames_ > kMaxConsecutiveOldFrames) {
+      LOG(LS_WARNING) << num_consecutive_old_frames_ << " consecutive old "
+ "frames received. Flushing the jitter buffer.";
Flush();
return kFlushIndicator;
}
@@ -702,8 +682,8 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
if (IsPacketRetransmitted(packet)) {
frame->IncrementNackCount();
}
- if (!UpdateNackList(packet.seqNum)) {
- LOG_F(LS_INFO) << "Requesting key frame due to flushed NACK list.";
+ if (!UpdateNackList(packet.seqNum) &&
+ packet.frameType != kVideoFrameKey) {
buffer_return = kFlushIndicator;
}
latest_received_sequence_number_ = LatestSequenceNumber(
@@ -725,15 +705,6 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
}
case kCompleteSession: {
if (update_decodable_list) {
- if (master_) {
- // Only trace the primary jitter buffer to make it possible to parse
- // and plot the trace file.
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "JB(0x%x) FB(0x%x): Complete frame added to jitter"
- "buffer, size:%d type %d",
- this, frame, frame->Length(), frame->FrameType());
- }
CountFrame(*frame);
frame->SetCountedFrame(true);
if (continuous) {
@@ -960,8 +931,6 @@ uint16_t* VCMJitterBuffer::GetNackList(uint16_t* nack_list_size,
incomplete_frames_.begin(), incomplete_frames_.end(),
HasNonEmptyState);
}
- if (have_non_empty_frame)
- LOG_F(LS_INFO) << "First frame is not key; Recycling.";
bool found_key_frame = RecycleFramesUntilKeyFrame();
if (!found_key_frame) {
*request_key_frame = have_non_empty_frame;
@@ -977,9 +946,9 @@ uint16_t* VCMJitterBuffer::GetNackList(uint16_t* nack_list_size,
int non_continuous_incomplete_duration =
NonContinuousOrIncompleteDuration();
if (non_continuous_incomplete_duration > 90 * max_incomplete_time_ms_) {
- LOG_F(LS_INFO) << "Too long non-decodable duration: " <<
- non_continuous_incomplete_duration << " > " <<
- 90 * max_incomplete_time_ms_;
+ LOG_F(LS_WARNING) << "Too long non-decodable duration: "
+ << non_continuous_incomplete_duration << " > "
+ << 90 * max_incomplete_time_ms_;
FrameList::reverse_iterator rit = find_if(incomplete_frames_.rbegin(),
incomplete_frames_.rend(), IsKeyFrame);
if (rit == incomplete_frames_.rend()) {
@@ -1038,10 +1007,12 @@ bool VCMJitterBuffer::UpdateNackList(uint16_t sequence_number) {
TRACE_EVENT_INSTANT1("webrtc", "AddNack", "seqnum", i);
}
if (TooLargeNackList() && !HandleTooLargeNackList()) {
+ LOG(LS_WARNING) << "Requesting key frame due to too large NACK list.";
return false;
}
if (MissingTooOldPacket(sequence_number) &&
!HandleTooOldPackets(sequence_number)) {
+    LOG(LS_WARNING) << "Requesting key frame due to missing packets that are too old.";
return false;
}
} else {
@@ -1058,8 +1029,9 @@ bool VCMJitterBuffer::TooLargeNackList() const {
bool VCMJitterBuffer::HandleTooLargeNackList() {
// Recycle frames until the NACK list is small enough. It is likely cheaper to
// request a key frame than to retransmit this many missing packets.
- LOG_F(LS_INFO) << "NACK list has grown too large: " <<
- missing_sequence_numbers_.size() << " > " << max_nack_list_size_;
+ LOG_F(LS_WARNING) << "NACK list has grown too large: "
+ << missing_sequence_numbers_.size() << " > "
+ << max_nack_list_size_;
bool key_frame_found = false;
while (TooLargeNackList()) {
key_frame_found = RecycleFramesUntilKeyFrame();
@@ -1083,8 +1055,9 @@ bool VCMJitterBuffer::HandleTooOldPackets(uint16_t latest_sequence_number) {
bool key_frame_found = false;
const uint16_t age_of_oldest_missing_packet = latest_sequence_number -
*missing_sequence_numbers_.begin();
- LOG_F(LS_INFO) << "NACK list contains too old sequence numbers: " <<
- age_of_oldest_missing_packet << " > " << max_packet_age_to_nack_;
+ LOG_F(LS_WARNING) << "NACK list contains too old sequence numbers: "
+ << age_of_oldest_missing_packet << " > "
+ << max_packet_age_to_nack_;
while (MissingTooOldPacket(latest_sequence_number)) {
key_frame_found = RecycleFramesUntilKeyFrame();
}
@@ -1136,10 +1109,6 @@ bool VCMJitterBuffer::TryToIncreaseJitterBufferSize() {
frame_buffers_[max_number_of_frames_] = new_frame;
free_frames_.push_back(new_frame);
++max_number_of_frames_;
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "JB(0x%x) FB(0x%x): Jitter buffer increased to:%d frames",
- this, new_frame, max_number_of_frames_);
TRACE_COUNTER1("webrtc", "JBMaxFrames", max_number_of_frames_);
return true;
}
@@ -1161,13 +1130,9 @@ bool VCMJitterBuffer::RecycleFramesUntilKeyFrame() {
key_frame_found = key_frame_it != decodable_frames_.end();
}
drop_count_ += dropped_frames;
- if (dropped_frames) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Jitter buffer drop count:%u", drop_count_);
- }
TRACE_EVENT_INSTANT0("webrtc", "JB::RecycleFramesUntilKeyFrame");
if (key_frame_found) {
+ LOG(LS_INFO) << "Found key frame while dropping frames.";
// Reset last decoded state to make sure the next frame decoded is a key
// frame, and start NACKing from here.
last_decoded_state_.Reset();
@@ -1246,19 +1211,6 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample,
if (sample.latest_packet_time == -1) {
return;
}
- if (incomplete_frame) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "Received incomplete frame "
- "timestamp %u frame size %u at time %u",
- sample.timestamp, sample.frame_size,
- MaskWord64ToUWord32(sample.latest_packet_time));
- } else {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "Received complete frame "
- "timestamp %u frame size %u at time %u",
- sample.timestamp, sample.frame_size,
- MaskWord64ToUWord32(sample.latest_packet_time));
- }
UpdateJitterEstimate(sample.latest_packet_time, sample.timestamp,
sample.frame_size, incomplete_frame);
}
@@ -1273,23 +1225,6 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
}
// No retransmitted frames should be a part of the jitter
// estimate.
- if (incomplete_frame) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Received incomplete frame timestamp %u frame type %d "
- "frame size %u at time %u, jitter estimate was %u",
- frame.TimeStamp(), frame.FrameType(), frame.Length(),
- MaskWord64ToUWord32(frame.LatestPacketTimeMs()),
- EstimatedJitterMs());
- } else {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "Received complete frame "
- "timestamp %u frame type %d frame size %u at time %u, "
- "jitter estimate was %u",
- frame.TimeStamp(), frame.FrameType(), frame.Length(),
- MaskWord64ToUWord32(frame.LatestPacketTimeMs()),
- EstimatedJitterMs());
- }
UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.TimeStamp(),
frame.Length(), incomplete_frame);
}
@@ -1306,12 +1241,6 @@ void VCMJitterBuffer::UpdateJitterEstimate(
return;
}
int64_t frame_delay;
- // Calculate the delay estimate
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Packet received and sent to jitter estimate with: "
- "timestamp=%u wall_clock=%u", timestamp,
- MaskWord64ToUWord32(latest_packet_time_ms));
bool not_reordered = inter_frame_delay_.CalculateDelay(timestamp,
&frame_delay,
latest_packet_time_ms);
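
One behavioural change in InsertPacket deserves a note: when UpdateNackList fails (NACK list too large or packets too old), the jitter buffer now signals kFlushIndicator only if the offending packet is not itself a key frame, presumably because an arriving key frame already gives the decoder a clean restart point; the new ResetByFutureKeyFrameDoesntError test below exercises that path. Restated as a standalone predicate with illustrative names:

    // Illustrative restatement of the new flush decision in InsertPacket:
    // a failed NACK-list update only forces a flush for non-key packets.
    static bool ShouldSignalFlush(bool nack_list_updated_ok, bool packet_is_key_frame) {
      return !nack_list_updated_ok && !packet_is_key_frame;
    }
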
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.h b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.h
index 8586f115f88..6ed9cfb85c6 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.h
@@ -16,6 +16,7 @@
#include <set>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
@@ -23,7 +24,6 @@
#include "webrtc/modules/video_coding/main/source/inter_frame_delay.h"
#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
#include "webrtc/modules/video_coding/main/source/jitter_estimator.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
@@ -77,10 +77,7 @@ class FrameList
class VCMJitterBuffer {
public:
VCMJitterBuffer(Clock* clock,
- EventFactory* event_factory,
- int vcm_id,
- int receiver_id,
- bool master);
+ EventFactory* event_factory);
virtual ~VCMJitterBuffer();
// Makes |this| a deep copy of |rhs|.
@@ -274,13 +271,10 @@ class VCMJitterBuffer {
uint16_t EstimatedLowSequenceNumber(const VCMFrameBuffer& frame) const;
- int vcm_id_;
- int receiver_id_;
Clock* clock_;
// If we are running (have started) or not.
bool running_;
CriticalSectionWrapper* crit_sect_;
- bool master_;
// Event to signal when we have a frame ready for decoder.
scoped_ptr<EventWrapper> frame_event_;
// Event to signal when we have received a packet.
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
index e535a8a4043..0490658b420 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
@@ -27,8 +27,8 @@ class TestBasicJitterBuffer : public ::testing::Test {
protected:
virtual void SetUp() {
clock_.reset(new SimulatedClock(0));
- jitter_buffer_.reset(new VCMJitterBuffer(clock_.get(),
- &event_factory_, -1, -1, true));
+ jitter_buffer_.reset(
+ new VCMJitterBuffer(clock_.get(), &event_factory_));
jitter_buffer_->Start();
seq_num_ = 1234;
timestamp_ = 0;
@@ -126,8 +126,7 @@ class TestRunningJitterBuffer : public ::testing::Test {
clock_.reset(new SimulatedClock(0));
max_nack_list_size_ = 150;
oldest_packet_to_nack_ = 250;
- jitter_buffer_ = new VCMJitterBuffer(clock_.get(), &event_factory_, -1, -1,
- true);
+ jitter_buffer_ = new VCMJitterBuffer(clock_.get(), &event_factory_);
stream_generator_ = new StreamGenerator(0, 0, clock_->TimeInMilliseconds());
jitter_buffer_->Start();
jitter_buffer_->SetNackSettings(max_nack_list_size_,
@@ -2034,4 +2033,31 @@ TEST_F(TestJitterBufferNack, NormalOperationWrap2) {
EXPECT_EQ(65535, list[0]);
}
+TEST_F(TestJitterBufferNack, ResetByFutureKeyFrameDoesntError) {
+ stream_generator_->Init(0, 0, clock_->TimeInMilliseconds());
+ InsertFrame(kVideoFrameKey);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ uint16_t nack_list_size = 0;
+ bool extended = false;
+ jitter_buffer_->GetNackList(&nack_list_size, &extended);
+ EXPECT_EQ(0, nack_list_size);
+
+ // Far-into-the-future video frame, could be caused by resetting the encoder
+  // or otherwise restarting. This should not cause an error when the packet is
+ // a keyframe, even if all of the nack list needs to be flushed.
+ stream_generator_->Init(10000, 0, clock_->TimeInMilliseconds());
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ InsertFrame(kVideoFrameKey);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ jitter_buffer_->GetNackList(&nack_list_size, &extended);
+ EXPECT_EQ(0, nack_list_size);
+
+ // Stream should be decodable from this point.
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ InsertFrame(kVideoFrameDelta);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ jitter_buffer_->GetNackList(&nack_list_size, &extended);
+ EXPECT_EQ(0, nack_list_size);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_estimator.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_estimator.cc
index deb036300ee..71c54a00cd2 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_estimator.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_estimator.cc
@@ -11,7 +11,6 @@
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/source/jitter_estimator.h"
#include "webrtc/modules/video_coding/main/source/rtt_filter.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include <assert.h>
#include <math.h>
@@ -20,20 +19,20 @@
namespace webrtc {
-VCMJitterEstimator::VCMJitterEstimator(int32_t vcmId, int32_t receiverId) :
-_vcmId(vcmId),
-_receiverId(receiverId),
-_phi(0.97),
-_psi(0.9999),
-_alphaCountMax(400),
-_thetaLow(0.000001),
-_nackLimit(3),
-_numStdDevDelayOutlier(15),
-_numStdDevFrameSizeOutlier(3),
-_noiseStdDevs(2.33), // ~Less than 1% chance
- // (look up in normal distribution table)...
-_noiseStdDevOffset(30.0), // ...of getting 30 ms freezes
-_rttFilter(vcmId, receiverId) {
+VCMJitterEstimator::VCMJitterEstimator(int32_t vcmId, int32_t receiverId)
+ : _vcmId(vcmId),
+ _receiverId(receiverId),
+ _phi(0.97),
+ _psi(0.9999),
+ _alphaCountMax(400),
+ _thetaLow(0.000001),
+ _nackLimit(3),
+ _numStdDevDelayOutlier(15),
+ _numStdDevFrameSizeOutlier(3),
+ _noiseStdDevs(2.33), // ~Less than 1% chance
+ // (look up in normal distribution table)...
+ _noiseStdDevOffset(30.0), // ...of getting 30 ms freezes
+ _rttFilter() {
Reset();
}
@@ -108,10 +107,6 @@ void
VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS, uint32_t frameSizeBytes,
bool incompleteFrame /* = false */)
{
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(_vcmId, _receiverId),
- "Jitter estimate updated with: frameSize=%d frameDelayMS=%d",
- frameSizeBytes, frameDelayMS);
if (frameSizeBytes == 0)
{
return;
@@ -162,7 +157,7 @@ VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS, uint32_t frameSizeBytes
// deviation is probably due to an incorrect line slope.
double deviation = DeviationFromExpectedDelay(frameDelayMS, deltaFS);
- if (abs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) ||
+ if (fabs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) ||
frameSizeBytes > _avgFrameSize + _numStdDevFrameSizeOutlier * sqrt(_varFrameSize))
{
// Update the variance of the deviation from the
@@ -195,16 +190,6 @@ VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS, uint32_t frameSizeBytes
{
_startupCount++;
}
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Framesize statistics: max=%f average=%f", _maxFrameSize, _avgFrameSize);
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "The estimated slope is: theta=(%f, %f)", _theta[0], _theta[1]);
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Random jitter: mean=%f variance=%f", _avgNoise, _varNoise);
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Current jitter estimate: %f", _filterJitterEstimate);
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Current max RTT: %u", _rttFilter.RttMs());
}
// Updates the nack/packet ratio
@@ -257,7 +242,7 @@ VCMJitterEstimator::KalmanEstimateChannel(int64_t frameDelayMS,
{
return;
}
- double sigma = (300.0 * exp(-abs(static_cast<double>(deltaFSBytes)) /
+ double sigma = (300.0 * exp(-fabs(static_cast<double>(deltaFSBytes)) /
(1e0 * _maxFrameSize)) + 1) * sqrt(_varNoise);
if (sigma < 1.0)
{
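
The abs()-to-fabs() fixes above matter because the arguments are doubles: with the integer abs() from stdlib, the deviation and frame-size delta can be truncated toward zero before the outlier comparison, discarding everything after the decimal point. A tiny standalone illustration:

    #include <math.h>
    #include <stdio.h>
    #include <stdlib.h>

    int main(void) {
      double deviation = -0.75;
      // Integer abs works on an int, so the double is truncated first: prints 0.
      printf("abs:  %d\n", abs((int)deviation));
      // Floating-point abs keeps the value: prints 0.750000.
      printf("fabs: %f\n", fabs(deviation));
      return 0;
    }
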
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.cc
index 27fa6819338..4dc72253be3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.cc
@@ -14,13 +14,68 @@
#include "webrtc/modules/video_coding/main/source/qm_select.h"
#include "webrtc/modules/video_coding/utility/include/frame_dropper.h"
#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
namespace media_optimization {
+namespace {
+void UpdateProtectionCallback(
+ VCMProtectionMethod* selected_method,
+ uint32_t* video_rate_bps,
+ uint32_t* nack_overhead_rate_bps,
+ uint32_t* fec_overhead_rate_bps,
+ VCMProtectionCallback* video_protection_callback) {
+ FecProtectionParams delta_fec_params;
+ FecProtectionParams key_fec_params;
+ // Get the FEC code rate for Key frames (set to 0 when NA).
+ key_fec_params.fec_rate = selected_method->RequiredProtectionFactorK();
+
+ // Get the FEC code rate for Delta frames (set to 0 when NA).
+ delta_fec_params.fec_rate = selected_method->RequiredProtectionFactorD();
+
+ // Get the FEC-UEP protection status for Key frames: UEP on/off.
+ key_fec_params.use_uep_protection = selected_method->RequiredUepProtectionK();
+
+ // Get the FEC-UEP protection status for Delta frames: UEP on/off.
+ delta_fec_params.use_uep_protection =
+ selected_method->RequiredUepProtectionD();
-MediaOptimization::MediaOptimization(int32_t id, Clock* clock)
- : id_(id),
- clock_(clock),
+ // The RTP module currently requires the same |max_fec_frames| for both
+ // key and delta frames.
+ delta_fec_params.max_fec_frames = selected_method->MaxFramesFec();
+ key_fec_params.max_fec_frames = selected_method->MaxFramesFec();
+
+ // Set the FEC packet mask type. |kFecMaskBursty| is more effective for
+ // consecutive losses and little/no packet re-ordering. As we currently
+ // do not have feedback data on the degree of correlated losses and packet
+ // re-ordering, we keep default setting to |kFecMaskRandom| for now.
+ delta_fec_params.fec_mask_type = kFecMaskRandom;
+ key_fec_params.fec_mask_type = kFecMaskRandom;
+
+ // TODO(Marco): Pass FEC protection values per layer.
+ video_protection_callback->ProtectionRequest(&delta_fec_params,
+ &key_fec_params,
+ video_rate_bps,
+ nack_overhead_rate_bps,
+ fec_overhead_rate_bps);
+}
+} // namespace
+
+struct MediaOptimization::EncodedFrameSample {
+ EncodedFrameSample(int size_bytes,
+ uint32_t timestamp,
+ int64_t time_complete_ms)
+ : size_bytes(size_bytes),
+ timestamp(timestamp),
+ time_complete_ms(time_complete_ms) {}
+
+ uint32_t size_bytes;
+ uint32_t timestamp;
+ int64_t time_complete_ms;
+};
+
+MediaOptimization::MediaOptimization(Clock* clock)
+ : clock_(clock),
max_bit_rate_(0),
send_codec_type_(kVideoCodecUnknown),
codec_width_(0),
@@ -35,8 +90,6 @@ MediaOptimization::MediaOptimization(int32_t id, Clock* clock)
target_bit_rate_(0),
incoming_frame_rate_(0),
enable_qm_(false),
- video_protection_callback_(NULL),
- video_qmsettings_callback_(NULL),
encoded_frame_samples_(),
avg_sent_bit_rate_bps_(0),
avg_sent_framerate_(0),
@@ -59,7 +112,8 @@ MediaOptimization::~MediaOptimization(void) {
loss_prot_logic_->Release();
}
-int32_t MediaOptimization::Reset() {
+void MediaOptimization::Reset() {
+ SetEncodingData(kVideoCodecUnknown, 0, 0, 0, 0, 0, 0, max_payload_size_);
memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
incoming_frame_rate_ = 0.0;
frame_dropper_->Reset();
@@ -81,12 +135,52 @@ int32_t MediaOptimization::Reset() {
encoded_frame_samples_.clear();
avg_sent_bit_rate_bps_ = 0;
num_layers_ = 1;
- return VCM_OK;
}
-uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate,
- uint8_t fraction_lost,
- uint32_t round_trip_time_ms) {
+void MediaOptimization::SetEncodingData(VideoCodecType send_codec_type,
+ int32_t max_bit_rate,
+ uint32_t frame_rate,
+ uint32_t target_bitrate,
+ uint16_t width,
+ uint16_t height,
+ int num_layers,
+ int32_t mtu) {
+ // Everything codec specific should be reset here since this means the codec
+ // has changed. If native dimension values have changed, then either user
+ // initiated change, or QM initiated change. Will be able to determine only
+ // after the processing of the first frame.
+ last_change_time_ = clock_->TimeInMilliseconds();
+ content_->Reset();
+ content_->UpdateFrameRate(frame_rate);
+
+ max_bit_rate_ = max_bit_rate;
+ send_codec_type_ = send_codec_type;
+ target_bit_rate_ = target_bitrate;
+ float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
+ loss_prot_logic_->UpdateBitRate(target_bitrate_kbps);
+ loss_prot_logic_->UpdateFrameRate(static_cast<float>(frame_rate));
+ loss_prot_logic_->UpdateFrameSize(width, height);
+ loss_prot_logic_->UpdateNumLayers(num_layers);
+ frame_dropper_->Reset();
+ frame_dropper_->SetRates(target_bitrate_kbps, static_cast<float>(frame_rate));
+ user_frame_rate_ = static_cast<float>(frame_rate);
+ codec_width_ = width;
+ codec_height_ = height;
+ num_layers_ = (num_layers <= 1) ? 1 : num_layers; // Can also be zero.
+ max_payload_size_ = mtu;
+ qm_resolution_->Initialize(target_bitrate_kbps,
+ user_frame_rate_,
+ codec_width_,
+ codec_height_,
+ num_layers_);
+}
+
+uint32_t MediaOptimization::SetTargetRates(
+ uint32_t target_bitrate,
+ uint8_t fraction_lost,
+ uint32_t round_trip_time_ms,
+ VCMProtectionCallback* protection_callback,
+ VCMQMSettingsCallback* qmsettings_callback) {
// TODO(holmer): Consider putting this threshold only on the video bitrate,
// and not on protection.
if (max_bit_rate_ > 0 &&
@@ -145,10 +239,13 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate,
// Get the bit cost of protection method, based on the amount of
// overhead data actually transmitted (including headers) the last
// second.
- UpdateProtectionCallback(selected_method,
- &sent_video_rate_bps,
- &sent_nack_rate_bps,
- &sent_fec_rate_bps);
+ if (protection_callback) {
+ UpdateProtectionCallback(selected_method,
+ &sent_video_rate_bps,
+ &sent_nack_rate_bps,
+ &sent_fec_rate_bps,
+ protection_callback);
+ }
uint32_t sent_total_rate_bps =
sent_video_rate_bps + sent_nack_rate_bps + sent_fec_rate_bps;
// Estimate the overhead costs of the next second as staying the same
@@ -178,7 +275,7 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate,
static_cast<float>(target_bit_rate_) / 1000.0f;
frame_dropper_->SetRates(target_video_bitrate_kbps, incoming_frame_rate_);
- if (enable_qm_) {
+ if (enable_qm_ && qmsettings_callback) {
// Update QM with rates.
qm_resolution_->UpdateRates(target_video_bitrate_kbps,
sent_video_rate_kbps,
@@ -187,7 +284,7 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate,
// Check for QM selection.
bool select_qm = CheckStatusForQMchange();
if (select_qm) {
- SelectQuality();
+ SelectQuality(qmsettings_callback);
}
// Reset the short-term averaged content data.
content_->ResetShortTermAvgData();
@@ -198,44 +295,6 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate,
return target_bit_rate_;
}
-int32_t MediaOptimization::SetEncodingData(VideoCodecType send_codec_type,
- int32_t max_bit_rate,
- uint32_t frame_rate,
- uint32_t target_bitrate,
- uint16_t width,
- uint16_t height,
- int num_layers) {
- // Everything codec specific should be reset here since this means the codec
- // has changed. If native dimension values have changed, then either user
- // initiated change, or QM initiated change. Will be able to determine only
- // after the processing of the first frame.
- last_change_time_ = clock_->TimeInMilliseconds();
- content_->Reset();
- content_->UpdateFrameRate(frame_rate);
-
- max_bit_rate_ = max_bit_rate;
- send_codec_type_ = send_codec_type;
- target_bit_rate_ = target_bitrate;
- float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
- loss_prot_logic_->UpdateBitRate(target_bitrate_kbps);
- loss_prot_logic_->UpdateFrameRate(static_cast<float>(frame_rate));
- loss_prot_logic_->UpdateFrameSize(width, height);
- loss_prot_logic_->UpdateNumLayers(num_layers);
- frame_dropper_->Reset();
- frame_dropper_->SetRates(target_bitrate_kbps, static_cast<float>(frame_rate));
- user_frame_rate_ = static_cast<float>(frame_rate);
- codec_width_ = width;
- codec_height_ = height;
- num_layers_ = (num_layers <= 1) ? 1 : num_layers; // Can also be zero.
- int32_t ret = VCM_OK;
- ret = qm_resolution_->Initialize(target_bitrate_kbps,
- user_frame_rate_,
- codec_width_,
- codec_height_,
- num_layers_);
- return ret;
-}
-
void MediaOptimization::EnableProtectionMethod(bool enable,
VCMProtectionMethodEnum method) {
bool updated = false;
@@ -249,11 +308,6 @@ void MediaOptimization::EnableProtectionMethod(bool enable,
}
}
-bool MediaOptimization::IsProtectionMethodEnabled(
- VCMProtectionMethodEnum method) {
- return (loss_prot_logic_->SelectedType() == method);
-}
-
uint32_t MediaOptimization::InputFrameRate() {
ProcessIncomingFrameRate(clock_->TimeInMilliseconds());
return uint32_t(incoming_frame_rate_ + 0.5f);
@@ -272,6 +326,13 @@ uint32_t MediaOptimization::SentBitRate() {
return avg_sent_bit_rate_bps_;
}
+VCMFrameCount MediaOptimization::SentFrameCount() {
+ VCMFrameCount count;
+ count.numDeltaFrames = delta_frame_cnt_;
+ count.numKeyFrames = key_frame_cnt_;
+ return count;
+}
+
int32_t MediaOptimization::UpdateWithEncodedData(int encoded_length,
uint32_t timestamp,
FrameType encoded_frame_type) {
@@ -325,29 +386,14 @@ int32_t MediaOptimization::UpdateWithEncodedData(int encoded_length,
return VCM_OK;
}
-int32_t MediaOptimization::RegisterProtectionCallback(
- VCMProtectionCallback* protection_callback) {
- video_protection_callback_ = protection_callback;
- return VCM_OK;
-}
-
-int32_t MediaOptimization::RegisterVideoQMCallback(
- VCMQMSettingsCallback* video_qmsettings) {
- video_qmsettings_callback_ = video_qmsettings;
- // Callback setting controls QM.
- if (video_qmsettings_callback_ != NULL) {
- enable_qm_ = true;
- } else {
- enable_qm_ = false;
- }
- return VCM_OK;
-}
+void MediaOptimization::EnableQM(bool enable) { enable_qm_ = enable; }
void MediaOptimization::EnableFrameDropper(bool enable) {
frame_dropper_->Enable(enable);
}
bool MediaOptimization::DropFrame() {
+ UpdateIncomingFrameRate();
// Leak appropriate number of bytes.
frame_dropper_->Leak((uint32_t)(InputFrameRate() + 0.5f));
if (video_suspended_) {
@@ -356,12 +402,6 @@ bool MediaOptimization::DropFrame() {
return frame_dropper_->DropFrame();
}
-int32_t MediaOptimization::SentFrameCount(VCMFrameCount* frame_count) const {
- frame_count->numDeltaFrames = delta_frame_cnt_;
- frame_count->numKeyFrames = key_frame_cnt_;
- return VCM_OK;
-}
-
void MediaOptimization::UpdateIncomingFrameRate() {
int64_t now = clock_->TimeInMilliseconds();
if (incoming_frame_times_[0] == 0) {
@@ -388,7 +428,8 @@ void MediaOptimization::UpdateContentData(
}
}
-int32_t MediaOptimization::SelectQuality() {
+int32_t MediaOptimization::SelectQuality(
+ VCMQMSettingsCallback* video_qmsettings_callback) {
// Reset quantities for QM select.
qm_resolution_->ResetQM();
@@ -403,7 +444,7 @@ int32_t MediaOptimization::SelectQuality() {
}
// Check for updates to spatial/temporal modes.
- QMUpdate(qm);
+ QMUpdate(qm, video_qmsettings_callback);
// Reset all the rate and related frame counters quantities.
qm_resolution_->ResetRates();
@@ -426,50 +467,7 @@ void MediaOptimization::SuspendBelowMinBitrate(int threshold_bps,
video_suspended_ = false;
}
-// Private methods below this line.
-
-int MediaOptimization::UpdateProtectionCallback(
- VCMProtectionMethod* selected_method,
- uint32_t* video_rate_bps,
- uint32_t* nack_overhead_rate_bps,
- uint32_t* fec_overhead_rate_bps) {
- if (!video_protection_callback_) {
- return VCM_OK;
- }
- FecProtectionParams delta_fec_params;
- FecProtectionParams key_fec_params;
- // Get the FEC code rate for Key frames (set to 0 when NA).
- key_fec_params.fec_rate = selected_method->RequiredProtectionFactorK();
-
- // Get the FEC code rate for Delta frames (set to 0 when NA).
- delta_fec_params.fec_rate = selected_method->RequiredProtectionFactorD();
-
- // Get the FEC-UEP protection status for Key frames: UEP on/off.
- key_fec_params.use_uep_protection = selected_method->RequiredUepProtectionK();
-
- // Get the FEC-UEP protection status for Delta frames: UEP on/off.
- delta_fec_params.use_uep_protection =
- selected_method->RequiredUepProtectionD();
-
- // The RTP module currently requires the same |max_fec_frames| for both
- // key and delta frames.
- delta_fec_params.max_fec_frames = selected_method->MaxFramesFec();
- key_fec_params.max_fec_frames = selected_method->MaxFramesFec();
-
- // Set the FEC packet mask type. |kFecMaskBursty| is more effective for
- // consecutive losses and little/no packet re-ordering. As we currently
- // do not have feedback data on the degree of correlated losses and packet
- // re-ordering, we keep default setting to |kFecMaskRandom| for now.
- delta_fec_params.fec_mask_type = kFecMaskRandom;
- key_fec_params.fec_mask_type = kFecMaskRandom;
-
- // TODO(Marco): Pass FEC protection values per layer.
- return video_protection_callback_->ProtectionRequest(&delta_fec_params,
- &key_fec_params,
- video_rate_bps,
- nack_overhead_rate_bps,
- fec_overhead_rate_bps);
-}
+bool MediaOptimization::IsVideoSuspended() const { return video_suspended_; }
void MediaOptimization::PurgeOldFrameSamples(int64_t now_ms) {
while (!encoded_frame_samples_.empty()) {
@@ -518,7 +516,9 @@ void MediaOptimization::UpdateSentFramerate() {
}
}
-bool MediaOptimization::QMUpdate(VCMResolutionScale* qm) {
+bool MediaOptimization::QMUpdate(
+ VCMResolutionScale* qm,
+ VCMQMSettingsCallback* video_qmsettings_callback) {
// Check for no change.
if (!qm->change_resolution_spatial && !qm->change_resolution_temporal) {
return false;
@@ -537,13 +537,9 @@ bool MediaOptimization::QMUpdate(VCMResolutionScale* qm) {
codec_height_ = qm->codec_height;
}
- WEBRTC_TRACE(webrtc::kTraceDebug,
- webrtc::kTraceVideoCoding,
- id_,
- "Resolution change from QM select: W = %d, H = %d, FR = %f",
- qm->codec_width,
- qm->codec_height,
- qm->frame_rate);
+ LOG(LS_INFO) << "Media optimizer requests the video resolution to be changed "
+ "to " << qm->codec_width << "x" << qm->codec_height << "@"
+ << qm->frame_rate;
// Update VPM with new target frame rate and frame size.
// Note: use |qm->frame_rate| instead of |_incoming_frame_rate| for updating
@@ -551,7 +547,7 @@ bool MediaOptimization::QMUpdate(VCMResolutionScale* qm) {
// will vary/fluctuate, and since we don't want to change the state of the
// VPM frame dropper, unless a temporal action was selected, we use the
// quantity |qm->frame_rate| for updating.
- video_qmsettings_callback_->SetVideoQMSettings(
+ video_qmsettings_callback->SetVideoQMSettings(
qm->frame_rate, codec_width_, codec_height_);
content_->UpdateFrameRate(qm->frame_rate);
qm_resolution_->UpdateCodecParameters(
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.h b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.h
index cde28d23288..35a49712504 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.h
@@ -18,7 +18,6 @@
#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
#include "webrtc/modules/video_coding/main/source/qm_select.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -29,33 +28,25 @@ class VCMContentMetricsProcessing;
namespace media_optimization {
-enum {
- kBitrateMaxFrameSamples = 60
-};
-enum {
- kBitrateAverageWinMs = 1000
-};
-
-struct EncodedFrameSample {
- EncodedFrameSample(int size_bytes,
- uint32_t timestamp,
- int64_t time_complete_ms)
- : size_bytes(size_bytes),
- timestamp(timestamp),
- time_complete_ms(time_complete_ms) {}
-
- uint32_t size_bytes;
- uint32_t timestamp;
- int64_t time_complete_ms;
-};
-
+// TODO(andresp): Make thread safe.
class MediaOptimization {
public:
- MediaOptimization(int32_t id, Clock* clock);
- ~MediaOptimization(void);
+ explicit MediaOptimization(Clock* clock);
+ ~MediaOptimization();
+
+ // TODO(andresp): Can Reset and SetEncodingData be done at construction time
+ // only?
+ void Reset();
- // Resets the Media Optimization module.
- int32_t Reset();
+ // Informs media optimization of initial encoding state.
+ void SetEncodingData(VideoCodecType send_codec_type,
+ int32_t max_bit_rate,
+ uint32_t frame_rate,
+ uint32_t bit_rate,
+ uint16_t width,
+ uint16_t height,
+ int num_temporal_layers,
+ int32_t mtu);
// Sets target rates for the encoder given the channel parameters.
// Inputs: target bitrate - the encoder target bitrate in bits/s.
@@ -63,95 +54,64 @@ class MediaOptimization {
// round_trip_time_ms - round trip time in milliseconds.
// min_bit_rate - the bit rate of the end-point with lowest rate.
// max_bit_rate - the bit rate of the end-point with highest rate.
+ // TODO(andresp): Find if the callbacks can be triggered only after releasing
+ // an internal critical section.
uint32_t SetTargetRates(uint32_t target_bitrate,
uint8_t fraction_lost,
- uint32_t round_trip_time_ms);
+ uint32_t round_trip_time_ms,
+ VCMProtectionCallback* protection_callback,
+ VCMQMSettingsCallback* qmsettings_callback);
- // Informs media optimization of initial encoding state.
- int32_t SetEncodingData(VideoCodecType send_codec_type,
- int32_t max_bit_rate,
- uint32_t frame_rate,
- uint32_t bit_rate,
- uint16_t width,
- uint16_t height,
- int num_temporal_layers);
-
- // Enables protection method.
void EnableProtectionMethod(bool enable, VCMProtectionMethodEnum method);
+ void EnableQM(bool enable);
+ void EnableFrameDropper(bool enable);
- // Returns weather or not protection method is enabled.
- bool IsProtectionMethodEnabled(VCMProtectionMethodEnum method);
-
- // Returns the actual input frame rate.
- uint32_t InputFrameRate();
+ // Lets the sender suspend video when the rate drops below
+ // |threshold_bps|, and turns back on when the rate goes back up above
+ // |threshold_bps| + |window_bps|.
+ void SuspendBelowMinBitrate(int threshold_bps, int window_bps);
+ bool IsVideoSuspended() const;
- // Returns the actual sent frame rate.
- uint32_t SentFrameRate();
+ bool DropFrame();
- // Returns the actual sent bit rate.
- uint32_t SentBitRate();
+ void UpdateContentData(const VideoContentMetrics* content_metrics);
// Informs Media Optimization of encoding output: Length and frame type.
int32_t UpdateWithEncodedData(int encoded_length,
uint32_t timestamp,
FrameType encoded_frame_type);
- // Registers a protection callback to be used to inform the user about the
- // protection methods used.
- int32_t RegisterProtectionCallback(
- VCMProtectionCallback* protection_callback);
-
- // Registers a quality settings callback to be used to inform VPM/user.
- int32_t RegisterVideoQMCallback(VCMQMSettingsCallback* video_qmsettings);
-
- void EnableFrameDropper(bool enable);
-
- bool DropFrame();
-
- // Returns the number of key/delta frames encoded.
- int32_t SentFrameCount(VCMFrameCount* frame_count) const;
-
- // Updates incoming frame rate value.
- void UpdateIncomingFrameRate();
-
- // Update content metric data.
- void UpdateContentData(const VideoContentMetrics* content_metrics);
-
- // Computes new Quality Mode.
- int32_t SelectQuality();
-
- // Lets the sender suspend video when the rate drops below
- // |threshold_bps|, and turns back on when the rate goes back up above
- // |threshold_bps| + |window_bps|.
- void SuspendBelowMinBitrate(int threshold_bps, int window_bps);
-
- // Accessors and mutators.
- int32_t max_bit_rate() const { return max_bit_rate_; }
- void set_max_payload_size(int32_t mtu) { max_payload_size_ = mtu; }
- bool video_suspended() const { return video_suspended_; }
+ uint32_t InputFrameRate();
+ uint32_t SentFrameRate();
+ uint32_t SentBitRate();
+ VCMFrameCount SentFrameCount();
private:
- typedef std::list<EncodedFrameSample> FrameSampleList;
enum {
kFrameCountHistorySize = 90
};
enum {
kFrameHistoryWinMs = 2000
};
+ enum {
+ kBitrateAverageWinMs = 1000
+ };
- // Updates protection callback with protection settings.
- int UpdateProtectionCallback(VCMProtectionMethod* selected_method,
- uint32_t* total_video_rate_bps,
- uint32_t* nack_overhead_rate_bps,
- uint32_t* fec_overhead_rate_bps);
+ struct EncodedFrameSample;
+ typedef std::list<EncodedFrameSample> FrameSampleList;
+ void UpdateIncomingFrameRate();
void PurgeOldFrameSamples(int64_t now_ms);
void UpdateSentBitrate(int64_t now_ms);
void UpdateSentFramerate();
+ // Computes new Quality Mode.
+ int32_t SelectQuality(VCMQMSettingsCallback* qmsettings_callback);
+
// Verifies if QM settings differ from default, i.e. if an update is required.
// Computes actual values, as will be sent to the encoder.
- bool QMUpdate(VCMResolutionScale* qm);
+ bool QMUpdate(VCMResolutionScale* qm,
+ VCMQMSettingsCallback* qmsettings_callback);
// Checks if we should make a QM change. Return true if yes, false otherwise.
bool CheckStatusForQMchange();
@@ -163,7 +123,6 @@ class MediaOptimization {
// the state of |video_suspended_| accordingly.
void CheckSuspendConditions();
- int32_t id_;
Clock* clock_;
int32_t max_bit_rate_;
VideoCodecType send_codec_type_;
@@ -180,8 +139,6 @@ class MediaOptimization {
float incoming_frame_rate_;
int64_t incoming_frame_times_[kFrameCountHistorySize];
bool enable_qm_;
- VCMProtectionCallback* video_protection_callback_;
- VCMQMSettingsCallback* video_qmsettings_callback_;
std::list<EncodedFrameSample> encoded_frame_samples_;
uint32_t avg_sent_bit_rate_bps_;
uint32_t avg_sent_framerate_;
@@ -196,8 +153,7 @@ class MediaOptimization {
bool video_suspended_;
int suspension_threshold_bps_;
int suspension_window_bps_;
-}; // End of MediaOptimization class declaration.
-
+};
} // namespace media_optimization
} // namespace webrtc
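
To illustrate the new contract, here is a hypothetical caller (the function name and its parameters are illustrative, not part of this patch): the protection and QM-settings callbacks are now handed to SetTargetRates on every call instead of being registered once, and either may be NULL to skip that path, as the updated unit test below does.

#include "webrtc/modules/video_coding/main/source/media_optimization.h"

// Sketch only: forwards channel feedback into media optimization using the
// per-call callbacks introduced above. Passing NULL disables protection
// updates and/or QM selection for that call.
void OnNetworkUpdate(webrtc::media_optimization::MediaOptimization* media_opt,
                     uint32_t target_bitrate_bps,
                     uint8_t fraction_lost,
                     uint32_t rtt_ms,
                     webrtc::VCMProtectionCallback* protection_cb,   // may be NULL
                     webrtc::VCMQMSettingsCallback* qmsettings_cb) { // may be NULL
  media_opt->SetTargetRates(target_bitrate_bps, fraction_lost, rtt_ms,
                            protection_cb, qmsettings_cb);
}
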
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
index 1425dad837b..bacfdc604ca 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
@@ -18,9 +18,6 @@ namespace media_optimization {
class TestMediaOptimization : public ::testing::Test {
protected:
enum {
- kId = 4711 // Id number for the MediaOptimization class.
- };
- enum {
kSampleRate = 90000 // RTP timestamps per second.
};
@@ -28,14 +25,13 @@ class TestMediaOptimization : public ::testing::Test {
// a special case (e.g. frame rate in media optimization).
TestMediaOptimization()
: clock_(1000),
- media_opt_(kId, &clock_),
+ media_opt_(&clock_),
frame_time_ms_(33),
next_timestamp_(0) {}
// This method mimics what happens in VideoSender::AddVideoFrame.
void AddFrameAndAdvanceTime(int bitrate_bps, bool expect_frame_drop) {
ASSERT_GE(bitrate_bps, 0);
- media_opt_.UpdateIncomingFrameRate();
bool frame_dropped = media_opt_.DropFrame();
EXPECT_EQ(expect_frame_drop, frame_dropped);
if (!frame_dropped) {
@@ -63,12 +59,14 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
media_opt_.SuspendBelowMinBitrate(kThresholdBps, kWindowBps);
// The video should not be suspended from the start.
- EXPECT_FALSE(media_opt_.video_suspended());
+ EXPECT_FALSE(media_opt_.IsVideoSuspended());
int target_bitrate_kbps = 100;
media_opt_.SetTargetRates(target_bitrate_kbps * 1000,
0, // Lossrate.
- 100); // RTT in ms.
+ 100, // RTT in ms.
+ NULL,
+ NULL);
media_opt_.EnableFrameDropper(true);
for (int time = 0; time < 2000; time += frame_time_ms_) {
ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, false));
@@ -77,11 +75,13 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
// Set the target rate below the limit for muting.
media_opt_.SetTargetRates(kThresholdBps - 1000,
0, // Lossrate.
- 100); // RTT in ms.
+ 100, // RTT in ms.
+ NULL,
+ NULL);
// Expect the muter to engage immediately and stay muted.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
- EXPECT_TRUE(media_opt_.video_suspended());
+ EXPECT_TRUE(media_opt_.IsVideoSuspended());
ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, true));
}
@@ -89,22 +89,26 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
// limit + window.
media_opt_.SetTargetRates(kThresholdBps + 1000,
0, // Lossrate.
- 100); // RTT in ms.
- // Expect the muter to stay muted.
+ 100, // RTT in ms.
+ NULL,
+ NULL);
+ // Expect the muter to stay muted.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
- EXPECT_TRUE(media_opt_.video_suspended());
+ EXPECT_TRUE(media_opt_.IsVideoSuspended());
ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, true));
}
// Set the target above limit + window.
media_opt_.SetTargetRates(kThresholdBps + kWindowBps + 1000,
0, // Lossrate.
- 100); // RTT in ms.
+ 100, // RTT in ms.
+ NULL,
+ NULL);
// Expect the muter to disengage immediately.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
- EXPECT_FALSE(media_opt_.video_suspended());
+ EXPECT_FALSE(media_opt_.IsVideoSuspended());
ASSERT_NO_FATAL_FAILURE(
AddFrameAndAdvanceTime((kThresholdBps + kWindowBps) / 1000, false));
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/packet.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/packet.cc
index 61ef2ee859a..c1f1a048e8f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/packet.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/packet.cc
@@ -19,6 +19,7 @@ VCMPacket::VCMPacket()
:
payloadType(0),
timestamp(0),
+ ntp_time_ms_(0),
seqNum(0),
dataPtr(NULL),
sizeBytes(0),
@@ -38,6 +39,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr,
const WebRtcRTPHeader& rtpHeader) :
payloadType(rtpHeader.header.payloadType),
timestamp(rtpHeader.header.timestamp),
+ ntp_time_ms_(rtpHeader.ntp_time_ms),
seqNum(rtpHeader.header.sequenceNumber),
dataPtr(ptr),
sizeBytes(size),
@@ -58,6 +60,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr,
VCMPacket::VCMPacket(const uint8_t* ptr, uint32_t size, uint16_t seq, uint32_t ts, bool mBit) :
payloadType(0),
timestamp(ts),
+ ntp_time_ms_(0),
seqNum(seq),
dataPtr(ptr),
sizeBytes(size),
@@ -76,6 +79,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr, uint32_t size, uint16_t seq, uint32_t t
void VCMPacket::Reset() {
payloadType = 0;
timestamp = 0;
+ ntp_time_ms_ = 0;
seqNum = 0;
dataPtr = NULL;
sizeBytes = 0;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/packet.h b/chromium/third_party/webrtc/modules/video_coding/main/source/packet.h
index e9a81bb1fd2..242d3a43142 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/packet.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/packet.h
@@ -33,6 +33,8 @@ public:
uint8_t payloadType;
uint32_t timestamp;
+ // NTP time of the capture time in local timebase in milliseconds.
+ int64_t ntp_time_ms_;
uint16_t seqNum;
const uint8_t* dataPtr;
uint32_t sizeBytes;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.cc
index ae13ddd4211..e179423a77a 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.cc
@@ -12,11 +12,13 @@
#include <assert.h>
+#include <cstdlib>
+
#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -26,15 +28,11 @@ enum { kMaxReceiverDelayMs = 10000 };
VCMReceiver::VCMReceiver(VCMTiming* timing,
Clock* clock,
EventFactory* event_factory,
- int32_t vcm_id,
- int32_t receiver_id,
bool master)
: crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vcm_id_(vcm_id),
clock_(clock),
- receiver_id_(receiver_id),
master_(master),
- jitter_buffer_(clock_, event_factory, vcm_id, receiver_id, master),
+ jitter_buffer_(clock_, event_factory),
timing_(timing),
render_wait_event_(event_factory->CreateEvent()),
state_(kPassive),
@@ -76,13 +74,6 @@ void VCMReceiver::UpdateRtt(uint32_t rtt) {
int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
uint16_t frame_width,
uint16_t frame_height) {
- if (packet.frameType == kVideoFrameKey) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Inserting key frame packet seqnum=%u, timestamp=%u",
- packet.seqNum, packet.timestamp);
- }
-
// Insert the packet into the jitter buffer. The packet can either be empty or
// contain media at this point.
bool retransmitted = false;
@@ -93,10 +84,6 @@ int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
} else if (ret == kFlushIndicator) {
return VCM_FLUSH_INDICATOR;
} else if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Error inserting packet seqnum=%u, timestamp=%u",
- packet.seqNum, packet.timestamp);
return VCM_JITTER_BUFFER_ERROR;
}
if (ret == kCompleteSession && !retransmitted) {
@@ -105,15 +92,6 @@ int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
// delay within the jitter estimate.
timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds());
}
- if (master_) {
- // Only trace the primary receiver to make it possible to parse and plot
- // the trace file.
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Packet seqnum=%u timestamp=%u inserted at %u",
- packet.seqNum, packet.timestamp,
- MaskWord64ToUWord32(clock_->TimeInMilliseconds()));
- }
return VCM_OK;
}
@@ -156,20 +134,17 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(
// Assume that render timing errors are due to changes in the video stream.
if (next_render_time_ms < 0) {
timing_error = true;
- } else if (abs(next_render_time_ms - now_ms) > max_video_delay_ms_) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "This frame is out of our delay bounds, resetting jitter "
- "buffer: %d > %d",
- static_cast<int>(abs(next_render_time_ms - now_ms)),
- max_video_delay_ms_);
+ } else if (std::abs(next_render_time_ms - now_ms) > max_video_delay_ms_) {
+ int frame_delay = static_cast<int>(std::abs(next_render_time_ms - now_ms));
+ LOG(LS_WARNING) << "A frame about to be decoded is out of the configured "
+ << "delay bounds (" << frame_delay << " > "
+ << max_video_delay_ms_
+ << "). Resetting the video jitter buffer.";
timing_error = true;
} else if (static_cast<int>(timing_->TargetVideoDelay()) >
max_video_delay_ms_) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "More than %u ms target delay. Flushing jitter buffer and"
- "resetting timing.", max_video_delay_ms_);
+ LOG(LS_WARNING) << "The video target delay has grown larger than "
+ << max_video_delay_ms_ << " ms. Resetting jitter buffer.";
timing_error = true;
}
@@ -278,10 +253,7 @@ VCMNackStatus VCMReceiver::NackList(uint16_t* nack_list,
bool request_key_frame = false;
uint16_t* internal_nack_list = jitter_buffer_.GetNackList(
nack_list_length, &request_key_frame);
- if (*nack_list_length > size) {
- *nack_list_length = 0;
- return kNackNeedMoreMemory;
- }
+ assert(*nack_list_length <= size);
if (internal_nack_list != NULL && *nack_list_length > 0) {
memcpy(nack_list, internal_nack_list, *nack_list_length * sizeof(uint16_t));
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.h b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.h
index ac510ea22b3..c037303978f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.h
@@ -25,7 +25,6 @@ class VCMEncodedFrame;
enum VCMNackStatus {
kNackOk,
- kNackNeedMoreMemory,
kNackKeyFrameRequest
};
@@ -40,8 +39,6 @@ class VCMReceiver {
VCMReceiver(VCMTiming* timing,
Clock* clock,
EventFactory* event_factory,
- int32_t vcm_id,
- int32_t receiver_id,
bool master);
~VCMReceiver();
@@ -95,9 +92,7 @@ class VCMReceiver {
static int32_t GenerateReceiverId();
CriticalSectionWrapper* crit_sect_;
- int32_t vcm_id_;
Clock* clock_;
- int32_t receiver_id_;
bool master_;
VCMJitterBuffer jitter_buffer_;
VCMTiming* timing_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver_unittest.cc
index 33a3d95f969..e80b9cc9e9e 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver_unittest.cc
@@ -31,7 +31,7 @@ class TestVCMReceiver : public ::testing::Test {
TestVCMReceiver()
: clock_(new SimulatedClock(0)),
timing_(clock_.get()),
- receiver_(&timing_, clock_.get(), &event_factory_, 1, 1, true) {
+ receiver_(&timing_, clock_.get(), &event_factory_, true) {
stream_generator_.reset(new
StreamGenerator(0, 0, clock_->TimeInMilliseconds()));
memset(data_buffer_, 0, kDataBufferSize);
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.cc
index 25d89e54c08..739cc824c37 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.cc
@@ -10,7 +10,6 @@
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/source/rtt_filter.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include <math.h>
#include <stdlib.h>
@@ -18,15 +17,11 @@
namespace webrtc {
-VCMRttFilter::VCMRttFilter(int32_t vcmId, int32_t receiverId)
-:
-_vcmId(vcmId),
-_receiverId(receiverId),
-_filtFactMax(35),
-_jumpStdDevs(2.5),
-_driftStdDevs(3.5),
-_detectThreshold(kMaxDriftJumpCount)
-{
+VCMRttFilter::VCMRttFilter()
+ : _filtFactMax(35),
+ _jumpStdDevs(2.5),
+ _driftStdDevs(3.5),
+ _detectThreshold(kMaxDriftJumpCount) {
Reset();
}
@@ -105,16 +100,13 @@ VCMRttFilter::Update(uint32_t rttMs)
_avgRtt = oldAvg;
_varRtt = oldVar;
}
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "RttFilter Update: sample=%u avgRtt=%f varRtt=%f maxRtt=%u",
- rttMs, _avgRtt, _varRtt, _maxRtt);
}
bool
VCMRttFilter::JumpDetection(uint32_t rttMs)
{
double diffFromAvg = _avgRtt - rttMs;
- if (abs(diffFromAvg) > _jumpStdDevs * sqrt(_varRtt))
+ if (fabs(diffFromAvg) > _jumpStdDevs * sqrt(_varRtt))
{
int diffSign = (diffFromAvg >= 0) ? 1 : -1;
int jumpCountSign = (_jumpCount >= 0) ? 1 : -1;
@@ -141,8 +133,6 @@ VCMRttFilter::JumpDetection(uint32_t rttMs)
ShortRttFilter(_jumpBuf, abs(_jumpCount));
_filtFactCount = _detectThreshold + 1;
_jumpCount = 0;
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Detected an RTT jump");
}
else
{
@@ -174,8 +164,6 @@ VCMRttFilter::DriftDetection(uint32_t rttMs)
ShortRttFilter(_driftBuf, _driftCount);
_filtFactCount = _detectThreshold + 1;
_driftCount = 0;
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Detected an RTT drift");
}
}
else
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.h b/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.h
index 9ce3798b053..8b816a0b46c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.h
@@ -19,7 +19,7 @@ namespace webrtc
class VCMRttFilter
{
public:
- VCMRttFilter(int32_t vcmId = 0, int32_t receiverId = 0);
+ VCMRttFilter();
VCMRttFilter& operator=(const VCMRttFilter& rhs);
@@ -48,8 +46,6 @@ private:
// Computes the short time average and maximum of the vector buf.
void ShortRttFilter(uint32_t* buf, uint32_t length);
- int32_t _vcmId;
- int32_t _receiverId;
bool _gotNonZeroUpdate;
double _avgRtt;
double _varRtt;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.cc
index 1cb60d39b58..dab3da1383d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.cc
@@ -11,6 +11,7 @@
#include "webrtc/modules/video_coding/main/source/session_info.h"
#include "webrtc/modules/video_coding/main/source/packet.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -403,6 +404,7 @@ int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
}
if (packets_.size() == kMaxPacketsInSession) {
+ LOG(LS_ERROR) << "Max number of packets per frame has been reached.";
return -1;
}
@@ -429,6 +431,8 @@ int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
first_packet_seq_num_ = static_cast<int>(packet.seqNum);
} else if (first_packet_seq_num_ != -1 &&
!IsNewerSequenceNumber(packet.seqNum, first_packet_seq_num_)) {
+ LOG(LS_WARNING) << "Received packet with a sequence number which is out of "
+ "frame boundaries";
return -3;
} else if (frame_type_ == kFrameEmpty && packet.frameType != kFrameEmpty) {
// Update the frame type with the type of the first media packet.
@@ -441,6 +445,8 @@ int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
last_packet_seq_num_ = static_cast<int>(packet.seqNum);
} else if (last_packet_seq_num_ != -1 &&
IsNewerSequenceNumber(packet.seqNum, last_packet_seq_num_)) {
+ LOG(LS_WARNING) << "Received packet with a sequence number which is out of "
+ "frame boundaries";
return -3;
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.h b/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.h
index 039f09763a2..cae3ee13892 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.h
@@ -147,7 +147,6 @@ class VCMSessionInfo {
bool complete_;
bool decodable_;
webrtc::FrameType frame_type_;
- bool previous_frame_loss_;
// Packets in this frame.
PacketList packets_;
int empty_seq_num_low_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc
deleted file mode 100644
index 1d911a54e44..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc
+++ /dev/null
@@ -1,248 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/source/timestamp_extrapolator.h"
-#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-VCMTimestampExtrapolator::VCMTimestampExtrapolator(Clock* clock,
- int32_t vcmId,
- int32_t id)
-:
-_rwLock(RWLockWrapper::CreateRWLock()),
-_vcmId(vcmId),
-_id(id),
-_clock(clock),
-_startMs(0),
-_firstTimestamp(0),
-_wrapArounds(0),
-_prevUnwrappedTimestamp(-1),
-_prevWrapTimestamp(-1),
-_lambda(1),
-_firstAfterReset(true),
-_packetCount(0),
-_startUpFilterDelayInPackets(2),
-_detectorAccumulatorPos(0),
-_detectorAccumulatorNeg(0),
-_alarmThreshold(60e3),
-_accDrift(6600), // in timestamp ticks, i.e. 15 ms
-_accMaxError(7000),
-_P11(1e10)
-{
- Reset();
-}
-
-VCMTimestampExtrapolator::~VCMTimestampExtrapolator()
-{
- delete _rwLock;
-}
-
-void
-VCMTimestampExtrapolator::Reset()
-{
- WriteLockScoped wl(*_rwLock);
- _startMs = _clock->TimeInMilliseconds();
- _prevMs = _startMs;
- _firstTimestamp = 0;
- _w[0] = 90.0;
- _w[1] = 0;
- _P[0][0] = 1;
- _P[1][1] = _P11;
- _P[0][1] = _P[1][0] = 0;
- _firstAfterReset = true;
- _prevUnwrappedTimestamp = -1;
- _prevWrapTimestamp = -1;
- _wrapArounds = 0;
- _packetCount = 0;
- _detectorAccumulatorPos = 0;
- _detectorAccumulatorNeg = 0;
-}
-
-void
-VCMTimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz, bool trace)
-{
-
- _rwLock->AcquireLockExclusive();
- if (tMs - _prevMs > 10e3)
- {
- // Ten seconds without a complete frame.
- // Reset the extrapolator
- _rwLock->ReleaseLockExclusive();
- Reset();
- _rwLock->AcquireLockExclusive();
- }
- else
- {
- _prevMs = tMs;
- }
-
- // Remove offset to prevent badly scaled matrices
- tMs -= _startMs;
-
- CheckForWrapArounds(ts90khz);
-
- int64_t unwrapped_ts90khz = static_cast<int64_t>(ts90khz) +
- _wrapArounds * ((static_cast<int64_t>(1) << 32) - 1);
-
- if (_prevUnwrappedTimestamp >= 0 &&
- unwrapped_ts90khz < _prevUnwrappedTimestamp)
- {
- // Drop reordered frames.
- _rwLock->ReleaseLockExclusive();
- return;
- }
-
- if (_firstAfterReset)
- {
- // Make an initial guess of the offset,
- // should be almost correct since tMs - _startMs
- // should about zero at this time.
- _w[1] = -_w[0] * tMs;
- _firstTimestamp = unwrapped_ts90khz;
- _firstAfterReset = false;
- }
-
- double residual =
- (static_cast<double>(unwrapped_ts90khz) - _firstTimestamp) -
- static_cast<double>(tMs) * _w[0] - _w[1];
- if (DelayChangeDetection(residual, trace) &&
- _packetCount >= _startUpFilterDelayInPackets)
- {
- // A sudden change of average network delay has been detected.
- // Force the filter to adjust its offset parameter by changing
- // the offset uncertainty. Don't do this during startup.
- _P[1][1] = _P11;
- }
- //T = [t(k) 1]';
- //that = T'*w;
- //K = P*T/(lambda + T'*P*T);
- double K[2];
- K[0] = _P[0][0] * tMs + _P[0][1];
- K[1] = _P[1][0] * tMs + _P[1][1];
- double TPT = _lambda + tMs * K[0] + K[1];
- K[0] /= TPT;
- K[1] /= TPT;
- //w = w + K*(ts(k) - that);
- _w[0] = _w[0] + K[0] * residual;
- _w[1] = _w[1] + K[1] * residual;
- //P = 1/lambda*(P - K*T'*P);
- double p00 = 1 / _lambda * (_P[0][0] - (K[0] * tMs * _P[0][0] + K[0] * _P[1][0]));
- double p01 = 1 / _lambda * (_P[0][1] - (K[0] * tMs * _P[0][1] + K[0] * _P[1][1]));
- _P[1][0] = 1 / _lambda * (_P[1][0] - (K[1] * tMs * _P[0][0] + K[1] * _P[1][0]));
- _P[1][1] = 1 / _lambda * (_P[1][1] - (K[1] * tMs * _P[0][1] + K[1] * _P[1][1]));
- _P[0][0] = p00;
- _P[0][1] = p01;
- _prevUnwrappedTimestamp = unwrapped_ts90khz;
- if (_packetCount < _startUpFilterDelayInPackets)
- {
- _packetCount++;
- }
- if (trace)
- {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id), "w[0]=%f w[1]=%f ts=%u tMs=%u", _w[0], _w[1], ts90khz, tMs);
- }
- _rwLock->ReleaseLockExclusive();
-}
-
-int64_t
-VCMTimestampExtrapolator::ExtrapolateLocalTime(uint32_t timestamp90khz)
-{
- ReadLockScoped rl(*_rwLock);
- int64_t localTimeMs = 0;
- CheckForWrapArounds(timestamp90khz);
- double unwrapped_ts90khz = static_cast<double>(timestamp90khz) +
- _wrapArounds * ((static_cast<int64_t>(1) << 32) - 1);
- if (_packetCount == 0)
- {
- localTimeMs = -1;
- }
- else if (_packetCount < _startUpFilterDelayInPackets)
- {
- localTimeMs = _prevMs + static_cast<int64_t>(
- static_cast<double>(unwrapped_ts90khz - _prevUnwrappedTimestamp) /
- 90.0 + 0.5);
- }
- else
- {
- if (_w[0] < 1e-3)
- {
- localTimeMs = _startMs;
- }
- else
- {
- double timestampDiff = unwrapped_ts90khz -
- static_cast<double>(_firstTimestamp);
- localTimeMs = static_cast<int64_t>(
- static_cast<double>(_startMs) + (timestampDiff - _w[1]) /
- _w[0] + 0.5);
- }
- }
- return localTimeMs;
-}
-
-// Investigates if the timestamp clock has overflowed since the last timestamp and
-// keeps track of the number of wrap arounds since reset.
-void
-VCMTimestampExtrapolator::CheckForWrapArounds(uint32_t ts90khz)
-{
- if (_prevWrapTimestamp == -1)
- {
- _prevWrapTimestamp = ts90khz;
- return;
- }
- if (ts90khz < _prevWrapTimestamp)
- {
- // This difference will probably be less than -2^31 if we have had a wrap around
- // (e.g. timestamp = 1, _previousTimestamp = 2^32 - 1). Since it is casted to a Word32,
- // it should be positive.
- if (static_cast<int32_t>(ts90khz - _prevWrapTimestamp) > 0)
- {
- // Forward wrap around
- _wrapArounds++;
- }
- }
- // This difference will probably be less than -2^31 if we have had a backward wrap around.
- // Since it is casted to a Word32, it should be positive.
- else if (static_cast<int32_t>(_prevWrapTimestamp - ts90khz) > 0)
- {
- // Backward wrap around
- _wrapArounds--;
- }
- _prevWrapTimestamp = ts90khz;
-}
-
-bool
-VCMTimestampExtrapolator::DelayChangeDetection(double error, bool trace)
-{
- // CUSUM detection of sudden delay changes
- error = (error > 0) ? VCM_MIN(error, _accMaxError) : VCM_MAX(error, -_accMaxError);
- _detectorAccumulatorPos = VCM_MAX(_detectorAccumulatorPos + error - _accDrift, (double)0);
- _detectorAccumulatorNeg = VCM_MIN(_detectorAccumulatorNeg + error + _accDrift, (double)0);
- if (_detectorAccumulatorPos > _alarmThreshold || _detectorAccumulatorNeg < -_alarmThreshold)
- {
- // Alarm
- if (trace)
- {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id), "g1=%f g2=%f alarm=1", _detectorAccumulatorPos, _detectorAccumulatorNeg);
- }
- _detectorAccumulatorPos = _detectorAccumulatorNeg = 0;
- return true;
- }
- if (trace)
- {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id), "g1=%f g2=%f alarm=0", _detectorAccumulatorPos, _detectorAccumulatorNeg);
- }
- return false;
-}
-
-}
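
For reference, the filter the deleted extrapolator implemented (and that its replacement under system_wrappers/ presumably reproduces) is an exponentially weighted recursive least-squares fit of RTP time against local time, reconstructed here from the inline comments above. With regressor T_k = [t_k, 1]^T, unwrapped timestamp sample ts_k and forgetting factor \lambda:

    K_k = \frac{P_{k-1} T_k}{\lambda + T_k^{\mathsf T} P_{k-1} T_k}
    w_k = w_{k-1} + K_k \, (ts_k - T_k^{\mathsf T} w_{k-1})
    P_k = \frac{1}{\lambda} \left( P_{k-1} - K_k T_k^{\mathsf T} P_{k-1} \right)

so w[0] tracks the 90 kHz clock rate (initialized to 90 ticks per millisecond) and w[1] the offset, with the CUSUM detector forcing P[1][1] back up when a sudden delay change is flagged.
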
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h b/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h
deleted file mode 100644
index 4565186a353..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
-#define WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
-
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc
-{
-
-class Clock;
-
-class VCMTimestampExtrapolator
-{
-public:
- VCMTimestampExtrapolator(Clock* clock,
- int32_t vcmId = 0,
- int32_t receiverId = 0);
- ~VCMTimestampExtrapolator();
- void Update(int64_t tMs, uint32_t ts90khz, bool trace = true);
- int64_t ExtrapolateLocalTime(uint32_t timestamp90khz);
- void Reset();
-
-private:
- void CheckForWrapArounds(uint32_t ts90khz);
- bool DelayChangeDetection(double error, bool trace = true);
- RWLockWrapper* _rwLock;
- int32_t _vcmId;
- int32_t _id;
- Clock* _clock;
- double _w[2];
- double _P[2][2];
- int64_t _startMs;
- int64_t _prevMs;
- uint32_t _firstTimestamp;
- int32_t _wrapArounds;
- int64_t _prevUnwrappedTimestamp;
- int64_t _prevWrapTimestamp;
- const double _lambda;
- bool _firstAfterReset;
- uint32_t _packetCount;
- const uint32_t _startUpFilterDelayInPackets;
-
- double _detectorAccumulatorPos;
- double _detectorAccumulatorNeg;
- const double _alarmThreshold;
- const double _accDrift;
- const double _accMaxError;
- const double _P11;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/timing.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/timing.cc
index 98a69e962fd..af0e35c4e1f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/timing.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/timing.cc
@@ -10,25 +10,18 @@
#include "webrtc/modules/video_coding/main/source/timing.h"
-
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
-#include "webrtc/modules/video_coding/main/source/timestamp_extrapolator.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
+#include "webrtc/system_wrappers/interface/timestamp_extrapolator.h"
namespace webrtc {
VCMTiming::VCMTiming(Clock* clock,
- int32_t vcm_id,
- int32_t timing_id,
VCMTiming* master_timing)
: crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vcm_id_(vcm_id),
clock_(clock),
- timing_id_(timing_id),
master_(false),
ts_extrapolator_(),
codec_timer_(),
@@ -40,7 +33,7 @@ VCMTiming::VCMTiming(Clock* clock,
prev_frame_timestamp_(0) {
if (master_timing == NULL) {
master_ = true;
- ts_extrapolator_ = new VCMTimestampExtrapolator(clock_, vcm_id, timing_id);
+ ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds());
} else {
ts_extrapolator_ = master_timing->ts_extrapolator_;
}
@@ -55,7 +48,7 @@ VCMTiming::~VCMTiming() {
void VCMTiming::Reset() {
CriticalSectionScoped cs(crit_sect_);
- ts_extrapolator_->Reset();
+ ts_extrapolator_->Reset(clock_->TimeInMilliseconds());
codec_timer_.Reset();
render_delay_ms_ = kDefaultRenderDelayMs;
min_playout_delay_ms_ = 0;
@@ -81,11 +74,6 @@ void VCMTiming::set_min_playout_delay(uint32_t min_playout_delay_ms) {
void VCMTiming::SetJitterDelay(uint32_t jitter_delay_ms) {
CriticalSectionScoped cs(crit_sect_);
if (jitter_delay_ms != jitter_delay_ms_) {
- if (master_) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, timing_id_),
- "Desired jitter buffer level: %u ms", jitter_delay_ms);
- }
jitter_delay_ms_ = jitter_delay_ms;
// When in initial state, set current delay to minimum delay.
if (current_delay_ms_ == 0) {
@@ -152,39 +140,21 @@ int32_t VCMTiming::StopDecodeTimer(uint32_t time_stamp,
int64_t start_time_ms,
int64_t now_ms) {
CriticalSectionScoped cs(crit_sect_);
- const int32_t max_dec_time = MaxDecodeTimeMs();
int32_t time_diff_ms = codec_timer_.StopTimer(start_time_ms, now_ms);
- if (time_diff_ms < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(vcm_id_,
- timing_id_), "Codec timer error: %d", time_diff_ms);
- assert(false);
- }
+ assert(time_diff_ms >= 0);
last_decode_ms_ = time_diff_ms;
- if (master_) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(vcm_id_,
- timing_id_),
- "Frame decoded: time_stamp=%u dec_time=%d max_dec_time=%u, at %u",
- time_stamp, time_diff_ms, max_dec_time, MaskWord64ToUWord32(now_ms));
- }
return 0;
}
void VCMTiming::IncomingTimestamp(uint32_t time_stamp, int64_t now_ms) {
CriticalSectionScoped cs(crit_sect_);
- ts_extrapolator_->Update(now_ms, time_stamp, master_);
+ ts_extrapolator_->Update(now_ms, time_stamp);
}
int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms)
const {
CriticalSectionScoped cs(crit_sect_);
const int64_t render_time_ms = RenderTimeMsInternal(frame_timestamp, now_ms);
- if (master_) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(vcm_id_,
- timing_id_), "Render frame %u at %u. Render delay %u",
- "jitter delay %u, max decode time %u, playout delay %u",
- frame_timestamp, MaskWord64ToUWord32(render_time_ms), render_delay_ms_,
- jitter_delay_ms_, MaxDecodeTimeMs(), min_playout_delay_ms_);
- }
return render_time_ms;
}
@@ -192,11 +162,6 @@ int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
int64_t now_ms) const {
int64_t estimated_complete_time_ms =
ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp);
- if (master_) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, timing_id_), "ExtrapolateLocalTime(%u)=%u ms",
- frame_timestamp, MaskWord64ToUWord32(estimated_complete_time_ms));
- }
if (estimated_complete_time_ms == -1) {
estimated_complete_time_ms = now_ms;
}
@@ -210,11 +175,7 @@ int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
int32_t VCMTiming::MaxDecodeTimeMs(FrameType frame_type /*= kVideoFrameDelta*/)
const {
const int32_t decode_time_ms = codec_timer_.RequiredDecodeTimeMs(frame_type);
- if (decode_time_ms < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(vcm_id_,
- timing_id_), "Negative maximum decode time: %d", decode_time_ms);
- return -1;
- }
+ assert(decode_time_ms >= 0);
return decode_time_ms;
}
@@ -254,11 +215,6 @@ uint32_t VCMTiming::TargetVideoDelay() const {
}
uint32_t VCMTiming::TargetDelayInternal() const {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, timing_id_),
- "Delay: min_playout=%u jitter=%u max_decode=%u render=%u",
- min_playout_delay_ms_, jitter_delay_ms_, MaxDecodeTimeMs(),
- render_delay_ms_);
return std::max(min_playout_delay_ms_,
jitter_delay_ms_ + MaxDecodeTimeMs() + render_delay_ms_);
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/timing.h b/chromium/third_party/webrtc/modules/video_coding/main/source/timing.h
index eb251b71153..1dca5e605af 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/timing.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/timing.h
@@ -18,15 +18,13 @@
namespace webrtc {
class Clock;
-class VCMTimestampExtrapolator;
+class TimestampExtrapolator;
class VCMTiming {
public:
// The primary timing component should be passed
// if this is the dual timing component.
VCMTiming(Clock* clock,
- int32_t vcm_id = 0,
- int32_t timing_id = 0,
VCMTiming* master_timing = NULL);
~VCMTiming();
@@ -101,11 +99,9 @@ class VCMTiming {
private:
CriticalSectionWrapper* crit_sect_;
- int32_t vcm_id_;
Clock* clock_;
- int32_t timing_id_;
bool master_;
- VCMTimestampExtrapolator* ts_extrapolator_;
+ TimestampExtrapolator* ts_extrapolator_;
VCMCodecTimer codec_timer_;
uint32_t render_delay_ms_;
uint32_t min_playout_delay_ms_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding.gypi b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding.gypi
index b4f6cb7b1c8..f19a585523d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding.gypi
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding.gypi
@@ -48,7 +48,6 @@
'receiver.h',
'rtt_filter.h',
'session_info.h',
- 'timestamp_extrapolator.h',
'timestamp_map.h',
'timing.h',
'video_coding_impl.h',
@@ -72,7 +71,6 @@
'receiver.cc',
'rtt_filter.cc',
'session_info.cc',
- 'timestamp_extrapolator.cc',
'timestamp_map.cc',
'timing.cc',
'video_coding_impl.cc',
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.cc
index 1decc2f112f..5b93a656717 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.cc
@@ -16,7 +16,6 @@
#include "webrtc/modules/video_coding/main/source/packet.h"
#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -45,15 +44,44 @@ VCMProcessTimer::Processed() {
} // namespace vcm
namespace {
+// This wrapper provides a way to modify the callback without the need to expose
+// a register method all the way down to the function calling it.
+class EncodedImageCallbackWrapper : public EncodedImageCallback {
+ public:
+ EncodedImageCallbackWrapper()
+ : cs_(CriticalSectionWrapper::CreateCriticalSection()), callback_(NULL) {}
+
+ virtual ~EncodedImageCallbackWrapper() {}
+
+ void Register(EncodedImageCallback* callback) {
+ CriticalSectionScoped cs(cs_.get());
+ callback_ = callback;
+ }
+
+ // TODO(andresp): Change to void as return value is ignored.
+ virtual int32_t Encoded(EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) {
+ CriticalSectionScoped cs(cs_.get());
+ if (callback_)
+ return callback_->Encoded(
+ encoded_image, codec_specific_info, fragmentation);
+ return 0;
+ }
+
+ private:
+ scoped_ptr<CriticalSectionWrapper> cs_;
+ EncodedImageCallback* callback_ GUARDED_BY(cs_);
+};
+
class VideoCodingModuleImpl : public VideoCodingModule {
public:
- VideoCodingModuleImpl(const int32_t id,
- Clock* clock,
+ VideoCodingModuleImpl(Clock* clock,
EventFactory* event_factory,
bool owns_event_factory)
: VideoCodingModule(),
- sender_(new vcm::VideoSender(id, clock)),
- receiver_(new vcm::VideoReceiver(id, clock, event_factory)),
+ sender_(new vcm::VideoSender(clock, &post_encode_callback_)),
+ receiver_(new vcm::VideoReceiver(clock, event_factory)),
own_event_factory_(owns_event_factory ? event_factory : NULL) {}
virtual ~VideoCodingModuleImpl() {
@@ -194,7 +222,8 @@ class VideoCodingModuleImpl : public VideoCodingModule {
}
virtual int StopDebugRecording() OVERRIDE {
- return sender_->StopDebugRecording();
+ sender_->StopDebugRecording();
+ return VCM_OK;
}
virtual void SuspendBelowMinBitrate() {
@@ -326,10 +355,11 @@ class VideoCodingModuleImpl : public VideoCodingModule {
virtual void RegisterPostEncodeImageCallback(
EncodedImageCallback* observer) OVERRIDE {
- sender_->RegisterPostEncodeImageCallback(observer);
+ post_encode_callback_.Register(observer);
}
private:
+ EncodedImageCallbackWrapper post_encode_callback_;
scoped_ptr<vcm::VideoSender> sender_;
scoped_ptr<vcm::VideoReceiver> receiver_;
scoped_ptr<EventFactory> own_event_factory_;
@@ -354,17 +384,16 @@ int32_t VideoCodingModule::Codec(VideoCodecType codecType, VideoCodec* codec) {
return VCMCodecDataBase::Codec(codecType, codec) ? 0 : -1;
}
-VideoCodingModule* VideoCodingModule::Create(const int32_t id) {
+VideoCodingModule* VideoCodingModule::Create() {
return new VideoCodingModuleImpl(
- id, Clock::GetRealTimeClock(), new EventFactoryImpl, true);
+ Clock::GetRealTimeClock(), new EventFactoryImpl, true);
}
-VideoCodingModule* VideoCodingModule::Create(const int32_t id,
- Clock* clock,
+VideoCodingModule* VideoCodingModule::Create(Clock* clock,
EventFactory* event_factory) {
assert(clock);
assert(event_factory);
- return new VideoCodingModuleImpl(id, clock, event_factory, false);
+ return new VideoCodingModuleImpl(clock, event_factory, false);
}
void VideoCodingModule::Destroy(VideoCodingModule* module) {
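The factory no longer takes a module id; callers either use the zero-argument Create() or supply their own Clock and EventFactory. A minimal usage sketch against the updated interface (the public header path is assumed from this tree):

  #include "webrtc/modules/video_coding/main/interface/video_coding.h"

  void CreateAndDestroyVcm() {
    // The zero-argument overload uses Clock::GetRealTimeClock() and owns a
    // default EventFactoryImpl, as the implementation above shows.
    webrtc::VideoCodingModule* vcm = webrtc::VideoCodingModule::Create();
    // ... register send/receive codecs and callbacks here ...
    webrtc::VideoCodingModule::Destroy(vcm);
  }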
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.h b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.h
index d9564c04bd9..bf0bc7905ba 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.h
@@ -32,6 +32,8 @@ class EncodedFrameObserver;
namespace vcm {
+class DebugRecorder;
+
class VCMProcessTimer {
public:
VCMProcessTimer(uint32_t periodMs, Clock* clock)
@@ -52,7 +54,8 @@ class VideoSender {
public:
typedef VideoCodingModule::SenderNackMode SenderNackMode;
- VideoSender(const int32_t id, Clock* clock);
+ VideoSender(Clock* clock, EncodedImageCallback* post_encode_callback);
+
~VideoSender();
int32_t InitializeSender();
@@ -68,7 +71,8 @@ class VideoSender {
uint8_t payloadType,
bool internalSource);
- int32_t CodecConfigParameters(uint8_t* buffer, int32_t size);
+ int32_t CodecConfigParameters(uint8_t* buffer, int32_t size) const;
+ int32_t SentFrameCount(VCMFrameCount* frameCount);
int Bitrate(unsigned int* bitrate) const;
int FrameRate(unsigned int* framerate) const;
@@ -88,7 +92,6 @@ class VideoSender {
int32_t IntraFrameRequest(int stream_index);
int32_t EnableFrameDropper(bool enable);
- int32_t SentFrameCount(VCMFrameCount* frameCount) const;
int SetSenderNackMode(SenderNackMode mode);
int SetSenderReferenceSelection(bool enable);
@@ -96,21 +99,19 @@ class VideoSender {
int SetSenderKeyFramePeriod(int periodMs);
int StartDebugRecording(const char* file_name_utf8);
- int StopDebugRecording();
+ void StopDebugRecording();
void SuspendBelowMinBitrate();
bool VideoSuspended() const;
- void RegisterPostEncodeImageCallback(
- EncodedImageCallback* post_encode_callback);
-
int32_t TimeUntilNextProcess();
int32_t Process();
private:
- int32_t _id;
Clock* clock_;
+ scoped_ptr<DebugRecorder> recorder_;
+
scoped_ptr<CriticalSectionWrapper> process_crit_sect_;
CriticalSectionWrapper* _sendCritSect;
VCMGenericEncoder* _encoder;
@@ -118,17 +119,19 @@ class VideoSender {
std::vector<FrameType> _nextFrameTypes;
media_optimization::MediaOptimization _mediaOpt;
VCMSendStatisticsCallback* _sendStatsCallback;
- FILE* _encoderInputFile;
VCMCodecDataBase _codecDataBase;
bool frame_dropper_enabled_;
VCMProcessTimer _sendStatsTimer;
+
+ VCMQMSettingsCallback* qm_settings_callback_;
+ VCMProtectionCallback* protection_callback_;
};
class VideoReceiver {
public:
typedef VideoCodingModule::ReceiverRobustness ReceiverRobustness;
- VideoReceiver(const int32_t id, Clock* clock, EventFactory* event_factory);
+ VideoReceiver(Clock* clock, EventFactory* event_factory);
~VideoReceiver();
int32_t InitializeReceiver();
@@ -196,7 +199,6 @@ class VideoReceiver {
// in any frame
};
- int32_t _id;
Clock* clock_;
scoped_ptr<CriticalSectionWrapper> process_crit_sect_;
CriticalSectionWrapper* _receiveCritSect;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc
index fbb511448c4..435e18202f7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc
@@ -35,7 +35,7 @@ class VCMRobustnessTest : public ::testing::Test {
virtual void SetUp() {
clock_.reset(new SimulatedClock(0));
ASSERT_TRUE(clock_.get() != NULL);
- vcm_ = VideoCodingModule::Create(0, clock_.get(), &event_factory_);
+ vcm_ = VideoCodingModule::Create(clock_.get(), &event_factory_);
ASSERT_TRUE(vcm_ != NULL);
ASSERT_EQ(0, vcm_->InitializeReceiver());
const size_t kMaxNackListSize = 250;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_test.gypi b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_test.gypi
index a64e02d02c8..b0fe510cf9c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_test.gypi
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_test.gypi
@@ -20,6 +20,7 @@
'<(webrtc_root)/test/test.gyp:test_support',
'<(webrtc_root)/test/metrics.gyp:metrics',
'<(webrtc_root)/common_video/common_video.gyp:common_video',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
],
'sources': [
# headers
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver.cc
index 68668eae7de..5bc1c90f52c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver.cc
@@ -16,7 +16,7 @@
#include "webrtc/modules/video_coding/main/source/packet.h"
#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
// #define DEBUG_DECODER_BIT_STREAM
@@ -24,18 +24,15 @@
namespace webrtc {
namespace vcm {
-VideoReceiver::VideoReceiver(const int32_t id,
- Clock* clock,
- EventFactory* event_factory)
- : _id(id),
- clock_(clock),
+VideoReceiver::VideoReceiver(Clock* clock, EventFactory* event_factory)
+ : clock_(clock),
process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
_receiveCritSect(CriticalSectionWrapper::CreateCriticalSection()),
_receiverInited(false),
- _timing(clock_, id, 1),
- _dualTiming(clock_, id, 2, &_timing),
- _receiver(&_timing, clock_, event_factory, id, 1, true),
- _dualReceiver(&_dualTiming, clock_, event_factory, id, 2, false),
+ _timing(clock_),
+ _dualTiming(clock_, &_timing),
+ _receiver(&_timing, clock_, event_factory, true),
+ _dualReceiver(&_dualTiming, clock_, event_factory, false),
_decodedFrameCallback(_timing, clock_),
_dualDecodedFrameCallback(_dualTiming, clock_),
_frameTypeCallback(NULL),
@@ -53,7 +50,7 @@ VideoReceiver::VideoReceiver(const int32_t id,
_scheduleKeyRequest(false),
max_nack_list_size_(0),
pre_decode_image_callback_(NULL),
- _codecDataBase(id),
+ _codecDataBase(),
_receiveStatsTimer(1000, clock_),
_retransmissionTimer(10, clock_),
_keyRequestTimer(500, clock_) {
@@ -121,8 +118,12 @@ int32_t VideoReceiver::Process() {
// Key frame requests
if (_keyRequestTimer.TimeUntilProcess() == 0) {
_keyRequestTimer.Processed();
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_scheduleKeyRequest && _frameTypeCallback != NULL) {
+ bool request_key_frame = false;
+ {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ request_key_frame = _scheduleKeyRequest && _frameTypeCallback != NULL;
+ }
+ if (request_key_frame) {
const int32_t ret = RequestKeyFrame();
if (ret != VCM_OK && returnValue == VCM_OK) {
returnValue = ret;
@@ -135,16 +136,24 @@ int32_t VideoReceiver::Process() {
// disabled when NACK is off.
if (_retransmissionTimer.TimeUntilProcess() == 0) {
_retransmissionTimer.Processed();
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_packetRequestCallback != NULL) {
- uint16_t length = max_nack_list_size_;
+ bool callback_registered = false;
+ uint16_t length;
+ {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ length = max_nack_list_size_;
+ callback_registered = _packetRequestCallback != NULL;
+ }
+ if (callback_registered && length > 0) {
std::vector<uint16_t> nackList(length);
const int32_t ret = NackList(&nackList[0], &length);
if (ret != VCM_OK && returnValue == VCM_OK) {
returnValue = ret;
}
- if (length > 0) {
- _packetRequestCallback->ResendPackets(&nackList[0], length);
+ if (ret == VCM_OK && length > 0) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ if (_packetRequestCallback != NULL) {
+ _packetRequestCallback->ResendPackets(&nackList[0], length);
+ }
}
}
}
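Both timer branches above now read the shared flags inside a narrow process_crit_sect_ scope and only afterwards call out to RequestKeyFrame() or ResendPackets(); reduced to an outline, the snapshot-then-call pattern is:

  bool request_key_frame = false;
  {
    CriticalSectionScoped cs(process_crit_sect_.get());  // copy state under the lock
    request_key_frame = _scheduleKeyRequest && _frameTypeCallback != NULL;
  }
  if (request_key_frame)
    RequestKeyFrame();  // callback work happens after the locked scope closes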
@@ -434,17 +443,9 @@ int32_t VideoReceiver::RequestSliceLossIndication(
const int32_t ret =
_frameTypeCallback->SliceLossIndicationRequest(pictureID);
if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to request key frame");
return ret;
}
} else {
- WEBRTC_TRACE(webrtc::kTraceWarning,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "No frame type request callback registered");
return VCM_MISSING_CALLBACK;
}
return VCM_OK;
@@ -452,22 +453,14 @@ int32_t VideoReceiver::RequestSliceLossIndication(
int32_t VideoReceiver::RequestKeyFrame() {
TRACE_EVENT0("webrtc", "RequestKeyFrame");
- CriticalSectionScoped cs(process_crit_sect_.get());
+ CriticalSectionScoped process_cs(process_crit_sect_.get());
if (_frameTypeCallback != NULL) {
const int32_t ret = _frameTypeCallback->RequestKeyFrame();
if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to request key frame");
return ret;
}
_scheduleKeyRequest = false;
} else {
- WEBRTC_TRACE(webrtc::kTraceWarning,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "No frame type request callback registered");
return VCM_MISSING_CALLBACK;
}
return VCM_OK;
@@ -490,29 +483,18 @@ int32_t VideoReceiver::DecodeDualFrame(uint16_t maxWaitTimeMs) {
VCMEncodedFrame* dualFrame =
_dualReceiver.FrameForDecoding(maxWaitTimeMs, dummyRenderTime);
if (dualFrame != NULL && _dualDecoder != NULL) {
- WEBRTC_TRACE(webrtc::kTraceStream,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Decoding frame %u with dual decoder",
- dualFrame->TimeStamp());
// Decode dualFrame and try to catch up
int32_t ret =
_dualDecoder->Decode(*dualFrame, clock_->TimeInMilliseconds());
if (ret != WEBRTC_VIDEO_CODEC_OK) {
- WEBRTC_TRACE(webrtc::kTraceWarning,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to decode frame with dual decoder");
+ LOG(LS_ERROR) << "Failed to decode frame with dual decoder. Error code: "
+ << ret;
_dualReceiver.ReleaseFrame(dualFrame);
return VCM_CODEC_ERROR;
}
if (_receiver.DualDecoderCaughtUp(dualFrame, _dualReceiver)) {
// Copy the complete decoder state of the dual decoder
// to the primary decoder.
- WEBRTC_TRACE(webrtc::kTraceStream,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Dual decoder caught up");
_codecDataBase.CopyDecoder(*_dualDecoder);
_codecDataBase.ReleaseDecoder(_dualDecoder);
_dualDecoder = NULL;
@@ -547,62 +529,65 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
int32_t ret = _decoder->Decode(frame, clock_->TimeInMilliseconds());
// Check for failed decoding, run frame type request callback if needed.
+ bool request_key_frame = false;
if (ret < 0) {
if (ret == VCM_ERROR_REQUEST_SLI) {
return RequestSliceLossIndication(
_decodedFrameCallback.LastReceivedPictureID() + 1);
} else {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to decode frame %u, requesting key frame",
- frame.TimeStamp());
- ret = RequestKeyFrame();
+ request_key_frame = true;
}
} else if (ret == VCM_REQUEST_SLI) {
ret = RequestSliceLossIndication(
_decodedFrameCallback.LastReceivedPictureID() + 1);
}
if (!frame.Complete() || frame.MissingFrame()) {
- CriticalSectionScoped cs(process_crit_sect_.get());
switch (_keyRequestMode) {
case kKeyOnKeyLoss: {
if (frame.FrameType() == kVideoFrameKey) {
- _scheduleKeyRequest = true;
- return VCM_OK;
+ request_key_frame = true;
+ ret = VCM_OK;
}
break;
}
case kKeyOnLoss: {
- _scheduleKeyRequest = true;
- return VCM_OK;
+ request_key_frame = true;
+ ret = VCM_OK;
}
default:
break;
}
}
+ if (request_key_frame) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _scheduleKeyRequest = true;
+ }
TRACE_EVENT_ASYNC_END0("webrtc", "Video", frame.TimeStamp());
return ret;
}
// Reset the decoder state
int32_t VideoReceiver::ResetDecoder() {
- CriticalSectionScoped cs(_receiveCritSect);
- if (_decoder != NULL) {
- _receiver.Initialize();
- _timing.Reset();
- {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _scheduleKeyRequest = false;
+ bool reset_key_request = false;
+ {
+ CriticalSectionScoped cs(_receiveCritSect);
+ if (_decoder != NULL) {
+ _receiver.Initialize();
+ _timing.Reset();
+ reset_key_request = true;
+ _decoder->Reset();
+ }
+ if (_dualReceiver.State() != kPassive) {
+ _dualReceiver.Initialize();
+ }
+ if (_dualDecoder != NULL) {
+ _codecDataBase.ReleaseDecoder(_dualDecoder);
+ _dualDecoder = NULL;
}
- _decoder->Reset();
- }
- if (_dualReceiver.State() != kPassive) {
- _dualReceiver.Initialize();
}
- if (_dualDecoder != NULL) {
- _codecDataBase.ReleaseDecoder(_dualDecoder);
- _dualDecoder = NULL;
+ if (reset_key_request) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _scheduleKeyRequest = false;
}
return VCM_OK;
}
@@ -710,25 +695,8 @@ int32_t VideoReceiver::NackList(uint16_t* nackList, uint16_t* size) {
nackStatus = _dualReceiver.NackList(nackList, *size, &nack_list_length);
}
*size = nack_list_length;
-
- switch (nackStatus) {
- case kNackNeedMoreMemory: {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Out of memory");
- return VCM_MEMORY;
- }
- case kNackKeyFrameRequest: {
- CriticalSectionScoped cs(_receiveCritSect);
- WEBRTC_TRACE(webrtc::kTraceWarning,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to get NACK list, requesting key frame");
+ if (nackStatus == kNackKeyFrameRequest) {
return RequestKeyFrame();
- }
- default:
- break;
}
return VCM_OK;
}
@@ -763,14 +731,17 @@ int VideoReceiver::SetReceiverRobustnessMode(
_keyRequestMode = kKeyOnError; // TODO(hlundin): On long NACK list?
break;
case VideoCodingModule::kSoftNack:
+#if 1
assert(false); // TODO(hlundin): Not completed.
return VCM_NOT_IMPLEMENTED;
+#else
// Enable hybrid NACK/FEC. Always wait for retransmissions and don't add
// extra delay when RTT is above kLowRttNackMs.
_receiver.SetNackMode(kNack, media_optimization::kLowRttNackMs, -1);
_dualReceiver.SetNackMode(kNoNack, -1, -1);
_keyRequestMode = kKeyOnError;
break;
+#endif
case VideoCodingModule::kDualDecoder:
if (decode_error_mode == kNoErrors) {
return VCM_PARAMETER_ERROR;
@@ -783,14 +754,17 @@ int VideoReceiver::SetReceiverRobustnessMode(
_keyRequestMode = kKeyOnError;
break;
case VideoCodingModule::kReferenceSelection:
+#if 1
assert(false); // TODO(hlundin): Not completed.
return VCM_NOT_IMPLEMENTED;
+#else
if (decode_error_mode == kNoErrors) {
return VCM_PARAMETER_ERROR;
}
_receiver.SetNackMode(kNoNack, -1, -1);
_dualReceiver.SetNackMode(kNoNack, -1, -1);
break;
+#endif
}
_receiver.SetDecodeErrorMode(decode_error_mode);
// The dual decoder should never decode with errors.
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc
index 4fd524d4bce..502dfa9dd2f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc
@@ -33,7 +33,7 @@ class TestVideoReceiver : public ::testing::Test {
TestVideoReceiver() : clock_(0) {}
virtual void SetUp() {
- receiver_.reset(new VideoReceiver(0, &clock_, &event_factory_));
+ receiver_.reset(new VideoReceiver(&clock_, &event_factory_));
EXPECT_EQ(0, receiver_->InitializeReceiver());
EXPECT_EQ(0,
receiver_->RegisterExternalDecoder(
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender.cc
index 948218b83a6..38ecc5479a4 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender.cc
@@ -17,30 +17,66 @@
#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
namespace vcm {
-VideoSender::VideoSender(const int32_t id, Clock* clock)
- : _id(id),
- clock_(clock),
+class DebugRecorder {
+ public:
+ DebugRecorder()
+ : cs_(CriticalSectionWrapper::CreateCriticalSection()), file_(NULL) {}
+
+ ~DebugRecorder() { Stop(); }
+
+ int Start(const char* file_name_utf8) {
+ CriticalSectionScoped cs(cs_.get());
+ if (file_)
+ fclose(file_);
+ file_ = fopen(file_name_utf8, "wb");
+ if (!file_)
+ return VCM_GENERAL_ERROR;
+ return VCM_OK;
+ }
+
+ void Stop() {
+ CriticalSectionScoped cs(cs_.get());
+ if (file_) {
+ fclose(file_);
+ file_ = NULL;
+ }
+ }
+
+ void Add(const I420VideoFrame& frame) {
+ CriticalSectionScoped cs(cs_.get());
+ if (file_)
+ PrintI420VideoFrame(frame, file_);
+ }
+
+ private:
+ scoped_ptr<CriticalSectionWrapper> cs_;
+ FILE* file_ GUARDED_BY(cs_);
+};
+
+VideoSender::VideoSender(Clock* clock,
+ EncodedImageCallback* post_encode_callback)
+ : clock_(clock),
+ recorder_(new DebugRecorder()),
process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
_sendCritSect(CriticalSectionWrapper::CreateCriticalSection()),
_encoder(),
- _encodedFrameCallback(),
+ _encodedFrameCallback(post_encode_callback),
_nextFrameTypes(1, kVideoFrameDelta),
- _mediaOpt(id, clock_),
+ _mediaOpt(clock_),
_sendStatsCallback(NULL),
- _encoderInputFile(NULL),
- _codecDataBase(id),
+ _codecDataBase(),
frame_dropper_enabled_(true),
- _sendStatsTimer(1000, clock_) {}
+ _sendStatsTimer(1000, clock_),
+ qm_settings_callback_(NULL),
+ protection_callback_(NULL) {}
VideoSender::~VideoSender() {
delete _sendCritSect;
- if (_encoderInputFile != NULL) {
- fclose(_encoderInputFile);
- }
}
int32_t VideoSender::Process() {
@@ -70,8 +106,6 @@ int32_t VideoSender::InitializeSender() {
_codecDataBase.ResetSender();
_encoder = NULL;
_encodedFrameCallback.SetTransportCallback(NULL);
- // setting default bitRate and frameRate to 0
- _mediaOpt.SetEncodingData(kVideoCodecUnknown, 0, 0, 0, 0, 0, 0);
_mediaOpt.Reset(); // Resetting frame dropper
return VCM_OK;
}
@@ -97,10 +131,8 @@ int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec,
_encoder = _codecDataBase.GetEncoder();
if (!ret) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to initialize encoder");
+ LOG(LS_ERROR) << "Failed to initialize the encoder with payload name "
+ << sendCodec->plName << ". Error code: " << ret;
return VCM_CODEC_ERROR;
}
@@ -125,9 +157,8 @@ int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec,
sendCodec->startBitrate * 1000,
sendCodec->width,
sendCodec->height,
- numLayers);
- _mediaOpt.set_max_payload_size(maxPayloadSize);
-
+ numLayers,
+ maxPayloadSize);
return VCM_OK;
}
@@ -171,7 +202,8 @@ int32_t VideoSender::RegisterExternalEncoder(VideoEncoder* externalEncoder,
}
// Get codec config parameters
-int32_t VideoSender::CodecConfigParameters(uint8_t* buffer, int32_t size) {
+int32_t VideoSender::CodecConfigParameters(uint8_t* buffer,
+ int32_t size) const {
CriticalSectionScoped cs(_sendCritSect);
if (_encoder != NULL) {
return _encoder->CodecConfigParameters(buffer, size);
@@ -179,6 +211,14 @@ int32_t VideoSender::CodecConfigParameters(uint8_t* buffer, int32_t size) {
return VCM_UNINITIALIZED;
}
+// TODO(andresp): Make const once media_opt is thread-safe and this has a
+// pointer to it.
+int32_t VideoSender::SentFrameCount(VCMFrameCount* frameCount) {
+ CriticalSectionScoped cs(_sendCritSect);
+ *frameCount = _mediaOpt.SentFrameCount();
+ return VCM_OK;
+}
+
// Get encode bitrate
int VideoSender::Bitrate(unsigned int* bitrate) const {
CriticalSectionScoped cs(_sendCritSect);
@@ -208,8 +248,11 @@ int32_t VideoSender::SetChannelParameters(uint32_t target_bitrate,
int32_t ret = 0;
{
CriticalSectionScoped sendCs(_sendCritSect);
- uint32_t targetRate =
- _mediaOpt.SetTargetRates(target_bitrate, lossRate, rtt);
+ uint32_t targetRate = _mediaOpt.SetTargetRates(target_bitrate,
+ lossRate,
+ rtt,
+ protection_callback_,
+ qm_settings_callback_);
if (_encoder != NULL) {
ret = _encoder->SetChannelParameters(lossRate, rtt);
if (ret < 0) {
@@ -247,17 +290,19 @@ int32_t VideoSender::RegisterSendStatisticsCallback(
// Register a video quality settings callback which will be called when frame
// rate/dimensions need to be updated for video quality optimization
int32_t VideoSender::RegisterVideoQMCallback(
- VCMQMSettingsCallback* videoQMSettings) {
+ VCMQMSettingsCallback* qm_settings_callback) {
CriticalSectionScoped cs(_sendCritSect);
- return _mediaOpt.RegisterVideoQMCallback(videoQMSettings);
+ qm_settings_callback_ = qm_settings_callback;
+ _mediaOpt.EnableQM(qm_settings_callback_ != NULL);
+ return VCM_OK;
}
// Register a video protection callback which will be called to deliver the
// requested FEC rate and NACK status (on/off).
int32_t VideoSender::RegisterProtectionCallback(
- VCMProtectionCallback* protection) {
+ VCMProtectionCallback* protection_callback) {
CriticalSectionScoped cs(_sendCritSect);
- _mediaOpt.RegisterProtectionCallback(protection);
+ protection_callback_ = protection_callback;
return VCM_OK;
}
@@ -314,33 +359,19 @@ int32_t VideoSender::AddVideoFrame(const I420VideoFrame& videoFrame,
if (_nextFrameTypes[0] == kFrameEmpty) {
return VCM_OK;
}
- _mediaOpt.UpdateIncomingFrameRate();
-
if (_mediaOpt.DropFrame()) {
- WEBRTC_TRACE(webrtc::kTraceStream,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Drop frame due to bitrate");
- } else {
- _mediaOpt.UpdateContentData(contentMetrics);
- int32_t ret =
- _encoder->Encode(videoFrame, codecSpecificInfo, _nextFrameTypes);
- if (_encoderInputFile != NULL) {
- if (PrintI420VideoFrame(videoFrame, _encoderInputFile) < 0) {
- return -1;
- }
- }
- if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Encode error: %d",
- ret);
- return ret;
- }
- for (size_t i = 0; i < _nextFrameTypes.size(); ++i) {
- _nextFrameTypes[i] = kVideoFrameDelta; // Default frame type.
- }
+ return VCM_OK;
+ }
+ _mediaOpt.UpdateContentData(contentMetrics);
+ int32_t ret =
+ _encoder->Encode(videoFrame, codecSpecificInfo, _nextFrameTypes);
+ recorder_->Add(videoFrame);
+ if (ret < 0) {
+ LOG(LS_ERROR) << "Failed to encode frame. Error code: " << ret;
+ return ret;
+ }
+ for (size_t i = 0; i < _nextFrameTypes.size(); ++i) {
+ _nextFrameTypes[i] = kVideoFrameDelta; // Default frame type.
}
return VCM_OK;
}
@@ -369,11 +400,6 @@ int32_t VideoSender::EnableFrameDropper(bool enable) {
return VCM_OK;
}
-int32_t VideoSender::SentFrameCount(VCMFrameCount* frameCount) const {
- CriticalSectionScoped cs(_sendCritSect);
- return _mediaOpt.SentFrameCount(frameCount);
-}
-
int VideoSender::SetSenderNackMode(SenderNackMode mode) {
CriticalSectionScoped cs(_sendCritSect);
@@ -406,20 +432,11 @@ int VideoSender::SetSenderKeyFramePeriod(int periodMs) {
}
int VideoSender::StartDebugRecording(const char* file_name_utf8) {
- CriticalSectionScoped cs(_sendCritSect);
- _encoderInputFile = fopen(file_name_utf8, "wb");
- if (_encoderInputFile == NULL)
- return VCM_GENERAL_ERROR;
- return VCM_OK;
+ return recorder_->Start(file_name_utf8);
}
-int VideoSender::StopDebugRecording() {
- CriticalSectionScoped cs(_sendCritSect);
- if (_encoderInputFile != NULL) {
- fclose(_encoderInputFile);
- _encoderInputFile = NULL;
- }
- return VCM_OK;
+void VideoSender::StopDebugRecording() {
+ recorder_->Stop();
}
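Recording is now funneled through the internal, lock-protected DebugRecorder instead of a raw FILE* guarded by _sendCritSect. At the module level the flow looks roughly like this (file name illustrative; Start() still reports VCM_GENERAL_ERROR if the file cannot be opened):

  webrtc::VideoCodingModule* vcm = webrtc::VideoCodingModule::Create();
  if (vcm->StartDebugRecording("/tmp/encoder_input.yuv") == VCM_OK) {
    // Frames that reach the encoder in AddVideoFrame() are also handed to
    // DebugRecorder::Add(); dropped frames are filtered out before that point.
    // ... feed frames ...
    vcm->StopDebugRecording();  // void on VideoSender; the module wrapper returns VCM_OK
  }
  webrtc::VideoCodingModule::Destroy(vcm);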
void VideoSender::SuspendBelowMinBitrate() {
@@ -443,14 +460,7 @@ void VideoSender::SuspendBelowMinBitrate() {
bool VideoSender::VideoSuspended() const {
CriticalSectionScoped cs(_sendCritSect);
- return _mediaOpt.video_suspended();
+ return _mediaOpt.IsVideoSuspended();
}
-
-void VideoSender::RegisterPostEncodeImageCallback(
- EncodedImageCallback* observer) {
- CriticalSectionScoped cs(_sendCritSect);
- _encodedFrameCallback.RegisterPostEncodeImageCallback(observer);
-}
-
} // namespace vcm
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
index 513a99ee74b..67b3e7aeccb 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
@@ -52,7 +52,7 @@ struct Vp8StreamInfo {
MATCHER_P(MatchesVp8StreamInfo, expected, "") {
bool res = true;
for (int tl = 0; tl < kMaxNumberOfTemporalLayers; ++tl) {
- if (abs(expected.framerate_fps[tl] - arg.framerate_fps[tl]) > 0.5) {
+ if (fabs(expected.framerate_fps[tl] - arg.framerate_fps[tl]) > 0.5) {
*result_listener << " framerate_fps[" << tl
<< "] = " << arg.framerate_fps[tl] << " (expected "
<< expected.framerate_fps[tl] << ") ";
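framerate_fps holds floating-point values, so plain abs() can bind to the integer overload pulled in via <stdlib.h> and truncate sub-frame differences before the 0.5 fps tolerance is applied; fabs() keeps the fractional part. For example:

  #include <cmath>
  #include <cstdlib>
  double diff = 30.0 - 29.2;                              // 0.8 fps deviation
  bool missed  = std::abs(static_cast<int>(diff)) > 0.5;  // false: truncated to 0
  bool flagged = std::fabs(diff) > 0.5;                   // true: mismatch reported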
@@ -173,7 +173,7 @@ class TestVideoSender : public ::testing::Test {
TestVideoSender() : clock_(1000), packetization_callback_(&clock_) {}
virtual void SetUp() {
- sender_.reset(new VideoSender(0, &clock_));
+ sender_.reset(new VideoSender(&clock_, &post_encode_callback_));
EXPECT_EQ(0, sender_->InitializeSender());
EXPECT_EQ(0, sender_->RegisterTransportCallback(&packetization_callback_));
}
@@ -185,6 +185,7 @@ class TestVideoSender : public ::testing::Test {
SimulatedClock clock_;
PacketizationCallback packetization_callback_;
+ MockEncodedImageCallback post_encode_callback_;
scoped_ptr<VideoSender> sender_;
scoped_ptr<FrameGenerator> generator_;
};
@@ -344,6 +345,8 @@ class TestVideoSenderWithVp8 : public TestVideoSender {
void InsertFrames(float framerate, float seconds) {
for (int i = 0; i < seconds * framerate; ++i) {
clock_.AdvanceTimeMilliseconds(1000.0f / framerate);
+ EXPECT_CALL(post_encode_callback_, Encoded(_, NULL, NULL))
+ .WillOnce(Return(0));
AddFrame();
// SetChannelParameters needs to be called frequently to propagate
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/OWNERS b/chromium/third_party/webrtc/modules/video_coding/utility/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/interface/video_processing.h b/chromium/third_party/webrtc/modules/video_processing/main/interface/video_processing.h
index b3e0483d06a..817d43d9bd6 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/interface/video_processing.h
+++ b/chromium/third_party/webrtc/modules/video_processing/main/interface/video_processing.h
@@ -236,14 +236,6 @@ class VideoProcessingModule : public Module {
uint32_t frame_rate) = 0;
/**
- Set max frame rate
- \param[in] max_frame_rate: maximum frame rate (limited to native frame rate)
-
- \return VPM_OK on success, a negative value on error (see error codes)
- */
- virtual int32_t SetMaxFramerate(uint32_t max_frame_rate) = 0;
-
- /**
Get decimated(target) frame rate
*/
virtual uint32_t Decimatedframe_rate() = 0;
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/OWNERS b/chromium/third_party/webrtc/modules/video_processing/main/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/brighten.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/brighten.cc
index ffabbf7fff7..907a549064a 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/brighten.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/brighten.cc
@@ -12,22 +12,15 @@
#include <stdlib.h>
-#include "webrtc/system_wrappers/interface/trace.h"
-
namespace webrtc {
namespace VideoProcessing {
int32_t Brighten(I420VideoFrame* frame, int delta) {
assert(frame);
if (frame->IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
- "zero size frame");
return VPM_PARAMETER_ERROR;
}
-
if (frame->width() <= 0 || frame->height() <= 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
- "Invalid frame size");
return VPM_PARAMETER_ERROR;
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/brightness_detection.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/brightness_detection.cc
index 8817bac434b..f33117d1376 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/brightness_detection.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/brightness_detection.cc
@@ -10,7 +10,6 @@
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_processing/main/source/brightness_detection.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include <math.h>
@@ -37,16 +36,12 @@ int32_t VPMBrightnessDetection::ProcessFrame(
const I420VideoFrame& frame,
const VideoProcessingModule::FrameStats& stats) {
if (frame.IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Null frame pointer");
return VPM_PARAMETER_ERROR;
}
int width = frame.width();
int height = frame.height();
if (!VideoProcessingModule::ValidFrameStats(stats)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Invalid frame stats");
return VPM_PARAMETER_ERROR;
}
@@ -58,7 +53,7 @@ int32_t VPMBrightnessDetection::ProcessFrame(
for (uint32_t i = 0; i < low_th; i++) {
prop_low += stats.hist[i];
}
-prop_low /= stats.num_pixels;
+ prop_low /= stats.num_pixels;
// Get proportion in highest bins.
unsigned char high_th = 230;
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/color_enhancement.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/color_enhancement.cc
index eeec01659c0..aaa3a462256 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/color_enhancement.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/color_enhancement.cc
@@ -12,44 +12,38 @@
#include "webrtc/modules/video_processing/main/source/color_enhancement.h"
#include "webrtc/modules/video_processing/main/source/color_enhancement_private.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
namespace VideoProcessing {
int32_t ColorEnhancement(I420VideoFrame* frame) {
-assert(frame);
-// Pointers to U and V color pixels.
-uint8_t* ptr_u;
-uint8_t* ptr_v;
-uint8_t temp_chroma;
-if (frame->IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
- -1, "Null frame pointer");
- return VPM_GENERAL_ERROR;
-}
-
-if (frame->width() == 0 || frame->height() == 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
- -1, "Invalid frame size");
- return VPM_GENERAL_ERROR;
-}
-
-// Set pointers to first U and V pixels (skip luminance).
-ptr_u = frame->buffer(kUPlane);
-ptr_v = frame->buffer(kVPlane);
-int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
-
-// Loop through all chrominance pixels and modify color.
-for (int ix = 0; ix < size_uv; ix++) {
- temp_chroma = colorTable[*ptr_u][*ptr_v];
- *ptr_v = colorTable[*ptr_v][*ptr_u];
- *ptr_u = temp_chroma;
-
- ptr_u++;
- ptr_v++;
-}
-return VPM_OK;
+ assert(frame);
+ // Pointers to U and V color pixels.
+ uint8_t* ptr_u;
+ uint8_t* ptr_v;
+ uint8_t temp_chroma;
+ if (frame->IsZeroSize()) {
+ return VPM_GENERAL_ERROR;
+ }
+ if (frame->width() == 0 || frame->height() == 0) {
+ return VPM_GENERAL_ERROR;
+ }
+
+ // Set pointers to first U and V pixels (skip luminance).
+ ptr_u = frame->buffer(kUPlane);
+ ptr_v = frame->buffer(kVPlane);
+ int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
+
+ // Loop through all chrominance pixels and modify color.
+ for (int ix = 0; ix < size_uv; ix++) {
+ temp_chroma = colorTable[*ptr_u][*ptr_v];
+ *ptr_v = colorTable[*ptr_v][*ptr_u];
+ *ptr_u = temp_chroma;
+
+ ptr_u++;
+ ptr_v++;
+ }
+ return VPM_OK;
}
} // namespace VideoProcessing
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/deflickering.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/deflickering.cc
index 898fd80f473..cdc6174883f 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/deflickering.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/deflickering.cc
@@ -14,8 +14,8 @@
#include <stdlib.h>
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/sort.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -102,21 +102,16 @@ int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
int height = frame->height();
if (frame->IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Null frame pointer");
return VPM_GENERAL_ERROR;
}
// Stricter height check due to subsampling size calculation below.
if (height < 2) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Invalid frame size");
+ LOG(LS_ERROR) << "Invalid frame size.";
return VPM_GENERAL_ERROR;
}
if (!VideoProcessingModule::ValidFrameStats(*stats)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Invalid frame stats");
return VPM_GENERAL_ERROR;
}
@@ -152,8 +147,7 @@ int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
// Ensure we won't get an overflow below.
// In practice, the number of subsampled pixels will not become this large.
if (y_sub_size > (1 << 21) - 1) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Subsampled number of pixels too large");
+ LOG(LS_ERROR) << "Subsampled number of pixels too large.";
return -1;
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/denoising.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/denoising.cc
index 79c4bcc3d1b..4c8dcb439f6 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/denoising.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/denoising.cc
@@ -9,7 +9,6 @@
*/
#include "webrtc/modules/video_processing/main/source/denoising.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include <string.h>
@@ -78,8 +77,6 @@ int32_t VPMDenoising::ProcessFrame(I420VideoFrame* frame) {
int32_t num_pixels_changed = 0;
if (frame->IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "zero size frame");
return VPM_GENERAL_ERROR;
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
index de4907029bc..e1cd04ff711 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
@@ -9,14 +9,12 @@
*/
#include "webrtc/modules/video_processing/main/source/frame_preprocessor.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
VPMFramePreprocessor::VPMFramePreprocessor()
: id_(0),
content_metrics_(NULL),
- max_frame_rate_(0),
resampled_frame_(),
enable_ca_(false),
frame_cnt_(0) {
@@ -60,14 +58,6 @@ void VPMFramePreprocessor::SetInputFrameResampleMode(
spatial_resampler_->SetInputFrameResampleMode(resampling_mode);
}
-int32_t VPMFramePreprocessor::SetMaxFramerate(uint32_t max_frame_rate) {
- if (max_frame_rate == 0) return VPM_PARAMETER_ERROR;
-
- // Max allowed frame_rate.
- max_frame_rate_ = max_frame_rate;
- return vd_->SetMaxFramerate(max_frame_rate);
-}
-
int32_t VPMFramePreprocessor::SetTargetResolution(
uint32_t width, uint32_t height, uint32_t frame_rate) {
if ( (width == 0) || (height == 0) || (frame_rate == 0)) {
@@ -78,7 +68,7 @@ int32_t VPMFramePreprocessor::SetTargetResolution(
if (ret_val < 0) return ret_val;
- ret_val = vd_->SetTargetframe_rate(frame_rate);
+ ret_val = vd_->SetTargetFramerate(frame_rate);
if (ret_val < 0) return ret_val;
return VPM_OK;
@@ -112,8 +102,6 @@ int32_t VPMFramePreprocessor::PreprocessFrame(const I420VideoFrame& frame,
vd_->UpdateIncomingframe_rate();
if (vd_->DropFrame()) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, id_,
- "Drop frame due to frame rate");
return 1; // drop 1 frame
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.h b/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.h
index ca62d38fc6d..64a5797b9bc 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.h
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.h
@@ -39,9 +39,6 @@ class VPMFramePreprocessor {
// Enable content analysis.
void EnableContentAnalysis(bool enable);
- // Set max frame rate.
- int32_t SetMaxFramerate(uint32_t max_frame_rate);
-
// Set target resolution: frame rate and dimension.
int32_t SetTargetResolution(uint32_t width, uint32_t height,
uint32_t frame_rate);
@@ -68,7 +65,6 @@ class VPMFramePreprocessor {
int32_t id_;
VideoContentMetrics* content_metrics_;
- uint32_t max_frame_rate_;
I420VideoFrame resampled_frame_;
VPMSpatialResampler* spatial_resampler_;
VPMContentAnalysis* ca_;
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.cc
index 8fd3d036919..bf05bd71545 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.cc
@@ -16,15 +16,7 @@
namespace webrtc {
-VPMVideoDecimator::VPMVideoDecimator()
- : overshoot_modifier_(0),
- drop_count_(0),
- keep_count_(0),
- target_frame_rate_(30),
- incoming_frame_rate_(0.0f),
- max_frame_rate_(30),
- incoming_frame_times_(),
- enable_temporal_decimation_(true) {
+VPMVideoDecimator::VPMVideoDecimator() {
Reset();
}
@@ -36,7 +28,6 @@ void VPMVideoDecimator::Reset() {
keep_count_ = 0;
target_frame_rate_ = 30;
incoming_frame_rate_ = 0.0f;
- max_frame_rate_ = 30;
memset(incoming_frame_times_, 0, sizeof(incoming_frame_times_));
enable_temporal_decimation_ = true;
}
@@ -45,26 +36,10 @@ void VPMVideoDecimator::EnableTemporalDecimation(bool enable) {
enable_temporal_decimation_ = enable;
}
-int32_t VPMVideoDecimator::SetMaxFramerate(uint32_t max_frame_rate) {
- if (max_frame_rate == 0) return VPM_PARAMETER_ERROR;
-
- max_frame_rate_ = max_frame_rate;
-
- if (target_frame_rate_ > max_frame_rate_)
- target_frame_rate_ = max_frame_rate_;
-
- return VPM_OK;
-}
-
-int32_t VPMVideoDecimator::SetTargetframe_rate(uint32_t frame_rate) {
+int32_t VPMVideoDecimator::SetTargetFramerate(uint32_t frame_rate) {
if (frame_rate == 0) return VPM_PARAMETER_ERROR;
- if (frame_rate > max_frame_rate_) {
- // Override.
- target_frame_rate_ = max_frame_rate_;
- } else {
- target_frame_rate_ = frame_rate;
- }
+ target_frame_rate_ = frame_rate;
return VPM_OK;
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.h b/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.h
index d17da618802..fca74aeae15 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.h
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.h
@@ -25,8 +25,7 @@ class VPMVideoDecimator {
void EnableTemporalDecimation(bool enable);
- int32_t SetMaxFramerate(uint32_t max_frame_rate);
- int32_t SetTargetframe_rate(uint32_t frame_rate);
+ int32_t SetTargetFramerate(uint32_t frame_rate);
bool DropFrame();
@@ -50,7 +49,6 @@ class VPMVideoDecimator {
uint32_t keep_count_;
uint32_t target_frame_rate_;
float incoming_frame_rate_;
- uint32_t max_frame_rate_;
int64_t incoming_frame_times_[kFrameCountHistory_size];
bool enable_temporal_decimation_;
};
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.cc
index af1bfe1a412..3560030c86c 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.cc
@@ -11,7 +11,7 @@
#include "webrtc/modules/video_processing/main/source/video_processing_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include <assert.h>
@@ -68,13 +68,9 @@ VideoProcessingModuleImpl::VideoProcessingModuleImpl(const int32_t id)
deflickering_.ChangeUniqueId(id);
denoising_.ChangeUniqueId(id);
frame_pre_processor_.ChangeUniqueId(id);
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, id_,
- "Created");
}
VideoProcessingModuleImpl::~VideoProcessingModuleImpl() {
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, id_,
- "Destroyed");
delete &mutex_;
}
@@ -89,8 +85,7 @@ void VideoProcessingModuleImpl::Reset() {
int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats,
const I420VideoFrame& frame) {
if (frame.IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
- "zero size frame");
+ LOG(LS_ERROR) << "Zero size frame.";
return VPM_PARAMETER_ERROR;
}
@@ -121,7 +116,10 @@ int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats,
}
bool VideoProcessingModule::ValidFrameStats(const FrameStats& stats) {
- if (stats.num_pixels == 0) return false;
+ if (stats.num_pixels == 0) {
+ LOG(LS_WARNING) << "Invalid frame stats.";
+ return false;
+ }
return true;
}
@@ -173,11 +171,6 @@ void VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling
frame_pre_processor_.SetInputFrameResampleMode(resampling_mode);
}
-int32_t VideoProcessingModuleImpl::SetMaxFramerate(uint32_t max_frame_rate) {
- CriticalSectionScoped cs(&mutex_);
- return frame_pre_processor_.SetMaxFramerate(max_frame_rate);
-}
-
int32_t VideoProcessingModuleImpl::SetTargetResolution(uint32_t width,
uint32_t height,
uint32_t frame_rate) {
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.h b/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.h
index 913bb648364..deae6ff657e 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.h
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.h
@@ -51,9 +51,6 @@ class VideoProcessingModuleImpl : public VideoProcessingModule {
// Enable content analysis
virtual void EnableContentAnalysis(bool enable);
- // Set max frame rate
- virtual int32_t SetMaxFramerate(uint32_t max_frame_rate);
-
// Set Target Resolution: frame rate and dimension
virtual int32_t SetTargetResolution(uint32_t width,
uint32_t height,
diff --git a/chromium/third_party/webrtc/modules/video_render/OWNERS b/chromium/third_party/webrtc/modules/video_render/OWNERS
index 5e8ed090910..7dd4a3624b0 100644
--- a/chromium/third_party/webrtc/modules/video_render/OWNERS
+++ b/chromium/third_party/webrtc/modules/video_render/OWNERS
@@ -3,3 +3,10 @@ mflodman@webrtc.org
perkj@webrtc.org
wu@webrtc.org
mallinath@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
index 2057e282f48..c2afbbd6396 100644
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
+++ b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
@@ -245,7 +245,6 @@ AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"AndroidNativeOpenGl2Channel dtor");
- delete &_renderCritSect;
if (_jvm) {
// get the JNI env for this thread
bool isAttached = false;
@@ -277,6 +276,8 @@ AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
}
}
}
+
+ delete &_renderCritSect;
}
int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm b/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm
index 0707a714b9f..9dfa69d1b79 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm
+++ b/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
// This files is mostly copied from
// webrtc/modules/video_render/android/video_render_opengles20.h
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm
index 2a3ca180f72..02814b22234 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
using namespace webrtc;
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
index 7bc46bfdad5..bbada099edf 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
@@ -246,7 +250,7 @@ int VideoRenderIosGles20::GetWindowRect(Rect& rect) {
int VideoRenderIosGles20::ChangeWindow(void* new_window) {
CriticalSectionScoped cs(gles_crit_sec_.get());
- view_ = (VideoRenderIosView*)new_window;
+ view_ = (__bridge VideoRenderIosView*)new_window;
return 0;
}
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h
index 138a524a0db..e38ed7ae4b1 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h
@@ -99,7 +99,7 @@ class VideoRenderIosImpl : IVideoRender {
bool full_screen_;
CriticalSectionWrapper* crit_sec_;
- VideoRenderIosGles20* ptr_ios_render_;
+ webrtc::scoped_ptr<VideoRenderIosGles20> ptr_ios_render_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm
index 089b1e76988..9b8e1d62438 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -33,18 +37,13 @@ VideoRenderIosImpl::VideoRenderIosImpl(const int32_t id,
VideoRenderIosImpl::~VideoRenderIosImpl() {
delete crit_sec_;
-
- if (ptr_ios_render_) {
- delete ptr_ios_render_;
- ptr_ios_render_ = NULL;
- }
}
int32_t VideoRenderIosImpl::Init() {
CriticalSectionScoped cs(crit_sec_);
- ptr_ios_render_ = new VideoRenderIosGles20(
- (VideoRenderIosView*)ptr_window_, full_screen_, id_);
+ ptr_ios_render_.reset(new VideoRenderIosGles20(
+ (__bridge VideoRenderIosView*)ptr_window_, full_screen_, id_));
return ptr_ios_render_->Init();
;
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h
index db053560607..915c0f71e20 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h
@@ -16,15 +16,7 @@
#include "webrtc/modules/video_render/ios/open_gles20.h"
-@interface VideoRenderIosView : UIView {
- @private // NOLINT
- EAGLContext* context_;
- webrtc::OpenGles20* gles_renderer20_;
- int _frameBufferWidth;
- int _frameBufferHeight;
- unsigned int _defaultFrameBuffer;
- unsigned int _colorRenderBuffer;
-}
+@interface VideoRenderIosView : UIView
- (BOOL)createContext;
- (BOOL)presentFramebuffer;
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm
index 662c9676a61..2e00e097eec 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm
@@ -8,12 +8,23 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
#include "webrtc/system_wrappers/interface/trace.h"
using namespace webrtc;
-@implementation VideoRenderIosView
+@implementation VideoRenderIosView {
+ EAGLContext* _context;
+ webrtc::scoped_ptr<webrtc::OpenGles20> _gles_renderer20;
+ int _frameBufferWidth;
+ int _frameBufferHeight;
+ unsigned int _defaultFrameBuffer;
+ unsigned int _colorRenderBuffer;
+}
@synthesize context = context_;
@@ -25,7 +36,7 @@ using namespace webrtc;
// init super class
self = [super initWithCoder:coder];
if (self) {
- gles_renderer20_ = new OpenGles20();
+ _gles_renderer20.reset(new OpenGles20());
}
return self;
}
@@ -34,7 +45,7 @@ using namespace webrtc;
// init super class
self = [super init];
if (self) {
- gles_renderer20_ = new OpenGles20();
+ _gles_renderer20.reset(new OpenGles20());
}
return self;
}
@@ -43,7 +54,7 @@ using namespace webrtc;
// init super class
self = [super initWithFrame:frame];
if (self) {
- gles_renderer20_ = new OpenGles20();
+ _gles_renderer20.reset(new OpenGles20());
}
return self;
}
@@ -59,13 +70,7 @@ using namespace webrtc;
_colorRenderBuffer = 0;
}
- context_ = nil;
-
- if (gles_renderer20_) {
- delete gles_renderer20_;
- }
-
- [super dealloc];
+ [EAGLContext setCurrentContext:nil];
}
- (NSString*)description {
@@ -84,14 +89,13 @@ using namespace webrtc;
kEAGLColorFormatRGBA8,
kEAGLDrawablePropertyColorFormat,
nil];
- context_ = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+ _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
- if (!context_) {
+ if (!_context) {
return NO;
}
- // set current EAGLContext to self context_
- if (![EAGLContext setCurrentContext:context_]) {
+ if (![EAGLContext setCurrentContext:_context]) {
return NO;
}
@@ -102,7 +106,7 @@ using namespace webrtc;
// Create color render buffer and allocate backing store.
glGenRenderbuffers(1, &_colorRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
- [context_ renderbufferStorage:GL_RENDERBUFFER
+ [_context renderbufferStorage:GL_RENDERBUFFER
fromDrawable:(CAEAGLLayer*)self.layer];
glGetRenderbufferParameteriv(
GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_frameBufferWidth);
@@ -121,12 +125,12 @@ using namespace webrtc;
glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
- return gles_renderer20_->Setup([self bounds].size.width,
+ return _gles_renderer20->Setup([self bounds].size.width,
[self bounds].size.height);
}
- (BOOL)presentFramebuffer {
- if (![context_ presentRenderbuffer:GL_RENDERBUFFER]) {
+ if (![_context presentRenderbuffer:GL_RENDERBUFFER]) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
0,
@@ -135,21 +139,15 @@ using namespace webrtc;
__FUNCTION__,
__LINE__);
}
-
- // update UI stuff on the main thread
- [self performSelectorOnMainThread:@selector(setNeedsDisplay)
- withObject:nil
- waitUntilDone:NO];
-
return YES;
}
- (BOOL)renderFrame:(I420VideoFrame*)frameToRender {
- if (![EAGLContext setCurrentContext:context_]) {
+ if (![EAGLContext setCurrentContext:_context]) {
return NO;
}
- return gles_renderer20_->Render(*frameToRender);
+ return _gles_renderer20->Render(*frameToRender);
}
- (BOOL)setCoordinatesForZOrder:(const float)zOrder
@@ -157,7 +155,7 @@ using namespace webrtc;
Top:(const float)top
Right:(const float)right
Bottom:(const float)bottom {
- return gles_renderer20_->SetCoordinates(zOrder, left, top, right, bottom);
+ return _gles_renderer20->SetCoordinates(zOrder, left, top, right, bottom);
}
@end
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render.gypi b/chromium/third_party/webrtc/modules/video_render/video_render.gypi
index 4f3844406e0..71d969baba2 100644
--- a/chromium/third_party/webrtc/modules/video_render/video_render.gypi
+++ b/chromium/third_party/webrtc/modules/video_render/video_render.gypi
@@ -87,6 +87,12 @@
'android/video_render_android_surface_view.cc',
'android/video_render_opengles20.cc',
],
+ }, {
+ 'link_settings': {
+ 'libraries': [
+ '-lGLESv2',
+ ],
+ },
}],
['OS!="ios" or include_internal_video_render==0', {
'sources!': [
@@ -112,6 +118,12 @@
'linux/video_x11_channel.cc',
'linux/video_x11_render.cc',
],
+ }, {
+ 'link_settings': {
+ 'libraries': [
+ '-lXext',
+ ],
+ },
}],
['OS!="mac" or include_internal_video_render==0', {
'sources!': [
@@ -130,6 +142,9 @@
],
}],
['OS=="ios"', {
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ },
'all_dependent_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
@@ -205,11 +220,11 @@
],
}],
['OS=="linux"', {
- 'libraries': [
- '-lrt',
- '-lXext',
- '-lX11',
- ],
+ 'link_settings': {
+ 'libraries': [
+ '-lX11',
+ ],
+ },
}],
['OS=="mac"', {
'xcode_settings': {
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_frames.cc b/chromium/third_party/webrtc/modules/video_render/video_render_frames.cc
index be5cac9aaa6..d790877e31f 100644
--- a/chromium/third_party/webrtc/modules/video_render/video_render_frames.cc
+++ b/chromium/third_party/webrtc/modules/video_render/video_render_frames.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/video_render//video_render_frames.h"
+#include "webrtc/modules/video_render/video_render_frames.h"
#include <assert.h>
@@ -19,13 +19,12 @@
namespace webrtc {
-const int32_t KEventMaxWaitTimeMs = 200;
+const uint32_t KEventMaxWaitTimeMs = 200;
const uint32_t kMinRenderDelayMs = 10;
const uint32_t kMaxRenderDelayMs = 500;
VideoRenderFrames::VideoRenderFrames()
- : incoming_frames_(),
- render_delay_ms_(10) {
+ : render_delay_ms_(10) {
}
VideoRenderFrames::~VideoRenderFrames() {
@@ -35,12 +34,19 @@ VideoRenderFrames::~VideoRenderFrames() {
int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
const int64_t time_now = TickTime::MillisecondTimestamp();
- if (new_frame->render_time_ms() + KOldRenderTimestampMS < time_now) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+ // Drop old frames only when there are other frames in the queue; otherwise, a
+ // really slow system never renders any frames.
+ if (!incoming_frames_.empty() &&
+ new_frame->render_time_ms() + KOldRenderTimestampMS < time_now) {
+ WEBRTC_TRACE(kTraceWarning,
+ kTraceVideoRenderer,
+ -1,
"%s: too old frame, timestamp=%u.",
- __FUNCTION__, new_frame->timestamp());
+ __FUNCTION__,
+ new_frame->timestamp());
return -1;
}
+
if (new_frame->render_time_ms() > time_now + KFutureRenderTimestampMS) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: frame too long into the future, timestamp=%u.",
@@ -49,26 +55,18 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
}
if (new_frame->native_handle() != NULL) {
- incoming_frames_.PushBack(new TextureVideoFrame(
- static_cast<NativeHandle*>(new_frame->native_handle()),
- new_frame->width(),
- new_frame->height(),
- new_frame->timestamp(),
- new_frame->render_time_ms()));
- return incoming_frames_.GetSize();
+ incoming_frames_.push_back(new_frame->CloneFrame());
+ return static_cast<int32_t>(incoming_frames_.size());
}
// Get an empty frame
I420VideoFrame* frame_to_add = NULL;
- if (!empty_frames_.Empty()) {
- ListItem* item = empty_frames_.First();
- if (item) {
- frame_to_add = static_cast<I420VideoFrame*>(item->GetItem());
- empty_frames_.Erase(item);
- }
+ if (!empty_frames_.empty()) {
+ frame_to_add = empty_frames_.front();
+ empty_frames_.pop_front();
}
if (!frame_to_add) {
- if (empty_frames_.GetSize() + incoming_frames_.GetSize() >
+ if (empty_frames_.size() + incoming_frames_.size() >
KMaxNumberOfFrames) {
// Already allocated too many frames.
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer,
@@ -80,7 +78,7 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
// Allocate new memory.
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
"%s: allocating buffer %d", __FUNCTION__,
- empty_frames_.GetSize() + incoming_frames_.GetSize());
+ empty_frames_.size() + incoming_frames_.size());
frame_to_add = new I420VideoFrame();
if (!frame_to_add) {
@@ -97,33 +95,28 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
// TODO(mflodman) Change this!
// Remove constness. Copying will be costly.
frame_to_add->SwapFrame(new_frame);
- incoming_frames_.PushBack(frame_to_add);
+ incoming_frames_.push_back(frame_to_add);
- return incoming_frames_.GetSize();
+ return static_cast<int32_t>(incoming_frames_.size());
}
I420VideoFrame* VideoRenderFrames::FrameToRender() {
I420VideoFrame* render_frame = NULL;
- while (!incoming_frames_.Empty()) {
- ListItem* item = incoming_frames_.First();
- if (item) {
- I420VideoFrame* oldest_frame_in_list =
- static_cast<I420VideoFrame*>(item->GetItem());
- if (oldest_frame_in_list->render_time_ms() <=
- TickTime::MillisecondTimestamp() + render_delay_ms_) {
- // This is the oldest one so far and it's OK to render.
- if (render_frame) {
- // This one is older than the newly found frame, remove this one.
- ReturnFrame(render_frame);
- }
- render_frame = oldest_frame_in_list;
- incoming_frames_.Erase(item);
- } else {
- // We can't release this one yet, we're done here.
- break;
+ FrameList::iterator iter = incoming_frames_.begin();
+ while (iter != incoming_frames_.end()) {
+ I420VideoFrame* oldest_frame_in_list = *iter;
+ if (oldest_frame_in_list->render_time_ms() <=
+ TickTime::MillisecondTimestamp() + render_delay_ms_) {
+ // This is the oldest one so far and it's OK to render.
+ if (render_frame) {
+ // This one is older than the newly found frame, remove this one.
+ ReturnFrame(render_frame);
}
+ render_frame = oldest_frame_in_list;
+ iter = incoming_frames_.erase(iter);
} else {
- assert(false);
+ // We can't release this one yet, we're done here.
+ break;
}
}
return render_frame;
@@ -135,7 +128,7 @@ int32_t VideoRenderFrames::ReturnFrame(I420VideoFrame* old_frame) {
old_frame->ResetSize();
old_frame->set_timestamp(0);
old_frame->set_render_time_ms(0);
- empty_frames_.PushBack(old_frame);
+ empty_frames_.push_back(old_frame);
} else {
delete old_frame;
}
@@ -143,40 +136,29 @@ int32_t VideoRenderFrames::ReturnFrame(I420VideoFrame* old_frame) {
}
int32_t VideoRenderFrames::ReleaseAllFrames() {
- while (!incoming_frames_.Empty()) {
- ListItem* item = incoming_frames_.First();
- if (item) {
- I420VideoFrame* frame = static_cast<I420VideoFrame*>(item->GetItem());
- assert(frame != NULL);
- delete frame;
- }
- incoming_frames_.Erase(item);
+ for (FrameList::iterator iter = incoming_frames_.begin();
+ iter != incoming_frames_.end(); ++iter) {
+ delete *iter;
}
- while (!empty_frames_.Empty()) {
- ListItem* item = empty_frames_.First();
- if (item) {
- I420VideoFrame* frame = static_cast<I420VideoFrame*>(item->GetItem());
- assert(frame != NULL);
- delete frame;
- }
- empty_frames_.Erase(item);
+ incoming_frames_.clear();
+
+ for (FrameList::iterator iter = empty_frames_.begin();
+ iter != empty_frames_.end(); ++iter) {
+ delete *iter;
}
+ empty_frames_.clear();
return 0;
}
uint32_t VideoRenderFrames::TimeToNextFrameRelease() {
- int64_t time_to_release = 0;
- ListItem* item = incoming_frames_.First();
- if (item) {
- I420VideoFrame* oldest_frame =
- static_cast<I420VideoFrame*>(item->GetItem());
- time_to_release = oldest_frame->render_time_ms() - render_delay_ms_
- - TickTime::MillisecondTimestamp();
- if (time_to_release < 0) {
- time_to_release = 0;
- }
- } else {
- time_to_release = KEventMaxWaitTimeMs;
+ if (incoming_frames_.empty()) {
+ return KEventMaxWaitTimeMs;
+ }
+ I420VideoFrame* oldest_frame = incoming_frames_.front();
+ int64_t time_to_release = oldest_frame->render_time_ms() - render_delay_ms_
+ - TickTime::MillisecondTimestamp();
+ if (time_to_release < 0) {
+ time_to_release = 0;
}
return static_cast<uint32_t>(time_to_release);
}
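
The rewritten FrameToRender() above leans on std::list::erase() returning an iterator to the next element, so frames can be removed while iterating. A minimal sketch of that pattern, with a plain std::list<int> standing in for the frame queue:

#include <list>

// Returns the largest element not exceeding |threshold|, removing every
// element it passes over, mirroring how FrameToRender() drains ready frames.
int TakeNewestReady(std::list<int>* sorted_items, int threshold) {
  int newest_ready = -1;
  std::list<int>::iterator iter = sorted_items->begin();
  while (iter != sorted_items->end()) {
    if (*iter > threshold)
      break;  // The list is sorted ascending; later elements cannot be ready.
    newest_ready = *iter;
    iter = sorted_items->erase(iter);  // erase() yields the next valid iterator.
  }
  return newest_ready;
}
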
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_frames.h b/chromium/third_party/webrtc/modules/video_render/video_render_frames.h
index 1a5d64d8072..d2e887f8272 100644
--- a/chromium/third_party/webrtc/modules/video_render/video_render_frames.h
+++ b/chromium/third_party/webrtc/modules/video_render/video_render_frames.h
@@ -11,8 +11,9 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_ // NOLINT
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_ // NOLINT
+#include <list>
+
#include "webrtc/modules/video_render/include/video_render.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
namespace webrtc {
@@ -41,6 +42,8 @@ class VideoRenderFrames {
int32_t SetRenderDelay(const uint32_t render_delay);
private:
+ typedef std::list<I420VideoFrame*> FrameList;
+
// 10 seconds for 30 fps.
enum { KMaxNumberOfFrames = 300 };
// Don't render frames with timestamp older than 500ms from now.
@@ -49,9 +52,9 @@ class VideoRenderFrames {
enum { KFutureRenderTimestampMS = 10000 };
// Sorted list with framed to be rendered, oldest first.
- ListWrapper incoming_frames_;
+ FrameList incoming_frames_;
// Empty frames.
- ListWrapper empty_frames_;
+ FrameList empty_frames_;
// Estimated delay from a frame is released until it's rendered.
uint32_t render_delay_ms_;
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_tests.isolate b/chromium/third_party/webrtc/modules/video_render/video_render_tests.isolate
index 397ec04e8c7..15c80141dcb 100644
--- a/chromium/third_party/webrtc/modules/video_render/video_render_tests.isolate
+++ b/chromium/third_party/webrtc/modules/video_render/video_render_tests.isolate
@@ -8,27 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../../data/',
- '../../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/video_render_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/video_render_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/overrides/webrtc/base/basictypes.h b/chromium/third_party/webrtc/overrides/webrtc/base/basictypes.h
new file mode 100644
index 00000000000..c7cec5e7a47
--- /dev/null
+++ b/chromium/third_party/webrtc/overrides/webrtc/base/basictypes.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file overrides the inclusion of webrtc/base/basictypes.h to remove
+// collisions with Chromium's base/basictypes.h. We then add back a few
+// items that Chromium's version doesn't provide, but libjingle expects.
+
+#ifndef OVERRIDES_WEBRTC_BASE_BASICTYPES_H__
+#define OVERRIDES_WEBRTC_BASE_BASICTYPES_H__
+
+#include "base/basictypes.h"
+#include "build/build_config.h"
+
+#ifndef INT_TYPES_DEFINED
+#define INT_TYPES_DEFINED
+
+#ifdef COMPILER_MSVC
+#if _MSC_VER >= 1600
+#include <stdint.h>
+#else
+typedef unsigned __int64 uint64;
+typedef __int64 int64;
+#endif
+#ifndef INT64_C
+#define INT64_C(x) x ## I64
+#endif
+#ifndef UINT64_C
+#define UINT64_C(x) x ## UI64
+#endif
+#define INT64_F "I64"
+#else // COMPILER_MSVC
+#ifndef INT64_C
+#define INT64_C(x) x ## LL
+#endif
+#ifndef UINT64_C
+#define UINT64_C(x) x ## ULL
+#endif
+#ifndef INT64_F
+#define INT64_F "ll"
+#endif
+#endif // COMPILER_MSVC
+#endif // INT_TYPES_DEFINED
+
+// Detect compiler is for x86 or x64.
+#if defined(__x86_64__) || defined(_M_X64) || \
+ defined(__i386__) || defined(_M_IX86)
+#define CPU_X86 1
+#endif
+// Detect compiler is for arm.
+#if defined(__arm__) || defined(_M_ARM)
+#define CPU_ARM 1
+#endif
+#if defined(CPU_X86) && defined(CPU_ARM)
+#error CPU_X86 and CPU_ARM both defined.
+#endif
+#if !defined(ARCH_CPU_BIG_ENDIAN) && !defined(ARCH_CPU_LITTLE_ENDIAN)
+// x86, arm or GCC provided __BYTE_ORDER__ macros
+#if CPU_X86 || CPU_ARM || \
+ (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
+#define ARCH_CPU_LITTLE_ENDIAN
+#elif defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+#define ARCH_CPU_BIG_ENDIAN
+#else
+#error ARCH_CPU_BIG_ENDIAN or ARCH_CPU_LITTLE_ENDIAN should be defined.
+#endif
+#endif
+#if defined(ARCH_CPU_BIG_ENDIAN) && defined(ARCH_CPU_LITTLE_ENDIAN)
+#error ARCH_CPU_BIG_ENDIAN and ARCH_CPU_LITTLE_ENDIAN both defined.
+#endif
+
+#if defined(WEBRTC_WIN)
+typedef int socklen_t;
+#endif
+
+namespace rtc {
+template<class T> inline T _min(T a, T b) { return (a > b) ? b : a; }
+template<class T> inline T _max(T a, T b) { return (a < b) ? b : a; }
+
+// For wait functions that take a number of milliseconds, kForever indicates
+// unlimited time.
+const int kForever = -1;
+}
+
+#if defined(WEBRTC_WIN)
+#if _MSC_VER < 1700
+ #define alignof(t) __alignof(t)
+#endif
+#else // !WEBRTC_WIN
+#define alignof(t) __alignof__(t)
+#endif // !WEBRTC_WIN
+#define RTC_IS_ALIGNED(p, a) (0==(reinterpret_cast<uintptr_t>(p) & ((a)-1)))
+#define ALIGNP(p, t) \
+ (reinterpret_cast<uint8*>(((reinterpret_cast<uintptr_t>(p) + \
+ ((t)-1)) & ~((t)-1))))
+
+// LIBJINGLE_DEFINE_STATIC_LOCAL() is a libjingle's copy
+// of CR_DEFINE_STATIC_LOCAL().
+#define LIBJINGLE_DEFINE_STATIC_LOCAL(type, name, arguments) \
+ CR_DEFINE_STATIC_LOCAL(type, name, arguments)
+
+#endif // OVERRIDES_WEBRTC_BASE_BASICTYPES_H__
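
The ALIGNP and RTC_IS_ALIGNED macros above are typically combined to round a pointer up to a power-of-two boundary and verify the result. A small illustrative sketch (AlignUp is a hypothetical helper; the snippet assumes this override header is included so uint8 and the macros are visible):

#include <assert.h>
#include <stdint.h>

// Hypothetical helper: rounds |raw| up to the next |alignment| boundary
// (|alignment| must be a power of two) and checks the result.
uint8* AlignUp(uint8* raw, uintptr_t alignment) {
  uint8* aligned = ALIGNP(raw, alignment);
  assert(RTC_IS_ALIGNED(aligned, alignment));
  return aligned;
}
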
diff --git a/chromium/third_party/webrtc/overrides/webrtc/base/constructormagic.h b/chromium/third_party/webrtc/overrides/webrtc/base/constructormagic.h
new file mode 100644
index 00000000000..bb89f91f1f9
--- /dev/null
+++ b/chromium/third_party/webrtc/overrides/webrtc/base/constructormagic.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2009 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file overrides the inclusion of webrtc/base/constructormagic.h
+// We do this because constructor magic defines DISALLOW_EVIL_CONSTRUCTORS,
+// but we want to use the version from Chromium.
+
+#ifndef OVERRIDES_WEBRTC_BASE_CONSTRUCTORMAGIC_H__
+#define OVERRIDES_WEBRTC_BASE_CONSTRUCTORMAGIC_H__
+
+#include "base/basictypes.h"
+
+#endif // OVERRIDES_WEBRTC_BASE_CONSTRUCTORMAGIC_H__
diff --git a/chromium/third_party/webrtc/overrides/webrtc/base/logging.cc b/chromium/third_party/webrtc/overrides/webrtc/base/logging.cc
new file mode 100644
index 00000000000..f0c79106d67
--- /dev/null
+++ b/chromium/third_party/webrtc/overrides/webrtc/base/logging.cc
@@ -0,0 +1,318 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "third_party/webrtc/overrides/webrtc/base/logging.h"
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <CoreServices/CoreServices.h>
+#endif // OS_MACOSX
+
+#include <iomanip>
+
+#include "base/atomicops.h"
+#include "base/strings/string_util.h"
+#include "base/threading/platform_thread.h"
+#include "third_party/webrtc/base/ipaddress.h"
+#include "third_party/webrtc/base/stream.h"
+#include "third_party/webrtc/base/stringencode.h"
+#include "third_party/webrtc/base/stringutils.h"
+#include "third_party/webrtc/base/timeutils.h"
+
+// From this file we can't use VLOG since it expands into usage of the __FILE__
+// macro (for correct filtering). The actual logging call from DIAGNOSTIC_LOG is
+// made in ~DiagnosticLogMessage. Note that the second parameter to the LAZY_STREAM
+// macro is true since the filter check has already been done for
+// DIAGNOSTIC_LOG.
+#define LOG_LAZY_STREAM_DIRECT(file_name, line_number, sev) \
+ LAZY_STREAM(logging::LogMessage(file_name, line_number, \
+ -sev).stream(), true)
+
+namespace rtc {
+
+void (*g_logging_delegate_function)(const std::string&) = NULL;
+void (*g_extra_logging_init_function)(
+ void (*logging_delegate_function)(const std::string&)) = NULL;
+#ifndef NDEBUG
+COMPILE_ASSERT(sizeof(base::subtle::Atomic32) == sizeof(base::PlatformThreadId),
+ atomic32_not_same_size_as_platformthreadid);
+base::subtle::Atomic32 g_init_logging_delegate_thread_id = 0;
+#endif
+
+/////////////////////////////////////////////////////////////////////////////
+// Constant Labels
+/////////////////////////////////////////////////////////////////////////////
+
+const char* FindLabel(int value, const ConstantLabel entries[]) {
+ for (int i = 0; entries[i].label; ++i) {
+ if (value == entries[i].value) return entries[i].label;
+ }
+ return 0;
+}
+
+std::string ErrorName(int err, const ConstantLabel* err_table) {
+ if (err == 0)
+ return "No error";
+
+ if (err_table != 0) {
+ if (const char * value = FindLabel(err, err_table))
+ return value;
+ }
+
+ char buffer[16];
+ base::snprintf(buffer, sizeof(buffer), "0x%08x", err);
+ return buffer;
+}
+
+/////////////////////////////////////////////////////////////////////////////
+// Log helper functions
+/////////////////////////////////////////////////////////////////////////////
+
+// Generates extra information for LOG_E.
+static std::string GenerateExtra(LogErrorContext err_ctx,
+ int err,
+ const char* module) {
+ if (err_ctx != ERRCTX_NONE) {
+ std::ostringstream tmp;
+ tmp << ": ";
+ tmp << "[0x" << std::setfill('0') << std::hex << std::setw(8) << err << "]";
+ switch (err_ctx) {
+ case ERRCTX_ERRNO:
+ tmp << " " << strerror(err);
+ break;
+#if defined(WEBRTC_WIN)
+ case ERRCTX_HRESULT: {
+ char msgbuf[256];
+ DWORD flags = FORMAT_MESSAGE_FROM_SYSTEM;
+ HMODULE hmod = GetModuleHandleA(module);
+ if (hmod)
+ flags |= FORMAT_MESSAGE_FROM_HMODULE;
+ if (DWORD len = FormatMessageA(
+ flags, hmod, err,
+ MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
+ msgbuf, sizeof(msgbuf) / sizeof(msgbuf[0]), NULL)) {
+ while ((len > 0) &&
+ isspace(static_cast<unsigned char>(msgbuf[len-1]))) {
+ msgbuf[--len] = 0;
+ }
+ tmp << " " << msgbuf;
+ }
+ break;
+ }
+#endif // OS_WIN
+#if defined(WEBRTC_IOS)
+ case ERRCTX_OSSTATUS:
+ tmp << " " << "Unknown LibJingle error: " << err;
+ break;
+#elif defined(WEBRTC_MAC)
+ case ERRCTX_OSSTATUS: {
+ tmp << " " << nonnull(GetMacOSStatusErrorString(err), "Unknown error");
+ if (const char* desc = GetMacOSStatusCommentString(err)) {
+ tmp << ": " << desc;
+ }
+ break;
+ }
+#endif // OS_MACOSX
+ default:
+ break;
+ }
+ return tmp.str();
+ }
+ return "";
+}
+
+DiagnosticLogMessage::DiagnosticLogMessage(const char* file,
+ int line,
+ LoggingSeverity severity,
+ bool log_to_chrome,
+ LogErrorContext err_ctx,
+ int err)
+ : file_name_(file),
+ line_(line),
+ severity_(severity),
+ log_to_chrome_(log_to_chrome) {
+ extra_ = GenerateExtra(err_ctx, err, NULL);
+}
+
+DiagnosticLogMessage::DiagnosticLogMessage(const char* file,
+ int line,
+ LoggingSeverity severity,
+ bool log_to_chrome,
+ LogErrorContext err_ctx,
+ int err,
+ const char* module)
+ : file_name_(file),
+ line_(line),
+ severity_(severity),
+ log_to_chrome_(log_to_chrome) {
+ extra_ = GenerateExtra(err_ctx, err, module);
+}
+
+DiagnosticLogMessage::~DiagnosticLogMessage() {
+ print_stream_ << extra_;
+ const std::string& str = print_stream_.str();
+ if (log_to_chrome_)
+ LOG_LAZY_STREAM_DIRECT(file_name_, line_, severity_) << str;
+ if (g_logging_delegate_function && severity_ <= LS_INFO) {
+ g_logging_delegate_function(str);
+ }
+}
+
+// Note: this function is a copy from the overridden libjingle implementation.
+void LogMultiline(LoggingSeverity level, const char* label, bool input,
+ const void* data, size_t len, bool hex_mode,
+ LogMultilineState* state) {
+ if (!LOG_CHECK_LEVEL_V(level))
+ return;
+
+ const char * direction = (input ? " << " : " >> ");
+
+ // NULL data means to flush our count of unprintable characters.
+ if (!data) {
+ if (state && state->unprintable_count_[input]) {
+ LOG_V(level) << label << direction << "## "
+ << state->unprintable_count_[input]
+ << " consecutive unprintable ##";
+ state->unprintable_count_[input] = 0;
+ }
+ return;
+ }
+
+ // The ctype classification functions want unsigned chars.
+ const unsigned char* udata = static_cast<const unsigned char*>(data);
+
+ if (hex_mode) {
+ const size_t LINE_SIZE = 24;
+ char hex_line[LINE_SIZE * 9 / 4 + 2], asc_line[LINE_SIZE + 1];
+ while (len > 0) {
+ memset(asc_line, ' ', sizeof(asc_line));
+ memset(hex_line, ' ', sizeof(hex_line));
+ size_t line_len = _min(len, LINE_SIZE);
+ for (size_t i = 0; i < line_len; ++i) {
+ unsigned char ch = udata[i];
+ asc_line[i] = isprint(ch) ? ch : '.';
+ hex_line[i*2 + i/4] = hex_encode(ch >> 4);
+ hex_line[i*2 + i/4 + 1] = hex_encode(ch & 0xf);
+ }
+ asc_line[sizeof(asc_line)-1] = 0;
+ hex_line[sizeof(hex_line)-1] = 0;
+ LOG_V(level) << label << direction
+ << asc_line << " " << hex_line << " ";
+ udata += line_len;
+ len -= line_len;
+ }
+ return;
+ }
+
+ size_t consecutive_unprintable = state ? state->unprintable_count_[input] : 0;
+
+ const unsigned char* end = udata + len;
+ while (udata < end) {
+ const unsigned char* line = udata;
+ const unsigned char* end_of_line = strchrn<unsigned char>(udata,
+ end - udata,
+ '\n');
+ if (!end_of_line) {
+ udata = end_of_line = end;
+ } else {
+ udata = end_of_line + 1;
+ }
+
+ bool is_printable = true;
+
+ // If we are in unprintable mode, we need to see a line of at least
+ // kMinPrintableLine characters before we'll switch back.
+ const ptrdiff_t kMinPrintableLine = 4;
+ if (consecutive_unprintable && ((end_of_line - line) < kMinPrintableLine)) {
+ is_printable = false;
+ } else {
+ // Determine if the line contains only whitespace and printable
+ // characters.
+ bool is_entirely_whitespace = true;
+ for (const unsigned char* pos = line; pos < end_of_line; ++pos) {
+ if (isspace(*pos))
+ continue;
+ is_entirely_whitespace = false;
+ if (!isprint(*pos)) {
+ is_printable = false;
+ break;
+ }
+ }
+ // Treat an empty line following unprintable data as unprintable.
+ if (consecutive_unprintable && is_entirely_whitespace) {
+ is_printable = false;
+ }
+ }
+ if (!is_printable) {
+ consecutive_unprintable += (udata - line);
+ continue;
+ }
+ // Print out the current line, but prefix with a count of prior unprintable
+ // characters.
+ if (consecutive_unprintable) {
+ LOG_V(level) << label << direction << "## " << consecutive_unprintable
+ << " consecutive unprintable ##";
+ consecutive_unprintable = 0;
+ }
+ // Strip off trailing whitespace.
+ while ((end_of_line > line) && isspace(*(end_of_line-1))) {
+ --end_of_line;
+ }
+ // Filter out any private data
+ std::string substr(reinterpret_cast<const char*>(line), end_of_line - line);
+ std::string::size_type pos_private = substr.find("Email");
+ if (pos_private == std::string::npos) {
+ pos_private = substr.find("Passwd");
+ }
+ if (pos_private == std::string::npos) {
+ LOG_V(level) << label << direction << substr;
+ } else {
+ LOG_V(level) << label << direction << "## omitted for privacy ##";
+ }
+ }
+
+ if (state) {
+ state->unprintable_count_[input] = consecutive_unprintable;
+ }
+}
+
+void InitDiagnosticLoggingDelegateFunction(
+ void (*delegate)(const std::string&)) {
+#ifndef NDEBUG
+ // Ensure that this function is always called from the same thread.
+ base::subtle::NoBarrier_CompareAndSwap(&g_init_logging_delegate_thread_id, 0,
+ static_cast<base::subtle::Atomic32>(base::PlatformThread::CurrentId()));
+ DCHECK_EQ(
+ g_init_logging_delegate_thread_id,
+ static_cast<base::subtle::Atomic32>(base::PlatformThread::CurrentId()));
+#endif
+ CHECK(delegate);
+ // This function may be called with the same argument several times if the
+ // page is reloaded or there are several PeerConnections on one page with
+ // logging enabled. This is OK, we simply don't have to do anything.
+ if (delegate == g_logging_delegate_function)
+ return;
+ CHECK(!g_logging_delegate_function);
+#ifdef NDEBUG
+ IPAddress::set_strip_sensitive(true);
+#endif
+ g_logging_delegate_function = delegate;
+
+ if (g_extra_logging_init_function)
+ g_extra_logging_init_function(delegate);
+}
+
+void SetExtraLoggingInit(
+ void (*function)(void (*delegate)(const std::string&))) {
+ CHECK(function);
+ CHECK(!g_extra_logging_init_function);
+ g_extra_logging_init_function = function;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/overrides/webrtc/base/logging.h b/chromium/third_party/webrtc/overrides/webrtc/base/logging.h
new file mode 100644
index 00000000000..d8dfca2cecb
--- /dev/null
+++ b/chromium/third_party/webrtc/overrides/webrtc/base/logging.h
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2012 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file overrides the logging macros in libjingle (webrtc/base/logging.h).
+// Instead of using libjingle's logging implementation, the libjingle macros are
+// mapped to the corresponding base/logging.h macro (chromium's VLOG).
+// If this file is included outside of libjingle (e.g. in wrapper code) it
+// should be included after base/logging.h (if any) or compiler errors or
+// unexpected behavior may occur (macros that have the same name in libjingle as
+// in chromium will use the libjingle definition if this file is included
+// first).
+
+// Setting the LoggingSeverity (and lower) that should be written to file should
+// be done via command line by specifying the flags:
+// --vmodule or --v; see base/logging.h for details on how to use them.
+// Specifying what file to write to is done using InitLogging also in
+// base/logging.h.
+
+// The macros and classes declared in here are not described as they are
+// NOT TO BE USED outside of libjingle.
+
+#ifndef THIRD_PARTY_LIBJINGLE_OVERRIDES_WEBRTC_BASE_LOGGING_H_
+#define THIRD_PARTY_LIBJINGLE_OVERRIDES_WEBRTC_BASE_LOGGING_H_
+
+#include <sstream>
+#include <string>
+
+#include "base/logging.h"
+#include "third_party/webrtc/base/scoped_ref_ptr.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// ConstantLabel can be used to easily generate string names from constant
+// values. This can be useful for logging descriptive names of error messages.
+// Usage:
+// const ConstantLabel LIBRARY_ERRORS[] = {
+// KLABEL(SOME_ERROR),
+// KLABEL(SOME_OTHER_ERROR),
+// ...
+// LASTLABEL
+// }
+//
+// int err = LibraryFunc();
+// LOG(LS_ERROR) << "LibraryFunc returned: "
+// << ErrorName(err, LIBRARY_ERRORS);
+
+struct ConstantLabel {
+ int value;
+ const char* label;
+};
+#define KLABEL(x) { x, #x }
+#define LASTLABEL { 0, 0 }
+
+const char* FindLabel(int value, const ConstantLabel entries[]);
+std::string ErrorName(int err, const ConstantLabel* err_table);
+
+//////////////////////////////////////////////////////////////////////
+// Note that the non-standard LoggingSeverity aliases exist because they are
+// still in broad use. The meanings of the levels are:
+// LS_SENSITIVE: Information which should only be logged with the consent
+// of the user, due to privacy concerns.
+// LS_VERBOSE: This level is for data which we do not want to appear in the
+// normal debug log, but should appear in diagnostic logs.
+// LS_INFO: Chatty level used in debugging for all sorts of things, the default
+// in debug builds.
+// LS_WARNING: Something that may warrant investigation.
+// LS_ERROR: Something that should not have occurred.
+// Note that LoggingSeverity is mapped over to Chromium's verbosity levels where
+// anything lower than or equal to the current verbosity level is written to
+// file which is the opposite of logging severity in libjingle where higher
+// severity numbers than or equal to the current severity level are written to
+// file. Also, note that the values are explicitly defined here for convenience
+// since the command line flag must be set using numerical values.
+enum LoggingSeverity { LS_ERROR = 1,
+ LS_WARNING = 2,
+ LS_INFO = 3,
+ LS_VERBOSE = 4,
+ LS_SENSITIVE = 5,
+ INFO = LS_INFO,
+ WARNING = LS_WARNING,
+ LERROR = LS_ERROR };
+
+// LogErrorContext assists in interpreting the meaning of an error value.
+enum LogErrorContext {
+ ERRCTX_NONE,
+ ERRCTX_ERRNO, // System-local errno
+ ERRCTX_HRESULT, // Windows HRESULT
+ ERRCTX_OSSTATUS, // MacOS OSStatus
+
+ // Abbreviations for LOG_E macro
+ ERRCTX_EN = ERRCTX_ERRNO, // LOG_E(sev, EN, x)
+ ERRCTX_HR = ERRCTX_HRESULT, // LOG_E(sev, HR, x)
+ ERRCTX_OS = ERRCTX_OSSTATUS, // LOG_E(sev, OS, x)
+};
+
+// Class that writes a log message to the logging delegate ("WebRTC logging
+// stream" in Chrome) and to Chrome's logging stream.
+class DiagnosticLogMessage {
+ public:
+ DiagnosticLogMessage(const char* file, int line, LoggingSeverity severity,
+ bool log_to_chrome, LogErrorContext err_ctx, int err);
+ DiagnosticLogMessage(const char* file, int line, LoggingSeverity severity,
+ bool log_to_chrome, LogErrorContext err_ctx, int err,
+ const char* module);
+ ~DiagnosticLogMessage();
+
+ void CreateTimestamp();
+
+ std::ostream& stream() { return print_stream_; }
+
+ private:
+ const char* file_name_;
+ const int line_;
+ const LoggingSeverity severity_;
+ const bool log_to_chrome_;
+
+ std::string extra_;
+
+ std::ostringstream print_stream_;
+};
+
+// This class is used to explicitly ignore values in the conditional
+// logging macros. This avoids compiler warnings like "value computed
+// is not used" and "statement has no effect".
+class LogMessageVoidify {
+ public:
+ LogMessageVoidify() { }
+ // This has to be an operator with a precedence lower than << but
+ // higher than ?:
+ void operator&(std::ostream&) { }
+};
+
+//////////////////////////////////////////////////////////////////////
+// Logging Helpers
+//////////////////////////////////////////////////////////////////////
+
+class LogMultilineState {
+ public:
+ size_t unprintable_count_[2];
+ LogMultilineState() {
+ unprintable_count_[0] = unprintable_count_[1] = 0;
+ }
+};
+
+// When possible, pass an optional state variable to track various data across
+// multiple calls to LogMultiline. Otherwise, pass NULL.
+void LogMultiline(LoggingSeverity level, const char* label, bool input,
+ const void* data, size_t len, bool hex_mode,
+ LogMultilineState* state);
+
+// TODO(grunell): Change name to InitDiagnosticLoggingDelegate or
+// InitDiagnosticLogging. Change also in init_webrtc.h/cc.
+// TODO(grunell): typedef the delegate function.
+void InitDiagnosticLoggingDelegateFunction(
+ void (*delegate)(const std::string&));
+
+void SetExtraLoggingInit(
+ void (*function)(void (*delegate)(const std::string&)));
+} // namespace rtc
+
+//////////////////////////////////////////////////////////////////////
+// Libjingle macros which are mapped over to their VLOG equivalent in
+// base/logging.h
+//////////////////////////////////////////////////////////////////////
+
+#if defined(LOGGING_INSIDE_WEBRTC)
+
+#define DIAGNOSTIC_LOG(sev, ctx, err, ...) \
+ rtc::DiagnosticLogMessage( \
+ __FILE__, __LINE__, sev, VLOG_IS_ON(sev), \
+ rtc::ERRCTX_ ## ctx, err, ##__VA_ARGS__).stream()
+
+#define LOG_CHECK_LEVEL(sev) VLOG_IS_ON(rtc::sev)
+#define LOG_CHECK_LEVEL_V(sev) VLOG_IS_ON(sev)
+
+#define LOG_V(sev) DIAGNOSTIC_LOG(sev, NONE, 0)
+#undef LOG
+#define LOG(sev) DIAGNOSTIC_LOG(rtc::sev, NONE, 0)
+
+// The _F version prefixes the message with the current function name.
+#if defined(__GNUC__) && defined(_DEBUG)
+#define LOG_F(sev) LOG(sev) << __PRETTY_FUNCTION__ << ": "
+#else
+#define LOG_F(sev) LOG(sev) << __FUNCTION__ << ": "
+#endif
+
+#define LOG_E(sev, ctx, err, ...) \
+ DIAGNOSTIC_LOG(rtc::sev, ctx, err, ##__VA_ARGS__)
+
+#undef LOG_ERRNO_EX
+#define LOG_ERRNO_EX(sev, err) LOG_E(sev, ERRNO, err)
+#undef LOG_ERRNO
+#define LOG_ERRNO(sev) LOG_ERRNO_EX(sev, errno)
+
+#if defined(WEBRTC_WIN)
+#define LOG_GLE_EX(sev, err) LOG_E(sev, HRESULT, err)
+#define LOG_GLE(sev) LOG_GLE_EX(sev, GetLastError())
+#define LOG_GLEM(sev, mod) LOG_E(sev, HRESULT, GetLastError(), mod)
+#define LOG_ERR_EX(sev, err) LOG_GLE_EX(sev, err)
+#define LOG_ERR(sev) LOG_GLE(sev)
+#define LAST_SYSTEM_ERROR (::GetLastError())
+#else
+#define LOG_ERR_EX(sev, err) LOG_ERRNO_EX(sev, err)
+#define LOG_ERR(sev) LOG_ERRNO(sev)
+#define LAST_SYSTEM_ERROR (errno)
+#endif // OS_WIN
+
+#undef PLOG
+#define PLOG(sev, err) LOG_ERR_EX(sev, err)
+
+#endif // LOGGING_INSIDE_WEBRTC
+
+#endif // THIRD_PARTY_LIBJINGLE_OVERRIDES_WEBRTC_BASE_LOGGING_H_
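
A short sketch of the ConstantLabel/ErrorName pattern documented in the header above, assuming LOGGING_INSIDE_WEBRTC is defined so LOG maps to DIAGNOSTIC_LOG; the error constants are hypothetical:

#include "third_party/webrtc/overrides/webrtc/base/logging.h"

// Hypothetical error constants for illustration only.
enum { SOME_ERROR = 1, SOME_OTHER_ERROR = 2 };

const rtc::ConstantLabel LIBRARY_ERRORS[] = {
  KLABEL(SOME_ERROR),
  KLABEL(SOME_OTHER_ERROR),
  LASTLABEL
};

void ReportLibraryError(int err) {
  // With LOGGING_INSIDE_WEBRTC defined, LOG(LS_ERROR) expands to DIAGNOSTIC_LOG.
  LOG(LS_ERROR) << "LibraryFunc returned: "
                << rtc::ErrorName(err, LIBRARY_ERRORS);
}
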
diff --git a/chromium/third_party/webrtc/overrides/webrtc/base/win32socketinit.cc b/chromium/third_party/webrtc/overrides/webrtc/base/win32socketinit.cc
new file mode 100644
index 00000000000..929ce8d3632
--- /dev/null
+++ b/chromium/third_party/webrtc/overrides/webrtc/base/win32socketinit.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2006 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Redirect Libjingle's winsock initialization activity into Chromium's
+// singleton object that manages precisely that for the browser.
+
+#include "webrtc/base/win32socketinit.h"
+
+#include "net/base/winsock_init.h"
+
+#if !defined(WEBRTC_WIN)
+#error "Only compile this on Windows"
+#endif
+
+namespace rtc {
+
+void EnsureWinsockInit() {
+ net::EnsureWinsockInit();
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/supplement.gypi b/chromium/third_party/webrtc/supplement.gypi
new file mode 100644
index 00000000000..2fad5745848
--- /dev/null
+++ b/chromium/third_party/webrtc/supplement.gypi
@@ -0,0 +1,6 @@
+{
+ 'variables': {
+ 'build_with_chromium': 0,
+ 'use_sanitizer_options': 0,
+ }
+}
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/aligned_malloc.h b/chromium/third_party/webrtc/system_wrappers/interface/aligned_malloc.h
index 6409999e8f0..5d343cde7c3 100644
--- a/chromium/third_party/webrtc/system_wrappers/interface/aligned_malloc.h
+++ b/chromium/third_party/webrtc/system_wrappers/interface/aligned_malloc.h
@@ -19,8 +19,6 @@
#include <stddef.h>
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-
namespace webrtc {
// Returns a pointer to the first boundary of |alignment| bytes following the
@@ -48,10 +46,12 @@ T* AlignedMalloc(size_t size, size_t alignment) {
return reinterpret_cast<T*>(AlignedMalloc(size, alignment));
}
-// Scoped pointer to AlignedMalloc-memory.
-template<typename T>
-struct Allocator {
- typedef scoped_ptr_malloc<T, AlignedFree> scoped_ptr_aligned;
+// Deleter for use with scoped_ptr. E.g., use as
+// scoped_ptr<Foo, AlignedFreeDeleter> foo;
+struct AlignedFreeDeleter {
+ inline void operator()(void* ptr) const {
+ AlignedFree(ptr);
+ }
};
} // namespace webrtc
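
As the comment above suggests, the new AlignedFreeDeleter is meant to pair with scoped_ptr in place of the removed scoped_ptr_aligned typedef. A hedged sketch of that usage (buffer size and alignment are arbitrary):

#include "webrtc/system_wrappers/interface/aligned_malloc.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

void UseAlignedBuffer() {
  // 64 floats on a 32-byte boundary; the deleter releases them via AlignedFree.
  webrtc::scoped_ptr<float, webrtc::AlignedFreeDeleter> buffer(
      webrtc::AlignedMalloc<float>(64 * sizeof(float), 32));
  buffer.get()[0] = 1.0f;
}
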
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/atomic32.h b/chromium/third_party/webrtc/system_wrappers/interface/atomic32.h
index 08ab4f255d8..8633e263622 100644
--- a/chromium/third_party/webrtc/system_wrappers/interface/atomic32.h
+++ b/chromium/third_party/webrtc/system_wrappers/interface/atomic32.h
@@ -17,8 +17,8 @@
#include <stddef.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_types.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/clock.h b/chromium/third_party/webrtc/system_wrappers/interface/clock.h
index ce3269137a9..c03f976d40b 100644
--- a/chromium/third_party/webrtc/system_wrappers/interface/clock.h
+++ b/chromium/third_party/webrtc/system_wrappers/interface/clock.h
@@ -11,10 +11,14 @@
#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CLOCK_H_
#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CLOCK_H_
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/typedefs.h"
namespace webrtc {
+class RWLockWrapper;
+
// January 1970, in NTP seconds.
const uint32_t kNtpJan1970 = 2208988800UL;
@@ -28,17 +32,17 @@ class Clock {
// Return a timestamp in milliseconds relative to some arbitrary source; the
// source is fixed for this clock.
- virtual int64_t TimeInMilliseconds() = 0;
+ virtual int64_t TimeInMilliseconds() const = 0;
// Return a timestamp in microseconds relative to some arbitrary source; the
// source is fixed for this clock.
- virtual int64_t TimeInMicroseconds() = 0;
+ virtual int64_t TimeInMicroseconds() const = 0;
// Retrieve an NTP absolute timestamp in seconds and fractions of a second.
- virtual void CurrentNtp(uint32_t& seconds, uint32_t& fractions) = 0;
+ virtual void CurrentNtp(uint32_t& seconds, uint32_t& fractions) const = 0;
// Retrieve an NTP absolute timestamp in milliseconds.
- virtual int64_t CurrentNtpInMilliseconds() = 0;
+ virtual int64_t CurrentNtpInMilliseconds() const = 0;
// Converts an NTP timestamp to a millisecond timestamp.
static int64_t NtpToMs(uint32_t seconds, uint32_t fractions);
@@ -51,21 +55,22 @@ class SimulatedClock : public Clock {
public:
explicit SimulatedClock(int64_t initial_time_us);
- virtual ~SimulatedClock() {}
+ virtual ~SimulatedClock();
// Return a timestamp in milliseconds relative to some arbitrary source; the
// source is fixed for this clock.
- virtual int64_t TimeInMilliseconds() OVERRIDE;
+ virtual int64_t TimeInMilliseconds() const OVERRIDE;
// Return a timestamp in microseconds relative to some arbitrary source; the
// source is fixed for this clock.
- virtual int64_t TimeInMicroseconds() OVERRIDE;
+ virtual int64_t TimeInMicroseconds() const OVERRIDE;
// Retrieve an NTP absolute timestamp in milliseconds.
- virtual void CurrentNtp(uint32_t& seconds, uint32_t& fractions) OVERRIDE;
+ virtual void CurrentNtp(uint32_t& seconds,
+ uint32_t& fractions) const OVERRIDE;
// Converts an NTP timestamp to a millisecond timestamp.
- virtual int64_t CurrentNtpInMilliseconds() OVERRIDE;
+ virtual int64_t CurrentNtpInMilliseconds() const OVERRIDE;
// Advance the simulated clock with a given number of milliseconds or
// microseconds.
@@ -73,7 +78,8 @@ class SimulatedClock : public Clock {
void AdvanceTimeMicroseconds(int64_t microseconds);
private:
- int64_t time_us_;
+ int64_t time_us_ GUARDED_BY(lock_);
+ scoped_ptr<RWLockWrapper> lock_;
};
}; // namespace webrtc
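
With the accessors now const, a SimulatedClock can be queried and advanced as sketched below (values are arbitrary):

#include "webrtc/system_wrappers/interface/clock.h"

int64_t ElapsedAfterAdvance() {
  webrtc::SimulatedClock clock(0);       // Start at time 0 (microseconds).
  const int64_t start_ms = clock.TimeInMilliseconds();
  clock.AdvanceTimeMicroseconds(20000);  // Advance by 20 ms.
  return clock.TimeInMilliseconds() - start_ms;  // Expected: 20.
}
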
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/compile_assert.h b/chromium/third_party/webrtc/system_wrappers/interface/compile_assert.h
index cdeaa5676bb..a075184b510 100644
--- a/chromium/third_party/webrtc/system_wrappers/interface/compile_assert.h
+++ b/chromium/third_party/webrtc/system_wrappers/interface/compile_assert.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-// Borrowed from Chromium's src/base/basictypes.h.
+// Borrowed from Chromium's src/base/macros.h.
#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_COMPILE_ASSERT_H_
@@ -31,13 +31,20 @@
// TODO(ajm): Hack to avoid multiple definitions until the base/ of webrtc and
// libjingle are merged.
#if !defined(COMPILE_ASSERT)
+#if __cplusplus >= 201103L
+// Under C++11, just use static_assert.
+#define COMPILE_ASSERT(expr, msg) static_assert(expr, #msg)
+
+#else
template <bool>
struct CompileAssert {
};
#define COMPILE_ASSERT(expr, msg) \
typedef CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1]
-#endif // COMPILE_ASSERT
+
+#endif // __cplusplus >= 201103L
+#endif // !defined(COMPILE_ASSERT)
// Implementation details of COMPILE_ASSERT:
//
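
Typical usage of COMPILE_ASSERT, which under C++11 now expands to a plain static_assert; the asserted condition here is only an example:

#include <stdint.h>

#include "webrtc/system_wrappers/interface/compile_assert.h"

// Fails to compile if int64_t is not 8 bytes; with C++11 this becomes
// static_assert(sizeof(int64_t) == 8, "int64_t_must_be_8_bytes").
COMPILE_ASSERT(sizeof(int64_t) == 8, int64_t_must_be_8_bytes);
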
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/constructor_magic.h b/chromium/third_party/webrtc/system_wrappers/interface/constructor_magic.h
deleted file mode 100644
index b2aabc574f4..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/interface/constructor_magic.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * WebRtc
- * Copy from third_party/libjingle/source/talk/base/constructormagic.h
- */
-
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONSTRUCTOR_MAGIC_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONSTRUCTOR_MAGIC_H_
-
-#ifndef DISALLOW_ASSIGN
-#define DISALLOW_ASSIGN(TypeName) \
- void operator=(const TypeName&)
-#endif
-
-#ifndef DISALLOW_COPY_AND_ASSIGN
-// A macro to disallow the evil copy constructor and operator= functions
-// This should be used in the private: declarations for a class
-#define DISALLOW_COPY_AND_ASSIGN(TypeName) \
- TypeName(const TypeName&); \
- DISALLOW_ASSIGN(TypeName)
-#endif
-
-#ifndef DISALLOW_EVIL_CONSTRUCTORS
-// Alternative, less-accurate legacy name.
-#define DISALLOW_EVIL_CONSTRUCTORS(TypeName) \
- DISALLOW_COPY_AND_ASSIGN(TypeName)
-#endif
-
-#ifndef DISALLOW_IMPLICIT_CONSTRUCTORS
-// A macro to disallow all the implicit constructors, namely the
-// default constructor, copy constructor and operator= functions.
-//
-// This should be used in the private: declarations for a class
-// that wants to prevent anyone from instantiating it. This is
-// especially useful for classes containing only static methods.
-#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName) \
- TypeName(); \
- DISALLOW_EVIL_CONSTRUCTORS(TypeName)
-#endif
-
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CONSTRUCTOR_MAGIC_H_
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/critical_section_wrapper.h b/chromium/third_party/webrtc/system_wrappers/interface/critical_section_wrapper.h
index 0253a282d18..4979b5c7dd5 100644
--- a/chromium/third_party/webrtc/system_wrappers/interface/critical_section_wrapper.h
+++ b/chromium/third_party/webrtc/system_wrappers/interface/critical_section_wrapper.h
@@ -15,9 +15,10 @@
// read/write locks instead.
#include "webrtc/common_types.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
namespace webrtc {
-class CriticalSectionWrapper {
+class LOCKABLE CriticalSectionWrapper {
public:
// Factory method, constructor disabled
static CriticalSectionWrapper* CreateCriticalSection();
@@ -26,33 +27,25 @@ class CriticalSectionWrapper {
// Tries to grab lock, beginning of a critical section. Will wait for the
// lock to become available if the grab failed.
- virtual void Enter() = 0;
+ virtual void Enter() EXCLUSIVE_LOCK_FUNCTION() = 0;
// Returns a grabbed lock, end of critical section.
- virtual void Leave() = 0;
+ virtual void Leave() UNLOCK_FUNCTION() = 0;
};
// RAII extension of the critical section. Prevents Enter/Leave mismatches and
// provides more compact critical section syntax.
-class CriticalSectionScoped {
+class SCOPED_LOCKABLE CriticalSectionScoped {
public:
explicit CriticalSectionScoped(CriticalSectionWrapper* critsec)
- : ptr_crit_sec_(critsec) {
+ EXCLUSIVE_LOCK_FUNCTION(critsec)
+ : ptr_crit_sec_(critsec) {
ptr_crit_sec_->Enter();
}
- ~CriticalSectionScoped() {
- if (ptr_crit_sec_) {
- Leave();
- }
- }
+ ~CriticalSectionScoped() UNLOCK_FUNCTION() { ptr_crit_sec_->Leave(); }
private:
- void Leave() {
- ptr_crit_sec_->Leave();
- ptr_crit_sec_ = 0;
- }
-
CriticalSectionWrapper* ptr_crit_sec_;
};
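
A brief sketch of the RAII usage this header is built around, now that the scoped lock unconditionally calls Leave() in its destructor; the Counter class is purely illustrative:

#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

class Counter {
 public:
  Counter()
      : crit_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
        value_(0) {}

  int Increment() {
    webrtc::CriticalSectionScoped lock(crit_.get());  // Enter() here...
    return ++value_;
  }  // ...Leave() in the scoped lock's destructor.

 private:
  webrtc::scoped_ptr<webrtc::CriticalSectionWrapper> crit_;
  int value_;
};
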
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/field_trial.h b/chromium/third_party/webrtc/system_wrappers/interface/field_trial.h
new file mode 100644
index 00000000000..f2cf8802760
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/interface/field_trial.h
@@ -0,0 +1,70 @@
+//
+// Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the LICENSE file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+//
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FIELD_TRIAL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FIELD_TRIAL_H_
+
+#include <string>
+
+#include "webrtc/common_types.h"
+
+// Field trials allow webrtc clients (such as Chrome) to turn on feature code
+// in binaries out in the field and gather information with that.
+//
+// WebRTC clients MUST provide an implementation of:
+//
+// std::string webrtc::field_trial::FindFullName(const std::string& trial).
+//
+// Or link with a default one provided in:
+//
+// system_wrappers/source/system_wrappers.gyp:field_trial_default
+//
+//
+// They are designed to wire up directly to chrome field trials and to speed up
+// developers by reducing the need to wire APIs to control whether a feature is
+// on/off. E.g. to experiment with a new method that could lead to a different
+// trade-off between CPU/bandwidth:
+//
+// 1 - Develop the feature with default behaviour off:
+//
+// if (FieldTrial::FindFullName("WebRTCExperimentMethod2") == "Enabled")
+// method2();
+// else
+// method1();
+//
+// 2 - Once the changes are rolled to chrome, the new code path can be
+// controlled as normal chrome field trials.
+//
+// 3 - Evaluate the new feature and clean the code paths.
+//
+// Notes:
+// - NOT every feature is a candidate to be controlled by this mechanism as
+// it may require negotiation between involved parties (e.g. SDP).
+//
+// TODO(andresp): since chrome --force-fieldtrials does not mark the trial
+// as active, it does not get propagated to the renderer process. For now one
+// needs to push a config with start_active:true or run a local finch
+// server.
+//
+// TODO(andresp): find out how to get bots to run tests with trials enabled.
+
+namespace webrtc {
+namespace field_trial {
+
+// Returns the group name chosen for the named trial, or the empty string
+// if the trial does not exist.
+//
+// Note: To keep things tidy, prefix all the trial names with WebRTC.
+std::string FindFullName(const std::string& name);
+
+} // namespace field_trial
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_FIELD_TRIAL_H_
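
Following the pattern in the comments above, a client-side check might look like the sketch below; the trial name is made up for illustration:

#include "webrtc/system_wrappers/interface/field_trial.h"

void MaybeUseNewMethod() {
  // "WebRTC-NewSmoothingMethod" is a hypothetical trial name.
  if (webrtc::field_trial::FindFullName("WebRTC-NewSmoothingMethod") ==
      "Enabled") {
    // New code path under experiment.
  } else {
    // Default behaviour.
  }
}
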
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/list_wrapper.h b/chromium/third_party/webrtc/system_wrappers/interface/list_wrapper.h
deleted file mode 100644
index fe6607195a5..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/interface/list_wrapper.h
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LIST_WRAPPER_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LIST_WRAPPER_H_
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-
-class ListItem {
- friend class ListWrapper;
-
- public:
- ListItem(const void* ptr);
- ListItem(const unsigned int item);
- virtual ~ListItem();
- void* GetItem() const;
- unsigned int GetUnsignedItem() const;
-
- protected:
- ListItem* next_;
- ListItem* prev_;
-
- private:
- const void* item_ptr_;
- const unsigned int item_;
-};
-
-class ListWrapper {
- public:
- ListWrapper();
- virtual ~ListWrapper();
-
- // Returns the number of elements stored in the list.
- unsigned int GetSize() const;
-
- // Puts a pointer to anything last in the list.
- int PushBack(const void* ptr);
- // Puts a pointer to anything first in the list.
- int PushFront(const void* ptr);
-
- // Puts a copy of the specified integer last in the list.
- int PushBack(const unsigned int item_id);
- // Puts a copy of the specified integer first in the list.
- int PushFront(const unsigned int item_id);
-
- // Pops the first ListItem from the list
- int PopFront();
-
- // Pops the last ListItem from the list
- int PopBack();
-
- // Returns true if the list is empty
- bool Empty() const;
-
- // Returns a pointer to the first ListItem in the list.
- ListItem* First() const;
-
- // Returns a pointer to the last ListItem in the list.
- ListItem* Last() const;
-
- // Returns a pointer to the ListItem stored after item in the list.
- ListItem* Next(ListItem* item) const;
-
- // Returns a pointer to the ListItem stored before item in the list.
- ListItem* Previous(ListItem* item) const;
-
- // Removes item from the list.
- int Erase(ListItem* item);
-
- // Insert list item after existing_previous_item. Please note that new_item
- // must be created using new ListItem(). The map will take ownership of
- // new_item following a successfull insert. If insert fails new_item will
- // not be released by the List
- int Insert(ListItem* existing_previous_item,
- ListItem* new_item);
-
- // Insert list item before existing_next_item. Please note that new_item
- // must be created using new ListItem(). The map will take ownership of
- // new_item following a successfull insert. If insert fails new_item will
- // not be released by the List
- int InsertBefore(ListItem* existing_next_item,
- ListItem* new_item);
-
- private:
- void PushBackImpl(ListItem* item);
- void PushFrontImpl(ListItem* item);
-
- CriticalSectionWrapper* critical_section_;
- ListItem* first_;
- ListItem* last_;
- unsigned int size_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_LIST_WRAPPER_H_
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/rtp_to_ntp.h b/chromium/third_party/webrtc/system_wrappers/interface/rtp_to_ntp.h
new file mode 100644
index 00000000000..dfc25cd9e93
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/interface/rtp_to_ntp.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SYSTEM_WRAPPERS_INTERFACE_RTP_TO_NTP_H_
+#define SYSTEM_WRAPPERS_INTERFACE_RTP_TO_NTP_H_
+
+#include <list>
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct RtcpMeasurement {
+ RtcpMeasurement();
+ RtcpMeasurement(uint32_t ntp_secs, uint32_t ntp_frac, uint32_t timestamp);
+ uint32_t ntp_secs;
+ uint32_t ntp_frac;
+ uint32_t rtp_timestamp;
+};
+
+typedef std::list<RtcpMeasurement> RtcpList;
+
+// Updates |rtcp_list| with timestamps from the latest RTCP SR.
+// |new_rtcp_sr| will be set to true if these timestamps have not previously
+// been added to |rtcp_list|.
+bool UpdateRtcpList(uint32_t ntp_secs,
+ uint32_t ntp_frac,
+ uint32_t rtp_timestamp,
+ RtcpList* rtcp_list,
+ bool* new_rtcp_sr);
+
+// Converts an RTP timestamp to the NTP domain in milliseconds using two
+// (RTP timestamp, NTP timestamp) pairs.
+bool RtpToNtpMs(int64_t rtp_timestamp, const RtcpList& rtcp,
+ int64_t* timestamp_in_ms);
+
+// Returns 1 if there has been a forward wrap around, 0 if there has been no wrap
+// around and -1 if there has been a backwards wrap around (i.e. reordering).
+int CheckForWrapArounds(uint32_t rtp_timestamp, uint32_t rtcp_rtp_timestamp);
+
+} // namespace webrtc
+
+#endif // SYSTEM_WRAPPERS_INTERFACE_RTP_TO_NTP_H_
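
A hedged sketch of wiring the two declarations above together: RTCP SR timestamps are fed into an RtcpList, and an RTP timestamp is then mapped to NTP milliseconds (EstimateNtpMs is a hypothetical helper):

#include "webrtc/system_wrappers/interface/rtp_to_ntp.h"

bool EstimateNtpMs(uint32_t ntp_secs, uint32_t ntp_frac, uint32_t sr_rtp_ts,
                   int64_t rtp_timestamp, webrtc::RtcpList* rtcp_list,
                   int64_t* ntp_ms) {
  bool new_sr = false;
  if (!webrtc::UpdateRtcpList(ntp_secs, ntp_frac, sr_rtp_ts, rtcp_list,
                              &new_sr)) {
    return false;
  }
  // Needs at least two (RTP, NTP) pairs in |rtcp_list| to interpolate.
  return webrtc::RtpToNtpMs(rtp_timestamp, *rtcp_list, ntp_ms);
}
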
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/rw_lock_wrapper.h b/chromium/third_party/webrtc/system_wrappers/interface/rw_lock_wrapper.h
index 80eb5da8a97..91126e5d78e 100644
--- a/chromium/third_party/webrtc/system_wrappers/interface/rw_lock_wrapper.h
+++ b/chromium/third_party/webrtc/system_wrappers/interface/rw_lock_wrapper.h
@@ -11,35 +11,36 @@
#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_RW_LOCK_WRAPPER_H_
#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_RW_LOCK_WRAPPER_H_
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
+
// Note, Windows pre-Vista versions of RW locks are not supported natively. For
// these OSs regular critical sections have been used to approximate RW lock
// functionality and will therefore have worse performance.
namespace webrtc {
-class RWLockWrapper {
+class LOCKABLE RWLockWrapper {
public:
static RWLockWrapper* CreateRWLock();
virtual ~RWLockWrapper() {}
- virtual void AcquireLockExclusive() = 0;
- virtual void ReleaseLockExclusive() = 0;
+ virtual void AcquireLockExclusive() EXCLUSIVE_LOCK_FUNCTION() = 0;
+ virtual void ReleaseLockExclusive() UNLOCK_FUNCTION() = 0;
- virtual void AcquireLockShared() = 0;
- virtual void ReleaseLockShared() = 0;
+ virtual void AcquireLockShared() SHARED_LOCK_FUNCTION() = 0;
+ virtual void ReleaseLockShared() UNLOCK_FUNCTION() = 0;
};
// RAII extensions of the RW lock. Prevents Acquire/Release mismatches and
// provides more compact locking syntax.
-class ReadLockScoped {
+class SCOPED_LOCKABLE ReadLockScoped {
public:
- ReadLockScoped(RWLockWrapper& rw_lock)
- :
- rw_lock_(rw_lock) {
+ ReadLockScoped(RWLockWrapper& rw_lock) SHARED_LOCK_FUNCTION(rw_lock)
+ : rw_lock_(rw_lock) {
rw_lock_.AcquireLockShared();
}
- ~ReadLockScoped() {
+ ~ReadLockScoped() UNLOCK_FUNCTION() {
rw_lock_.ReleaseLockShared();
}
@@ -47,15 +48,14 @@ class ReadLockScoped {
RWLockWrapper& rw_lock_;
};
-class WriteLockScoped {
+class SCOPED_LOCKABLE WriteLockScoped {
public:
- WriteLockScoped(RWLockWrapper& rw_lock)
- :
- rw_lock_(rw_lock) {
+ WriteLockScoped(RWLockWrapper& rw_lock) EXCLUSIVE_LOCK_FUNCTION(rw_lock)
+ : rw_lock_(rw_lock) {
rw_lock_.AcquireLockExclusive();
}
- ~WriteLockScoped() {
+ ~WriteLockScoped() UNLOCK_FUNCTION() {
rw_lock_.ReleaseLockExclusive();
}
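
A short sketch of the scoped read/write usage the new annotations describe; the Settings class is illustrative only:

#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

class Settings {
 public:
  Settings() : lock_(webrtc::RWLockWrapper::CreateRWLock()), value_(0) {}

  int Get() const {
    webrtc::ReadLockScoped read(*lock_);   // Shared lock for readers.
    return value_;
  }

  void Set(int value) {
    webrtc::WriteLockScoped write(*lock_);  // Exclusive lock for writers.
    value_ = value;
  }

 private:
  webrtc::scoped_ptr<webrtc::RWLockWrapper> lock_;
  int value_;
};
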
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/scoped_ptr.h b/chromium/third_party/webrtc/system_wrappers/interface/scoped_ptr.h
index aeac77ac95b..42bb8a6dd4b 100644
--- a/chromium/third_party/webrtc/system_wrappers/interface/scoped_ptr.h
+++ b/chromium/third_party/webrtc/system_wrappers/interface/scoped_ptr.h
@@ -96,7 +96,7 @@
#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_PTR_H_
// This is an implementation designed to match the anticipated future TR2
-// implementation of the scoped_ptr class and scoped_ptr_malloc (deprecated).
+// implementation of the scoped_ptr class.
#include <assert.h>
#include <stddef.h>
@@ -104,8 +104,8 @@
#include <algorithm> // For std::swap().
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/compile_assert.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/template_util.h"
#include "webrtc/system_wrappers/source/move.h"
#include "webrtc/typedefs.h"
@@ -563,153 +563,4 @@ bool operator!=(T* p1, const webrtc::scoped_ptr<T, D>& p2) {
return p1 != p2.get();
}
-namespace webrtc {
-
-// DEPRECATED: Use scoped_ptr<T[]> instead.
-// TODO(ajm): Remove scoped_array.
-//
-// scoped_array extends scoped_ptr to arrays. Deletion of the array pointed to
-// is guaranteed, either on destruction of the scoped_array or via an explicit
-// reset(). Use shared_array or std::vector if your needs are more complex.
-
-template<typename T>
-class scoped_array {
- private:
-
- T* ptr;
-
- scoped_array(scoped_array const &);
- scoped_array & operator=(scoped_array const &);
-
- public:
-
- typedef T element_type;
-
- explicit scoped_array(T* p = NULL) : ptr(p) {}
-
- ~scoped_array() {
- typedef char type_must_be_complete[sizeof(T)];
- delete[] ptr;
- }
-
- void reset(T* p = NULL) {
- typedef char type_must_be_complete[sizeof(T)];
-
- if (ptr != p) {
- T* arr = ptr;
- ptr = p;
- // Delete last, in case arr destructor indirectly results in ~scoped_array
- delete [] arr;
- }
- }
-
- T& operator[](ptrdiff_t i) const {
- assert(ptr != NULL);
- assert(i >= 0);
- return ptr[i];
- }
-
- T* get() const {
- return ptr;
- }
-
- void swap(scoped_array & b) {
- T* tmp = b.ptr;
- b.ptr = ptr;
- ptr = tmp;
- }
-
- T* release() {
- T* tmp = ptr;
- ptr = NULL;
- return tmp;
- }
-
- T** accept() {
- if (ptr) {
- delete [] ptr;
- ptr = NULL;
- }
- return &ptr;
- }
-};
-
-template<class T> inline
-void swap(scoped_array<T>& a, scoped_array<T>& b) {
- a.swap(b);
-}
-
-// DEPRECATED: Use scoped_ptr<C, webrtc::FreeDeleter> instead.
-// TODO(ajm): Remove scoped_ptr_malloc.
-//
-// scoped_ptr_malloc<> is similar to scoped_ptr<>, but it accepts a
-// second template argument, the function used to free the object.
-
-template<typename T, void (*FF)(void*) = free> class scoped_ptr_malloc {
- private:
-
- T* ptr;
-
- scoped_ptr_malloc(scoped_ptr_malloc const &);
- scoped_ptr_malloc & operator=(scoped_ptr_malloc const &);
-
- public:
-
- typedef T element_type;
-
- explicit scoped_ptr_malloc(T* p = 0): ptr(p) {}
-
- ~scoped_ptr_malloc() {
- FF(static_cast<void*>(ptr));
- }
-
- void reset(T* p = 0) {
- if (ptr != p) {
- FF(static_cast<void*>(ptr));
- ptr = p;
- }
- }
-
- T& operator*() const {
- assert(ptr != 0);
- return *ptr;
- }
-
- T* operator->() const {
- assert(ptr != 0);
- return ptr;
- }
-
- T* get() const {
- return ptr;
- }
-
- void swap(scoped_ptr_malloc & b) {
- T* tmp = b.ptr;
- b.ptr = ptr;
- ptr = tmp;
- }
-
- T* release() {
- T* tmp = ptr;
- ptr = 0;
- return tmp;
- }
-
- T** accept() {
- if (ptr) {
- FF(static_cast<void*>(ptr));
- ptr = 0;
- }
- return &ptr;
- }
-};
-
-template<typename T, void (*FF)(void*)> inline
-void swap(scoped_ptr_malloc<T,FF>& a, scoped_ptr_malloc<T,FF>& b) {
- a.swap(b);
-}
-
-} // namespace webrtc
-
#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_PTR_H_
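
The removed deprecation comments point at scoped_ptr<T[]> and scoped_ptr<C, webrtc::FreeDeleter> as the replacements; a hedged migration sketch, assuming FreeDeleter is available from this header as those comments imply:

#include <stdlib.h>

#include "webrtc/system_wrappers/interface/scoped_ptr.h"

void Migrate() {
  // Was: webrtc::scoped_array<char> buffer(new char[1024]);
  webrtc::scoped_ptr<char[]> buffer(new char[1024]);
  buffer[0] = '\0';

  // Was: webrtc::scoped_ptr_malloc<char> c_buffer(static_cast<char*>(malloc(1024)));
  webrtc::scoped_ptr<char, webrtc::FreeDeleter> c_buffer(
      static_cast<char*>(malloc(1024)));
}  // Both buffers are released automatically here.
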
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/scoped_refptr.h b/chromium/third_party/webrtc/system_wrappers/interface/scoped_refptr.h
index a8a0074a2db..b344d211b1b 100644
--- a/chromium/third_party/webrtc/system_wrappers/interface/scoped_refptr.h
+++ b/chromium/third_party/webrtc/system_wrappers/interface/scoped_refptr.h
@@ -11,6 +11,8 @@
#ifndef SYSTEM_WRAPPERS_INTERFACE_SCOPED_REFPTR_H_
#define SYSTEM_WRAPPERS_INTERFACE_SCOPED_REFPTR_H_
+#include <stddef.h>
+
namespace webrtc {
// Extracted from Chromium's src/base/memory/ref_counted.h.
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/scoped_vector.h b/chromium/third_party/webrtc/system_wrappers/interface/scoped_vector.h
new file mode 100644
index 00000000000..68db3a1210a
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/interface/scoped_vector.h
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/memory/scoped_vector.h.
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_VECTOR_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_VECTOR_H_
+
+#include <assert.h>
+#include <algorithm>
+#include <vector>
+
+#include "webrtc/system_wrappers/interface/stl_util.h"
+#include "webrtc/system_wrappers/source/move.h"
+
+namespace webrtc {
+
+// ScopedVector wraps a vector deleting the elements from its
+// destructor.
+template <class T>
+class ScopedVector {
+ WEBRTC_MOVE_ONLY_TYPE_FOR_CPP_03(ScopedVector, RValue)
+
+ public:
+ typedef typename std::vector<T*>::allocator_type allocator_type;
+ typedef typename std::vector<T*>::size_type size_type;
+ typedef typename std::vector<T*>::difference_type difference_type;
+ typedef typename std::vector<T*>::pointer pointer;
+ typedef typename std::vector<T*>::const_pointer const_pointer;
+ typedef typename std::vector<T*>::reference reference;
+ typedef typename std::vector<T*>::const_reference const_reference;
+ typedef typename std::vector<T*>::value_type value_type;
+ typedef typename std::vector<T*>::iterator iterator;
+ typedef typename std::vector<T*>::const_iterator const_iterator;
+ typedef typename std::vector<T*>::reverse_iterator reverse_iterator;
+ typedef typename std::vector<T*>::const_reverse_iterator
+ const_reverse_iterator;
+
+ ScopedVector() {}
+ ~ScopedVector() { clear(); }
+ ScopedVector(RValue other) { swap(*other.object); }
+
+ ScopedVector& operator=(RValue rhs) {
+ swap(*rhs.object);
+ return *this;
+ }
+
+ reference operator[](size_t index) { return v_[index]; }
+ const_reference operator[](size_t index) const { return v_[index]; }
+
+ bool empty() const { return v_.empty(); }
+ size_t size() const { return v_.size(); }
+
+ reverse_iterator rbegin() { return v_.rbegin(); }
+ const_reverse_iterator rbegin() const { return v_.rbegin(); }
+ reverse_iterator rend() { return v_.rend(); }
+ const_reverse_iterator rend() const { return v_.rend(); }
+
+ iterator begin() { return v_.begin(); }
+ const_iterator begin() const { return v_.begin(); }
+ iterator end() { return v_.end(); }
+ const_iterator end() const { return v_.end(); }
+
+ const_reference front() const { return v_.front(); }
+ reference front() { return v_.front(); }
+ const_reference back() const { return v_.back(); }
+ reference back() { return v_.back(); }
+
+ void push_back(T* elem) { v_.push_back(elem); }
+
+ void pop_back() {
+ assert(!empty());
+ delete v_.back();
+ v_.pop_back();
+ }
+
+ std::vector<T*>& get() { return v_; }
+ const std::vector<T*>& get() const { return v_; }
+ void swap(std::vector<T*>& other) { v_.swap(other); }
+ void swap(ScopedVector<T>& other) { v_.swap(other.v_); }
+ void release(std::vector<T*>* out) {
+ out->swap(v_);
+ v_.clear();
+ }
+
+ void reserve(size_t capacity) { v_.reserve(capacity); }
+
+ // Resize, deleting elements in the disappearing range if we are shrinking.
+ void resize(size_t new_size) {
+ if (v_.size() > new_size)
+ STLDeleteContainerPointers(v_.begin() + new_size, v_.end());
+ v_.resize(new_size);
+ }
+
+ template<typename InputIterator>
+ void assign(InputIterator begin, InputIterator end) {
+ v_.assign(begin, end);
+ }
+
+ void clear() { STLDeleteElements(&v_); }
+
+ // Like |clear()|, but doesn't delete any elements.
+ void weak_clear() { v_.clear(); }
+
+ // Lets the ScopedVector take ownership of |x|.
+ iterator insert(iterator position, T* x) {
+ return v_.insert(position, x);
+ }
+
+ // Lets the ScopedVector take ownership of elements in [first,last).
+ template<typename InputIterator>
+ void insert(iterator position, InputIterator first, InputIterator last) {
+ v_.insert(position, first, last);
+ }
+
+ iterator erase(iterator position) {
+ delete *position;
+ return v_.erase(position);
+ }
+
+ iterator erase(iterator first, iterator last) {
+ STLDeleteContainerPointers(first, last);
+ return v_.erase(first, last);
+ }
+
+ // Like |erase()|, but doesn't delete the element at |position|.
+ iterator weak_erase(iterator position) {
+ return v_.erase(position);
+ }
+
+ // Like |erase()|, but doesn't delete the elements in [first, last).
+ iterator weak_erase(iterator first, iterator last) {
+ return v_.erase(first, last);
+ }
+
+ private:
+ std::vector<T*> v_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_SCOPED_VECTOR_H_
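
A short usage sketch of the new ScopedVector (not part of this patch; Frame is an illustrative element type):

#include "webrtc/system_wrappers/interface/scoped_vector.h"

struct Frame { int id; };

void UseScopedVector() {
  webrtc::ScopedVector<Frame> frames;
  frames.push_back(new Frame());   // ScopedVector takes ownership.
  frames.push_back(new Frame());
  frames.resize(1);                // Deletes the element in the shrunk-away range.
}  // The remaining element is deleted when |frames| goes out of scope.
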
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/stl_util.h b/chromium/third_party/webrtc/system_wrappers/interface/stl_util.h
new file mode 100644
index 00000000000..ebe855fb10e
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/interface/stl_util.h
@@ -0,0 +1,265 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/stl_util.h.
+
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STL_UTIL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STL_UTIL_H_
+
+#include <assert.h>
+#include <algorithm>
+#include <functional>
+#include <iterator>
+#include <string>
+#include <vector>
+
+namespace webrtc {
+
+// Clears internal memory of an STL object.
+// STL clear()/reserve(0) does not always free internal memory allocated.
+// This function uses swap/destructor to ensure the internal memory is freed.
+template<class T>
+void STLClearObject(T* obj) {
+ T tmp;
+ tmp.swap(*obj);
+ // Sometimes "T tmp" allocates objects with memory (arena implementation?).
+ // Hence using additional reserve(0) even if it doesn't always work.
+ obj->reserve(0);
+}
+
+// For a range within a container of pointers, calls delete (non-array version)
+// on these pointers.
+// NOTE: for these three functions, we could just implement a DeleteObject
+// functor and then call for_each() on the range and functor, but this
+// requires us to pull in all of algorithm.h, which seems expensive.
+// For hash_[multi]set, it is important that this deletes behind the iterator
+// because the hash_set may call the hash function on the iterator when it is
+// advanced, which could result in the hash function trying to dereference a
+// stale pointer.
+template <class ForwardIterator>
+void STLDeleteContainerPointers(ForwardIterator begin, ForwardIterator end) {
+ while (begin != end) {
+ ForwardIterator temp = begin;
+ ++begin;
+ delete *temp;
+ }
+}
+
+// For a range within a container of pairs, calls delete (non-array version) on
+// BOTH items in the pairs.
+// NOTE: Like STLDeleteContainerPointers, it is important that this deletes
+// behind the iterator because if both the key and value are deleted, the
+// container may call the hash function on the iterator when it is advanced,
+// which could result in the hash function trying to dereference a stale
+// pointer.
+template <class ForwardIterator>
+void STLDeleteContainerPairPointers(ForwardIterator begin,
+ ForwardIterator end) {
+ while (begin != end) {
+ ForwardIterator temp = begin;
+ ++begin;
+ delete temp->first;
+ delete temp->second;
+ }
+}
+
+// For a range within a container of pairs, calls delete (non-array version) on
+// the FIRST item in the pairs.
+// NOTE: Like STLDeleteContainerPointers, deleting behind the iterator.
+template <class ForwardIterator>
+void STLDeleteContainerPairFirstPointers(ForwardIterator begin,
+ ForwardIterator end) {
+ while (begin != end) {
+ ForwardIterator temp = begin;
+ ++begin;
+ delete temp->first;
+ }
+}
+
+// For a range within a container of pairs, calls delete.
+// NOTE: Like STLDeleteContainerPointers, deleting behind the iterator.
+// Deleting the value does not always invalidate the iterator, but it may
+// do so if the key is a pointer into the value object.
+template <class ForwardIterator>
+void STLDeleteContainerPairSecondPointers(ForwardIterator begin,
+ ForwardIterator end) {
+ while (begin != end) {
+ ForwardIterator temp = begin;
+ ++begin;
+ delete temp->second;
+ }
+}
+
+// To treat a possibly-empty vector as an array, use these functions.
+// If you know the array will never be empty, you can use &*v.begin()
+// directly, but that is undefined behaviour if |v| is empty.
+template<typename T>
+inline T* vector_as_array(std::vector<T>* v) {
+ return v->empty() ? NULL : &*v->begin();
+}
+
+template<typename T>
+inline const T* vector_as_array(const std::vector<T>* v) {
+ return v->empty() ? NULL : &*v->begin();
+}
+
+// Return a mutable char* pointing to a string's internal buffer,
+// which may not be null-terminated. Writing through this pointer will
+// modify the string.
+//
+// string_as_array(&str)[i] is valid for 0 <= i < str.size() until the
+// next call to a string method that invalidates iterators.
+//
+// As of 2006-04, there is no standard-blessed way of getting a
+// mutable reference to a string's internal buffer. However, issue 530
+// (http://www.open-std.org/JTC1/SC22/WG21/docs/lwg-active.html#530)
+// proposes this as the method. According to Matt Austern, this should
+// already work on all current implementations.
+inline char* string_as_array(std::string* str) {
+ // DO NOT USE const_cast<char*>(str->data())
+ return str->empty() ? NULL : &*str->begin();
+}
+
+// The following functions are useful for cleaning up STL containers whose
+// elements point to allocated memory.
+
+// STLDeleteElements() deletes all the elements in an STL container and clears
+// the container. This function is suitable for use with a vector, set,
+// hash_set, or any other STL container which defines sensible begin(), end(),
+// and clear() methods.
+//
+// If container is NULL, this function is a no-op.
+//
+// As an alternative to calling STLDeleteElements() directly, consider
+// STLElementDeleter (defined below), which ensures that your container's
+// elements are deleted when the STLElementDeleter goes out of scope.
+template <class T>
+void STLDeleteElements(T* container) {
+ if (!container)
+ return;
+ STLDeleteContainerPointers(container->begin(), container->end());
+ container->clear();
+}
+
+// Given an STL container consisting of (key, value) pairs, STLDeleteValues
+// deletes all the "value" components and clears the container. Does nothing
+// in the case it's given a NULL pointer.
+template <class T>
+void STLDeleteValues(T* container) {
+ if (!container)
+ return;
+ for (typename T::iterator i(container->begin()); i != container->end(); ++i)
+ delete i->second;
+ container->clear();
+}
+
+
+// The following classes provide a convenient way to delete all elements or
+// values from STL containers when they go out of scope. This greatly
+// simplifies code that creates temporary objects and has multiple return
+// statements. Example:
+//
+// vector<MyProto *> tmp_proto;
+// STLElementDeleter<vector<MyProto *> > d(&tmp_proto);
+// if (...) return false;
+// ...
+// return success;
+
+// Given a pointer to an STL container this class will delete all the element
+// pointers when it goes out of scope.
+template<class T>
+class STLElementDeleter {
+ public:
+ STLElementDeleter<T>(T* container) : container_(container) {}
+ ~STLElementDeleter<T>() { STLDeleteElements(container_); }
+
+ private:
+ T* container_;
+};
+
+// Given a pointer to an STL container this class will delete all the value
+// pointers when it goes out of scope.
+template<class T>
+class STLValueDeleter {
+ public:
+ STLValueDeleter<T>(T* container) : container_(container) {}
+ ~STLValueDeleter<T>() { STLDeleteValues(container_); }
+
+ private:
+ T* container_;
+};
+
+// Test to see if a set, map, hash_set or hash_map contains a particular key.
+// Returns true if the key is in the collection.
+template <typename Collection, typename Key>
+bool ContainsKey(const Collection& collection, const Key& key) {
+ return collection.find(key) != collection.end();
+}
+
+// Returns true if the container is sorted.
+template <typename Container>
+bool STLIsSorted(const Container& cont) {
+ // Note: Use reverse iterator on container to ensure we only require
+ // value_type to implement operator<.
+ return std::adjacent_find(cont.rbegin(), cont.rend(),
+ std::less<typename Container::value_type>())
+ == cont.rend();
+}
+
+// Returns a new ResultType containing the difference of two sorted containers.
+template <typename ResultType, typename Arg1, typename Arg2>
+ResultType STLSetDifference(const Arg1& a1, const Arg2& a2) {
+ assert(STLIsSorted(a1));
+ assert(STLIsSorted(a2));
+ ResultType difference;
+ std::set_difference(a1.begin(), a1.end(),
+ a2.begin(), a2.end(),
+ std::inserter(difference, difference.end()));
+ return difference;
+}
+
+// Returns a new ResultType containing the union of two sorted containers.
+template <typename ResultType, typename Arg1, typename Arg2>
+ResultType STLSetUnion(const Arg1& a1, const Arg2& a2) {
+ assert(STLIsSorted(a1));
+ assert(STLIsSorted(a2));
+ ResultType result;
+ std::set_union(a1.begin(), a1.end(),
+ a2.begin(), a2.end(),
+ std::inserter(result, result.end()));
+ return result;
+}
+
+// Returns a new ResultType containing the intersection of two sorted
+// containers.
+template <typename ResultType, typename Arg1, typename Arg2>
+ResultType STLSetIntersection(const Arg1& a1, const Arg2& a2) {
+ assert(STLIsSorted(a1));
+ assert(STLIsSorted(a2));
+ ResultType result;
+ std::set_intersection(a1.begin(), a1.end(),
+ a2.begin(), a2.end(),
+ std::inserter(result, result.end()));
+ return result;
+}
+
+// Returns true if the sorted container |a1| contains all elements of the sorted
+// container |a2|.
+template <typename Arg1, typename Arg2>
+bool STLIncludes(const Arg1& a1, const Arg2& a2) {
+ assert(STLIsSorted(a1));
+ assert(STLIsSorted(a2));
+ return std::includes(a1.begin(), a1.end(),
+ a2.begin(), a2.end());
+}
+
+} // namespace webrtc
+
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_STL_UTIL_H_
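
A brief sketch of the most commonly used helpers in the new stl_util.h (not part of this patch; Report is an illustrative element type):

#include <set>
#include <vector>

#include "webrtc/system_wrappers/interface/stl_util.h"

struct Report { int count; };

void UseStlUtil() {
  // Owned pointers are deleted when |deleter| goes out of scope.
  std::vector<Report*> reports;
  webrtc::STLElementDeleter<std::vector<Report*> > deleter(&reports);
  reports.push_back(new Report());

  std::set<int> a, b;
  a.insert(1);
  a.insert(2);
  b.insert(2);
  bool has_one = webrtc::ContainsKey(a, 1);                                  // true
  std::set<int> only_in_a = webrtc::STLSetDifference<std::set<int> >(a, b);  // {1}
  (void)has_one;
  (void)only_in_a;
}
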
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/template_util.h b/chromium/third_party/webrtc/system_wrappers/interface/template_util.h
index 5ae415b5a91..410e04cc94b 100644
--- a/chromium/third_party/webrtc/system_wrappers/interface/template_util.h
+++ b/chromium/third_party/webrtc/system_wrappers/interface/template_util.h
@@ -13,7 +13,7 @@
#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TEMPLATE_UTIL_H_
#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_TEMPLATE_UTIL_H_
-#include <cstddef> // For size_t.
+#include <stddef.h> // For size_t.
namespace webrtc {
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/thread_annotations.h b/chromium/third_party/webrtc/system_wrappers/interface/thread_annotations.h
new file mode 100644
index 00000000000..612242d611f
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/interface/thread_annotations.h
@@ -0,0 +1,99 @@
+//
+// Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the LICENSE file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+//
+// Borrowed from
+// https://code.google.com/p/gperftools/source/browse/src/base/thread_annotations.h
+// but adapted for clang attributes instead of the gcc ones.
+//
+// This header file contains the macro definitions for thread safety
+// annotations that allow the developers to document the locking policies
+// of their multi-threaded code. The annotations can also help program
+// analysis tools to identify potential thread safety issues.
+
+#ifndef BASE_THREAD_ANNOTATIONS_H_
+#define BASE_THREAD_ANNOTATIONS_H_
+
+#if defined(__clang__) && (!defined(SWIG))
+#define THREAD_ANNOTATION_ATTRIBUTE__(x) __attribute__((x))
+#else
+#define THREAD_ANNOTATION_ATTRIBUTE__(x) // no-op
+#endif
+
+// Document if a shared variable/field needs to be protected by a lock.
+// GUARDED_BY allows the user to specify a particular lock that should be
+// held when accessing the annotated variable, while GUARDED_VAR only
+// indicates a shared variable should be guarded (by any lock). GUARDED_VAR
+// is primarily used when the client cannot express the name of the lock.
+#define GUARDED_BY(x) THREAD_ANNOTATION_ATTRIBUTE__(guarded_by(x))
+#define GUARDED_VAR THREAD_ANNOTATION_ATTRIBUTE__(guarded)
+
+// Document if the memory location pointed to by a pointer should be guarded
+// by a lock when dereferencing the pointer. Similar to GUARDED_VAR,
+// PT_GUARDED_VAR is primarily used when the client cannot express the name
+// of the lock. Note that a pointer variable to a shared memory location
+// could itself be a shared variable. For example, if a shared global pointer
+// q, which is guarded by mu1, points to a shared memory location that is
+// guarded by mu2, q should be annotated as follows:
+// int *q GUARDED_BY(mu1) PT_GUARDED_BY(mu2);
+#define PT_GUARDED_BY(x) THREAD_ANNOTATION_ATTRIBUTE__(point_to_guarded_by(x))
+#define PT_GUARDED_VAR THREAD_ANNOTATION_ATTRIBUTE__(point_to_guarded)
+
+// Document the acquisition order between locks that can be held
+// simultaneously by a thread. For any two locks that need to be annotated
+// to establish an acquisition order, only one of them needs the annotation.
+// (i.e. You don't have to annotate both locks with both ACQUIRED_AFTER
+// and ACQUIRED_BEFORE.)
+#define ACQUIRED_AFTER(x) THREAD_ANNOTATION_ATTRIBUTE__(acquired_after(x))
+#define ACQUIRED_BEFORE(x) THREAD_ANNOTATION_ATTRIBUTE__(acquired_before(x))
+
+// The following three annotations document the lock requirements for
+// functions/methods.
+
+// Document if a function expects certain locks to be held before it is called
+#define EXCLUSIVE_LOCKS_REQUIRED(...) \
+ THREAD_ANNOTATION_ATTRIBUTE__(exclusive_locks_required(__VA_ARGS__))
+
+#define SHARED_LOCKS_REQUIRED(...) \
+ THREAD_ANNOTATION_ATTRIBUTE__(shared_locks_required(__VA_ARGS__))
+
+// Document the locks acquired in the body of the function. These locks
+// cannot be held when calling this function (as google3's Mutex locks are
+// non-reentrant).
+#define LOCKS_EXCLUDED(x) THREAD_ANNOTATION_ATTRIBUTE__(locks_excluded(x))
+
+// Document the lock the annotated function returns without acquiring it.
+#define LOCK_RETURNED(x) THREAD_ANNOTATION_ATTRIBUTE__(lock_returned(x))
+
+// Document if a class/type is a lockable type (such as the Mutex class).
+#define LOCKABLE THREAD_ANNOTATION_ATTRIBUTE__(lockable)
+
+// Document if a class is a scoped lockable type (such as the MutexLock class).
+#define SCOPED_LOCKABLE THREAD_ANNOTATION_ATTRIBUTE__(scoped_lockable)
+
+// The following annotations specify lock and unlock primitives.
+#define EXCLUSIVE_LOCK_FUNCTION(...) \
+ THREAD_ANNOTATION_ATTRIBUTE__(exclusive_lock_function(__VA_ARGS__))
+
+#define SHARED_LOCK_FUNCTION(...) \
+ THREAD_ANNOTATION_ATTRIBUTE__(shared_lock_function(__VA_ARGS__))
+
+#define EXCLUSIVE_TRYLOCK_FUNCTION(...) \
+ THREAD_ANNOTATION_ATTRIBUTE__(exclusive_trylock_function(__VA_ARGS__))
+
+#define SHARED_TRYLOCK_FUNCTION(...) \
+ THREAD_ANNOTATION_ATTRIBUTE__(shared_trylock_function(__VA_ARGS__))
+
+#define UNLOCK_FUNCTION(...) \
+ THREAD_ANNOTATION_ATTRIBUTE__(unlock_function(__VA_ARGS__))
+
+// An escape hatch for thread safety analysis to ignore the annotated function.
+#define NO_THREAD_SAFETY_ANALYSIS \
+ THREAD_ANNOTATION_ATTRIBUTE__(no_thread_safety_analysis)
+
+#endif // BASE_THREAD_ANNOTATIONS_H_
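
A hedged example of how these annotations are meant to be applied (illustrative class; GUARDED_BY on a scoped_ptr-held lock follows existing WebRTC usage but is only checked when building with clang's -Wthread-safety):

#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/thread_annotations.h"

class BitrateTracker {
 public:
  BitrateTracker() : lock_(webrtc::RWLockWrapper::CreateRWLock()), bitrate_(0) {}

  // Caller must already hold |lock_| exclusively.
  void SetLocked(int bitrate) EXCLUSIVE_LOCKS_REQUIRED(lock_) { bitrate_ = bitrate; }

  // Acquires |lock_| internally, so it must not be held on entry.
  void Set(int bitrate) LOCKS_EXCLUDED(lock_) {
    webrtc::WriteLockScoped cs(*lock_);
    SetLocked(bitrate);
  }

 private:
  webrtc::scoped_ptr<webrtc::RWLockWrapper> lock_;
  int bitrate_ GUARDED_BY(lock_);  // Unlocked access is flagged by the analysis.
};
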
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/timestamp_extrapolator.h b/chromium/third_party/webrtc/system_wrappers/interface/timestamp_extrapolator.h
new file mode 100644
index 00000000000..d067198d8d5
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/interface/timestamp_extrapolator.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SYSTEM_WRAPPERS_INTERFACE_TIMESTAMP_EXTRAPOLATOR_H_
+#define SYSTEM_WRAPPERS_INTERFACE_TIMESTAMP_EXTRAPOLATOR_H_
+
+#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc
+{
+
+class TimestampExtrapolator
+{
+public:
+ explicit TimestampExtrapolator(int64_t start_ms);
+ ~TimestampExtrapolator();
+ void Update(int64_t tMs, uint32_t ts90khz);
+ int64_t ExtrapolateLocalTime(uint32_t timestamp90khz);
+ void Reset(int64_t start_ms);
+
+private:
+ void CheckForWrapArounds(uint32_t ts90khz);
+ bool DelayChangeDetection(double error);
+ RWLockWrapper* _rwLock;
+ double _w[2];
+ double _P[2][2];
+ int64_t _startMs;
+ int64_t _prevMs;
+ uint32_t _firstTimestamp;
+ int32_t _wrapArounds;
+ int64_t _prevUnwrappedTimestamp;
+ int64_t _prevWrapTimestamp;
+ const double _lambda;
+ bool _firstAfterReset;
+ uint32_t _packetCount;
+ const uint32_t _startUpFilterDelayInPackets;
+
+ double _detectorAccumulatorPos;
+ double _detectorAccumulatorNeg;
+ const double _alarmThreshold;
+ const double _accDrift;
+ const double _accMaxError;
+ const double _P11;
+};
+
+} // namespace webrtc
+
+#endif // SYSTEM_WRAPPERS_INTERFACE_TIMESTAMP_EXTRAPOLATOR_H_
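
A usage sketch inferred from the method signatures (not part of this patch; the timing variables are illustrative):

#include "webrtc/system_wrappers/interface/timestamp_extrapolator.h"

void OnFrameReceived(webrtc::TimestampExtrapolator* extrapolator,
                     int64_t receive_time_ms,
                     uint32_t rtp_timestamp_90khz) {
  // Feed each received (local time, RTP timestamp) pair into the filter...
  extrapolator->Update(receive_time_ms, rtp_timestamp_90khz);
  // ...then map an RTP timestamp back onto the local clock.
  int64_t estimated_local_ms =
      extrapolator->ExtrapolateLocalTime(rtp_timestamp_90khz);
  (void)estimated_local_ms;
}
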
diff --git a/chromium/third_party/webrtc/system_wrappers/interface/utf_util_win.h b/chromium/third_party/webrtc/system_wrappers/interface/utf_util_win.h
new file mode 100644
index 00000000000..f88f0799ff1
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/interface/utf_util_win.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Conversion functions for UTF-8 and UTF-16 strings on Windows.
+// Duplicated from talk/base/win32.h.
+#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_UTF_UTIL_H_
+#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_UTF_UTIL_H_
+
+#ifdef WIN32
+#include <windows.h>
+#include <string>
+
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+inline std::wstring ToUtf16(const char* utf8, size_t len) {
+ int len16 = ::MultiByteToWideChar(CP_UTF8, 0, utf8, static_cast<int>(len),
+ NULL, 0);
+ scoped_ptr<wchar_t[]> ws(new wchar_t[len16]);
+ ::MultiByteToWideChar(CP_UTF8, 0, utf8, static_cast<int>(len), ws.get(),
+ len16);
+ return std::wstring(ws.get(), len16);
+}
+
+inline std::wstring ToUtf16(const std::string& str) {
+ return ToUtf16(str.data(), str.length());
+}
+
+inline std::string ToUtf8(const wchar_t* wide, size_t len) {
+ int len8 = ::WideCharToMultiByte(CP_UTF8, 0, wide, static_cast<int>(len),
+ NULL, 0, NULL, NULL);
+ scoped_ptr<char[]> ns(new char[len8]);
+ ::WideCharToMultiByte(CP_UTF8, 0, wide, static_cast<int>(len), ns.get(), len8,
+ NULL, NULL);
+ return std::string(ns.get(), len8);
+}
+
+inline std::string ToUtf8(const wchar_t* wide) {
+ return ToUtf8(wide, wcslen(wide));
+}
+
+inline std::string ToUtf8(const std::wstring& wstr) {
+ return ToUtf8(wstr.data(), wstr.length());
+}
+
+} // namespace webrtc
+
+#endif // WIN32
+#endif // WEBRTC_SYSTEM_WRAPPERS_INTERFACE_UTF_UTIL_H_
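
A Windows-only round-trip sketch (not part of this patch; the path string is illustrative):

#ifdef WIN32
#include <windows.h>
#include <string>

#include "webrtc/system_wrappers/interface/utf_util_win.h"

void LogWidePath() {
  std::wstring wide = webrtc::ToUtf16("C:\\media\\clip.webm");  // UTF-8 -> UTF-16.
  std::string utf8 = webrtc::ToUtf8(wide);                      // UTF-16 -> UTF-8.
  ::OutputDebugStringW(wide.c_str());
}
#endif  // WIN32
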
diff --git a/chromium/third_party/webrtc/system_wrappers/source/Android.mk b/chromium/third_party/webrtc/system_wrappers/source/Android.mk
index 6cc7a0bac21..4767e72cb38 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/Android.mk
+++ b/chromium/third_party/webrtc/system_wrappers/source/Android.mk
@@ -17,7 +17,6 @@ LOCAL_MODULE := libwebrtc_system_wrappers
LOCAL_MODULE_TAGS := optional
LOCAL_CPP_EXTENSION := .cc
LOCAL_SRC_FILES := \
- android/cpu-features.c \
cpu_features_android.c \
sort.cc \
aligned_malloc.cc \
@@ -36,11 +35,13 @@ LOCAL_SRC_FILES := \
condition_variable_posix.cc \
critical_section_posix.cc \
event_posix.cc \
+ rtp_to_ntp.cc \
sleep.cc \
thread_posix.cc \
tick_util.cc \
+ timestamp_extrapolator.cc \
trace_posix.cc \
- rw_lock_posix.cc
+ rw_lock_posix.cc
LOCAL_CFLAGS := \
$(MY_WEBRTC_COMMON_DEFS)
@@ -56,6 +57,8 @@ LOCAL_SHARED_LIBRARIES := \
libdl \
libstlport
+LOCAL_STATIC_LIBRARIES := cpufeatures
+
ifndef NDK_ROOT
include external/stlport/libstlport.mk
endif
diff --git a/chromium/third_party/webrtc/system_wrappers/source/OWNERS b/chromium/third_party/webrtc/system_wrappers/source/OWNERS
new file mode 100644
index 00000000000..bbffda7e492
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/source/OWNERS
@@ -0,0 +1,6 @@
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/system_wrappers/source/aligned_malloc_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/aligned_malloc_unittest.cc
index 10e08aae7b1..0acbf975902 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/aligned_malloc_unittest.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/aligned_malloc_unittest.cc
@@ -16,14 +16,16 @@
#include <stdint.h>
#endif
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
-#include "testing/gtest/include/gtest/gtest.h"
+namespace webrtc {
// Returns true if |size| and |alignment| are valid combinations.
bool CorrectUsage(size_t size, size_t alignment) {
- webrtc::Allocator<char>::scoped_ptr_aligned scoped(
- webrtc::AlignedMalloc<char>(size, alignment));
+ scoped_ptr<char, AlignedFreeDeleter> scoped(
+ static_cast<char*>(AlignedMalloc(size, alignment)));
if (scoped.get() == NULL) {
return false;
}
@@ -34,16 +36,15 @@ bool CorrectUsage(size_t size, size_t alignment) {
TEST(AlignedMalloc, GetRightAlign) {
const size_t size = 100;
const size_t alignment = 32;
- const size_t left_missalignment = 8;
- webrtc::Allocator<char>::scoped_ptr_aligned scoped(
- webrtc::AlignedMalloc<char>(size, alignment));
+ const size_t left_misalignment = 1;
+ scoped_ptr<char, AlignedFreeDeleter> scoped(
+ static_cast<char*>(AlignedMalloc(size, alignment)));
EXPECT_TRUE(scoped.get() != NULL);
const uintptr_t aligned_address = reinterpret_cast<uintptr_t> (scoped.get());
- const uintptr_t missaligned_address = aligned_address - left_missalignment;
- const char* missaligned_ptr = reinterpret_cast<const char*>(
- missaligned_address);
- const char* realigned_ptr = webrtc::GetRightAlign(
- missaligned_ptr, alignment);
+ const uintptr_t misaligned_address = aligned_address - left_misalignment;
+ const char* misaligned_ptr = reinterpret_cast<const char*>(
+ misaligned_address);
+ const char* realigned_ptr = GetRightAlign(misaligned_ptr, alignment);
EXPECT_EQ(scoped.get(), realigned_ptr);
}
@@ -76,3 +77,6 @@ TEST(AlignedMalloc, AlignTo128Bytes) {
size_t alignment = 128;
EXPECT_TRUE(CorrectUsage(size, alignment));
}
+
+} // namespace webrtc
+
diff --git a/chromium/third_party/webrtc/system_wrappers/source/android/cpu-features.c b/chromium/third_party/webrtc/system_wrappers/source/android/cpu-features.c
deleted file mode 100644
index efe7b95d6ab..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/source/android/cpu-features.c
+++ /dev/null
@@ -1,397 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <sys/system_properties.h>
-#ifdef __arm__
-#include <machine/cpu-features.h>
-#endif
-#include <errno.h>
-#include <fcntl.h>
-#include <pthread.h>
-#include <stdio.h>
-#include <stdlib.h>
-
-#include "webrtc/system_wrappers/source/android/cpu-features.h"
-
-static pthread_once_t g_once;
-static AndroidCpuFamily g_cpuFamily;
-static uint64_t g_cpuFeatures;
-static int g_cpuCount;
-
-static const int android_cpufeatures_debug = 0;
-
-#ifdef __arm__
-# define DEFAULT_CPU_FAMILY ANDROID_CPU_FAMILY_ARM
-#elif defined __i386__
-# define DEFAULT_CPU_FAMILY ANDROID_CPU_FAMILY_X86
-#else
-# define DEFAULT_CPU_FAMILY ANDROID_CPU_FAMILY_UNKNOWN
-#endif
-
-#define D(...) \
- do { \
- if (android_cpufeatures_debug) { \
- printf(__VA_ARGS__); fflush(stdout); \
- } \
- } while (0)
-
-#ifdef __i386__
-static __inline__ void x86_cpuid(int func, int values[4])
-{
- int a, b, c, d;
- /* We need to preserve ebx since we're compiling PIC code */
- /* this means we can't use "=b" for the second output register */
- __asm__ __volatile__ ( \
- "push %%ebx\n"
- "cpuid\n" \
- "mov %1, %%ebx\n"
- "pop %%ebx\n"
- : "=a" (a), "=r" (b), "=c" (c), "=d" (d) \
- : "a" (func) \
- );
- values[0] = a;
- values[1] = b;
- values[2] = c;
- values[3] = d;
-}
-#endif
-
-/* Read the content of /proc/cpuinfo into a user-provided buffer.
- * Return the length of the data, or -1 on error. Does *not*
- * zero-terminate the content. Will not read more
- * than 'buffsize' bytes.
- */
-static int
-read_file(const char* pathname, char* buffer, size_t buffsize)
-{
- int fd, len;
-
- fd = open(pathname, O_RDONLY);
- if (fd < 0)
- return -1;
-
- do {
- len = read(fd, buffer, buffsize);
- } while (len < 0 && errno == EINTR);
-
- close(fd);
-
- return len;
-}
-
-/* Extract the content of a the first occurence of a given field in
- * the content of /proc/cpuinfo and return it as a heap-allocated
- * string that must be freed by the caller.
- *
- * Return NULL if not found
- */
-static char*
-extract_cpuinfo_field(char* buffer, int buflen, const char* field)
-{
- int fieldlen = strlen(field);
- char* bufend = buffer + buflen;
- char* result = NULL;
- int len, ignore;
- const char *p, *q;
-
- /* Look for first field occurence, and ensures it starts the line.
- */
- p = buffer;
- bufend = buffer + buflen;
- for (;;) {
- p = memmem(p, bufend-p, field, fieldlen);
- if (p == NULL)
- goto EXIT;
-
- if (p == buffer || p[-1] == '\n')
- break;
-
- p += fieldlen;
- }
-
- /* Skip to the first column followed by a space */
- p += fieldlen;
- p = memchr(p, ':', bufend-p);
- if (p == NULL || p[1] != ' ')
- goto EXIT;
-
- /* Find the end of the line */
- p += 2;
- q = memchr(p, '\n', bufend-p);
- if (q == NULL)
- q = bufend;
-
- /* Copy the line into a heap-allocated buffer */
- len = q-p;
- result = malloc(len+1);
- if (result == NULL)
- goto EXIT;
-
- memcpy(result, p, len);
- result[len] = '\0';
-
-EXIT:
- return result;
-}
-
-/* Count the number of occurences of a given field prefix in /proc/cpuinfo.
- */
-static int
-count_cpuinfo_field(char* buffer, int buflen, const char* field)
-{
- int fieldlen = strlen(field);
- const char* p = buffer;
- const char* bufend = buffer + buflen;
- const char* q;
- int count = 0;
-
- for (;;) {
- const char* q;
-
- p = memmem(p, bufend-p, field, fieldlen);
- if (p == NULL)
- break;
-
- /* Ensure that the field is at the start of a line */
- if (p > buffer && p[-1] != '\n') {
- p += fieldlen;
- continue;
- }
-
-
- /* skip any whitespace */
- q = p + fieldlen;
- while (q < bufend && (*q == ' ' || *q == '\t'))
- q++;
-
- /* we must have a colon now */
- if (q < bufend && *q == ':') {
- count += 1;
- q ++;
- }
- p = q;
- }
-
- return count;
-}
-
-/* Like strlen(), but for constant string literals */
-#define STRLEN_CONST(x) ((sizeof(x)-1)
-
-
-/* Checks that a space-separated list of items contains one given 'item'.
- * Returns 1 if found, 0 otherwise.
- */
-static int
-has_list_item(const char* list, const char* item)
-{
- const char* p = list;
- int itemlen = strlen(item);
-
- if (list == NULL)
- return 0;
-
- while (*p) {
- const char* q;
-
- /* skip spaces */
- while (*p == ' ' || *p == '\t')
- p++;
-
- /* find end of current list item */
- q = p;
- while (*q && *q != ' ' && *q != '\t')
- q++;
-
- if (itemlen == q-p && !memcmp(p, item, itemlen))
- return 1;
-
- /* skip to next item */
- p = q;
- }
- return 0;
-}
-
-
-static void
-android_cpuInit(void)
-{
- char cpuinfo[4096];
- int cpuinfo_len;
-
- g_cpuFamily = DEFAULT_CPU_FAMILY;
- g_cpuFeatures = 0;
- g_cpuCount = 1;
-
- cpuinfo_len = read_file("/proc/cpuinfo", cpuinfo, sizeof cpuinfo);
- D("cpuinfo_len is (%d):\n%.*s\n", cpuinfo_len,
- cpuinfo_len >= 0 ? cpuinfo_len : 0, cpuinfo);
-
- if (cpuinfo_len < 0) /* should not happen */ {
- return;
- }
-
- /* Count the CPU cores, the value may be 0 for single-core CPUs */
- g_cpuCount = count_cpuinfo_field(cpuinfo, cpuinfo_len, "processor");
- if (g_cpuCount == 0) {
- g_cpuCount = count_cpuinfo_field(cpuinfo, cpuinfo_len, "Processor");
- if (g_cpuCount == 0) {
- g_cpuCount = 1;
- }
- }
-
- D("found cpuCount = %d\n", g_cpuCount);
-
-#ifdef __ARM_ARCH__
- {
- char* features = NULL;
- char* architecture = NULL;
-
- /* Extract architecture from the "CPU Architecture" field.
- * The list is well-known, unlike the the output of
- * the 'Processor' field which can vary greatly.
- *
- * See the definition of the 'proc_arch' array in
- * $KERNEL/arch/arm/kernel/setup.c and the 'c_show' function in
- * same file.
- */
- char* cpuArch = extract_cpuinfo_field(cpuinfo, cpuinfo_len, "CPU architecture");
-
- if (cpuArch != NULL) {
- char* end;
- long archNumber;
- int hasARMv7 = 0;
-
- D("found cpuArch = '%s'\n", cpuArch);
-
- /* read the initial decimal number, ignore the rest */
- archNumber = strtol(cpuArch, &end, 10);
-
- /* Here we assume that ARMv8 will be upwards compatible with v7
- * in the future. Unfortunately, there is no 'Features' field to
- * indicate that Thumb-2 is supported.
- */
- if (end > cpuArch && archNumber >= 7) {
- hasARMv7 = 1;
- }
-
- /* Unfortunately, it seems that certain ARMv6-based CPUs
- * report an incorrect architecture number of 7!
- *
- * See http://code.google.com/p/android/issues/detail?id=10812
- *
- * We try to correct this by looking at the 'elf_format'
- * field reported by the 'Processor' field, which is of the
- * form of "(v7l)" for an ARMv7-based CPU, and "(v6l)" for
- * an ARMv6-one.
- */
- if (hasARMv7) {
- char* cpuProc = extract_cpuinfo_field(cpuinfo, cpuinfo_len,
- "Processor");
- if (cpuProc != NULL) {
- D("found cpuProc = '%s'\n", cpuProc);
- if (has_list_item(cpuProc, "(v6l)")) {
- D("CPU processor and architecture mismatch!!\n");
- hasARMv7 = 0;
- }
- free(cpuProc);
- }
- }
-
- if (hasARMv7) {
- g_cpuFeatures |= ANDROID_CPU_ARM_FEATURE_ARMv7;
- }
-
- /* The LDREX / STREX instructions are available from ARMv6 */
- if (archNumber >= 6) {
- g_cpuFeatures |= ANDROID_CPU_ARM_FEATURE_LDREX_STREX;
- }
-
- free(cpuArch);
- }
-
- /* Extract the list of CPU features from 'Features' field */
- char* cpuFeatures = extract_cpuinfo_field(cpuinfo, cpuinfo_len, "Features");
-
- if (cpuFeatures != NULL) {
-
- D("found cpuFeatures = '%s'\n", cpuFeatures);
-
- if (has_list_item(cpuFeatures, "vfpv3"))
- g_cpuFeatures |= ANDROID_CPU_ARM_FEATURE_VFPv3;
-
- else if (has_list_item(cpuFeatures, "vfpv3d16"))
- g_cpuFeatures |= ANDROID_CPU_ARM_FEATURE_VFPv3;
-
- if (has_list_item(cpuFeatures, "neon")) {
- /* Note: Certain kernels only report neon but not vfpv3
- * in their features list. However, ARM mandates
- * that if Neon is implemented, so must be VFPv3
- * so always set the flag.
- */
- g_cpuFeatures |= ANDROID_CPU_ARM_FEATURE_NEON |
- ANDROID_CPU_ARM_FEATURE_VFPv3;
- }
- free(cpuFeatures);
- }
- }
-#endif /* __ARM_ARCH__ */
-
-#ifdef __i386__
- g_cpuFamily = ANDROID_CPU_FAMILY_X86;
-
- int regs[4];
-
-/* According to http://en.wikipedia.org/wiki/CPUID */
-#define VENDOR_INTEL_b 0x756e6547
-#define VENDOR_INTEL_c 0x6c65746e
-#define VENDOR_INTEL_d 0x49656e69
-
- x86_cpuid(0, regs);
- int vendorIsIntel = (regs[1] == VENDOR_INTEL_b &&
- regs[2] == VENDOR_INTEL_c &&
- regs[3] == VENDOR_INTEL_d);
-
- x86_cpuid(1, regs);
- if ((regs[2] & (1 << 9)) != 0) {
- g_cpuFeatures |= ANDROID_CPU_X86_FEATURE_SSSE3;
- }
- if ((regs[2] & (1 << 23)) != 0) {
- g_cpuFeatures |= ANDROID_CPU_X86_FEATURE_POPCNT;
- }
- if (vendorIsIntel && (regs[2] & (1 << 22)) != 0) {
- g_cpuFeatures |= ANDROID_CPU_X86_FEATURE_MOVBE;
- }
-#endif
-}
-
-
-AndroidCpuFamily
-android_getCpuFamily(void)
-{
- pthread_once(&g_once, android_cpuInit);
- return g_cpuFamily;
-}
-
-
-uint64_t
-android_getCpuFeatures(void)
-{
- pthread_once(&g_once, android_cpuInit);
- return g_cpuFeatures;
-}
-
-
-int
-android_getCpuCount(void)
-{
- pthread_once(&g_once, android_cpuInit);
- return g_cpuCount;
-}
diff --git a/chromium/third_party/webrtc/system_wrappers/source/android/cpu-features.h b/chromium/third_party/webrtc/system_wrappers/source/android/cpu-features.h
deleted file mode 100644
index f20c0bc4d93..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/source/android/cpu-features.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// You can download Android source at
-// http://source.android.com/source/downloading.html
-// Original files are in ndk/sources/android/cpufeatures
-// Revision is Change-Id: I9a0629efba36a6023f05e5f092e7addcc1b7d2a9
-
-#ifndef CPU_FEATURES_H
-#define CPU_FEATURES_H
-
-#include <sys/cdefs.h>
-#include <stdint.h>
-
-__BEGIN_DECLS
-
-typedef enum {
- ANDROID_CPU_FAMILY_UNKNOWN = 0,
- ANDROID_CPU_FAMILY_ARM,
- ANDROID_CPU_FAMILY_X86,
-
- ANDROID_CPU_FAMILY_MAX /* do not remove */
-
-} AndroidCpuFamily;
-
-/* Return family of the device's CPU */
-extern AndroidCpuFamily android_getCpuFamily(void);
-
-enum {
- ANDROID_CPU_ARM_FEATURE_ARMv7 = (1 << 0),
- ANDROID_CPU_ARM_FEATURE_VFPv3 = (1 << 1),
- ANDROID_CPU_ARM_FEATURE_NEON = (1 << 2),
- ANDROID_CPU_ARM_FEATURE_LDREX_STREX = (1 << 3),
-};
-
-enum {
- ANDROID_CPU_X86_FEATURE_SSSE3 = (1 << 0),
- ANDROID_CPU_X86_FEATURE_POPCNT = (1 << 1),
- ANDROID_CPU_X86_FEATURE_MOVBE = (1 << 2),
-};
-
-extern uint64_t android_getCpuFeatures(void);
-
-/* Return the number of CPU cores detected on this device. */
-extern int android_getCpuCount(void);
-
-__END_DECLS
-
-#endif /* CPU_FEATURES_H */
diff --git a/chromium/third_party/webrtc/system_wrappers/source/clock.cc b/chromium/third_party/webrtc/system_wrappers/source/clock.cc
index 7ef6c66fbc2..33eb8561f6b 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/clock.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/clock.cc
@@ -20,6 +20,7 @@
#include <time.h>
#endif
+#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
namespace webrtc {
@@ -128,18 +129,19 @@ void get_time(WindowsHelpTimer* help_timer, FILETIME& current_time) {
class RealTimeClock : public Clock {
// Return a timestamp in milliseconds relative to some arbitrary source; the
// source is fixed for this clock.
- virtual int64_t TimeInMilliseconds() OVERRIDE {
+ virtual int64_t TimeInMilliseconds() const OVERRIDE {
return TickTime::MillisecondTimestamp();
}
// Return a timestamp in microseconds relative to some arbitrary source; the
// source is fixed for this clock.
- virtual int64_t TimeInMicroseconds() OVERRIDE {
+ virtual int64_t TimeInMicroseconds() const OVERRIDE {
return TickTime::MicrosecondTimestamp();
}
// Retrieve an NTP absolute timestamp in seconds and fractions of a second.
- virtual void CurrentNtp(uint32_t& seconds, uint32_t& fractions) OVERRIDE {
+ virtual void CurrentNtp(uint32_t& seconds,
+ uint32_t& fractions) const OVERRIDE {
timeval tv = CurrentTimeVal();
double microseconds_in_seconds;
Adjust(tv, &seconds, &microseconds_in_seconds);
@@ -148,7 +150,7 @@ class RealTimeClock : public Clock {
}
// Retrieve an NTP absolute timestamp in milliseconds.
- virtual int64_t CurrentNtpInMilliseconds() OVERRIDE {
+ virtual int64_t CurrentNtpInMilliseconds() const OVERRIDE {
timeval tv = CurrentTimeVal();
uint32_t seconds;
double microseconds_in_seconds;
@@ -233,12 +235,26 @@ class UnixRealTimeClock : public RealTimeClock {
// Keeps the global state for the Windows implementation of RtpRtcpClock.
// Note that this is a POD. Only PODs are allowed to have static storage
// duration according to the Google Style guide.
-static WindowsHelpTimer global_help_timer = {0, 0, {{ 0, 0}, 0}, 0};
+//
+// Note that on Windows, GetSystemTimeAsFileTime has poorer (up to 15 ms)
+// resolution than the media timers, hence the WindowsHelpTimer context
+// object and Synchronize API to sync the two.
+//
+// We only sync up once, which means that on Windows, our realtime clock
+// won't respond to system time/date changes without a program restart.
+// TODO(henrike): We should probably call sync more often to catch
+// drift and time changes for parity with other platforms.
+
+static WindowsHelpTimer *SyncGlobalHelpTimer() {
+ static WindowsHelpTimer global_help_timer = {0, 0, {{ 0, 0}, 0}, 0};
+ Synchronize(&global_help_timer);
+ return &global_help_timer;
+}
#endif
Clock* Clock::GetRealTimeClock() {
#if defined(_WIN32)
- static WindowsRealTimeClock clock(&global_help_timer);
+ static WindowsRealTimeClock clock(SyncGlobalHelpTimer());
return &clock;
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
static UnixRealTimeClock clock;
@@ -249,23 +265,30 @@ Clock* Clock::GetRealTimeClock() {
}
SimulatedClock::SimulatedClock(int64_t initial_time_us)
- : time_us_(initial_time_us) {}
+ : time_us_(initial_time_us), lock_(RWLockWrapper::CreateRWLock()) {
+}
+
+SimulatedClock::~SimulatedClock() {
+}
-int64_t SimulatedClock::TimeInMilliseconds() {
+int64_t SimulatedClock::TimeInMilliseconds() const {
+ ReadLockScoped synchronize(*lock_);
return (time_us_ + 500) / 1000;
}
-int64_t SimulatedClock::TimeInMicroseconds() {
+int64_t SimulatedClock::TimeInMicroseconds() const {
+ ReadLockScoped synchronize(*lock_);
return time_us_;
}
-void SimulatedClock::CurrentNtp(uint32_t& seconds, uint32_t& fractions) {
- seconds = (TimeInMilliseconds() / 1000) + kNtpJan1970;
- fractions = (uint32_t)((TimeInMilliseconds() % 1000) *
- kMagicNtpFractionalUnit / 1000);
+void SimulatedClock::CurrentNtp(uint32_t& seconds, uint32_t& fractions) const {
+ int64_t now_ms = TimeInMilliseconds();
+ seconds = (now_ms / 1000) + kNtpJan1970;
+ fractions =
+ static_cast<uint32_t>((now_ms % 1000) * kMagicNtpFractionalUnit / 1000);
}
-int64_t SimulatedClock::CurrentNtpInMilliseconds() {
+int64_t SimulatedClock::CurrentNtpInMilliseconds() const {
return TimeInMilliseconds() + 1000 * static_cast<int64_t>(kNtpJan1970);
}
@@ -274,6 +297,7 @@ void SimulatedClock::AdvanceTimeMilliseconds(int64_t milliseconds) {
}
void SimulatedClock::AdvanceTimeMicroseconds(int64_t microseconds) {
+ WriteLockScoped synchronize(*lock_);
time_us_ += microseconds;
}
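
The point of the new RW lock is to let SimulatedClock reads run while another thread advances the simulated time; a typical test sketch (values are illustrative):

#include "webrtc/system_wrappers/interface/clock.h"

void SimulatedClockSketch() {
  webrtc::SimulatedClock clock(0 /* initial_time_us */);
  clock.AdvanceTimeMilliseconds(250);
  int64_t now_ms = clock.TimeInMilliseconds();  // 250.
  uint32_t ntp_seconds = 0;
  uint32_t ntp_fractions = 0;
  clock.CurrentNtp(ntp_seconds, ntp_fractions);
  (void)now_ms;
}
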
diff --git a/chromium/third_party/webrtc/system_wrappers/source/clock_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/clock_unittest.cc
index 67d699e5645..71969edcd00 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/clock_unittest.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/clock_unittest.cc
@@ -20,6 +20,7 @@ TEST(ClockTest, NtpTime) {
uint32_t fractions;
clock->CurrentNtp(seconds, fractions);
int64_t milliseconds = clock->CurrentNtpInMilliseconds();
+ EXPECT_GT(milliseconds / 1000, kNtpJan1970);
EXPECT_GE(milliseconds, Clock::NtpToMs(seconds, fractions));
EXPECT_NEAR(milliseconds, Clock::NtpToMs(seconds, fractions), 5);
}
diff --git a/chromium/third_party/webrtc/system_wrappers/source/cpu_features_android.c b/chromium/third_party/webrtc/system_wrappers/source/cpu_features_android.c
index 9b44d02846a..0cb3a6c5ee4 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/cpu_features_android.c
+++ b/chromium/third_party/webrtc/system_wrappers/source/cpu_features_android.c
@@ -8,11 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#if defined(WEBRTC_CHROMIUM_BUILD)
#include <cpu-features.h>
-#else
-#include "webrtc/system_wrappers/source/android/cpu-features.h"
-#endif // defined(WEBRTC_CHROMIUM_BUILD)
uint64_t WebRtc_GetCPUFeaturesARM(void) {
return android_getCpuFeatures();
diff --git a/chromium/third_party/webrtc/system_wrappers/source/critical_section_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/critical_section_unittest.cc
index 40df570e9a2..5c416b2de39 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/critical_section_unittest.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/critical_section_unittest.cc
@@ -81,7 +81,7 @@ bool LockUnlockThenStopRunFunction(void* obj) {
return false;
}
-TEST_F(CritSectTest, ThreadWakesOnce) {
+TEST_F(CritSectTest, ThreadWakesOnce) NO_THREAD_SAFETY_ANALYSIS {
CriticalSectionWrapper* crit_sect =
CriticalSectionWrapper::CreateCriticalSection();
ProtectedCount count(crit_sect);
@@ -110,7 +110,7 @@ bool LockUnlockRunFunction(void* obj) {
return true;
}
-TEST_F(CritSectTest, ThreadWakesTwice) {
+TEST_F(CritSectTest, ThreadWakesTwice) NO_THREAD_SAFETY_ANALYSIS {
CriticalSectionWrapper* crit_sect =
CriticalSectionWrapper::CreateCriticalSection();
ProtectedCount count(crit_sect);
diff --git a/chromium/third_party/webrtc/system_wrappers/source/field_trial_default.cc b/chromium/third_party/webrtc/system_wrappers/source/field_trial_default.cc
new file mode 100644
index 00000000000..892623cef51
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/source/field_trial_default.cc
@@ -0,0 +1,22 @@
+// Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the LICENSE file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+//
+
+#include "webrtc/system_wrappers/interface/field_trial.h"
+
+// Clients of webrtc that do not want to configure field trials can link with
+// this instead of providing their own implementation.
+namespace webrtc {
+namespace field_trial {
+
+std::string FindFullName(const std::string& name) {
+ return std::string();
+}
+
+} // namespace field_trial
+} // namespace webrtc
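
A hedged sketch of what linking this stub means for callers (the trial name is illustrative):

#include <string>

#include "webrtc/system_wrappers/interface/field_trial.h"

bool SomeExperimentEnabled() {
  // With field_trial_default.cc linked in, FindFullName() always returns "",
  // so every experiment falls back to its default behavior.
  std::string group = webrtc::field_trial::FindFullName("WebRTC-SomeExperiment");
  return group == "Enabled";
}
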
diff --git a/chromium/third_party/webrtc/system_wrappers/source/list_no_stl.cc b/chromium/third_party/webrtc/system_wrappers/source/list_no_stl.cc
deleted file mode 100644
index 5c9f5af01ce..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/source/list_no_stl.cc
+++ /dev/null
@@ -1,241 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
-
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-ListItem::ListItem(const void* item)
- : next_(0),
- prev_(0),
- item_ptr_(item),
- item_(0) {
-}
-
-ListItem::ListItem(const unsigned int item)
- : next_(0),
- prev_(0),
- item_ptr_(0),
- item_(item) {
-}
-
-ListItem::~ListItem() {
-}
-
-void* ListItem::GetItem() const {
- return const_cast<void*>(item_ptr_);
-}
-
-unsigned int ListItem::GetUnsignedItem() const {
- return item_;
-}
-
-ListWrapper::ListWrapper()
- : critical_section_(CriticalSectionWrapper::CreateCriticalSection()),
- first_(0),
- last_(0),
- size_(0) {
-}
-
-ListWrapper::~ListWrapper() {
- if (!Empty()) {
- // TODO(hellner) I'm not sure this loggin is useful.
- WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1,
- "Potential memory leak in ListWrapper");
- // Remove all remaining list items.
- while (Erase(First()) == 0)
- {}
- }
- delete critical_section_;
-}
-
-bool ListWrapper::Empty() const {
- return !first_ && !last_;
-}
-
-unsigned int ListWrapper::GetSize() const {
- return size_;
-}
-
-int ListWrapper::PushBack(const void* ptr) {
- ListItem* item = new ListItem(ptr);
- CriticalSectionScoped lock(critical_section_);
- PushBackImpl(item);
- return 0;
-}
-
-int ListWrapper::PushBack(const unsigned int item_id) {
- ListItem* item = new ListItem(item_id);
- CriticalSectionScoped lock(critical_section_);
- PushBackImpl(item);
- return 0;
-}
-
-int ListWrapper::PushFront(const unsigned int item_id) {
- ListItem* item = new ListItem(item_id);
- CriticalSectionScoped lock(critical_section_);
- PushFrontImpl(item);
- return 0;
-}
-
-int ListWrapper::PushFront(const void* ptr) {
- ListItem* item = new ListItem(ptr);
- CriticalSectionScoped lock(critical_section_);
- PushFrontImpl(item);
- return 0;
-}
-
-int ListWrapper::PopFront() {
- return Erase(first_);
-}
-
-int ListWrapper::PopBack() {
- return Erase(last_);
-}
-
-ListItem* ListWrapper::First() const {
- return first_;
-}
-
-ListItem* ListWrapper::Last() const {
- return last_;
-}
-
-ListItem* ListWrapper::Next(ListItem* item) const {
- if (!item) {
- return 0;
- }
- return item->next_;
-}
-
-ListItem* ListWrapper::Previous(ListItem* item) const {
- if (!item) {
- return 0;
- }
- return item->prev_;
-}
-
-int ListWrapper::Insert(ListItem* existing_previous_item, ListItem* new_item) {
- if (!new_item) {
- return -1;
- }
- // Allow existing_previous_item to be NULL if the list is empty.
- // TODO(hellner) why allow this? Keep it as is for now to avoid
- // breaking API contract.
- if (!existing_previous_item && !Empty()) {
- return -1;
- }
- CriticalSectionScoped lock(critical_section_);
- if (!existing_previous_item) {
- PushBackImpl(new_item);
- return 0;
- }
- ListItem* next_item = existing_previous_item->next_;
- new_item->next_ = existing_previous_item->next_;
- new_item->prev_ = existing_previous_item;
- existing_previous_item->next_ = new_item;
- if (next_item) {
- next_item->prev_ = new_item;
- } else {
- last_ = new_item;
- }
- size_++;
- return 0;
-}
-
-int ListWrapper::InsertBefore(ListItem* existing_next_item,
- ListItem* new_item) {
- if (!new_item) {
- return -1;
- }
- // Allow existing_next_item to be NULL if the list is empty.
- // Todo: why allow this? Keep it as is for now to avoid breaking API
- // contract.
- if (!existing_next_item && !Empty()) {
- return -1;
- }
- CriticalSectionScoped lock(critical_section_);
- if (!existing_next_item) {
- PushBackImpl(new_item);
- return 0;
- }
-
- ListItem* previous_item = existing_next_item->prev_;
- new_item->next_ = existing_next_item;
- new_item->prev_ = previous_item;
- existing_next_item->prev_ = new_item;
- if (previous_item) {
- previous_item->next_ = new_item;
- } else {
- first_ = new_item;
- }
- size_++;
- return 0;
-}
-
-int ListWrapper::Erase(ListItem* item) {
- if (!item) {
- return -1;
- }
- size_--;
- ListItem* previous_item = item->prev_;
- ListItem* next_item = item->next_;
- if (!previous_item) {
- if (next_item) {
- next_item->prev_ = 0;
- }
- first_ = next_item;
- } else {
- previous_item->next_ = next_item;
- }
- if (!next_item) {
- if (previous_item) {
- previous_item->next_ = 0;
- }
- last_ = previous_item;
- } else {
- next_item->prev_ = previous_item;
- }
- delete item;
- return 0;
-}
-
-void ListWrapper::PushBackImpl(ListItem* item) {
- if (Empty()) {
- first_ = item;
- last_ = item;
- size_++;
- return;
- }
-
- item->prev_ = last_;
- last_->next_ = item;
- last_ = item;
- size_++;
-}
-
-void ListWrapper::PushFrontImpl(ListItem* item) {
- if (Empty()) {
- first_ = item;
- last_ = item;
- size_++;
- return;
- }
-
- item->next_ = first_;
- first_->prev_ = item;
- first_ = item;
- size_++;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/system_wrappers/source/list_no_stl.h b/chromium/third_party/webrtc/system_wrappers/source/list_no_stl.h
deleted file mode 100644
index dcc9209e25c..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/source/list_no_stl.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_NO_STL_H_
-#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_NO_STL_H_
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-
-class ListNoStlItem {
- public:
- ListNoStlItem(const void* ptr);
- ListNoStlItem(const unsigned int item);
- virtual ~ListNoStlItem();
- void* GetItem() const;
- unsigned int GetUnsignedItem() const;
-
- protected:
- ListNoStlItem* next_;
- ListNoStlItem* prev_;
-
- private:
- friend class ListNoStl;
-
- const void* item_ptr_;
- const unsigned int item_;
- DISALLOW_COPY_AND_ASSIGN(ListNoStlItem);
-};
-
-class ListNoStl {
- public:
- ListNoStl();
- virtual ~ListNoStl();
-
- // ListWrapper functions
- unsigned int GetSize() const;
- int PushBack(const void* ptr);
- int PushBack(const unsigned int item_id);
- int PushFront(const void* ptr);
- int PushFront(const unsigned int item_id);
- int PopFront();
- int PopBack();
- bool Empty() const;
- ListNoStlItem* First() const;
- ListNoStlItem* Last() const;
- ListNoStlItem* Next(ListNoStlItem* item) const;
- ListNoStlItem* Previous(ListNoStlItem* item) const;
- int Erase(ListNoStlItem* item);
- int Insert(ListNoStlItem* existing_previous_item,
- ListNoStlItem* new_item);
-
- int InsertBefore(ListNoStlItem* existing_next_item,
- ListNoStlItem* new_item);
-
- private:
- void PushBack(ListNoStlItem* item);
- void PushFront(ListNoStlItem* item);
-
- CriticalSectionWrapper* critical_section_;
- ListNoStlItem* first_;
- ListNoStlItem* last_;
- unsigned int size_;
- DISALLOW_COPY_AND_ASSIGN(ListNoStl);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_NO_STL_H_
diff --git a/chromium/third_party/webrtc/system_wrappers/source/list_stl.cc b/chromium/third_party/webrtc/system_wrappers/source/list_stl.cc
deleted file mode 100644
index 81b6f0cc8af..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/source/list_stl.cc
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
-
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-ListItem::ListItem(const void* item)
- : this_iter_(),
- item_ptr_(item),
- item_(0) {
-}
-
-ListItem::ListItem(const unsigned int item)
- : this_iter_(),
- item_ptr_(0),
- item_(item) {
-}
-
-ListItem::~ListItem() {
-}
-
-void* ListItem::GetItem() const {
- return const_cast<void*>(item_ptr_);
-}
-
-unsigned int ListItem::GetUnsignedItem() const {
- return item_;
-}
-
-ListWrapper::ListWrapper()
- : list_() {
-}
-
-ListWrapper::~ListWrapper() {
- if (!Empty()) {
-    // TODO(hellner) I'm not sure this logging is useful.
- WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1,
- "Potential memory leak in ListWrapper");
- // Remove all remaining list items.
- while (Erase(First()) == 0) {}
- }
-}
-
-bool ListWrapper::Empty() const {
- return list_.empty();
-}
-
-unsigned int ListWrapper::GetSize() const {
- return list_.size();
-}
-
-int ListWrapper::PushBack(const void* ptr) {
- ListItem* item = new ListItem(ptr);
- list_.push_back(item);
- return 0;
-}
-
-int ListWrapper::PushBack(const unsigned int item_id) {
- ListItem* item = new ListItem(item_id);
- list_.push_back(item);
- return 0;
-}
-
-int ListWrapper::PushFront(const unsigned int item_id) {
- ListItem* item = new ListItem(item_id);
- list_.push_front(item);
- return 0;
-}
-
-int ListWrapper::PushFront(const void* ptr) {
- ListItem* item = new ListItem(ptr);
- list_.push_front(item);
- return 0;
-}
-
-int ListWrapper::PopFront() {
- if (list_.empty()) {
- return -1;
- }
- list_.pop_front();
- return 0;
-}
-
-int ListWrapper::PopBack() {
- if (list_.empty()) {
- return -1;
- }
- list_.pop_back();
- return 0;
-}
-
-ListItem* ListWrapper::First() const {
- if (list_.empty()) {
- return NULL;
- }
- std::list<ListItem*>::iterator item_iter = list_.begin();
- ListItem* return_item = (*item_iter);
- return_item->this_iter_ = item_iter;
- return return_item;
-}
-
-ListItem* ListWrapper::Last() const {
- if (list_.empty()) {
- return NULL;
- }
- // std::list::end() addresses the last item + 1. Decrement so that the
- // actual last is accessed.
- std::list<ListItem*>::iterator item_iter = list_.end();
- --item_iter;
- ListItem* return_item = (*item_iter);
- return_item->this_iter_ = item_iter;
- return return_item;
-}
-
-ListItem* ListWrapper::Next(ListItem* item) const {
- if (item == NULL) {
- return NULL;
- }
- std::list<ListItem*>::iterator item_iter = item->this_iter_;
- ++item_iter;
- if (item_iter == list_.end()) {
- return NULL;
- }
- ListItem* return_item = (*item_iter);
- return_item->this_iter_ = item_iter;
- return return_item;
-}
-
-ListItem* ListWrapper::Previous(ListItem* item) const {
- if (item == NULL) {
- return NULL;
- }
- std::list<ListItem*>::iterator item_iter = item->this_iter_;
- if (item_iter == list_.begin()) {
- return NULL;
- }
- --item_iter;
- ListItem* return_item = (*item_iter);
- return_item->this_iter_ = item_iter;
- return return_item;
-}
-
-int ListWrapper::Insert(ListItem* existing_previous_item,
- ListItem* new_item) {
- // Allow existing_previous_item to be NULL if the list is empty.
- // TODO(hellner) why allow this? Keep it as is for now to avoid
- // breaking API contract.
- if (!existing_previous_item && !Empty()) {
- return -1;
- }
-
- if (!new_item) {
- return -1;
- }
-
- std::list<ListItem*>::iterator insert_location = list_.begin();
- if (!Empty()) {
- insert_location = existing_previous_item->this_iter_;
- if (insert_location != list_.end()) {
- ++insert_location;
- }
- }
-
- list_.insert(insert_location, new_item);
- return 0;
-}
-
-int ListWrapper::InsertBefore(ListItem* existing_next_item,
- ListItem* new_item) {
- // Allow existing_next_item to be NULL if the list is empty.
- // Todo: why allow this? Keep it as is for now to avoid breaking API
- // contract.
- if (!existing_next_item && !Empty()) {
- return -1;
- }
- if (!new_item) {
- return -1;
- }
-
- std::list<ListItem*>::iterator insert_location = list_.begin();
- if (!Empty()) {
- insert_location = existing_next_item->this_iter_;
- }
-
- list_.insert(insert_location, new_item);
- return 0;
-}
-
-int ListWrapper::Erase(ListItem* item) {
- if (item == NULL) {
- return -1;
- }
- list_.erase(item->this_iter_);
- return 0;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/system_wrappers/source/list_stl.h b/chromium/third_party/webrtc/system_wrappers/source/list_stl.h
deleted file mode 100644
index 29945304f36..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/source/list_stl.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_STL_H_
-#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_STL_H_
-
-#include <list>
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-
-namespace webrtc {
-
-class ListItem {
- public:
- ListItem(const void* ptr);
- ListItem(const unsigned int item);
- virtual ~ListItem();
- void* GetItem() const;
- unsigned int GetUnsignedItem() const;
-
- private:
- friend class ListWrapper;
- mutable std::list<ListItem*>::iterator this_iter_;
- const void* item_ptr_;
- const unsigned int item_;
- DISALLOW_COPY_AND_ASSIGN(ListItem);
-};
-
-class ListWrapper {
- public:
- ListWrapper();
- ~ListWrapper();
-
- // ListWrapper functions
- unsigned int GetSize() const;
- int PushBack(const void* ptr);
- int PushBack(const unsigned int item_id);
- int PushFront(const void* ptr);
- int PushFront(const unsigned int item_id);
- int PopFront();
- int PopBack();
- bool Empty() const;
- ListItem* First() const;
- ListItem* Last() const;
- ListItem* Next(ListItem* item) const;
- ListItem* Previous(ListItem* item) const;
- int Erase(ListItem* item);
- int Insert(ListItem* existing_previous_item, ListItem* new_item);
- int InsertBefore(ListItem* existing_next_item, ListItem* new_item);
-
- private:
- mutable std::list<ListItem*> list_;
- DISALLOW_COPY_AND_ASSIGN(ListWrapper);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_SYSTEM_WRAPPERS_SOURCE_LIST_STL_H_
diff --git a/chromium/third_party/webrtc/system_wrappers/source/list_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/list_unittest.cc
deleted file mode 100644
index 1e4f922a6b0..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/source/list_unittest.cc
+++ /dev/null
@@ -1,475 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-
-using ::webrtc::ListWrapper;
-using ::webrtc::ListItem;
-using ::webrtc::scoped_ptr;
-
-// Note: kNumberOfElements needs to be even.
-const unsigned int kNumberOfElements = 10;
-
-// An opaque implementation of dynamically or statically allocated unsigned
-// ints. This class makes it possible to use the exact same code for testing
-// both the dynamic and the static implementation of ListWrapper.
-// Clarification: ListWrapper has two versions of PushBack(..). It takes an
-// unsigned integer or a void pointer. The integer implementation takes care
-// of memory management. The void pointer version expects the caller to manage
-// the memory associated with the void pointer.
-// This class works like the integer version but can be implemented on top of
-// either the integer version or the void pointer version of ListWrapper.
-// Note: the non-virtual functions behave the same for both versions.
-class ListWrapperSimple {
-public:
- static ListWrapperSimple* Create(bool static_allocation);
- virtual ~ListWrapperSimple() {}
-
- // These three functions should be used for manipulating ListItems so that
- // they are the type corresponding to the underlying implementation.
- virtual unsigned int GetUnsignedItem(
- const ListItem* item) const = 0;
- virtual ListItem* CreateListItem(unsigned int item_id) = 0;
- unsigned int GetSize() const {
- return list_.GetSize();
- }
- virtual int PushBack(const unsigned int item_id) = 0;
- virtual int PushFront(const unsigned int item_id) = 0;
- virtual int PopFront() = 0;
- virtual int PopBack() = 0;
- bool Empty() const {
- return list_.Empty();
- }
- ListItem* First() const {
- return list_.First();
- }
- ListItem* Last() const {
- return list_.Last();
- }
- ListItem* Next(ListItem* item) const {
- return list_.Next(item);
- }
- ListItem* Previous(ListItem* item) const {
- return list_.Previous(item);
- }
- virtual int Erase(ListItem* item) = 0;
- int Insert(ListItem* existing_previous_item,
- ListItem* new_item) {
- const int retval = list_.Insert(existing_previous_item, new_item);
- if (retval != 0) {
- EXPECT_TRUE(DestroyListItem(new_item));
- }
- return retval;
- }
-
- int InsertBefore(ListItem* existing_next_item,
- ListItem* new_item) {
- const int retval = list_.InsertBefore(existing_next_item, new_item);
- if (retval != 0) {
- EXPECT_TRUE(DestroyListItem(new_item));
- }
- return retval;
- }
-protected:
- ListWrapperSimple() {}
-
- virtual bool DestroyListItemContent(ListItem* item) = 0;
- bool DestroyListItem(ListItem* item) {
- const bool retval = DestroyListItemContent(item);
- delete item;
- return retval;
- }
-
- ListWrapper list_;
-};
-
-void ClearList(ListWrapperSimple* list_wrapper) {
- if (list_wrapper == NULL) {
- return;
- }
- ListItem* list_item = list_wrapper->First();
- while (list_item != NULL) {
- EXPECT_EQ(list_wrapper->Erase(list_item), 0);
- list_item = list_wrapper->First();
- }
-}
-
-class ListWrapperStatic : public ListWrapperSimple {
-public:
- ListWrapperStatic() {}
- virtual ~ListWrapperStatic() {
- ClearList(this);
- }
-
- virtual unsigned int GetUnsignedItem(const ListItem* item) const {
- return item->GetUnsignedItem();
- }
- virtual ListItem* CreateListItem(unsigned int item_id) {
- return new ListItem(item_id);
- }
- virtual bool DestroyListItemContent(ListItem* item) {
- return true;
- }
- virtual int PushBack(const unsigned int item_id) {
- return list_.PushBack(item_id);
- }
- virtual int PushFront(const unsigned int item_id) {
- return list_.PushFront(item_id);
- }
- virtual int PopFront() {
- return list_.PopFront();
- }
- virtual int PopBack() {
- return list_.PopBack();
- }
- virtual int Erase(ListItem* item) {
- return list_.Erase(item);
- }
-};
-
-class ListWrapperDynamic : public ListWrapperSimple {
-public:
- ListWrapperDynamic() {}
- virtual ~ListWrapperDynamic() {
- ClearList(this);
- }
-
- virtual unsigned int GetUnsignedItem(const ListItem* item) const {
- const unsigned int* return_value_pointer =
- reinterpret_cast<unsigned int*>(item->GetItem());
- if (return_value_pointer == NULL) {
- return -1;
- }
- return *return_value_pointer;
- }
- virtual ListItem* CreateListItem(unsigned int item_id) {
- unsigned int* item_id_pointer = new unsigned int;
- if (item_id_pointer == NULL) {
- return NULL;
- }
- *item_id_pointer = item_id;
- ListItem* return_value = new ListItem(
- reinterpret_cast<void*>(item_id_pointer));
- if (return_value == NULL) {
- delete item_id_pointer;
- return NULL;
- }
- return return_value;
- }
- virtual bool DestroyListItemContent(ListItem* item) {
- if (item == NULL) {
- return false;
- }
- bool return_value = false;
- unsigned int* item_id_ptr = reinterpret_cast<unsigned int*>(
- item->GetItem());
- if (item_id_ptr != NULL) {
- return_value = true;
- delete item_id_ptr;
- }
- return return_value;
- }
- virtual int PushBack(const unsigned int item_id) {
- unsigned int* item_id_ptr = new unsigned int;
- if (item_id_ptr == NULL) {
- return -1;
- }
- *item_id_ptr = item_id;
- const int return_value = list_.PushBack(
- reinterpret_cast<void*>(item_id_ptr));
- if (return_value != 0) {
- delete item_id_ptr;
- }
- return return_value;
- }
- virtual int PushFront(const unsigned int item_id) {
- unsigned int* item_id_ptr = new unsigned int;
- if (item_id_ptr == NULL) {
- return -1;
- }
- *item_id_ptr = item_id;
- const int return_value = list_.PushFront(
- reinterpret_cast<void*>(item_id_ptr));
- if (return_value != 0) {
- delete item_id_ptr;
- }
- return return_value;
- }
- virtual int PopFront() {
- return Erase(list_.First());
- }
- virtual int PopBack() {
- return Erase(list_.Last());
- }
- virtual int Erase(ListItem* item) {
- if (item == NULL) {
- return -1;
- }
- int retval = 0;
- if (!DestroyListItemContent(item)) {
- retval = -1;
- ADD_FAILURE();
- }
- if (list_.Erase(item) != 0) {
- retval = -1;
- }
- return retval;
- }
-};
-
-ListWrapperSimple* ListWrapperSimple::Create(bool static_allocation) {
- if (static_allocation) {
- return new ListWrapperStatic();
- }
- return new ListWrapperDynamic();
-}
-
-ListWrapperSimple* CreateAscendingList(bool static_allocation) {
- ListWrapperSimple* return_value = ListWrapperSimple::Create(
- static_allocation);
- if (return_value == NULL) {
- return NULL;
- }
- for (unsigned int i = 0; i < kNumberOfElements; ++i) {
- if (return_value->PushBack(i) == -1) {
- ClearList(return_value);
- delete return_value;
- return NULL;
- }
- }
- return return_value;
-}
-
-ListWrapperSimple* CreateDescendingList(bool static_allocation) {
- ListWrapperSimple* return_value = ListWrapperSimple::Create(
- static_allocation);
- if (return_value == NULL) {
- return NULL;
- }
- for (unsigned int i = 0; i < kNumberOfElements; ++i) {
- if (return_value->PushBack(kNumberOfElements - i - 1) == -1) {
- ClearList(return_value);
- delete return_value;
- return NULL;
- }
- }
- return return_value;
-}
-
-// [0,kNumberOfElements - 1,1,kNumberOfElements - 2,...] (this is why
-// kNumberOfElements need to be even)
-ListWrapperSimple* CreateInterleavedList(bool static_allocation) {
- ListWrapperSimple* return_value = ListWrapperSimple::Create(
- static_allocation);
- if (return_value == NULL) {
- return NULL;
- }
- unsigned int uneven_count = 0;
- unsigned int even_count = 0;
- for (unsigned int i = 0; i < kNumberOfElements; i++) {
- unsigned int push_value = 0;
- if ((i % 2) == 0) {
- push_value = even_count;
- even_count++;
- } else {
- push_value = kNumberOfElements - uneven_count - 1;
- uneven_count++;
- }
- if (return_value->PushBack(push_value) == -1) {
- ClearList(return_value);
- delete return_value;
- return NULL;
- }
- }
- return return_value;
-}
-
-void PrintList(const ListWrapperSimple* list) {
- ListItem* list_item = list->First();
- printf("[");
- while (list_item != NULL) {
- printf("%3u", list->GetUnsignedItem(list_item));
- list_item = list->Next(list_item);
- }
- printf("]\n");
-}
-
-bool CompareLists(const ListWrapperSimple* lhs, const ListWrapperSimple* rhs) {
- const unsigned int list_size = lhs->GetSize();
- if (lhs->GetSize() != rhs->GetSize()) {
- return false;
- }
- if (lhs->Empty()) {
- return rhs->Empty();
- }
- unsigned int i = 0;
- ListItem* lhs_item = lhs->First();
- ListItem* rhs_item = rhs->First();
- while (i < list_size) {
- if (lhs_item == NULL) {
- return false;
- }
- if (rhs_item == NULL) {
- return false;
- }
- if (lhs->GetUnsignedItem(lhs_item) != rhs->GetUnsignedItem(rhs_item)) {
- return false;
- }
- i++;
- lhs_item = lhs->Next(lhs_item);
- rhs_item = rhs->Next(rhs_item);
- }
- return true;
-}
-
-TEST(ListWrapperTest, ReverseNewIntList) {
- // Create a new temporary list with elements reversed those of
- // new_int_list_
- const scoped_ptr<ListWrapperSimple> descending_list(
- CreateDescendingList(rand() % 2));
- ASSERT_FALSE(descending_list.get() == NULL);
- ASSERT_FALSE(descending_list->Empty());
- ASSERT_EQ(kNumberOfElements, descending_list->GetSize());
-
- const scoped_ptr<ListWrapperSimple> ascending_list(
- CreateAscendingList(rand() % 2));
- ASSERT_FALSE(ascending_list.get() == NULL);
- ASSERT_FALSE(ascending_list->Empty());
- ASSERT_EQ(kNumberOfElements, ascending_list->GetSize());
-
- scoped_ptr<ListWrapperSimple> list_to_reverse(
- ListWrapperSimple::Create(rand() % 2));
-
- // Reverse the list using PushBack and Previous.
- for (ListItem* item = ascending_list->Last(); item != NULL;
- item = ascending_list->Previous(item)) {
- list_to_reverse->PushBack(ascending_list->GetUnsignedItem(item));
- }
-
- ASSERT_TRUE(CompareLists(descending_list.get(), list_to_reverse.get()));
-
- scoped_ptr<ListWrapperSimple> list_to_un_reverse(
- ListWrapperSimple::Create(rand() % 2));
- ASSERT_FALSE(list_to_un_reverse.get() == NULL);
- // Reverse the reversed list using PushFront and Next.
- for (ListItem* item = list_to_reverse->First(); item != NULL;
- item = list_to_reverse->Next(item)) {
- list_to_un_reverse->PushFront(list_to_reverse->GetUnsignedItem(item));
- }
- ASSERT_TRUE(CompareLists(ascending_list.get(), list_to_un_reverse.get()));
-}
-
-TEST(ListWrapperTest, PopTest) {
- scoped_ptr<ListWrapperSimple> ascending_list(CreateAscendingList(rand() % 2));
- ASSERT_FALSE(ascending_list.get() == NULL);
- ASSERT_FALSE(ascending_list->Empty());
- EXPECT_EQ(0, ascending_list->PopFront());
- EXPECT_EQ(1U, ascending_list->GetUnsignedItem(ascending_list->First()));
-
- EXPECT_EQ(0, ascending_list->PopBack());
- EXPECT_EQ(kNumberOfElements - 2, ascending_list->GetUnsignedItem(
- ascending_list->Last()));
- EXPECT_EQ(kNumberOfElements - 2, ascending_list->GetSize());
-}
-
-// Use Insert to interleave two lists.
-TEST(ListWrapperTest, InterLeaveTest) {
- scoped_ptr<ListWrapperSimple> interleave_list(
- CreateAscendingList(rand() % 2));
- ASSERT_FALSE(interleave_list.get() == NULL);
- ASSERT_FALSE(interleave_list->Empty());
-
- scoped_ptr<ListWrapperSimple> descending_list(
- CreateDescendingList(rand() % 2));
- ASSERT_FALSE(descending_list.get() == NULL);
-
- for (unsigned int i = 0; i < kNumberOfElements / 2; ++i) {
- ASSERT_EQ(0, interleave_list->PopBack());
- ASSERT_EQ(0, descending_list->PopBack());
- }
- ASSERT_EQ(kNumberOfElements / 2, interleave_list->GetSize());
- ASSERT_EQ(kNumberOfElements / 2, descending_list->GetSize());
-
- unsigned int insert_position = kNumberOfElements / 2;
- ASSERT_EQ(insert_position * 2, kNumberOfElements);
- while (!descending_list->Empty()) {
- ListItem* item = descending_list->Last();
- ASSERT_FALSE(item == NULL);
-
- const unsigned int item_id = descending_list->GetUnsignedItem(item);
- ASSERT_EQ(0, descending_list->Erase(item));
-
- ListItem* insert_item = interleave_list->CreateListItem(item_id);
- ASSERT_FALSE(insert_item == NULL);
- item = interleave_list->First();
- ASSERT_FALSE(item == NULL);
- for (unsigned int j = 0; j < insert_position - 1; ++j) {
- item = interleave_list->Next(item);
- ASSERT_FALSE(item == NULL);
- }
- EXPECT_EQ(0, interleave_list->Insert(item, insert_item));
- --insert_position;
- }
-
- scoped_ptr<ListWrapperSimple> interleaved_list(
- CreateInterleavedList(rand() % 2));
- ASSERT_FALSE(interleaved_list.get() == NULL);
- ASSERT_FALSE(interleaved_list->Empty());
- ASSERT_TRUE(CompareLists(interleaved_list.get(), interleave_list.get()));
-}
-
-// Use InsertBefore to interleave two lists.
-TEST(ListWrapperTest, InterLeaveTestII) {
- scoped_ptr<ListWrapperSimple> interleave_list(
- CreateDescendingList(rand() % 2));
- ASSERT_FALSE(interleave_list.get() == NULL);
- ASSERT_FALSE(interleave_list->Empty());
-
- scoped_ptr<ListWrapperSimple> ascending_list(CreateAscendingList(rand() % 2));
- ASSERT_FALSE(ascending_list.get() == NULL);
-
- for (unsigned int i = 0; i < kNumberOfElements / 2; ++i) {
- ASSERT_EQ(0, interleave_list->PopBack());
- ASSERT_EQ(0, ascending_list->PopBack());
- }
- ASSERT_EQ(kNumberOfElements / 2, interleave_list->GetSize());
- ASSERT_EQ(kNumberOfElements / 2, ascending_list->GetSize());
-
- unsigned int insert_position = kNumberOfElements / 2;
- ASSERT_EQ(insert_position * 2, kNumberOfElements);
- while (!ascending_list->Empty()) {
- ListItem* item = ascending_list->Last();
- ASSERT_FALSE(item == NULL);
-
- const unsigned int item_id = ascending_list->GetUnsignedItem(item);
- ASSERT_EQ(0, ascending_list->Erase(item));
-
- ListItem* insert_item = interleave_list->CreateListItem(item_id);
- ASSERT_FALSE(insert_item == NULL);
- item = interleave_list->First();
- ASSERT_FALSE(item == NULL);
- for (unsigned int j = 0; j < insert_position - 1; ++j) {
- item = interleave_list->Next(item);
- ASSERT_FALSE(item == NULL);
- }
- EXPECT_EQ(interleave_list->InsertBefore(item, insert_item), 0);
- --insert_position;
- }
-
- scoped_ptr<ListWrapperSimple> interleaved_list(
- CreateInterleavedList(rand() % 2));
- ASSERT_FALSE(interleaved_list.get() == NULL);
- ASSERT_FALSE(interleaved_list->Empty());
-
- ASSERT_TRUE(CompareLists(interleaved_list.get(), interleave_list.get()));
-}
diff --git a/chromium/third_party/webrtc/system_wrappers/source/move.h b/chromium/third_party/webrtc/system_wrappers/source/move.h
index 53109c73ad6..2e93641f456 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/move.h
+++ b/chromium/third_party/webrtc/system_wrappers/source/move.h
@@ -144,6 +144,16 @@
// choose the one that adheres to the standard.
//
//
+// WHY HAVE typedef void MoveOnlyTypeForCPP03
+//
+// Callback<>/Bind() needs to understand movable-but-not-copyable semantics
+// to call .Pass() appropriately when it is expected to transfer the value.
+// The cryptic typedef MoveOnlyTypeForCPP03 is added to make this check
+// easy and automatic in helper templates for Callback<>/Bind().
+// See IsMoveOnlyType template and its usage in base/callback_internal.h
+// for more details.
+//
+//
// COMPARED TO C++11
//
// In C++11, you would implement this functionality using an r-value reference
@@ -210,6 +220,7 @@
public: \
operator rvalue_type() { return rvalue_type(this); } \
type Pass() { return type(rvalue_type(this)); } \
+ typedef void MoveOnlyTypeForCPP03; \
private:
#endif // WEBRTC_SYSTEM_WRAPPERS_INTEFACE_MOVE_H_
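The MoveOnlyTypeForCPP03 marker documented above is typically detected with a
small SFINAE helper. The sketch below is illustrative only: the struct layout
and the Check() names are assumptions, and the real check is the IsMoveOnlyType
template in base/callback_internal.h that the comment points to.

template <typename T>
struct IsMoveOnlyType {
 private:
  // Preferred overload when T declares the MoveOnlyTypeForCPP03 typedef;
  // otherwise substitution fails and the ellipsis overload is chosen.
  template <typename U>
  static char Check(typename U::MoveOnlyTypeForCPP03*);
  template <typename U>
  static int Check(...);

 public:
  static const bool value = (sizeof(Check<T>(0)) == sizeof(char));
};

A Callback/Bind helper can then branch on IsMoveOnlyType<T>::value and call
.Pass() instead of copying when the wrapped value is move-only.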
diff --git a/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp.cc b/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp.cc
new file mode 100644
index 00000000000..d6b7b14084e
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp.cc
@@ -0,0 +1,150 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/system_wrappers/interface/rtp_to_ntp.h"
+
+#include "webrtc/system_wrappers/interface/clock.h"
+
+#include <assert.h>
+
+namespace webrtc {
+
+RtcpMeasurement::RtcpMeasurement()
+ : ntp_secs(0), ntp_frac(0), rtp_timestamp(0) {}
+
+RtcpMeasurement::RtcpMeasurement(uint32_t ntp_secs, uint32_t ntp_frac,
+ uint32_t timestamp)
+ : ntp_secs(ntp_secs), ntp_frac(ntp_frac), rtp_timestamp(timestamp) {}
+
+// Calculates the RTP timestamp frequency from two pairs of NTP and RTP
+// timestamps.
+bool CalculateFrequency(
+ int64_t rtcp_ntp_ms1,
+ uint32_t rtp_timestamp1,
+ int64_t rtcp_ntp_ms2,
+ uint32_t rtp_timestamp2,
+ double* frequency_khz) {
+ if (rtcp_ntp_ms1 <= rtcp_ntp_ms2) {
+ return false;
+ }
+ *frequency_khz = static_cast<double>(rtp_timestamp1 - rtp_timestamp2) /
+ static_cast<double>(rtcp_ntp_ms1 - rtcp_ntp_ms2);
+ return true;
+}
+
+// Detects if there has been a wraparound between |old_timestamp| and
+// |new_timestamp|, and compensates by adding 2^32 if that is the case.
+bool CompensateForWrapAround(uint32_t new_timestamp,
+ uint32_t old_timestamp,
+ int64_t* compensated_timestamp) {
+ assert(compensated_timestamp);
+ int64_t wraps = CheckForWrapArounds(new_timestamp, old_timestamp);
+ if (wraps < 0) {
+ // Reordering, don't use this packet.
+ return false;
+ }
+ *compensated_timestamp = new_timestamp + (wraps << 32);
+ return true;
+}
+
+bool UpdateRtcpList(uint32_t ntp_secs,
+ uint32_t ntp_frac,
+ uint32_t rtp_timestamp,
+ RtcpList* rtcp_list,
+ bool* new_rtcp_sr) {
+ *new_rtcp_sr = false;
+ if (ntp_secs == 0 && ntp_frac == 0) {
+ return false;
+ }
+
+ RtcpMeasurement measurement;
+ measurement.ntp_secs = ntp_secs;
+ measurement.ntp_frac = ntp_frac;
+ measurement.rtp_timestamp = rtp_timestamp;
+
+ for (RtcpList::iterator it = rtcp_list->begin();
+ it != rtcp_list->end(); ++it) {
+ if (measurement.ntp_secs == (*it).ntp_secs &&
+ measurement.ntp_frac == (*it).ntp_frac) {
+ // This RTCP has already been added to the list.
+ return true;
+ }
+ }
+
+ // We need two RTCP SR reports to map between RTP and NTP. More than two will
+ // not improve the mapping.
+ if (rtcp_list->size() == 2) {
+ rtcp_list->pop_back();
+ }
+ rtcp_list->push_front(measurement);
+ *new_rtcp_sr = true;
+ return true;
+}
+
+// Converts |rtp_timestamp| to the NTP time base using the NTP and RTP timestamp
+// pairs in |rtcp|. The converted timestamp is returned in
+// |rtp_timestamp_in_ms|. This function compensates for wrap arounds in RTP
+// timestamps and returns false if it can't do the conversion due to reordering.
+bool RtpToNtpMs(int64_t rtp_timestamp,
+ const RtcpList& rtcp,
+ int64_t* rtp_timestamp_in_ms) {
+ assert(rtcp.size() == 2);
+ int64_t rtcp_ntp_ms_new = Clock::NtpToMs(rtcp.front().ntp_secs,
+ rtcp.front().ntp_frac);
+ int64_t rtcp_ntp_ms_old = Clock::NtpToMs(rtcp.back().ntp_secs,
+ rtcp.back().ntp_frac);
+ int64_t rtcp_timestamp_new = rtcp.front().rtp_timestamp;
+ int64_t rtcp_timestamp_old = rtcp.back().rtp_timestamp;
+ if (!CompensateForWrapAround(rtcp_timestamp_new,
+ rtcp_timestamp_old,
+ &rtcp_timestamp_new)) {
+ return false;
+ }
+ double freq_khz;
+ if (!CalculateFrequency(rtcp_ntp_ms_new,
+ rtcp_timestamp_new,
+ rtcp_ntp_ms_old,
+ rtcp_timestamp_old,
+ &freq_khz)) {
+ return false;
+ }
+ double offset = rtcp_timestamp_new - freq_khz * rtcp_ntp_ms_new;
+ int64_t rtp_timestamp_unwrapped;
+ if (!CompensateForWrapAround(rtp_timestamp, rtcp_timestamp_old,
+ &rtp_timestamp_unwrapped)) {
+ return false;
+ }
+ double rtp_timestamp_ntp_ms = (static_cast<double>(rtp_timestamp_unwrapped) -
+ offset) / freq_khz + 0.5f;
+ if (rtp_timestamp_ntp_ms < 0) {
+ return false;
+ }
+ *rtp_timestamp_in_ms = rtp_timestamp_ntp_ms;
+ return true;
+}
+
+int CheckForWrapArounds(uint32_t new_timestamp, uint32_t old_timestamp) {
+ if (new_timestamp < old_timestamp) {
+    // This difference should be less than -2^31 if we have had a wrap around
+    // (e.g. |new_timestamp| = 1, |old_timestamp| = 2^32 - 1). Since it is
+    // cast to an int32_t, it should be positive.
+ if (static_cast<int32_t>(new_timestamp - old_timestamp) > 0) {
+ // Forward wrap around.
+ return 1;
+ }
+ } else if (static_cast<int32_t>(old_timestamp - new_timestamp) > 0) {
+ // This difference should be less than -2^31 if we have had a backward wrap
+    // around. Since it is cast to an int32_t, it should be positive.
+ return -1;
+ }
+ return 0;
+}
+
+} // namespace webrtc
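The conversion above boils down to fitting a straight line through the two most
recent (NTP ms, RTP timestamp) pairs and reading the queried RTP timestamp off
that line, after CheckForWrapArounds()/CompensateForWrapAround() have unwrapped
the 32-bit RTP timestamps. A minimal usage sketch, built only from the functions
used above and the constants from the unit tests below; the Example() wrapper is
illustrative and not part of this patch:

#include <assert.h>

#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/rtp_to_ntp.h"

void Example() {
  const uint32_t kOneMsInNtpFrac = 4294967;  // ~2^32 / 1000, i.e. 1 ms.
  const uint32_t kTicksPerMs = 90;           // 90 kHz RTP clock.
  webrtc::RtcpList rtcp;
  bool new_sr = false;
  // Two sender reports, 1 ms apart in NTP time and 90 ticks apart in RTP time.
  webrtc::UpdateRtcpList(1, 0, 1000, &rtcp, &new_sr);
  webrtc::UpdateRtcpList(1, kOneMsInNtpFrac, 1000 + kTicksPerMs, &rtcp, &new_sr);
  // An RTP timestamp 180 ticks (2 ms) after the first report should map to
  // 2 ms after that report's NTP time, mirroring the RtpWrapped unit test.
  int64_t ntp_ms = 0;
  bool ok = webrtc::RtpToNtpMs(1000 + 2 * kTicksPerMs, rtcp, &ntp_ms);
  assert(ok && ntp_ms == webrtc::Clock::NtpToMs(1, 0) + 2);
}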
diff --git a/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp_unittest.cc
new file mode 100644
index 00000000000..a4d75aed04a
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp_unittest.cc
@@ -0,0 +1,146 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/system_wrappers/interface/rtp_to_ntp.h"
+
+namespace webrtc {
+
+TEST(WrapAroundTests, NoWrap) {
+ EXPECT_EQ(0, CheckForWrapArounds(0xFFFFFFFF, 0xFFFFFFFE));
+ EXPECT_EQ(0, CheckForWrapArounds(1, 0));
+ EXPECT_EQ(0, CheckForWrapArounds(0x00010000, 0x0000FFFF));
+}
+
+TEST(WrapAroundTests, ForwardWrap) {
+ EXPECT_EQ(1, CheckForWrapArounds(0, 0xFFFFFFFF));
+ EXPECT_EQ(1, CheckForWrapArounds(0, 0xFFFF0000));
+ EXPECT_EQ(1, CheckForWrapArounds(0x0000FFFF, 0xFFFFFFFF));
+ EXPECT_EQ(1, CheckForWrapArounds(0x0000FFFF, 0xFFFF0000));
+}
+
+TEST(WrapAroundTests, BackwardWrap) {
+ EXPECT_EQ(-1, CheckForWrapArounds(0xFFFFFFFF, 0));
+ EXPECT_EQ(-1, CheckForWrapArounds(0xFFFF0000, 0));
+ EXPECT_EQ(-1, CheckForWrapArounds(0xFFFFFFFF, 0x0000FFFF));
+ EXPECT_EQ(-1, CheckForWrapArounds(0xFFFF0000, 0x0000FFFF));
+}
+
+TEST(WrapAroundTests, OldRtcpWrapped) {
+ RtcpList rtcp;
+ uint32_t ntp_sec = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t timestamp = 0;
+ const uint32_t kOneMsInNtpFrac = 4294967;
+ const uint32_t kTimestampTicksPerMs = 90;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp -= kTimestampTicksPerMs;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp -= kTimestampTicksPerMs;
+ int64_t timestamp_in_ms = -1;
+  // This is expected to fail since it's highly unlikely that the older RTCP
+ // has a much smaller RTP timestamp than the newer.
+ EXPECT_FALSE(RtpToNtpMs(timestamp, rtcp, &timestamp_in_ms));
+}
+
+TEST(WrapAroundTests, NewRtcpWrapped) {
+ RtcpList rtcp;
+ uint32_t ntp_sec = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t timestamp = 0xFFFFFFFF;
+ const uint32_t kOneMsInNtpFrac = 4294967;
+ const uint32_t kTimestampTicksPerMs = 90;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp += kTimestampTicksPerMs;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ int64_t timestamp_in_ms = -1;
+ EXPECT_TRUE(RtpToNtpMs(rtcp.back().rtp_timestamp, rtcp, &timestamp_in_ms));
+ // Since this RTP packet has the same timestamp as the RTCP packet constructed
+ // at time 0 it should be mapped to 0 as well.
+ EXPECT_EQ(0, timestamp_in_ms);
+}
+
+TEST(WrapAroundTests, RtpWrapped) {
+ const uint32_t kOneMsInNtpFrac = 4294967;
+ const uint32_t kTimestampTicksPerMs = 90;
+ RtcpList rtcp;
+ uint32_t ntp_sec = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t timestamp = 0xFFFFFFFF - 2 * kTimestampTicksPerMs;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp += kTimestampTicksPerMs;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp += kTimestampTicksPerMs;
+ int64_t timestamp_in_ms = -1;
+ EXPECT_TRUE(RtpToNtpMs(timestamp, rtcp, &timestamp_in_ms));
+  // This RTP timestamp is 180 ticks (2 ms) newer than the oldest RTCP
+  // measurement, which was constructed at time 0, so it maps to 2 ms.
+ EXPECT_EQ(2, timestamp_in_ms);
+}
+
+TEST(WrapAroundTests, OldRtp_RtcpsWrapped) {
+ const uint32_t kOneMsInNtpFrac = 4294967;
+ const uint32_t kTimestampTicksPerMs = 90;
+ RtcpList rtcp;
+ uint32_t ntp_sec = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t timestamp = 0;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp += kTimestampTicksPerMs;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp -= 2*kTimestampTicksPerMs;
+ int64_t timestamp_in_ms = -1;
+ EXPECT_FALSE(RtpToNtpMs(timestamp, rtcp, &timestamp_in_ms));
+}
+
+TEST(WrapAroundTests, OldRtp_NewRtcpWrapped) {
+ const uint32_t kOneMsInNtpFrac = 4294967;
+ const uint32_t kTimestampTicksPerMs = 90;
+ RtcpList rtcp;
+ uint32_t ntp_sec = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t timestamp = 0xFFFFFFFF;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp += kTimestampTicksPerMs;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp -= kTimestampTicksPerMs;
+ int64_t timestamp_in_ms = -1;
+ EXPECT_TRUE(RtpToNtpMs(timestamp, rtcp, &timestamp_in_ms));
+ // Constructed at the same time as the first RTCP and should therefore be
+ // mapped to zero.
+ EXPECT_EQ(0, timestamp_in_ms);
+}
+
+TEST(WrapAroundTests, OldRtp_OldRtcpWrapped) {
+ const uint32_t kOneMsInNtpFrac = 4294967;
+ const uint32_t kTimestampTicksPerMs = 90;
+ RtcpList rtcp;
+ uint32_t ntp_sec = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t timestamp = 0;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp -= kTimestampTicksPerMs;
+ rtcp.push_front(RtcpMeasurement(ntp_sec, ntp_frac, timestamp));
+ ntp_frac += kOneMsInNtpFrac;
+ timestamp += 2*kTimestampTicksPerMs;
+ int64_t timestamp_in_ms = -1;
+ EXPECT_FALSE(RtpToNtpMs(timestamp, rtcp, &timestamp_in_ms));
+}
+} // namespace webrtc
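The same two constants recur in every test above, so it is worth spelling out
where they come from (a quick arithmetic check, for orientation only):

// NTP represents fractions of a second in units of 1/2^32, so one millisecond
// is 2^32 / 1000 = 4294967296 / 1000 ~= 4294967, the kOneMsInNtpFrac used in
// each test. Video RTP timestamps tick at 90 kHz, so one millisecond is
// 90000 / 1000 = 90 ticks, the kTimestampTicksPerMs used in each test.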
diff --git a/chromium/third_party/webrtc/system_wrappers/source/scoped_vector_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/scoped_vector_unittest.cc
new file mode 100644
index 00000000000..c1b9d01cc8b
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/source/scoped_vector_unittest.cc
@@ -0,0 +1,328 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/memory/scoped_vector_unittest.cc
+
+#include "webrtc/system_wrappers/interface/scoped_vector.h"
+
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace webrtc {
+namespace {
+
+// The LifeCycleObject notifies its Observer upon construction & destruction.
+class LifeCycleObject {
+ public:
+ class Observer {
+ public:
+ virtual void OnLifeCycleConstruct(LifeCycleObject* o) = 0;
+ virtual void OnLifeCycleDestroy(LifeCycleObject* o) = 0;
+
+ protected:
+ virtual ~Observer() {}
+ };
+
+ ~LifeCycleObject() {
+ observer_->OnLifeCycleDestroy(this);
+ }
+
+ private:
+ friend class LifeCycleWatcher;
+
+ explicit LifeCycleObject(Observer* observer)
+ : observer_(observer) {
+ observer_->OnLifeCycleConstruct(this);
+ }
+
+ Observer* observer_;
+
+ DISALLOW_COPY_AND_ASSIGN(LifeCycleObject);
+};
+
+// The life cycle states we care about for the purposes of testing ScopedVector
+// against objects.
+enum LifeCycleState {
+ LC_INITIAL,
+ LC_CONSTRUCTED,
+ LC_DESTROYED,
+};
+
+// Because we wish to watch the life cycle of an object being constructed and
+// destroyed, and further wish to test expectations against the state of that
+// object, we cannot save state in that object itself. Instead, we use this
+// pairing of the watcher, which observes the object and notifies of
+// construction & destruction. Since we also may be testing assumptions about
+// things not getting freed, this class also acts like a scoping object and
+// deletes the |constructed_life_cycle_object_|, if any, when the
+// LifeCycleWatcher is destroyed. To keep this simple, the only expected state
+// changes are:
+// INITIAL -> CONSTRUCTED -> DESTROYED.
+// Anything more complicated than that should start another test.
+class LifeCycleWatcher : public LifeCycleObject::Observer {
+ public:
+ LifeCycleWatcher() : life_cycle_state_(LC_INITIAL) {}
+ virtual ~LifeCycleWatcher() {}
+
+ // Assert INITIAL -> CONSTRUCTED and no LifeCycleObject associated with this
+ // LifeCycleWatcher.
+ virtual void OnLifeCycleConstruct(LifeCycleObject* object) OVERRIDE {
+ ASSERT_EQ(LC_INITIAL, life_cycle_state_);
+ ASSERT_EQ(NULL, constructed_life_cycle_object_.get());
+ life_cycle_state_ = LC_CONSTRUCTED;
+ constructed_life_cycle_object_.reset(object);
+ }
+
+ // Assert CONSTRUCTED -> DESTROYED and the |object| being destroyed is the
+ // same one we saw constructed.
+ virtual void OnLifeCycleDestroy(LifeCycleObject* object) OVERRIDE {
+ ASSERT_EQ(LC_CONSTRUCTED, life_cycle_state_);
+ LifeCycleObject* constructed_life_cycle_object =
+ constructed_life_cycle_object_.release();
+ ASSERT_EQ(constructed_life_cycle_object, object);
+ life_cycle_state_ = LC_DESTROYED;
+ }
+
+ LifeCycleState life_cycle_state() const { return life_cycle_state_; }
+
+ // Factory method for creating a new LifeCycleObject tied to this
+ // LifeCycleWatcher.
+ LifeCycleObject* NewLifeCycleObject() {
+ return new LifeCycleObject(this);
+ }
+
+ // Returns true iff |object| is the same object that this watcher is tracking.
+ bool IsWatching(LifeCycleObject* object) const {
+ return object == constructed_life_cycle_object_.get();
+ }
+
+ private:
+ LifeCycleState life_cycle_state_;
+ scoped_ptr<LifeCycleObject> constructed_life_cycle_object_;
+
+ DISALLOW_COPY_AND_ASSIGN(LifeCycleWatcher);
+};
+
+TEST(ScopedVectorTest, LifeCycleWatcher) {
+ LifeCycleWatcher watcher;
+ EXPECT_EQ(LC_INITIAL, watcher.life_cycle_state());
+ LifeCycleObject* object = watcher.NewLifeCycleObject();
+ EXPECT_EQ(LC_CONSTRUCTED, watcher.life_cycle_state());
+ delete object;
+ EXPECT_EQ(LC_DESTROYED, watcher.life_cycle_state());
+}
+
+TEST(ScopedVectorTest, PopBack) {
+ LifeCycleWatcher watcher;
+ EXPECT_EQ(LC_INITIAL, watcher.life_cycle_state());
+ ScopedVector<LifeCycleObject> scoped_vector;
+ scoped_vector.push_back(watcher.NewLifeCycleObject());
+ EXPECT_EQ(LC_CONSTRUCTED, watcher.life_cycle_state());
+ EXPECT_TRUE(watcher.IsWatching(scoped_vector.back()));
+ scoped_vector.pop_back();
+ EXPECT_EQ(LC_DESTROYED, watcher.life_cycle_state());
+ EXPECT_TRUE(scoped_vector.empty());
+}
+
+TEST(ScopedVectorTest, Clear) {
+ LifeCycleWatcher watcher;
+ EXPECT_EQ(LC_INITIAL, watcher.life_cycle_state());
+ ScopedVector<LifeCycleObject> scoped_vector;
+ scoped_vector.push_back(watcher.NewLifeCycleObject());
+ EXPECT_EQ(LC_CONSTRUCTED, watcher.life_cycle_state());
+ EXPECT_TRUE(watcher.IsWatching(scoped_vector.back()));
+ scoped_vector.clear();
+ EXPECT_EQ(LC_DESTROYED, watcher.life_cycle_state());
+ EXPECT_TRUE(scoped_vector.empty());
+}
+
+TEST(ScopedVectorTest, WeakClear) {
+ LifeCycleWatcher watcher;
+ EXPECT_EQ(LC_INITIAL, watcher.life_cycle_state());
+ ScopedVector<LifeCycleObject> scoped_vector;
+ scoped_vector.push_back(watcher.NewLifeCycleObject());
+ EXPECT_EQ(LC_CONSTRUCTED, watcher.life_cycle_state());
+ EXPECT_TRUE(watcher.IsWatching(scoped_vector.back()));
+ scoped_vector.weak_clear();
+ EXPECT_EQ(LC_CONSTRUCTED, watcher.life_cycle_state());
+ EXPECT_TRUE(scoped_vector.empty());
+}
+
+TEST(ScopedVectorTest, ResizeShrink) {
+ LifeCycleWatcher first_watcher;
+ EXPECT_EQ(LC_INITIAL, first_watcher.life_cycle_state());
+ LifeCycleWatcher second_watcher;
+ EXPECT_EQ(LC_INITIAL, second_watcher.life_cycle_state());
+ ScopedVector<LifeCycleObject> scoped_vector;
+
+ scoped_vector.push_back(first_watcher.NewLifeCycleObject());
+ EXPECT_EQ(LC_CONSTRUCTED, first_watcher.life_cycle_state());
+ EXPECT_EQ(LC_INITIAL, second_watcher.life_cycle_state());
+ EXPECT_TRUE(first_watcher.IsWatching(scoped_vector[0]));
+ EXPECT_FALSE(second_watcher.IsWatching(scoped_vector[0]));
+
+ scoped_vector.push_back(second_watcher.NewLifeCycleObject());
+ EXPECT_EQ(LC_CONSTRUCTED, first_watcher.life_cycle_state());
+ EXPECT_EQ(LC_CONSTRUCTED, second_watcher.life_cycle_state());
+ EXPECT_FALSE(first_watcher.IsWatching(scoped_vector[1]));
+ EXPECT_TRUE(second_watcher.IsWatching(scoped_vector[1]));
+
+ // Test that shrinking a vector deletes elements in the disappearing range.
+ scoped_vector.resize(1);
+ EXPECT_EQ(LC_CONSTRUCTED, first_watcher.life_cycle_state());
+ EXPECT_EQ(LC_DESTROYED, second_watcher.life_cycle_state());
+ EXPECT_EQ(1u, scoped_vector.size());
+ EXPECT_TRUE(first_watcher.IsWatching(scoped_vector[0]));
+}
+
+TEST(ScopedVectorTest, ResizeGrow) {
+ LifeCycleWatcher watcher;
+ EXPECT_EQ(LC_INITIAL, watcher.life_cycle_state());
+ ScopedVector<LifeCycleObject> scoped_vector;
+ scoped_vector.push_back(watcher.NewLifeCycleObject());
+ EXPECT_EQ(LC_CONSTRUCTED, watcher.life_cycle_state());
+ EXPECT_TRUE(watcher.IsWatching(scoped_vector.back()));
+
+ scoped_vector.resize(5);
+ EXPECT_EQ(LC_CONSTRUCTED, watcher.life_cycle_state());
+ ASSERT_EQ(5u, scoped_vector.size());
+ EXPECT_TRUE(watcher.IsWatching(scoped_vector[0]));
+ EXPECT_FALSE(watcher.IsWatching(scoped_vector[1]));
+ EXPECT_FALSE(watcher.IsWatching(scoped_vector[2]));
+ EXPECT_FALSE(watcher.IsWatching(scoped_vector[3]));
+ EXPECT_FALSE(watcher.IsWatching(scoped_vector[4]));
+}
+
+TEST(ScopedVectorTest, Scope) {
+ LifeCycleWatcher watcher;
+ EXPECT_EQ(LC_INITIAL, watcher.life_cycle_state());
+ {
+ ScopedVector<LifeCycleObject> scoped_vector;
+ scoped_vector.push_back(watcher.NewLifeCycleObject());
+ EXPECT_EQ(LC_CONSTRUCTED, watcher.life_cycle_state());
+ EXPECT_TRUE(watcher.IsWatching(scoped_vector.back()));
+ }
+ EXPECT_EQ(LC_DESTROYED, watcher.life_cycle_state());
+}
+
+TEST(ScopedVectorTest, MoveConstruct) {
+ LifeCycleWatcher watcher;
+ EXPECT_EQ(LC_INITIAL, watcher.life_cycle_state());
+ {
+ ScopedVector<LifeCycleObject> scoped_vector;
+ scoped_vector.push_back(watcher.NewLifeCycleObject());
+ EXPECT_FALSE(scoped_vector.empty());
+ EXPECT_TRUE(watcher.IsWatching(scoped_vector.back()));
+
+ ScopedVector<LifeCycleObject> scoped_vector_copy(scoped_vector.Pass());
+ EXPECT_TRUE(scoped_vector.empty());
+ EXPECT_FALSE(scoped_vector_copy.empty());
+ EXPECT_TRUE(watcher.IsWatching(scoped_vector_copy.back()));
+
+ EXPECT_EQ(LC_CONSTRUCTED, watcher.life_cycle_state());
+ }
+ EXPECT_EQ(LC_DESTROYED, watcher.life_cycle_state());
+}
+
+TEST(ScopedVectorTest, MoveAssign) {
+ LifeCycleWatcher watcher;
+ EXPECT_EQ(LC_INITIAL, watcher.life_cycle_state());
+ {
+ ScopedVector<LifeCycleObject> scoped_vector;
+ scoped_vector.push_back(watcher.NewLifeCycleObject());
+ ScopedVector<LifeCycleObject> scoped_vector_assign;
+ EXPECT_FALSE(scoped_vector.empty());
+ EXPECT_TRUE(watcher.IsWatching(scoped_vector.back()));
+
+ scoped_vector_assign = scoped_vector.Pass();
+ EXPECT_TRUE(scoped_vector.empty());
+ EXPECT_FALSE(scoped_vector_assign.empty());
+ EXPECT_TRUE(watcher.IsWatching(scoped_vector_assign.back()));
+
+ EXPECT_EQ(LC_CONSTRUCTED, watcher.life_cycle_state());
+ }
+ EXPECT_EQ(LC_DESTROYED, watcher.life_cycle_state());
+}
+
+class DeleteCounter {
+ public:
+ explicit DeleteCounter(int* deletes)
+ : deletes_(deletes) {
+ }
+
+ ~DeleteCounter() {
+ (*deletes_)++;
+ }
+
+ void VoidMethod0() {}
+
+ private:
+ int* const deletes_;
+
+ DISALLOW_COPY_AND_ASSIGN(DeleteCounter);
+};
+
+// This class is used in place of Chromium's base::Callback.
+template <typename T>
+class PassThru {
+ public:
+ explicit PassThru(ScopedVector<T> scoper) : scoper_(scoper.Pass()) {}
+
+ ScopedVector<T> Run() {
+ return scoper_.Pass();
+ }
+
+ private:
+ ScopedVector<T> scoper_;
+};
+
+TEST(ScopedVectorTest, Passed) {
+ int deletes = 0;
+ ScopedVector<DeleteCounter> deleter_vector;
+ deleter_vector.push_back(new DeleteCounter(&deletes));
+ EXPECT_EQ(0, deletes);
+ PassThru<DeleteCounter> pass_thru(deleter_vector.Pass());
+ EXPECT_EQ(0, deletes);
+ ScopedVector<DeleteCounter> result = pass_thru.Run();
+ EXPECT_EQ(0, deletes);
+ result.clear();
+ EXPECT_EQ(1, deletes);
+}
+
+TEST(ScopedVectorTest, InsertRange) {
+ LifeCycleWatcher watchers[5];
+ size_t watchers_size = sizeof(watchers) / sizeof(*watchers);
+
+ std::vector<LifeCycleObject*> vec;
+ for (LifeCycleWatcher* it = watchers; it != watchers + watchers_size;
+ ++it) {
+ EXPECT_EQ(LC_INITIAL, it->life_cycle_state());
+ vec.push_back(it->NewLifeCycleObject());
+ EXPECT_EQ(LC_CONSTRUCTED, it->life_cycle_state());
+ }
+ // Start scope for ScopedVector.
+ {
+ ScopedVector<LifeCycleObject> scoped_vector;
+ scoped_vector.insert(scoped_vector.end(), vec.begin() + 1, vec.begin() + 3);
+ for (LifeCycleWatcher* it = watchers; it != watchers + watchers_size;
+ ++it)
+ EXPECT_EQ(LC_CONSTRUCTED, it->life_cycle_state());
+ }
+ for (LifeCycleWatcher* it = watchers; it != watchers + 1; ++it)
+ EXPECT_EQ(LC_CONSTRUCTED, it->life_cycle_state());
+ for (LifeCycleWatcher* it = watchers + 1; it != watchers + 3; ++it)
+ EXPECT_EQ(LC_DESTROYED, it->life_cycle_state());
+ for (LifeCycleWatcher* it = watchers + 3; it != watchers + watchers_size;
+ ++it)
+ EXPECT_EQ(LC_CONSTRUCTED, it->life_cycle_state());
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/system_wrappers/source/stl_util_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/stl_util_unittest.cc
new file mode 100644
index 00000000000..e60a913cfcb
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/source/stl_util_unittest.cc
@@ -0,0 +1,250 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Borrowed from Chromium's src/base/stl_util_unittest.cc
+#include "webrtc/system_wrappers/interface/stl_util.h"
+
+#include <set>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace {
+
+// Used as a test case to ensure the various base::STLXxx functions don't
+// require more than operators "<" and "==" on values stored in containers.
+class ComparableValue {
+ public:
+ explicit ComparableValue(int value) : value_(value) {}
+
+ bool operator==(const ComparableValue& rhs) const {
+ return value_ == rhs.value_;
+ }
+
+ bool operator<(const ComparableValue& rhs) const {
+ return value_ < rhs.value_;
+ }
+
+ private:
+ int value_;
+};
+
+} // namespace
+
+namespace webrtc {
+namespace {
+
+TEST(STLUtilTest, STLIsSorted) {
+ {
+ std::set<int> set;
+ set.insert(24);
+ set.insert(1);
+ set.insert(12);
+ EXPECT_TRUE(STLIsSorted(set));
+ }
+
+ {
+ std::set<ComparableValue> set;
+ set.insert(ComparableValue(24));
+ set.insert(ComparableValue(1));
+ set.insert(ComparableValue(12));
+ EXPECT_TRUE(STLIsSorted(set));
+ }
+
+ {
+ std::vector<int> vector;
+ vector.push_back(1);
+ vector.push_back(1);
+ vector.push_back(4);
+ vector.push_back(64);
+ vector.push_back(12432);
+ EXPECT_TRUE(STLIsSorted(vector));
+ vector.back() = 1;
+ EXPECT_FALSE(STLIsSorted(vector));
+ }
+}
+
+TEST(STLUtilTest, STLSetDifference) {
+ std::set<int> a1;
+ a1.insert(1);
+ a1.insert(2);
+ a1.insert(3);
+ a1.insert(4);
+
+ std::set<int> a2;
+ a2.insert(3);
+ a2.insert(4);
+ a2.insert(5);
+ a2.insert(6);
+ a2.insert(7);
+
+ {
+ std::set<int> difference;
+ difference.insert(1);
+ difference.insert(2);
+ EXPECT_EQ(difference, STLSetDifference<std::set<int> >(a1, a2));
+ }
+
+ {
+ std::set<int> difference;
+ difference.insert(5);
+ difference.insert(6);
+ difference.insert(7);
+ EXPECT_EQ(difference, STLSetDifference<std::set<int> >(a2, a1));
+ }
+
+ {
+ std::vector<int> difference;
+ difference.push_back(1);
+ difference.push_back(2);
+ EXPECT_EQ(difference, STLSetDifference<std::vector<int> >(a1, a2));
+ }
+
+ {
+ std::vector<int> difference;
+ difference.push_back(5);
+ difference.push_back(6);
+ difference.push_back(7);
+ EXPECT_EQ(difference, STLSetDifference<std::vector<int> >(a2, a1));
+ }
+}
+
+TEST(STLUtilTest, STLSetUnion) {
+ std::set<int> a1;
+ a1.insert(1);
+ a1.insert(2);
+ a1.insert(3);
+ a1.insert(4);
+
+ std::set<int> a2;
+ a2.insert(3);
+ a2.insert(4);
+ a2.insert(5);
+ a2.insert(6);
+ a2.insert(7);
+
+ {
+ std::set<int> result;
+ result.insert(1);
+ result.insert(2);
+ result.insert(3);
+ result.insert(4);
+ result.insert(5);
+ result.insert(6);
+ result.insert(7);
+ EXPECT_EQ(result, STLSetUnion<std::set<int> >(a1, a2));
+ }
+
+ {
+ std::set<int> result;
+ result.insert(1);
+ result.insert(2);
+ result.insert(3);
+ result.insert(4);
+ result.insert(5);
+ result.insert(6);
+ result.insert(7);
+ EXPECT_EQ(result, STLSetUnion<std::set<int> >(a2, a1));
+ }
+
+ {
+ std::vector<int> result;
+ result.push_back(1);
+ result.push_back(2);
+ result.push_back(3);
+ result.push_back(4);
+ result.push_back(5);
+ result.push_back(6);
+ result.push_back(7);
+ EXPECT_EQ(result, STLSetUnion<std::vector<int> >(a1, a2));
+ }
+
+ {
+ std::vector<int> result;
+ result.push_back(1);
+ result.push_back(2);
+ result.push_back(3);
+ result.push_back(4);
+ result.push_back(5);
+ result.push_back(6);
+ result.push_back(7);
+ EXPECT_EQ(result, STLSetUnion<std::vector<int> >(a2, a1));
+ }
+}
+
+TEST(STLUtilTest, STLSetIntersection) {
+ std::set<int> a1;
+ a1.insert(1);
+ a1.insert(2);
+ a1.insert(3);
+ a1.insert(4);
+
+ std::set<int> a2;
+ a2.insert(3);
+ a2.insert(4);
+ a2.insert(5);
+ a2.insert(6);
+ a2.insert(7);
+
+ {
+ std::set<int> result;
+ result.insert(3);
+ result.insert(4);
+ EXPECT_EQ(result, STLSetIntersection<std::set<int> >(a1, a2));
+ }
+
+ {
+ std::set<int> result;
+ result.insert(3);
+ result.insert(4);
+ EXPECT_EQ(result, STLSetIntersection<std::set<int> >(a2, a1));
+ }
+
+ {
+ std::vector<int> result;
+ result.push_back(3);
+ result.push_back(4);
+ EXPECT_EQ(result, STLSetIntersection<std::vector<int> >(a1, a2));
+ }
+
+ {
+ std::vector<int> result;
+ result.push_back(3);
+ result.push_back(4);
+ EXPECT_EQ(result, STLSetIntersection<std::vector<int> >(a2, a1));
+ }
+}
+
+TEST(STLUtilTest, STLIncludes) {
+ std::set<int> a1;
+ a1.insert(1);
+ a1.insert(2);
+ a1.insert(3);
+ a1.insert(4);
+
+ std::set<int> a2;
+ a2.insert(3);
+ a2.insert(4);
+
+ std::set<int> a3;
+ a3.insert(3);
+ a3.insert(4);
+ a3.insert(5);
+
+ EXPECT_TRUE(STLIncludes<std::set<int> >(a1, a2));
+ EXPECT_FALSE(STLIncludes<std::set<int> >(a1, a3));
+ EXPECT_FALSE(STLIncludes<std::set<int> >(a2, a1));
+ EXPECT_FALSE(STLIncludes<std::set<int> >(a2, a3));
+ EXPECT_FALSE(STLIncludes<std::set<int> >(a3, a1));
+ EXPECT_TRUE(STLIncludes<std::set<int> >(a3, a2));
+}
+
+} // namespace
+} // namespace webrtc
+
diff --git a/chromium/third_party/webrtc/system_wrappers/source/system_wrappers.gyp b/chromium/third_party/webrtc/system_wrappers/source/system_wrappers.gyp
index 41a736e14b4..66b34356c22 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/system_wrappers.gyp
+++ b/chromium/third_party/webrtc/system_wrappers/source/system_wrappers.gyp
@@ -16,6 +16,9 @@
'spreadsortlib',
'../interface',
],
+ 'dependencies': [
+ '../../base/base.gyp:webrtc_base',
+ ],
'direct_dependent_settings': {
'include_dirs': [
'../interface',
@@ -35,23 +38,29 @@
'../interface/data_log_impl.h',
'../interface/event_tracer.h',
'../interface/event_wrapper.h',
+ '../interface/field_trial.h',
'../interface/file_wrapper.h',
'../interface/fix_interlocked_exchange_pointer_win.h',
- '../interface/list_wrapper.h',
'../interface/logcat_trace_context.h',
'../interface/logging.h',
'../interface/ref_count.h',
+ '../interface/rtp_to_ntp.h',
'../interface/rw_lock_wrapper.h',
'../interface/scoped_ptr.h',
'../interface/scoped_refptr.h',
+ '../interface/scoped_vector.h',
'../interface/sleep.h',
'../interface/sort.h',
'../interface/static_instance.h',
+ '../interface/stl_util.h',
'../interface/stringize_macros.h',
+ '../interface/thread_annotations.h',
'../interface/thread_wrapper.h',
'../interface/tick_util.h',
+ '../interface/timestamp_extrapolator.h',
'../interface/trace.h',
'../interface/trace_event.h',
+ '../interface/utf_util_win.h',
'aligned_malloc.cc',
'atomic32_mac.cc',
'atomic32_posix.cc',
@@ -82,9 +91,9 @@
'event_win.h',
'file_impl.cc',
'file_impl.h',
- 'list_no_stl.cc',
'logcat_trace_context.cc',
'logging.cc',
+ 'rtp_to_ntp.cc',
'rw_lock.cc',
'rw_lock_generic.cc',
'rw_lock_generic.h',
@@ -101,6 +110,7 @@
'thread_posix.h',
'thread_win.cc',
'thread_win.h',
+ 'timestamp_extrapolator.cc',
'trace_impl.cc',
'trace_impl.h',
'trace_posix.cc',
@@ -185,40 +195,34 @@
4267, # size_t to int truncation.
4334, # Ignore warning on shift operator promotion.
],
+ }, {
+ 'target_name': 'field_trial_default',
+ 'type': 'static_library',
+ 'sources': [
+ 'field_trial_default.cc',
+ ],
+ 'dependencies': [
+ 'system_wrappers',
+ ]
},
], # targets
'conditions': [
['OS=="android"', {
'targets': [
{
- 'variables': {
- # Treat this as third-party code.
- 'chromium_code': 0,
- },
'target_name': 'cpu_features_android',
'type': 'static_library',
'sources': [
- # TODO(leozwang): Ideally we want to audomatically exclude .c files
- # as with .cc files, gyp currently only excludes .cc files.
'cpu_features_android.c',
],
'conditions': [
- ['include_ndk_cpu_features==1', {
- 'conditions': [
- ['android_webview_build == 1', {
- 'libraries': [
- 'cpufeatures.a'
- ],
- }, {
- 'dependencies': [
- '<(android_ndk_root)/android_tools_ndk.gyp:cpu_features',
- ],
- }],
+ ['android_webview_build == 1', {
+ 'libraries': [
+ 'cpufeatures.a'
],
}, {
- 'sources': [
- 'android/cpu-features.c',
- 'android/cpu-features.h',
+ 'dependencies': [
+ '<(android_ndk_root)/android_tools_ndk.gyp:cpu_features',
],
}],
],
diff --git a/chromium/third_party/webrtc/system_wrappers/source/system_wrappers_tests.gyp b/chromium/third_party/webrtc/system_wrappers/source/system_wrappers_tests.gyp
index 5686105e466..f2f6156691c 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/system_wrappers_tests.gyp
+++ b/chromium/third_party/webrtc/system_wrappers/source/system_wrappers_tests.gyp
@@ -23,14 +23,16 @@
'condition_variable_unittest.cc',
'critical_section_unittest.cc',
'event_tracer_unittest.cc',
- 'list_unittest.cc',
'logging_unittest.cc',
'data_log_unittest.cc',
'data_log_unittest_disabled.cc',
'data_log_helpers_unittest.cc',
'data_log_c_helpers_unittest.c',
'data_log_c_helpers_unittest.h',
+ 'rtp_to_ntp_unittest.cc',
+ 'scoped_vector_unittest.cc',
'stringize_macros_unittest.cc',
+ 'stl_util_unittest.cc',
'thread_unittest.cc',
'thread_posix_unittest.cc',
'unittest_utilities_unittest.cc',
@@ -46,7 +48,7 @@
}],
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -61,7 +63,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['include_tests==1 and build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['include_tests==1 and build_with_chromium==1 and OS=="android"', {
'targets': [
{
'target_name': 'system_wrappers_unittests_apk_target',
diff --git a/chromium/third_party/webrtc/system_wrappers/source/system_wrappers_unittests.isolate b/chromium/third_party/webrtc/system_wrappers/source/system_wrappers_unittests.isolate
index 6ec5e7c6420..f5057710223 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/system_wrappers_unittests.isolate
+++ b/chromium/third_party/webrtc/system_wrappers/source/system_wrappers_unittests.isolate
@@ -9,27 +9,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../../data/',
- '../../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/system_wrappers_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/system_wrappers_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/system_wrappers/source/timestamp_extrapolator.cc b/chromium/third_party/webrtc/system_wrappers/source/timestamp_extrapolator.cc
new file mode 100644
index 00000000000..afd212b0c7c
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/source/timestamp_extrapolator.cc
@@ -0,0 +1,230 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/system_wrappers/interface/timestamp_extrapolator.h"
+
+#include <algorithm>
+
+namespace webrtc {
+
+TimestampExtrapolator::TimestampExtrapolator(int64_t start_ms)
+ : _rwLock(RWLockWrapper::CreateRWLock()),
+ _startMs(0),
+ _firstTimestamp(0),
+ _wrapArounds(0),
+ _prevUnwrappedTimestamp(-1),
+ _prevWrapTimestamp(-1),
+ _lambda(1),
+ _firstAfterReset(true),
+ _packetCount(0),
+ _startUpFilterDelayInPackets(2),
+ _detectorAccumulatorPos(0),
+ _detectorAccumulatorNeg(0),
+ _alarmThreshold(60e3),
+ _accDrift(6600), // in timestamp ticks, i.e. 15 ms
+ _accMaxError(7000),
+ _P11(1e10) {
+ Reset(start_ms);
+}
+
+TimestampExtrapolator::~TimestampExtrapolator()
+{
+ delete _rwLock;
+}
+
+void TimestampExtrapolator::Reset(int64_t start_ms)
+{
+ WriteLockScoped wl(*_rwLock);
+ _startMs = start_ms;
+ _prevMs = _startMs;
+ _firstTimestamp = 0;
+ _w[0] = 90.0;
+ _w[1] = 0;
+ _P[0][0] = 1;
+ _P[1][1] = _P11;
+ _P[0][1] = _P[1][0] = 0;
+ _firstAfterReset = true;
+ _prevUnwrappedTimestamp = -1;
+ _prevWrapTimestamp = -1;
+ _wrapArounds = 0;
+ _packetCount = 0;
+ _detectorAccumulatorPos = 0;
+ _detectorAccumulatorNeg = 0;
+}
+
+void
+TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz)
+{
+
+ _rwLock->AcquireLockExclusive();
+ if (tMs - _prevMs > 10e3)
+ {
+ // Ten seconds without a complete frame.
+ // Reset the extrapolator
+ _rwLock->ReleaseLockExclusive();
+ Reset(tMs);
+ _rwLock->AcquireLockExclusive();
+ }
+ else
+ {
+ _prevMs = tMs;
+ }
+
+ // Remove offset to prevent badly scaled matrices
+ tMs -= _startMs;
+
+ CheckForWrapArounds(ts90khz);
+
+ int64_t unwrapped_ts90khz = static_cast<int64_t>(ts90khz) +
+ _wrapArounds * ((static_cast<int64_t>(1) << 32) - 1);
+
+ if (_prevUnwrappedTimestamp >= 0 &&
+ unwrapped_ts90khz < _prevUnwrappedTimestamp)
+ {
+ // Drop reordered frames.
+ _rwLock->ReleaseLockExclusive();
+ return;
+ }
+
+ if (_firstAfterReset)
+ {
+        // Make an initial guess of the offset;
+        // it should be almost correct since tMs - _startMs
+        // should be about zero at this time.
+ _w[1] = -_w[0] * tMs;
+ _firstTimestamp = unwrapped_ts90khz;
+ _firstAfterReset = false;
+ }
+
+ double residual =
+ (static_cast<double>(unwrapped_ts90khz) - _firstTimestamp) -
+ static_cast<double>(tMs) * _w[0] - _w[1];
+ if (DelayChangeDetection(residual) &&
+ _packetCount >= _startUpFilterDelayInPackets)
+ {
+ // A sudden change of average network delay has been detected.
+ // Force the filter to adjust its offset parameter by changing
+ // the offset uncertainty. Don't do this during startup.
+ _P[1][1] = _P11;
+ }
+ //T = [t(k) 1]';
+ //that = T'*w;
+ //K = P*T/(lambda + T'*P*T);
+ double K[2];
+ K[0] = _P[0][0] * tMs + _P[0][1];
+ K[1] = _P[1][0] * tMs + _P[1][1];
+ double TPT = _lambda + tMs * K[0] + K[1];
+ K[0] /= TPT;
+ K[1] /= TPT;
+ //w = w + K*(ts(k) - that);
+ _w[0] = _w[0] + K[0] * residual;
+ _w[1] = _w[1] + K[1] * residual;
+ //P = 1/lambda*(P - K*T'*P);
+ double p00 = 1 / _lambda * (_P[0][0] - (K[0] * tMs * _P[0][0] + K[0] * _P[1][0]));
+ double p01 = 1 / _lambda * (_P[0][1] - (K[0] * tMs * _P[0][1] + K[0] * _P[1][1]));
+ _P[1][0] = 1 / _lambda * (_P[1][0] - (K[1] * tMs * _P[0][0] + K[1] * _P[1][0]));
+ _P[1][1] = 1 / _lambda * (_P[1][1] - (K[1] * tMs * _P[0][1] + K[1] * _P[1][1]));
+ _P[0][0] = p00;
+ _P[0][1] = p01;
+ _prevUnwrappedTimestamp = unwrapped_ts90khz;
+ if (_packetCount < _startUpFilterDelayInPackets)
+ {
+ _packetCount++;
+ }
+ _rwLock->ReleaseLockExclusive();
+}
+
+int64_t
+TimestampExtrapolator::ExtrapolateLocalTime(uint32_t timestamp90khz)
+{
+ ReadLockScoped rl(*_rwLock);
+ int64_t localTimeMs = 0;
+ CheckForWrapArounds(timestamp90khz);
+ double unwrapped_ts90khz = static_cast<double>(timestamp90khz) +
+ _wrapArounds * ((static_cast<int64_t>(1) << 32) - 1);
+ if (_packetCount == 0)
+ {
+ localTimeMs = -1;
+ }
+ else if (_packetCount < _startUpFilterDelayInPackets)
+ {
+ localTimeMs = _prevMs + static_cast<int64_t>(
+ static_cast<double>(unwrapped_ts90khz - _prevUnwrappedTimestamp) /
+ 90.0 + 0.5);
+ }
+ else
+ {
+ if (_w[0] < 1e-3)
+ {
+ localTimeMs = _startMs;
+ }
+ else
+ {
+ double timestampDiff = unwrapped_ts90khz -
+ static_cast<double>(_firstTimestamp);
+ localTimeMs = static_cast<int64_t>(
+ static_cast<double>(_startMs) + (timestampDiff - _w[1]) /
+ _w[0] + 0.5);
+ }
+ }
+ return localTimeMs;
+}
+
+// Investigates if the timestamp clock has overflowed since the last timestamp and
+// keeps track of the number of wrap arounds since reset.
+void
+TimestampExtrapolator::CheckForWrapArounds(uint32_t ts90khz)
+{
+ if (_prevWrapTimestamp == -1)
+ {
+ _prevWrapTimestamp = ts90khz;
+ return;
+ }
+ if (ts90khz < _prevWrapTimestamp)
+ {
+        // This difference will probably be less than -2^31 if we have had a wrap
+        // around (e.g. timestamp = 1, _prevWrapTimestamp = 2^32 - 1). Since it is
+        // cast to an int32_t, it should be positive.
+ if (static_cast<int32_t>(ts90khz - _prevWrapTimestamp) > 0)
+ {
+ // Forward wrap around
+ _wrapArounds++;
+ }
+ }
+    // This difference will probably be less than -2^31 if we have had a backward
+    // wrap around. Since it is cast to an int32_t, it should be positive.
+ else if (static_cast<int32_t>(_prevWrapTimestamp - ts90khz) > 0)
+ {
+ // Backward wrap around
+ _wrapArounds--;
+ }
+ _prevWrapTimestamp = ts90khz;
+}
+
+bool
+TimestampExtrapolator::DelayChangeDetection(double error)
+{
+ // CUSUM detection of sudden delay changes
+ error = (error > 0) ? std::min(error, _accMaxError) :
+ std::max(error, -_accMaxError);
+ _detectorAccumulatorPos =
+ std::max(_detectorAccumulatorPos + error - _accDrift, (double)0);
+ _detectorAccumulatorNeg =
+ std::min(_detectorAccumulatorNeg + error + _accDrift, (double)0);
+ if (_detectorAccumulatorPos > _alarmThreshold || _detectorAccumulatorNeg < -_alarmThreshold)
+ {
+ // Alarm
+ _detectorAccumulatorPos = _detectorAccumulatorNeg = 0;
+ return true;
+ }
+ return false;
+}
+
+}
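
The commented MATLAB-style lines in Update() above spell out the estimator: a recursive least-squares fit of unwrapped RTP timestamp ticks against local time in milliseconds, with forgetting factor _lambda (1 here), slope _w[0] starting at 90 ticks/ms, and the offset uncertainty _P[1][1] re-inflated to 1e10 whenever the CUSUM detector fires. In that notation the per-sample update implemented above is:

\[
\begin{aligned}
T_k &= [\,t_k \;\; 1\,]^{\top}, \qquad \hat{ts}_k = T_k^{\top} w,\\
e_k &= (ts_k - ts_0) - \hat{ts}_k,\\
K_k &= \frac{P\,T_k}{\lambda + T_k^{\top} P\,T_k},\\
w &\leftarrow w + K_k\, e_k,\\
P &\leftarrow \tfrac{1}{\lambda}\,\bigl(P - K_k T_k^{\top} P\bigr).
\end{aligned}
\]

ExtrapolateLocalTime() then inverts the fit once the start-up filter delay has passed: local_ms ≈ _startMs + ((ts - ts_0) - _w[1]) / _w[0].
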
diff --git a/chromium/third_party/webrtc/system_wrappers/source/trace_impl.cc b/chromium/third_party/webrtc/system_wrappers/source/trace_impl.cc
index 8dbe76b1137..13c63acff2e 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/trace_impl.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/trace_impl.cc
@@ -269,10 +269,6 @@ int32_t TraceImpl::AddModuleAndId(char* trace_message,
sprintf(trace_message, "VIDEO CAPTUR:%5ld %5ld;", id_engine,
id_channel);
break;
- case kTraceVideoPreocessing:
- sprintf(trace_message, " VIDEO PROC:%5ld %5ld;", id_engine,
- id_channel);
- break;
case kTraceRemoteBitrateEstimator:
sprintf(trace_message, " BWE RBE:%5ld %5ld;", id_engine,
id_channel);
@@ -333,9 +329,6 @@ int32_t TraceImpl::AddModuleAndId(char* trace_message,
case kTraceVideoCapture:
sprintf(trace_message, "VIDEO CAPTUR:%11ld;", idl);
break;
- case kTraceVideoPreocessing:
- sprintf(trace_message, " VIDEO PROC:%11ld;", idl);
- break;
case kTraceRemoteBitrateEstimator:
sprintf(trace_message, " BWE RBE:%11ld;", idl);
break;
diff --git a/chromium/third_party/webrtc/test/libtest/libtest.gyp b/chromium/third_party/webrtc/test/libtest/libtest.gyp
deleted file mode 100644
index 17b6f03c955..00000000000
--- a/chromium/third_party/webrtc/test/libtest/libtest.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-{
- 'includes': [
- '../../build/common.gypi'
- ],
- 'targets': [
- {
- 'target_name': 'libtest',
- 'type': 'static_library',
- 'sources': [
- # Helper classes
- 'include/bit_flip_encryption.h',
- 'include/random_encryption.h',
-
- 'helpers/bit_flip_encryption.cc',
- 'helpers/random_encryption.cc',
- ],
- },
- ],
-}
diff --git a/chromium/third_party/webrtc/test/metrics.gyp b/chromium/third_party/webrtc/test/metrics.gyp
index 3d61b6c0f83..0486a7c9ea8 100644
--- a/chromium/third_party/webrtc/test/metrics.gyp
+++ b/chromium/third_party/webrtc/test/metrics.gyp
@@ -28,59 +28,5 @@
'testsupport/metrics/video_metrics.cc',
],
},
- {
- 'target_name': 'metrics_unittests',
- 'type': '<(gtest_target_type)',
- 'dependencies': [
- 'metrics',
- '<(webrtc_root)/test/test.gyp:test_support_main',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- ],
- 'sources': [
- 'testsupport/metrics/video_metrics_unittest.cc',
- ],
- 'conditions': [
- # TODO(henrike): remove build_with_chromium==1 when the bots are
- # using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
- 'dependencies': [
- '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
- ],
- }],
- ],
- },
], # targets
- 'conditions': [
- # TODO(henrike): remove build_with_chromium==1 when the bots are using
- # Chromium's buildbots.
- ['include_tests==1 and build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
- 'targets': [
- {
- 'target_name': 'metrics_unittests_apk_target',
- 'type': 'none',
- 'dependencies': [
- '<(apk_tests_path):metrics_unittests_apk',
- ],
- },
- ],
- }],
- ['test_isolation_mode != "noop"', {
- 'targets': [
- {
- 'target_name': 'metrics_unittests_run',
- 'type': 'none',
- 'dependencies': [
- 'metrics_unittests',
- ],
- 'includes': [
- '../build/isolate.gypi',
- 'metrics_unittests.isolate',
- ],
- 'sources': [
- 'metrics_unittests.isolate',
- ],
- },
- ],
- }],
- ],
}
diff --git a/chromium/third_party/webrtc/test/metrics_unittests.isolate b/chromium/third_party/webrtc/test/metrics_unittests.isolate
deleted file mode 100644
index 1a196d653f5..00000000000
--- a/chromium/third_party/webrtc/test/metrics_unittests.isolate
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-{
- 'conditions': [
- ['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
- 'variables': {
- 'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
- ],
- },
- }],
- ['OS=="linux" or OS=="mac" or OS=="win"', {
- 'variables': {
- 'command': [
- '../../testing/test_env.py',
- '<(PRODUCT_DIR)/metrics_unittests<(EXECUTABLE_SUFFIX)',
- ],
- 'isolate_dependency_tracked': [
- '../../DEPS',
- '../../resources/foreman_cif_short.yuv',
- '../../testing/test_env.py',
- '<(PRODUCT_DIR)/metrics_unittests<(EXECUTABLE_SUFFIX)',
- ],
- 'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
- ],
- },
- }],
- ],
-}
diff --git a/chromium/third_party/webrtc/test/test.gyp b/chromium/third_party/webrtc/test/test.gyp
index 0051bcee95c..69776e7b92e 100644
--- a/chromium/third_party/webrtc/test/test.gyp
+++ b/chromium/third_party/webrtc/test/test.gyp
@@ -53,17 +53,48 @@
],
},
{
+ 'target_name': 'rtcp_packet_parser',
+ 'type': 'static_library',
+ 'sources': [
+ 'rtcp_packet_parser.cc',
+ 'rtcp_packet_parser.h',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
+ ],
+ },
+ {
+ 'target_name': 'field_trial',
+ 'type': 'static_library',
+ 'sources': [
+ 'field_trial.cc',
+ 'field_trial.h',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ ],
+ },
+ {
+ 'target_name': 'test_main',
+ 'type': 'static_library',
+ 'sources': [
+ 'test_main.cc',
+ ],
+ 'dependencies': [
+ 'field_trial',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ ],
+ },
+ {
'target_name': 'test_support',
'type': 'static_library',
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/testing/gmock.gyp:gmock',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
'sources': [
- 'test_suite.cc',
- 'test_suite.h',
'testsupport/android/root_path_android.cc',
'testsupport/android/root_path_android_chromium.cc',
'testsupport/fileutils.cc',
@@ -86,7 +117,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
],
@@ -106,10 +137,16 @@
'target_name': 'test_support_main',
'type': 'static_library',
'dependencies': [
+ 'field_trial',
'test_support',
+ '<(DEPTH)/testing/gmock.gyp:gmock',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
],
'sources': [
'run_all_unittests.cc',
+ 'test_suite.cc',
+ 'test_suite.h',
],
},
{
@@ -156,7 +193,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -191,7 +228,7 @@
}],
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['include_tests==1 and build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['include_tests==1 and build_with_chromium==1 and OS=="android"', {
'targets': [
{
'target_name': 'test_support_unittests_apk_target',
diff --git a/chromium/third_party/webrtc/test/test_support_unittests.isolate b/chromium/third_party/webrtc/test/test_support_unittests.isolate
index 0f5de650794..08bd4a4a1af 100644
--- a/chromium/third_party/webrtc/test/test_support_unittests.isolate
+++ b/chromium/third_party/webrtc/test/test_support_unittests.isolate
@@ -8,28 +8,26 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/test_support_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../DEPS',
- '../../testing/test_env.py',
+ '<(DEPTH)/DEPS',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/test_support_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/test/testsupport/fileutils.cc b/chromium/third_party/webrtc/test/testsupport/fileutils.cc
index c89f9bd1a2f..a3e66201fa5 100644
--- a/chromium/third_party/webrtc/test/testsupport/fileutils.cc
+++ b/chromium/third_party/webrtc/test/testsupport/fileutils.cc
@@ -10,12 +10,20 @@
#include "webrtc/test/testsupport/fileutils.h"
+#include <assert.h>
+
#ifdef WIN32
#include <direct.h>
+#include <tchar.h>
+#include <windows.h>
#include <algorithm>
+
+#include "webrtc/system_wrappers/interface/utf_util_win.h"
#define GET_CURRENT_DIR _getcwd
#else
#include <unistd.h>
+
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#define GET_CURRENT_DIR getcwd
#endif
@@ -25,6 +33,7 @@
#endif
#include <stdio.h>
+#include <stdlib.h>
#include <string.h>
#include "webrtc/typedefs.h" // For architecture defines
@@ -92,7 +101,7 @@ std::string OutputPathImpl() {
return kFallbackPath;
}
path += kOutputDirName;
- if (!CreateDirectory(path)) {
+ if (!CreateDir(path)) {
return kFallbackPath;
}
return path + kPathDelimiter;
@@ -154,7 +163,35 @@ std::string WorkingDir() {
#endif // !WEBRTC_ANDROID
-bool CreateDirectory(std::string directory_name) {
+// Generate a temporary filename in a safe way.
+// Largely copied from talk/base/{unixfilesystem,win32filesystem}.cc.
+std::string TempFilename(const std::string &dir, const std::string &prefix) {
+#ifdef WIN32
+ wchar_t filename[MAX_PATH];
+ if (::GetTempFileName(ToUtf16(dir).c_str(),
+ ToUtf16(prefix).c_str(), 0, filename) != 0)
+ return ToUtf8(filename);
+ assert(false);
+ return "";
+#else
+ int len = dir.size() + prefix.size() + 2 + 6;
+ scoped_ptr<char[]> tempname(new char[len]);
+
+ snprintf(tempname.get(), len, "%s/%sXXXXXX", dir.c_str(),
+ prefix.c_str());
+ int fd = ::mkstemp(tempname.get());
+ if (fd == -1) {
+ assert(false);
+ return "";
+ } else {
+ ::close(fd);
+ }
+ std::string ret(tempname.get());
+ return ret;
+#endif
+}
+
+bool CreateDir(std::string directory_name) {
struct stat path_info = {0};
// Check if the path exists already:
if (stat(directory_name.c_str(), &path_info) == 0) {
diff --git a/chromium/third_party/webrtc/test/testsupport/fileutils.h b/chromium/third_party/webrtc/test/testsupport/fileutils.h
index d51bbde21e7..78789fa8709 100644
--- a/chromium/third_party/webrtc/test/testsupport/fileutils.h
+++ b/chromium/third_party/webrtc/test/testsupport/fileutils.h
@@ -103,6 +103,10 @@ std::string ProjectRootPath();
// found, the current working directory ("./") is returned as a fallback.
std::string OutputPath();
+// Generates an empty file with a unique name in the specified directory and
+// returns the file name and path.
+std::string TempFilename(const std::string &dir, const std::string &prefix);
+
// Returns a path to a resource file for the currently executing platform.
// Adapts to what filenames are currently present in the
// [project-root]/resources/ dir.
@@ -132,7 +136,10 @@ std::string WorkingDir();
// Creates a directory if it not already exists.
// Returns true if successful. Will print an error message to stderr and return
// false if a file with the same name already exists.
-bool CreateDirectory(std::string directory_name);
+bool CreateDir(std::string directory_name);
+
+// Checks if a file exists.
+bool FileExists(std::string& file_name);
// File size of the supplied file in bytes. Will return 0 if the file is
// empty or if the file does not exist/is readable.
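
A minimal usage sketch of the helpers declared above (CreateDir, TempFilename, FileExists), mirroring the new FileUtilsTest.TempFilename test; the demo function name is illustrative, everything else comes from webrtc/test/testsupport/fileutils.h:

#include <cstdio>
#include <string>

#include "webrtc/test/testsupport/fileutils.h"

void TempFileDemo() {
  // OutputPath() already ends with the platform path delimiter, so it can be
  // handed straight to TempFilename() as the target directory.
  std::string dir = webrtc::test::OutputPath();
  if (!webrtc::test::CreateDir(dir))
    return;
  // Creates an empty, uniquely named file (mkstemp() on POSIX,
  // ::GetTempFileName() on Windows) and returns its full path.
  std::string path = webrtc::test::TempFilename(dir, "demo");
  if (webrtc::test::FileExists(path))
    std::remove(path.c_str());  // Clean up the empty file again.
}
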
diff --git a/chromium/third_party/webrtc/test/testsupport/fileutils_unittest.cc b/chromium/third_party/webrtc/test/testsupport/fileutils_unittest.cc
index c38e4533b16..4cd45137292 100644
--- a/chromium/third_party/webrtc/test/testsupport/fileutils_unittest.cc
+++ b/chromium/third_party/webrtc/test/testsupport/fileutils_unittest.cc
@@ -46,7 +46,7 @@ class FileUtilsTest : public testing::Test {
original_working_dir_ = webrtc::test::WorkingDir();
std::string resources_path = original_working_dir_ + kPathDelimiter +
kResourcesDir + kPathDelimiter;
- webrtc::test::CreateDirectory(resources_path);
+ webrtc::test::CreateDir(resources_path);
files_.push_back(resources_path + kTestName + "." + kExtension);
files_.push_back(resources_path + kTestName + "_32." + kExtension);
@@ -117,12 +117,19 @@ TEST_F(FileUtilsTest, DISABLED_ON_ANDROID(OutputPathFromRootWorkingDir)) {
ASSERT_EQ("./", webrtc::test::OutputPath());
}
+TEST_F(FileUtilsTest, DISABLED_ON_ANDROID(TempFilename)) {
+ std::string temp_filename = webrtc::test::TempFilename(
+ webrtc::test::OutputPath(), "TempFilenameTest");
+ ASSERT_TRUE(webrtc::test::FileExists(temp_filename));
+ remove(temp_filename.c_str());
+}
+
// Only tests that the code executes
-TEST_F(FileUtilsTest, CreateDirectory) {
+TEST_F(FileUtilsTest, CreateDir) {
std::string directory = "fileutils-unittest-empty-dir";
// Make sure it's removed if a previous test has failed:
remove(directory.c_str());
- ASSERT_TRUE(webrtc::test::CreateDirectory(directory));
+ ASSERT_TRUE(webrtc::test::CreateDir(directory));
remove(directory.c_str());
}
diff --git a/chromium/third_party/webrtc/test/testsupport/metrics/video_metrics.cc b/chromium/third_party/webrtc/test/testsupport/metrics/video_metrics.cc
index f537e03925a..1e19806b4d0 100644
--- a/chromium/third_party/webrtc/test/testsupport/metrics/video_metrics.cc
+++ b/chromium/third_party/webrtc/test/testsupport/metrics/video_metrics.cc
@@ -111,8 +111,8 @@ int CalculateMetrics(VideoMetricsType video_metrics_type,
const size_t frame_length = 3 * width * height >> 1;
I420VideoFrame ref_frame;
I420VideoFrame test_frame;
- scoped_array<uint8_t> ref_buffer(new uint8_t[frame_length]);
- scoped_array<uint8_t> test_buffer(new uint8_t[frame_length]);
+ scoped_ptr<uint8_t[]> ref_buffer(new uint8_t[frame_length]);
+ scoped_ptr<uint8_t[]> test_buffer(new uint8_t[frame_length]);
// Set decoded image parameters.
int half_width = (width + 1) / 2;
diff --git a/chromium/third_party/webrtc/test/webrtc_test_common.gyp b/chromium/third_party/webrtc/test/webrtc_test_common.gyp
index eae66a04406..556a4450189 100644
--- a/chromium/third_party/webrtc/test/webrtc_test_common.gyp
+++ b/chromium/third_party/webrtc/test/webrtc_test_common.gyp
@@ -18,31 +18,22 @@
'configurable_frame_size_encoder.h',
'direct_transport.cc',
'direct_transport.h',
+ 'encoder_settings.cc',
+ 'encoder_settings.h',
'fake_audio_device.cc',
'fake_audio_device.h',
'fake_decoder.cc',
'fake_decoder.h',
'fake_encoder.cc',
'fake_encoder.h',
- 'flags.cc',
- 'flags.h',
+ 'fake_network_pipe.cc',
+ 'fake_network_pipe.h',
'frame_generator_capturer.cc',
'frame_generator_capturer.h',
- 'gl/gl_renderer.cc',
- 'gl/gl_renderer.h',
- 'linux/glx_renderer.cc',
- 'linux/glx_renderer.h',
- 'linux/video_renderer_linux.cc',
- 'mac/run_tests.mm',
- 'mac/video_renderer_mac.h',
- 'mac/video_renderer_mac.mm',
'mock_transport.h',
- 'null_platform_renderer.cc',
'null_transport.cc',
'null_transport.h',
'rtp_rtcp_observer.h',
- 'run_tests.cc',
- 'run_tests.h',
'run_loop.cc',
'run_loop.h',
'statistics.cc',
@@ -51,11 +42,40 @@
'vcm_capturer.h',
'video_capturer.cc',
'video_capturer.h',
+ 'win/run_loop_win.cc',
+ ],
+ 'conditions': [
+ ['OS=="win"', {
+ 'sources!': [
+ 'run_loop.cc',
+ ],
+ }],
+ ],
+ 'dependencies': [
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/modules/modules.gyp:video_capture_module',
+ '<(webrtc_root)/modules/modules.gyp:media_file',
+ '<(webrtc_root)/test/test.gyp:frame_generator',
+ '<(webrtc_root)/test/test.gyp:test_support',
+ ],
+ },
+ {
+ 'target_name': 'webrtc_test_renderer',
+ 'type': 'static_library',
+ 'sources': [
+ 'gl/gl_renderer.cc',
+ 'gl/gl_renderer.h',
+ 'linux/glx_renderer.cc',
+ 'linux/glx_renderer.h',
+ 'linux/video_renderer_linux.cc',
+ 'mac/video_renderer_mac.h',
+ 'mac/video_renderer_mac.mm',
+ 'null_platform_renderer.cc',
'video_renderer.cc',
'video_renderer.h',
'win/d3d_renderer.cc',
'win/d3d_renderer.h',
- 'win/run_loop_win.cc',
],
'conditions': [
['OS=="linux"', {
@@ -66,7 +86,6 @@
['OS=="mac"', {
'sources!': [
'null_platform_renderer.cc',
- 'run_tests.cc',
],
}],
['OS!="linux" and OS!="mac"', {
@@ -78,10 +97,49 @@
['OS=="win"', {
'sources!': [
'null_platform_renderer.cc',
- 'run_loop.cc',
],
}],
],
+ 'dependencies': [
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/modules/modules.gyp:video_capture_module',
+ '<(webrtc_root)/modules/modules.gyp:media_file',
+ '<(webrtc_root)/test/test.gyp:frame_generator',
+ '<(webrtc_root)/test/test.gyp:test_support',
+ ],
+ 'direct_dependent_settings': {
+ 'conditions': [
+ ['OS=="linux"', {
+ 'libraries': [
+ '-lXext',
+ '-lX11',
+ '-lGL',
+ ],
+ }],
+ ['OS=="android"', {
+ 'libraries' : [
+ '-lGLESv2', '-llog',
+ ],
+ }],
+ ['OS=="mac"', {
+ 'xcode_settings' : {
+ 'OTHER_LDFLAGS' : [
+ '-framework Cocoa',
+ '-framework OpenGL',
+ '-framework CoreVideo',
+ ],
+ },
+ }],
+ ],
+ },
+ },
+ {
+ # This target is only needed since the video render module builds platform
+ # specific code and depends on these libraries. This target should be
+ # removed as soon as the new video API doesn't depend on the module.
+ # TODO(mflodman) Remove this target as described above.
+ 'target_name': 'webrtc_test_video_render_dependencies',
+ 'type': 'static_library',
'direct_dependent_settings': {
'conditions': [
['OS=="linux"', {
@@ -91,9 +149,6 @@
'-lGL',
],
}],
- #TODO(pbos) : These dependencies should not have to be here, they
- # aren't used by test code directly, only by components
- # used by the tests.
['OS=="android"', {
'libraries' : [
'-lGLESv2', '-llog',
@@ -102,26 +157,33 @@
['OS=="mac"', {
'xcode_settings' : {
'OTHER_LDFLAGS' : [
- '-framework Foundation',
- '-framework AppKit',
'-framework Cocoa',
'-framework OpenGL',
'-framework CoreVideo',
- '-framework CoreAudio',
- '-framework AudioToolbox',
],
},
}],
],
},
- 'dependencies': [
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- '<(webrtc_root)/modules/modules.gyp:video_capture_module',
- '<(webrtc_root)/modules/modules.gyp:media_file',
- '<(webrtc_root)/test/test.gyp:frame_generator',
- '<(webrtc_root)/test/test.gyp:test_support',
- ],
},
],
+ 'conditions': [
+ ['include_tests==1', {
+ 'targets': [
+ {
+ 'target_name': 'webrtc_test_common_unittests',
+ 'type': '<(gtest_target_type)',
+ 'dependencies': [
+ 'webrtc_test_common',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/testing/gmock.gyp:gmock',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ ],
+ 'sources': [
+ 'fake_network_pipe_unittest.cc',
+ ],
+ },
+ ], #targets
+ }], # include_tests
+ ], # conditions
}
diff --git a/chromium/third_party/webrtc/tools/OWNERS b/chromium/third_party/webrtc/tools/OWNERS
index 7b4acbba6c3..1532e3fda58 100644
--- a/chromium/third_party/webrtc/tools/OWNERS
+++ b/chromium/third_party/webrtc/tools/OWNERS
@@ -1,2 +1,9 @@
phoglund@webrtc.org
-kjellander@webrtc.org \ No newline at end of file
+kjellander@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/tools/barcode_tools/barcode_decoder.py b/chromium/third_party/webrtc/tools/barcode_tools/barcode_decoder.py
index 7a366686a40..b7b7ddd4a6e 100755
--- a/chromium/third_party/webrtc/tools/barcode_tools/barcode_decoder.py
+++ b/chromium/third_party/webrtc/tools/barcode_tools/barcode_decoder.py
@@ -103,7 +103,6 @@ def _decode_barcode_in_file(file_name, command_line_decoder):
try:
out = helper_functions.run_shell_command(
command, fail_msg='Error during decoding of %s' % file_name)
- print 'Image %s : decoded barcode: %s' % (file_name, out)
text_file = open('%s.txt' % file_name[:-4], 'w')
text_file.write(out)
text_file.close()
diff --git a/chromium/third_party/webrtc/tools/frame_analyzer/video_quality_analysis.cc b/chromium/third_party/webrtc/tools/frame_analyzer/video_quality_analysis.cc
index b8c90b10718..c2a5e4991f8 100644
--- a/chromium/third_party/webrtc/tools/frame_analyzer/video_quality_analysis.cc
+++ b/chromium/third_party/webrtc/tools/frame_analyzer/video_quality_analysis.cc
@@ -17,6 +17,9 @@
#include <string>
#define STATS_LINE_LENGTH 32
+#define Y4M_FILE_HEADER_MAX_SIZE 200
+#define Y4M_FRAME_DELIMITER "FRAME"
+#define Y4M_FRAME_HEADER_SIZE 6
namespace webrtc {
namespace test {
@@ -84,46 +87,81 @@ bool GetNextStatsLine(FILE* stats_file, char* line) {
return true;
}
-bool GetNextI420Frame(FILE* input_file, int width, int height,
- uint8* result_frame) {
+bool ExtractFrameFromYuvFile(const char* i420_file_name, int width, int height,
+ int frame_number, uint8* result_frame) {
int frame_size = GetI420FrameSize(width, height);
+ int offset = frame_number * frame_size; // Calculate offset for the frame.
bool errors = false;
+ FILE* input_file = fopen(i420_file_name, "rb");
+ if (input_file == NULL) {
+ fprintf(stderr, "Couldn't open input file for reading: %s\n",
+ i420_file_name);
+ return false;
+ }
+
+ // Change stream pointer to new offset.
+ fseek(input_file, offset, SEEK_SET);
+
size_t bytes_read = fread(result_frame, 1, frame_size, input_file);
- if (bytes_read != static_cast<size_t>(frame_size)) {
- // If end-of-file is reached, don't print an error.
- if (feof(input_file)) {
- return false;
- }
- fprintf(stdout, "Error while reading frame from file\n");
+ if (bytes_read != static_cast<size_t>(frame_size) &&
+ ferror(input_file)) {
+ fprintf(stdout, "Error while reading frame no %d from file %s\n",
+ frame_number, i420_file_name);
errors = true;
}
+ fclose(input_file);
return !errors;
}
-bool ExtractFrameFromI420(const char* i420_file_name, int width, int height,
- int frame_number, uint8* result_frame) {
+bool ExtractFrameFromY4mFile(const char* y4m_file_name, int width, int height,
+ int frame_number, uint8* result_frame) {
int frame_size = GetI420FrameSize(width, height);
- int offset = frame_number * frame_size; // Calculate offset for the frame.
+ int frame_offset = frame_number * frame_size;
bool errors = false;
- FILE* input_file = fopen(i420_file_name, "rb");
+ FILE* input_file = fopen(y4m_file_name, "rb");
if (input_file == NULL) {
fprintf(stderr, "Couldn't open input file for reading: %s\n",
- i420_file_name);
+ y4m_file_name);
return false;
}
- // Change stream pointer to new offset.
- fseek(input_file, offset, SEEK_SET);
+  // The YUV4MPEG2 (a.k.a. Y4M) file format has a file header and per-frame
+  // headers. The file header looks like: "YUV4MPEG2 C420 W640 H360 Ip F30:1 A1:1".
+ // Skip the header if this is the first frame of the file.
+ if (frame_number == 0) {
+ char frame_header[Y4M_FILE_HEADER_MAX_SIZE];
+ size_t bytes_read =
+ fread(frame_header, 1, Y4M_FILE_HEADER_MAX_SIZE, input_file);
+ if (bytes_read != static_cast<size_t>(frame_size) && ferror(input_file)) {
+ fprintf(stdout, "Error while reading first frame from file %s\n",
+ y4m_file_name);
+ fclose(input_file);
+ return false;
+ }
+ std::string header_contents(frame_header);
+ std::size_t found = header_contents.find(Y4M_FRAME_DELIMITER);
+ if (found == std::string::npos) {
+ fprintf(stdout, "Corrupted Y4M header, could not find \"FRAME\" in %s\n",
+ header_contents.c_str());
+ fclose(input_file);
+ return false;
+ }
+ frame_offset = static_cast<int>(found);
+ }
+
+ // Change stream pointer to new offset, skipping the frame header as well.
+ fseek(input_file, frame_offset + Y4M_FRAME_HEADER_SIZE, SEEK_SET);
size_t bytes_read = fread(result_frame, 1, frame_size, input_file);
if (bytes_read != static_cast<size_t>(frame_size) &&
ferror(input_file)) {
fprintf(stdout, "Error while reading frame no %d from file %s\n",
- frame_number, i420_file_name);
+ frame_number, y4m_file_name);
errors = true;
}
+
fclose(input_file);
return !errors;
}
@@ -176,6 +214,12 @@ double CalculateMetrics(VideoAnalysisMetricsType video_metrics_type,
void RunAnalysis(const char* reference_file_name, const char* test_file_name,
const char* stats_file_name, int width, int height,
ResultsContainer* results) {
+  // Check if the reference_file_name contains "y4m" (Y4M input mode).
+ bool y4m_mode = false;
+ if (std::string(reference_file_name).find("y4m") != std::string::npos){
+ y4m_mode = true;
+ }
+
int size = GetI420FrameSize(width, height);
FILE* stats_file = fopen(stats_file_name, "r");
@@ -202,10 +246,15 @@ void RunAnalysis(const char* reference_file_name, const char* test_file_name,
assert(extracted_test_frame != -1);
assert(decoded_frame_number != -1);
- ExtractFrameFromI420(test_file_name, width, height, extracted_test_frame,
- test_frame);
- ExtractFrameFromI420(reference_file_name, width, height,
- decoded_frame_number, reference_frame);
+ ExtractFrameFromYuvFile(test_file_name, width, height, extracted_test_frame,
+ test_frame);
+ if (y4m_mode) {
+ ExtractFrameFromY4mFile(reference_file_name, width, height,
+ decoded_frame_number, reference_frame);
+ } else {
+ ExtractFrameFromYuvFile(reference_file_name, width, height,
+ decoded_frame_number, reference_frame);
+ }
// Calculate the PSNR and SSIM.
double result_psnr = CalculateMetrics(kPSNR, reference_frame, test_frame,
diff --git a/chromium/third_party/webrtc/tools/frame_analyzer/video_quality_analysis.h b/chromium/third_party/webrtc/tools/frame_analyzer/video_quality_analysis.h
index b2ecc082c75..31064a28f81 100644
--- a/chromium/third_party/webrtc/tools/frame_analyzer/video_quality_analysis.h
+++ b/chromium/third_party/webrtc/tools/frame_analyzer/video_quality_analysis.h
@@ -97,13 +97,14 @@ bool IsThereBarcodeError(std::string line);
// frame_0023 0284, we will get 284.
int ExtractDecodedFrameNumber(std::string line);
-// Gets the next frame from an open I420 file.
-bool GetNextI420Frame(FILE* input_file, int width, int height,
- uint8* result_frame);
-
-// Extracts an I420 frame at position frame_number from the file.
-bool ExtractFrameFromI420(const char* i420_file_name, int width, int height,
- int frame_number, uint8* result_frame);
+// Extracts an I420 frame at position frame_number from the raw YUV file.
+bool ExtractFrameFromYuvFile(const char* i420_file_name, int width, int height,
+ int frame_number, uint8* result_frame);
+
+// Extracts an I420 frame at position frame_number from the Y4M file. The first
+// frame corresponds to |frame_number| 0.
+bool ExtractFrameFromY4mFile(const char* i420_file_name, int width, int height,
+ int frame_number, uint8* result_frame);
} // namespace test
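
For reference, the seek arithmetic in ExtractFrameFromYuvFile() follows directly from the I420 layout: one full-resolution luma plane plus two quarter-resolution chroma planes, i.e. 3/2 bytes per pixel, the same "3 * width * height >> 1" used elsewhere in this patch. A small standalone sketch of that arithmetic (function names are illustrative):

#include <stddef.h>

// Bytes in one I420 frame: width*height luma samples plus two
// (width/2)*(height/2) chroma planes.
static size_t I420FrameSize(int width, int height) {
  return static_cast<size_t>(width) * height * 3 / 2;
}

// Frame N of a raw .yuv file starts at N * frame_size. Y4M files additionally
// carry a stream header and a "FRAME" delimiter line per frame, which is why
// ExtractFrameFromY4mFile() above adds Y4M_FRAME_HEADER_SIZE to its offset.
static size_t RawYuvFrameOffset(int width, int height, int frame_number) {
  return static_cast<size_t>(frame_number) * I420FrameSize(width, height);
}
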
diff --git a/chromium/third_party/webrtc/tools/frame_editing/frame_editing_lib.cc b/chromium/third_party/webrtc/tools/frame_editing/frame_editing_lib.cc
index 6e252e87111..93a548fe568 100644
--- a/chromium/third_party/webrtc/tools/frame_editing/frame_editing_lib.cc
+++ b/chromium/third_party/webrtc/tools/frame_editing/frame_editing_lib.cc
@@ -38,7 +38,7 @@ int EditFrames(const string& in_path, int width, int height,
// Frame size of I420.
int frame_length = CalcBufferSize(kI420, width, height);
- webrtc::scoped_array<uint8_t> temp_buffer(new uint8_t[frame_length]);
+ webrtc::scoped_ptr<uint8_t[]> temp_buffer(new uint8_t[frame_length]);
FILE* out_fid = fopen(out_path.c_str(), "wb");
diff --git a/chromium/third_party/webrtc/tools/frame_editing/frame_editing_unittest.cc b/chromium/third_party/webrtc/tools/frame_editing/frame_editing_unittest.cc
index 83302181fb5..bdcc2f21f51 100644
--- a/chromium/third_party/webrtc/tools/frame_editing/frame_editing_unittest.cc
+++ b/chromium/third_party/webrtc/tools/frame_editing/frame_editing_unittest.cc
@@ -53,8 +53,8 @@ class FrameEditingTest : public ::testing::Test {
}
// Compares the frames in both streams to the end of one of the streams.
void CompareToTheEnd(FILE* test_video_fid, FILE* ref_video_fid,
- scoped_array<int>* ref_buffer,
- scoped_array<int>* test_buffer) {
+ scoped_ptr<int[]>* ref_buffer,
+ scoped_ptr<int[]>* test_buffer) {
while (!feof(test_video_fid) && !feof(ref_video_fid)) {
num_bytes_read_ = fread(ref_buffer->get(), 1, kFrameSize, ref_video_fid);
if (!feof(ref_video_fid)) {
@@ -78,8 +78,8 @@ class FrameEditingTest : public ::testing::Test {
FILE* original_fid_;
FILE* edited_fid_;
int num_bytes_read_;
- scoped_array<int> original_buffer_;
- scoped_array<int> edited_buffer_;
+ scoped_ptr<int[]> original_buffer_;
+ scoped_ptr<int[]> edited_buffer_;
int num_frames_read_;
};
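
Several hunks in this patch (video_metrics.cc, frame_editing_lib.cc and the test fixture above) migrate from the removed scoped_array<T> to scoped_ptr<T[]>. A minimal sketch of the new spelling, assuming only the scoped_ptr header already referenced by this patch; function and variable names are illustrative:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#include "webrtc/system_wrappers/interface/scoped_ptr.h"

void BufferDemo(size_t frame_length) {
  // Old spelling, removed by this patch:
  //   webrtc::scoped_array<uint8_t> buffer(new uint8_t[frame_length]);
  // New spelling; the T[] specialization calls delete[] on destruction, so the
  // ownership semantics are unchanged.
  webrtc::scoped_ptr<uint8_t[]> buffer(new uint8_t[frame_length]);
  memset(buffer.get(), 0, frame_length);
}
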
diff --git a/chromium/third_party/webrtc/tools/loopback_test/OWNERS b/chromium/third_party/webrtc/tools/loopback_test/OWNERS
new file mode 100644
index 00000000000..296f71fffc4
--- /dev/null
+++ b/chromium/third_party/webrtc/tools/loopback_test/OWNERS
@@ -0,0 +1 @@
+andresp@webrtc.org
diff --git a/chromium/third_party/webrtc/tools/loopback_test/README b/chromium/third_party/webrtc/tools/loopback_test/README
new file mode 100644
index 00000000000..68f8eed6800
--- /dev/null
+++ b/chromium/third_party/webrtc/tools/loopback_test/README
@@ -0,0 +1,12 @@
+Loopback test
+
+This is a simple HTML test framework for running a loopback test, which can go
+via TURN. For now the test is used to analyse bandwidth estimation and to
+record bad scenarios.
+
+How to run:
+ ./run-server.sh (to start python serving the tests)
+ Access http://localhost:8080/loopback_test.html to run the test
+
+How to record:
+ You can use record-test.sh to get a tcpdump of a test run.
diff --git a/chromium/third_party/webrtc/tools/loopback_test/adapter.js b/chromium/third_party/webrtc/tools/loopback_test/adapter.js
new file mode 100644
index 00000000000..6c2bd04d47e
--- /dev/null
+++ b/chromium/third_party/webrtc/tools/loopback_test/adapter.js
@@ -0,0 +1,211 @@
+/**
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file is cloned from samples/js/base/adapter.js
+// Modify the original and make a new copy instead of changing this file.
+
+var RTCPeerConnection = null;
+var getUserMedia = null;
+var attachMediaStream = null;
+var reattachMediaStream = null;
+var webrtcDetectedBrowser = null;
+var webrtcDetectedVersion = null;
+
+function trace(text) {
+ // This function is used for logging.
+ if (text[text.length - 1] == '\n') {
+ text = text.substring(0, text.length - 1);
+ }
+ console.log((performance.now() / 1000).toFixed(3) + ": " + text);
+}
+function maybeFixConfiguration(pcConfig) {
+ if (pcConfig == null) {
+ return;
+ }
+ for (var i = 0; i < pcConfig.iceServers.length; i++) {
+ if (pcConfig.iceServers[i].hasOwnProperty('urls')){
+ pcConfig.iceServers[i]['url'] = pcConfig.iceServers[i]['urls'];
+ delete pcConfig.iceServers[i]['urls'];
+ }
+ }
+}
+
+if (navigator.mozGetUserMedia) {
+ console.log("This appears to be Firefox");
+
+ webrtcDetectedBrowser = "firefox";
+
+ webrtcDetectedVersion =
+ parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
+
+ // The RTCPeerConnection object.
+ var RTCPeerConnection = function(pcConfig, pcConstraints) {
+ // .urls is not supported in FF yet.
+ maybeFixConfiguration(pcConfig);
+ return new mozRTCPeerConnection(pcConfig, pcConstraints);
+ }
+
+ // The RTCSessionDescription object.
+ RTCSessionDescription = mozRTCSessionDescription;
+
+ // The RTCIceCandidate object.
+ RTCIceCandidate = mozRTCIceCandidate;
+
+ // Get UserMedia (only difference is the prefix).
+ // Code from Adam Barth.
+ getUserMedia = navigator.mozGetUserMedia.bind(navigator);
+ navigator.getUserMedia = getUserMedia;
+
+ // Creates iceServer from the url for FF.
+ createIceServer = function(url, username, password) {
+ var iceServer = null;
+ var url_parts = url.split(':');
+ if (url_parts[0].indexOf('stun') === 0) {
+ // Create iceServer with stun url.
+ iceServer = { 'url': url };
+ } else if (url_parts[0].indexOf('turn') === 0) {
+ if (webrtcDetectedVersion < 27) {
+ // Create iceServer with turn url.
+ // Ignore the transport parameter from TURN url for FF version <=27.
+ var turn_url_parts = url.split("?");
+ // Return null for createIceServer if transport=tcp.
+ if (turn_url_parts.length === 1 ||
+ turn_url_parts[1].indexOf('transport=udp') === 0) {
+ iceServer = {'url': turn_url_parts[0],
+ 'credential': password,
+ 'username': username};
+ }
+ } else {
+ // FF 27 and above supports transport parameters in TURN url,
+ // So passing in the full url to create iceServer.
+ iceServer = {'url': url,
+ 'credential': password,
+ 'username': username};
+ }
+ }
+ return iceServer;
+ };
+
+ createIceServers = function(urls, username, password) {
+ var iceServers = [];
+    // Use .url for Firefox.
+ for (i = 0; i < urls.length; i++) {
+ var iceServer = createIceServer(urls[i],
+ username,
+ password);
+ if (iceServer !== null) {
+ iceServers.push(iceServer);
+ }
+ }
+ return iceServers;
+ }
+
+ // Attach a media stream to an element.
+ attachMediaStream = function(element, stream) {
+ console.log("Attaching media stream");
+ element.mozSrcObject = stream;
+ element.play();
+ };
+
+ reattachMediaStream = function(to, from) {
+ console.log("Reattaching media stream");
+ to.mozSrcObject = from.mozSrcObject;
+ to.play();
+ };
+
+ // Fake get{Video,Audio}Tracks
+ if (!MediaStream.prototype.getVideoTracks) {
+ MediaStream.prototype.getVideoTracks = function() {
+ return [];
+ };
+ }
+
+ if (!MediaStream.prototype.getAudioTracks) {
+ MediaStream.prototype.getAudioTracks = function() {
+ return [];
+ };
+ }
+} else if (navigator.webkitGetUserMedia) {
+ console.log("This appears to be Chrome");
+
+ webrtcDetectedBrowser = "chrome";
+ webrtcDetectedVersion =
+ parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10);
+
+ // Creates iceServer from the url for Chrome M33 and earlier.
+ createIceServer = function(url, username, password) {
+ var iceServer = null;
+ var url_parts = url.split(':');
+ if (url_parts[0].indexOf('stun') === 0) {
+ // Create iceServer with stun url.
+ iceServer = { 'url': url };
+ } else if (url_parts[0].indexOf('turn') === 0) {
+      // Chrome M28 and above uses the TURN format below.
+ iceServer = {'url': url,
+ 'credential': password,
+ 'username': username};
+ }
+ return iceServer;
+ };
+
+ // Creates iceServers from the urls for Chrome M34 and above.
+ createIceServers = function(urls, username, password) {
+ var iceServers = [];
+ if (webrtcDetectedVersion >= 34) {
+ // .urls is supported since Chrome M34.
+ iceServers = {'urls': urls,
+ 'credential': password,
+ 'username': username };
+ } else {
+ for (i = 0; i < urls.length; i++) {
+ var iceServer = createIceServer(urls[i],
+ username,
+ password);
+ if (iceServer !== null) {
+ iceServers.push(iceServer);
+ }
+ }
+ }
+ return iceServers;
+ };
+
+ // The RTCPeerConnection object.
+ var RTCPeerConnection = function(pcConfig, pcConstraints) {
+ // .urls is supported since Chrome M34.
+ if (webrtcDetectedVersion < 34) {
+ maybeFixConfiguration(pcConfig);
+ }
+ return new webkitRTCPeerConnection(pcConfig, pcConstraints);
+ }
+
+ // Get UserMedia (only difference is the prefix).
+ // Code from Adam Barth.
+ getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
+ navigator.getUserMedia = getUserMedia;
+
+ // Attach a media stream to an element.
+ attachMediaStream = function(element, stream) {
+ if (typeof element.srcObject !== 'undefined') {
+ element.srcObject = stream;
+ } else if (typeof element.mozSrcObject !== 'undefined') {
+ element.mozSrcObject = stream;
+ } else if (typeof element.src !== 'undefined') {
+ element.src = URL.createObjectURL(stream);
+ } else {
+ console.log('Error attaching stream to element.');
+ }
+ };
+
+ reattachMediaStream = function(to, from) {
+ to.src = from.src;
+ };
+} else {
+ console.log("Browser does not appear to be WebRTC-capable");
+}
diff --git a/chromium/third_party/webrtc/tools/loopback_test/loopback_test.html b/chromium/third_party/webrtc/tools/loopback_test/loopback_test.html
new file mode 100644
index 00000000000..676fbe79c40
--- /dev/null
+++ b/chromium/third_party/webrtc/tools/loopback_test/loopback_test.html
@@ -0,0 +1,227 @@
+<!DOCTYPE html>
+<!--
+ This page was created to help debug and study webrtc issues such as
+ bandwidth estimation problems. It allows one to easily launch a test
+ case that establishs a connection between 2 peer connections
+-->
+<html>
+<head>
+<title>Loopback test</title>
+
+<!-- In order to plot graphs, this tool uses the Google Visualization API,
+     which is loaded via google.load provided by the Google JS API. -->
+<script src="//www.google.com/jsapi"></script>
+
+<!-- This file is included to allow loopback_test.js to instantiate an
+     RTCPeerConnection in a browser- and version-agnostic way. -->
+<script src="adapter.js"></script>
+
+<!-- Provides class StatTracker used by loopback_test.js to keep track of
+ RTCPeerConnection stats -->
+<script src="stat_tracker.js"></script>
+
+<!-- Provides the LoopbackTest class, which has the core logic for the test
+     itself: create 2 peer connections, establish a call, filter turn
+     candidates, constrain the video bitrate, etc.
+ -->
+<script src="loopback_test.js"></script>
+
+<style>
+#chart {
+ height: 400px;
+}
+
+#control-range {
+ height: 100px;
+}
+</style>
+</head>
+<body>
+<div id="test-launcher">
+ <p>Duration (s): <input id="duration" type="text"></p>
+ <p>Max video bitrate (kbps): <input id="max-video-bitrate" type="text"></p>
+ <p>Peer connection constraints: <input id="pc-constraints" type="text"></p>
+ <p>Force TURN: <input id="force-turn" type="checkbox" checked></p>
+ <p><input id="launcher-button" type="button" value="Run test">
+ <div id="test-status" style="display:none"></div>
+ <div id="dashboard">
+ <div id="control-category"></div>
+ <div id="chart"></div>
+ <div id="control-range"></div>
+ </div>
+</div>
+<script>
+google.load('visualization', '1.0', {'packages':['controls']});
+
+var durationInput = document.getElementById('duration');
+var maxVideoBitrateInput = document.getElementById('max-video-bitrate');
+var forceTurnInput = document.getElementById('force-turn');
+var launcherButton = document.getElementById('launcher-button');
+var autoModeInput = document.createElement('input');
+var testStatus = document.getElementById('test-status');
+var pcConstraintsInput = document.getElementById('pc-constraints');
+
+launcherButton.onclick = start;
+
+// Load parameters from the url if present. This allows one to link to
+// a specific test configuration and is used to automatically pass parameters
+// for scripts such as record-test.sh
+function getURLParameter(name, default_value) {
+ var search =
+ RegExp('(^\\?|&)' + name + '=' + '(.+?)(&|$)').exec(location.search);
+ if (search)
+ return decodeURI(search[2]);
+ else
+ return default_value;
+}
+
+durationInput.value = getURLParameter('duration', 10);
+maxVideoBitrateInput.value = getURLParameter('max-video-bitrate', 2000);
+forceTurnInput.checked = (getURLParameter('force-turn', 'true') === 'true');
+autoModeInput.checked = (getURLParameter('auto-mode', 'false') === 'true');
+pcConstraintsInput.value = getURLParameter('pc-constraints', '');
+
+if (autoModeInput.checked) start();
+
+function start() {
+ var durationMs = parseInt(durationInput.value) * 1000;
+ var maxVideoBitrateKbps = parseInt(maxVideoBitrateInput.value);
+ var forceTurn = forceTurnInput.checked;
+ var autoClose = autoModeInput.checked;
+ var pcConstraints = pcConstraintsInput.value == "" ?
+ null : JSON.parse(pcConstraintsInput.value);
+
+ var updateStatusInterval;
+ var testFinished = false;
+ function updateStatus() {
+ if (testFinished) {
+ testStatus.innerHTML = 'Test finished';
+ if (updateStatusInterval) {
+ clearInterval(updateStatusInterval);
+ updateStatusInterval = null;
+ }
+ } else {
+ if (!updateStatusInterval) {
+ updateStatusInterval = setInterval(updateStatus, 1000);
+ testStatus.innerHTML = 'Running';
+ }
+ testStatus.innerHTML += '.';
+ }
+ }
+
+ if (!(isFinite(maxVideoBitrateKbps) && maxVideoBitrateKbps > 0)) {
+ // TODO(andresp): Get a better way to show errors than alert.
+ alert("Invalid max video bitrate");
+ return;
+ }
+
+ if (!(isFinite(durationMs) && durationMs > 0)) {
+ alert("Invalid duration");
+ return;
+ }
+
+ durationInput.disabled = true;
+ forceTurnInput.disabled = true;
+ maxVideoBitrateInput.disabled = true;
+ launcherButton.style.display = 'none';
+ testStatus.style.display = 'block';
+
+ getUserMedia({audio:true, video:true},
+ gotStream, function() {});
+
+ function gotStream(stream) {
+ updateStatus();
+ var test = new LoopbackTest(stream, durationMs,
+ forceTurn,
+ pcConstraints,
+ maxVideoBitrateKbps);
+ test.run(onTestFinished.bind(test));
+ }
+
+ function onTestFinished() {
+ testFinished = true;
+ updateStatus();
+ if (autoClose) {
+ window.close();
+ } else {
+ plotStats(this.getResults());
+ }
+ }
+}
+
+function plotStats(data) {
+ var dashboard = new google.visualization.Dashboard(
+ document.getElementById('dashboard'));
+
+ var chart = new google.visualization.ChartWrapper({
+ 'containerId': 'chart',
+ 'chartType': 'LineChart',
+ 'options': { 'pointSize': 0, 'lineWidth': 1, 'interpolateNulls': true },
+ });
+
+ var rangeFilter = new google.visualization.ControlWrapper({
+ 'controlType': 'ChartRangeFilter',
+ 'containerId': 'control-range',
+ 'options': {
+ 'filterColumnIndex': 0,
+ 'ui': {
+ 'chartType': 'ScatterChart',
+ 'chartOptions': {
+ 'hAxis': {'baselineColor': 'none'}
+ },
+ 'chartView': {
+ 'columns': [0, 1]
+ },
+ 'minRangeSize': 1000 // 1 second
+ }
+ },
+ });
+
+ // Create a table with the columns of the dataset.
+ var columnsTable = new google.visualization.DataTable();
+ columnsTable.addColumn('number', 'columnIndex');
+ columnsTable.addColumn('string', 'columnLabel');
+ var initState = {selectedValues: []};
+ for (var i = 1; i < data.getNumberOfColumns(); i++) {
+ columnsTable.addRow([i, data.getColumnLabel(i)]);
+ initState.selectedValues.push(data.getColumnLabel(i));
+ }
+
+ var columnFilter = new google.visualization.ControlWrapper({
+ controlType: 'CategoryFilter',
+ containerId: 'control-category',
+ dataTable: columnsTable,
+ options: {
+ filterColumnLabel: 'columnLabel',
+ ui: {
+ label: '',
+ allowNone: false,
+ selectedValuesLayout: 'aside'
+ }
+ },
+ state: initState
+ });
+ google.visualization.events.addListener(columnFilter, 'statechange',
+ function () {
+ var state = columnFilter.getState();
+ var row;
+ var columnIndices = [0];
+ for (var i = 0; i < state.selectedValues.length; i++) {
+ row = columnsTable.getFilteredRows([{
+ column: 1,
+ value: state.selectedValues[i]}])[0];
+ columnIndices.push(columnsTable.getValue(row, 0));
+ }
+ // Sort the indices into their original order
+ columnIndices.sort(function (a, b) { return (a - b); });
+ chart.setView({columns: columnIndices});
+ chart.draw();
+ });
+
+ columnFilter.draw();
+ dashboard.bind([rangeFilter], [chart]);
+ dashboard.draw(data);
+}
+</script>
+</body>
+</html>
diff --git a/chromium/third_party/webrtc/tools/loopback_test/loopback_test.js b/chromium/third_party/webrtc/tools/loopback_test/loopback_test.js
new file mode 100644
index 00000000000..5e596b7ff37
--- /dev/null
+++ b/chromium/third_party/webrtc/tools/loopback_test/loopback_test.js
@@ -0,0 +1,240 @@
+/**
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// LoopbackTest establishes a one-way loopback call between 2 peer connections
+// while continuously monitoring bandwidth stats. The idea is to use this as
+// a base for other future tests and to keep track of more than just bandwidth
+// stats.
+//
+// Usage:
+// var test = new LoopbackTest(stream, callDurationMs,
+// forceTurn, pcConstraints,
+// maxVideoBitrateKbps);
+// test.run(onDone);
+// function onDone() {
+// test.getResults(); // return stats recorded during the loopback test.
+// }
+//
+function LoopbackTest(
+ stream,
+ callDurationMs,
+ forceTurn,
+ pcConstraints,
+ maxVideoBitrateKbps) {
+
+ var pc1StatTracker;
+ var pc2StatTracker;
+
+  // In order to study the effect of the network (e.g. wifi) on a peer
+  // connection one can establish a loopback call and force it to go via a
+  // turn server. This way the call won't switch to local addresses. That is
+  // achieved by filtering out all non-relay ice candidates on both peers.
+ function constrainTurnCandidates(pc) {
+ var origAddIceCandidate = pc.addIceCandidate;
+ pc.addIceCandidate = function (candidate, successCallback,
+ failureCallback) {
+ if (forceTurn && candidate.candidate.indexOf("typ relay ") == -1) {
+ trace("Dropping non-turn candidate: " + candidate.candidate);
+ successCallback();
+ return;
+ } else {
+ origAddIceCandidate.call(this, candidate, successCallback,
+ failureCallback);
+ }
+ }
+ }
+
+ // FEC makes it hard to study bwe estimation since there seems to be a spike
+ // when it is enabled and disabled. Disable it for now. FEC issue tracked on:
+ // https://code.google.com/p/webrtc/issues/detail?id=3050
+ function constrainOfferToRemoveFec(pc) {
+ var origCreateOffer = pc.createOffer;
+ pc.createOffer = function (successCallback, failureCallback, options) {
+ function filteredSuccessCallback(desc) {
+ desc.sdp = desc.sdp.replace(/(m=video 1 [^\r]+)(116 117)(\r\n)/g,
+ '$1\r\n');
+ desc.sdp = desc.sdp.replace(/a=rtpmap:116 red\/90000\r\n/g, '');
+ desc.sdp = desc.sdp.replace(/a=rtpmap:117 ulpfec\/90000\r\n/g, '');
+ successCallback(desc);
+ }
+ origCreateOffer.call(this, filteredSuccessCallback, failureCallback,
+ options);
+ }
+ }
+
+  // Constrain the max video bitrate by modifying the SDP when creating an answer.
+ function constrainBitrateAnswer(pc) {
+ var origCreateAnswer = pc.createAnswer;
+ pc.createAnswer = function (successCallback, failureCallback, options) {
+ function filteredSuccessCallback(desc) {
+ if (maxVideoBitrateKbps) {
+ desc.sdp = desc.sdp.replace(
+ /a=mid:video\r\n/g,
+ 'a=mid:video\r\nb=AS:' + maxVideoBitrateKbps + '\r\n');
+ }
+ successCallback(desc);
+ }
+ origCreateAnswer.call(this, filteredSuccessCallback, failureCallback,
+ options);
+ }
+ }
+
+ // Run the actual LoopbackTest.
+ this.run = function(doneCallback) {
+ if (forceTurn) requestTurn(start, fail);
+ else start();
+
+ function start(turnServer) {
+ var pcConfig = forceTurn ? { iceServers: [turnServer] } : null;
+ console.log(pcConfig);
+ var pc1 = new RTCPeerConnection(pcConfig, pcConstraints);
+ constrainTurnCandidates(pc1);
+ constrainOfferToRemoveFec(pc1);
+ pc1StatTracker = new StatTracker(pc1, 50);
+ pc1StatTracker.recordStat("EstimatedSendBitrate",
+ "bweforvideo", "googAvailableSendBandwidth");
+ pc1StatTracker.recordStat("TransmitBitrate",
+ "bweforvideo", "googTransmitBitrate");
+ pc1StatTracker.recordStat("TargetEncodeBitrate",
+ "bweforvideo", "googTargetEncBitrate");
+ pc1StatTracker.recordStat("ActualEncodedBitrate",
+ "bweforvideo", "googActualEncBitrate");
+
+ var pc2 = new RTCPeerConnection(pcConfig, pcConstraints);
+ constrainTurnCandidates(pc2);
+ constrainBitrateAnswer(pc2);
+ pc2StatTracker = new StatTracker(pc2, 50);
+ pc2StatTracker.recordStat("REMB",
+ "bweforvideo", "googAvailableReceiveBandwidth");
+
+ pc1.addStream(stream);
+ var call = new Call(pc1, pc2);
+
+ call.start();
+ setTimeout(function () {
+ call.stop();
+ pc1StatTracker.stop();
+ pc2StatTracker.stop();
+ success();
+ }, callDurationMs);
+ }
+
+ function success() {
+ trace("Success");
+ doneCallback();
+ }
+
+ function fail(msg) {
+ trace("Fail: " + msg);
+ doneCallback();
+ }
+ }
+
+ // Returns a google visualization datatable with the recorded samples during
+ // the loopback test.
+ this.getResults = function () {
+ return mergeDataTable(pc1StatTracker.dataTable(),
+ pc2StatTracker.dataTable());
+ }
+
+ // Helper class to establish and manage a call between 2 peer connections.
+ // Usage:
+ // var c = new Call(pc1, pc2);
+ // c.start();
+ // c.stop();
+ //
+ function Call(pc1, pc2) {
+ pc1.onicecandidate = applyIceCandidate.bind(pc2);
+ pc2.onicecandidate = applyIceCandidate.bind(pc1);
+
+ function applyIceCandidate(e) {
+ if (e.candidate) {
+ this.addIceCandidate(new RTCIceCandidate(e.candidate),
+ onAddIceCandidateSuccess,
+ onAddIceCandidateError);
+ }
+ }
+
+ function onAddIceCandidateSuccess() {}
+ function onAddIceCandidateError(error) {
+ trace("Failed to add Ice Candidate: " + error.toString());
+ }
+
+ this.start = function() {
+ pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
+
+ function onCreateSessionDescriptionError(error) {
+ trace('Failed to create session description: ' + error.toString());
+ }
+
+ function gotDescription1(desc){
+ trace("Offer: " + desc.sdp);
+ pc1.setLocalDescription(desc);
+ pc2.setRemoteDescription(desc);
+ // Since the "remote" side has no media stream we need
+ // to pass in the right constraints in order for it to
+ // accept the incoming offer of audio and video.
+ pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError);
+ }
+
+ function gotDescription2(desc){
+ trace("Answer: " + desc.sdp);
+ pc2.setLocalDescription(desc);
+ pc1.setRemoteDescription(desc);
+ }
+ }
+
+ this.stop = function() {
+ pc1.close();
+ pc2.close();
+ }
+ }
+
+ // Request a turn server. This uses the same servers as apprtc.
+ function requestTurn(successCallback, failureCallback) {
+ var currentDomain = document.domain;
+ if (currentDomain.search('localhost') === -1 &&
+ currentDomain.search('webrtc.googlecode.com') === -1) {
+ failureCallback("Domain not authorized for turn server: " +
+ currentDomain);
+ return;
+ }
+
+ // Get a turn server from computeengineondemand.appspot.com.
+ var turnUrl = 'https://computeengineondemand.appspot.com/' +
+ 'turn?username=156547625762562&key=4080218913';
+ var xmlhttp = new XMLHttpRequest();
+ xmlhttp.onreadystatechange = onTurnResult;
+ xmlhttp.open('GET', turnUrl, true);
+ xmlhttp.send();
+
+ function onTurnResult() {
+ if (this.readyState !== 4) {
+ return;
+ }
+
+ if (this.status === 200) {
+ var turnServer = JSON.parse(xmlhttp.responseText);
+ // Keep only UDP TURN URIs and build the ice servers using the polyfill (adapter.js).
+ turnServer.uris = turnServer.uris.filter(
+ function (e) { return e.search('transport=udp') != -1; }
+ );
+ var iceServers = createIceServers(turnServer.uris,
+ turnServer.username,
+ turnServer.password);
+ if (iceServers !== null) {
+ successCallback(iceServers);
+ return;
+ }
+ }
+ failureCallback("Failed to get a turn server.");
+ }
+ }
+}
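
constrainBitrateAnswer above caps the received video bitrate by inserting a b=AS (application-specific maximum bandwidth, in kbps) line right after the a=mid:video attribute of the answer SDP. A minimal standalone sketch of that munging, runnable outside the page; the sample SDP fragment below is illustrative, not taken from a real session:

    // Insert a b=AS line after the video section's a=mid:video attribute,
    // mirroring what constrainBitrateAnswer does to the answer SDP.
    function capVideoBitrate(sdp, maxVideoBitrateKbps) {
      return sdp.replace(/a=mid:video\r\n/g,
                         'a=mid:video\r\nb=AS:' + maxVideoBitrateKbps + '\r\n');
    }

    // Illustrative SDP fragment.
    var sampleSdp = 'v=0\r\n' +
                    'm=video 1 RTP/SAVPF 100\r\n' +
                    'a=mid:video\r\n' +
                    'a=rtpmap:100 VP8/90000\r\n';
    console.log(capVideoBitrate(sampleSdp, 1000));
    // The output now contains "a=mid:video\r\nb=AS:1000\r\n".
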
diff --git a/chromium/third_party/webrtc/tools/loopback_test/record-test.sh b/chromium/third_party/webrtc/tools/loopback_test/record-test.sh
new file mode 100755
index 00000000000..92d92024375
--- /dev/null
+++ b/chromium/third_party/webrtc/tools/loopback_test/record-test.sh
@@ -0,0 +1,60 @@
+#!/bin/sh
+#
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+#
+# This script is used to record a tcpdump capture while running a loopback test.
+# Example use case:
+#
+# $ ./run-server.sh & # spawns a server to serve the html pages
+# # on localhost:8080
+#
+# (recording 3 tests of 5 minutes each at bitrates of 1, 2 and 3 mbps)
+# $ sudo -v # Caches sudo credentials needed
+# # for tcpdump
+# $ export INTERFACE=eth1 # Defines interface to record packets
+# $ export CHROME_UNDER_TESTING=./chrome # Define which chrome to run on tests
+# $ export TEST="http://localhost:8080/loopback_test.html?auto-mode=true"
+# $ record-test.sh "$TEST&duration=300&max-video-bitrate=1000" ./record1.pcap
+# $ record-test.sh "$TEST&duration=300&max-video-bitrate=2000" ./record2.pcap
+# $ record-test.sh "$TEST&duration=300&max-video-bitrate=3000" ./record3.pcap
+
+# Indicate an error and exit with a nonzero status if any of the required
+# environment variables is Null or Unset.
+: ${INTERFACE:?"Need to set INTERFACE env variable"}
+: ${CHROME_UNDER_TESTING:?"Need to set CHROME_UNDER_TESTING env variable"}
+
+if [ ! -x "$CHROME_UNDER_TESTING" ]; then
+ echo "CHROME_UNDER_TESTING=$CHROME_UNDER_TESTING does not seem to exist."
+ exit 1
+fi
+
+if [ "$#" -ne 2 ]; then
+ echo "Usage: $0 <test-url> <network-dump>"
+ exit 1
+fi
+TEST_URL=$1
+OUTPUT_RECORDING=$2
+
+sudo -nv > /dev/null 2>&1
+if [ $? != 0 ]; then
+ echo "Run \"sudo -v\" to cache your credentials." \
+ "They are needed to run tcpdump."
+ exit
+fi
+
+echo "Recording $INTERFACE into ${OUTPUT_RECORDING}"
+sudo -n tcpdump -i "$INTERFACE" -w - > "${OUTPUT_RECORDING}" &
+TCPDUMP_PID=$!
+
+echo "Starting ${CHROME_UNDER_TESTING} with ${TEST_URL}."
+# Using a real camera instead of --use-fake-device-for-media-stream as the
+# fake device does not produce images complex enough to reach 3 mbps.
+# Flag --use-fake-ui-for-media-stream automatically allows getUserMedia calls.
+$CHROME_UNDER_TESTING --use-fake-ui-for-media-stream "${TEST_URL}"
+kill ${TCPDUMP_PID}
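
The recorded runs are driven entirely through URL query parameters such as auto-mode, duration and max-video-bitrate, as shown in the example TEST URLs above. A minimal sketch of how the page side can read such parameters; only parameter names that appear in those example URLs are used, and the default values are illustrative:

    // Read a query parameter from the page URL,
    // e.g. "?auto-mode=true&duration=300&max-video-bitrate=1000".
    function getQueryParam(name, defaultValue) {
      var match = new RegExp('[?&]' + name + '=([^&]*)')
          .exec(window.location.search);
      return match ? decodeURIComponent(match[1]) : defaultValue;
    }

    var autoMode = getQueryParam('auto-mode', 'false') === 'true';
    var callDurationSec = parseInt(getQueryParam('duration', '10'), 10);
    var maxVideoBitrateKbps =
        parseInt(getQueryParam('max-video-bitrate', '0'), 10);
    console.log(autoMode, callDurationSec, maxVideoBitrateKbps);
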
diff --git a/chromium/third_party/webrtc/tools/loopback_test/run-server.sh b/chromium/third_party/webrtc/tools/loopback_test/run-server.sh
new file mode 100755
index 00000000000..35c0797c24d
--- /dev/null
+++ b/chromium/third_party/webrtc/tools/loopback_test/run-server.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+#
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+#
+# This script is used to launch a simple http server for files in the same
+# location as the script itself.
+cd "`dirname \"$0\"`"
+echo "Starting http server in port 8080."
+exec python -m SimpleHTTPServer 8080
diff --git a/chromium/third_party/webrtc/tools/loopback_test/stat_tracker.js b/chromium/third_party/webrtc/tools/loopback_test/stat_tracker.js
new file mode 100644
index 00000000000..49f46c39f68
--- /dev/null
+++ b/chromium/third_party/webrtc/tools/loopback_test/stat_tracker.js
@@ -0,0 +1,94 @@
+/**
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// StatTracker is a helper class to keep track of stats on an RTCPeerConnection
+// object. It uses google visualization datatables to keep the recorded samples
+// and simplify plugging them into graphs later.
+//
+// Usage example:
+// var tracker = new StatTracker(pc, pollInterval);
+// tracker.recordStat("EstimatedSendBitrate",
+// "bweforvideo", "googAvailableSendBandwidth");
+// ...
+// tracker.stop();
+// tracker.dataTable(); // returns the recorded values. In this case
+// a table with 2 columns { Time, EstimatedSendBitrate } and a row for each
+// sample taken until stop() was called.
+//
+function StatTracker(pc, pollInterval) {
+ pollInterval = pollInterval || 250;
+
+ var dataTable = new google.visualization.DataTable();
+ var timeColumnIndex = dataTable.addColumn('datetime', 'Time');
+ var recording = true;
+
+ // Set of sampling functions. Functions registered here are called
+ // once per getStats with the given report and a rowIndex for the
+ // sample period so they can extract and record the tracked variables.
+ var samplingFunctions = {};
+
+ // Accessor to the current recorded stats.
+ this.dataTable = function() { return dataTable; }
+
+ // recordStat(varName, recordName, statName) adds a samplingFunction that
+ // records namedItem(recordName).stat(statName) from RTCStatsReport for each
+ // sample into a column named varName in the dataTable.
+ this.recordStat = function (varName, recordName, statName) {
+ var columnIndex = dataTable.addColumn('number', varName);
+ samplingFunctions[varName] = function (report, rowIndex) {
+ var sample;
+ var record = report.namedItem(recordName);
+ if (record) sample = record.stat(statName);
+ dataTable.setCell(rowIndex, columnIndex, sample);
+ }
+ }
+
+ // Stops the polling of stats from the peer connection.
+ this.stop = function() {
+ recording = false;
+ }
+
+ // RTCPeerConnection.getStats is asynchronous. To avoid having too many
+ // getStats requests in flight at once, this code only queues the next
+ // getStats with setTimeout after the previous one returns, instead of
+ // using setInterval.
+ function poll() {
+ pc.getStats(function (report) {
+ if (!recording) return;
+ setTimeout(poll, pollInterval);
+ var result = report.result();
+ if (result.length < 1) return;
+
+ var rowIndex = dataTable.addRow();
+ dataTable.setCell(rowIndex, timeColumnIndex, result[0].timestamp);
+ for (var v in samplingFunctions)
+ samplingFunctions[v](report, rowIndex);
+ });
+ }
+ setTimeout(poll, pollInterval);
+}
+
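
The comment in poll() above explains the design choice: the tracker re-arms itself with setTimeout only once getStats has delivered its report, so at most one request is outstanding even if the callback is slow. A minimal sketch of that self-rescheduling pattern; fakeGetStats is a hypothetical stand-in used only to keep the sketch self-contained:

    // Self-rescheduling poll: the next request is queued only after the
    // previous asynchronous call has completed, so requests never pile up.
    // fakeGetStats stands in for an asynchronous API such as getStats.
    function fakeGetStats(callback) {
      setTimeout(function () { callback({ timestamp: Date.now() }); }, 100);
    }

    var polling = true;
    function poll() {
      fakeGetStats(function (report) {
        if (!polling) return;
        setTimeout(poll, 250);  // re-arm only once this report has arrived
        console.log('sample at', report.timestamp);
      });
    }
    setTimeout(poll, 250);
    setTimeout(function () { polling = false; }, 2000);  // stop after ~2 s
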
+/**
+ * Utility method to perform a full join between data tables from StatTracker.
+ */
+function mergeDataTable(dataTable1, dataTable2) {
+ function allColumns(cols) {
+ var a = [];
+ for (var i = 1; i < cols; ++i) a.push(i);
+ return a;
+ }
+ return google.visualization.data.join(
+ dataTable1,
+ dataTable2,
+ 'full',
+ [[0, 0]],
+ allColumns(dataTable1.getNumberOfColumns()),
+ allColumns(dataTable2.getNumberOfColumns()));
+}
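
mergeDataTable above performs a full (outer) join of the two trackers' tables on their first column, Time, keeping every remaining column from both sides. A minimal sketch of the same join semantics on plain arrays, independent of the google.visualization API; it handles one value column per table for brevity, and the sample rows are illustrative:

    // Full outer join of two [time, value] tables on the time column,
    // mirroring what mergeDataTable does via google.visualization.data.join.
    function fullJoinOnTime(rowsA, rowsB) {
      var byTime = {};
      rowsA.forEach(function (r) { byTime[r[0]] = [r[0], r[1], null]; });
      rowsB.forEach(function (r) {
        if (byTime[r[0]]) byTime[r[0]][2] = r[1];
        else byTime[r[0]] = [r[0], null, r[1]];
      });
      return Object.keys(byTime)
          .sort(function (a, b) { return a - b; })
          .map(function (t) { return byTime[t]; });
    }

    var sendBitrate = [[1, 300], [2, 450]];  // Time, EstimatedSendBitrate
    var remb = [[2, 400], [3, 500]];         // Time, REMB
    console.log(fullJoinOnTime(sendBitrate, remb));
    // -> [ [1, 300, null], [2, 450, 400], [3, null, 500] ]

Rows whose Time appears in only one tracker keep an empty cell in the other tracker's columns, which is why gaps can show up in the plotted series when the two pollers are not aligned.
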
diff --git a/chromium/third_party/webrtc/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc b/chromium/third_party/webrtc/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
index 15246106848..9c9b131a210 100644
--- a/chromium/third_party/webrtc/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
+++ b/chromium/third_party/webrtc/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
@@ -18,10 +19,16 @@
#include "webrtc/tools/frame_analyzer/video_quality_analysis.h"
#include "webrtc/tools/simple_command_line_parser.h"
+#define MAX_NUM_FRAMES_PER_FILE INT_MAX
+
void CompareFiles(const char* reference_file_name, const char* test_file_name,
const char* results_file_name, int width, int height) {
- FILE* ref_file = fopen(reference_file_name, "rb");
- FILE* test_file = fopen(test_file_name, "rb");
+ // Enable y4m mode if reference_file_name contains "y4m".
+ bool y4m_mode = false;
+ if (std::string(reference_file_name).find("y4m") != std::string::npos){
+ y4m_mode = true;
+ }
+
FILE* results_file = fopen(results_file_name, "w");
int size = webrtc::test::GetI420FrameSize(width, height);
@@ -30,10 +37,19 @@ void CompareFiles(const char* reference_file_name, const char* test_file_name,
uint8* test_frame = new uint8[size];
uint8* ref_frame = new uint8[size];
- int frame_counter = 0;
+ bool read_result = true;
+ for(int frame_counter = 0; frame_counter < MAX_NUM_FRAMES_PER_FILE;
+ ++frame_counter){
+ read_result &= (y4m_mode) ? webrtc::test::ExtractFrameFromY4mFile(
+ reference_file_name, width, height, frame_counter, ref_frame):
+ webrtc::test::ExtractFrameFromYuvFile(reference_file_name, width,
+ height, frame_counter, ref_frame);
+ read_result &= webrtc::test::ExtractFrameFromYuvFile(test_file_name, width,
+ height, frame_counter, test_frame);
+
+ if (!read_result)
+ break;
- while (webrtc::test::GetNextI420Frame(ref_file, width, height, ref_frame) &&
- webrtc::test::GetNextI420Frame(test_file, width, height, test_frame)) {
// Calculate the PSNR and SSIM.
double result_psnr = webrtc::test::CalculateMetrics(
webrtc::test::kPSNR, ref_frame, test_frame, width, height);
@@ -41,13 +57,10 @@ void CompareFiles(const char* reference_file_name, const char* test_file_name,
webrtc::test::kSSIM, ref_frame, test_frame, width, height);
fprintf(results_file, "Frame: %d, PSNR: %f, SSIM: %f\n", frame_counter,
result_psnr, result_ssim);
- ++frame_counter;
}
delete[] test_frame;
delete[] ref_frame;
- fclose(ref_file);
- fclose(test_file);
fclose(results_file);
}
diff --git a/chromium/third_party/webrtc/tools/simple_command_line_parser.h b/chromium/third_party/webrtc/tools/simple_command_line_parser.h
index 348e8c3db1f..6bb33137f50 100644
--- a/chromium/third_party/webrtc/tools/simple_command_line_parser.h
+++ b/chromium/third_party/webrtc/tools/simple_command_line_parser.h
@@ -15,7 +15,7 @@
#include <string>
#include <vector>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/test/testsupport/gtest_prod_util.h"
// This is a very basic command line parsing class. We pass the command line
diff --git a/chromium/third_party/webrtc/tools/tools.gyp b/chromium/third_party/webrtc/tools/tools.gyp
index b8dc4c142d4..ee2a11d4500 100644
--- a/chromium/third_party/webrtc/tools/tools.gyp
+++ b/chromium/third_party/webrtc/tools/tools.gyp
@@ -91,6 +91,7 @@
'type': 'executable',
'dependencies': [
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
],
'sources': [
'force_mic_volume_max/force_mic_volume_max.cc',
@@ -106,6 +107,7 @@
'dependencies': [
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
],
@@ -135,7 +137,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -146,7 +148,7 @@
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
'conditions': [
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'targets': [
{
'target_name': 'tools_unittests_apk_target',
diff --git a/chromium/third_party/webrtc/tools/tools_unittests.isolate b/chromium/third_party/webrtc/tools/tools_unittests.isolate
index 540378059b9..18065749553 100644
--- a/chromium/third_party/webrtc/tools/tools_unittests.isolate
+++ b/chromium/third_party/webrtc/tools/tools_unittests.isolate
@@ -8,29 +8,27 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/tools_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../DEPS',
- '../../resources/foreman_cif.yuv',
- '../../testing/test_env.py',
+ '<(DEPTH)/DEPS',
+ '<(DEPTH)/resources/foreman_cif.yuv',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/tools_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/typedefs.h b/chromium/third_party/webrtc/typedefs.h
index 472c74b088b..d8977ff4cbc 100644
--- a/chromium/third_party/webrtc/typedefs.h
+++ b/chromium/third_party/webrtc/typedefs.h
@@ -14,13 +14,6 @@
#ifndef WEBRTC_TYPEDEFS_H_
#define WEBRTC_TYPEDEFS_H_
-// For access to standard POSIXish features, use WEBRTC_POSIX instead of a
-// more specific macro.
-#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || \
- defined(WEBRTC_ANDROID)
-#define WEBRTC_POSIX
-#endif
-
// Processor architecture detection. For more info on what's defined, see:
// http://msdn.microsoft.com/en-us/library/b0084kay.aspx
// http://www.agner.org/optimize/calling_conventions.pdf
@@ -30,6 +23,9 @@
#define WEBRTC_ARCH_X86_64
#define WEBRTC_ARCH_64_BITS
#define WEBRTC_ARCH_LITTLE_ENDIAN
+#elif defined(__aarch64__)
+#define WEBRTC_ARCH_64_BITS
+#define WEBRTC_ARCH_LITTLE_ENDIAN
#elif defined(_M_IX86) || defined(__i386__)
#define WEBRTC_ARCH_X86_FAMILY
#define WEBRTC_ARCH_X86
@@ -48,6 +44,9 @@
#elif defined(__MIPSEL__)
#define WEBRTC_ARCH_32_BITS
#define WEBRTC_ARCH_LITTLE_ENDIAN
+#elif defined(__pnacl__)
+#define WEBRTC_ARCH_32_BITS
+#define WEBRTC_ARCH_LITTLE_ENDIAN
#else
#error Please add support for your architecture in typedefs.h
#endif
@@ -56,14 +55,15 @@
#error Define either WEBRTC_ARCH_LITTLE_ENDIAN or WEBRTC_ARCH_BIG_ENDIAN
#endif
-#if defined(__SSE2__) || defined(_MSC_VER)
-#define WEBRTC_USE_SSE2
+#if (defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE2__)) || \
+ (defined(WEBRTC_ARCH_ARM_V7) && !defined(WEBRTC_ARCH_ARM_NEON))
+#define WEBRTC_CPU_DETECTION
#endif
#if !defined(_MSC_VER)
#include <stdint.h>
#else
-// Define C99 equivalent types, since MSVC doesn't provide stdint.h.
+// Define C99 equivalent types, since pre-2010 MSVC doesn't provide stdint.h.
typedef signed char int8_t;
typedef signed short int16_t;
typedef signed int int32_t;
diff --git a/chromium/third_party/webrtc/video/OWNERS b/chromium/third_party/webrtc/video/OWNERS
index 506407499f3..cb0452fa68d 100644
--- a/chromium/third_party/webrtc/video/OWNERS
+++ b/chromium/third_party/webrtc/video/OWNERS
@@ -2,3 +2,8 @@ mflodman@webrtc.org
stefan@webrtc.org
wu@webrtc.org
mallinath@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/video/bitrate_estimator_tests.cc b/chromium/third_party/webrtc/video/bitrate_estimator_tests.cc
index 15bacd3aad0..f8b9060f608 100644
--- a/chromium/third_party/webrtc/video/bitrate_estimator_tests.cc
+++ b/chromium/third_party/webrtc/video/bitrate_estimator_tests.cc
@@ -17,7 +17,10 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
+#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator_capturer.h"
@@ -50,10 +53,12 @@ class BitrateEstimatorTest : public ::testing::Test {
}
virtual void SetUp() {
- // Create receiver call first so that we are guaranteed to have a trace
- // callback when sender call is created.
+ Trace::CreateTrace();
+ Trace::SetTraceCallback(&receiver_trace_);
+ // Reduce the chance that spurious traces will ruin the test.
+ Trace::set_level_filter(kTraceTerseInfo);
+
Call::Config receiver_call_config(&receive_transport_);
- receiver_call_config.trace_callback = &receiver_trace_;
receiver_call_.reset(Call::Create(receiver_call_config));
Call::Config sender_call_config(&send_transport_);
@@ -64,14 +69,17 @@ class BitrateEstimatorTest : public ::testing::Test {
send_config_ = sender_call_->GetDefaultSendConfig();
send_config_.rtp.ssrcs.push_back(kSendSsrc);
- // send_config_.encoder will be set by every stream separately.
- send_config_.internal_source = false;
- test::FakeEncoder::SetCodecSettings(&send_config_.codec, 1);
- send_config_.codec.plType = kSendPayloadType;
+ // Encoders will be set separately per stream.
+ send_config_.encoder_settings.encoder = NULL;
+ send_config_.encoder_settings.payload_name = "FAKE";
+ send_config_.encoder_settings.payload_type = kSendPayloadType;
+ video_streams_ = test::CreateVideoStreams(1);
receive_config_ = receiver_call_->GetDefaultReceiveConfig();
- receive_config_.codecs.clear();
- receive_config_.codecs.push_back(send_config_.codec);
+ assert(receive_config_.codecs.empty());
+ VideoCodec codec =
+ test::CreateDecoderVideoCodec(send_config_.encoder_settings);
+ receive_config_.codecs.push_back(codec);
// receive_config_.external_decoders will be set by every stream separately.
receive_config_.rtp.remote_ssrc = send_config_.rtp.ssrcs[0];
receive_config_.rtp.local_ssrc = kReceiverLocalSsrc;
@@ -93,8 +101,10 @@ class BitrateEstimatorTest : public ::testing::Test {
streams_.pop_back();
}
- // The TraceCallback instance MUST outlive Calls, destroy Calls explicitly.
receiver_call_.reset();
+
+ Trace::SetTraceCallback(NULL);
+ Trace::ReturnTrace();
}
protected:
@@ -110,17 +120,14 @@ class BitrateEstimatorTest : public ::testing::Test {
}
void PushExpectedLogLine(const std::string& expected_log_line) {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped lock(crit_sect_.get());
expected_log_lines_.push_back(expected_log_line);
}
virtual void Print(TraceLevel level,
const char* message,
int length) OVERRIDE {
- CriticalSectionScoped cs(crit_sect_.get());
- if (!(level & kTraceStateInfo)) {
- return;
- }
+ CriticalSectionScoped lock(crit_sect_.get());
std::string msg(message);
if (msg.find("BitrateEstimator") != std::string::npos) {
received_log_lines_.push_back(msg);
@@ -146,9 +153,9 @@ class BitrateEstimatorTest : public ::testing::Test {
private:
typedef std::list<std::string> Strings;
- scoped_ptr<CriticalSectionWrapper> crit_sect_;
- Strings received_log_lines_;
- Strings expected_log_lines_;
+ const scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ Strings received_log_lines_ GUARDED_BY(crit_sect_);
+ Strings expected_log_lines_ GUARDED_BY(crit_sect_);
scoped_ptr<EventWrapper> done_;
};
@@ -163,27 +170,28 @@ class BitrateEstimatorTest : public ::testing::Test {
fake_encoder_(Clock::GetRealTimeClock()),
fake_decoder_() {
test_->send_config_.rtp.ssrcs[0]++;
- test_->send_config_.encoder = &fake_encoder_;
- send_stream_ =
- test_->sender_call_->CreateVideoSendStream(test_->send_config_);
+ test_->send_config_.encoder_settings.encoder = &fake_encoder_;
+ send_stream_ = test_->sender_call_->CreateVideoSendStream(
+ test_->send_config_, test_->video_streams_, NULL);
+ assert(test_->video_streams_.size() == 1);
frame_generator_capturer_.reset(
test::FrameGeneratorCapturer::Create(send_stream_->Input(),
- test_->send_config_.codec.width,
- test_->send_config_.codec.height,
+ test_->video_streams_[0].width,
+ test_->video_streams_[0].height,
30,
Clock::GetRealTimeClock()));
- send_stream_->StartSending();
+ send_stream_->Start();
frame_generator_capturer_->Start();
ExternalVideoDecoder decoder;
decoder.decoder = &fake_decoder_;
- decoder.payload_type = test_->send_config_.codec.plType;
+ decoder.payload_type = test_->send_config_.encoder_settings.payload_type;
test_->receive_config_.rtp.remote_ssrc = test_->send_config_.rtp.ssrcs[0];
test_->receive_config_.rtp.local_ssrc++;
test_->receive_config_.external_decoders.push_back(decoder);
receive_stream_ = test_->receiver_call_->CreateVideoReceiveStream(
test_->receive_config_);
- receive_stream_->StartReceiving();
+ receive_stream_->Start();
is_sending_receiving_ = true;
}
@@ -199,8 +207,8 @@ class BitrateEstimatorTest : public ::testing::Test {
void StopSending() {
if (is_sending_receiving_) {
frame_generator_capturer_->Stop();
- send_stream_->StopSending();
- receive_stream_->StopReceiving();
+ send_stream_->Stop();
+ receive_stream_->Stop();
is_sending_receiving_ = false;
}
}
@@ -221,6 +229,7 @@ class BitrateEstimatorTest : public ::testing::Test {
scoped_ptr<Call> sender_call_;
scoped_ptr<Call> receiver_call_;
VideoSendStream::Config send_config_;
+ std::vector<VideoStream> video_streams_;
VideoReceiveStream::Config receive_config_;
std::vector<Stream*> streams_;
};
@@ -236,6 +245,20 @@ TEST_F(BitrateEstimatorTest, InstantiatesTOFPerDefault) {
EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
}
+TEST_F(BitrateEstimatorTest, ImmediatelySwitchToAST) {
+ send_config_.rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kAbsSendTime, kASTExtensionId));
+ receiver_trace_.PushExpectedLogLine(
+ "RemoteBitrateEstimatorFactory: Instantiating.");
+ receiver_trace_.PushExpectedLogLine(
+ "RemoteBitrateEstimatorFactory: Instantiating.");
+ receiver_trace_.PushExpectedLogLine("Switching to absolute send time RBE.");
+ receiver_trace_.PushExpectedLogLine(
+ "AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
+ streams_.push_back(new Stream(this));
+ EXPECT_EQ(kEventSignaled, receiver_trace_.Wait());
+}
+
TEST_F(BitrateEstimatorTest, SwitchesToAST) {
send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
diff --git a/chromium/third_party/webrtc/video/call.cc b/chromium/third_party/webrtc/video/call.cc
index 12daa8eaaeb..6daa8b07f94 100644
--- a/chromium/third_party/webrtc/video/call.cc
+++ b/chromium/third_party/webrtc/video/call.cc
@@ -21,6 +21,7 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video/video_receive_stream.h"
#include "webrtc/video/video_send_stream.h"
@@ -33,18 +34,45 @@ const char* RtpExtension::kTOffset = "urn:ietf:params:rtp-hdrext:toffset";
const char* RtpExtension::kAbsSendTime =
"http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time";
namespace internal {
+
+class CpuOveruseObserverProxy : public webrtc::CpuOveruseObserver {
+ public:
+ CpuOveruseObserverProxy(OveruseCallback* overuse_callback)
+ : crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ overuse_callback_(overuse_callback) {
+ assert(overuse_callback != NULL);
+ }
+
+ virtual ~CpuOveruseObserverProxy() {}
+
+ virtual void OveruseDetected() OVERRIDE {
+ CriticalSectionScoped lock(crit_.get());
+ overuse_callback_->OnOveruse();
+ }
+
+ virtual void NormalUsage() OVERRIDE {
+ CriticalSectionScoped lock(crit_.get());
+ overuse_callback_->OnNormalUse();
+ }
+
+ private:
+ const scoped_ptr<CriticalSectionWrapper> crit_;
+ OveruseCallback* overuse_callback_ GUARDED_BY(crit_);
+};
+
class Call : public webrtc::Call, public PacketReceiver {
public:
Call(webrtc::VideoEngine* video_engine, const Call::Config& config);
virtual ~Call();
virtual PacketReceiver* Receiver() OVERRIDE;
- virtual std::vector<VideoCodec> GetVideoCodecs() OVERRIDE;
virtual VideoSendStream::Config GetDefaultSendConfig() OVERRIDE;
virtual VideoSendStream* CreateVideoSendStream(
- const VideoSendStream::Config& config) OVERRIDE;
+ const VideoSendStream::Config& config,
+ const std::vector<VideoStream>& video_streams,
+ const void* encoder_settings) OVERRIDE;
virtual void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream)
OVERRIDE;
@@ -60,24 +88,28 @@ class Call : public webrtc::Call, public PacketReceiver {
virtual uint32_t SendBitrateEstimate() OVERRIDE;
virtual uint32_t ReceiveBitrateEstimate() OVERRIDE;
- virtual bool DeliverPacket(const uint8_t* packet, size_t length) OVERRIDE;
+ virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
+ size_t length) OVERRIDE;
private:
- bool DeliverRtcp(const uint8_t* packet, size_t length);
- bool DeliverRtp(const RTPHeader& header,
- const uint8_t* packet,
- size_t length);
+ DeliveryStatus DeliverRtcp(const uint8_t* packet, size_t length);
+ DeliveryStatus DeliverRtp(const RTPHeader& header,
+ const uint8_t* packet,
+ size_t length);
Call::Config config_;
- std::map<uint32_t, VideoReceiveStream*> receive_ssrcs_;
+ std::map<uint32_t, VideoReceiveStream*> receive_ssrcs_
+ GUARDED_BY(receive_lock_);
scoped_ptr<RWLockWrapper> receive_lock_;
- std::map<uint32_t, VideoSendStream*> send_ssrcs_;
+ std::map<uint32_t, VideoSendStream*> send_ssrcs_ GUARDED_BY(send_lock_);
scoped_ptr<RWLockWrapper> send_lock_;
scoped_ptr<RtpHeaderParser> rtp_header_parser_;
+ scoped_ptr<CpuOveruseObserverProxy> overuse_observer_proxy_;
+
VideoEngine* video_engine_;
ViERTP_RTCP* rtp_rtcp_;
ViECodec* codec_;
@@ -88,90 +120,10 @@ class Call : public webrtc::Call, public PacketReceiver {
};
} // namespace internal
-class TraceDispatcher : public TraceCallback {
- public:
- TraceDispatcher()
- : crit_(CriticalSectionWrapper::CreateCriticalSection()),
- initialized_(false),
- filter_(kTraceNone) {}
-
- ~TraceDispatcher() {
- if (initialized_) {
- Trace::ReturnTrace();
- VideoEngine::SetTraceCallback(NULL);
- }
- }
-
- virtual void Print(TraceLevel level,
- const char* message,
- int length) OVERRIDE {
- CriticalSectionScoped lock(crit_.get());
- for (std::map<Call*, Call::Config*>::iterator it = callbacks_.begin();
- it != callbacks_.end();
- ++it) {
- if ((level & it->second->trace_filter) != kTraceNone)
- it->second->trace_callback->Print(level, message, length);
- }
- }
-
- void RegisterCallback(Call* call, Call::Config* config) {
- if (config->trace_callback == NULL)
- return;
-
- CriticalSectionScoped lock(crit_.get());
- callbacks_[call] = config;
-
- filter_ |= config->trace_filter;
- if (filter_ != kTraceNone && !initialized_) {
- initialized_ = true;
- Trace::CreateTrace();
- VideoEngine::SetTraceCallback(this);
- }
- VideoEngine::SetTraceFilter(filter_);
- }
-
- void DeregisterCallback(Call* call) {
- CriticalSectionScoped lock(crit_.get());
- callbacks_.erase(call);
-
- filter_ = kTraceNone;
- for (std::map<Call*, Call::Config*>::iterator it = callbacks_.begin();
- it != callbacks_.end();
- ++it) {
- filter_ |= it->second->trace_filter;
- }
-
- VideoEngine::SetTraceFilter(filter_);
- }
-
- private:
- scoped_ptr<CriticalSectionWrapper> crit_;
- bool initialized_;
- unsigned int filter_;
- std::map<Call*, Call::Config*> callbacks_;
-};
-
-namespace internal {
-TraceDispatcher* global_trace_dispatcher = NULL;
-} // internal
-
-void CreateTraceDispatcher() {
- if (internal::global_trace_dispatcher == NULL) {
- TraceDispatcher* dispatcher = new TraceDispatcher();
- // TODO(pbos): Atomic compare and exchange.
- if (internal::global_trace_dispatcher == NULL) {
- internal::global_trace_dispatcher = dispatcher;
- } else {
- delete dispatcher;
- }
- }
-}
-
Call* Call::Create(const Call::Config& config) {
- CreateTraceDispatcher();
-
- VideoEngine* video_engine = config.webrtc_config != NULL ?
- VideoEngine::Create(*config.webrtc_config) : VideoEngine::Create();
+ VideoEngine* video_engine = config.webrtc_config != NULL
+ ? VideoEngine::Create(*config.webrtc_config)
+ : VideoEngine::Create();
assert(video_engine != NULL);
return new internal::Call(video_engine, config);
@@ -179,6 +131,8 @@ Call* Call::Create(const Call::Config& config) {
namespace internal {
+const int kDefaultVideoStreamBitrateBps = 300000;
+
Call::Call(webrtc::VideoEngine* video_engine, const Call::Config& config)
: config_(config),
receive_lock_(RWLockWrapper::CreateRWLock()),
@@ -189,7 +143,10 @@ Call::Call(webrtc::VideoEngine* video_engine, const Call::Config& config)
assert(video_engine != NULL);
assert(config.send_transport != NULL);
- global_trace_dispatcher->RegisterCallback(this, &config_);
+ if (config.overuse_callback) {
+ overuse_observer_proxy_.reset(
+ new CpuOveruseObserverProxy(config.overuse_callback));
+ }
rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine_);
assert(rtp_rtcp_ != NULL);
@@ -207,7 +164,6 @@ Call::Call(webrtc::VideoEngine* video_engine, const Call::Config& config)
}
Call::~Call() {
- global_trace_dispatcher->DeregisterCallback(this);
base_->DeleteChannel(base_channel_id_);
base_->Release();
codec_->Release();
@@ -217,34 +173,29 @@ Call::~Call() {
PacketReceiver* Call::Receiver() { return this; }
-std::vector<VideoCodec> Call::GetVideoCodecs() {
- std::vector<VideoCodec> codecs;
-
- VideoCodec codec;
- for (size_t i = 0; i < static_cast<size_t>(codec_->NumberOfCodecs()); ++i) {
- if (codec_->GetCodec(static_cast<unsigned char>(i), codec) == 0) {
- codecs.push_back(codec);
- }
- }
- return codecs;
-}
-
VideoSendStream::Config Call::GetDefaultSendConfig() {
VideoSendStream::Config config;
- codec_->GetCodec(0, config.codec);
return config;
}
VideoSendStream* Call::CreateVideoSendStream(
- const VideoSendStream::Config& config) {
+ const VideoSendStream::Config& config,
+ const std::vector<VideoStream>& video_streams,
+ const void* encoder_settings) {
assert(config.rtp.ssrcs.size() > 0);
- assert(config.rtp.ssrcs.size() >= config.codec.numberOfSimulcastStreams);
- VideoSendStream* send_stream = new VideoSendStream(config_.send_transport,
- config_.overuse_detection,
- video_engine_,
- config,
- base_channel_id_);
+ // TODO(mflodman): Base the start bitrate on a current bandwidth estimate, if
+ // the call has already started.
+ VideoSendStream* send_stream = new VideoSendStream(
+ config_.send_transport,
+ overuse_observer_proxy_.get(),
+ video_engine_,
+ config,
+ video_streams,
+ encoder_settings,
+ base_channel_id_,
+ config_.start_bitrate_bps != -1 ? config_.start_bitrate_bps
+ : kDefaultVideoStreamBitrateBps);
WriteLockScoped write_lock(*send_lock_);
for (size_t i = 0; i < config.rtp.ssrcs.size(); ++i) {
@@ -294,6 +245,12 @@ VideoReceiveStream* Call::CreateVideoReceiveStream(
WriteLockScoped write_lock(*receive_lock_);
assert(receive_ssrcs_.find(config.rtp.remote_ssrc) == receive_ssrcs_.end());
receive_ssrcs_[config.rtp.remote_ssrc] = receive_stream;
+ // TODO(pbos): Configure different RTX payloads per receive payload.
+ VideoReceiveStream::Config::Rtp::RtxMap::const_iterator it =
+ config.rtp.rtx.begin();
+ if (it != config.rtp.rtx.end())
+ receive_ssrcs_[it->second.ssrc] = receive_stream;
+
return receive_stream;
}
@@ -304,14 +261,18 @@ void Call::DestroyVideoReceiveStream(
VideoReceiveStream* receive_stream_impl = NULL;
{
WriteLockScoped write_lock(*receive_lock_);
- for (std::map<uint32_t, VideoReceiveStream*>::iterator it =
- receive_ssrcs_.begin();
- it != receive_ssrcs_.end();
- ++it) {
+ // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a
+ // separate SSRC there can be either one or two.
+ std::map<uint32_t, VideoReceiveStream*>::iterator it =
+ receive_ssrcs_.begin();
+ while (it != receive_ssrcs_.end()) {
if (it->second == static_cast<VideoReceiveStream*>(receive_stream)) {
+ assert(receive_stream_impl == NULL ||
+ receive_stream_impl == it->second);
receive_stream_impl = it->second;
- receive_ssrcs_.erase(it);
- break;
+ receive_ssrcs_.erase(it++);
+ } else {
+ ++it;
}
}
}
@@ -330,9 +291,12 @@ uint32_t Call::ReceiveBitrateEstimate() {
return 0;
}
-bool Call::DeliverRtcp(const uint8_t* packet, size_t length) {
+Call::PacketReceiver::DeliveryStatus Call::DeliverRtcp(const uint8_t* packet,
+ size_t length) {
// TODO(pbos): Figure out what channel needs it actually.
// Do NOT broadcast! Also make sure it's a valid packet.
+ // Return DELIVERY_UNKNOWN_SSRC if it can be determined that
+ // there's no receiver of the packet.
bool rtcp_delivered = false;
{
ReadLockScoped read_lock(*receive_lock_);
@@ -355,35 +319,33 @@ bool Call::DeliverRtcp(const uint8_t* packet, size_t length) {
rtcp_delivered = true;
}
}
- return rtcp_delivered;
+ return rtcp_delivered ? DELIVERY_OK : DELIVERY_PACKET_ERROR;
}
-bool Call::DeliverRtp(const RTPHeader& header,
- const uint8_t* packet,
- size_t length) {
- VideoReceiveStream* receiver;
- {
- ReadLockScoped read_lock(*receive_lock_);
- std::map<uint32_t, VideoReceiveStream*>::iterator it =
- receive_ssrcs_.find(header.ssrc);
- if (it == receive_ssrcs_.end()) {
- // TODO(pbos): Log some warning, SSRC without receiver.
- return false;
- }
+Call::PacketReceiver::DeliveryStatus Call::DeliverRtp(const RTPHeader& header,
+ const uint8_t* packet,
+ size_t length) {
+ ReadLockScoped read_lock(*receive_lock_);
+ std::map<uint32_t, VideoReceiveStream*>::iterator it =
+ receive_ssrcs_.find(header.ssrc);
- receiver = it->second;
- }
- return receiver->DeliverRtp(static_cast<const uint8_t*>(packet), length);
+ if (it == receive_ssrcs_.end())
+ return DELIVERY_UNKNOWN_SSRC;
+
+ return it->second->DeliverRtp(static_cast<const uint8_t*>(packet), length)
+ ? DELIVERY_OK
+ : DELIVERY_PACKET_ERROR;
}
-bool Call::DeliverPacket(const uint8_t* packet, size_t length) {
+Call::PacketReceiver::DeliveryStatus Call::DeliverPacket(const uint8_t* packet,
+ size_t length) {
// TODO(pbos): ExtensionMap if there are extensions.
if (RtpHeaderParser::IsRtcp(packet, static_cast<int>(length)))
return DeliverRtcp(packet, length);
RTPHeader rtp_header;
if (!rtp_header_parser_->Parse(packet, static_cast<int>(length), &rtp_header))
- return false;
+ return DELIVERY_PACKET_ERROR;
return DeliverRtp(rtp_header, packet, length);
}
diff --git a/chromium/third_party/webrtc/video/call_perf_tests.cc b/chromium/third_party/webrtc/video/call_perf_tests.cc
index 49c648877cd..fa3f2388c52 100644
--- a/chromium/third_party/webrtc/video/call_perf_tests.cc
+++ b/chromium/third_party/webrtc/video/call_perf_tests.cc
@@ -16,12 +16,15 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/call.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h"
+#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/rtp_to_ntp.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/fake_audio_device.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
@@ -45,13 +48,56 @@ static const uint32_t kReceiverLocalSsrc = 0x123456;
static const uint8_t kSendPayloadType = 125;
class CallPerfTest : public ::testing::Test {
+ public:
+ CallPerfTest()
+ : send_stream_(NULL), fake_encoder_(Clock::GetRealTimeClock()) {}
+
+ protected:
+ void CreateTestConfig(Call* call) {
+ send_config_ = call->GetDefaultSendConfig();
+ send_config_.rtp.ssrcs.push_back(kSendSsrc);
+ send_config_.encoder_settings.encoder = &fake_encoder_;
+ send_config_.encoder_settings.payload_type = kSendPayloadType;
+ send_config_.encoder_settings.payload_name = "FAKE";
+ video_streams_ = test::CreateVideoStreams(1);
+ }
+
+ void RunVideoSendTest(Call* call,
+ const VideoSendStream::Config& config,
+ test::RtpRtcpObserver* observer) {
+ send_stream_ = call->CreateVideoSendStream(config, video_streams_, NULL);
+ scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
+ test::FrameGeneratorCapturer::Create(
+ send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
+ send_stream_->Start();
+ frame_generator_capturer->Start();
+
+ EXPECT_EQ(kEventSignaled, observer->Wait());
+
+ observer->StopSending();
+ frame_generator_capturer->Stop();
+ send_stream_->Stop();
+ call->DestroyVideoSendStream(send_stream_);
+ }
+
+ void TestMinTransmitBitrate(bool pad_to_min_bitrate);
+
+ void TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
+ int threshold_ms,
+ int start_time_ms,
+ int run_time_ms);
+
+ VideoSendStream::Config send_config_;
+ std::vector<VideoStream> video_streams_;
+ VideoSendStream* send_stream_;
+ test::FakeEncoder fake_encoder_;
};
class SyncRtcpObserver : public test::RtpRtcpObserver {
public:
- explicit SyncRtcpObserver(int delay_ms)
- : test::RtpRtcpObserver(kLongTimeoutMs, delay_ms),
- critical_section_(CriticalSectionWrapper::CreateCriticalSection()) {}
+ explicit SyncRtcpObserver(const FakeNetworkPipe::Config& config)
+ : test::RtpRtcpObserver(kLongTimeoutMs, config),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()) {}
virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
RTCPUtility::RTCPParserV2 parser(packet, length, true);
@@ -62,7 +108,7 @@ class SyncRtcpObserver : public test::RtpRtcpObserver {
packet_type = parser.Iterate()) {
if (packet_type == RTCPUtility::kRtcpSrCode) {
const RTCPUtility::RTCPPacket& packet = parser.Packet();
- synchronization::RtcpMeasurement ntp_rtp_pair(
+ RtcpMeasurement ntp_rtp_pair(
packet.SR.NTPMostSignificant,
packet.SR.NTPLeastSignificant,
packet.SR.RTPTimestamp);
@@ -73,22 +119,22 @@ class SyncRtcpObserver : public test::RtpRtcpObserver {
}
int64_t RtpTimestampToNtp(uint32_t timestamp) const {
- CriticalSectionScoped cs(critical_section_.get());
+ CriticalSectionScoped lock(crit_.get());
int64_t timestamp_in_ms = -1;
if (ntp_rtp_pairs_.size() == 2) {
// TODO(stefan): We can't EXPECT_TRUE on this call due to a bug in the
// RTCP sender where it sends RTCP SR before any RTP packets, which leads
// to a bogus NTP/RTP mapping.
- synchronization::RtpToNtpMs(timestamp, ntp_rtp_pairs_, &timestamp_in_ms);
+ RtpToNtpMs(timestamp, ntp_rtp_pairs_, &timestamp_in_ms);
return timestamp_in_ms;
}
return -1;
}
private:
- void StoreNtpRtpPair(synchronization::RtcpMeasurement ntp_rtp_pair) {
- CriticalSectionScoped cs(critical_section_.get());
- for (synchronization::RtcpList::iterator it = ntp_rtp_pairs_.begin();
+ void StoreNtpRtpPair(RtcpMeasurement ntp_rtp_pair) {
+ CriticalSectionScoped lock(crit_.get());
+ for (RtcpList::iterator it = ntp_rtp_pairs_.begin();
it != ntp_rtp_pairs_.end();
++it) {
if (ntp_rtp_pair.ntp_secs == it->ntp_secs &&
@@ -105,8 +151,8 @@ class SyncRtcpObserver : public test::RtpRtcpObserver {
ntp_rtp_pairs_.push_front(ntp_rtp_pair);
}
- scoped_ptr<CriticalSectionWrapper> critical_section_;
- synchronization::RtcpList ntp_rtp_pairs_;
+ const scoped_ptr<CriticalSectionWrapper> crit_;
+ RtcpList ntp_rtp_pairs_ GUARDED_BY(crit_);
};
class VideoRtcpAndSyncObserver : public SyncRtcpObserver, public VideoRenderer {
@@ -119,7 +165,7 @@ class VideoRtcpAndSyncObserver : public SyncRtcpObserver, public VideoRenderer {
int voe_channel,
VoEVideoSync* voe_sync,
SyncRtcpObserver* audio_observer)
- : SyncRtcpObserver(0),
+ : SyncRtcpObserver(FakeNetworkPipe::Config()),
clock_(clock),
voe_channel_(voe_channel),
voe_sync_(voe_sync),
@@ -144,14 +190,18 @@ class VideoRtcpAndSyncObserver : public SyncRtcpObserver, public VideoRenderer {
int64_t stream_offset = latest_audio_ntp - latest_video_ntp;
std::stringstream ss;
ss << stream_offset;
- webrtc::test::PrintResult(
- "stream_offset", "", "synchronization", ss.str(), "ms", false);
+ webrtc::test::PrintResult("stream_offset",
+ "",
+ "synchronization",
+ ss.str(),
+ "ms",
+ false);
int64_t time_since_creation = now_ms - creation_time_ms_;
// During the first couple of seconds audio and video can falsely be
// estimated as being synchronized. We don't want to trigger on those.
if (time_since_creation < kStartupTimeMs)
return;
- if (abs(latest_audio_ntp - latest_video_ntp) < kInSyncThresholdMs) {
+ if (std::abs(latest_audio_ntp - latest_video_ntp) < kInSyncThresholdMs) {
if (first_time_in_sync_ == -1) {
first_time_in_sync_ = now_ms;
webrtc::test::PrintResult("sync_convergence_time",
@@ -167,7 +217,7 @@ class VideoRtcpAndSyncObserver : public SyncRtcpObserver, public VideoRenderer {
}
private:
- Clock* clock_;
+ Clock* const clock_;
int voe_channel_;
VoEVideoSync* voe_sync_;
SyncRtcpObserver* audio_observer_;
@@ -189,10 +239,13 @@ TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSync) {
EXPECT_EQ(0, voe_base->Init(&fake_audio_device, NULL));
int channel = voe_base->CreateChannel();
- const int kVoiceDelayMs = 500;
- SyncRtcpObserver audio_observer(kVoiceDelayMs);
- VideoRtcpAndSyncObserver observer(
- Clock::GetRealTimeClock(), channel, voe_sync, &audio_observer);
+ FakeNetworkPipe::Config net_config;
+ net_config.queue_delay_ms = 500;
+ SyncRtcpObserver audio_observer(net_config);
+ VideoRtcpAndSyncObserver observer(Clock::GetRealTimeClock(),
+ channel,
+ voe_sync,
+ &audio_observer);
Call::Config receiver_config(observer.ReceiveTransport());
receiver_config.voice_engine = voice_engine;
@@ -208,16 +261,17 @@ TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSync) {
: channel_(channel),
voe_network_(voe_network),
parser_(RtpHeaderParser::Create()) {}
- virtual bool DeliverPacket(const uint8_t* packet, size_t length) {
+ virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
+ size_t length) OVERRIDE {
int ret;
if (parser_->IsRtcp(packet, static_cast<int>(length))) {
ret = voe_network_->ReceivedRTCPPacket(
channel_, packet, static_cast<unsigned int>(length));
} else {
ret = voe_network_->ReceivedRTPPacket(
- channel_, packet, static_cast<unsigned int>(length));
+ channel_, packet, static_cast<unsigned int>(length), PacketTime());
}
- return ret == 0;
+ return ret == 0 ? DELIVERY_OK : DELIVERY_PACKET_ERROR;
}
private:
@@ -229,46 +283,44 @@ TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSync) {
audio_observer.SetReceivers(&voe_packet_receiver, &voe_packet_receiver);
internal::TransportAdapter transport_adapter(audio_observer.SendTransport());
+ transport_adapter.Enable();
EXPECT_EQ(0,
voe_network->RegisterExternalTransport(channel, transport_adapter));
observer.SetReceivers(receiver_call->Receiver(), sender_call->Receiver());
- test::FakeEncoder fake_encoder(Clock::GetRealTimeClock());
test::FakeDecoder fake_decoder;
- VideoSendStream::Config send_config = sender_call->GetDefaultSendConfig();
- send_config.rtp.ssrcs.push_back(kSendSsrc);
- send_config.encoder = &fake_encoder;
- send_config.internal_source = false;
- test::FakeEncoder::SetCodecSettings(&send_config.codec, 1);
- send_config.codec.plType = kSendPayloadType;
+ CreateTestConfig(sender_call.get());
VideoReceiveStream::Config receive_config =
receiver_call->GetDefaultReceiveConfig();
- receive_config.codecs.clear();
- receive_config.codecs.push_back(send_config.codec);
+ assert(receive_config.codecs.empty());
+ VideoCodec codec =
+ test::CreateDecoderVideoCodec(send_config_.encoder_settings);
+ receive_config.codecs.push_back(codec);
+ assert(receive_config.external_decoders.empty());
ExternalVideoDecoder decoder;
decoder.decoder = &fake_decoder;
- decoder.payload_type = send_config.codec.plType;
+ decoder.payload_type = send_config_.encoder_settings.payload_type;
receive_config.external_decoders.push_back(decoder);
- receive_config.rtp.remote_ssrc = send_config.rtp.ssrcs[0];
+ receive_config.rtp.remote_ssrc = send_config_.rtp.ssrcs[0];
receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
receive_config.renderer = &observer;
receive_config.audio_channel_id = channel;
VideoSendStream* send_stream =
- sender_call->CreateVideoSendStream(send_config);
+ sender_call->CreateVideoSendStream(send_config_, video_streams_, NULL);
VideoReceiveStream* receive_stream =
receiver_call->CreateVideoReceiveStream(receive_config);
scoped_ptr<test::FrameGeneratorCapturer> capturer(
test::FrameGeneratorCapturer::Create(send_stream->Input(),
- send_config.codec.width,
- send_config.codec.height,
+ video_streams_[0].width,
+ video_streams_[0].height,
30,
Clock::GetRealTimeClock()));
- receive_stream->StartReceiving();
- send_stream->StartSending();
+ receive_stream->Start();
+ send_stream->Start();
capturer->Start();
fake_audio_device.Start();
@@ -285,8 +337,8 @@ TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSync) {
fake_audio_device.Stop();
capturer->Stop();
- send_stream->StopSending();
- receive_stream->StopReceiving();
+ send_stream->Stop();
+ receive_stream->Stop();
observer.StopSending();
audio_observer.StopSending();
@@ -299,4 +351,347 @@ TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSync) {
receiver_call->DestroyVideoReceiveStream(receive_stream);
VoiceEngine::Delete(voice_engine);
}
+
+class CaptureNtpTimeObserver : public test::RtpRtcpObserver,
+ public VideoRenderer {
+ public:
+ CaptureNtpTimeObserver(Clock* clock,
+ const FakeNetworkPipe::Config& config,
+ int threshold_ms,
+ int start_time_ms,
+ int run_time_ms)
+ : RtpRtcpObserver(kLongTimeoutMs, config),
+ clock_(clock),
+ threshold_ms_(threshold_ms),
+ start_time_ms_(start_time_ms),
+ run_time_ms_(run_time_ms),
+ creation_time_ms_(clock_->TimeInMilliseconds()),
+ capturer_(NULL),
+ rtp_start_timestamp_set_(false),
+ rtp_start_timestamp_(0) {}
+
+ virtual void RenderFrame(const I420VideoFrame& video_frame,
+ int time_to_render_ms) OVERRIDE {
+ if (video_frame.ntp_time_ms() <= 0) {
+ // Not enough RTCP SRs received yet to calculate the capture NTP time.
+ return;
+ }
+
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ int64_t time_since_creation = now_ms - creation_time_ms_;
+ if (time_since_creation < start_time_ms_) {
+ // Wait for |start_time_ms_| before starting to measure.
+ return;
+ }
+
+ if (time_since_creation > run_time_ms_) {
+ observation_complete_->Set();
+ }
+
+ FrameCaptureTimeList::iterator iter =
+ capture_time_list_.find(video_frame.timestamp());
+ EXPECT_TRUE(iter != capture_time_list_.end());
+
+ // The real capture time was wrapped to uint32_t before being converted to
+ // an RTP timestamp on the sender side, so here the estimated capture time
+ // is also converted to a uint32_t 90 kHz timestamp for comparison.
+ uint32_t estimated_capture_timestamp =
+ 90 * static_cast<uint32_t>(video_frame.ntp_time_ms());
+ uint32_t real_capture_timestamp = iter->second;
+ int time_offset_ms = real_capture_timestamp - estimated_capture_timestamp;
+ time_offset_ms = time_offset_ms / 90;
+ std::stringstream ss;
+ ss << time_offset_ms;
+
+ webrtc::test::PrintResult("capture_ntp_time",
+ "",
+ "real - estimated",
+ ss.str(),
+ "ms",
+ true);
+ EXPECT_TRUE(std::abs(time_offset_ms) < threshold_ms_);
+ }
+
+ virtual Action OnSendRtp(const uint8_t* packet, size_t length) {
+ RTPHeader header;
+ EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+
+ if (!rtp_start_timestamp_set_) {
+ // Calculate the rtp timestamp offset in order to calculate the real
+ // capture time.
+ uint32_t first_capture_timestamp =
+ 90 * static_cast<uint32_t>(capturer_->first_frame_capture_time());
+ rtp_start_timestamp_ = header.timestamp - first_capture_timestamp;
+ rtp_start_timestamp_set_ = true;
+ }
+
+ uint32_t capture_timestamp = header.timestamp - rtp_start_timestamp_;
+ capture_time_list_.insert(capture_time_list_.end(),
+ std::make_pair(header.timestamp,
+ capture_timestamp));
+ return SEND_PACKET;
+ }
+
+ void SetCapturer(test::FrameGeneratorCapturer* capturer) {
+ capturer_ = capturer;
+ }
+
+ private:
+ Clock* clock_;
+ int threshold_ms_;
+ int start_time_ms_;
+ int run_time_ms_;
+ int64_t creation_time_ms_;
+ test::FrameGeneratorCapturer* capturer_;
+ bool rtp_start_timestamp_set_;
+ uint32_t rtp_start_timestamp_;
+ typedef std::map<uint32_t, uint32_t> FrameCaptureTimeList;
+ FrameCaptureTimeList capture_time_list_;
+};
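
CaptureNtpTimeObserver above compares the estimated capture time (video_frame.ntp_time_ms()) with the real capture time by expressing both in 90 kHz RTP timestamp units and dividing the difference by 90 to get back to milliseconds. A small worked sketch of that unit conversion, written in JavaScript only to stay consistent with the loopback tooling earlier in this patch; the sample values are illustrative:

    // Convert between milliseconds and 90 kHz RTP timestamp units, as done
    // when comparing real and estimated capture times above.
    function msTo90kHzTicks(ms) { return (90 * ms) >>> 0; }  // wrap to 32 bits
    function ticksToMs(ticks) { return ticks / 90; }

    var realCaptureMs = 1000000;       // hypothetical real capture time
    var estimatedCaptureMs = 1000004;  // hypothetical estimate from RTCP SRs
    var offsetTicks =
        msTo90kHzTicks(realCaptureMs) - msTo90kHzTicks(estimatedCaptureMs);
    console.log(ticksToMs(offsetTicks) + ' ms offset');  // -> "-4 ms offset"
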
+
+void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
+ int threshold_ms,
+ int start_time_ms,
+ int run_time_ms) {
+ CaptureNtpTimeObserver observer(Clock::GetRealTimeClock(),
+ net_config,
+ threshold_ms,
+ start_time_ms,
+ run_time_ms);
+
+ // Sender/receiver call.
+ Call::Config receiver_config(observer.ReceiveTransport());
+ scoped_ptr<Call> receiver_call(Call::Create(receiver_config));
+ scoped_ptr<Call> sender_call(
+ Call::Create(Call::Config(observer.SendTransport())));
+ observer.SetReceivers(receiver_call->Receiver(), sender_call->Receiver());
+
+ // Configure send stream.
+ CreateTestConfig(sender_call.get());
+ VideoSendStream* send_stream =
+ sender_call->CreateVideoSendStream(send_config_, video_streams_, NULL);
+ scoped_ptr<test::FrameGeneratorCapturer> capturer(
+ test::FrameGeneratorCapturer::Create(send_stream->Input(),
+ video_streams_[0].width,
+ video_streams_[0].height,
+ 30,
+ Clock::GetRealTimeClock()));
+ observer.SetCapturer(capturer.get());
+
+ // Configure receive stream.
+ VideoReceiveStream::Config receive_config =
+ receiver_call->GetDefaultReceiveConfig();
+ assert(receive_config.codecs.empty());
+ VideoCodec codec =
+ test::CreateDecoderVideoCodec(send_config_.encoder_settings);
+ receive_config.codecs.push_back(codec);
+ assert(receive_config.external_decoders.empty());
+ ExternalVideoDecoder decoder;
+ test::FakeDecoder fake_decoder;
+ decoder.decoder = &fake_decoder;
+ decoder.payload_type = send_config_.encoder_settings.payload_type;
+ receive_config.external_decoders.push_back(decoder);
+ receive_config.rtp.remote_ssrc = send_config_.rtp.ssrcs[0];
+ receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
+ receive_config.renderer = &observer;
+ // Enable the receiver side rtt calculation.
+ receive_config.rtp.rtcp_xr.receiver_reference_time_report = true;
+ VideoReceiveStream* receive_stream =
+ receiver_call->CreateVideoReceiveStream(receive_config);
+
+ // Start the test
+ receive_stream->Start();
+ send_stream->Start();
+ capturer->Start();
+
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timed out while waiting for estimated capture ntp time to be "
+ << "within bounds.";
+
+ capturer->Stop();
+ send_stream->Stop();
+ receive_stream->Stop();
+ observer.StopSending();
+
+ sender_call->DestroyVideoSendStream(send_stream);
+ receiver_call->DestroyVideoReceiveStream(receive_stream);
+}
+
+TEST_F(CallPerfTest, CaptureNtpTimeWithNetworkDelay) {
+ FakeNetworkPipe::Config net_config;
+ net_config.queue_delay_ms = 100;
+ // TODO(wu): lower the threshold as the calculation/estimation becomes more
+ // accurate.
+ const int kThresholdMs = 100;
+ const int kStartTimeMs = 10000;
+ const int kRunTimeMs = 20000;
+ TestCaptureNtpTime(net_config, kThresholdMs, kStartTimeMs, kRunTimeMs);
+}
+
+TEST_F(CallPerfTest, CaptureNtpTimeWithNetworkJitter) {
+ FakeNetworkPipe::Config net_config;
+ net_config.queue_delay_ms = 100;
+ net_config.delay_standard_deviation_ms = 10;
+ // TODO(wu): lower the threshold as the calculation/estimation becomes more
+ // accurate.
+ const int kThresholdMs = 100;
+ const int kStartTimeMs = 10000;
+ const int kRunTimeMs = 20000;
+ TestCaptureNtpTime(net_config, kThresholdMs, kStartTimeMs, kRunTimeMs);
+}
+
+TEST_F(CallPerfTest, RegisterCpuOveruseObserver) {
+ // Verifies that either a normal or overuse callback is triggered.
+ class OveruseCallbackObserver : public test::RtpRtcpObserver,
+ public webrtc::OveruseCallback {
+ public:
+ OveruseCallbackObserver() : RtpRtcpObserver(kLongTimeoutMs) {}
+
+ virtual void OnOveruse() OVERRIDE {
+ observation_complete_->Set();
+ }
+ virtual void OnNormalUse() OVERRIDE {
+ observation_complete_->Set();
+ }
+ };
+
+ OveruseCallbackObserver observer;
+ Call::Config call_config(observer.SendTransport());
+ call_config.overuse_callback = &observer;
+ scoped_ptr<Call> call(Call::Create(call_config));
+
+ CreateTestConfig(call.get());
+ RunVideoSendTest(call.get(), send_config_, &observer);
+}
+
+void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
+ static const int kMaxEncodeBitrateKbps = 30;
+ static const int kMinTransmitBitrateBps = 150000;
+ static const int kMinAcceptableTransmitBitrate = 130;
+ static const int kMaxAcceptableTransmitBitrate = 170;
+ static const int kNumBitrateObservationsInRange = 100;
+ class BitrateObserver : public test::RtpRtcpObserver, public PacketReceiver {
+ public:
+ explicit BitrateObserver(bool using_min_transmit_bitrate)
+ : test::RtpRtcpObserver(kLongTimeoutMs),
+ send_stream_(NULL),
+ send_transport_receiver_(NULL),
+ using_min_transmit_bitrate_(using_min_transmit_bitrate),
+ num_bitrate_observations_in_range_(0) {}
+
+ virtual void SetReceivers(PacketReceiver* send_transport_receiver,
+ PacketReceiver* receive_transport_receiver)
+ OVERRIDE {
+ send_transport_receiver_ = send_transport_receiver;
+ test::RtpRtcpObserver::SetReceivers(this, receive_transport_receiver);
+ }
+
+ void SetSendStream(VideoSendStream* send_stream) {
+ send_stream_ = send_stream;
+ }
+
+ private:
+ virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
+ size_t length) OVERRIDE {
+ VideoSendStream::Stats stats = send_stream_->GetStats();
+ if (stats.substreams.size() > 0) {
+ assert(stats.substreams.size() == 1);
+ int bitrate_kbps = stats.substreams.begin()->second.bitrate_bps / 1000;
+ if (bitrate_kbps > 0) {
+ test::PrintResult(
+ "bitrate_stats_",
+ (using_min_transmit_bitrate_ ? "min_transmit_bitrate"
+ : "without_min_transmit_bitrate"),
+ "bitrate_kbps",
+ static_cast<size_t>(bitrate_kbps),
+ "kbps",
+ false);
+ if (using_min_transmit_bitrate_) {
+ if (bitrate_kbps > kMinAcceptableTransmitBitrate &&
+ bitrate_kbps < kMaxAcceptableTransmitBitrate) {
+ ++num_bitrate_observations_in_range_;
+ }
+ } else {
+ // Expect bitrate stats to roughly match the max encode bitrate.
+ if (bitrate_kbps > kMaxEncodeBitrateKbps - 5 &&
+ bitrate_kbps < kMaxEncodeBitrateKbps + 5) {
+ ++num_bitrate_observations_in_range_;
+ }
+ }
+ if (num_bitrate_observations_in_range_ ==
+ kNumBitrateObservationsInRange)
+ observation_complete_->Set();
+ }
+ }
+ return send_transport_receiver_->DeliverPacket(packet, length);
+ }
+
+ VideoSendStream* send_stream_;
+ PacketReceiver* send_transport_receiver_;
+ const bool using_min_transmit_bitrate_;
+ int num_bitrate_observations_in_range_;
+ } observer(pad_to_min_bitrate);
+
+ scoped_ptr<Call> sender_call(
+ Call::Create(Call::Config(observer.SendTransport())));
+ scoped_ptr<Call> receiver_call(
+ Call::Create(Call::Config(observer.ReceiveTransport())));
+
+ CreateTestConfig(sender_call.get());
+ fake_encoder_.SetMaxBitrate(kMaxEncodeBitrateKbps);
+
+ observer.SetReceivers(receiver_call->Receiver(), sender_call->Receiver());
+
+ if (pad_to_min_bitrate) {
+ send_config_.rtp.min_transmit_bitrate_bps = kMinTransmitBitrateBps;
+ } else {
+ assert(send_config_.rtp.min_transmit_bitrate_bps == 0);
+ }
+
+ VideoReceiveStream::Config receive_config =
+ receiver_call->GetDefaultReceiveConfig();
+ receive_config.codecs.clear();
+ VideoCodec codec =
+ test::CreateDecoderVideoCodec(send_config_.encoder_settings);
+ receive_config.codecs.push_back(codec);
+ test::FakeDecoder fake_decoder;
+ ExternalVideoDecoder decoder;
+ decoder.decoder = &fake_decoder;
+ decoder.payload_type = send_config_.encoder_settings.payload_type;
+ receive_config.external_decoders.push_back(decoder);
+ receive_config.rtp.remote_ssrc = send_config_.rtp.ssrcs[0];
+ receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
+
+ VideoSendStream* send_stream =
+ sender_call->CreateVideoSendStream(send_config_, video_streams_, NULL);
+ VideoReceiveStream* receive_stream =
+ receiver_call->CreateVideoReceiveStream(receive_config);
+ scoped_ptr<test::FrameGeneratorCapturer> capturer(
+ test::FrameGeneratorCapturer::Create(send_stream->Input(),
+ video_streams_[0].width,
+ video_streams_[0].height,
+ 30,
+ Clock::GetRealTimeClock()));
+ observer.SetSendStream(send_stream);
+ receive_stream->Start();
+ send_stream->Start();
+ capturer->Start();
+
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timeout while waiting for send-bitrate stats.";
+
+ send_stream->Stop();
+ receive_stream->Stop();
+ observer.StopSending();
+ capturer->Stop();
+ sender_call->DestroyVideoSendStream(send_stream);
+ receiver_call->DestroyVideoReceiveStream(receive_stream);
+}
+
+TEST_F(CallPerfTest, PadsToMinTransmitBitrate) { TestMinTransmitBitrate(true); }
+
+TEST_F(CallPerfTest, NoPadWithoutMinTransmitBitrate) {
+ TestMinTransmitBitrate(false);
+}
+
} // namespace webrtc
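
The min-transmit-bitrate test above passes once enough per-second bitrate samples land inside a fixed acceptance window: near the 150 kbps padding floor when rtp.min_transmit_bitrate_bps is set, or near the 30 kbps encoder cap when it is not. A minimal sketch of that pass criterion as a free function (the constants mirror the test and are illustrative only, not a WebRTC API):

    // Sketch only: mirrors the BitrateObserver's acceptance window above.
    bool BitrateObservationInRange(int bitrate_kbps,
                                   bool using_min_transmit_bitrate) {
      const int kMaxEncodeBitrateKbps = 30;
      const int kMinAcceptableTransmitBitrateKbps = 130;
      const int kMaxAcceptableTransmitBitrateKbps = 170;
      if (using_min_transmit_bitrate) {
        // Padding should hold the transmit rate near the 150 kbps floor even
        // though the encoder itself is capped at 30 kbps.
        return bitrate_kbps > kMinAcceptableTransmitBitrateKbps &&
               bitrate_kbps < kMaxAcceptableTransmitBitrateKbps;
      }
      // Without a transmit floor the send rate should track the encoder cap.
      return bitrate_kbps > kMaxEncodeBitrateKbps - 5 &&
             bitrate_kbps < kMaxEncodeBitrateKbps + 5;
    }
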
diff --git a/chromium/third_party/webrtc/video/call_tests.cc b/chromium/third_party/webrtc/video/call_tests.cc
index 1b0c8748798..5bcf1157642 100644
--- a/chromium/third_party/webrtc/video/call_tests.cc
+++ b/chromium/third_party/webrtc/video/call_tests.cc
@@ -18,17 +18,20 @@
#include "webrtc/call.h"
#include "webrtc/frame_callback.h"
-#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/sleep.h"
#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/fake_audio_device.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator.h"
#include "webrtc/test/frame_generator_capturer.h"
+#include "webrtc/test/null_transport.h"
#include "webrtc/test/rtp_rtcp_observer.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/perf_test.h"
@@ -39,8 +42,12 @@ namespace webrtc {
static unsigned int kDefaultTimeoutMs = 30 * 1000;
static unsigned int kLongTimeoutMs = 120 * 1000;
static const uint32_t kSendSsrc = 0x654321;
+static const uint32_t kSendRtxSsrc = 0x424242;
static const uint32_t kReceiverLocalSsrc = 0x123456;
static const uint8_t kSendPayloadType = 125;
+static const uint8_t kSendRtxPayloadType = 126;
+static const int kRedPayloadType = 118;
+static const int kUlpfecPayloadType = 119;
class CallTest : public ::testing::Test {
public:
@@ -66,16 +73,18 @@ class CallTest : public ::testing::Test {
receive_config_ = receiver_call_->GetDefaultReceiveConfig();
send_config_.rtp.ssrcs.push_back(kSendSsrc);
- send_config_.encoder = &fake_encoder_;
- send_config_.internal_source = false;
- test::FakeEncoder::SetCodecSettings(&send_config_.codec, 1);
- send_config_.codec.plType = kSendPayloadType;
-
- receive_config_.codecs.clear();
- receive_config_.codecs.push_back(send_config_.codec);
+ send_config_.encoder_settings.encoder = &fake_encoder_;
+ send_config_.encoder_settings.payload_name = "FAKE";
+ send_config_.encoder_settings.payload_type = kSendPayloadType;
+ video_streams_ = test::CreateVideoStreams(1);
+
+ assert(receive_config_.codecs.empty());
+ VideoCodec codec =
+ test::CreateDecoderVideoCodec(send_config_.encoder_settings);
+ receive_config_.codecs.push_back(codec);
ExternalVideoDecoder decoder;
decoder.decoder = &fake_decoder_;
- decoder.payload_type = send_config_.codec.plType;
+ decoder.payload_type = send_config_.encoder_settings.payload_type;
receive_config_.external_decoders.push_back(decoder);
receive_config_.rtp.remote_ssrc = send_config_.rtp.ssrcs[0];
receive_config_.rtp.local_ssrc = kReceiverLocalSsrc;
@@ -85,22 +94,23 @@ class CallTest : public ::testing::Test {
assert(send_stream_ == NULL);
assert(receive_stream_ == NULL);
- send_stream_ = sender_call_->CreateVideoSendStream(send_config_);
+ send_stream_ =
+ sender_call_->CreateVideoSendStream(send_config_, video_streams_, NULL);
receive_stream_ = receiver_call_->CreateVideoReceiveStream(receive_config_);
}
void CreateFrameGenerator() {
frame_generator_capturer_.reset(
test::FrameGeneratorCapturer::Create(send_stream_->Input(),
- send_config_.codec.width,
- send_config_.codec.height,
+ video_streams_[0].width,
+ video_streams_[0].height,
30,
Clock::GetRealTimeClock()));
}
void StartSending() {
- receive_stream_->StartReceiving();
- send_stream_->StartSending();
+ receive_stream_->Start();
+ send_stream_->Start();
if (frame_generator_capturer_.get() != NULL)
frame_generator_capturer_->Start();
}
@@ -109,9 +119,9 @@ class CallTest : public ::testing::Test {
if (frame_generator_capturer_.get() != NULL)
frame_generator_capturer_->Stop();
if (send_stream_ != NULL)
- send_stream_->StopSending();
+ send_stream_->Stop();
if (receive_stream_ != NULL)
- receive_stream_->StopReceiving();
+ receive_stream_->Stop();
}
void DestroyStreams() {
@@ -123,13 +133,16 @@ class CallTest : public ::testing::Test {
receive_stream_ = NULL;
}
+ void DecodesRetransmittedFrame(bool retransmit_over_rtx);
void ReceivesPliAndRecovers(int rtp_history_ms);
void RespectsRtcpMode(newapi::RtcpMode rtcp_mode);
+ void TestXrReceiverReferenceTimeReport(bool enable_rrtr);
scoped_ptr<Call> sender_call_;
scoped_ptr<Call> receiver_call_;
VideoSendStream::Config send_config_;
+ std::vector<VideoStream> video_streams_;
VideoReceiveStream::Config receive_config_;
VideoSendStream* send_stream_;
@@ -142,22 +155,20 @@ class CallTest : public ::testing::Test {
};
class NackObserver : public test::RtpRtcpObserver {
- static const int kNumberOfNacksToObserve = 4;
- static const int kInverseProbabilityToStartLossBurst = 20;
- static const int kMaxLossBurst = 10;
+ static const int kNumberOfNacksToObserve = 2;
+ static const int kLossBurstSize = 2;
+ static const int kPacketsBetweenLossBursts = 9;
public:
NackObserver()
: test::RtpRtcpObserver(kLongTimeoutMs),
rtp_parser_(RtpHeaderParser::Create()),
- drop_burst_count_(0),
sent_rtp_packets_(0),
+ packets_left_to_drop_(0),
nacks_left_(kNumberOfNacksToObserve) {}
private:
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
- EXPECT_FALSE(RtpHeaderParser::IsRtcp(packet, static_cast<int>(length)));
-
RTPHeader header;
EXPECT_TRUE(rtp_parser_->Parse(packet, static_cast<int>(length), &header));
@@ -165,31 +176,29 @@ class NackObserver : public test::RtpRtcpObserver {
if (dropped_packets_.find(header.sequenceNumber) !=
dropped_packets_.end()) {
retransmitted_packets_.insert(header.sequenceNumber);
+ if (nacks_left_ == 0 &&
+ retransmitted_packets_.size() == dropped_packets_.size()) {
+ observation_complete_->Set();
+ }
return SEND_PACKET;
}
+ ++sent_rtp_packets_;
+
// Enough NACKs received, stop dropping packets.
- if (nacks_left_ == 0) {
- ++sent_rtp_packets_;
+ if (nacks_left_ == 0)
return SEND_PACKET;
- }
- // Still dropping packets.
- if (drop_burst_count_ > 0) {
- --drop_burst_count_;
- dropped_packets_.insert(header.sequenceNumber);
- return DROP_PACKET;
- }
+ // Check if it's time for a new loss burst.
+ if (sent_rtp_packets_ % kPacketsBetweenLossBursts == 0)
+ packets_left_to_drop_ = kLossBurstSize;
- // Should we start dropping packets?
- if (sent_rtp_packets_ > 0 &&
- rand() % kInverseProbabilityToStartLossBurst == 0) {
- drop_burst_count_ = rand() % kMaxLossBurst;
+ if (packets_left_to_drop_ > 0) {
+ --packets_left_to_drop_;
dropped_packets_.insert(header.sequenceNumber);
return DROP_PACKET;
}
- ++sent_rtp_packets_;
return SEND_PACKET;
}
@@ -197,105 +206,122 @@ class NackObserver : public test::RtpRtcpObserver {
RTCPUtility::RTCPParserV2 parser(packet, length, true);
EXPECT_TRUE(parser.IsValid());
- bool received_nack = false;
RTCPUtility::RTCPPacketTypes packet_type = parser.Begin();
while (packet_type != RTCPUtility::kRtcpNotValidCode) {
- if (packet_type == RTCPUtility::kRtcpRtpfbNackCode)
- received_nack = true;
-
+ if (packet_type == RTCPUtility::kRtcpRtpfbNackCode) {
+ --nacks_left_;
+ break;
+ }
packet_type = parser.Iterate();
}
-
- if (received_nack) {
- ReceivedNack();
- } else {
- RtcpWithoutNack();
- }
return SEND_PACKET;
}
private:
- void ReceivedNack() {
- if (nacks_left_ > 0)
- --nacks_left_;
- rtcp_without_nack_count_ = 0;
- }
-
- void RtcpWithoutNack() {
- if (nacks_left_ > 0)
- return;
- ++rtcp_without_nack_count_;
-
- // All packets retransmitted and no recent NACKs.
- if (dropped_packets_.size() == retransmitted_packets_.size() &&
- rtcp_without_nack_count_ >= kRequiredRtcpsWithoutNack) {
- observation_complete_->Set();
- }
- }
-
scoped_ptr<RtpHeaderParser> rtp_parser_;
std::set<uint16_t> dropped_packets_;
std::set<uint16_t> retransmitted_packets_;
- int drop_burst_count_;
uint64_t sent_rtp_packets_;
+ int packets_left_to_drop_;
int nacks_left_;
- int rtcp_without_nack_count_;
- static const int kRequiredRtcpsWithoutNack = 2;
};
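
The rewritten NackObserver drops media packets on a deterministic schedule instead of at random: after every kPacketsBetweenLossBursts sent packets it drops a burst of kLossBurstSize, then counts incoming NACKs and waits until every dropped sequence number has been retransmitted. A standalone sketch of just the drop schedule (same constants as above; illustrative, not a WebRTC API):

    #include <stdint.h>

    // Sketch only. 'sent_media_packets' counts non-retransmitted packets;
    // 'packets_left_to_drop' carries the state of the current loss burst
    // between calls, exactly as in the observer above.
    bool ShouldDropPacket(uint64_t sent_media_packets,
                          int* packets_left_to_drop) {
      const int kLossBurstSize = 2;
      const int kPacketsBetweenLossBursts = 9;
      if (sent_media_packets % kPacketsBetweenLossBursts == 0)
        *packets_left_to_drop = kLossBurstSize;
      if (*packets_left_to_drop > 0) {
        --*packets_left_to_drop;
        return true;
      }
      return false;
    }
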
-TEST_F(CallTest, UsesTraceCallback) {
- const unsigned int kSenderTraceFilter = kTraceDebug;
- const unsigned int kReceiverTraceFilter = kTraceDefault & (~kTraceDebug);
- class TraceObserver : public TraceCallback {
+TEST_F(CallTest, ReceiverCanBeStartedTwice) {
+ test::NullTransport transport;
+ CreateCalls(Call::Config(&transport), Call::Config(&transport));
+
+ CreateTestConfigs();
+ CreateStreams();
+
+ receive_stream_->Start();
+ receive_stream_->Start();
+
+ DestroyStreams();
+}
+
+TEST_F(CallTest, ReceiverCanBeStoppedTwice) {
+ test::NullTransport transport;
+ CreateCalls(Call::Config(&transport), Call::Config(&transport));
+
+ CreateTestConfigs();
+ CreateStreams();
+
+ receive_stream_->Stop();
+ receive_stream_->Stop();
+
+ DestroyStreams();
+}
+
+TEST_F(CallTest, RendersSingleDelayedFrame) {
+ static const int kWidth = 320;
+ static const int kHeight = 240;
+ // This constant is chosen to be higher than the timeout in the video_render
+ // module. This makes sure that frames aren't dropped if there are no other
+ // frames in the queue.
+ static const int kDelayRenderCallbackMs = 1000;
+
+ class Renderer : public VideoRenderer {
public:
- explicit TraceObserver(unsigned int filter)
- : filter_(filter), messages_left_(50), done_(EventWrapper::Create()) {}
+ Renderer() : event_(EventWrapper::Create()) {}
- virtual void Print(TraceLevel level,
- const char* message,
- int length) OVERRIDE {
- EXPECT_EQ(0u, level & (~filter_));
- if (--messages_left_ == 0)
- done_->Set();
+ virtual void RenderFrame(const I420VideoFrame& video_frame,
+ int /*time_to_render_ms*/) OVERRIDE {
+ event_->Set();
}
- EventTypeWrapper Wait() { return done_->Wait(kDefaultTimeoutMs); }
+ EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
+
+ scoped_ptr<EventWrapper> event_;
+ } renderer;
+
+ class TestFrameCallback : public I420FrameCallback {
+ public:
+ TestFrameCallback() : event_(EventWrapper::Create()) {}
+
+ EventTypeWrapper Wait() { return event_->Wait(kDefaultTimeoutMs); }
private:
- unsigned int filter_;
- unsigned int messages_left_;
- scoped_ptr<EventWrapper> done_;
- } sender_trace(kSenderTraceFilter), receiver_trace(kReceiverTraceFilter);
+ virtual void FrameCallback(I420VideoFrame* frame) OVERRIDE {
+ SleepMs(kDelayRenderCallbackMs);
+ event_->Set();
+ }
- test::DirectTransport send_transport, receive_transport;
- Call::Config sender_call_config(&send_transport);
- sender_call_config.trace_callback = &sender_trace;
- sender_call_config.trace_filter = kSenderTraceFilter;
- Call::Config receiver_call_config(&receive_transport);
- receiver_call_config.trace_callback = &receiver_trace;
- receiver_call_config.trace_filter = kReceiverTraceFilter;
- CreateCalls(sender_call_config, receiver_call_config);
- send_transport.SetReceiver(receiver_call_->Receiver());
- receive_transport.SetReceiver(sender_call_->Receiver());
+ scoped_ptr<EventWrapper> event_;
+ };
+
+ test::DirectTransport sender_transport, receiver_transport;
+
+ CreateCalls(Call::Config(&sender_transport),
+ Call::Config(&receiver_transport));
+
+ sender_transport.SetReceiver(receiver_call_->Receiver());
+ receiver_transport.SetReceiver(sender_call_->Receiver());
CreateTestConfigs();
+ TestFrameCallback pre_render_callback;
+ receive_config_.pre_render_callback = &pre_render_callback;
+ receive_config_.renderer = &renderer;
+
CreateStreams();
- CreateFrameGenerator();
StartSending();
- // Wait() waits for a couple of trace callbacks to occur.
- EXPECT_EQ(kEventSignaled, sender_trace.Wait());
- EXPECT_EQ(kEventSignaled, receiver_trace.Wait());
+ // Create frames that are smaller than the send width/height to check that
+ // the callbacks are invoked after the video has been processed.
+ scoped_ptr<test::FrameGenerator> frame_generator(
+ test::FrameGenerator::Create(kWidth, kHeight));
+ send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
+ EXPECT_EQ(kEventSignaled, pre_render_callback.Wait())
+ << "Timed out while waiting for pre-render callback.";
+ EXPECT_EQ(kEventSignaled, renderer.Wait())
+ << "Timed out while waiting for the frame to render.";
StopSending();
- send_transport.StopSending();
- receive_transport.StopSending();
- DestroyStreams();
- // The TraceCallback instance MUST outlive Calls, destroy Calls explicitly.
- sender_call_.reset();
- receiver_call_.reset();
+ sender_transport.StopSending();
+ receiver_transport.StopSending();
+
+ DestroyStreams();
}
TEST_F(CallTest, TransmitsFirstFrame) {
@@ -328,7 +354,7 @@ TEST_F(CallTest, TransmitsFirstFrame) {
StartSending();
scoped_ptr<test::FrameGenerator> frame_generator(test::FrameGenerator::Create(
- send_config_.codec.width, send_config_.codec.height));
+ video_streams_[0].width, video_streams_[0].height));
send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
EXPECT_EQ(kEventSignaled, renderer.Wait())
@@ -412,6 +438,204 @@ TEST_F(CallTest, ReceivesAndRetransmitsNack) {
DestroyStreams();
}
+// TODO(pbos): Flaky, webrtc:3269
+TEST_F(CallTest, DISABLED_CanReceiveFec) {
+ class FecRenderObserver : public test::RtpRtcpObserver, public VideoRenderer {
+ public:
+ FecRenderObserver()
+ : RtpRtcpObserver(kDefaultTimeoutMs),
+ state_(kFirstPacket),
+ protected_sequence_number_(0),
+ protected_frame_timestamp_(0) {}
+
+ private:
+ virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE
+ EXCLUSIVE_LOCKS_REQUIRED(crit_) {
+ RTPHeader header;
+ EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+
+ EXPECT_EQ(kRedPayloadType, header.payloadType);
+ int encapsulated_payload_type =
+ static_cast<int>(packet[header.headerLength]);
+ if (encapsulated_payload_type != kSendPayloadType)
+ EXPECT_EQ(kUlpfecPayloadType, encapsulated_payload_type);
+
+ switch (state_) {
+ case kFirstPacket:
+ state_ = kDropEveryOtherPacketUntilFec;
+ break;
+ case kDropEveryOtherPacketUntilFec:
+ if (encapsulated_payload_type == kUlpfecPayloadType) {
+ state_ = kDropNextMediaPacket;
+ return SEND_PACKET;
+ }
+ if (header.sequenceNumber % 2 == 0)
+ return DROP_PACKET;
+ break;
+ case kDropNextMediaPacket:
+ if (encapsulated_payload_type == kSendPayloadType) {
+ protected_sequence_number_ = header.sequenceNumber;
+ protected_frame_timestamp_ = header.timestamp;
+ state_ = kProtectedPacketDropped;
+ return DROP_PACKET;
+ }
+ break;
+ case kProtectedPacketDropped:
+ EXPECT_NE(header.sequenceNumber, protected_sequence_number_)
+ << "Protected packet retransmitted. Should not happen with FEC.";
+ break;
+ }
+
+ return SEND_PACKET;
+ }
+
+ virtual void RenderFrame(const I420VideoFrame& video_frame,
+ int time_to_render_ms) OVERRIDE {
+ CriticalSectionScoped lock(crit_.get());
+ // Rendering frame with timestamp associated with dropped packet -> FEC
+ // protection worked.
+ if (state_ == kProtectedPacketDropped &&
+ video_frame.timestamp() == protected_frame_timestamp_) {
+ observation_complete_->Set();
+ }
+ }
+
+ enum {
+ kFirstPacket,
+ kDropEveryOtherPacketUntilFec,
+ kDropNextMediaPacket,
+ kProtectedPacketDropped,
+ } state_;
+
+ uint32_t protected_sequence_number_ GUARDED_BY(crit_);
+ uint32_t protected_frame_timestamp_ GUARDED_BY(crit_);
+ } observer;
+
+ CreateCalls(Call::Config(observer.SendTransport()),
+ Call::Config(observer.ReceiveTransport()));
+
+ observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver());
+
+ CreateTestConfigs();
+ // TODO(pbos): Run this test with combined NACK/FEC enabled as well.
+ // int rtp_history_ms = 1000;
+ // receive_config_.rtp.nack.rtp_history_ms = rtp_history_ms;
+ // send_config_.rtp.nack.rtp_history_ms = rtp_history_ms;
+ send_config_.rtp.fec.red_payload_type = kRedPayloadType;
+ send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
+
+ receive_config_.rtp.fec.red_payload_type = kRedPayloadType;
+ receive_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
+ receive_config_.renderer = &observer;
+
+ CreateStreams();
+ CreateFrameGenerator();
+ StartSending();
+
+ // Wait() waits for an event triggered when a frame whose media packet was
+ // dropped (but protected by FEC) has been rendered, i.e. FEC recovery worked.
+ EXPECT_EQ(kEventSignaled, observer.Wait());
+
+ StopSending();
+
+ observer.StopSending();
+
+ DestroyStreams();
+}
+
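The FEC observer above relies on the single-block RED layout used here: the first byte after the RTP header is the encapsulated payload type (the RFC 2198 F bit is clear), which is how the test tells media packets from ULPFEC packets inside the RED stream. A minimal sketch of that check, assuming 'packet' holds a complete RED-wrapped RTP packet and 'header_length' comes from an RTP header parser:

    #include <stddef.h>
    #include <stdint.h>

    // Sketch only: single-block RED, as produced in this test. Multi-block RED
    // payloads would require walking the full RFC 2198 header chain.
    int EncapsulatedPayloadType(const uint8_t* packet, size_t header_length) {
      return packet[header_length] & 0x7F;  // Low 7 bits carry the payload type.
    }
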
+// This test drops the second RTP packet with a marker bit set, verifies that
+// it is retransmitted and that the frame renders, and checks the SSRC and
+// payload type used for the retransmission.
+void CallTest::DecodesRetransmittedFrame(bool retransmit_over_rtx) {
+ static const int kDroppedFrameNumber = 2;
+ class RetransmissionObserver : public test::RtpRtcpObserver,
+ public I420FrameCallback {
+ public:
+ explicit RetransmissionObserver(bool expect_rtx)
+ : RtpRtcpObserver(kDefaultTimeoutMs),
+ retransmission_ssrc_(expect_rtx ? kSendRtxSsrc : kSendSsrc),
+ retransmission_payload_type_(expect_rtx ? kSendRtxPayloadType
+ : kSendPayloadType),
+ marker_bits_observed_(0),
+ retransmitted_timestamp_(0),
+ frame_retransmitted_(false) {}
+
+ private:
+ virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+ RTPHeader header;
+ EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
+
+ if (header.timestamp == retransmitted_timestamp_) {
+ EXPECT_EQ(retransmission_ssrc_, header.ssrc);
+ EXPECT_EQ(retransmission_payload_type_, header.payloadType);
+ frame_retransmitted_ = true;
+ return SEND_PACKET;
+ }
+
+ EXPECT_EQ(kSendSsrc, header.ssrc);
+ EXPECT_EQ(kSendPayloadType, header.payloadType);
+
+ // Found the second frame's final packet, drop this and expect a
+ // retransmission.
+ if (header.markerBit && ++marker_bits_observed_ == kDroppedFrameNumber) {
+ retransmitted_timestamp_ = header.timestamp;
+ return DROP_PACKET;
+ }
+
+ return SEND_PACKET;
+ }
+
+ virtual void FrameCallback(I420VideoFrame* frame) OVERRIDE {
+ CriticalSectionScoped lock(crit_.get());
+ if (frame->timestamp() == retransmitted_timestamp_) {
+ EXPECT_TRUE(frame_retransmitted_);
+ observation_complete_->Set();
+ }
+ }
+
+ const uint32_t retransmission_ssrc_;
+ const int retransmission_payload_type_;
+ int marker_bits_observed_;
+ uint32_t retransmitted_timestamp_;
+ bool frame_retransmitted_;
+ } observer(retransmit_over_rtx);
+
+ CreateCalls(Call::Config(observer.SendTransport()),
+ Call::Config(observer.ReceiveTransport()));
+
+ observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver());
+
+ CreateTestConfigs();
+ send_config_.rtp.nack.rtp_history_ms =
+ receive_config_.rtp.nack.rtp_history_ms = 1000;
+ if (retransmit_over_rtx) {
+ send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrc);
+ send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
+ int payload_type = send_config_.encoder_settings.payload_type;
+ receive_config_.rtp.rtx[payload_type].ssrc = kSendRtxSsrc;
+ receive_config_.rtp.rtx[payload_type].payload_type = kSendRtxPayloadType;
+ }
+ receive_config_.pre_render_callback = &observer;
+
+ CreateStreams();
+ CreateFrameGenerator();
+ StartSending();
+
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timed out while waiting for retransmission to render.";
+
+ StopSending();
+ observer.StopSending();
+ DestroyStreams();
+}
+
+TEST_F(CallTest, DecodesRetransmittedFrame) {
+ DecodesRetransmittedFrame(false);
+}
+
+TEST_F(CallTest, DecodesRetransmittedFrameOverRtx) {
+ DecodesRetransmittedFrame(true);
+}
+
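For the RTX variant, the send side advertises a dedicated RTX SSRC and payload type while the receive side maps the media payload type onto them, exactly as configured a few lines above. A sketch of that pairing pulled out into a helper, assuming the config structs behave as used in this patch (includes and types as in call_tests.cc):

    // Sketch: wire up RTX retransmission for a single media payload type.
    void ConfigureRtx(VideoSendStream::Config* send_config,
                      VideoReceiveStream::Config* receive_config,
                      uint32_t rtx_ssrc,
                      int rtx_payload_type) {
      send_config->rtp.rtx.ssrcs.push_back(rtx_ssrc);
      send_config->rtp.rtx.payload_type = rtx_payload_type;
      int media_payload_type = send_config->encoder_settings.payload_type;
      receive_config->rtp.rtx[media_payload_type].ssrc = rtx_ssrc;
      receive_config->rtp.rtx[media_payload_type].payload_type =
          rtx_payload_type;
    }
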
TEST_F(CallTest, UsesFrameCallbacks) {
static const int kWidth = 320;
static const int kHeight = 240;
@@ -477,11 +701,18 @@ TEST_F(CallTest, UsesFrameCallbacks) {
receiver_transport.SetReceiver(sender_call_->Receiver());
CreateTestConfigs();
- send_config_.encoder = NULL;
- send_config_.codec = sender_call_->GetVideoCodecs()[0];
- send_config_.codec.width = kWidth;
- send_config_.codec.height = kHeight;
+ scoped_ptr<VP8Encoder> encoder(VP8Encoder::Create());
+ send_config_.encoder_settings.encoder = encoder.get();
+ send_config_.encoder_settings.payload_name = "VP8";
+ ASSERT_EQ(1u, video_streams_.size()) << "Test setup error.";
+ video_streams_[0].width = kWidth;
+ video_streams_[0].height = kHeight;
send_config_.pre_encode_callback = &pre_encode_callback;
+ receive_config_.codecs.clear();
+ VideoCodec codec =
+ test::CreateDecoderVideoCodec(send_config_.encoder_settings);
+ receive_config_.external_decoders.clear();
+ receive_config_.codecs.push_back(codec);
receive_config_.pre_render_callback = &pre_render_callback;
receive_config_.renderer = &renderer;
@@ -515,31 +746,25 @@ class PliObserver : public test::RtpRtcpObserver, public VideoRenderer {
public:
explicit PliObserver(bool nack_enabled)
: test::RtpRtcpObserver(kLongTimeoutMs),
- rtp_header_parser_(RtpHeaderParser::Create()),
nack_enabled_(nack_enabled),
- first_retransmitted_timestamp_(0),
- last_send_timestamp_(0),
- rendered_frame_(false),
+ highest_dropped_timestamp_(0),
+ frames_to_drop_(0),
received_pli_(false) {}
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(
- rtp_header_parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
- // Drop all NACK retransmissions. This is to force transmission of a PLI.
- if (header.timestamp < last_send_timestamp_)
+ // Drop all retransmitted packets to force a PLI.
+ if (header.timestamp <= highest_dropped_timestamp_)
return DROP_PACKET;
- if (received_pli_) {
- if (first_retransmitted_timestamp_ == 0) {
- first_retransmitted_timestamp_ = header.timestamp;
- }
- } else if (rendered_frame_ && rand() % kInverseDropProbability == 0) {
+ if (frames_to_drop_ > 0) {
+ highest_dropped_timestamp_ = header.timestamp;
+ --frames_to_drop_;
return DROP_PACKET;
}
- last_send_timestamp_ = header.timestamp;
return SEND_PACKET;
}
@@ -563,23 +788,20 @@ class PliObserver : public test::RtpRtcpObserver, public VideoRenderer {
virtual void RenderFrame(const I420VideoFrame& video_frame,
int time_to_render_ms) OVERRIDE {
- CriticalSectionScoped crit_(lock_.get());
- if (first_retransmitted_timestamp_ != 0 &&
- video_frame.timestamp() > first_retransmitted_timestamp_) {
- EXPECT_TRUE(received_pli_);
+ CriticalSectionScoped lock(crit_.get());
+ if (received_pli_ && video_frame.timestamp() > highest_dropped_timestamp_) {
observation_complete_->Set();
}
- rendered_frame_ = true;
+ if (!received_pli_)
+ frames_to_drop_ = kPacketsToDrop;
}
private:
- scoped_ptr<RtpHeaderParser> rtp_header_parser_;
- bool nack_enabled_;
-
- uint32_t first_retransmitted_timestamp_;
- uint32_t last_send_timestamp_;
+ static const int kPacketsToDrop = 1;
- bool rendered_frame_;
+ bool nack_enabled_;
+ uint32_t highest_dropped_timestamp_;
+ int frames_to_drop_;
bool received_pli_;
};
@@ -620,7 +842,7 @@ TEST_F(CallTest, DISABLED_ReceivesPliAndRecoversWithoutNack) {
ReceivesPliAndRecovers(0);
}
-TEST_F(CallTest, SurvivesIncomingRtpPacketsToDestroyedReceiveStream) {
+TEST_F(CallTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) {
class PacketInputObserver : public PacketReceiver {
public:
explicit PacketInputObserver(PacketReceiver* receiver)
@@ -631,13 +853,16 @@ TEST_F(CallTest, SurvivesIncomingRtpPacketsToDestroyedReceiveStream) {
}
private:
- virtual bool DeliverPacket(const uint8_t* packet, size_t length) {
+ virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
+ size_t length) OVERRIDE {
if (RtpHeaderParser::IsRtcp(packet, static_cast<int>(length))) {
return receiver_->DeliverPacket(packet, length);
} else {
- EXPECT_FALSE(receiver_->DeliverPacket(packet, length));
+ DeliveryStatus delivery_status =
+ receiver_->DeliverPacket(packet, length);
+ EXPECT_EQ(DELIVERY_UNKNOWN_SSRC, delivery_status);
delivered_packet_->Set();
- return false;
+ return delivery_status;
}
}
@@ -792,7 +1017,7 @@ TEST_F(CallTest, SendsAndReceivesMultipleStreams) {
done_->Set();
}
- void Wait() { done_->Wait(kDefaultTimeoutMs); }
+ EventTypeWrapper Wait() { return done_->Wait(kDefaultTimeoutMs); }
private:
test::FrameGeneratorCapturer** capturer_;
@@ -820,27 +1045,43 @@ TEST_F(CallTest, SendsAndReceivesMultipleStreams) {
VideoOutputObserver* observers[kNumStreams];
test::FrameGeneratorCapturer* frame_generators[kNumStreams];
+ scoped_ptr<VP8Encoder> encoders[kNumStreams];
+ for (size_t i = 0; i < kNumStreams; ++i)
+ encoders[i].reset(VP8Encoder::Create());
+
for (size_t i = 0; i < kNumStreams; ++i) {
uint32_t ssrc = codec_settings[i].ssrc;
int width = codec_settings[i].width;
int height = codec_settings[i].height;
observers[i] = new VideoOutputObserver(&frame_generators[i], width, height);
+ VideoSendStream::Config send_config = sender_call->GetDefaultSendConfig();
+ send_config.rtp.ssrcs.push_back(ssrc);
+ send_config.encoder_settings.encoder = encoders[i].get();
+ send_config.encoder_settings.payload_name = "VP8";
+ send_config.encoder_settings.payload_type = 124;
+ std::vector<VideoStream> video_streams = test::CreateVideoStreams(1);
+ VideoStream* stream = &video_streams[0];
+ stream->width = width;
+ stream->height = height;
+ stream->max_framerate = 5;
+ stream->min_bitrate_bps = stream->target_bitrate_bps =
+ stream->max_bitrate_bps = 100000;
+ send_streams[i] =
+ sender_call->CreateVideoSendStream(send_config, video_streams, NULL);
+ send_streams[i]->Start();
+
VideoReceiveStream::Config receive_config =
receiver_call->GetDefaultReceiveConfig();
receive_config.renderer = observers[i];
receive_config.rtp.remote_ssrc = ssrc;
receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
+ VideoCodec codec =
+ test::CreateDecoderVideoCodec(send_config.encoder_settings);
+ receive_config.codecs.push_back(codec);
receive_streams[i] =
receiver_call->CreateVideoReceiveStream(receive_config);
- receive_streams[i]->StartReceiving();
-
- VideoSendStream::Config send_config = sender_call->GetDefaultSendConfig();
- send_config.rtp.ssrcs.push_back(ssrc);
- send_config.codec.width = width;
- send_config.codec.height = height;
- send_streams[i] = sender_call->CreateVideoSendStream(send_config);
- send_streams[i]->StartSending();
+ receive_streams[i]->Start();
frame_generators[i] = test::FrameGeneratorCapturer::Create(
send_streams[i]->Input(), width, height, 30, Clock::GetRealTimeClock());
@@ -848,7 +1089,8 @@ TEST_F(CallTest, SendsAndReceivesMultipleStreams) {
}
for (size_t i = 0; i < kNumStreams; ++i) {
- observers[i]->Wait();
+ EXPECT_EQ(kEventSignaled, observers[i]->Wait())
+ << "Timed out while waiting for observer " << i << " to render.";
}
for (size_t i = 0; i < kNumStreams; ++i) {
@@ -866,9 +1108,10 @@ TEST_F(CallTest, SendsAndReceivesMultipleStreams) {
TEST_F(CallTest, ObserversEncodedFrames) {
class EncodedFrameTestObserver : public EncodedFrameObserver {
public:
- EncodedFrameTestObserver() : length_(0),
- frame_type_(kFrameEmpty),
- called_(EventWrapper::Create()) {}
+ EncodedFrameTestObserver()
+ : length_(0),
+ frame_type_(kFrameEmpty),
+ called_(EventWrapper::Create()) {}
virtual ~EncodedFrameTestObserver() {}
virtual void EncodedFrameCallback(const EncodedFrame& encoded_frame) {
@@ -879,9 +1122,7 @@ TEST_F(CallTest, ObserversEncodedFrames) {
called_->Set();
}
- EventTypeWrapper Wait() {
- return called_->Wait(kDefaultTimeoutMs);
- }
+ EventTypeWrapper Wait() { return called_->Wait(kDefaultTimeoutMs); }
void ExpectEqualFrames(const EncodedFrameTestObserver& observer) {
ASSERT_EQ(length_, observer.length_)
@@ -918,7 +1159,7 @@ TEST_F(CallTest, ObserversEncodedFrames) {
StartSending();
scoped_ptr<test::FrameGenerator> frame_generator(test::FrameGenerator::Create(
- send_config_.codec.width, send_config_.codec.height));
+ video_streams_[0].width, video_streams_[0].height));
send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
EXPECT_EQ(kEventSignaled, post_encode_observer.Wait())
@@ -985,4 +1226,387 @@ TEST_F(CallTest, ReceiveStreamSendsRemb) {
observer.StopSending();
DestroyStreams();
}
+
+void CallTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) {
+ static const int kNumRtcpReportPacketsToObserve = 5;
+ class RtcpXrObserver : public test::RtpRtcpObserver {
+ public:
+ explicit RtcpXrObserver(bool enable_rrtr)
+ : test::RtpRtcpObserver(kDefaultTimeoutMs),
+ enable_rrtr_(enable_rrtr),
+ sent_rtcp_sr_(0),
+ sent_rtcp_rr_(0),
+ sent_rtcp_rrtr_(0),
+ sent_rtcp_dlrr_(0) {}
+
+ private:
+ // Receive stream should send RR packets (and RRTR packets if enabled).
+ virtual Action OnReceiveRtcp(const uint8_t* packet,
+ size_t length) OVERRIDE {
+ RTCPUtility::RTCPParserV2 parser(packet, length, true);
+ EXPECT_TRUE(parser.IsValid());
+
+ RTCPUtility::RTCPPacketTypes packet_type = parser.Begin();
+ while (packet_type != RTCPUtility::kRtcpNotValidCode) {
+ if (packet_type == RTCPUtility::kRtcpRrCode) {
+ ++sent_rtcp_rr_;
+ } else if (packet_type ==
+ RTCPUtility::kRtcpXrReceiverReferenceTimeCode) {
+ ++sent_rtcp_rrtr_;
+ }
+ EXPECT_NE(packet_type, RTCPUtility::kRtcpSrCode);
+ EXPECT_NE(packet_type, RTCPUtility::kRtcpXrDlrrReportBlockItemCode);
+ packet_type = parser.Iterate();
+ }
+ return SEND_PACKET;
+ }
+ // Send stream should send SR packets (and DLRR packets if enabled).
+ virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+ RTCPUtility::RTCPParserV2 parser(packet, length, true);
+ EXPECT_TRUE(parser.IsValid());
+
+ RTCPUtility::RTCPPacketTypes packet_type = parser.Begin();
+ while (packet_type != RTCPUtility::kRtcpNotValidCode) {
+ if (packet_type == RTCPUtility::kRtcpSrCode) {
+ ++sent_rtcp_sr_;
+ } else if (packet_type == RTCPUtility::kRtcpXrDlrrReportBlockItemCode) {
+ ++sent_rtcp_dlrr_;
+ }
+ EXPECT_NE(packet_type, RTCPUtility::kRtcpXrReceiverReferenceTimeCode);
+ packet_type = parser.Iterate();
+ }
+ if (sent_rtcp_sr_ > kNumRtcpReportPacketsToObserve &&
+ sent_rtcp_rr_ > kNumRtcpReportPacketsToObserve) {
+ if (enable_rrtr_) {
+ EXPECT_GT(sent_rtcp_rrtr_, 0);
+ EXPECT_GT(sent_rtcp_dlrr_, 0);
+ } else {
+ EXPECT_EQ(0, sent_rtcp_rrtr_);
+ EXPECT_EQ(0, sent_rtcp_dlrr_);
+ }
+ observation_complete_->Set();
+ }
+ return SEND_PACKET;
+ }
+ bool enable_rrtr_;
+ int sent_rtcp_sr_;
+ int sent_rtcp_rr_;
+ int sent_rtcp_rrtr_;
+ int sent_rtcp_dlrr_;
+ } observer(enable_rrtr);
+
+ CreateCalls(Call::Config(observer.SendTransport()),
+ Call::Config(observer.ReceiveTransport()));
+ observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver());
+
+ CreateTestConfigs();
+ receive_config_.rtp.rtcp_mode = newapi::kRtcpReducedSize;
+ receive_config_.rtp.rtcp_xr.receiver_reference_time_report = enable_rrtr;
+
+ CreateStreams();
+ CreateFrameGenerator();
+ StartSending();
+
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timed out while waiting for RTCP SR/RR packets to be sent.";
+
+ StopSending();
+ observer.StopSending();
+ DestroyStreams();
+}
+
+class StatsObserver : public test::RtpRtcpObserver, public I420FrameCallback {
+ public:
+ StatsObserver()
+ : test::RtpRtcpObserver(kLongTimeoutMs),
+ receive_stream_(NULL),
+ send_stream_(NULL),
+ expected_receive_ssrc_(),
+ expected_send_ssrcs_(),
+ check_stats_event_(EventWrapper::Create()) {}
+
+ void SetExpectedReceiveSsrc(uint32_t ssrc) { expected_receive_ssrc_ = ssrc; }
+
+ void SetExpectedSendSsrcs(const std::vector<uint32_t>& ssrcs) {
+ for (std::vector<uint32_t>::const_iterator it = ssrcs.begin();
+ it != ssrcs.end();
+ ++it) {
+ expected_send_ssrcs_.insert(*it);
+ }
+ }
+
+ void SetExpectedCName(std::string cname) { expected_cname_ = cname; }
+
+ void SetReceiveStream(VideoReceiveStream* stream) {
+ receive_stream_ = stream;
+ }
+
+ void SetSendStream(VideoSendStream* stream) { send_stream_ = stream; }
+
+ void WaitForFilledStats() {
+ Clock* clock = Clock::GetRealTimeClock();
+ int64_t now = clock->TimeInMilliseconds();
+ int64_t stop_time = now + kLongTimeoutMs;
+ bool receive_ok = false;
+ bool send_ok = false;
+
+ while (now < stop_time) {
+ if (!receive_ok)
+ receive_ok = CheckReceiveStats();
+ if (!send_ok)
+ send_ok = CheckSendStats();
+
+ if (receive_ok && send_ok)
+ return;
+
+ int64_t time_until_timeout = stop_time - now;
+ if (time_until_timeout > 0)
+ check_stats_event_->Wait(time_until_timeout);
+ now = clock->TimeInMilliseconds();
+ }
+
+ ADD_FAILURE() << "Timed out waiting for filled stats.";
+ for (std::map<std::string, bool>::const_iterator it =
+ receive_stats_filled_.begin();
+ it != receive_stats_filled_.end();
+ ++it) {
+ if (!it->second) {
+ ADD_FAILURE() << "Missing receive stats: " << it->first;
+ }
+ }
+
+ for (std::map<std::string, bool>::const_iterator it =
+ send_stats_filled_.begin();
+ it != send_stats_filled_.end();
+ ++it) {
+ if (!it->second) {
+ ADD_FAILURE() << "Missing send stats: " << it->first;
+ }
+ }
+ }
+
+ private:
+ virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+ check_stats_event_->Set();
+ return SEND_PACKET;
+ }
+
+ virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+ check_stats_event_->Set();
+ return SEND_PACKET;
+ }
+
+ virtual Action OnReceiveRtp(const uint8_t* packet, size_t length) OVERRIDE {
+ check_stats_event_->Set();
+ return SEND_PACKET;
+ }
+
+ virtual Action OnReceiveRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+ check_stats_event_->Set();
+ return SEND_PACKET;
+ }
+
+ virtual void FrameCallback(I420VideoFrame* video_frame) OVERRIDE {
+ // Ensure that we have at least 5ms send side delay.
+ int64_t render_time = video_frame->render_time_ms();
+ if (render_time > 0)
+ video_frame->set_render_time_ms(render_time - 5);
+ }
+
+ bool CheckReceiveStats() {
+ assert(receive_stream_ != NULL);
+ VideoReceiveStream::Stats stats = receive_stream_->GetStats();
+ EXPECT_EQ(expected_receive_ssrc_, stats.ssrc);
+
+ // Make sure all fields have been populated.
+
+ receive_stats_filled_["IncomingRate"] |=
+ stats.network_frame_rate != 0 || stats.bitrate_bps != 0;
+
+ receive_stats_filled_["FrameCallback"] |= stats.decode_frame_rate != 0;
+
+ receive_stats_filled_["FrameRendered"] |= stats.render_frame_rate != 0;
+
+ receive_stats_filled_["StatisticsUpdated"] |=
+ stats.rtcp_stats.cumulative_lost != 0 ||
+ stats.rtcp_stats.extended_max_sequence_number != 0 ||
+ stats.rtcp_stats.fraction_lost != 0 || stats.rtcp_stats.jitter != 0;
+
+ receive_stats_filled_["DataCountersUpdated"] |=
+ stats.rtp_stats.bytes != 0 || stats.rtp_stats.fec_packets != 0 ||
+ stats.rtp_stats.header_bytes != 0 || stats.rtp_stats.packets != 0 ||
+ stats.rtp_stats.padding_bytes != 0 ||
+ stats.rtp_stats.retransmitted_packets != 0;
+
+ receive_stats_filled_["CodecStats"] |=
+ stats.avg_delay_ms != 0 || stats.discarded_packets != 0 ||
+ stats.key_frames != 0 || stats.delta_frames != 0;
+
+ receive_stats_filled_["CName"] |= stats.c_name == expected_cname_;
+
+ return AllStatsFilled(receive_stats_filled_);
+ }
+
+ bool CheckSendStats() {
+ assert(send_stream_ != NULL);
+ VideoSendStream::Stats stats = send_stream_->GetStats();
+
+ send_stats_filled_["NumStreams"] |=
+ stats.substreams.size() == expected_send_ssrcs_.size();
+
+ send_stats_filled_["Delay"] |=
+ stats.avg_delay_ms != 0 || stats.max_delay_ms != 0;
+
+ receive_stats_filled_["CName"] |= stats.c_name == expected_cname_;
+
+ for (std::map<uint32_t, StreamStats>::const_iterator it =
+ stats.substreams.begin();
+ it != stats.substreams.end();
+ ++it) {
+ EXPECT_TRUE(expected_send_ssrcs_.find(it->first) !=
+ expected_send_ssrcs_.end());
+
+ send_stats_filled_[CompoundKey("IncomingRate", it->first)] |=
+ stats.input_frame_rate != 0;
+
+ const StreamStats& stream_stats = it->second;
+
+ send_stats_filled_[CompoundKey("StatisticsUpdated", it->first)] |=
+ stream_stats.rtcp_stats.cumulative_lost != 0 ||
+ stream_stats.rtcp_stats.extended_max_sequence_number != 0 ||
+ stream_stats.rtcp_stats.fraction_lost != 0;
+
+ send_stats_filled_[CompoundKey("DataCountersUpdated", it->first)] |=
+ stream_stats.rtp_stats.fec_packets != 0 ||
+ stream_stats.rtp_stats.padding_bytes != 0 ||
+ stream_stats.rtp_stats.retransmitted_packets != 0 ||
+ stream_stats.rtp_stats.packets != 0;
+
+ send_stats_filled_[CompoundKey("BitrateStatisticsObserver", it->first)] |=
+ stream_stats.bitrate_bps != 0;
+
+ send_stats_filled_[CompoundKey("FrameCountObserver", it->first)] |=
+ stream_stats.delta_frames != 0 || stream_stats.key_frames != 0;
+
+ send_stats_filled_[CompoundKey("OutgoingRate", it->first)] |=
+ stats.encode_frame_rate != 0;
+ }
+
+ return AllStatsFilled(send_stats_filled_);
+ }
+
+ std::string CompoundKey(const char* name, uint32_t ssrc) {
+ std::ostringstream oss;
+ oss << name << "_" << ssrc;
+ return oss.str();
+ }
+
+ bool AllStatsFilled(const std::map<std::string, bool>& stats_map) {
+ for (std::map<std::string, bool>::const_iterator it = stats_map.begin();
+ it != stats_map.end();
+ ++it) {
+ if (!it->second)
+ return false;
+ }
+ return true;
+ }
+
+ VideoReceiveStream* receive_stream_;
+ std::map<std::string, bool> receive_stats_filled_;
+
+ VideoSendStream* send_stream_;
+ std::map<std::string, bool> send_stats_filled_;
+
+ uint32_t expected_receive_ssrc_;
+ std::set<uint32_t> expected_send_ssrcs_;
+ std::string expected_cname_;
+
+ scoped_ptr<EventWrapper> check_stats_event_;
+};
+
+TEST_F(CallTest, GetStats) {
+ StatsObserver observer;
+
+ CreateCalls(Call::Config(observer.SendTransport()),
+ Call::Config(observer.ReceiveTransport()));
+
+ observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver());
+
+ CreateTestConfigs();
+ send_config_.pre_encode_callback = &observer; // Used to inject delay.
+ send_config_.rtp.c_name = "SomeCName";
+
+ observer.SetExpectedReceiveSsrc(receive_config_.rtp.local_ssrc);
+ observer.SetExpectedSendSsrcs(send_config_.rtp.ssrcs);
+ observer.SetExpectedCName(send_config_.rtp.c_name);
+
+ CreateStreams();
+ observer.SetReceiveStream(receive_stream_);
+ observer.SetSendStream(send_stream_);
+ CreateFrameGenerator();
+ StartSending();
+
+ observer.WaitForFilledStats();
+
+ StopSending();
+ observer.StopSending();
+ DestroyStreams();
+}
+
+TEST_F(CallTest, ReceiverReferenceTimeReportEnabled) {
+ TestXrReceiverReferenceTimeReport(true);
+}
+
+TEST_F(CallTest, ReceiverReferenceTimeReportDisabled) {
+ TestXrReceiverReferenceTimeReport(false);
+}
+
+TEST_F(CallTest, TestReceivedRtpPacketStats) {
+ static const size_t kNumRtpPacketsToSend = 5;
+ class ReceivedRtpStatsObserver : public test::RtpRtcpObserver {
+ public:
+ ReceivedRtpStatsObserver()
+ : test::RtpRtcpObserver(kDefaultTimeoutMs),
+ receive_stream_(NULL),
+ sent_rtp_(0) {}
+
+ void SetReceiveStream(VideoReceiveStream* stream) {
+ receive_stream_ = stream;
+ }
+
+ private:
+ virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+ if (sent_rtp_ >= kNumRtpPacketsToSend) {
+ VideoReceiveStream::Stats stats = receive_stream_->GetStats();
+ if (kNumRtpPacketsToSend == stats.rtp_stats.packets) {
+ observation_complete_->Set();
+ }
+ return DROP_PACKET;
+ }
+ ++sent_rtp_;
+ return SEND_PACKET;
+ }
+
+ VideoReceiveStream* receive_stream_;
+ uint32_t sent_rtp_;
+ } observer;
+
+ CreateCalls(Call::Config(observer.SendTransport()),
+ Call::Config(observer.ReceiveTransport()));
+ observer.SetReceivers(receiver_call_->Receiver(), sender_call_->Receiver());
+
+ CreateTestConfigs();
+ CreateStreams();
+ observer.SetReceiveStream(receive_stream_);
+ CreateFrameGenerator();
+ StartSending();
+
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timed out while verifying number of received RTP packets.";
+
+ StopSending();
+ observer.StopSending();
+ DestroyStreams();
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/full_stack.cc b/chromium/third_party/webrtc/video/full_stack.cc
index 1181bfe488e..6b21cbe12cd 100644
--- a/chromium/third_party/webrtc/video/full_stack.cc
+++ b/chromium/third_party/webrtc/video/full_stack.cc
@@ -12,29 +12,30 @@
#include <deque>
#include <map>
-#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/sleep.h"
-#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/encoder_settings.h"
+#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator_capturer.h"
#include "webrtc/test/statistics.h"
-#include "webrtc/test/video_renderer.h"
+#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/typedefs.h"
-DEFINE_int32(seconds, 10, "Seconds to run each clip.");
-
namespace webrtc {
static const uint32_t kSendSsrc = 0x654321;
+static const int kFullStackTestDurationSecs = 10;
struct FullStackTestParams {
const char* test_label;
@@ -79,18 +80,18 @@ class VideoAnalyzer : public PacketReceiver,
transport_(transport),
receiver_(NULL),
test_label_(test_label),
+ frames_left_(duration_frames),
dropped_frames_(0),
+ last_render_time_(0),
rtp_timestamp_delta_(0),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
first_send_frame_(NULL),
- last_render_time_(0),
avg_psnr_threshold_(avg_psnr_threshold),
avg_ssim_threshold_(avg_ssim_threshold),
- frames_left_(duration_frames),
- crit_(CriticalSectionWrapper::CreateCriticalSection()),
comparison_lock_(CriticalSectionWrapper::CreateCriticalSection()),
comparison_thread_(ThreadWrapper::CreateThread(&FrameComparisonThread,
this)),
- trigger_(EventWrapper::Create()) {
+ done_(EventWrapper::Create()) {
unsigned int id;
EXPECT_TRUE(comparison_thread_->Start(id));
}
@@ -110,12 +111,13 @@ class VideoAnalyzer : public PacketReceiver,
virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; }
- virtual bool DeliverPacket(const uint8_t* packet, size_t length) OVERRIDE {
+ virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
+ size_t length) OVERRIDE {
scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
RTPHeader header;
parser->Parse(packet, static_cast<int>(length), &header);
{
- CriticalSectionScoped cs(crit_.get());
+ CriticalSectionScoped lock(crit_.get());
recv_times_[header.timestamp - rtp_timestamp_delta_] =
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
}
@@ -123,14 +125,10 @@ class VideoAnalyzer : public PacketReceiver,
return receiver_->DeliverPacket(packet, length);
}
- virtual void PutFrame(const I420VideoFrame& video_frame) OVERRIDE {
- ADD_FAILURE() << "PutFrame() should not have been called in this test.";
- }
-
virtual void SwapFrame(I420VideoFrame* video_frame) OVERRIDE {
I420VideoFrame* copy = NULL;
{
- CriticalSectionScoped cs(crit_.get());
+ CriticalSectionScoped lock(crit_.get());
if (frame_pool_.size() > 0) {
copy = frame_pool_.front();
frame_pool_.pop_front();
@@ -143,7 +141,7 @@ class VideoAnalyzer : public PacketReceiver,
copy->set_timestamp(copy->render_time_ms() * 90);
{
- CriticalSectionScoped cs(crit_.get());
+ CriticalSectionScoped lock(crit_.get());
if (first_send_frame_ == NULL && rtp_timestamp_delta_ == 0)
first_send_frame_ = copy;
@@ -159,7 +157,7 @@ class VideoAnalyzer : public PacketReceiver,
parser->Parse(packet, static_cast<int>(length), &header);
{
- CriticalSectionScoped cs(crit_.get());
+ CriticalSectionScoped lock(crit_.get());
if (rtp_timestamp_delta_ == 0) {
rtp_timestamp_delta_ =
header.timestamp - first_send_frame_->timestamp();
@@ -182,29 +180,27 @@ class VideoAnalyzer : public PacketReceiver,
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
uint32_t send_timestamp = video_frame.timestamp() - rtp_timestamp_delta_;
- {
- CriticalSectionScoped cs(crit_.get());
- while (frames_.front()->timestamp() < send_timestamp) {
- AddFrameComparison(
- frames_.front(), &last_rendered_frame_, true, render_time_ms);
- frame_pool_.push_back(frames_.front());
- frames_.pop_front();
- }
-
- I420VideoFrame* reference_frame = frames_.front();
+ CriticalSectionScoped lock(crit_.get());
+ while (frames_.front()->timestamp() < send_timestamp) {
+ AddFrameComparison(
+ frames_.front(), &last_rendered_frame_, true, render_time_ms);
+ frame_pool_.push_back(frames_.front());
frames_.pop_front();
- assert(reference_frame != NULL);
- EXPECT_EQ(reference_frame->timestamp(), send_timestamp);
- assert(reference_frame->timestamp() == send_timestamp);
-
- AddFrameComparison(reference_frame, &video_frame, false, render_time_ms);
- frame_pool_.push_back(reference_frame);
}
+ I420VideoFrame* reference_frame = frames_.front();
+ frames_.pop_front();
+ assert(reference_frame != NULL);
+ EXPECT_EQ(reference_frame->timestamp(), send_timestamp);
+ assert(reference_frame->timestamp() == send_timestamp);
+
+ AddFrameComparison(reference_frame, &video_frame, false, render_time_ms);
+ frame_pool_.push_back(reference_frame);
+
last_rendered_frame_.CopyFrame(video_frame);
}
- void Wait() { trigger_->Wait(120 * 1000); }
+ void Wait() { done_->Wait(120 * 1000); }
VideoSendStreamInput* input_;
Transport* transport_;
@@ -248,7 +244,8 @@ class VideoAnalyzer : public PacketReceiver,
void AddFrameComparison(const I420VideoFrame* reference,
const I420VideoFrame* render,
bool dropped,
- int64_t render_time_ms) {
+ int64_t render_time_ms)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_) {
int64_t send_time_ms = send_times_[reference->timestamp()];
send_times_.erase(reference->timestamp());
int64_t recv_time_ms = recv_times_[reference->timestamp()];
@@ -311,7 +308,7 @@ class VideoAnalyzer : public PacketReceiver,
PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
- trigger_->Set();
+ done_->Set();
return false;
}
@@ -351,31 +348,32 @@ class VideoAnalyzer : public PacketReceiver,
unit);
}
- const char* test_label_;
+ const char* const test_label_;
test::Statistics sender_time_;
test::Statistics receiver_time_;
test::Statistics psnr_;
test::Statistics ssim_;
test::Statistics end_to_end_;
test::Statistics rendered_delta_;
-
+ int frames_left_;
int dropped_frames_;
- std::deque<I420VideoFrame*> frames_;
- std::deque<I420VideoFrame*> frame_pool_;
- I420VideoFrame last_rendered_frame_;
- std::map<uint32_t, int64_t> send_times_;
- std::map<uint32_t, int64_t> recv_times_;
- uint32_t rtp_timestamp_delta_;
- I420VideoFrame* first_send_frame_;
int64_t last_render_time_;
- double avg_psnr_threshold_;
- double avg_ssim_threshold_;
- int frames_left_;
- scoped_ptr<CriticalSectionWrapper> crit_;
- scoped_ptr<CriticalSectionWrapper> comparison_lock_;
- scoped_ptr<ThreadWrapper> comparison_thread_;
- std::deque<FrameComparison> comparisons_;
- scoped_ptr<EventWrapper> trigger_;
+ uint32_t rtp_timestamp_delta_;
+
+ const scoped_ptr<CriticalSectionWrapper> crit_;
+ std::deque<I420VideoFrame*> frames_ GUARDED_BY(crit_);
+ std::deque<I420VideoFrame*> frame_pool_ GUARDED_BY(crit_);
+ I420VideoFrame last_rendered_frame_ GUARDED_BY(crit_);
+ std::map<uint32_t, int64_t> send_times_ GUARDED_BY(crit_);
+ std::map<uint32_t, int64_t> recv_times_ GUARDED_BY(crit_);
+ I420VideoFrame* first_send_frame_ GUARDED_BY(crit_);
+ double avg_psnr_threshold_ GUARDED_BY(crit_);
+ double avg_ssim_threshold_ GUARDED_BY(crit_);
+
+ const scoped_ptr<CriticalSectionWrapper> comparison_lock_;
+ const scoped_ptr<ThreadWrapper> comparison_thread_;
+ std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_);
+ const scoped_ptr<EventWrapper> done_;
};
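
The reordered member list above also introduces Clang thread-safety annotations: state touched under crit_ is tagged GUARDED_BY(crit_) and helpers that expect the lock to already be held are tagged EXCLUSIVE_LOCKS_REQUIRED(crit_), so locking mistakes surface at compile time when the analysis is enabled. A minimal sketch of the pattern using the same primitives as this file (thread_annotations.h, CriticalSectionWrapper, scoped_ptr); illustrative only:

    class Counter {
     public:
      Counter() : crit_(CriticalSectionWrapper::CreateCriticalSection()), n_(0) {}

      void Increment() {
        CriticalSectionScoped lock(crit_.get());
        IncrementLocked();
      }

     private:
      // Callers must hold crit_; the RAII scope in Increment() satisfies this.
      void IncrementLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_) { ++n_; }

      const scoped_ptr<CriticalSectionWrapper> crit_;
      int n_ GUARDED_BY(crit_);
    };
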
TEST_P(FullStackTest, NoPacketLoss) {
@@ -388,7 +386,7 @@ TEST_P(FullStackTest, NoPacketLoss) {
params.test_label,
params.avg_psnr_threshold,
params.avg_ssim_threshold,
- FLAGS_seconds * params.clip.fps);
+ kFullStackTestDurationSecs * params.clip.fps);
Call::Config call_config(&analyzer);
@@ -399,15 +397,20 @@ TEST_P(FullStackTest, NoPacketLoss) {
VideoSendStream::Config send_config = call->GetDefaultSendConfig();
send_config.rtp.ssrcs.push_back(kSendSsrc);
- // TODO(pbos): static_cast shouldn't be required after mflodman refactors the
- // VideoCodec struct.
- send_config.codec.width = static_cast<uint16_t>(params.clip.width);
- send_config.codec.height = static_cast<uint16_t>(params.clip.height);
- send_config.codec.minBitrate = params.bitrate;
- send_config.codec.startBitrate = params.bitrate;
- send_config.codec.maxBitrate = params.bitrate;
-
- VideoSendStream* send_stream = call->CreateVideoSendStream(send_config);
+ scoped_ptr<VP8Encoder> encoder(VP8Encoder::Create());
+ send_config.encoder_settings.encoder = encoder.get();
+ send_config.encoder_settings.payload_name = "VP8";
+ send_config.encoder_settings.payload_type = 124;
+ std::vector<VideoStream> video_streams = test::CreateVideoStreams(1);
+ VideoStream* stream = &video_streams[0];
+ stream->width = params.clip.width;
+ stream->height = params.clip.height;
+ stream->min_bitrate_bps = stream->target_bitrate_bps =
+ stream->max_bitrate_bps = params.bitrate * 1000;
+ stream->max_framerate = params.clip.fps;
+
+ VideoSendStream* send_stream =
+ call->CreateVideoSendStream(send_config, video_streams, NULL);
analyzer.input_ = send_stream->Input();
scoped_ptr<test::FrameGeneratorCapturer> file_capturer(
@@ -423,6 +426,9 @@ TEST_P(FullStackTest, NoPacketLoss) {
<< ".yuv. Is this resource file present?";
VideoReceiveStream::Config receive_config = call->GetDefaultReceiveConfig();
+ VideoCodec codec =
+ test::CreateDecoderVideoCodec(send_config.encoder_settings);
+ receive_config.codecs.push_back(codec);
receive_config.rtp.remote_ssrc = send_config.rtp.ssrcs[0];
receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
receive_config.renderer = &analyzer;
@@ -430,16 +436,15 @@ TEST_P(FullStackTest, NoPacketLoss) {
VideoReceiveStream* receive_stream =
call->CreateVideoReceiveStream(receive_config);
- receive_stream->StartReceiving();
- send_stream->StartSending();
-
+ receive_stream->Start();
+ send_stream->Start();
file_capturer->Start();
analyzer.Wait();
file_capturer->Stop();
- send_stream->StopSending();
- receive_stream->StopReceiving();
+ send_stream->Stop();
+ receive_stream->Stop();
call->DestroyVideoReceiveStream(receive_stream);
call->DestroyVideoSendStream(send_stream);
diff --git a/chromium/third_party/webrtc/video/loopback.cc b/chromium/third_party/webrtc/video/loopback.cc
index e9b08931fae..ea65ebb4222 100644
--- a/chromium/third_party/webrtc/video/loopback.cc
+++ b/chromium/third_party/webrtc/video/loopback.cc
@@ -12,38 +12,57 @@
#include <map>
+#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/call.h"
+#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/direct_transport.h"
-#include "webrtc/test/flags.h"
+#include "webrtc/test/encoder_settings.h"
+#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/run_loop.h"
-#include "webrtc/test/run_tests.h"
+#include "webrtc/test/run_test.h"
#include "webrtc/test/video_capturer.h"
#include "webrtc/test/video_renderer.h"
#include "webrtc/typedefs.h"
namespace webrtc {
+namespace flags {
-class LoopbackTest : public ::testing::Test {
- protected:
- std::map<uint32_t, bool> reserved_ssrcs_;
-};
+DEFINE_int32(width, 640, "Video width.");
+size_t Width() { return static_cast<size_t>(FLAGS_width); }
+
+DEFINE_int32(height, 480, "Video height.");
+size_t Height() { return static_cast<size_t>(FLAGS_height); }
+
+DEFINE_int32(fps, 30, "Frames per second.");
+int Fps() { return static_cast<int>(FLAGS_fps); }
+
+DEFINE_int32(min_bitrate, 50, "Minimum video bitrate.");
+size_t MinBitrate() { return static_cast<size_t>(FLAGS_min_bitrate); }
+
+DEFINE_int32(start_bitrate, 300, "Video starting bitrate.");
+size_t StartBitrate() { return static_cast<size_t>(FLAGS_start_bitrate); }
+
+DEFINE_int32(max_bitrate, 800, "Maximum video bitrate.");
+size_t MaxBitrate() { return static_cast<size_t>(FLAGS_max_bitrate); }
+} // namespace flags
static const uint32_t kSendSsrc = 0x654321;
static const uint32_t kReceiverLocalSsrc = 0x123456;
-TEST_F(LoopbackTest, Test) {
+void Loopback() {
scoped_ptr<test::VideoRenderer> local_preview(test::VideoRenderer::Create(
- "Local Preview", test::flags::Width(), test::flags::Height()));
+ "Local Preview", flags::Width(), flags::Height()));
scoped_ptr<test::VideoRenderer> loopback_video(test::VideoRenderer::Create(
- "Loopback Video", test::flags::Width(), test::flags::Height()));
+ "Loopback Video", flags::Width(), flags::Height()));
test::DirectTransport transport;
Call::Config call_config(&transport);
- call_config.overuse_detection = true;
+ call_config.start_bitrate_bps =
+ static_cast<int>(flags::StartBitrate()) * 1000;
scoped_ptr<Call> call(Call::Create(call_config));
// Loopback, call sends to itself.
@@ -54,45 +73,52 @@ TEST_F(LoopbackTest, Test) {
send_config.local_renderer = local_preview.get();
- // TODO(pbos): static_cast shouldn't be required after mflodman refactors the
- // VideoCodec struct.
- send_config.codec.width = static_cast<uint16_t>(test::flags::Width());
- send_config.codec.height = static_cast<uint16_t>(test::flags::Height());
- send_config.codec.minBitrate =
- static_cast<unsigned int>(test::flags::MinBitrate());
- send_config.codec.startBitrate =
- static_cast<unsigned int>(test::flags::StartBitrate());
- send_config.codec.maxBitrate =
- static_cast<unsigned int>(test::flags::MaxBitrate());
-
- VideoSendStream* send_stream = call->CreateVideoSendStream(send_config);
+ scoped_ptr<VP8Encoder> encoder(VP8Encoder::Create());
+ send_config.encoder_settings.encoder = encoder.get();
+ send_config.encoder_settings.payload_name = "VP8";
+ send_config.encoder_settings.payload_type = 124;
+ std::vector<VideoStream> video_streams = test::CreateVideoStreams(1);
+ VideoStream* stream = &video_streams[0];
+ stream->width = flags::Width();
+ stream->height = flags::Height();
+ stream->min_bitrate_bps = static_cast<int>(flags::MinBitrate()) * 1000;
+ stream->target_bitrate_bps = static_cast<int>(flags::MaxBitrate()) * 1000;
+ stream->max_bitrate_bps = static_cast<int>(flags::MaxBitrate()) * 1000;
+ stream->max_framerate = 30;
+ stream->max_qp = 56;
+
+ VideoSendStream* send_stream =
+ call->CreateVideoSendStream(send_config, video_streams, NULL);
Clock* test_clock = Clock::GetRealTimeClock();
scoped_ptr<test::VideoCapturer> camera(
test::VideoCapturer::Create(send_stream->Input(),
- test::flags::Width(),
- test::flags::Height(),
- test::flags::Fps(),
+ flags::Width(),
+ flags::Height(),
+ flags::Fps(),
test_clock));
VideoReceiveStream::Config receive_config = call->GetDefaultReceiveConfig();
receive_config.rtp.remote_ssrc = send_config.rtp.ssrcs[0];
receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
receive_config.renderer = loopback_video.get();
+ VideoCodec codec =
+ test::CreateDecoderVideoCodec(send_config.encoder_settings);
+ receive_config.codecs.push_back(codec);
VideoReceiveStream* receive_stream =
call->CreateVideoReceiveStream(receive_config);
- receive_stream->StartReceiving();
- send_stream->StartSending();
+ receive_stream->Start();
+ send_stream->Start();
camera->Start();
test::PressEnterToContinue();
camera->Stop();
- send_stream->StopSending();
- receive_stream->StopReceiving();
+ send_stream->Stop();
+ receive_stream->Stop();
call->DestroyVideoReceiveStream(receive_stream);
call->DestroyVideoSendStream(send_stream);
@@ -100,3 +126,11 @@ TEST_F(LoopbackTest, Test) {
transport.StopSending();
}
} // namespace webrtc
+
+int main(int argc, char* argv[]) {
+ ::testing::InitGoogleTest(&argc, argv);
+ google::ParseCommandLineFlags(&argc, &argv, true);
+
+ webrtc::test::RunTest(webrtc::Loopback);
+ return 0;
+}
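
The loopback demo above is now a standalone binary instead of a gtest: resolution, frame rate and bitrates come from command-line flags, the bitrate flags are read as kbps and multiplied by 1000 before they reach the call and stream configs, and main() parses the flags before handing webrtc::Loopback to the test runner. Below is a minimal sketch of the same flag-to-bps pattern in isolation; the <gflags/gflags.h> include path and the printf output are assumptions for illustration, not taken from this commit.

  // Illustrative only: mirrors the DEFINE_int32/accessor pattern used above.
  #include <cstdio>
  #include <gflags/gflags.h>  // Assumed include path for the gflags library.

  DEFINE_int32(start_bitrate, 300, "Video starting bitrate (kbps).");

  // Flags are given in kbps; Call::Config::start_bitrate_bps expects bps.
  static int StartBitrateBps() { return FLAGS_start_bitrate * 1000; }

  int main(int argc, char* argv[]) {
    google::ParseCommandLineFlags(&argc, &argv, true);
    std::printf("start bitrate: %d bps\n", StartBitrateBps());
    return 0;
  }
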
diff --git a/chromium/third_party/webrtc/video/rampup_tests.cc b/chromium/third_party/webrtc/video/rampup_tests.cc
index 0386bd0bff7..94f1c19d93c 100644
--- a/chromium/third_party/webrtc/video/rampup_tests.cc
+++ b/chromium/third_party/webrtc/video/rampup_tests.cc
@@ -10,6 +10,7 @@
#include <assert.h>
#include <map>
+#include <string>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
@@ -27,6 +28,7 @@
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/direct_transport.h"
+#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator_capturer.h"
@@ -36,27 +38,27 @@
namespace webrtc {
namespace {
- static const int kAbsoluteSendTimeExtensionId = 7;
- static const int kMaxPacketSize = 1500;
-}
+static const int kTransmissionTimeOffsetExtensionId = 6;
+static const int kMaxPacketSize = 1500;
+static const unsigned int kSingleStreamTargetBps = 1000000;
class StreamObserver : public newapi::Transport, public RemoteBitrateObserver {
public:
typedef std::map<uint32_t, int> BytesSentMap;
typedef std::map<uint32_t, uint32_t> SsrcMap;
- StreamObserver(int num_expected_ssrcs,
- const SsrcMap& rtx_media_ssrcs,
+ StreamObserver(const SsrcMap& rtx_media_ssrcs,
newapi::Transport* feedback_transport,
Clock* clock)
- : critical_section_(CriticalSectionWrapper::CreateCriticalSection()),
- all_ssrcs_sent_(EventWrapper::Create()),
+ : clock_(clock),
+ test_done_(EventWrapper::Create()),
rtp_parser_(RtpHeaderParser::Create()),
feedback_transport_(feedback_transport),
receive_stats_(ReceiveStatistics::Create(clock)),
- payload_registry_(new RTPPayloadRegistry(
- -1, RTPPayloadStrategy::CreateStrategy(false))),
- clock_(clock),
- num_expected_ssrcs_(num_expected_ssrcs),
+ payload_registry_(
+ new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(false))),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ expected_bitrate_bps_(0),
+ start_bitrate_bps_(0),
rtx_media_ssrcs_(rtx_media_ssrcs),
total_sent_(0),
padding_sent_(0),
@@ -70,38 +72,48 @@ class StreamObserver : public newapi::Transport, public RemoteBitrateObserver {
// be able to produce an RTCP with REMB.
RtpRtcp::Configuration config;
config.receive_statistics = receive_stats_.get();
+ feedback_transport_.Enable();
config.outgoing_transport = &feedback_transport_;
rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(config));
rtp_rtcp_->SetREMBStatus(true);
rtp_rtcp_->SetRTCPStatus(kRtcpNonCompound);
- rtp_parser_->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
- kAbsoluteSendTimeExtensionId);
+ rtp_parser_->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId);
AbsoluteSendTimeRemoteBitrateEstimatorFactory rbe_factory;
const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 30000;
remote_bitrate_estimator_.reset(
- rbe_factory.Create(this, clock, kRemoteBitrateEstimatorMinBitrateBps));
+ rbe_factory.Create(this, clock, kMimdControl,
+ kRemoteBitrateEstimatorMinBitrateBps));
+ }
+
+ void set_expected_bitrate_bps(unsigned int expected_bitrate_bps) {
+ CriticalSectionScoped lock(crit_.get());
+ expected_bitrate_bps_ = expected_bitrate_bps;
+ }
+
+ void set_start_bitrate_bps(unsigned int start_bitrate_bps) {
+ CriticalSectionScoped lock(crit_.get());
+ start_bitrate_bps_ = start_bitrate_bps;
}
virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
- unsigned int bitrate) {
- CriticalSectionScoped lock(critical_section_.get());
- if (ssrcs.size() == num_expected_ssrcs_ && bitrate >= kExpectedBitrateBps) {
+ unsigned int bitrate) OVERRIDE {
+ CriticalSectionScoped lock(crit_.get());
+ assert(expected_bitrate_bps_ > 0);
+ if (start_bitrate_bps_ != 0) {
+ // For tests with an explicitly set start bitrate, verify the first
+ // bitrate estimate is close to the start bitrate and lower than the
+ // test target bitrate. This is to verify a call respects the configured
+ // start bitrate, but due to the BWE implementation we can't guarantee the
+ // first estimate really is as high as the start bitrate.
+ EXPECT_GT(bitrate, 0.9 * start_bitrate_bps_);
+ EXPECT_LT(bitrate, expected_bitrate_bps_);
+ start_bitrate_bps_ = 0;
+ }
+ if (bitrate >= expected_bitrate_bps_) {
+      // When RTX is used, only finish once RTX media has been sent.
if (rtx_media_ssrcs_.empty() || rtx_media_sent_ > 0) {
- const ::testing::TestInfo* const test_info =
- ::testing::UnitTest::GetInstance()->current_test_info();
- webrtc::test::PrintResult("total-sent", "", test_info->name(),
- total_sent_, "bytes", false);
- webrtc::test::PrintResult("padding-sent", "", test_info->name(),
- padding_sent_, "bytes", false);
- webrtc::test::PrintResult("rtx-media-sent", "", test_info->name(),
- rtx_media_sent_, "bytes", false);
- webrtc::test::PrintResult("total-packets-sent", "", test_info->name(),
- total_packets_sent_, "packets", false);
- webrtc::test::PrintResult("padding-packets-sent", "", test_info->name(),
- padding_packets_sent_, "packets", false);
- webrtc::test::PrintResult("rtx-packets-sent", "", test_info->name(),
- rtx_media_packets_sent_, "packets", false);
- all_ssrcs_sent_->Set();
+ TriggerTestDone();
}
}
rtp_rtcp_->SetREMBData(
@@ -110,7 +122,7 @@ class StreamObserver : public newapi::Transport, public RemoteBitrateObserver {
}
virtual bool SendRtp(const uint8_t* packet, size_t length) OVERRIDE {
- CriticalSectionScoped lock(critical_section_.get());
+ CriticalSectionScoped lock(crit_.get());
RTPHeader header;
EXPECT_TRUE(rtp_parser_->Parse(packet, static_cast<int>(length), &header));
receive_stats_->IncomingPacket(header, length, false);
@@ -132,13 +144,14 @@ class StreamObserver : public newapi::Transport, public RemoteBitrateObserver {
uint8_t restored_packet[kMaxPacketSize];
uint8_t* restored_packet_ptr = restored_packet;
int restored_length = static_cast<int>(length);
- payload_registry_->RestoreOriginalPacket(
- &restored_packet_ptr, packet, &restored_length,
- rtx_media_ssrcs_[header.ssrc],
- header);
+ payload_registry_->RestoreOriginalPacket(&restored_packet_ptr,
+ packet,
+ &restored_length,
+ rtx_media_ssrcs_[header.ssrc],
+ header);
length = restored_length;
- EXPECT_TRUE(rtp_parser_->Parse(restored_packet, static_cast<int>(length),
- &header));
+ EXPECT_TRUE(rtp_parser_->Parse(
+ restored_packet, static_cast<int>(length), &header));
} else {
rtp_rtcp_->SetRemoteSSRC(header.ssrc);
}
@@ -149,51 +162,359 @@ class StreamObserver : public newapi::Transport, public RemoteBitrateObserver {
return true;
}
- EventTypeWrapper Wait() { return all_ssrcs_sent_->Wait(120 * 1000); }
+ EventTypeWrapper Wait() { return test_done_->Wait(120 * 1000); }
private:
- static const unsigned int kExpectedBitrateBps = 1200000;
+ void ReportResult(const std::string& measurement,
+ size_t value,
+ const std::string& units) {
+ webrtc::test::PrintResult(
+ measurement, "",
+ ::testing::UnitTest::GetInstance()->current_test_info()->name(),
+ value, units, false);
+ }
- scoped_ptr<CriticalSectionWrapper> critical_section_;
- scoped_ptr<EventWrapper> all_ssrcs_sent_;
- scoped_ptr<RtpHeaderParser> rtp_parser_;
+ void TriggerTestDone() EXCLUSIVE_LOCKS_REQUIRED(crit_) {
+ ReportResult("total-sent", total_sent_, "bytes");
+ ReportResult("padding-sent", padding_sent_, "bytes");
+ ReportResult("rtx-media-sent", rtx_media_sent_, "bytes");
+ ReportResult("total-packets-sent", total_packets_sent_, "packets");
+ ReportResult("padding-packets-sent", padding_packets_sent_, "packets");
+ ReportResult("rtx-packets-sent", rtx_media_packets_sent_, "packets");
+ test_done_->Set();
+ }
+
+ Clock* const clock_;
+ const scoped_ptr<EventWrapper> test_done_;
+ const scoped_ptr<RtpHeaderParser> rtp_parser_;
scoped_ptr<RtpRtcp> rtp_rtcp_;
internal::TransportAdapter feedback_transport_;
- scoped_ptr<ReceiveStatistics> receive_stats_;
- scoped_ptr<RTPPayloadRegistry> payload_registry_;
+ const scoped_ptr<ReceiveStatistics> receive_stats_;
+ const scoped_ptr<RTPPayloadRegistry> payload_registry_;
scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
- Clock* clock_;
- const size_t num_expected_ssrcs_;
- SsrcMap rtx_media_ssrcs_;
- size_t total_sent_;
- size_t padding_sent_;
- size_t rtx_media_sent_;
- int total_packets_sent_;
- int padding_packets_sent_;
- int rtx_media_packets_sent_;
+
+ const scoped_ptr<CriticalSectionWrapper> crit_;
+ unsigned int expected_bitrate_bps_ GUARDED_BY(crit_);
+ unsigned int start_bitrate_bps_ GUARDED_BY(crit_);
+ SsrcMap rtx_media_ssrcs_ GUARDED_BY(crit_);
+ size_t total_sent_ GUARDED_BY(crit_);
+ size_t padding_sent_ GUARDED_BY(crit_);
+ size_t rtx_media_sent_ GUARDED_BY(crit_);
+ int total_packets_sent_ GUARDED_BY(crit_);
+ int padding_packets_sent_ GUARDED_BY(crit_);
+ int rtx_media_packets_sent_ GUARDED_BY(crit_);
};
-class RampUpTest : public ::testing::TestWithParam<bool> {
+class LowRateStreamObserver : public test::DirectTransport,
+ public RemoteBitrateObserver,
+ public PacketReceiver {
+ public:
+ LowRateStreamObserver(newapi::Transport* feedback_transport,
+ Clock* clock,
+ size_t number_of_streams,
+ bool rtx_used)
+ : clock_(clock),
+ number_of_streams_(number_of_streams),
+ rtx_used_(rtx_used),
+ test_done_(EventWrapper::Create()),
+ rtp_parser_(RtpHeaderParser::Create()),
+ feedback_transport_(feedback_transport),
+ receive_stats_(ReceiveStatistics::Create(clock)),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ send_stream_(NULL),
+ test_state_(kFirstRampup),
+ state_start_ms_(clock_->TimeInMilliseconds()),
+ interval_start_ms_(state_start_ms_),
+ last_remb_bps_(0),
+ sent_bytes_(0),
+ total_overuse_bytes_(0),
+ suspended_in_stats_(false) {
+ RtpRtcp::Configuration config;
+ config.receive_statistics = receive_stats_.get();
+ feedback_transport_.Enable();
+ config.outgoing_transport = &feedback_transport_;
+ rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(config));
+ rtp_rtcp_->SetREMBStatus(true);
+ rtp_rtcp_->SetRTCPStatus(kRtcpNonCompound);
+ rtp_parser_->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionTimeOffsetExtensionId);
+ AbsoluteSendTimeRemoteBitrateEstimatorFactory rbe_factory;
+ const uint32_t kRemoteBitrateEstimatorMinBitrateBps = 10000;
+ remote_bitrate_estimator_.reset(
+ rbe_factory.Create(this, clock, kMimdControl,
+ kRemoteBitrateEstimatorMinBitrateBps));
+ forward_transport_config_.link_capacity_kbps =
+ kHighBandwidthLimitBps / 1000;
+ forward_transport_config_.queue_length = 100; // Something large.
+ test::DirectTransport::SetConfig(forward_transport_config_);
+ test::DirectTransport::SetReceiver(this);
+ }
+
+ virtual void SetSendStream(const VideoSendStream* send_stream) {
+ CriticalSectionScoped lock(crit_.get());
+ send_stream_ = send_stream;
+ }
+
+ virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate) {
+ CriticalSectionScoped lock(crit_.get());
+ rtp_rtcp_->SetREMBData(
+ bitrate, static_cast<uint8_t>(ssrcs.size()), &ssrcs[0]);
+ rtp_rtcp_->Process();
+ last_remb_bps_ = bitrate;
+ }
+
+ virtual bool SendRtp(const uint8_t* data, size_t length) OVERRIDE {
+ CriticalSectionScoped lock(crit_.get());
+ sent_bytes_ += length;
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ if (now_ms > interval_start_ms_ + 1000) { // Let at least 1 second pass.
+ // Verify that the send rate was about right.
+ unsigned int average_rate_bps = static_cast<unsigned int>(sent_bytes_) *
+ 8 * 1000 / (now_ms - interval_start_ms_);
+ // TODO(holmer): Why is this failing?
+ // EXPECT_LT(average_rate_bps, last_remb_bps_ * 1.1);
+ if (average_rate_bps > last_remb_bps_ * 1.1) {
+ total_overuse_bytes_ +=
+ sent_bytes_ -
+ last_remb_bps_ / 8 * (now_ms - interval_start_ms_) / 1000;
+ }
+ EvolveTestState(average_rate_bps);
+ interval_start_ms_ = now_ms;
+ sent_bytes_ = 0;
+ }
+ return test::DirectTransport::SendRtp(data, length);
+ }
+
+ virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
+ size_t length) OVERRIDE {
+ CriticalSectionScoped lock(crit_.get());
+ RTPHeader header;
+ EXPECT_TRUE(rtp_parser_->Parse(packet, static_cast<int>(length), &header));
+ receive_stats_->IncomingPacket(header, length, false);
+ remote_bitrate_estimator_->IncomingPacket(
+ clock_->TimeInMilliseconds(), static_cast<int>(length - 12), header);
+ if (remote_bitrate_estimator_->TimeUntilNextProcess() <= 0) {
+ remote_bitrate_estimator_->Process();
+ }
+ suspended_in_stats_ = send_stream_->GetStats().suspended;
+ return DELIVERY_OK;
+ }
+
+ virtual bool SendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+ return true;
+ }
+
+ // Produces a string similar to "1stream_nortx", depending on the values of
+  // number_of_streams_ and rtx_used_.
+ std::string GetModifierString() {
+ std::string str("_");
+ char temp_str[5];
+ sprintf(temp_str, "%i", static_cast<int>(number_of_streams_));
+ str += std::string(temp_str);
+ str += "stream";
+ str += (number_of_streams_ > 1 ? "s" : "");
+ str += "_";
+ str += (rtx_used_ ? "" : "no");
+ str += "rtx";
+ return str;
+ }
+
+ // This method defines the state machine for the ramp up-down-up test.
+ void EvolveTestState(unsigned int bitrate_bps) {
+ int64_t now = clock_->TimeInMilliseconds();
+ CriticalSectionScoped lock(crit_.get());
+ assert(send_stream_ != NULL);
+ switch (test_state_) {
+ case kFirstRampup: {
+ EXPECT_FALSE(suspended_in_stats_);
+ if (bitrate_bps > kExpectedHighBitrateBps) {
+ // The first ramp-up has reached the target bitrate. Change the
+ // channel limit, and move to the next test state.
+ forward_transport_config_.link_capacity_kbps =
+ kLowBandwidthLimitBps / 1000;
+ test::DirectTransport::SetConfig(forward_transport_config_);
+ test_state_ = kLowRate;
+ webrtc::test::PrintResult("ramp_up_down_up",
+ GetModifierString(),
+ "first_rampup",
+ now - state_start_ms_,
+ "ms",
+ false);
+ state_start_ms_ = now;
+ interval_start_ms_ = now;
+ sent_bytes_ = 0;
+ }
+ break;
+ }
+ case kLowRate: {
+ if (bitrate_bps < kExpectedLowBitrateBps && suspended_in_stats_) {
+ // The ramp-down was successful. Change the channel limit back to a
+ // high value, and move to the next test state.
+ forward_transport_config_.link_capacity_kbps =
+ kHighBandwidthLimitBps / 1000;
+ test::DirectTransport::SetConfig(forward_transport_config_);
+ test_state_ = kSecondRampup;
+ webrtc::test::PrintResult("ramp_up_down_up",
+ GetModifierString(),
+ "rampdown",
+ now - state_start_ms_,
+ "ms",
+ false);
+ state_start_ms_ = now;
+ interval_start_ms_ = now;
+ sent_bytes_ = 0;
+ }
+ break;
+ }
+ case kSecondRampup: {
+ if (bitrate_bps > kExpectedHighBitrateBps && !suspended_in_stats_) {
+ webrtc::test::PrintResult("ramp_up_down_up",
+ GetModifierString(),
+ "second_rampup",
+ now - state_start_ms_,
+ "ms",
+ false);
+ webrtc::test::PrintResult("ramp_up_down_up",
+ GetModifierString(),
+ "total_overuse",
+ total_overuse_bytes_,
+ "bytes",
+ false);
+ test_done_->Set();
+ }
+ break;
+ }
+ }
+ }
+
+ EventTypeWrapper Wait() { return test_done_->Wait(120 * 1000); }
+
+ private:
+ static const unsigned int kHighBandwidthLimitBps = 80000;
+ static const unsigned int kExpectedHighBitrateBps = 60000;
+ static const unsigned int kLowBandwidthLimitBps = 20000;
+ static const unsigned int kExpectedLowBitrateBps = 20000;
+ enum TestStates { kFirstRampup, kLowRate, kSecondRampup };
+
+ Clock* const clock_;
+ const size_t number_of_streams_;
+ const bool rtx_used_;
+ const scoped_ptr<EventWrapper> test_done_;
+ const scoped_ptr<RtpHeaderParser> rtp_parser_;
+ scoped_ptr<RtpRtcp> rtp_rtcp_;
+ internal::TransportAdapter feedback_transport_;
+ const scoped_ptr<ReceiveStatistics> receive_stats_;
+ scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+
+ scoped_ptr<CriticalSectionWrapper> crit_;
+ const VideoSendStream* send_stream_ GUARDED_BY(crit_);
+ FakeNetworkPipe::Config forward_transport_config_ GUARDED_BY(crit_);
+ TestStates test_state_ GUARDED_BY(crit_);
+ int64_t state_start_ms_ GUARDED_BY(crit_);
+ int64_t interval_start_ms_ GUARDED_BY(crit_);
+ unsigned int last_remb_bps_ GUARDED_BY(crit_);
+ size_t sent_bytes_ GUARDED_BY(crit_);
+ size_t total_overuse_bytes_ GUARDED_BY(crit_);
+ bool suspended_in_stats_ GUARDED_BY(crit_);
+};
+} // namespace
+
+class RampUpTest : public ::testing::Test {
public:
virtual void SetUp() { reserved_ssrcs_.clear(); }
protected:
- void RunRampUpTest(bool pacing, bool rtx) {
- const size_t kNumberOfStreams = 3;
- std::vector<uint32_t> ssrcs;
- for (size_t i = 0; i < kNumberOfStreams; ++i)
- ssrcs.push_back(static_cast<uint32_t>(i + 1));
- uint32_t kRtxSsrcs[kNumberOfStreams] = {111, 112, 113};
+ void RunRampUpTest(bool rtx,
+ size_t num_streams,
+ unsigned int start_bitrate_bps) {
+ std::vector<uint32_t> ssrcs(GenerateSsrcs(num_streams, 100));
+ std::vector<uint32_t> rtx_ssrcs(GenerateSsrcs(num_streams, 200));
StreamObserver::SsrcMap rtx_ssrc_map;
if (rtx) {
for (size_t i = 0; i < ssrcs.size(); ++i)
- rtx_ssrc_map[kRtxSsrcs[i]] = ssrcs[i];
+ rtx_ssrc_map[rtx_ssrcs[i]] = ssrcs[i];
}
test::DirectTransport receiver_transport;
- int num_expected_ssrcs = kNumberOfStreams + (rtx ? 1 : 0);
- StreamObserver stream_observer(
- num_expected_ssrcs, rtx_ssrc_map, &receiver_transport,
- Clock::GetRealTimeClock());
+ StreamObserver stream_observer(rtx_ssrc_map,
+ &receiver_transport,
+ Clock::GetRealTimeClock());
+
+ Call::Config call_config(&stream_observer);
+ if (start_bitrate_bps != 0) {
+ call_config.start_bitrate_bps = start_bitrate_bps;
+ stream_observer.set_start_bitrate_bps(start_bitrate_bps);
+ }
+ scoped_ptr<Call> call(Call::Create(call_config));
+ VideoSendStream::Config send_config = call->GetDefaultSendConfig();
+
+ receiver_transport.SetReceiver(call->Receiver());
+
+ test::FakeEncoder encoder(Clock::GetRealTimeClock());
+ send_config.encoder_settings.encoder = &encoder;
+ send_config.encoder_settings.payload_type = 125;
+ send_config.encoder_settings.payload_name = "FAKE";
+ std::vector<VideoStream> video_streams =
+ test::CreateVideoStreams(num_streams);
+
+ if (num_streams == 1) {
+ video_streams[0].target_bitrate_bps = 2000000;
+ video_streams[0].max_bitrate_bps = 2000000;
+ }
+
+ send_config.rtp.nack.rtp_history_ms = 1000;
+ send_config.rtp.ssrcs = ssrcs;
+ if (rtx) {
+ send_config.rtp.rtx.payload_type = 96;
+ send_config.rtp.rtx.ssrcs = rtx_ssrcs;
+ send_config.rtp.rtx.pad_with_redundant_payloads = true;
+ }
+ send_config.rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTOffset,
+ kTransmissionTimeOffsetExtensionId));
+
+ if (num_streams == 1) {
+      // For a single stream, ramp up until 1 Mbps.
+ stream_observer.set_expected_bitrate_bps(kSingleStreamTargetBps);
+ } else {
+      // For multiple streams, ramp up until all streams are being sent. That
+      // means enough bitrate to send all the target streams plus the min
+      // bitrate of the last one.
+ int expected_bitrate_bps = video_streams.back().min_bitrate_bps;
+ for (size_t i = 0; i < video_streams.size() - 1; ++i) {
+ expected_bitrate_bps += video_streams[i].target_bitrate_bps;
+ }
+ stream_observer.set_expected_bitrate_bps(expected_bitrate_bps);
+ }
+
+ VideoSendStream* send_stream =
+ call->CreateVideoSendStream(send_config, video_streams, NULL);
+
+ scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
+ test::FrameGeneratorCapturer::Create(send_stream->Input(),
+ video_streams.back().width,
+ video_streams.back().height,
+ video_streams.back().max_framerate,
+ Clock::GetRealTimeClock()));
+
+ send_stream->Start();
+ frame_generator_capturer->Start();
+
+ EXPECT_EQ(kEventSignaled, stream_observer.Wait());
+
+ frame_generator_capturer->Stop();
+ send_stream->Stop();
+
+ call->DestroyVideoSendStream(send_stream);
+ }
+
+ void RunRampUpDownUpTest(size_t number_of_streams, bool rtx) {
+ std::vector<uint32_t> ssrcs;
+ for (size_t i = 0; i < number_of_streams; ++i)
+ ssrcs.push_back(static_cast<uint32_t>(i + 1));
+ test::DirectTransport receiver_transport;
+ LowRateStreamObserver stream_observer(
+ &receiver_transport, Clock::GetRealTimeClock(), number_of_streams, rtx);
Call::Config call_config(&stream_observer);
webrtc::Config webrtc_config;
@@ -205,55 +526,89 @@ class RampUpTest : public ::testing::TestWithParam<bool> {
receiver_transport.SetReceiver(call->Receiver());
test::FakeEncoder encoder(Clock::GetRealTimeClock());
- send_config.encoder = &encoder;
- send_config.internal_source = false;
- test::FakeEncoder::SetCodecSettings(&send_config.codec, kNumberOfStreams);
- send_config.codec.plType = 125;
- send_config.pacing = pacing;
+ send_config.encoder_settings.encoder = &encoder;
+ send_config.encoder_settings.payload_type = 125;
+ send_config.encoder_settings.payload_name = "FAKE";
+ std::vector<VideoStream> video_streams =
+ test::CreateVideoStreams(number_of_streams);
+
send_config.rtp.nack.rtp_history_ms = 1000;
- send_config.rtp.ssrcs.insert(send_config.rtp.ssrcs.begin(), ssrcs.begin(),
- ssrcs.end());
- if (rtx) {
- send_config.rtp.rtx.rtx_payload_type = 96;
- send_config.rtp.rtx.ssrcs.insert(send_config.rtp.rtx.ssrcs.begin(),
- kRtxSsrcs,
- kRtxSsrcs + kNumberOfStreams);
- }
+ send_config.rtp.ssrcs.insert(
+ send_config.rtp.ssrcs.begin(), ssrcs.begin(), ssrcs.end());
send_config.rtp.extensions.push_back(
- RtpExtension(RtpExtension::kAbsSendTime, kAbsoluteSendTimeExtensionId));
+ RtpExtension(RtpExtension::kTOffset,
+ kTransmissionTimeOffsetExtensionId));
+ send_config.suspend_below_min_bitrate = true;
- VideoSendStream* send_stream = call->CreateVideoSendStream(send_config);
+ VideoSendStream* send_stream =
+ call->CreateVideoSendStream(send_config, video_streams, NULL);
+ stream_observer.SetSendStream(send_stream);
+
+ size_t width = 0;
+ size_t height = 0;
+ for (size_t i = 0; i < video_streams.size(); ++i) {
+ size_t stream_width = video_streams[i].width;
+ size_t stream_height = video_streams[i].height;
+ if (stream_width > width)
+ width = stream_width;
+ if (stream_height > height)
+ height = stream_height;
+ }
scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
test::FrameGeneratorCapturer::Create(send_stream->Input(),
- send_config.codec.width,
- send_config.codec.height,
+ width,
+ height,
30,
Clock::GetRealTimeClock()));
- send_stream->StartSending();
+ send_stream->Start();
frame_generator_capturer->Start();
EXPECT_EQ(kEventSignaled, stream_observer.Wait());
+ stream_observer.StopSending();
+ receiver_transport.StopSending();
frame_generator_capturer->Stop();
- send_stream->StopSending();
+ send_stream->Stop();
call->DestroyVideoSendStream(send_stream);
}
+
+ private:
+ std::vector<uint32_t> GenerateSsrcs(size_t num_streams,
+ uint32_t ssrc_offset) {
+ std::vector<uint32_t> ssrcs;
+ for (size_t i = 0; i != num_streams; ++i)
+ ssrcs.push_back(static_cast<uint32_t>(ssrc_offset + i));
+ return ssrcs;
+ }
+
std::map<uint32_t, bool> reserved_ssrcs_;
};
-TEST_F(RampUpTest, WithoutPacing) {
- RunRampUpTest(false, false);
+TEST_F(RampUpTest, SingleStream) {
+ RunRampUpTest(false, 1, 0);
+}
+
+TEST_F(RampUpTest, Simulcast) {
+ RunRampUpTest(false, 3, 0);
}
-TEST_F(RampUpTest, WithPacing) {
- RunRampUpTest(true, false);
+TEST_F(RampUpTest, SimulcastWithRtx) {
+ RunRampUpTest(true, 3, 0);
}
-TEST_F(RampUpTest, WithPacingAndRtx) {
- RunRampUpTest(true, true);
+TEST_F(RampUpTest, SingleStreamWithHighStartBitrate) {
+ RunRampUpTest(false, 1, 0.9 * kSingleStreamTargetBps);
}
+TEST_F(RampUpTest, UpDownUpOneStream) { RunRampUpDownUpTest(1, false); }
+
+TEST_F(RampUpTest, UpDownUpThreeStreams) { RunRampUpDownUpTest(3, false); }
+
+TEST_F(RampUpTest, UpDownUpOneStreamRtx) { RunRampUpDownUpTest(1, true); }
+
+TEST_F(RampUpTest, UpDownUpThreeStreamsRtx) { RunRampUpDownUpTest(3, true); }
+
} // namespace webrtc
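
The rewritten observers above keep every shared counter behind a CriticalSectionWrapper and annotate the fields with GUARDED_BY(crit_), so Clang's thread-safety analysis can flag unlocked access; TriggerTestDone() is additionally marked EXCLUSIVE_LOCKS_REQUIRED(crit_) because it is only called with the lock already held. A minimal sketch of that pattern, using only the WebRTC headers already referenced above (the class and field names are illustrative):

  #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
  #include "webrtc/system_wrappers/interface/scoped_ptr.h"
  #include "webrtc/system_wrappers/interface/thread_annotations.h"

  class PacketCounter {
   public:
    PacketCounter()
        : crit_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
          packets_(0) {}

    void OnPacket() {
      webrtc::CriticalSectionScoped lock(crit_.get());
      IncrementLocked();  // The lock is held, satisfying the annotation below.
    }

   private:
    void IncrementLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_) { ++packets_; }

    const webrtc::scoped_ptr<webrtc::CriticalSectionWrapper> crit_;
    int packets_ GUARDED_BY(crit_);  // The analysis warns on unlocked access.
  };
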
diff --git a/chromium/third_party/webrtc/video/receive_statistics_proxy.cc b/chromium/third_party/webrtc/video/receive_statistics_proxy.cc
new file mode 100644
index 00000000000..60042818e89
--- /dev/null
+++ b/chromium/third_party/webrtc/video/receive_statistics_proxy.cc
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/receive_statistics_proxy.h"
+
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+
+namespace webrtc {
+namespace internal {
+
+ReceiveStatisticsProxy::ReceiveStatisticsProxy(uint32_t ssrc,
+ Clock* clock,
+ ViERTP_RTCP* rtp_rtcp,
+ ViECodec* codec,
+ int channel)
+ : channel_(channel),
+ clock_(clock),
+ codec_(codec),
+ rtp_rtcp_(rtp_rtcp),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ // 1000ms window, scale 1000 for ms to s.
+ decode_fps_estimator_(1000, 1000),
+ renders_fps_estimator_(1000, 1000) {
+ stats_.ssrc = ssrc;
+}
+
+ReceiveStatisticsProxy::~ReceiveStatisticsProxy() {}
+
+VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
+ VideoReceiveStream::Stats stats;
+ {
+ CriticalSectionScoped lock(crit_.get());
+ stats = stats_;
+ }
+ stats.c_name = GetCName();
+ codec_->GetReceiveSideDelay(channel_, &stats.avg_delay_ms);
+ stats.discarded_packets = codec_->GetDiscardedPackets(channel_);
+ codec_->GetReceiveCodecStastistics(
+ channel_, stats.key_frames, stats.delta_frames);
+
+ return stats;
+}
+
+std::string ReceiveStatisticsProxy::GetCName() const {
+ char rtcp_cname[ViERTP_RTCP::KMaxRTCPCNameLength];
+ if (rtp_rtcp_->GetRemoteRTCPCName(channel_, rtcp_cname) != 0)
+ rtcp_cname[0] = '\0';
+ return rtcp_cname;
+}
+
+void ReceiveStatisticsProxy::IncomingRate(const int video_channel,
+ const unsigned int framerate,
+ const unsigned int bitrate) {
+ CriticalSectionScoped lock(crit_.get());
+ stats_.network_frame_rate = framerate;
+ stats_.bitrate_bps = bitrate;
+}
+
+void ReceiveStatisticsProxy::StatisticsUpdated(
+ const webrtc::RtcpStatistics& statistics,
+ uint32_t ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+
+ stats_.rtcp_stats = statistics;
+}
+
+void ReceiveStatisticsProxy::DataCountersUpdated(
+ const webrtc::StreamDataCounters& counters,
+ uint32_t ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+
+ stats_.rtp_stats = counters;
+}
+
+void ReceiveStatisticsProxy::OnDecodedFrame() {
+ uint64_t now = clock_->TimeInMilliseconds();
+
+ CriticalSectionScoped lock(crit_.get());
+ decode_fps_estimator_.Update(1, now);
+ stats_.decode_frame_rate = decode_fps_estimator_.Rate(now);
+}
+
+void ReceiveStatisticsProxy::OnRenderedFrame() {
+ uint64_t now = clock_->TimeInMilliseconds();
+
+ CriticalSectionScoped lock(crit_.get());
+ renders_fps_estimator_.Update(1, now);
+ stats_.render_frame_rate = renders_fps_estimator_.Rate(now);
+}
+
+} // namespace internal
+} // namespace webrtc
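
The decode and render frame rates above come from RateStatistics(1000, 1000): a 1000 ms sliding window with a scale of 1000 so per-millisecond counts are reported as a per-second rate; each decoded or rendered frame calls Update(1, now) and GetStats() later reads Rate(now). A small usage sketch under those assumptions; the return type of Rate() is not visible in this commit and is treated as an unsigned integer here.

  #include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h"
  #include "webrtc/system_wrappers/interface/clock.h"

  // Counts one unit per frame and reports the rate over the trailing second.
  // The estimator is assumed to be constructed as RateStatistics(1000, 1000),
  // exactly like decode_fps_estimator_ above.
  unsigned int OnFrameAndGetFps(webrtc::Clock* clock,
                                webrtc::RateStatistics* fps_estimator) {
    fps_estimator->Update(1, clock->TimeInMilliseconds());
    return fps_estimator->Rate(clock->TimeInMilliseconds());
  }
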
diff --git a/chromium/third_party/webrtc/video/receive_statistics_proxy.h b/chromium/third_party/webrtc/video/receive_statistics_proxy.h
new file mode 100644
index 00000000000..35e5cc3e663
--- /dev/null
+++ b/chromium/third_party/webrtc/video/receive_statistics_proxy.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_RECEIVE_STATISTICS_PROXY_H_
+#define WEBRTC_VIDEO_RECEIVE_STATISTICS_PROXY_H_
+
+#include <string>
+
+#include "webrtc/common_types.h"
+#include "webrtc/frame_callback.h"
+#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
+#include "webrtc/video_receive_stream.h"
+#include "webrtc/video_renderer.h"
+
+namespace webrtc {
+
+class Clock;
+class CriticalSectionWrapper;
+class ViECodec;
+class ViEDecoderObserver;
+
+namespace internal {
+
+class ReceiveStatisticsProxy : public ViEDecoderObserver,
+ public RtcpStatisticsCallback,
+ public StreamDataCountersCallback {
+ public:
+ ReceiveStatisticsProxy(uint32_t ssrc,
+ Clock* clock,
+ ViERTP_RTCP* rtp_rtcp,
+ ViECodec* codec,
+ int channel);
+ virtual ~ReceiveStatisticsProxy();
+
+ VideoReceiveStream::Stats GetStats() const;
+
+ void OnDecodedFrame();
+ void OnRenderedFrame();
+
+ // Overrides ViEDecoderObserver.
+ virtual void IncomingCodecChanged(const int video_channel,
+ const VideoCodec& video_codec) OVERRIDE {}
+ virtual void IncomingRate(const int video_channel,
+ const unsigned int framerate,
+ const unsigned int bitrate) OVERRIDE;
+ virtual void DecoderTiming(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms) OVERRIDE {}
+ virtual void RequestNewKeyFrame(const int video_channel) OVERRIDE {}
+
+  // Overrides RtcpStatisticsCallback.
+ virtual void StatisticsUpdated(const webrtc::RtcpStatistics& statistics,
+ uint32_t ssrc) OVERRIDE;
+
+ // Overrides StreamDataCountersCallback.
+ virtual void DataCountersUpdated(const webrtc::StreamDataCounters& counters,
+ uint32_t ssrc) OVERRIDE;
+
+ private:
+ std::string GetCName() const;
+
+ const int channel_;
+ Clock* const clock_;
+ ViECodec* const codec_;
+ ViERTP_RTCP* const rtp_rtcp_;
+
+ scoped_ptr<CriticalSectionWrapper> crit_;
+ VideoReceiveStream::Stats stats_ GUARDED_BY(crit_);
+ RateStatistics decode_fps_estimator_ GUARDED_BY(crit_);
+ RateStatistics renders_fps_estimator_ GUARDED_BY(crit_);
+};
+
+} // namespace internal
+} // namespace webrtc
+#endif // WEBRTC_VIDEO_RECEIVE_STATISTICS_PROXY_H_
diff --git a/chromium/third_party/webrtc/video/send_statistics_proxy.cc b/chromium/third_party/webrtc/video/send_statistics_proxy.cc
new file mode 100644
index 00000000000..c9bd05c1709
--- /dev/null
+++ b/chromium/third_party/webrtc/video/send_statistics_proxy.cc
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/send_statistics_proxy.h"
+
+#include <map>
+
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+
+namespace webrtc {
+
+SendStatisticsProxy::SendStatisticsProxy(
+ const VideoSendStream::Config& config,
+ SendStatisticsProxy::StatsProvider* stats_provider)
+ : config_(config),
+ stats_provider_(stats_provider),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()) {
+}
+
+SendStatisticsProxy::~SendStatisticsProxy() {}
+
+void SendStatisticsProxy::OutgoingRate(const int video_channel,
+ const unsigned int framerate,
+ const unsigned int bitrate) {
+ CriticalSectionScoped lock(crit_.get());
+ stats_.encode_frame_rate = framerate;
+}
+
+void SendStatisticsProxy::SuspendChange(int video_channel, bool is_suspended) {
+ CriticalSectionScoped lock(crit_.get());
+ stats_.suspended = is_suspended;
+}
+
+void SendStatisticsProxy::CapturedFrameRate(const int capture_id,
+ const unsigned char frame_rate) {
+ CriticalSectionScoped lock(crit_.get());
+ stats_.input_frame_rate = frame_rate;
+}
+
+VideoSendStream::Stats SendStatisticsProxy::GetStats() const {
+ VideoSendStream::Stats stats;
+ {
+ CriticalSectionScoped lock(crit_.get());
+ stats = stats_;
+ }
+ stats_provider_->GetSendSideDelay(&stats);
+ stats.c_name = stats_provider_->GetCName();
+ return stats;
+}
+
+StreamStats* SendStatisticsProxy::GetStatsEntry(uint32_t ssrc) {
+ std::map<uint32_t, StreamStats>::iterator it = stats_.substreams.find(ssrc);
+ if (it != stats_.substreams.end())
+ return &it->second;
+
+ if (std::find(config_.rtp.ssrcs.begin(), config_.rtp.ssrcs.end(), ssrc) ==
+ config_.rtp.ssrcs.end())
+ return NULL;
+
+ return &stats_.substreams[ssrc]; // Insert new entry and return ptr.
+}
+
+void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ StreamStats* stats = GetStatsEntry(ssrc);
+ if (stats == NULL)
+ return;
+
+ stats->rtcp_stats = statistics;
+}
+
+void SendStatisticsProxy::DataCountersUpdated(
+ const StreamDataCounters& counters,
+ uint32_t ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ StreamStats* stats = GetStatsEntry(ssrc);
+ if (stats == NULL)
+ return;
+
+ stats->rtp_stats = counters;
+}
+
+void SendStatisticsProxy::Notify(const BitrateStatistics& bitrate,
+ uint32_t ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ StreamStats* stats = GetStatsEntry(ssrc);
+ if (stats == NULL)
+ return;
+
+ stats->bitrate_bps = bitrate.bitrate_bps;
+}
+
+void SendStatisticsProxy::FrameCountUpdated(FrameType frame_type,
+ uint32_t frame_count,
+ const unsigned int ssrc) {
+ CriticalSectionScoped lock(crit_.get());
+ StreamStats* stats = GetStatsEntry(ssrc);
+ if (stats == NULL)
+ return;
+
+ switch (frame_type) {
+ case kVideoFrameDelta:
+ stats->delta_frames = frame_count;
+ break;
+ case kVideoFrameKey:
+ stats->key_frames = frame_count;
+ break;
+ case kFrameEmpty:
+ case kAudioFrameSpeech:
+ case kAudioFrameCN:
+ break;
+ }
+}
+
+} // namespace webrtc
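
SendStatisticsProxy follows the same shape as the receive-side proxy: each RTP/RTCP callback writes into a stats struct guarded by a critical section, and GetStats() copies that struct under the lock before filling in the values that are fetched on demand (send-side delay and CNAME via the StatsProvider). The sketch below shows that shape with standard C++ primitives instead of WebRTC's wrappers; the names are illustrative.

  #include <mutex>
  #include <string>

  struct Stats {
    int encode_frame_rate = 0;
    std::string c_name;
  };

  class StatsProxySketch {
   public:
    // Called from encoder/RTP callbacks: only touch the guarded copy.
    void OnEncodeFrameRate(int fps) {
      std::lock_guard<std::mutex> lock(mutex_);
      stats_.encode_frame_rate = fps;
    }

    // Called by the stats poller: copy under the lock, then add on-demand data.
    Stats GetStats(const std::string& cname) const {
      Stats copy;
      {
        std::lock_guard<std::mutex> lock(mutex_);
        copy = stats_;
      }
      copy.c_name = cname;  // Filled outside the lock, like GetCName() above.
      return copy;
    }

   private:
    mutable std::mutex mutex_;
    Stats stats_;
  };
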
diff --git a/chromium/third_party/webrtc/video/send_statistics_proxy.h b/chromium/third_party/webrtc/video/send_statistics_proxy.h
new file mode 100644
index 00000000000..a1ff14cd668
--- /dev/null
+++ b/chromium/third_party/webrtc/video/send_statistics_proxy.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_SEND_STATISTICS_PROXY_H_
+#define WEBRTC_VIDEO_SEND_STATISTICS_PROXY_H_
+
+#include <string>
+
+#include "webrtc/common_types.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_engine/include/vie_capture.h"
+#include "webrtc/video_send_stream.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class SendStatisticsProxy : public RtcpStatisticsCallback,
+ public StreamDataCountersCallback,
+ public BitrateStatisticsObserver,
+ public FrameCountObserver,
+ public ViEEncoderObserver,
+ public ViECaptureObserver {
+ public:
+ class StatsProvider {
+ protected:
+ StatsProvider() {}
+ virtual ~StatsProvider() {}
+
+ public:
+ virtual bool GetSendSideDelay(VideoSendStream::Stats* stats) = 0;
+ virtual std::string GetCName() = 0;
+ };
+
+ SendStatisticsProxy(const VideoSendStream::Config& config,
+ StatsProvider* stats_provider);
+ virtual ~SendStatisticsProxy();
+
+ VideoSendStream::Stats GetStats() const;
+
+ protected:
+ // From RtcpStatisticsCallback.
+ virtual void StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) OVERRIDE;
+ // From StreamDataCountersCallback.
+ virtual void DataCountersUpdated(const StreamDataCounters& counters,
+ uint32_t ssrc) OVERRIDE;
+
+ // From BitrateStatisticsObserver.
+ virtual void Notify(const BitrateStatistics& stats, uint32_t ssrc) OVERRIDE;
+
+ // From FrameCountObserver.
+ virtual void FrameCountUpdated(FrameType frame_type,
+ uint32_t frame_count,
+ const unsigned int ssrc) OVERRIDE;
+
+ // From ViEEncoderObserver.
+ virtual void OutgoingRate(const int video_channel,
+ const unsigned int framerate,
+ const unsigned int bitrate) OVERRIDE;
+
+ virtual void SuspendChange(int video_channel, bool is_suspended) OVERRIDE;
+
+ // From ViECaptureObserver.
+ virtual void BrightnessAlarm(const int capture_id,
+ const Brightness brightness) OVERRIDE {}
+
+ virtual void CapturedFrameRate(const int capture_id,
+ const unsigned char frame_rate) OVERRIDE;
+
+ virtual void NoPictureAlarm(const int capture_id,
+ const CaptureAlarm alarm) OVERRIDE {}
+
+ private:
+ StreamStats* GetStatsEntry(uint32_t ssrc) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ const VideoSendStream::Config config_;
+ StatsProvider* const stats_provider_;
+ scoped_ptr<CriticalSectionWrapper> crit_;
+ VideoSendStream::Stats stats_ GUARDED_BY(crit_);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_VIDEO_SEND_STATISTICS_PROXY_H_
diff --git a/chromium/third_party/webrtc/video/send_statistics_proxy_unittest.cc b/chromium/third_party/webrtc/video/send_statistics_proxy_unittest.cc
new file mode 100644
index 00000000000..8f35ee4d03d
--- /dev/null
+++ b/chromium/third_party/webrtc/video/send_statistics_proxy_unittest.cc
@@ -0,0 +1,244 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file includes unit tests for SendStatisticsProxy.
+#include "webrtc/video/send_statistics_proxy.h"
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace webrtc {
+
+class SendStatisticsProxyTest : public ::testing::Test,
+ protected SendStatisticsProxy::StatsProvider {
+ public:
+ SendStatisticsProxyTest() : avg_delay_ms_(0), max_delay_ms_(0) {}
+ virtual ~SendStatisticsProxyTest() {}
+
+ protected:
+ virtual void SetUp() {
+ statistics_proxy_.reset(
+ new SendStatisticsProxy(GetTestConfig(), this));
+ config_ = GetTestConfig();
+ expected_ = VideoSendStream::Stats();
+ }
+
+ VideoSendStream::Config GetTestConfig() {
+ VideoSendStream::Config config;
+ config.rtp.ssrcs.push_back(17);
+ config.rtp.ssrcs.push_back(42);
+ return config;
+ }
+
+ virtual bool GetSendSideDelay(VideoSendStream::Stats* stats) OVERRIDE {
+ stats->avg_delay_ms = avg_delay_ms_;
+ stats->max_delay_ms = max_delay_ms_;
+ return true;
+ }
+
+ virtual std::string GetCName() { return cname_; }
+
+ void ExpectEqual(VideoSendStream::Stats one, VideoSendStream::Stats other) {
+ EXPECT_EQ(one.avg_delay_ms, other.avg_delay_ms);
+ EXPECT_EQ(one.input_frame_rate, other.input_frame_rate);
+ EXPECT_EQ(one.encode_frame_rate, other.encode_frame_rate);
+ EXPECT_EQ(one.avg_delay_ms, other.avg_delay_ms);
+ EXPECT_EQ(one.max_delay_ms, other.max_delay_ms);
+ EXPECT_EQ(one.suspended, other.suspended);
+ EXPECT_EQ(one.c_name, other.c_name);
+
+ EXPECT_EQ(one.substreams.size(), other.substreams.size());
+ for (std::map<uint32_t, StreamStats>::const_iterator it =
+ one.substreams.begin();
+ it != one.substreams.end();
+ ++it) {
+ std::map<uint32_t, StreamStats>::const_iterator corresponding_it =
+ other.substreams.find(it->first);
+ ASSERT_TRUE(corresponding_it != other.substreams.end());
+ const StreamStats& a = it->second;
+ const StreamStats& b = corresponding_it->second;
+
+ EXPECT_EQ(a.key_frames, b.key_frames);
+ EXPECT_EQ(a.delta_frames, b.delta_frames);
+ EXPECT_EQ(a.bitrate_bps, b.bitrate_bps);
+
+ EXPECT_EQ(a.rtp_stats.bytes, b.rtp_stats.bytes);
+ EXPECT_EQ(a.rtp_stats.header_bytes, b.rtp_stats.header_bytes);
+ EXPECT_EQ(a.rtp_stats.padding_bytes, b.rtp_stats.padding_bytes);
+ EXPECT_EQ(a.rtp_stats.packets, b.rtp_stats.packets);
+ EXPECT_EQ(a.rtp_stats.retransmitted_packets,
+ b.rtp_stats.retransmitted_packets);
+ EXPECT_EQ(a.rtp_stats.fec_packets, b.rtp_stats.fec_packets);
+
+ EXPECT_EQ(a.rtcp_stats.fraction_lost, b.rtcp_stats.fraction_lost);
+ EXPECT_EQ(a.rtcp_stats.cumulative_lost, b.rtcp_stats.cumulative_lost);
+ EXPECT_EQ(a.rtcp_stats.extended_max_sequence_number,
+ b.rtcp_stats.extended_max_sequence_number);
+ EXPECT_EQ(a.rtcp_stats.jitter, b.rtcp_stats.jitter);
+ }
+ }
+
+ scoped_ptr<SendStatisticsProxy> statistics_proxy_;
+ VideoSendStream::Config config_;
+ int avg_delay_ms_;
+ int max_delay_ms_;
+ std::string cname_;
+ VideoSendStream::Stats expected_;
+ typedef std::map<uint32_t, StreamStats>::const_iterator StreamIterator;
+};
+
+TEST_F(SendStatisticsProxyTest, RtcpStatistics) {
+ RtcpStatisticsCallback* callback = statistics_proxy_.get();
+ for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
+ it != config_.rtp.ssrcs.end();
+ ++it) {
+ const uint32_t ssrc = *it;
+ StreamStats& ssrc_stats = expected_.substreams[ssrc];
+
+ // Add statistics with some arbitrary, but unique, numbers.
+ uint32_t offset = ssrc * sizeof(RtcpStatistics);
+ ssrc_stats.rtcp_stats.cumulative_lost = offset;
+ ssrc_stats.rtcp_stats.extended_max_sequence_number = offset + 1;
+ ssrc_stats.rtcp_stats.fraction_lost = offset + 2;
+ ssrc_stats.rtcp_stats.jitter = offset + 3;
+ callback->StatisticsUpdated(ssrc_stats.rtcp_stats, ssrc);
+ }
+
+ VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+ ExpectEqual(expected_, stats);
+}
+
+TEST_F(SendStatisticsProxyTest, FrameRates) {
+ const int capture_fps = 31;
+ const int encode_fps = 29;
+
+ ViECaptureObserver* capture_observer = statistics_proxy_.get();
+ capture_observer->CapturedFrameRate(0, capture_fps);
+ ViEEncoderObserver* encoder_observer = statistics_proxy_.get();
+ encoder_observer->OutgoingRate(0, encode_fps, 0);
+
+ VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+ EXPECT_EQ(capture_fps, stats.input_frame_rate);
+ EXPECT_EQ(encode_fps, stats.encode_frame_rate);
+}
+
+TEST_F(SendStatisticsProxyTest, Suspended) {
+ // Verify that the value is false by default.
+ EXPECT_FALSE(statistics_proxy_->GetStats().suspended);
+
+ // Verify that we can set it to true.
+ ViEEncoderObserver* encoder_observer = statistics_proxy_.get();
+ encoder_observer->SuspendChange(0, true);
+ EXPECT_TRUE(statistics_proxy_->GetStats().suspended);
+
+ // Verify that we can set it back to false again.
+ encoder_observer->SuspendChange(0, false);
+ EXPECT_FALSE(statistics_proxy_->GetStats().suspended);
+}
+
+TEST_F(SendStatisticsProxyTest, FrameCounts) {
+ FrameCountObserver* observer = statistics_proxy_.get();
+ for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
+ it != config_.rtp.ssrcs.end();
+ ++it) {
+ const uint32_t ssrc = *it;
+ // Add statistics with some arbitrary, but unique, numbers.
+ StreamStats& stats = expected_.substreams[ssrc];
+ uint32_t offset = ssrc * sizeof(StreamStats);
+ stats.key_frames = offset;
+ stats.delta_frames = offset + 1;
+ observer->FrameCountUpdated(kVideoFrameKey, stats.key_frames, ssrc);
+ observer->FrameCountUpdated(kVideoFrameDelta, stats.delta_frames, ssrc);
+ }
+
+ VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+ ExpectEqual(expected_, stats);
+}
+
+TEST_F(SendStatisticsProxyTest, DataCounters) {
+ StreamDataCountersCallback* callback = statistics_proxy_.get();
+ for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
+ it != config_.rtp.ssrcs.end();
+ ++it) {
+ const uint32_t ssrc = *it;
+ StreamDataCounters& counters = expected_.substreams[ssrc].rtp_stats;
+ // Add statistics with some arbitrary, but unique, numbers.
+ uint32_t offset = ssrc * sizeof(StreamDataCounters);
+ counters.bytes = offset;
+ counters.header_bytes = offset + 1;
+ counters.fec_packets = offset + 2;
+ counters.padding_bytes = offset + 3;
+ counters.retransmitted_packets = offset + 4;
+ counters.packets = offset + 5;
+ callback->DataCountersUpdated(counters, ssrc);
+ }
+
+ VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+ ExpectEqual(expected_, stats);
+}
+
+TEST_F(SendStatisticsProxyTest, Bitrate) {
+ BitrateStatisticsObserver* observer = statistics_proxy_.get();
+ for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
+ it != config_.rtp.ssrcs.end();
+ ++it) {
+ const uint32_t ssrc = *it;
+ BitrateStatistics bitrate;
+ bitrate.bitrate_bps = ssrc;
+ observer->Notify(bitrate, ssrc);
+ expected_.substreams[ssrc].bitrate_bps = ssrc;
+ }
+
+ VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+ ExpectEqual(expected_, stats);
+}
+
+TEST_F(SendStatisticsProxyTest, StreamStats) {
+ avg_delay_ms_ = 1;
+ max_delay_ms_ = 2;
+ cname_ = "qwertyuiop";
+
+ VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+
+ EXPECT_EQ(avg_delay_ms_, stats.avg_delay_ms);
+ EXPECT_EQ(max_delay_ms_, stats.max_delay_ms);
+ EXPECT_EQ(cname_, stats.c_name);
+}
+
+TEST_F(SendStatisticsProxyTest, NoSubstreams) {
+  uint32_t excluded_ssrc =
+ *std::max_element(config_.rtp.ssrcs.begin(), config_.rtp.ssrcs.end()) + 1;
+ // From RtcpStatisticsCallback.
+ RtcpStatistics rtcp_stats;
+ RtcpStatisticsCallback* rtcp_callback = statistics_proxy_.get();
+  rtcp_callback->StatisticsUpdated(rtcp_stats, excluded_ssrc);
+
+ // From StreamDataCountersCallback.
+ StreamDataCounters rtp_stats;
+ StreamDataCountersCallback* rtp_callback = statistics_proxy_.get();
+  rtp_callback->DataCountersUpdated(rtp_stats, excluded_ssrc);
+
+ // From BitrateStatisticsObserver.
+ BitrateStatistics bitrate;
+ BitrateStatisticsObserver* bitrate_observer = statistics_proxy_.get();
+  bitrate_observer->Notify(bitrate, excluded_ssrc);
+
+ // From FrameCountObserver.
+ FrameCountObserver* fps_observer = statistics_proxy_.get();
+  fps_observer->FrameCountUpdated(kVideoFrameKey, 1, excluded_ssrc);
+
+ VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+ EXPECT_TRUE(stats.substreams.empty());
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/transport_adapter.cc b/chromium/third_party/webrtc/video/transport_adapter.cc
index 7cc6a0a4340..6f27d9972a5 100644
--- a/chromium/third_party/webrtc/video/transport_adapter.cc
+++ b/chromium/third_party/webrtc/video/transport_adapter.cc
@@ -14,11 +14,14 @@ namespace webrtc {
namespace internal {
TransportAdapter::TransportAdapter(newapi::Transport* transport)
- : transport_(transport) {}
+ : transport_(transport), enabled_(0) {}
int TransportAdapter::SendPacket(int /*channel*/,
const void* packet,
int length) {
+ if (enabled_.Value() == 0)
+ return false;
+
bool success = transport_->SendRtp(static_cast<const uint8_t*>(packet),
static_cast<size_t>(length));
return success ? length : -1;
@@ -27,10 +30,21 @@ int TransportAdapter::SendPacket(int /*channel*/,
int TransportAdapter::SendRTCPPacket(int /*channel*/,
const void* packet,
int length) {
+ if (enabled_.Value() == 0)
+ return false;
+
bool success = transport_->SendRtcp(static_cast<const uint8_t*>(packet),
static_cast<size_t>(length));
return success ? length : -1;
}
+void TransportAdapter::Enable() {
+  // If this exchange fails it means enabled_ was already true; there is no
+  // need to check the result or retry.
+ enabled_.CompareExchange(1, 0);
+}
+
+void TransportAdapter::Disable() { enabled_.CompareExchange(0, 1); }
+
} // namespace internal
} // namespace webrtc
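
The adapter above now drops outgoing packets unless it has been explicitly enabled, and Enable()/Disable() flip the flag with a single compare-exchange, which is why a failed exchange simply means the flag already held the desired value. A minimal sketch of the same gate with std::atomic, shown as an illustration of the idea rather than WebRTC's Atomic32 API:

  #include <atomic>
  #include <cstdint>

  class SendGate {
   public:
    SendGate() : enabled_(0) {}

    // Set to 1 only if currently 0; a failed exchange means it was already 1.
    void Enable() {
      int32_t expected = 0;
      enabled_.compare_exchange_strong(expected, 1);
    }

    // Set to 0 only if currently 1.
    void Disable() {
      int32_t expected = 1;
      enabled_.compare_exchange_strong(expected, 0);
    }

    // Mirrors the early return in SendPacket()/SendRTCPPacket().
    bool AllowSend() const { return enabled_.load() != 0; }

   private:
    std::atomic<int32_t> enabled_;
  };
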
diff --git a/chromium/third_party/webrtc/video/transport_adapter.h b/chromium/third_party/webrtc/video/transport_adapter.h
index 3686f38a92f..79f995be9b6 100644
--- a/chromium/third_party/webrtc/video/transport_adapter.h
+++ b/chromium/third_party/webrtc/video/transport_adapter.h
@@ -11,6 +11,7 @@
#define WEBRTC_VIDEO_ENGINE_INTERNAL_TRANSPORT_ADAPTER_H_
#include "webrtc/common_types.h"
+#include "webrtc/system_wrappers/interface/atomic32.h"
#include "webrtc/transport.h"
namespace webrtc {
@@ -25,8 +26,12 @@ class TransportAdapter : public webrtc::Transport {
virtual int SendRTCPPacket(int /*channel*/, const void* packet, int length)
OVERRIDE;
+ void Enable();
+ void Disable();
+
private:
newapi::Transport *transport_;
+ Atomic32 enabled_;
};
} // namespace internal
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/video_receive_stream.cc b/chromium/third_party/webrtc/video/video_receive_stream.cc
index 8d4dfd64874..9c3298c55d6 100644
--- a/chromium/third_party/webrtc/video/video_receive_stream.cc
+++ b/chromium/third_party/webrtc/video/video_receive_stream.cc
@@ -13,8 +13,12 @@
#include <assert.h>
#include <stdlib.h>
+#include <string>
+
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+#include "webrtc/video/receive_statistics_proxy.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
@@ -36,6 +40,7 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
: transport_adapter_(transport),
encoded_frame_proxy_(config.pre_decode_callback),
config_(config),
+ clock_(Clock::GetRealTimeClock()),
channel_(-1) {
video_engine_base_ = ViEBase::GetInterface(video_engine);
video_engine_base_->CreateReceiveChannel(channel_, base_channel);
@@ -57,10 +62,21 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
}
assert(config_.rtp.remote_ssrc != 0);
+ // TODO(pbos): What's an appropriate local_ssrc for receive-only streams?
assert(config_.rtp.local_ssrc != 0);
assert(config_.rtp.remote_ssrc != config_.rtp.local_ssrc);
rtp_rtcp_->SetLocalSSRC(channel_, config_.rtp.local_ssrc);
+ // TODO(pbos): Support multiple RTX, per video payload.
+ Config::Rtp::RtxMap::const_iterator it = config_.rtp.rtx.begin();
+ if (it != config_.rtp.rtx.end()) {
+ assert(it->second.ssrc != 0);
+ assert(it->second.payload_type != 0);
+
+ rtp_rtcp_->SetRemoteSSRCType(channel_, kViEStreamTypeRtx, it->second.ssrc);
+ rtp_rtcp_->SetRtxReceivePayloadType(channel_, it->second.payload_type);
+ }
+
rtp_rtcp_->SetRembStatus(channel_, false, config_.rtp.remb);
for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) {
@@ -84,6 +100,32 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
codec_ = ViECodec::GetInterface(video_engine);
+ if (config_.rtp.fec.ulpfec_payload_type != -1) {
+ // ULPFEC without RED doesn't make sense.
+ assert(config_.rtp.fec.red_payload_type != -1);
+ VideoCodec codec;
+ memset(&codec, 0, sizeof(codec));
+ codec.codecType = kVideoCodecULPFEC;
+ strcpy(codec.plName, "ulpfec");
+ codec.plType = config_.rtp.fec.ulpfec_payload_type;
+ if (codec_->SetReceiveCodec(channel_, codec) != 0) {
+ LOG(LS_ERROR) << "Could not set ULPFEC codec. This shouldn't happen.";
+ abort();
+ }
+ }
+ if (config_.rtp.fec.red_payload_type != -1) {
+ VideoCodec codec;
+ memset(&codec, 0, sizeof(codec));
+ codec.codecType = kVideoCodecRED;
+ strcpy(codec.plName, "red");
+ codec.plType = config_.rtp.fec.red_payload_type;
+ if (codec_->SetReceiveCodec(channel_, codec) != 0) {
+ LOG(LS_ERROR) << "Could not set RED codec. This shouldn't happen.";
+ abort();
+ }
+ }
+
+ assert(!config_.codecs.empty());
for (size_t i = 0; i < config_.codecs.size(); ++i) {
if (codec_->SetReceiveCodec(channel_, config_.codecs[i]) != 0) {
// TODO(pbos): Abort gracefully, this can be a runtime error.
@@ -92,16 +134,29 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
}
}
+ stats_proxy_.reset(new ReceiveStatisticsProxy(
+ config_.rtp.local_ssrc, clock_, rtp_rtcp_, codec_, channel_));
+
+ if (rtp_rtcp_->RegisterReceiveChannelRtcpStatisticsCallback(
+ channel_, stats_proxy_.get()) != 0)
+ abort();
+
+ if (rtp_rtcp_->RegisterReceiveChannelRtpStatisticsCallback(
+ channel_, stats_proxy_.get()) != 0)
+ abort();
+
+ if (codec_->RegisterDecoderObserver(channel_, *stats_proxy_) != 0)
+ abort();
+
external_codec_ = ViEExternalCodec::GetInterface(video_engine);
for (size_t i = 0; i < config_.external_decoders.size(); ++i) {
- ExternalVideoDecoder* decoder = &config_.external_decoders[i];
+ const ExternalVideoDecoder& decoder = config_.external_decoders[i];
if (external_codec_->RegisterExternalReceiveCodec(
channel_,
- decoder->payload_type,
- decoder->decoder,
- decoder->renderer,
- decoder->expected_delay_ms) !=
- 0) {
+ decoder.payload_type,
+ decoder.decoder,
+ decoder.renderer,
+ decoder.expected_delay_ms) != 0) {
// TODO(pbos): Abort gracefully? Can this be a runtime error?
abort();
}
@@ -122,10 +177,11 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
image_process_->RegisterPreDecodeImageCallback(channel_,
&encoded_frame_proxy_);
}
- image_process_->RegisterPreRenderCallback(channel_,
- config_.pre_render_callback);
+ image_process_->RegisterPreRenderCallback(channel_, this);
- clock_ = Clock::GetRealTimeClock();
+ if (config.rtp.rtcp_xr.receiver_reference_time_report) {
+ rtp_rtcp_->SetRtcpXrRrtrStatus(channel_, true);
+ }
}
VideoReceiveStream::~VideoReceiveStream() {
@@ -145,24 +201,35 @@ VideoReceiveStream::~VideoReceiveStream() {
image_process_->Release();
video_engine_base_->Release();
external_codec_->Release();
+ codec_->DeregisterDecoderObserver(channel_);
+ rtp_rtcp_->DeregisterReceiveChannelRtpStatisticsCallback(channel_,
+ stats_proxy_.get());
+ rtp_rtcp_->DeregisterReceiveChannelRtcpStatisticsCallback(channel_,
+ stats_proxy_.get());
codec_->Release();
network_->Release();
render_->Release();
rtp_rtcp_->Release();
}
-void VideoReceiveStream::StartReceiving() {
+void VideoReceiveStream::Start() {
+ transport_adapter_.Enable();
if (render_->StartRender(channel_) != 0)
abort();
if (video_engine_base_->StartReceive(channel_) != 0)
abort();
}
-void VideoReceiveStream::StopReceiving() {
+void VideoReceiveStream::Stop() {
if (render_->StopRender(channel_) != 0)
abort();
if (video_engine_base_->StopReceive(channel_) != 0)
abort();
+ transport_adapter_.Disable();
+}
+
+VideoReceiveStream::Stats VideoReceiveStream::GetStats() const {
+ return stats_proxy_->GetStats();
}
void VideoReceiveStream::GetCurrentReceiveCodec(VideoCodec* receive_codec) {
@@ -176,17 +243,25 @@ bool VideoReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) {
bool VideoReceiveStream::DeliverRtp(const uint8_t* packet, size_t length) {
return network_->ReceivedRTPPacket(
- channel_, packet, static_cast<int>(length),
- PacketTime()) == 0;
+ channel_, packet, static_cast<int>(length), PacketTime()) == 0;
+}
+
+void VideoReceiveStream::FrameCallback(I420VideoFrame* video_frame) {
+ stats_proxy_->OnDecodedFrame();
+
+ if (config_.pre_render_callback)
+ config_.pre_render_callback->FrameCallback(video_frame);
}
int32_t VideoReceiveStream::RenderFrame(const uint32_t stream_id,
I420VideoFrame& video_frame) {
- if (config_.renderer == NULL)
- return 0;
+ if (config_.renderer != NULL)
+ config_.renderer->RenderFrame(
+ video_frame,
+ video_frame.render_time_ms() - clock_->TimeInMilliseconds());
+
+ stats_proxy_->OnRenderedFrame();
- config_.renderer->RenderFrame(
- video_frame, video_frame.render_time_ms() - clock_->TimeInMilliseconds());
return 0;
}
} // namespace internal
diff --git a/chromium/third_party/webrtc/video/video_receive_stream.h b/chromium/third_party/webrtc/video/video_receive_stream.h
index 65fd887d339..2a3c6dfed38 100644
--- a/chromium/third_party/webrtc/video/video_receive_stream.h
+++ b/chromium/third_party/webrtc/video/video_receive_stream.h
@@ -16,7 +16,9 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_render/include/video_render_defines.h"
#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"
+#include "webrtc/video/receive_statistics_proxy.h"
#include "webrtc/video/transport_adapter.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_receive_stream.h"
@@ -36,7 +38,9 @@ class VoiceEngine;
namespace internal {
class VideoReceiveStream : public webrtc::VideoReceiveStream,
+ public I420FrameCallback,
public VideoRenderCallback {
+
public:
VideoReceiveStream(webrtc::VideoEngine* video_engine,
const VideoReceiveStream::Config& config,
@@ -45,11 +49,16 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
int base_channel);
virtual ~VideoReceiveStream();
- virtual void StartReceiving() OVERRIDE;
- virtual void StopReceiving() OVERRIDE;
+ virtual void Start() OVERRIDE;
+ virtual void Stop() OVERRIDE;
+ virtual Stats GetStats() const OVERRIDE;
virtual void GetCurrentReceiveCodec(VideoCodec* receive_codec) OVERRIDE;
+ // Overrides I420FrameCallback.
+ virtual void FrameCallback(I420VideoFrame* video_frame) OVERRIDE;
+
+ // Overrides VideoRenderCallback.
virtual int32_t RenderFrame(const uint32_t stream_id,
I420VideoFrame& video_frame) OVERRIDE;
@@ -60,8 +69,8 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
private:
TransportAdapter transport_adapter_;
EncodedFrameCallbackAdapter encoded_frame_proxy_;
- VideoReceiveStream::Config config_;
- Clock* clock_;
+ const VideoReceiveStream::Config config_;
+ Clock* const clock_;
ViEBase* video_engine_base_;
ViECodec* codec_;
@@ -71,6 +80,8 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
ViERTP_RTCP* rtp_rtcp_;
ViEImageProcess* image_process_;
+ scoped_ptr<ReceiveStatisticsProxy> stats_proxy_;
+
int channel_;
};
} // namespace internal
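
Because the internal class now also derives from I420FrameCallback (header above), decoded frames pass through the stream before any application callback: statistics are updated first, then Config::pre_render_callback is invoked. A minimal sketch of such a callback; the class name MyPreRenderHook is hypothetical and not taken from the patch:

    #include "webrtc/common_video/interface/i420_video_frame.h"
    #include "webrtc/frame_callback.h"

    // Sketch only: an application hook assigned to
    // VideoReceiveStream::Config::pre_render_callback. The stream forwards to
    // it from its own FrameCallback() after counting the decoded frame.
    class MyPreRenderHook : public webrtc::I420FrameCallback {
     public:
      virtual void FrameCallback(webrtc::I420VideoFrame* video_frame) {
        // Inspect or modify the decoded frame before it is rendered.
      }
    };
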
diff --git a/chromium/third_party/webrtc/video/video_send_stream.cc b/chromium/third_party/webrtc/video/video_send_stream.cc
index e18b3462336..f9bbd572375 100644
--- a/chromium/third_party/webrtc/video/video_send_stream.cc
+++ b/chromium/third_party/webrtc/video/video_send_stream.cc
@@ -10,10 +10,12 @@
#include "webrtc/video/video_send_stream.h"
+#include <sstream>
#include <string>
#include <vector>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
@@ -21,83 +23,119 @@
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
+#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
-namespace internal {
+std::string
+VideoSendStream::Config::EncoderSettings::ToString() const {
+ std::stringstream ss;
+ ss << "{payload_name: " << payload_name;
+ ss << ", payload_type: " << payload_type;
+ if (encoder != NULL)
+ ss << ", encoder: " << (encoder != NULL ? "(encoder)" : "NULL");
+ ss << '}';
+ return ss.str();
+}
-// Super simple and temporary overuse logic. This will move to the application
-// as soon as the new API allows changing send codec on the fly.
-class ResolutionAdaptor : public webrtc::CpuOveruseObserver {
- public:
- ResolutionAdaptor(ViECodec* codec, int channel, size_t width, size_t height)
- : codec_(codec),
- channel_(channel),
- max_width_(width),
- max_height_(height) {}
-
- virtual ~ResolutionAdaptor() {}
-
- virtual void OveruseDetected() OVERRIDE {
- VideoCodec codec;
- if (codec_->GetSendCodec(channel_, codec) != 0)
- return;
-
- if (codec.width / 2 < min_width || codec.height / 2 < min_height)
- return;
-
- codec.width /= 2;
- codec.height /= 2;
- codec_->SetSendCodec(channel_, codec);
+std::string VideoSendStream::Config::Rtp::Rtx::ToString()
+ const {
+ std::stringstream ss;
+ ss << "{ssrcs: {";
+ for (size_t i = 0; i < ssrcs.size(); ++i) {
+ ss << ssrcs[i];
+ if (i != ssrcs.size() - 1)
+ ss << "}, {";
}
+ ss << '}';
- virtual void NormalUsage() OVERRIDE {
- VideoCodec codec;
- if (codec_->GetSendCodec(channel_, codec) != 0)
- return;
-
- if (codec.width * 2u > max_width_ || codec.height * 2u > max_height_)
- return;
+ ss << ", payload_type: " << payload_type;
+ ss << '}';
+ return ss.str();
+}
- codec.width *= 2;
- codec.height *= 2;
- codec_->SetSendCodec(channel_, codec);
+std::string VideoSendStream::Config::Rtp::ToString() const {
+ std::stringstream ss;
+ ss << "{ssrcs: {";
+ for (size_t i = 0; i < ssrcs.size(); ++i) {
+ ss << ssrcs[i];
+ if (i != ssrcs.size() - 1)
+ ss << "}, {";
}
+ ss << '}';
- private:
- // Temporary and arbitrary chosen minimum resolution.
- static const size_t min_width = 160;
- static const size_t min_height = 120;
+ ss << ", max_packet_size: " << max_packet_size;
+ if (min_transmit_bitrate_bps != 0)
+ ss << ", min_transmit_bitrate_bps: " << min_transmit_bitrate_bps;
- ViECodec* codec_;
- const int channel_;
+ ss << ", extensions: {";
+ for (size_t i = 0; i < extensions.size(); ++i) {
+ ss << extensions[i].ToString();
+ if (i != extensions.size() - 1)
+ ss << "}, {";
+ }
+ ss << '}';
+
+ if (nack.rtp_history_ms != 0)
+ ss << ", nack.rtp_history_ms: " << nack.rtp_history_ms;
+ if (fec.ulpfec_payload_type != -1 || fec.red_payload_type != -1)
+ ss << ", fec: " << fec.ToString();
+ if (rtx.payload_type != 0 || !rtx.ssrcs.empty())
+ ss << ", rtx: " << rtx.ToString();
+ if (c_name != "")
+ ss << ", c_name: " << c_name;
+ ss << '}';
+ return ss.str();
+}
- const size_t max_width_;
- const size_t max_height_;
-};
+std::string VideoSendStream::Config::ToString() const {
+ std::stringstream ss;
+ ss << "{encoder_settings: " << encoder_settings.ToString();
+ ss << ", rtp: " << rtp.ToString();
+ if (pre_encode_callback != NULL)
+ ss << ", (pre_encode_callback)";
+ if (post_encode_callback != NULL)
+ ss << ", (post_encode_callback)";
+ if (local_renderer != NULL) {
+ ss << ", (local_renderer, render_delay_ms: " << render_delay_ms << ")";
+ }
+ if (target_delay_ms > 0)
+ ss << ", target_delay_ms: " << target_delay_ms;
+ if (suspend_below_min_bitrate)
+ ss << ", suspend_below_min_bitrate: on";
+ ss << '}';
+ return ss.str();
+}
+namespace internal {
VideoSendStream::VideoSendStream(newapi::Transport* transport,
- bool overuse_detection,
+ CpuOveruseObserver* overuse_observer,
webrtc::VideoEngine* video_engine,
const VideoSendStream::Config& config,
- int base_channel)
+ const std::vector<VideoStream> video_streams,
+ const void* encoder_settings,
+ int base_channel,
+ int start_bitrate_bps)
: transport_adapter_(transport),
encoded_frame_proxy_(config.post_encode_callback),
- codec_lock_(CriticalSectionWrapper::CreateCriticalSection()),
config_(config),
+ start_bitrate_bps_(start_bitrate_bps),
external_codec_(NULL),
- channel_(-1) {
+ channel_(-1),
+ stats_proxy_(new SendStatisticsProxy(config, this)) {
video_engine_base_ = ViEBase::GetInterface(video_engine);
video_engine_base_->CreateChannel(channel_, base_channel);
assert(channel_ != -1);
+ assert(start_bitrate_bps_ > 0);
rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine);
assert(rtp_rtcp_ != NULL);
assert(config_.rtp.ssrcs.size() > 0);
- if (config_.suspend_below_min_bitrate)
- config_.pacing = true;
- rtp_rtcp_->SetTransmissionSmoothingStatus(channel_, config_.pacing);
+
+ assert(config_.rtp.min_transmit_bitrate_bps >= 0);
+ rtp_rtcp_->SetMinTransmitBitrate(channel_,
+ config_.rtp.min_transmit_bitrate_bps / 1000);
for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) {
const std::string& extension = config_.rtp.extensions[i].name;
@@ -154,26 +192,24 @@ VideoSendStream::VideoSendStream(newapi::Transport* transport,
network_->SetMTU(channel_,
static_cast<unsigned int>(config_.rtp.max_packet_size + 28));
- if (config.encoder) {
- external_codec_ = ViEExternalCodec::GetInterface(video_engine);
- if (external_codec_->RegisterExternalSendCodec(
- channel_, config.codec.plType, config.encoder,
- config.internal_source) != 0) {
- abort();
- }
+ assert(config.encoder_settings.encoder != NULL);
+ assert(config.encoder_settings.payload_type >= 0);
+ assert(config.encoder_settings.payload_type <= 127);
+ external_codec_ = ViEExternalCodec::GetInterface(video_engine);
+ if (external_codec_->RegisterExternalSendCodec(
+ channel_,
+ config.encoder_settings.payload_type,
+ config.encoder_settings.encoder,
+ false) != 0) {
+ abort();
}
codec_ = ViECodec::GetInterface(video_engine);
- if (!SetCodec(config_.codec))
+ if (!ReconfigureVideoEncoder(video_streams, encoder_settings))
abort();
- if (overuse_detection) {
- overuse_observer_.reset(
- new ResolutionAdaptor(codec_, channel_, config_.codec.width,
- config_.codec.height));
- video_engine_base_->RegisterCpuOveruseObserver(channel_,
- overuse_observer_.get());
- }
+ if (overuse_observer)
+ video_engine_base_->RegisterCpuOveruseObserver(channel_, overuse_observer);
image_process_ = ViEImageProcess::GetInterface(video_engine);
image_process_->RegisterPreEncodeCallback(channel_,
@@ -183,12 +219,31 @@ VideoSendStream::VideoSendStream(newapi::Transport* transport,
&encoded_frame_proxy_);
}
- if (config.suspend_below_min_bitrate) {
+ if (config_.suspend_below_min_bitrate)
codec_->SuspendBelowMinBitrate(channel_);
- }
+
+ rtp_rtcp_->RegisterSendChannelRtcpStatisticsCallback(channel_,
+ stats_proxy_.get());
+ rtp_rtcp_->RegisterSendChannelRtpStatisticsCallback(channel_,
+ stats_proxy_.get());
+ rtp_rtcp_->RegisterSendBitrateObserver(channel_, stats_proxy_.get());
+ rtp_rtcp_->RegisterSendFrameCountObserver(channel_, stats_proxy_.get());
+
+ codec_->RegisterEncoderObserver(channel_, *stats_proxy_);
+ capture_->RegisterObserver(capture_id_, *stats_proxy_);
}
VideoSendStream::~VideoSendStream() {
+ capture_->DeregisterObserver(capture_id_);
+ codec_->DeregisterEncoderObserver(channel_);
+
+ rtp_rtcp_->DeregisterSendFrameCountObserver(channel_, stats_proxy_.get());
+ rtp_rtcp_->DeregisterSendBitrateObserver(channel_, stats_proxy_.get());
+ rtp_rtcp_->DeregisterSendChannelRtpStatisticsCallback(channel_,
+ stats_proxy_.get());
+ rtp_rtcp_->DeregisterSendChannelRtcpStatisticsCallback(channel_,
+ stats_proxy_.get());
+
image_process_->DeRegisterPreEncodeCallback(channel_);
network_->DeregisterSendTransport(channel_);
@@ -196,10 +251,8 @@ VideoSendStream::~VideoSendStream() {
capture_->DisconnectCaptureDevice(channel_);
capture_->ReleaseCaptureDevice(capture_id_);
- if (external_codec_) {
- external_codec_->DeRegisterExternalSendCodec(channel_,
- config_.codec.plType);
- }
+ external_codec_->DeRegisterExternalSendCodec(
+ channel_, config_.encoder_settings.payload_type);
video_engine_base_->DeleteChannel(channel_);
@@ -213,47 +266,114 @@ VideoSendStream::~VideoSendStream() {
rtp_rtcp_->Release();
}
-void VideoSendStream::PutFrame(const I420VideoFrame& frame) {
- input_frame_.CopyFrame(frame);
- SwapFrame(&input_frame_);
-}
-
void VideoSendStream::SwapFrame(I420VideoFrame* frame) {
- // TODO(pbos): Warn if frame is "too far" into the future, or too old. This
- // would help detect if frame's being used without NTP.
- // TO REVIEWER: Is there any good check for this? Should it be
- // skipped?
- if (frame != &input_frame_)
- input_frame_.SwapFrame(frame);
-
// TODO(pbos): Local rendering should not be done on the capture thread.
if (config_.local_renderer != NULL)
- config_.local_renderer->RenderFrame(input_frame_, 0);
+ config_.local_renderer->RenderFrame(*frame, 0);
- external_capture_->SwapFrame(&input_frame_);
+ external_capture_->SwapFrame(frame);
}
VideoSendStreamInput* VideoSendStream::Input() { return this; }
-void VideoSendStream::StartSending() {
- if (video_engine_base_->StartSend(channel_) != 0)
- abort();
- if (video_engine_base_->StartReceive(channel_) != 0)
- abort();
+void VideoSendStream::Start() {
+ transport_adapter_.Enable();
+ video_engine_base_->StartSend(channel_);
+ video_engine_base_->StartReceive(channel_);
}
-void VideoSendStream::StopSending() {
- if (video_engine_base_->StopSend(channel_) != 0)
- abort();
- if (video_engine_base_->StopReceive(channel_) != 0)
- abort();
+void VideoSendStream::Stop() {
+ video_engine_base_->StopSend(channel_);
+ video_engine_base_->StopReceive(channel_);
+ transport_adapter_.Disable();
}
-bool VideoSendStream::SetCodec(const VideoCodec& codec) {
- assert(config_.rtp.ssrcs.size() >= codec.numberOfSimulcastStreams);
+bool VideoSendStream::ReconfigureVideoEncoder(
+ const std::vector<VideoStream>& streams,
+ const void* encoder_settings) {
+ assert(!streams.empty());
+ assert(config_.rtp.ssrcs.size() >= streams.size());
+ // TODO(pbos): Wire encoder_settings.
+ assert(encoder_settings == NULL);
+
+ VideoCodec video_codec;
+ memset(&video_codec, 0, sizeof(video_codec));
+ video_codec.codecType =
+ (config_.encoder_settings.payload_name == "VP8" ? kVideoCodecVP8
+ : kVideoCodecGeneric);
+
+ if (video_codec.codecType == kVideoCodecVP8) {
+ video_codec.codecSpecific.VP8.resilience = kResilientStream;
+ video_codec.codecSpecific.VP8.numberOfTemporalLayers = 1;
+ video_codec.codecSpecific.VP8.denoisingOn = true;
+ video_codec.codecSpecific.VP8.errorConcealmentOn = false;
+ video_codec.codecSpecific.VP8.automaticResizeOn = false;
+ video_codec.codecSpecific.VP8.frameDroppingOn = true;
+ video_codec.codecSpecific.VP8.keyFrameInterval = 3000;
+ }
- CriticalSectionScoped crit(codec_lock_.get());
- if (codec_->SetSendCodec(channel_, codec) != 0)
+ strncpy(video_codec.plName,
+ config_.encoder_settings.payload_name.c_str(),
+ kPayloadNameSize - 1);
+ video_codec.plName[kPayloadNameSize - 1] = '\0';
+ video_codec.plType = config_.encoder_settings.payload_type;
+ video_codec.numberOfSimulcastStreams =
+ static_cast<unsigned char>(streams.size());
+ video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
+ assert(streams.size() <= kMaxSimulcastStreams);
+ for (size_t i = 0; i < streams.size(); ++i) {
+ SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
+ assert(streams[i].width > 0);
+ assert(streams[i].height > 0);
+ assert(streams[i].max_framerate > 0);
+ // Different framerates not supported per stream at the moment.
+ assert(streams[i].max_framerate == streams[0].max_framerate);
+ assert(streams[i].min_bitrate_bps >= 0);
+ assert(streams[i].target_bitrate_bps >= streams[i].min_bitrate_bps);
+ assert(streams[i].max_bitrate_bps >= streams[i].target_bitrate_bps);
+ assert(streams[i].max_qp >= 0);
+
+ sim_stream->width = static_cast<unsigned short>(streams[i].width);
+ sim_stream->height = static_cast<unsigned short>(streams[i].height);
+ sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
+ sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
+ sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
+ sim_stream->qpMax = streams[i].max_qp;
+ // TODO(pbos): Implement mapping for temporal layers.
+ assert(streams[i].temporal_layers.empty());
+
+ video_codec.width = std::max(video_codec.width,
+ static_cast<unsigned short>(streams[i].width));
+ video_codec.height = std::max(
+ video_codec.height, static_cast<unsigned short>(streams[i].height));
+ video_codec.minBitrate =
+ std::min(video_codec.minBitrate,
+ static_cast<unsigned int>(streams[i].min_bitrate_bps / 1000));
+ video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000;
+ video_codec.qpMax = std::max(video_codec.qpMax,
+ static_cast<unsigned int>(streams[i].max_qp));
+ }
+ video_codec.startBitrate =
+ static_cast<unsigned int>(start_bitrate_bps_) / 1000;
+
+ if (video_codec.minBitrate < kViEMinCodecBitrate)
+ video_codec.minBitrate = kViEMinCodecBitrate;
+ if (video_codec.maxBitrate < kViEMinCodecBitrate)
+ video_codec.maxBitrate = kViEMinCodecBitrate;
+ if (video_codec.startBitrate < video_codec.minBitrate)
+ video_codec.startBitrate = video_codec.minBitrate;
+ if (video_codec.startBitrate > video_codec.maxBitrate)
+ video_codec.startBitrate = video_codec.maxBitrate;
+
+ assert(streams[0].max_framerate > 0);
+ video_codec.maxFramerate = streams[0].max_framerate;
+
+ if (codec_->SetSendCodec(channel_, video_codec) != 0)
return false;
for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) {
@@ -263,9 +383,10 @@ bool VideoSendStream::SetCodec(const VideoCodec& codec) {
static_cast<unsigned char>(i));
}
- config_.codec = codec;
- if (config_.rtp.rtx.ssrcs.empty())
+ if (config_.rtp.rtx.ssrcs.empty()) {
+ assert(!config_.rtp.rtx.pad_with_redundant_payloads);
return true;
+ }
// Set up RTX.
assert(config_.rtp.rtx.ssrcs.size() == config_.rtp.ssrcs.size());
@@ -276,22 +397,35 @@ bool VideoSendStream::SetCodec(const VideoCodec& codec) {
static_cast<unsigned char>(i));
}
- if (config_.rtp.rtx.rtx_payload_type != 0) {
- rtp_rtcp_->SetRtxSendPayloadType(channel_,
- config_.rtp.rtx.rtx_payload_type);
+ if (config_.rtp.rtx.pad_with_redundant_payloads) {
+ rtp_rtcp_->SetPadWithRedundantPayloads(channel_, true);
}
- return true;
-}
+ assert(config_.rtp.rtx.payload_type >= 0);
+ rtp_rtcp_->SetRtxSendPayloadType(channel_, config_.rtp.rtx.payload_type);
-VideoCodec VideoSendStream::GetCodec() {
- CriticalSectionScoped crit(codec_lock_.get());
- return config_.codec;
+ return true;
}
bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
return network_->ReceivedRTCPPacket(
channel_, packet, static_cast<int>(length)) == 0;
}
+
+VideoSendStream::Stats VideoSendStream::GetStats() const {
+ return stats_proxy_->GetStats();
+}
+
+bool VideoSendStream::GetSendSideDelay(VideoSendStream::Stats* stats) {
+ return codec_->GetSendSideDelay(
+ channel_, &stats->avg_delay_ms, &stats->max_delay_ms);
+}
+
+std::string VideoSendStream::GetCName() {
+ char rtcp_cname[ViERTP_RTCP::KMaxRTCPCNameLength];
+ rtp_rtcp_->GetRTCPCName(channel_, rtcp_cname);
+ return rtcp_cname;
+}
+
} // namespace internal
} // namespace webrtc
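
ReconfigureVideoEncoder() above folds the per-stream bitrates (given in bps) into one VideoCodec in kbps: minBitrate becomes the smallest per-stream minimum, maxBitrate the sum of the per-stream maxima, and startBitrate is then clamped into that range (after both bounds are raised to kViEMinCodecBitrate if needed). A standalone sketch of the clamping with made-up numbers, not values from the patch:

    #include <algorithm>

    // Sketch only: mirrors the start-bitrate clamping performed in
    // ReconfigureVideoEncoder(); all values in kbps.
    unsigned int ClampStartBitrateKbps(unsigned int start_kbps,
                                       unsigned int min_kbps,
                                       unsigned int max_kbps) {
      return std::min(std::max(start_kbps, min_kbps), max_kbps);
    }

    // Example (hypothetical streams): minima of 30 and 200 kbps and maxima of
    // 300 and 1200 kbps give minBitrate = 30 and maxBitrate = 1500, so a
    // 2000 kbps start bitrate is clamped to 1500.
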
diff --git a/chromium/third_party/webrtc/video/video_send_stream.h b/chromium/third_party/webrtc/video/video_send_stream.h
index 7433e8a3b97..ed77665c1a9 100644
--- a/chromium/third_party/webrtc/video/video_send_stream.h
+++ b/chromium/third_party/webrtc/video/video_send_stream.h
@@ -11,10 +11,9 @@
#ifndef WEBRTC_VIDEO_VIDEO_SEND_STREAM_H_
#define WEBRTC_VIDEO_VIDEO_SEND_STREAM_H_
-#include <vector>
-
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"
+#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video/transport_adapter.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
@@ -22,6 +21,7 @@
namespace webrtc {
+class CpuOveruseObserver;
class VideoEngine;
class ViEBase;
class ViECapture;
@@ -34,41 +34,47 @@ class ViERTP_RTCP;
namespace internal {
-class ResolutionAdaptor;
-
class VideoSendStream : public webrtc::VideoSendStream,
- public VideoSendStreamInput {
+ public VideoSendStreamInput,
+ public SendStatisticsProxy::StatsProvider {
public:
VideoSendStream(newapi::Transport* transport,
- bool overuse_detection,
+ CpuOveruseObserver* overuse_observer,
webrtc::VideoEngine* video_engine,
const VideoSendStream::Config& config,
- int base_channel);
+ const std::vector<VideoStream> video_streams,
+ const void* encoder_settings,
+ int base_channel,
+ int start_bitrate);
virtual ~VideoSendStream();
- virtual void PutFrame(const I420VideoFrame& frame) OVERRIDE;
+ virtual void Start() OVERRIDE;
+ virtual void Stop() OVERRIDE;
- virtual void SwapFrame(I420VideoFrame* frame) OVERRIDE;
+ virtual bool ReconfigureVideoEncoder(const std::vector<VideoStream>& streams,
+ const void* encoder_settings) OVERRIDE;
- virtual VideoSendStreamInput* Input() OVERRIDE;
+ virtual Stats GetStats() const OVERRIDE;
- virtual void StartSending() OVERRIDE;
+ bool DeliverRtcp(const uint8_t* packet, size_t length);
- virtual void StopSending() OVERRIDE;
+ // From VideoSendStreamInput.
+ virtual void SwapFrame(I420VideoFrame* frame) OVERRIDE;
- virtual bool SetCodec(const VideoCodec& codec) OVERRIDE;
- virtual VideoCodec GetCodec() OVERRIDE;
+ // From webrtc::VideoSendStream.
+ virtual VideoSendStreamInput* Input() OVERRIDE;
- public:
- bool DeliverRtcp(const uint8_t* packet, size_t length);
+ protected:
+ // From SendStatisticsProxy::StreamStatsProvider.
+ virtual bool GetSendSideDelay(VideoSendStream::Stats* stats) OVERRIDE;
+ virtual std::string GetCName() OVERRIDE;
private:
- I420VideoFrame input_frame_;
TransportAdapter transport_adapter_;
EncodedFrameCallbackAdapter encoded_frame_proxy_;
- scoped_ptr<CriticalSectionWrapper> codec_lock_;
- VideoSendStream::Config config_;
+ const VideoSendStream::Config config_;
+ const int start_bitrate_bps_;
ViEBase* video_engine_base_;
ViECapture* capture_;
@@ -81,7 +87,8 @@ class VideoSendStream : public webrtc::VideoSendStream,
int channel_;
int capture_id_;
- scoped_ptr<ResolutionAdaptor> overuse_observer_;
+
+ const scoped_ptr<SendStatisticsProxy> stats_proxy_;
};
} // namespace internal
} // namespace webrtc
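
The tests below create streams through the reworked factory call, where the encoder settings live in the Config and the simulcast layout is a separate std::vector<VideoStream>. A condensed sketch of that flow (not code from the patch; the Call header and surrounding setup are assumed, and the payload type and SSRC are hypothetical):

    #include <vector>

    #include "webrtc/test/encoder_settings.h"
    #include "webrtc/test/fake_encoder.h"
    #include "webrtc/video_send_stream.h"

    // Sketch only: create, start and tear down a send stream via the new
    // CreateVideoSendStream(config, video_streams, encoder_settings) shape.
    void RunSendStream(webrtc::Call* call, webrtc::test::FakeEncoder* encoder) {
      webrtc::VideoSendStream::Config config = call->GetDefaultSendConfig();
      config.encoder_settings.encoder = encoder;
      config.encoder_settings.payload_name = "VP8";
      config.encoder_settings.payload_type = 124;  // Hypothetical.
      config.rtp.ssrcs.push_back(0x12345678);      // Hypothetical.

      std::vector<webrtc::VideoStream> streams =
          webrtc::test::CreateVideoStreams(1);
      webrtc::VideoSendStream* stream = call->CreateVideoSendStream(
          config, streams, NULL /* encoder_settings */);
      stream->Start();
      // Frames would be fed through stream->Input()->SwapFrame(&frame).
      stream->Stop();
      call->DestroyVideoSendStream(stream);
    }
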
diff --git a/chromium/third_party/webrtc/video/video_send_stream_tests.cc b/chromium/third_party/webrtc/video/video_send_stream_tests.cc
index 25f334f900b..dcbcfe1bb87 100644
--- a/chromium/third_party/webrtc/video/video_send_stream_tests.cc
+++ b/chromium/third_party/webrtc/video/video_send_stream_tests.cc
@@ -15,6 +15,7 @@
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/frame_callback.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -23,56 +24,60 @@
#include "webrtc/system_wrappers/interface/sleep.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/test/direct_transport.h"
-#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/configurable_frame_size_encoder.h"
+#include "webrtc/test/encoder_settings.h"
+#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator_capturer.h"
#include "webrtc/test/null_transport.h"
#include "webrtc/test/rtp_rtcp_observer.h"
+#include "webrtc/test/testsupport/perf_test.h"
#include "webrtc/video/transport_adapter.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
+enum VideoFormat { kGeneric, kVP8, };
+
class VideoSendStreamTest : public ::testing::Test {
public:
- VideoSendStreamTest() : fake_encoder_(Clock::GetRealTimeClock()) {}
+ VideoSendStreamTest()
+ : send_stream_(NULL), fake_encoder_(Clock::GetRealTimeClock()) {}
protected:
void RunSendTest(Call* call,
- const VideoSendStream::Config& config,
test::RtpRtcpObserver* observer) {
- send_stream_ = call->CreateVideoSendStream(config);
+ send_stream_ =
+ call->CreateVideoSendStream(send_config_, video_streams_, NULL);
scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
test::FrameGeneratorCapturer::Create(
send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
- send_stream_->StartSending();
+ send_stream_->Start();
frame_generator_capturer->Start();
EXPECT_EQ(kEventSignaled, observer->Wait());
observer->StopSending();
frame_generator_capturer->Stop();
- send_stream_->StopSending();
+ send_stream_->Stop();
call->DestroyVideoSendStream(send_stream_);
}
- VideoSendStream::Config GetSendTestConfig(Call* call,
- size_t number_of_streams) {
- assert(number_of_streams <= kNumSendSsrcs);
- VideoSendStream::Config config = call->GetDefaultSendConfig();
- config.encoder = &fake_encoder_;
- config.internal_source = false;
- for (size_t i = 0; i < number_of_streams; ++i)
- config.rtp.ssrcs.push_back(kSendSsrcs[i]);
- config.pacing = true;
- test::FakeEncoder::SetCodecSettings(&config.codec, number_of_streams);
- config.codec.plType = kFakeSendPayloadType;
- return config;
+ void CreateTestConfig(Call* call, size_t num_streams) {
+ assert(num_streams <= kNumSendSsrcs);
+ send_config_ = call->GetDefaultSendConfig();
+ send_config_.encoder_settings.encoder = &fake_encoder_;
+ send_config_.encoder_settings.payload_name = "FAKE";
+ send_config_.encoder_settings.payload_type = kFakeSendPayloadType;
+ video_streams_ = test::CreateVideoStreams(num_streams);
+ for (size_t i = 0; i < num_streams; ++i)
+ send_config_.rtp.ssrcs.push_back(kSendSsrcs[i]);
}
void TestNackRetransmission(uint32_t retransmit_ssrc,
- uint8_t retransmit_payload_type,
- bool enable_pacing);
+ uint8_t retransmit_payload_type);
+
+ void TestPacketFragmentationSize(VideoFormat format, bool with_fec);
void SendsSetSsrcs(size_t num_ssrcs, bool send_single_ssrc_first);
@@ -84,6 +89,8 @@ class VideoSendStreamTest : public ::testing::Test {
static const uint32_t kSendRtxSsrc;
static const uint32_t kSendSsrcs[kNumSendSsrcs];
+ VideoSendStream::Config send_config_;
+ std::vector<VideoStream> video_streams_;
VideoSendStream* send_stream_;
test::FakeEncoder fake_encoder_;
};
@@ -92,8 +99,8 @@ const uint8_t VideoSendStreamTest::kSendPayloadType = 100;
const uint8_t VideoSendStreamTest::kFakeSendPayloadType = 125;
const uint8_t VideoSendStreamTest::kSendRtxPayloadType = 98;
const uint32_t VideoSendStreamTest::kSendRtxSsrc = 0xBADCAFE;
-const uint32_t VideoSendStreamTest::kSendSsrcs[kNumSendSsrcs] = { 0xC0FFED,
- 0xC0FFEE, 0xC0FFEF };
+const uint32_t VideoSendStreamTest::kSendSsrcs[kNumSendSsrcs] = {
+ 0xC0FFED, 0xC0FFEE, 0xC0FFEF};
const uint32_t VideoSendStreamTest::kSendSsrc =
VideoSendStreamTest::kSendSsrcs[0];
@@ -120,10 +127,10 @@ void VideoSendStreamTest::SendsSetSsrcs(size_t num_ssrcs,
// to fail on TSan as the codec gets set before the SSRCs are
// set up and some frames are sent on a random-generated SSRC
// before the correct SSRC gets set.
- //EXPECT_TRUE(valid_ssrcs_[header.ssrc])
+ // EXPECT_TRUE(valid_ssrcs_[header.ssrc])
// << "Received unknown SSRC: " << header.ssrc;
//
- //if (!valid_ssrcs_[header.ssrc])
+ // if (!valid_ssrcs_[header.ssrc])
// observation_complete_->Set();
if (!is_observed_[header.ssrc]) {
@@ -151,26 +158,27 @@ void VideoSendStreamTest::SendsSetSsrcs(size_t num_ssrcs,
Call::Config call_config(observer.SendTransport());
scoped_ptr<Call> call(Call::Create(call_config));
- VideoSendStream::Config send_config =
- GetSendTestConfig(call.get(), num_ssrcs);
+ CreateTestConfig(call.get(), num_ssrcs);
if (num_ssrcs > 1) {
// Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
- for (size_t i = 0; i < num_ssrcs; ++i) {
- send_config.codec.simulcastStream[i].minBitrate = 10;
- send_config.codec.simulcastStream[i].targetBitrate = 10;
- send_config.codec.simulcastStream[i].maxBitrate = 10;
+ for (size_t i = 0; i < video_streams_.size(); ++i) {
+ video_streams_[i].min_bitrate_bps = 10000;
+ video_streams_[i].target_bitrate_bps = 10000;
+ video_streams_[i].max_bitrate_bps = 10000;
}
}
+ std::vector<VideoStream> all_streams = video_streams_;
if (send_single_ssrc_first)
- send_config.codec.numberOfSimulcastStreams = 1;
+ video_streams_.resize(1);
- send_stream_ = call->CreateVideoSendStream(send_config);
+ send_stream_ =
+ call->CreateVideoSendStream(send_config_, video_streams_, NULL);
scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
test::FrameGeneratorCapturer::Create(
send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
- send_stream_->StartSending();
+ send_stream_->Start();
frame_generator_capturer->Start();
EXPECT_EQ(kEventSignaled, observer.Wait())
@@ -179,26 +187,50 @@ void VideoSendStreamTest::SendsSetSsrcs(size_t num_ssrcs,
if (send_single_ssrc_first) {
// Set full simulcast and continue with the rest of the SSRCs.
- send_config.codec.numberOfSimulcastStreams =
- static_cast<unsigned char>(num_ssrcs);
- send_stream_->SetCodec(send_config.codec);
+ send_stream_->ReconfigureVideoEncoder(all_streams, NULL);
EXPECT_EQ(kEventSignaled, observer.Wait())
<< "Timed out while waiting on additional SSRCs.";
}
observer.StopSending();
frame_generator_capturer->Stop();
- send_stream_->StopSending();
+ send_stream_->Stop();
call->DestroyVideoSendStream(send_stream_);
-};
+}
+
+TEST_F(VideoSendStreamTest, CanStartStartedStream) {
+ test::NullTransport transport;
+ Call::Config call_config(&transport);
+ scoped_ptr<Call> call(Call::Create(call_config));
+
+ CreateTestConfig(call.get(), 1);
+ VideoSendStream* stream =
+ call->CreateVideoSendStream(send_config_, video_streams_, NULL);
+ stream->Start();
+ stream->Start();
+ call->DestroyVideoSendStream(stream);
+}
+
+TEST_F(VideoSendStreamTest, CanStopStoppedStream) {
+ test::NullTransport transport;
+ Call::Config call_config(&transport);
+ scoped_ptr<Call> call(Call::Create(call_config));
+
+ CreateTestConfig(call.get(), 1);
+ VideoSendStream* stream =
+ call->CreateVideoSendStream(send_config_, video_streams_, NULL);
+ stream->Stop();
+ stream->Stop();
+ call->DestroyVideoSendStream(stream);
+}
TEST_F(VideoSendStreamTest, SendsSetSsrc) { SendsSetSsrcs(1, false); }
-TEST_F(VideoSendStreamTest, SendsSetSimulcastSsrcs) {
+TEST_F(VideoSendStreamTest, DISABLED_SendsSetSimulcastSsrcs) {
SendsSetSsrcs(kNumSendSsrcs, false);
}
-TEST_F(VideoSendStreamTest, CanSwitchToUseAllSsrcs) {
+TEST_F(VideoSendStreamTest, DISABLED_CanSwitchToUseAllSsrcs) {
SendsSetSsrcs(kNumSendSsrcs, true);
}
@@ -229,10 +261,10 @@ TEST_F(VideoSendStreamTest, SupportsCName) {
Call::Config call_config(observer.SendTransport());
scoped_ptr<Call> call(Call::Create(call_config));
- VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
- send_config.rtp.c_name = kCName;
+ CreateTestConfig(call.get(), 1);
+ send_config_.rtp.c_name = kCName;
- RunSendTest(call.get(), send_config, &observer);
+ RunSendTest(call.get(), &observer);
}
TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) {
@@ -246,8 +278,7 @@ TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) {
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(
- parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
EXPECT_FALSE(header.extension.hasTransmissionTimeOffset);
EXPECT_TRUE(header.extension.hasAbsoluteSendTime);
@@ -262,11 +293,11 @@ TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) {
Call::Config call_config(observer.SendTransport());
scoped_ptr<Call> call(Call::Create(call_config));
- VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
- send_config.rtp.extensions.push_back(
+ CreateTestConfig(call.get(), 1);
+ send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeExtensionId));
- RunSendTest(call.get(), send_config, &observer);
+ RunSendTest(call.get(), &observer);
}
TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
@@ -274,10 +305,10 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
class DelayedEncoder : public test::FakeEncoder {
public:
explicit DelayedEncoder(Clock* clock) : test::FakeEncoder(clock) {}
- virtual int32_t Encode(
- const I420VideoFrame& input_image,
- const CodecSpecificInfo* codec_specific_info,
- const std::vector<VideoFrameType>* frame_types) OVERRIDE {
+ virtual int32_t Encode(const I420VideoFrame& input_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const std::vector<VideoFrameType>* frame_types)
+ OVERRIDE {
// A delay needs to be introduced to assure that we get a timestamp
// offset.
SleepMs(5);
@@ -294,8 +325,7 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(
- parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
EXPECT_TRUE(header.extension.hasTransmissionTimeOffset);
EXPECT_FALSE(header.extension.hasAbsoluteSendTime);
@@ -310,12 +340,12 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) {
Call::Config call_config(observer.SendTransport());
scoped_ptr<Call> call(Call::Create(call_config));
- VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
- send_config.encoder = &encoder;
- send_config.rtp.extensions.push_back(
+ CreateTestConfig(call.get(), 1);
+ send_config_.encoder_settings.encoder = &encoder;
+ send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTOffset, kTOffsetExtensionId));
- RunSendTest(call.get(), send_config, &observer);
+ RunSendTest(call.get(), &observer);
}
class FakeReceiveStatistics : public NullReceiveStatistics {
@@ -348,7 +378,8 @@ class FakeReceiveStatistics : public NullReceiveStatistics {
stats_.cumulative_lost = cumulative_lost;
stats_.extended_max_sequence_number = extended_max_sequence_number;
}
- virtual bool GetStatistics(Statistics* statistics, bool reset) OVERRIDE {
+ virtual bool GetStatistics(RtcpStatistics* statistics,
+ bool reset) OVERRIDE {
*statistics = stats_;
return true;
}
@@ -367,7 +398,8 @@ class FakeReceiveStatistics : public NullReceiveStatistics {
virtual bool IsPacketInOrder(uint16_t sequence_number) const OVERRIDE {
return true;
}
- Statistics stats_;
+
+ RtcpStatistics stats_;
};
scoped_ptr<LossyStatistician> lossy_stats_;
@@ -382,9 +414,10 @@ TEST_F(VideoSendStreamTest, SwapsI420VideoFrames) {
Call::Config call_config(&transport);
scoped_ptr<Call> call(Call::Create(call_config));
- VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
- VideoSendStream* video_send_stream = call->CreateVideoSendStream(send_config);
- video_send_stream->StartSending();
+ CreateTestConfig(call.get(), 1);
+ VideoSendStream* video_send_stream =
+ call->CreateVideoSendStream(send_config_, video_streams_, NULL);
+ video_send_stream->Start();
I420VideoFrame frame;
frame.CreateEmptyFrame(
@@ -408,12 +441,13 @@ TEST_F(VideoSendStreamTest, SupportsFec) {
transport_adapter_(SendTransport()),
send_count_(0),
received_media_(false),
- received_fec_(false) {}
+ received_fec_(false) {
+ transport_adapter_.Enable();
+ }
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(
- parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
// Send lossy receive reports to trigger FEC enabling.
if (send_count_++ % 2 != 0) {
@@ -460,17 +494,16 @@ TEST_F(VideoSendStreamTest, SupportsFec) {
observer.SetReceivers(call->Receiver(), NULL);
- VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
- send_config.rtp.fec.red_payload_type = kRedPayloadType;
- send_config.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
+ CreateTestConfig(call.get(), 1);
+ send_config_.rtp.fec.red_payload_type = kRedPayloadType;
+ send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
- RunSendTest(call.get(), send_config, &observer);
+ RunSendTest(call.get(), &observer);
}
void VideoSendStreamTest::TestNackRetransmission(
uint32_t retransmit_ssrc,
- uint8_t retransmit_payload_type,
- bool enable_pacing) {
+ uint8_t retransmit_payload_type) {
class NackObserver : public test::RtpRtcpObserver {
public:
explicit NackObserver(uint32_t retransmit_ssrc,
@@ -480,16 +513,18 @@ void VideoSendStreamTest::TestNackRetransmission(
send_count_(0),
retransmit_ssrc_(retransmit_ssrc),
retransmit_payload_type_(retransmit_payload_type),
- nacked_sequence_number_(0) {}
+ nacked_sequence_number_(-1) {
+ transport_adapter_.Enable();
+ }
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
RTPHeader header;
- EXPECT_TRUE(
- parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
// Nack second packet after receiving the third one.
if (++send_count_ == 3) {
- nacked_sequence_number_ = header.sequenceNumber - 1;
+ uint16_t nack_sequence_number = header.sequenceNumber - 1;
+ nacked_sequence_number_ = nack_sequence_number;
NullReceiveStatistics null_stats;
RTCPSender rtcp_sender(
0, false, Clock::GetRealTimeClock(), &null_stats);
@@ -502,7 +537,7 @@ void VideoSendStreamTest::TestNackRetransmission(
EXPECT_EQ(0,
rtcp_sender.SendRTCP(
- feedback_state, kRtcpNack, 1, &nacked_sequence_number_));
+ feedback_state, kRtcpNack, 1, &nack_sequence_number));
}
uint16_t sequence_number = header.sequenceNumber;
@@ -527,77 +562,110 @@ void VideoSendStreamTest::TestNackRetransmission(
int send_count_;
uint32_t retransmit_ssrc_;
uint8_t retransmit_payload_type_;
- uint16_t nacked_sequence_number_;
+ int nacked_sequence_number_;
} observer(retransmit_ssrc, retransmit_payload_type);
Call::Config call_config(observer.SendTransport());
scoped_ptr<Call> call(Call::Create(call_config));
observer.SetReceivers(call->Receiver(), NULL);
- VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
- send_config.rtp.nack.rtp_history_ms = 1000;
- send_config.rtp.rtx.rtx_payload_type = retransmit_payload_type;
- send_config.pacing = enable_pacing;
+ CreateTestConfig(call.get(), 1);
+ send_config_.rtp.nack.rtp_history_ms = 1000;
+ send_config_.rtp.rtx.payload_type = retransmit_payload_type;
if (retransmit_ssrc != kSendSsrc)
- send_config.rtp.rtx.ssrcs.push_back(retransmit_ssrc);
+ send_config_.rtp.rtx.ssrcs.push_back(retransmit_ssrc);
- RunSendTest(call.get(), send_config, &observer);
+ RunSendTest(call.get(), &observer);
}
TEST_F(VideoSendStreamTest, RetransmitsNack) {
// Normal NACKs should use the send SSRC.
- TestNackRetransmission(kSendSsrc, kFakeSendPayloadType, false);
+ TestNackRetransmission(kSendSsrc, kFakeSendPayloadType);
}
TEST_F(VideoSendStreamTest, RetransmitsNackOverRtx) {
// NACKs over RTX should use a separate SSRC.
- TestNackRetransmission(kSendRtxSsrc, kSendRtxPayloadType, false);
-}
-
-TEST_F(VideoSendStreamTest, RetransmitsNackOverRtxWithPacing) {
- // NACKs over RTX should use a separate SSRC.
- TestNackRetransmission(kSendRtxSsrc, kSendRtxPayloadType, true);
+ TestNackRetransmission(kSendRtxSsrc, kSendRtxPayloadType);
}
-TEST_F(VideoSendStreamTest, FragmentsAccordingToMaxPacketSize) {
+void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
+ bool with_fec) {
+ static const int kRedPayloadType = 118;
+ static const int kUlpfecPayloadType = 119;
// Observer that verifies that the expected number of packets and bytes
// arrive for each frame size, from start_size to stop_size.
class FrameFragmentationObserver : public test::RtpRtcpObserver,
public EncodedFrameObserver {
public:
- FrameFragmentationObserver(size_t max_packet_size,
+ FrameFragmentationObserver(uint32_t max_packet_size,
uint32_t start_size,
uint32_t stop_size,
- test::ConfigurableFrameSizeEncoder* encoder)
- : RtpRtcpObserver(30 * 1000),
+ test::ConfigurableFrameSizeEncoder* encoder,
+ bool test_generic_packetization,
+ bool use_fec)
+ : RtpRtcpObserver(120 * 1000), // Timeout after two minutes.
+ transport_adapter_(SendTransport()),
+ encoder_(encoder),
max_packet_size_(max_packet_size),
+ stop_size_(stop_size),
+ test_generic_packetization_(test_generic_packetization),
+ use_fec_(use_fec),
+ packet_count_(0),
accumulated_size_(0),
accumulated_payload_(0),
- stop_size_(stop_size),
+ fec_packet_received_(false),
current_size_rtp_(start_size),
- current_size_frame_(start_size),
- encoder_(encoder) {
+ current_size_frame_(start_size) {
// Fragmentation required, this test doesn't make sense without it.
assert(stop_size > max_packet_size);
+ transport_adapter_.Enable();
}
- virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
+ virtual Action OnSendRtp(const uint8_t* packet, size_t size) OVERRIDE {
+ uint32_t length = static_cast<int>(size);
RTPHeader header;
- EXPECT_TRUE(
- parser_->Parse(packet, static_cast<int>(length), &header));
+ EXPECT_TRUE(parser_->Parse(packet, length, &header));
EXPECT_LE(length, max_packet_size_);
+ if (use_fec_) {
+ uint8_t payload_type = packet[header.headerLength];
+ bool is_fec = header.payloadType == kRedPayloadType &&
+ payload_type == kUlpfecPayloadType;
+ if (is_fec) {
+ fec_packet_received_ = true;
+ return SEND_PACKET;
+ }
+ }
+
accumulated_size_ += length;
- // Payload size = packet size - minus RTP header, padding and one byte
- // generic header.
- accumulated_payload_ +=
- length - (header.headerLength + header.paddingLength + 1);
+
+ if (use_fec_)
+ TriggerLossReport(header);
+
+ if (test_generic_packetization_) {
+ uint32_t overhead = header.headerLength + header.paddingLength +
+ (1 /* Generic header */);
+ if (use_fec_)
+ overhead += 1; // RED for FEC header.
+ accumulated_payload_ += length - overhead;
+ }
// Marker bit set indicates last packet of a frame.
if (header.markerBit) {
+ if (use_fec_ && accumulated_payload_ == current_size_rtp_ - 1) {
+ // With FEC enabled, frame size is incremented asynchronously, so
+ // "old" frames one byte too small may arrive. Accept, but don't
+ // increase expected frame size.
+ accumulated_size_ = 0;
+ accumulated_payload_ = 0;
+ return SEND_PACKET;
+ }
+
EXPECT_GE(accumulated_size_, current_size_rtp_);
- EXPECT_EQ(accumulated_payload_, current_size_rtp_);
+ if (test_generic_packetization_) {
+ EXPECT_EQ(current_size_rtp_, accumulated_payload_);
+ }
// Last packet of frame; reset counters.
accumulated_size_ = 0;
@@ -606,32 +674,68 @@ TEST_F(VideoSendStreamTest, FragmentsAccordingToMaxPacketSize) {
// Done! (Don't increase size again, might arrive more @ stop_size).
observation_complete_->Set();
} else {
- // Increase next expected frame size.
- ++current_size_rtp_;
+ // Increase next expected frame size. If testing with FEC, make sure
+ // a FEC packet has been received for this frame size before
+ // proceeding, to make sure that redundancy packets don't exceed
+ // size limit.
+ if (!use_fec_) {
+ ++current_size_rtp_;
+ } else if (fec_packet_received_) {
+ fec_packet_received_ = false;
+ ++current_size_rtp_;
+ ++current_size_frame_;
+ }
}
}
return SEND_PACKET;
}
+ void TriggerLossReport(const RTPHeader& header) {
+ // Send lossy receive reports to trigger FEC enabling.
+ if (packet_count_++ % 2 != 0) {
+ // Receive statistics reporting having lost 50% of the packets.
+ FakeReceiveStatistics lossy_receive_stats(
+ kSendSsrc, header.sequenceNumber, packet_count_ / 2, 127);
+ RTCPSender rtcp_sender(
+ 0, false, Clock::GetRealTimeClock(), &lossy_receive_stats);
+ EXPECT_EQ(0, rtcp_sender.RegisterSendTransport(&transport_adapter_));
+
+ rtcp_sender.SetRTCPStatus(kRtcpNonCompound);
+ rtcp_sender.SetRemoteSSRC(kSendSsrc);
+
+ RTCPSender::FeedbackState feedback_state;
+
+ EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
+ }
+ }
+
virtual void EncodedFrameCallback(const EncodedFrame& encoded_frame) {
// Increase frame size for next encoded frame, in the context of the
// encoder thread.
- if (current_size_frame_ < stop_size_) {
+ if (!use_fec_ &&
+ current_size_frame_.Value() < static_cast<int32_t>(stop_size_)) {
++current_size_frame_;
}
- encoder_->SetFrameSize(current_size_frame_);
+ encoder_->SetFrameSize(current_size_frame_.Value());
}
private:
- size_t max_packet_size_;
- size_t accumulated_size_;
- size_t accumulated_payload_;
+ internal::TransportAdapter transport_adapter_;
+ test::ConfigurableFrameSizeEncoder* const encoder_;
+
+ const uint32_t max_packet_size_;
+ const uint32_t stop_size_;
+ const bool test_generic_packetization_;
+ const bool use_fec_;
+
+ uint32_t packet_count_;
+ uint32_t accumulated_size_;
+ uint32_t accumulated_payload_;
+ bool fec_packet_received_;
- uint32_t stop_size_;
uint32_t current_size_rtp_;
- uint32_t current_size_frame_;
- test::ConfigurableFrameSizeEncoder* encoder_;
+ Atomic32 current_size_frame_;
};
// Use a fake encoder to output a frame of every size in the range [90, 290],
@@ -641,83 +745,55 @@ TEST_F(VideoSendStreamTest, FragmentsAccordingToMaxPacketSize) {
static const uint32_t start = 90;
static const uint32_t stop = 290;
+ // Don't auto increment if FEC is used; continue sending frame size until
+ // a FEC packet has been received.
test::ConfigurableFrameSizeEncoder encoder(stop);
encoder.SetFrameSize(start);
- FrameFragmentationObserver observer(kMaxPacketSize, start, stop, &encoder);
+ FrameFragmentationObserver observer(
+ kMaxPacketSize, start, stop, &encoder, format == kGeneric, with_fec);
Call::Config call_config(observer.SendTransport());
scoped_ptr<Call> call(Call::Create(call_config));
- VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
- send_config.encoder = &encoder;
- send_config.rtp.max_packet_size = kMaxPacketSize;
- send_config.post_encode_callback = &observer;
-
- RunSendTest(call.get(), send_config, &observer);
-}
-
-TEST_F(VideoSendStreamTest, CanChangeSendCodec) {
- static const uint8_t kFirstPayloadType = 121;
- static const uint8_t kSecondPayloadType = 122;
-
- class CodecChangeObserver : public test::RtpRtcpObserver {
- public:
- CodecChangeObserver(VideoSendStream** send_stream_ptr)
- : RtpRtcpObserver(30 * 1000),
- received_first_payload_(EventWrapper::Create()),
- send_stream_ptr_(send_stream_ptr) {}
-
- virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
- RTPHeader header;
- EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
-
- if (header.payloadType == kFirstPayloadType) {
- received_first_payload_->Set();
- } else if (header.payloadType == kSecondPayloadType) {
- observation_complete_->Set();
- }
-
- return SEND_PACKET;
- }
-
- virtual EventTypeWrapper Wait() OVERRIDE {
- EXPECT_EQ(kEventSignaled, received_first_payload_->Wait(30 * 1000))
- << "Timed out while waiting for first payload.";
+ observer.SetReceivers(call->Receiver(), NULL);
- EXPECT_TRUE((*send_stream_ptr_)->SetCodec(second_codec_));
+ CreateTestConfig(call.get(), 1);
+ if (with_fec) {
+ send_config_.rtp.fec.red_payload_type = kRedPayloadType;
+ send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
+ }
- EXPECT_EQ(kEventSignaled, RtpRtcpObserver::Wait())
- << "Timed out while waiting for second payload type.";
+ if (format == kVP8)
+ send_config_.encoder_settings.payload_name = "VP8";
- // Return OK regardless, prevents double error reporting.
- return kEventSignaled;
- }
+ send_config_.encoder_settings.encoder = &encoder;
+ send_config_.rtp.max_packet_size = kMaxPacketSize;
+ send_config_.post_encode_callback = &observer;
- void SetSecondCodec(const VideoCodec& codec) {
- second_codec_ = codec;
- }
+ // Add an extension header, to make the RTP header larger than the base
+ // length of 12 bytes.
+ static const uint8_t kAbsSendTimeExtensionId = 13;
+ send_config_.rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kAbsSendTime, kAbsSendTimeExtensionId));
- private:
- scoped_ptr<EventWrapper> received_first_payload_;
- VideoSendStream** send_stream_ptr_;
- VideoCodec second_codec_;
- } observer(&send_stream_);
+ RunSendTest(call.get(), &observer);
+}
- Call::Config call_config(observer.SendTransport());
- scoped_ptr<Call> call(Call::Create(call_config));
+// TODO(sprang): Is there any way of speeding up these tests?
+TEST_F(VideoSendStreamTest, FragmentsGenericAccordingToMaxPacketSize) {
+ TestPacketFragmentationSize(kGeneric, false);
+}
- std::vector<VideoCodec> codecs = call->GetVideoCodecs();
- ASSERT_GE(codecs.size(), 2u)
- << "Test needs at least 2 separate codecs to work.";
- codecs[0].plType = kFirstPayloadType;
- codecs[1].plType = kSecondPayloadType;
- observer.SetSecondCodec(codecs[1]);
+TEST_F(VideoSendStreamTest, FragmentsGenericAccordingToMaxPacketSizeWithFec) {
+ TestPacketFragmentationSize(kGeneric, true);
+}
- VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
- send_config.codec = codecs[0];
- send_config.encoder = NULL;
+TEST_F(VideoSendStreamTest, FragmentsVp8AccordingToMaxPacketSize) {
+ TestPacketFragmentationSize(kVP8, false);
+}
- RunSendTest(call.get(), send_config, &observer);
+TEST_F(VideoSendStreamTest, FragmentsVp8AccordingToMaxPacketSizeWithFec) {
+ TestPacketFragmentationSize(kVP8, true);
}
// The test will go through a number of phases.
@@ -727,23 +803,27 @@ TEST_F(VideoSendStreamTest, CanChangeSendCodec) {
// 3. Wait until |kSuspendTimeFrames| have been captured without seeing any RTP
// packets.
// 4. Signal a high REMB and then wait for the RTP stream to start again.
-// When the stream is detected again, the test ends.
+// When the stream is detected again, and the stats show that the stream
+// is no longer suspended, the test ends.
TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
static const int kSuspendTimeFrames = 60; // Suspend for 2 seconds @ 30 fps.
class RembObserver : public test::RtpRtcpObserver, public I420FrameCallback {
public:
- RembObserver()
+ RembObserver(VideoSendStream** send_stream_ptr)
: RtpRtcpObserver(30 * 1000), // Timeout after 30 seconds.
transport_adapter_(&transport_),
clock_(Clock::GetRealTimeClock()),
+ send_stream_ptr_(send_stream_ptr),
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
test_state_(kBeforeSuspend),
rtp_count_(0),
last_sequence_number_(0),
suspended_frame_count_(0),
low_remb_bps_(0),
- high_remb_bps_(0),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {}
+ high_remb_bps_(0) {
+ transport_adapter_.Enable();
+ }
void SetReceiver(PacketReceiver* receiver) {
transport_.SetReceiver(receiver);
@@ -752,13 +832,13 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
// Receive statistics reporting having lost 0% of the packets.
// This is needed for the send-side bitrate controller to work properly.
- CriticalSectionScoped lock(crit_sect_.get());
+ CriticalSectionScoped lock(crit_.get());
SendRtcpFeedback(0); // REMB is only sent if value is > 0.
return SEND_PACKET;
}
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
- CriticalSectionScoped lock(crit_sect_.get());
+ CriticalSectionScoped lock(crit_.get());
++rtp_count_;
RTPHeader header;
EXPECT_TRUE(parser_->Parse(packet, static_cast<int>(length), &header));
@@ -772,15 +852,19 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
if (header.paddingLength == 0) {
// Received non-padding packet during suspension period. Reset the
// counter.
- // TODO(hlundin): We should probably make this test more advanced in
- // the future, so that it verifies that the bitrate can go below the
- // min_bitrate. This requires that the fake encoder sees the
- // min_bitrate, and never goes below it. See WebRTC Issue 2655.
suspended_frame_count_ = 0;
}
} else if (test_state_ == kWaitingForPacket) {
if (header.paddingLength == 0) {
- // Non-padding packet observed. Test is complete.
+ // Non-padding packet observed. Test is almost complete. Will just
+ // have to wait for the stats to change.
+ test_state_ = kWaitingForStats;
+ }
+ } else if (test_state_ == kWaitingForStats) {
+ assert(*send_stream_ptr_);
+ VideoSendStream::Stats stats = (*send_stream_ptr_)->GetStats();
+ if (stats.suspended == false) {
+ // Stats flipped to false. Test is complete.
observation_complete_->Set();
}
}
@@ -790,29 +874,39 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
// This method implements the I420FrameCallback.
void FrameCallback(I420VideoFrame* video_frame) OVERRIDE {
- CriticalSectionScoped lock(crit_sect_.get());
+ CriticalSectionScoped lock(crit_.get());
if (test_state_ == kDuringSuspend &&
++suspended_frame_count_ > kSuspendTimeFrames) {
+ assert(*send_stream_ptr_);
+ VideoSendStream::Stats stats = (*send_stream_ptr_)->GetStats();
+ EXPECT_TRUE(stats.suspended);
SendRtcpFeedback(high_remb_bps_);
test_state_ = kWaitingForPacket;
}
}
- void set_low_remb_bps(int value) { low_remb_bps_ = value; }
+ void set_low_remb_bps(int value) {
+ CriticalSectionScoped lock(crit_.get());
+ low_remb_bps_ = value;
+ }
- void set_high_remb_bps(int value) { high_remb_bps_ = value; }
+ void set_high_remb_bps(int value) {
+ CriticalSectionScoped lock(crit_.get());
+ high_remb_bps_ = value;
+ }
- virtual void Stop() { transport_.StopSending(); }
+ void Stop() { transport_.StopSending(); }
private:
enum TestState {
kBeforeSuspend,
kDuringSuspend,
kWaitingForPacket,
- kAfterSuspend
+ kWaitingForStats
};
- virtual void SendRtcpFeedback(int remb_value) {
+ virtual void SendRtcpFeedback(int remb_value)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_) {
FakeReceiveStatistics receive_stats(
kSendSsrc, last_sequence_number_, rtp_count_, 0);
RTCPSender rtcp_sender(0, false, clock_, &receive_stats);
@@ -830,33 +924,37 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
internal::TransportAdapter transport_adapter_;
test::DirectTransport transport_;
- Clock* clock_;
- TestState test_state_;
- int rtp_count_;
- int last_sequence_number_;
- int suspended_frame_count_;
- int low_remb_bps_;
- int high_remb_bps_;
- scoped_ptr<CriticalSectionWrapper> crit_sect_;
- } observer;
+ Clock* const clock_;
+ VideoSendStream** const send_stream_ptr_;
+
+ const scoped_ptr<CriticalSectionWrapper> crit_;
+ TestState test_state_ GUARDED_BY(crit_);
+ int rtp_count_ GUARDED_BY(crit_);
+ int last_sequence_number_ GUARDED_BY(crit_);
+ int suspended_frame_count_ GUARDED_BY(crit_);
+ int low_remb_bps_ GUARDED_BY(crit_);
+ int high_remb_bps_ GUARDED_BY(crit_);
+ } observer(&send_stream_);
+ // Note that |send_stream_| is created in RunSendTest(), called below. This
+ // is why a pointer to |send_stream_| must be provided here.
Call::Config call_config(observer.SendTransport());
scoped_ptr<Call> call(Call::Create(call_config));
observer.SetReceiver(call->Receiver());
- VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 1);
- send_config.rtp.nack.rtp_history_ms = 1000;
- send_config.pre_encode_callback = &observer;
- send_config.suspend_below_min_bitrate = true;
- unsigned int min_bitrate_bps =
- send_config.codec.simulcastStream[0].minBitrate * 1000;
+ CreateTestConfig(call.get(), 1);
+ send_config_.rtp.nack.rtp_history_ms = 1000;
+ send_config_.pre_encode_callback = &observer;
+ send_config_.suspend_below_min_bitrate = true;
+ int min_bitrate_bps = video_streams_[0].min_bitrate_bps;
observer.set_low_remb_bps(min_bitrate_bps - 10000);
- unsigned int threshold_window = std::max(min_bitrate_bps / 10, 10000u);
- ASSERT_GT(send_config.codec.simulcastStream[0].maxBitrate * 1000,
+ int threshold_window = std::max(min_bitrate_bps / 10, 10000);
+ ASSERT_GT(video_streams_[0].max_bitrate_bps,
min_bitrate_bps + threshold_window + 5000);
observer.set_high_remb_bps(min_bitrate_bps + threshold_window + 5000);
- RunSendTest(call.get(), send_config, &observer);
+ RunSendTest(call.get(), &observer);
+ observer.Stop();
}
TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
@@ -865,27 +963,31 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
PacketObserver()
: RtpRtcpObserver(30 * 1000), // Timeout after 30 seconds.
clock_(Clock::GetRealTimeClock()),
- last_packet_time_ms_(-1),
transport_adapter_(ReceiveTransport()),
- capturer_(NULL),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {}
+ crit_(CriticalSectionWrapper::CreateCriticalSection()),
+ last_packet_time_ms_(-1),
+ capturer_(NULL) {
+ transport_adapter_.Enable();
+ }
void SetCapturer(test::FrameGeneratorCapturer* capturer) {
+ CriticalSectionScoped lock(crit_.get());
capturer_ = capturer;
}
virtual Action OnSendRtp(const uint8_t* packet, size_t length) OVERRIDE {
- CriticalSectionScoped lock(crit_sect_.get());
+ CriticalSectionScoped lock(crit_.get());
last_packet_time_ms_ = clock_->TimeInMilliseconds();
capturer_->Stop();
return SEND_PACKET;
}
virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
- CriticalSectionScoped lock(crit_sect_.get());
+ CriticalSectionScoped lock(crit_.get());
const int kVideoMutedThresholdMs = 10000;
- if (last_packet_time_ms_ > 0 && clock_->TimeInMilliseconds() -
- last_packet_time_ms_ > kVideoMutedThresholdMs)
+ if (last_packet_time_ms_ > 0 &&
+ clock_->TimeInMilliseconds() - last_packet_time_ms_ >
+ kVideoMutedThresholdMs)
observation_complete_->Set();
// Receive statistics reporting having lost 50% of the packets.
FakeReceiveStatistics receive_stats(kSendSsrcs[0], 1, 1, 0);
@@ -903,25 +1005,26 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
}
private:
- Clock* clock_;
- int64_t last_packet_time_ms_;
+ Clock* const clock_;
internal::TransportAdapter transport_adapter_;
- test::FrameGeneratorCapturer* capturer_;
- scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ const scoped_ptr<CriticalSectionWrapper> crit_;
+ int64_t last_packet_time_ms_ GUARDED_BY(crit_);
+ test::FrameGeneratorCapturer* capturer_ GUARDED_BY(crit_);
} observer;
Call::Config call_config(observer.SendTransport());
scoped_ptr<Call> call(Call::Create(call_config));
observer.SetReceivers(call->Receiver(), call->Receiver());
- VideoSendStream::Config send_config = GetSendTestConfig(call.get(), 3);
+ CreateTestConfig(call.get(), 3);
- send_stream_ = call->CreateVideoSendStream(send_config);
+ send_stream_ =
+ call->CreateVideoSendStream(send_config_, video_streams_, NULL);
scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
test::FrameGeneratorCapturer::Create(
send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
observer.SetCapturer(frame_generator_capturer.get());
- send_stream_->StartSending();
+ send_stream_->Start();
frame_generator_capturer->Start();
EXPECT_EQ(kEventSignaled, observer.Wait())
@@ -929,7 +1032,190 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
observer.StopSending();
frame_generator_capturer->Stop();
- send_stream_->StopSending();
+ send_stream_->Stop();
+ call->DestroyVideoSendStream(send_stream_);
+}
+
+TEST_F(VideoSendStreamTest, ProducesStats) {
+ static const std::string kCName =
+ "PjQatC14dGfbVwGPUOA9IH7RlsFDbWl4AhXEiDsBizo=";
+ static const uint32_t kTimeoutMs = 30 * 1000;
+ class StatsObserver : public test::RtpRtcpObserver {
+ public:
+ StatsObserver()
+ : RtpRtcpObserver(kTimeoutMs),
+ stream_(NULL),
+ event_(EventWrapper::Create()) {}
+
+ virtual Action OnSendRtcp(const uint8_t* packet, size_t length) OVERRIDE {
+ event_->Set();
+
+ return SEND_PACKET;
+ }
+
+ bool WaitForFilledStats() {
+ Clock* clock = Clock::GetRealTimeClock();
+ int64_t now = clock->TimeInMilliseconds();
+ int64_t stop_time = now + kTimeoutMs;
+ while (now < stop_time) {
+ int64_t time_left = stop_time - now;
+ if (time_left > 0 && event_->Wait(time_left) == kEventSignaled &&
+ CheckStats()) {
+ return true;
+ }
+ now = clock->TimeInMilliseconds();
+ }
+ return false;
+ }
+
+ bool CheckStats() {
+ VideoSendStream::Stats stats = stream_->GetStats();
+ // Check that all applicable data sources have been used.
+ if (stats.input_frame_rate > 0 && stats.encode_frame_rate > 0 &&
+ stats.avg_delay_ms > 0 && stats.c_name == kCName &&
+ !stats.substreams.empty()) {
+ uint32_t ssrc = stats.substreams.begin()->first;
+ EXPECT_NE(
+ config_.rtp.ssrcs.end(),
+ std::find(
+ config_.rtp.ssrcs.begin(), config_.rtp.ssrcs.end(), ssrc));
+ // Check for data populated by various sources. RTCP is excluded, as that
+ // data comes from the remote side and is covered by the call tests instead.
+ const StreamStats& entry = stats.substreams[ssrc];
+ if (entry.key_frames > 0u && entry.bitrate_bps > 0 &&
+ entry.rtp_stats.packets > 0u) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ void SetConfig(const VideoSendStream::Config& config) { config_ = config; }
+
+ void SetSendStream(VideoSendStream* stream) { stream_ = stream; }
+
+ VideoSendStream* stream_;
+ VideoSendStream::Config config_;
+ scoped_ptr<EventWrapper> event_;
+ } observer;
+
+ Call::Config call_config(observer.SendTransport());
+ scoped_ptr<Call> call(Call::Create(call_config));
+
+ CreateTestConfig(call.get(), 1);
+ send_config_.rtp.c_name = kCName;
+ observer.SetConfig(send_config_);
+
+ send_stream_ =
+ call->CreateVideoSendStream(send_config_, video_streams_, NULL);
+ observer.SetSendStream(send_stream_);
+ scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
+ test::FrameGeneratorCapturer::Create(
+ send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
+ send_stream_->Start();
+ frame_generator_capturer->Start();
+
+ EXPECT_TRUE(observer.WaitForFilledStats())
+ << "Timed out waiting for filled statistics.";
+
+ observer.StopSending();
+ frame_generator_capturer->Stop();
+ send_stream_->Stop();
+ call->DestroyVideoSendStream(send_stream_);
+}
+
+// This test first observes "high" bitrate use, at which point it sends a REMB
+// to indicate that the bitrate should be lowered significantly. The test then
+// observes the bitrate sinking well below the min-transmit-bitrate threshold,
+// verifying that the min-transmit bitrate respects the incoming REMB.
+//
+// Note that the test starts at "high" bitrate and does not ramp up to "higher"
+// bitrate since no receiver report block or REMB is sent in the initial phase.
+TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
+ static const int kMinTransmitBitrateBps = 400000;
+ static const int kHighBitrateBps = 150000;
+ static const int kRembBitrateBps = 80000;
+ static const int kRembRespectedBitrateBps = 100000;
+ class BitrateObserver: public test::RtpRtcpObserver, public PacketReceiver {
+ public:
+ BitrateObserver()
+ : RtpRtcpObserver(30 * 1000),
+ feedback_transport_(ReceiveTransport()),
+ send_stream_(NULL),
+ bitrate_capped_(false) {
+ RtpRtcp::Configuration config;
+ feedback_transport_.Enable();
+ config.outgoing_transport = &feedback_transport_;
+ rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(config));
+ rtp_rtcp_->SetREMBStatus(true);
+ rtp_rtcp_->SetRTCPStatus(kRtcpNonCompound);
+ }
+
+ void SetSendStream(VideoSendStream* send_stream) {
+ send_stream_ = send_stream;
+ }
+
+ private:
+ virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
+ size_t length) OVERRIDE {
+ if (RtpHeaderParser::IsRtcp(packet, static_cast<int>(length)))
+ return DELIVERY_OK;
+
+ RTPHeader header;
+ if (!parser_->Parse(packet, static_cast<int>(length), &header))
+ return DELIVERY_PACKET_ERROR;
+ assert(send_stream_ != NULL);
+ VideoSendStream::Stats stats = send_stream_->GetStats();
+ if (!stats.substreams.empty()) {
+ EXPECT_EQ(1u, stats.substreams.size());
+ int bitrate_bps = stats.substreams.begin()->second.bitrate_bps;
+ test::PrintResult(
+ "bitrate_stats_",
+ "min_transmit_bitrate_low_remb",
+ "bitrate_bps",
+ static_cast<size_t>(bitrate_bps),
+ "bps",
+ false);
+ if (bitrate_bps > kHighBitrateBps) {
+ rtp_rtcp_->SetREMBData(kRembBitrateBps, 1, &header.ssrc);
+ rtp_rtcp_->Process();
+ bitrate_capped_ = true;
+ } else if (bitrate_capped_ &&
+ bitrate_bps < kRembRespectedBitrateBps) {
+ observation_complete_->Set();
+ }
+ }
+ return DELIVERY_OK;
+ }
+
+ scoped_ptr<RtpRtcp> rtp_rtcp_;
+ internal::TransportAdapter feedback_transport_;
+ VideoSendStream* send_stream_;
+ bool bitrate_capped_;
+ } observer;
+
+ Call::Config call_config(observer.SendTransport());
+ scoped_ptr<Call> call(Call::Create(call_config));
+ observer.SetReceivers(&observer, call->Receiver());
+
+ CreateTestConfig(call.get(), 1);
+ send_config_.rtp.min_transmit_bitrate_bps = kMinTransmitBitrateBps;
+ send_stream_ =
+ call->CreateVideoSendStream(send_config_, video_streams_, NULL);
+ observer.SetSendStream(send_stream_);
+
+ scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer(
+ test::FrameGeneratorCapturer::Create(
+ send_stream_->Input(), 320, 240, 30, Clock::GetRealTimeClock()));
+ send_stream_->Start();
+ frame_generator_capturer->Start();
+
+ EXPECT_EQ(kEventSignaled, observer.Wait())
+ << "Timeout while waiting for low bitrate stats after REMB.";
+
+ observer.StopSending();
+ frame_generator_capturer->Stop();
+ send_stream_->Stop();
call->DestroyVideoSendStream(send_stream_);
}
diff --git a/chromium/third_party/webrtc/video/webrtc_video.gypi b/chromium/third_party/webrtc/video/webrtc_video.gypi
index 5f7784d313b..4de970abdeb 100644
--- a/chromium/third_party/webrtc/video/webrtc_video.gypi
+++ b/chromium/third_party/webrtc/video/webrtc_video.gypi
@@ -14,6 +14,10 @@
'video/call.cc',
'video/encoded_frame_callback_adapter.cc',
'video/encoded_frame_callback_adapter.h',
+ 'video/send_statistics_proxy.cc',
+ 'video/send_statistics_proxy.h',
+ 'video/receive_statistics_proxy.cc',
+ 'video/receive_statistics_proxy.h',
'video/transport_adapter.cc',
'video/transport_adapter.h',
'video/video_receive_stream.cc',
diff --git a/chromium/third_party/webrtc/video_engine/Android.mk b/chromium/third_party/webrtc/video_engine/Android.mk
index b0a0d63ff28..74866cc2eb7 100644
--- a/chromium/third_party/webrtc/video_engine/Android.mk
+++ b/chromium/third_party/webrtc/video_engine/Android.mk
@@ -20,7 +20,6 @@ LOCAL_SRC_FILES := \
vie_base_impl.cc \
vie_capture_impl.cc \
vie_codec_impl.cc \
- vie_encryption_impl.cc \
vie_external_codec_impl.cc \
vie_file_impl.cc \
vie_image_process_impl.cc \
@@ -61,7 +60,6 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/../common_video/vplib/main/interface \
$(LOCAL_PATH)/../modules/interface \
$(LOCAL_PATH)/../modules/audio_coding/main/interface \
- $(LOCAL_PATH)/../modules/media_file/interface \
$(LOCAL_PATH)/../modules/rtp_rtcp/interface \
$(LOCAL_PATH)/../modules/udp_transport/interface \
$(LOCAL_PATH)/../modules/utility/interface \
diff --git a/chromium/third_party/webrtc/video_engine/OWNERS b/chromium/third_party/webrtc/video_engine/OWNERS
index 506407499f3..14bb8f3e989 100644
--- a/chromium/third_party/webrtc/video_engine/OWNERS
+++ b/chromium/third_party/webrtc/video_engine/OWNERS
@@ -2,3 +2,10 @@ mflodman@webrtc.org
stefan@webrtc.org
wu@webrtc.org
mallinath@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/video_engine/call_stats.cc b/chromium/third_party/webrtc/video_engine/call_stats.cc
index a79a7d2fb51..05385c35eab 100644
--- a/chromium/third_party/webrtc/video_engine/call_stats.cc
+++ b/chromium/third_party/webrtc/video_engine/call_stats.cc
@@ -15,7 +15,6 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/video_engine/call_stats.h b/chromium/third_party/webrtc/video_engine/call_stats.h
index 5c021a484ea..5eb33358810 100644
--- a/chromium/third_party/webrtc/video_engine/call_stats.h
+++ b/chromium/third_party/webrtc/video_engine/call_stats.h
@@ -13,8 +13,8 @@
#include <list>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/video_engine/encoder_state_feedback.h b/chromium/third_party/webrtc/video_engine/encoder_state_feedback.h
index 4e0cdc35fc5..a58532ce7f5 100644
--- a/chromium/third_party/webrtc/video_engine/encoder_state_feedback.h
+++ b/chromium/third_party/webrtc/video_engine/encoder_state_feedback.h
@@ -16,7 +16,7 @@
#include <map>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
diff --git a/chromium/third_party/webrtc/video_engine/encoder_state_feedback_unittest.cc b/chromium/third_party/webrtc/video_engine/encoder_state_feedback_unittest.cc
index 7f331659123..4e15752fdec 100644
--- a/chromium/third_party/webrtc/video_engine/encoder_state_feedback_unittest.cc
+++ b/chromium/third_party/webrtc/video_engine/encoder_state_feedback_unittest.cc
@@ -17,26 +17,17 @@
#include "webrtc/common.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/video_engine/vie_encoder.h"
-namespace webrtc {
+using ::testing::NiceMock;
-// TODO(mflodman) Create a common mock in module utility.
-class TestProcessThread : public ProcessThread {
- public:
- TestProcessThread() {}
- ~TestProcessThread() {}
- virtual int32_t Start() { return 0; }
- virtual int32_t Stop() { return 0; }
- virtual int32_t RegisterModule(const Module* module) { return 0; }
- virtual int32_t DeRegisterModule(const Module* module) { return 0; }
-};
+namespace webrtc {
class MockVieEncoder : public ViEEncoder {
public:
- explicit MockVieEncoder(TestProcessThread* process_thread)
+ explicit MockVieEncoder(ProcessThread* process_thread)
: ViEEncoder(1, 1, 1, config_, *process_thread, NULL) {}
~MockVieEncoder() {}
@@ -55,10 +46,10 @@ class MockVieEncoder : public ViEEncoder {
class VieKeyRequestTest : public ::testing::Test {
protected:
virtual void SetUp() {
- process_thread_.reset(new TestProcessThread());
+ process_thread_.reset(new NiceMock<MockProcessThread>);
encoder_state_feedback_.reset(new EncoderStateFeedback());
}
- scoped_ptr<TestProcessThread> process_thread_;
+ scoped_ptr<MockProcessThread> process_thread_;
scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
};
diff --git a/chromium/third_party/webrtc/video_engine/include/vie_base.h b/chromium/third_party/webrtc/video_engine/include/vie_base.h
index cd5f3856e18..0a528cb1272 100644
--- a/chromium/third_party/webrtc/video_engine/include/vie_base.h
+++ b/chromium/third_party/webrtc/video_engine/include/vie_base.h
@@ -43,6 +43,101 @@ class CpuOveruseObserver {
virtual ~CpuOveruseObserver() {}
};
+// Limits on standard deviation for under/overuse.
+#ifdef WEBRTC_ANDROID
+const float kOveruseStdDevMs = 32.0f;
+const float kNormalUseStdDevMs = 27.0f;
+#elif WEBRTC_LINUX
+const float kOveruseStdDevMs = 20.0f;
+const float kNormalUseStdDevMs = 14.0f;
+#elif WEBRTC_MAC
+const float kOveruseStdDevMs = 27.0f;
+const float kNormalUseStdDevMs = 21.0f;
+#elif WEBRTC_WIN
+const float kOveruseStdDevMs = 20.0f;
+const float kNormalUseStdDevMs = 14.0f;
+#else
+const float kOveruseStdDevMs = 30.0f;
+const float kNormalUseStdDevMs = 20.0f;
+#endif
+
+struct CpuOveruseOptions {
+ CpuOveruseOptions()
+ : enable_capture_jitter_method(true),
+ low_capture_jitter_threshold_ms(kNormalUseStdDevMs),
+ high_capture_jitter_threshold_ms(kOveruseStdDevMs),
+ enable_encode_usage_method(false),
+ low_encode_usage_threshold_percent(60),
+ high_encode_usage_threshold_percent(90),
+ low_encode_time_rsd_threshold(-1),
+ high_encode_time_rsd_threshold(-1),
+ frame_timeout_interval_ms(1500),
+ min_frame_samples(120),
+ min_process_count(3),
+ high_threshold_consecutive_count(2) {}
+
+ // Method based on inter-arrival jitter of captured frames.
+ bool enable_capture_jitter_method;
+ float low_capture_jitter_threshold_ms; // Threshold for triggering underuse.
+ float high_capture_jitter_threshold_ms; // Threshold for triggering overuse.
+ // Method based on encode time of frames.
+ bool enable_encode_usage_method;
+ int low_encode_usage_threshold_percent; // Threshold for triggering underuse.
+ int high_encode_usage_threshold_percent; // Threshold for triggering overuse.
+ int low_encode_time_rsd_threshold; // Additional threshold for triggering
+ // underuse (used in addition to
+ // threshold above if configured).
+ int high_encode_time_rsd_threshold; // Additional threshold for triggering
+ // overuse (used in addition to
+ // threshold above if configured).
+ // General settings.
+ int frame_timeout_interval_ms; // The maximum allowed interval between two
+ // frames before resetting estimations.
+ int min_frame_samples; // The minimum number of frames required.
+ int min_process_count; // The number of initial process times required before
+ // triggering an overuse/underuse.
+ int high_threshold_consecutive_count; // The number of consecutive checks
+ // above the high threshold before
+ // triggering an overuse.
+
+ bool Equals(const CpuOveruseOptions& o) const {
+ return enable_capture_jitter_method == o.enable_capture_jitter_method &&
+ low_capture_jitter_threshold_ms == o.low_capture_jitter_threshold_ms &&
+ high_capture_jitter_threshold_ms ==
+ o.high_capture_jitter_threshold_ms &&
+ enable_encode_usage_method == o.enable_encode_usage_method &&
+ low_encode_usage_threshold_percent ==
+ o.low_encode_usage_threshold_percent &&
+ high_encode_usage_threshold_percent ==
+ o.high_encode_usage_threshold_percent &&
+ low_encode_time_rsd_threshold == o.low_encode_time_rsd_threshold &&
+ high_encode_time_rsd_threshold == o.high_encode_time_rsd_threshold &&
+ frame_timeout_interval_ms == o.frame_timeout_interval_ms &&
+ min_frame_samples == o.min_frame_samples &&
+ min_process_count == o.min_process_count &&
+ high_threshold_consecutive_count == o.high_threshold_consecutive_count;
+ }
+};
+
+struct CpuOveruseMetrics {
+ CpuOveruseMetrics()
+ : capture_jitter_ms(-1),
+ avg_encode_time_ms(-1),
+ encode_usage_percent(-1),
+ encode_rsd(-1),
+ capture_queue_delay_ms_per_s(-1) {}
+
+ int capture_jitter_ms; // The current estimated jitter in ms based on
+ // incoming captured frames.
+ int avg_encode_time_ms; // The average encode time in ms.
+ int encode_usage_percent; // The average encode time divided by the average
+ // time difference between incoming captured frames.
+ int encode_rsd; // The relative std dev of encode time of frames.
+ int capture_queue_delay_ms_per_s; // The current time delay between an
+ // incoming captured frame until the frame
+ // is being processed. The delay is
+ // expressed in ms delay per second.
+};
class WEBRTC_DLLEXPORT VideoEngine {
public:
@@ -67,7 +162,7 @@ class WEBRTC_DLLEXPORT VideoEngine {
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// Android specific.
- static int SetAndroidObjects(JavaVM* java_vm);
+ static int SetAndroidObjects(JavaVM* java_vm, jobject context);
#endif
protected:
@@ -120,17 +215,20 @@ class WEBRTC_DLLEXPORT ViEBase {
virtual int RegisterCpuOveruseObserver(int channel,
CpuOveruseObserver* observer) = 0;
+ // Sets options for cpu overuse detector.
+ // TODO(asapersson): Remove default implementation.
+ virtual int SetCpuOveruseOptions(int channel,
+ const CpuOveruseOptions& options) {
+ return -1;
+ }
+
// Gets cpu overuse measures.
- // capture_jitter_ms: The current estimated jitter in ms based on incoming
- // captured frames.
- // avg_encode_time_ms: The average encode time in ms.
- // encode_usage_percent: The average encode time divided by the average time
- // difference between incoming captured frames.
- // capture_queue_delay_ms_per_s: The current time delay between an incoming
- // captured frame until the frame is being
- // processed. The delay is expressed in ms
- // delay per second.
// TODO(asapersson): Remove default implementation.
+ virtual int GetCpuOveruseMetrics(int channel,
+ CpuOveruseMetrics* metrics) {
+ return -1;
+ }
+ // TODO(asapersson): Remove this function when libjingle has been updated.
virtual int CpuOveruseMeasures(int channel,
int* capture_jitter_ms,
int* avg_encode_time_ms,
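A minimal usage sketch for the new overuse-detection API above (not part of the
patch): it switches a channel to the encode-usage method via
SetCpuOveruseOptions() and polls the metrics that used to be fetched through
CpuOveruseMeasures(). The ViEBase pointer and channel id are assumed to come
from existing application code; error handling is omitted.

#include "webrtc/video_engine/include/vie_base.h"

void ConfigureOveruseDetection(webrtc::ViEBase* base, int channel) {
  webrtc::CpuOveruseOptions options;
  options.enable_capture_jitter_method = false;  // Turn off the default method.
  options.enable_encode_usage_method = true;     // Detect on encode time instead.
  options.low_encode_usage_threshold_percent = 55;
  options.high_encode_usage_threshold_percent = 85;
  if (base->SetCpuOveruseOptions(channel, options) != 0)
    return;  // The default implementation returns -1 until engines implement it.

  webrtc::CpuOveruseMetrics metrics;
  if (base->GetCpuOveruseMetrics(channel, &metrics) == 0) {
    // metrics.encode_usage_percent and metrics.capture_jitter_ms carry the
    // values previously exposed through CpuOveruseMeasures().
  }
}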
diff --git a/chromium/third_party/webrtc/video_engine/include/vie_encryption.h b/chromium/third_party/webrtc/video_engine/include/vie_encryption.h
deleted file mode 100644
index 6cf721f8f44..00000000000
--- a/chromium/third_party/webrtc/video_engine/include/vie_encryption.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This sub-API supports the following functionalities:
-// - External encryption and decryption.
-
-#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
-#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
-
-#include "webrtc/common_types.h"
-
-namespace webrtc {
-class VideoEngine;
-
-class WEBRTC_DLLEXPORT ViEEncryption {
- public:
- // Factory for the ViEEncryption sub-API and increases an internal reference
- // counter if successful. Returns NULL if the API is not supported or if
- // construction fails.
- static ViEEncryption* GetInterface(VideoEngine* video_engine);
-
- // Releases the ViEEncryption sub-API and decreases an internal reference
- // counter.
- // Returns the new reference count. This value should be zero
- // for all sub-API:s before the VideoEngine object can be safely deleted.
- virtual int Release() = 0;
-
- // This function registers a encryption derived instance and enables
- // external encryption for the specified channel.
- virtual int RegisterExternalEncryption(const int video_channel,
- Encryption& encryption) = 0;
-
- // This function deregisters a registered encryption derived instance
- // and disables external encryption.
- virtual int DeregisterExternalEncryption(const int video_channel) = 0;
-
- protected:
- ViEEncryption() {}
- virtual ~ViEEncryption() {}
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
diff --git a/chromium/third_party/webrtc/video_engine/include/vie_errors.h b/chromium/third_party/webrtc/video_engine/include/vie_errors.h
index 35af194939c..1e9be1d49ad 100644
--- a/chromium/third_party/webrtc/video_engine/include/vie_errors.h
+++ b/chromium/third_party/webrtc/video_engine/include/vie_errors.h
@@ -98,12 +98,6 @@ enum ViEErrors {
kViERtpRtcpObserverNotRegistered, // No observer registered.
kViERtpRtcpUnknownError, // An unknown error has occurred. Check the log file.
- // ViEEncryption.
- kViEEncryptionInvalidChannelId = 12700, // Channel id does not exist.
- kViEEncryptionInvalidSrtpParameter, // DEPRECATED
- kViEEncryptionSrtpNotSupported, // DEPRECATED
- kViEEncryptionUnknownError, // An unknown error has occurred. Check the log file.
-
// ViEImageProcess.
kViEImageProcessInvalidChannelId = 12800, // No Channel exist with the provided channel id.
kViEImageProcessInvalidCaptureId, // No capture device exist with the provided capture id.
diff --git a/chromium/third_party/webrtc/video_engine/include/vie_image_process.h b/chromium/third_party/webrtc/video_engine/include/vie_image_process.h
index aff2d613fd5..e24e98fb7dc 100644
--- a/chromium/third_party/webrtc/video_engine/include/vie_image_process.h
+++ b/chromium/third_party/webrtc/video_engine/include/vie_image_process.h
@@ -33,8 +33,11 @@ class WEBRTC_DLLEXPORT ViEEffectFilter {
public:
// This method is called with an I420 video frame allowing the user to
// modify the video frame.
- virtual int Transform(int size, unsigned char* frameBuffer,
- unsigned int timeStamp90KHz, unsigned int width,
+ virtual int Transform(int size,
+ unsigned char* frame_buffer,
+ int64_t ntp_time_ms,
+ unsigned int timestamp,
+ unsigned int width,
unsigned int height) = 0;
protected:
ViEEffectFilter() {}
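Illustrative only: an effect filter adapted to the new Transform() signature
shown above (ntp_time_ms added, the 90 kHz value renamed to timestamp). The
class name is hypothetical; it simply passes the I420 buffer through unchanged.

#include "webrtc/video_engine/include/vie_image_process.h"

class PassThroughFilter : public webrtc::ViEEffectFilter {
 public:
  virtual int Transform(int size,
                        unsigned char* frame_buffer,
                        int64_t ntp_time_ms,
                        unsigned int timestamp,
                        unsigned int width,
                        unsigned int height) {
    // Inspect or modify the frame in place; for I420, size is width*height*3/2.
    return 0;
  }
};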
diff --git a/chromium/third_party/webrtc/video_engine/include/vie_network.h b/chromium/third_party/webrtc/video_engine/include/vie_network.h
index 4a9e6ce203f..bb368182cfe 100644
--- a/chromium/third_party/webrtc/video_engine/include/vie_network.h
+++ b/chromium/third_party/webrtc/video_engine/include/vie_network.h
@@ -79,6 +79,20 @@ class WEBRTC_DLLEXPORT ViENetwork {
// over the network.
virtual int SetMTU(int video_channel, unsigned int mtu) = 0;
+ // Forward (audio) packet to bandwidth estimator for the given video channel,
+ // for aggregated audio+video BWE.
+ virtual int ReceivedBWEPacket(const int video_channel,
+ int64_t arrival_time_ms, int payload_size, const RTPHeader& header) {
+ return 0;
+ }
+
+ // TODO(holmer): Remove the default implementation when this has been fixed
+ // in fakewebrtcvideoengine.cc.
+ virtual bool SetBandwidthEstimationConfig(int video_channel,
+ const webrtc::Config& config) {
+ return false;
+ }
+
protected:
ViENetwork() {}
virtual ~ViENetwork() {}
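A sketch (not from the patch) of how an application could feed received audio
packets into the new aggregated audio+video bandwidth estimation hook. The
RTPHeader is assumed to have been parsed elsewhere; the surrounding names are
illustrative.

#include "webrtc/video_engine/include/vie_network.h"

void ForwardAudioPacketForBwe(webrtc::ViENetwork* network,
                              int video_channel,
                              int64_t arrival_time_ms,
                              int payload_size,
                              const webrtc::RTPHeader& header) {
  // The default implementation returns 0; engines that support combined BWE
  // feed this into the video channel's remote bitrate estimator.
  network->ReceivedBWEPacket(video_channel, arrival_time_ms, payload_size,
                             header);
}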
diff --git a/chromium/third_party/webrtc/video_engine/include/vie_render.h b/chromium/third_party/webrtc/video_engine/include/vie_render.h
index ab61d7aca73..2c9b0f55b3f 100644
--- a/chromium/third_party/webrtc/video_engine/include/vie_render.h
+++ b/chromium/third_party/webrtc/video_engine/include/vie_render.h
@@ -38,10 +38,13 @@ class ExternalRenderer {
virtual int DeliverFrame(unsigned char* buffer,
int buffer_size,
// RTP timestamp in 90kHz.
- uint32_t time_stamp,
- // Wallclock render time in miliseconds
- int64_t render_time,
- // Handle of the underlying video frame,
+ uint32_t timestamp,
+ // NTP time of the capture time in local timebase
+ // in milliseconds.
+ int64_t ntp_time_ms,
+ // Wallclock render time in milliseconds.
+ int64_t render_time_ms,
+ // Handle of the underlying video frame.
void* handle) = 0;
// Returns true if the renderer supports textures. DeliverFrame can be called
diff --git a/chromium/third_party/webrtc/video_engine/include/vie_rtp_rtcp.h b/chromium/third_party/webrtc/video_engine/include/vie_rtp_rtcp.h
index 9d899ad7f39..972ca58486e 100644
--- a/chromium/third_party/webrtc/video_engine/include/vie_rtp_rtcp.h
+++ b/chromium/third_party/webrtc/video_engine/include/vie_rtp_rtcp.h
@@ -27,6 +27,7 @@
namespace webrtc {
class VideoEngine;
+struct ReceiveBandwidthEstimatorStats;
// This enumerator sets the RTCP mode.
enum ViERTCPMode {
@@ -48,11 +49,6 @@ enum StreamType {
kViEStreamTypeRtx = 1 // Retransmission media stream
};
-enum BandwidthEstimationMode {
- kViEMultiStreamEstimation,
- kViESingleStreamEstimation
-};
-
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterRTPObserver() and
@@ -138,6 +134,15 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP {
virtual int SetRtxSendPayloadType(const int video_channel,
const uint8_t payload_type) = 0;
+ // This enables sending redundant payloads when padding up the bitrate instead
+ // of sending dummy padding packets. This feature is off by default and will
+ // only have an effect if RTX is also enabled.
+ // TODO(holmer): Remove default implementation once this has been implemented
+ // in libjingle.
+ virtual int SetPadWithRedundantPayloads(int video_channel, bool enable) {
+ return 0;
+ }
+
virtual int SetRtxReceivePayloadType(const int video_channel,
const uint8_t payload_type) = 0;
@@ -265,6 +270,22 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP {
virtual int SetTransmissionSmoothingStatus(int video_channel,
bool enable) = 0;
+ // Sets a minimal bitrate which will be padded to when the encoder doesn't
+ // produce enough bitrate.
+ // TODO(pbos): Remove default implementation when libjingle's
+ // FakeWebRtcVideoEngine is updated.
+ virtual int SetMinTransmitBitrate(int video_channel,
+ int min_transmit_bitrate_kbps) {
+ return -1;
+ };
+
+ // Set a constant amount to deduct from received bitrate estimates before
+ // using them to allocate capacity among outgoing video streams.
+ virtual int SetReservedTransmitBitrate(
+ int video_channel, unsigned int reserved_transmit_bitrate_bps) {
+ return 0;
+ }
+
// This function returns our locally created statistics of the received RTP
// stream.
virtual int GetReceiveChannelRtcpStatistics(const int video_channel,
@@ -359,6 +380,14 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP {
virtual int DeregisterReceiveChannelRtpStatisticsCallback(
int video_channel, StreamDataCountersCallback* callback) = 0;
+
+ // Gets sent and received RTCP packet types.
+ // TODO(asapersson): Remove default implementation.
+ virtual int GetRtcpPacketTypeCounters(
+ int video_channel,
+ RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const { return -1; }
+
// The function gets bandwidth usage statistics from the sent RTP streams in
// bits/s.
virtual int GetBandwidthUsage(const int video_channel,
@@ -389,6 +418,20 @@ class WEBRTC_DLLEXPORT ViERTP_RTCP {
const int video_channel,
unsigned int* estimated_bandwidth) const = 0;
+ // This function gets the receive-side bandwidth estimator statistics.
+ // TODO(jiayl): remove the default impl when libjingle's FakeWebRtcVideoEngine
+ // is updated.
+ virtual int GetReceiveBandwidthEstimatorStats(
+ const int video_channel,
+ ReceiveBandwidthEstimatorStats* output) const { return -1; }
+
+ // This function gets the PacedSender queuing delay for the last sent frame.
+ // TODO(jiayl): remove the default impl when libjingle is updated.
+ virtual int GetPacerQueuingDelayMs(
+ const int video_channel, int* delay_ms) const {
+ return -1;
+ }
+
// This function enables capturing of RTP packets to a binary file on a
// specific channel and for a given direction. The file can later be
// replayed using e.g. RTP Tools rtpplay since the binary file format is
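Sketch only (none of this is in the patch): enabling the new padding controls
on a send channel through the extended ViERTP_RTCP interface. The interface
pointer and channel id are assumed to exist in application code.

#include "webrtc/video_engine/include/vie_rtp_rtcp.h"

void EnablePaddingControls(webrtc::ViERTP_RTCP* rtp_rtcp, int video_channel) {
  // Pad the channel up to at least 300 kbps when the encoder produces less.
  rtp_rtcp->SetMinTransmitBitrate(video_channel, 300);
  // Prefer sending redundant payloads over dummy padding packets; this only
  // takes effect when RTX is enabled on the channel.
  rtp_rtcp->SetPadWithRedundantPayloads(video_channel, true);
}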
diff --git a/chromium/third_party/webrtc/video_engine/mock/mock_vie_frame_provider_base.h b/chromium/third_party/webrtc/video_engine/mock/mock_vie_frame_provider_base.h
new file mode 100644
index 00000000000..d4e17f4d27c
--- /dev/null
+++ b/chromium/third_party/webrtc/video_engine/mock/mock_vie_frame_provider_base.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
+
+#include "webrtc/video_engine/vie_frame_provider_base.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockViEFrameCallback : public ViEFrameCallback {
+ public:
+ MOCK_METHOD4(DeliverFrame,
+ void(int id,
+ I420VideoFrame* video_frame,
+ int num_csrcs,
+ const uint32_t CSRC[kRtpCsrcSize]));
+ MOCK_METHOD2(DelayChanged, void(int id, int frame_delay));
+ MOCK_METHOD3(GetPreferedFrameSettings,
+ int(int* width, int* height, int* frame_rate));
+ MOCK_METHOD1(ProviderDestroyed, void(int id));
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
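As a usage note (an assumption, not part of the patch), the new mock plugs into
tests the same way the encoder_state_feedback_unittest.cc change above swaps in
NiceMock<MockProcessThread>:

#include "testing/gmock/include/gmock/gmock.h"
#include "webrtc/video_engine/mock/mock_vie_frame_provider_base.h"

using ::testing::NiceMock;

void ExampleSetup() {
  // Uninteresting calls (DelayChanged, GetPreferedFrameSettings, ...) are
  // silently ignored by NiceMock; a real test would register the callback
  // with the frame provider under test to exercise DeliverFrame().
  NiceMock<webrtc::MockViEFrameCallback> callback;
}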
diff --git a/chromium/third_party/webrtc/video_engine/overuse_frame_detector.cc b/chromium/third_party/webrtc/video_engine/overuse_frame_detector.cc
index a5e2d6f5de7..764c2584c3b 100644
--- a/chromium/third_party/webrtc/video_engine/overuse_frame_detector.cc
+++ b/chromium/third_party/webrtc/video_engine/overuse_frame_detector.cc
@@ -15,12 +15,12 @@
#include <algorithm>
#include <list>
+#include <map>
#include "webrtc/modules/video_coding/utility/include/exp_filter.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/video_engine/include/vie_base.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -29,15 +29,8 @@ namespace webrtc {
namespace {
const int64_t kProcessIntervalMs = 5000;
-// Consecutive checks above threshold to trigger overuse.
-const int kConsecutiveChecksAboveThreshold = 2;
-
-// Minimum samples required to perform a check.
-const size_t kMinFrameSampleCount = 15;
-
// Weight factor to apply to the standard deviation.
const float kWeightFactor = 0.997f;
-
// Weight factor to apply to the average.
const float kWeightFactorMean = 0.98f;
@@ -45,12 +38,12 @@ const float kWeightFactorMean = 0.98f;
const int kQuickRampUpDelayMs = 10 * 1000;
// Delay between rampup attempts. Initially uses standard, scales up to max.
const int kStandardRampUpDelayMs = 30 * 1000;
-const int kMaxRampUpDelayMs = 120 * 1000;
+const int kMaxRampUpDelayMs = 240 * 1000;
// Exponential back-off factor, to prevent annoying up-down behaviour.
const double kRampUpBackoffFactor = 2.0;
-// The initial average encode time (set to a fairly small value).
-const float kInitialAvgEncodeTimeMs = 5.0f;
+// Max number of overuses detected before always applying the rampup delay.
+const int kMaxOverusesBeforeApplyRampupDelay = 7;
// The maximum exponent to use in VCMExpFilter.
const float kSampleDiffMs = 33.0f;
@@ -63,23 +56,28 @@ Statistics::Statistics() :
count_(0),
filtered_samples_(new VCMExpFilter(kWeightFactorMean)),
filtered_variance_(new VCMExpFilter(kWeightFactor)) {
+ Reset();
+}
+
+void Statistics::SetOptions(const CpuOveruseOptions& options) {
+ options_ = options;
}
void Statistics::Reset() {
sum_ = 0.0;
count_ = 0;
+ filtered_variance_->Reset(kWeightFactor);
+ filtered_variance_->Apply(1.0f, InitialVariance());
}
void Statistics::AddSample(float sample_ms) {
sum_ += sample_ms;
++count_;
- if (count_ < kMinFrameSampleCount) {
+ if (count_ < static_cast<uint32_t>(options_.min_frame_samples)) {
// Initialize filtered samples.
filtered_samples_->Reset(kWeightFactorMean);
filtered_samples_->Apply(1.0f, InitialMean());
- filtered_variance_->Reset(kWeightFactor);
- filtered_variance_->Apply(1.0f, InitialVariance());
return;
}
@@ -98,7 +96,8 @@ float Statistics::InitialMean() const {
float Statistics::InitialVariance() const {
// Start in between the underuse and overuse threshold.
- float average_stddev = (kNormalUseStdDevMs + kOveruseStdDevMs)/2.0f;
+ float average_stddev = (options_.low_capture_jitter_threshold_ms +
+ options_.high_capture_jitter_threshold_ms) / 2.0f;
return average_stddev * average_stddev;
}
@@ -116,6 +115,7 @@ class OveruseFrameDetector::EncodeTimeAvg {
public:
EncodeTimeAvg()
: kWeightFactor(0.5f),
+ kInitialAvgEncodeTimeMs(5.0f),
filtered_encode_time_ms_(new VCMExpFilter(kWeightFactor)) {
filtered_encode_time_ms_->Apply(1.0f, kInitialAvgEncodeTimeMs);
}
@@ -127,12 +127,13 @@ class OveruseFrameDetector::EncodeTimeAvg {
filtered_encode_time_ms_->Apply(exp, encode_time_ms);
}
- int filtered_encode_time_ms() const {
+ int Value() const {
return static_cast<int>(filtered_encode_time_ms_->Value() + 0.5);
}
private:
const float kWeightFactor;
+ const float kInitialAvgEncodeTimeMs;
scoped_ptr<VCMExpFilter> filtered_encode_time_ms_;
};
@@ -142,13 +143,27 @@ class OveruseFrameDetector::EncodeUsage {
EncodeUsage()
: kWeightFactorFrameDiff(0.998f),
kWeightFactorEncodeTime(0.995f),
+ kInitialSampleDiffMs(40.0f),
+ kMaxSampleDiffMs(45.0f),
+ count_(0),
filtered_encode_time_ms_(new VCMExpFilter(kWeightFactorEncodeTime)),
filtered_frame_diff_ms_(new VCMExpFilter(kWeightFactorFrameDiff)) {
- filtered_encode_time_ms_->Apply(1.0f, kInitialAvgEncodeTimeMs);
- filtered_frame_diff_ms_->Apply(1.0f, kSampleDiffMs);
+ Reset();
}
~EncodeUsage() {}
+ void SetOptions(const CpuOveruseOptions& options) {
+ options_ = options;
+ }
+
+ void Reset() {
+ count_ = 0;
+ filtered_frame_diff_ms_->Reset(kWeightFactorFrameDiff);
+ filtered_frame_diff_ms_->Apply(1.0f, kInitialSampleDiffMs);
+ filtered_encode_time_ms_->Reset(kWeightFactorEncodeTime);
+ filtered_encode_time_ms_->Apply(1.0f, InitialEncodeTimeMs());
+ }
+
void AddSample(float sample_ms) {
float exp = sample_ms / kSampleDiffMs;
exp = std::min(exp, kMaxExp);
@@ -156,25 +171,149 @@ class OveruseFrameDetector::EncodeUsage {
}
void AddEncodeSample(float encode_time_ms, int64_t diff_last_sample_ms) {
+ ++count_;
float exp = diff_last_sample_ms / kSampleDiffMs;
exp = std::min(exp, kMaxExp);
filtered_encode_time_ms_->Apply(exp, encode_time_ms);
}
- int UsageInPercent() const {
+ int Value() const {
+ if (count_ < static_cast<uint32_t>(options_.min_frame_samples)) {
+ return static_cast<int>(InitialUsageInPercent() + 0.5f);
+ }
float frame_diff_ms = std::max(filtered_frame_diff_ms_->Value(), 1.0f);
+ frame_diff_ms = std::min(frame_diff_ms, kMaxSampleDiffMs);
float encode_usage_percent =
100.0f * filtered_encode_time_ms_->Value() / frame_diff_ms;
return static_cast<int>(encode_usage_percent + 0.5);
}
private:
+ float InitialUsageInPercent() const {
+ // Start in between the underuse and overuse threshold.
+ return (options_.low_encode_usage_threshold_percent +
+ options_.high_encode_usage_threshold_percent) / 2.0f;
+ }
+
+ float InitialEncodeTimeMs() const {
+ return InitialUsageInPercent() * kInitialSampleDiffMs / 100;
+ }
+
const float kWeightFactorFrameDiff;
const float kWeightFactorEncodeTime;
+ const float kInitialSampleDiffMs;
+ const float kMaxSampleDiffMs;
+ uint64_t count_;
+ CpuOveruseOptions options_;
scoped_ptr<VCMExpFilter> filtered_encode_time_ms_;
scoped_ptr<VCMExpFilter> filtered_frame_diff_ms_;
};
+// Class for calculating the relative standard deviation of encode times.
+class OveruseFrameDetector::EncodeTimeRsd {
+ public:
+ EncodeTimeRsd(Clock* clock)
+ : kWeightFactor(0.6f),
+ count_(0),
+ filtered_rsd_(new VCMExpFilter(kWeightFactor)),
+ hist_samples_(0),
+ hist_sum_(0.0f),
+ last_process_time_ms_(clock->TimeInMilliseconds()) {
+ Reset();
+ }
+ ~EncodeTimeRsd() {}
+
+ void SetOptions(const CpuOveruseOptions& options) {
+ options_ = options;
+ }
+
+ void Reset() {
+ count_ = 0;
+ filtered_rsd_->Reset(kWeightFactor);
+ filtered_rsd_->Apply(1.0f, InitialValue());
+ hist_.clear();
+ hist_samples_ = 0;
+ hist_sum_ = 0.0f;
+ }
+
+ void AddEncodeSample(float encode_time_ms) {
+ int bin = static_cast<int>(encode_time_ms + 0.5f);
+ if (bin <= 0) {
+ // The frame was probably not encoded, skip possible dropped frame.
+ return;
+ }
+ ++count_;
+ ++hist_[bin];
+ ++hist_samples_;
+ hist_sum_ += bin;
+ }
+
+ void Process(int64_t now) {
+ if (count_ < static_cast<uint32_t>(options_.min_frame_samples)) {
+ // Have not received min number of frames since last reset.
+ return;
+ }
+ const int kMinHistSamples = 20;
+ if (hist_samples_ < kMinHistSamples) {
+ return;
+ }
+ const int64_t kMinDiffSinceLastProcessMs = 1000;
+ int64_t diff_last_process_ms = now - last_process_time_ms_;
+ if (now - last_process_time_ms_ <= kMinDiffSinceLastProcessMs) {
+ return;
+ }
+ last_process_time_ms_ = now;
+
+ // Calculate variance (using samples above the mean).
+ // Detects when some frames have a noticeably larger encode time while the
+ // average encode time increases only slightly.
+ int mean = hist_sum_ / hist_samples_;
+ float variance = 0.0f;
+ int total_count = 0;
+ for (std::map<int,int>::iterator it = hist_.begin();
+ it != hist_.end(); ++it) {
+ int time = it->first;
+ int count = it->second;
+ if (time > mean) {
+ total_count += count;
+ for (int i = 0; i < count; ++i) {
+ variance += ((time - mean) * (time - mean));
+ }
+ }
+ }
+ variance /= std::max(total_count, 1);
+ float cov = sqrt(variance) / mean;
+
+ hist_.clear();
+ hist_samples_ = 0;
+ hist_sum_ = 0.0f;
+
+ float exp = static_cast<float>(diff_last_process_ms) / kProcessIntervalMs;
+ exp = std::min(exp, kMaxExp);
+ filtered_rsd_->Apply(exp, 100.0f * cov);
+ }
+
+ int Value() const {
+ return static_cast<int>(filtered_rsd_->Value() + 0.5);
+ }
+
+ private:
+ float InitialValue() const {
+ // Start in between the underuse and overuse threshold.
+ return std::max(((options_.low_encode_time_rsd_threshold +
+ options_.high_encode_time_rsd_threshold) / 2.0f), 0.0f);
+ }
+
+ const float kWeightFactor;
+ uint32_t count_; // Number of encode samples since last reset.
+ CpuOveruseOptions options_;
+ scoped_ptr<VCMExpFilter> filtered_rsd_;
+ int hist_samples_;
+ float hist_sum_;
+ std::map<int,int> hist_; // Histogram of encode time of frames.
+ int64_t last_process_time_ms_;
+};
+
// Class for calculating the capture queue delay change.
class OveruseFrameDetector::CaptureQueueDelay {
public:
@@ -221,7 +360,7 @@ class OveruseFrameDetector::CaptureQueueDelay {
return delay_ms_;
}
- int filtered_delay_ms_per_s() const {
+ int Value() const {
return static_cast<int>(filtered_delay_ms_per_s_->Value() + 0.5);
}
@@ -232,25 +371,23 @@ class OveruseFrameDetector::CaptureQueueDelay {
scoped_ptr<VCMExpFilter> filtered_delay_ms_per_s_;
};
-OveruseFrameDetector::OveruseFrameDetector(Clock* clock,
- float normaluse_stddev_ms,
- float overuse_stddev_ms)
+OveruseFrameDetector::OveruseFrameDetector(Clock* clock)
: crit_(CriticalSectionWrapper::CreateCriticalSection()),
- normaluse_stddev_ms_(normaluse_stddev_ms),
- overuse_stddev_ms_(overuse_stddev_ms),
observer_(NULL),
clock_(clock),
next_process_time_(clock_->TimeInMilliseconds()),
+ num_process_times_(0),
last_capture_time_(0),
last_overuse_time_(0),
checks_above_threshold_(0),
+ num_overuse_detections_(0),
last_rampup_time_(0),
in_quick_rampup_(false),
current_rampup_delay_ms_(kStandardRampUpDelayMs),
num_pixels_(0),
- last_capture_jitter_ms_(-1),
last_encode_sample_ms_(0),
encode_time_(new EncodeTimeAvg()),
+ encode_rsd_(new EncodeTimeRsd(clock)),
encode_usage_(new EncodeUsage()),
capture_queue_delay_(new CaptureQueueDelay()) {
}
@@ -263,24 +400,32 @@ void OveruseFrameDetector::SetObserver(CpuOveruseObserver* observer) {
observer_ = observer;
}
-int OveruseFrameDetector::AvgEncodeTimeMs() const {
+void OveruseFrameDetector::SetOptions(const CpuOveruseOptions& options) {
+ assert(options.min_frame_samples > 0);
CriticalSectionScoped cs(crit_.get());
- return encode_time_->filtered_encode_time_ms();
-}
-
-int OveruseFrameDetector::EncodeUsagePercent() const {
- CriticalSectionScoped cs(crit_.get());
- return encode_usage_->UsageInPercent();
+ if (options_.Equals(options)) {
+ return;
+ }
+ options_ = options;
+ capture_deltas_.SetOptions(options);
+ encode_usage_->SetOptions(options);
+ encode_rsd_->SetOptions(options);
+ ResetAll(num_pixels_);
}
-int OveruseFrameDetector::AvgCaptureQueueDelayMsPerS() const {
+int OveruseFrameDetector::CaptureQueueDelayMsPerS() const {
CriticalSectionScoped cs(crit_.get());
- return capture_queue_delay_->filtered_delay_ms_per_s();
+ return capture_queue_delay_->delay_ms();
}
-int OveruseFrameDetector::CaptureQueueDelayMsPerS() const {
+void OveruseFrameDetector::GetCpuOveruseMetrics(
+ CpuOveruseMetrics* metrics) const {
CriticalSectionScoped cs(crit_.get());
- return capture_queue_delay_->delay_ms();
+ metrics->capture_jitter_ms = static_cast<int>(capture_deltas_.StdDev() + 0.5);
+ metrics->avg_encode_time_ms = encode_time_->Value();
+ metrics->encode_rsd = encode_rsd_->Value();
+ metrics->encode_usage_percent = encode_usage_->Value();
+ metrics->capture_queue_delay_ms_per_s = capture_queue_delay_->Value();
}
int32_t OveruseFrameDetector::TimeUntilNextProcess() {
@@ -288,26 +433,45 @@ int32_t OveruseFrameDetector::TimeUntilNextProcess() {
return next_process_time_ - clock_->TimeInMilliseconds();
}
+bool OveruseFrameDetector::FrameSizeChanged(int num_pixels) const {
+ if (num_pixels != num_pixels_) {
+ return true;
+ }
+ return false;
+}
+
+bool OveruseFrameDetector::FrameTimeoutDetected(int64_t now) const {
+ if (last_capture_time_ == 0) {
+ return false;
+ }
+ return (now - last_capture_time_) > options_.frame_timeout_interval_ms;
+}
+
+void OveruseFrameDetector::ResetAll(int num_pixels) {
+ num_pixels_ = num_pixels;
+ capture_deltas_.Reset();
+ encode_usage_->Reset();
+ encode_rsd_->Reset();
+ capture_queue_delay_->ClearFrames();
+ last_capture_time_ = 0;
+ num_process_times_ = 0;
+}
+
void OveruseFrameDetector::FrameCaptured(int width, int height) {
CriticalSectionScoped cs(crit_.get());
- int num_pixels = width * height;
- if (num_pixels != num_pixels_) {
- // Frame size changed, reset statistics.
- num_pixels_ = num_pixels;
- capture_deltas_.Reset();
- last_capture_time_ = 0;
- capture_queue_delay_->ClearFrames();
+ int64_t now = clock_->TimeInMilliseconds();
+ if (FrameSizeChanged(width * height) || FrameTimeoutDetected(now)) {
+ ResetAll(width * height);
}
- int64_t time = clock_->TimeInMilliseconds();
if (last_capture_time_ != 0) {
- capture_deltas_.AddSample(time - last_capture_time_);
- encode_usage_->AddSample(time - last_capture_time_);
+ capture_deltas_.AddSample(now - last_capture_time_);
+ encode_usage_->AddSample(now - last_capture_time_);
}
- last_capture_time_ = time;
+ last_capture_time_ = now;
- capture_queue_delay_->FrameCaptured(time);
+ capture_queue_delay_->FrameCaptured(now);
}
void OveruseFrameDetector::FrameProcessingStarted() {
@@ -322,15 +486,11 @@ void OveruseFrameDetector::FrameEncoded(int encode_time_ms) {
int64_t diff_ms = time - last_encode_sample_ms_;
encode_time_->AddEncodeSample(encode_time_ms, diff_ms);
encode_usage_->AddEncodeSample(encode_time_ms, diff_ms);
+ encode_rsd_->AddEncodeSample(encode_time_ms);
}
last_encode_sample_ms_ = time;
}
-int OveruseFrameDetector::last_capture_jitter_ms() const {
- CriticalSectionScoped cs(crit_.get());
- return last_capture_jitter_ms_;
-}
-
int32_t OveruseFrameDetector::Process() {
CriticalSectionScoped cs(crit_.get());
@@ -342,20 +502,23 @@ int32_t OveruseFrameDetector::Process() {
int64_t diff_ms = now - next_process_time_ + kProcessIntervalMs;
next_process_time_ = now + kProcessIntervalMs;
+ ++num_process_times_;
- // Don't trigger overuse unless we've seen a certain number of frames.
- if (capture_deltas_.Count() < kMinFrameSampleCount)
- return 0;
-
+ encode_rsd_->Process(now);
capture_queue_delay_->CalculateDelayChange(diff_ms);
+ if (num_process_times_ <= options_.min_process_count) {
+ return 0;
+ }
+
if (IsOverusing()) {
// If the last thing we did was ramp up, and we now have to back down, check
// whether this peak was short. If so, back off to avoid oscillating around a
// load that the system doesn't seem to handle.
bool check_for_backoff = last_rampup_time_ > last_overuse_time_;
if (check_for_backoff) {
- if (now - last_rampup_time_ < kStandardRampUpDelayMs) {
+ if (now - last_rampup_time_ < kStandardRampUpDelayMs ||
+ num_overuse_detections_ > kMaxOverusesBeforeApplyRampupDelay) {
// Going up was not ok for very long, back off.
current_rampup_delay_ms_ *= kRampUpBackoffFactor;
if (current_rampup_delay_ms_ > kMaxRampUpDelayMs)
@@ -369,6 +532,7 @@ int32_t OveruseFrameDetector::Process() {
last_overuse_time_ = now;
in_quick_rampup_ = false;
checks_above_threshold_ = 0;
+ ++num_overuse_detections_;
if (observer_ != NULL)
observer_->OveruseDetected();
@@ -380,31 +544,39 @@ int32_t OveruseFrameDetector::Process() {
observer_->NormalUsage();
}
- WEBRTC_TRACE(
- webrtc::kTraceInfo,
- webrtc::kTraceVideo,
- -1,
- "Capture input stats: avg: %.2fms, std_dev: %.2fms (rampup delay: "
- "%dms, overuse: >=%.2fms, "
- "underuse: <%.2fms)",
- capture_deltas_.Mean(),
- capture_deltas_.StdDev(),
- in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_,
- overuse_stddev_ms_,
- normaluse_stddev_ms_);
-
- last_capture_jitter_ms_ = static_cast<int>(capture_deltas_.StdDev() + 0.5);
+ int rampup_delay =
+ in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
+ LOG(LS_VERBOSE) << " Frame stats: capture avg: " << capture_deltas_.Mean()
+ << " capture stddev " << capture_deltas_.StdDev()
+ << " encode usage " << encode_usage_->Value()
+ << " encode rsd " << encode_rsd_->Value()
+ << " overuse detections " << num_overuse_detections_
+ << " rampup delay " << rampup_delay;
return 0;
}
bool OveruseFrameDetector::IsOverusing() {
- if (capture_deltas_.StdDev() >= overuse_stddev_ms_) {
+ bool overusing = false;
+ if (options_.enable_capture_jitter_method) {
+ overusing = capture_deltas_.StdDev() >=
+ options_.high_capture_jitter_threshold_ms;
+ } else if (options_.enable_encode_usage_method) {
+ bool encode_usage_overuse =
+ encode_usage_->Value() >= options_.high_encode_usage_threshold_percent;
+ bool encode_rsd_overuse = false;
+ if (options_.high_encode_time_rsd_threshold > 0) {
+ encode_rsd_overuse =
+ (encode_rsd_->Value() >= options_.high_encode_time_rsd_threshold);
+ }
+ overusing = encode_usage_overuse || encode_rsd_overuse;
+ }
+
+ if (overusing) {
++checks_above_threshold_;
} else {
checks_above_threshold_ = 0;
}
-
- return checks_above_threshold_ >= kConsecutiveChecksAboveThreshold;
+ return checks_above_threshold_ >= options_.high_threshold_consecutive_count;
}
bool OveruseFrameDetector::IsUnderusing(int64_t time_now) {
@@ -412,6 +584,20 @@ bool OveruseFrameDetector::IsUnderusing(int64_t time_now) {
if (time_now < last_rampup_time_ + delay)
return false;
- return capture_deltas_.StdDev() < normaluse_stddev_ms_;
+ bool underusing = false;
+ if (options_.enable_capture_jitter_method) {
+ underusing = capture_deltas_.StdDev() <
+ options_.low_capture_jitter_threshold_ms;
+ } else if (options_.enable_encode_usage_method) {
+ bool encode_usage_underuse =
+ encode_usage_->Value() < options_.low_encode_usage_threshold_percent;
+ bool encode_rsd_underuse = true;
+ if (options_.low_encode_time_rsd_threshold > 0) {
+ encode_rsd_underuse =
+ (encode_rsd_->Value() < options_.low_encode_time_rsd_threshold);
+ }
+ underusing = encode_usage_underuse && encode_rsd_underuse;
+ }
+ return underusing;
}
} // namespace webrtc
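To make the new EncodeTimeRsd metric above concrete, here is a standalone
sketch (illustration only, with made-up sample values) of the "relative
standard deviation" that Process() derives from its encode-time histogram. The
variance is taken over samples above the mean only, so a handful of slow frames
raises the value even when the average encode time barely moves.

#include <algorithm>
#include <cmath>
#include <map>

int UpperTailRsdPercent(const std::map<int, int>& hist) {  // encode ms -> count
  int samples = 0;
  float sum = 0.0f;
  for (std::map<int, int>::const_iterator it = hist.begin();
       it != hist.end(); ++it) {
    samples += it->second;
    sum += static_cast<float>(it->first) * it->second;
  }
  if (samples == 0)
    return 0;
  int mean = static_cast<int>(sum / samples);
  if (mean <= 0)
    return 0;
  float variance = 0.0f;
  int total_count = 0;
  for (std::map<int, int>::const_iterator it = hist.begin();
       it != hist.end(); ++it) {
    if (it->first > mean) {
      total_count += it->second;
      variance += it->second * (it->first - mean) * (it->first - mean);
    }
  }
  variance /= std::max(total_count, 1);
  float cov = std::sqrt(variance) / mean;
  return static_cast<int>(100.0f * cov + 0.5f);
}

// E.g. 190 frames at 10 ms plus 10 frames at 25 ms give mean 10 ms, an
// upper-tail std dev of 15 ms, and therefore a value of 150.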
diff --git a/chromium/third_party/webrtc/video_engine/overuse_frame_detector.h b/chromium/third_party/webrtc/video_engine/overuse_frame_detector.h
index 7cbb21ce6d0..efd23dc4a52 100644
--- a/chromium/third_party/webrtc/video_engine/overuse_frame_detector.h
+++ b/chromium/third_party/webrtc/video_engine/overuse_frame_detector.h
@@ -11,9 +11,10 @@
#ifndef WEBRTC_VIDEO_ENGINE_OVERUSE_FRAME_DETECTOR_H_
#define WEBRTC_VIDEO_ENGINE_OVERUSE_FRAME_DETECTOR_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/video_engine/include/vie_base.h"
namespace webrtc {
@@ -22,18 +23,6 @@ class CpuOveruseObserver;
class CriticalSectionWrapper;
class VCMExpFilter;
-// Limits on standard deviation for under/overuse.
-#ifdef WEBRTC_LINUX
-const float kOveruseStdDevMs = 15.0f;
-const float kNormalUseStdDevMs = 7.0f;
-#elif WEBRTC_MAC
-const float kOveruseStdDevMs = 22.0f;
-const float kNormalUseStdDevMs = 12.0f;
-#else
-const float kOveruseStdDevMs = 17.0f;
-const float kNormalUseStdDevMs = 10.0f;
-#endif
-
// TODO(pbos): Move this somewhere appropriate.
class Statistics {
public:
@@ -41,6 +30,7 @@ class Statistics {
void AddSample(float sample_ms);
void Reset();
+ void SetOptions(const CpuOveruseOptions& options);
float Mean() const;
float StdDev() const;
@@ -52,6 +42,7 @@ class Statistics {
float sum_;
uint64_t count_;
+ CpuOveruseOptions options_;
scoped_ptr<VCMExpFilter> filtered_samples_;
scoped_ptr<VCMExpFilter> filtered_variance_;
};
@@ -59,15 +50,16 @@ class Statistics {
// Use to detect system overuse based on jitter in incoming frames.
class OveruseFrameDetector : public Module {
public:
- explicit OveruseFrameDetector(Clock* clock,
- float normaluse_stddev_ms,
- float overuse_stddev_ms);
+ explicit OveruseFrameDetector(Clock* clock);
~OveruseFrameDetector();
// Registers an observer receiving overuse and underuse callbacks. Set
// 'observer' to NULL to disable callbacks.
void SetObserver(CpuOveruseObserver* observer);
+ // Sets options for overuse detection.
+ void SetOptions(const CpuOveruseOptions& options);
+
// Called for each captured frame.
void FrameCaptured(int width, int height);
@@ -78,25 +70,23 @@ class OveruseFrameDetector : public Module {
void FrameEncoded(int encode_time_ms);
// Accessors.
- // The last estimated jitter based on the incoming captured frames.
- int last_capture_jitter_ms() const;
-
- // Running average of reported encode time (FrameEncoded()).
- // Only used for stats.
- int AvgEncodeTimeMs() const;
-
- // The average encode time divided by the average time difference between
- // incoming captured frames.
- // This variable is currently only used for statistics.
- int EncodeUsagePercent() const;
-
- // The current time delay between an incoming captured frame (FrameCaptured())
- // until the frame is being processed (FrameProcessingStarted()).
- // (Note: if a new frame is received before an old frame has been processed,
- // the old frame is skipped).
- // The delay is returned as the delay in ms per second.
- // This variable is currently only used for statistics.
- int AvgCaptureQueueDelayMsPerS() const;
+
+ // Returns CpuOveruseMetrics where
+ // capture_jitter_ms: The estimated jitter based on incoming captured frames.
+ // avg_encode_time_ms: Running average of reported encode time
+ // (FrameEncoded()). Only used for stats.
+ // encode_usage_percent: The average encode time divided by the average time
+ // difference between incoming captured frames.
+ // capture_queue_delay_ms_per_s: The current time delay between an incoming
+ // captured frame (FrameCaptured()) until the
+ // frame is being processed
+ // (FrameProcessingStarted()). (Note: if a new
+ // frame is received before an old frame has
+ // been processed, the old frame is skipped).
+ // The delay is expressed in ms delay per sec.
+ // Only used for stats.
+ void GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const;
+
int CaptureQueueDelayMsPerS() const;
// Implements Module.
@@ -105,30 +95,36 @@ class OveruseFrameDetector : public Module {
private:
class EncodeTimeAvg;
+ class EncodeTimeRsd;
class EncodeUsage;
class CaptureQueueDelay;
bool IsOverusing();
bool IsUnderusing(int64_t time_now);
+ bool FrameTimeoutDetected(int64_t now) const;
+ bool FrameSizeChanged(int num_pixels) const;
+
+ void ResetAll(int num_pixels);
+
// Protecting all members.
scoped_ptr<CriticalSectionWrapper> crit_;
- // Limits on standard deviation for under/overuse.
- const float normaluse_stddev_ms_;
- const float overuse_stddev_ms_;
-
// Observer getting overuse reports.
CpuOveruseObserver* observer_;
+ CpuOveruseOptions options_;
+
Clock* clock_;
int64_t next_process_time_;
+ int64_t num_process_times_;
Statistics capture_deltas_;
int64_t last_capture_time_;
int64_t last_overuse_time_;
int checks_above_threshold_;
+ int num_overuse_detections_;
int64_t last_rampup_time_;
bool in_quick_rampup_;
@@ -137,10 +133,9 @@ class OveruseFrameDetector : public Module {
// Number of pixels of last captured frame.
int num_pixels_;
- int last_capture_jitter_ms_;
-
int64_t last_encode_sample_ms_;
scoped_ptr<EncodeTimeAvg> encode_time_;
+ scoped_ptr<EncodeTimeRsd> encode_rsd_;
scoped_ptr<EncodeUsage> encode_usage_;
scoped_ptr<CaptureQueueDelay> capture_queue_delay_;
diff --git a/chromium/third_party/webrtc/video_engine/overuse_frame_detector_unittest.cc b/chromium/third_party/webrtc/video_engine/overuse_frame_detector_unittest.cc
index f974f28898c..4e5d4bda27f 100644
--- a/chromium/third_party/webrtc/video_engine/overuse_frame_detector_unittest.cc
+++ b/chromium/third_party/webrtc/video_engine/overuse_frame_detector_unittest.cc
@@ -17,6 +17,12 @@
#include "webrtc/video_engine/overuse_frame_detector.h"
namespace webrtc {
+namespace {
+ const int kWidth = 640;
+ const int kHeight = 480;
+ const int kFrameInterval33ms = 33;
+ const int kProcessIntervalMs = 5000;
+} // namespace
class MockCpuOveruseObserver : public CpuOveruseObserver {
public:
@@ -27,98 +33,291 @@ class MockCpuOveruseObserver : public CpuOveruseObserver {
MOCK_METHOD0(NormalUsage, void());
};
+class CpuOveruseObserverImpl : public CpuOveruseObserver {
+ public:
+ CpuOveruseObserverImpl() :
+ overuse_(0),
+ normaluse_(0) {}
+ virtual ~CpuOveruseObserverImpl() {}
+
+ void OveruseDetected() { ++overuse_; }
+ void NormalUsage() { ++normaluse_; }
+
+ int overuse_;
+ int normaluse_;
+};
+
class OveruseFrameDetectorTest : public ::testing::Test {
protected:
virtual void SetUp() {
clock_.reset(new SimulatedClock(1234));
observer_.reset(new MockCpuOveruseObserver());
- overuse_detector_.reset(new OveruseFrameDetector(clock_.get(),
- 10.0f,
- 15.0f));
+ overuse_detector_.reset(new OveruseFrameDetector(clock_.get()));
+
+ options_.low_capture_jitter_threshold_ms = 10.0f;
+ options_.high_capture_jitter_threshold_ms = 15.0f;
+ options_.min_process_count = 0;
+ overuse_detector_->SetOptions(options_);
overuse_detector_->SetObserver(observer_.get());
}
- void InsertFramesWithInterval(size_t num_frames, int interval_ms) {
+ int InitialJitter() {
+ return ((options_.low_capture_jitter_threshold_ms +
+ options_.high_capture_jitter_threshold_ms) / 2.0f) + 0.5;
+ }
+
+ int InitialEncodeUsage() {
+ return ((options_.low_encode_usage_threshold_percent +
+ options_.high_encode_usage_threshold_percent) / 2.0f) + 0.5;
+ }
+
+ int InitialEncodeRsd() {
+ return std::max(
+ ((options_.low_encode_time_rsd_threshold +
+ options_.high_encode_time_rsd_threshold) / 2.0f) + 0.5f, 0.0f);
+ }
+
+ void InsertFramesWithInterval(
+ size_t num_frames, int interval_ms, int width, int height) {
while (num_frames-- > 0) {
clock_->AdvanceTimeMilliseconds(interval_ms);
- overuse_detector_->FrameCaptured(640, 480);
+ overuse_detector_->FrameCaptured(width, height);
}
}
- void TriggerOveruse() {
- int regular_frame_interval_ms = 33;
+ void InsertAndEncodeFramesWithInterval(
+ int num_frames, int interval_ms, int width, int height, int encode_ms) {
+ while (num_frames-- > 0) {
+ overuse_detector_->FrameCaptured(width, height);
+ clock_->AdvanceTimeMilliseconds(encode_ms);
+ overuse_detector_->FrameEncoded(encode_ms);
+ clock_->AdvanceTimeMilliseconds(interval_ms - encode_ms);
+ }
+ }
- EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ void TriggerOveruse(int num_times) {
+ for (int i = 0; i < num_times; ++i) {
+ InsertFramesWithInterval(200, kFrameInterval33ms, kWidth, kHeight);
+ InsertFramesWithInterval(50, 110, kWidth, kHeight);
+ overuse_detector_->Process();
+ }
+ }
- InsertFramesWithInterval(50, regular_frame_interval_ms);
- InsertFramesWithInterval(50, 110);
+ void TriggerNormalUsage() {
+ InsertFramesWithInterval(900, kFrameInterval33ms, kWidth, kHeight);
overuse_detector_->Process();
+ }
+
+ void TriggerOveruseWithEncodeUsage(int num_times) {
+ const int kEncodeTimeMs = 32;
+ for (int i = 0; i < num_times; ++i) {
+ InsertAndEncodeFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kEncodeTimeMs);
+ overuse_detector_->Process();
+ }
+ }
+
+ void TriggerOveruseWithEncodeRsd(int num_times) {
+ const int kEncodeTimeMs1 = 10;
+ const int kEncodeTimeMs2 = 25;
+ for (int i = 0; i < num_times; ++i) {
+ InsertAndEncodeFramesWithInterval(
+ 200, kFrameInterval33ms, kWidth, kHeight, kEncodeTimeMs1);
+ InsertAndEncodeFramesWithInterval(
+ 10, kFrameInterval33ms, kWidth, kHeight, kEncodeTimeMs2);
+ overuse_detector_->Process();
+ }
+ }
- InsertFramesWithInterval(50, regular_frame_interval_ms);
- InsertFramesWithInterval(50, 110);
+ void TriggerNormalUsageWithEncodeTime() {
+ const int kEncodeTimeMs = 5;
+ InsertAndEncodeFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kEncodeTimeMs);
overuse_detector_->Process();
}
- void TriggerNormalUsage() {
- int regular_frame_interval_ms = 33;
+ int CaptureJitterMs() {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ return metrics.capture_jitter_ms;
+ }
- EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+ int AvgEncodeTimeMs() {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ return metrics.avg_encode_time_ms;
+ }
- InsertFramesWithInterval(900, regular_frame_interval_ms);
- overuse_detector_->Process();
+ int EncodeUsagePercent() {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ return metrics.encode_usage_percent;
+ }
+
+ int EncodeRsd() {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ return metrics.encode_rsd;
}
+ CpuOveruseOptions options_;
scoped_ptr<SimulatedClock> clock_;
scoped_ptr<MockCpuOveruseObserver> observer_;
scoped_ptr<OveruseFrameDetector> overuse_detector_;
};
+// enable_capture_jitter_method = true;
+// CaptureJitterMs() > high_capture_jitter_threshold_ms => overuse.
+// CaptureJitterMs() < low_capture_jitter_threshold_ms => underuse.
TEST_F(OveruseFrameDetectorTest, TriggerOveruse) {
- TriggerOveruse();
+ // capture_jitter > high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
}
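// A minimal sketch of the capture-jitter rule stated in the comment above,
// for orientation only: CpuUsageState and CheckCaptureJitter() are
// hypothetical names, not the detector's actual internals; only the
// threshold comparison mirrors the documented behavior.
enum CpuUsageState { kOveruse, kUnderuse, kNormal };

CpuUsageState CheckCaptureJitter(float capture_jitter_ms,
                                 const CpuOveruseOptions& options) {
  if (capture_jitter_ms > options.high_capture_jitter_threshold_ms)
    return kOveruse;   // Sustained jitter above the high threshold.
  if (capture_jitter_ms < options.low_capture_jitter_threshold_ms)
    return kUnderuse;  // Headroom: jitter below the low threshold.
  return kNormal;
}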
TEST_F(OveruseFrameDetectorTest, OveruseAndRecover) {
- TriggerOveruse();
+ // capture_jitter > high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ // capture_jitter < low => underuse
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+ TriggerNormalUsage();
+}
+
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithNoObserver) {
+ overuse_detector_->SetObserver(NULL);
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+ TriggerNormalUsage();
+}
+
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithMethodDisabled) {
+ options_.enable_capture_jitter_method = false;
+ options_.enable_encode_usage_method = false;
+ overuse_detector_->SetOptions(options_);
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
TriggerNormalUsage();
}
TEST_F(OveruseFrameDetectorTest, DoubleOveruseAndRecover) {
- TriggerOveruse();
- TriggerOveruse();
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(2);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
TriggerNormalUsage();
}
+TEST_F(OveruseFrameDetectorTest, TriggerNormalUsageWithMinProcessCount) {
+ CpuOveruseObserverImpl overuse_observer_;
+ overuse_detector_->SetObserver(&overuse_observer_);
+ options_.min_process_count = 1;
+ overuse_detector_->SetOptions(options_);
+ InsertFramesWithInterval(900, kFrameInterval33ms, kWidth, kHeight);
+ overuse_detector_->Process();
+ EXPECT_EQ(0, overuse_observer_.normaluse_);
+ clock_->AdvanceTimeMilliseconds(kProcessIntervalMs);
+ overuse_detector_->Process();
+ EXPECT_EQ(1, overuse_observer_.normaluse_);
+}
+
TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) {
EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(64);
+  for (size_t i = 0; i < 64; ++i) {
+ TriggerOveruse(options_.high_threshold_consecutive_count);
+ }
+}
+
+TEST_F(OveruseFrameDetectorTest, ConsecutiveCountTriggersOveruse) {
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ options_.high_threshold_consecutive_count = 2;
+ overuse_detector_->SetOptions(options_);
+ TriggerOveruse(2);
+}
+
+TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) {
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+ options_.high_threshold_consecutive_count = 2;
+ overuse_detector_->SetOptions(options_);
+ TriggerOveruse(1);
+}
+
+TEST_F(OveruseFrameDetectorTest, GetCpuOveruseMetrics) {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ EXPECT_GT(metrics.capture_jitter_ms, 0);
+ EXPECT_GT(metrics.avg_encode_time_ms, 0);
+ EXPECT_GT(metrics.encode_usage_percent, 0);
+ EXPECT_GE(metrics.capture_queue_delay_ms_per_s, 0);
+ EXPECT_GE(metrics.encode_rsd, 0);
+}
+
+TEST_F(OveruseFrameDetectorTest, CaptureJitter) {
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
+ InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
+}
+
+TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterResolutionChange) {
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
+ InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
+ // Verify reset.
+ InsertFramesWithInterval(1, kFrameInterval33ms, kWidth, kHeight + 1);
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
+}
+
+TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterFrameTimeout) {
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
+ InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
+ InsertFramesWithInterval(
+ 1, options_.frame_timeout_interval_ms, kWidth, kHeight);
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
+ // Verify reset.
+ InsertFramesWithInterval(
+ 1, options_.frame_timeout_interval_ms + 1, kWidth, kHeight);
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
+}
- for(size_t i = 0; i < 64; ++i)
- TriggerOveruse();
+TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterChangingThreshold) {
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
+ options_.high_capture_jitter_threshold_ms = 90.0f;
+ overuse_detector_->SetOptions(options_);
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
+ options_.low_capture_jitter_threshold_ms = 30.0f;
+ overuse_detector_->SetOptions(options_);
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
}
-TEST_F(OveruseFrameDetectorTest, LastCaptureJitter) {
- EXPECT_EQ(-1, overuse_detector_->last_capture_jitter_ms());
- TriggerOveruse();
- EXPECT_GT(overuse_detector_->last_capture_jitter_ms(), 0);
+TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdatingCaptureJitter) {
+ options_.min_frame_samples = 40;
+ overuse_detector_->SetOptions(options_);
+ InsertFramesWithInterval(40, kFrameInterval33ms, kWidth, kHeight);
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, NoCaptureQueueDelay) {
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 0);
- overuse_detector_->FrameCaptured(320, 180);
+ overuse_detector_->FrameCaptured(kWidth, kHeight);
overuse_detector_->FrameProcessingStarted();
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 0);
}
TEST_F(OveruseFrameDetectorTest, CaptureQueueDelay) {
- overuse_detector_->FrameCaptured(320, 180);
+ overuse_detector_->FrameCaptured(kWidth, kHeight);
clock_->AdvanceTimeMilliseconds(100);
overuse_detector_->FrameProcessingStarted();
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100);
}
TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayMultipleFrames) {
- overuse_detector_->FrameCaptured(320, 180);
+ overuse_detector_->FrameCaptured(kWidth, kHeight);
clock_->AdvanceTimeMilliseconds(10);
- overuse_detector_->FrameCaptured(320, 180);
+ overuse_detector_->FrameCaptured(kWidth, kHeight);
clock_->AdvanceTimeMilliseconds(20);
overuse_detector_->FrameProcessingStarted();
@@ -128,9 +327,9 @@ TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayMultipleFrames) {
}
TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayResetAtResolutionSwitch) {
- overuse_detector_->FrameCaptured(320, 180);
+ overuse_detector_->FrameCaptured(kWidth, kHeight);
clock_->AdvanceTimeMilliseconds(10);
- overuse_detector_->FrameCaptured(321, 180);
+ overuse_detector_->FrameCaptured(kWidth, kHeight + 1);
clock_->AdvanceTimeMilliseconds(20);
overuse_detector_->FrameProcessingStarted();
@@ -138,7 +337,7 @@ TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayResetAtResolutionSwitch) {
}
TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayNoMatchingCapturedFrame) {
- overuse_detector_->FrameCaptured(320, 180);
+ overuse_detector_->FrameCaptured(kWidth, kHeight);
clock_->AdvanceTimeMilliseconds(100);
overuse_detector_->FrameProcessingStarted();
EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100);
@@ -149,22 +348,139 @@ TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayNoMatchingCapturedFrame) {
TEST_F(OveruseFrameDetectorTest, EncodedFrame) {
const int kInitialAvgEncodeTimeInMs = 5;
- EXPECT_EQ(kInitialAvgEncodeTimeInMs, overuse_detector_->AvgEncodeTimeMs());
+ EXPECT_EQ(kInitialAvgEncodeTimeInMs, AvgEncodeTimeMs());
for (int i = 0; i < 30; i++) {
clock_->AdvanceTimeMilliseconds(33);
overuse_detector_->FrameEncoded(2);
}
- EXPECT_EQ(2, overuse_detector_->AvgEncodeTimeMs());
+ EXPECT_EQ(2, AvgEncodeTimeMs());
+}
+
+TEST_F(OveruseFrameDetectorTest, InitialEncodeUsage) {
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
}
TEST_F(OveruseFrameDetectorTest, EncodedUsage) {
- for (int i = 0; i < 30; i++) {
- overuse_detector_->FrameCaptured(320, 180);
- clock_->AdvanceTimeMilliseconds(5);
- overuse_detector_->FrameEncoded(5);
- clock_->AdvanceTimeMilliseconds(33-5);
- }
- EXPECT_EQ(15, overuse_detector_->EncodeUsagePercent());
+ const int kEncodeTimeMs = 5;
+ InsertAndEncodeFramesWithInterval(
+ 1000, kFrameInterval33ms, kWidth, kHeight, kEncodeTimeMs);
+ EXPECT_EQ(kEncodeTimeMs * 100 / kFrameInterval33ms, EncodeUsagePercent());
}
+TEST_F(OveruseFrameDetectorTest, EncodeUsageResetAfterChangingThreshold) {
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
+ options_.high_encode_usage_threshold_percent = 100;
+ overuse_detector_->SetOptions(options_);
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
+ options_.low_encode_usage_threshold_percent = 20;
+ overuse_detector_->SetOptions(options_);
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
+}
+
+// enable_encode_usage_method = true;
+// EncodeUsagePercent() > high_encode_usage_threshold_percent => overuse.
+// EncodeUsagePercent() < low_encode_usage_threshold_percent => underuse.
+TEST_F(OveruseFrameDetectorTest, TriggerOveruseWithEncodeUsage) {
+ options_.enable_capture_jitter_method = false;
+ options_.enable_encode_usage_method = true;
+ overuse_detector_->SetOptions(options_);
+ // usage > high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ TriggerOveruseWithEncodeUsage(options_.high_threshold_consecutive_count);
+}
+
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithEncodeUsage) {
+ options_.enable_capture_jitter_method = false;
+ options_.enable_encode_usage_method = true;
+ overuse_detector_->SetOptions(options_);
+ // usage > high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ TriggerOveruseWithEncodeUsage(options_.high_threshold_consecutive_count);
+ // usage < low => underuse
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+ TriggerNormalUsageWithEncodeTime();
+}
+
+TEST_F(OveruseFrameDetectorTest,
+ OveruseAndRecoverWithEncodeUsageMethodDisabled) {
+ options_.enable_capture_jitter_method = false;
+ options_.enable_encode_usage_method = false;
+ overuse_detector_->SetOptions(options_);
+ // usage > high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
+ TriggerOveruseWithEncodeUsage(options_.high_threshold_consecutive_count);
+ // usage < low => underuse
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+ TriggerNormalUsageWithEncodeTime();
+}
+
+TEST_F(OveruseFrameDetectorTest, EncodeRsdResetAfterChangingThreshold) {
+ EXPECT_EQ(InitialEncodeRsd(), EncodeRsd());
+ options_.high_encode_time_rsd_threshold = 100;
+ overuse_detector_->SetOptions(options_);
+ EXPECT_EQ(InitialEncodeRsd(), EncodeRsd());
+ options_.low_encode_time_rsd_threshold = 20;
+ overuse_detector_->SetOptions(options_);
+ EXPECT_EQ(InitialEncodeRsd(), EncodeRsd());
+}
+
+// enable_encode_usage_method = true;
+// low/high_encode_time_rsd_threshold >= 0
+// EncodeUsagePercent() > high_encode_usage_threshold_percent ||
+// EncodeRsd() > high_encode_time_rsd_threshold => overuse.
+// EncodeUsagePercent() < low_encode_usage_threshold_percent &&
+// EncodeRsd() < low_encode_time_rsd_threshold => underuse.
+TEST_F(OveruseFrameDetectorTest, TriggerOveruseWithEncodeRsd) {
+ options_.enable_capture_jitter_method = false;
+ options_.enable_encode_usage_method = true;
+ options_.high_encode_time_rsd_threshold = 80;
+ overuse_detector_->SetOptions(options_);
+ // rsd > high, usage < high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ TriggerOveruseWithEncodeRsd(options_.high_threshold_consecutive_count);
+ EXPECT_LT(EncodeUsagePercent(), options_.high_encode_usage_threshold_percent);
+}
+
+TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithEncodeRsd) {
+ options_.enable_capture_jitter_method = false;
+ options_.enable_encode_usage_method = true;
+ options_.low_encode_time_rsd_threshold = 20;
+ options_.high_encode_time_rsd_threshold = 80;
+ overuse_detector_->SetOptions(options_);
+ // rsd > high, usage < high => overuse
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
+ TriggerOveruseWithEncodeRsd(options_.high_threshold_consecutive_count);
+ EXPECT_LT(EncodeUsagePercent(), options_.high_encode_usage_threshold_percent);
+ // rsd < low, usage < low => underuse
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(testing::AtLeast(1));
+ TriggerNormalUsageWithEncodeTime();
+}
+
+TEST_F(OveruseFrameDetectorTest, NoUnderuseWithEncodeRsd_UsageGtLowThreshold) {
+ options_.enable_capture_jitter_method = false;
+ options_.enable_encode_usage_method = true;
+ options_.low_encode_usage_threshold_percent = 1;
+ options_.low_encode_time_rsd_threshold = 20;
+ options_.high_encode_time_rsd_threshold = 90;
+ overuse_detector_->SetOptions(options_);
+ // rsd < low, usage > low => no underuse
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+ TriggerNormalUsageWithEncodeTime();
+ EXPECT_LT(EncodeRsd(), options_.low_encode_time_rsd_threshold);
+ EXPECT_GT(EncodeUsagePercent(), options_.low_encode_usage_threshold_percent);
+}
+
+TEST_F(OveruseFrameDetectorTest, NoUnderuseWithEncodeRsd_RsdGtLowThreshold) {
+ options_.enable_capture_jitter_method = false;
+ options_.enable_encode_usage_method = true;
+ options_.low_encode_usage_threshold_percent = 20;
+ options_.low_encode_time_rsd_threshold = 1;
+ options_.high_encode_time_rsd_threshold = 90;
+ overuse_detector_->SetOptions(options_);
+ // rsd > low, usage < low => no underuse
+ EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
+ TriggerNormalUsageWithEncodeTime();
+ EXPECT_GT(EncodeRsd(), options_.low_encode_time_rsd_threshold);
+ EXPECT_LT(EncodeUsagePercent(), options_.low_encode_usage_threshold_percent);
+}
} // namespace webrtc
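The comments in the tests above restate the encode-time rules the new cases exercise: with the encode-usage method enabled, overuse is reported when the encode usage or the encode-time RSD exceeds its high threshold, and underuse only when both are below their low thresholds. A minimal sketch of those conditions follows, using hypothetical helper functions rather than the detector's actual code; the non-negativity guards are an assumption read off the "low/high_encode_time_rsd_threshold >= 0" precondition noted in the comments.

bool IsEncodeOveruse(int usage_percent, int rsd,
                     const CpuOveruseOptions& options) {
  // Either metric crossing its high threshold counts as overuse.
  return usage_percent > options.high_encode_usage_threshold_percent ||
         (options.high_encode_time_rsd_threshold >= 0 &&
          rsd > options.high_encode_time_rsd_threshold);
}

bool IsEncodeUnderuse(int usage_percent, int rsd,
                      const CpuOveruseOptions& options) {
  // Underuse requires both metrics below their low thresholds (the RSD
  // check applies only when its threshold is enabled, i.e. non-negative).
  return usage_percent < options.low_encode_usage_threshold_percent &&
         (options.low_encode_time_rsd_threshold < 0 ||
          rsd < options.low_encode_time_rsd_threshold);
}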
diff --git a/chromium/third_party/webrtc/video_engine/stream_synchronization.cc b/chromium/third_party/webrtc/video_engine/stream_synchronization.cc
index 6192dfa5bf2..8f72fa93cd5 100644
--- a/chromium/third_party/webrtc/video_engine/stream_synchronization.cc
+++ b/chromium/third_party/webrtc/video_engine/stream_synchronization.cc
@@ -16,7 +16,7 @@
#include <algorithm>
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -64,15 +64,15 @@ bool StreamSynchronization::ComputeRelativeDelay(
return false;
}
int64_t audio_last_capture_time_ms;
- if (!synchronization::RtpToNtpMs(audio_measurement.latest_timestamp,
- audio_measurement.rtcp,
- &audio_last_capture_time_ms)) {
+ if (!RtpToNtpMs(audio_measurement.latest_timestamp,
+ audio_measurement.rtcp,
+ &audio_last_capture_time_ms)) {
return false;
}
int64_t video_last_capture_time_ms;
- if (!synchronization::RtpToNtpMs(video_measurement.latest_timestamp,
- video_measurement.rtcp,
- &video_last_capture_time_ms)) {
+ if (!RtpToNtpMs(video_measurement.latest_timestamp,
+ video_measurement.rtcp,
+ &video_last_capture_time_ms)) {
return false;
}
if (video_last_capture_time_ms < 0) {
@@ -96,18 +96,12 @@ bool StreamSynchronization::ComputeDelays(int relative_delay_ms,
assert(total_audio_delay_target_ms && total_video_delay_target_ms);
int current_video_delay_ms = *total_video_delay_target_ms;
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
- "Audio delay is: %d for voice channel: %d",
- current_audio_delay_ms, audio_channel_id_);
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
- "Network delay diff is: %d for voice channel: %d",
- channel_delay_->network_delay, audio_channel_id_);
+ LOG(LS_VERBOSE) << "Audio delay: " << current_audio_delay_ms
+ << ", network delay diff: " << channel_delay_->network_delay
+ << " current diff: " << relative_delay_ms
+ << " for channel " << audio_channel_id_;
// Calculate the difference between the lowest possible video delay and
// the current audio delay.
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
- "Current diff is: %d for audio channel: %d",
- relative_delay_ms, audio_channel_id_);
-
int current_diff_ms = current_video_delay_ms - current_audio_delay_ms +
relative_delay_ms;
@@ -198,11 +192,10 @@ bool StreamSynchronization::ComputeDelays(int relative_delay_ms,
channel_delay_->last_video_delay_ms = new_video_delay_ms;
channel_delay_->last_audio_delay_ms = new_audio_delay_ms;
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
- "Sync video delay %d ms for video channel and audio delay %d for audio "
- "channel %d",
- new_video_delay_ms, channel_delay_->extra_audio_delay_ms,
- audio_channel_id_);
+ LOG(LS_VERBOSE) << "Sync video delay " << new_video_delay_ms
+ << " and audio delay " << channel_delay_->extra_audio_delay_ms
+ << " for video channel " << video_channel_id_
+ << " for audio channel " << audio_channel_id_;
// Return values.
*total_video_delay_target_ms = new_video_delay_ms;
diff --git a/chromium/third_party/webrtc/video_engine/stream_synchronization.h b/chromium/third_party/webrtc/video_engine/stream_synchronization.h
index 078b93898f8..5fa9536d17e 100644
--- a/chromium/third_party/webrtc/video_engine/stream_synchronization.h
+++ b/chromium/third_party/webrtc/video_engine/stream_synchronization.h
@@ -13,7 +13,7 @@
#include <list>
-#include "webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h"
+#include "webrtc/system_wrappers/interface/rtp_to_ntp.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -24,7 +24,7 @@ class StreamSynchronization {
public:
struct Measurements {
Measurements() : rtcp(), latest_receive_time_ms(0), latest_timestamp(0) {}
- synchronization::RtcpList rtcp;
+ RtcpList rtcp;
int64_t latest_receive_time_ms;
uint32_t latest_timestamp;
};
diff --git a/chromium/third_party/webrtc/video_engine/stream_synchronization_unittest.cc b/chromium/third_party/webrtc/video_engine/stream_synchronization_unittest.cc
index 8876485790d..7136f1e1c71 100644
--- a/chromium/third_party/webrtc/video_engine/stream_synchronization_unittest.cc
+++ b/chromium/third_party/webrtc/video_engine/stream_synchronization_unittest.cc
@@ -33,9 +33,8 @@ class Time {
: kNtpJan1970(2208988800UL),
time_now_ms_(offset) {}
- synchronization::RtcpMeasurement GenerateRtcp(int frequency,
- uint32_t offset) const {
- synchronization::RtcpMeasurement rtcp;
+ RtcpMeasurement GenerateRtcp(int frequency, uint32_t offset) const {
+ RtcpMeasurement rtcp;
NowNtp(&rtcp.ntp_secs, &rtcp.ntp_frac);
rtcp.rtp_timestamp = NowRtp(frequency, offset);
return rtcp;
diff --git a/chromium/third_party/webrtc/video_engine/test/auto_test/vie_auto_test.gypi b/chromium/third_party/webrtc/video_engine/test/auto_test/vie_auto_test.gypi
index 0a9250c195e..962916607f6 100644
--- a/chromium/third_party/webrtc/video_engine/test/auto_test/vie_auto_test.gypi
+++ b/chromium/third_party/webrtc/video_engine/test/auto_test/vie_auto_test.gypi
@@ -13,6 +13,7 @@
'type': 'executable',
'dependencies': [
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'<(webrtc_root)/modules/modules.gyp:video_render_module',
'<(webrtc_root)/modules/modules.gyp:video_capture_module',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
@@ -21,7 +22,7 @@
'<(webrtc_root)/test/metrics.gyp:metrics',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/test/test.gyp:test_support',
- '<(webrtc_root)/test/libtest/libtest.gyp:libtest',
+ '<(webrtc_root)/test/test.gyp:field_trial',
'video_engine_core',
'libvietest',
],
@@ -42,7 +43,7 @@
'automated/two_windows_fixture.cc',
'automated/vie_api_integration_test.cc',
'automated/vie_extended_integration_test.cc',
- 'automated/vie_rtp_fuzz_test.cc',
+ 'automated/vie_network_test.cc',
'automated/vie_standard_integration_test.cc',
'automated/vie_video_verification_test.cc',
@@ -68,7 +69,6 @@
'source/vie_autotest_base.cc',
'source/vie_autotest_capture.cc',
'source/vie_autotest_codec.cc',
- 'source/vie_autotest_encryption.cc',
'source/vie_autotest_image_process.cc',
'source/vie_autotest_loopback.cc',
'source/vie_autotest_main.cc',
diff --git a/chromium/third_party/webrtc/video_engine/test/auto_test/vie_auto_test.isolate b/chromium/third_party/webrtc/video_engine/test/auto_test/vie_auto_test.isolate
index 577e63f3e95..762a7ac5a00 100644
--- a/chromium/third_party/webrtc/video_engine/test/auto_test/vie_auto_test.isolate
+++ b/chromium/third_party/webrtc/video_engine/test/auto_test/vie_auto_test.isolate
@@ -8,30 +8,28 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../../../data/',
- '../../../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/vie_auto_test<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_touched': [
- '../../../../DEPS',
+ '<(DEPTH)/DEPS',
],
'isolate_dependency_tracked': [
- '../../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/vie_auto_test<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/video_engine/test/libvietest/libvietest.gypi b/chromium/third_party/webrtc/video_engine/test/libvietest/libvietest.gypi
index a3ba22ec16c..33d0b0f7342 100644
--- a/chromium/third_party/webrtc/video_engine/test/libvietest/libvietest.gypi
+++ b/chromium/third_party/webrtc/video_engine/test/libvietest/libvietest.gypi
@@ -28,14 +28,12 @@
'helpers/vie_to_file_renderer.cc',
# Testbed classes
- 'include/fake_network_pipe.h',
'include/tb_capture_device.h',
'include/tb_external_transport.h',
'include/tb_I420_codec.h',
'include/tb_interfaces.h',
'include/tb_video_channel.h',
- 'testbed/fake_network_pipe.cc',
'testbed/tb_capture_device.cc',
'testbed/tb_external_transport.cc',
'testbed/tb_I420_codec.cc',
@@ -48,23 +46,4 @@
],
},
],
- 'conditions': [
- ['include_tests==1', {
- 'targets': [
- {
- 'target_name': 'libvietest_unittests',
- 'type': 'executable',
- 'dependencies': [
- 'libvietest',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(DEPTH)/testing/gmock.gyp:gmock',
- '<(webrtc_root)/test/test.gyp:test_support_main',
- ],
- 'sources': [
- 'testbed/fake_network_pipe_unittest.cc',
- ],
- },
- ], #targets
- }], # include_tests
- ], # conditions
}
diff --git a/chromium/third_party/webrtc/video_engine/video_engine_core.gypi b/chromium/third_party/webrtc/video_engine/video_engine_core.gypi
index 3793969d4c6..ce9d5365bed 100644
--- a/chromium/third_party/webrtc/video_engine/video_engine_core.gypi
+++ b/chromium/third_party/webrtc/video_engine/video_engine_core.gypi
@@ -17,7 +17,6 @@
'<(webrtc_root)/common_video/common_video.gyp:common_video',
# ModulesShared
- '<(webrtc_root)/modules/modules.gyp:media_file',
'<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
'<(webrtc_root)/modules/modules.gyp:webrtc_utility',
@@ -39,7 +38,6 @@
'include/vie_base.h',
'include/vie_capture.h',
'include/vie_codec.h',
- 'include/vie_encryption.h',
'include/vie_errors.h',
'include/vie_external_codec.h',
'include/vie_image_process.h',
@@ -56,7 +54,6 @@
'vie_capture_impl.h',
'vie_codec_impl.h',
'vie_defines.h',
- 'vie_encryption_impl.h',
'vie_external_codec_impl.h',
'vie_image_process_impl.h',
'vie_impl.h',
@@ -89,7 +86,6 @@
'vie_base_impl.cc',
'vie_capture_impl.cc',
'vie_codec_impl.cc',
- 'vie_encryption_impl.cc',
'vie_external_codec_impl.cc',
'vie_image_process_impl.cc',
'vie_impl.cc',
@@ -135,12 +131,14 @@
'encoder_state_feedback_unittest.cc',
'overuse_frame_detector_unittest.cc',
'stream_synchronization_unittest.cc',
+ 'vie_capturer_unittest.cc',
+ 'vie_codec_unittest.cc',
'vie_remb_unittest.cc',
],
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -151,7 +149,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'targets': [
{
'target_name': 'video_engine_core_unittests_apk_target',
diff --git a/chromium/third_party/webrtc/video_engine/video_engine_core_unittests.isolate b/chromium/third_party/webrtc/video_engine/video_engine_core_unittests.isolate
index 941cbad40d4..ec65e739944 100644
--- a/chromium/third_party/webrtc/video_engine/video_engine_core_unittests.isolate
+++ b/chromium/third_party/webrtc/video_engine/video_engine_core_unittests.isolate
@@ -8,27 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/video_engine_core_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/video_engine_core_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/video_engine/vie_base_impl.cc b/chromium/third_party/webrtc/video_engine/vie_base_impl.cc
index 35fd26f8b5b..29fbe7fb843 100644
--- a/chromium/third_party/webrtc/video_engine/vie_base_impl.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_base_impl.cc
@@ -20,7 +20,7 @@
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_render/include/video_render.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -45,42 +45,27 @@ ViEBase* ViEBase::GetInterface(VideoEngine* video_engine) {
}
int ViEBaseImpl::Release() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_.instance_id(),
- "ViEBase::Release()");
(*this)--; // Decrease ref count.
int32_t ref_count = GetCount();
if (ref_count < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_.instance_id(),
- "ViEBase release too many times");
- shared_data_.SetLastError(kViEAPIDoesNotExist);
+ LOG(LS_WARNING) << "ViEBase released too many times.";
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_.instance_id(),
- "ViEBase reference count: %d", ref_count);
return ref_count;
}
ViEBaseImpl::ViEBaseImpl(const Config& config)
- : shared_data_(config) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_.instance_id(),
- "ViEBaseImpl::ViEBaseImpl() Ctor");
-}
+ : shared_data_(config) {}
-ViEBaseImpl::~ViEBaseImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_.instance_id(),
- "ViEBaseImpl::ViEBaseImpl() Dtor");
-}
+ViEBaseImpl::~ViEBaseImpl() {}
int ViEBaseImpl::Init() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_.instance_id(),
- "Init");
return 0;
}
int ViEBaseImpl::SetVoiceEngine(VoiceEngine* voice_engine) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s", __FUNCTION__);
+ LOG_F(LS_INFO) << "SetVoiceEngine";
if (shared_data_.channel_manager()->SetVoiceEngine(voice_engine) != 0) {
shared_data_.SetLastError(kViEBaseVoEFailure);
return -1;
@@ -90,15 +75,10 @@ int ViEBaseImpl::SetVoiceEngine(VoiceEngine* voice_engine) {
int ViEBaseImpl::RegisterCpuOveruseObserver(int video_channel,
CpuOveruseObserver* observer) {
+ LOG_F(LS_INFO) << "RegisterCpuOveruseObserver on channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideo,
- ViEId(shared_data_.instance_id()),
- "%s: channel %d doesn't exist",
- __FUNCTION__,
- video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -118,6 +98,29 @@ int ViEBaseImpl::RegisterCpuOveruseObserver(int video_channel,
return 0;
}
+int ViEBaseImpl::SetCpuOveruseOptions(int video_channel,
+ const CpuOveruseOptions& options) {
+ ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel) {
+ shared_data_.SetLastError(kViEBaseInvalidChannelId);
+ return -1;
+ }
+ ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+ assert(vie_encoder);
+
+ ViEInputManagerScoped is(*(shared_data_.input_manager()));
+ ViEFrameProviderBase* provider = is.FrameProvider(vie_encoder);
+ if (provider) {
+ ViECapturer* capturer = is.Capture(provider->Id());
+ if (capturer) {
+ capturer->SetCpuOveruseOptions(options);
+ return 0;
+ }
+ }
+ return -1;
+}
+
int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
int* capture_jitter_ms,
int* avg_encode_time_ms,
@@ -126,12 +129,6 @@ int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideo,
- ViEId(shared_data_.instance_id()),
- "%s: channel %d doesn't exist",
- __FUNCTION__,
- video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -143,10 +140,35 @@ int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
if (provider) {
ViECapturer* capturer = is.Capture(provider->Id());
if (capturer) {
- capturer->CpuOveruseMeasures(capture_jitter_ms,
- avg_encode_time_ms,
- encode_usage_percent,
- capture_queue_delay_ms_per_s);
+ CpuOveruseMetrics metrics;
+ capturer->GetCpuOveruseMetrics(&metrics);
+ *capture_jitter_ms = metrics.capture_jitter_ms;
+ *avg_encode_time_ms = metrics.avg_encode_time_ms;
+ *encode_usage_percent = metrics.encode_usage_percent;
+ *capture_queue_delay_ms_per_s = metrics.capture_queue_delay_ms_per_s;
+ return 0;
+ }
+ }
+ return -1;
+}
+
+int ViEBaseImpl::GetCpuOveruseMetrics(int video_channel,
+ CpuOveruseMetrics* metrics) {
+ ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel) {
+ shared_data_.SetLastError(kViEBaseInvalidChannelId);
+ return -1;
+ }
+ ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+ assert(vie_encoder);
+
+ ViEInputManagerScoped is(*(shared_data_.input_manager()));
+ ViEFrameProviderBase* provider = is.FrameProvider(vie_encoder);
+ if (provider) {
+ ViECapturer* capturer = is.Capture(provider->Id());
+ if (capturer) {
+ capturer->GetCpuOveruseMetrics(metrics);
return 0;
}
}
@@ -154,17 +176,18 @@ int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
}
int ViEBaseImpl::CreateChannel(int& video_channel) { // NOLINT
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s", __FUNCTION__);
- if (shared_data_.channel_manager()->CreateChannel(&video_channel) == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s: Could not create channel", __FUNCTION__);
+ return CreateChannel(video_channel, static_cast<const Config*>(NULL));
+}
+
+int ViEBaseImpl::CreateChannel(int& video_channel, // NOLINT
+ const Config* config) {
+ if (shared_data_.channel_manager()->CreateChannel(&video_channel,
+ config) == -1) {
video_channel = -1;
shared_data_.SetLastError(kViEBaseChannelCreationFailed);
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s: channel created: %d", __FUNCTION__, video_channel);
+ LOG(LS_INFO) << "Video channel created: " << video_channel;
return 0;
}
@@ -179,15 +202,10 @@ int ViEBaseImpl::CreateReceiveChannel(int& video_channel, // NOLINT
}
int ViEBaseImpl::DeleteChannel(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s(%d)", __FUNCTION__, video_channel);
{
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_.instance_id()),
- "%s: channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -204,25 +222,19 @@ int ViEBaseImpl::DeleteChannel(const int video_channel) {
}
if (shared_data_.channel_manager()->DeleteChannel(video_channel) == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s: Could not delete channel %d", __FUNCTION__,
- video_channel);
shared_data_.SetLastError(kViEBaseUnknownError);
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s: channel deleted: %d", __FUNCTION__, video_channel);
+ LOG(LS_INFO) << "Channel deleted " << video_channel;
return 0;
}
int ViEBaseImpl::ConnectAudioChannel(const int video_channel,
const int audio_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s(%d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "ConnectAudioChannel, video channel " << video_channel
+ << ", audio channel " << audio_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
if (!cs.Channel(video_channel)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s: channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -236,12 +248,9 @@ int ViEBaseImpl::ConnectAudioChannel(const int video_channel,
}
int ViEBaseImpl::DisconnectAudioChannel(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s(%d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "DisconnectAudioChannel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
if (!cs.Channel(video_channel)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s: channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -255,16 +264,10 @@ int ViEBaseImpl::DisconnectAudioChannel(const int video_channel) {
}
int ViEBaseImpl::StartSend(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
-
+ LOG_F(LS_INFO) << "StartSend: " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "%s: Channel %d does not exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -272,9 +275,7 @@ int ViEBaseImpl::StartSend(const int video_channel) {
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
assert(vie_encoder != NULL);
if (vie_encoder->Owner() != video_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "Can't start ssend on a receive only channel.");
+ LOG_F(LS_ERROR) << "Can't start send on a receive only channel.";
shared_data_.SetLastError(kViEBaseReceiveOnlyChannel);
return -1;
}
@@ -284,13 +285,10 @@ int ViEBaseImpl::StartSend(const int video_channel) {
int32_t error = vie_channel->StartSend();
if (error != 0) {
vie_encoder->Restart();
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "%s: Could not start sending on channel %d", __FUNCTION__,
- video_channel);
if (error == kViEBaseAlreadySending) {
shared_data_.SetLastError(kViEBaseAlreadySending);
}
+ LOG_F(LS_ERROR) << "Could not start sending " << video_channel;
shared_data_.SetLastError(kViEBaseUnknownError);
return -1;
}
@@ -300,29 +298,21 @@ int ViEBaseImpl::StartSend(const int video_channel) {
}
int ViEBaseImpl::StopSend(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "StopSend " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "%s: Channel %d does not exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
int32_t error = vie_channel->StopSend();
if (error != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "%s: Could not stop sending on channel %d", __FUNCTION__,
- video_channel);
if (error == kViEBaseNotSending) {
shared_data_.SetLastError(kViEBaseNotSending);
} else {
+ LOG_F(LS_ERROR) << "Could not stop sending " << video_channel;
shared_data_.SetLastError(kViEBaseUnknownError);
}
return -1;
@@ -331,16 +321,11 @@ int ViEBaseImpl::StopSend(const int video_channel) {
}
int ViEBaseImpl::StartReceive(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "StartReceive " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "%s: Channel %d does not exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -352,16 +337,10 @@ int ViEBaseImpl::StartReceive(const int video_channel) {
}
int ViEBaseImpl::StopReceive(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
-
+ LOG_F(LS_INFO) << "StopReceive " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_.instance_id(), video_channel),
- "%s: Channel %d does not exist", __FUNCTION__, video_channel);
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -373,8 +352,6 @@ int ViEBaseImpl::StopReceive(const int video_channel) {
}
int ViEBaseImpl::GetVersion(char version[1024]) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
- "GetVersion(version=?)");
assert(kViEVersionMaxMessageSize == 1024);
if (!version) {
shared_data_.SetLastError(kViEBaseInvalidArgument);
@@ -383,22 +360,15 @@ int ViEBaseImpl::GetVersion(char version[1024]) {
// Add WebRTC Version.
std::stringstream version_stream;
- version_stream << "VideoEngine 3.47.0" << std::endl;
+ version_stream << "VideoEngine 3.54.0" << std::endl;
// Add build info.
version_stream << "Build: " << BUILDINFO << std::endl;
-#ifdef WEBRTC_EXTERNAL_TRANSPORT
- version_stream << "External transport build" << std::endl;
-#endif
int version_length = version_stream.tellp();
assert(version_length < 1024);
memcpy(version, version_stream.str().c_str(), version_length);
version[version_length] = '\0';
-
- WEBRTC_TRACE(kTraceStateInfo, kTraceVideo,
- ViEId(shared_data_.instance_id()), "GetVersion() => %s",
- version);
return 0;
}
@@ -410,9 +380,6 @@ int ViEBaseImpl::CreateChannel(int& video_channel, // NOLINT
int original_channel, bool sender) {
ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
if (!cs.Channel(original_channel)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s - original_channel does not exist.", __FUNCTION__,
- shared_data_.instance_id());
shared_data_.SetLastError(kViEBaseInvalidChannelId);
return -1;
}
@@ -420,14 +387,13 @@ int ViEBaseImpl::CreateChannel(int& video_channel, // NOLINT
if (shared_data_.channel_manager()->CreateChannel(&video_channel,
original_channel,
sender) == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s: Could not create channel", __FUNCTION__);
video_channel = -1;
shared_data_.SetLastError(kViEBaseChannelCreationFailed);
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
- "%s: channel created: %d", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "VideoChannel created: " << video_channel
+ << ", base channel " << original_channel
+ << ", is send channel : " << sender;
return 0;
}
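For context, the new ViEBase entry points added above can be exercised roughly as follows. This is only a usage sketch: channel creation, error reporting and the surrounding engine setup are assumed to happen elsewhere, and ConfigureOveruseDetection() is an illustrative name, not part of the API.

// Configure the encode-usage method on an existing channel and read back the
// current metrics. |base| and |channel| are assumed to be valid.
void ConfigureOveruseDetection(webrtc::ViEBase* base, int channel) {
  webrtc::CpuOveruseOptions options;
  options.enable_capture_jitter_method = false;
  options.enable_encode_usage_method = true;
  if (base->SetCpuOveruseOptions(channel, options) != 0)
    return;  // -1: the channel or its connected capturer was not found.

  webrtc::CpuOveruseMetrics metrics;
  if (base->GetCpuOveruseMetrics(channel, &metrics) == 0) {
    // Inspect e.g. metrics.encode_usage_percent or metrics.capture_jitter_ms.
  }
}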
diff --git a/chromium/third_party/webrtc/video_engine/vie_base_impl.h b/chromium/third_party/webrtc/video_engine/vie_base_impl.h
index 231efc9a60c..d6a046e608e 100644
--- a/chromium/third_party/webrtc/video_engine/vie_base_impl.h
+++ b/chromium/third_party/webrtc/video_engine/vie_base_impl.h
@@ -33,6 +33,10 @@ class ViEBaseImpl
virtual int SetVoiceEngine(VoiceEngine* voice_engine);
virtual int RegisterCpuOveruseObserver(int channel,
CpuOveruseObserver* observer);
+ virtual int SetCpuOveruseOptions(int channel,
+ const CpuOveruseOptions& options);
+ virtual int GetCpuOveruseMetrics(int channel,
+ CpuOveruseMetrics* metrics);
virtual int CpuOveruseMeasures(int channel,
int* capture_jitter_ms,
int* avg_encode_time_ms,
@@ -40,6 +44,8 @@ class ViEBaseImpl
int* capture_queue_delay_ms_per_s);
virtual int CreateChannel(int& video_channel); // NOLINT
virtual int CreateChannel(int& video_channel, // NOLINT
+ const Config* config);
+ virtual int CreateChannel(int& video_channel, // NOLINT
int original_channel);
virtual int CreateReceiveChannel(int& video_channel, // NOLINT
int original_channel);
diff --git a/chromium/third_party/webrtc/video_engine/vie_capture_impl.cc b/chromium/third_party/webrtc/video_engine/vie_capture_impl.cc
index 85353c30acc..9581fb7f2cf 100644
--- a/chromium/third_party/webrtc/video_engine/vie_capture_impl.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_capture_impl.cc
@@ -12,7 +12,7 @@
#include <map>
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -43,37 +43,24 @@ ViECapture* ViECapture::GetInterface(VideoEngine* video_engine) {
}
int ViECaptureImpl::Release() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
- "ViECapture::Release()");
// Decrease ref count
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
- "ViECapture release too many times");
+ LOG(LS_WARNING) << "ViECapture released too many times.";
shared_data_->SetLastError(kViEAPIDoesNotExist);
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
- "ViECapture reference count: %d", ref_count);
return ref_count;
}
ViECaptureImpl::ViECaptureImpl(ViESharedData* shared_data)
- : shared_data_(shared_data) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViECaptureImpl::ViECaptureImpl() Ctor");
-}
+ : shared_data_(shared_data) {}
-ViECaptureImpl::~ViECaptureImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViECaptureImpl::~ViECaptureImpl() Dtor");
-}
+ViECaptureImpl::~ViECaptureImpl() {}
int ViECaptureImpl::NumberOfCaptureDevices() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s", __FUNCTION__);
return shared_data_->input_manager()->NumberOfCaptureDevices();
}
@@ -83,8 +70,6 @@ int ViECaptureImpl::GetCaptureDevice(unsigned int list_number,
unsigned int device_nameUTF8Length,
char* unique_idUTF8,
unsigned int unique_idUTF8Length) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(list_number: %d)", __FUNCTION__, list_number);
return shared_data_->input_manager()->GetDeviceName(
list_number,
device_nameUTF8, device_nameUTF8Length,
@@ -95,8 +80,7 @@ int ViECaptureImpl::AllocateCaptureDevice(
const char* unique_idUTF8,
const unsigned int unique_idUTF8Length,
int& capture_id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(unique_idUTF8: %s)", __FUNCTION__, unique_idUTF8);
+ LOG(LS_INFO) << "AllocateCaptureDevice " << unique_idUTF8;
const int32_t result =
shared_data_->input_manager()->CreateCaptureDevice(
unique_idUTF8,
@@ -111,8 +95,6 @@ int ViECaptureImpl::AllocateCaptureDevice(
int ViECaptureImpl::AllocateExternalCaptureDevice(
int& capture_id, ViEExternalCapture*& external_capture) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s", __FUNCTION__);
const int32_t result =
shared_data_->input_manager()->CreateExternalCaptureDevice(
external_capture, capture_id);
@@ -121,56 +103,45 @@ int ViECaptureImpl::AllocateExternalCaptureDevice(
shared_data_->SetLastError(result);
return -1;
}
+ LOG(LS_INFO) << "External capture device allocated: " << capture_id;
return 0;
}
int ViECaptureImpl::AllocateCaptureDevice(
VideoCaptureModule& capture_module, int& capture_id) { // NOLINT
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s", __FUNCTION__);
int32_t result = shared_data_->input_manager()->CreateCaptureDevice(
&capture_module, capture_id);
if (result != 0) {
shared_data_->SetLastError(result);
return -1;
}
+ LOG(LS_INFO) << "External capture device, by module, allocated: "
+ << capture_id;
return 0;
}
int ViECaptureImpl::ReleaseCaptureDevice(const int capture_id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(capture_id: %d)", __FUNCTION__, capture_id);
+ LOG(LS_INFO) << "ReleaseCaptureDevice " << capture_id;
{
ViEInputManagerScoped is((*(shared_data_->input_manager())));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
}
-
- // Destroy the capture device.
return shared_data_->input_manager()->DestroyCaptureDevice(capture_id);
}
int ViECaptureImpl::ConnectCaptureDevice(const int capture_id,
const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(capture_id: %d, video_channel: %d)", __FUNCTION__,
- capture_id, video_channel);
+ LOG(LS_INFO) << "Connect capture id " << capture_id
+ << " to channel " << video_channel;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
@@ -178,26 +149,18 @@ int ViECaptureImpl::ConnectCaptureDevice(const int capture_id,
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__,
- video_channel);
+ LOG(LS_ERROR) << "Channel doesn't exist.";
shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
return -1;
}
if (vie_encoder->Owner() != video_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "Can't connect capture device to a receive only channel.");
+    LOG(LS_ERROR) << "Can't connect capture device to a receive only channel.";
shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
return -1;
}
// Check if the encoder already has a connected frame provider
if (is.FrameProvider(vie_encoder) != NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d already connected to a capture device.",
- __FUNCTION__, video_channel);
+ LOG(LS_ERROR) << "Channel already connected to capture device.";
shared_data_->SetLastError(kViECaptureDeviceAlreadyConnected);
return -1;
}
@@ -215,17 +178,12 @@ int ViECaptureImpl::ConnectCaptureDevice(const int capture_id,
int ViECaptureImpl::DisconnectCaptureDevice(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d)", __FUNCTION__, video_channel);
+ LOG(LS_INFO) << "DisconnectCaptureDevice " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id()),
- "%s: Channel %d doesn't exist", __FUNCTION__,
- video_channel);
+ LOG(LS_ERROR) << "Channel doesn't exist.";
shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
return -1;
}
@@ -233,19 +191,11 @@ int ViECaptureImpl::DisconnectCaptureDevice(const int video_channel) {
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViEFrameProviderBase* frame_provider = is.FrameProvider(vie_encoder);
if (!frame_provider) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo,
- ViEId(shared_data_->instance_id()),
- "%s: No capture device connected to channel %d",
- __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECaptureDeviceNotConnected);
return -1;
}
if (frame_provider->Id() < kViECaptureIdBase ||
frame_provider->Id() > kViECaptureIdMax) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo,
- ViEId(shared_data_->instance_id()),
- "%s: No capture device connected to channel %d",
- __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECaptureDeviceNotConnected);
return -1;
}
@@ -263,16 +213,11 @@ int ViECaptureImpl::DisconnectCaptureDevice(const int video_channel) {
int ViECaptureImpl::StartCapture(const int capture_id,
const CaptureCapability& capture_capability) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(capture_id: %d)", __FUNCTION__, capture_id);
+ LOG(LS_INFO) << "StartCapture " << capture_id;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), capture_id),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
@@ -288,28 +233,22 @@ int ViECaptureImpl::StartCapture(const int capture_id,
}
int ViECaptureImpl::StopCapture(const int capture_id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(capture_id: %d)", __FUNCTION__, capture_id);
+ LOG(LS_INFO) << "StopCapture " << capture_id;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), capture_id),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
if (!vie_capture->Started()) {
shared_data_->SetLastError(kViECaptureDeviceNotStarted);
- return -1;
+ return 0;
}
if (vie_capture->Stop() != 0) {
shared_data_->SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
-
return 0;
}
@@ -331,16 +270,12 @@ int ViECaptureImpl::SetRotateCapturedFrames(
i_rotation = 270;
break;
}
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(rotation: %d)", __FUNCTION__, i_rotation);
+  LOG(LS_INFO) << "SetRotateCapturedFrames for " << capture_id
+ << ", rotation " << i_rotation;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), capture_id),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
@@ -353,17 +288,12 @@ int ViECaptureImpl::SetRotateCapturedFrames(
int ViECaptureImpl::SetCaptureDelay(const int capture_id,
const unsigned int capture_delay_ms) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(capture_id: %d, capture_delay_ms %u)", __FUNCTION__,
- capture_id, capture_delay_ms);
+ LOG(LS_INFO) << "SetCaptureDelay " << capture_delay_ms
+ << ", for device " << capture_id;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), capture_id),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
@@ -378,8 +308,6 @@ int ViECaptureImpl::SetCaptureDelay(const int capture_id,
int ViECaptureImpl::NumberOfCapabilities(
const char* unique_idUTF8,
const unsigned int unique_idUTF8Length) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
#if defined(WEBRTC_MAC)
// TODO(mflodman) Move to capture module!
@@ -387,9 +315,7 @@ int ViECaptureImpl::NumberOfCapabilities(
// automatically (mandatory).
// Thus this function cannot be supported on the Mac platform.
shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s This API is not supported on Mac OS", __FUNCTION__,
- shared_data_->instance_id());
+ LOG_F(LS_ERROR) << "API not supported on Mac OS X.";
return -1;
#endif
return shared_data_->input_manager()->NumberOfCaptureCapabilities(
@@ -401,18 +327,14 @@ int ViECaptureImpl::GetCaptureCapability(const char* unique_idUTF8,
const unsigned int unique_idUTF8Length,
const unsigned int capability_number,
CaptureCapability& capability) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
#if defined(WEBRTC_MAC)
// TODO(mflodman) Move to capture module!
// QTKit framework handles all capabilities and capture settings
// automatically (mandatory).
// Thus this function cannot be supported on the Mac platform.
+ LOG_F(LS_ERROR) << "API not supported on Mac OS X.";
shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s This API is not supported on Mac OS", __FUNCTION__,
- shared_data_->instance_id());
return -1;
#endif
if (shared_data_->input_manager()->GetCaptureCapability(
@@ -436,15 +358,9 @@ int ViECaptureImpl::ShowCaptureSettingsDialogBox(
// automatically (mandatory).
// Thus this function cannot be supported on the Mac platform.
shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s This API is not supported on Mac OS", __FUNCTION__,
- shared_data_->instance_id());
+ LOG_F(LS_ERROR) << "API not supported on Mac OS X.";
return -1;
#endif
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s capture_id (capture_device_name: %s)", __FUNCTION__,
- unique_idUTF8);
-
return shared_data_->input_manager()->DisplayCaptureSettingsDialogBox(
unique_idUTF8, dialog_title,
parent_window, x, y);
@@ -452,8 +368,6 @@ int ViECaptureImpl::ShowCaptureSettingsDialogBox(
int ViECaptureImpl::GetOrientation(const char* unique_idUTF8,
RotateCapturedFrame& orientation) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s (capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
if (shared_data_->input_manager()->GetOrientation(
unique_idUTF8,
orientation) != 0) {
@@ -466,13 +380,11 @@ int ViECaptureImpl::GetOrientation(const char* unique_idUTF8,
int ViECaptureImpl::EnableBrightnessAlarm(const int capture_id,
const bool enable) {
+ LOG(LS_INFO) << "EnableBrightnessAlarm for device " << capture_id
+ << ", status " << enable;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), capture_id),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
@@ -485,20 +397,15 @@ int ViECaptureImpl::EnableBrightnessAlarm(const int capture_id,
int ViECaptureImpl::RegisterObserver(const int capture_id,
ViECaptureObserver& observer) {
+ LOG(LS_INFO) << "Register capture observer " << capture_id;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), capture_id),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
if (vie_capture->IsObserverRegistered()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), capture_id),
- "%s: Observer already registered", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Observer already registered.";
shared_data_->SetLastError(kViECaptureObserverAlreadyRegistered);
return -1;
}
@@ -513,10 +420,6 @@ int ViECaptureImpl::DeregisterObserver(const int capture_id) {
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), capture_id),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
return -1;
}
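The rest of this file follows the same pattern: printf-style WEBRTC_TRACE() calls carrying a severity, a module and ViEId() are dropped or replaced by the stream-based LOG()/LOG_F() macros from system_wrappers/interface/logging.h. A rough before/after sketch of the idiom (the function and values are illustrative, not taken from the patch):

    #include "webrtc/system_wrappers/interface/logging.h"

    namespace webrtc {
    // Illustrative only; this function is not part of the patch.
    void LogMigrationExample(int capture_id, int delay_ms) {
      // Old form (removed by this patch): severity, module and ids as
      // arguments, printf-style formatting for the payload:
      //   WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id, capture_id),
      //                "%s(delay: %d)", __FUNCTION__, delay_ms);
      // New form: severity only, values streamed in; LOG_F additionally tags
      // the message with the enclosing function name.
      LOG(LS_INFO) << "SetCaptureDelay " << delay_ms
                   << ", for device " << capture_id;
      LOG_F(LS_ERROR) << "Capture device " << capture_id << " doesn't exist.";
    }
    }  // namespace webrtc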
diff --git a/chromium/third_party/webrtc/video_engine/vie_capturer.cc b/chromium/third_party/webrtc/video_engine/vie_capturer.cc
index aec5b39a1d8..30d663357c7 100644
--- a/chromium/third_party/webrtc/video_engine/vie_capturer.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_capturer.cc
@@ -10,6 +10,7 @@
#include "webrtc/video_engine/vie_capturer.h"
+#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/utility/interface/process_thread.h"
@@ -19,10 +20,9 @@
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
-#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/overuse_frame_detector.h"
#include "webrtc/video_engine/vie_defines.h"
@@ -59,26 +59,15 @@ ViECapturer::ViECapturer(int capture_id,
denoising_enabled_(false),
observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
observer_(NULL),
- overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock(),
- kNormalUseStdDevMs,
- kOveruseStdDevMs)) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, capture_id),
- "ViECapturer::ViECapturer(capture_id: %d, engine_id: %d)",
- capture_id, engine_id);
+ overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock())) {
unsigned int t_id = 0;
- if (capture_thread_.Start(t_id)) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id, capture_id),
- "%s: thread started: %u", __FUNCTION__, t_id);
- } else {
+ if (!capture_thread_.Start(t_id)) {
assert(false);
}
module_process_thread_.RegisterModule(overuse_detector_.get());
}
ViECapturer::~ViECapturer() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, capture_id_),
- "ViECapturer::~ViECapturer() - capture_id: %d, engine_id: %d",
- capture_id_, engine_id_);
module_process_thread_.DeRegisterModule(overuse_detector_.get());
// Stop the thread.
@@ -103,10 +92,6 @@ ViECapturer::~ViECapturer() {
delete &deliver_event_;
} else {
assert(false);
- WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer,
- ViEId(engine_id_, capture_id_),
- "%s: Not able to stop capture thread for device %d, leaking",
- __FUNCTION__, capture_id_);
}
if (image_proc_module_) {
@@ -209,8 +194,6 @@ int ViECapturer::FrameCallbackChanged() {
}
int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
- __FUNCTION__);
int width;
int height;
int frame_rate;
@@ -247,15 +230,11 @@ int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
}
int32_t ViECapturer::Stop() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
- __FUNCTION__);
requested_capability_ = CaptureCapability();
return capture_module_->StopCapture();
}
bool ViECapturer::Started() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
- __FUNCTION__);
return capture_module_->CaptureStarted();
}
@@ -267,19 +246,17 @@ void ViECapturer::RegisterCpuOveruseObserver(CpuOveruseObserver* observer) {
overuse_detector_->SetObserver(observer);
}
-void ViECapturer::CpuOveruseMeasures(int* capture_jitter_ms,
- int* avg_encode_time_ms,
- int* encode_usage_percent,
- int* capture_queue_delay_ms_per_s) const {
- *capture_jitter_ms = overuse_detector_->last_capture_jitter_ms();
- *avg_encode_time_ms = overuse_detector_->AvgEncodeTimeMs();
- *encode_usage_percent = overuse_detector_->EncodeUsagePercent();
- *capture_queue_delay_ms_per_s =
- overuse_detector_->AvgCaptureQueueDelayMsPerS();
+void ViECapturer::SetCpuOveruseOptions(const CpuOveruseOptions& options) {
+ overuse_detector_->SetOptions(options);
+}
+
+void ViECapturer::GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const {
+ overuse_detector_->GetCpuOveruseMetrics(metrics);
}
int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
- return capture_module_->SetCaptureDelay(delay_ms);
+ capture_module_->SetCaptureDelay(delay_ms);
+ return 0;
}
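The four-out-parameter CpuOveruseMeasures() getter is replaced by a SetCpuOveruseOptions()/GetCpuOveruseMetrics() pair that simply forwards to the OveruseFrameDetector. A hedged usage sketch; the fields inside CpuOveruseOptions and CpuOveruseMetrics are declared in vie_base.h and are not shown in this diff, so none are touched here.

    // Sketch: configuring and polling overuse detection through the new pair.
    void ConfigureAndPollOveruse(webrtc::ViECapturer* capturer) {
      webrtc::CpuOveruseOptions options;  // Defaults; fields not shown here.
      capturer->SetCpuOveruseOptions(options);

      webrtc::CpuOveruseMetrics metrics;
      capturer->GetCpuOveruseMetrics(&metrics);
      // |metrics| now holds whatever the OveruseFrameDetector reported.
    }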
int32_t ViECapturer::SetRotateCapturedFrames(
@@ -308,10 +285,6 @@ int ViECapturer::IncomingFrame(unsigned char* video_frame,
uint16_t height,
RawVideoType video_type,
unsigned long long capture_time) { // NOLINT
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
- "ExternalCapture::IncomingFrame width %d, height %d, "
- "capture_time %u", width, height, capture_time);
-
if (!external_capture_module_) {
return -1;
}
@@ -326,11 +299,6 @@ int ViECapturer::IncomingFrame(unsigned char* video_frame,
int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
unsigned long long capture_time) { // NOLINT
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
- "ExternalCapture::IncomingFrame width %d, height %d, "
- " capture_time %u", video_frame.width, video_frame.height,
- capture_time);
-
if (!external_capture_module_) {
return -1;
}
@@ -352,10 +320,7 @@ int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
video_frame.v_pitch);
if (ret < 0) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideo,
- ViEId(engine_id_, capture_id_),
- "Failed to create I420VideoFrame");
+ LOG_F(LS_ERROR) << "Could not create I420Frame.";
return -1;
}
@@ -366,12 +331,13 @@ int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
void ViECapturer::SwapFrame(I420VideoFrame* frame) {
external_capture_module_->IncomingI420VideoFrame(frame,
frame->render_time_ms());
+ frame->set_timestamp(0);
+ frame->set_ntp_time_ms(0);
+ frame->set_render_time_ms(0);
}
void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
I420VideoFrame& video_frame) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s(capture_id: %d)", __FUNCTION__, capture_id);
CriticalSectionScoped cs(capture_cs_.get());
// Make sure we render this frame earlier since we know the render time set
// is slightly off since it's being set when the frame has been received from
@@ -381,18 +347,22 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
"render_time", video_frame.render_time_ms());
- captured_frame_.SwapFrame(&video_frame);
+ if (video_frame.native_handle() != NULL) {
+ captured_frame_.reset(video_frame.CloneFrame());
+ } else {
+ if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
+ captured_frame_.reset(new I420VideoFrame());
+ captured_frame_->SwapFrame(&video_frame);
+ }
capture_event_.Set();
- overuse_detector_->FrameCaptured(captured_frame_.width(),
- captured_frame_.height());
- return;
+ overuse_detector_->FrameCaptured(captured_frame_->width(),
+ captured_frame_->height());
}
void ViECapturer::OnCaptureDelayChanged(const int32_t id,
const int32_t delay) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s(capture_id: %d) delay %d", __FUNCTION__, capture_id_,
- delay);
+  LOG(LS_INFO) << "Capture delay changed to " << delay
+ << " for device " << id;
// Deliver the network delay to all registered callbacks.
ViEFrameProviderBase::SetFrameDelay(delay);
@@ -402,26 +372,9 @@ int32_t ViECapturer::RegisterEffectFilter(
ViEEffectFilter* effect_filter) {
CriticalSectionScoped cs(deliver_cs_.get());
- if (!effect_filter) {
- if (!effect_filter_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: no effect filter added for capture device %d",
- __FUNCTION__, capture_id_);
- return -1;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: deregister effect filter for device %d", __FUNCTION__,
- capture_id_);
- } else {
- if (effect_filter_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: effect filter already added for capture device %d",
- __FUNCTION__, capture_id_);
- return -1;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: register effect filter for device %d", __FUNCTION__,
- capture_id_);
+ if (effect_filter != NULL && effect_filter_ != NULL) {
+ LOG_F(LS_ERROR) << "Effect filter already registered.";
+ return -1;
}
effect_filter_ = effect_filter;
return 0;
@@ -433,9 +386,7 @@ int32_t ViECapturer::IncImageProcRefCount() {
image_proc_module_ = VideoProcessingModule::Create(
ViEModuleId(engine_id_, capture_id_));
if (!image_proc_module_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: could not create video processing module",
- __FUNCTION__);
+ LOG_F(LS_ERROR) << "Could not create video processing module.";
return -1;
}
}
@@ -454,10 +405,6 @@ int32_t ViECapturer::DecImageProcRefCount() {
}
int32_t ViECapturer::EnableDenoising(bool enable) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
- capture_id_, enable);
-
CriticalSectionScoped cs(deliver_cs_.get());
if (enable) {
if (denoising_enabled_) {
@@ -476,20 +423,13 @@ int32_t ViECapturer::EnableDenoising(bool enable) {
denoising_enabled_ = false;
DecImageProcRefCount();
}
-
return 0;
}
int32_t ViECapturer::EnableDeflickering(bool enable) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
- capture_id_, enable);
-
CriticalSectionScoped cs(deliver_cs_.get());
if (enable) {
if (deflicker_frame_stats_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: deflickering already enabled", __FUNCTION__);
return -1;
}
if (IncImageProcRefCount() != 0) {
@@ -498,8 +438,6 @@ int32_t ViECapturer::EnableDeflickering(bool enable) {
deflicker_frame_stats_ = new VideoProcessingModule::FrameStats();
} else {
if (deflicker_frame_stats_ == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: deflickering not enabled", __FUNCTION__);
return -1;
}
DecImageProcRefCount();
@@ -510,15 +448,9 @@ int32_t ViECapturer::EnableDeflickering(bool enable) {
}
int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
- capture_id_, enable);
-
CriticalSectionScoped cs(deliver_cs_.get());
if (enable) {
if (brightness_frame_stats_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: BrightnessAlarm already enabled", __FUNCTION__);
return -1;
}
if (IncImageProcRefCount() != 0) {
@@ -528,8 +460,6 @@ int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
} else {
DecImageProcRefCount();
if (brightness_frame_stats_ == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: deflickering not enabled", __FUNCTION__);
return -1;
}
delete brightness_frame_stats_;
@@ -549,7 +479,9 @@ bool ViECapturer::ViECaptureProcess() {
deliver_cs_->Enter();
if (SwapCapturedAndDeliverFrameIfAvailable()) {
encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
- DeliverI420Frame(&deliver_frame_);
+ DeliverI420Frame(deliver_frame_.get());
+ if (deliver_frame_->native_handle() != NULL)
+ deliver_frame_.reset(); // Release the texture so it can be reused.
}
deliver_cs_->Leave();
if (current_brightness_level_ != reported_brightness_level_) {
@@ -570,15 +502,18 @@ bool ViECapturer::ViECaptureProcess() {
}
void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
+ if (video_frame->native_handle() != NULL) {
+ ViEFrameProviderBase::DeliverFrame(video_frame);
+ return;
+ }
+
// Apply image enhancement and effect filter.
if (deflicker_frame_stats_) {
if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
*video_frame) == 0) {
image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
} else {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: could not get frame stats for captured frame",
- __FUNCTION__);
+ LOG_F(LS_ERROR) << "Could not get frame stats.";
}
}
if (denoising_enabled_) {
@@ -601,8 +536,7 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
current_brightness_level_ = Bright;
break;
default:
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s: Brightness detection failed", __FUNCTION__);
+ break;
}
}
}
@@ -610,10 +544,13 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
unsigned int length = CalcBufferSize(kI420,
video_frame->width(),
video_frame->height());
- scoped_array<uint8_t> video_buffer(new uint8_t[length]);
+ scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
ExtractBuffer(*video_frame, length, video_buffer.get());
- effect_filter_->Transform(length, video_buffer.get(),
- video_frame->timestamp(), video_frame->width(),
+ effect_filter_->Transform(length,
+ video_buffer.get(),
+ video_frame->ntp_time_ms(),
+ video_frame->timestamp(),
+ video_frame->width(),
video_frame->height());
}
// Deliver the captured frame to all observers (channels, renderer or file).
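Note the changed effect-filter call just above: Transform() now receives the frame's NTP time in addition to the RTP timestamp, so existing ViEEffectFilter implementations need the extra parameter. The signature below is inferred from the call site only (length, buffer, ntp time, timestamp, width, height); the authoritative declaration lives in vie_image_process.h, so treat this as an assumption.

    // Assumed post-change ViEEffectFilter shape, inferred from the call above.
    class PassThroughFilter : public webrtc::ViEEffectFilter {
     public:
      virtual int Transform(int size,
                            unsigned char* frame_buffer,
                            int64_t ntp_time_ms,
                            unsigned int timestamp,
                            unsigned int width,
                            unsigned int height) {
        // A real filter would modify |frame_buffer| in place; this one is a
        // no-op that only sees the metadata.
        return 0;
      }
    };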
@@ -638,28 +575,26 @@ bool ViECapturer::CaptureCapabilityFixed() {
}
int32_t ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
- if (observer_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
- "%s Observer already registered", __FUNCTION__, capture_id_);
- return -1;
- }
- if (capture_module_->RegisterCaptureCallback(*this) != 0) {
- return -1;
+ {
+ CriticalSectionScoped cs(observer_cs_.get());
+ if (observer_) {
+ LOG_F(LS_ERROR) << "Observer already registered.";
+ return -1;
+ }
+ observer_ = observer;
}
+ capture_module_->RegisterCaptureCallback(*this);
capture_module_->EnableFrameRateCallback(true);
capture_module_->EnableNoPictureAlarm(true);
- observer_ = observer;
return 0;
}
int32_t ViECapturer::DeRegisterObserver() {
- CriticalSectionScoped cs(observer_cs_.get());
- if (!observer_) {
- return 0;
- }
capture_module_->EnableFrameRateCallback(false);
capture_module_->EnableNoPictureAlarm(false);
capture_module_->DeRegisterCaptureCallback();
+
+ CriticalSectionScoped cs(observer_cs_.get());
observer_ = NULL;
return 0;
}
@@ -671,17 +606,13 @@ bool ViECapturer::IsObserverRegistered() {
void ViECapturer::OnCaptureFrameRate(const int32_t id,
const uint32_t frame_rate) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
- "OnCaptureFrameRate %d", frame_rate);
-
CriticalSectionScoped cs(observer_cs_.get());
observer_->CapturedFrameRate(id_, static_cast<uint8_t>(frame_rate));
}
void ViECapturer::OnNoPictureAlarm(const int32_t id,
const VideoCaptureAlarm alarm) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
- "OnNoPictureAlarm %d", alarm);
+ LOG(LS_WARNING) << "OnNoPictureAlarm " << id;
CriticalSectionScoped cs(observer_cs_.get());
CaptureAlarm vie_alarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;
@@ -690,11 +621,21 @@ void ViECapturer::OnNoPictureAlarm(const int32_t id,
bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
CriticalSectionScoped cs(capture_cs_.get());
- if (captured_frame_.IsZeroSize())
+ if (captured_frame_ == NULL)
+ return false;
+
+ if (captured_frame_->native_handle() != NULL) {
+ deliver_frame_.reset(captured_frame_.release());
+ return true;
+ }
+
+ if (captured_frame_->IsZeroSize())
return false;
- deliver_frame_.SwapFrame(&captured_frame_);
- captured_frame_.ResetSize();
+ if (deliver_frame_ == NULL)
+ deliver_frame_.reset(new I420VideoFrame());
+ deliver_frame_->SwapFrame(captured_frame_.get());
+ captured_frame_->ResetSize();
return true;
}
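The capture-to-delivery path now has to handle two kinds of I420VideoFrame: texture-backed frames (native_handle() != NULL), which cannot be swapped plane-by-plane and are therefore cloned and later released, and ordinary buffer frames, which keep the old SwapFrame() fast path. The helper below condenses that decision from OnIncomingCapturedFrame() and SwapCapturedAndDeliverFrameIfAvailable(); the function name is illustrative and locking and error handling are omitted.

    // Condensed texture-vs-buffer handling; not a function from the patch.
    void StoreCapturedFrame(webrtc::I420VideoFrame* incoming,
                            webrtc::scoped_ptr<webrtc::I420VideoFrame>* stored) {
      if (incoming->native_handle() != NULL) {
        // Texture frame: keep a cloned reference, planes cannot be swapped.
        stored->reset(incoming->CloneFrame());
        return;
      }
      // Buffer frame: reuse or allocate a plain frame and swap to avoid a copy.
      if (stored->get() == NULL || (*stored)->native_handle() != NULL)
        stored->reset(new webrtc::I420VideoFrame());
      (*stored)->SwapFrame(incoming);
    }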
diff --git a/chromium/third_party/webrtc/video_engine/vie_capturer.h b/chromium/third_party/webrtc/video_engine/vie_capturer.h
index 1fa2b53df18..8e893577b2a 100644
--- a/chromium/third_party/webrtc/video_engine/vie_capturer.h
+++ b/chromium/third_party/webrtc/video_engine/vie_capturer.h
@@ -20,7 +20,9 @@
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/typedefs.h"
+#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_frame_provider_base.h"
@@ -105,11 +107,8 @@ class ViECapturer
const char* CurrentDeviceName() const;
void RegisterCpuOveruseObserver(CpuOveruseObserver* observer);
-
- void CpuOveruseMeasures(int* capture_jitter_ms,
- int* avg_encode_time_ms,
- int* encode_usage_percent,
- int* capture_queue_delay_ms_per_s) const;
+ void SetCpuOveruseOptions(const CpuOveruseOptions& options);
+ void GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const;
protected:
ViECapturer(int capture_id,
@@ -170,8 +169,8 @@ class ViECapturer
EventWrapper& capture_event_;
EventWrapper& deliver_event_;
- I420VideoFrame captured_frame_;
- I420VideoFrame deliver_frame_;
+ scoped_ptr<I420VideoFrame> captured_frame_;
+ scoped_ptr<I420VideoFrame> deliver_frame_;
// Image processing.
ViEEffectFilter* effect_filter_;
@@ -185,7 +184,7 @@ class ViECapturer
// Statistics observer.
scoped_ptr<CriticalSectionWrapper> observer_cs_;
- ViECaptureObserver* observer_;
+ ViECaptureObserver* observer_ GUARDED_BY(observer_cs_.get());
CaptureCapability requested_capability_;
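The header change also pulls in thread_annotations.h and marks observer_ as GUARDED_BY(observer_cs_.get()), so builds with Clang's thread-safety analysis enabled can flag accesses made without the critical section held. A minimal standalone sketch of the same pattern (class and member names are illustrative):

    #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
    #include "webrtc/system_wrappers/interface/scoped_ptr.h"
    #include "webrtc/system_wrappers/interface/thread_annotations.h"

    // Mirrors how observer_ is annotated in vie_capturer.h; illustrative only.
    class AnnotatedObserverHolder {
     public:
      AnnotatedObserverHolder()
          : lock_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
            observer_(NULL) {}

      void SetObserver(void* observer) {
        webrtc::CriticalSectionScoped cs(lock_.get());  // Hold the lock first.
        observer_ = observer;
      }

     private:
      webrtc::scoped_ptr<webrtc::CriticalSectionWrapper> lock_;
      void* observer_ GUARDED_BY(lock_.get());
    };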
diff --git a/chromium/third_party/webrtc/video_engine/vie_capturer_unittest.cc b/chromium/third_party/webrtc/video_engine/vie_capturer_unittest.cc
new file mode 100644
index 00000000000..edaf13b54b4
--- /dev/null
+++ b/chromium/third_party/webrtc/video_engine/vie_capturer_unittest.cc
@@ -0,0 +1,263 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file includes unit tests for ViECapturer.
+
+#include "webrtc/video_engine/vie_capturer.h"
+
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common.h"
+#include "webrtc/common_video/interface/native_handle.h"
+#include "webrtc/common_video/interface/texture_video_frame.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
+#include "webrtc/modules/video_capture/include/mock/mock_video_capture.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/ref_count.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/scoped_vector.h"
+#include "webrtc/video_engine/mock/mock_vie_frame_provider_base.h"
+
+using ::testing::_;
+using ::testing::Invoke;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::WithArg;
+
+// If an output frame does not arrive in 500ms, the test will fail.
+#define FRAME_TIMEOUT_MS 500
+
+namespace webrtc {
+
+bool EqualFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+bool EqualBufferFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1,
+ const ScopedVector<I420VideoFrame>& frames2);
+I420VideoFrame* CreateI420VideoFrame(uint8_t length);
+
+class FakeNativeHandle : public NativeHandle {
+ public:
+ FakeNativeHandle() {}
+ virtual ~FakeNativeHandle() {}
+ virtual void* GetHandle() { return NULL; }
+};
+
+class ViECapturerTest : public ::testing::Test {
+ protected:
+ ViECapturerTest()
+ : mock_capture_module_(new NiceMock<MockVideoCaptureModule>()),
+ mock_process_thread_(new NiceMock<MockProcessThread>),
+ mock_frame_callback_(new NiceMock<MockViEFrameCallback>),
+ data_callback_(NULL),
+ output_frame_event_(EventWrapper::Create()) {
+ }
+
+ virtual void SetUp() {
+ EXPECT_CALL(*mock_capture_module_, RegisterCaptureDataCallback(_))
+ .WillRepeatedly(Invoke(this, &ViECapturerTest::SetCaptureDataCallback));
+ EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_, _, _, _))
+ .WillRepeatedly(
+ WithArg<1>(Invoke(this, &ViECapturerTest::AddOutputFrame)));
+
+ Config config;
+ vie_capturer_.reset(
+ ViECapturer::CreateViECapture(
+ 0, 0, config, mock_capture_module_.get(), *mock_process_thread_));
+ vie_capturer_->RegisterFrameCallback(0, mock_frame_callback_.get());
+ }
+
+ virtual void TearDown() {
+    // ViECapturer accesses |mock_process_thread_| in its destructor and should
+ // be deleted first.
+ vie_capturer_.reset();
+ }
+
+ void SetCaptureDataCallback(VideoCaptureDataCallback& data_callback) {
+ data_callback_ = &data_callback;
+ }
+
+ void AddInputFrame(I420VideoFrame* frame) {
+ data_callback_->OnIncomingCapturedFrame(0, *frame);
+ }
+
+ void AddOutputFrame(I420VideoFrame* frame) {
+ if (frame->native_handle() == NULL)
+ output_frame_ybuffers_.push_back(frame->buffer(kYPlane));
+ // Clone the frames because ViECapturer owns the frames.
+ output_frames_.push_back(frame->CloneFrame());
+ output_frame_event_->Set();
+ }
+
+ void WaitOutputFrame() {
+ EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+ }
+
+ scoped_ptr<MockVideoCaptureModule> mock_capture_module_;
+ scoped_ptr<MockProcessThread> mock_process_thread_;
+ scoped_ptr<MockViEFrameCallback> mock_frame_callback_;
+
+ // Used to send input capture frames to ViECapturer.
+ VideoCaptureDataCallback* data_callback_;
+
+ scoped_ptr<ViECapturer> vie_capturer_;
+
+ // Input capture frames of ViECapturer.
+ ScopedVector<I420VideoFrame> input_frames_;
+
+ // Indicate an output frame has arrived.
+ scoped_ptr<EventWrapper> output_frame_event_;
+
+  // Output frames delivered by ViECapturer.
+ ScopedVector<I420VideoFrame> output_frames_;
+
+  // Pointers to the Y plane buffers of the output frames, used to verify
+  // that the frames are swapped and not copied.
+ std::vector<uint8_t*> output_frame_ybuffers_;
+};
+
+TEST_F(ViECapturerTest, TestTextureFrames) {
+ const int kNumFrame = 3;
+ for (int i = 0 ; i < kNumFrame; ++i) {
+ webrtc::RefCountImpl<FakeNativeHandle>* handle =
+ new webrtc::RefCountImpl<FakeNativeHandle>();
+ input_frames_.push_back(new TextureVideoFrame(handle, i, i, i, i));
+ AddInputFrame(input_frames_[i]);
+ WaitOutputFrame();
+ }
+
+ EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+TEST_F(ViECapturerTest, TestI420Frames) {
+ const int kNumFrame = 4;
+ ScopedVector<I420VideoFrame> copied_input_frames;
+ std::vector<uint8_t*> ybuffer_pointers;
+ for (int i = 0; i < kNumFrame; ++i) {
+ input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(i + 1)));
+ ybuffer_pointers.push_back(input_frames_[i]->buffer(kYPlane));
+ // Copy input frames because the buffer data will be swapped.
+ copied_input_frames.push_back(input_frames_[i]->CloneFrame());
+ AddInputFrame(input_frames_[i]);
+ WaitOutputFrame();
+ }
+
+ EXPECT_TRUE(EqualFramesVector(copied_input_frames, output_frames_));
+ // Make sure the buffer is swapped and not copied.
+ for (int i = 0; i < kNumFrame; ++i)
+ EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]);
+  // The pipeline should be filled with frames with allocated buffers. Check
+  // that the last input frame has the same allocated size after swapping.
+ EXPECT_EQ(input_frames_.back()->allocated_size(kYPlane),
+ copied_input_frames.back()->allocated_size(kYPlane));
+}
+
+TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) {
+ webrtc::RefCountImpl<FakeNativeHandle>* handle =
+ new webrtc::RefCountImpl<FakeNativeHandle>();
+ input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+ AddInputFrame(input_frames_[0]);
+ WaitOutputFrame();
+
+ input_frames_.push_back(CreateI420VideoFrame(1));
+ scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[1]->CloneFrame());
+ AddInputFrame(copied_input_frame.get());
+ WaitOutputFrame();
+
+ EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) {
+ input_frames_.push_back(CreateI420VideoFrame(1));
+ scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[0]->CloneFrame());
+ AddInputFrame(copied_input_frame.get());
+ WaitOutputFrame();
+
+ webrtc::RefCountImpl<FakeNativeHandle>* handle =
+ new webrtc::RefCountImpl<FakeNativeHandle>();
+ input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+ AddInputFrame(input_frames_[1]);
+ WaitOutputFrame();
+
+ EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+bool EqualFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ if (frame1.native_handle() != NULL || frame2.native_handle() != NULL)
+ return EqualTextureFrames(frame1, frame2);
+ return EqualBufferFrames(frame1, frame2);
+}
+
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ return ((frame1.native_handle() == frame2.native_handle()) &&
+ (frame1.width() == frame2.width()) &&
+ (frame1.height() == frame2.height()) &&
+ (frame1.timestamp() == frame2.timestamp()) &&
+ (frame1.render_time_ms() == frame2.render_time_ms()));
+}
+
+bool EqualBufferFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ return ((frame1.width() == frame2.width()) &&
+ (frame1.height() == frame2.height()) &&
+ (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
+ (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
+ (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
+ (frame1.timestamp() == frame2.timestamp()) &&
+ (frame1.ntp_time_ms() == frame2.ntp_time_ms()) &&
+ (frame1.render_time_ms() == frame2.render_time_ms()) &&
+ (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
+ (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
+ (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) &&
+ (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane),
+ frame1.allocated_size(kYPlane)) == 0) &&
+ (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane),
+ frame1.allocated_size(kUPlane)) == 0) &&
+ (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane),
+ frame1.allocated_size(kVPlane)) == 0));
+}
+
+bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1,
+ const ScopedVector<I420VideoFrame>& frames2) {
+ if (frames1.size() != frames2.size())
+ return false;
+ for (size_t i = 0; i < frames1.size(); ++i) {
+ if (!EqualFrames(*frames1[i], *frames2[i]))
+ return false;
+ }
+ return true;
+}
+
+I420VideoFrame* CreateI420VideoFrame(uint8_t data) {
+ I420VideoFrame* frame = new I420VideoFrame();
+ const int width = 36;
+ const int height = 24;
+ const int kSizeY = width * height * 2;
+ const int kSizeUV = width * height;
+ uint8_t buffer[kSizeY];
+ memset(buffer, data, kSizeY);
+ frame->CreateFrame(
+ kSizeY, buffer, kSizeUV, buffer, kSizeUV, buffer, width, height, width,
+ width / 2, width / 2);
+ frame->set_timestamp(data);
+ frame->set_ntp_time_ms(data);
+ frame->set_render_time_ms(data);
+ return frame;
+}
+
+} // namespace webrtc
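The fixture above wires the test into ViECapturer by intercepting RegisterCaptureDataCallback() with a gmock Invoke action and storing the callback reference, then drives frames through it and waits on an EventWrapper for delivery. Stripped of the WebRTC types, the capture-the-callback idiom looks like this (toy classes, not from the patch):

    #include "testing/gmock/include/gmock/gmock.h"

    class DataCallback {
     public:
      virtual ~DataCallback() {}
      virtual void OnData(int value) = 0;
    };

    class Producer {
     public:
      virtual ~Producer() {}
      virtual void RegisterDataCallback(DataCallback& callback) = 0;
    };

    class MockProducer : public Producer {
     public:
      MOCK_METHOD1(RegisterDataCallback, void(DataCallback& callback));
    };

    // In a fixture, mirroring SetCaptureDataCallback() above:
    //   EXPECT_CALL(mock_producer, RegisterDataCallback(::testing::_))
    //       .WillRepeatedly(::testing::Invoke(this, &MyTest::StoreCallback));
    // The stored pointer is then used to push input directly, which is how
    // AddInputFrame() feeds frames into ViECapturer.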
diff --git a/chromium/third_party/webrtc/video_engine/vie_channel.cc b/chromium/third_party/webrtc/video_engine/vie_channel.cc
index 2305ea78d5a..62bb095996f 100644
--- a/chromium/third_party/webrtc/video_engine/vie_channel.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_channel.cc
@@ -24,8 +24,8 @@
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_render/include/video_render_defines.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/call_stats.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_errors.h"
@@ -76,10 +76,10 @@ ViEChannel::ViEChannel(int32_t channel_id,
callback_cs_(CriticalSectionWrapper::CreateCriticalSection()),
rtp_rtcp_cs_(CriticalSectionWrapper::CreateCriticalSection()),
default_rtp_rtcp_(default_rtp_rtcp),
- vcm_(*VideoCodingModule::Create(ViEModuleId(engine_id, channel_id))),
- vie_receiver_(channel_id, &vcm_, remote_bitrate_estimator, this),
+ vcm_(VideoCodingModule::Create()),
+ vie_receiver_(channel_id, vcm_, remote_bitrate_estimator, this),
vie_sender_(channel_id),
- vie_sync_(&vcm_, this),
+ vie_sync_(vcm_, this),
stats_observer_(new ChannelStatsObserver(this)),
module_process_thread_(module_process_thread),
codec_observer_(NULL),
@@ -89,6 +89,7 @@ ViEChannel::ViEChannel(int32_t channel_id,
intra_frame_observer_(intra_frame_observer),
rtt_stats_(rtt_stats),
paced_sender_(paced_sender),
+ pad_with_redundant_payloads_(false),
bandwidth_observer_(bandwidth_observer),
send_timestamp_extension_id_(kInvalidRtpExtensionId),
absolute_send_time_extension_id_(kInvalidRtpExtensionId),
@@ -96,19 +97,13 @@ ViEChannel::ViEChannel(int32_t channel_id,
decoder_reset_(true),
wait_for_key_frame_(false),
decode_thread_(NULL),
- external_encryption_(NULL),
effect_filter_(NULL),
color_enhancement_(false),
mtu_(0),
sender_(sender),
nack_history_size_sender_(kSendSidePacketHistorySize),
max_nack_reordering_threshold_(kMaxPacketAgeToNack),
- pre_render_callback_(NULL),
- config_(config) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, channel_id),
- "ViEChannel::ViEChannel(channel_id: %d, engine_id: %d)",
- channel_id, engine_id);
-
+ pre_render_callback_(NULL) {
RtpRtcp::Configuration configuration;
configuration.id = ViEModuleId(engine_id, channel_id);
configuration.audio = false;
@@ -124,97 +119,52 @@ ViEChannel::ViEChannel(int32_t channel_id,
rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(configuration));
vie_receiver_.SetRtpRtcpModule(rtp_rtcp_.get());
- vcm_.SetNackSettings(kMaxNackListSize, max_nack_reordering_threshold_, 0);
+ vcm_->SetNackSettings(kMaxNackListSize, max_nack_reordering_threshold_, 0);
}
int32_t ViEChannel::Init() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: channel_id: %d, engine_id: %d)", __FUNCTION__, channel_id_,
- engine_id_);
-
if (module_process_thread_.RegisterModule(
vie_receiver_.GetReceiveStatistics()) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Failed to register receive-statistics to process thread",
- __FUNCTION__);
return -1;
}
// RTP/RTCP initialization.
if (rtp_rtcp_->SetSendingMediaStatus(false) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: RTP::SetSendingMediaStatus failure", __FUNCTION__);
return -1;
}
if (module_process_thread_.RegisterModule(rtp_rtcp_.get()) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: RTP::RegisterModule failure", __FUNCTION__);
return -1;
}
- if (rtp_rtcp_->SetKeyFrameRequestMethod(kKeyFrameReqFirRtp) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: RTP::SetKeyFrameRequestMethod failure", __FUNCTION__);
- }
- if (rtp_rtcp_->SetRTCPStatus(kRtcpCompound) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: RTP::SetRTCPStatus failure", __FUNCTION__);
- }
+ rtp_rtcp_->SetKeyFrameRequestMethod(kKeyFrameReqFirRtp);
+ rtp_rtcp_->SetRTCPStatus(kRtcpCompound);
if (paced_sender_) {
- if (rtp_rtcp_->SetStorePacketsStatus(true, nack_history_size_sender_) !=
- 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s:SetStorePacketsStatus failure", __FUNCTION__);
- }
+ rtp_rtcp_->SetStorePacketsStatus(true, nack_history_size_sender_);
}
- // VCM initialization
- if (vcm_.InitializeReceiver() != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: VCM::InitializeReceiver failure", __FUNCTION__);
+ if (vcm_->InitializeReceiver() != 0) {
return -1;
}
- if (vcm_.SetVideoProtection(kProtectionKeyOnLoss, true)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: VCM::SetVideoProtection failure", __FUNCTION__);
+ if (vcm_->SetVideoProtection(kProtectionKeyOnLoss, true)) {
return -1;
}
- if (vcm_.RegisterReceiveCallback(this) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: VCM::RegisterReceiveCallback failure", __FUNCTION__);
+ if (vcm_->RegisterReceiveCallback(this) != 0) {
return -1;
}
- if (vcm_.RegisterFrameTypeCallback(this) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: VCM::RegisterFrameTypeCallback failure", __FUNCTION__);
- }
- if (vcm_.RegisterReceiveStatisticsCallback(this) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: VCM::RegisterReceiveStatisticsCallback failure",
- __FUNCTION__);
- }
- if (vcm_.RegisterDecoderTimingCallback(this) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: VCM::RegisterDecoderTimingCallback failure",
- __FUNCTION__);
- }
- if (vcm_.SetRenderDelay(kViEDefaultRenderDelayMs) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: VCM::SetRenderDelay failure", __FUNCTION__);
- }
- if (module_process_thread_.RegisterModule(&vcm_) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: VCM::RegisterModule(vcm) failure", __FUNCTION__);
+ vcm_->RegisterFrameTypeCallback(this);
+ vcm_->RegisterReceiveStatisticsCallback(this);
+ vcm_->RegisterDecoderTimingCallback(this);
+ vcm_->SetRenderDelay(kViEDefaultRenderDelayMs);
+ if (module_process_thread_.RegisterModule(vcm_) != 0) {
return -1;
}
#ifdef VIDEOCODEC_VP8
VideoCodec video_codec;
- if (vcm_.Codec(kVideoCodecVP8, &video_codec) == VCM_OK) {
+ if (vcm_->Codec(kVideoCodecVP8, &video_codec) == VCM_OK) {
rtp_rtcp_->RegisterSendPayload(video_codec);
// TODO(holmer): Can we call SetReceiveCodec() here instead?
if (!vie_receiver_.RegisterPayload(video_codec)) {
return -1;
}
- vcm_.RegisterReceiveCodec(&video_codec, number_of_cores_);
- vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
+ vcm_->RegisterReceiveCodec(&video_codec, number_of_cores_);
+ vcm_->RegisterSendCodec(&video_codec, number_of_cores_,
rtp_rtcp_->MaxDataPayloadLength());
} else {
assert(false);
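ViEChannel no longer holds the VideoCodingModule by reference: vcm_ becomes a pointer obtained from the now id-less VideoCodingModule::Create(), every vcm_. call turns into vcm_->, and the destructor hunk further down releases it with VideoCodingModule::Destroy(vcm_). A compressed sketch of that lifetime, with error handling and the other registrations trimmed (the wrapper class is illustrative):

    // Compressed VCM lifetime inside ViEChannel after this change.
    class VcmOwner {
     public:
      explicit VcmOwner(webrtc::ProcessThread* process_thread)
          : process_thread_(process_thread),
            vcm_(webrtc::VideoCodingModule::Create()) {
        vcm_->InitializeReceiver();
        process_thread_->RegisterModule(vcm_);
      }
      ~VcmOwner() {
        process_thread_->DeRegisterModule(vcm_);
        webrtc::VideoCodingModule::Destroy(vcm_);
      }

     private:
      webrtc::ProcessThread* process_thread_;
      webrtc::VideoCodingModule* vcm_;
    };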
@@ -225,14 +175,10 @@ int32_t ViEChannel::Init() {
}
ViEChannel::~ViEChannel() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, channel_id_),
- "ViEChannel Destructor, channel_id: %d, engine_id: %d",
- channel_id_, engine_id_);
-
// Make sure we don't get more callbacks from the RTP module.
module_process_thread_.DeRegisterModule(vie_receiver_.GetReceiveStatistics());
module_process_thread_.DeRegisterModule(rtp_rtcp_.get());
- module_process_thread_.DeRegisterModule(&vcm_);
+ module_process_thread_.DeRegisterModule(vcm_);
module_process_thread_.DeRegisterModule(&vie_sync_);
while (simulcast_rtp_rtcp_.size() > 0) {
std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
@@ -250,27 +196,22 @@ ViEChannel::~ViEChannel() {
StopDecodeThread();
}
// Release modules.
- VideoCodingModule::Destroy(&vcm_);
+ VideoCodingModule::Destroy(vcm_);
}
int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
bool new_stream) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: codec_type: %d", __FUNCTION__, video_codec.codecType);
-
if (!sender_) {
return 0;
}
if (video_codec.codecType == kVideoCodecRED ||
video_codec.codecType == kVideoCodecULPFEC) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: codec_type: %d is not a valid send codec.", __FUNCTION__,
- video_codec.codecType);
+ LOG_F(LS_ERROR) << "Not a valid send codec " << video_codec.codecType;
return -1;
}
if (kMaxSimulcastStreams < video_codec.numberOfSimulcastStreams) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Too many simulcast streams", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Incorrect config "
+ << video_codec.numberOfSimulcastStreams;
return -1;
}
// Update the RTP module with the settings.
@@ -328,10 +269,7 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
// Silently ignore error.
module_process_thread_.RegisterModule(rtp_rtcp);
- if (rtp_rtcp->SetRTCPStatus(rtp_rtcp_->RTCP()) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: RTP::SetRTCPStatus failure", __FUNCTION__);
- }
+ rtp_rtcp->SetRTCPStatus(rtp_rtcp_->RTCP());
if (rtp_rtcp_->StorePackets()) {
rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
@@ -371,8 +309,6 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->DeRegisterSendPayload(video_codec.plType);
if (rtp_rtcp->RegisterSendPayload(video_codec) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: could not register payload type", __FUNCTION__);
return -1;
}
if (mtu_ != 0) {
@@ -389,9 +325,6 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
if (rtp_rtcp->RegisterSendRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset,
send_timestamp_extension_id_) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: could not register transmission time extension",
- __FUNCTION__);
}
} else {
rtp_rtcp->DeregisterSendRtpHeaderExtension(
@@ -404,15 +337,11 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
if (rtp_rtcp->RegisterSendRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime,
absolute_send_time_extension_id_) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: could not register absolute send time extension",
- __FUNCTION__);
}
} else {
rtp_rtcp->DeregisterSendRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime);
}
- rtp_rtcp->SetRtcpXrRrtrStatus(rtp_rtcp_->RtcpXrRrtrStatus());
rtp_rtcp->RegisterSendFrameCountObserver(
rtp_rtcp_->GetSendFrameCountObserver());
rtp_rtcp->RegisterSendChannelRtcpStatisticsCallback(
@@ -458,8 +387,6 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
// registered or not...
rtp_rtcp_->DeRegisterSendPayload(video_codec.plType);
if (rtp_rtcp_->RegisterSendPayload(video_codec) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: could not register payload type", __FUNCTION__);
return -1;
}
if (restart_rtp) {
@@ -474,23 +401,15 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
}
int32_t ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
- // We will not receive simulcast streams, so no need to handle that use case.
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
-
if (!vie_receiver_.SetReceiveCodec(video_codec)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not register receive payload type", __FUNCTION__);
return -1;
}
if (video_codec.codecType != kVideoCodecRED &&
video_codec.codecType != kVideoCodecULPFEC) {
// Register codec type with VCM, but do not register RED or ULPFEC.
- if (vcm_.RegisterReceiveCodec(&video_codec, number_of_cores_,
+ if (vcm_->RegisterReceiveCodec(&video_codec, number_of_cores_,
wait_for_key_frame_) != VCM_OK) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not register decoder", __FUNCTION__);
return -1;
}
}
@@ -498,12 +417,7 @@ int32_t ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
}
int32_t ViEChannel::GetReceiveCodec(VideoCodec* video_codec) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
-
- if (vcm_.ReceiveCodec(video_codec) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not get receive codec", __FUNCTION__);
+ if (vcm_->ReceiveCodec(video_codec) != 0) {
return -1;
}
return 0;
@@ -513,21 +427,11 @@ int32_t ViEChannel::RegisterCodecObserver(ViEDecoderObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
if (codec_observer_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: already added", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Observer already registered.";
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: observer added", __FUNCTION__);
codec_observer_ = observer;
} else {
- if (!codec_observer_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: no observer added", __FUNCTION__);
- return -1;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: observer removed", __FUNCTION__);
codec_observer_ = NULL;
}
return 0;
@@ -537,48 +441,33 @@ int32_t ViEChannel::RegisterExternalDecoder(const uint8_t pl_type,
VideoDecoder* decoder,
bool buffered_rendering,
int32_t render_delay) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
-
int32_t result;
- result = vcm_.RegisterExternalDecoder(decoder, pl_type, buffered_rendering);
+ result = vcm_->RegisterExternalDecoder(decoder, pl_type, buffered_rendering);
if (result != VCM_OK) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not register external decoder with VCM.",
- __FUNCTION__);
return result;
}
- return vcm_.SetRenderDelay(render_delay);
+ return vcm_->SetRenderDelay(render_delay);
}
-int32_t ViEChannel::DeRegisterExternalDecoder(
- const uint8_t pl_type) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s pl_type", __FUNCTION__, pl_type);
-
+int32_t ViEChannel::DeRegisterExternalDecoder(const uint8_t pl_type) {
VideoCodec current_receive_codec;
int32_t result = 0;
- result = vcm_.ReceiveCodec(&current_receive_codec);
- if (vcm_.RegisterExternalDecoder(NULL, pl_type, false) != VCM_OK) {
+ result = vcm_->ReceiveCodec(&current_receive_codec);
+ if (vcm_->RegisterExternalDecoder(NULL, pl_type, false) != VCM_OK) {
return -1;
}
if (result == 0 && current_receive_codec.plType == pl_type) {
- result = vcm_.RegisterReceiveCodec(&current_receive_codec, number_of_cores_,
- wait_for_key_frame_);
+ result = vcm_->RegisterReceiveCodec(
+ &current_receive_codec, number_of_cores_, wait_for_key_frame_);
}
return result;
}
-int32_t ViEChannel::ReceiveCodecStatistics(
- uint32_t* num_key_frames, uint32_t* num_delta_frames) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
-
+int32_t ViEChannel::ReceiveCodecStatistics(uint32_t* num_key_frames,
+ uint32_t* num_delta_frames) {
VCMFrameCount received_frames;
- if (vcm_.ReceivedFrameCount(received_frames) != VCM_OK) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not get received frame information", __FUNCTION__);
+ if (vcm_->ReceivedFrameCount(received_frames) != VCM_OK) {
return -1;
}
*num_key_frames = received_frames.numKeyFrames;
@@ -587,53 +476,40 @@ int32_t ViEChannel::ReceiveCodecStatistics(
}
uint32_t ViEChannel::DiscardedPackets() const {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
- return vcm_.DiscardedPackets();
+ return vcm_->DiscardedPackets();
}
int ViEChannel::ReceiveDelay() const {
- return vcm_.Delay();
+ return vcm_->Delay();
}
int32_t ViEChannel::WaitForKeyFrame(bool wait) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s(wait: %d)", __FUNCTION__, wait);
wait_for_key_frame_ = wait;
return 0;
}
int32_t ViEChannel::SetSignalPacketLossStatus(bool enable,
bool only_key_frames) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s(enable: %d)", __FUNCTION__, enable);
if (enable) {
if (only_key_frames) {
- vcm_.SetVideoProtection(kProtectionKeyOnLoss, false);
- if (vcm_.SetVideoProtection(kProtectionKeyOnKeyLoss, true) != VCM_OK) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s failed %d", __FUNCTION__, enable);
+ vcm_->SetVideoProtection(kProtectionKeyOnLoss, false);
+ if (vcm_->SetVideoProtection(kProtectionKeyOnKeyLoss, true) != VCM_OK) {
return -1;
}
} else {
- vcm_.SetVideoProtection(kProtectionKeyOnKeyLoss, false);
- if (vcm_.SetVideoProtection(kProtectionKeyOnLoss, true) != VCM_OK) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s failed %d", __FUNCTION__, enable);
+ vcm_->SetVideoProtection(kProtectionKeyOnKeyLoss, false);
+ if (vcm_->SetVideoProtection(kProtectionKeyOnLoss, true) != VCM_OK) {
return -1;
}
}
} else {
- vcm_.SetVideoProtection(kProtectionKeyOnLoss, false);
- vcm_.SetVideoProtection(kProtectionKeyOnKeyLoss, false);
+ vcm_->SetVideoProtection(kProtectionKeyOnLoss, false);
+ vcm_->SetVideoProtection(kProtectionKeyOnKeyLoss, false);
}
return 0;
}
int32_t ViEChannel::SetRTCPMode(const RTCPMethod rtcp_mode) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: %d", __FUNCTION__, rtcp_mode);
-
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
it != simulcast_rtp_rtcp_.end();
@@ -645,21 +521,13 @@ int32_t ViEChannel::SetRTCPMode(const RTCPMethod rtcp_mode) {
}
int32_t ViEChannel::GetRTCPMode(RTCPMethod* rtcp_mode) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
*rtcp_mode = rtp_rtcp_->RTCP();
return 0;
}
int32_t ViEChannel::SetNACKStatus(const bool enable) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s(enable: %d)", __FUNCTION__, enable);
-
// Update the decoding VCM.
- if (vcm_.SetVideoProtection(kProtectionNack, enable) != VCM_OK) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not set VCM NACK protection: %d", __FUNCTION__,
- enable);
+ if (vcm_->SetVideoProtection(kProtectionNack, enable) != VCM_OK) {
return -1;
}
if (enable) {
@@ -667,33 +535,21 @@ int32_t ViEChannel::SetNACKStatus(const bool enable) {
SetFECStatus(false, 0, 0);
}
// Update the decoding VCM.
- if (vcm_.SetVideoProtection(kProtectionNack, enable) != VCM_OK) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not set VCM NACK protection: %d", __FUNCTION__,
- enable);
+ if (vcm_->SetVideoProtection(kProtectionNack, enable) != VCM_OK) {
return -1;
}
return ProcessNACKRequest(enable);
}
int32_t ViEChannel::ProcessNACKRequest(const bool enable) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s(enable: %d)", __FUNCTION__, enable);
-
if (enable) {
// Turn on NACK.
- NACKMethod nackMethod = kNackRtcp;
if (rtp_rtcp_->RTCP() == kRtcpOff) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not enable NACK, RTPC not on ", __FUNCTION__);
return -1;
}
vie_receiver_.SetNackStatus(true, max_nack_reordering_threshold_);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Using NACK method %d", __FUNCTION__, nackMethod);
rtp_rtcp_->SetStorePacketsStatus(true, nack_history_size_sender_);
-
- vcm_.RegisterPacketRequestCallback(this);
+ vcm_->RegisterPacketRequestCallback(this);
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
@@ -704,7 +560,7 @@ int32_t ViEChannel::ProcessNACKRequest(const bool enable) {
rtp_rtcp->SetStorePacketsStatus(true, nack_history_size_sender_);
}
// Don't introduce errors when NACK is enabled.
- vcm_.SetDecodeErrorMode(kNoErrors);
+ vcm_->SetDecodeErrorMode(kNoErrors);
} else {
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
@@ -715,14 +571,14 @@ int32_t ViEChannel::ProcessNACKRequest(const bool enable) {
rtp_rtcp->SetStorePacketsStatus(false, 0);
}
}
- vcm_.RegisterPacketRequestCallback(NULL);
+ vcm_->RegisterPacketRequestCallback(NULL);
if (paced_sender_ == NULL) {
rtp_rtcp_->SetStorePacketsStatus(false, 0);
}
vie_receiver_.SetNackStatus(false, max_nack_reordering_threshold_);
// When NACK is off, allow decoding with errors. Otherwise, the video
// will freeze, and will only recover with a complete key frame.
- vcm_.SetDecodeErrorMode(kWithErrors);
+ vcm_->SetDecodeErrorMode(kWithErrors);
}
return 0;
}
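ProcessNACKRequest() keeps its logic and only loses the per-step tracing: enabling NACK requires RTCP to be on, turns on packet storage for retransmissions, registers the channel as the VCM's packet-request callback and switches decoding to kNoErrors; disabling reverses this and allows decoding with errors so the picture does not freeze waiting for a key frame. A stripped-down sketch of the enable path using the calls visible above; the VCMPacketRequestCallback type name comes from the VCM headers, not from this diff.

    // Enable path of ProcessNACKRequest(); members passed in for illustration.
    int EnableNack(webrtc::RtpRtcp* rtp_rtcp,
                   webrtc::ViEReceiver* receiver,
                   webrtc::VideoCodingModule* vcm,
                   webrtc::VCMPacketRequestCallback* packet_request_callback,
                   int max_reordering_threshold,
                   int nack_history_size) {
      if (rtp_rtcp->RTCP() == webrtc::kRtcpOff)
        return -1;  // NACK feedback travels over RTCP.
      receiver->SetNackStatus(true, max_reordering_threshold);
      rtp_rtcp->SetStorePacketsStatus(true, nack_history_size);
      vcm->RegisterPacketRequestCallback(packet_request_callback);
      vcm->SetDecodeErrorMode(webrtc::kNoErrors);  // No incomplete-frame decoding.
      return 0;
    }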
@@ -742,15 +598,8 @@ int32_t ViEChannel::ProcessFECRequest(
const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s(enable: %d, payload_typeRED: %u, payload_typeFEC: %u)",
- __FUNCTION__, enable, payload_typeRED, payload_typeFEC);
-
if (rtp_rtcp_->SetGenericFECStatus(enable, payload_typeRED,
payload_typeFEC) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not change FEC status to %d", __FUNCTION__,
- enable);
return -1;
}
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
@@ -767,11 +616,7 @@ int32_t ViEChannel::SetHybridNACKFECStatus(
const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC) {
- // Update the decoding VCM with hybrid mode.
- if (vcm_.SetVideoProtection(kProtectionNackFEC, enable) != VCM_OK) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not set VCM NACK protection: %d", __FUNCTION__,
- enable);
+ if (vcm_->SetVideoProtection(kProtectionNackFEC, enable) != VCM_OK) {
return -1;
}
@@ -785,9 +630,7 @@ int32_t ViEChannel::SetHybridNACKFECStatus(
int ViEChannel::SetSenderBufferingMode(int target_delay_ms) {
if ((target_delay_ms < 0) || (target_delay_ms > kMaxTargetDelayMs)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Target sender buffering delay out of bounds: %d",
- __FUNCTION__, target_delay_ms);
+ LOG(LS_ERROR) << "Invalid send buffer value.";
return -1;
}
if (target_delay_ms == 0) {
@@ -800,16 +643,7 @@ int ViEChannel::SetSenderBufferingMode(int target_delay_ms) {
nack_history_size_sender_ = kSendSidePacketHistorySize;
}
}
- // Setting nack_history_size_.
- // First disabling (forcing free) and then resetting to desired value.
- if (rtp_rtcp_->SetStorePacketsStatus(false, 0) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s:SetStorePacketsStatus failure", __FUNCTION__);
- return -1;
- }
if (rtp_rtcp_->SetStorePacketsStatus(true, nack_history_size_sender_) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s:SetStorePacketsStatus failure", __FUNCTION__);
return -1;
}
return 0;
@@ -817,9 +651,7 @@ int ViEChannel::SetSenderBufferingMode(int target_delay_ms) {
int ViEChannel::SetReceiverBufferingMode(int target_delay_ms) {
if ((target_delay_ms < 0) || (target_delay_ms > kMaxTargetDelayMs)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Target receiver buffering delay out of bounds: %d",
- __FUNCTION__, target_delay_ms);
+ LOG(LS_ERROR) << "Invalid receive buffer delay value.";
return -1;
}
int max_nack_list_size;
@@ -836,9 +668,9 @@ int ViEChannel::SetReceiverBufferingMode(int target_delay_ms) {
max_incomplete_time_ms = static_cast<int>(kMaxIncompleteTimeMultiplier *
target_delay_ms + 0.5f);
}
- vcm_.SetNackSettings(max_nack_list_size, max_nack_reordering_threshold_,
+ vcm_->SetNackSettings(max_nack_list_size, max_nack_reordering_threshold_,
max_incomplete_time_ms);
- vcm_.SetMinReceiverDelay(target_delay_ms);
+ vcm_->SetMinReceiverDelay(target_delay_ms);
if (vie_sync_.SetTargetBufferingDelay(target_delay_ms) < 0)
return -1;
return 0;
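SetReceiverBufferingMode() now hands the computed values straight to the VCM without tracing. The only arithmetic visible in this hunk is the rounded scaling of the incomplete-frame budget with the target delay; kMaxIncompleteTimeMultiplier itself is defined elsewhere in vie_channel.cc and is not part of this diff.

    // Rounded scaling used above; the multiplier is defined outside this diff.
    int MaxIncompleteTimeMs(float multiplier, int target_delay_ms) {
      return static_cast<int>(multiplier * target_delay_ms + 0.5f);
    }
    // The results then go to the VCM, as in the hunk above:
    //   vcm->SetNackSettings(max_nack_list_size, max_nack_reordering_threshold,
    //                        max_incomplete_time_ms);
    //   vcm->SetMinReceiverDelay(target_delay_ms);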
@@ -853,14 +685,10 @@ int ViEChannel::GetRequiredNackListSize(int target_delay_ms) {
int32_t ViEChannel::SetKeyFrameRequestMethod(
const KeyFrameRequestMethod method) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: %d", __FUNCTION__, method);
return rtp_rtcp_->SetKeyFrameRequestMethod(method);
}
bool ViEChannel::EnableRemb(bool enable) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "ViEChannel::EnableRemb: %d", enable);
if (rtp_rtcp_->SetREMBStatus(enable) != 0)
return false;
return true;
@@ -939,10 +767,6 @@ int ViEChannel::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
void ViEChannel::SetRtcpXrRrtrStatus(bool enable) {
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
rtp_rtcp_->SetRtcpXrRrtrStatus(enable);
- for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
- it != simulcast_rtp_rtcp_.end(); it++) {
- (*it)->SetRtcpXrRrtrStatus(enable);
- }
}
void ViEChannel::SetTransmissionSmoothingStatus(bool enable) {
@@ -951,19 +775,14 @@ void ViEChannel::SetTransmissionSmoothingStatus(bool enable) {
}
int32_t ViEChannel::EnableTMMBR(const bool enable) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: %d", __FUNCTION__, enable);
return rtp_rtcp_->SetTMMBRStatus(enable);
}
int32_t ViEChannel::EnableKeyFrameRequestCallback(const bool enable) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: %d", __FUNCTION__, enable);
CriticalSectionScoped cs(callback_cs_.get());
if (enable && !codec_observer_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: No ViECodecObserver set", __FUNCTION__, enable);
+ LOG(LS_ERROR) << "No ViECodecObserver set.";
return -1;
}
do_key_frame_callbackRequest_ = enable;
@@ -973,19 +792,13 @@ int32_t ViEChannel::EnableKeyFrameRequestCallback(const bool enable) {
int32_t ViEChannel::SetSSRC(const uint32_t SSRC,
const StreamType usage,
const uint8_t simulcast_idx) {
- WEBRTC_TRACE(webrtc::kTraceInfo,
- webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s(usage:%d, SSRC: 0x%x, idx:%u)",
- __FUNCTION__, usage, SSRC, simulcast_idx);
- int rtx_settings = kRtxRetransmitted;
- if (config_.Get<PaddingStrategy>().redundant_payloads)
- rtx_settings |= kRtxRedundantPayloads;
if (simulcast_idx == 0) {
if (usage == kViEStreamTypeRtx) {
- return rtp_rtcp_->SetRTXSendStatus(rtx_settings, true, SSRC);
+ rtp_rtcp_->SetRtxSsrc(SSRC);
+ } else {
+ rtp_rtcp_->SetSSRC(SSRC);
}
- return rtp_rtcp_->SetSSRC(SSRC);
+ return 0;
}
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
if (simulcast_idx > simulcast_rtp_rtcp_.size()) {
@@ -999,28 +812,20 @@ int32_t ViEChannel::SetSSRC(const uint32_t SSRC,
}
RtpRtcp* rtp_rtcp_module = *it;
if (usage == kViEStreamTypeRtx) {
- return rtp_rtcp_module->SetRTXSendStatus(rtx_settings, true, SSRC);
+ rtp_rtcp_module->SetRtxSsrc(SSRC);
+ } else {
+ rtp_rtcp_module->SetSSRC(SSRC);
}
- return rtp_rtcp_module->SetSSRC(SSRC);
+ return 0;
}
int32_t ViEChannel::SetRemoteSSRCType(const StreamType usage,
const uint32_t SSRC) {
- WEBRTC_TRACE(webrtc::kTraceInfo,
- webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s(usage:%d, SSRC: 0x%x)",
- __FUNCTION__, usage, SSRC);
-
- vie_receiver_.SetRtxStatus(true, SSRC);
+ vie_receiver_.SetRtxSsrc(SSRC);
return 0;
}
-// TODO(mflodman) Add kViEStreamTypeRtx.
int32_t ViEChannel::GetLocalSSRC(uint8_t idx, unsigned int* ssrc) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
-
if (idx == 0) {
*ssrc = rtp_rtcp_->SSRC();
return 0;
@@ -1040,86 +845,86 @@ int32_t ViEChannel::GetLocalSSRC(uint8_t idx, unsigned int* ssrc) {
}
int32_t ViEChannel::GetRemoteSSRC(uint32_t* ssrc) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
*ssrc = vie_receiver_.GetRemoteSsrc();
return 0;
}
int32_t ViEChannel::GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
uint32_t arrayCSRC[kRtpCsrcSize];
memset(arrayCSRC, 0, sizeof(arrayCSRC));
int num_csrcs = vie_receiver_.GetCsrcs(arrayCSRC);
if (num_csrcs > 0) {
memcpy(CSRCs, arrayCSRC, num_csrcs * sizeof(uint32_t));
- for (int idx = 0; idx < num_csrcs; idx++) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "\tCSRC[%d] = %lu", idx, CSRCs[idx]);
- }
- } else {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: CSRC list is empty", __FUNCTION__);
}
return 0;
}
-int ViEChannel::SetRtxSendPayloadType(int payload_type) {
- if (rtp_rtcp_->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: already sending", __FUNCTION__);
- return -1;
+void ViEChannel::SetPadWithRedundantPayloads(bool enable) {
+ {
+ CriticalSectionScoped cs(callback_cs_.get());
+ pad_with_redundant_payloads_ = enable;
+ }
+ int mode;
+ uint32_t ssrc;
+ int payload_type;
+ rtp_rtcp_->RTXSendStatus(&mode, &ssrc, &payload_type);
+ if (mode != kRtxOff) {
+ // Since RTX was already enabled we have to reset it with payload-based
+ // padding on.
+ SetRtxSendStatus(true);
}
+}
+
+int ViEChannel::SetRtxSendPayloadType(int payload_type) {
rtp_rtcp_->SetRtxSendPayloadType(payload_type);
- CriticalSectionScoped cs(rtp_rtcp_cs_.get());
for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
it != simulcast_rtp_rtcp_.end(); it++) {
(*it)->SetRtxSendPayloadType(payload_type);
}
+ SetRtxSendStatus(true);
return 0;
}
+void ViEChannel::SetRtxSendStatus(bool enable) {
+ int rtx_settings = kRtxOff;
+ if (enable) {
+ CriticalSectionScoped cs(callback_cs_.get());
+ rtx_settings = kRtxRetransmitted;
+ if (pad_with_redundant_payloads_)
+ rtx_settings |= kRtxRedundantPayloads;
+ }
+ rtp_rtcp_->SetRTXSendStatus(rtx_settings);
+ CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+ for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+ it != simulcast_rtp_rtcp_.end(); it++) {
+ (*it)->SetRTXSendStatus(rtx_settings);
+ }
+}
+
void ViEChannel::SetRtxReceivePayloadType(int payload_type) {
vie_receiver_.SetRtxPayloadType(payload_type);
}
int32_t ViEChannel::SetStartSequenceNumber(uint16_t sequence_number) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
if (rtp_rtcp_->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: already sending", __FUNCTION__);
return -1;
}
return rtp_rtcp_->SetSequenceNumber(sequence_number);
}
int32_t ViEChannel::SetRTCPCName(const char rtcp_cname[]) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
if (rtp_rtcp_->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: already sending", __FUNCTION__);
return -1;
}
return rtp_rtcp_->SetCNAME(rtcp_cname);
}
int32_t ViEChannel::GetRTCPCName(char rtcp_cname[]) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
return rtp_rtcp_->CNAME(rtcp_cname);
}
int32_t ViEChannel::GetRemoteRTCPCName(char rtcp_cname[]) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
uint32_t remoteSSRC = vie_receiver_.GetRemoteSsrc();
return rtp_rtcp_->RemoteCNAME(remoteSSRC, rtcp_cname);
}
@@ -1128,21 +933,11 @@ int32_t ViEChannel::RegisterRtpObserver(ViERTPObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
if (rtp_observer_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: observer alread added", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Observer already registered.";
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: observer added", __FUNCTION__);
rtp_observer_ = observer;
} else {
- if (!rtp_observer_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: no observer added", __FUNCTION__);
- return -1;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: observer removed", __FUNCTION__);
rtp_observer_ = NULL;
}
return 0;
@@ -1152,21 +947,11 @@ int32_t ViEChannel::RegisterRtcpObserver(ViERTCPObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
if (observer) {
if (rtcp_observer_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: observer alread added", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Observer already registered.";
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: observer added", __FUNCTION__);
rtcp_observer_ = observer;
} else {
- if (!rtcp_observer_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: no observer added", __FUNCTION__);
- return -1;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: observer removed", __FUNCTION__);
rtcp_observer_ = NULL;
}
return 0;
@@ -1177,34 +962,25 @@ int32_t ViEChannel::SendApplicationDefinedRTCPPacket(
uint32_t name,
const uint8_t* data,
uint16_t data_length_in_bytes) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
if (!rtp_rtcp_->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: not sending", __FUNCTION__);
return -1;
}
if (!data) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: no input argument", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Invalid input.";
return -1;
}
if (data_length_in_bytes % 4 != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: input length error", __FUNCTION__);
+ LOG(LS_ERROR) << "Invalid input length.";
return -1;
}
RTCPMethod rtcp_method = rtp_rtcp_->RTCP();
if (rtcp_method == kRtcpOff) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: RTCP not enabled", __FUNCTION__);
+ LOG_F(LS_ERROR) << "RTCP not enable.";
return -1;
}
// Create and send packet.
if (rtp_rtcp_->SetRTCPApplicationSpecificData(sub_type, name, data,
data_length_in_bytes) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not send RTCP application data", __FUNCTION__);
return -1;
}
return 0;
@@ -1215,9 +991,6 @@ int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
uint32_t* extended_max,
uint32_t* jitter_samples,
int32_t* rtt_ms) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
// TODO(pwestin) how do we do this for simulcast ? average for all
// except cumulative_lost that is the sum ?
// CriticalSectionScoped cs(rtp_rtcp_cs_.get());
@@ -1235,8 +1008,6 @@ int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
// Otherwise use the first report block.
std::vector<RTCPReportBlock> remote_stats;
if (rtp_rtcp_->RemoteRTCPStat(&remote_stats) != 0 || remote_stats.empty()) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not get remote stats", __FUNCTION__);
return -1;
}
std::vector<RTCPReportBlock>::const_iterator statistics =
@@ -1262,8 +1033,6 @@ int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
uint16_t dummy;
uint16_t rtt = 0;
if (rtp_rtcp_->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not get RTT", __FUNCTION__);
return -1;
}
*rtt_ms = rtt;
@@ -1272,8 +1041,6 @@ int32_t ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
void ViEChannel::RegisterSendChannelRtcpStatisticsCallback(
RtcpStatisticsCallback* callback) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
rtp_rtcp_->RegisterSendChannelRtcpStatisticsCallback(callback);
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
@@ -1291,17 +1058,12 @@ int32_t ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
uint32_t* extended_max,
uint32_t* jitter_samples,
int32_t* rtt_ms) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
-
uint32_t remote_ssrc = vie_receiver_.GetRemoteSsrc();
StreamStatistician* statistician =
vie_receiver_.GetReceiveStatistics()->GetStatistician(remote_ssrc);
- StreamStatistician::Statistics receive_stats;
+ RtcpStatistics receive_stats;
if (!statistician || !statistician->GetStatistics(
&receive_stats, rtp_rtcp_->RTCP() == kRtcpOff)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not get received RTP statistics", __FUNCTION__);
return -1;
}
*fraction_lost = receive_stats.fraction_lost;
@@ -1311,21 +1073,21 @@ int32_t ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
uint16_t dummy = 0;
uint16_t rtt = 0;
- if (rtp_rtcp_->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) != 0) {
- WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not get RTT", __FUNCTION__);
- }
+ rtp_rtcp_->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy);
*rtt_ms = rtt;
return 0;
}
+void ViEChannel::RegisterReceiveChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {
+ vie_receiver_.GetReceiveStatistics()->RegisterRtcpStatisticsCallback(
+ callback);
+}
+
int32_t ViEChannel::GetRtpStatistics(uint32_t* bytes_sent,
uint32_t* packets_sent,
uint32_t* bytes_received,
uint32_t* packets_received) const {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
StreamStatistician* statistician = vie_receiver_.GetReceiveStatistics()->
GetStatistician(vie_receiver_.GetRemoteSsrc());
*bytes_received = 0;
@@ -1333,8 +1095,6 @@ int32_t ViEChannel::GetRtpStatistics(uint32_t* bytes_sent,
if (statistician)
statistician->GetDataCounters(bytes_received, packets_received);
if (rtp_rtcp_->DataCountersRTP(bytes_sent, packets_sent) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not get counters", __FUNCTION__);
return -1;
}
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
@@ -1345,16 +1105,23 @@ int32_t ViEChannel::GetRtpStatistics(uint32_t* bytes_sent,
uint32_t packets_sent_temp = 0;
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->DataCountersRTP(&bytes_sent_temp, &packets_sent_temp);
- bytes_sent += bytes_sent_temp;
- packets_sent += packets_sent_temp;
+ *bytes_sent += bytes_sent_temp;
+ *packets_sent += packets_sent_temp;
+ }
+ for (std::list<RtpRtcp*>::const_iterator it = removed_rtp_rtcp_.begin();
+ it != removed_rtp_rtcp_.end(); ++it) {
+ uint32_t bytes_sent_temp = 0;
+ uint32_t packets_sent_temp = 0;
+ RtpRtcp* rtp_rtcp = *it;
+ rtp_rtcp->DataCountersRTP(&bytes_sent_temp, &packets_sent_temp);
+ *bytes_sent += bytes_sent_temp;
+ *packets_sent += packets_sent_temp;
}
return 0;
}
void ViEChannel::RegisterSendChannelRtpStatisticsCallback(
StreamDataCountersCallback* callback) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
rtp_rtcp_->RegisterSendChannelRtpStatisticsCallback(callback);
{
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
@@ -1366,13 +1133,39 @@ void ViEChannel::RegisterSendChannelRtpStatisticsCallback(
}
}
+void ViEChannel::RegisterReceiveChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {
+ vie_receiver_.GetReceiveStatistics()->RegisterRtpStatisticsCallback(callback);
+}
+
+void ViEChannel::GetRtcpPacketTypeCounters(
+ RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const {
+ rtp_rtcp_->GetRtcpPacketTypeCounters(packets_sent, packets_received);
+
+ CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+ for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+ it != simulcast_rtp_rtcp_.end(); ++it) {
+ RtcpPacketTypeCounter sent;
+ RtcpPacketTypeCounter received;
+ (*it)->GetRtcpPacketTypeCounters(&sent, &received);
+ packets_sent->Add(sent);
+ packets_received->Add(received);
+ }
+ for (std::list<RtpRtcp*>::const_iterator it = removed_rtp_rtcp_.begin();
+ it != removed_rtp_rtcp_.end(); ++it) {
+ RtcpPacketTypeCounter sent;
+ RtcpPacketTypeCounter received;
+ (*it)->GetRtcpPacketTypeCounters(&sent, &received);
+ packets_sent->Add(sent);
+ packets_received->Add(received);
+ }
+}
+
void ViEChannel::GetBandwidthUsage(uint32_t* total_bitrate_sent,
uint32_t* video_bitrate_sent,
uint32_t* fec_bitrate_sent,
uint32_t* nackBitrateSent) const {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
rtp_rtcp_->BitrateSent(total_bitrate_sent, video_bitrate_sent,
fec_bitrate_sent, nackBitrateSent);
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
@@ -1393,9 +1186,6 @@ void ViEChannel::GetBandwidthUsage(uint32_t* total_bitrate_sent,
bool ViEChannel::GetSendSideDelay(int* avg_send_delay,
int* max_send_delay) const {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
*avg_send_delay = 0;
*max_send_delay = 0;
bool valid_estimate = false;
@@ -1436,22 +1226,13 @@ void ViEChannel::RegisterSendBitrateObserver(
}
}
-void ViEChannel::GetEstimatedReceiveBandwidth(
- uint32_t* estimated_bandwidth) const {
- vie_receiver_.EstimatedReceiveBandwidth(estimated_bandwidth);
+void ViEChannel::GetReceiveBandwidthEstimatorStats(
+ ReceiveBandwidthEstimatorStats* output) const {
+ vie_receiver_.GetReceiveBandwidthEstimatorStats(output);
}
int32_t ViEChannel::StartRTPDump(const char file_nameUTF8[1024],
- RTPDirections direction) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
- if (direction != kRtpIncoming && direction != kRtpOutgoing) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: invalid input", __FUNCTION__);
- return -1;
- }
-
+ RTPDirections direction) {
if (direction == kRtpIncoming) {
return vie_receiver_.StartRTPDump(file_nameUTF8);
} else {
@@ -1460,15 +1241,6 @@ int32_t ViEChannel::StartRTPDump(const char file_nameUTF8[1024],
}
int32_t ViEChannel::StopRTPDump(RTPDirections direction) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
-
- if (direction != kRtpIncoming && direction != kRtpOutgoing) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: invalid input", __FUNCTION__);
- return -1;
- }
-
if (direction == kRtpIncoming) {
return vie_receiver_.StopRTPDump();
} else {
@@ -1478,25 +1250,16 @@ int32_t ViEChannel::StopRTPDump(RTPDirections direction) {
int32_t ViEChannel::StartSend() {
CriticalSectionScoped cs(callback_cs_.get());
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
-
if (!external_transport_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: send sockets not initialized", __FUNCTION__);
+ LOG(LS_ERROR) << "No transport set.";
return -1;
}
rtp_rtcp_->SetSendingMediaStatus(true);
if (rtp_rtcp_->Sending()) {
- // Already sending.
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Already sending", __FUNCTION__);
return kViEBaseAlreadySending;
}
if (rtp_rtcp_->SetSendingStatus(true) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Could not start sending RTP", __FUNCTION__);
return -1;
}
CriticalSectionScoped cs_rtp(rtp_rtcp_cs_.get());
@@ -1511,9 +1274,6 @@ int32_t ViEChannel::StartSend() {
}
int32_t ViEChannel::StopSend() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
rtp_rtcp_->SetSendingMediaStatus(false);
for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
@@ -1523,16 +1283,12 @@ int32_t ViEChannel::StopSend() {
rtp_rtcp->SetSendingMediaStatus(false);
}
if (!rtp_rtcp_->Sending()) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Not sending", __FUNCTION__);
return kViEBaseNotSending;
}
// Reset.
rtp_rtcp_->ResetSendDataCountersRTP();
if (rtp_rtcp_->SetSendingStatus(false) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: could not stop RTP sending", __FUNCTION__);
return -1;
}
for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
@@ -1551,13 +1307,7 @@ bool ViEChannel::Sending() {
int32_t ViEChannel::StartReceive() {
CriticalSectionScoped cs(callback_cs_.get());
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
if (StartDecodeThread() != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: could not start decoder thread", __FUNCTION__);
-
vie_receiver_.StopReceive();
return -1;
}
@@ -1566,53 +1316,34 @@ int32_t ViEChannel::StartReceive() {
}
int32_t ViEChannel::StopReceive() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
vie_receiver_.StopReceive();
StopDecodeThread();
- vcm_.ResetDecoder();
+ vcm_->ResetDecoder();
return 0;
}
int32_t ViEChannel::RegisterSendTransport(Transport* transport) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
if (rtp_rtcp_->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Sending", __FUNCTION__);
return -1;
}
CriticalSectionScoped cs(callback_cs_.get());
if (external_transport_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: transport already registered", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Transport already registered.";
return -1;
}
external_transport_ = transport;
vie_sender_.RegisterSendTransport(transport);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Transport registered: 0x%p", __FUNCTION__,
- &external_transport_);
-
return 0;
}
int32_t ViEChannel::DeregisterSendTransport() {
CriticalSectionScoped cs(callback_cs_.get());
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
if (!external_transport_) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: no transport registered", __FUNCTION__);
- return -1;
+ return 0;
}
if (rtp_rtcp_->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: Sending", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Can't deregister transport when sending.";
return -1;
}
external_transport_ = NULL;
@@ -1645,10 +1376,7 @@ int32_t ViEChannel::ReceivedRTCPPacket(
}
int32_t ViEChannel::SetMTU(uint16_t mtu) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
if (rtp_rtcp_->SetMaxTransferUnit(mtu) != 0) {
- // Logging done.
return -1;
}
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
@@ -1663,15 +1391,10 @@ int32_t ViEChannel::SetMTU(uint16_t mtu) {
}
uint16_t ViEChannel::MaxDataPayloadLength() const {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
return rtp_rtcp_->MaxDataPayloadLength();
}
int32_t ViEChannel::EnableColorEnhancement(bool enable) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s(enable: %d)", __FUNCTION__, enable);
-
CriticalSectionScoped cs(callback_cs_.get());
color_enhancement_ = enable;
return 0;
@@ -1712,10 +1435,13 @@ int32_t ViEChannel::FrameToRender(
unsigned int length = CalcBufferSize(kI420,
video_frame.width(),
video_frame.height());
- scoped_array<uint8_t> video_buffer(new uint8_t[length]);
+ scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
ExtractBuffer(video_frame, length, video_buffer.get());
- effect_filter_->Transform(length, video_buffer.get(),
- video_frame.timestamp(), video_frame.width(),
+ effect_filter_->Transform(length,
+ video_buffer.get(),
+ video_frame.ntp_time_ms(),
+ video_frame.timestamp(),
+ video_frame.width(),
video_frame.height());
}
if (color_enhancement_) {
@@ -1729,8 +1455,6 @@ int32_t ViEChannel::FrameToRender(
arr_ofCSRC[0] = vie_receiver_.GetRemoteSsrc();
no_of_csrcs = 1;
}
- WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s(timestamp:%u)", __FUNCTION__, video_frame.timestamp());
DeliverFrame(&video_frame, no_of_csrcs, arr_ofCSRC);
return 0;
}
@@ -1749,9 +1473,6 @@ int32_t ViEChannel::OnReceiveStatisticsUpdate(const uint32_t bit_rate,
const uint32_t frame_rate) {
CriticalSectionScoped cs(callback_cs_.get());
if (codec_observer_) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: bitrate %u, framerate %u", __FUNCTION__, bit_rate,
- frame_rate);
codec_observer_->IncomingRate(channel_id_, frame_rate, bit_rate);
}
return 0;
@@ -1777,8 +1498,6 @@ void ViEChannel::OnDecoderTiming(int decode_ms,
}
int32_t ViEChannel::RequestKeyFrame() {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
{
CriticalSectionScoped cs(callback_cs_.get());
if (codec_observer_ && do_key_frame_callbackRequest_) {
@@ -1795,8 +1514,6 @@ int32_t ViEChannel::SliceLossIndicationRequest(
int32_t ViEChannel::ResendPackets(const uint16_t* sequence_numbers,
uint16_t length) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s(length: %d)", __FUNCTION__, length);
return rtp_rtcp_->SendNACK(sequence_numbers, length);
}
@@ -1805,12 +1522,12 @@ bool ViEChannel::ChannelDecodeThreadFunction(void* obj) {
}
bool ViEChannel::ChannelDecodeProcess() {
- vcm_.Decode(kMaxDecodeWaitTimeMs);
+ vcm_->Decode(kMaxDecodeWaitTimeMs);
return true;
}
void ViEChannel::OnRttUpdate(uint32_t rtt) {
- vcm_.SetReceiveChannelParameters(rtt);
+ vcm_->SetReceiveChannelParameters(rtt);
}
int32_t ViEChannel::StartDecodeThread() {
@@ -1823,8 +1540,6 @@ int32_t ViEChannel::StartDecodeThread() {
this, kHighestPriority,
"DecodingThread");
if (!decode_thread_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: could not create decode thread", __FUNCTION__);
return -1;
}
@@ -1832,20 +1547,14 @@ int32_t ViEChannel::StartDecodeThread() {
if (decode_thread_->Start(thread_id) == false) {
delete decode_thread_;
decode_thread_ = NULL;
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: could not start decode thread", __FUNCTION__);
+ LOG(LS_ERROR) << "Could not start decode thread.";
return -1;
}
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: decode thread with id %u started", __FUNCTION__);
return 0;
}
int32_t ViEChannel::StopDecodeThread() {
if (!decode_thread_) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: decode thread not running", __FUNCTION__);
return 0;
}
@@ -1853,63 +1562,14 @@ int32_t ViEChannel::StopDecodeThread() {
if (decode_thread_->Stop()) {
delete decode_thread_;
} else {
- // Couldn't stop the thread, leak instead of crash.
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: could not stop decode thread", __FUNCTION__);
assert(false && "could not stop decode thread");
}
decode_thread_ = NULL;
return 0;
}
-int32_t ViEChannel::RegisterExternalEncryption(Encryption* encryption) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
- CriticalSectionScoped cs(callback_cs_.get());
- if (external_encryption_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: external encryption already registered", __FUNCTION__);
- return -1;
- }
-
- external_encryption_ = encryption;
-
- vie_receiver_.RegisterExternalDecryption(encryption);
- vie_sender_.RegisterExternalEncryption(encryption);
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s", "external encryption object registerd with channel=%d",
- channel_id_);
- return 0;
-}
-
-int32_t ViEChannel::DeRegisterExternalEncryption() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
- CriticalSectionScoped cs(callback_cs_.get());
- if (!external_encryption_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: external encryption is not registered", __FUNCTION__);
- return -1;
- }
-
- external_transport_ = NULL;
- vie_receiver_.DeregisterExternalDecryption();
- vie_sender_.DeregisterExternalEncryption();
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s external encryption object de-registerd with channel=%d",
- __FUNCTION__, channel_id_);
- return 0;
-}
-
int32_t ViEChannel::SetVoiceChannel(int32_t ve_channel_id,
VoEVideoSync* ve_sync_interface) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s, audio channel %d, video channel %d", __FUNCTION__,
- ve_channel_id, channel_id_);
-
if (ve_sync_interface) {
// Register lip sync
module_process_thread_.RegisterModule(&vie_sync_);
@@ -1928,26 +1588,9 @@ int32_t ViEChannel::VoiceChannel() {
int32_t ViEChannel::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
CriticalSectionScoped cs(callback_cs_.get());
- if (!effect_filter) {
- if (!effect_filter_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: no effect filter added for channel %d",
- __FUNCTION__, channel_id_);
- return -1;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: deregister effect filter for device %d", __FUNCTION__,
- channel_id_);
- } else {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: register effect filter for device %d", __FUNCTION__,
- channel_id_);
- if (effect_filter_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: effect filter already added for channel %d",
- __FUNCTION__, channel_id_);
- return -1;
- }
+ if (effect_filter && effect_filter_) {
+ LOG(LS_ERROR) << "Effect filter already registered.";
+ return -1;
}
effect_filter_ = effect_filter;
return 0;
@@ -1961,8 +1604,7 @@ void ViEChannel::RegisterPreRenderCallback(
void ViEChannel::RegisterPreDecodeImageCallback(
EncodedImageCallback* pre_decode_callback) {
- CriticalSectionScoped cs(callback_cs_.get());
- vcm_.RegisterPreDecodeImageCallback(pre_decode_callback);
+ vcm_->RegisterPreDecodeImageCallback(pre_decode_callback);
}
void ViEChannel::OnApplicationDataReceived(const int32_t id,
@@ -1971,8 +1613,6 @@ void ViEChannel::OnApplicationDataReceived(const int32_t id,
const uint16_t length,
const uint8_t* data) {
if (channel_id_ != ChannelId(id)) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s, incorrect id", __FUNCTION__, id);
return;
}
CriticalSectionScoped cs(callback_cs_.get());
@@ -1992,10 +1632,9 @@ int32_t ViEChannel::OnInitializeDecoder(
const int frequency,
const uint8_t channels,
const uint32_t rate) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: payload_type %d, payload_name %s", __FUNCTION__,
- payload_type, payload_name);
- vcm_.ResetDecoder();
+ LOG(LS_INFO) << "OnInitializeDecoder " << payload_type << " "
+ << payload_name;
+ vcm_->ResetDecoder();
CriticalSectionScoped cs(callback_cs_.get());
decoder_reset_ = true;
@@ -2003,16 +1642,7 @@ int32_t ViEChannel::OnInitializeDecoder(
}
void ViEChannel::OnIncomingSSRCChanged(const int32_t id, const uint32_t ssrc) {
- if (channel_id_ != ChannelId(id)) {
- assert(false);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s, incorrect id", __FUNCTION__, id);
- return;
- }
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: %u", __FUNCTION__, ssrc);
-
+ assert(channel_id_ == ChannelId(id));
rtp_rtcp_->SetRemoteSSRC(ssrc);
CriticalSectionScoped cs(callback_cs_.get());
@@ -2026,19 +1656,7 @@ void ViEChannel::OnIncomingSSRCChanged(const int32_t id, const uint32_t ssrc) {
void ViEChannel::OnIncomingCSRCChanged(const int32_t id,
const uint32_t CSRC,
const bool added) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: %u added: %d", __FUNCTION__, CSRC, added);
-
- if (channel_id_ != ChannelId(id)) {
- assert(false);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s, incorrect id", __FUNCTION__, id);
- return;
- }
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
- "%s: %u", __FUNCTION__, CSRC);
-
+ assert(channel_id_ == ChannelId(id));
CriticalSectionScoped cs(callback_cs_.get());
{
if (rtp_observer_) {
@@ -2065,4 +1683,8 @@ void ViEChannel::RegisterSendFrameCountObserver(
}
}
+void ViEChannel::ReceivedBWEPacket(int64_t arrival_time_ms,
+ int payload_size, const RTPHeader& header) {
+ vie_receiver_.ReceivedBWEPacket(arrival_time_ms, payload_size, header);
+}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video_engine/vie_channel.h b/chromium/third_party/webrtc/video_engine/vie_channel.h
index 33bf7bf22f8..23b7e5e5923 100644
--- a/chromium/third_party/webrtc/video_engine/vie_channel.h
+++ b/chromium/third_party/webrtc/video_engine/vie_channel.h
@@ -34,7 +34,6 @@ class ChannelStatsObserver;
class Config;
class CriticalSectionWrapper;
class EncodedImageCallback;
-class Encryption;
class I420FrameCallback;
class PacedSender;
class ProcessThread;
@@ -146,6 +145,8 @@ class ViEChannel
int32_t GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]);
int SetRtxSendPayloadType(int payload_type);
+ // Only has an effect once RTX is enabled.
+ void SetPadWithRedundantPayloads(bool enable);
void SetRtxReceivePayloadType(int payload_type);
// Sets the starting sequence number, must be called before StartSend.
@@ -185,6 +186,10 @@ class ViEChannel
uint32_t* jitter_samples,
int32_t* rtt_ms);
+ // Called on generation of RTCP statistics.
+ void RegisterReceiveChannelRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback);
+
// Gets sent/received packets statistics.
int32_t GetRtpStatistics(uint32_t* bytes_sent,
uint32_t* packets_sent,
@@ -195,12 +200,20 @@ class ViEChannel
void RegisterSendChannelRtpStatisticsCallback(
StreamDataCountersCallback* callback);
+ // Called on update of RTP statistics.
+ void RegisterReceiveChannelRtpStatisticsCallback(
+ StreamDataCountersCallback* callback);
+
+ void GetRtcpPacketTypeCounters(RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const;
+
void GetBandwidthUsage(uint32_t* total_bitrate_sent,
uint32_t* video_bitrate_sent,
uint32_t* fec_bitrate_sent,
uint32_t* nackBitrateSent) const;
bool GetSendSideDelay(int* avg_send_delay, int* max_send_delay) const;
- void GetEstimatedReceiveBandwidth(uint32_t* estimated_bandwidth) const;
+ void GetReceiveBandwidthEstimatorStats(
+ ReceiveBandwidthEstimatorStats* output) const;
// Called on any new send bitrate estimate.
void RegisterSendBitrateObserver(BitrateStatisticsObserver* observer);
@@ -323,9 +336,6 @@ class ViEChannel
virtual int32_t ResendPackets(const uint16_t* sequence_numbers,
uint16_t length);
- int32_t RegisterExternalEncryption(Encryption* encryption);
- int32_t DeRegisterExternalEncryption();
-
int32_t SetVoiceChannel(int32_t ve_channel_id,
VoEVideoSync* ve_sync_interface);
int32_t VoiceChannel();
@@ -342,6 +352,9 @@ class ViEChannel
void RegisterSendFrameCountObserver(FrameCountObserver* observer);
+ void ReceivedBWEPacket(int64_t arrival_time_ms, int payload_size,
+ const RTPHeader& header);
+
protected:
static bool ChannelDecodeThreadFunction(void* obj);
bool ChannelDecodeProcess();
@@ -359,6 +372,7 @@ class ViEChannel
const unsigned char payload_typeFEC);
// Compute NACK list parameters for the buffering mode.
int GetRequiredNackListSize(int target_delay_ms);
+ void SetRtxSendStatus(bool enable);
int32_t channel_id_;
int32_t engine_id_;
@@ -375,7 +389,7 @@ class ViEChannel
scoped_ptr<RtpRtcp> rtp_rtcp_;
std::list<RtpRtcp*> simulcast_rtp_rtcp_;
std::list<RtpRtcp*> removed_rtp_rtcp_;
- VideoCodingModule& vcm_;
+ VideoCodingModule* const vcm_;
ViEReceiver vie_receiver_;
ViESender vie_sender_;
ViESyncModule vie_sync_;
@@ -392,11 +406,11 @@ class ViEChannel
RtcpIntraFrameObserver* intra_frame_observer_;
RtcpRttStats* rtt_stats_;
PacedSender* paced_sender_;
+ bool pad_with_redundant_payloads_;
scoped_ptr<RtcpBandwidthObserver> bandwidth_observer_;
int send_timestamp_extension_id_;
int absolute_send_time_extension_id_;
- bool using_packet_spread_;
Transport* external_transport_;
@@ -406,8 +420,6 @@ class ViEChannel
bool wait_for_key_frame_;
ThreadWrapper* decode_thread_;
- Encryption* external_encryption_;
-
ViEEffectFilter* effect_filter_;
bool color_enhancement_;
@@ -418,7 +430,6 @@ class ViEChannel
int nack_history_size_sender_;
int max_nack_reordering_threshold_;
I420FrameCallback* pre_render_callback_;
- const Config& config_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video_engine/vie_channel_group.cc b/chromium/third_party/webrtc/video_engine/vie_channel_group.cc
index f079a10e584..35df3bbf7c1 100644
--- a/chromium/third_party/webrtc/video_engine/vie_channel_group.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_channel_group.cc
@@ -11,12 +11,14 @@
#include "webrtc/video_engine/vie_channel_group.h"
#include "webrtc/common.h"
+#include "webrtc/experiments.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/video_engine/call_stats.h"
#include "webrtc/video_engine/encoder_state_feedback.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -31,14 +33,18 @@ static const uint32_t kTimeOffsetSwitchThreshold = 30;
class WrappingBitrateEstimator : public RemoteBitrateEstimator {
public:
WrappingBitrateEstimator(int engine_id, RemoteBitrateObserver* observer,
- Clock* clock, ProcessThread* process_thread)
+ Clock* clock, ProcessThread* process_thread,
+ const Config& config)
: observer_(observer),
clock_(clock),
process_thread_(process_thread),
crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
engine_id_(engine_id),
- min_bitrate_bps_(30000),
- rbe_(RemoteBitrateEstimatorFactory().Create(observer_, clock_,
+ min_bitrate_bps_(config.Get<RemoteBitrateEstimatorMinRate>().min_rate),
+ rate_control_type_(kMimdControl),
+ rbe_(RemoteBitrateEstimatorFactory().Create(observer_,
+ clock_,
+ rate_control_type_,
min_bitrate_bps_)),
using_absolute_send_time_(false),
packets_since_absolute_send_time_(0) {
@@ -53,7 +59,7 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
int payload_size,
const RTPHeader& header) {
CriticalSectionScoped cs(crit_sect_.get());
- PickEstimator(header);
+ PickEstimatorFromHeader(header);
rbe_->IncomingPacket(arrival_time_ms, payload_size, header);
}
@@ -83,19 +89,32 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
return rbe_->LatestEstimate(ssrcs, bitrate_bps);
}
+ virtual bool GetStats(ReceiveBandwidthEstimatorStats* output) const {
+ CriticalSectionScoped cs(crit_sect_.get());
+ return rbe_->GetStats(output);
+ }
+
+ void SetConfig(const webrtc::Config& config) {
+ CriticalSectionScoped cs(crit_sect_.get());
+ RateControlType new_control_type =
+ config.Get<AimdRemoteRateControl>().enabled ? kAimdControl :
+ kMimdControl;
+ if (new_control_type != rate_control_type_) {
+ rate_control_type_ = new_control_type;
+ PickEstimator();
+ }
+ }
+
private:
- // Instantiate RBE for Time Offset or Absolute Send Time extensions.
- void PickEstimator(const RTPHeader& header) {
+ void PickEstimatorFromHeader(const RTPHeader& header)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_.get()) {
if (header.extension.hasAbsoluteSendTime) {
// If we see AST in header, switch RBE strategy immediately.
if (!using_absolute_send_time_) {
- process_thread_->DeRegisterModule(rbe_.get());
- WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_),
- "WrappingBitrateEstimator: Switching to absolute send time RBE.");
- rbe_.reset(AbsoluteSendTimeRemoteBitrateEstimatorFactory().Create(
- observer_, clock_, min_bitrate_bps_));
- process_thread_->RegisterModule(rbe_.get());
+ LOG(LS_INFO) <<
+ "WrappingBitrateEstimator: Switching to absolute send time RBE.";
using_absolute_send_time_ = true;
+ PickEstimator();
}
packets_since_absolute_send_time_ = 0;
} else {
@@ -103,25 +122,35 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
if (using_absolute_send_time_) {
++packets_since_absolute_send_time_;
if (packets_since_absolute_send_time_ >= kTimeOffsetSwitchThreshold) {
- process_thread_->DeRegisterModule(rbe_.get());
- WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_),
- "WrappingBitrateEstimator: Switching to transmission time offset "
- "RBE.");
- rbe_.reset(RemoteBitrateEstimatorFactory().Create(observer_, clock_,
- min_bitrate_bps_));
- process_thread_->RegisterModule(rbe_.get());
+ LOG(LS_INFO) << "WrappingBitrateEstimator: Switching to transmission "
+ << "time offset RBE.";
using_absolute_send_time_ = false;
+ PickEstimator();
}
}
}
}
+ // Instantiate RBE for Time Offset or Absolute Send Time extensions.
+ void PickEstimator() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_.get()) {
+ process_thread_->DeRegisterModule(rbe_.get());
+ if (using_absolute_send_time_) {
+ rbe_.reset(AbsoluteSendTimeRemoteBitrateEstimatorFactory().Create(
+ observer_, clock_, rate_control_type_, min_bitrate_bps_));
+ } else {
+ rbe_.reset(RemoteBitrateEstimatorFactory().Create(
+ observer_, clock_, rate_control_type_, min_bitrate_bps_));
+ }
+ process_thread_->RegisterModule(rbe_.get());
+ }
+
RemoteBitrateObserver* observer_;
Clock* clock_;
ProcessThread* process_thread_;
scoped_ptr<CriticalSectionWrapper> crit_sect_;
const int engine_id_;
const uint32_t min_bitrate_bps_;
+ RateControlType rate_control_type_;
scoped_ptr<RemoteBitrateEstimator> rbe_;
bool using_absolute_send_time_;
uint32_t packets_since_absolute_send_time_;
@@ -130,23 +159,39 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
};
} // namespace
-ChannelGroup::ChannelGroup(int engine_id, ProcessThread* process_thread,
- const Config& config)
+ChannelGroup::ChannelGroup(int engine_id,
+ ProcessThread* process_thread,
+ const Config* config)
: remb_(new VieRemb()),
- bitrate_controller_(BitrateController::CreateBitrateController(true)),
+ bitrate_controller_(
+ BitrateController::CreateBitrateController(Clock::GetRealTimeClock(),
+ true)),
call_stats_(new CallStats()),
- remote_bitrate_estimator_(new WrappingBitrateEstimator(engine_id,
- remb_.get(), Clock::GetRealTimeClock(),
- process_thread)),
encoder_state_feedback_(new EncoderStateFeedback()),
+ config_(config),
+ own_config_(),
process_thread_(process_thread) {
- call_stats_->RegisterStatsObserver(remote_bitrate_estimator_.get());
+ if (!config) {
+ own_config_.reset(new Config);
+ config_ = own_config_.get();
+ }
+ assert(config_); // Must have a valid config pointer here.
+ remote_bitrate_estimator_.reset(
+ new WrappingBitrateEstimator(engine_id,
+ remb_.get(),
+ Clock::GetRealTimeClock(),
+ process_thread,
+ *config_));
+ call_stats_->RegisterStatsObserver(remote_bitrate_estimator_.get());
+
process_thread->RegisterModule(call_stats_.get());
+ process_thread->RegisterModule(bitrate_controller_.get());
}
ChannelGroup::~ChannelGroup() {
- call_stats_->DeregisterStatsObserver(remote_bitrate_estimator_.get());
+ process_thread_->DeRegisterModule(bitrate_controller_.get());
process_thread_->DeRegisterModule(call_stats_.get());
+ call_stats_->DeregisterStatsObserver(remote_bitrate_estimator_.get());
assert(channels_.empty());
assert(!remb_->InUse());
}
@@ -208,4 +253,10 @@ bool ChannelGroup::SetChannelRembStatus(int channel_id, bool sender,
}
return true;
}
+
+void ChannelGroup::SetBandwidthEstimationConfig(const webrtc::Config& config) {
+ WrappingBitrateEstimator* estimator =
+ static_cast<WrappingBitrateEstimator*>(remote_bitrate_estimator_.get());
+ estimator->SetConfig(config);
+}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video_engine/vie_channel_group.h b/chromium/third_party/webrtc/video_engine/vie_channel_group.h
index 95a042ef093..40ce71ef194 100644
--- a/chromium/third_party/webrtc/video_engine/vie_channel_group.h
+++ b/chromium/third_party/webrtc/video_engine/vie_channel_group.h
@@ -32,7 +32,7 @@ class VieRemb;
class ChannelGroup {
public:
ChannelGroup(int engine_id, ProcessThread* process_thread,
- const Config& config);
+ const Config* config);
~ChannelGroup();
void AddChannel(int channel_id);
@@ -42,6 +42,7 @@ class ChannelGroup {
bool SetChannelRembStatus(int channel_id, bool sender, bool receiver,
ViEChannel* channel);
+ void SetBandwidthEstimationConfig(const webrtc::Config& config);
BitrateController* GetBitrateController();
CallStats* GetCallStats();
@@ -57,6 +58,9 @@ class ChannelGroup {
scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
ChannelSet channels_;
+ const Config* config_;
+ // Placeholder for the case where this owns the config.
+ scoped_ptr<Config> own_config_;
// Registered at construct time and assumed to outlive this class.
ProcessThread* process_thread_;
diff --git a/chromium/third_party/webrtc/video_engine/vie_channel_manager.cc b/chromium/third_party/webrtc/video_engine/vie_channel_manager.cc
index b62e2829bca..6f35c24f619 100644
--- a/chromium/third_party/webrtc/video_engine/vie_channel_manager.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_channel_manager.cc
@@ -10,11 +10,12 @@
#include "webrtc/video_engine/vie_channel_manager.h"
+#include "webrtc/common.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/call_stats.h"
#include "webrtc/video_engine/encoder_state_feedback.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -37,19 +38,13 @@ ViEChannelManager::ViEChannelManager(
voice_sync_interface_(NULL),
voice_engine_(NULL),
module_process_thread_(NULL),
- config_(config) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id),
- "ViEChannelManager::ViEChannelManager(engine_id: %d)",
- engine_id);
+ engine_config_(config) {
for (int idx = 0; idx < free_channel_ids_size_; idx++) {
free_channel_ids_[idx] = true;
}
}
ViEChannelManager::~ViEChannelManager() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_),
- "ViEChannelManager Destructor, engine_id: %d", engine_id_);
-
while (channel_map_.size() > 0) {
ChannelMap::iterator it = channel_map_.begin();
// DeleteChannel will erase this channel from the map and invalidate |it|.
@@ -79,7 +74,8 @@ void ViEChannelManager::SetModuleProcessThread(
module_process_thread_ = module_process_thread;
}
-int ViEChannelManager::CreateChannel(int* channel_id) {
+int ViEChannelManager::CreateChannel(int* channel_id,
+ const Config* channel_group_config) {
CriticalSectionScoped cs(channel_id_critsect_);
// Get a new channel id.
@@ -90,11 +86,11 @@ int ViEChannelManager::CreateChannel(int* channel_id) {
// Create a new channel group and add this channel.
ChannelGroup* group = new ChannelGroup(engine_id_, module_process_thread_,
- config_);
+ channel_group_config);
BitrateController* bitrate_controller = group->GetBitrateController();
ViEEncoder* vie_encoder = new ViEEncoder(engine_id_, new_channel_id,
number_of_cores_,
- config_,
+ engine_config_,
*module_process_thread_,
bitrate_controller);
@@ -163,7 +159,7 @@ int ViEChannelManager::CreateChannel(int* channel_id,
if (sender) {
// We need to create a new ViEEncoder.
vie_encoder = new ViEEncoder(engine_id_, new_channel_id, number_of_cores_,
- config_,
+ engine_config_,
*module_process_thread_,
bitrate_controller);
if (!(vie_encoder->Init() &&
@@ -223,8 +219,6 @@ int ViEChannelManager::DeleteChannel(int channel_id) {
ChannelMap::iterator c_it = channel_map_.find(channel_id);
if (c_it == channel_map_.end()) {
// No such channel.
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
- "%s Channel doesn't exist: %d", __FUNCTION__, channel_id);
return -1;
}
vie_channel = c_it->second;
@@ -274,22 +268,17 @@ int ViEChannelManager::DeleteChannel(int channel_id) {
// deleted, which might take time.
// If statement just to show that this object is not always deleted.
if (vie_encoder) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
- "%s ViEEncoder deleted for channel %d", __FUNCTION__,
- channel_id);
+ LOG(LS_VERBOSE) << "ViEEncoder deleted for channel " << channel_id;
delete vie_encoder;
}
// If statement just to show that this object is not always deleted.
if (group) {
// Delete the group if empty last since the encoder holds a pointer to the
// BitrateController object that the group owns.
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
- "%s ChannelGroup deleted for channel %d", __FUNCTION__,
- channel_id);
+ LOG(LS_VERBOSE) << "Channel group deleted for channel " << channel_id;
delete group;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
- "%s Channel %d deleted", __FUNCTION__, channel_id);
+ LOG(LS_VERBOSE) << "Channel deleted " << channel_id;
return 0;
}
@@ -304,9 +293,6 @@ int ViEChannelManager::SetVoiceEngine(VoiceEngine* voice_engine) {
// Get new sync interface.
sync_interface = VoEVideoSync::GetInterface(voice_engine);
if (!sync_interface) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
- "%s Can't get audio sync interface from VoiceEngine.",
- __FUNCTION__);
return -1;
}
}
@@ -327,8 +313,7 @@ int ViEChannelManager::ConnectVoiceChannel(int channel_id,
int audio_channel_id) {
CriticalSectionScoped cs(channel_id_critsect_);
if (!voice_sync_interface_) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
- "No VoE set");
+ LOG_F(LS_ERROR) << "No VoE set.";
return -1;
}
ViEChannel* channel = ViEChannelPtr(channel_id);
@@ -366,6 +351,19 @@ bool ViEChannelManager::SetRembStatus(int channel_id, bool sender,
return group->SetChannelRembStatus(channel_id, sender, receiver, channel);
}
+bool ViEChannelManager::SetReservedTransmitBitrate(
+ int channel_id, uint32_t reserved_transmit_bitrate_bps) {
+ CriticalSectionScoped cs(channel_id_critsect_);
+ ChannelGroup* group = FindGroup(channel_id);
+ if (!group) {
+ return false;
+ }
+
+ BitrateController* bitrate_controller = group->GetBitrateController();
+ bitrate_controller->SetReservedBitrate(reserved_transmit_bitrate_bps);
+ return true;
+}
+
void ViEChannelManager::UpdateSsrcs(int channel_id,
const std::list<unsigned int>& ssrcs) {
CriticalSectionScoped cs(channel_id_critsect_);
@@ -387,6 +385,43 @@ void ViEChannelManager::UpdateSsrcs(int channel_id,
}
}
+bool ViEChannelManager::SetBandwidthEstimationConfig(
+ int channel_id, const webrtc::Config& config) {
+ CriticalSectionScoped cs(channel_id_critsect_);
+ ChannelGroup* group = FindGroup(channel_id);
+ if (!group) {
+ return false;
+ }
+ group->SetBandwidthEstimationConfig(config);
+ return true;
+}
+
+bool ViEChannelManager::GetEstimatedSendBandwidth(
+ int channel_id, uint32_t* estimated_bandwidth) const {
+ CriticalSectionScoped cs(channel_id_critsect_);
+ ChannelGroup* group = FindGroup(channel_id);
+ if (!group) {
+ return false;
+ }
+ group->GetBitrateController()->AvailableBandwidth(estimated_bandwidth);
+ return true;
+}
+
+bool ViEChannelManager::GetEstimatedReceiveBandwidth(
+ int channel_id, uint32_t* estimated_bandwidth) const {
+ CriticalSectionScoped cs(channel_id_critsect_);
+ ChannelGroup* group = FindGroup(channel_id);
+ if (!group) {
+ return false;
+ }
+ std::vector<unsigned int> ssrcs;
+ if (!group->GetRemoteBitrateEstimator()->LatestEstimate(
+ &ssrcs, estimated_bandwidth) || ssrcs.empty()) {
+ *estimated_bandwidth = 0;
+ }
+ return true;
+}
+
bool ViEChannelManager::CreateChannelObject(
int channel_id,
ViEEncoder* vie_encoder,
@@ -402,7 +437,7 @@ bool ViEChannelManager::CreateChannelObject(
ViEChannel* vie_channel = new ViEChannel(channel_id, engine_id_,
number_of_cores_,
- config_,
+ engine_config_,
*module_process_thread_,
intra_frame_observer,
bandwidth_observer,
@@ -412,21 +447,15 @@ bool ViEChannelManager::CreateChannelObject(
send_rtp_rtcp_module,
sender);
if (vie_channel->Init() != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
- "%s could not init channel", __FUNCTION__, channel_id);
delete vie_channel;
return false;
}
VideoCodec encoder;
if (vie_encoder->GetEncoder(&encoder) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
- "%s: Could not GetEncoder.", __FUNCTION__);
delete vie_channel;
return false;
}
if (sender && vie_channel->SetSendCodec(encoder) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
- "%s: Could not SetSendCodec.", __FUNCTION__);
delete vie_channel;
return false;
}
@@ -440,8 +469,7 @@ ViEChannel* ViEChannelManager::ViEChannelPtr(int channel_id) const {
CriticalSectionScoped cs(channel_id_critsect_);
ChannelMap::const_iterator it = channel_map_.find(channel_id);
if (it == channel_map_.end()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
- "%s Channel doesn't exist: %d", __FUNCTION__, channel_id);
+ LOG(LS_ERROR) << "Channel doesn't exist " << channel_id;
return NULL;
}
return it->second;
@@ -466,8 +494,7 @@ int ViEChannelManager::FreeChannelId() {
}
idx++;
}
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
- "Max number of channels reached: %d", channel_map_.size());
+ LOG(LS_ERROR) << "Max number of channels reached.";
return -1;
}
@@ -478,8 +505,8 @@ void ViEChannelManager::ReturnChannelId(int channel_id) {
free_channel_ids_[channel_id - kViEChannelIdBase] = true;
}
-ChannelGroup* ViEChannelManager::FindGroup(int channel_id) {
- for (ChannelGroups::iterator it = channel_groups_.begin();
+ChannelGroup* ViEChannelManager::FindGroup(int channel_id) const {
+ for (ChannelGroups::const_iterator it = channel_groups_.begin();
it != channel_groups_.end(); ++it) {
if ((*it)->HasChannel(channel_id)) {
return *it;
diff --git a/chromium/third_party/webrtc/video_engine/vie_channel_manager.h b/chromium/third_party/webrtc/video_engine/vie_channel_manager.h
index db9eb113420..2112dacb263 100644
--- a/chromium/third_party/webrtc/video_engine/vie_channel_manager.h
+++ b/chromium/third_party/webrtc/video_engine/vie_channel_manager.h
@@ -50,7 +50,8 @@ class ViEChannelManager: private ViEManagerBase {
void SetModuleProcessThread(ProcessThread* module_process_thread);
// Creates a new channel. 'channel_id' will be the id of the created channel.
- int CreateChannel(int* channel_id);
+ int CreateChannel(int* channel_id,
+ const Config* config);
// Creates a new channel grouped with |original_channel|. The new channel
// will get its own |ViEEncoder| if |sender| is set to true. It will be a
@@ -74,10 +75,22 @@ class ViEChannelManager: private ViEManagerBase {
// Adds a channel to include when sending REMB.
bool SetRembStatus(int channel_id, bool sender, bool receiver);
+ bool SetReservedTransmitBitrate(int channel_id,
+ uint32_t reserved_transmit_bitrate_bps);
+
// Updates the SSRCs for a channel. If one of the SSRCs already is registered,
// it will simply be ignored and no error is returned.
void UpdateSsrcs(int channel_id, const std::list<unsigned int>& ssrcs);
+ // Sets bandwidth estimation related configurations.
+ bool SetBandwidthEstimationConfig(int channel_id,
+ const webrtc::Config& config);
+
+ bool GetEstimatedSendBandwidth(int channel_id,
+ uint32_t* estimated_bandwidth) const;
+ bool GetEstimatedReceiveBandwidth(int channel_id,
+ uint32_t* estimated_bandwidth) const;
+
private:
// Creates a channel object connected to |vie_encoder|. Assumed to be called
// protected.
@@ -104,7 +117,7 @@ class ViEChannelManager: private ViEManagerBase {
void ReturnChannelId(int channel_id);
// Returns the iterator to the ChannelGroup containing |channel_id|.
- ChannelGroup* FindGroup(int channel_id);
+ ChannelGroup* FindGroup(int channel_id) const;
// Returns true if at least one other channels uses the same ViEEncoder as
// channel_id.
@@ -131,7 +144,7 @@ class ViEChannelManager: private ViEManagerBase {
VoiceEngine* voice_engine_;
ProcessThread* module_process_thread_;
- const Config& config_;
+ const Config& engine_config_;
};
class ViEChannelManagerScoped: private ViEManagerScopedBase {
diff --git a/chromium/third_party/webrtc/video_engine/vie_codec_impl.cc b/chromium/third_party/webrtc/video_engine/vie_codec_impl.cc
index b46eb88d37e..3ba56de54d8 100644
--- a/chromium/third_party/webrtc/video_engine/vie_codec_impl.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_codec_impl.cc
@@ -15,7 +15,6 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -28,6 +27,52 @@
namespace webrtc {
+static void LogCodec(const VideoCodec& codec) {
+ LOG(LS_INFO) << "CodecType " << codec.codecType
+ << ", pl_type " << static_cast<int>(codec.plType)
+ << ", resolution " << codec.width
+ << " x " << codec.height
+ << ", start br " << codec.startBitrate
+ << ", min br " << codec.minBitrate
+ << ", max br " << codec.maxBitrate
+ << ", max fps " << static_cast<int>(codec.maxFramerate)
+ << ", max qp " << codec.qpMax
+ << ", number of streams "
+ << static_cast<int>(codec.numberOfSimulcastStreams);
+ if (codec.codecType == kVideoCodecVP8) {
+ LOG(LS_INFO) << "VP8 specific settings";
+ LOG(LS_INFO) << "pictureLossIndicationOn "
+ << codec.codecSpecific.VP8.pictureLossIndicationOn
+ << ", feedbackModeOn "
+ << codec.codecSpecific.VP8.feedbackModeOn
+ << ", complexity "
+ << codec.codecSpecific.VP8.complexity
+ << ", resilience "
+ << codec.codecSpecific.VP8.resilience
+ << ", numberOfTemporalLayers "
+ << static_cast<int>(
+ codec.codecSpecific.VP8.numberOfTemporalLayers)
+ << ", keyFrameinterval "
+ << codec.codecSpecific.VP8.keyFrameInterval;
+ for (int idx = 0; idx < codec.numberOfSimulcastStreams; ++idx) {
+ LOG(LS_INFO) << "Stream " << codec.simulcastStream[idx].width
+ << " x " << codec.simulcastStream[idx].height;
+ LOG(LS_INFO) << "Temporal layers "
+ << static_cast<int>(
+ codec.simulcastStream[idx].numberOfTemporalLayers)
+ << ", min br "
+ << codec.simulcastStream[idx].minBitrate
+ << ", target br "
+ << codec.simulcastStream[idx].targetBitrate
+ << ", max br "
+ << codec.simulcastStream[idx].maxBitrate
+ << ", qp max "
+ << codec.simulcastStream[idx].qpMax;
+ }
+ }
+}
+
+
ViECodec* ViECodec::GetInterface(VideoEngine* video_engine) {
#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
if (!video_engine) {
@@ -44,45 +89,33 @@ ViECodec* ViECodec::GetInterface(VideoEngine* video_engine) {
}
int ViECodecImpl::Release() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
- "ViECodecImpl::Release()");
+ LOG(LS_INFO) << "ViECodec::Release.";
// Decrease ref count.
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
- "ViECodec released too many times");
+ LOG(LS_WARNING) << "ViECodec released too many times.";
shared_data_->SetLastError(kViEAPIDoesNotExist);
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
- "ViECodec reference count: %d", ref_count);
return ref_count;
}
ViECodecImpl::ViECodecImpl(ViESharedData* shared_data)
: shared_data_(shared_data) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViECodecImpl::ViECodecImpl() Ctor");
}
ViECodecImpl::~ViECodecImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViECodecImpl::~ViECodecImpl() Dtor");
}
int ViECodecImpl::NumberOfCodecs() const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s", __FUNCTION__);
// +2 because of FEC (RED and ULPFEC).
return static_cast<int>((VideoCodingModule::NumberOfCodecs() + 2));
}
int ViECodecImpl::GetCodec(const unsigned char list_number,
VideoCodec& video_codec) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(list_number: %d)", __FUNCTION__, list_number);
if (list_number == VideoCodingModule::NumberOfCodecs()) {
memset(&video_codec, 0, sizeof(VideoCodec));
strcpy(video_codec.plName, "red");
@@ -94,9 +127,6 @@ int ViECodecImpl::GetCodec(const unsigned char list_number,
video_codec.codecType = kVideoCodecULPFEC;
video_codec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
} else if (VideoCodingModule::Codec(list_number, &video_codec) != VCM_OK) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Could not get codec for list_number: %u", __FUNCTION__,
- list_number);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
@@ -105,33 +135,8 @@ int ViECodecImpl::GetCodec(const unsigned char list_number,
int ViECodecImpl::SetSendCodec(const int video_channel,
const VideoCodec& video_codec) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
- video_channel, video_codec.codecType);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: codec: %d, pl_type: %d, width: %d, height: %d, bitrate: %d"
- "maxBr: %d, min_br: %d, frame_rate: %d, qpMax: %u,"
- "numberOfSimulcastStreams: %u )", __FUNCTION__,
- video_codec.codecType, video_codec.plType, video_codec.width,
- video_codec.height, video_codec.startBitrate,
- video_codec.maxBitrate, video_codec.minBitrate,
- video_codec.maxFramerate, video_codec.qpMax,
- video_codec.numberOfSimulcastStreams);
- if (video_codec.codecType == kVideoCodecVP8) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "pictureLossIndicationOn: %d, feedbackModeOn: %d, "
- "complexity: %d, resilience: %d, numberOfTemporalLayers: %u"
- "keyFrameInterval %d",
- video_codec.codecSpecific.VP8.pictureLossIndicationOn,
- video_codec.codecSpecific.VP8.feedbackModeOn,
- video_codec.codecSpecific.VP8.complexity,
- video_codec.codecSpecific.VP8.resilience,
- video_codec.codecSpecific.VP8.numberOfTemporalLayers,
- video_codec.codecSpecific.VP8.keyFrameInterval);
- }
+ LOG(LS_INFO) << "SetSendCodec for channel " << video_channel;
+ LogCodec(video_codec);
if (!CodecValid(video_codec)) {
// Error logged.
shared_data_->SetLastError(kViECodecInvalidCodec);
@@ -141,9 +146,6 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -151,9 +153,7 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
assert(vie_encoder);
if (vie_encoder->Owner() != video_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Receive only channel %d", __FUNCTION__, video_channel);
+ LOG_F(LS_ERROR) << "Receive only channel.";
shared_data_->SetLastError(kViECodecReceiveOnlyChannel);
return -1;
}
@@ -166,14 +166,14 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
video_codec_internal.height *
video_codec_internal.maxFramerate)
/ 1000;
- if (video_codec_internal.startBitrate > video_codec_internal.maxBitrate) {
- // Don't limit the set start bitrate.
- video_codec_internal.maxBitrate = video_codec_internal.startBitrate;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: New max bitrate set to %d kbps", __FUNCTION__,
- video_codec_internal.maxBitrate);
+ LOG(LS_INFO) << "New max bitrate set " << video_codec_internal.maxBitrate;
+ }
+
+ if (video_codec_internal.startBitrate < video_codec_internal.minBitrate) {
+ video_codec_internal.startBitrate = video_codec_internal.minBitrate;
+ }
+ if (video_codec_internal.startBitrate > video_codec_internal.maxBitrate) {
+ video_codec_internal.startBitrate = video_codec_internal.maxBitrate;
}
VideoCodec encoder;
@@ -192,10 +192,6 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
vie_encoder->Pause();
if (vie_encoder->SetEncoder(video_codec_internal) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not change encoder for channel %d", __FUNCTION__,
- video_channel);
shared_data_->SetLastError(kViECodecUnknownError);
return -1;
}
@@ -207,10 +203,6 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
++it) {
bool ret = true;
if ((*it)->SetSendCodec(video_codec_internal, new_rtp_stream) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not set send codec for channel %d", __FUNCTION__,
- video_channel);
ret = false;
}
if (!ret) {
@@ -225,9 +217,7 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
if (video_codec_internal.numberOfSimulcastStreams == 0) {
unsigned int ssrc = 0;
if (vie_channel->GetLocalSSRC(0, &ssrc) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not get ssrc", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Could not get ssrc.";
}
ssrcs.push_back(ssrc);
} else {
@@ -235,9 +225,7 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
++idx) {
unsigned int ssrc = 0;
if (vie_channel->GetLocalSSRC(idx, &ssrc) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not get ssrc for idx %d", __FUNCTION__, idx);
+ LOG_F(LS_ERROR) << "Could not get ssrc for stream " << idx;
}
ssrcs.push_back(ssrc);
}
@@ -264,16 +252,9 @@ int ViECodecImpl::SetSendCodec(const int video_channel,
int ViECodecImpl::GetSendCodec(const int video_channel,
VideoCodec& video_codec) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d)", __FUNCTION__, video_channel);
-
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No encoder for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -282,21 +263,11 @@ int ViECodecImpl::GetSendCodec(const int video_channel,
int ViECodecImpl::SetReceiveCodec(const int video_channel,
const VideoCodec& video_codec) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
- video_channel, video_codec.codecType);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: codec: %d, pl_type: %d, width: %d, height: %d, bitrate: %d,"
- "maxBr: %d, min_br: %d, frame_rate: %d", __FUNCTION__,
- video_codec.codecType, video_codec.plType, video_codec.width,
- video_codec.height, video_codec.startBitrate,
- video_codec.maxBitrate, video_codec.minBitrate,
- video_codec.maxFramerate);
+ LOG(LS_INFO) << "SetReceiveCodec for channel " << video_channel;
+ LOG(LS_INFO) << "Codec type " << video_codec.codecType
+ << ", payload type " << video_codec.plType;
if (CodecValid(video_codec) == false) {
- // Error logged.
shared_data_->SetLastError(kViECodecInvalidCodec);
return -1;
}
@@ -304,18 +275,11 @@ int ViECodecImpl::SetReceiveCodec(const int video_channel,
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vie_channel->SetReceiveCodec(video_codec) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not set receive codec for channel %d",
- __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecUnknownError);
return -1;
}
@@ -324,17 +288,9 @@ int ViECodecImpl::SetReceiveCodec(const int video_channel,
int ViECodecImpl::GetReceiveCodec(const int video_channel,
VideoCodec& video_codec) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
- video_channel, video_codec.codecType);
-
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -350,16 +306,11 @@ int ViECodecImpl::GetCodecConfigParameters(
const int video_channel,
unsigned char config_parameters[kConfigParameterSize],
unsigned char& config_parameters_size) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d)", __FUNCTION__, video_channel);
+ LOG(LS_INFO) << "GetCodecConfigParameters " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No encoder for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -374,17 +325,12 @@ int ViECodecImpl::GetCodecConfigParameters(
int ViECodecImpl::SetImageScaleStatus(const int video_channel,
const bool enable) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d, enable: %d)", __FUNCTION__, video_channel,
- enable);
+ LOG(LS_INFO) << "SetImageScaleStates for channel " << video_channel
+ << ", enable: " << enable;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -399,17 +345,9 @@ int ViECodecImpl::SetImageScaleStatus(const int video_channel,
int ViECodecImpl::GetSendCodecStastistics(const int video_channel,
unsigned int& key_frames,
unsigned int& delta_frames) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel %d)", __FUNCTION__, video_channel);
-
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No send codec for channel %d", __FUNCTION__,
- video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -424,17 +362,9 @@ int ViECodecImpl::GetSendCodecStastistics(const int video_channel,
int ViECodecImpl::GetReceiveCodecStastistics(const int video_channel,
unsigned int& key_frames,
unsigned int& delta_frames) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d)", __FUNCTION__,
- video_channel);
-
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -447,20 +377,11 @@ int ViECodecImpl::GetReceiveCodecStastistics(const int video_channel,
int ViECodecImpl::GetReceiveSideDelay(const int video_channel,
int* delay_ms) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d)", __FUNCTION__, video_channel);
- if (delay_ms == NULL) {
- LOG_F(LS_ERROR) << "NULL pointer argument.";
- return -1;
- }
+ assert(delay_ms != NULL);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -474,18 +395,9 @@ int ViECodecImpl::GetReceiveSideDelay(const int video_channel,
int ViECodecImpl::GetCodecTargetBitrate(const int video_channel,
unsigned int* bitrate) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
- video_channel);
-
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No send codec for channel %d", __FUNCTION__,
- video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -493,17 +405,9 @@ int ViECodecImpl::GetCodecTargetBitrate(const int video_channel,
}
unsigned int ViECodecImpl::GetDiscardedPackets(const int video_channel) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
- video_channel);
-
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -512,16 +416,12 @@ unsigned int ViECodecImpl::GetDiscardedPackets(const int video_channel) const {
int ViECodecImpl::SetKeyFrameRequestCallbackStatus(const int video_channel,
const bool enable) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d)", __FUNCTION__, video_channel);
+ LOG(LS_INFO) << "SetKeyFrameRequestCallbackStatus for " << video_channel
+ << ", enacle " << enable;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -535,17 +435,13 @@ int ViECodecImpl::SetKeyFrameRequestCallbackStatus(const int video_channel,
int ViECodecImpl::SetSignalKeyPacketLossStatus(const int video_channel,
const bool enable,
const bool only_key_frames) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(video_channel: %d, enable: %d, only_key_frames: %d)",
- __FUNCTION__, video_channel, enable);
+ LOG(LS_INFO) << "SetSignalKeyPacketLossStatus for " << video_channel
+ << "enable, " << enable
+ << ", only key frames " << only_key_frames;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -558,23 +454,15 @@ int ViECodecImpl::SetSignalKeyPacketLossStatus(const int video_channel,
int ViECodecImpl::RegisterEncoderObserver(const int video_channel,
ViEEncoderObserver& observer) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s", __FUNCTION__);
+ LOG(LS_INFO) << "RegisterEncoderObserver for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No encoder for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vie_encoder->RegisterCodecObserver(&observer) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not register codec observer at channel",
- __FUNCTION__);
shared_data_->SetLastError(kViECodecObserverAlreadyRegistered);
return -1;
}
@@ -582,15 +470,11 @@ int ViECodecImpl::RegisterEncoderObserver(const int video_channel,
}
int ViECodecImpl::DeregisterEncoderObserver(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s", __FUNCTION__);
+ LOG(LS_INFO) << "DeregisterEncoderObserver for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No encoder for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -603,23 +487,15 @@ int ViECodecImpl::DeregisterEncoderObserver(const int video_channel) {
int ViECodecImpl::RegisterDecoderObserver(const int video_channel,
ViEDecoderObserver& observer) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s", __FUNCTION__);
+ LOG(LS_INFO) << "RegisterDecoderObserver for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vie_channel->RegisterCodecObserver(&observer) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not register codec observer at channel",
- __FUNCTION__);
shared_data_->SetLastError(kViECodecObserverAlreadyRegistered);
return -1;
}
@@ -627,16 +503,11 @@ int ViECodecImpl::RegisterDecoderObserver(const int video_channel,
}
int ViECodecImpl::DeregisterDecoderObserver(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id()), "%s",
- __FUNCTION__);
+ LOG(LS_INFO) << "DeregisterDecodeObserver for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -648,15 +519,11 @@ int ViECodecImpl::DeregisterDecoderObserver(const int video_channel) {
}
int ViECodecImpl::SendKeyFrame(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(video_channel: %d)", __FUNCTION__, video_channel);
+ LOG(LS_INFO) << "SendKeyFrame on channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -669,17 +536,12 @@ int ViECodecImpl::SendKeyFrame(const int video_channel) {
int ViECodecImpl::WaitForFirstKeyFrame(const int video_channel,
const bool wait) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id()),
- "%s(video_channel: %d, wait: %d)", __FUNCTION__, video_channel,
- wait);
+ LOG(LS_INFO) << "WaitForFirstKeyFrame for channel " << video_channel
+ << ", wait " << wait;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return -1;
}
@@ -692,44 +554,35 @@ int ViECodecImpl::WaitForFirstKeyFrame(const int video_channel,
int ViECodecImpl::StartDebugRecording(int video_channel,
const char* file_name_utf8) {
+ LOG(LS_INFO) << "StartDebugRecording for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No encoder %d", __FUNCTION__, video_channel);
return -1;
}
return vie_encoder->StartDebugRecording(file_name_utf8);
}
int ViECodecImpl::StopDebugRecording(int video_channel) {
+ LOG(LS_INFO) << "StopDebugRecording for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No encoder %d", __FUNCTION__, video_channel);
return -1;
}
return vie_encoder->StopDebugRecording();
}
void ViECodecImpl::SuspendBelowMinBitrate(int video_channel) {
+ LOG(LS_INFO) << "SuspendBelowMinBitrate for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No encoder %d", __FUNCTION__, video_channel);
return;
}
vie_encoder->SuspendBelowMinBitrate();
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
return;
}
// Must enable pacing when enabling SuspendBelowMinBitrate. Otherwise, no
@@ -743,9 +596,6 @@ bool ViECodecImpl::GetSendSideDelay(int video_channel, int* avg_delay_ms,
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidChannelId);
return false;
}
@@ -763,8 +613,7 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
// We only care about the type and name for red.
return true;
}
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Codec type doesn't match pl_name", video_codec.plType);
+ LOG_F(LS_ERROR) << "Invalid RED configuration.";
return false;
} else if (video_codec.codecType == kVideoCodecULPFEC) {
#if defined(WIN32)
@@ -775,8 +624,7 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
// We only care about the type and name for ULPFEC.
return true;
}
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Codec type doesn't match pl_name", video_codec.plType);
+ LOG_F(LS_ERROR) << "Invalid ULPFEC configuration.";
return false;
} else if ((video_codec.codecType == kVideoCodecVP8 &&
strncmp(video_codec.plName, "VP8", 4) == 0) ||
@@ -784,32 +632,28 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
strncmp(video_codec.plName, "I420", 4) == 0)) {
// OK.
} else if (video_codec.codecType != kVideoCodecGeneric) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Codec type doesn't match pl_name", video_codec.plType);
+ LOG(LS_ERROR) << "Codec type and name mismatch.";
return false;
}
if (video_codec.plType == 0 || video_codec.plType > 127) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Invalid codec payload type: %d", video_codec.plType);
+ LOG(LS_ERROR) << "Invalif payload type: " << video_codec.plType;
return false;
}
if (video_codec.width > kViEMaxCodecWidth ||
video_codec.height > kViEMaxCodecHeight) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid codec size: %u x %u",
- video_codec.width, video_codec.height);
+ LOG(LS_ERROR) << "Invalid codec resolution " << video_codec.width
+ << " x " << video_codec.height;
return false;
}
if (video_codec.startBitrate < kViEMinCodecBitrate) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid start_bitrate: %u",
- video_codec.startBitrate);
+ LOG(LS_ERROR) << "Invalid start bitrate.";
return false;
}
if (video_codec.minBitrate < kViEMinCodecBitrate) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid min_bitrate: %u",
- video_codec.minBitrate);
+ LOG(LS_ERROR) << "Invalid min bitrate.";
return false;
}
return true;
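The start-bitrate handling added to ViECodecImpl::SetSendCodec above reduces to clamping startBitrate into the [minBitrate, maxBitrate] range before the codec reaches the encoder. A standalone sketch of that clamp; the helper name is ours and not part of the patch:

  // Sketch of the clamp performed in SetSendCodec; ClampStartBitrate is an
  // illustrative name, not a WebRTC API.
  static void ClampStartBitrate(webrtc::VideoCodec* codec) {
    if (codec->startBitrate < codec->minBitrate)
      codec->startBitrate = codec->minBitrate;
    if (codec->startBitrate > codec->maxBitrate)
      codec->startBitrate = codec->maxBitrate;
  }
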
diff --git a/chromium/third_party/webrtc/video_engine/vie_codec_unittest.cc b/chromium/third_party/webrtc/video_engine/vie_codec_unittest.cc
new file mode 100644
index 00000000000..9f648ec5212
--- /dev/null
+++ b/chromium/third_party/webrtc/video_engine/vie_codec_unittest.cc
@@ -0,0 +1,230 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_types.h"
+
+namespace webrtc {
+
+// Builds VP8 codec with 0 simulcast streams.
+void BuildVP8Codec(webrtc::VideoCodec* video_codec) {
+ video_codec->codecType = kVideoCodecVP8;
+ strncpy(video_codec->plName, "VP8", 4);
+ video_codec->plType = 100;
+ video_codec->width = 1280;
+ video_codec->height = 720;
+
+ video_codec->startBitrate = 1000; // kbps
+ video_codec->maxBitrate = 2000; // kbps
+ video_codec->minBitrate = 1000; // kbps
+ video_codec->maxFramerate = 30;
+
+ video_codec->qpMax = 50;
+ video_codec->numberOfSimulcastStreams = 0;
+ video_codec->mode = kRealtimeVideo;
+
+ // Set VP8 codec specific info.
+ video_codec->codecSpecific.VP8.pictureLossIndicationOn = true;
+ video_codec->codecSpecific.VP8.feedbackModeOn = true;
+ video_codec->codecSpecific.VP8.complexity = kComplexityNormal;
+ video_codec->codecSpecific.VP8.resilience = kResilienceOff;
+ video_codec->codecSpecific.VP8.numberOfTemporalLayers = 0;
+ video_codec->codecSpecific.VP8.denoisingOn = true;
+ video_codec->codecSpecific.VP8.errorConcealmentOn = true;
+ video_codec->codecSpecific.VP8.automaticResizeOn = true;
+ video_codec->codecSpecific.VP8.frameDroppingOn = true;
+ video_codec->codecSpecific.VP8.keyFrameInterval = 200;
+}
+
+
+void SetSimulcastSettings(webrtc::VideoCodec* video_codec) {
+ // Simulcast settings.
+ video_codec->numberOfSimulcastStreams = 1;
+ video_codec->simulcastStream[0].width = 320;
+ video_codec->simulcastStream[0].height = 180;
+ video_codec->simulcastStream[0].numberOfTemporalLayers = 0;
+ video_codec->simulcastStream[0].maxBitrate = 100;
+ video_codec->simulcastStream[0].targetBitrate = 100;
+ video_codec->simulcastStream[0].minBitrate = 0;
+ video_codec->simulcastStream[0].qpMax = video_codec->qpMax;
+}
+
+
+// This test compares two VideoCodec structs, excluding codec-specific and
+// simulcast stream settings.
+TEST(ViECodecTest, TestCompareCodecs) {
+ VideoCodec codec1, codec2;
+ memset(&codec1, 0, sizeof(VideoCodec));
+ memset(&codec2, 0, sizeof(VideoCodec));
+
+ BuildVP8Codec(&codec1);
+ BuildVP8Codec(&codec2);
+
+ EXPECT_TRUE(codec1 == codec2);
+ EXPECT_FALSE(codec1 != codec2);
+
+ // plName is case insensitive.
+ strncpy(codec2.plName, "vp8", 4);
+ EXPECT_TRUE(codec1 == codec2);
+
+ codec2.codecType = kVideoCodecUnknown;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify plType.
+ BuildVP8Codec(&codec2);
+ codec2.plType = 101;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify height and width.
+ BuildVP8Codec(&codec2);
+ codec2.width = 640;
+ codec2.height = 480;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify framerate, default value is 30.
+ BuildVP8Codec(&codec2);
+ codec2.maxFramerate = 15;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modifying startBitrate, default value is 1000 kbps.
+ BuildVP8Codec(&codec2);
+ codec2.startBitrate = 2000;
+ EXPECT_FALSE(codec1 == codec2);
+ // Modify maxBitrate, default value is 2000 kbps.
+ BuildVP8Codec(&codec2);
+ codec2.maxBitrate = 3000;
+ EXPECT_FALSE(codec1 == codec2);
+ // Modify minBitrate, default value is 1000 kbps.
+ BuildVP8Codec(&codec2);
+ codec2.minBitrate = 500;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify qpMax.
+ BuildVP8Codec(&codec2);
+ codec2.qpMax = 100;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Modify mode
+ BuildVP8Codec(&codec2);
+ codec2.mode = kScreensharing;
+ EXPECT_FALSE(codec1 == codec2);
+}
+
+// Test VP8-specific comparison.
+TEST(ViECodecTest, TestCompareVP8CodecSpecific) {
+ VideoCodec codec1, codec2;
+ memset(&codec1, 0, sizeof(VideoCodec));
+ memset(&codec2, 0, sizeof(VideoCodec));
+
+ BuildVP8Codec(&codec1);
+ BuildVP8Codec(&codec2);
+ EXPECT_TRUE(codec1 == codec2);
+
+ // pictureLossIndicationOn
+ codec2.codecSpecific.VP8.pictureLossIndicationOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // feedbackModeOn
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.feedbackModeOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // complexity
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.complexity = kComplexityHigh;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // resilience
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.resilience = kResilientStream;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // numberOfTemporalLayers
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.numberOfTemporalLayers = 2;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // denoisingOn
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.denoisingOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // errorConcealmentOn
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.errorConcealmentOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // automaticResizeOn
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.automaticResizeOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // frameDroppingOn
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.frameDroppingOn = false;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // keyFrameInterval
+ BuildVP8Codec(&codec2);
+ codec2.codecSpecific.VP8.keyFrameInterval = 100;
+ EXPECT_FALSE(codec1 == codec2);
+}
+
+// This test compares simulcast stream information in VideoCodec.
+TEST(ViECodecTest, TestCompareSimulcastStreams) {
+ VideoCodec codec1, codec2;
+ memset(&codec1, 0, sizeof(VideoCodec));
+ memset(&codec2, 0, sizeof(VideoCodec));
+
+ BuildVP8Codec(&codec1);
+ BuildVP8Codec(&codec2);
+ // Set simulcast settings.
+ SetSimulcastSettings(&codec1);
+ SetSimulcastSettings(&codec2);
+ EXPECT_TRUE(codec1 == codec2);
+
+ // Modify number of streams.
+ codec2.numberOfSimulcastStreams = 2;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // Reset the stream count.
+ codec2.numberOfSimulcastStreams = 1;
+ // Modify height and width in codec2.
+ codec2.simulcastStream[0].width = 640;
+ codec2.simulcastStream[0].height = 480;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // numberOfTemporalLayers
+ SetSimulcastSettings(&codec2);
+ codec2.simulcastStream[0].numberOfTemporalLayers = 2;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // maxBitrate
+ SetSimulcastSettings(&codec2);
+ codec2.simulcastStream[0].maxBitrate = 1000;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // targetBitrate
+ SetSimulcastSettings(&codec2);
+ codec2.simulcastStream[0].targetBitrate = 1000;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // minBitrate
+ SetSimulcastSettings(&codec2);
+ codec2.simulcastStream[0].minBitrate = 50;
+ EXPECT_FALSE(codec1 == codec2);
+
+ // qpMax
+ SetSimulcastSettings(&codec2);
+ codec2.simulcastStream[0].qpMax = 100;
+ EXPECT_FALSE(codec1 == codec2);
+}
+
+} // namespace webrtc
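The tests above build a fully populated VideoCodec via BuildVP8Codec(). The same struct is what ViECodecImpl::SetSendCodec validates and forwards to the encoder, so a short, hedged sketch of wiring the two together (includes trimmed; |vie_codec| is assumed to come from ViECodec::GetInterface and |channel| to be an existing channel id):

  // Illustrative glue only: reuse the unit-test helper to configure a send
  // codec. Assumes <cstring> for memset and a valid ViECodec interface.
  static int ConfigureVp8Send(webrtc::ViECodec* vie_codec, int channel) {
    webrtc::VideoCodec codec;
    memset(&codec, 0, sizeof(codec));
    webrtc::BuildVP8Codec(&codec);  // Helper from the test file above.
    return vie_codec->SetSendCodec(channel, codec);
  }
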
diff --git a/chromium/third_party/webrtc/video_engine/vie_defines.h b/chromium/third_party/webrtc/video_engine/vie_defines.h
index 97f64f49d05..eb499fa8fff 100644
--- a/chromium/third_party/webrtc/video_engine/vie_defines.h
+++ b/chromium/third_party/webrtc/video_engine/vie_defines.h
@@ -34,7 +34,7 @@ namespace webrtc {
enum { kViEMinKeyRequestIntervalMs = 300 };
// ViEBase
-enum { kViEMaxNumberOfChannels = 32 };
+enum { kViEMaxNumberOfChannels = 64 };
enum { kViEVersionMaxMessageSize = 1024 };
enum { kViEMaxModuleVersionSize = 960 };
@@ -51,14 +51,6 @@ enum { kViEMaxCodecHeight = 3072 };
enum { kViEMaxCodecFramerate = 60 };
enum { kViEMinCodecBitrate = 30 };
-// ViEEncryption
-enum { kViEMaxSrtpKeyLength = 30 };
-enum { kViEMinSrtpEncryptLength = 16 };
-enum { kViEMaxSrtpEncryptLength = 256 };
-enum { kViEMaxSrtpAuthSh1Length = 20 };
-enum { kViEMaxSrtpTagAuthNullLength = 12 };
-enum { kViEMaxSrtpKeyAuthNullLength = 256 };
-
// ViENetwork
enum { kViEMaxMtu = 1500 };
enum { kViESocketThreads = 1 };
diff --git a/chromium/third_party/webrtc/video_engine/vie_encoder.cc b/chromium/third_party/webrtc/video_engine/vie_encoder.cc
index ee5da3857ea..40a61deb6d7 100644
--- a/chromium/third_party/webrtc/video_engine/vie_encoder.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_encoder.cc
@@ -26,7 +26,6 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_image_process.h"
@@ -140,8 +139,7 @@ ViEEncoder::ViEEncoder(int32_t engine_id,
: engine_id_(engine_id),
channel_id_(channel_id),
number_of_cores_(number_of_cores),
- vcm_(*webrtc::VideoCodingModule::Create(ViEModuleId(engine_id,
- channel_id))),
+ vcm_(*webrtc::VideoCodingModule::Create()),
vpm_(*webrtc::VideoProcessingModule::Create(ViEModuleId(engine_id,
channel_id))),
callback_cs_(CriticalSectionWrapper::CreateCriticalSection()),
@@ -149,6 +147,7 @@ ViEEncoder::ViEEncoder(int32_t engine_id,
bitrate_controller_(bitrate_controller),
time_of_last_incoming_frame_ms_(0),
send_padding_(false),
+ min_transmit_bitrate_kbps_(0),
target_delay_ms_(0),
network_is_transmitting_(true),
encoder_paused_(false),
@@ -165,11 +164,6 @@ ViEEncoder::ViEEncoder(int32_t engine_id,
qm_callback_(NULL),
video_suspended_(false),
pre_encode_callback_(NULL) {
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
- ViEId(engine_id, channel_id),
- "%s(engine_id: %d) 0x%p - Constructor", __FUNCTION__, engine_id,
- this);
-
RtpRtcp::Configuration configuration;
configuration.id = ViEModuleId(engine_id_, channel_id_);
configuration.audio = false; // Video.
@@ -183,9 +177,6 @@ ViEEncoder::ViEEncoder(int32_t engine_id,
bool ViEEncoder::Init() {
if (vcm_.InitializeSender() != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s InitializeSender failure", __FUNCTION__);
return false;
}
vpm_.EnableTemporalDecimation(true);
@@ -196,9 +187,6 @@ bool ViEEncoder::Init() {
if (module_process_thread_.RegisterModule(&vcm_) != 0 ||
module_process_thread_.RegisterModule(default_rtp_rtcp_.get()) != 0 ||
module_process_thread_.RegisterModule(paced_sender_.get()) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s RegisterModule failure", __FUNCTION__);
return false;
}
if (qm_callback_) {
@@ -209,29 +197,26 @@ bool ViEEncoder::Init() {
#ifdef VIDEOCODEC_VP8
VideoCodec video_codec;
if (vcm_.Codec(webrtc::kVideoCodecVP8, &video_codec) != VCM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s Codec failure", __FUNCTION__);
return false;
}
- send_padding_ = video_codec.numberOfSimulcastStreams > 1;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ send_padding_ = video_codec.numberOfSimulcastStreams > 1;
+ }
if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
default_rtp_rtcp_->MaxDataPayloadLength()) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s RegisterSendCodec failure", __FUNCTION__);
return false;
}
if (default_rtp_rtcp_->RegisterSendPayload(video_codec) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s RegisterSendPayload failure", __FUNCTION__);
return false;
}
#else
VideoCodec video_codec;
if (vcm_.Codec(webrtc::kVideoCodecI420, &video_codec) == VCM_OK) {
- send_padding_ = video_codec.numberOfSimulcastStreams > 1;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ send_padding_ = video_codec.numberOfSimulcastStreams > 1;
+ }
vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
default_rtp_rtcp_->MaxDataPayloadLength());
default_rtp_rtcp_->RegisterSendPayload(video_codec);
@@ -241,30 +226,18 @@ bool ViEEncoder::Init() {
#endif
if (vcm_.RegisterTransportCallback(this) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "ViEEncoder: VCM::RegisterTransportCallback failure");
return false;
}
if (vcm_.RegisterSendStatisticsCallback(this) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "ViEEncoder: VCM::RegisterSendStatisticsCallback failure");
return false;
}
if (vcm_.RegisterVideoQMCallback(qm_callback_) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "VCM::RegisterQMCallback failure");
return false;
}
return true;
}
ViEEncoder::~ViEEncoder() {
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "ViEEncoder Destructor 0x%p, engine_id: %d", this, engine_id_);
if (bitrate_controller_) {
bitrate_controller_->RemoveBitrateObserver(bitrate_observer_.get());
}
@@ -282,10 +255,6 @@ int ViEEncoder::Owner() const {
}
void ViEEncoder::SetNetworkTransmissionState(bool is_transmitting) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s(%s)", __FUNCTION__,
- is_transmitting ? "transmitting" : "not transmitting");
{
CriticalSectionScoped cs(data_cs_.get());
network_is_transmitting_ = is_transmitting;
@@ -298,17 +267,11 @@ void ViEEncoder::SetNetworkTransmissionState(bool is_transmitting) {
}
void ViEEncoder::Pause() {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
CriticalSectionScoped cs(data_cs_.get());
encoder_paused_ = true;
}
void ViEEncoder::Restart() {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s", __FUNCTION__);
CriticalSectionScoped cs(data_cs_.get());
encoder_paused_ = false;
}
@@ -319,9 +282,6 @@ uint8_t ViEEncoder::NumberOfCodecs() {
int32_t ViEEncoder::GetCodec(uint8_t list_index, VideoCodec* video_codec) {
if (vcm_.Codec(list_index, video_codec) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: Could not get codec",
- __FUNCTION__);
return -1;
}
return 0;
@@ -330,43 +290,27 @@ int32_t ViEEncoder::GetCodec(uint8_t list_index, VideoCodec* video_codec) {
int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
uint8_t pl_type,
bool internal_source) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: pltype %u", __FUNCTION__,
- pl_type);
-
if (encoder == NULL)
return -1;
if (vcm_.RegisterExternalEncoder(encoder, pl_type, internal_source) !=
- VCM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Could not register external encoder");
+ VCM_OK) {
return -1;
}
return 0;
}
int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: pltype %u", __FUNCTION__, pl_type);
-
webrtc::VideoCodec current_send_codec;
if (vcm_.SendCodec(&current_send_codec) == VCM_OK) {
uint32_t current_bitrate_bps = 0;
if (vcm_.Bitrate(&current_bitrate_bps) != 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Failed to get the current encoder target bitrate.");
+ LOG(LS_WARNING) << "Failed to get the current encoder target bitrate.";
}
current_send_codec.startBitrate = (current_bitrate_bps + 500) / 1000;
}
if (vcm_.RegisterExternalEncoder(NULL, pl_type) != VCM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Could not deregister external encoder");
return -1;
}
@@ -375,12 +319,17 @@ int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
if (current_send_codec.plType == pl_type) {
uint16_t max_data_payload_length =
default_rtp_rtcp_->MaxDataPayloadLength();
- send_padding_ = current_send_codec.numberOfSimulcastStreams > 1;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ send_padding_ = current_send_codec.numberOfSimulcastStreams > 1;
+ }
+ // TODO(mflodman): Unfortunately the VideoCodec cached by VCM holds a raw
+ // pointer to an |extra_options| struct that is long gone. Clearing it here
+ // is a hack to prevent the following code from crashing. This should be
+ // fixed properly. https://code.google.com/p/chromium/issues/detail?id=348222
+ current_send_codec.extra_options = NULL;
if (vcm_.RegisterSendCodec(&current_send_codec, number_of_cores_,
max_data_payload_length) != VCM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Could not use internal encoder");
return -1;
}
}
@@ -388,23 +337,13 @@ int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
}
int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: CodecType: %d, width: %u, height: %u", __FUNCTION__,
- video_codec.codecType, video_codec.width, video_codec.height);
// Setting target width and height for VPM.
if (vpm_.SetTargetResolution(video_codec.width, video_codec.height,
video_codec.maxFramerate) != VPM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Could not set VPM target dimensions");
return -1;
}
if (default_rtp_rtcp_->RegisterSendPayload(video_codec) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Could register RTP module video payload");
return -1;
}
// Convert from kbps to bps.
@@ -417,12 +356,12 @@ int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
uint16_t max_data_payload_length =
default_rtp_rtcp_->MaxDataPayloadLength();
- send_padding_ = video_codec.numberOfSimulcastStreams > 1;
+ {
+ CriticalSectionScoped cs(data_cs_.get());
+ send_padding_ = video_codec.numberOfSimulcastStreams > 1;
+ }
if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
max_data_payload_length) != VCM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Could not register send codec");
return -1;
}
@@ -430,9 +369,6 @@ int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
// start and stop sending.
if (default_rtp_rtcp_->Sending() == false) {
if (default_rtp_rtcp_->SetSendingStatus(true) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Could start RTP module sending");
return -1;
}
}
@@ -442,17 +378,19 @@ int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
kTransmissionMaxBitrateMultiplier *
video_codec.maxBitrate * 1000);
+ CriticalSectionScoped crit(data_cs_.get());
+ int pad_up_to_bitrate_kbps = video_codec.startBitrate;
+ if (pad_up_to_bitrate_kbps < min_transmit_bitrate_kbps_)
+ pad_up_to_bitrate_kbps = min_transmit_bitrate_kbps_;
+
+ paced_sender_->UpdateBitrate(kPaceMultiplier * video_codec.startBitrate,
+ pad_up_to_bitrate_kbps);
+
return 0;
}
int32_t ViEEncoder::GetEncoder(VideoCodec* video_codec) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
-
if (vcm_.SendCodec(video_codec) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Could not get VCM send codec");
return -1;
}
return 0;
@@ -461,16 +399,10 @@ int32_t ViEEncoder::GetEncoder(VideoCodec* video_codec) {
int32_t ViEEncoder::GetCodecConfigParameters(
unsigned char config_parameters[kConfigParameterSize],
unsigned char& config_parameters_size) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
-
int32_t num_parameters =
vcm_.CodecConfigParameters(config_parameters, kConfigParameterSize);
if (num_parameters <= 0) {
config_parameters_size = 0;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Could not get config parameters");
return -1;
}
config_parameters_size = static_cast<unsigned char>(num_parameters);
@@ -478,16 +410,11 @@ int32_t ViEEncoder::GetCodecConfigParameters(
}
int32_t ViEEncoder::ScaleInputImage(bool enable) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s(enable %d)", __FUNCTION__,
- enable);
-
VideoFrameResampling resampling_mode = kFastRescaling;
- if (enable == true) {
+ // TODO(mflodman) What?
+ if (enable) {
// kInterpolation is currently not supported.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s not supported",
- __FUNCTION__, enable);
+ LOG_F(LS_ERROR) << "Not supported.";
return -1;
}
vpm_.SetInputFrameResampleMode(resampling_mode);
@@ -507,7 +434,8 @@ int ViEEncoder::TimeToSendPadding(int bytes) {
bool send_padding;
{
CriticalSectionScoped cs(data_cs_.get());
- send_padding = send_padding_ || video_suspended_;
+ send_padding =
+ send_padding_ || video_suspended_ || min_transmit_bitrate_kbps_ > 0;
}
if (send_padding) {
return default_rtp_rtcp_->TimeToSendPadding(bytes);
@@ -533,9 +461,6 @@ bool ViEEncoder::EncoderPaused() const {
}
RtpRtcp* ViEEncoder::SendRtpRtcpModule() {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
-
return default_rtp_rtcp_.get();
}
@@ -543,18 +468,13 @@ void ViEEncoder::DeliverFrame(int id,
I420VideoFrame* video_frame,
int num_csrcs,
const uint32_t CSRC[kRtpCsrcSize]) {
- WEBRTC_TRACE(webrtc::kTraceStream,
- webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: %llu", __FUNCTION__,
- video_frame->timestamp());
+ if (default_rtp_rtcp_->SendingMedia() == false) {
+ // We've paused or we have no channels attached, don't encode.
+ return;
+ }
{
CriticalSectionScoped cs(data_cs_.get());
time_of_last_incoming_frame_ms_ = TickTime::MillisecondTimestamp();
- if (default_rtp_rtcp_->SendingMedia() == false) {
- // We've paused or we have no channels attached, don't encode.
- return;
- }
if (EncoderPaused()) {
if (!encoder_paused_and_dropped_frame_) {
TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this);
@@ -567,6 +487,10 @@ void ViEEncoder::DeliverFrame(int id,
}
encoder_paused_and_dropped_frame_ = false;
}
+ if (video_frame->native_handle() != NULL) {
+ // TODO(wuchengli): add texture support. http://crbug.com/362437
+ return;
+ }
// Convert render time, in ms, to RTP timestamp.
const int kMsToRtpTimestamp = 90;
@@ -583,10 +507,11 @@ void ViEEncoder::DeliverFrame(int id,
unsigned int length = CalcBufferSize(kI420,
video_frame->width(),
video_frame->height());
- scoped_array<uint8_t> video_buffer(new uint8_t[length]);
+ scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
ExtractBuffer(*video_frame, length, video_buffer.get());
effect_filter_->Transform(length,
video_buffer.get(),
+ video_frame->ntp_time_ms(),
video_frame->timestamp(),
video_frame->width(),
video_frame->height());
@@ -613,11 +538,6 @@ void ViEEncoder::DeliverFrame(int id,
return;
}
if (ret != VPM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: Error preprocessing frame %u", __FUNCTION__,
- video_frame->timestamp());
return;
}
// Frame was not sampled => use original.
@@ -646,47 +566,24 @@ void ViEEncoder::DeliverFrame(int id,
has_received_sli_ = false;
has_received_rpsi_ = false;
- if (vcm_.AddVideoFrame(*decimated_frame,
- vpm_.ContentMetrics(),
- &codec_specific_info) != VCM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: Error encoding frame %u", __FUNCTION__,
- video_frame->timestamp());
- }
+ vcm_.AddVideoFrame(*decimated_frame, vpm_.ContentMetrics(),
+ &codec_specific_info);
return;
}
#endif
- if (vcm_.AddVideoFrame(*decimated_frame) != VCM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: Error encoding frame %u", __FUNCTION__,
- video_frame->timestamp());
- }
+ vcm_.AddVideoFrame(*decimated_frame);
}
void ViEEncoder::DelayChanged(int id, int frame_delay) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: %u", __FUNCTION__,
- frame_delay);
-
default_rtp_rtcp_->SetCameraDelay(frame_delay);
}
int ViEEncoder::GetPreferedFrameSettings(int* width,
int* height,
int* frame_rate) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
-
webrtc::VideoCodec video_codec;
memset(&video_codec, 0, sizeof(video_codec));
if (vcm_.SendCodec(&video_codec) != VCM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Could not get VCM send codec");
return -1;
}
@@ -697,21 +594,13 @@ int ViEEncoder::GetPreferedFrameSettings(int* width,
}
int ViEEncoder::SendKeyFrame() {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
return vcm_.IntraFrameRequest(0);
}
int32_t ViEEncoder::SendCodecStatistics(
uint32_t* num_key_frames, uint32_t* num_delta_frames) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
-
webrtc::VCMFrameCount sent_frames;
if (vcm_.SentFrameCount(sent_frames) != VCM_OK) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: Could not get sent frame information", __FUNCTION__);
return -1;
}
*num_key_frames = sent_frames.numKeyFrames;
@@ -719,20 +608,11 @@ int32_t ViEEncoder::SendCodecStatistics(
return 0;
}
-int32_t ViEEncoder::EstimatedSendBandwidth(
- uint32_t* available_bandwidth) const {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
-
- if (!bitrate_controller_->AvailableBandwidth(available_bandwidth)) {
- return -1;
- }
- return 0;
+int32_t ViEEncoder::PacerQueuingDelayMs() const {
+ return paced_sender_->QueueInMs();
}
int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
- __FUNCTION__);
if (vcm_.Bitrate(bitrate) != 0)
return -1;
return 0;
@@ -768,9 +648,6 @@ int32_t ViEEncoder::UpdateProtectionMethod(bool enable_nack) {
}
if (fec_enabled_ || nack_enabled_) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: FEC status ",
- __FUNCTION__, fec_enabled);
vcm_.RegisterProtectionCallback(this);
// The send codec must be registered to set correct MTU.
webrtc::VideoCodec codec;
@@ -778,17 +655,12 @@ int32_t ViEEncoder::UpdateProtectionMethod(bool enable_nack) {
uint16_t max_pay_load = default_rtp_rtcp_->MaxDataPayloadLength();
uint32_t current_bitrate_bps = 0;
if (vcm_.Bitrate(&current_bitrate_bps) != 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "Failed to get the current encoder target bitrate.");
+ LOG_F(LS_WARNING) <<
+ "Failed to get the current encoder target bitrate.";
}
// Convert to start bitrate in kbps.
codec.startBitrate = (current_bitrate_bps + 500) / 1000;
if (vcm_.RegisterSendCodec(&codec, number_of_cores_, max_pay_load) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: Failed to update Sendcodec when enabling FEC",
- __FUNCTION__, fec_enabled);
return -1;
}
}
@@ -847,30 +719,8 @@ int32_t ViEEncoder::ProtectionRequest(
uint32_t* sent_video_rate_bps,
uint32_t* sent_nack_rate_bps,
uint32_t* sent_fec_rate_bps) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s, deltaFECRate: %u, key_fecrate: %u, "
- "delta_use_uep_protection: %d, key_use_uep_protection: %d, "
- "delta_max_fec_frames: %d, key_max_fec_frames: %d, "
- "delta_mask_type: %d, key_mask_type: %d, ",
- __FUNCTION__,
- delta_fec_params->fec_rate,
- key_fec_params->fec_rate,
- delta_fec_params->use_uep_protection,
- key_fec_params->use_uep_protection,
- delta_fec_params->max_fec_frames,
- key_fec_params->max_fec_frames,
- delta_fec_params->fec_mask_type,
- key_fec_params->fec_mask_type);
- if (default_rtp_rtcp_->SetFecParameters(delta_fec_params,
- key_fec_params) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: Could not update FEC parameters", __FUNCTION__);
- }
- default_rtp_rtcp_->BitrateSent(NULL,
- sent_video_rate_bps,
- sent_fec_rate_bps,
+ default_rtp_rtcp_->SetFecParameters(delta_fec_params, key_fec_params);
+ default_rtp_rtcp_->BitrateSent(NULL, sent_video_rate_bps, sent_fec_rate_bps,
sent_nack_rate_bps);
return 0;
}
@@ -879,9 +729,6 @@ int32_t ViEEncoder::SendStatistics(const uint32_t bit_rate,
const uint32_t frame_rate) {
CriticalSectionScoped cs(callback_cs_.get());
if (codec_observer_) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: bitrate %u, framerate %u",
- __FUNCTION__, bit_rate, frame_rate);
codec_observer_->OutgoingRate(channel_id_, frame_rate, bit_rate);
}
return 0;
@@ -889,29 +736,11 @@ int32_t ViEEncoder::SendStatistics(const uint32_t bit_rate,
int32_t ViEEncoder::RegisterCodecObserver(ViEEncoderObserver* observer) {
CriticalSectionScoped cs(callback_cs_.get());
- if (observer) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: observer added",
- __FUNCTION__);
- if (codec_observer_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: observer already set.",
- __FUNCTION__);
- return -1;
- }
- codec_observer_ = observer;
- } else {
- if (codec_observer_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: observer does not exist.", __FUNCTION__);
- return -1;
- }
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: observer removed",
- __FUNCTION__);
- codec_observer_ = NULL;
+ if (observer && codec_observer_) {
+ LOG_F(LS_ERROR) << "Observer already set.";
+ return -1;
}
+ codec_observer_ = observer;
return 0;
}
@@ -929,8 +758,6 @@ void ViEEncoder::OnReceivedRPSI(uint32_t /*ssrc*/,
void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
// Key frame request from remote side, signal to VCM.
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
TRACE_EVENT0("webrtc", "OnKeyFrameRequest");
int idx = 0;
@@ -950,9 +777,6 @@ void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
int64_t now = TickTime::MillisecondTimestamp();
if (time_last_intra_request_ms_[ssrc] + kViEMinKeyRequestIntervalMs > now) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: Not encoding new intra due to timing", __FUNCTION__);
return;
}
time_last_intra_request_ms_[ssrc] = now;
@@ -1004,15 +828,19 @@ bool ViEEncoder::SetSsrcs(const std::list<unsigned int>& ssrcs) {
return true;
}
+void ViEEncoder::SetMinTransmitBitrate(int min_transmit_bitrate_kbps) {
+ assert(min_transmit_bitrate_kbps >= 0);
+ CriticalSectionScoped crit(data_cs_.get());
+ min_transmit_bitrate_kbps_ = min_transmit_bitrate_kbps;
+}
+
// Called from ViEBitrateObserver.
void ViEEncoder::OnNetworkChanged(const uint32_t bitrate_bps,
const uint8_t fraction_lost,
const uint32_t round_trip_time_ms) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s(bitrate_bps: %u, fraction_lost: %u, rtt_ms: %u",
- __FUNCTION__, bitrate_bps, fraction_lost, round_trip_time_ms);
-
+ LOG(LS_VERBOSE) << "OnNetworkChanged, bitrate" << bitrate_bps
+ << " packet loss " << fraction_lost
+ << " rtt " << round_trip_time_ms;
vcm_.SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms);
bool video_is_suspended = vcm_.VideoSuspended();
int bitrate_kbps = bitrate_bps / 1000;
@@ -1029,60 +857,51 @@ void ViEEncoder::OnNetworkChanged(const uint32_t bitrate_bps,
// Find the max amount of padding we can allow ourselves to send at this
// point, based on which streams are currently active and what our current
// available bandwidth is.
- int max_padding_bitrate_kbps = 0;
int pad_up_to_bitrate_kbps = 0;
if (send_codec.numberOfSimulcastStreams == 0) {
- max_padding_bitrate_kbps = send_codec.minBitrate;
pad_up_to_bitrate_kbps = send_codec.minBitrate;
} else {
- int i = send_codec.numberOfSimulcastStreams - 1;
- for (std::vector<uint32_t>::reverse_iterator it = stream_bitrates.rbegin();
- it != stream_bitrates.rend(); ++it) {
- if (*it > 0) {
- max_padding_bitrate_kbps = std::min((*it + 500) / 1000,
- stream_configs[i].minBitrate);
- break;
- }
- --i;
- }
pad_up_to_bitrate_kbps =
stream_configs[send_codec.numberOfSimulcastStreams - 1].minBitrate;
for (int i = 0; i < send_codec.numberOfSimulcastStreams - 1; ++i) {
pad_up_to_bitrate_kbps += stream_configs[i].targetBitrate;
}
}
- if (video_is_suspended || send_codec.numberOfSimulcastStreams > 1) {
- pad_up_to_bitrate_kbps = std::min(bitrate_kbps, pad_up_to_bitrate_kbps);
- } else {
- // Disable padding if only sending one stream and video isn't suspended.
+
+ // Disable padding if only sending one stream and video isn't suspended and
+ // min-transmit bitrate isn't used (applied later).
+ if (!video_is_suspended && send_codec.numberOfSimulcastStreams <= 1)
pad_up_to_bitrate_kbps = 0;
- }
{
- // The amount of padding should decay to zero if no frames are being
- // captured.
CriticalSectionScoped cs(data_cs_.get());
+ // The amount of padding should decay to zero if no frames are being
+ // captured unless a min-transmit bitrate is used.
int64_t now_ms = TickTime::MillisecondTimestamp();
if (now_ms - time_of_last_incoming_frame_ms_ > kStopPaddingThresholdMs)
- max_padding_bitrate_kbps = 0;
- }
+ pad_up_to_bitrate_kbps = 0;
- paced_sender_->UpdateBitrate(bitrate_kbps,
- max_padding_bitrate_kbps,
- pad_up_to_bitrate_kbps);
- default_rtp_rtcp_->SetTargetSendBitrate(stream_bitrates);
- {
- CriticalSectionScoped cs(data_cs_.get());
+ // Pad up to min bitrate.
+ if (pad_up_to_bitrate_kbps < min_transmit_bitrate_kbps_)
+ pad_up_to_bitrate_kbps = min_transmit_bitrate_kbps_;
+
+ // Padding may never exceed bitrate estimate.
+ if (pad_up_to_bitrate_kbps > bitrate_kbps)
+ pad_up_to_bitrate_kbps = bitrate_kbps;
+
+ paced_sender_->UpdateBitrate(kPaceMultiplier * bitrate_kbps,
+ pad_up_to_bitrate_kbps);
+ default_rtp_rtcp_->SetTargetSendBitrate(stream_bitrates);
if (video_suspended_ == video_is_suspended)
return;
video_suspended_ = video_is_suspended;
}
- // State changed, inform codec observer.
+
+ // Video suspend-state changed, inform codec observer.
+ CriticalSectionScoped crit(callback_cs_.get());
if (codec_observer_) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: video_suspended_ changed to %i",
- __FUNCTION__, video_is_suspended);
+ LOG(LS_INFO) << "Video suspended " << video_is_suspended
+ << " for channel " << channel_id_;
codec_observer_->SuspendChange(channel_id_, video_is_suspended);
}
}
@@ -1093,26 +912,9 @@ PacedSender* ViEEncoder::GetPacedSender() {
int32_t ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
CriticalSectionScoped cs(callback_cs_.get());
- if (effect_filter == NULL) {
- if (effect_filter_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: no effect filter added",
- __FUNCTION__);
- return -1;
- }
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: deregister effect filter",
- __FUNCTION__);
- } else {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_), "%s: register effect",
- __FUNCTION__);
- if (effect_filter_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, channel_id_),
- "%s: effect filter already added ", __FUNCTION__);
- return -1;
- }
+ if (effect_filter != NULL && effect_filter_ != NULL) {
+ LOG_F(LS_ERROR) << "Filter already set.";
+ return -1;
}
effect_filter_ = effect_filter;
return 0;
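
The OnNetworkChanged hunk above reworks how the padding target is chosen: the pad-up-to value is zeroed when no frames have arrived within kStopPaddingThresholdMs, then raised to the configured min-transmit bitrate, and finally clamped to the current bitrate estimate before being handed to the paced sender. A minimal standalone sketch of that computation follows; the function and parameter names are illustrative and not part of the patch.

#include <stdint.h>

int ComputePadUpToBitrateKbps(int pad_up_to_bitrate_kbps,
                              int min_transmit_bitrate_kbps,
                              int estimated_bitrate_kbps,
                              int64_t ms_since_last_incoming_frame,
                              int64_t stop_padding_threshold_ms) {
  // Padding decays to zero if no frames are being captured...
  if (ms_since_last_incoming_frame > stop_padding_threshold_ms)
    pad_up_to_bitrate_kbps = 0;
  // ...unless a min-transmit bitrate is configured: pad up to it.
  if (pad_up_to_bitrate_kbps < min_transmit_bitrate_kbps)
    pad_up_to_bitrate_kbps = min_transmit_bitrate_kbps;
  // Padding may never exceed the bitrate estimate.
  if (pad_up_to_bitrate_kbps > estimated_bitrate_kbps)
    pad_up_to_bitrate_kbps = estimated_bitrate_kbps;
  return pad_up_to_bitrate_kbps;
}
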
diff --git a/chromium/third_party/webrtc/video_engine/vie_encoder.h b/chromium/third_party/webrtc/video_engine/vie_encoder.h
index 24bd720230b..3eb5f55a7bf 100644
--- a/chromium/third_party/webrtc/video_engine/vie_encoder.h
+++ b/chromium/third_party/webrtc/video_engine/vie_encoder.h
@@ -20,6 +20,7 @@
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/typedefs.h"
#include "webrtc/frame_callback.h"
#include "webrtc/video_engine/vie_defines.h"
@@ -109,8 +110,7 @@ class ViEEncoder
int32_t SendCodecStatistics(uint32_t* num_key_frames,
uint32_t* num_delta_frames);
- int32_t EstimatedSendBandwidth(
- uint32_t* available_bandwidth) const;
+ int PacerQueuingDelayMs() const;
int CodecTargetBitrate(uint32_t* bitrate) const;
// Loss protection.
@@ -153,6 +153,8 @@ class ViEEncoder
// Sets SSRCs for all streams.
bool SetSsrcs(const std::list<unsigned int>& ssrcs);
+ void SetMinTransmitBitrate(int min_transmit_bitrate_kbps);
+
// Effect filter.
int32_t RegisterEffectFilter(ViEEffectFilter* effect_filter);
@@ -206,6 +208,7 @@ class ViEEncoder
int64_t time_of_last_incoming_frame_ms_;
bool send_padding_;
+ int min_transmit_bitrate_kbps_ GUARDED_BY(data_cs_);
int target_delay_ms_;
bool network_is_transmitting_;
bool encoder_paused_;
@@ -215,7 +218,7 @@ class ViEEncoder
bool fec_enabled_;
bool nack_enabled_;
- ViEEncoderObserver* codec_observer_;
+ ViEEncoderObserver* codec_observer_ GUARDED_BY(callback_cs_);
ViEEffectFilter* effect_filter_;
ProcessThread& module_process_thread_;
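
The header now pulls in thread_annotations.h so members protected by a critical section can be marked GUARDED_BY, letting Clang's thread-safety analysis flag unlocked access. A hedged sketch of the pattern is below; the class and members are illustrative, and only the GUARDED_BY macro and the CriticalSectionScoped usage mirror the patch.

#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/thread_annotations.h"

class ExampleEncoderState {
 public:
  ExampleEncoderState()
      : data_cs_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
        min_transmit_bitrate_kbps_(0) {}

  void SetMinTransmitBitrate(int kbps) {
    // The lock named in GUARDED_BY must be held when touching the member.
    webrtc::CriticalSectionScoped crit(data_cs_.get());
    min_transmit_bitrate_kbps_ = kbps;
  }

 private:
  webrtc::scoped_ptr<webrtc::CriticalSectionWrapper> data_cs_;
  int min_transmit_bitrate_kbps_ GUARDED_BY(data_cs_);
};
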
diff --git a/chromium/third_party/webrtc/video_engine/vie_encryption_impl.cc b/chromium/third_party/webrtc/video_engine/vie_encryption_impl.cc
deleted file mode 100644
index b09c449fc04..00000000000
--- a/chromium/third_party/webrtc/video_engine/vie_encryption_impl.cc
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video_engine/vie_encryption_impl.h"
-
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/video_engine/include/vie_errors.h"
-#include "webrtc/video_engine/vie_channel.h"
-#include "webrtc/video_engine/vie_channel_manager.h"
-#include "webrtc/video_engine/vie_defines.h"
-#include "webrtc/video_engine/vie_impl.h"
-#include "webrtc/video_engine/vie_shared_data.h"
-
-namespace webrtc {
-
-ViEEncryption* ViEEncryption::GetInterface(VideoEngine* video_engine) {
-#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
- if (video_engine == NULL) {
- return NULL;
- }
- VideoEngineImpl* vie_impl = static_cast<VideoEngineImpl*>(video_engine);
- ViEEncryptionImpl* vie_encryption_impl = vie_impl;
- // Increase ref count.
- (*vie_encryption_impl)++;
- return vie_encryption_impl;
-#else
- return NULL;
-#endif
-}
-
-int ViEEncryptionImpl::Release() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
- "ViEEncryptionImpl::Release()");
- // Decrease ref count.
- (*this)--;
-
- int32_t ref_count = GetCount();
- if (ref_count < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
- "ViEEncryptionImpl release too many times");
- shared_data_->SetLastError(kViEAPIDoesNotExist);
- return -1;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
- "ViEEncryptionImpl reference count: %d", ref_count);
- return ref_count;
-}
-
-ViEEncryptionImpl::ViEEncryptionImpl(ViESharedData* shared_data)
- : shared_data_(shared_data) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViEEncryptionImpl::ViEEncryptionImpl() Ctor");
-}
-
-ViEEncryptionImpl::~ViEEncryptionImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViEEncryptionImpl::~ViEEncryptionImpl() Dtor");
-}
-
-int ViEEncryptionImpl::RegisterExternalEncryption(const int video_channel,
- Encryption& encryption) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "RegisterExternalEncryption(video_channel=%d)", video_channel);
-
- ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
- ViEChannel* vie_channel = cs.Channel(video_channel);
- if (vie_channel == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
- shared_data_->SetLastError(kViEEncryptionInvalidChannelId);
- return -1;
- }
- if (vie_channel->RegisterExternalEncryption(&encryption) != 0) {
- shared_data_->SetLastError(kViEEncryptionUnknownError);
- return -1;
- }
- return 0;
-}
-
-int ViEEncryptionImpl::DeregisterExternalEncryption(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "RegisterExternalEncryption(video_channel=%d)", video_channel);
-
- ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
- ViEChannel* vie_channel = cs.Channel(video_channel);
- if (vie_channel == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: No channel %d", __FUNCTION__, video_channel);
- shared_data_->SetLastError(kViEEncryptionInvalidChannelId);
- return -1;
- }
-
- if (vie_channel->DeRegisterExternalEncryption() != 0) {
- shared_data_->SetLastError(kViEEncryptionUnknownError);
- return -1;
- }
- return 0;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video_engine/vie_encryption_impl.h b/chromium/third_party/webrtc/video_engine/vie_encryption_impl.h
deleted file mode 100644
index 96779e39523..00000000000
--- a/chromium/third_party/webrtc/video_engine/vie_encryption_impl.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_VIE_ENCRYPTION_IMPL_H_
-#define WEBRTC_VIDEO_ENGINE_VIE_ENCRYPTION_IMPL_H_
-
-#include "webrtc/typedefs.h"
-#include "webrtc/video_engine/include/vie_encryption.h"
-#include "webrtc/video_engine/vie_ref_count.h"
-
-namespace webrtc {
-
-class ViESharedData;
-
-class ViEEncryptionImpl
- : public ViEEncryption,
- public ViERefCount {
- public:
- virtual int Release();
-
- // Implements ViEEncryption.
- virtual int RegisterExternalEncryption(const int video_channel,
- Encryption& encryption);
- virtual int DeregisterExternalEncryption(const int video_channel);
-
- protected:
- explicit ViEEncryptionImpl(ViESharedData* shared_data);
- virtual ~ViEEncryptionImpl();
-
- private:
- ViESharedData* shared_data_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_VIE_ENCRYPTION_IMPL_H_
diff --git a/chromium/third_party/webrtc/video_engine/vie_external_codec_impl.cc b/chromium/third_party/webrtc/video_engine/vie_external_codec_impl.cc
index 13e421be0c6..7eb015c4b6b 100644
--- a/chromium/third_party/webrtc/video_engine/vie_external_codec_impl.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_external_codec_impl.cc
@@ -11,7 +11,7 @@
#include "webrtc/video_engine/vie_external_codec_impl.h"
#include "webrtc/engine_configurations.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_channel.h"
#include "webrtc/video_engine/vie_channel_manager.h"
@@ -37,62 +37,42 @@ ViEExternalCodec* ViEExternalCodec::GetInterface(VideoEngine* video_engine) {
}
int ViEExternalCodecImpl::Release() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
- "ViEExternalCodec::Release()");
// Decrease ref count.
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
- "ViEExternalCodec release too many times");
+ LOG(LS_WARNING) << "ViEExternalCodec released too many times.";
shared_data_->SetLastError(kViEAPIDoesNotExist);
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
- "ViEExternalCodec reference count: %d", ref_count);
return ref_count;
}
ViEExternalCodecImpl::ViEExternalCodecImpl(ViESharedData* shared_data)
: shared_data_(shared_data) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViEExternalCodecImpl::ViEExternalCodecImpl() Ctor");
}
ViEExternalCodecImpl::~ViEExternalCodecImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViEExternalCodecImpl::~ViEExternalCodecImpl() Dtor");
}
int ViEExternalCodecImpl::RegisterExternalSendCodec(const int video_channel,
const unsigned char pl_type,
VideoEncoder* encoder,
bool internal_source) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s channel %d pl_type %d encoder 0x%x", __FUNCTION__,
- video_channel, pl_type, encoder);
+ assert(encoder != NULL);
+ LOG(LS_INFO) << "Register external encoder for channel " << video_channel
+ << ", pl_type " << static_cast<int>(pl_type)
+ << ", internal_source " << internal_source;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Invalid argument video_channel %u. Does it exist?",
- __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
- if (!encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Invalid argument Encoder 0x%x.", __FUNCTION__, encoder);
- shared_data_->SetLastError(kViECodecInvalidArgument);
- return -1;
- }
-
- if (vie_encoder->RegisterExternalEncoder(encoder, pl_type, internal_source)
- != 0) {
+ if (vie_encoder->RegisterExternalEncoder(encoder, pl_type,
+ internal_source) != 0) {
shared_data_->SetLastError(kViECodecUnknownError);
return -1;
}
@@ -101,17 +81,11 @@ int ViEExternalCodecImpl::RegisterExternalSendCodec(const int video_channel,
int ViEExternalCodecImpl::DeRegisterExternalSendCodec(
const int video_channel, const unsigned char pl_type) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s channel %d pl_type %d", __FUNCTION__, video_channel,
- pl_type);
+ LOG(LS_INFO) << "Deregister external encoder for channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Invalid argument video_channel %u. Does it exist?",
- __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
@@ -129,25 +103,15 @@ int ViEExternalCodecImpl::RegisterExternalReceiveCodec(
VideoDecoder* decoder,
bool decoder_render,
int render_delay) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s channel %d pl_type %d decoder 0x%x, decoder_render %d, "
- "renderDelay %d", __FUNCTION__, video_channel, pl_type, decoder,
- decoder_render, render_delay);
+ LOG(LS_INFO) << "Register exrernal decoder for channel " << video_channel
+ << ", pl_type " << pl_type
+ << ", decoder_render " << decoder_render
+ << ", render_delay " << render_delay;
+ assert(decoder != NULL);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Invalid argument video_channel %u. Does it exist?",
- __FUNCTION__, video_channel);
- shared_data_->SetLastError(kViECodecInvalidArgument);
- return -1;
- }
- if (!decoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Invalid argument decoder 0x%x.", __FUNCTION__, decoder);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
@@ -161,18 +125,13 @@ int ViEExternalCodecImpl::RegisterExternalReceiveCodec(
}
int ViEExternalCodecImpl::DeRegisterExternalReceiveCodec(
-const int video_channel, const unsigned char pl_type) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s channel %d pl_type %u", __FUNCTION__, video_channel,
- pl_type);
+ const int video_channel, const unsigned char pl_type) {
+ LOG(LS_INFO) << "DeRegisterExternalReceiveCodec for channel " << video_channel
+ << ", pl_type " << pl_type;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Invalid argument video_channel %u. Does it exist?",
- __FUNCTION__, video_channel);
shared_data_->SetLastError(kViECodecInvalidArgument);
return -1;
}
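
This file is typical of the patch as a whole: printf-style WEBRTC_TRACE calls, which carried an explicit trace level, module and ViEId, are replaced by the streaming LOG()/LOG_F() macros from system_wrappers/interface/logging.h. A hedged sketch of the before/after shape, with placeholder identifiers:

#include "webrtc/system_wrappers/interface/logging.h"

void LogRegisterEncoderExample(int video_channel, unsigned char pl_type) {
  // Old pattern, removed by this patch:
  //   WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(instance_id),
  //                "%s channel %d pl_type %d", __FUNCTION__,
  //                video_channel, pl_type);
  // New pattern: stream values; cast small integer types so they are not
  // printed as characters.
  LOG(LS_INFO) << "Register external encoder for channel " << video_channel
               << ", pl_type " << static_cast<int>(pl_type);
}
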
diff --git a/chromium/third_party/webrtc/video_engine/vie_file_image.cc b/chromium/third_party/webrtc/video_engine/vie_file_image.cc
index c756c26bf4d..919da9fa1bd 100644
--- a/chromium/third_party/webrtc/video_engine/vie_file_image.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_file_image.cc
@@ -20,7 +20,6 @@
#include "webrtc/common_video/interface/video_image.h"
#include "webrtc/common_video/jpeg/include/jpeg.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -32,38 +31,25 @@ int ViEFileImage::ConvertJPEGToVideoFrame(int engine_id,
FILE* image_file = fopen(file_nameUTF8, "rb");
if (!image_file) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
- "%s could not open file %s", __FUNCTION__, file_nameUTF8);
return -1;
}
if (fseek(image_file, 0, SEEK_END) != 0) {
fclose(image_file);
- WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
- "ConvertJPEGToVideoFrame fseek SEEK_END error for file %s",
- file_nameUTF8);
return -1;
}
int buffer_size = ftell(image_file);
if (buffer_size == -1) {
fclose(image_file);
- WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
- "ConvertJPEGToVideoFrame could tell file size for file %s",
- file_nameUTF8);
return -1;
}
image_buffer._size = buffer_size;
if (fseek(image_file, 0, SEEK_SET) != 0) {
fclose(image_file);
- WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
- "ConvertJPEGToVideoFrame fseek SEEK_SET error for file %s",
- file_nameUTF8);
return -1;
}
image_buffer._buffer = new uint8_t[ image_buffer._size + 1];
if (image_buffer._size != fread(image_buffer._buffer, sizeof(uint8_t),
image_buffer._size, image_file)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
- "%s could not read file %s", __FUNCTION__, file_nameUTF8);
fclose(image_file);
delete [] image_buffer._buffer;
return -1;
@@ -76,14 +62,8 @@ int ViEFileImage::ConvertJPEGToVideoFrame(int engine_id,
image_buffer._buffer = NULL;
if (ret == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
- "%s could decode file %s from jpeg format", __FUNCTION__,
- file_nameUTF8);
return -1;
} else if (ret == -3) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
- "%s could not convert jpeg's data to i420 format",
- __FUNCTION__, file_nameUTF8);
}
return 0;
}
diff --git a/chromium/third_party/webrtc/video_engine/vie_frame_provider_base.cc b/chromium/third_party/webrtc/video_engine/vie_frame_provider_base.cc
index 1ed096641e2..3dceb170057 100644
--- a/chromium/third_party/webrtc/video_engine/vie_frame_provider_base.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_frame_provider_base.cc
@@ -14,8 +14,8 @@
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/vie_defines.h"
namespace webrtc {
@@ -29,9 +29,8 @@ ViEFrameProviderBase::ViEFrameProviderBase(int Id, int engine_id)
ViEFrameProviderBase::~ViEFrameProviderBase() {
if (frame_callbacks_.size() > 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
- "FrameCallbacks still exist when Provider deleted %d",
- frame_callbacks_.size());
+ LOG_F(LS_WARNING) << "FrameCallbacks still exist when Provider deleted: "
+ << frame_callbacks_.size();
}
for (FrameCallbacks::iterator it = frame_callbacks_.begin();
@@ -76,8 +75,7 @@ void ViEFrameProviderBase::DeliverFrame(
static_cast<int>((TickTime::Now() - start_process_time).Milliseconds());
if (process_time > 25) {
// Warn if the delivery time is too long.
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
- "%s Too long time: %ums", __FUNCTION__, process_time);
+ LOG(LS_WARNING) << "Too long time delivering frame " << process_time;
}
#endif
}
@@ -131,16 +129,10 @@ int ViEFrameProviderBase::GetBestFormat(int* best_width,
int ViEFrameProviderBase::RegisterFrameCallback(
int observer_id, ViEFrameCallback* callback_object) {
assert(callback_object);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
- __FUNCTION__, callback_object);
{
CriticalSectionScoped cs(provider_cs_.get());
if (std::find(frame_callbacks_.begin(), frame_callbacks_.end(),
callback_object) != frame_callbacks_.end()) {
- // This object is already registered.
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
- "%s 0x%p already registered", __FUNCTION__,
- callback_object);
assert(false && "frameObserver already registered");
return -1;
}
@@ -157,21 +149,15 @@ int ViEFrameProviderBase::RegisterFrameCallback(
int ViEFrameProviderBase::DeregisterFrameCallback(
const ViEFrameCallback* callback_object) {
assert(callback_object);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
- __FUNCTION__, callback_object);
CriticalSectionScoped cs(provider_cs_.get());
FrameCallbacks::iterator it = std::find(frame_callbacks_.begin(),
frame_callbacks_.end(),
callback_object);
if (it == frame_callbacks_.end()) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
- "%s 0x%p not found", __FUNCTION__, callback_object);
return -1;
}
frame_callbacks_.erase(it);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
- "%s 0x%p deregistered", __FUNCTION__, callback_object);
// Notify implementer of this class that the callback list have changed.
FrameCallbackChanged();
@@ -181,8 +167,6 @@ int ViEFrameProviderBase::DeregisterFrameCallback(
bool ViEFrameProviderBase::IsFrameCallbackRegistered(
const ViEFrameCallback* callback_object) {
assert(callback_object);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
- "%s(0x%p)", __FUNCTION__, callback_object);
CriticalSectionScoped cs(provider_cs_.get());
return std::find(frame_callbacks_.begin(), frame_callbacks_.end(),
diff --git a/chromium/third_party/webrtc/video_engine/vie_image_process_impl.cc b/chromium/third_party/webrtc/video_engine/vie_image_process_impl.cc
index b10e774a3ee..d089c0490d8 100644
--- a/chromium/third_party/webrtc/video_engine/vie_image_process_impl.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_image_process_impl.cc
@@ -10,7 +10,7 @@
#include "webrtc/video_engine/vie_image_process_impl.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -39,45 +39,30 @@ ViEImageProcess* ViEImageProcess::GetInterface(VideoEngine* video_engine) {
}
int ViEImageProcessImpl::Release() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
- "ViEImageProcess::Release()");
// Decrease ref count.
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
- "ViEImageProcess release too many times");
+ LOG(LS_ERROR) << "ViEImageProcess release too many times";
shared_data_->SetLastError(kViEAPIDoesNotExist);
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
- "ViEImageProcess reference count: %d", ref_count);
return ref_count;
}
ViEImageProcessImpl::ViEImageProcessImpl(ViESharedData* shared_data)
- : shared_data_(shared_data) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViEImageProcessImpl::ViEImageProcessImpl() Ctor");
-}
+ : shared_data_(shared_data) {}
-ViEImageProcessImpl::~ViEImageProcessImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViEImageProcessImpl::~ViEImageProcessImpl() Dtor");
-}
+ViEImageProcessImpl::~ViEImageProcessImpl() {}
int ViEImageProcessImpl::RegisterCaptureEffectFilter(
const int capture_id,
ViEEffectFilter& capture_filter) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(capture_id: %d)", __FUNCTION__, capture_id);
+ LOG_F(LS_INFO) << "capture_id: " << capture_id;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
return -1;
}
@@ -89,15 +74,11 @@ int ViEImageProcessImpl::RegisterCaptureEffectFilter(
}
int ViEImageProcessImpl::DeregisterCaptureEffectFilter(const int capture_id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(capture_id: %d)", __FUNCTION__, capture_id);
+ LOG_F(LS_INFO) << "capture_id: " << capture_id;
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
return -1;
}
@@ -111,14 +92,11 @@ int ViEImageProcessImpl::DeregisterCaptureEffectFilter(const int capture_id) {
int ViEImageProcessImpl::RegisterSendEffectFilter(
const int video_channel,
ViEEffectFilter& send_filter) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(video_channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "video_channel: " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (vie_encoder == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
@@ -131,14 +109,11 @@ int ViEImageProcessImpl::RegisterSendEffectFilter(
}
int ViEImageProcessImpl::DeregisterSendEffectFilter(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(video_channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "video_channel: " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (vie_encoder == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
@@ -152,14 +127,11 @@ int ViEImageProcessImpl::DeregisterSendEffectFilter(const int video_channel) {
int ViEImageProcessImpl::RegisterRenderEffectFilter(
const int video_channel,
ViEEffectFilter& render_filter) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(video_channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "video_channel: " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
@@ -171,14 +143,11 @@ int ViEImageProcessImpl::RegisterRenderEffectFilter(
}
int ViEImageProcessImpl::DeregisterRenderEffectFilter(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(video_channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "video_channel: " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
@@ -192,16 +161,12 @@ int ViEImageProcessImpl::DeregisterRenderEffectFilter(const int video_channel) {
int ViEImageProcessImpl::EnableDeflickering(const int capture_id,
const bool enable) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(capture_id: %d, enable: %d)", __FUNCTION__, capture_id,
- enable);
+ LOG_F(LS_INFO) << "capture_id: " << capture_id
+ << " enable: " << (enable ? "on" : "off");
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
@@ -219,16 +184,12 @@ int ViEImageProcessImpl::EnableDeflickering(const int capture_id,
int ViEImageProcessImpl::EnableDenoising(const int capture_id,
const bool enable) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(capture_id: %d, enable: %d)", __FUNCTION__, capture_id,
- enable);
+ LOG_F(LS_INFO) << "capture_id: " << capture_id
+ << " enable: " << (enable ? "on" : "off");
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Capture device %d doesn't exist", __FUNCTION__,
- capture_id);
shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
return -1;
}
@@ -246,15 +207,12 @@ int ViEImageProcessImpl::EnableDenoising(const int capture_id,
int ViEImageProcessImpl::EnableColorEnhancement(const int video_channel,
const bool enable) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(video_channel: %d, enable: %d)", __FUNCTION__, video_channel,
- enable);
+ LOG_F(LS_INFO) << "video_channel: " << video_channel
+ << " enable: " << (enable ? "on" : "off");
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
return -1;
}
diff --git a/chromium/third_party/webrtc/video_engine/vie_impl.cc b/chromium/third_party/webrtc/video_engine/vie_impl.cc
index a5c7eba6ee8..3cdf5da2f1d 100644
--- a/chromium/third_party/webrtc/video_engine/vie_impl.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_impl.cc
@@ -11,6 +11,7 @@
#include "webrtc/video_engine/vie_impl.h"
#include "webrtc/common.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#ifdef WEBRTC_ANDROID
@@ -31,90 +32,71 @@ VideoEngine* VideoEngine::Create(const Config& config) {
}
bool VideoEngine::Delete(VideoEngine*& video_engine) {
- if (!video_engine) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "VideoEngine::Delete - No argument");
+ if (!video_engine)
return false;
- }
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, kModuleId,
- "VideoEngine::Delete(vie = 0x%p)", video_engine);
+
+ LOG_F(LS_INFO);
VideoEngineImpl* vie_impl = static_cast<VideoEngineImpl*>(video_engine);
// Check all reference counters.
ViEBaseImpl* vie_base = vie_impl;
if (vie_base->GetCount() > 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "ViEBase ref count: %d", vie_base->GetCount());
+ LOG(LS_ERROR) << "ViEBase ref count > 0: " << vie_base->GetCount();
return false;
}
#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
ViECaptureImpl* vie_capture = vie_impl;
if (vie_capture->GetCount() > 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "ViECapture ref count: %d", vie_capture->GetCount());
+ LOG(LS_ERROR) << "ViECapture ref count > 0: " << vie_capture->GetCount();
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
ViECodecImpl* vie_codec = vie_impl;
if (vie_codec->GetCount() > 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "ViECodec ref count: %d", vie_codec->GetCount());
- return false;
- }
-#endif
-#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
- ViEEncryptionImpl* vie_encryption = vie_impl;
- if (vie_encryption->GetCount() > 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "ViEEncryption ref count: %d", vie_encryption->GetCount());
+ LOG(LS_ERROR) << "ViECodec ref count > 0: " << vie_codec->GetCount();
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
ViEExternalCodecImpl* vie_external_codec = vie_impl;
if (vie_external_codec->GetCount() > 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "ViEEncryption ref count: %d", vie_encryption->GetCount());
+ LOG(LS_ERROR) << "ViEExternalCodec ref count > 0: "
+ << vie_external_codec->GetCount();
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
ViEFileImpl* vie_file = vie_impl;
if (vie_file->GetCount() > 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "ViEFile ref count: %d", vie_file->GetCount());
+ LOG(LS_ERROR) << "ViEFile ref count > 0: " << vie_file->GetCount();
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
ViEImageProcessImpl* vie_image_process = vie_impl;
if (vie_image_process->GetCount() > 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "ViEImageProcess ref count: %d",
- vie_image_process->GetCount());
+ LOG(LS_ERROR) << "ViEImageProcess ref count > 0: "
+ << vie_image_process->GetCount();
return false;
}
#endif
ViENetworkImpl* vie_network = vie_impl;
if (vie_network->GetCount() > 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "ViENetwork ref count: %d", vie_network->GetCount());
+ LOG(LS_ERROR) << "ViENetwork ref count > 0: " << vie_network->GetCount();
return false;
}
#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
ViERenderImpl* vie_render = vie_impl;
if (vie_render->GetCount() > 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "ViERender ref count: %d", vie_render->GetCount());
+ LOG(LS_ERROR) << "ViERender ref count > 0: " << vie_render->GetCount();
return false;
}
#endif
#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
ViERTP_RTCPImpl* vie_rtp_rtcp = vie_impl;
if (vie_rtp_rtcp->GetCount() > 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "ViERTP_RTCP ref count: %d", vie_rtp_rtcp->GetCount());
+ LOG(LS_ERROR) << "ViERTP_RTCP ref count > 0: " << vie_rtp_rtcp->GetCount();
return false;
}
#endif
@@ -123,8 +105,6 @@ bool VideoEngine::Delete(VideoEngine*& video_engine) {
vie_impl = NULL;
video_engine = NULL;
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, kModuleId,
- "%s: instance deleted.", __FUNCTION__);
return true;
}
@@ -136,9 +116,8 @@ int VideoEngine::SetTraceFile(const char* file_nameUTF8,
if (Trace::SetTraceFile(file_nameUTF8, add_file_counter) == -1) {
return -1;
}
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, kModuleId,
- "SetTraceFileName(file_nameUTF8 = %s, add_file_counter = %d",
- file_nameUTF8, add_file_counter);
+ LOG_F(LS_INFO) << "filename: " << file_nameUTF8
+ << " add_file_counter: " << (add_file_counter ? "yes" : "no");
return 0;
}
@@ -147,35 +126,29 @@ int VideoEngine::SetTraceFilter(const unsigned int filter) {
if (filter == kTraceNone && old_filter != kTraceNone) {
// Do the logging before turning it off.
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, kModuleId,
- "SetTraceFilter(filter = 0x%x)", filter);
+ LOG_F(LS_INFO) << "filter: " << filter;
}
Trace::set_level_filter(filter);
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, kModuleId,
- "SetTraceFilter(filter = 0x%x)", filter);
+ LOG_F(LS_INFO) << "filter: " << filter;
return 0;
}
int VideoEngine::SetTraceCallback(TraceCallback* callback) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, kModuleId,
- "SetTraceCallback(TraceCallback = 0x%p)", callback);
+ LOG_F(LS_INFO);
return Trace::SetTraceCallback(callback);
}
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
-int VideoEngine::SetAndroidObjects(JavaVM* javaVM) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, kModuleId,
- "SetAndroidObjects()");
+int VideoEngine::SetAndroidObjects(JavaVM* javaVM, jobject context) {
+ LOG_F(LS_INFO);
- if (SetCaptureAndroidVM(javaVM) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "Could not set capture Android VM");
+ if (SetCaptureAndroidVM(javaVM, context) != 0) {
+ LOG(LS_ERROR) << "Could not set capture Android VM";
return -1;
}
if (SetRenderAndroidVM(javaVM) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
- "Could not set render Android VM");
+ LOG(LS_ERROR) << "Could not set render Android VM";
return -1;
}
return 0;
diff --git a/chromium/third_party/webrtc/video_engine/vie_impl.h b/chromium/third_party/webrtc/video_engine/vie_impl.h
index 290a67e5afc..086282d1874 100644
--- a/chromium/third_party/webrtc/video_engine/vie_impl.h
+++ b/chromium/third_party/webrtc/video_engine/vie_impl.h
@@ -24,9 +24,6 @@
#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
#include "webrtc/video_engine/vie_codec_impl.h"
#endif
-#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
-#include "webrtc/video_engine/vie_encryption_impl.h"
-#endif
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
#include "webrtc/video_engine/vie_file_impl.h"
#endif
@@ -54,9 +51,6 @@ class VideoEngineImpl
#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
public ViECaptureImpl,
#endif
-#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
- public ViEEncryptionImpl,
-#endif
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
public ViEFileImpl,
#endif
@@ -84,9 +78,6 @@ class VideoEngineImpl
#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
ViECaptureImpl(ViEBaseImpl::shared_data()),
#endif
-#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
- ViEEncryptionImpl(ViEBaseImpl::shared_data()),
-#endif
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
ViEFileImpl(ViEBaseImpl::shared_data()),
#endif
diff --git a/chromium/third_party/webrtc/video_engine/vie_input_manager.cc b/chromium/third_party/webrtc/video_engine/vie_input_manager.cc
index f5727c56485..68570e51cfe 100644
--- a/chromium/third_party/webrtc/video_engine/vie_input_manager.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_input_manager.cc
@@ -17,8 +17,8 @@
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_defines.h"
@@ -33,17 +33,12 @@ ViEInputManager::ViEInputManager(const int engine_id, const Config& config)
vie_frame_provider_map_(),
capture_device_info_(NULL),
module_process_thread_(NULL) {
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s", __FUNCTION__);
-
for (int idx = 0; idx < kViEMaxCaptureDevices; idx++) {
free_capture_device_id_[idx] = true;
}
}
ViEInputManager::~ViEInputManager() {
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s", __FUNCTION__);
for (FrameProviderMap::iterator it = vie_frame_provider_map_.begin();
it != vie_frame_provider_map_.end();
++it) {
@@ -59,8 +54,6 @@ void ViEInputManager::SetModuleProcessThread(
}
int ViEInputManager::NumberOfCaptureDevices() {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
- __FUNCTION__);
CriticalSectionScoped cs(device_info_cs_.get());
if (capture_device_info_ == NULL)
capture_device_info_ = VideoCaptureFactory::CreateDeviceInfo(
@@ -74,8 +67,6 @@ int ViEInputManager::GetDeviceName(uint32_t device_number,
uint32_t device_name_length,
char* device_unique_idUTF8,
uint32_t device_unique_idUTF8Length) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s(device_number: %d)", __FUNCTION__, device_number);
CriticalSectionScoped cs(device_info_cs_.get());
if (capture_device_info_ == NULL)
capture_device_info_ = VideoCaptureFactory::CreateDeviceInfo(
@@ -89,8 +80,6 @@ int ViEInputManager::GetDeviceName(uint32_t device_number,
int ViEInputManager::NumberOfCaptureCapabilities(
const char* device_unique_idUTF8) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
- __FUNCTION__);
CriticalSectionScoped cs(device_info_cs_.get());
if (capture_device_info_ == NULL)
capture_device_info_ = VideoCaptureFactory::CreateDeviceInfo(
@@ -103,9 +92,6 @@ int ViEInputManager::GetCaptureCapability(
const char* device_unique_idUTF8,
const uint32_t device_capability_number,
CaptureCapability& capability) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s(device_unique_idUTF8: %s, device_capability_number: %d)",
- __FUNCTION__, device_unique_idUTF8, device_capability_number);
CriticalSectionScoped cs(device_info_cs_.get());
if (capture_device_info_ == NULL)
capture_device_info_ = VideoCaptureFactory::CreateDeviceInfo(
@@ -131,9 +117,6 @@ int ViEInputManager::GetCaptureCapability(
int ViEInputManager::GetOrientation(const char* device_unique_idUTF8,
RotateCapturedFrame& orientation) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s(device_unique_idUTF8: %s,)", __FUNCTION__,
- device_unique_idUTF8);
CriticalSectionScoped cs(device_info_cs_.get());
if (capture_device_info_ == NULL)
capture_device_info_ = VideoCaptureFactory::CreateDeviceInfo(
@@ -180,8 +163,6 @@ int ViEInputManager::CreateCaptureDevice(
const char* device_unique_idUTF8,
const uint32_t device_unique_idUTF8Length,
int& capture_id) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s(device_unique_id: %s)", __FUNCTION__, device_unique_idUTF8);
CriticalSectionScoped cs(map_cs_.get());
// Make sure the device is not already allocated.
@@ -227,57 +208,39 @@ int ViEInputManager::CreateCaptureDevice(
const char* cast_id = reinterpret_cast<const char*>(device_unique_idUTF8);
if (strncmp(cast_id, reinterpret_cast<const char*>(found_unique_name),
strlen(cast_id)) == 0) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s:%d Capture device was found by unique ID: %s. Returning",
- __FUNCTION__, __LINE__, device_unique_idUTF8);
found_device = true;
break;
}
}
if (!found_device) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s:%d Capture device NOT found by unique ID: %s. Returning",
- __FUNCTION__, __LINE__, device_unique_idUTF8);
+ LOG(LS_ERROR) << "Capture device not found: " << device_unique_idUTF8;
return kViECaptureDeviceDoesNotExist;
}
int newcapture_id = 0;
- if (GetFreeCaptureId(&newcapture_id) == false) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s: Maximum supported number of capture devices already in "
- "use", __FUNCTION__);
+ if (!GetFreeCaptureId(&newcapture_id)) {
+ LOG(LS_ERROR) << "All capture devices already allocated.";
return kViECaptureDeviceMaxNoDevicesAllocated;
}
ViECapturer* vie_capture = ViECapturer::CreateViECapture(
newcapture_id, engine_id_, config_, device_unique_idUTF8,
device_unique_idUTF8Length, *module_process_thread_);
if (!vie_capture) {
- ReturnCaptureId(newcapture_id);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s: Could not create capture module for %s", __FUNCTION__,
- device_unique_idUTF8);
+ ReturnCaptureId(newcapture_id);
return kViECaptureDeviceUnknownError;
}
vie_frame_provider_map_[newcapture_id] = vie_capture;
capture_id = newcapture_id;
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s(device_unique_id: %s, capture_id: %d)", __FUNCTION__,
- device_unique_idUTF8, capture_id);
return 0;
}
int ViEInputManager::CreateCaptureDevice(VideoCaptureModule* capture_module,
int& capture_id) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
- __FUNCTION__);
-
CriticalSectionScoped cs(map_cs_.get());
int newcapture_id = 0;
if (!GetFreeCaptureId(&newcapture_id)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s: Maximum supported number of capture devices already in "
- "use", __FUNCTION__);
+ LOG(LS_ERROR) << "All capture devices already allocated.";
return kViECaptureDeviceMaxNoDevicesAllocated;
}
@@ -285,21 +248,15 @@ int ViEInputManager::CreateCaptureDevice(VideoCaptureModule* capture_module,
newcapture_id, engine_id_, config_,
capture_module, *module_process_thread_);
if (!vie_capture) {
- ReturnCaptureId(newcapture_id);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s: Could attach capture module.", __FUNCTION__);
+ ReturnCaptureId(newcapture_id);
return kViECaptureDeviceUnknownError;
}
vie_frame_provider_map_[newcapture_id] = vie_capture;
capture_id = newcapture_id;
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s, capture_id: %d", __FUNCTION__, capture_id);
return 0;
}
int ViEInputManager::DestroyCaptureDevice(const int capture_id) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s(capture_id: %d)", __FUNCTION__, capture_id);
ViECapturer* vie_capture = NULL;
{
// We need exclusive access to the object to delete it.
@@ -309,18 +266,14 @@ int ViEInputManager::DestroyCaptureDevice(const int capture_id) {
vie_capture = ViECapturePtr(capture_id);
if (!vie_capture) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s(capture_id: %d) - No such capture device id",
- __FUNCTION__, capture_id);
+ LOG(LS_ERROR) << "No such capture device id: " << capture_id;
return -1;
}
uint32_t num_callbacks =
vie_capture->NumberOfRegisteredFrameCallbacks();
if (num_callbacks > 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
- ViEId(engine_id_), "%s(capture_id: %d) - %u registered "
- "callbacks when destroying capture device",
- __FUNCTION__, capture_id, num_callbacks);
+ LOG(LS_WARNING) << num_callbacks << " still registered to capture id "
+ << capture_id << " when destroying capture device.";
}
vie_frame_provider_map_.erase(capture_id);
ReturnCaptureId(capture_id);
@@ -335,15 +288,11 @@ int ViEInputManager::DestroyCaptureDevice(const int capture_id) {
int ViEInputManager::CreateExternalCaptureDevice(
ViEExternalCapture*& external_capture,
int& capture_id) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
- __FUNCTION__);
CriticalSectionScoped cs(map_cs_.get());
int newcapture_id = 0;
if (GetFreeCaptureId(&newcapture_id) == false) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s: Maximum supported number of capture devices already in "
- "use", __FUNCTION__);
+ LOG(LS_ERROR) << "All capture devices already allocated.";
return kViECaptureDeviceMaxNoDevicesAllocated;
}
@@ -351,30 +300,21 @@ int ViEInputManager::CreateExternalCaptureDevice(
newcapture_id, engine_id_, config_, NULL, 0, *module_process_thread_);
if (!vie_capture) {
ReturnCaptureId(newcapture_id);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s: Could not create capture module for external capture.",
- __FUNCTION__);
return kViECaptureDeviceUnknownError;
}
vie_frame_provider_map_[newcapture_id] = vie_capture;
capture_id = newcapture_id;
external_capture = vie_capture;
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s, capture_id: %d)", __FUNCTION__, capture_id);
return 0;
}
bool ViEInputManager::GetFreeCaptureId(int* freecapture_id) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
- __FUNCTION__);
for (int id = 0; id < kViEMaxCaptureDevices; id++) {
if (free_capture_device_id_[id]) {
// We found a free capture device id.
free_capture_device_id_[id] = false;
*freecapture_id = id + kViECaptureIdBase;
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s: new id: %d", __FUNCTION__, *freecapture_id);
return true;
}
}
@@ -382,8 +322,6 @@ bool ViEInputManager::GetFreeCaptureId(int* freecapture_id) {
}
void ViEInputManager::ReturnCaptureId(int capture_id) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
- "%s(%d)", __FUNCTION__, capture_id);
CriticalSectionScoped cs(map_cs_.get());
if (capture_id >= kViECaptureIdBase &&
capture_id < kViEMaxCaptureDevices + kViECaptureIdBase) {
@@ -420,8 +358,10 @@ ViEFrameProviderBase* ViEInputManager::ViEFrameProvider(int provider_id) const {
ViECapturer* ViEInputManager::ViECapturePtr(int capture_id) const {
if (!(capture_id >= kViECaptureIdBase &&
- capture_id <= kViECaptureIdBase + kViEMaxCaptureDevices))
+ capture_id <= kViECaptureIdBase + kViEMaxCaptureDevices)) {
+ LOG(LS_ERROR) << "Capture device doesn't exist " << capture_id << ".";
return NULL;
+ }
return static_cast<ViECapturer*>(ViEFrameProvider(capture_id));
}
diff --git a/chromium/third_party/webrtc/video_engine/vie_manager_base.h b/chromium/third_party/webrtc/video_engine/vie_manager_base.h
index 088a2b8a31f..f9f008f3214 100644
--- a/chromium/third_party/webrtc/video_engine/vie_manager_base.h
+++ b/chromium/third_party/webrtc/video_engine/vie_manager_base.h
@@ -11,11 +11,13 @@
#ifndef WEBRTC_VIDEO_ENGINE_VIE_MANAGER_BASE_H_
#define WEBRTC_VIDEO_ENGINE_VIE_MANAGER_BASE_H_
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
+
namespace webrtc {
class RWLockWrapper;
-class ViEManagerBase {
+class LOCKABLE ViEManagerBase {
friend class ViEManagedItemScopedBase;
friend class ViEManagerScopedBase;
friend class ViEManagerWriteScoped;
@@ -25,24 +27,25 @@ class ViEManagerBase {
private:
// Exclusive lock, used by ViEManagerWriteScoped.
- void WriteLockManager();
+ void WriteLockManager() EXCLUSIVE_LOCK_FUNCTION();
// Releases exclusive lock, used by ViEManagerWriteScoped.
- void ReleaseWriteLockManager();
+ void ReleaseWriteLockManager() UNLOCK_FUNCTION();
// Increases lock count, used by ViEManagerScopedBase.
- void ReadLockManager() const;
+ void ReadLockManager() const SHARED_LOCK_FUNCTION();
// Releases the lock count, used by ViEManagerScopedBase.
- void ReleaseLockManager() const;
+ void ReleaseLockManager() const UNLOCK_FUNCTION();
RWLockWrapper& instance_rwlock_;
};
-class ViEManagerWriteScoped {
+class SCOPED_LOCKABLE ViEManagerWriteScoped {
public:
- explicit ViEManagerWriteScoped(ViEManagerBase* vie_manager);
- ~ViEManagerWriteScoped();
+ explicit ViEManagerWriteScoped(ViEManagerBase* vie_manager)
+ EXCLUSIVE_LOCK_FUNCTION(vie_manager);
+ ~ViEManagerWriteScoped() UNLOCK_FUNCTION();
private:
ViEManagerBase* vie_manager_;
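
vie_manager_base.h annotates the read/write-lock helpers so the scoped classes participate in Clang's thread-safety analysis: the manager becomes LOCKABLE, its lock and unlock methods are tagged EXCLUSIVE_LOCK_FUNCTION, SHARED_LOCK_FUNCTION or UNLOCK_FUNCTION, and the RAII wrapper is SCOPED_LOCKABLE. A hedged sketch with illustrative names, mirroring only the annotation placement from the patch:

#include "webrtc/system_wrappers/interface/thread_annotations.h"

class LOCKABLE ExampleManager {
 public:
  void WriteLockManager() EXCLUSIVE_LOCK_FUNCTION() { /* acquire rw-lock */ }
  void ReleaseWriteLockManager() UNLOCK_FUNCTION() { /* release rw-lock */ }
};

// RAII wrapper: the constructor is annotated as taking the lock on the
// manager passed in, the destructor as releasing it.
class SCOPED_LOCKABLE ExampleWriteScoped {
 public:
  explicit ExampleWriteScoped(ExampleManager* manager)
      EXCLUSIVE_LOCK_FUNCTION(manager)
      : manager_(manager) {
    manager_->WriteLockManager();
  }
  ~ExampleWriteScoped() UNLOCK_FUNCTION() {
    manager_->ReleaseWriteLockManager();
  }

 private:
  ExampleManager* manager_;
};
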
diff --git a/chromium/third_party/webrtc/video_engine/vie_network_impl.cc b/chromium/third_party/webrtc/video_engine/vie_network_impl.cc
index 9965f9eddd3..4a9d8665856 100644
--- a/chromium/third_party/webrtc/video_engine/vie_network_impl.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_network_impl.cc
@@ -16,7 +16,7 @@
#endif
#include "webrtc/engine_configurations.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_channel.h"
#include "webrtc/video_engine/vie_channel_manager.h"
@@ -39,35 +39,25 @@ ViENetwork* ViENetwork::GetInterface(VideoEngine* video_engine) {
}
int ViENetworkImpl::Release() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
- "ViENetwork::Release()");
// Decrease ref count.
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
- "ViENetwork release too many times");
+ LOG(LS_ERROR) << "ViENetwork release too many times";
shared_data_->SetLastError(kViEAPIDoesNotExist);
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
- "ViENetwork reference count: %d", ref_count);
return ref_count;
}
void ViENetworkImpl::SetNetworkTransmissionState(const int video_channel,
const bool is_transmitting) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(event: Network %s)", __FUNCTION__,
- is_transmitting ? "transmitting" : "not transmitting");
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " transmitting: " << (is_transmitting ? "yes" : "no");
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "An encoder doesn't exist for this channel");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return;
}
@@ -75,34 +65,21 @@ void ViENetworkImpl::SetNetworkTransmissionState(const int video_channel,
}
ViENetworkImpl::ViENetworkImpl(ViESharedData* shared_data)
- : shared_data_(shared_data) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViENetworkImpl::ViENetworkImpl() Ctor");
-}
+ : shared_data_(shared_data) {}
-ViENetworkImpl::~ViENetworkImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViENetworkImpl::~ViENetworkImpl() Dtor");
-}
+ViENetworkImpl::~ViENetworkImpl() {}
int ViENetworkImpl::RegisterSendTransport(const int video_channel,
Transport& transport) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel: " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s Channel doesn't exist", __FUNCTION__);
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
if (vie_channel->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s Channel already sending.", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Already sending on channel: " << video_channel;
shared_data_->SetLastError(kViENetworkAlreadySending);
return -1;
}
@@ -114,22 +91,15 @@ int ViENetworkImpl::RegisterSendTransport(const int video_channel,
}
int ViENetworkImpl::DeregisterSendTransport(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel: " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s Channel doesn't exist", __FUNCTION__);
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
if (vie_channel->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s Channel already sending", __FUNCTION__);
+ LOG_F(LS_ERROR) << "Actively sending on channel: " << video_channel;
shared_data_->SetLastError(kViENetworkAlreadySending);
return -1;
}
@@ -143,17 +113,9 @@ int ViENetworkImpl::DeregisterSendTransport(const int video_channel) {
int ViENetworkImpl::ReceivedRTPPacket(const int video_channel, const void* data,
const int length,
const PacketTime& packet_time) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, data: -, length: %d)", __FUNCTION__,
- video_channel, length);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- // The channel doesn't exists
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
@@ -162,16 +124,9 @@ int ViENetworkImpl::ReceivedRTPPacket(const int video_channel, const void* data,
int ViENetworkImpl::ReceivedRTCPPacket(const int video_channel,
const void* data, const int length) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, data: -, length: %d)", __FUNCTION__,
- video_channel, length);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
@@ -179,15 +134,10 @@ int ViENetworkImpl::ReceivedRTCPPacket(const int video_channel,
}
int ViENetworkImpl::SetMTU(int video_channel, unsigned int mtu) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, mtu: %u)", __FUNCTION__, video_channel, mtu);
+ LOG_F(LS_INFO) << "channel: " << video_channel << " mtu: " << mtu;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "Channel doesn't exist");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return -1;
}
@@ -197,4 +147,24 @@ int ViENetworkImpl::SetMTU(int video_channel, unsigned int mtu) {
}
return 0;
}
+
+int ViENetworkImpl::ReceivedBWEPacket(const int video_channel,
+ int64_t arrival_time_ms, int payload_size, const RTPHeader& header) {
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel) {
+ shared_data_->SetLastError(kViENetworkInvalidChannelId);
+ return -1;
+ }
+
+ vie_channel->ReceivedBWEPacket(arrival_time_ms, payload_size, header);
+ return 0;
+}
+
+bool ViENetworkImpl::SetBandwidthEstimationConfig(
+ int video_channel, const webrtc::Config& config) {
+ LOG_F(LS_INFO) << "channel: " << video_channel;
+ return shared_data_->channel_manager()->SetBandwidthEstimationConfig(
+ video_channel, config);
+}
} // namespace webrtc
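vie_network_impl.cc is one of several files in this commit that swap printf-style WEBRTC_TRACE calls for the stream-based LOG / LOG_F macros from system_wrappers/interface/logging.h. A minimal sketch of the target style, using only macros and severities that appear in this diff and placed in namespace webrtc as the patched files are:

#include "webrtc/system_wrappers/interface/logging.h"

namespace webrtc {

// Sketch only (ReportChannelState is not part of the patch). LOG_F prefixes
// the message with the enclosing function name, which replaces the explicit
// __FUNCTION__ argument that WEBRTC_TRACE required.
void ReportChannelState(int video_channel, bool already_sending) {
  LOG_F(LS_INFO) << "channel: " << video_channel;
  if (already_sending)
    LOG(LS_ERROR) << "Already sending on channel: " << video_channel;
}

}  // namespace webrtc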
diff --git a/chromium/third_party/webrtc/video_engine/vie_network_impl.h b/chromium/third_party/webrtc/video_engine/vie_network_impl.h
index 4aa39981bd2..03547d88bac 100644
--- a/chromium/third_party/webrtc/video_engine/vie_network_impl.h
+++ b/chromium/third_party/webrtc/video_engine/vie_network_impl.h
@@ -39,6 +39,12 @@ class ViENetworkImpl
const int length);
virtual int SetMTU(int video_channel, unsigned int mtu);
+ virtual int ReceivedBWEPacket(const int video_channel,
+ int64_t arrival_time_ms, int payload_size, const RTPHeader& header);
+
+ virtual bool SetBandwidthEstimationConfig(int video_channel,
+ const webrtc::Config& config);
+
protected:
explicit ViENetworkImpl(ViESharedData* shared_data);
virtual ~ViENetworkImpl();
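The two virtuals added above surface the new receive-side BWE hooks on the ViENetwork API. A caller-side sketch, assuming the matching pure-virtual declarations are also added to the public ViENetwork interface in include/vie_network.h and that RTPHeader lives in common_types.h at this snapshot (the helper itself is hypothetical):

#include <stdint.h>

#include "webrtc/common_types.h"                      // RTPHeader (assumed).
#include "webrtc/video_engine/include/vie_network.h"  // ViENetwork interface.

// Hypothetical helper: hands an already-parsed RTP header to the receive-side
// bandwidth estimator of a channel. Only the header, arrival time and payload
// size are needed; vie_receiver.cc ignores the call unless absolute-send-time
// receiving is enabled on that channel.
void ForwardForBwe(webrtc::ViENetwork* vie_network,
                   int video_channel,
                   int64_t arrival_time_ms,
                   int payload_size,
                   const webrtc::RTPHeader& header) {
  vie_network->ReceivedBWEPacket(video_channel, arrival_time_ms, payload_size,
                                 header);
}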
diff --git a/chromium/third_party/webrtc/video_engine/vie_receiver.cc b/chromium/third_party/webrtc/video_engine/vie_receiver.cc
index 2946c4a08f9..5d90ac678ab 100644
--- a/chromium/third_party/webrtc/video_engine/vie_receiver.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_receiver.cc
@@ -15,6 +15,7 @@
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/fec_receiver.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
@@ -22,7 +23,9 @@
#include "webrtc/modules/utility/interface/rtp_dump.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
+#include "webrtc/system_wrappers/interface/timestamp_extrapolator.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -32,32 +35,27 @@ ViEReceiver::ViEReceiver(const int32_t channel_id,
RemoteBitrateEstimator* remote_bitrate_estimator,
RtpFeedback* rtp_feedback)
: receive_cs_(CriticalSectionWrapper::CreateCriticalSection()),
- channel_id_(channel_id),
rtp_header_parser_(RtpHeaderParser::Create()),
rtp_payload_registry_(new RTPPayloadRegistry(
- channel_id, RTPPayloadStrategy::CreateStrategy(false))),
+ RTPPayloadStrategy::CreateStrategy(false))),
rtp_receiver_(RtpReceiver::CreateVideoReceiver(
channel_id, Clock::GetRealTimeClock(), this, rtp_feedback,
rtp_payload_registry_.get())),
rtp_receive_statistics_(ReceiveStatistics::Create(
Clock::GetRealTimeClock())),
- fec_receiver_(FecReceiver::Create(channel_id, this)),
+ fec_receiver_(FecReceiver::Create(this)),
rtp_rtcp_(NULL),
vcm_(module_vcm),
remote_bitrate_estimator_(remote_bitrate_estimator),
- external_decryption_(NULL),
- decryption_buffer_(NULL),
+ ntp_estimator_(new RemoteNtpTimeEstimator(Clock::GetRealTimeClock())),
rtp_dump_(NULL),
receiving_(false),
- restored_packet_in_use_(false) {
+ restored_packet_in_use_(false),
+ receiving_ast_enabled_(false) {
assert(remote_bitrate_estimator);
}
ViEReceiver::~ViEReceiver() {
- if (decryption_buffer_) {
- delete[] decryption_buffer_;
- decryption_buffer_ = NULL;
- }
if (rtp_dump_) {
rtp_dump_->Stop();
RtpDump::DestroyRtpDump(rtp_dump_);
@@ -98,12 +96,12 @@ void ViEReceiver::SetNackStatus(bool enable,
rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
}
-void ViEReceiver::SetRtxStatus(bool enable, uint32_t ssrc) {
- rtp_payload_registry_->SetRtxStatus(enable, ssrc);
+void ViEReceiver::SetRtxPayloadType(int payload_type) {
+ rtp_payload_registry_->SetRtxPayloadType(payload_type);
}
-void ViEReceiver::SetRtxPayloadType(uint32_t payload_type) {
- rtp_payload_registry_->SetRtxPayloadType(payload_type);
+void ViEReceiver::SetRtxSsrc(uint32_t ssrc) {
+ rtp_payload_registry_->SetRtxSsrc(ssrc);
}
uint32_t ViEReceiver::GetRemoteSsrc() const {
@@ -114,28 +112,6 @@ int ViEReceiver::GetCsrcs(uint32_t* csrcs) const {
return rtp_receiver_->CSRCs(csrcs);
}
-int ViEReceiver::RegisterExternalDecryption(Encryption* decryption) {
- CriticalSectionScoped cs(receive_cs_.get());
- if (external_decryption_) {
- return -1;
- }
- decryption_buffer_ = new uint8_t[kViEMaxMtu];
- if (decryption_buffer_ == NULL) {
- return -1;
- }
- external_decryption_ = decryption;
- return 0;
-}
-
-int ViEReceiver::DeregisterExternalDecryption() {
- CriticalSectionScoped cs(receive_cs_.get());
- if (external_decryption_ == NULL) {
- return -1;
- }
- external_decryption_ = NULL;
- return 0;
-}
-
void ViEReceiver::SetRtpRtcpModule(RtpRtcp* module) {
rtp_rtcp_ = module;
}
@@ -168,9 +144,15 @@ bool ViEReceiver::SetReceiveTimestampOffsetStatus(bool enable, int id) {
bool ViEReceiver::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
if (enable) {
- return rtp_header_parser_->RegisterRtpHeaderExtension(
- kRtpExtensionAbsoluteSendTime, id);
+ if (rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime, id)) {
+ receiving_ast_enabled_ = true;
+ return true;
+ } else {
+ return false;
+ }
} else {
+ receiving_ast_enabled_ = false;
return rtp_header_parser_->DeregisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime);
}
@@ -179,20 +161,25 @@ bool ViEReceiver::SetReceiveAbsoluteSendTimeStatus(bool enable, int id) {
int ViEReceiver::ReceivedRTPPacket(const void* rtp_packet,
int rtp_packet_length,
const PacketTime& packet_time) {
- return InsertRTPPacket(static_cast<const int8_t*>(rtp_packet),
+ return InsertRTPPacket(static_cast<const uint8_t*>(rtp_packet),
rtp_packet_length, packet_time);
}
int ViEReceiver::ReceivedRTCPPacket(const void* rtcp_packet,
int rtcp_packet_length) {
- return InsertRTCPPacket(static_cast<const int8_t*>(rtcp_packet),
+ return InsertRTCPPacket(static_cast<const uint8_t*>(rtcp_packet),
rtcp_packet_length);
}
int32_t ViEReceiver::OnReceivedPayloadData(
const uint8_t* payload_data, const uint16_t payload_size,
const WebRtcRTPHeader* rtp_header) {
- if (vcm_->IncomingPacket(payload_data, payload_size, *rtp_header) != 0) {
+ WebRtcRTPHeader rtp_header_with_ntp = *rtp_header;
+ rtp_header_with_ntp.ntp_time_ms =
+ ntp_estimator_->Estimate(rtp_header->header.timestamp);
+ if (vcm_->IncomingPacket(payload_data,
+ payload_size,
+ rtp_header_with_ntp) != 0) {
// Check this...
return -1;
}
@@ -203,61 +190,43 @@ bool ViEReceiver::OnRecoveredPacket(const uint8_t* rtp_packet,
int rtp_packet_length) {
RTPHeader header;
if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
- WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVideo, channel_id_,
- "IncomingPacket invalid RTP header");
return false;
}
header.payload_type_frequency = kVideoPayloadTypeFrequency;
return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}
-int ViEReceiver::InsertRTPPacket(const int8_t* rtp_packet,
+void ViEReceiver::ReceivedBWEPacket(
+ int64_t arrival_time_ms, int payload_size, const RTPHeader& header) {
+ // Only forward if the incoming packet *and* the channel are both configured
+ // to receive absolute sender time. RTP time stamps may have different rates
+ // for audio and video and shouldn't be mixed.
+ if (header.extension.hasAbsoluteSendTime && receiving_ast_enabled_) {
+ remote_bitrate_estimator_->IncomingPacket(arrival_time_ms, payload_size,
+ header);
+ }
+}
+
+int ViEReceiver::InsertRTPPacket(const uint8_t* rtp_packet,
int rtp_packet_length,
const PacketTime& packet_time) {
- // TODO(mflodman) Change decrypt to get rid of this cast.
- int8_t* tmp_ptr = const_cast<int8_t*>(rtp_packet);
- unsigned char* received_packet = reinterpret_cast<unsigned char*>(tmp_ptr);
- int received_packet_length = rtp_packet_length;
-
{
CriticalSectionScoped cs(receive_cs_.get());
if (!receiving_) {
return -1;
}
-
- if (external_decryption_) {
- int decrypted_length = kViEMaxMtu;
- external_decryption_->decrypt(channel_id_, received_packet,
- decryption_buffer_, received_packet_length,
- &decrypted_length);
- if (decrypted_length <= 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
- "RTP decryption failed");
- return -1;
- } else if (decrypted_length > kViEMaxMtu) {
- WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo, channel_id_,
- "InsertRTPPacket: %d bytes is allocated as RTP decrytption"
- " output, external decryption used %d bytes. => memory is "
- " now corrupted", kViEMaxMtu, decrypted_length);
- return -1;
- }
- received_packet = decryption_buffer_;
- received_packet_length = decrypted_length;
- }
-
if (rtp_dump_) {
- rtp_dump_->DumpPacket(received_packet,
- static_cast<uint16_t>(received_packet_length));
+ rtp_dump_->DumpPacket(rtp_packet,
+ static_cast<uint16_t>(rtp_packet_length));
}
}
+
RTPHeader header;
- if (!rtp_header_parser_->Parse(received_packet, received_packet_length,
+ if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length,
&header)) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, channel_id_,
- "Incoming packet: Invalid RTP header");
return -1;
}
- int payload_length = received_packet_length - header.headerLength;
+ int payload_length = rtp_packet_length - header.headerLength;
int64_t arrival_time_ms;
if (packet_time.timestamp != -1)
arrival_time_ms = (packet_time.timestamp + 500) / 1000;
@@ -269,11 +238,16 @@ int ViEReceiver::InsertRTPPacket(const int8_t* rtp_packet,
header.payload_type_frequency = kVideoPayloadTypeFrequency;
bool in_order = IsPacketInOrder(header);
- rtp_receive_statistics_->IncomingPacket(header, received_packet_length,
- IsPacketRetransmitted(header, in_order));
rtp_payload_registry_->SetIncomingPayloadType(header);
- return ReceivePacket(received_packet, received_packet_length, header,
- in_order) ? 0 : -1;
+ int ret = ReceivePacket(rtp_packet, rtp_packet_length, header, in_order)
+ ? 0
+ : -1;
+ // Update receive statistics after ReceivePacket.
+ // Receive statistics will be reset if the payload type changes (make sure
+ // that the first packet is included in the stats).
+ rtp_receive_statistics_->IncomingPacket(
+ header, rtp_packet_length, IsPacketRetransmitted(header, in_order));
+ return ret;
}
bool ViEReceiver::ReceivePacket(const uint8_t* packet,
@@ -299,15 +273,20 @@ bool ViEReceiver::ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
int packet_length,
const RTPHeader& header) {
if (rtp_payload_registry_->IsRed(header)) {
+ int8_t ulpfec_pt = rtp_payload_registry_->ulpfec_payload_type();
+ if (packet[header.headerLength] == ulpfec_pt)
+ rtp_receive_statistics_->FecPacketReceived(header.ssrc);
if (fec_receiver_->AddReceivedRedPacket(
- header, packet, packet_length,
- rtp_payload_registry_->ulpfec_payload_type()) != 0) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, channel_id_,
- "Incoming RED packet error");
+ header, packet, packet_length, ulpfec_pt) != 0) {
return false;
}
return fec_receiver_->ProcessReceivedFec() == 0;
} else if (rtp_payload_registry_->IsRtx(header)) {
+ if (header.headerLength + header.paddingLength == packet_length) {
+ // This is an empty packet and should be silently dropped before trying to
+ // parse the RTX header.
+ return true;
+ }
// Remove the RTX header and parse the original RTP header.
if (packet_length < header.headerLength)
return false;
@@ -315,16 +294,14 @@ bool ViEReceiver::ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
return false;
CriticalSectionScoped cs(receive_cs_.get());
if (restored_packet_in_use_) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, channel_id_,
- "Multiple RTX headers detected, dropping packet");
+ LOG(LS_WARNING) << "Multiple RTX headers detected, dropping packet.";
return false;
}
uint8_t* restored_packet_ptr = restored_packet_;
if (!rtp_payload_registry_->RestoreOriginalPacket(
&restored_packet_ptr, packet, &packet_length, rtp_receiver_->SSRC(),
header)) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, channel_id_,
- "Incoming RTX packet: invalid RTP header");
+ LOG(LS_WARNING) << "Incoming RTX packet: Invalid RTP header";
return false;
}
restored_packet_in_use_ = true;
@@ -335,55 +312,34 @@ bool ViEReceiver::ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
return false;
}
-int ViEReceiver::InsertRTCPPacket(const int8_t* rtcp_packet,
+int ViEReceiver::InsertRTCPPacket(const uint8_t* rtcp_packet,
int rtcp_packet_length) {
- // TODO(mflodman) Change decrypt to get rid of this cast.
- int8_t* tmp_ptr = const_cast<int8_t*>(rtcp_packet);
- unsigned char* received_packet = reinterpret_cast<unsigned char*>(tmp_ptr);
- int received_packet_length = rtcp_packet_length;
{
CriticalSectionScoped cs(receive_cs_.get());
if (!receiving_) {
return -1;
}
- if (external_decryption_) {
- int decrypted_length = kViEMaxMtu;
- external_decryption_->decrypt_rtcp(channel_id_, received_packet,
- decryption_buffer_,
- received_packet_length,
- &decrypted_length);
- if (decrypted_length <= 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
- "RTP decryption failed");
- return -1;
- } else if (decrypted_length > kViEMaxMtu) {
- WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo, channel_id_,
- "InsertRTCPPacket: %d bytes is allocated as RTP "
- " decrytption output, external decryption used %d bytes. "
- " => memory is now corrupted",
- kViEMaxMtu, decrypted_length);
- return -1;
- }
- received_packet = decryption_buffer_;
- received_packet_length = decrypted_length;
- }
-
if (rtp_dump_) {
rtp_dump_->DumpPacket(
- received_packet, static_cast<uint16_t>(received_packet_length));
+ rtcp_packet, static_cast<uint16_t>(rtcp_packet_length));
}
- }
- {
- CriticalSectionScoped cs(receive_cs_.get());
+
std::list<RtpRtcp*>::iterator it = rtp_rtcp_simulcast_.begin();
while (it != rtp_rtcp_simulcast_.end()) {
RtpRtcp* rtp_rtcp = *it++;
- rtp_rtcp->IncomingRtcpPacket(received_packet, received_packet_length);
+ rtp_rtcp->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
}
}
assert(rtp_rtcp_); // Should be set by owner at construction time.
- return rtp_rtcp_->IncomingRtcpPacket(received_packet, received_packet_length);
+ int ret = rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+ if (ret != 0) {
+ return ret;
+ }
+
+ ntp_estimator_->UpdateRtcpTimestamp(rtp_receiver_->SSRC(), rtp_rtcp_);
+
+ return 0;
}
void ViEReceiver::StartReceive() {
@@ -404,16 +360,12 @@ int ViEReceiver::StartRTPDump(const char file_nameUTF8[1024]) {
} else {
rtp_dump_ = RtpDump::CreateRtpDump();
if (rtp_dump_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
- "StartRTPDump: Failed to create RTP dump");
return -1;
}
}
if (rtp_dump_->Start(file_nameUTF8) != 0) {
RtpDump::DestroyRtpDump(rtp_dump_);
rtp_dump_ = NULL;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
- "StartRTPDump: Failed to start RTP dump");
return -1;
}
return 0;
@@ -424,34 +376,18 @@ int ViEReceiver::StopRTPDump() {
if (rtp_dump_) {
if (rtp_dump_->IsActive()) {
rtp_dump_->Stop();
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
- "StopRTPDump: Dump not active");
}
RtpDump::DestroyRtpDump(rtp_dump_);
rtp_dump_ = NULL;
} else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
- "StopRTPDump: RTP dump not started");
return -1;
}
return 0;
}
-// TODO(holmer): To be moved to ViEChannelGroup.
-void ViEReceiver::EstimatedReceiveBandwidth(
- unsigned int* available_bandwidth) const {
- std::vector<unsigned int> ssrcs;
-
- // LatestEstimate returns an error if there is no valid bitrate estimate, but
- // ViEReceiver instead returns a zero estimate.
- remote_bitrate_estimator_->LatestEstimate(&ssrcs, available_bandwidth);
- if (std::find(ssrcs.begin(), ssrcs.end(), rtp_receiver_->SSRC()) !=
- ssrcs.end()) {
- *available_bandwidth /= ssrcs.size();
- } else {
- *available_bandwidth = 0;
- }
+void ViEReceiver::GetReceiveBandwidthEstimatorStats(
+ ReceiveBandwidthEstimatorStats* output) const {
+ remote_bitrate_estimator_->GetStats(output);
}
ReceiveStatistics* ViEReceiver::GetReceiveStatistics() const {
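To condense the NTP-estimation flow that vie_receiver.cc gains above: the estimator is built on the real-time clock, refreshed after each RTCP packet has been handed to the RtpRtcp module, and queried per RTP packet to fill WebRtcRTPHeader::ntp_time_ms. The wrapper below is a sketch (NtpTagger is not part of the patch) and uses only the RemoteNtpTimeEstimator calls visible in this diff:

#include <stdint.h>

#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
#include "webrtc/system_wrappers/interface/clock.h"

namespace webrtc {
class RtpRtcp;  // Full definition not needed for the sketch.
}

class NtpTagger {
 public:
  NtpTagger() : estimator_(webrtc::Clock::GetRealTimeClock()) {}

  // Call after an RTCP packet has been processed, as InsertRTCPPacket() now
  // does, so the estimator sees fresh sender reports.
  void OnRtcpProcessed(uint32_t remote_ssrc, webrtc::RtpRtcp* rtp_rtcp) {
    estimator_.UpdateRtcpTimestamp(remote_ssrc, rtp_rtcp);
  }

  // Per RTP packet: the value OnReceivedPayloadData() now copies into
  // WebRtcRTPHeader::ntp_time_ms before passing the packet to the VCM.
  int64_t NtpTimeMs(uint32_t rtp_timestamp) {
    return estimator_.Estimate(rtp_timestamp);
  }

 private:
  webrtc::RemoteNtpTimeEstimator estimator_;
};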
diff --git a/chromium/third_party/webrtc/video_engine/vie_receiver.h b/chromium/third_party/webrtc/video_engine/vie_receiver.h
index 6f480cbb4dd..817caddbc49 100644
--- a/chromium/third_party/webrtc/video_engine/vie_receiver.h
+++ b/chromium/third_party/webrtc/video_engine/vie_receiver.h
@@ -24,8 +24,8 @@
namespace webrtc {
class CriticalSectionWrapper;
-class Encryption;
class FecReceiver;
+class RemoteNtpTimeEstimator;
class ReceiveStatistics;
class RemoteBitrateEstimator;
class RtpDump;
@@ -34,6 +34,7 @@ class RTPPayloadRegistry;
class RtpReceiver;
class RtpRtcp;
class VideoCodingModule;
+struct ReceiveBandwidthEstimatorStats;
class ViEReceiver : public RtpData {
public:
@@ -46,15 +47,12 @@ class ViEReceiver : public RtpData {
bool RegisterPayload(const VideoCodec& video_codec);
void SetNackStatus(bool enable, int max_nack_reordering_threshold);
- void SetRtxStatus(bool enable, uint32_t ssrc);
- void SetRtxPayloadType(uint32_t payload_type);
+ void SetRtxPayloadType(int payload_type);
+ void SetRtxSsrc(uint32_t ssrc);
uint32_t GetRemoteSsrc() const;
int GetCsrcs(uint32_t* csrcs) const;
- int RegisterExternalDecryption(Encryption* decryption);
- int DeregisterExternalDecryption();
-
void SetRtpRtcpModule(RtpRtcp* module);
RtpReceiver* GetRtpReceiver() const;
@@ -83,26 +81,30 @@ class ViEReceiver : public RtpData {
const uint16_t payload_size,
const WebRtcRTPHeader* rtp_header);
- void EstimatedReceiveBandwidth(unsigned int* available_bandwidth) const;
+ void GetReceiveBandwidthEstimatorStats(
+ ReceiveBandwidthEstimatorStats* output) const;
ReceiveStatistics* GetReceiveStatistics() const;
+ void ReceivedBWEPacket(int64_t arrival_time_ms, int payload_size,
+ const RTPHeader& header);
private:
- int InsertRTPPacket(const int8_t* rtp_packet, int rtp_packet_length,
+ int InsertRTPPacket(const uint8_t* rtp_packet, int rtp_packet_length,
const PacketTime& packet_time);
- bool ReceivePacket(const uint8_t* packet, int packet_length,
- const RTPHeader& header, bool in_order);
+ bool ReceivePacket(const uint8_t* packet,
+ int packet_length,
+ const RTPHeader& header,
+ bool in_order);
// Parses and handles for instance RTX and RED headers.
// This function assumes that it's being called from only one thread.
bool ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
int packet_length,
const RTPHeader& header);
- int InsertRTCPPacket(const int8_t* rtcp_packet, int rtcp_packet_length);
+ int InsertRTCPPacket(const uint8_t* rtcp_packet, int rtcp_packet_length);
bool IsPacketInOrder(const RTPHeader& header) const;
bool IsPacketRetransmitted(const RTPHeader& header, bool in_order) const;
scoped_ptr<CriticalSectionWrapper> receive_cs_;
- const int32_t channel_id_;
scoped_ptr<RtpHeaderParser> rtp_header_parser_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<RtpReceiver> rtp_receiver_;
@@ -113,12 +115,13 @@ class ViEReceiver : public RtpData {
VideoCodingModule* vcm_;
RemoteBitrateEstimator* remote_bitrate_estimator_;
- Encryption* external_decryption_;
- uint8_t* decryption_buffer_;
+ scoped_ptr<RemoteNtpTimeEstimator> ntp_estimator_;
+
RtpDump* rtp_dump_;
bool receiving_;
uint8_t restored_packet_[kViEMaxMtu];
bool restored_packet_in_use_;
+ bool receiving_ast_enabled_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video_engine/vie_remb.cc b/chromium/third_party/webrtc/video_engine/vie_remb.cc
index 6b0f161d935..d04f0a30ad2 100644
--- a/chromium/third_party/webrtc/video_engine/vie_remb.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_remb.cc
@@ -22,8 +22,7 @@
namespace webrtc {
-const int kRembSendIntervallMs = 1000;
-const unsigned int kRembMinimumBitrateKbps = 50;
+const int kRembSendIntervalMs = 200;
// % threshold for if we should send a new REMB asap.
const unsigned int kSendThresholdPercent = 97;
@@ -38,15 +37,12 @@ VieRemb::~VieRemb() {}
void VieRemb::AddReceiveChannel(RtpRtcp* rtp_rtcp) {
assert(rtp_rtcp);
- WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
- "VieRemb::AddReceiveChannel(%p)", rtp_rtcp);
CriticalSectionScoped cs(list_crit_.get());
if (std::find(receive_modules_.begin(), receive_modules_.end(), rtp_rtcp) !=
receive_modules_.end())
return;
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "AddRembChannel");
// The module probably doesn't have a remote SSRC yet, so don't add it to the
// map.
receive_modules_.push_back(rtp_rtcp);
@@ -54,8 +50,6 @@ void VieRemb::AddReceiveChannel(RtpRtcp* rtp_rtcp) {
void VieRemb::RemoveReceiveChannel(RtpRtcp* rtp_rtcp) {
assert(rtp_rtcp);
- WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
- "VieRemb::RemoveReceiveChannel(%p)", rtp_rtcp);
CriticalSectionScoped cs(list_crit_.get());
for (RtpModules::iterator it = receive_modules_.begin();
@@ -69,8 +63,6 @@ void VieRemb::RemoveReceiveChannel(RtpRtcp* rtp_rtcp) {
void VieRemb::AddRembSender(RtpRtcp* rtp_rtcp) {
assert(rtp_rtcp);
- WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
- "VieRemb::AddRembSender(%p)", rtp_rtcp);
CriticalSectionScoped cs(list_crit_.get());
@@ -83,8 +75,6 @@ void VieRemb::AddRembSender(RtpRtcp* rtp_rtcp) {
void VieRemb::RemoveRembSender(RtpRtcp* rtp_rtcp) {
assert(rtp_rtcp);
- WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
- "VieRemb::RemoveRembSender(%p)", rtp_rtcp);
CriticalSectionScoped cs(list_crit_.get());
for (RtpModules::iterator it = rtcp_sender_.begin();
@@ -106,8 +96,6 @@ bool VieRemb::InUse() const {
void VieRemb::OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
unsigned int bitrate) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
- "VieRemb::UpdateBitrateEstimate(bitrate: %u)", bitrate);
list_crit_->Enter();
// If we already have an estimate, check if the new total estimate is below
// kSendThresholdPercent of the previous estimate.
@@ -117,7 +105,7 @@ void VieRemb::OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
if (new_remb_bitrate < kSendThresholdPercent * last_send_bitrate_ / 100) {
// The new bitrate estimate is less than kSendThresholdPercent % of the
// last report. Send a REMB asap.
- last_remb_time_ = TickTime::MillisecondTimestamp() - kRembSendIntervallMs;
+ last_remb_time_ = TickTime::MillisecondTimestamp() - kRembSendIntervalMs;
}
}
bitrate_ = bitrate;
@@ -125,7 +113,7 @@ void VieRemb::OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
// Calculate total receive bitrate estimate.
int64_t now = TickTime::MillisecondTimestamp();
- if (now - last_remb_time_ < kRembSendIntervallMs) {
+ if (now - last_remb_time_ < kRembSendIntervalMs) {
list_crit_->Leave();
return;
}
@@ -145,11 +133,6 @@ void VieRemb::OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
}
last_send_bitrate_ = bitrate_;
- // Never send a REMB lower than last_send_bitrate_.
- if (last_send_bitrate_ < kRembMinimumBitrateKbps) {
- last_send_bitrate_ = kRembMinimumBitrateKbps;
- }
-
list_crit_->Leave();
if (sender) {
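The vie_remb.cc hunks shorten the REMB send interval from 1000 ms to 200 ms and drop the 50 kbps floor on outgoing estimates. What remains is the throttling rule, sketched below as a standalone function (ShouldSendRemb and its parameters are illustrative; the constants and comparisons mirror the code above):

#include <stdint.h>

namespace {
const int kRembSendIntervalMs = 200;
const unsigned int kSendThresholdPercent = 97;
}  // namespace

// Returns true if a REMB should be sent now, updating *last_remb_time_ms when
// it does. A REMB goes out at most once per kRembSendIntervalMs, except that a
// drop below kSendThresholdPercent of the last sent estimate forces one
// immediately (the diff achieves this by rewinding last_remb_time_).
bool ShouldSendRemb(unsigned int new_bitrate_bps,
                    unsigned int last_sent_bitrate_bps,
                    int64_t now_ms,
                    int64_t* last_remb_time_ms) {
  if (last_sent_bitrate_bps > 0 &&
      new_bitrate_bps < kSendThresholdPercent * last_sent_bitrate_bps / 100) {
    *last_remb_time_ms = now_ms - kRembSendIntervalMs;
  }
  if (now_ms - *last_remb_time_ms < kRembSendIntervalMs)
    return false;
  *last_remb_time_ms = now_ms;
  return true;
}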
diff --git a/chromium/third_party/webrtc/video_engine/vie_remb_unittest.cc b/chromium/third_party/webrtc/video_engine/vie_remb_unittest.cc
index 0fd9e51097a..1f0b70c516c 100644
--- a/chromium/third_party/webrtc/video_engine/vie_remb_unittest.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_remb_unittest.cc
@@ -18,35 +18,26 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/video_engine/vie_remb.h"
using ::testing::_;
using ::testing::AnyNumber;
+using ::testing::NiceMock;
using ::testing::Return;
namespace webrtc {
-class TestProcessThread : public ProcessThread {
- public:
- explicit TestProcessThread() {}
- ~TestProcessThread() {}
- virtual int32_t Start() { return 0; }
- virtual int32_t Stop() { return 0; }
- virtual int32_t RegisterModule(const Module* module) { return 0; }
- virtual int32_t DeRegisterModule(const Module* module) { return 0; }
-};
-
class ViERembTest : public ::testing::Test {
protected:
virtual void SetUp() {
TickTime::UseFakeClock(12345);
- process_thread_.reset(new TestProcessThread);
+ process_thread_.reset(new NiceMock<MockProcessThread>);
vie_remb_.reset(new VieRemb());
}
- scoped_ptr<TestProcessThread> process_thread_;
+ scoped_ptr<MockProcessThread> process_thread_;
scoped_ptr<VieRemb> vie_remb_;
};
diff --git a/chromium/third_party/webrtc/video_engine/vie_render_impl.cc b/chromium/third_party/webrtc/video_engine/vie_render_impl.cc
index b7b7c94cde4..00c0186a26d 100644
--- a/chromium/third_party/webrtc/video_engine/vie_render_impl.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_render_impl.cc
@@ -13,7 +13,7 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_render/include/video_render.h"
#include "webrtc/modules/video_render/include/video_render_defines.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -44,36 +44,24 @@ ViERender* ViERender::GetInterface(VideoEngine* video_engine) {
}
int ViERenderImpl::Release() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
- "ViERender::Release()");
// Decrease ref count
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
- "ViERender release too many times");
+ LOG(LS_ERROR) << "ViERender release too many times";
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
- "ViERender reference count: %d", ref_count);
return ref_count;
}
ViERenderImpl::ViERenderImpl(ViESharedData* shared_data)
- : shared_data_(shared_data) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViERenderImpl::ViERenderImpl() Ctor");
-}
+ : shared_data_(shared_data) {}
-ViERenderImpl::~ViERenderImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViERenderImpl::~ViERenderImpl() Dtor");
-}
+ViERenderImpl::~ViERenderImpl() {}
int ViERenderImpl::RegisterVideoRenderModule(
VideoRender& render_module) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s (&render_module: %p)", __FUNCTION__, &render_module);
+ LOG_F(LS_INFO);
if (shared_data_->render_manager()->RegisterVideoRenderModule(
&render_module) != 0) {
shared_data_->SetLastError(kViERenderUnknownError);
@@ -84,8 +72,7 @@ int ViERenderImpl::RegisterVideoRenderModule(
int ViERenderImpl::DeRegisterVideoRenderModule(
VideoRender& render_module) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s (&render_module: %p)", __FUNCTION__, &render_module);
+ LOG_F(LS_INFO);
if (shared_data_->render_manager()->DeRegisterVideoRenderModule(
&render_module) != 0) {
// Error logging is done in ViERenderManager::DeRegisterVideoRenderModule.
@@ -99,17 +86,14 @@ int ViERenderImpl::AddRenderer(const int render_id, void* window,
const unsigned int z_order, const float left,
const float top, const float right,
const float bottom) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s (render_id: %d, window: 0x%p, z_order: %u, left: %f, "
- "top: %f, right: %f, bottom: %f)",
- __FUNCTION__, render_id, window, z_order, left, top, right,
- bottom);
+ LOG_F(LS_INFO) << "render_id: " << render_id << " z_order: " << z_order
+ << " left: " << left << " top: " << top << " right: " << right
+ << " bottom: " << bottom;
{
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
if (rs.Renderer(render_id)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s - Renderer already exist %d.", __FUNCTION__,
- render_id);
+ LOG(LS_ERROR) << "Renderer for render_id: " << render_id
+ << " already exists.";
shared_data_->SetLastError(kViERenderAlreadyExists);
return -1;
}
@@ -119,9 +103,6 @@ int ViERenderImpl::AddRenderer(const int render_id, void* window,
ViEChannelManagerScoped cm(*(shared_data_->channel_manager()));
ViEFrameProviderBase* frame_provider = cm.Channel(render_id);
if (!frame_provider) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -137,9 +118,6 @@ int ViERenderImpl::AddRenderer(const int render_id, void* window,
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViEFrameProviderBase* frame_provider = is.FrameProvider(render_id);
if (!frame_provider) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -154,17 +132,12 @@ int ViERenderImpl::AddRenderer(const int render_id, void* window,
}
int ViERenderImpl::RemoveRenderer(const int render_id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s(render_id: %d)", __FUNCTION__, render_id);
+ LOG_F(LS_INFO) << "render_id: " << render_id;
ViERenderer* renderer = NULL;
{
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
renderer = rs.Renderer(render_id);
if (!renderer) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo,
- ViEId(shared_data_->instance_id()),
- "%s No render exist with render_id: %d", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -176,10 +149,6 @@ int ViERenderImpl::RemoveRenderer(const int render_id) {
ViEChannelManagerScoped cm(*(shared_data_->channel_manager()));
ViEChannel* channel = cm.Channel(render_id);
if (!channel) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo,
- ViEId(shared_data_->instance_id()),
- "%s: no channel with id %d exists ", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -189,10 +158,6 @@ int ViERenderImpl::RemoveRenderer(const int render_id) {
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViEFrameProviderBase* provider = is.FrameProvider(render_id);
if (!provider) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo,
- ViEId(shared_data_->instance_id()),
- "%s: no provider with id %d exists ", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -206,16 +171,10 @@ int ViERenderImpl::RemoveRenderer(const int render_id) {
}
int ViERenderImpl::StartRender(const int render_id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), render_id),
- "%s(channel: %d)", __FUNCTION__, render_id);
+ LOG_F(LS_INFO) << "render_id: " << render_id;
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
ViERenderer* renderer = rs.Renderer(render_id);
if (!renderer) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), render_id),
- "%s: No renderer with render Id %d exist.", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -227,16 +186,10 @@ int ViERenderImpl::StartRender(const int render_id) {
}
int ViERenderImpl::StopRender(const int render_id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), render_id),
- "%s(channel: %d)", __FUNCTION__, render_id);
+ LOG_F(LS_INFO) << "render_id: " << render_id;
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
ViERenderer* renderer = rs.Renderer(render_id);
if (!renderer) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), render_id),
- "%s: No renderer with render_id %d exist.", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -248,16 +201,11 @@ int ViERenderImpl::StopRender(const int render_id) {
}
int ViERenderImpl::SetExpectedRenderDelay(int render_id, int render_delay) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), render_id),
- "%s(channel: %d)", __FUNCTION__, render_id);
+ LOG_F(LS_INFO) << "render_id: " << render_id
+ << " render_delay: " << render_delay;
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
ViERenderer* renderer = rs.Renderer(render_id);
if (!renderer) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), render_id),
- "%s: No renderer with render_id %d exist.", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -271,16 +219,12 @@ int ViERenderImpl::SetExpectedRenderDelay(int render_id, int render_delay) {
int ViERenderImpl::ConfigureRender(int render_id, const unsigned int z_order,
const float left, const float top,
const float right, const float bottom) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), render_id),
- "%s(channel: %d)", __FUNCTION__, render_id);
+ LOG_F(LS_INFO) << "render_id: " << render_id << " z_order: " << z_order
+ << " left: " << left << " top: " << top << " right: " << right
+ << " bottom: " << bottom;
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
ViERenderer* renderer = rs.Renderer(render_id);
if (!renderer) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), render_id),
- "%s: No renderer with render_id %d exist.", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -298,10 +242,6 @@ int ViERenderImpl::MirrorRenderStream(const int render_id, const bool enable,
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
ViERenderer* renderer = rs.Renderer(render_id);
if (!renderer) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), render_id),
- "%s: No renderer with render_id %d exist.", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -326,10 +266,7 @@ int ViERenderImpl::AddRenderer(const int render_id,
video_input_format != kVideoRGB565 &&
video_input_format != kVideoARGB4444 &&
video_input_format != kVideoARGB1555) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), render_id),
- "%s: Unsupported video frame format requested",
- __FUNCTION__, render_id);
+ LOG(LS_ERROR) << "Unsupported video frame format requested.";
shared_data_->SetLastError(kViERenderInvalidFrameFormat);
return -1;
}
@@ -337,9 +274,7 @@ int ViERenderImpl::AddRenderer(const int render_id,
// Verify the renderer doesn't exist.
ViERenderManagerScoped rs(*(shared_data_->render_manager()));
if (rs.Renderer(render_id)) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s - Renderer already exist %d.", __FUNCTION__,
- render_id);
+ LOG_F(LS_ERROR) << "Renderer already exists for render_id: " << render_id;
shared_data_->SetLastError(kViERenderAlreadyExists);
return -1;
}
@@ -349,9 +284,6 @@ int ViERenderImpl::AddRenderer(const int render_id,
ViEChannelManagerScoped cm(*(shared_data_->channel_manager()));
ViEFrameProviderBase* frame_provider = cm.Channel(render_id);
if (!frame_provider) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -373,9 +305,6 @@ int ViERenderImpl::AddRenderer(const int render_id,
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViEFrameProviderBase* frame_provider = is.FrameProvider(render_id);
if (!frame_provider) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
- "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
@@ -402,12 +331,6 @@ int ViERenderImpl::AddRenderCallback(int render_id,
ViEChannelManagerScoped cm(*(shared_data_->channel_manager()));
ViEFrameProviderBase* frame_provider = cm.Channel(render_id);
if (!frame_provider) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideo,
- ViEId(shared_data_->instance_id()),
- "%s: FrameProvider id %d doesn't exist",
- __FUNCTION__,
- render_id);
shared_data_->SetLastError(kViERenderInvalidRenderId);
return -1;
}
diff --git a/chromium/third_party/webrtc/video_engine/vie_render_manager.cc b/chromium/third_party/webrtc/video_engine/vie_render_manager.cc
index 2480d90e4df..e044bfac928 100644
--- a/chromium/third_party/webrtc/video_engine/vie_render_manager.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_render_manager.cc
@@ -15,7 +15,7 @@
#include "webrtc/modules/video_render/include/video_render_defines.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_renderer.h"
@@ -35,14 +35,9 @@ ViERenderManager::ViERenderManager(int32_t engine_id)
: list_cs_(CriticalSectionWrapper::CreateCriticalSection()),
engine_id_(engine_id),
use_external_render_module_(false) {
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id),
- "ViERenderManager::ViERenderManager(engine_id: %d) - "
- "Constructor", engine_id);
}
ViERenderManager::~ViERenderManager() {
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id_),
- "ViERenderManager Destructor, engine_id: %d", engine_id_);
for (RendererMap::iterator it = stream_to_vie_renderer_.begin();
it != stream_to_vie_renderer_.end();
++it) {
@@ -57,15 +52,12 @@ int32_t ViERenderManager::RegisterVideoRenderModule(
// the registrant render module is associated with.
VideoRender* current_module = FindRenderModule(render_module->Window());
if (current_module) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "A module is already registered for this window (window=%p, "
- "current module=%p, registrant module=%p.",
- render_module->Window(), current_module, render_module);
+ LOG_F(LS_ERROR) << "A render module is already registered for this window.";
return -1;
}
// Register module.
- render_list_.PushBack(static_cast<void*>(render_module));
+ render_list_.push_back(render_module);
use_external_render_module_ = true;
return 0;
}
@@ -75,30 +67,22 @@ int32_t ViERenderManager::DeRegisterVideoRenderModule(
// Check if there are streams in the module.
uint32_t n_streams = render_module->GetNumIncomingRenderStreams();
if (n_streams != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "There are still %d streams in this module, cannot "
- "de-register", n_streams);
+ LOG(LS_ERROR) << "There are still " << n_streams
+ << "in this module, cannot de-register.";
return -1;
}
- // Erase the render module from the map.
- ListItem* list_item = NULL;
- bool found = false;
- for (list_item = render_list_.First(); list_item != NULL;
- list_item = render_list_.Next(list_item)) {
- if (render_module == static_cast<VideoRender*>(list_item->GetItem())) {
- // We've found our renderer.
- render_list_.Erase(list_item);
- found = true;
- break;
+ for (RenderList::iterator iter = render_list_.begin();
+ iter != render_list_.end(); ++iter) {
+ if (render_module == *iter) {
+ // We've found our renderer. Erase the render module from the map.
+ render_list_.erase(iter);
+ return 0;
}
}
- if (!found) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "Module not registered");
- return -1;
- }
- return 0;
+
+ LOG(LS_ERROR) << "Module not registered.";
+ return -1;
}
ViERenderer* ViERenderManager::AddRenderStream(const int32_t render_id,
@@ -112,9 +96,7 @@ ViERenderer* ViERenderManager::AddRenderStream(const int32_t render_id,
if (stream_to_vie_renderer_.find(render_id) !=
stream_to_vie_renderer_.end()) {
- // This stream is already added to a renderer, not allowed!
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "Render stream already exists");
+ LOG(LS_ERROR) << "Render stream already exists";
return NULL;
}
@@ -124,12 +106,10 @@ ViERenderer* ViERenderManager::AddRenderStream(const int32_t render_id,
// No render module for this window, create a new one.
render_module = VideoRender::CreateVideoRender(ViEModuleId(engine_id_, -1),
window, false);
- if (!render_module) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
- "Could not create new render module");
+ if (!render_module)
return NULL;
- }
- render_list_.PushBack(static_cast<void*>(render_module));
+
+ render_list_.push_back(render_module);
}
ViERenderer* vie_renderer = ViERenderer::CreateViERenderer(render_id,
@@ -138,12 +118,9 @@ ViERenderer* ViERenderManager::AddRenderStream(const int32_t render_id,
*this, z_order,
left, top, right,
bottom);
- if (!vie_renderer) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(engine_id_, render_id),
- "Could not create new render stream");
+ if (!vie_renderer)
return NULL;
- }
+
stream_to_vie_renderer_[render_id] = vie_renderer;
return vie_renderer;
}
@@ -156,9 +133,7 @@ int32_t ViERenderManager::RemoveRenderStream(
CriticalSectionScoped cs(list_cs_.get());
RendererMap::iterator it = stream_to_vie_renderer_.find(render_id);
if (it == stream_to_vie_renderer_.end()) {
- // No such stream
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(engine_id_),
- "No renderer for this stream found, channel_id");
+ LOG(LS_ERROR) << "No renderer found for render_id: " << render_id;
return 0;
}
@@ -176,12 +151,11 @@ int32_t ViERenderManager::RemoveRenderStream(
if (!use_external_render_module_ &&
renderer.GetNumIncomingRenderStreams() == 0) {
// Erase the render module from the map.
- ListItem* list_item = NULL;
- for (list_item = render_list_.First(); list_item != NULL;
- list_item = render_list_.Next(list_item)) {
- if (&renderer == static_cast<VideoRender*>(list_item->GetItem())) {
+ for (RenderList::iterator iter = render_list_.begin();
+ iter != render_list_.end(); ++iter) {
+ if (&renderer == *iter) {
// We've found our renderer.
- render_list_.Erase(list_item);
+ render_list_.erase(iter);
break;
}
}
@@ -192,21 +166,14 @@ int32_t ViERenderManager::RemoveRenderStream(
}
VideoRender* ViERenderManager::FindRenderModule(void* window) {
- VideoRender* renderer = NULL;
- ListItem* list_item = NULL;
- for (list_item = render_list_.First(); list_item != NULL;
- list_item = render_list_.Next(list_item)) {
- renderer = static_cast<VideoRender*>(list_item->GetItem());
- if (renderer == NULL) {
- break;
- }
- if (renderer->Window() == window) {
+ for (RenderList::iterator iter = render_list_.begin();
+ iter != render_list_.end(); ++iter) {
+ if ((*iter)->Window() == window) {
// We've found the render module.
- break;
+ return *iter;
}
- renderer = NULL;
}
- return renderer;
+ return NULL;
}
ViERenderer* ViERenderManager::ViERenderPtr(int32_t render_id) const {
diff --git a/chromium/third_party/webrtc/video_engine/vie_render_manager.h b/chromium/third_party/webrtc/video_engine/vie_render_manager.h
index bc32893a2fc..c1314cdedcc 100644
--- a/chromium/third_party/webrtc/video_engine/vie_render_manager.h
+++ b/chromium/third_party/webrtc/video_engine/vie_render_manager.h
@@ -11,9 +11,9 @@
#ifndef WEBRTC_VIDEO_ENGINE_VIE_RENDER_MANAGER_H_
#define WEBRTC_VIDEO_ENGINE_VIE_RENDER_MANAGER_H_
+#include <list>
#include <map>
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_engine/vie_manager_base.h"
@@ -46,6 +46,7 @@ class ViERenderManager : private ViEManagerBase {
int32_t RemoveRenderStream(int32_t render_id);
private:
+ typedef std::list<VideoRender*> RenderList;
// Returns a pointer to the render module if it exists in the render list.
// Assumed protected.
VideoRender* FindRenderModule(void* window);
@@ -58,8 +59,7 @@ class ViERenderManager : private ViEManagerBase {
// Protected by ViEManagerBase.
typedef std::map<int32_t, ViERenderer*> RendererMap;
RendererMap stream_to_vie_renderer_;
-
- ListWrapper render_list_;
+ RenderList render_list_;
bool use_external_render_module_;
};
diff --git a/chromium/third_party/webrtc/video_engine/vie_renderer.cc b/chromium/third_party/webrtc/video_engine/vie_renderer.cc
index e18ebe98987..b4ec73828dd 100644
--- a/chromium/third_party/webrtc/video_engine/vie_renderer.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_renderer.cc
@@ -181,6 +181,7 @@ int32_t ViEExternalRendererImpl::RenderFrame(
external_renderer_->DeliverFrame(NULL,
0,
video_frame.timestamp(),
+ video_frame.ntp_time_ms(),
video_frame.render_time_ms(),
video_frame.native_handle());
} else {
@@ -244,6 +245,7 @@ int32_t ViEExternalRendererImpl::RenderFrame(
external_renderer_->DeliverFrame(out_frame->Buffer(),
out_frame->Length(),
video_frame.timestamp(),
+ video_frame.ntp_time_ms(),
video_frame.render_time_ms(),
NULL);
}
diff --git a/chromium/third_party/webrtc/video_engine/vie_rtp_rtcp_impl.cc b/chromium/third_party/webrtc/video_engine/vie_rtp_rtcp_impl.cc
index 2bd47beca92..53610b4a326 100644
--- a/chromium/third_party/webrtc/video_engine/vie_rtp_rtcp_impl.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_rtp_rtcp_impl.cc
@@ -12,7 +12,7 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/system_wrappers/interface/file_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_channel.h"
#include "webrtc/video_engine/vie_channel_manager.h"
@@ -90,48 +90,31 @@ ViERTP_RTCP* ViERTP_RTCP::GetInterface(VideoEngine* video_engine) {
}
int ViERTP_RTCPImpl::Release() {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
- "ViERTP_RTCP::Release()");
// Decrease ref count.
(*this)--;
int32_t ref_count = GetCount();
if (ref_count < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
- "ViERTP_RTCP release too many times");
+ LOG(LS_ERROR) << "ViERTP_RTCP released too many times.";
shared_data_->SetLastError(kViEAPIDoesNotExist);
return -1;
}
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
- "ViERTP_RTCP reference count: %d", ref_count);
return ref_count;
}
ViERTP_RTCPImpl::ViERTP_RTCPImpl(ViESharedData* shared_data)
- : shared_data_(shared_data) {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViERTP_RTCPImpl::ViERTP_RTCPImpl() Ctor");
-}
+ : shared_data_(shared_data) {}
-ViERTP_RTCPImpl::~ViERTP_RTCPImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
- "ViERTP_RTCPImpl::~ViERTP_RTCPImpl() Dtor");
-}
+ViERTP_RTCPImpl::~ViERTP_RTCPImpl() {}
int ViERTP_RTCPImpl::SetLocalSSRC(const int video_channel,
const unsigned int SSRC,
const StreamType usage,
const unsigned char simulcast_idx) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, SSRC: %d)", __FUNCTION__, video_channel, SSRC);
+ LOG_F(LS_INFO) << "channel: " << video_channel << " ssrc: " << SSRC << "";
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- // The channel doesn't exists
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -145,20 +128,13 @@ int ViERTP_RTCPImpl::SetLocalSSRC(const int video_channel,
int ViERTP_RTCPImpl::SetRemoteSSRCType(const int videoChannel,
const StreamType usage,
const unsigned int SSRC) const {
- WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
- ViEId(shared_data_->instance_id(), videoChannel),
- "%s(channel: %d, usage:%d SSRC: 0x%x)",
- __FUNCTION__, usage, videoChannel, SSRC);
+ LOG_F(LS_INFO) << "channel: " << videoChannel
+ << " usage: " << static_cast<int>(usage) << " ssrc: " << SSRC;
// Get the channel
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* ptrViEChannel = cs.Channel(videoChannel);
if (ptrViEChannel == NULL) {
- // The channel doesn't exists
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
- ViEId(shared_data_->instance_id(), videoChannel),
- "%s: Channel %d doesn't exist",
- __FUNCTION__, videoChannel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -171,15 +147,9 @@ int ViERTP_RTCPImpl::SetRemoteSSRCType(const int videoChannel,
int ViERTP_RTCPImpl::GetLocalSSRC(const int video_channel,
unsigned int& SSRC) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, SSRC: %d)", __FUNCTION__, video_channel, SSRC);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -193,15 +163,9 @@ int ViERTP_RTCPImpl::GetLocalSSRC(const int video_channel,
int ViERTP_RTCPImpl::GetRemoteSSRC(const int video_channel,
unsigned int& SSRC) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel, SSRC);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -214,15 +178,9 @@ int ViERTP_RTCPImpl::GetRemoteSSRC(const int video_channel,
int ViERTP_RTCPImpl::GetRemoteCSRCs(const int video_channel,
unsigned int CSRCs[kRtpCsrcSize]) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -235,15 +193,11 @@ int ViERTP_RTCPImpl::GetRemoteCSRCs(const int video_channel,
int ViERTP_RTCPImpl::SetRtxSendPayloadType(const int video_channel,
const uint8_t payload_type) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " payload_type: " << payload_type;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -253,17 +207,28 @@ int ViERTP_RTCPImpl::SetRtxSendPayloadType(const int video_channel,
return 0;
}
+int ViERTP_RTCPImpl::SetPadWithRedundantPayloads(int video_channel,
+ bool enable) {
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " pad with redundant payloads: "
+ << (enable ? "enable" : "disable");
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel) {
+ shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+ return -1;
+ }
+ vie_channel->SetPadWithRedundantPayloads(enable);
+ return 0;
+}
+
int ViERTP_RTCPImpl::SetRtxReceivePayloadType(const int video_channel,
const uint8_t payload_type) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " payload_type: " << static_cast<int>(payload_type);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -273,24 +238,14 @@ int ViERTP_RTCPImpl::SetRtxReceivePayloadType(const int video_channel,
int ViERTP_RTCPImpl::SetStartSequenceNumber(const int video_channel,
uint16_t sequence_number) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, sequence_number: %u)", __FUNCTION__,
- video_channel, sequence_number);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
if (vie_channel->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d already sending.", __FUNCTION__,
- video_channel);
+ LOG_F(LS_ERROR) << "channel " << video_channel << " is already sending.";
shared_data_->SetLastError(kViERtpRtcpAlreadySending);
return -1;
}
@@ -303,16 +258,11 @@ int ViERTP_RTCPImpl::SetStartSequenceNumber(const int video_channel,
int ViERTP_RTCPImpl::SetRTCPStatus(const int video_channel,
const ViERTCPMode rtcp_mode) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, mode: %d)", __FUNCTION__, video_channel,
- rtcp_mode);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " mode: " << static_cast<int>(rtcp_mode);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -327,23 +277,14 @@ int ViERTP_RTCPImpl::SetRTCPStatus(const int video_channel,
int ViERTP_RTCPImpl::GetRTCPStatus(const int video_channel,
ViERTCPMode& rtcp_mode) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel, rtcp_mode);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
RTCPMethod module_mode = kRtcpOff;
if (vie_channel->GetRTCPMode(&module_mode) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: could not get current RTCP mode", __FUNCTION__);
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
@@ -353,24 +294,16 @@ int ViERTP_RTCPImpl::GetRTCPStatus(const int video_channel,
int ViERTP_RTCPImpl::SetRTCPCName(const int video_channel,
const char rtcp_cname[KMaxRTCPCNameLength]) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, name: %s)", __FUNCTION__, video_channel,
- rtcp_cname);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " rtcp_cname: " << rtcp_cname;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
if (vie_channel->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d already sending.", __FUNCTION__,
- video_channel);
+ LOG_F(LS_ERROR) << "channel " << video_channel << " is already sending.";
shared_data_->SetLastError(kViERtpRtcpAlreadySending);
return -1;
}
@@ -383,15 +316,9 @@ int ViERTP_RTCPImpl::SetRTCPCName(const int video_channel,
int ViERTP_RTCPImpl::GetRTCPCName(const int video_channel,
char rtcp_cname[KMaxRTCPCNameLength]) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -405,15 +332,9 @@ int ViERTP_RTCPImpl::GetRTCPCName(const int video_channel,
int ViERTP_RTCPImpl::GetRemoteRTCPCName(
const int video_channel,
char rtcp_cname[KMaxRTCPCNameLength]) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -430,33 +351,18 @@ int ViERTP_RTCPImpl::SendApplicationDefinedRTCPPacket(
unsigned int name,
const char* data,
uint16_t data_length_in_bytes) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, sub_type: %c, name: %d, data: x, length: %u)",
- __FUNCTION__, video_channel, sub_type, name,
- data_length_in_bytes);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
if (!vie_channel->Sending()) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d not sending", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpNotSending);
return -1;
}
RTCPMethod method;
if (vie_channel->GetRTCPMode(&method) != 0 || method == kRtcpOff) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: RTCP disabled on channel %d.", __FUNCTION__,
- video_channel);
shared_data_->SetLastError(kViERtpRtcpRtcpDisabled);
return -1;
}
@@ -470,23 +376,15 @@ int ViERTP_RTCPImpl::SendApplicationDefinedRTCPPacket(
}
int ViERTP_RTCPImpl::SetNACKStatus(const int video_channel, const bool enable) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, enable: %d)", __FUNCTION__, video_channel,
- enable);
+ LOG_F(LS_INFO) << "channel: " << video_channel << " "
+ << (enable ? "on" : "off");
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
if (vie_channel->SetNACKStatus(enable) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: failed for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
@@ -494,10 +392,6 @@ int ViERTP_RTCPImpl::SetNACKStatus(const int video_channel, const bool enable) {
// Update the encoder
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not get encoder for channel %d", __FUNCTION__,
- video_channel);
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
@@ -508,37 +402,24 @@ int ViERTP_RTCPImpl::SetNACKStatus(const int video_channel, const bool enable) {
int ViERTP_RTCPImpl::SetFECStatus(const int video_channel, const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, enable: %d, payload_typeRED: %u, "
- "payloadTypeFEC: %u)",
- __FUNCTION__, video_channel, enable, payload_typeRED,
- payload_typeFEC);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " enable: " << (enable ? "on" : "off")
+ << " payload_typeRED: " << static_cast<int>(payload_typeRED)
+ << " payload_typeFEC: " << static_cast<int>(payload_typeFEC);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__,
- video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
if (vie_channel->SetFECStatus(enable, payload_typeRED,
payload_typeFEC) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: failed for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
// Update the encoder.
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not get encoder for channel %d", __FUNCTION__,
- video_channel);
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
@@ -551,18 +432,13 @@ int ViERTP_RTCPImpl::SetHybridNACKFECStatus(
const bool enable,
const unsigned char payload_typeRED,
const unsigned char payload_typeFEC) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, enable: %d, payload_typeRED: %u, "
- "payloadTypeFEC: %u)",
- __FUNCTION__, video_channel, enable, payload_typeRED,
- payload_typeFEC);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " enable: " << (enable ? "on" : "off")
+ << " payload_typeRED: " << static_cast<int>(payload_typeRED)
+ << " payload_typeFEC: " << static_cast<int>(payload_typeFEC);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -570,9 +446,6 @@ int ViERTP_RTCPImpl::SetHybridNACKFECStatus(
// Update the channel status with hybrid NACK FEC mode.
if (vie_channel->SetHybridNACKFECStatus(enable, payload_typeRED,
payload_typeFEC) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: failed for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
@@ -580,10 +453,6 @@ int ViERTP_RTCPImpl::SetHybridNACKFECStatus(
// Update the encoder.
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not get encoder for channel %d", __FUNCTION__,
- video_channel);
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
@@ -592,35 +461,23 @@ int ViERTP_RTCPImpl::SetHybridNACKFECStatus(
}
int ViERTP_RTCPImpl::SetSenderBufferingMode(int video_channel,
- int target_delay_ms) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, sender target_delay: %d)",
- __FUNCTION__, video_channel, target_delay_ms);
+ int target_delay_ms) {
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " target_delay_ms: " << target_delay_ms;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not get encoder for channel %d", __FUNCTION__,
- video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
// Update the channel with buffering mode settings.
if (vie_channel->SetSenderBufferingMode(target_delay_ms) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: failed for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
@@ -631,26 +488,18 @@ int ViERTP_RTCPImpl::SetSenderBufferingMode(int video_channel,
}
int ViERTP_RTCPImpl::SetReceiverBufferingMode(int video_channel,
- int target_delay_ms) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, receiver target_delay: %d)",
- __FUNCTION__, video_channel, target_delay_ms);
+ int target_delay_ms) {
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " target_delay_ms: " << target_delay_ms;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
// Update the channel with buffering mode settings.
if (vie_channel->SetReceiverBufferingMode(target_delay_ms) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: failed for channel %d", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpUnknownError);
return -1;
}
@@ -660,18 +509,12 @@ int ViERTP_RTCPImpl::SetReceiverBufferingMode(int video_channel,
int ViERTP_RTCPImpl::SetKeyFrameRequestMethod(
const int video_channel,
const ViEKeyFrameRequestMethod method) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, method: %d)", __FUNCTION__, video_channel,
- method);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " method: " << static_cast<int>(method);
- // Get the channel.
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -685,16 +528,11 @@ int ViERTP_RTCPImpl::SetKeyFrameRequestMethod(
int ViERTP_RTCPImpl::SetTMMBRStatus(const int video_channel,
const bool enable) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, enable: %d)", __FUNCTION__, video_channel,
- enable);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " enable: " << (enable ? "on" : "off");
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -705,12 +543,12 @@ int ViERTP_RTCPImpl::SetTMMBRStatus(const int video_channel,
return 0;
}
-int ViERTP_RTCPImpl::SetRembStatus(int video_channel, bool sender,
+int ViERTP_RTCPImpl::SetRembStatus(int video_channel,
+ bool sender,
bool receiver) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "ViERTP_RTCPImpl::SetRembStatus(%d, %d, %d)", video_channel,
- sender, receiver);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " sender: " << (sender ? "on" : "off")
+ << " receiver: " << (receiver ? "on" : "off");
if (!shared_data_->channel_manager()->SetRembStatus(video_channel, sender,
receiver)) {
return -1;
@@ -721,17 +559,12 @@ int ViERTP_RTCPImpl::SetRembStatus(int video_channel, bool sender,
int ViERTP_RTCPImpl::SetSendTimestampOffsetStatus(int video_channel,
bool enable,
int id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "ViERTP_RTCPImpl::SetSendTimestampOffsetStatus(%d, %d, %d)",
- video_channel, enable, id);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " enable: " << (enable ? "on" : "off") << " id: " << id;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -745,16 +578,11 @@ int ViERTP_RTCPImpl::SetSendTimestampOffsetStatus(int video_channel,
int ViERTP_RTCPImpl::SetReceiveTimestampOffsetStatus(int video_channel,
bool enable,
int id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "ViERTP_RTCPImpl::SetReceiveTimestampOffsetStatus(%d, %d, %d)",
- video_channel, enable, id);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " enable: " << (enable ? "on" : "off") << " id: " << id;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -768,17 +596,12 @@ int ViERTP_RTCPImpl::SetReceiveTimestampOffsetStatus(int video_channel,
int ViERTP_RTCPImpl::SetSendAbsoluteSendTimeStatus(int video_channel,
bool enable,
int id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "ViERTP_RTCPImpl::SetSendAbsoluteSendTimeStatus(%d, %d, %d)",
- video_channel, enable, id);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " enable: " << (enable ? "on" : "off") << " id: " << id;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -792,16 +615,11 @@ int ViERTP_RTCPImpl::SetSendAbsoluteSendTimeStatus(int video_channel,
int ViERTP_RTCPImpl::SetReceiveAbsoluteSendTimeStatus(int video_channel,
bool enable,
int id) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "ViERTP_RTCPImpl::SetReceiveAbsoluteSendTimeStatus(%d, %d, %d)",
- video_channel, enable, id);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " enable: " << (enable ? "on" : "off") << " id: " << id;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -813,17 +631,12 @@ int ViERTP_RTCPImpl::SetReceiveAbsoluteSendTimeStatus(int video_channel,
}
int ViERTP_RTCPImpl::SetRtcpXrRrtrStatus(int video_channel, bool enable) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "ViERTP_RTCPImpl::SetRtcpXrRrtrStatus(%d, %d)",
- video_channel, enable);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " enable: " << (enable ? "on" : "off");
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -833,16 +646,11 @@ int ViERTP_RTCPImpl::SetRtcpXrRrtrStatus(int video_channel, bool enable) {
int ViERTP_RTCPImpl::SetTransmissionSmoothingStatus(int video_channel,
bool enable) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, enble: %d)", __FUNCTION__, video_channel,
- enable);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " enable: " << (enable ? "on" : "off");
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -850,19 +658,37 @@ int ViERTP_RTCPImpl::SetTransmissionSmoothingStatus(int video_channel,
return 0;
}
+int ViERTP_RTCPImpl::SetMinTransmitBitrate(int video_channel,
+ int min_transmit_bitrate_kbps) {
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " min_transmit_bitrate_kbps: " << min_transmit_bitrate_kbps;
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+ if (vie_encoder == NULL)
+ return -1;
+ vie_encoder->SetMinTransmitBitrate(min_transmit_bitrate_kbps);
+ return 0;
+}
+
+int ViERTP_RTCPImpl::SetReservedTransmitBitrate(
+ int video_channel, unsigned int reserved_transmit_bitrate_bps) {
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " reserved_transmit_bitrate_bps: "
+ << reserved_transmit_bitrate_bps;
+ if (!shared_data_->channel_manager()->SetReservedTransmitBitrate(
+ video_channel, reserved_transmit_bitrate_bps)) {
+ return -1;
+ }
+ return 0;
+}
+
int ViERTP_RTCPImpl::GetReceiveChannelRtcpStatistics(
const int video_channel,
RtcpStatistics& basic_stats,
int& rtt_ms) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -885,15 +711,9 @@ int ViERTP_RTCPImpl::GetReceiveChannelRtcpStatistics(
int ViERTP_RTCPImpl::GetSendChannelRtcpStatistics(const int video_channel,
RtcpStatistics& basic_stats,
int& rtt_ms) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -916,15 +736,9 @@ int ViERTP_RTCPImpl::GetSendChannelRtcpStatistics(const int video_channel,
int ViERTP_RTCPImpl::GetRtpStatistics(const int video_channel,
StreamDataCounters& sent,
StreamDataCounters& received) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -938,20 +752,28 @@ int ViERTP_RTCPImpl::GetRtpStatistics(const int video_channel,
return 0;
}
+int ViERTP_RTCPImpl::GetRtcpPacketTypeCounters(
+ int video_channel,
+ RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const {
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel) {
+ shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+ return -1;
+ }
+ vie_channel->GetRtcpPacketTypeCounters(packets_sent, packets_received);
+ return 0;
+}
+
int ViERTP_RTCPImpl::GetBandwidthUsage(const int video_channel,
unsigned int& total_bitrate_sent,
unsigned int& video_bitrate_sent,
unsigned int& fec_bitrate_sent,
unsigned int& nackBitrateSent) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -965,58 +787,58 @@ int ViERTP_RTCPImpl::GetBandwidthUsage(const int video_channel,
int ViERTP_RTCPImpl::GetEstimatedSendBandwidth(
const int video_channel,
unsigned int* estimated_bandwidth) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
- ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
- ViEEncoder* vie_encoder = cs.Encoder(video_channel);
- if (!vie_encoder) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not get encoder for channel %d", __FUNCTION__,
- video_channel);
- shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+ if (!shared_data_->channel_manager()->GetEstimatedSendBandwidth(
+ video_channel, estimated_bandwidth)) {
return -1;
}
- return vie_encoder->EstimatedSendBandwidth(
- static_cast<uint32_t*>(estimated_bandwidth));
+ return 0;
}
int ViERTP_RTCPImpl::GetEstimatedReceiveBandwidth(
const int video_channel,
unsigned int* estimated_bandwidth) const {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ if (!shared_data_->channel_manager()->GetEstimatedReceiveBandwidth(
+ video_channel, estimated_bandwidth)) {
+ return -1;
+ }
+ return 0;
+}
+
+int ViERTP_RTCPImpl::GetReceiveBandwidthEstimatorStats(
+ const int video_channel,
+ ReceiveBandwidthEstimatorStats* output) const {
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Could not get channel %d", __FUNCTION__,
- video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
- vie_channel->GetEstimatedReceiveBandwidth(
- static_cast<uint32_t*>(estimated_bandwidth));
+ vie_channel->GetReceiveBandwidthEstimatorStats(output);
+ return 0;
+}
+
+int ViERTP_RTCPImpl::GetPacerQueuingDelayMs(
+ const int video_channel, int* delay_ms) const {
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+ if (!vie_encoder) {
+ shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+ return -1;
+ }
+ *delay_ms = vie_encoder->PacerQueuingDelayMs();
return 0;
}
int ViERTP_RTCPImpl::StartRTPDump(const int video_channel,
const char file_nameUTF8[1024],
RTPDirections direction) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, file_name: %s, direction: %d)", __FUNCTION__,
- video_channel, file_nameUTF8, direction);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " filename: " << file_nameUTF8
+ << " direction: " << static_cast<int>(direction);
assert(FileWrapper::kMaxFileNameSize == 1024);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -1029,16 +851,11 @@ int ViERTP_RTCPImpl::StartRTPDump(const int video_channel,
int ViERTP_RTCPImpl::StopRTPDump(const int video_channel,
RTPDirections direction) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d, direction: %d)", __FUNCTION__, video_channel,
- direction);
+ LOG_F(LS_INFO) << "channel: " << video_channel
+ << " direction: " << static_cast<int>(direction);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -1051,15 +868,10 @@ int ViERTP_RTCPImpl::StopRTPDump(const int video_channel,
int ViERTP_RTCPImpl::RegisterRTPObserver(const int video_channel,
ViERTPObserver& observer) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -1071,15 +883,10 @@ int ViERTP_RTCPImpl::RegisterRTPObserver(const int video_channel,
}
int ViERTP_RTCPImpl::DeregisterRTPObserver(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -1092,15 +899,10 @@ int ViERTP_RTCPImpl::DeregisterRTPObserver(const int video_channel) {
int ViERTP_RTCPImpl::RegisterRTCPObserver(const int video_channel,
ViERTCPObserver& observer) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -1112,15 +914,10 @@ int ViERTP_RTCPImpl::RegisterRTCPObserver(const int video_channel,
}
int ViERTP_RTCPImpl::DeregisterRTCPObserver(const int video_channel) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -1133,15 +930,10 @@ int ViERTP_RTCPImpl::DeregisterRTCPObserver(const int video_channel) {
int ViERTP_RTCPImpl::RegisterSendChannelRtcpStatisticsCallback(
int video_channel, RtcpStatisticsCallback* callback) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -1151,15 +943,10 @@ int ViERTP_RTCPImpl::RegisterSendChannelRtcpStatisticsCallback(
int ViERTP_RTCPImpl::DeregisterSendChannelRtcpStatisticsCallback(
int video_channel, RtcpStatisticsCallback* callback) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -1168,22 +955,30 @@ int ViERTP_RTCPImpl::DeregisterSendChannelRtcpStatisticsCallback(
}
int ViERTP_RTCPImpl::RegisterReceiveChannelRtcpStatisticsCallback(
- int channel, RtcpStatisticsCallback* callback) {
- // TODO(sprang): Implement
- return -1;
+ const int video_channel,
+ RtcpStatisticsCallback* callback) {
+ LOG_F(LS_INFO) << "channel " << video_channel;
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ assert(vie_channel != NULL);
+ vie_channel->RegisterReceiveChannelRtcpStatisticsCallback(callback);
+ return 0;
}
int ViERTP_RTCPImpl::DeregisterReceiveChannelRtcpStatisticsCallback(
- int channel, RtcpStatisticsCallback* callback) {
- // TODO(sprang): Implement
- return -1;
+ const int video_channel,
+ RtcpStatisticsCallback* callback) {
+ LOG_F(LS_INFO) << "channel " << video_channel;
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ assert(vie_channel != NULL);
+ vie_channel->RegisterReceiveChannelRtcpStatisticsCallback(NULL);
+ return 0;
}
int ViERTP_RTCPImpl::RegisterSendChannelRtpStatisticsCallback(
int video_channel, StreamDataCountersCallback* callback) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
assert(vie_channel != NULL);
@@ -1193,9 +988,7 @@ int ViERTP_RTCPImpl::RegisterSendChannelRtpStatisticsCallback(
int ViERTP_RTCPImpl::DeregisterSendChannelRtpStatisticsCallback(
int video_channel, StreamDataCountersCallback* callback) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
assert(vie_channel != NULL);
@@ -1204,27 +997,32 @@ int ViERTP_RTCPImpl::DeregisterSendChannelRtpStatisticsCallback(
}
int ViERTP_RTCPImpl::RegisterReceiveChannelRtpStatisticsCallback(
- int channel, StreamDataCountersCallback* callback) {
- // TODO(sprang): Implement
- return -1;
+ const int video_channel,
+ StreamDataCountersCallback* callback) {
+ LOG_F(LS_INFO) << "channel " << video_channel;
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ assert(vie_channel != NULL);
+ vie_channel->RegisterReceiveChannelRtpStatisticsCallback(callback);
+ return 0;
}
int ViERTP_RTCPImpl::DeregisterReceiveChannelRtpStatisticsCallback(
- int channel, StreamDataCountersCallback* callback) {
- // TODO(sprang): Implement
- return -1;
+ const int video_channel,
+ StreamDataCountersCallback* callback) {
+ LOG_F(LS_INFO) << "channel " << video_channel;
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ assert(vie_channel != NULL);
+ vie_channel->RegisterReceiveChannelRtpStatisticsCallback(NULL);
+ return 0;
}
// Called whenever the send bitrate is updated.
int ViERTP_RTCPImpl::RegisterSendBitrateObserver(
const int video_channel,
BitrateStatisticsObserver* observer) {
- WEBRTC_TRACE(kTraceApiCall,
- kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)",
- __FUNCTION__,
- video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
assert(vie_channel != NULL);
@@ -1235,12 +1033,7 @@ int ViERTP_RTCPImpl::RegisterSendBitrateObserver(
int ViERTP_RTCPImpl::DeregisterSendBitrateObserver(
const int video_channel,
BitrateStatisticsObserver* observer) {
- WEBRTC_TRACE(kTraceApiCall,
- kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)",
- __FUNCTION__,
- video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
assert(vie_channel != NULL);
@@ -1250,15 +1043,10 @@ int ViERTP_RTCPImpl::DeregisterSendBitrateObserver(
int ViERTP_RTCPImpl::RegisterSendFrameCountObserver(
int video_channel, FrameCountObserver* callback) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
@@ -1268,20 +1056,14 @@ int ViERTP_RTCPImpl::RegisterSendFrameCountObserver(
int ViERTP_RTCPImpl::DeregisterSendFrameCountObserver(
int video_channel, FrameCountObserver* callback) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s(channel: %d)", __FUNCTION__, video_channel);
+ LOG_F(LS_INFO) << "channel " << video_channel;
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
- WEBRTC_TRACE(kTraceError, kTraceVideo,
- ViEId(shared_data_->instance_id(), video_channel),
- "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
vie_channel->RegisterSendFrameCountObserver(NULL);
return 0;
}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video_engine/vie_rtp_rtcp_impl.h b/chromium/third_party/webrtc/video_engine/vie_rtp_rtcp_impl.h
index caabd2cdade..5eec0efe14e 100644
--- a/chromium/third_party/webrtc/video_engine/vie_rtp_rtcp_impl.h
+++ b/chromium/third_party/webrtc/video_engine/vie_rtp_rtcp_impl.h
@@ -41,6 +41,7 @@ class ViERTP_RTCPImpl
unsigned int CSRCs[kRtpCsrcSize]) const;
virtual int SetRtxSendPayloadType(const int video_channel,
const uint8_t payload_type);
+ virtual int SetPadWithRedundantPayloads(int video_channel, bool enable);
virtual int SetRtxReceivePayloadType(const int video_channel,
const uint8_t payload_type);
virtual int SetStartSequenceNumber(const int video_channel,
@@ -90,6 +91,10 @@ class ViERTP_RTCPImpl
int id);
virtual int SetRtcpXrRrtrStatus(int video_channel, bool enable);
virtual int SetTransmissionSmoothingStatus(int video_channel, bool enable);
+ virtual int SetMinTransmitBitrate(int video_channel,
+ int min_transmit_bitrate_kbps);
+ virtual int SetReservedTransmitBitrate(
+ int video_channel, unsigned int reserved_transmit_bitrate_bps);
virtual int GetReceiveChannelRtcpStatistics(const int video_channel,
RtcpStatistics& basic_stats,
int& rtt_ms) const;
@@ -99,6 +104,10 @@ class ViERTP_RTCPImpl
virtual int GetRtpStatistics(const int video_channel,
StreamDataCounters& sent,
StreamDataCounters& received) const;
+ virtual int GetRtcpPacketTypeCounters(
+ int video_channel,
+ RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const;
virtual int GetBandwidthUsage(const int video_channel,
unsigned int& total_bitrate_sent,
unsigned int& video_bitrate_sent,
@@ -110,6 +119,10 @@ class ViERTP_RTCPImpl
virtual int GetEstimatedReceiveBandwidth(
const int video_channel,
unsigned int* estimated_bandwidth) const;
+ virtual int GetReceiveBandwidthEstimatorStats(
+ const int video_channel, ReceiveBandwidthEstimatorStats* output) const;
+ virtual int GetPacerQueuingDelayMs(const int video_channel,
+ int* delay_ms) const;
virtual int StartRTPDump(const int video_channel,
const char file_nameUTF8[1024],
RTPDirections direction);
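The hunks above expose several new RTP/RTCP controls on the impl class. A minimal usage sketch follows, assuming the public ViERTP_RTCP interface (vie_rtp_rtcp.h, not part of this diff) declares the same methods as the virtuals above, and that video_engine and channel are an already-initialized VideoEngine and an existing video channel id; the numeric values are placeholders.

// Sketch only: exercises the calls added in this patch through the assumed
// public ViERTP_RTCP interface.
webrtc::ViERTP_RTCP* rtp_rtcp = webrtc::ViERTP_RTCP::GetInterface(video_engine);
if (rtp_rtcp) {
  rtp_rtcp->SetMinTransmitBitrate(channel, 300);          // Keep at least ~300 kbps on the wire.
  rtp_rtcp->SetReservedTransmitBitrate(channel, 100000);  // Reserve 100 kbps of the estimate.
  rtp_rtcp->SetPadWithRedundantPayloads(channel, true);   // Prefer RTX payloads over pure padding.

  webrtc::RtcpPacketTypeCounter sent, received;
  rtp_rtcp->GetRtcpPacketTypeCounters(channel, &sent, &received);

  int pacer_delay_ms = 0;
  rtp_rtcp->GetPacerQueuingDelayMs(channel, &pacer_delay_ms);

  rtp_rtcp->Release();
}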
diff --git a/chromium/third_party/webrtc/video_engine/vie_sender.cc b/chromium/third_party/webrtc/video_engine/vie_sender.cc
index b90d56f29a7..349bc72fb90 100644
--- a/chromium/third_party/webrtc/video_engine/vie_sender.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_sender.cc
@@ -21,18 +21,11 @@ namespace webrtc {
ViESender::ViESender(int channel_id)
: channel_id_(channel_id),
critsect_(CriticalSectionWrapper::CreateCriticalSection()),
- external_encryption_(NULL),
- encryption_buffer_(NULL),
transport_(NULL),
rtp_dump_(NULL) {
}
ViESender::~ViESender() {
- if (encryption_buffer_) {
- delete[] encryption_buffer_;
- encryption_buffer_ = NULL;
- }
-
if (rtp_dump_) {
rtp_dump_->Stop();
RtpDump::DestroyRtpDump(rtp_dump_);
@@ -40,32 +33,6 @@ ViESender::~ViESender() {
}
}
-int ViESender::RegisterExternalEncryption(Encryption* encryption) {
- CriticalSectionScoped cs(critsect_.get());
- if (external_encryption_) {
- return -1;
- }
- encryption_buffer_ = new uint8_t[kViEMaxMtu];
- if (encryption_buffer_ == NULL) {
- return -1;
- }
- external_encryption_ = encryption;
- return 0;
-}
-
-int ViESender::DeregisterExternalEncryption() {
- CriticalSectionScoped cs(critsect_.get());
- if (external_encryption_ == NULL) {
- return -1;
- }
- if (encryption_buffer_) {
- delete[] encryption_buffer_;
- encryption_buffer_ = NULL;
- }
- external_encryption_ = NULL;
- return 0;
-}
-
int ViESender::RegisterSendTransport(Transport* transport) {
CriticalSectionScoped cs(critsect_.get());
if (transport_) {
@@ -92,16 +59,12 @@ int ViESender::StartRTPDump(const char file_nameUTF8[1024]) {
} else {
rtp_dump_ = RtpDump::CreateRtpDump();
if (rtp_dump_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
- "StartSRTPDump: Failed to create RTP dump");
return -1;
}
}
if (rtp_dump_->Start(file_nameUTF8) != 0) {
RtpDump::DestroyRtpDump(rtp_dump_);
rtp_dump_ = NULL;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
- "StartRTPDump: Failed to start RTP dump");
return -1;
}
return 0;
@@ -112,15 +75,10 @@ int ViESender::StopRTPDump() {
if (rtp_dump_) {
if (rtp_dump_->IsActive()) {
rtp_dump_->Stop();
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
- "StopRTPDump: Dump not active");
}
RtpDump::DestroyRtpDump(rtp_dump_);
rtp_dump_ = NULL;
} else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
- "StopRTPDump: RTP dump not started");
return -1;
}
return 0;
@@ -134,76 +92,27 @@ int ViESender::SendPacket(int vie_id, const void* data, int len) {
}
assert(ChannelId(vie_id) == channel_id_);
- // TODO(mflodman) Change decrypt to get rid of this cast.
- void* tmp_ptr = const_cast<void*>(data);
- unsigned char* send_packet = static_cast<unsigned char*>(tmp_ptr);
-
- // Data length for packets sent to possible encryption and to the transport.
- int send_packet_length = len;
-
if (rtp_dump_) {
- rtp_dump_->DumpPacket(send_packet, send_packet_length);
+ rtp_dump_->DumpPacket(static_cast<const uint8_t*>(data),
+ static_cast<uint16_t>(len));
}
- if (external_encryption_) {
- // Encryption buffer size.
- int encrypted_packet_length = kViEMaxMtu;
-
- external_encryption_->encrypt(channel_id_, send_packet, encryption_buffer_,
- send_packet_length, &encrypted_packet_length);
- send_packet = encryption_buffer_;
- send_packet_length = encrypted_packet_length;
- }
- const int bytes_sent = transport_->SendPacket(channel_id_, send_packet,
- send_packet_length);
- if (bytes_sent != send_packet_length) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, channel_id_,
- "ViESender::SendPacket - Transport failed to send RTP packet");
- }
- return bytes_sent;
+ return transport_->SendPacket(channel_id_, data, len);
}
int ViESender::SendRTCPPacket(int vie_id, const void* data, int len) {
CriticalSectionScoped cs(critsect_.get());
-
if (!transport_) {
return -1;
}
-
assert(ChannelId(vie_id) == channel_id_);
- // Prepare for possible encryption and sending.
- // TODO(mflodman) Change decrypt to get rid of this cast.
- void* tmp_ptr = const_cast<void*>(data);
- unsigned char* send_packet = static_cast<unsigned char*>(tmp_ptr);
-
- // Data length for packets sent to possible encryption and to the transport.
- int send_packet_length = len;
-
if (rtp_dump_) {
- rtp_dump_->DumpPacket(send_packet, send_packet_length);
+ rtp_dump_->DumpPacket(static_cast<const uint8_t*>(data),
+ static_cast<uint16_t>(len));
}
- if (external_encryption_) {
- // Encryption buffer size.
- int encrypted_packet_length = kViEMaxMtu;
-
- external_encryption_->encrypt_rtcp(
- channel_id_, send_packet, encryption_buffer_, send_packet_length,
- &encrypted_packet_length);
- send_packet = encryption_buffer_;
- send_packet_length = encrypted_packet_length;
- }
-
- const int bytes_sent = transport_->SendRTCPPacket(channel_id_, send_packet,
- send_packet_length);
- if (bytes_sent != send_packet_length) {
- WEBRTC_TRACE(
- webrtc::kTraceWarning, webrtc::kTraceVideo, channel_id_,
- "ViESender::SendRTCPPacket - Transport failed to send RTCP packet"
- " (%d vs %d)", bytes_sent, send_packet_length);
- }
- return bytes_sent;
+ return transport_->SendRTCPPacket(channel_id_, data, len);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video_engine/vie_sender.h b/chromium/third_party/webrtc/video_engine/vie_sender.h
index 03b9d5893db..1eecc06c76c 100644
--- a/chromium/third_party/webrtc/video_engine/vie_sender.h
+++ b/chromium/third_party/webrtc/video_engine/vie_sender.h
@@ -8,8 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-// ViESender is responsible for encrypting, if enabled, packets and send to
-// network.
+// ViESender is responsible for sending packets to network.
#ifndef WEBRTC_VIDEO_ENGINE_VIE_SENDER_H_
#define WEBRTC_VIDEO_ENGINE_VIE_SENDER_H_
@@ -32,10 +31,6 @@ class ViESender: public Transport {
explicit ViESender(const int32_t channel_id);
~ViESender();
- // Registers an encryption class to use before sending packets.
- int RegisterExternalEncryption(Encryption* encryption);
- int DeregisterExternalEncryption();
-
// Registers transport to use for sending RTP and RTCP.
int RegisterSendTransport(Transport* transport);
int DeregisterSendTransport();
@@ -53,8 +48,6 @@ class ViESender: public Transport {
scoped_ptr<CriticalSectionWrapper> critsect_;
- Encryption* external_encryption_;
- uint8_t* encryption_buffer_;
Transport* transport_;
RtpDump* rtp_dump_;
};
diff --git a/chromium/third_party/webrtc/video_engine/vie_sync_module.cc b/chromium/third_party/webrtc/video_engine/vie_sync_module.cc
index 89da022bf54..a5ca6520d7c 100644
--- a/chromium/third_party/webrtc/video_engine/vie_sync_module.cc
+++ b/chromium/third_party/webrtc/video_engine/vie_sync_module.cc
@@ -14,7 +14,7 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
#include "webrtc/video_engine/stream_synchronization.h"
#include "webrtc/video_engine/vie_channel.h"
@@ -30,31 +30,24 @@ int UpdateMeasurements(StreamSynchronization::Measurements* stream,
return -1;
if (!receiver.LastReceivedTimeMs(&stream->latest_receive_time_ms))
return -1;
- synchronization::RtcpMeasurement measurement;
- if (0 != rtp_rtcp.RemoteNTP(&measurement.ntp_secs,
- &measurement.ntp_frac,
+
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ if (0 != rtp_rtcp.RemoteNTP(&ntp_secs,
+ &ntp_frac,
NULL,
NULL,
- &measurement.rtp_timestamp)) {
+ &rtp_timestamp)) {
return -1;
}
- if (measurement.ntp_secs == 0 && measurement.ntp_frac == 0) {
+
+ bool new_rtcp_sr = false;
+ if (!UpdateRtcpList(
+ ntp_secs, ntp_frac, rtp_timestamp, &stream->rtcp, &new_rtcp_sr)) {
return -1;
}
- for (synchronization::RtcpList::iterator it = stream->rtcp.begin();
- it != stream->rtcp.end(); ++it) {
- if (measurement.ntp_secs == (*it).ntp_secs &&
- measurement.ntp_frac == (*it).ntp_frac) {
- // This RTCP has already been added to the list.
- return 0;
- }
- }
- // We need two RTCP SR reports to map between RTP and NTP. More than two will
- // not improve the mapping.
- if (stream->rtcp.size() == 2) {
- stream->rtcp.pop_back();
- }
- stream->rtcp.push_front(measurement);
+
return 0;
}
@@ -110,8 +103,6 @@ int32_t ViESyncModule::Process() {
last_sync_time_ = TickTime::Now();
const int current_video_delay_ms = vcm_->Delay();
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, vie_channel_->Id(),
- "Video delay (JB + decoder) is %d ms", current_video_delay_ms);
if (voe_channel_id_ == -1) {
return 0;
@@ -124,11 +115,6 @@ int32_t ViESyncModule::Process() {
if (voe_sync_interface_->GetDelayEstimate(voe_channel_id_,
&audio_jitter_buffer_delay_ms,
&playout_buffer_delay_ms) != 0) {
- // Could not get VoE delay value, probably not a valid channel Id or
- // the channel have not received enough packets.
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, vie_channel_->Id(),
- "%s: VE_GetDelayEstimate error for voice_channel %d",
- __FUNCTION__, voe_channel_id_);
return 0;
}
const int current_audio_delay_ms = audio_jitter_buffer_delay_ms +
@@ -174,15 +160,9 @@ int32_t ViESyncModule::Process() {
return 0;
}
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, vie_channel_->Id(),
- "Set delay current(a=%d v=%d rel=%d) target(a=%d v=%d)",
- current_audio_delay_ms, current_video_delay_ms,
- relative_delay_ms,
- target_audio_delay_ms, target_video_delay_ms);
if (voe_sync_interface_->SetMinimumPlayoutDelay(
voe_channel_id_, target_audio_delay_ms) == -1) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, vie_channel_->Id(),
- "Error setting voice delay");
+ LOG(LS_ERROR) << "Error setting voice delay.";
}
vcm_->SetMinimumPlayoutDelay(target_video_delay_ms);
return 0;
@@ -190,9 +170,8 @@ int32_t ViESyncModule::Process() {
int ViESyncModule::SetTargetBufferingDelay(int target_delay_ms) {
CriticalSectionScoped cs(data_cs_.get());
- if (!voe_sync_interface_) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, vie_channel_->Id(),
- "voe_sync_interface_ NULL, can't set playout delay.");
+ if (!voe_sync_interface_) {
+ LOG(LS_ERROR) << "voe_sync_interface_ NULL, can't set playout delay.";
return -1;
}
sync_->SetTargetBufferingDelay(target_delay_ms);
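For reference, a sketch of what the new UpdateRtcpList() helper is expected to do, reconstructed from the list-handling code removed above. The real definition lives outside this file (presumably alongside StreamSynchronization), it relies on the synchronization::RtcpMeasurement/RtcpList types used in the removed lines, and the exact new_rtcp_sr semantics are an assumption.

// Sketch only: plausible reconstruction of UpdateRtcpList() based on the
// logic this patch removes from UpdateMeasurements().
bool UpdateRtcpList(uint32_t ntp_secs, uint32_t ntp_frac, uint32_t rtp_timestamp,
                    synchronization::RtcpList* rtcp_list, bool* new_rtcp_sr) {
  *new_rtcp_sr = false;
  if (ntp_secs == 0 && ntp_frac == 0)
    return false;  // No RTCP SR received yet.
  for (synchronization::RtcpList::iterator it = rtcp_list->begin();
       it != rtcp_list->end(); ++it) {
    if (it->ntp_secs == ntp_secs && it->ntp_frac == ntp_frac)
      return true;  // This SR is already stored.
  }
  // Two SR reports suffice to map RTP time to NTP time; drop the oldest.
  if (rtcp_list->size() == 2)
    rtcp_list->pop_back();
  synchronization::RtcpMeasurement measurement;
  measurement.ntp_secs = ntp_secs;
  measurement.ntp_frac = ntp_frac;
  measurement.rtp_timestamp = rtp_timestamp;
  rtcp_list->push_front(measurement);
  *new_rtcp_sr = true;
  return true;
}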
diff --git a/chromium/third_party/webrtc/video_engine_tests.isolate b/chromium/third_party/webrtc/video_engine_tests.isolate
index c8ec481e5f7..40454bd61bd 100644
--- a/chromium/third_party/webrtc/video_engine_tests.isolate
+++ b/chromium/third_party/webrtc/video_engine_tests.isolate
@@ -8,29 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../data/',
- '../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../testing/test_env.py',
'<(PRODUCT_DIR)/video_engine_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../DEPS',
- '../resources/foreman_cif_short.yuv',
- '../testing/test_env.py',
+ '<(DEPTH)/DEPS',
+ '<(DEPTH)/resources/foreman_cif_short.yuv',
'<(PRODUCT_DIR)/video_engine_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/video_receive_stream.h b/chromium/third_party/webrtc/video_receive_stream.h
index e54872812e6..2584e204bdd 100644
--- a/chromium/third_party/webrtc/video_receive_stream.h
+++ b/chromium/third_party/webrtc/video_receive_stream.h
@@ -8,9 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_VIDEO_RECEIVE_STREAM_H_
-#define WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_VIDEO_RECEIVE_STREAM_H_
+#ifndef WEBRTC_VIDEO_RECEIVE_STREAM_H_
+#define WEBRTC_VIDEO_RECEIVE_STREAM_H_
+#include <map>
#include <string>
#include <vector>
@@ -25,10 +26,7 @@ namespace webrtc {
namespace newapi {
// RTCP mode to use. Compound mode is described by RFC 4585 and reduced-size
// RTCP mode is described by RFC 5506.
-enum RtcpMode {
- kRtcpCompound,
- kRtcpReducedSize
-};
+enum RtcpMode { kRtcpCompound, kRtcpReducedSize };
} // namespace newapi
class VideoDecoder;
@@ -57,39 +55,22 @@ struct ExternalVideoDecoder {
class VideoReceiveStream {
public:
- struct Stats {
+ struct Stats : public StreamStats {
Stats()
: network_frame_rate(0),
decode_frame_rate(0),
render_frame_rate(0),
- key_frames(0),
- delta_frames(0),
- video_packets(0),
- retransmitted_packets(0),
- fec_packets(0),
- padding_packets(0),
+ avg_delay_ms(0),
discarded_packets(0),
- received_bitrate_bps(0),
- receive_side_delay_ms(0) {}
- RtpStatistics rtp_stats;
+ ssrc(0) {}
+
int network_frame_rate;
int decode_frame_rate;
int render_frame_rate;
- uint32_t key_frames;
- uint32_t delta_frames;
- uint32_t video_packets;
- uint32_t retransmitted_packets;
- uint32_t fec_packets;
- uint32_t padding_packets;
+ int avg_delay_ms;
uint32_t discarded_packets;
- int32_t received_bitrate_bps;
- int receive_side_delay_ms;
- };
-
- class StatsCallback {
- public:
- virtual ~StatsCallback() {}
- virtual void ReceiveStats(const Stats& stats) = 0;
+ uint32_t ssrc;
+ std::string c_name;
};
struct Config {
@@ -119,6 +100,15 @@ class VideoReceiveStream {
// See RtcpMode for description.
newapi::RtcpMode rtcp_mode;
+ // Extended RTCP settings.
+ struct RtcpXr {
+ RtcpXr() : receiver_reference_time_report(false) {}
+
+ // True if RTCP Receiver Reference Time Report Block extension
+ // (RFC 3611) should be enabled.
+ bool receiver_reference_time_report;
+ } rtcp_xr;
+
// See draft-alvestrand-rmcat-remb for information.
bool remb;
@@ -128,9 +118,21 @@ class VideoReceiveStream {
// See FecConfig for description.
FecConfig fec;
- // RTX settings for possible payloads. RTX is disabled if the vector is
- // empty.
- std::vector<RtxConfig> rtx;
+ // RTX settings for incoming video payloads that may be received. RTX is
+ // disabled if there's no config present.
+ struct Rtx {
+ Rtx() : ssrc(0), payload_type(0) {}
+
+ // SSRCs to use for the RTX streams.
+ uint32_t ssrc;
+
+ // Payload type to use for the RTX stream.
+ int payload_type;
+ };
+
+ // Map from video RTP payload type -> RTX config.
+ typedef std::map<int, Rtx> RtxMap;
+ RtxMap rtx;
// RTP header extensions used for the received stream.
std::vector<RtpExtension> extensions;
@@ -167,13 +169,11 @@ class VideoReceiveStream {
// Target delay in milliseconds. A positive value indicates this stream is
// used for streaming instead of a real-time call.
int target_delay_ms;
-
- // Callback for periodically receiving receiver stats.
- StatsCallback* stats_callback;
};
- virtual void StartReceiving() = 0;
- virtual void StopReceiving() = 0;
+ virtual void Start() = 0;
+ virtual void Stop() = 0;
+ virtual Stats GetStats() const = 0;
// TODO(mflodman) Replace this with callback.
virtual void GetCurrentReceiveCodec(VideoCodec* receive_codec) = 0;
@@ -184,4 +184,4 @@ class VideoReceiveStream {
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_VIDEO_RECEIVE_STREAM_H_
+#endif // WEBRTC_VIDEO_RECEIVE_STREAM_H_
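
The receive-side header above drops the push-style StatsCallback in favour of a polled GetStats(), folds the per-packet counters into the shared StreamStats base, adds an RTCP XR receiver-reference-time toggle, and replaces the RtxConfig vector with a map keyed by the protected media payload type. A minimal usage sketch, assuming the rtx and rtcp_xr members sit directly on Config and using made-up SSRC and payload-type values:

  // Sketch only: member paths and all numeric values are assumptions.
  webrtc::VideoReceiveStream::Config config;
  config.rtcp_xr.receiver_reference_time_report = true;  // Enable RFC 3611 RRTR.

  webrtc::VideoReceiveStream::Config::Rtx rtx;
  rtx.ssrc = 0x22222222;   // SSRC the RTX stream arrives on (example value).
  rtx.payload_type = 97;   // Negotiated RTX payload type (example value).
  config.rtx[100] = rtx;   // Key: media payload type protected by this RTX stream.

  // Stats are now pulled instead of pushed:
  // webrtc::VideoReceiveStream::Stats stats = receive_stream->GetStats();

Keying the map by the media payload type ties each protected payload to its own RTX payload type and SSRC, which the earlier flat vector could not express as directly.
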
diff --git a/chromium/third_party/webrtc/video_send_stream.h b/chromium/third_party/webrtc/video_send_stream.h
index 9d4695dfe7f..a1bc17828d6 100644
--- a/chromium/third_party/webrtc/video_send_stream.h
+++ b/chromium/third_party/webrtc/video_send_stream.h
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_VIDEO_SEND_STREAM_H_
-#define WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_VIDEO_SEND_STREAM_H_
+#ifndef WEBRTC_VIDEO_SEND_STREAM_H_
+#define WEBRTC_VIDEO_SEND_STREAM_H_
+#include <map>
#include <string>
-#include <vector>
#include "webrtc/common_types.h"
#include "webrtc/config.h"
@@ -29,7 +29,6 @@ class VideoSendStreamInput {
// These methods do not lock internally and must be called sequentially.
// If your application switches input sources, synchronization must be done
// externally to make sure that any old frames are not delivered concurrently.
- virtual void PutFrame(const I420VideoFrame& video_frame) = 0;
virtual void SwapFrame(I420VideoFrame* video_frame) = 0;
protected:
@@ -41,32 +40,18 @@ class VideoSendStream {
struct Stats {
Stats()
: input_frame_rate(0),
- encode_frame(0),
- key_frames(0),
- delta_frames(0),
- video_packets(0),
- retransmitted_packets(0),
- fec_packets(0),
- padding_packets(0),
- send_bitrate_bps(0),
- delay_ms(0) {}
- RtpStatistics rtp;
- int input_frame_rate;
- int encode_frame;
- uint32_t key_frames;
- uint32_t delta_frames;
- uint32_t video_packets;
- uint32_t retransmitted_packets;
- uint32_t fec_packets;
- uint32_t padding_packets;
- int32_t send_bitrate_bps;
- int delay_ms;
- };
+ encode_frame_rate(0),
+ avg_delay_ms(0),
+ max_delay_ms(0),
+ suspended(false) {}
- class StatsCallback {
- public:
- virtual ~StatsCallback() {}
- virtual void ReceiveStats(const std::vector<Stats>& stats) = 0;
+ int input_frame_rate;
+ int encode_frame_rate;
+ int avg_delay_ms;
+ int max_delay_ms;
+ bool suspended;
+ std::string c_name;
+ std::map<uint32_t, StreamStats> substreams;
};
struct Config {
@@ -75,23 +60,39 @@ class VideoSendStream {
post_encode_callback(NULL),
local_renderer(NULL),
render_delay_ms(0),
- encoder(NULL),
- internal_source(false),
target_delay_ms(0),
- pacing(false),
- stats_callback(NULL),
suspend_below_min_bitrate(false) {}
- VideoCodec codec;
+ std::string ToString() const;
+
+ struct EncoderSettings {
+ EncoderSettings() : payload_type(-1), encoder(NULL) {}
+ std::string ToString() const;
+
+ std::string payload_name;
+ int payload_type;
+
+ // Uninitialized VideoEncoder instance to be used for encoding. Will be
+ // initialized from inside the VideoSendStream.
+ webrtc::VideoEncoder* encoder;
+ } encoder_settings;
static const size_t kDefaultMaxPacketSize = 1500 - 40; // TCP over IPv4.
struct Rtp {
- Rtp() : max_packet_size(kDefaultMaxPacketSize) {}
+ Rtp()
+ : max_packet_size(kDefaultMaxPacketSize),
+ min_transmit_bitrate_bps(0) {}
+ std::string ToString() const;
std::vector<uint32_t> ssrcs;
// Max RTP packet size delivered to send transport from VideoEngine.
size_t max_packet_size;
+ // Padding will be used up to this bitrate regardless of the bitrate
+ // produced by the encoder. Padding above what's actually produced by the
+ // encoder helps maintain a higher bitrate estimate.
+ int min_transmit_bitrate_bps;
+
// RTP header extensions to use for this send stream.
std::vector<RtpExtension> extensions;
@@ -101,8 +102,21 @@ class VideoSendStream {
// See FecConfig for description.
FecConfig fec;
- // See RtxConfig for description.
- RtxConfig rtx;
+ // Settings for RTP retransmission payload format, see RFC 4588 for
+ // details.
+ struct Rtx {
+ Rtx() : payload_type(-1), pad_with_redundant_payloads(false) {}
+ std::string ToString() const;
+ // SSRCs to use for the RTX streams.
+ std::vector<uint32_t> ssrcs;
+
+ // Payload type to use for the RTX stream.
+ int payload_type;
+ // Use redundant payloads to pad the bitrate. Instead of padding with
+ // randomized packets, we will preemptively retransmit media packets on
+ // the RTX stream.
+ bool pad_with_redundant_payloads;
+ } rtx;
// RTCP CNAME, see RFC 3550.
std::string c_name;
@@ -122,32 +136,16 @@ class VideoSendStream {
// Expected delay needed by the renderer, i.e. the frame will be delivered
// this many milliseconds, if possible, earlier than expected render time.
- // Only valid if |renderer| is set.
+ // Only valid if |local_renderer| is set.
int render_delay_ms;
- // TODO(mflodman) Move VideoEncoder to common_types.h and redefine.
- // External encoding. 'encoder' is the external encoder instance and
- // 'internal_source' is set to true if the encoder also captures the video
- // frames.
- VideoEncoder* encoder;
- bool internal_source;
-
// Target delay in milliseconds. A positive value indicates this stream is
// used for streaming instead of a real-time call.
int target_delay_ms;
- // True if network a send-side packet buffer should be used to pace out
- // packets onto the network.
- bool pacing;
-
- // Callback for periodically receiving send stats.
- StatsCallback* stats_callback;
-
// True if the stream should be suspended when the available bitrate falls
// below the minimum configured bitrate. If this variable is false, the
// stream may send at a rate higher than the estimated available bitrate.
- // Enabling suspend_below_min_bitrate will also enable pacing and padding,
- // otherwise, the video will be unable to recover from suspension.
bool suspend_below_min_bitrate;
};
@@ -155,11 +153,16 @@ class VideoSendStream {
// VideoSendStream is valid.
virtual VideoSendStreamInput* Input() = 0;
- virtual void StartSending() = 0;
- virtual void StopSending() = 0;
+ virtual void Start() = 0;
+ virtual void Stop() = 0;
+
+ // Set which streams to send. Must have at least as many SSRCs as configured
+ // in the config. Encoder settings are passed on to the encoder instance along
+ // with the VideoStream settings.
+ virtual bool ReconfigureVideoEncoder(const std::vector<VideoStream>& streams,
+ const void* encoder_settings) = 0;
- virtual bool SetCodec(const VideoCodec& codec) = 0;
- virtual VideoCodec GetCodec() = 0;
+ virtual Stats GetStats() const = 0;
protected:
virtual ~VideoSendStream() {}
@@ -167,4 +170,4 @@ class VideoSendStream {
} // namespace webrtc
-#endif // WEBRTC_VIDEO_ENGINE_NEW_INCLUDE_VIDEO_SEND_STREAM_H_
+#endif // WEBRTC_VIDEO_SEND_STREAM_H_
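
On the send side, the codec and external encoder now travel in Config::EncoderSettings, RTX and the new padding floor live under the RTP settings, and codec changes go through ReconfigureVideoEncoder() rather than SetCodec()/GetCodec(). A rough sketch, assuming the RTP settings are exposed as a Config::rtp member and using placeholder values throughout:

  webrtc::VideoSendStream::Config config;
  config.encoder_settings.payload_name = "VP8";         // Placeholder codec name.
  config.encoder_settings.payload_type = 100;           // Placeholder payload type.
  config.encoder_settings.encoder = vp8_encoder;        // Uninitialized VideoEncoder*.

  config.rtp.ssrcs.push_back(0x11111111);               // Placeholder media SSRC.
  config.rtp.min_transmit_bitrate_bps = 300000;         // Keep padding up to 300 kbps.
  config.rtp.rtx.ssrcs.push_back(0x11111112);           // Placeholder RTX SSRC.
  config.rtp.rtx.payload_type = 96;
  config.rtp.rtx.pad_with_redundant_payloads = true;    // Pad by retransmitting media.

  // When the encoder configuration changes:
  // std::vector<webrtc::VideoStream> streams = BuildStreams();  // hypothetical helper
  // send_stream->ReconfigureVideoEncoder(streams, NULL);
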
diff --git a/chromium/third_party/webrtc/voice_engine/Android.mk b/chromium/third_party/webrtc/voice_engine/Android.mk
index 4c5fa592463..3dc3c36be56 100644
--- a/chromium/third_party/webrtc/voice_engine/Android.mk
+++ b/chromium/third_party/webrtc/voice_engine/Android.mk
@@ -35,7 +35,6 @@ LOCAL_SRC_FILES := \
voe_call_report_impl.cc \
voe_codec_impl.cc \
voe_dtmf_impl.cc \
- voe_encryption_impl.cc \
voe_external_media_impl.cc \
voe_file_impl.cc \
voe_hardware_impl.cc \
@@ -66,12 +65,12 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/../../../modules/rtp_rtcp/interface \
$(LOCAL_PATH)/../../../modules/udp_transport/interface \
$(LOCAL_PATH)/../../../modules/utility/interface \
- $(LOCAL_PATH)/../../../system_wrappers/interface
+ $(LOCAL_PATH)/../../../system_wrappers/interface
LOCAL_SHARED_LIBRARIES := \
libcutils \
libdl \
- libstlport
+ libstlport
ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
LOCAL_LDLIBS += -ldl -lpthread
diff --git a/chromium/third_party/webrtc/voice_engine/OWNERS b/chromium/third_party/webrtc/voice_engine/OWNERS
index a07ced37b30..7bb3cd52378 100644
--- a/chromium/third_party/webrtc/voice_engine/OWNERS
+++ b/chromium/third_party/webrtc/voice_engine/OWNERS
@@ -2,3 +2,10 @@ henrikg@webrtc.org
henrika@webrtc.org
niklas.enbom@webrtc.org
xians@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/voice_engine/channel.cc b/chromium/third_party/webrtc/voice_engine/channel.cc
index 1eb55afa244..f99e590ae4b 100644
--- a/chromium/third_party/webrtc/voice_engine/channel.cc
+++ b/chromium/third_party/webrtc/voice_engine/channel.cc
@@ -10,10 +10,13 @@
#include "webrtc/voice_engine/channel.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
@@ -23,6 +26,7 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
@@ -38,6 +42,72 @@
namespace webrtc {
namespace voe {
+// Extend the default RTCP statistics struct with max_jitter, defined as the
+// maximum jitter value seen in an RTCP report block.
+struct ChannelStatistics : public RtcpStatistics {
+ ChannelStatistics() : rtcp(), max_jitter(0) {}
+
+ RtcpStatistics rtcp;
+ uint32_t max_jitter;
+};
+
+// Statistics callback, called at each generation of a new RTCP report block.
+class StatisticsProxy : public RtcpStatisticsCallback {
+ public:
+ StatisticsProxy(uint32_t ssrc)
+ : stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+ ssrc_(ssrc) {}
+ virtual ~StatisticsProxy() {}
+
+ virtual void StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) OVERRIDE {
+ if (ssrc != ssrc_)
+ return;
+
+ CriticalSectionScoped cs(stats_lock_.get());
+ stats_.rtcp = statistics;
+ if (statistics.jitter > stats_.max_jitter) {
+ stats_.max_jitter = statistics.jitter;
+ }
+ }
+
+ void ResetStatistics() {
+ CriticalSectionScoped cs(stats_lock_.get());
+ stats_ = ChannelStatistics();
+ }
+
+ ChannelStatistics GetStats() {
+ CriticalSectionScoped cs(stats_lock_.get());
+ return stats_;
+ }
+
+ private:
+ // StatisticsUpdated calls are triggered from threads in the RTP module,
+ // while GetStats calls can be triggered from the public voice engine API,
+ // hence synchronization is needed.
+ scoped_ptr<CriticalSectionWrapper> stats_lock_;
+ const uint32_t ssrc_;
+ ChannelStatistics stats_;
+};
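// A hypothetical wiring/reading sketch for the proxy above (illustrative only,
// with assumed object names): the Channel constructor further down registers
// the proxy with the receive statistics, and readers take a locked copy so
// max_jitter stays consistent with the rest of the report block.
//
//   StatisticsProxy proxy(/*ssrc=*/0x1234);                      // example SSRC
//   receive_statistics->RegisterRtcpStatisticsCallback(&proxy);
//   ...
//   ChannelStatistics snapshot = proxy.GetStats();               // copy under lock
//   uint32_t worst_jitter  = snapshot.max_jitter;                // peak seen so far
//   uint32_t latest_jitter = snapshot.rtcp.jitter;               // most recent block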
+
+class VoEBitrateObserver : public BitrateObserver {
+ public:
+ explicit VoEBitrateObserver(Channel* owner)
+ : owner_(owner) {}
+ virtual ~VoEBitrateObserver() {}
+
+ // Implements BitrateObserver.
+ virtual void OnNetworkChanged(const uint32_t bitrate_bps,
+ const uint8_t fraction_lost,
+ const uint32_t rtt) OVERRIDE {
+ // |fraction_lost| has a scale of 0 - 255.
+ owner_->OnNetworkChanged(bitrate_bps, fraction_lost, rtt);
+ }
+
+ private:
+ Channel* owner_;
+};
+
int32_t
Channel::SendData(FrameType frameType,
uint8_t payloadType,
@@ -56,7 +126,7 @@ Channel::SendData(FrameType frameType,
// Store current audio level in the RTP/RTCP module.
// The level will be used in combination with voice-activity state
// (frameType) to add an RTP header extension
- _rtpRtcpModule->SetAudioLevel(rtp_audioproc_->level_estimator()->RMS());
+ _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
}
// Push data from ACM to RTP/RTCP-module to deliver audio frame for
@@ -131,21 +201,6 @@ Channel::SendPacket(int channel, const void *data, int len)
return -1;
}
- // Insert extra RTP packet using if user has called the InsertExtraRTPPacket
- // API
- if (_insertExtraRTPPacket)
- {
- uint8_t* rtpHdr = (uint8_t*)data;
- uint8_t M_PT(0);
- if (_extraMarkerBit)
- {
- M_PT = 0x80; // set the M-bit
- }
- M_PT += _extraPayloadType; // set the payload type
- *(++rtpHdr) = M_PT; // modify the M|PT-byte within the RTP header
- _insertExtraRTPPacket = false; // insert one packet only
- }
-
uint8_t* bufferToSendPtr = (uint8_t*)data;
int32_t bufferLength = len;
@@ -157,41 +212,6 @@ Channel::SendPacket(int channel, const void *data, int len)
"Channel::SendPacket() RTP dump to output file failed");
}
- // SRTP or External encryption
- if (_encrypting)
- {
- if (_encryptionPtr)
- {
- if (!_encryptionRTPBufferPtr)
- {
- // Allocate memory for encryption buffer one time only
- _encryptionRTPBufferPtr =
- new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
- memset(_encryptionRTPBufferPtr, 0,
- kVoiceEngineMaxIpPacketSizeBytes);
- }
-
- // Perform encryption (SRTP or external)
- int32_t encryptedBufferLength = 0;
- _encryptionPtr->encrypt(_channelId,
- bufferToSendPtr,
- _encryptionRTPBufferPtr,
- bufferLength,
- (int*)&encryptedBufferLength);
- if (encryptedBufferLength <= 0)
- {
- _engineStatisticsPtr->SetLastError(
- VE_ENCRYPTION_FAILED,
- kTraceError, "Channel::SendPacket() encryption failed");
- return -1;
- }
-
- // Replace default data buffer with encrypted buffer
- bufferToSendPtr = _encryptionRTPBufferPtr;
- bufferLength = encryptedBufferLength;
- }
- }
-
int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
bufferLength);
if (n < 0) {
@@ -236,39 +256,6 @@ Channel::SendRTCPPacket(int channel, const void *data, int len)
"Channel::SendPacket() RTCP dump to output file failed");
}
- // SRTP or External encryption
- if (_encrypting)
- {
- if (_encryptionPtr)
- {
- if (!_encryptionRTCPBufferPtr)
- {
- // Allocate memory for encryption buffer one time only
- _encryptionRTCPBufferPtr =
- new uint8_t[kVoiceEngineMaxIpPacketSizeBytes];
- }
-
- // Perform encryption (SRTP or external).
- int32_t encryptedBufferLength = 0;
- _encryptionPtr->encrypt_rtcp(_channelId,
- bufferToSendPtr,
- _encryptionRTCPBufferPtr,
- bufferLength,
- (int*)&encryptedBufferLength);
- if (encryptedBufferLength <= 0)
- {
- _engineStatisticsPtr->SetLastError(
- VE_ENCRYPTION_FAILED, kTraceError,
- "Channel::SendRTCPPacket() encryption failed");
- return -1;
- }
-
- // Replace default data buffer with encrypted buffer
- bufferToSendPtr = _encryptionRTCPBufferPtr;
- bufferLength = encryptedBufferLength;
- }
- }
-
int n = _transportPtr->SendRTCPPacket(channel,
bufferToSendPtr,
bufferLength);
@@ -315,22 +302,8 @@ Channel::OnIncomingSSRCChanged(int32_t id, uint32_t ssrc)
"Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
id, ssrc);
- int32_t channel = VoEChannelId(id);
- assert(channel == _channelId);
-
// Update ssrc so that NTP for AV sync can be updated.
_rtpRtcpModule->SetRemoteSSRC(ssrc);
-
- if (_rtpObserver)
- {
- CriticalSectionScoped cs(&_callbackCritSect);
-
- if (_rtpObserverPtr)
- {
- // Send new SSRC to registered observer using callback
- _rtpObserverPtr->OnIncomingSSRCChanged(channel, ssrc);
- }
- }
}
void Channel::OnIncomingCSRCChanged(int32_t id,
@@ -340,19 +313,6 @@ void Channel::OnIncomingCSRCChanged(int32_t id,
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
id, CSRC, added);
-
- int32_t channel = VoEChannelId(id);
- assert(channel == _channelId);
-
- if (_rtpObserver)
- {
- CriticalSectionScoped cs(&_callbackCritSect);
-
- if (_rtpObserverPtr)
- {
- _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
- }
- }
}
void Channel::ResetStatistics(uint32_t ssrc) {
@@ -361,6 +321,7 @@ void Channel::ResetStatistics(uint32_t ssrc) {
if (statistician) {
statistician->ResetStatistics();
}
+ statistics_proxy_->ResetStatistics();
}
void
@@ -444,7 +405,7 @@ Channel::OnPacketTimeout(int32_t id)
CriticalSectionScoped cs(_callbackCritSectPtr);
if (_voiceEngineObserverPtr)
{
- if (_receiving || _externalTransport)
+ if (channel_state_.Get().receiving || _externalTransport)
{
int32_t channel = VoEChannelId(id);
assert(channel == _channelId);
@@ -522,7 +483,7 @@ Channel::OnPeriodicDeadOrAlive(int32_t id,
// It is possible that the connection is alive even if no RTP packet has
// been received for a long time since the other side might use VAD/DTX
// and a low SID-packet update rate.
- if ((kRtpNoRtp == alive) && _playing)
+ if ((kRtpNoRtp == alive) && channel_state_.Get().playing)
{
// Detect Alive for all NetEQ states except for the case when we are
// in PLC_CNG state.
@@ -534,8 +495,6 @@ Channel::OnPeriodicDeadOrAlive(int32_t id,
isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
}
- UpdateDeadOrAliveCounters(isAlive);
-
// Send callback to the registered observer
if (_connectionObserver)
{
@@ -559,9 +518,7 @@ Channel::OnReceivedPayloadData(const uint8_t* payloadData,
rtpHeader->header.payloadType,
rtpHeader->type.Audio.channel);
- _lastRemoteTimeStamp = rtpHeader->header.timestamp;
-
- if (!_playing)
+ if (!channel_state_.Get().playing)
{
// Avoid inserting into NetEQ when we are not playing. Count the
// packet as discarded.
@@ -646,10 +603,14 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
// Store speech type for dead-or-alive detection
_outputSpeechType = audioFrame.speech_type_;
- // Perform far-end AudioProcessing module processing on the received signal
- if (_rxApmIsEnabled)
- {
- ApmProcessRx(audioFrame);
+ ChannelState::State state = channel_state_.Get();
+
+ if (state.rx_apm_is_enabled) {
+ int err = rx_audioproc_->ProcessStream(&audioFrame);
+ if (err) {
+ LOG(LS_ERROR) << "ProcessStream() error: " << err;
+ assert(false);
+ }
}
float output_gain = 1.0f;
@@ -688,17 +649,11 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
}
// Mix decoded PCM output with file if file mixing is enabled
- if (_outputFilePlaying)
+ if (state.output_file_playing)
{
MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
}
- // Place channel in on-hold state (~muted) if on-hold is activated
- if (_outputIsOnHold)
- {
- AudioFrameOperations::Mute(audioFrame);
- }
-
// External media
if (_outputExternalMedia)
{
@@ -729,6 +684,33 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
// Measure audio level (0-9)
_outputAudioLevel.ComputeLevel(audioFrame);
+ if (capture_start_rtp_time_stamp_ < 0 && audioFrame.timestamp_ != 0) {
+ // The first frame with a valid rtp timestamp.
+ capture_start_rtp_time_stamp_ = audioFrame.timestamp_;
+ }
+
+ if (capture_start_rtp_time_stamp_ >= 0) {
+ // audioFrame.timestamp_ should be valid from now on.
+
+ // Compute elapsed time.
+ int64_t unwrap_timestamp =
+ rtp_ts_wraparound_handler_->Unwrap(audioFrame.timestamp_);
+ audioFrame.elapsed_time_ms_ =
+ (unwrap_timestamp - capture_start_rtp_time_stamp_) /
+ (GetPlayoutFrequency() / 1000);
+
+ // Compute ntp time.
+ audioFrame.ntp_time_ms_ = ntp_estimator_->Estimate(audioFrame.timestamp_);
+ // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
+ if (audioFrame.ntp_time_ms_ > 0) {
+ // Compute |capture_start_ntp_time_ms_| so that
+ // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
+ CriticalSectionScoped lock(ts_stats_lock_.get());
+ capture_start_ntp_time_ms_ =
+ audioFrame.ntp_time_ms_ - audioFrame.elapsed_time_ms_;
+ }
+ }
+
return 0;
}
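
A worked example of the timestamp bookkeeping added to GetAudioFrame() above, using hypothetical values and a 48 kHz playout clock (GetPlayoutFrequency() == 48000):

  elapsed_time_ms_           = (4,800,000 - 960,000) / (48000 / 1000) = 80,000 ms
  capture_start_ntp_time_ms_ = ntp_time_ms_ - elapsed_time_ms_
                             = 1,000,080,000 - 80,000 = 1,000,000,000 ms

The NTP estimate is not valid (positive) until at least two RTCP sender reports have been received, so capture_start_ntp_time_ms_ keeps its initial -1 until then.
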
@@ -757,10 +739,10 @@ Channel::NeededFrequency(int32_t id)
// we take that frequency into consideration as well
// This is not needed on sending side, since the codec will
// limit the spectrum anyway.
- if (_outputFilePlaying)
+ if (channel_state_.Get().output_file_playing)
{
CriticalSectionScoped cs(&_fileCritSect);
- if (_outputFilePlayerPtr && _outputFilePlaying)
+ if (_outputFilePlayerPtr)
{
if(_outputFilePlayerPtr->Frequency()>highestNeeded)
{
@@ -822,9 +804,7 @@ Channel::PlayFileEnded(int32_t id)
if (id == _inputFilePlayerId)
{
- CriticalSectionScoped cs(&_fileCritSect);
-
- _inputFilePlaying = false;
+ channel_state_.SetInputFilePlaying(false);
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId,_channelId),
"Channel::PlayFileEnded() => input file player module is"
@@ -832,9 +812,7 @@ Channel::PlayFileEnded(int32_t id)
}
else if (id == _outputFilePlayerId)
{
- CriticalSectionScoped cs(&_fileCritSect);
-
- _outputFilePlaying = false;
+ channel_state_.SetOutputFilePlaying(false);
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId,_channelId),
"Channel::PlayFileEnded() => output file player module is"
@@ -869,20 +847,20 @@ Channel::Channel(int32_t channelId,
_channelId(channelId),
rtp_header_parser_(RtpHeaderParser::Create()),
rtp_payload_registry_(
- new RTPPayloadRegistry(channelId,
- RTPPayloadStrategy::CreateStrategy(true))),
+ new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
rtp_receive_statistics_(ReceiveStatistics::Create(
Clock::GetRealTimeClock())),
rtp_receiver_(RtpReceiver::CreateAudioReceiver(
VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
this, this, rtp_payload_registry_.get())),
telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
- audio_coding_(config.Get<AudioCodingModuleFactory>().Create(
+ audio_coding_(AudioCodingModule::Create(
VoEModuleId(instanceId, channelId))),
_rtpDumpIn(*RtpDump::CreateRtpDump()),
_rtpDumpOut(*RtpDump::CreateRtpDump()),
_outputAudioLevel(),
_externalTransport(false),
+ _audioLevel_dBov(0),
_inputFilePlayerPtr(NULL),
_outputFilePlayerPtr(NULL),
_outputFileRecorderPtr(NULL),
@@ -891,26 +869,25 @@ Channel::Channel(int32_t channelId,
_inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
_outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
_outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
- _inputFilePlaying(false),
- _outputFilePlaying(false),
_outputFileRecording(false),
_inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
_inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
- _inputExternalMedia(false),
_outputExternalMedia(false),
_inputExternalMediaCallbackPtr(NULL),
_outputExternalMediaCallbackPtr(NULL),
- _encryptionRTPBufferPtr(NULL),
- _decryptionRTPBufferPtr(NULL),
- _encryptionRTCPBufferPtr(NULL),
- _decryptionRTCPBufferPtr(NULL),
_timeStamp(0), // This is just an offset, RTP module will add its own random offset
_sendTelephoneEventPayloadType(106),
+ ntp_estimator_(new RemoteNtpTimeEstimator(Clock::GetRealTimeClock())),
jitter_buffer_playout_timestamp_(0),
playout_timestamp_rtp_(0),
playout_timestamp_rtcp_(0),
+ playout_delay_ms_(0),
_numberOfDiscardedPackets(0),
send_sequence_number_(0),
+ ts_stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+ rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
+ capture_start_rtp_time_stamp_(-1),
+ capture_start_ntp_time_ms_(-1),
_engineStatisticsPtr(NULL),
_outputMixerPtr(NULL),
_transmitMixerPtr(NULL),
@@ -919,36 +896,21 @@ Channel::Channel(int32_t channelId,
_voiceEngineObserverPtr(NULL),
_callbackCritSectPtr(NULL),
_transportPtr(NULL),
- _encryptionPtr(NULL),
- rx_audioproc_(AudioProcessing::Create(VoEModuleId(instanceId, channelId))),
_rxVadObserverPtr(NULL),
_oldVadDecision(-1),
_sendFrameType(0),
- _rtpObserverPtr(NULL),
_rtcpObserverPtr(NULL),
- _outputIsOnHold(false),
_externalPlayout(false),
_externalMixing(false),
- _inputIsOnHold(false),
- _playing(false),
- _sending(false),
- _receiving(false),
_mixFileWithMicrophone(false),
- _rtpObserver(false),
_rtcpObserver(false),
_mute(false),
_panLeft(1.0f),
_panRight(1.0f),
_outputGain(1.0f),
- _encrypting(false),
- _decrypting(false),
_playOutbandDtmfEvent(false),
_playInbandDtmfEvent(false),
- _extraPayloadType(0),
- _insertExtraRTPPacket(false),
- _extraMarkerBit(false),
_lastLocalTimeStamp(0),
- _lastRemoteTimeStamp(0),
_lastPayloadType(0),
_includeAudioLevelIndication(false),
_rtpPacketTimedOut(false),
@@ -956,18 +918,23 @@ Channel::Channel(int32_t channelId,
_rtpTimeOutSeconds(0),
_connectionObserver(false),
_connectionObserverPtr(NULL),
- _countAliveDetections(0),
- _countDeadDetections(0),
_outputSpeechType(AudioFrame::kNormalSpeech),
+ vie_network_(NULL),
+ video_channel_(-1),
_average_jitter_buffer_delay_us(0),
least_required_delay_ms_(0),
_previousTimestamp(0),
_recPacketDelayMs(20),
_RxVadDetection(false),
- _rxApmIsEnabled(false),
_rxAgcIsEnabled(false),
_rxNsIsEnabled(false),
- restored_packet_in_use_(false)
+ restored_packet_in_use_(false),
+ bitrate_controller_(
+ BitrateController::CreateBitrateController(Clock::GetRealTimeClock(),
+ true)),
+ rtcp_bandwidth_observer_(
+ bitrate_controller_->CreateRtcpBandwidthObserver()),
+ send_bitrate_observer_(new VoEBitrateObserver(this))
{
WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::Channel() - ctor");
@@ -982,12 +949,22 @@ Channel::Channel(int32_t channelId,
configuration.rtcp_feedback = this;
configuration.audio_messages = this;
configuration.receive_statistics = rtp_receive_statistics_.get();
+ configuration.bandwidth_callback = rtcp_bandwidth_observer_.get();
_rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
+
+ statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
+ rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
+ statistics_proxy_.get());
+
+ Config audioproc_config;
+ audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
+ rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}
Channel::~Channel()
{
+ rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::~Channel() - dtor");
@@ -995,7 +972,7 @@ Channel::~Channel()
{
DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
}
- if (_inputExternalMedia)
+ if (channel_state_.Get().input_external_media)
{
DeRegisterExternalMediaProcessing(kRecordingPerChannel);
}
@@ -1055,12 +1032,12 @@ Channel::~Channel()
// End of modules shutdown
// Delete other objects
+ if (vie_network_) {
+ vie_network_->Release();
+ vie_network_ = NULL;
+ }
RtpDump::DestroyRtpDump(&_rtpDumpIn);
RtpDump::DestroyRtpDump(&_rtpDumpOut);
- delete [] _encryptionRTPBufferPtr;
- delete [] _decryptionRTPBufferPtr;
- delete [] _encryptionRTCPBufferPtr;
- delete [] _decryptionRTCPBufferPtr;
delete &_callbackCritSect;
delete &_fileCritSect;
delete &volume_settings_critsect_;
@@ -1072,6 +1049,8 @@ Channel::Init()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::Init()");
+ channel_state_.Reset();
+
// --- Initial sanity
if ((_engineStatisticsPtr == NULL) ||
@@ -1270,7 +1249,7 @@ Channel::StartPlayout()
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::StartPlayout()");
- if (_playing)
+ if (channel_state_.Get().playing)
{
return 0;
}
@@ -1286,8 +1265,7 @@ Channel::StartPlayout()
}
}
- _playing = true;
-
+ channel_state_.SetPlaying(true);
if (RegisterFilePlayingToMixer() != 0)
return -1;
@@ -1299,7 +1277,7 @@ Channel::StopPlayout()
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::StopPlayout()");
- if (!_playing)
+ if (!channel_state_.Get().playing)
{
return 0;
}
@@ -1315,7 +1293,7 @@ Channel::StopPlayout()
}
}
- _playing = false;
+ channel_state_.SetPlaying(false);
_outputAudioLevel.Clear();
return 0;
@@ -1327,21 +1305,15 @@ Channel::StartSend()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::StartSend()");
// Resume the previous sequence number which was reset by StopSend().
- // This needs to be done before |_sending| is set to true.
+ // This needs to be done before |sending| is set to true.
if (send_sequence_number_)
SetInitSequenceNumber(send_sequence_number_);
+ if (channel_state_.Get().sending)
{
- // A lock is needed because |_sending| can be accessed or modified by
- // another thread at the same time.
- CriticalSectionScoped cs(&_callbackCritSect);
-
- if (_sending)
- {
- return 0;
- }
- _sending = true;
+ return 0;
}
+ channel_state_.SetSending(true);
if (_rtpRtcpModule->SetSendingStatus(true) != 0)
{
@@ -1349,7 +1321,7 @@ Channel::StartSend()
VE_RTP_RTCP_MODULE_ERROR, kTraceError,
"StartSend() RTP/RTCP failed to start sending");
CriticalSectionScoped cs(&_callbackCritSect);
- _sending = false;
+ channel_state_.SetSending(false);
return -1;
}
@@ -1361,17 +1333,11 @@ Channel::StopSend()
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::StopSend()");
+ if (!channel_state_.Get().sending)
{
- // A lock is needed because |_sending| can be accessed or modified by
- // another thread at the same time.
- CriticalSectionScoped cs(&_callbackCritSect);
-
- if (!_sending)
- {
- return 0;
- }
- _sending = false;
+ return 0;
}
+ channel_state_.SetSending(false);
// Store the sequence number to be able to pick up the same sequence for
// the next StartSend(). This is needed for restarting device, otherwise
@@ -1399,11 +1365,11 @@ Channel::StartReceiving()
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::StartReceiving()");
- if (_receiving)
+ if (channel_state_.Get().receiving)
{
return 0;
}
- _receiving = true;
+ channel_state_.SetReceiving(true);
_numberOfDiscardedPackets = 0;
return 0;
}
@@ -1413,15 +1379,12 @@ Channel::StopReceiving()
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::StopReceiving()");
- if (!_receiving)
+ if (!channel_state_.Get().receiving)
{
return 0;
}
- // Recover DTMF detection status.
- telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
- RegisterReceiveCodecsToRTPModule();
- _receiving = false;
+ channel_state_.SetReceiving(false);
return 0;
}
@@ -1481,51 +1444,6 @@ Channel::GetNetEQPlayoutMode(NetEqModes& mode)
}
int32_t
-Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::SetOnHoldStatus()");
- if (mode == kHoldSendAndPlay)
- {
- _outputIsOnHold = enable;
- _inputIsOnHold = enable;
- }
- else if (mode == kHoldPlayOnly)
- {
- _outputIsOnHold = enable;
- }
- if (mode == kHoldSendOnly)
- {
- _inputIsOnHold = enable;
- }
- return 0;
-}
-
-int32_t
-Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::GetOnHoldStatus()");
- enabled = (_outputIsOnHold || _inputIsOnHold);
- if (_outputIsOnHold && _inputIsOnHold)
- {
- mode = kHoldSendAndPlay;
- }
- else if (_outputIsOnHold && !_inputIsOnHold)
- {
- mode = kHoldPlayOnly;
- }
- else if (!_outputIsOnHold && _inputIsOnHold)
- {
- mode = kHoldSendOnly;
- }
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
- enabled, mode);
- return 0;
-}
-
-int32_t
Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
@@ -1606,9 +1524,27 @@ Channel::SetSendCodec(const CodecInst& codec)
return -1;
}
+ bitrate_controller_->SetBitrateObserver(send_bitrate_observer_.get(),
+ codec.rate, 0, 0);
+
return 0;
}
+void
+Channel::OnNetworkChanged(const uint32_t bitrate_bps,
+ const uint8_t fraction_lost, // 0 - 255.
+ const uint32_t rtt) {
+ WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+ "Channel::OnNetworkChanged(bitrate_bps=%d, fration_lost=%d, rtt=%d)",
+ bitrate_bps, fraction_lost, rtt);
+ // Normalizes rate to 0 - 100.
+ if (audio_coding_->SetPacketLossRate(100 * fraction_lost / 255) != 0) {
+ _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
+ kTraceError, "OnNetworkChanged() failed to set packet loss rate");
+ assert(false); // This should not happen.
+ }
+}
+
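// Worked example for the normalization above (values are illustrative):
// |fraction_lost| uses the RTCP Q8 scale, 0 - 255, so a reported value of 51
// maps to 100 * 51 / 255 = 20, i.e. a 20% packet loss rate handed to the ACM.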
int32_t
Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
{
@@ -1648,14 +1584,14 @@ Channel::SetRecPayloadType(const CodecInst& codec)
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetRecPayloadType()");
- if (_playing)
+ if (channel_state_.Get().playing)
{
_engineStatisticsPtr->SetLastError(
VE_ALREADY_PLAYING, kTraceError,
"SetRecPayloadType() unable to set PT while playing");
return -1;
}
- if (_receiving)
+ if (channel_state_.Get().receiving)
{
_engineStatisticsPtr->SetLastError(
VE_ALREADY_LISTENING, kTraceError,
@@ -1759,47 +1695,6 @@ Channel::GetRecPayloadType(CodecInst& codec)
}
int32_t
-Channel::SetAMREncFormat(AmrMode mode)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::SetAMREncFormat()");
-
- // ACM doesn't support AMR
- return -1;
-}
-
-int32_t
-Channel::SetAMRDecFormat(AmrMode mode)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::SetAMRDecFormat()");
-
- // ACM doesn't support AMR
- return -1;
-}
-
-int32_t
-Channel::SetAMRWbEncFormat(AmrMode mode)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::SetAMRWbEncFormat()");
-
- // ACM doesn't support AMR
- return -1;
-
-}
-
-int32_t
-Channel::SetAMRWbDecFormat(AmrMode mode)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::SetAMRWbDecFormat()");
-
- // ACM doesn't support AMR
- return -1;
-}
-
-int32_t
Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
@@ -1848,199 +1743,6 @@ Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
return 0;
}
-int32_t
-Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::SetISACInitTargetRate()");
-
- CodecInst sendCodec;
- if (audio_coding_->SendCodec(&sendCodec) == -1)
- {
- _engineStatisticsPtr->SetLastError(
- VE_CODEC_ERROR, kTraceError,
- "SetISACInitTargetRate() failed to retrieve send codec");
- return -1;
- }
- if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
- {
- // This API is only valid if iSAC is setup to run in channel-adaptive
- // mode.
- // We do not validate the adaptive mode here. It is done later in the
- // ConfigISACBandwidthEstimator() API.
- _engineStatisticsPtr->SetLastError(
- VE_CODEC_ERROR, kTraceError,
- "SetISACInitTargetRate() send codec is not iSAC");
- return -1;
- }
-
- uint8_t initFrameSizeMsec(0);
- if (16000 == sendCodec.plfreq)
- {
- // Note that 0 is a valid and corresponds to "use default
- if ((rateBps != 0 &&
- rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
- (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_ARGUMENT, kTraceError,
- "SetISACInitTargetRate() invalid target rate - 1");
- return -1;
- }
- // 30 or 60ms
- initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 16);
- }
- else if (32000 == sendCodec.plfreq)
- {
- if ((rateBps != 0 &&
- rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
- (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_ARGUMENT, kTraceError,
- "SetISACInitTargetRate() invalid target rate - 2");
- return -1;
- }
- initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 32); // 30ms
- }
-
- if (audio_coding_->ConfigISACBandwidthEstimator(
- initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
- {
- _engineStatisticsPtr->SetLastError(
- VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
- "SetISACInitTargetRate() iSAC BWE config failed");
- return -1;
- }
-
- return 0;
-}
-
-int32_t
-Channel::SetISACMaxRate(int rateBps)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::SetISACMaxRate()");
-
- CodecInst sendCodec;
- if (audio_coding_->SendCodec(&sendCodec) == -1)
- {
- _engineStatisticsPtr->SetLastError(
- VE_CODEC_ERROR, kTraceError,
- "SetISACMaxRate() failed to retrieve send codec");
- return -1;
- }
- if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
- {
- // This API is only valid if iSAC is selected as sending codec.
- _engineStatisticsPtr->SetLastError(
- VE_CODEC_ERROR, kTraceError,
- "SetISACMaxRate() send codec is not iSAC");
- return -1;
- }
- if (16000 == sendCodec.plfreq)
- {
- if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
- (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_ARGUMENT, kTraceError,
- "SetISACMaxRate() invalid max rate - 1");
- return -1;
- }
- }
- else if (32000 == sendCodec.plfreq)
- {
- if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
- (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_ARGUMENT, kTraceError,
- "SetISACMaxRate() invalid max rate - 2");
- return -1;
- }
- }
- if (_sending)
- {
- _engineStatisticsPtr->SetLastError(
- VE_SENDING, kTraceError,
- "SetISACMaxRate() unable to set max rate while sending");
- return -1;
- }
-
- // Set the maximum instantaneous rate of iSAC (works for both adaptive
- // and non-adaptive mode)
- if (audio_coding_->SetISACMaxRate(rateBps) == -1)
- {
- _engineStatisticsPtr->SetLastError(
- VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
- "SetISACMaxRate() failed to set max rate");
- return -1;
- }
-
- return 0;
-}
-
-int32_t
-Channel::SetISACMaxPayloadSize(int sizeBytes)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::SetISACMaxPayloadSize()");
- CodecInst sendCodec;
- if (audio_coding_->SendCodec(&sendCodec) == -1)
- {
- _engineStatisticsPtr->SetLastError(
- VE_CODEC_ERROR, kTraceError,
- "SetISACMaxPayloadSize() failed to retrieve send codec");
- return -1;
- }
- if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
- {
- _engineStatisticsPtr->SetLastError(
- VE_CODEC_ERROR, kTraceError,
- "SetISACMaxPayloadSize() send codec is not iSAC");
- return -1;
- }
- if (16000 == sendCodec.plfreq)
- {
- if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
- (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_ARGUMENT, kTraceError,
- "SetISACMaxPayloadSize() invalid max payload - 1");
- return -1;
- }
- }
- else if (32000 == sendCodec.plfreq)
- {
- if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
- (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_ARGUMENT, kTraceError,
- "SetISACMaxPayloadSize() invalid max payload - 2");
- return -1;
- }
- }
- if (_sending)
- {
- _engineStatisticsPtr->SetLastError(
- VE_SENDING, kTraceError,
- "SetISACMaxPayloadSize() unable to set max rate while sending");
- return -1;
- }
-
- if (audio_coding_->SetISACMaxPayloadSize(sizeBytes) == -1)
- {
- _engineStatisticsPtr->SetLastError(
- VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
- "SetISACMaxPayloadSize() failed to set max payload size");
- return -1;
- }
- return 0;
-}
-
int32_t Channel::RegisterExternalTransport(Transport& transport)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
@@ -2083,7 +1785,8 @@ Channel::DeRegisterExternalTransport()
return 0;
}
-int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
+int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length,
+ const PacketTime& packet_time) {
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::ReceivedRTPPacket()");
@@ -2112,6 +1815,23 @@ int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
rtp_receive_statistics_->IncomingPacket(header, length,
IsPacketRetransmitted(header, in_order));
rtp_payload_registry_->SetIncomingPayloadType(header);
+
+ // Forward any packets to ViE bandwidth estimator, if enabled.
+ {
+ CriticalSectionScoped cs(&_callbackCritSect);
+ if (vie_network_) {
+ int64_t arrival_time_ms;
+ if (packet_time.timestamp != -1) {
+ arrival_time_ms = (packet_time.timestamp + 500) / 1000;
+ } else {
+ arrival_time_ms = TickTime::MillisecondTimestamp();
+ }
+ int payload_length = length - header.headerLength;
+ vie_network_->ReceivedBWEPacket(video_channel_, arrival_time_ms,
+ payload_length, header);
+ }
+ }
+
return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
}
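
The bandwidth-estimator forwarding added to ReceivedRTPPacket() converts the socket arrival time from microseconds to milliseconds with rounding rather than truncation, and falls back to TickTime::MillisecondTimestamp() when no packet time was supplied. A small sketch of the conversion, with an assumed helper name:

  // Hypothetical helper mirroring the expression used above.
  int64_t UsToMsRounded(int64_t us) { return (us + 500) / 1000; }
  // UsToMsRounded(1234567) == 1235, where plain truncation would give 1234.
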
@@ -2209,6 +1929,9 @@ int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
"Channel::IncomingRTPPacket() RTCP packet is invalid");
}
+
+ ntp_estimator_->UpdateRtcpTimestamp(rtp_receiver_->SSRC(),
+ _rtpRtcpModule.get());
return 0;
}
@@ -2226,7 +1949,7 @@ int Channel::StartPlayingFileLocally(const char* fileName,
"stopPosition=%d)", fileName, loop, format, volumeScaling,
startPosition, stopPosition);
- if (_outputFilePlaying)
+ if (channel_state_.Get().output_file_playing)
{
_engineStatisticsPtr->SetLastError(
VE_ALREADY_PLAYING, kTraceError,
@@ -2275,7 +1998,7 @@ int Channel::StartPlayingFileLocally(const char* fileName,
return -1;
}
_outputFilePlayerPtr->RegisterModuleFileCallback(this);
- _outputFilePlaying = true;
+ channel_state_.SetOutputFilePlaying(true);
}
if (RegisterFilePlayingToMixer() != 0)
@@ -2305,7 +2028,7 @@ int Channel::StartPlayingFileLocally(InStream* stream,
}
- if (_outputFilePlaying)
+ if (channel_state_.Get().output_file_playing)
{
_engineStatisticsPtr->SetLastError(
VE_ALREADY_PLAYING, kTraceError,
@@ -2353,7 +2076,7 @@ int Channel::StartPlayingFileLocally(InStream* stream,
return -1;
}
_outputFilePlayerPtr->RegisterModuleFileCallback(this);
- _outputFilePlaying = true;
+ channel_state_.SetOutputFilePlaying(true);
}
if (RegisterFilePlayingToMixer() != 0)
@@ -2367,7 +2090,7 @@ int Channel::StopPlayingFileLocally()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::StopPlayingFileLocally()");
- if (!_outputFilePlaying)
+ if (!channel_state_.Get().output_file_playing)
{
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceWarning,
@@ -2388,7 +2111,7 @@ int Channel::StopPlayingFileLocally()
_outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
_outputFilePlayerPtr = NULL;
- _outputFilePlaying = false;
+ channel_state_.SetOutputFilePlaying(false);
}
// _fileCritSect cannot be taken while calling
// SetAnonymousMixibilityStatus. Refer to comments in
@@ -2410,7 +2133,7 @@ int Channel::IsPlayingFileLocally() const
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::IsPlayingFileLocally()");
- return (int32_t)_outputFilePlaying;
+ return channel_state_.Get().output_file_playing;
}
int Channel::RegisterFilePlayingToMixer()
@@ -2418,7 +2141,8 @@ int Channel::RegisterFilePlayingToMixer()
// Return success for not registering for file playing to mixer if:
// 1. playing file before playout is started on that channel.
// 2. starting playout without file playing on that channel.
- if (!_playing || !_outputFilePlaying)
+ if (!channel_state_.Get().playing ||
+ !channel_state_.Get().output_file_playing)
{
return 0;
}
@@ -2429,8 +2153,8 @@ int Channel::RegisterFilePlayingToMixer()
// the file, _fileCritSect will be taken. This would result in a deadlock.
if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
{
+ channel_state_.SetOutputFilePlaying(false);
CriticalSectionScoped cs(&_fileCritSect);
- _outputFilePlaying = false;
_engineStatisticsPtr->SetLastError(
VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
"StartPlayingFile() failed to add participant as file to mixer");
@@ -2443,61 +2167,6 @@ int Channel::RegisterFilePlayingToMixer()
return 0;
}
-int Channel::ScaleLocalFilePlayout(float scale)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
-
- CriticalSectionScoped cs(&_fileCritSect);
-
- if (!_outputFilePlaying)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_OPERATION, kTraceError,
- "ScaleLocalFilePlayout() isnot playing");
- return -1;
- }
- if ((_outputFilePlayerPtr == NULL) ||
- (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
- {
- _engineStatisticsPtr->SetLastError(
- VE_BAD_ARGUMENT, kTraceError,
- "SetAudioScaling() failed to scale the playout");
- return -1;
- }
-
- return 0;
-}
-
-int Channel::GetLocalPlayoutPosition(int& positionMs)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::GetLocalPlayoutPosition(position=?)");
-
- uint32_t position;
-
- CriticalSectionScoped cs(&_fileCritSect);
-
- if (_outputFilePlayerPtr == NULL)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_OPERATION, kTraceError,
- "GetLocalPlayoutPosition() filePlayer instance doesnot exist");
- return -1;
- }
-
- if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
- {
- _engineStatisticsPtr->SetLastError(
- VE_BAD_FILE, kTraceError,
- "GetLocalPlayoutPosition() failed");
- return -1;
- }
- positionMs = position;
-
- return 0;
-}
-
int Channel::StartPlayingFileAsMicrophone(const char* fileName,
bool loop,
FileFormats format,
@@ -2512,7 +2181,9 @@ int Channel::StartPlayingFileAsMicrophone(const char* fileName,
"stopPosition=%d)", fileName, loop, format, volumeScaling,
startPosition, stopPosition);
- if (_inputFilePlaying)
+ CriticalSectionScoped cs(&_fileCritSect);
+
+ if (channel_state_.Get().input_file_playing)
{
_engineStatisticsPtr->SetLastError(
VE_ALREADY_PLAYING, kTraceWarning,
@@ -2520,8 +2191,6 @@ int Channel::StartPlayingFileAsMicrophone(const char* fileName,
return 0;
}
- CriticalSectionScoped cs(&_fileCritSect);
-
// Destroy the old instance
if (_inputFilePlayerPtr)
{
@@ -2562,7 +2231,7 @@ int Channel::StartPlayingFileAsMicrophone(const char* fileName,
return -1;
}
_inputFilePlayerPtr->RegisterModuleFileCallback(this);
- _inputFilePlaying = true;
+ channel_state_.SetInputFilePlaying(true);
return 0;
}
@@ -2587,7 +2256,9 @@ int Channel::StartPlayingFileAsMicrophone(InStream* stream,
return -1;
}
- if (_inputFilePlaying)
+ CriticalSectionScoped cs(&_fileCritSect);
+
+ if (channel_state_.Get().input_file_playing)
{
_engineStatisticsPtr->SetLastError(
VE_ALREADY_PLAYING, kTraceWarning,
@@ -2595,8 +2266,6 @@ int Channel::StartPlayingFileAsMicrophone(InStream* stream,
return 0;
}
- CriticalSectionScoped cs(&_fileCritSect);
-
// Destroy the old instance
if (_inputFilePlayerPtr)
{
@@ -2633,7 +2302,7 @@ int Channel::StartPlayingFileAsMicrophone(InStream* stream,
}
_inputFilePlayerPtr->RegisterModuleFileCallback(this);
- _inputFilePlaying = true;
+ channel_state_.SetInputFilePlaying(true);
return 0;
}
@@ -2643,7 +2312,9 @@ int Channel::StopPlayingFileAsMicrophone()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::StopPlayingFileAsMicrophone()");
- if (!_inputFilePlaying)
+ CriticalSectionScoped cs(&_fileCritSect);
+
+ if (!channel_state_.Get().input_file_playing)
{
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceWarning,
@@ -2651,7 +2322,6 @@ int Channel::StopPlayingFileAsMicrophone()
return 0;
}
- CriticalSectionScoped cs(&_fileCritSect);
if (_inputFilePlayerPtr->StopPlayingFile() != 0)
{
_engineStatisticsPtr->SetLastError(
@@ -2662,7 +2332,7 @@ int Channel::StopPlayingFileAsMicrophone()
_inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
_inputFilePlayerPtr = NULL;
- _inputFilePlaying = false;
+ channel_state_.SetInputFilePlaying(false);
return 0;
}
@@ -2671,35 +2341,7 @@ int Channel::IsPlayingFileAsMicrophone() const
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::IsPlayingFileAsMicrophone()");
-
- return _inputFilePlaying;
-}
-
-int Channel::ScaleFileAsMicrophonePlayout(float scale)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
-
- CriticalSectionScoped cs(&_fileCritSect);
-
- if (!_inputFilePlaying)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_OPERATION, kTraceError,
- "ScaleFileAsMicrophonePlayout() isnot playing");
- return -1;
- }
-
- if ((_inputFilePlayerPtr == NULL) ||
- (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
- {
- _engineStatisticsPtr->SetLastError(
- VE_BAD_ARGUMENT, kTraceError,
- "SetAudioScaling() failed to scale playout");
- return -1;
- }
-
- return 0;
+ return channel_state_.Get().input_file_playing;
}
int Channel::StartRecordingPlayout(const char* fileName,
@@ -2891,6 +2533,7 @@ int Channel::StopRecordingPlayout()
void
Channel::SetMixWithMicStatus(bool mix)
{
+ CriticalSectionScoped cs(&_fileCritSect);
_mixFileWithMicrophone=mix;
}
@@ -2977,54 +2620,6 @@ Channel::GetChannelOutputVolumeScaling(float& scaling) const
return 0;
}
-int
-Channel::RegisterExternalEncryption(Encryption& encryption)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::RegisterExternalEncryption()");
-
- CriticalSectionScoped cs(&_callbackCritSect);
-
- if (_encryptionPtr)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_OPERATION, kTraceError,
- "RegisterExternalEncryption() encryption already enabled");
- return -1;
- }
-
- _encryptionPtr = &encryption;
-
- _decrypting = true;
- _encrypting = true;
-
- return 0;
-}
-
-int
-Channel::DeRegisterExternalEncryption()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::DeRegisterExternalEncryption()");
-
- CriticalSectionScoped cs(&_callbackCritSect);
-
- if (!_encryptionPtr)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_OPERATION, kTraceWarning,
- "DeRegisterExternalEncryption() encryption already disabled");
- return 0;
- }
-
- _decrypting = false;
- _encrypting = false;
-
- _encryptionPtr = NULL;
-
- return 0;
-}
-
int Channel::SendTelephoneEventOutband(unsigned char eventCode,
int lengthMs, int attenuationDb,
bool playDtmfEvent)
@@ -3242,7 +2837,7 @@ Channel::SetRxAgcStatus(bool enable, AgcModes mode)
}
_rxAgcIsEnabled = enable;
- _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
+ channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
return 0;
}
@@ -3391,7 +2986,7 @@ Channel::SetRxNsStatus(bool enable, NsModes mode)
}
_rxNsIsEnabled = enable;
- _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
+ channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
return 0;
}
@@ -3434,48 +3029,6 @@ Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
int
-Channel::RegisterRTPObserver(VoERTPObserver& observer)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
- "Channel::RegisterRTPObserver()");
- CriticalSectionScoped cs(&_callbackCritSect);
-
- if (_rtpObserverPtr)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_OPERATION, kTraceError,
- "RegisterRTPObserver() observer already enabled");
- return -1;
- }
-
- _rtpObserverPtr = &observer;
- _rtpObserver = true;
-
- return 0;
-}
-
-int
-Channel::DeRegisterRTPObserver()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::DeRegisterRTPObserver()");
- CriticalSectionScoped cs(&_callbackCritSect);
-
- if (!_rtpObserverPtr)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_OPERATION, kTraceWarning,
- "DeRegisterRTPObserver() observer already disabled");
- return 0;
- }
-
- _rtpObserver = false;
- _rtpObserverPtr = NULL;
-
- return 0;
-}
-
-int
Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
@@ -3522,20 +3075,14 @@ Channel::SetLocalSSRC(unsigned int ssrc)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SetLocalSSRC()");
- if (_sending)
+ if (channel_state_.Get().sending)
{
_engineStatisticsPtr->SetLastError(
VE_ALREADY_SENDING, kTraceError,
"SetLocalSSRC() already sending");
return -1;
}
- if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
- {
- _engineStatisticsPtr->SetLastError(
- VE_RTP_RTCP_MODULE_ERROR, kTraceError,
- "SetLocalSSRC() failed to set SSRC");
- return -1;
- }
+ _rtpRtcpModule->SetSSRC(ssrc);
return 0;
}
@@ -3559,70 +3106,34 @@ Channel::GetRemoteSSRC(unsigned int& ssrc)
return 0;
}
-int
-Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
-{
- if (arrCSRC == NULL)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_ARGUMENT, kTraceError,
- "GetRemoteCSRCs() invalid array argument");
- return -1;
- }
- uint32_t arrOfCSRC[kRtpCsrcSize];
- int32_t CSRCs(0);
- CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
- if (CSRCs > 0)
- {
- memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(uint32_t));
- for (int i = 0; i < (int) CSRCs; i++)
- {
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_instanceId, _channelId),
- "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
- }
- } else
- {
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_instanceId, _channelId),
- "GetRemoteCSRCs() => list is empty!");
- }
- return CSRCs;
+int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
+ _includeAudioLevelIndication = enable;
+ return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
}
-int
-Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
-{
- if (rtp_audioproc_.get() == NULL) {
- rtp_audioproc_.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
- _channelId)));
- }
-
- if (rtp_audioproc_->level_estimator()->Enable(enable) !=
- AudioProcessing::kNoError) {
- _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
- "Failed to enable AudioProcessing::level_estimator()");
+int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
+ unsigned char id) {
+ rtp_header_parser_->DeregisterRtpHeaderExtension(
+ kRtpExtensionAudioLevel);
+ if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionAudioLevel, id)) {
return -1;
}
+ return 0;
+}
- _includeAudioLevelIndication = enable;
- if (enable) {
- rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
- ID);
- } else {
- rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
- }
- return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
+int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
+ return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
}
-int
-Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
-{
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_instanceId,_channelId),
- "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
- enabled, ID);
- return _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
+int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
+ rtp_header_parser_->DeregisterRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime);
+ if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
+ kRtpExtensionAbsoluteSendTime, id)) {
+ return -1;
+ }
+ return 0;
}
int
@@ -3810,7 +3321,7 @@ Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SendApplicationDefinedRTCPPacket()");
- if (!_sending)
+ if (!channel_state_.Get().sending)
{
_engineStatisticsPtr->SetLastError(
VE_NOT_SENDING, kTraceError,
@@ -3863,23 +3374,25 @@ Channel::GetRTPStatistics(
{
// The jitter statistics is updated for each received RTP packet and is
// based on received packets.
- StreamStatistician::Statistics statistics;
- StreamStatistician* statistician =
- rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
- if (!statistician || !statistician->GetStatistics(
- &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
- _engineStatisticsPtr->SetLastError(
- VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
- "GetRTPStatistics() failed to read RTP statistics from the "
- "RTP/RTCP module");
+ if (_rtpRtcpModule->RTCP() == kRtcpOff) {
+ // If RTCP is off, there is no timed thread in the RTCP module regularly
+ // generating new stats, trigger the update manually here instead.
+ StreamStatistician* statistician =
+ rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
+ if (statistician) {
+ // Don't use returned statistics, use data from proxy instead so that
+ // max jitter can be fetched atomically.
+ RtcpStatistics s;
+ statistician->GetStatistics(&s, true);
+ }
}
+ ChannelStatistics stats = statistics_proxy_->GetStats();
const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
- if (playoutFrequency > 0)
- {
- // Scale RTP statistics given the current playout frequency
- maxJitterMs = statistics.max_jitter / (playoutFrequency / 1000);
- averageJitterMs = statistics.jitter / (playoutFrequency / 1000);
+ if (playoutFrequency > 0) {
+ // Scale RTP statistics given the current playout frequency
+ maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
+ averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
}
discardedPackets = _numberOfDiscardedPackets;
@@ -3892,29 +3405,6 @@ Channel::GetRTPStatistics(
return 0;
}
-int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
- if (sender_info == NULL) {
- _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
- "GetRemoteRTCPSenderInfo() invalid sender_info.");
- return -1;
- }
-
- // Get the sender info from the latest received RTCP Sender Report.
- RTCPSenderInfo rtcp_sender_info;
- if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
- _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
- "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
- return -1;
- }
-
- sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
- sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
- sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
- sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
- sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
- return 0;
-}
-
int Channel::GetRemoteRTCPReportBlocks(
std::vector<ReportBlock>* report_blocks) {
if (report_blocks == NULL) {
@@ -3955,11 +3445,11 @@ int Channel::GetRemoteRTCPReportBlocks(
int
Channel::GetRTPStatistics(CallStatistics& stats)
{
- // --- Part one of the final structure (four values)
+ // --- RtcpStatistics
// The jitter statistics is updated for each received RTP packet and is
// based on received packets.
- StreamStatistician::Statistics statistics;
+ RtcpStatistics statistics;
StreamStatistician* statistician =
rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
if (!statistician || !statistician->GetStatistics(
@@ -3982,7 +3472,7 @@ Channel::GetRTPStatistics(CallStatistics& stats)
stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
stats.jitterSamples);
- // --- Part two of the final structure (one value)
+ // --- RTT
uint16_t RTT(0);
RTCPMethod method = _rtpRtcpModule->RTCP();
@@ -4025,7 +3515,7 @@ Channel::GetRTPStatistics(CallStatistics& stats)
VoEId(_instanceId, _channelId),
"GetRTPStatistics() => rttMs=%d", stats.rttMs);
- // --- Part three of the final structure (four values)
+ // --- Data counters
uint32_t bytesSent(0);
uint32_t packetsSent(0);
@@ -4057,18 +3547,23 @@ Channel::GetRTPStatistics(CallStatistics& stats)
stats.bytesSent, stats.packetsSent, stats.bytesReceived,
stats.packetsReceived);
+ // --- Timestamps
+ {
+ CriticalSectionScoped lock(ts_stats_lock_.get());
+ stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
+ }
return 0;
}
-int Channel::SetFECStatus(bool enable, int redPayloadtype) {
+int Channel::SetREDStatus(bool enable, int redPayloadtype) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
- "Channel::SetFECStatus()");
+ "Channel::SetREDStatus()");
if (enable) {
if (redPayloadtype < 0 || redPayloadtype > 127) {
_engineStatisticsPtr->SetLastError(
VE_PLTYPE_ERROR, kTraceError,
- "SetFECStatus() invalid RED payload type");
+ "SetREDStatus() invalid RED payload type");
return -1;
}
@@ -4080,19 +3575,19 @@ int Channel::SetFECStatus(bool enable, int redPayloadtype) {
}
}
- if (audio_coding_->SetFECStatus(enable) != 0) {
+ if (audio_coding_->SetREDStatus(enable) != 0) {
_engineStatisticsPtr->SetLastError(
VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
- "SetFECStatus() failed to set FEC state in the ACM");
+ "SetREDStatus() failed to set RED state in the ACM");
return -1;
}
return 0;
}
int
-Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
+Channel::GetREDStatus(bool& enabled, int& redPayloadtype)
{
- enabled = audio_coding_->FECStatus();
+ enabled = audio_coding_->REDStatus();
if (enabled)
{
int8_t payloadType(0);
@@ -4100,22 +3595,43 @@ Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
{
_engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError,
- "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
+ "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
"module");
return -1;
}
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId, _channelId),
- "GetFECStatus() => enabled=%d, redPayloadtype=%d",
+ "GetREDStatus() => enabled=%d, redPayloadtype=%d",
enabled, redPayloadtype);
return 0;
}
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId, _channelId),
- "GetFECStatus() => enabled=%d", enabled);
+ "GetREDStatus() => enabled=%d", enabled);
return 0;
}
+int Channel::SetCodecFECStatus(bool enable) {
+ WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+ "Channel::SetCodecFECStatus()");
+
+ if (audio_coding_->SetCodecFEC(enable) != 0) {
+ _engineStatisticsPtr->SetLastError(
+ VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+ "SetCodecFECStatus() failed to set FEC state");
+ return -1;
+ }
+ return 0;
+}
+
+bool Channel::GetCodecFECStatus() {
+ bool enabled = audio_coding_->CodecFEC();
+ WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+ VoEId(_instanceId, _channelId),
+ "GetCodecFECStatus() => enabled=%d", enabled);
+ return enabled;
+}
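
The rename from FEC to RED plus the new SetCodecFECStatus() separates two mechanisms the old API lumped together: RED (RFC 2198 redundant audio payloads, toggled in the ACM via SetREDStatus) and codec-internal FEC such as Opus in-band FEC (toggled via SetCodecFEC). A minimal standalone sketch of the two independent toggles; AcmLike is a made-up stand-in, not the real AudioCodingModule interface:

#include <cstdio>

// Made-up stand-in for the audio coding module: only the two toggles
// touched by this patch are modeled.
class AcmLike {
 public:
  int SetREDStatus(bool enable) { red_ = enable; return 0; }      // RFC 2198 redundancy
  int SetCodecFEC(bool enable) { codec_fec_ = enable; return 0; }  // e.g. Opus in-band FEC
  bool REDStatus() const { return red_; }
  bool CodecFEC() const { return codec_fec_; }

 private:
  bool red_ = false;
  bool codec_fec_ = false;
};

int main() {
  AcmLike acm;
  // RED repeats whole encoded frames in a redundant block, while codec FEC
  // embeds a low-bitrate copy inside the codec bitstream; the two are
  // independent and can be combined.
  acm.SetREDStatus(true);
  acm.SetCodecFEC(true);
  std::printf("RED=%d codecFEC=%d\n", acm.REDStatus(), acm.CodecFEC());
  return 0;
}
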
+
void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
// None of these functions can fail.
_rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
@@ -4208,76 +3724,19 @@ Channel::RTPDumpIsActive(RTPDirections direction)
return rtpDumpPtr->IsActive();
}
-int
-Channel::InsertExtraRTPPacket(unsigned char payloadType,
- bool markerBit,
- const char* payloadData,
- unsigned short payloadSize)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
- "Channel::InsertExtraRTPPacket()");
- if (payloadType > 127)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_PLTYPE, kTraceError,
- "InsertExtraRTPPacket() invalid payload type");
- return -1;
- }
- if (payloadData == NULL)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_ARGUMENT, kTraceError,
- "InsertExtraRTPPacket() invalid payload data");
- return -1;
- }
- if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_ARGUMENT, kTraceError,
- "InsertExtraRTPPacket() invalid payload size");
- return -1;
- }
- if (!_sending)
- {
- _engineStatisticsPtr->SetLastError(
- VE_NOT_SENDING, kTraceError,
- "InsertExtraRTPPacket() not sending");
- return -1;
- }
-
- // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
- // Transport::SendPacket() will be called by the module when the RTP packet
- // is created.
- // The call to SendOutgoingData() does *not* modify the timestamp and
- // payloadtype to ensure that the RTP module generates a valid RTP packet
- // (user might utilize a non-registered payload type).
- // The marker bit and payload type will be replaced just before the actual
- // transmission, i.e., the actual modification is done *after* the RTP
- // module has delivered its RTP packet back to the VoE.
- // We will use the stored values above when the packet is modified
- // (see Channel::SendPacket()).
-
- _extraPayloadType = payloadType;
- _extraMarkerBit = markerBit;
- _insertExtraRTPPacket = true;
-
- if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
- _lastPayloadType,
- _lastLocalTimeStamp,
- // Leaving the time when this frame was
- // received from the capture device as
- // undefined for voice for now.
- -1,
- (const uint8_t*) payloadData,
- payloadSize) != 0)
- {
- _engineStatisticsPtr->SetLastError(
- VE_RTP_RTCP_MODULE_ERROR, kTraceError,
- "InsertExtraRTPPacket() failed to send extra RTP packet");
- return -1;
- }
+void Channel::SetVideoEngineBWETarget(ViENetwork* vie_network,
+ int video_channel) {
+ CriticalSectionScoped cs(&_callbackCritSect);
+ if (vie_network_) {
+ vie_network_->Release();
+ vie_network_ = NULL;
+ }
+ video_channel_ = -1;
- return 0;
+ if (vie_network != NULL && video_channel != -1) {
+ vie_network_ = vie_network;
+ video_channel_ = video_channel;
+ }
}
uint32_t
@@ -4290,61 +3749,26 @@ Channel::Demultiplex(const AudioFrame& audioFrame)
return 0;
}
-// TODO(xians): This method borrows quite some code from
-// TransmitMixer::GenerateAudioFrame(), refactor these two methods and reduce
-// code duplication.
void Channel::Demultiplex(const int16_t* audio_data,
int sample_rate,
int number_of_frames,
int number_of_channels) {
- // The highest sample rate that WebRTC supports for mono audio is 96kHz.
- static const int kMaxNumberOfFrames = 960;
- assert(number_of_frames <= kMaxNumberOfFrames);
-
- // Get the send codec information for doing resampling or downmixing later on.
CodecInst codec;
GetSendCodec(codec);
- assert(codec.channels == 1 || codec.channels == 2);
- int support_sample_rate = std::min(32000,
- std::min(sample_rate, codec.plfreq));
-
- // Downmix the data to mono if needed.
- const int16_t* audio_ptr = audio_data;
- if (number_of_channels == 2 && codec.channels == 1) {
- if (!mono_recording_audio_.get())
- mono_recording_audio_.reset(new int16_t[kMaxNumberOfFrames]);
-
- AudioFrameOperations::StereoToMono(audio_data, number_of_frames,
- mono_recording_audio_.get());
- audio_ptr = mono_recording_audio_.get();
- }
- // Resample the data to the sample rate that the codec is using.
- if (input_resampler_.InitializeIfNeeded(sample_rate,
- support_sample_rate,
- codec.channels)) {
- WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
- "Channel::Demultiplex() unable to resample");
- return;
- }
-
- int out_length = input_resampler_.Resample(audio_ptr,
- number_of_frames * codec.channels,
- _audioFrame.data_,
- AudioFrame::kMaxDataSizeSamples);
- if (out_length == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
- "Channel::Demultiplex() resampling failed");
- return;
+ if (!mono_recording_audio_.get()) {
+ // Temporary space for DownConvertToCodecFormat.
+ mono_recording_audio_.reset(new int16_t[kMaxMonoDataSizeSamples]);
}
-
- _audioFrame.samples_per_channel_ = out_length / codec.channels;
- _audioFrame.timestamp_ = -1;
- _audioFrame.sample_rate_hz_ = support_sample_rate;
- _audioFrame.speech_type_ = AudioFrame::kNormalSpeech;
- _audioFrame.vad_activity_ = AudioFrame::kVadUnknown;
- _audioFrame.num_channels_ = codec.channels;
- _audioFrame.id_ = _channelId;
+ DownConvertToCodecFormat(audio_data,
+ number_of_frames,
+ number_of_channels,
+ sample_rate,
+ codec.channels,
+ codec.plfreq,
+ mono_recording_audio_.get(),
+ &input_resampler_,
+ &_audioFrame);
}
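
The hand-rolled downmix-and-resample body above is replaced by a single DownConvertToCodecFormat() call (a shared helper). As a rough sketch of just the downmix half, averaging interleaved stereo into mono, which is what the removed AudioFrameOperations::StereoToMono step effectively did; the function name and sample values below are illustrative only:

#include <cstdint>
#include <cstdio>

// Average interleaved stereo samples into mono. This mirrors the kind of
// downmix DownConvertToCodecFormat performs before resampling; it is an
// illustration, not the WebRTC implementation.
void StereoToMonoAvg(const int16_t* interleaved, int frames, int16_t* mono) {
  for (int i = 0; i < frames; ++i) {
    int32_t sum = interleaved[2 * i] + interleaved[2 * i + 1];
    mono[i] = static_cast<int16_t>(sum / 2);
  }
}

int main() {
  const int16_t stereo[] = {1000, 2000, -400, 400, 32767, 32767};
  int16_t mono[3];
  StereoToMonoAvg(stereo, 3, mono);
  for (int16_t s : mono) std::printf("%d ", s);  // prints: 1500 0 32767
  std::printf("\n");
  return 0;
}
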
uint32_t
@@ -4360,17 +3784,17 @@ Channel::PrepareEncodeAndSend(int mixingFrequency)
return -1;
}
- if (_inputFilePlaying)
+ if (channel_state_.Get().input_file_playing)
{
MixOrReplaceAudioWithFile(mixingFrequency);
}
- if (Mute())
- {
- AudioFrameOperations::Mute(_audioFrame);
+ bool is_muted = Mute(); // Cache locally as Mute() takes a lock.
+ if (is_muted) {
+ AudioFrameOperations::Mute(_audioFrame);
}
- if (_inputExternalMedia)
+ if (channel_state_.Get().input_external_media)
{
CriticalSectionScoped cs(&_callbackCritSect);
const bool isStereo = (_audioFrame.num_channels_ == 2);
@@ -4388,29 +3812,13 @@ Channel::PrepareEncodeAndSend(int mixingFrequency)
InsertInbandDtmfTone();
- if (_includeAudioLevelIndication)
- {
- if (rtp_audioproc_->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
- AudioProcessing::kNoError)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVoice,
- VoEId(_instanceId, _channelId),
- "Error setting AudioProcessing sample rate");
- return -1;
- }
-
- if (rtp_audioproc_->set_num_channels(_audioFrame.num_channels_,
- _audioFrame.num_channels_) !=
- AudioProcessing::kNoError)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVoice,
- VoEId(_instanceId, _channelId),
- "Error setting AudioProcessing channels");
- return -1;
- }
-
- // Performs level analysis only; does not affect the signal.
- rtp_audioproc_->ProcessStream(&_audioFrame);
+ if (_includeAudioLevelIndication) {
+ int length = _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
+ if (is_muted) {
+ rms_level_.ProcessMuted(length);
+ } else {
+ rms_level_.Process(_audioFrame.data_, length);
+ }
}
return 0;
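
With the per-channel AudioProcessing instance gone, the audio-level indication now comes from rms_level_, which accumulates frame energy and later yields a level in dBov for the RFC 6464 audio-level header extension. A back-of-the-envelope sketch of that measurement on a single block; the real RMSLevel class accumulates across calls and handles muted frames by counting them as zero-energy samples via ProcessMuted():

#include <cmath>
#include <cstdint>
#include <cstdio>

// Compute the RMS level of one block in dB relative to full scale (dBov),
// clamped to the 0..127 range used by the RFC 6464 header extension.
// Illustrative only; not the RMSLevel implementation.
int BlockLevelDbov(const int16_t* samples, int length) {
  if (length <= 0) return 127;
  double energy = 0.0;
  for (int i = 0; i < length; ++i)
    energy += static_cast<double>(samples[i]) * samples[i];
  double rms = std::sqrt(energy / length);
  if (rms < 1.0) return 127;  // silence / muted
  int dbov = static_cast<int>(std::lround(-20.0 * std::log10(rms / 32767.0)));
  if (dbov < 0) dbov = 0;
  if (dbov > 127) dbov = 127;
  return dbov;
}

int main() {
  int16_t loud[160], quiet[160];
  for (int i = 0; i < 160; ++i) { loud[i] = 16384; quiet[i] = 64; }
  std::printf("loud ~%d dBov, quiet ~%d dBov\n",
              BlockLevelDbov(loud, 160), BlockLevelDbov(quiet, 160));
  return 0;
}
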
@@ -4485,7 +3893,7 @@ int Channel::RegisterExternalMediaProcessing(
return -1;
}
_inputExternalMediaCallbackPtr = &processObject;
- _inputExternalMedia = true;
+ channel_state_.SetInputExternalMedia(true);
}
return 0;
}
@@ -4520,7 +3928,7 @@ int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
"input external media already disabled");
return 0;
}
- _inputExternalMedia = false;
+ channel_state_.SetInputExternalMedia(false);
_inputExternalMediaCallbackPtr = NULL;
}
@@ -4531,7 +3939,7 @@ int Channel::SetExternalMixing(bool enabled) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetExternalMixing(enabled=%d)", enabled);
- if (_playing)
+ if (channel_state_.Get().playing)
{
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceError,
@@ -4546,68 +3954,6 @@ int Channel::SetExternalMixing(bool enabled) {
}
int
-Channel::ResetRTCPStatistics()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::ResetRTCPStatistics()");
- uint32_t remoteSSRC(0);
- remoteSSRC = rtp_receiver_->SSRC();
- return _rtpRtcpModule->ResetRTT(remoteSSRC);
-}
-
-int
-Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::GetRoundTripTimeSummary()");
- // Override default module outputs for the case when RTCP is disabled.
- // This is done to ensure that we are backward compatible with the
- // VoiceEngine where we did not use RTP/RTCP module.
- if (!_rtpRtcpModule->RTCP())
- {
- delaysMs.min = -1;
- delaysMs.max = -1;
- delaysMs.average = -1;
- WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
- " valid RTT measurements cannot be retrieved");
- return 0;
- }
-
- uint32_t remoteSSRC;
- uint16_t RTT;
- uint16_t avgRTT;
- uint16_t maxRTT;
- uint16_t minRTT;
- // The remote SSRC will be zero if no RTP packet has been received.
- remoteSSRC = rtp_receiver_->SSRC();
- if (remoteSSRC == 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
- "Channel::GetRoundTripTimeSummary() unable to measure RTT"
- " since no RTP packet has been received yet");
- }
-
- // Retrieve RTT statistics from the RTP/RTCP module for the specified
- // channel and SSRC. The SSRC is required to parse out the correct source
- // in conference scenarios.
- if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
- "GetRoundTripTimeSummary unable to retrieve RTT values"
- " from the RTCP layer");
- delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
- }
- else
- {
- delaysMs.min = minRTT;
- delaysMs.max = maxRTT;
- delaysMs.average = avgRTT;
- }
- return 0;
-}
-
-int
Channel::GetNetworkStatistics(NetworkStatistics& stats)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
@@ -4709,20 +4055,10 @@ void Channel::UpdatePlayoutTimestamp(bool rtcp) {
return;
}
- int32_t playout_frequency = audio_coding_->PlayoutFrequency();
- CodecInst current_recive_codec;
- if (audio_coding_->ReceiveCodec(&current_recive_codec) == 0) {
- if (STR_CASE_CMP("G722", current_recive_codec.plname) == 0) {
- playout_frequency = 8000;
- } else if (STR_CASE_CMP("opus", current_recive_codec.plname) == 0) {
- playout_frequency = 48000;
- }
- }
-
jitter_buffer_playout_timestamp_ = playout_timestamp;
// Remove the playout delay.
- playout_timestamp -= (delay_ms * (playout_frequency / 1000));
+ playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
@@ -4757,7 +4093,7 @@ Channel::SetInitTimestamp(unsigned int timestamp)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetInitTimestamp()");
- if (_sending)
+ if (channel_state_.Get().sending)
{
_engineStatisticsPtr->SetLastError(
VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
@@ -4778,7 +4114,7 @@ Channel::SetInitSequenceNumber(short sequenceNumber)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetInitSequenceNumber()");
- if (_sending)
+ if (channel_state_.Get().sending)
{
_engineStatisticsPtr->SetLastError(
VE_SENDING, kTraceError,
@@ -4810,7 +4146,7 @@ Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
int32_t
Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
{
- scoped_array<int16_t> fileBuffer(new int16_t[640]);
+ scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
int fileSamples(0);
{
@@ -4850,11 +4186,11 @@ Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
{
// Currently file stream is always mono.
// TODO(xians): Change the code when FilePlayer supports real stereo.
- Utility::MixWithSat(_audioFrame.data_,
- _audioFrame.num_channels_,
- fileBuffer.get(),
- 1,
- fileSamples);
+ MixWithSat(_audioFrame.data_,
+ _audioFrame.num_channels_,
+ fileBuffer.get(),
+ 1,
+ fileSamples);
}
else
{
@@ -4880,7 +4216,7 @@ Channel::MixAudioWithFile(AudioFrame& audioFrame,
{
assert(mixingFrequency <= 32000);
- scoped_array<int16_t> fileBuffer(new int16_t[640]);
+ scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
int fileSamples(0);
{
@@ -4910,11 +4246,11 @@ Channel::MixAudioWithFile(AudioFrame& audioFrame,
{
// Currently file stream is always mono.
// TODO(xians): Change the code when FilePlayer supports real stereo.
- Utility::MixWithSat(audioFrame.data_,
- audioFrame.num_channels_,
- fileBuffer.get(),
- 1,
- fileSamples);
+ MixWithSat(audioFrame.data_,
+ audioFrame.num_channels_,
+ fileBuffer.get(),
+ 1,
+ fileSamples);
}
else
{
@@ -5002,28 +4338,6 @@ Channel::InsertInbandDtmfTone()
return 0;
}
-void
-Channel::ResetDeadOrAliveCounters()
-{
- _countDeadDetections = 0;
- _countAliveDetections = 0;
-}
-
-void
-Channel::UpdateDeadOrAliveCounters(bool alive)
-{
- if (alive)
- _countAliveDetections++;
- else
- _countDeadDetections++;
-}
-
-int
-Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
-{
- return 0;
-}
-
int32_t
Channel::SendPacketRaw(const void *data, int len, bool RTCP)
{
@@ -5050,44 +4364,29 @@ void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
rtp_timestamp, sequence_number);
// Get frequency of last received payload
- int rtp_receive_frequency = audio_coding_->ReceiveFrequency();
-
- CodecInst current_receive_codec;
- if (audio_coding_->ReceiveCodec(&current_receive_codec) != 0) {
- return;
- }
+ int rtp_receive_frequency = GetPlayoutFrequency();
// Update the least required delay.
least_required_delay_ms_ = audio_coding_->LeastRequiredDelayMs();
- if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
- // Even though the actual sampling rate for G.722 audio is
- // 16,000 Hz, the RTP clock rate for the G722 payload format is
- // 8,000 Hz because that value was erroneously assigned in
- // RFC 1890 and must remain unchanged for backward compatibility.
- rtp_receive_frequency = 8000;
- } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
- // We are resampling Opus internally to 32,000 Hz until all our
- // DSP routines can operate at 48,000 Hz, but the RTP clock
- // rate for the Opus payload format is standardized to 48,000 Hz,
- // because that is the maximum supported decoding sampling rate.
- rtp_receive_frequency = 48000;
- }
-
// |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
// every incoming packet.
uint32_t timestamp_diff_ms = (rtp_timestamp -
jitter_buffer_playout_timestamp_) / (rtp_receive_frequency / 1000);
+ if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) ||
+ timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
+ // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP
+ // timestamp, the resulting difference is negative, but is set to zero.
+ // This can happen when a network glitch causes a packet to arrive late,
+ // and during long comfort noise periods with clock drift.
+ timestamp_diff_ms = 0;
+ }
uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
(rtp_receive_frequency / 1000);
_previousTimestamp = rtp_timestamp;
- if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
- timestamp_diff_ms = 0;
- }
-
if (timestamp_diff_ms == 0) return;
if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
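
The reshuffled logic above adds an IsNewerTimestamp() guard so that a packet whose RTP timestamp is older than the last played-out one (a late arrival, or drift during long comfort-noise periods) yields a zero delay rather than a huge unsigned difference. A standalone sketch of the arithmetic; the half-range newer-than test and the 10000 ms value used for kVoiceEngineMaxMinPlayoutDelayMs are assumptions made purely for illustration:

#include <cstdint>
#include <cstdio>

// Simplified "is t1 newer than t2" for 32-bit RTP timestamps: a forward
// distance of less than half the range counts as newer (wraparound-safe).
bool IsNewer(uint32_t t1, uint32_t t2) {
  return t1 != t2 && (t1 - t2) < 0x80000000u;
}

// Delay of an incoming packet relative to the last played-out timestamp,
// in milliseconds, mirroring the clamping added in UpdatePacketDelay().
// Late or wildly out-of-range packets report 0 ms.
uint32_t PacketDelayMs(uint32_t rtp_ts, uint32_t playout_ts, int clock_hz) {
  const uint32_t kMaxMinPlayoutDelayMs = 10000;  // assumed value
  uint32_t diff_ms = (rtp_ts - playout_ts) / (clock_hz / 1000);
  if (!IsNewer(rtp_ts, playout_ts) || diff_ms > 2 * kMaxMinPlayoutDelayMs)
    return 0;
  return diff_ms;
}

int main() {
  // 160 ticks ahead at an 8 kHz clock => 20 ms; 160 ticks behind => 0 ms.
  std::printf("%u %u\n", PacketDelayMs(1160, 1000, 8000),
              PacketDelayMs(1000, 1160, 8000));
  return 0;
}
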
@@ -5152,25 +4451,6 @@ Channel::RegisterReceiveCodecsToRTPModule()
}
}
-int Channel::ApmProcessRx(AudioFrame& frame) {
- // Register the (possibly new) frame parameters.
- if (rx_audioproc_->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
- assert(false);
- LOG_FERR1(LS_ERROR, set_sample_rate_hz, frame.sample_rate_hz_);
- }
- if (rx_audioproc_->set_num_channels(frame.num_channels_,
- frame.num_channels_) != 0) {
- assert(false);
- LOG_FERR2(LS_ERROR, set_num_channels, frame.num_channels_,
- frame.num_channels_);
- }
- if (rx_audioproc_->ProcessStream(&frame) != 0) {
- assert(false);
- LOG_FERR0(LS_ERROR, ProcessStream);
- }
- return 0;
-}
-
int Channel::SetSecondarySendCodec(const CodecInst& codec,
int red_payload_type) {
// Sanity check for payload type.
@@ -5251,5 +4531,36 @@ int Channel::SetRedPayloadType(int red_payload_type) {
return 0;
}
+int Channel::SetSendRtpHeaderExtension(bool enable, RTPExtensionType type,
+ unsigned char id) {
+ int error = 0;
+ _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
+ if (enable) {
+ error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
+ }
+ return error;
+}
+
+int32_t Channel::GetPlayoutFrequency() {
+ int32_t playout_frequency = audio_coding_->PlayoutFrequency();
+ CodecInst current_recive_codec;
+ if (audio_coding_->ReceiveCodec(&current_recive_codec) == 0) {
+ if (STR_CASE_CMP("G722", current_recive_codec.plname) == 0) {
+ // Even though the actual sampling rate for G.722 audio is
+ // 16,000 Hz, the RTP clock rate for the G722 payload format is
+ // 8,000 Hz because that value was erroneously assigned in
+ // RFC 1890 and must remain unchanged for backward compatibility.
+ playout_frequency = 8000;
+ } else if (STR_CASE_CMP("opus", current_recive_codec.plname) == 0) {
+ // We are resampling Opus internally to 32,000 Hz until all our
+ // DSP routines can operate at 48,000 Hz, but the RTP clock
+ // rate for the Opus payload format is standardized to 48,000 Hz,
+ // because that is the maximum supported decoding sampling rate.
+ playout_frequency = 48000;
+ }
+ }
+ return playout_frequency;
+}
+
} // namespace voe
} // namespace webrtc
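
The new GetPlayoutFrequency() helper in channel.cc centralizes two RTP clock-rate quirks that used to be duplicated in UpdatePlayoutTimestamp() and UpdatePacketDelay(): G.722 advertises an 8 kHz RTP clock despite carrying 16 kHz audio (an RFC 1890 legacy), and Opus always uses a 48 kHz RTP clock. A standalone sketch of the same mapping; the plain strcasecmp-based name check stands in for matching on the received CodecInst:

#include <cstdio>
#include <strings.h>  // strcasecmp (POSIX case-insensitive compare)

// Map a decoder's nominal playout rate to the RTP clock rate used for
// timestamp arithmetic. Same special cases as GetPlayoutFrequency().
int RtpClockRateHz(const char* codec_name, int nominal_playout_hz) {
  if (strcasecmp(codec_name, "G722") == 0)
    return 8000;   // RFC 1890 legacy: G.722 RTP clock is 8 kHz, audio is 16 kHz.
  if (strcasecmp(codec_name, "opus") == 0)
    return 48000;  // The Opus RTP clock is always 48 kHz.
  return nominal_playout_hz;
}

int main() {
  std::printf("G722 -> %d\n", RtpClockRateHz("G722", 16000));
  std::printf("opus -> %d\n", RtpClockRateHz("opus", 32000));
  std::printf("PCMU -> %d\n", RtpClockRateHz("PCMU", 8000));
  return 0;
}
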
diff --git a/chromium/third_party/webrtc/voice_engine/channel.h b/chromium/third_party/webrtc/voice_engine/channel.h
index f8b04fdd18f..3c49d9bd297 100644
--- a/chromium/third_party/webrtc/voice_engine/channel.h
+++ b/chromium/third_party/webrtc/voice_engine/channel.h
@@ -8,13 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VOICE_ENGINE_CHANNEL_H
-#define WEBRTC_VOICE_ENGINE_CHANNEL_H
+#ifndef WEBRTC_VOICE_ENGINE_CHANNEL_H_
+#define WEBRTC_VOICE_ENGINE_CHANNEL_H_
#include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h"
+#include "webrtc/modules/audio_processing/rms_level.h"
+#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/utility/interface/file_player.h"
@@ -33,6 +35,11 @@
#include "webrtc/voice_engine/include/voe_dtmf.h"
#endif
+namespace rtc {
+
+class TimestampWrapAroundHandler;
+}
+
namespace webrtc {
class AudioDeviceModule;
@@ -41,12 +48,14 @@ class CriticalSectionWrapper;
class FileWrapper;
class ProcessThread;
class ReceiveStatistics;
+class RemoteNtpTimeEstimator;
class RtpDump;
class RTPPayloadRegistry;
class RtpReceiver;
class RTPReceiverAudio;
class RtpRtcp;
class TelephoneEventHandler;
+class ViENetwork;
class VoEMediaProcess;
class VoERTCPObserver;
class VoERTPObserver;
@@ -59,9 +68,88 @@ struct SenderInfo;
namespace voe {
class Statistics;
+class StatisticsProxy;
class TransmitMixer;
class OutputMixer;
+// Helper class to simplify locking scheme for members that are accessed from
+// multiple threads.
+// Example: a member can be set on thread T1 and read by an internal audio
+// thread T2. Accessing the member via this class ensures that we are
+// safe and also avoid TSan v2 warnings.
+class ChannelState {
+ public:
+ struct State {
+ State() : rx_apm_is_enabled(false),
+ input_external_media(false),
+ output_file_playing(false),
+ input_file_playing(false),
+ playing(false),
+ sending(false),
+ receiving(false) {}
+
+ bool rx_apm_is_enabled;
+ bool input_external_media;
+ bool output_file_playing;
+ bool input_file_playing;
+ bool playing;
+ bool sending;
+ bool receiving;
+ };
+
+ ChannelState() : lock_(CriticalSectionWrapper::CreateCriticalSection()) {
+ }
+ virtual ~ChannelState() {}
+
+ void Reset() {
+ CriticalSectionScoped lock(lock_.get());
+ state_ = State();
+ }
+
+ State Get() const {
+ CriticalSectionScoped lock(lock_.get());
+ return state_;
+ }
+
+ void SetRxApmIsEnabled(bool enable) {
+ CriticalSectionScoped lock(lock_.get());
+ state_.rx_apm_is_enabled = enable;
+ }
+
+ void SetInputExternalMedia(bool enable) {
+ CriticalSectionScoped lock(lock_.get());
+ state_.input_external_media = enable;
+ }
+
+ void SetOutputFilePlaying(bool enable) {
+ CriticalSectionScoped lock(lock_.get());
+ state_.output_file_playing = enable;
+ }
+
+ void SetInputFilePlaying(bool enable) {
+ CriticalSectionScoped lock(lock_.get());
+ state_.input_file_playing = enable;
+ }
+
+ void SetPlaying(bool enable) {
+ CriticalSectionScoped lock(lock_.get());
+ state_.playing = enable;
+ }
+
+ void SetSending(bool enable) {
+ CriticalSectionScoped lock(lock_.get());
+ state_.sending = enable;
+ }
+
+ void SetReceiving(bool enable) {
+ CriticalSectionScoped lock(lock_.get());
+ state_.receiving = enable;
+ }
+
+private:
+ scoped_ptr<CriticalSectionWrapper> lock_;
+ State state_;
+};
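
ChannelState gathers what used to be half a dozen independent bools (with ad-hoc locking in accessors such as Sending()) into one lock-guarded struct that callers copy out, so a reader always sees a consistent snapshot and never holds the lock while acting on it. A minimal sketch of the same pattern, using std::mutex in place of CriticalSectionWrapper and only two of the flags:

#include <cstdio>
#include <mutex>

// Same idea as voe::ChannelState: all flags live in one struct behind a
// lock, writers mutate individual fields, readers copy the whole struct.
class StateHolder {
 public:
  struct State {
    bool sending = false;
    bool playing = false;
  };

  State Get() const {
    std::lock_guard<std::mutex> lock(lock_);
    return state_;  // copied out; the caller works on the snapshot lock-free
  }
  void SetSending(bool enable) {
    std::lock_guard<std::mutex> lock(lock_);
    state_.sending = enable;
  }
  void SetPlaying(bool enable) {
    std::lock_guard<std::mutex> lock(lock_);
    state_.playing = enable;
  }

 private:
  mutable std::mutex lock_;
  State state_;
};

int main() {
  StateHolder s;
  s.SetSending(true);
  StateHolder::State snapshot = s.Get();
  std::printf("sending=%d playing=%d\n", snapshot.sending, snapshot.playing);
  return 0;
}
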
class Channel:
public RtpData,
@@ -106,8 +194,6 @@ public:
int32_t SetNetEQPlayoutMode(NetEqModes mode);
int32_t GetNetEQPlayoutMode(NetEqModes& mode);
- int32_t SetOnHoldStatus(bool enable, OnHoldModes mode);
- int32_t GetOnHoldStatus(bool& enabled, OnHoldModes& mode);
int32_t RegisterVoiceEngineObserver(VoiceEngineObserver& observer);
int32_t DeRegisterVoiceEngineObserver();
@@ -119,14 +205,7 @@ public:
int32_t GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX);
int32_t SetRecPayloadType(const CodecInst& codec);
int32_t GetRecPayloadType(CodecInst& codec);
- int32_t SetAMREncFormat(AmrMode mode);
- int32_t SetAMRDecFormat(AmrMode mode);
- int32_t SetAMRWbEncFormat(AmrMode mode);
- int32_t SetAMRWbDecFormat(AmrMode mode);
int32_t SetSendCNPayloadType(int type, PayloadFrequencies frequency);
- int32_t SetISACInitTargetRate(int rateBps, bool useFixedFrameSize);
- int32_t SetISACMaxRate(int rateBps);
- int32_t SetISACMaxPayloadSize(int sizeBytes);
// VoE dual-streaming.
int SetSecondarySendCodec(const CodecInst& codec, int red_payload_type);
@@ -136,7 +215,8 @@ public:
// VoENetwork
int32_t RegisterExternalTransport(Transport& transport);
int32_t DeRegisterExternalTransport();
- int32_t ReceivedRTPPacket(const int8_t* data, int32_t length);
+ int32_t ReceivedRTPPacket(const int8_t* data, int32_t length,
+ const PacketTime& packet_time);
int32_t ReceivedRTCPPacket(const int8_t* data, int32_t length);
// VoEFile
@@ -154,8 +234,6 @@ public:
int StopPlayingFileLocally();
int IsPlayingFileLocally() const;
int RegisterFilePlayingToMixer();
- int ScaleLocalFilePlayout(float scale);
- int GetLocalPlayoutPosition(int& positionMs);
int StartPlayingFileAsMicrophone(const char* fileName, bool loop,
FileFormats format,
int startPosition,
@@ -170,7 +248,6 @@ public:
const CodecInst* codecInst);
int StopPlayingFileAsMicrophone();
int IsPlayingFileAsMicrophone() const;
- int ScaleFileAsMicrophonePlayout(float scale);
int StartRecordingPlayout(const char* fileName, const CodecInst* codecInst);
int StartRecordingPlayout(OutStream* stream, const CodecInst* codecInst);
int StopRecordingPlayout();
@@ -193,12 +270,6 @@ public:
int SetChannelOutputVolumeScaling(float scaling);
int GetChannelOutputVolumeScaling(float& scaling) const;
- // VoECallReport
- void ResetDeadOrAliveCounters();
- int ResetRTCPStatistics();
- int GetRoundTripTimeSummary(StatVal& delaysMs) const;
- int GetDeadOrAliveCounters(int& countDead, int& countAlive) const;
-
// VoENetEqStats
int GetNetworkStatistics(NetworkStatistics& stats);
void GetDecodingCallStatistics(AudioDecodingCallStats* stats) const;
@@ -217,10 +288,6 @@ public:
// VoEVideoSyncExtended
int GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const;
- // VoEEncryption
- int RegisterExternalEncryption(Encryption& encryption);
- int DeRegisterExternalEncryption();
-
// VoEDtmf
int SendTelephoneEventOutband(unsigned char eventCode, int lengthMs,
int attenuationDb, bool playDtmfEvent);
@@ -248,16 +315,15 @@ public:
#endif
// VoERTP_RTCP
- int RegisterRTPObserver(VoERTPObserver& observer);
- int DeRegisterRTPObserver();
int RegisterRTCPObserver(VoERTCPObserver& observer);
int DeRegisterRTCPObserver();
int SetLocalSSRC(unsigned int ssrc);
int GetLocalSSRC(unsigned int& ssrc);
int GetRemoteSSRC(unsigned int& ssrc);
- int GetRemoteCSRCs(unsigned int arrCSRC[15]);
- int SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID);
- int GetRTPAudioLevelIndicationStatus(bool& enable, unsigned char& ID);
+ int SetSendAudioLevelIndicationStatus(bool enable, unsigned char id);
+ int SetReceiveAudioLevelIndicationStatus(bool enable, unsigned char id);
+ int SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id);
+ int SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id);
int SetRTCPStatus(bool enable);
int GetRTCPStatus(bool& enabled);
int SetRTCP_CNAME(const char cName[256]);
@@ -273,19 +339,18 @@ public:
int GetRTPStatistics(unsigned int& averageJitterMs,
unsigned int& maxJitterMs,
unsigned int& discardedPackets);
- int GetRemoteRTCPSenderInfo(SenderInfo* sender_info);
int GetRemoteRTCPReportBlocks(std::vector<ReportBlock>* report_blocks);
int GetRTPStatistics(CallStatistics& stats);
- int SetFECStatus(bool enable, int redPayloadtype);
- int GetFECStatus(bool& enabled, int& redPayloadtype);
+ int SetREDStatus(bool enable, int redPayloadtype);
+ int GetREDStatus(bool& enabled, int& redPayloadtype);
+ int SetCodecFECStatus(bool enable);
+ bool GetCodecFECStatus();
void SetNACKStatus(bool enable, int maxNumberOfPackets);
int StartRTPDump(const char fileNameUTF8[1024], RTPDirections direction);
int StopRTPDump(RTPDirections direction);
bool RTPDumpIsActive(RTPDirections direction);
- int InsertExtraRTPPacket(unsigned char payloadType, bool markerBit,
- const char* payloadData,
- unsigned short payloadSize);
- uint32_t LastRemoteTimeStamp() { return _lastRemoteTimeStamp; }
+ // Takes ownership of the ViENetwork.
+ void SetVideoEngineBWETarget(ViENetwork* vie_network, int video_channel);
// From AudioPacketizationCallback in the ACM
int32_t SendData(FrameType frameType,
@@ -376,36 +441,25 @@ public:
}
bool Playing() const
{
- return _playing;
+ return channel_state_.Get().playing;
}
bool Sending() const
{
- // A lock is needed because |_sending| is accessed by both
- // TransmitMixer::PrepareDemux() and StartSend()/StopSend(), which
- // are called by different threads.
- CriticalSectionScoped cs(&_callbackCritSect);
- return _sending;
+ return channel_state_.Get().sending;
}
bool Receiving() const
{
- return _receiving;
+ return channel_state_.Get().receiving;
}
bool ExternalTransport() const
{
+ CriticalSectionScoped cs(&_callbackCritSect);
return _externalTransport;
}
bool ExternalMixing() const
{
return _externalMixing;
}
- bool OutputIsOnHold() const
- {
- return _outputIsOnHold;
- }
- bool InputIsOnHold() const
- {
- return _inputIsOnHold;
- }
RtpRtcp* RtpRtcpModulePtr() const
{
return _rtpRtcpModule.get();
@@ -425,6 +479,11 @@ public:
uint32_t PrepareEncodeAndSend(int mixingFrequency);
uint32_t EncodeAndSend();
+ // From BitrateObserver (called by the RTP/RTCP module).
+ void OnNetworkChanged(const uint32_t bitrate_bps,
+ const uint8_t fraction_lost, // 0 - 255.
+ const uint32_t rtt);
+
private:
bool ReceivePacket(const uint8_t* packet, int packet_length,
const RTPHeader& header, bool in_order);
@@ -437,14 +496,16 @@ private:
int InsertInbandDtmfTone();
int32_t MixOrReplaceAudioWithFile(int mixingFrequency);
int32_t MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency);
- void UpdateDeadOrAliveCounters(bool alive);
int32_t SendPacketRaw(const void *data, int len, bool RTCP);
void UpdatePacketDelay(uint32_t timestamp,
uint16_t sequenceNumber);
void RegisterReceiveCodecsToRTPModule();
- int ApmProcessRx(AudioFrame& audioFrame);
int SetRedPayloadType(int red_payload_type);
+ int SetSendRtpHeaderExtension(bool enable, RTPExtensionType type,
+ unsigned char id);
+
+ int32_t GetPlayoutFrequency();
CriticalSectionWrapper& _fileCritSect;
CriticalSectionWrapper& _callbackCritSect;
@@ -452,9 +513,12 @@ private:
uint32_t _instanceId;
int32_t _channelId;
+ ChannelState channel_state_;
+
scoped_ptr<RtpHeaderParser> rtp_header_parser_;
scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
scoped_ptr<ReceiveStatistics> rtp_receive_statistics_;
+ scoped_ptr<StatisticsProxy> statistics_proxy_;
scoped_ptr<RtpReceiver> rtp_receiver_;
TelephoneEventHandler* telephone_event_handler_;
scoped_ptr<RtpRtcp> _rtpRtcpModule;
@@ -464,9 +528,9 @@ private:
AudioLevel _outputAudioLevel;
bool _externalTransport;
AudioFrame _audioFrame;
- scoped_array<int16_t> mono_recording_audio_;
- // Resampler is used when input data is stereo while codec is mono.
- PushResampler input_resampler_;
+ scoped_ptr<int16_t[]> mono_recording_audio_;
+ // Downsamples to the codec rate if necessary.
+ PushResampler<int16_t> input_resampler_;
uint8_t _audioLevel_dBov;
FilePlayer* _inputFilePlayerPtr;
FilePlayer* _outputFilePlayerPtr;
@@ -474,22 +538,17 @@ private:
int _inputFilePlayerId;
int _outputFilePlayerId;
int _outputFileRecorderId;
- bool _inputFilePlaying;
- bool _outputFilePlaying;
bool _outputFileRecording;
DtmfInbandQueue _inbandDtmfQueue;
DtmfInband _inbandDtmfGenerator;
- bool _inputExternalMedia;
bool _outputExternalMedia;
VoEMediaProcess* _inputExternalMediaCallbackPtr;
VoEMediaProcess* _outputExternalMediaCallbackPtr;
- uint8_t* _encryptionRTPBufferPtr;
- uint8_t* _decryptionRTPBufferPtr;
- uint8_t* _encryptionRTCPBufferPtr;
- uint8_t* _decryptionRTCPBufferPtr;
uint32_t _timeStamp;
uint8_t _sendTelephoneEventPayloadType;
+ scoped_ptr<RemoteNtpTimeEstimator> ntp_estimator_;
+
// Timestamp of the audio pulled from NetEq.
uint32_t jitter_buffer_playout_timestamp_;
uint32_t playout_timestamp_rtp_;
@@ -499,6 +558,15 @@ private:
uint16_t send_sequence_number_;
uint8_t restored_packet_[kVoiceEngineMaxIpPacketSizeBytes];
+ scoped_ptr<CriticalSectionWrapper> ts_stats_lock_;
+
+ scoped_ptr<rtc::TimestampWrapAroundHandler> rtp_ts_wraparound_handler_;
+ // The rtp timestamp of the first played out audio frame.
+ int64_t capture_start_rtp_time_stamp_;
+ // The capture ntp time (in local timebase) of the first played out audio
+ // frame.
+ int64_t capture_start_ntp_time_ms_;
+
// uses
Statistics* _engineStatisticsPtr;
OutputMixer* _outputMixerPtr;
@@ -508,42 +576,27 @@ private:
VoiceEngineObserver* _voiceEngineObserverPtr; // owned by base
CriticalSectionWrapper* _callbackCritSectPtr; // owned by base
Transport* _transportPtr; // WebRtc socket or external transport
- Encryption* _encryptionPtr; // WebRtc SRTP or external encryption
- scoped_ptr<AudioProcessing> rtp_audioproc_;
+ RMSLevel rms_level_;
scoped_ptr<AudioProcessing> rx_audioproc_; // far end AudioProcessing
VoERxVadCallback* _rxVadObserverPtr;
int32_t _oldVadDecision;
int32_t _sendFrameType; // Send data is voice, 1-voice, 0-otherwise
- VoERTPObserver* _rtpObserverPtr;
VoERTCPObserver* _rtcpObserverPtr;
// VoEBase
- bool _outputIsOnHold;
bool _externalPlayout;
bool _externalMixing;
- bool _inputIsOnHold;
- bool _playing;
- bool _sending;
- bool _receiving;
bool _mixFileWithMicrophone;
- bool _rtpObserver;
bool _rtcpObserver;
// VoEVolumeControl
bool _mute;
float _panLeft;
float _panRight;
float _outputGain;
- // VoEEncryption
- bool _encrypting;
- bool _decrypting;
// VoEDtmf
bool _playOutbandDtmfEvent;
bool _playInbandDtmfEvent;
// VoeRTP_RTCP
- uint8_t _extraPayloadType;
- bool _insertExtraRTPPacket;
- bool _extraMarkerBit;
uint32_t _lastLocalTimeStamp;
- uint32_t _lastRemoteTimeStamp;
int8_t _lastPayloadType;
bool _includeAudioLevelIndication;
// VoENetwork
@@ -552,9 +605,9 @@ private:
uint32_t _rtpTimeOutSeconds;
bool _connectionObserver;
VoEConnectionObserver* _connectionObserverPtr;
- uint32_t _countAliveDetections;
- uint32_t _countDeadDetections;
AudioFrame::SpeechType _outputSpeechType;
+ ViENetwork* vie_network_;
+ int video_channel_;
// VoEVideoSync
uint32_t _average_jitter_buffer_delay_us;
int least_required_delay_ms_;
@@ -562,13 +615,16 @@ private:
uint16_t _recPacketDelayMs;
// VoEAudioProcessing
bool _RxVadDetection;
- bool _rxApmIsEnabled;
bool _rxAgcIsEnabled;
bool _rxNsIsEnabled;
bool restored_packet_in_use_;
+ // RtcpBandwidthObserver
+ scoped_ptr<BitrateController> bitrate_controller_;
+ scoped_ptr<RtcpBandwidthObserver> rtcp_bandwidth_observer_;
+ scoped_ptr<BitrateObserver> send_bitrate_observer_;
};
} // namespace voe
} // namespace webrtc
-#endif // WEBRTC_VOICE_ENGINE_CHANNEL_H
+#endif // WEBRTC_VOICE_ENGINE_CHANNEL_H_
diff --git a/chromium/third_party/webrtc/voice_engine/channel_manager.h b/chromium/third_party/webrtc/voice_engine/channel_manager.h
index 1da976dc83a..3c0f68147e4 100644
--- a/chromium/third_party/webrtc/voice_engine/channel_manager.h
+++ b/chromium/third_party/webrtc/voice_engine/channel_manager.h
@@ -13,8 +13,8 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/atomic32.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
diff --git a/chromium/third_party/webrtc/voice_engine/dtmf_inband_queue.cc b/chromium/third_party/webrtc/voice_engine/dtmf_inband_queue.cc
index e2331db1495..86e8d62b72c 100644
--- a/chromium/third_party/webrtc/voice_engine/dtmf_inband_queue.cc
+++ b/chromium/third_party/webrtc/voice_engine/dtmf_inband_queue.cc
@@ -71,15 +71,17 @@ DtmfInbandQueue::NextDtmf(uint16_t* len, uint8_t* level)
return nextDtmf;
}
-bool
+bool
DtmfInbandQueue::PendingDtmf()
{
- return(_nextEmptyIndex>0);
+ CriticalSectionScoped lock(&_DtmfCritsect);
+ return _nextEmptyIndex > 0;
}
-void
+void
DtmfInbandQueue::ResetDtmf()
{
+ CriticalSectionScoped lock(&_DtmfCritsect);
_nextEmptyIndex = 0;
}
diff --git a/chromium/third_party/webrtc/voice_engine/include/mock/fake_voe_external_media.h b/chromium/third_party/webrtc/voice_engine/include/mock/fake_voe_external_media.h
index f45e1ba0f28..b327f3c52a6 100644
--- a/chromium/third_party/webrtc/voice_engine/include/mock/fake_voe_external_media.h
+++ b/chromium/third_party/webrtc/voice_engine/include/mock/fake_voe_external_media.h
@@ -53,7 +53,7 @@ class FakeVoEExternalMedia : public VoEExternalMedia {
int samples_per_channel, int sample_rate_hz,
int num_channels) {
const int length = samples_per_channel * num_channels;
- scoped_array<int16_t> data;
+ scoped_ptr<int16_t[]> data;
if (!audio) {
data.reset(new int16_t[length]);
memset(data.get(), 0, length * sizeof(data[0]));
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_base.h b/chromium/third_party/webrtc/voice_engine/include/voe_base.h
index f54027b8a4a..3a78f7b63b8 100644
--- a/chromium/third_party/webrtc/voice_engine/include/voe_base.h
+++ b/chromium/third_party/webrtc/voice_engine/include/voe_base.h
@@ -40,6 +40,7 @@ namespace webrtc {
class AudioDeviceModule;
class AudioProcessing;
+class AudioTransport;
class Config;
const int kVoEDefault = -1;
@@ -84,7 +85,9 @@ public:
// receives callbacks for generated trace messages.
static int SetTraceCallback(TraceCallback* callback);
+#if !defined(WEBRTC_CHROMIUM_BUILD)
static int SetAndroidObjects(void* javaVM, void* env, void* context);
+#endif
protected:
VoiceEngine() {}
@@ -169,19 +172,18 @@ public:
// Gets the last VoiceEngine error code.
virtual int LastError() = 0;
- // Stops or resumes playout and transmission on a temporary basis.
- virtual int SetOnHoldStatus(int channel, bool enable,
- OnHoldModes mode = kHoldSendAndPlay) = 0;
+ // TODO(xians): Make the interface pure virtual after libjingle
+ // implements the interface in its FakeWebRtcVoiceEngine.
+ virtual AudioTransport* audio_transport() { return NULL; }
- // Gets the current playout and transmission status.
+ // To be removed. Don't use.
+ virtual int SetOnHoldStatus(int channel, bool enable,
+ OnHoldModes mode = kHoldSendAndPlay) { return -1; }
virtual int GetOnHoldStatus(int channel, bool& enabled,
- OnHoldModes& mode) = 0;
-
- // Sets the NetEQ playout mode for a specified |channel| number.
- virtual int SetNetEQPlayoutMode(int channel, NetEqModes mode) = 0;
-
- // Gets the NetEQ playout mode for a specified |channel| number.
- virtual int GetNetEQPlayoutMode(int channel, NetEqModes& mode) = 0;
+ OnHoldModes& mode) { return -1; }
+ virtual int SetNetEQPlayoutMode(int channel, NetEqModes mode) { return -1; }
+ virtual int GetNetEQPlayoutMode(int channel,
+ NetEqModes& mode) { return -1; }
protected:
VoEBase() {}
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_call_report.h b/chromium/third_party/webrtc/voice_engine/include/voe_call_report.h
deleted file mode 100644
index de2c7781851..00000000000
--- a/chromium/third_party/webrtc/voice_engine/include/voe_call_report.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This sub-API supports the following functionalities:
-//
-// - Long-term speech and noise level metrics.
-// - Long-term echo metric statistics.
-// - Round Trip Time (RTT) statistics.
-// - Dead-or-Alive connection summary.
-// - Generation of call reports to text files.
-//
-// Usage example, omitting error checking:
-//
-// using namespace webrtc;
-// VoiceEngine* voe = VoiceEngine::Create();
-// VoEBase* base = VoEBase::GetInterface(voe);
-// VoECallReport report = VoECallReport::GetInterface(voe);
-// base->Init();
-// LevelStatistics stats;
-// report->GetSpeechAndNoiseSummary(stats);
-// ...
-// base->Terminate();
-// base->Release();
-// report->Release();
-// VoiceEngine::Delete(voe);
-//
-#ifndef WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_H
-#define WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_H
-
-#include "webrtc/common_types.h"
-
-namespace webrtc {
-
-class VoiceEngine;
-
-// VoECallReport
-class WEBRTC_DLLEXPORT VoECallReport
-{
-public:
- // Factory for the VoECallReport sub-API. Increases an internal
- // reference counter if successful. Returns NULL if the API is not
- // supported or if construction fails.
- static VoECallReport* GetInterface(VoiceEngine* voiceEngine);
-
- // Releases the VoECallReport sub-API and decreases an internal
- // reference counter. Returns the new reference count. This value should
- // be zero for all sub-API:s before the VoiceEngine object can be safely
- // deleted.
- virtual int Release() = 0;
-
- // Performs a combined reset of all components involved in generating
- // the call report for a specified |channel|. Pass in -1 to reset
- // all channels.
- virtual int ResetCallReportStatistics(int channel) = 0;
-
- // Gets minimum, maximum and average levels for long-term echo metrics.
- virtual int GetEchoMetricSummary(EchoStatistics& stats) = 0;
-
- // Gets minimum, maximum and average levels for Round Trip Time (RTT)
- // measurements.
- virtual int GetRoundTripTimeSummary(int channel,
- StatVal& delaysMs) = 0;
-
- // Gets the total amount of dead and alive connection detections
- // during a VoIP session.
- virtual int GetDeadOrAliveSummary(int channel, int& numOfDeadDetections,
- int& numOfAliveDetections) = 0;
-
- // Creates a text file in ASCII format, which contains a summary
- // of all the statistics that can be obtained by the call report sub-API.
- virtual int WriteReportToFile(const char* fileNameUTF8) = 0;
-
-protected:
- VoECallReport() { }
- virtual ~VoECallReport() { }
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_H
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_codec.h b/chromium/third_party/webrtc/voice_engine/include/voe_codec.h
index e69737d689b..34a223211ac 100644
--- a/chromium/third_party/webrtc/voice_engine/include/voe_codec.h
+++ b/chromium/third_party/webrtc/voice_engine/include/voe_codec.h
@@ -84,22 +84,6 @@ public:
// Gets the currently received |codec| for a specific |channel|.
virtual int GetRecCodec(int channel, CodecInst& codec) = 0;
- // Sets the initial values of target rate and frame size for iSAC
- // for a specified |channel|. This API is only valid if iSAC is setup
- // to run in channel-adaptive mode
- virtual int SetISACInitTargetRate(int channel, int rateBps,
- bool useFixedFrameSize = false) = 0;
-
- // Sets the maximum allowed iSAC rate which the codec may not exceed
- // for a single packet for the specified |channel|. The maximum rate is
- // defined as payload size per frame size in bits per second.
- virtual int SetISACMaxRate(int channel, int rateBps) = 0;
-
- // Sets the maximum allowed iSAC payload size for a specified |channel|.
- // The maximum value is set independently of the frame size, i.e.
- // 30 ms and 60 ms packets have the same limit.
- virtual int SetISACMaxPayloadSize(int channel, int sizeBytes) = 0;
-
// Sets the dynamic payload type number for a particular |codec| or
// disables (ignores) a codec for receiving. For instance, when receiving
// an invite from a SIP-based client, this function can be used to change
@@ -118,6 +102,18 @@ public:
virtual int SetSendCNPayloadType(
int channel, int type, PayloadFrequencies frequency = kFreq16000Hz) = 0;
+ // Sets the codec internal FEC (forward error correction) status for a
+ // specified |channel|. Returns 0 if success, and -1 if failed.
+ // TODO(minyue): Make SetFECStatus() pure virtual when fakewebrtcvoiceengine
+ // in talk is ready.
+ virtual int SetFECStatus(int channel, bool enable) { return -1; }
+
+ // Gets the codec internal FEC status for a specified |channel|. Returns 0
+ // with the status stored in |enabled| if success, and -1 if encountered
+ // error.
+ // TODO(minyue): Make GetFECStatus() pure virtual when fakewebrtcvoiceengine
+ // in talk is ready.
+ virtual int GetFECStatus(int channel, bool& enabled) { return -1; }
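
As the TODOs note, the new FEC methods are plain virtuals with a failing default body rather than pure virtuals, so existing fakes (such as fakewebrtcvoiceengine in talk) keep compiling until they add overrides, after which the methods can become pure virtual. A tiny sketch of that migration pattern with invented class names:

#include <cstdio>

// New API methods get a failing default body instead of "= 0", so older
// subclasses still compile until they add real overrides.
class VoECodecLike {
 public:
  virtual ~VoECodecLike() {}
  virtual int SetFECStatus(int channel, bool enable) { return -1; }
};

class LegacyFake : public VoECodecLike {};  // compiles without overriding

class NewImpl : public VoECodecLike {
 public:
  int SetFECStatus(int channel, bool enable) override {
    enabled_ = enable;
    return 0;
  }

 private:
  bool enabled_ = false;
};

int main() {
  LegacyFake legacy;
  NewImpl impl;
  std::printf("legacy=%d new=%d\n",
              legacy.SetFECStatus(0, true), impl.SetFECStatus(0, true));
  return 0;
}
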
// Sets the VAD/DTX (silence suppression) status and |mode| for a
// specified |channel|. Disabling VAD (through |enable|) will also disable
@@ -130,17 +126,15 @@ public:
virtual int GetVADStatus(int channel, bool& enabled, VadModes& mode,
bool& disabledDTX) = 0;
- // Not supported
- virtual int SetAMREncFormat(int channel, AmrMode mode) = 0;
-
- // Not supported
- virtual int SetAMRDecFormat(int channel, AmrMode mode) = 0;
-
- // Not supported
- virtual int SetAMRWbEncFormat(int channel, AmrMode mode) = 0;
-
- // Not supported
- virtual int SetAMRWbDecFormat(int channel, AmrMode mode) = 0;
+ // Don't use. To be removed.
+ virtual int SetAMREncFormat(int channel, AmrMode mode) { return -1; }
+ virtual int SetAMRDecFormat(int channel, AmrMode mode) { return -1; }
+ virtual int SetAMRWbEncFormat(int channel, AmrMode mode) { return -1; }
+ virtual int SetAMRWbDecFormat(int channel, AmrMode mode) { return -1; }
+ virtual int SetISACInitTargetRate(int channel, int rateBps,
+ bool useFixedFrameSize = false) { return -1; }
+ virtual int SetISACMaxRate(int channel, int rateBps) { return -1; }
+ virtual int SetISACMaxPayloadSize(int channel, int sizeBytes) { return -1; }
protected:
VoECodec() {}
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_dtmf.h b/chromium/third_party/webrtc/voice_engine/include/voe_dtmf.h
index 301d73f8d5d..4db8cbc5968 100644
--- a/chromium/third_party/webrtc/voice_engine/include/voe_dtmf.h
+++ b/chromium/third_party/webrtc/voice_engine/include/voe_dtmf.h
@@ -43,7 +43,7 @@ class VoiceEngine;
class WEBRTC_DLLEXPORT VoEDtmf
{
public:
-
+
// Factory for the VoEDtmf sub-API. Increases an internal
// reference counter if successful. Returns NULL if the API is not
// supported or if construction fails.
@@ -60,13 +60,13 @@ public:
bool outOfBand = true, int lengthMs = 160,
int attenuationDb = 10) = 0;
-
+
// Sets the dynamic payload |type| that should be used for telephone
// events.
virtual int SetSendTelephoneEventPayloadType(int channel,
unsigned char type) = 0;
-
+
// Gets the currently set dynamic payload |type| for telephone events.
virtual int GetSendTelephoneEventPayloadType(int channel,
unsigned char& type) = 0;
@@ -90,14 +90,10 @@ public:
virtual int PlayDtmfTone(int eventCode, int lengthMs = 200,
int attenuationDb = 10) = 0;
- // Starts playing out a DTMF feedback tone locally.
- // The tone will be played out until the corresponding stop function
- // is called.
+ // To be removed. Don't use.
virtual int StartPlayingDtmfTone(int eventCode,
- int attenuationDb = 10) = 0;
-
- // Stops playing out a DTMF feedback tone locally.
- virtual int StopPlayingDtmfTone() = 0;
+ int attenuationDb = 10) { return -1; }
+ virtual int StopPlayingDtmfTone() { return -1; }
protected:
VoEDtmf() {}
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_encryption.h b/chromium/third_party/webrtc/voice_engine/include/voe_encryption.h
deleted file mode 100644
index e4b0dd0ba09..00000000000
--- a/chromium/third_party/webrtc/voice_engine/include/voe_encryption.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This sub-API supports the following functionalities:
-//
-// - External encryption and decryption.
-//
-// Usage example, omitting error checking:
-//
-// using namespace webrtc;
-// VoiceEngine* voe = VoiceEngine::Create();
-// VoEEncryption* encrypt = VoEEncryption::GetInterface(voe);
-// ...
-// encrypt->Release();
-// VoiceEngine::Delete(voe);
-//
-#ifndef WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_H
-#define WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_H
-
-#include "webrtc/common_types.h"
-
-namespace webrtc {
-
-class VoiceEngine;
-
-class WEBRTC_DLLEXPORT VoEEncryption
-{
-public:
- // Factory for the VoEEncryption sub-API. Increases an internal
- // reference counter if successful. Returns NULL if the API is not
- // supported or if construction fails.
- static VoEEncryption* GetInterface(VoiceEngine* voiceEngine);
-
- // Releases the VoEEncryption sub-API and decreases an internal
- // reference counter. Returns the new reference count. This value should
- // be zero for all sub-API:s before the VoiceEngine object can be safely
- // deleted.
- virtual int Release() = 0;
-
- // Installs an Encryption instance and enables external encryption
- // for the selected |channel|.
- virtual int RegisterExternalEncryption(
- int channel, Encryption& encryption) = 0;
-
- // Removes an Encryption instance and disables external encryption
- // for the selected |channel|.
- virtual int DeRegisterExternalEncryption(int channel) = 0;
-
-protected:
- VoEEncryption() {}
- virtual ~VoEEncryption() {}
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_H
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_errors.h b/chromium/third_party/webrtc/voice_engine/include/voe_errors.h
index 4ce0e5c725a..572fc922ed7 100644
--- a/chromium/third_party/webrtc/voice_engine/include/voe_errors.h
+++ b/chromium/third_party/webrtc/voice_engine/include/voe_errors.h
@@ -36,7 +36,7 @@
#define VE_DTMF_OUTOF_RANGE 8022
#define VE_INVALID_CHANNELS 8023
#define VE_SET_PLTYPE_FAILED 8024
-#define VE_ENCRYPT_NOT_INITED 8025
+// 8025 is not used
#define VE_NOT_INITED 8026
#define VE_NOT_SENDING 8027
#define VE_EXT_TRANSPORT_NOT_SUPPORTED 8028
@@ -67,7 +67,7 @@
#define VE_SEND_ERROR 8092
#define VE_CANNOT_REMOVE_CONF_CHANNEL 8093
#define VE_PLTYPE_ERROR 8094
-#define VE_SET_FEC_FAILED 8095
+#define VE_SET_RED_FAILED 8095
#define VE_CANNOT_GET_PLAY_DATA 8096
#define VE_APM_ERROR 8097
#define VE_RUNTIME_PLAY_WARNING 8098
@@ -114,8 +114,8 @@
#define VE_RTP_KEEPALIVE_FAILED 9023
#define VE_SEND_DTMF_FAILED 9024
#define VE_CANNOT_RETRIEVE_CNAME 9025
-#define VE_DECRYPTION_FAILED 9026
-#define VE_ENCRYPTION_FAILED 9027
+// 9026 is not used
+// 9027 is not used
#define VE_CANNOT_RETRIEVE_RTP_STAT 9028
#define VE_GQOS_ERROR 9029
#define VE_BINDING_SOCKET_TO_LOCAL_ADDRESS_FAILED 9030
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_external_media.h b/chromium/third_party/webrtc/voice_engine/include/voe_external_media.h
index 1051d66e1db..bcfd81b8959 100644
--- a/chromium/third_party/webrtc/voice_engine/include/voe_external_media.h
+++ b/chromium/third_party/webrtc/voice_engine/include/voe_external_media.h
@@ -7,28 +7,6 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-
-// In some cases it is desirable to use an audio source or sink which may
-// not be available to the VoiceEngine, such as a DV camera. This sub-API
-// contains functions that allow for the use of such external recording
-// sources and playout sinks. It also describes how recorded data, or data
-// to be played out, can be modified outside the VoiceEngine.
-//
-// Usage example, omitting error checking:
-//
-// using namespace webrtc;
-// VoiceEngine* voe = VoiceEngine::Create();
-// VoEBase* base = VoEBase::GetInterface(voe);
-// VoEMediaProcess media = VoEMediaProcess::GetInterface(voe);
-// base->Init();
-// ...
-// media->SetExternalRecordingStatus(true);
-// ...
-// base->Terminate();
-// base->Release();
-// media->Release();
-// VoiceEngine::Delete(voe);
-//
#ifndef WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H
#define WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H
@@ -83,28 +61,6 @@ public:
virtual int DeRegisterExternalMediaProcessing(
int channel, ProcessingTypes type) = 0;
- // Toogles state of external recording.
- virtual int SetExternalRecordingStatus(bool enable) = 0;
-
- // Toogles state of external playout.
- virtual int SetExternalPlayoutStatus(bool enable) = 0;
-
- // This function accepts externally recorded audio. During transmission,
- // this method should be called at as regular an interval as possible
- // with frames of corresponding size.
- virtual int ExternalRecordingInsertData(
- const int16_t speechData10ms[], int lengthSamples,
- int samplingFreqHz, int current_delay_ms) = 0;
-
- // This function gets audio for an external playout sink.
- // During transmission, this function should be called every ~10 ms
- // to obtain a new 10 ms frame of audio. The length of the block will
- // be 160, 320, 440 or 480 samples (for 16, 32, 44 or 48 kHz sampling
- // rates respectively).
- virtual int ExternalPlayoutGetData(
- int16_t speechData10ms[], int samplingFreqHz,
- int current_delay_ms, int& lengthSamples) = 0;
-
// Pulls an audio frame from the specified |channel| for external mixing.
// If the |desired_sample_rate_hz| is 0, the signal will be returned with
// its native frequency, otherwise it will be resampled. Valid frequencies
@@ -115,6 +71,16 @@ public:
// Sets the state of external mixing. Cannot be changed during playback.
virtual int SetExternalMixing(int channel, bool enable) = 0;
+ // Don't use. To be removed.
+ virtual int SetExternalRecordingStatus(bool enable) { return -1; }
+ virtual int SetExternalPlayoutStatus(bool enable) { return -1; }
+ virtual int ExternalRecordingInsertData(
+ const int16_t speechData10ms[], int lengthSamples,
+ int samplingFreqHz, int current_delay_ms) { return -1; }
+ virtual int ExternalPlayoutGetData(
+ int16_t speechData10ms[], int samplingFreqHz,
+ int current_delay_ms, int& lengthSamples) { return -1; }
+
protected:
VoEExternalMedia() {}
virtual ~VoEExternalMedia() {}
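
The deprecation pattern applied in this header (and repeated in voe_file.h, voe_hardware.h, voe_rtp_rtcp.h and voe_volume_control.h below) is to turn pure virtuals into ordinary virtuals with failing default bodies, so third-party subclasses keep compiling while callers migrate off the methods. A minimal standalone sketch of that idea, using hypothetical Widget/LegacyCall names rather than the real VoE interfaces:

    // deprecation_sketch.cc -- illustrative only; Widget and LegacyCall are
    // hypothetical names, not part of WebRTC.
    #include <cstdio>

    class Widget {
     public:
      virtual ~Widget() {}
      // Still part of the contract: implementations must provide it.
      virtual int DoWork() = 0;
      // Deprecated: no longer pure, so subclasses need not override it.
      // The default body reports failure, mirroring the "return -1" stubs above.
      virtual int LegacyCall() { return -1; }
    };

    class MyWidget : public Widget {
     public:
      virtual int DoWork() { return 0; }  // LegacyCall() deliberately not overridden.
    };

    int main() {
      MyWidget w;
      std::printf("DoWork=%d LegacyCall=%d\n", w.DoWork(), w.LegacyCall());
      return 0;
    }
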
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_file.h b/chromium/third_party/webrtc/voice_engine/include/voe_file.h
index 128fdf0da25..bd14284b0e3 100644
--- a/chromium/third_party/webrtc/voice_engine/include/voe_file.h
+++ b/chromium/third_party/webrtc/voice_engine/include/voe_file.h
@@ -84,9 +84,6 @@ public:
// Returns the current file playing state for a specific |channel|.
virtual int IsPlayingFileLocally(int channel) = 0;
- // Sets the volume scaling for a speaker file that is already playing.
- virtual int ScaleLocalFilePlayout(int channel, float scale) = 0;
-
// Starts reading data from a file and transmits the data either
// mixed with or instead of the microphone signal.
virtual int StartPlayingFileAsMicrophone(
@@ -112,9 +109,6 @@ public:
// Returns whether the |channel| is currently playing a file as microphone.
virtual int IsPlayingFileAsMicrophone(int channel) = 0;
- // Sets the volume scaling for a microphone file that is already playing.
- virtual int ScaleFileAsMicrophonePlayout(int channel, float scale) = 0;
-
// Starts recording the mixed playout audio.
virtual int StartRecordingPlayout(int channel,
const char* fileNameUTF8,
@@ -140,40 +134,31 @@ public:
// Stops recording the microphone signal.
virtual int StopRecordingMicrophone() = 0;
-
- // Gets the duration of a file.
+ // Don't use. To be removed.
+ virtual int ScaleLocalFilePlayout(int channel, float scale) { return -1; }
+ virtual int ScaleFileAsMicrophonePlayout(
+ int channel, float scale) { return -1; }
virtual int GetFileDuration(const char* fileNameUTF8, int& durationMs,
- FileFormats format = kFileFormatPcm16kHzFile) = 0;
-
- // Gets the current played position of a file on a specific |channel|.
- virtual int GetPlaybackPosition(int channel, int& positionMs) = 0;
-
+ FileFormats format = kFileFormatPcm16kHzFile) { return -1; }
+ virtual int GetPlaybackPosition(int channel, int& positionMs) { return -1; }
virtual int ConvertPCMToWAV(const char* fileNameInUTF8,
- const char* fileNameOutUTF8) = 0;
-
+ const char* fileNameOutUTF8) { return -1; }
virtual int ConvertPCMToWAV(InStream* streamIn,
- OutStream* streamOut) = 0;
-
+ OutStream* streamOut) { return -1; }
virtual int ConvertWAVToPCM(const char* fileNameInUTF8,
- const char* fileNameOutUTF8) = 0;
-
+ const char* fileNameOutUTF8) { return -1; }
virtual int ConvertWAVToPCM(InStream* streamIn,
- OutStream* streamOut) = 0;
-
+ OutStream* streamOut) { return -1; }
virtual int ConvertPCMToCompressed(const char* fileNameInUTF8,
const char* fileNameOutUTF8,
- CodecInst* compression) = 0;
-
+ CodecInst* compression) { return -1; }
virtual int ConvertPCMToCompressed(InStream* streamIn,
OutStream* streamOut,
- CodecInst* compression) = 0;
-
+ CodecInst* compression) { return -1; }
virtual int ConvertCompressedToPCM(const char* fileNameInUTF8,
- const char* fileNameOutUTF8) = 0;
-
+ const char* fileNameOutUTF8) { return -1; }
virtual int ConvertCompressedToPCM(InStream* streamIn,
- OutStream* streamOut) = 0;
-
+ OutStream* streamOut) { return -1; }
protected:
VoEFile() {}
virtual ~VoEFile() {}
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_hardware.h b/chromium/third_party/webrtc/voice_engine/include/voe_hardware.h
index 5c247d74362..23255a8a032 100644
--- a/chromium/third_party/webrtc/voice_engine/include/voe_hardware.h
+++ b/chromium/third_party/webrtc/voice_engine/include/voe_hardware.h
@@ -70,12 +70,6 @@ public:
virtual int GetPlayoutDeviceName(int index, char strNameUTF8[128],
char strGuidUTF8[128]) = 0;
- // Checks if the sound card is available to be opened for recording.
- virtual int GetRecordingDeviceStatus(bool& isAvailable) = 0;
-
- // Checks if the sound card is available to be opened for playout.
- virtual int GetPlayoutDeviceStatus(bool& isAvailable) = 0;
-
// Sets the audio device used for recording.
virtual int SetRecordingDevice(
int index, StereoChannel recordingChannel = kStereoBoth) = 0;
@@ -89,47 +83,24 @@ public:
// Gets the currently used (active) audio device layer.
virtual int GetAudioDeviceLayer(AudioLayers& audioLayer) = 0;
- // Gets the VoiceEngine's current CPU consumption in terms of the percent
- // of total CPU availability. [Windows only]
- virtual int GetCPULoad(int& loadPercent) = 0;
-
- // Not supported
- virtual int ResetAudioDevice() = 0;
-
- // Not supported
- virtual int AudioDeviceControl(
- unsigned int par1, unsigned int par2, unsigned int par3) = 0;
-
- // Not supported
- virtual int SetLoudspeakerStatus(bool enable) = 0;
-
- // Not supported
- virtual int GetLoudspeakerStatus(bool& enabled) = 0;
-
// Native sample rate controls (samples/sec)
virtual int SetRecordingSampleRate(unsigned int samples_per_sec) = 0;
virtual int RecordingSampleRate(unsigned int* samples_per_sec) const = 0;
virtual int SetPlayoutSampleRate(unsigned int samples_per_sec) = 0;
virtual int PlayoutSampleRate(unsigned int* samples_per_sec) const = 0;
- // *Experimental - not recommended for use.*
- // Enables the Windows Core Audio built-in AEC. Fails on other platforms.
- //
- // Currently incompatible with the standard VoE AEC and AGC; don't attempt
- // to enable them while this is active.
- //
- // Must be called before VoEBase::StartSend(). When enabled:
- // 1. VoEBase::StartPlayout() must be called before VoEBase::StartSend().
- // 2. VoEBase::StopSend() should be called before VoEBase::StopPlayout().
- // The reverse order may cause garbage audio to be rendered or the
- // capture side to halt until StopSend() is called.
- //
- // As a consequence, SetPlayoutDevice() should be used with caution
- // during a call. It will function, but may cause the above issues for
- // the duration it takes to complete. (In practice, it should complete
- // fast enough to avoid audible degradation).
- virtual int EnableBuiltInAEC(bool enable) = 0;
- virtual bool BuiltInAECIsEnabled() const = 0;
+ // To be removed. Don't use.
+ virtual int EnableBuiltInAEC(bool enable) { return -1; }
+ virtual bool BuiltInAECIsEnabled() const { return false; }
+ virtual int GetRecordingDeviceStatus(bool& isAvailable) { return -1; }
+ virtual int GetPlayoutDeviceStatus(bool& isAvailable) { return -1; }
+ virtual int ResetAudioDevice() { return -1; }
+ virtual int AudioDeviceControl(unsigned int par1, unsigned int par2,
+ unsigned int par3) { return -1; }
+ virtual int SetLoudspeakerStatus(bool enable) { return -1; }
+ virtual int GetLoudspeakerStatus(bool& enabled) { return -1; }
+ virtual int GetCPULoad(int& loadPercent) { return -1; }
+
protected:
VoEHardware() {}
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_network.h b/chromium/third_party/webrtc/voice_engine/include/voe_network.h
index 8259e32f0b5..4c55f13f1be 100644
--- a/chromium/third_party/webrtc/voice_engine/include/voe_network.h
+++ b/chromium/third_party/webrtc/voice_engine/include/voe_network.h
@@ -80,8 +80,15 @@ public:
// The packets received from the network should be passed to this
// function when external transport is enabled. Note that the data
// including the RTP-header must also be given to the VoiceEngine.
- virtual int ReceivedRTPPacket(
- int channel, const void* data, unsigned int length) = 0;
+ virtual int ReceivedRTPPacket(int channel,
+ const void* data,
+ unsigned int length) = 0;
+ virtual int ReceivedRTPPacket(int channel,
+ const void* data,
+ unsigned int length,
+ const PacketTime& packet_time) {
+ return 0;
+ }
// The packets received from the network should be passed to this
// function when external transport is enabled. Note that the data
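
voe_network.h gains an overload of ReceivedRTPPacket() that also carries the packet's arrival time; it has a non-pure default body (return 0) so existing implementations are unaffected. A sketch of a caller that prefers the timestamped overload, assuming PacketTime is the struct declared in webrtc/common_types.h:

    // Illustrative only: forwarding an RTP packet received over an external
    // transport into VoE, together with its arrival time.
    #include "webrtc/common_types.h"                      // PacketTime (assumed)
    #include "webrtc/voice_engine/include/voe_network.h"  // VoENetwork

    void ForwardRtp(webrtc::VoENetwork* network, int channel,
                    const void* data, unsigned int length,
                    const webrtc::PacketTime& arrival) {
      // The four-argument overload defaults to "return 0", so this is safe even
      // against implementations that predate it; callers without timing
      // information can keep using the three-argument version.
      network->ReceivedRTPPacket(channel, data, length, arrival);
    }
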
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_rtp_rtcp.h b/chromium/third_party/webrtc/voice_engine/include/voe_rtp_rtcp.h
index 30d8add8c6d..ce6f8499313 100644
--- a/chromium/third_party/webrtc/voice_engine/include/voe_rtp_rtcp.h
+++ b/chromium/third_party/webrtc/voice_engine/include/voe_rtp_rtcp.h
@@ -15,10 +15,9 @@
// - Transmission of RTCP sender reports.
// - Obtaining RTCP data from incoming RTCP sender reports.
// - RTP and RTCP statistics (jitter, packet loss, RTT etc.).
-// - Forward Error Correction (FEC).
+// - Redundant Coding (RED)
// - Writing RTP and RTCP packets to binary files for off-line analysis of
// the call quality.
-// - Inserting extra RTP packets into active audio stream.
//
// Usage example, omitting error checking:
//
@@ -45,6 +44,7 @@
namespace webrtc {
+class ViENetwork;
class VoiceEngine;
// VoERTPObserver
@@ -86,6 +86,9 @@ struct CallStatistics
int packetsSent;
int bytesReceived;
int packetsReceived;
+ // The capture ntp time (in local timebase) of the first played out audio
+ // frame.
+ int64_t capture_start_ntp_time_ms_;
};
// See section 6.4.1 in http://www.ietf.org/rfc/rfc3550.txt for details.
@@ -125,24 +128,6 @@ public:
// deleted.
virtual int Release() = 0;
- // Registers an instance of a VoERTPObserver derived class for a specified
- // |channel|. It will allow the user to observe callbacks related to the
- // RTP protocol such as changes in the incoming SSRC.
- virtual int RegisterRTPObserver(int channel, VoERTPObserver& observer) = 0;
-
- // Deregisters an instance of a VoERTPObserver derived class for a
- // specified |channel|.
- virtual int DeRegisterRTPObserver(int channel) = 0;
-
- // Registers an instance of a VoERTCPObserver derived class for a specified
- // |channel|.
- virtual int RegisterRTCPObserver(
- int channel, VoERTCPObserver& observer) = 0;
-
- // Deregisters an instance of a VoERTCPObserver derived class for a
- // specified |channel|.
- virtual int DeRegisterRTCPObserver(int channel) = 0;
-
// Sets the local RTP synchronization source identifier (SSRC) explicitly.
virtual int SetLocalSSRC(int channel, unsigned int ssrc) = 0;
@@ -153,15 +138,28 @@ public:
virtual int GetRemoteSSRC(int channel, unsigned int& ssrc) = 0;
// Sets the status of rtp-audio-level-indication on a specific |channel|.
- virtual int SetRTPAudioLevelIndicationStatus(
- int channel, bool enable, unsigned char ID = 1) = 0;
-
- // Sets the status of rtp-audio-level-indication on a specific |channel|.
- virtual int GetRTPAudioLevelIndicationStatus(
- int channel, bool& enabled, unsigned char& ID) = 0;
+ virtual int SetSendAudioLevelIndicationStatus(int channel,
+ bool enable,
+ unsigned char id = 1) = 0;
- // Gets the CSRCs of the incoming RTP packets.
- virtual int GetRemoteCSRCs(int channel, unsigned int arrCSRC[15]) = 0;
+ // Sets the status of receiving rtp-audio-level-indication on a specific
+ // |channel|.
+ virtual int SetReceiveAudioLevelIndicationStatus(int channel,
+ bool enable,
+ unsigned char id = 1) {
+ // TODO(wu): Remove default implementation once talk is updated.
+ return 0;
+ }
+
+ // Sets the status of sending absolute sender time on a specific |channel|.
+ virtual int SetSendAbsoluteSenderTimeStatus(int channel,
+ bool enable,
+ unsigned char id) = 0;
+
+ // Sets status of receiving absolute sender time on a specific |channel|.
+ virtual int SetReceiveAbsoluteSenderTimeStatus(int channel,
+ bool enable,
+ unsigned char id) = 0;
// Sets the RTCP status on a specific |channel|.
virtual int SetRTCPStatus(int channel, bool enable) = 0;
@@ -195,11 +193,6 @@ public:
// Gets RTCP statistics for a specific |channel|.
virtual int GetRTCPStatistics(int channel, CallStatistics& stats) = 0;
- // Gets the sender info part of the last received RTCP Sender Report (SR)
- // on a specified |channel|.
- virtual int GetRemoteRTCPSenderInfo(
- int channel, SenderInfo* sender_info) = 0;
-
// Gets the report block parts of the last received RTCP Sender Report (SR),
// or RTCP Receiver Report (RR) on a specified |channel|. Each vector
// element also contains the SSRC of the sender in addition to a report
@@ -207,18 +200,33 @@ public:
virtual int GetRemoteRTCPReportBlocks(
int channel, std::vector<ReportBlock>* receive_blocks) = 0;
- // Sends an RTCP APP packet on a specific |channel|.
- virtual int SendApplicationDefinedRTCPPacket(
- int channel, unsigned char subType, unsigned int name,
- const char* data, unsigned short dataLengthInBytes) = 0;
+ // Sets the Redundant Coding (RED) status on a specific |channel|.
+ // TODO(minyue): Make SetREDStatus() pure virtual when fakewebrtcvoiceengine
+ // in talk is ready.
+ virtual int SetREDStatus(
+ int channel, bool enable, int redPayloadtype = -1) { return -1; }
+
+ // Gets the RED status on a specific |channel|.
+ // TODO(minyue): Make GetREDStatus() pure virtual when fakewebrtcvoiceengine
+ // in talk is ready.
+ virtual int GetREDStatus(
+ int channel, bool& enabled, int& redPayloadtype) { return -1; }
// Sets the Forward Error Correction (FEC) status on a specific |channel|.
+ // TODO(minyue): Remove SetFECStatus() when SetFECStatus() is replaced by
+ // SetREDStatus() in fakewebrtcvoiceengine.
virtual int SetFECStatus(
- int channel, bool enable, int redPayloadtype = -1) = 0;
+ int channel, bool enable, int redPayloadtype = -1) {
+ return SetREDStatus(channel, enable, redPayloadtype);
+ };
// Gets the FEC status on a specific |channel|.
+ // TODO(minyue): Remove GetFECStatus() when GetFECStatus() is replaced by
+ // GetREDStatus() in fakewebrtcvoiceengine.
virtual int GetFECStatus(
- int channel, bool& enabled, int& redPayloadtype) = 0;
+ int channel, bool& enabled, int& redPayloadtype) {
+      return GetREDStatus(channel, enabled, redPayloadtype);
+ }
// This function enables Negative Acknowledgment (NACK) using RTCP,
// implemented based on RFC 4585. NACK retransmits RTP packets if lost on
@@ -246,16 +254,33 @@ public:
virtual int RTPDumpIsActive(
int channel, RTPDirections direction = kRtpIncoming) = 0;
- // Sends an extra RTP packet using an existing/active RTP session.
- // It is possible to set the payload type, marker bit and payload
- // of the extra RTP
+ // Sets video engine channel to receive incoming audio packets for
+ // aggregated bandwidth estimation. Takes ownership of the ViENetwork
+ // interface.
+ virtual int SetVideoEngineBWETarget(int channel, ViENetwork* vie_network,
+ int video_channel) {
+ return 0;
+ }
+
+ // Will be removed. Don't use.
+ virtual int RegisterRTPObserver(int channel,
+ VoERTPObserver& observer) { return -1; };
+ virtual int DeRegisterRTPObserver(int channel) { return -1; };
+ virtual int RegisterRTCPObserver(
+ int channel, VoERTCPObserver& observer) { return -1; };
+ virtual int DeRegisterRTCPObserver(int channel) { return -1; };
+ virtual int GetRemoteCSRCs(int channel,
+ unsigned int arrCSRC[15]) { return -1; };
virtual int InsertExtraRTPPacket(
- int channel, unsigned char payloadType, bool markerBit,
- const char* payloadData, unsigned short payloadSize) = 0;
-
- // Gets the timestamp of the last RTP packet received by |channel|.
+ int channel, unsigned char payloadType, bool markerBit,
+ const char* payloadData, unsigned short payloadSize) { return -1; };
+ virtual int GetRemoteRTCPSenderInfo(
+ int channel, SenderInfo* sender_info) { return -1; };
+ virtual int SendApplicationDefinedRTCPPacket(
+ int channel, unsigned char subType, unsigned int name,
+ const char* data, unsigned short dataLengthInBytes) { return -1; };
virtual int GetLastRemoteTimeStamp(int channel,
- uint32_t* lastRemoteTimeStamp) = 0;
+ uint32_t* lastRemoteTimeStamp) { return -1; };
protected:
VoERTP_RTCP() {}
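
With this change the FEC naming gives way to RED: SetFECStatus()/GetFECStatus() become thin shims and new code should call SetREDStatus()/GetREDStatus(), alongside the new header-extension setters. A sketch of configuring a channel through the revised interface; GetInterface()/Release() are assumed to follow the usual VoE sub-API pattern, and error handling is reduced to a boolean:

    // Illustrative only: enable RED and the send-side audio-level RTP header
    // extension on one channel.
    #include "webrtc/voice_engine/include/voe_rtp_rtcp.h"

    bool ConfigureRtpFeatures(webrtc::VoiceEngine* voe, int channel) {
      webrtc::VoERTP_RTCP* rtp_rtcp = webrtc::VoERTP_RTCP::GetInterface(voe);
      if (!rtp_rtcp)
        return false;
      // The RED payload type argument keeps its default (-1) and the extension
      // id keeps its default (1), per the declarations above.
      bool ok = rtp_rtcp->SetREDStatus(channel, true) == 0 &&
                rtp_rtcp->SetSendAudioLevelIndicationStatus(channel, true) == 0;
      rtp_rtcp->Release();
      return ok;
    }
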
diff --git a/chromium/third_party/webrtc/voice_engine/include/voe_volume_control.h b/chromium/third_party/webrtc/voice_engine/include/voe_volume_control.h
index c67d984b564..89b64c130ee 100644
--- a/chromium/third_party/webrtc/voice_engine/include/voe_volume_control.h
+++ b/chromium/third_party/webrtc/voice_engine/include/voe_volume_control.h
@@ -62,12 +62,6 @@ public:
// Gets the speaker |volume| level.
virtual int GetSpeakerVolume(unsigned int& volume) = 0;
- // Mutes the speaker device completely in the operating system.
- virtual int SetSystemOutputMute(bool enable) = 0;
-
- // Gets the output device mute state in the operating system.
- virtual int GetSystemOutputMute(bool &enabled) = 0;
-
// Sets the microphone volume level. Valid range is [0,255].
virtual int SetMicVolume(unsigned int volume) = 0;
@@ -81,12 +75,6 @@ public:
// Gets the current microphone input mute state.
virtual int GetInputMute(int channel, bool& enabled) = 0;
- // Mutes the microphone device completely in the operating system.
- virtual int SetSystemInputMute(bool enable) = 0;
-
- // Gets the mute state of the input device in the operating system.
- virtual int GetSystemInputMute(bool& enabled) = 0;
-
// Gets the microphone speech |level|, mapped non-linearly to the range
// [0,9].
virtual int GetSpeechInputLevel(unsigned int& level) = 0;
@@ -117,6 +105,12 @@ public:
// Gets the current left and right scaling factors.
virtual int GetOutputVolumePan(int channel, float& left, float& right) = 0;
+ // Don't use. Will be removed.
+ virtual int SetSystemOutputMute(bool enable) { return -1; }
+ virtual int GetSystemOutputMute(bool &enabled) { return -1; }
+ virtual int SetSystemInputMute(bool enable) { return -1; }
+ virtual int GetSystemInputMute(bool& enabled) { return -1; }
+
protected:
VoEVolumeControl() {};
virtual ~VoEVolumeControl() {};
diff --git a/chromium/third_party/webrtc/voice_engine/output_mixer.cc b/chromium/third_party/webrtc/voice_engine/output_mixer.cc
index dba96fe7c72..7cf98d390cb 100644
--- a/chromium/third_party/webrtc/voice_engine/output_mixer.cc
+++ b/chromium/third_party/webrtc/voice_engine/output_mixer.cc
@@ -16,11 +16,10 @@
#include "webrtc/system_wrappers/interface/file_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
-#include "webrtc/voice_engine/output_mixer_internal.h"
#include "webrtc/voice_engine/statistics.h"
+#include "webrtc/voice_engine/utility.h"
namespace webrtc {
-
namespace voe {
void
@@ -237,29 +236,6 @@ int OutputMixer::PlayDtmfTone(uint8_t eventCode, int lengthMs,
return 0;
}
-int OutputMixer::StartPlayingDtmfTone(uint8_t eventCode,
- int attenuationDb)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
- "OutputMixer::StartPlayingDtmfTone()");
- if (_dtmfGenerator.StartTone(eventCode, attenuationDb) != 0)
- {
- _engineStatisticsPtr->SetLastError(
- VE_STILL_PLAYING_PREV_DTMF,
- kTraceError,
- "OutputMixer::StartPlayingDtmfTone())");
- return -1;
- }
- return 0;
-}
-
-int OutputMixer::StopPlayingDtmfTone()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
- "OutputMixer::StopPlayingDtmfTone()");
- return (_dtmfGenerator.StopTone());
-}
-
int32_t
OutputMixer::SetMixabilityStatus(MixerParticipant& participant,
bool mixable)
@@ -528,11 +504,12 @@ int OutputMixer::GetMixedAudio(int sample_rate_hz,
frame->sample_rate_hz_ = sample_rate_hz;
// TODO(andrew): Ideally the downmixing would occur much earlier, in
// AudioCodingModule.
- return RemixAndResample(_audioFrame, &resampler_, frame);
+ RemixAndResample(_audioFrame, &resampler_, frame);
+ return 0;
}
int32_t
-OutputMixer::DoOperationsOnCombinedSignal()
+OutputMixer::DoOperationsOnCombinedSignal(bool feed_data_to_apm)
{
if (_audioFrame.sample_rate_hz_ != _mixingFrequencyHz)
{
@@ -565,24 +542,25 @@ OutputMixer::DoOperationsOnCombinedSignal()
}
// --- Far-end Voice Quality Enhancement (AudioProcessing Module)
-
- APMAnalyzeReverseStream();
+ if (feed_data_to_apm)
+ APMAnalyzeReverseStream();
// --- External media processing
-
- if (_externalMedia)
{
CriticalSectionScoped cs(&_callbackCritSect);
- const bool isStereo = (_audioFrame.num_channels_ == 2);
- if (_externalMediaCallbackPtr)
+ if (_externalMedia)
{
- _externalMediaCallbackPtr->Process(
- -1,
- kPlaybackAllChannelsMixed,
- (int16_t*)_audioFrame.data_,
- _audioFrame.samples_per_channel_,
- _audioFrame.sample_rate_hz_,
- isStereo);
+ const bool is_stereo = (_audioFrame.num_channels_ == 2);
+ if (_externalMediaCallbackPtr)
+ {
+ _externalMediaCallbackPtr->Process(
+ -1,
+ kPlaybackAllChannelsMixed,
+ (int16_t*)_audioFrame.data_,
+ _audioFrame.samples_per_channel_,
+ _audioFrame.sample_rate_hz_,
+ is_stereo);
+ }
}
}
@@ -601,9 +579,8 @@ void OutputMixer::APMAnalyzeReverseStream() {
// side. Downmix to mono.
AudioFrame frame;
frame.num_channels_ = 1;
- frame.sample_rate_hz_ = _audioProcessingModulePtr->sample_rate_hz();
- if (RemixAndResample(_audioFrame, &audioproc_resampler_, &frame) == -1)
- return;
+ frame.sample_rate_hz_ = _audioProcessingModulePtr->input_sample_rate_hz();
+ RemixAndResample(_audioFrame, &audioproc_resampler_, &frame);
if (_audioProcessingModulePtr->AnalyzeReverseStream(&frame) == -1) {
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
@@ -656,5 +633,4 @@ OutputMixer::InsertInbandDtmfTone()
}
} // namespace voe
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/voice_engine/output_mixer.h b/chromium/third_party/webrtc/voice_engine/output_mixer.h
index fc9afd8c542..e5c65e41bc1 100644
--- a/chromium/third_party/webrtc/voice_engine/output_mixer.h
+++ b/chromium/third_party/webrtc/voice_engine/output_mixer.h
@@ -54,13 +54,9 @@ public:
// VoEDtmf
int PlayDtmfTone(uint8_t eventCode, int lengthMs, int attenuationDb);
- int StartPlayingDtmfTone(uint8_t eventCode, int attenuationDb);
-
- int StopPlayingDtmfTone();
-
int32_t MixActiveChannels();
- int32_t DoOperationsOnCombinedSignal();
+ int32_t DoOperationsOnCombinedSignal(bool feed_data_to_apm);
int32_t SetMixabilityStatus(MixerParticipant& participant,
bool mixable);
@@ -133,8 +129,10 @@ private:
CriticalSectionWrapper& _fileCritSect;
AudioConferenceMixer& _mixerModule;
AudioFrame _audioFrame;
- PushResampler resampler_; // converts mixed audio to fit ADM format
- PushResampler audioproc_resampler_; // converts mixed audio to fit APM rate
+ // Converts mixed audio to the audio device output rate.
+ PushResampler<int16_t> resampler_;
+ // Converts mixed audio to the audio processing rate.
+ PushResampler<int16_t> audioproc_resampler_;
AudioLevel _audioLevel; // measures audio level for the combined signal
DtmfInband _dtmfGenerator;
int _instanceId;
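
RemixAndResample() moves from output_mixer_internal.* (deleted below) into voice_engine/utility.*, now takes the templated PushResampler<int16_t> and returns void. A sketch of calling it directly, based on the signature and the contract documented in the deleted header (the destination frame's num_channels_ and sample_rate_hz_ select the output format):

    // Illustrative only: convert a frame to stereo 32 kHz with the relocated
    // helper. Header paths follow the includes added in this patch.
    #include "webrtc/common_audio/resampler/include/push_resampler.h"
    #include "webrtc/modules/interface/module_common_types.h"  // AudioFrame
    #include "webrtc/voice_engine/utility.h"

    void ConvertFrame(const webrtc::AudioFrame& src, webrtc::AudioFrame* dst) {
      webrtc::PushResampler<int16_t> resampler;
      dst->num_channels_ = 2;        // desired channel count
      dst->sample_rate_hz_ = 32000;  // desired rate
      // Failures now assert internally instead of returning -1.
      webrtc::voe::RemixAndResample(src, &resampler, dst);
      // dst->samples_per_channel_ has been updated for the new rate.
    }
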
diff --git a/chromium/third_party/webrtc/voice_engine/output_mixer_internal.cc b/chromium/third_party/webrtc/voice_engine/output_mixer_internal.cc
deleted file mode 100644
index 55eedb38364..00000000000
--- a/chromium/third_party/webrtc/voice_engine/output_mixer_internal.cc
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/voice_engine/output_mixer_internal.h"
-
-#include "webrtc/common_audio/resampler/include/push_resampler.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/modules/utility/interface/audio_frame_operations.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-namespace voe {
-
-int RemixAndResample(const AudioFrame& src_frame,
- PushResampler* resampler,
- AudioFrame* dst_frame) {
- const int16_t* audio_ptr = src_frame.data_;
- int audio_ptr_num_channels = src_frame.num_channels_;
- int16_t mono_audio[AudioFrame::kMaxDataSizeSamples];
-
- // Downmix before resampling.
- if (src_frame.num_channels_ == 2 && dst_frame->num_channels_ == 1) {
- AudioFrameOperations::StereoToMono(src_frame.data_,
- src_frame.samples_per_channel_,
- mono_audio);
- audio_ptr = mono_audio;
- audio_ptr_num_channels = 1;
- }
-
- if (resampler->InitializeIfNeeded(src_frame.sample_rate_hz_,
- dst_frame->sample_rate_hz_,
- audio_ptr_num_channels) == -1) {
- dst_frame->CopyFrom(src_frame);
- LOG_FERR3(LS_ERROR, InitializeIfNeeded, src_frame.sample_rate_hz_,
- dst_frame->sample_rate_hz_, audio_ptr_num_channels);
- return -1;
- }
-
- const int src_length = src_frame.samples_per_channel_ *
- audio_ptr_num_channels;
- int out_length = resampler->Resample(audio_ptr, src_length, dst_frame->data_,
- AudioFrame::kMaxDataSizeSamples);
- if (out_length == -1) {
- dst_frame->CopyFrom(src_frame);
- LOG_FERR3(LS_ERROR, Resample, src_length, dst_frame->data_,
- AudioFrame::kMaxDataSizeSamples);
- return -1;
- }
- dst_frame->samples_per_channel_ = out_length / audio_ptr_num_channels;
-
- // Upmix after resampling.
- if (src_frame.num_channels_ == 1 && dst_frame->num_channels_ == 2) {
- // The audio in dst_frame really is mono at this point; MonoToStereo will
- // set this back to stereo.
- dst_frame->num_channels_ = 1;
- AudioFrameOperations::MonoToStereo(dst_frame);
- }
- return 0;
-}
-
-} // namespace voe
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/voice_engine/output_mixer_internal.h b/chromium/third_party/webrtc/voice_engine/output_mixer_internal.h
deleted file mode 100644
index 88a3a5b2f12..00000000000
--- a/chromium/third_party/webrtc/voice_engine/output_mixer_internal.h
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_INTERNAL_H_
-#define WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_INTERNAL_H_
-
-namespace webrtc {
-
-class AudioFrame;
-class PushResampler;
-
-namespace voe {
-
-// Upmix or downmix and resample the audio in |src_frame| to |dst_frame|.
-// Expects |dst_frame| to have its |num_channels_| and |sample_rate_hz_| set to
-// the desired values. Updates |samples_per_channel_| accordingly.
-//
-// On failure, returns -1 and copies |src_frame| to |dst_frame|.
-int RemixAndResample(const AudioFrame& src_frame,
- PushResampler* resampler,
- AudioFrame* dst_frame);
-
-} // namespace voe
-} // namespace webrtc
-
-#endif // VOICE_ENGINE_OUTPUT_MIXER_INTERNAL_H_
diff --git a/chromium/third_party/webrtc/voice_engine/output_mixer_unittest.cc b/chromium/third_party/webrtc/voice_engine/output_mixer_unittest.cc
deleted file mode 100644
index 006c45fa1eb..00000000000
--- a/chromium/third_party/webrtc/voice_engine/output_mixer_unittest.cc
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <math.h>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/voice_engine/output_mixer.h"
-#include "webrtc/voice_engine/output_mixer_internal.h"
-
-namespace webrtc {
-namespace voe {
-namespace {
-
-class OutputMixerTest : public ::testing::Test {
- protected:
- OutputMixerTest() {
- src_frame_.sample_rate_hz_ = 16000;
- src_frame_.samples_per_channel_ = src_frame_.sample_rate_hz_ / 100;
- src_frame_.num_channels_ = 1;
- dst_frame_.CopyFrom(src_frame_);
- golden_frame_.CopyFrom(src_frame_);
- }
-
- void RunResampleTest(int src_channels, int src_sample_rate_hz,
- int dst_channels, int dst_sample_rate_hz);
-
- PushResampler resampler_;
- AudioFrame src_frame_;
- AudioFrame dst_frame_;
- AudioFrame golden_frame_;
-};
-
-// Sets the signal value to increase by |data| with every sample. Floats are
-// used so non-integer values result in rounding error, but not an accumulating
-// error.
-void SetMonoFrame(AudioFrame* frame, float data, int sample_rate_hz) {
- memset(frame->data_, 0, sizeof(frame->data_));
- frame->num_channels_ = 1;
- frame->sample_rate_hz_ = sample_rate_hz;
- frame->samples_per_channel_ = sample_rate_hz / 100;
- for (int i = 0; i < frame->samples_per_channel_; i++) {
- frame->data_[i] = data * i;
- }
-}
-
-// Keep the existing sample rate.
-void SetMonoFrame(AudioFrame* frame, float data) {
- SetMonoFrame(frame, data, frame->sample_rate_hz_);
-}
-
-// Sets the signal value to increase by |left| and |right| with every sample in
-// each channel respectively.
-void SetStereoFrame(AudioFrame* frame, float left, float right,
- int sample_rate_hz) {
- memset(frame->data_, 0, sizeof(frame->data_));
- frame->num_channels_ = 2;
- frame->sample_rate_hz_ = sample_rate_hz;
- frame->samples_per_channel_ = sample_rate_hz / 100;
- for (int i = 0; i < frame->samples_per_channel_; i++) {
- frame->data_[i * 2] = left * i;
- frame->data_[i * 2 + 1] = right * i;
- }
-}
-
-// Keep the existing sample rate.
-void SetStereoFrame(AudioFrame* frame, float left, float right) {
- SetStereoFrame(frame, left, right, frame->sample_rate_hz_);
-}
-
-void VerifyParams(const AudioFrame& ref_frame, const AudioFrame& test_frame) {
- EXPECT_EQ(ref_frame.num_channels_, test_frame.num_channels_);
- EXPECT_EQ(ref_frame.samples_per_channel_, test_frame.samples_per_channel_);
- EXPECT_EQ(ref_frame.sample_rate_hz_, test_frame.sample_rate_hz_);
-}
-
-// Computes the best SNR based on the error between |ref_frame| and
-// |test_frame|. It allows for up to a |max_delay| in samples between the
-// signals to compensate for the resampling delay.
-float ComputeSNR(const AudioFrame& ref_frame, const AudioFrame& test_frame,
- int max_delay) {
- VerifyParams(ref_frame, test_frame);
- float best_snr = 0;
- int best_delay = 0;
- for (int delay = 0; delay <= max_delay; delay++) {
- float mse = 0;
- float variance = 0;
- for (int i = 0; i < ref_frame.samples_per_channel_ *
- ref_frame.num_channels_ - delay; i++) {
- int error = ref_frame.data_[i] - test_frame.data_[i + delay];
- mse += error * error;
- variance += ref_frame.data_[i] * ref_frame.data_[i];
- }
- float snr = 100; // We assign 100 dB to the zero-error case.
- if (mse > 0)
- snr = 10 * log10(variance / mse);
- if (snr > best_snr) {
- best_snr = snr;
- best_delay = delay;
- }
- }
- printf("SNR=%.1f dB at delay=%d\n", best_snr, best_delay);
- return best_snr;
-}
-
-void VerifyFramesAreEqual(const AudioFrame& ref_frame,
- const AudioFrame& test_frame) {
- VerifyParams(ref_frame, test_frame);
- for (int i = 0; i < ref_frame.samples_per_channel_ * ref_frame.num_channels_;
- i++) {
- EXPECT_EQ(ref_frame.data_[i], test_frame.data_[i]);
- }
-}
-
-void OutputMixerTest::RunResampleTest(int src_channels,
- int src_sample_rate_hz,
- int dst_channels,
- int dst_sample_rate_hz) {
- PushResampler resampler; // Create a new one with every test.
- const int16_t kSrcLeft = 30; // Shouldn't overflow for any used sample rate.
- const int16_t kSrcRight = 15;
- const float resampling_factor = (1.0 * src_sample_rate_hz) /
- dst_sample_rate_hz;
- const float dst_left = resampling_factor * kSrcLeft;
- const float dst_right = resampling_factor * kSrcRight;
- const float dst_mono = (dst_left + dst_right) / 2;
- if (src_channels == 1)
- SetMonoFrame(&src_frame_, kSrcLeft, src_sample_rate_hz);
- else
- SetStereoFrame(&src_frame_, kSrcLeft, kSrcRight, src_sample_rate_hz);
-
- if (dst_channels == 1) {
- SetMonoFrame(&dst_frame_, 0, dst_sample_rate_hz);
- if (src_channels == 1)
- SetMonoFrame(&golden_frame_, dst_left, dst_sample_rate_hz);
- else
- SetMonoFrame(&golden_frame_, dst_mono, dst_sample_rate_hz);
- } else {
- SetStereoFrame(&dst_frame_, 0, 0, dst_sample_rate_hz);
- if (src_channels == 1)
- SetStereoFrame(&golden_frame_, dst_left, dst_left, dst_sample_rate_hz);
- else
- SetStereoFrame(&golden_frame_, dst_left, dst_right, dst_sample_rate_hz);
- }
-
- // The sinc resampler has a known delay, which we compute here. Multiplying by
- // two gives us a crude maximum for any resampling, as the old resampler
- // typically (but not always) has lower delay.
- static const int kInputKernelDelaySamples = 16;
- const int max_delay = static_cast<double>(dst_sample_rate_hz)
- / src_sample_rate_hz * kInputKernelDelaySamples * dst_channels * 2;
- printf("(%d, %d Hz) -> (%d, %d Hz) ", // SNR reported on the same line later.
- src_channels, src_sample_rate_hz, dst_channels, dst_sample_rate_hz);
- EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler, &dst_frame_));
- if (src_sample_rate_hz == 96000 && dst_sample_rate_hz == 8000) {
- // The sinc resampler gives poor SNR at this extreme conversion, but we
- // expect to see this rarely in practice.
- EXPECT_GT(ComputeSNR(golden_frame_, dst_frame_, max_delay), 14.0f);
- } else {
- EXPECT_GT(ComputeSNR(golden_frame_, dst_frame_, max_delay), 46.0f);
- }
-}
-
-TEST_F(OutputMixerTest, RemixAndResampleCopyFrameSucceeds) {
- // Stereo -> stereo.
- SetStereoFrame(&src_frame_, 10, 10);
- SetStereoFrame(&dst_frame_, 0, 0);
- EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler_, &dst_frame_));
- VerifyFramesAreEqual(src_frame_, dst_frame_);
-
- // Mono -> mono.
- SetMonoFrame(&src_frame_, 20);
- SetMonoFrame(&dst_frame_, 0);
- EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler_, &dst_frame_));
- VerifyFramesAreEqual(src_frame_, dst_frame_);
-}
-
-TEST_F(OutputMixerTest, RemixAndResampleMixingOnlySucceeds) {
- // Stereo -> mono.
- SetStereoFrame(&dst_frame_, 0, 0);
- SetMonoFrame(&src_frame_, 10);
- SetStereoFrame(&golden_frame_, 10, 10);
- EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler_, &dst_frame_));
- VerifyFramesAreEqual(dst_frame_, golden_frame_);
-
- // Mono -> stereo.
- SetMonoFrame(&dst_frame_, 0);
- SetStereoFrame(&src_frame_, 10, 20);
- SetMonoFrame(&golden_frame_, 15);
- EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler_, &dst_frame_));
- VerifyFramesAreEqual(golden_frame_, dst_frame_);
-}
-
-TEST_F(OutputMixerTest, RemixAndResampleSucceeds) {
- // TODO(ajm): convert this to the parameterized TEST_P style used in
- // sinc_resampler_unittest.cc. We can then easily add tighter SNR thresholds.
- const int kSampleRates[] = {8000, 16000, 32000, 44100, 48000, 96000};
- const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
- const int kChannels[] = {1, 2};
- const int kChannelsSize = sizeof(kChannels) / sizeof(*kChannels);
- for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
- for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
- for (int src_channel = 0; src_channel < kChannelsSize; src_channel++) {
- for (int dst_channel = 0; dst_channel < kChannelsSize; dst_channel++) {
- RunResampleTest(kChannels[src_channel], kSampleRates[src_rate],
- kChannels[dst_channel], kSampleRates[dst_rate]);
- }
- }
- }
- }
-}
-
-} // namespace
-} // namespace voe
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/voice_engine/transmit_mixer.cc b/chromium/third_party/webrtc/voice_engine/transmit_mixer.cc
index 743d8e20a43..76b507f9d1a 100644
--- a/chromium/third_party/webrtc/voice_engine/transmit_mixer.cc
+++ b/chromium/third_party/webrtc/voice_engine/transmit_mixer.cc
@@ -25,14 +25,8 @@
#define WEBRTC_ABS(a) (((a) < 0) ? -(a) : (a))
namespace webrtc {
-
namespace voe {
-// Used for downmixing before resampling.
-// TODO(ajm): audio_device should advertise the maximum sample rate it can
-// provide.
-static const int kMaxMonoDeviceDataSizeSamples = 960; // 10 ms, 96 kHz, mono.
-
// TODO(ajm): The thread safety of this is dubious...
void
TransmitMixer::OnPeriodicProcess()
@@ -47,17 +41,19 @@ TransmitMixer::OnPeriodicProcess()
if (_voiceEngineObserverPtr)
{
if (_typingNoiseDetected) {
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
- "TransmitMixer::OnPeriodicProcess() => "
- "CallbackOnError(VE_TYPING_NOISE_WARNING)");
- _voiceEngineObserverPtr->CallbackOnError(-1,
- VE_TYPING_NOISE_WARNING);
+ WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+ "TransmitMixer::OnPeriodicProcess() => "
+ "CallbackOnError(VE_TYPING_NOISE_WARNING)");
+ _voiceEngineObserverPtr->CallbackOnError(
+ -1,
+ VE_TYPING_NOISE_WARNING);
} else {
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
- "TransmitMixer::OnPeriodicProcess() => "
- "CallbackOnError(VE_TYPING_NOISE_OFF_WARNING)");
- _voiceEngineObserverPtr->CallbackOnError(
- -1, VE_TYPING_NOISE_OFF_WARNING);
+ WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+ "TransmitMixer::OnPeriodicProcess() => "
+ "CallbackOnError(VE_TYPING_NOISE_OFF_WARNING)");
+ _voiceEngineObserverPtr->CallbackOnError(
+ -1,
+ VE_TYPING_NOISE_OFF_WARNING);
}
}
_typingNoiseWarningPending = false;
@@ -194,16 +190,8 @@ TransmitMixer::TransmitMixer(uint32_t instanceId) :
_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
- _timeActive(0),
- _timeSinceLastTyping(0),
- _penaltyCounter(0),
_typingNoiseWarningPending(false),
_typingNoiseDetected(false),
- _timeWindow(10), // 10ms slots accepted to count as a hit
- _costPerTyping(100), // Penalty added for a typing + activity coincide
- _reportingThreshold(300), // Threshold for _penaltyCounter
- _penaltyDecay(1), // how much we reduce _penaltyCounter every 10 ms.
- _typeEventDelay(2), // how "old" event we check for
#endif
_saturationWarning(false),
_instanceId(instanceId),
@@ -322,10 +310,7 @@ void TransmitMixer::GetSendCodecInfo(int* max_sample_rate, int* max_channels) {
if (channel->Sending()) {
CodecInst codec;
channel->GetSendCodec(codec);
- // TODO(tlegrand): Remove the 32 kHz restriction once we have full 48 kHz
- // support in Audio Coding Module.
- *max_sample_rate = std::min(32000,
- std::max(*max_sample_rate, codec.plfreq));
+ *max_sample_rate = std::max(*max_sample_rate, codec.plfreq);
*max_channels = std::max(*max_channels, codec.channels);
}
}
@@ -348,13 +333,10 @@ TransmitMixer::PrepareDemux(const void* audioSamples,
totalDelayMS, clockDrift, currentMicLevel);
// --- Resample input audio and create/store the initial audio frame
- if (GenerateAudioFrame(static_cast<const int16_t*>(audioSamples),
- nSamples,
- nChannels,
- samplesPerSec) == -1)
- {
- return -1;
- }
+ GenerateAudioFrame(static_cast<const int16_t*>(audioSamples),
+ nSamples,
+ nChannels,
+ samplesPerSec);
{
CriticalSectionScoped cs(&_callbackCritSect);
@@ -368,7 +350,7 @@ TransmitMixer::PrepareDemux(const void* audioSamples,
}
// --- Near-end audio processing.
- ProcessAudio(totalDelayMS, clockDrift, currentMicLevel);
+ ProcessAudio(totalDelayMS, clockDrift, currentMicLevel, keyPressed);
if (swap_stereo_channels_ && stereo_codec_)
// Only bother swapping if we're using a stereo codec.
@@ -403,7 +385,12 @@ TransmitMixer::PrepareDemux(const void* audioSamples,
}
// --- Record to file
- if (_fileRecording)
+ bool file_recording = false;
+ {
+ CriticalSectionScoped cs(&_critSect);
+ file_recording = _fileRecording;
+ }
+ if (file_recording)
{
RecordAudioToFile(_audioFrame.sample_rate_hz_);
}
@@ -434,10 +421,7 @@ TransmitMixer::DemuxAndMix()
it.Increment())
{
Channel* channelPtr = it.GetChannel();
- if (channelPtr->InputIsOnHold())
- {
- channelPtr->UpdateLocalTimeStamp();
- } else if (channelPtr->Sending())
+ if (channelPtr->Sending())
{
// Demultiplex makes a copy of its input.
channelPtr->Demultiplex(_audioFrame);
@@ -453,9 +437,7 @@ void TransmitMixer::DemuxAndMix(const int voe_channels[],
voe::ChannelOwner ch = _channelManagerPtr->GetChannel(voe_channels[i]);
voe::Channel* channel_ptr = ch.channel();
if (channel_ptr) {
- if (channel_ptr->InputIsOnHold()) {
- channel_ptr->UpdateLocalTimeStamp();
- } else if (channel_ptr->Sending()) {
+ if (channel_ptr->Sending()) {
// Demultiplex makes a copy of its input.
channel_ptr->Demultiplex(_audioFrame);
channel_ptr->PrepareEncodeAndSend(_audioFrame.sample_rate_hz_);
@@ -474,7 +456,7 @@ TransmitMixer::EncodeAndSend()
it.Increment())
{
Channel* channelPtr = it.GetChannel();
- if (channelPtr->Sending() && !channelPtr->InputIsOnHold())
+ if (channelPtr->Sending())
{
channelPtr->EncodeAndSend();
}
@@ -487,14 +469,13 @@ void TransmitMixer::EncodeAndSend(const int voe_channels[],
for (int i = 0; i < number_of_voe_channels; ++i) {
voe::ChannelOwner ch = _channelManagerPtr->GetChannel(voe_channels[i]);
voe::Channel* channel_ptr = ch.channel();
- if (channel_ptr && channel_ptr->Sending() && !channel_ptr->InputIsOnHold())
+ if (channel_ptr && channel_ptr->Sending())
channel_ptr->EncodeAndSend();
}
}
uint32_t TransmitMixer::CaptureLevel() const
{
- CriticalSectionScoped cs(&_critSect);
return _captureLevel;
}
@@ -700,34 +681,6 @@ int TransmitMixer::IsPlayingFileAsMicrophone() const
return _filePlaying;
}
-int TransmitMixer::ScaleFileAsMicrophonePlayout(float scale)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
- "TransmitMixer::ScaleFileAsMicrophonePlayout(scale=%5.3f)",
- scale);
-
- CriticalSectionScoped cs(&_critSect);
-
- if (!_filePlaying)
- {
- _engineStatisticsPtr->SetLastError(
- VE_INVALID_OPERATION, kTraceError,
- "ScaleFileAsMicrophonePlayout() isnot playing file");
- return -1;
- }
-
- if ((_filePlayerPtr == NULL) ||
- (_filePlayerPtr->SetAudioScaling(scale) != 0))
- {
- _engineStatisticsPtr->SetLastError(
- VE_BAD_ARGUMENT, kTraceError,
- "SetAudioScaling() failed to scale playout");
- return -1;
- }
-
- return 0;
-}
-
int TransmitMixer::StartRecordingMicrophone(const char* fileName,
const CodecInst* codecInst)
{
@@ -735,6 +688,8 @@ int TransmitMixer::StartRecordingMicrophone(const char* fileName,
"TransmitMixer::StartRecordingMicrophone(fileName=%s)",
fileName);
+ CriticalSectionScoped cs(&_critSect);
+
if (_fileRecording)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
@@ -768,8 +723,6 @@ int TransmitMixer::StartRecordingMicrophone(const char* fileName,
format = kFileFormatCompressedFile;
}
- CriticalSectionScoped cs(&_critSect);
-
// Destroy the old instance
if (_fileRecorderPtr)
{
@@ -814,6 +767,8 @@ int TransmitMixer::StartRecordingMicrophone(OutStream* stream,
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::StartRecordingMicrophone()");
+ CriticalSectionScoped cs(&_critSect);
+
if (_fileRecording)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
@@ -846,8 +801,6 @@ int TransmitMixer::StartRecordingMicrophone(OutStream* stream,
format = kFileFormatCompressedFile;
}
- CriticalSectionScoped cs(&_critSect);
-
// Destroy the old instance
if (_fileRecorderPtr)
{
@@ -891,6 +844,8 @@ int TransmitMixer::StopRecordingMicrophone()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::StopRecordingMicrophone()");
+ CriticalSectionScoped cs(&_critSect);
+
if (!_fileRecording)
{
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
@@ -898,8 +853,6 @@ int TransmitMixer::StopRecordingMicrophone()
return 0;
}
- CriticalSectionScoped cs(&_critSect);
-
if (_fileRecorderPtr->StopRecording() != 0)
{
_engineStatisticsPtr->SetLastError(
@@ -1177,63 +1130,42 @@ bool TransmitMixer::IsRecordingCall()
bool TransmitMixer::IsRecordingMic()
{
-
+ CriticalSectionScoped cs(&_critSect);
return _fileRecording;
}
-// TODO(andrew): use RemixAndResample for this.
-int TransmitMixer::GenerateAudioFrame(const int16_t audio[],
- int samples_per_channel,
- int num_channels,
- int sample_rate_hz) {
- int destination_rate;
+void TransmitMixer::GenerateAudioFrame(const int16_t* audio,
+ int samples_per_channel,
+ int num_channels,
+ int sample_rate_hz) {
+ int codec_rate;
int num_codec_channels;
- GetSendCodecInfo(&destination_rate, &num_codec_channels);
-
- // Never upsample the capture signal here. This should be done at the
- // end of the send chain.
- destination_rate = std::min(destination_rate, sample_rate_hz);
- stereo_codec_ = num_codec_channels == 2;
-
- const int16_t* audio_ptr = audio;
- int16_t mono_audio[kMaxMonoDeviceDataSizeSamples];
- assert(samples_per_channel <= kMaxMonoDeviceDataSizeSamples);
- // If no stereo codecs are in use, we downmix a stereo stream from the
- // device early in the chain, before resampling.
- if (num_channels == 2 && !stereo_codec_) {
- AudioFrameOperations::StereoToMono(audio, samples_per_channel,
- mono_audio);
- audio_ptr = mono_audio;
- num_channels = 1;
- }
-
- if (resampler_.InitializeIfNeeded(sample_rate_hz,
- destination_rate,
- num_channels) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
- "TransmitMixer::GenerateAudioFrame() unable to resample");
- return -1;
+ GetSendCodecInfo(&codec_rate, &num_codec_channels);
+ // TODO(ajm): This currently restricts the sample rate to 32 kHz.
+ // See: https://code.google.com/p/webrtc/issues/detail?id=3146
+ // When 48 kHz is supported natively by AudioProcessing, this will have
+ // to be changed to handle 44.1 kHz.
+ int max_sample_rate_hz = kAudioProcMaxNativeSampleRateHz;
+ if (audioproc_->echo_control_mobile()->is_enabled()) {
+ // AECM only supports 8 and 16 kHz.
+ max_sample_rate_hz = 16000;
}
+ codec_rate = std::min(codec_rate, max_sample_rate_hz);
+ stereo_codec_ = num_codec_channels == 2;
- int out_length = resampler_.Resample(audio_ptr,
- samples_per_channel * num_channels,
- _audioFrame.data_,
- AudioFrame::kMaxDataSizeSamples);
- if (out_length == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
- "TransmitMixer::GenerateAudioFrame() resampling failed");
- return -1;
+ if (!mono_buffer_.get()) {
+ // Temporary space for DownConvertToCodecFormat.
+ mono_buffer_.reset(new int16_t[kMaxMonoDataSizeSamples]);
}
-
- _audioFrame.samples_per_channel_ = out_length / num_channels;
- _audioFrame.id_ = _instanceId;
- _audioFrame.timestamp_ = -1;
- _audioFrame.sample_rate_hz_ = destination_rate;
- _audioFrame.speech_type_ = AudioFrame::kNormalSpeech;
- _audioFrame.vad_activity_ = AudioFrame::kVadUnknown;
- _audioFrame.num_channels_ = num_channels;
-
- return 0;
+ DownConvertToCodecFormat(audio,
+ samples_per_channel,
+ num_channels,
+ sample_rate_hz,
+ num_codec_channels,
+ codec_rate,
+ mono_buffer_.get(),
+ &resampler_,
+ &_audioFrame);
}
int32_t TransmitMixer::RecordAudioToFile(
@@ -1262,7 +1194,7 @@ int32_t TransmitMixer::RecordAudioToFile(
int32_t TransmitMixer::MixOrReplaceAudioWithFile(
int mixingFrequency)
{
- scoped_array<int16_t> fileBuffer(new int16_t[640]);
+ scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
int fileSamples(0);
{
@@ -1293,11 +1225,11 @@ int32_t TransmitMixer::MixOrReplaceAudioWithFile(
{
// Currently file stream is always mono.
// TODO(xians): Change the code when FilePlayer supports real stereo.
- Utility::MixWithSat(_audioFrame.data_,
- _audioFrame.num_channels_,
- fileBuffer.get(),
- 1,
- fileSamples);
+ MixWithSat(_audioFrame.data_,
+ _audioFrame.num_channels_,
+ fileBuffer.get(),
+ 1,
+ fileSamples);
} else
{
// Replace ACM audio with file.
@@ -1316,19 +1248,7 @@ int32_t TransmitMixer::MixOrReplaceAudioWithFile(
}
void TransmitMixer::ProcessAudio(int delay_ms, int clock_drift,
- int current_mic_level) {
- if (audioproc_->set_num_channels(_audioFrame.num_channels_,
- _audioFrame.num_channels_) != 0) {
- assert(false);
- LOG_FERR2(LS_ERROR, set_num_channels, _audioFrame.num_channels_,
- _audioFrame.num_channels_);
- }
-
- if (audioproc_->set_sample_rate_hz(_audioFrame.sample_rate_hz_) != 0) {
- assert(false);
- LOG_FERR1(LS_ERROR, set_sample_rate_hz, _audioFrame.sample_rate_hz_);
- }
-
+ int current_mic_level, bool key_pressed) {
if (audioproc_->set_stream_delay_ms(delay_ms) != 0) {
// A redundant warning is reported in AudioDevice, which we've throttled
// to avoid flooding the logs. Relegate this one to LS_VERBOSE to avoid
@@ -1338,8 +1258,8 @@ void TransmitMixer::ProcessAudio(int delay_ms, int clock_drift,
GainControl* agc = audioproc_->gain_control();
if (agc->set_stream_analog_level(current_mic_level) != 0) {
- assert(false);
LOG_FERR1(LS_ERROR, set_stream_analog_level, current_mic_level);
+ assert(false);
}
EchoCancellation* aec = audioproc_->echo_cancellation();
@@ -1347,71 +1267,42 @@ void TransmitMixer::ProcessAudio(int delay_ms, int clock_drift,
aec->set_stream_drift_samples(clock_drift);
}
+ audioproc_->set_stream_key_pressed(key_pressed);
+
int err = audioproc_->ProcessStream(&_audioFrame);
if (err != 0) {
- assert(false);
LOG(LS_ERROR) << "ProcessStream() error: " << err;
+ assert(false);
}
- CriticalSectionScoped cs(&_critSect);
-
// Store new capture level. Only updated when analog AGC is enabled.
_captureLevel = agc->stream_analog_level();
+ CriticalSectionScoped cs(&_critSect);
// Triggers a callback in OnPeriodicProcess().
_saturationWarning |= agc->stream_is_saturated();
}
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
-int TransmitMixer::TypingDetection(bool keyPressed)
+void TransmitMixer::TypingDetection(bool keyPressed)
{
+ // We let the VAD determine if we're using this feature or not.
+ if (_audioFrame.vad_activity_ == AudioFrame::kVadUnknown) {
+ return;
+ }
- // We let the VAD determine if we're using this feature or not.
- if (_audioFrame.vad_activity_ == AudioFrame::kVadUnknown)
- {
- return (0);
- }
-
- if (_audioFrame.vad_activity_ == AudioFrame::kVadActive)
- _timeActive++;
- else
- _timeActive = 0;
-
- // Keep track if time since last typing event
- if (keyPressed)
- {
- _timeSinceLastTyping = 0;
- }
- else
- {
- ++_timeSinceLastTyping;
- }
-
- if ((_timeSinceLastTyping < _typeEventDelay)
- && (_audioFrame.vad_activity_ == AudioFrame::kVadActive)
- && (_timeActive < _timeWindow))
- {
- _penaltyCounter += _costPerTyping;
- if (_penaltyCounter > _reportingThreshold)
- {
- // Triggers a callback in OnPeriodicProcess().
- _typingNoiseWarningPending = true;
- _typingNoiseDetected = true;
- }
- }
-
+ bool vadActive = _audioFrame.vad_activity_ == AudioFrame::kVadActive;
+ if (_typingDetection.Process(keyPressed, vadActive)) {
+ _typingNoiseWarningPending = true;
+ _typingNoiseDetected = true;
+ } else {
// If there is already a warning pending, do not change the state.
- // Otherwise sets a warning pending if noise is off now but previously on.
+ // Otherwise set a warning pending if last callback was for noise detected.
if (!_typingNoiseWarningPending && _typingNoiseDetected) {
- // Triggers a callback in OnPeriodicProcess().
_typingNoiseWarningPending = true;
_typingNoiseDetected = false;
}
-
- if (_penaltyCounter > 0)
- _penaltyCounter-=_penaltyDecay;
-
- return (0);
+ }
}
#endif
@@ -1424,12 +1315,10 @@ int TransmitMixer::GetMixingFrequency()
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
int TransmitMixer::TimeSinceLastTyping(int &seconds)
{
- // We check in VoEAudioProcessingImpl that this is only called when
- // typing detection is active.
-
- // Round to whole seconds
- seconds = (_timeSinceLastTyping + 50) / 100;
- return(0);
+ // We check in VoEAudioProcessingImpl that this is only called when
+ // typing detection is active.
+ seconds = _typingDetection.TimeSinceLastDetectionInSeconds();
+ return 0;
}
#endif
@@ -1440,19 +1329,13 @@ int TransmitMixer::SetTypingDetectionParameters(int timeWindow,
int penaltyDecay,
int typeEventDelay)
{
- if(timeWindow != 0)
- _timeWindow = timeWindow;
- if(costPerTyping != 0)
- _costPerTyping = costPerTyping;
- if(reportingThreshold != 0)
- _reportingThreshold = reportingThreshold;
- if(penaltyDecay != 0)
- _penaltyDecay = penaltyDecay;
- if(typeEventDelay != 0)
- _typeEventDelay = typeEventDelay;
-
-
- return(0);
+ _typingDetection.SetParameters(timeWindow,
+ costPerTyping,
+ reportingThreshold,
+ penaltyDecay,
+ typeEventDelay,
+ 0);
+ return 0;
}
#endif
@@ -1465,5 +1348,4 @@ bool TransmitMixer::IsStereoChannelSwappingEnabled() {
}
} // namespace voe
-
} // namespace webrtc
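
TransmitMixer's hand-rolled typing-noise heuristics (penalty counters and thresholds) are replaced by the webrtc::TypingDetection module; the calls below mirror how it is used in this patch. A sketch of driving it outside TransmitMixer, assuming the caller feeds it one decision per 10 ms frame:

    // Illustrative only: a thin wrapper around webrtc::TypingDetection.
    #include "webrtc/modules/audio_processing/typing_detection.h"

    class TypingMonitor {
     public:
      TypingMonitor() {
        // Same tunables the old TransmitMixer defaults used (10/100/300/1/2);
        // the trailing 0 mirrors the call in SetTypingDetectionParameters().
        detection_.SetParameters(10, 100, 300, 1, 2, 0);
      }

      // Call once per processed 10 ms frame. Returns true while typing noise is
      // considered detected, matching the branch in TypingDetection() above.
      bool OnFrame(bool key_pressed, bool vad_active) {
        return detection_.Process(key_pressed, vad_active);
      }

      int SecondsSinceLastDetection() {
        return detection_.TimeSinceLastDetectionInSeconds();
      }

     private:
      webrtc::TypingDetection detection_;
    };
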
diff --git a/chromium/third_party/webrtc/voice_engine/transmit_mixer.h b/chromium/third_party/webrtc/voice_engine/transmit_mixer.h
index d795eac265e..afeec3ac885 100644
--- a/chromium/third_party/webrtc/voice_engine/transmit_mixer.h
+++ b/chromium/third_party/webrtc/voice_engine/transmit_mixer.h
@@ -13,9 +13,11 @@
#include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_processing/typing_detection.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/utility/interface/file_player.h"
#include "webrtc/modules/utility/interface/file_recorder.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/level_indicator.h"
#include "webrtc/voice_engine/monitor_module.h"
@@ -35,9 +37,7 @@ class MixedAudio;
class Statistics;
class TransmitMixer : public MonitorObserver,
- public FileCallback
-
-{
+ public FileCallback {
public:
static int32_t Create(TransmitMixer*& mixer, uint32_t instanceId);
@@ -70,6 +70,7 @@ public:
// channels for encoding and sending to the network.
void EncodeAndSend(const int voe_channels[], int number_of_voe_channels);
+ // Must be called on the same thread as PrepareDemux().
uint32_t CaptureLevel() const;
int32_t StopSend();
@@ -116,8 +117,6 @@ public:
int IsPlayingFileAsMicrophone() const;
- int ScaleFileAsMicrophonePlayout(float scale);
-
int StartRecordingMicrophone(const char* fileName,
const CodecInst* codecInst);
@@ -173,19 +172,20 @@ private:
// sending codecs.
void GetSendCodecInfo(int* max_sample_rate, int* max_channels);
- int GenerateAudioFrame(const int16_t audioSamples[],
- int nSamples,
- int nChannels,
- int samplesPerSec);
+ void GenerateAudioFrame(const int16_t audioSamples[],
+ int nSamples,
+ int nChannels,
+ int samplesPerSec);
int32_t RecordAudioToFile(uint32_t mixingFrequency);
int32_t MixOrReplaceAudioWithFile(
int mixingFrequency);
- void ProcessAudio(int delay_ms, int clock_drift, int current_mic_level);
+ void ProcessAudio(int delay_ms, int clock_drift, int current_mic_level,
+ bool key_pressed);
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
- int TypingDetection(bool keyPressed);
+ void TypingDetection(bool keyPressed);
#endif
// uses
@@ -198,7 +198,7 @@ private:
// owns
MonitorModule _monitorModule;
AudioFrame _audioFrame;
- PushResampler resampler_; // ADM sample rate -> mixing rate
+ PushResampler<int16_t> resampler_; // ADM sample rate -> mixing rate
FilePlayer* _filePlayerPtr;
FileRecorder* _fileRecorderPtr;
FileRecorder* _fileCallRecorderPtr;
@@ -214,19 +214,9 @@ private:
CriticalSectionWrapper& _callbackCritSect;
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
- int32_t _timeActive;
- int32_t _timeSinceLastTyping;
- int32_t _penaltyCounter;
+ webrtc::TypingDetection _typingDetection;
bool _typingNoiseWarningPending;
bool _typingNoiseDetected;
-
- // Tunable treshold values
- int _timeWindow; // nr of10ms slots accepted to count as a hit.
- int _costPerTyping; // Penalty added for a typing + activity coincide.
- int _reportingThreshold; // Threshold for _penaltyCounter.
- int _penaltyDecay; // How much we reduce _penaltyCounter every 10 ms.
- int _typeEventDelay; // How old typing events we allow
-
#endif
bool _saturationWarning;
@@ -239,10 +229,11 @@ private:
int32_t _remainingMuteMicTimeMs;
bool stereo_codec_;
bool swap_stereo_channels_;
+ scoped_ptr<int16_t[]> mono_buffer_;
};
-#endif // WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H
-
} // namespace voe
} // namespace webrtc
+
+#endif // WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H
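
The header guard's #endif is also moved so that it now closes after the namespace closers, making the guard wrap the entire header. As a general pattern (a sketch with a hypothetical header, not the real transmit_mixer.h):

    // my_header.h -- illustrative only; not part of WebRTC.
    #ifndef MY_PROJECT_MY_HEADER_H_
    #define MY_PROJECT_MY_HEADER_H_

    namespace my_project {

    int Answer();  // declarations go inside the namespace

    }  // namespace my_project

    // The #endif is the last line, after the namespace closers, so the guard
    // covers the whole header -- the layout transmit_mixer.h now uses.
    #endif  // MY_PROJECT_MY_HEADER_H_
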
diff --git a/chromium/third_party/webrtc/voice_engine/utility.cc b/chromium/third_party/webrtc/voice_engine/utility.cc
index 5b7ee8149a1..561b4ef5e24 100644
--- a/chromium/third_party/webrtc/voice_engine/utility.cc
+++ b/chromium/third_party/webrtc/voice_engine/utility.cc
@@ -10,116 +10,150 @@
#include "webrtc/voice_engine/utility.h"
+#include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/interface/module.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc
-{
-
-namespace voe
-{
-enum{kMaxTargetLen = 2*32*10}; // stereo 32KHz 10ms
-
-void Utility::MixWithSat(int16_t target[],
- int target_channel,
- const int16_t source[],
- int source_channel,
- int source_len)
-{
- assert((target_channel == 1) || (target_channel == 2));
- assert((source_channel == 1) || (source_channel == 2));
- assert(source_len <= kMaxTargetLen);
-
- if ((target_channel == 2) && (source_channel == 1))
- {
- // Convert source from mono to stereo.
- int32_t left = 0;
- int32_t right = 0;
- for (int i = 0; i < source_len; ++i) {
- left = source[i] + target[i*2];
- right = source[i] + target[i*2 + 1];
- target[i*2] = WebRtcSpl_SatW32ToW16(left);
- target[i*2 + 1] = WebRtcSpl_SatW32ToW16(right);
- }
- }
- else if ((target_channel == 1) && (source_channel == 2))
- {
- // Convert source from stereo to mono.
- int32_t temp = 0;
- for (int i = 0; i < source_len/2; ++i) {
- temp = ((source[i*2] + source[i*2 + 1])>>1) + target[i];
- target[i] = WebRtcSpl_SatW32ToW16(temp);
- }
- }
- else
- {
- int32_t temp = 0;
- for (int i = 0; i < source_len; ++i) {
- temp = source[i] + target[i];
- target[i] = WebRtcSpl_SatW32ToW16(temp);
- }
- }
-}
+#include "webrtc/common_types.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/modules/utility/interface/audio_frame_operations.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+#include "webrtc/voice_engine/voice_engine_defines.h"
-void Utility::MixSubtractWithSat(int16_t target[],
- const int16_t source[],
- uint16_t len)
-{
- int32_t temp(0);
- for (int i = 0; i < len; i++)
- {
- temp = target[i] - source[i];
- if (temp > 32767)
- target[i] = 32767;
- else if (temp < -32768)
- target[i] = -32768;
- else
- target[i] = (int16_t) temp;
- }
-}
+namespace webrtc {
+namespace voe {
-void Utility::MixAndScaleWithSat(int16_t target[],
- const int16_t source[], float scale,
- uint16_t len)
-{
- int32_t temp(0);
- for (int i = 0; i < len; i++)
- {
- temp = (int32_t) (target[i] + scale * source[i]);
- if (temp > 32767)
- target[i] = 32767;
- else if (temp < -32768)
- target[i] = -32768;
- else
- target[i] = (int16_t) temp;
- }
+// TODO(ajm): There is significant overlap between RemixAndResample and
+// ConvertToCodecFormat, but if we're to consolidate we should probably make a
+// real converter class.
+void RemixAndResample(const AudioFrame& src_frame,
+ PushResampler<int16_t>* resampler,
+ AudioFrame* dst_frame) {
+ const int16_t* audio_ptr = src_frame.data_;
+ int audio_ptr_num_channels = src_frame.num_channels_;
+ int16_t mono_audio[AudioFrame::kMaxDataSizeSamples];
+
+ // Downmix before resampling.
+ if (src_frame.num_channels_ == 2 && dst_frame->num_channels_ == 1) {
+ AudioFrameOperations::StereoToMono(src_frame.data_,
+ src_frame.samples_per_channel_,
+ mono_audio);
+ audio_ptr = mono_audio;
+ audio_ptr_num_channels = 1;
+ }
+
+ if (resampler->InitializeIfNeeded(src_frame.sample_rate_hz_,
+ dst_frame->sample_rate_hz_,
+ audio_ptr_num_channels) == -1) {
+ LOG_FERR3(LS_ERROR, InitializeIfNeeded, src_frame.sample_rate_hz_,
+ dst_frame->sample_rate_hz_, audio_ptr_num_channels);
+ assert(false);
+ }
+
+ const int src_length = src_frame.samples_per_channel_ *
+ audio_ptr_num_channels;
+ int out_length = resampler->Resample(audio_ptr, src_length, dst_frame->data_,
+ AudioFrame::kMaxDataSizeSamples);
+ if (out_length == -1) {
+ LOG_FERR3(LS_ERROR, Resample, audio_ptr, src_length, dst_frame->data_);
+ assert(false);
+ }
+ dst_frame->samples_per_channel_ = out_length / audio_ptr_num_channels;
+
+ // Upmix after resampling.
+ if (src_frame.num_channels_ == 1 && dst_frame->num_channels_ == 2) {
+ // The audio in dst_frame really is mono at this point; MonoToStereo will
+ // set this back to stereo.
+ dst_frame->num_channels_ = 1;
+ AudioFrameOperations::MonoToStereo(dst_frame);
+ }
+
+ dst_frame->timestamp_ = src_frame.timestamp_;
+ dst_frame->elapsed_time_ms_ = src_frame.elapsed_time_ms_;
+ dst_frame->ntp_time_ms_ = src_frame.ntp_time_ms_;
}
-void Utility::Scale(int16_t vector[], float scale, uint16_t len)
-{
- for (int i = 0; i < len; i++)
- {
- vector[i] = (int16_t) (scale * vector[i]);
- }
+void DownConvertToCodecFormat(const int16_t* src_data,
+ int samples_per_channel,
+ int num_channels,
+ int sample_rate_hz,
+ int codec_num_channels,
+ int codec_rate_hz,
+ int16_t* mono_buffer,
+ PushResampler<int16_t>* resampler,
+ AudioFrame* dst_af) {
+ assert(samples_per_channel <= kMaxMonoDataSizeSamples);
+ assert(num_channels == 1 || num_channels == 2);
+ assert(codec_num_channels == 1 || codec_num_channels == 2);
+ dst_af->Reset();
+
+ // Never upsample the capture signal here. This should be done at the
+ // end of the send chain.
+ int destination_rate = std::min(codec_rate_hz, sample_rate_hz);
+
+ // If no stereo codecs are in use, we downmix a stereo stream from the
+ // device early in the chain, before resampling.
+ if (num_channels == 2 && codec_num_channels == 1) {
+ AudioFrameOperations::StereoToMono(src_data, samples_per_channel,
+ mono_buffer);
+ src_data = mono_buffer;
+ num_channels = 1;
+ }
+
+ if (resampler->InitializeIfNeeded(
+ sample_rate_hz, destination_rate, num_channels) != 0) {
+ LOG_FERR3(LS_ERROR,
+ InitializeIfNeeded,
+ sample_rate_hz,
+ destination_rate,
+ num_channels);
+ assert(false);
+ }
+
+ const int in_length = samples_per_channel * num_channels;
+ int out_length = resampler->Resample(
+ src_data, in_length, dst_af->data_, AudioFrame::kMaxDataSizeSamples);
+ if (out_length == -1) {
+ LOG_FERR3(LS_ERROR, Resample, src_data, in_length, dst_af->data_);
+ assert(false);
+ }
+
+ dst_af->samples_per_channel_ = out_length / num_channels;
+ dst_af->sample_rate_hz_ = destination_rate;
+ dst_af->num_channels_ = num_channels;
}
-void Utility::ScaleWithSat(int16_t vector[], float scale,
- uint16_t len)
-{
- int32_t temp(0);
- for (int i = 0; i < len; i++)
- {
- temp = (int32_t) (scale * vector[i]);
- if (temp > 32767)
- vector[i] = 32767;
- else if (temp < -32768)
- vector[i] = -32768;
- else
- vector[i] = (int16_t) temp;
+void MixWithSat(int16_t target[],
+ int target_channel,
+ const int16_t source[],
+ int source_channel,
+ int source_len) {
+ assert(target_channel == 1 || target_channel == 2);
+ assert(source_channel == 1 || source_channel == 2);
+
+ if (target_channel == 2 && source_channel == 1) {
+ // Convert source from mono to stereo.
+ int32_t left = 0;
+ int32_t right = 0;
+ for (int i = 0; i < source_len; ++i) {
+ left = source[i] + target[i * 2];
+ right = source[i] + target[i * 2 + 1];
+ target[i * 2] = WebRtcSpl_SatW32ToW16(left);
+ target[i * 2 + 1] = WebRtcSpl_SatW32ToW16(right);
+ }
+ } else if (target_channel == 1 && source_channel == 2) {
+ // Convert source from stereo to mono.
+ int32_t temp = 0;
+ for (int i = 0; i < source_len / 2; ++i) {
+ temp = ((source[i * 2] + source[i * 2 + 1]) >> 1) + target[i];
+ target[i] = WebRtcSpl_SatW32ToW16(temp);
}
+ } else {
+ int32_t temp = 0;
+ for (int i = 0; i < source_len; ++i) {
+ temp = source[i] + target[i];
+ target[i] = WebRtcSpl_SatW32ToW16(temp);
+ }
+ }
}
} // namespace voe
-
} // namespace webrtc
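A note on the rewritten MixWithSat() above: every mixed sample is clamped through WebRtcSpl_SatW32ToW16() instead of the hand-rolled 32767/-32768 branches the deleted helpers used. A small, self-contained illustration of the saturating mono-to-mono path; the buffer contents are made up for the example.

    #include <stdint.h>
    #include <stdio.h>

    #include "webrtc/voice_engine/utility.h"

    int main() {
      // Two mono 4-sample buffers near full scale; the mix must not wrap.
      int16_t target[4] = {30000, -30000, 1000, 0};
      const int16_t source[4] = {10000, -10000, 500, 123};
      webrtc::voe::MixWithSat(target, 1, source, 1, 4);
      // target is now {32767, -32768, 1500, 123}: the first two sums saturate.
      for (int i = 0; i < 4; ++i)
        printf("%d\n", target[i]);
      return 0;
    }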
diff --git a/chromium/third_party/webrtc/voice_engine/utility.h b/chromium/third_party/webrtc/voice_engine/utility.h
index fcb4462e520..38206959b27 100644
--- a/chromium/third_party/webrtc/voice_engine/utility.h
+++ b/chromium/third_party/webrtc/voice_engine/utility.h
@@ -12,47 +12,52 @@
* Contains functions often used by different parts of VoiceEngine.
*/
-#ifndef WEBRTC_VOICE_ENGINE_UTILITY_H
-#define WEBRTC_VOICE_ENGINE_UTILITY_H
+#ifndef WEBRTC_VOICE_ENGINE_UTILITY_H_
+#define WEBRTC_VOICE_ENGINE_UTILITY_H_
+#include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/typedefs.h"
-#include "webrtc/voice_engine/voice_engine_defines.h"
-namespace webrtc
-{
-
-class Module;
-
-namespace voe
-{
-
-class Utility
-{
-public:
- static void MixWithSat(int16_t target[],
- int target_channel,
- const int16_t source[],
- int source_channel,
- int source_len);
-
- static void MixSubtractWithSat(int16_t target[],
- const int16_t source[],
- uint16_t len);
-
- static void MixAndScaleWithSat(int16_t target[],
- const int16_t source[],
- float scale,
- uint16_t len);
-
- static void Scale(int16_t vector[], float scale, uint16_t len);
-
- static void ScaleWithSat(int16_t vector[],
- float scale,
- uint16_t len);
-};
+namespace webrtc {
+
+class AudioFrame;
+
+namespace voe {
+
+// Upmix or downmix and resample the audio in |src_frame| to |dst_frame|.
+// Expects |dst_frame| to have its sample rate and channels members set to the
+// desired values. Updates the samples per channel member accordingly. No other
+// members will be changed.
+void RemixAndResample(const AudioFrame& src_frame,
+ PushResampler<int16_t>* resampler,
+ AudioFrame* dst_frame);
+
+// Downmix and downsample the audio in |src_data| to |dst_af| as necessary,
+// specified by |codec_num_channels| and |codec_rate_hz|. |mono_buffer| is
+// temporary space and must be of sufficient size to hold the downmixed source
+// audio (recommend using a size of kMaxMonoDataSizeSamples).
+//
+// |dst_af| will have its data and format members (sample rate, channels and
+// samples per channel) set appropriately. No other members will be changed.
+// TODO(ajm): For now, this still calls Reset() on |dst_af|. Remove this, as
+// it shouldn't be needed.
+void DownConvertToCodecFormat(const int16_t* src_data,
+ int samples_per_channel,
+ int num_channels,
+ int sample_rate_hz,
+ int codec_num_channels,
+ int codec_rate_hz,
+ int16_t* mono_buffer,
+ PushResampler<int16_t>* resampler,
+ AudioFrame* dst_af);
+
+void MixWithSat(int16_t target[],
+ int target_channel,
+ const int16_t source[],
+ int source_channel,
+ int source_len);
} // namespace voe
-
} // namespace webrtc
-#endif // WEBRTC_VOICE_ENGINE_UTILITY_H
+#endif // WEBRTC_VOICE_ENGINE_UTILITY_H_
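A usage sketch of the two conversion functions declared above, taking a 48 kHz stereo capture frame down to 16 kHz mono both frame-to-frame and from a raw pointer. The input frame is assumed to be filled elsewhere; the scratch buffer is sized with AudioFrame::kMaxDataSizeSamples, which is at least as large as the kMaxMonoDataSizeSamples the comment recommends.

    #include "webrtc/common_audio/resampler/include/push_resampler.h"
    #include "webrtc/modules/interface/module_common_types.h"
    #include "webrtc/voice_engine/utility.h"

    void ConvertCaptureFrame(const webrtc::AudioFrame& capture_48k_stereo) {
      webrtc::PushResampler<int16_t> resampler;

      // Frame-to-frame path: |dst| carries the desired format on input.
      webrtc::AudioFrame dst;
      dst.sample_rate_hz_ = 16000;
      dst.num_channels_ = 1;
      webrtc::voe::RemixAndResample(capture_48k_stereo, &resampler, &dst);

      // Raw-pointer path used on the capture side; it never upsamples.
      int16_t mono_buffer[webrtc::AudioFrame::kMaxDataSizeSamples];
      webrtc::AudioFrame codec_frame;
      webrtc::voe::DownConvertToCodecFormat(
          capture_48k_stereo.data_, capture_48k_stereo.samples_per_channel_,
          capture_48k_stereo.num_channels_, capture_48k_stereo.sample_rate_hz_,
          1 /* codec_num_channels */, 16000 /* codec_rate_hz */,
          mono_buffer, &resampler, &codec_frame);
    }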
diff --git a/chromium/third_party/webrtc/voice_engine/utility_unittest.cc b/chromium/third_party/webrtc/voice_engine/utility_unittest.cc
new file mode 100644
index 00000000000..8f7efa87f65
--- /dev/null
+++ b/chromium/third_party/webrtc/voice_engine/utility_unittest.cc
@@ -0,0 +1,263 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_audio/resampler/include/push_resampler.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/voice_engine/utility.h"
+#include "webrtc/voice_engine/voice_engine_defines.h"
+
+namespace webrtc {
+namespace voe {
+namespace {
+
+enum FunctionToTest {
+ TestRemixAndResample,
+ TestDownConvertToCodecFormat
+};
+
+class UtilityTest : public ::testing::Test {
+ protected:
+ UtilityTest() {
+ src_frame_.sample_rate_hz_ = 16000;
+ src_frame_.samples_per_channel_ = src_frame_.sample_rate_hz_ / 100;
+ src_frame_.num_channels_ = 1;
+ dst_frame_.CopyFrom(src_frame_);
+ golden_frame_.CopyFrom(src_frame_);
+ }
+
+ void RunResampleTest(int src_channels, int src_sample_rate_hz,
+ int dst_channels, int dst_sample_rate_hz,
+ FunctionToTest function);
+
+ PushResampler<int16_t> resampler_;
+ AudioFrame src_frame_;
+ AudioFrame dst_frame_;
+ AudioFrame golden_frame_;
+};
+
+// Sets the signal value to increase by |data| with every sample. Floats are
+// used so non-integer values result in rounding error, but not an accumulating
+// error.
+void SetMonoFrame(AudioFrame* frame, float data, int sample_rate_hz) {
+ memset(frame->data_, 0, sizeof(frame->data_));
+ frame->num_channels_ = 1;
+ frame->sample_rate_hz_ = sample_rate_hz;
+ frame->samples_per_channel_ = sample_rate_hz / 100;
+ for (int i = 0; i < frame->samples_per_channel_; i++) {
+ frame->data_[i] = data * i;
+ }
+}
+
+// Keep the existing sample rate.
+void SetMonoFrame(AudioFrame* frame, float data) {
+ SetMonoFrame(frame, data, frame->sample_rate_hz_);
+}
+
+// Sets the signal value to increase by |left| and |right| with every sample in
+// each channel respectively.
+void SetStereoFrame(AudioFrame* frame, float left, float right,
+ int sample_rate_hz) {
+ memset(frame->data_, 0, sizeof(frame->data_));
+ frame->num_channels_ = 2;
+ frame->sample_rate_hz_ = sample_rate_hz;
+ frame->samples_per_channel_ = sample_rate_hz / 100;
+ for (int i = 0; i < frame->samples_per_channel_; i++) {
+ frame->data_[i * 2] = left * i;
+ frame->data_[i * 2 + 1] = right * i;
+ }
+}
+
+// Keep the existing sample rate.
+void SetStereoFrame(AudioFrame* frame, float left, float right) {
+ SetStereoFrame(frame, left, right, frame->sample_rate_hz_);
+}
+
+void VerifyParams(const AudioFrame& ref_frame, const AudioFrame& test_frame) {
+ EXPECT_EQ(ref_frame.num_channels_, test_frame.num_channels_);
+ EXPECT_EQ(ref_frame.samples_per_channel_, test_frame.samples_per_channel_);
+ EXPECT_EQ(ref_frame.sample_rate_hz_, test_frame.sample_rate_hz_);
+}
+
+// Computes the best SNR based on the error between |ref_frame| and
+// |test_frame|. It allows for up to a |max_delay| in samples between the
+// signals to compensate for the resampling delay.
+float ComputeSNR(const AudioFrame& ref_frame, const AudioFrame& test_frame,
+ int max_delay) {
+ VerifyParams(ref_frame, test_frame);
+ float best_snr = 0;
+ int best_delay = 0;
+ for (int delay = 0; delay <= max_delay; delay++) {
+ float mse = 0;
+ float variance = 0;
+ for (int i = 0; i < ref_frame.samples_per_channel_ *
+ ref_frame.num_channels_ - delay; i++) {
+ int error = ref_frame.data_[i] - test_frame.data_[i + delay];
+ mse += error * error;
+ variance += ref_frame.data_[i] * ref_frame.data_[i];
+ }
+ float snr = 100; // We assign 100 dB to the zero-error case.
+ if (mse > 0)
+ snr = 10 * log10(variance / mse);
+ if (snr > best_snr) {
+ best_snr = snr;
+ best_delay = delay;
+ }
+ }
+ printf("SNR=%.1f dB at delay=%d\n", best_snr, best_delay);
+ return best_snr;
+}
+
+void VerifyFramesAreEqual(const AudioFrame& ref_frame,
+ const AudioFrame& test_frame) {
+ VerifyParams(ref_frame, test_frame);
+ for (int i = 0; i < ref_frame.samples_per_channel_ * ref_frame.num_channels_;
+ i++) {
+ EXPECT_EQ(ref_frame.data_[i], test_frame.data_[i]);
+ }
+}
+
+void UtilityTest::RunResampleTest(int src_channels,
+ int src_sample_rate_hz,
+ int dst_channels,
+ int dst_sample_rate_hz,
+ FunctionToTest function) {
+ PushResampler<int16_t> resampler; // Create a new one with every test.
+ const int16_t kSrcLeft = 30; // Shouldn't overflow for any used sample rate.
+ const int16_t kSrcRight = 15;
+ const float resampling_factor = (1.0 * src_sample_rate_hz) /
+ dst_sample_rate_hz;
+ const float dst_left = resampling_factor * kSrcLeft;
+ const float dst_right = resampling_factor * kSrcRight;
+ const float dst_mono = (dst_left + dst_right) / 2;
+ if (src_channels == 1)
+ SetMonoFrame(&src_frame_, kSrcLeft, src_sample_rate_hz);
+ else
+ SetStereoFrame(&src_frame_, kSrcLeft, kSrcRight, src_sample_rate_hz);
+
+ if (dst_channels == 1) {
+ SetMonoFrame(&dst_frame_, 0, dst_sample_rate_hz);
+ if (src_channels == 1)
+ SetMonoFrame(&golden_frame_, dst_left, dst_sample_rate_hz);
+ else
+ SetMonoFrame(&golden_frame_, dst_mono, dst_sample_rate_hz);
+ } else {
+ SetStereoFrame(&dst_frame_, 0, 0, dst_sample_rate_hz);
+ if (src_channels == 1)
+ SetStereoFrame(&golden_frame_, dst_left, dst_left, dst_sample_rate_hz);
+ else
+ SetStereoFrame(&golden_frame_, dst_left, dst_right, dst_sample_rate_hz);
+ }
+
+ // The sinc resampler has a known delay, which we compute here. Multiplying by
+ // two gives us a crude maximum for any resampling, as the old resampler
+ // typically (but not always) has lower delay.
+ static const int kInputKernelDelaySamples = 16;
+ const int max_delay = static_cast<double>(dst_sample_rate_hz)
+ / src_sample_rate_hz * kInputKernelDelaySamples * dst_channels * 2;
+ printf("(%d, %d Hz) -> (%d, %d Hz) ", // SNR reported on the same line later.
+ src_channels, src_sample_rate_hz, dst_channels, dst_sample_rate_hz);
+ if (function == TestRemixAndResample) {
+ RemixAndResample(src_frame_, &resampler, &dst_frame_);
+ } else {
+ int16_t mono_buffer[kMaxMonoDataSizeSamples];
+ DownConvertToCodecFormat(src_frame_.data_,
+ src_frame_.samples_per_channel_,
+ src_frame_.num_channels_,
+ src_frame_.sample_rate_hz_,
+ dst_frame_.num_channels_,
+ dst_frame_.sample_rate_hz_,
+ mono_buffer,
+ &resampler,
+ &dst_frame_);
+ }
+
+ if (src_sample_rate_hz == 96000 && dst_sample_rate_hz == 8000) {
+ // The sinc resampler gives poor SNR at this extreme conversion, but we
+ // expect to see this rarely in practice.
+ EXPECT_GT(ComputeSNR(golden_frame_, dst_frame_, max_delay), 14.0f);
+ } else {
+ EXPECT_GT(ComputeSNR(golden_frame_, dst_frame_, max_delay), 46.0f);
+ }
+}
+
+TEST_F(UtilityTest, RemixAndResampleCopyFrameSucceeds) {
+ // Stereo -> stereo.
+ SetStereoFrame(&src_frame_, 10, 10);
+ SetStereoFrame(&dst_frame_, 0, 0);
+ RemixAndResample(src_frame_, &resampler_, &dst_frame_);
+ VerifyFramesAreEqual(src_frame_, dst_frame_);
+
+ // Mono -> mono.
+ SetMonoFrame(&src_frame_, 20);
+ SetMonoFrame(&dst_frame_, 0);
+ RemixAndResample(src_frame_, &resampler_, &dst_frame_);
+ VerifyFramesAreEqual(src_frame_, dst_frame_);
+}
+
+TEST_F(UtilityTest, RemixAndResampleMixingOnlySucceeds) {
+ // Stereo -> mono.
+ SetStereoFrame(&dst_frame_, 0, 0);
+ SetMonoFrame(&src_frame_, 10);
+ SetStereoFrame(&golden_frame_, 10, 10);
+ RemixAndResample(src_frame_, &resampler_, &dst_frame_);
+ VerifyFramesAreEqual(dst_frame_, golden_frame_);
+
+ // Mono -> stereo.
+ SetMonoFrame(&dst_frame_, 0);
+ SetStereoFrame(&src_frame_, 10, 20);
+ SetMonoFrame(&golden_frame_, 15);
+ RemixAndResample(src_frame_, &resampler_, &dst_frame_);
+ VerifyFramesAreEqual(golden_frame_, dst_frame_);
+}
+
+TEST_F(UtilityTest, RemixAndResampleSucceeds) {
+ const int kSampleRates[] = {8000, 16000, 32000, 44100, 48000, 96000};
+ const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
+ const int kChannels[] = {1, 2};
+ const int kChannelsSize = sizeof(kChannels) / sizeof(*kChannels);
+ for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
+ for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
+ for (int src_channel = 0; src_channel < kChannelsSize; src_channel++) {
+ for (int dst_channel = 0; dst_channel < kChannelsSize; dst_channel++) {
+ RunResampleTest(kChannels[src_channel], kSampleRates[src_rate],
+ kChannels[dst_channel], kSampleRates[dst_rate],
+ TestRemixAndResample);
+ }
+ }
+ }
+ }
+}
+
+TEST_F(UtilityTest, ConvertToCodecFormatSucceeds) {
+ const int kSampleRates[] = {8000, 16000, 32000, 44100, 48000, 96000};
+ const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
+ const int kChannels[] = {1, 2};
+ const int kChannelsSize = sizeof(kChannels) / sizeof(*kChannels);
+ for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
+ for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
+ for (int src_channel = 0; src_channel < kChannelsSize; src_channel++) {
+ for (int dst_channel = 0; dst_channel < kChannelsSize; dst_channel++) {
+ if (dst_rate <= src_rate && dst_channel <= src_channel) {
+ RunResampleTest(kChannels[src_channel], kSampleRates[src_rate],
+ kChannels[src_channel], kSampleRates[dst_rate],
+ TestDownConvertToCodecFormat);
+ }
+ }
+ }
+ }
+ }
+}
+
+} // namespace
+} // namespace voe
+} // namespace webrtc
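For reference on the golden frames in the test above: the source signal is a linear ramp, so after resampling the expected per-sample increment simply scales by src_rate/dst_rate, and ComputeSNR() scores the result as 10 * log10(variance / mse) over a small window of candidate delays. A worked example of that slope arithmetic for one tested pair:

    // Golden-frame slope used by RunResampleTest(): downsampling 16 kHz to
    // 8 kHz doubles the per-sample increment of the ramp.
    float GoldenSlope(int src_rate_hz, int dst_rate_hz, float src_slope) {
      const float resampling_factor =
          static_cast<float>(src_rate_hz) / dst_rate_hz;
      return resampling_factor * src_slope;  // (16000, 8000, 30) -> 60
    }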
diff --git a/chromium/third_party/webrtc/voice_engine/voe_auto_test.isolate b/chromium/third_party/webrtc/voice_engine/voe_auto_test.isolate
index 651d074e55c..3722b7d764f 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_auto_test.isolate
+++ b/chromium/third_party/webrtc/voice_engine/voe_auto_test.isolate
@@ -8,27 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/voe_auto_test<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/voe_auto_test<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/voice_engine/voe_base_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_base_impl.cc
index 682a4a5af72..ad6314a7e50 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_base_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_base_impl.cc
@@ -25,13 +25,6 @@
#include "webrtc/voice_engine/utility.h"
#include "webrtc/voice_engine/voice_engine_impl.h"
-#if (defined(_WIN32) && defined(_DLL) && (_MSC_VER == 1400))
-// Fix for VS 2005 MD/MDd link problem
-#include <stdio.h>
-extern "C"
- { FILE _iob[3] = { __iob_func()[0], __iob_func()[1], __iob_func()[2]}; }
-#endif
-
namespace webrtc
{
@@ -49,8 +42,7 @@ VoEBase* VoEBase::GetInterface(VoiceEngine* voiceEngine)
VoEBaseImpl::VoEBaseImpl(voe::SharedData* shared) :
_voiceEngineObserverPtr(NULL),
_callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _voiceEngineObserver(false), _oldVoEMicLevel(0), _oldMicLevel(0),
- _shared(shared)
+ _voiceEngineObserver(false), _shared(shared)
{
WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
"VoEBaseImpl() - ctor");
@@ -133,19 +125,19 @@ int32_t VoEBaseImpl::RecordedDataIsAvailable(
uint32_t samplesPerSec,
uint32_t totalDelayMS,
int32_t clockDrift,
- uint32_t currentMicLevel,
+ uint32_t micLevel,
bool keyPressed,
uint32_t& newMicLevel)
{
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_shared->instance_id(), -1),
"VoEBaseImpl::RecordedDataIsAvailable(nSamples=%u, "
"nBytesPerSample=%u, nChannels=%u, samplesPerSec=%u, "
- "totalDelayMS=%u, clockDrift=%d, currentMicLevel=%u)",
+ "totalDelayMS=%u, clockDrift=%d, micLevel=%u)",
nSamples, nBytesPerSample, nChannels, samplesPerSec,
- totalDelayMS, clockDrift, currentMicLevel);
+ totalDelayMS, clockDrift, micLevel);
newMicLevel = static_cast<uint32_t>(ProcessRecordedDataWithAPM(
NULL, 0, audioSamples, samplesPerSec, nChannels, nSamples,
- totalDelayMS, clockDrift, currentMicLevel, keyPressed));
+ totalDelayMS, clockDrift, micLevel, keyPressed));
return 0;
}
@@ -156,41 +148,23 @@ int32_t VoEBaseImpl::NeedMorePlayData(
uint8_t nChannels,
uint32_t samplesPerSec,
void* audioSamples,
- uint32_t& nSamplesOut)
+ uint32_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms)
{
- WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "VoEBaseImpl::NeedMorePlayData(nSamples=%u, "
- "nBytesPerSample=%d, nChannels=%d, samplesPerSec=%u)",
- nSamples, nBytesPerSample, nChannels, samplesPerSec);
-
- assert(_shared->output_mixer() != NULL);
-
- // TODO(andrew): if the device is running in mono, we should tell the mixer
- // here so that it will only request mono from AudioCodingModule.
- // Perform mixing of all active participants (channel-based mixing)
- _shared->output_mixer()->MixActiveChannels();
-
- // Additional operations on the combined signal
- _shared->output_mixer()->DoOperationsOnCombinedSignal();
-
- // Retrieve the final output mix (resampled to match the ADM)
- _shared->output_mixer()->GetMixedAudio(samplesPerSec, nChannels,
- &_audioFrame);
-
- assert(static_cast<int>(nSamples) == _audioFrame.samples_per_channel_);
- assert(samplesPerSec ==
- static_cast<uint32_t>(_audioFrame.sample_rate_hz_));
+ WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_shared->instance_id(), -1),
+ "VoEBaseImpl::NeedMorePlayData(nSamples=%u, "
+ "nBytesPerSample=%d, nChannels=%d, samplesPerSec=%u)",
+ nSamples, nBytesPerSample, nChannels, samplesPerSec);
- // Deliver audio (PCM) samples to the ADM
- memcpy(
- (int16_t*) audioSamples,
- (const int16_t*) _audioFrame.data_,
- sizeof(int16_t) * (_audioFrame.samples_per_channel_
- * _audioFrame.num_channels_));
+ GetPlayoutData(static_cast<int>(samplesPerSec),
+ static_cast<int>(nChannels),
+ static_cast<int>(nSamples), true, audioSamples,
+ elapsed_time_ms, ntp_time_ms);
- nSamplesOut = _audioFrame.samples_per_channel_;
+ nSamplesOut = _audioFrame.samples_per_channel_;
- return 0;
+ return 0;
}
int VoEBaseImpl::OnDataAvailable(const int voe_channels[],
@@ -200,16 +174,16 @@ int VoEBaseImpl::OnDataAvailable(const int voe_channels[],
int number_of_channels,
int number_of_frames,
int audio_delay_milliseconds,
- int current_volume,
+ int volume,
bool key_pressed,
bool need_audio_processing) {
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_shared->instance_id(), -1),
"VoEBaseImpl::OnDataAvailable(number_of_voe_channels=%d, "
"sample_rate=%d, number_of_channels=%d, number_of_frames=%d, "
- "audio_delay_milliseconds=%d, current_volume=%d, "
+ "audio_delay_milliseconds=%d, volume=%d, "
"key_pressed=%d, need_audio_processing=%d)",
number_of_voe_channels, sample_rate, number_of_channels,
- number_of_frames, audio_delay_milliseconds, current_volume,
+ number_of_frames, audio_delay_milliseconds, volume,
key_pressed, need_audio_processing);
if (number_of_voe_channels == 0)
return 0;
@@ -218,32 +192,60 @@ int VoEBaseImpl::OnDataAvailable(const int voe_channels[],
return ProcessRecordedDataWithAPM(
voe_channels, number_of_voe_channels, audio_data, sample_rate,
number_of_channels, number_of_frames, audio_delay_milliseconds,
- 0, current_volume, key_pressed);
+ 0, volume, key_pressed);
}
// No need to go through the APM, demultiplex the data to each VoE channel,
// encode and send to the network.
for (int i = 0; i < number_of_voe_channels; ++i) {
- voe::ChannelOwner ch =
- _shared->channel_manager().GetChannel(voe_channels[i]);
- voe::Channel* channel_ptr = ch.channel();
- if (!channel_ptr)
- continue;
-
- if (channel_ptr->InputIsOnHold()) {
- channel_ptr->UpdateLocalTimeStamp();
- } else if (channel_ptr->Sending()) {
- channel_ptr->Demultiplex(audio_data, sample_rate, number_of_frames,
- number_of_channels);
- channel_ptr->PrepareEncodeAndSend(sample_rate);
- channel_ptr->EncodeAndSend();
- }
+ // TODO(ajm): In the case where multiple channels are using the same codec
+ // rate, this path needlessly does extra conversions. We should convert once
+ // and share between channels.
+ PushCaptureData(voe_channels[i], audio_data, 16, sample_rate,
+ number_of_channels, number_of_frames);
}
// Return 0 to indicate no need to change the volume.
return 0;
}
+void VoEBaseImpl::OnData(int voe_channel, const void* audio_data,
+ int bits_per_sample, int sample_rate,
+ int number_of_channels,
+ int number_of_frames) {
+ PushCaptureData(voe_channel, audio_data, bits_per_sample, sample_rate,
+ number_of_channels, number_of_frames);
+}
+
+void VoEBaseImpl::PushCaptureData(int voe_channel, const void* audio_data,
+ int bits_per_sample, int sample_rate,
+ int number_of_channels,
+ int number_of_frames) {
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(voe_channel);
+ voe::Channel* channel_ptr = ch.channel();
+ if (!channel_ptr)
+ return;
+
+ if (channel_ptr->Sending()) {
+ channel_ptr->Demultiplex(static_cast<const int16_t*>(audio_data),
+ sample_rate, number_of_frames, number_of_channels);
+ channel_ptr->PrepareEncodeAndSend(sample_rate);
+ channel_ptr->EncodeAndSend();
+ }
+}
+
+void VoEBaseImpl::PullRenderData(int bits_per_sample, int sample_rate,
+ int number_of_channels, int number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) {
+ assert(bits_per_sample == 16);
+ assert(number_of_frames == static_cast<int>(sample_rate / 100));
+
+ GetPlayoutData(sample_rate, number_of_channels, number_of_frames, false,
+ audio_data, elapsed_time_ms, ntp_time_ms);
+}
+
int VoEBaseImpl::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
@@ -385,18 +387,6 @@ int VoEBaseImpl::Init(AudioDeviceModule* external_adm,
_shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceInfo,
"Init() failed to set the default output device");
}
- if (_shared->audio_device()->SpeakerIsAvailable(&available) != 0)
- {
- _shared->SetLastError(VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceInfo,
- "Init() failed to check speaker availability, trying to "
- "initialize speaker anyway");
- }
- else if (!available)
- {
- _shared->SetLastError(VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceInfo,
- "Init() speaker not available, trying to initialize speaker "
- "anyway");
- }
if (_shared->audio_device()->InitSpeaker() != 0)
{
_shared->SetLastError(VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceInfo,
@@ -410,18 +400,6 @@ int VoEBaseImpl::Init(AudioDeviceModule* external_adm,
_shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceInfo,
"Init() failed to set the default input device");
}
- if (_shared->audio_device()->MicrophoneIsAvailable(&available) != 0)
- {
- _shared->SetLastError(VE_CANNOT_ACCESS_MIC_VOL, kTraceInfo,
- "Init() failed to check microphone availability, trying to "
- "initialize microphone anyway");
- }
- else if (!available)
- {
- _shared->SetLastError(VE_CANNOT_ACCESS_MIC_VOL, kTraceInfo,
- "Init() microphone not available, trying to initialize "
- "microphone anyway");
- }
if (_shared->audio_device()->InitMicrophone() != 0)
{
_shared->SetLastError(VE_CANNOT_ACCESS_MIC_VOL, kTraceInfo,
@@ -465,11 +443,6 @@ int VoEBaseImpl::Init(AudioDeviceModule* external_adm,
// Set the error state for any failures in this block.
_shared->SetLastError(VE_APM_ERROR);
- if (audioproc->echo_cancellation()->set_device_sample_rate_hz(48000)) {
- LOG_FERR1(LS_ERROR, set_device_sample_rate_hz, 48000);
- return -1;
- }
-
// Configure AudioProcessing components.
if (audioproc->high_pass_filter()->Enable(true) != 0) {
LOG_FERR1(LS_ERROR, high_pass_filter()->Enable, true);
@@ -823,16 +796,6 @@ int VoEBaseImpl::GetVersion(char version[1024])
accLen += len;
assert(accLen < kVoiceEngineVersionMaxMessageSize);
#endif
-#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
- len = AddExternalRecAndPlayoutBuild(versionPtr);
- if (len == -1)
- {
- return -1;
- }
- versionPtr += len;
- accLen += len;
- assert(accLen < kVoiceEngineVersionMaxMessageSize);
- #endif
memcpy(version, versionBuf, accLen);
version[accLen] = '\0';
@@ -882,13 +845,6 @@ int32_t VoEBaseImpl::AddExternalTransportBuild(char* str) const
}
#endif
-#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
-int32_t VoEBaseImpl::AddExternalRecAndPlayoutBuild(char* str) const
-{
- return sprintf(str, "External recording and playout build\n");
-}
-#endif
-
int VoEBaseImpl::LastError()
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
@@ -896,88 +852,6 @@ int VoEBaseImpl::LastError()
return (_shared->statistics().LastError());
}
-
-int VoEBaseImpl::SetNetEQPlayoutMode(int channel, NetEqModes mode)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetNetEQPlayoutMode(channel=%i, mode=%i)", channel, mode);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetNetEQPlayoutMode() failed to locate channel");
- return -1;
- }
- return channelPtr->SetNetEQPlayoutMode(mode);
-}
-
-int VoEBaseImpl::GetNetEQPlayoutMode(int channel, NetEqModes& mode)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetNetEQPlayoutMode(channel=%i, mode=?)", channel);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetNetEQPlayoutMode() failed to locate channel");
- return -1;
- }
- return channelPtr->GetNetEQPlayoutMode(mode);
-}
-
-int VoEBaseImpl::SetOnHoldStatus(int channel, bool enable, OnHoldModes mode)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetOnHoldStatus(channel=%d, enable=%d, mode=%d)", channel,
- enable, mode);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetOnHoldStatus() failed to locate channel");
- return -1;
- }
- return channelPtr->SetOnHoldStatus(enable, mode);
-}
-
-int VoEBaseImpl::GetOnHoldStatus(int channel, bool& enabled, OnHoldModes& mode)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetOnHoldStatus(channel=%d, enabled=?, mode=?)", channel);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetOnHoldStatus() failed to locate channel");
- return -1;
- }
- return channelPtr->GetOnHoldStatus(enabled, mode);
-}
-
int32_t VoEBaseImpl::StartPlayout()
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
@@ -1145,54 +1019,41 @@ int VoEBaseImpl::ProcessRecordedDataWithAPM(
uint32_t number_of_frames,
uint32_t audio_delay_milliseconds,
int32_t clock_drift,
- uint32_t current_volume,
+ uint32_t volume,
bool key_pressed) {
assert(_shared->transmit_mixer() != NULL);
assert(_shared->audio_device() != NULL);
- bool is_analog_agc(false);
- if (_shared->audio_processing() &&
- _shared->audio_processing()->gain_control()->mode() ==
- GainControl::kAdaptiveAnalog) {
- is_analog_agc = true;
- }
-
- // Only deal with the volume in adaptive analog mode.
uint32_t max_volume = 0;
- uint16_t current_voe_mic_level = 0;
- if (is_analog_agc) {
+ uint16_t voe_mic_level = 0;
+ // Check for zero to skip this calculation; the consumer may use this to
+ // indicate no volume is available.
+ if (volume != 0) {
// Scale from ADM to VoE level range
if (_shared->audio_device()->MaxMicrophoneVolume(&max_volume) == 0) {
if (max_volume) {
- current_voe_mic_level = static_cast<uint16_t>(
- (current_volume * kMaxVolumeLevel +
+ voe_mic_level = static_cast<uint16_t>(
+ (volume * kMaxVolumeLevel +
static_cast<int>(max_volume / 2)) / max_volume);
}
}
- // We learned that on certain systems (e.g Linux) the current_voe_mic_level
+ // We learned that on certain systems (e.g Linux) the voe_mic_level
// can be greater than the maxVolumeLevel therefore
- // we are going to cap the current_voe_mic_level to the maxVolumeLevel
- // and change the maxVolume to current_volume if it turns out that
- // the current_voe_mic_level is indeed greater than the maxVolumeLevel.
- if (current_voe_mic_level > kMaxVolumeLevel) {
- current_voe_mic_level = kMaxVolumeLevel;
- max_volume = current_volume;
+ // we are going to cap the voe_mic_level to the maxVolumeLevel
+ // and change the maxVolume to volume if it turns out that
+ // the voe_mic_level is indeed greater than the maxVolumeLevel.
+ if (voe_mic_level > kMaxVolumeLevel) {
+ voe_mic_level = kMaxVolumeLevel;
+ max_volume = volume;
}
}
- // Keep track if the MicLevel has been changed by the AGC, if not,
- // use the old value AGC returns to let AGC continue its trend,
- // so eventually the AGC is able to change the mic level. This handles
- // issues with truncation introduced by the scaling.
- if (_oldMicLevel == current_volume)
- current_voe_mic_level = static_cast<uint16_t>(_oldVoEMicLevel);
-
// Perform channel-independent operations
// (APM, mix with file, record to file, mute, etc.)
_shared->transmit_mixer()->PrepareDemux(
audio_data, number_of_frames, number_of_channels, sample_rate,
static_cast<uint16_t>(audio_delay_milliseconds), clock_drift,
- current_voe_mic_level, key_pressed);
+ voe_mic_level, key_pressed);
// Copy the audio frame to each sending channel and perform
// channel-dependent operations (file mixing, mute, etc.), encode and
@@ -1209,17 +1070,10 @@ int VoEBaseImpl::ProcessRecordedDataWithAPM(
number_of_voe_channels);
}
- if (!is_analog_agc)
- return 0;
-
// Scale from VoE to ADM level range.
uint32_t new_voe_mic_level = _shared->transmit_mixer()->CaptureLevel();
- // Keep track of the value AGC returns.
- _oldVoEMicLevel = new_voe_mic_level;
- _oldMicLevel = current_volume;
-
- if (new_voe_mic_level != current_voe_mic_level) {
+ if (new_voe_mic_level != voe_mic_level) {
// Return the new volume if AGC has changed the volume.
return static_cast<int>(
(new_voe_mic_level * max_volume +
@@ -1230,4 +1084,34 @@ int VoEBaseImpl::ProcessRecordedDataWithAPM(
return 0;
}
+void VoEBaseImpl::GetPlayoutData(int sample_rate, int number_of_channels,
+ int number_of_frames, bool feed_data_to_apm,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) {
+ assert(_shared->output_mixer() != NULL);
+
+ // TODO(andrew): if the device is running in mono, we should tell the mixer
+ // here so that it will only request mono from AudioCodingModule.
+ // Perform mixing of all active participants (channel-based mixing)
+ _shared->output_mixer()->MixActiveChannels();
+
+ // Additional operations on the combined signal
+ _shared->output_mixer()->DoOperationsOnCombinedSignal(feed_data_to_apm);
+
+ // Retrieve the final output mix (resampled to match the ADM)
+ _shared->output_mixer()->GetMixedAudio(sample_rate, number_of_channels,
+ &_audioFrame);
+
+ assert(number_of_frames == _audioFrame.samples_per_channel_);
+ assert(sample_rate == _audioFrame.sample_rate_hz_);
+
+ // Deliver audio (PCM) samples to the ADM
+ memcpy(audio_data, _audioFrame.data_,
+ sizeof(int16_t) * number_of_frames * number_of_channels);
+
+ *elapsed_time_ms = _audioFrame.elapsed_time_ms_;
+ *ntp_time_ms = _audioFrame.ntp_time_ms_;
+}
+
} // namespace webrtc
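A worked sketch of the mic-level scaling that ProcessRecordedDataWithAPM() above now applies whenever the ADM reports a non-zero volume. The first step is copied from the code; the second mirrors the return path whose tail is truncated in this hunk, so treat it, like the concrete range values, as an assumption for illustration.

    #include <stdint.h>
    #include <stdio.h>

    int main() {
      const uint32_t kMaxVolumeLevel = 255;  // VoE range; assumed for the example.
      const uint32_t max_volume = 225;       // ADM maximum; made up.
      const uint32_t volume = 180;           // Current ADM capture volume.

      // ADM -> VoE, with rounding (as in the diff above; uint16_t there).
      uint32_t voe_mic_level =
          (volume * kMaxVolumeLevel + max_volume / 2) / max_volume;  // 204

      // Suppose AGC nudges the level up a little.
      uint32_t new_voe_mic_level = voe_mic_level + 10;  // 214

      // VoE -> ADM, the value handed back to the audio device (assumed form).
      uint32_t new_adm_volume =
          (new_voe_mic_level * max_volume + kMaxVolumeLevel / 2) /
          kMaxVolumeLevel;  // 189
      printf("%u -> %u -> %u\n", volume, voe_mic_level, new_adm_volume);
      return 0;
    }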
diff --git a/chromium/third_party/webrtc/voice_engine/voe_base_impl.h b/chromium/third_party/webrtc/voice_engine/voe_base_impl.h
index bee2ea37cfe..985ef5d8382 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_base_impl.h
+++ b/chromium/third_party/webrtc/voice_engine/voe_base_impl.h
@@ -55,20 +55,12 @@ public:
virtual int StopSend(int channel);
- virtual int SetNetEQPlayoutMode(int channel, NetEqModes mode);
-
- virtual int GetNetEQPlayoutMode(int channel, NetEqModes& mode);
-
- virtual int SetOnHoldStatus(int channel,
- bool enable,
- OnHoldModes mode = kHoldSendAndPlay);
-
- virtual int GetOnHoldStatus(int channel, bool& enabled, OnHoldModes& mode);
-
virtual int GetVersion(char version[1024]);
virtual int LastError();
+ virtual AudioTransport* audio_transport() { return this; }
+
// AudioTransport
virtual int32_t
RecordedDataIsAvailable(const void* audioSamples,
@@ -78,7 +70,7 @@ public:
uint32_t samplesPerSec,
uint32_t totalDelayMS,
int32_t clockDrift,
- uint32_t currentMicLevel,
+ uint32_t micLevel,
bool keyPressed,
uint32_t& newMicLevel);
@@ -87,7 +79,9 @@ public:
uint8_t nChannels,
uint32_t samplesPerSec,
void* audioSamples,
- uint32_t& nSamplesOut);
+ uint32_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms);
virtual int OnDataAvailable(const int voe_channels[],
int number_of_voe_channels,
@@ -96,10 +90,24 @@ public:
int number_of_channels,
int number_of_frames,
int audio_delay_milliseconds,
- int current_volume,
+ int volume,
bool key_pressed,
bool need_audio_processing);
+ virtual void OnData(int voe_channel, const void* audio_data,
+ int bits_per_sample, int sample_rate,
+ int number_of_channels, int number_of_frames);
+
+ virtual void PushCaptureData(int voe_channel, const void* audio_data,
+ int bits_per_sample, int sample_rate,
+ int number_of_channels, int number_of_frames);
+
+ virtual void PullRenderData(int bits_per_sample, int sample_rate,
+ int number_of_channels, int number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms);
+
// AudioDeviceObserver
virtual void OnErrorIsReported(ErrorCode error);
virtual void OnWarningIsReported(WarningCode warning);
@@ -129,9 +137,15 @@ private:
uint32_t number_of_frames,
uint32_t audio_delay_milliseconds,
int32_t clock_drift,
- uint32_t current_volume,
+ uint32_t volume,
bool key_pressed);
+ void GetPlayoutData(int sample_rate, int number_of_channels,
+ int number_of_frames, bool feed_data_to_apm,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms);
+
int32_t AddBuildInfo(char* str) const;
int32_t AddVoEVersion(char* str) const;
@@ -141,15 +155,10 @@ private:
#ifdef WEBRTC_EXTERNAL_TRANSPORT
int32_t AddExternalTransportBuild(char* str) const;
#endif
-#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
- int32_t AddExternalRecAndPlayoutBuild(char* str) const;
-#endif
VoiceEngineObserver* _voiceEngineObserverPtr;
CriticalSectionWrapper& _callbackCritSect;
bool _voiceEngineObserver;
- uint32_t _oldVoEMicLevel;
- uint32_t _oldMicLevel;
AudioFrame _audioFrame;
voe::SharedData* _shared;
};
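The AudioTransport section of the header above grows OnData(), PushCaptureData() and PullRenderData(), and audio_transport() hands the object out under that interface. A minimal sketch of an external 10 ms audio callback driving the two new hooks; the transport pointer, channel id and buffers are assumed to come from the embedding code, and the constants follow the asserts in the .cc above (16-bit samples, sample_rate / 100 frames per call).

    #include <stdint.h>

    #include "webrtc/modules/audio_device/include/audio_device_defines.h"

    // Sketch only: |transport| would be the pointer returned by the new
    // audio_transport() accessor; |channel| is an existing VoE channel.
    void OnTenMsAudio(webrtc::AudioTransport* transport, int channel,
                      const int16_t* capture, int16_t* render) {
      // 16-bit, 48 kHz, stereo, 480 frames == 10 ms of capture audio.
      transport->PushCaptureData(channel, capture, 16, 48000, 2, 480);

      // Pull the mixed playout audio back out for the device.
      int64_t elapsed_time_ms = 0;
      int64_t ntp_time_ms = 0;
      transport->PullRenderData(16, 48000, 2, 480, render,
                                &elapsed_time_ms, &ntp_time_ms);
    }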
diff --git a/chromium/third_party/webrtc/voice_engine/voe_call_report_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_call_report_impl.cc
deleted file mode 100644
index 861354f3db1..00000000000
--- a/chromium/third_party/webrtc/voice_engine/voe_call_report_impl.cc
+++ /dev/null
@@ -1,383 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/voice_engine/voe_call_report_impl.h"
-
-#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/file_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/voice_engine/channel.h"
-#include "webrtc/voice_engine/include/voe_errors.h"
-#include "webrtc/voice_engine/voice_engine_impl.h"
-
-namespace webrtc
-{
-
-VoECallReport* VoECallReport::GetInterface(VoiceEngine* voiceEngine)
-{
-#ifndef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
- return NULL;
-#else
- if (NULL == voiceEngine)
- {
- return NULL;
- }
- VoiceEngineImpl* s = static_cast<VoiceEngineImpl*>(voiceEngine);
- s->AddRef();
- return s;
-#endif
-}
-
-#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
-
-VoECallReportImpl::VoECallReportImpl(voe::SharedData* shared) :
- _file(*FileWrapper::Create()), _shared(shared)
-{
- WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "VoECallReportImpl() - ctor");
-}
-
-VoECallReportImpl::~VoECallReportImpl()
-{
- WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "~VoECallReportImpl() - dtor");
- delete &_file;
-}
-
-int VoECallReportImpl::ResetCallReportStatistics(int channel)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ResetCallReportStatistics(channel=%d)", channel);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- assert(_shared->audio_processing() != NULL);
-
- bool echoMode =
- _shared->audio_processing()->echo_cancellation()->are_metrics_enabled();
-
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
- " current AudioProcessingModule echo metric state %d)",
- echoMode);
- // Reset the APM statistics
- if (_shared->audio_processing()->echo_cancellation()->enable_metrics(true)
- != 0)
- {
- _shared->SetLastError(VE_APM_ERROR, kTraceError,
- "ResetCallReportStatistics() unable to "
- "set the AudioProcessingModule echo metrics state");
- return -1;
- }
- // Restore metric states
- _shared->audio_processing()->echo_cancellation()->enable_metrics(echoMode);
-
- // Reset channel dependent statistics
- if (channel != -1)
- {
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "ResetCallReportStatistics() failed to locate channel");
- return -1;
- }
- channelPtr->ResetDeadOrAliveCounters();
- channelPtr->ResetRTCPStatistics();
- } else {
- for (voe::ChannelManager::Iterator it(&_shared->channel_manager());
- it.IsValid();
- it.Increment()) {
- it.GetChannel()->ResetDeadOrAliveCounters();
- it.GetChannel()->ResetRTCPStatistics();
- }
- }
-
- return 0;
-}
-
-int VoECallReportImpl::GetEchoMetricSummary(EchoStatistics& stats)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetEchoMetricSummary()");
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- assert(_shared->audio_processing() != NULL);
-
- return (GetEchoMetricSummaryInternal(stats));
-}
-
-int VoECallReportImpl::GetEchoMetricSummaryInternal(EchoStatistics& stats)
-{
- // Retrieve echo metrics from the AudioProcessingModule
- int ret(0);
- bool mode(false);
- EchoCancellation::Metrics metrics;
-
- // Ensure that echo metrics is enabled
-
- mode =
- _shared->audio_processing()->echo_cancellation()->are_metrics_enabled();
- if (mode != false)
- {
- ret = _shared->audio_processing()->echo_cancellation()->
- GetMetrics(&metrics);
- if (ret != 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- " AudioProcessingModule GetMetrics() => error");
- }
- }
- else
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- " AudioProcessingModule echo metrics is not enabled");
- }
-
- if ((ret != 0) || (mode == false))
- {
- // Mark complete struct as invalid (-100 dB)
- WEBRTC_TRACE(kTraceWarning, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- " unable to retrieve echo metrics from the AudioProcessingModule");
- stats.erl.min = -100;
- stats.erl.max = -100;
- stats.erl.average = -100;
- stats.erle.min = -100;
- stats.erle.max = -100;
- stats.erle.average = -100;
- stats.rerl.min = -100;
- stats.rerl.max = -100;
- stats.rerl.average = -100;
- stats.a_nlp.min = -100;
- stats.a_nlp.max = -100;
- stats.a_nlp.average = -100;
- }
- else
- {
-
- // Deliver output results to user
- stats.erl.min = metrics.echo_return_loss.minimum;
- stats.erl.max = metrics.echo_return_loss.maximum;
- stats.erl.average = metrics.echo_return_loss.average;
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_shared->instance_id(), -1), " erl: min=%d, max=%d, avg=%d",
- stats.erl.min, stats.erl.max, stats.erl.average);
-
- stats.erle.min = metrics.echo_return_loss_enhancement.minimum;
- stats.erle.max = metrics.echo_return_loss_enhancement.maximum;
- stats.erle.average = metrics.echo_return_loss_enhancement.average;
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_shared->instance_id(), -1), " erle: min=%d, max=%d, avg=%d",
- stats.erle.min, stats.erle.max, stats.erle.average);
-
- stats.rerl.min = metrics.residual_echo_return_loss.minimum;
- stats.rerl.max = metrics.residual_echo_return_loss.maximum;
- stats.rerl.average = metrics.residual_echo_return_loss.average;
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_shared->instance_id(), -1), " rerl: min=%d, max=%d, avg=%d",
- stats.rerl.min, stats.rerl.max, stats.rerl.average);
-
- stats.a_nlp.min = metrics.a_nlp.minimum;
- stats.a_nlp.max = metrics.a_nlp.maximum;
- stats.a_nlp.average = metrics.a_nlp.average;
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- " a_nlp: min=%d, max=%d, avg=%d",
- stats.a_nlp.min, stats.a_nlp.max, stats.a_nlp.average);
- }
- return 0;
-}
-
-int VoECallReportImpl::GetRoundTripTimeSummary(int channel, StatVal& delaysMs)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetRoundTripTimeSummary()");
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetRoundTripTimeSummary() failed to locate channel");
- return -1;
- }
-
- return channelPtr->GetRoundTripTimeSummary(delaysMs);
-}
-
-int VoECallReportImpl::GetDeadOrAliveSummary(int channel,
- int& numOfDeadDetections,
- int& numOfAliveDetections)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetDeadOrAliveSummary(channel=%d)", channel);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- return (GetDeadOrAliveSummaryInternal(channel, numOfDeadDetections,
- numOfAliveDetections));
-}
-
-int VoECallReportImpl::GetDeadOrAliveSummaryInternal(int channel,
- int& numOfDeadDetections,
- int& numOfAliveDetections)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetDeadOrAliveSummary(channel=%d)", channel);
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetRoundTripTimeSummary() failed to locate channel");
- return -1;
- }
-
- return channelPtr->GetDeadOrAliveCounters(numOfDeadDetections,
- numOfAliveDetections);
-}
-
-int VoECallReportImpl::WriteReportToFile(const char* fileNameUTF8)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "WriteReportToFile(fileNameUTF8=%s)", fileNameUTF8);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- if (NULL == fileNameUTF8)
- {
- _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
- "WriteReportToFile() invalid filename");
- return -1;
- }
-
- if (_file.Open())
- {
- _file.CloseFile();
- }
-
- // Open text file in write mode
- if (_file.OpenFile(fileNameUTF8, false, false, true) != 0)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "WriteReportToFile() unable to open the file");
- return -1;
- }
-
- // Summarize information and add it to the open file
- //
- _file.WriteText("WebRtc VoiceEngine Call Report\n");
- _file.WriteText("==============================\n");
- _file.WriteText("\nNetwork Packet Round Trip Time (RTT)\n");
- _file.WriteText("------------------------------------\n\n");
-
- if (_shared->channel_manager().NumOfChannels() == 0)
- return 0;
-
- for (voe::ChannelManager::Iterator it(&_shared->channel_manager());
- it.IsValid();
- it.Increment()) {
- StatVal delaysMs;
- _file.WriteText("channel %d:\n", it.GetChannel()->ChannelId());
- it.GetChannel()->GetRoundTripTimeSummary(delaysMs);
- _file.WriteText(" min:%5d [ms]\n", delaysMs.min);
- _file.WriteText(" max:%5d [ms]\n", delaysMs.max);
- _file.WriteText(" avg:%5d [ms]\n", delaysMs.average);
- }
-
- _file.WriteText("\nDead-or-Alive Connection Detections\n");
- _file.WriteText("------------------------------------\n\n");
-
- for (voe::ChannelManager::Iterator it(&_shared->channel_manager());
- it.IsValid();
- it.Increment()) {
- int dead = 0;
- int alive = 0;
- _file.WriteText("channel %d:\n", it.GetChannel()->ChannelId());
- GetDeadOrAliveSummary(it.GetChannel()->ChannelId(), dead, alive);
- _file.WriteText(" #dead :%6d\n", dead);
- _file.WriteText(" #alive:%6d\n", alive);
- }
-
- EchoStatistics echo;
- GetEchoMetricSummary(echo);
-
- _file.WriteText("\nEcho Metrics\n");
- _file.WriteText("------------\n\n");
-
- _file.WriteText("erl:\n");
- _file.WriteText(" min:%5d [dB]\n", echo.erl.min);
- _file.WriteText(" max:%5d [dB]\n", echo.erl.max);
- _file.WriteText(" avg:%5d [dB]\n", echo.erl.average);
- _file.WriteText("\nerle:\n");
- _file.WriteText(" min:%5d [dB]\n", echo.erle.min);
- _file.WriteText(" max:%5d [dB]\n", echo.erle.max);
- _file.WriteText(" avg:%5d [dB]\n", echo.erle.average);
- _file.WriteText("rerl:\n");
- _file.WriteText(" min:%5d [dB]\n", echo.rerl.min);
- _file.WriteText(" max:%5d [dB]\n", echo.rerl.max);
- _file.WriteText(" avg:%5d [dB]\n", echo.rerl.average);
- _file.WriteText("a_nlp:\n");
- _file.WriteText(" min:%5d [dB]\n", echo.a_nlp.min);
- _file.WriteText(" max:%5d [dB]\n", echo.a_nlp.max);
- _file.WriteText(" avg:%5d [dB]\n", echo.a_nlp.average);
-
- _file.WriteText("\n<END>");
-
- _file.Flush();
- _file.CloseFile();
-
- return 0;
-}
-
-#endif // WEBRTC_VOICE_ENGINE_CALL_REPORT_API
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/voice_engine/voe_call_report_impl.h b/chromium/third_party/webrtc/voice_engine/voe_call_report_impl.h
deleted file mode 100644
index b9927f5654d..00000000000
--- a/chromium/third_party/webrtc/voice_engine/voe_call_report_impl.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_IMPL_H
-#define WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_IMPL_H
-
-#include "webrtc/voice_engine/include/voe_call_report.h"
-
-#include "webrtc/voice_engine/shared_data.h"
-
-
-namespace webrtc
-{
-class FileWrapper;
-
-class VoECallReportImpl: public VoECallReport
-{
-public:
- virtual int ResetCallReportStatistics(int channel);
-
- virtual int GetEchoMetricSummary(EchoStatistics& stats);
-
- virtual int GetRoundTripTimeSummary(int channel,
- StatVal& delaysMs);
-
- virtual int GetDeadOrAliveSummary(int channel, int& numOfDeadDetections,
- int& numOfAliveDetections);
-
- virtual int WriteReportToFile(const char* fileNameUTF8);
-
-protected:
- VoECallReportImpl(voe::SharedData* shared);
- virtual ~VoECallReportImpl();
-
-private:
- int GetDeadOrAliveSummaryInternal(int channel,
- int& numOfDeadDetections,
- int& numOfAliveDetections);
-
- int GetEchoMetricSummaryInternal(EchoStatistics& stats);
-
- int GetSpeechAndNoiseSummaryInternal(LevelStatistics& stats);
-
- FileWrapper& _file;
- voe::SharedData* _shared;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_IMPL_H
diff --git a/chromium/third_party/webrtc/voice_engine/voe_codec_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_codec_impl.cc
index 3acd2bee3fb..4aa0556ea3f 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_codec_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_codec_impl.cc
@@ -213,114 +213,6 @@ int VoECodecImpl::GetRecCodec(int channel, CodecInst& codec)
return 0;
}
-int VoECodecImpl::SetAMREncFormat(int channel, AmrMode mode)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetAMREncFormat(channel=%d, mode=%d)", channel, mode);
-#ifdef WEBRTC_CODEC_AMR
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetAMREncFormat() failed to locate channel");
- return -1;
- }
- return channelPtr->SetAMREncFormat(mode);
-#else
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "SetAMREncFormat() AMR codec is not supported");
- return -1;
-#endif
-}
-
-int VoECodecImpl::SetAMRDecFormat(int channel, AmrMode mode)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetAMRDecFormat(channel=%i, mode=%i)", channel, mode);
-#ifdef WEBRTC_CODEC_AMR
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetAMRDecFormat() failed to locate channel");
- return -1;
- }
- return channelPtr->SetAMRDecFormat(mode);
-#else
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "SetAMRDecFormat() AMR codec is not supported");
- return -1;
-#endif
-}
-
-int VoECodecImpl::SetAMRWbEncFormat(int channel, AmrMode mode)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetAMRWbEncFormat(channel=%d, mode=%d)", channel, mode);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-#ifdef WEBRTC_CODEC_AMRWB
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetAMRWbEncFormat() failed to locate channel");
- return -1;
- }
- return channelPtr->SetAMRWbEncFormat(mode);
-#else
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "SetAMRWbEncFormat() AMR-wb codec is not supported");
- return -1;
-#endif
-}
-
-int VoECodecImpl::SetAMRWbDecFormat(int channel, AmrMode mode)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetAMRWbDecFormat(channel=%i, mode=%i)", channel, mode);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-#ifdef WEBRTC_CODEC_AMRWB
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetAMRWbDecFormat() failed to locate channel");
- return -1;
- }
- return channelPtr->SetAMRWbDecFormat(mode);
-#else
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "SetAMRWbDecFormat() AMR-wb codec is not supported");
- return -1;
-#endif
-}
-
int VoECodecImpl::SetRecPayloadType(int channel, const CodecInst& codec)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
@@ -403,92 +295,39 @@ int VoECodecImpl::SetSendCNPayloadType(int channel, int type,
return channelPtr->SetSendCNPayloadType(type, frequency);
}
-int VoECodecImpl::SetISACInitTargetRate(int channel, int rateBps,
- bool useFixedFrameSize)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetISACInitTargetRate(channel=%d, rateBps=%d, "
- "useFixedFrameSize=%d)", channel, rateBps, useFixedFrameSize);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-#ifdef WEBRTC_CODEC_ISAC
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetISACInitTargetRate() failed to locate channel");
- return -1;
- }
- return channelPtr->SetISACInitTargetRate(rateBps, useFixedFrameSize);
-#else
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "SetISACInitTargetRate() iSAC codec is not supported");
+int VoECodecImpl::SetFECStatus(int channel, bool enable) {
+ WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+ "SetCodecFECStatus(channel=%d, enable=%d)", channel, enable);
+ if (!_shared->statistics().Initialized()) {
+ _shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
-#endif
-}
-
-int VoECodecImpl::SetISACMaxRate(int channel, int rateBps)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetISACMaxRate(channel=%d, rateBps=%d)", channel, rateBps);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-#ifdef WEBRTC_CODEC_ISAC
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetISACMaxRate() failed to locate channel");
- return -1;
- }
- return channelPtr->SetISACMaxRate(rateBps);
-#else
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "SetISACMaxRate() iSAC codec is not supported");
+ }
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+ voe::Channel* channelPtr = ch.channel();
+ if (channelPtr == NULL) {
+ _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+ "SetCodecFECStatus() failed to locate channel");
return -1;
-#endif
+ }
+ return channelPtr->SetCodecFECStatus(enable);
}
-int VoECodecImpl::SetISACMaxPayloadSize(int channel, int sizeBytes)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetISACMaxPayloadSize(channel=%d, sizeBytes=%d)", channel,
- sizeBytes);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-#ifdef WEBRTC_CODEC_ISAC
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetISACMaxPayloadSize() failed to locate channel");
- return -1;
- }
- return channelPtr->SetISACMaxPayloadSize(sizeBytes);
-#else
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "SetISACMaxPayloadSize() iSAC codec is not supported");
+int VoECodecImpl::GetFECStatus(int channel, bool& enabled) {
+ WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+ "GetCodecFECStatus(channel=%d)", channel);
+ if (!_shared->statistics().Initialized()) {
+ _shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
-#endif
- return 0;
+ }
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+ voe::Channel* channelPtr = ch.channel();
+ if (channelPtr == NULL) {
+ _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+ "GetFECStatus() failed to locate channel");
+ return -1;
+ }
+ enabled = channelPtr->GetCodecFECStatus();
+ return 0;
}
int VoECodecImpl::SetVADStatus(int channel, bool enable, VadModes mode,
diff --git a/chromium/third_party/webrtc/voice_engine/voe_codec_impl.h b/chromium/third_party/webrtc/voice_engine/voe_codec_impl.h
index ec6cbd7c57d..1b9f00e4e6f 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_codec_impl.h
+++ b/chromium/third_party/webrtc/voice_engine/voe_codec_impl.h
@@ -31,18 +31,6 @@ public:
virtual int GetRecCodec(int channel, CodecInst& codec);
- virtual int SetAMREncFormat(int channel,
- AmrMode mode = kRfc3267BwEfficient);
-
- virtual int SetAMRDecFormat(int channel,
- AmrMode mode = kRfc3267BwEfficient);
-
- virtual int SetAMRWbEncFormat(int channel,
- AmrMode mode = kRfc3267BwEfficient);
-
- virtual int SetAMRWbDecFormat(int channel,
- AmrMode mode = kRfc3267BwEfficient);
-
virtual int SetSendCNPayloadType(
int channel, int type,
PayloadFrequencies frequency = kFreq16000Hz);
@@ -52,13 +40,9 @@ public:
virtual int GetRecPayloadType(int channel, CodecInst& codec);
- virtual int SetISACInitTargetRate(int channel,
- int rateBps,
- bool useFixedFrameSize = false);
-
- virtual int SetISACMaxRate(int channel, int rateBps);
+ virtual int SetFECStatus(int channel, bool enable);
- virtual int SetISACMaxPayloadSize(int channel, int sizeBytes);
+ virtual int GetFECStatus(int channel, bool& enabled);
virtual int SetVADStatus(int channel,
bool enable,
diff --git a/chromium/third_party/webrtc/voice_engine/voe_codec_unittest.cc b/chromium/third_party/webrtc/voice_engine/voe_codec_unittest.cc
index baa4f37ee36..7e440b2ada0 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_codec_unittest.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_codec_unittest.cc
@@ -169,6 +169,56 @@ TEST_F(VoECodecTest, DISABLED_ON_ANDROID(DualStreamRemoveSecondaryCodec)) {
EXPECT_EQ(-1, voe_codec_->GetSecondarySendCodec(channel_, my_codec));
}
+TEST(VoECodecInst, TestCompareCodecInstances) {
+ CodecInst codec1, codec2;
+ memset(&codec1, 0, sizeof(CodecInst));
+ memset(&codec2, 0, sizeof(CodecInst));
+
+ codec1.pltype = 101;
+ strncpy(codec1.plname, "isac", 4);
+ codec1.plfreq = 8000;
+ codec1.pacsize = 110;
+ codec1.channels = 1;
+ codec1.rate = 8000;
+ memcpy(&codec2, &codec1, sizeof(CodecInst));
+ // Compare two codecs now.
+ EXPECT_TRUE(codec1 == codec2);
+ EXPECT_FALSE(codec1 != codec2);
+
+ // Changing pltype.
+ codec2.pltype = 102;
+ EXPECT_FALSE(codec1 == codec2);
+ EXPECT_TRUE(codec1 != codec2);
+
+  // Reset codec2 to codec1 state.
+ memcpy(&codec2, &codec1, sizeof(CodecInst));
+  // Payload name should be case-insensitive.
+ strncpy(codec2.plname, "ISAC", 4);
+ EXPECT_TRUE(codec1 == codec2);
+
+ // Test modifying the |plfreq|
+ codec2.plfreq = 16000;
+ EXPECT_FALSE(codec1 == codec2);
+
+  // Reset codec2 to codec1 state.
+ memcpy(&codec2, &codec1, sizeof(CodecInst));
+ // Test modifying the |pacsize|.
+ codec2.pacsize = 440;
+ EXPECT_FALSE(codec1 == codec2);
+
+  // Reset codec2 to codec1 state.
+ memcpy(&codec2, &codec1, sizeof(CodecInst));
+ // Test modifying the |channels|.
+ codec2.channels = 2;
+ EXPECT_FALSE(codec1 == codec2);
+
+  // Reset codec2 to codec1 state.
+ memcpy(&codec2, &codec1, sizeof(CodecInst));
+ // Test modifying the |rate|.
+ codec2.rate = 0;
+ EXPECT_FALSE(codec1 == codec2);
+}
+
} // namespace
} // namespace voe
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/voice_engine/voe_dtmf_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_dtmf_impl.cc
index ac2c54660dd..70872c694c1 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_dtmf_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_dtmf_impl.cc
@@ -227,50 +227,6 @@ int VoEDtmfImpl::PlayDtmfTone(int eventCode,
attenuationDb);
}
-int VoEDtmfImpl::StartPlayingDtmfTone(int eventCode,
- int attenuationDb)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "StartPlayingDtmfTone(eventCode=%d, attenuationDb=%d)",
- eventCode, attenuationDb);
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- if (!_shared->audio_device()->Playing())
- {
- _shared->SetLastError(VE_NOT_PLAYING, kTraceError,
- "StartPlayingDtmfTone() no channel is playing out");
- return -1;
- }
- if ((eventCode < kMinDtmfEventCode) ||
- (eventCode > kMaxDtmfEventCode) ||
- (attenuationDb < kMinTelephoneEventAttenuation) ||
- (attenuationDb > kMaxTelephoneEventAttenuation))
- {
- _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
- "StartPlayingDtmfTone() invalid tone parameter(s)");
- return -1;
- }
- return _shared->output_mixer()->StartPlayingDtmfTone(eventCode,
- attenuationDb);
-}
-
-int VoEDtmfImpl::StopPlayingDtmfTone()
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "StopPlayingDtmfTone()");
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- return _shared->output_mixer()->StopPlayingDtmfTone();
-}
-
int VoEDtmfImpl::SetDtmfFeedbackStatus(bool enable, bool directFeedback)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
@@ -307,7 +263,6 @@ int VoEDtmfImpl::SetDtmfPlayoutStatus(int channel, bool enable)
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetDtmfPlayoutStatus(channel=%d, enable=%d)",
channel, enable);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -329,7 +284,6 @@ int VoEDtmfImpl::GetDtmfPlayoutStatus(int channel, bool& enabled)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetDtmfPlayoutStatus(channel=%d, enabled=?)", channel);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
_shared->SetLastError(VE_NOT_INITED, kTraceError);
diff --git a/chromium/third_party/webrtc/voice_engine/voe_dtmf_impl.h b/chromium/third_party/webrtc/voice_engine/voe_dtmf_impl.h
index e879f8de4fa..921623845f4 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_dtmf_impl.h
+++ b/chromium/third_party/webrtc/voice_engine/voe_dtmf_impl.h
@@ -42,11 +42,6 @@ public:
int lengthMs = 200,
int attenuationDb = 10);
- virtual int StartPlayingDtmfTone(int eventCode,
- int attenuationDb = 10);
-
- virtual int StopPlayingDtmfTone();
-
virtual int SetDtmfPlayoutStatus(int channel, bool enable);
virtual int GetDtmfPlayoutStatus(int channel, bool& enabled);
diff --git a/chromium/third_party/webrtc/voice_engine/voe_encryption_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_encryption_impl.cc
deleted file mode 100644
index 518a1c6160d..00000000000
--- a/chromium/third_party/webrtc/voice_engine/voe_encryption_impl.cc
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/voice_engine/voe_encryption_impl.h"
-
-
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/voice_engine/channel.h"
-#include "webrtc/voice_engine/include/voe_errors.h"
-#include "webrtc/voice_engine/voice_engine_impl.h"
-
-namespace webrtc {
-
-VoEEncryption* VoEEncryption::GetInterface(VoiceEngine* voiceEngine)
-{
-#ifndef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
- return NULL;
-#else
- if (NULL == voiceEngine)
- {
- return NULL;
- }
- VoiceEngineImpl* s = static_cast<VoiceEngineImpl*>(voiceEngine);
- s->AddRef();
- return s;
-#endif
-}
-
-#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
-
-VoEEncryptionImpl::VoEEncryptionImpl(voe::SharedData* shared) : _shared(shared)
-{
- WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "VoEEncryptionImpl::VoEEncryptionImpl() - ctor");
-}
-
-VoEEncryptionImpl::~VoEEncryptionImpl()
-{
- WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "VoEEncryptionImpl::~VoEEncryptionImpl() - dtor");
-}
-
-int VoEEncryptionImpl::RegisterExternalEncryption(int channel,
- Encryption& encryption)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "RegisterExternalEncryption(channel=%d, encryption=0x%x)",
- channel, &encryption);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "RegisterExternalEncryption() failed to locate channel");
- return -1;
- }
- return channelPtr->RegisterExternalEncryption(encryption);
-}
-
-int VoEEncryptionImpl::DeRegisterExternalEncryption(int channel)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "DeRegisterExternalEncryption(channel=%d)", channel);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "DeRegisterExternalEncryption() failed to locate channel");
- return -1;
- }
- return channelPtr->DeRegisterExternalEncryption();
-}
-
-#endif // #ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
-
-// EOF
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/voice_engine/voe_encryption_impl.h b/chromium/third_party/webrtc/voice_engine/voe_encryption_impl.h
deleted file mode 100644
index 6d482fff734..00000000000
--- a/chromium/third_party/webrtc/voice_engine/voe_encryption_impl.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_IMPL_H
-#define WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_IMPL_H
-
-#include "webrtc/voice_engine/include/voe_encryption.h"
-
-#include "webrtc/voice_engine/shared_data.h"
-
-namespace webrtc {
-
-class VoEEncryptionImpl : public VoEEncryption
-{
-public:
- // External encryption
- virtual int RegisterExternalEncryption(
- int channel,
- Encryption& encryption);
-
- virtual int DeRegisterExternalEncryption(int channel);
-
-protected:
- VoEEncryptionImpl(voe::SharedData* shared);
- virtual ~VoEEncryptionImpl();
-
-private:
- voe::SharedData* _shared;
-};
-
-} // namespace webrtc
-
-#endif // #ifndef WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_IMPL_H
diff --git a/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.cc
index c76c280b05f..7c52692b682 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.cc
@@ -143,203 +143,6 @@ int VoEExternalMediaImpl::DeRegisterExternalMediaProcessing(
return -1;
}
-int VoEExternalMediaImpl::SetExternalRecordingStatus(bool enable)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1),
- "SetExternalRecordingStatus(enable=%d)", enable);
-#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
- if (shared_->audio_device()->Recording())
- {
- shared_->SetLastError(VE_ALREADY_SENDING, kTraceError,
- "SetExternalRecordingStatus() cannot set state while sending");
- return -1;
- }
- shared_->set_ext_recording(enable);
- return 0;
-#else
- shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "SetExternalRecordingStatus() external recording is not supported");
- return -1;
-#endif
-}
-
-int VoEExternalMediaImpl::ExternalRecordingInsertData(
- const int16_t speechData10ms[],
- int lengthSamples,
- int samplingFreqHz,
- int current_delay_ms)
-{
- WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(shared_->instance_id(), -1),
- "ExternalRecordingInsertData(speechData10ms=0x%x,"
- " lengthSamples=%u, samplingFreqHz=%d, current_delay_ms=%d)",
- &speechData10ms[0], lengthSamples, samplingFreqHz,
- current_delay_ms);
-#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
- if (!shared_->statistics().Initialized())
- {
- shared_->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- if (!shared_->ext_recording())
- {
- shared_->SetLastError(VE_INVALID_OPERATION, kTraceError,
- "ExternalRecordingInsertData() external recording is not enabled");
- return -1;
- }
- if (shared_->NumOfSendingChannels() == 0)
- {
- shared_->SetLastError(VE_ALREADY_SENDING, kTraceError,
- "SetExternalRecordingStatus() no channel is sending");
- return -1;
- }
- if ((16000 != samplingFreqHz) && (32000 != samplingFreqHz) &&
- (48000 != samplingFreqHz) && (44000 != samplingFreqHz))
- {
- shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
- "SetExternalRecordingStatus() invalid sample rate");
- return -1;
- }
- if ((0 == lengthSamples) ||
- ((lengthSamples % (samplingFreqHz / 100)) != 0))
- {
- shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
- "SetExternalRecordingStatus() invalid buffer size");
- return -1;
- }
- if (current_delay_ms < 0)
- {
- shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
- "SetExternalRecordingStatus() invalid delay)");
- return -1;
- }
-
- uint16_t blockSize = samplingFreqHz / 100;
- uint32_t nBlocks = lengthSamples / blockSize;
- int16_t totalDelayMS = 0;
- uint16_t playoutDelayMS = 0;
-
- for (uint32_t i = 0; i < nBlocks; i++)
- {
- if (!shared_->ext_playout())
- {
- // Use real playout delay if external playout is not enabled.
- if (shared_->audio_device()->PlayoutDelay(&playoutDelayMS) != 0) {
- shared_->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning,
- "PlayoutDelay() unable to get the playout delay");
- }
- totalDelayMS = current_delay_ms + playoutDelayMS;
- }
- else
- {
- // Use stored delay value given the last call
- // to ExternalPlayoutGetData.
- totalDelayMS = current_delay_ms + playout_delay_ms_;
- // Compensate for block sizes larger than 10ms
- totalDelayMS -= (int16_t)(i*10);
- if (totalDelayMS < 0)
- totalDelayMS = 0;
- }
- shared_->transmit_mixer()->PrepareDemux(
- (const int8_t*)(&speechData10ms[i*blockSize]),
- blockSize,
- 1,
- samplingFreqHz,
- totalDelayMS,
- 0,
- 0,
- false); // Typing detection not supported
-
- shared_->transmit_mixer()->DemuxAndMix();
- shared_->transmit_mixer()->EncodeAndSend();
- }
- return 0;
-#else
- shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "ExternalRecordingInsertData() external recording is not supported");
- return -1;
-#endif
-}
-
-int VoEExternalMediaImpl::SetExternalPlayoutStatus(bool enable)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1),
- "SetExternalPlayoutStatus(enable=%d)", enable);
-#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
- if (shared_->audio_device()->Playing())
- {
- shared_->SetLastError(VE_ALREADY_SENDING, kTraceError,
- "SetExternalPlayoutStatus() cannot set state while playing");
- return -1;
- }
- shared_->set_ext_playout(enable);
- return 0;
-#else
- shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "SetExternalPlayoutStatus() external playout is not supported");
- return -1;
-#endif
-}
-
-int VoEExternalMediaImpl::ExternalPlayoutGetData(
- int16_t speechData10ms[],
- int samplingFreqHz,
- int current_delay_ms,
- int& lengthSamples)
-{
- WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(shared_->instance_id(), -1),
- "ExternalPlayoutGetData(speechData10ms=0x%x, samplingFreqHz=%d"
- ", current_delay_ms=%d)", &speechData10ms[0], samplingFreqHz,
- current_delay_ms);
-#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
- if (!shared_->statistics().Initialized())
- {
- shared_->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- if (!shared_->ext_playout())
- {
- shared_->SetLastError(VE_INVALID_OPERATION, kTraceError,
- "ExternalPlayoutGetData() external playout is not enabled");
- return -1;
- }
- if ((16000 != samplingFreqHz) && (32000 != samplingFreqHz) &&
- (48000 != samplingFreqHz) && (44000 != samplingFreqHz))
- {
- shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
- "ExternalPlayoutGetData() invalid sample rate");
- return -1;
- }
- if (current_delay_ms < 0)
- {
- shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
- "ExternalPlayoutGetData() invalid delay)");
- return -1;
- }
-
- AudioFrame audioFrame;
-
- // Retrieve mixed output at the specified rate
- shared_->output_mixer()->MixActiveChannels();
- shared_->output_mixer()->DoOperationsOnCombinedSignal();
- shared_->output_mixer()->GetMixedAudio(samplingFreqHz, 1, &audioFrame);
-
- // Deliver audio (PCM) samples to the external sink
- memcpy(speechData10ms,
- audioFrame.data_,
- sizeof(int16_t)*(audioFrame.samples_per_channel_));
- lengthSamples = audioFrame.samples_per_channel_;
-
- // Store current playout delay (to be used by ExternalRecordingInsertData).
- playout_delay_ms_ = current_delay_ms;
-
- return 0;
-#else
- shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "ExternalPlayoutGetData() external playout is not supported");
- return -1;
-#endif
-}
-
int VoEExternalMediaImpl::GetAudioFrame(int channel, int desired_sample_rate_hz,
AudioFrame* frame) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
diff --git a/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.h b/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.h
index ef460264f8f..14da268f35f 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.h
+++ b/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.h
@@ -29,20 +29,6 @@ public:
int channel,
ProcessingTypes type);
- virtual int SetExternalRecordingStatus(bool enable);
-
- virtual int SetExternalPlayoutStatus(bool enable);
-
- virtual int ExternalRecordingInsertData(
- const int16_t speechData10ms[],
- int lengthSamples,
- int samplingFreqHz,
- int current_delay_ms);
-
- virtual int ExternalPlayoutGetData(int16_t speechData10ms[],
- int samplingFreqHz,
- int current_delay_ms,
- int& lengthSamples);
virtual int GetAudioFrame(int channel, int desired_sample_rate_hz,
AudioFrame* frame);
@@ -54,9 +40,6 @@ protected:
virtual ~VoEExternalMediaImpl();
private:
-#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
- int playout_delay_ms_;
-#endif
voe::SharedData* shared_;
};
diff --git a/chromium/third_party/webrtc/voice_engine/voe_file_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_file_impl.cc
index d8779c81054..95e9d21b391 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_file_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_file_impl.cc
@@ -164,27 +164,6 @@ int VoEFileImpl::IsPlayingFileLocally(int channel)
return channelPtr->IsPlayingFileLocally();
}
-int VoEFileImpl::ScaleLocalFilePlayout(int channel, float scale)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ScaleLocalFilePlayout(channel=%d, scale=%5.3f)",
- channel, scale);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "StopPlayingFileLocally() failed to locate channel");
- return -1;
- }
- return channelPtr->ScaleLocalFilePlayout(scale);
-}
-
int VoEFileImpl::StartPlayingFileAsMicrophone(int channel,
const char fileNameUTF8[1024],
bool loop,
@@ -395,36 +374,6 @@ int VoEFileImpl::IsPlayingFileAsMicrophone(int channel)
}
}
-int VoEFileImpl::ScaleFileAsMicrophonePlayout(int channel, float scale)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ScaleFileAsMicrophonePlayout(channel=%d, scale=%5.3f)",
- channel, scale);
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- if (channel == -1)
- {
- return _shared->transmit_mixer()->ScaleFileAsMicrophonePlayout(scale);
- }
- else
- {
- // Stop adding file after demultiplexing <=> affects one channel only
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "IsPlayingFileAsMicrophone() failed to locate channel");
- return -1;
- }
- return channelPtr->ScaleFileAsMicrophonePlayout(scale);
- }
-}
-
int VoEFileImpl::StartRecordingPlayout(
int channel, const char* fileNameUTF8, CodecInst* compression,
int maxSizeBytes)
@@ -642,717 +591,6 @@ int VoEFileImpl::StopRecordingMicrophone()
return err;
}
-// TODO(andrew): a cursory inspection suggests there's a large amount of
-// overlap in these convert functions which could be refactored to a helper.
-int VoEFileImpl::ConvertPCMToWAV(const char* fileNameInUTF8,
- const char* fileNameOutUTF8)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ConvertPCMToWAV(fileNameInUTF8=%s, fileNameOutUTF8=%s)",
- fileNameInUTF8, fileNameOutUTF8);
-
- // Create file player object
- FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
- -1,
- kFileFormatPcm16kHzFile));
-
- int res=playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0, NULL);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertPCMToWAV failed to create player object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- return -1;
- }
-
- // Create file recorder object
- FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
- -1, kFileFormatWavFile));
-
- CodecInst codecInst;
- strncpy(codecInst.plname,"L16",32);
- codecInst.channels = 1;
- codecInst.rate = 256000;
- codecInst.plfreq = 16000;
- codecInst.pltype = 94;
- codecInst.pacsize = 160;
-
- res = recObj.StartRecordingAudioFile(fileNameOutUTF8,codecInst,0);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertPCMToWAV failed to create recorder object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- recObj.StopRecording();
- FileRecorder::DestroyFileRecorder(&recObj);
- return -1;
- }
-
-    // Run through the file
- AudioFrame audioFrame;
- int16_t decodedData[160];
- int decLength=0;
- const uint32_t frequency = 16000;
-
- while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
- {
- if(decLength!=frequency/100)
- {
- // This is an OK way to end
- break;
- }
-
- audioFrame.UpdateFrame(-1, 0, decodedData,
- (uint16_t)decLength,
- frequency, AudioFrame::kNormalSpeech,
- AudioFrame::kVadActive);
-
- res=recObj.RecordAudioToFile(audioFrame);
- if(res)
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- "ConvertPCMToWAV failed during conversion (write frame)");
- }
- }
-
- playerObj.StopPlayingFile();
- recObj.StopRecording();
- FilePlayer::DestroyFilePlayer(&playerObj);
- FileRecorder::DestroyFileRecorder(&recObj);
-
- return res;
-}
-
-int VoEFileImpl::ConvertPCMToWAV(InStream* streamIn, OutStream* streamOut)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ConvertPCMToWAV(streamIn, streamOut)");
-
- if ((streamIn == NULL) || (streamOut == NULL))
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1), "invalid stream handles");
- return (-1);
- }
-
- // Create file player object
- FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(-1,
- kFileFormatPcm16kHzFile));
- int res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertPCMToWAV failed to create player object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- return -1;
- }
-
- // Create file recorder object
- FileRecorder& recObj(*FileRecorder::CreateFileRecorder(-1,
- kFileFormatWavFile));
- CodecInst codecInst;
- strncpy(codecInst.plname, "L16", 32);
- codecInst.channels = 1;
- codecInst.rate = 256000;
- codecInst.plfreq = 16000;
- codecInst.pltype = 94;
- codecInst.pacsize = 160;
- res = recObj.StartRecordingAudioFile(*streamOut,codecInst,0);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertPCMToWAV failed to create recorder object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- recObj.StopRecording();
- FileRecorder::DestroyFileRecorder(&recObj);
- return -1;
- }
-
-    // Run through the file
- AudioFrame audioFrame;
- int16_t decodedData[160];
- int decLength=0;
- const uint32_t frequency = 16000;
-
- while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
- {
- if(decLength!=frequency/100)
- {
- // This is an OK way to end
- break;
- }
-
- audioFrame.UpdateFrame(-1, 0, decodedData,
- (uint16_t)decLength, frequency,
- AudioFrame::kNormalSpeech,
- AudioFrame::kVadActive);
-
- res=recObj.RecordAudioToFile(audioFrame);
- if(res)
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- "ConvertPCMToWAV failed during conversion (write frame)");
- }
- }
-
- playerObj.StopPlayingFile();
- recObj.StopRecording();
- FilePlayer::DestroyFilePlayer(&playerObj);
- FileRecorder::DestroyFileRecorder(&recObj);
-
- return res;
-}
-
-int VoEFileImpl::ConvertWAVToPCM(const char* fileNameInUTF8,
- const char* fileNameOutUTF8)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ConvertWAVToPCM(fileNameInUTF8=%s, fileNameOutUTF8=%s)",
- fileNameInUTF8, fileNameOutUTF8);
-
- // Create file player object
- FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(-1,
- kFileFormatWavFile));
- int res = playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0,NULL);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertWAVToPCM failed to create player object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- return -1;
- }
-
- // Create file recorder object
- FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
- -1, kFileFormatPcm16kHzFile));
-
- CodecInst codecInst;
- strncpy(codecInst.plname,"L16",32);
- codecInst.channels = 1;
- codecInst.rate = 256000;
- codecInst.plfreq = 16000;
- codecInst.pltype = 94;
- codecInst.pacsize = 160;
-
- res = recObj.StartRecordingAudioFile(fileNameOutUTF8,codecInst,0);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertWAVToPCM failed to create recorder object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- recObj.StopRecording();
- FileRecorder::DestroyFileRecorder(&recObj);
- return -1;
- }
-
-    // Run through the file
- AudioFrame audioFrame;
- int16_t decodedData[160];
- int decLength=0;
- const uint32_t frequency = 16000;
-
- while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
- {
- if(decLength!=frequency/100)
- {
- // This is an OK way to end
- break;
- }
-
- audioFrame.UpdateFrame(-1, 0, decodedData,
- (uint16_t)decLength,
- frequency, AudioFrame::kNormalSpeech,
- AudioFrame::kVadActive);
-
- res=recObj.RecordAudioToFile(audioFrame);
- if(res)
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- "ConvertWAVToPCM failed during conversion (write frame)");
- }
- }
-
- playerObj.StopPlayingFile();
- recObj.StopRecording();
- FilePlayer::DestroyFilePlayer(&playerObj);
- FileRecorder::DestroyFileRecorder(&recObj);
-
- return res;
-}
-
-int VoEFileImpl::ConvertWAVToPCM(InStream* streamIn, OutStream* streamOut)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ConvertWAVToPCM(streamIn, streamOut)");
-
- if ((streamIn == NULL) || (streamOut == NULL))
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1), "invalid stream handles");
- return (-1);
- }
-
- // Create file player object
- FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(-1,
- kFileFormatWavFile));
- int res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertWAVToPCM failed to create player object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- return -1;
- }
-
- // Create file recorder object
- FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
- -1, kFileFormatPcm16kHzFile));
-
- CodecInst codecInst;
- strncpy(codecInst.plname,"L16",32);
- codecInst.channels = 1;
- codecInst.rate = 256000;
- codecInst.plfreq = 16000;
- codecInst.pltype = 94;
- codecInst.pacsize = 160;
-
- res = recObj.StartRecordingAudioFile(*streamOut,codecInst,0);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertWAVToPCM failed to create recorder object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- recObj.StopRecording();
- FileRecorder::DestroyFileRecorder(&recObj);
- return -1;
- }
-
-    // Run through the file
- AudioFrame audioFrame;
- int16_t decodedData[160];
- int decLength=0;
- const uint32_t frequency = 16000;
-
- while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
- {
- if(decLength!=frequency/100)
- {
- // This is an OK way to end
- break;
- }
-
- audioFrame.UpdateFrame(-1, 0, decodedData,
- (uint16_t)decLength, frequency,
- AudioFrame::kNormalSpeech,
- AudioFrame::kVadActive);
-
- res=recObj.RecordAudioToFile(audioFrame);
- if(res)
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- "ConvertWAVToPCM failed during conversion (write frame)");
- }
- }
-
- playerObj.StopPlayingFile();
- recObj.StopRecording();
- FilePlayer::DestroyFilePlayer(&playerObj);
- FileRecorder::DestroyFileRecorder(&recObj);
-
- return res;
-}
-
-int VoEFileImpl::ConvertPCMToCompressed(const char* fileNameInUTF8,
- const char* fileNameOutUTF8,
- CodecInst* compression)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ConvertPCMToCompressed(fileNameInUTF8=%s, fileNameOutUTF8=%s"
- ", compression)", fileNameInUTF8, fileNameOutUTF8);
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
- " compression: plname=%s, plfreq=%d, pacsize=%d",
- compression->plname, compression->plfreq,
- compression->pacsize);
-
- // Create file player object
- FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
- -1,
- kFileFormatPcm16kHzFile));
- int res = playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0, NULL);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertPCMToCompressed failed to create player object");
- // Clean up and shutdown the file player
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- return -1;
- }
-
- // Create file recorder object
- FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
- -1,
- kFileFormatCompressedFile));
- res = recObj.StartRecordingAudioFile(fileNameOutUTF8, *compression,0);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertPCMToCompressed failed to create recorder object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- recObj.StopRecording();
- FileRecorder::DestroyFileRecorder(&recObj);
- return -1;
- }
-
-    // Run through the file
- AudioFrame audioFrame;
- int16_t decodedData[160];
- int decLength=0;
- const uint32_t frequency = 16000;
-
- while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
- {
- if(decLength!=frequency/100)
- {
- // This is an OK way to end
- break;
- }
- audioFrame.UpdateFrame(-1, 0, decodedData,
- (uint16_t)decLength,
- frequency, AudioFrame::kNormalSpeech,
- AudioFrame::kVadActive);
-
- res=recObj.RecordAudioToFile(audioFrame);
- if(res)
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- "ConvertPCMToCompressed failed during conversion "
- "(write frame)");
- }
- }
-
- playerObj.StopPlayingFile();
- recObj.StopRecording();
- FilePlayer::DestroyFilePlayer(&playerObj);
- FileRecorder::DestroyFileRecorder(&recObj);
-
- return res;
-}
-
-int VoEFileImpl::ConvertPCMToCompressed(InStream* streamIn,
- OutStream* streamOut,
- CodecInst* compression)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ConvertPCMToCompressed(streamIn, streamOut, compression)");
-
- if ((streamIn == NULL) || (streamOut == NULL))
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1), "invalid stream handles");
- return (-1);
- }
-
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
- " compression: plname=%s, plfreq=%d, pacsize=%d",
- compression->plname, compression->plfreq,
- compression->pacsize);
-
- // Create file player object
- FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
- -1, kFileFormatPcm16kHzFile));
-
- int res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertPCMToCompressed failed to create player object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- return -1;
- }
-
- // Create file recorder object
- FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
- -1, kFileFormatCompressedFile));
- res = recObj.StartRecordingAudioFile(*streamOut,*compression,0);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertPCMToCompressed failed to create recorder object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- recObj.StopRecording();
- FileRecorder::DestroyFileRecorder(&recObj);
- return -1;
- }
-
-    // Run through the file
- AudioFrame audioFrame;
- int16_t decodedData[160];
- int decLength=0;
- const uint32_t frequency = 16000;
-
- while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
- {
- if(decLength!=frequency/100)
- {
- // This is an OK way to end
- break;
- }
- audioFrame.UpdateFrame(-1, 0, decodedData,
- (uint16_t)decLength,
- frequency, AudioFrame::kNormalSpeech,
- AudioFrame::kVadActive);
-
- res=recObj.RecordAudioToFile(audioFrame);
- if(res)
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- "ConvertPCMToCompressed failed during conversion "
- "(write frame)");
- }
- }
-
- playerObj.StopPlayingFile();
- recObj.StopRecording();
- FilePlayer::DestroyFilePlayer(&playerObj);
- FileRecorder::DestroyFileRecorder(&recObj);
-
- return res;
-}
-
-int VoEFileImpl::ConvertCompressedToPCM(const char* fileNameInUTF8,
- const char* fileNameOutUTF8)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ConvertCompressedToPCM(fileNameInUTF8=%s,"
- " fileNameOutUTF8=%s)",
- fileNameInUTF8, fileNameOutUTF8);
-
- // Create file player object
- FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
- -1, kFileFormatCompressedFile));
-
- int res = playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0,NULL);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertCompressedToPCM failed to create player object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- return -1;
- }
-
- // Create file recorder object
- FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
- -1, kFileFormatPcm16kHzFile));
-
- CodecInst codecInst;
- strncpy(codecInst.plname,"L16",32);
- codecInst.channels = 1;
- codecInst.rate = 256000;
- codecInst.plfreq = 16000;
- codecInst.pltype = 94;
- codecInst.pacsize = 160;
-
- res = recObj.StartRecordingAudioFile(fileNameOutUTF8,codecInst,0);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertCompressedToPCM failed to create recorder object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- recObj.StopRecording();
- FileRecorder::DestroyFileRecorder(&recObj);
- return -1;
- }
-
-    // Run through the file
- AudioFrame audioFrame;
- int16_t decodedData[160];
- int decLength=0;
- const uint32_t frequency = 16000;
-
- while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
- {
- if(decLength!=frequency/100)
- {
- // This is an OK way to end
- break;
- }
- audioFrame.UpdateFrame(-1, 0, decodedData,
- (uint16_t)decLength,
- frequency,
- AudioFrame::kNormalSpeech,
- AudioFrame::kVadActive);
-
- res=recObj.RecordAudioToFile(audioFrame);
- if(res)
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- "ConvertCompressedToPCM failed during conversion "
- "(write frame)");
- }
- }
-
- playerObj.StopPlayingFile();
- recObj.StopRecording();
- FilePlayer::DestroyFilePlayer(&playerObj);
- FileRecorder::DestroyFileRecorder(&recObj);
-
- return res;
-}
-
-int VoEFileImpl::ConvertCompressedToPCM(InStream* streamIn,
- OutStream* streamOut)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ConvertCompressedToPCM(file, file);");
-
- if ((streamIn == NULL) || (streamOut == NULL))
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1), "invalid stream handles");
- return (-1);
- }
-
- // Create file player object
- FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
- -1, kFileFormatCompressedFile));
- int res;
-
- res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertCompressedToPCM failed to create player object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- return -1;
- }
-
- // Create file recorder object
- FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
- -1, kFileFormatPcm16kHzFile));
-
- CodecInst codecInst;
- strncpy(codecInst.plname,"L16",32);
- codecInst.channels = 1;
- codecInst.rate = 256000;
- codecInst.plfreq = 16000;
- codecInst.pltype = 94;
- codecInst.pacsize = 160;
-
- res = recObj.StartRecordingAudioFile(*streamOut,codecInst,0);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "ConvertCompressedToPCM failed to create recorder object");
- playerObj.StopPlayingFile();
- FilePlayer::DestroyFilePlayer(&playerObj);
- recObj.StopRecording();
- FileRecorder::DestroyFileRecorder(&recObj);
- return -1;
- }
-
-    // Run through the file
- AudioFrame audioFrame;
- int16_t decodedData[160];
- int decLength=0;
- const uint32_t frequency = 16000;
-
- while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
- {
- if(decLength!=frequency/100)
- {
- // This is an OK way to end
- break;
- }
- audioFrame.UpdateFrame(-1, 0, decodedData,
- (uint16_t)decLength,
- frequency,
- AudioFrame::kNormalSpeech,
- AudioFrame::kVadActive);
-
- res=recObj.RecordAudioToFile(audioFrame);
- if(res)
- {
- WEBRTC_TRACE(kTraceError, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- "ConvertCompressedToPCM failed during conversion "
- "(write frame)");
- }
- }
-
- playerObj.StopPlayingFile();
- recObj.StopRecording();
- FilePlayer::DestroyFilePlayer(&playerObj);
- FileRecorder::DestroyFileRecorder(&recObj);
-
- return res;
-}
-
-
-int VoEFileImpl::GetFileDuration(const char* fileNameUTF8,
- int& durationMs,
- FileFormats format)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetFileDuration(fileNameUTF8=%s, format=%d)",
- fileNameUTF8, format);
-
- // Create a dummy file module for this
- MediaFile * fileModule=MediaFile::CreateMediaFile(-1);
-
- // Temp container of the right format
- uint32_t duration;
- int res=fileModule->FileDurationMs(fileNameUTF8,duration,format);
- if (res)
- {
- _shared->SetLastError(VE_BAD_FILE, kTraceError,
- "GetFileDuration() failed measure file duration");
- return -1;
- }
- durationMs = duration;
- MediaFile::DestroyMediaFile(fileModule);
- fileModule = NULL;
-
- return(res);
-}
-
-int VoEFileImpl::GetPlaybackPosition(int channel, int& positionMs)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetPlaybackPosition(channel=%d)", channel);
-
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetPlaybackPosition() failed to locate channel");
- return -1;
- }
- return channelPtr->GetLocalPlayoutPosition(positionMs);
-}
-
#endif // #ifdef WEBRTC_VOICE_ENGINE_FILE_API
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/voice_engine/voe_file_impl.h b/chromium/third_party/webrtc/voice_engine/voe_file_impl.h
index f43d4f1426f..584d0a17c76 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_file_impl.h
+++ b/chromium/third_party/webrtc/voice_engine/voe_file_impl.h
@@ -41,8 +41,6 @@ public:
virtual int IsPlayingFileLocally(int channel);
- virtual int ScaleLocalFilePlayout(int channel, float scale);
-
// Use file as microphone input
virtual int StartPlayingFileAsMicrophone(
@@ -64,8 +62,6 @@ public:
virtual int IsPlayingFileAsMicrophone(int channel);
- virtual int ScaleFileAsMicrophonePlayout(int channel, float scale);
-
// Record speaker signal to file
virtual int StartRecordingPlayout(int channel,
@@ -90,43 +86,6 @@ public:
virtual int StopRecordingMicrophone();
- // Conversion between different file formats
-
- virtual int ConvertPCMToWAV(const char* fileNameInUTF8,
- const char* fileNameOutUTF8);
-
- virtual int ConvertPCMToWAV(InStream* streamIn,
- OutStream* streamOut);
-
- virtual int ConvertWAVToPCM(const char* fileNameInUTF8,
- const char* fileNameOutUTF8);
-
- virtual int ConvertWAVToPCM(InStream* streamIn,
- OutStream* streamOut);
-
- virtual int ConvertPCMToCompressed(const char* fileNameInUTF8,
- const char* fileNameOutUTF8,
- CodecInst* compression);
-
- virtual int ConvertPCMToCompressed(InStream* streamIn,
- OutStream* streamOut,
- CodecInst* compression);
-
- virtual int ConvertCompressedToPCM(const char* fileNameInUTF8,
- const char* fileNameOutUTF8);
-
- virtual int ConvertCompressedToPCM(InStream* streamIn,
- OutStream* streamOut);
-
- // Misc file functions
-
- virtual int GetFileDuration(
- const char* fileNameUTF8,
- int& durationMs,
- FileFormats format = kFileFormatPcm16kHzFile);
-
- virtual int GetPlaybackPosition(int channel, int& positionMs);
-
protected:
VoEFileImpl(voe::SharedData* shared);
virtual ~VoEFileImpl();
diff --git a/chromium/third_party/webrtc/voice_engine/voe_hardware_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_hardware_impl.cc
index f4550fcc12e..eaf2a28a38a 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_hardware_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_hardware_impl.cc
@@ -148,8 +148,6 @@ int VoEHardwareImpl::GetNumOfRecordingDevices(int& devices)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetNumOfRecordingDevices(devices=?)");
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -169,8 +167,6 @@ int VoEHardwareImpl::GetNumOfPlayoutDevices(int& devices)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetNumOfPlayoutDevices(devices=?)");
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -193,8 +189,6 @@ int VoEHardwareImpl::GetRecordingDeviceName(int index,
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetRecordingDeviceName(index=%d)", index);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -251,8 +245,6 @@ int VoEHardwareImpl::GetPlayoutDeviceName(int index,
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetPlayoutDeviceName(index=%d)", index);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -310,8 +302,6 @@ int VoEHardwareImpl::SetRecordingDevice(int index,
"SetRecordingDevice(index=%d, recordingChannel=%d)",
index, (int) recordingChannel);
CriticalSectionScoped cs(_shared->crit_sec());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
- // TODO(leozwang): Add this api to Android OpenSL ES implementation.
if (!_shared->statistics().Initialized())
{
@@ -440,8 +430,6 @@ int VoEHardwareImpl::SetPlayoutDevice(int index)
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetPlayoutDevice(index=%d)", index);
CriticalSectionScoped cs(_shared->crit_sec());
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -540,231 +528,6 @@ int VoEHardwareImpl::SetPlayoutDevice(int index)
return 0;
}
-int VoEHardwareImpl::GetRecordingDeviceStatus(bool& isAvailable)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetRecordingDeviceStatus()");
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- // We let the module do isRecording sanity
-
- bool available(false);
-
- // Check availability
- if (_shared->audio_device()->RecordingIsAvailable(&available) != 0)
- {
- _shared->SetLastError(VE_UNDEFINED_SC_REC_ERR, kTraceError,
- " Audio Device error");
- return -1;
- }
-
- isAvailable = available;
-
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- " Output: isAvailable = %d)", (int) isAvailable);
-
- return 0;
-}
-
-int VoEHardwareImpl::GetPlayoutDeviceStatus(bool& isAvailable)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetPlayoutDeviceStatus()");
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- // We let the module do isPlaying sanity
-
- bool available(false);
-
- // Check availability
- if (_shared->audio_device()->PlayoutIsAvailable(&available) != 0)
- {
- _shared->SetLastError(VE_PLAY_UNDEFINED_SC_ERR, kTraceError,
- " Audio Device error");
- return -1;
- }
-
- isAvailable = available;
-
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- " Output: isAvailable = %d)", (int) isAvailable);
-
- return 0;
-}
-
-int VoEHardwareImpl::ResetAudioDevice()
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "ResetAudioDevice()");
- ANDROID_NOT_SUPPORTED(_shared->statistics());
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
-#if defined(WEBRTC_IOS)
- if (_shared->audio_device()->ResetAudioDevice() < 0)
- {
- _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceError,
- " Failed to reset sound device");
- return -1;
- }
-#else
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- " no support for resetting sound device");
- return -1;
-#endif
-
- return 0;
-}
-
-int VoEHardwareImpl::AudioDeviceControl(unsigned int par1, unsigned int par2,
- unsigned int par3)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "AudioDeviceControl(%i, %i, %i)", par1, par2, par3);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- " no support for resetting sound device");
- return -1;
-}
-
-int VoEHardwareImpl::SetLoudspeakerStatus(bool enable)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetLoudspeakerStatus(enable=%i)", (int) enable);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-#if defined(WEBRTC_ANDROID)
- if (_shared->audio_device()->SetLoudspeakerStatus(enable) < 0)
- {
- _shared->SetLastError(VE_IGNORED_FUNCTION, kTraceError,
- " Failed to set loudspeaker status");
- return -1;
- }
-
- return 0;
-#else
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- " no support for setting loudspeaker status");
- return -1;
-#endif
-}
-
-int VoEHardwareImpl::GetLoudspeakerStatus(bool& enabled)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetLoudspeakerStatus()");
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-
-#if defined(WEBRTC_ANDROID)
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- if (_shared->audio_device()->GetLoudspeakerStatus(&enabled) < 0)
- {
- _shared->SetLastError(VE_IGNORED_FUNCTION, kTraceError,
- " Failed to get loudspeaker status");
- return -1;
- }
-
- return 0;
-#else
- _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- " no support for setting loudspeaker status");
- return -1;
-#endif
-}
-
-int VoEHardwareImpl::GetCPULoad(int& loadPercent)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetCPULoad()");
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- // Get CPU load from ADM
- uint16_t load(0);
- if (_shared->audio_device()->CPULoad(&load) != 0)
- {
- _shared->SetLastError(VE_CPU_INFO_ERROR, kTraceError,
- " error getting system CPU load");
- return -1;
- }
-
- loadPercent = static_cast<int> (load);
-
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- " Output: loadPercent = %d", loadPercent);
-
- return 0;
-}
-
-int VoEHardwareImpl::EnableBuiltInAEC(bool enable)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "%s", __FUNCTION__);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- return _shared->audio_device()->EnableBuiltInAEC(enable);
-}
-
-bool VoEHardwareImpl::BuiltInAECIsEnabled() const
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "%s", __FUNCTION__);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return false;
- }
-
- return _shared->audio_device()->BuiltInAECIsEnabled();
-}
-
int VoEHardwareImpl::SetRecordingSampleRate(unsigned int samples_per_sec) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"%s", __FUNCTION__);
diff --git a/chromium/third_party/webrtc/voice_engine/voe_hardware_impl.h b/chromium/third_party/webrtc/voice_engine/voe_hardware_impl.h
index 4e06f978d97..f3537efb036 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_hardware_impl.h
+++ b/chromium/third_party/webrtc/voice_engine/voe_hardware_impl.h
@@ -33,10 +33,6 @@ public:
char strNameUTF8[128],
char strGuidUTF8[128]);
- virtual int GetRecordingDeviceStatus(bool& isAvailable);
-
- virtual int GetPlayoutDeviceStatus(bool& isAvailable);
-
virtual int SetRecordingDevice(
int index,
StereoChannel recordingChannel = kStereoBoth);
@@ -47,21 +43,6 @@ public:
virtual int GetAudioDeviceLayer(AudioLayers& audioLayer);
- virtual int GetCPULoad(int& loadPercent);
-
- virtual int ResetAudioDevice();
-
- virtual int AudioDeviceControl(unsigned int par1,
- unsigned int par2,
- unsigned int par3);
-
- virtual int SetLoudspeakerStatus(bool enable);
-
- virtual int GetLoudspeakerStatus(bool& enabled);
-
- virtual int EnableBuiltInAEC(bool enable);
- virtual bool BuiltInAECIsEnabled() const;
-
virtual int SetRecordingSampleRate(unsigned int samples_per_sec);
virtual int RecordingSampleRate(unsigned int* samples_per_sec) const;
virtual int SetPlayoutSampleRate(unsigned int samples_per_sec);
diff --git a/chromium/third_party/webrtc/voice_engine/voe_neteq_stats_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_neteq_stats_impl.cc
index 37d1cdac90d..0d8ce50eb8f 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_neteq_stats_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_neteq_stats_impl.cc
@@ -53,7 +53,6 @@ int VoENetEqStatsImpl::GetNetworkStatistics(int channel,
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetNetworkStatistics(channel=%d, stats=?)", channel);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -74,7 +73,6 @@ int VoENetEqStatsImpl::GetNetworkStatistics(int channel,
int VoENetEqStatsImpl::GetDecodingCallStatistics(
int channel, AudioDecodingCallStats* stats) const {
- ANDROID_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
diff --git a/chromium/third_party/webrtc/voice_engine/voe_neteq_stats_unittest.cc b/chromium/third_party/webrtc/voice_engine/voe_neteq_stats_unittest.cc
deleted file mode 100644
index 4bb4d4a0fbf..00000000000
--- a/chromium/third_party/webrtc/voice_engine/voe_neteq_stats_unittest.cc
+++ /dev/null
@@ -1,285 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/voice_engine/include/voe_neteq_stats.h"
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h"
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h"
-#include "webrtc/modules/audio_device/include/fake_audio_device.h"
-#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/test/testsupport/gtest_disable.h"
-#include "webrtc/voice_engine/include/voe_base.h"
-#include "webrtc/voice_engine/include/voe_hardware.h"
-#include "webrtc/voice_engine/voice_engine_defines.h"
-
-namespace webrtc {
-namespace voe {
-namespace {
-
-const int kSampleRateHz = 16000;
-const int kNumSamples10ms = kSampleRateHz / 100;
-const int kFrameSizeMs = 10; // Multiple of 10.
-const int kFrameSizeSamples = kFrameSizeMs / 10 * kNumSamples10ms;
-const int kPayloadSizeBytes = kFrameSizeSamples * sizeof(int16_t);
-const uint8_t kPayloadType = 111;
-
-class RtpUtility {
- public:
- RtpUtility(int samples_per_packet, uint8_t payload_type)
- : samples_per_packet_(samples_per_packet), payload_type_(payload_type) {}
-
- virtual ~RtpUtility() {}
-
- void Populate(WebRtcRTPHeader* rtp_header) {
- rtp_header->header.sequenceNumber = 0xABCD;
- rtp_header->header.timestamp = 0xABCDEF01;
- rtp_header->header.payloadType = payload_type_;
- rtp_header->header.markerBit = false;
- rtp_header->header.ssrc = 0x1234;
- rtp_header->header.numCSRCs = 0;
- rtp_header->frameType = kAudioFrameSpeech;
-
- rtp_header->header.payload_type_frequency = kSampleRateHz;
- rtp_header->type.Audio.channel = 1;
- rtp_header->type.Audio.isCNG = false;
- }
-
- void Forward(WebRtcRTPHeader* rtp_header) {
- ++rtp_header->header.sequenceNumber;
- rtp_header->header.timestamp += samples_per_packet_;
- }
-
- private:
- int samples_per_packet_;
- uint8_t payload_type_;
-};
-
-// This factory method allows access to ACM of a channel, facilitating insertion
-// of packets to and pulling audio of ACM.
-struct InsertAcm : AudioCodingModuleFactory {
- explicit InsertAcm(AudioCodingModule* acm) : acm_(acm) {}
- ~InsertAcm() {}
- virtual AudioCodingModule* Create(int /*id*/) const { return acm_; }
-
- AudioCodingModule* acm_;
-};
-
-class VoENetEqStatsTest : public ::testing::Test {
- protected:
- VoENetEqStatsTest()
- : acm1_(new acm1::AudioCodingModuleImpl(1, Clock::GetRealTimeClock())),
- acm2_(new acm2::AudioCodingModuleImpl(2)),
- voe_(VoiceEngine::Create()),
- base_(VoEBase::GetInterface(voe_)),
- voe_neteq_stats_(VoENetEqStats::GetInterface(voe_)),
- channel_acm1_(-1),
- channel_acm2_(-1),
- adm_(new FakeAudioDeviceModule),
- rtp_utility_(new RtpUtility(kFrameSizeSamples, kPayloadType)) {}
-
- ~VoENetEqStatsTest() {}
-
- void TearDown() {
- voe_neteq_stats_->Release();
- base_->DeleteChannel(channel_acm1_);
- base_->DeleteChannel(channel_acm2_);
- base_->Terminate();
- base_->Release();
- VoiceEngine::Delete(voe_);
- }
-
- void SetUp() {
- // Check if all components are valid.
- ASSERT_TRUE(voe_ != NULL);
- ASSERT_TRUE(base_ != NULL);
- ASSERT_TRUE(adm_.get() != NULL);
- ASSERT_EQ(0, base_->Init(adm_.get()));
-
- // Set configs.
- config_acm1_.Set<AudioCodingModuleFactory>(new InsertAcm(acm1_));
- config_acm2_.Set<AudioCodingModuleFactory>(new InsertAcm(acm2_));
-
- // Create channe1s;
- channel_acm1_ = base_->CreateChannel(config_acm1_);
- ASSERT_NE(-1, channel_acm1_);
-
- channel_acm2_ = base_->CreateChannel(config_acm2_);
- ASSERT_NE(-1, channel_acm2_);
-
- CodecInst codec;
- AudioCodingModule::Codec("L16", &codec, kSampleRateHz, 1);
- codec.pltype = kPayloadType;
-
- // Register L16 codec in ACMs.
- ASSERT_EQ(0, acm1_->RegisterReceiveCodec(codec));
- ASSERT_EQ(0, acm2_->RegisterReceiveCodec(codec));
-
- rtp_utility_->Populate(&rtp_header_);
- }
-
- void InsertPacketAndPullAudio() {
- AudioFrame audio_frame;
- const uint8_t kPayload[kPayloadSizeBytes] = {0};
-
- ASSERT_EQ(0,
- acm1_->IncomingPacket(kPayload, kPayloadSizeBytes, rtp_header_));
- ASSERT_EQ(0,
- acm2_->IncomingPacket(kPayload, kPayloadSizeBytes, rtp_header_));
-
- ASSERT_EQ(0, acm1_->PlayoutData10Ms(-1, &audio_frame));
- ASSERT_EQ(0, acm2_->PlayoutData10Ms(-1, &audio_frame));
- rtp_utility_->Forward(&rtp_header_);
- }
-
- void JustPullAudio() {
- AudioFrame audio_frame;
- ASSERT_EQ(0, acm1_->PlayoutData10Ms(-1, &audio_frame));
- ASSERT_EQ(0, acm2_->PlayoutData10Ms(-1, &audio_frame));
- }
-
- Config config_acm1_;
- Config config_acm2_;
-
- // ACMs are inserted into VoE channels, and this class is not the owner of
- // them. Therefore, they should not be deleted, not even in destructor.
- AudioCodingModule* acm1_;
- AudioCodingModule* acm2_;
-
- VoiceEngine* voe_;
- VoEBase* base_;
- VoENetEqStats* voe_neteq_stats_;
- int channel_acm1_;
- int channel_acm2_;
- scoped_ptr<FakeAudioDeviceModule> adm_;
- scoped_ptr<RtpUtility> rtp_utility_;
- WebRtcRTPHeader rtp_header_;
-};
-
-// Check if the statistics are initialized correctly. Before any call to ACM
-// all fields have to be zero.
-TEST_F(VoENetEqStatsTest, DISABLED_ON_ANDROID(InitializedToZero)) {
- AudioDecodingCallStats stats;
- ASSERT_EQ(0,
- voe_neteq_stats_->GetDecodingCallStatistics(channel_acm1_, &stats));
- EXPECT_EQ(0, stats.calls_to_neteq);
- EXPECT_EQ(0, stats.calls_to_silence_generator);
- EXPECT_EQ(0, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(0, stats.decoded_plc);
- EXPECT_EQ(0, stats.decoded_plc_cng);
-
- ASSERT_EQ(0,
- voe_neteq_stats_->GetDecodingCallStatistics(channel_acm2_, &stats));
- EXPECT_EQ(0, stats.calls_to_neteq);
- EXPECT_EQ(0, stats.calls_to_silence_generator);
- EXPECT_EQ(0, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(0, stats.decoded_plc);
- EXPECT_EQ(0, stats.decoded_plc_cng);
-}
-
-// Apply an initial playout delay. Calls to AudioCodingModule::PlayoutData10ms()
-// should result in generating silence, check the associated field.
-TEST_F(VoENetEqStatsTest, DISABLED_ON_ANDROID(SilenceGeneratorCalled)) {
- AudioDecodingCallStats stats;
- const int kInitialDelay = 100;
-
- acm1_->SetInitialPlayoutDelay(kInitialDelay);
- acm2_->SetInitialPlayoutDelay(kInitialDelay);
-
- AudioFrame audio_frame;
- int num_calls = 0;
- for (int time_ms = 0; time_ms < kInitialDelay;
- time_ms += kFrameSizeMs, ++num_calls) {
- InsertPacketAndPullAudio();
- }
- ASSERT_EQ(0,
- voe_neteq_stats_->GetDecodingCallStatistics(channel_acm1_, &stats));
- EXPECT_EQ(0, stats.calls_to_neteq);
- EXPECT_EQ(num_calls, stats.calls_to_silence_generator);
- EXPECT_EQ(0, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(0, stats.decoded_plc);
- EXPECT_EQ(0, stats.decoded_plc_cng);
-
- ASSERT_EQ(0,
- voe_neteq_stats_->GetDecodingCallStatistics(channel_acm2_, &stats));
- EXPECT_EQ(0, stats.calls_to_neteq);
- EXPECT_EQ(num_calls, stats.calls_to_silence_generator);
- EXPECT_EQ(0, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(0, stats.decoded_plc);
- EXPECT_EQ(0, stats.decoded_plc_cng);
-}
-
-// Insert some packets and pull audio. Check statistics are valid. Then,
-// simulate packet loss and check if PLC and PLC-to-CNG statistics are
-// correctly updated.
-TEST_F(VoENetEqStatsTest, DISABLED_ON_ANDROID(NetEqCalls)) {
- AudioDecodingCallStats stats;
- const int kNumNormalCalls = 10;
-
- AudioFrame audio_frame;
- for (int num_calls = 0; num_calls < kNumNormalCalls; ++num_calls) {
- InsertPacketAndPullAudio();
- }
- ASSERT_EQ(0,
- voe_neteq_stats_->GetDecodingCallStatistics(channel_acm1_, &stats));
- EXPECT_EQ(kNumNormalCalls, stats.calls_to_neteq);
- EXPECT_EQ(0, stats.calls_to_silence_generator);
- EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(0, stats.decoded_plc);
- EXPECT_EQ(0, stats.decoded_plc_cng);
-
- ASSERT_EQ(0,
- voe_neteq_stats_->GetDecodingCallStatistics(channel_acm2_, &stats));
- EXPECT_EQ(kNumNormalCalls, stats.calls_to_neteq);
- EXPECT_EQ(0, stats.calls_to_silence_generator);
- EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(0, stats.decoded_plc);
- EXPECT_EQ(0, stats.decoded_plc_cng);
-
- const int kNumPlc = 3;
- const int kNumPlcCng = 5;
-
- // Simulate packet-loss. NetEq first performs PLC then PLC fades to CNG.
- for (int n = 0; n < kNumPlc + kNumPlcCng; ++n) {
- JustPullAudio();
- }
- ASSERT_EQ(0,
- voe_neteq_stats_->GetDecodingCallStatistics(channel_acm1_, &stats));
- EXPECT_EQ(kNumNormalCalls + kNumPlc + kNumPlcCng, stats.calls_to_neteq);
- EXPECT_EQ(0, stats.calls_to_silence_generator);
- EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(kNumPlc, stats.decoded_plc);
- EXPECT_EQ(kNumPlcCng, stats.decoded_plc_cng);
-
- ASSERT_EQ(0,
- voe_neteq_stats_->GetDecodingCallStatistics(channel_acm2_, &stats));
- EXPECT_EQ(kNumNormalCalls + kNumPlc + kNumPlcCng, stats.calls_to_neteq);
- EXPECT_EQ(0, stats.calls_to_silence_generator);
- EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
- EXPECT_EQ(0, stats.decoded_cng);
- EXPECT_EQ(kNumPlc, stats.decoded_plc);
- EXPECT_EQ(kNumPlcCng, stats.decoded_plc_cng);
-}
-
-} // namespace
-
-} // namespace voe
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/voice_engine/voe_network_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_network_impl.cc
index d6651a5b805..70c548857b8 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_network_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_network_impl.cc
@@ -88,7 +88,14 @@ int VoENetworkImpl::DeRegisterExternalTransport(int channel)
int VoENetworkImpl::ReceivedRTPPacket(int channel,
const void* data,
- unsigned int length)
+ unsigned int length) {
+ return ReceivedRTPPacket(channel, data, length, webrtc::PacketTime());
+}
+
+int VoENetworkImpl::ReceivedRTPPacket(int channel,
+ const void* data,
+ unsigned int length,
+ const PacketTime& packet_time)
{
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_shared->instance_id(), -1),
"ReceivedRTPPacket(channel=%d, length=%u)", channel, length);
@@ -125,7 +132,8 @@ int VoENetworkImpl::ReceivedRTPPacket(int channel,
"ReceivedRTPPacket() external transport is not enabled");
return -1;
}
- return channelPtr->ReceivedRTPPacket((const int8_t*) data, length);
+ return channelPtr->ReceivedRTPPacket((const int8_t*) data, length,
+ packet_time);
}
int VoENetworkImpl::ReceivedRTCPPacket(int channel, const void* data,
diff --git a/chromium/third_party/webrtc/voice_engine/voe_network_impl.h b/chromium/third_party/webrtc/voice_engine/voe_network_impl.h
index 6a703cfa688..7e09d04d75f 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_network_impl.h
+++ b/chromium/third_party/webrtc/voice_engine/voe_network_impl.h
@@ -29,6 +29,10 @@ public:
virtual int ReceivedRTPPacket(int channel,
const void* data,
unsigned int length);
+ virtual int ReceivedRTPPacket(int channel,
+ const void* data,
+ unsigned int length,
+ const PacketTime& packet_time);
virtual int ReceivedRTCPPacket(int channel,
const void* data,
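// Illustration (not part of this patch): how the two ReceivedRTPPacket()
// overloads relate after this change. Callers that do not track packet
// arrival times keep using the 3-argument form, which forwards a
// default-constructed webrtc::PacketTime() to the new 4-argument form.
// |voe|, |channel|, |data| and |length| are assumed to come from the
// embedding application.
webrtc::VoENetwork* network = webrtc::VoENetwork::GetInterface(voe);
network->ReceivedRTPPacket(channel, data, length);                       // legacy path
network->ReceivedRTPPacket(channel, data, length, webrtc::PacketTime()); // explicit arrival time
network->Release();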
diff --git a/chromium/third_party/webrtc/voice_engine/voe_rtp_rtcp_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_rtp_rtcp_impl.cc
index 945c5384b36..8f0e717edf1 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_rtp_rtcp_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_rtp_rtcp_impl.cc
@@ -11,6 +11,7 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/file_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/voe_rtp_rtcp_impl.h"
#include "webrtc/voice_engine/voice_engine_impl.h"
@@ -49,88 +50,6 @@ VoERTP_RTCPImpl::~VoERTP_RTCPImpl()
"VoERTP_RTCPImpl::~VoERTP_RTCPImpl() - dtor");
}
-int VoERTP_RTCPImpl::RegisterRTPObserver(int channel, VoERTPObserver& observer)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "RegisterRTPObserver(channel=%d observer=0x%x)",
- channel, &observer);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "RegisterRTPObserver() failed to locate channel");
- return -1;
- }
- return channelPtr->RegisterRTPObserver(observer);
-}
-
-int VoERTP_RTCPImpl::DeRegisterRTPObserver(int channel)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "DeRegisterRTPObserver(channel=%d)", channel);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "DeRegisterRTPObserver() failed to locate channel");
- return -1;
- }
- return channelPtr->DeRegisterRTPObserver();
-}
-
-int VoERTP_RTCPImpl::RegisterRTCPObserver(int channel, VoERTCPObserver& observer)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "RegisterRTCPObserver(channel=%d observer=0x%x)",
- channel, &observer);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "RegisterRTPObserver() failed to locate channel");
- return -1;
- }
- return channelPtr->RegisterRTCPObserver(observer);
-}
-
-int VoERTP_RTCPImpl::DeRegisterRTCPObserver(int channel)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "DeRegisterRTCPObserver(channel=%d)", channel);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "DeRegisterRTCPObserver() failed to locate channel");
- return -1;
- }
- return channelPtr->DeRegisterRTCPObserver();
-}
-
int VoERTP_RTCPImpl::SetLocalSSRC(int channel, unsigned int ssrc)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
@@ -191,82 +110,126 @@ int VoERTP_RTCPImpl::GetRemoteSSRC(int channel, unsigned int& ssrc)
return channelPtr->GetRemoteSSRC(ssrc);
}
-int VoERTP_RTCPImpl::GetRemoteCSRCs(int channel, unsigned int arrCSRC[15])
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetRemoteCSRCs(channel=%d, arrCSRC=?)", channel);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetRemoteCSRCs() failed to locate channel");
- return -1;
- }
- return channelPtr->GetRemoteCSRCs(arrCSRC);
-}
-
-
-int VoERTP_RTCPImpl::SetRTPAudioLevelIndicationStatus(int channel,
- bool enable,
- unsigned char ID)
+int VoERTP_RTCPImpl::SetSendAudioLevelIndicationStatus(int channel,
+ bool enable,
+ unsigned char id)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetRTPAudioLevelIndicationStatus(channel=%d, enable=%d,"
- " ID=%u)", channel, enable, ID);
+ "SetSendAudioLevelIndicationStatus(channel=%d, enable=%d,"
+ " ID=%u)", channel, enable, id);
if (!_shared->statistics().Initialized())
{
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
- if (enable && (ID < kVoiceEngineMinRtpExtensionId ||
- ID > kVoiceEngineMaxRtpExtensionId))
+ if (enable && (id < kVoiceEngineMinRtpExtensionId ||
+ id > kVoiceEngineMaxRtpExtensionId))
{
- // [RFC5285] The 4-bit ID is the local identifier of this element in
+ // [RFC5285] The 4-bit id is the local identifier of this element in
// the range 1-14 inclusive.
_shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
- "SetRTPAudioLevelIndicationStatus() invalid ID parameter");
+ "SetSendAudioLevelIndicationStatus() invalid ID parameter");
return -1;
}
- // Set state and ID for the specified channel.
+ // Set state and id for the specified channel.
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL)
{
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetRTPAudioLevelIndicationStatus() failed to locate channel");
+ "SetSendAudioLevelIndicationStatus() failed to locate channel");
return -1;
}
- return channelPtr->SetRTPAudioLevelIndicationStatus(enable, ID);
+ return channelPtr->SetSendAudioLevelIndicationStatus(enable, id);
}
-int VoERTP_RTCPImpl::GetRTPAudioLevelIndicationStatus(int channel,
- bool& enabled,
- unsigned char& ID)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetRTPAudioLevelIndicationStatus(channel=%d, enable=?, ID=?)",
- channel);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetRTPAudioLevelIndicationStatus() failed to locate channel");
- return -1;
- }
- return channelPtr->GetRTPAudioLevelIndicationStatus(enabled, ID);
+int VoERTP_RTCPImpl::SetReceiveAudioLevelIndicationStatus(int channel,
+ bool enable,
+ unsigned char id) {
+ WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+ "SetReceiveAudioLevelIndicationStatus(channel=%d, enable=%d, id=%u)",
+ channel, enable, id);
+ if (!_shared->statistics().Initialized()) {
+ _shared->SetLastError(VE_NOT_INITED, kTraceError);
+ return -1;
+ }
+ if (enable &&
+ (id < kVoiceEngineMinRtpExtensionId ||
+ id > kVoiceEngineMaxRtpExtensionId)) {
+ // [RFC5285] The 4-bit id is the local identifier of this element in
+ // the range 1-14 inclusive.
+ _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+        "SetReceiveAudioLevelIndicationStatus() invalid id parameter");
+ return -1;
+ }
+ // Set state and id for the specified channel.
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+ voe::Channel* channel_ptr = ch.channel();
+ if (channel_ptr == NULL) {
+ _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+ "SetReceiveAudioLevelIndicationStatus() failed to locate channel");
+ return -1;
+ }
+ return channel_ptr->SetReceiveAudioLevelIndicationStatus(enable, id);
+}
+
+int VoERTP_RTCPImpl::SetSendAbsoluteSenderTimeStatus(int channel,
+ bool enable,
+ unsigned char id) {
+ WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+ "SetSendAbsoluteSenderTimeStatus(channel=%d, enable=%d, id=%u)",
+ channel, enable, id);
+ if (!_shared->statistics().Initialized()) {
+ _shared->SetLastError(VE_NOT_INITED, kTraceError);
+ return -1;
+ }
+ if (enable && (id < kVoiceEngineMinRtpExtensionId ||
+ id > kVoiceEngineMaxRtpExtensionId)) {
+ // [RFC5285] The 4-bit id is the local identifier of this element in
+ // the range 1-14 inclusive.
+ _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+ "SetSendAbsoluteSenderTimeStatus() invalid id parameter");
+ return -1;
+ }
+ // Set state and id for the specified channel.
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+ voe::Channel* channelPtr = ch.channel();
+ if (channelPtr == NULL) {
+ _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+ "SetSendAbsoluteSenderTimeStatus() failed to locate channel");
+ return -1;
+ }
+ return channelPtr->SetSendAbsoluteSenderTimeStatus(enable, id);
+}
+
+int VoERTP_RTCPImpl::SetReceiveAbsoluteSenderTimeStatus(int channel,
+ bool enable,
+ unsigned char id) {
+ WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+ "SetReceiveAbsoluteSenderTimeStatus(channel=%d, enable=%d, id=%u)",
+ channel, enable, id);
+ if (!_shared->statistics().Initialized()) {
+ _shared->SetLastError(VE_NOT_INITED, kTraceError);
+ return -1;
+ }
+ if (enable && (id < kVoiceEngineMinRtpExtensionId ||
+ id > kVoiceEngineMaxRtpExtensionId)) {
+ // [RFC5285] The 4-bit id is the local identifier of this element in
+ // the range 1-14 inclusive.
+ _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+ "SetReceiveAbsoluteSenderTimeStatus() invalid id parameter");
+ return -1;
+ }
+ // Set state and id for the specified channel.
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+ voe::Channel* channelPtr = ch.channel();
+ if (channelPtr == NULL) {
+ _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+ "SetReceiveAbsoluteSenderTimeStatus() failed to locate channel");
+ return -1;
+ }
+ return channelPtr->SetReceiveAbsoluteSenderTimeStatus(enable, id);
}
int VoERTP_RTCPImpl::SetRTCPStatus(int channel, bool enable)
@@ -401,36 +364,6 @@ int VoERTP_RTCPImpl::GetRemoteRTCPData(
fractionLost);
}
-int VoERTP_RTCPImpl::SendApplicationDefinedRTCPPacket(
- int channel,
- unsigned char subType,
- unsigned int name,
- const char* data,
- unsigned short dataLengthInBytes)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SendApplicationDefinedRTCPPacket(channel=%d, subType=%u,"
- "name=%u, data=?, dataLengthInBytes=%u)",
- channel, subType, name, dataLengthInBytes);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SendApplicationDefinedRTCPPacket() failed to locate channel");
- return -1;
- }
- return channelPtr->SendApplicationDefinedRTCPPacket(subType,
- name,
- data,
- dataLengthInBytes);
-}
-
int VoERTP_RTCPImpl::GetRTPStatistics(int channel,
unsigned int& averageJitterMs,
unsigned int& maxJitterMs,
@@ -476,24 +409,6 @@ int VoERTP_RTCPImpl::GetRTCPStatistics(int channel, CallStatistics& stats)
return channelPtr->GetRTPStatistics(stats);
}
-int VoERTP_RTCPImpl::GetRemoteRTCPSenderInfo(int channel,
- SenderInfo* sender_info) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetRemoteRTCPSenderInfo(channel=%d)", channel);
- if (!_shared->statistics().Initialized()) {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channel_ptr = ch.channel();
- if (channel_ptr == NULL) {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetRemoteRTCPSenderInfo() failed to locate channel");
- return -1;
- }
- return channel_ptr->GetRemoteRTCPSenderInfo(sender_info);
-}
-
int VoERTP_RTCPImpl::GetRemoteRTCPReportBlocks(
int channel, std::vector<ReportBlock>* report_blocks) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
@@ -512,10 +427,10 @@ int VoERTP_RTCPImpl::GetRemoteRTCPReportBlocks(
return channel_ptr->GetRemoteRTCPReportBlocks(report_blocks);
}
-int VoERTP_RTCPImpl::SetFECStatus(int channel, bool enable, int redPayloadtype)
+int VoERTP_RTCPImpl::SetREDStatus(int channel, bool enable, int redPayloadtype)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetFECStatus(channel=%d, enable=%d, redPayloadtype=%d)",
+ "SetREDStatus(channel=%d, enable=%d, redPayloadtype=%d)",
channel, enable, redPayloadtype);
#ifdef WEBRTC_CODEC_RED
if (!_shared->statistics().Initialized())
@@ -528,23 +443,23 @@ int VoERTP_RTCPImpl::SetFECStatus(int channel, bool enable, int redPayloadtype)
if (channelPtr == NULL)
{
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetFECStatus() failed to locate channel");
+ "SetREDStatus() failed to locate channel");
return -1;
}
- return channelPtr->SetFECStatus(enable, redPayloadtype);
+ return channelPtr->SetREDStatus(enable, redPayloadtype);
#else
_shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "SetFECStatus() RED is not supported");
+ "SetREDStatus() RED is not supported");
return -1;
#endif
}
-int VoERTP_RTCPImpl::GetFECStatus(int channel,
+int VoERTP_RTCPImpl::GetREDStatus(int channel,
bool& enabled,
int& redPayloadtype)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetFECStatus(channel=%d, enabled=?, redPayloadtype=?)",
+ "GetREDStatus(channel=%d, enabled=?, redPayloadtype=?)",
channel);
#ifdef WEBRTC_CODEC_RED
if (!_shared->statistics().Initialized())
@@ -557,18 +472,17 @@ int VoERTP_RTCPImpl::GetFECStatus(int channel,
if (channelPtr == NULL)
{
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetFECStatus() failed to locate channel");
+ "GetREDStatus() failed to locate channel");
return -1;
}
- return channelPtr->GetFECStatus(enabled, redPayloadtype);
+ return channelPtr->GetREDStatus(enabled, redPayloadtype);
#else
_shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
- "GetFECStatus() RED is not supported");
+ "GetREDStatus() RED is not supported");
return -1;
#endif
}
-
int VoERTP_RTCPImpl::SetNACKStatus(int channel,
bool enable,
int maxNoPackets)
@@ -655,55 +569,25 @@ int VoERTP_RTCPImpl::RTPDumpIsActive(int channel, RTPDirections direction)
return channelPtr->RTPDumpIsActive(direction);
}
-int VoERTP_RTCPImpl::InsertExtraRTPPacket(int channel,
- unsigned char payloadType,
- bool markerBit,
- const char* payloadData,
- unsigned short payloadSize)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "InsertExtraRTPPacket(channel=%d, payloadType=%u,"
- " markerBit=%u, payloadSize=%u)",
- channel, payloadType, markerBit, payloadSize);
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "InsertExtraRTPPacket() failed to locate channel");
- return -1;
- }
- return channelPtr->InsertExtraRTPPacket(payloadType,
- markerBit,
- payloadData,
- payloadSize);
-}
+int VoERTP_RTCPImpl::SetVideoEngineBWETarget(int channel,
+ ViENetwork* vie_network,
+ int video_channel) {
+ WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+ "SetVideoEngineBWETarget(channel=%d, vie_network=?, video_channel=%d)",
+ channel, vie_network, video_channel);
-int VoERTP_RTCPImpl::GetLastRemoteTimeStamp(int channel,
- uint32_t* timestamp) {
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetLastRemoteTimeStamp(channel=%d, timestamp=?)", channel);
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetLastRemoteTimeStamp() failed to locate channel");
- return -1;
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+ voe::Channel* channelPtr = ch.channel();
+ if (channelPtr == NULL) {
+ _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+ "SetVideoEngineBWETarget() failed to locate channel");
+ if (vie_network) {
+ vie_network->Release();
}
- *timestamp = channelPtr->LastRemoteTimeStamp();
- return 0;
+ return -1;
+ }
+ channelPtr->SetVideoEngineBWETarget(vie_network, video_channel);
+ return 0;
}
#endif // #ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
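// Illustration (not part of this patch): enabling the renamed and new RTP
// header extension setters on a channel. |rtp_rtcp| is assumed to be a
// VoERTP_RTCP interface obtained via GetInterface() and |channel| a valid
// channel id; the extension ids 1 and 3 are arbitrary values inside the
// RFC 5285 range 1-14 enforced above.
rtp_rtcp->SetSendAudioLevelIndicationStatus(channel, true, 1);
rtp_rtcp->SetReceiveAudioLevelIndicationStatus(channel, true, 1);
rtp_rtcp->SetSendAbsoluteSenderTimeStatus(channel, true, 3);
rtp_rtcp->SetReceiveAbsoluteSenderTimeStatus(channel, true, 3);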
diff --git a/chromium/third_party/webrtc/voice_engine/voe_rtp_rtcp_impl.h b/chromium/third_party/webrtc/voice_engine/voe_rtp_rtcp_impl.h
index 0f8d08481c8..c73fcdd2ed0 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_rtp_rtcp_impl.h
+++ b/chromium/third_party/webrtc/voice_engine/voe_rtp_rtcp_impl.h
@@ -20,15 +20,6 @@ namespace webrtc {
class VoERTP_RTCPImpl : public VoERTP_RTCP
{
public:
- // Registration of observers for RTP and RTCP callbacks
- virtual int RegisterRTPObserver(int channel, VoERTPObserver& observer);
-
- virtual int DeRegisterRTPObserver(int channel);
-
- virtual int RegisterRTCPObserver(int channel, VoERTCPObserver& observer);
-
- virtual int DeRegisterRTCPObserver(int channel);
-
// RTCP
virtual int SetRTCPStatus(int channel, bool enable);
@@ -48,13 +39,6 @@ public:
unsigned int* jitter = NULL,
unsigned short* fractionLost = NULL);
- virtual int SendApplicationDefinedRTCPPacket(
- int channel,
- unsigned char subType,
- unsigned int name,
- const char* data,
- unsigned short dataLengthInBytes);
-
// SSRC
virtual int SetLocalSSRC(int channel, unsigned int ssrc);
@@ -63,16 +47,20 @@ public:
virtual int GetRemoteSSRC(int channel, unsigned int& ssrc);
// RTP Header Extension for Client-to-Mixer Audio Level Indication
- virtual int SetRTPAudioLevelIndicationStatus(int channel,
- bool enable,
- unsigned char ID);
-
- virtual int GetRTPAudioLevelIndicationStatus(int channel,
- bool& enabled,
- unsigned char& ID);
-
- // CSRC
- virtual int GetRemoteCSRCs(int channel, unsigned int arrCSRC[15]);
+ virtual int SetSendAudioLevelIndicationStatus(int channel,
+ bool enable,
+ unsigned char id);
+ virtual int SetReceiveAudioLevelIndicationStatus(int channel,
+ bool enable,
+ unsigned char id);
+
+ // RTP Header Extension for Absolute Sender Time
+ virtual int SetSendAbsoluteSenderTimeStatus(int channel,
+ bool enable,
+ unsigned char id);
+ virtual int SetReceiveAbsoluteSenderTimeStatus(int channel,
+ bool enable,
+ unsigned char id);
// Statistics
virtual int GetRTPStatistics(int channel,
@@ -82,17 +70,15 @@ public:
virtual int GetRTCPStatistics(int channel, CallStatistics& stats);
- virtual int GetRemoteRTCPSenderInfo(int channel, SenderInfo* sender_info);
-
virtual int GetRemoteRTCPReportBlocks(
int channel, std::vector<ReportBlock>* report_blocks);
- // FEC
- virtual int SetFECStatus(int channel,
+ // RED
+ virtual int SetREDStatus(int channel,
bool enable,
int redPayloadtype = -1);
- virtual int GetFECStatus(int channel, bool& enabled, int& redPayloadtype);
+ virtual int GetREDStatus(int channel, bool& enabled, int& redPayloadtype);
//NACK
virtual int SetNACKStatus(int channel,
@@ -110,14 +96,8 @@ public:
virtual int RTPDumpIsActive(int channel,
RTPDirections direction = kRtpIncoming);
- // Insert (and transmits) extra RTP packet into active RTP audio stream
- virtual int InsertExtraRTPPacket(int channel,
- unsigned char payloadType,
- bool markerBit,
- const char* payloadData,
- unsigned short payloadSize);
- virtual int GetLastRemoteTimeStamp(int channel,
- uint32_t* lastRemoteTimeStamp);
+ virtual int SetVideoEngineBWETarget(int channel, ViENetwork* vie_network,
+ int video_channel);
protected:
VoERTP_RTCPImpl(voe::SharedData* shared);
virtual ~VoERTP_RTCPImpl();
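// Illustration (not part of this patch): the renamed RED API and the new
// BWE hook from the interface above. |rtp_rtcp| and |channel| are as in the
// previous sketch; the RED payload type 127 is an assumed value negotiated
// elsewhere, and |vie_network|/|video_channel| are assumed to come from a
// VideoEngine instance owned by the application.
bool red_enabled = false;
int red_payload_type = -1;
rtp_rtcp->SetREDStatus(channel, true, 127);      // was SetFECStatus()
rtp_rtcp->GetREDStatus(channel, red_enabled, red_payload_type);
rtp_rtcp->SetVideoEngineBWETarget(channel, vie_network, video_channel);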
diff --git a/chromium/third_party/webrtc/voice_engine/voe_video_sync_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_video_sync_impl.cc
index 4645e2529f3..f4c5a6fe0be 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_video_sync_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_video_sync_impl.cc
@@ -51,7 +51,6 @@ int VoEVideoSyncImpl::GetPlayoutTimestamp(int channel, unsigned int& timestamp)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetPlayoutTimestamp(channel=%d, timestamp=?)", channel);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -75,7 +74,6 @@ int VoEVideoSyncImpl::SetInitTimestamp(int channel,
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetInitTimestamp(channel=%d, timestamp=%lu)",
channel, timestamp);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -99,7 +97,6 @@ int VoEVideoSyncImpl::SetInitSequenceNumber(int channel,
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetInitSequenceNumber(channel=%d, sequenceNumber=%hd)",
channel, sequenceNumber);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -122,7 +119,6 @@ int VoEVideoSyncImpl::SetMinimumPlayoutDelay(int channel,int delayMs)
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetMinimumPlayoutDelay(channel=%d, delayMs=%d)",
channel, delayMs);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -145,7 +141,6 @@ int VoEVideoSyncImpl::SetInitialPlayoutDelay(int channel, int delay_ms)
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetInitialPlayoutDelay(channel=%d, delay_ms=%d)",
channel, delay_ms);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -168,7 +163,6 @@ int VoEVideoSyncImpl::GetDelayEstimate(int channel,
int* playout_buffer_delay_ms) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetDelayEstimate(channel=%d, delayMs=?)", channel);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
@@ -192,7 +186,6 @@ int VoEVideoSyncImpl::GetPlayoutBufferSize(int& bufferMs)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetPlayoutBufferSize(bufferMs=?)");
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -240,7 +233,6 @@ int VoEVideoSyncImpl::GetRtpRtcp(int channel, RtpRtcp** rtpRtcpModule,
int VoEVideoSyncImpl::GetLeastRequiredDelayMs(int channel) const {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetLeastRequiredDelayMS(channel=%d)", channel);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
diff --git a/chromium/third_party/webrtc/voice_engine/voe_volume_control_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_volume_control_impl.cc
index 62f709f76de..f27c4ffca19 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_volume_control_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_volume_control_impl.cc
@@ -54,7 +54,6 @@ int VoEVolumeControlImpl::SetSpeakerVolume(unsigned int volume)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetSpeakerVolume(volume=%u)", volume);
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -96,7 +95,6 @@ int VoEVolumeControlImpl::GetSpeakerVolume(unsigned int& volume)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetSpeakerVolume()");
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -131,56 +129,10 @@ int VoEVolumeControlImpl::GetSpeakerVolume(unsigned int& volume)
return 0;
}
-int VoEVolumeControlImpl::SetSystemOutputMute(bool enable)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetSystemOutputMute(enabled=%d)", enable);
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- if (_shared->audio_device()->SetSpeakerMute(enable) != 0)
- {
- _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
- "SpeakerMute() unable to Set speaker mute");
- return -1;
- }
-
- return 0;
-}
-
-int VoEVolumeControlImpl::GetSystemOutputMute(bool& enabled)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetSystemOutputMute(enabled=?)");
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- if (_shared->audio_device()->SpeakerMute(&enabled) != 0)
- {
- _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
- "SpeakerMute() unable to get speaker mute state");
- return -1;
- }
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- "GetSystemOutputMute() => %d", enabled);
- return 0;
-}
-
int VoEVolumeControlImpl::SetMicVolume(unsigned int volume)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetMicVolume(volume=%u)", volume);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -238,8 +190,6 @@ int VoEVolumeControlImpl::GetMicVolume(unsigned int& volume)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetMicVolume()");
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -294,20 +244,16 @@ int VoEVolumeControlImpl::SetInputMute(int channel, bool enable)
// Mute before demultiplexing <=> affects all channels
return _shared->transmit_mixer()->SetMute(enable);
}
- else
+ // Mute after demultiplexing <=> affects one channel only
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+ voe::Channel* channelPtr = ch.channel();
+ if (channelPtr == NULL)
{
- // Mute after demultiplexing <=> affects one channel only
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetInputMute() failed to locate channel");
- return -1;
- }
- return channelPtr->SetMute(enable);
+ _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+ "SetInputMute() failed to locate channel");
+ return -1;
}
- return 0;
+ return channelPtr->SetMute(enable);
}
int VoEVolumeControlImpl::GetInputMute(int channel, bool& enabled)
@@ -342,50 +288,6 @@ int VoEVolumeControlImpl::GetInputMute(int channel, bool& enabled)
return 0;
}
-int VoEVolumeControlImpl::SetSystemInputMute(bool enable)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "SetSystemInputMute(enabled=%d)", enable);
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- if (_shared->audio_device()->SetMicrophoneMute(enable) != 0)
- {
- _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
- "MicrophoneMute() unable to set microphone mute state");
- return -1;
- }
-
- return 0;
-}
-
-int VoEVolumeControlImpl::GetSystemInputMute(bool& enabled)
-{
- WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
- "GetSystemInputMute(enabled=?)");
-
- if (!_shared->statistics().Initialized())
- {
- _shared->SetLastError(VE_NOT_INITED, kTraceError);
- return -1;
- }
-
- if (_shared->audio_device()->MicrophoneMute(&enabled) != 0)
- {
- _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
- "MicrophoneMute() unable to get microphone mute state");
- return -1;
- }
- WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
- VoEId(_shared->instance_id(), -1),
- "GetSystemInputMute() => %d", enabled);
- return 0;
-}
-
int VoEVolumeControlImpl::GetSpeechInputLevel(unsigned int& level)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
@@ -409,7 +311,7 @@ int VoEVolumeControlImpl::GetSpeechOutputLevel(int channel,
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetSpeechOutputLevel(channel=%d, level=?)", channel);
-
+
if (!_shared->statistics().Initialized())
{
_shared->SetLastError(VE_NOT_INITED, kTraceError);
@@ -542,8 +444,6 @@ int VoEVolumeControlImpl::SetOutputVolumePan(int channel,
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetOutputVolumePan(channel=%d, left=%2.1f, right=%2.1f)",
channel, left, right);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -574,20 +474,16 @@ int VoEVolumeControlImpl::SetOutputVolumePan(int channel,
// Master balance (affects the signal after output mixing)
return _shared->output_mixer()->SetOutputVolumePan(left, right);
}
- else
+ // Per-channel balance (affects the signal before output mixing)
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+ voe::Channel* channelPtr = ch.channel();
+ if (channelPtr == NULL)
{
- // Per-channel balance (affects the signal before output mixing)
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "SetOutputVolumePan() failed to locate channel");
- return -1;
- }
- return channelPtr->SetOutputVolumePan(left, right);
+ _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+ "SetOutputVolumePan() failed to locate channel");
+ return -1;
}
- return 0;
+ return channelPtr->SetOutputVolumePan(left, right);
}
int VoEVolumeControlImpl::GetOutputVolumePan(int channel,
@@ -596,8 +492,6 @@ int VoEVolumeControlImpl::GetOutputVolumePan(int channel,
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetOutputVolumePan(channel=%d, left=?, right=?)", channel);
- ANDROID_NOT_SUPPORTED(_shared->statistics());
- IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
@@ -618,19 +512,15 @@ int VoEVolumeControlImpl::GetOutputVolumePan(int channel,
{
return _shared->output_mixer()->GetOutputVolumePan(left, right);
}
- else
+ voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+ voe::Channel* channelPtr = ch.channel();
+ if (channelPtr == NULL)
{
- voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
- voe::Channel* channelPtr = ch.channel();
- if (channelPtr == NULL)
- {
- _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
- "GetOutputVolumePan() failed to locate channel");
- return -1;
- }
- return channelPtr->GetOutputVolumePan(left, right);
+ _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+ "GetOutputVolumePan() failed to locate channel");
+ return -1;
}
- return 0;
+ return channelPtr->GetOutputVolumePan(left, right);
}
#endif // #ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
diff --git a/chromium/third_party/webrtc/voice_engine/voe_volume_control_impl.h b/chromium/third_party/webrtc/voice_engine/voe_volume_control_impl.h
index cdbc4280e4f..b5e3b1b02d6 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_volume_control_impl.h
+++ b/chromium/third_party/webrtc/voice_engine/voe_volume_control_impl.h
@@ -24,10 +24,6 @@ public:
virtual int GetSpeakerVolume(unsigned int& volume);
- virtual int SetSystemOutputMute(bool enable);
-
- virtual int GetSystemOutputMute(bool& enabled);
-
virtual int SetMicVolume(unsigned int volume);
virtual int GetMicVolume(unsigned int& volume);
@@ -36,10 +32,6 @@ public:
virtual int GetInputMute(int channel, bool& enabled);
- virtual int SetSystemInputMute(bool enable);
-
- virtual int GetSystemInputMute(bool& enabled);
-
virtual int GetSpeechInputLevel(unsigned int& level);
virtual int GetSpeechOutputLevel(int channel, unsigned int& level);
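// Illustration (not part of this patch): the two SetInputMute() paths kept
// by the refactoring above. |volume| is assumed to be a VoEVolumeControl
// interface obtained via GetInterface(); passing -1 as the channel is
// assumed to select the mute-before-demultiplexing branch handled by the
// transmit mixer.
volume->SetInputMute(-1, true);       // affects all channels
volume->SetInputMute(channel, true);  // affects one channel only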
diff --git a/chromium/third_party/webrtc/voice_engine/voice_engine.gyp b/chromium/third_party/webrtc/voice_engine/voice_engine.gyp
index 8a06e86952f..19342c3a93a 100644
--- a/chromium/third_party/webrtc/voice_engine/voice_engine.gyp
+++ b/chromium/third_party/webrtc/voice_engine/voice_engine.gyp
@@ -20,6 +20,7 @@
'<(webrtc_root)/modules/modules.gyp:audio_conference_mixer',
'<(webrtc_root)/modules/modules.gyp:audio_device',
'<(webrtc_root)/modules/modules.gyp:audio_processing',
+ '<(webrtc_root)/modules/modules.gyp:bitrate_controller',
'<(webrtc_root)/modules/modules.gyp:media_file',
'<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
'<(webrtc_root)/modules/modules.gyp:webrtc_utility',
@@ -31,10 +32,8 @@
'../typedefs.h',
'include/voe_audio_processing.h',
'include/voe_base.h',
- 'include/voe_call_report.h',
'include/voe_codec.h',
'include/voe_dtmf.h',
- 'include/voe_encryption.h',
'include/voe_errors.h',
'include/voe_external_media.h',
'include/voe_file.h',
@@ -58,8 +57,6 @@
'monitor_module.h',
'output_mixer.cc',
'output_mixer.h',
- 'output_mixer_internal.cc',
- 'output_mixer_internal.h',
'shared_data.cc',
'shared_data.h',
'statistics.cc',
@@ -72,14 +69,10 @@
'voe_audio_processing_impl.h',
'voe_base_impl.cc',
'voe_base_impl.h',
- 'voe_call_report_impl.cc',
- 'voe_call_report_impl.h',
'voe_codec_impl.cc',
'voe_codec_impl.h',
'voe_dtmf_impl.cc',
'voe_dtmf_impl.h',
- 'voe_encryption_impl.cc',
- 'voe_encryption_impl.h',
'voe_external_media_impl.cc',
'voe_external_media_impl.h',
'voe_file_impl.cc',
@@ -129,17 +122,16 @@
],
'sources': [
'channel_unittest.cc',
- 'output_mixer_unittest.cc',
'transmit_mixer_unittest.cc',
+ 'utility_unittest.cc',
'voe_audio_processing_unittest.cc',
'voe_base_unittest.cc',
'voe_codec_unittest.cc',
- 'voe_neteq_stats_unittest.cc',
],
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -155,7 +147,7 @@
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
- '<(webrtc_root)/test/libtest/libtest.gyp:libtest',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/test/test.gyp:test_support',
],
@@ -171,25 +163,22 @@
'test/auto_test/fixtures/after_streaming_fixture.h',
'test/auto_test/fixtures/before_initialization_fixture.cc',
'test/auto_test/fixtures/before_initialization_fixture.h',
- 'test/auto_test/fuzz/rtp_fuzz_test.cc',
+ 'test/auto_test/fixtures/before_streaming_fixture.cc',
+ 'test/auto_test/fixtures/before_streaming_fixture.h',
'test/auto_test/standard/audio_processing_test.cc',
- 'test/auto_test/standard/call_report_test.cc',
'test/auto_test/standard/codec_before_streaming_test.cc',
'test/auto_test/standard/codec_test.cc',
'test/auto_test/standard/dtmf_test.cc',
- 'test/auto_test/standard/encryption_test.cc',
'test/auto_test/standard/external_media_test.cc',
'test/auto_test/standard/file_before_streaming_test.cc',
'test/auto_test/standard/file_test.cc',
'test/auto_test/standard/hardware_before_initializing_test.cc',
'test/auto_test/standard/hardware_before_streaming_test.cc',
'test/auto_test/standard/hardware_test.cc',
- 'test/auto_test/standard/manual_hold_test.cc',
'test/auto_test/standard/mixing_test.cc',
'test/auto_test/standard/neteq_stats_test.cc',
- 'test/auto_test/standard/neteq_test.cc',
- 'test/auto_test/standard/network_test.cc',
'test/auto_test/standard/rtp_rtcp_before_streaming_test.cc',
+ 'test/auto_test/standard/rtp_rtcp_extensions.cc',
'test/auto_test/standard/rtp_rtcp_test.cc',
'test/auto_test/standard/voe_base_misc_test.cc',
'test/auto_test/standard/video_sync_test.cc',
@@ -197,16 +186,12 @@
'test/auto_test/resource_manager.cc',
'test/auto_test/voe_cpu_test.cc',
'test/auto_test/voe_cpu_test.h',
- 'test/auto_test/voe_extended_test.cc',
- 'test/auto_test/voe_extended_test.h',
'test/auto_test/voe_standard_test.cc',
'test/auto_test/voe_standard_test.h',
'test/auto_test/voe_stress_test.cc',
'test/auto_test/voe_stress_test.h',
'test/auto_test/voe_test_defines.h',
'test/auto_test/voe_test_interface.h',
- 'test/auto_test/voe_unit_test.cc',
- 'test/auto_test/voe_unit_test.h',
],
'conditions': [
['OS=="android"', {
@@ -230,6 +215,7 @@
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/test/test.gyp:test_support',
],
@@ -287,7 +273,7 @@
}],
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'targets': [
{
'target_name': 'voice_engine_unittests_apk_target',
diff --git a/chromium/third_party/webrtc/voice_engine/voice_engine_defines.h b/chromium/third_party/webrtc/voice_engine/voice_engine_defines.h
index 8e5b24fb605..b3cba7c3307 100644
--- a/chromium/third_party/webrtc/voice_engine/voice_engine_defines.h
+++ b/chromium/third_party/webrtc/voice_engine/voice_engine_defines.h
@@ -27,6 +27,10 @@
namespace webrtc {
+// Internal buffer size required for mono audio, based on the highest sample
+// rate voice engine supports (10 ms of audio at 192 kHz).
+static const int kMaxMonoDataSizeSamples = 1920;
+
// VolumeControl
enum { kMinVolumeLevel = 0 };
enum { kMaxVolumeLevel = 255 };
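// Worked check (illustration only, not part of this patch): 10 ms of mono
// audio at the highest supported rate, 192 kHz, is
// 192000 samples/s * 0.010 s = 1920 samples, which matches
// kMaxMonoDataSizeSamples above.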
@@ -279,30 +283,6 @@ inline int VoEChannelId(int moduleId)
// Default device for Linux and Android
#define WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE 0
-#ifdef ANDROID
-
-// ----------------------------------------------------------------------------
-// Defines
-// ----------------------------------------------------------------------------
-
- // Always excluded for Android builds
- #undef WEBRTC_CODEC_ISAC
- #undef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
-
- #define ANDROID_NOT_SUPPORTED(stat) NOT_SUPPORTED(stat)
-
-#else // LINUX PC
-
-// ----------------------------------------------------------------------------
-// Defines
-// ----------------------------------------------------------------------------
-
- #define ANDROID_NOT_SUPPORTED(stat)
-
-#endif // ANDROID - LINUX PC
-
-#else
-#define ANDROID_NOT_SUPPORTED(stat)
#endif // #ifdef WEBRTC_LINUX
// *** WEBRTC_MAC ***
@@ -359,35 +339,6 @@ inline int VoEChannelId(int moduleId)
// Default device for Mac and iPhone
#define WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE 0
-
-// iPhone specific
-#if defined(WEBRTC_IOS)
-
-// ----------------------------------------------------------------------------
-// Defines
-// ----------------------------------------------------------------------------
-
- // Always excluded for iPhone builds
- #undef WEBRTC_CODEC_ISAC
- #undef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
-
- #define IPHONE_NOT_SUPPORTED(stat) NOT_SUPPORTED(stat)
-
-#else // Non-iPhone
-
-// ----------------------------------------------------------------------------
-// Enumerators
-// ----------------------------------------------------------------------------
-
-// ----------------------------------------------------------------------------
-// Defines
-// ----------------------------------------------------------------------------
-
- #define IPHONE_NOT_SUPPORTED(stat)
-#endif
-
-#else
-#define IPHONE_NOT_SUPPORTED(stat)
#endif // #ifdef WEBRTC_MAC
#endif // WEBRTC_VOICE_ENGINE_VOICE_ENGINE_DEFINES_H
diff --git a/chromium/third_party/webrtc/voice_engine/voice_engine_impl.cc b/chromium/third_party/webrtc/voice_engine/voice_engine_impl.cc
index 038609cab07..e7ae5de3c35 100644
--- a/chromium/third_party/webrtc/voice_engine/voice_engine_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voice_engine_impl.cc
@@ -12,9 +12,11 @@
#include "webrtc/modules/audio_device/android/audio_device_template.h"
#include "webrtc/modules/audio_device/android/audio_record_jni.h"
#include "webrtc/modules/audio_device/android/audio_track_jni.h"
+#if !defined(WEBRTC_CHROMIUM_BUILD)
#include "webrtc/modules/audio_device/android/opensles_input.h"
#include "webrtc/modules/audio_device/android/opensles_output.h"
#endif
+#endif
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
#include "webrtc/system_wrappers/interface/trace.h"
@@ -70,6 +72,12 @@ int VoiceEngineImpl::Release() {
"VoiceEngineImpl self deleting (voiceEngine=0x%p)",
this);
+ // Clear any pointers before starting destruction. Otherwise worker-
+ // threads will still have pointers to a partially destructed object.
+ // Example: AudioDeviceBuffer::RequestPlayoutData() can access a
+  // partially destructed |_ptrCbAudioTransport| during destruction
+ // if we don't call Terminate here.
+ Terminate();
delete this;
}
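Calling Terminate() before `delete this` matters because worker threads keep raw pointers into the object being torn down; stopping those threads and clearing the pointers first closes the race the comment describes. A hedged sketch of that shutdown ordering, with made-up types standing in for VoiceEngineImpl and its audio transport:

#include <atomic>
#include <thread>

// Illustrative shutdown ordering: stop the worker and drop shared state
// before the destructor runs, so the thread never touches a half-destroyed
// object. Engine and Run() are stand-ins, not the real voice engine classes.
class Engine {
 public:
  Engine() : running_(true), worker_([this] { Run(); }) {}

  void Release() {
    Terminate();  // stop callbacks/threads first (the patched behaviour)
    delete this;  // only now is destruction safe
  }

 private:
  ~Engine() = default;

  void Terminate() {
    running_ = false;
    if (worker_.joinable()) worker_.join();
  }

  void Run() {
    while (running_) {
      std::this_thread::yield();  // would pull audio via a transport callback
    }
  }

  std::atomic<bool> running_;
  std::thread worker_;
};

int main() {
  (new Engine())->Release();
  return 0;
}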
@@ -78,8 +86,6 @@ int VoiceEngineImpl::Release() {
VoiceEngine* VoiceEngine::Create() {
Config* config = new Config();
- config->Set<AudioCodingModuleFactory>(new AudioCodingModuleFactory());
-
return GetVoiceEngine(config, true);
}
@@ -145,19 +151,27 @@ bool VoiceEngine::Delete(VoiceEngine*& voiceEngine)
return true;
}
+#if !defined(WEBRTC_CHROMIUM_BUILD)
int VoiceEngine::SetAndroidObjects(void* javaVM, void* env, void* context)
{
#ifdef WEBRTC_ANDROID
#ifdef WEBRTC_ANDROID_OPENSLES
- return AudioDeviceTemplate<OpenSlesInput, OpenSlesOutput>::
- SetAndroidAudioDeviceObjects(javaVM, env, context);
+ typedef AudioDeviceTemplate<OpenSlesInput, OpenSlesOutput>
+ AudioDeviceInstance;
#else
- return AudioDeviceTemplate<AudioRecordJni, AudioTrackJni>::
- SetAndroidAudioDeviceObjects(javaVM, env, context);
+ typedef AudioDeviceTemplate<AudioRecordJni, AudioTrackJni>
+ AudioDeviceInstance;
#endif
+ if (javaVM && env && context) {
+ AudioDeviceInstance::SetAndroidAudioDeviceObjects(javaVM, env, context);
+ } else {
+ AudioDeviceInstance::ClearAndroidAudioDeviceObjects();
+ }
+ return 0;
#else
return -1;
#endif
}
+#endif
} // namespace webrtc
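The reworked SetAndroidObjects() above folds set-up and tear-down into one entry point: non-null JavaVM/env/context installs the Android audio device objects, all-null clears them. A small sketch of that set-or-clear convention, with FakeAudioDevice standing in for the AudioDeviceTemplate instantiation selected by WEBRTC_ANDROID_OPENSLES:

#include <cassert>

// Stand-in for the AudioDeviceTemplate<...> typedef in the patch; it only
// records whether Android objects are currently installed.
struct FakeAudioDevice {
  static void* vm;
  static void SetAndroidAudioDeviceObjects(void* javaVM, void* env, void* context) {
    (void)env; (void)context;  // a real device would stash these for JNI use
    vm = javaVM;
  }
  static void ClearAndroidAudioDeviceObjects() { vm = nullptr; }
};
void* FakeAudioDevice::vm = nullptr;

// Mirrors the set-or-clear behaviour of the patched entry point.
int SetAndroidObjects(void* javaVM, void* env, void* context) {
  if (javaVM && env && context) {
    FakeAudioDevice::SetAndroidAudioDeviceObjects(javaVM, env, context);
  } else {
    FakeAudioDevice::ClearAndroidAudioDeviceObjects();
  }
  return 0;
}

int main() {
  int vm = 0, env = 0, ctx = 0;
  SetAndroidObjects(&vm, &env, &ctx);            // install on startup
  assert(FakeAudioDevice::vm != nullptr);
  SetAndroidObjects(nullptr, nullptr, nullptr);  // tear down on shutdown
  assert(FakeAudioDevice::vm == nullptr);
  return 0;
}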
diff --git a/chromium/third_party/webrtc/voice_engine/voice_engine_impl.h b/chromium/third_party/webrtc/voice_engine/voice_engine_impl.h
index fe6a7915ac5..1d2b9c5efd7 100644
--- a/chromium/third_party/webrtc/voice_engine/voice_engine_impl.h
+++ b/chromium/third_party/webrtc/voice_engine/voice_engine_impl.h
@@ -18,18 +18,12 @@
#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
#include "webrtc/voice_engine/voe_audio_processing_impl.h"
#endif
-#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
-#include "webrtc/voice_engine/voe_call_report_impl.h"
-#endif
#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
#include "webrtc/voice_engine/voe_codec_impl.h"
#endif
#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
#include "webrtc/voice_engine/voe_dtmf_impl.h"
#endif
-#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
-#include "webrtc/voice_engine/voe_encryption_impl.h"
-#endif
#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
#include "webrtc/voice_engine/voe_external_media_impl.h"
#endif
@@ -61,18 +55,12 @@ class VoiceEngineImpl : public voe::SharedData, // Must be the first base class
#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
public VoEAudioProcessingImpl,
#endif
-#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
- public VoECallReportImpl,
-#endif
#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
public VoECodecImpl,
#endif
#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
public VoEDtmfImpl,
#endif
-#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
- public VoEEncryptionImpl,
-#endif
#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
public VoEExternalMediaImpl,
#endif
@@ -103,18 +91,12 @@ public:
#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
VoEAudioProcessingImpl(this),
#endif
-#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
- VoECallReportImpl(this),
-#endif
#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
VoECodecImpl(this),
#endif
#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
VoEDtmfImpl(this),
#endif
-#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
- VoEEncryptionImpl(this),
-#endif
#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
VoEExternalMediaImpl(this),
#endif
diff --git a/chromium/third_party/webrtc/voice_engine/voice_engine_unittests.isolate b/chromium/third_party/webrtc/voice_engine/voice_engine_unittests.isolate
index c350bce8fbb..02356804b90 100644
--- a/chromium/third_party/webrtc/voice_engine/voice_engine_unittests.isolate
+++ b/chromium/third_party/webrtc/voice_engine/voice_engine_unittests.isolate
@@ -8,27 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/voice_engine_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/voice_engine_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/webrtc.gyp b/chromium/third_party/webrtc/webrtc.gyp
index ed101b66246..d50552d9cf0 100644
--- a/chromium/third_party/webrtc/webrtc.gyp
+++ b/chromium/third_party/webrtc/webrtc.gyp
@@ -19,6 +19,8 @@
],
'variables': {
'webrtc_all_dependencies': [
+ 'base/base.gyp:*',
+ 'common.gyp:*',
'common_audio/common_audio.gyp:*',
'common_video/common_video.gyp:*',
'modules/modules.gyp:*',
@@ -39,10 +41,12 @@
'conditions': [
['include_tests==1', {
'dependencies': [
+ 'base/base_tests.gyp:*',
'common_video/common_video_unittests.gyp:*',
'system_wrappers/source/system_wrappers_tests.gyp:*',
'test/metrics.gyp:*',
'test/test.gyp:*',
+ 'test/webrtc_test_common.gyp:webrtc_test_common_unittests',
'tools/tools.gyp:*',
'webrtc_tests',
],
@@ -72,6 +76,7 @@
'<@(webrtc_video_sources)',
],
'dependencies': [
+ 'common.gyp:*',
'<@(webrtc_video_dependencies)',
],
},
diff --git a/chromium/third_party/webrtc/webrtc_examples.gyp b/chromium/third_party/webrtc/webrtc_examples.gyp
index 84ac3ba3462..56a0e9efd69 100644
--- a/chromium/third_party/webrtc/webrtc_examples.gyp
+++ b/chromium/third_party/webrtc/webrtc_examples.gyp
@@ -7,7 +7,6 @@
# be found in the AUTHORS file in the root of the source tree.
{
'includes': ['build/common.gypi'],
-
'conditions': [
['OS=="android"', {
'targets': [
@@ -17,6 +16,7 @@
'dependencies': [
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
'<(webrtc_root)/modules/modules.gyp:*',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
@@ -51,6 +51,7 @@
'action_name': 'build_webrtcdemo_apk',
'variables': {
'android_webrtc_demo_root': '<(webrtc_root)/examples/android/media_demo',
+ 'ant_log': '../../../<(INTERMEDIATE_DIR)/ant.log', # ../../.. to compensate for the cd below.
},
'inputs' : [
'<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
@@ -64,18 +65,84 @@
'<(android_webrtc_demo_root)/project.properties',
],
'outputs': ['<(PRODUCT_DIR)/WebRTCDemo-debug.apk'],
- 'action': ['bash', '-ec',
- 'rm -fr <(_outputs) <(android_webrtc_demo_root)/{bin,libs} && '
- 'mkdir -p <(android_webrtc_demo_root)/libs/<(android_app_abi) && '
- 'cp <(PRODUCT_DIR)/lib.java/audio_device_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
- 'cp <(PRODUCT_DIR)/lib.java/video_capture_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
- 'cp <(PRODUCT_DIR)/lib.java/video_render_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
- '<(android_strip) -o <(android_webrtc_demo_root)/libs/<(android_app_abi)/libwebrtcdemo-jni.so <(PRODUCT_DIR)/libwebrtcdemo-jni.so && '
- 'cd <(android_webrtc_demo_root) && '
- 'ant debug && '
- 'cd - && '
- 'cp <(android_webrtc_demo_root)/bin/WebRTCDemo-debug.apk <(_outputs)'
- ],
+ 'action': [
+ 'bash', '-ec',
+ 'rm -fr <(_outputs) <(android_webrtc_demo_root)/{bin,libs} && '
+ 'mkdir -p <(INTERMEDIATE_DIR) && ' # Must happen _before_ the cd below
+ 'mkdir -p <(android_webrtc_demo_root)/libs/<(android_app_abi) && '
+ 'cp <(PRODUCT_DIR)/lib.java/audio_device_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
+ 'cp <(PRODUCT_DIR)/lib.java/video_capture_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
+ 'cp <(PRODUCT_DIR)/lib.java/video_render_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
+ '<(android_strip) -o <(android_webrtc_demo_root)/libs/<(android_app_abi)/libwebrtcdemo-jni.so <(PRODUCT_DIR)/libwebrtcdemo-jni.so && '
+ 'cd <(android_webrtc_demo_root) && '
+ '{ ANDROID_SDK_ROOT=<(android_sdk_root) '
+ 'ant debug > <(ant_log) 2>&1 || '
+ ' { cat <(ant_log) ; exit 1; } } && '
+ 'cd - > /dev/null && '
+ 'cp <(android_webrtc_demo_root)/bin/WebRTCDemo-debug.apk <(_outputs)'
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'libopensl-demo-jni',
+ 'type': 'loadable_module',
+ 'dependencies': [
+ '<(webrtc_root)/modules/modules.gyp:audio_device',
+ ],
+ 'sources': [
+ 'examples/android/opensl_loopback/jni/opensl_runner.cc',
+ 'examples/android/opensl_loopback/fake_audio_device_buffer.cc',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-llog',
+ '-lOpenSLES',
+ ],
+ },
+ },
+ {
+ 'target_name': 'OpenSlDemo',
+ 'type': 'none',
+ 'dependencies': [
+ 'libopensl-demo-jni',
+ '<(modules_java_gyp_path):*',
+ ],
+ 'actions': [
+ {
+ # TODO(henrik): Convert the demo build into a proper GYP
+ # target so this action is not needed once Chromium's
+ # apk-building machinery can be used. (crbug.com/225101)
+ 'action_name': 'build_opensldemo_apk',
+ 'variables': {
+ 'android_opensl_demo_root': '<(webrtc_root)/examples/android/opensl_loopback',
+ 'ant_log': '../../../<(INTERMEDIATE_DIR)/ant.log', # ../../.. to compensate for the cd below.
+ },
+ 'inputs' : [
+ '<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
+ '<(PRODUCT_DIR)/libopensl-demo-jni.so',
+ '<!@(find <(android_opensl_demo_root)/src -name "*.java")',
+ '<!@(find <(android_opensl_demo_root)/res -name "*.xml")',
+ '<!@(find <(android_opensl_demo_root)/res -name "*.png")',
+ '<(android_opensl_demo_root)/AndroidManifest.xml',
+ '<(android_opensl_demo_root)/build.xml',
+ '<(android_opensl_demo_root)/project.properties',
+ ],
+ 'outputs': ['<(PRODUCT_DIR)/OpenSlDemo-debug.apk'],
+ 'action': [
+ 'bash', '-ec',
+ 'rm -f <(_outputs) && '
+ 'mkdir -p <(android_opensl_demo_root)/libs/<(android_app_abi) && '
+ 'mkdir -p <(INTERMEDIATE_DIR) && ' # Must happen _before_ the cd below
+ '<(android_strip) -o <(android_opensl_demo_root)/libs/<(android_app_abi)/libopensl-demo-jni.so <(PRODUCT_DIR)/libopensl-demo-jni.so && '
+ 'cp <(PRODUCT_DIR)/lib.java/audio_device_module_java.jar <(android_opensl_demo_root)/libs/ &&'
+ 'cd <(android_opensl_demo_root) && '
+ '{ ANDROID_SDK_ROOT=<(android_sdk_root) '
+ 'ant debug > <(ant_log) 2>&1 || '
+ ' { cat <(ant_log) ; exit 1; } } && '
+ 'cd - > /dev/null && '
+ 'cp <(android_opensl_demo_root)/bin/OpenSlDemo-debug.apk <(_outputs)'
+ ],
},
],
},
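Both apk actions above rely on the same shell idiom: run ant with stdout and stderr redirected to a per-target log, and dump that log only if the build fails, so green builds stay quiet while red ones show the full error. An analogous pattern in C++ (the command and log path are placeholders, not anything from the patch):

#include <cstdlib>
#include <fstream>
#include <iostream>
#include <string>

// Run a noisy command with its output captured to a log file; print the log
// only when the command fails, mirroring
// `ant debug > ant.log 2>&1 || { cat ant.log; exit 1; }`.
int RunLogged(const std::string& command, const std::string& log_path) {
  const std::string wrapped = command + " > " + log_path + " 2>&1";
  const int status = std::system(wrapped.c_str());
  if (status != 0) {
    std::ifstream log(log_path);
    if (log) std::cout << log.rdbuf();  // surface captured output on failure
  }
  return status;
}

int main() {
  return RunLogged("echo building...", "build.log") == 0 ? 0 : 1;
}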
diff --git a/chromium/third_party/webrtc/webrtc_perf_tests.isolate b/chromium/third_party/webrtc/webrtc_perf_tests.isolate
index 08f5cbafe66..b39c83df34c 100644
--- a/chromium/third_party/webrtc/webrtc_perf_tests.isolate
+++ b/chromium/third_party/webrtc/webrtc_perf_tests.isolate
@@ -8,31 +8,27 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../data/',
- '../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../testing/test_env.py',
'<(PRODUCT_DIR)/webrtc_perf_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../DEPS',
- '../resources/foreman_cif.yuv',
- '../resources/paris_qcif.yuv',
- '../resources/voice_engine/audio_long16.pcm',
- '../testing/test_env.py',
+ '<(DEPTH)/DEPS',
+ '<(DEPTH)/resources/foreman_cif.yuv',
+ '<(DEPTH)/resources/paris_qcif.yuv',
+ '<(DEPTH)/resources/voice_engine/audio_long16.pcm',
'<(PRODUCT_DIR)/webrtc_perf_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/webrtc_tests.gypi b/chromium/third_party/webrtc/webrtc_tests.gypi
index 0d2b30ee184..3c1dca69e3f 100644
--- a/chromium/third_party/webrtc/webrtc_tests.gypi
+++ b/chromium/third_party/webrtc/webrtc_tests.gypi
@@ -20,12 +20,24 @@
'target_name': 'video_loopback',
'type': 'executable',
'sources': [
+ 'test/mac/run_test.mm',
+ 'test/run_test.cc',
+ 'test/run_test.h',
'video/loopback.cc',
- 'test/test_main.cc',
+ ],
+ 'conditions': [
+ ['OS=="mac"', {
+ 'sources!': [
+ 'test/run_test.cc',
+ ],
+ }],
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'test/webrtc_test_common.gyp:webrtc_test_common',
+ 'test/webrtc_test_common.gyp:webrtc_test_renderer',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'webrtc',
],
},
@@ -35,42 +47,63 @@
'sources': [
'video/bitrate_estimator_tests.cc',
'video/call_tests.cc',
+ 'video/send_statistics_proxy_unittest.cc',
'video/video_send_stream_tests.cc',
'test/common_unittest.cc',
'test/testsupport/metrics/video_metrics_unittest.cc',
- 'test/test_main.cc',
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'modules/modules.gyp:rtp_rtcp',
'test/metrics.gyp:metrics',
'test/webrtc_test_common.gyp:webrtc_test_common',
+ 'test/test.gyp:test_main',
+ 'test/webrtc_test_common.gyp:webrtc_test_video_render_dependencies',
'webrtc',
],
+ 'conditions': [
+ # TODO(henrike): remove build_with_chromium==1 when the bots are
+ # using Chromium's buildbots.
+ ['build_with_chromium==1 and OS=="android"', {
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ ],
+ }],
+ ],
},
{
'target_name': 'webrtc_perf_tests',
'type': '<(gtest_target_type)',
'sources': [
- 'test/test_main.cc',
+ 'modules/audio_coding/neteq/test/neteq_performance_unittest.cc',
'video/call_perf_tests.cc',
'video/full_stack.cc',
'video/rampup_tests.cc',
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ 'modules/modules.gyp:neteq_test_support', # Needed by neteq_performance_unittest.
'modules/modules.gyp:rtp_rtcp',
'test/webrtc_test_common.gyp:webrtc_test_common',
+ 'test/test.gyp:test_main',
+ 'test/webrtc_test_common.gyp:webrtc_test_video_render_dependencies',
'webrtc',
],
+ 'conditions': [
+ # TODO(henrike): remove build_with_chromium==1 when the bots are
+ # using Chromium's buildbots.
+ ['build_with_chromium==1 and OS=="android"', {
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ ],
+ }],
+ ],
},
],
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'targets': [
{
'target_name': 'video_engine_tests_apk_target',