jami-daemon issueshttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues2021-09-02T15:12:32Zhttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/571SIP Account - use account setting to enable/disable SRTP2021-09-02T15:12:32ZMohamed ChibaniSIP Account - use account setting to enable/disable SRTPMohamed ChibaniMohamed Chibanihttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/570Crash in PJSIP on SDP negotiation failure2021-07-21T18:13:20ZAdrien BéraudCrash in PJSIP on SDP negotiation failureThe crash happens if the SDP negotiation fail. The crash can be reproduced by answering to an incoming call with all media disabled. There is a test case (peer_disable_media) in the unit test sip_base_calls.cpp that reproduces the crash....The crash happens if the SDP negotiation fail. The crash can be reproduced by answering to an incoming call with all media disabled. There is a test case (peer_disable_media) in the unit test sip_base_calls.cpp that reproduces the crash.
[crash_pjsip.txt](/uploads/5267262ca4283ff78c6626c4b00c7845/crash_pjsip.txt)Olivier DionOlivier Dionhttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/569Crash in jamiaccount cacheSIPConnection2021-06-22T16:27:35ZSébastien BlinCrash in jamiaccount cacheSIPConnection```
1623963179.676|45271|tls_session.cpp :1529 ] [TLS] shutdown
[LWP 176344 exited]
[LWP 176343 exited]
--Type <RET> for more, q to quit, c to continue without paging--
Thread 28 "dring" received signal SIGSEGV, Segmentation fault.
[S...```
1623963179.676|45271|tls_session.cpp :1529 ] [TLS] shutdown
[LWP 176344 exited]
[LWP 176343 exited]
--Type <RET> for more, q to quit, c to continue without paging--
Thread 28 "dring" received signal SIGSEGV, Segmentation fault.
[Switching to LWP 169557]
0x0000555555837095 in std::weak_ptr<jami::SIPAccountBase, (gnu_cxx::_Lock_policy)2>::operator=<jami::SIPAccountBase> (this=0x78, r=...)
at /usr/include/c++/10/bits/shared_ptr_base.h:1690
1690 Mptr = r.Mptr;
(gdb) bt
#0 0x0000555555837095 in std::weak_ptr<jami::SIPAccountBase, (gnu_cxx::_Lock_policy)2>::operator=<jami::SIPAccountBase> (this=0x78, __r=...)
at /usr/include/c++/10/bits/shared_ptr_base.h:1690
#1 0x00005555558256ef in std::weak_ptr<jami::SIPAccountBase>::operator=<jami::SIPAccountBase> (this=0x78, __r=...) at /usr/include/c++/10/bits/shared_ptr.h:732
#2 0x000055555581c1d3 in jami::SipTransport::setAccount (this=0x0, account=...) at /home/cyrille/ring-daemon/src/sip/siptransport.h:118
#3 0x00005555557e2f1f in jami::JamiAccount::cacheSIPConnection (this=0x7fffd0000b70, socket=..., peerId=..., deviceId=...) at jamiaccount.cpp:4798
#4 0x00005555557cc9ca in operator() (__closure=0x7fffe40949c0, deviceId=..., name=..., channel=...) at jamiaccount.cpp:2361
#5 0x0000555555809bcd in std::invoke_impl<void, jami::JamiAccount::doRegister_()::<lambda(const DeviceId&, const string&, std::shared_ptr<jami::ChannelSocket>)>&, const dht::Hash<20>&, const std::cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >&, std::shared_ptr<jami::ChannelSocket> >(std::invoke_other, struct {...} &)
(f=...) at /usr/include/c++/10/bits/invoke.h:60
#6 0x00005555557fee94 in std::invoke_r<void, jami::JamiAccount::doRegister_()::<lambda(const DeviceId&, const string&, std::shared_ptr<jami::ChannelSocket>)>&, const dht::Hash<20>&, const std::cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >&, std::shared_ptr<jami::ChannelSocket> >(struct {...} &) (__fn=...)
at /usr/include/c++/10/bits/invoke.h:110
#7 0x00005555557f6029 in std::Function_handler<void(const dht::Hash<20>&, const std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >&, std::shared_ptr<jami::ChannelSocket>), jami::JamiAccount::doRegister()::<lambda(const DeviceId&, const string&, std::shared_ptr<jami::ChannelSocket>)> >::Minvoke(const std::_Any_data &, const dht::Hash<20> &, const std::cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > &, std::shared_ptr<jami::ChannelSocket> &&) (functor=...,
args#0=..., args#1=..., __args#2=...) at /usr/include/c++/10/bits/std_function.h:291
#8 0x0000555555c42740 in std::function<void (dht::Hash<20ul> const&, std::cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::shared_ptr<jami::ChannelSocket>)>::operator()(dht::Hash<20ul> const&, std::cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::shared_ptr<jami::ChannelSocket>) const (this=0x7fffe40949c0, args#0=..., args#1=..., __args#2=...) at /usr/include/c++/10/bits/std_function.h:622
#9 0x0000555555c323da in operator() (__closure=0x7fff78114830, deviceId=..., socket=...) at connectionmanager.cpp:936
#10 0x0000555555c3c8f9 in std::invoke_impl<void, jami::ConnectionManager::Impl::addNewMultiplexedSocket(const DeviceId&, const Id&)::<lambda(const DeviceId&, const std::shared_ptr<jami::ChannelSocket>&)>&, const dht::Hash<20>&, const std::shared_ptr<jami::ChannelSocket>&>(std::invoke_other, struct {...} &) (__f=...)
at /usr/include/c++/10/bits/invoke.h:60
#11 0x0000555555c39b97 in std::invoke_r<void, jami::ConnectionManager::Impl::addNewMultiplexedSocket(const DeviceId&, const Id&)::<lambda(const DeviceId&, const std::shared_ptr<jami::ChannelSocket>&)>&, const dht::Hash<20>&, const std::shared_ptr<jami::ChannelSocket>&>(struct {...} &) (fn=...) at /usr/include/c++/10/bits/invoke.h:110
#12 0x0000555555c37184 in std::_Function_handler<void(const dht::Hash<20>&, const std::shared_ptr<jami::ChannelSocket>&), jami::ConnectionManager::Impl::addNewMultiplexedSocket(const DeviceId&, const Id&)::<lambda(const DeviceId&, const std::shared_ptr<jami::ChannelSocket>&)> >::Minvoke(const std::_Any_data &, const dht::Hash<20> &, const std::shared_ptr<jami::ChannelSocket> &) (functor=..., args#0=..., __args#1=...) at /usr/include/c++/10/bits/std_function.h:291
#13 0x00005555558d057f in std::function<void (dht::Hash<20ul> const&, std::shared_ptr<jami::ChannelSocket> const&)>::operator()(dht::Hash<20ul> const&, std::shared_ptr<jami::ChannelSocket> const&) const (this=0x7fff78122df0, args#0=..., args#1=...) at /usr/include/c++/10/bits/std_function.h:622
#14 0x00005555558c648d in jami::MultiplexedSocket::Impl::onRequest (this=0x7fff78122ce0, name=..., channel=44561) at multiplexed_socket.cpp:404
#15 0x00005555558c69e5 in operator() (__closure=0x7fff5805a840) at multiplexed_socket.cpp:444
--Type <RET> for more, q to quit, c to continue without paging--
#16 0x00005555558cbe72 in std::invoke_impl<void, jami::MultiplexedSocket::Impl::handleControlPacket(std::vector<unsigned char>&&)::<lambda()>&>(std::invoke_other, struct {...} &) (__f=...) at /usr/include/c++/10/bits/invoke.h:60
#17 0x00005555558cb80b in std::invoke_r<void, jami::MultiplexedSocket::Impl::handleControlPacket(std::vector<unsigned char>&&)::<lambda()>&>(struct {...} &) (fn=...)
at /usr/include/c++/10/bits/invoke.h:110
#18 0x00005555558caf93 in std::_Function_handler<void(), jami::MultiplexedSocket::Impl::handleControlPacket(std::vector<unsigned char>&&)::<lambda()> >::Minvoke(const std::_Any_data &) (__functor=...) at /usr/include/c++/10/bits/std_function.h:291
#19 0x0000555555d910bd in std::thread::_State_impl<std::thread::_Invoker<std::tuple<dht::ThreadPool::run(std::function<void ()>&&)::{lambda()#1}> > >::Mrun() ()
#20 0x00007ffff6e2cd84 in ?? ()
#21 0x0000000000000000 in ?? ()
(gdb)
```Sébastien BlinSébastien Blinhttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/568Crash in conversationRepository2021-11-12T18:33:16ZSébastien BlinCrash in conversationRepository```
[1624127455.501|10853|jamiaccount.cpp :3833 ] Something went wrong when cloning conversation: Couldn't clone repository
[1624127455.506|15742|sip_utils.cpp :278 ] Registered thread 0x7ffe2f7fe428 (0x2A33)
[1624127455.510|10978...```
[1624127455.501|10853|jamiaccount.cpp :3833 ] Something went wrong when cloning conversation: Couldn't clone repository
[1624127455.506|15742|sip_utils.cpp :278 ] Registered thread 0x7ffe2f7fe428 (0x2A33)
[1624127455.510|10978|conversationrepository.cpp:1890 ] Start clone in /home/cyrille/.local/share/jami/fb33489037813e47/conversations/1bbe92a272e1207d6f1f37069bf3486110e7c166
--Type <RET> for more, q to quit, c to continue without paging--
Thread 1 "dring" received signal SIGSEGV, Segmentation fault.
0x0000555555feb844 in git_repository_workdir ()
(gdb) bt
#0 0x0000555555feb844 in git_repository_workdir ()
#1 0x0000555555ca350d in jami::ConversationRepository::infosabi:cxx11 const (this=0x7ffef801ae80) at conversationrepository.cpp:2972
#2 0x0000555555c7a0fb in jami::Conversation::infosabi:cxx11 const (
this=0x7fff640285d0) at conversation.cpp:943
#3 0x00005555557ef2f4 in jami::JamiAccount::conversationInfos (
this=0x7fffd0000b70,
conversationId="1bbe92a272e1207d6f1f37069bf3486110e7c166")
at jamiaccount.cpp:3987
#4 0x0000555555767dfa in DRing::conversationInfos (
accountId="fb33489037813e47",
conversationId="1bbe92a272e1207d6f1f37069bf3486110e7c166")
at conversation_interface.cpp:96
#5 0x000055555571ea62 in DBusConfigurationManager::conversationInfos(std::cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) ()
#6 0x0000555555731898 in cx::ring::Ring::ConfigurationManager_adaptor::_conversationInfos_stub(DBus::CallMessage const&) ()
#7 0x0000555555738172 in DBus::Callback<cx::ring::Ring::ConfigurationManager_adaptor, DBus::Message, DBus::CallMessage const&>::call(DBus::CallMessage const&) const ()
--Type <RET> for more, q to quit, c to continue without paging--
age const&) ()
#9 0x000055555574699a in DBus::ObjectAdaptor::handle_message(DBus::Message const&) ()
#10 0x0000555555743f06 in DBus::ObjectAdaptor::Private::message_function_stub(DBusConnection, DBusMessage, void*) ()
#11 0x00007ffff7f78270 in ?? () from /lib/x86_64-linux-gnu/libdbus-1.so.3
#12 0x00007ffff7f67f54 in dbus_connection_dispatch () from /lib/x86_64-linux-gnu/libdbus-1.so.3
#13 0x00005555557394b0 in DBus::Connection::Private::do_dispatch() ()
#14 0x000055555573b6d2 in DBus::Dispatcher::dispatch_pending(std::__cxx11::list<DBus::Connection::Private, std::allocator<DBus::Connection::Private> >&) ()
#15 0x000055555573b7cf in DBus::Dispatcher::dispatch_pending() ()
#16 0x000055555573cd2f in DBus::BusDispatcher::enter() ()
#17 0x00005555556ad40e in DBusClient::event_loop() ()
#18 0x00005555556a59b8 in main ()
(gdb)
```Sébastien BlinSébastien Blinhttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/566Simple agent with console in C++2021-07-07T14:21:28ZOlivier DionSimple agent with console in C++# Scenario
1. Agents have a hard coded list of candidates to contacts. They make a contact request which is accepted.
2. The contacts all have UPNP disabled.
3. An agent randomly make a call to one of its contact with UPNP enabled or d...# Scenario
1. Agents have a hard coded list of candidates to contacts. They make a contact request which is accepted.
2. The contacts all have UPNP disabled.
3. An agent randomly makes a call to one of its contacts with UPNP enabled or disabled.
4. ~~A connection is made between the agent and its contact. The agent check that the connection used UPNP if enabled.~~Olivier DionOlivier Dionhttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/565conference: audio only participants not visible2021-07-07T13:09:09ZAline Gondim Santosconference: audio only participants not visibleAfter multi-stream changes, audio only participants are not visible anymoreAfter multi-stream changes, audio only participants are not visible anymoreAline Gondim SantosAline Gondim Santoshttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/564Report the list of negotiated media2021-06-23T14:30:40ZMohamed ChibaniReport the list of negotiated mediaAline Gondim SantosMohamed ChibaniAline Gondim Santoshttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/563Huge battery drain when using Jami [blocked by pupnp upstream]2023-09-29T09:44:30ZAndy HiscockHuge battery drain when using Jami [blocked by pupnp upstream]I'm losing about 2% battery a minute on my iPhone 5s. Can this be improved?I'm losing about 2% battery a minute on my iPhone 5s. Can this be improved?BacklogKateryna KostiukKateryna Kostiukhttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/561Jami/Daemon version - unify versioning across platforms2024-01-09T19:32:04ZMohamed ChibaniJami/Daemon version - unify versioning across platformsThe production version is used for informational purposes, and also as a SIP User-Agent identifier which will be relied on to know the peer Daemon version.\
The generated product version format must be uniform across all platforms.\
\
**...The production version is used for informational purposes, and also as a SIP User-Agent identifier which will be relied on to know the peer Daemon version.\
The generated product version format must be uniform across all platforms.\
\
**Expected format**
For official releases: Jami Daemon Major.Minor.Patch (platform). Example "Jami Daemon 10.0.1 (linux)"\
Others: Jami Daemon Major.Minor.Patch-commitID (example "Jami Daemon 10.0.1-cae2ba1653 (linux)")\
\
This format is not respected across all platforms (at least on Windows)BacklogMohamed ChibaniMohamed Chibanihttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/560Jenkins: fix ut_media_negotiation2021-05-31T17:55:12ZSébastien BlinJenkins: fix ut_media_negotiationhttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/559Jenkins: fix ut_call2021-06-07T19:28:17ZSébastien BlinJenkins: fix ut_callSébastien BlinSébastien Blinhttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/558daemon: add tests for conferences2021-07-09T20:22:44ZSébastien Blindaemon: add tests for conferencesTo start to test layouts, mute, etcTo start to test layouts, mute, etcBackloghttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/557Fuzzing SIP over TLS2023-12-27T18:02:51ZSébastien BlinFuzzing SIP over TLS# Fuzzing SIP over TLS
Currently, I've made a lexer for SIP communication that allows one to construct an object and to manipulate a SIP message.
### Scenarios
#### Shuffle randomly the SIP message
This results in the call been hang ...# Fuzzing SIP over TLS
Currently, I've made a lexer for SIP communication that allows one to construct an object and to manipulate a SIP message.
### Scenarios
#### Shuffle randomly the SIP message
This results in the call being hung up after some delay. We probably want to generate random fields instead.
#### Keep changing the SIP version
Version is changed randomly between SIP/1.0 and SIP/2.0.
#### TODO - Change the status code
### Bad SDP body
If the body content of an SDP request is invalid, an assertion fails. The null pointer seems not to be a problem because it's checked; however, the caller should probably not call the function in the first place. Will investigate further.
In sdp.cpp:
```c++
583 assert(remoteSession_);
```
```
#0 0x00007ffff39fdd22 in raise () at /usr/lib/libc.so.6
#1 0x00007ffff39e7862 in abort () at /usr/lib/libc.so.6
#2 0x00007ffff39e7747 in _nl_load_domain.cold () at /usr/lib/libc.so.6
#3 0x00007ffff39f6616 in () at /usr/lib/libc.so.6
#4 0x000055555a84fa0e in jami::Sdp::processIncomingOffer(std::vector<jami::MediaAttribute, std::allocator<jami::MediaAttribute> > const&) (this=0x6110015f0080, mediaList=std::vector of length 1, capacity 1 = {...}) at sdp.cpp:583
#5 0x0000555559bc894e in jami::transaction_request_cb(pjsip_rx_data*) (rdata=0x625007721b08) at sipvoiplink.cpp:451
#6 0x000055555aeaf623 in pjsip_endpt_process_rx_data ()
#7 0x000055555aeaf856 in endpt_on_rx_msg ()
#8 0x000055555aeb685e in pjsip_tpmgr_receive_packet ()
#9 0x000055555a079ce0 in jami::tls::ChanneledSIPTransport::handleEvents() (this=0x625007721900) at /usr/include/c++/11.1.0/bits/channeled_transport.cpp:237
#10 0x000055555a070967 in operator()() const (__closure=0x603002aed2b0) at /usr/include/c++/11.1.0/bits/channeled_transport.cpp:150
#11 0x000055555a0809bd in std::__invoke_impl<void, jami::tls::ChanneledSIPTransport::ChanneledSIPTransport(pjsip_endpoint*, int, const std::shared_ptr<jami::ChannelSocket>&, const jami::IpAddr&, const jami::IpAddr&, jami::onShutdownCb&&)::<lambda(const uint8_t*, size_t)>::<lambda()>&>(std::__invoke_other, struct {...} &) (__f=...) at /usr/include/c++/11.1.0/bits/invoke.h:61
#12 0x000055555a07ea22 in std::__invoke_r<void, jami::tls::ChanneledSIPTransport::ChanneledSIPTransport(pjsip_endpoint*, int, const std::shared_ptr<jami::ChannelSocket>&, const jami::IpAddr&, const jami::IpAddr&, jami::onShutdownCb&&)::<lambda(const uint8_t*, size_t)>::<lambda()>&>(struct {...} &) (__fn=...) at /usr/include/c++/11.1.0/bits/invoke.h:111
#13 0x000055555a07d701 in std::_Function_handler<void(), jami::tls::ChanneledSIPTransport::ChanneledSIPTransport(pjsip_endpoint*, int, const std::shared_ptr<jami::ChannelSocket>&, const jami::IpAddr&, const jami::IpAddr&, jami::onShutdownCb&&)::<lambda(const uint8_t*, size_t)>::<lambda()> >::_M_invoke(const std::_Any_data &) (__functor=...) at /usr/include/c++/11.1.0/bits/std_function.h:291
#14 0x00005555596e546d in std::function<void ()>::operator()() const (this=0x603002aed2b0) at /usr/include/c++/11.1.0/bits/std_function.h:560
#15 0x0000555559a89730 in jami::ScheduledExecutor::loop() (this=0x625007723e10) at scheduled_executor.cpp:137
#16 0x0000555559a85e1e in operator()() const (__closure=0x603000073a88) at scheduled_executor.cpp:32
#17 0x0000555559a8f8be in std::__invoke_impl<void, jami::ScheduledExecutor::ScheduledExecutor()::<lambda()> >(std::__invoke_other, struct {...} &&) (__f=...) at /usr/include/c++/11.1.0/bits/invoke.h:61
#18 0x0000555559a8f779 in std::__invoke<jami::ScheduledExecutor::ScheduledExecutor()::<lambda()> >(struct {...} &&) (__fn=...) at /usr/include/c++/11.1.0/bits/invoke.h:96
#19 0x0000555559a8f64e in std::thread::_Invoker<std::tuple<jami::ScheduledExecutor::ScheduledExecutor()::<lambda()> > >::_M_invoke<0>(std::_Index_tuple<0>) (this=0x603000073a88) at /usr/include/c++/11.1.0/bits/std_thread.h:253
#20 0x0000555559a8f584 in std::thread::_Invoker<std::tuple<jami::ScheduledExecutor::ScheduledExecutor()::<lambda()> > >::operator()(void) (this=0x603000073a88) at /usr/include/c++/11.1.0/bits/std_thread.h:260
#21 0x0000555559a8f53c in std::thread::_State_impl<std::thread::_Invoker<std::tuple<jami::ScheduledExecutor::ScheduledExecutor()::<lambda()> > > >::_M_run(void) (this=0x603000073a80) at /usr/include/c++/11.1.0/bits/std_thread.h:211
#22 0x00007ffff47333c4 in std::execute_native_thread_routine(void*) (__p=0x603000073a80) at /build/gcc/src/gcc/libstdc++-v3/src/c++11/thread.cc:82
#23 0x00007ffff667a259 in start_thread () at /usr/lib/libpthread.so.0
#24 0x00007ffff3abf5e3 in clone () at /usr/lib/libc.so.6
```
#### Payload attack (SDP Huge value of Content-Length)
Alice duplicates the body of its SDP request N times. N grows at a rate of 2^M.
After some threshold, a connection between peers cannot be established if initiated from Alice. Bob can still call Alice.
### Payload attack (SIP MESSAGE)
Alice spams Bob with a very large message text.
The TLS connection seems to stop working and Bob receives the message text (short version) over the DHT instead.
### Sending an unsupported content-type (text/html, text/javascript, etc.)
The content-type is simply ignored. Call can be made.
#### TODO - Sending SIP request over a non-SIP channel (can be applied to other protocols as well)BacklogOlivier DionOlivier Dionhttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/556SIP Account - Handle and test send/receive of empty offers (no SDP)2021-09-02T15:12:07ZMohamed ChibaniSIP Account - Handle and test send/receive of empty offers (no SDP)Handle send and receive of INVITEs with no media offer (no SDP). Empty offers are already supported in incoming INVITEs. We need to be able to send empty offers as well and handle the media offer sent in the 200 OK answer.
Unit ...Handle send and receive of INVITEs with no media offer (no SDP). Empty offers are already supported in incoming INVITEs. We need to be able to send empty offers as well and handle the media offer sent in the 200 OK answer.
Unit tests must be added to validate handling of empty INVITEs in incoming and outgoing calls.Mohamed ChibaniMohamed Chibanihttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/555UPNP_E_BAD_HTTPMSG2021-05-26T14:29:43ZJürgen LütersUPNP_E_BAD_HTTPMSG**Problem:**
dring: PUPnP: Error downloading device XML document from http://[fe80::eadf:70ff:fe5b:17d6]:49000/fboxdesc.xml-> UPNP_E_BAD_HTTPMSG
PUPnP: IGD ctrlUrl http://[fe80::eadf:70ff:fe5b:17d6]:49000/igdupnp/control/WANIPConn1
*...**Problem:**
dring: PUPnP: Error downloading device XML document from http://[fe80::eadf:70ff:fe5b:17d6]:49000/fboxdesc.xml-> UPNP_E_BAD_HTTPMSG
PUPnP: IGD ctrlUrl http://[fe80::eadf:70ff:fe5b:17d6]:49000/igdupnp/control/WANIPConn1
**Components**
Router FRITZ!Box 7430 FRITZ!OS: 07.21
OS Debian bullseye
Jami: jami-qt 202105130112
The problems are reported several times in syslog.
**Tests done**
Not working
curl http://[fe80::eadf:70ff:fe5b:17d6]:49000/fboxdesc.xml
**Working**
curl http://[fe80::eadf:70ff:fe5b:17d6%eno1]:49000/fboxdesc.xml
curl http://[fe80::eadf:70ff:fe5b:17d6%2]:49000/fboxdesc.xml
So it seems that the interface, either as a name (eno1) or as a position (2), has to be provided if a link-local address is queried.https://git.jami.net/savoirfairelinux/jami-daemon/-/issues/554ICE/TCP - unexpected "Channel bind failed" on a TCP connection2021-08-25T14:27:28ZMohamed ChibaniICE/TCP - unexpected "Channel bind failed" on a TCP connectionPJNATH sometimes reports the following error:
```
[1621974703.225|50404|manager.cpp :253 ] 16:31:43.225 tcprel0x7f3b581c6390 .ChannelBind failed: 403/Channel bind cannot be used with TCP relay
```
It's unknown yet if this erro...PJNATH sometimes reports the following error:
```
[1621974703.225|50404|manager.cpp :253 ] 16:31:43.225 tcprel0x7f3b581c6390 .ChannelBind failed: 403/Channel bind cannot be used with TCP relay
```
It's unknown yet if this error has an impact on the establishment of the ICE connection.Mohamed ChibaniSébastien BlinMohamed Chibanihttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/553Multi-stream - Fix Hold/Resume and empty offers2021-12-20T16:08:59ZMohamed ChibaniMulti-stream - Fix Hold/Resume and empty offers- The media direction attribute in the SDP is set to "sendrecv" when a call is put "on-hold", while it should be "sendonly".
- When receiving an empty offer, we must answer with an offer that includes audio and video if enabled in the ...- The media direction attribute in the SDP is set to "sendrecv" when a call is put "on-hold", while it should be "sendonly".
- When receiving an empty offer, we must answer with an offer that includes audio and video if enabled in the account.
These regressions were introduced while adding multi-stream support.Mohamed ChibaniMohamed Chibanihttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/551Multi-stream - disable media change request if the peer does not support the ...2021-06-23T14:30:35ZMohamed ChibaniMulti-stream - disable media change request if the peer does not support the featureIf the multi-stream feature is enabled and a media change request is sent to a peer that does not support the feature, the received media change request may cause unexpected behaviors.\
To prevent this issue, the local Jami Daemon will fi...If the multi-stream feature is enabled and a media change request is sent to a peer that does not support the feature, the received media change request may cause unexpected behaviors.\
To prevent this issue, the local Jami Daemon will first check the peer's Daemon version and enable the multi-stream feature only if the peer meets the minimum required version (currently set to 10.0.2)Mohamed ChibaniMohamed Chibanihttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/550To investigate2021-05-19T13:15:30ZSébastien BlinTo investigate```
1621367021.443|15196|tls_session.cpp :799 ] HTTP OCSP Request state=0 status_code=0
[1621367021.541| 8355|tls_session.cpp :799 ] HTTP OCSP Request state=1 status_code=0
[1621367021.541| 8355|tls_session.cpp :799 ] HTTP OCSP ...```
1621367021.443|15196|tls_session.cpp :799 ] HTTP OCSP Request state=0 status_code=0
[1621367021.541| 8355|tls_session.cpp :799 ] HTTP OCSP Request state=1 status_code=0
[1621367021.541| 8355|tls_session.cpp :799 ] HTTP OCSP Request state=3 status_code=0
[1621367021.542| 8355|tls_session.cpp :799 ] HTTP OCSP Request state=1 status_code=0
[1621367021.542| 8355|tls_session.cpp :799 ] HTTP OCSP Request state=3 status_code=0
[1621367021.554| 8355|tls_session.cpp :799 ] HTTP OCSP Request state=2 status_code=404
[1621367021.554| 8355|tls_session.cpp :799 ] HTTP OCSP Request state=4 status_code=404
[1621367021.554| 8355|tls_session.cpp :746 ] HTTP OCSP Request Failed with code 404
[1621367021.554| 8355|tls_session.cpp :697 ] Skipping OCSP verification 2b6ad943b85848949df139a27c98ca4622bfaef8: request failed
[1621367021.554| 8355|tls_session.cpp :799 ] HTTP OCSP Request state=2 status_code=404
[1621367021.554| 8355|tls_session.cpp :799 ] HTTP OCSP Request state=4 status_code=404
[1621367021.554| 8355|tls_session.cpp :746 ] HTTP OCSP Request Failed with code 404
[1621367021.554| 8355|tls_session.cpp :697 ] Skipping OCSP verification 2b6ad943b85848949df139a27c98ca4622bfaef8: request failed
[1621367021.562|15194|tls_session.cpp :1191 ] [TLS] session established: (TLS1.3)-(ECDHE-SECP384R1)-(RSA-PSS-RSAE-SHA384)-(AES-256-GCM)
[1621367021.562|15196|tls_session.cpp :1191 ] [TLS] session established: (TLS1.3)-(ECDHE-SECP384R1)-(RSA-PSS-RSAE-SHA384)-(AES-256-GCM)
[1621367021.562|15194|connectionmanager.cpp:690 ] Connection to 2b6ad943b85848949df139a27c98ca4622bfaef8 is ready - Initied by DHT request. Vid: 17729604957344243654
[1621367021.562|15196|connectionmanager.cpp:690 ] Connection to 2b6ad943b85848949df139a27c98ca4622bfaef8 is ready - Initied by DHT request. Vid: 15841830044988338181
[New Thread 0x7ffed0ff9640 (LWP 15198)]
[New Thread 0x7ffed17fa640 (LWP 15197)]
[New Thread 0x7ffeb7fff640 (LWP 15199)]
[1621367022.203| 8489|channeled_transport.cpp:48 ] ChanneledSIPTransport@0x7fff04016e80 {tr=0x7fff04016ed0}
[1621367022.203| 8489|siptransport.cpp :81 ] SipTransport@0x7fff0005a320 {tr=0x7fff04016ed0 {rc=1}}
[1621367022.203| 8489|jamiaccount.cpp :5035 ] [Account 132642d000b21ad1] New SIP channel opened with 2b6ad943b85848949df139a27c98ca4622bfaef8
[1621367022.203| 8489|jamiaccount.cpp :4975 ] Peer 2b6ad943b85848949df139a27c98ca4622bfaef8 already got an up-to-date vcard
[1621367022.243| 8391|p2p.cpp :317 ] Incoming transfer request with id(1802370016735664233)
[1621367022.742|15198|ftp_server.cpp :213 ] [FTP] header: 'Content-Length' = '52630'
[1621367022.742|15198|ftp_server.cpp :213 ] [FTP] header: 'Display-Name' = 'Screenshot from 2021-05-18 15-42-46.png'
[1621367022.742|15198|ftp_server.cpp :213 ] [FTP] header: 'Offset' = '0'
[1621367022.742|15198|data_transfer.cpp :572 ] [FTP] incoming transfert of 52630 byte(s): Screenshot from 2021-05-18 15-42-46.png
[1621367023.243|15198|data_transfer.cpp :635 ] [FTP] file closed, rx 52630 on 52630
[1621367023.243|15198|p2p.cpp :343 ] Channel down for incoming transfer with id(1802370016735664233)
[1621367023.243|15198|sip_utils.cpp :251 ] Registered thread 0x7ffed0ff9428 (0x20A0)
malloc(): unsorted double linked list corrupted
--Type <RET> for more, q to quit, c to continue without paging--
Thread 53 "dring" received signal SIGSEGV, Segmentation fault.
[Switching to Thread 0x7fff32ffd640 (LWP 8490)]
0x00007ffff71ec769 in GI_libc_free (mem=0x7ff9479176b9) at malloc.c:3288
3288 malloc.c: Aucun fichier ou dossier de ce type.
(gdb) bt
#0 0x00007ffff71ec769 in GI_libc_free (mem=0x7ff9479176b9)
at malloc.c:3288
#1 0x00005555558e6505 in gnu_cxx::new_allocator<unsigned char*>::deallocate
(this=0x7fff32ff9de7, p=0x7ff9479176b9, __t=140731993093888)
at /usr/include/c++/10/ext/new_allocator.h:133
#2 0x00005555558e0d84 in std::allocator_traits<std::allocator<unsigned char*> >::deallocate (a=..., p=0x7ff9479176b9, __n=140731993093888)
at /usr/include/c++/10/bits/alloc_traits.h:492
#3 0x00005555558db325 in std::_Deque_base<unsigned char, std::allocator<unsigned char> >::Mdeallocate_map (this=0x7fff32ff9ee0, p=0x7ff9479176b9,
n=140731993093888) at /usr/include/c++/10/bits/stl_deque.h:580
#4 0x00005555558d7dc3 in std::_Deque_base<unsigned char, std::allocator<unsigned char> >::~_Deque_base (this=0x7fff32ff9ee0, __in_chrg=<optimized out>)
at /usr/include/c++/10/bits/stl_deque.h:600
#5 0x00005555558da006 in std::deque<unsigned char, std::allocator<unsigned char> >::~deque (this=0x7fff32ff9ee0, __in_chrg=<optimized out>)
at /usr/include/c++/10/bits/stl_deque.h:1004
#6 0x00005555558d129a in jami::MultiplexedSocket::setOnRecv(unsigned short const&, std::function<long (unsigned char const*, unsigned long)>&&) (
this=0x7ffed401a640, channel=@0x7fff50238628: 43971, cb=...)
at multiplexed_socket.cpp:713
#7 0x00005555558d1f4f in jami::ChannelSocket::setOnRecv(std::function<long (unsigned char const*, unsigned long)>&&) (this=0x7fff64007a90, cb=...)
--Type <RET> for more, q to quit, c to continue without paging--
at multiplexed_socket.cpp:895
#8 0x0000555555c181cc in jami::ChanneledIncomingTransfer::~ChanneledIncomingTransfer (this=0x7fff64004b90, __in_chrg=<optimized out>)
at channeled_transfers.cpp:95
#9 0x000055555590c212 in std::default_delete<jami::ChanneledIncomingTransfer>::operator() (this=0x7fff640087d8, __ptr=0x7fff64004b90)
at /usr/include/c++/10/bits/unique_ptr.h:85
#10 0x000055555590a9f6 in std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> >::~unique_ptr (
this=0x7fff640087d8, __in_chrg=<optimized out>)
at /usr/include/c++/10/bits/unique_ptr.h:361
#11 0x000055555590fc8a in std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > >::~pair (this=0x7fff640087d0, __in_chrg=<optimized out>)
at /usr/include/c++/10/bits/stl_pair.h:211
#12 0x000055555590fcae in gnu_cxx::new_allocator<std::_Rb_tree_node<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > > >::destroy<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > > (this=0x7fff8c308070,
p=0x7fff640087d0) at /usr/include/c++/10/ext/new_allocator.h:156
#13 0x000055555590f09b in std::allocator_traits<std::allocator<std::_Rb_tree_node<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer--Type <RET> for more, q to quit, c to continue without paging--
, std::default_delete<jami::ChanneledIncomingTransfer> > > > > >::destroy<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > > (a=...,
p=0x7fff640087d0) at /usr/include/c++/10/bits/alloc_traits.h:531
#14 0x000055555590df71 in std::_Rb_tree<unsigned long, std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > >, std::_Select1st<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > >, std::less<unsigned long>, std::allocator<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > > >::Mdestroy_node (
this=0x7fff8c308070, __p=0x7fff640087b0)
at /usr/include/c++/10/bits/stl_tree.h:646
#15 0x000055555590c575 in std::_Rb_tree<unsigned long, std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > >, std::_Select1st<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > >, std::less<unsigned long>, std::allocator<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > > >::_M_drop_node (
this=0x7fff8c308070, __p=0x7fff640087b0)
at /usr/include/c++/10/bits/stl_tree.h:654
#16 0x000055555590ad56 in std::_Rb_tree<unsigned long, std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > >, std::_Select1st<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > >, std::less<unsigned long>, std::allocator<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > > >::_M_erase (
this=0x7fff8c308070, __x=0x7fff640087b0)
at /usr/include/c++/10/bits/stl_tree.h:1921
#17 0x000055555590e3c6 in std::_Rb_tree<unsigned long, std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > >, std::_Select1st<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > >, std::less<unsigned long>, std::allocator<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > > >::clear (this=0x7fff8c308070)
at /usr/include/c++/10/bits/stl_tree.h:1261
#18 0x000055555590cb33 in std::_Rb_tree<unsigned long, std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > >, std::_Select1st<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > >, std::less<unsigned long>, std::allocator<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > > >::_M_erase_aux (
this=0x7fff8c308070, first=
{first = 1802370016735664233, second = std::unique_ptr<jami::ChanneledIncomingTransfer> = {get() = 0x7fff64004b90}}, last=
{first = 1, second = std::unique_ptr<jami::ChanneledIncomingTransfer> = {get() = 0x0}}) at /usr/include/c++/10/bits/stl_tree.h:2528
#19 0x000055555590b10b in std::_Rb_tree<unsigned long, std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > >, std::_Select1st<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > >, std::less<unsigned long>, std::allocator<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > > >::erase (
this=0x7fff8c308070, __x=@0x7ffeb86867e0: 1802370016735664233)
at /usr/include/c++/10/bits/stl_tree.h:2542
#20 0x0000555555909f3f in std::map<unsigned long, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> >, std::less<unsigned long>, std::allocator<std::pair<unsigned long const, std::unique_ptr<jami::ChanneledIncomingTransfer, std::default_delete<jami::ChanneledIncomingTransfer> > > > >::erase (this=0x7fff8c308070,
__x=@0x7ffeb86867e0: 1802370016735664233)
at /usr/include/c++/10/bits/stl_map.h:1069
#21 0x0000555555904c7a in operator() (__closure=0x7ffeb86867d0) at p2p.cpp:351
#22 0x0000555555907f3e in std::__invoke_impl<void, jami::DhtPeerConnector::onIncomingConnection(const string&, const DataTransferId&, const std::shared_ptr<jami::ChannelSocket>&, jami::InternalCompletionCb&&)::<lambda()>::<lambda()>&>(std::__invoke_other, struct {...} &) (__f=...)
at /usr/include/c++/10/bits/invoke.h:60
#23 0x000055555590721a in std::__invoke_r<void, jami::DhtPeerConnector::onIncomingConnection(const string&, const DataTransferId&, const std::shared_ptr<jami::ChannelSocket>&, jami::InternalCompletionCb&&)::<lambda()>::<lambda()>&>(struct {...} &) (__fn=...) at /usr/include/c++/10/bits/invoke.h:110
#24 0x0000555555906376 in std::_Function_handler<void(), jami::DhtPeerConnector::onIncomingConnection(const string&, const DataTransferId&, const std::shared_ptr<jami::ChannelSocket>&, jami::InternalCompletionCb&&)::<lambda()>::<lambda()> >::_M_invoke(const std::_Any_data &) (__functor=...)
at /usr/include/c++/10/bits/std_function.h:291
#25 0x0000555555d82c4d in std::thread::_State_impl<std::thread::_Invoker<std::tuple<dht::ThreadPool::run(std::function<void ()>&&)::{lambda()#1}> > >::_M_run()
()
#26 0x00007ffff7586694 in ?? () from /lib/x86_64-linux-gnu/libstdc++.so.6
#27 0x00007ffff7be8450 in start_thread (arg=0x7fff32ffd640)
at pthread_create.c:473
#28 0x00007ffff726cd53 in clone ()
at ../sysdeps/unix/sysv/linux/x86_64/clone.S:95
(gdb)
```Sébastien BlinSébastien Blinhttps://git.jami.net/savoirfairelinux/jami-daemon/-/issues/549Crash in waitForData2021-05-18T19:07:09ZSébastien BlinCrash in waitForData```
2021-05-17 11:15:50.420 16384-17009/cx.ring D/libdring: Registered thread 0xb400007926e57898 (0x4000)
2021-05-17 11:15:50.420 16384-16937/cx.ring A/libc: FORTIFY: pthread_mutex_lock called on a destroyed mutex (0xb4000078c9163ac0)
20...```
2021-05-17 11:15:50.420 16384-17009/cx.ring D/libdring: Registered thread 0xb400007926e57898 (0x4000)
2021-05-17 11:15:50.420 16384-16937/cx.ring A/libc: FORTIFY: pthread_mutex_lock called on a destroyed mutex (0xb4000078c9163ac0)
2021-05-17 11:15:50.421 16384-16937/cx.ring A/libc: Fatal signal 6 (SIGABRT), code -1 (SI_QUEUE) in tid 16937 (Thread-1236), pid 16384 (cx.ring)
2021-05-17 11:15:50.426 16384-16925/cx.ring I/libdring: Peer 77c565d1c02d70756356c00dbf989bf7efc8ce9a already got an up-to-date vcard
2021-05-17 11:15:50.517 16384-16875/cx.ring D/libdring: [ice:0xb4000079e69c64b0] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16884/cx.ring D/libdring: [ice:0xb4000079e69cd3d0] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16890/cx.ring D/libdring: [ice:0xb4000079e69f3700] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16877/cx.ring D/libdring: [ice:0xb4000079e69d42f0] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16923/cx.ring D/libdring: [ice:0xb4000079e6a0f380] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16888/cx.ring D/libdring: [ice:0xb4000079e69e2130] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16930/cx.ring D/libdring: [ice:0xb4000079e6a1d1c0] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16902/cx.ring D/libdring: [ice:0xb4000079e69fa620] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16926/cx.ring D/libdring: [ice:0xb4000079e6a19a30] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16900/cx.ring D/libdring: [ice:0xb4000079e69fddb0] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16906/cx.ring D/libdring: [ice:0xb4000079e6a04cd0] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16904/cx.ring D/libdring: [ice:0xb4000079e6a01540] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16896/cx.ring D/libdring: [ice:0xb4000079e69f6e90] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16894/cx.ring D/libdring: [ice:0xb4000079e69e9050] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16886/cx.ring D/libdring: [ice:0xb4000079e69e58c0] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16928/cx.ring D/libdring: [ice:0xb4000079e6a08460] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 17013-17013/? I/crash_dump64: obtaining output fd from tombstoned, type: kDebuggerdTombstone
2021-05-17 11:15:50.517 16384-16892/cx.ring D/libdring: [ice:0xb4000079e69eff70] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16883/cx.ring D/libdring: [ice:0xb4000079e69de9a0] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16876/cx.ring D/libdring: [ice:0xb4000079e69c9c40] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16879/cx.ring D/libdring: [ice:0xb4000079e69db210] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.517 16384-16878/cx.ring D/libdring: [ice:0xb4000079e69d7a80] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.518 16384-16921/cx.ring D/libdring: [ice:0xb4000079e6a0bbf0] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.518 16384-16898/cx.ring D/libdring: [ice:0xb4000079e69ec7e0] ioqueue error 120004: Interrupted system call
2021-05-17 11:15:50.523 1019-1019/? I/tombstoned: received crash request for pid 16937
2021-05-17 11:15:50.525 17013-17013/? I/crash_dump64: performing dump of process 16384 (target tid = 16937)
2021-05-17 11:15:50.527 16384-16425/cx.ring E/libdring: pjsip_endpt_handle_events failed with error Interrupted system call
2021-05-17 11:15:50.532 17013-17013/? A/DEBUG: *** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***
2021-05-17 11:15:50.532 17013-17013/? A/DEBUG: Build fingerprint: 'google/coral/coral:11/RQ2A.210505.002/7246365:user/release-keys'
2021-05-17 11:15:50.532 17013-17013/? A/DEBUG: Revision: 'MP1.0'
2021-05-17 11:15:50.532 17013-17013/? A/DEBUG: ABI: 'arm64'
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: Timestamp: 2021-05-17 11:15:50-0400
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: pid: 16384, tid: 16937, name: Thread-1236 >>> cx.ring <<<
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: uid: 10284
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: signal 6 (SIGABRT), code -1 (SI_QUEUE), fault addr --------
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: Abort message: 'FORTIFY: pthread_mutex_lock called on a destroyed mutex (0xb4000078c9163ac0)'
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: x0 0000000000000000 x1 0000000000004229 x2 0000000000000006 x3 000000771fab2b30
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: x4 0000000000000000 x5 0000000000000000 x6 0000000000000000 x7 0000000000000028
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: x8 00000000000000f0 x9 e29ef6db2cb5d1b4 x10 0000000000000000 x11 ffffffc0fffffbdf
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: x12 0000000000000001 x13 000004872fe49c80 x14 0018f749d7f00558 x15 0000000001bf5fee
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: x16 0000007ada911c80 x17 0000007ada8f3870 x18 000000771f840000 x19 0000000000004000
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: x20 0000000000004229 x21 00000000ffffffff x22 0000000000000000 x23 0000000000000001
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: x24 000000771fab4000 x25 b4000078a69d35a0 x26 000000771fab4000 x27 00000077d65af5a8
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: x28 0000000000000000 x29 000000771fab2bb0
2021-05-17 11:15:50.533 17013-17013/? A/DEBUG: lr 0000007ada8a72a0 sp 000000771fab2b10 pc 0000007ada8a72cc pst 0000000000000000
2021-05-17 11:15:50.533 3506-12715/? W/ChimeraUtils: Module com.google.android.gms.gcm has empty metadata display_name_string_id
2021-05-17 11:15:50.536 17013-17013/? A/DEBUG: backtrace:
2021-05-17 11:15:50.536 17013-17013/? A/DEBUG: #00 pc 000000000004e2cc /apex/com.android.runtime/lib64/bionic/libc.so (abort+164) (BuildId: 49090ae59e6ae37f8beae53c551820ad)
2021-05-17 11:15:50.536 17013-17013/? A/DEBUG: #01 pc 00000000000b1428 /apex/com.android.runtime/lib64/bionic/libc.so (__fortify_fatal(char const*, ...)+124) (BuildId: 49090ae59e6ae37f8beae53c551820ad)
2021-05-17 11:15:50.536 17013-17013/? A/DEBUG: #02 pc 00000000000b0a24 /apex/com.android.runtime/lib64/bionic/libc.so (HandleUsingDestroyedMutex(pthread_mutex_t*, char const*)+52) (BuildId: 49090ae59e6ae37f8beae53c551820ad)
2021-05-17 11:15:50.536 17013-17013/? A/DEBUG: #03 pc 00000000000b087c /apex/com.android.runtime/lib64/bionic/libc.so (pthread_mutex_lock+160) (BuildId: 49090ae59e6ae37f8beae53c551820ad)
2021-05-17 11:15:50.536 17013-17013/? A/DEBUG: #04 pc 00000000000af1e4 /apex/com.android.runtime/lib64/bionic/libc.so (pthread_cond_timedwait+132) (BuildId: 49090ae59e6ae37f8beae53c551820ad)
2021-05-17 11:15:50.536 17013-17013/? A/DEBUG: #05 pc 0000000000077a74 /data/app/~~P0scAgy5GUGb4kmKHvGx_A==/cx.ring-ouedHu4DuW0yvXeHMkL-rQ==/lib/arm64/libc++_shared.so (std::__ndk1::condition_variable::__do_timed_wait(std::__ndk1::unique_lock<std::__ndk1::mutex>&, std::__ndk1::chrono::time_point<std::__ndk1::chrono::system_clock, std::__ndk1::chrono::duration<long long, std::__ndk1::ratio<1l, 1000000000l> > >)+112) (BuildId: d776b1a67f4b0934ff585c6af89b742a8169d6b7)
```
In `MultiplexedSocket::waitForData` (std::unique_lock<std::mutex> lk {channelData->mutex};)Sébastien BlinSébastien Blin