Track speaking state in non-joined voice chats.

John Preston 2020-12-11 15:04:34 +04:00
parent 49b8340695
commit 424ba1dbea
11 changed files with 386 additions and 61 deletions
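How the pieces below fit together (condensed from the hunks; the two wrapper function names in the sketch are illustrative only, not part of the commit): when an updateChannelUserTyping carrying a speakingInGroupCallAction arrives for a channel whose Data::GroupCall is not loaded yet, Updates caches the member id and timestamp in _pendingSpeakingCallMembers and requests the full peer; once the full peer (and with it the call) is loaded, the cached timestamps are replayed through GroupCall::applyActiveUpdate. On the data side such activity marks the participant as speaking for kSpeakingAfterActive (6 s), but only while we are not joined to that call ourselves; joining calls setInCall(), which lets lingering marks expire within kActiveAfterJoined (1 s) and leaves the speaking state to real audio levels.

void handleSpeakingInCallAction( // illustrative wrapper around the hunks below
		not_null<ChannelData*> channel,
		UserId userId) {
	const auto now = crl::now();
	if (const auto call = channel->call()) {
		// Call data already loaded: mark the member as speaking right away.
		call->applyActiveUpdate(userId, now, channel->owner().userLoaded(userId));
	} else if (channel->flags() & MTPDchannel::Flag::f_call_active) {
		// Call exists on the server but is not loaded: cache and load the full peer.
		_pendingSpeakingCallMembers.emplace(channel).first->second[userId] = now;
		session().api().requestFullPeer(channel);
	}
}

void onFullPeerLoaded(not_null<ChannelData*> channel) { // illustrative wrapper
	// Replay the cached activity onto the freshly loaded call object.
	if (const auto users = _pendingSpeakingCallMembers.take(channel)) {
		if (const auto call = channel->call()) {
			for (const auto [userId, when] : *users) {
				call->applyActiveUpdate(userId, when, channel->owner().userLoaded(userId));
			}
		}
	}
}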

View File

@@ -22,6 +22,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "data/data_channel.h"
#include "data/data_chat_filters.h"
#include "data/data_cloud_themes.h"
#include "data/data_group_call.h"
#include "data/data_drafts.h"
#include "data/data_histories.h"
#include "data/data_folder.h"
@@ -232,6 +233,26 @@ Updates::Updates(not_null<Main::Session*> session)
)).done([=](const MTPupdates_State &result) {
stateDone(result);
}).send();
using namespace rpl::mappers;
base::ObservableViewer(
api().fullPeerUpdated()
) | rpl::map([=](not_null<PeerData*> peer) {
return peer->asChannel();
}) | rpl::filter(
_1 != nullptr
) | rpl::start_with_next([=](not_null<ChannelData*> channel) {
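// The channel's full data (and with it the call) has just been loaded: replay the cached speaking timestamps onto it.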
if (const auto users = _pendingSpeakingCallMembers.take(channel)) {
if (const auto call = channel->call()) {
for (const auto [userId, when] : *users) {
call->applyActiveUpdate(
userId,
when,
channel->owner().userLoaded(userId));
}
}
}
}, _lifetime);
}
Main::Session &Updates::session() const {
@@ -1616,20 +1637,38 @@ void Updates::feedUpdate(const MTPUpdate &update) {
const auto &d = update.c_updateChannelUserTyping();
const auto history = session().data().historyLoaded(
peerFromChannel(d.vchannel_id()));
const auto user = (d.vuser_id().v == session().userId())
? nullptr
: session().data().userLoaded(d.vuser_id().v);
if (history && user) {
const auto when = requestingDifference()
? 0
: base::unixtime::now();
const auto rootId = d.vtop_msg_id().value_or_empty();
session().data().registerSendAction(
history,
rootId,
user,
d.vaction(),
when);
if (history) {
const auto userId = d.vuser_id().v;
const auto user = (userId == session().userId())
? session().user().get()
: session().data().userLoaded(userId);
const auto isSpeakingInCall = (d.vaction().type()
== mtpc_speakingInGroupCallAction);
if (isSpeakingInCall) {
const auto channel = history->peer->asChannel();
const auto call = channel->call();
const auto now = crl::now();
if (call) {
call->applyActiveUpdate(userId, now, user);
} else if (channel->flags()
& MTPDchannel::Flag::f_call_active) {
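// The call is active on the server but not loaded locally yet: remember who spoke and when, then request the full channel to load it.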
_pendingSpeakingCallMembers.emplace(
channel).first->second[userId] = now;
session().api().requestFullPeer(channel);
}
}
if (user && !user->isSelf()) {
const auto when = requestingDifference()
? 0
: base::unixtime::now();
const auto rootId = d.vtop_msg_id().value_or_empty();
session().data().registerSendAction(
history,
rootId,
user,
d.vaction(),
when);
}
}
} break;

View File

@@ -160,6 +160,9 @@ private:
bool _handlingChannelDifference = false;
base::flat_map<int, ActiveChatTracker> _activeChats;
base::flat_map<
not_null<ChannelData*>,
base::flat_map<UserId, crl::time>> _pendingSpeakingCallMembers;
mtpRequestId _onlineRequest = 0;
base::Timer _idleFinishTimer;

View File

@@ -108,9 +108,14 @@ void GroupCall::setState(State state) {
}
_state = state;
if (_state.current() == State::Joined && !_pushToTalkStarted) {
_pushToTalkStarted = true;
applyGlobalShortcutChanges();
if (_state.current() == State::Joined) {
if (!_pushToTalkStarted) {
_pushToTalkStarted = true;
applyGlobalShortcutChanges();
}
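// Tell the data-layer call object we have actually joined, so recent-activity marks stop standing in for real speaking state.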
if (const auto call = _channel->call(); call && call->id() == _id) {
call->setInCall();
}
}
if (false

View File

@ -36,6 +36,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
namespace Calls {
namespace {
constexpr auto kMaxUsersInBar = 3;
constexpr auto kUpdateDebugTimeoutMs = crl::time(500);
constexpr auto kSwitchStateDuration = 120;
@@ -139,6 +140,10 @@ void DebugInfoBox::updateText() {
} // namespace
struct TopBar::User {
Ui::GroupCallBarContent::User data;
};
class Mute final : public Ui::IconButton {
public:
Mute(QWidget *parent, const style::IconButton &st)
@@ -521,16 +526,66 @@ void TopBar::subscribeToMembersChanges(not_null<GroupCall*> call) {
.stroke = st::groupCallTopBarUserpicStroke,
});
}) | rpl::flatten_latest(
) | rpl::start_with_next([=](const Ui::GroupCallBarContent &content) {
const auto changed = (_userpics.size() != content.userpics.size());
_userpics = content.userpics;
if (changed) {
) | rpl::filter([=](const Ui::GroupCallBarContent &content) {
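// Rebuild the userpics row only when the set of shown users or their userpic keys actually changed.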
if (_users.size() != content.users.size()) {
return true;
}
for (auto i = 0, count = int(_users.size()); i != count; ++i) {
if (_users[i].data.userpicKey != content.users[i].userpicKey
|| _users[i].data.id != content.users[i].id) {
return true;
}
}
return false;
}) | rpl::start_with_next([=](const Ui::GroupCallBarContent &content) {
const auto sizeChanged = (_users.size() != content.users.size());
_users = ranges::view::all(
content.users
) | ranges::view::transform([](const auto &user) {
return User{ user };
}) | ranges::to_vector;
generateUserpicsInRow();
if (sizeChanged) {
updateControlsGeometry();
}
update();
}, lifetime());
}
void TopBar::generateUserpicsInRow() {
const auto count = int(_users.size());
if (!count) {
_userpics = QImage();
return;
}
const auto limit = std::min(count, kMaxUsersInBar);
const auto single = st::groupCallTopBarUserpicSize;
const auto shift = st::groupCallTopBarUserpicShift;
const auto width = single + (limit - 1) * (single - shift);
if (_userpics.width() != width * cIntRetinaFactor()) {
_userpics = QImage(
QSize(width, single) * cIntRetinaFactor(),
QImage::Format_ARGB32_Premultiplied);
}
_userpics.fill(Qt::transparent);
_userpics.setDevicePixelRatio(cRetinaFactor());
auto q = Painter(&_userpics);
auto hq = PainterHighQualityEnabler(q);
auto pen = QPen(Qt::transparent);
pen.setWidth(st::groupCallTopBarUserpicStroke);
auto x = (count - 1) * (single - shift);
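// Paint right-to-left so earlier users end up on top; the transparent stroke cut in Source mode separates overlapping userpics.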
for (auto i = count; i != 0;) {
q.setCompositionMode(QPainter::CompositionMode_SourceOver);
q.drawImage(x, 0, _users[--i].data.userpic);
q.setCompositionMode(QPainter::CompositionMode_Source);
q.setBrush(Qt::NoBrush);
q.setPen(pen);
q.drawEllipse(x, 0, single, single);
x -= single - shift;
}
}
void TopBar::updateInfoLabels() {
setInfoLabels();
updateControlsGeometry();

View File

@@ -49,6 +49,8 @@ protected:
void paintEvent(QPaintEvent *e) override;
private:
struct User;
TopBar(
QWidget *parent,
const base::weak_ptr<Call> &call,
@@ -63,11 +65,13 @@ private:
void setMuted(bool mute);
void subscribeToMembersChanges(not_null<GroupCall*> call);
void generateUserpicsInRow();
const base::weak_ptr<Call> _call;
const base::weak_ptr<GroupCall> _groupCall;
bool _muted = false;
std::vector<User> _users;
QImage _userpics;
object_ptr<Ui::LabelSimple> _durationLabel;
object_ptr<SignalBars> _signalBars;

View File

@@ -7,16 +7,22 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "data/data_group_call.h"
#include "base/unixtime.h"
#include "data/data_channel.h"
#include "data/data_changes.h"
#include "data/data_session.h"
#include "main/main_session.h"
#include "calls/calls_instance.h"
#include "calls/calls_group_call.h"
#include "core/application.h"
#include "apiwrap.h"
namespace Data {
namespace {
constexpr auto kRequestPerPage = 30;
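// A member seen "active" keeps the speaking mark for this long while we are not in the call.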
constexpr auto kSpeakingAfterActive = crl::time(6000);
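// Once we join, any remaining activity-based speaking marks expire within this long.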
constexpr auto kActiveAfterJoined = crl::time(1000);
} // namespace
@@ -26,11 +32,12 @@ GroupCall::GroupCall(
uint64 accessHash)
: _channel(channel)
, _id(id)
, _accessHash(accessHash) {
, _accessHash(accessHash)
, _speakingByActiveFinishTimer([=] { checkFinishSpeakingByActive(); }) {
}
GroupCall::~GroupCall() {
api().request(_unknownSsrcsRequestId).cancel();
api().request(_unknownUsersRequestId).cancel();
api().request(_participantsRequestId).cancel();
api().request(_reloadRequestId).cancel();
}
@@ -184,6 +191,7 @@ void GroupCall::reload() {
result.match([&](const MTPDphone_groupCall &data) {
_channel->owner().processUsers(data.vusers());
_participants.clear();
_speakingByActiveFinishes.clear();
_userBySsrc.clear();
applyParticipantsSlice(
data.vparticipants().v,
@@ -201,6 +209,10 @@ void GroupCall::reload() {
void GroupCall::applyParticipantsSlice(
const QVector<MTPGroupCallParticipant> &list,
ApplySliceSource sliceSource) {
const auto amInCall = inCall();
const auto now = base::unixtime::now();
const auto speakingAfterActive = TimeId(kSpeakingAfterActive / 1000);
auto changedCount = _fullCount.current();
for (const auto &participant : list) {
participant.match([&](const MTPDgroupCallParticipant &data) {
@@ -216,6 +228,7 @@ void GroupCall::applyParticipantsSlice(
.was = *i,
};
_userBySsrc.erase(i->ssrc);
_speakingByActiveFinishes.remove(user);
_participants.erase(i);
if (sliceSource != ApplySliceSource::SliceLoaded) {
_participantUpdates.fire(std::move(update));
@@ -231,10 +244,16 @@ void GroupCall::applyParticipantsSlice(
: std::nullopt;
const auto canSelfUnmute = !data.is_muted()
|| data.is_can_self_unmute();
const auto lastActive = data.vactive_date().value_or(
was ? was->lastActive : 0);
const auto speaking = canSelfUnmute
&& ((was ? was->speaking : false)
|| (!amInCall
&& (lastActive + speakingAfterActive > now)));
const auto value = Participant{
.user = user,
.date = data.vdate().v,
.lastActive = was ? was->lastActive : 0,
.lastActive = lastActive,
.ssrc = uint32(data.vsource().v),
.speaking = canSelfUnmute && (was ? was->speaking : false),
.muted = data.is_muted(),
@@ -285,6 +304,7 @@ void GroupCall::applyParticipantsMutes(
i->canSelfUnmute = !i->muted || data.is_can_self_unmute();
if (!i->canSelfUnmute) {
i->speaking = false;
_speakingByActiveFinishes.remove(i->user);
}
_participantUpdates.fire({
.was = was,
@@ -298,13 +318,14 @@ void GroupCall::applyParticipantsMutes(
void GroupCall::applyLastSpoke(uint32 ssrc, crl::time when, crl::time now) {
const auto i = _userBySsrc.find(ssrc);
if (i == end(_userBySsrc)) {
_unknownSpokenSsrcs.emplace(ssrc, when);
requestUnknownSsrcs();
_unknownSpokenSsrcs[ssrc] = when;
requestUnknownParticipants();
return;
}
const auto j = ranges::find(_participants, i->second, &Participant::user);
Assert(j != end(_participants));
_speakingByActiveFinishes.remove(j->user);
const auto speaking = (when + kSpeakStatusKeptFor >= now)
&& j->canSelfUnmute;
if (j->speaking != speaking) {
@@ -317,8 +338,85 @@ void GroupCall::applyLastSpoke(uint32 ssrc, crl::time when, crl::time now) {
}
}
void GroupCall::requestUnknownSsrcs() {
if (_unknownSsrcsRequestId || _unknownSpokenSsrcs.empty()) {
void GroupCall::applyActiveUpdate(
UserId userId,
crl::time when,
UserData *userLoaded) {
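// While we are joined ourselves, applyLastSpoke() gets real audio levels, so the activity heuristic is not needed.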
if (inCall()) {
return;
}
const auto i = userLoaded
? ranges::find(
_participants,
not_null{ userLoaded },
&Participant::user)
: _participants.end();
if (i == end(_participants)) {
_unknownSpokenUids[userId] = when;
requestUnknownParticipants();
return;
} else if (!i->canSelfUnmute) {
return;
}
const auto was = std::make_optional(*i);
const auto now = crl::now();
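// "when" is monotonic milliseconds (crl::time); translate it into a unixtime lastActive and a deadline for dropping the speaking mark.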
const auto elapsed = TimeId((now - when) / crl::time(1000));
const auto lastActive = base::unixtime::now() - elapsed;
const auto finishes = when + kSpeakingAfterActive;
if (lastActive <= i->lastActive || finishes <= now) {
return;
}
_speakingByActiveFinishes[i->user] = finishes;
if (!_speakingByActiveFinishTimer.isActive()) {
_speakingByActiveFinishTimer.callOnce(finishes - now);
}
i->lastActive = lastActive;
i->speaking = true;
i->canSelfUnmute = true;
if (!was->speaking || !was->canSelfUnmute) {
_participantUpdates.fire({
.was = was,
.now = *i,
});
}
}
void GroupCall::checkFinishSpeakingByActive() {
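// Drop expired speaking-by-activity marks and reschedule the timer for the nearest remaining deadline.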
const auto now = crl::now();
auto nearest = crl::time(0);
auto stop = std::vector<not_null<UserData*>>();
for (auto i = begin(_speakingByActiveFinishes); i != end(_speakingByActiveFinishes);) {
const auto when = i->second;
if (now >= when) {
stop.push_back(i->first);
i = _speakingByActiveFinishes.erase(i);
} else {
if (!nearest || nearest > when) {
nearest = when;
}
++i;
}
}
for (const auto user : stop) {
const auto i = ranges::find(_participants, user, &Participant::user);
if (i->speaking) {
const auto was = *i;
i->speaking = false;
_participantUpdates.fire({
.was = was,
.now = *i,
});
}
}
if (nearest) {
_speakingByActiveFinishTimer.callOnce(nearest - now);
}
}
void GroupCall::requestUnknownParticipants() {
if (_unknownUsersRequestId
|| (_unknownSpokenSsrcs.empty() && _unknownSpokenUids.empty())) {
return;
}
const auto ssrcs = [&] {
@@ -334,15 +432,36 @@ void GroupCall::requestUnknownSsrcs() {
}
return result;
}();
auto inputs = QVector<MTPint>();
inputs.reserve(ssrcs.size());
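// Fill the rest of the request page with user ids whose activity could not be matched to a loaded participant.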
const auto uids = [&] {
if (_unknownSpokenUids.size() + ssrcs.size() < kRequestPerPage) {
return base::take(_unknownSpokenUids);
}
auto result = base::flat_map<UserId, crl::time>();
const auto available = (kRequestPerPage - int(ssrcs.size()));
if (available > 0) {
result.reserve(available);
while (result.size() < available) {
const auto [userId, when] = _unknownSpokenUids.back();
result.emplace(userId, when);
_unknownSpokenUids.erase(_unknownSpokenUids.end() - 1);
}
}
return result;
}();
auto ssrcInputs = QVector<MTPint>();
ssrcInputs.reserve(ssrcs.size());
for (const auto [ssrc, when] : ssrcs) {
inputs.push_back(MTP_int(ssrc));
ssrcInputs.push_back(MTP_int(ssrc));
}
_unknownSsrcsRequestId = api().request(MTPphone_GetGroupParticipants(
auto uidInputs = QVector<MTPint>();
uidInputs.reserve(uids.size());
for (const auto [userId, when] : uids) {
uidInputs.push_back(MTP_int(userId));
}
_unknownUsersRequestId = api().request(MTPphone_GetGroupParticipants(
input(),
MTP_vector<MTPint>(), // ids
MTP_vector<MTPint>(inputs),
MTP_vector<MTPint>(uidInputs),
MTP_vector<MTPint>(ssrcInputs),
MTP_string(QString()),
MTP_int(kRequestPerPage)
)).done([=](const MTPphone_GroupParticipants &result) {
@@ -352,22 +471,63 @@ void GroupCall::requestUnknownSsrcs() {
data.vparticipants().v,
ApplySliceSource::UnknownLoaded);
});
_unknownSsrcsRequestId = 0;
_unknownUsersRequestId = 0;
const auto now = crl::now();
for (const auto [ssrc, when] : ssrcs) {
applyLastSpoke(ssrc, when, now);
_unknownSpokenSsrcs.remove(ssrc);
}
requestUnknownSsrcs();
for (const auto [userId, when] : uids) {
if (const auto user = _channel->owner().userLoaded(userId)) {
const auto isParticipant = ranges::contains(
_participants,
not_null{ user },
&Participant::user);
if (isParticipant) {
applyActiveUpdate(userId, when, user);
}
}
_unknownSpokenUids.remove(userId);
}
requestUnknownParticipants();
}).fail([=](const RPCError &error) {
_unknownSsrcsRequestId = 0;
_unknownUsersRequestId = 0;
for (const auto [ssrc, when] : ssrcs) {
_unknownSpokenSsrcs.remove(ssrc);
}
requestUnknownSsrcs();
for (const auto [userId, when] : uids) {
_unknownSpokenUids.remove(userId);
}
requestUnknownParticipants();
}).send();
}
void GroupCall::setInCall() {
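// We have just joined: pending unknown speakers are irrelevant now, and lingering activity-based speaking marks are capped so they expire shortly.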
_unknownSpokenUids.clear();
if (_speakingByActiveFinishes.empty()) {
return;
}
auto restartTimer = true;
const auto latest = crl::now() + kActiveAfterJoined;
for (auto &[user, when] : _speakingByActiveFinishes) {
if (when > latest) {
when = latest;
} else {
restartTimer = false;
}
}
if (restartTimer) {
_speakingByActiveFinishTimer.callOnce(kActiveAfterJoined);
}
}
bool GroupCall::inCall() const {
const auto current = Core::App().calls().currentGroupCall();
return (current != nullptr)
&& (current->id() == _id)
&& (current->state() == Calls::GroupCall::State::Joined);
}
void GroupCall::applyUpdate(const MTPDupdateGroupCallParticipants &update) {
const auto version = update.vversion().v;
if (version < _version) {

View File

@@ -7,6 +7,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "base/timer.h"
class UserData;
class ChannelData;
@@ -53,10 +55,15 @@ public:
void applyUpdateChecked(
const MTPDupdateGroupCallParticipants &update);
void applyLastSpoke(uint32 ssrc, crl::time when, crl::time now);
void applyActiveUpdate(
UserId userId,
crl::time when,
UserData *userLoaded);
[[nodiscard]] int fullCount() const;
[[nodiscard]] rpl::producer<int> fullCountValue() const;
void setInCall();
void reload();
void setJoinMutedLocally(bool muted);
@@ -71,14 +78,16 @@ private:
};
[[nodiscard]] ApiWrap &api() const;
[[nodiscard]] bool inCall() const;
void applyCall(const MTPGroupCall &call, bool force);
void applyParticipantsSlice(
const QVector<MTPGroupCallParticipant> &list,
ApplySliceSource sliceSource);
void applyParticipantsMutes(
const MTPDupdateGroupCallParticipants &update);
void requestUnknownSsrcs();
void requestUnknownParticipants();
void changeChannelEmptyCallFlag();
void checkFinishSpeakingByActive();
const not_null<ChannelData*> _channel;
const uint64 _id = 0;
@@ -90,11 +99,14 @@ private:
std::vector<Participant> _participants;
base::flat_map<uint32, not_null<UserData*>> _userBySsrc;
base::flat_map<not_null<UserData*>, crl::time> _speakingByActiveFinishes;
base::Timer _speakingByActiveFinishTimer;
QString _nextOffset;
rpl::variable<int> _fullCount = 0;
base::flat_map<uint32, crl::time> _unknownSpokenSsrcs;
mtpRequestId _unknownSsrcsRequestId = 0;
base::flat_map<UserId, crl::time> _unknownSpokenUids;
mtpRequestId _unknownUsersRequestId = 0;
rpl::event_stream<ParticipantUpdate> _participantUpdates;
rpl::event_stream<> _participantsSliceAdded;

View File

@@ -118,7 +118,10 @@ rpl::producer<Ui::GroupCallBarContent> GroupCallTracker::ContentByCall(
}
for (auto i = 0; i != kLimit - already; ++i) {
if (adding[i]) {
state->userpics.push_back(UserpicInRow{ adding[i]->user });
state->userpics.push_back(UserpicInRow{
.peer = adding[i]->user,
.speaking = adding[i]->speaking,
});
}
}
return true;
@@ -133,12 +136,18 @@ rpl::producer<Ui::GroupCallBarContent> GroupCallTracker::ContentByCall(
if (!result) {
return false;
}
GenerateUserpicsInRow(
state->current.userpics,
state->userpics,
st);
state->current.users.reserve(state->userpics.size());
state->current.users.clear();
state->someUserpicsNotLoaded = false;
using User = Ui::GroupCallBarContent::User;
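// Render each visible userpic into an image and remember its key, id and speaking flag; the top bar compares these to decide when to repaint.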
for (const auto &userpic : state->userpics) {
const auto pic = userpic.peer->genUserpic(userpic.view, st.size);
state->current.users.push_back({
.userpic = pic.toImage(),
.userpicKey = userpic.uniqueKey,
.id = userpic.peer->bareId(),
.speaking = userpic.speaking,
});
if (userpic.peer->hasUserpic()
&& userpic.peer->useEmptyUserpic(userpic.view)) {
state->someUserpicsNotLoaded = true;
@@ -172,12 +181,17 @@ rpl::producer<Ui::GroupCallBarContent> GroupCallTracker::ContentByCall(
Expects(state->userpics.size() <= kLimit);
const auto &participants = call->participants();
auto i = state->userpics.begin();
auto i = begin(state->userpics);
// Find where to put a new speaking userpic.
for (; i != state->userpics.end(); ++i) {
for (; i != end(state->userpics); ++i) {
if (i->peer == user) {
return false;
if (i->speaking) {
return false;
}
const auto index = i - begin(state->userpics);
state->current.users[index].speaking = i->speaking = true;
return true;
}
const auto j = ranges::find(
participants,
@@ -194,7 +208,10 @@ rpl::producer<Ui::GroupCallBarContent> GroupCallTracker::ContentByCall(
}
// Add the new speaking to the place we found.
const auto added = state->userpics.insert(i, UserpicInRow{ user });
const auto added = state->userpics.insert(i, UserpicInRow{
.peer = user,
.speaking = true,
});
// Remove him from the tail, if he was there.
for (auto i = added + 1; i != state->userpics.end(); ++i) {
@@ -252,8 +269,27 @@ rpl::producer<Ui::GroupCallBarContent> GroupCallTracker::ContentByCall(
if (CheckPushToFront(state, call, user, st)) {
pushNext();
}
} else if (RegenerateUserpics(state, call, st)) {
pushNext();
} else {
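// Not a new speaker entering the row: flip the speaking flag in place if the user is visible, and push new content if anything changed.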
auto updateSpeakingState = update.was.has_value()
&& (update.now->speaking != update.was->speaking);
if (updateSpeakingState) {
const auto i = ranges::find(
state->userpics,
user,
&UserpicInRow::peer);
if (i != end(state->userpics)) {
const auto index = i - begin(state->userpics);
state->current.users[index].speaking
= i->speaking
= update.now->speaking;
} else {
updateSpeakingState = false;
}
}
if (RegenerateUserpics(state, call, st)
|| updateSpeakingState) {
pushNext();
}
}
}, lifetime);

View File

@@ -22,6 +22,7 @@ namespace HistoryView {
struct UserpicInRow {
not_null<PeerData*> peer;
bool speaking = false;
mutable std::shared_ptr<Data::CloudImageView> view;
mutable InMemoryKey uniqueKey;
};

View File

@@ -134,15 +134,19 @@ void GroupCallBar::paint(Painter &p) {
? tr::lng_group_call_members(tr::now, lt_count, _content.count)
: tr::lng_group_call_no_members(tr::now)));
if (!_content.userpics.isNull()) {
const auto imageSize = _content.userpics.size()
/ _content.userpics.devicePixelRatio();
// Skip shadow of the bar above.
const auto imageTop = (st::historyReplyHeight
- st::lineWidth
- imageSize.height()) / 2 + st::lineWidth;
const auto imageLeft = (_inner->width() - imageSize.width()) / 2;
p.drawImage(imageLeft, imageTop, _content.userpics);
const auto picsSize = _content.users.size() * st::historyGroupCallUserpicSize;
// Skip shadow of the bar above.
const auto imageTop = (st::historyReplyHeight
- st::lineWidth
- st::historyGroupCallUserpicSize) / 2 + st::lineWidth;
const auto picsLeft = (_inner->width() - picsSize) / 2;
auto imageLeft = picsLeft;
for (const auto &user : _content.users) {
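// Speaking members get a plain color fill behind their userpic.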
if (user.speaking) {
p.fillRect(imageLeft, imageTop, st::historyGroupCallUserpicSize, st::historyGroupCallUserpicSize, QColor(255, 128, 128));
}
p.drawImage(imageLeft, imageTop, user.userpic);
imageLeft += st::historyGroupCallUserpicSize;
}
}

View File

@@ -18,9 +18,15 @@ class PlainShadow;
class RoundButton;
struct GroupCallBarContent {
struct User {
QImage userpic;
std::pair<uint64, uint64> userpicKey = {};
int32 id = 0;
bool speaking = false;
};
int count = 0;
bool shown = false;
QImage userpics;
std::vector<User> users;
};
class GroupCallBar final {