Beta 1000002001: Recreate audio device when no output or device changes.

This commit is contained in:
John Preston 2017-01-25 00:24:39 +03:00
parent 6ae68b337d
commit 4964b8b488
40 changed files with 1363 additions and 878 deletions

View File

@ -34,8 +34,8 @@ IDI_ICON1 ICON "..\\art\\icon256.ico"
// //
VS_VERSION_INFO VERSIONINFO VS_VERSION_INFO VERSIONINFO
FILEVERSION 1,0,2,0 FILEVERSION 1,0,2,1
PRODUCTVERSION 1,0,2,0 PRODUCTVERSION 1,0,2,1
FILEFLAGSMASK 0x3fL FILEFLAGSMASK 0x3fL
#ifdef _DEBUG #ifdef _DEBUG
FILEFLAGS 0x1L FILEFLAGS 0x1L
@ -51,10 +51,10 @@ BEGIN
BLOCK "040904b0" BLOCK "040904b0"
BEGIN BEGIN
VALUE "CompanyName", "Telegram Messenger LLP" VALUE "CompanyName", "Telegram Messenger LLP"
VALUE "FileVersion", "1.0.2.0" VALUE "FileVersion", "1.0.2.1"
VALUE "LegalCopyright", "Copyright (C) 2014-2017" VALUE "LegalCopyright", "Copyright (C) 2014-2017"
VALUE "ProductName", "Telegram Desktop" VALUE "ProductName", "Telegram Desktop"
VALUE "ProductVersion", "1.0.2.0" VALUE "ProductVersion", "1.0.2.1"
END END
END END
BLOCK "VarFileInfo" BLOCK "VarFileInfo"

View File

@ -25,8 +25,8 @@ LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
// //
VS_VERSION_INFO VERSIONINFO VS_VERSION_INFO VERSIONINFO
FILEVERSION 1,0,2,0 FILEVERSION 1,0,2,1
PRODUCTVERSION 1,0,2,0 PRODUCTVERSION 1,0,2,1
FILEFLAGSMASK 0x3fL FILEFLAGSMASK 0x3fL
#ifdef _DEBUG #ifdef _DEBUG
FILEFLAGS 0x1L FILEFLAGS 0x1L
@ -43,10 +43,10 @@ BEGIN
BEGIN BEGIN
VALUE "CompanyName", "Telegram Messenger LLP" VALUE "CompanyName", "Telegram Messenger LLP"
VALUE "FileDescription", "Telegram Updater" VALUE "FileDescription", "Telegram Updater"
VALUE "FileVersion", "1.0.2.0" VALUE "FileVersion", "1.0.2.1"
VALUE "LegalCopyright", "Copyright (C) 2014-2017" VALUE "LegalCopyright", "Copyright (C) 2014-2017"
VALUE "ProductName", "Telegram Desktop" VALUE "ProductName", "Telegram Desktop"
VALUE "ProductVersion", "1.0.2.0" VALUE "ProductVersion", "1.0.2.1"
END END
END END
BLOCK "VarFileInfo" BLOCK "VarFileInfo"

View File

@ -97,21 +97,11 @@ enum {
MediaOverviewStartPerPage = 5, MediaOverviewStartPerPage = 5,
MediaOverviewPreloadCount = 4, MediaOverviewPreloadCount = 4,
AudioSimultaneousLimit = 4,
AudioCheckPositionTimeout = 100, // 100ms per check audio pos
AudioCheckPositionDelta = 2400, // update position called each 2400 samples
AudioFadeTimeout = 7, // 7ms
AudioFadeDuration = 500,
AudioVoiceMsgSkip = 400, // 200ms
AudioVoiceMsgFade = 300, // 300ms
AudioPreloadSamples = 2 * 48000, // preload next part if less than 5 seconds remains
AudioVoiceMsgFrequency = 48000, // 48 kHz
AudioVoiceMsgMaxLength = 100 * 60, // 100 minutes AudioVoiceMsgMaxLength = 100 * 60, // 100 minutes
AudioVoiceMsgUpdateView = 100, // 100ms AudioVoiceMsgUpdateView = 100, // 100ms
AudioVoiceMsgChannels = 2, // stereo AudioVoiceMsgChannels = 2, // stereo
AudioVoiceMsgBufferSize = 256 * 1024, // 256 Kb buffers (1.3 - 3.0 secs) AudioVoiceMsgBufferSize = 256 * 1024, // 256 Kb buffers (1.3 - 3.0 secs)
AudioVoiceMsgInMemory = 2 * 1024 * 1024, // 2 Mb audio is hold in memory and auto loaded AudioVoiceMsgInMemory = 2 * 1024 * 1024, // 2 Mb audio is hold in memory and auto loaded
AudioPauseDeviceTimeout = 3000, // pause in 3 secs after playing is over
WaveformSamplesCount = 100, WaveformSamplesCount = 100,

View File

@ -22,7 +22,7 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#include "core/utils.h" #include "core/utils.h"
#define BETA_VERSION_MACRO (0ULL) #define BETA_VERSION_MACRO (1000002001ULL)
constexpr int AppVersion = 1000002; constexpr int AppVersion = 1000002;
constexpr str_const AppVersionStr = "1.0.2"; constexpr str_const AppVersionStr = "1.0.2";

View File

@ -1436,44 +1436,43 @@ bool HistoryDocument::updateStatusText() const {
} else if (_data->loading()) { } else if (_data->loading()) {
statusSize = _data->loadOffset(); statusSize = _data->loadOffset();
} else if (_data->loaded()) { } else if (_data->loaded()) {
using State = Media::Player::State;
statusSize = FileStatusSizeLoaded; statusSize = FileStatusSizeLoaded;
if (_data->voice()) { if (_data->voice()) {
AudioMsgId playing; auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Voice); if (state.id == AudioMsgId(_data, _parent->fullId()) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
if (playing == AudioMsgId(_data, _parent->fullId()) && !(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) {
if (auto voice = Get<HistoryDocumentVoice>()) { if (auto voice = Get<HistoryDocumentVoice>()) {
bool was = voice->_playback; bool was = voice->_playback;
voice->ensurePlayback(this); voice->ensurePlayback(this);
if (!was || playbackState.position != voice->_playback->_position) { if (!was || state.position != voice->_playback->_position) {
float64 prg = playbackState.duration ? snap(float64(playbackState.position) / playbackState.duration, 0., 1.) : 0.; float64 prg = state.duration ? snap(float64(state.position) / state.duration, 0., 1.) : 0.;
if (voice->_playback->_position < playbackState.position) { if (voice->_playback->_position < state.position) {
voice->_playback->a_progress.start(prg); voice->_playback->a_progress.start(prg);
} else { } else {
voice->_playback->a_progress = anim::value(0., prg); voice->_playback->a_progress = anim::value(0., prg);
} }
voice->_playback->_position = playbackState.position; voice->_playback->_position = state.position;
voice->_playback->_a_progress.start(); voice->_playback->_a_progress.start();
} }
} }
statusSize = -1 - (playbackState.position / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency)); statusSize = -1 - (state.position / state.frequency);
realDuration = playbackState.duration / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency); realDuration = (state.duration / state.frequency);
showPause = (playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerResuming || playbackState.state == AudioPlayerStarting); showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
} else { } else {
if (auto voice = Get<HistoryDocumentVoice>()) { if (auto voice = Get<HistoryDocumentVoice>()) {
voice->checkPlaybackFinished(); voice->checkPlaybackFinished();
} }
} }
} else if (_data->song()) { } else if (_data->song()) {
AudioMsgId playing; auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Song); if (state.id == AudioMsgId(_data, _parent->fullId()) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
if (playing == AudioMsgId(_data, _parent->fullId()) && !(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) { statusSize = -1 - (state.position / state.frequency);
statusSize = -1 - (playbackState.position / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency)); realDuration = (state.duration / state.frequency);
realDuration = playbackState.duration / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency); showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
showPause = (playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerResuming || playbackState.state == AudioPlayerStarting);
} else { } else {
} }
if (!showPause && (playing == AudioMsgId(_data, _parent->fullId()))) { if (!showPause && (state.id == AudioMsgId(_data, _parent->fullId()))) {
showPause = Media::Player::instance()->isSeeking(); showPause = Media::Player::instance()->isSeeking();
} }
} }

View File

@ -3532,7 +3532,7 @@ void HistoryWidget::onRecordDone(QByteArray result, VoiceWaveform waveform, qint
if (!canWriteMessage() || result.isEmpty()) return; if (!canWriteMessage() || result.isEmpty()) return;
App::wnd()->activateWindow(); App::wnd()->activateWindow();
auto duration = samples / AudioVoiceMsgFrequency; auto duration = samples / Media::Player::kDefaultFrequency;
auto to = FileLoadTo(_peer->id, _silent->checked(), replyToId()); auto to = FileLoadTo(_peer->id, _silent->checked(), replyToId());
auto caption = QString(); auto caption = QString();
_fileLoader.addTask(MakeShared<FileLoadTask>(result, duration, waveform, to, caption)); _fileLoader.addTask(MakeShared<FileLoadTask>(result, duration, waveform, to, caption));
@ -3547,7 +3547,7 @@ void HistoryWidget::onRecordUpdate(quint16 level, qint32 samples) {
a_recordingLevel.start(level); a_recordingLevel.start(level);
_a_recording.start(); _a_recording.start();
_recordingSamples = samples; _recordingSamples = samples;
if (samples < 0 || samples >= AudioVoiceMsgFrequency * AudioVoiceMsgMaxLength) { if (samples < 0 || samples >= Media::Player::kDefaultFrequency * AudioVoiceMsgMaxLength) {
stopRecording(_peer && samples > 0 && _inField); stopRecording(_peer && samples > 0 && _inField);
} }
updateField(); updateField();
@ -8736,7 +8736,7 @@ void HistoryWidget::drawRecording(Painter &p, float64 recordActive) {
p.drawEllipse(_attachToggle->x() + (_attachEmoji->width() - d) / 2, _attachToggle->y() + (_attachToggle->height() - d) / 2, d, d); p.drawEllipse(_attachToggle->x() + (_attachEmoji->width() - d) / 2, _attachToggle->y() + (_attachToggle->height() - d) / 2, d, d);
} }
QString duration = formatDurationText(_recordingSamples / AudioVoiceMsgFrequency); auto duration = formatDurationText(_recordingSamples / Media::Player::kDefaultFrequency);
p.setFont(st::historyRecordFont); p.setFont(st::historyRecordFont);
p.setPen(st::historyRecordDurationFg); p.setPen(st::historyRecordDurationFg);

View File

@ -830,25 +830,24 @@ bool File::updateStatusText() const {
} else if (document->loading()) { } else if (document->loading()) {
statusSize = document->loadOffset(); statusSize = document->loadOffset();
} else if (document->loaded()) { } else if (document->loaded()) {
using State = Media::Player::State;
if (document->voice()) { if (document->voice()) {
statusSize = FileStatusSizeLoaded; statusSize = FileStatusSizeLoaded;
AudioMsgId playing; auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Voice); if (state.id == AudioMsgId(document, FullMsgId()) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
if (playing == AudioMsgId(document, FullMsgId()) && !(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) { statusSize = -1 - (state.position / state.frequency);
statusSize = -1 - (playbackState.position / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency)); realDuration = (state.duration / state.frequency);
realDuration = playbackState.duration / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency); showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
showPause = (playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerResuming || playbackState.state == AudioPlayerStarting);
} }
} else if (document->song()) { } else if (document->song()) {
statusSize = FileStatusSizeLoaded; statusSize = FileStatusSizeLoaded;
AudioMsgId playing; auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Song); if (state.id == AudioMsgId(document, FullMsgId()) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
if (playing == AudioMsgId(document, FullMsgId()) && !(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) { statusSize = -1 - (state.position / state.frequency);
statusSize = -1 - (playbackState.position / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency)); realDuration = (state.duration / state.frequency);
realDuration = playbackState.duration / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency); showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
showPause = (playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerResuming || playbackState.state == AudioPlayerStarting);
} }
if (!showPause && (playing == AudioMsgId(document, FullMsgId())) && Media::Player::instance()->isSeeking()) { if (!showPause && (state.id == AudioMsgId(document, FullMsgId())) && Media::Player::instance()->isSeeking()) {
showPause = true; showPause = true;
} }
} else { } else {

View File

@ -1563,10 +1563,10 @@ void MainWidget::ui_autoplayMediaInlineAsync(qint32 channelId, qint32 msgId) {
} }
void MainWidget::handleAudioUpdate(const AudioMsgId &audioId) { void MainWidget::handleAudioUpdate(const AudioMsgId &audioId) {
AudioMsgId playing; using State = Media::Player::State;
auto playbackState = Media::Player::mixer()->currentState(&playing, audioId.type()); auto state = Media::Player::mixer()->currentState(audioId.type());
if (playing == audioId && playbackState.state == AudioPlayerStoppedAtStart) { if (state.id == audioId && state.state == State::StoppedAtStart) {
playbackState.state = AudioPlayerStopped; state.state = State::Stopped;
Media::Player::mixer()->clearStoppedAtStart(audioId); Media::Player::mixer()->clearStoppedAtStart(audioId);
auto document = audioId.audio(); auto document = audioId.audio();
@ -1578,8 +1578,8 @@ void MainWidget::handleAudioUpdate(const AudioMsgId &audioId) {
} }
} }
if (playing == audioId && audioId.type() == AudioMsgId::Type::Song) { if (state.id == audioId && audioId.type() == AudioMsgId::Type::Song) {
if (!(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) { if (!Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
if (!_playerUsingPanel && !_player) { if (!_playerUsingPanel && !_player) {
createPlayer(); createPlayer();
} }
@ -1675,9 +1675,8 @@ void MainWidget::playerHeightUpdated() {
updateControlsGeometry(); updateControlsGeometry();
} }
if (!_playerHeight && _player->isHidden()) { if (!_playerHeight && _player->isHidden()) {
AudioMsgId playing; auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Song); if (state.id && Media::Player::IsStopped(state.state)) {
if (playing && (playbackState.state & AudioPlayerStoppedMask)) {
_playerVolume.destroyDelayed(); _playerVolume.destroyDelayed();
_player.destroyDelayed(); _player.destroyDelayed();
} }

File diff suppressed because it is too large Load Diff

View File

@ -20,44 +20,73 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
*/ */
#pragma once #pragma once
// Legacy playback state (removed in this commit in favour of Media::Player::State).
// The four stopped variants occupy the low three bits so callers can test any of
// them at once with (state & AudioPlayerStoppedMask); non-stopped states are
// multiples of 0x08 and never intersect that mask.
enum AudioPlayerState {
AudioPlayerStopped = 0x01, // stopped normally
AudioPlayerStoppedAtEnd = 0x02, // stopped after reaching the end of the track
AudioPlayerStoppedAtError = 0x03, // stopped because of a playback error
AudioPlayerStoppedAtStart = 0x04, // stopped before playback really began
AudioPlayerStoppedMask = 0x07, // covers all stopped variants above
AudioPlayerStarting = 0x08, // fading in
AudioPlayerPlaying = 0x10,
AudioPlayerFinishing = 0x18, // fading out towards stop
AudioPlayerPausing = 0x20, // fading out towards pause
AudioPlayerPaused = 0x28,
AudioPlayerPausedAtEnd = 0x30,
AudioPlayerResuming = 0x38, // fading back in from pause
};
struct VideoSoundData; struct VideoSoundData;
struct VideoSoundPart; struct VideoSoundPart;
// Legacy playback snapshot (replaced by Media::Player::TrackState in this commit).
struct AudioPlaybackState {
AudioPlayerState state = AudioPlayerStopped;
int64 position = 0; // current playback position, in samples
TimeMs duration = 0; // NOTE(review): callers divide this by frequency, so it looks sample-denominated despite the TimeMs type — confirm
int32 frequency = 0; // 0 means "unset"; callers fall back to AudioVoiceMsgFrequency
};
namespace Media { namespace Media {
namespace Player { namespace Player {
constexpr auto kDefaultFrequency = 48000; // 48 kHz
constexpr auto kTogetherLimit = 4;
class Fader;
class Loaders;
void InitAudio(); void InitAudio();
void DeInitAudio(); void DeInitAudio();
base::Observable<AudioMsgId> &Updated(); base::Observable<AudioMsgId> &Updated();
bool CreateAudioPlaybackDevice(); void DetachFromDeviceByTimer();
void PlayNotify(); void PlayNotify();
class Fader; float64 ComputeVolume(AudioMsgId::Type type);
class Loaders;
// Playback state of a single track. The numeric values are carried over from
// the legacy AudioPlayerState flags (stopped variants in the low three bits,
// other states multiples of 0x08), but membership tests now go through the
// IsStopped/IsPaused/IsFading helpers instead of bit masks.
enum class State {
Stopped = 0x01, // stopped normally
StoppedAtEnd = 0x02, // stopped after reaching the end of the track
StoppedAtError = 0x03, // stopped because of a playback error
StoppedAtStart = 0x04, // stopped before playback really began
Starting = 0x08, // fading in
Playing = 0x10,
Finishing = 0x18, // fading out towards stop
Pausing = 0x20, // fading out towards pause
Paused = 0x28,
PausedAtEnd = 0x30,
Resuming = 0x38, // fading back in from pause
};
// Whether the track is in any of the terminal (stopped) states.
inline bool IsStopped(State state) {
	switch (state) {
	case State::Stopped:
	case State::StoppedAtEnd:
	case State::StoppedAtError:
	case State::StoppedAtStart:
		return true;
	default:
		return false;
	}
}
// Whether the track currently rests in a paused state.
inline bool IsPaused(State state) {
	switch (state) {
	case State::Paused:
	case State::PausedAtEnd:
		return true;
	default:
		return false;
	}
}
// Whether the track is mid-transition (volume fade in progress).
inline bool IsFading(State state) {
	switch (state) {
	case State::Starting:
	case State::Finishing:
	case State::Pausing:
	case State::Resuming:
		return true;
	default:
		return false;
	}
}
// A track is active when it is neither stopped nor resting in pause.
inline bool IsActive(State state) {
	return !(IsStopped(state) || IsPaused(state));
}
// Snapshot of one track's playback status, returned by Mixer::currentState()
// and Mixer::currentVideoState().
struct TrackState {
AudioMsgId id; // which audio message this state describes (null id when nothing plays)
State state = State::Stopped;
int64 position = 0; // current position, in samples (callers divide by frequency for seconds)
TimeMs duration = 0; // NOTE(review): callers divide this by frequency too, so it appears sample-denominated despite the TimeMs type — confirm
int frequency = kDefaultFrequency; // sample rate; defaulted so callers no longer need a zero-check fallback
};
class Mixer : public QObject, private base::Subscriber { class Mixer : public QObject, private base::Subscriber {
Q_OBJECT Q_OBJECT
@ -67,27 +96,32 @@ public:
void play(const AudioMsgId &audio, int64 position = 0); void play(const AudioMsgId &audio, int64 position = 0);
void pauseresume(AudioMsgId::Type type, bool fast = false); void pauseresume(AudioMsgId::Type type, bool fast = false);
void seek(int64 position); // type == AudioMsgId::Type::Song void seek(AudioMsgId::Type type, int64 position); // type == AudioMsgId::Type::Song
void stop(AudioMsgId::Type type); void stop(AudioMsgId::Type type);
// Video player audio stream interface. // Video player audio stream interface.
void initFromVideo(uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position); void initFromVideo(uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position);
void feedFromVideo(VideoSoundPart &&part); void feedFromVideo(VideoSoundPart &&part);
int64 getVideoCorrectedTime(uint64 playId, TimeMs frameMs, TimeMs systemMs); int64 getVideoCorrectedTime(uint64 playId, TimeMs frameMs, TimeMs systemMs);
AudioPlaybackState currentVideoState(uint64 videoPlayId); TrackState currentVideoState(uint64 videoPlayId);
void stopFromVideo(uint64 videoPlayId); void stopFromVideo(uint64 videoPlayId);
void pauseFromVideo(uint64 videoPlayId); void pauseFromVideo(uint64 videoPlayId);
void resumeFromVideo(uint64 videoPlayId); void resumeFromVideo(uint64 videoPlayId);
void stopAndClear(); void stopAndClear();
AudioPlaybackState currentState(AudioMsgId *audio, AudioMsgId::Type type); TrackState currentState(AudioMsgId::Type type);
void clearStoppedAtStart(const AudioMsgId &audio); void clearStoppedAtStart(const AudioMsgId &audio);
void detachFromDeviceByTimer();
void detachTracks();
void reattachIfNeeded();
void reattachTracks();
~Mixer(); ~Mixer();
private slots: private slots:
void onError(const AudioMsgId &audio); void onError(const AudioMsgId &audio);
void onStopped(const AudioMsgId &audio); void onStopped(const AudioMsgId &audio);
@ -107,55 +141,70 @@ signals:
private: private:
bool fadedStop(AudioMsgId::Type type, bool *fadedStart = 0); bool fadedStop(AudioMsgId::Type type, bool *fadedStart = 0);
bool updateCurrentStarted(AudioMsgId::Type type, int32 pos = -1); void resetFadeStartPosition(AudioMsgId::Type type, int positionInBuffered = -1);
bool checkCurrentALError(AudioMsgId::Type type); bool checkCurrentALError(AudioMsgId::Type type);
void videoSoundProgress(const AudioMsgId &audio); void videoSoundProgress(const AudioMsgId &audio);
struct AudioMsg { class Track {
void clear(); public:
static constexpr int kBuffersCount = 3;
AudioMsgId audio; void reattach(AudioMsgId::Type type);
void detach();
void clear();
void started();
bool isStreamCreated() const;
void ensureStreamCreated();
int getNotQueuedBufferIndex();
TrackState state;
FileLocation file; FileLocation file;
QByteArray data; QByteArray data;
AudioPlaybackState playbackState = defaultState(); int64 bufferedPosition = 0;
int64 skipStart = 0; int64 bufferedLength = 0;
int64 skipEnd = 0;
bool loading = false; bool loading = false;
int64 started = 0; bool loaded = false;
int64 fadeStartPosition = 0;
uint32 source = 0; int32 format = 0;
int32 nextBuffer = 0; int32 frequency = kDefaultFrequency;
uint32 buffers[3] = { 0 }; int samplesCount[kBuffersCount] = { 0 };
int64 samplesCount[3] = { 0 }; QByteArray bufferSamples[kBuffersCount];
struct Stream {
uint32 source = 0;
uint32 buffers[kBuffersCount] = { 0 };
};
Stream stream;
uint64 videoPlayId = 0; uint64 videoPlayId = 0;
std_::unique_ptr<VideoSoundData> videoData; std_::unique_ptr<VideoSoundData> videoData;
private: private:
static AudioPlaybackState defaultState() { void createStream();
AudioPlaybackState result; void destroyStream();
result.frequency = AudioVoiceMsgFrequency; void resetStream();
return result;
}
}; };
void setStoppedState(AudioMsg *current, AudioPlayerState state = AudioPlayerStopped); void setStoppedState(Track *current, State state = State::Stopped);
AudioMsg *dataForType(AudioMsgId::Type type, int index = -1); // -1 uses currentIndex(type) Track *trackForType(AudioMsgId::Type type, int index = -1); // -1 uses currentIndex(type)
const AudioMsg *dataForType(AudioMsgId::Type type, int index = -1) const; const Track *trackForType(AudioMsgId::Type type, int index = -1) const;
int *currentIndex(AudioMsgId::Type type); int *currentIndex(AudioMsgId::Type type);
const int *currentIndex(AudioMsgId::Type type) const; const int *currentIndex(AudioMsgId::Type type) const;
int _audioCurrent = 0; int _audioCurrent = 0;
AudioMsg _audioData[AudioSimultaneousLimit]; Track _audioTracks[kTogetherLimit];
int _songCurrent = 0; int _songCurrent = 0;
AudioMsg _songData[AudioSimultaneousLimit]; Track _songTracks[kTogetherLimit];
AudioMsg _videoData; Track _videoTrack;
uint64 _lastVideoPlayId = 0; uint64 _lastVideoPlayId = 0;
TimeMs _lastVideoPlaybackWhen = 0; TimeMs _lastVideoPlaybackWhen = 0;
TimeMs _lastVideoPlaybackCorrectedMs = 0; TimeMs _lastVideoPlaybackCorrectedMs = 0;
@ -179,7 +228,7 @@ class Fader : public QObject {
public: public:
Fader(QThread *thread); Fader(QThread *thread);
void resumeDevice(); void keepAttachedToDevice();
signals: signals:
void error(const AudioMsgId &audio); void error(const AudioMsgId &audio);
@ -187,13 +236,12 @@ signals:
void audioStopped(const AudioMsgId &audio); void audioStopped(const AudioMsgId &audio);
void needToPreload(const AudioMsgId &audio); void needToPreload(const AudioMsgId &audio);
void stopPauseDevice();
public slots: public slots:
void onDetachFromDeviceByTimer(bool force);
void onInit(); void onInit();
void onTimer(); void onTimer();
void onPauseTimer(); void onDetachFromDeviceTimer();
void onPauseTimerStop();
void onSuppressSong(); void onSuppressSong();
void onUnsuppressSong(); void onUnsuppressSong();
@ -208,23 +256,25 @@ private:
EmitPositionUpdated = 0x04, EmitPositionUpdated = 0x04,
EmitNeedToPreload = 0x08, EmitNeedToPreload = 0x08,
}; };
int32 updateOnePlayback(Mixer::AudioMsg *m, bool &hasPlaying, bool &hasFading, float64 suppressGain, bool suppressGainChanged); int32 updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 suppressGain, bool suppressGainChanged);
void setStoppedState(Mixer::AudioMsg *m, AudioPlayerState state = AudioPlayerStopped); void setStoppedState(Mixer::Track *track, State state = State::Stopped);
QTimer _timer, _pauseTimer; QTimer _timer;
QMutex _pauseMutex;
bool _pauseFlag = false;
bool _paused = true;
bool _suppressAll = false; bool _suppressAll = false;
bool _suppressAllAnim = false; bool _suppressAllAnim = false;
bool _suppressSong = false; bool _suppressSong = false;
bool _suppressSongAnim = false; bool _suppressSongAnim = false;
bool _songVolumeChanged, _videoVolumeChanged; bool _songVolumeChanged = false;
bool _videoVolumeChanged = false;
anim::value _suppressAllGain, _suppressSongGain; anim::value _suppressAllGain, _suppressSongGain;
TimeMs _suppressAllStart = 0; TimeMs _suppressAllStart = 0;
TimeMs _suppressSongStart = 0; TimeMs _suppressSongStart = 0;
QTimer _detachFromDeviceTimer;
QMutex _detachFromDeviceMutex;
bool _detachFromDeviceForce = false;
}; };
} // namespace Player } // namespace Player
@ -233,10 +283,11 @@ private:
namespace internal { namespace internal {
QMutex *audioPlayerMutex(); QMutex *audioPlayerMutex();
float64 audioSuppressGain();
float64 audioSuppressSongGain();
bool audioCheckError(); bool audioCheckError();
// AudioMutex must be locked.
bool CheckAudioDeviceConnected();
} // namespace internal } // namespace internal
MTPDocumentAttribute audioReadSongAttributes(const QString &fname, const QByteArray &data, QImage &cover, QByteArray &coverBytes, QByteArray &coverFormat); MTPDocumentAttribute audioReadSongAttributes(const QString &fname, const QByteArray &data, QImage &cover, QByteArray &coverBytes, QByteArray &coverFormat);

View File

@ -30,10 +30,13 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#include <AL/alext.h> #include <AL/alext.h>
namespace Media { namespace Media {
namespace Capture { namespace Capture {
namespace { namespace {
constexpr auto kCaptureFrequency = Player::kDefaultFrequency;
constexpr auto kCaptureSkipDuration = TimeMs(400);
constexpr auto kCaptureFadeInDuration = TimeMs(300);
Instance *CaptureInstance = nullptr; Instance *CaptureInstance = nullptr;
bool ErrorHappened(ALCdevice *device) { bool ErrorHappened(ALCdevice *device) {
@ -72,7 +75,7 @@ Instance::Instance() : _inner(new Inner(&_thread)) {
void Instance::check() { void Instance::check() {
_available = false; _available = false;
if (auto defaultDevice = alcGetString(0, ALC_CAPTURE_DEFAULT_DEVICE_SPECIFIER)) { if (auto defaultDevice = alcGetString(0, ALC_CAPTURE_DEFAULT_DEVICE_SPECIFIER)) {
if (auto device = alcCaptureOpenDevice(defaultDevice, AudioVoiceMsgFrequency, AL_FORMAT_MONO16, AudioVoiceMsgFrequency / 5)) { if (auto device = alcCaptureOpenDevice(defaultDevice, kCaptureFrequency, AL_FORMAT_MONO16, kCaptureFrequency / 5)) {
auto error = ErrorHappened(device); auto error = ErrorHappened(device);
alcCaptureCloseDevice(device); alcCaptureCloseDevice(device);
_available = !error; _available = !error;
@ -117,7 +120,7 @@ struct Instance::Inner::Private {
int32 dataPos = 0; int32 dataPos = 0;
int64 waveformMod = 0; int64 waveformMod = 0;
int64 waveformEach = (AudioVoiceMsgFrequency / 100); int64 waveformEach = (kCaptureFrequency / 100);
uint16 waveformPeak = 0; uint16 waveformPeak = 0;
QVector<uchar> waveform; QVector<uchar> waveform;
@ -180,7 +183,7 @@ void Instance::Inner::onStart() {
// Start OpenAL Capture // Start OpenAL Capture
const ALCchar *dName = alcGetString(0, ALC_CAPTURE_DEFAULT_DEVICE_SPECIFIER); const ALCchar *dName = alcGetString(0, ALC_CAPTURE_DEFAULT_DEVICE_SPECIFIER);
DEBUG_LOG(("Audio Info: Capture device name '%1'").arg(dName)); DEBUG_LOG(("Audio Info: Capture device name '%1'").arg(dName));
d->device = alcCaptureOpenDevice(dName, AudioVoiceMsgFrequency, AL_FORMAT_MONO16, AudioVoiceMsgFrequency / 5); d->device = alcCaptureOpenDevice(dName, kCaptureFrequency, AL_FORMAT_MONO16, kCaptureFrequency / 5);
if (!d->device) { if (!d->device) {
LOG(("Audio Error: capture device not present!")); LOG(("Audio Error: capture device not present!"));
emit error(); emit error();
@ -253,7 +256,7 @@ void Instance::Inner::onStart() {
d->codecContext->sample_fmt = AV_SAMPLE_FMT_FLTP; d->codecContext->sample_fmt = AV_SAMPLE_FMT_FLTP;
d->codecContext->bit_rate = 64000; d->codecContext->bit_rate = 64000;
d->codecContext->channel_layout = AV_CH_LAYOUT_MONO; d->codecContext->channel_layout = AV_CH_LAYOUT_MONO;
d->codecContext->sample_rate = AudioVoiceMsgFrequency; d->codecContext->sample_rate = kCaptureFrequency;
d->codecContext->channels = 1; d->codecContext->channels = 1;
if (d->fmtContext->oformat->flags & AVFMT_GLOBALHEADER) { if (d->fmtContext->oformat->flags & AVFMT_GLOBALHEADER) {
@ -341,8 +344,9 @@ void Instance::Inner::onStop(bool needResult) {
// Write what is left // Write what is left
if (!_captured.isEmpty()) { if (!_captured.isEmpty()) {
int32 fadeSamples = AudioVoiceMsgFade * AudioVoiceMsgFrequency / 1000, capturedSamples = _captured.size() / sizeof(short); auto fadeSamples = kCaptureFadeInDuration * kCaptureFrequency / 1000;
if ((_captured.size() % sizeof(short)) || (d->fullSamples + capturedSamples < AudioVoiceMsgFrequency) || (capturedSamples < fadeSamples)) { auto capturedSamples = static_cast<int>(_captured.size() / sizeof(short));
if ((_captured.size() % sizeof(short)) || (d->fullSamples + capturedSamples < kCaptureFrequency) || (capturedSamples < fadeSamples)) {
d->fullSamples = 0; d->fullSamples = 0;
d->dataPos = 0; d->dataPos = 0;
d->data.clear(); d->data.clear();
@ -489,7 +493,8 @@ void Instance::Inner::onTimeout() {
} }
if (samples > 0) { if (samples > 0) {
// Get samples from OpenAL // Get samples from OpenAL
int32 s = _captured.size(), news = s + samples * sizeof(short); auto s = _captured.size();
auto news = s + static_cast<int>(samples * sizeof(short));
if (news / AudioVoiceMsgBufferSize > s / AudioVoiceMsgBufferSize) { if (news / AudioVoiceMsgBufferSize > s / AudioVoiceMsgBufferSize) {
_captured.reserve(((news / AudioVoiceMsgBufferSize) + 1) * AudioVoiceMsgBufferSize); _captured.reserve(((news / AudioVoiceMsgBufferSize) + 1) * AudioVoiceMsgBufferSize);
} }
@ -502,9 +507,10 @@ void Instance::Inner::onTimeout() {
} }
// Count new recording level and update view // Count new recording level and update view
int32 skipSamples = AudioVoiceMsgSkip * AudioVoiceMsgFrequency / 1000, fadeSamples = AudioVoiceMsgFade * AudioVoiceMsgFrequency / 1000; auto skipSamples = kCaptureSkipDuration * kCaptureFrequency / 1000;
int32 levelindex = d->fullSamples + (s / sizeof(short)); auto fadeSamples = kCaptureFadeInDuration * kCaptureFrequency / 1000;
for (const short *ptr = (const short*)(_captured.constData() + s), *end = (const short*)(_captured.constData() + news); ptr < end; ++ptr, ++levelindex) { auto levelindex = d->fullSamples + static_cast<int>(s / sizeof(short));
for (auto ptr = (const short*)(_captured.constData() + s), end = (const short*)(_captured.constData() + news); ptr < end; ++ptr, ++levelindex) {
if (levelindex > skipSamples) { if (levelindex > skipSamples) {
uint16 value = qAbs(*ptr); uint16 value = qAbs(*ptr);
if (levelindex < skipSamples + fadeSamples) { if (levelindex < skipSamples + fadeSamples) {
@ -516,7 +522,7 @@ void Instance::Inner::onTimeout() {
} }
} }
qint32 samplesFull = d->fullSamples + _captured.size() / sizeof(short), samplesSinceUpdate = samplesFull - d->lastUpdate; qint32 samplesFull = d->fullSamples + _captured.size() / sizeof(short), samplesSinceUpdate = samplesFull - d->lastUpdate;
if (samplesSinceUpdate > AudioVoiceMsgUpdateView * AudioVoiceMsgFrequency / 1000) { if (samplesSinceUpdate > AudioVoiceMsgUpdateView * kCaptureFrequency / 1000) {
emit updated(d->levelMax, samplesFull); emit updated(d->levelMax, samplesFull);
d->lastUpdate = samplesFull; d->lastUpdate = samplesFull;
d->levelMax = 0; d->levelMax = 0;
@ -548,7 +554,7 @@ void Instance::Inner::processFrame(int32 offset, int32 framesize) {
emit error(); emit error();
return; return;
} }
int32 samplesCnt = framesize / sizeof(short); auto samplesCnt = static_cast<int>(framesize / sizeof(short));
int res = 0; int res = 0;
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 }; char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
@ -557,7 +563,8 @@ void Instance::Inner::processFrame(int32 offset, int32 framesize) {
auto srcSamplesData = &srcSamplesDataChannel; auto srcSamplesData = &srcSamplesDataChannel;
// memcpy(d->srcSamplesData[0], _captured.constData() + offset, framesize); // memcpy(d->srcSamplesData[0], _captured.constData() + offset, framesize);
int32 skipSamples = AudioVoiceMsgSkip * AudioVoiceMsgFrequency / 1000, fadeSamples = AudioVoiceMsgFade * AudioVoiceMsgFrequency / 1000; auto skipSamples = static_cast<int>(kCaptureSkipDuration * kCaptureFrequency / 1000);
auto fadeSamples = static_cast<int>(kCaptureFadeInDuration * kCaptureFrequency / 1000);
if (d->fullSamples < skipSamples + fadeSamples) { if (d->fullSamples < skipSamples + fadeSamples) {
int32 fadedCnt = qMin(samplesCnt, skipSamples + fadeSamples - d->fullSamples); int32 fadedCnt = qMin(samplesCnt, skipSamples + fadeSamples - d->fullSamples);
float64 coef = 1. / fadeSamples, fadedFrom = d->fullSamples - skipSamples; float64 coef = 1. / fadeSamples, fadedFrom = d->fullSamples - skipSamples;

View File

@ -204,7 +204,7 @@ bool FFMpegLoader::open(qint64 &position) {
int64_t src_ch_layout = layout, dst_ch_layout = AudioToChannelLayout; int64_t src_ch_layout = layout, dst_ch_layout = AudioToChannelLayout;
srcRate = freq; srcRate = freq;
AVSampleFormat src_sample_fmt = inputFormat, dst_sample_fmt = AudioToFormat; AVSampleFormat src_sample_fmt = inputFormat, dst_sample_fmt = AudioToFormat;
dstRate = (freq != 44100 && freq != 48000) ? AudioVoiceMsgFrequency : freq; dstRate = (freq != 44100 && freq != 48000) ? Media::Player::kDefaultFrequency : freq;
av_opt_set_int(swrContext, "in_channel_layout", src_ch_layout, 0); av_opt_set_int(swrContext, "in_channel_layout", src_ch_layout, 0);
av_opt_set_int(swrContext, "in_sample_rate", srcRate, 0); av_opt_set_int(swrContext, "in_sample_rate", srcRate, 0);

View File

@ -20,6 +20,7 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
*/ */
#pragma once #pragma once
#include "media/media_audio.h"
#include "media/media_audio_loader.h" #include "media/media_audio_loader.h"
extern "C" { extern "C" {
@ -49,7 +50,7 @@ public:
~AbstractFFMpegLoader(); ~AbstractFFMpegLoader();
protected: protected:
int32 freq = AudioVoiceMsgFrequency; int32 freq = Media::Player::kDefaultFrequency;
TimeMs len = 0; TimeMs len = 0;
uchar *ioBuffer = nullptr; uchar *ioBuffer = nullptr;
@ -89,8 +90,8 @@ private:
ReadResult readFromReadyFrame(QByteArray &result, int64 &samplesAdded); ReadResult readFromReadyFrame(QByteArray &result, int64 &samplesAdded);
int32 fmt = AL_FORMAT_STEREO16; int32 fmt = AL_FORMAT_STEREO16;
int32 srcRate = AudioVoiceMsgFrequency; int32 srcRate = Media::Player::kDefaultFrequency;
int32 dstRate = AudioVoiceMsgFrequency; int32 dstRate = Media::Player::kDefaultFrequency;
int32 maxResampleSamples = 1024; int32 maxResampleSamples = 1024;
uint8_t **dstSamplesData = nullptr; uint8_t **dstSamplesData = nullptr;

View File

@ -98,10 +98,10 @@ void Loaders::onStart(const AudioMsgId &audio, qint64 position) {
QMutexLocker lock(internal::audioPlayerMutex()); QMutexLocker lock(internal::audioPlayerMutex());
if (!mixer()) return; if (!mixer()) return;
auto data = mixer()->dataForType(type); auto track = mixer()->trackForType(type);
if (!data) return; if (!track) return;
data->loading = true; track->loading = true;
} }
loadData(audio, position); loadData(audio, position);
@ -117,9 +117,9 @@ AudioMsgId Loaders::clear(AudioMsgId::Type type) {
return result; return result;
} }
void Loaders::setStoppedState(Mixer::AudioMsg *m, AudioPlayerState state) { void Loaders::setStoppedState(Mixer::Track *track, State state) {
m->playbackState.state = state; track->state.state = state;
m->playbackState.position = 0; track->state.position = 0;
} }
void Loaders::emitError(AudioMsgId::Type type) { void Loaders::emitError(AudioMsgId::Type type) {
@ -131,9 +131,9 @@ void Loaders::onLoad(const AudioMsgId &audio) {
} }
void Loaders::loadData(AudioMsgId audio, qint64 position) { void Loaders::loadData(AudioMsgId audio, qint64 position) {
SetupError err = SetupNoErrorStarted; auto err = SetupNoErrorStarted;
auto type = audio.type(); auto type = audio.type();
AudioPlayerLoader *l = setupLoader(audio, err, position); auto l = setupLoader(audio, err, position);
if (!l) { if (!l) {
if (err == SetupErrorAtStart) { if (err == SetupErrorAtStart) {
emitError(type); emitError(type);
@ -141,10 +141,10 @@ void Loaders::loadData(AudioMsgId audio, qint64 position) {
return; return;
} }
bool started = (err == SetupNoErrorStarted); auto started = (err == SetupNoErrorStarted);
bool finished = false; auto finished = false;
bool waiting = false; auto waiting = false;
bool errAtStart = started; auto errAtStart = started;
QByteArray samples; QByteArray samples;
int64 samplesCount = 0; int64 samplesCount = 0;
@ -158,8 +158,9 @@ void Loaders::loadData(AudioMsgId audio, qint64 position) {
if (errAtStart) { if (errAtStart) {
{ {
QMutexLocker lock(internal::audioPlayerMutex()); QMutexLocker lock(internal::audioPlayerMutex());
auto m = checkLoader(type); if (auto track = checkLoader(type)) {
if (m) m->playbackState.state = AudioPlayerStoppedAtStart; track->state.state = State::StoppedAtStart;
}
} }
emitError(type); emitError(type);
return; return;
@ -187,93 +188,54 @@ void Loaders::loadData(AudioMsgId audio, qint64 position) {
} }
QMutexLocker lock(internal::audioPlayerMutex()); QMutexLocker lock(internal::audioPlayerMutex());
auto m = checkLoader(type); auto track = checkLoader(type);
if (!m) { if (!track) {
clear(type); clear(type);
return; return;
} }
if (started) { if (started) {
if (m->source) { mixer()->reattachTracks();
alSourceStop(m->source);
for (int32 i = 0; i < 3; ++i) { track->started();
if (m->samplesCount[i]) { if (!internal::audioCheckError()) {
ALuint buffer = 0; setStoppedState(track, State::StoppedAtStart);
alSourceUnqueueBuffers(m->source, 1, &buffer); emitError(type);
m->samplesCount[i] = 0; return;
}
}
m->nextBuffer = 0;
} }
m->skipStart = position;
m->skipEnd = m->playbackState.duration - position; track->bufferedPosition = position;
m->playbackState.position = position; track->state.position = position;
m->started = 0; track->fadeStartPosition = position;
track->format = l->format();
track->frequency = l->frequency();
} }
if (samplesCount) { if (samplesCount) {
if (!m->source) { track->ensureStreamCreated();
alGenSources(1, &m->source);
alSourcef(m->source, AL_PITCH, 1.f);
alSource3f(m->source, AL_POSITION, 0, 0, 0);
alSource3f(m->source, AL_VELOCITY, 0, 0, 0);
alSourcei(m->source, AL_LOOPING, 0);
}
if (!m->buffers[m->nextBuffer]) {
alGenBuffers(3, m->buffers);
}
// If this buffer is queued, try to unqueue some buffer. auto bufferIndex = track->getNotQueuedBufferIndex();
if (m->samplesCount[m->nextBuffer]) {
ALint processed = 0;
alGetSourcei(m->source, AL_BUFFERS_PROCESSED, &processed);
if (processed < 1) { // No processed buffers, wait.
l->saveDecodedSamples(&samples, &samplesCount);
return;
}
// Unqueue some processed buffer.
ALuint buffer = 0;
alSourceUnqueueBuffers(m->source, 1, &buffer);
if (!internal::audioCheckError()) {
setStoppedState(m, AudioPlayerStoppedAtError);
emitError(type);
return;
}
// Find it in the list and make it the nextBuffer.
bool found = false;
for (int i = 0; i < 3; ++i) {
if (m->buffers[i] == buffer) {
found = true;
m->nextBuffer = i;
break;
}
}
if (!found) {
LOG(("Audio Error: Could not find the unqueued buffer! Buffer %1 in source %2 with processed count %3").arg(buffer).arg(m->source).arg(processed));
setStoppedState(m, AudioPlayerStoppedAtError);
emitError(type);
return;
}
if (m->samplesCount[m->nextBuffer]) {
m->skipStart += m->samplesCount[m->nextBuffer];
m->samplesCount[m->nextBuffer] = 0;
}
}
auto frequency = l->frequency();
auto format = l->format();
m->samplesCount[m->nextBuffer] = samplesCount;
alBufferData(m->buffers[m->nextBuffer], format, samples.constData(), samples.size(), frequency);
alSourceQueueBuffers(m->source, 1, m->buffers + m->nextBuffer);
m->skipEnd -= samplesCount;
m->nextBuffer = (m->nextBuffer + 1) % 3;
if (!internal::audioCheckError()) { if (!internal::audioCheckError()) {
setStoppedState(m, AudioPlayerStoppedAtError); setStoppedState(track, State::StoppedAtError);
emitError(type);
return;
}
if (bufferIndex < 0) { // No free buffers, wait.
l->saveDecodedSamples(&samples, &samplesCount);
return;
}
track->bufferSamples[bufferIndex] = samples;
track->samplesCount[bufferIndex] = samplesCount;
track->bufferedLength += samplesCount;
alBufferData(track->stream.buffers[bufferIndex], track->format, samples.constData(), samples.size(), track->frequency);
alSourceQueueBuffers(track->stream.source, 1, track->stream.buffers + bufferIndex);
if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtError);
emitError(type); emitError(type);
return; return;
} }
@ -285,31 +247,31 @@ void Loaders::loadData(AudioMsgId audio, qint64 position) {
} }
if (finished) { if (finished) {
m->skipEnd = 0; track->loaded = true;
m->playbackState.duration = m->skipStart + m->samplesCount[0] + m->samplesCount[1] + m->samplesCount[2]; track->state.duration = track->bufferedPosition + track->bufferedLength;
clear(type); clear(type);
} }
m->loading = false; track->loading = false;
if (m->playbackState.state == AudioPlayerResuming || m->playbackState.state == AudioPlayerPlaying || m->playbackState.state == AudioPlayerStarting) { if (track->state.state == State::Resuming || track->state.state == State::Playing || track->state.state == State::Starting) {
ALint state = AL_INITIAL; ALint state = AL_INITIAL;
alGetSourcei(m->source, AL_SOURCE_STATE, &state); alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
if (internal::audioCheckError()) { if (internal::audioCheckError()) {
if (state != AL_PLAYING) { if (state != AL_PLAYING) {
switch (type) { if (state == AL_STOPPED && !internal::CheckAudioDeviceConnected()) {
case AudioMsgId::Type::Voice: alSourcef(m->source, AL_GAIN, internal::audioSuppressGain()); break; return;
case AudioMsgId::Type::Song: alSourcef(m->source, AL_GAIN, internal::audioSuppressSongGain() * Global::SongVolume()); break;
case AudioMsgId::Type::Video: alSourcef(m->source, AL_GAIN, internal::audioSuppressSongGain() * Global::VideoVolume()); break;
} }
alSourcef(track->stream.source, AL_GAIN, ComputeVolume(type));
if (!internal::audioCheckError()) { if (!internal::audioCheckError()) {
setStoppedState(m, AudioPlayerStoppedAtError); setStoppedState(track, State::StoppedAtError);
emitError(type); emitError(type);
return; return;
} }
alSourcePlay(m->source); alSourcePlay(track->stream.source);
if (!internal::audioCheckError()) { if (!internal::audioCheckError()) {
setStoppedState(m, AudioPlayerStoppedAtError); setStoppedState(track, State::StoppedAtError);
emitError(type); emitError(type);
return; return;
} }
@ -317,7 +279,7 @@ void Loaders::loadData(AudioMsgId audio, qint64 position) {
emit needToCheck(); emit needToCheck();
} }
} else { } else {
setStoppedState(m, AudioPlayerStoppedAtError); setStoppedState(track, State::StoppedAtError);
emitError(type); emitError(type);
} }
} }
@ -328,8 +290,8 @@ AudioPlayerLoader *Loaders::setupLoader(const AudioMsgId &audio, SetupError &err
QMutexLocker lock(internal::audioPlayerMutex()); QMutexLocker lock(internal::audioPlayerMutex());
if (!mixer()) return nullptr; if (!mixer()) return nullptr;
auto data = mixer()->dataForType(audio.type()); auto track = mixer()->trackForType(audio.type());
if (!data || data->audio != audio || !data->loading) { if (!track || track->state.id != audio || !track->loading) {
emit error(audio); emit error(audio);
LOG(("Audio Error: trying to load part of audio, that is not current at the moment")); LOG(("Audio Error: trying to load part of audio, that is not current at the moment"));
err = SetupErrorNotPlaying; err = SetupErrorNotPlaying;
@ -344,7 +306,7 @@ AudioPlayerLoader *Loaders::setupLoader(const AudioMsgId &audio, SetupError &err
case AudioMsgId::Type::Video: l = _videoLoader.get(); isGoodId = (_video == audio); break; case AudioMsgId::Type::Video: l = _videoLoader.get(); isGoodId = (_video == audio); break;
} }
if (l && (!isGoodId || !l->check(data->file, data->data))) { if (l && (!isGoodId || !l->check(track->file, track->data))) {
clear(audio.type()); clear(audio.type());
l = nullptr; l = nullptr;
} }
@ -358,61 +320,59 @@ AudioPlayerLoader *Loaders::setupLoader(const AudioMsgId &audio, SetupError &err
} }
if (audio.type() == AudioMsgId::Type::Video) { if (audio.type() == AudioMsgId::Type::Video) {
if (!data->videoData) { if (!track->videoData) {
data->playbackState.state = AudioPlayerStoppedAtError; track->state.state = State::StoppedAtError;
emit error(audio); emit error(audio);
LOG(("Audio Error: video sound data not ready")); LOG(("Audio Error: video sound data not ready"));
return nullptr; return nullptr;
} }
_videoLoader = std_::make_unique<ChildFFMpegLoader>(data->videoPlayId, std_::move(data->videoData)); _videoLoader = std_::make_unique<ChildFFMpegLoader>(track->videoPlayId, std_::move(track->videoData));
l = _videoLoader.get(); l = _videoLoader.get();
} else { } else {
*loader = std_::make_unique<FFMpegLoader>(data->file, data->data); *loader = std_::make_unique<FFMpegLoader>(track->file, track->data);
l = loader->get(); l = loader->get();
} }
if (!l->open(position)) { if (!l->open(position)) {
data->playbackState.state = AudioPlayerStoppedAtStart; track->state.state = State::StoppedAtStart;
return nullptr; return nullptr;
} }
int64 duration = l->duration(); int64 duration = l->duration();
if (duration <= 0) { if (duration <= 0) {
data->playbackState.state = AudioPlayerStoppedAtStart; track->state.state = State::StoppedAtStart;
return nullptr; return nullptr;
} }
data->playbackState.duration = duration; track->state.duration = duration;
data->playbackState.frequency = l->frequency(); track->state.frequency = l->frequency();
if (!data->playbackState.frequency) data->playbackState.frequency = AudioVoiceMsgFrequency; if (!track->state.frequency) track->state.frequency = kDefaultFrequency;
err = SetupNoErrorStarted; err = SetupNoErrorStarted;
} else { } else if (track->loaded) {
if (!data->skipEnd) { err = SetupErrorLoadedFull;
err = SetupErrorLoadedFull; LOG(("Audio Error: trying to load part of audio, that is already loaded to the end"));
LOG(("Audio Error: trying to load part of audio, that is already loaded to the end")); return nullptr;
return nullptr;
}
} }
return l; return l;
} }
Mixer::AudioMsg *Loaders::checkLoader(AudioMsgId::Type type) { Mixer::Track *Loaders::checkLoader(AudioMsgId::Type type) {
if (!mixer()) return nullptr; if (!mixer()) return nullptr;
auto data = mixer()->dataForType(type); auto track = mixer()->trackForType(type);
bool isGoodId = false; auto isGoodId = false;
AudioPlayerLoader *l = nullptr; AudioPlayerLoader *l = nullptr;
switch (type) { switch (type) {
case AudioMsgId::Type::Voice: l = _audioLoader.get(); isGoodId = (data->audio == _audio); break; case AudioMsgId::Type::Voice: l = _audioLoader.get(); isGoodId = (track->state.id == _audio); break;
case AudioMsgId::Type::Song: l = _songLoader.get(); isGoodId = (data->audio == _song); break; case AudioMsgId::Type::Song: l = _songLoader.get(); isGoodId = (track->state.id == _song); break;
case AudioMsgId::Type::Video: l = _videoLoader.get(); isGoodId = (data->audio == _video); break; case AudioMsgId::Type::Video: l = _videoLoader.get(); isGoodId = (track->state.id == _video); break;
} }
if (!l || !data) return nullptr; if (!l || !track) return nullptr;
if (!isGoodId || !data->loading || !l->check(data->file, data->data)) { if (!isGoodId || !track->loading || !l->check(track->file, track->data)) {
LOG(("Audio Error: playing changed while loading")); LOG(("Audio Error: playing changed while loading"));
return nullptr; return nullptr;
} }
return data; return track;
} }
void Loaders::onCancel(const AudioMsgId &audio) { void Loaders::onCancel(const AudioMsgId &audio) {
@ -425,10 +385,10 @@ void Loaders::onCancel(const AudioMsgId &audio) {
QMutexLocker lock(internal::audioPlayerMutex()); QMutexLocker lock(internal::audioPlayerMutex());
if (!mixer()) return; if (!mixer()) return;
for (int i = 0; i < AudioSimultaneousLimit; ++i) { for (auto i = 0; i != kTogetherLimit; ++i) {
auto data = mixer()->dataForType(audio.type(), i); auto track = mixer()->trackForType(audio.type(), i);
if (data->audio == audio) { if (track->state.id == audio) {
data->loading = false; track->loading = false;
} }
} }
} }

View File

@ -68,7 +68,7 @@ private:
void emitError(AudioMsgId::Type type); void emitError(AudioMsgId::Type type);
AudioMsgId clear(AudioMsgId::Type type); AudioMsgId clear(AudioMsgId::Type type);
void setStoppedState(Mixer::AudioMsg *m, AudioPlayerState state = AudioPlayerStopped); void setStoppedState(Mixer::Track *m, State state = State::Stopped);
enum SetupError { enum SetupError {
SetupErrorAtStart = 0, SetupErrorAtStart = 0,
@ -78,7 +78,7 @@ private:
}; };
void loadData(AudioMsgId audio, qint64 position); void loadData(AudioMsgId audio, qint64 position);
AudioPlayerLoader *setupLoader(const AudioMsgId &audio, SetupError &err, qint64 &position); AudioPlayerLoader *setupLoader(const AudioMsgId &audio, SetupError &err, qint64 &position);
Mixer::AudioMsg *checkLoader(AudioMsgId::Type type); Mixer::Track *checkLoader(AudioMsgId::Type type);
}; };

View File

@ -83,7 +83,7 @@ bool ChildFFMpegLoader::open(qint64 &position) {
int64_t src_ch_layout = layout, dst_ch_layout = AudioToChannelLayout; int64_t src_ch_layout = layout, dst_ch_layout = AudioToChannelLayout;
_srcRate = _parentData->frequency; _srcRate = _parentData->frequency;
AVSampleFormat src_sample_fmt = _inputFormat, dst_sample_fmt = AudioToFormat; AVSampleFormat src_sample_fmt = _inputFormat, dst_sample_fmt = AudioToFormat;
_dstRate = (_parentData->frequency != 44100 && _parentData->frequency != 48000) ? AudioVoiceMsgFrequency : _parentData->frequency; _dstRate = (_parentData->frequency != 44100 && _parentData->frequency != 48000) ? Media::Player::kDefaultFrequency : _parentData->frequency;
av_opt_set_int(_swrContext, "in_channel_layout", src_ch_layout, 0); av_opt_set_int(_swrContext, "in_channel_layout", src_ch_layout, 0);
av_opt_set_int(_swrContext, "in_sample_rate", _srcRate, 0); av_opt_set_int(_swrContext, "in_sample_rate", _srcRate, 0);

View File

@ -21,6 +21,7 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#pragma once #pragma once
#include "media/media_audio_loader.h" #include "media/media_audio_loader.h"
#include "media/media_audio.h"
extern "C" { extern "C" {
#include <libavcodec/avcodec.h> #include <libavcodec/avcodec.h>
@ -33,7 +34,7 @@ extern "C" {
struct VideoSoundData { struct VideoSoundData {
AVCodecContext *context = nullptr; AVCodecContext *context = nullptr;
int32 frequency = AudioVoiceMsgFrequency; int32 frequency = Media::Player::kDefaultFrequency;
TimeMs length = 0; TimeMs length = 0;
~VideoSoundData(); ~VideoSoundData();
}; };
@ -120,8 +121,8 @@ private:
int32 _sampleSize = 2 * sizeof(uint16); int32 _sampleSize = 2 * sizeof(uint16);
int32 _format = AL_FORMAT_STEREO16; int32 _format = AL_FORMAT_STEREO16;
int32 _srcRate = AudioVoiceMsgFrequency; int32 _srcRate = Media::Player::kDefaultFrequency;
int32 _dstRate = AudioVoiceMsgFrequency; int32 _dstRate = Media::Player::kDefaultFrequency;
int32 _maxResampleSamples = 1024; int32 _maxResampleSamples = 1024;
uint8_t **_dstSamplesData = nullptr; uint8_t **_dstSamplesData = nullptr;

View File

@ -371,7 +371,7 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {
_audioStreamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_AUDIO, -1, -1, 0, 0); _audioStreamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_AUDIO, -1, -1, 0, 0);
if (_mode == Mode::OnlyGifv) { if (_mode == Mode::OnlyGifv) {
if (_audioStreamId >= 0) { // should be no audio stream if (_audioStreamId >= 0) { // should be no audio stream
_audioStreamId = -1; // do not attempt to access mixer() _audioStreamId = -1;
return false; return false;
} }
if (dataSize() > AnimationInMemory) { if (dataSize() > AnimationInMemory) {
@ -380,7 +380,7 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {
if (_codecContext->codec_id != AV_CODEC_ID_H264) { if (_codecContext->codec_id != AV_CODEC_ID_H264) {
return false; return false;
} }
} else if (_mode == Mode::Silent || !Player::mixer() || !_playId) { } else if (_mode == Mode::Silent || !_playId) {
_audioStreamId = -1; _audioStreamId = -1;
} }
@ -436,7 +436,7 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {
} }
if (_audioStreamId >= 0) { if (_audioStreamId >= 0) {
int64 position = (positionMs * soundData->frequency) / 1000LL; auto position = (positionMs * soundData->frequency) / 1000LL;
Player::mixer()->initFromVideo(_playId, std_::move(soundData), position); Player::mixer()->initFromVideo(_playId, std_::move(soundData), position);
} }

View File

@ -35,13 +35,13 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
namespace Media { namespace Media {
namespace Player { namespace Player {
using State = PlayButtonLayout::State; using ButtonState = PlayButtonLayout::State;
class CoverWidget::PlayButton : public Ui::AbstractButton { class CoverWidget::PlayButton : public Ui::AbstractButton {
public: public:
PlayButton(QWidget *parent); PlayButton(QWidget *parent);
void setState(PlayButtonLayout::State state) { void setState(ButtonState state) {
_layout.setState(state); _layout.setState(state);
} }
void finishTransform() { void finishTransform() {
@ -114,17 +114,15 @@ CoverWidget::CoverWidget(QWidget *parent) : TWidget(parent)
subscribe(instance()->playlistChangedNotifier(), [this] { subscribe(instance()->playlistChangedNotifier(), [this] {
handlePlaylistUpdate(); handlePlaylistUpdate();
}); });
subscribe(instance()->updatedNotifier(), [this](const UpdatedEvent &e) { subscribe(instance()->updatedNotifier(), [this](const TrackState &state) {
handleSongUpdate(e); handleSongUpdate(state);
}); });
subscribe(instance()->songChangedNotifier(), [this] { subscribe(instance()->songChangedNotifier(), [this] {
handleSongChange(); handleSongChange();
}); });
handleSongChange(); handleSongChange();
AudioMsgId playing; handleSongUpdate(mixer()->currentState(AudioMsgId::Type::Song));
auto playbackState = mixer()->currentState(&playing, AudioMsgId::Type::Song);
handleSongUpdate(UpdatedEvent(&playing, &playbackState));
_playPause->finishTransform(); _playPause->finishTransform();
} }
@ -153,10 +151,10 @@ void CoverWidget::handleSeekFinished(float64 progress) {
auto positionMs = snap(static_cast<TimeMs>(progress * _lastDurationMs), 0LL, _lastDurationMs); auto positionMs = snap(static_cast<TimeMs>(progress * _lastDurationMs), 0LL, _lastDurationMs);
_seekPositionMs = -1; _seekPositionMs = -1;
AudioMsgId playing; auto type = AudioMsgId::Type::Song;
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Song); auto state = Media::Player::mixer()->currentState(type);
if (playing && playbackState.duration) { if (state.id && state.duration) {
Media::Player::mixer()->seek(qRound(progress * playbackState.duration)); Media::Player::mixer()->seek(type, qRound(progress * state.duration));
} }
instance()->stopSeeking(); instance()->stopSeeking();
@ -228,52 +226,50 @@ void CoverWidget::updateRepeatTrackIcon() {
_repeatTrack->setIconOverride(instance()->repeatEnabled() ? nullptr : &st::mediaPlayerRepeatInactiveIcon); _repeatTrack->setIconOverride(instance()->repeatEnabled() ? nullptr : &st::mediaPlayerRepeatInactiveIcon);
} }
void CoverWidget::handleSongUpdate(const UpdatedEvent &e) { void CoverWidget::handleSongUpdate(const TrackState &state) {
auto &audioId = *e.audioId; if (!state.id || !state.id.audio()->song()) {
auto &playbackState = *e.playbackState;
if (!audioId || !audioId.audio()->song()) {
return; return;
} }
if (audioId.audio()->loading()) { if (state.id.audio()->loading()) {
_playback->updateLoadingState(audioId.audio()->progress()); _playback->updateLoadingState(state.id.audio()->progress());
} else { } else {
_playback->updateState(*e.playbackState); _playback->updateState(state);
} }
auto stopped = ((playbackState.state & AudioPlayerStoppedMask) || playbackState.state == AudioPlayerFinishing); auto stopped = (IsStopped(state.state) || state.state == State::Finishing);
auto showPause = !stopped && (playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerResuming || playbackState.state == AudioPlayerStarting); auto showPause = !stopped && (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
if (instance()->isSeeking()) { if (instance()->isSeeking()) {
showPause = true; showPause = true;
} }
auto state = [audio = audioId.audio(), showPause] { auto buttonState = [audio = state.id.audio(), showPause] {
if (audio->loading()) { if (audio->loading()) {
return State::Cancel; return ButtonState::Cancel;
} else if (showPause) { } else if (showPause) {
return State::Pause; return ButtonState::Pause;
} }
return State::Play; return ButtonState::Play;
}; };
_playPause->setState(state()); _playPause->setState(buttonState());
updateTimeText(audioId, playbackState); updateTimeText(state);
} }
void CoverWidget::updateTimeText(const AudioMsgId &audioId, const AudioPlaybackState &playbackState) { void CoverWidget::updateTimeText(const TrackState &state) {
QString time; QString time;
qint64 position = 0, duration = 0, display = 0; qint64 position = 0, duration = 0, display = 0;
auto frequency = (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency); auto frequency = state.frequency;
if (!(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) { if (!IsStopped(state.state) && state.state != State::Finishing) {
display = position = playbackState.position; display = position = state.position;
duration = playbackState.duration; duration = state.duration;
} else { } else {
display = playbackState.duration ? playbackState.duration : (audioId.audio()->song()->duration * frequency); display = state.duration ? state.duration : (state.id.audio()->song()->duration * frequency);
} }
_lastDurationMs = (playbackState.duration * 1000LL) / frequency; _lastDurationMs = (state.duration * 1000LL) / frequency;
if (audioId.audio()->loading()) { if (state.id.audio()->loading()) {
_time = QString::number(qRound(audioId.audio()->progress() * 100)) + '%'; _time = QString::number(qRound(state.id.audio()->progress() * 100)) + '%';
_playback->setDisabled(true); _playback->setDisabled(true);
} else { } else {
display = display / frequency; display = display / frequency;

View File

@ -21,7 +21,6 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#pragma once #pragma once
class AudioMsgId; class AudioMsgId;
struct AudioPlaybackState;
namespace Ui { namespace Ui {
class FlatLabel; class FlatLabel;
@ -37,7 +36,7 @@ class Playback;
namespace Player { namespace Player {
class VolumeController; class VolumeController;
struct UpdatedEvent; struct TrackState;
class CoverWidget : public TWidget, private base::Subscriber { class CoverWidget : public TWidget, private base::Subscriber {
public: public:
@ -66,11 +65,11 @@ private:
void updateVolumeToggleIcon(); void updateVolumeToggleIcon();
void handleSongUpdate(const UpdatedEvent &e); void handleSongUpdate(const TrackState &state);
void handleSongChange(); void handleSongChange();
void handlePlaylistUpdate(); void handlePlaylistUpdate();
void updateTimeText(const AudioMsgId &audioId, const AudioPlaybackState &playbackState); void updateTimeText(const TrackState &state);
void updateTimeLabel(); void updateTimeLabel();
TimeMs _seekPositionMs = -1; TimeMs _seekPositionMs = -1;

View File

@ -150,15 +150,12 @@ Instance *instance() {
} }
void Instance::play() { void Instance::play() {
AudioMsgId playing; auto state = mixer()->currentState(AudioMsgId::Type::Song);
auto playbackState = mixer()->currentState(&playing, AudioMsgId::Type::Song); if (state.id) {
if (playing) { if (IsStopped(state.state)) {
if (playbackState.state & AudioPlayerStoppedMask) { mixer()->play(state.id);
mixer()->play(playing); } else if (IsPaused(state.state) || state.state == State::Pausing) {
} else { mixer()->pauseresume(AudioMsgId::Type::Song);
if (playbackState.state == AudioPlayerPausing || playbackState.state == AudioPlayerPaused || playbackState.state == AudioPlayerPausedAtEnd) {
mixer()->pauseresume(AudioMsgId::Type::Song);
}
} }
} else if (_current) { } else if (_current) {
mixer()->play(_current); mixer()->play(_current);
@ -177,11 +174,10 @@ void Instance::play(const AudioMsgId &audioId) {
} }
void Instance::pause() { void Instance::pause() {
AudioMsgId playing; auto state = mixer()->currentState(AudioMsgId::Type::Song);
auto playbackState = mixer()->currentState(&playing, AudioMsgId::Type::Song); if (state.id) {
if (playing) { if (!IsStopped(state.state)) {
if (!(playbackState.state & AudioPlayerStoppedMask)) { if (state.state == State::Starting || state.state == State::Resuming || state.state == State::Playing || state.state == State::Finishing) {
if (playbackState.state == AudioPlayerStarting || playbackState.state == AudioPlayerResuming || playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerFinishing) {
mixer()->pauseresume(AudioMsgId::Type::Song); mixer()->pauseresume(AudioMsgId::Type::Song);
} }
} }
@ -193,11 +189,10 @@ void Instance::stop() {
} }
void Instance::playPause() { void Instance::playPause() {
AudioMsgId playing; auto state = mixer()->currentState(AudioMsgId::Type::Song);
auto playbackState = mixer()->currentState(&playing, AudioMsgId::Type::Song); if (state.id) {
if (playing) { if (IsStopped(state.state)) {
if (playbackState.state & AudioPlayerStoppedMask) { mixer()->play(state.id);
mixer()->play(playing);
} else { } else {
mixer()->pauseresume(AudioMsgId::Type::Song); mixer()->pauseresume(AudioMsgId::Type::Song);
} }
@ -219,11 +214,10 @@ void Instance::playPauseCancelClicked() {
return; return;
} }
AudioMsgId playing; auto state = mixer()->currentState(AudioMsgId::Type::Song);
auto playbackState = mixer()->currentState(&playing, AudioMsgId::Type::Song); auto stopped = (IsStopped(state.state) || state.state == State::Finishing);
auto stopped = ((playbackState.state & AudioPlayerStoppedMask) || playbackState.state == AudioPlayerFinishing); auto showPause = !stopped && (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
auto showPause = !stopped && (playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerResuming || playbackState.state == AudioPlayerStarting); auto audio = state.id.audio();
auto audio = playing.audio();
if (audio && audio->loading()) { if (audio && audio->loading()) {
audio->cancel(); audio->cancel();
} else if (showPause) { } else if (showPause) {
@ -252,23 +246,22 @@ void Instance::documentLoadProgress(DocumentData *document) {
template <typename CheckCallback> template <typename CheckCallback>
void Instance::emitUpdate(CheckCallback check) { void Instance::emitUpdate(CheckCallback check) {
AudioMsgId playing; auto state = mixer()->currentState(AudioMsgId::Type::Song);
auto playbackState = mixer()->currentState(&playing, AudioMsgId::Type::Song); if (!state.id || !check(state.id)) {
if (!playing || !check(playing)) {
return; return;
} }
setCurrent(playing); setCurrent(state.id);
_updatedNotifier.notify(UpdatedEvent(&playing, &playbackState), true); _updatedNotifier.notify(state, true);
if (_isPlaying && playbackState.state == AudioPlayerStoppedAtEnd) { if (_isPlaying && state.state == State::StoppedAtEnd) {
if (_repeatEnabled) { if (_repeatEnabled) {
mixer()->play(_current); mixer()->play(_current);
} else { } else {
next(); next();
} }
} }
auto isPlaying = !(playbackState.state & AudioPlayerStoppedMask); auto isPlaying = !IsStopped(state.state);
if (_isPlaying != isPlaying) { if (_isPlaying != isPlaying) {
_isPlaying = isPlaying; _isPlaying = isPlaying;
if (_isPlaying) { if (_isPlaying) {

View File

@ -23,7 +23,6 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
namespace Notify { namespace Notify {
struct PeerUpdate; struct PeerUpdate;
} // namespace Notify } // namespace Notify
struct AudioPlaybackState;
class AudioMsgId; class AudioMsgId;
namespace Media { namespace Media {
@ -35,12 +34,7 @@ void finish();
class Instance; class Instance;
Instance *instance(); Instance *instance();
struct UpdatedEvent { struct TrackState;
UpdatedEvent(const AudioMsgId *audioId, const AudioPlaybackState *playbackState) : audioId(audioId), playbackState(playbackState) {
}
const AudioMsgId *audioId;
const AudioPlaybackState *playbackState;
};
class Instance : private base::Subscriber { class Instance : private base::Subscriber {
public: public:
@ -85,7 +79,7 @@ public:
base::Observable<bool> &playerWidgetOver() { base::Observable<bool> &playerWidgetOver() {
return _playerWidgetOver; return _playerWidgetOver;
} }
base::Observable<UpdatedEvent> &updatedNotifier() { base::Observable<TrackState> &updatedNotifier() {
return _updatedNotifier; return _updatedNotifier;
} }
base::Observable<void> &playlistChangedNotifier() { base::Observable<void> &playlistChangedNotifier() {
@ -131,7 +125,7 @@ private:
base::Observable<bool> _usePanelPlayer; base::Observable<bool> _usePanelPlayer;
base::Observable<bool> _titleButtonOver; base::Observable<bool> _titleButtonOver;
base::Observable<bool> _playerWidgetOver; base::Observable<bool> _playerWidgetOver;
base::Observable<UpdatedEvent> _updatedNotifier; base::Observable<TrackState> _updatedNotifier;
base::Observable<void> _playlistChangedNotifier; base::Observable<void> _playlistChangedNotifier;
base::Observable<void> _songChangedNotifier; base::Observable<void> _songChangedNotifier;
base::Observable<void> _repeatChangedNotifier; base::Observable<void> _repeatChangedNotifier;

View File

@ -37,7 +37,7 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
namespace Media { namespace Media {
namespace Player { namespace Player {
using State = PlayButtonLayout::State; using ButtonState = PlayButtonLayout::State;
class Widget::PlayButton : public Ui::RippleButton { class Widget::PlayButton : public Ui::RippleButton {
public: public:
@ -128,17 +128,15 @@ Widget::Widget(QWidget *parent) : TWidget(parent)
subscribe(instance()->playlistChangedNotifier(), [this] { subscribe(instance()->playlistChangedNotifier(), [this] {
handlePlaylistUpdate(); handlePlaylistUpdate();
}); });
subscribe(instance()->updatedNotifier(), [this](const UpdatedEvent &e) { subscribe(instance()->updatedNotifier(), [this](const TrackState &state) {
handleSongUpdate(e); handleSongUpdate(state);
}); });
subscribe(instance()->songChangedNotifier(), [this] { subscribe(instance()->songChangedNotifier(), [this] {
handleSongChange(); handleSongChange();
}); });
handleSongChange(); handleSongChange();
AudioMsgId playing; handleSongUpdate(mixer()->currentState(AudioMsgId::Type::Song));
auto playbackState = mixer()->currentState(&playing, AudioMsgId::Type::Song);
handleSongUpdate(UpdatedEvent(&playing, &playbackState));
_playPause->finishTransform(); _playPause->finishTransform();
} }
@ -207,10 +205,10 @@ void Widget::handleSeekFinished(float64 progress) {
auto positionMs = snap(static_cast<TimeMs>(progress * _lastDurationMs), 0LL, _lastDurationMs); auto positionMs = snap(static_cast<TimeMs>(progress * _lastDurationMs), 0LL, _lastDurationMs);
_seekPositionMs = -1; _seekPositionMs = -1;
AudioMsgId playing; auto type = AudioMsgId::Type::Song;
auto playbackState = mixer()->currentState(&playing, AudioMsgId::Type::Song); auto state = mixer()->currentState(type);
if (playing && playbackState.duration) { if (state.id && state.duration) {
mixer()->seek(qRound(progress * playbackState.duration)); mixer()->seek(type, qRound(progress * state.duration));
} }
instance()->stopSeeking(); instance()->stopSeeking();
@ -301,52 +299,50 @@ void Widget::updateRepeatTrackIcon() {
_repeatTrack->setRippleColorOverride(repeating ? nullptr : &st::mediaPlayerRepeatDisabledRippleBg); _repeatTrack->setRippleColorOverride(repeating ? nullptr : &st::mediaPlayerRepeatDisabledRippleBg);
} }
void Widget::handleSongUpdate(const UpdatedEvent &e) { void Widget::handleSongUpdate(const TrackState &state) {
auto &audioId = *e.audioId; if (!state.id || !state.id.audio()->song()) {
auto &playbackState = *e.playbackState;
if (!audioId || !audioId.audio()->song()) {
return; return;
} }
if (audioId.audio()->loading()) { if (state.id.audio()->loading()) {
_playback->updateLoadingState(audioId.audio()->progress()); _playback->updateLoadingState(state.id.audio()->progress());
} else { } else {
_playback->updateState(*e.playbackState); _playback->updateState(state);
} }
auto stopped = ((playbackState.state & AudioPlayerStoppedMask) || playbackState.state == AudioPlayerFinishing); auto stopped = (IsStopped(state.state) || state.state == State::Finishing);
auto showPause = !stopped && (playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerResuming || playbackState.state == AudioPlayerStarting); auto showPause = !stopped && (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
if (instance()->isSeeking()) { if (instance()->isSeeking()) {
showPause = true; showPause = true;
} }
auto state = [audio = audioId.audio(), showPause] { auto buttonState = [audio = state.id.audio(), showPause] {
if (audio->loading()) { if (audio->loading()) {
return State::Cancel; return ButtonState::Cancel;
} else if (showPause) { } else if (showPause) {
return State::Pause; return ButtonState::Pause;
} }
return State::Play; return ButtonState::Play;
}; };
_playPause->setState(state()); _playPause->setState(buttonState());
updateTimeText(audioId, playbackState); updateTimeText(state);
} }
void Widget::updateTimeText(const AudioMsgId &audioId, const AudioPlaybackState &playbackState) { void Widget::updateTimeText(const TrackState &state) {
QString time; QString time;
qint64 position = 0, duration = 0, display = 0; qint64 position = 0, duration = 0, display = 0;
auto frequency = (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency); auto frequency = state.frequency;
if (!(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) { if (!IsStopped(state.state) && state.state != State::Finishing) {
display = position = playbackState.position; display = position = state.position;
duration = playbackState.duration; duration = state.duration;
} else { } else {
display = playbackState.duration ? playbackState.duration : (audioId.audio()->song()->duration * frequency); display = state.duration ? state.duration : (state.id.audio()->song()->duration * frequency);
} }
_lastDurationMs = (playbackState.duration * 1000LL) / frequency; _lastDurationMs = (state.duration * 1000LL) / frequency;
if (audioId.audio()->loading()) { if (state.id.audio()->loading()) {
_time = QString::number(qRound(audioId.audio()->progress() * 100)) + '%'; _time = QString::number(qRound(state.id.audio()->progress() * 100)) + '%';
_playback->setDisabled(true); _playback->setDisabled(true);
} else { } else {
display = display / frequency; display = display / frequency;

View File

@ -21,7 +21,6 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#pragma once #pragma once
class AudioMsgId; class AudioMsgId;
struct AudioPlaybackState;
namespace Ui { namespace Ui {
class FlatLabel; class FlatLabel;
@ -39,7 +38,7 @@ namespace Player {
class PlayButton; class PlayButton;
class VolumeWidget; class VolumeWidget;
struct UpdatedEvent; struct TrackState;
class Widget : public TWidget, private base::Subscriber { class Widget : public TWidget, private base::Subscriber {
public: public:
@ -81,11 +80,11 @@ private:
void updateVolumeToggleIcon(); void updateVolumeToggleIcon();
void handleSongUpdate(const UpdatedEvent &e); void handleSongUpdate(const TrackState &state);
void handleSongChange(); void handleSongChange();
void handlePlaylistUpdate(); void handlePlaylistUpdate();
void updateTimeText(const AudioMsgId &audioId, const AudioPlaybackState &playbackState); void updateTimeText(const TrackState &state);
void updateTimeLabel(); void updateTimeLabel();
TimeMs _seekPositionMs = -1; TimeMs _seekPositionMs = -1;

View File

@ -105,14 +105,14 @@ void Controller::fadeUpdated(float64 opacity) {
_playback->setFadeOpacity(opacity); _playback->setFadeOpacity(opacity);
} }
void Controller::updatePlayback(const AudioPlaybackState &playbackState) { void Controller::updatePlayback(const Player::TrackState &state) {
updatePlayPauseResumeState(playbackState); updatePlayPauseResumeState(state);
_playback->updateState(playbackState); _playback->updateState(state);
updateTimeTexts(playbackState); updateTimeTexts(state);
} }
void Controller::updatePlayPauseResumeState(const AudioPlaybackState &playbackState) { void Controller::updatePlayPauseResumeState(const Player::TrackState &state) {
bool showPause = (playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerResuming || _seekPositionMs >= 0); auto showPause = (state.state == Player::State::Playing || state.state == Player::State::Resuming || _seekPositionMs >= 0);
if (showPause != _showPause) { if (showPause != _showPause) {
disconnect(_playPauseResume, SIGNAL(clicked()), this, _showPause ? SIGNAL(pausePressed()) : SIGNAL(playPressed())); disconnect(_playPauseResume, SIGNAL(clicked()), this, _showPause ? SIGNAL(pausePressed()) : SIGNAL(playPressed()));
_showPause = showPause; _showPause = showPause;
@ -122,21 +122,21 @@ void Controller::updatePlayPauseResumeState(const AudioPlaybackState &playbackSt
} }
} }
void Controller::updateTimeTexts(const AudioPlaybackState &playbackState) { void Controller::updateTimeTexts(const Player::TrackState &state) {
qint64 position = 0, duration = playbackState.duration; qint64 position = 0, duration = state.duration;
if (!(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) { if (!Player::IsStopped(state.state) && state.state != Player::State::Finishing) {
position = playbackState.position; position = state.position;
} else if (playbackState.state == AudioPlayerStoppedAtEnd) { } else if (state.state == Player::State::StoppedAtEnd) {
position = playbackState.duration; position = state.duration;
} else { } else {
position = 0; position = 0;
} }
auto playFrequency = (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency); auto playFrequency = state.frequency;
auto playAlready = position / playFrequency; auto playAlready = position / playFrequency;
auto playLeft = (playbackState.duration / playFrequency) - playAlready; auto playLeft = (state.duration / playFrequency) - playAlready;
_lastDurationMs = (playbackState.duration * 1000LL) / playFrequency; _lastDurationMs = (state.duration * 1000LL) / playFrequency;
_timeAlready = formatDurationText(playAlready); _timeAlready = formatDurationText(playAlready);
auto minus = QChar(8722); auto minus = QChar(8722);

View File

@ -26,9 +26,11 @@ class FadeAnimation;
class IconButton; class IconButton;
} // namespace Ui } // namespace Ui
struct AudioPlaybackState;
namespace Media { namespace Media {
namespace Player {
struct TrackState;
} // namespace Player
namespace Clip { namespace Clip {
class Playback; class Playback;
@ -43,7 +45,7 @@ public:
void showAnimated(); void showAnimated();
void hideAnimated(); void hideAnimated();
void updatePlayback(const AudioPlaybackState &playbackState); void updatePlayback(const Player::TrackState &state);
void setInFullScreen(bool inFullScreen); void setInFullScreen(bool inFullScreen);
void grabStart() override; void grabStart() override;
@ -74,8 +76,8 @@ private:
void fadeFinished(); void fadeFinished();
void fadeUpdated(float64 opacity); void fadeUpdated(float64 opacity);
void updatePlayPauseResumeState(const AudioPlaybackState &playbackState); void updatePlayPauseResumeState(const Player::TrackState &state);
void updateTimeTexts(const AudioPlaybackState &playbackState); void updateTimeTexts(const Player::TrackState &state);
void refreshTimeTexts(); void refreshTimeTexts();
bool _showPause = false; bool _showPause = false;

View File

@ -30,17 +30,17 @@ namespace Clip {
Playback::Playback(Ui::ContinuousSlider *slider) : _slider(slider) { Playback::Playback(Ui::ContinuousSlider *slider) : _slider(slider) {
} }
void Playback::updateState(const AudioPlaybackState &playbackState) { void Playback::updateState(const Player::TrackState &state) {
qint64 position = 0, duration = playbackState.duration; qint64 position = 0, duration = state.duration;
auto wasDisabled = _slider->isDisabled(); auto wasDisabled = _slider->isDisabled();
if (wasDisabled) setDisabled(false); if (wasDisabled) setDisabled(false);
_playing = !(playbackState.state & AudioPlayerStoppedMask); _playing = !Player::IsStopped(state.state);
if (_playing || playbackState.state == AudioPlayerStopped) { if (_playing || state.state == Player::State::Stopped) {
position = playbackState.position; position = state.position;
} else if (playbackState.state == AudioPlayerStoppedAtEnd) { } else if (state.state == Player::State::StoppedAtEnd) {
position = playbackState.duration; position = state.duration;
} else { } else {
position = 0; position = 0;
} }

View File

@ -22,16 +22,18 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#include "ui/widgets/continuous_sliders.h" #include "ui/widgets/continuous_sliders.h"
struct AudioPlaybackState;
namespace Media { namespace Media {
namespace Player {
struct TrackState;
} // namespace Player
namespace Clip { namespace Clip {
class Playback { class Playback {
public: public:
Playback(Ui::ContinuousSlider *slider); Playback(Ui::ContinuousSlider *slider);
void updateState(const AudioPlaybackState &playbackState); void updateState(const Player::TrackState &state);
void updateLoadingState(float64 progress); void updateLoadingState(float64 progress);
void setFadeOpacity(float64 opacity) { void setFadeOpacity(float64 opacity) {

View File

@ -1536,8 +1536,8 @@ void MediaView::restartVideoAtSeekPosition(TimeMs positionMs) {
_videoPaused = _videoIsSilent = _videoStopped = false; _videoPaused = _videoIsSilent = _videoStopped = false;
_videoPositionMs = positionMs; _videoPositionMs = positionMs;
AudioPlaybackState state; Media::Player::TrackState state;
state.state = AudioPlayerPlaying; state.state = Media::Player::State::Playing;
state.position = _videoPositionMs; state.position = _videoPositionMs;
state.duration = _videoDurationMs; state.duration = _videoDurationMs;
state.frequency = _videoFrequencyMs; state.frequency = _videoFrequencyMs;
@ -1587,9 +1587,9 @@ void MediaView::onVideoPlayProgress(const AudioMsgId &audioId) {
} }
} }
void MediaView::updateVideoPlaybackState(const AudioPlaybackState &state) { void MediaView::updateVideoPlaybackState(const Media::Player::TrackState &state) {
if (state.frequency) { if (state.frequency) {
if (state.state & AudioPlayerStoppedMask) { if (Media::Player::IsStopped(state.state)) {
_videoStopped = true; _videoStopped = true;
} }
_clipController->updatePlayback(state); _clipController->updatePlayback(state);
@ -1600,13 +1600,13 @@ void MediaView::updateVideoPlaybackState(const AudioPlaybackState &state) {
} }
void MediaView::updateSilentVideoPlaybackState() { void MediaView::updateSilentVideoPlaybackState() {
AudioPlaybackState state; Media::Player::TrackState state;
if (_videoPaused) { if (_videoPaused) {
state.state = AudioPlayerPaused; state.state = Media::Player::State::Paused;
} else if (_videoPositionMs == _videoDurationMs) { } else if (_videoPositionMs == _videoDurationMs) {
state.state = AudioPlayerStoppedAtEnd; state.state = Media::Player::State::StoppedAtEnd;
} else { } else {
state.state = AudioPlayerPlaying; state.state = Media::Player::State::Playing;
} }
state.position = _videoPositionMs; state.position = _videoPositionMs;
state.duration = _videoDurationMs; state.duration = _videoDurationMs;

View File

@ -24,6 +24,9 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#include "ui/effects/radial_animation.h" #include "ui/effects/radial_animation.h"
namespace Media { namespace Media {
namespace Player {
struct TrackState;
} // namespace Player
namespace Clip { namespace Clip {
class Controller; class Controller;
} // namespace Clip } // namespace Clip
@ -45,8 +48,6 @@ namespace Notify {
struct PeerUpdate; struct PeerUpdate;
} // namespace Notify } // namespace Notify
struct AudioPlaybackState;
class MediaView : public TWidget, private base::Subscriber, public RPCSender, public ClickHandlerHost { class MediaView : public TWidget, private base::Subscriber, public RPCSender, public ClickHandlerHost {
Q_OBJECT Q_OBJECT
@ -170,7 +171,7 @@ private:
void updateCursor(); void updateCursor();
void setZoomLevel(int newZoom); void setZoomLevel(int newZoom);
void updateVideoPlaybackState(const AudioPlaybackState &state); void updateVideoPlaybackState(const Media::Player::TrackState &state);
void updateSilentVideoPlaybackState(); void updateSilentVideoPlaybackState();
void restartVideoAtSeekPosition(TimeMs positionMs); void restartVideoAtSeekPosition(TimeMs positionMs);

View File

@ -649,12 +649,12 @@ bool Voice::updateStatusText() {
statusSize = FileStatusSizeFailed; statusSize = FileStatusSizeFailed;
} else if (_data->loaded()) { } else if (_data->loaded()) {
statusSize = FileStatusSizeLoaded; statusSize = FileStatusSizeLoaded;
AudioMsgId playing; using State = Media::Player::State;
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Voice); auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (playing == AudioMsgId(_data, _parent->fullId()) && !(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) { if (state.id == AudioMsgId(_data, _parent->fullId()) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
statusSize = -1 - (playbackState.position / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency)); statusSize = -1 - (state.position / state.frequency);
realDuration = playbackState.duration / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency); realDuration = (state.duration / state.frequency);
showPause = (playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerResuming || playbackState.state == AudioPlayerStarting); showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
} }
} else { } else {
statusSize = FileStatusSizeReady; statusSize = FileStatusSizeReady;
@ -933,14 +933,14 @@ bool Document::updateStatusText() {
} else if (_data->loaded()) { } else if (_data->loaded()) {
if (_data->song()) { if (_data->song()) {
statusSize = FileStatusSizeLoaded; statusSize = FileStatusSizeLoaded;
AudioMsgId playing; using State = Media::Player::State;
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Song); auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
if (playing == AudioMsgId(_data, _parent->fullId()) && !(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) { if (state.id == AudioMsgId(_data, _parent->fullId()) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
statusSize = -1 - (playbackState.position / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency)); statusSize = -1 - (state.position / state.frequency);
realDuration = playbackState.duration / (playbackState.frequency ? playbackState.frequency : AudioVoiceMsgFrequency); realDuration = (state.duration / state.frequency);
showPause = (playbackState.state == AudioPlayerPlaying || playbackState.state == AudioPlayerResuming || playbackState.state == AudioPlayerStarting); showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
} }
if (!showPause && (playing == AudioMsgId(_data, _parent->fullId())) && Media::Player::instance()->isSeeking()) { if (!showPause && (state.id == AudioMsgId(_data, _parent->fullId())) && Media::Player::instance()->isSeeking()) {
showPause = true; showPause = true;
} }
} else { } else {

View File

@ -2092,10 +2092,9 @@ int32 OverviewWidget::lastScrollTop() const {
int32 OverviewWidget::countBestScroll() const { int32 OverviewWidget::countBestScroll() const {
if (type() == OverviewMusicFiles) { if (type() == OverviewMusicFiles) {
AudioMsgId playing; auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Song); if (state.id) {
if (playing) { int32 top = _inner->itemTop(state.id.contextId());
int32 top = _inner->itemTop(playing.contextId());
if (top >= 0) { if (top >= 0) {
return snap(top - int(_scroll->height() - (st::msgPadding.top() + st::mediaThumbSize + st::msgPadding.bottom())) / 2, 0, _scroll->scrollTopMax()); return snap(top - int(_scroll->height() - (st::msgPadding.top() + st::mediaThumbSize + st::msgPadding.bottom())) / 2, 0, _scroll->scrollTopMax());
} }

View File

@ -0,0 +1,37 @@
/*
This file is part of Telegram Desktop,
the official desktop version of Telegram messaging app, see https://telegram.org
Telegram Desktop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
In addition, as a special exception, the copyright holders give permission
to link the code of portions of this program with the OpenSSL library.
Full license: https://github.com/telegramdesktop/tdesktop/blob/master/LICENSE
Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
*/
#pragma once
#if defined Q_OS_MAC || defined Q_OS_LINUX
namespace Platform {
namespace Audio {
inline void Init() {
}
inline void DeInit() {
}
} // namespace Audio
} // namespace Platform
#elif defined Q_OS_WINRT || defined Q_OS_WIN // Q_OS_MAC || Q_OS_LINUX
#include "platform/win/audio_win.h"
#endif // Q_OS_MAC || Q_OS_LINUX || Q_OS_WINRT || Q_OS_WIN

View File

@ -0,0 +1,157 @@
/*
This file is part of Telegram Desktop,
the official desktop version of Telegram messaging app, see https://telegram.org
Telegram Desktop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
In addition, as a special exception, the copyright holders give permission
to link the code of portions of this program with the OpenSSL library.
Full license: https://github.com/telegramdesktop/tdesktop/blob/master/LICENSE
Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
*/
#include "stdafx.h"
#include "platform/win/audio_win.h"
#include "media/media_audio.h"
#include <mmdeviceapi.h>
#include <audioclient.h>
#include <wrl\client.h>
using namespace Microsoft::WRL;
namespace Platform {
namespace Audio {
namespace {
// Inspired by Chromium.
class DeviceListener : public IMMNotificationClient {
public:
DeviceListener() = default;
DeviceListener(const DeviceListener &other) = delete;
DeviceListener &operator=(const DeviceListener &other) = delete;
virtual ~DeviceListener() = default;
private:
// IMMNotificationClient implementation.
STDMETHOD_(ULONG, AddRef)() override {
return 1;
}
STDMETHOD_(ULONG, Release)() override {
return 1;
}
STDMETHOD(QueryInterface)(REFIID iid, void** object) override;
STDMETHOD(OnPropertyValueChanged)(LPCWSTR device_id, const PROPERTYKEY key) override;
STDMETHOD(OnDeviceAdded)(LPCWSTR device_id) override {
return S_OK;
}
STDMETHOD(OnDeviceRemoved)(LPCWSTR device_id) override {
return S_OK;
}
STDMETHOD(OnDeviceStateChanged)(LPCWSTR device_id, DWORD new_state) override;
STDMETHOD(OnDefaultDeviceChanged)(EDataFlow flow, ERole role, LPCWSTR new_default_device_id) override;
};
STDMETHODIMP DeviceListener::QueryInterface(REFIID iid, void** object) {
if (iid == IID_IUnknown || iid == __uuidof(IMMNotificationClient)) {
*object = static_cast<IMMNotificationClient*>(this);
return S_OK;
}
*object = NULL;
return E_NOINTERFACE;
}
STDMETHODIMP DeviceListener::OnPropertyValueChanged(LPCWSTR device_id, const PROPERTYKEY key) {
LOG(("Audio Info: OnPropertyValueChanged() scheduling detach from audio device."));
Media::Player::DetachFromDeviceByTimer();
return S_OK;
}
STDMETHODIMP DeviceListener::OnDeviceStateChanged(LPCWSTR device_id, DWORD new_state) {
LOG(("Audio Info: OnDeviceStateChanged() scheduling detach from audio device."));
Media::Player::DetachFromDeviceByTimer();
return S_OK;
}
STDMETHODIMP DeviceListener::OnDefaultDeviceChanged(EDataFlow flow, ERole role, LPCWSTR new_default_device_id) {
// Only listen for console and communication device changes.
if ((role != eConsole && role != eCommunications) || (flow != eRender && flow != eCapture)) {
LOG(("Audio Info: skipping OnDefaultDeviceChanged() flow %1, role %2, new_default_device_id: %3").arg(flow).arg(role).arg(new_default_device_id ? '"' + QString::fromWCharArray(new_default_device_id) + '"' : QString("nullptr")));
return S_OK;
}
LOG(("Audio Info: OnDefaultDeviceChanged() scheduling detach from audio device, flow %1, role %2, new_default_device_id: %3").arg(flow).arg(role).arg(new_default_device_id ? '"' + QString::fromWCharArray(new_default_device_id) + '"' : QString("nullptr")));
Media::Player::DetachFromDeviceByTimer();
return S_OK;
}
auto WasCoInitialized = false;
ComPtr<IMMDeviceEnumerator> Enumerator;
DeviceListener *Listener = nullptr;
} // namespace
void Init() {
auto hr = CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&Enumerator));
if (FAILED(hr)) {
Enumerator.Reset();
if (hr == CO_E_NOTINITIALIZED) {
LOG(("Audio Info: CoCreateInstance fails with CO_E_NOTINITIALIZED"));
hr = CoInitialize(nullptr);
if (SUCCEEDED(hr)) {
WasCoInitialized = true;
hr = CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&Enumerator));
if (FAILED(hr)) {
Enumerator.Reset();
LOG(("Audio Error: could not CoCreateInstance of MMDeviceEnumerator, HRESULT: %1").arg(hr));
return;
}
}
} else {
LOG(("Audio Error: could not CoCreateInstance of MMDeviceEnumerator, HRESULT: %1").arg(hr));
return;
}
}
Listener = new DeviceListener();
hr = Enumerator->RegisterEndpointNotificationCallback(Listener);
if (FAILED(hr)) {
LOG(("Audio Error: RegisterEndpointNotificationCallback failed, HRESULT: %1").arg(hr));
delete base::take(Listener);
}
}
void DeInit() {
if (Enumerator) {
if (Listener) {
auto hr = Enumerator->UnregisterEndpointNotificationCallback(Listener);
if (FAILED(hr)) {
LOG(("Audio Error: UnregisterEndpointNotificationCallback failed, HRESULT: %1").arg(hr));
}
delete base::take(Listener);
}
Enumerator.Reset();
}
if (WasCoInitialized) {
CoUninitialize();
}
AUDCLNT_E_NOT_INITIALIZED;
}
} // namespace Audio
} // namespace Platform

View File

@ -0,0 +1,31 @@
/*
This file is part of Telegram Desktop,
the official desktop version of Telegram messaging app, see https://telegram.org
Telegram Desktop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
In addition, as a special exception, the copyright holders give permission
to link the code of portions of this program with the OpenSSL library.
Full license: https://github.com/telegramdesktop/tdesktop/blob/master/LICENSE
Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
*/
#pragma once
namespace Platform {
namespace Audio {
void Init();
void DeInit();
} // namespace Audio
} // namespace Platform

View File

@ -23,6 +23,7 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#include "application.h" #include "application.h"
#include "localstorage.h" #include "localstorage.h"
#include "media/player/media_player_instance.h" #include "media/player/media_player_instance.h"
#include "media/media_audio.h"
#include "platform/mac/mac_utilities.h" #include "platform/mac/mac_utilities.h"
#include "styles/style_window.h" #include "styles/style_window.h"
#include "lang.h" #include "lang.h"
@ -125,6 +126,9 @@ ApplicationDelegate *_sharedDelegate = nil;
- (void)receiveWakeNote:(NSNotification*)aNotification { - (void)receiveWakeNote:(NSNotification*)aNotification {
if (App::app()) App::app()->checkLocalTime(); if (App::app()) App::app()->checkLocalTime();
LOG(("Audio Info: -receiveWakeNote: received, scheduling detach from audio device"));
Media::Player::DetachFromDeviceByTimer();
} }
- (void)setWatchingMediaKeys:(BOOL)watching { - (void)setWatchingMediaKeys:(BOOL)watching {
@ -216,27 +220,21 @@ bool objc_handleMediaKeyEvent(void *ev) {
switch (keyCode) { switch (keyCode) {
case NX_KEYTYPE_PLAY: case NX_KEYTYPE_PLAY:
if (keyState == 0) { // Play pressed and released if (keyState == 0) { // Play pressed and released
if (Media::Player::exists()) { Media::Player::instance()->playPause();
Media::Player::instance()->playPause();
}
return true; return true;
} }
break; break;
case NX_KEYTYPE_FAST: case NX_KEYTYPE_FAST:
if (keyState == 0) { // Next pressed and released if (keyState == 0) { // Next pressed and released
if (Media::Player::exists()) { Media::Player::instance()->next();
Media::Player::instance()->next();
}
return true; return true;
} }
break; break;
case NX_KEYTYPE_REWIND: case NX_KEYTYPE_REWIND:
if (keyState == 0) { // Previous pressed and released if (keyState == 0) { // Previous pressed and released
if (Media::Player::exists()) { Media::Player::instance()->previous();
Media::Player::instance()->previous();
}
return true; return true;
} }
break; break;

View File

@ -1174,10 +1174,10 @@ void DocumentOpenClickHandler::doOpen(DocumentData *data, HistoryItem *context,
} }
} }
if (!location.isEmpty() || (!data->data().isEmpty() && (playVoice || playMusic || playVideo || playAnimation))) { if (!location.isEmpty() || (!data->data().isEmpty() && (playVoice || playMusic || playVideo || playAnimation))) {
using State = Media::Player::State;
if (playVoice) { if (playVoice) {
AudioMsgId playing; auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Voice); if (state.id == AudioMsgId(data, msgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
if (playing == AudioMsgId(data, msgId) && !(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) {
Media::Player::mixer()->pauseresume(AudioMsgId::Type::Voice); Media::Player::mixer()->pauseresume(AudioMsgId::Type::Voice);
} else { } else {
auto audio = AudioMsgId(data, msgId); auto audio = AudioMsgId(data, msgId);
@ -1188,9 +1188,8 @@ void DocumentOpenClickHandler::doOpen(DocumentData *data, HistoryItem *context,
} }
} }
} else if (playMusic) { } else if (playMusic) {
AudioMsgId playing; auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Song); if (state.id == AudioMsgId(data, msgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
if (playing == AudioMsgId(data, msgId) && !(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) {
Media::Player::mixer()->pauseresume(AudioMsgId::Type::Song); Media::Player::mixer()->pauseresume(AudioMsgId::Type::Song);
} else { } else {
auto song = AudioMsgId(data, msgId); auto song = AudioMsgId(data, msgId);
@ -1480,25 +1479,24 @@ void DocumentData::performActionOnLoad() {
return; return;
} }
} }
using State = Media::Player::State;
if (playVoice) { if (playVoice) {
if (loaded()) { if (loaded()) {
AudioMsgId playing; auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Voice); if (state.id == AudioMsgId(this, _actionOnLoadMsgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
if (playing == AudioMsgId(this, _actionOnLoadMsgId) && !(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) {
Media::Player::mixer()->pauseresume(AudioMsgId::Type::Voice); Media::Player::mixer()->pauseresume(AudioMsgId::Type::Voice);
} else if (playbackState.state & AudioPlayerStoppedMask) { } else if (Media::Player::IsStopped(state.state)) {
Media::Player::mixer()->play(AudioMsgId(this, _actionOnLoadMsgId)); Media::Player::mixer()->play(AudioMsgId(this, _actionOnLoadMsgId));
if (App::main()) App::main()->mediaMarkRead(this); if (App::main()) App::main()->mediaMarkRead(this);
} }
} }
} else if (playMusic) { } else if (playMusic) {
if (loaded()) { if (loaded()) {
AudioMsgId playing; auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
auto playbackState = Media::Player::mixer()->currentState(&playing, AudioMsgId::Type::Song); if (state.id == AudioMsgId(this, _actionOnLoadMsgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
if (playing == AudioMsgId(this, _actionOnLoadMsgId) && !(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) {
Media::Player::mixer()->pauseresume(AudioMsgId::Type::Song); Media::Player::mixer()->pauseresume(AudioMsgId::Type::Song);
} else if (playbackState.state & AudioPlayerStoppedMask) { } else if (Media::Player::IsStopped(state.state)) {
AudioMsgId song(this, _actionOnLoadMsgId); auto song = AudioMsgId(this, _actionOnLoadMsgId);
Media::Player::mixer()->play(song); Media::Player::mixer()->play(song);
Media::Player::Updated().notify(song); Media::Player::Updated().notify(song);
} }

View File

@ -30,7 +30,7 @@ windowDefaultHeight: 600px;
windowShadow: icon {{ "window_shadow", windowShadowFg }}; windowShadow: icon {{ "window_shadow", windowShadowFg }};
windowShadowShift: 1px; windowShadowShift: 1px;
adaptiveChatWideWidth: 860px; adaptiveChatWideWidth: 880px;
notifyBorder: windowShadowFgFallback; notifyBorder: windowShadowFgFallback;
notifyBorderWidth: 1px; notifyBorderWidth: 1px;

View File

@ -3,4 +3,4 @@ AppVersionStrMajor 1.0
AppVersionStrSmall 1.0.2 AppVersionStrSmall 1.0.2
AppVersionStr 1.0.2 AppVersionStr 1.0.2
AlphaChannel 0 AlphaChannel 0
BetaVersion 0 BetaVersion 1000002001

View File

@ -380,6 +380,8 @@
'<(src_loc)/platform/mac/notifications_manager_mac.h', '<(src_loc)/platform/mac/notifications_manager_mac.h',
'<(src_loc)/platform/mac/window_title_mac.mm', '<(src_loc)/platform/mac/window_title_mac.mm',
'<(src_loc)/platform/mac/window_title_mac.h', '<(src_loc)/platform/mac/window_title_mac.h',
'<(src_loc)/platform/win/audio_win.cpp',
'<(src_loc)/platform/win/audio_win.h',
'<(src_loc)/platform/win/main_window_win.cpp', '<(src_loc)/platform/win/main_window_win.cpp',
'<(src_loc)/platform/win/main_window_win.h', '<(src_loc)/platform/win/main_window_win.h',
'<(src_loc)/platform/win/notifications_manager_win.cpp', '<(src_loc)/platform/win/notifications_manager_win.cpp',
@ -392,6 +394,7 @@
'<(src_loc)/platform/win/windows_dlls.h', '<(src_loc)/platform/win/windows_dlls.h',
'<(src_loc)/platform/win/windows_event_filter.cpp', '<(src_loc)/platform/win/windows_event_filter.cpp',
'<(src_loc)/platform/win/windows_event_filter.h', '<(src_loc)/platform/win/windows_event_filter.h',
'<(src_loc)/platform/platform_audio.h',
'<(src_loc)/platform/platform_file_dialog.h', '<(src_loc)/platform/platform_file_dialog.h',
'<(src_loc)/platform/platform_main_window.h', '<(src_loc)/platform/platform_main_window.h',
'<(src_loc)/platform/platform_notifications_manager.h', '<(src_loc)/platform/platform_notifications_manager.h',
@ -644,12 +647,14 @@
'sources!': [ 'sources!': [
'<(src_loc)/pspecific_win.cpp', '<(src_loc)/pspecific_win.cpp',
'<(src_loc)/pspecific_win.h', '<(src_loc)/pspecific_win.h',
'<(src_loc)/platform/win/audio_win.cpp',
'<(src_loc)/platform/win/audio_win.h',
'<(src_loc)/platform/win/main_window_win.cpp', '<(src_loc)/platform/win/main_window_win.cpp',
'<(src_loc)/platform/win/main_window_win.h', '<(src_loc)/platform/win/main_window_win.h',
'<(src_loc)/platform/win/notifications_manager_win.cpp', '<(src_loc)/platform/win/notifications_manager_win.cpp',
'<(src_loc)/platform/win/notifications_manager_win.h', '<(src_loc)/platform/win/notifications_manager_win.h',
'<(src_loc)/platform/win/window_title_win.cpp', '<(src_loc)/platform/win/window_title_win.cpp',
'<(src_loc)/platform/win/window_title_win.h', '<(src_loc)/platform/win/window_title_win.h',
'<(src_loc)/platform/win/windows_app_user_model_id.cpp', '<(src_loc)/platform/win/windows_app_user_model_id.cpp',
'<(src_loc)/platform/win/windows_app_user_model_id.h', '<(src_loc)/platform/win/windows_app_user_model_id.h',
'<(src_loc)/platform/win/windows_dlls.cpp', '<(src_loc)/platform/win/windows_dlls.cpp',